docs-cache 0.5.5 → 0.5.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -27,31 +27,38 @@ Documentation is cached in a gitignored location, exposed to agent and tool targ
27
27
  # Initialize (optional)
28
28
  npx docs-cache init
29
29
 
30
- # Add Sources
30
+ # Add source(s)
31
31
  npx docs-cache add github:owner/repo#main
32
- npx docs-cache add gitlab:framework/core
33
- npx docs-cache add https://github.com/framework/core.git
34
- npx docs-cache add framework/core framework/other-repo
35
32
 
36
- # Sync
33
+ # Sync and lock
37
34
  npx docs-cache sync
35
+ npx docs-cache sync --frozen
38
36
 
39
- # Verify Integrity
40
- npx docs-cache verify
41
-
42
- # Check Status
43
- npx docs-cache status
37
+ # Refresh tracked refs (write lock/materialized output)
38
+ npx docs-cache update <source-id>
39
+ npx docs-cache update --all --dry-run
44
40
 
45
- # Removal
46
- npx docs-cache remove core
47
- npx docs-cache remove framework/other-repo --prune
41
+ # Optional: pin config ref(s) to commit SHA
42
+ npx docs-cache pin <source-id>
48
43
 
49
- # Clean
44
+ # Inspect / maintain
45
+ npx docs-cache verify
46
+ npx docs-cache status
47
+ npx docs-cache remove <source-id>
50
48
  npx docs-cache clean
51
49
  ```
52
50
 
53
51
  > for more options: `npx docs-cache --help`
54
52
 
53
+ ## Recommended Workflow
54
+
55
+ Use this flow to keep behavior predictable (similar to package manager manifest + lock workflows):
56
+
57
+ 1. Keep source intent in config (`ref: "main"`, `ref: "v1"`, or a commit SHA).
58
+ 2. Run `npx docs-cache update <id...>` (or `--all`) to refresh selected sources and lock data.
59
+ 3. Use `npx docs-cache sync --frozen` in CI to fail fast when lock data drifts.
60
+ 4. Use `npx docs-cache pin <id...>` only when you explicitly want to rewrite config refs to commit SHAs.
61
+
55
62
  ## Configuration
56
63
 
57
64
  `docs.config.json` at project root (or a `docs-cache` field in `package.json`):
package/dist/cli.mjs CHANGED
@@ -1,9 +1,11 @@
1
- import p from"node:path";import r from"node:process";import d from"picocolors";import{ExitCode as l}from"#cli/exit-code";import{parseArgs as w}from"#cli/parse-args";import{setSilentMode as y,setVerboseMode as v,symbols as a,ui as o}from"#cli/ui";const h="docs-cache",j=`
2
- Usage: ${h} <command> [options]
1
+ import l from"node:path";import a from"node:process";import d from"picocolors";import{ExitCode as g}from"#cli/exit-code";import{parseArgs as w}from"#cli/parse-args";import{setSilentMode as y,setVerboseMode as v,symbols as r,ui as o}from"#cli/ui";const u="docs-cache",j=`
2
+ Usage: ${u} <command> [options]
3
3
 
4
4
  Commands:
5
5
  add Add sources to the config (supports github:org/repo#ref)
6
6
  remove Remove sources from the config and targets
7
+ pin Pin source refs to current commits
8
+ update Refresh selected sources and lock data
7
9
  sync Synchronize cache with config
8
10
  status Show cache status
9
11
  clean Remove project cache
@@ -15,6 +17,7 @@ Commands:
15
17
  Global options:
16
18
  --config <path>
17
19
  --cache-dir <path>
20
+ --frozen
18
21
  --offline
19
22
  --fail-on-miss
20
23
  --lock-only
@@ -29,15 +32,25 @@ Add options:
29
32
  --target <dir>
30
33
  --target-dir <path>
31
34
  --id <id>
32
- `,f=()=>{r.stdout.write(j.trimStart())},m=i=>{r.stderr.write(`${a.error} ${i}
33
- `)},O=async i=>{const e=i.options,{addSources:s}=await import("#commands/add"),{runSync:n}=await import("#commands/sync");if(i.entries.length===0)throw new Error("Usage: docs-cache add [--source <repo> --target <dir>] <repo...>");const t=await s({configPath:e.config,entries:i.entries});if(e.offline?e.json||o.line(`${a.warn} Offline: skipped sync`):await n({configPath:e.config,cacheDirOverride:e.cacheDir,json:e.json,lockOnly:e.lockOnly,offline:e.offline,failOnMiss:e.failOnMiss,sourceFilter:t.sources.map(c=>c.id),timeoutMs:e.timeoutMs,verbose:e.verbose}),e.json){r.stdout.write(`${JSON.stringify(t,null,2)}
34
- `);return}for(const c of t.sources){const g=c.repo.replace(/^https?:\/\//,"").replace(/\.git$/,""),$=c.targetDir?` ${d.dim("->")} ${d.magenta(c.targetDir)}`:"";o.item(a.success,c.id,`${d.blue(g)}${$}`)}t.skipped?.length&&o.line(`${a.warn} Skipped ${t.skipped.length} existing source${t.skipped.length===1?"":"s"}: ${t.skipped.join(", ")}`),o.line(`${a.info} Updated ${d.gray(p.relative(r.cwd(),t.configPath)||"docs.config.json")}`),t.gitignoreUpdated&&t.gitignorePath&&o.line(`${a.info} Updated ${d.gray(o.path(t.gitignorePath))}`)},D=async i=>{const e=i.options,{removeSources:s}=await import("#commands/remove"),{pruneCache:n}=await import("#commands/prune");if(i.ids.length===0)throw new Error("Usage: docs-cache remove <id...>");const t=await s({configPath:e.config,ids:i.ids});if(e.json){r.stdout.write(`${JSON.stringify(t,null,2)}
35
- `);return}if(t.removed.length>0&&o.line(`${a.success} Removed ${t.removed.length} source${t.removed.length===1?"":"s"}: ${t.removed.join(", ")}`),t.missing.length>0&&o.line(`${a.warn} Missing ${t.missing.length} source${t.missing.length===1?"":"s"}: ${t.missing.join(", ")}`),t.targetsRemoved.length>0){const c=t.targetsRemoved.map(g=>`${g.id} -> ${o.path(g.targetDir)}`).join(", ");o.line(`${a.success} Removed ${t.targetsRemoved.length} target${t.targetsRemoved.length===1?"":"s"}: ${c}`)}o.line(`${a.info} Updated ${d.gray(p.relative(r.cwd(),t.configPath)||"docs.config.json")}`),e.prune&&await n({configPath:e.config,cacheDirOverride:e.cacheDir,json:e.json})},S=async i=>{const e=i.options,{getStatus:s,printStatus:n}=await import("#commands/status"),t=await s({configPath:e.config,cacheDirOverride:e.cacheDir,json:e.json});if(e.json){r.stdout.write(`${JSON.stringify(t,null,2)}
36
- `);return}n(t)},P=async i=>{const e=i.options,{cleanCache:s}=await import("#commands/clean"),n=await s({configPath:e.config,cacheDirOverride:e.cacheDir,json:e.json});if(e.json){r.stdout.write(`${JSON.stringify(n,null,2)}
37
- `);return}if(n.removed){o.line(`${a.success} Removed cache at ${o.path(n.cacheDir)}`);return}o.line(`${a.info} Cache already missing at ${o.path(n.cacheDir)}`)},C=async i=>{const e=i.options,{cleanGitCache:s}=await import("#commands/clean-git-cache"),n=await s();if(e.json){r.stdout.write(`${JSON.stringify(n,null,2)}
38
- `);return}if(!n.removed){o.line(`${a.info} Global git cache already empty at ${o.path(n.cacheDir)}`);return}const t=n.bytesFreed!==void 0?`${(n.bytesFreed/1024/1024).toFixed(2)} MB`:"unknown size",c=n.repoCount!==void 0?` (${n.repoCount} cached repositor${n.repoCount===1?"y":"ies"})`:"";o.line(`${a.success} Cleared global git cache${c}: ${t} freed`),o.line(`${a.info} Cache location: ${o.path(n.cacheDir)}`)},b=async i=>{const e=i.options,{pruneCache:s}=await import("#commands/prune"),n=await s({configPath:e.config,cacheDirOverride:e.cacheDir,json:e.json});if(e.json){r.stdout.write(`${JSON.stringify(n,null,2)}
39
- `);return}if(n.removed.length===0){o.line(`${a.info} No cache entries to prune.`);return}o.line(`${a.success} Pruned ${n.removed.length} cache entr${n.removed.length===1?"y":"ies"}: ${n.removed.join(", ")}`)},k=async i=>{const e=i.options,{printSyncPlan:s,runSync:n}=await import("#commands/sync"),t=await n({configPath:e.config,cacheDirOverride:e.cacheDir,json:e.json,lockOnly:e.lockOnly,offline:e.offline,failOnMiss:e.failOnMiss,timeoutMs:e.timeoutMs,verbose:e.verbose});if(e.json){r.stdout.write(`${JSON.stringify(t,null,2)}
40
- `);return}s(t)},M=async i=>{const e=i.options,{printVerify:s,verifyCache:n}=await import("#commands/verify"),t=await n({configPath:e.config,cacheDirOverride:e.cacheDir,json:e.json});e.json?r.stdout.write(`${JSON.stringify(t,null,2)}
41
- `):s(t),t.results.some(c=>!c.ok)&&r.exit(l.FatalError)},x=async i=>{const e=i.options,{initConfig:s}=await import("#commands/init");if(e.config)throw new Error("Init does not accept --config. Use the project root.");const n=await s({cacheDirOverride:e.cacheDir,json:e.json});if(e.json){r.stdout.write(`${JSON.stringify(n,null,2)}
42
- `);return}o.line(`${a.success} Wrote ${d.gray(o.path(n.configPath))}`),n.gitignoreUpdated&&n.gitignorePath&&o.line(`${a.info} Updated ${d.gray(o.path(n.gitignorePath))}`)},N=async i=>{switch(i.command){case"add":await O(i);return;case"remove":await D(i);return;case"status":await S(i);return;case"clean":await P(i);return;case"clean-cache":await C(i);return;case"prune":await b(i);return;case"sync":await k(i);return;case"verify":await M(i);return;case"init":await x(i);return;default:o.line(`${h} ${i.command}: not implemented yet.`)}};async function R(){try{r.on("uncaughtException",u),r.on("unhandledRejection",u);const i=w();y(i.options.silent),v(i.options.verbose),i.help&&(f(),r.exit(l.Success)),i.command||(f(),r.exit(l.InvalidArgument)),i.command!=="add"&&i.command!=="remove"&&i.positionals.length>0&&(m(`${h}: unexpected arguments.`),f(),r.exit(l.InvalidArgument)),await N(i.parsed)}catch(i){u(i)}}function u(i){const e=i instanceof Error?i.message:String(i);m(e),r.exit(l.FatalError)}export{h as CLI_NAME,R as main};
35
+
36
+ Pin options:
37
+ --all
38
+ --dry-run
39
+
40
+ Update options:
41
+ --all
42
+ --dry-run
43
+ `,h=()=>{a.stdout.write(j.trimStart())},m=i=>{a.stderr.write(`${r.error} ${i}
44
+ `)},O=async i=>{const e=i.options,{addSources:s}=await import("#commands/add"),{runSync:t}=await import("#commands/sync");if(i.entries.length===0)throw new Error("Usage: docs-cache add [--source <repo> --target <dir>] <repo...>");const n=await s({configPath:e.config,entries:i.entries});if(e.offline?e.json||o.line(`${r.warn} Offline: skipped sync`):await t({configPath:e.config,cacheDirOverride:e.cacheDir,json:e.json,lockOnly:e.lockOnly,offline:e.offline,failOnMiss:e.failOnMiss,sourceFilter:n.sources.map(c=>c.id),timeoutMs:e.timeoutMs,verbose:e.verbose}),e.json){a.stdout.write(`${JSON.stringify(n,null,2)}
45
+ `);return}for(const c of n.sources){const f=c.repo.replace(/^https?:\/\//,"").replace(/\.git$/,""),$=c.targetDir?` ${d.dim("->")} ${d.magenta(c.targetDir)}`:"";o.item(r.success,c.id,`${d.blue(f)}${$}`)}n.skipped?.length&&o.line(`${r.warn} Skipped ${n.skipped.length} existing source${n.skipped.length===1?"":"s"}: ${n.skipped.join(", ")}`),o.line(`${r.info} Updated ${d.gray(l.relative(a.cwd(),n.configPath)||"docs.config.json")}`),n.gitignoreUpdated&&n.gitignorePath&&o.line(`${r.info} Updated ${d.gray(o.path(n.gitignorePath))}`)},S=async i=>{const e=i.options,{removeSources:s}=await import("#commands/remove"),{pruneCache:t}=await import("#commands/prune");if(i.ids.length===0)throw new Error("Usage: docs-cache remove <id...>");const n=await s({configPath:e.config,ids:i.ids});if(e.json){a.stdout.write(`${JSON.stringify(n,null,2)}
46
+ `);return}if(n.removed.length>0&&o.line(`${r.success} Removed ${n.removed.length} source${n.removed.length===1?"":"s"}: ${n.removed.join(", ")}`),n.missing.length>0&&o.line(`${r.warn} Missing ${n.missing.length} source${n.missing.length===1?"":"s"}: ${n.missing.join(", ")}`),n.targetsRemoved.length>0){const c=n.targetsRemoved.map(f=>`${f.id} -> ${o.path(f.targetDir)}`).join(", ");o.line(`${r.success} Removed ${n.targetsRemoved.length} target${n.targetsRemoved.length===1?"":"s"}: ${c}`)}o.line(`${r.info} Updated ${d.gray(l.relative(a.cwd(),n.configPath)||"docs.config.json")}`),e.prune&&await t({configPath:e.config,cacheDirOverride:e.cacheDir,json:e.json})},D=async i=>{const e=i.options;if(e.offline)throw new Error("Pin does not support --offline.");if(!e.all&&i.ids.length===0)throw new Error("Usage: docs-cache pin <id...> [--all]");const{pinSources:s}=await import("#commands/pin"),t=await s({configPath:e.config,ids:i.ids,all:e.all,dryRun:e.dryRun,timeoutMs:e.timeoutMs});if(e.json){a.stdout.write(`${JSON.stringify(t,null,2)}
47
+ `);return}for(const n of t.updated)o.item(r.success,n.id,`${n.fromRef} -> ${n.toRef}`);for(const n of t.unchanged)o.item(r.info,n,"already pinned");if(t.missing.length>0&&o.line(`${r.warn} Missing ${t.missing.length} source${t.missing.length===1?"":"s"}: ${t.missing.join(", ")}`),t.dryRun){o.line(`${r.info} Dry run: no changes written to ${d.gray(l.relative(a.cwd(),t.configPath)||"docs.config.json")}`);return}o.line(`${r.info} Updated ${d.gray(l.relative(a.cwd(),t.configPath)||"docs.config.json")}`)},P=async i=>{const e=i.options;if(e.offline)throw new Error("Update does not support --offline.");if(!e.all&&i.ids.length===0)throw new Error("Usage: docs-cache update <id...> [--all]");const{printSyncPlan:s}=await import("#commands/sync"),{updateSources:t}=await import("#commands/update"),n=await t({configPath:e.config,cacheDirOverride:e.cacheDir,ids:i.ids,all:e.all,dryRun:e.dryRun,json:e.json,lockOnly:e.lockOnly,failOnMiss:e.failOnMiss,timeoutMs:e.timeoutMs,verbose:e.verbose,concurrency:e.concurrency,frozen:e.frozen});if(e.json){a.stdout.write(`${JSON.stringify(n,null,2)}
48
+ `);return}s(n.plan),n.missing.length>0&&o.line(`${r.warn} Missing ${n.missing.length} source${n.missing.length===1?"":"s"}: ${n.missing.join(", ")}`),n.dryRun&&o.line(`${r.info} Dry run: no changes written to ${d.gray(l.relative(a.cwd(),n.plan.configPath)||"docs.config.json")}`)},M=async i=>{const e=i.options,{getStatus:s,printStatus:t}=await import("#commands/status"),n=await s({configPath:e.config,cacheDirOverride:e.cacheDir,json:e.json});if(e.json){a.stdout.write(`${JSON.stringify(n,null,2)}
49
+ `);return}t(n)},R=async i=>{const e=i.options,{cleanCache:s}=await import("#commands/clean"),t=await s({configPath:e.config,cacheDirOverride:e.cacheDir,json:e.json});if(e.json){a.stdout.write(`${JSON.stringify(t,null,2)}
50
+ `);return}if(t.removed){o.line(`${r.success} Removed cache at ${o.path(t.cacheDir)}`);return}o.line(`${r.info} Cache already missing at ${o.path(t.cacheDir)}`)},b=async i=>{const e=i.options,{cleanGitCache:s}=await import("#commands/clean-git-cache"),t=await s();if(e.json){a.stdout.write(`${JSON.stringify(t,null,2)}
51
+ `);return}if(!t.removed){o.line(`${r.info} Global git cache already empty at ${o.path(t.cacheDir)}`);return}const n=t.bytesFreed!==void 0?`${(t.bytesFreed/1024/1024).toFixed(2)} MB`:"unknown size",c=t.repoCount!==void 0?` (${t.repoCount} cached repositor${t.repoCount===1?"y":"ies"})`:"";o.line(`${r.success} Cleared global git cache${c}: ${n} freed`),o.line(`${r.info} Cache location: ${o.path(t.cacheDir)}`)},C=async i=>{const e=i.options,{pruneCache:s}=await import("#commands/prune"),t=await s({configPath:e.config,cacheDirOverride:e.cacheDir,json:e.json});if(e.json){a.stdout.write(`${JSON.stringify(t,null,2)}
52
+ `);return}if(t.removed.length===0){o.line(`${r.info} No cache entries to prune.`);return}o.line(`${r.success} Pruned ${t.removed.length} cache entr${t.removed.length===1?"y":"ies"}: ${t.removed.join(", ")}`)},k=async i=>{const e=i.options,{printSyncPlan:s,runSync:t}=await import("#commands/sync"),n=i.ids.length>0?i.ids:void 0,c=await t({configPath:e.config,cacheDirOverride:e.cacheDir,json:e.json,lockOnly:e.lockOnly,offline:e.offline,failOnMiss:e.failOnMiss,frozen:e.frozen,sourceFilter:n,timeoutMs:e.timeoutMs,verbose:e.verbose});if(e.json){a.stdout.write(`${JSON.stringify(c,null,2)}
53
+ `);return}s(c)},U=async i=>{const e=i.options,{printVerify:s,verifyCache:t}=await import("#commands/verify"),n=await t({configPath:e.config,cacheDirOverride:e.cacheDir,json:e.json});e.json?a.stdout.write(`${JSON.stringify(n,null,2)}
54
+ `):s(n),n.results.some(c=>!c.ok)&&a.exit(g.FatalError)},E=async i=>{const e=i.options,{initConfig:s}=await import("#commands/init");if(e.config)throw new Error("Init does not accept --config. Use the project root.");const t=await s({cacheDirOverride:e.cacheDir,json:e.json});if(e.json){a.stdout.write(`${JSON.stringify(t,null,2)}
55
+ `);return}o.line(`${r.success} Wrote ${d.gray(o.path(t.configPath))}`),t.gitignoreUpdated&&t.gitignorePath&&o.line(`${r.info} Updated ${d.gray(o.path(t.gitignorePath))}`)},N=async i=>{switch(i.command){case"add":await O(i);return;case"remove":await S(i);return;case"pin":await D(i);return;case"update":await P(i);return;case"status":await M(i);return;case"clean":await R(i);return;case"clean-cache":await b(i);return;case"prune":await C(i);return;case"sync":await k(i);return;case"verify":await U(i);return;case"init":await E(i);return;default:o.line(`${u} ${i.command}: not implemented yet.`)}};async function x(){try{a.on("uncaughtException",p),a.on("unhandledRejection",p);const i=w();y(i.options.silent),v(i.options.verbose),i.help&&(h(),a.exit(g.Success)),i.command||(h(),a.exit(g.InvalidArgument)),i.command!=="add"&&i.command!=="remove"&&i.command!=="pin"&&i.command!=="update"&&i.command!=="sync"&&i.positionals.length>0&&(m(`${u}: unexpected arguments.`),h(),a.exit(g.InvalidArgument)),await N(i.parsed)}catch(i){p(i)}}function p(i){const e=i instanceof Error?i.message:String(i);m(e),a.exit(g.FatalError)}export{u as CLI_NAME,x as main};
43
56
  //# sourceMappingURL=cli.mjs.map
package/dist/esm/api.d.ts CHANGED
@@ -4,9 +4,11 @@ export { parseArgs } from "#cli/parse-args";
4
4
  export { cleanCache } from "#commands/clean";
5
5
  export { cleanGitCache } from "#commands/clean-git-cache";
6
6
  export { initConfig } from "#commands/init";
7
+ export { pinSources } from "#commands/pin";
7
8
  export { pruneCache } from "#commands/prune";
8
9
  export { removeSources } from "#commands/remove";
9
10
  export { printSyncPlan, runSync } from "#commands/sync";
11
+ export { updateSources } from "#commands/update";
10
12
  export { verifyCache } from "#commands/verify";
11
13
  export { loadConfig } from "#config";
12
14
  export { redactRepoUrl } from "#git/redact";
package/dist/esm/api.mjs CHANGED
@@ -4,9 +4,11 @@ export { parseArgs } from "#cli/parse-args";
4
4
  export { cleanCache } from "#commands/clean";
5
5
  export { cleanGitCache } from "#commands/clean-git-cache";
6
6
  export { initConfig } from "#commands/init";
7
+ export { pinSources } from "#commands/pin";
7
8
  export { pruneCache } from "#commands/prune";
8
9
  export { removeSources } from "#commands/remove";
9
10
  export { printSyncPlan, runSync } from "#commands/sync";
11
+ export { updateSources } from "#commands/update";
10
12
  export { verifyCache } from "#commands/verify";
11
13
  export { loadConfig } from "#config";
12
14
  export { redactRepoUrl } from "#git/redact";
@@ -11,6 +11,8 @@ Usage: ${CLI_NAME} <command> [options]
11
11
  Commands:
12
12
  add Add sources to the config (supports github:org/repo#ref)
13
13
  remove Remove sources from the config and targets
14
+ pin Pin source refs to current commits
15
+ update Refresh selected sources and lock data
14
16
  sync Synchronize cache with config
15
17
  status Show cache status
16
18
  clean Remove project cache
@@ -22,6 +24,7 @@ Commands:
22
24
  Global options:
23
25
  --config <path>
24
26
  --cache-dir <path>
27
+ --frozen
25
28
  --offline
26
29
  --fail-on-miss
27
30
  --lock-only
@@ -36,6 +39,14 @@ Add options:
36
39
  --target <dir>
37
40
  --target-dir <path>
38
41
  --id <id>
42
+
43
+ Pin options:
44
+ --all
45
+ --dry-run
46
+
47
+ Update options:
48
+ --all
49
+ --dry-run
39
50
  `;
40
51
  const printHelp = () => {
41
52
  process.stdout.write(HELP_TEXT.trimStart());
@@ -141,6 +152,89 @@ const runRemove = async (parsed) => {
141
152
  });
142
153
  }
143
154
  };
155
+ const runPin = async (parsed) => {
156
+ const options = parsed.options;
157
+ if (options.offline) {
158
+ throw new Error("Pin does not support --offline.");
159
+ }
160
+ if (!options.all && parsed.ids.length === 0) {
161
+ throw new Error("Usage: docs-cache pin <id...> [--all]");
162
+ }
163
+ const { pinSources } = await import("#commands/pin");
164
+ const result = await pinSources({
165
+ configPath: options.config,
166
+ ids: parsed.ids,
167
+ all: options.all,
168
+ dryRun: options.dryRun,
169
+ timeoutMs: options.timeoutMs
170
+ });
171
+ if (options.json) {
172
+ process.stdout.write(`${JSON.stringify(result, null, 2)}
173
+ `);
174
+ return;
175
+ }
176
+ for (const entry of result.updated) {
177
+ ui.item(symbols.success, entry.id, `${entry.fromRef} -> ${entry.toRef}`);
178
+ }
179
+ for (const id of result.unchanged) {
180
+ ui.item(symbols.info, id, "already pinned");
181
+ }
182
+ if (result.missing.length > 0) {
183
+ ui.line(
184
+ `${symbols.warn} Missing ${result.missing.length} source${result.missing.length === 1 ? "" : "s"}: ${result.missing.join(", ")}`
185
+ );
186
+ }
187
+ if (result.dryRun) {
188
+ ui.line(
189
+ `${symbols.info} Dry run: no changes written to ${pc.gray(path.relative(process.cwd(), result.configPath) || "docs.config.json")}`
190
+ );
191
+ return;
192
+ }
193
+ ui.line(
194
+ `${symbols.info} Updated ${pc.gray(path.relative(process.cwd(), result.configPath) || "docs.config.json")}`
195
+ );
196
+ };
197
+ const runUpdate = async (parsed) => {
198
+ const options = parsed.options;
199
+ if (options.offline) {
200
+ throw new Error("Update does not support --offline.");
201
+ }
202
+ if (!options.all && parsed.ids.length === 0) {
203
+ throw new Error("Usage: docs-cache update <id...> [--all]");
204
+ }
205
+ const { printSyncPlan } = await import("#commands/sync");
206
+ const { updateSources } = await import("#commands/update");
207
+ const result = await updateSources({
208
+ configPath: options.config,
209
+ cacheDirOverride: options.cacheDir,
210
+ ids: parsed.ids,
211
+ all: options.all,
212
+ dryRun: options.dryRun,
213
+ json: options.json,
214
+ lockOnly: options.lockOnly,
215
+ failOnMiss: options.failOnMiss,
216
+ timeoutMs: options.timeoutMs,
217
+ verbose: options.verbose,
218
+ concurrency: options.concurrency,
219
+ frozen: options.frozen
220
+ });
221
+ if (options.json) {
222
+ process.stdout.write(`${JSON.stringify(result, null, 2)}
223
+ `);
224
+ return;
225
+ }
226
+ printSyncPlan(result.plan);
227
+ if (result.missing.length > 0) {
228
+ ui.line(
229
+ `${symbols.warn} Missing ${result.missing.length} source${result.missing.length === 1 ? "" : "s"}: ${result.missing.join(", ")}`
230
+ );
231
+ }
232
+ if (result.dryRun) {
233
+ ui.line(
234
+ `${symbols.info} Dry run: no changes written to ${pc.gray(path.relative(process.cwd(), result.plan.configPath) || "docs.config.json")}`
235
+ );
236
+ }
237
+ };
144
238
  const runStatus = async (parsed) => {
145
239
  const options = parsed.options;
146
240
  const { getStatus, printStatus } = await import("#commands/status");
@@ -223,6 +317,7 @@ const runPrune = async (parsed) => {
223
317
  const runSyncCommand = async (parsed) => {
224
318
  const options = parsed.options;
225
319
  const { printSyncPlan, runSync } = await import("#commands/sync");
320
+ const sourceFilter = parsed.ids.length > 0 ? parsed.ids : void 0;
226
321
  const plan = await runSync({
227
322
  configPath: options.config,
228
323
  cacheDirOverride: options.cacheDir,
@@ -230,6 +325,8 @@ const runSyncCommand = async (parsed) => {
230
325
  lockOnly: options.lockOnly,
231
326
  offline: options.offline,
232
327
  failOnMiss: options.failOnMiss,
328
+ frozen: options.frozen,
329
+ sourceFilter,
233
330
  timeoutMs: options.timeoutMs,
234
331
  verbose: options.verbose
235
332
  });
@@ -288,6 +385,12 @@ const runCommand = async (parsed) => {
288
385
  case "remove":
289
386
  await runRemove(parsed);
290
387
  return;
388
+ case "pin":
389
+ await runPin(parsed);
390
+ return;
391
+ case "update":
392
+ await runUpdate(parsed);
393
+ return;
291
394
  case "status":
292
395
  await runStatus(parsed);
293
396
  return;
@@ -328,7 +431,7 @@ export async function main() {
328
431
  printHelp();
329
432
  process.exit(ExitCode.InvalidArgument);
330
433
  }
331
- if (parsed.command !== "add" && parsed.command !== "remove" && parsed.positionals.length > 0) {
434
+ if (parsed.command !== "add" && parsed.command !== "remove" && parsed.command !== "pin" && parsed.command !== "update" && parsed.command !== "sync" && parsed.positionals.length > 0) {
332
435
  printError(`${CLI_NAME}: unexpected arguments.`);
333
436
  printHelp();
334
437
  process.exit(ExitCode.InvalidArgument);
@@ -1,5 +1,5 @@
1
1
  import type { CliCommand, CliOptions } from "./types";
2
- declare const COMMANDS: readonly ["add", "remove", "sync", "status", "clean", "clean-cache", "prune", "verify", "init"];
2
+ declare const COMMANDS: readonly ["add", "remove", "pin", "update", "sync", "status", "clean", "clean-cache", "prune", "verify", "init"];
3
3
  type Command = (typeof COMMANDS)[number];
4
4
  export type ParsedArgs = {
5
5
  command: Command | null;
@@ -4,6 +4,8 @@ import { ExitCode } from "#cli/exit-code";
4
4
  const COMMANDS = [
5
5
  "add",
6
6
  "remove",
7
+ "pin",
8
+ "update",
7
9
  "sync",
8
10
  "status",
9
11
  "clean",
@@ -18,6 +20,7 @@ const ADD_ONLY_OPTIONS = /* @__PURE__ */ new Set([
18
20
  "--target-dir",
19
21
  "--id"
20
22
  ]);
23
+ const SCOPED_SOURCE_OPTIONS = /* @__PURE__ */ new Set(["--all", "--dry-run"]);
21
24
  const POSITIONAL_SKIP_OPTIONS = /* @__PURE__ */ new Set([
22
25
  "--config",
23
26
  "--cache-dir",
@@ -168,16 +171,40 @@ const parsePositionals = (rawArgs) => {
168
171
  };
169
172
  const assertAddOnlyOptions = (command, rawArgs) => {
170
173
  if (command === "add") {
174
+ for (const arg of rawArgs) {
175
+ const [flag] = arg.split("=");
176
+ if (SCOPED_SOURCE_OPTIONS.has(flag)) {
177
+ throw new Error(`${arg} is only valid for pin or update.`);
178
+ }
179
+ }
180
+ return;
181
+ }
182
+ if (command === "pin" || command === "update") {
183
+ for (const arg of rawArgs) {
184
+ if (ADD_ONLY_OPTIONS.has(arg)) {
185
+ throw new Error(`${arg} is only valid for add.`);
186
+ }
187
+ if (!arg.startsWith("--")) {
188
+ continue;
189
+ }
190
+ const [flag] = arg.split("=");
191
+ if (ADD_ONLY_OPTIONS_WITH_VALUES.has(flag)) {
192
+ throw new Error(`${flag} is only valid for add.`);
193
+ }
194
+ }
171
195
  return;
172
196
  }
173
197
  for (const arg of rawArgs) {
198
+ const [flag] = arg.split("=");
199
+ if (SCOPED_SOURCE_OPTIONS.has(flag)) {
200
+ throw new Error(`${arg} is only valid for pin or update.`);
201
+ }
174
202
  if (ADD_ONLY_OPTIONS.has(arg)) {
175
203
  throw new Error(`${arg} is only valid for add.`);
176
204
  }
177
205
  if (!arg.startsWith("--")) {
178
206
  continue;
179
207
  }
180
- const [flag] = arg.split("=");
181
208
  if (ADD_ONLY_OPTIONS_WITH_VALUES.has(flag)) {
182
209
  throw new Error(`${flag} is only valid for add.`);
183
210
  }
@@ -191,6 +218,9 @@ const buildOptions = (result) => {
191
218
  failOnMiss: Boolean(result.options.failOnMiss),
192
219
  lockOnly: Boolean(result.options.lockOnly),
193
220
  prune: Boolean(result.options.prune),
221
+ all: Boolean(result.options.all),
222
+ dryRun: Boolean(result.options.dryRun),
223
+ frozen: Boolean(result.options.frozen),
194
224
  concurrency: result.options.concurrency ? Number(result.options.concurrency) : void 0,
195
225
  json: Boolean(result.options.json),
196
226
  timeoutMs: result.options.timeoutMs ? Number(result.options.timeoutMs) : void 0,
@@ -236,8 +266,12 @@ const buildParsedCommand = (command, options, positionals, addEntries) => {
236
266
  };
237
267
  case "remove":
238
268
  return { command: "remove", ids: positionals, options };
269
+ case "pin":
270
+ return { command: "pin", ids: positionals, options };
271
+ case "update":
272
+ return { command: "update", ids: positionals, options };
239
273
  case "sync":
240
- return { command: "sync", options };
274
+ return { command: "sync", ids: positionals, options };
241
275
  case "status":
242
276
  return { command: "status", options };
243
277
  case "clean":
@@ -257,10 +291,12 @@ const buildParsedCommand = (command, options, positionals, addEntries) => {
257
291
  export const parseArgs = (argv = process.argv) => {
258
292
  try {
259
293
  const cli = cac("docs-cache");
260
- cli.option("--config <path>", "Path to config file").option("--cache-dir <path>", "Override cache directory").option("--offline", "Disable network access").option("--fail-on-miss", "Fail when required sources are missing").option("--lock-only", "Update lock without materializing files").option("--prune", "Prune cache on remove").option("--concurrency <n>", "Concurrency limit").option("--json", "Output JSON").option("--timeout-ms <n>", "Network timeout in milliseconds").option("--silent", "Suppress non-error output").option("--verbose", "Enable verbose logging").help();
294
+ cli.option("--config <path>", "Path to config file").option("--cache-dir <path>", "Override cache directory").option("--all", "Apply command to all sources").option("--dry-run", "Preview changes without writing files").option("--frozen", "Fail if lock and resolved refs differ").option("--offline", "Disable network access").option("--fail-on-miss", "Fail when required sources are missing").option("--lock-only", "Update lock without materializing files").option("--prune", "Prune cache on remove").option("--concurrency <n>", "Concurrency limit").option("--json", "Output JSON").option("--timeout-ms <n>", "Network timeout in milliseconds").option("--silent", "Suppress non-error output").option("--verbose", "Enable verbose logging").help();
261
295
  cli.command("add [repo...]", "Add sources to the config").option("--source <repo>", "Source repo").option("--target <dir>", "Target directory for source").option("--target-dir <path>", "Target directory for source").option("--id <id>", "Source id");
262
296
  cli.command("remove <id...>", "Remove sources from the config and targets");
263
- cli.command("sync", "Synchronize cache with config");
297
+ cli.command("pin [id...]", "Pin source refs to current commit");
298
+ cli.command("update [id...]", "Refresh selected sources and lock data");
299
+ cli.command("sync [id...]", "Synchronize cache with config");
264
300
  cli.command("status", "Show cache status");
265
301
  cli.command("clean", "Remove project cache");
266
302
  cli.command("clean-cache", "Clear global git cache");
@@ -5,6 +5,9 @@ export type CliOptions = {
5
5
  failOnMiss: boolean;
6
6
  lockOnly: boolean;
7
7
  prune: boolean;
8
+ all: boolean;
9
+ dryRun: boolean;
10
+ frozen: boolean;
8
11
  concurrency?: number;
9
12
  json: boolean;
10
13
  timeoutMs?: number;
@@ -24,8 +27,17 @@ export type CliCommand = {
24
27
  command: "remove";
25
28
  ids: string[];
26
29
  options: CliOptions;
30
+ } | {
31
+ command: "pin";
32
+ ids: string[];
33
+ options: CliOptions;
34
+ } | {
35
+ command: "update";
36
+ ids: string[];
37
+ options: CliOptions;
27
38
  } | {
28
39
  command: "sync";
40
+ ids: string[];
29
41
  options: CliOptions;
30
42
  } | {
31
43
  command: "status";
@@ -0,0 +1,26 @@
1
+ import { resolveRemoteCommit } from "#git/resolve-remote";
2
+ type PinParams = {
3
+ configPath?: string;
4
+ ids: string[];
5
+ all: boolean;
6
+ dryRun?: boolean;
7
+ timeoutMs?: number;
8
+ };
9
+ type PinDeps = {
10
+ resolveRemoteCommit?: typeof resolveRemoteCommit;
11
+ };
12
+ type PinResultEntry = {
13
+ id: string;
14
+ fromRef: string;
15
+ toRef: string;
16
+ repo: string;
17
+ };
18
+ export declare const pinSources: (params: PinParams, deps?: PinDeps) => Promise<{
19
+ configPath: any;
20
+ dryRun: boolean;
21
+ pinned: PinResultEntry[];
22
+ updated: PinResultEntry[];
23
+ unchanged: string[];
24
+ missing: string[];
25
+ }>;
26
+ export {};
@@ -0,0 +1,111 @@
1
import { resolveSources } from "#config";
import {
  mergeConfigBase,
  readConfigAtPath,
  resolveConfigTarget,
  writeConfigFile
} from "#config/io";
import { resolveRemoteCommit } from "#git/resolve-remote";

// Hosts permitted for remote ref resolution when the config has no override.
const DEFAULT_ALLOW_HOSTS = ["github.com", "gitlab.com", "visualstudio.com"];
// Upper bound on concurrent remote commit lookups.
const PIN_RESOLVE_CONCURRENCY = 4;

// A ref counts as already pinned when it is a full 40-char hex commit SHA.
const isPinnedCommitRef = (ref) => /^[0-9a-f]{40}$/i.test(ref.trim());

/**
 * Pin the selected sources' refs to concrete commit SHAs and, unless
 * dry-running, write the updated refs back to the config file.
 *
 * Throws when neither `all` nor explicit ids were given, or when no
 * selected source could be matched against the config.
 */
export const pinSources = async (params, deps = {}) => {
  if (!params.all && params.ids.length === 0) {
    throw new Error("Usage: docs-cache pin <id...> [--all]");
  }
  const target = await resolveConfigTarget(params.configPath);
  const resolvedPath = target.resolvedPath;
  const { config, rawConfig, rawPackage } = await readConfigAtPath(target);

  const selectedIds = params.all
    ? new Set(config.sources.map((source) => source.id))
    : new Set(params.ids);
  // Requested ids that do not exist in the config (empty for --all).
  const missing = params.all
    ? []
    : params.ids.filter(
        (id) => !config.sources.some((source) => source.id === id)
      );

  const resolvedById = new Map(
    resolveSources(config).map((source) => [source.id, source])
  );
  const allowHosts = config.defaults?.allowHosts ?? DEFAULT_ALLOW_HOSTS;
  const resolveCommit = deps.resolveRemoteCommit ?? resolveRemoteCommit;

  const entriesById = /* @__PURE__ */ new Map();
  const pending = config.sources.filter((source) => selectedIds.has(source.id));

  // Small worker pool: each worker pulls the next unclaimed source until
  // the shared cursor runs off the end of the list.
  let nextIndex = 0;
  const worker = async () => {
    while (nextIndex < pending.length) {
      const source = pending[nextIndex];
      nextIndex += 1;
      const resolved = resolvedById.get(source.id);
      if (!resolved) {
        continue;
      }
      const fromRef = source.ref ?? resolved.ref;
      const trimmedFromRef = fromRef.trim();
      if (isPinnedCommitRef(trimmedFromRef)) {
        // Already a commit SHA; no remote lookup needed.
        entriesById.set(source.id, {
          id: source.id,
          fromRef,
          toRef: trimmedFromRef,
          repo: resolved.repo
        });
        continue;
      }
      const remote = await resolveCommit({
        repo: resolved.repo,
        ref: resolved.ref,
        allowHosts,
        timeoutMs: params.timeoutMs
      });
      entriesById.set(source.id, {
        id: source.id,
        fromRef,
        toRef: remote.resolvedCommit,
        repo: remote.repo
      });
    }
  };
  const workerCount = Math.min(PIN_RESOLVE_CONCURRENCY, pending.length);
  await Promise.all(Array.from({ length: workerCount }, () => worker()));

  if (entriesById.size === 0) {
    throw new Error("No matching sources found to pin.");
  }

  // Rewrite only sources whose ref actually changes; others stay as-is.
  const nextSources = config.sources.map((source) => {
    const pin = entriesById.get(source.id);
    if (!pin || source.ref === pin.toRef) {
      return source;
    }
    return { ...source, ref: pin.toRef };
  });

  if (!params.dryRun) {
    await writeConfigFile({
      mode: target.mode,
      resolvedPath,
      config: mergeConfigBase(rawConfig ?? config, nextSources),
      rawPackage
    });
  }

  const pinned = [...entriesById.values()];
  const updated = pinned.filter((entry) => entry.fromRef !== entry.toRef);
  const unchanged = pinned
    .filter((entry) => entry.fromRef === entry.toRef)
    .map((entry) => entry.id);
  return {
    configPath: resolvedPath,
    dryRun: Boolean(params.dryRun),
    pinned,
    updated,
    unchanged,
    missing
  };
};
@@ -188,7 +188,17 @@ const buildLockSource = (result, prior, now) => ({
188
188
  const buildLock = async (plan, previous) => {
189
189
  const toolVersion = await loadToolVersion();
190
190
  const now = (/* @__PURE__ */ new Date()).toISOString();
191
- const sources = { ...previous?.sources ?? {} };
191
+ const configSourceIds = new Set(
192
+ plan.config.sources.map((source) => source.id)
193
+ );
194
+ const sources = {};
195
+ if (previous?.sources) {
196
+ for (const [id, source] of Object.entries(previous.sources)) {
197
+ if (configSourceIds.has(id)) {
198
+ sources[id] = source;
199
+ }
200
+ }
201
+ }
192
202
  for (const result of plan.results) {
193
203
  const prior = sources[result.id];
194
204
  sources[result.id] = buildLockSource(result, prior, now);
@@ -646,6 +656,18 @@ export const runSync = async (options, deps = {}) => {
646
656
  `Missing required source(s): ${requiredMissing.map((result) => result.id).join(", ")}.`
647
657
  );
648
658
  }
659
+ if (options.frozen) {
660
+ const drifted = plan.results.filter(
661
+ (result) => result.status !== "up-to-date"
662
+ );
663
+ if (drifted.length > 0) {
664
+ throw new Error(
665
+ `Frozen sync failed: lock is out of date for source(s): ${drifted.map((result) => result.id).join(
666
+ ", "
667
+ )}. Run docs-cache update or docs-cache sync to refresh the lock.`
668
+ );
669
+ }
670
+ }
649
671
  if (!options.lockOnly) {
650
672
  const defaults = plan.defaults;
651
673
  const runFetch = deps.fetchSource ?? fetchSource;
@@ -0,0 +1,28 @@
1
// Public surface of the `update` command: refresh selected sources (or all)
// by running a filtered sync, optionally as a dry run.
import type { materializeSource } from "#cache/materialize";
import type { fetchSource } from "#git/fetch-source";
import type { resolveRemoteCommit } from "#git/resolve-remote";
// Input for updateSources.
type UpdateOptions = {
    // Explicit config file path; discovered automatically when omitted.
    configPath?: string;
    // Override for the cache directory location.
    cacheDirOverride?: string;
    // Source ids to update; ignored when `all` is true.
    ids: string[];
    // Update every configured source instead of an explicit id list.
    all: boolean;
    // When true, only compute the sync plan; nothing is written.
    dryRun: boolean;
    // Emit machine-readable JSON output.
    json: boolean;
    // Update lock data without materializing target output.
    lockOnly: boolean;
    // Fail when a required source cannot be fetched.
    failOnMiss: boolean;
    // Timeout (milliseconds) for remote operations.
    timeoutMs?: number;
    verbose?: boolean;
    // Parallelism for the underlying sync.
    concurrency?: number;
    // Fail if lock data would change (CI-style frozen sync).
    frozen?: boolean;
};
// Injectable dependency overrides (primarily for tests).
type UpdateDeps = {
    resolveRemoteCommit?: typeof resolveRemoteCommit;
    fetchSource?: typeof fetchSource;
    materializeSource?: typeof materializeSource;
};
export declare const updateSources: (options: UpdateOptions, deps?: UpdateDeps) => Promise<{
    dryRun: boolean;
    // Requested ids that do not exist in the config.
    missing: string[];
    // NOTE(review): shape comes from getSyncPlan/runSync — confirm and tighten from `any`.
    plan: any;
}>;
export {};
@@ -0,0 +1,51 @@
1
import { getSyncPlan, runSync } from "#commands/sync";
import { loadConfig } from "#config";

// Determine which configured source ids the update should touch.
// Returns the valid selection plus any requested ids that don't exist;
// throws when an explicit selection matches nothing.
const resolveSelectedSourceIds = async (options) => {
  const { sources } = await loadConfig(options.configPath);
  const knownIds = sources.map((source) => source.id);
  if (options.all) {
    return { selectedIds: knownIds, missing: [] };
  }
  const known = new Set(knownIds);
  const selectedIds = [];
  const missing = [];
  for (const id of options.ids) {
    (known.has(id) ? selectedIds : missing).push(id);
  }
  if (selectedIds.length === 0) {
    throw new Error("No matching sources found to update.");
  }
  return { selectedIds, missing };
};

/**
 * Refresh the selected sources (or all of them with `all: true`) by running
 * a sync restricted to those ids. With `dryRun` only the plan is computed
 * and nothing is written; otherwise the sync executes for real.
 */
export const updateSources = async (options, deps = {}) => {
  if (!options.all && options.ids.length === 0) {
    throw new Error("Usage: docs-cache update <id...> [--all]");
  }
  const { selectedIds, missing } = await resolveSelectedSourceIds(options);
  // Update always goes online; the selection is applied via sourceFilter.
  const syncOptions = {
    configPath: options.configPath,
    cacheDirOverride: options.cacheDirOverride,
    json: options.json,
    lockOnly: options.lockOnly,
    offline: false,
    failOnMiss: options.failOnMiss,
    frozen: options.frozen,
    verbose: options.verbose,
    concurrency: options.concurrency,
    sourceFilter: selectedIds,
    timeoutMs: options.timeoutMs
  };
  const dryRun = Boolean(options.dryRun);
  const plan = dryRun
    ? await getSyncPlan(syncOptions, deps)
    : await runSync(syncOptions, deps);
  return { dryRun, missing, plan };
};
@@ -418,8 +418,24 @@ const handleMissingCache = async (params, cachePath, cacheExists) => {
418
418
  await cloneRepo(params, cachePath);
419
419
  return { usedCache: false, worktreeUsed: false };
420
420
  };
421
- const cloneOrUpdateRepo = async (params, outDir) => {
421
// In-flight clone/update operations keyed by persistent cache path, so that
// only one operation touches a given cache directory at a time.
const cloneOrUpdateInFlight = /* @__PURE__ */ new Map();

// Serializing wrapper around cloneOrUpdateRepoImpl: concurrent callers for
// the same repo cache wait for the current operation, then run their own.
const cloneOrUpdateRepo = (params, outDir) => {
  const cachePath = getPersistentCachePath(params.repo);
  const inflight = cloneOrUpdateInFlight.get(cachePath);
  if (inflight !== void 0) {
    // Wait for the in-flight operation to settle, then retry with our own
    // params. Retry on BOTH outcomes: if the earlier attempt failed, that
    // error belongs to the caller that started it — our attempt (possibly
    // for a different ref) must still get its own chance to run.
    const retry = () => cloneOrUpdateRepo(params, outDir);
    return inflight.then(retry, retry);
  }
  const promise = (async () => {
    try {
      return await cloneOrUpdateRepoImpl(params, outDir, cachePath);
    } finally {
      // Always release the slot so waiters (and future calls) can proceed.
      cloneOrUpdateInFlight.delete(cachePath);
    }
  })();
  cloneOrUpdateInFlight.set(cachePath, promise);
  return promise;
};
438
+ const cloneOrUpdateRepoImpl = async (params, outDir, cachePath) => {
423
439
  const cacheExists = await exists(cachePath);
424
440
  const cacheValid = cacheExists && await isValidGitRepo(cachePath);
425
441
  const isCommitRef = /^[0-9a-f]{7,40}$/i.test(params.ref);
@@ -5,6 +5,7 @@ export type SyncOptions = {
5
5
  lockOnly: boolean;
6
6
  offline: boolean;
7
7
  failOnMiss: boolean;
8
+ frozen?: boolean;
8
9
  verbose?: boolean;
9
10
  concurrency?: number;
10
11
  sourceFilter?: string[];
package/package.json CHANGED
@@ -2,7 +2,7 @@
2
2
  "name": "docs-cache",
3
3
  "private": false,
4
4
  "type": "module",
5
- "version": "0.5.5",
5
+ "version": "0.5.7",
6
6
  "description": "CLI for deterministic local caching of external documentation for agents and tools",
7
7
  "author": "Frederik Bosch",
8
8
  "license": "MIT",