docs-cache 0.4.0 → 0.4.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,6 +1,6 @@
1
1
  # 🗃️ `docs-cache`
2
2
 
3
- Deterministic local caching of external documentation for agents and tools
3
+ Deterministic local caching of external documentation for agents and developers
4
4
 
5
5
  [![License](https://img.shields.io/badge/license-MIT-blue.svg)](LICENSE)
6
6
  [![npm version](https://img.shields.io/npm/v/docs-cache)](https://www.npmjs.com/package/docs-cache)
@@ -8,13 +8,13 @@ Deterministic local caching of external documentation for agents and tools
8
8
 
9
9
  ## Purpose
10
10
 
11
- Provides agents and automation tools with local access to external documentation without committing it to the repository.
11
+ Provides agents and developers with local access to external documentation without committing it to the repository.
12
12
 
13
13
  Documentation is cached in a gitignored location, exposed to agent and tool targets via links or copies, and updated through sync commands or postinstall hooks.
14
14
 
15
15
  ## Features
16
16
 
17
- - **Local only**: Cache lives in the directory `.docs` (or a custom location) and _should_ be gitignored.
17
+ - **Local only**: Cache lives in the directory `.docs` (or a custom location) and can be gitignored.
18
18
  - **Deterministic**: `docs-lock.json` pins commits and file metadata.
19
19
  - **Fast**: Local cache avoids network roundtrips after sync.
20
20
  - **Flexible**: Cache full repos or just the subdirectories you need.
@@ -54,20 +54,21 @@ npx docs-cache clean
54
54
 
55
55
  ## Configuration
56
56
 
57
- `docs.config.json` at project root (or `docs-cache` inside `package.json`):
57
+ `docs.config.json` at project root (or a `docs-cache` field in `package.json`):
58
58
 
59
- ```json
59
+ ```jsonc
60
60
  {
61
61
  "$schema": "https://github.com/fbosch/docs-cache/blob/master/docs.config.schema.json",
62
62
  "sources": [
63
63
  {
64
64
  "id": "framework",
65
65
  "repo": "https://github.com/framework/core.git",
66
- "ref": "main",
67
- "targetDir": "./agents/skills/framework-skill/references",
68
- "include": ["guide/**"]
69
- }
70
- ]
66
+ "ref": "main", // or specific commit hash
67
+ "targetDir": "./agents/skills/framework-skill/references", // symlink/copy target
68
+ "include": ["guide/**"], // file globs to include from the source
69
+ "toc": true, // defaults to "compressed" (for agents)
70
+ },
71
+ ],
71
72
  }
72
73
  ```
73
74
 
@@ -78,28 +79,30 @@ npx docs-cache clean
78
79
  | Field | Details | Required |
79
80
  | ---------- | -------------------------------------- | -------- |
80
81
  | `cacheDir` | Directory for cache. Default: `.docs`. | Optional |
81
- | `sources` | List of repositories to sync. | Required |
82
82
  | `defaults` | Default settings for all sources. | Optional |
83
+ | `sources` | List of repositories to sync. | Required |
83
84
 
84
85
  <details>
85
86
  <summary>Show default and source options</summary>
86
87
 
87
88
  ### Default options
88
89
 
89
- All fields in `defaults` apply to all sources unless overridden per-source.
90
-
91
- | Field | Details |
92
- | --------------------- | ---------------------------------------------------------------------------------------------------------------- |
93
- | `ref` | Branch, tag, or commit. Default: `"HEAD"`. |
94
- | `mode` | Cache mode. Default: `"materialize"`. |
95
- | `include` | Glob patterns to copy. Default: `["**/*.{md,mdx,markdown,mkd,txt,rst,adoc,asciidoc}"]`. |
96
- | `targetMode` | How to link or copy from the cache to the destination. Default: `"symlink"` on Unix, `"copy"` on Windows. |
97
- | `required` | Whether missing sources should fail. Default: `true`. |
98
- | `maxBytes` | Maximum total bytes to materialize. Default: `200000000` (200 MB). |
99
- | `maxFiles` | Maximum total files to materialize. |
100
- | `allowHosts` | Allowed Git hosts. Default: `["github.com", "gitlab.com"]`. |
101
- | `toc` | Generate per-source `TOC.md`. Default: `true`. Supports `true`, `false`, or a format (`"tree"`, `"compressed"`). |
102
- | `unwrapSingleRootDir` | If the materialized output is nested under a single directory, unwrap it (recursively). Default: `false`. |
90
+ These fields can be set in `defaults` and are inherited by every source unless overridden per-source.
91
+
92
+ | Field | Details |
93
+ | --------------------- | ------------------------------------------------------------------------------------------------------------------------------- |
94
+ | `ref` | Branch, tag, or commit. Default: `"HEAD"`. |
95
+ | `mode` | Cache mode. Default: `"materialize"`. |
96
+ | `include` | Glob patterns to copy. Default: `["**/*.{md,mdx,markdown,mkd,txt,rst,adoc,asciidoc}"]`. |
97
+ | `exclude` | Glob patterns to skip. Default: `[]`. |
98
+ | `targetMode` | How to link or copy from the cache to the destination. Default: `"symlink"` on Unix, `"copy"` on Windows. |
99
+ | `required` | Whether missing sources should fail. Default: `true`. |
100
+ | `maxBytes` | Maximum total bytes to materialize. Default: `200000000` (200 MB). |
101
+ | `maxFiles` | Maximum total files to materialize. |
102
+ | `ignoreHidden` | Skip hidden files and directories (dotfiles). Default: `false`. |
103
+ | `allowHosts` | Allowed Git hosts. Default: `["github.com", "gitlab.com", "visualstudio.com"]`. |
104
+ | `toc`                 | Generate per-source `TOC.md`. Default: `true`. Supports `true`, `false`, or a format: `"tree"` (human readable), `"compressed"` (for agents). |
105
+ | `unwrapSingleRootDir` | If the materialized output is nested under a single directory, unwrap it (recursively). Default: `false`. |
103
106
 
104
107
  ### Source options
105
108
 
@@ -110,22 +113,13 @@ All fields in `defaults` apply to all sources unless overridden per-source.
110
113
  | `repo` | Git URL. |
111
114
  | `id` | Unique identifier for the source. |
112
115
 
113
- #### Optional
114
-
115
- | Field | Details |
116
- | --------------------- | ----------------------------------------------------------------------------------------------- |
117
- | `ref` | Branch, tag, or commit. |
118
- | `include` | Glob patterns to copy. |
119
- | `exclude` | Glob patterns to skip. |
120
- | `targetDir` | Path where files should be symlinked/copied to, outside `.docs`. |
121
- | `targetMode` | How to link or copy from the cache to the destination. |
122
- | `required` | Whether missing sources should fail. |
123
- | `maxBytes` | Maximum total bytes to materialize. |
124
- | `maxFiles` | Maximum total files to materialize. |
125
- | `toc` | Generate per-source `TOC.md`. Supports `true`, `false`, or a format (`"tree"`, `"compressed"`). |
126
- | `unwrapSingleRootDir` | If the materialized output is nested under a single directory, unwrap it (recursively). |
127
-
128
- > **Note**: Sources are always downloaded to `.docs/<id>/`. If you provide a `targetDir`, `docs-cache` will create a symlink or copy pointing from the cache to that target directory. The target should be outside `.docs`. Git operation timeout is configured via the `--timeout-ms` CLI flag, not as a per-source configuration option.
116
+ #### Optional (source-only)
117
+
118
+ | Field | Details |
119
+ | ----------- | ---------------------------------------------------------------- |
120
+ | `targetDir` | Path where files should be symlinked/copied to, outside `.docs`. |
121
+
122
+ > **Note**: Sources are always downloaded to `.docs/<id>/`. If you provide a `targetDir`, `docs-cache` will create a symlink or copy pointing from the cache to that target directory.
129
123
 
130
124
  </details>
131
125
 
@@ -1,3 +1,3 @@
1
- import{readFile as D,writeFile as k,access as E}from"node:fs/promises";import l from"node:path";import{v,D as P,a as A,s as F,w as I,b as U,r as S}from"../shared/docs-cache.DpZFrqUt.mjs";import{e as N}from"../shared/docs-cache.DH8jN6rl.mjs";import{r as O}from"../shared/docs-cache.Oi01HUbh.mjs";import{r as x}from"../shared/docs-cache.BSvQNKuf.mjs";import"zod";import"node:process";import"cac";import"picocolors";const u=async o=>{try{return await E(o),!0}catch{return!1}},y="package.json",C=async o=>{const i=await D(o,"utf8"),e=JSON.parse(i),a=e["docs-cache"];return a?{parsed:e,config:v(a)}:{parsed:e,config:null}},J=async o=>{if(o){const a=S(o);return{resolvedPath:a,mode:l.basename(a)===y?"package":"config"}}const i=S();if(await u(i))return{resolvedPath:i,mode:"config"};const e=l.resolve(process.cwd(),y);return await u(e)&&(await C(e)).config?{resolvedPath:e,mode:"package"}:{resolvedPath:i,mode:"config"}},T=async o=>{const i=await J(o.configPath),e=i.resolvedPath;let a=P,t=null,g=null,f=!1;if(await u(e))if(i.mode==="package"){const r=await C(e);g=r.parsed,t=r.config,a=t??P,f=!!t}else{const r=await D(e,"utf8");t=JSON.parse(r.toString()),a=v(t),f=!0}const b="https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json",p=new Set(a.sources.map(r=>r.id)),m=[],d=o.entries.map(r=>{const n=x(r.repo),w=r.id||n.inferredId;if(!w)throw new Error("Unable to infer id. Provide an explicit id.");const c=A(w,"source id");return p.has(c)?(m.push(c),null):(p.add(c),r.targetDir&&O(e,r.targetDir),{id:c,repo:n.repoUrl,...r.targetDir?{targetDir:r.targetDir}:{},...n.ref?{ref:n.ref}:{}})}).filter(Boolean);if(d.length===0)throw new Error("All sources already exist in config.");const s={$schema:b,sources:[...a.sources,...d]};if(t?.cacheDir&&(s.cacheDir=t.cacheDir),t?.defaults&&(s.defaults=t.defaults),i.mode==="package"){const r=g??{};r["docs-cache"]=F(s),await k(e,`${JSON.stringify(r,null,2)}
1
+ import{readFile as D,writeFile as k,access as E}from"node:fs/promises";import l from"node:path";import{v,D as P,a as A,s as F,w as I,b as U,r as S}from"../shared/docs-cache.bWkgSdUq.mjs";import{e as N}from"../shared/docs-cache.DH8jN6rl.mjs";import{r as O}from"../shared/docs-cache.Oi01HUbh.mjs";import{r as x}from"../shared/docs-cache.BSvQNKuf.mjs";import"zod";import"node:process";import"cac";import"picocolors";const u=async o=>{try{return await E(o),!0}catch{return!1}},y="package.json",C=async o=>{const i=await D(o,"utf8"),e=JSON.parse(i),a=e["docs-cache"];return a?{parsed:e,config:v(a)}:{parsed:e,config:null}},J=async o=>{if(o){const a=S(o);return{resolvedPath:a,mode:l.basename(a)===y?"package":"config"}}const i=S();if(await u(i))return{resolvedPath:i,mode:"config"};const e=l.resolve(process.cwd(),y);return await u(e)&&(await C(e)).config?{resolvedPath:e,mode:"package"}:{resolvedPath:i,mode:"config"}},T=async o=>{const i=await J(o.configPath),e=i.resolvedPath;let a=P,t=null,g=null,f=!1;if(await u(e))if(i.mode==="package"){const r=await C(e);g=r.parsed,t=r.config,a=t??P,f=!!t}else{const r=await D(e,"utf8");t=JSON.parse(r.toString()),a=v(t),f=!0}const b="https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json",p=new Set(a.sources.map(r=>r.id)),m=[],d=o.entries.map(r=>{const n=x(r.repo),w=r.id||n.inferredId;if(!w)throw new Error("Unable to infer id. Provide an explicit id.");const c=A(w,"source id");return p.has(c)?(m.push(c),null):(p.add(c),r.targetDir&&O(e,r.targetDir),{id:c,repo:n.repoUrl,...r.targetDir?{targetDir:r.targetDir}:{},...n.ref?{ref:n.ref}:{}})}).filter(Boolean);if(d.length===0)throw new Error("All sources already exist in config.");const s={$schema:b,sources:[...a.sources,...d]};if(t?.cacheDir&&(s.cacheDir=t.cacheDir),t?.defaults&&(s.defaults=t.defaults),i.mode==="package"){const r=g??{};r["docs-cache"]=F(s),await k(e,`${JSON.stringify(r,null,2)}
2
2
  `,"utf8")}else await I(e,s);const h=f?null:await N(l.dirname(e),t?.cacheDir??U);return{configPath:e,sources:d,skipped:m,created:!0,gitignoreUpdated:h?.updated??!1,gitignorePath:h?.gitignorePath??null}};export{T as addSources};
3
3
  //# sourceMappingURL=add.mjs.map
@@ -1,2 +1,2 @@
1
- import{rm as o,access as i}from"node:fs/promises";import{l as s,b as m}from"../shared/docs-cache.DpZFrqUt.mjs";import{b as n}from"../shared/docs-cache.Oi01HUbh.mjs";import"node:path";import"zod";import"node:process";import"cac";import"picocolors";const f=async r=>{try{return await i(r),!0}catch{return!1}},p=async r=>{const{config:t,resolvedPath:c}=await s(r.configPath),a=n(c,t.cacheDir??m,r.cacheDirOverride),e=await f(a);return e&&await o(a,{recursive:!0,force:!0}),{cacheDir:a,removed:e}};export{p as cleanCache};
1
+ import{rm as o,access as i}from"node:fs/promises";import{l as s,b as m}from"../shared/docs-cache.bWkgSdUq.mjs";import{b as n}from"../shared/docs-cache.Oi01HUbh.mjs";import"node:path";import"zod";import"node:process";import"cac";import"picocolors";const f=async r=>{try{return await i(r),!0}catch{return!1}},p=async r=>{const{config:t,resolvedPath:c}=await s(r.configPath),a=n(c,t.cacheDir??m,r.cacheDirOverride),e=await f(a);return e&&await o(a,{recursive:!0,force:!0}),{cacheDir:a,removed:e}};export{p as cleanCache};
2
2
  //# sourceMappingURL=clean.mjs.map
@@ -1,3 +1,3 @@
1
- import{readFile as C,writeFile as N,access as S}from"node:fs/promises";import c from"node:path";import{confirm as V,isCancel as G,select as J,text as U}from"@clack/prompts";import{c as _,b as r,s as L,w as T}from"../shared/docs-cache.DpZFrqUt.mjs";import{g as H,e as k}from"../shared/docs-cache.DH8jN6rl.mjs";import"zod";import"../shared/docs-cache.Oi01HUbh.mjs";import"node:process";import"cac";import"picocolors";const h=async n=>{try{return await S(n),!0}catch{return!1}},M=async(n,s={})=>{const y=s.confirm??V,l=s.isCancel??G,x=s.select??J,F=s.text??U,f=n.cwd??process.cwd(),d=c.resolve(f,_),i=c.resolve(f,"package.json"),g=[];if(await h(d)&&g.push(d),await h(i)){const e=await C(i,"utf8");JSON.parse(e)["docs-cache"]&&g.push(i)}if(g.length>0)throw new Error(`Config already exists at ${g.join(", ")}. Init aborted.`);let b=!1;if(await h(i)){const e=await C(i,"utf8");if(!JSON.parse(e)["docs-cache"]){const o=await x({message:"Config location",options:[{value:"config",label:"docs.config.json"},{value:"package",label:"package.json"}],initialValue:"config"});if(l(o))throw new Error("Init cancelled.");b=o==="package"}}const $=b?i:d,v=n.cacheDirOverride??r,u=await F({message:"Cache directory",initialValue:v});if(l(u))throw new Error("Init cancelled.");const A=u||r,D=await y({message:"Generate TOC.md (table of contents with links to all documentation)",initialValue:!0});if(l(D))throw new Error("Init cancelled.");const I=await H(f,A);let P=!1;if(I.entry&&!I.hasEntry){const e=await y({message:"Add cache directory to .gitignore",initialValue:!0});if(l(e))throw new Error("Init cancelled.");P=e}const a={configPath:$,cacheDir:u,toc:D,gitignore:P},t=c.resolve(f,a.configPath);if(c.basename(t)==="package.json"){const e=await C(t,"utf8"),o=JSON.parse(e);if(o["docs-cache"])throw new Error(`docs-cache config already exists in ${t}.`);const 
p={$schema:"https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json",sources:[]},E=a.cacheDir||r;E!==r&&(p.cacheDir=E),a.toc||(p.defaults={toc:!1}),o["docs-cache"]=L(p),await N(t,`${JSON.stringify(o,null,2)}
1
+ import{readFile as C,writeFile as N,access as S}from"node:fs/promises";import c from"node:path";import{confirm as V,isCancel as G,select as J,text as U}from"@clack/prompts";import{c as _,b as r,s as L,w as T}from"../shared/docs-cache.bWkgSdUq.mjs";import{g as H,e as k}from"../shared/docs-cache.DH8jN6rl.mjs";import"zod";import"../shared/docs-cache.Oi01HUbh.mjs";import"node:process";import"cac";import"picocolors";const h=async n=>{try{return await S(n),!0}catch{return!1}},M=async(n,s={})=>{const y=s.confirm??V,l=s.isCancel??G,x=s.select??J,F=s.text??U,f=n.cwd??process.cwd(),d=c.resolve(f,_),i=c.resolve(f,"package.json"),g=[];if(await h(d)&&g.push(d),await h(i)){const e=await C(i,"utf8");JSON.parse(e)["docs-cache"]&&g.push(i)}if(g.length>0)throw new Error(`Config already exists at ${g.join(", ")}. Init aborted.`);let b=!1;if(await h(i)){const e=await C(i,"utf8");if(!JSON.parse(e)["docs-cache"]){const o=await x({message:"Config location",options:[{value:"config",label:"docs.config.json"},{value:"package",label:"package.json"}],initialValue:"config"});if(l(o))throw new Error("Init cancelled.");b=o==="package"}}const $=b?i:d,v=n.cacheDirOverride??r,u=await F({message:"Cache directory",initialValue:v});if(l(u))throw new Error("Init cancelled.");const A=u||r,D=await y({message:"Generate TOC.md (table of contents with links to all documentation)",initialValue:!0});if(l(D))throw new Error("Init cancelled.");const I=await H(f,A);let P=!1;if(I.entry&&!I.hasEntry){const e=await y({message:"Add cache directory to .gitignore",initialValue:!0});if(l(e))throw new Error("Init cancelled.");P=e}const a={configPath:$,cacheDir:u,toc:D,gitignore:P},t=c.resolve(f,a.configPath);if(c.basename(t)==="package.json"){const e=await C(t,"utf8"),o=JSON.parse(e);if(o["docs-cache"])throw new Error(`docs-cache config already exists in ${t}.`);const 
p={$schema:"https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json",sources:[]},E=a.cacheDir||r;E!==r&&(p.cacheDir=E),a.toc||(p.defaults={toc:!1}),o["docs-cache"]=L(p),await N(t,`${JSON.stringify(o,null,2)}
2
2
  `,"utf8");const O=a.gitignore?await k(c.dirname(t),E):null;return{configPath:t,created:!0,gitignoreUpdated:O?.updated??!1,gitignorePath:O?.gitignorePath??null}}if(await h(t))throw new Error(`Config already exists at ${t}.`);const w={$schema:"https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json",sources:[]},m=a.cacheDir||r;m!==r&&(w.cacheDir=m),a.toc||(w.defaults={toc:!1}),await T(t,w);const j=a.gitignore?await k(c.dirname(t),m):null;return{configPath:t,created:!0,gitignoreUpdated:j?.updated??!1,gitignorePath:j?.gitignorePath??null}};export{M as initConfig};
3
3
  //# sourceMappingURL=init.mjs.map
@@ -1,2 +1,2 @@
1
- import{readdir as p,rm as f,access as h}from"node:fs/promises";import u from"node:path";import{l as d,b as D}from"../shared/docs-cache.DpZFrqUt.mjs";import{b as v}from"../shared/docs-cache.Oi01HUbh.mjs";import"zod";import"node:process";import"cac";import"picocolors";const w=async t=>{try{return await h(t),!0}catch{return!1}},l=async t=>{const{config:c,resolvedPath:s,sources:a}=await d(t.configPath),e=v(s,c.cacheDir??D,t.cacheDirOverride);if(!await w(e))return{cacheDir:e,removed:[],kept:a.map(r=>r.id)};const n=new Set(a.map(r=>r.id)),m=await p(e,{withFileTypes:!0}),o=[];for(const r of m){if(!r.isDirectory())continue;const i=r.name;n.has(i)||i.startsWith(".tmp-")||(await f(u.join(e,i),{recursive:!0,force:!0}),o.push(i))}return{cacheDir:e,removed:o,kept:a.map(r=>r.id)}};export{l as pruneCache};
1
+ import{readdir as p,rm as f,access as h}from"node:fs/promises";import u from"node:path";import{l as d,b as D}from"../shared/docs-cache.bWkgSdUq.mjs";import{b as v}from"../shared/docs-cache.Oi01HUbh.mjs";import"zod";import"node:process";import"cac";import"picocolors";const w=async t=>{try{return await h(t),!0}catch{return!1}},l=async t=>{const{config:c,resolvedPath:s,sources:a}=await d(t.configPath),e=v(s,c.cacheDir??D,t.cacheDirOverride);if(!await w(e))return{cacheDir:e,removed:[],kept:a.map(r=>r.id)};const n=new Set(a.map(r=>r.id)),m=await p(e,{withFileTypes:!0}),o=[];for(const r of m){if(!r.isDirectory())continue;const i=r.name;n.has(i)||i.startsWith(".tmp-")||(await f(u.join(e,i),{recursive:!0,force:!0}),o.push(i))}return{cacheDir:e,removed:o,kept:a.map(r=>r.id)}};export{l as pruneCache};
2
2
  //# sourceMappingURL=prune.mjs.map
@@ -1,3 +1,3 @@
1
- import{readFile as w,writeFile as N,rm as $,access as k}from"node:fs/promises";import v from"node:path";import{v as D,D as E,s as F,w as I,r as P}from"../shared/docs-cache.DpZFrqUt.mjs";import{r as O}from"../shared/docs-cache.Oi01HUbh.mjs";import{r as U}from"../shared/docs-cache.BSvQNKuf.mjs";import"zod";import"node:process";import"cac";import"picocolors";const f=async a=>{try{return await k(a),!0}catch{return!1}},y="package.json",S=async a=>{const s=await w(a,"utf8"),o=JSON.parse(s),r=o["docs-cache"];return r?{parsed:o,config:D(r)}:{parsed:o,config:null}},b=async a=>{if(a){const r=P(a);return{resolvedPath:r,mode:v.basename(r)===y?"package":"config"}}const s=P();if(await f(s))return{resolvedPath:s,mode:"config"};const o=v.resolve(process.cwd(),y);return await f(o)&&(await S(o)).config?{resolvedPath:o,mode:"package"}:{resolvedPath:s,mode:"config"}},J=async a=>{if(a.ids.length===0)throw new Error("No sources specified to remove.");const s=await b(a.configPath),o=s.resolvedPath;let r=E,t=null,d=null;if(await f(o))if(s.mode==="package"){const e=await S(o);if(d=e.parsed,t=e.config,!t)throw new Error(`Missing docs-cache config in ${o}.`);r=t}else{const e=await w(o,"utf8");t=JSON.parse(e.toString()),r=D(t)}else throw new Error(`Config not found at ${o}.`);const u=new Map(r.sources.map(e=>[e.id,e])),g=new Map(r.sources.map(e=>[e.repo,e])),n=new Set,l=[];for(const e of a.ids){if(u.has(e)){n.add(e);continue}const i=U(e);if(i.repoUrl&&g.has(i.repoUrl)){const p=g.get(i.repoUrl);p&&n.add(p.id);continue}if(i.inferredId&&u.has(i.inferredId)){n.add(i.inferredId);continue}l.push(e)}const C=r.sources.filter(e=>!n.has(e.id)),h=r.sources.filter(e=>n.has(e.id)).map(e=>e.id),M=r.sources.filter(e=>n.has(e.id));if(h.length===0)throw new Error("No matching sources found to remove.");const 
c={$schema:t?.$schema??"https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json",sources:C};if(t?.cacheDir&&(c.cacheDir=t.cacheDir),t?.defaults&&(c.defaults=t.defaults),t?.targetMode&&(c.targetMode=t.targetMode),s.mode==="package"){const e=d??{};e["docs-cache"]=F(c),await N(o,`${JSON.stringify(e,null,2)}
1
+ import{readFile as w,writeFile as N,rm as $,access as k}from"node:fs/promises";import v from"node:path";import{v as D,D as E,s as F,w as I,r as P}from"../shared/docs-cache.bWkgSdUq.mjs";import{r as O}from"../shared/docs-cache.Oi01HUbh.mjs";import{r as U}from"../shared/docs-cache.BSvQNKuf.mjs";import"zod";import"node:process";import"cac";import"picocolors";const f=async a=>{try{return await k(a),!0}catch{return!1}},y="package.json",S=async a=>{const s=await w(a,"utf8"),o=JSON.parse(s),r=o["docs-cache"];return r?{parsed:o,config:D(r)}:{parsed:o,config:null}},b=async a=>{if(a){const r=P(a);return{resolvedPath:r,mode:v.basename(r)===y?"package":"config"}}const s=P();if(await f(s))return{resolvedPath:s,mode:"config"};const o=v.resolve(process.cwd(),y);return await f(o)&&(await S(o)).config?{resolvedPath:o,mode:"package"}:{resolvedPath:s,mode:"config"}},J=async a=>{if(a.ids.length===0)throw new Error("No sources specified to remove.");const s=await b(a.configPath),o=s.resolvedPath;let r=E,t=null,d=null;if(await f(o))if(s.mode==="package"){const e=await S(o);if(d=e.parsed,t=e.config,!t)throw new Error(`Missing docs-cache config in ${o}.`);r=t}else{const e=await w(o,"utf8");t=JSON.parse(e.toString()),r=D(t)}else throw new Error(`Config not found at ${o}.`);const u=new Map(r.sources.map(e=>[e.id,e])),g=new Map(r.sources.map(e=>[e.repo,e])),n=new Set,l=[];for(const e of a.ids){if(u.has(e)){n.add(e);continue}const i=U(e);if(i.repoUrl&&g.has(i.repoUrl)){const p=g.get(i.repoUrl);p&&n.add(p.id);continue}if(i.inferredId&&u.has(i.inferredId)){n.add(i.inferredId);continue}l.push(e)}const C=r.sources.filter(e=>!n.has(e.id)),h=r.sources.filter(e=>n.has(e.id)).map(e=>e.id),M=r.sources.filter(e=>n.has(e.id));if(h.length===0)throw new Error("No matching sources found to remove.");const 
c={$schema:t?.$schema??"https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json",sources:C};if(t?.cacheDir&&(c.cacheDir=t.cacheDir),t?.defaults&&(c.defaults=t.defaults),t?.targetMode&&(c.targetMode=t.targetMode),s.mode==="package"){const e=d??{};e["docs-cache"]=F(c),await N(o,`${JSON.stringify(e,null,2)}
2
2
  `,"utf8")}else await I(o,c);const m=[];for(const e of M){if(!e.targetDir)continue;const i=O(o,e.targetDir);await $(i,{recursive:!0,force:!0}),m.push({id:e.id,targetDir:i})}return{configPath:o,removed:h,missing:l,targetsRemoved:m}};export{J as removeSources};
3
3
  //# sourceMappingURL=remove.mjs.map
@@ -1,2 +1,2 @@
1
- import{access as y}from"node:fs/promises";import a from"picocolors";import{u as o,a as u,b as D,g as w}from"../shared/docs-cache.Oi01HUbh.mjs";import{l as C,b as L}from"../shared/docs-cache.DpZFrqUt.mjs";import{DEFAULT_LOCK_FILENAME as v,resolveLockPath as x,readLock as P}from"../lock.mjs";import"node:process";import"cac";import"node:path";import"zod";const h=async r=>{try{return await y(r),!0}catch{return!1}},A=async r=>{const{config:c,resolvedPath:t,sources:n}=await C(r.configPath),s=D(t,c.cacheDir??L,r.cacheDirOverride),l=await h(s),e=x(t),i=await h(e);let d=!1,f=null;if(i)try{f=await P(e),d=!0}catch{d=!1}const E=await Promise.all(n.map(async m=>{const p=w(s,m.id),g=await h(p.sourceDir),k=f?.sources?.[m.id]??null;return{id:m.id,docsPath:p.sourceDir,docsExists:g,lockEntry:k}}));return{configPath:t,cacheDir:s,cacheDirExists:l,lockPath:e,lockExists:i,lockValid:d,sources:E}},_=r=>{const c=o.path(r.cacheDir),t=r.cacheDirExists?a.green("present"):a.red("missing"),n=r.lockExists?r.lockValid?a.green("valid"):a.red("invalid"):a.yellow("missing");if(o.header("Cache",`${c} (${t})`),o.header("Lock",`${v} (${n})`),r.sources.length===0){o.line(),o.line(`${u.warn} No sources configured.`);return}o.line();for(const s of r.sources){const l=s.docsExists?u.success:u.error,e=s.lockEntry?a.green("locked"):a.yellow("new"),i=o.hash(s.lockEntry?.resolvedCommit);o.item(l,s.id.padEnd(20),`${e.padEnd(10)} ${i}`)}};export{A as getStatus,_ as printStatus};
1
+ import{access as y}from"node:fs/promises";import a from"picocolors";import{u as o,a as u,b as D,g as w}from"../shared/docs-cache.Oi01HUbh.mjs";import{l as C,b as L}from"../shared/docs-cache.bWkgSdUq.mjs";import{DEFAULT_LOCK_FILENAME as v,resolveLockPath as x,readLock as P}from"../lock.mjs";import"node:process";import"cac";import"node:path";import"zod";const h=async r=>{try{return await y(r),!0}catch{return!1}},A=async r=>{const{config:c,resolvedPath:t,sources:n}=await C(r.configPath),s=D(t,c.cacheDir??L,r.cacheDirOverride),l=await h(s),e=x(t),i=await h(e);let d=!1,f=null;if(i)try{f=await P(e),d=!0}catch{d=!1}const E=await Promise.all(n.map(async m=>{const p=w(s,m.id),g=await h(p.sourceDir),k=f?.sources?.[m.id]??null;return{id:m.id,docsPath:p.sourceDir,docsExists:g,lockEntry:k}}));return{configPath:t,cacheDir:s,cacheDirExists:l,lockPath:e,lockExists:i,lockValid:d,sources:E}},_=r=>{const c=o.path(r.cacheDir),t=r.cacheDirExists?a.green("present"):a.red("missing"),n=r.lockExists?r.lockValid?a.green("valid"):a.red("invalid"):a.yellow("missing");if(o.header("Cache",`${c} (${t})`),o.header("Lock",`${v} (${n})`),r.sources.length===0){o.line(),o.line(`${u.warn} No sources configured.`);return}o.line();for(const s of r.sources){const l=s.docsExists?u.success:u.error,e=s.lockEntry?a.green("locked"):a.yellow("new"),i=o.hash(s.lockEntry?.resolvedCommit);o.item(l,s.id.padEnd(20),`${e.padEnd(10)} ${i}`)}};export{A as getStatus,_ as printStatus};
2
2
  //# sourceMappingURL=status.mjs.map
@@ -1,9 +1,9 @@
1
- import{createHash as H,randomBytes as Ce}from"node:crypto";import{mkdtemp as Y,rm as F,mkdir as A,readFile as j,writeFile as ee,access as L,rename as X,open as W,lstat as xe,symlink as Pe,readdir as Me,cp as Ee}from"node:fs/promises";import h from"node:path";import E from"picocolors";import{g as Te,t as J,D as te,r as q,u as P,a as T,b as Oe}from"../shared/docs-cache.Oi01HUbh.mjs";import{a as K,l as ke,D as Re,b as Fe}from"../shared/docs-cache.DpZFrqUt.mjs";import{execFile as re}from"node:child_process";import be,{tmpdir as oe}from"node:os";import{pathToFileURL as Ae}from"node:url";import{promisify as se}from"node:util";import{execa as je}from"execa";import{g as B,M as N,v as ie}from"./verify.mjs";import{e as Ie,r as ae}from"../shared/docs-cache.kK1DPQIQ.mjs";import{writeLock as _e,resolveLockPath as He,readLock as Le}from"../lock.mjs";import{createWriteStream as ne,createReadStream as Be,constants as ce}from"node:fs";import{pipeline as Ne}from"node:stream/promises";import le from"fast-glob";const ze=/^(https?:\/\/)([^@]+)@/i,I=e=>e.replace(ze,"$1***@"),Ue=se(re),Ge=3e4,Ye=new Set(["file:","ftp:","data:","javascript:"]),Xe=e=>{try{const r=new URL(e);if(Ye.has(r.protocol))throw new Error(`Blocked protocol '${r.protocol}' in repo URL '${I(e)}'.`)}catch(r){if(r instanceof TypeError)return;throw r}},We=e=>{if(Xe(e),e.startsWith("git@")){const r=e.indexOf("@"),t=e.indexOf(":",r+1);return t===-1?null:e.slice(r+1,t)||null}try{const r=new URL(e);return r.protocol!=="https:"&&r.protocol!=="ssh:"?null:r.hostname||null}catch{return null}},ue=(e,r)=>{const t=We(e);if(!t)throw new Error(`Unsupported repo URL '${I(e)}'. Use HTTPS or SSH.`);const s=t.toLowerCase();if(!r.map(o=>o.toLowerCase()).includes(s))throw new Error(`Host '${t}' is not in allowHosts for '${I(e)}'.`)},fe=e=>{const r=e.trim().split(`
2
- `).filter(Boolean);return r.length===0?null:r[0].split(/\s+/)[0]||null},Je=async e=>{ue(e.repo,e.allowHosts);const{stdout:r}=await Ue("git",["ls-remote",e.repo,e.ref],{timeout:e.timeoutMs??Ge,maxBuffer:1024*1024}),t=fe(r);if(!t)throw new Error(`Unable to resolve ref '${e.ref}' for ${I(e.repo)}.`);return{repo:e.repo,ref:e.ref,resolvedCommit:t}},qe=se(re),me=12e4,z=1,Ke=3,Ve=100,k=async(e,r)=>{const t=process.env.PATH??process.env.Path,s=process.env.PATHEXT??(process.platform==="win32"?".COM;.EXE;.BAT;.CMD":void 0),o=["-c","core.hooksPath=/dev/null","-c","submodule.recurse=false","-c","protocol.ext.allow=never"];r?.allowFileProtocol?o.push("-c","protocol.file.allow=always"):o.push("-c","protocol.file.allow=never"),await je("git",[...o,...e],{cwd:r?.cwd,timeout:r?.timeoutMs??me,maxBuffer:10*1024*1024,env:{...process.env,...t?{PATH:t,Path:t}:{},...s?{PATHEXT:s}:{},HOME:process.env.HOME,USER:process.env.USER,USERPROFILE:process.env.USERPROFILE,TMPDIR:process.env.TMPDIR,TMP:process.env.TMP,TEMP:process.env.TEMP,SYSTEMROOT:process.env.SYSTEMROOT,WINDIR:process.env.WINDIR,SSH_AUTH_SOCK:process.env.SSH_AUTH_SOCK,SSH_AGENT_PID:process.env.SSH_AGENT_PID,HTTP_PROXY:process.env.HTTP_PROXY,HTTPS_PROXY:process.env.HTTPS_PROXY,NO_PROXY:process.env.NO_PROXY,GIT_TERMINAL_PROMPT:"0",GIT_CONFIG_NOSYSTEM:"1",GIT_CONFIG_NOGLOBAL:"1",...process.platform==="win32"?{}:{GIT_ASKPASS:"/bin/false"}}})},b=async(e,r=Ke)=>{for(let t=0;t<=r;t+=1)try{await F(e,{recursive:!0,force:!0});return}catch(s){const o=B(s);if(o!=="ENOTEMPTY"&&o!=="EBUSY"&&o!=="EPERM"||t===r)throw s;await new Promise(n=>setTimeout(n,Ve*(t+1)))}},Qe=e=>H("sha256").update(e).digest("hex").substring(0,16),Ze=e=>{const r=Qe(e);return h.join(ae(),r)},et=async e=>{try{return await k(["rev-parse","--git-dir"],{cwd:e}),!0}catch{return!1}},de=async e=>{try{const r=h.join(e,".git","config"),t=(await j(r,"utf8")).toLowerCase();return 
t.includes("partialclone")||t.includes("promisor")||t.includes("partialclonefilter")}catch{return!1}},tt=async(e,r,t,s)=>{const o=h.join(t,"archive.tar");await k(["archive","--remote",e,"--format=tar","--output",o,r],{timeoutMs:s}),await qe("tar",["-xf",o,"-C",t],{timeout:s??me,maxBuffer:1024*1024}),await F(o,{force:!0})},he=e=>{if(!e||e.length===0)return!1;for(const r of e)if(!r||r.includes("**"))return!1;return!0},pe=e=>{if(!e)return[];const r=e.map(t=>{const s=t.replace(/\\/g,"/"),o=s.indexOf("*");return(o===-1?s:s.slice(0,o)).replace(/\/+$|\/$/,"")});return Array.from(new Set(r.filter(t=>t.length>0)))},V=async(e,r)=>{const t=/^[0-9a-f]{7,40}$/i.test(e.ref),s=he(e.include),o=["clone","--no-checkout","--depth",String(z),"--recurse-submodules=no","--no-tags"];if(s&&o.push("--sparse"),t||(o.push("--single-branch"),e.ref!=="HEAD"&&o.push("--branch",e.ref)),o.push(e.repo,r),await k(o,{timeoutMs:e.timeoutMs}),s){const n=pe(e.include);n.length>0&&await k(["-C",r,"sparse-checkout","set",...n],{timeoutMs:e.timeoutMs})}await k(["-C",r,"checkout","--quiet","--detach",e.resolvedCommit],{timeoutMs:e.timeoutMs})},rt=async(e,r)=>{const t=Ze(e.repo),s=await Ie(t),o=/^[0-9a-f]{7,40}$/i.test(e.ref),n=he(e.include),i=ae();if(await A(i,{recursive:!0}),s&&await et(t))if(await de(t))await b(t),await V(e,t);else try{const f=["fetch","origin"];if(o)f.push("--depth",String(z));else{const p=e.ref==="HEAD"?"HEAD":`${e.ref}:refs/remotes/origin/${e.ref}`;f.push(p,"--depth",String(z))}await k(["-C",t,...f],{timeoutMs:e.timeoutMs})}catch{await b(t),await V(e,t)}else s&&await b(t),await V(e,t);await A(r,{recursive:!0});const a=["clone","--no-checkout","--depth",String(z),"--recurse-submodules=no","--no-tags"];await de(t)&&a.splice(2,0,"--filter=blob:none"),n&&a.push("--sparse"),o||(a.push("--single-branch"),e.ref!=="HEAD"&&a.push("--branch",e.ref));const u=Ae(t).href;if(a.push(u,r),await k(a,{timeoutMs:e.timeoutMs,allowFileProtocol:!0}),n){const f=pe(e.include);f.length>0&&await 
k(["-C",r,"sparse-checkout","set",...f],{timeoutMs:e.timeoutMs,allowFileProtocol:!0})}await k(["-C",r,"checkout","--quiet","--detach",e.resolvedCommit],{timeoutMs:e.timeoutMs,allowFileProtocol:!0})},ot=async e=>{const r=await Y(h.join(oe(),`docs-cache-${e.sourceId}-`));try{return await tt(e.repo,e.resolvedCommit,r,e.timeoutMs),r}catch(t){throw await b(r),t}},st=async e=>{K(e.sourceId,"sourceId");try{const r=await ot(e);return{repoDir:r,cleanup:async()=>{await b(r)},fromCache:!1}}catch{const r=await Y(h.join(oe(),`docs-cache-${e.sourceId}-`));try{return await rt(e,r),{repoDir:r,cleanup:async()=>{await b(r)},fromCache:!0}}catch(t){throw await b(r),t}}},U=e=>J(e),Q=Number(process.env.DOCS_CACHE_STREAM_THRESHOLD_MB??"2"),it=Number.isFinite(Q)&&Q>0?Math.floor(Q*1024*1024):1024*1024,at=(e,r)=>{const t=h.resolve(e);if(!h.resolve(r).startsWith(t+h.sep))throw new Error(`Path traversal detected: ${r}`)},we=async e=>{try{return await W(e,ce.O_RDONLY|ce.O_NOFOLLOW)}catch(r){const t=B(r);if(t==="ELOOP")return null;if(t==="EINVAL"||t==="ENOSYS"||t==="ENOTSUP")return(await xe(e)).isSymbolicLink()?null:await W(e,"r");throw r}},nt=(e,r)=>{if(!r||e.length===0)return null;let t="";for(;;){let s=null;for(const n of e){const i=(t?n.normalized.slice(t.length):n.normalized).split("/");if(i.length<2)return t||null;const a=i[0];if(!s){s=a;continue}if(s!==a)return t||null}if(!s)return t||null;const o=`${t}${s}/`;if(o===t)return t||null;t=o}},ct=e=>({...e,exclude:e.exclude??[],unwrapSingleRootDir:e.unwrapSingleRootDir??!1}),lt=async(e,r=5e3)=>{const t=Date.now();for(;Date.now()-t<r;)try{const s=await W(e,"wx");return{release:async()=>{await s.close(),await F(e,{force:!0})}}}catch(s){if(B(s)!=="EEXIST")throw s;await new Promise(o=>setTimeout(o,100))}throw new Error(`Failed to acquire lock for ${e}.`)},ut=async e=>{const r=ct(e);K(r.sourceId,"sourceId");const t=Te(r.cacheDir,r.sourceId);await A(r.cacheDir,{recursive:!0});const s=await Y(h.join(r.cacheDir,`.tmp-${r.sourceId}-`));let 
o=null;const n=async()=>{const i=o;!i||i.closed||i.destroyed||await new Promise(a=>{const u=()=>{i.off("close",f),i.off("error",p),a()},f=()=>u(),p=()=>u();i.once("close",f),i.once("error",p);try{i.end()}catch{u()}})};try{const i=(await le(r.include,{cwd:r.repoDir,ignore:[".git/**",...r.exclude],dot:!0,onlyFiles:!0,followSymbolicLinks:!1})).map(l=>({relativePath:l,normalized:U(l)})).sort((l,m)=>l.normalized.localeCompare(m.normalized)),a=nt(i,r.unwrapSingleRootDir),u=new Set;for(const{normalized:l}of i){const m=a?l.slice(a.length):l;u.add(h.posix.dirname(m))}await Promise.all(Array.from(u,l=>A(h.join(s,l),{recursive:!0})));let f=0,p=0;const S=Math.max(1,Math.min(i.length,Math.max(8,Math.min(128,be.cpus().length*8)))),$=h.join(s,N),d=ne($,{encoding:"utf8"});o=d;const w=H("sha256"),y=async l=>new Promise((m,g)=>{const c=x=>{d.off("drain",D),g(x)},D=()=>{d.off("error",c),m()};d.once("error",c),d.write(l)?(d.off("error",c),m()):d.once("drain",D)});for(let l=0;l<i.length;l+=S){const m=i.slice(l,l+S),g=await Promise.all(m.map(async c=>{const D=h.join(r.repoDir,c.relativePath),x=await we(D);if(!x)return null;try{const O=await x.stat();if(!O.isFile())return null;const R=a?c.normalized.slice(a.length):c.normalized,M=h.join(s,R);if(at(s,M),O.size>=it){const G=Be(D,{fd:x.fd,autoClose:!1}),$e=ne(M);await Ne(G,$e)}else{const G=await x.readFile();await ee(M,G)}return{path:a?c.normalized.slice(a.length):c.normalized,size:O.size}}finally{await x.close()}}));for(const c of g){if(!c)continue;if(r.maxFiles!==void 0&&p+1>r.maxFiles)throw new Error(`Materialized content exceeds maxFiles (${r.maxFiles}).`);if(f+=c.size,f>r.maxBytes)throw new Error(`Materialized content exceeds maxBytes (${r.maxBytes}).`);const D=`${JSON.stringify(c)}
3
- `;w.update(D),await y(D),p+=1}}await new Promise((l,m)=>{d.end(()=>l()),d.once("error",m)});const v=w.digest("hex"),C=async l=>{try{return await L(l),!0}catch{return!1}};return await(async(l,m)=>{const g=await lt(`${m}.lock`);try{const c=await C(m),D=`${m}.bak-${Ce(8).toString("hex")}`;c&&await X(m,D);try{await X(l,m)}catch(x){if(c)try{await X(D,m)}catch(O){const R=O instanceof Error?O.message:String(O);process.stderr.write(`Warning: Failed to restore backup: ${R}
4
- `)}throw x}c&&await F(D,{recursive:!0,force:!0})}finally{await g.release()}})(s,t.sourceDir),{bytes:f,fileCount:p,manifestSha256:v}}catch(i){try{await n()}catch{}throw await F(s,{recursive:!0,force:!0}),i}},ft=async e=>{K(e.sourceId,"sourceId");const r=await le(e.include,{cwd:e.repoDir,ignore:[".git/**",...e.exclude??[]],dot:!0,onlyFiles:!0,followSymbolicLinks:!1});r.sort((n,i)=>U(n).localeCompare(U(i)));let t=0,s=0;const o=H("sha256");for(const n of r){const i=U(n),a=h.join(e.repoDir,n),u=await we(a);if(u)try{const f=await u.stat();if(!f.isFile())continue;if(e.maxFiles!==void 0&&s+1>e.maxFiles)throw new Error(`Materialized content exceeds maxFiles (${e.maxFiles}).`);if(t+=f.size,t>e.maxBytes)throw new Error(`Materialized content exceeds maxBytes (${e.maxBytes}).`);const p=`${JSON.stringify({path:i,size:f.size})}
5
- `;o.update(p),s+=1}finally{await u.close()}}return{bytes:t,fileCount:s,manifestSha256:o.digest("hex")}},mt=async(e,r)=>{await r.rm(e,{recursive:!0,force:!0})},dt=async(e,r)=>{if(!e.unwrapSingleRootDir)return e.sourceDir;const t=await r.readdir(e.sourceDir,{withFileTypes:!0}),s=new Set([N,te]),o=t.filter(a=>!(a.isFile()&&s.has(a.name))),n=o.filter(a=>a.isDirectory()),i=o.filter(a=>a.isFile());return n.length!==1||i.length>0?e.sourceDir:h.join(e.sourceDir,n[0].name)},Z=async e=>{const r=e.deps??{cp:Ee,mkdir:A,readdir:Me,rm:F,symlink:Pe,stderr:process.stderr},t=await dt(e,r),s=h.dirname(e.targetDir);await r.mkdir(s,{recursive:!0}),await mt(e.targetDir,r);const o=process.platform==="win32"?"copy":"symlink";if((e.mode??o)==="copy"){await r.cp(t,e.targetDir,{recursive:!0});return}const n=process.platform==="win32"?"junction":"dir";try{await r.symlink(t,e.targetDir,n)}catch(i){const a=B(i);if(a&&new Set(["EPERM","EACCES","ENOTSUP","EINVAL"]).has(a)){if(e.explicitTargetMode){const u=i instanceof Error?i.message:String(i);r.stderr.write(`Warning: Failed to create symlink at ${e.targetDir}. Falling back to copy. ${u}
6
- `)}await r.cp(t,e.targetDir,{recursive:!0});return}throw i}},ht=e=>{const r={dirs:new Map,files:[]};for(const t of e){const s=t.split("/").filter(Boolean);if(s.length===0)continue;let o=r;for(const i of s.slice(0,-1)){let a=o.dirs.get(i);a||(a={dirs:new Map,files:[]},o.dirs.set(i,a)),o=a}const n=s[s.length-1];o.files.push({name:n,path:t})}return r},ge=(e,r,t)=>{const s=" ".repeat(r),o=Array.from(e.dirs.keys()).sort(),n=[...e.files].sort((i,a)=>i.name.localeCompare(a.name));for(const i of o){t.push(`${s}- ${i}/`);const a=e.dirs.get(i);a&&ge(a,r+1,t)}for(const i of n)t.push(`${s}- [${i.name}](./${i.path})`)},pt=(e,r,t)=>{const s=[...e].sort((a,u)=>a.localeCompare(u)),o=new Map;for(const a of s){const u=a.lastIndexOf("/"),f=u===-1?"":a.substring(0,u),p=u===-1?a:a.substring(u+1),S=o.get(f);S?S.push(p):o.set(f,[p])}const n=Array.from(o.keys()).sort(),i=[];i.push(`[${t}]`);for(const a of n){const u=o.get(a);if(!u)continue;const f=u.join(",");a===""?i.push(`root:{${f}}`):i.push(`${a}:{${f}}`)}r.push(i.join("|"))},wt=(e,r="compressed")=>{const t=[];if(r==="tree"){t.push(`# ${e.id} - Documentation`),t.push(""),t.push("## Files"),t.push("");const s=ht(e.files);ge(s,0,t)}else{const s=`${e.id} Docs Index`;pt(e.files,t,s)}return t.push(""),t.join(`
7
- `)},gt=async e=>{const r=h.join(e,".manifest.jsonl");try{const t=await j(r,"utf8"),s=[];for(const o of t.split(`
8
- `))if(o.trim()){const n=JSON.parse(o);n.path&&s.push(n.path)}return s}catch{return[]}},yt=async e=>{const r=new Map(e.sources.map(s=>[s.id,s])),t=new Map((e.results??[]).map(s=>[s.id,s]));for(const[s,o]of Object.entries(e.lock.sources)){const n=r.get(s);n?.targetDir&&J(q(e.configPath,n.targetDir));const i=h.join(e.cacheDir,s);try{await L(i)}catch{continue}const a=await gt(i),u={id:s,repo:o.repo,ref:o.ref,resolvedCommit:o.resolvedCommit,fileCount:o.fileCount,cachePath:J(h.join(e.cacheDir,s)),files:a},f=n?.toc,p=f!==!1;let S="compressed";typeof f=="string"&&(S=f);const $=h.join(i,te);if(p){if(t.get(s)?.status==="up-to-date")try{await L($);continue}catch{}const d=wt(u,S);await ee($,d,"utf8")}else try{await F($,{force:!0})}catch{}}},St=e=>{if(e<1024)return`${e} B`;const r=["KB","MB","GB","TB"];let t=e,s=-1;for(;t>=1024&&s<r.length-1;)t/=1024,s+=1;return`${t.toFixed(1)} ${r[s]}`},_=async e=>{try{return await L(e),!0}catch{return!1}},ye=async(e,r)=>{const t=h.join(e,r);return await _(t)?await _(h.join(t,N)):!1},Dt=e=>{if(!e||e.length===0)return[];const r=e.map(t=>t.trim()).filter(t=>t.length>0);return Array.from(new Set(r)).sort()},vt=["mode","include","exclude","maxBytes","maxFiles","unwrapSingleRootDir"],$t=(e,r)=>e==="include"&&Array.isArray(r)||e==="exclude"&&Array.isArray(r)?Dt(r):r,Ct=e=>{const r=vt.map(o=>[o,$t(o,e[o])]);r.sort(([o],[n])=>o.localeCompare(n));const t=Object.fromEntries(r),s=H("sha256");return s.update(JSON.stringify(t)),s.digest("hex")},Se=async(e,r={})=>{const{config:t,resolvedPath:s,sources:o}=await ke(e.configPath),n=t.defaults??Re.defaults,i=Oe(s,t.cacheDir??Fe,e.cacheDirOverride),a=He(s),u=await _(a);let f=null;u&&(f=await Le(a));const p=r.resolveRemoteCommit??Je,S=e.sourceFilter?.length?o.filter(d=>e.sourceFilter?.includes(d.id)):o,$=await Promise.all(S.map(async d=>{const w=f?.sources?.[d.id],y=d.include??n.include,v=d.exclude,C=Ct({...d,include:y,exclude:v});if(e.offline){const c=await 
ye(i,d.id);return{id:d.id,repo:w?.repo??d.repo,ref:w?.ref??d.ref??n.ref,resolvedCommit:w?.resolvedCommit??"offline",lockCommit:w?.resolvedCommit??null,lockRulesSha256:w?.rulesSha256,status:w&&c?"up-to-date":"missing",bytes:w?.bytes,fileCount:w?.fileCount,manifestSha256:w?.manifestSha256,rulesSha256:C}}const l=await p({repo:d.repo,ref:d.ref,allowHosts:n.allowHosts,timeoutMs:e.timeoutMs}),m=w?.resolvedCommit===l.resolvedCommit&&w?.rulesSha256===C,g=w?m?"up-to-date":"changed":"missing";return{id:d.id,repo:l.repo,ref:l.ref,resolvedCommit:l.resolvedCommit,lockCommit:w?.resolvedCommit??null,lockRulesSha256:w?.rulesSha256,status:g,bytes:w?.bytes,fileCount:w?.fileCount,manifestSha256:w?.manifestSha256,rulesSha256:C}}));return{config:t,configPath:s,cacheDir:i,lockPath:a,lockExists:u,lockData:f,results:$,sources:S,defaults:n}},xt=async()=>{const e=h.resolve(process.cwd(),"package.json");try{const r=await j(e,"utf8"),t=JSON.parse(r.toString());return typeof t.version=="string"?t.version:"0.0.0"}catch{}try{const r=await j(new URL("../package.json",import.meta.url),"utf8"),t=JSON.parse(r.toString());return typeof t.version=="string"?t.version:"0.0.0"}catch{}try{const r=await j(new URL("../../package.json",import.meta.url),"utf8"),t=JSON.parse(r.toString());return typeof t.version=="string"?t.version:"0.0.0"}catch{return"0.0.0"}},Pt=async(e,r)=>{const t=await xt(),s=new Date().toISOString(),o={...r?.sources??{}};for(const n of e.results){const i=o[n.id];o[n.id]={repo:n.repo,ref:n.ref,resolvedCommit:n.resolvedCommit,bytes:n.bytes??i?.bytes??0,fileCount:n.fileCount??i?.fileCount??0,manifestSha256:n.manifestSha256??i?.manifestSha256??n.resolvedCommit,rulesSha256:n.rulesSha256??i?.rulesSha256,updatedAt:s}}return{version:1,generatedAt:s,toolVersion:t,sources:o}},De=async(e,r={})=>{const t=process.hrtime.bigint();let s=0;const o=await Se(e,r);await A(o.cacheDir,{recursive:!0});const n=o.lockData,i=o.results.filter(u=>{const f=o.sources.find(p=>p.id===u.id);return 
u.status==="missing"&&(f?.required??!0)});if(e.failOnMiss&&i.length>0)throw new Error(`Missing required source(s): ${i.map(u=>u.id).join(", ")}.`);if(!e.lockOnly){const u=o.defaults,f=r.fetchSource??st,p=r.materializeSource??ut,S=new Map,$=async(y,v)=>{const C=y?.length?o.results.filter(l=>y.includes(l.id)):o.results;return(await Promise.all(C.map(async l=>{const m=o.sources.find(c=>c.id===l.id);if(!m)return null;if(v)return{result:l,source:m};let g=S.get(l.id);return g===void 0&&(g=await ye(o.cacheDir,l.id),S.set(l.id,g)),l.status!=="up-to-date"||!g?{result:l,source:m}:null}))).filter(Boolean)},d=async()=>{await Promise.all(o.sources.map(async y=>{if(!y.targetDir)return;const v=q(o.configPath,y.targetDir);await _(v)||await Z({sourceDir:h.join(o.cacheDir,y.id),targetDir:v,mode:y.targetMode??u.targetMode,explicitTargetMode:y.targetMode!==void 0,unwrapSingleRootDir:y.unwrapSingleRootDir})}))},w=async y=>{const v=e.concurrency??4;let C=0;const l=async()=>{const m=y[C];if(!m||!m.source)return;C+=1;const{result:g,source:c}=m,D=o.lockData?.sources?.[c.id],x=await f({sourceId:c.id,repo:c.repo,ref:c.ref,resolvedCommit:g.resolvedCommit,cacheDir:o.cacheDir,include:c.include??u.include,timeoutMs:e.timeoutMs});e.json||P.step(x.fromCache?"Restoring from cache":"Downloading repo",c.id);try{const O=h.join(o.cacheDir,c.id,N);if(g.status!=="up-to-date"&&D?.manifestSha256&&D?.rulesSha256===g.rulesSha256&&await _(O)){const M=await ft({sourceId:c.id,repoDir:x.repoDir,cacheDir:o.cacheDir,include:c.include??u.include,exclude:c.exclude,maxBytes:c.maxBytes??u.maxBytes,maxFiles:c.maxFiles??u.maxFiles});if(M.manifestSha256===D.manifestSha256){g.bytes=M.bytes,g.fileCount=M.fileCount,g.manifestSha256=M.manifestSha256,g.status="up-to-date",e.json||P.item(T.success,c.id,"no content changes"),await l();return}}e.json||P.step("Building cache layout",c.id);const R=await 
p({sourceId:c.id,repoDir:x.repoDir,cacheDir:o.cacheDir,include:c.include??u.include,exclude:c.exclude,maxBytes:c.maxBytes??u.maxBytes,maxFiles:c.maxFiles??u.maxFiles,unwrapSingleRootDir:c.unwrapSingleRootDir});if(c.targetDir){const M=q(o.configPath,c.targetDir);await Z({sourceDir:h.join(o.cacheDir,c.id),targetDir:M,mode:c.targetMode??u.targetMode,explicitTargetMode:c.targetMode!==void 0,unwrapSingleRootDir:c.unwrapSingleRootDir})}g.bytes=R.bytes,g.fileCount=R.fileCount,g.manifestSha256=R.manifestSha256,e.json||P.item(T.success,c.id,`synced ${R.fileCount} files`)}finally{await x.cleanup()}await l()};await Promise.all(Array.from({length:Math.min(v,y.length)},l))};if(e.offline)await d();else{const y=await $();await w(y),await d()}if(!e.offline){const y=(await ie({configPath:o.configPath,cacheDirOverride:o.cacheDir})).results.filter(v=>!v.ok);if(y.length>0){const v=await $(y.map(l=>l.id),!0);v.length>0&&(await w(v),await d());const C=(await ie({configPath:o.configPath,cacheDirOverride:o.cacheDir})).results.filter(l=>!l.ok);if(C.length>0&&(s+=1,!e.json)){const l=C.map(m=>`${m.id} (${m.issues.join("; ")})`).join(", ");P.line(`${T.warn} Verify failed for ${C.length} source(s): ${l}`)}}}}const a=await Pt(o,n);if(await _e(o.lockPath,a),!e.json){const u=Number(process.hrtime.bigint()-t)/1e6,f=o.results.reduce((S,$)=>S+($.bytes??0),0),p=o.results.reduce((S,$)=>S+($.fileCount??0),0);P.line(`${T.info} Completed in ${u.toFixed(0)}ms \xB7 ${St(f)} \xB7 ${p} files${s?` \xB7 ${s} warning${s===1?"":"s"}`:""}`)}return await yt({cacheDir:o.cacheDir,configPath:o.configPath,lock:a,sources:o.sources,results:o.results}),o.lockExists=!0,o},ve=e=>{const r={upToDate:e.results.filter(t=>t.status==="up-to-date").length,changed:e.results.filter(t=>t.status==="changed").length,missing:e.results.filter(t=>t.status==="missing").length};if(e.results.length===0){P.line(`${T.info} No sources to sync.`);return}P.line(`${T.info} ${e.results.length} sources (${r.upToDate} up-to-date, ${r.changed} 
changed, ${r.missing} missing)`);for(const t of e.results){const s=P.hash(t.resolvedCommit),o=P.hash(t.lockCommit),n=!!t.lockRulesSha256&&!!t.rulesSha256&&t.lockRulesSha256!==t.rulesSha256;if(t.status==="up-to-date"){P.item(T.success,t.id,`${E.dim("up-to-date")} ${E.gray(s)}`);continue}if(t.status==="changed"){if(t.lockCommit===t.resolvedCommit&&n){P.item(T.warn,t.id,`${E.dim("rules changed")} ${E.gray(s)}`);continue}P.item(T.warn,t.id,`${E.dim("changed")} ${E.gray(o)} ${E.dim("->")} ${E.gray(s)}`);continue}P.item(T.warn,t.id,`${E.dim("missing")} ${E.gray(s)}`)}},Mt={__proto__:null,getSyncPlan:Se,printSyncPlan:ve,runSync:De};export{Z as a,ve as b,De as c,ue as e,fe as p,I as r,Mt as s};
1
+ import{createHash as L,randomBytes as Pe}from"node:crypto";import{mkdtemp as X,rm as R,mkdir as H,readFile as A,writeFile as re,access as _,rename as W,open as J,lstat as xe,symlink as Ee,readdir as Te,cp as Oe}from"node:fs/promises";import h from"node:path";import k from"picocolors";import{g as ke,t as q,D as oe,r as K,u as P,a as F,b as Fe}from"../shared/docs-cache.Oi01HUbh.mjs";import{a as V,l as Re,D as be,b as He}from"../shared/docs-cache.bWkgSdUq.mjs";import{execFile as ie}from"node:child_process";import Ae,{tmpdir as se}from"node:os";import{pathToFileURL as je}from"node:url";import{promisify as ae}from"node:util";import{execa as Ie}from"execa";import{g as N,M as B,v as ne}from"./verify.mjs";import{e as Le,r as ce}from"../shared/docs-cache.kK1DPQIQ.mjs";import{writeLock as _e,resolveLockPath as Ne,readLock as Be}from"../lock.mjs";import{createWriteStream as le,createReadStream as ze,constants as ue}from"node:fs";import{pipeline as Ue}from"node:stream/promises";import fe from"fast-glob";const Ye=/^(https?:\/\/)([^@]+)@/i,j=e=>e.replace(Ye,"$1***@"),Ge=ae(ie),Xe=3e4,We=new Set(["file:","ftp:","data:","javascript:"]),Je=e=>{try{const r=new URL(e);if(We.has(r.protocol))throw new Error(`Blocked protocol '${r.protocol}' in repo URL '${j(e)}'.`)}catch(r){if(r instanceof TypeError)return;throw r}},qe=e=>{if(Je(e),e.startsWith("git@")){const r=e.indexOf("@"),t=e.indexOf(":",r+1);return t===-1?null:e.slice(r+1,t)||null}try{const r=new URL(e);return r.protocol!=="https:"&&r.protocol!=="ssh:"?null:r.hostname||null}catch{return null}},me=(e,r)=>{const t=qe(e);if(!t)throw new Error(`Unsupported repo URL '${j(e)}'. Use HTTPS or SSH.`);const i=t.toLowerCase();if(!r.map(o=>o.toLowerCase()).includes(i))throw new Error(`Host '${t}' is not in allowHosts for '${j(e)}'.`)},de=e=>{const r=e.trim().split(`
2
+ `).filter(Boolean);return r.length===0?null:r[0].split(/\s+/)[0]||null},Ke=async e=>{me(e.repo,e.allowHosts);const{stdout:r}=await Ge("git",["ls-remote",e.repo,e.ref],{timeout:e.timeoutMs??Xe,maxBuffer:1024*1024}),t=de(r);if(!t)throw new Error(`Unable to resolve ref '${e.ref}' for ${j(e.repo)}.`);return{repo:e.repo,ref:e.ref,resolvedCommit:t}},Ve=ae(ie),he=12e4,z=1,Qe=3,Ze=100,T=async(e,r)=>{const t=process.env.PATH??process.env.Path,i=process.env.PATHEXT??(process.platform==="win32"?".COM;.EXE;.BAT;.CMD":void 0),o=["-c","core.hooksPath=/dev/null","-c","submodule.recurse=false","-c","protocol.ext.allow=never"];r?.allowFileProtocol?o.push("-c","protocol.file.allow=always"):o.push("-c","protocol.file.allow=never"),await Ie("git",[...o,...e],{cwd:r?.cwd,timeout:r?.timeoutMs??he,maxBuffer:10*1024*1024,env:{...process.env,...t?{PATH:t,Path:t}:{},...i?{PATHEXT:i}:{},HOME:process.env.HOME,USER:process.env.USER,USERPROFILE:process.env.USERPROFILE,TMPDIR:process.env.TMPDIR,TMP:process.env.TMP,TEMP:process.env.TEMP,SYSTEMROOT:process.env.SYSTEMROOT,WINDIR:process.env.WINDIR,SSH_AUTH_SOCK:process.env.SSH_AUTH_SOCK,SSH_AGENT_PID:process.env.SSH_AGENT_PID,HTTP_PROXY:process.env.HTTP_PROXY,HTTPS_PROXY:process.env.HTTPS_PROXY,NO_PROXY:process.env.NO_PROXY,GIT_TERMINAL_PROMPT:"0",GIT_CONFIG_NOSYSTEM:"1",GIT_CONFIG_NOGLOBAL:"1",...process.platform==="win32"?{}:{GIT_ASKPASS:"/bin/false"}}})},b=async(e,r=Qe)=>{for(let t=0;t<=r;t+=1)try{await R(e,{recursive:!0,force:!0});return}catch(i){const o=N(i);if(o!=="ENOTEMPTY"&&o!=="EBUSY"&&o!=="EPERM"||t===r)throw i;await new Promise(n=>setTimeout(n,Ze*(t+1)))}},et=e=>L("sha256").update(e).digest("hex").substring(0,16),tt=e=>{const r=et(e);return h.join(ce(),r)},rt=async e=>{try{return await T(["rev-parse","--git-dir"],{cwd:e}),!0}catch{return!1}},pe=async e=>{try{const r=h.join(e,".git","config"),t=(await A(r,"utf8")).toLowerCase();return 
t.includes("partialclone")||t.includes("promisor")||t.includes("partialclonefilter")}catch{return!1}},Q=async(e,r,t)=>{try{await T(["-C",e,"cat-file","-e",`${r}^{commit}`],{timeoutMs:t?.timeoutMs,allowFileProtocol:t?.allowFileProtocol});return}catch{}await T(["-C",e,"fetch","origin",r],{timeoutMs:t?.timeoutMs,allowFileProtocol:t?.allowFileProtocol})},ot=async(e,r,t,i)=>{const o=h.join(t,"archive.tar");await T(["archive","--remote",e,"--format=tar","--output",o,r],{timeoutMs:i}),await Ve("tar",["-xf",o,"-C",t],{timeout:i??he,maxBuffer:1024*1024}),await R(o,{force:!0})},we=e=>{if(!e||e.length===0)return!1;for(const r of e)if(!r||r.includes("**"))return!1;return!0},ge=e=>{if(!e)return[];const r=e.map(t=>{const i=t.replace(/\\/g,"/"),o=i.indexOf("*");return(o===-1?i:i.slice(0,o)).replace(/\/+$|\/$/,"")});return Array.from(new Set(r.filter(t=>t.length>0)))},Z=async(e,r)=>{const t=/^[0-9a-f]{7,40}$/i.test(e.ref),i=we(e.include),o=["clone","--no-checkout","--depth",String(z),"--recurse-submodules=no","--no-tags"];if(i&&o.push("--sparse"),t||(o.push("--single-branch"),e.ref!=="HEAD"&&o.push("--branch",e.ref)),o.push(e.repo,r),await T(o,{timeoutMs:e.timeoutMs}),await Q(r,e.resolvedCommit,{timeoutMs:e.timeoutMs}),i){const n=ge(e.include);n.length>0&&await T(["-C",r,"sparse-checkout","set",...n],{timeoutMs:e.timeoutMs})}await T(["-C",r,"checkout","--quiet","--detach",e.resolvedCommit],{timeoutMs:e.timeoutMs})},it=async(e,r)=>{const t=tt(e.repo),i=await Le(t),o=/^[0-9a-f]{7,40}$/i.test(e.ref),n=we(e.include),s=ce();if(await H(s,{recursive:!0}),i&&await rt(t))if(await pe(t))await b(t),await Z(e,t);else try{const u=["fetch","origin"];if(o)u.push("--depth",String(z));else{const w=e.ref==="HEAD"?"HEAD":`${e.ref}:refs/remotes/origin/${e.ref}`;u.push(w,"--depth",String(z))}await T(["-C",t,...u],{timeoutMs:e.timeoutMs}),await Q(t,e.resolvedCommit,{timeoutMs:e.timeoutMs})}catch{await b(t),await Z(e,t)}else i&&await b(t),await Z(e,t);await H(r,{recursive:!0});const 
a=["clone","--no-checkout","--depth",String(z),"--recurse-submodules=no","--no-tags"];await pe(t)&&a.splice(2,0,"--filter=blob:none"),n&&a.push("--sparse"),o||(a.push("--single-branch"),e.ref!=="HEAD"&&a.push("--branch",e.ref));const c=je(t).href;if(a.push(c,r),await T(a,{timeoutMs:e.timeoutMs,allowFileProtocol:!0}),n){const u=ge(e.include);u.length>0&&await T(["-C",r,"sparse-checkout","set",...u],{timeoutMs:e.timeoutMs,allowFileProtocol:!0})}await Q(r,e.resolvedCommit,{timeoutMs:e.timeoutMs,allowFileProtocol:!0}),await T(["-C",r,"checkout","--quiet","--detach",e.resolvedCommit],{timeoutMs:e.timeoutMs,allowFileProtocol:!0})},st=async e=>{const r=await X(h.join(se(),`docs-cache-${e.sourceId}-`));try{return await ot(e.repo,e.resolvedCommit,r,e.timeoutMs),r}catch(t){throw await b(r),t}},at=async e=>{V(e.sourceId,"sourceId");try{const r=await st(e);return{repoDir:r,cleanup:async()=>{await b(r)},fromCache:!1}}catch{const r=await X(h.join(se(),`docs-cache-${e.sourceId}-`));try{return await it(e,r),{repoDir:r,cleanup:async()=>{await b(r)},fromCache:!0}}catch(t){throw await b(r),t}}},U=e=>q(e),ee=Number(process.env.DOCS_CACHE_STREAM_THRESHOLD_MB??"2"),nt=Number.isFinite(ee)&&ee>0?Math.floor(ee*1024*1024):1024*1024,ct=(e,r)=>{const t=h.resolve(e);if(!h.resolve(r).startsWith(t+h.sep))throw new Error(`Path traversal detected: ${r}`)},ye=async e=>{try{return await J(e,ue.O_RDONLY|ue.O_NOFOLLOW)}catch(r){const t=N(r);if(t==="ELOOP")return null;if(t==="EINVAL"||t==="ENOSYS"||t==="ENOTSUP")return(await xe(e)).isSymbolicLink()?null:await J(e,"r");throw r}},lt=(e,r)=>{if(!r||e.length===0)return null;let t="";for(;;){let i=null;for(const n of e){const s=(t?n.normalized.slice(t.length):n.normalized).split("/");if(s.length<2)return t||null;const a=s[0];if(!i){i=a;continue}if(i!==a)return t||null}if(!i)return t||null;const o=`${t}${i}/`;if(o===t)return 
t||null;t=o}},ut=e=>({...e,exclude:e.exclude??[],ignoreHidden:e.ignoreHidden??!1,unwrapSingleRootDir:e.unwrapSingleRootDir??!1}),ft=async(e,r=5e3)=>{const t=Date.now();for(;Date.now()-t<r;)try{const i=await J(e,"wx");return{release:async()=>{await i.close(),await R(e,{force:!0})}}}catch(i){if(N(i)!=="EEXIST")throw i;await new Promise(o=>setTimeout(o,100))}throw new Error(`Failed to acquire lock for ${e}.`)},mt=async e=>{const r=ut(e);V(r.sourceId,"sourceId");const t=ke(r.cacheDir,r.sourceId);await H(r.cacheDir,{recursive:!0});const i=await X(h.join(r.cacheDir,`.tmp-${r.sourceId}-`));let o=null;const n=async()=>{const s=o;!s||s.closed||s.destroyed||await new Promise(a=>{const c=()=>{s.off("close",u),s.off("error",w),a()},u=()=>c(),w=()=>c();s.once("close",u),s.once("error",w);try{s.end()}catch{c()}})};try{const s=[".git/**",...r.ignoreHidden?[".*","**/.*","**/.*/**"]:[],...r.exclude],a=(await fe(r.include,{cwd:r.repoDir,ignore:s,dot:!0,onlyFiles:!0,followSymbolicLinks:!1})).map(f=>({relativePath:f,normalized:U(f)})).sort((f,m)=>f.normalized.localeCompare(m.normalized)),c=lt(a,r.unwrapSingleRootDir),u=new Set;for(const{normalized:f}of a){const m=c?f.slice(c.length):f;u.add(h.posix.dirname(m))}await Promise.all(Array.from(u,f=>H(h.join(i,f),{recursive:!0})));let w=0,D=0;const C=Math.max(1,Math.min(a.length,Math.max(8,Math.min(128,Ae.cpus().length*8)))),y=h.join(i,B),d=le(y,{encoding:"utf8"});o=d;const g=L("sha256"),$=async f=>new Promise((m,l)=>{const S=E=>{d.off("drain",v),l(E)},v=()=>{d.off("error",S),m()};d.once("error",S),d.write(f)?(d.off("error",S),m()):d.once("drain",v)});for(let f=0;f<a.length;f+=C){const m=a.slice(f,f+C),l=await Promise.all(m.map(async S=>{const v=h.join(r.repoDir,S.relativePath),E=await ye(v);if(!E)return null;try{const x=await E.stat();if(!x.isFile())return null;const O=c?S.normalized.slice(c.length):S.normalized,Y=h.join(i,O);if(ct(i,Y),x.size>=nt){const G=ze(v,{fd:E.fd,autoClose:!1}),Me=le(Y);await Ue(G,Me)}else{const G=await 
E.readFile();await re(Y,G)}return{path:c?S.normalized.slice(c.length):S.normalized,size:x.size}}finally{await E.close()}}));for(const S of l){if(!S)continue;if(r.maxFiles!==void 0&&D+1>r.maxFiles)throw new Error(`Materialized content exceeds maxFiles (${r.maxFiles}).`);if(w+=S.size,w>r.maxBytes)throw new Error(`Materialized content exceeds maxBytes (${r.maxBytes}).`);const v=`${JSON.stringify(S)}
3
+ `;g.update(v),await $(v),D+=1}}await new Promise((f,m)=>{d.end(()=>f()),d.once("error",m)});const M=g.digest("hex"),p=async f=>{try{return await _(f),!0}catch{return!1}};return await(async(f,m)=>{const l=await ft(`${m}.lock`);try{const S=await p(m),v=`${m}.bak-${Pe(8).toString("hex")}`;S&&await W(m,v);try{await W(f,m)}catch(E){if(S)try{await W(v,m)}catch(x){const O=x instanceof Error?x.message:String(x);process.stderr.write(`Warning: Failed to restore backup: ${O}
4
+ `)}throw E}S&&await R(v,{recursive:!0,force:!0})}finally{await l.release()}})(i,t.sourceDir),{bytes:w,fileCount:D,manifestSha256:M}}catch(s){try{await n()}catch{}throw await R(i,{recursive:!0,force:!0}),s}},dt=async e=>{V(e.sourceId,"sourceId");const r=await fe(e.include,{cwd:e.repoDir,ignore:[".git/**",...e.ignoreHidden?[".*","**/.*","**/.*/**"]:[],...e.exclude??[]],dot:!0,onlyFiles:!0,followSymbolicLinks:!1});r.sort((n,s)=>U(n).localeCompare(U(s)));let t=0,i=0;const o=L("sha256");for(const n of r){const s=U(n),a=h.join(e.repoDir,n),c=await ye(a);if(c)try{const u=await c.stat();if(!u.isFile())continue;if(e.maxFiles!==void 0&&i+1>e.maxFiles)throw new Error(`Materialized content exceeds maxFiles (${e.maxFiles}).`);if(t+=u.size,t>e.maxBytes)throw new Error(`Materialized content exceeds maxBytes (${e.maxBytes}).`);const w=`${JSON.stringify({path:s,size:u.size})}
5
+ `;o.update(w),i+=1}finally{await c.close()}}return{bytes:t,fileCount:i,manifestSha256:o.digest("hex")}},ht=async(e,r)=>{await r.rm(e,{recursive:!0,force:!0})},pt=async(e,r)=>{if(!e.unwrapSingleRootDir)return e.sourceDir;const t=await r.readdir(e.sourceDir,{withFileTypes:!0}),i=new Set([B,oe]),o=t.filter(a=>!(a.isFile()&&i.has(a.name))),n=o.filter(a=>a.isDirectory()),s=o.filter(a=>a.isFile());return n.length!==1||s.length>0?e.sourceDir:h.join(e.sourceDir,n[0].name)},te=async e=>{const r=e.deps??{cp:Oe,mkdir:H,readdir:Te,rm:R,symlink:Ee,stderr:process.stderr},t=await pt(e,r),i=h.dirname(e.targetDir);await r.mkdir(i,{recursive:!0}),await ht(e.targetDir,r);const o=process.platform==="win32"?"copy":"symlink";if((e.mode??o)==="copy"){await r.cp(t,e.targetDir,{recursive:!0});return}const n=process.platform==="win32"?"junction":"dir";try{await r.symlink(t,e.targetDir,n)}catch(s){const a=N(s);if(a&&new Set(["EPERM","EACCES","ENOTSUP","EINVAL"]).has(a)){if(e.explicitTargetMode){const c=s instanceof Error?s.message:String(s);r.stderr.write(`Warning: Failed to create symlink at ${e.targetDir}. Falling back to copy. ${c}
6
+ `)}await r.cp(t,e.targetDir,{recursive:!0});return}throw s}},wt=e=>{const r={dirs:new Map,files:[]};for(const t of e){const i=t.split("/").filter(Boolean);if(i.length===0)continue;let o=r;for(const s of i.slice(0,-1)){let a=o.dirs.get(s);a||(a={dirs:new Map,files:[]},o.dirs.set(s,a)),o=a}const n=i[i.length-1];o.files.push({name:n,path:t})}return r},Se=(e,r,t)=>{const i=" ".repeat(r),o=Array.from(e.dirs.keys()).sort(),n=[...e.files].sort((s,a)=>s.name.localeCompare(a.name));for(const s of o){t.push(`${i}- ${s}/`);const a=e.dirs.get(s);a&&Se(a,r+1,t)}for(const s of n)t.push(`${i}- [${s.name}](./${s.path})`)},gt=(e,r,t)=>{const i=[...e].sort((a,c)=>a.localeCompare(c)),o=new Map;for(const a of i){const c=a.lastIndexOf("/"),u=c===-1?"":a.substring(0,c),w=c===-1?a:a.substring(c+1),D=o.get(u);D?D.push(w):o.set(u,[w])}const n=Array.from(o.keys()).sort(),s=[];s.push(`[${t}]`);for(const a of n){const c=o.get(a);if(!c)continue;const u=c.join(",");a===""?s.push(`root:{${u}}`):s.push(`${a}:{${u}}`)}r.push(s.join("|"))},yt=(e,r="compressed")=>{const t=[];if(r==="tree"){t.push(`# ${e.id} - Documentation`),t.push(""),t.push("## Files"),t.push("");const i=wt(e.files);Se(i,0,t)}else{const i=`${e.id} Docs Index`;gt(e.files,t,i)}return t.push(""),t.join(`
7
+ `)},St=async e=>{const r=h.join(e,".manifest.jsonl");try{const t=await A(r,"utf8"),i=[];for(const o of t.split(`
8
+ `))if(o.trim()){const n=JSON.parse(o);n.path&&i.push(n.path)}return i}catch{return[]}},Dt=async e=>{const r=new Map(e.sources.map(i=>[i.id,i])),t=new Map((e.results??[]).map(i=>[i.id,i]));for(const[i,o]of Object.entries(e.lock.sources)){const n=r.get(i);n?.targetDir&&q(K(e.configPath,n.targetDir));const s=h.join(e.cacheDir,i);try{await _(s)}catch{continue}const a=await St(s),c={id:i,repo:o.repo,ref:o.ref,resolvedCommit:o.resolvedCommit,fileCount:o.fileCount,cachePath:q(h.join(e.cacheDir,i)),files:a},u=n?.toc,w=u!==!1;let D="compressed";typeof u=="string"&&(D=u);const C=h.join(s,oe);if(w){if(t.get(i)?.status==="up-to-date")try{await _(C);continue}catch{}const y=yt(c,D);await re(C,y,"utf8")}else try{await R(C,{force:!0})}catch{}}},vt=e=>{if(e<1024)return`${e} B`;const r=["KB","MB","GB","TB"];let t=e,i=-1;for(;t>=1024&&i<r.length-1;)t/=1024,i+=1;return`${t.toFixed(1)} ${r[i]}`},I=async e=>{try{return await _(e),!0}catch{return!1}},De=async(e,r)=>{const t=h.join(e,r);return await I(t)?await I(h.join(t,B)):!1},Ct=e=>{if(!e||e.length===0)return[];const r=e.map(t=>t.trim()).filter(t=>t.length>0);return Array.from(new Set(r)).sort()},$t=["mode","include","exclude","maxBytes","maxFiles","ignoreHidden","unwrapSingleRootDir"],Mt=(e,r)=>e==="include"&&Array.isArray(r)||e==="exclude"&&Array.isArray(r)?Ct(r):r,Pt=e=>{const r=$t.map(o=>[o,Mt(o,e[o])]);r.sort(([o],[n])=>o.localeCompare(n));const t=Object.fromEntries(r),i=L("sha256");return i.update(JSON.stringify(t)),i.digest("hex")},ve=async(e,r={})=>{const{config:t,resolvedPath:i,sources:o}=await Re(e.configPath),n=t.defaults??be.defaults,s=Fe(i,t.cacheDir??He,e.cacheDirOverride),a=Ne(i),c=await I(a);let u=null;c&&(u=await Be(a));const w=r.resolveRemoteCommit??Ke,D=e.sourceFilter?.length?o.filter(y=>e.sourceFilter?.includes(y.id)):o,C=await Promise.all(D.map(async y=>{const d=u?.sources?.[y.id],g=y.include??n.include,$=y.exclude??n.exclude,M=Pt({...y,include:g,exclude:$});if(e.offline){const l=await 
De(s,y.id);return{id:y.id,repo:d?.repo??y.repo,ref:d?.ref??y.ref??n.ref,resolvedCommit:d?.resolvedCommit??"offline",lockCommit:d?.resolvedCommit??null,lockRulesSha256:d?.rulesSha256,status:d&&l?"up-to-date":"missing",bytes:d?.bytes,fileCount:d?.fileCount,manifestSha256:d?.manifestSha256,rulesSha256:M}}const p=await w({repo:y.repo,ref:y.ref,allowHosts:n.allowHosts,timeoutMs:e.timeoutMs}),f=d?.resolvedCommit===p.resolvedCommit&&d?.rulesSha256===M,m=d?f?"up-to-date":"changed":"missing";return{id:y.id,repo:p.repo,ref:p.ref,resolvedCommit:p.resolvedCommit,lockCommit:d?.resolvedCommit??null,lockRulesSha256:d?.rulesSha256,status:m,bytes:d?.bytes,fileCount:d?.fileCount,manifestSha256:d?.manifestSha256,rulesSha256:M}}));return{config:t,configPath:i,cacheDir:s,lockPath:a,lockExists:c,lockData:u,results:C,sources:D,defaults:n}},xt=async()=>{const e=h.resolve(process.cwd(),"package.json");try{const r=await A(e,"utf8"),t=JSON.parse(r.toString());return typeof t.version=="string"?t.version:"0.0.0"}catch{}try{const r=await A(new URL("../package.json",import.meta.url),"utf8"),t=JSON.parse(r.toString());return typeof t.version=="string"?t.version:"0.0.0"}catch{}try{const r=await A(new URL("../../package.json",import.meta.url),"utf8"),t=JSON.parse(r.toString());return typeof t.version=="string"?t.version:"0.0.0"}catch{return"0.0.0"}},Et=async(e,r)=>{const t=await xt(),i=new Date().toISOString(),o={...r?.sources??{}};for(const n of e.results){const s=o[n.id];o[n.id]={repo:n.repo,ref:n.ref,resolvedCommit:n.resolvedCommit,bytes:n.bytes??s?.bytes??0,fileCount:n.fileCount??s?.fileCount??0,manifestSha256:n.manifestSha256??s?.manifestSha256??n.resolvedCommit,rulesSha256:n.rulesSha256??s?.rulesSha256,updatedAt:i}}return{version:1,generatedAt:i,toolVersion:t,sources:o}},Ce=async(e,r={})=>{const t=process.hrtime.bigint();let i=0;const o=await ve(e,r);await H(o.cacheDir,{recursive:!0});const n=o.lockData,s=o.results.filter(c=>{const u=o.sources.find(w=>w.id===c.id);return 
c.status==="missing"&&(u?.required??!0)});if(e.failOnMiss&&s.length>0)throw new Error(`Missing required source(s): ${s.map(c=>c.id).join(", ")}.`);if(!e.lockOnly){const c=o.defaults,u=r.fetchSource??at,w=r.materializeSource??mt,D=new Map,C=async(g,$)=>{const M=g?.length?o.results.filter(p=>g.includes(p.id)):o.results;return(await Promise.all(M.map(async p=>{const f=o.sources.find(l=>l.id===p.id);if(!f)return null;if($)return{result:p,source:f};let m=D.get(p.id);return m===void 0&&(m=await De(o.cacheDir,p.id),D.set(p.id,m)),p.status!=="up-to-date"||!m?{result:p,source:f}:null}))).filter(Boolean)},y=async()=>{await Promise.all(o.sources.map(async g=>{if(!g.targetDir)return;const $=K(o.configPath,g.targetDir);await I($)||await te({sourceDir:h.join(o.cacheDir,g.id),targetDir:$,mode:g.targetMode??c.targetMode,explicitTargetMode:g.targetMode!==void 0,unwrapSingleRootDir:g.unwrapSingleRootDir})}))},d=async g=>{const $=e.concurrency??4;let M=0;const p=async()=>{const f=g[M];if(!f||!f.source)return;M+=1;const{result:m,source:l}=f,S=o.lockData?.sources?.[l.id],v=await u({sourceId:l.id,repo:l.repo,ref:l.ref,resolvedCommit:m.resolvedCommit,cacheDir:o.cacheDir,include:l.include??c.include,timeoutMs:e.timeoutMs});e.json||P.step(v.fromCache?"Restoring from cache":"Downloading repo",l.id);try{const E=h.join(o.cacheDir,l.id,B);if(m.status!=="up-to-date"&&S?.manifestSha256&&S?.rulesSha256===m.rulesSha256&&await I(E)){const O=await dt({sourceId:l.id,repoDir:v.repoDir,cacheDir:o.cacheDir,include:l.include??c.include,exclude:l.exclude,maxBytes:l.maxBytes??c.maxBytes,maxFiles:l.maxFiles??c.maxFiles,ignoreHidden:l.ignoreHidden??c.ignoreHidden});if(O.manifestSha256===S.manifestSha256){m.bytes=O.bytes,m.fileCount=O.fileCount,m.manifestSha256=O.manifestSha256,m.status="up-to-date",e.json||P.item(F.success,l.id,"no content changes"),await p();return}}e.json||P.step("Materializing",l.id);const x=await 
w({sourceId:l.id,repoDir:v.repoDir,cacheDir:o.cacheDir,include:l.include??c.include,exclude:l.exclude,maxBytes:l.maxBytes??c.maxBytes,maxFiles:l.maxFiles??c.maxFiles,ignoreHidden:l.ignoreHidden??c.ignoreHidden,unwrapSingleRootDir:l.unwrapSingleRootDir});if(l.targetDir){const O=K(o.configPath,l.targetDir);await te({sourceDir:h.join(o.cacheDir,l.id),targetDir:O,mode:l.targetMode??c.targetMode,explicitTargetMode:l.targetMode!==void 0,unwrapSingleRootDir:l.unwrapSingleRootDir})}m.bytes=x.bytes,m.fileCount=x.fileCount,m.manifestSha256=x.manifestSha256,e.json||P.item(F.success,l.id,`synced ${x.fileCount} files`)}finally{await v.cleanup()}await p()};await Promise.all(Array.from({length:Math.min($,g.length)},p))};if(e.offline)await y();else{const g=await C();await d(g),await y()}if(!e.offline){const g=(await ne({configPath:o.configPath,cacheDirOverride:o.cacheDir})).results.filter($=>!$.ok);if(g.length>0){const $=await C(g.map(p=>p.id),!0);$.length>0&&(await d($),await y());const M=(await ne({configPath:o.configPath,cacheDirOverride:o.cacheDir})).results.filter(p=>!p.ok);if(M.length>0&&(i+=1,!e.json)){const p=M.map(f=>`${f.id} (${f.issues.join("; ")})`).join(", ");P.line(`${F.warn} Verify failed for ${M.length} source(s): ${p}`)}}}}const a=await Et(o,n);if(await _e(o.lockPath,a),!e.json){const c=Number(process.hrtime.bigint()-t)/1e6,u=o.results.reduce((D,C)=>D+(C.bytes??0),0),w=o.results.reduce((D,C)=>D+(C.fileCount??0),0);P.line(`${F.info} Completed in ${c.toFixed(0)}ms \xB7 ${vt(u)} \xB7 ${w} files${i?` \xB7 ${i} warning${i===1?"":"s"}`:""}`)}return await Dt({cacheDir:o.cacheDir,configPath:o.configPath,lock:a,sources:o.sources,results:o.results}),o.lockExists=!0,o},$e=e=>{const r={upToDate:e.results.filter(t=>t.status==="up-to-date").length,changed:e.results.filter(t=>t.status==="changed").length,missing:e.results.filter(t=>t.status==="missing").length};if(e.results.length===0){P.line(`${F.info} No sources to sync.`);return}P.line(`${F.info} ${e.results.length} sources 
(${r.upToDate} up-to-date, ${r.changed} changed, ${r.missing} missing)`);for(const t of e.results){const i=P.hash(t.resolvedCommit),o=P.hash(t.lockCommit),n=!!t.lockRulesSha256&&!!t.rulesSha256&&t.lockRulesSha256!==t.rulesSha256;if(t.status==="up-to-date"){P.item(F.success,t.id,`${k.dim("up-to-date")} ${k.gray(i)}`);continue}if(t.status==="changed"){if(t.lockCommit===t.resolvedCommit&&n){P.item(F.warn,t.id,`${k.dim("rules changed")} ${k.gray(i)}`);continue}P.item(F.warn,t.id,`${k.dim("changed")} ${k.gray(o)} ${k.dim("->")} ${k.gray(i)}`);continue}P.item(F.warn,t.id,`${k.dim("missing")} ${k.gray(i)}`)}},Tt={__proto__:null,getSyncPlan:ve,printSyncPlan:$e,runSync:Ce};export{te as a,$e as b,Ce as c,me as e,de as p,j as r,Tt as s};
9
9
  //# sourceMappingURL=sync.mjs.map
@@ -1,2 +1,2 @@
1
- import{stat as N,access as z}from"node:fs/promises";import h from"node:path";import{b as $,r as v,u as l,a as m}from"../shared/docs-cache.Oi01HUbh.mjs";import{l as M,b as j}from"../shared/docs-cache.DpZFrqUt.mjs";import{createReadStream as k}from"node:fs";import C from"node:readline";const T=t=>typeof t=="object"&&t!==null&&"code"in t&&(typeof t.code=="string"||typeof t.code=="number"||t.code===void 0),y=t=>T(t)&&typeof t.code=="string"?t.code:void 0,_=t=>{if(!t||typeof t!="object")throw new Error("Manifest entry must be an object.");const e=t;if(typeof e.path!="string"||e.path.length===0)throw new Error("Manifest entry path must be a non-empty string.");if(typeof e.size!="number"||Number.isNaN(e.size))throw new Error("Manifest entry size must be a number.");if(e.size<0)throw new Error("Manifest entry size must be zero or greater.");return{path:e.path,size:e.size}},w=".manifest.jsonl",I=async function*(t){const e=h.join(t,w),a=k(e,{encoding:"utf8"}),s=C.createInterface({input:a,crlfDelay:1/0});try{for await(const u of s){const f=u.trim();f&&(yield _(JSON.parse(f)))}}finally{s.close(),a.destroy()}},O=async t=>{try{return await z(t),!0}catch{return!1}},E=async t=>{const{config:e,resolvedPath:a,sources:s}=await M(t.configPath),u=$(a,e.cacheDir??j,t.cacheDirOverride),f=async(i,n)=>{if(!await O(i))return{ok:!1,issues:[n==="source"?"missing source directory":"missing target directory"]};try{let r=0,o=0;for await(const g of I(i)){const D=h.join(i,g.path);try{(await N(D)).size!==g.size&&(o+=1)}catch(p){const d=y(p);if(d==="ENOENT"||d==="ENOTDIR"){r+=1;continue}throw p}}const c=[];return r>0&&c.push(n==="source"?`missing files: ${r}`:`target missing files: ${r}`),o>0&&c.push(n==="source"?`size mismatch: ${o}`:`target size mismatch: ${o}`),{ok:c.length===0,issues:c}}catch(r){const o=y(r);if(o==="ENOENT"||o==="ENOTDIR")return{ok:!1,issues:[n==="source"?"missing manifest":"missing target manifest"]};throw r}},b=await Promise.all(s.map(async i=>{const 
n=h.join(u,i.id),r=[...(await f(n,"source")).issues];if(i.targetDir&&i.targetMode==="copy"){const o=v(a,i.targetDir),c=await f(o,"target");r.push(...c.issues)}return{id:i.id,ok:r.length===0,issues:r}}));return{cacheDir:u,results:b}},R=t=>{const e=t.results.filter(s=>s.ok).length,a=t.results.length-e;if(t.results.length===0){l.line(`${m.warn} No sources to verify.`);return}l.line(`${m.info} Verified ${t.results.length} sources (${e} ok, ${a} failed)`);for(const s of t.results)s.ok?l.item(m.success,s.id):l.item(m.warn,s.id,s.issues.join(", "))},A={__proto__:null,printVerify:R,verifyCache:E};export{w as M,A as a,y as g,E as v};
1
+ import{stat as N,access as z}from"node:fs/promises";import h from"node:path";import{b as $,r as v,u as l,a as m}from"../shared/docs-cache.Oi01HUbh.mjs";import{l as M,b as j}from"../shared/docs-cache.bWkgSdUq.mjs";import{createReadStream as k}from"node:fs";import C from"node:readline";const T=t=>typeof t=="object"&&t!==null&&"code"in t&&(typeof t.code=="string"||typeof t.code=="number"||t.code===void 0),y=t=>T(t)&&typeof t.code=="string"?t.code:void 0,_=t=>{if(!t||typeof t!="object")throw new Error("Manifest entry must be an object.");const e=t;if(typeof e.path!="string"||e.path.length===0)throw new Error("Manifest entry path must be a non-empty string.");if(typeof e.size!="number"||Number.isNaN(e.size))throw new Error("Manifest entry size must be a number.");if(e.size<0)throw new Error("Manifest entry size must be zero or greater.");return{path:e.path,size:e.size}},w=".manifest.jsonl",I=async function*(t){const e=h.join(t,w),a=k(e,{encoding:"utf8"}),s=C.createInterface({input:a,crlfDelay:1/0});try{for await(const u of s){const f=u.trim();f&&(yield _(JSON.parse(f)))}}finally{s.close(),a.destroy()}},O=async t=>{try{return await z(t),!0}catch{return!1}},E=async t=>{const{config:e,resolvedPath:a,sources:s}=await M(t.configPath),u=$(a,e.cacheDir??j,t.cacheDirOverride),f=async(i,n)=>{if(!await O(i))return{ok:!1,issues:[n==="source"?"missing source directory":"missing target directory"]};try{let r=0,o=0;for await(const g of I(i)){const D=h.join(i,g.path);try{(await N(D)).size!==g.size&&(o+=1)}catch(p){const d=y(p);if(d==="ENOENT"||d==="ENOTDIR"){r+=1;continue}throw p}}const c=[];return r>0&&c.push(n==="source"?`missing files: ${r}`:`target missing files: ${r}`),o>0&&c.push(n==="source"?`size mismatch: ${o}`:`target size mismatch: ${o}`),{ok:c.length===0,issues:c}}catch(r){const o=y(r);if(o==="ENOENT"||o==="ENOTDIR")return{ok:!1,issues:[n==="source"?"missing manifest":"missing target manifest"]};throw r}},b=await Promise.all(s.map(async i=>{const 
n=h.join(u,i.id),r=[...(await f(n,"source")).issues];if(i.targetDir&&i.targetMode==="copy"){const o=v(a,i.targetDir),c=await f(o,"target");r.push(...c.issues)}return{id:i.id,ok:r.length===0,issues:r}}));return{cacheDir:u,results:b}},R=t=>{const e=t.results.filter(s=>s.ok).length,a=t.results.length-e;if(t.results.length===0){l.line(`${m.warn} No sources to verify.`);return}l.line(`${m.info} Verified ${t.results.length} sources (${e} ok, ${a} failed)`);for(const s of t.results)s.ok?l.item(m.success,s.id):l.item(m.warn,s.id,s.issues.join(", "))},A={__proto__:null,printVerify:R,verifyCache:E};export{w as M,A as a,y as g,E as v};
2
2
  //# sourceMappingURL=verify.mjs.map
@@ -0,0 +1,3 @@
1
+ import{writeFile as I,readFile as T,access as z}from"node:fs/promises";import w from"node:path";import{z as t}from"zod";import{r as L}from"./docs-cache.Oi01HUbh.mjs";const x=t.enum(["symlink","copy"]),M=t.enum(["materialize"]),F=t.enum(["tree","compressed"]),U=t.object({type:t.enum(["commit","manifest"]),value:t.string().nullable()}).strict(),_=t.object({ref:t.string().min(1),mode:M,include:t.array(t.string().min(1)).min(1),exclude:t.array(t.string().min(1)).optional(),targetMode:x.optional(),required:t.boolean(),maxBytes:t.number().min(1),maxFiles:t.number().min(1).optional(),ignoreHidden:t.boolean(),allowHosts:t.array(t.string().min(1)).min(1),toc:t.union([t.boolean(),F]).optional(),unwrapSingleRootDir:t.boolean().optional()}).strict(),P=t.object({id:t.string().min(1),repo:t.string().min(1),targetDir:t.string().min(1).optional(),targetMode:x.optional(),ref:t.string().min(1).optional(),mode:M.optional(),include:t.array(t.string().min(1)).optional(),exclude:t.array(t.string().min(1)).optional(),required:t.boolean().optional(),maxBytes:t.number().min(1).optional(),maxFiles:t.number().min(1).optional(),ignoreHidden:t.boolean().optional(),integrity:U.optional(),toc:t.union([t.boolean(),F]).optional(),unwrapSingleRootDir:t.boolean().optional()}).strict(),J=t.object({$schema:t.string().min(1).optional(),cacheDir:t.string().min(1).optional(),targetMode:x.optional(),defaults:_.partial().optional(),sources:t.array(P)}).strict(),G=/^[a-zA-Z0-9_-]+$/,V=new Set([".","..","CON","PRN","AUX","NUL","COM1","LPT1"]),H=(e,r)=>{if(typeof e!="string"||e.length===0)throw new Error(`${r} must be a non-empty string.`);if(e.length>200)throw new Error(`${r} exceeds maximum length of 200.`);if(!G.test(e))throw new Error(`${r} must contain only alphanumeric characters, hyphens, and underscores.`);if(V.has(e.toUpperCase()))throw new Error(`${r} uses reserved name '${e}'.`);return 
e},j="docs.config.json",v=".docs",A="package.json",X=process.platform==="win32"?"copy":"symlink",f={cacheDir:v,defaults:{ref:"HEAD",mode:"materialize",include:["**/*.{md,mdx,markdown,mkd,txt,rst,adoc,asciidoc}"],exclude:[],targetMode:X,required:!0,maxBytes:2e8,ignoreHidden:!1,allowHosts:["github.com","gitlab.com","visualstudio.com"],toc:!0,unwrapSingleRootDir:!1},sources:[]},Z=(e,r)=>!e||!r?e===r:e.length!==r.length?!1:e.every((o,a)=>o===r[a]),C=e=>typeof e=="object"&&e!==null&&!Array.isArray(e),R=(e,r)=>{const o={};for(const[a,i]of Object.entries(e)){const d=r[a];if(Array.isArray(i)&&Array.isArray(d)){Z(i,d)||(o[a]=i);continue}if(C(i)&&C(d)){const s=R(i,d);Object.keys(s).length>0&&(o[a]=s);continue}i!==d&&(o[a]=i)}return o},K=e=>{const r={...f,$schema:e.$schema,defaults:{...f.defaults,...e.targetMode?{targetMode:e.targetMode}:void 0}},o=R(e,r),a={$schema:o.$schema,cacheDir:o.cacheDir,targetMode:o.targetMode,defaults:o.defaults,sources:e.sources};return(!a.defaults||Object.keys(a.defaults).length===0)&&delete a.defaults,a},y=e=>typeof e=="object"&&e!==null&&!Array.isArray(e),m=(e,r)=>{if(typeof e!="string"||e.length===0)throw new Error(`${r} must be a non-empty string.`);return e},g=(e,r)=>{if(typeof e!="boolean")throw new Error(`${r} must be a boolean.`);return e},Q=(e,r)=>{if(typeof e!="number"||Number.isNaN(e))throw new Error(`${r} must be a number.`);return e},h=(e,r)=>{const o=Q(e,r);if(o<1)throw new Error(`${r} must be greater than zero.`);return o},p=(e,r)=>{if(!Array.isArray(e)||e.length===0)throw new Error(`${r} must be a non-empty array of strings.`);for(const o of e)if(typeof o!="string"||o.length===0)throw new Error(`${r} must contain non-empty strings.`);return e},k=(e,r)=>{const o=m(e,r);if(o!=="symlink"&&o!=="copy")throw new Error(`${r} must be "symlink" or "copy".`);return o},q=(e,r)=>{if(e!=="materialize")throw new Error(`${r} must be "materialize".`);return e},W=(e,r)=>{if(!y(e))throw new Error(`${r} must be an object.`);const 
o=e.type;if(o!=="commit"&&o!=="manifest")throw new Error(`${r}.type must be "commit" or "manifest".`);const a=e.value;if(typeof a!="string"&&a!==null)throw new Error(`${r}.value must be a string or null.`);return{type:o,value:a}},B=e=>{if(!y(e))throw new Error("Config must be a JSON object.");const r=J.safeParse(e);if(!r.success){const n=r.error.issues.map(c=>`${c.path.join(".")||"config"} ${c.message}`).join("; ");throw new Error(`Config does not match schema: ${n}.`)}const o=r.data,a=o.cacheDir?m(o.cacheDir,"cacheDir"):v,i=o.defaults,d=o.targetMode!==void 0?k(o.targetMode,"targetMode"):void 0,s=f.defaults;let l=s;if(i!==void 0){if(!y(i))throw new Error("defaults must be an object.");l={ref:i.ref!==void 0?m(i.ref,"defaults.ref"):s.ref,mode:i.mode!==void 0?q(i.mode,"defaults.mode"):s.mode,include:i.include!==void 0?p(i.include,"defaults.include"):s.include,exclude:i.exclude!==void 0?p(i.exclude,"defaults.exclude"):s.exclude,targetMode:i.targetMode!==void 0?k(i.targetMode,"defaults.targetMode"):d??s.targetMode,required:i.required!==void 0?g(i.required,"defaults.required"):s.required,maxBytes:i.maxBytes!==void 0?h(i.maxBytes,"defaults.maxBytes"):s.maxBytes,maxFiles:i.maxFiles!==void 0?h(i.maxFiles,"defaults.maxFiles"):s.maxFiles,ignoreHidden:i.ignoreHidden!==void 0?g(i.ignoreHidden,"defaults.ignoreHidden"):s.ignoreHidden,allowHosts:i.allowHosts!==void 0?p(i.allowHosts,"defaults.allowHosts"):s.allowHosts,toc:i.toc!==void 0?i.toc:s.toc,unwrapSingleRootDir:i.unwrapSingleRootDir!==void 0?g(i.unwrapSingleRootDir,"defaults.unwrapSingleRootDir"):s.unwrapSingleRootDir}}else d!==void 0&&(l={...s,targetMode:d});const E=o.sources.map((n,c)=>{if(!y(n))throw new Error(`sources[${c}] must be an object.`);const u={id:H(n.id,`sources[${c}].id`),repo:m(n.repo,`sources[${c}].repo`)};if(n.targetDir!==void 0&&(u.targetDir=m(n.targetDir,`sources[${c}].targetDir`)),n.targetMode!==void 0){const b=m(n.targetMode,`sources[${c}].targetMode`);if(b!=="symlink"&&b!=="copy")throw new 
Error(`sources[${c}].targetMode must be "symlink" or "copy".`);u.targetMode=b}return n.ref!==void 0&&(u.ref=m(n.ref,`sources[${c}].ref`)),n.mode!==void 0&&(u.mode=q(n.mode,`sources[${c}].mode`)),n.include!==void 0&&(u.include=p(n.include,`sources[${c}].include`)),n.exclude!==void 0&&(u.exclude=p(n.exclude,`sources[${c}].exclude`)),n.required!==void 0&&(u.required=g(n.required,`sources[${c}].required`)),n.maxBytes!==void 0&&(u.maxBytes=h(n.maxBytes,`sources[${c}].maxBytes`)),n.maxFiles!==void 0&&(u.maxFiles=h(n.maxFiles,`sources[${c}].maxFiles`)),n.ignoreHidden!==void 0&&(u.ignoreHidden=g(n.ignoreHidden,`sources[${c}].ignoreHidden`)),n.integrity!==void 0&&(u.integrity=W(n.integrity,`sources[${c}].integrity`)),n.toc!==void 0&&(u.toc=n.toc),n.unwrapSingleRootDir!==void 0&&(u.unwrapSingleRootDir=g(n.unwrapSingleRootDir,`sources[${c}].unwrapSingleRootDir`)),u}),S=new Set,$=[];for(const n of E)S.has(n.id)&&$.push(n.id),S.add(n.id);if($.length>0)throw new Error(`Duplicate source IDs found: ${$.join(", ")}. 
Each source must have a unique ID.`);return{cacheDir:a,targetMode:d,defaults:l,sources:E}},Y=e=>{const r=e.defaults??f.defaults;return e.sources.map(o=>({id:o.id,repo:o.repo,targetDir:o.targetDir,targetMode:o.targetMode??r.targetMode,ref:o.ref??r.ref,mode:o.mode??r.mode,include:o.include??r.include,exclude:o.exclude??r.exclude,required:o.required??r.required,maxBytes:o.maxBytes??r.maxBytes,maxFiles:o.maxFiles??r.maxFiles,ignoreHidden:o.ignoreHidden??r.ignoreHidden,integrity:o.integrity,toc:o.toc??r.toc,unwrapSingleRootDir:o.unwrapSingleRootDir??r.unwrapSingleRootDir}))},N=e=>e?w.resolve(e):w.resolve(process.cwd(),j),ee=()=>w.resolve(process.cwd(),A),O=async e=>{try{return await z(e),!0}catch{return!1}},D=async(e,r)=>{let o;try{o=await T(e,"utf8")}catch(s){const l=s instanceof Error?s.message:String(s);throw new Error(`Failed to read config at ${e}: ${l}`)}let a;try{a=JSON.parse(o)}catch(s){const l=s instanceof Error?s.message:String(s);throw new Error(`Invalid JSON in ${e}: ${l}`)}const i=r==="package"?a?.["docs-cache"]:a;if(r==="package"&&i===void 0)throw new Error(`Missing docs-cache config in ${e}.`);const d=B(i);for(const s of d.sources)s.targetDir&&L(e,s.targetDir);return{config:d,resolvedPath:e,sources:Y(d)}},re=async(e,r)=>{const o=`${JSON.stringify(r,null,2)}
2
+ `;await I(e,o,"utf8")},oe=async e=>{const r=N(e),o=w.basename(r)===A;if(e)return D(r,o?"package":"config");if(await O(r))return D(r,"config");const a=ee();if(await O(a))try{return await D(a,"package")}catch{}throw new Error(`No docs.config.json found at ${r} and no docs-cache config in ${a}.`)};export{f as D,H as a,v as b,j as c,oe as l,N as r,K as s,B as v,re as w};
3
+ //# sourceMappingURL=docs-cache.bWkgSdUq.mjs.map
package/package.json CHANGED
@@ -1,95 +1,92 @@
1
1
  {
2
- "name": "docs-cache",
3
- "private": false,
4
- "type": "module",
5
- "version": "0.4.0",
6
- "packageManager": "pnpm@10.14.0+sha512.ad27a79641b49c3e481a16a805baa71817a04bbe06a38d17e60e2eaee83f6a146c6a688125f5792e48dd5ba30e7da52a5cda4c3992b9ccf333f9ce223af84748",
7
- "description": "CLI for deterministic local caching of external documentation for agents and tools",
8
- "author": "Frederik Bosch",
9
- "license": "MIT",
10
- "homepage": "https://github.com/fbosch/docs-cache#readme",
11
- "repository": {
12
- "type": "git",
13
- "url": "https://github.com/fbosch/docs-cache.git"
14
- },
15
- "bugs": {
16
- "url": "https://github.com/fbosch/docs-cache/issues"
17
- },
18
- "keywords": [
19
- "docs",
20
- "documentation",
21
- "cache",
22
- "agent",
23
- "ai",
24
- "git",
25
- "cli"
26
- ],
27
- "sideEffects": false,
28
- "engines": {
29
- "node": ">=18"
30
- },
31
- "bin": {
32
- "docs-cache": "./bin/docs-cache.mjs"
33
- },
34
- "files": [
35
- "bin",
36
- "dist/cli.mjs",
37
- "dist/chunks/*.mjs",
38
- "dist/lock.mjs",
39
- "dist/shared/*.mjs",
40
- "README.md",
41
- "LICENSE"
42
- ],
43
- "scripts": {
44
- "build": "unbuild",
45
- "dev": "unbuild --stub",
46
- "lint": "biome check .",
47
- "prepublishOnly": "pnpm audit --audit-level=high && pnpm build && pnpm size && pnpm schema:build",
48
- "release": "pnpm run lint && pnpm run typecheck && bumpp && pnpm publish --access public",
49
- "test": "pnpm build && node --test",
50
- "test:coverage": "pnpm build && c8 --include dist --exclude bin --reporter=text node --test",
51
- "bench": "pnpm build && node scripts/benchmarks/run.mjs",
52
- "schema:build": "node scripts/generate-schema.mjs",
53
- "size": "size-limit",
54
- "test:watch": "node --test --watch",
55
- "typecheck": "tsc --noEmit",
56
- "prepare": "simple-git-hooks"
57
- },
58
- "dependencies": {
59
- "@clack/prompts": "^1.0.0",
60
- "cac": "^6.7.14",
61
- "execa": "^9.6.1",
62
- "fast-glob": "^3.3.2",
63
- "picocolors": "^1.1.1",
64
- "picomatch": "^2.3.1",
65
- "zod": "^4.3.6"
66
- },
67
- "devDependencies": {
68
- "@biomejs/biome": "^2.3.8",
69
- "@size-limit/file": "^11.2.0",
70
- "@types/node": "^24.2.1",
71
- "bumpp": "^10.3.2",
72
- "c8": "^10.1.3",
73
- "jiti": "^2.5.1",
74
- "lint-staged": "^16.2.7",
75
- "simple-git-hooks": "^2.13.1",
76
- "size-limit": "^11.2.0",
77
- "tinybench": "^6.0.0",
78
- "typescript": "^5.9.3",
79
- "unbuild": "^3.6.1"
80
- },
81
- "size-limit": [
82
- {
83
- "path": "dist/cli.mjs",
84
- "limit": "10 kB"
85
- }
86
- ],
87
- "simple-git-hooks": {
88
- "pre-commit": "pnpm lint-staged && pnpm typecheck"
89
- },
90
- "lint-staged": {
91
- "*.{js,ts,cjs,mjs,d.cts,d.mts,jsx,tsx,json,jsonc}": [
92
- "biome check --write --no-errors-on-unmatched"
93
- ]
94
- }
95
- }
2
+ "name": "docs-cache",
3
+ "private": false,
4
+ "type": "module",
5
+ "version": "0.4.2",
6
+ "description": "CLI for deterministic local caching of external documentation for agents and tools",
7
+ "author": "Frederik Bosch",
8
+ "license": "MIT",
9
+ "homepage": "https://github.com/fbosch/docs-cache#readme",
10
+ "repository": {
11
+ "type": "git",
12
+ "url": "https://github.com/fbosch/docs-cache.git"
13
+ },
14
+ "bugs": {
15
+ "url": "https://github.com/fbosch/docs-cache/issues"
16
+ },
17
+ "keywords": [
18
+ "docs",
19
+ "documentation",
20
+ "cache",
21
+ "agent",
22
+ "ai",
23
+ "git",
24
+ "cli"
25
+ ],
26
+ "sideEffects": false,
27
+ "engines": {
28
+ "node": ">=18"
29
+ },
30
+ "bin": {
31
+ "docs-cache": "./bin/docs-cache.mjs"
32
+ },
33
+ "files": [
34
+ "bin",
35
+ "dist/cli.mjs",
36
+ "dist/chunks/*.mjs",
37
+ "dist/lock.mjs",
38
+ "dist/shared/*.mjs",
39
+ "README.md",
40
+ "LICENSE"
41
+ ],
42
+ "dependencies": {
43
+ "@clack/prompts": "^1.0.0",
44
+ "cac": "^6.7.14",
45
+ "execa": "^9.6.1",
46
+ "fast-glob": "^3.3.2",
47
+ "picocolors": "^1.1.1",
48
+ "picomatch": "^2.3.1",
49
+ "zod": "^4.3.6"
50
+ },
51
+ "devDependencies": {
52
+ "@biomejs/biome": "^2.3.8",
53
+ "@size-limit/file": "^11.2.0",
54
+ "@types/node": "^24.2.1",
55
+ "bumpp": "^10.3.2",
56
+ "c8": "^10.1.3",
57
+ "jiti": "^2.5.1",
58
+ "lint-staged": "^16.2.7",
59
+ "simple-git-hooks": "^2.13.1",
60
+ "size-limit": "^11.2.0",
61
+ "tinybench": "^6.0.0",
62
+ "typescript": "^5.9.3",
63
+ "unbuild": "^3.6.1"
64
+ },
65
+ "size-limit": [
66
+ {
67
+ "path": "dist/cli.mjs",
68
+ "limit": "10 kB"
69
+ }
70
+ ],
71
+ "simple-git-hooks": {
72
+ "pre-commit": "pnpm lint-staged && pnpm typecheck"
73
+ },
74
+ "lint-staged": {
75
+ "*.{js,ts,cjs,mjs,d.cts,d.mts,jsx,tsx,json,jsonc}": [
76
+ "biome check --write --no-errors-on-unmatched"
77
+ ]
78
+ },
79
+ "scripts": {
80
+ "build": "unbuild",
81
+ "dev": "unbuild --stub",
82
+ "lint": "biome check .",
83
+ "release": "pnpm run lint && pnpm run typecheck && bumpp && pnpm publish --access public",
84
+ "test": "pnpm build && node --test",
85
+ "test:coverage": "pnpm build && c8 --include dist --exclude bin --reporter=text node --test",
86
+ "bench": "pnpm build && node scripts/benchmarks/run.mjs",
87
+ "schema:build": "node scripts/generate-schema.mjs",
88
+ "size": "size-limit",
89
+ "test:watch": "node --test --watch",
90
+ "typecheck": "tsc --noEmit"
91
+ }
92
+ }
@@ -1,3 +0,0 @@
1
- import{writeFile as I,readFile as T,access as z}from"node:fs/promises";import f from"node:path";import{z as t}from"zod";import{r as L}from"./docs-cache.Oi01HUbh.mjs";const v=t.enum(["symlink","copy"]),M=t.enum(["materialize"]),F=t.enum(["tree","compressed"]),U=t.object({type:t.enum(["commit","manifest"]),value:t.string().nullable()}).strict(),_=t.object({ref:t.string().min(1),mode:M,include:t.array(t.string().min(1)).min(1),targetMode:v.optional(),required:t.boolean(),maxBytes:t.number().min(1),maxFiles:t.number().min(1).optional(),allowHosts:t.array(t.string().min(1)).min(1),toc:t.union([t.boolean(),F]).optional(),unwrapSingleRootDir:t.boolean().optional()}).strict(),P=t.object({id:t.string().min(1),repo:t.string().min(1),targetDir:t.string().min(1).optional(),targetMode:v.optional(),ref:t.string().min(1).optional(),mode:M.optional(),include:t.array(t.string().min(1)).optional(),exclude:t.array(t.string().min(1)).optional(),required:t.boolean().optional(),maxBytes:t.number().min(1).optional(),maxFiles:t.number().min(1).optional(),integrity:U.optional(),toc:t.union([t.boolean(),F]).optional(),unwrapSingleRootDir:t.boolean().optional()}).strict(),J=t.object({$schema:t.string().min(1).optional(),cacheDir:t.string().min(1).optional(),targetMode:v.optional(),defaults:_.partial().optional(),sources:t.array(P)}).strict(),G=/^[a-zA-Z0-9_-]+$/,V=new Set([".","..","CON","PRN","AUX","NUL","COM1","LPT1"]),j=(e,r)=>{if(typeof e!="string"||e.length===0)throw new Error(`${r} must be a non-empty string.`);if(e.length>200)throw new Error(`${r} exceeds maximum length of 200.`);if(!G.test(e))throw new Error(`${r} must contain only alphanumeric characters, hyphens, and underscores.`);if(V.has(e.toUpperCase()))throw new Error(`${r} uses reserved name '${e}'.`);return 
e},A="docs.config.json",D=".docs",C="package.json",X=process.platform==="win32"?"copy":"symlink",g={cacheDir:D,defaults:{ref:"HEAD",mode:"materialize",include:["**/*.{md,mdx,markdown,mkd,txt,rst,adoc,asciidoc}"],targetMode:X,required:!0,maxBytes:2e8,allowHosts:["github.com","gitlab.com"],toc:!0,unwrapSingleRootDir:!1},sources:[]},Z=(e,r)=>!e||!r?e===r:e.length!==r.length?!1:e.every((o,n)=>o===r[n]),R=e=>typeof e=="object"&&e!==null&&!Array.isArray(e),k=(e,r)=>{const o={};for(const[n,i]of Object.entries(e)){const u=r[n];if(Array.isArray(i)&&Array.isArray(u)){Z(i,u)||(o[n]=i);continue}if(R(i)&&R(u)){const a=k(i,u);Object.keys(a).length>0&&(o[n]=a);continue}i!==u&&(o[n]=i)}return o},K=e=>{const r={...g,$schema:e.$schema,defaults:{...g.defaults,...e.targetMode?{targetMode:e.targetMode}:void 0}},o=k(e,r),n={$schema:o.$schema,cacheDir:o.cacheDir,targetMode:o.targetMode,defaults:o.defaults,sources:e.sources};return(!n.defaults||Object.keys(n.defaults).length===0)&&delete n.defaults,n},p=e=>typeof e=="object"&&e!==null&&!Array.isArray(e),m=(e,r)=>{if(typeof e!="string"||e.length===0)throw new Error(`${r} must be a non-empty string.`);return e},w=(e,r)=>{if(typeof e!="boolean")throw new Error(`${r} must be a boolean.`);return e},Q=(e,r)=>{if(typeof e!="number"||Number.isNaN(e))throw new Error(`${r} must be a number.`);return e},y=(e,r)=>{const o=Q(e,r);if(o<1)throw new Error(`${r} must be greater than zero.`);return o},h=(e,r)=>{if(!Array.isArray(e)||e.length===0)throw new Error(`${r} must be a non-empty array of strings.`);for(const o of e)if(typeof o!="string"||o.length===0)throw new Error(`${r} must contain non-empty strings.`);return e},q=(e,r)=>{const o=m(e,r);if(o!=="symlink"&&o!=="copy")throw new Error(`${r} must be "symlink" or "copy".`);return o},B=(e,r)=>{if(e!=="materialize")throw new Error(`${r} must be "materialize".`);return e},W=(e,r)=>{if(!p(e))throw new Error(`${r} must be an object.`);const o=e.type;if(o!=="commit"&&o!=="manifest")throw new 
Error(`${r}.type must be "commit" or "manifest".`);const n=e.value;if(typeof n!="string"&&n!==null)throw new Error(`${r}.value must be a string or null.`);return{type:o,value:n}},N=e=>{if(!p(e))throw new Error("Config must be a JSON object.");const r=J.safeParse(e);if(!r.success){const s=r.error.issues.map(c=>`${c.path.join(".")||"config"} ${c.message}`).join("; ");throw new Error(`Config does not match schema: ${s}.`)}const o=r.data,n=o.cacheDir?m(o.cacheDir,"cacheDir"):D,i=o.defaults,u=o.targetMode!==void 0?q(o.targetMode,"targetMode"):void 0,a=g.defaults;let d=a;if(i!==void 0){if(!p(i))throw new Error("defaults must be an object.");d={ref:i.ref!==void 0?m(i.ref,"defaults.ref"):a.ref,mode:i.mode!==void 0?B(i.mode,"defaults.mode"):a.mode,include:i.include!==void 0?h(i.include,"defaults.include"):a.include,targetMode:i.targetMode!==void 0?q(i.targetMode,"defaults.targetMode"):u??a.targetMode,required:i.required!==void 0?w(i.required,"defaults.required"):a.required,maxBytes:i.maxBytes!==void 0?y(i.maxBytes,"defaults.maxBytes"):a.maxBytes,maxFiles:i.maxFiles!==void 0?y(i.maxFiles,"defaults.maxFiles"):a.maxFiles,allowHosts:i.allowHosts!==void 0?h(i.allowHosts,"defaults.allowHosts"):a.allowHosts,toc:i.toc!==void 0?i.toc:a.toc,unwrapSingleRootDir:i.unwrapSingleRootDir!==void 0?w(i.unwrapSingleRootDir,"defaults.unwrapSingleRootDir"):a.unwrapSingleRootDir}}else u!==void 0&&(d={...a,targetMode:u});const E=o.sources.map((s,c)=>{if(!p(s))throw new Error(`sources[${c}] must be an object.`);const l={id:j(s.id,`sources[${c}].id`),repo:m(s.repo,`sources[${c}].repo`)};if(s.targetDir!==void 0&&(l.targetDir=m(s.targetDir,`sources[${c}].targetDir`)),s.targetMode!==void 0){const b=m(s.targetMode,`sources[${c}].targetMode`);if(b!=="symlink"&&b!=="copy")throw new Error(`sources[${c}].targetMode must be "symlink" or "copy".`);l.targetMode=b}return s.ref!==void 0&&(l.ref=m(s.ref,`sources[${c}].ref`)),s.mode!==void 0&&(l.mode=B(s.mode,`sources[${c}].mode`)),s.include!==void 
0&&(l.include=h(s.include,`sources[${c}].include`)),s.exclude!==void 0&&(l.exclude=h(s.exclude,`sources[${c}].exclude`)),s.required!==void 0&&(l.required=w(s.required,`sources[${c}].required`)),s.maxBytes!==void 0&&(l.maxBytes=y(s.maxBytes,`sources[${c}].maxBytes`)),s.maxFiles!==void 0&&(l.maxFiles=y(s.maxFiles,`sources[${c}].maxFiles`)),s.integrity!==void 0&&(l.integrity=W(s.integrity,`sources[${c}].integrity`)),s.toc!==void 0&&(l.toc=s.toc),s.unwrapSingleRootDir!==void 0&&(l.unwrapSingleRootDir=w(s.unwrapSingleRootDir,`sources[${c}].unwrapSingleRootDir`)),l}),S=new Set,$=[];for(const s of E)S.has(s.id)&&$.push(s.id),S.add(s.id);if($.length>0)throw new Error(`Duplicate source IDs found: ${$.join(", ")}. Each source must have a unique ID.`);return{cacheDir:n,targetMode:u,defaults:d,sources:E}},Y=e=>{const r=e.defaults??g.defaults;return e.sources.map(o=>({id:o.id,repo:o.repo,targetDir:o.targetDir,targetMode:o.targetMode??r.targetMode,ref:o.ref??r.ref,mode:o.mode??r.mode,include:o.include??r.include,exclude:o.exclude,required:o.required??r.required,maxBytes:o.maxBytes??r.maxBytes,maxFiles:o.maxFiles??r.maxFiles,integrity:o.integrity,toc:o.toc??r.toc,unwrapSingleRootDir:o.unwrapSingleRootDir??r.unwrapSingleRootDir}))},O=e=>e?f.resolve(e):f.resolve(process.cwd(),A),ee=()=>f.resolve(process.cwd(),C),H=async e=>{try{return await z(e),!0}catch{return!1}},x=async(e,r)=>{let o;try{o=await T(e,"utf8")}catch(a){const d=a instanceof Error?a.message:String(a);throw new Error(`Failed to read config at ${e}: ${d}`)}let n;try{n=JSON.parse(o)}catch(a){const d=a instanceof Error?a.message:String(a);throw new Error(`Invalid JSON in ${e}: ${d}`)}const i=r==="package"?n?.["docs-cache"]:n;if(r==="package"&&i===void 0)throw new Error(`Missing docs-cache config in ${e}.`);const u=N(i);for(const a of u.sources)a.targetDir&&L(e,a.targetDir);return{config:u,resolvedPath:e,sources:Y(u)}},re=async(e,r)=>{const o=`${JSON.stringify(r,null,2)}
2
- `;await I(e,o,"utf8")},oe=async e=>{const r=O(e),o=f.basename(r)===C;if(e)return x(r,o?"package":"config");if(await H(r))return x(r,"config");const n=ee();if(await H(n))try{return await x(n,"package")}catch{}throw new Error(`No docs.config.json found at ${r} and no docs-cache config in ${n}.`)};export{g as D,j as a,D as b,A as c,oe as l,O as r,K as s,N as v,re as w};
3
- //# sourceMappingURL=docs-cache.DpZFrqUt.mjs.map