docs-cache 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +111 -0
- package/bin/docs-cache.mjs +3 -0
- package/dist/chunks/add.mjs +3 -0
- package/dist/chunks/clean.mjs +2 -0
- package/dist/chunks/init.mjs +3 -0
- package/dist/chunks/prune.mjs +2 -0
- package/dist/chunks/remove.mjs +3 -0
- package/dist/chunks/status.mjs +2 -0
- package/dist/chunks/sync.mjs +7 -0
- package/dist/chunks/verify.mjs +2 -0
- package/dist/cli.mjs +36 -0
- package/dist/shared/docs-cache.BSvQNKuf.mjs +2 -0
- package/dist/shared/docs-cache.D9_kM5zq.mjs +6 -0
- package/dist/shared/docs-cache.goBsJvLg.mjs +3 -0
- package/package.json +90 -0
package/LICENSE
ADDED
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2026 Frederik Bosch

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
package/README.md
ADDED
@@ -0,0 +1,111 @@
# 🗃️ docs-cache

Deterministic local caching of external documentation for agents and tools

[](LICENSE)
[](https://www.npmjs.com/package/docs-cache)
[](https://github.com/fbosch/docs-cache/actions/workflows/audit.yml)

## Purpose

Provides agents and automation tools with local access to external documentation without committing it to the repository.

Documentation is cached in a gitignored location, exposed to agent and tool targets via links or copies, and updated through sync commands or postinstall hooks.

## Features

- **Local only**: Cache lives in the directory `.docs` (or a custom location) and _should_ be gitignored (see the snippet after this list).
- **Deterministic**: `docs.lock` pins commits and file metadata.
- **Fast**: Local cache avoids network roundtrips after sync.
- **Flexible**: Cache full repos or just the subdirectories you need.

> **Note**: Sources are downloaded to a local cache. If you provide a `targetDir`, `docs-cache` creates a symlink or copy from the cache to that target directory. The target should be outside `.docs`.
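Since the cache is local-only, keep it out of version control. Assuming the default cache directory, one way to do that:

```bash
# Append the default cache directory to .gitignore
# (adjust the entry if you configure a custom cacheDir)
echo ".docs" >> .gitignore
```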
## Usage

```bash
# Initialize (optional)
npx docs-cache init

# Add sources
npx docs-cache add github:owner/repo#main
npx docs-cache add gitlab:framework/core
npx docs-cache add https://github.com/framework/core.git
npx docs-cache add framework/core framework/other-repo

# Sync
npx docs-cache sync

# Verify integrity
npx docs-cache verify

# Check status
npx docs-cache status

# Remove sources
npx docs-cache remove core
npx docs-cache remove framework/other-repo --prune

# Clean
npx docs-cache clean
```

> For more options: `npx docs-cache --help`
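The `add` command also accepts an explicit repo/target pairing through the `--source` and `--target` flags listed in the help output (the target path here is illustrative):

```bash
# Add a source and expose it at a chosen target directory in one step
npx docs-cache add --source github:framework/core#main --target ./agents/references
```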
## Configuration

`docs.config.json` at project root (or a `docs-cache` key inside `package.json`; see the second example below):

```json
{
  "$schema": "https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json",
  "sources": [
    {
      "id": "framework",
      "repo": "https://github.com/framework/core.git",
      "ref": "main",
      "targetDir": "./agents/skills/framework-skill/references",
      "include": ["guide/**"]
    }
  ]
}
```
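Equivalently, the same configuration can live under a `docs-cache` key in `package.json` (a minimal sketch; the surrounding fields are illustrative):

```json
{
  "name": "my-project",
  "docs-cache": {
    "sources": [
      {
        "id": "framework",
        "repo": "https://github.com/framework/core.git",
        "ref": "main"
      }
    ]
  }
}
```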
### Options

| Field      | Type    | Description                                         |
| ---------- | ------- | --------------------------------------------------- |
| `cacheDir` | string  | Directory for cache, defaults to `.docs`            |
| `index`    | boolean | Write `index.json` summary file                     |
| `sources`  | array   | List of repositories to sync                        |
| `defaults` | object  | Default settings for all sources (see example below) |

**Source Options:**

- `repo`: Git URL
- `ref`: Branch, tag, or commit
- `include`: Glob patterns to copy, defaults to `"**/*.{md,mdx,markdown,mkd,txt,rst,adoc,asciidoc}"`
- `exclude`: Glob patterns to skip
- `targetDir`: Optional path where files should be symlinked/copied to, outside `.docs`
- `targetMode`: Defaults to `symlink` on Unix and `copy` on Windows
- `required`: Whether missing sources should fail in offline/strict runs
- `maxBytes`: Maximum total bytes to materialize for the source
- `maxFiles`: Maximum total files to materialize for the source

> **Note**: Sources are always downloaded to `.docs/<id>/`. If you provide a `targetDir`, `docs-cache` will create a symlink or copy pointing from the cache to that target directory. The target should be outside `.docs`.
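As a sketch of how `defaults` composes with per-source settings (field names as documented above; the values are illustrative), shared options apply to every source unless overridden:

```json
{
  "defaults": {
    "ref": "main",
    "include": ["docs/**"],
    "targetMode": "copy",
    "maxBytes": 50000000
  },
  "sources": [
    { "id": "core", "repo": "https://github.com/framework/core.git" },
    { "id": "other", "repo": "https://github.com/framework/other-repo.git", "ref": "v2" }
  ]
}
```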
## NPM Integration

Use `postinstall` to ensure documentation is available locally immediately after installation:

```json
{
  "scripts": {
    "postinstall": "npx docs-cache sync"
  }
}
```
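For CI or other sandboxed runs, the global flags from `--help` can make syncing strict about the lockfile (a hedged example; combine flags as needed):

```bash
# Reuse only the existing cache and lock; fail if a required source is missing
npx docs-cache sync --offline --fail-on-miss
```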
## License

MIT
package/dist/chunks/add.mjs
ADDED
@@ -0,0 +1,3 @@
import{readFile as m,writeFile as k,access as x}from"node:fs/promises";import h from"node:path";import{v as w,D as v,a as O,w as b,r as D}from"../shared/docs-cache.goBsJvLg.mjs";import{r as C}from"../shared/docs-cache.D9_kM5zq.mjs";import{r as F}from"../shared/docs-cache.BSvQNKuf.mjs";import"zod";import"node:process";import"cac";import"picocolors";const f=async o=>{try{return await x(o),!0}catch{return!1}},P="package.json",S=async o=>{const s=await m(o,"utf8"),r=JSON.parse(s),a=r["docs-cache"];return a?{parsed:r,config:w(a)}:{parsed:r,config:null}},I=async o=>{if(o){const a=D(o);return{resolvedPath:a,mode:h.basename(a)===P?"package":"config"}}const s=D();if(await f(s))return{resolvedPath:s,mode:"config"};const r=h.resolve(process.cwd(),P);return await f(r)&&(await S(r)).config?{resolvedPath:r,mode:"package"}:{resolvedPath:s,mode:"config"}},N=async o=>{const s=await I(o.configPath),r=s.resolvedPath;let a=v,t=null,l=null;if(await f(r))if(s.mode==="package"){const e=await S(r);l=e.parsed,t=e.config,a=t??v}else{const e=await m(r,"utf8");t=JSON.parse(e.toString()),a=w(t)}const y="https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json",p=new Set(a.sources.map(e=>e.id)),u=[],d=o.entries.map(e=>{const c=F(e.repo),g=e.id||c.inferredId;if(!g)throw new Error("Unable to infer id. Provide an explicit id.");const n=O(g,"source id");return p.has(n)?(u.push(n),null):(p.add(n),e.targetDir&&C(r,e.targetDir),{id:n,repo:c.repoUrl,...e.targetDir?{targetDir:e.targetDir}:{},...c.ref?{ref:c.ref}:{}})}).filter(Boolean);if(d.length===0)throw new Error("All sources already exist in config.");const i={$schema:y,sources:[...a.sources,...d]};if(t?.cacheDir&&(i.cacheDir=t.cacheDir),t?.index!==void 0&&(i.index=t.index),t?.defaults&&(i.defaults=t.defaults),s.mode==="package"){const e=l??{};e["docs-cache"]=i,await k(r,`${JSON.stringify(e,null,2)}
`,"utf8")}else await b(r,i);return{configPath:r,sources:d,skipped:u,created:!0}};export{N as addSources};
//# sourceMappingURL=add.mjs.map
package/dist/chunks/clean.mjs
ADDED
@@ -0,0 +1,2 @@
import{rm as o,access as i}from"node:fs/promises";import{l as s,b as m}from"../shared/docs-cache.goBsJvLg.mjs";import{a as n}from"../shared/docs-cache.D9_kM5zq.mjs";import"node:path";import"zod";import"node:process";import"cac";import"picocolors";const f=async r=>{try{return await i(r),!0}catch{return!1}},p=async r=>{const{config:t,resolvedPath:c}=await s(r.configPath),a=n(c,t.cacheDir??m,r.cacheDirOverride),e=await f(a);return e&&await o(a,{recursive:!0,force:!0}),{cacheDir:a,removed:e}};export{p as cleanCache};
//# sourceMappingURL=clean.mjs.map
package/dist/chunks/init.mjs
ADDED
@@ -0,0 +1,3 @@
import{readFile as u,writeFile as k,access as v}from"node:fs/promises";import f from"node:path";import{confirm as F,isCancel as N,select as O,text as P}from"@clack/prompts";import{c as A,b as i,w as J}from"../shared/docs-cache.goBsJvLg.mjs";import"zod";import"../shared/docs-cache.D9_kM5zq.mjs";import"node:process";import"cac";import"picocolors";const h=async r=>{try{return await v(r),!0}catch{return!1}},S=async(r,s={})=>{const y=s.confirm??F,l=s.isCancel??N,D=s.select??O,j=s.text??P,w=r.cwd??process.cwd(),d=f.resolve(w,A),a=f.resolve(w,"package.json"),n=[];if(await h(d)&&n.push(d),await h(a)){const o=await u(a,"utf8");JSON.parse(o)["docs-cache"]&&n.push(a)}if(n.length>0)throw new Error(`Config already exists at ${n.join(", ")}. Init aborted.`);let p=!1;if(await h(a)){const o=await u(a,"utf8");if(!JSON.parse(o)["docs-cache"]){const c=await D({message:"Config location",options:[{value:"config",label:"docs.config.json"},{value:"package",label:"package.json"}],initialValue:"config"});if(l(c))throw new Error("Init cancelled.");p=c==="package"}}const I=p?a:d,$=r.cacheDirOverride??i,x=await j({message:"Cache directory",initialValue:$});if(l(x))throw new Error("Init cancelled.");const C=await y({message:"Generate index.json (summary of cached sources + paths for tools)",initialValue:!1});if(l(C))throw new Error("Init cancelled.");const t={configPath:I,cacheDir:x,index:C},e=f.resolve(w,t.configPath);if(f.basename(e)==="package.json"){const o=await u(e,"utf8"),c=JSON.parse(o);if(c["docs-cache"])throw new Error(`docs-cache config already exists in ${e}.`);const g={$schema:"https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json",sources:[]},b=t.cacheDir||i;return b!==i&&(g.cacheDir=b),t.index&&(g.index=!0),c["docs-cache"]=g,await k(e,`${JSON.stringify(c,null,2)}
`,"utf8"),{configPath:e,created:!0}}if(await h(e))throw new Error(`Config already exists at ${e}.`);const m={$schema:"https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json",sources:[]},E=t.cacheDir||i;return E!==i&&(m.cacheDir=E),t.index&&(m.index=!0),await J(e,m),{configPath:e,created:!0}};export{S as initConfig};
//# sourceMappingURL=init.mjs.map
package/dist/chunks/prune.mjs
ADDED
@@ -0,0 +1,2 @@
import{readdir as p,rm as f,access as h}from"node:fs/promises";import u from"node:path";import{l as d,b as D}from"../shared/docs-cache.goBsJvLg.mjs";import{a as v}from"../shared/docs-cache.D9_kM5zq.mjs";import"zod";import"node:process";import"cac";import"picocolors";const w=async t=>{try{return await h(t),!0}catch{return!1}},l=async t=>{const{config:c,resolvedPath:s,sources:a}=await d(t.configPath),e=v(s,c.cacheDir??D,t.cacheDirOverride);if(!await w(e))return{cacheDir:e,removed:[],kept:a.map(r=>r.id)};const n=new Set(a.map(r=>r.id)),m=await p(e,{withFileTypes:!0}),o=[];for(const r of m){if(!r.isDirectory())continue;const i=r.name;n.has(i)||i.startsWith(".tmp-")||(await f(u.join(e,i),{recursive:!0,force:!0}),o.push(i))}return{cacheDir:e,removed:o,kept:a.map(r=>r.id)}};export{l as pruneCache};
//# sourceMappingURL=prune.mjs.map
package/dist/chunks/remove.mjs
ADDED
@@ -0,0 +1,3 @@
import{readFile as w,writeFile as N,rm as k,access as C}from"node:fs/promises";import v from"node:path";import{v as D,D as E,w as I,r as P}from"../shared/docs-cache.goBsJvLg.mjs";import{r as x}from"../shared/docs-cache.D9_kM5zq.mjs";import{r as F}from"../shared/docs-cache.BSvQNKuf.mjs";import"zod";import"node:process";import"cac";import"picocolors";const d=async s=>{try{return await C(s),!0}catch{return!1}},y="package.json",S=async s=>{const a=await w(s,"utf8"),o=JSON.parse(a),t=o["docs-cache"];return t?{parsed:o,config:D(t)}:{parsed:o,config:null}},O=async s=>{if(s){const t=P(s);return{resolvedPath:t,mode:v.basename(t)===y?"package":"config"}}const a=P();if(await d(a))return{resolvedPath:a,mode:"config"};const o=v.resolve(process.cwd(),y);return await d(o)&&(await S(o)).config?{resolvedPath:o,mode:"package"}:{resolvedPath:a,mode:"config"}},U=async s=>{if(s.ids.length===0)throw new Error("No sources specified to remove.");const a=await O(s.configPath),o=a.resolvedPath;let t=E,r=null,f=null;if(await d(o))if(a.mode==="package"){const e=await S(o);if(f=e.parsed,r=e.config,!r)throw new Error(`Missing docs-cache config in ${o}.`);t=r}else{const e=await w(o,"utf8");r=JSON.parse(e.toString()),t=D(r)}else throw new Error(`Config not found at ${o}.`);const u=new Map(t.sources.map(e=>[e.id,e])),g=new Map(t.sources.map(e=>[e.repo,e])),c=new Set,h=[];for(const e of s.ids){if(u.has(e)){c.add(e);continue}const i=F(e);if(i.repoUrl&&g.has(i.repoUrl)){const p=g.get(i.repoUrl);p&&c.add(p.id);continue}if(i.inferredId&&u.has(i.inferredId)){c.add(i.inferredId);continue}h.push(e)}const $=t.sources.filter(e=>!c.has(e.id)),m=t.sources.filter(e=>c.has(e.id)).map(e=>e.id),M=t.sources.filter(e=>c.has(e.id));if(m.length===0)throw new Error("No matching sources found to remove.");const n={$schema:r?.$schema??"https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json",sources:$};if(r?.cacheDir&&(n.cacheDir=r.cacheDir),r?.index!==void 0&&(n.index=r.index),r?.defaults&&(n.defaults=r.defaults),r?.targetMode&&(n.targetMode=r.targetMode),a.mode==="package"){const e=f??{};e["docs-cache"]=n,await N(o,`${JSON.stringify(e,null,2)}
`,"utf8")}else await I(o,n);const l=[];for(const e of M){if(!e.targetDir)continue;const i=x(o,e.targetDir);await k(i,{recursive:!0,force:!0}),l.push({id:e.id,targetDir:i})}return{configPath:o,removed:m,missing:h,targetsRemoved:l}};export{U as removeSources};
//# sourceMappingURL=remove.mjs.map
package/dist/chunks/status.mjs
ADDED
@@ -0,0 +1,2 @@
import{access as E}from"node:fs/promises";import a from"picocolors";import{u as o,s as u,a as w,g as D}from"../shared/docs-cache.D9_kM5zq.mjs";import{l as v,b as x}from"../shared/docs-cache.goBsJvLg.mjs";import{resolveLockPath as C,readLock as P}from"../lock.mjs";import"node:process";import"cac";import"node:path";import"zod";const h=async s=>{try{return await E(s),!0}catch{return!1}},$=async s=>{const{config:e,resolvedPath:t,sources:n}=await v(s.configPath),r=w(t,e.cacheDir??x,s.cacheDirOverride),l=await h(r),c=C(t),i=await h(c);let d=!1,f=null;if(i)try{f=await P(c),d=!0}catch{d=!1}const g=await Promise.all(n.map(async m=>{const p=D(r,m.id),k=await h(p.sourceDir),y=f?.sources?.[m.id]??null;return{id:m.id,docsPath:p.sourceDir,docsExists:k,lockEntry:y}}));return{configPath:t,cacheDir:r,cacheDirExists:l,lockPath:c,lockExists:i,lockValid:d,sources:g}},L=s=>{const e=o.path(s.cacheDir),t=s.cacheDirExists?a.green("present"):a.red("missing"),n=s.lockExists?s.lockValid?a.green("valid"):a.red("invalid"):a.yellow("missing");if(o.header("Cache",`${e} (${t})`),o.header("Lock",`docs.lock (${n})`),s.sources.length===0){o.line(),o.line(`${u.warn} No sources configured.`);return}o.line();for(const r of s.sources){const l=r.docsExists?u.success:u.error,c=r.lockEntry?a.green("locked"):a.yellow("new"),i=o.hash(r.lockEntry?.resolvedCommit);o.item(l,r.id.padEnd(20),`${c.padEnd(10)} ${i}`)}};export{$ as getStatus,L as printStatus};
//# sourceMappingURL=status.mjs.map
package/dist/chunks/sync.mjs
ADDED
@@ -0,0 +1,7 @@
import{rm as C,mkdtemp as A,writeFile as G,mkdir as _,access as Y,rename as B,open as N,lstat as fe,cp as me,symlink as de,readFile as X}from"node:fs/promises";import h from"node:path";import E from"picocolors";import{t as R,r as j,D as he,g as we,u as D,s as P,a as pe}from"../shared/docs-cache.D9_kM5zq.mjs";import{a as H,l as ye,D as ge,b as Se}from"../shared/docs-cache.goBsJvLg.mjs";import{execFile as W}from"node:child_process";import De,{tmpdir as J}from"node:os";import{promisify as K}from"node:util";import{writeLock as ve,resolveLockPath as xe,readLock as Ce}from"../lock.mjs";import{M as U,v as q}from"./verify.mjs";import{createHash as V,randomBytes as Pe}from"node:crypto";import{createWriteStream as Z,createReadStream as $e,constants as Q}from"node:fs";import{pipeline as Ee}from"node:stream/promises";import ee from"fast-glob";const Me=/^(https?:\/\/)([^@]+)@/i,b=e=>e.replace(Me,"$1***@"),Oe=K(W),Te=3e4,Fe=new Set(["file:","ftp:","data:","javascript:"]),Ie=e=>{try{const t=new URL(e);if(Fe.has(t.protocol))throw new Error(`Blocked protocol '${t.protocol}' in repo URL '${b(e)}'.`)}catch(t){if(t instanceof TypeError)return;throw t}},be=e=>{if(Ie(e),e.startsWith("git@")){const t=e.indexOf("@"),r=e.indexOf(":",t+1);return r===-1?null:e.slice(t+1,r)||null}try{const t=new URL(e);return t.protocol!=="https:"&&t.protocol!=="ssh:"?null:t.hostname||null}catch{return null}},te=(e,t)=>{const r=be(e);if(!r)throw new Error(`Unsupported repo URL '${b(e)}'. Use HTTPS or SSH.`);const s=r.toLowerCase();if(!t.map(o=>o.toLowerCase()).includes(s))throw new Error(`Host '${r}' is not in allowHosts for '${b(e)}'.`)},re=e=>{const t=e.trim().split(`
`).filter(Boolean);return t.length===0?null:t[0].split(/\s+/)[0]||null},ke=async e=>{te(e.repo,e.allowHosts);const{stdout:t}=await Oe("git",["ls-remote",e.repo,e.ref],{timeout:e.timeoutMs??Te,maxBuffer:1024*1024}),r=re(t);if(!r)throw new Error(`Unable to resolve ref '${e.ref}' for ${b(e.repo)}.`);return{repo:e.repo,ref:e.ref,resolvedCommit:r}},oe=K(W),se=3e4,L=async(e,t)=>{await oe("git",["-c","core.hooksPath=/dev/null","-c","submodule.recurse=false","-c","protocol.file.allow=never","-c","protocol.ext.allow=never",...e],{cwd:t?.cwd,timeout:t?.timeoutMs??se,maxBuffer:1024*1024,env:{PATH:process.env.PATH,HOME:process.env.HOME,USER:process.env.USER,USERPROFILE:process.env.USERPROFILE,TMPDIR:process.env.TMPDIR,TMP:process.env.TMP,TEMP:process.env.TEMP,SYSTEMROOT:process.env.SYSTEMROOT,WINDIR:process.env.WINDIR,SSH_AUTH_SOCK:process.env.SSH_AUTH_SOCK,SSH_AGENT_PID:process.env.SSH_AGENT_PID,HTTP_PROXY:process.env.HTTP_PROXY,HTTPS_PROXY:process.env.HTTPS_PROXY,NO_PROXY:process.env.NO_PROXY,GIT_TERMINAL_PROMPT:"0",GIT_CONFIG_NOSYSTEM:"1",GIT_CONFIG_NOGLOBAL:"1",...process.platform==="win32"?{}:{GIT_ASKPASS:"/bin/false"}}})},_e=async(e,t,r,s)=>{const o=h.join(r,"archive.tar");await L(["archive","--remote",e,"--format=tar","--output",o,t],{timeoutMs:s}),await oe("tar",["-xf",o,"-C",r],{timeout:s??se,maxBuffer:1024*1024}),await C(o,{force:!0})},Re=e=>{if(!e||e.length===0)return!1;for(const t of e)if(!t||t.includes("**"))return!1;return!0},Le=e=>{if(!e)return[];const t=e.map(r=>{const s=r.replace(/\\/g,"/"),o=s.indexOf("*");return(o===-1?s:s.slice(0,o)).replace(/\/+$|\/$/,"")});return Array.from(new Set(t.filter(r=>r.length>0)))},Ae=async(e,t)=>{const r=/^[0-9a-f]{7,40}$/i.test(e.ref),s=Re(e.include),o=["clone","--no-checkout","--filter=blob:none","--depth",String(e.depth),"--recurse-submodules=no","--no-tags"];if(s&&o.push("--sparse"),r||(o.push("--single-branch"),e.ref!=="HEAD"&&o.push("--branch",e.ref)),o.push(e.repo,t),await L(o,{timeoutMs:e.timeoutMs}),s){const l=Le(e.include);l.length>0&&await L(["-C",t,"sparse-checkout","set",...l],{timeoutMs:e.timeoutMs})}await L(["-C",t,"checkout","--detach",e.resolvedCommit],{timeoutMs:e.timeoutMs})},Be=async e=>{const t=await A(h.join(J(),`docs-cache-${e.sourceId}-`));try{return await _e(e.repo,e.resolvedCommit,t,e.timeoutMs),t}catch(r){throw await C(t,{recursive:!0,force:!0}),r}},Ne=async e=>{H(e.sourceId,"sourceId");try{const t=await Be(e);return{repoDir:t,cleanup:async()=>{await C(t,{recursive:!0,force:!0})}}}catch{const t=await A(h.join(J(),`docs-cache-${e.sourceId}-`));try{return await Ae(e,t),{repoDir:t,cleanup:async()=>{await C(t,{recursive:!0,force:!0})}}}catch(r){throw await C(t,{recursive:!0,force:!0}),r}}},je=async e=>{const t=new Map(e.sources.map(m=>[m.id,m])),r={};for(const[m,w]of Object.entries(e.lock.sources)){const u=t.get(m),f=u?.targetDir?R(j(e.configPath,u.targetDir)):void 0;r[m]={repo:w.repo,ref:w.ref,resolvedCommit:w.resolvedCommit,bytes:w.bytes,fileCount:w.fileCount,manifestSha256:w.manifestSha256,updatedAt:w.updatedAt,cachePath:R(h.join(e.cacheDir,m)),...f?{targetDir:f}:{}}}const s={generatedAt:new Date().toISOString(),cacheDir:R(e.cacheDir),sources:r},o=h.join(e.cacheDir,he),l=`${JSON.stringify(s,null,2)}
`;await G(o,l,"utf8")},F=e=>R(e),z=Number(process.env.DOCS_CACHE_STREAM_THRESHOLD_MB??"2"),He=Number.isFinite(z)&&z>0?Math.floor(z*1024*1024):1024*1024,Ue=(e,t)=>{const r=h.resolve(e);if(!h.resolve(t).startsWith(r+h.sep))throw new Error(`Path traversal detected: ${t}`)},ie=async e=>{try{return await N(e,Q.O_RDONLY|Q.O_NOFOLLOW)}catch(t){const r=t.code;if(r==="ELOOP")return null;if(r==="EINVAL"||r==="ENOSYS"||r==="ENOTSUP")return(await fe(e)).isSymbolicLink()?null:await N(e,"r");throw t}},ze=async(e,t=5e3)=>{const r=Date.now();for(;Date.now()-r<t;)try{const s=await N(e,"wx");return{release:async()=>{await s.close(),await C(e,{force:!0})}}}catch(s){if(s.code!=="EEXIST")throw s;await new Promise(o=>setTimeout(o,100))}throw new Error(`Failed to acquire lock for ${e}.`)},Ge=async e=>{H(e.sourceId,"sourceId");const t=we(e.cacheDir,e.sourceId);await _(e.cacheDir,{recursive:!0});const r=await A(h.join(e.cacheDir,`.tmp-${e.sourceId}-`));try{const s=await ee(e.include,{cwd:e.repoDir,ignore:[".git/**",...e.exclude??[]],dot:!0,onlyFiles:!0,followSymbolicLinks:!1});s.sort((i,c)=>F(i).localeCompare(F(c)));const o=new Set;for(const i of s)o.add(h.dirname(i));await Promise.all(Array.from(o,i=>_(h.join(r,i),{recursive:!0})));let l=0,m=0;const w=Math.max(1,Math.min(s.length,Math.max(8,Math.min(128,De.cpus().length*8)))),u=h.join(r,U),f=Z(u,{encoding:"utf8"}),S=V("sha256"),v=async i=>new Promise((c,y)=>{const n=p=>{f.off("drain",d),y(p)},d=()=>{f.off("error",n),c()};f.once("error",n),f.write(i)?(f.off("error",n),c()):f.once("drain",d)});for(let i=0;i<s.length;i+=w){const c=s.slice(i,i+w),y=await Promise.all(c.map(async n=>{const d=F(n),p=h.join(e.repoDir,n),a=await ie(p);if(!a)return null;try{const $=await a.stat();if(!$.isFile())return null;const M=h.join(r,n);if(Ue(r,M),$.size>=He){const I=$e(p,{fd:a.fd,autoClose:!1}),O=Z(M);await Ee(I,O)}else{const I=await a.readFile();await G(M,I)}return{path:d,size:$.size}}finally{await a.close()}}));for(const n of y){if(!n)continue;if(e.maxFiles!==void 0&&m+1>e.maxFiles)throw new Error(`Materialized content exceeds maxFiles (${e.maxFiles}).`);if(l+=n.size,l>e.maxBytes)throw new Error(`Materialized content exceeds maxBytes (${e.maxBytes}).`);const d=`${JSON.stringify(n)}
`;S.update(d),await v(d),m+=1}}await new Promise((i,c)=>{f.end(()=>i()),f.once("error",c)});const x=S.digest("hex"),g=async i=>{try{return await Y(i),!0}catch{return!1}};return await(async(i,c)=>{const y=await ze(`${c}.lock`);try{const n=await g(c),d=`${c}.bak-${Pe(8).toString("hex")}`;n&&await B(c,d);try{await B(i,c)}catch(p){if(n)try{await B(d,c)}catch(a){const $=a instanceof Error?a.message:String(a);process.stderr.write(`Warning: Failed to restore backup: ${$}
`)}throw p}n&&await C(d,{recursive:!0,force:!0})}finally{await y.release()}})(r,t.sourceDir),{bytes:l,fileCount:m,manifestSha256:x}}catch(s){throw await C(r,{recursive:!0,force:!0}),s}},Ye=async e=>{H(e.sourceId,"sourceId");const t=await ee(e.include,{cwd:e.repoDir,ignore:[".git/**",...e.exclude??[]],dot:!0,onlyFiles:!0,followSymbolicLinks:!1});t.sort((l,m)=>F(l).localeCompare(F(m)));let r=0,s=0;const o=V("sha256");for(const l of t){const m=F(l),w=h.join(e.repoDir,l),u=await ie(w);if(u)try{const f=await u.stat();if(!f.isFile())continue;if(e.maxFiles!==void 0&&s+1>e.maxFiles)throw new Error(`Materialized content exceeds maxFiles (${e.maxFiles}).`);if(r+=f.size,r>e.maxBytes)throw new Error(`Materialized content exceeds maxBytes (${e.maxBytes}).`);const S=`${JSON.stringify({path:m,size:f.size})}
`;o.update(S),s+=1}finally{await u.close()}}return{bytes:r,fileCount:s,manifestSha256:o.digest("hex")}},Xe=async e=>{await C(e,{recursive:!0,force:!0})},ae=async e=>{const t=h.dirname(e.targetDir);await _(t,{recursive:!0}),await Xe(e.targetDir);const r=process.platform==="win32"?"copy":"symlink";if((e.mode??r)==="copy"){await me(e.sourceDir,e.targetDir,{recursive:!0});return}const s=process.platform==="win32"?"junction":"dir";await de(e.sourceDir,e.targetDir,s)},We=e=>{if(e<1024)return`${e} B`;const t=["KB","MB","GB","TB"];let r=e,s=-1;for(;r>=1024&&s<t.length-1;)r/=1024,s+=1;return`${r.toFixed(1)} ${t[s]}`},k=async e=>{try{return await Y(e),!0}catch{return!1}},ne=async(e,t)=>{const r=h.join(e,t);return await k(r)?await k(h.join(r,U)):!1},ce=async(e,t={})=>{const{config:r,resolvedPath:s,sources:o}=await ye(e.configPath),l=r.defaults??ge.defaults,m=pe(s,r.cacheDir??Se,e.cacheDirOverride),w=xe(s),u=await k(w);let f=null;u&&(f=await Ce(w));const S=t.resolveRemoteCommit??ke,v=e.sourceFilter?.length?o.filter(g=>e.sourceFilter?.includes(g.id)):o,x=await Promise.all(v.map(async g=>{const i=f?.sources?.[g.id];if(e.offline){const d=await ne(m,g.id);return{id:g.id,repo:i?.repo??g.repo,ref:i?.ref??g.ref??l.ref,resolvedCommit:i?.resolvedCommit??"offline",lockCommit:i?.resolvedCommit??null,status:i&&d?"up-to-date":"missing",bytes:i?.bytes,fileCount:i?.fileCount,manifestSha256:i?.manifestSha256}}const c=await S({repo:g.repo,ref:g.ref,allowHosts:l.allowHosts,timeoutMs:e.timeoutMs}),y=i?.resolvedCommit===c.resolvedCommit,n=i?y?"up-to-date":"changed":"missing";return{id:g.id,repo:c.repo,ref:c.ref,resolvedCommit:c.resolvedCommit,lockCommit:i?.resolvedCommit??null,status:n,bytes:i?.bytes,fileCount:i?.fileCount,manifestSha256:i?.manifestSha256}}));return{config:r,configPath:s,cacheDir:m,lockPath:w,lockExists:u,lockData:f,results:x,sources:v,defaults:l}},Je=async()=>{const e=h.resolve(process.cwd(),"package.json");try{const t=await X(e,"utf8"),r=JSON.parse(t.toString());return typeof r.version=="string"?r.version:"0.0.0"}catch{}try{const t=await X(new URL("../package.json",import.meta.url),"utf8"),r=JSON.parse(t.toString());return typeof r.version=="string"?r.version:"0.0.0"}catch{return"0.0.0"}},Ke=async(e,t)=>{const r=await Je(),s=new Date().toISOString(),o={...t?.sources??{}};for(const l of e.results){const m=o[l.id];o[l.id]={repo:l.repo,ref:l.ref,resolvedCommit:l.resolvedCommit,bytes:l.bytes??m?.bytes??0,fileCount:l.fileCount??m?.fileCount??0,manifestSha256:l.manifestSha256??m?.manifestSha256??l.resolvedCommit,updatedAt:s}}return{version:1,generatedAt:s,toolVersion:r,sources:o}},le=async(e,t={})=>{const r=process.hrtime.bigint();let s=0;const o=await ce(e,t);await _(o.cacheDir,{recursive:!0});const l=o.lockData,m=o.results.filter(u=>{const f=o.sources.find(S=>S.id===u.id);return u.status==="missing"&&(f?.required??!0)});if(e.failOnMiss&&m.length>0)throw new Error(`Missing required source(s): ${m.map(u=>u.id).join(", ")}.`);if(!e.lockOnly){const u=o.defaults,f=t.fetchSource??Ne,S=t.materializeSource??Ge,v=async(i,c)=>{const y=i?.length?o.results.filter(n=>i.includes(n.id)):o.results;return(await Promise.all(y.map(async n=>{const d=o.sources.find(a=>a.id===n.id);if(!d)return null;const p=await ne(o.cacheDir,n.id);return c||n.status!=="up-to-date"||!p?{result:n,source:d}:null}))).filter(Boolean)},x=async()=>{await Promise.all(o.sources.map(async i=>{if(!i.targetDir)return;const c=j(o.configPath,i.targetDir);await k(c)||await 
ae({sourceDir:h.join(o.cacheDir,i.id),targetDir:c,mode:i.targetMode??u.targetMode})}))},g=async i=>{const c=e.concurrency??4;let y=0;const n=async()=>{const d=i[y];if(!d||!d.source)return;y+=1;const{result:p,source:a}=d,$=o.lockData?.sources?.[a.id];e.json||D.step("Fetching",a.id);const M=await f({sourceId:a.id,repo:a.repo,ref:a.ref,resolvedCommit:p.resolvedCommit,cacheDir:o.cacheDir,depth:a.depth??u.depth,include:a.include??u.include,timeoutMs:e.timeoutMs});try{const I=h.join(o.cacheDir,a.id,U);if(p.status!=="up-to-date"&&$?.manifestSha256&&await k(I)){const T=await Ye({sourceId:a.id,repoDir:M.repoDir,cacheDir:o.cacheDir,include:a.include??u.include,exclude:a.exclude,maxBytes:a.maxBytes??u.maxBytes,maxFiles:a.maxFiles??u.maxFiles});if(T.manifestSha256===$.manifestSha256){p.bytes=T.bytes,p.fileCount=T.fileCount,p.manifestSha256=T.manifestSha256,p.status="up-to-date",e.json||D.item(P.success,a.id,"no content changes"),await n();return}}const O=await S({sourceId:a.id,repoDir:M.repoDir,cacheDir:o.cacheDir,include:a.include??u.include,exclude:a.exclude,maxBytes:a.maxBytes??u.maxBytes,maxFiles:a.maxFiles??u.maxFiles});if(a.targetDir){const T=j(o.configPath,a.targetDir);await ae({sourceDir:h.join(o.cacheDir,a.id),targetDir:T,mode:a.targetMode??u.targetMode})}p.bytes=O.bytes,p.fileCount=O.fileCount,p.manifestSha256=O.manifestSha256,e.json||D.item(P.success,a.id,`synced ${O.fileCount} files`)}finally{await M.cleanup()}await n()};await Promise.all(Array.from({length:Math.min(c,i.length)},n))};if(e.offline)await x();else{const i=await v();await g(i),await x()}if(!e.offline){const i=(await q({configPath:o.configPath,cacheDirOverride:o.cacheDir})).results.filter(c=>!c.ok);if(i.length>0){const c=await v(i.map(n=>n.id),!0);c.length>0&&(await g(c),await x());const y=(await q({configPath:o.configPath,cacheDirOverride:o.cacheDir})).results.filter(n=>!n.ok);if(y.length>0&&(s+=1,!e.json)){const n=y.map(d=>`${d.id} (${d.issues.join("; ")})`).join(", ");D.line(`${P.warn} Verify failed for ${y.length} source(s): ${n}`)}}}}const w=await Ke(o,l);if(await ve(o.lockPath,w),!e.json){const u=Number(process.hrtime.bigint()-r)/1e6,f=o.results.reduce((v,x)=>v+(x.bytes??0),0),S=o.results.reduce((v,x)=>v+(x.fileCount??0),0);D.line(`${P.info} Completed in ${u.toFixed(0)}ms \xB7 ${We(f)} \xB7 ${S} files${s?` \xB7 ${s} warning${s===1?"":"s"}`:""}`)}return o.config.index&&await je({cacheDir:o.cacheDir,configPath:o.configPath,lock:w,sources:o.sources}),o.lockExists=!0,o},ue=e=>{const t={upToDate:e.results.filter(r=>r.status==="up-to-date").length,changed:e.results.filter(r=>r.status==="changed").length,missing:e.results.filter(r=>r.status==="missing").length};if(e.results.length===0){D.line(`${P.info} No sources to sync.`);return}D.line(`${P.info} ${e.results.length} sources (${t.upToDate} up-to-date, ${t.changed} changed, ${t.missing} missing)`);for(const r of e.results){const s=D.hash(r.resolvedCommit),o=D.hash(r.lockCommit);if(r.status==="up-to-date"){D.item(P.success,r.id,`${E.dim("up-to-date")} ${E.gray(s)}`);continue}if(r.status==="changed"){D.item(P.warn,r.id,`${E.dim("changed")} ${E.gray(o)} ${E.dim("->")} ${E.gray(s)}`);continue}D.item(P.warn,r.id,`${E.dim("missing")} ${E.gray(s)}`)}},qe={__proto__:null,getSyncPlan:ce,printSyncPlan:ue,runSync:le};export{ue as a,le as b,te as e,re as p,b as r,qe as s};
//# sourceMappingURL=sync.mjs.map
package/dist/chunks/verify.mjs
ADDED
@@ -0,0 +1,2 @@
import{stat as v,access as k}from"node:fs/promises";import g from"node:path";import{a as _,r as C,u as l,s as m}from"../shared/docs-cache.D9_kM5zq.mjs";import{l as E,b as j}from"../shared/docs-cache.goBsJvLg.mjs";import{createReadStream as M}from"node:fs";import z from"node:readline";const d=".manifest.jsonl",A=async function*(s){const a=g.join(s,d),r=M(a,{encoding:"utf8"}),e=z.createInterface({input:r,crlfDelay:1/0});try{for await(const u of e){const f=u.trim();f&&(yield JSON.parse(f))}}finally{e.close(),r.destroy()}},p=async s=>{try{return await k(s),!0}catch{return!1}},w=async s=>{const{config:a,resolvedPath:r,sources:e}=await E(s.configPath),u=_(r,a.cacheDir??j,s.cacheDirOverride),f=async(i,o)=>{if(!await p(i))return{ok:!1,issues:[o==="source"?"missing source directory":"missing target directory"]};try{let t=0,n=0;for await(const h of A(i)){const y=g.join(i,h.path);if(!await p(y)){t+=1;continue}(await v(y)).size!==h.size&&(n+=1)}const c=[];return t>0&&c.push(o==="source"?`missing files: ${t}`:`target missing files: ${t}`),n>0&&c.push(o==="source"?`size mismatch: ${n}`:`target size mismatch: ${n}`),{ok:c.length===0,issues:c}}catch{return{ok:!1,issues:[o==="source"?"missing manifest":"missing target manifest"]}}},D=await Promise.all(e.map(async i=>{const o=g.join(u,i.id),t=[...(await f(o,"source")).issues];if(i.targetDir&&i.targetMode==="copy"){const n=C(r,i.targetDir),c=await f(n,"target");t.push(...c.issues)}return{id:i.id,ok:t.length===0,issues:t}}));return{cacheDir:u,results:D}},I=s=>{const a=s.results.filter(e=>e.ok).length,r=s.results.length-a;if(s.results.length===0){l.line(`${m.warn} No sources to verify.`);return}l.line(`${m.info} Verified ${s.results.length} sources (${a} ok, ${r} failed)`);for(const e of s.results)e.ok?l.item(m.success,e.id):l.item(m.warn,e.id,e.issues.join(", "))},N={__proto__:null,printVerify:I,verifyCache:w};export{d as M,N as a,w as v};
//# sourceMappingURL=verify.mjs.map
package/dist/cli.mjs
ADDED
@@ -0,0 +1,36 @@
import w from"node:path";import s from"node:process";import f from"picocolors";import{p as j,E as h,s as l,u as c}from"./shared/docs-cache.D9_kM5zq.mjs";import"cac";const u="docs-cache",O=`
Usage: ${u} <command> [options]

Commands:
add Add sources to the config (supports github:org/repo#ref)
remove Remove sources from the config and targets
sync Synchronize cache with config
status Show cache status
clean Remove cache
prune Remove unused data
verify Validate cache integrity
init Create a new config interactively

Global options:
--source <repo> (add only)
--target <dir> (add only)
--config <path>
--cache-dir <path>
--offline
--fail-on-miss
--lock-only
--target-dir <path> (add only)
--concurrency <n>
--json
--timeout-ms <n>
`,m=()=>{s.stdout.write(O.trimStart())},p=e=>{s.stderr.write(`${l.error} ${e}
`)},D=e=>{const t=e.findIndex(i=>!i.startsWith("-")),g=t===-1?[]:e.slice(t+1),d=[];let r=-1;const n=new Set(["--config","--cache-dir","--concurrency","--timeout-ms"]);for(let i=0;i<g.length;i+=1){const o=g[i];if(o==="--source"){const a=g[i+1];if(!a||a.startsWith("-"))throw new Error("--source expects a value.");d.push({repo:a}),r=d.length-1,i+=1;continue}if(o==="--target"||o==="--target-dir"){const a=g[i+1];if(!a||a.startsWith("-"))throw new Error("--target expects a value.");if(r===-1)throw new Error("--target must follow a --source entry.");d[r].targetDir=a,i+=1;continue}if(n.has(o)){i+=1;continue}o.startsWith("--")||(d.push({repo:o}),r=d.length-1)}return d},S=async(e,t,g,d)=>{if(e==="add"){const{addSources:r}=await import("./chunks/add.mjs"),{runSync:n}=await import("./chunks/sync.mjs").then(function(a){return a.s}),i=D(d);if(i.length===0)throw new Error("Usage: docs-cache add [--source <repo> --target <dir>] <repo...>");const o=await r({configPath:t.config,entries:i});if(t.offline?t.json||c.line(`${l.warn} Offline: skipped sync`):await n({configPath:t.config,cacheDirOverride:t.cacheDir,json:t.json,lockOnly:t.lockOnly,offline:t.offline,failOnMiss:t.failOnMiss,sourceFilter:o.sources.map(a=>a.id),timeoutMs:t.timeoutMs}),t.json)s.stdout.write(`${JSON.stringify(o,null,2)}
`);else{for(const a of o.sources){const v=a.repo.replace(/^https?:\/\//,"").replace(/\.git$/,""),y=a.targetDir?` ${f.dim("->")} ${f.magenta(a.targetDir)}`:"";c.item(l.success,a.id,`${f.blue(v)}${y}`)}o.skipped?.length&&c.line(`${l.warn} Skipped ${o.skipped.length} existing source${o.skipped.length===1?"":"s"}: ${o.skipped.join(", ")}`),c.line(`${l.info} Updated ${f.gray(w.relative(s.cwd(),o.configPath)||"docs.config.json")}`)}return}if(e==="remove"){const{removeSources:r}=await import("./chunks/remove.mjs"),{pruneCache:n}=await import("./chunks/prune.mjs");if(g.length===0)throw new Error("Usage: docs-cache remove <id...>");const i=await r({configPath:t.config,ids:g});if(t.json)s.stdout.write(`${JSON.stringify(i,null,2)}
`);else{if(i.removed.length>0&&c.line(`${l.success} Removed ${i.removed.length} source${i.removed.length===1?"":"s"}: ${i.removed.join(", ")}`),i.missing.length>0&&c.line(`${l.warn} Missing ${i.missing.length} source${i.missing.length===1?"":"s"}: ${i.missing.join(", ")}`),i.targetsRemoved.length>0){const o=i.targetsRemoved.map(a=>`${a.id} -> ${c.path(a.targetDir)}`).join(", ");c.line(`${l.success} Removed ${i.targetsRemoved.length} target${i.targetsRemoved.length===1?"":"s"}: ${o}`)}c.line(`${l.info} Updated ${f.gray(w.relative(s.cwd(),i.configPath)||"docs.config.json")}`)}t.prune&&await n({configPath:t.config,cacheDirOverride:t.cacheDir,json:t.json});return}if(e==="status"){const{getStatus:r,printStatus:n}=await import("./chunks/status.mjs"),i=await r({configPath:t.config,cacheDirOverride:t.cacheDir,json:t.json});t.json?s.stdout.write(`${JSON.stringify(i,null,2)}
`):n(i);return}if(e==="clean"){const{cleanCache:r}=await import("./chunks/clean.mjs"),n=await r({configPath:t.config,cacheDirOverride:t.cacheDir,json:t.json});t.json?s.stdout.write(`${JSON.stringify(n,null,2)}
`):n.removed?c.line(`${l.success} Removed cache at ${c.path(n.cacheDir)}`):c.line(`${l.info} Cache already missing at ${c.path(n.cacheDir)}`);return}if(e==="prune"){const{pruneCache:r}=await import("./chunks/prune.mjs"),n=await r({configPath:t.config,cacheDirOverride:t.cacheDir,json:t.json});t.json?s.stdout.write(`${JSON.stringify(n,null,2)}
`):n.removed.length===0?c.line(`${l.info} No cache entries to prune.`):c.line(`${l.success} Pruned ${n.removed.length} cache entr${n.removed.length===1?"y":"ies"}: ${n.removed.join(", ")}`);return}if(e==="sync"){const{printSyncPlan:r,runSync:n}=await import("./chunks/sync.mjs").then(function(o){return o.s}),i=await n({configPath:t.config,cacheDirOverride:t.cacheDir,json:t.json,lockOnly:t.lockOnly,offline:t.offline,failOnMiss:t.failOnMiss,timeoutMs:t.timeoutMs});t.json?s.stdout.write(`${JSON.stringify(i,null,2)}
`):r(i);return}if(e==="verify"){const{printVerify:r,verifyCache:n}=await import("./chunks/verify.mjs").then(function(o){return o.a}),i=await n({configPath:t.config,cacheDirOverride:t.cacheDir,json:t.json});t.json?s.stdout.write(`${JSON.stringify(i,null,2)}
`):r(i),i.results.some(o=>!o.ok)&&s.exit(h.FatalError);return}if(e==="init"){const{initConfig:r}=await import("./chunks/init.mjs");if(t.config)throw new Error("Init does not accept --config. Use the project root.");const n=await r({cacheDirOverride:t.cacheDir,json:t.json});t.json?s.stdout.write(`${JSON.stringify(n,null,2)}
`):c.line(`${l.success} Wrote ${f.gray(c.path(n.configPath))}`);return}c.line(`${u} ${e}: not implemented yet.`)};async function x(){try{s.on("uncaughtException",$),s.on("unhandledRejection",$);const e=j(),t=e.rawArgs;e.help&&(m(),s.exit(h.Success)),e.command||(m(),s.exit(h.InvalidArgument)),e.command!=="add"&&e.command!=="remove"&&e.positionals.length>0&&(p(`${u}: unexpected arguments.`),m(),s.exit(h.InvalidArgument)),e.command!=="add"&&e.options.targetDir&&(p(`${u}: --target-dir is only valid for add.`),m(),s.exit(h.InvalidArgument)),await S(e.command,e.options,e.positionals,e.rawArgs)}catch(e){$(e)}}function $(e){const t=e.message||String(e);p(t),s.exit(h.FatalError)}export{u as CLI_NAME,x as main};
//# sourceMappingURL=cli.mjs.map
package/dist/shared/docs-cache.BSvQNKuf.mjs
ADDED
@@ -0,0 +1,2 @@
const h=f=>{const e=f.trim(),p=e.match(/^git@([^:]+):(.+)$/);if(p){const t=p[1],r=p[2],[o,n]=r.split("#",2),i=o.replace(/^\//,""),c=i.split("/").filter(Boolean).pop()?.replace(/\.git$/i,""),s=`git@${t}:${i}`,a=n?.trim()||void 0;return{repoUrl:s,ref:a,inferredId:c}}const d=e.match(/^([^\s/:]+)\/([^\s#]+)(?:#(.+))?$/);if(d){const[,t,r,o]=d;return{repoUrl:`https://github.com/${`${t}/${r}`.replace(/\.git$/i,"")}.git`,ref:o?.trim()||void 0,inferredId:r.replace(/\.git$/i,"")}}const l=e.match(/^(github|gitlab):(.+)$/i);if(l){const t=l[1].toLowerCase(),r=l[2],[o,n]=r.split("#",2),i=o.replace(/^\//,""),c=i.split("/").filter(Boolean).pop()?.replace(/\.git$/i,""),s=t==="gitlab"?"gitlab.com":"github.com",a=i.endsWith(".git")?"":".git",g=`https://${s}/${i}${a}`,$=n?.trim()||void 0;return{repoUrl:g,ref:$,inferredId:c}}try{const t=new URL(e);if(t.protocol==="https:"||t.protocol==="ssh:"){const r=t.pathname.split("/").filter(Boolean).pop()?.replace(/\.git$/i,"");return{repoUrl:e,ref:void 0,inferredId:r}}}catch{}return{repoUrl:e,ref:void 0,inferredId:void 0}};export{h as r};
//# sourceMappingURL=docs-cache.BSvQNKuf.mjs.map
package/dist/shared/docs-cache.D9_kM5zq.mjs
ADDED
@@ -0,0 +1,6 @@
import u from"node:process";import d from"cac";import n from"node:path";import s from"picocolors";var c=(o=>(o[o.Success=0]="Success",o[o.FatalError=1]="FatalError",o[o.InvalidArgument=9]="InvalidArgument",o))(c||{});const m=["add","remove","sync","status","clean","prune","verify","init"],h=(o=u.argv)=>{try{const r=d("docs-cache");r.option("--source <repo>","Source repo (add only)").option("--target <dir>","Target directory for source (add only)").option("--config <path>","Path to config file").option("--cache-dir <path>","Override cache directory").option("--offline","Disable network access").option("--fail-on-miss","Fail when required sources are missing").option("--lock-only","Update lock without materializing files").option("--prune","Prune cache on remove").option("--target-dir <path>","Target directory for add").option("--concurrency <n>","Concurrency limit").option("--json","Output JSON").option("--timeout-ms <n>","Network timeout in milliseconds").help();const e=r.parse(o,{run:!1}),t=e.args[0];if(t&&!m.includes(t))throw new Error(`Unknown command '${t}'.`);const i={config:e.options.config,cacheDir:e.options.cacheDir,offline:!!e.options.offline,failOnMiss:!!e.options.failOnMiss,lockOnly:!!e.options.lockOnly,prune:!!e.options.prune,targetDir:e.options.targetDir,concurrency:e.options.concurrency?Number(e.options.concurrency):void 0,json:!!e.options.json,timeoutMs:e.options.timeoutMs?Number(e.options.timeoutMs):void 0};if(i.concurrency!==void 0&&i.concurrency<1)throw new Error("--concurrency must be a positive number.");if(i.timeoutMs!==void 0&&i.timeoutMs<1)throw new Error("--timeout-ms must be a positive number.");const l=o.slice(2);return{command:t??null,options:i,positionals:e.args.slice(1),rawArgs:l,help:!!e.options.help}}catch(r){const e=r instanceof Error?r.message:String(r);console.error(e),u.exit(c.InvalidArgument)}},p="index.json",a=o=>o.replace(/\\/g,"/"),g=(o,r)=>{const e=n.dirname(n.resolve(o)),t=n.resolve(e,r),i=n.relative(e,t);if(i===".."||i.startsWith(`..${n.sep}`)||n.isAbsolute(i))throw new Error(`targetDir '${r}' escapes project directory. Must be within ${e}.`);if(a(i).split("/").filter(Boolean).includes(".git"))throw new Error("targetDir cannot be within .git directory.");return t},f=(o,r,e)=>{if(e)return n.resolve(e);const t=n.dirname(o);return n.resolve(t,r)},y=(o,r)=>{n.join(o,"repos");const e=n.join(o,r),t=n.join(o,p);return{cacheDir:o,sourceDir:e,indexPath:t}},w={error:s.red("\u2716"),success:s.green("\u2714"),info:s.blue("\u2139"),warn:s.yellow("\u26A0")},v={path:o=>{const r=n.relative(process.cwd(),o),e=r.length<o.length?r:o;return a(e)},hash:o=>o?o.slice(0,7):"-",pad:(o,r)=>o.padEnd(r),line:(o="")=>process.stdout.write(`${o}
`),header:(o,r)=>{process.stdout.write(`${s.blue("\u2139")} ${o.padEnd(10)} ${r}
`)},item:(o,r,e)=>{const t=s.bold(r),i=e?s.gray(e):"";process.stdout.write(` ${o} ${t} ${i}
`)},step:(o,r,e)=>{const t=s.cyan("\u2192");process.stdout.write(` ${t} ${o} ${s.bold(r)}${e?` ${s.dim(e)}`:""}
`)}};export{p as D,c as E,f as a,y as g,h as p,g as r,w as s,a as t,v as u};
//# sourceMappingURL=docs-cache.D9_kM5zq.mjs.map
package/dist/shared/docs-cache.goBsJvLg.mjs
ADDED
@@ -0,0 +1,3 @@
import{writeFile as H,readFile as I,access as O}from"node:fs/promises";import g from"node:path";import{z as o}from"zod";import{r as z}from"./docs-cache.D9_kM5zq.mjs";const $=o.enum(["symlink","copy"]),F=o.enum(["materialize"]),T=o.object({type:o.enum(["commit","manifest"]),value:o.string().nullable()}).strict(),L=o.object({ref:o.string().min(1),mode:F,include:o.array(o.string().min(1)).min(1),targetMode:$.optional(),depth:o.number().min(1),required:o.boolean(),maxBytes:o.number().min(1),maxFiles:o.number().min(1).optional(),allowHosts:o.array(o.string().min(1)).min(1)}).strict(),U=o.object({id:o.string().min(1),repo:o.string().min(1),targetDir:o.string().min(1).optional(),targetMode:$.optional(),ref:o.string().min(1).optional(),mode:F.optional(),depth:o.number().min(1).optional(),include:o.array(o.string().min(1)).optional(),exclude:o.array(o.string().min(1)).optional(),required:o.boolean().optional(),maxBytes:o.number().min(1).optional(),maxFiles:o.number().min(1).optional(),integrity:T.optional()}).strict(),_=o.object({$schema:o.string().min(1).optional(),cacheDir:o.string().min(1).optional(),targetMode:$.optional(),index:o.boolean().optional(),defaults:L.partial().optional(),sources:o.array(U)}).strict(),P=/^[a-zA-Z0-9_-]+$/,J=new Set([".","..","CON","PRN","AUX","NUL","COM1","LPT1"]),S=(e,r)=>{if(typeof e!="string"||e.length===0)throw new Error(`${r} must be a non-empty string.`);if(e.length>200)throw new Error(`${r} exceeds maximum length of 200.`);if(!P.test(e))throw new Error(`${r} must contain only alphanumeric characters, hyphens, and underscores.`);if(J.has(e.toUpperCase()))throw new Error(`${r} uses reserved name '${e}'.`);return e},j="docs.config.json",b=".docs",q="package.json",G=process.platform==="win32"?"copy":"symlink",p={cacheDir:b,index:!1,defaults:{ref:"HEAD",mode:"materialize",include:["**/*.{md,mdx,markdown,mkd,txt,rst,adoc,asciidoc}"],targetMode:G,depth:1,required:!0,maxBytes:2e8,allowHosts:["github.com","gitlab.com"]},sources:[]},h=e=>typeof e=="object"&&e!==null&&!Array.isArray(e),l=(e,r)=>{if(typeof e!="string"||e.length===0)throw new Error(`${r} must be a non-empty string.`);return e},v=(e,r)=>{if(typeof e!="boolean")throw new Error(`${r} must be a boolean.`);return e},R=(e,r)=>{if(typeof e!="number"||Number.isNaN(e))throw new Error(`${r} must be a number.`);return e},f=(e,r)=>{const t=R(e,r);if(t<1)throw new Error(`${r} must be greater than zero.`);return t},w=(e,r)=>{if(!Array.isArray(e)||e.length===0)throw new Error(`${r} must be a non-empty array of strings.`);for(const t of e)if(typeof t!="string"||t.length===0)throw new Error(`${r} must contain non-empty strings.`);return e},B=(e,r)=>{const t=l(e,r);if(t!=="symlink"&&t!=="copy")throw new Error(`${r} must be "symlink" or "copy".`);return t},C=(e,r)=>{if(e!=="materialize")throw new Error(`${r} must be "materialize".`);return e},X=(e,r)=>{if(!h(e))throw new Error(`${r} must be an object.`);const t=e.type;if(t!=="commit"&&t!=="manifest")throw new Error(`${r}.type must be "commit" or "manifest".`);const c=e.value;if(typeof c!="string"&&c!==null)throw new Error(`${r}.value must be a string or null.`);return{type:t,value:c}},N=e=>{if(!h(e))throw new Error("Config must be a JSON object.");const r=_.safeParse(e);if(!r.success){const s=r.error.issues.map(n=>`${n.path.join(".")||"config"} ${n.message}`).join("; ");throw new Error(`Config does not match schema: ${s}.`)}const t=e.cacheDir?l(e.cacheDir,"cacheDir"):b,c=e.index!==void 0?v(e.index,"index"):p.index??!1,a=e.defaults,u=e.targetMode!==void 
0?B(e.targetMode,"targetMode"):void 0,i=p.defaults;let m=i;if(a!==void 0){if(!h(a))throw new Error("defaults must be an object.");m={ref:a.ref!==void 0?l(a.ref,"defaults.ref"):i.ref,mode:a.mode!==void 0?C(a.mode,"defaults.mode"):i.mode,include:a.include!==void 0?w(a.include,"defaults.include"):i.include,targetMode:a.targetMode!==void 0?B(a.targetMode,"defaults.targetMode"):u??i.targetMode,depth:a.depth!==void 0?f(a.depth,"defaults.depth"):i.depth,required:a.required!==void 0?v(a.required,"defaults.required"):i.required,maxBytes:a.maxBytes!==void 0?f(a.maxBytes,"defaults.maxBytes"):i.maxBytes,maxFiles:a.maxFiles!==void 0?f(a.maxFiles,"defaults.maxFiles"):i.maxFiles,allowHosts:a.allowHosts!==void 0?w(a.allowHosts,"defaults.allowHosts"):i.allowHosts}}else u!==void 0&&(m={...i,targetMode:u});if(!Array.isArray(e.sources))throw new Error("sources must be an array.");const M=e.sources.map((s,n)=>{if(!h(s))throw new Error(`sources[${n}] must be an object.`);const d={id:S(s.id,`sources[${n}].id`),repo:l(s.repo,`sources[${n}].repo`)};if(s.targetDir!==void 0&&(d.targetDir=l(s.targetDir,`sources[${n}].targetDir`)),s.targetMode!==void 0){const x=l(s.targetMode,`sources[${n}].targetMode`);if(x!=="symlink"&&x!=="copy")throw new Error(`sources[${n}].targetMode must be "symlink" or "copy".`);d.targetMode=x}return s.ref!==void 0&&(d.ref=l(s.ref,`sources[${n}].ref`)),s.mode!==void 0&&(d.mode=C(s.mode,`sources[${n}].mode`)),s.depth!==void 0&&(d.depth=f(s.depth,`sources[${n}].depth`)),s.include!==void 0&&(d.include=w(s.include,`sources[${n}].include`)),s.exclude!==void 0&&(d.exclude=w(s.exclude,`sources[${n}].exclude`)),s.required!==void 0&&(d.required=v(s.required,`sources[${n}].required`)),s.maxBytes!==void 0&&(d.maxBytes=f(s.maxBytes,`sources[${n}].maxBytes`)),s.maxFiles!==void 0&&(d.maxFiles=f(s.maxFiles,`sources[${n}].maxFiles`)),s.integrity!==void 0&&(d.integrity=X(s.integrity,`sources[${n}].integrity`)),d}),D=new Set,y=[];for(const s of M)D.has(s.id)&&y.push(s.id),D.add(s.id);if(y.length>0)throw new Error(`Duplicate source IDs found: ${y.join(", ")}. Each source must have a unique ID.`);return{cacheDir:t,targetMode:u,index:c,defaults:m,sources:M}},Z=e=>{const r=e.defaults??p.defaults;return e.sources.map(t=>({id:t.id,repo:t.repo,targetDir:t.targetDir,targetMode:t.targetMode??r.targetMode,ref:t.ref??r.ref,mode:t.mode??r.mode,depth:t.depth??r.depth,include:t.include??r.include,exclude:t.exclude,required:t.required??r.required,maxBytes:t.maxBytes??r.maxBytes,maxFiles:t.maxFiles??r.maxFiles,integrity:t.integrity}))},A=e=>e?g.resolve(e):g.resolve(process.cwd(),j),K=()=>g.resolve(process.cwd(),q),k=async e=>{try{return await O(e),!0}catch{return!1}},E=async(e,r)=>{let t;try{t=await I(e,"utf8")}catch(i){const m=i instanceof Error?i.message:String(i);throw new Error(`Failed to read config at ${e}: ${m}`)}let c;try{c=JSON.parse(t)}catch(i){const m=i instanceof Error?i.message:String(i);throw new Error(`Invalid JSON in ${e}: ${m}`)}const a=r==="package"?c?.["docs-cache"]:c;if(r==="package"&&a===void 0)throw new Error(`Missing docs-cache config in ${e}.`);const u=N(a);for(const i of u.sources)i.targetDir&&z(e,i.targetDir);return{config:u,resolvedPath:e,sources:Z(u)}},Q=async(e,r)=>{const t=`${JSON.stringify(r,null,2)}
`;await H(e,t,"utf8")},V=async e=>{const r=A(e),t=g.basename(r)===q;if(e)return E(r,t?"package":"config");if(await k(r))return E(r,"config");const c=K();if(await k(c))try{return await E(c,"package")}catch{}throw new Error(`No docs.config.json found at ${r} and no docs-cache config in ${c}.`)};export{p as D,S as a,b,j as c,V as l,A as r,N as v,Q as w};
//# sourceMappingURL=docs-cache.goBsJvLg.mjs.map
package/package.json
ADDED
@@ -0,0 +1,90 @@
{
  "name": "docs-cache",
  "private": false,
  "type": "module",
  "version": "0.1.0",
  "description": "CLI for deterministic local caching of external documentation for agents and tools",
  "author": "Frederik Bosch",
  "license": "MIT",
  "homepage": "https://github.com/fbosch/docs-cache#readme",
  "repository": {
    "type": "git",
    "url": "https://github.com/fbosch/docs-cache.git"
  },
  "bugs": {
    "url": "https://github.com/fbosch/docs-cache/issues"
  },
  "keywords": [
    "docs",
    "documentation",
    "cache",
    "agent",
    "ai",
    "git",
    "cli"
  ],
  "sideEffects": false,
  "engines": {
    "node": ">=18"
  },
  "bin": {
    "docs-cache": "./bin/docs-cache.mjs"
  },
  "files": [
    "bin",
    "dist/cli.mjs",
    "dist/chunks/*.mjs",
    "dist/shared/*.mjs",
    "README.md",
    "LICENSE"
  ],
  "dependencies": {
    "@clack/prompts": "^1.0.0",
    "cac": "^6.7.14",
    "fast-glob": "^3.3.2",
    "picocolors": "^1.1.1",
    "picomatch": "^2.3.1",
    "zod": "^4.3.6"
  },
  "devDependencies": {
    "@biomejs/biome": "^2.3.8",
    "@size-limit/file": "^11.2.0",
    "@types/node": "^24.2.1",
    "bumpp": "^10.3.2",
    "c8": "^10.1.3",
    "jiti": "^2.5.1",
    "lint-staged": "^16.2.7",
    "simple-git-hooks": "^2.13.1",
    "size-limit": "^11.2.0",
    "tinybench": "^6.0.0",
    "typescript": "^5.9.3",
    "unbuild": "^3.6.1"
  },
  "size-limit": [
    {
      "path": "dist/cli.mjs",
      "limit": "10 kB"
    }
  ],
  "simple-git-hooks": {
    "pre-commit": "pnpm lint-staged"
  },
  "lint-staged": {
    "*.{js,ts,cjs,mjs,d.cts,d.mts,jsx,tsx,json,jsonc}": [
      "biome check --write --no-errors-on-unmatched"
    ]
  },
  "scripts": {
    "build": "unbuild",
    "dev": "unbuild --stub",
    "lint": "biome check .",
    "release": "pnpm run lint && pnpm run typecheck && bumpp && pnpm publish --access public",
    "test": "pnpm build && node --test",
    "test:coverage": "pnpm build && c8 --include dist --exclude bin --reporter=text node --test",
    "bench": "pnpm build && node scripts/benchmarks/run.mjs",
    "schema:build": "node scripts/generate-schema.mjs",
    "size": "size-limit",
    "test:watch": "node --test --watch",
    "typecheck": "tsc --noEmit"
  }
}