docs-cache 0.1.2 → 0.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chunks/add.mjs +2 -2
- package/dist/chunks/clean.mjs +1 -1
- package/dist/chunks/init.mjs +2 -2
- package/dist/chunks/prune.mjs +1 -1
- package/dist/chunks/remove.mjs +2 -2
- package/dist/chunks/status.mjs +1 -1
- package/dist/chunks/sync.mjs +7 -7
- package/dist/chunks/verify.mjs +1 -1
- package/dist/shared/docs-cache.D4Fth4X8.mjs +3 -0
- package/package.json +1 -1
- package/dist/shared/docs-cache.goBsJvLg.mjs +0 -3
package/dist/chunks/add.mjs
CHANGED
@@ -1,3 +1,3 @@
-import{readFile as m,writeFile as
-`,"utf8")}else await
+import{readFile as m,writeFile as y,access as x}from"node:fs/promises";import h from"node:path";import{v as w,D as v,a as b,s as C,w as F,r as D}from"../shared/docs-cache.D4Fth4X8.mjs";import{r as I}from"../shared/docs-cache.D9_kM5zq.mjs";import{r as N}from"../shared/docs-cache.BSvQNKuf.mjs";import"zod";import"node:process";import"cac";import"picocolors";const f=async o=>{try{return await x(o),!0}catch{return!1}},P="package.json",S=async o=>{const s=await m(o,"utf8"),r=JSON.parse(s),a=r["docs-cache"];return a?{parsed:r,config:w(a)}:{parsed:r,config:null}},O=async o=>{if(o){const a=D(o);return{resolvedPath:a,mode:h.basename(a)===P?"package":"config"}}const s=D();if(await f(s))return{resolvedPath:s,mode:"config"};const r=h.resolve(process.cwd(),P);return await f(r)&&(await S(r)).config?{resolvedPath:r,mode:"package"}:{resolvedPath:s,mode:"config"}},E=async o=>{const s=await O(o.configPath),r=s.resolvedPath;let a=v,t=null,l=null;if(await f(r))if(s.mode==="package"){const e=await S(r);l=e.parsed,t=e.config,a=t??v}else{const e=await m(r,"utf8");t=JSON.parse(e.toString()),a=w(t)}const k="https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json",p=new Set(a.sources.map(e=>e.id)),u=[],d=o.entries.map(e=>{const c=N(e.repo),g=e.id||c.inferredId;if(!g)throw new Error("Unable to infer id. Provide an explicit id.");const n=b(g,"source id");return p.has(n)?(u.push(n),null):(p.add(n),e.targetDir&&I(r,e.targetDir),{id:n,repo:c.repoUrl,...e.targetDir?{targetDir:e.targetDir}:{},...c.ref?{ref:c.ref}:{}})}).filter(Boolean);if(d.length===0)throw new Error("All sources already exist in config.");const i={$schema:k,sources:[...a.sources,...d]};if(t?.cacheDir&&(i.cacheDir=t.cacheDir),t?.index!==void 0&&(i.index=t.index),t?.defaults&&(i.defaults=t.defaults),s.mode==="package"){const e=l??{};e["docs-cache"]=C(i),await y(r,`${JSON.stringify(e,null,2)}
+`,"utf8")}else await F(r,i);return{configPath:r,sources:d,skipped:u,created:!0}};export{E as addSources};
 //# sourceMappingURL=add.mjs.map
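The change here is mostly the shared-chunk hash rename (docs-cache.goBsJvLg.mjs → docs-cache.D4Fth4X8.mjs), but the full new line also shows the shape of the exported API. A minimal usage sketch of `addSources` as read from the minified body; the bare `docs-cache` import specifier and the example repo/targetDir are assumptions:

```js
import { addSources } from "docs-cache"; // import specifier is an assumption

const result = await addSources({
  // configPath omitted: the chunk resolves docs.config.json first, then a
  // "docs-cache" field in package.json.
  entries: [
    // id is optional; when missing it is inferred from the repo URL
    { repo: "https://github.com/owner/repo", targetDir: "docs/vendor" },
  ],
});
// result: { configPath, sources, skipped, created: true }, where "skipped"
// lists ids that already existed in the config
console.log(result.sources.map((s) => s.id), result.skipped);
```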
package/dist/chunks/clean.mjs
CHANGED
@@ -1,2 +1,2 @@
-import{rm as o,access as i}from"node:fs/promises";import{l as s,b as m}from"../shared/docs-cache.
+import{rm as o,access as i}from"node:fs/promises";import{l as s,b as m}from"../shared/docs-cache.D4Fth4X8.mjs";import{a as n}from"../shared/docs-cache.D9_kM5zq.mjs";import"node:path";import"zod";import"node:process";import"cac";import"picocolors";const f=async r=>{try{return await i(r),!0}catch{return!1}},p=async r=>{const{config:t,resolvedPath:c}=await s(r.configPath),a=n(c,t.cacheDir??m,r.cacheDirOverride),e=await f(a);return e&&await o(a,{recursive:!0,force:!0}),{cacheDir:a,removed:e}};export{p as cleanCache};
 //# sourceMappingURL=clean.mjs.map
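`cleanCache` removes the resolved cache directory wholesale if it exists; a sketch under the same import assumption:

```js
import { cleanCache } from "docs-cache"; // import specifier is an assumption

// Both options are optional; cacheDirOverride takes precedence over the
// config's cacheDir (default ".docs").
const { cacheDir, removed } = await cleanCache({});
console.log(removed ? `removed ${cacheDir}` : `${cacheDir} was already absent`);
```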
package/dist/chunks/init.mjs
CHANGED
@@ -1,3 +1,3 @@
-import{readFile as
-`,"utf8"),{configPath:e,created:!0}}if(await h(e))throw new Error(`Config already exists at ${e}.`);const
+import{readFile as g,writeFile as k,access as v}from"node:fs/promises";import f from"node:path";import{confirm as F,isCancel as N,select as O,text as P}from"@clack/prompts";import{c as A,b as o,s as J,w as S}from"../shared/docs-cache.D4Fth4X8.mjs";import"zod";import"../shared/docs-cache.D9_kM5zq.mjs";import"node:process";import"cac";import"picocolors";const h=async s=>{try{return await v(s),!0}catch{return!1}},V=async(s,r={})=>{const b=r.confirm??F,l=r.isCancel??N,y=r.select??O,j=r.text??P,w=s.cwd??process.cwd(),d=f.resolve(w,A),a=f.resolve(w,"package.json"),n=[];if(await h(d)&&n.push(d),await h(a)){const i=await g(a,"utf8");JSON.parse(i)["docs-cache"]&&n.push(a)}if(n.length>0)throw new Error(`Config already exists at ${n.join(", ")}. Init aborted.`);let p=!1;if(await h(a)){const i=await g(a,"utf8");if(!JSON.parse(i)["docs-cache"]){const c=await y({message:"Config location",options:[{value:"config",label:"docs.config.json"},{value:"package",label:"package.json"}],initialValue:"config"});if(l(c))throw new Error("Init cancelled.");p=c==="package"}}const I=p?a:d,$=s.cacheDirOverride??o,x=await j({message:"Cache directory",initialValue:$});if(l(x))throw new Error("Init cancelled.");const C=await b({message:"Generate index.json (summary of cached sources + paths for tools)",initialValue:!1});if(l(C))throw new Error("Init cancelled.");const t={configPath:I,cacheDir:x,index:C},e=f.resolve(w,t.configPath);if(f.basename(e)==="package.json"){const i=await g(e,"utf8"),c=JSON.parse(i);if(c["docs-cache"])throw new Error(`docs-cache config already exists in ${e}.`);const m={$schema:"https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json",sources:[]},D=t.cacheDir||o;return D!==o&&(m.cacheDir=D),t.index&&(m.index=!0),c["docs-cache"]=J(m),await k(e,`${JSON.stringify(c,null,2)}
+`,"utf8"),{configPath:e,created:!0}}if(await h(e))throw new Error(`Config already exists at ${e}.`);const u={$schema:"https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json",sources:[]},E=t.cacheDir||o;return E!==o&&(u.cacheDir=E),t.index&&(u.index=!0),await S(e,u),{configPath:e,created:!0}};export{V as initConfig};
 //# sourceMappingURL=init.mjs.map
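`initConfig` takes its prompts as injectable functions (second argument, defaulting to `@clack/prompts`), which makes scripted use possible. A sketch with stubbed prompts; the import specifier is an assumption:

```js
import { initConfig } from "docs-cache"; // import specifier is an assumption

// Throws if a config already exists in docs.config.json or package.json.
const { configPath, created } = await initConfig(
  { cwd: process.cwd() },
  {
    select: async () => "config", // "config" -> docs.config.json, "package" -> package.json
    text: async () => ".docs",    // cache directory; ".docs" is the built-in default
    confirm: async () => false,   // skip index.json generation
    isCancel: () => false,        // a cancelled prompt makes the chunk throw
  },
);
```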
package/dist/chunks/prune.mjs
CHANGED
@@ -1,2 +1,2 @@
-import{readdir as p,rm as f,access as h}from"node:fs/promises";import u from"node:path";import{l as d,b as D}from"../shared/docs-cache.
+import{readdir as p,rm as f,access as h}from"node:fs/promises";import u from"node:path";import{l as d,b as D}from"../shared/docs-cache.D4Fth4X8.mjs";import{a as v}from"../shared/docs-cache.D9_kM5zq.mjs";import"zod";import"node:process";import"cac";import"picocolors";const w=async t=>{try{return await h(t),!0}catch{return!1}},l=async t=>{const{config:c,resolvedPath:s,sources:a}=await d(t.configPath),e=v(s,c.cacheDir??D,t.cacheDirOverride);if(!await w(e))return{cacheDir:e,removed:[],kept:a.map(r=>r.id)};const n=new Set(a.map(r=>r.id)),m=await p(e,{withFileTypes:!0}),o=[];for(const r of m){if(!r.isDirectory())continue;const i=r.name;n.has(i)||i.startsWith(".tmp-")||(await f(u.join(e,i),{recursive:!0,force:!0}),o.push(i))}return{cacheDir:e,removed:o,kept:a.map(r=>r.id)}};export{l as pruneCache};
 //# sourceMappingURL=prune.mjs.map
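`pruneCache` deletes cache subdirectories whose names match no configured source id, skipping `.tmp-*` working directories; sketch:

```js
import { pruneCache } from "docs-cache"; // import specifier is an assumption

const { cacheDir, removed, kept } = await pruneCache({});
console.log(`pruned ${removed.length} stale dir(s) from ${cacheDir}`);
```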
package/dist/chunks/remove.mjs
CHANGED
@@ -1,3 +1,3 @@
-import{readFile as w,writeFile as N,rm as
-`,"utf8")}else await
+import{readFile as w,writeFile as N,rm as $,access as k}from"node:fs/promises";import v from"node:path";import{v as D,D as E,s as I,w as x,r as P}from"../shared/docs-cache.D4Fth4X8.mjs";import{r as F}from"../shared/docs-cache.D9_kM5zq.mjs";import{r as O}from"../shared/docs-cache.BSvQNKuf.mjs";import"zod";import"node:process";import"cac";import"picocolors";const d=async s=>{try{return await k(s),!0}catch{return!1}},y="package.json",S=async s=>{const a=await w(s,"utf8"),o=JSON.parse(a),t=o["docs-cache"];return t?{parsed:o,config:D(t)}:{parsed:o,config:null}},U=async s=>{if(s){const t=P(s);return{resolvedPath:t,mode:v.basename(t)===y?"package":"config"}}const a=P();if(await d(a))return{resolvedPath:a,mode:"config"};const o=v.resolve(process.cwd(),y);return await d(o)&&(await S(o)).config?{resolvedPath:o,mode:"package"}:{resolvedPath:a,mode:"config"}},b=async s=>{if(s.ids.length===0)throw new Error("No sources specified to remove.");const a=await U(s.configPath),o=a.resolvedPath;let t=E,r=null,f=null;if(await d(o))if(a.mode==="package"){const e=await S(o);if(f=e.parsed,r=e.config,!r)throw new Error(`Missing docs-cache config in ${o}.`);t=r}else{const e=await w(o,"utf8");r=JSON.parse(e.toString()),t=D(r)}else throw new Error(`Config not found at ${o}.`);const u=new Map(t.sources.map(e=>[e.id,e])),g=new Map(t.sources.map(e=>[e.repo,e])),c=new Set,l=[];for(const e of s.ids){if(u.has(e)){c.add(e);continue}const i=O(e);if(i.repoUrl&&g.has(i.repoUrl)){const p=g.get(i.repoUrl);p&&c.add(p.id);continue}if(i.inferredId&&u.has(i.inferredId)){c.add(i.inferredId);continue}l.push(e)}const C=t.sources.filter(e=>!c.has(e.id)),h=t.sources.filter(e=>c.has(e.id)).map(e=>e.id),M=t.sources.filter(e=>c.has(e.id));if(h.length===0)throw new Error("No matching sources found to remove.");const n={$schema:r?.$schema??"https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json",sources:C};if(r?.cacheDir&&(n.cacheDir=r.cacheDir),r?.index!==void 0&&(n.index=r.index),r?.defaults&&(n.defaults=r.defaults),r?.targetMode&&(n.targetMode=r.targetMode),a.mode==="package"){const e=f??{};e["docs-cache"]=I(n),await N(o,`${JSON.stringify(e,null,2)}
+`,"utf8")}else await x(o,n);const m=[];for(const e of M){if(!e.targetDir)continue;const i=F(o,e.targetDir);await $(i,{recursive:!0,force:!0}),m.push({id:e.id,targetDir:i})}return{configPath:o,removed:h,missing:l,targetsRemoved:m}};export{b as removeSources};
 //# sourceMappingURL=remove.mjs.map
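`removeSources` matches each argument first as a source id, then as a repo URL, then as an id inferred from a repo-style string, and deletes the `targetDir` of every removed source. Sketch (the id is hypothetical):

```js
import { removeSources } from "docs-cache"; // import specifier is an assumption

const { removed, missing, targetsRemoved } = await removeSources({
  ids: ["example-docs"], // hypothetical; repo URLs are accepted here too
});
if (missing.length > 0) console.warn("no match for:", missing);
```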
package/dist/chunks/status.mjs
CHANGED
@@ -1,2 +1,2 @@
-import{access as E}from"node:fs/promises";import a from"picocolors";import{u as o,s as u,a as w,g as D}from"../shared/docs-cache.D9_kM5zq.mjs";import{l as v,b as x}from"../shared/docs-cache.
+import{access as E}from"node:fs/promises";import a from"picocolors";import{u as o,s as u,a as w,g as D}from"../shared/docs-cache.D9_kM5zq.mjs";import{l as v,b as x}from"../shared/docs-cache.D4Fth4X8.mjs";import{resolveLockPath as C,readLock as P}from"../lock.mjs";import"node:process";import"cac";import"node:path";import"zod";const h=async s=>{try{return await E(s),!0}catch{return!1}},$=async s=>{const{config:e,resolvedPath:t,sources:n}=await v(s.configPath),r=w(t,e.cacheDir??x,s.cacheDirOverride),l=await h(r),c=C(t),i=await h(c);let d=!1,f=null;if(i)try{f=await P(c),d=!0}catch{d=!1}const g=await Promise.all(n.map(async m=>{const p=D(r,m.id),k=await h(p.sourceDir),y=f?.sources?.[m.id]??null;return{id:m.id,docsPath:p.sourceDir,docsExists:k,lockEntry:y}}));return{configPath:t,cacheDir:r,cacheDirExists:l,lockPath:c,lockExists:i,lockValid:d,sources:g}},L=s=>{const e=o.path(s.cacheDir),t=s.cacheDirExists?a.green("present"):a.red("missing"),n=s.lockExists?s.lockValid?a.green("valid"):a.red("invalid"):a.yellow("missing");if(o.header("Cache",`${e} (${t})`),o.header("Lock",`docs.lock (${n})`),s.sources.length===0){o.line(),o.line(`${u.warn} No sources configured.`);return}o.line();for(const r of s.sources){const l=r.docsExists?u.success:u.error,c=r.lockEntry?a.green("locked"):a.yellow("new"),i=o.hash(r.lockEntry?.resolvedCommit);o.item(l,r.id.padEnd(20),`${c.padEnd(10)} ${i}`)}};export{$ as getStatus,L as printStatus};
 //# sourceMappingURL=status.mjs.map
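`getStatus` reports cache and lock state per source, and `printStatus` renders it; sketch:

```js
import { getStatus, printStatus } from "docs-cache"; // import specifier is an assumption

const status = await getStatus({});
printStatus(status);
// status.sources[i] is { id, docsPath, docsExists, lockEntry }, where
// lockEntry is the docs.lock record or null for never-synced sources
```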
package/dist/chunks/sync.mjs
CHANGED
@@ -1,8 +1,8 @@
-import{createHash as L,randomBytes as me}from"node:crypto";import{rm as $,mkdtemp as N,writeFile as
-`).filter(Boolean);return r.length===0?null:r[0].split(/\s+/)[0]||null},_e=async e=>{re(e.repo,e.allowHosts);const{stdout:r}=await ke("git",["ls-remote",e.repo,e.ref],{timeout:e.timeoutMs??Fe,maxBuffer:1024*1024}),t=oe(r);if(!t)throw new Error(`Unable to resolve ref '${e.ref}' for ${I(e.repo)}.`);return{repo:e.repo,ref:e.ref,resolvedCommit:t}},se=V(K),ie=3e4,A=async(e,r)=>{await se("git",["-c","core.hooksPath=/dev/null","-c","submodule.recurse=false","-c","protocol.file.allow=never","-c","protocol.ext.allow=never",...e],{cwd:r?.cwd,timeout:r?.timeoutMs??ie,maxBuffer:1024*1024,env:{PATH:process.env.PATH,HOME:process.env.HOME,USER:process.env.USER,USERPROFILE:process.env.USERPROFILE,TMPDIR:process.env.TMPDIR,TMP:process.env.TMP,TEMP:process.env.TEMP,SYSTEMROOT:process.env.SYSTEMROOT,WINDIR:process.env.WINDIR,SSH_AUTH_SOCK:process.env.SSH_AUTH_SOCK,SSH_AGENT_PID:process.env.SSH_AGENT_PID,HTTP_PROXY:process.env.HTTP_PROXY,HTTPS_PROXY:process.env.HTTPS_PROXY,NO_PROXY:process.env.NO_PROXY,GIT_TERMINAL_PROMPT:"0",GIT_CONFIG_NOSYSTEM:"1",GIT_CONFIG_NOGLOBAL:"1",...process.platform==="win32"?{}:{GIT_ASKPASS:"/bin/false"}}})},Ae=async(e,r,t,i)=>{const o=w.join(t,"archive.tar");await A(["archive","--remote",e,"--format=tar","--output",o,r],{timeoutMs:i}),await se("tar",["-xf",o,"-C",t],{timeout:i??ie,maxBuffer:1024*1024}),await $(o,{force:!0})},Le=e=>{if(!e||e.length===0)return!1;for(const r of e)if(!r||r.includes("**"))return!1;return!0},Ne=e=>{if(!e)return[];const r=e.map(t=>{const i=t.replace(/\\/g,"/"),o=i.indexOf("*");return(o===-1?i:i.slice(0,o)).replace(/\/+$|\/$/,"")});return Array.from(new Set(r.filter(t=>t.length>0)))},
-`;await
-`;x.update(a),await y(a),l+=1}}await new Promise((f,c)=>{g.end(()=>f()),g.once("error",c)});const n=x.digest("hex"),S=async f=>{try{return await
-`)}throw P}d&&await $(a,{recursive:!0,force:!0})}finally{await p.release()}})(t,r.sourceDir),{bytes:m,fileCount:l,manifestSha256:n}}catch(s){try{await o()}catch{}throw await $(t,{recursive:!0,force:!0}),s}},Xe=async e=>{
-`;o.update(C),i+=1}finally{await l.close()}}return{bytes:t,fileCount:i,manifestSha256:o.digest("hex")}},Je=async(e,r)=>{await r.rm(e,{recursive:!0,force:!0})},
-`)}await r.cp(e.sourceDir,e.targetDir,{recursive:!0});return}throw s}},Ke=e=>{if(e<1024)return`${e} B`;const r=["KB","MB","GB","TB"];let t=e,i=-1;for(;t>=1024&&i<r.length-1;)t/=1024,i+=1;return`${t.toFixed(1)} ${r[i]}`},b=async e=>{try{return await
+import{createHash as L,randomBytes as me}from"node:crypto";import{rm as $,mkdtemp as N,writeFile as X,mkdir as R,access as J,rename as j,open as B,lstat as he,symlink as we,cp as pe,readFile as H}from"node:fs/promises";import w from"node:path";import M from"picocolors";import{t as _,r as U,D as ge,g as ye,u as v,s as O,a as Se}from"../shared/docs-cache.D9_kM5zq.mjs";import{a as z,l as De,D as ve,b as xe}from"../shared/docs-cache.D4Fth4X8.mjs";import{execFile as K}from"node:child_process";import Ce,{tmpdir as q}from"node:os";import{promisify as V}from"node:util";import{writeLock as Pe,resolveLockPath as Ee,readLock as Me}from"../lock.mjs";import{M as G,v as Z}from"./verify.mjs";import{createWriteStream as Q,createReadStream as Oe,constants as ee}from"node:fs";import{pipeline as $e}from"node:stream/promises";import te from"fast-glob";const Te=/^(https?:\/\/)([^@]+)@/i,I=e=>e.replace(Te,"$1***@"),ke=V(K),Fe=3e4,Ie=new Set(["file:","ftp:","data:","javascript:"]),be=e=>{try{const r=new URL(e);if(Ie.has(r.protocol))throw new Error(`Blocked protocol '${r.protocol}' in repo URL '${I(e)}'.`)}catch(r){if(r instanceof TypeError)return;throw r}},Re=e=>{if(be(e),e.startsWith("git@")){const r=e.indexOf("@"),t=e.indexOf(":",r+1);return t===-1?null:e.slice(r+1,t)||null}try{const r=new URL(e);return r.protocol!=="https:"&&r.protocol!=="ssh:"?null:r.hostname||null}catch{return null}},re=(e,r)=>{const t=Re(e);if(!t)throw new Error(`Unsupported repo URL '${I(e)}'. Use HTTPS or SSH.`);const i=t.toLowerCase();if(!r.map(o=>o.toLowerCase()).includes(i))throw new Error(`Host '${t}' is not in allowHosts for '${I(e)}'.`)},oe=e=>{const r=e.trim().split(`
+`).filter(Boolean);return r.length===0?null:r[0].split(/\s+/)[0]||null},_e=async e=>{re(e.repo,e.allowHosts);const{stdout:r}=await ke("git",["ls-remote",e.repo,e.ref],{timeout:e.timeoutMs??Fe,maxBuffer:1024*1024}),t=oe(r);if(!t)throw new Error(`Unable to resolve ref '${e.ref}' for ${I(e.repo)}.`);return{repo:e.repo,ref:e.ref,resolvedCommit:t}},se=V(K),ie=3e4,A=async(e,r)=>{await se("git",["-c","core.hooksPath=/dev/null","-c","submodule.recurse=false","-c","protocol.file.allow=never","-c","protocol.ext.allow=never",...e],{cwd:r?.cwd,timeout:r?.timeoutMs??ie,maxBuffer:1024*1024,env:{PATH:process.env.PATH,HOME:process.env.HOME,USER:process.env.USER,USERPROFILE:process.env.USERPROFILE,TMPDIR:process.env.TMPDIR,TMP:process.env.TMP,TEMP:process.env.TEMP,SYSTEMROOT:process.env.SYSTEMROOT,WINDIR:process.env.WINDIR,SSH_AUTH_SOCK:process.env.SSH_AUTH_SOCK,SSH_AGENT_PID:process.env.SSH_AGENT_PID,HTTP_PROXY:process.env.HTTP_PROXY,HTTPS_PROXY:process.env.HTTPS_PROXY,NO_PROXY:process.env.NO_PROXY,GIT_TERMINAL_PROMPT:"0",GIT_CONFIG_NOSYSTEM:"1",GIT_CONFIG_NOGLOBAL:"1",...process.platform==="win32"?{}:{GIT_ASKPASS:"/bin/false"}}})},Ae=async(e,r,t,i)=>{const o=w.join(t,"archive.tar");await A(["archive","--remote",e,"--format=tar","--output",o,r],{timeoutMs:i}),await se("tar",["-xf",o,"-C",t],{timeout:i??ie,maxBuffer:1024*1024}),await $(o,{force:!0})},Le=e=>{if(!e||e.length===0)return!1;for(const r of e)if(!r||r.includes("**"))return!1;return!0},Ne=e=>{if(!e)return[];const r=e.map(t=>{const i=t.replace(/\\/g,"/"),o=i.indexOf("*");return(o===-1?i:i.slice(0,o)).replace(/\/+$|\/$/,"")});return Array.from(new Set(r.filter(t=>t.length>0)))},je=async(e,r)=>{const t=/^[0-9a-f]{7,40}$/i.test(e.ref),i=Le(e.include),o=["clone","--no-checkout","--filter=blob:none","--depth",String(e.depth),"--recurse-submodules=no","--no-tags"];if(i&&o.push("--sparse"),t||(o.push("--single-branch"),e.ref!=="HEAD"&&o.push("--branch",e.ref)),o.push(e.repo,r),await A(o,{timeoutMs:e.timeoutMs}),i){const s=Ne(e.include);s.length>0&&await A(["-C",r,"sparse-checkout","set",...s],{timeoutMs:e.timeoutMs})}await A(["-C",r,"checkout","--detach",e.resolvedCommit],{timeoutMs:e.timeoutMs})},Be=async e=>{const r=await N(w.join(q(),`docs-cache-${e.sourceId}-`));try{return await Ae(e.repo,e.resolvedCommit,r,e.timeoutMs),r}catch(t){throw await $(r,{recursive:!0,force:!0}),t}},He=async e=>{z(e.sourceId,"sourceId");try{const r=await Be(e);return{repoDir:r,cleanup:async()=>{await $(r,{recursive:!0,force:!0})}}}catch{const r=await N(w.join(q(),`docs-cache-${e.sourceId}-`));try{return await je(e,r),{repoDir:r,cleanup:async()=>{await $(r,{recursive:!0,force:!0})}}}catch(t){throw await $(r,{recursive:!0,force:!0}),t}}},Ue=async e=>{const r=new Map(e.sources.map(u=>[u.id,u])),t={};for(const[u,m]of Object.entries(e.lock.sources)){const l=r.get(u),h=l?.targetDir?_(U(e.configPath,l.targetDir)):void 0;t[u]={repo:m.repo,ref:m.ref,resolvedCommit:m.resolvedCommit,bytes:m.bytes,fileCount:m.fileCount,manifestSha256:m.manifestSha256,updatedAt:m.updatedAt,cachePath:_(w.join(e.cacheDir,u)),...h?{targetDir:h}:{}}}const i={generatedAt:new Date().toISOString(),cacheDir:_(e.cacheDir),sources:t},o=w.join(e.cacheDir,ge),s=`${JSON.stringify(i,null,2)}
+`;await X(o,s,"utf8")},F=e=>_(e),Y=Number(process.env.DOCS_CACHE_STREAM_THRESHOLD_MB??"2"),ze=Number.isFinite(Y)&&Y>0?Math.floor(Y*1024*1024):1024*1024,Ge=(e,r)=>{const t=w.resolve(e);if(!w.resolve(r).startsWith(t+w.sep))throw new Error(`Path traversal detected: ${r}`)},ae=async e=>{try{return await B(e,ee.O_RDONLY|ee.O_NOFOLLOW)}catch(r){const t=r.code;if(t==="ELOOP")return null;if(t==="EINVAL"||t==="ENOSYS"||t==="ENOTSUP")return(await he(e)).isSymbolicLink()?null:await B(e,"r");throw r}},Ye=async(e,r=5e3)=>{const t=Date.now();for(;Date.now()-t<r;)try{const i=await B(e,"wx");return{release:async()=>{await i.close(),await $(e,{force:!0})}}}catch(i){if(i.code!=="EEXIST")throw i;await new Promise(o=>setTimeout(o,100))}throw new Error(`Failed to acquire lock for ${e}.`)},We=async e=>{z(e.sourceId,"sourceId");const r=ye(e.cacheDir,e.sourceId);await R(e.cacheDir,{recursive:!0});const t=await N(w.join(e.cacheDir,`.tmp-${e.sourceId}-`));let i=null;const o=async()=>{const s=i;!s||s.closed||s.destroyed||await new Promise(u=>{const m=()=>{s.off("close",l),s.off("error",h),u()},l=()=>m(),h=()=>m();s.once("close",l),s.once("error",h);try{s.end()}catch{m()}})};try{const s=await te(e.include,{cwd:e.repoDir,ignore:[".git/**",...e.exclude??[]],dot:!0,onlyFiles:!0,followSymbolicLinks:!1});s.sort((f,c)=>F(f).localeCompare(F(c)));const u=new Set;for(const f of s)u.add(w.dirname(f));await Promise.all(Array.from(u,f=>R(w.join(t,f),{recursive:!0})));let m=0,l=0;const h=Math.max(1,Math.min(s.length,Math.max(8,Math.min(128,Ce.cpus().length*8)))),C=w.join(t,G),g=Q(C,{encoding:"utf8"});i=g;const x=L("sha256"),y=async f=>new Promise((c,p)=>{const d=P=>{g.off("drain",a),p(P)},a=()=>{g.off("error",d),c()};g.once("error",d),g.write(f)?(g.off("error",d),c()):g.once("drain",a)});for(let f=0;f<s.length;f+=h){const c=s.slice(f,f+h),p=await Promise.all(c.map(async d=>{const a=F(d),P=w.join(e.repoDir,d),D=await ae(P);if(!D)return null;try{const k=await D.stat();if(!k.isFile())return null;const T=w.join(t,d);if(Ge(t,T),k.size>=ze){const E=Oe(P,{fd:D.fd,autoClose:!1}),de=Q(T);await $e(E,de)}else{const E=await D.readFile();await X(T,E)}return{path:a,size:k.size}}finally{await D.close()}}));for(const d of p){if(!d)continue;if(e.maxFiles!==void 0&&l+1>e.maxFiles)throw new Error(`Materialized content exceeds maxFiles (${e.maxFiles}).`);if(m+=d.size,m>e.maxBytes)throw new Error(`Materialized content exceeds maxBytes (${e.maxBytes}).`);const a=`${JSON.stringify(d)}
+`;x.update(a),await y(a),l+=1}}await new Promise((f,c)=>{g.end(()=>f()),g.once("error",c)});const n=x.digest("hex"),S=async f=>{try{return await J(f),!0}catch{return!1}};return await(async(f,c)=>{const p=await Ye(`${c}.lock`);try{const d=await S(c),a=`${c}.bak-${me(8).toString("hex")}`;d&&await j(c,a);try{await j(f,c)}catch(P){if(d)try{await j(a,c)}catch(D){const k=D instanceof Error?D.message:String(D);process.stderr.write(`Warning: Failed to restore backup: ${k}
+`)}throw P}d&&await $(a,{recursive:!0,force:!0})}finally{await p.release()}})(t,r.sourceDir),{bytes:m,fileCount:l,manifestSha256:n}}catch(s){try{await o()}catch{}throw await $(t,{recursive:!0,force:!0}),s}},Xe=async e=>{z(e.sourceId,"sourceId");const r=await te(e.include,{cwd:e.repoDir,ignore:[".git/**",...e.exclude??[]],dot:!0,onlyFiles:!0,followSymbolicLinks:!1});r.sort((s,u)=>F(s).localeCompare(F(u)));let t=0,i=0;const o=L("sha256");for(const s of r){const u=F(s),m=w.join(e.repoDir,s),l=await ae(m);if(l)try{const h=await l.stat();if(!h.isFile())continue;if(e.maxFiles!==void 0&&i+1>e.maxFiles)throw new Error(`Materialized content exceeds maxFiles (${e.maxFiles}).`);if(t+=h.size,t>e.maxBytes)throw new Error(`Materialized content exceeds maxBytes (${e.maxBytes}).`);const C=`${JSON.stringify({path:u,size:h.size})}
+`;o.update(C),i+=1}finally{await l.close()}}return{bytes:t,fileCount:i,manifestSha256:o.digest("hex")}},Je=async(e,r)=>{await r.rm(e,{recursive:!0,force:!0})},W=async e=>{const r=e.deps??{cp:pe,mkdir:R,rm:$,symlink:we,stderr:process.stderr},t=w.dirname(e.targetDir);await r.mkdir(t,{recursive:!0}),await Je(e.targetDir,r);const i=process.platform==="win32"?"copy":"symlink";if((e.mode??i)==="copy"){await r.cp(e.sourceDir,e.targetDir,{recursive:!0});return}const o=process.platform==="win32"?"junction":"dir";try{await r.symlink(e.sourceDir,e.targetDir,o)}catch(s){const u=s.code;if(u&&new Set(["EPERM","EACCES","ENOTSUP","EINVAL"]).has(u)){if(e.explicitTargetMode){const m=s instanceof Error?s.message:String(s);r.stderr.write(`Warning: Failed to create symlink at ${e.targetDir}. Falling back to copy. ${m}
+`)}await r.cp(e.sourceDir,e.targetDir,{recursive:!0});return}throw s}},Ke=e=>{if(e<1024)return`${e} B`;const r=["KB","MB","GB","TB"];let t=e,i=-1;for(;t>=1024&&i<r.length-1;)t/=1024,i+=1;return`${t.toFixed(1)} ${r[i]}`},b=async e=>{try{return await J(e),!0}catch{return!1}},ne=async(e,r)=>{const t=w.join(e,r);return await b(t)?await b(w.join(t,G)):!1},ce=e=>{if(!e||e.length===0)return[];const r=e.map(t=>t.trim()).filter(t=>t.length>0);return Array.from(new Set(r)).sort()},qe=e=>{const r={include:ce(e.include),exclude:ce(e.exclude)},t=L("sha256");return t.update(JSON.stringify(r)),t.digest("hex")},le=async(e,r={})=>{const{config:t,resolvedPath:i,sources:o}=await De(e.configPath),s=t.defaults??ve.defaults,u=Se(i,t.cacheDir??xe,e.cacheDirOverride),m=Ee(i),l=await b(m);let h=null;l&&(h=await Me(m));const C=r.resolveRemoteCommit??_e,g=e.sourceFilter?.length?o.filter(y=>e.sourceFilter?.includes(y.id)):o,x=await Promise.all(g.map(async y=>{const n=h?.sources?.[y.id],S=y.include??s.include,f=y.exclude,c=qe({include:S,exclude:f});if(e.offline){const P=await ne(u,y.id);return{id:y.id,repo:n?.repo??y.repo,ref:n?.ref??y.ref??s.ref,resolvedCommit:n?.resolvedCommit??"offline",lockCommit:n?.resolvedCommit??null,lockRulesSha256:n?.rulesSha256,status:n&&P?"up-to-date":"missing",bytes:n?.bytes,fileCount:n?.fileCount,manifestSha256:n?.manifestSha256,rulesSha256:c}}const p=await C({repo:y.repo,ref:y.ref,allowHosts:s.allowHosts,timeoutMs:e.timeoutMs}),d=n?.resolvedCommit===p.resolvedCommit&&n?.rulesSha256===c,a=n?d?"up-to-date":"changed":"missing";return{id:y.id,repo:p.repo,ref:p.ref,resolvedCommit:p.resolvedCommit,lockCommit:n?.resolvedCommit??null,lockRulesSha256:n?.rulesSha256,status:a,bytes:n?.bytes,fileCount:n?.fileCount,manifestSha256:n?.manifestSha256,rulesSha256:c}}));return{config:t,configPath:i,cacheDir:u,lockPath:m,lockExists:l,lockData:h,results:x,sources:g,defaults:s}},Ve=async()=>{const e=w.resolve(process.cwd(),"package.json");try{const r=await H(e,"utf8"),t=JSON.parse(r.toString());return typeof t.version=="string"?t.version:"0.0.0"}catch{}try{const r=await H(new URL("../package.json",import.meta.url),"utf8"),t=JSON.parse(r.toString());return typeof t.version=="string"?t.version:"0.0.0"}catch{}try{const r=await H(new URL("../../package.json",import.meta.url),"utf8"),t=JSON.parse(r.toString());return typeof t.version=="string"?t.version:"0.0.0"}catch{return"0.0.0"}},Ze=async(e,r)=>{const t=await Ve(),i=new Date().toISOString(),o={...r?.sources??{}};for(const s of e.results){const u=o[s.id];o[s.id]={repo:s.repo,ref:s.ref,resolvedCommit:s.resolvedCommit,bytes:s.bytes??u?.bytes??0,fileCount:s.fileCount??u?.fileCount??0,manifestSha256:s.manifestSha256??u?.manifestSha256??s.resolvedCommit,rulesSha256:s.rulesSha256??u?.rulesSha256,updatedAt:i}}return{version:1,generatedAt:i,toolVersion:t,sources:o}},ue=async(e,r={})=>{const t=process.hrtime.bigint();let i=0;const o=await le(e,r);await R(o.cacheDir,{recursive:!0});const s=o.lockData,u=o.results.filter(l=>{const h=o.sources.find(C=>C.id===l.id);return l.status==="missing"&&(h?.required??!0)});if(e.failOnMiss&&u.length>0)throw new Error(`Missing required source(s): ${u.map(l=>l.id).join(", ")}.`);if(!e.lockOnly){const l=o.defaults,h=r.fetchSource??He,C=r.materializeSource??We,g=async(n,S)=>{const f=n?.length?o.results.filter(c=>n.includes(c.id)):o.results;return(await Promise.all(f.map(async c=>{const p=o.sources.find(a=>a.id===c.id);if(!p)return null;const d=await ne(o.cacheDir,c.id);return S||c.status!=="up-to-date"||!d?{result:c,source:p}:null}))).filter(Boolean)},x=async()=>{await Promise.all(o.sources.map(async n=>{if(!n.targetDir)return;const S=U(o.configPath,n.targetDir);await b(S)||await W({sourceDir:w.join(o.cacheDir,n.id),targetDir:S,mode:n.targetMode??l.targetMode,explicitTargetMode:n.targetMode!==void 0})}))},y=async n=>{const S=e.concurrency??4;let f=0;const c=async()=>{const p=n[f];if(!p||!p.source)return;f+=1;const{result:d,source:a}=p,P=o.lockData?.sources?.[a.id];e.json||v.step("Fetching",a.id);const D=await h({sourceId:a.id,repo:a.repo,ref:a.ref,resolvedCommit:d.resolvedCommit,cacheDir:o.cacheDir,depth:a.depth??l.depth,include:a.include??l.include,timeoutMs:e.timeoutMs});try{const k=w.join(o.cacheDir,a.id,G);if(d.status!=="up-to-date"&&P?.manifestSha256&&await b(k)){const E=await Xe({sourceId:a.id,repoDir:D.repoDir,cacheDir:o.cacheDir,include:a.include??l.include,exclude:a.exclude,maxBytes:a.maxBytes??l.maxBytes,maxFiles:a.maxFiles??l.maxFiles});if(E.manifestSha256===P.manifestSha256){d.bytes=E.bytes,d.fileCount=E.fileCount,d.manifestSha256=E.manifestSha256,d.status="up-to-date",e.json||v.item(O.success,a.id,"no content changes"),await c();return}}const T=await C({sourceId:a.id,repoDir:D.repoDir,cacheDir:o.cacheDir,include:a.include??l.include,exclude:a.exclude,maxBytes:a.maxBytes??l.maxBytes,maxFiles:a.maxFiles??l.maxFiles});if(a.targetDir){const E=U(o.configPath,a.targetDir);await W({sourceDir:w.join(o.cacheDir,a.id),targetDir:E,mode:a.targetMode??l.targetMode,explicitTargetMode:a.targetMode!==void 0})}d.bytes=T.bytes,d.fileCount=T.fileCount,d.manifestSha256=T.manifestSha256,e.json||v.item(O.success,a.id,`synced ${T.fileCount} files`)}finally{await D.cleanup()}await c()};await Promise.all(Array.from({length:Math.min(S,n.length)},c))};if(e.offline)await x();else{const n=await g();await y(n),await x()}if(!e.offline){const n=(await Z({configPath:o.configPath,cacheDirOverride:o.cacheDir})).results.filter(S=>!S.ok);if(n.length>0){const S=await g(n.map(c=>c.id),!0);S.length>0&&(await y(S),await x());const f=(await Z({configPath:o.configPath,cacheDirOverride:o.cacheDir})).results.filter(c=>!c.ok);if(f.length>0&&(i+=1,!e.json)){const c=f.map(p=>`${p.id} (${p.issues.join("; ")})`).join(", ");v.line(`${O.warn} Verify failed for ${f.length} source(s): ${c}`)}}}}const m=await Ze(o,s);if(await Pe(o.lockPath,m),!e.json){const l=Number(process.hrtime.bigint()-t)/1e6,h=o.results.reduce((g,x)=>g+(x.bytes??0),0),C=o.results.reduce((g,x)=>g+(x.fileCount??0),0);v.line(`${O.info} Completed in ${l.toFixed(0)}ms \xB7 ${Ke(h)} \xB7 ${C} files${i?` \xB7 ${i} warning${i===1?"":"s"}`:""}`)}return o.config.index&&await Ue({cacheDir:o.cacheDir,configPath:o.configPath,lock:m,sources:o.sources}),o.lockExists=!0,o},fe=e=>{const r={upToDate:e.results.filter(t=>t.status==="up-to-date").length,changed:e.results.filter(t=>t.status==="changed").length,missing:e.results.filter(t=>t.status==="missing").length};if(e.results.length===0){v.line(`${O.info} No sources to sync.`);return}v.line(`${O.info} ${e.results.length} sources (${r.upToDate} up-to-date, ${r.changed} changed, ${r.missing} missing)`);for(const t of e.results){const i=v.hash(t.resolvedCommit),o=v.hash(t.lockCommit),s=!!t.lockRulesSha256&&!!t.rulesSha256&&t.lockRulesSha256!==t.rulesSha256;if(t.status==="up-to-date"){v.item(O.success,t.id,`${M.dim("up-to-date")} ${M.gray(i)}`);continue}if(t.status==="changed"){if(t.lockCommit===t.resolvedCommit&&s){v.item(O.warn,t.id,`${M.dim("rules changed")} ${M.gray(i)}`);continue}v.item(O.warn,t.id,`${M.dim("changed")} ${M.gray(o)} ${M.dim("->")} ${M.gray(i)}`);continue}v.item(O.warn,t.id,`${M.dim("missing")} ${M.gray(i)}`)}},Qe={__proto__:null,getSyncPlan:le,printSyncPlan:fe,runSync:ue};export{W as a,fe as b,ue as c,re as e,oe as p,I as r,Qe as s};
 //# sourceMappingURL=sync.mjs.map
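Beyond the chunk rename, this is the file with the most churn. From the new lines: `runSync` resolves each source's remote commit via `git ls-remote`, fetches changed or missing sources (trying `git archive --remote` first, then falling back to a blobless, optionally sparse clone), materializes matching files into the cache under `maxBytes`/`maxFiles` limits, links or copies `targetDir`s, verifies, and rewrites docs.lock. A sketch of the options the chunk reads; the bare `docs-cache` import specifier is an assumption:

```js
import { getSyncPlan, printSyncPlan, runSync } from "docs-cache"; // specifier assumed

const plan = await getSyncPlan({});
printSyncPlan(plan); // up-to-date / changed / missing per source

await runSync({
  sourceFilter: undefined, // or an array of source ids to limit the run
  concurrency: 4,          // the chunk's default worker count
  offline: false,          // true skips remote resolution and fetching
  lockOnly: false,         // true refreshes docs.lock without fetching
  failOnMiss: false,       // true throws when a required source is missing
  json: false,             // true suppresses human-readable progress output
});
```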
package/dist/chunks/verify.mjs
CHANGED
@@ -1,2 +1,2 @@
-import{stat as v,access as k}from"node:fs/promises";import g from"node:path";import{a as _,r as C,u as l,s as m}from"../shared/docs-cache.D9_kM5zq.mjs";import{l as E,b as j}from"../shared/docs-cache.
+import{stat as v,access as k}from"node:fs/promises";import g from"node:path";import{a as _,r as C,u as l,s as m}from"../shared/docs-cache.D9_kM5zq.mjs";import{l as E,b as j}from"../shared/docs-cache.D4Fth4X8.mjs";import{createReadStream as M}from"node:fs";import z from"node:readline";const d=".manifest.jsonl",A=async function*(s){const a=g.join(s,d),r=M(a,{encoding:"utf8"}),e=z.createInterface({input:r,crlfDelay:1/0});try{for await(const u of e){const f=u.trim();f&&(yield JSON.parse(f))}}finally{e.close(),r.destroy()}},p=async s=>{try{return await k(s),!0}catch{return!1}},w=async s=>{const{config:a,resolvedPath:r,sources:e}=await E(s.configPath),u=_(r,a.cacheDir??j,s.cacheDirOverride),f=async(i,o)=>{if(!await p(i))return{ok:!1,issues:[o==="source"?"missing source directory":"missing target directory"]};try{let t=0,n=0;for await(const h of A(i)){const y=g.join(i,h.path);if(!await p(y)){t+=1;continue}(await v(y)).size!==h.size&&(n+=1)}const c=[];return t>0&&c.push(o==="source"?`missing files: ${t}`:`target missing files: ${t}`),n>0&&c.push(o==="source"?`size mismatch: ${n}`:`target size mismatch: ${n}`),{ok:c.length===0,issues:c}}catch{return{ok:!1,issues:[o==="source"?"missing manifest":"missing target manifest"]}}},D=await Promise.all(e.map(async i=>{const o=g.join(u,i.id),t=[...(await f(o,"source")).issues];if(i.targetDir&&i.targetMode==="copy"){const n=C(r,i.targetDir),c=await f(n,"target");t.push(...c.issues)}return{id:i.id,ok:t.length===0,issues:t}}));return{cacheDir:u,results:D}},I=s=>{const a=s.results.filter(e=>e.ok).length,r=s.results.length-a;if(s.results.length===0){l.line(`${m.warn} No sources to verify.`);return}l.line(`${m.info} Verified ${s.results.length} sources (${a} ok, ${r} failed)`);for(const e of s.results)e.ok?l.item(m.success,e.id):l.item(m.warn,e.id,e.issues.join(", "))},N={__proto__:null,printVerify:I,verifyCache:w};export{d as M,N as a,w as v};
 //# sourceMappingURL=verify.mjs.map
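`verifyCache` streams each cached source's `.manifest.jsonl` (one `{"path","size"}` record per line) and re-checks that every listed file exists with the recorded size, both in the cache and in copy-mode target directories; sketch under the same import assumption:

```js
import { verifyCache, printVerify } from "docs-cache"; // import specifier is an assumption

const report = await verifyCache({});
printVerify(report);
// report.results[i]: { id, ok, issues } with issues such as
// "missing files: 2", "size mismatch: 1", or "missing manifest"
```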
package/dist/shared/docs-cache.D4Fth4X8.mjs
ADDED
@@ -0,0 +1,3 @@
+import{writeFile as I,readFile as z,access as T}from"node:fs/promises";import h from"node:path";import{z as o}from"zod";import{r as L}from"./docs-cache.D9_kM5zq.mjs";const $=o.enum(["symlink","copy"]),F=o.enum(["materialize"]),U=o.object({type:o.enum(["commit","manifest"]),value:o.string().nullable()}).strict(),_=o.object({ref:o.string().min(1),mode:F,include:o.array(o.string().min(1)).min(1),targetMode:$.optional(),depth:o.number().min(1),required:o.boolean(),maxBytes:o.number().min(1),maxFiles:o.number().min(1).optional(),allowHosts:o.array(o.string().min(1)).min(1)}).strict(),P=o.object({id:o.string().min(1),repo:o.string().min(1),targetDir:o.string().min(1).optional(),targetMode:$.optional(),ref:o.string().min(1).optional(),mode:F.optional(),depth:o.number().min(1).optional(),include:o.array(o.string().min(1)).optional(),exclude:o.array(o.string().min(1)).optional(),required:o.boolean().optional(),maxBytes:o.number().min(1).optional(),maxFiles:o.number().min(1).optional(),integrity:U.optional()}).strict(),J=o.object({$schema:o.string().min(1).optional(),cacheDir:o.string().min(1).optional(),targetMode:$.optional(),index:o.boolean().optional(),defaults:_.partial().optional(),sources:o.array(P)}).strict(),R=/^[a-zA-Z0-9_-]+$/,G=new Set([".","..","CON","PRN","AUX","NUL","COM1","LPT1"]),A=(e,r)=>{if(typeof e!="string"||e.length===0)throw new Error(`${r} must be a non-empty string.`);if(e.length>200)throw new Error(`${r} exceeds maximum length of 200.`);if(!R.test(e))throw new Error(`${r} must contain only alphanumeric characters, hyphens, and underscores.`);if(G.has(e.toUpperCase()))throw new Error(`${r} uses reserved name '${e}'.`);return e},j="docs.config.json",b=".docs",S="package.json",V=process.platform==="win32"?"copy":"symlink",f={cacheDir:b,index:!1,defaults:{ref:"HEAD",mode:"materialize",include:["**/*.{md,mdx,markdown,mkd,txt,rst,adoc,asciidoc}"],targetMode:V,depth:1,required:!0,maxBytes:2e8,allowHosts:["github.com","gitlab.com"]},sources:[]},X=(e,r)=>!e||!r?e===r:e.length!==r.length?!1:e.every((t,n)=>t===r[n]),C=e=>typeof e=="object"&&e!==null&&!Array.isArray(e),k=(e,r)=>{const t={};for(const[n,s]of Object.entries(e)){const d=r[n];if(Array.isArray(s)&&Array.isArray(d)){X(s,d)||(t[n]=s);continue}if(C(s)&&C(d)){const a=k(s,d);Object.keys(a).length>0&&(t[n]=a);continue}s!==d&&(t[n]=s)}return t},Z=e=>{const r={...f,$schema:e.$schema,defaults:{...f.defaults,...e.targetMode?{targetMode:e.targetMode}:void 0}},t=k(e,r),n={$schema:t.$schema,cacheDir:t.cacheDir,index:t.index,targetMode:t.targetMode,defaults:t.defaults,sources:e.sources};return(!n.defaults||Object.keys(n.defaults).length===0)&&delete n.defaults,n},p=e=>typeof e=="object"&&e!==null&&!Array.isArray(e),m=(e,r)=>{if(typeof e!="string"||e.length===0)throw new Error(`${r} must be a non-empty string.`);return e},v=(e,r)=>{if(typeof e!="boolean")throw new Error(`${r} must be a boolean.`);return e},K=(e,r)=>{if(typeof e!="number"||Number.isNaN(e))throw new Error(`${r} must be a number.`);return e},g=(e,r)=>{const t=K(e,r);if(t<1)throw new Error(`${r} must be greater than zero.`);return t},y=(e,r)=>{if(!Array.isArray(e)||e.length===0)throw new Error(`${r} must be a non-empty array of strings.`);for(const t of e)if(typeof t!="string"||t.length===0)throw new Error(`${r} must contain non-empty strings.`);return e},q=(e,r)=>{const t=m(e,r);if(t!=="symlink"&&t!=="copy")throw new Error(`${r} must be "symlink" or "copy".`);return t},B=(e,r)=>{if(e!=="materialize")throw new Error(`${r} must be "materialize".`);return e},Q=(e,r)=>{if(!p(e))throw new Error(`${r} must be an object.`);const t=e.type;if(t!=="commit"&&t!=="manifest")throw new Error(`${r}.type must be "commit" or "manifest".`);const n=e.value;if(typeof n!="string"&&n!==null)throw new Error(`${r}.value must be a string or null.`);return{type:t,value:n}},N=e=>{if(!p(e))throw new Error("Config must be a JSON object.");const r=J.safeParse(e);if(!r.success){const i=r.error.issues.map(c=>`${c.path.join(".")||"config"} ${c.message}`).join("; ");throw new Error(`Config does not match schema: ${i}.`)}const t=e.cacheDir?m(e.cacheDir,"cacheDir"):b,n=e.index!==void 0?v(e.index,"index"):f.index??!1,s=e.defaults,d=e.targetMode!==void 0?q(e.targetMode,"targetMode"):void 0,a=f.defaults;let l=a;if(s!==void 0){if(!p(s))throw new Error("defaults must be an object.");l={ref:s.ref!==void 0?m(s.ref,"defaults.ref"):a.ref,mode:s.mode!==void 0?B(s.mode,"defaults.mode"):a.mode,include:s.include!==void 0?y(s.include,"defaults.include"):a.include,targetMode:s.targetMode!==void 0?q(s.targetMode,"defaults.targetMode"):d??a.targetMode,depth:s.depth!==void 0?g(s.depth,"defaults.depth"):a.depth,required:s.required!==void 0?v(s.required,"defaults.required"):a.required,maxBytes:s.maxBytes!==void 0?g(s.maxBytes,"defaults.maxBytes"):a.maxBytes,maxFiles:s.maxFiles!==void 0?g(s.maxFiles,"defaults.maxFiles"):a.maxFiles,allowHosts:s.allowHosts!==void 0?y(s.allowHosts,"defaults.allowHosts"):a.allowHosts}}else d!==void 0&&(l={...a,targetMode:d});if(!Array.isArray(e.sources))throw new Error("sources must be an array.");const M=e.sources.map((i,c)=>{if(!p(i))throw new Error(`sources[${c}] must be an object.`);const u={id:A(i.id,`sources[${c}].id`),repo:m(i.repo,`sources[${c}].repo`)};if(i.targetDir!==void 0&&(u.targetDir=m(i.targetDir,`sources[${c}].targetDir`)),i.targetMode!==void 0){const x=m(i.targetMode,`sources[${c}].targetMode`);if(x!=="symlink"&&x!=="copy")throw new Error(`sources[${c}].targetMode must be "symlink" or "copy".`);u.targetMode=x}return i.ref!==void 0&&(u.ref=m(i.ref,`sources[${c}].ref`)),i.mode!==void 0&&(u.mode=B(i.mode,`sources[${c}].mode`)),i.depth!==void 0&&(u.depth=g(i.depth,`sources[${c}].depth`)),i.include!==void 0&&(u.include=y(i.include,`sources[${c}].include`)),i.exclude!==void 0&&(u.exclude=y(i.exclude,`sources[${c}].exclude`)),i.required!==void 0&&(u.required=v(i.required,`sources[${c}].required`)),i.maxBytes!==void 0&&(u.maxBytes=g(i.maxBytes,`sources[${c}].maxBytes`)),i.maxFiles!==void 0&&(u.maxFiles=g(i.maxFiles,`sources[${c}].maxFiles`)),i.integrity!==void 0&&(u.integrity=Q(i.integrity,`sources[${c}].integrity`)),u}),D=new Set,w=[];for(const i of M)D.has(i.id)&&w.push(i.id),D.add(i.id);if(w.length>0)throw new Error(`Duplicate source IDs found: ${w.join(", ")}. Each source must have a unique ID.`);return{cacheDir:t,targetMode:d,index:n,defaults:l,sources:M}},W=e=>{const r=e.defaults??f.defaults;return e.sources.map(t=>({id:t.id,repo:t.repo,targetDir:t.targetDir,targetMode:t.targetMode??r.targetMode,ref:t.ref??r.ref,mode:t.mode??r.mode,depth:t.depth??r.depth,include:t.include??r.include,exclude:t.exclude,required:t.required??r.required,maxBytes:t.maxBytes??r.maxBytes,maxFiles:t.maxFiles??r.maxFiles,integrity:t.integrity}))},O=e=>e?h.resolve(e):h.resolve(process.cwd(),j),Y=()=>h.resolve(process.cwd(),S),H=async e=>{try{return await T(e),!0}catch{return!1}},E=async(e,r)=>{let t;try{t=await z(e,"utf8")}catch(a){const l=a instanceof Error?a.message:String(a);throw new Error(`Failed to read config at ${e}: ${l}`)}let n;try{n=JSON.parse(t)}catch(a){const l=a instanceof Error?a.message:String(a);throw new Error(`Invalid JSON in ${e}: ${l}`)}const s=r==="package"?n?.["docs-cache"]:n;if(r==="package"&&s===void 0)throw new Error(`Missing docs-cache config in ${e}.`);const d=N(s);for(const a of d.sources)a.targetDir&&L(e,a.targetDir);return{config:d,resolvedPath:e,sources:W(d)}},ee=async(e,r)=>{const t=`${JSON.stringify(r,null,2)}
+`;await I(e,t,"utf8")},re=async e=>{const r=O(e),t=h.basename(r)===S;if(e)return E(r,t?"package":"config");if(await H(r))return E(r,"config");const n=Y();if(await H(n))try{return await E(n,"package")}catch{}throw new Error(`No docs.config.json found at ${r} and no docs-cache config in ${n}.`)};export{f as D,A as a,b,j as c,re as l,O as r,Z as s,N as v,ee as w};
+//# sourceMappingURL=docs-cache.D4Fth4X8.mjs.map
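This renamed shared chunk holds the zod config schema plus the built-in defaults. For reference, a config object spelled out with every default taken from the chunk itself (the source entry is hypothetical; only id and repo are required per source):

```js
const config = {
  $schema: "https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json",
  cacheDir: ".docs",
  index: false, // when true, sync writes an index.json summary
  defaults: {
    ref: "HEAD",
    mode: "materialize",
    include: ["**/*.{md,mdx,markdown,mkd,txt,rst,adoc,asciidoc}"],
    targetMode: process.platform === "win32" ? "copy" : "symlink",
    depth: 1,
    required: true,
    maxBytes: 2e8,
    allowHosts: ["github.com", "gitlab.com"],
  },
  sources: [
    // ids must match /^[a-zA-Z0-9_-]+$/, stay at or under 200 chars, and avoid
    // reserved names (".", "..", "CON", "PRN", "AUX", "NUL", "COM1", "LPT1")
    { id: "example-docs", repo: "https://github.com/owner/repo" }, // hypothetical
  ],
};
```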
package/package.json
CHANGED
@@ ... @@
-  "version": "0.1.2",
+  "version": "0.1.3",

package/dist/shared/docs-cache.goBsJvLg.mjs
REMOVED
@@ -1,3 +0,0 @@
-import{writeFile as H,readFile as I,access as O}from"node:fs/promises";import g from"node:path";import{z as o}from"zod";import{r as z}from"./docs-cache.D9_kM5zq.mjs";const $=o.enum(["symlink","copy"]),F=o.enum(["materialize"]),T=o.object({type:o.enum(["commit","manifest"]),value:o.string().nullable()}).strict(),L=o.object({ref:o.string().min(1),mode:F,include:o.array(o.string().min(1)).min(1),targetMode:$.optional(),depth:o.number().min(1),required:o.boolean(),maxBytes:o.number().min(1),maxFiles:o.number().min(1).optional(),allowHosts:o.array(o.string().min(1)).min(1)}).strict(),U=o.object({id:o.string().min(1),repo:o.string().min(1),targetDir:o.string().min(1).optional(),targetMode:$.optional(),ref:o.string().min(1).optional(),mode:F.optional(),depth:o.number().min(1).optional(),include:o.array(o.string().min(1)).optional(),exclude:o.array(o.string().min(1)).optional(),required:o.boolean().optional(),maxBytes:o.number().min(1).optional(),maxFiles:o.number().min(1).optional(),integrity:T.optional()}).strict(),_=o.object({$schema:o.string().min(1).optional(),cacheDir:o.string().min(1).optional(),targetMode:$.optional(),index:o.boolean().optional(),defaults:L.partial().optional(),sources:o.array(U)}).strict(),P=/^[a-zA-Z0-9_-]+$/,J=new Set([".","..","CON","PRN","AUX","NUL","COM1","LPT1"]),S=(e,r)=>{if(typeof e!="string"||e.length===0)throw new Error(`${r} must be a non-empty string.`);if(e.length>200)throw new Error(`${r} exceeds maximum length of 200.`);if(!P.test(e))throw new Error(`${r} must contain only alphanumeric characters, hyphens, and underscores.`);if(J.has(e.toUpperCase()))throw new Error(`${r} uses reserved name '${e}'.`);return e},j="docs.config.json",b=".docs",q="package.json",G=process.platform==="win32"?"copy":"symlink",p={cacheDir:b,index:!1,defaults:{ref:"HEAD",mode:"materialize",include:["**/*.{md,mdx,markdown,mkd,txt,rst,adoc,asciidoc}"],targetMode:G,depth:1,required:!0,maxBytes:2e8,allowHosts:["github.com","gitlab.com"]},sources:[]},h=e=>typeof e=="object"&&e!==null&&!Array.isArray(e),l=(e,r)=>{if(typeof e!="string"||e.length===0)throw new Error(`${r} must be a non-empty string.`);return e},v=(e,r)=>{if(typeof e!="boolean")throw new Error(`${r} must be a boolean.`);return e},R=(e,r)=>{if(typeof e!="number"||Number.isNaN(e))throw new Error(`${r} must be a number.`);return e},f=(e,r)=>{const t=R(e,r);if(t<1)throw new Error(`${r} must be greater than zero.`);return t},w=(e,r)=>{if(!Array.isArray(e)||e.length===0)throw new Error(`${r} must be a non-empty array of strings.`);for(const t of e)if(typeof t!="string"||t.length===0)throw new Error(`${r} must contain non-empty strings.`);return e},B=(e,r)=>{const t=l(e,r);if(t!=="symlink"&&t!=="copy")throw new Error(`${r} must be "symlink" or "copy".`);return t},C=(e,r)=>{if(e!=="materialize")throw new Error(`${r} must be "materialize".`);return e},X=(e,r)=>{if(!h(e))throw new Error(`${r} must be an object.`);const t=e.type;if(t!=="commit"&&t!=="manifest")throw new Error(`${r}.type must be "commit" or "manifest".`);const c=e.value;if(typeof c!="string"&&c!==null)throw new Error(`${r}.value must be a string or null.`);return{type:t,value:c}},N=e=>{if(!h(e))throw new Error("Config must be a JSON object.");const r=_.safeParse(e);if(!r.success){const s=r.error.issues.map(n=>`${n.path.join(".")||"config"} ${n.message}`).join("; ");throw new Error(`Config does not match schema: ${s}.`)}const t=e.cacheDir?l(e.cacheDir,"cacheDir"):b,c=e.index!==void 0?v(e.index,"index"):p.index??!1,a=e.defaults,u=e.targetMode!==void 0?B(e.targetMode,"targetMode"):void 0,i=p.defaults;let m=i;if(a!==void 0){if(!h(a))throw new Error("defaults must be an object.");m={ref:a.ref!==void 0?l(a.ref,"defaults.ref"):i.ref,mode:a.mode!==void 0?C(a.mode,"defaults.mode"):i.mode,include:a.include!==void 0?w(a.include,"defaults.include"):i.include,targetMode:a.targetMode!==void 0?B(a.targetMode,"defaults.targetMode"):u??i.targetMode,depth:a.depth!==void 0?f(a.depth,"defaults.depth"):i.depth,required:a.required!==void 0?v(a.required,"defaults.required"):i.required,maxBytes:a.maxBytes!==void 0?f(a.maxBytes,"defaults.maxBytes"):i.maxBytes,maxFiles:a.maxFiles!==void 0?f(a.maxFiles,"defaults.maxFiles"):i.maxFiles,allowHosts:a.allowHosts!==void 0?w(a.allowHosts,"defaults.allowHosts"):i.allowHosts}}else u!==void 0&&(m={...i,targetMode:u});if(!Array.isArray(e.sources))throw new Error("sources must be an array.");const M=e.sources.map((s,n)=>{if(!h(s))throw new Error(`sources[${n}] must be an object.`);const d={id:S(s.id,`sources[${n}].id`),repo:l(s.repo,`sources[${n}].repo`)};if(s.targetDir!==void 0&&(d.targetDir=l(s.targetDir,`sources[${n}].targetDir`)),s.targetMode!==void 0){const x=l(s.targetMode,`sources[${n}].targetMode`);if(x!=="symlink"&&x!=="copy")throw new Error(`sources[${n}].targetMode must be "symlink" or "copy".`);d.targetMode=x}return s.ref!==void 0&&(d.ref=l(s.ref,`sources[${n}].ref`)),s.mode!==void 0&&(d.mode=C(s.mode,`sources[${n}].mode`)),s.depth!==void 0&&(d.depth=f(s.depth,`sources[${n}].depth`)),s.include!==void 0&&(d.include=w(s.include,`sources[${n}].include`)),s.exclude!==void 0&&(d.exclude=w(s.exclude,`sources[${n}].exclude`)),s.required!==void 0&&(d.required=v(s.required,`sources[${n}].required`)),s.maxBytes!==void 0&&(d.maxBytes=f(s.maxBytes,`sources[${n}].maxBytes`)),s.maxFiles!==void 0&&(d.maxFiles=f(s.maxFiles,`sources[${n}].maxFiles`)),s.integrity!==void 0&&(d.integrity=X(s.integrity,`sources[${n}].integrity`)),d}),D=new Set,y=[];for(const s of M)D.has(s.id)&&y.push(s.id),D.add(s.id);if(y.length>0)throw new Error(`Duplicate source IDs found: ${y.join(", ")}. Each source must have a unique ID.`);return{cacheDir:t,targetMode:u,index:c,defaults:m,sources:M}},Z=e=>{const r=e.defaults??p.defaults;return e.sources.map(t=>({id:t.id,repo:t.repo,targetDir:t.targetDir,targetMode:t.targetMode??r.targetMode,ref:t.ref??r.ref,mode:t.mode??r.mode,depth:t.depth??r.depth,include:t.include??r.include,exclude:t.exclude,required:t.required??r.required,maxBytes:t.maxBytes??r.maxBytes,maxFiles:t.maxFiles??r.maxFiles,integrity:t.integrity}))},A=e=>e?g.resolve(e):g.resolve(process.cwd(),j),K=()=>g.resolve(process.cwd(),q),k=async e=>{try{return await O(e),!0}catch{return!1}},E=async(e,r)=>{let t;try{t=await I(e,"utf8")}catch(i){const m=i instanceof Error?i.message:String(i);throw new Error(`Failed to read config at ${e}: ${m}`)}let c;try{c=JSON.parse(t)}catch(i){const m=i instanceof Error?i.message:String(i);throw new Error(`Invalid JSON in ${e}: ${m}`)}const a=r==="package"?c?.["docs-cache"]:c;if(r==="package"&&a===void 0)throw new Error(`Missing docs-cache config in ${e}.`);const u=N(a);for(const i of u.sources)i.targetDir&&z(e,i.targetDir);return{config:u,resolvedPath:e,sources:Z(u)}},Q=async(e,r)=>{const t=`${JSON.stringify(r,null,2)}
-`;await H(e,t,"utf8")},V=async e=>{const r=A(e),t=g.basename(r)===q;if(e)return E(r,t?"package":"config");if(await k(r))return E(r,"config");const c=K();if(await k(c))try{return await E(c,"package")}catch{}throw new Error(`No docs.config.json found at ${r} and no docs-cache config in ${c}.`)};export{p as D,S as a,b,j as c,V as l,A as r,N as v,Q as w};
-//# sourceMappingURL=docs-cache.goBsJvLg.mjs.map