@inkeep/open-knowledge 0.0.0-dev-20260421165805
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/banner-BxlLKtcG.mjs +3 -0
- package/dist/chokidar-7l0uFWrj.mjs +4 -0
- package/dist/cli.d.mts +1 -0
- package/dist/cli.mjs +595 -0
- package/dist/colors-BNvy_pwG.mjs +1 -0
- package/dist/colors-eCHTjaBL.mjs +2 -0
- package/dist/constants-Dx_MrCyB.mjs +2 -0
- package/dist/git-handle-B3_Ztqi2.mjs +1 -0
- package/dist/git-handle-CQtGq94I.mjs +2 -0
- package/dist/index.d.mts +86 -0
- package/dist/index.mjs +1 -0
- package/dist/init-8JV3QQNU.mjs +230 -0
- package/dist/init-B0G3IY-y.mjs +1 -0
- package/dist/init-DihXGBlf.mjs +1 -0
- package/dist/init-UYi5s8Q6.mjs +5 -0
- package/dist/is-object-DVVYT5oa.mjs +2 -0
- package/dist/keepalive-D-FSaNO6.mjs +2 -0
- package/dist/loader-DqrtMiNp.mjs +3 -0
- package/dist/loader-oJkH9tG6.mjs +1 -0
- package/dist/open-browser-DhlJOpN3.mjs +2 -0
- package/dist/paths-DRfvViD6.mjs +1 -0
- package/dist/paths-vKL8Dnp_.mjs +2 -0
- package/dist/preview-2OHXLW85.mjs +1 -0
- package/dist/preview-DsPtfetJ.mjs +3 -0
- package/dist/public/assets/dist-4KUZ8qw_.js +1 -0
- package/dist/public/assets/dist-Bvlmbwcy.js +1 -0
- package/dist/public/assets/dist-CWsgPYc6.js +13 -0
- package/dist/public/assets/dist-D5yBWykF.js +1 -0
- package/dist/public/assets/dist-DcQNS1eS.js +1 -0
- package/dist/public/assets/dist-DvCIPVI0.js +9 -0
- package/dist/public/assets/dist-csWIvcml.js +1 -0
- package/dist/public/assets/dist-saWP7AQm.js +1 -0
- package/dist/public/assets/go-B8kgVkyB.js +1 -0
- package/dist/public/assets/index-Cm78wxqN.js +381 -0
- package/dist/public/assets/index-cpkNCk7-.css +1 -0
- package/dist/public/assets/inter-cyrillic-ext-wght-normal-BOeWTOD4.woff2 +0 -0
- package/dist/public/assets/inter-cyrillic-wght-normal-DqGufNeO.woff2 +0 -0
- package/dist/public/assets/inter-greek-ext-wght-normal-DlzME5K_.woff2 +0 -0
- package/dist/public/assets/inter-greek-wght-normal-CkhJZR-_.woff2 +0 -0
- package/dist/public/assets/inter-latin-ext-wght-normal-DO1Apj_S.woff2 +0 -0
- package/dist/public/assets/inter-latin-wght-normal-Dx4kXJAl.woff2 +0 -0
- package/dist/public/assets/inter-vietnamese-wght-normal-CBcvBZtf.woff2 +0 -0
- package/dist/public/assets/jetbrains-mono-cyrillic-wght-normal-D73BlboJ.woff2 +0 -0
- package/dist/public/assets/jetbrains-mono-greek-wght-normal-Bw9x6K1M.woff2 +0 -0
- package/dist/public/assets/jetbrains-mono-latin-ext-wght-normal-DBQx-q_a.woff2 +0 -0
- package/dist/public/assets/jetbrains-mono-latin-wght-normal-B9CIFXIH.woff2 +0 -0
- package/dist/public/assets/jetbrains-mono-vietnamese-wght-normal-Bt-aOZkq.woff2 +0 -0
- package/dist/public/assets/shell-BsPlf5L6.js +1 -0
- package/dist/public/favicon.svg +5 -0
- package/dist/public/index.html +21 -0
- package/dist/server-lock-ChItwD14.mjs +2 -0
- package/dist/server-lock-nls6eoZ5.mjs +1 -0
- package/dist/src-9qc50QcN.mjs +93 -0
- package/dist/src-ByMiIIub.mjs +1 -0
- package/dist/src-Ds19hvLH.mjs +161 -0
- package/dist/src-DvkXxOes.mjs +1 -0
- package/dist/start-CfpWFTzI.mjs +2 -0
- package/dist/start-DsMb_t62.mjs +1 -0
- package/package.json +74 -0
package/dist/cli.mjs
ADDED
|
@@ -0,0 +1,595 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import{D as e,E as t,s as n}from"./src-Ds19hvLH.mjs";import{a as r,i}from"./constants-Dx_MrCyB.mjs";import{n as a,t as o}from"./paths-vKL8Dnp_.mjs";import{G as s,H as c,U as l,W as u,X as d,l as f}from"./src-9qc50QcN.mjs";import{c as p,f as m,r as h}from"./server-lock-ChItwD14.mjs";import{i as g}from"./init-8JV3QQNU.mjs";import{n as _}from"./colors-eCHTjaBL.mjs";import{t as v}from"./is-object-DVVYT5oa.mjs";import{r as y}from"./init-UYi5s8Q6.mjs";import{a as b,o as x}from"./start-CfpWFTzI.mjs";import{t as S}from"./loader-DqrtMiNp.mjs";import"./src-DvkXxOes.mjs";import{Command as C}from"commander";import{closeSync as w,existsSync as T,mkdirSync as E,openSync as D,readFileSync as O,readdirSync as ee,statSync as k,unlinkSync as A,writeFileSync as te}from"node:fs";import{homedir as ne,hostname as re}from"node:os";import{basename as ie,dirname as ae,isAbsolute as oe,join as se,relative as ce,resolve as j}from"node:path";import{parse as le,stringify as ue}from"yaml";import{createOAuthDeviceAuth as de}from"@octokit/auth-oauth-device";import fe from"@inquirer/password";import{Octokit as pe}from"@octokit/rest";import{fileURLToPath as me}from"node:url";import{randomUUID as he}from"node:crypto";import{execFileSync as ge,spawn as _e}from"node:child_process";import M from"simple-git";import{readFile as ve,readdir as ye,stat as be}from"node:fs/promises";import{createServer as xe,request as Se}from"node:http";import Ce from"picomatch";import{McpServer as we}from"@modelcontextprotocol/sdk/server/mcp.js";import{StdioServerTransport as Te}from"@modelcontextprotocol/sdk/server/stdio.js";import{RootsListChangedNotificationSchema as Ee}from"@modelcontextprotocol/sdk/types.js";import{z as N}from"zod";import{Bash as De,ReadWriteFs as Oe}from"just-bash";import ke from"shell-quote";const Ae=`open-knowledge`;var je=class{backend=`keyring`;async get(e){let{Entry:t}=await import(`@napi-rs/keyring`);try{let n=new t(Ae,e).getPassword();return n==null?null:JSON.parse(n)}catch{return null}}async 
set(e,t,n,r){let{Entry:i}=await import(`@napi-rs/keyring`),a=new i(Ae,e),o={login:t,token:n,...r};a.setPassword(JSON.stringify(o))}async clear(e){let{Entry:t}=await import(`@napi-rs/keyring`);try{new t(Ae,e).deletePassword()}catch{}}},Me=class{backend=`file`;authFile;constructor(e){this.authFile=e??se(ne(),`.open-knowledge`,`auth.yml`)}read(){if(!T(this.authFile))return{};try{return le(O(this.authFile,`utf-8`))??{}}catch(e){let t=e instanceof Error?e.message:`unknown error`;return process.stderr.write(`[auth] Failed to parse ${this.authFile}: ${t}. Starting with empty credentials.\n`),{}}}write(e){let t=ae(this.authFile);T(t)||E(t,{recursive:!0,mode:448}),te(this.authFile,ue(e),{mode:384})}async get(e){return this.read()[e]??null}async set(e,t,n,r){let i=this.read();i[e]={login:t,token:n,...r},this.write(i)}async clear(e){let t=this.read();delete t[e],this.write(t)}};async function Ne(e){try{let{Entry:e}=await import(`@napi-rs/keyring`);return new e(Ae,`__probe__`),process.stderr.write(`[auth] token storage: OS keychain
|
|
3
|
+
`),new je}catch{return process.stderr.write(`[auth] token storage: file (~/.open-knowledge/auth.yml)
|
|
4
|
+
`),new Me(e)}}async function Pe(e,t,n){let r=Fe(await Ie(e)).host??``;if(!r)return 1;let i=await n.get(r);if(i==null)return 1;let a=e=>e.replace(/[\r\n]/g,``);return t.write(`username=${a(i.login)}\npassword=${a(i.token)}\n`),0}function Fe(e){let t={};for(let n of e.split(`
|
|
5
|
+
`)){let e=n.trim();if(e===``)continue;let r=e.indexOf(`=`);r!==-1&&(t[e.slice(0,r)]=e.slice(r+1))}return t}function Ie(e){return new Promise((t,n)=>{let r=[];e.on(`data`,e=>r.push(e)),e.on(`end`,()=>t(Buffer.concat(r).toString(`utf-8`))),e.on(`error`,n)})}function Le(e){let t=new C(`git-credential`);return t.description(`Git credential helper (git credential-helper protocol)`),t.command(`get`).description(`Lookup credentials from TokenStore (called by git)`).action(async()=>{let t=await e(),n=await Pe(process.stdin,process.stdout,t);process.exit(n)}),t}async function Re(e){let{clientId:t,scopes:n=[`repo`,`read:user`,`user:email`],onVerification:r,host:i}=e,a=i&&i!==`github.com`?`https://${i}/api/v3`:`https://api.github.com`,o=de({clientType:`oauth-app`,clientId:t,scopes:n,onVerification:async e=>{await r({verificationUri:e.verification_uri,userCode:e.user_code,expiresIn:e.expires_in,interval:e.interval})},request:a===`https://api.github.com`?void 0:(await import(`@octokit/request`)).request.defaults({baseUrl:a})}),s;try{s=await o({type:`oauth`})}catch(e){if(e instanceof Error){let t=e.message.toLowerCase();throw t.includes(`access_denied`)?Error(`Device-flow authorization was denied.`):t.includes(`expired_token`)||t.includes(`timeout`)||t.includes(`timed out`)?Error(`Device-flow code expired before authorization — please try again.`):Error(`GitHub sign-in failed: ${e.message}`)}throw e}return{token:s.token,tokenType:s.tokenType,scopes:s.scopes??[]}}function ze(e){return process.env.OPEN_KNOWLEDGE_GITHUB_CLIENT_ID??e?.github?.oauthAppClientId??`Ov23liqlSd0V1MwR6rhI`}const Be=new Set([`gitlab.com`,`bitbucket.org`,`codeberg.org`,`gitea.com`,`sr.ht`,`sourcehut.org`]);function Ve(e){let t=e.toLowerCase().replace(/:\d+$/,``);Be.has(t)&&(process.stderr.write(`Error: ${e} is not a GitHub host. 
Only GitHub and GitHub Enterprise Server are supported.\n`),process.exit(1))}function He(e,t){e&&process.stdout.write(`${JSON.stringify(t)}\n`)}async function Ue(e,t,n,r=Re){let i=ze(n),{host:a,json:o}=e;Ve(a),o||process.stderr.write(`Logging in to ${a}…\n`);let s=await r({clientId:i,host:a===`github.com`?void 0:a,onVerification:e=>{e.userCode,e.verificationUri,o?He(!0,{type:`verification`,user_code:e.userCode,verification_uri:e.verificationUri,expires_in:e.expiresIn}):process.stderr.write(`Open: ${e.verificationUri}\nEnter code: ${e.userCode}\n`)}}),c=`unknown`,l,u;try{let e=a===`github.com`?`https://api.github.com`:`https://${a}/api/v3`,t=await fetch(`${e}/user`,{headers:{Authorization:`Bearer ${s.token}`,"User-Agent":`open-knowledge-cli`,Accept:`application/vnd.github+json`}});if(t.ok){let e=await t.json();c=e.login??c,l=e.name??void 0,u=e.email??void 0}}catch{}await t.set(a,c,s.token,{gitProtocol:`https`,name:l,email:u}),o?He(!0,{type:`complete`,host:a,login:c}):process.stderr.write(`✓ Logged in as ${c} on ${a}\n`)}function We(e,t){return new C(`login`).description(`Authenticate with GitHub via Device Flow`).option(`--host <host>`,`GitHub or GitHub Enterprise hostname`,`github.com`).option(`--json`,`Output JSONL progress events`,!1).action(async n=>{await Ue(n,await t(),e())})}async function Ge(e,t,n){let{host:r,json:i}=e;Ve(r);let a=await(n??(()=>fe({message:`Enter PAT:`})))();a||(process.stderr.write(`No token provided
|
|
6
|
+
`),process.exit(1));let o=r===`github.com`?void 0:`https://${r}/api/v3`,s=new pe({auth:a,...o?{baseUrl:o}:{}}),c=`unknown`,l,u;try{let{data:e}=await s.users.getAuthenticated();c=e.login,l=e.name??void 0,u=e.email??void 0}catch{process.stderr.write(`Token validation failed
|
|
7
|
+
`),process.exit(1)}await t.set(r,c,a,{gitProtocol:`https`,name:l,email:u}),i?process.stdout.write(`${JSON.stringify({type:`complete`,host:r,login:c})}\n`):process.stderr.write(`✓ PAT stored for ${c} on ${r}\n`)}function Ke(e){return new C(`pat`).description(`Store a Personal Access Token`).option(`--host <host>`,`GitHub or GitHub Enterprise hostname`,`github.com`).option(`--json`,`Output JSON`,!1).action(async t=>{await Ge(t,await e())})}async function qe(e,t){let{host:n,json:r}=e;Ve(n);let i=await t.get(n);i??(process.stderr.write(`Not logged in to ${n}\n`),process.exit(1));let a=n===`github.com`?void 0:`https://${n}/api/v3`,o=new pe({auth:i.token,...a?{baseUrl:a}:{}}),s=[];for await(let e of o.paginate.iterator(o.repos.listForAuthenticatedUser,{per_page:100,sort:`updated`}))for(let t of e.data)s.push({full_name:t.full_name,clone_url:t.clone_url,private:t.private});if(r)process.stdout.write(`${JSON.stringify({type:`repos`,host:n,repos:s})}\n`);else for(let e of s)process.stdout.write(`${e.full_name} ${e.clone_url}\n`)}function Je(e){return new C(`repos`).description(`List accessible repositories`).option(`--host <host>`,`GitHub or GitHub Enterprise hostname`,`github.com`).option(`--json`,`Output JSON`,!1).action(async t=>{await qe(t,await e())})}async function Ye(e,t){let{host:n}=e;await t.clear(n),process.stderr.write(`✓ Signed out from ${n}\n`)}function Xe(e){return new C(`signout`).description(`Remove stored credentials`).option(`--host <host>`,`GitHub hostname`,`github.com`).action(async t=>{await Ye(t,await e())})}async function Ze(e,t){let{host:n,json:r}=e;Ve(n);let i=await t.get(n);i??(r?process.stdout.write(`${JSON.stringify({type:`status`,host:n,authenticated:!1})}\n`):process.stderr.write(`Not logged in to ${n}\n`),process.exit(1));let a=n===`github.com`?void 0:`https://${n}/api/v3`,o=new pe({auth:i.token,...a?{baseUrl:a}:{}});try{let{data:e}=await 
o.users.getAuthenticated();r?process.stdout.write(`${JSON.stringify({type:`status`,host:n,authenticated:!0,login:e.login,name:e.name,email:e.email})}\n`):process.stderr.write(`✓ Logged in as ${e.login} on ${n}\n`)}catch{r?process.stdout.write(JSON.stringify({type:`status`,host:n,authenticated:!1,error:`token invalid`})+`
|
|
8
|
+
`):process.stderr.write(`✗ Token invalid for ${n}\n`),process.exit(1)}}function Qe(e){return new C(`status`).description(`Show authentication status`).option(`--host <host>`,`GitHub or GitHub Enterprise hostname`,`github.com`).option(`--json`,`Output JSON`,!1).action(async t=>{await Ze(t,await e())})}function $e(e){let t=new C(`auth`);t.description(`GitHub authentication management`);let n=()=>Ne(),r=e??(()=>({}));return t.addCommand(We(r,n)),t.addCommand(Qe(n)),t.addCommand(Je(n)),t.addCommand(Xe(n)),t.addCommand(Ke(n)),t.addCommand(Le(n)),t}function et(e,t,n={}){let r=p(e,t);if(!T(r))return{status:`missing`,lockPath:r};let i;try{i=JSON.parse(O(r,`utf-8`))}catch{return{status:`corrupt`,lockPath:r}}if(!i||typeof i!=`object`||typeof i.pid!=`number`)return{status:`corrupt`,lockPath:r};let a=i,o=n.host??re();return a.hostname===o?(n.isAlive??m)(a.pid)?{status:`alive`,lockPath:r,lock:a}:{status:`dead-pid`,lockPath:r,lock:a}:{status:`foreign-host`,lockPath:r,lock:a}}function tt(e,t){let n=[];for(let[r,i]of[[`server`,e],[`ui`,t]])(i.status===`dead-pid`||i.status===`corrupt`)&&n.push({name:r,lockPath:i.lockPath,reason:i.status});return{prune:n}}function nt(e){let t=e.inspect??(t=>et(e.lockDir,t)),n=e.unlink??(e=>A(e)),r=e.log??(e=>console.log(e)),i=e.error??(e=>console.error(e)),a=tt(t(`server`),t(`ui`));if(a.prune.length===0)return r(`No stale locks.`),{pruned:[],failed:[]};let o=[],s=[];for(let e of a.prune)try{n(e.lockPath),o.push(e)}catch(t){s.push({target:e,error:t instanceof Error?t.message:String(t)})}if(o.length>0){let e=o.map(e=>`${e.name} (${e.reason})`).join(`, `);r(`Pruned ${o.length} stale lock${o.length===1?``:`s`}: ${e}`)}return s.length>0&&i(`Failed to prune: ${s.map(({target:e,error:t})=>`${e.name} (${e.lockPath}): ${t}`).join(`; `)}`),{pruned:o,failed:s}}function rt(e){return new C(`clean`).description(`Prune stale / corrupt open-knowledge lock files (never touches live 
locks)`).action(()=>{nt({lockDir:a(o(e(),process.cwd()))}).failed.length>0&&(process.exitCode=1)})}function it(){try{let e=ge(`gh`,[`auth`,`token`],{encoding:`utf-8`,stdio:[`ignore`,`pipe`,`pipe`],timeout:5e3}).trim();return e.length===0?{available:!1}:{available:!0,token:e}}catch{return{available:!1}}}async function at(e,t,n={},r=it){if(!n.skipGhDetect&&r().available)return{tier:`A`,credentialArgs:[`-c`,`credential.helper=!gh auth git-credential`]};let i=await t.get(e);return i==null?{tier:`none`,credentialArgs:[]}:{tier:i.gitProtocol===`ssh`?`C`:`B`,credentialArgs:[`-c`,`credential.helper=!open-knowledge auth git-credential`]}}function ot(e){return e.replace(/:\d+$/,``)}function st(e){let t=e.trim();if(!t)return null;{let e=/^https?:\/\/([^/?#]+)\/([\w.\-~%]+)\/([\w.\-~%]+?)(?:\.git)?\/?$/.exec(t);if(e)return{protocol:`https`,hostname:ot(e[1]),owner:e[2],name:e[3]}}{let e=/^ssh:\/\/(?:[\w.-]+@)?([^/?#]+)\/([\w.\-~%]+)\/([\w.\-~%]+?)(?:\.git)?\/?$/.exec(t);if(e)return{protocol:`ssh`,hostname:ot(e[1]),owner:e[2],name:e[3]}}{let e=/^git:\/\/([^/?#]+)\/([\w.\-~%]+)\/([\w.\-~%]+?)(?:\.git)?\/?$/.exec(t);if(e)return{protocol:`git`,hostname:ot(e[1]),owner:e[2],name:e[3]}}{let e=/^(?:[\w.-]+@)?([\w.-]+):([\w.\-~%]+)\/([\w.\-~%]+?)(?:\.git)?$/.exec(t);if(e?.[1].includes(`.`)||e&&t.startsWith(`git@`))return{protocol:`ssh`,hostname:e[1],owner:e[2],name:e[3]}}{let e=/^git:([\w.-]+)\/([\w.\-~%]+)\/([\w.\-~%]+?)(?:\.git)?\/?$/.exec(t);if(e)return{protocol:`git`,hostname:e[1],owner:e[2],name:e[3]}}if(!t.includes(`://`)&&!t.includes(`@`)&&!t.startsWith(`/`)){let e=/^([\w.-]+)\/([\w.\-~%]+?)(?:\.git)?$/.exec(t);if(e)return{protocol:`https`,hostname:`github.com`,owner:e[1],name:e[2]}}return null}const ct=[[`count`,0,10],[`compress`,10,20],[`receiv`,20,60],[`resolv`,60,100]];function lt(e){let t=/^([\w ]+):\s+(\d+)%/.exec(e.trim());if(!t)return null;let n=t[1].toLowerCase(),r=Number(t[2]);for(let[e,i,a]of ct)if(n.includes(e))return{stage:t[1],pct:Math.round(i+r/100*(a-i))};return 
null}function ut(e,t){e&&process.stdout.write(`${JSON.stringify(t)}\n`)}async function dt(e,t,n,r=process.cwd()){let i=st(e);if(!i)throw Error(`Invalid git URL: ${e}`);let a=t.dir?j(r,t.dir):j(r,i.name);if(T(a)&&ee(a).length>0)throw Error(`Target directory is not empty: ${a}`);let o=await Ne(),s=await at(i.hostname,o,{}),c=M({baseDir:r,config:s.credentialArgs.length>=2?[s.credentialArgs[1]]:[],unsafe:{allowUnsafeCredentialHelper:!0}}).env({GIT_TERMINAL_PROMPT:`0`}),l=-1;if(c.outputHandler((e,n,r)=>{r.on(`data`,e=>{let n=e.toString(`utf-8`);for(let e of n.split(`
|
|
9
|
+
`)){let n=lt(e);n&&n.pct!==l&&(l=n.pct,ut(t.json,{type:`progress`,pct:n.pct,stage:n.stage}),t.json||process.stderr.write(`\r Cloning… ${n.pct}%`))}})}),await c.clone(e,a,[`--progress`]),t.json||process.stderr.write(`
|
|
10
|
+
`),!T(j(a,`.open-knowledge`)))try{let[{runInit:e},{ensureOkGitignoredAtRoot:t}]=await Promise.all([import(`./init-DihXGBlf.mjs`),import(`./init-B0G3IY-y.mjs`)]);e({cwd:a,mcp:!1});try{t(a)}catch{}}catch{}return a}function ft(e){return new C(`clone`).description(`Clone a git repository and open it`).argument(`<url>`,`Repository URL or owner/repo shorthand`).argument(`[dir]`,`Target directory (default: ./<repo-name>)`).option(`--json`,`Output JSONL progress events`,!1).action(async(t,n,r)=>{let i=e();try{let a=await dt(t,{json:r.json,dir:n},i);if(r.json)ut(!0,{type:`complete`,dir:a});else{process.stderr.write(`✓ Cloned to ${a}\n`),process.chdir(a);let{startCommand:t}=await import(`./start-DsMb_t62.mjs`);await t(e).parseAsync([],{from:`user`})}}catch(e){let t=e instanceof Error?e.message:String(e);r.json?ut(!0,{type:`error`,message:t}):process.stderr.write(`✗ ${t}\n`),process.exitCode=1}})}function P(e,t){return{content:[{type:`text`,text:e}],...t?{isError:!0}:{}}}function F(e,t,n){return{content:[{type:`text`,text:e}],structuredContent:t,...n?{isError:!0}:{}}}const I="Error: Hocuspocus server is not running. Start it with `open-knowledge start`, then retry.\nFor disk-only writes without real-time sync, use your native Edit tool directly.";async function L(e){return typeof e==`function`?await e():e}function R(e){let t=e.toLowerCase();return t.endsWith(`.md`)?{ok:!0,docName:e.slice(0,-3)}:t.endsWith(`.mdx`)?{ok:!0,docName:e.slice(0,-4)}:t.endsWith(`.markdown`)?{ok:!1,error:`Error: docName "${e}" ends in ".markdown", which is not a supported extension. 
Use ".md" or ".mdx", or strip the extension to let the server auto-detect.`}:{ok:!0,docName:e}}async function z(e,t){let n;try{n=await fetch(`${e}${t}`,{signal:AbortSignal.timeout(3e4)})}catch(e){return{ok:!1,error:`Server unreachable: ${e instanceof Error?e.message:e}`}}try{return await n.json()}catch{return{ok:!1,error:`Server returned HTTP ${n.status} with non-JSON body`}}}async function B(e,t,n){let r;try{r=await fetch(`${e}${t}`,{method:`POST`,headers:{"Content-Type":`application/json`},body:n?JSON.stringify(n):void 0,signal:AbortSignal.timeout(3e4)})}catch(e){return{ok:!1,error:`Server unreachable: ${e instanceof Error?e.message:e}`}}try{return await r.json()}catch{return{ok:!1,error:`Server returned HTTP ${r.status} with non-JSON body`}}}function pt(e,n){return`Promote existing research on this topic into a canonical article inside the project content directory. **Canonical, not provisional** — the output is the source of truth for future agents.
|
|
11
|
+
|
|
12
|
+
Topic: ${e}
|
|
13
|
+
|
|
14
|
+
The content directory for this project is **\`${n}\`** (from \`${t}/config.yml\`).
|
|
15
|
+
|
|
16
|
+
## When to use this workflow
|
|
17
|
+
|
|
18
|
+
- A team has made a decision after research and wants the outcome committed as canonical knowledge
|
|
19
|
+
- You want to compact several provisional research notes into one authoritative article
|
|
20
|
+
- A developer asks to "consolidate" or "finalize" the knowledge on a topic
|
|
21
|
+
|
|
22
|
+
Do NOT consolidate when:
|
|
23
|
+
- The team has not actually decided (the output would be misleading — keep it as research)
|
|
24
|
+
- You have not read the underlying sources (the output would lack evidence)
|
|
25
|
+
|
|
26
|
+
## Principle: canonical, not provisional
|
|
27
|
+
|
|
28
|
+
A consolidated article is the **source of truth**. Agents reading it should not need to dig further for context — it should stand on its own. That means:
|
|
29
|
+
|
|
30
|
+
- Clear, direct statements (no "tentative", no "initial findings")
|
|
31
|
+
- Decisions stated as decisions, not options
|
|
32
|
+
- Rationale explained so future readers understand the why
|
|
33
|
+
- Trade-offs acknowledged but framed against the chosen path, not as a menu
|
|
34
|
+
- Evidence linked but not the whole story — this article is the destination, not a trail
|
|
35
|
+
|
|
36
|
+
## Steps
|
|
37
|
+
|
|
38
|
+
### 1. Load the research + sources
|
|
39
|
+
|
|
40
|
+
Locate research articles on this topic:
|
|
41
|
+
|
|
42
|
+
- Use \`exec("grep -rn <topic-keyword> ${n}")\` to find prior research, or \`exec("ls <research-folder>")\` if the project groups research in a known location
|
|
43
|
+
- Read each research article fully via \`exec("cat <path>")\` (rich enrichment gives frontmatter + shadow-repo activity + project git history + backlinks)
|
|
44
|
+
- Follow its \`sources:\` frontmatter list — read every referenced source file
|
|
45
|
+
- Also read any existing canonical article on the topic — if one already exists, you may be **updating** it rather than creating a new one
|
|
46
|
+
|
|
47
|
+
If there is no research to consolidate, stop. Consolidation is promotion, not creation. Run \`research\` first.
|
|
48
|
+
|
|
49
|
+
### 2. Confirm the decision
|
|
50
|
+
|
|
51
|
+
Before writing, confirm with the developer:
|
|
52
|
+
|
|
53
|
+
- **What is the actual decision?** (e.g., "We chose Yjs for CRDT" — not "Yjs is one option")
|
|
54
|
+
- **What alternatives were considered and rejected?** (these get mentioned in trade-offs, not as equal options)
|
|
55
|
+
- **What's the rationale the team actually used?** (not your reconstruction from sources — ask if unclear)
|
|
56
|
+
|
|
57
|
+
If the decision is not yet made, **do not consolidate**. Return and tell the developer to either (a) make the decision first, or (b) keep the research as provisional.
|
|
58
|
+
|
|
59
|
+
### 3. Write the canonical article
|
|
60
|
+
|
|
61
|
+
Save inside the content directory (\`${n}\`). Path convention depends on the project:
|
|
62
|
+
|
|
63
|
+
- If the project uses the three-tier lifecycle (external-sources → research → articles), save under an \`articles/\` folder relative to the content dir, grouped by topic subfolder when the area is broad (e.g., \`articles/editor/crdt-architecture.md\`)
|
|
64
|
+
- If the project has an existing canonical-docs layout (\`docs/\`, \`guides/\`, etc.), save there in a location that matches the project's conventions
|
|
65
|
+
- Ask the user when the canonical location is ambiguous
|
|
66
|
+
|
|
67
|
+
Frontmatter:
|
|
68
|
+
|
|
69
|
+
\`\`\`yaml
|
|
70
|
+
---
|
|
71
|
+
title: Descriptive title
|
|
72
|
+
description: One-line summary of what this article covers
|
|
73
|
+
status: canonical
|
|
74
|
+
date: YYYY-MM-DD
|
|
75
|
+
tags:
|
|
76
|
+
- topic-tag
|
|
77
|
+
supersedes:
|
|
78
|
+
- <path-to-research-article>.md
|
|
79
|
+
---
|
|
80
|
+
\`\`\`
|
|
81
|
+
|
|
82
|
+
Structure:
|
|
83
|
+
|
|
84
|
+
\`\`\`markdown
|
|
85
|
+
## Summary
|
|
86
|
+
|
|
87
|
+
[One paragraph: what the decision is and why. A reader who reads only this paragraph should know the outcome.]
|
|
88
|
+
|
|
89
|
+
## Context
|
|
90
|
+
|
|
91
|
+
[What problem does this solve? What constraints shaped the decision?]
|
|
92
|
+
|
|
93
|
+
## Decision
|
|
94
|
+
|
|
95
|
+
[The chosen approach, stated directly. Not "we recommend" — "we chose".]
|
|
96
|
+
|
|
97
|
+
## Rationale
|
|
98
|
+
|
|
99
|
+
[Why this path over alternatives. Grounded in the constraints from Context.]
|
|
100
|
+
|
|
101
|
+
## Trade-offs
|
|
102
|
+
|
|
103
|
+
[What we gave up by choosing this path. Frame against the chosen decision, not as a menu.]
|
|
104
|
+
|
|
105
|
+
## Alternatives considered
|
|
106
|
+
|
|
107
|
+
[Briefly: what else was on the table, why it was rejected. Link to the research article for deeper analysis.]
|
|
108
|
+
|
|
109
|
+
## Implementation notes
|
|
110
|
+
|
|
111
|
+
[How this gets realized in the codebase — key files, patterns, gotchas.]
|
|
112
|
+
|
|
113
|
+
## Further reading
|
|
114
|
+
|
|
115
|
+
[Links to research articles and external sources for readers who want the trail.]
|
|
116
|
+
\`\`\`
|
|
117
|
+
|
|
118
|
+
### 4. Link aggressively
|
|
119
|
+
|
|
120
|
+
Canonical articles are destinations — they should be **linked heavily from everywhere they're relevant** and link **out to every related page** themselves. Underlinked canonical articles lose most of their value.
|
|
121
|
+
|
|
122
|
+
- **Inside this article:** every noun-phrase that names another document (other canonical articles, related research, external-source pages, sibling topics) should be a \`[[Page Name]]\` link, not plain prose. Prefer \`[[Page]]\` over Markdown \`[text](./page.md)\` — only wiki-links participate in the backlinks index.
|
|
123
|
+
- **Redlinks are fine.** If you mention a concept that *should* have a page but doesn't yet, \`[[link it anyway]]\`. The redlink signals future work.
|
|
124
|
+
- **Update neighbors.** After writing, find 2–3 closely-related existing pages (via \`exec("grep -rn <topic> ${n}")\`) and add a \`[[<new article>]]\` link from each — usually under a "See also" section or inline where the new article is relevant. This makes the article discoverable via backlinks, not just by remembering the path.
|
|
125
|
+
- **Link to the sources and superseded research** from "Further reading" — readers who want the trail can follow.
|
|
126
|
+
|
|
127
|
+
### 5. Supersede the research
|
|
128
|
+
|
|
129
|
+
Add a \`supersedes:\` list in the new article's frontmatter pointing at the research article(s) it consolidates. This creates an audit trail.
|
|
130
|
+
|
|
131
|
+
Do NOT delete the research articles — they remain as historical context for how the decision was reached. Edit their frontmatter to add:
|
|
132
|
+
|
|
133
|
+
\`\`\`yaml
|
|
134
|
+
superseded_by: <path-to-new-canonical-article>.md
|
|
135
|
+
\`\`\`
|
|
136
|
+
|
|
137
|
+
### 6. Verify
|
|
138
|
+
|
|
139
|
+
- File exists at the chosen path under the content directory
|
|
140
|
+
- Has \`status: canonical\` frontmatter
|
|
141
|
+
- Lists the research articles it supersedes
|
|
142
|
+
- Research articles updated with \`superseded_by\` pointer
|
|
143
|
+
- \`exec("ls <target-dir>")\` shows the new file
|
|
144
|
+
|
|
145
|
+
## Non-goals
|
|
146
|
+
|
|
147
|
+
- **Don't consolidate research that hasn't reached a decision** — the article would misrepresent the team's actual state of understanding
|
|
148
|
+
- **Don't delete research articles** — they are the trail; keep them with a \`superseded_by\` marker
|
|
149
|
+
- **Don't rewrite research prose verbatim** — canonical articles have a different voice (direct, decided) than research (exploratory, provisional)
|
|
150
|
+
- **Don't skip the supersedes / superseded_by links** — the audit trail matters for future readers
|
|
151
|
+
|
|
152
|
+
Full convention: read \`${t}/AGENTS.md\`.`}const mt=[`Promote research into a canonical article inside the project content directory. Canonical, not provisional — the output is the source of truth for future agents.`,``,`**Use when:**`,`- A team has made a decision after research and wants the outcome committed as canonical knowledge`,`- Compacting several provisional research notes into one authoritative article`,`- A developer asks to "consolidate" or "finalize" knowledge on a topic`,``,`**Triggers on:**`,`- "consolidate", "finalize", "promote to canonical", "make this official"`,`- User says the team has decided and wants the outcome written as canonical`,`- Research has stabilized and a destination article is needed`].join(`
|
|
153
|
+
`);function ht(e,t){e.tool(`consolidate`,mt,{topic:N.string().describe(`The topic to consolidate into a canonical article`)},e=>F(pt(e.topic,t.content.dir),{previewUrl:null}))}function gt(e){return e.split(`/`).map(encodeURIComponent).join(`/`)}function _t(e){return e.endsWith(`/`)?e.slice(0,-1):e}function vt(e){try{return new URL(e),!0}catch{return!1}}async function V(e,t){let n=await t.resolveCwd(),r=a(o(t.config,n));return U(e,{config:t.config,lockDir:r})}function yt(e){try{let t=d(e.lockDir);if(t&&t.port>0)return{baseUrl:`http://localhost:${t.port}`,port:t.port}}catch(t){process.stderr.write(`[preview-url] readUiLock failed at ${e.lockDir} while building ui block: ${t instanceof Error?t.message:String(t)}\n`)}return{baseUrl:null,port:null}}async function H(e){let t=await e.resolveCwd(),n=a(o(e.config,t)),r={config:e.config,lockDir:n};return{resolve:e=>U(e,r),ui:yt(r)}}function bt(e){let t=e.toLowerCase();return t.endsWith(`.md`)?e.slice(0,-3):t.endsWith(`.mdx`)?e.slice(0,-4):e}function U(e,t){let n=`/#/${gt(e)}`,r=process.env.OPEN_KNOWLEDGE_PREVIEW_BASE_URL;if(r&&vt(r))return{url:`${_t(r)}${n}`,source:`env`};try{let e=d(t.lockDir);if(e&&e.port>0)return{url:`http://localhost:${e.port}${n}`,source:`lock`}}catch(e){process.stderr.write(`[preview-url] readUiLock failed at ${t.lockDir}, falling through to config: ${e instanceof Error?e.message:String(e)}\n`)}let i=t.config.preview?.baseUrl;return i&&vt(i)?{url:`${_t(i)}${n}`,source:`config`}:null}const xt=["**IMPORTANT: Before calling this tool, you MUST first call `get_preview_url` and navigate to the returned URL in your preview browser. If `get_preview_url` returns null, start the server first (`open-knowledge start` or `preview_start`), then call `get_preview_url` again. Do NOT call this tool without the preview open. 
NEVER manually construct the URL.**",``,`[Requires: Hocuspocus server] Find-and-replace on a live document via the CRDT layer.`,`The patch is applied through Hocuspocus and propagated to all connected editors in real-time.`,"Use `offset` when you need to patch an exact occurrence; omit it to preserve first-match behavior.",``,"**When rewriting prose, add `[[wiki-links]]` aggressively.** If the replacement mentions other documents or entities that should have their own page, link them as `[[Page Name]]`. Over-linking is the goal; underlinked documents lose their value in backlink-driven navigation.",``,`**Parameters:**`,"- `docName` — Document name, typically without extension. A trailing `.md` or `.mdx` is stripped automatically.","- `find` — Text to find (exact match)","- `replace` — Replacement text","- `offset` (optional) — Exact occurrence to patch, as a JavaScript string offset in the current markdown. If the document changed and the text no longer matches there, the server returns a stale-target error; re-run `suggest_links` to get fresh offsets."].join(`
|
|
154
|
+
`);function St(e,t){e.tool(`edit_document`,xt,{docName:N.string().describe(`Document name to edit`),find:N.string().describe(`Text to find (exact match)`),replace:N.string().describe(`Replacement text`),offset:N.number().int().min(0).optional().describe(`Exact occurrence to patch, as a JavaScript string offset in the current markdown`)},async e=>{let n=await L(t.serverUrl);if(!n)return P(I,!0);let r=R(e.docName);if(!r.ok)return P(r.error,!0);let i=t.identityRef?.current,s=await B(n,`/api/agent-patch`,{docName:r.docName,find:e.find,replace:e.replace,offset:e.offset,...i?{agentId:i.connectionId,agentName:i.displayName,clientName:i.clientInfo?.name,colorSeed:i.colorSeed}:{}});if(!s.ok)return P(`Error: ${s.error}`,!0);let c=await t.resolveCwd(),l=a(o(t.config,c)),u=U(r.docName,{config:t.config,lockDir:l}),d=(typeof s.subscriberCount==`number`?s.subscriberCount:void 0)===0,f=[`Edit applied successfully.`];u&&f.push(`Preview: ${u.url}`),d&&f.push(u?`Warning: no preview is currently attached to "${r.docName}". Open ${u.url} to watch future edits live.`:`Warning: no preview is currently attached to "${r.docName}".`);let p=f.join(`
|
|
155
|
+
`);if(!u&&!d)return P(p);let m={};return u&&(m.previewUrl=u.url,m.previewUrlSource=u.source),d&&(m.warning={message:`No preview attached to ${r.docName}.`,previewUrl:u?.url??null}),F(p,m)})}const Ct=new Set([`cat`,`ls`,`grep`,`find`]),wt=/\b[\w./-]+\.(md|mdx)\b/g;function W(e){return/\.(md|mdx)$/.test(e)}function G(e){let t=e.trim();return t?(t=t.replace(/\/+/g,`/`),t.startsWith(`./`)&&(t=t.slice(2)),t.endsWith(`/`)&&(t=t.slice(0,-1)),t):``}function K(e){return e.args.slice(1)}function q(e){return e.filter(e=>!e.startsWith(`-`))}function Tt(e){return q(K(e)).filter(W)}function Et(e,t){let n=q(K(t)),r=n.length>0?n[n.length-1]:``,i=r&&r!==`.`?G(r):``,a=[];i&&a.push(i);for(let t of e.split(`
|
|
156
|
+
`)){let e=t.trim();if(!e||/\.[a-z0-9]+$/i.test(e)&&!W(e))continue;let n=i?`${i}/${e}`:e;a.push(n)}return a}function Dt(e){let t=[];for(let n of e.split(`
|
|
157
|
+
`)){if(!n)continue;let e=n.indexOf(`:`);if(e<0)continue;let r=G(n.slice(0,e));W(r)&&t.push(r)}return t}function Ot(e){let t=[];for(let n of e.split(`
|
|
158
|
+
`)){let e=G(n);e&&W(e)&&t.push(e)}return t}function kt(e){return q(K(e)).filter(W)}function At(e){return q(K(e)).length>0}function jt(e){let t=[],n=e.matchAll(wt);for(let e of n)t.push(G(e[0]));return t}function Mt(e,t){let n=null;for(let e=t.length-1;e>=0;e--){let r=t[e];if(Ct.has(r.command)){n=r;break}if((r.command===`head`||r.command===`tail`)&&At(r)){n=r;break}}let r;if(!n)r=jt(e);else{switch(n.command){case`cat`:r=Tt(n);break;case`ls`:r=Et(e,n);break;case`grep`:r=Dt(e);break;case`find`:r=Ot(e);break;case`head`:case`tail`:r=kt(n);break;default:r=jt(e)}r.length===0&&(r=jt(e))}let i=new Set,a=[];for(let e of r){let t=G(e);!t||i.has(t)||(i.add(t),a.push(t))}return a}function J(e){return e===``?`''`:/^[\w.\-/]+$/.test(e)?e:`'${e.replace(/'/g,`'\\''`)}'`}const Nt=16*1024*1024;var Pt=class extends Error{limitBytes;actualBytes;partial;constructor(e,t,n){super(`Output exceeded ${e} byte buffer (got ${t}); narrow the command`),this.name=`StdoutOverflowError`,this.limitBytes=e,this.actualBytes=t,this.partial=n}};function Ft(e){if(!oe(e))throw Error(`createBashInstance: cwd must be absolute (got: ${e})`);return new De({cwd:`/`,fs:new Oe({root:j(e),allowSymlinks:!1})})}async function It(e,t){let n=await e.exec(t);if(n.stdout.length>Nt)throw new Pt(Nt,n.stdout.length,{stdout:n.stdout.slice(0,Nt),stderr:n.stderr,exitCode:n.exitCode});return{stdout:n.stdout,stderr:n.stderr,exitCode:n.exitCode}}function Lt(e){return e.startsWith(`**/`)?e.slice(3):e}async function Rt(e,t,n={}){let r=Ft(t),i=[`-rn`,`-F`];(n.caseInsensitive??!0)&&i.push(`-i`);for(let e of n.include??[])i.push(`--include=${J(Lt(e))}`);for(let e of n.exclude??[])i.push(`--exclude=${J(Lt(e))}`),i.push(`--exclude-dir=${J(Lt(e))}`);let a=n.paths?.length?n.paths.map(J):[`.`],o=`grep ${i.join(` `)} ${J(e)} ${a.join(` `)}`,s;try{s=await It(r,o)}catch(e){if(e instanceof Pt)s=e.partial;else throw e}if(s.exitCode===1&&!s.stdout)return[];if(s.exitCode!==0&&s.exitCode!==1&&!s.stdout)throw Error(`grep exited ${s.exitCode}: 
${s.stderr}`);let c=[],l=n.maxResults??1/0;for(let e of s.stdout.split(`
|
|
159
|
+
`)){if(!e)continue;if(c.length>=l)break;let t=e.indexOf(`:`);if(t===-1)continue;let n=e.indexOf(`:`,t+1);if(n===-1)continue;let r=e.slice(0,t),i=e.slice(t+1,n),a=e.slice(n+1),o=Number.parseInt(i,10);Number.isFinite(o)&&c.push({path:r,line:o,text:a})}return c}const zt=new Set([`.git`,t,`.openknowledge`,`node_modules`,`.changeset`,`.claude`,`.agents`,`dist`,`build`]);async function Bt(e){let t=j(e),n=new Map,r=0,i=!1;async function a(e){if(i)return;let o;try{o=await ye(e,{withFileTypes:!0})}catch{return}for(let s of o){if(i)return;if(s.isDirectory()&&zt.has(s.name))continue;let o=j(e,s.name);if(s.isDirectory()){await a(o);continue}if(s.isFile()){if(r>=1e3){i=!0;return}try{let e=await be(o);n.set(ce(t,o),e.mtimeMs),r++}catch{}}}}return await a(t),{snapshot:n,truncated:i}}function Vt(e,t){let n=[];for(let[r,i]of t){let t=e.get(r);(t===void 0||t!==i)&&n.push(r)}for(let[r]of e)t.has(r)||n.push(r);return{changed:n}}const Ht=[`node_modules`,`.git`,`dist`,`build`,`.next`,`.turbo`,`.nuxt`,`coverage`,`.cache`,`.parcel-cache`,`.vercel`,t,`.claude`];function Ut(e){return e===`--recursive`||e===`--dereference-recursive`?!0:e.startsWith(`--`)||!e.startsWith(`-`)?!1:/[rR]/.test(e.slice(1))}const Wt=[{command:`grep`,applies:e=>e.slice(1).some(Ut),hasUserExcludes:e=>e.some(e=>e===`--exclude-dir`||e.startsWith(`--exclude-dir=`)),buildExcludeArgs:e=>e.map(e=>`--exclude-dir=${e}`),insertionIndex:()=>1},{command:`find`,applies:()=>!0,hasUserExcludes:e=>e.slice(1).some(e=>e===`-not`||e===`!`||e===`-prune`),buildExcludeArgs:e=>{let t=[];for(let n of e)t.push(`-not`,`-path`,`*/${n}/*`);return t},insertionIndex:e=>{for(let t=1;t<e.length;t++)if(e[t].startsWith(`-`))return t;return e.length}}];function Gt(e){return e.map(e=>{let t=Wt.find(t=>t.command===e.command);if(!t||!t.applies(e.args)||t.hasUserExcludes(e.args))return e;let n=t.buildExcludeArgs(Ht),r=t.insertionIndex(e.args);return{command:e.command,args:[...e.args.slice(0,r),...n,...e.args.slice(r)]}})}function Kt(e){return 
e.map(e=>e.args.map(J).join(` `)).join(` | `)}const qt=new Set([`cat`,`ls`,`grep`,`find`,`head`,`tail`,`wc`,`sort`,`uniq`,`cut`]),Jt=new Set([`>`,`>>`,`<`,`>&`,`<&`,`|&`]),Yt=new Set([`&`,`;`,`;;`,`&&`,`||`,`(`,`)`,`<(`,`>(`,`<<`,`<<-`]),Xt=new Set([`-o`,`--output-file`,`--output`]),Zt=[`-o=`,`--output-file=`,`--output=`],Qt=new Set([`-exec`,`-execdir`,`-delete`,`-fprint`,`-fprintf`,`-fprint0`,`-ok`,`-okdir`]),$t=/[`]|\$\(|\$\{|\$'/;function en(e){return typeof e==`object`&&!!e&&`op`in e}function tn(e){let t=typeof e.op==`string`?e.op:`(unknown)`;return Jt.has(t)?{category:`write_blocked`,message:`Write operation blocked: '${t}'. exec is read-only. For document changes, use write_document or edit_document.`}:Yt.has(t)?{category:`shell_construct_blocked`,message:`Shell construct '${t}' is not supported. Only pipes (|) are allowed between allowlisted stages.`}:{category:`shell_construct_blocked`,message:`Operator '${t}' is not supported.`}}function nn(e){let t=[];for(let n of e){if(typeof n==`string`){if($t.test(n))return{error:{category:`shell_construct_blocked`,message:`Argument '${n}' contains a shell-injection pattern (backtick, $(), or \${}); not supported.`}};t.push(n);continue}if(!en(n))return{error:{category:`shell_construct_blocked`,message:`Unrecognized token shape.`}};if(n.op===`glob`&&typeof n.pattern==`string`){t.push(n.pattern);continue}return typeof n.comment==`string`?{error:{category:`shell_construct_blocked`,message:`Comments are not allowed in exec commands.`}}:{error:tn(n)}}return{args:t}}function rn(e){if(!qt.has(e.command))return{category:`unknown_command`,message:`Command '${e.command}' is not in the allowlist. For pattern matching try 'grep'; for file listing try 'ls' or 'find'. Allowlist: cat, ls, grep, find, head, tail, wc, sort, uniq, cut.`};for(let t of e.args.slice(1)){if(Xt.has(t)||Zt.some(e=>t.startsWith(e)))return{category:`write_blocked`,message:`Write operation blocked: '${t}'. exec is read-only. 
For document changes, use write_document or edit_document.`};if(e.command===`find`&&Qt.has(t))return{category:`write_blocked`,message:`find flag '${t}' is blocked (executes commands or deletes files). Use exec for read-only discovery; chain with another allowlisted tool via '|' if you need to transform output.`}}return null}function an(e){let t=e.trim();if(!t)return{error:{category:`unknown_command`,message:`Empty command.`}};let n;try{n=ke.parse(t)}catch{return{error:{category:`shell_construct_blocked`,message:`Failed to parse command — likely malformed quoting or an unsupported construct.`}}}let r=[],i=[];for(let e of n){if(en(e)&&e.op===`|`){r.push(i),i=[];continue}i.push(e)}r.push(i);let a=[];for(let e of r){let t=nn(e);if(`error`in t)return t;if(t.args.length===0)return{error:{category:`shell_construct_blocked`,message:`Empty pipeline stage (trailing pipe or leading pipe).`}};let n={command:t.args[0],args:t.args},r=rn(n);if(r)return{error:r};a.push(n)}return{stages:a}}const on=/^---\r?\n([\s\S]*?)\r?\n---(?:\r?\n|$)/;function sn(e,t){let n=e.match(on);if(!n)return null;try{let e=le(n[1]);if(v(e)){if(t){let n=t.safeParse(e);return n.success?n.data:null}return e}}catch{}return null}const cn=new WeakMap;function ln(e){let t=cn.get(e);if(t)return t;let n=e.map(e=>Ce(e.match,{dot:!0}));return cn.set(e,n),n}function un(e,t){if(e.length===0)return{};let n=ln(e),r={},i=[],a=!1;for(let o=0;o<e.length;o++){if(!n[o](t))continue;a=!0;let s=e[o].frontmatter;if(s.title!==void 0&&(r.title=s.title),s.description!==void 0&&(r.description=s.description),s.tags!==void 0)for(let e of s.tags)i.includes(e)||i.push(e)}return a?(i.length>0&&(r.tags=i),r):{}}function dn(e){try{return k(j(e,`.git`)).isDirectory()}catch{return!1}}function fn(e){return M({baseDir:j(e),timeout:{block:5e3}})}async function pn(e,t,n=5){if(!dn(e))return{commits:[],source:`git-absent`};let r=fn(e),i=``;try{i=await 
r.raw(`log`,`-${Math.max(1,n)}`,`--format=%H|%aI|%an|%s`,`--follow`,`--`,t)}catch{return{commits:[],source:`git`}}let a=[];for(let e of i.split(`
|
|
160
|
+
`)){if(!e)continue;let t=e.indexOf(`|`);if(t<0)continue;let n=e.indexOf(`|`,t+1);if(n<0)continue;let r=e.indexOf(`|`,n+1);r<0||a.push({hash:e.slice(0,t),date:e.slice(t+1,n),authorName:e.slice(n+1,r),subject:e.slice(r+1)})}return{commits:a,source:`git`}}const mn=5e3;async function hn(e){try{let t=(await M({baseDir:e,timeout:{block:mn}}).revparse([`--abbrev-ref`,`HEAD`])).trim();return t&&t!==`HEAD`?t:null}catch{return null}}function gn(e,t){return M({baseDir:t,timeout:{block:mn}}).env({GIT_DIR:e,GIT_WORK_TREE:t})}function _n(e,t){let n=l(t);return e.startsWith(n)?e.slice(n.length):e}async function vn(e,t,n,r,i){let a=``;try{a=await e.raw(`log`,t,`-${Math.max(1,i*2)}`,`--format=%H%x00%aI%x00%an%x00%s%x00%B%x1e`,`--`,n)}catch{return[]}let o=_n(t,r),c=s(o),l=[];for(let e of a.split(``)){let t=e.trimStart();if(!t)continue;let[n=``,i=``,a=``,s=``,d=``]=t.split(`\0`),f=n.trim();f.length===40&&l.push({hash:f,date:i,writerName:a,message:s,contributors:u(d),writerId:o,isAgent:c.isAgent,writerClassification:c.classification,branch:r})}return l}async function yn(e,t,n=5){let r=c(e);if(!r)return{commits:[],source:`shadow-repo-absent`};let i=await hn(e);if(!i)return{commits:[],source:`shadow-repo`};let a=gn(r,j(e)),o=``;try{o=await a.raw(`for-each-ref`,l(i),`--format=%(refname)`)}catch{return{commits:[],source:`shadow-repo`}}let s=o.split(`
|
|
161
|
+
`).map(e=>e.trim()).filter(Boolean);return s.length===0?{commits:[],source:`shadow-repo`}:{commits:(await Promise.all(s.map(e=>vn(a,e,t,i,n)))).flat().sort((e,t)=>t.date.localeCompare(e.date)).slice(0,n),source:`shadow-repo`}}const bn=1e3,xn=new Set([`.git`,t,`.openknowledge`,`node_modules`,`.changeset`,`.claude`,`.agents`,`dist`,`build`]),Sn=/\.(md|mdx)$/i,Cn=N.object({title:N.string().optional(),description:N.string().optional(),tags:N.array(N.string()).default([])});function wn(e){return e.replace(/\.md$/,``).replace(/\.mdx$/,``)}async function Tn(e){try{let t=sn(await ve(e,`utf-8`),Cn);return t?{title:t.title,description:t.description,tags:t.tags??[]}:{tags:[]}}catch{return null}}async function En(e,t){if(!e)return null;let n=await z(e,`/api/backlinks?docName=${encodeURIComponent(t)}`);if(!n.ok)return null;let r=n.backlinks??n.results??n.links;if(!Array.isArray(r))return[];let i=[];for(let e of r){if(typeof e!=`object`||!e)continue;let t=e,n=typeof t.docName==`string`?t.docName:typeof t.source==`string`?t.source:typeof t.page==`string`?t.page:void 0;n&&i.push({source:n,title:typeof t.title==`string`?t.title:void 0,snippet:typeof t.snippet==`string`?t.snippet:null})}return i}async function Dn(e,t){if(!e||t.length===0)return null;let n=[...new Set(t)],r=[];for(let e=0;e<n.length;e+=100)r.push(n.slice(e,e+100));let i=await Promise.all(r.map(async t=>{let n=await z(e,`/api/backlink-counts?docNames=${encodeURIComponent(t.join(`,`))}`);return n.ok?n.counts??{}:null})),a=new Map,o=!1;for(let e of i)if(e){o=!0;for(let[t,n]of Object.entries(e))typeof n==`number`&&Number.isFinite(n)&&a.set(t,n)}return o?a:null}async function On(e,t){if(!e)return null;let n=await z(e,`/api/forward-links?docName=${encodeURIComponent(t)}`);if(!n.ok)return null;let r=n.forwardLinks??n.links??n.results;if(!Array.isArray(r))return[];let i=[];for(let e of r){if(typeof e!=`object`||!e)continue;let t=e;if(t.kind===`external`&&typeof t.url==`string`){i.push({kind:`external`,url:t.url,title:typeof 
t.title==`string`?t.title:void 0,snippet:typeof t.snippet==`string`?t.snippet:null});continue}let n=typeof t.docName==`string`?t.docName:void 0;n&&i.push({kind:`doc`,docName:n,title:typeof t.title==`string`?t.title:void 0,snippet:typeof t.snippet==`string`?t.snippet:null})}return i}function kn(e,t,n){let r=t??[],i=r.length===0?{}:un(r,n),a=e?.title??i.title,o=e?.description??i.description,s=e?.tags??[],c=i.tags??[],l;if(c.length===0)l=s;else{let e=new Set;l=[];for(let t of c)e.has(t)||(e.add(t),l.push(t));for(let t of s)e.has(t)||(e.add(t),l.push(t))}return{title:a,description:o,tags:l}}async function An(e,t,n={}){let r=e.replace(/^\.\//,``).replace(/^\/+/,``),i=j(t.projectDir,r),a=t.historyDepth??5,o=n.includeRichFields===!0,s=Tn(i);if(!o){let e=kn(await s,t.folderRules,r);return{path:r,title:e.title,description:e.description,tags:e.tags,backlinkCount:null,backlinks:null,forwardLinkCount:null,forwardLinks:null,history:null,historySource:null,projectHistory:null,projectHistorySource:null}}let[c,l,u,d,f]=await Promise.all([s,En(t.serverUrl,wn(r)).catch(()=>null),On(t.serverUrl,wn(r)).catch(()=>null),yn(t.projectDir,r,a).catch(()=>({commits:[],source:`shadow-repo`})),pn(t.projectDir,r,a).catch(()=>({commits:[],source:`git`}))]),p=kn(c,t.folderRules,r);return{path:r,title:p.title,description:p.description,tags:p.tags,backlinkCount:l?.length??null,backlinks:l,forwardLinkCount:u?.length??null,forwardLinks:u,history:d.commits,historySource:d.source,projectHistory:f.commits,projectHistorySource:f.source}}async function jn(e,t){let n={directMdCount:0,recursiveMdCount:0,childDirCount:0,mostRecent:null,truncated:!1},r=0,i=[{path:e,depth:0}];for(;i.length>0;){let e=i.shift();if(!e)break;if(r>=bn){n.truncated=!0;break}let a;try{a=await ye(e.path,{withFileTypes:!0})}catch{continue}for(let o of a){if(r>=bn){n.truncated=!0;break}r++;let 
a=o.name;if(o.isDirectory()){if(xn.has(a)||a.startsWith(`.`))continue;e.depth===0&&n.childDirCount++,i.push({path:`${e.path}/${a}`,depth:e.depth+1})}else if(o.isFile()&&Sn.test(a)){n.recursiveMdCount++,e.depth===0&&n.directMdCount++;let r=`${e.path}/${a}`;try{let e=await be(r);(!n.mostRecent||e.mtimeMs>n.mostRecent.mtimeMs)&&(n.mostRecent={absPath:r,relPath:ce(t,r).split(/[\\/]/).filter(Boolean).join(`/`),mtimeMs:e.mtimeMs})}catch{}}}}return n}async function Mn(e,t){let n=e.replace(/^\.\//,``).replace(/^\/+/,``).replace(/\/+$/,``),r=await jn(j(t.projectDir,n),t.projectDir),i;if(r.mostRecent){let e=await Tn(r.mostRecent.absPath);i={path:r.mostRecent.relPath,title:e?.title??ie(r.mostRecent.relPath),updatedAt:new Date(r.mostRecent.mtimeMs).toISOString()}}let a={path:n,type:`directory`,directMdCount:r.directMdCount,recursiveMdCount:r.recursiveMdCount,childDirCount:r.childDirCount,mostRecentMd:i,truncated:r.truncated},o=t.folderRules??[];if(o.length>0){let e=un(o,n);e.title!==void 0&&(a.title=e.title),e.description!==void 0&&(a.description=e.description),e.tags!==void 0&&e.tags.length>0&&(a.tags=e.tags)}return a}const Nn=50*1024,Pn=/\.(png|jpe?g|gif|webp|svg|pdf|zip|tar|gz|tgz|mp4|mov|mp3|wav|ico|bmp)$/i,Fn=[`Run a read-only bash-like command against the project content directory. Returns raw stdout plus enriched metadata for every wiki file referenced (frontmatter, backlink/forward-link counts, shadow-repo activity with agent/human attribution).`,``,`Allowlist: cat, ls, grep, find, head, tail, wc, sort, uniq, cut. Pipes (|) work between stages. Redirections, subshells, and writes are rejected.`,``,"cwd: the command runs in the MCP client's first advertised root (the project the user is working in). Pass an explicit absolute `cwd` to run against a different directory. 
Paths inside the command resolve relative to that cwd; traversal above it is rejected.",``,"Stdout provenance headers (GNU-style): `ls <dir>/` prepends `<dir>/:`, single-file `cat`/`head`/`tail` prepends `==> <path> <==`, so the subject of the command is visible in raw output. Multi-file `cat a b` emits no header — the `enrichedPaths` array still lists every file. `head`/`tail` used as pipe trimmers (no file arg) defer to the upstream producer.",``,`Examples:`,'- `exec({ command: "cat articles/auth.md" })` — file contents + full enrichment','- `exec({ command: "ls articles/" })` — listing + per-file enrichment (slim)','- `exec({ command: "grep -rn oauth articles/ | head -5" })` — pipe with enrichment on matched files','- `exec({ command: "ls", cwd: "/abs/path/to/other-repo" })` — run in a different project'].join(`
|
|
162
|
+
`);function In(e){let t=e.split(`
|
|
163
|
+
`),n=t[t.length-1]===``?t.length-1:t.length;if(n<=500&&e.length<=Nn)return{text:e,truncated:!1,omittedLines:0};let r=Math.min(n,500),i=0,a=0;for(let e=0;e<r;e++){let n=t[e];if(i+=n.length+1,i>Nn)break;a++}let o=t.slice(0,a).join(`
|
|
164
|
+
`),s=n-a;return{text:`${o}\n<truncated: ${s} more lines — re-run with a more-specific query>`,truncated:!0,omittedLines:s}}function Ln(e){let t=[];for(let n of e)if(n.command===`cat`)for(let e of n.args.slice(1))e.startsWith(`-`)||Pn.test(e)&&t.push(e);return t}function Rn(e){for(let t=1;t<e.length;t++){let n=e[t],r=n.match(/^--lines=(\d+)$/);if(r)return Number(r[1]);if(n===`--lines`&&t+1<e.length){let n=Number(e[t+1]);if(Number.isFinite(n))return n}if(n===`-n`&&t+1<e.length){let n=Number(e[t+1]);if(Number.isFinite(n))return n}let i=n.match(/^-n(\d+)$/);if(i)return Number(i[1]);let a=n.match(/^-(\d+)$/);if(a)return Number(a[1])}return 10}function zn(e,t){if(e.length<2)return null;let n=e[e.length-1];if(n.command!==`head`&&n.command!==`tail`)return null;let r=Rn(n.args),i=t.split(`
|
|
165
|
+
`),a=i[i.length-1]===``?i.length-1:i.length;if(a<r)return null;let o=i.slice(0,a),s=new Set(o.map(e=>{let t=e.indexOf(`:`);return t>0?e.slice(0,t):e})).size,c=e.slice(0,-1).map(e=>e.command).join(` | `);return{banner:`Output hit \`${n.command} -${r}\` cap (${a} lines, ${s} unique file${s===1?``:`s`}). The \`${c}\` stage may have had more matches that never reached stdout. For existence checks across many files, prefer \`grep -rl PATTERN <dir>\` (list files only, no head). For enumeration, drop the \`| ${n.command}\` or widen the cap.`}}function Bn(e){return e.type===`directory`}function Vn(e){let t=[e.title?`**${e.title}** (${e.path}/)`:`**${e.path}/** (directory)`];e.description&&t.push(e.description),e.tags&&e.tags.length>0&&t.push(`tags: ${e.tags.join(`, `)}`);let n=[];return n.push(`${e.recursiveMdCount} md file${e.recursiveMdCount===1?``:`s`}`),e.childDirCount>0&&n.push(`${e.childDirCount} subdir${e.childDirCount===1?``:`s`}`),t.push(n.join(`, `)),e.mostRecentMd&&t.push(`most recent: ${e.mostRecentMd.title??e.mostRecentMd.path} (${e.mostRecentMd.path})`),e.truncated&&t.push(`scan truncated`),`- ${t.join(` — `)}`}function Hn(e){let t=[`**${e.title??e.path}** (${e.path})`];if(e.description&&t.push(e.description),e.tags.length>0&&t.push(`tags: ${e.tags.join(`, `)}`),e.backlinkCount!==null&&t.push(`backlinks: ${e.backlinkCount}`),e.forwardLinkCount!==null&&t.push(`forward links: ${e.forwardLinkCount}`),e.history&&e.history.length>0){let n=e.history.map(e=>{let t=e.writerClassification===`agent`?`agent: ${e.writerName}`:e.writerClassification===`human`?`human: ${e.writerName}`:`${e.writerClassification}: ${e.writerName}`;return`${e.hash.slice(0,7)} [${t}] ${e.message}`});t.push(`OK edits: ${n.join(` · `)}`)}if(e.projectHistory&&e.projectHistory.length>0){let n=e.projectHistory.map(e=>`${e.hash.slice(0,7)} ${e.authorName}: ${e.subject}`);t.push(`commits: ${n.join(` · `)}`)}return`- ${t.join(` — `)}`}function Un(e,t,n){let r=null;for(let t=e.length-1;t>=0;t--){let 
n=e[t],i=n.command;if(i===`ls`||i===`cat`){r=n;break}if((i===`head`||i===`tail`)&&q(K(n)).length>0){r=n;break}}if(!r)return``;let i=q(K(r));if(r.command===`ls`){let e=i[i.length-1];if(!e||e===`.`)return``;let n=e.replace(/\/+/g,`/`);return n.startsWith(`./`)&&(n=n.slice(2)),n.endsWith(`/`)&&(n=n.slice(0,-1)),!n||!t.has(n)?``:`${n}/:\n`}let a=i.filter(e=>/\.(md|mdx)$/.test(e)&&n.has(e));return a.length===1?`==> ${a[0]} <==\n`:``}function Wn(e){if(e.length===0)return``;let t=[``,`### Referenced files`,``];for(let n of e)t.push(Bn(n)?Vn(n):Hn(n));return t.join(`
|
|
166
|
+
`)}async function Gn(e,t){let n=[],r=[];return await Promise.all(t.map(async t=>{try{let i=await be(j(e,t));i.isDirectory()?r.push(t):i.isFile()&&n.push(t)}catch{/\.(md|mdx)$/i.test(t)&&n.push(t)}})),{files:n,dirs:r}}function Y(e,t){return F(t,{enrichedPaths:[],error:{category:e,message:t}},!0)}function Kn(e,t){return e.map(e=>{let n=t(bt(e.path));return{...e,previewUrl:n?.url??null,...n?{previewUrlSource:n.source}:{}}})}async function qn(e,t){let n=await t.resolveCwd(e.cwd),r=await L(t.serverUrl),i=an(e.command);if(`error`in i)return Y(i.error.category,i.error.message);let a=Gt(i.stages),o=Kt(a),s=await Bt(n),c=Ft(n),l=``,u=``;try{let e=await It(c,o);l=e.stdout,u=e.stderr}catch(e){return e instanceof Pt?Y(`output_overflow`,`Output exceeded 16 MB buffer. Narrow the command (e.g., add a more specific grep pattern, use head, restrict the path).`):Y(`shell_construct_blocked`,`exec failed: ${e instanceof Error?e.message:String(e)}`)}let d=await Bt(n),f=Vt(s.snapshot,d.snapshot);if(f.changed.length>0)return Y(`security_invariant_violation`,`Security invariant violated: file(s) in the content directory were modified during a read-only exec call: ${f.changed.join(`, `)}. 
This indicates a parser bug; the command has been logged.`);let p=In(l),m=Mt(l,a),{files:h,dirs:g}=await Gn(n,m),_=a.length===1&&a[0].command===`cat`&&h.length===1,v=t.config.folders,y=await Promise.all(h.map(e=>An(e,{projectDir:n,serverUrl:r,folderRules:v},{includeRichFields:_}).catch(()=>({path:e,tags:[],backlinkCount:null,backlinks:null,forwardLinkCount:null,forwardLinks:null,history:null,historySource:null,projectHistory:null,projectHistorySource:null})))),b=await Promise.all(g.map(e=>Mn(e,{projectDir:n,folderRules:v}).catch(()=>({path:e,type:`directory`,directMdCount:0,recursiveMdCount:0,childDirCount:0,truncated:!1}))));if(!_&&r&&y.length>0){let e=await Dn(r,y.map(e=>wn(e.path))).catch(()=>null);if(e)for(let t of y){let n=e.get(wn(t.path));typeof n==`number`&&(t.backlinkCount=n)}}let x=new Map(y.map(e=>[e.path,e])),S=new Map(b.map(e=>[e.path,e])),C=[];for(let e of m){let t=x.get(e);if(t){C.push(t);continue}let n=S.get(e);n&&C.push(n)}let w=Ln(a),T=[];w.length>0&&T.push(`File${w.length>1?`s`:``} ${w.join(`, `)} appear${w.length===1?`s`:``} to be binary (image/PDF/etc.) — exec returns text only (NG8). For binary retrieval, use native Read.`);let E=zn(a,l);E&&T.push(E.banner),u&&T.push(`stderr: ${u.trim()}`);let D=T.length>0?`${T.join(`
|
|
167
|
+
`)}\n\n`:``,O=Un(a,S,x)+p.text,ee=`${D}${O}${Wn(C)}`,k,A;if(t.config){let{resolve:e,ui:r}=await H({config:t.config,resolveCwd:async()=>n});k=Kn(C,e),A=r}else k=C.map(e=>({...e,previewUrl:null}));return F(ee,{enrichedPaths:k,stdout:O,stdoutTruncated:p.truncated,...A?{ui:A}:{},...T.length>0?{warnings:T}:{}})}function Jn(e,t){e.tool(`exec`,Fn,{command:N.string().describe(`Read-only bash command (allowlist: cat, ls, grep, find, head, tail, wc, sort, uniq, cut; pipes OK)`),cwd:N.string().optional().describe(`Absolute host path to run the command from. Defaults to the MCP client's first advertised root, then the server startup cwd.`)},async e=>{try{return await qn(e,t)}catch(e){return Y(`shell_construct_blocked`,`exec handler error: ${e instanceof Error?e.message:String(e)}`)}})}const Yn=[`[Requires: Hocuspocus server] Find all pages that link to a given page.`,`Returns source page names, resolved titles, and context snippets as JSON.`,``,`**Parameters:**`,'- `docName` — Target page docName, typically without extension (for example, "articles/project-alpha"). A trailing `.md` or `.mdx` is stripped automatically.'].join(`
|
|
168
|
+
`);function Xn(e,t){e.tool(`get_backlinks`,Yn,{docName:N.string().describe(`Target page docName`)},async e=>{let n=await L(t.serverUrl);if(!n)return P(I,!0);let r=R(e.docName);if(!r.ok)return P(r.error,!0);let i=await z(n,`/api/backlinks?docName=${encodeURIComponent(r.docName)}`);if(!i.ok)return P(`Error: ${i.error}`,!0);let{ok:a,...o}=i,s=o,{resolve:c,ui:l}=await H(t),u=(s.backlinks??[]).map(e=>{let t=typeof e.source==`string`?e.source:null,n=t?c(t):null;return{...e,previewUrl:n?.url??null,...n?{previewUrlSource:n.source}:{}}}),d={...s,backlinks:u,ui:l};return F(JSON.stringify(d,null,2),d)})}const Zn=[`[Requires: Hocuspocus server] Find missing internal page targets across the corpus.`,`Returns grouped dead links keyed by missing target with source-doc rows as JSON.`,``,`**Parameters:**`,"- `sourceDocNames` (optional) — Referring source docs to narrow the audit with OR semantics"].join(`
|
|
169
|
+
`);function Qn(e,t){e.tool(`get_dead_links`,Zn,{sourceDocNames:N.array(N.string()).optional().describe(`Referring source docs to narrow the audit with OR semantics`)},async e=>{let n=await L(t.serverUrl);if(!n)return P(I,!0);let r=new URLSearchParams;for(let t of e.sourceDocNames??[]){let e=R(t);if(!e.ok)return P(e.error,!0);r.append(`sourceDocName`,e.docName)}let i=r.toString(),a=await z(n,`/api/dead-links${i?`?${i}`:``}`);if(!a.ok)return P(`Error: ${a.error}`,!0);let{ok:o,...s}=a,c=s,{resolve:l,ui:u}=await H(t),d=(c.deadLinks??[]).map(e=>{let t=typeof e.target==`string`?e.target:null,n=t?l(t):null,r=(e.sources??[]).map(e=>{let t=typeof e.source==`string`?e.source:null,n=t?l(t):null;return{...e,previewUrl:n?.url??null,...n?{previewUrlSource:n.source}:{}}});return{...e,sources:r,previewUrl:n?.url??null,...n?{previewUrlSource:n.source}:{}}}),f={...c,deadLinks:d,ui:u};return F(JSON.stringify(f,null,2),f)})}const $n=[`[Requires: Hocuspocus server] Find all pages that a given page links to.`,`Returns forward links as JSON.`,``,`**Parameters:**`,"- `docName` — Source page docName, typically without extension. A trailing `.md` or `.mdx` is stripped automatically."].join(`
|
|
170
|
+
`);function er(e,t){e.tool(`get_forward_links`,$n,{docName:N.string().describe(`Source page docName`)},async e=>{let n=await L(t.serverUrl);if(!n)return P(I,!0);let r=R(e.docName);if(!r.ok)return P(r.error,!0);let i=await z(n,`/api/forward-links?docName=${encodeURIComponent(r.docName)}`);if(!i.ok)return P(`Error: ${i.error}`,!0);let{ok:a,...o}=i,s=o,{resolve:c,ui:l}=await H(t),u=(s.forwardLinks??[]).map(e=>{let t=e.kind===`doc`&&typeof e.docName==`string`?e.docName:null,n=t?c(t):null;return{...e,previewUrl:n?.url??null,...n?{previewUrlSource:n.source}:{}}}),d={...s,forwardLinks:u,ui:l};return F(JSON.stringify(d,null,2),d)})}const tr=[`[Requires: Hocuspocus server] List version history for a document.`,`Returns timeline entries from the shadow repo, sorted by timestamp descending.`,"Each entry includes a commit SHA that can be passed to `rollback_to_version`.",``,`**Parameters:**`,"- `docName` — Document name to query history for, typically without extension. A trailing `.md` or `.mdx` is stripped automatically.","- `branch` (optional) — Branch name (default: current branch)","- `limit` (optional) — Maximum entries to return (default 50, max 200)","- `offset` (optional) — Number of entries to skip for pagination (default 0)",'- `type` (optional) — Filter by entry type: "checkpoint", "upstream", or "wip"',"- `author` (optional) — Filter to entries by this author name or email","- `excludeAuthor` (optional) — Exclude entries by this author name or email"].join(`
|
|
171
|
+
`);function nr(e,t){e.tool(`get_history`,tr,{docName:N.string().describe(`Document name to query history for`),branch:N.string().optional().describe(`Branch name (default: current branch)`),limit:N.number().int().min(1).max(200).optional().describe(`Maximum entries to return (default 50, max 200)`),offset:N.number().int().min(0).optional().describe(`Number of entries to skip for pagination (default 0)`),type:N.enum([`checkpoint`,`upstream`,`wip`]).optional().describe(`Filter by entry type`),author:N.string().optional().describe(`Filter to entries by this author name or email`),excludeAuthor:N.string().optional().describe(`Exclude entries by this author name or email`)},async e=>{let n=await L(t.serverUrl);if(!n)return P(I,!0);let r=R(e.docName);if(!r.ok)return P(r.error,!0);let i=new URLSearchParams;i.set(`docName`,r.docName),e.branch&&i.set(`branch`,e.branch),e.limit!=null&&i.set(`limit`,String(e.limit)),e.offset!=null&&i.set(`offset`,String(e.offset)),e.type&&i.set(`type`,e.type),e.author&&i.set(`author`,e.author),e.excludeAuthor&&i.set(`excludeAuthor`,e.excludeAuthor);let a=await z(n,`/api/history?${i.toString()}`);if(!a.ok)return P(`Error: ${a.error}`,!0);let{ok:o,...s}=a,c=await V(r.docName,{config:t.config,resolveCwd:t.resolveCwd});return F(JSON.stringify(s,null,2),{...s,previewUrl:c?.url??null,...c?{previewUrlSource:c.source}:{}})})}const rr=[`[Requires: Hocuspocus server] Find the most-linked pages in the knowledge graph.`,`Returns hub pages ordered by inbound link count as JSON.`,``,`**Parameters:**`,"- `limit` (optional) — Maximum number of hubs to return (default 20)"].join(`
|
|
172
|
+
`);function ir(e,t){e.tool(`get_hubs`,rr,{limit:N.number().int().positive().optional().describe(`Maximum number of hubs to return`)},async e=>{let n=await L(t.serverUrl);if(!n)return P(I,!0);let r=await z(n,`/api/hubs${e.limit?`?limit=${encodeURIComponent(String(e.limit))}`:``}`);if(!r.ok)return P(`Error: ${r.error}`,!0);let{ok:i,...a}=r,o=a,{resolve:s,ui:c}=await H(t),l=(o.hubs??[]).map(e=>{let t=typeof e.docName==`string`?e.docName:null,n=t?s(t):null;return{...e,previewUrl:n?.url??null,...n?{previewUrlSource:n.source}:{}}}),u={...o,hubs:l,ui:c};return F(JSON.stringify(u,null,2),u)})}const ar=[`[Requires: Hocuspocus server] Find disconnected pages in the knowledge graph.`,`Returns orphaned pages as JSON.`,``,`**Parameters:**`,"- `mode` (optional) — Orphan lens: `incoming`, `outgoing`, or `both` (default `both`)"].join(`
|
|
173
|
+
`);function or(t,n){t.tool(`get_orphans`,ar,{mode:N.enum(e).optional().describe(`Filter which type of graph disconnection to surface`)},async e=>{let t=await L(n.serverUrl);if(!t)return P(I,!0);let r=await z(t,`/api/orphans${e.mode?`?mode=${encodeURIComponent(e.mode)}`:``}`);if(!r.ok)return P(`Error: ${r.error}`,!0);let{ok:i,...a}=r,o=a,{resolve:s,ui:c}=await H(n),l=(o.orphans??[]).map(e=>{let t=typeof e.docName==`string`?e.docName:null,n=t?s(t):null;return{...e,previewUrl:n?.url??null,...n?{previewUrlSource:n.source}:{}}}),u={...o,orphans:l,ui:c};return F(JSON.stringify(u,null,2),u)})}const sr=["Return a browser URL for the given wiki docName. Agents should call this IMMEDIATELY BEFORE `write_document` / `edit_document` so they can navigate the preview browser to the doc first and watch the CRDT edit land live.",``,`**Parameters:**`,"- `docName` — Wiki doc name, typically without extension.",``,"Returns `{ previewUrl, previewUrlSource }` (source: `env` / `lock` / `config`). When no source is configured, returns `{ previewUrl: null }` and the agent may proceed without navigation."].join(`
|
|
174
|
+
`);async function cr(e,t){let n=R(e.docName);if(!n.ok)return{ok:!1,error:n.error};let r=n.docName,i=await t.resolveCwd(),s=o(t.config,i),c;try{c=f({projectDir:i,contentDir:s,includePatterns:t.config.content.include,excludePatterns:t.config.content.exclude})}catch(e){return{ok:!1,error:`Cannot evaluate content filter: ${e instanceof Error?e.message:String(e)}`}}if(![`${r}.md`,`${r}.mdx`].some(e=>!c.isExcluded(e)))return{ok:!1,error:`Error: docName "${r}" is not inside content.include globs (${t.config.content.include.join(`, `)}). This tool only returns URLs for docs that match those globs.`};let l=a(s),u=U(r,{config:t.config,lockDir:l});return u?{ok:!0,result:{previewUrl:u.url,previewUrlSource:u.source},text:`Preview URL for "${r}" (source: ${u.source}):\n${u.url}`}:{ok:!0,result:{previewUrl:null},text:`No preview URL resolvable for "${r}". The server is likely not running yet. Start it with \`open-knowledge start\` (or \`preview_start\`), then **call \`get_preview_url\` again** — the server writes a lock file that this tool reads to resolve the URL. NEVER guess or manually construct the preview URL. Alternatively, set \`OPEN_KNOWLEDGE_PREVIEW_BASE_URL\` or add \`preview.baseUrl\` to .open-knowledge/config.yml.`}}function lr(e,t){e.tool(`get_preview_url`,sr,{docName:N.string().min(1)},async e=>{let n=await cr(e,t);return n.ok?F(n.text,n.result):P(n.error,!0)})}function ur(e,n){return`Capture this external source into the project knowledge base as raw reference material. **Raw preservation only** — no summary, no analysis, no interpretation. Summarizing is the job of the \`research\` tool later.
|
|
175
|
+
|
|
176
|
+
Source: ${e}
|
|
177
|
+
|
|
178
|
+
The content directory for this project is **\`${n}\`** (from \`${t}/config.yml\`).
|
|
179
|
+
|
|
180
|
+
## Step 1: Fetch the content
|
|
181
|
+
|
|
182
|
+
- **URL** → use your available web fetch tool.
|
|
183
|
+
- **Local file or attachment** → use your native file read tool.
|
|
184
|
+
|
|
185
|
+
If the fetch fails (login wall, 401/402/403/429, anti-scraping block), **stop and ask the user to paste the content directly**. Do not save a stub, an error page, or a login wall as "raw content" — that poisons the knowledge base.
|
|
186
|
+
|
|
187
|
+
If the fetcher returns an obvious *summary* of the page instead of the raw content (some LLM-backed fetch tools do this), note it and try a raw alternative (e.g., \`curl -sL <url>\` for text-heavy sources, or ask the user to paste). The goal is verbatim bytes.
|
|
188
|
+
|
|
189
|
+
## Step 2: Save as raw reference material
|
|
190
|
+
|
|
191
|
+
Write the content as a markdown file inside the content directory (\`${n}\`). The convention if this project adopts the three-tier lifecycle is to group raw sources together — e.g., an \`external-sources/\` subfolder under the content dir — but it's just a convention. Use whatever the project's existing docs layout calls for. If unsure, ask the user or default to a sensible top-level subfolder name.
|
|
192
|
+
|
|
193
|
+
Name the file with a kebab-case slug from the source's own title (e.g., \`karpathy-llm-wiki.md\`, \`anthropic-prompt-caching.md\`). Don't put dates in the filename — dates go in frontmatter.
|
|
194
|
+
|
|
195
|
+
Prepend this frontmatter:
|
|
196
|
+
|
|
197
|
+
\`\`\`yaml
|
|
198
|
+
---
|
|
199
|
+
title: Original title of the source
|
|
200
|
+
description: One-line summary from the source (their words, not yours)
|
|
201
|
+
source_url: https://example.com/article # for URLs
|
|
202
|
+
source_path: ./relative/path/to/file.pdf # for local files
|
|
203
|
+
date_fetched: YYYY-MM-DD
|
|
204
|
+
author: Original author if known
|
|
205
|
+
tags:
|
|
206
|
+
- relevant-topic
|
|
207
|
+
---
|
|
208
|
+
\`\`\`
|
|
209
|
+
|
|
210
|
+
## Step 3: Preserve the content faithfully
|
|
211
|
+
|
|
212
|
+
- **Keep** headings, lists, quotes, code blocks, images, citations, references
|
|
213
|
+
- **Strip** obvious boilerplate: nav menus, cookie banners, ads, footer links, "related articles" widgets
|
|
214
|
+
- **Do NOT** summarize, critique, paraphrase, or interpret. That's \`research\`'s job.
|
|
215
|
+
- **For very long sources**, consider splitting by major section with cross-references in frontmatter
|
|
216
|
+
|
|
217
|
+
## Step 4: Verify
|
|
218
|
+
|
|
219
|
+
- File exists at the chosen location under the content directory
|
|
220
|
+
- Valid frontmatter (at minimum \`title\`, \`description\`, and either \`source_url\` or \`source_path\`)
|
|
221
|
+
- \`exec("ls <dir>")\` should list the file with enrichment
|
|
222
|
+
|
|
223
|
+
## Non-goals
|
|
224
|
+
|
|
225
|
+
- **No analysis** — don't interpret, compare, or critique the source
|
|
226
|
+
- **No promotion to a canonical article** — that's the \`consolidate\` tool's job, later
|
|
227
|
+
- **No deduplication** — if the same source is ingested twice, let it happen; cleanup is a separate concern
|
|
228
|
+
|
|
229
|
+
Full convention: read \`${t}/AGENTS.md\`.`}const dr=[`Fetch an external source (URL or local file) and save raw content as reference material in the project content directory.`,`Raw preservation only — no analysis or interpretation.`,``,`**Use when:**`,`- Capturing reference material for the project knowledge base`,`- Saving a URL or document for later research`,`- Archiving an external source alongside the codebase`,`- The user shares a URL or document they want preserved`,``,`**Triggers on:**`,`- "ingest", "save this source", "capture this URL", "add to external sources"`,`- User shares a URL, article, or document to preserve in the knowledge base`,`- Research workflow needs raw sources before analysis`].join(`
|
|
230
|
+
`);function fr(e,t){e.tool(`ingest`,dr,{source:N.string().describe(`URL, file path, or identifier of the source to ingest`)},e=>F(ur(e.source,t.content.dir),{previewUrl:null}))}function pr(e){return`Initialize a project knowledge base at \`${e}\` for this repository.
|
|
231
|
+
|
|
232
|
+
The content directory for this project is **\`${e}\`** (from \`${t}/config.yml\`).
|
|
233
|
+
|
|
234
|
+
## When to use
|
|
235
|
+
|
|
236
|
+
- First time setting up a knowledge base in a repo where \`${t}/\` does not exist, or where the content directory has no articles yet
|
|
237
|
+
- When onboarding to a new codebase and you want to capture initial understanding for future agent sessions
|
|
238
|
+
|
|
239
|
+
## Steps
|
|
240
|
+
|
|
241
|
+
### 1. Verify the structure exists
|
|
242
|
+
|
|
243
|
+
If \`${t}/\` does not already exist, scaffold it from a terminal (not from within this MCP session — scaffolding is a CLI operation, not a tool call):
|
|
244
|
+
|
|
245
|
+
\`\`\`bash
|
|
246
|
+
open-knowledge init
|
|
247
|
+
# or: npx @inkeep/open-knowledge init
|
|
248
|
+
\`\`\`
|
|
249
|
+
|
|
250
|
+
That creates \`${t}/\` with \`config.yml\`, \`AGENTS.md\`, \`.gitignore\`, and wires this MCP server into \`.mcp.json\`. It does **not** scaffold content subdirectories — knowledge lives wherever \`content.dir\` points (currently \`${e}\`). After scaffolding, reconnect the MCP client so the server picks up the new config.
|
|
251
|
+
|
|
252
|
+
If you have \`Bash\` tool access, you can shell out: \`bash\` → \`npx @inkeep/open-knowledge init\`, then prompt the user to reconnect.
|
|
253
|
+
|
|
254
|
+
### 2. Read the codebase systematically
|
|
255
|
+
|
|
256
|
+
Explore the project to build understanding before writing anything:
|
|
257
|
+
|
|
258
|
+
1. **Start broad** — Read \`README.md\`, \`CLAUDE.md\` or \`AGENTS.md\`, \`package.json\` (or equivalent manifest), and any existing prose documentation
|
|
259
|
+
2. **Map the structure** — Use \`exec("ls <dir>")\` for directories under \`content.dir\` that match \`content.include\` (returns folder metadata — file counts, subdirs, most-recent md) and native \`Glob\`/\`ls\` for source code
|
|
260
|
+
3. **Read key files** — Entry points, config files, core modules, type definitions, schema files
|
|
261
|
+
4. **Check existing docs** — \`specs/\`, \`docs/\`, \`ARCHITECTURE.md\`, or any prose dirs: use \`exec\` for every \`.md\` / \`.mdx\` that matches \`content.include\` (under shipped defaults, that is essentially **all** markdown in the repo). Use native \`Read\`/\`Glob\` only for source code / non-markdown, or when MCP is unavailable
|
|
262
|
+
5. **Review recent history** — \`git log --oneline -30\` for recent decisions and direction
|
|
263
|
+
|
|
264
|
+
Don't rush this phase. The quality of articles depends on the quality of understanding.
|
|
265
|
+
|
|
266
|
+
### 3. Synthesize knowledge articles
|
|
267
|
+
|
|
268
|
+
Write articles inside the content directory (\`${e}\`). Organization is up to the project — no enforced structure:
|
|
269
|
+
|
|
270
|
+
- If the project already has a docs layout (\`docs/\`, \`guides/\`, topic-grouped subfolders), follow it
|
|
271
|
+
- If starting fresh, group by topic (e.g., \`architecture/\`, \`auth/\`, \`data-model/\`) — create subfolders as needed; no scaffolded directories exist by default
|
|
272
|
+
- **One topic per article** — keep articles focused (e.g., "Auth Architecture", not "Everything About The Backend")
|
|
273
|
+
- **Add proper frontmatter**:
|
|
274
|
+
|
|
275
|
+
\`\`\`yaml
|
|
276
|
+
---
|
|
277
|
+
title: Article Title
|
|
278
|
+
description: One-line summary
|
|
279
|
+
tags:
|
|
280
|
+
- relevant
|
|
281
|
+
- tags
|
|
282
|
+
---
|
|
283
|
+
\`\`\`
|
|
284
|
+
|
|
285
|
+
- **Write for future agents** — Explain the *why* and *how things connect*, not just *what exists*. Source code already says what exists.
|
|
286
|
+
- **Keep articles concise** — 100-300 lines is a good target. Split larger topics into multiple articles.
|
|
287
|
+
- **Link to source code** by file path when helpful, but don't duplicate code into articles.
|
|
288
|
+
|
|
289
|
+
### 4. Link aggressively
|
|
290
|
+
|
|
291
|
+
This is the single highest-leverage step for a new knowledge base. Articles that don't link each other are isolated documents; articles that cross-link form a navigable graph.
|
|
292
|
+
|
|
293
|
+
- **Every noun-phrase that names another article is a \`[[Page Name]]\` link.** Write links inline as you draft — don't save linking for a second pass. Prefer \`[[Page]]\` over Markdown \`[text](./page.md)\` since only wiki-links participate in the backlinks index.
|
|
294
|
+
- **Redlinks are fine — write them eagerly.** If you're drafting "Auth Architecture" and mention "session tokens", write \`[[Session Tokens]]\` even if that page doesn't exist yet. The redlink is a to-do list for the next pass.
|
|
295
|
+
- **Build hub articles.** Pick 2–3 broad topics (e.g., "Architecture Overview", "Data Model") and have them link out to the specific articles below them. Hubs are what agents discover first — their outbound links are how everything else becomes findable.
|
|
296
|
+
- **Cross-link siblings.** In each subfolder, 2–3 closely-related articles should link each other under a "See also" section or inline.
|
|
297
|
+
- **After writing a batch of articles, verify link density:** \`exec("cat <article>.md")\` on a sample and confirm the rendered output shows a healthy backlinks list. An article with zero backlinks is an island — link back to it from somewhere.
|
|
298
|
+
|
|
299
|
+
### 5. Suggested starting topics
|
|
300
|
+
|
|
301
|
+
Depending on the project, consider articles covering:
|
|
302
|
+
|
|
303
|
+
- **Architecture overview** — High-level system design, key components, how they connect
|
|
304
|
+
- **Data model** — Core entities, relationships, database schema
|
|
305
|
+
- **API surface** — Endpoints, protocols, authentication model
|
|
306
|
+
- **Deploy & infrastructure** — How to deploy, CI/CD, environments
|
|
307
|
+
- **Development workflow** — How to run locally, test conventions, contribution flow
|
|
308
|
+
- **Key decisions** — Architecture decisions and their rationale (the "why")
|
|
309
|
+
- **Domain concepts** — Business domain terms and their meaning in code
|
|
310
|
+
|
|
311
|
+
### 6. Verify
|
|
312
|
+
|
|
313
|
+
- \`exec("ls ${e}")\` shows the articles you wrote, each with title/description/tags enrichment
|
|
314
|
+
- \`exec("grep -rn <common-codebase-term> ${e}")\` finds the expected articles
|
|
315
|
+
- \`exec("cat <article>.md")\` on a sample shows the article plus its backlinks section — if the backlinks list is empty, go back to step 4 and link from somewhere
|
|
316
|
+
- Every article has frontmatter with at minimum \`title\` and \`description\`
|
|
317
|
+
|
|
318
|
+
## Non-goals
|
|
319
|
+
|
|
320
|
+
- **Don't produce a file-by-file code index** — the agent reads source code directly when needed
|
|
321
|
+
- **Don't copy source code into articles** — link by path
|
|
322
|
+
- **Don't write articles for things that change often** (dependency versions, file counts); focus on stable understanding
|
|
323
|
+
- **Don't create scaffolded subfolders you won't fill** — empty \`articles/\`/\`research/\`/\`external-sources/\` folders are clutter; organize as you actually need
|
|
324
|
+
|
|
325
|
+
Full convention: read \`${t}/AGENTS.md\`.`}const mr=[`Bootstrap the project knowledge base by reading the codebase and writing initial knowledge articles grouped by topic.`,``,`**Use when:**`,`- Setting up a knowledge base for the first time in a repo`,`- Onboarding to a new codebase and capturing initial understanding`,`- The content directory is empty or sparse`,``,`**Triggers on:**`,`- "init content", "bootstrap knowledge base", "populate articles", "set up project knowledge"`,`- User asks to document or catalog the codebase`].join(`
|
|
326
|
+
`);function hr(e,t){e.tool(`init-content`,mr,async()=>{let e=pr(t.config.content.dir),n=await t.resolveCwd(),r=a(o(t.config,n));return F(e,{ui:yt({config:t.config,lockDir:r})})})}const gr=[`[Requires: Hocuspocus server] List available documents from the Hocuspocus server.`,`Returns document names, optionally filtered by directory.`,``,`**Parameters:**`,"- `dir` (optional) — Filter to documents in this directory"].join(`
|
|
327
|
+
`);function _r(e,t){e.tool(`list_documents`,gr,{dir:N.string().optional().describe(`Optional directory to filter documents`)},async e=>{let n=await L(t.serverUrl);if(!n)return P(I,!0);let r=await z(n,`/api/documents${e.dir?`?dir=${encodeURIComponent(e.dir)}`:``}`);if(!r.ok)return P(`Error: ${r.error}`,!0);let{ok:i,...a}=r,o=a,{resolve:s,ui:c}=await H(t),l=(o.documents??[]).map(e=>{let t=typeof e.docName==`string`?e.docName:null,n=t?s(t):null;return{...e,previewUrl:n?.url??null,...n?{previewUrlSource:n.source}:{}}}),u={...o,documents:l,ui:c};return F(JSON.stringify(u,null,2),u)})}const vr=[`Read a wiki file with enriched context: contents + frontmatter metadata + recent shadow-repo activity (agent vs human attribution) + backlink/forward-link context.`,``,`**Use when:**`,`- Loading an article for context`,`- Understanding who changed a file recently and whether it was an agent or human`,`- Seeing how this page links out and what links back to it`,``,"Prefer this over your native `Read` for wiki files — one call returns what otherwise takes 3-4.",``,`**Parameters:**`,"- `path` — Project-root-relative path to the file, including extension (e.g. `articles/auth/sso.md`). To pass this document to `edit_document` / `write_document` / `get_backlinks`, strip the extension (they take extension-less `docName`).","- `since` (reserved) — Reserved for shadow-log since-filter; currently unused."].join(`
|
|
328
|
+
`);function yr(e){if(!e||e.length===0)return``;let t=[``,`### Recent activity (OK edits)`,``];for(let n of e){let e=n.writerClassification===`agent`?`agent: ${n.writerName}`:n.writerClassification===`human`?`human: ${n.writerName}`:`${n.writerClassification}: ${n.writerName}`,r=n.hash.slice(0,7);t.push(`- ${r} ${n.date} [${e}] ${n.message}`)}return t.join(`
|
|
329
|
+
`)}function br(e){if(!e||e.length===0)return``;let t=[``,`### Commit history (project git)`,``];for(let n of e){let e=n.hash.slice(0,7);t.push(`- ${e} ${n.date} ${n.authorName} — ${n.subject}`)}return t.join(`
|
|
330
|
+
`)}function xr(e){if(!e||e.length===0)return``;let t=[``,`### Backlinks (${e.length})`,``];for(let n of e){let e=n.title?` — "${n.title}"`:``,r=n.snippet?` — "${n.snippet}"`:``;t.push(`- ${n.source}${e}${r}`)}return t.join(`
|
|
331
|
+
`)}function Sr(e){if(!e||e.length===0)return``;let t=[``,`### Forward links (${e.length})`,``];for(let n of e){if(n.kind===`external`){let e=n.title?` — "${n.title}"`:``,r=n.snippet?` — "${n.snippet}"`:``;t.push(`- ${n.url}${e}${r}`);continue}let e=n.title?` — "${n.title}"`:``,r=n.snippet?` — "${n.snippet}"`:``;t.push(`- ${n.docName}${e}${r}`)}return t.join(`
|
|
332
|
+
`)}function Cr(e){return e.replace(/^\.\//,``).replace(/^\/+/,``)}function wr(e){return e.replace(/\.(md|mdx)$/i,``)}async function Tr(e,t){let n=await t.resolveCwd(e.cwd),r=Cr(e.path),i=j(n,r),a=t.config.mcp.tools.read_document.historyDepth,o=await L(t.serverUrl),[s,c]=await Promise.all([ve(i,`utf-8`),An(r,{projectDir:n,serverUrl:o,historyDepth:a,folderRules:t.config.folders},{includeRichFields:!0})]),l=r.split(`/`).pop()?.replace(/\.md$/,``).replace(/\.mdx$/,``)??r,u=c.title??l,d=c.description??``,f=c.tags,p=[];p.push(`## ${u}`),d&&p.push(`**Description:** ${d}`),f.length>0&&p.push(`**Tags:** ${f.join(`, `)}`),p.push(`**Path:** ${r}`);let m=yr(c.history);m&&p.push(m);let h=br(c.projectHistory);h&&p.push(h);let g=xr(c.backlinks);g&&p.push(g);let _=Sr(c.forwardLinks);return _&&p.push(_),p.push(``,`### Content`,``,s),p.join(`
|
|
333
|
+
`)}function Er(e,t){e.tool(`read_document`,vr,{path:N.string().describe(`Project-root-relative path to the file`),since:N.string().optional().describe(`Reserved; currently unused (§15 Future Work)`),cwd:N.string().optional().describe("Absolute host path to resolve `path` against. Defaults to the MCP client's first advertised root.")},async e=>{try{let n=await Tr(e,t),r=await V(wr(Cr(e.path)),{config:t.config,resolveCwd:t.resolveCwd});return r?F(n,{previewUrl:r.url,previewUrlSource:r.source}):F(n,{previewUrl:null})}catch(e){return P(`Error: ${e instanceof Error?e.message:String(e)}`,!0)}})}const Dr=["[Requires: Hocuspocus server] Rename a document through the managed rename flow at `POST /api/rename`.",`Renames the target document and rewrites inbound wiki-links plus supported internal inline Markdown links in affected docs.`,``,`**Parameters:**`,"- `docName` — Current document name, typically without extension. A trailing `.md` or `.mdx` is stripped automatically.","- `newDocName` — New document name, typically without extension. A trailing `.md` or `.mdx` is stripped automatically."].join(`
|
|
334
|
+
`);function Or(e){return Array.isArray(e)?e.flatMap(e=>{if(!e||typeof e!=`object`)return[];let{fromDocName:t,toDocName:n}=e;return typeof t==`string`&&typeof n==`string`?[{fromDocName:t,toDocName:n}]:[]}):[]}function kr(e){return Array.isArray(e)?e.flatMap(e=>{if(!e||typeof e!=`object`)return[];let{docName:t,rewrites:n}=e;return typeof t==`string`&&typeof n==`number`?[{docName:t,rewrites:n}]:[]}):[]}function Ar(e,t,n=`${t}s`){return e===1?t:n}function jr(e,t){e.tool(`rename_document`,Dr,{docName:N.string().describe(`Current document name`),newDocName:N.string().describe(`New document name`)},async e=>{let n=await L(t.serverUrl);if(!n)return P(I,!0);let r=R(e.docName);if(!r.ok)return P(r.error,!0);let i=R(e.newDocName);if(!i.ok)return P(i.error,!0);let a=await B(n,`/api/rename`,{docName:r.docName,newDocName:i.docName});if(!a.ok){let e=typeof a.error==`string`?a.error:`Rename failed`,t={ok:!1,error:e};return F(`Error: ${e}`,t,!0)}let o=Or(a.renamed),s=kr(a.rewrittenDocs),c=o.map(({fromDocName:e,toDocName:t})=>`${e} -> ${t}`).join(`, `)||`${r.docName} -> ${i.docName}`,l=s.length===0?`No inbound links required updates.`:`Rewrote ${s.length} ${Ar(s.length,`document`)}.`,u={config:t.config,resolveCwd:t.resolveCwd},d=await V(i.docName,u),f=await V(r.docName,u),p={ok:!0,renamed:o,rewrittenDocs:s,previewUrl:d?.url??null,...d?{previewUrlSource:d.source}:{},...f?{previousPreviewUrl:f.url}:{}};return F(`Renamed ${c}. ${l}`,p)})}function Mr(e,n){return`Research this topic and write provisional findings inside the project content directory. Research is **provisional, not canonical** — it captures findings, trade-offs, and open questions at a point in time. Promoting to canonical articles is a deliberate later step (via the \`consolidate\` tool).
|
|
335
|
+
|
|
336
|
+
Topic: ${e}
|
|
337
|
+
|
|
338
|
+
The content directory for this project is **\`${n}\`** (from \`${t}/config.yml\`).
|
|
339
|
+
|
|
340
|
+
## When to use this workflow
|
|
341
|
+
|
|
342
|
+
- A developer asks you to research a topic (e.g., "research CRDT alternatives for our editor")
|
|
343
|
+
- You're exploring a decision space before committing to an approach
|
|
344
|
+
- Spec conversations and exploratory work that isn't ready to be canonical yet
|
|
345
|
+
- You need to synthesize multiple sources into a structured analysis
|
|
346
|
+
|
|
347
|
+
## Principle: provisional, not canonical
|
|
348
|
+
|
|
349
|
+
Research articles are **provisional**. They capture what you found at a point in time. They are not the source of truth. When decisions solidify, research gets promoted to canonical articles via the \`consolidate\` tool (or manually). Until then, research is a place where uncertainty lives.
|
|
350
|
+
|
|
351
|
+
## Steps
|
|
352
|
+
|
|
353
|
+
### 1. Scope the research
|
|
354
|
+
|
|
355
|
+
Understand what the developer is actually asking:
|
|
356
|
+
|
|
357
|
+
- **What specific question needs answering?** If the prompt was vague, narrow it before gathering sources.
|
|
358
|
+
- **What's the decision this research will inform?** Research without a decision context tends to meander.
|
|
359
|
+
- **What's already known?** Use \`exec("grep -rn <topic-keyword> <content-dir>")\` to find prior work — grep results come with per-file enrichment (title, description, tags) so you can judge relevance without opening each. If prior work exists, use \`exec("cat <path>")\` to load it with full rich context (frontmatter + shadow-repo activity + project git history + backlinks) — you may be iterating on an existing research doc rather than creating a new one.
|
|
360
|
+
|
|
361
|
+
If the topic is itself a URL, treat that URL as the anchor source and widen from there. If it's a question, figure out 3–8 sources that could plausibly inform it.
|
|
362
|
+
|
|
363
|
+
### 2. Gather sources via \`ingest\`
|
|
364
|
+
|
|
365
|
+
Invoke the \`ingest\` tool for each relevant URL, paper, or document. **Typical research pulls 3–8 sources.** Too few and the synthesis is thin; too many and you'll be reading for the rest of the session.
|
|
366
|
+
|
|
367
|
+
**Don't skip \`ingest\`.** Raw sources must be preserved before analysis — it separates capture from interpretation and makes the research reproducible. A research article without preserved sources is just opinion; a research article with preserved sources is a trail someone else can follow.
|
|
368
|
+
|
|
369
|
+
If a fetch fails for a source you specifically need, stop and ask the user to paste it — don't silently drop it.
|
|
370
|
+
|
|
371
|
+
### 3. Read and analyze
|
|
372
|
+
|
|
373
|
+
Read each ingested source carefully. Also read:
|
|
374
|
+
|
|
375
|
+
- **Existing canonical articles** on the topic — use \`exec("cat <path>")\` (rich enrichment: frontmatter + shadow-repo activity + project git history + backlinks in one call)
|
|
376
|
+
- **Prior research** on adjacent topics — same: \`exec("cat <path>")\` for Open Knowledge markdown
|
|
377
|
+
- **Relevant source code** for projects where research is grounded in the codebase (read entry points, core modules, and any specs that touch the topic) — native \`Read\` is fine for \`.ts\` / \`.js\` / etc.; use \`exec\` for \`.md\` / \`.mdx\` under \`content.include\`
|
|
378
|
+
- **Project context** — project-root docs, \`specs/\`, \`reports/\`, or wherever the project organizes design material
|
|
379
|
+
|
|
380
|
+
Take notes on:
|
|
381
|
+
|
|
382
|
+
- **Key claims** and their evidence
|
|
383
|
+
- **Trade-offs** between options
|
|
384
|
+
- **Contradictions** between sources
|
|
385
|
+
- **Unknowns** and open questions
|
|
386
|
+
- **Relevance** to the specific decision at hand
|
|
387
|
+
|
|
388
|
+
### 4. Write the research article
|
|
389
|
+
|
|
390
|
+
Save the file as a markdown document inside the content directory. The path convention depends on the project:
|
|
391
|
+
|
|
392
|
+
- If the project has adopted the three-tier lifecycle (external-sources → research → articles), save under a \`research/\` folder relative to the content dir (\`<content-dir>/research/<slug>.md\`)
|
|
393
|
+
- If the project has an existing docs/reports/specs layout, save alongside that layout in a location that matches the project's conventions
|
|
394
|
+
- When a research topic is large enough to warrant a subfolder, create one (\`research/<topic>/<subtopic>.md\`)
|
|
395
|
+
|
|
396
|
+
Use descriptive kebab-case filenames: \`crdt-alternatives-for-editor.md\`, \`llm-maintained-wikis-pattern.md\`.
|
|
397
|
+
|
|
398
|
+
Frontmatter:
|
|
399
|
+
|
|
400
|
+
\`\`\`yaml
|
|
401
|
+
---
|
|
402
|
+
title: Descriptive title
|
|
403
|
+
description: One-line summary of the research question
|
|
404
|
+
status: provisional
|
|
405
|
+
date: YYYY-MM-DD
|
|
406
|
+
tags:
|
|
407
|
+
- research
|
|
408
|
+
- topic-tag
|
|
409
|
+
sources:
|
|
410
|
+
- <path-to-ingested-source-1>.md
|
|
411
|
+
- <path-to-ingested-source-2>.md
|
|
412
|
+
---
|
|
413
|
+
\`\`\`
|
|
414
|
+
|
|
415
|
+
Structure:
|
|
416
|
+
|
|
417
|
+
\`\`\`markdown
|
|
418
|
+
## Question
|
|
419
|
+
|
|
420
|
+
[What specific question is this research answering? Be precise.]
|
|
421
|
+
|
|
422
|
+
## Context
|
|
423
|
+
|
|
424
|
+
[Why does this matter? What decision does it inform? Who is the reader?]
|
|
425
|
+
|
|
426
|
+
## Findings
|
|
427
|
+
|
|
428
|
+
[Main findings organized by theme, option, or criterion. Cite sources by path.]
|
|
429
|
+
|
|
430
|
+
### Option A / Theme 1
|
|
431
|
+
|
|
432
|
+
- Pros
|
|
433
|
+
- Cons
|
|
434
|
+
- Evidence (with source links)
|
|
435
|
+
|
|
436
|
+
### Option B / Theme 2
|
|
437
|
+
|
|
438
|
+
...
|
|
439
|
+
|
|
440
|
+
## Trade-offs
|
|
441
|
+
|
|
442
|
+
[What you gain vs. lose with each option. A comparison table often helps.]
|
|
443
|
+
|
|
444
|
+
## Open questions
|
|
445
|
+
|
|
446
|
+
[What you still don't know — candidates for further research, prototyping, or decisions that need human judgment.]
|
|
447
|
+
|
|
448
|
+
## Tentative recommendation
|
|
449
|
+
|
|
450
|
+
[Your best guess, clearly marked as tentative. Explain the reasoning so a future reader can re-evaluate when new information arrives.]
|
|
451
|
+
\`\`\`
|
|
452
|
+
|
|
453
|
+
### 5. Link aggressively
|
|
454
|
+
|
|
455
|
+
Research articles are discovery surfaces — they should link out to **every** related document (sources, sibling research, prior canonical articles, adjacent topics). Under-linked research becomes an island that nobody finds.
|
|
456
|
+
|
|
457
|
+
- Every noun-phrase that names another document should be a \`[[Page Name]]\` link, not plain prose. Prefer \`[[Page]]\` over Markdown \`[text](./page.md)\` — only wiki-links participate in the backlinks index.
|
|
458
|
+
- Link sources inline where you cite them, not just in the \`sources:\` frontmatter list. "According to \`[[llm-agents-dust-tt]]\`..." is stronger than a bare path.
|
|
459
|
+
- Cross-link sibling research: if an adjacent topic has its own research doc, link it in "Open questions" or inline. Readers following one thread should find the others.
|
|
460
|
+
- **Redlinks are fine.** If the research surfaces a concept that needs its own page later, \`[[name it now]]\` — the redlink is a breadcrumb for future work.
|
|
461
|
+
- Update 1–2 closely-related existing pages to link back to this research (usually under "Further reading" or "See also").
|
|
462
|
+
|
|
463
|
+
### 6. Mark it provisional
|
|
464
|
+
|
|
465
|
+
- Set \`status: provisional\` in frontmatter
|
|
466
|
+
- Use language like "tentative", "initial findings", "based on current understanding"
|
|
467
|
+
- Do NOT write research articles as if they were canonical — that's misleading to future readers
|
|
468
|
+
- If you're uncertain, say so explicitly. Research is the layer where uncertainty is allowed to live.
|
|
469
|
+
|
|
470
|
+
### 7. Verify
|
|
471
|
+
|
|
472
|
+
- File exists at the chosen path under the content directory
|
|
473
|
+
- Has frontmatter with \`title\`, \`description\`, \`status: provisional\`, \`date\`, and a \`sources\` list
|
|
474
|
+
- \`exec("ls <dir>")\` should list the file with enrichment
|
|
475
|
+
- Linked source files from step 2 exist — broken source links mean something went wrong in \`ingest\`
|
|
476
|
+
|
|
477
|
+
## Non-goals
|
|
478
|
+
|
|
479
|
+
- **Don't promote to a canonical article** — that's the \`consolidate\` tool's job after the team actually decides
|
|
480
|
+
- **Don't hide uncertainty** — research is where uncertainty lives; be explicit about what you don't know
|
|
481
|
+
- **Don't skip \`ingest\`** — always capture raw sources first, then analyze
|
|
482
|
+
- **Don't overwrite existing research** — if the topic was researched before, either iterate on the existing file or create a clearly-named successor (e.g., \`crdt-alternatives-2.md\`) and mark the old one as superseded
|
|
483
|
+
|
|
484
|
+
Full convention: read \`${t}/AGENTS.md\`.`}const Nr=[`Analyze a topic by gathering sources via ingest and writing provisional findings into the project content directory.`,`Provisional, not canonical — findings live here until decisions solidify.`,``,`**Use when:**`,`- Researching a topic before committing to an approach`,`- Exploring a decision space or comparing alternatives`,`- Synthesizing multiple sources into structured analysis`,`- Spec conversations and exploratory work that is not yet canonical`,``,`**Triggers on:**`,`- "research", "investigate", "compare options for", "analyze alternatives"`,`- User asks to explore trade-offs, gather evidence, or evaluate approaches`,`- A decision needs structured analysis grounded in external sources`].join(`
|
|
485
|
+
`);function Pr(e,t){e.tool(`research`,Nr,{topic:N.string().describe(`The topic, question, or anchor URL to research`)},e=>F(Mr(e.topic,t.content.dir),{previewUrl:null}))}const Fr=[`[Requires: Hocuspocus server] Restore a document to a historical version via the CRDT layer.`,`The restore is append-only — it creates a new version with the old content,`,`preserving all history. All connected editors see the change in real-time.`,``,`**Parameters:**`,"- `docName` — Document name to restore, typically without extension. A trailing `.md` or `.mdx` is stripped automatically.","- `commitSha` — The 40-character SHA of the shadow repo commit to restore to."," Use `get_history` to find available versions."].join(`
|
|
486
|
+
`);function Ir(e,t){e.tool(`rollback_to_version`,Fr,{docName:N.string().describe(`Document name to restore`),commitSha:N.string().length(40).regex(/^[0-9a-f]+$/i).describe(`40-character commit SHA from the shadow repo timeline`)},async e=>{let n=await L(t.serverUrl);if(!n)return P(I,!0);let r=R(e.docName);if(!r.ok)return P(r.error,!0);let i=r.docName,a=await z(n,`/api/history/${e.commitSha}?docName=${encodeURIComponent(i)}`);if(!a.ok)return P(`Error: ${a.error??`Version not found`}`,!0);let o=await B(n,`/api/rollback`,{docName:i,commitSha:e.commitSha});if(!o.ok)return P(`Error: ${o.error}`,!0);let s=`Restored "${i}" to version ${e.commitSha.slice(0,8)} (${a.author}, ${a.timestamp}). The change has been applied to all connected editors.`,c=await V(i,{config:t.config,resolveCwd:t.resolveCwd});return F(s,{previewUrl:c?.url??null,...c?{previewUrlSource:c.source}:{}})})}const Lr=[`[Requires: Hocuspocus server] Save a version checkpoint of all documents.`,`Creates a checkpoint commit in the shadow repo and project repo,`,`preserving the current state of all documents. The checkpoint can later`,"be found via `get_history` and restored via `rollback_to_version`."].join(`
|
|
487
|
+
`);function Rr(e,t,n){e.tool(`save_version`,Lr,{},async()=>{let e=await L(t);if(!e)return P(I,!0);let r=n?.current,i=await B(e,`/api/save-version`,{...r?{writers:[{id:`agent-${r.connectionId}`,name:r.displayName,email:`agent-${r.connectionId}@openknowledge.local`}]}:{}});return i.ok?F(`Checkpoint saved. Checkpoint ref: ${i.checkpointRef}`,{checkpointRef:i.checkpointRef,previewUrl:null}):P(`Error: ${i.error}`,!0)})}const zr=[`Search wiki content with metadata-enriched results. Matches are grouped by file; each file is annotated with its title, description, and tags so you can judge relevance without opening it first.`,``,`**Use when:**`,`- Finding all articles mentioning a topic`,`- Locating a specific term across the wiki before deciding which file to read`,``,"Prefer this over your native `Grep` for wiki search — results include article metadata so you can skip irrelevant matches without extra reads.",``,`**Parameters:**`,"- `query` — Literal text to search for (fixed-string match, no regex)","- `case_sensitive` (optional, default false) — case-sensitive match"].join(`
|
|
488
|
+
`);function Br(e){let t=new Map;for(let n of e){let e=t.get(n.path);e?e.push(n):t.set(n.path,[n])}return[...t.entries()].map(([e,t])=>({path:e,matches:t}))}async function Vr(e,n){let r=await n.resolveCwd(e.cwd),i=n.config.mcp.tools.search.maxResults,a=n.config.content.include,o=n.config.content.exclude,s=await Rt(e.query,r,{caseInsensitive:!(e.case_sensitive??!1),include:a,exclude:[...o,`node_modules`,`.git`,`.claude`,`.changeset`,t],maxResults:i+1}),c=s.length>i,l=c?s.slice(0,i):s,{resolve:u,ui:d}=await H({config:n.config,resolveCwd:async()=>r});if(l.length===0)return{text:`No matches for "${e.query}".`,structured:{query:e.query,matchCount:0,fileCount:0,truncated:!1,results:[],ui:d}};let f=Br(l),p=new Map,m=await L(n.serverUrl),h=n.config.folders;await Promise.all(f.map(async e=>{try{let t=await An(e.path,{projectDir:r,serverUrl:m,folderRules:h});p.set(e.path,t)}catch{}}));let g=[];g.push(`## Search results for "${e.query}" (${l.length} match${l.length===1?``:`es`} in ${f.length} file${f.length===1?``:`s`})`,``);let _=[];for(let e of f){let t=p.get(e.path),n=t?.title??e.path;g.push(`### ${n} (${e.path})`),t?.tags?.length&&g.push(`Tags: ${t.tags.join(`, `)}`),t?.description&&g.push(`${t.description}`);for(let t of e.matches)g.push(`- Line ${t.line}: ${t.text}`);g.push(``);let r=bt(e.path),i=u(r);_.push({path:e.path,docName:r,title:t?.title??null,description:t?.description??null,tags:t?.tags??[],matches:e.matches.map(e=>({line:e.line,text:e.text})),previewUrl:i?.url??null,...i?{previewUrlSource:i.source}:{}})}return c&&g.push(`_${l.length} of ${s.length}+ matches shown. Raise \`mcp.tools.search.maxResults\` in config.yml to see more._`),{text:g.join(`
|
|
489
|
+
`),structured:{query:e.query,matchCount:l.length,fileCount:f.length,truncated:c,results:_,ui:d}}}function Hr(e,t){e.tool(`search`,zr,{query:N.string().describe(`Literal text to search for`),case_sensitive:N.boolean().optional().describe(`Case-sensitive search (default false)`),cwd:N.string().optional().describe(`Absolute host path to search in. Defaults to the MCP client's first advertised root.`)},async e=>{try{let{text:n,structured:r}=await Vr(e,t);return r?F(n,r):P(n)}catch(e){return P(`Error: ${e instanceof Error?e.message:String(e)}`,!0)}})}const Ur=[`[Requires: Hocuspocus server] Find missing link candidates for a target page.`,"Returns JSON with structure: `{ target: { docName, title, aliases }, mentions: [{ source, excerpt, offset }], truncated }`.","Each mention includes an `offset` you can pass to `edit_document` for precision patching.","When `truncated` is true, the scan hit its time budget before reading every admitted document.",``,`**Parameters:**`,'- `docName` — Target page docName, typically without extension (for example, "articles/project-alpha"). A trailing `.md` or `.mdx` is stripped automatically.'].join(`
|
|
490
|
+
`);function Wr(e,t){e.tool(`suggest_links`,Ur,{docName:N.string().describe(`Target page docName`)},async e=>{let n=await L(t.serverUrl);if(!n)return P(I,!0);let r=R(e.docName);if(!r.ok)return P(r.error,!0);let i=await z(n,`/api/suggest-links?docName=${encodeURIComponent(r.docName)}`);if(!i.ok)return P(`Error: ${i.error}`,!0);let{ok:a,...o}=i,s=await V(r.docName,{config:t.config,resolveCwd:t.resolveCwd});return F(JSON.stringify(o,null,2),{...o,previewUrl:s?.url??null,...s?{previewUrlSource:s.source}:{}})})}const Gr=["**IMPORTANT: Before calling this tool, you MUST first call `get_preview_url` and navigate to the returned URL in your preview browser. If `get_preview_url` returns null, start the server first (`open-knowledge start` or `preview_start`), then call `get_preview_url` again. Do NOT call this tool without the preview open. NEVER manually construct the URL.**",``,`[Requires: Hocuspocus server] Write markdown content to a document via the CRDT layer.`,`Content is applied through Hocuspocus and propagated to all connected editors in real-time.`,``,'**Link liberally.** Every noun-phrase that names another document in this knowledge base should be a `[[wiki-link]]`, not plain prose. Backlinks are the primary navigation surface — underlinked documents become islands. Redlinks (links to pages that don\'t exist yet) are fine; they signal "this should exist." Prefer `[[Page Name]]` over Markdown `[text](./page.md)` — only wiki-links participate in the backlinks index.',``,`**Parameters:**`,'- `docName` — Document name, typically without extension (e.g., "my-doc" or "notes/meeting"). A trailing `.md` or `.mdx` is stripped automatically. New documents are created as `.md` by default; to create a `.mdx` file, first place it on disk, then use this tool for edits.',"- `markdown` — Markdown content to write",'- `position` — Where to insert: "append", "prepend", or "replace"'].join(`
|
|
491
|
+
`);function Kr(e,t){e.tool(`write_document`,Gr,{docName:N.string().describe(`Document name to write to`),markdown:N.string().describe(`Markdown content to write`),position:N.enum([`append`,`prepend`,`replace`]).describe(`Where to insert the content`)},async e=>{let n=await L(t.serverUrl);if(!n)return P(I,!0);let r=R(e.docName);if(!r.ok)return P(r.error,!0);let i=t.identityRef?.current,s=await B(n,`/api/agent-write-md`,{docName:r.docName,markdown:e.markdown,position:e.position,...i?{agentId:i.connectionId,agentName:i.displayName,clientName:i.clientInfo?.name,colorSeed:i.colorSeed}:{}});if(!s.ok)return P(`Error: ${s.error}`,!0);let c=await t.resolveCwd(),l=a(o(t.config,c)),u=U(r.docName,{config:t.config,lockDir:l}),d=(typeof s.subscriberCount==`number`?s.subscriberCount:void 0)===0,f=Array.isArray(s.hints)?s.hints:void 0,p=[`Written successfully (${e.position}).`];if(u&&p.push(`Preview: ${u.url}`),d&&p.push(u?`Warning: no preview is currently attached to "${r.docName}". Open ${u.url} to watch future edits live.`:`Warning: no preview is currently attached to "${r.docName}".`),f)for(let e of f)e.message&&p.push(e.message);let m=p.join(`
|
|
492
|
+
`);if(!u&&!d&&!f)return P(m);let h={};return u&&(h.previewUrl=u.url,h.previewUrlSource=u.source),d&&(h.warning={message:`No preview attached to ${r.docName}.`,previewUrl:u?.url??null}),f&&(h.hints=f),F(m,h)})}const qr={exec:Fn,"init-content":mr,ingest:dr,research:Nr,consolidate:mt,read_document:vr,rename_document:Dr,search:zr,suggest_links:Ur,write_document:Gr,edit_document:xt,get_history:tr,save_version:Lr,rollback_to_version:Fr,list_documents:gr,get_backlinks:Yn,get_forward_links:$n,get_orphans:ar,get_hubs:rr,get_dead_links:Zn,get_preview_url:sr};function Jr(e,t){Jn(e,{resolveCwd:t.resolveCwd,serverUrl:t.serverUrl,config:t.config}),hr(e,{config:t.config,resolveCwd:t.resolveCwd}),fr(e,t.config),Pr(e,t.config),ht(e,t.config),Er(e,{resolveCwd:t.resolveCwd,config:t.config,serverUrl:t.serverUrl}),Hr(e,{resolveCwd:t.resolveCwd,config:t.config,serverUrl:t.serverUrl}),Wr(e,{serverUrl:t.serverUrl,config:t.config,resolveCwd:t.resolveCwd}),Kr(e,{serverUrl:t.serverUrl,config:t.config,resolveCwd:t.resolveCwd,identityRef:t.identityRef}),St(e,{serverUrl:t.serverUrl,config:t.config,resolveCwd:t.resolveCwd,identityRef:t.identityRef}),jr(e,{serverUrl:t.serverUrl,config:t.config,resolveCwd:t.resolveCwd}),nr(e,{serverUrl:t.serverUrl,config:t.config,resolveCwd:t.resolveCwd}),Rr(e,t.serverUrl,t.identityRef),Ir(e,{serverUrl:t.serverUrl,config:t.config,resolveCwd:t.resolveCwd}),_r(e,{serverUrl:t.serverUrl,config:t.config,resolveCwd:t.resolveCwd}),Xn(e,{serverUrl:t.serverUrl,config:t.config,resolveCwd:t.resolveCwd}),er(e,{serverUrl:t.serverUrl,config:t.config,resolveCwd:t.resolveCwd}),or(e,{serverUrl:t.serverUrl,config:t.config,resolveCwd:t.resolveCwd}),ir(e,{serverUrl:t.serverUrl,config:t.config,resolveCwd:t.resolveCwd}),Qn(e,{serverUrl:t.serverUrl,config:t.config,resolveCwd:t.resolveCwd}),lr(e,{resolveCwd:t.resolveCwd,config:t.config})}function X(e){process.stderr.write(`${_(`[mcp]`)} ${e}\n`)}function 
Yr(e){let{dir:t,include:n,exclude:r}=e.content,i=r.length>0?r.map(e=>`\`${e}\``).join(`, `):`(none)`;return`# MCP Instructions v2 — exec-primary (2026-04-13)
|
|
493
|
+
|
|
494
|
+
## This project's content layout (live config)
|
|
495
|
+
|
|
496
|
+
- **Content directory:** \`${t}\`
|
|
497
|
+
- **Include globs:** ${n.map(e=>`\`${e}\``).join(`, `)}
|
|
498
|
+
- **Exclude globs:** ${i}
|
|
499
|
+
|
|
500
|
+
**Path contract (\`config.yml\`):** \`.open-knowledge/config.yml\` (plus optional \`~/.open-knowledge/config.yml\`, with CLI/env overrides) owns the \`content\` keys. The table above is **this MCP session's resolved view** of that contract — same rules, no guessing from folder names. A file is an Open Knowledge document iff it lives under **Content directory**, matches at least one **Include glob**, and is not removed by **Exclude globs** or \`.gitignore\`.
|
|
501
|
+
|
|
502
|
+
Paths in \`exec\` commands are resolved relative to the content directory. The sandbox prevents paths escaping it.
|
|
503
|
+
|
|
504
|
+
**Default rule:** with the usual \`**/*.md\` + \`**/*.mdx\` globs from repo root, **every** such file under the content directory is an Open Knowledge document unless \`.gitignore\` or \`content.exclude\` drops it. Folder names (\`specs/\`, \`reports/\`, …) do not matter. If \`content.include\` was narrowed, only matching paths use \`exec\`.
|
|
505
|
+
|
|
506
|
+
## Navigation — \`exec\` is **mandatory** for Open Knowledge \`.md\` / \`.mdx\` (this server is registered)
|
|
507
|
+
|
|
508
|
+
**STOP:** Do not use the host IDE's \`Read\`, \`Grep\`, or \`Glob\` on in-scope \`.md\` / \`.mdx\` — use \`exec\` (or typed \`read_document\` / \`search\` when you need fixed \`structuredContent\`). Native file tools skip frontmatter, backlinks, shadow-repo activity, and project git history; reserve them for **source code and non-markdown** paths.
|
|
509
|
+
|
|
510
|
+
**MCP clients differ:** Your agent host may expose these tools directly or only through its MCP integration (server name from \`tools/list\`, user-chosen label). **That still counts as this server being available.** Invoke \`exec\` / \`search\` / \`read_document\` the way **your product's docs** describe — not native \`Grep\` on in-scope markdown. Missing a top-level symbol named \`exec\` is not an excuse.
|
|
511
|
+
|
|
512
|
+
\`exec\` provides the same enrichment as the typed tools plus bash composability (pipes, \`head\`, \`find\`). One tool covers reading, listing, grepping, and combining them. **Escape hatch:** only if this MCP server is **not** registered for the workspace, or after an MCP **call** failed — say \`Open Knowledge MCP unavailable:\`. Never use the hatch to skip trying MCP first.
|
|
513
|
+
|
|
514
|
+
For paths **outside** the include globs above, use native tools.
|
|
515
|
+
|
|
516
|
+
Examples:
|
|
517
|
+
|
|
518
|
+
- Read a file: \`exec("cat <path>.md")\` — returns file contents + rich enrichment
|
|
519
|
+
- List a directory: \`exec("ls <dir>")\` — each result comes with per-file enrichment in \`structuredContent.enrichedPaths\`
|
|
520
|
+
- Search: \`exec("grep -rn <term> <dir>")\` — matches + enrichment per matched file
|
|
521
|
+
- Combine: \`exec("grep -rn <term> <dir> | head -5")\` — top 5 matches with full enrichment
|
|
522
|
+
|
|
523
|
+
Allowlist (read-only): \`cat\`, \`ls\`, \`grep\`, \`find\`, \`head\`, \`tail\`, \`wc\`, \`sort\`, \`uniq\`, \`cut\`. Pipes (\`|\`) work between stages. Redirections, subshells, and writes are rejected with a category-specific error telling you the next step.
|
|
524
|
+
|
|
525
|
+
### Scope searches — \`grep\` and \`find\` can be slow if unscoped
|
|
526
|
+
|
|
527
|
+
Recursive \`grep -r\` / \`find\` walk every file under the path, which on a real repo includes source code, build output, and dependencies. For reads inside the content tree, scope deliberately:
|
|
528
|
+
|
|
529
|
+
- **Filter to markdown:** \`grep -rn TERM --include="*.md" <dir>\` — skips every non-md file.
|
|
530
|
+
- **Scope to a known knowledge dir:** \`grep -rn TERM reports/ specs/\` (or whatever folders the project uses) beats \`grep -rn TERM .\`.
|
|
531
|
+
- **Bail early:** pipe through \`| head -20\` for bounded output. The server waits for the pipeline to finish before returning, so unscoped commands block on the slowest stage.
|
|
532
|
+
- **Existence vs. enumeration:** "does X exist in any tracked doc?" is \`grep -rl PATTERN <dir>\` (list matching files, unbounded) — NOT \`grep -rn PATTERN <dir> | head -N\`. When \`head\` truncates, alphabetically-earlier files dominate the output and later files silently go missing. The server surfaces a banner when \`head\` / \`tail\` hits its cap, but the fix is to pick the right command up front.
|
|
533
|
+
- **Auto-prune (built in):** the server transparently adds \`--exclude-dir=\` for \`node_modules\`, \`.git\`, \`dist\`, \`build\`, \`.next\`, \`.turbo\`, \`coverage\`, \`.claude\`, etc. on recursive \`grep\`, and \`-not -path\` equivalents on \`find\`. This saves you from remembering them — but explicit scoping via \`--include\` or a narrower path is still dramatically faster on monorepos.
|
|
534
|
+
|
|
535
|
+
### Why \`exec\` over typed tools
|
|
536
|
+
|
|
537
|
+
\`exec\` is the default because it subsumes \`read_document\` and \`search\` enrichment paths (same shared helper under the hood) and adds bash composition. The typed tools remain registered as **Typed call sites (advanced)** — present for callers that consume \`structuredContent\` with fixed shapes — but they're not recommended for common agent reads.
|
|
538
|
+
|
|
539
|
+
## Writing
|
|
540
|
+
|
|
541
|
+
Agent writes to in-scope \`.md\` / \`.mdx\` (paths under \`content.include\`) **must** go through the \`write_document\` / \`edit_document\` MCP tools — never \`exec\` (which is read-only) and never native \`Edit\` / \`sed\`. Routing writes through the server is what captures agent-vs-human attribution in the shadow repo. Writes via other paths land as anonymous \`upstream\` imports and lose attribution.
|
|
542
|
+
|
|
543
|
+
${g}
|
|
544
|
+
|
|
545
|
+
## Linking — lean on \`[[wiki-links]]\` aggressively
|
|
546
|
+
|
|
547
|
+
**When writing or editing any document, link liberally to every other document it relates to.** Open Knowledge's value compounds with link density: backlinks surface cross-document context in every \`exec("cat X.md")\` read, \`get_hubs\` / \`get_orphans\` reveal structure, and agents (you, next session) navigate the knowledge base by following links the way you'd navigate a wiki. A document with no outbound links is an island; an island in a knowledge base is worse than no document at all.
|
|
548
|
+
|
|
549
|
+
**Defaults when writing:**
|
|
550
|
+
|
|
551
|
+
- **Every noun-phrase that names another document is a link.** If you mention a concept, project, decision, or entity that has (or should have) its own page, write it as \`[[Page Title]]\` instead of plain prose. Don't stop to check whether the target exists first — a redlink signals "this should exist" to future work. Over-linking is the goal, not the failure mode.
|
|
552
|
+
- **Cross-link siblings.** When you create a document in a folder, skim the siblings (\`exec("ls <folder>")\`) and link to the 2–3 most related ones. A "See also" section at the bottom is fine; inline links woven through the prose are better.
|
|
553
|
+
- **Link back to sources.** If a document is derived from research, spec decisions, external sources, or prior reports, link to them — don't re-summarize. The reader can follow.
|
|
554
|
+
- **Prefer \`[[Page]]\` over Markdown \`[text](./page.md)\`.** Wiki-links resolve by docName (file path minus \`.md\`) and participate in the backlinks index. Markdown links to other wiki files don't.
|
|
555
|
+
- **Update both sides when possible.** If you add an important link from A → B, consider whether B should link back to A or to a landing page that lists documents like A.
|
|
556
|
+
|
|
557
|
+
**Rule of thumb:** if a human reader would want to click a term to learn more, make it a link. Err on the side of too many links.
|
|
558
|
+
|
|
559
|
+
## Cadence — maintain hubs as you create children
|
|
560
|
+
|
|
561
|
+
When you create or meaningfully edit a doc inside a folder that has a hub doc (\`INDEX.md\`, \`README.md\`, \`REPORT.md\`, \`SPEC.md\`, or a file whose name matches the folder name — e.g. \`reports/r1/r1.md\`), update the hub to reflect the change before moving to the next child. Write one child → update hub → write next child. Don't batch five children and then the hub.
|
|
562
|
+
|
|
563
|
+
**Why:** the browser follows your focus in real time via push-nav on every write. Hub-as-you-go makes your work legible to the human watching — each pulse is a complete thought (child → hub → child → hub), and the hub doc itself functions as the live progress bar. Batched writes make the nav flicker, flatten the narrative, and hide the structure you're building.
|
|
564
|
+
|
|
565
|
+
When \`write_document\` creates a doc with zero incoming backlinks and a hub candidate exists in the folder tree, the response includes a \`hints: [{type: 'orphan', parentCandidates: [...], message: ...}]\` entry — that's the soft nudge to interleave the hub update next. Pair with the link-as-you-write discipline above.
|
|
566
|
+
|
|
567
|
+
## Frontmatter conventions
|
|
568
|
+
|
|
569
|
+
Open Knowledge has two metadata surfaces that merge at read time:
|
|
570
|
+
|
|
571
|
+
1. **Per-file frontmatter** — YAML at the top of each \`.md\` / \`.mdx\`: \`title\` (required), \`description\` (required), \`tags\` (recommended). This is where a file's own identity lives.
|
|
572
|
+
2. **Folder-level defaults via \`.open-knowledge/config.yml\` \`folders:\`** — declare \`title\` / \`description\` / \`tags\` defaults keyed by glob \`match:\`. Rules apply in declaration order; later matches override earlier scalars. Tags concatenate across ALL matching rules (in declaration order), with file tags appended last, and first-occurrence preserved on dedup. The file's own frontmatter wins per-scalar; folder defaults fill in blanks.
|
|
573
|
+
|
|
574
|
+
Folder metadata lives in \`config.yml\`, **not** in content files — this is intentionally different from the rejected \`INDEX.md\`-inside-content pattern. The merge happens on every \`exec\` / \`read_document\` / \`search\` call and is never written back to disk.
|
|
575
|
+
|
|
576
|
+
## Tools
|
|
577
|
+
|
|
578
|
+
**Primary:**
|
|
579
|
+
- \`exec\` — read-only bash with enriched output (see above).
|
|
580
|
+
|
|
581
|
+
**Workflow (instructional tools):**
|
|
582
|
+
- \`init-content\`, \`ingest\`, \`research\`, \`consolidate\` — each returns structured instructions you follow. Output text includes the live \`content.dir\` value (${t}) so you don't need to re-read the config.
|
|
583
|
+
|
|
584
|
+
**Writes:**
|
|
585
|
+
- \`write_document\`, \`edit_document\`, \`rename_document\`, \`undo_agent_edit\`, \`redo_agent_edit\` — mutate the CRDT through the server; attribution captured.
|
|
586
|
+
|
|
587
|
+
**Typed call sites (advanced) — prefer \`exec\` for common reads:**
|
|
588
|
+
- \`read_document\`, \`search\`, \`list_documents\`, \`get_backlinks\`, \`get_forward_links\`, \`get_orphans\`, \`get_hubs\`.
|
|
589
|
+
|
|
590
|
+
${Object.entries(qr).map(([e,t])=>`### \`${e}\`\n${t}`).join(`
|
|
591
|
+
|
|
592
|
+
`)}
|
|
593
|
+
`}async function Xr(e){try{let t=e.replace(`ws://`,`http://`).replace(`wss://`,`https://`);return(await fetch(`${t}/api/agent-undo-status`,{signal:AbortSignal.timeout(2e3)})).ok}catch(e){return X(`Hocuspocus check failed: ${e instanceof Error?e.message:String(e)}`),!1}}async function Zr(e){let{projectDir:t,serverUrl:n,config:s}=e;X(n?await Xr(n)?`Hocuspocus detected at ${n}`:`Hocuspocus not available at ${n} — using disk-only mode`:`No explicit server URL — will discover lazily from server.lock per call`);let c=new we({name:i,version:r},{instructions:Yr(s)}),l=[],u=!1;async function d(){try{l=(await c.server.listRoots()).roots.map(e=>e.uri).filter(e=>e.startsWith(`file://`)).map(e=>me(e)),X(l.length>0?`roots: ${l.join(`, `)}`:`client advertised no roots — falling back to startup cwd`)}catch(e){X(`listRoots unsupported by client (using startup cwd): ${e instanceof Error?e.message:String(e)}`)}finally{u=!0}}let f=!1;async function p(e){return e||(u||await d(),l.length===0?(f||=(X(`no client roots — falling back to startup cwd: ${t}`),!0),t):(f&&=(X(`client roots now available — using ${l[0]}`),!1),l[0]))}c.server.setNotificationHandler(Ee,async()=>{u=!1,await d()});let m=n?n.replace(`ws://`,`http://`).replace(`wss://`,`https://`):void 0,g=new Map,_=async()=>{if(m)return m;let e=await p(),t=Date.now(),n=g.get(e);if(n&&n.expiresAt>t)return n.url;let r=h(a(o(s,e))),i=r&&r.port>0?`http://localhost:${r.port}`:void 0;return g.set(e,{url:i,expiresAt:t+1e3}),i},v=he(),y=process.env.AGENT_LABEL||void 0,b={current:{connectionId:v,label:y,displayName:y??`Agent`,colorSeed:y??v}};c.server.oninitialized=()=>{let e=c.server.getClientVersion();b.current={connectionId:v,clientInfo:e?{name:e.name,version:e.version}:void 0,label:y,displayName:y??e?.name??`Agent`,colorSeed:y??e?.name??v},X(`Agent identity: ${b.current.displayName} (${v.slice(0,8)})`)},Jr(c,{serverUrl:_,resolveCwd:p,startupCwd:t,config:s,identityRef:b});let x=new Te;await c.connect(x),X(`MCP server running 
(stdio)`),d().catch(()=>{});let{startKeepalive:S}=await import(`./keepalive-D-FSaNO6.mjs`),C=S({resolveWsUrl:async()=>{let e=await _();if(e)return e.replace(/^http:/,`ws:`).replace(/^https:/,`wss:`)},log:X}),w=()=>{try{C.close()}catch{}process.exit(0)};process.on(`SIGINT`,w),process.on(`SIGTERM`,w)}function Qr(e){if(e===void 0||e===``)return;let t=Number.parseInt(e,10);if(!(Number.isNaN(t)||t<=0))return t}function $r(e){if(e.portOverride!==void 0){let t=Number.parseInt(e.portOverride,10);if(Number.isNaN(t))return{action:`disk-only`,message:`invalid --port value '${e.portOverride}' — disk-only mode`};if(t>0){let n=`ws://${e.host}:${t}`;return{action:`connect`,url:n,message:`using --port override, connecting to ${n}`}}return{action:`disk-only`,message:`--port=0 — disk-only mode`}}let t=e.readLock();if(t&&t.port>0&&e.isAlive(t.pid)){let e=`ws://localhost:${t.port}`;return{action:`connect`,url:e,message:`connected to running instance at ${e} (pid ${t.pid})`}}return e.envAutoStart===`0`?{action:`disk-only`,message:`auto-spawn disabled via OK_MCP_AUTOSTART=0 — disk-only mode`}:e.configAutoStart?t?{action:`spawn`,message:`existing lock is not usable (port=${t.port}, pid=${t.pid}) — spawning ok start`}:{action:`spawn`,message:`no running instance — spawning ok start`}:{action:`disk-only`,message:`auto-spawn disabled via config.mcp.autoStart=false — disk-only mode`}}async function ei(e){let t=e.readLock??(()=>h(e.lockDir)),n=e.isAlive??m,r=e.sleep??(e=>new Promise(t=>setTimeout(t,e))),i=e.spawn??_e,a=e.readErrorLog??(e=>T(e)?O(e,`utf-8`).trim():``),o=e.openErrorLog??(e=>D(e,`w`)),s=e.closeFd??(e=>w(e)),c=e.timeoutMs??5e3,l=e.pollIntervalMs??100,u=$r({host:e.host,portOverride:e.portOverride,envAutoStart:e.envAutoStart,configAutoStart:e.configAutoStart,readLock:t,isAlive:n});if(u.action===`connect`)return{serverUrl:u.url,message:u.message};if(u.action===`disk-only`)return{serverUrl:void 0,message:u.message};T(e.lockDir)||E(e.lockDir,{recursive:!0});let 
d=se(e.lockDir,`last-spawn-error.log`),f=o(d),p,g,_=x();try{try{p=i(_.command,[..._.prefixArgs,`start`],{detached:!0,stdio:[`ignore`,`ignore`,f],cwd:e.contentDir}),p.on(`error`,e=>{g=e instanceof Error?e.message:String(e)}),p.unref()}catch(e){g=e instanceof Error?e.message:String(e)}}finally{try{s(f)}catch{}}let v=Date.now()+c;for(;Date.now()<v;){if(g){let e=a(d);throw Error(`OK: spawn failed: ${g}${e?` stderr:\n${e}`:``}`)}await r(l);let e=t();if(e&&e.port>0&&n(e.pid)){let t=`ws://localhost:${e.port}`;return{serverUrl:t,message:`spawned ok start; connected at ${t} (pid ${e.pid})`}}}if(g){let e=a(d);throw Error(`OK: spawn failed: ${g}${e?` stderr:\n${e}`:``}`)}let y=a(d),b=(c/1e3).toFixed(c%1e3==0?0:2),S=p?.pid,C=``;throw typeof S==`number`&&(C=n(S)?` child pid=${S} is still running — raise OK_MCP_SPAWN_TIMEOUT_MS if this is a slow boot.`:` child pid=${S} exited — check last-spawn-error.log.`),Error(`OK: server did not start within ${b}s.${C}${y?` stderr:\n${y}`:``}`)}function ti(e){return new C(`mcp`).description(`Start MCP stdio server for project knowledge base`).option(`-p, --port <port>`,`Override port discovery and connect to this port (0 = disk-only)`,void 0).action(async t=>{try{let n=e(),r=process.cwd(),i=o(n,r),{serverUrl:s,message:c}=await ei({lockDir:a(i),contentDir:i,host:n.server.host,portOverride:t.port,envAutoStart:process.env.OK_MCP_AUTOSTART,configAutoStart:n.mcp.autoStart,timeoutMs:Qr(process.env.OK_MCP_SPAWN_TIMEOUT_MS)});process.stderr.write(`[mcp] ${c}\n`),await Zr({projectDir:r,serverUrl:s,config:n})}catch(e){process.stderr.write(`MCP server failed to start: ${e instanceof Error?e.message:String(e)}\n`),process.exitCode=1}})}function ni(e){return new C(`preview`).description(`Show what content the watcher will track (read-only)`).action(async()=>{let{previewContent:t,formatPreviewBlock:n}=await 
import(`./preview-2OHXLW85.mjs`),r=e(),i=process.cwd(),a=o(r,i),s;try{s=t({projectDir:i,contentDir:a,include:r.content.include,exclude:r.content.exclude})}catch(e){console.error(`Content preview failed: ${e instanceof Error?e.message:String(e)}`),process.exitCode=1;return}process.stdout.write(`${n(s,i)}\n`),s.totalCount===0&&s.warnings.length>0&&(process.exitCode=1)})}function Z(e,t){e&&process.stdout.write(`${JSON.stringify(t)}\n`)}async function ri(e,t,n=process.cwd()){let r=e.op??`sync`,i=h(a(o(t,n)));if(i&&i.port>0){let t=`http://127.0.0.1:${i.port}/api/sync/trigger`;e.json||process.stderr.write(`Triggering ${r} via running server (port ${i.port})…\n`);try{let n=await fetch(t,{method:`POST`,headers:{"Content-Type":`application/json`},body:JSON.stringify({op:r})});if(!n.ok){let e=await n.json().catch(()=>({}));throw Error(e.error??`Server responded with ${n.status}`)}Z(e.json,{type:`triggered`,op:r,port:i.port}),e.json||process.stderr.write(`✓ ${r} triggered\n`);return}catch(t){let n=t instanceof Error?t.message:String(t);e.json||process.stderr.write(`Server trigger failed (${n}), running directly…\n`)}}e.json||process.stderr.write(`Running ${r} directly (no live server)…\n`);let s=M({baseDir:n});if(r===`sync`||r===`pull`){Z(e.json,{type:`step`,step:`pull`});let t=await s.pull();Z(e.json,{type:`pull`,summary:t.summary}),e.json||process.stderr.write(` pull: ${t.summary.changes} changes\n`)}(r===`sync`||r===`push`)&&(Z(e.json,{type:`step`,step:`push`}),await s.push(),Z(e.json,{type:`push`,ok:!0}),e.json||process.stderr.write(` push: ok
|
|
594
|
+
`)),Z(e.json,{type:`complete`,op:r}),e.json||process.stderr.write(`✓ ${r} complete\n`)}function ii(e){return new C(`sync`).description(`Commit, pull, and push to the remote`).option(`--json`,`Output JSONL progress events`,!1).action(async t=>{try{await ri({json:t.json,op:`sync`},e())}catch(e){let n=e instanceof Error?e.message:String(e);t.json?process.stdout.write(`${JSON.stringify({type:`error`,message:n})}\n`):process.stderr.write(`✗ sync failed: ${n}\n`),process.exit(1)}})}function ai(e){return new C(`pull`).description(`Pull changes from the remote`).option(`--json`,`Output JSONL progress events`,!1).action(async t=>{try{await ri({json:t.json,op:`pull`},e())}catch(e){let n=e instanceof Error?e.message:String(e);t.json?process.stdout.write(`${JSON.stringify({type:`error`,message:n})}\n`):process.stderr.write(`✗ pull failed: ${n}\n`),process.exit(1)}})}function oi(e){return new C(`push`).description(`Push commits to the remote`).option(`--json`,`Output JSONL progress events`,!1).action(async t=>{try{await ri({json:t.json,op:`push`},e())}catch(e){let n=e instanceof Error?e.message:String(e);t.json?process.stdout.write(`${JSON.stringify({type:`error`,message:n})}\n`):process.stderr.write(`✗ push failed: ${n}\n`),process.exit(1)}})}function si(e,t){return{server:ci(`server`,e),ui:ci(`ui`,t)}}function ci(e,t){switch(t.status){case`missing`:return{name:e,state:`missing`,alive:!1};case`corrupt`:return{name:e,state:`corrupt`,alive:!1};case`foreign-host`:return{name:e,state:`foreign-host`,pid:t.lock.pid,port:t.lock.port,startedAt:t.lock.startedAt,host:t.lock.hostname,alive:`unknown`};case`dead-pid`:return{name:e,state:`dead-pid`,pid:t.lock.pid,port:t.lock.port,startedAt:t.lock.startedAt,host:t.lock.hostname,alive:!1};case`alive`:return{name:e,state:`alive`,pid:t.lock.pid,port:t.lock.port,startedAt:t.lock.startedAt,host:t.lock.hostname,alive:!0}}}function li(e){return`${ui(e.server)}\n${ui(e.ui)}`}function ui(e){let t=e.name===`server`?`server`:`ui `;return 
e.state===`missing`?`${t} not running`:e.state===`corrupt`?`${t} lock file corrupt — run \`ok clean\``:e.state===`foreign-host`?`${t} foreign host (${e.host}) pid=${e.pid} port=${e.port}`:e.state===`dead-pid`?`${t} stale (dead pid=${e.pid}) — run \`ok clean\``:`${t} alive pid=${e.pid} port=${e.port} started=${e.startedAt}`}function di(e){let t=e.inspect??(t=>et(e.lockDir,t)),n=e.log??(e=>console.log(e)),r=si(t(`server`),t(`ui`));return e.json?n(JSON.stringify(r,null,2)):n(li(r)),r}function fi(e){return new C(`status`).description(`Show live state of the server + ui lockfiles for this project`).option(`--json`,`Emit structured JSON instead of formatted text`).action(t=>{di({lockDir:a(o(e(),process.cwd())),json:t.json===!0})})}function pi(e,t){let n=[];return e.status===`alive`&&n.push({name:`server`,pid:e.lock.pid,port:e.lock.port}),t.status===`alive`&&n.push({name:`ui`,pid:t.lock.pid,port:t.lock.port}),{targets:n}}function mi(e){let t=e.inspect??(t=>et(e.lockDir,t)),n=e.kill??((e,t)=>process.kill(e,t)),r=e.log??(e=>console.log(e)),i=e.error??(e=>console.error(e)),a=pi(t(`server`),t(`ui`));if(a.targets.length===0)return r(`No running open-knowledge processes.`),{stopped:[],failed:[],hadTargets:!1};let o=[],s=[];for(let e of a.targets)try{n(e.pid,`SIGTERM`),o.push(e)}catch(t){s.push({target:e,error:t instanceof Error?t.message:String(t)})}return o.length>0&&r(`Stopped: ${o.map(e=>`${e.name} (pid=${e.pid}, port=${e.port})`).join(`, `)}`),s.length>0&&i(`Failed to stop: ${s.map(({target:e,error:t})=>`${e.name} (pid=${e.pid}): ${t}`).join(`; `)}`),{stopped:o,failed:s,hadTargets:!0}}function hi(e){return new C(`stop`).description(`Stop the running open-knowledge server and UI (live only)`).action(()=>{mi({lockDir:a(o(e(),process.cwd()))}).failed.length>0&&(process.exitCode=1)})}const gi=1e4,_i=[`connection`,`keep-alive`,`proxy-authenticate`,`proxy-authorization`,`te`,`trailer`,`transfer-encoding`,`upgrade`,`cookie`,`set-cookie`];async function vi(e){let 
t=e.upstreamTimeoutMs??gi,n=xe((n,r)=>{bi(n,r,e.upstreamHost,e.upstreamPort,t)});await new Promise((t,r)=>{let i=e=>r(e);n.once(`error`,i),n.listen(e.listenPort,e.host,()=>{n.off(`error`,i),t()})});let r=n.address();return{httpServer:n,port:typeof r==`object`&&r?r.port:e.listenPort,close:()=>new Promise(e=>{n.close(()=>e())})}}function yi(e,t,n){bi(e,t,n.upstreamHost,n.upstreamPort,n.upstreamTimeoutMs??gi)}function bi(e,t,n,r,i){let a={...e.headers};delete a.host;for(let e of _i)delete a[e];e.setTimeout(3e4,()=>{if(t.headersSent)try{t.end()}catch{}else try{t.writeHead(408,{"Content-Type":`text/plain`}),t.end(`Request Timeout`)}catch{}try{e.socket?.destroy()}catch{}});let o=Se({host:n,port:r,method:e.method,path:e.url,headers:{...a,host:`${n}:${r}`}},e=>{let n={...e.headers};for(let e of _i)delete n[e];t.writeHead(e.statusCode??502,n),e.pipe(t),e.once(`error`,()=>{try{t.end()}catch{}})});i>0&&o.setTimeout(i,()=>{if(!t.headersSent)t.writeHead(504,{"Content-Type":`text/plain`}),t.end(`Gateway Timeout`);else try{t.end()}catch{}o.destroy()}),o.on(`error`,()=>{if(!t.headersSent)t.writeHead(502,{"Content-Type":`text/plain`}),t.end(`Bad Gateway`);else try{t.end()}catch{}}),e.on(`error`,()=>{o.destroy()}),e.pipe(o)}async function xi(e){await Promise.all(e.map(e=>new Promise(t=>{e.close(()=>t())})))}async function Si(e){let{existsSync:t}=await import(`node:fs`),{createServer:r}=await import(`node:http`),{resolve:i}=await import(`node:path`),{acquireUiLock:a,readServerLock:o,releaseUiLock:s,updateUiLockPort:c}=await import(`./src-ByMiIIub.mjs`),{default:l}=await import(`sirv`),{resolveContentDir:u,resolveLockDir:d}=await import(`./paths-DRfvViD6.mjs`),f=u(e.config,e.cwd),p=d(f);a(p,{port:0,worktreeRoot:e.cwd});let m=import.meta.dirname??new URL(`.`,import.meta.url).pathname,h=[i(m,`public`),i(m,`../../app/dist`),i(m,`../../../app/dist`)].find(e=>t(e)),g=h?l(h,{single:!0,gzip:!0,immutable:!0}):null,_=t(f)?l(f,{dotfiles:!1}):null,v=e.port,y=null,b=(e,t)=>{let 
// --- Bundled/minified CLI output (auto-generated; local identifiers are mangled). ---
// NOTE(review): do NOT re-wrap or hand-edit the lines below — template literals and a
// double-quoted string span the physical line breaks, so inserting text mid-span would
// change runtime strings. This span contains, in order:
//
// 1. Tail of the UI dev-server factory (its opening is above this chunk), including:
//    - the request handler: GET/HEAD `/api/config` answers JSON {collabUrl, previewUrl,
//      port} with `Cache-Control: no-store`; other `/api/*` paths are proxied via `yi`
//      to the port read from `o(p)` (presumably the collab lock — helper defined
//      elsewhere; verify), or 503 if that port is absent/<= 0; remaining paths go to
//      the static handler `_` when set, then the fallback `g`, else 404 via `Ci`.
//    - socket binding: one HTTP server per host — `[::1]` plus `127.0.0.1` when
//      `e.host` is undefined (dual loopback), otherwise just `e.host`. The first
//      server's kernel-assigned port is reused for the rest. On any listen failure all
//      opened sockets are closed, `s(p)` is called (appears to release the lock —
//      confirm), and the error is rethrown.
//    - a safety-net timer (`e.safetyNetMs`, default 432e5 ms = 12h) that warns
//      ("D-025 backstop"), closes every server and releases via `A()`; the returned
//      handle exposes `detachSafetyNet` (cancel) and `nudgeSafetyNet` (re-arm), and
//      the handler re-arms it on each `/api/*` hit via `y?.()`.
//
// 2. `Ci(res)` — minimal 404 "Not found" responder used as the final fallback.
//
// 3. `wi(flagValue, envValue)` — resolves the UI port: explicit `--port` wins, then
//    the PORT env var, else 0 (kernel-allocated). Throws Error for NaN or values
//    outside 0..65535; note an empty-string env value falls through to 0.
//
// 4. `Ti(opts)` — UI lock-collision handler: re-reads the lock (injectable via
//    `opts.readLock`); if the holder already serves the requested port, reports
//    `already-running`; if the lock shows port 0, polls every `pollIntervalMs`
//    (default 100 ms) up to `pollDeadlineMs` (default 2000 ms) for the holder to
//    bind, then either reports `already-running` or starts an HTTP proxy via `vi`
//    (defined elsewhere) to the upstream port. Throws if the lock vanished or the
//    holder never bound.
//
// 5. `Ei(getConfig)` — builds the `ui` commander subcommand: parses -p/--port and
//    -H/--host, starts the server via `Si`, logs the listen URL, and installs
//    SIGINT/SIGTERM handlers with a 2 s forced-exit timer; on UiLockCollisionError
//    it delegates to `Ti` and either exits (already running) or runs as a proxy
//    with its own signal handlers.
//
// 6. Top-level CLI entry: --color/--no-color env toggling (NO_COLOR/FORCE_COLOR),
//    the root `open-knowledge` program with --cwd/--log-level options, a preAction
//    hook that chdirs, loads config via `S(...)` and overlays --port/--host and the
//    PORT/HOST env vars onto `config.server`, registration of all subcommands
//    (serve is the default command), and the final `await Q.parseAsync()`.
n=e.url?.split(`?`)[0];if(n===`/api/config`&&(e.method===`GET`||e.method===`HEAD`)){y?.();let n=o(p),r=n&&n.port>0?`ws://localhost:${n.port}/collab`:null,i=JSON.stringify({collabUrl:r,previewUrl:null,port:v});t.setHeader(`Content-Type`,`application/json`),t.setHeader(`Cache-Control`,`no-store`),t.setHeader(`X-Content-Type-Options`,`nosniff`),t.statusCode=200,e.method===`HEAD`?t.end():t.end(i);return}if(n?.startsWith(`/api/`)){y?.();let r=o(p);if(!r||r.port<=0){t.writeHead(503,{"Content-Type":`application/json`,"Cache-Control":`no-store`}),t.end(JSON.stringify({error:"Collab server not running. Start `ok start` or run `ok status`.",path:n}));return}yi(e,t,{upstreamHost:`localhost`,upstreamPort:r.port});return}if(decodeURIComponent(n?.replace(/^\//,``)??``)&&_){t.setHeader(`X-Content-Type-Options`,`nosniff`),_(e,t,()=>{g?g(e,t):Ci(t)});return}if(g){g(e,t);return}Ci(t)},x=e.host===void 0?[`::1`,`127.0.0.1`]:[e.host],S=[],C=e.port;try{for(let e of x){let t=r(b);S.push(t),await new Promise((n,r)=>{let i=e=>r(e);t.once(`error`,i),t.listen(C,e,()=>{t.off(`error`,i);let e=t.address();typeof e==`object`&&e&&(C=e.port),n()})})}}catch(e){await Promise.all(S.map(e=>new Promise(t=>{try{e.close(()=>t())}catch{t()}})));try{s(p)}catch{}throw e}let w=C;v=w,c(p,w);let T=e.scheduler??n,E=e.safetyNetMs??432e5,D=null,O=!1,ee=!1,k=()=>{O||(O=!0,D!==null&&(T.clearTimeout(D),D=null))},A=()=>{if(k(),!ee){ee=!0;try{s(p)}catch{}}},te=()=>{O||E<=0||(D!==null&&(T.clearTimeout(D),D=null),D=T.setTimeout(()=>{D=null,console.warn(`[ui] safety-net (${E}ms) reached — shutting down (D-025 backstop)`);try{e.onSafetyNet?.()}catch{}for(let e of S)try{e.close()}catch{}A()},E))},ne=()=>{O||E<=0||te()};return y=ne,te(),{httpServers:S,port:w,release:A,detachSafetyNet:k,nudgeSafetyNet:ne}}function Ci(e){e.writeHead(404),e.end(`Not found`)}function wi(e,t){if(e!==void 0){let t=Number.parseInt(e,10);if(Number.isNaN(t)||t<0||t>65535)throw Error(`Invalid --port value '${e}'`);return t}if(t!==void 0&&t!==``){let 
e=Number.parseInt(t,10);if(Number.isNaN(e)||e<0||e>65535)throw Error(`Invalid PORT env value '${t}'`);return e}return 0}async function Ti(e){let t=e.readLock??(async()=>{let{readUiLock:t}=await import(`./src-ByMiIIub.mjs`);return t(e.lockDir)}),n=await t();if(!n)throw Error(`UI lock collision reported but the lock disappeared before handling — retry acquiring.`);if(n.port===e.requestedPort&&n.port>0)return{mode:`already-running`,port:n.port};let r=n.port;if(r===0){let n=Date.now()+(e.pollDeadlineMs??2e3),i=e.pollIntervalMs??100;for(;Date.now()<n;){await new Promise(e=>{setTimeout(e,i)});let e=await t();if(e&&e.port>0){r=e.port;break}}if(r===0)throw Error("UI did not bind within 2s; run `ok clean`");if(r===e.requestedPort)return{mode:`already-running`,port:r}}return{mode:`proxy`,handle:await vi({listenPort:e.requestedPort,host:e.host,upstreamHost:`localhost`,upstreamPort:r}),upstreamPort:r}}function Ei(e){return new C(`ui`).description(`Serve the Open Knowledge React editor UI`).option(`-p, --port <port>`,`UI port (default: $PORT env or 0 / kernel-allocated)`).option(`-H, --host <host>`,"UI host. Default: two-socket loopback bind (`[::1]` + `127.0.0.1`) so cross-family collisions fail loud (D-033). Pass an explicit host (e.g. 
`127.0.0.1`, `0.0.0.0`) to bind a single socket on that host.").action(async t=>{let{dim:n}=await import(`./colors-BNvy_pwG.mjs`),{UiLockCollisionError:r}=await import(`./src-ByMiIIub.mjs`),{resolveContentDir:i,resolveLockDir:a}=await import(`./paths-DRfvViD6.mjs`),o=e(),s=t.host,c;try{c=wi(t.port,process.env.PORT)}catch(e){console.error(e instanceof Error?e.message:String(e)),process.exitCode=1;return}try{let e=await Si({config:o,cwd:process.cwd(),port:c,host:s}),t=s===void 0||s===`::`||s===`0.0.0.0`?`localhost`:s;console.log(`${n(`[ui]`)} listening on http://${t}:${e.port}`);let r=!1,i=t=>{if(r)return;r=!0,console.log(n(`\n[ui] Shutting down (${t})...`)),e.detachSafetyNet();let i=()=>{try{e.release()}finally{process.exit(process.exitCode??0)}};xi(e.httpServers).then(i,i),setTimeout(i,2e3).unref()};process.once(`SIGINT`,()=>i(`SIGINT`)),process.once(`SIGTERM`,()=>i(`SIGTERM`));return}catch(e){if(!(e instanceof r))throw e;let t=a(i(o,process.cwd())),l=s??`localhost`,u;try{u=await Ti({requestedPort:c,host:l,lockDir:t})}catch(e){console.error(e instanceof Error?e.message:String(e)),process.exit(1)}u.mode===`already-running`&&(console.log(`UI already running at http://${l}:${u.port}`),process.exit(0)),console.log(`UI running at http://${l}:${u.upstreamPort}; acting as HTTP proxy on port ${u.handle.port}`);let d=!1,f=e=>{d||(d=!0,console.log(n(`\n[ui-proxy] Shutting down (${e})...`)),u.handle.close().finally(()=>process.exit(process.exitCode??0)),setTimeout(()=>process.exit(process.exitCode??0),2e3).unref())};process.once(`SIGINT`,()=>f(`SIGINT`)),process.once(`SIGTERM`,()=>f(`SIGTERM`))}})}process.argv.includes(`--no-color`)?(process.env.NO_COLOR=`1`,delete process.env.FORCE_COLOR):process.argv.includes(`--color`)&&(process.env.FORCE_COLOR=`1`,delete process.env.NO_COLOR);const Q=new C;let $;Q.name(`open-knowledge`).description(`Local-first knowledge base with CRDT collaboration`).version(r).option(`--cwd <path>`,`Working directory`).option(`--log-level <level>`,`Log 
level`,`info`).option(`--no-color`,`Disable color output`).option(`--color`,`Force color output`).hook(`preAction`,e=>{let t=e.opts(),n=t.cwd;n!==void 0&&process.chdir(n);let{config:r}=S(n),i=e.args.length===0?t:e.commands[0]?.opts()??{};i.port!==void 0&&(r.server.port=Number(i.port)),i.host!==void 0&&(r.server.host=i.host),process.env.PORT&&(r.server.port=Number(process.env.PORT)),process.env.HOST&&(r.server.host=process.env.HOST),$=r});const Di=b(()=>$);Q.addCommand(Di,{isDefault:!0});const Oi=ti(()=>$);Q.addCommand(Oi),Q.addCommand(y());const ki=ni(()=>$);Q.addCommand(ki);const Ai=Ei(()=>$);Q.addCommand(Ai),Q.addCommand(hi(()=>$)),Q.addCommand(rt(()=>$)),Q.addCommand(fi(()=>$)),Q.addCommand($e(()=>$)),Q.addCommand(ft(()=>$)),Q.addCommand(ii(()=>$)),Q.addCommand(oi(()=>$)),Q.addCommand(ai(()=>$)),await Q.parseAsync();export{};
|
|
595
|
+
//# sourceMappingURL=cli.mjs.map
|