@inkeep/open-knowledge 0.0.0-dev-20260427081104 → 0.0.0-dev-20260427172649

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45) hide show
  1. package/dist/assets/skills/open-knowledge/SKILL.md +27 -15
  2. package/dist/cli.mjs +9 -9
  3. package/dist/constants-BTD7ZKSR.mjs +2 -0
  4. package/dist/dist-B8mg-f6q.mjs +1 -0
  5. package/dist/{dist-B_ROt9_c.mjs → dist-D4iyaPjq.mjs} +39 -39
  6. package/dist/{dist-D8JEQ6hO.mjs → dist-mmLEboji.mjs} +20 -8
  7. package/dist/index.mjs +1 -1
  8. package/dist/init--i3qKzKM.mjs +1 -0
  9. package/dist/{init-DvF3Dl7n.mjs → init-BvzZ82y2.mjs} +5 -5
  10. package/dist/init-CZ_E019R.mjs +1 -0
  11. package/dist/{init-DZDnMUjc.mjs → init-D8ujFTpS.mjs} +2 -2
  12. package/dist/loader-CPVGB7eo.mjs +1 -0
  13. package/dist/{loader-D3fsdR1w.mjs → loader-FD-NCStn.mjs} +2 -2
  14. package/dist/paths-DpAhhXfF.mjs +1 -0
  15. package/dist/paths-O_X0xRBt.mjs +2 -0
  16. package/dist/{preview-DyLhres3.mjs → preview-Dk-UNsFY.mjs} +2 -2
  17. package/dist/preview-JkR5aTPE.mjs +1 -0
  18. package/dist/public/assets/{ActivityModeContent-BdQtVWQT.js → ActivityModeContent-Bh4C07uL.js} +1 -1
  19. package/dist/public/assets/{DocumentContext-HT-mHcLO.js → DocumentContext-D125gYAt.js} +1 -1
  20. package/dist/public/assets/{GraphPanel-BJ9MldAi.js → GraphPanel-BkWSPzhS.js} +1 -1
  21. package/dist/public/assets/McpConsentDialogBody-QKrs5-z6.js +1 -0
  22. package/dist/public/assets/{PageListContext-1COIYjHT.js → PageListContext-BeAv6YTY.js} +7 -7
  23. package/dist/public/assets/{SourceEditor-C7wAoLYw.js → SourceEditor-M-p0lLYp.js} +1 -1
  24. package/dist/public/assets/{clipboard-D6iP9LKm.js → clipboard-BmiNdq3p.js} +1 -1
  25. package/dist/public/assets/{index-ChO0_t6R.js → index-DLXD_AE1.js} +64 -64
  26. package/dist/public/assets/index-DPJk5E6v.css +1 -0
  27. package/dist/public/assets/{toggle-group-AWjaf9XJ.js → toggle-group-C2g3gOrt.js} +1 -1
  28. package/dist/public/assets/{tooltip-0l1mzBHx.js → tooltip-D180uIJr.js} +1 -1
  29. package/dist/public/index.html +7 -7
  30. package/dist/src-BBeY6WLC.mjs +1 -0
  31. package/dist/start-Bds5ijsM.mjs +1 -0
  32. package/dist/{start-C2tHtLw6.mjs → start-CkSD8obv.mjs} +2 -2
  33. package/package.json +1 -1
  34. package/dist/constants-DY5Aq0Nx.mjs +0 -2
  35. package/dist/dist-DWb2jtNM.mjs +0 -1
  36. package/dist/init-CHB65M2f.mjs +0 -1
  37. package/dist/init-CXOhIFdP.mjs +0 -1
  38. package/dist/loader-ByLuMfzl.mjs +0 -1
  39. package/dist/paths-C6Cx7bux.mjs +0 -1
  40. package/dist/paths-CHt5HnJR.mjs +0 -2
  41. package/dist/preview-BE3bGa0b.mjs +0 -1
  42. package/dist/public/assets/McpConsentDialogBody-BY1JLPsB.js +0 -1
  43. package/dist/public/assets/index-UjCvBlfb.css +0 -1
  44. package/dist/src-BiR06L9a.mjs +0 -1
  45. package/dist/start-BF4fn4HY.mjs +0 -1
@@ -21,9 +21,9 @@ When this workspace has Open Knowledge MCP configured, do **not** use your host'
21
21
  - **`Bash ls` / `Bash find` / `Bash cat` on dirs containing in-scope markdown** — use `exec("ls …")` / `exec("find … -name '*.md'")` / `exec("cat …")` instead. Native returns bare names; `exec` returns frontmatter, backlink counts, and recent activity per child.
22
22
  - **Glob patterns that target markdown** (`**/*.md`, any dir known to be markdown-heavy like `specs/**`, `reports/**`, `docs/**`) — use `exec` with `find`, or `list_documents({ dir })`.
23
23
  - **Dispatching the Explore / general-purpose subagent for markdown-heavy exploration** — subagents use native `Read` / `Grep` / `Glob` internally and bypass Open Knowledge entirely. Do markdown exploration yourself via `exec` / `search`. Subagents remain appropriate for **source-code** exploration.
24
- - **Reading `.open-knowledge/AGENTS.md` via native `Read`** — observed failure mode during M1 testing. The `.open-knowledge/` directory is in-scope; treat its contents the same as any other wiki file.
24
+ - **Reading `.open-knowledge/AGENTS.md` via native `Read`** — observed failure mode during M1 testing. The `.open-knowledge/` directory is in-scope; treat its contents the same as any other knowledge-base file.
25
25
 
26
- Why: native tools skip frontmatter, backlinks, shadow-repo activity, and project git history that OK's tools return for every matched wiki file. `exec` is the primary read surface; it runs read-only bash (`cat`, `ls`, `grep`, `find`, `head`, `tail`, `wc`, `sort`, `uniq`, `cut` — pipes OK) and returns raw stdout plus enriched metadata per file.
26
+ Why: native tools skip frontmatter, backlinks, shadow-repo activity, and project git history that OK's tools return for every matched knowledge-base file. `exec` is the primary read surface; it runs read-only bash (`cat`, `ls`, `grep`, `find`, `head`, `tail`, `wc`, `sort`, `uniq`, `cut` — pipes OK) and returns raw stdout plus enriched metadata per file.
27
27
 
28
28
  **MCP tool visibility — not seeing `exec` is NOT the escape hatch.** MCP wiring varies by client. Claude Code, Cursor, Codex, Windsurf, VS Code — each surfaces MCP differently. Server labels are user-defined; tools may not appear as top-level symbols named `exec` in your specific UI. If Open Knowledge is registered as an MCP server in this workspace, route markdown reads through its `exec` / `search` / `read_document` via your client's documented MCP invocation (including any generic "call MCP tool" flow). Registration is the test, not top-level-symbol visibility.
29
29
 
@@ -62,22 +62,24 @@ Call `write_document` / `edit_document` as soon as you have content. Native `Edi
62
62
 
63
63
  ## Grounding — every factual claim needs a source (MUST)
64
64
 
65
- Knowledge-base docs are factual artifacts. Every claim must be traceable to a source.
65
+ Knowledge-base docs are factual artifacts — whether the project is a wiki, an LLM brain, a spec collection, a research log, or anything else markdown-shaped. Every claim must be traceable, and **the source has to live inside the knowledge base**, not float on the public web.
66
66
 
67
+ - **The knowledge base is source-of-truth — closed loop.** External sources don't get *cited out* to the live web; they get *pulled in* via `ingest`, then cited locally. A bare `[source](https://...)` URL inside a knowledge-base doc is **not** a finished citation — it's a TODO that says "this source still needs to be ingested." The chain only works if every leaf is a local doc.
67
68
  - **Every factual claim MUST cite its source at the point of claim.** No unsourced speculation.
68
- - **Web sources** → inline markdown link: `[source name](https://example.com/path)`. Use your host's web-fetch / web-search tool (`WebFetch`, `WebSearch`, or equivalent) to find the source *first*. Don't write a fact and then look for a source to justify it.
69
- - **Internal cross-refs** standard markdown link to the OK doc that contains the authoritative claim: `[text](./path/to/doc.md)`. The linked doc itself must cite its sources chains should terminate in external evidence eventually.
69
+ - **Web sources for knowledge-base docs** → fetch the page (your host's `WebFetch` / `WebSearch` / equivalent), then `ingest` it as a local doc, then cite the local path: `[source name](./path/to/source.md)`. The local doc carries the original URL in its frontmatter `source_url:`. **Inline `[source](URL)` is a chat affordance, not a knowledge-base one.**
70
+ - **Self-fetched counts.** When YOU fetched a URL to ground a claim that's about to land in the knowledge base, that fetch triggers `ingest` exactly like a user share does. Don't downgrade to inline-URL citation because the fetch was agent-initiated — same KB, same closed-loop contract.
71
+ - **Internal cross-refs** → standard markdown link to the OK doc that contains the authoritative claim: `[text](./path/to/doc.md)`. The linked doc itself must cite its sources — chains should terminate in preserved local docs. Where ingested sources live is project-specific (an `external-sources/` folder if the project uses Karpathy's layout; wherever the project's existing layout puts raw references otherwise).
70
72
  - **If you don't have evidence:**
71
- 1. Run a web search and cite the result, OR
73
+ 1. Run a web search and `ingest` the result, OR
72
74
  2. Mark inline `(TODO: needs source)` so a human can verify, OR
73
75
  3. Don't write the claim. Do NOT fabricate.
74
76
  - Unsourced speculation looks authoritative but rots into tribal knowledge that can't be audited. The knowledge base loses its value if readers can't trust it.
75
- - If a fact is in the knowledge base, a reader must be able to trace it to its origin. Grounded evidence is the knowledge base's core contract.
77
+ - If a fact is in the knowledge base, a reader must be able to trace it to its origin via local docs only — no dead-link-on-the-public-web exposure.
76
78
 
77
79
  ## Linking — use standard markdown links
78
80
 
79
81
  - **Every noun-phrase that names another document should be linked** using standard markdown link syntax: `[text](./relative/path.md)` or `[text](/absolute/from/content-root.md)`.
80
- - **External web sources** `[source name](https://...)` required for citations per the Grounding rule above.
82
+ - **External web sources are NOT inline body links.** Per the Grounding rule above, web URLs live in the `source_url:` frontmatter of an ingested doc under `external-sources/` (or the project's equivalent raw-sources folder); the body cites the local path: `[source name](./external-sources/source-slug.md)`. A raw `[source](https://...)` inline in the body is a TODO, not a citation — see Grounding for the closed-loop contract.
81
83
  - **Internal cross-refs between OK docs** → `[text](./other-doc.md)` — link liberally to aid navigation.
82
84
  - **Never wrap a link in backticks.** `` `[text](./foo.md)` `` is a bug — the backticks make it render as literal code rather than a link.
83
85
  - **Never use HTML anchors** (`<a href="...">`). Markdown link syntax only.
@@ -147,7 +149,7 @@ tags:
147
149
 
148
150
  - **Folder structure intent** — the `folders:` block tells you which folders exist, what each one contains, and what tags its files should carry. Every `exec("ls <folder>")` / `read_document` / `search` call merges these defaults with per-file frontmatter automatically, but you should also read config.yml directly when orienting so you can *place new docs in the right folder* and *write them in the voice + shape the project expects*.
149
151
  - **Per-folder instructions** — each `folders:` entry's `description:` field is the canonical place for "what does this folder contain + how should agents work inside it." Treat the description as a binding instruction, not flavor text. If a folder's description says "preserve verbatim, no analysis" (e.g. `external-sources/`), don't synthesize into those files; takeaways belong elsewhere.
150
- - **Content scope** — `content.dir` / `content.include` / `content.exclude` define which files count as knowledge-base documents. Anything outside those globs is regular source code, not a wiki doc.
152
+ - **Content scope** — `content.dir` / `content.include` / `content.exclude` define which files count as knowledge-base documents. Anything outside those globs is regular source code, not a knowledge-base doc.
151
153
 
152
154
  If a project uses `ok seed` to scaffold the Karpathy three-layer layout (`external-sources/` → `research/` → `articles/`), each folder's description in `config.yml` encodes the layer's rules. Projects with custom layouts put their own discipline in their own descriptions. Either way: **follow what config.yml says.**
153
155
 
@@ -195,6 +197,12 @@ If a hub doc exists in a folder, update it as you change children. Don't batch f
195
197
 
196
198
  This is primarily a human-watchability concern — the user watches edits land in the preview; interleaved cadence makes the narrative legible.
197
199
 
200
+ ## Log discipline — check for a project log when KB content changes
201
+
202
+ Some projects keep an append-only project log to make agent activity auditable. **After any turn that creates, edits, or restructures docs in the knowledge base, check for a project log:** look for a `log.md` at the project root (or at the seed `rootDir` if `ok seed --root <dir>` was used). If one exists, follow whatever its frontmatter `description:` and in-file comment say — they carry the project-specific contract (entry shape, cadence, categories). Different projects log differently — some treat the log as a wiki audit trail, others as an LLM-brain history, others as a spec changelog. If no `log.md` exists, no log discipline applies; don't fabricate one.
203
+
204
+ The skill carries the trigger ("KB content changed this turn — go look"). The file owns the policy.
205
+
198
206
  ## Anti-patterns — at a glance
199
207
 
200
208
  | Task | Don't | Do |
@@ -208,7 +216,9 @@ This is primarily a human-watchability concern — the user watches edits land i
208
216
  | Ignore the attach hint | Skip the `warning: { action: "attach-preview-once" }` hint in write-tool responses | Open the `previewUrl` when the hint fires; otherwise do nothing |
209
217
  | Reference another doc | `` `[text](./page.md)` `` (backticked) or HTML `<a>` | `[text](./page.md)` (raw markdown) |
210
218
  | Embed an image | `<img src="...">` (HTML) or hot-linked external URL | Fetch + save locally + `![meaningful alt](./assets/images/path)` |
211
- | Write a factual claim | plausible prose without citation | prose with `[source](URL)` per Grounding rule |
219
+ | Write a factual claim in a KB doc | plausible prose without citation, OR inline `[source](https://URL)` | `ingest` the source first, then cite the local path per Grounding |
220
+ | Cite a web source you just fetched | inline `[source](https://...)` because YOU did the fetch (not the user) | `ingest` it — agent-initiated fetches are not exempt from the closed-loop rule |
221
+ | Finish a turn that changed KB content | move on without checking for a log | check for a `log.md` and follow its contract per Log discipline |
212
222
  | Add an image | empty alt `![](./x.png)` or generic alt `![image](./x)` | meaningful alt + source caption below |
213
223
  | Catalog folder contents | create `INDEX.md` hub file | add `folders:` entry in `.open-knowledge/config.yml` |
214
224
  | Fork a skill and expect no stomp | Edit installed SKILL.md | `npx skills remove` before CLI upgrade |
@@ -217,14 +227,16 @@ This is primarily a human-watchability concern — the user watches edits land i
217
227
 
218
228
  Three MCP tools build on the primitives above and correspond to [Karpathy's three-layer knowledge-base pattern](https://gist.github.com/karpathy/442a6bf555914893e9891c11519de94f):
219
229
 
220
- | Tool | Layer | When to invoke |
221
- | ------------- | ----------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
222
- | `ingest` | Raw sources (immutable) | User shares a URL, PDF, or file to preserve verbatim. No analysis in the file itself — takeaways go back to the user in chat. |
223
- | `research` | Wiki, provisional | User asks you to investigate, compare alternatives, or synthesize multiple sources. Produces a `status: provisional` article with a `sources:` list. Follows scan-first routing, a STOP scoping gate, 3P-external framing, and a validate checklist — the tool body enforces each step. |
224
- | `consolidate` | Wiki, canonical | Team has actually decided after research and wants the outcome committed as source-of-truth. Starts with a STOP gate confirming the decision exists; writes a `status: canonical` article with a `supersedes:` chain. |
230
+ | Tool | Layer | When to invoke |
231
+ | ------------- | ----------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
232
+ | `ingest` | Raw sources (immutable) | User shares a URL/PDF/file to preserve verbatim, **OR you fetched a URL** (`WebFetch` / `WebSearch` / equivalent) to ground a claim that's about to land in the knowledge base. The KB is closed-loop — agent-initiated fetches are not exempt. No analysis in the file itself — takeaways go back to the user in chat. |
233
+ | `research` | KB, provisional | User asks you to investigate, compare alternatives, or synthesize multiple sources. Produces a `status: provisional` article with a `sources:` list. Follows scan-first routing, a STOP scoping gate, 3P-external framing, and a validate checklist — the tool body enforces each step. |
234
+ | `consolidate` | KB, canonical | Team has actually decided after research and wants the outcome committed as source-of-truth. Starts with a STOP gate confirming the decision exists; writes a `status: canonical` article with a `supersedes:` chain. |
225
235
 
226
236
  Each tool returns a multi-step instructional body when invoked. The bodies enforce their own gates — follow the numbered steps in order, don't skip the STOP gates.
227
237
 
238
+ **These tools are your default move, not `write_document`.** When the work fits one of the three layers — preserving an external source, investigating/synthesizing, committing a decided outcome — invoke the corresponding tool instead of going straight to `write_document` / `edit_document`. The tool bodies enforce framing (sources, status, supersedes chains) that hand-written articles routinely miss. `write_document` is correct for everything that does **not** fit the three layers (specs, runbooks, scratch notes, project pages); for the three that do, lead with the tool. This is doubly true in projects that ran `ok seed` — a doc landing in `external-sources/` / `research/` / `articles/` should have come out of `ingest` / `research` / `consolidate`.
239
+
228
240
  Typical day-2 flow: user shares a URL → `ingest` (preserve) → user asks "now research this" → `research` (provisional article + `ingest`s more sources as needed) → decision lands → `consolidate` (canonical article, supersedes the research).
229
241
 
230
242
  **Do not chain silently.** After `ingest`, ask the user whether to proceed to `research`. After `research`, let the user decide whether the findings are ready to `consolidate`. Each tool completes on its own terms — the user drives the transitions.
package/dist/cli.mjs CHANGED
@@ -1,5 +1,5 @@
1
1
  #!/usr/bin/env node
2
- import{i as e,r as t}from"./constants-DY5Aq0Nx.mjs";import{S as n,m as r,p as i}from"./dist-B_ROt9_c.mjs";import{n as a,t as o}from"./paths-CHt5HnJR.mjs";import{a as s,c,o as l}from"./server-lock-CH0GCP_4-DCYOtKMW.mjs";import{A as u,D as d,Gt as f,Jt as p,Kt as m,S as h,_t as g,ht as _,qt as v}from"./dist-D8JEQ6hO.mjs";import{i as y,n as b,o as x,r as S,s as C,t as w}from"./colors-Jf5QUiX2.mjs";import{t as T}from"./is-object-CEU0BgQ5.mjs";import{r as E}from"./init-DvF3Dl7n.mjs";import{i as D,n as O,t as ee}from"./loader-D3fsdR1w.mjs";import{o as k,s as te}from"./start-C2tHtLw6.mjs";import"./src-BiR06L9a.mjs";import{Command as A}from"commander";import{appendFileSync as ne,closeSync as re,existsSync as j,mkdirSync as ie,openSync as ae,readFileSync as oe,readdirSync as se,realpathSync as ce,statSync as le,unlinkSync as ue,writeFileSync as de}from"node:fs";import{homedir as fe,hostname as pe,platform as me}from"node:os";import{basename as he,dirname as ge,isAbsolute as _e,join as ve,relative as ye,resolve as M}from"node:path";import{parse as be,stringify as xe}from"yaml";import{createOAuthDeviceAuth as Se}from"@octokit/auth-oauth-device";import Ce from"@inquirer/password";import{Octokit as we}from"@octokit/rest";import{fileURLToPath as Te}from"node:url";import{z as N}from"zod";import P from"simple-git";import{randomUUID as Ee}from"node:crypto";import{execFileSync as De,spawn as Oe}from"node:child_process";import{mkdir as ke,readFile as Ae,readdir as je,stat as Me}from"node:fs/promises";import{createServer as Ne,request as Pe}from"node:http";import Fe from"picomatch";import{McpServer as Ie}from"@modelcontextprotocol/sdk/server/mcp.js";import{StdioServerTransport as Le}from"@modelcontextprotocol/sdk/server/stdio.js";import{RootsListChangedNotificationSchema as Re}from"@modelcontextprotocol/sdk/types.js";import{AsyncLocalStorage as ze}from"node:async_hooks";import{Bash as Be,ReadWriteFs as Ve}from"just-bash";import He from"shell-quote";import{createInterface as 
Ue}from"node:readline/promises";const We=`open-knowledge`;var Ge=class{backend=`keyring`;async get(e){let{Entry:t}=await import(`@napi-rs/keyring`);try{let n=new t(We,e).getPassword();return n==null?null:JSON.parse(n)}catch{return null}}async set(e,t,n,r){let{Entry:i}=await import(`@napi-rs/keyring`),a=new i(We,e),o={login:t,token:n,...r};a.setPassword(JSON.stringify(o))}async clear(e){let{Entry:t}=await import(`@napi-rs/keyring`);try{new t(We,e).deletePassword()}catch{}}},Ke=class{backend=`file`;authFile;constructor(e){this.authFile=e??ve(fe(),`.open-knowledge`,`auth.yml`)}read(){if(!j(this.authFile))return{};try{return be(oe(this.authFile,`utf-8`))??{}}catch(e){let t=e instanceof Error?e.message:`unknown error`;return process.stderr.write(`[auth] Failed to parse ${this.authFile}: ${t}. Starting with empty credentials.\n`),{}}}write(e){let t=ge(this.authFile);j(t)||ie(t,{recursive:!0,mode:448}),de(this.authFile,xe(e),{mode:384})}async get(e){return this.read()[e]??null}async set(e,t,n,r){let i=this.read();i[e]={login:t,token:n,...r},this.write(i)}async clear(e){let t=this.read();delete t[e],this.write(t)}};async function qe(e){try{let{Entry:e}=await import(`@napi-rs/keyring`);return new e(We,`__probe__`),process.stderr.write(`[auth] token storage: OS keychain
2
+ import{i as e,r as t}from"./constants-BTD7ZKSR.mjs";import{S as n,m as r,p as i}from"./dist-D4iyaPjq.mjs";import{n as a,t as o}from"./paths-O_X0xRBt.mjs";import{a as s,c,o as l}from"./server-lock-CH0GCP_4-DCYOtKMW.mjs";import{Jt as u,Kt as d,O as f,S as p,Yt as m,gt as h,j as g,qt as _,vt as v}from"./dist-mmLEboji.mjs";import{i as y,n as b,o as x,r as S,s as C,t as w}from"./colors-Jf5QUiX2.mjs";import{t as T}from"./is-object-CEU0BgQ5.mjs";import{r as E}from"./init-BvzZ82y2.mjs";import{i as D,n as O,t as ee}from"./loader-FD-NCStn.mjs";import{o as k,s as te}from"./start-CkSD8obv.mjs";import"./src-BBeY6WLC.mjs";import{Command as A}from"commander";import{appendFileSync as ne,closeSync as re,existsSync as j,mkdirSync as ie,openSync as ae,readFileSync as oe,readdirSync as se,realpathSync as ce,statSync as le,unlinkSync as ue,writeFileSync as de}from"node:fs";import{homedir as fe,hostname as pe,platform as me}from"node:os";import{basename as he,dirname as ge,isAbsolute as _e,join as ve,relative as ye,resolve as M}from"node:path";import{parse as be,stringify as xe}from"yaml";import{createOAuthDeviceAuth as Se}from"@octokit/auth-oauth-device";import Ce from"@inquirer/password";import{Octokit as we}from"@octokit/rest";import{fileURLToPath as Te}from"node:url";import{z as N}from"zod";import P from"simple-git";import{randomUUID as Ee}from"node:crypto";import{execFileSync as De,spawn as Oe}from"node:child_process";import{mkdir as ke,readFile as Ae,readdir as je,stat as Me}from"node:fs/promises";import{createServer as Ne,request as Pe}from"node:http";import Fe from"picomatch";import{McpServer as Ie}from"@modelcontextprotocol/sdk/server/mcp.js";import{StdioServerTransport as Le}from"@modelcontextprotocol/sdk/server/stdio.js";import{RootsListChangedNotificationSchema as Re}from"@modelcontextprotocol/sdk/types.js";import{AsyncLocalStorage as ze}from"node:async_hooks";import{Bash as Be,ReadWriteFs as Ve}from"just-bash";import He from"shell-quote";import{createInterface as 
Ue}from"node:readline/promises";const We=`open-knowledge`;var Ge=class{backend=`keyring`;async get(e){let{Entry:t}=await import(`@napi-rs/keyring`);try{let n=new t(We,e).getPassword();return n==null?null:JSON.parse(n)}catch{return null}}async set(e,t,n,r){let{Entry:i}=await import(`@napi-rs/keyring`),a=new i(We,e),o={login:t,token:n,...r};a.setPassword(JSON.stringify(o))}async clear(e){let{Entry:t}=await import(`@napi-rs/keyring`);try{new t(We,e).deletePassword()}catch{}}},Ke=class{backend=`file`;authFile;constructor(e){this.authFile=e??ve(fe(),`.open-knowledge`,`auth.yml`)}read(){if(!j(this.authFile))return{};try{return be(oe(this.authFile,`utf-8`))??{}}catch(e){let t=e instanceof Error?e.message:`unknown error`;return process.stderr.write(`[auth] Failed to parse ${this.authFile}: ${t}. Starting with empty credentials.\n`),{}}}write(e){let t=ge(this.authFile);j(t)||ie(t,{recursive:!0,mode:448}),de(this.authFile,xe(e),{mode:384})}async get(e){return this.read()[e]??null}async set(e,t,n,r){let i=this.read();i[e]={login:t,token:n,...r},this.write(i)}async clear(e){let t=this.read();delete t[e],this.write(t)}};async function qe(e){try{let{Entry:e}=await import(`@napi-rs/keyring`);return new e(We,`__probe__`),process.stderr.write(`[auth] token storage: OS keychain
3
3
  `),new Ge}catch{return process.stderr.write(`[auth] token storage: file (~/.open-knowledge/auth.yml)
4
4
  `),new Ke(e)}}async function Je(e,t,n){let r=Ye(await Xe(e)).host??``;if(!r)return 1;let i=await n.get(r);if(i==null)return 1;let a=e=>e.replace(/[\r\n]/g,``);return t.write(`username=${a(i.login)}\npassword=${a(i.token)}\n`),0}function Ye(e){let t={};for(let n of e.split(`
5
5
  `)){let e=n.trim();if(e===``)continue;let r=e.indexOf(`=`);r!==-1&&(t[e.slice(0,r)]=e.slice(r+1))}return t}function Xe(e){return new Promise((t,n)=>{let r=[];e.on(`data`,e=>r.push(e)),e.on(`end`,()=>t(Buffer.concat(r).toString(`utf-8`))),e.on(`error`,n)})}function Ze(e){let t=new A(`git-credential`);return t.description(`Git credential helper (git credential-helper protocol)`),t.command(`get`).description(`Lookup credentials from TokenStore (called by git)`).action(async()=>{let t=await e(),n=await Je(process.stdin,process.stdout,t);process.exit(n)}),t}async function Qe(e){let{clientId:t,scopes:n=[`repo`,`read:user`,`user:email`],onVerification:r,host:i}=e,a=i&&i!==`github.com`?`https://${i}/api/v3`:`https://api.github.com`,o=Se({clientType:`oauth-app`,clientId:t,scopes:n,onVerification:async e=>{await r({verificationUri:e.verification_uri,userCode:e.user_code,expiresIn:e.expires_in,interval:e.interval})},request:a===`https://api.github.com`?void 0:(await import(`@octokit/request`)).request.defaults({baseUrl:a})}),s;try{s=await o({type:`oauth`})}catch(e){if(e instanceof Error){let t=e.message.toLowerCase();throw t.includes(`access_denied`)?Error(`Device-flow authorization was denied.`):t.includes(`expired_token`)||t.includes(`timeout`)||t.includes(`timed out`)?Error(`Device-flow code expired before authorization — please try again.`):Error(`GitHub sign-in failed: ${e.message}`)}throw e}return{token:s.token,tokenType:s.tokenType,scopes:s.scopes??[]}}function $e(e){return process.env.OPEN_KNOWLEDGE_GITHUB_CLIENT_ID??e?.github?.oauthAppClientId??`Ov23liqlSd0V1MwR6rhI`}const et=new Set([`gitlab.com`,`bitbucket.org`,`codeberg.org`,`gitea.com`,`sr.ht`,`sourcehut.org`]);function tt(e){let t=e.toLowerCase().replace(/:\d+$/,``);et.has(t)&&(process.stderr.write(`Error: ${e} is not a GitHub host. 
Only GitHub and GitHub Enterprise Server are supported.\n`),process.exit(1))}function nt(e,t){e&&process.stdout.write(`${JSON.stringify(t)}\n`)}async function rt(e,t,n,r=Qe){let i=$e(n),{host:a,json:o}=e;tt(a),o||process.stderr.write(`Logging in to ${a}…\n`);let s=await r({clientId:i,host:a===`github.com`?void 0:a,onVerification:e=>{e.userCode,e.verificationUri,o?nt(!0,{type:`verification`,user_code:e.userCode,verification_uri:e.verificationUri,expires_in:e.expiresIn}):process.stderr.write(`Open: ${e.verificationUri}\nEnter code: ${e.userCode}\n`)}}),c=`unknown`,l,u;try{let e=a===`github.com`?`https://api.github.com`:`https://${a}/api/v3`,t=await fetch(`${e}/user`,{headers:{Authorization:`Bearer ${s.token}`,"User-Agent":`open-knowledge-cli`,Accept:`application/vnd.github+json`}});if(t.ok){let e=await t.json();c=e.login??c,l=e.name??void 0,u=e.email??void 0}}catch{}await t.set(a,c,s.token,{gitProtocol:`https`,name:l,email:u}),o?nt(!0,{type:`complete`,host:a,login:c}):process.stderr.write(`✓ Logged in as ${c} on ${a}\n`)}function it(e,t){return new A(`login`).description(`Authenticate with GitHub via Device Flow`).option(`--host <host>`,`GitHub or GitHub Enterprise hostname`,`github.com`).option(`--json`,`Output JSONL progress events`,!1).action(async n=>{await rt(n,await t(),e())})}async function at(e,t,n){let{host:r,json:i}=e;tt(r);let a=await(n??(()=>Ce({message:`Enter PAT:`})))();a||(process.stderr.write(`No token provided
@@ -7,7 +7,7 @@ import{i as e,r as t}from"./constants-DY5Aq0Nx.mjs";import{S as n,m as r,p as i}
7
7
  `),process.exit(1)}await t.set(r,c,a,{gitProtocol:`https`,name:l,email:u}),i?process.stdout.write(`${JSON.stringify({type:`complete`,host:r,login:c})}\n`):process.stderr.write(`✓ PAT stored for ${c} on ${r}\n`)}function ot(e){return new A(`pat`).description(`Store a Personal Access Token`).option(`--host <host>`,`GitHub or GitHub Enterprise hostname`,`github.com`).option(`--json`,`Output JSON`,!1).action(async t=>{await at(t,await e())})}async function st(e,t){let{host:n,json:r}=e;tt(n);let i=await t.get(n);i??(process.stderr.write(`Not logged in to ${n}\n`),process.exit(1));let a=n===`github.com`?void 0:`https://${n}/api/v3`,o=new we({auth:i.token,...a?{baseUrl:a}:{}}),s=[];for await(let e of o.paginate.iterator(o.repos.listForAuthenticatedUser,{per_page:100,sort:`updated`}))for(let t of e.data)s.push({full_name:t.full_name,clone_url:t.clone_url,private:t.private});if(r)process.stdout.write(`${JSON.stringify({type:`repos`,host:n,repos:s})}\n`);else for(let e of s)process.stdout.write(`${e.full_name} ${e.clone_url}\n`)}function ct(e){return new A(`repos`).description(`List accessible repositories`).option(`--host <host>`,`GitHub or GitHub Enterprise hostname`,`github.com`).option(`--json`,`Output JSON`,!1).action(async t=>{await st(t,await e())})}async function lt(e,t){let{host:n}=e;await t.clear(n),process.stderr.write(`✓ Signed out from ${n}\n`)}function ut(e){return new A(`signout`).description(`Remove stored credentials`).option(`--host <host>`,`GitHub hostname`,`github.com`).action(async t=>{await lt(t,await e())})}async function dt(e,t){let{host:n,json:r}=e;tt(n);let i=await t.get(n);i??(r?process.stdout.write(`${JSON.stringify({type:`status`,host:n,authenticated:!1})}\n`):process.stderr.write(`Not logged in to ${n}\n`),process.exit(1));let a=n===`github.com`?void 0:`https://${n}/api/v3`,o=new we({auth:i.token,...a?{baseUrl:a}:{}});try{let{data:e}=await 
o.users.getAuthenticated();r?process.stdout.write(`${JSON.stringify({type:`status`,host:n,authenticated:!0,login:e.login,name:e.name,email:e.email})}\n`):process.stderr.write(`✓ Logged in as ${e.login} on ${n}\n`)}catch{r?process.stdout.write(JSON.stringify({type:`status`,host:n,authenticated:!1,error:`token invalid`})+`
8
8
  `):process.stderr.write(`✗ Token invalid for ${n}\n`),process.exit(1)}}function ft(e){return new A(`status`).description(`Show authentication status`).option(`--host <host>`,`GitHub or GitHub Enterprise hostname`,`github.com`).option(`--json`,`Output JSON`,!1).action(async t=>{await dt(t,await e())})}function pt(e){let t=new A(`auth`);t.description(`GitHub authentication management`);let n=()=>qe(),r=e??(()=>({}));return t.addCommand(it(r,n)),t.addCommand(ft(n)),t.addCommand(ct(n)),t.addCommand(ut(n)),t.addCommand(ot(n)),t.addCommand(Ze(n)),t}function mt(e,t,n={}){let r=l(e,t);if(!j(r))return{status:`missing`,lockPath:r};let i;try{i=JSON.parse(oe(r,`utf-8`))}catch{return{status:`corrupt`,lockPath:r}}if(!i||typeof i!=`object`||typeof i.pid!=`number`)return{status:`corrupt`,lockPath:r};let a=i,o=n.host??pe();return a.hostname===o?(n.isAlive??s)(a.pid)?{status:`alive`,lockPath:r,lock:a}:{status:`dead-pid`,lockPath:r,lock:a}:{status:`foreign-host`,lockPath:r,lock:a}}function ht(e,t){let n=[];for(let[r,i]of[[`server`,e],[`ui`,t]])(i.status===`dead-pid`||i.status===`corrupt`)&&n.push({name:r,lockPath:i.lockPath,reason:i.status});return{prune:n}}function gt(e){let t=e.inspect??(t=>mt(e.lockDir,t)),n=e.unlink??(e=>ue(e)),r=e.log??(e=>console.log(e)),i=e.error??(e=>console.error(e)),a=ht(t(`server`),t(`ui`));if(a.prune.length===0)return r(`No stale locks.`),{pruned:[],failed:[]};let o=[],s=[];for(let e of a.prune)try{n(e.lockPath),o.push(e)}catch(t){s.push({target:e,error:t instanceof Error?t.message:String(t)})}if(o.length>0){let e=o.map(e=>`${e.name} (${e.reason})`).join(`, `);r(`Pruned ${o.length} stale lock${o.length===1?``:`s`}: ${e}`)}return s.length>0&&i(`Failed to prune: ${s.map(({target:e,error:t})=>`${e.name} (${e.lockPath}): ${t}`).join(`; `)}`),{pruned:o,failed:s}}function _t(e){return new A(`clean`).description(`Prune stale / corrupt open-knowledge lock files (never touches live 
locks)`).action(()=>{gt({lockDir:a(o(e(),process.cwd()))}).failed.length>0&&(process.exitCode=1)})}function vt(){try{let e=De(`gh`,[`auth`,`token`],{encoding:`utf-8`,stdio:[`ignore`,`pipe`,`pipe`],timeout:5e3}).trim();return e.length===0?{available:!1}:{available:!0,token:e}}catch{return{available:!1}}}async function yt(e,t,n={},r=vt){if(!n.skipGhDetect&&r().available)return{tier:`A`,credentialArgs:[`-c`,`credential.helper=!gh auth git-credential`]};let i=await t.get(e);return i==null?{tier:`none`,credentialArgs:[]}:{tier:i.gitProtocol===`ssh`?`C`:`B`,credentialArgs:[`-c`,`credential.helper=!open-knowledge auth git-credential`]}}function bt(e){return e.replace(/:\d+$/,``)}function xt(e){let t=e.trim();if(!t)return null;{let e=/^https?:\/\/([^/?#]+)\/([\w.\-~%]+)\/([\w.\-~%]+?)(?:\.git)?\/?$/.exec(t);if(e)return{protocol:`https`,hostname:bt(e[1]),owner:e[2],name:e[3]}}{let e=/^ssh:\/\/(?:[\w.-]+@)?([^/?#]+)\/([\w.\-~%]+)\/([\w.\-~%]+?)(?:\.git)?\/?$/.exec(t);if(e)return{protocol:`ssh`,hostname:bt(e[1]),owner:e[2],name:e[3]}}{let e=/^git:\/\/([^/?#]+)\/([\w.\-~%]+)\/([\w.\-~%]+?)(?:\.git)?\/?$/.exec(t);if(e)return{protocol:`git`,hostname:bt(e[1]),owner:e[2],name:e[3]}}{let e=/^(?:[\w.-]+@)?([\w.-]+):([\w.\-~%]+)\/([\w.\-~%]+?)(?:\.git)?$/.exec(t);if(e?.[1].includes(`.`)||e&&t.startsWith(`git@`))return{protocol:`ssh`,hostname:e[1],owner:e[2],name:e[3]}}{let e=/^git:([\w.-]+)\/([\w.\-~%]+)\/([\w.\-~%]+?)(?:\.git)?\/?$/.exec(t);if(e)return{protocol:`git`,hostname:e[1],owner:e[2],name:e[3]}}if(!t.includes(`://`)&&!t.includes(`@`)&&!t.startsWith(`/`)){let e=/^([\w.-]+)\/([\w.\-~%]+?)(?:\.git)?$/.exec(t);if(e)return{protocol:`https`,hostname:`github.com`,owner:e[1],name:e[2]}}return null}const St=[[`count`,0,10],[`compress`,10,20],[`receiv`,20,60],[`resolv`,60,100]];function Ct(e){let t=/^([\w ]+):\s+(\d+)%/.exec(e.trim());if(!t)return null;let n=t[1].toLowerCase(),r=Number(t[2]);for(let[e,i,a]of St)if(n.includes(e))return{stage:t[1],pct:Math.round(i+r/100*(a-i))};return 
null}function wt(e,t){e&&process.stdout.write(`${JSON.stringify(t)}\n`)}async function Tt(e,t,n,r=process.cwd()){let i=xt(e);if(!i)throw Error(`Invalid git URL: ${e}`);let a=t.dir?M(r,t.dir):M(r,i.name);if(j(a)&&se(a).length>0)throw Error(`Target directory is not empty: ${a}`);let o=await qe(),s=await yt(i.hostname,o,{}),c=P({baseDir:r,config:s.credentialArgs.length>=2?[s.credentialArgs[1]]:[],unsafe:{allowUnsafeCredentialHelper:!0}}).env({GIT_TERMINAL_PROMPT:`0`}),l=-1;if(c.outputHandler((e,n,r)=>{r.on(`data`,e=>{let n=e.toString(`utf-8`);for(let e of n.split(`
9
9
  `)){let n=Ct(e);n&&n.pct!==l&&(l=n.pct,wt(t.json,{type:`progress`,pct:n.pct,stage:n.stage}),t.json||process.stderr.write(`\r Cloning… ${n.pct}%`))}})}),await c.clone(e,a,[`--progress`]),t.json||process.stderr.write(`
10
- `),!j(M(a,`.open-knowledge`)))try{let[{runInit:e},{ensureOkGitignoredAtRoot:t}]=await Promise.all([import(`./init-CHB65M2f.mjs`),import(`./init-CXOhIFdP.mjs`)]);await e({cwd:a,mcp:!1});try{t(a)}catch{}}catch{}return a}function Et(e){return new A(`clone`).description(`Clone a git repository and open it`).argument(`<url>`,`Repository URL or owner/repo shorthand`).argument(`[dir]`,`Target directory (default: ./<repo-name>)`).option(`--json`,`Output JSONL progress events`,!1).action(async(t,n,r)=>{let i=e();try{let a=await Tt(t,{json:r.json,dir:n},i);if(r.json)wt(!0,{type:`complete`,dir:a});else{process.stderr.write(`✓ Cloned to ${a}\n`),process.chdir(a);let{startCommand:t}=await import(`./start-BF4fn4HY.mjs`);await t(e).parseAsync([],{from:`user`})}}catch(e){let t=e instanceof Error?e.message:String(e);r.json?wt(!0,{type:`error`,message:t}):process.stderr.write(`✗ ${t}\n`),process.exitCode=1}})}function Dt(e=fe()){return ve(e,`Downloads`,`openknowledge.skill`)}function Ot(e,t,n){try{return t===`darwin`?(n(`open`,[e],{detached:!0,stdio:`ignore`}).unref(),{ok:!0}):t===`win32`?(n(`cmd`,[`/c`,`start`,`""`,e],{detached:!0,stdio:`ignore`}).unref(),{ok:!0}):t===`linux`?(n(`xdg-open`,[e],{detached:!0,stdio:`ignore`}).unref(),{ok:!0}):{ok:!1,reason:`unsupported-platform`,message:`Platform '${t}' has no file-association invocation wired. 
Use --no-open and open the file manually.`}}catch(e){return{ok:!1,reason:`spawn-error`,message:e instanceof Error?e.message:String(e)}}}async function kt(e={}){let t=M(e.out??Dt()),n=e.platformName??me(),r=e.spawnFn??Oe;try{await ke(ge(t),{recursive:!0})}catch(e){return{status:`failed`,message:`${S(`Error:`)} could not create output directory: ${e instanceof Error?e.message:String(e)}`,exitCode:1}}let i;try{i=await u({outputPath:t,skipVersionCheck:!0})}catch(e){return{status:`failed`,message:`${S(`Error:`)} build failed — ${e instanceof Error?e.message:String(e)}`,exitCode:1}}if(e.noOpen)return{status:`built`,outputPath:i.outputPath,size:i.size,sha256:i.sha256,cliVersion:i.cliVersion,skillVersion:i.skillVersion,message:[x(`Built ${i.outputPath}`),b(` ${i.size} bytes • sha256 ${i.sha256.slice(0,12)}…`),y(` Open the Claude Desktop App, then: ${w(`Customize → Skills → + → Create skill → Upload skill`)} → pick the file.`)].join(`
10
+ `),!j(M(a,`.open-knowledge`)))try{let[{runInit:e},{ensureOkGitignoredAtRoot:t}]=await Promise.all([import(`./init-CZ_E019R.mjs`),import(`./init--i3qKzKM.mjs`)]);await e({cwd:a,mcp:!1});try{t(a)}catch{}}catch{}return a}function Et(e){return new A(`clone`).description(`Clone a git repository and open it`).argument(`<url>`,`Repository URL or owner/repo shorthand`).argument(`[dir]`,`Target directory (default: ./<repo-name>)`).option(`--json`,`Output JSONL progress events`,!1).action(async(t,n,r)=>{let i=e();try{let a=await Tt(t,{json:r.json,dir:n},i);if(r.json)wt(!0,{type:`complete`,dir:a});else{process.stderr.write(`✓ Cloned to ${a}\n`),process.chdir(a);let{startCommand:t}=await import(`./start-Bds5ijsM.mjs`);await t(e).parseAsync([],{from:`user`})}}catch(e){let t=e instanceof Error?e.message:String(e);r.json?wt(!0,{type:`error`,message:t}):process.stderr.write(`✗ ${t}\n`),process.exitCode=1}})}function Dt(e=fe()){return ve(e,`Downloads`,`openknowledge.skill`)}function Ot(e,t,n){try{return t===`darwin`?(n(`open`,[e],{detached:!0,stdio:`ignore`}).unref(),{ok:!0}):t===`win32`?(n(`cmd`,[`/c`,`start`,`""`,e],{detached:!0,stdio:`ignore`}).unref(),{ok:!0}):t===`linux`?(n(`xdg-open`,[e],{detached:!0,stdio:`ignore`}).unref(),{ok:!0}):{ok:!1,reason:`unsupported-platform`,message:`Platform '${t}' has no file-association invocation wired. 
Use --no-open and open the file manually.`}}catch(e){return{ok:!1,reason:`spawn-error`,message:e instanceof Error?e.message:String(e)}}}async function kt(e={}){let t=M(e.out??Dt()),n=e.platformName??me(),r=e.spawnFn??Oe;try{await ke(ge(t),{recursive:!0})}catch(e){return{status:`failed`,message:`${S(`Error:`)} could not create output directory: ${e instanceof Error?e.message:String(e)}`,exitCode:1}}let i;try{i=await g({outputPath:t,skipVersionCheck:!0})}catch(e){return{status:`failed`,message:`${S(`Error:`)} build failed — ${e instanceof Error?e.message:String(e)}`,exitCode:1}}if(e.noOpen)return{status:`built`,outputPath:i.outputPath,size:i.size,sha256:i.sha256,cliVersion:i.cliVersion,skillVersion:i.skillVersion,message:[x(`Built ${i.outputPath}`),b(` ${i.size} bytes • sha256 ${i.sha256.slice(0,12)}…`),y(` Open the Claude Desktop App, then: ${w(`Customize → Skills → + → Create skill → Upload skill`)} → pick the file.`)].join(`
11
11
  `),exitCode:0};let a=Ot(i.outputPath,n,r);return a.ok?{status:`installed`,outputPath:i.outputPath,size:i.size,sha256:i.sha256,cliVersion:i.cliVersion,skillVersion:i.skillVersion,message:[x(`Built ${i.outputPath}`),b(` ${i.size} bytes • sha256 ${i.sha256.slice(0,12)}… • CLI v${i.cliVersion}`),y(` Claude Desktop App opened. Now upload the file manually:`),` 1. ${w(`Customize`)} (sidebar) → ${w(`Skills`)}`,` 2. Click the ${w(`+`)} button`,` 3. Click ${w(`Create skill`)}`,` 4. Click ${w(`Upload skill`)}`,` 5. Pick ${w(`openknowledge.skill`)} from Downloads`,b(` If Claude Desktop didn't open, open it and start at step 1. The file is at ${i.outputPath}`)].join(`
12
12
  `),exitCode:0}:{status:`built`,outputPath:i.outputPath,size:i.size,sha256:i.sha256,cliVersion:i.cliVersion,skillVersion:i.skillVersion,message:[x(`Built ${i.outputPath}`),C(` Handoff failed: ${a.message}`),y(` Open the Claude Desktop App, then: ${w(`Customize → Skills → + → Create skill → Upload skill`)} → pick the file.`)].join(`
13
13
  `),exitCode:0}}function At(){return new A(`install-skill`).description("Build openknowledge.skill and open the Claude Desktop App so you can upload it for Claude Chat & Cowork. Not needed for Claude Code — `ok init` covers that separately.").option(`--out <path>`,`Custom output path (default: ~/Downloads/openknowledge.skill)`).option(`--no-open`,`Build the file but skip the OS file-association handoff`).action(async e=>{let t=await kt({out:e.out,noOpen:!e.open});process.stdout.write(`${t.message}\n`),t.exitCode!==0&&process.exit(t.exitCode)})}const jt=new ze;var Mt=class e{sessionId;corrId;component;constructor(e=`mcp`,t){this.sessionId=t??Ee().slice(0,12),this.corrId=Ee().slice(0,8),this.component=e}info(e,t={}){this.emit(`info`,e,t)}warn(e,t={}){this.emit(`warn`,e,t)}error(e,t,n={}){let r=t?{error:t instanceof Error?t.message:String(t),...n}:n;this.emit(`error`,e,r)}debug(e,t={}){(process.env.MCP_DEBUG===`1`||process.env.DEBUG?.includes(`mcp`))&&this.emit(`debug`,e,t)}child(t){return new e(t??this.component,this.sessionId)}asCallback(){return e=>this.info(e)}emit(e,t,n){let r={ts:new Date().toISOString(),level:e,sessionId:this.sessionId,corrId:this.corrId,component:this.component,msg:t,...n},i=`${JSON.stringify(r)}\n`;process.stderr.write(i);let a=process.env.OK_LOG_FILE;if(a)try{ne(a,i)}catch(e){console.warn(`[mcp-logger] Failed to write to OK_LOG_FILE: ${e instanceof Error?e.message:e}`)}}};function Nt(e=`mcp`){return new Mt(e)}function Pt(e,t){return jt.run(e,t)}function Ft(){return jt.getStore()}const It=new Set([`find`,`markdown`,`replace`]),Lt=[`backlinks`,`deadLinks`,`documents`,`enrichedPaths`,`entries`,`forwardLinks`,`hints`,`hubs`,`orphans`,`results`],Rt=[`checkpointRef`,`cwd`,`fileCount`,`matchCount`,`ok`,`query`,`stdoutTruncated`,`truncated`];function F(e){return typeof e==`object`&&!!e&&!Array.isArray(e)}function zt(e){return F(e)&&`requestId`in e}function 
Bt(e,t){if(It.has(e))return{redacted:!0,type:`string`,length:t.length,lines:t.length===0?0:t.split(`
@@ -176,14 +176,14 @@ superseded_by: <path-to-new-canonical-article>.md
176
176
  - **Don't rewrite research prose verbatim** — canonical articles have a different voice (direct, decided) than research (exploratory, provisional)
177
177
  - **Don't skip the supersedes / superseded_by links** — the audit trail matters for future readers
178
178
  `}const an=[`Promote research into a canonical article inside the project content directory. Canonical, not provisional — the output is the source of truth for future agents.`,``,`**Use when:**`,`- A team has made a decision after research and wants the outcome committed as canonical knowledge`,`- Compacting several provisional research notes into one authoritative article`,`- A developer asks to "consolidate" or "finalize" knowledge on a topic`,``,`**Triggers on:**`,`- "consolidate", "finalize", "promote to canonical", "make this official"`,`- User says the team has decided and wants the outcome written as canonical`,`- Research has stabilized and a destination article is needed`].join(`
179
- `);function on(e,t){e.tool(`consolidate`,an,{topic:N.string().describe(`The topic to consolidate into a canonical article`),cwd:N.string().optional().describe(I)},async e=>{let n=await nn(t.resolveCwd,t.config,e.cwd);return n.ok?R(rn(e.topic,n.config.content.dir),{previewUrl:null}):L(`Error: ${n.error}`,!0)})}function sn(e){return e.split(`/`).map(encodeURIComponent).join(`/`)}function cn(e){return e.endsWith(`/`)?e.slice(0,-1):e}function ln(e){try{return new URL(e),!0}catch{return!1}}async function W(e,t,n){let r=n??await t.resolveCwd(),i=await tn(t.config,r),s=o(i,r);return fn(e,{config:i,lockDir:a(s),contentDir:s})}function un(e){try{let t=g(e.lockDir);if(t&&t.port>0)return{baseUrl:`http://localhost:${t.port}`,port:t.port}}catch(t){process.stderr.write(`[preview-url] readUiLock failed at ${e.lockDir} while building ui block: ${t instanceof Error?t.message:String(t)}\n`)}return{baseUrl:null,port:null}}async function G(e,t){let n=t??await e.resolveCwd(),r=await tn(e.config,n),i=o(r,n),s={config:r,lockDir:a(i),contentDir:i};return{resolve:e=>fn(e,s),ui:un(s)}}function dn(e){let t=e.toLowerCase();return t.endsWith(`.md`)?e.slice(0,-3):t.endsWith(`.mdx`)?e.slice(0,-4):e}function fn(e,t){let n=`/#/${sn(e)}`;if(process.env.OK_ELECTRON_PROTOCOL_HOST===`1`&&t.contentDir)try{let n=ce(t.contentDir);return{url:`openknowledge://open?project=${encodeURIComponent(n)}&doc=${encodeURIComponent(e)}`,source:`electron-protocol`}}catch(e){process.stderr.write(`[preview-url] realpathSync failed for ${t.contentDir}, falling through to http sources: ${e instanceof Error?e.message:String(e)}\n`)}let r=process.env.OPEN_KNOWLEDGE_PREVIEW_BASE_URL;if(r&&ln(r))return{url:`${cn(r)}${n}`,source:`env`};try{let e=g(t.lockDir);if(e&&e.port>0)return{url:`http://localhost:${e.port}${n}`,source:`lock`}}catch(e){process.stderr.write(`[preview-url] readUiLock failed at ${t.lockDir}, falling through to config: ${e instanceof Error?e.message:String(e)}\n`)}let i=t.config.preview?.baseUrl;return 
i&&ln(i)?{url:`${cn(i)}${n}`,source:`config`}:null}const pn=[`[Requires: Hocuspocus server] Find-and-replace on a live document via the CRDT layer.`,`The patch is applied through Hocuspocus and propagated to all connected editors in real-time.`,"Use `offset` when you need to patch an exact occurrence; omit it to preserve first-match behavior.",``,"**When rewriting prose, add `[[wiki-links]]` aggressively.** If the replacement mentions other documents or entities that should have their own page, link them as `[[Page Name]]`. Over-linking is the goal; underlinked documents lose their value in backlink-driven navigation.",``,`**Parameters:**`,"- `docName` — Document name, typically without extension. A trailing `.md` or `.mdx` is stripped automatically.","- `find` — Text to find (exact match)","- `replace` — Replacement text","- `offset` (optional) — Exact occurrence to patch, as a JavaScript string offset in the current markdown. If the document changed and the text no longer matches there, the server returns a stale-target error; re-run `suggest_links` to get fresh offsets.",'- `summary` — Optional one-line user-outcome description of this edit (≤80 chars). Appears as a bullet in the document timeline so readers can scan intent without opening every diff. Prefer outcome phrasing ("Fixed token-refresh race") over structural ("Changed 1 line"). Avoid including secrets or PII — summaries are persisted to git history.'].join(`
179
+ `);function on(e,t){e.tool(`consolidate`,an,{topic:N.string().describe(`The topic to consolidate into a canonical article`),cwd:N.string().optional().describe(I)},async e=>{let n=await nn(t.resolveCwd,t.config,e.cwd);return n.ok?R(rn(e.topic,n.config.content.dir),{previewUrl:null}):L(`Error: ${n.error}`,!0)})}function sn(e){return e.split(`/`).map(encodeURIComponent).join(`/`)}function cn(e){return e.endsWith(`/`)?e.slice(0,-1):e}function ln(e){try{return new URL(e),!0}catch{return!1}}async function W(e,t,n){let r=n??await t.resolveCwd(),i=await tn(t.config,r),s=o(i,r);return fn(e,{config:i,lockDir:a(s),contentDir:s})}function un(e){try{let t=v(e.lockDir);if(t&&t.port>0)return{baseUrl:`http://localhost:${t.port}`,port:t.port}}catch(t){process.stderr.write(`[preview-url] readUiLock failed at ${e.lockDir} while building ui block: ${t instanceof Error?t.message:String(t)}\n`)}return{baseUrl:null,port:null}}async function G(e,t){let n=t??await e.resolveCwd(),r=await tn(e.config,n),i=o(r,n),s={config:r,lockDir:a(i),contentDir:i};return{resolve:e=>fn(e,s),ui:un(s)}}function dn(e){let t=e.toLowerCase();return t.endsWith(`.md`)?e.slice(0,-3):t.endsWith(`.mdx`)?e.slice(0,-4):e}function fn(e,t){let n=`/#/${sn(e)}`;if(process.env.OK_ELECTRON_PROTOCOL_HOST===`1`&&t.contentDir)try{let n=ce(t.contentDir);return{url:`openknowledge://open?project=${encodeURIComponent(n)}&doc=${encodeURIComponent(e)}`,source:`electron-protocol`}}catch(e){process.stderr.write(`[preview-url] realpathSync failed for ${t.contentDir}, falling through to http sources: ${e instanceof Error?e.message:String(e)}\n`)}let r=process.env.OPEN_KNOWLEDGE_PREVIEW_BASE_URL;if(r&&ln(r))return{url:`${cn(r)}${n}`,source:`env`};try{let e=v(t.lockDir);if(e&&e.port>0)return{url:`http://localhost:${e.port}${n}`,source:`lock`}}catch(e){process.stderr.write(`[preview-url] readUiLock failed at ${t.lockDir}, falling through to config: ${e instanceof Error?e.message:String(e)}\n`)}let i=t.config.preview?.baseUrl;return 
i&&ln(i)?{url:`${cn(i)}${n}`,source:`config`}:null}const pn=[`[Requires: Hocuspocus server] Find-and-replace on a live document via the CRDT layer.`,`The patch is applied through Hocuspocus and propagated to all connected editors in real-time.`,"Use `offset` when you need to patch an exact occurrence; omit it to preserve first-match behavior.",``,"**When rewriting prose, add `[[wiki-links]]` aggressively.** If the replacement mentions other documents or entities that should have their own page, link them as `[[Page Name]]`. Over-linking is the goal; underlinked documents lose their value in backlink-driven navigation.",``,`**Parameters:**`,"- `docName` — Document name, typically without extension. A trailing `.md` or `.mdx` is stripped automatically.","- `find` — Text to find (exact match)","- `replace` — Replacement text","- `offset` (optional) — Exact occurrence to patch, as a JavaScript string offset in the current markdown. If the document changed and the text no longer matches there, the server returns a stale-target error; re-run `suggest_links` to get fresh offsets.",'- `summary` — Optional one-line user-outcome description of this edit (≤80 chars). Appears as a bullet in the document timeline so readers can scan intent without opening every diff. Prefer outcome phrasing ("Fixed token-refresh race") over structural ("Changed 1 line"). Avoid including secrets or PII — summaries are persisted to git history.'].join(`
180
180
  `);function mn(e,t){e.tool(`edit_document`,pn,{docName:N.string().describe(`Document name to edit`),find:N.string().describe(`Text to find (exact match)`),replace:N.string().describe(`Replacement text`),offset:N.number().int().min(0).optional().describe(`Exact occurrence to patch, as a JavaScript string offset in the current markdown`),summary:Yt,cwd:N.string().optional().describe(I)},async e=>{let n=await B(t.resolveCwd,t.config,t.serverUrl,e.cwd);if(!n.ok)return L(`Error: ${n.error}`,!0);let{cwd:r,config:i,url:s}=n;if(!s)return L(z,!0);let c=V(e.docName);if(!c.ok)return L(c.error,!0);let l=t.identityRef?.current,u=await U(s,`/api/agent-patch`,{docName:c.docName,find:e.find,replace:e.replace,offset:e.offset,...e.summary===void 0?{}:{summary:e.summary},...l?{agentId:l.connectionId,agentName:l.displayName,clientName:l.clientInfo?.name,colorSeed:l.colorSeed}:{}});if(!u.ok)return L(`Error: ${u.error}`,!0);let d=a(o(i,r)),f=fn(c.docName,{config:i,lockDir:d}),p=typeof u.subscriberCount==`number`?u.subscriberCount:void 0,m=(typeof u.systemSubscriberCount==`number`?u.systemSubscriberCount:void 0)===0,h=p===0,g=u.summary&&typeof u.summary==`object`?u.summary:void 0,_=typeof g?.hint==`string`?g.hint:void 0,v=[`Edit applied successfully.`];f&&v.push(`Preview: ${f.url}`),m&&v.push(f?`Open ${f.url} in your preview browser.`:`No preview attached. Start the UI.`),_&&v.push(_);let y=v.join(`
181
181
  `);if(!f&&!m&&!h&&!g)return L(y);let b={};return f&&(b.previewUrl=f.url,b.previewUrlSource=f.source),m&&(b.warning={message:`Open the previewUrl in your preview browser.`,action:`attach-preview-once`,previewUrl:f?.url??null}),g&&(b.summary=g),R(y,b)})}const hn=new Set([`cat`,`ls`,`grep`,`find`]),gn=/\b[\w./-]+\.(md|mdx)\b/g;function _n(e){return/\.(md|mdx)$/.test(e)}function vn(e){let t=e.trim();return t?(t=t.replace(/\/+/g,`/`),t.startsWith(`./`)&&(t=t.slice(2)),t.endsWith(`/`)&&(t=t.slice(0,-1)),t):``}function K(e){return e.args.slice(1)}function q(e){return e.filter(e=>!e.startsWith(`-`))}function yn(e){return q(K(e)).filter(_n)}function bn(e,t){let n=q(K(t)),r=n.length>0?n[n.length-1]:``,i=r&&r!==`.`?vn(r):``,a=[];i&&a.push(i);for(let t of e.split(`
182
182
  `)){let e=t.trim();if(!e||/\.[a-z0-9]+$/i.test(e)&&!_n(e))continue;let n=i?`${i}/${e}`:e;a.push(n)}return a}function xn(e){let t=[];for(let n of e.split(`
183
183
  `)){if(!n)continue;let e=n.indexOf(`:`);if(e<0)continue;let r=vn(n.slice(0,e));_n(r)&&t.push(r)}return t}function Sn(e){let t=[];for(let n of e.split(`
184
184
  `)){let e=vn(n);e&&_n(e)&&t.push(e)}return t}function Cn(e){return q(K(e)).filter(_n)}function wn(e){return q(K(e)).length>0}function Tn(e){let t=[],n=e.matchAll(gn);for(let e of n)t.push(vn(e[0]));return t}function En(e,t){let n=null;for(let e=t.length-1;e>=0;e--){let r=t[e];if(hn.has(r.command)){n=r;break}if((r.command===`head`||r.command===`tail`)&&wn(r)){n=r;break}}let r;if(!n)r=Tn(e);else{switch(n.command){case`cat`:r=yn(n);break;case`ls`:r=bn(e,n);break;case`grep`:r=xn(e);break;case`find`:r=Sn(e);break;case`head`:case`tail`:r=Cn(n);break;default:r=Tn(e)}r.length===0&&(r=Tn(e))}let i=new Set,a=[];for(let e of r){let t=vn(e);!t||i.has(t)||(i.add(t),a.push(t))}return a}function J(e){return e===``?`''`:/^[\w.\-/]+$/.test(e)?e:`'${e.replace(/'/g,`'\\''`)}'`}const Dn=16*1024*1024;var On=class extends Error{limitBytes;actualBytes;partial;constructor(e,t,n){super(`Output exceeded ${e} byte buffer (got ${t}); narrow the command`),this.name=`StdoutOverflowError`,this.limitBytes=e,this.actualBytes=t,this.partial=n}};function kn(e){if(!_e(e))throw Error(`createBashInstance: cwd must be absolute (got: ${e})`);return new Be({cwd:`/`,fs:new Ve({root:M(e),allowSymlinks:!1})})}async function An(e,t){let n=await e.exec(t);if(n.stdout.length>Dn)throw new On(Dn,n.stdout.length,{stdout:n.stdout.slice(0,Dn),stderr:n.stderr,exitCode:n.exitCode});return{stdout:n.stdout,stderr:n.stderr,exitCode:n.exitCode}}function jn(e){return e.startsWith(`**/`)?e.slice(3):e}async function Mn(e,t,n={}){let r=kn(t),i=[`-rn`,`-F`];(n.caseInsensitive??!0)&&i.push(`-i`);for(let e of n.include??[])i.push(`--include=${J(jn(e))}`);for(let e of n.exclude??[])i.push(`--exclude=${J(jn(e))}`),i.push(`--exclude-dir=${J(jn(e))}`);let a=n.paths?.length?n.paths.map(J):[`.`],o=`grep ${i.join(` `)} ${J(e)} ${a.join(` `)}`,s;try{s=await An(r,o)}catch(e){if(e instanceof On)s=e.partial;else throw e}if(s.exitCode===1&&!s.stdout)return[];if(s.exitCode!==0&&s.exitCode!==1&&!s.stdout)throw Error(`grep exited 
${s.exitCode}: ${s.stderr}`);let c=[],l=n.maxResults??1/0;for(let e of s.stdout.split(`
185
185
  `)){if(!e)continue;if(c.length>=l)break;let t=e.indexOf(`:`);if(t===-1)continue;let n=e.indexOf(`:`,t+1);if(n===-1)continue;let r=e.slice(0,t),i=e.slice(t+1,n),a=e.slice(n+1),o=Number.parseInt(i,10);Number.isFinite(o)&&c.push({path:r,line:o,text:a})}return c}const Nn=new Set([`.git`,i,`node_modules`,`.changeset`,`.claude`,`.agents`,`dist`,`build`]);async function Pn(e){let t=M(e),n=new Map,r=0,i=!1;async function a(e){if(i)return;let o;try{o=await je(e,{withFileTypes:!0})}catch{return}for(let s of o){if(i)return;if(s.isDirectory()&&Nn.has(s.name))continue;let o=M(e,s.name);if(s.isDirectory()){await a(o);continue}if(s.isFile()){if(r>=1e3){i=!0;return}try{let e=await Me(o);n.set(ye(t,o),e.mtimeMs),r++}catch{}}}}return await a(t),{snapshot:n,truncated:i}}function Fn(e,t){let n=[];for(let[r,i]of t){let t=e.get(r);(t===void 0||t!==i)&&n.push(r)}for(let[r]of e)t.has(r)||n.push(r);return{changed:n}}const In=[`node_modules`,`.git`,`dist`,`build`,`.next`,`.turbo`,`.nuxt`,`coverage`,`.cache`,`.parcel-cache`,`.vercel`,i,`.claude`];function Ln(e){return e===`--recursive`||e===`--dereference-recursive`?!0:e.startsWith(`--`)||!e.startsWith(`-`)?!1:/[rR]/.test(e.slice(1))}const Rn=[{command:`grep`,applies:e=>e.slice(1).some(Ln),hasUserExcludes:e=>e.some(e=>e===`--exclude-dir`||e.startsWith(`--exclude-dir=`)),buildExcludeArgs:e=>e.map(e=>`--exclude-dir=${e}`),insertionIndex:()=>1},{command:`find`,applies:()=>!0,hasUserExcludes:e=>e.slice(1).some(e=>e===`-not`||e===`!`||e===`-prune`),buildExcludeArgs:e=>{let t=[];for(let n of e)t.push(`-not`,`-path`,`*/${n}/*`);return t},insertionIndex:e=>{for(let t=1;t<e.length;t++)if(e[t].startsWith(`-`))return t;return e.length}}];function zn(e){return e.map(e=>{let t=Rn.find(t=>t.command===e.command);if(!t||!t.applies(e.args)||t.hasUserExcludes(e.args))return e;let n=t.buildExcludeArgs(In),r=t.insertionIndex(e.args);return{command:e.command,args:[...e.args.slice(0,r),...n,...e.args.slice(r)]}})}function Bn(e){return 
e.map(e=>e.args.map(J).join(` `)).join(` | `)}const Vn=new Set([`cat`,`ls`,`grep`,`find`,`head`,`tail`,`wc`,`sort`,`uniq`,`cut`]),Hn=new Set([`>`,`>>`,`<`,`>&`,`<&`,`|&`]),Un=new Set([`&`,`;`,`;;`,`&&`,`||`,`(`,`)`,`<(`,`>(`,`<<`,`<<-`]),Wn=new Set([`-o`,`--output-file`,`--output`]),Gn=[`-o=`,`--output-file=`,`--output=`],Kn=new Set([`-exec`,`-execdir`,`-delete`,`-fprint`,`-fprintf`,`-fprint0`,`-ok`,`-okdir`]),qn=/[`]|\$\(|\$\{|\$'/;function Jn(e){return typeof e==`object`&&!!e&&`op`in e}function Yn(e){let t=typeof e.op==`string`?e.op:`(unknown)`;return Hn.has(t)?{category:`write_blocked`,message:`Write operation blocked: '${t}'. exec is read-only. For document changes, use write_document or edit_document.`}:Un.has(t)?{category:`shell_construct_blocked`,message:`Shell construct '${t}' is not supported. Only pipes (|) are allowed between allowlisted stages.`}:{category:`shell_construct_blocked`,message:`Operator '${t}' is not supported.`}}function Xn(e){let t=[];for(let n of e){if(typeof n==`string`){if(qn.test(n))return{error:{category:`shell_construct_blocked`,message:`Argument '${n}' contains a shell-injection pattern (backtick, $(), or \${}); not supported.`}};t.push(n);continue}if(!Jn(n))return{error:{category:`shell_construct_blocked`,message:`Unrecognized token shape.`}};if(n.op===`glob`&&typeof n.pattern==`string`){t.push(n.pattern);continue}return typeof n.comment==`string`?{error:{category:`shell_construct_blocked`,message:`Comments are not allowed in exec commands.`}}:{error:Yn(n)}}return{args:t}}function Zn(e){if(!Vn.has(e.command))return{category:`unknown_command`,message:`Command '${e.command}' is not in the allowlist. For pattern matching try 'grep'; for file listing try 'ls' or 'find'. Allowlist: cat, ls, grep, find, head, tail, wc, sort, uniq, cut.`};for(let t of e.args.slice(1)){if(Wn.has(t)||Gn.some(e=>t.startsWith(e)))return{category:`write_blocked`,message:`Write operation blocked: '${t}'. exec is read-only. 
For document changes, use write_document or edit_document.`};if(e.command===`find`&&Kn.has(t))return{category:`write_blocked`,message:`find flag '${t}' is blocked (executes commands or deletes files). Use exec for read-only discovery; chain with another allowlisted tool via '|' if you need to transform output.`}}return null}function Qn(e){let t=e.trim();if(!t)return{error:{category:`unknown_command`,message:`Empty command.`}};let n;try{n=He.parse(t)}catch{return{error:{category:`shell_construct_blocked`,message:`Failed to parse command — likely malformed quoting or an unsupported construct.`}}}let r=[],i=[];for(let e of n){if(Jn(e)&&e.op===`|`){r.push(i),i=[];continue}i.push(e)}r.push(i);let a=[];for(let e of r){let t=Xn(e);if(`error`in t)return t;if(t.args.length===0)return{error:{category:`shell_construct_blocked`,message:`Empty pipeline stage (trailing pipe or leading pipe).`}};let n={command:t.args[0],args:t.args},r=Zn(n);if(r)return{error:r};a.push(n)}return{stages:a}}const $n=/^---\r?\n([\s\S]*?)\r?\n---(?:\r?\n|$)/;function er(e,t){let n=e.match($n);if(!n)return null;try{let e=be(n[1]);if(T(e)){if(t){let n=t.safeParse(e);return n.success?n.data:null}return e}}catch{}return null}const tr=new WeakMap;function nr(e){let t=tr.get(e);if(t)return t;let n=e.map(e=>Fe(e.match,{dot:!0}));return tr.set(e,n),n}function rr(e,t){if(e.length===0)return{};let n=nr(e),r={},i=[],a=!1;for(let o=0;o<e.length;o++){if(!n[o](t))continue;a=!0;let s=e[o].frontmatter;if(s.title!==void 0&&(r.title=s.title),s.description!==void 0&&(r.description=s.description),s.tags!==void 0)for(let e of s.tags)i.includes(e)||i.push(e)}return a?(i.length>0&&(r.tags=i),r):{}}function ir(e){try{return le(M(e,`.git`)).isDirectory()}catch{return!1}}function ar(e){return P({baseDir:M(e),timeout:{block:5e3}})}async function or(e,t,n=5){if(!ir(e))return{commits:[],source:`git-absent`};let r=ar(e),i=``;try{i=await 
r.raw(`log`,`-${Math.max(1,n)}`,`--format=%H|%aI|%an|%s`,`--follow`,`--`,t)}catch{return{commits:[],source:`git`}}let a=[];for(let e of i.split(`
186
- `)){if(!e)continue;let t=e.indexOf(`|`);if(t<0)continue;let n=e.indexOf(`|`,t+1);if(n<0)continue;let r=e.indexOf(`|`,n+1);r<0||a.push({hash:e.slice(0,t),date:e.slice(t+1,n),authorName:e.slice(n+1,r),subject:e.slice(r+1)})}return{commits:a,source:`git`}}const sr=5e3;async function cr(e){try{let t=(await P({baseDir:e,timeout:{block:sr}}).revparse([`--abbrev-ref`,`HEAD`])).trim();return t&&t!==`HEAD`?t:null}catch{return null}}function lr(e,t){return P({baseDir:t,timeout:{block:sr}}).env({GIT_DIR:e,GIT_WORK_TREE:t})}function ur(e,t){let n=m(t);return e.startsWith(n)?e.slice(n.length):e}async function dr(e,t,n,r,i){let a=``;try{a=await e.raw(`log`,t,`-${Math.max(1,i*2)}`,`--format=%H%x00%aI%x00%an%x00%s%x00%B%x1e`,`--`,n)}catch{return[]}let o=ur(t,r),s=v(o),c=[];for(let e of a.split(``)){let t=e.trimStart();if(!t)continue;let[n=``,i=``,a=``,l=``,u=``]=t.split(`\0`),d=n.trim();d.length===40&&c.push({hash:d,date:i,writerName:a,message:l,contributors:p(u),writerId:o,isAgent:s.isAgent,writerClassification:s.classification,branch:r})}return c}async function fr(e,t,n=5){let r=f(e);if(!r)return{commits:[],source:`shadow-repo-absent`};let i=await cr(e);if(!i)return{commits:[],source:`shadow-repo`};let a=lr(r,M(e)),o=``;try{o=await a.raw(`for-each-ref`,m(i),`--format=%(refname)`)}catch{return{commits:[],source:`shadow-repo`}}let s=o.split(`
186
+ `)){if(!e)continue;let t=e.indexOf(`|`);if(t<0)continue;let n=e.indexOf(`|`,t+1);if(n<0)continue;let r=e.indexOf(`|`,n+1);r<0||a.push({hash:e.slice(0,t),date:e.slice(t+1,n),authorName:e.slice(n+1,r),subject:e.slice(r+1)})}return{commits:a,source:`git`}}const sr=5e3;async function cr(e){try{let t=(await P({baseDir:e,timeout:{block:sr}}).revparse([`--abbrev-ref`,`HEAD`])).trim();return t&&t!==`HEAD`?t:null}catch{return null}}function lr(e,t){return P({baseDir:t,timeout:{block:sr}}).env({GIT_DIR:e,GIT_WORK_TREE:t})}function ur(e,t){let n=_(t);return e.startsWith(n)?e.slice(n.length):e}async function dr(e,t,n,r,i){let a=``;try{a=await e.raw(`log`,t,`-${Math.max(1,i*2)}`,`--format=%H%x00%aI%x00%an%x00%s%x00%B%x1e`,`--`,n)}catch{return[]}let o=ur(t,r),s=u(o),c=[];for(let e of a.split(``)){let t=e.trimStart();if(!t)continue;let[n=``,i=``,a=``,l=``,u=``]=t.split(`\0`),d=n.trim();d.length===40&&c.push({hash:d,date:i,writerName:a,message:l,contributors:m(u),writerId:o,isAgent:s.isAgent,writerClassification:s.classification,branch:r})}return c}async function fr(e,t,n=5){let r=d(e);if(!r)return{commits:[],source:`shadow-repo-absent`};let i=await cr(e);if(!i)return{commits:[],source:`shadow-repo`};let a=lr(r,M(e)),o=``;try{o=await a.raw(`for-each-ref`,_(i),`--format=%(refname)`)}catch{return{commits:[],source:`shadow-repo`}}let s=o.split(`
187
187
  `).map(e=>e.trim()).filter(Boolean);return s.length===0?{commits:[],source:`shadow-repo`}:{commits:(await Promise.all(s.map(e=>dr(a,e,t,i,n)))).flat().sort((e,t)=>t.date.localeCompare(e.date)).slice(0,n),source:`shadow-repo`}}const pr=1e3,mr=new Set([`.git`,i,`node_modules`,`.changeset`,`.claude`,`.agents`,`dist`,`build`]),hr=/\.(md|mdx)$/i,gr=N.object({title:N.string().optional(),description:N.string().optional(),tags:N.array(N.string()).default([])});function _r(e){return e.replace(/\.md$/,``).replace(/\.mdx$/,``)}async function vr(e){try{let t=er(await Ae(e,`utf-8`),gr);return t?{title:t.title,description:t.description,tags:t.tags??[]}:{tags:[]}}catch{return null}}async function yr(e,t){if(!e)return null;let n=await H(e,`/api/backlinks?docName=${encodeURIComponent(t)}`);if(!n.ok)return null;let r=n.backlinks??n.results??n.links;if(!Array.isArray(r))return[];let i=[];for(let e of r){if(typeof e!=`object`||!e)continue;let t=e,n=typeof t.docName==`string`?t.docName:typeof t.source==`string`?t.source:typeof t.page==`string`?t.page:void 0;n&&i.push({source:n,title:typeof t.title==`string`?t.title:void 0,snippet:typeof t.snippet==`string`?t.snippet:null})}return i}async function br(e,t){if(!e||t.length===0)return null;let n=[...new Set(t)],r=[];for(let e=0;e<n.length;e+=100)r.push(n.slice(e,e+100));let i=await Promise.all(r.map(async t=>{let n=await H(e,`/api/backlink-counts?docNames=${encodeURIComponent(t.join(`,`))}`);return n.ok?n.counts??{}:null})),a=new Map,o=!1;for(let e of i)if(e){o=!0;for(let[t,n]of Object.entries(e))typeof n==`number`&&Number.isFinite(n)&&a.set(t,n)}return o?a:null}async function xr(e,t){if(!e)return null;let n=await H(e,`/api/forward-links?docName=${encodeURIComponent(t)}`);if(!n.ok)return null;let r=n.forwardLinks??n.links??n.results;if(!Array.isArray(r))return[];let i=[];for(let e of r){if(typeof e!=`object`||!e)continue;let t=e;if(t.kind===`external`&&typeof t.url==`string`){i.push({kind:`external`,url:t.url,title:typeof 
t.title==`string`?t.title:void 0,snippet:typeof t.snippet==`string`?t.snippet:null});continue}let n=typeof t.docName==`string`?t.docName:void 0;n&&i.push({kind:`doc`,docName:n,title:typeof t.title==`string`?t.title:void 0,snippet:typeof t.snippet==`string`?t.snippet:null})}return i}function Sr(e,t,n){let r=t??[],i=r.length===0?{}:rr(r,n),a=e?.title??i.title,o=e?.description??i.description,s=e?.tags??[],c=i.tags??[],l;if(c.length===0)l=s;else{let e=new Set;l=[];for(let t of c)e.has(t)||(e.add(t),l.push(t));for(let t of s)e.has(t)||(e.add(t),l.push(t))}return{title:a,description:o,tags:l}}async function Cr(e,t,n={}){let r=e.replace(/^\.\//,``).replace(/^\/+/,``),i=M(t.projectDir,r),a=t.historyDepth??5,o=n.includeRichFields===!0,s=vr(i);if(!o){let e=Sr(await s,t.folderRules,r);return{path:r,title:e.title,description:e.description,tags:e.tags,backlinkCount:null,backlinks:null,forwardLinkCount:null,forwardLinks:null,history:null,historySource:null,projectHistory:null,projectHistorySource:null}}let[c,l,u,d,f]=await Promise.all([s,yr(t.serverUrl,_r(r)).catch(()=>null),xr(t.serverUrl,_r(r)).catch(()=>null),fr(t.projectDir,r,a).catch(()=>({commits:[],source:`shadow-repo`})),or(t.projectDir,r,a).catch(()=>({commits:[],source:`git`}))]),p=Sr(c,t.folderRules,r);return{path:r,title:p.title,description:p.description,tags:p.tags,backlinkCount:l?.length??null,backlinks:l,forwardLinkCount:u?.length??null,forwardLinks:u,history:d.commits,historySource:d.source,projectHistory:f.commits,projectHistorySource:f.source}}async function wr(e,t){let n={directMdCount:0,recursiveMdCount:0,childDirCount:0,mostRecent:null,truncated:!1},r=0,i=[{path:e,depth:0}];for(;i.length>0;){let e=i.shift();if(!e)break;if(r>=pr){n.truncated=!0;break}let a;try{a=await je(e.path,{withFileTypes:!0})}catch{continue}for(let o of a){if(r>=pr){n.truncated=!0;break}r++;let 
a=o.name;if(o.isDirectory()){if(mr.has(a)||a.startsWith(`.`))continue;e.depth===0&&n.childDirCount++,i.push({path:`${e.path}/${a}`,depth:e.depth+1})}else if(o.isFile()&&hr.test(a)){n.recursiveMdCount++,e.depth===0&&n.directMdCount++;let r=`${e.path}/${a}`;try{let e=await Me(r);(!n.mostRecent||e.mtimeMs>n.mostRecent.mtimeMs)&&(n.mostRecent={absPath:r,relPath:ye(t,r).split(/[\\/]/).filter(Boolean).join(`/`),mtimeMs:e.mtimeMs})}catch{}}}}return n}async function Tr(e,t){let n=e.replace(/^\.\//,``).replace(/^\/+/,``).replace(/\/+$/,``),r=await wr(M(t.projectDir,n),t.projectDir),i;if(r.mostRecent){let e=await vr(r.mostRecent.absPath);i={path:r.mostRecent.relPath,title:e?.title??he(r.mostRecent.relPath),updatedAt:new Date(r.mostRecent.mtimeMs).toISOString()}}let a={path:n,type:`directory`,directMdCount:r.directMdCount,recursiveMdCount:r.recursiveMdCount,childDirCount:r.childDirCount,mostRecentMd:i,truncated:r.truncated},o=t.folderRules??[];if(o.length>0){let e=rr(o,n);e.title!==void 0&&(a.title=e.title),e.description!==void 0&&(a.description=e.description),e.tags!==void 0&&e.tags.length>0&&(a.tags=e.tags)}return a}const Er=50*1024,Dr=/\.(png|jpe?g|gif|webp|svg|pdf|zip|tar|gz|tgz|mp4|mov|mp3|wav|ico|bmp)$/i,Or=["**STOP — native tools on in-scope markdown.** Do NOT use your host's native `Read`, `Grep`, or `Glob` on `.md` / `.mdx` paths inside OK's content directory — use `exec` (this tool) instead. Native file tools skip frontmatter, backlinks, shadow-repo activity, and project git history that `exec` returns for every matched wiki file. Reserve native `Read`/`Grep`/`Glob` for source code and non-markdown paths only.",``,`Run a read-only bash-like command against the project content directory. Returns raw stdout plus enriched metadata for every wiki file referenced (frontmatter, backlink/forward-link counts, shadow-repo activity with agent/human attribution).`,``,`Allowlist: cat, ls, grep, find, head, tail, wc, sort, uniq, cut. Pipes (|) work between stages. 
Redirections, subshells, and writes are rejected.`,``,"cwd: the command runs in the explicit absolute `cwd` you pass, or in the MCP client's only advertised root when there is exactly one. If the client has zero or multiple roots, pass `cwd` explicitly. Paths inside the command resolve relative to that cwd; traversal above it is rejected.",``,"Stdout provenance headers (GNU-style): `ls <dir>/` prepends `<dir>/:`, single-file `cat`/`head`/`tail` prepends `==> <path> <==`, so the subject of the command is visible in raw output. Multi-file `cat a b` emits no header — the `enrichedPaths` array still lists every file. `head`/`tail` used as pipe trimmers (no file arg) defer to the upstream producer.",``,`Examples:`,'- `exec({ command: "cat articles/auth.md" })` — file contents + full enrichment','- `exec({ command: "ls articles/" })` — listing + per-file enrichment (slim)','- `exec({ command: "grep -rn oauth articles/ | head -5" })` — pipe with enrichment on matched files','- `exec({ command: "ls", cwd: "/abs/path/to/other-repo" })` — run in a different project'].join(`
188
188
  `);function kr(e){let t=e.split(`
189
189
  `),n=t[t.length-1]===``?t.length-1:t.length;if(n<=500&&e.length<=Er)return{text:e,truncated:!1,omittedLines:0};let r=Math.min(n,500),i=0,a=0;for(let e=0;e<r;e++){let n=t[e];if(i+=n.length+1,i>Er)break;a++}let o=t.slice(0,a).join(`
@@ -196,7 +196,7 @@ superseded_by: <path-to-new-canonical-article>.md
196
196
  `);function qr(e,t){e.tool(`get_forward_links`,Kr,{docName:N.string().describe(`Source page docName`),cwd:N.string().optional().describe(I)},async e=>{let n=await B(t.resolveCwd,t.config,t.serverUrl,e.cwd);if(!n.ok)return L(`Error: ${n.error}`,!0);let{cwd:r,url:i}=n;if(!i)return L(z,!0);let a=V(e.docName);if(!a.ok)return L(a.error,!0);let o=await H(i,`/api/forward-links?docName=${encodeURIComponent(a.docName)}`);if(!o.ok)return L(`Error: ${o.error}`,!0);let{ok:s,...c}=o,l=c,{resolve:u,ui:d}=await G(t,r),f=(l.forwardLinks??[]).map(e=>{let t=e.kind===`doc`&&typeof e.docName==`string`?e.docName:null,n=t?u(t):null;return{...e,previewUrl:n?.url??null,...n?{previewUrlSource:n.source}:{}}}),p={...l,forwardLinks:f,ui:d,cwd:r};return R(JSON.stringify(p,null,2),p)})}const Jr=[`[Requires: Hocuspocus server] List version history for a document.`,`Returns timeline entries from the shadow repo, sorted by timestamp descending.`,"Each entry includes a commit SHA that can be passed to `rollback_to_version`.",``,`**Parameters:**`,"- `docName` — Document name to query history for, typically without extension. A trailing `.md` or `.mdx` is stripped automatically.","- `branch` (optional) — Branch name (default: current branch)","- `limit` (optional) — Maximum entries to return (default 50, max 200)","- `offset` (optional) — Number of entries to skip for pagination (default 0)",'- `type` (optional) — Filter by entry type: "checkpoint", "upstream", or "wip"',"- `author` (optional) — Filter to entries by this author name or email","- `excludeAuthor` (optional) — Exclude entries by this author name or email"].join(`
197
197
  `);function Yr(e,t){e.tool(`get_history`,Jr,{docName:N.string().describe(`Document name to query history for`),branch:N.string().optional().describe(`Branch name (default: current branch)`),limit:N.number().int().min(1).max(200).optional().describe(`Maximum entries to return (default 50, max 200)`),offset:N.number().int().min(0).optional().describe(`Number of entries to skip for pagination (default 0)`),type:N.enum([`checkpoint`,`upstream`,`wip`]).optional().describe(`Filter by entry type`),author:N.string().optional().describe(`Filter to entries by this author name or email`),excludeAuthor:N.string().optional().describe(`Exclude entries by this author name or email`),cwd:N.string().optional().describe(I)},async e=>{let n=await B(t.resolveCwd,t.config,t.serverUrl,e.cwd);if(!n.ok)return L(`Error: ${n.error}`,!0);let{cwd:r,url:i}=n;if(!i)return L(z,!0);let a=V(e.docName);if(!a.ok)return L(a.error,!0);let o=new URLSearchParams;o.set(`docName`,a.docName),e.branch&&o.set(`branch`,e.branch),e.limit!=null&&o.set(`limit`,String(e.limit)),e.offset!=null&&o.set(`offset`,String(e.offset)),e.type&&o.set(`type`,e.type),e.author&&o.set(`author`,e.author),e.excludeAuthor&&o.set(`excludeAuthor`,e.excludeAuthor);let s=await H(i,`/api/history?${o.toString()}`);if(!s.ok)return L(`Error: ${s.error}`,!0);let{ok:c,...l}=s,u=await W(a.docName,{config:t.config,resolveCwd:t.resolveCwd},r);return R(JSON.stringify(l,null,2),{...l,previewUrl:u?.url??null,...u?{previewUrlSource:u.source}:{}})})}const Xr=[`[Requires: Hocuspocus server] Find the most-linked pages in the knowledge graph.`,`Returns hub pages ordered by inbound link count as JSON.`,``,`**Parameters:**`,"- `limit` (optional) — Maximum number of hubs to return (default 20)"].join(`
198
198
  `);function Zr(e,t){e.tool(`get_hubs`,Xr,{limit:N.number().int().positive().optional().describe(`Maximum number of hubs to return`),cwd:N.string().optional().describe(I)},async e=>{let n=await B(t.resolveCwd,t.config,t.serverUrl,e.cwd);if(!n.ok)return L(`Error: ${n.error}`,!0);let{cwd:r,url:i}=n;if(!i)return L(z,!0);let a=await H(i,`/api/hubs${e.limit?`?limit=${encodeURIComponent(String(e.limit))}`:``}`);if(!a.ok)return L(`Error: ${a.error}`,!0);let{ok:o,...s}=a,c=s,{resolve:l,ui:u}=await G(t,r),d=(c.hubs??[]).map(e=>{let t=typeof e.docName==`string`?e.docName:null,n=t?l(t):null;return{...e,previewUrl:n?.url??null,...n?{previewUrlSource:n.source}:{}}}),f={...c,hubs:d,ui:u,cwd:r};return R(JSON.stringify(f,null,2),f)})}const Qr=[`[Requires: Hocuspocus server] Find disconnected pages in the knowledge graph.`,`Returns orphaned pages as JSON.`,``,`**Parameters:**`,"- `mode` (optional) — Orphan lens: `incoming`, `outgoing`, or `both` (default `both`)"].join(`
199
- `);function $r(e,t){e.tool(`get_orphans`,Qr,{mode:N.enum(r).optional().describe(`Filter which type of graph disconnection to surface`),cwd:N.string().optional().describe(I)},async e=>{let n=await B(t.resolveCwd,t.config,t.serverUrl,e.cwd);if(!n.ok)return L(`Error: ${n.error}`,!0);let{cwd:r,url:i}=n;if(!i)return L(z,!0);let a=await H(i,`/api/orphans${e.mode?`?mode=${encodeURIComponent(e.mode)}`:``}`);if(!a.ok)return L(`Error: ${a.error}`,!0);let{ok:o,...s}=a,c=s,{resolve:l,ui:u}=await G(t,r),d=(c.orphans??[]).map(e=>{let t=typeof e.docName==`string`?e.docName:null,n=t?l(t):null;return{...e,previewUrl:n?.url??null,...n?{previewUrlSource:n.source}:{}}}),f={...c,orphans:d,ui:u,cwd:r};return R(JSON.stringify(f,null,2),f)})}function ei(e,t){return`${$t(`ingest`)}Capture this external source into the project knowledge base as raw reference material. **Raw preservation only** — no summary, no analysis, no interpretation. Summarizing is the job of the \`research\` tool later.
199
+ `);function $r(e,t){e.tool(`get_orphans`,Qr,{mode:N.enum(r).optional().describe(`Filter which type of graph disconnection to surface`),cwd:N.string().optional().describe(I)},async e=>{let n=await B(t.resolveCwd,t.config,t.serverUrl,e.cwd);if(!n.ok)return L(`Error: ${n.error}`,!0);let{cwd:r,url:i}=n;if(!i)return L(z,!0);let a=await H(i,`/api/orphans${e.mode?`?mode=${encodeURIComponent(e.mode)}`:``}`);if(!a.ok)return L(`Error: ${a.error}`,!0);let{ok:o,...s}=a,c=s,{resolve:l,ui:u}=await G(t,r),d=(c.orphans??[]).map(e=>{let t=typeof e.docName==`string`?e.docName:null,n=t?l(t):null;return{...e,previewUrl:n?.url??null,...n?{previewUrlSource:n.source}:{}}}),f={...c,orphans:d,ui:u,cwd:r};return R(JSON.stringify(f,null,2),f)})}function ei(e,t){return`${$t(`ingest`)}Capture this external source into the project knowledge base as raw reference material. The KB is **closed-loop**: external sources are pulled INTO the knowledge base here so downstream docs cite local paths, never bare web URLs. This applies whether a user shared the source OR you fetched it yourself to ground a knowledge-base claim — agent-initiated fetches are not exempt. **Raw preservation only** — no summary, no analysis, no interpretation. Summarizing is the job of the \`research\` tool later.
200
200
 
201
201
  Source: ${e}
202
202
 
@@ -277,7 +277,7 @@ If the source is directly relevant to an existing article or research doc, updat
277
277
  - **No promotion to a canonical article** — that's the \`consolidate\` tool's job, later
278
278
  - **No silent chaining into research** — ingest completes on its own; the user explicitly opts into \`research\`
279
279
  - **No synthesis inside the raw file** — the takeaways live in chat or a separate summary doc, never mixed into the preserved source
280
- `}const ti=[`Fetch an external source (URL or local file) and save raw content as reference material in the project content directory.`,`Raw preservation only — no analysis or interpretation.`,``,`**Use when:**`,`- Capturing reference material for the project knowledge base`,`- Saving a URL or document for later research`,`- Archiving an external source alongside the codebase`,`- The user shares a URL or document they want preserved`,``,`**Triggers on:**`,`- "ingest", "save this source", "capture this URL", "add to external sources"`,`- User shares a URL, article, or document to preserve in the knowledge base`,`- Research workflow needs raw sources before analysis`].join(`
280
+ `}const ti=[`Fetch an external source (URL or local file) and save raw content as reference material in the project content directory.`,`Raw preservation only — no analysis or interpretation.`,`The knowledge base is closed-loop: web sources cited by KB docs MUST resolve to a local doc captured here, not bare URLs.`,``,`**Use when:**`,`- Capturing reference material for the project knowledge base`,`- Saving a URL or document for later research`,`- Archiving an external source alongside the codebase`,`- The user shares a URL or document they want preserved`,"- **You yourself fetched a URL (`WebFetch` / `WebSearch` / equivalent) to ground a claim that is about to land in a knowledge-base doc** — agent-initiated fetches are not exempt from the closed-loop rule",``,`**Triggers on:**`,`- "ingest", "save this source", "capture this URL", "add to external sources"`,`- User shares a URL, article, or document to preserve in the knowledge base`,`- Agent fetches a URL via WebFetch/WebSearch to support a knowledge-base claim — preserve the source before citing it`,`- Research workflow needs raw sources before analysis`].join(`
281
281
  `);function ni(e,t){e.tool(`ingest`,ti,{source:N.string().describe(`URL, file path, or identifier of the source to ingest`),cwd:N.string().optional().describe(I)},async e=>{let n=await nn(t.resolveCwd,t.config,e.cwd);return n.ok?R(ei(e.source,n.config.content.dir),{previewUrl:null}):L(`Error: ${n.error}`,!0)})}const ri=[`[Requires: Hocuspocus server] List available documents from the Hocuspocus server.`,`Returns document names, optionally filtered by directory.`,``,`**Parameters:**`,"- `dir` (optional) — Filter to documents in this directory"].join(`
282
282
  `);function ii(e,t){e.tool(`list_documents`,ri,{dir:N.string().optional().describe(`Optional directory to filter documents`),cwd:N.string().optional().describe(I)},async e=>{let n=await B(t.resolveCwd,t.config,t.serverUrl,e.cwd);if(!n.ok)return L(`Error: ${n.error}`,!0);let{cwd:r,url:i}=n;if(!i)return L(z,!0);let a=await H(i,`/api/documents${e.dir?`?dir=${encodeURIComponent(e.dir)}`:``}`);if(!a.ok)return L(`Error: ${a.error}`,!0);let{ok:o,...s}=a,c=s,{resolve:l,ui:u}=await G(t,r),d=(c.documents??[]).map(e=>{let t=typeof e.docName==`string`?e.docName:null,n=t?l(t):null;return{...e,previewUrl:n?.url??null,...n?{previewUrlSource:n.source}:{}}}),f={...c,documents:d,ui:u,cwd:r};return R(JSON.stringify(f,null,2),f)})}const ai=[`Read a wiki file with enriched context: contents + frontmatter metadata + recent shadow-repo activity (agent vs human attribution) + backlink/forward-link context.`,``,`**Use when:**`,`- Loading an article for context`,`- Understanding who changed a file recently and whether it was an agent or human`,`- Seeing how this page links out and what links back to it`,``,"Prefer this over your native `Read` for wiki files — one call returns what otherwise takes 3-4.",``,`**Parameters:**`,"- `path` — Project-root-relative path to the file, including extension (e.g. `articles/auth/sso.md`). To pass this document to `edit_document` / `write_document` / `get_backlinks`, strip the extension (they take extension-less `docName`).","- `since` (reserved) — Reserved for shadow-log since-filter; currently unused."].join(`
283
283
  `);function oi(e){if(!e||e.length===0)return``;let t=[``,`### Recent activity (OK edits)`,``];for(let n of e){let e=n.writerClassification===`agent`?`agent: ${n.writerName}`:n.writerClassification===`principal`?`human: ${n.writerName}`:`${n.writerClassification}: ${n.writerName}`,r=n.hash.slice(0,7);t.push(`- ${r} ${n.date} [${e}] ${n.message}`)}return t.join(`
@@ -667,8 +667,8 @@ Claude Code Desktop: \`preview_start("open-knowledge-ui")\`. Other hosts: open-U
667
667
  Detailed conventions (wiki-link authoring, frontmatter, anti-patterns) live in the installed \`open-knowledge\` Agent Skill. If missing, run \`npx @inkeep/open-knowledge init\`.
668
668
 
669
669
  **Escape hatch.** Native \`Read\`/\`Grep\`/\`Glob\` on \`.md\` is allowed ONLY when no OK MCP is registered, or immediately after an OK MCP call failed — then begin your sentence with \`Open Knowledge MCP unavailable:\`. Non-markdown: native tools always.
670
- `}async function Bi(e,t){try{let t=e.replace(`ws://`,`http://`).replace(`wss://`,`https://`);return(await fetch(`${t}/api/document`,{signal:AbortSignal.timeout(2e3)})).ok}catch(n){return t.warn(`Hocuspocus probe failed`,{serverUrl:e,error:n instanceof Error?n.message:String(n)}),!1}}async function Vi(n){let{projectDir:r,serverUrl:i,config:a,startupConfig:o,bypassProjectSelection:s=!1}=n;if(X=Nt(),X.info(`MCP server starting`,{startupCwd:r,bypassProjectSelection:s,serverUrlType:typeof i==`string`?`explicit`:`lazy`}),typeof i==`string`){let e=await Bi(i,X);X.info(`Hocuspocus detection complete`,{serverUrl:i,available:e})}else X.info(`server discovery is lazy per effective cwd`);let c=new Ie({name:t,version:e},{instructions:zi(o,{dynamicConfig:typeof a==`function`&&!s})}),l=Li({startupCwd:r,bypassProjectSelection:s,listRoots:()=>c.server.listRoots(),logger:X}),u=Ri({startupCwd:r,resolveCwd:l.resolveCwd,bypassProjectSelection:s}),d=u.resolveCwdForTools;c.server.setNotificationHandler(Re,async()=>{l.invalidateRoots()});let f=async e=>{if(typeof i==`string`)return i.replace(`ws://`,`http://`).replace(`wss://`,`https://`);let t=e??await d();return(typeof i==`function`?await i(t):i)?.replace(`ws://`,`http://`).replace(`wss://`,`https://`)},p=Ee(),m=process.env.AGENT_LABEL||void 0,h={current:{connectionId:p,label:m,displayName:m??`Agent`,colorSeed:m??p}};c.server.oninitialized=()=>{let e=c.server.getClientVersion();h.current={connectionId:p,clientInfo:e?{name:e.name,version:e.version}:void 0,label:m,displayName:m??e?.name??`Agent`,colorSeed:m??e?.name??p},X?.info(`agent identity established`,{displayName:h.current.displayName,connectionId:p.slice(0,8),clientName:e?.name})},Pi(c,{serverUrl:f,resolveCwd:d,config:a,identityRef:h,logger:X});let g=new Le;await c.connect(g),X.info(`MCP server running on stdio`);let{startKeepalive:_}=await import(`./keepalive-DCcA7pmC.mjs`),v=_({resolveWsUrl:async()=>{let e=await u.getKeepaliveCwd();if(!e)return;let t=await f(e);if(t)return 
t.replace(/^http:/,`ws:`).replace(/^https:/,`wss:`)},connectionId:`agent-${p}`,logger:X.child(`keepalive`)}),y=e=>{X?.info(`MCP server shutting down`,{signal:e});try{v.close()}catch{}process.exit(0)};process.on(`SIGINT`,()=>y(`SIGINT`)),process.on(`SIGTERM`,()=>y(`SIGTERM`))}function Hi(e){if(e===void 0||e===``)return;let t=Number.parseInt(e,10);if(!(Number.isNaN(t)||t<=0))return t}function Ui(e){if(e.portOverride!==void 0){let t=Number.parseInt(e.portOverride,10);if(Number.isNaN(t))return{action:`disk-only`,message:`invalid --port value '${e.portOverride}' — disk-only mode`};if(t>0){let n=`ws://${e.host}:${t}`;return{action:`connect`,url:n,message:`using --port override, connecting to ${n}`}}return{action:`disk-only`,message:`--port=0 — disk-only mode`}}let t=e.readLock();if(t&&t.port>0&&e.isAlive(t.pid)){let e=`ws://localhost:${t.port}`;return{action:`connect`,url:e,message:`connected to running instance at ${e} (pid ${t.pid})`}}return e.envAutoStart===`0`?{action:`disk-only`,message:`auto-spawn disabled via OK_MCP_AUTOSTART=0 — disk-only mode`}:e.configAutoStart?t?{action:`spawn`,message:`existing lock is not usable (port=${t.port}, pid=${t.pid}) — spawning ok start`}:{action:`spawn`,message:`no running instance — spawning ok start`}:{action:`disk-only`,message:`auto-spawn disabled via config.mcp.autoStart=false — disk-only mode`}}async function Wi(e){let t=e.readLock??(()=>c(e.lockDir)),n=e.isAlive??s,r=e.sleep??(e=>new Promise(t=>setTimeout(t,e))),i=e.spawn??Oe,a=e.readErrorLog??(e=>j(e)?oe(e,`utf-8`).trim():``),o=e.openErrorLog??(e=>ae(e,`w`)),l=e.closeFd??(e=>re(e)),u=e.timeoutMs??5e3,d=e.pollIntervalMs??100,f=Ui({host:e.host,portOverride:e.portOverride,envAutoStart:e.envAutoStart,configAutoStart:e.configAutoStart,readLock:t,isAlive:n});if(e.logger?.info(`auto-start decision`,{action:f.action,message:f.message,contentDir:e.contentDir}),f.action===`connect`)return{serverUrl:f.url,message:f.message};if(f.action===`disk-only`)return{serverUrl:void 
0,message:f.message};j(e.lockDir)||ie(e.lockDir,{recursive:!0});let p=ve(e.lockDir,`last-spawn-error.log`),m=o(p),h,g,_=te();e.logger?.info(`spawning server`,{command:_.command,cwd:e.contentDir,timeoutMs:u});try{try{h=i(_.command,[..._.prefixArgs,`start`],{detached:!0,stdio:[`ignore`,`ignore`,m],cwd:e.contentDir}),h.on(`error`,e=>{g=e instanceof Error?e.message:String(e)}),h.unref()}catch(e){g=e instanceof Error?e.message:String(e)}}finally{try{l(m)}catch{}}let v=Date.now()+u;for(;Date.now()<v;){if(g){let t=a(p);throw e.logger?.error(`spawn failed`,void 0,{error:g,stderr:t}),Error(`Error: spawn failed: ${g}${t?` stderr:\n${t}`:``}`)}await r(d);let i=t();if(i&&i.port>0&&n(i.pid)){let t=`ws://localhost:${i.port}`;return e.logger?.info(`server ready after spawn`,{url:t,pid:i.pid}),{serverUrl:t,message:`spawned ok start; connected at ${t} (pid ${i.pid})`}}}if(g){let t=a(p);throw e.logger?.error(`spawn failed (post-deadline)`,void 0,{error:g,stderr:t}),Error(`Error: spawn failed: ${g}${t?` stderr:\n${t}`:``}`)}let y=a(p),b=(u/1e3).toFixed(u%1e3==0?0:2),x=h?.pid,S=``;throw typeof x==`number`&&(S=n(x)?` child pid=${x} is still running — raise OK_MCP_SPAWN_TIMEOUT_MS if this is a slow boot.`:` child pid=${x} exited — check last-spawn-error.log.`),e.logger?.error(`spawn poll timeout`,void 0,{timeoutMs:u,childPid:x,childAlive:typeof x==`number`?n(x):void 0,stderr:y||void 0}),Error(`Error: server did not start within ${b}s.${S}${y?` stderr:\n${y}`:``}`)}function Gi(e){if(e.portOverride!==void 0){let t=Number.parseInt(e.portOverride,10);if(Number.isNaN(t)||t<=0)return async()=>void 0;let n=`ws://${e.host}:${t}`;return async()=>n}let t=e.ensureServerRunningFn??Wi,n=e.cacheMs??1e3,r=new Map,i=new Map;return async s=>{let c=await D(s??e.startupCwd),l=Date.now(),u=r.get(c);if(u&&u.expiresAt>l)return e.logger?.debug(`server url cache hit`,{cwd:c,url:u.url}),u.url;let d=i.get(c);if(d)return e.logger?.debug(`server url resolution pending`,{cwd:c}),await d;e.logger?.debug(`server url 
cache miss`,{cwd:c});let f=(async()=>{let i=await e.resolveConfig(c),s=o(i,c),l=a(s),u=e.readLock,d=u?()=>u(l):void 0,f=await t({lockDir:l,contentDir:s,host:i.server.host,portOverride:void 0,envAutoStart:e.envAutoStart,configAutoStart:i.mcp.autoStart,logger:e.logger,timeoutMs:e.timeoutMs,pollIntervalMs:e.pollIntervalMs,spawn:e.spawn,readLock:d,isAlive:e.isAlive,sleep:e.sleep,readErrorLog:e.readErrorLog,openErrorLog:e.openErrorLog,closeFd:e.closeFd});return r.set(c,{url:f.serverUrl,expiresAt:Date.now()+n}),f.serverUrl})();i.set(c,f);try{return await f}finally{i.delete(c)}}}function Ki(e){return new A(`mcp`).description(`Start MCP stdio server for project knowledge base`).option(`-p, --port <port>`,`Override port discovery and connect to this port (0 = disk-only)`,void 0).action(async t=>{try{let n=e(),r=process.cwd(),i=ee({startupCwd:r,startupConfig:n}),a=Hi(process.env.OK_MCP_SPAWN_TIMEOUT_MS),o,s;if(t.port!==void 0){let e=Number.parseInt(t.port,10);Number.isNaN(e)?(o=void 0,s=`invalid --port value '${t.port}' — disk-only mode`):e>0?(o=`ws://${n.server.host}:${e}`,s=`using --port override, connecting to ${o}`):(o=void 0,s=`--port=0 — disk-only mode`)}else o=Gi({startupCwd:r,resolveConfig:i,host:n.server.host,portOverride:void 0,envAutoStart:process.env.OK_MCP_AUTOSTART,timeoutMs:a}),s=`project server discovery/autostart is lazy per effective cwd`;process.stderr.write(`[mcp] ${s}\n`),await Vi({projectDir:r,serverUrl:o,config:i,startupConfig:n,bypassProjectSelection:t.port!==void 0})}catch(e){process.stderr.write(`MCP server failed to start: ${e instanceof Error?e.message:String(e)}\n`),process.exitCode=1}})}function qi(e){return new A(`preview`).description(`Show what content the watcher will track (read-only)`).action(async()=>{let{previewContent:t,formatPreviewBlock:n}=await import(`./preview-BE3bGa0b.mjs`),r=e(),i=process.cwd(),a=o(r,i),s;try{s=t({projectDir:i,contentDir:a,include:r.content.include,exclude:r.content.exclude})}catch(e){console.error(`Content 
preview failed: ${e instanceof Error?e.message:String(e)}`),process.exitCode=1;return}process.stdout.write(`${n(s,i)}\n`),s.totalCount===0&&s.warnings.length>0&&(process.exitCode=1)})}function Z(e,t){e&&process.stdout.write(`${JSON.stringify(t)}\n`)}async function Ji(e,t,n=process.cwd()){let r=e.op??`sync`,i=c(a(o(t,n)));if(i&&i.port>0){let t=`http://127.0.0.1:${i.port}/api/sync/trigger`;e.json||process.stderr.write(`Triggering ${r} via running server (port ${i.port})…\n`);try{let n=await fetch(t,{method:`POST`,headers:{"Content-Type":`application/json`},body:JSON.stringify({op:r})});if(!n.ok){let e=await n.json().catch(()=>({}));throw Error(e.error??`Server responded with ${n.status}`)}Z(e.json,{type:`triggered`,op:r,port:i.port}),e.json||process.stderr.write(`✓ ${r} triggered\n`);return}catch(t){let n=t instanceof Error?t.message:String(t);e.json||process.stderr.write(`Server trigger failed (${n}), running directly…\n`)}}e.json||process.stderr.write(`Running ${r} directly (no live server)…\n`);let s=P({baseDir:n});if(r===`sync`||r===`pull`){Z(e.json,{type:`step`,step:`pull`});let t=await s.pull();Z(e.json,{type:`pull`,summary:t.summary}),e.json||process.stderr.write(` pull: ${t.summary.changes} changes\n`)}(r===`sync`||r===`push`)&&(Z(e.json,{type:`step`,step:`push`}),await s.push(),Z(e.json,{type:`push`,ok:!0}),e.json||process.stderr.write(` push: ok
671
- `)),Z(e.json,{type:`complete`,op:r}),e.json||process.stderr.write(`✓ ${r} complete\n`)}function Yi(e){return new A(`sync`).description(`Commit, pull, and push to the remote`).option(`--json`,`Output JSONL progress events`,!1).action(async t=>{try{await Ji({json:t.json,op:`sync`},e())}catch(e){let n=e instanceof Error?e.message:String(e);t.json?process.stdout.write(`${JSON.stringify({type:`error`,message:n})}\n`):process.stderr.write(`✗ sync failed: ${n}\n`),process.exit(1)}})}function Xi(e){return new A(`pull`).description(`Pull changes from the remote`).option(`--json`,`Output JSONL progress events`,!1).action(async t=>{try{await Ji({json:t.json,op:`pull`},e())}catch(e){let n=e instanceof Error?e.message:String(e);t.json?process.stdout.write(`${JSON.stringify({type:`error`,message:n})}\n`):process.stderr.write(`✗ pull failed: ${n}\n`),process.exit(1)}})}function Zi(e){return new A(`push`).description(`Push commits to the remote`).option(`--json`,`Output JSONL progress events`,!1).action(async t=>{try{await Ji({json:t.json,op:`push`},e())}catch(e){let n=e instanceof Error?e.message:String(e);t.json?process.stdout.write(`${JSON.stringify({type:`error`,message:n})}\n`):process.stderr.write(`✗ push failed: ${n}\n`),process.exit(1)}})}async function Qi(e={}){let t=M(e.cwd??process.cwd()),n;try{n=await _({projectDir:t})}catch(e){return e instanceof h?{status:`prerequisite-missing`,message:`${S(`Error:`)} ${e.message}`,exitCode:1}:{status:`failed`,message:`${S(`Error:`)} ${e instanceof Error?e.message:String(e)}`,exitCode:1}}if(n.created.length===0&&n.configEdits.length===0)return{status:`no-op`,message:`${x(`Your knowledge base is already seeded.`)}\n${b(`Nothing to do.`)}`,plan:n,exitCode:0};if(e.dryRun)return{status:`dry-run`,message:`${w(`Plan (dry-run — no changes made):`)}\n\n${$i(n,t)}`,plan:n,exitCode:0};if(!e.yes&&!await ea(`${w(`Plan:`)}\n\n${$i(n,t)}\n\n${w(`Apply?`)} ${b(`[Y/n] 
`)}`,e.confirmStream))return{status:`cancelled`,message:b(`Cancelled.`),plan:n,exitCode:0};let r=await d(n,{projectDir:t});if(r.errors.length>0){let e=r.errors.map(e=>` ${S(`✗`)} ${e.path}: ${e.error}`);return{status:`failed`,message:[`${C(`Applied`)} ${r.applied} entries, ${C(String(r.errors.length))} error(s):`,...e].join(`
670
+ `}async function Bi(e,t){try{let t=e.replace(`ws://`,`http://`).replace(`wss://`,`https://`);return(await fetch(`${t}/api/document`,{signal:AbortSignal.timeout(2e3)})).ok}catch(n){return t.warn(`Hocuspocus probe failed`,{serverUrl:e,error:n instanceof Error?n.message:String(n)}),!1}}async function Vi(n){let{projectDir:r,serverUrl:i,config:a,startupConfig:o,bypassProjectSelection:s=!1}=n;if(X=Nt(),X.info(`MCP server starting`,{startupCwd:r,bypassProjectSelection:s,serverUrlType:typeof i==`string`?`explicit`:`lazy`}),typeof i==`string`){let e=await Bi(i,X);X.info(`Hocuspocus detection complete`,{serverUrl:i,available:e})}else X.info(`server discovery is lazy per effective cwd`);let c=new Ie({name:t,version:e},{instructions:zi(o,{dynamicConfig:typeof a==`function`&&!s})}),l=Li({startupCwd:r,bypassProjectSelection:s,listRoots:()=>c.server.listRoots(),logger:X}),u=Ri({startupCwd:r,resolveCwd:l.resolveCwd,bypassProjectSelection:s}),d=u.resolveCwdForTools;c.server.setNotificationHandler(Re,async()=>{l.invalidateRoots()});let f=async e=>{if(typeof i==`string`)return i.replace(`ws://`,`http://`).replace(`wss://`,`https://`);let t=e??await d();return(typeof i==`function`?await i(t):i)?.replace(`ws://`,`http://`).replace(`wss://`,`https://`)},p=Ee(),m=process.env.AGENT_LABEL||void 0,h={current:{connectionId:p,label:m,displayName:m??`Agent`,colorSeed:m??p}};c.server.oninitialized=()=>{let e=c.server.getClientVersion();h.current={connectionId:p,clientInfo:e?{name:e.name,version:e.version}:void 0,label:m,displayName:m??e?.name??`Agent`,colorSeed:m??e?.name??p},X?.info(`agent identity established`,{displayName:h.current.displayName,connectionId:p.slice(0,8),clientName:e?.name})},Pi(c,{serverUrl:f,resolveCwd:d,config:a,identityRef:h,logger:X});let g=new Le;await c.connect(g),X.info(`MCP server running on stdio`);let{startKeepalive:_}=await import(`./keepalive-DCcA7pmC.mjs`),v=_({resolveWsUrl:async()=>{let e=await u.getKeepaliveCwd();if(!e)return;let t=await f(e);if(t)return 
t.replace(/^http:/,`ws:`).replace(/^https:/,`wss:`)},connectionId:`agent-${p}`,logger:X.child(`keepalive`)}),y=e=>{X?.info(`MCP server shutting down`,{signal:e});try{v.close()}catch{}process.exit(0)};process.on(`SIGINT`,()=>y(`SIGINT`)),process.on(`SIGTERM`,()=>y(`SIGTERM`))}function Hi(e){if(e===void 0||e===``)return;let t=Number.parseInt(e,10);if(!(Number.isNaN(t)||t<=0))return t}function Ui(e){if(e.portOverride!==void 0){let t=Number.parseInt(e.portOverride,10);if(Number.isNaN(t))return{action:`disk-only`,message:`invalid --port value '${e.portOverride}' — disk-only mode`};if(t>0){let n=`ws://${e.host}:${t}`;return{action:`connect`,url:n,message:`using --port override, connecting to ${n}`}}return{action:`disk-only`,message:`--port=0 — disk-only mode`}}let t=e.readLock();if(t&&t.port>0&&e.isAlive(t.pid)){let e=`ws://localhost:${t.port}`;return{action:`connect`,url:e,message:`connected to running instance at ${e} (pid ${t.pid})`}}return e.envAutoStart===`0`?{action:`disk-only`,message:`auto-spawn disabled via OK_MCP_AUTOSTART=0 — disk-only mode`}:e.configAutoStart?t?{action:`spawn`,message:`existing lock is not usable (port=${t.port}, pid=${t.pid}) — spawning ok start`}:{action:`spawn`,message:`no running instance — spawning ok start`}:{action:`disk-only`,message:`auto-spawn disabled via config.mcp.autoStart=false — disk-only mode`}}async function Wi(e){let t=e.readLock??(()=>c(e.lockDir)),n=e.isAlive??s,r=e.sleep??(e=>new Promise(t=>setTimeout(t,e))),i=e.spawn??Oe,a=e.readErrorLog??(e=>j(e)?oe(e,`utf-8`).trim():``),o=e.openErrorLog??(e=>ae(e,`w`)),l=e.closeFd??(e=>re(e)),u=e.timeoutMs??5e3,d=e.pollIntervalMs??100,f=Ui({host:e.host,portOverride:e.portOverride,envAutoStart:e.envAutoStart,configAutoStart:e.configAutoStart,readLock:t,isAlive:n});if(e.logger?.info(`auto-start decision`,{action:f.action,message:f.message,contentDir:e.contentDir}),f.action===`connect`)return{serverUrl:f.url,message:f.message};if(f.action===`disk-only`)return{serverUrl:void 
0,message:f.message};j(e.lockDir)||ie(e.lockDir,{recursive:!0});let p=ve(e.lockDir,`last-spawn-error.log`),m=o(p),h,g,_=te();e.logger?.info(`spawning server`,{command:_.command,cwd:e.contentDir,timeoutMs:u});try{try{h=i(_.command,[..._.prefixArgs,`start`],{detached:!0,stdio:[`ignore`,`ignore`,m],cwd:e.contentDir}),h.on(`error`,e=>{g=e instanceof Error?e.message:String(e)}),h.unref()}catch(e){g=e instanceof Error?e.message:String(e)}}finally{try{l(m)}catch{}}let v=Date.now()+u;for(;Date.now()<v;){if(g){let t=a(p);throw e.logger?.error(`spawn failed`,void 0,{error:g,stderr:t}),Error(`Error: spawn failed: ${g}${t?` stderr:\n${t}`:``}`)}await r(d);let i=t();if(i&&i.port>0&&n(i.pid)){let t=`ws://localhost:${i.port}`;return e.logger?.info(`server ready after spawn`,{url:t,pid:i.pid}),{serverUrl:t,message:`spawned ok start; connected at ${t} (pid ${i.pid})`}}}if(g){let t=a(p);throw e.logger?.error(`spawn failed (post-deadline)`,void 0,{error:g,stderr:t}),Error(`Error: spawn failed: ${g}${t?` stderr:\n${t}`:``}`)}let y=a(p),b=(u/1e3).toFixed(u%1e3==0?0:2),x=h?.pid,S=``;throw typeof x==`number`&&(S=n(x)?` child pid=${x} is still running — raise OK_MCP_SPAWN_TIMEOUT_MS if this is a slow boot.`:` child pid=${x} exited — check last-spawn-error.log.`),e.logger?.error(`spawn poll timeout`,void 0,{timeoutMs:u,childPid:x,childAlive:typeof x==`number`?n(x):void 0,stderr:y||void 0}),Error(`Error: server did not start within ${b}s.${S}${y?` stderr:\n${y}`:``}`)}function Gi(e){if(e.portOverride!==void 0){let t=Number.parseInt(e.portOverride,10);if(Number.isNaN(t)||t<=0)return async()=>void 0;let n=`ws://${e.host}:${t}`;return async()=>n}let t=e.ensureServerRunningFn??Wi,n=e.cacheMs??1e3,r=new Map,i=new Map;return async s=>{let c=await D(s??e.startupCwd),l=Date.now(),u=r.get(c);if(u&&u.expiresAt>l)return e.logger?.debug(`server url cache hit`,{cwd:c,url:u.url}),u.url;let d=i.get(c);if(d)return e.logger?.debug(`server url resolution pending`,{cwd:c}),await d;e.logger?.debug(`server url 
cache miss`,{cwd:c});let f=(async()=>{let i=await e.resolveConfig(c),s=o(i,c),l=a(s),u=e.readLock,d=u?()=>u(l):void 0,f=await t({lockDir:l,contentDir:s,host:i.server.host,portOverride:void 0,envAutoStart:e.envAutoStart,configAutoStart:i.mcp.autoStart,logger:e.logger,timeoutMs:e.timeoutMs,pollIntervalMs:e.pollIntervalMs,spawn:e.spawn,readLock:d,isAlive:e.isAlive,sleep:e.sleep,readErrorLog:e.readErrorLog,openErrorLog:e.openErrorLog,closeFd:e.closeFd});return r.set(c,{url:f.serverUrl,expiresAt:Date.now()+n}),f.serverUrl})();i.set(c,f);try{return await f}finally{i.delete(c)}}}function Ki(e){return new A(`mcp`).description(`Start MCP stdio server for project knowledge base`).option(`-p, --port <port>`,`Override port discovery and connect to this port (0 = disk-only)`,void 0).action(async t=>{try{let n=e(),r=process.cwd(),i=ee({startupCwd:r,startupConfig:n}),a=Hi(process.env.OK_MCP_SPAWN_TIMEOUT_MS),o,s;if(t.port!==void 0){let e=Number.parseInt(t.port,10);Number.isNaN(e)?(o=void 0,s=`invalid --port value '${t.port}' — disk-only mode`):e>0?(o=`ws://${n.server.host}:${e}`,s=`using --port override, connecting to ${o}`):(o=void 0,s=`--port=0 — disk-only mode`)}else o=Gi({startupCwd:r,resolveConfig:i,host:n.server.host,portOverride:void 0,envAutoStart:process.env.OK_MCP_AUTOSTART,timeoutMs:a}),s=`project server discovery/autostart is lazy per effective cwd`;process.stderr.write(`[mcp] ${s}\n`),await Vi({projectDir:r,serverUrl:o,config:i,startupConfig:n,bypassProjectSelection:t.port!==void 0})}catch(e){process.stderr.write(`MCP server failed to start: ${e instanceof Error?e.message:String(e)}\n`),process.exitCode=1}})}function qi(e){return new A(`preview`).description(`Show what content the watcher will track (read-only)`).action(async()=>{let{previewContent:t,formatPreviewBlock:n}=await import(`./preview-JkR5aTPE.mjs`),r=e(),i=process.cwd(),a=o(r,i),s;try{s=t({projectDir:i,contentDir:a,include:r.content.include,exclude:r.content.exclude})}catch(e){console.error(`Content 
preview failed: ${e instanceof Error?e.message:String(e)}`),process.exitCode=1;return}process.stdout.write(`${n(s,i)}\n`),s.totalCount===0&&s.warnings.length>0&&(process.exitCode=1)})}function Z(e,t){e&&process.stdout.write(`${JSON.stringify(t)}\n`)}async function Ji(e,t,n=process.cwd()){let r=e.op??`sync`,i=c(a(o(t,n)));if(i&&i.port>0){let t=`http://127.0.0.1:${i.port}/api/sync/trigger`;e.json||process.stderr.write(`Triggering ${r} via running server (port ${i.port})…\n`);try{let n=await fetch(t,{method:`POST`,headers:{"Content-Type":`application/json`},body:JSON.stringify({op:r})});if(!n.ok){let e=await n.json().catch(()=>({}));throw Error(e.error??`Server responded with ${n.status}`)}Z(e.json,{type:`triggered`,op:r,port:i.port}),e.json||process.stderr.write(`✓ ${r} triggered\n`);return}catch(t){let n=t instanceof Error?t.message:String(t);e.json||process.stderr.write(`Server trigger failed (${n}), running directly…\n`)}}e.json||process.stderr.write(`Running ${r} directly (no live server)…\n`);let s=P({baseDir:n});if(r===`sync`||r===`pull`){Z(e.json,{type:`step`,step:`pull`});let t=await s.pull();Z(e.json,{type:`pull`,summary:t.summary}),e.json||process.stderr.write(` pull: ${t.summary.changes} changes\n`)}(r===`sync`||r===`push`)&&(Z(e.json,{type:`step`,step:`push`}),await s.push(),Z(e.json,{type:`push`,ok:!0}),e.json||process.stderr.write(` push: ok
671
+ `)),Z(e.json,{type:`complete`,op:r}),e.json||process.stderr.write(`✓ ${r} complete\n`)}function Yi(e){return new A(`sync`).description(`Commit, pull, and push to the remote`).option(`--json`,`Output JSONL progress events`,!1).action(async t=>{try{await Ji({json:t.json,op:`sync`},e())}catch(e){let n=e instanceof Error?e.message:String(e);t.json?process.stdout.write(`${JSON.stringify({type:`error`,message:n})}\n`):process.stderr.write(`✗ sync failed: ${n}\n`),process.exit(1)}})}function Xi(e){return new A(`pull`).description(`Pull changes from the remote`).option(`--json`,`Output JSONL progress events`,!1).action(async t=>{try{await Ji({json:t.json,op:`pull`},e())}catch(e){let n=e instanceof Error?e.message:String(e);t.json?process.stdout.write(`${JSON.stringify({type:`error`,message:n})}\n`):process.stderr.write(`✗ pull failed: ${n}\n`),process.exit(1)}})}function Zi(e){return new A(`push`).description(`Push commits to the remote`).option(`--json`,`Output JSONL progress events`,!1).action(async t=>{try{await Ji({json:t.json,op:`push`},e())}catch(e){let n=e instanceof Error?e.message:String(e);t.json?process.stdout.write(`${JSON.stringify({type:`error`,message:n})}\n`):process.stderr.write(`✗ push failed: ${n}\n`),process.exit(1)}})}async function Qi(e={}){let t=M(e.cwd??process.cwd()),n;try{n=await h({projectDir:t,rootDir:e.root})}catch(e){return e instanceof p?{status:`prerequisite-missing`,message:`${S(`Error:`)} ${e.message}`,exitCode:1}:{status:`failed`,message:`${S(`Error:`)} ${e instanceof Error?e.message:String(e)}`,exitCode:1}}if(n.created.length===0&&n.configEdits.length===0)return{status:`no-op`,message:`${x(`Your knowledge base is already seeded.`)}\n${b(`Nothing to do.`)}`,plan:n,exitCode:0};if(e.dryRun)return{status:`dry-run`,message:`${w(`Plan (dry-run — no changes made):`)}\n\n${$i(n,t)}`,plan:n,exitCode:0};if(!e.yes&&!await ea(`${w(`Plan:`)}\n\n${$i(n,t)}\n\n${w(`Apply?`)} ${b(`[Y/n] 
`)}`,e.confirmStream))return{status:`cancelled`,message:b(`Cancelled.`),plan:n,exitCode:0};let r=await f(n,{projectDir:t});if(r.errors.length>0){let e=r.errors.map(e=>` ${S(`✗`)} ${e.path}: ${e.error}`);return{status:`failed`,message:[`${C(`Applied`)} ${r.applied} entries, ${C(String(r.errors.length))} error(s):`,...e].join(`
672
672
  `),plan:n,exitCode:1}}return{status:`applied`,message:`${x(`✓ Seeded knowledge base`)} ${b(`(${r.applied} entries, ${r.durationMs}ms)`)}`,plan:n,exitCode:0}}function $i(e,t){let n=[],r=e.created.filter(e=>e.kind===`folder`),i=e.created.filter(e=>e.kind===`file`);if(r.length>0){n.push(w(`Folders to create:`));for(let e of r)n.push(` ${x(`+`)} ${y(ye(t,M(t,e.path))||e.path)}${b(`/`)}`)}if(i.length>0){n.length>0&&n.push(``),n.push(w(`Files to create:`));for(let e of i)n.push(` ${x(`+`)} ${y(ye(t,M(t,e.path))||e.path)}`)}if(e.configEdits.length>0){n.length>0&&n.push(``),n.push(w(`config.yml folders: entries to add:`));for(let t of e.configEdits)n.push(` ${x(`+`)} ${y(t.folderMatch)} ${b(`—`)} ${t.entry.frontmatter.title??``}`)}if(e.skipped.length>0){n.length>0&&n.push(``),n.push(b(`Already present (skipped):`));for(let t of e.skipped)n.push(` ${b(`· ${t.path} (${t.reason})`)}`)}if(e.warnings.length>0){n.length>0&&n.push(``),n.push(C(`Warnings:`));for(let t of e.warnings)n.push(` ${C(`!`)} ${t}`)}return n.join(`
673
- `)}async function ea(e,t){let n=Ue({input:t??process.stdin,output:process.stdout});try{let t=(await n.question(e)).trim().toLowerCase();return t===``||t===`y`||t===`yes`}finally{n.close()}}function ta(){return new A(`seed`).description(`Scaffold the Karpathy three-layer knowledge-base structure (external-sources/, research/, articles/) + log.md + config.yml folders: entries`).argument(`[path]`,`Project directory (defaults to cwd)`).option(`-y, --yes`,`Skip confirmation prompt`).option(`--dry-run`,`Print the plan and exit without writing`).action(async(e,t)=>{let n=await Qi({cwd:e??process.cwd(),yes:t.yes,dryRun:t.dryRun});process.stdout.write(`${n.message}\n`),n.exitCode!==0&&(process.exitCode=n.exitCode)})}function na(e,t){return{server:ra(`server`,e),ui:ra(`ui`,t)}}function ra(e,t){switch(t.status){case`missing`:return{name:e,state:`missing`,alive:!1};case`corrupt`:return{name:e,state:`corrupt`,alive:!1};case`foreign-host`:return{name:e,state:`foreign-host`,pid:t.lock.pid,port:t.lock.port,startedAt:t.lock.startedAt,host:t.lock.hostname,alive:`unknown`};case`dead-pid`:return{name:e,state:`dead-pid`,pid:t.lock.pid,port:t.lock.port,startedAt:t.lock.startedAt,host:t.lock.hostname,alive:!1};case`alive`:return{name:e,state:`alive`,pid:t.lock.pid,port:t.lock.port,startedAt:t.lock.startedAt,host:t.lock.hostname,alive:!0}}}function ia(e){return`${aa(e.server)}\n${aa(e.ui)}`}function aa(e){let t=e.name===`server`?`server`:`ui `;return e.state===`missing`?`${t} not running`:e.state===`corrupt`?`${t} lock file corrupt — run \`ok clean\``:e.state===`foreign-host`?`${t} foreign host (${e.host}) pid=${e.pid} port=${e.port}`:e.state===`dead-pid`?`${t} stale (dead pid=${e.pid}) — run \`ok clean\``:`${t} alive pid=${e.pid} port=${e.port} started=${e.startedAt}`}function oa(e){let t=e.inspect??(t=>mt(e.lockDir,t)),n=e.log??(e=>console.log(e)),r=na(t(`server`),t(`ui`));return e.json?n(JSON.stringify(r,null,2)):n(ia(r)),r}function sa(e){return new A(`status`).description(`Show live 
state of the server + ui lockfiles for this project`).option(`--json`,`Emit structured JSON instead of formatted text`).action(t=>{oa({lockDir:a(o(e(),process.cwd())),json:t.json===!0})})}function ca(e,t){let n=[];return e.status===`alive`&&n.push({name:`server`,pid:e.lock.pid,port:e.lock.port}),t.status===`alive`&&n.push({name:`ui`,pid:t.lock.pid,port:t.lock.port}),{targets:n}}function la(e){let t=e.inspect??(t=>mt(e.lockDir,t)),n=e.kill??((e,t)=>process.kill(e,t)),r=e.log??(e=>console.log(e)),i=e.error??(e=>console.error(e)),a=ca(t(`server`),t(`ui`));if(a.targets.length===0)return r(`No running open-knowledge processes.`),{stopped:[],failed:[],hadTargets:!1};let o=[],s=[];for(let e of a.targets)try{n(e.pid,`SIGTERM`),o.push(e)}catch(t){s.push({target:e,error:t instanceof Error?t.message:String(t)})}return o.length>0&&r(`Stopped: ${o.map(e=>`${e.name} (pid=${e.pid}, port=${e.port})`).join(`, `)}`),s.length>0&&i(`Failed to stop: ${s.map(({target:e,error:t})=>`${e.name} (pid=${e.pid}): ${t}`).join(`; `)}`),{stopped:o,failed:s,hadTargets:!0}}function ua(e){return new A(`stop`).description(`Stop the running open-knowledge server and UI (live only)`).action(()=>{la({lockDir:a(o(e(),process.cwd()))}).failed.length>0&&(process.exitCode=1)})}const da=1e4,fa=[`connection`,`keep-alive`,`proxy-authenticate`,`proxy-authorization`,`te`,`trailer`,`transfer-encoding`,`upgrade`,`cookie`,`set-cookie`];async function pa(e){let t=e.upstreamTimeoutMs??da,n=Ne((n,r)=>{ha(n,r,e.upstreamHost,e.upstreamPort,t)});await new Promise((t,r)=>{let i=e=>r(e);n.once(`error`,i),n.listen(e.listenPort,e.host,()=>{n.off(`error`,i),t()})});let r=n.address();return{httpServer:n,port:typeof r==`object`&&r?r.port:e.listenPort,close:()=>new Promise(e=>{n.close(()=>e())})}}function ma(e,t,n){ha(e,t,n.upstreamHost,n.upstreamPort,n.upstreamTimeoutMs??da)}function ha(e,t,n,r,i){let a={...e.headers};delete a.host;for(let e of fa)delete a[e];e.setTimeout(3e4,()=>{if(t.headersSent)try{t.end()}catch{}else 
try{t.writeHead(408,{"Content-Type":`text/plain`}),t.end(`Request Timeout`)}catch{}try{e.socket?.destroy()}catch{}});let o=Pe({host:n,port:r,method:e.method,path:e.url,headers:{...a,host:`${n}:${r}`}},e=>{let n={...e.headers};for(let e of fa)delete n[e];t.writeHead(e.statusCode??502,n),e.pipe(t),e.once(`error`,()=>{try{t.end()}catch{}})});i>0&&o.setTimeout(i,()=>{if(!t.headersSent)t.writeHead(504,{"Content-Type":`text/plain`}),t.end(`Gateway Timeout`);else try{t.end()}catch{}o.destroy()}),o.on(`error`,()=>{if(!t.headersSent)t.writeHead(502,{"Content-Type":`text/plain`}),t.end(`Bad Gateway`);else try{t.end()}catch{}}),e.on(`error`,()=>{o.destroy()}),e.pipe(o)}async function ga(e){await Promise.all(e.map(e=>new Promise(t=>{e.close(()=>t())})))}async function _a(e){let{existsSync:t}=await import(`node:fs`),{createServer:r}=await import(`node:http`),{resolve:i}=await import(`node:path`),{acquireUiLock:a,readServerLock:o,releaseUiLock:s,updateUiLockPort:c}=await import(`./dist-DWb2jtNM.mjs`),{default:l}=await import(`sirv`),{resolveContentDir:u,resolveLockDir:d}=await import(`./paths-C6Cx7bux.mjs`),f=u(e.config,e.cwd),p=d(f);a(p,{port:0,worktreeRoot:e.cwd});let m=import.meta.dirname??new URL(`.`,import.meta.url).pathname,h=[i(m,`public`),i(m,`../../app/dist`),i(m,`../../../app/dist`)].find(e=>t(e)),g=h?l(h,{single:!0,gzip:!0,immutable:!0}):null,_=t(f)?l(f,{dotfiles:!1,dev:!0}):null,v=e.port,y=null,b=(e,t)=>{let n=e.url?.split(`?`)[0];if(n===`/api/config`&&(e.method===`GET`||e.method===`HEAD`)){y?.();let n=o(p),r=n&&n.port>0?`ws://localhost:${n.port}/collab`:null,i=JSON.stringify({collabUrl:r,previewUrl:null,port:v});t.setHeader(`Content-Type`,`application/json`),t.setHeader(`Cache-Control`,`no-store`),t.setHeader(`X-Content-Type-Options`,`nosniff`),t.statusCode=200,e.method===`HEAD`?t.end():t.end(i);return}if(n?.startsWith(`/api/`)){y?.();let 
r=o(p);if(!r||r.port<=0){t.writeHead(503,{"Content-Type":`application/json`,"Cache-Control":`no-store`}),t.end(JSON.stringify({error:"Collab server not running. Start `ok start` or run `ok status`.",path:n}));return}ma(e,t,{upstreamHost:`localhost`,upstreamPort:r.port});return}if(decodeURIComponent(n?.replace(/^\//,``)??``)&&_){t.setHeader(`X-Content-Type-Options`,`nosniff`),_(e,t,()=>{g?g(e,t):va(t)});return}if(g){g(e,t);return}va(t)},x=e.host===void 0?[`::1`,`127.0.0.1`]:[e.host],S=[],C=e.port;try{for(let e of x){let t=r(b);S.push(t),await new Promise((n,r)=>{let i=e=>r(e);t.once(`error`,i),t.listen(C,e,()=>{t.off(`error`,i);let e=t.address();typeof e==`object`&&e&&(C=e.port),n()})})}}catch(e){await Promise.all(S.map(e=>new Promise(t=>{try{e.close(()=>t())}catch{t()}})));try{s(p)}catch{}throw e}let w=C;v=w,c(p,w);let T=e.scheduler??n,E=e.safetyNetMs??432e5,D=null,O=!1,ee=!1,k=()=>{O||(O=!0,D!==null&&(T.clearTimeout(D),D=null))},te=()=>{if(k(),!ee){ee=!0;try{s(p)}catch{}}},A=()=>{O||E<=0||(D!==null&&(T.clearTimeout(D),D=null),D=T.setTimeout(()=>{D=null,console.warn(`[ui] safety-net (${E}ms) reached — shutting down (D-025 backstop)`);try{e.onSafetyNet?.()}catch{}for(let e of S)try{e.close()}catch{}te()},E))},ne=()=>{O||E<=0||A()};return y=ne,A(),{httpServers:S,port:w,release:te,detachSafetyNet:k,nudgeSafetyNet:ne}}function va(e){e.writeHead(404),e.end(`Not found`)}function ya(e,t){if(e!==void 0){let t=Number.parseInt(e,10);if(Number.isNaN(t)||t<0||t>65535)throw Error(`Invalid --port value '${e}'`);return t}if(t!==void 0&&t!==``){let e=Number.parseInt(t,10);if(Number.isNaN(e)||e<0||e>65535)throw Error(`Invalid PORT env value '${t}'`);return e}return 0}async function ba(e){let t=e.readLock??(async()=>{let{readUiLock:t}=await import(`./dist-DWb2jtNM.mjs`);return t(e.lockDir)}),n=await t();if(!n)throw Error(`UI lock collision reported but the lock disappeared before handling — retry 
acquiring.`);if(n.port===e.requestedPort&&n.port>0)return{mode:`already-running`,port:n.port};let r=n.port;if(r===0){let n=Date.now()+(e.pollDeadlineMs??2e3),i=e.pollIntervalMs??100;for(;Date.now()<n;){await new Promise(e=>{setTimeout(e,i)});let e=await t();if(e&&e.port>0){r=e.port;break}}if(r===0)throw Error("UI did not bind within 2s; run `ok clean`");if(r===e.requestedPort)return{mode:`already-running`,port:r}}return{mode:`proxy`,handle:await pa({listenPort:e.requestedPort,host:e.host,upstreamHost:`localhost`,upstreamPort:r}),upstreamPort:r}}function xa(e){return new A(`ui`).description(`Serve the Open Knowledge React editor UI`).option(`-p, --port <port>`,`UI port (default: $PORT env or 0 / kernel-allocated)`).option(`-H, --host <host>`,"UI host. Default: two-socket loopback bind (`[::1]` + `127.0.0.1`) so cross-family collisions fail loud (D-033). Pass an explicit host (e.g. `127.0.0.1`, `0.0.0.0`) to bind a single socket on that host.").action(async t=>{let{dim:n}=await import(`./colors-CMt7h9Vn.mjs`),{UiLockCollisionError:r}=await import(`./dist-DWb2jtNM.mjs`),{resolveContentDir:i,resolveLockDir:a}=await import(`./paths-C6Cx7bux.mjs`),o=e(),s=t.host,c;try{c=ya(t.port,process.env.PORT)}catch(e){console.error(e instanceof Error?e.message:String(e)),process.exitCode=1;return}try{let e=await _a({config:o,cwd:process.cwd(),port:c,host:s}),t=s===void 0||s===`::`||s===`0.0.0.0`?`localhost`:s;console.log(`${n(`[ui]`)} listening on http://${t}:${e.port}`);let r=!1,i=t=>{if(r)return;r=!0,console.log(n(`\n[ui] Shutting down (${t})...`)),e.detachSafetyNet();let i=()=>{try{e.release()}finally{process.exit(process.exitCode??0)}};ga(e.httpServers).then(i,i),setTimeout(i,2e3).unref()};process.once(`SIGINT`,()=>i(`SIGINT`)),process.once(`SIGTERM`,()=>i(`SIGTERM`));return}catch(e){if(!(e instanceof r))throw e;let t=a(i(o,process.cwd())),l=s??`localhost`,u;try{u=await ba({requestedPort:c,host:l,lockDir:t})}catch(e){console.error(e instanceof 
Error?e.message:String(e)),process.exit(1)}u.mode===`already-running`&&(console.log(`UI already running at http://${l}:${u.port}`),process.exit(0)),console.log(`UI running at http://${l}:${u.upstreamPort}; acting as HTTP proxy on port ${u.handle.port}`);let d=!1,f=e=>{d||(d=!0,console.log(n(`\n[ui-proxy] Shutting down (${e})...`)),u.handle.close().finally(()=>process.exit(process.exitCode??0)),setTimeout(()=>process.exit(process.exitCode??0),2e3).unref())};process.once(`SIGINT`,()=>f(`SIGINT`)),process.once(`SIGTERM`,()=>f(`SIGTERM`))}})}process.argv.includes(`--no-color`)?(process.env.NO_COLOR=`1`,delete process.env.FORCE_COLOR):process.argv.includes(`--color`)&&(process.env.FORCE_COLOR=`1`,delete process.env.NO_COLOR);const Q=new A;let $;Q.name(`open-knowledge`).description(`Local-first knowledge base with CRDT collaboration`).version(e).option(`--cwd <path>`,`Working directory`).option(`--log-level <level>`,`Log level`,`info`).option(`--no-color`,`Disable color output`).option(`--color`,`Force color output`).hook(`preAction`,e=>{let t=e.opts(),n=t.cwd;n!==void 0&&process.chdir(n);let{config:r}=O(n),i=e.args.length===0?t:e.commands[0]?.opts()??{};i.port!==void 0&&(r.server.port=Number(i.port)),i.host!==void 0&&(r.server.host=i.host),process.env.PORT&&(r.server.port=Number(process.env.PORT)),process.env.HOST&&(r.server.host=process.env.HOST),$=r});const Sa=k(()=>$);Q.addCommand(Sa,{isDefault:!0});const Ca=Ki(()=>$);Q.addCommand(Ca),Q.addCommand(E()),Q.addCommand(ta()),Q.addCommand(At());const wa=qi(()=>$);Q.addCommand(wa);const Ta=xa(()=>$);Q.addCommand(Ta),Q.addCommand(ua(()=>$)),Q.addCommand(_t(()=>$)),Q.addCommand(sa(()=>$)),Q.addCommand(pt(()=>$)),Q.addCommand(Et(()=>$)),Q.addCommand(Yi(()=>$)),Q.addCommand(Zi(()=>$)),Q.addCommand(Xi(()=>$)),await Q.parseAsync();export{};
673
+ `)}async function ea(e,t){let n=Ue({input:t??process.stdin,output:process.stdout});try{let t=(await n.question(e)).trim().toLowerCase();return t===``||t===`y`||t===`yes`}finally{n.close()}}function ta(){return new A(`seed`).description(`Scaffold the Karpathy three-layer knowledge-base structure (external-sources/, research/, articles/) + log.md + config.yml folders: entries. Use --root to place them inside a subfolder instead of the project root.`).argument(`[path]`,`Project directory (defaults to cwd)`).option(`-r, --root <path>`,`Subfolder (relative to the project dir) to scaffold into — created if missing. Defaults to the project root when omitted in non-interactive runs; prompts on a TTY.`).option(`-y, --yes`,`Skip confirmation prompt`).option(`--dry-run`,`Print the plan and exit without writing`).action(async(e,t)=>{let n=await Qi({cwd:e??process.cwd(),root:t.root,yes:t.yes,dryRun:t.dryRun});process.stdout.write(`${n.message}\n`),n.exitCode!==0&&(process.exitCode=n.exitCode)})}function na(e,t){return{server:ra(`server`,e),ui:ra(`ui`,t)}}function ra(e,t){switch(t.status){case`missing`:return{name:e,state:`missing`,alive:!1};case`corrupt`:return{name:e,state:`corrupt`,alive:!1};case`foreign-host`:return{name:e,state:`foreign-host`,pid:t.lock.pid,port:t.lock.port,startedAt:t.lock.startedAt,host:t.lock.hostname,alive:`unknown`};case`dead-pid`:return{name:e,state:`dead-pid`,pid:t.lock.pid,port:t.lock.port,startedAt:t.lock.startedAt,host:t.lock.hostname,alive:!1};case`alive`:return{name:e,state:`alive`,pid:t.lock.pid,port:t.lock.port,startedAt:t.lock.startedAt,host:t.lock.hostname,alive:!0}}}function ia(e){return`${aa(e.server)}\n${aa(e.ui)}`}function aa(e){let t=e.name===`server`?`server`:`ui `;return e.state===`missing`?`${t} not running`:e.state===`corrupt`?`${t} lock file corrupt — run \`ok clean\``:e.state===`foreign-host`?`${t} foreign host (${e.host}) pid=${e.pid} port=${e.port}`:e.state===`dead-pid`?`${t} stale (dead pid=${e.pid}) — run \`ok clean\``:`${t} 
alive pid=${e.pid} port=${e.port} started=${e.startedAt}`}function oa(e){let t=e.inspect??(t=>mt(e.lockDir,t)),n=e.log??(e=>console.log(e)),r=na(t(`server`),t(`ui`));return e.json?n(JSON.stringify(r,null,2)):n(ia(r)),r}function sa(e){return new A(`status`).description(`Show live state of the server + ui lockfiles for this project`).option(`--json`,`Emit structured JSON instead of formatted text`).action(t=>{oa({lockDir:a(o(e(),process.cwd())),json:t.json===!0})})}function ca(e,t){let n=[];return e.status===`alive`&&n.push({name:`server`,pid:e.lock.pid,port:e.lock.port}),t.status===`alive`&&n.push({name:`ui`,pid:t.lock.pid,port:t.lock.port}),{targets:n}}function la(e){let t=e.inspect??(t=>mt(e.lockDir,t)),n=e.kill??((e,t)=>process.kill(e,t)),r=e.log??(e=>console.log(e)),i=e.error??(e=>console.error(e)),a=ca(t(`server`),t(`ui`));if(a.targets.length===0)return r(`No running open-knowledge processes.`),{stopped:[],failed:[],hadTargets:!1};let o=[],s=[];for(let e of a.targets)try{n(e.pid,`SIGTERM`),o.push(e)}catch(t){s.push({target:e,error:t instanceof Error?t.message:String(t)})}return o.length>0&&r(`Stopped: ${o.map(e=>`${e.name} (pid=${e.pid}, port=${e.port})`).join(`, `)}`),s.length>0&&i(`Failed to stop: ${s.map(({target:e,error:t})=>`${e.name} (pid=${e.pid}): ${t}`).join(`; `)}`),{stopped:o,failed:s,hadTargets:!0}}function ua(e){return new A(`stop`).description(`Stop the running open-knowledge server and UI (live only)`).action(()=>{la({lockDir:a(o(e(),process.cwd()))}).failed.length>0&&(process.exitCode=1)})}const da=1e4,fa=[`connection`,`keep-alive`,`proxy-authenticate`,`proxy-authorization`,`te`,`trailer`,`transfer-encoding`,`upgrade`,`cookie`,`set-cookie`];async function pa(e){let t=e.upstreamTimeoutMs??da,n=Ne((n,r)=>{ha(n,r,e.upstreamHost,e.upstreamPort,t)});await new Promise((t,r)=>{let i=e=>r(e);n.once(`error`,i),n.listen(e.listenPort,e.host,()=>{n.off(`error`,i),t()})});let r=n.address();return{httpServer:n,port:typeof 
r==`object`&&r?r.port:e.listenPort,close:()=>new Promise(e=>{n.close(()=>e())})}}function ma(e,t,n){ha(e,t,n.upstreamHost,n.upstreamPort,n.upstreamTimeoutMs??da)}function ha(e,t,n,r,i){let a={...e.headers};delete a.host;for(let e of fa)delete a[e];e.setTimeout(3e4,()=>{if(t.headersSent)try{t.end()}catch{}else try{t.writeHead(408,{"Content-Type":`text/plain`}),t.end(`Request Timeout`)}catch{}try{e.socket?.destroy()}catch{}});let o=Pe({host:n,port:r,method:e.method,path:e.url,headers:{...a,host:`${n}:${r}`}},e=>{let n={...e.headers};for(let e of fa)delete n[e];t.writeHead(e.statusCode??502,n),e.pipe(t),e.once(`error`,()=>{try{t.end()}catch{}})});i>0&&o.setTimeout(i,()=>{if(!t.headersSent)t.writeHead(504,{"Content-Type":`text/plain`}),t.end(`Gateway Timeout`);else try{t.end()}catch{}o.destroy()}),o.on(`error`,()=>{if(!t.headersSent)t.writeHead(502,{"Content-Type":`text/plain`}),t.end(`Bad Gateway`);else try{t.end()}catch{}}),e.on(`error`,()=>{o.destroy()}),e.pipe(o)}async function ga(e){await Promise.all(e.map(e=>new Promise(t=>{e.close(()=>t())})))}async function _a(e){let{existsSync:t}=await import(`node:fs`),{createServer:r}=await import(`node:http`),{resolve:i}=await import(`node:path`),{acquireUiLock:a,readServerLock:o,releaseUiLock:s,updateUiLockPort:c}=await import(`./dist-B8mg-f6q.mjs`),{default:l}=await import(`sirv`),{resolveContentDir:u,resolveLockDir:d}=await import(`./paths-DpAhhXfF.mjs`),f=u(e.config,e.cwd),p=d(f);a(p,{port:0,worktreeRoot:e.cwd});let m=import.meta.dirname??new URL(`.`,import.meta.url).pathname,h=[i(m,`public`),i(m,`../../app/dist`),i(m,`../../../app/dist`)].find(e=>t(e)),g=h?l(h,{single:!0,gzip:!0,immutable:!0}):null,_=t(f)?l(f,{dotfiles:!1,dev:!0}):null,v=e.port,y=null,b=(e,t)=>{let n=e.url?.split(`?`)[0];if(n===`/api/config`&&(e.method===`GET`||e.method===`HEAD`)){y?.();let 
n=o(p),r=n&&n.port>0?`ws://localhost:${n.port}/collab`:null,i=JSON.stringify({collabUrl:r,previewUrl:null,port:v});t.setHeader(`Content-Type`,`application/json`),t.setHeader(`Cache-Control`,`no-store`),t.setHeader(`X-Content-Type-Options`,`nosniff`),t.statusCode=200,e.method===`HEAD`?t.end():t.end(i);return}if(n?.startsWith(`/api/`)){y?.();let r=o(p);if(!r||r.port<=0){t.writeHead(503,{"Content-Type":`application/json`,"Cache-Control":`no-store`}),t.end(JSON.stringify({error:"Collab server not running. Start `ok start` or run `ok status`.",path:n}));return}ma(e,t,{upstreamHost:`localhost`,upstreamPort:r.port});return}if(decodeURIComponent(n?.replace(/^\//,``)??``)&&_){t.setHeader(`X-Content-Type-Options`,`nosniff`),_(e,t,()=>{g?g(e,t):va(t)});return}if(g){g(e,t);return}va(t)},x=e.host===void 0?[`::1`,`127.0.0.1`]:[e.host],S=[],C=e.port;try{for(let e of x){let t=r(b);S.push(t),await new Promise((n,r)=>{let i=e=>r(e);t.once(`error`,i),t.listen(C,e,()=>{t.off(`error`,i);let e=t.address();typeof e==`object`&&e&&(C=e.port),n()})})}}catch(e){await Promise.all(S.map(e=>new Promise(t=>{try{e.close(()=>t())}catch{t()}})));try{s(p)}catch{}throw e}let w=C;v=w,c(p,w);let T=e.scheduler??n,E=e.safetyNetMs??432e5,D=null,O=!1,ee=!1,k=()=>{O||(O=!0,D!==null&&(T.clearTimeout(D),D=null))},te=()=>{if(k(),!ee){ee=!0;try{s(p)}catch{}}},A=()=>{O||E<=0||(D!==null&&(T.clearTimeout(D),D=null),D=T.setTimeout(()=>{D=null,console.warn(`[ui] safety-net (${E}ms) reached — shutting down (D-025 backstop)`);try{e.onSafetyNet?.()}catch{}for(let e of S)try{e.close()}catch{}te()},E))},ne=()=>{O||E<=0||A()};return y=ne,A(),{httpServers:S,port:w,release:te,detachSafetyNet:k,nudgeSafetyNet:ne}}function va(e){e.writeHead(404),e.end(`Not found`)}function ya(e,t){if(e!==void 0){let t=Number.parseInt(e,10);if(Number.isNaN(t)||t<0||t>65535)throw Error(`Invalid --port value '${e}'`);return t}if(t!==void 0&&t!==``){let e=Number.parseInt(t,10);if(Number.isNaN(e)||e<0||e>65535)throw Error(`Invalid PORT env value 
'${t}'`);return e}return 0}async function ba(e){let t=e.readLock??(async()=>{let{readUiLock:t}=await import(`./dist-B8mg-f6q.mjs`);return t(e.lockDir)}),n=await t();if(!n)throw Error(`UI lock collision reported but the lock disappeared before handling — retry acquiring.`);if(n.port===e.requestedPort&&n.port>0)return{mode:`already-running`,port:n.port};let r=n.port;if(r===0){let n=Date.now()+(e.pollDeadlineMs??2e3),i=e.pollIntervalMs??100;for(;Date.now()<n;){await new Promise(e=>{setTimeout(e,i)});let e=await t();if(e&&e.port>0){r=e.port;break}}if(r===0)throw Error("UI did not bind within 2s; run `ok clean`");if(r===e.requestedPort)return{mode:`already-running`,port:r}}return{mode:`proxy`,handle:await pa({listenPort:e.requestedPort,host:e.host,upstreamHost:`localhost`,upstreamPort:r}),upstreamPort:r}}function xa(e){return new A(`ui`).description(`Serve the Open Knowledge React editor UI`).option(`-p, --port <port>`,`UI port (default: $PORT env or 0 / kernel-allocated)`).option(`-H, --host <host>`,"UI host. Default: two-socket loopback bind (`[::1]` + `127.0.0.1`) so cross-family collisions fail loud (D-033). Pass an explicit host (e.g. 
`127.0.0.1`, `0.0.0.0`) to bind a single socket on that host.").action(async t=>{let{dim:n}=await import(`./colors-CMt7h9Vn.mjs`),{UiLockCollisionError:r}=await import(`./dist-B8mg-f6q.mjs`),{resolveContentDir:i,resolveLockDir:a}=await import(`./paths-DpAhhXfF.mjs`),o=e(),s=t.host,c;try{c=ya(t.port,process.env.PORT)}catch(e){console.error(e instanceof Error?e.message:String(e)),process.exitCode=1;return}try{let e=await _a({config:o,cwd:process.cwd(),port:c,host:s}),t=s===void 0||s===`::`||s===`0.0.0.0`?`localhost`:s;console.log(`${n(`[ui]`)} listening on http://${t}:${e.port}`);let r=!1,i=t=>{if(r)return;r=!0,console.log(n(`\n[ui] Shutting down (${t})...`)),e.detachSafetyNet();let i=()=>{try{e.release()}finally{process.exit(process.exitCode??0)}};ga(e.httpServers).then(i,i),setTimeout(i,2e3).unref()};process.once(`SIGINT`,()=>i(`SIGINT`)),process.once(`SIGTERM`,()=>i(`SIGTERM`));return}catch(e){if(!(e instanceof r))throw e;let t=a(i(o,process.cwd())),l=s??`localhost`,u;try{u=await ba({requestedPort:c,host:l,lockDir:t})}catch(e){console.error(e instanceof Error?e.message:String(e)),process.exit(1)}u.mode===`already-running`&&(console.log(`UI already running at http://${l}:${u.port}`),process.exit(0)),console.log(`UI running at http://${l}:${u.upstreamPort}; acting as HTTP proxy on port ${u.handle.port}`);let d=!1,f=e=>{d||(d=!0,console.log(n(`\n[ui-proxy] Shutting down (${e})...`)),u.handle.close().finally(()=>process.exit(process.exitCode??0)),setTimeout(()=>process.exit(process.exitCode??0),2e3).unref())};process.once(`SIGINT`,()=>f(`SIGINT`)),process.once(`SIGTERM`,()=>f(`SIGTERM`))}})}process.argv.includes(`--no-color`)?(process.env.NO_COLOR=`1`,delete process.env.FORCE_COLOR):process.argv.includes(`--color`)&&(process.env.FORCE_COLOR=`1`,delete process.env.NO_COLOR);const Q=new A;let $;Q.name(`open-knowledge`).description(`Local-first knowledge base with CRDT collaboration`).version(e).option(`--cwd <path>`,`Working directory`).option(`--log-level <level>`,`Log 
level`,`info`).option(`--no-color`,`Disable color output`).option(`--color`,`Force color output`).hook(`preAction`,e=>{let t=e.opts(),n=t.cwd;n!==void 0&&process.chdir(n);let{config:r}=O(n),i=e.args.length===0?t:e.commands[0]?.opts()??{};i.port!==void 0&&(r.server.port=Number(i.port)),i.host!==void 0&&(r.server.host=i.host),process.env.PORT&&(r.server.port=Number(process.env.PORT)),process.env.HOST&&(r.server.host=process.env.HOST),$=r});const Sa=k(()=>$);Q.addCommand(Sa,{isDefault:!0});const Ca=Ki(()=>$);Q.addCommand(Ca),Q.addCommand(E()),Q.addCommand(ta()),Q.addCommand(At());const wa=qi(()=>$);Q.addCommand(wa);const Ta=xa(()=>$);Q.addCommand(Ta),Q.addCommand(ua(()=>$)),Q.addCommand(_t(()=>$)),Q.addCommand(sa(()=>$)),Q.addCommand(pt(()=>$)),Q.addCommand(Et(()=>$)),Q.addCommand(Yi(()=>$)),Q.addCommand(Zi(()=>$)),Q.addCommand(Xi(()=>$)),await Q.parseAsync();export{};
674
674
  //# sourceMappingURL=cli.mjs.map
@@ -0,0 +1,2 @@
1
+ import"./dist-D4iyaPjq.mjs";var e=`0.0.0-dev-20260427172649`;const t=`config.yml`,n=`cache`,r=e,i=`open-knowledge`;export{r as i,t as n,i as r,n as t};
2
+ //# sourceMappingURL=constants-BTD7ZKSR.mjs.map
@@ -0,0 +1 @@
1
+ import"./dist-D4iyaPjq.mjs";import{a as e,c as t}from"./server-lock-CH0GCP_4-DCYOtKMW.mjs";import{A as n,Bt as r,St as i,T as a,X as o,_ as s,q as c,vt as l,w as u}from"./dist-mmLEboji.mjs";export{s as ProjectGitInitError,u as UiLockCollisionError,a as acquireUiLock,n as bootServer,c as ensureProjectGit,o as getLogger,e as isProcessAlive,t as readServerLock,l as readUiLock,i as releaseUiLock,r as updateUiLockPort};