@vpxa/kb 0.1.16 → 0.1.17

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (29)
  1. package/package.json +1 -1
  2. package/packages/cli/dist/commands/init/adapters.js +1 -1
  3. package/packages/cli/dist/commands/init/global.js +1 -1
  4. package/packages/core/dist/global-registry.d.ts +2 -1
  5. package/packages/core/dist/global-registry.js +1 -1
  6. package/packages/embeddings/dist/embedder.interface.d.ts +1 -1
  7. package/packages/embeddings/dist/onnx-embedder.d.ts +1 -1
  8. package/packages/embeddings/dist/onnx-embedder.js +1 -1
  9. package/packages/indexer/dist/hash-cache.d.ts +24 -0
  10. package/packages/indexer/dist/hash-cache.js +1 -0
  11. package/packages/indexer/dist/incremental-indexer.d.ts +5 -1
  12. package/packages/indexer/dist/incremental-indexer.js +1 -1
  13. package/packages/indexer/dist/index.d.ts +2 -1
  14. package/packages/indexer/dist/index.js +1 -1
  15. package/packages/server/dist/server.js +1 -1
  16. package/packages/server/dist/tools/analyze.tools.d.ts +2 -2
  17. package/packages/server/dist/tools/analyze.tools.js +2 -1
  18. package/packages/server/dist/tools/search.tool.js +1 -1
  19. package/packages/server/dist/tools/toolkit.tools.d.ts +2 -2
  20. package/packages/server/dist/tools/toolkit.tools.js +2 -2
  21. package/packages/store/dist/lance-store.js +1 -1
  22. package/packages/tools/dist/index.d.ts +2 -2
  23. package/packages/tools/dist/symbol.d.ts +14 -2
  24. package/packages/tools/dist/symbol.js +3 -3
  25. package/packages/tui/dist/App.d.ts +1 -1
  26. package/packages/tui/dist/{embedder.interface-D4ew0HPW.d.ts → embedder.interface-IFCBpOlX.d.ts} +1 -1
  27. package/packages/tui/dist/{index-B9VpfVPP.d.ts → index-C8NmOF18.d.ts} +1 -1
  28. package/packages/tui/dist/index.d.ts +1 -1
  29. package/packages/tui/dist/panels/SearchPanel.d.ts +1 -1
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@vpxa/kb",
3
- "version": "0.1.16",
3
+ "version": "0.1.17",
4
4
  "type": "module",
5
5
  "description": "Local-first AI developer toolkit — knowledge base, code analysis, context management, and developer tools for LLM agents",
6
6
  "license": "MIT",
@@ -1 +1 @@
1
- import{MCP_SERVER_ENTRY as e}from"./constants.js";import{buildAgentsMd as t,buildCopilotInstructions as n}from"./templates.js";import{existsSync as r,mkdirSync as i,writeFileSync as a}from"node:fs";import{basename as o,resolve as s}from"node:path";function c(e){return r(s(e,`.cursor`))?`cursor`:r(s(e,`.claude`))?`claude-code`:r(s(e,`.windsurf`))?`windsurf`:`copilot`}function l(t){return{servers:{[t]:{...e}}}}function u(t){let{type:n,...r}=e;return{mcpServers:{[t]:r}}}const d={scaffoldDir:`general`,writeMcpConfig(e,t){let n=s(e,`.vscode`),i=s(n,`mcp.json`);r(n)&&!r(i)&&(a(i,`${JSON.stringify(l(t),null,2)}\n`,`utf-8`),console.log(` Created .vscode/mcp.json`))},writeInstructions(e,t){let c=s(e,`.github`),l=s(c,`copilot-instructions.md`);r(l)||(i(c,{recursive:!0}),a(l,n(o(e),t),`utf-8`),console.log(` Created .github/copilot-instructions.md`))},writeAgentsMd(e,n){let i=s(e,`AGENTS.md`);r(i)||(a(i,t(o(e),n),`utf-8`),console.log(` Created AGENTS.md`))}},f={scaffoldDir:`general`,writeMcpConfig(e,t){let n=s(e,`.mcp.json`);r(n)||(a(n,`${JSON.stringify(u(t),null,2)}\n`,`utf-8`),console.log(` Created .mcp.json`))},writeInstructions(e,i){let c=s(e,`CLAUDE.md`);if(!r(c)){let r=o(e);a(c,`${n(r,i)}\n---\n\n${t(r,i)}`,`utf-8`),console.log(` Created CLAUDE.md`)}},writeAgentsMd(e,t){}},p={scaffoldDir:`general`,writeMcpConfig(e,t){let n=s(e,`.cursor`),o=s(n,`mcp.json`);r(o)||(i(n,{recursive:!0}),a(o,`${JSON.stringify(u(t),null,2)}\n`,`utf-8`),console.log(` Created .cursor/mcp.json`))},writeInstructions(e,c){let l=s(e,`.cursor`,`rules`),u=s(l,`kb.mdc`);if(!r(u)){i(l,{recursive:!0});let r=o(e);a(u,`${n(r,c)}\n---\n\n${t(r,c)}`,`utf-8`),console.log(` Created .cursor/rules/kb.mdc`)}},writeAgentsMd(e,t){}},m={scaffoldDir:`general`,writeMcpConfig(e,t){let n=s(e,`.vscode`),o=s(n,`mcp.json`);r(o)||(i(n,{recursive:!0}),a(o,`${JSON.stringify(l(t),null,2)}\n`,`utf-8`),console.log(` Created .vscode/mcp.json (Windsurf-compatible)`))},writeInstructions(e,i){let 
c=s(e,`.windsurfrules`);if(!r(c)){let r=o(e);a(c,`${n(r,i)}\n---\n\n${t(r,i)}`,`utf-8`),console.log(` Created .windsurfrules`)}},writeAgentsMd(e,t){}};function h(e){switch(e){case`copilot`:return d;case`claude-code`:return f;case`cursor`:return p;case`windsurf`:return m}}export{f as claudeCodeAdapter,d as copilotAdapter,p as cursorAdapter,c as detectIde,h as getAdapter,m as windsurfAdapter};
1
+ import{MCP_SERVER_ENTRY as e}from"./constants.js";import{buildAgentsMd as t,buildCopilotInstructions as n}from"./templates.js";import{existsSync as r,mkdirSync as i,writeFileSync as a}from"node:fs";import{basename as o,resolve as s}from"node:path";function c(e){return r(s(e,`.cursor`))?`cursor`:r(s(e,`.claude`))?`claude-code`:r(s(e,`.windsurf`))?`windsurf`:`copilot`}function l(t){return{servers:{[t]:{...e}}}}function u(t){let{type:n,...r}=e;return{mcpServers:{[t]:r}}}const d={scaffoldDir:`general`,writeMcpConfig(e,t){let n=s(e,`.vscode`),o=s(n,`mcp.json`);r(o)||(i(n,{recursive:!0}),a(o,`${JSON.stringify(l(t),null,2)}\n`,`utf-8`),console.log(` Created .vscode/mcp.json`))},writeInstructions(e,t){let c=s(e,`.github`),l=s(c,`copilot-instructions.md`);r(l)||(i(c,{recursive:!0}),a(l,n(o(e),t),`utf-8`),console.log(` Created .github/copilot-instructions.md`))},writeAgentsMd(e,n){let i=s(e,`AGENTS.md`);r(i)||(a(i,t(o(e),n),`utf-8`),console.log(` Created AGENTS.md`))}},f={scaffoldDir:`general`,writeMcpConfig(e,t){let n=s(e,`.mcp.json`);r(n)||(a(n,`${JSON.stringify(u(t),null,2)}\n`,`utf-8`),console.log(` Created .mcp.json`))},writeInstructions(e,i){let c=s(e,`CLAUDE.md`);if(!r(c)){let r=o(e);a(c,`${n(r,i)}\n---\n\n${t(r,i)}`,`utf-8`),console.log(` Created CLAUDE.md`)}},writeAgentsMd(e,t){}},p={scaffoldDir:`general`,writeMcpConfig(e,t){let n=s(e,`.cursor`),o=s(n,`mcp.json`);r(o)||(i(n,{recursive:!0}),a(o,`${JSON.stringify(u(t),null,2)}\n`,`utf-8`),console.log(` Created .cursor/mcp.json`))},writeInstructions(e,c){let l=s(e,`.cursor`,`rules`),u=s(l,`kb.mdc`);if(!r(u)){i(l,{recursive:!0});let r=o(e);a(u,`${n(r,c)}\n---\n\n${t(r,c)}`,`utf-8`),console.log(` Created .cursor/rules/kb.mdc`)}},writeAgentsMd(e,t){}},m={scaffoldDir:`general`,writeMcpConfig(e,t){let n=s(e,`.vscode`),o=s(n,`mcp.json`);r(o)||(i(n,{recursive:!0}),a(o,`${JSON.stringify(l(t),null,2)}\n`,`utf-8`),console.log(` Created .vscode/mcp.json (Windsurf-compatible)`))},writeInstructions(e,i){let 
c=s(e,`.windsurfrules`);if(!r(c)){let r=o(e);a(c,`${n(r,i)}\n---\n\n${t(r,i)}`,`utf-8`),console.log(` Created .windsurfrules`)}},writeAgentsMd(e,t){}};function h(e){switch(e){case`copilot`:return d;case`claude-code`:return f;case`cursor`:return p;case`windsurf`:return m}}export{f as claudeCodeAdapter,d as copilotAdapter,p as cursorAdapter,c as detectIde,h as getAdapter,m as windsurfAdapter};
@@ -1,4 +1,4 @@
1
- import{MCP_SERVER_ENTRY as e,SERVER_NAME as t,SKILL_NAMES as n}from"./constants.js";import{copyDirectoryRecursive as r}from"./scaffold.js";import{existsSync as i,mkdirSync as a,readFileSync as o,writeFileSync as s}from"node:fs";import{dirname as c,resolve as l}from"node:path";import{fileURLToPath as u}from"node:url";import{getGlobalDataDir as d,saveRegistry as f}from"../../../../core/dist/index.js";import{homedir as p}from"node:os";function m(){let e=p(),t=process.platform,n=[];if(t===`win32`){let t=process.env.APPDATA??l(e,`AppData`,`Roaming`);n.push({ide:`VS Code`,configDir:l(t,`Code`,`User`),mcpConfigPath:l(t,`Code`,`User`,`mcp.json`)},{ide:`Cursor`,configDir:l(t,`Cursor`,`User`),mcpConfigPath:l(t,`Cursor`,`User`,`mcp.json`)},{ide:`Windsurf`,configDir:l(t,`Windsurf`,`User`),mcpConfigPath:l(t,`Windsurf`,`User`,`mcp.json`)})}else if(t===`darwin`){let t=l(e,`Library`,`Application Support`);n.push({ide:`VS Code`,configDir:l(t,`Code`,`User`),mcpConfigPath:l(t,`Code`,`User`,`mcp.json`)},{ide:`Cursor`,configDir:l(t,`Cursor`,`User`),mcpConfigPath:l(t,`Cursor`,`User`,`mcp.json`)},{ide:`Windsurf`,configDir:l(t,`Windsurf`,`User`),mcpConfigPath:l(t,`Windsurf`,`User`,`mcp.json`)})}else{let t=process.env.XDG_CONFIG_HOME??l(e,`.config`);n.push({ide:`VS Code`,configDir:l(t,`Code`,`User`),mcpConfigPath:l(t,`Code`,`User`,`mcp.json`)},{ide:`Cursor`,configDir:l(t,`Cursor`,`User`),mcpConfigPath:l(t,`Cursor`,`User`,`mcp.json`)},{ide:`Windsurf`,configDir:l(t,`Windsurf`,`User`),mcpConfigPath:l(t,`Windsurf`,`User`,`mcp.json`)})}return n.push({ide:`Claude Code`,configDir:l(e,`.claude`),mcpConfigPath:l(e,`.claude`,`mcp.json`)}),n.filter(e=>i(e.configDir))}function h(t,n,r=!1){let{mcpConfigPath:c,configDir:l}=t,u={...e},d={};if(i(c)){try{let e=o(c,`utf-8`);d=JSON.parse(e)}catch{let e=`${c}.bak`;s(e,o(c,`utf-8`),`utf-8`),console.log(` Backed up invalid ${c} to ${e}`),d={}}if((d.servers??d.mcpServers??{})[n]&&!r){console.log(` ${t.ide}: ${n} already configured (use --force to 
update)`);return}}let f=t.ide===`VS Code`||t.ide===`Windsurf`?`servers`:`mcpServers`,p=d[f]??{};p[n]=u,d[f]=p,a(l,{recursive:!0}),s(c,`${JSON.stringify(d,null,2)}\n`,`utf-8`),console.log(` ${t.ide}: configured ${n} in ${c}`)}function g(e){let t=l(d(),`skills`);for(let a of n){let n=l(e,`skills`,a);i(n)&&r(n,l(t,a),`skills/${a}`,!0)}console.log(` Installed ${n.length} skills to ${t}`)}async function _(e){let n=t;console.log(`Initializing global KB installation...
1
+ import{MCP_SERVER_ENTRY as e,SERVER_NAME as t,SKILL_NAMES as n}from"./constants.js";import{copyDirectoryRecursive as r}from"./scaffold.js";import{existsSync as i,mkdirSync as a,readFileSync as o,writeFileSync as s}from"node:fs";import{dirname as c,resolve as l}from"node:path";import{fileURLToPath as u}from"node:url";import{getGlobalDataDir as d,saveRegistry as f}from"../../../../core/dist/index.js";import{homedir as p}from"node:os";function m(){let e=p(),t=process.platform,n=[];if(t===`win32`){let t=process.env.APPDATA??l(e,`AppData`,`Roaming`);n.push({ide:`VS Code`,configDir:l(t,`Code`,`User`),mcpConfigPath:l(t,`Code`,`User`,`mcp.json`)},{ide:`VS Code Insiders`,configDir:l(t,`Code - Insiders`,`User`),mcpConfigPath:l(t,`Code - Insiders`,`User`,`mcp.json`)},{ide:`VSCodium`,configDir:l(t,`VSCodium`,`User`),mcpConfigPath:l(t,`VSCodium`,`User`,`mcp.json`)},{ide:`Cursor`,configDir:l(t,`Cursor`,`User`),mcpConfigPath:l(t,`Cursor`,`User`,`mcp.json`)},{ide:`Cursor Nightly`,configDir:l(t,`Cursor Nightly`,`User`),mcpConfigPath:l(t,`Cursor Nightly`,`User`,`mcp.json`)},{ide:`Windsurf`,configDir:l(t,`Windsurf`,`User`),mcpConfigPath:l(t,`Windsurf`,`User`,`mcp.json`)})}else if(t===`darwin`){let t=l(e,`Library`,`Application Support`);n.push({ide:`VS Code`,configDir:l(t,`Code`,`User`),mcpConfigPath:l(t,`Code`,`User`,`mcp.json`)},{ide:`VS Code Insiders`,configDir:l(t,`Code - Insiders`,`User`),mcpConfigPath:l(t,`Code - Insiders`,`User`,`mcp.json`)},{ide:`VSCodium`,configDir:l(t,`VSCodium`,`User`),mcpConfigPath:l(t,`VSCodium`,`User`,`mcp.json`)},{ide:`Cursor`,configDir:l(t,`Cursor`,`User`),mcpConfigPath:l(t,`Cursor`,`User`,`mcp.json`)},{ide:`Cursor Nightly`,configDir:l(t,`Cursor Nightly`,`User`),mcpConfigPath:l(t,`Cursor Nightly`,`User`,`mcp.json`)},{ide:`Windsurf`,configDir:l(t,`Windsurf`,`User`),mcpConfigPath:l(t,`Windsurf`,`User`,`mcp.json`)})}else{let t=process.env.XDG_CONFIG_HOME??l(e,`.config`);n.push({ide:`VS 
Code`,configDir:l(t,`Code`,`User`),mcpConfigPath:l(t,`Code`,`User`,`mcp.json`)},{ide:`VS Code Insiders`,configDir:l(t,`Code - Insiders`,`User`),mcpConfigPath:l(t,`Code - Insiders`,`User`,`mcp.json`)},{ide:`VSCodium`,configDir:l(t,`VSCodium`,`User`),mcpConfigPath:l(t,`VSCodium`,`User`,`mcp.json`)},{ide:`Cursor`,configDir:l(t,`Cursor`,`User`),mcpConfigPath:l(t,`Cursor`,`User`,`mcp.json`)},{ide:`Cursor Nightly`,configDir:l(t,`Cursor Nightly`,`User`),mcpConfigPath:l(t,`Cursor Nightly`,`User`,`mcp.json`)},{ide:`Windsurf`,configDir:l(t,`Windsurf`,`User`),mcpConfigPath:l(t,`Windsurf`,`User`,`mcp.json`)})}return n.push({ide:`Claude Code`,configDir:l(e,`.claude`),mcpConfigPath:l(e,`.claude`,`mcp.json`)}),n.filter(e=>i(e.configDir))}function h(t,n,r=!1){let{mcpConfigPath:c,configDir:l}=t,u={...e},d={};if(i(c)){try{let e=o(c,`utf-8`);d=JSON.parse(e)}catch{let e=`${c}.bak`;s(e,o(c,`utf-8`),`utf-8`),console.log(` Backed up invalid ${c} to ${e}`),d={}}if((d.servers??d.mcpServers??{})[n]&&!r){console.log(` ${t.ide}: ${n} already configured (use --force to update)`);return}}let f=new Set([`VS Code`,`VS Code Insiders`,`VSCodium`,`Windsurf`]).has(t.ide)?`servers`:`mcpServers`,p=d[f]??{};p[n]=u,d[f]=p,a(l,{recursive:!0}),s(c,`${JSON.stringify(d,null,2)}\n`,`utf-8`),console.log(` ${t.ide}: configured ${n} in ${c}`)}function g(e){let t=l(d(),`skills`);for(let a of n){let n=l(e,`skills`,a);i(n)&&r(n,l(t,a),`skills/${a}`,!0)}console.log(` Installed ${n.length} skills to ${t}`)}async function _(e){let n=t;console.log(`Initializing global KB installation...
2
2
  `);let r=d();a(r,{recursive:!0}),console.log(` Global data store: ${r}`),f({version:1,workspaces:{}}),console.log(` Created registry.json`);let i=m();if(i.length===0)console.log(`
3
3
  No supported IDEs detected. You can manually add the MCP server config.`);else{console.log(`\n Detected ${i.length} IDE(s):`);for(let t of i)h(t,n,e.force)}g(l(c(u(import.meta.url)),`..`,`..`,`..`,`..`,`..`)),console.log(`
4
4
  Global KB installation complete!`),console.log(`
@@ -34,7 +34,8 @@ declare function computePartitionKey(cwd: string): string;
34
34
  */
35
35
  declare function loadRegistry(): GlobalRegistry;
36
36
  /**
37
- * Save the global registry atomically (write to .tmp then rename).
37
+ * Save the global registry atomically with file locking.
38
+ * Uses O_CREAT|O_EXCL lock file + write-to-tmp + rename pattern.
38
39
  */
39
40
  declare function saveRegistry(registry: GlobalRegistry): void;
40
41
  /**
@@ -1 +1 @@
1
- import{KB_GLOBAL_PATHS as e}from"./constants.js";import{basename as t,resolve as n}from"node:path";import{createHash as r}from"node:crypto";import{existsSync as i,mkdirSync as a,readFileSync as o,renameSync as s,writeFileSync as c}from"node:fs";import{homedir as l}from"node:os";function u(){return process.env.KB_GLOBAL_DATA_DIR??n(l(),e.root)}function d(e){let i=n(e);return`${t(i).toLowerCase().replace(/[^a-z0-9-]/g,`-`)||`workspace`}-${r(`sha256`).update(i).digest(`hex`).slice(0,8)}`}function f(){let t=n(u(),e.registry);if(!i(t))return{version:1,workspaces:{}};let r=o(t,`utf-8`);return JSON.parse(r)}function p(t){let r=u();a(r,{recursive:!0});let i=n(r,e.registry),o=`${i}.tmp`;c(o,JSON.stringify(t,null,2),`utf-8`),s(o,i)}function m(e){let t=f(),r=d(e),i=new Date().toISOString();return t.workspaces[r]?t.workspaces[r].lastAccessedAt=i:t.workspaces[r]={partition:r,workspacePath:n(e),registeredAt:i,lastAccessedAt:i},a(_(r),{recursive:!0}),p(t),t.workspaces[r]}function h(e){let t=f(),n=d(e);return t.workspaces[n]}function g(){let e=f();return Object.values(e.workspaces)}function _(e){return n(u(),e)}function v(){return i(n(u(),e.registry))}export{d as computePartitionKey,u as getGlobalDataDir,_ as getPartitionDir,v as isGlobalInstalled,g as listWorkspaces,f as loadRegistry,h as lookupWorkspace,m as registerWorkspace,p as saveRegistry};
1
+ import{KB_GLOBAL_PATHS as e}from"./constants.js";import{basename as t,resolve as n}from"node:path";import{createHash as r}from"node:crypto";import{closeSync as i,constants as a,existsSync as o,mkdirSync as s,openSync as c,readFileSync as l,renameSync as u,statSync as d,unlinkSync as f,writeFileSync as p}from"node:fs";import{homedir as m}from"node:os";function h(){return process.env.KB_GLOBAL_DATA_DIR??n(m(),e.root)}function g(e){let i=n(e);return`${t(i).toLowerCase().replace(/[^a-z0-9-]/g,`-`)||`workspace`}-${r(`sha256`).update(i).digest(`hex`).slice(0,8)}`}function _(){let t=n(h(),e.registry);if(!o(t))return{version:1,workspaces:{}};let r=l(t,`utf-8`);return JSON.parse(r)}function v(e,t=5e3){let n=`${e}.lock`,r=Date.now()+t,o=10;for(;Date.now()<r;)try{let e=c(n,a.O_CREAT|a.O_EXCL|a.O_WRONLY);return p(e,`${process.pid}\n`),i(e),n}catch(e){if(e.code!==`EEXIST`)throw e;try{let{mtimeMs:e}=d(n);if(Date.now()-e>3e4){f(n);continue}}catch{}let t=new SharedArrayBuffer(4);Atomics.wait(new Int32Array(t),0,0,o),o=Math.min(o*2,200)}throw Error(`Failed to acquire registry lock after ${t}ms`)}function y(e){try{f(e)}catch{}}function b(t){let r=h();s(r,{recursive:!0});let i=n(r,e.registry),a=v(i);try{let e=`${i}.tmp`;p(e,JSON.stringify(t,null,2),`utf-8`),u(e,i)}finally{y(a)}}function x(e){let t=_(),r=g(e),i=new Date().toISOString();return t.workspaces[r]?t.workspaces[r].lastAccessedAt=i:t.workspaces[r]={partition:r,workspacePath:n(e),registeredAt:i,lastAccessedAt:i},s(w(r),{recursive:!0}),b(t),t.workspaces[r]}function S(e){let t=_(),n=g(e);return t.workspaces[n]}function C(){let e=_();return Object.values(e.workspaces)}function w(e){return n(h(),e)}function T(){return o(n(h(),e.registry))}export{g as computePartitionKey,h as getGlobalDataDir,w as getPartitionDir,T as isGlobalInstalled,C as listWorkspaces,_ as loadRegistry,S as lookupWorkspace,x as registerWorkspace,b as saveRegistry};
@@ -12,7 +12,7 @@ interface IEmbedder {
12
12
  */
13
13
  embedQuery(query: string): Promise<Float32Array>;
14
14
  /** Generate embeddings for multiple text strings (batched, for documents/passages) */
15
- embedBatch(texts: string[]): Promise<Float32Array[]>;
15
+ embedBatch(texts: string[], batchSize?: number): Promise<Float32Array[]>;
16
16
  /** The dimensionality of the embedding vectors */
17
17
  readonly dimensions: number;
18
18
  /** The model identifier */
@@ -16,7 +16,7 @@ declare class OnnxEmbedder implements IEmbedder {
16
16
  shutdown(): Promise<void>;
17
17
  embed(text: string): Promise<Float32Array>;
18
18
  embedQuery(query: string): Promise<Float32Array>;
19
- embedBatch(texts: string[]): Promise<Float32Array[]>;
19
+ embedBatch(texts: string[], batchSize?: number): Promise<Float32Array[]>;
20
20
  }
21
21
  //#endregion
22
22
  export { OnnxEmbedder };
@@ -1 +1 @@
1
- import{homedir as e}from"node:os";import{join as t}from"node:path";import{env as n,pipeline as r}from"@huggingface/transformers";import{EMBEDDING_DEFAULTS as i}from"../../core/dist/index.js";n.cacheDir=t(e(),`.cache`,`huggingface`,`transformers-js`);var a=class{pipe=null;dimensions;modelId;queryPrefix;constructor(e){this.modelId=e?.model??i.model,this.dimensions=e?.dimensions??i.dimensions,this.queryPrefix=e?.queryPrefix??this.detectQueryPrefix(this.modelId)}detectQueryPrefix(e){let t=e.toLowerCase();return t.includes(`bge`)||t.includes(`mxbai-embed`)?`Represent this sentence for searching relevant passages: `:t.includes(`/e5-`)||t.includes(`multilingual-e5`)?`query: `:``}async initialize(){if(!this.pipe)try{this.pipe=await r(`feature-extraction`,this.modelId,{dtype:`fp32`})}catch(e){throw Error(`Failed to initialize embedding model "${this.modelId}": ${e.message}`)}}async shutdown(){this.pipe=null}async embed(e){this.pipe||await this.initialize();let t=await this.pipe?.(e,{pooling:`mean`,normalize:!0});if(!t)throw Error(`Embedding pipeline returned no output`);return new Float32Array(t.data)}async embedQuery(e){return this.embed(this.queryPrefix+e)}async embedBatch(e){if(e.length===0)return[];this.pipe||await this.initialize();let t=[];for(let n=0;n<e.length;n+=32){let r=e.slice(n,n+32),i=await this.pipe?.(r,{pooling:`mean`,normalize:!0});if(!i)throw Error(`Embedding pipeline returned no output`);if(r.length===1)t.push(new Float32Array(i.data));else for(let e=0;e<r.length;e++){let n=e*this.dimensions,r=i.data.slice(n,n+this.dimensions);t.push(new Float32Array(r))}}return t}};export{a as OnnxEmbedder};
1
+ import{homedir as e}from"node:os";import{join as t}from"node:path";import{env as n,pipeline as r}from"@huggingface/transformers";import{EMBEDDING_DEFAULTS as i}from"../../core/dist/index.js";n.cacheDir=t(e(),`.cache`,`huggingface`,`transformers-js`);var a=class{pipe=null;dimensions;modelId;queryPrefix;constructor(e){this.modelId=e?.model??i.model,this.dimensions=e?.dimensions??i.dimensions,this.queryPrefix=e?.queryPrefix??this.detectQueryPrefix(this.modelId)}detectQueryPrefix(e){let t=e.toLowerCase();return t.includes(`bge`)||t.includes(`mxbai-embed`)?`Represent this sentence for searching relevant passages: `:t.includes(`/e5-`)||t.includes(`multilingual-e5`)?`query: `:``}async initialize(){if(!this.pipe)try{this.pipe=await r(`feature-extraction`,this.modelId,{dtype:`fp32`})}catch(e){throw Error(`Failed to initialize embedding model "${this.modelId}": ${e.message}`)}}async shutdown(){this.pipe=null}async embed(e){this.pipe||await this.initialize();let t=await this.pipe?.(e,{pooling:`mean`,normalize:!0});if(!t)throw Error(`Embedding pipeline returned no output`);return new Float32Array(t.data)}async embedQuery(e){return this.embed(this.queryPrefix+e)}async embedBatch(e,t=64){if(e.length===0)return[];this.pipe||await this.initialize();let n=[];for(let r=0;r<e.length;r+=t){let i=e.slice(r,r+t),a=await this.pipe?.(i,{pooling:`mean`,normalize:!0});if(!a)throw Error(`Embedding pipeline returned no output`);if(i.length===1)n.push(new Float32Array(a.data));else for(let e=0;e<i.length;e++){let t=e*this.dimensions,r=a.data.slice(t,t+this.dimensions);n.push(new Float32Array(r))}}return n}};export{a as OnnxEmbedder};
@@ -0,0 +1,24 @@
1
+ //#region packages/indexer/src/hash-cache.d.ts
2
+ /**
3
+ * Persistent file hash cache.
4
+ * Stores path->hash mappings in a JSON file to avoid LanceDB round-trips
5
+ * when checking which files have changed during incremental indexing.
6
+ */
7
+ declare class FileHashCache {
8
+ private cache;
9
+ private readonly filePath;
10
+ private dirty;
11
+ constructor(storeDir: string);
12
+ /** Load cache from disk. Non-fatal if missing or corrupt. */
13
+ load(): void;
14
+ get(path: string): string | undefined;
15
+ set(path: string, hash: string): void;
16
+ delete(path: string): void;
17
+ /** Persist cache to disk if changed. */
18
+ flush(): void;
19
+ /** Clear all entries and delete file. */
20
+ clear(): void;
21
+ get size(): number;
22
+ }
23
+ //#endregion
24
+ export { FileHashCache };
@@ -0,0 +1 @@
1
+ import{resolve as e}from"node:path";import{createLogger as t}from"../../core/dist/index.js";import{existsSync as n,readFileSync as r,writeFileSync as i}from"node:fs";const a=t(`hash-cache`);var o=class{cache;filePath;dirty=!1;constructor(t){this.filePath=e(t,`file-hashes.json`),this.cache=new Map}load(){if(n(this.filePath))try{let e=r(this.filePath,`utf-8`),t=JSON.parse(e);this.cache=new Map(Object.entries(t)),a.info(`Hash cache loaded`,{entries:this.cache.size})}catch(e){a.warn(`Hash cache load failed, starting fresh`,{err:e}),this.cache=new Map}}get(e){return this.cache.get(e)}set(e,t){this.cache.set(e,t),this.dirty=!0}delete(e){this.cache.delete(e)&&(this.dirty=!0)}flush(){if(this.dirty)try{let e={};for(let[t,n]of this.cache)e[t]=n;i(this.filePath,JSON.stringify(e),`utf-8`),this.dirty=!1}catch(e){a.warn(`Hash cache flush failed`,{err:e})}}clear(){this.cache.clear(),this.dirty=!0,this.flush()}get size(){return this.cache.size}};export{o as FileHashCache};
@@ -1,3 +1,4 @@
1
+ import { FileHashCache } from "./hash-cache.js";
1
2
  import { IGraphStore, IKnowledgeStore } from "@kb/store";
2
3
  import { IndexStats, KBConfig } from "@kb/core";
3
4
  import { IEmbedder } from "@kb/embeddings";
@@ -26,15 +27,18 @@ declare class IncrementalIndexer {
26
27
  private readonly crawler;
27
28
  private indexing;
28
29
  private graphStore?;
30
+ private hashCache?;
29
31
  /** Whether an index operation is currently in progress. */
30
32
  get isIndexing(): boolean;
31
33
  constructor(embedder: IEmbedder, store: IKnowledgeStore);
32
34
  /** Set the graph store for auto-population during indexing and cleanup on re-index. */
33
35
  setGraphStore(graphStore: IGraphStore): void;
36
+ /** Set the hash cache for faster incremental checks. */
37
+ setHashCache(cache: FileHashCache): void;
34
38
  /**
35
39
  * Index all configured sources. Only re-indexes files that have changed.
36
40
  * Sources are crawled in parallel, and file processing runs concurrently
37
- * up to `config.indexing.concurrency` (default: half of CPU cores).
41
+ * up to `config.indexing.concurrency` (default: 75% of CPU cores, minimum 2).
38
42
  */
39
43
  index(config: KBConfig, onProgress?: ProgressCallback): Promise<IndexResult>;
40
44
  private doIndex;
@@ -1 +1 @@
1
- import{generateRecordId as e,hashContent as t}from"./file-hasher.js";import{FilesystemCrawler as n}from"./filesystem-crawler.js";import{extractGraph as r}from"./graph-extractor.js";import{KB_PATHS as i,createLogger as a,detectContentType as o,serializeError as s}from"../../core/dist/index.js";import{availableParallelism as c}from"node:os";import{createChunkerSync as l}from"../../chunker/dist/index.js";const u=a(`indexer`);async function d(e,t,n,r){let i=0;async function a(){for(;i<e.length;){let n=i++;try{await t(e[n])}catch(t){r?.(e[n],t)}}}await Promise.all(Array.from({length:Math.min(n,e.length)},()=>a()))}const f=Math.max(1,Math.floor(c()/2));var p=class{crawler;indexing=!1;graphStore;get isIndexing(){return this.indexing}constructor(e,t){this.embedder=e,this.store=t,this.crawler=new n}setGraphStore(e){this.graphStore=e}async index(e,t){if(this.indexing)throw Error(`Indexing is already in progress`);this.indexing=!0;try{return await this.doIndex(e,t,{})}finally{this.indexing=!1}}async doIndex(n,a,c={}){let p=Date.now(),m=0,h=0,g=0,_=0,v=n.indexing.concurrency??f;a?.({phase:`crawling`,filesTotal:0,filesProcessed:0,chunksTotal:0,chunksProcessed:0});let y=(await Promise.all(n.sources.map(e=>this.crawler.crawl({rootDir:e.path,excludePatterns:e.excludePatterns})))).flat(),b,x;if(c.skipHashCheck)b=y,x=[];else{let e=await this.store.listSourcePaths(),n=new Set(y.map(e=>e.relativePath));x=e.filter(e=>!n.has(e)&&!e.startsWith(`${i.aiCurated}/`)),b=[],await d(y,async e=>{let n=t(e.content),r=await this.store.getBySourcePath(e.relativePath);if(r.length>0&&r[0].fileHash===n){h++;return}b.push(e)},v,(e,t)=>u.error(`Hash check failed`,{sourcePath:e.relativePath,...s(t)}))}let S=b.length,C=[],w=[],T=0,E=async()=>{if(this.graphStore){try{C.length>0&&await this.graphStore.upsertNodes(C),w.length>0&&await this.graphStore.upsertEdges(w)}catch(e){u.warn(`Graph batch flush failed`,s(e))}C=[],w=[],T=0}};return await d(b,async 
n=>{a?.({phase:`chunking`,filesTotal:S,filesProcessed:m,chunksTotal:g,chunksProcessed:g,currentFile:n.relativePath});let i=o(n.relativePath),d=l(n.extension).chunk(n.content,{sourcePath:n.relativePath,contentType:i});if(d.length===0)return;a?.({phase:`embedding`,filesTotal:S,filesProcessed:m,chunksTotal:g+d.length,chunksProcessed:g,currentFile:n.relativePath});let f=await this.embedder.embedBatch(d.map(e=>e.text)),p=t(n.content),h=d.map((t,r)=>({id:e(n.relativePath,r),content:t.text,sourcePath:t.sourcePath,contentType:t.contentType,headingPath:t.headingPath,chunkIndex:t.chunkIndex,totalChunks:t.totalChunks,startLine:t.startLine,endLine:t.endLine,fileHash:p,indexedAt:new Date().toISOString(),origin:`indexed`,tags:[],version:1}));if(a?.({phase:`storing`,filesTotal:S,filesProcessed:m,chunksTotal:g+d.length,chunksProcessed:g,currentFile:n.relativePath}),await this.store.upsert(h,f),this.graphStore)try{c.graphCleared||await this.graphStore.deleteBySourcePath(n.relativePath);let e=r(n.content,n.relativePath);e.nodes.length>0&&C.push(...e.nodes),e.edges.length>0&&w.push(...e.edges),T++,T>=50&&await E()}catch(e){u.warn(`Graph extraction failed`,{sourcePath:n.relativePath,...s(e)})}m++,g+=d.length},v,(e,t)=>u.error(`Processing failed`,{sourcePath:e.relativePath,...s(t)})),await E(),x.length>0&&(a?.({phase:`cleanup`,filesTotal:S,filesProcessed:m,chunksTotal:g,chunksProcessed:g}),await d(x,async e=>{await this.store.deleteBySourcePath(e),this.graphStore&&await this.graphStore.deleteBySourcePath(e).catch(t=>u.warn(`Graph cleanup failed`,{sourcePath:e,...s(t)})),_++},v,(e,t)=>u.error(`Cleanup failed`,{sourcePath:e,...s(t)}))),a?.({phase:`done`,filesTotal:S,filesProcessed:m,chunksTotal:g,chunksProcessed:g}),{filesProcessed:m,filesSkipped:h,chunksCreated:g,filesRemoved:_,durationMs:Date.now()-p}}async reindexAll(e,t){if(this.indexing)throw Error(`Indexing is already in progress`);this.indexing=!0;try{if(await this.store.dropTable(),this.graphStore)try{let e=await 
this.graphStore.getStats();e.nodeCount>0&&(await this.graphStore.clear(),u.info(`Graph store cleared`,{nodeCount:e.nodeCount,edgeCount:e.edgeCount}))}catch(e){u.warn(`Graph store clear failed`,s(e))}return await this.doReindex(e,t)}catch(e){throw this.indexing=!1,e}}async doReindex(e,t){try{return await this.doIndex(e,t,{skipHashCheck:!0,graphCleared:!0})}finally{this.indexing=!1}}async getStats(){return this.store.getStats()}};export{p as IncrementalIndexer};
1
+ import{generateRecordId as e,hashContent as t}from"./file-hasher.js";import{FilesystemCrawler as n}from"./filesystem-crawler.js";import{extractGraph as r}from"./graph-extractor.js";import{KB_PATHS as i,createLogger as a,detectContentType as o,serializeError as s}from"../../core/dist/index.js";import{availableParallelism as c}from"node:os";import{createChunkerSync as l}from"../../chunker/dist/index.js";const u=a(`indexer`);async function d(e,t,n,r){let i=0;async function a(){for(;i<e.length;){let n=i++;try{await t(e[n])}catch(t){r?.(e[n],t)}}}await Promise.all(Array.from({length:Math.min(n,e.length)},()=>a()))}const f=Math.max(2,Math.floor(c()*.75));var p=class{crawler;indexing=!1;graphStore;hashCache;get isIndexing(){return this.indexing}constructor(e,t){this.embedder=e,this.store=t,this.crawler=new n}setGraphStore(e){this.graphStore=e}setHashCache(e){this.hashCache=e}async index(e,t){if(this.indexing)throw Error(`Indexing is already in progress`);this.indexing=!0;try{return await this.doIndex(e,t,{})}finally{this.indexing=!1}}async doIndex(n,a,c={}){let p=Date.now(),m=0,h=0,g=0,_=0,v=n.indexing.concurrency??f;a?.({phase:`crawling`,filesTotal:0,filesProcessed:0,chunksTotal:0,chunksProcessed:0});let y=(await Promise.all(n.sources.map(e=>this.crawler.crawl({rootDir:e.path,excludePatterns:e.excludePatterns})))).flat(),b,x;if(c.skipHashCheck)b=y,x=[];else{let e=await this.store.listSourcePaths(),n=new Set(y.map(e=>e.relativePath));x=e.filter(e=>!n.has(e)&&!e.startsWith(`${i.aiCurated}/`)),b=[],await d(y,async e=>{let n=t(e.content);if(this.hashCache){if(this.hashCache.get(e.relativePath)===n){h++;return}}else{let t=await this.store.getBySourcePath(e.relativePath);if(t.length>0&&t[0].fileHash===n){h++;return}}b.push(e)},v,(e,t)=>u.error(`Hash check failed`,{sourcePath:e.relativePath,...s(t)}))}let S=b.length,C=[],w=[],T=0,E=[],D=[],O=new Map,k=0,A=async()=>{if(E.length===0)return;let e=E,t=D,n=O;E=[],D=[],O=new Map,k=0,await this.store.upsert(e,t);for(let[e,t]of 
n)this.hashCache?.set(e,t)},j=async()=>{if(this.graphStore){try{C.length>0&&await this.graphStore.upsertNodes(C),w.length>0&&await this.graphStore.upsertEdges(w)}catch(e){u.warn(`Graph batch flush failed`,s(e))}C=[],w=[],T=0}};return await d(b,async n=>{a?.({phase:`chunking`,filesTotal:S,filesProcessed:m,chunksTotal:g,chunksProcessed:g,currentFile:n.relativePath});let i=o(n.relativePath),d=l(n.extension).chunk(n.content,{sourcePath:n.relativePath,contentType:i});if(d.length===0)return;a?.({phase:`embedding`,filesTotal:S,filesProcessed:m,chunksTotal:g+d.length,chunksProcessed:g,currentFile:n.relativePath});let f=await this.embedder.embedBatch(d.map(e=>e.text)),p=t(n.content),h=d.map((t,r)=>({id:e(n.relativePath,r),content:t.text,sourcePath:t.sourcePath,contentType:t.contentType,headingPath:t.headingPath,chunkIndex:t.chunkIndex,totalChunks:t.totalChunks,startLine:t.startLine,endLine:t.endLine,fileHash:p,indexedAt:new Date().toISOString(),origin:`indexed`,tags:[],version:1}));if(a?.({phase:`storing`,filesTotal:S,filesProcessed:m,chunksTotal:g+d.length,chunksProcessed:g,currentFile:n.relativePath}),E.push(...h),D.push(...f),O.set(n.relativePath,p),k++,k>=20&&await A(),this.graphStore)try{c.graphCleared||await this.graphStore.deleteBySourcePath(n.relativePath);let e=r(n.content,n.relativePath);e.nodes.length>0&&C.push(...e.nodes),e.edges.length>0&&w.push(...e.edges),T++,T>=50&&await j()}catch(e){u.warn(`Graph extraction failed`,{sourcePath:n.relativePath,...s(e)})}m++,g+=d.length},v,(e,t)=>u.error(`Processing failed`,{sourcePath:e.relativePath,...s(t)})),await A(),await j(),x.length>0&&(a?.({phase:`cleanup`,filesTotal:S,filesProcessed:m,chunksTotal:g,chunksProcessed:g}),await d(x,async e=>{await this.store.deleteBySourcePath(e),this.hashCache?.delete(e),this.graphStore&&await this.graphStore.deleteBySourcePath(e).catch(t=>u.warn(`Graph cleanup failed`,{sourcePath:e,...s(t)})),_++},v,(e,t)=>u.error(`Cleanup 
failed`,{sourcePath:e,...s(t)}))),this.hashCache?.flush(),a?.({phase:`done`,filesTotal:S,filesProcessed:m,chunksTotal:g,chunksProcessed:g}),{filesProcessed:m,filesSkipped:h,chunksCreated:g,filesRemoved:_,durationMs:Date.now()-p}}async reindexAll(e,t){if(this.indexing)throw Error(`Indexing is already in progress`);this.indexing=!0;try{if(await this.store.dropTable(),this.graphStore)try{let e=await this.graphStore.getStats();e.nodeCount>0&&(await this.graphStore.clear(),u.info(`Graph store cleared`,{nodeCount:e.nodeCount,edgeCount:e.edgeCount}))}catch(e){u.warn(`Graph store clear failed`,s(e))}return await this.doReindex(e,t)}catch(e){throw this.indexing=!1,e}}async doReindex(e,t){try{return await this.doIndex(e,t,{skipHashCheck:!0,graphCleared:!0})}finally{this.indexing=!1}}async getStats(){return this.store.getStats()}};export{p as IncrementalIndexer};
@@ -1,5 +1,6 @@
1
1
  import { generateRecordId, hashContent } from "./file-hasher.js";
2
2
  import { CrawlOptions, CrawlResult, FilesystemCrawler } from "./filesystem-crawler.js";
3
3
  import { ExtractedGraph, extractGraph } from "./graph-extractor.js";
4
+ import { FileHashCache } from "./hash-cache.js";
4
5
  import { IncrementalIndexer, IndexProgress, IndexResult, ProgressCallback } from "./incremental-indexer.js";
5
- export { type CrawlOptions, type CrawlResult, type ExtractedGraph, FilesystemCrawler, IncrementalIndexer, type IndexProgress, type IndexResult, type ProgressCallback, extractGraph, generateRecordId, hashContent };
6
+ export { type CrawlOptions, type CrawlResult, type ExtractedGraph, FileHashCache, FilesystemCrawler, IncrementalIndexer, type IndexProgress, type IndexResult, type ProgressCallback, extractGraph, generateRecordId, hashContent };
@@ -1 +1 @@
1
- import{generateRecordId as e,hashContent as t}from"./file-hasher.js";import{FilesystemCrawler as n}from"./filesystem-crawler.js";import{extractGraph as r}from"./graph-extractor.js";import{IncrementalIndexer as i}from"./incremental-indexer.js";export{n as FilesystemCrawler,i as IncrementalIndexer,r as extractGraph,e as generateRecordId,t as hashContent};
1
+ import{generateRecordId as e,hashContent as t}from"./file-hasher.js";import{FilesystemCrawler as n}from"./filesystem-crawler.js";import{extractGraph as r}from"./graph-extractor.js";import{FileHashCache as i}from"./hash-cache.js";import{IncrementalIndexer as a}from"./incremental-indexer.js";export{i as FileHashCache,n as FilesystemCrawler,a as IncrementalIndexer,r as extractGraph,e as generateRecordId,t as hashContent};
@@ -1 +1 @@
1
- import{CuratedKnowledgeManager as e}from"./curated-manager.js";import{installReplayInterceptor as t}from"./replay-interceptor.js";import{registerResources as n}from"./resources/resources.js";import{registerAnalyzeDependenciesTool as r,registerAnalyzeDiagramTool as i,registerAnalyzeEntryPointsTool as a,registerAnalyzePatternsTool as o,registerAnalyzeStructureTool as s,registerAnalyzeSymbolsTool as c,registerBlastRadiusTool as l}from"./tools/analyze.tools.js";import{registerAuditTool as u}from"./tools/audit.tool.js";import{initBridgeComponents as d,registerErPullTool as f,registerErPushTool as p,registerErSyncStatusTool as m}from"./tools/bridge.tools.js";import{registerErEvolveReviewTool as ee}from"./tools/evolution.tools.js";import{registerDigestTool as te,registerEvidenceMapTool as ne,registerForgeClassifyTool as re,registerForgeGroundTool as ie,registerStratumCardTool as ae}from"./tools/forge.tools.js";import{registerForgetTool as oe}from"./tools/forget.tool.js";import{registerGraphTool as se}from"./tools/graph.tool.js";import{registerListTool as ce}from"./tools/list.tool.js";import{registerLookupTool as le}from"./tools/lookup.tool.js";import{registerOnboardTool as ue}from"./tools/onboard.tool.js";import{registerErUpdatePolicyTool as h}from"./tools/policy.tools.js";import{registerProduceKnowledgeTool as g}from"./tools/produce.tool.js";import{registerReadTool as _}from"./tools/read.tool.js";import{registerReindexTool as v}from"./tools/reindex.tool.js";import{registerRememberTool as y}from"./tools/remember.tool.js";import{registerReplayTool as b}from"./tools/replay.tool.js";import{registerSearchTool as x}from"./tools/search.tool.js";import{registerStatusTool as S}from"./tools/status.tool.js";import{registerBatchTool as C,registerCheckTool as w,registerCheckpointTool as T,registerCodemodTool as E,registerCompactTool as D,registerDataTransformTool as O,registerDeadSymbolsTool as k,registerDelegateTool as A,registerDiffParseTool as j,registerEvalTool as 
M,registerFileSummaryTool as N,registerFindTool as P,registerGitContextTool as F,registerGuideTool as I,registerHealthTool as L,registerLaneTool as R,registerParseOutputTool as z,registerProcessTool as B,registerQueueTool as V,registerRenameTool as H,registerScopeMapTool as U,registerStashTool as de,registerSymbolTool as fe,registerTestRunTool as pe,registerTraceTool as me,registerWatchTool as he,registerWebFetchTool as ge,registerWorksetTool as _e}from"./tools/toolkit.tools.js";import{registerUpdateTool as W}from"./tools/update.tool.js";import{registerChangelogTool as ve,registerEncodeTool as ye,registerEnvTool as be,registerHttpTool as xe,registerMeasureTool as Se,registerRegexTestTool as Ce,registerSchemaValidateTool as we,registerSnippetTool as Te,registerTimeTool as Ee,registerWebSearchTool as De}from"./tools/utility.tools.js";import{getCurrentVersion as G}from"./version-check.js";import{existsSync as Oe,statSync as ke}from"node:fs";import{resolve as Ae}from"node:path";import{KB_PATHS as je,createLogger as Me,serializeError as K}from"../../core/dist/index.js";import{initializeWasm as Ne}from"../../chunker/dist/index.js";import{OnnxEmbedder as Pe}from"../../embeddings/dist/index.js";import{EvolutionCollector as Fe,PolicyStore as Ie}from"../../enterprise-bridge/dist/index.js";import{IncrementalIndexer as Le}from"../../indexer/dist/index.js";import{SqliteGraphStore as Re,createStore as ze}from"../../store/dist/index.js";import{FileCache as Be}from"../../tools/dist/index.js";import{McpServer as q}from"@modelcontextprotocol/sdk/server/mcp.js";const J=Me(`server`);async function Y(t){J.info(`Initializing knowledge base components`);let n=new Pe({model:t.embedding.model,dimensions:t.embedding.dimensions});await n.initialize(),J.info(`Embedder loaded`,{modelId:n.modelId,dimensions:n.dimensions});let r=await ze({backend:t.store.backend,path:t.store.path});await r.initialize(),J.info(`Store initialized`);let i=new Le(n,r),a=t.curated.path,o=new e(a,r,n),s=new 
Re({path:t.store.path});await s.initialize(),J.info(`Graph store initialized`),i.setGraphStore(s),await Ne()?J.info(`WASM tree-sitter enabled for AST analysis`):J.warn(`WASM tree-sitter not available; analyzers will use regex fallback`);let c=d(t.er),l=c?new Ie(t.curated.path):void 0;l&&J.info(`Policy store initialized`,{ruleCount:l.getRules().length});let u=c?new Fe:void 0,f=Ae(process.cwd(),je.aiKb),p=Oe(f),m;if(p)try{m=ke(f).mtime.toISOString()}catch{}return J.info(`Onboard state detected`,{onboardComplete:p,onboardTimestamp:m}),{embedder:n,store:r,indexer:i,curated:o,graphStore:s,fileCache:new Be,bridge:c,policyStore:l,evolutionCollector:u,onboardComplete:p,onboardTimestamp:m}}function X(e,t){let n=new q({name:t.serverName??`knowledge-base`,version:G()});return Z(n,e,t),n}function Z(e,d,G){t(e),x(e,d.embedder,d.store,d.graphStore,d.bridge,d.evolutionCollector),le(e,d.store),S(e,d.store,d.graphStore,d.curated,{onboardComplete:d.onboardComplete,onboardTimestamp:d.onboardTimestamp}),v(e,d.indexer,G,d.curated,d.store),y(e,d.curated,d.policyStore,d.evolutionCollector),W(e,d.curated),oe(e,d.curated),_(e,d.curated),ce(e,d.curated),s(e,d.store,d.embedder),r(e,d.store,d.embedder),c(e,d.store,d.embedder),o(e,d.store,d.embedder),a(e,d.store,d.embedder),i(e,d.store,d.embedder),l(e,d.store,d.embedder),g(e),ue(e,d.store,d.embedder),se(e,d.graphStore),u(e,d.store,d.embedder),D(e,d.embedder,d.fileCache),U(e,d.embedder,d.store),P(e,d.embedder,d.store),z(e),_e(e),w(e),C(e,d.embedder,d.store),fe(e,d.embedder,d.store),M(e),pe(e),de(e),F(e),j(e),H(e),E(e),N(e,d.fileCache),T(e),O(e),me(e,d.embedder,d.store),B(e),he(e),k(e,d.embedder,d.store),A(e),L(e),R(e),V(e),ge(e),I(e),ne(e),te(e,d.embedder),re(e),ae(e,d.embedder,d.fileCache),ie(e,d.embedder,d.store),De(e),xe(e),Ce(e),ye(e),Se(e),ve(e),we(e),Te(e),be(e),Ee(e),d.bridge&&(p(e,d.bridge,d.evolutionCollector),f(e,d.bridge),m(e,d.bridge)),d.policyStore&&h(e,d.policyStore),d.evolutionCollector&&ee(e,d.evolutionCollector),n(e,d.store),b(e
)}async function Ve(e){let t=await Y(e),n=X(t,e);J.info(`MCP server configured`,{toolCount:$.length,resourceCount:2});let r=async()=>{try{let n=e.sources.map(e=>e.path).join(`, `);J.info(`Running initial index`,{sourcePaths:n});let r=await t.indexer.index(e,e=>{e.phase===`crawling`||e.phase===`done`||(e.phase===`chunking`&&e.currentFile&&J.debug(`Indexing file`,{current:e.filesProcessed+1,total:e.filesTotal,file:e.currentFile}),e.phase===`cleanup`&&J.debug(`Index cleanup`,{staleEntries:e.filesTotal-e.filesProcessed}))});J.info(`Initial index complete`,{filesProcessed:r.filesProcessed,filesSkipped:r.filesSkipped,chunksCreated:r.chunksCreated,durationMs:r.durationMs});try{await t.store.createFtsIndex()}catch(e){J.warn(`FTS index creation failed`,K(e))}try{let e=await t.curated.reindexAll();J.info(`Curated re-index complete`,{indexed:e.indexed})}catch(e){J.error(`Curated re-index failed`,K(e))}}catch(e){J.error(`Initial index failed; will retry on kb_reindex`,K(e))}},i=async()=>{J.info(`Shutting down`),await t.embedder.shutdown().catch(()=>{}),await t.graphStore.close().catch(()=>{}),await t.store.close(),process.exit(0)};process.on(`SIGINT`,i),process.on(`SIGTERM`,i);let a=process.ppid,o=setInterval(()=>{try{process.kill(a,0)}catch{J.info(`Parent process died; shutting down`,{parentPid:a}),clearInterval(o),i()}},5e3);return o.unref(),{server:n,runInitialIndex:r,shutdown:i}}const He=new 
Set(`batch.changelog.check.checkpoint.codemod.compact.data_transform.delegate.diff_parse.digest.encode.env.eval.evidence_map.file_summary.forge_classify.git_context.graph.guide.health.http.lane.measure.onboard.parse_output.process.queue.read.regex_test.reindex.remember.rename.replay.schema_validate.scope_map.snippet.stash.status.stratum_card.test_run.time.update.forget.list.watch.web_fetch.web_search.workset`.split(`.`)),Q=5e3,$=`analyze_dependencies.analyze_diagram.analyze_entry_points.analyze_patterns.analyze_structure.analyze_symbols.audit.batch.blast_radius.changelog.check.checkpoint.codemod.compact.data_transform.dead_symbols.delegate.diff_parse.digest.encode.env.eval.evidence_map.file_summary.find.forge_classify.forge_ground.forget.git_context.graph.guide.health.http.lane.list.lookup.measure.onboard.parse_output.process.produce_knowledge.queue.read.regex_test.reindex.remember.rename.replay.schema_validate.scope_map.search.snippet.stash.status.stratum_card.symbol.test_run.time.trace.update.watch.web_fetch.web_search.workset`.split(`.`);function Ue(e){let t=new q({name:e.serverName??`knowledge-base`,version:G()}),n=t.sendToolListChanged.bind(t);t.sendToolListChanged=()=>{};let r=$.map(e=>t.registerTool(e,{description:`${e} (initializing...)`,inputSchema:{}},async()=>({content:[{type:`text`,text:`KB is still initializing, please retry in a few seconds.`}]})));t.sendToolListChanged=n;let i=t.resource(`kb-status`,`kb://status`,{description:`Knowledge base status (initializing...)`,mimeType:`text/plain`},async()=>({contents:[{uri:`kb://status`,text:`KB is initializing...`,mimeType:`text/plain`}]})),a,o=new Promise(e=>{a=e}),s=(async()=>{let n=await Y(e),o=t.sendToolListChanged.bind(t);t.sendToolListChanged=()=>{};for(let e of r)e.remove();i.remove(),Z(t,n,e),t.sendToolListChanged=o,t.sendToolListChanged();let s=t._registeredTools??{};for(let[e,t]of Object.entries(s)){if(He.has(e))continue;let r=t.handler;t.handler=async(...t)=>{if(!n.indexer.isIndexing)return 
r(...t);let i=new Promise(t=>setTimeout(()=>t({content:[{type:`text`,text:`⏳ KB is re-indexing. The tool "${e}" timed out waiting for index data (${Q/1e3}s).\n\nThe existing index may be temporarily locked. Please retry shortly — indexing will complete automatically.`}]}),Q));return Promise.race([r(...t),i])}}let c=Object.keys(s).length;c!==$.length&&J.warn(`ALL_TOOL_NAMES count mismatch`,{expectedToolCount:$.length,registeredToolCount:c}),J.info(`MCP server configured`,{toolCount:$.length,resourceCount:2}),a?.(n)})(),c=async()=>{let t=await o;try{let n=e.sources.map(e=>e.path).join(`, `);J.info(`Running initial index`,{sourcePaths:n});let r=await t.indexer.index(e,e=>{e.phase===`crawling`||e.phase===`done`||(e.phase===`chunking`&&e.currentFile&&J.debug(`Indexing file`,{current:e.filesProcessed+1,total:e.filesTotal,file:e.currentFile}),e.phase===`cleanup`&&J.debug(`Index cleanup`,{staleEntries:e.filesTotal-e.filesProcessed}))});J.info(`Initial index complete`,{filesProcessed:r.filesProcessed,filesSkipped:r.filesSkipped,chunksCreated:r.chunksCreated,durationMs:r.durationMs});try{await t.store.createFtsIndex()}catch(e){J.warn(`FTS index creation failed`,K(e))}try{let e=await t.curated.reindexAll();J.info(`Curated re-index complete`,{indexed:e.indexed})}catch(e){J.error(`Curated re-index failed`,K(e))}}catch(e){J.error(`Initial index failed; will retry on kb_reindex`,K(e))}},l=process.ppid,u=setInterval(()=>{try{process.kill(l,0)}catch{J.info(`Parent process died; shutting down`,{parentPid:l}),clearInterval(u),o.then(async e=>{await e.embedder.shutdown().catch(()=>{}),await e.graphStore.close().catch(()=>{}),await e.store.close().catch(()=>{})}).catch(()=>{}).finally(()=>process.exit(0))}},5e3);return u.unref(),{server:t,ready:s,runInitialIndex:c}}export{$ as ALL_TOOL_NAMES,Ue as createLazyServer,X as createMcpServer,Ve as createServer,Y as initializeKnowledgeBase,Z as registerMcpTools};
1
+ import{CuratedKnowledgeManager as e}from"./curated-manager.js";import{installReplayInterceptor as t}from"./replay-interceptor.js";import{registerResources as n}from"./resources/resources.js";import{registerAnalyzeDependenciesTool as r,registerAnalyzeDiagramTool as i,registerAnalyzeEntryPointsTool as a,registerAnalyzePatternsTool as o,registerAnalyzeStructureTool as s,registerAnalyzeSymbolsTool as c,registerBlastRadiusTool as l}from"./tools/analyze.tools.js";import{registerAuditTool as u}from"./tools/audit.tool.js";import{initBridgeComponents as d,registerErPullTool as f,registerErPushTool as p,registerErSyncStatusTool as m}from"./tools/bridge.tools.js";import{registerErEvolveReviewTool as h}from"./tools/evolution.tools.js";import{registerDigestTool as ee,registerEvidenceMapTool as te,registerForgeClassifyTool as ne,registerForgeGroundTool as re,registerStratumCardTool as ie}from"./tools/forge.tools.js";import{registerForgetTool as ae}from"./tools/forget.tool.js";import{registerGraphTool as oe}from"./tools/graph.tool.js";import{registerListTool as se}from"./tools/list.tool.js";import{registerLookupTool as ce}from"./tools/lookup.tool.js";import{registerOnboardTool as le}from"./tools/onboard.tool.js";import{registerErUpdatePolicyTool as ue}from"./tools/policy.tools.js";import{registerProduceKnowledgeTool as g}from"./tools/produce.tool.js";import{registerReadTool as _}from"./tools/read.tool.js";import{registerReindexTool as v}from"./tools/reindex.tool.js";import{registerRememberTool as y}from"./tools/remember.tool.js";import{registerReplayTool as b}from"./tools/replay.tool.js";import{registerSearchTool as x}from"./tools/search.tool.js";import{registerStatusTool as S}from"./tools/status.tool.js";import{registerBatchTool as C,registerCheckTool as w,registerCheckpointTool as T,registerCodemodTool as E,registerCompactTool as D,registerDataTransformTool as O,registerDeadSymbolsTool as k,registerDelegateTool as A,registerDiffParseTool as j,registerEvalTool as 
M,registerFileSummaryTool as N,registerFindTool as P,registerGitContextTool as F,registerGuideTool as I,registerHealthTool as L,registerLaneTool as R,registerParseOutputTool as z,registerProcessTool as B,registerQueueTool as V,registerRenameTool as H,registerScopeMapTool as U,registerStashTool as de,registerSymbolTool as fe,registerTestRunTool as pe,registerTraceTool as me,registerWatchTool as he,registerWebFetchTool as ge,registerWorksetTool as _e}from"./tools/toolkit.tools.js";import{registerUpdateTool as ve}from"./tools/update.tool.js";import{registerChangelogTool as ye,registerEncodeTool as be,registerEnvTool as xe,registerHttpTool as Se,registerMeasureTool as Ce,registerRegexTestTool as we,registerSchemaValidateTool as Te,registerSnippetTool as Ee,registerTimeTool as De,registerWebSearchTool as Oe}from"./tools/utility.tools.js";import{getCurrentVersion as W}from"./version-check.js";import{existsSync as ke,statSync as Ae}from"node:fs";import{resolve as je}from"node:path";import{KB_PATHS as Me,createLogger as Ne,serializeError as G}from"../../core/dist/index.js";import{initializeWasm as Pe}from"../../chunker/dist/index.js";import{OnnxEmbedder as Fe}from"../../embeddings/dist/index.js";import{EvolutionCollector as Ie,PolicyStore as Le}from"../../enterprise-bridge/dist/index.js";import{FileHashCache as Re,IncrementalIndexer as ze}from"../../indexer/dist/index.js";import{SqliteGraphStore as Be,createStore as Ve}from"../../store/dist/index.js";import{FileCache as K}from"../../tools/dist/index.js";import{McpServer as q}from"@modelcontextprotocol/sdk/server/mcp.js";const J=Ne(`server`);async function Y(t){J.info(`Initializing knowledge base components`);let n=new Fe({model:t.embedding.model,dimensions:t.embedding.dimensions});await n.initialize(),J.info(`Embedder loaded`,{modelId:n.modelId,dimensions:n.dimensions});let r=await Ve({backend:t.store.backend,path:t.store.path});await r.initialize(),J.info(`Store initialized`);let i=new ze(n,r),a=new 
Re(t.store.path);a.load(),i.setHashCache(a);let o=t.curated.path,s=new e(o,r,n),c=new Be({path:t.store.path});await c.initialize(),J.info(`Graph store initialized`),i.setGraphStore(c),await Pe()?J.info(`WASM tree-sitter enabled for AST analysis`):J.warn(`WASM tree-sitter not available; analyzers will use regex fallback`);let l=d(t.er),u=l?new Le(t.curated.path):void 0;u&&J.info(`Policy store initialized`,{ruleCount:u.getRules().length});let f=l?new Ie:void 0,p=je(process.cwd(),Me.aiKb),m=ke(p),h;if(m)try{h=Ae(p).mtime.toISOString()}catch{}return J.info(`Onboard state detected`,{onboardComplete:m,onboardTimestamp:h}),{embedder:n,store:r,indexer:i,curated:s,graphStore:c,fileCache:new K,bridge:l,policyStore:u,evolutionCollector:f,onboardComplete:m,onboardTimestamp:h}}function X(e,t){let n=new q({name:t.serverName??`knowledge-base`,version:W()});return Z(n,e,t),n}function Z(e,d,W){t(e),x(e,d.embedder,d.store,d.graphStore,d.bridge,d.evolutionCollector),ce(e,d.store),S(e,d.store,d.graphStore,d.curated,{onboardComplete:d.onboardComplete,onboardTimestamp:d.onboardTimestamp}),v(e,d.indexer,W,d.curated,d.store),y(e,d.curated,d.policyStore,d.evolutionCollector),ve(e,d.curated),ae(e,d.curated),_(e,d.curated),se(e,d.curated),s(e,d.store,d.embedder),r(e,d.store,d.embedder),c(e,d.store,d.embedder),o(e,d.store,d.embedder),a(e,d.store,d.embedder),i(e,d.store,d.embedder),l(e,d.store,d.embedder,d.graphStore),g(e),le(e,d.store,d.embedder),oe(e,d.graphStore),u(e,d.store,d.embedder),D(e,d.embedder,d.fileCache),U(e,d.embedder,d.store),P(e,d.embedder,d.store),z(e),_e(e),w(e),C(e,d.embedder,d.store),fe(e,d.embedder,d.store,d.graphStore),M(e),pe(e),de(e),F(e),j(e),H(e),E(e),N(e,d.fileCache),T(e),O(e),me(e,d.embedder,d.store),B(e),he(e),k(e,d.embedder,d.store),A(e),L(e),R(e),V(e),ge(e),I(e),te(e),ee(e,d.embedder),ne(e),ie(e,d.embedder,d.fileCache),re(e,d.embedder,d.store),Oe(e),Se(e),we(e),be(e),Ce(e),ye(e),Te(e),Ee(e),xe(e),De(e),d.bridge&&(p(e,d.bridge,d.evolutionCollector),f(e,d.bridge),m(
e,d.bridge)),d.policyStore&&ue(e,d.policyStore),d.evolutionCollector&&h(e,d.evolutionCollector),n(e,d.store),b(e)}async function He(e){let t=await Y(e),n=X(t,e);J.info(`MCP server configured`,{toolCount:$.length,resourceCount:2});let r=async()=>{try{let n=e.sources.map(e=>e.path).join(`, `);J.info(`Running initial index`,{sourcePaths:n});let r=await t.indexer.index(e,e=>{e.phase===`crawling`||e.phase===`done`||(e.phase===`chunking`&&e.currentFile&&J.debug(`Indexing file`,{current:e.filesProcessed+1,total:e.filesTotal,file:e.currentFile}),e.phase===`cleanup`&&J.debug(`Index cleanup`,{staleEntries:e.filesTotal-e.filesProcessed}))});J.info(`Initial index complete`,{filesProcessed:r.filesProcessed,filesSkipped:r.filesSkipped,chunksCreated:r.chunksCreated,durationMs:r.durationMs});try{await t.store.createFtsIndex()}catch(e){J.warn(`FTS index creation failed`,G(e))}try{let e=await t.curated.reindexAll();J.info(`Curated re-index complete`,{indexed:e.indexed})}catch(e){J.error(`Curated re-index failed`,G(e))}}catch(e){J.error(`Initial index failed; will retry on kb_reindex`,G(e))}},i=async()=>{J.info(`Shutting down`),await t.embedder.shutdown().catch(()=>{}),await t.graphStore.close().catch(()=>{}),await t.store.close(),process.exit(0)};process.on(`SIGINT`,i),process.on(`SIGTERM`,i);let a=process.ppid,o=setInterval(()=>{try{process.kill(a,0)}catch{J.info(`Parent process died; shutting down`,{parentPid:a}),clearInterval(o),i()}},5e3);return o.unref(),{server:n,runInitialIndex:r,shutdown:i}}const Ue=new 
Set(`batch.changelog.check.checkpoint.codemod.compact.data_transform.delegate.diff_parse.digest.encode.env.eval.evidence_map.file_summary.forge_classify.git_context.graph.guide.health.http.lane.measure.onboard.parse_output.process.queue.read.regex_test.reindex.remember.rename.replay.schema_validate.scope_map.snippet.stash.status.stratum_card.test_run.time.update.forget.list.watch.web_fetch.web_search.workset`.split(`.`)),Q=5e3,$=`analyze_dependencies.analyze_diagram.analyze_entry_points.analyze_patterns.analyze_structure.analyze_symbols.audit.batch.blast_radius.changelog.check.checkpoint.codemod.compact.data_transform.dead_symbols.delegate.diff_parse.digest.encode.env.eval.evidence_map.file_summary.find.forge_classify.forge_ground.forget.git_context.graph.guide.health.http.lane.list.lookup.measure.onboard.parse_output.process.produce_knowledge.queue.read.regex_test.reindex.remember.rename.replay.schema_validate.scope_map.search.snippet.stash.status.stratum_card.symbol.test_run.time.trace.update.watch.web_fetch.web_search.workset`.split(`.`);function We(e){let t=new q({name:e.serverName??`knowledge-base`,version:W()}),n=t.sendToolListChanged.bind(t);t.sendToolListChanged=()=>{};let r=$.map(e=>t.registerTool(e,{description:`${e} (initializing...)`,inputSchema:{}},async()=>({content:[{type:`text`,text:`KB is still initializing, please retry in a few seconds.`}]})));t.sendToolListChanged=n;let i=t.resource(`kb-status`,`kb://status`,{description:`Knowledge base status (initializing...)`,mimeType:`text/plain`},async()=>({contents:[{uri:`kb://status`,text:`KB is initializing...`,mimeType:`text/plain`}]})),a,o=new Promise(e=>{a=e}),s=(async()=>{let n=await Y(e),o=t.sendToolListChanged.bind(t);t.sendToolListChanged=()=>{};for(let e of r)e.remove();i.remove(),Z(t,n,e),t.sendToolListChanged=o,t.sendToolListChanged();let s=t._registeredTools??{};for(let[e,t]of Object.entries(s)){if(Ue.has(e))continue;let r=t.handler;t.handler=async(...t)=>{if(!n.indexer.isIndexing)return 
r(...t);let i=new Promise(t=>setTimeout(()=>t({content:[{type:`text`,text:`⏳ KB is re-indexing. The tool "${e}" timed out waiting for index data (${Q/1e3}s).\n\nThe existing index may be temporarily locked. Please retry shortly — indexing will complete automatically.`}]}),Q));return Promise.race([r(...t),i])}}let c=Object.keys(s).length;c!==$.length&&J.warn(`ALL_TOOL_NAMES count mismatch`,{expectedToolCount:$.length,registeredToolCount:c}),J.info(`MCP server configured`,{toolCount:$.length,resourceCount:2}),a?.(n)})(),c=async()=>{let t=await o;try{let n=e.sources.map(e=>e.path).join(`, `);J.info(`Running initial index`,{sourcePaths:n});let r=await t.indexer.index(e,e=>{e.phase===`crawling`||e.phase===`done`||(e.phase===`chunking`&&e.currentFile&&J.debug(`Indexing file`,{current:e.filesProcessed+1,total:e.filesTotal,file:e.currentFile}),e.phase===`cleanup`&&J.debug(`Index cleanup`,{staleEntries:e.filesTotal-e.filesProcessed}))});J.info(`Initial index complete`,{filesProcessed:r.filesProcessed,filesSkipped:r.filesSkipped,chunksCreated:r.chunksCreated,durationMs:r.durationMs});try{await t.store.createFtsIndex()}catch(e){J.warn(`FTS index creation failed`,G(e))}try{let e=await t.curated.reindexAll();J.info(`Curated re-index complete`,{indexed:e.indexed})}catch(e){J.error(`Curated re-index failed`,G(e))}}catch(e){J.error(`Initial index failed; will retry on kb_reindex`,G(e))}},l=process.ppid,u=setInterval(()=>{try{process.kill(l,0)}catch{J.info(`Parent process died; shutting down`,{parentPid:l}),clearInterval(u),o.then(async e=>{await e.embedder.shutdown().catch(()=>{}),await e.graphStore.close().catch(()=>{}),await e.store.close().catch(()=>{})}).catch(()=>{}).finally(()=>process.exit(0))}},5e3);return u.unref(),{server:t,ready:s,runInitialIndex:c}}export{$ as ALL_TOOL_NAMES,We as createLazyServer,X as createMcpServer,He as createServer,Y as initializeKnowledgeBase,Z as registerMcpTools};
@@ -1,6 +1,6 @@
1
1
  import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
2
2
  import { IEmbedder } from "@kb/embeddings";
3
- import { IKnowledgeStore } from "@kb/store";
3
+ import { IGraphStore, IKnowledgeStore } from "@kb/store";
4
4
 
5
5
  //#region packages/server/src/tools/analyze.tools.d.ts
6
6
  declare function registerAnalyzeStructureTool(server: McpServer, store: IKnowledgeStore, embedder: IEmbedder): void;
@@ -9,6 +9,6 @@ declare function registerAnalyzeSymbolsTool(server: McpServer, store: IKnowledge
9
9
  declare function registerAnalyzePatternsTool(server: McpServer, store: IKnowledgeStore, embedder: IEmbedder): void;
10
10
  declare function registerAnalyzeEntryPointsTool(server: McpServer, store: IKnowledgeStore, embedder: IEmbedder): void;
11
11
  declare function registerAnalyzeDiagramTool(server: McpServer, store: IKnowledgeStore, embedder: IEmbedder): void;
12
- declare function registerBlastRadiusTool(server: McpServer, store: IKnowledgeStore, embedder: IEmbedder): void;
12
+ declare function registerBlastRadiusTool(server: McpServer, store: IKnowledgeStore, embedder: IEmbedder, graphStore?: IGraphStore): void;
13
13
  //#endregion
14
14
  export { registerAnalyzeDependenciesTool, registerAnalyzeDiagramTool, registerAnalyzeEntryPointsTool, registerAnalyzePatternsTool, registerAnalyzeStructureTool, registerAnalyzeSymbolsTool, registerBlastRadiusTool };
@@ -1 +1,2 @@
1
- import{createHash as e}from"node:crypto";import{BlastRadiusAnalyzer as t,DependencyAnalyzer as n,DiagramGenerator as r,EntryPointAnalyzer as i,PatternAnalyzer as a,StructureAnalyzer as o,SymbolAnalyzer as s}from"../../../analyzers/dist/index.js";import{WasmRuntime as c}from"../../../chunker/dist/index.js";import{createLogger as l,serializeError as u}from"../../../core/dist/index.js";import{truncateToTokenBudget as d}from"../../../tools/dist/index.js";import{z as f}from"zod";const p=l(`tools`),m=f.number().min(100).max(5e4).optional().describe(`Maximum token budget for the response. When set, output is truncated to fit.`);function h(e,t){return t?d(e,t):e}function g(){let e=[];return c.get()||e.push(`Tree-sitter unavailable — using regex fallback, symbol/pattern confidence reduced`),e.length===0?``:`\n\n> **⚠ Caveats:** ${e.join(`; `)}`}async function _(t,n,r,i,a){try{let o=`produced/analysis/${r}/${e(`sha256`).update(i).digest(`hex`).slice(0,12)}.md`,s=e(`sha256`).update(a).digest(`hex`).slice(0,16),c=new Date().toISOString(),l=a.length>2e3?a.split(/(?=^## )/m).filter(e=>e.trim().length>0):[a],u=l.map((t,n)=>({id:e(`sha256`).update(`${o}::${n}`).digest(`hex`).slice(0,16),content:t.trim(),sourcePath:o,contentType:`produced-knowledge`,chunkIndex:n,totalChunks:l.length,startLine:0,endLine:0,fileHash:s,indexedAt:c,origin:`produced`,tags:[`analysis`,r],category:`analysis`,version:1})),d=await n.embedBatch(u.map(e=>e.content));await t.upsert(u,d),p.info(`Auto-persisted analysis`,{analyzerName:r,chunkCount:u.length})}catch(e){p.warn(`Auto-persist analysis failed`,{analyzerName:r,...u(e)})}}function v(e,t,n){let r=new o;e.registerTool(`analyze_structure`,{description:`Analyze the file/directory structure of a codebase. 
Returns an annotated tree with language stats.`,inputSchema:{path:f.string().describe(`Root path to analyze`),max_depth:f.number().min(1).max(10).default(6).describe(`Maximum directory depth`),format:f.enum([`json`,`markdown`]).default(`markdown`).describe(`Output format`),max_tokens:m}},async({path:e,max_depth:i,format:a,max_tokens:o})=>{try{let s=await r.analyze(e,{format:a,maxDepth:i});return _(t,n,`structure`,e,s.output),{content:[{type:`text`,text:h(s.output+"\n\n---\n_Analysis auto-saved to KB. Next: Use `analyze_dependencies` for import graphs, or `analyze_patterns` to detect architecture patterns._",o)}]}}catch(e){return p.error(`Analysis failed`,u(e)),{content:[{type:`text`,text:`Analysis failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}function y(e,t,r){let i=new n;e.registerTool(`analyze_dependencies`,{description:`Analyze import/require dependencies across a codebase. Shows external packages and internal module graph.`,inputSchema:{path:f.string().describe(`Root path to analyze`),format:f.enum([`json`,`markdown`,`mermaid`]).default(`markdown`).describe(`Output format`),max_tokens:m}},async({path:e,format:n,max_tokens:a})=>{try{let o=await i.analyze(e,{format:n});return _(t,r,`dependencies`,e,o.output),{content:[{type:`text`,text:h(o.output+"\n\n---\n_Analysis auto-saved to KB. 
Next: Use `analyze_symbols` to explore exported symbols, or `analyze_diagram` for visual representation._",a)}]}}catch(e){return p.error(`Analysis failed`,u(e)),{content:[{type:`text`,text:`Analysis failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}function b(e,t,n){let r=new s;e.registerTool(`analyze_symbols`,{description:`Extract exported and local symbols (functions, classes, interfaces, types, constants) from a codebase.`,inputSchema:{path:f.string().describe(`Root path to analyze`),filter:f.string().optional().describe(`Filter symbols by name substring`),format:f.enum([`json`,`markdown`]).default(`markdown`).describe(`Output format`)}},async({path:e,filter:i,format:a})=>{try{let o=await r.analyze(e,{format:a,filter:i});return _(t,n,`symbols`,e,o.output),{content:[{type:`text`,text:o.output+g()+"\n\n---\n_Analysis auto-saved to KB. Next: Use `analyze_dependencies` to see import relationships, or `search` to find usage patterns._"}]}}catch(e){return p.error(`Analysis failed`,u(e)),{content:[{type:`text`,text:`Analysis failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}function x(e,t,n){let r=new a;e.registerTool(`analyze_patterns`,{description:`Detect architectural patterns, frameworks, and conventions in a codebase using directory structure and code heuristics.`,inputSchema:{path:f.string().describe(`Root path to analyze`)}},async({path:e})=>{try{let i=await r.analyze(e);return _(t,n,`patterns`,e,i.output),{content:[{type:`text`,text:i.output+g()+"\n\n---\n_Analysis auto-saved to KB. 
Next: Use `analyze_entry_points` to find Lambda handlers and main exports, or `produce_knowledge` for full analysis._"}]}}catch(e){return p.error(`Analysis failed`,u(e)),{content:[{type:`text`,text:`Analysis failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}function S(e,t,n){let r=new i;e.registerTool(`analyze_entry_points`,{description:`Find entry points: Lambda handlers, main exports, CLI bins, and server start scripts.`,inputSchema:{path:f.string().describe(`Root path to analyze`)}},async({path:e})=>{try{let i=await r.analyze(e);return _(t,n,`entry-points`,e,i.output),{content:[{type:`text`,text:i.output+g()+"\n\n---\n_Analysis auto-saved to KB. Next: Use `analyze_dependencies` to see what each entry point imports, or `produce_knowledge` for comprehensive analysis._"}]}}catch(e){return p.error(`Analysis failed`,u(e)),{content:[{type:`text`,text:`Analysis failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}function C(e,t,n){let i=new r;e.registerTool(`analyze_diagram`,{description:`Generate a Mermaid diagram of the codebase architecture or dependency graph.`,inputSchema:{path:f.string().describe(`Root path to analyze`),diagram_type:f.enum([`architecture`,`dependencies`]).default(`architecture`).describe(`Type of diagram`)}},async({path:e,diagram_type:r})=>{try{let a=await i.analyze(e,{diagramType:r});return _(t,n,`diagram`,e,a.output),{content:[{type:`text`,text:a.output+"\n\n---\n_Analysis auto-saved to KB. Next: Use `analyze_structure` for detailed file tree, or `produce_knowledge` for comprehensive analysis._"}]}}catch(e){return p.error(`Diagram generation failed`,u(e)),{content:[{type:`text`,text:`Diagram generation failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}function w(e,n,r){let i=new t;e.registerTool(`blast_radius`,{description:`Given a list of changed files, trace the dependency graph to find all affected files (direct + transitive importers) and their tests. 
Useful for scoping code reviews and impact analysis.`,inputSchema:{path:f.string().describe(`Root path of the codebase`),files:f.array(f.string()).min(1).describe(`Changed file paths (relative to root)`),max_depth:f.number().min(1).max(20).default(5).describe(`Maximum transitive dependency depth`),format:f.enum([`json`,`markdown`]).default(`markdown`).describe(`Output format`),max_tokens:m}},async({path:e,files:t,max_depth:a,format:o,max_tokens:s})=>{try{let c=await i.analyze(e,{files:t,maxDepth:a,format:o});return _(n,r,`blast-radius`,e,c.output),{content:[{type:`text`,text:h(c.output+g()+"\n\n---\n_Analysis auto-saved to KB. Next: Use `analyze_dependencies` to see the full import graph, or `analyze_symbols` to inspect affected exports._",s)}]}}catch(e){return p.error(`Blast radius analysis failed`,u(e)),{content:[{type:`text`,text:`Blast radius analysis failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}export{y as registerAnalyzeDependenciesTool,C as registerAnalyzeDiagramTool,S as registerAnalyzeEntryPointsTool,x as registerAnalyzePatternsTool,v as registerAnalyzeStructureTool,b as registerAnalyzeSymbolsTool,w as registerBlastRadiusTool};
1
+ import{createHash as e}from"node:crypto";import{BlastRadiusAnalyzer as t,DependencyAnalyzer as n,DiagramGenerator as r,EntryPointAnalyzer as i,PatternAnalyzer as a,StructureAnalyzer as o,SymbolAnalyzer as s}from"../../../analyzers/dist/index.js";import{WasmRuntime as c}from"../../../chunker/dist/index.js";import{createLogger as l,serializeError as u}from"../../../core/dist/index.js";import{truncateToTokenBudget as d}from"../../../tools/dist/index.js";import{z as f}from"zod";const p=l(`tools`),m=f.number().min(100).max(5e4).optional().describe(`Maximum token budget for the response. When set, output is truncated to fit.`);function h(e,t){return t?d(e,t):e}function g(){let e=[];return c.get()||e.push(`Tree-sitter unavailable — using regex fallback, symbol/pattern confidence reduced`),e.length===0?``:`\n\n> **⚠ Caveats:** ${e.join(`; `)}`}async function _(t,n,r,i,a){try{let o=`produced/analysis/${r}/${e(`sha256`).update(i).digest(`hex`).slice(0,12)}.md`,s=e(`sha256`).update(a).digest(`hex`).slice(0,16),c=new Date().toISOString(),l=a.length>2e3?a.split(/(?=^## )/m).filter(e=>e.trim().length>0):[a],u=l.map((t,n)=>({id:e(`sha256`).update(`${o}::${n}`).digest(`hex`).slice(0,16),content:t.trim(),sourcePath:o,contentType:`produced-knowledge`,chunkIndex:n,totalChunks:l.length,startLine:0,endLine:0,fileHash:s,indexedAt:c,origin:`produced`,tags:[`analysis`,r],category:`analysis`,version:1})),d=await n.embedBatch(u.map(e=>e.content));await t.upsert(u,d),p.info(`Auto-persisted analysis`,{analyzerName:r,chunkCount:u.length})}catch(e){p.warn(`Auto-persist analysis failed`,{analyzerName:r,...u(e)})}}function v(e,t,n){let r=new o;e.registerTool(`analyze_structure`,{description:`Analyze the file/directory structure of a codebase. 
Returns an annotated tree with language stats.`,inputSchema:{path:f.string().describe(`Root path to analyze`),max_depth:f.number().min(1).max(10).default(6).describe(`Maximum directory depth`),format:f.enum([`json`,`markdown`]).default(`markdown`).describe(`Output format`),max_tokens:m}},async({path:e,max_depth:i,format:a,max_tokens:o})=>{try{let s=await r.analyze(e,{format:a,maxDepth:i});return _(t,n,`structure`,e,s.output),{content:[{type:`text`,text:h(s.output+"\n\n---\n_Analysis auto-saved to KB. Next: Use `analyze_dependencies` for import graphs, or `analyze_patterns` to detect architecture patterns._",o)}]}}catch(e){return p.error(`Analysis failed`,u(e)),{content:[{type:`text`,text:`Analysis failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}function y(e,t,r){let i=new n;e.registerTool(`analyze_dependencies`,{description:`Analyze import/require dependencies across a codebase. Shows external packages and internal module graph.`,inputSchema:{path:f.string().describe(`Root path to analyze`),format:f.enum([`json`,`markdown`,`mermaid`]).default(`markdown`).describe(`Output format`),max_tokens:m}},async({path:e,format:n,max_tokens:a})=>{try{let o=await i.analyze(e,{format:n});return _(t,r,`dependencies`,e,o.output),{content:[{type:`text`,text:h(o.output+"\n\n---\n_Analysis auto-saved to KB. 
Next: Use `analyze_symbols` to explore exported symbols, or `analyze_diagram` for visual representation._",a)}]}}catch(e){return p.error(`Analysis failed`,u(e)),{content:[{type:`text`,text:`Analysis failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}function b(e,t,n){let r=new s;e.registerTool(`analyze_symbols`,{description:`Extract exported and local symbols (functions, classes, interfaces, types, constants) from a codebase.`,inputSchema:{path:f.string().describe(`Root path to analyze`),filter:f.string().optional().describe(`Filter symbols by name substring`),format:f.enum([`json`,`markdown`]).default(`markdown`).describe(`Output format`)}},async({path:e,filter:i,format:a})=>{try{let o=await r.analyze(e,{format:a,filter:i});return _(t,n,`symbols`,e,o.output),{content:[{type:`text`,text:o.output+g()+"\n\n---\n_Analysis auto-saved to KB. Next: Use `analyze_dependencies` to see import relationships, or `search` to find usage patterns._"}]}}catch(e){return p.error(`Analysis failed`,u(e)),{content:[{type:`text`,text:`Analysis failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}function x(e,t,n){let r=new a;e.registerTool(`analyze_patterns`,{description:`Detect architectural patterns, frameworks, and conventions in a codebase using directory structure and code heuristics.`,inputSchema:{path:f.string().describe(`Root path to analyze`)}},async({path:e})=>{try{let i=await r.analyze(e);return _(t,n,`patterns`,e,i.output),{content:[{type:`text`,text:i.output+g()+"\n\n---\n_Analysis auto-saved to KB. 
Next: Use `analyze_entry_points` to find Lambda handlers and main exports, or `produce_knowledge` for full analysis._"}]}}catch(e){return p.error(`Analysis failed`,u(e)),{content:[{type:`text`,text:`Analysis failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}function S(e,t,n){let r=new i;e.registerTool(`analyze_entry_points`,{description:`Find entry points: Lambda handlers, main exports, CLI bins, and server start scripts.`,inputSchema:{path:f.string().describe(`Root path to analyze`)}},async({path:e})=>{try{let i=await r.analyze(e);return _(t,n,`entry-points`,e,i.output),{content:[{type:`text`,text:i.output+g()+"\n\n---\n_Analysis auto-saved to KB. Next: Use `analyze_dependencies` to see what each entry point imports, or `produce_knowledge` for comprehensive analysis._"}]}}catch(e){return p.error(`Analysis failed`,u(e)),{content:[{type:`text`,text:`Analysis failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}function C(e,t,n){let i=new r;e.registerTool(`analyze_diagram`,{description:`Generate a Mermaid diagram of the codebase architecture or dependency graph.`,inputSchema:{path:f.string().describe(`Root path to analyze`),diagram_type:f.enum([`architecture`,`dependencies`]).default(`architecture`).describe(`Type of diagram`)}},async({path:e,diagram_type:r})=>{try{let a=await i.analyze(e,{diagramType:r});return _(t,n,`diagram`,e,a.output),{content:[{type:`text`,text:a.output+"\n\n---\n_Analysis auto-saved to KB. Next: Use `analyze_structure` for detailed file tree, or `produce_knowledge` for comprehensive analysis._"}]}}catch(e){return p.error(`Diagram generation failed`,u(e)),{content:[{type:`text`,text:`Diagram generation failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}function w(e,n,r,i){let a=new t;e.registerTool(`blast_radius`,{description:`Given a list of changed files, trace the dependency graph to find all affected files (direct + transitive importers) and their tests. 
Useful for scoping code reviews and impact analysis.`,inputSchema:{path:f.string().describe(`Root path of the codebase`),files:f.array(f.string()).min(1).describe(`Changed file paths (relative to root)`),max_depth:f.number().min(1).max(20).default(5).describe(`Maximum transitive dependency depth`),format:f.enum([`json`,`markdown`]).default(`markdown`).describe(`Output format`),max_tokens:m}},async({path:e,files:t,max_depth:o,format:s,max_tokens:c})=>{try{let l=await a.analyze(e,{files:t,maxDepth:o,format:s}),u=``;if(i)try{let e=new Set;for(let n of t){let r=await i.findNodes({sourcePath:n,limit:10});for(let n of r){let r=await i.getNeighbors(n.id,{direction:`incoming`,edgeType:`imports`});for(let n of r.nodes){let r=n.sourcePath??n.name;t.includes(r)||e.add(r)}}}e.size>0&&(u=`\n\n### Graph-discovered importers (${e.size})\n`+[...e].slice(0,20).map(e=>`- \`${e}\``).join(`
2
+ `),e.size>20&&(u+=`\n- ... and ${e.size-20} more`))}catch{}return _(n,r,`blast-radius`,e,l.output),{content:[{type:`text`,text:h(l.output+u+g()+"\n\n---\n_Analysis auto-saved to KB. Next: Use `analyze_dependencies` to see the full import graph, or `analyze_symbols` to inspect affected exports._",c)}]}}catch(e){return p.error(`Blast radius analysis failed`,u(e)),{content:[{type:`text`,text:`Blast radius analysis failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}export{y as registerAnalyzeDependenciesTool,C as registerAnalyzeDiagramTool,S as registerAnalyzeEntryPointsTool,x as registerAnalyzePatternsTool,v as registerAnalyzeStructureTool,b as registerAnalyzeSymbolsTool,w as registerBlastRadiusTool};
@@ -1,4 +1,4 @@
1
- import{fanOutFtsSearch as e,fanOutSearch as t,openWorkspaceStores as n,resolveWorkspaces as r}from"../cross-workspace.js";import{stat as i}from"node:fs/promises";import{CONTENT_TYPES as a,KNOWLEDGE_ORIGINS as o,SOURCE_TYPES as s,computePartitionKey as c,createLogger as l,serializeError as u}from"../../../core/dist/index.js";import{graphAugmentSearch as d,truncateToTokenBudget as f}from"../../../tools/dist/index.js";import{z as p}from"zod";import{mergeResults as m}from"../../../enterprise-bridge/dist/index.js";const h=l(`tools`);async function g(e,t,n,r,i){if(!e||t>=e.config.fallbackThreshold&&n.length>0)return{results:n,triggered:!1,cacheHit:!1};let a=!1;try{let t=e.cache.get(r);return t?a=!0:(t=await e.client.search(r,i),t.length>0&&e.cache.set(r,t)),t.length>0?{results:m(n,t,i).map(e=>({record:{id:`er:${e.sourcePath}`,content:e.content,sourcePath:e.source===`er`?`[ER] ${e.sourcePath}`:e.sourcePath,startLine:e.startLine??0,endLine:e.endLine??0,contentType:e.contentType??`documentation`,headingPath:e.headingPath,origin:e.source===`er`?`curated`:e.origin??`indexed`,category:e.category,tags:e.tags??[],chunkIndex:0,totalChunks:1,fileHash:``,indexedAt:new Date().toISOString(),version:1},score:e.score})),triggered:!0,cacheHit:a}:{results:n,triggered:!0,cacheHit:a}}catch(e){return h.warn(`ER fallback failed`,u(e)),{results:n,triggered:!0,cacheHit:a}}}function _(e,t,n=60){let r=new Map;for(let t=0;t<e.length;t++){let i=e[t];r.set(i.record.id,{record:i.record,score:1/(n+t+1)})}for(let e=0;e<t.length;e++){let i=t[e],a=r.get(i.record.id);a?a.score+=1/(n+e+1):r.set(i.record.id,{record:i.record,score:1/(n+e+1)})}return[...r.values()].sort((e,t)=>t.score-e.score).map(({record:e,score:t})=>({record:e,score:t}))}function v(e,t){let n=t.toLowerCase().split(/\s+/).filter(e=>e.length>=2);return n.length<2?e:e.map(e=>{let t=e.record.content.toLowerCase(),r=n.map(e=>{let n=[],r=t.indexOf(e);for(;r!==-1;)n.push(r),r=t.indexOf(e,r+1);return n});if(r.some(e=>e.length===0))return e;let 
i=t.length;for(let e of r[0]){let t=e,a=e+n[0].length;for(let i=1;i<r.length;i++){let o=r[i][0],s=Math.abs(o-e);for(let t=1;t<r[i].length;t++){let n=Math.abs(r[i][t]-e);n<s&&(s=n,o=r[i][t])}t=Math.min(t,o),a=Math.max(a,o+n[i].length)}i=Math.min(i,a-t)}let a=1+.25/(1+i/200);return{record:e.record,score:e.score*a}}).sort((e,t)=>t.score-e.score)}function y(e,t,n=8){let r=new Set(t.toLowerCase().split(/\s+/).filter(e=>e.length>=2)),i=new Map,a=e.length;for(let t of e){let e=new Set(t.record.content.split(/[^a-zA-Z0-9_]+/).filter(e=>e.length>=3&&!b.has(e.toLowerCase())));for(let t of e){let e=t.toLowerCase();/[_A-Z]/.test(t)&&i.set(`__id__${e}`,1)}let n=new Set(t.record.content.toLowerCase().split(/[^a-zA-Z0-9_]+/).filter(e=>e.length>=3&&!b.has(e)));for(let e of n)i.set(e,(i.get(e)??0)+1)}let o=[];for(let[e,t]of i){if(e.startsWith(`__id__`)||r.has(e)||t>a*.8)continue;let n=Math.log(a/t),s=i.has(`__id__${e}`)?1:0,c=e.length>8?.5:0;o.push({term:e,score:n+s+c})}return o.sort((e,t)=>t.score-e.score).slice(0,n).map(e=>e.term)}const b=new Set(`the.and.for.are.but.not.you.all.can.had.her.was.one.our.out.has.have.from.this.that.with.they.been.said.each.which.their.will.other.about.many.then.them.these.some.would.make.like.into.could.time.very.when.come.just.know.take.people.also.back.after.only.more.than.over.such.import.export.const.function.return.true.false.null.undefined.string.number.boolean.void.type.interface`.split(`.`));async function x(e,t){try{let n=await e.getStats();if(!n.lastIndexedAt)return;let r=new Date(n.lastIndexedAt).getTime(),a=Date.now(),o=[...new Set(t.map(e=>e.record.sourcePath))].filter(e=>!e.startsWith(`[ER]`)).slice(0,5);if(o.length===0)return;let s=0;for(let e of o)try{(await i(e)).mtimeMs>r&&s++}catch{s++}if(s>0){let e=a-r,t=Math.floor(e/6e4),n=t<1?`<1 min`:`${t} min`;return`> ⚠️ **Index may be stale** — ${s} file(s) modified since last index (${n} ago). 
Use \`reindex\` to refresh.`}}catch{}}function S(i,l,m,b,S,C){i.registerTool(`search`,{description:`Search the knowledge base with hybrid vector + keyword matching (BM25 + RRF fusion). Best for finding code, docs, and prior decisions. Supports semantic, keyword, and hybrid modes.`,inputSchema:{query:p.string().max(5e3).describe(`Natural language search query`),limit:p.number().min(1).max(20).default(5).describe(`Maximum results to return`),search_mode:p.enum([`hybrid`,`semantic`,`keyword`]).default(`hybrid`).describe(`Search strategy: hybrid (vector + FTS + RRF fusion, default), semantic (vector only), keyword (FTS only)`),content_type:p.enum(a).optional().describe(`Filter by content type`),source_type:p.enum(s).optional().describe(`Coarse filter: "source" (code only), "documentation" (md, curated), "test", "config". Overrides content_type if both set.`),origin:p.enum(o).optional().describe(`Filter by knowledge origin`),category:p.string().optional().describe(`Filter by category (e.g., decisions, patterns, conventions)`),tags:p.array(p.string()).optional().describe(`Filter by tags (returns results matching ANY of the specified tags)`),min_score:p.number().min(0).max(1).default(.25).describe(`Minimum similarity score`),graph_hops:p.number().min(0).max(3).default(0).describe(`Number of graph hops to augment results with (0 = no graph context, 1-3 = enrich results with connected entities)`),max_tokens:p.number().min(100).max(5e4).optional().describe(`Maximum token budget for the response. When set, output is truncated to fit.`),dedup:p.enum([`file`,`chunk`]).default(`chunk`).describe(`Deduplication mode: "chunk" (default, show all matching chunks) or "file" (collapse chunks from same file into single result with merged line ranges)`),workspaces:p.array(p.string()).optional().describe(`Cross-workspace search: partition names or folder basenames to include. Use ["*"] for all registered workspaces. 
Only works in global install mode.`)}},async({query:i,limit:a,search_mode:o,content_type:s,source_type:p,origin:w,category:T,tags:E,min_score:D,graph_hops:O,max_tokens:k,dedup:A,workspaces:j})=>{try{let M={limit:a,minScore:D,contentType:s,sourceType:p,origin:w,category:T,tags:E},N,P=!1,F=!1;if(o===`keyword`)N=await m.ftsSearch(i,M),N=N.slice(0,a);else if(o===`semantic`){let e=await l.embedQuery(i);N=await m.search(e,M);let t=await g(S,N[0]?.score??0,N,i,a);N=t.results,P=t.triggered,F=t.cacheHit}else{let e=await l.embedQuery(i),[t,n]=await Promise.all([m.search(e,{...M,limit:a*2}),m.ftsSearch(i,{...M,limit:a*2}).catch(()=>[])]);N=_(t,n).slice(0,a);let r=await g(S,t[0]?.score??0,N,i,a);N=r.results,P=r.triggered,F=r.cacheHit}C&&C.recordSearch(i,P,F),N.length>1&&(N=v(N,i));let I=``;if(j&&j.length>0){let s=r(j,c(process.cwd()));if(s.length>0){let{stores:r,closeAll:c}=await n(s);try{let n;n=o===`keyword`?await e(r,i,{...M,limit:a}):await t(r,await l.embedQuery(i),{...M,limit:a});for(let e of n)N.push({record:{...e.record,sourcePath:`[${e.workspace}] ${e.record.sourcePath}`},score:e.score});N=N.sort((e,t)=>t.score-e.score).slice(0,a),I=` + ${s.length} workspace(s)`}finally{await c()}}}if(A===`file`&&N.length>1){let e=new Map;for(let t of N){let n=t.record.sourcePath,r=e.get(n);r?(t.score>r.best.score&&(r.best=t),r.ranges.push({start:t.record.startLine,end:t.record.endLine})):e.set(n,{best:t,ranges:[{start:t.record.startLine,end:t.record.endLine}]})}N=[...e.values()].sort((e,t)=>t.best.score-e.best.score).map(({best:e,ranges:t})=>({record:{...e.record,content:t.length>1?`${e.record.content}\n\n_Matched ${t.length} sections: ${t.sort((e,t)=>e.start-t.start).map(e=>`L${e.start}-${e.end}`).join(`, `)}_`:e.record.content},score:e.score}))}if(N.length===0)return{content:[{type:`text`,text:`No results found for the given query.`}]};let L,R;if(O>0&&!b&&(R="> **Note:** `graph_hops` was set but no graph store is available. 
Graph augmentation skipped."),O>0&&b)try{let e=await d(b,N.map(e=>({recordId:e.record.id,score:e.score,sourcePath:e.record.sourcePath})),{hops:O,maxPerHit:5});L=new Map;for(let t of e)if(t.graphContext.nodes.length>0){let e=t.graphContext.nodes.slice(0,5).map(e=>` - **${e.name}** (${e.type})`).join(`
1
+ import{fanOutFtsSearch as e,fanOutSearch as t,openWorkspaceStores as n,resolveWorkspaces as r}from"../cross-workspace.js";import{stat as i}from"node:fs/promises";import{CONTENT_TYPES as a,KNOWLEDGE_ORIGINS as o,SOURCE_TYPES as s,computePartitionKey as c,createLogger as l,serializeError as u}from"../../../core/dist/index.js";import{graphAugmentSearch as d,truncateToTokenBudget as f}from"../../../tools/dist/index.js";import{z as p}from"zod";import{mergeResults as m}from"../../../enterprise-bridge/dist/index.js";const h=l(`tools`);async function g(e,t,n,r,i){if(!e||t>=e.config.fallbackThreshold&&n.length>0)return{results:n,triggered:!1,cacheHit:!1};let a=!1;try{let t=e.cache.get(r);return t?a=!0:(t=await e.client.search(r,i),t.length>0&&e.cache.set(r,t)),t.length>0?{results:m(n,t,i).map(e=>({record:{id:`er:${e.sourcePath}`,content:e.content,sourcePath:e.source===`er`?`[ER] ${e.sourcePath}`:e.sourcePath,startLine:e.startLine??0,endLine:e.endLine??0,contentType:e.contentType??`documentation`,headingPath:e.headingPath,origin:e.source===`er`?`curated`:e.origin??`indexed`,category:e.category,tags:e.tags??[],chunkIndex:0,totalChunks:1,fileHash:``,indexedAt:new Date().toISOString(),version:1},score:e.score})),triggered:!0,cacheHit:a}:{results:n,triggered:!0,cacheHit:a}}catch(e){return h.warn(`ER fallback failed`,u(e)),{results:n,triggered:!0,cacheHit:a}}}function _(e,t,n=60){let r=new Map;for(let t=0;t<e.length;t++){let i=e[t];r.set(i.record.id,{record:i.record,score:1/(n+t+1)})}for(let e=0;e<t.length;e++){let i=t[e],a=r.get(i.record.id);a?a.score+=1/(n+e+1):r.set(i.record.id,{record:i.record,score:1/(n+e+1)})}return[...r.values()].sort((e,t)=>t.score-e.score).map(({record:e,score:t})=>({record:e,score:t}))}function v(e,t){let n=t.toLowerCase().split(/\s+/).filter(e=>e.length>=2);return n.length<2?e:e.map(e=>{let t=e.record.content.toLowerCase(),r=n.map(e=>{let n=[],r=t.indexOf(e);for(;r!==-1;)n.push(r),r=t.indexOf(e,r+1);return n});if(r.some(e=>e.length===0))return e;let 
i=t.length;for(let e of r[0]){let t=e,a=e+n[0].length;for(let i=1;i<r.length;i++){let o=r[i][0],s=Math.abs(o-e);for(let t=1;t<r[i].length;t++){let n=Math.abs(r[i][t]-e);n<s&&(s=n,o=r[i][t])}t=Math.min(t,o),a=Math.max(a,o+n[i].length)}i=Math.min(i,a-t)}let a=1+.25/(1+i/200);return{record:e.record,score:e.score*a}}).sort((e,t)=>t.score-e.score)}function y(e,t,n=8){let r=new Set(t.toLowerCase().split(/\s+/).filter(e=>e.length>=2)),i=new Map,a=e.length;for(let t of e){let e=new Set(t.record.content.split(/[^a-zA-Z0-9_]+/).filter(e=>e.length>=3&&!b.has(e.toLowerCase())));for(let t of e){let e=t.toLowerCase();/[_A-Z]/.test(t)&&i.set(`__id__${e}`,1)}let n=new Set(t.record.content.toLowerCase().split(/[^a-zA-Z0-9_]+/).filter(e=>e.length>=3&&!b.has(e)));for(let e of n)i.set(e,(i.get(e)??0)+1)}let o=[];for(let[e,t]of i){if(e.startsWith(`__id__`)||r.has(e)||t>a*.8)continue;let n=Math.log(a/t),s=i.has(`__id__${e}`)?1:0,c=e.length>8?.5:0;o.push({term:e,score:n+s+c})}return o.sort((e,t)=>t.score-e.score).slice(0,n).map(e=>e.term)}const b=new Set(`the.and.for.are.but.not.you.all.can.had.her.was.one.our.out.has.have.from.this.that.with.they.been.said.each.which.their.will.other.about.many.then.them.these.some.would.make.like.into.could.time.very.when.come.just.know.take.people.also.back.after.only.more.than.over.such.import.export.const.function.return.true.false.null.undefined.string.number.boolean.void.type.interface`.split(`.`));async function x(e,t){try{let n=await e.getStats();if(!n.lastIndexedAt)return;let r=new Date(n.lastIndexedAt).getTime(),a=Date.now(),o=[...new Set(t.map(e=>e.record.sourcePath))].filter(e=>!e.startsWith(`[ER]`)).slice(0,5);if(o.length===0)return;let s=0;for(let e of o)try{(await i(e)).mtimeMs>r&&s++}catch{s++}if(s>0){let e=a-r,t=Math.floor(e/6e4),n=t<1?`<1 min`:`${t} min`;return`> ⚠️ **Index may be stale** — ${s} file(s) modified since last index (${n} ago). 
Use \`reindex\` to refresh.`}}catch{}}function S(i,l,m,b,S,C){i.registerTool(`search`,{description:`Search the knowledge base with hybrid vector + keyword matching (BM25 + RRF fusion). Best for finding code, docs, and prior decisions. Supports semantic, keyword, and hybrid modes.`,inputSchema:{query:p.string().max(5e3).describe(`Natural language search query`),limit:p.number().min(1).max(20).default(5).describe(`Maximum results to return`),search_mode:p.enum([`hybrid`,`semantic`,`keyword`]).default(`hybrid`).describe(`Search strategy: hybrid (vector + FTS + RRF fusion, default), semantic (vector only), keyword (FTS only)`),content_type:p.enum(a).optional().describe(`Filter by content type`),source_type:p.enum(s).optional().describe(`Coarse filter: "source" (code only), "documentation" (md, curated), "test", "config". Overrides content_type if both set.`),origin:p.enum(o).optional().describe(`Filter by knowledge origin`),category:p.string().optional().describe(`Filter by category (e.g., decisions, patterns, conventions)`),tags:p.array(p.string()).optional().describe(`Filter by tags (returns results matching ANY of the specified tags)`),min_score:p.number().min(0).max(1).default(.25).describe(`Minimum similarity score`),graph_hops:p.number().min(0).max(3).default(1).describe(`Number of graph hops to augment results with connected entities (0 = disabled, 1 = direct connections, 2-3 = deeper traversal). Default 1 provides module/symbol context automatically.`),max_tokens:p.number().min(100).max(5e4).optional().describe(`Maximum token budget for the response. When set, output is truncated to fit.`),dedup:p.enum([`file`,`chunk`]).default(`chunk`).describe(`Deduplication mode: "chunk" (default, show all matching chunks) or "file" (collapse chunks from same file into single result with merged line ranges)`),workspaces:p.array(p.string()).optional().describe(`Cross-workspace search: partition names or folder basenames to include. Use ["*"] for all registered workspaces. 
Only works in global install mode.`)}},async({query:i,limit:a,search_mode:o,content_type:s,source_type:p,origin:w,category:T,tags:E,min_score:D,graph_hops:O,max_tokens:k,dedup:A,workspaces:j})=>{try{let M={limit:a,minScore:D,contentType:s,sourceType:p,origin:w,category:T,tags:E},N,P=!1,F=!1;if(o===`keyword`)N=await m.ftsSearch(i,M),N=N.slice(0,a);else if(o===`semantic`){let e=await l.embedQuery(i);N=await m.search(e,M);let t=await g(S,N[0]?.score??0,N,i,a);N=t.results,P=t.triggered,F=t.cacheHit}else{let e=await l.embedQuery(i),[t,n]=await Promise.all([m.search(e,{...M,limit:a*2}),m.ftsSearch(i,{...M,limit:a*2}).catch(()=>[])]);N=_(t,n).slice(0,a);let r=await g(S,t[0]?.score??0,N,i,a);N=r.results,P=r.triggered,F=r.cacheHit}C&&C.recordSearch(i,P,F),N.length>1&&(N=v(N,i));let I=``;if(j&&j.length>0){let s=r(j,c(process.cwd()));if(s.length>0){let{stores:r,closeAll:c}=await n(s);try{let n;n=o===`keyword`?await e(r,i,{...M,limit:a}):await t(r,await l.embedQuery(i),{...M,limit:a});for(let e of n)N.push({record:{...e.record,sourcePath:`[${e.workspace}] ${e.record.sourcePath}`},score:e.score});N=N.sort((e,t)=>t.score-e.score).slice(0,a),I=` + ${s.length} workspace(s)`}finally{await c()}}}if(A===`file`&&N.length>1){let e=new Map;for(let t of N){let n=t.record.sourcePath,r=e.get(n);r?(t.score>r.best.score&&(r.best=t),r.ranges.push({start:t.record.startLine,end:t.record.endLine})):e.set(n,{best:t,ranges:[{start:t.record.startLine,end:t.record.endLine}]})}N=[...e.values()].sort((e,t)=>t.best.score-e.best.score).map(({best:e,ranges:t})=>({record:{...e.record,content:t.length>1?`${e.record.content}\n\n_Matched ${t.length} sections: ${t.sort((e,t)=>e.start-t.start).map(e=>`L${e.start}-${e.end}`).join(`, `)}_`:e.record.content},score:e.score}))}if(N.length===0)return{content:[{type:`text`,text:`No results found for the given query.`}]};let L,R;if(O>0&&!b&&(R="> **Note:** `graph_hops` was set but no graph store is available. 
Graph augmentation skipped."),O>0&&b)try{let e=await d(b,N.map(e=>({recordId:e.record.id,score:e.score,sourcePath:e.record.sourcePath})),{hops:O,maxPerHit:5});L=new Map;for(let t of e)if(t.graphContext.nodes.length>0){let e=t.graphContext.nodes.slice(0,5).map(e=>` - **${e.name}** (${e.type})`).join(`
2
2
  `),n=t.graphContext.edges.slice(0,5).map(e=>` - ${e.fromId} —[${e.type}]→ ${e.toId}`).join(`
3
3
  `),r=[`- **Graph Context** (${O} hop${O>1?`s`:``}):`];e&&r.push(` Entities:\n${e}`),n&&r.push(` Relationships:\n${n}`),L.set(t.recordId,r.join(`
4
4
  `))}}catch(e){h.warn(`Graph augmentation failed`,u(e)),R=`> **Note:** Graph augmentation failed. Results shown without graph context.`}let z=N.map((e,t)=>{let n=e.record;return`${`### Result ${t+1} (score: ${e.score.toFixed(3)})`}\n${[`- **Source**: ${n.sourcePath}`,n.headingPath?`- **Section**: ${n.headingPath}`:null,`- **Type**: ${n.contentType}`,n.startLine?`- **Lines**: ${n.startLine}-${n.endLine}`:null,n.origin===`indexed`?null:`- **Origin**: ${n.origin}`,n.category?`- **Category**: ${n.category}`:null,n.tags?.length?`- **Tags**: ${n.tags.join(`, `)}`:null,L?.get(n.id)??null].filter(Boolean).join(`
@@ -1,6 +1,6 @@
1
1
  import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
2
2
  import { IEmbedder } from "@kb/embeddings";
3
- import { IKnowledgeStore } from "@kb/store";
3
+ import { IGraphStore, IKnowledgeStore } from "@kb/store";
4
4
  import { FileCache } from "@kb/tools";
5
5
 
6
6
  //#region packages/server/src/tools/toolkit.tools.d.ts
@@ -11,7 +11,7 @@ declare function registerParseOutputTool(server: McpServer): void;
11
11
  declare function registerWorksetTool(server: McpServer): void;
12
12
  declare function registerCheckTool(server: McpServer): void;
13
13
  declare function registerBatchTool(server: McpServer, embedder: IEmbedder, store: IKnowledgeStore): void;
14
- declare function registerSymbolTool(server: McpServer, embedder: IEmbedder, store: IKnowledgeStore): void;
14
+ declare function registerSymbolTool(server: McpServer, embedder: IEmbedder, store: IKnowledgeStore, graphStore?: IGraphStore): void;
15
15
  declare function registerEvalTool(server: McpServer): void;
16
16
  declare function registerTestRunTool(server: McpServer): void;
17
17
  declare function registerStashTool(server: McpServer): void;
@@ -4,12 +4,12 @@ import{fanOutSearch as e,openWorkspaceStores as t,resolveWorkspaces as n}from"..
4
4
  `),p):g.join(`
5
5
  `)}]}}catch(e){return X.error(`Find failed`,o(e)),{content:[{type:`text`,text:`Find failed. Check server logs for details.`}],isError:!0}}})}function we(e){e.registerTool(`parse_output`,{description:`Parse structured data from build tool output. Supports tsc, vitest, biome, and git status. Auto-detects the tool or specify explicitly.`,inputSchema:{output:Y.string().max(5e5).describe(`Raw output text from a build tool`),tool:Y.enum([`tsc`,`vitest`,`biome`,`git-status`]).optional().describe(`Tool to parse as (auto-detects if omitted)`)}},async({output:e,tool:t})=>{try{let n=k(e.replace(/\\n/g,`
6
6
  `).replace(/\\t/g,` `),t);return{content:[{type:`text`,text:JSON.stringify(n,null,2)}]}}catch(e){return X.error(`Parse failed`,o(e)),{content:[{type:`text`,text:`Parse failed. Check server logs for details.`}],isError:!0}}})}function Te(e){e.registerTool(`workset`,{description:`Manage named file sets (worksets). Save, load, list, add/remove files. Worksets persist across sessions in .kb-state/worksets.json.`,inputSchema:{action:Y.enum([`save`,`get`,`list`,`delete`,`add`,`remove`]).describe(`Operation to perform`),name:Y.string().optional().describe(`Workset name (required for all except list)`),files:Y.array(Y.string()).optional().describe(`File paths (required for save, add, remove)`),description:Y.string().optional().describe(`Description (for save)`)}},async({action:e,name:t,files:n,description:r})=>{try{switch(e){case`save`:{if(!t||!n)throw Error(`name and files required for save`);let e=K(t,n,{description:r});return{content:[{type:`text`,text:`Saved workset "${e.name}" with ${e.files.length} files.`}]}}case`get`:{if(!t)throw Error(`name required for get`);let e=v(t);return e?{content:[{type:`text`,text:JSON.stringify(e,null,2)}]}:{content:[{type:`text`,text:`Workset "${t}" not found.`}]}}case`list`:{let e=O();return e.length===0?{content:[{type:`text`,text:`No worksets.`}]}:{content:[{type:`text`,text:e.map(e=>`- **${e.name}** (${e.files.length} files) — ${e.description??`no description`}`).join(`
7
- `)}]}}case`delete`:if(!t)throw Error(`name required for delete`);return{content:[{type:`text`,text:ne(t)?`Deleted workset "${t}".`:`Workset "${t}" not found.`}]};case`add`:{if(!t||!n)throw Error(`name and files required for add`);let e=s(t,n);return{content:[{type:`text`,text:`Added to workset "${e.name}": now ${e.files.length} files.`}]}}case`remove`:{if(!t||!n)throw Error(`name and files required for remove`);let e=W(t,n);return e?{content:[{type:`text`,text:`Removed from workset "${e.name}": now ${e.files.length} files.`}]}:{content:[{type:`text`,text:`Workset "${t}" not found.`}]}}}}catch(e){return X.error(`Workset operation failed`,o(e)),{content:[{type:`text`,text:`Workset operation failed. Check server logs for details.`}],isError:!0}}})}function Ee(e){e.registerTool(`check`,{description:`Run incremental typecheck (tsc) and lint (biome) on the project or specific files. Returns structured error and warning lists. Default detail level is "summary" (~300 tokens).`,inputSchema:{files:Y.array(Y.string()).optional().describe(`Specific files to check (if omitted, checks all)`),cwd:Y.string().optional().describe(`Working directory`),skip_types:Y.boolean().default(!1).describe(`Skip TypeScript typecheck`),skip_lint:Y.boolean().default(!1).describe(`Skip Biome lint`),detail:Y.enum([`summary`,`errors`,`full`]).default(`summary`).describe(`Output detail level: summary (default, ~300 tokens — pass/fail + counts + top errors), errors (parsed error objects), full (includes raw terminal output)`)}},async({files:e,cwd:t,skip_types:n,skip_lint:r,detail:i})=>{try{let a=await l({files:e,cwd:t,skipTypes:n,skipLint:r,detail:i===`summary`?`errors`:i});if(i===`summary`){let e=me(a),t=[];if(a.passed)t.push({tool:`test_run`,reason:`Types and lint clean — run tests next`});else{let e=a.tsc.errors[0]?.file??a.biome.errors[0]?.file;e&&t.push({tool:`symbol`,reason:`Resolve failing symbol in ${e}`,suggested_args:{name:e}}),t.push({tool:`check`,reason:`Re-check after fixing 
errors`,suggested_args:{detail:`errors`}})}return{content:[{type:`text`,text:JSON.stringify({...e,_next:t},null,2)}]}}return{content:[{type:`text`,text:JSON.stringify(a,null,2)}]}}catch(e){return X.error(`Check failed`,o(e)),{content:[{type:`text`,text:`Check failed. Check server logs for details.`}],isError:!0}}})}function De(e,t,n){e.registerTool(`batch`,{description:`Execute multiple built-in operations in parallel with concurrency control. Supported operation types: search, find, and check.`,inputSchema:{operations:Y.array(Y.object({id:Y.string().describe(`Unique ID for this operation`),type:Y.enum([`search`,`find`,`check`]).describe(`Built-in operation type`),args:Y.record(Y.string(),Y.unknown()).describe(`Arguments for the operation`)})).min(1).max(100).describe(`Operations to execute`),concurrency:Y.number().min(1).max(20).default(4).describe(`Max concurrent operations`)}},async({operations:e,concurrency:r})=>{try{let i=await c(e,async e=>Xe(e,t,n),{concurrency:r});return{content:[{type:`text`,text:JSON.stringify(i,null,2)}]}}catch(e){return X.error(`Batch failed`,o(e)),{content:[{type:`text`,text:`Batch failed. Check server logs for details.`}],isError:!0}}})}function Oe(e,r,a){e.registerTool(`symbol`,{description:`Resolve a symbol: find where it is defined, who imports it, and where it is referenced. Works on TypeScript and JavaScript codebases.`,inputSchema:{name:Y.string().describe(`Symbol name to look up (function, class, type, etc.)`),limit:Y.number().min(1).max(50).default(20).describe(`Max results per category`),workspaces:Y.array(Y.string()).optional().describe(`Cross-workspace search: partition names or folder basenames to include. Use ["*"] for all. 
Global mode only.`)}},async({name:e,limit:s,workspaces:c})=>{try{let o=await q(r,a,{name:e,limit:s});if(c&&c.length>0){let a=n(c,i(process.cwd()));if(a.length>0){let{stores:n,closeAll:i}=await t(a);try{for(let[t,i]of n){let n=await q(r,i,{name:e,limit:s});n.definedIn&&!o.definedIn&&(o.definedIn={...n.definedIn,path:`[${t}] ${n.definedIn.path}`});for(let e of n.referencedIn)o.referencedIn.push({...e,path:`[${t}] ${e.path}`});if(n.importedBy){o.importedBy=o.importedBy??[];for(let e of n.importedBy)o.importedBy.push({...e,path:`[${t}] ${e.path}`})}}}finally{await i()}}}return{content:[{type:`text`,text:Qe(o)}]}}catch(e){return X.error(`Symbol lookup failed`,o(e)),{content:[{type:`text`,text:`Symbol lookup failed. Check server logs for details.`}],isError:!0}}})}function ke(e){e.registerTool(`eval`,{description:`Execute a JavaScript or TypeScript snippet in a constrained VM sandbox with a timeout. Captures console output and returned values.`,inputSchema:{code:Y.string().max(1e5).describe(`Code snippet to execute`),lang:Y.enum([`js`,`ts`]).default(`js`).optional().describe(`Language mode: js executes directly, ts strips common type syntax first`),timeout:Y.number().min(1).max(6e4).default(5e3).optional().describe(`Execution timeout in milliseconds`)}},async({code:e,lang:t,timeout:n})=>{try{let r=ie({code:e,lang:t,timeout:n});return r.success?{content:[{type:`text`,text:`Eval succeeded in ${r.durationMs}ms\n\n${r.output}`}]}:{content:[{type:`text`,text:`Eval failed in ${r.durationMs}ms: ${r.error??`Unknown error`}`}],isError:!0}}catch(e){return X.error(`Eval failed`,o(e)),{content:[{type:`text`,text:`Eval failed. 
Check server logs for details.`}],isError:!0}}})}function Ae(e){e.registerTool(`test_run`,{description:`Run Vitest for the current project or a subset of files, then return a structured summary of passing and failing tests.`,inputSchema:{files:Y.array(Y.string()).optional().describe(`Specific test files or patterns to run`),grep:Y.string().optional().describe(`Only run tests whose names match this pattern`),cwd:Y.string().optional().describe(`Working directory for the test run`)}},async({files:e,grep:t,cwd:n})=>{try{let r=await he({files:e,grep:t,cwd:n});return{content:[{type:`text`,text:$e(r)}],isError:!r.passed}}catch(e){return X.error(`Test run failed`,o(e)),{content:[{type:`text`,text:`Test run failed. Check server logs for details.`}],isError:!0}}})}function je(e){e.registerTool(`stash`,{description:`Persist and retrieve named values in .kb-state/stash.json for intermediate results between tool calls.`,inputSchema:{action:Y.enum([`set`,`get`,`list`,`delete`,`clear`]).describe(`Operation to perform on the stash`),key:Y.string().optional().describe(`Entry key for set/get/delete operations`),value:Y.string().optional().describe(`String or JSON value for set operations`)}},async({action:e,key:t,value:n})=>{try{switch(e){case`set`:{if(!t)throw Error(`key required for set`);let e=pe(t,rt(n??``));return{content:[{type:`text`,text:`Stored stash entry "${e.key}" (${e.type}) at ${e.storedAt}.`}]}}case`get`:{if(!t)throw Error(`key required for get`);let e=de(t);return{content:[{type:`text`,text:e?JSON.stringify(e,null,2):`Stash entry "${t}" not found.`}]}}case`list`:{let e=fe();return{content:[{type:`text`,text:e.length===0?`Stash is empty.`:e.map(e=>`- ${e.key} (${e.type}) — ${e.storedAt}`).join(`
7
+ `)}]}}case`delete`:if(!t)throw Error(`name required for delete`);return{content:[{type:`text`,text:ne(t)?`Deleted workset "${t}".`:`Workset "${t}" not found.`}]};case`add`:{if(!t||!n)throw Error(`name and files required for add`);let e=s(t,n);return{content:[{type:`text`,text:`Added to workset "${e.name}": now ${e.files.length} files.`}]}}case`remove`:{if(!t||!n)throw Error(`name and files required for remove`);let e=W(t,n);return e?{content:[{type:`text`,text:`Removed from workset "${e.name}": now ${e.files.length} files.`}]}:{content:[{type:`text`,text:`Workset "${t}" not found.`}]}}}}catch(e){return X.error(`Workset operation failed`,o(e)),{content:[{type:`text`,text:`Workset operation failed. Check server logs for details.`}],isError:!0}}})}function Ee(e){e.registerTool(`check`,{description:`Run incremental typecheck (tsc) and lint (biome) on the project or specific files. Returns structured error and warning lists. Default detail level is "summary" (~300 tokens).`,inputSchema:{files:Y.array(Y.string()).optional().describe(`Specific files to check (if omitted, checks all)`),cwd:Y.string().optional().describe(`Working directory`),skip_types:Y.boolean().default(!1).describe(`Skip TypeScript typecheck`),skip_lint:Y.boolean().default(!1).describe(`Skip Biome lint`),detail:Y.enum([`summary`,`errors`,`full`]).default(`summary`).describe(`Output detail level: summary (default, ~300 tokens — pass/fail + counts + top errors), errors (parsed error objects), full (includes raw terminal output)`)}},async({files:e,cwd:t,skip_types:n,skip_lint:r,detail:i})=>{try{let a=await l({files:e,cwd:t,skipTypes:n,skipLint:r,detail:i===`summary`?`errors`:i});if(i===`summary`){let e=me(a),t=[];if(a.passed)t.push({tool:`test_run`,reason:`Types and lint clean — run tests next`});else{let e=a.tsc.errors[0]?.file??a.biome.errors[0]?.file;e&&t.push({tool:`symbol`,reason:`Resolve failing symbol in ${e}`,suggested_args:{name:e}}),t.push({tool:`check`,reason:`Re-check after fixing 
errors`,suggested_args:{detail:`errors`}})}return{content:[{type:`text`,text:JSON.stringify({...e,_next:t},null,2)}]}}return{content:[{type:`text`,text:JSON.stringify(a,null,2)}]}}catch(e){return X.error(`Check failed`,o(e)),{content:[{type:`text`,text:`Check failed. Check server logs for details.`}],isError:!0}}})}function De(e,t,n){e.registerTool(`batch`,{description:`Execute multiple built-in operations in parallel with concurrency control. Supported operation types: search, find, and check.`,inputSchema:{operations:Y.array(Y.object({id:Y.string().describe(`Unique ID for this operation`),type:Y.enum([`search`,`find`,`check`]).describe(`Built-in operation type`),args:Y.record(Y.string(),Y.unknown()).describe(`Arguments for the operation`)})).min(1).max(100).describe(`Operations to execute`),concurrency:Y.number().min(1).max(20).default(4).describe(`Max concurrent operations`)}},async({operations:e,concurrency:r})=>{try{let i=await c(e,async e=>Xe(e,t,n),{concurrency:r});return{content:[{type:`text`,text:JSON.stringify(i,null,2)}]}}catch(e){return X.error(`Batch failed`,o(e)),{content:[{type:`text`,text:`Batch failed. Check server logs for details.`}],isError:!0}}})}function Oe(e,r,a,s){e.registerTool(`symbol`,{description:`Resolve a symbol: find where it is defined, who imports it, and where it is referenced. Works on TypeScript and JavaScript codebases.`,inputSchema:{name:Y.string().describe(`Symbol name to look up (function, class, type, etc.)`),limit:Y.number().min(1).max(50).default(20).describe(`Max results per category`),workspaces:Y.array(Y.string()).optional().describe(`Cross-workspace search: partition names or folder basenames to include. Use ["*"] for all. 
Global mode only.`)}},async({name:e,limit:c,workspaces:l})=>{try{let o=await q(r,a,{name:e,limit:c,graphStore:s});if(l&&l.length>0){let a=n(l,i(process.cwd()));if(a.length>0){let{stores:n,closeAll:i}=await t(a);try{for(let[t,i]of n){let n=await q(r,i,{name:e,limit:c});n.definedIn&&!o.definedIn&&(o.definedIn={...n.definedIn,path:`[${t}] ${n.definedIn.path}`});for(let e of n.referencedIn)o.referencedIn.push({...e,path:`[${t}] ${e.path}`});if(n.importedBy){o.importedBy=o.importedBy??[];for(let e of n.importedBy)o.importedBy.push({...e,path:`[${t}] ${e.path}`})}}}finally{await i()}}}return{content:[{type:`text`,text:Qe(o)}]}}catch(e){return X.error(`Symbol lookup failed`,o(e)),{content:[{type:`text`,text:`Symbol lookup failed. Check server logs for details.`}],isError:!0}}})}function ke(e){e.registerTool(`eval`,{description:`Execute a JavaScript or TypeScript snippet in a constrained VM sandbox with a timeout. Captures console output and returned values.`,inputSchema:{code:Y.string().max(1e5).describe(`Code snippet to execute`),lang:Y.enum([`js`,`ts`]).default(`js`).optional().describe(`Language mode: js executes directly, ts strips common type syntax first`),timeout:Y.number().min(1).max(6e4).default(5e3).optional().describe(`Execution timeout in milliseconds`)}},async({code:e,lang:t,timeout:n})=>{try{let r=ie({code:e,lang:t,timeout:n});return r.success?{content:[{type:`text`,text:`Eval succeeded in ${r.durationMs}ms\n\n${r.output}`}]}:{content:[{type:`text`,text:`Eval failed in ${r.durationMs}ms: ${r.error??`Unknown error`}`}],isError:!0}}catch(e){return X.error(`Eval failed`,o(e)),{content:[{type:`text`,text:`Eval failed. 
Check server logs for details.`}],isError:!0}}})}function Ae(e){e.registerTool(`test_run`,{description:`Run Vitest for the current project or a subset of files, then return a structured summary of passing and failing tests.`,inputSchema:{files:Y.array(Y.string()).optional().describe(`Specific test files or patterns to run`),grep:Y.string().optional().describe(`Only run tests whose names match this pattern`),cwd:Y.string().optional().describe(`Working directory for the test run`)}},async({files:e,grep:t,cwd:n})=>{try{let r=await he({files:e,grep:t,cwd:n});return{content:[{type:`text`,text:$e(r)}],isError:!r.passed}}catch(e){return X.error(`Test run failed`,o(e)),{content:[{type:`text`,text:`Test run failed. Check server logs for details.`}],isError:!0}}})}function je(e){e.registerTool(`stash`,{description:`Persist and retrieve named values in .kb-state/stash.json for intermediate results between tool calls.`,inputSchema:{action:Y.enum([`set`,`get`,`list`,`delete`,`clear`]).describe(`Operation to perform on the stash`),key:Y.string().optional().describe(`Entry key for set/get/delete operations`),value:Y.string().optional().describe(`String or JSON value for set operations`)}},async({action:e,key:t,value:n})=>{try{switch(e){case`set`:{if(!t)throw Error(`key required for set`);let e=pe(t,rt(n??``));return{content:[{type:`text`,text:`Stored stash entry "${e.key}" (${e.type}) at ${e.storedAt}.`}]}}case`get`:{if(!t)throw Error(`key required for get`);let e=de(t);return{content:[{type:`text`,text:e?JSON.stringify(e,null,2):`Stash entry "${t}" not found.`}]}}case`list`:{let e=fe();return{content:[{type:`text`,text:e.length===0?`Stash is empty.`:e.map(e=>`- ${e.key} (${e.type}) — ${e.storedAt}`).join(`
8
8
  `)}]}}case`delete`:if(!t)throw Error(`key required for delete`);return{content:[{type:`text`,text:ue(t)?`Deleted stash entry "${t}".`:`Stash entry "${t}" not found.`}]};case`clear`:{let e=le();return{content:[{type:`text`,text:`Cleared ${e} stash entr${e===1?`y`:`ies`}.`}]}}}}catch(e){return X.error(`Stash operation failed`,o(e)),{content:[{type:`text`,text:`Stash operation failed. Check server logs for details.`}],isError:!0}}})}function Me(e){e.registerTool(`git_context`,{description:`Summarize the current Git branch, working tree state, recent commits, and optional diff statistics for the repository.`,inputSchema:{cwd:Y.string().optional().describe(`Repository root or working directory`),commit_count:Y.number().min(1).max(50).default(5).optional().describe(`How many recent commits to include`),include_diff:Y.boolean().default(!1).optional().describe(`Include diff stat for working tree changes`)}},async({cwd:e,commit_count:t,include_diff:n})=>{try{return{content:[{type:`text`,text:et(await y({cwd:e,commitCount:t,includeDiff:n}))}]}}catch(e){return X.error(`Git context failed`,o(e)),{content:[{type:`text`,text:`Git context failed. Check server logs for details.`}],isError:!0}}})}function Ne(e){e.registerTool(`diff_parse`,{description:`Parse raw unified diff text into file-level and hunk-level structural changes.`,inputSchema:{diff:Y.string().max(1e6).describe(`Raw unified diff text`)}},async({diff:e})=>{try{return{content:[{type:`text`,text:tt(re({diff:e.replace(/\\n/g,`
9
9
  `).replace(/\\t/g,` `)}))}]}}catch(e){return X.error(`Diff parse failed`,o(e)),{content:[{type:`text`,text:`Diff parse failed. Check server logs for details.`}],isError:!0}}})}function Pe(e){e.registerTool(`rename`,{description:`Rename a symbol across files using whole-word regex matching for exports, imports, and general usage references.`,inputSchema:{old_name:Y.string().describe(`Existing symbol name to replace`),new_name:Y.string().describe(`New symbol name to use`),root_path:Y.string().describe(`Root directory to search within`),extensions:Y.array(Y.string()).optional().describe(`Optional file extensions to include, such as .ts,.tsx,.js,.jsx`),dry_run:Y.boolean().default(!0).describe(`Preview changes without writing files`)}},async({old_name:e,new_name:t,root_path:n,extensions:r,dry_run:i})=>{try{let a=await G({oldName:e,newName:t,rootPath:n,extensions:r,dryRun:i});return{content:[{type:`text`,text:JSON.stringify(a,null,2)}]}}catch(e){return X.error(`Rename failed`,o(e)),{content:[{type:`text`,text:`Rename failed. Check server logs for details.`}],isError:!0}}})}function Fe(e){e.registerTool(`codemod`,{description:`Apply regex-based codemod rules across files and return structured before/after changes for each affected line.`,inputSchema:{root_path:Y.string().describe(`Root directory to transform within`),rules:Y.array(Y.object({description:Y.string().describe(`What the codemod rule does`),pattern:Y.string().describe(`Regex pattern in string form`),replacement:Y.string().describe(`Replacement string with optional capture groups`)})).min(1).describe(`Codemod rules to apply`),dry_run:Y.boolean().default(!0).describe(`Preview changes without writing files`)}},async({root_path:e,rules:t,dry_run:n})=>{try{let r=await m({rootPath:e,rules:t,dryRun:n});return{content:[{type:`text`,text:JSON.stringify(r,null,2)}]}}catch(e){return X.error(`Codemod failed`,o(e)),{content:[{type:`text`,text:`Codemod failed. 
Check server logs for details.`}],isError:!0}}})}function Ie(e,t){e.registerTool(`file_summary`,{description:`Create a concise structural summary of a source file: imports, exports, functions, classes, interfaces, and types.`,inputSchema:{path:Y.string().describe(`Absolute path to the file to summarize`)}},async({path:e})=>{try{return{content:[{type:`text`,text:nt(await ae({path:e,content:(await t.get(e)).content}))}]}}catch(e){return X.error(`File summary failed`,o(e)),{content:[{type:`text`,text:`File summary failed. Check server logs for details.`}],isError:!0}}})}function Le(e){e.registerTool(`checkpoint`,{description:`Save and restore lightweight session checkpoints in .kb-state/checkpoints for cross-session continuity.`,inputSchema:{action:Y.enum([`save`,`load`,`list`,`latest`]).describe(`Checkpoint action to perform`),label:Y.string().optional().describe(`Checkpoint label for save, or checkpoint id for load`),data:Y.string().max(5e5).optional().describe(`JSON object string for save actions`),notes:Y.string().max(1e4).optional().describe(`Optional notes for save actions`)}},async({action:e,label:t,data:n,notes:r})=>{try{switch(e){case`save`:if(!t)throw Error(`label required for save`);return{content:[{type:`text`,text:Q(p(t,it(n),{notes:r}))}]};case`load`:{if(!t)throw Error(`label required for load`);let e=f(t);return{content:[{type:`text`,text:e?Q(e):`Checkpoint "${t}" not found.`}]}}case`list`:{let e=d();return{content:[{type:`text`,text:e.length===0?`No checkpoints saved.`:e.map(e=>`- ${e.id} — ${e.label} (${e.createdAt})`).join(`
10
10
  `)}]}}case`latest`:{let e=u();return{content:[{type:`text`,text:e?Q(e):`No checkpoints saved.`}]}}}}catch(e){return X.error(`Checkpoint failed`,o(e)),{content:[{type:`text`,text:`Checkpoint failed. Check server logs for details.`}],isError:!0}}})}function Re(e){e.registerTool(`data_transform`,{description:`Apply small jq-like transforms to JSON input for filtering, projection, grouping, and path extraction.`,inputSchema:{input:Y.string().max(5e5).describe(`Input JSON string`),expression:Y.string().max(1e4).describe(`Transform expression to apply`)}},async({input:e,expression:t})=>{try{return{content:[{type:`text`,text:g({input:e,expression:t}).outputString}]}}catch(e){return X.error(`Data transform failed`,o(e)),{content:[{type:`text`,text:`Data transform failed. Check server logs for details.`}],isError:!0}}})}function ze(e,t,n){e.registerTool(`trace`,{description:`Trace data flow through a codebase by following imports, call sites, and references from a starting symbol or file location.`,inputSchema:{start:Y.string().describe(`Starting point — symbol name or file:line reference`),direction:Y.enum([`forward`,`backward`,`both`]).describe(`Which direction to trace relationships`),max_depth:Y.number().min(1).max(10).default(3).optional().describe(`Maximum trace depth`)}},async({start:e,direction:r,max_depth:i})=>{try{let a=await ge(t,n,{start:e,direction:r,maxDepth:i}),o=[`## Trace: ${a.start}`,`Direction: ${a.direction} | Depth: ${a.depth}`,``];if(a.nodes.length===0)o.push(`No connections found.`);else{let e=a.nodes.filter(e=>e.relationship===`calls`),t=a.nodes.filter(e=>e.relationship===`called-by`),n=a.nodes.filter(e=>e.relationship===`imports`),r=a.nodes.filter(e=>e.relationship===`imported-by`),i=a.nodes.filter(e=>e.relationship===`references`);if(e.length>0){o.push(`### Calls (${e.length})`);for(let t of e){let e=t.scope?` (from ${t.scope}())`:``;o.push(`- ${t.symbol}() — ${t.path}:${t.line}${e}`)}o.push(``)}if(t.length>0){o.push(`### Called by 
(${t.length})`);for(let e of t){let t=e.scope?` in ${e.scope}()`:``;o.push(`- ${e.symbol}()${t} — ${e.path}:${e.line}`)}o.push(``)}if(n.length>0){o.push(`### Imports (${n.length})`);for(let e of n)o.push(`- ${e.symbol} — ${e.path}:${e.line}`);o.push(``)}if(r.length>0){o.push(`### Imported by (${r.length})`);for(let e of r)o.push(`- ${e.path}:${e.line}`);o.push(``)}if(i.length>0){o.push(`### References (${i.length})`);for(let e of i)o.push(`- ${e.path}:${e.line}`);o.push(``)}}return o.push(`---`,"_Next: `symbol` for definition details | `compact` to read a referenced file | `blast_radius` for impact analysis_"),{content:[{type:`text`,text:o.join(`
11
11
  `)}]}}catch(e){return X.error(`Trace failed`,o(e)),{content:[{type:`text`,text:`Trace failed. Check server logs for details.`}],isError:!0}}})}function Be(e){e.registerTool(`process`,{description:`Start, stop, inspect, list, and tail logs for in-memory managed child processes.`,inputSchema:{action:Y.enum([`start`,`stop`,`status`,`list`,`logs`]).describe(`Process action to perform`),id:Y.string().optional().describe(`Managed process ID`),command:Y.string().optional().describe(`Executable to start`),args:Y.array(Y.string()).optional().describe(`Arguments for start actions`),tail:Y.number().min(1).max(500).optional().describe(`Log lines to return for logs actions`)}},async({action:e,id:t,command:n,args:r,tail:i})=>{try{switch(e){case`start`:if(!t||!n)throw Error(`id and command are required for start`);return{content:[{type:`text`,text:JSON.stringify(M(t,n,r??[]),null,2)}]};case`stop`:if(!t)throw Error(`id is required for stop`);return{content:[{type:`text`,text:JSON.stringify(P(t)??null,null,2)}]};case`status`:if(!t)throw Error(`id is required for status`);return{content:[{type:`text`,text:JSON.stringify(N(t)??null,null,2)}]};case`list`:return{content:[{type:`text`,text:JSON.stringify(A(),null,2)}]};case`logs`:if(!t)throw Error(`id is required for logs`);return{content:[{type:`text`,text:JSON.stringify(j(t,i),null,2)}]}}}catch(e){return X.error(`Process action failed`,o(e)),{content:[{type:`text`,text:`Process action failed. 
Check server logs for details.`}],isError:!0}}})}function Ve(e){e.registerTool(`watch`,{description:`Start, stop, and list in-memory filesystem watchers for a directory.`,inputSchema:{action:Y.enum([`start`,`stop`,`list`]).describe(`Watch action to perform`),path:Y.string().optional().describe(`Directory path to watch for start actions`),id:Y.string().optional().describe(`Watcher ID for stop actions`)}},async({action:e,path:t,id:n})=>{try{switch(e){case`start`:if(!t)throw Error(`path is required for start`);return{content:[{type:`text`,text:JSON.stringify(ve({path:t}),null,2)}]};case`stop`:if(!n)throw Error(`id is required for stop`);return{content:[{type:`text`,text:JSON.stringify({stopped:ye(n)},null,2)}]};case`list`:return{content:[{type:`text`,text:JSON.stringify(_e(),null,2)}]}}}catch(e){return X.error(`Watch action failed`,o(e)),{content:[{type:`text`,text:`Watch action failed. Check server logs for details.`}],isError:!0}}})}function He(e,t,n){e.registerTool(`dead_symbols`,{description:`Find exported symbols that appear to be unused because they are never imported or re-exported.`,inputSchema:{path:Y.string().optional().describe(`Root path to scope the search (default: cwd)`),limit:Y.number().min(1).max(500).default(100).optional().describe(`Maximum exported symbols to scan`)}},async({path:e,limit:r})=>{try{let i=await oe(t,n,{rootPath:e,limit:r}),a=[`## Dead Symbol Analysis`,``,`**Exports scanned:** ${i.totalExports}`,`**Dead in source:** ${i.totalDeadSource} (actionable)`,`**Dead in docs:** ${i.totalDeadDocs} (informational — code samples in .md files)`,``];if(i.deadInSource.length>0){a.push(`### Dead in Source (actionable)`);for(let e of i.deadInSource)a.push(`- \`${e.name}\` (${e.kind}) — ${e.path}:${e.line}`);a.push(``)}if(i.deadInDocs.length>0){a.push(`### Dead in Docs (informational)`),a.push(`_${i.totalDeadDocs} symbol(s) found only in documentation code samples — not actionable dead code._`);for(let e of i.deadInDocs.slice(0,5))a.push(`- 
\`${e.name}\` — ${e.path}:${e.line}`);i.deadInDocs.length>5&&a.push(`- _... ${i.deadInDocs.length-5} more omitted_`)}return i.totalDeadSource>0?a.push(``,`---`,`_Next: \`codemod\` to remove ${i.totalDeadSource} unused exports | \`symbol\` to verify usage before removing_`):a.push(``,`---`,"_Next: `check` — no dead symbols found, validate types and lint_"),{content:[{type:`text`,text:a.join(`
12
- `)}]}}catch(e){return X.error(`Dead symbol scan failed`,o(e)),{content:[{type:`text`,text:`Dead symbol scan failed. Check server logs for details.`}],isError:!0}}})}function Ue(e){e.registerTool(`delegate`,{description:`Delegate a subtask to a local Ollama model. Use for summarization, classification, naming, or any task that can offload work from the host agent. Fails fast if Ollama is not running.`,inputSchema:{prompt:Y.string().max(2e5).describe(`The task or question to send to the local model`),model:Y.string().optional().describe(`Ollama model name (default: first available model)`),system:Y.string().optional().describe(`System prompt for the model`),context:Y.string().max(5e5).optional().describe(`Context text to include before the prompt (e.g. file contents)`),temperature:Y.number().min(0).max(2).default(.3).optional().describe(`Sampling temperature (0=deterministic, default 0.3)`),timeout:Y.number().min(1e3).max(6e5).default(12e4).optional().describe(`Timeout in milliseconds (default 120000)`),action:Y.enum([`generate`,`list_models`]).default(`generate`).optional().describe(`Action: generate a response or list available models`)}},async({prompt:e,model:t,system:n,context:r,temperature:i,timeout:a,action:s})=>{try{if(s===`list_models`){let e=await te();return{content:[{type:`text`,text:JSON.stringify({models:e,count:e.length,_Next:`Use delegate with a model name`},null,2)}]}}let o=await ee({prompt:e,model:t,system:n,context:r,temperature:i,timeout:a});return o.error?{content:[{type:`text`,text:JSON.stringify({error:o.error,model:o.model,durationMs:o.durationMs},null,2)}],isError:!0}:{content:[{type:`text`,text:JSON.stringify({model:o.model,response:o.response,durationMs:o.durationMs,tokenCount:o.tokenCount,_Next:`Use the response in your workflow. stash to save it.`},null,2)}]}}catch(e){return X.error(`Delegate failed`,o(e)),{content:[{type:`text`,text:`Delegate failed. 
Check server logs for details.`}],isError:!0}}})}function We(e){e.registerTool(`lane`,{description:`Manage verified lanes — isolated file copies for parallel exploration. Create a lane, make changes, diff, merge back, or discard.`,inputSchema:{action:Y.enum([`create`,`list`,`status`,`diff`,`merge`,`discard`]).describe(`Lane action to perform`),name:Y.string().optional().describe(`Lane name (required for create/status/diff/merge/discard)`),files:Y.array(Y.string()).optional().describe(`File paths to copy into the lane (required for create)`)}},async({action:e,name:t,files:n})=>{try{switch(e){case`create`:{if(!t)throw Error(`name is required for create`);if(!n||n.length===0)throw Error(`files are required for create`);let e=S(t,n);return{content:[{type:`text`,text:JSON.stringify(e,null,2)}]}}case`list`:return{content:[{type:`text`,text:JSON.stringify(T(),null,2)}]};case`status`:if(!t)throw Error(`name is required for status`);return{content:[{type:`text`,text:JSON.stringify(D(t),null,2)}]};case`diff`:if(!t)throw Error(`name is required for diff`);return{content:[{type:`text`,text:JSON.stringify(C(t),null,2)}]};case`merge`:if(!t)throw Error(`name is required for merge`);return{content:[{type:`text`,text:JSON.stringify(E(t),null,2)}]};case`discard`:if(!t)throw Error(`name is required for discard`);return{content:[{type:`text`,text:JSON.stringify({discarded:w(t)},null,2)}]}}}catch(e){return X.error(`Lane action failed`,o(e)),{content:[{type:`text`,text:`Lane action failed. Check server logs for details.`}],isError:!0}}})}function Ge(e){e.registerTool(`health`,{description:`Run project health checks — verifies package.json, tsconfig, scripts, lockfile, README, LICENSE, .gitignore.`,inputSchema:{path:Y.string().optional().describe(`Root directory to check (defaults to cwd)`)}},async({path:e})=>{try{let t=x(e);return{content:[{type:`text`,text:JSON.stringify(t,null,2)}]}}catch(e){return X.error(`Health check failed`,o(e)),{content:[{type:`text`,text:`Health check failed. 
Check server logs for details.`}],isError:!0}}})}function Ke(e){e.registerTool(`queue`,{description:`Manage task queues for sequential agent operations. Push items, take next, mark done/failed, list queues.`,inputSchema:{action:Y.enum([`create`,`push`,`next`,`done`,`fail`,`get`,`list`,`clear`,`delete`]).describe(`Queue action`),name:Y.string().optional().describe(`Queue name (required for most actions)`),title:Y.string().optional().describe(`Item title (required for push)`),id:Y.string().optional().describe(`Item ID (required for done/fail)`),data:Y.unknown().optional().describe(`Arbitrary data to attach to a queue item`),error:Y.string().optional().describe(`Error message (required for fail)`)}},async({action:e,name:t,title:n,id:r,data:i,error:a})=>{try{switch(e){case`create`:if(!t)throw Error(`name is required for create`);return{content:[{type:`text`,text:JSON.stringify(I(t),null,2)}]};case`push`:if(!t)throw Error(`name is required for push`);if(!n)throw Error(`title is required for push`);return{content:[{type:`text`,text:JSON.stringify(U(t,n,i),null,2)}]};case`next`:{if(!t)throw Error(`name is required for next`);let e=H(t);return{content:[{type:`text`,text:JSON.stringify(e,null,2)}]}}case`done`:if(!t)throw Error(`name is required for done`);if(!r)throw Error(`id is required for done`);return{content:[{type:`text`,text:JSON.stringify(R(t,r),null,2)}]};case`fail`:if(!t)throw Error(`name is required for fail`);if(!r)throw Error(`id is required for fail`);if(!a)throw Error(`error is required for fail`);return{content:[{type:`text`,text:JSON.stringify(z(t,r,a),null,2)}]};case`get`:if(!t)throw Error(`name is required for get`);return{content:[{type:`text`,text:JSON.stringify(B(t),null,2)}]};case`list`:return{content:[{type:`text`,text:JSON.stringify(V(),null,2)}]};case`clear`:if(!t)throw Error(`name is required for clear`);return{content:[{type:`text`,text:JSON.stringify({cleared:F(t)},null,2)}]};case`delete`:if(!t)throw Error(`name is required for 
delete`);return{content:[{type:`text`,text:JSON.stringify({deleted:L(t)},null,2)}]}}}catch(e){return X.error(`Queue action failed`,o(e)),{content:[{type:`text`,text:`Queue action failed. Check server logs for details.`}],isError:!0}}})}const qe=Y.object({query:Y.string(),limit:Y.number().min(1).max(20).default(5).optional(),search_mode:Y.enum([`hybrid`,`semantic`,`keyword`]).default(`hybrid`).optional(),content_type:Y.enum(r).optional(),origin:Y.enum([`indexed`,`curated`,`produced`]).optional(),category:Y.string().optional(),tags:Y.array(Y.string()).optional(),min_score:Y.number().min(0).max(1).default(.25).optional()}),Je=Y.object({query:Y.string().optional(),glob:Y.string().optional(),pattern:Y.string().optional(),limit:Y.number().min(1).max(50).default(10).optional(),content_type:Y.enum(r).optional(),cwd:Y.string().optional()}),Ye=Y.object({files:Y.array(Y.string()).optional(),cwd:Y.string().optional(),skip_types:Y.boolean().optional(),skip_lint:Y.boolean().optional()});async function Xe(e,t,n){switch(e.type){case`search`:return Z(t,n,qe.parse(e.args));case`find`:{let r=Je.parse(e.args);if(!r.query&&!r.glob&&!r.pattern)throw Error(`find operation requires query, glob, or pattern`);return _(t,n,{query:r.query,glob:r.glob,pattern:r.pattern,limit:r.limit,contentType:r.content_type,cwd:r.cwd})}case`check`:{let t=Ye.parse(e.args);return l({files:t.files,cwd:t.cwd,skipTypes:t.skip_types,skipLint:t.skip_lint})}default:throw Error(`Unsupported batch operation type: ${e.type}`)}}async function Z(e,t,n){let r=n.limit??5,i={limit:r,minScore:n.min_score??.25,contentType:n.content_type,origin:n.origin,category:n.category,tags:n.tags},a=e.embedQuery?.bind(e)??e.embed.bind(e);if(n.search_mode===`keyword`)return(await t.ftsSearch(n.query,i)).slice(0,r);let o=await a(n.query);if(n.search_mode===`semantic`)return t.search(o,i);let[s,c]=await Promise.all([t.search(o,{...i,limit:r*2}),t.ftsSearch(n.query,{...i,limit:r*2})]);return Ze(s,c).slice(0,r)}function Ze(e,t,n=60){let r=new 
Map;for(let t=0;t<e.length;t++){let i=e[t];r.set(i.record.id,{record:i.record,score:1/(n+t+1)})}for(let e=0;e<t.length;e++){let i=t[e],a=r.get(i.record.id);if(a){a.score+=1/(n+e+1);continue}r.set(i.record.id,{record:i.record,score:1/(n+e+1)})}return[...r.values()].sort((e,t)=>t.score-e.score)}function Qe(e){let t=[`Symbol: ${e.name}`];if(e.definedIn){let n=`Defined in: ${e.definedIn.path}:${e.definedIn.line} (${e.definedIn.kind})`;e.definedIn.signature&&(n+=`\nSignature: ${e.definedIn.signature}`),t.push(n)}else t.push(`Defined in: not found`);if(t.push(``,`Imported by:`),e.importedBy.length===0)t.push(` none`);else for(let n of e.importedBy)t.push(` - ${n.path}:${n.line} ${n.importStatement}`);if(t.push(``,`Referenced in:`),e.referencedIn.length===0)t.push(` none`);else for(let n of e.referencedIn){let e=`scope`in n&&n.scope?` in ${n.scope}()`:``;t.push(` - ${n.path}:${n.line}${e} ${n.context}`)}return t.join(`
12
+ `)}]}}catch(e){return X.error(`Dead symbol scan failed`,o(e)),{content:[{type:`text`,text:`Dead symbol scan failed. Check server logs for details.`}],isError:!0}}})}function Ue(e){e.registerTool(`delegate`,{description:`Delegate a subtask to a local Ollama model. Use for summarization, classification, naming, or any task that can offload work from the host agent. Fails fast if Ollama is not running.`,inputSchema:{prompt:Y.string().max(2e5).describe(`The task or question to send to the local model`),model:Y.string().optional().describe(`Ollama model name (default: first available model)`),system:Y.string().optional().describe(`System prompt for the model`),context:Y.string().max(5e5).optional().describe(`Context text to include before the prompt (e.g. file contents)`),temperature:Y.number().min(0).max(2).default(.3).optional().describe(`Sampling temperature (0=deterministic, default 0.3)`),timeout:Y.number().min(1e3).max(6e5).default(12e4).optional().describe(`Timeout in milliseconds (default 120000)`),action:Y.enum([`generate`,`list_models`]).default(`generate`).optional().describe(`Action: generate a response or list available models`)}},async({prompt:e,model:t,system:n,context:r,temperature:i,timeout:a,action:s})=>{try{if(s===`list_models`){let e=await te();return{content:[{type:`text`,text:JSON.stringify({models:e,count:e.length,_Next:`Use delegate with a model name`},null,2)}]}}let o=await ee({prompt:e,model:t,system:n,context:r,temperature:i,timeout:a});return o.error?{content:[{type:`text`,text:JSON.stringify({error:o.error,model:o.model,durationMs:o.durationMs},null,2)}],isError:!0}:{content:[{type:`text`,text:JSON.stringify({model:o.model,response:o.response,durationMs:o.durationMs,tokenCount:o.tokenCount,_Next:`Use the response in your workflow. stash to save it.`},null,2)}]}}catch(e){return X.error(`Delegate failed`,o(e)),{content:[{type:`text`,text:`Delegate failed. 
Check server logs for details.`}],isError:!0}}})}function We(e){e.registerTool(`lane`,{description:`Manage verified lanes — isolated file copies for parallel exploration. Create a lane, make changes, diff, merge back, or discard.`,inputSchema:{action:Y.enum([`create`,`list`,`status`,`diff`,`merge`,`discard`]).describe(`Lane action to perform`),name:Y.string().optional().describe(`Lane name (required for create/status/diff/merge/discard)`),files:Y.array(Y.string()).optional().describe(`File paths to copy into the lane (required for create)`)}},async({action:e,name:t,files:n})=>{try{switch(e){case`create`:{if(!t)throw Error(`name is required for create`);if(!n||n.length===0)throw Error(`files are required for create`);let e=S(t,n);return{content:[{type:`text`,text:JSON.stringify(e,null,2)}]}}case`list`:return{content:[{type:`text`,text:JSON.stringify(T(),null,2)}]};case`status`:if(!t)throw Error(`name is required for status`);return{content:[{type:`text`,text:JSON.stringify(D(t),null,2)}]};case`diff`:if(!t)throw Error(`name is required for diff`);return{content:[{type:`text`,text:JSON.stringify(C(t),null,2)}]};case`merge`:if(!t)throw Error(`name is required for merge`);return{content:[{type:`text`,text:JSON.stringify(E(t),null,2)}]};case`discard`:if(!t)throw Error(`name is required for discard`);return{content:[{type:`text`,text:JSON.stringify({discarded:w(t)},null,2)}]}}}catch(e){return X.error(`Lane action failed`,o(e)),{content:[{type:`text`,text:`Lane action failed. Check server logs for details.`}],isError:!0}}})}function Ge(e){e.registerTool(`health`,{description:`Run project health checks — verifies package.json, tsconfig, scripts, lockfile, README, LICENSE, .gitignore.`,inputSchema:{path:Y.string().optional().describe(`Root directory to check (defaults to cwd)`)}},async({path:e})=>{try{let t=x(e);return{content:[{type:`text`,text:JSON.stringify(t,null,2)}]}}catch(e){return X.error(`Health check failed`,o(e)),{content:[{type:`text`,text:`Health check failed. 
Check server logs for details.`}],isError:!0}}})}function Ke(e){e.registerTool(`queue`,{description:`Manage task queues for sequential agent operations. Push items, take next, mark done/failed, list queues.`,inputSchema:{action:Y.enum([`create`,`push`,`next`,`done`,`fail`,`get`,`list`,`clear`,`delete`]).describe(`Queue action`),name:Y.string().optional().describe(`Queue name (required for most actions)`),title:Y.string().optional().describe(`Item title (required for push)`),id:Y.string().optional().describe(`Item ID (required for done/fail)`),data:Y.unknown().optional().describe(`Arbitrary data to attach to a queue item`),error:Y.string().optional().describe(`Error message (required for fail)`)}},async({action:e,name:t,title:n,id:r,data:i,error:a})=>{try{switch(e){case`create`:if(!t)throw Error(`name is required for create`);return{content:[{type:`text`,text:JSON.stringify(I(t),null,2)}]};case`push`:if(!t)throw Error(`name is required for push`);if(!n)throw Error(`title is required for push`);return{content:[{type:`text`,text:JSON.stringify(U(t,n,i),null,2)}]};case`next`:{if(!t)throw Error(`name is required for next`);let e=H(t);return{content:[{type:`text`,text:JSON.stringify(e,null,2)}]}}case`done`:if(!t)throw Error(`name is required for done`);if(!r)throw Error(`id is required for done`);return{content:[{type:`text`,text:JSON.stringify(R(t,r),null,2)}]};case`fail`:if(!t)throw Error(`name is required for fail`);if(!r)throw Error(`id is required for fail`);if(!a)throw Error(`error is required for fail`);return{content:[{type:`text`,text:JSON.stringify(z(t,r,a),null,2)}]};case`get`:if(!t)throw Error(`name is required for get`);return{content:[{type:`text`,text:JSON.stringify(B(t),null,2)}]};case`list`:return{content:[{type:`text`,text:JSON.stringify(V(),null,2)}]};case`clear`:if(!t)throw Error(`name is required for clear`);return{content:[{type:`text`,text:JSON.stringify({cleared:F(t)},null,2)}]};case`delete`:if(!t)throw Error(`name is required for 
delete`);return{content:[{type:`text`,text:JSON.stringify({deleted:L(t)},null,2)}]}}}catch(e){return X.error(`Queue action failed`,o(e)),{content:[{type:`text`,text:`Queue action failed. Check server logs for details.`}],isError:!0}}})}const qe=Y.object({query:Y.string(),limit:Y.number().min(1).max(20).default(5).optional(),search_mode:Y.enum([`hybrid`,`semantic`,`keyword`]).default(`hybrid`).optional(),content_type:Y.enum(r).optional(),origin:Y.enum([`indexed`,`curated`,`produced`]).optional(),category:Y.string().optional(),tags:Y.array(Y.string()).optional(),min_score:Y.number().min(0).max(1).default(.25).optional()}),Je=Y.object({query:Y.string().optional(),glob:Y.string().optional(),pattern:Y.string().optional(),limit:Y.number().min(1).max(50).default(10).optional(),content_type:Y.enum(r).optional(),cwd:Y.string().optional()}),Ye=Y.object({files:Y.array(Y.string()).optional(),cwd:Y.string().optional(),skip_types:Y.boolean().optional(),skip_lint:Y.boolean().optional()});async function Xe(e,t,n){switch(e.type){case`search`:return Z(t,n,qe.parse(e.args));case`find`:{let r=Je.parse(e.args);if(!r.query&&!r.glob&&!r.pattern)throw Error(`find operation requires query, glob, or pattern`);return _(t,n,{query:r.query,glob:r.glob,pattern:r.pattern,limit:r.limit,contentType:r.content_type,cwd:r.cwd})}case`check`:{let t=Ye.parse(e.args);return l({files:t.files,cwd:t.cwd,skipTypes:t.skip_types,skipLint:t.skip_lint})}default:throw Error(`Unsupported batch operation type: ${e.type}`)}}async function Z(e,t,n){let r=n.limit??5,i={limit:r,minScore:n.min_score??.25,contentType:n.content_type,origin:n.origin,category:n.category,tags:n.tags},a=e.embedQuery?.bind(e)??e.embed.bind(e);if(n.search_mode===`keyword`)return(await t.ftsSearch(n.query,i)).slice(0,r);let o=await a(n.query);if(n.search_mode===`semantic`)return t.search(o,i);let[s,c]=await Promise.all([t.search(o,{...i,limit:r*2}),t.ftsSearch(n.query,{...i,limit:r*2})]);return Ze(s,c).slice(0,r)}function Ze(e,t,n=60){let r=new 
Map;for(let t=0;t<e.length;t++){let i=e[t];r.set(i.record.id,{record:i.record,score:1/(n+t+1)})}for(let e=0;e<t.length;e++){let i=t[e],a=r.get(i.record.id);if(a){a.score+=1/(n+e+1);continue}r.set(i.record.id,{record:i.record,score:1/(n+e+1)})}return[...r.values()].sort((e,t)=>t.score-e.score)}function Qe(e){let t=[`Symbol: ${e.name}`];if(e.definedIn){let n=`Defined in: ${e.definedIn.path}:${e.definedIn.line} (${e.definedIn.kind})`;e.definedIn.signature&&(n+=`\nSignature: ${e.definedIn.signature}`),t.push(n)}else t.push(`Defined in: not found`);if(t.push(``,`Imported by:`),e.importedBy.length===0)t.push(` none`);else for(let n of e.importedBy)t.push(` - ${n.path}:${n.line} ${n.importStatement}`);if(t.push(``,`Referenced in:`),e.referencedIn.length===0)t.push(` none`);else for(let n of e.referencedIn){let e=`scope`in n&&n.scope?` in ${n.scope}()`:``;t.push(` - ${n.path}:${n.line}${e} ${n.context}`)}if(e.graphContext){let n=e.graphContext;t.push(``,`Graph context:`),n.definingModule&&t.push(` Module: ${n.definingModule}`),n.importedByModules.length>0&&t.push(` Imported by modules: ${n.importedByModules.join(`, `)}`),n.siblingSymbols.length>0&&t.push(` Sibling symbols: ${n.siblingSymbols.join(`, `)}`)}return t.join(`
13
13
  `)}function $e(e){let t=[`Vitest run: ${e.passed?`passed`:`failed`}`,`Duration: ${e.durationMs}ms`,`Passed: ${e.summary.passed}`,`Failed: ${e.summary.failed}`,`Skipped: ${e.summary.skipped}`];e.summary.suites!==void 0&&t.push(`Suites: ${e.summary.suites}`);let n=e.summary.tests.filter(e=>e.status===`fail`);if(n.length>0){t.push(``,`Failed tests:`);for(let e of n)t.push(`- ${e.name}${e.file?` (${e.file})`:``}`),e.error&&t.push(` ${e.error}`)}return t.join(`
14
14
  `)}function et(e){let t=[`Branch: ${e.branch}`,`Staged: ${e.status.staged.length}`,...e.status.staged.map(e=>` - ${e}`),`Modified: ${e.status.modified.length}`,...e.status.modified.map(e=>` - ${e}`),`Untracked: ${e.status.untracked.length}`,...e.status.untracked.map(e=>` - ${e}`),``,`Recent commits:`];if(e.recentCommits.length===0)t.push(` none`);else for(let n of e.recentCommits)t.push(` - ${n.hash} ${n.message}`),t.push(` ${n.author} @ ${n.date}`);return e.diff&&t.push(``,`Diff stat:`,e.diff),t.join(`
15
15
  `)}function tt(e){if(e.length===0)return`No diff files found.`;let t=[];for(let n of e){let e=n.oldPath?` (from ${n.oldPath})`:``;t.push(`${n.path}${e} [${n.status}] +${n.additions} -${n.deletions} (${n.hunks.length} hunks)`);for(let e of n.hunks){let n=e.header?` ${e.header}`:``;t.push(` @@ -${e.oldStart},${e.oldLines} +${e.newStart},${e.newLines} @@${n}`)}}return t.join(`
@@ -1 +1 @@
1
- import{EMBEDDING_DEFAULTS as e,SEARCH_DEFAULTS as t,STORE_DEFAULTS as n,createLogger as r,serializeError as i,sourceTypeContentTypes as a}from"../../core/dist/index.js";import{Index as o,connect as s}from"@lancedb/lancedb";function c(e){if(!e)return[];try{let t=JSON.parse(e);return Array.isArray(t)?t:[]}catch{return[]}}const l=/^[\w.\-/ ]+$/,u=r(`store`);function d(e,t){if(!l.test(e))throw Error(`Invalid ${t} filter value: contains disallowed characters`);return e.replace(/'/g,`''`)}var f=class{db=null;table=null;dbPath;tableName;_writeQueue=Promise.resolve();enqueueWrite(e){let t=this._writeQueue.then(()=>e());return this._writeQueue=t.then(()=>void 0,()=>void 0),t}constructor(e){this.dbPath=e?.path??n.path,this.tableName=e?.tableName??n.tableName}async initialize(){this.db=await s(this.dbPath),(await this.db.tableNames()).includes(this.tableName)&&(this.table=await this.db.openTable(this.tableName),await this.createFtsIndex())}async upsert(e,t){if(e.length!==0){if(e.length!==t.length)throw Error(`Record count (${e.length}) does not match vector count (${t.length})`);return this.enqueueWrite(()=>this._upsertImpl(e,t))}}async _upsertImpl(e,t){let n=e.map((e,n)=>({id:e.id,vector:Array.from(t[n]),content:e.content,sourcePath:e.sourcePath,contentType:e.contentType,headingPath:e.headingPath??``,chunkIndex:e.chunkIndex,totalChunks:e.totalChunks,startLine:e.startLine,endLine:e.endLine,fileHash:e.fileHash,indexedAt:e.indexedAt,origin:e.origin,tags:JSON.stringify(e.tags),category:e.category??``,version:e.version}));if(this.table){let t=[...new Set(e.map(e=>e.sourcePath))];for(let e of t)try{await this.table.delete(`sourcePath = '${d(e,`sourcePath`)}'`)}catch{}await this.table.add(n)}else try{this.table=await this.db?.createTable(this.tableName,n)??null}catch(e){if(String(e).includes(`already exists`)&&this.db)this.table=await this.db.openTable(this.tableName),await this.table.add(n);else throw e}}async search(e,n){if(!this.table)return[];let 
r=n?.limit??t.maxResults,i=n?.minScore??t.minScore,a=this.table.search(e).limit(r*2),o=this.buildFilterString(n);return o&&(a=a.where(o)),(await a.toArray()).map(e=>({record:this.fromLanceRecord(e),score:1-(e._distance??1)})).filter(e=>e.score>=i).slice(0,r)}async createFtsIndex(){return this.enqueueWrite(()=>this._createFtsIndexImpl())}async _createFtsIndexImpl(){if(this.table)try{await this.table.createIndex(`content`,{config:o.fts()}),u.info(`FTS index created`,{column:`content`})}catch(e){String(e).includes(`already exists`)||u.warn(`FTS index creation failed`,i(e))}}async ftsSearch(e,n){if(!this.table)return[];let r=n?.limit??t.maxResults;try{let t=this.table.search(e).limit(r*2),i=this.buildFilterString(n);return i&&(t=t.where(i)),(await t.toArray()).map(e=>({record:this.fromLanceRecord(e),score:e._score??e._relevance_score??0}))}catch(e){return u.warn(`FTS search failed`,i(e)),[]}}async getById(e){if(!this.table)return null;let t=await this.table.query().where(`id = '${d(e,`id`)}'`).limit(1).toArray();return t.length===0?null:this.fromLanceRecord(t[0])}async deleteBySourcePath(e){return this.enqueueWrite(()=>this._deleteBySourcePathImpl(e))}async _deleteBySourcePathImpl(e){if(!this.table)return 0;let t=await this.getBySourcePath(e);return t.length===0?0:(await this.table.delete(`sourcePath = '${d(e,`sourcePath`)}'`),t.length)}async deleteById(e){return this.enqueueWrite(()=>this._deleteByIdImpl(e))}async _deleteByIdImpl(e){return!this.table||!await this.getById(e)?!1:(await this.table.delete(`id = '${d(e,`id`)}'`),!0)}async getBySourcePath(e){return this.table?(await this.table.query().where(`sourcePath = '${d(e,`sourcePath`)}'`).limit(1e3).toArray()).map(e=>this.fromLanceRecord(e)):[]}async getStats(){if(!this.table)return{totalRecords:0,totalFiles:0,contentTypeBreakdown:{},lastIndexedAt:null,storeBackend:`lancedb`,embeddingModel:e.model};let t=await this.table.countRows(),n=await 
this.table.query().select([`sourcePath`,`contentType`,`indexedAt`]).limit(1e5).toArray(),r={},i=new Set,a=null;for(let e of n){let t=e;r[t.contentType]=(r[t.contentType]??0)+1,i.add(t.sourcePath),(!a||t.indexedAt>a)&&(a=t.indexedAt)}return{totalRecords:t,totalFiles:i.size,contentTypeBreakdown:r,lastIndexedAt:a,storeBackend:`lancedb`,embeddingModel:e.model}}async listSourcePaths(){if(!this.table)return[];let e=await this.table.query().select([`sourcePath`]).limit(1e5).toArray();return[...new Set(e.map(e=>e.sourcePath))]}async dropTable(){return this.enqueueWrite(()=>this._dropTableImpl())}async _dropTableImpl(){if(this.db&&(await this.db.tableNames()).includes(this.tableName))for(let e=1;e<=3;e++)try{await this.db.dropTable(this.tableName);break}catch(t){if(e===3)throw t;let n=e*500;u.warn(`dropTable attempt failed, retrying`,{attempt:e,delayMs:n}),await new Promise(e=>setTimeout(e,n))}this.table=null}async close(){try{this.db&&typeof this.db.close==`function`&&await this.db.close()}catch{}this.table=null,this.db=null}buildFilterString(e){let t=[];if(e?.contentType&&t.push(`contentType = '${d(e.contentType,`contentType`)}'`),e?.sourceType){let n=a(e.sourceType);if(n.length>0){let e=n.map(e=>`'${d(e,`sourceType`)}'`).join(`, `);t.push(`contentType IN (${e})`)}}if(e?.origin&&t.push(`origin = '${d(e.origin,`origin`)}'`),e?.category&&t.push(`category = '${d(e.category,`category`)}'`),e?.tags&&e.tags.length>0){let n=e.tags.map(e=>`tags LIKE '%${d(e,`tag`)}%'`);t.push(`(${n.join(` OR `)})`)}return t.length>0?t.join(` AND `):null}fromLanceRecord(e){return{id:e.id,content:e.content,sourcePath:e.sourcePath,contentType:e.contentType,headingPath:e.headingPath||void 0,chunkIndex:e.chunkIndex,totalChunks:e.totalChunks,startLine:e.startLine,endLine:e.endLine,fileHash:e.fileHash,indexedAt:e.indexedAt,origin:e.origin,tags:c(e.tags),category:e.category||void 0,version:e.version}}};export{f as LanceStore};
1
+ import{EMBEDDING_DEFAULTS as e,SEARCH_DEFAULTS as t,STORE_DEFAULTS as n,createLogger as r,serializeError as i,sourceTypeContentTypes as a}from"../../core/dist/index.js";import{Index as o,connect as s}from"@lancedb/lancedb";function c(e){if(!e)return[];try{let t=JSON.parse(e);return Array.isArray(t)?t:[]}catch{return[]}}const l=/^[\w.\-/ ]+$/,u=r(`store`);function d(e,t){if(!l.test(e))throw Error(`Invalid ${t} filter value: contains disallowed characters`);return e.replace(/'/g,`''`)}var f=class{db=null;table=null;dbPath;tableName;_writeQueue=Promise.resolve();enqueueWrite(e){let t=this._writeQueue.then(()=>e());return this._writeQueue=t.then(()=>void 0,()=>void 0),t}constructor(e){this.dbPath=e?.path??n.path,this.tableName=e?.tableName??n.tableName}async initialize(){this.db=await s(this.dbPath),(await this.db.tableNames()).includes(this.tableName)&&(this.table=await this.db.openTable(this.tableName),await this.createFtsIndex())}async upsert(e,t){if(e.length!==0){if(e.length!==t.length)throw Error(`Record count (${e.length}) does not match vector count (${t.length})`);return this.enqueueWrite(()=>this._upsertImpl(e,t))}}async _upsertImpl(e,t){let n=e.map((e,n)=>({id:e.id,vector:Array.from(t[n]),content:e.content,sourcePath:e.sourcePath,contentType:e.contentType,headingPath:e.headingPath??``,chunkIndex:e.chunkIndex,totalChunks:e.totalChunks,startLine:e.startLine,endLine:e.endLine,fileHash:e.fileHash,indexedAt:e.indexedAt,origin:e.origin,tags:JSON.stringify(e.tags),category:e.category??``,version:e.version}));if(this.table){let t=[...new Set(e.map(e=>e.sourcePath))];for(let e of t)try{await this.table.delete(`sourcePath = '${d(e,`sourcePath`)}'`)}catch{}await this.table.add(n)}else try{this.table=await this.db?.createTable(this.tableName,n)??null}catch(e){if(String(e).includes(`already exists`)&&this.db)this.table=await this.db.openTable(this.tableName),await this.table.add(n);else throw e}}async search(e,n){if(!this.table)return[];let 
r=n?.limit??t.maxResults,i=n?.minScore??t.minScore,a=this.table.search(e).limit(r*2),o=this.buildFilterString(n);return o&&(a=a.where(o)),(await a.toArray()).map(e=>({record:this.fromLanceRecord(e),score:1-(e._distance??1)})).filter(e=>e.score>=i).slice(0,r)}async createFtsIndex(){return this.enqueueWrite(()=>this._createFtsIndexImpl())}async _createFtsIndexImpl(){if(this.table)try{await this.table.createIndex(`content`,{config:o.fts(),replace:!0}),u.info(`FTS index created/updated`,{column:`content`})}catch(e){u.warn(`FTS index creation failed`,i(e))}}async ftsSearch(e,n){if(!this.table)return[];let r=n?.limit??t.maxResults;try{let t=this.table.search(e).limit(r*2),i=this.buildFilterString(n);return i&&(t=t.where(i)),(await t.toArray()).map(e=>({record:this.fromLanceRecord(e),score:e._score??e._relevance_score??0}))}catch(e){return u.warn(`FTS search failed`,i(e)),[]}}async getById(e){if(!this.table)return null;let t=await this.table.query().where(`id = '${d(e,`id`)}'`).limit(1).toArray();return t.length===0?null:this.fromLanceRecord(t[0])}async deleteBySourcePath(e){return this.enqueueWrite(()=>this._deleteBySourcePathImpl(e))}async _deleteBySourcePathImpl(e){if(!this.table)return 0;let t=await this.getBySourcePath(e);return t.length===0?0:(await this.table.delete(`sourcePath = '${d(e,`sourcePath`)}'`),t.length)}async deleteById(e){return this.enqueueWrite(()=>this._deleteByIdImpl(e))}async _deleteByIdImpl(e){return!this.table||!await this.getById(e)?!1:(await this.table.delete(`id = '${d(e,`id`)}'`),!0)}async getBySourcePath(e){return this.table?(await this.table.query().where(`sourcePath = '${d(e,`sourcePath`)}'`).limit(1e3).toArray()).map(e=>this.fromLanceRecord(e)):[]}async getStats(){if(!this.table)return{totalRecords:0,totalFiles:0,contentTypeBreakdown:{},lastIndexedAt:null,storeBackend:`lancedb`,embeddingModel:e.model};let t=await this.table.countRows(),n=await 
this.table.query().select([`sourcePath`,`contentType`,`indexedAt`]).limit(1e5).toArray(),r={},i=new Set,a=null;for(let e of n){let t=e;r[t.contentType]=(r[t.contentType]??0)+1,i.add(t.sourcePath),(!a||t.indexedAt>a)&&(a=t.indexedAt)}return{totalRecords:t,totalFiles:i.size,contentTypeBreakdown:r,lastIndexedAt:a,storeBackend:`lancedb`,embeddingModel:e.model}}async listSourcePaths(){if(!this.table)return[];let e=await this.table.query().select([`sourcePath`]).limit(1e5).toArray();return[...new Set(e.map(e=>e.sourcePath))]}async dropTable(){return this.enqueueWrite(()=>this._dropTableImpl())}async _dropTableImpl(){if(this.db&&(await this.db.tableNames()).includes(this.tableName))for(let e=1;e<=3;e++)try{await this.db.dropTable(this.tableName);break}catch(t){if(e===3)throw t;let n=e*500;u.warn(`dropTable attempt failed, retrying`,{attempt:e,delayMs:n}),await new Promise(e=>setTimeout(e,n))}this.table=null}async close(){try{this.db&&typeof this.db.close==`function`&&await this.db.close()}catch{}this.table=null,this.db=null}buildFilterString(e){let t=[];if(e?.contentType&&t.push(`contentType = '${d(e.contentType,`contentType`)}'`),e?.sourceType){let n=a(e.sourceType);if(n.length>0){let e=n.map(e=>`'${d(e,`sourceType`)}'`).join(`, `);t.push(`contentType IN (${e})`)}}if(e?.origin&&t.push(`origin = '${d(e.origin,`origin`)}'`),e?.category&&t.push(`category = '${d(e.category,`category`)}'`),e?.tags&&e.tags.length>0){let n=e.tags.map(e=>`tags LIKE '%${d(e,`tag`)}%'`);t.push(`(${n.join(` OR `)})`)}return t.length>0?t.join(` AND `):null}fromLanceRecord(e){return{id:e.id,content:e.content,sourcePath:e.sourcePath,contentType:e.contentType,headingPath:e.headingPath||void 0,chunkIndex:e.chunkIndex,totalChunks:e.totalChunks,startLine:e.startLine,endLine:e.endLine,fileHash:e.fileHash,indexedAt:e.indexedAt,origin:e.origin,tags:c(e.tags),category:e.category||void 0,version:e.version}}};export{f as LanceStore};
@@ -42,7 +42,7 @@ import { SchemaValidateOptions, SchemaValidateResult, ValidationError, schemaVal
42
42
  import { Snippet, SnippetAction, SnippetOptions, SnippetResult, snippet } from "./snippet.js";
43
43
  import { StashEntry, stashClear, stashDelete, stashGet, stashList, stashSet } from "./stash.js";
44
44
  import { StratumCard, StratumCardOptions, StratumCardResult, stratumCard } from "./stratum-card.js";
45
- import { SymbolInfo, SymbolOptions, symbol } from "./symbol.js";
45
+ import { SymbolGraphContext, SymbolInfo, SymbolOptions, symbol } from "./symbol.js";
46
46
  import { TestRunOptions, TestRunResult, classifyExitCode, testRun } from "./test-run.js";
47
47
  import { cosineSimilarity, estimateTokens, segment } from "./text-utils.js";
48
48
  import { TimeOptions, TimeResult, timeUtils } from "./time-utils.js";
@@ -52,4 +52,4 @@ import { WatchEvent, WatchHandle, WatchOptions, watchList, watchStart, watchStop
52
52
  import { WebFetchMode, WebFetchOptions, WebFetchResult, webFetch } from "./web-fetch.js";
53
53
  import { WebSearchOptions, WebSearchResult, WebSearchResultItem, parseSearchResults, webSearch } from "./web-search.js";
54
54
  import { Workset, addToWorkset, deleteWorkset, getWorkset, listWorksets, removeFromWorkset, saveWorkset } from "./workset.js";
55
- export { type AuditCheck, type AuditData, type AuditOptions, type AuditRecommendation, type BatchOperation, type BatchOptions, type BatchResult, type ChangelogEntry, type ChangelogFormat, type ChangelogOptions, type ChangelogResult, type CheckOptions, type CheckResult, type CheckSummaryResult, type Checkpoint, type ClassifyTrigger, type CodemodChange, type CodemodOptions, type CodemodResult, type CodemodRule, type CompactOptions, type CompactResult, type ConstraintRef, type DeadSymbol, type DeadSymbolOptions, type DeadSymbolResult, type DelegateOptions, type DelegateResult, type DiffChange, type DiffFile, type DiffHunk, type DiffParseOptions, type DigestFieldEntry, type DigestOptions, type DigestResult, type DigestSource, type DogfoodLogEntry, type DogfoodLogGroupedEntry, type DogfoodLogOptions, type DogfoodLogResult, type EncodeOperation, type EncodeOptions, type EncodeResult, type EnvInfoOptions, type EnvInfoResult, type EvalOptions, type EvalResult, type EvidenceEntry, type EvidenceMapAction, type EvidenceMapResult, type EvidenceMapState, type EvidenceStatus, type Example, FileCache, type FileCacheEntry, type FileCacheStats, type FileMetrics, type FileSummaryOptions, type FileSummaryResult, type FindExamplesOptions, type FindExamplesResult, type FindOptions, type FindResult, type FindResults, type ForgeClassifyCeremony, type ForgeClassifyOptions, type ForgeClassifyResult, type ForgeGroundOptions, type ForgeGroundResult, type ForgeTier, type GateDecision, type GateResult, type GitContextOptions, type GitContextResult, type GraphAugmentOptions, type GraphAugmentedResult, type GraphQueryOptions, type GraphQueryResult, type GuideRecommendation, type GuideResult, type HealthCheck, type HealthResult, type HttpMethod, type HttpRequestOptions, type HttpRequestResult, type KBError, type KBErrorCode, type KBNextHint, type KBResponse, type KBResponseMeta, type LaneDiffEntry, type LaneDiffResult, type LaneMergeResult, type LaneMeta, type ManagedProcess, type 
MeasureOptions, type MeasureResult, type OnboardMode, type OnboardOptions, type OnboardResult, type OnboardStepResult, type ParsedError, type ParsedGitStatus, type ParsedOutput, type ParsedTestResult, type ParsedTestSummary, type QueueItem, type QueueState, type RegexTestOptions, type RegexTestResult, type RenameChange, type RenameOptions, type RenameResult, type ReplayEntry, type ReplayOptions, type SchemaValidateOptions, type SchemaValidateResult, type ScopeMapEntry, type ScopeMapOptions, type ScopeMapResult, type Snippet, type SnippetAction, type SnippetOptions, type SnippetResult, type StashEntry, type StratumCard, type StratumCardOptions, type StratumCardResult, type SymbolInfo, type SymbolOptions, type TestRunOptions, type TestRunResult, type TimeOptions, type TimeResult, type TraceNode, type TraceOptions, type TraceResult, type TransformOptions, type TransformResult, type TypedUnknownSeed, type UnknownType, type ValidationError, type WatchEvent, type WatchHandle, type WatchOptions, type WebFetchMode, type WebFetchOptions, type WebFetchResult, type WebSearchOptions, type WebSearchResult, type WebSearchResultItem, type Workset, addToWorkset, analyzeFile, audit, batch, changelog, check, checkpointLatest, checkpointList, checkpointLoad, checkpointSave, classifyExitCode, codemod, compact, cosineSimilarity, dataTransform, delegate, delegateListModels, deleteWorkset, diffParse, digest, dogfoodLog, encode, envInfo, errorResponse, estimateTokens, evaluate, evidenceMap, fileSummary, find, findDeadSymbols, findExamples, forgeClassify, forgeGround, formatChangelog, getWorkset, gitContext, graphAugmentSearch, graphQuery, guide, headTailTruncate, health, httpRequest, laneCreate, laneDiff, laneDiscard, laneList, laneMerge, laneStatus, listWorksets, measure, okResponse, onboard, paragraphTruncate, parseBiome, parseGitStatus, parseOutput, parseSearchResults, parseTsc, parseVitest, processList, processLogs, processStart, processStatus, processStop, queueClear, queueCreate, 
queueDelete, queueDone, queueFail, queueGet, queueList, queueNext, queuePush, regexTest, removeFromWorkset, rename, replayAppend, replayCapture, replayClear, replayList, replayTrim, resolvePath, saveWorkset, schemaValidate, scopeMap, segment, snippet, stashClear, stashDelete, stashGet, stashList, stashSet, stratumCard, summarizeCheckResult, symbol, testRun, timeUtils, trace, truncateToTokenBudget, watchList, watchStart, watchStop, webFetch, webSearch };
55
+ export { type AuditCheck, type AuditData, type AuditOptions, type AuditRecommendation, type BatchOperation, type BatchOptions, type BatchResult, type ChangelogEntry, type ChangelogFormat, type ChangelogOptions, type ChangelogResult, type CheckOptions, type CheckResult, type CheckSummaryResult, type Checkpoint, type ClassifyTrigger, type CodemodChange, type CodemodOptions, type CodemodResult, type CodemodRule, type CompactOptions, type CompactResult, type ConstraintRef, type DeadSymbol, type DeadSymbolOptions, type DeadSymbolResult, type DelegateOptions, type DelegateResult, type DiffChange, type DiffFile, type DiffHunk, type DiffParseOptions, type DigestFieldEntry, type DigestOptions, type DigestResult, type DigestSource, type DogfoodLogEntry, type DogfoodLogGroupedEntry, type DogfoodLogOptions, type DogfoodLogResult, type EncodeOperation, type EncodeOptions, type EncodeResult, type EnvInfoOptions, type EnvInfoResult, type EvalOptions, type EvalResult, type EvidenceEntry, type EvidenceMapAction, type EvidenceMapResult, type EvidenceMapState, type EvidenceStatus, type Example, FileCache, type FileCacheEntry, type FileCacheStats, type FileMetrics, type FileSummaryOptions, type FileSummaryResult, type FindExamplesOptions, type FindExamplesResult, type FindOptions, type FindResult, type FindResults, type ForgeClassifyCeremony, type ForgeClassifyOptions, type ForgeClassifyResult, type ForgeGroundOptions, type ForgeGroundResult, type ForgeTier, type GateDecision, type GateResult, type GitContextOptions, type GitContextResult, type GraphAugmentOptions, type GraphAugmentedResult, type GraphQueryOptions, type GraphQueryResult, type GuideRecommendation, type GuideResult, type HealthCheck, type HealthResult, type HttpMethod, type HttpRequestOptions, type HttpRequestResult, type KBError, type KBErrorCode, type KBNextHint, type KBResponse, type KBResponseMeta, type LaneDiffEntry, type LaneDiffResult, type LaneMergeResult, type LaneMeta, type ManagedProcess, type 
MeasureOptions, type MeasureResult, type OnboardMode, type OnboardOptions, type OnboardResult, type OnboardStepResult, type ParsedError, type ParsedGitStatus, type ParsedOutput, type ParsedTestResult, type ParsedTestSummary, type QueueItem, type QueueState, type RegexTestOptions, type RegexTestResult, type RenameChange, type RenameOptions, type RenameResult, type ReplayEntry, type ReplayOptions, type SchemaValidateOptions, type SchemaValidateResult, type ScopeMapEntry, type ScopeMapOptions, type ScopeMapResult, type Snippet, type SnippetAction, type SnippetOptions, type SnippetResult, type StashEntry, type StratumCard, type StratumCardOptions, type StratumCardResult, type SymbolGraphContext, type SymbolInfo, type SymbolOptions, type TestRunOptions, type TestRunResult, type TimeOptions, type TimeResult, type TraceNode, type TraceOptions, type TraceResult, type TransformOptions, type TransformResult, type TypedUnknownSeed, type UnknownType, type ValidationError, type WatchEvent, type WatchHandle, type WatchOptions, type WebFetchMode, type WebFetchOptions, type WebFetchResult, type WebSearchOptions, type WebSearchResult, type WebSearchResultItem, type Workset, addToWorkset, analyzeFile, audit, batch, changelog, check, checkpointLatest, checkpointList, checkpointLoad, checkpointSave, classifyExitCode, codemod, compact, cosineSimilarity, dataTransform, delegate, delegateListModels, deleteWorkset, diffParse, digest, dogfoodLog, encode, envInfo, errorResponse, estimateTokens, evaluate, evidenceMap, fileSummary, find, findDeadSymbols, findExamples, forgeClassify, forgeGround, formatChangelog, getWorkset, gitContext, graphAugmentSearch, graphQuery, guide, headTailTruncate, health, httpRequest, laneCreate, laneDiff, laneDiscard, laneList, laneMerge, laneStatus, listWorksets, measure, okResponse, onboard, paragraphTruncate, parseBiome, parseGitStatus, parseOutput, parseSearchResults, parseTsc, parseVitest, processList, processLogs, processStart, processStatus, processStop, 
queueClear, queueCreate, queueDelete, queueDone, queueFail, queueGet, queueList, queueNext, queuePush, regexTest, removeFromWorkset, rename, replayAppend, replayCapture, replayClear, replayList, replayTrim, resolvePath, saveWorkset, schemaValidate, scopeMap, segment, snippet, stashClear, stashDelete, stashGet, stashList, stashSet, stratumCard, summarizeCheckResult, symbol, testRun, timeUtils, trace, truncateToTokenBudget, watchList, watchStart, watchStop, webFetch, webSearch };
@@ -1,7 +1,15 @@
1
1
  import { IEmbedder } from "@kb/embeddings";
2
- import { IKnowledgeStore } from "@kb/store";
2
+ import { IGraphStore, IKnowledgeStore } from "@kb/store";
3
3
 
4
4
  //#region packages/tools/src/symbol.d.ts
5
+ interface SymbolGraphContext {
6
+ /** Module that defines this symbol */
7
+ definingModule?: string;
8
+ /** Modules that import the file containing this symbol */
9
+ importedByModules: string[];
10
+ /** Symbols defined in the same module */
11
+ siblingSymbols: string[];
12
+ }
5
13
  interface SymbolInfo {
6
14
  name: string;
7
15
  definedIn?: {
@@ -21,13 +29,17 @@ interface SymbolInfo {
21
29
  context: string;
22
30
  scope?: string;
23
31
  }>;
32
+ /** Graph-derived context (when graph store available) */
33
+ graphContext?: SymbolGraphContext;
24
34
  }
25
35
  interface SymbolOptions {
26
36
  /** Symbol name to look up */
27
37
  name: string;
28
38
  /** Limit results */
29
39
  limit?: number;
40
+ /** Optional graph store for enriching results */
41
+ graphStore?: IGraphStore;
30
42
  }
31
43
  declare function symbol(embedder: IEmbedder, store: IKnowledgeStore, options: SymbolOptions): Promise<SymbolInfo>;
32
44
  //#endregion
33
- export { SymbolInfo, SymbolOptions, symbol };
45
+ export { SymbolGraphContext, SymbolInfo, SymbolOptions, symbol };
@@ -1,3 +1,3 @@
1
- import{extname as e}from"node:path";import{SUPPORTED_EXTENSIONS as t,WasmRuntime as n,extractSymbols as r,resolveScopes as i}from"../../chunker/dist/index.js";function a(e){return e.replace(/[.*+?^${}()|[\]\\]/g,`\\$&`)}async function o(o,s,c){let{name:l,limit:u=20}=c,d=o.embedQuery?.bind(o)??o.embed.bind(o),f=[`export function ${l}`,`export class ${l}`,`export const ${l}`,`export interface ${l}`,`export type ${l}`,`export enum ${l}`].join(` | `),p=await s.search(await d(f),{limit:u*2}),m=RegExp(`^export\\s+(?:default\\s+)?(?:async\\s+)?(?:function|class|const|let|interface|type|enum)\\s+${a(l)}\\b`,`m`),h;for(let i of p){if(!m.test(i.record.content))continue;let a=i.record.content.match(/export\s+(?:default\s+)?(?:async\s+)?(\w+)/)?.[1]??`unknown`;h={path:i.record.sourcePath,line:i.record.startLine,kind:a};let o=e(i.record.sourcePath);if(n.get()&&t.has(o))try{let e=(await r(i.record.content,o,i.record.sourcePath)).find(e=>e.name===l&&e.exported);e&&(h.kind=e.kind,e.signature&&(h.signature=e.signature))}catch{}break}let g=RegExp(`import\\s+.*\\b${a(l)}\\b.*from\\s+`,`m`),_=await s.search(await d(`import ${l} from`),{limit:u*3}),v=[],y=new Set;for(let e of _){let t=e.record.content.split(`
2
- `);for(let n=0;n<t.length;n++){let r=t[n];if(!g.test(r))continue;let i=`${e.record.sourcePath}:${r.trim()}`;y.has(i)||(y.add(i),v.push({path:e.record.sourcePath,line:e.record.startLine+n,importStatement:r.trim()}))}}let b=RegExp(`\\b${a(l)}\\b`),x=await s.search(await d(l),{limit:u*3}),S=[],C=new Set;for(let r of x){if(h&&r.record.sourcePath===h.path)continue;let a=r.record.content.split(`
3
- `);for(let o=0;o<a.length;o++){let s=a[o];if(!b.test(s)||g.test(s))continue;let c=`${r.record.sourcePath}:${r.record.startLine+o}`;if(C.has(c))continue;C.add(c);let l,u=e(r.record.sourcePath);if(n.get()&&t.has(u))try{let e=await i(r.record.content,u,o+1);e.length>0&&(l=e[0].name)}catch{}S.push({path:r.record.sourcePath,line:r.record.startLine+o,context:s.trim().slice(0,120),scope:l});break}}return{name:l,definedIn:h,importedBy:v.slice(0,u),referencedIn:S.slice(0,u)}}export{o as symbol};
1
+ import{extname as e}from"node:path";import{SUPPORTED_EXTENSIONS as t,WasmRuntime as n,extractSymbols as r,resolveScopes as i}from"../../chunker/dist/index.js";function a(e){return e.replace(/[.*+?^${}()|[\]\\]/g,`\\$&`)}async function o(o,s,c){let{name:l,limit:u=20,graphStore:d}=c,f=o.embedQuery?.bind(o)??o.embed.bind(o),p=[`export function ${l}`,`export class ${l}`,`export const ${l}`,`export interface ${l}`,`export type ${l}`,`export enum ${l}`].join(` | `),m=await s.search(await f(p),{limit:u*2}),h=RegExp(`^export\\s+(?:default\\s+)?(?:async\\s+)?(?:function|class|const|let|interface|type|enum)\\s+${a(l)}\\b`,`m`),g;for(let i of m){if(!h.test(i.record.content))continue;let a=i.record.content.match(/export\s+(?:default\s+)?(?:async\s+)?(\w+)/)?.[1]??`unknown`;g={path:i.record.sourcePath,line:i.record.startLine,kind:a};let o=e(i.record.sourcePath);if(n.get()&&t.has(o))try{let e=(await r(i.record.content,o,i.record.sourcePath)).find(e=>e.name===l&&e.exported);e&&(g.kind=e.kind,e.signature&&(g.signature=e.signature))}catch{}break}let _=RegExp(`import\\s+.*\\b${a(l)}\\b.*from\\s+`,`m`),v=await s.search(await f(`import ${l} from`),{limit:u*3}),y=[],b=new Set;for(let e of v){let t=e.record.content.split(`
2
+ `);for(let n=0;n<t.length;n++){let r=t[n];if(!_.test(r))continue;let i=`${e.record.sourcePath}:${r.trim()}`;b.has(i)||(b.add(i),y.push({path:e.record.sourcePath,line:e.record.startLine+n,importStatement:r.trim()}))}}let x=RegExp(`\\b${a(l)}\\b`),S=await s.search(await f(l),{limit:u*3}),C=[],w=new Set;for(let r of S){if(g&&r.record.sourcePath===g.path)continue;let a=r.record.content.split(`
3
+ `);for(let o=0;o<a.length;o++){let s=a[o];if(!x.test(s)||_.test(s))continue;let c=`${r.record.sourcePath}:${r.record.startLine+o}`;if(w.has(c))continue;w.add(c);let l,u=e(r.record.sourcePath);if(n.get()&&t.has(u))try{let e=await i(r.record.content,u,o+1);e.length>0&&(l=e[0].name)}catch{}C.push({path:r.record.sourcePath,line:r.record.startLine+o,context:s.trim().slice(0,120),scope:l});break}}let T;if(d)try{let e={importedByModules:[],siblingSymbols:[]},t=await d.findNodes({namePattern:l});if(t.length>0){let n=t[0];n.sourcePath?e.definingModule=n.sourcePath:g&&(e.definingModule=g.path);let r=await d.getNeighbors(n.id,{direction:`incoming`,edgeType:`imports`});for(let t of r.nodes)e.importedByModules.push(t.sourcePath??t.name);if(e.definingModule){let t=await d.findNodes({sourcePath:e.definingModule});for(let n of t)n.name!==l&&n.type!==`module`&&e.siblingSymbols.push(`${n.type}:${n.name}`);e.siblingSymbols=e.siblingSymbols.slice(0,15)}}(e.definingModule||e.importedByModules.length>0||e.siblingSymbols.length>0)&&(T=e)}catch{}return{name:l,definedIn:g,importedBy:y.slice(0,u),referencedIn:C.slice(0,u),graphContext:T}}export{o as symbol};
@@ -1,5 +1,5 @@
1
1
  import { createRequire as __createRequire } from 'node:module'; const require = __createRequire(import.meta.url);
2
- import { t as TUIContext } from "./index-B9VpfVPP.js";
2
+ import { t as TUIContext } from "./index-C8NmOF18.js";
3
3
  import React from "react";
4
4
 
5
5
  //#region packages/tui/src/App.d.ts
@@ -14,7 +14,7 @@ interface IEmbedder {
14
14
  */
15
15
  embedQuery(query: string): Promise<Float32Array>;
16
16
  /** Generate embeddings for multiple text strings (batched, for documents/passages) */
17
- embedBatch(texts: string[]): Promise<Float32Array[]>;
17
+ embedBatch(texts: string[], batchSize?: number): Promise<Float32Array[]>;
18
18
  /** The dimensionality of the embedding vectors */
19
19
  readonly dimensions: number;
20
20
  /** The model identifier */
@@ -1,6 +1,6 @@
1
1
  import { createRequire as __createRequire } from 'node:module'; const require = __createRequire(import.meta.url);
2
2
  import { n as KBConfig, t as IKnowledgeStore } from "./store.interface-CnY6SPOH.js";
3
- import { t as IEmbedder } from "./embedder.interface-D4ew0HPW.js";
3
+ import { t as IEmbedder } from "./embedder.interface-IFCBpOlX.js";
4
4
 
5
5
  //#region packages/tui/src/index.d.ts
6
6
  interface TUIContext {
@@ -1,3 +1,3 @@
1
1
  import { createRequire as __createRequire } from 'node:module'; const require = __createRequire(import.meta.url);
2
- import { n as launch, t as TUIContext } from "./index-B9VpfVPP.js";
2
+ import { n as launch, t as TUIContext } from "./index-C8NmOF18.js";
3
3
  export { TUIContext, launch };
@@ -1,6 +1,6 @@
1
1
  import { createRequire as __createRequire } from 'node:module'; const require = __createRequire(import.meta.url);
2
2
  import { t as IKnowledgeStore } from "../store.interface-CnY6SPOH.js";
3
- import { t as IEmbedder } from "../embedder.interface-D4ew0HPW.js";
3
+ import { t as IEmbedder } from "../embedder.interface-IFCBpOlX.js";
4
4
  import React from "react";
5
5
 
6
6
  //#region packages/tui/src/panels/SearchPanel.d.ts