@vpxa/kb 0.1.21 → 0.1.22
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/packages/analyzers/dist/structure-analyzer.d.ts +5 -0
- package/packages/analyzers/dist/structure-analyzer.js +4 -2
- package/packages/analyzers/dist/types.d.ts +2 -0
- package/packages/cli/dist/commands/init/config.js +1 -1
- package/packages/cli/dist/kb-init.js +1 -1
- package/packages/core/dist/constants.d.ts +2 -1
- package/packages/core/dist/constants.js +1 -1
- package/packages/core/dist/index.d.ts +2 -2
- package/packages/core/dist/index.js +1 -1
- package/packages/core/dist/logger.d.ts +12 -1
- package/packages/core/dist/logger.js +1 -1
- package/packages/server/dist/curated-manager.d.ts +4 -0
- package/packages/server/dist/curated-manager.js +2 -2
- package/packages/server/dist/index.js +1 -1
- package/packages/server/dist/mcp-logging.d.ts +11 -0
- package/packages/server/dist/mcp-logging.js +1 -0
- package/packages/server/dist/output-schemas.d.ts +120 -0
- package/packages/server/dist/output-schemas.js +1 -0
- package/packages/server/dist/prompts.d.ts +6 -0
- package/packages/server/dist/prompts.js +6 -0
- package/packages/server/dist/resource-links.d.ts +34 -0
- package/packages/server/dist/resource-links.js +1 -0
- package/packages/server/dist/resources/curated-resources.d.ts +13 -0
- package/packages/server/dist/resources/curated-resources.js +2 -0
- package/packages/server/dist/resources/resources.d.ts +2 -1
- package/packages/server/dist/resources/resources.js +2 -2
- package/packages/server/dist/server.d.ts +1 -1
- package/packages/server/dist/server.js +1 -1
- package/packages/server/dist/tool-metadata.d.ts +38 -0
- package/packages/server/dist/tool-metadata.js +1 -0
- package/packages/server/dist/tools/analyze.tools.js +8 -2
- package/packages/server/dist/tools/audit.tool.js +1 -1
- package/packages/server/dist/tools/bridge.tools.js +7 -7
- package/packages/server/dist/tools/context.tools.d.ts +15 -0
- package/packages/server/dist/tools/context.tools.js +9 -0
- package/packages/server/dist/tools/evolution.tools.js +5 -5
- package/packages/server/dist/tools/execution.tools.d.ts +13 -0
- package/packages/server/dist/tools/execution.tools.js +3 -0
- package/packages/server/dist/tools/forge.tools.js +10 -10
- package/packages/server/dist/tools/forget.tool.js +1 -1
- package/packages/server/dist/tools/graph.tool.js +5 -4
- package/packages/server/dist/tools/infra.tools.d.ts +10 -0
- package/packages/server/dist/tools/infra.tools.js +3 -0
- package/packages/server/dist/tools/list.tool.js +2 -2
- package/packages/server/dist/tools/lookup.tool.js +2 -2
- package/packages/server/dist/tools/manipulation.tools.d.ts +10 -0
- package/packages/server/dist/tools/manipulation.tools.js +4 -0
- package/packages/server/dist/tools/onboard.tool.js +2 -2
- package/packages/server/dist/tools/persistence.tools.d.ts +10 -0
- package/packages/server/dist/tools/persistence.tools.js +5 -0
- package/packages/server/dist/tools/policy.tools.js +2 -2
- package/packages/server/dist/tools/produce.tool.js +2 -2
- package/packages/server/dist/tools/read.tool.js +2 -2
- package/packages/server/dist/tools/reindex.tool.js +2 -2
- package/packages/server/dist/tools/remember.tool.js +3 -3
- package/packages/server/dist/tools/replay.tool.js +2 -2
- package/packages/server/dist/tools/restore.tool.d.ts +6 -0
- package/packages/server/dist/tools/restore.tool.js +3 -0
- package/packages/server/dist/tools/search.tool.js +4 -4
- package/packages/server/dist/tools/status.tool.js +2 -2
- package/packages/server/dist/tools/toolkit.tools.js +18 -18
- package/packages/server/dist/tools/update.tool.js +1 -1
- package/packages/server/dist/tools/utility.tools.js +10 -10
- package/packages/store/dist/graph-store.interface.d.ts +13 -1
- package/packages/store/dist/index.d.ts +2 -2
- package/packages/store/dist/sqlite-graph-store.d.ts +2 -1
- package/packages/store/dist/sqlite-graph-store.js +13 -1
- package/packages/tools/dist/codemod.js +2 -2
- package/packages/tools/dist/graph-query.d.ts +3 -2
- package/packages/tools/dist/graph-query.js +1 -1
- package/packages/tools/dist/index.d.ts +2 -1
- package/packages/tools/dist/index.js +1 -1
- package/packages/tools/dist/rename.js +2 -2
- package/packages/tools/dist/restore-points.d.ts +22 -0
- package/packages/tools/dist/restore-points.js +1 -0
- package/packages/tui/dist/{App-CYLNJLr6.js → App-DE_tdOhs.js} +1 -1
- package/packages/tui/dist/App.js +1 -1
- package/packages/tui/dist/LogPanel-Ce3jMQbH.js +3 -0
- package/packages/tui/dist/index.js +1 -1
- package/packages/tui/dist/panels/LogPanel.js +1 -1
- package/packages/tui/dist/LogPanel-DtMnoyXT.js +0 -3
package/package.json
CHANGED
|
@@ -6,8 +6,13 @@ declare class StructureAnalyzer implements IAnalyzer<StructureAnalyzerOptions> {
|
|
|
6
6
|
analyze(rootPath: string, options?: StructureAnalyzerOptions): Promise<AnalysisResult>;
|
|
7
7
|
private buildTree;
|
|
8
8
|
private computeStats;
|
|
9
|
+
private formatMarkdownWithBudget;
|
|
9
10
|
private formatMarkdown;
|
|
10
11
|
private renderTree;
|
|
12
|
+
private renderTreeLevel3;
|
|
13
|
+
private estimateTokens;
|
|
14
|
+
private isLeafDir;
|
|
15
|
+
private leafDirSummary;
|
|
11
16
|
private countFiles;
|
|
12
17
|
}
|
|
13
18
|
//#endregion
|
|
@@ -1,2 +1,4 @@
|
|
|
1
|
-
import{readdir as e,stat as t}from"node:fs/promises";import{extname as n,join as r}from"node:path";const i={src:`Application source`,lib:`Library code`,dist:`Build output`,build:`Build output`,test:`Tests`,tests:`Tests`,spec:`Tests`,__tests__:`Tests`,docs:`Documentation`,scripts:`Build/utility scripts`,config:`Configuration`,handlers:`Entry point handlers`,controllers:`HTTP controllers`,routes:`API routes`,middleware:`Middleware`,services:`Business logic services`,domain:`Domain/business logic`,infrastructure:`External integrations`,adapters:`Adapter implementations`,ports:`Port interfaces`,models:`Data models`,entities:`Domain entities`,repositories:`Data access`,utils:`Utilities`,helpers:`Helper functions`,types:`Type definitions`,interfaces:`Interface definitions`,constants:`Constants`,cdk:`CDK infrastructure`,stacks:`CDK stacks`,constructs:`CDK constructs`,lambdas:`Lambda functions`,components:`UI components`,hooks:`React hooks`,pages:`Page components`,layouts:`Layout components`,store:`State management`,assets:`Static assets`,styles:`Stylesheets`,fixtures:`Test fixtures`,mocks:`Test mocks`,migrations:`Database migrations`},a={".ts":`TypeScript`,".tsx":`React TSX`,".js":`JavaScript`,".jsx":`React JSX`,".mjs":`ES Module`,".cjs":`CommonJS`,".json":`JSON`,".yaml":`YAML`,".yml":`YAML`,".md":`Markdown`,".mdx":`MDX`,".py":`Python`,".go":`Go`,".rs":`Rust`,".java":`Java`,".sh":`Shell`,".ps1":`PowerShell`,".css":`CSS`,".scss":`SCSS`,".html":`HTML`,".sql":`SQL`,".graphql":`GraphQL`,".proto":`Protocol Buffers`,".toml":`TOML`,".env":`Environment`},o=new Set([`node_modules`,`.git`,`dist`,`build`,`.next`,`.nuxt`,`coverage`,`.turbo`,`.cache`,`__pycache__`,`.venv`,`.terraform`,`cdk.out`]),s=new Set(`.ts,.tsx,.js,.jsx,.mjs,.cjs,.java,.kt,.kts,.scala,.py,.go,.rs,.rb,.php,.swift,.cs,.c,.cpp,.h,.hpp,.sh,.bash,.ps1,.sql,.graphql,.gql,.proto,.json,.yaml,.yml,.toml,.env,.ini,.cfg,.xml,.pom,.gradle,.tf,.hcl,.lock,.mjs`.split(`,`)),c=new 
Set([`__tests__`,`test`,`tests`,`spec`,`__mocks__`,`__fixtures__`,`fixtures`,`mocks`]);var l=class{name=`structure`;async analyze(e,t={}){let{format:n=`markdown`,maxDepth:r=6,sourceOnly:i=!1}=t,
|
|
2
|
-
|
|
1
|
+
import{readdir as e,stat as t}from"node:fs/promises";import{extname as n,join as r}from"node:path";const i={src:`Application source`,lib:`Library code`,dist:`Build output`,build:`Build output`,test:`Tests`,tests:`Tests`,spec:`Tests`,__tests__:`Tests`,docs:`Documentation`,scripts:`Build/utility scripts`,config:`Configuration`,handlers:`Entry point handlers`,controllers:`HTTP controllers`,routes:`API routes`,middleware:`Middleware`,services:`Business logic services`,domain:`Domain/business logic`,infrastructure:`External integrations`,adapters:`Adapter implementations`,ports:`Port interfaces`,models:`Data models`,entities:`Domain entities`,repositories:`Data access`,utils:`Utilities`,helpers:`Helper functions`,types:`Type definitions`,interfaces:`Interface definitions`,constants:`Constants`,cdk:`CDK infrastructure`,stacks:`CDK stacks`,constructs:`CDK constructs`,lambdas:`Lambda functions`,components:`UI components`,hooks:`React hooks`,pages:`Page components`,layouts:`Layout components`,store:`State management`,assets:`Static assets`,styles:`Stylesheets`,fixtures:`Test fixtures`,mocks:`Test mocks`,migrations:`Database migrations`},a={".ts":`TypeScript`,".tsx":`React TSX`,".js":`JavaScript`,".jsx":`React JSX`,".mjs":`ES Module`,".cjs":`CommonJS`,".json":`JSON`,".yaml":`YAML`,".yml":`YAML`,".md":`Markdown`,".mdx":`MDX`,".py":`Python`,".go":`Go`,".rs":`Rust`,".java":`Java`,".sh":`Shell`,".ps1":`PowerShell`,".css":`CSS`,".scss":`SCSS`,".html":`HTML`,".sql":`SQL`,".graphql":`GraphQL`,".proto":`Protocol Buffers`,".toml":`TOML`,".env":`Environment`},o=new Set([`node_modules`,`.git`,`dist`,`build`,`.next`,`.nuxt`,`coverage`,`.turbo`,`.cache`,`__pycache__`,`.venv`,`.terraform`,`cdk.out`]),s=new Set(`.ts,.tsx,.js,.jsx,.mjs,.cjs,.java,.kt,.kts,.scala,.py,.go,.rs,.rb,.php,.swift,.cs,.c,.cpp,.h,.hpp,.sh,.bash,.ps1,.sql,.graphql,.gql,.proto,.json,.yaml,.yml,.toml,.env,.ini,.cfg,.xml,.pom,.gradle,.tf,.hcl,.lock,.mjs`.split(`,`)),c=new 
Set([`__tests__`,`test`,`tests`,`spec`,`__mocks__`,`__fixtures__`,`fixtures`,`mocks`]);var l=class{name=`structure`;async analyze(e,t={}){let{format:n=`markdown`,maxDepth:r=6,sourceOnly:i=!1,maxTokens:a}=t,o=Date.now(),s=await this.buildTree(e,0,r,i),c=this.computeStats(s);return{output:n===`json`?JSON.stringify({tree:s,stats:c},null,2):this.formatMarkdownWithBudget(s,c,e,a),data:{tree:s,stats:c},meta:{analyzedAt:new Date().toISOString(),scope:e,fileCount:c.totalFiles,durationMs:Date.now()-o}}}async buildTree(c,l,u,d){let f=await e(c,{withFileTypes:!0}),p=[];for(let e of f.sort((e,t)=>e.name.localeCompare(t.name))){if(o.has(e.name)||e.name.startsWith(`.`)&&e.name!==`.env.example`)continue;let i=r(c,e.name);if(e.isDirectory()&&l<u){let e=await this.buildTree(i,l+1,u,d);if(d&&(!e.children||e.children.length===0))continue;p.push(e)}else if(e.isFile()){let r=n(e.name).toLowerCase();if(d&&r&&!s.has(r)||d&&!r)continue;let o=await t(i);p.push({name:e.name,type:`file`,language:a[r]??(r||`unknown`),size:o.size})}}let m=c.split(/[/\\]/).pop()??c;return{name:m,type:`directory`,purpose:i[m.toLowerCase()],children:p}}computeStats(e){let t={},n=0,r=0,i=e=>{if(e.type===`file`){n++,r+=e.size??0;let i=e.language??`unknown`;t[i]=(t[i]??0)+1}e.children?.forEach(i)};return i(e),{totalFiles:n,totalSize:r,languages:t}}formatMarkdownWithBudget(e,t,n,r){let i=this.formatMarkdown(e,t,n,1);if(!r||this.estimateTokens(i)<=r)return i;let a=this.formatMarkdown(e,t,n,2);return this.estimateTokens(a)<=r?`${a}\n\n> _Tree pruned to Level 2: leaf directories collapsed to fit token budget._`:this.formatMarkdown(e,t,n,3)+`
|
|
2
|
+
|
|
3
|
+
> _Tree pruned to Level 3: top-level directories only to fit token budget._`}formatMarkdown(e,t,n,r=1){let i=[];return i.push(`## Project Structure: ${n}\n`),i.push(`**${t.totalFiles} files** | Languages: ${Object.entries(t.languages).map(([e,t])=>`${e} (${t})`).join(`, `)}\n`),i.push("```"),r===3?this.renderTreeLevel3(e,i):this.renderTree(e,``,i,r),i.push("```"),i.join(`
|
|
4
|
+
`)}renderTree(e,t,n,r=1){let i=e.purpose?` ── ${e.purpose}`:``;if(e.type===`directory`){let a=e.name.toLowerCase();if(c.has(a)&&e.children){let r=this.countFiles(e);n.push(`${t}${e.name}/${i} (${r} files)`);return}if(r>=2&&this.isLeafDir(e)){n.push(`${t}${e.name}/${i} (${this.leafDirSummary(e)})`);return}n.push(`${t}${e.name}/${i}`),e.children?.forEach((i,a)=>{let o=a===(e.children?.length??0)-1,s=o?`└── `:`├── `,c=t+(o?` `:`│ `);i.type===`directory`?this.renderTree(i,c,n,r):n.push(`${t}${s}${i.name}`)})}else n.push(`${t}${e.name}`)}renderTreeLevel3(e,t){for(let n of e.children??[]){let e=n.purpose?` ── ${n.purpose}`:``;if(n.type===`directory`){let r=this.countFiles(n);t.push(`${n.name}/${e} (${r} files)`)}else t.push(n.name)}}estimateTokens(e){return Math.ceil(e.length/4)}isLeafDir(e){return e.type===`directory`&&(e.children??[]).every(e=>e.type===`file`)}leafDirSummary(e){let t={};for(let n of e.children??[]){let e=n.language??`unknown`;t[e]=(t[e]??0)+1}return`${Object.values(t).reduce((e,t)=>e+t,0)} files — ${Object.entries(t).map(([e,t])=>`${e} (${t})`).join(`, `)}`}countFiles(e){return e.type===`file`?1:(e.children??[]).reduce((e,t)=>e+this.countFiles(t),0)}};export{l as StructureAnalyzer};
|
|
@@ -28,6 +28,8 @@ interface StructureAnalyzerOptions extends AnalyzerOptions {
|
|
|
28
28
|
maxDepth?: number;
|
|
29
29
|
/** When true, only include source code and config files (no docs, images, fonts, etc.) */
|
|
30
30
|
sourceOnly?: boolean;
|
|
31
|
+
/** Token budget for dynamic tree pruning (Level 1=full, 2=collapsed leaves, 3=top-level). */
|
|
32
|
+
maxTokens?: number;
|
|
31
33
|
}
|
|
32
34
|
interface DependencyAnalyzerOptions extends AnalyzerOptions {
|
|
33
35
|
format?: 'json' | 'markdown' | 'mermaid';
|
|
@@ -1,3 +1,3 @@
|
|
|
1
|
-
import{SERVER_NAME as e}from"./constants.js";import{appendFileSync as t,existsSync as n,readFileSync as r,writeFileSync as i}from"node:fs";import{resolve as a}from"node:path";import{KB_PATHS as o}from"../../../../core/dist/index.js";const s={serverName:e,sources:[{path:`.`,excludePatterns:[`**/node_modules/**`,`**/dist/**`,`**/build/**`,`**/.git/**`,`**/${o.data}/**`,`**/coverage/**`,`**/*.min.js`,`**/package-lock.json`,`**/pnpm-lock.yaml`]}],indexing:{chunkSize:1500,chunkOverlap:200,minChunkSize:100},embedding:{model:`mixedbread-ai/mxbai-embed-large-v1`,dimensions:1024},store:{backend:`lancedb`,path:`${o.data}/lance`},curated:{path:o.aiCurated}};function c(e,t){let r=a(e,`kb.config.json`);return n(r)&&!t?(console.log(`kb.config.json already exists. Use --force to overwrite.`),!1):(i(r,`${JSON.stringify(s,null,2)}\n`,`utf-8`),console.log(` Created kb.config.json`),!0)}function l(e){let s=a(e,`.gitignore`),c=[{dir:`${o.data}/`,label:`Knowledge base vector store`},{dir:`${o.state}/`,label:`Knowledge base session state`},{dir:`${o.brainstorm}/`,label:`Brainstorming sessions`},{dir:`${o.handoffs}/`,label:`Handoff documents`}];if(n(s)){let e=r(s,`utf-8`),n=c.filter(t=>!e.includes(t.dir));n.length>0&&(t(s,`\n${n.map(e=>`# ${e.label}\n${e.dir}`).join(`
|
|
1
|
+
import{SERVER_NAME as e}from"./constants.js";import{appendFileSync as t,existsSync as n,readFileSync as r,writeFileSync as i}from"node:fs";import{resolve as a}from"node:path";import{KB_PATHS as o}from"../../../../core/dist/index.js";const s={serverName:e,sources:[{path:`.`,excludePatterns:[`**/node_modules/**`,`**/dist/**`,`**/build/**`,`**/.git/**`,`**/${o.data}/**`,`**/coverage/**`,`**/*.min.js`,`**/package-lock.json`,`**/pnpm-lock.yaml`]}],indexing:{chunkSize:1500,chunkOverlap:200,minChunkSize:100},embedding:{model:`mixedbread-ai/mxbai-embed-large-v1`,dimensions:1024},store:{backend:`lancedb`,path:`${o.data}/lance`},curated:{path:o.aiCurated}};function c(e,t){let r=a(e,`kb.config.json`);return n(r)&&!t?(console.log(`kb.config.json already exists. Use --force to overwrite.`),!1):(i(r,`${JSON.stringify(s,null,2)}\n`,`utf-8`),console.log(` Created kb.config.json`),!0)}function l(e){let s=a(e,`.gitignore`),c=[{dir:`${o.data}/`,label:`Knowledge base vector store`},{dir:`${o.state}/`,label:`Knowledge base session state`},{dir:`${o.restorePoints}/`,label:`Restore points (codemod/rename undo snapshots)`},{dir:`${o.brainstorm}/`,label:`Brainstorming sessions`},{dir:`${o.handoffs}/`,label:`Handoff documents`}];if(n(s)){let e=r(s,`utf-8`),n=c.filter(t=>!e.includes(t.dir));n.length>0&&(t(s,`\n${n.map(e=>`# ${e.label}\n${e.dir}`).join(`
|
|
2
2
|
`)}\n`,`utf-8`),console.log(` Added ${n.map(e=>e.dir).join(`, `)} to .gitignore`))}else i(s,`${c.map(e=>`# ${e.label}\n${e.dir}`).join(`
|
|
3
3
|
`)}\n`,`utf-8`),console.log(` Created .gitignore with KB entries`)}function u(){return s.serverName}export{l as ensureGitignore,u as getServerName,c as writeKbConfig};
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{existsSync as e,readFileSync as t}from"node:fs";import{dirname as n,resolve as r}from"node:path";import{initializeWasm as i}from"../../chunker/dist/index.js";import{OnnxEmbedder as a}from"../../embeddings/dist/index.js";import{IncrementalIndexer as o}from"../../indexer/dist/index.js";import{SqliteGraphStore as s,createStore as c}from"../../store/dist/index.js";function l(){let i=process.env.KB_CONFIG_PATH??(e(r(process.cwd(),`kb.config.json`))?r(process.cwd(),`kb.config.json`):null);i||(console.error(`No kb.config.json found in current directory.`),console.error("Run `kb init` to create one, or set KB_CONFIG_PATH."),process.exit(1));let a=t(i,`utf-8`),o;try{o=JSON.parse(a)}catch{console.error(`Failed to parse ${i} as JSON. Ensure the file contains valid JSON.`),process.exit(1)}let s=n(i);return o.sources=o.sources.map(e=>({...e,path:r(s,e.path)})),o.store.path=r(s,o.store.path),o.curated=o.curated??{path:`curated`},o.curated.path=r(s,o.curated.path),o}async function u(){let e=l(),t=new a({model:e.embedding.model,dimensions:e.embedding.dimensions});await t.initialize();let n=await c({backend:e.store.backend,path:e.store.path});await n.initialize();let r=new o(t,n),{CuratedKnowledgeManager:u}=await import(`../../server/dist/curated-manager.js`),d=new u(e.curated.path,n,t),f;try{let t=new s({path:e.store.path});await t.initialize(),f=t,r.setGraphStore(f)}catch(e){console.error(`[kb] Graph store init failed (non-fatal): ${e.message}`),f={initialize:async()=>{},upsertNode:async()=>{},upsertEdge:async()=>{},upsertNodes:async()=>{},upsertEdges:async()=>{},getNode:async()=>null,getNeighbors:async()=>({nodes:[],edges:[]}),traverse:async()=>({nodes:[],edges:[]}),findNodes:async()=>[],findEdges:async()=>[],deleteNode:async()=>{},deleteBySourcePath:async()=>0,clear:async()=>{},getStats:async()=>({nodeCount:0,edgeCount:0,nodeTypes:{},edgeTypes:{}}),close:async()=>{}}}return await i().catch(()=>{}),{config:e,embedder:t,store:n,graphStore:f,indexer:r,curated:d}}export{u as 
initKB};
|
|
1
|
+
import{existsSync as e,readFileSync as t}from"node:fs";import{dirname as n,resolve as r}from"node:path";import{initializeWasm as i}from"../../chunker/dist/index.js";import{OnnxEmbedder as a}from"../../embeddings/dist/index.js";import{IncrementalIndexer as o}from"../../indexer/dist/index.js";import{SqliteGraphStore as s,createStore as c}from"../../store/dist/index.js";function l(){let i=process.env.KB_CONFIG_PATH??(e(r(process.cwd(),`kb.config.json`))?r(process.cwd(),`kb.config.json`):null);i||(console.error(`No kb.config.json found in current directory.`),console.error("Run `kb init` to create one, or set KB_CONFIG_PATH."),process.exit(1));let a=t(i,`utf-8`),o;try{o=JSON.parse(a)}catch{console.error(`Failed to parse ${i} as JSON. Ensure the file contains valid JSON.`),process.exit(1)}let s=n(i);return o.sources=o.sources.map(e=>({...e,path:r(s,e.path)})),o.store.path=r(s,o.store.path),o.curated=o.curated??{path:`curated`},o.curated.path=r(s,o.curated.path),o}async function u(){let e=l(),t=new a({model:e.embedding.model,dimensions:e.embedding.dimensions});await t.initialize();let n=await c({backend:e.store.backend,path:e.store.path});await n.initialize();let r=new o(t,n),{CuratedKnowledgeManager:u}=await import(`../../server/dist/curated-manager.js`),d=new u(e.curated.path,n,t),f;try{let t=new s({path:e.store.path});await t.initialize(),f=t,r.setGraphStore(f)}catch(e){console.error(`[kb] Graph store init failed (non-fatal): 
${e.message}`),f={initialize:async()=>{},upsertNode:async()=>{},upsertEdge:async()=>{},upsertNodes:async()=>{},upsertEdges:async()=>{},getNode:async()=>null,getNeighbors:async()=>({nodes:[],edges:[]}),traverse:async()=>({nodes:[],edges:[]}),findNodes:async()=>[],findEdges:async()=>[],deleteNode:async()=>{},deleteBySourcePath:async()=>0,clear:async()=>{},getStats:async()=>({nodeCount:0,edgeCount:0,nodeTypes:{},edgeTypes:{}}),validate:async()=>({valid:!0,orphanNodes:[],danglingEdges:[],stats:{nodeCount:0,edgeCount:0,nodeTypes:{},edgeTypes:{}}}),close:async()=>{}}}return await i().catch(()=>{}),{config:e,embedder:t,store:n,graphStore:f,indexer:r,curated:d}}export{u as initKB};
|
|
@@ -9,7 +9,8 @@
|
|
|
9
9
|
declare const KB_PATHS: {
|
|
10
10
|
/** AI artifacts root directory */readonly ai: ".ai"; /** Onboard / produce_knowledge output directory */
|
|
11
11
|
readonly aiKb: ".ai/kb"; /** Curated knowledge directory */
|
|
12
|
-
readonly aiCurated: ".ai/curated"; /**
|
|
12
|
+
readonly aiCurated: ".ai/curated"; /** Restore points for destructive operations (codemod, rename, forget) */
|
|
13
|
+
readonly restorePoints: ".ai/restore-points"; /** Vector store + graph data */
|
|
13
14
|
readonly data: ".kb-data"; /** Session state (stash, lanes, checkpoints, worksets, queues, replay, evidence-maps, snippets) */
|
|
14
15
|
readonly state: ".kb-state"; /** Persistent warn/error logs for dogfooding review */
|
|
15
16
|
readonly logs: ".kb-state/logs"; /** Brainstorming sessions */
|
|
@@ -1 +1 @@
|
|
|
1
|
-
const e={ai:`.ai`,aiKb:`.ai/kb`,aiCurated:`.ai/curated`,data:`.kb-data`,state:`.kb-state`,logs:`.kb-state/logs`,brainstorm:`.brainstorm`,handoffs:`.handoffs`},t={root:`.kb-data`,registry:`registry.json`},n={markdown:{max:1500,min:100},code:{max:2e3,min:50},config:{max:3e3,min:50},default:{max:1500,min:100,overlap:200}},r={model:`mixedbread-ai/mxbai-embed-large-v1`,dimensions:1024},i={backend:`lancedb`,path:e.data,tableName:`knowledge`},a={maxFileSizeBytes:1e6,maxCuratedFileSizeBytes:5e4},o={maxResults:10,minScore:.25},s=/^[a-z][a-z0-9-]*$/,c=[`decisions`,`patterns`,`troubleshooting`,`conventions`,`architecture`];export{s as CATEGORY_PATTERN,n as CHUNK_SIZES,c as DEFAULT_CATEGORIES,r as EMBEDDING_DEFAULTS,a as FILE_LIMITS,t as KB_GLOBAL_PATHS,e as KB_PATHS,o as SEARCH_DEFAULTS,i as STORE_DEFAULTS};
|
|
1
|
+
const e={ai:`.ai`,aiKb:`.ai/kb`,aiCurated:`.ai/curated`,restorePoints:`.ai/restore-points`,data:`.kb-data`,state:`.kb-state`,logs:`.kb-state/logs`,brainstorm:`.brainstorm`,handoffs:`.handoffs`},t={root:`.kb-data`,registry:`registry.json`},n={markdown:{max:1500,min:100},code:{max:2e3,min:50},config:{max:3e3,min:50},default:{max:1500,min:100,overlap:200}},r={model:`mixedbread-ai/mxbai-embed-large-v1`,dimensions:1024},i={backend:`lancedb`,path:e.data,tableName:`knowledge`},a={maxFileSizeBytes:1e6,maxCuratedFileSizeBytes:5e4},o={maxResults:10,minScore:.25},s=/^[a-z][a-z0-9-]*$/,c=[`decisions`,`patterns`,`troubleshooting`,`conventions`,`architecture`];export{s as CATEGORY_PATTERN,n as CHUNK_SIZES,c as DEFAULT_CATEGORIES,r as EMBEDDING_DEFAULTS,a as FILE_LIMITS,t as KB_GLOBAL_PATHS,e as KB_PATHS,o as SEARCH_DEFAULTS,i as STORE_DEFAULTS};
|
|
@@ -3,5 +3,5 @@ import { CONTENT_TYPES, ChunkMetadata, ContentType, IndexStats, KBConfig, KNOWLE
|
|
|
3
3
|
import { contentTypeToSourceType, detectContentType, sourceTypeContentTypes } from "./content-detector.js";
|
|
4
4
|
import { ConfigError, EmbeddingError, IndexError, KBError, StoreError } from "./errors.js";
|
|
5
5
|
import { GlobalRegistry, RegistryEntry, computePartitionKey, getGlobalDataDir, getPartitionDir, isUserInstalled, listWorkspaces, loadRegistry, lookupWorkspace, registerWorkspace, saveRegistry } from "./global-registry.js";
|
|
6
|
-
import { LogLevel, createLogger, getLogLevel, resetLogDir, serializeError, setFileSinkEnabled, setLogLevel } from "./logger.js";
|
|
7
|
-
export { CATEGORY_PATTERN, CHUNK_SIZES, CONTENT_TYPES, ChunkMetadata, ConfigError, ContentType, DEFAULT_CATEGORIES, EMBEDDING_DEFAULTS, EmbeddingError, FILE_LIMITS, GlobalRegistry, IndexError, IndexStats, KBConfig, KBError, KB_GLOBAL_PATHS, KB_PATHS, KNOWLEDGE_ORIGINS, KnowledgeOrigin, KnowledgeRecord, LogLevel, RawChunk, RegistryEntry, SEARCH_DEFAULTS, SOURCE_TYPES, STORE_DEFAULTS, SearchResult, SourceType, StoreError, computePartitionKey, contentTypeToSourceType, createLogger, detectContentType, getGlobalDataDir, getLogLevel, getPartitionDir, isUserInstalled, listWorkspaces, loadRegistry, lookupWorkspace, registerWorkspace, resetLogDir, saveRegistry, serializeError, setFileSinkEnabled, setLogLevel, sourceTypeContentTypes };
|
|
6
|
+
import { LogLevel, LogListener, addLogListener, createLogger, getLogLevel, resetLogDir, serializeError, setFileSinkEnabled, setLogLevel } from "./logger.js";
|
|
7
|
+
export { CATEGORY_PATTERN, CHUNK_SIZES, CONTENT_TYPES, ChunkMetadata, ConfigError, ContentType, DEFAULT_CATEGORIES, EMBEDDING_DEFAULTS, EmbeddingError, FILE_LIMITS, GlobalRegistry, IndexError, IndexStats, KBConfig, KBError, KB_GLOBAL_PATHS, KB_PATHS, KNOWLEDGE_ORIGINS, KnowledgeOrigin, KnowledgeRecord, LogLevel, LogListener, RawChunk, RegistryEntry, SEARCH_DEFAULTS, SOURCE_TYPES, STORE_DEFAULTS, SearchResult, SourceType, StoreError, addLogListener, computePartitionKey, contentTypeToSourceType, createLogger, detectContentType, getGlobalDataDir, getLogLevel, getPartitionDir, isUserInstalled, listWorkspaces, loadRegistry, lookupWorkspace, registerWorkspace, resetLogDir, saveRegistry, serializeError, setFileSinkEnabled, setLogLevel, sourceTypeContentTypes };
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{CATEGORY_PATTERN as e,CHUNK_SIZES as t,DEFAULT_CATEGORIES as n,EMBEDDING_DEFAULTS as r,FILE_LIMITS as i,KB_GLOBAL_PATHS as a,KB_PATHS as o,SEARCH_DEFAULTS as s,STORE_DEFAULTS as c}from"./constants.js";import{contentTypeToSourceType as l,detectContentType as u,sourceTypeContentTypes as d}from"./content-detector.js";import{ConfigError as f,EmbeddingError as p,IndexError as m,KBError as h,StoreError as g}from"./errors.js";import{computePartitionKey as _,getGlobalDataDir as v,getPartitionDir as y,isUserInstalled as b,listWorkspaces as x,loadRegistry as S,lookupWorkspace as C,registerWorkspace as w,saveRegistry as T}from"./global-registry.js";import{
|
|
1
|
+
import{CATEGORY_PATTERN as e,CHUNK_SIZES as t,DEFAULT_CATEGORIES as n,EMBEDDING_DEFAULTS as r,FILE_LIMITS as i,KB_GLOBAL_PATHS as a,KB_PATHS as o,SEARCH_DEFAULTS as s,STORE_DEFAULTS as c}from"./constants.js";import{contentTypeToSourceType as l,detectContentType as u,sourceTypeContentTypes as d}from"./content-detector.js";import{ConfigError as f,EmbeddingError as p,IndexError as m,KBError as h,StoreError as g}from"./errors.js";import{computePartitionKey as _,getGlobalDataDir as v,getPartitionDir as y,isUserInstalled as b,listWorkspaces as x,loadRegistry as S,lookupWorkspace as C,registerWorkspace as w,saveRegistry as T}from"./global-registry.js";import{addLogListener as E,createLogger as D,getLogLevel as O,resetLogDir as k,serializeError as A,setFileSinkEnabled as j,setLogLevel as M}from"./logger.js";import{CONTENT_TYPES as N,KNOWLEDGE_ORIGINS as P,SOURCE_TYPES as F}from"./types.js";export{e as CATEGORY_PATTERN,t as CHUNK_SIZES,N as CONTENT_TYPES,f as ConfigError,n as DEFAULT_CATEGORIES,r as EMBEDDING_DEFAULTS,p as EmbeddingError,i as FILE_LIMITS,m as IndexError,h as KBError,a as KB_GLOBAL_PATHS,o as KB_PATHS,P as KNOWLEDGE_ORIGINS,s as SEARCH_DEFAULTS,F as SOURCE_TYPES,c as STORE_DEFAULTS,g as StoreError,E as addLogListener,_ as computePartitionKey,l as contentTypeToSourceType,D as createLogger,u as detectContentType,v as getGlobalDataDir,O as getLogLevel,y as getPartitionDir,b as isUserInstalled,x as listWorkspaces,S as loadRegistry,C as lookupWorkspace,w as registerWorkspace,k as resetLogDir,T as saveRegistry,A as serializeError,j as setFileSinkEnabled,M as setLogLevel,d as sourceTypeContentTypes};
|
|
@@ -1,5 +1,11 @@
|
|
|
1
1
|
//#region packages/core/src/logger.d.ts
|
|
2
2
|
type LogLevel = 'debug' | 'info' | 'warn' | 'error';
|
|
3
|
+
type LogListener = (entry: {
|
|
4
|
+
level: LogLevel;
|
|
5
|
+
component: string;
|
|
6
|
+
message: string;
|
|
7
|
+
data?: Record<string, unknown>;
|
|
8
|
+
}) => void;
|
|
3
9
|
declare function setLogLevel(level: LogLevel): void;
|
|
4
10
|
declare function getLogLevel(): LogLevel;
|
|
5
11
|
declare function setFileSinkEnabled(enabled: boolean): void;
|
|
@@ -10,6 +16,11 @@ declare function resetLogDir(): void;
|
|
|
10
16
|
* Includes stack traces only at debug level to keep production logs concise.
|
|
11
17
|
*/
|
|
12
18
|
declare function serializeError(err: unknown): Record<string, unknown>;
|
|
19
|
+
/**
|
|
20
|
+
* Register a listener that receives all log messages (after level filtering).
|
|
21
|
+
* Returns a cleanup function to remove the listener.
|
|
22
|
+
*/
|
|
23
|
+
declare function addLogListener(listener: LogListener): () => void;
|
|
13
24
|
declare function createLogger(component: string): {
|
|
14
25
|
debug: (msg: string, data?: Record<string, unknown>) => void;
|
|
15
26
|
info: (msg: string, data?: Record<string, unknown>) => void;
|
|
@@ -17,4 +28,4 @@ declare function createLogger(component: string): {
|
|
|
17
28
|
error: (msg: string, data?: Record<string, unknown>) => void;
|
|
18
29
|
};
|
|
19
30
|
//#endregion
|
|
20
|
-
export { LogLevel, createLogger, getLogLevel, resetLogDir, serializeError, setFileSinkEnabled, setLogLevel };
|
|
31
|
+
export { LogLevel, LogListener, addLogListener, createLogger, getLogLevel, resetLogDir, serializeError, setFileSinkEnabled, setLogLevel };
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{KB_PATHS as e}from"./constants.js";import{join as t,resolve as n}from"node:path";import{appendFileSync as r,mkdirSync as i,readdirSync as a,unlinkSync as o}from"node:fs";const s={debug:0,info:1,warn:2,error:3};let
|
|
1
|
+
import{KB_PATHS as e}from"./constants.js";import{join as t,resolve as n}from"node:path";import{appendFileSync as r,mkdirSync as i,readdirSync as a,unlinkSync as o}from"node:fs";const s={debug:0,info:1,warn:2,error:3},c=[];let l=process.env.KB_LOG_LEVEL??`info`,u=process.env.KB_LOG_FILE_SINK===`true`||process.env.KB_LOG_FILE_SINK!==`false`&&!process.env.VITEST&&process.env.NODE_ENV!==`test`;function d(){return u?process.env.VITEST||process.env.NODE_ENV===`test`?process.env.KB_LOG_FILE_SINK===`true`:!0:!1}let f;function p(){return f||=n(process.cwd(),e.logs),f}function m(e){let n=e.toISOString().slice(0,10);return t(p(),`${n}.jsonl`)}let h=0;function g(){let e=Date.now();if(!(e-h<36e5)){h=e;try{let n=p(),r=new Date(e-30*864e5).toISOString().slice(0,10);for(let e of a(n))if(e.endsWith(`.jsonl`)&&e.slice(0,10)<r)try{o(t(n,e))}catch{}}catch{}}}function _(e,t){try{i(p(),{recursive:!0}),r(m(t),`${e}\n`),g()}catch{}}function v(e){l=e}function y(){return l}function b(e){u=e}function x(){f=void 0}function S(e){if(e instanceof Error){let t={error:e.message};return l===`debug`&&e.stack&&(t.stack=e.stack),t}return{error:String(e)}}function C(e){return c.push(e),()=>{let t=c.indexOf(e);t>=0&&c.splice(t,1)}}function w(e){function t(t,n,r){if(s[t]<s[l])return;let i=new Date,a={ts:i.toISOString(),level:t,component:e,msg:n,...r},o=JSON.stringify(a);console.error(o);for(let i of c)try{i({level:t,component:e,message:n,data:r})}catch{}d()&&(t===`warn`||t===`error`)&&_(o,i)}return{debug:(e,n)=>t(`debug`,e,n),info:(e,n)=>t(`info`,e,n),warn:(e,n)=>t(`warn`,e,n),error:(e,n)=>t(`error`,e,n)}}export{C as addLogListener,w as createLogger,y as getLogLevel,x as resetLogDir,S as serializeError,b as setFileSinkEnabled,v as setLogLevel};
|
|
@@ -64,6 +64,10 @@ declare class CuratedKnowledgeManager {
|
|
|
64
64
|
}>;
|
|
65
65
|
private indexCuratedFile;
|
|
66
66
|
private discoverCategories;
|
|
67
|
+
/**
|
|
68
|
+
* Normalize and validate a relative path within .ai/curated/.
|
|
69
|
+
* Returns the cleaned path (mutates nothing — caller must use the return value).
|
|
70
|
+
*/
|
|
67
71
|
private guardPath;
|
|
68
72
|
private validateCategoryName;
|
|
69
73
|
private validateContentSize;
|
|
@@ -1,5 +1,5 @@
|
|
|
1
|
-
import{dirname as e,isAbsolute as t,join as n}from"node:path";import{createLogger as r,serializeError as i}from"../../core/dist/index.js";import{createHash as a}from"node:crypto";import{mkdir as o,readFile as s,readdir as c,stat as l,unlink as u,writeFile as d}from"node:fs/promises";const f=50*1024,p=r(`server`);var m=class{constructor(e,t,n){this.curatedDir=e,this.store=t,this.embedder=n}async remember(t,r,i,a=[]){this.validateCategoryName(i),this.validateContentSize(r);let s=this.slugify(t),c=await this.uniqueRelativePath(i,s),l=n(this.curatedDir,c),f=new Date().toISOString(),p={title:t,category:i,tags:a,created:f,updated:f,version:1,origin:`curated`,changelog:[{version:1,date:f,reason:`Initial creation`}]},m=this.serializeFile(r,p);await o(e(l),{recursive:!0});try{await d(l,m,{encoding:`utf-8`,flag:`wx`})}catch(e){throw e.code===`EEXIST`?Error(`Concurrent write collision for "${c}" — retry the operation`):e}try{await this.indexCuratedFile(c,r,p)}catch(e){throw await u(l).catch(()=>{}),Error(`Remember failed: wrote file but indexing failed — rolled back. 
${e.message}`)}return{path:c}}async update(e,t,r){this.guardPath(e),this.validateContentSize(t);let i=n(this.curatedDir,e),a=await s(i,`utf-8`),{frontmatter:o}=this.parseFile(a),c=(o.version??1)+1,l=new Date().toISOString();return o.version=c,o.updated=l,o.changelog=[...o.changelog??[],{version:c,date:l,reason:r}],await d(i,this.serializeFile(t,o),`utf-8`),await this.indexCuratedFile(e,t,o),{path:e,version:c}}async forget(e,t){this.guardPath(e),await u(n(this.curatedDir,e));let r=`.ai/curated/${e}`;return await this.store.deleteBySourcePath(r).catch(e=>{p.warn(`File deleted but vector cleanup failed`,{sourcePath:r,...i(e)})}),{path:e}}async read(e){this.guardPath(e);let t=await s(n(this.curatedDir,e),`utf-8`),{frontmatter:r,content:i}=this.parseFile(t),a=e.split(`/`)[0];return{path:e,title:r.title??e,category:a,tags:r.tags??[],version:r.version??1,created:r.created??``,updated:r.updated??``,contentPreview:i.slice(0,200),content:i}}async list(e){let t=[],r=e?.category?[e.category]:await this.discoverCategories();for(let i of r){let r=n(this.curatedDir,i);try{let a=await c(r);for(let o of a){if(!o.endsWith(`.md`))continue;let a=await s(n(r,o),`utf-8`),{frontmatter:c,content:l}=this.parseFile(a);e?.tag&&!(c.tags??[]).includes(e.tag)||t.push({path:`${i}/${o}`,title:c.title??o,category:i,tags:c.tags??[],version:c.version??1,created:c.created??``,updated:c.updated??``,contentPreview:l.slice(0,200)})}}catch{}}return t}async reindexAll(){let e=await this.discoverCategories(),t=[],r=[];for(let a of e){let e=n(this.curatedDir,a),o;try{o=(await c(e)).filter(e=>e.endsWith(`.md`))}catch{continue}for(let c of o){let o=`${a}/${c}`,l=n(e,c);try{let e=await s(l,`utf-8`),{frontmatter:t,content:n}=this.parseFile(e);r.push({relativePath:o,content:n,frontmatter:t})}catch(e){p.error(`Failed to read curated file`,{relativePath:o,...i(e)}),t.push(`${o}: read failed`)}}}if(r.length===0)return{indexed:0,errors:t};let a=await this.embedder.embedBatch(r.map(e=>e.content)),o=new 
Date().toISOString(),l=r.map(e=>{let t=`.ai/curated/${e.relativePath}`;return{id:this.hashId(t,0),content:e.content,sourcePath:t,contentType:`curated-knowledge`,headingPath:e.frontmatter.title,chunkIndex:0,totalChunks:1,startLine:1,endLine:e.content.split(`
|
|
1
|
+
import{dirname as e,isAbsolute as t,join as n}from"node:path";import{createLogger as r,serializeError as i}from"../../core/dist/index.js";import{createHash as a}from"node:crypto";import{mkdir as o,readFile as s,readdir as c,stat as l,unlink as u,writeFile as d}from"node:fs/promises";const f=50*1024,p=r(`server`);var m=class{constructor(e,t,n){this.curatedDir=e,this.store=t,this.embedder=n}async remember(t,r,i,a=[]){this.validateCategoryName(i),this.validateContentSize(r);let s=this.slugify(t),c=await this.uniqueRelativePath(i,s),l=n(this.curatedDir,c),f=new Date().toISOString(),p={title:t,category:i,tags:a,created:f,updated:f,version:1,origin:`curated`,changelog:[{version:1,date:f,reason:`Initial creation`}]},m=this.serializeFile(r,p);await o(e(l),{recursive:!0});try{await d(l,m,{encoding:`utf-8`,flag:`wx`})}catch(e){throw e.code===`EEXIST`?Error(`Concurrent write collision for "${c}" — retry the operation`):e}try{await this.indexCuratedFile(c,r,p)}catch(e){throw await u(l).catch(()=>{}),Error(`Remember failed: wrote file but indexing failed — rolled back. 
${e.message}`)}return{path:c}}async update(e,t,r){e=this.guardPath(e),this.validateContentSize(t);let i=n(this.curatedDir,e),a=await s(i,`utf-8`),{frontmatter:o}=this.parseFile(a),c=(o.version??1)+1,l=new Date().toISOString();return o.version=c,o.updated=l,o.changelog=[...o.changelog??[],{version:c,date:l,reason:r}],await d(i,this.serializeFile(t,o),`utf-8`),await this.indexCuratedFile(e,t,o),{path:e,version:c}}async forget(e,t){e=this.guardPath(e),await u(n(this.curatedDir,e));let r=`.ai/curated/${e}`;return await this.store.deleteBySourcePath(r).catch(e=>{p.warn(`File deleted but vector cleanup failed`,{sourcePath:r,...i(e)})}),{path:e}}async read(e){e=this.guardPath(e);let t=await s(n(this.curatedDir,e),`utf-8`),{frontmatter:r,content:i}=this.parseFile(t),a=e.split(`/`)[0];return{path:e,title:r.title??e,category:a,tags:r.tags??[],version:r.version??1,created:r.created??``,updated:r.updated??``,contentPreview:i.slice(0,200),content:i}}async list(e){let t=[],r=e?.category?[e.category]:await this.discoverCategories();for(let i of r){let r=n(this.curatedDir,i);try{let a=await c(r);for(let o of a){if(!o.endsWith(`.md`))continue;let a=await s(n(r,o),`utf-8`),{frontmatter:c,content:l}=this.parseFile(a);e?.tag&&!(c.tags??[]).includes(e.tag)||t.push({path:`${i}/${o}`,title:c.title??o,category:i,tags:c.tags??[],version:c.version??1,created:c.created??``,updated:c.updated??``,contentPreview:l.slice(0,200)})}}catch{}}return t}async reindexAll(){let e=await this.discoverCategories(),t=[],r=[];for(let a of e){let e=n(this.curatedDir,a),o;try{o=(await c(e)).filter(e=>e.endsWith(`.md`))}catch{continue}for(let c of o){let o=`${a}/${c}`,l=n(e,c);try{let e=await s(l,`utf-8`),{frontmatter:t,content:n}=this.parseFile(e);r.push({relativePath:o,content:n,frontmatter:t})}catch(e){p.error(`Failed to read curated file`,{relativePath:o,...i(e)}),t.push(`${o}: read failed`)}}}if(r.length===0)return{indexed:0,errors:t};let a=await this.embedder.embedBatch(r.map(e=>e.content)),o=new 
Date().toISOString(),l=r.map(e=>{let t=`.ai/curated/${e.relativePath}`;return{id:this.hashId(t,0),content:e.content,sourcePath:t,contentType:`curated-knowledge`,headingPath:e.frontmatter.title,chunkIndex:0,totalChunks:1,startLine:1,endLine:e.content.split(`
|
|
2
2
|
`).length,fileHash:this.hash(e.content),indexedAt:o,origin:`curated`,tags:e.frontmatter.tags,category:e.frontmatter.category,version:e.frontmatter.version}});return await this.store.upsert(l,a),{indexed:r.length,errors:t}}async indexCuratedFile(e,t,n){let r=await this.embedder.embed(t),i=`.ai/curated/${e}`,a=new Date().toISOString(),o={id:this.hashId(i,0),content:t,sourcePath:i,contentType:`curated-knowledge`,headingPath:n.title,chunkIndex:0,totalChunks:1,startLine:1,endLine:t.split(`
|
|
3
|
-
`).length,fileHash:this.hash(t),indexedAt:a,origin:`curated`,tags:n.tags,category:n.category,version:n.version};await this.store.upsert([o],[r])}async discoverCategories(){try{return(await c(this.curatedDir,{withFileTypes:!0})).filter(e=>e.isDirectory()&&/^[a-z][a-z0-9-]*$/.test(e.name)).map(e=>e.name)}catch{return[]}}guardPath(e){if(
|
|
3
|
+
`).length,fileHash:this.hash(t),indexedAt:a,origin:`curated`,tags:n.tags,category:n.category,version:n.version};await this.store.upsert([o],[r])}async discoverCategories(){try{return(await c(this.curatedDir,{withFileTypes:!0})).filter(e=>e.isDirectory()&&/^[a-z][a-z0-9-]*$/.test(e.name)).map(e=>e.name)}catch{return[]}}guardPath(e){let n=e.replace(/^\.ai\/curated\//,``);if(n.endsWith(`.md`)||(n+=`.md`),n.includes(`..`)||t(n))throw Error(`Invalid path: ${n}. Must be relative within .ai/curated/ directory.`);let r=n.split(`/`)[0];return this.validateCategoryName(r),n}validateCategoryName(e){if(!/^[a-z][a-z0-9-]*$/.test(e))throw Error(`Invalid category name: "${e}". Must be lowercase kebab-case (e.g., "decisions", "api-contracts").`)}validateContentSize(e){if(Buffer.byteLength(e,`utf-8`)>f)throw Error(`Content exceeds maximum size of ${f/1024}KB`)}slugify(e){return e.toLowerCase().replace(/[^a-z0-9]+/g,`-`).replace(/^-|-$/g,``).slice(0,80)}async uniqueRelativePath(e,t){let r=`${e}/${t}.md`,i=n(this.curatedDir,r);try{await l(i)}catch{return r}for(let r=2;r<=100;r++){let i=`${e}/${t}-${r}.md`;try{await l(n(this.curatedDir,i))}catch{return i}}throw Error(`Too many entries with slug "${t}" in category "${e}"`)}hash(e){return a(`sha256`).update(e).digest(`hex`).slice(0,16)}hashId(e,t){return this.hash(`${e}::${t}`)}serializeFile(e,t){return`${[`---`,`title: "${t.title.replace(/"/g,`\\"`)}"`,`category: ${t.category}`,`tags: [${t.tags.map(e=>`"${e}"`).join(`, `)}]`,`created: ${t.created}`,`updated: ${t.updated}`,`version: ${t.version}`,`origin: ${t.origin}`,`changelog:`,...t.changelog.map(e=>` - version: ${e.version}\n date: ${e.date}\n reason: "${e.reason.replace(/"/g,`\\"`)}"`),`---`].join(`
|
|
4
4
|
`)}\n\n${e}\n`}parseFile(e){let t=e.match(/^---\n([\s\S]*?)\n---\n\n?([\s\S]*)$/);if(!t)return{frontmatter:{title:`Untitled`,category:`notes`,tags:[],created:``,updated:``,version:1,origin:`curated`,changelog:[]},content:e};let n=t[1],r=t[2].trim(),i={},a=[],o=n.split(`
|
|
5
5
|
`),s=!1,c={};for(let e of o){if(/^changelog:\s*$/.test(e)){s=!0;continue}if(s){let t=e.match(/^\s+-\s+version:\s*(\d+)$/);if(t){c.version!=null&&a.push(c),c={version:parseInt(t[1],10)};continue}let n=e.match(/^\s+date:\s*(.+)$/);if(n){c.date=n[1].trim();continue}let r=e.match(/^\s+reason:\s*"?(.*?)"?\s*$/);if(r){c.reason=r[1];continue}/^\w/.test(e)&&(s=!1,c.version!=null&&a.push(c),c={});continue}let t=e.match(/^(\w+):\s*(.*)$/);if(t){let e=t[1],n=t[2];typeof n==`string`&&n.startsWith(`[`)&&n.endsWith(`]`)?n=n.slice(1,-1).split(`,`).map(e=>e.trim().replace(/^"|"$/g,``)).filter(e=>e.length>0):typeof n==`string`&&/^\d+$/.test(n)?n=parseInt(n,10):typeof n==`string`&&n.startsWith(`"`)&&n.endsWith(`"`)&&(n=n.slice(1,-1)),i[e]=n}}return c.version!=null&&a.push(c),{frontmatter:{title:i.title??`Untitled`,category:i.category??`notes`,tags:i.tags??[],created:i.created??``,updated:i.updated??``,version:i.version??1,origin:`curated`,changelog:a},content:r}}};export{m as CuratedKnowledgeManager};
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{loadConfig as e,reconfigureForWorkspace as t}from"./config.js";import{checkForUpdates as n}from"./version-check.js";import{ALL_TOOL_NAMES as r,createLazyServer as i,createMcpServer as a,initializeKnowledgeBase as o}from"./server.js";import{fileURLToPath as s}from"node:url";import{createLogger as c,serializeError as l}from"../../core/dist/index.js";import{parseArgs as u}from"node:util";const
|
|
1
|
+
// packages/server/dist/index.js — minified build artifact (machine-generated; edit the package source, not this file).
// MCP Knowledge Base server entrypoint. Parses --transport/--port (env KB_TRANSPORT/KB_PORT,
// defaults `stdio`/`3210`), loads config, fires a background update check, then branches:
//  - http transport: Express app with permissive CORS (origin from KB_CORS_ORIGIN, default `*`),
//    GET /health, POST /mcp served by a fresh StreamableHTTPServerTransport per request
//    (GET/DELETE /mcp answer 405); after listen it kicks off the initial index plus a curated
//    re-index (failures only logged — retried via kb_reindex), and installs SIGINT/SIGTERM
//    handlers that close the HTTP server, MCP server, stores and embedder before exit(0).
//  - stdio transport (default): lazy server over stdio; resolves the workspace root from the
//    client's MCP roots/list (file:// URIs converted via fileURLToPath), falls back to cwd when
//    roots are unsupported, otherwise waits up to 5e3 ms for a roots/list_changed notification
//    before giving up; then starts initialization and, unless KB_AUTO_INDEX === `false`, runs
//    the initial index in the background.
// Unhandled rejections are logged without exiting; init failure and fatal errors exit(1).
import{loadConfig as e,reconfigureForWorkspace as t}from"./config.js";import{checkForUpdates as n}from"./version-check.js";import{ALL_TOOL_NAMES as r,createLazyServer as i,createMcpServer as a,initializeKnowledgeBase as o}from"./server.js";import{fileURLToPath as s}from"node:url";import{createLogger as c,serializeError as l}from"../../core/dist/index.js";import{parseArgs as u}from"node:util";import{RootsListChangedNotificationSchema as d}from"@modelcontextprotocol/sdk/types.js";const f=c(`server`),{values:p}=u({options:{transport:{type:`string`,default:process.env.KB_TRANSPORT??`stdio`},port:{type:`string`,default:process.env.KB_PORT??`3210`}}});async function m(){process.on(`unhandledRejection`,e=>{f.error(`Unhandled rejection`,l(e))}),f.info(`Starting MCP Knowledge Base server`);let c=e();if(f.info(`Config loaded`,{sourceCount:c.sources.length,storePath:c.store.path}),n(),p.transport===`http`){let{StreamableHTTPServerTransport:e}=await import(`@modelcontextprotocol/sdk/server/streamableHttp.js`),t=(await import(`express`)).default,n=await o(c),i=a(n,c);f.info(`MCP server configured`,{toolCount:r.length,resourceCount:2});let s=t();s.use(t.json()),s.use((e,t,n)=>{if(t.setHeader(`Access-Control-Allow-Origin`,process.env.KB_CORS_ORIGIN??`*`),t.setHeader(`Access-Control-Allow-Methods`,`GET, POST, DELETE, OPTIONS`),t.setHeader(`Access-Control-Allow-Headers`,`Content-Type, Authorization`),e.method===`OPTIONS`){t.status(204).end();return}n()}),s.get(`/health`,(e,t)=>{t.json({status:`ok`})}),s.post(`/mcp`,async(t,n)=>{try{let r=new e({sessionIdGenerator:void 0});await i.connect(r),await r.handleRequest(t,n,t.body),n.on(`close`,()=>{r.close()})}catch(e){f.error(`MCP handler error`,l(e)),n.headersSent||n.status(500).json({jsonrpc:`2.0`,error:{code:-32603,message:`Internal server error`},id:null})}}),s.get(`/mcp`,(e,t)=>{t.writeHead(405).end(JSON.stringify({jsonrpc:`2.0`,error:{code:-32e3,message:`Method not 
allowed.`},id:null}))}),s.delete(`/mcp`,(e,t)=>{t.writeHead(405).end(JSON.stringify({jsonrpc:`2.0`,error:{code:-32e3,message:`Method not allowed.`},id:null}))});let u=Number(p.port),d=s.listen(u,()=>{f.info(`MCP server listening`,{url:`http://0.0.0.0:${u}/mcp`,port:u}),(async()=>{try{let e=c.sources.map(e=>e.path).join(`, `);f.info(`Running initial index`,{sourcePaths:e});let t=await n.indexer.index(c,e=>{e.phase===`crawling`||e.phase===`done`||e.phase===`chunking`&&e.currentFile&&f.debug(`Indexing file`,{current:e.filesProcessed+1,total:e.filesTotal,file:e.currentFile})});f.info(`Initial index complete`,{filesProcessed:t.filesProcessed,filesSkipped:t.filesSkipped,chunksCreated:t.chunksCreated,durationMs:t.durationMs});try{let e=await n.curated.reindexAll();f.info(`Curated re-index complete`,{indexed:e.indexed})}catch(e){f.error(`Curated re-index failed`,l(e))}}catch(e){f.error(`Initial index failed; will retry on kb_reindex`,l(e))}})().catch(e=>f.error(`Initial index failed`,l(e)))}),m=async e=>{f.info(`Shutdown signal received`,{signal:e}),d.close(),await i.close(),await n.graphStore.close().catch(()=>{}),await n.store.close(),await n.embedder.shutdown(),process.exit(0)};process.on(`SIGINT`,()=>m(`SIGINT`)),process.on(`SIGTERM`,()=>m(`SIGTERM`))}else{let{server:e,startInit:n,ready:r,runInitialIndex:a}=i(c),{StdioServerTransport:o}=await import(`@modelcontextprotocol/sdk/server/stdio.js`),u=new o;await e.connect(u),f.info(`MCP server started`,{transport:`stdio`});let p=e=>{if(e.length===0)return!1;let n=e[0].uri,r=n.startsWith(`file://`)?s(n):n;return f.info(`MCP roots resolved`,{rootUri:n,rootPath:r,rootCount:e.length}),t(c,r),!0},m=!1;try{m=p((await e.server.listRoots()).roots),m||f.info(`No MCP roots yet; waiting for roots/list_changed notification`)}catch(e){f.warn(`MCP roots/list not supported by client; using cwd fallback`,{cwd:process.cwd(),...l(e)}),m=!0}m||=await new Promise(t=>{let n=setTimeout(()=>{f.warn(`Timed out waiting for MCP roots/list_changed; 
using cwd fallback`,{cwd:process.cwd()}),t(!1)},5e3);e.server.setNotificationHandler(d,async()=>{clearTimeout(n);try{t(p((await e.server.listRoots()).roots))}catch(e){f.warn(`roots/list retry failed after notification`,l(e)),t(!1)}})}),n(),r.catch(e=>{f.error(`Initialization failed`,l(e)),process.exit(1)}),process.env.KB_AUTO_INDEX===`false`?f.warn(`Auto-index disabled; use kb_reindex to index manually`):a().catch(e=>f.error(`Initial index failed`,l(e)))}}m().catch(e=>{f.error(`Fatal error`,l(e)),process.exit(1)});export{};
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
|
|
2
|
+
|
|
3
|
+
//#region packages/server/src/mcp-logging.d.ts
|
|
4
|
+
/**
|
|
5
|
+
* Bridge internal logger messages to MCP client via `sendLoggingMessage`.
|
|
6
|
+
* Fire-and-forget — never blocks the log caller, never throws.
|
|
7
|
+
* Returns a cleanup function to remove the bridge.
|
|
8
|
+
*/
|
|
9
|
+
declare function bridgeMcpLogging(server: McpServer): () => void;
|
|
10
|
+
//#endregion
|
|
11
|
+
export { bridgeMcpLogging };
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// packages/server/dist/mcp-logging.js — minified build artifact (machine-generated; edit the package source, not this file).
// bridgeMcpLogging(server): subscribes to the core logger via addLogListener and forwards each
// record to the MCP client through server.sendLoggingMessage. Internal levels map 1:1 except
// warn -> `warning` (MCP's syslog-style level name). Fire-and-forget: send failures are
// swallowed so logging never throws into the caller. Returns addLogListener's unsubscribe
// function so the bridge can be removed on shutdown. When a record carries structured data it
// is sent as {message, ...data}; otherwise the plain message string is sent.
import{addLogListener as e}from"../../core/dist/index.js";const t={debug:`debug`,info:`info`,warn:`warning`,error:`error`};function n(n){return e(({level:e,component:r,message:i,data:a})=>{try{n.sendLoggingMessage({level:t[e],logger:r,data:a?{message:i,...a}:i})}catch{}})}export{n as bridgeMcpLogging};
|
|
@@ -0,0 +1,120 @@
|
|
|
1
|
+
import { z } from "zod";
|
|
2
|
+
|
|
3
|
+
//#region packages/server/src/output-schemas.d.ts
|
|
4
|
+
/**
|
|
5
|
+
* Centralized output schemas for MCP tools that return naturally structured data.
|
|
6
|
+
*
|
|
7
|
+
* Phase 1: status, list, health, measure, env, time
|
|
8
|
+
* These schemas are used with `outputSchema` in tool registration and validated
|
|
9
|
+
* by the SDK at runtime. Keep them intentionally flat and stable — do not leak
|
|
10
|
+
* internal storage record shapes.
|
|
11
|
+
*/
|
|
12
|
+
declare const StatusOutputSchema: z.ZodObject<{
|
|
13
|
+
totalRecords: z.ZodNumber;
|
|
14
|
+
totalFiles: z.ZodNumber;
|
|
15
|
+
lastIndexedAt: z.ZodNullable<z.ZodString>;
|
|
16
|
+
onboarded: z.ZodBoolean;
|
|
17
|
+
contentTypes: z.ZodRecord<z.ZodString, z.ZodNumber>;
|
|
18
|
+
wasmAvailable: z.ZodBoolean;
|
|
19
|
+
graphStats: z.ZodNullable<z.ZodObject<{
|
|
20
|
+
nodes: z.ZodNumber;
|
|
21
|
+
edges: z.ZodNumber;
|
|
22
|
+
}, z.core.$strip>>;
|
|
23
|
+
curatedCount: z.ZodNumber;
|
|
24
|
+
}, z.core.$strip>;
|
|
25
|
+
declare const ListOutputSchema: z.ZodObject<{
|
|
26
|
+
entries: z.ZodArray<z.ZodObject<{
|
|
27
|
+
path: z.ZodString;
|
|
28
|
+
title: z.ZodString;
|
|
29
|
+
category: z.ZodString;
|
|
30
|
+
tags: z.ZodArray<z.ZodString>;
|
|
31
|
+
version: z.ZodNumber;
|
|
32
|
+
preview: z.ZodString;
|
|
33
|
+
}, z.core.$strip>>;
|
|
34
|
+
totalCount: z.ZodNumber;
|
|
35
|
+
}, z.core.$strip>;
|
|
36
|
+
declare const HealthOutputSchema: z.ZodObject<{
|
|
37
|
+
ok: z.ZodBoolean;
|
|
38
|
+
checks: z.ZodArray<z.ZodObject<{
|
|
39
|
+
name: z.ZodString;
|
|
40
|
+
ok: z.ZodBoolean;
|
|
41
|
+
message: z.ZodOptional<z.ZodString>;
|
|
42
|
+
}, z.core.$strip>>;
|
|
43
|
+
}, z.core.$strip>;
|
|
44
|
+
declare const MeasureOutputSchema: z.ZodObject<{
|
|
45
|
+
summary: z.ZodObject<{
|
|
46
|
+
totalFiles: z.ZodNumber;
|
|
47
|
+
totalLines: z.ZodNumber;
|
|
48
|
+
totalCodeLines: z.ZodNumber;
|
|
49
|
+
totalFunctions: z.ZodNumber;
|
|
50
|
+
avgComplexity: z.ZodNumber;
|
|
51
|
+
maxComplexity: z.ZodObject<{
|
|
52
|
+
value: z.ZodNumber;
|
|
53
|
+
file: z.ZodString;
|
|
54
|
+
}, z.core.$strip>;
|
|
55
|
+
}, z.core.$strip>;
|
|
56
|
+
files: z.ZodArray<z.ZodObject<{
|
|
57
|
+
path: z.ZodString;
|
|
58
|
+
lines: z.ZodNumber;
|
|
59
|
+
code: z.ZodNumber;
|
|
60
|
+
complexity: z.ZodNumber;
|
|
61
|
+
functions: z.ZodNumber;
|
|
62
|
+
}, z.core.$strip>>;
|
|
63
|
+
}, z.core.$strip>;
|
|
64
|
+
declare const EnvOutputSchema: z.ZodObject<{
|
|
65
|
+
platform: z.ZodString;
|
|
66
|
+
arch: z.ZodString;
|
|
67
|
+
nodeVersion: z.ZodString;
|
|
68
|
+
cwd: z.ZodString;
|
|
69
|
+
cpus: z.ZodNumber;
|
|
70
|
+
memoryFreeGb: z.ZodNumber;
|
|
71
|
+
memoryTotalGb: z.ZodNumber;
|
|
72
|
+
}, z.core.$strip>;
|
|
73
|
+
declare const TimeOutputSchema: z.ZodObject<{
|
|
74
|
+
iso: z.ZodString;
|
|
75
|
+
unix: z.ZodNumber;
|
|
76
|
+
timezone: z.ZodString;
|
|
77
|
+
formatted: z.ZodString;
|
|
78
|
+
}, z.core.$strip>;
|
|
79
|
+
declare const CheckOutputSchema: z.ZodObject<{
|
|
80
|
+
passed: z.ZodBoolean;
|
|
81
|
+
tsc: z.ZodObject<{
|
|
82
|
+
passed: z.ZodBoolean;
|
|
83
|
+
errorCount: z.ZodNumber;
|
|
84
|
+
warningCount: z.ZodNumber;
|
|
85
|
+
topErrors: z.ZodArray<z.ZodString>;
|
|
86
|
+
}, z.core.$strip>;
|
|
87
|
+
biome: z.ZodObject<{
|
|
88
|
+
passed: z.ZodBoolean;
|
|
89
|
+
errorCount: z.ZodNumber;
|
|
90
|
+
warningCount: z.ZodNumber;
|
|
91
|
+
topErrors: z.ZodArray<z.ZodString>;
|
|
92
|
+
}, z.core.$strip>;
|
|
93
|
+
}, z.core.$strip>;
|
|
94
|
+
declare const SymbolOutputSchema: z.ZodObject<{
|
|
95
|
+
name: z.ZodString;
|
|
96
|
+
definedIn: z.ZodNullable<z.ZodObject<{
|
|
97
|
+
path: z.ZodString;
|
|
98
|
+
line: z.ZodNumber;
|
|
99
|
+
kind: z.ZodString;
|
|
100
|
+
signature: z.ZodOptional<z.ZodString>;
|
|
101
|
+
}, z.core.$strip>>;
|
|
102
|
+
importedBy: z.ZodArray<z.ZodObject<{
|
|
103
|
+
path: z.ZodString;
|
|
104
|
+
line: z.ZodNumber;
|
|
105
|
+
importStatement: z.ZodString;
|
|
106
|
+
}, z.core.$strip>>;
|
|
107
|
+
referencedIn: z.ZodArray<z.ZodObject<{
|
|
108
|
+
path: z.ZodString;
|
|
109
|
+
line: z.ZodNumber;
|
|
110
|
+
context: z.ZodString;
|
|
111
|
+
scope: z.ZodOptional<z.ZodString>;
|
|
112
|
+
}, z.core.$strip>>;
|
|
113
|
+
graphContext: z.ZodNullable<z.ZodObject<{
|
|
114
|
+
definingModule: z.ZodOptional<z.ZodString>;
|
|
115
|
+
importedByModules: z.ZodArray<z.ZodString>;
|
|
116
|
+
siblingSymbols: z.ZodArray<z.ZodString>;
|
|
117
|
+
}, z.core.$strip>>;
|
|
118
|
+
}, z.core.$strip>;
|
|
119
|
+
//#endregion
|
|
120
|
+
export { CheckOutputSchema, EnvOutputSchema, HealthOutputSchema, ListOutputSchema, MeasureOutputSchema, StatusOutputSchema, SymbolOutputSchema, TimeOutputSchema };
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// packages/server/dist/output-schemas.js — minified build artifact (machine-generated; edit the package source, not this file).
// Centralized zod output schemas for MCP tools that return structured data (named exports match
// output-schemas.d.ts): Status, List, Health, Measure, Env, Time, Check and Symbol. The tsc and
// biome sub-results of CheckOutputSchema share one sub-schema (passed/errorCount/warningCount/
// topErrors). Per the declaration file these are passed as `outputSchema` at tool registration so
// the SDK validates tool results at runtime; shapes are intentionally flat and stable.
import{z as e}from"zod";const t=e.object({totalRecords:e.number(),totalFiles:e.number(),lastIndexedAt:e.string().nullable(),onboarded:e.boolean(),contentTypes:e.record(e.string(),e.number()),wasmAvailable:e.boolean(),graphStats:e.object({nodes:e.number(),edges:e.number()}).nullable(),curatedCount:e.number()}),n=e.object({entries:e.array(e.object({path:e.string(),title:e.string(),category:e.string(),tags:e.array(e.string()),version:e.number(),preview:e.string()})),totalCount:e.number()}),r=e.object({ok:e.boolean(),checks:e.array(e.object({name:e.string(),ok:e.boolean(),message:e.string().optional()}))}),i=e.object({summary:e.object({totalFiles:e.number(),totalLines:e.number(),totalCodeLines:e.number(),totalFunctions:e.number(),avgComplexity:e.number(),maxComplexity:e.object({value:e.number(),file:e.string()})}),files:e.array(e.object({path:e.string(),lines:e.number(),code:e.number(),complexity:e.number(),functions:e.number()}))}),a=e.object({platform:e.string(),arch:e.string(),nodeVersion:e.string(),cwd:e.string(),cpus:e.number(),memoryFreeGb:e.number(),memoryTotalGb:e.number()}),o=e.object({iso:e.string(),unix:e.number(),timezone:e.string(),formatted:e.string()}),s=e.object({passed:e.boolean(),errorCount:e.number(),warningCount:e.number(),topErrors:e.array(e.string())}),c=e.object({passed:e.boolean(),tsc:s,biome:s}),l=e.object({name:e.string(),definedIn:e.object({path:e.string(),line:e.number(),kind:e.string(),signature:e.string().optional()}).nullable(),importedBy:e.array(e.object({path:e.string(),line:e.number(),importStatement:e.string()})),referencedIn:e.array(e.object({path:e.string(),line:e.number(),context:e.string(),scope:e.string().optional()})),graphContext:e.object({definingModule:e.string().optional(),importedByModules:e.array(e.string()),siblingSymbols:e.array(e.string())}).nullable()});export{c as CheckOutputSchema,a as EnvOutputSchema,r as HealthOutputSchema,n as ListOutputSchema,i as MeasureOutputSchema,t as StatusOutputSchema,l as 
SymbolOutputSchema,o as TimeOutputSchema};
|
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
import{z as e}from"zod";function t(t){t.registerPrompt(`onboard`,{title:`Onboard Codebase`,description:`Analyze the codebase for first-time onboarding — runs all analyzers and produces a knowledge summary`,argsSchema:{path:e.string().optional().describe(`Path to analyze (default: workspace root)`)}},async({path:e})=>({messages:[{role:`user`,content:{type:`text`,text:[`Run the full onboarding workflow for "${e??`.`}"`,``,`1. \`onboard({ path: "${e??`.`}" })\` — full codebase analysis`,`2. \`produce_knowledge({ path: "${e??`.`}" })\` — generate synthesis`,"3. `remember` key findings as curated entries","4. `status` to verify index health"].join(`
|
|
2
|
+
`)}}]})),t.registerPrompt(`sessionStart`,{title:`Start KB Session`,description:`Initialize a KB session — check status, list knowledge, and resume from last checkpoint`},async()=>({messages:[{role:`user`,content:{type:`text`,text:[`Run the session start protocol:`,``,"1. `status({})` — check KB health and onboard state","2. `list()` — see stored knowledge entries",'3. `search({ query: "SESSION CHECKPOINT", origin: "curated" })` — resume prior work'].join(`
|
|
3
|
+
`)}}]})),t.registerPrompt(`sessionEnd`,{title:`End KB Session`,description:`Persist decisions and create a session checkpoint before ending`,argsSchema:{summary:e.string().describe(`Brief summary of decisions made, blockers encountered, and next steps`)}},async({summary:e})=>({messages:[{role:`user`,content:{type:`text`,text:[`Run the session end protocol:`,``,'1. `remember({ title: "Session checkpoint: '+e.slice(0,60)+`...", content: "`+e.replace(/"/g,`\\"`)+'", category: "conventions" })` — persist findings',"2. `reindex({})` — refresh search index if files changed",`3. Confirm session data saved`].join(`
|
|
4
|
+
`)}}]})),t.registerPrompt(`search`,{title:`Search Knowledge Base`,description:`Search the knowledge base with hybrid semantic + keyword search`,argsSchema:{query:e.string().describe(`Search query`)}},async({query:e})=>({messages:[{role:`user`,content:{type:`text`,text:`Search the knowledge base for: "${e}"\n\nUse \`search({ query: "${e.replace(/"/g,`\\"`)}" })\` to find relevant code, documentation, and curated knowledge.`}}]})),t.registerPrompt(`remember`,{title:`Remember Knowledge`,description:`Store a decision, convention, or finding as curated knowledge`,argsSchema:{title:e.string().describe(`Title of the knowledge entry`),content:e.string().describe(`Content to remember`),category:e.enum([`conventions`,`decisions`,`patterns`,`blockers`,`tasks`]).optional().describe(`Category (default: conventions)`)}},async({title:e,content:t,category:n})=>({messages:[{role:`user`,content:{type:`text`,text:`Store this knowledge:\n\n\`remember({ title: "${e.replace(/"/g,`\\"`)}", content: "${t.replace(/"/g,`\\"`).slice(0,200)}...", category: "${n??`conventions`}" })\``}}]})),t.registerPrompt(`planTask`,{title:`Plan a Task`,description:`Generate a reading plan and scope map for a development task`,argsSchema:{task:e.string().describe(`Description of the task to plan`)}},async({task:e})=>({messages:[{role:`user`,content:{type:`text`,text:[`Plan implementation for: "${e}"`,``,'1. `search({ query: "'+e.replace(/"/g,`\\"`)+'" })` — find related code and prior decisions','2. `scope_map({ task: "'+e.replace(/"/g,`\\"`)+'" })` — generate a reading plan',"3. For each recommended file, use `file_summary` then `compact` for detail","4. `blast_radius` on planned changes to assess impact"].join(`
|
|
5
|
+
`)}}]})),t.registerPrompt(`investigate`,{title:`Investigate Bug`,description:`Bug investigation workflow — parse error, find symbols, trace data flow, assess impact`,argsSchema:{error:e.string().describe(`Error message, stack trace, or bug description`)}},async({error:e})=>({messages:[{role:`user`,content:{type:`text`,text:[`Investigate this error:`,"```",e,"```",``,`Follow the bug investigation workflow:`,"1. `parse_output({ text: <error> })` — extract structured data from the error","2. `symbol({ name: <relevant symbol> })` — find definition and references",'3. `trace({ symbol: <symbol>, direction: "backward" })` — trace data flow to find root cause',"4. `blast_radius({ changed_files: [<affected files>] })` — assess fix impact"].join(`
|
|
6
|
+
`)}}]}))}export{t as registerPrompts};
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
//#region packages/server/src/resource-links.d.ts
|
|
2
|
+
/**
|
|
3
|
+
* Helpers for emitting resource_link content blocks in tool results.
|
|
4
|
+
* Centralises the `kb://curated/{path}` URI scheme so all tools stay consistent.
|
|
5
|
+
*/
|
|
6
|
+
/** Shape of a resource_link content block (matches MCP SDK ResourceLink). */
|
|
7
|
+
interface ResourceLinkBlock {
|
|
8
|
+
type: 'resource_link';
|
|
9
|
+
uri: string;
|
|
10
|
+
name: string;
|
|
11
|
+
description?: string;
|
|
12
|
+
mimeType?: string;
|
|
13
|
+
}
|
|
14
|
+
/**
|
|
15
|
+
* Build a resource_link content block for a single curated knowledge entry.
|
|
16
|
+
* Returns `undefined` for invalid paths so callers can safely filter.
|
|
17
|
+
*/
|
|
18
|
+
declare function curatedResourceLink(path: string | undefined | null, name?: string, description?: string): ResourceLinkBlock | undefined;
|
|
19
|
+
/**
|
|
20
|
+
* Build resource_link blocks for multiple curated entries.
|
|
21
|
+
* Deduplicates by URI and filters out invalid paths.
|
|
22
|
+
*/
|
|
23
|
+
declare function curatedResourceLinks(entries: ReadonlyArray<{
|
|
24
|
+
path: string;
|
|
25
|
+
title?: string;
|
|
26
|
+
category?: string;
|
|
27
|
+
}>): ResourceLinkBlock[];
|
|
28
|
+
/**
|
|
29
|
+
* Extract the curated path from a store `sourcePath`.
|
|
30
|
+
* Returns `undefined` for non-curated records.
|
|
31
|
+
*/
|
|
32
|
+
declare function extractCuratedPath(sourcePath: string): string | undefined;
|
|
33
|
+
//#endregion
|
|
34
|
+
export { ResourceLinkBlock, curatedResourceLink, curatedResourceLinks, extractCuratedPath };
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// packages/server/dist/resource-links.js — helpers for emitting MCP resource_link content
// blocks. Centralises the `kb://curated/${path}` URI scheme so all tools stay consistent.
// (Readable re-emit of the build output; exported names unchanged.)

/**
 * Build a resource_link content block for one curated entry.
 * Rejects falsy or non-string paths, as well as suspicious ones: paths containing
 * "..", absolute paths, or template-placeholder paths starting with "[".
 * Returns undefined on rejection so callers can safely filter.
 */
function e(path, displayName, description) {
  const rejected =
    !path ||
    typeof path !== `string` ||
    path.includes(`..`) ||
    path.startsWith(`/`) ||
    path.startsWith(`[`);
  if (rejected) return void 0;
  const block = {
    type: `resource_link`,
    uri: `kb://curated/${path}`,
    name: displayName ?? path,
    mimeType: `text/markdown`,
  };
  // Only attach `description` when truthy, mirroring the conditional spread upstream.
  return description ? { ...block, description } : block;
}

/**
 * Build resource_link blocks for many curated entries.
 * Invalid paths are dropped and duplicates are removed by URI; the entry's
 * category (when present) becomes a "[category]" description.
 */
function t(entries) {
  const seenUris = new Set();
  const links = [];
  for (const entry of entries) {
    const desc = entry.category ? `[${entry.category}]` : void 0;
    const link = e(entry.path, entry.title, desc);
    if (!link || seenUris.has(link.uri)) continue;
    seenUris.add(link.uri);
    links.push(link);
  }
  return links;
}

/**
 * Extract the curated-relative path from a store sourcePath by stripping the
 * ".ai/curated/" prefix (12 chars). Returns undefined for non-curated records.
 */
function n(sourcePath) {
  if (sourcePath.startsWith(`.ai/curated/`)) return sourcePath.slice(12);
  return void 0;
}

export { e as curatedResourceLink, t as curatedResourceLinks, n as extractCuratedPath };
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
import { CuratedKnowledgeManager } from "../curated-manager.js";
|
|
2
|
+
import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
|
|
3
|
+
|
|
4
|
+
//#region packages/server/src/resources/curated-resources.d.ts
|
|
5
|
+
/**
|
|
6
|
+
* Register curated knowledge as browsable MCP resources.
|
|
7
|
+
*
|
|
8
|
+
* - `kb://curated` — index listing all curated entries
|
|
9
|
+
* - `kb://curated/{path}` — individual entry by path (e.g. `decisions/use-lancedb.md`)
|
|
10
|
+
*/
|
|
11
|
+
declare function registerCuratedResources(server: McpServer, curated: CuratedKnowledgeManager): void;
|
|
12
|
+
//#endregion
|
|
13
|
+
export { registerCuratedResources };
|
|
@@ -0,0 +1,2 @@
|
|
|
1
|
+
import{ResourceTemplate as e}from"@modelcontextprotocol/sdk/server/mcp.js";function t(t,n){t.resource(`kb-curated-index`,`kb://curated`,{description:`Index of all curated knowledge entries`,mimeType:`text/markdown`},async()=>{let e=(await n.list()).map(e=>`- [${e.title}](kb://curated/${e.path}) — ${e.category}`);return{contents:[{uri:`kb://curated`,text:`# Curated Knowledge Index\n\n${e.length>0?e.join(`
|
|
2
|
+
`):`_No curated entries yet._`}`,mimeType:`text/markdown`}]}});let r=new e(`kb://curated/{path}`,{list:async()=>({resources:(await n.list()).map(e=>({uri:`kb://curated/${e.path}`,name:e.title,description:`[${e.category}] ${e.contentPreview?.slice(0,80)??``}`,mimeType:`text/markdown`}))})});t.resource(`kb-curated-entry`,r,{description:`A curated knowledge entry`,mimeType:`text/markdown`},async(e,t)=>{let r=t.path;if(!r)throw Error(`Missing path variable in curated resource URI`);let i=await n.read(r);return{contents:[{uri:e.toString(),text:`---\ntitle: ${i.title}\ncategory: ${i.category}\ntags: ${i.tags?.join(`, `)??``}\nversion: ${i.version??1}\n---\n\n${i.content??i.contentPreview??``}`,mimeType:`text/markdown`}]}})}export{t as registerCuratedResources};
|
|
@@ -1,7 +1,8 @@
|
|
|
1
|
+
import { CuratedKnowledgeManager } from "../curated-manager.js";
|
|
1
2
|
import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
|
|
2
3
|
import { IKnowledgeStore } from "@kb/store";
|
|
3
4
|
|
|
4
5
|
//#region packages/server/src/resources/resources.d.ts
|
|
5
|
-
declare function registerResources(server: McpServer, store: IKnowledgeStore): void;
|
|
6
|
+
declare function registerResources(server: McpServer, store: IKnowledgeStore, curated: CuratedKnowledgeManager): void;
|
|
6
7
|
//#endregion
|
|
7
8
|
export { registerResources };
|
|
@@ -1,2 +1,2 @@
|
|
|
1
|
-
function
|
|
2
|
-
`),mimeType:`text/plain`}]}))}export{
|
|
1
|
+
import{registerCuratedResources as e}from"./curated-resources.js";function t(t,n,r){t.resource(`kb-status`,`kb://status`,{description:`Current knowledge base status and statistics`,mimeType:`text/plain`},async()=>{let e=await n.getStats();return{contents:[{uri:`kb://status`,text:`Knowledge Base: ${e.totalRecords} records from ${e.totalFiles} files. Last indexed: ${e.lastIndexedAt??`Never`}`,mimeType:`text/plain`}]}}),t.resource(`kb-file-tree`,`kb://file-tree`,{description:`List of all indexed source files`,mimeType:`text/plain`},async()=>({contents:[{uri:`kb://file-tree`,text:(await n.listSourcePaths()).sort().join(`
|
|
2
|
+
`),mimeType:`text/plain`}]})),e(t,r)}export{t as registerResources};
|
|
@@ -33,7 +33,7 @@ declare function createServer(config: KBConfig): Promise<{
|
|
|
33
33
|
runInitialIndex: () => Promise<void>;
|
|
34
34
|
shutdown: () => Promise<void>;
|
|
35
35
|
}>;
|
|
36
|
-
declare const ALL_TOOL_NAMES: readonly ["analyze_dependencies", "analyze_diagram", "analyze_entry_points", "analyze_patterns", "analyze_structure", "analyze_symbols", "audit", "batch", "blast_radius", "changelog", "check", "checkpoint", "codemod", "compact", "data_transform", "dead_symbols", "delegate", "diff_parse", "digest", "encode", "env", "eval", "evidence_map", "file_summary", "find", "forge_classify", "forge_ground", "forget", "git_context", "graph", "guide", "health", "http", "lane", "list", "lookup", "measure", "onboard", "parse_output", "process", "produce_knowledge", "queue", "read", "regex_test", "reindex", "remember", "rename", "replay", "schema_validate", "scope_map", "search", "snippet", "stash", "status", "stratum_card", "symbol", "test_run", "time", "trace", "update", "watch", "web_fetch", "web_search", "workset"];
|
|
36
|
+
declare const ALL_TOOL_NAMES: readonly ["analyze_dependencies", "analyze_diagram", "analyze_entry_points", "analyze_patterns", "analyze_structure", "analyze_symbols", "audit", "batch", "blast_radius", "changelog", "check", "checkpoint", "codemod", "compact", "data_transform", "dead_symbols", "delegate", "diff_parse", "digest", "encode", "env", "eval", "evidence_map", "file_summary", "find", "forge_classify", "forge_ground", "forget", "git_context", "graph", "guide", "health", "http", "lane", "list", "lookup", "measure", "onboard", "parse_output", "process", "produce_knowledge", "queue", "read", "regex_test", "reindex", "remember", "rename", "replay", "restore", "schema_validate", "scope_map", "search", "snippet", "stash", "status", "stratum_card", "symbol", "test_run", "time", "trace", "update", "watch", "web_fetch", "web_search", "workset"];
|
|
37
37
|
declare function createLazyServer(config: KBConfig): {
|
|
38
38
|
server: McpServer; /** Call after MCP roots are resolved (or fallback decided) to start heavy init. */
|
|
39
39
|
startInit: () => void;
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{CuratedKnowledgeManager as e}from"./curated-manager.js";import{installReplayInterceptor as t}from"./replay-interceptor.js";import{registerResources as n}from"./resources/resources.js";import{registerAnalyzeDependenciesTool as r,registerAnalyzeDiagramTool as i,registerAnalyzeEntryPointsTool as a,registerAnalyzePatternsTool as o,registerAnalyzeStructureTool as s,registerAnalyzeSymbolsTool as c,registerBlastRadiusTool as l}from"./tools/analyze.tools.js";import{registerAuditTool as u}from"./tools/audit.tool.js";import{initBridgeComponents as d,registerErPullTool as f,registerErPushTool as p,registerErSyncStatusTool as m}from"./tools/bridge.tools.js";import{registerErEvolveReviewTool as h}from"./tools/evolution.tools.js";import{registerDigestTool as ee,registerEvidenceMapTool as te,registerForgeClassifyTool as ne,registerForgeGroundTool as re,registerStratumCardTool as ie}from"./tools/forge.tools.js";import{registerForgetTool as ae}from"./tools/forget.tool.js";import{registerGraphTool as oe}from"./tools/graph.tool.js";import{registerListTool as se}from"./tools/list.tool.js";import{registerLookupTool as ce}from"./tools/lookup.tool.js";import{registerOnboardTool as le}from"./tools/onboard.tool.js";import{registerErUpdatePolicyTool as ue}from"./tools/policy.tools.js";import{registerProduceKnowledgeTool as g}from"./tools/produce.tool.js";import{registerReadTool as _}from"./tools/read.tool.js";import{registerReindexTool as v}from"./tools/reindex.tool.js";import{registerRememberTool as y}from"./tools/remember.tool.js";import{registerReplayTool as b}from"./tools/replay.tool.js";import{registerSearchTool as x}from"./tools/search.tool.js";import{registerStatusTool as S}from"./tools/status.tool.js";import{registerBatchTool as C,registerCheckTool as w,registerCheckpointTool as T,registerCodemodTool as E,registerCompactTool as D,registerDataTransformTool as O,registerDeadSymbolsTool as k,registerDelegateTool as A,registerDiffParseTool as j,registerEvalTool as 
M,registerFileSummaryTool as N,registerFindTool as P,registerGitContextTool as F,registerGuideTool as I,registerHealthTool as L,registerLaneTool as R,registerParseOutputTool as z,registerProcessTool as B,registerQueueTool as V,registerRenameTool as H,registerScopeMapTool as U,registerStashTool as de,registerSymbolTool as fe,registerTestRunTool as pe,registerTraceTool as me,registerWatchTool as he,registerWebFetchTool as ge,registerWorksetTool as _e}from"./tools/toolkit.tools.js";import{registerUpdateTool as ve}from"./tools/update.tool.js";import{registerChangelogTool as ye,registerEncodeTool as be,registerEnvTool as xe,registerHttpTool as Se,registerMeasureTool as Ce,registerRegexTestTool as we,registerSchemaValidateTool as Te,registerSnippetTool as Ee,registerTimeTool as De,registerWebSearchTool as Oe}from"./tools/utility.tools.js";import{getCurrentVersion as W}from"./version-check.js";import{existsSync as ke,statSync as Ae}from"node:fs";import{resolve as je}from"node:path";import{KB_PATHS as Me,createLogger as Ne,serializeError as G}from"../../core/dist/index.js";import{initializeWasm as Pe}from"../../chunker/dist/index.js";import{OnnxEmbedder as Fe}from"../../embeddings/dist/index.js";import{EvolutionCollector as Ie,PolicyStore as Le}from"../../enterprise-bridge/dist/index.js";import{FileHashCache as Re,IncrementalIndexer as ze}from"../../indexer/dist/index.js";import{SqliteGraphStore as Be,createStore as Ve}from"../../store/dist/index.js";import{FileCache as K}from"../../tools/dist/index.js";import{McpServer as q}from"@modelcontextprotocol/sdk/server/mcp.js";const J=Ne(`server`);async function Y(t){J.info(`Initializing knowledge base components`);let n=new Fe({model:t.embedding.model,dimensions:t.embedding.dimensions});await n.initialize(),J.info(`Embedder loaded`,{modelId:n.modelId,dimensions:n.dimensions});let r=await Ve({backend:t.store.backend,path:t.store.path});await r.initialize(),J.info(`Store initialized`);let i=new ze(n,r),a=new 
Re(t.store.path);a.load(),i.setHashCache(a);let o=t.curated.path,s=new e(o,r,n),c=new Be({path:t.store.path});await c.initialize(),J.info(`Graph store initialized`),i.setGraphStore(c),await Pe()?J.info(`WASM tree-sitter enabled for AST analysis`):J.warn(`WASM tree-sitter not available; analyzers will use regex fallback`);let l=d(t.er),u=l?new Le(t.curated.path):void 0;u&&J.info(`Policy store initialized`,{ruleCount:u.getRules().length});let f=l?new Ie:void 0,p=je(t.sources[0]?.path??process.cwd(),Me.aiKb),m=ke(p),h;if(m)try{h=Ae(p).mtime.toISOString()}catch{}return J.info(`Onboard state detected`,{onboardComplete:m,onboardTimestamp:h}),{embedder:n,store:r,indexer:i,curated:s,graphStore:c,fileCache:new K,bridge:l,policyStore:u,evolutionCollector:f,onboardComplete:m,onboardTimestamp:h}}function X(e,t){let n=new q({name:t.serverName??`knowledge-base`,version:W()});return Z(n,e,t),n}function Z(e,d,W){t(e),x(e,d.embedder,d.store,d.graphStore,d.bridge,d.evolutionCollector),ce(e,d.store),S(e,d.store,d.graphStore,d.curated,{onboardComplete:d.onboardComplete,onboardTimestamp:d.onboardTimestamp}),v(e,d.indexer,W,d.curated,d.store),y(e,d.curated,d.policyStore,d.evolutionCollector),ve(e,d.curated),ae(e,d.curated),_(e,d.curated),se(e,d.curated),s(e,d.store,d.embedder),r(e,d.store,d.embedder),c(e,d.store,d.embedder),o(e,d.store,d.embedder),a(e,d.store,d.embedder),i(e,d.store,d.embedder),l(e,d.store,d.embedder,d.graphStore),g(e),le(e,d.store,d.embedder),oe(e,d.graphStore),u(e,d.store,d.embedder),D(e,d.embedder,d.fileCache),U(e,d.embedder,d.store),P(e,d.embedder,d.store),z(e),_e(e),w(e),C(e,d.embedder,d.store),fe(e,d.embedder,d.store,d.graphStore),M(e),pe(e),de(e),F(e),j(e),H(e),E(e),N(e,d.fileCache),T(e),O(e),me(e,d.embedder,d.store),B(e),he(e),k(e,d.embedder,d.store),A(e),L(e),R(e),V(e),ge(e),I(e),te(e),ee(e,d.embedder),ne(e),ie(e,d.embedder,d.fileCache),re(e,d.embedder,d.store),Oe(e),Se(e),we(e),be(e),Ce(e),ye(e),Te(e),Ee(e),xe(e),De(e),d.bridge&&(p(e,d.bridge,d.evolutionCollect
or),f(e,d.bridge),m(e,d.bridge)),d.policyStore&&ue(e,d.policyStore),d.evolutionCollector&&h(e,d.evolutionCollector),n(e,d.store),b(e)}async function He(e){let t=await Y(e),n=X(t,e);J.info(`MCP server configured`,{toolCount:$.length,resourceCount:2});let r=async()=>{try{let n=e.sources.map(e=>e.path).join(`, `);J.info(`Running initial index`,{sourcePaths:n});let r=await t.indexer.index(e,e=>{e.phase===`crawling`||e.phase===`done`||(e.phase===`chunking`&&e.currentFile&&J.debug(`Indexing file`,{current:e.filesProcessed+1,total:e.filesTotal,file:e.currentFile}),e.phase===`cleanup`&&J.debug(`Index cleanup`,{staleEntries:e.filesTotal-e.filesProcessed}))});J.info(`Initial index complete`,{filesProcessed:r.filesProcessed,filesSkipped:r.filesSkipped,chunksCreated:r.chunksCreated,durationMs:r.durationMs});try{await t.store.createFtsIndex()}catch(e){J.warn(`FTS index creation failed`,G(e))}try{let e=await t.curated.reindexAll();J.info(`Curated re-index complete`,{indexed:e.indexed})}catch(e){J.error(`Curated re-index failed`,G(e))}}catch(e){J.error(`Initial index failed; will retry on kb_reindex`,G(e))}},i=async()=>{J.info(`Shutting down`),await t.embedder.shutdown().catch(()=>{}),await t.graphStore.close().catch(()=>{}),await t.store.close(),process.exit(0)};process.on(`SIGINT`,i),process.on(`SIGTERM`,i);let a=process.ppid,o=setInterval(()=>{try{process.kill(a,0)}catch{J.info(`Parent process died; shutting down`,{parentPid:a}),clearInterval(o),i()}},5e3);return o.unref(),{server:n,runInitialIndex:r,shutdown:i}}const Ue=new 
Set(`batch.changelog.check.checkpoint.codemod.compact.data_transform.delegate.diff_parse.digest.encode.env.eval.evidence_map.file_summary.forge_classify.git_context.graph.guide.health.http.lane.measure.onboard.parse_output.process.queue.read.regex_test.reindex.remember.rename.replay.schema_validate.scope_map.snippet.stash.status.stratum_card.test_run.time.update.forget.list.watch.web_fetch.web_search.workset`.split(`.`)),Q=5e3,$=`analyze_dependencies.analyze_diagram.analyze_entry_points.analyze_patterns.analyze_structure.analyze_symbols.audit.batch.blast_radius.changelog.check.checkpoint.codemod.compact.data_transform.dead_symbols.delegate.diff_parse.digest.encode.env.eval.evidence_map.file_summary.find.forge_classify.forge_ground.forget.git_context.graph.guide.health.http.lane.list.lookup.measure.onboard.parse_output.process.produce_knowledge.queue.read.regex_test.reindex.remember.rename.replay.schema_validate.scope_map.search.snippet.stash.status.stratum_card.symbol.test_run.time.trace.update.watch.web_fetch.web_search.workset`.split(`.`);function We(e){let t=new q({name:e.serverName??`knowledge-base`,version:W()}),n=t.sendToolListChanged.bind(t);t.sendToolListChanged=()=>{};let r=$.map(e=>t.registerTool(e,{description:`${e} (initializing...)`,inputSchema:{}},async()=>({content:[{type:`text`,text:`KB is still initializing, please retry in a few seconds.`}]})));t.sendToolListChanged=n;let i=t.resource(`kb-status`,`kb://status`,{description:`Knowledge base status (initializing...)`,mimeType:`text/plain`},async()=>({contents:[{uri:`kb://status`,text:`KB is initializing...`,mimeType:`text/plain`}]})),a,o=new Promise(e=>{a=e}),s,c=new Promise(e=>{s=e}),l=()=>s?.(),u=(async()=>{await c;let n=await Y(e),o=t.sendToolListChanged.bind(t);t.sendToolListChanged=()=>{};for(let e of r)e.remove();i.remove(),Z(t,n,e),t.sendToolListChanged=o,t.sendToolListChanged();let s=t._registeredTools??{};for(let[e,t]of Object.entries(s)){if(Ue.has(e))continue;let 
r=t.handler;t.handler=async(...t)=>{if(!n.indexer.isIndexing)return r(...t);let i=new Promise(t=>setTimeout(()=>t({content:[{type:`text`,text:`⏳ KB is re-indexing. The tool "${e}" timed out waiting for index data (${Q/1e3}s).\n\nThe existing index may be temporarily locked. Please retry shortly — indexing will complete automatically.`}]}),Q));return Promise.race([r(...t),i])}}let l=Object.keys(s).length;l!==$.length&&J.warn(`ALL_TOOL_NAMES count mismatch`,{expectedToolCount:$.length,registeredToolCount:l}),J.info(`MCP server configured`,{toolCount:$.length,resourceCount:2}),a?.(n)})(),d=async()=>{let t=await o;try{let n=e.sources.map(e=>e.path).join(`, `);J.info(`Running initial index`,{sourcePaths:n});let r=await t.indexer.index(e,e=>{e.phase===`crawling`||e.phase===`done`||(e.phase===`chunking`&&e.currentFile&&J.debug(`Indexing file`,{current:e.filesProcessed+1,total:e.filesTotal,file:e.currentFile}),e.phase===`cleanup`&&J.debug(`Index cleanup`,{staleEntries:e.filesTotal-e.filesProcessed}))});J.info(`Initial index complete`,{filesProcessed:r.filesProcessed,filesSkipped:r.filesSkipped,chunksCreated:r.chunksCreated,durationMs:r.durationMs});try{await t.store.createFtsIndex()}catch(e){J.warn(`FTS index creation failed`,G(e))}try{let e=await t.curated.reindexAll();J.info(`Curated re-index complete`,{indexed:e.indexed})}catch(e){J.error(`Curated re-index failed`,G(e))}}catch(e){J.error(`Initial index failed; will retry on kb_reindex`,G(e))}},f=process.ppid,p=setInterval(()=>{try{process.kill(f,0)}catch{J.info(`Parent process died; shutting down`,{parentPid:f}),clearInterval(p),o.then(async e=>{await e.embedder.shutdown().catch(()=>{}),await e.graphStore.close().catch(()=>{}),await e.store.close().catch(()=>{})}).catch(()=>{}).finally(()=>process.exit(0))}},5e3);return p.unref(),{server:t,startInit:l,ready:u,runInitialIndex:d}}export{$ as ALL_TOOL_NAMES,We as createLazyServer,X as createMcpServer,He as createServer,Y as initializeKnowledgeBase,Z as registerMcpTools};
|
|
1
|
+
import{CuratedKnowledgeManager as e}from"./curated-manager.js";import{bridgeMcpLogging as t}from"./mcp-logging.js";import{registerPrompts as n}from"./prompts.js";import{installReplayInterceptor as r}from"./replay-interceptor.js";import{registerResources as i}from"./resources/resources.js";import{getToolMeta as a}from"./tool-metadata.js";import{registerAnalyzeDependenciesTool as o,registerAnalyzeDiagramTool as s,registerAnalyzeEntryPointsTool as c,registerAnalyzePatternsTool as l,registerAnalyzeStructureTool as u,registerAnalyzeSymbolsTool as d,registerBlastRadiusTool as f}from"./tools/analyze.tools.js";import{registerAuditTool as p}from"./tools/audit.tool.js";import{initBridgeComponents as m,registerErPullTool as h,registerErPushTool as g,registerErSyncStatusTool as ee}from"./tools/bridge.tools.js";import{registerCompactTool as te,registerDeadSymbolsTool as ne,registerFileSummaryTool as re,registerFindTool as ie,registerScopeMapTool as ae,registerSymbolTool as oe,registerTraceTool as se}from"./tools/context.tools.js";import{registerErEvolveReviewTool as ce}from"./tools/evolution.tools.js";import{registerBatchTool as le,registerCheckTool as ue,registerDelegateTool as _,registerEvalTool as v,registerParseOutputTool as y,registerTestRunTool as b}from"./tools/execution.tools.js";import{registerDigestTool as x,registerEvidenceMapTool as S,registerForgeClassifyTool as C,registerForgeGroundTool as w,registerStratumCardTool as T}from"./tools/forge.tools.js";import{registerForgetTool as E}from"./tools/forget.tool.js";import{registerGraphTool as D}from"./tools/graph.tool.js";import{registerGuideTool as O,registerHealthTool as k,registerProcessTool as A,registerWatchTool as j,registerWebFetchTool as M}from"./tools/infra.tools.js";import{registerListTool as N}from"./tools/list.tool.js";import{registerLookupTool as P}from"./tools/lookup.tool.js";import{registerCodemodTool as F,registerDataTransformTool as I,registerDiffParseTool as L,registerGitContextTool as 
R,registerRenameTool as z}from"./tools/manipulation.tools.js";import{registerOnboardTool as B}from"./tools/onboard.tool.js";import{registerCheckpointTool as V,registerLaneTool as H,registerQueueTool as U,registerStashTool as de,registerWorksetTool as fe}from"./tools/persistence.tools.js";import{registerErUpdatePolicyTool as pe}from"./tools/policy.tools.js";import{registerProduceKnowledgeTool as me}from"./tools/produce.tool.js";import{registerReadTool as he}from"./tools/read.tool.js";import{registerReindexTool as ge}from"./tools/reindex.tool.js";import{registerRememberTool as _e}from"./tools/remember.tool.js";import{registerReplayTool as ve}from"./tools/replay.tool.js";import{registerRestoreTool as ye}from"./tools/restore.tool.js";import{registerSearchTool as be}from"./tools/search.tool.js";import{registerStatusTool as xe}from"./tools/status.tool.js";import{registerUpdateTool as Se}from"./tools/update.tool.js";import{registerChangelogTool as Ce,registerEncodeTool as we,registerEnvTool as Te,registerHttpTool as Ee,registerMeasureTool as De,registerRegexTestTool as Oe,registerSchemaValidateTool as ke,registerSnippetTool as Ae,registerTimeTool as je,registerWebSearchTool as Me}from"./tools/utility.tools.js";import{getCurrentVersion as W}from"./version-check.js";import{existsSync as Ne,statSync as Pe}from"node:fs";import{resolve as Fe}from"node:path";import{KB_PATHS as Ie,createLogger as Le,serializeError as G}from"../../core/dist/index.js";import{initializeWasm as Re}from"../../chunker/dist/index.js";import{OnnxEmbedder as ze}from"../../embeddings/dist/index.js";import{EvolutionCollector as Be,PolicyStore as Ve}from"../../enterprise-bridge/dist/index.js";import{FileHashCache as K,IncrementalIndexer as He}from"../../indexer/dist/index.js";import{SqliteGraphStore as Ue,createStore as We}from"../../store/dist/index.js";import{FileCache as Ge}from"../../tools/dist/index.js";import{McpServer as q}from"@modelcontextprotocol/sdk/server/mcp.js";const J=Le(`server`);async 
function Y(t){J.info(`Initializing knowledge base components`);let n=new ze({model:t.embedding.model,dimensions:t.embedding.dimensions});await n.initialize(),J.info(`Embedder loaded`,{modelId:n.modelId,dimensions:n.dimensions});let r=await We({backend:t.store.backend,path:t.store.path});await r.initialize(),J.info(`Store initialized`);let i=new He(n,r),a=new K(t.store.path);a.load(),i.setHashCache(a);let o=t.curated.path,s=new e(o,r,n),c=new Ue({path:t.store.path});await c.initialize(),J.info(`Graph store initialized`),i.setGraphStore(c),await Re()?J.info(`WASM tree-sitter enabled for AST analysis`):J.warn(`WASM tree-sitter not available; analyzers will use regex fallback`);let l=m(t.er),u=l?new Ve(t.curated.path):void 0;u&&J.info(`Policy store initialized`,{ruleCount:u.getRules().length});let d=l?new Be:void 0,f=Fe(t.sources[0]?.path??process.cwd(),Ie.aiKb),p=Ne(f),h;if(p)try{h=Pe(f).mtime.toISOString()}catch{}return J.info(`Onboard state detected`,{onboardComplete:p,onboardTimestamp:h}),{embedder:n,store:r,indexer:i,curated:s,graphStore:c,fileCache:new Ge,bridge:l,policyStore:u,evolutionCollector:d,onboardComplete:p,onboardTimestamp:h}}function X(e,r){let i=new q({name:r.serverName??`knowledge-base`,version:W()},{capabilities:{logging:{}}});return t(i),Z(i,e,r),n(i),i}function 
Z(e,t,n){r(e),be(e,t.embedder,t.store,t.graphStore,t.bridge,t.evolutionCollector),P(e,t.store),xe(e,t.store,t.graphStore,t.curated,{onboardComplete:t.onboardComplete,onboardTimestamp:t.onboardTimestamp}),ge(e,t.indexer,n,t.curated,t.store),_e(e,t.curated,t.policyStore,t.evolutionCollector),Se(e,t.curated),E(e,t.curated),he(e,t.curated),N(e,t.curated),u(e,t.store,t.embedder),o(e,t.store,t.embedder),d(e,t.store,t.embedder),l(e,t.store,t.embedder),c(e,t.store,t.embedder),s(e,t.store,t.embedder),f(e,t.store,t.embedder,t.graphStore),me(e),B(e,t.store,t.embedder),D(e,t.graphStore),p(e,t.store,t.embedder),te(e,t.embedder,t.fileCache),ae(e,t.embedder,t.store),ie(e,t.embedder,t.store),y(e),fe(e),ue(e),le(e,t.embedder,t.store),oe(e,t.embedder,t.store,t.graphStore),v(e),b(e),de(e),R(e),L(e),z(e),F(e),ye(e),re(e,t.fileCache),V(e),I(e),se(e,t.embedder,t.store),A(e),j(e),ne(e,t.embedder,t.store),_(e),k(e),H(e),U(e),M(e),O(e),S(e),x(e,t.embedder),C(e),T(e,t.embedder,t.fileCache),w(e,t.embedder,t.store),Me(e),Ee(e),Oe(e),we(e),De(e),Ce(e),ke(e),Ae(e),Te(e),je(e),t.bridge&&(g(e,t.bridge,t.evolutionCollector),h(e,t.bridge),ee(e,t.bridge)),t.policyStore&&pe(e,t.policyStore),t.evolutionCollector&&ce(e,t.evolutionCollector),i(e,t.store,t.curated),ve(e)}async function Ke(e){let t=await Y(e),n=X(t,e);J.info(`MCP server configured`,{toolCount:$.length,resourceCount:2});let r=async()=>{try{let n=e.sources.map(e=>e.path).join(`, `);J.info(`Running initial index`,{sourcePaths:n});let r=await t.indexer.index(e,e=>{e.phase===`crawling`||e.phase===`done`||(e.phase===`chunking`&&e.currentFile&&J.debug(`Indexing file`,{current:e.filesProcessed+1,total:e.filesTotal,file:e.currentFile}),e.phase===`cleanup`&&J.debug(`Index cleanup`,{staleEntries:e.filesTotal-e.filesProcessed}))});J.info(`Initial index complete`,{filesProcessed:r.filesProcessed,filesSkipped:r.filesSkipped,chunksCreated:r.chunksCreated,durationMs:r.durationMs});try{await t.store.createFtsIndex()}catch(e){J.warn(`FTS index creation 
failed`,G(e))}try{let e=await t.curated.reindexAll();J.info(`Curated re-index complete`,{indexed:e.indexed})}catch(e){J.error(`Curated re-index failed`,G(e))}}catch(e){J.error(`Initial index failed; will retry on kb_reindex`,G(e))}},i=async()=>{J.info(`Shutting down`),await t.embedder.shutdown().catch(()=>{}),await t.graphStore.close().catch(()=>{}),await t.store.close(),process.exit(0)};process.on(`SIGINT`,i),process.on(`SIGTERM`,i);let a=process.ppid,o=setInterval(()=>{try{process.kill(a,0)}catch{J.info(`Parent process died; shutting down`,{parentPid:a}),clearInterval(o),i()}},5e3);return o.unref(),{server:n,runInitialIndex:r,shutdown:i}}const qe=new Set(`batch.changelog.check.checkpoint.codemod.compact.data_transform.delegate.diff_parse.digest.encode.env.eval.evidence_map.file_summary.forge_classify.git_context.graph.guide.health.http.lane.measure.onboard.parse_output.process.queue.read.regex_test.reindex.remember.rename.replay.restore.schema_validate.scope_map.snippet.stash.status.stratum_card.test_run.time.update.forget.list.watch.web_fetch.web_search.workset`.split(`.`)),Q=5e3,$=`analyze_dependencies.analyze_diagram.analyze_entry_points.analyze_patterns.analyze_structure.analyze_symbols.audit.batch.blast_radius.changelog.check.checkpoint.codemod.compact.data_transform.dead_symbols.delegate.diff_parse.digest.encode.env.eval.evidence_map.file_summary.find.forge_classify.forge_ground.forget.git_context.graph.guide.health.http.lane.list.lookup.measure.onboard.parse_output.process.produce_knowledge.queue.read.regex_test.reindex.remember.rename.replay.restore.schema_validate.scope_map.search.snippet.stash.status.stratum_card.symbol.test_run.time.trace.update.watch.web_fetch.web_search.workset`.split(`.`);function Je(e){let r=new q({name:e.serverName??`knowledge-base`,version:W()},{capabilities:{logging:{}}});t(r);let i=r.sendToolListChanged.bind(r);r.sendToolListChanged=()=>{};let o=$.map(e=>{let t=a(e);return r.registerTool(e,{title:t.title,description:`${e} 
(initializing...)`,inputSchema:{},annotations:t.annotations},async()=>({content:[{type:`text`,text:`KB is still initializing, please retry in a few seconds.`}]}))});r.sendToolListChanged=i;let s=r.resource(`kb-status`,`kb://status`,{description:`Knowledge base status (initializing...)`,mimeType:`text/plain`},async()=>({contents:[{uri:`kb://status`,text:`KB is initializing...`,mimeType:`text/plain`}]})),c,l=new Promise(e=>{c=e}),u,d=new Promise(e=>{u=e}),f=()=>u?.(),p=(async()=>{await d;let t=await Y(e),i=r.sendToolListChanged.bind(r);r.sendToolListChanged=()=>{};for(let e of o)e.remove();s.remove(),Z(r,t,e),n(r),r.sendToolListChanged=i,r.sendToolListChanged();let a=r._registeredTools??{};for(let[e,n]of Object.entries(a)){if(qe.has(e))continue;let r=n.handler;n.handler=async(...n)=>{if(!t.indexer.isIndexing)return r(...n);let i=new Promise(t=>setTimeout(()=>t({content:[{type:`text`,text:`⏳ KB is re-indexing. The tool "${e}" timed out waiting for index data (${Q/1e3}s).\n\nThe existing index may be temporarily locked. 
Please retry shortly — indexing will complete automatically.`}]}),Q));return Promise.race([r(...n),i])}}let l=Object.keys(a).length;l!==$.length&&J.warn(`ALL_TOOL_NAMES count mismatch`,{expectedToolCount:$.length,registeredToolCount:l}),J.info(`MCP server configured`,{toolCount:$.length,resourceCount:4}),c?.(t)})(),m=async()=>{let t=await l;try{let n=e.sources.map(e=>e.path).join(`, `);J.info(`Running initial index`,{sourcePaths:n});let r=await t.indexer.index(e,e=>{e.phase===`crawling`||e.phase===`done`||(e.phase===`chunking`&&e.currentFile&&J.debug(`Indexing file`,{current:e.filesProcessed+1,total:e.filesTotal,file:e.currentFile}),e.phase===`cleanup`&&J.debug(`Index cleanup`,{staleEntries:e.filesTotal-e.filesProcessed}))});J.info(`Initial index complete`,{filesProcessed:r.filesProcessed,filesSkipped:r.filesSkipped,chunksCreated:r.chunksCreated,durationMs:r.durationMs});try{await t.store.createFtsIndex()}catch(e){J.warn(`FTS index creation failed`,G(e))}try{let e=await t.curated.reindexAll();J.info(`Curated re-index complete`,{indexed:e.indexed})}catch(e){J.error(`Curated re-index failed`,G(e))}}catch(e){J.error(`Initial index failed; will retry on kb_reindex`,G(e))}},h=process.ppid,g=setInterval(()=>{try{process.kill(h,0)}catch{J.info(`Parent process died; shutting down`,{parentPid:h}),clearInterval(g),l.then(async e=>{await e.embedder.shutdown().catch(()=>{}),await e.graphStore.close().catch(()=>{}),await e.store.close().catch(()=>{})}).catch(()=>{}).finally(()=>process.exit(0))}},5e3);return g.unref(),{server:r,startInit:f,ready:p,runInitialIndex:m}}export{$ as ALL_TOOL_NAMES,Je as createLazyServer,X as createMcpServer,Ke as createServer,Y as initializeKnowledgeBase,Z as registerMcpTools};
|