@vpxa/kb 0.1.16 → 0.1.18
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +17 -17
- package/package.json +1 -1
- package/packages/cli/dist/commands/init/adapters.js +1 -1
- package/packages/cli/dist/commands/init/constants.d.ts +1 -1
- package/packages/cli/dist/commands/init/index.d.ts +1 -1
- package/packages/cli/dist/commands/init/index.js +4 -4
- package/packages/cli/dist/commands/init/scaffold.js +1 -1
- package/packages/cli/dist/commands/init/user.d.ts +49 -0
- package/packages/cli/dist/commands/init/user.js +6 -0
- package/packages/cli/dist/commands/system.js +2 -2
- package/packages/core/dist/global-registry.d.ts +5 -4
- package/packages/core/dist/global-registry.js +1 -1
- package/packages/core/dist/index.d.ts +2 -2
- package/packages/core/dist/index.js +1 -1
- package/packages/embeddings/dist/embedder.interface.d.ts +1 -1
- package/packages/embeddings/dist/onnx-embedder.d.ts +1 -1
- package/packages/embeddings/dist/onnx-embedder.js +1 -1
- package/packages/indexer/dist/hash-cache.d.ts +24 -0
- package/packages/indexer/dist/hash-cache.js +1 -0
- package/packages/indexer/dist/incremental-indexer.d.ts +5 -1
- package/packages/indexer/dist/incremental-indexer.js +1 -1
- package/packages/indexer/dist/index.d.ts +2 -1
- package/packages/indexer/dist/index.js +1 -1
- package/packages/server/dist/config.js +1 -1
- package/packages/server/dist/cross-workspace.js +1 -1
- package/packages/server/dist/server.js +1 -1
- package/packages/server/dist/tools/analyze.tools.d.ts +2 -2
- package/packages/server/dist/tools/analyze.tools.js +2 -1
- package/packages/server/dist/tools/search.tool.js +1 -1
- package/packages/server/dist/tools/toolkit.tools.d.ts +2 -2
- package/packages/server/dist/tools/toolkit.tools.js +3 -3
- package/packages/store/dist/lance-store.js +1 -1
- package/packages/tools/dist/dead-symbols.d.ts +0 -4
- package/packages/tools/dist/dead-symbols.js +1 -1
- package/packages/tools/dist/index.d.ts +2 -2
- package/packages/tools/dist/symbol.d.ts +14 -2
- package/packages/tools/dist/symbol.js +3 -3
- package/packages/tui/dist/App.d.ts +1 -1
- package/packages/tui/dist/{embedder.interface-D4ew0HPW.d.ts → embedder.interface-IFCBpOlX.d.ts} +1 -1
- package/packages/tui/dist/{index-B9VpfVPP.d.ts → index-C8NmOF18.d.ts} +1 -1
- package/packages/tui/dist/index.d.ts +1 -1
- package/packages/tui/dist/panels/SearchPanel.d.ts +1 -1
- package/packages/cli/dist/commands/init/global.d.ts +0 -34
- package/packages/cli/dist/commands/init/global.js +0 -5
- package/scaffold/copilot/agents/Architect-Reviewer-Alpha.agent.md +0 -21
- package/scaffold/copilot/agents/Architect-Reviewer-Beta.agent.md +0 -21
- package/scaffold/copilot/agents/Documenter.agent.md +0 -42
- package/scaffold/copilot/agents/Orchestrator.agent.md +0 -104
- package/scaffold/copilot/agents/Planner.agent.md +0 -54
- package/scaffold/copilot/agents/Refactor.agent.md +0 -36
- package/scaffold/copilot/agents/Researcher-Alpha.agent.md +0 -20
- package/scaffold/copilot/agents/Researcher-Beta.agent.md +0 -20
- package/scaffold/copilot/agents/Researcher-Delta.agent.md +0 -20
- package/scaffold/copilot/agents/Researcher-Gamma.agent.md +0 -20
package/README.md
CHANGED
|
@@ -29,21 +29,21 @@ The KB auto-indexes configured source directories on startup, stores embeddings
|
|
|
29
29
|
|
|
30
30
|
## Quick Start
|
|
31
31
|
|
|
32
|
-
###
|
|
32
|
+
### User-Level Install (recommended for multi-project setups)
|
|
33
33
|
|
|
34
34
|
```bash
|
|
35
35
|
# Install once, works across all your projects
|
|
36
|
-
npx @vpxa/kb init --
|
|
36
|
+
npx @vpxa/kb init --user
|
|
37
37
|
|
|
38
38
|
# Then in any workspace, scaffold instructions only
|
|
39
39
|
npx @vpxa/kb init
|
|
40
40
|
```
|
|
41
41
|
|
|
42
|
-
###
|
|
42
|
+
### Workspace Install (per-project, self-contained)
|
|
43
43
|
|
|
44
44
|
```bash
|
|
45
|
-
# Full
|
|
46
|
-
npx @vpxa/kb init --
|
|
45
|
+
# Full workspace initialization
|
|
46
|
+
npx @vpxa/kb init --workspace
|
|
47
47
|
|
|
48
48
|
# Index and search
|
|
49
49
|
npx @vpxa/kb reindex
|
|
@@ -58,15 +58,15 @@ npx @vpxa/kb init --force # Overwrite all scaffold/skill files
|
|
|
58
58
|
npx @vpxa/kb init --guide # Check which files are outdated
|
|
59
59
|
```
|
|
60
60
|
|
|
61
|
-
> **Note:** In
|
|
61
|
+
> **Note:** In workspace mode, once `@vpxa/kb` is installed locally, you can use the short `kb` command (e.g. `kb search`, `kb serve`) since the local binary takes precedence.
|
|
62
62
|
|
|
63
|
-
##
|
|
63
|
+
## User-Level vs Workspace Mode
|
|
64
64
|
|
|
65
65
|
KB supports two installation modes:
|
|
66
66
|
|
|
67
|
-
| |
|
|
67
|
+
| | User-Level | Workspace |
|
|
68
68
|
|---|---|---|
|
|
69
|
-
| **Install** | `kb init --
|
|
69
|
+
| **Install** | `kb init --user` (once) | `kb init --workspace` (per project) |
|
|
70
70
|
| **MCP config** | User-level (IDE-wide) | `.vscode/mcp.json` (workspace) |
|
|
71
71
|
| **Data store** | `~/.kb-data/<partition>/` | `.kb-data/store/` (in project) |
|
|
72
72
|
| **Skills** | `~/.kb-data/skills/` | `.github/skills/` (in project) |
|
|
@@ -74,20 +74,20 @@ KB supports two installation modes:
|
|
|
74
74
|
|
|
75
75
|
### How it works
|
|
76
76
|
|
|
77
|
-
- **`kb init --
|
|
78
|
-
- **`kb init`** (smart default) — If
|
|
79
|
-
- **`kb init --
|
|
77
|
+
- **`kb init --user`** — Installs the MCP server in your user-level IDE config (VS Code, Cursor, Claude Code, Windsurf). Creates `~/.kb-data/` for data. Skills are shared. The server auto-indexes each workspace it's opened in.
|
|
78
|
+
- **`kb init`** (smart default) — If user-level is installed, scaffolds workspace-only files (AGENTS.md, instructions, curated directories). If not, does a full workspace install.
|
|
79
|
+
- **`kb init --workspace`** — Traditional per-project install with full local config and data store.
|
|
80
80
|
|
|
81
81
|
### Checking your mode
|
|
82
82
|
|
|
83
83
|
```bash
|
|
84
84
|
kb status
|
|
85
|
-
# Mode:
|
|
85
|
+
# Mode: user (workspace scaffolded)
|
|
86
86
|
# Data: ~/.kb-data/my-project-a1b2c3d4/
|
|
87
87
|
# Registry: 3 workspace(s) enrolled
|
|
88
88
|
```
|
|
89
89
|
|
|
90
|
-
### Cross-workspace search (
|
|
90
|
+
### Cross-workspace search (user-level mode only)
|
|
91
91
|
|
|
92
92
|
```
|
|
93
93
|
search({ query: "error handling", workspaces: ["*"] }) # All workspaces
|
|
@@ -241,7 +241,7 @@ After `kb init`, your `.vscode/mcp.json` is configured automatically:
|
|
|
241
241
|
}
|
|
242
242
|
```
|
|
243
243
|
|
|
244
|
-
> **
|
|
244
|
+
> **User-level mode:** When installed with `kb init --user`, the MCP server is configured at the user level — no per-project `mcp.json` needed. The server auto-detects and indexes each workspace.
|
|
245
245
|
|
|
246
246
|
## CLI Usage
|
|
247
247
|
|
|
@@ -313,7 +313,7 @@ kb status
|
|
|
313
313
|
kb reindex [--full]
|
|
314
314
|
kb onboard <path> [--generate] [--out-dir <dir>]
|
|
315
315
|
kb serve [--transport stdio|http] [--port N]
|
|
316
|
-
kb init [--
|
|
316
|
+
kb init [--user|--workspace] [--force] [--guide]
|
|
317
317
|
```
|
|
318
318
|
|
|
319
319
|
## Configuration
|
|
@@ -387,7 +387,7 @@ Find relevant code, docs, patterns, and curated knowledge using hybrid (vector +
|
|
|
387
387
|
| `origin` | enum | no | — | Filter: `indexed` (from files), `curated` (agent memory), `produced` (auto-generated) |
|
|
388
388
|
| `category` | string | no | — | Filter by curated category (e.g., `decisions`, `patterns`) |
|
|
389
389
|
| `tags` | string[] | no | — | Filter by tags (OR matching) |
|
|
390
|
-
| `workspaces` | string[] | no | — | Cross-workspace search: partition names, folder basenames, or `["*"]` for all.
|
|
390
|
+
| `workspaces` | string[] | no | — | Cross-workspace search: partition names, folder basenames, or `["*"]` for all. User-level mode only. |
|
|
391
391
|
| `min_score` | number (0–1) | no | 0.25 | Minimum similarity score threshold |
|
|
392
392
|
|
|
393
393
|
**Returns**: Ranked results with score, source path, content type, line range, heading path, origin, tags, and full content text. Each response includes a `_Next:` hint suggesting logical follow-up tools.
|
package/package.json
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
import{MCP_SERVER_ENTRY as e}from"./constants.js";import{buildAgentsMd as t,buildCopilotInstructions as n}from"./templates.js";import{existsSync as r,mkdirSync as i,writeFileSync as a}from"node:fs";import{basename as o,resolve as s}from"node:path";function c(e){return r(s(e,`.cursor`))?`cursor`:r(s(e,`.claude`))?`claude-code`:r(s(e,`.windsurf`))?`windsurf`:`copilot`}function l(t){return{servers:{[t]:{...e}}}}function u(t){let{type:n,...r}=e;return{mcpServers:{[t]:r}}}const d={scaffoldDir:`general`,writeMcpConfig(e,t){let n=s(e,`.vscode`),
|
|
1
|
+
import{MCP_SERVER_ENTRY as e}from"./constants.js";import{buildAgentsMd as t,buildCopilotInstructions as n}from"./templates.js";import{existsSync as r,mkdirSync as i,writeFileSync as a}from"node:fs";import{basename as o,resolve as s}from"node:path";function c(e){return r(s(e,`.cursor`))?`cursor`:r(s(e,`.claude`))?`claude-code`:r(s(e,`.windsurf`))?`windsurf`:`copilot`}function l(t){return{servers:{[t]:{...e}}}}function u(t){let{type:n,...r}=e;return{mcpServers:{[t]:r}}}const d={scaffoldDir:`general`,writeMcpConfig(e,t){let n=s(e,`.vscode`),o=s(n,`mcp.json`);r(o)||(i(n,{recursive:!0}),a(o,`${JSON.stringify(l(t),null,2)}\n`,`utf-8`),console.log(` Created .vscode/mcp.json`))},writeInstructions(e,t){let c=s(e,`.github`),l=s(c,`copilot-instructions.md`);r(l)||(i(c,{recursive:!0}),a(l,n(o(e),t),`utf-8`),console.log(` Created .github/copilot-instructions.md`))},writeAgentsMd(e,n){let i=s(e,`AGENTS.md`);r(i)||(a(i,t(o(e),n),`utf-8`),console.log(` Created AGENTS.md`))}},f={scaffoldDir:`general`,writeMcpConfig(e,t){let n=s(e,`.mcp.json`);r(n)||(a(n,`${JSON.stringify(u(t),null,2)}\n`,`utf-8`),console.log(` Created .mcp.json`))},writeInstructions(e,i){let c=s(e,`CLAUDE.md`);if(!r(c)){let r=o(e);a(c,`${n(r,i)}\n---\n\n${t(r,i)}`,`utf-8`),console.log(` Created CLAUDE.md`)}},writeAgentsMd(e,t){}},p={scaffoldDir:`general`,writeMcpConfig(e,t){let n=s(e,`.cursor`),o=s(n,`mcp.json`);r(o)||(i(n,{recursive:!0}),a(o,`${JSON.stringify(u(t),null,2)}\n`,`utf-8`),console.log(` Created .cursor/mcp.json`))},writeInstructions(e,c){let l=s(e,`.cursor`,`rules`),u=s(l,`kb.mdc`);if(!r(u)){i(l,{recursive:!0});let r=o(e);a(u,`${n(r,c)}\n---\n\n${t(r,c)}`,`utf-8`),console.log(` Created .cursor/rules/kb.mdc`)}},writeAgentsMd(e,t){}},m={scaffoldDir:`general`,writeMcpConfig(e,t){let n=s(e,`.vscode`),o=s(n,`mcp.json`);r(o)||(i(n,{recursive:!0}),a(o,`${JSON.stringify(l(t),null,2)}\n`,`utf-8`),console.log(` Created .vscode/mcp.json (Windsurf-compatible)`))},writeInstructions(e,i){let 
c=s(e,`.windsurfrules`);if(!r(c)){let r=o(e);a(c,`${n(r,i)}\n---\n\n${t(r,i)}`,`utf-8`),console.log(` Created .windsurfrules`)}},writeAgentsMd(e,t){}};function h(e){switch(e){case`copilot`:return d;case`claude-code`:return f;case`cursor`:return p;case`windsurf`:return m}}export{f as claudeCodeAdapter,d as copilotAdapter,p as cursorAdapter,c as detectIde,h as getAdapter,m as windsurfAdapter};
|
|
@@ -2,7 +2,7 @@
|
|
|
2
2
|
/**
|
|
3
3
|
* Init module — shared constants.
|
|
4
4
|
*
|
|
5
|
-
* Single source of truth for lists used by both
|
|
5
|
+
* Single source of truth for lists used by both workspace and user-level init.
|
|
6
6
|
*/
|
|
7
7
|
/** The MCP server name used in all IDE configs. */
|
|
8
8
|
declare const SERVER_NAME = "knowledge-base";
|
|
@@ -17,7 +17,7 @@ declare function initProject(options: {
|
|
|
17
17
|
force: boolean;
|
|
18
18
|
}): Promise<void>;
|
|
19
19
|
/**
|
|
20
|
-
* Smart init — scaffold-only if
|
|
20
|
+
* Smart init — scaffold-only if user-level detected, full workspace otherwise.
|
|
21
21
|
*/
|
|
22
22
|
declare function initSmart(options: {
|
|
23
23
|
force: boolean;
|
|
@@ -1,5 +1,5 @@
|
|
|
1
|
-
import{SKILL_NAMES as e}from"./constants.js";import{detectIde as t,getAdapter as n}from"./adapters.js";import{ensureGitignore as r,getServerName as i,writeKbConfig as a}from"./config.js";import{createCuratedDirs as o}from"./curated.js";import{copyScaffold as s,copySkills as c,guideScaffold as l,guideSkills as u}from"./scaffold.js";import{dirname as d,resolve as f}from"node:path";import{fileURLToPath as p}from"node:url";import{
|
|
1
|
+
import{SKILL_NAMES as e}from"./constants.js";import{detectIde as t,getAdapter as n}from"./adapters.js";import{ensureGitignore as r,getServerName as i,writeKbConfig as a}from"./config.js";import{createCuratedDirs as o}from"./curated.js";import{copyScaffold as s,copySkills as c,guideScaffold as l,guideSkills as u}from"./scaffold.js";import{dirname as d,resolve as f}from"node:path";import{fileURLToPath as p}from"node:url";import{isUserInstalled as m}from"../../../../core/dist/index.js";async function h(l){let u=process.cwd();if(!a(u,l.force))return;r(u);let h=i(),g=n(t(u));g.writeMcpConfig(u,h),g.writeInstructions(u,h),g.writeAgentsMd(u,h);let _=f(d(p(import.meta.url)),`..`,`..`,`..`,`..`,`..`);c(u,_,[...e],l.force),s(u,_,g.scaffoldDir,l.force),o(u),console.log(`
|
|
2
2
|
Knowledge base initialized! Next steps:`),console.log(` kb reindex Index your codebase`),console.log(` kb search Search indexed content`),console.log(` kb serve Start MCP server for IDE integration`),m()&&console.log(`
|
|
3
|
-
Note:
|
|
4
|
-
Workspace scaffolded for
|
|
5
|
-
The
|
|
3
|
+
Note: User-level KB is also installed. This workspace uses its own local data store.`)}async function g(e){m()?await _(e):await h(e)}async function _(e){let a=process.cwd(),c=i(),l=n(t(a));l.writeInstructions(a,c),l.writeAgentsMd(a,c),s(a,f(d(p(import.meta.url)),`..`,`..`,`..`,`..`,`..`),l.scaffoldDir,e.force),o(a),r(a),console.log(`
|
|
4
|
+
Workspace scaffolded for user-level KB! Files added:`),console.log(` Instruction files (AGENTS.md, copilot-instructions.md, etc.)`),console.log(` .ai/curated/ directories`),console.log(` .github/agents/ & .github/prompts/`),console.log(`
|
|
5
|
+
The user-level KB server will auto-index this workspace when opened in your IDE.`)}async function v(){let r=process.cwd(),i=n(t(r)),a=f(d(p(import.meta.url)),`..`,`..`,`..`,`..`,`..`),o=[...u(r,a,[...e]),...l(r,a,i.scaffoldDir)],s={summary:{total:o.length,new:o.filter(e=>e.status===`new`).length,outdated:o.filter(e=>e.status===`outdated`).length,current:o.filter(e=>e.status===`current`).length},files:o};console.log(JSON.stringify(s,null,2))}export{v as guideProject,h as initProject,g as initSmart};
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{copyFileSync as e,existsSync as t,mkdirSync as n,readFileSync as r,readdirSync as i,statSync as a}from"node:fs";import{resolve as o}from"node:path";function s(r,c,l=``,u=!1){n(c,{recursive:!0});for(let n of i(r)){let i=o(r,n),d=o(c,n),f=l?`${l}/${n}`:n;
|
|
1
|
+
import{copyFileSync as e,existsSync as t,mkdirSync as n,readFileSync as r,readdirSync as i,statSync as a}from"node:fs";import{resolve as o}from"node:path";function s(r,c,l=``,u=!1){n(c,{recursive:!0});for(let n of i(r)){let i=o(r,n),d=o(c,n),f=l?`${l}/${n}`:n;a(i).isDirectory()?s(i,d,f,u):(u||!t(d))&&e(i,d)}}function c(e,n,s,l){if(t(e))for(let u of i(e)){let i=o(e,u),d=s?`${s}/${u}`:u;if(a(i).isDirectory())c(i,o(n,u),d,l);else{let e=o(n,u),a=r(i,`utf-8`);t(e)?a===r(e,`utf-8`)?l.push({status:`current`,relativePath:d,sourcePath:i}):l.push({status:`outdated`,relativePath:d,sourcePath:i,content:a}):l.push({status:`new`,relativePath:d,sourcePath:i,content:a})}}}function l(e,n,r,i=!1){let a=o(n,`scaffold`,r);for(let n of[`agents`,`prompts`]){let r=o(a,n),c=o(e,`.github`,n);t(r)&&s(r,c,``,i)}}function u(e,n,r,i=!1){for(let a of r){let r=o(n,`skills`,a);t(r)&&s(r,o(e,`.github`,`skills`,a),`skills/${a}`,i)}}function d(e,t,n){let r=[],i=o(t,`scaffold`,n);for(let t of[`agents`,`prompts`])c(o(i,t),o(e,`.github`,t),t,r);return r}function f(e,n,r){let i=[];for(let a of r){let r=o(n,`skills`,a);t(r)&&c(r,o(e,`.github`,`skills`,a),`skills/${a}`,i)}return i}export{s as copyDirectoryRecursive,l as copyScaffold,u as copySkills,d as guideScaffold,f as guideSkills};
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
//#region packages/cli/src/commands/init/user.d.ts
|
|
2
|
+
/**
|
|
3
|
+
* `kb init --user` — configure KB as a user-level MCP server.
|
|
4
|
+
*
|
|
5
|
+
* Auto-detects all installed IDEs, writes user-level mcp.json for each,
|
|
6
|
+
* installs skills to a user-level location, and creates the shared data store.
|
|
7
|
+
*/
|
|
8
|
+
/** Represents a user-level IDE config location. */
|
|
9
|
+
interface UserLevelIdePath {
|
|
10
|
+
ide: string;
|
|
11
|
+
configDir: string;
|
|
12
|
+
mcpConfigPath: string;
|
|
13
|
+
/**
|
|
14
|
+
* User-level scaffold root for agents/prompts/skills.
|
|
15
|
+
* VS Code: ~/.github, Claude Code: ~/.claude, Cursor: ~/.cursor, Windsurf: ~/.windsurf.
|
|
16
|
+
* Null if the IDE has no user-level scaffold support.
|
|
17
|
+
*/
|
|
18
|
+
globalScaffoldRoot: string | null;
|
|
19
|
+
}
|
|
20
|
+
/**
|
|
21
|
+
* Detect all installed IDEs by checking if their user-level config directory exists.
|
|
22
|
+
*/
|
|
23
|
+
declare function detectInstalledIdes(): UserLevelIdePath[];
|
|
24
|
+
/**
|
|
25
|
+
* Write or merge the KB server entry into a user-level mcp.json.
|
|
26
|
+
* Preserves all existing non-KB entries. Backs up existing file before writing.
|
|
27
|
+
*/
|
|
28
|
+
declare function writeUserLevelMcpConfig(idePath: UserLevelIdePath, serverName: string, force?: boolean): void;
|
|
29
|
+
/**
|
|
30
|
+
* Install agents, prompts, skills, and instruction files to each detected IDE's
|
|
31
|
+
* global scaffold root.
|
|
32
|
+
*
|
|
33
|
+
* Each IDE has its own global discovery path:
|
|
34
|
+
* - VS Code / VSCodium: ~/.github/ (copilot-instructions.md, agents/, prompts/, skills/)
|
|
35
|
+
* - Claude Code: ~/.claude/ (CLAUDE.md, agents/)
|
|
36
|
+
* - Cursor / Windsurf: No global scaffold support (project-level only)
|
|
37
|
+
*
|
|
38
|
+
* Multiple IDEs may share the same root (e.g. VS Code + VSCodium both use ~/.github/).
|
|
39
|
+
* We deduplicate scaffold files but generate IDE-specific instruction files.
|
|
40
|
+
*/
|
|
41
|
+
declare function installGlobalScaffold(pkgRoot: string, ides: UserLevelIdePath[], serverName: string, force?: boolean): void;
|
|
42
|
+
/**
|
|
43
|
+
* Main orchestrator for `kb init --user`.
|
|
44
|
+
*/
|
|
45
|
+
declare function initUser(options: {
|
|
46
|
+
force: boolean;
|
|
47
|
+
}): Promise<void>;
|
|
48
|
+
//#endregion
|
|
49
|
+
export { UserLevelIdePath, detectInstalledIdes, initUser, installGlobalScaffold, writeUserLevelMcpConfig };
|
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
import{MCP_SERVER_ENTRY as e,SERVER_NAME as t,SKILL_NAMES as n}from"./constants.js";import{buildAgentsMd as r,buildCopilotInstructions as i}from"./templates.js";import{copyDirectoryRecursive as a}from"./scaffold.js";import{copyFileSync as o,existsSync as s,mkdirSync as c,readFileSync as l,readdirSync as u,statSync as d,writeFileSync as f}from"node:fs";import{dirname as p,resolve as m}from"node:path";import{fileURLToPath as h}from"node:url";import{getGlobalDataDir as g,saveRegistry as _}from"../../../../core/dist/index.js";import{homedir as v}from"node:os";function y(){let e=v(),t=process.platform,n=[],r=m(e,`.github`),i=m(e,`.claude`),a=m(e,`.cursor`),o=m(e,`.windsurf`);if(t===`win32`){let t=process.env.APPDATA??m(e,`AppData`,`Roaming`);n.push({ide:`VS Code`,configDir:m(t,`Code`,`User`),mcpConfigPath:m(t,`Code`,`User`,`mcp.json`),globalScaffoldRoot:r},{ide:`VS Code Insiders`,configDir:m(t,`Code - Insiders`,`User`),mcpConfigPath:m(t,`Code - Insiders`,`User`,`mcp.json`),globalScaffoldRoot:r},{ide:`VSCodium`,configDir:m(t,`VSCodium`,`User`),mcpConfigPath:m(t,`VSCodium`,`User`,`mcp.json`),globalScaffoldRoot:r},{ide:`Cursor`,configDir:m(t,`Cursor`,`User`),mcpConfigPath:m(t,`Cursor`,`User`,`mcp.json`),globalScaffoldRoot:a},{ide:`Cursor Nightly`,configDir:m(t,`Cursor Nightly`,`User`),mcpConfigPath:m(t,`Cursor Nightly`,`User`,`mcp.json`),globalScaffoldRoot:a},{ide:`Windsurf`,configDir:m(t,`Windsurf`,`User`),mcpConfigPath:m(t,`Windsurf`,`User`,`mcp.json`),globalScaffoldRoot:o})}else if(t===`darwin`){let t=m(e,`Library`,`Application Support`);n.push({ide:`VS Code`,configDir:m(t,`Code`,`User`),mcpConfigPath:m(t,`Code`,`User`,`mcp.json`),globalScaffoldRoot:r},{ide:`VS Code Insiders`,configDir:m(t,`Code - Insiders`,`User`),mcpConfigPath:m(t,`Code - 
Insiders`,`User`,`mcp.json`),globalScaffoldRoot:r},{ide:`VSCodium`,configDir:m(t,`VSCodium`,`User`),mcpConfigPath:m(t,`VSCodium`,`User`,`mcp.json`),globalScaffoldRoot:r},{ide:`Cursor`,configDir:m(t,`Cursor`,`User`),mcpConfigPath:m(t,`Cursor`,`User`,`mcp.json`),globalScaffoldRoot:a},{ide:`Cursor Nightly`,configDir:m(t,`Cursor Nightly`,`User`),mcpConfigPath:m(t,`Cursor Nightly`,`User`,`mcp.json`),globalScaffoldRoot:a},{ide:`Windsurf`,configDir:m(t,`Windsurf`,`User`),mcpConfigPath:m(t,`Windsurf`,`User`,`mcp.json`),globalScaffoldRoot:o})}else{let t=process.env.XDG_CONFIG_HOME??m(e,`.config`);n.push({ide:`VS Code`,configDir:m(t,`Code`,`User`),mcpConfigPath:m(t,`Code`,`User`,`mcp.json`),globalScaffoldRoot:r},{ide:`VS Code Insiders`,configDir:m(t,`Code - Insiders`,`User`),mcpConfigPath:m(t,`Code - Insiders`,`User`,`mcp.json`),globalScaffoldRoot:r},{ide:`VSCodium`,configDir:m(t,`VSCodium`,`User`),mcpConfigPath:m(t,`VSCodium`,`User`,`mcp.json`),globalScaffoldRoot:r},{ide:`Cursor`,configDir:m(t,`Cursor`,`User`),mcpConfigPath:m(t,`Cursor`,`User`,`mcp.json`),globalScaffoldRoot:a},{ide:`Cursor Nightly`,configDir:m(t,`Cursor Nightly`,`User`),mcpConfigPath:m(t,`Cursor Nightly`,`User`,`mcp.json`),globalScaffoldRoot:a},{ide:`Windsurf`,configDir:m(t,`Windsurf`,`User`),mcpConfigPath:m(t,`Windsurf`,`User`,`mcp.json`),globalScaffoldRoot:o})}return n.push({ide:`Claude Code`,configDir:m(e,`.claude`),mcpConfigPath:m(e,`.claude`,`mcp.json`),globalScaffoldRoot:i}),n.filter(e=>s(e.configDir))}function b(t,n,r=!1){let{mcpConfigPath:i,configDir:a}=t,o={...e},u={};if(s(i)){try{let e=l(i,`utf-8`);u=JSON.parse(e)}catch{let e=`${i}.bak`;f(e,l(i,`utf-8`),`utf-8`),console.log(` Backed up invalid ${i} to ${e}`),u={}}if((u.servers??u.mcpServers??{})[n]&&!r){console.log(` ${t.ide}: ${n} already configured (use --force to update)`);return}}let d=new Set([`VS Code`,`VS Code 
Insiders`,`VSCodium`,`Windsurf`]).has(t.ide)?`servers`:`mcpServers`,p=u[d]??{};p[n]=o,u[d]=p,c(a,{recursive:!0}),f(i,`${JSON.stringify(u,null,2)}\n`,`utf-8`),console.log(` ${t.ide}: configured ${n} in ${i}`)}function x(e,t,n,r){let i=m(t,n);c(i,{recursive:!0});let l=m(e,`scaffold`,`general`,n);if(!s(l))return 0;let f=0;for(let e of u(l)){let t=m(l,e),c=m(i,e);d(t).isDirectory()?a(t,c,`${n}/${e}`,r):(r||!s(c))&&(o(t,c),f++)}return f}function S(e,t,o,l=!1){let u=new Set;for(let e of t)e.globalScaffoldRoot&&u.add(e.globalScaffoldRoot);if(u.size===0){console.log(` No IDEs with global scaffold support detected.`);return}for(let t of u){let r=x(e,t,`agents`,l),i=x(e,t,`prompts`,l),o=m(t,`skills`),c=0;for(let t of n){let n=m(e,`skills`,t);s(n)&&(a(n,m(o,t),`skills/${t}`,l),c++)}console.log(` ${t}: ${r} agents, ${i} prompts, ${c} skills`)}let d=new Set,p=i(`kb`,o),h=r(`kb`,o);for(let e of t){if(!e.globalScaffoldRoot)continue;let t=e.globalScaffoldRoot;if(e.ide===`Claude Code`){let e=m(t,`CLAUDE.md`);(l||!s(e))&&(f(e,`${p}\n---\n\n${h}`,`utf-8`),d.add(e))}else if(e.ide===`VS Code`||e.ide===`VS Code Insiders`||e.ide===`VSCodium`){let e=m(t,`copilot-instructions.md`);d.has(e)||(l||!s(e))&&(f(e,`${p}\n---\n\n${h}`,`utf-8`),d.add(e))}else if(e.ide===`Cursor`||e.ide===`Cursor Nightly`){let e=m(t,`rules`);c(e,{recursive:!0});let n=m(e,`kb.md`);d.has(n)||(l||!s(n))&&(f(n,`${p}\n---\n\n${h}`,`utf-8`),d.add(n))}else if(e.ide===`Windsurf`){let e=m(t,`rules`);c(e,{recursive:!0});let n=m(e,`kb.md`);d.has(n)||(l||!s(n))&&(f(n,`${p}\n---\n\n${h}`,`utf-8`),d.add(n))}}d.size>0&&console.log(` Instruction files: ${[...d].join(`, `)}`)}async function C(e){let n=t;console.log(`Initializing user-level KB installation...
|
|
2
|
+
`);let r=g();c(r,{recursive:!0}),console.log(` Global data store: ${r}`),_({version:1,workspaces:{}}),console.log(` Created registry.json`);let i=y();if(i.length===0)console.log(`
|
|
3
|
+
No supported IDEs detected. You can manually add the MCP server config.`);else{console.log(`\n Detected ${i.length} IDE(s):`);for(let t of i)b(t,n,e.force)}let a=m(p(h(import.meta.url)),`..`,`..`,`..`,`..`,`..`);console.log(`
|
|
4
|
+
Installing scaffold files:`),S(a,i,n,e.force),console.log(`
|
|
5
|
+
User-level KB installation complete!`),console.log(`
|
|
6
|
+
Next steps:`),console.log(` 1. Open any workspace in your IDE`),console.log(` 2. The KB server will auto-start and index the workspace`),console.log(` 3. Agents, prompts, skills & instructions are available globally`),console.log(` 4. No per-workspace init needed — just open a project and start coding`)}export{y as detectInstalledIdes,C as initUser,S as installGlobalScaffold,b as writeUserLevelMcpConfig};
|
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import{ctx as e}from"../context.js";import{executeCliBatchOperation as t,extractStrFlag as n,parseBatchPayload as r,printCheckResult as i,readInput as a}from"../helpers.js";import{dirname as o,resolve as s}from"node:path";import{fileURLToPath as c}from"node:url";import{audit as l,batch as u,check as d,guide as f,health as p,replayClear as m,replayList as h,replayTrim as g}from"../../../tools/dist/index.js";import{fork as _}from"node:child_process";const v=o(c(import.meta.url)),y=[{name:`status`,description:`Show knowledge base index status and statistics`,run:async()=>{let{
|
|
2
|
-
`)},c;n?(console.log(`Dropping existing index for full reindex...`),c=await i.reindexAll(o,s)):c=await i.index(o,s),console.log(`Done: ${c.filesProcessed} files, ${c.chunksCreated} chunks in ${(c.durationMs/1e3).toFixed(1)}s`),console.log(`Building FTS index...`),await r.createFtsIndex(),console.log(`Re-indexing curated entries...`);let l=await a.reindexAll();console.log(`Curated: ${l.indexed} entries restored`)}},{name:`serve`,description:`Start the MCP server (stdio or HTTP)`,usage:`kb serve [--transport stdio|http] [--port N]`,run:async e=>{let t=s(v,`..`,`..`,`..`,`server`,`dist`,`index.js`),r=n(e,`--transport`,`stdio`),i=n(e,`--port`,`3210`),a=_(t,[],{stdio:r===`stdio`?[`pipe`,`pipe`,`inherit`,`ipc`]:`inherit`,env:{...process.env,KB_TRANSPORT:r,KB_PORT:i}});r===`stdio`&&a.stdin&&a.stdout&&(process.stdin.pipe(a.stdin),a.stdout.pipe(process.stdout)),a.on(`exit`,e=>process.exit(e??0)),process.on(`SIGINT`,()=>a.kill(`SIGINT`)),process.on(`SIGTERM`,()=>a.kill(`SIGTERM`)),await new Promise(()=>{})}},{name:`init`,description:`Initialize a knowledge base in the current directory`,usage:`kb init [--
|
|
1
|
+
import{ctx as e}from"../context.js";import{executeCliBatchOperation as t,extractStrFlag as n,parseBatchPayload as r,printCheckResult as i,readInput as a}from"../helpers.js";import{dirname as o,resolve as s}from"node:path";import{fileURLToPath as c}from"node:url";import{audit as l,batch as u,check as d,guide as f,health as p,replayClear as m,replayList as h,replayTrim as g}from"../../../tools/dist/index.js";import{fork as _}from"node:child_process";const v=o(c(import.meta.url)),y=[{name:`status`,description:`Show knowledge base index status and statistics`,run:async()=>{let{isUserInstalled:t,getGlobalDataDir:n,computePartitionKey:r,listWorkspaces:i}=await import(`../../../core/dist/index.js`),{existsSync:a}=await import(`node:fs`),o=process.cwd(),c=t(),l=a(s(o,`.vscode`,`mcp.json`)),u,d;if(c&&l)u=`workspace (overrides user-level for this workspace)`,d=s(o,`.kb-data`);else if(c){let e=r(o);u=a(s(o,`AGENTS.md`))?`user (workspace scaffolded)`:`user (workspace not scaffolded)`,d=s(n(),e)}else u=`workspace`,d=s(o,`.kb-data`);if(console.log(`Knowledge Base Status`),console.log(`─`.repeat(40)),console.log(` Mode: ${u}`),console.log(` Data: ${d}`),c&&!l){let e=i();console.log(` Registry: ${e.length} workspace(s) enrolled`)}try{let{store:t}=await e(),n=await t.getStats(),r=await t.listSourcePaths();console.log(` Records: ${n.totalRecords}`),console.log(` Files: ${n.totalFiles}`),console.log(` Indexed: ${n.lastIndexedAt??`Never`}`),console.log(` Backend: ${n.storeBackend}`),console.log(` Model: ${n.embeddingModel}`),console.log(``),console.log(`Content Types:`);for(let[e,t]of Object.entries(n.contentTypeBreakdown))console.log(` ${e}: ${t}`);if(r.length>0){console.log(``),console.log(`Files (${r.length} total):`);for(let e of r.slice(0,20))console.log(` ${e}`);r.length>20&&console.log(` ... 
and ${r.length-20} more`)}}catch{console.log(``),console.log(" Index not available — run `kb reindex` to index this workspace.")}c&&!l&&!a(s(o,`AGENTS.md`))&&(console.log(``),console.log(" Action: Run `npx @vpxa/kb init` to add AGENTS.md and copilot-instructions.md"))}},{name:`reindex`,description:`Re-index the knowledge base from configured sources`,usage:`kb reindex [--full]`,run:async t=>{let n=t.includes(`--full`),{store:r,indexer:i,curated:a,config:o}=await e();console.log(`Indexing sources...`);let s=e=>{e.phase===`chunking`&&e.currentFile&&process.stdout.write(`\r [${e.filesProcessed+1}/${e.filesTotal}] ${e.currentFile}`),e.phase===`done`&&process.stdout.write(`
|
|
2
|
+
`)},c;n?(console.log(`Dropping existing index for full reindex...`),c=await i.reindexAll(o,s)):c=await i.index(o,s),console.log(`Done: ${c.filesProcessed} files, ${c.chunksCreated} chunks in ${(c.durationMs/1e3).toFixed(1)}s`),console.log(`Building FTS index...`),await r.createFtsIndex(),console.log(`Re-indexing curated entries...`);let l=await a.reindexAll();console.log(`Curated: ${l.indexed} entries restored`)}},{name:`serve`,description:`Start the MCP server (stdio or HTTP)`,usage:`kb serve [--transport stdio|http] [--port N]`,run:async e=>{let t=s(v,`..`,`..`,`..`,`server`,`dist`,`index.js`),r=n(e,`--transport`,`stdio`),i=n(e,`--port`,`3210`),a=_(t,[],{stdio:r===`stdio`?[`pipe`,`pipe`,`inherit`,`ipc`]:`inherit`,env:{...process.env,KB_TRANSPORT:r,KB_PORT:i}});r===`stdio`&&a.stdin&&a.stdout&&(process.stdin.pipe(a.stdin),a.stdout.pipe(process.stdout)),a.on(`exit`,e=>process.exit(e??0)),process.on(`SIGINT`,()=>a.kill(`SIGINT`)),process.on(`SIGTERM`,()=>a.kill(`SIGTERM`)),await new Promise(()=>{})}},{name:`init`,description:`Initialize a knowledge base in the current directory`,usage:`kb init [--user|--workspace] [--force] [--guide]`,run:async e=>{let t=e.includes(`--user`),n=e.includes(`--workspace`),r=e.includes(`--guide`),i=e.includes(`--force`);if(t&&n&&(console.error(`Cannot use --user and --workspace together.`),process.exit(1)),r){let{guideProject:e}=await import(`./init/index.js`);await e();return}if(t){let{initUser:e}=await import(`./init/user.js`);await e({force:i})}else if(n){let{initProject:e}=await import(`./init/index.js`);await e({force:i})}else{let{initSmart:e}=await import(`./init/index.js`);await e({force:i})}}},{name:`check`,description:`Run incremental typecheck and lint`,usage:`kb check [--cwd <dir>] [--files f1,f2] [--skip-types] [--skip-lint] [--detail summary|errors|full]`,run:async e=>{let t=n(e,`--cwd`,``).trim()||void 
0,r=n(e,`--files`,``),a=n(e,`--detail`,`full`)||`full`,o=r.split(`,`).map(e=>e.trim()).filter(Boolean),s=!1;e.includes(`--skip-types`)&&(e.splice(e.indexOf(`--skip-types`),1),s=!0);let c=!1;e.includes(`--skip-lint`)&&(e.splice(e.indexOf(`--skip-lint`),1),c=!0);let l=await d({cwd:t,files:o.length>0?o:void 0,skipTypes:s,skipLint:c,detail:a});i(l),l.passed||(process.exitCode=1)}},{name:`batch`,description:`Execute built-in operations from JSON input`,usage:`kb batch [--file path] [--concurrency N]`,run:async i=>{let o=n(i,`--file`,``).trim()||void 0,s=(()=>{let e=i.indexOf(`--concurrency`);if(e===-1||e+1>=i.length)return 0;let t=Number.parseInt(i.splice(e,2)[1],10);return Number.isNaN(t)?0:t})(),c=await a(o);c.trim()||(console.error(`Usage: kb batch [--file path] [--concurrency N]`),process.exit(1));let l=r(c),d=s>0?s:l.concurrency,f=l.operations.some(e=>e.type!==`check`)?await e():null,p=await u(l.operations,async e=>t(e,f),{concurrency:d});console.log(JSON.stringify(p,null,2)),p.some(e=>e.status===`error`)&&(process.exitCode=1)}},{name:`health`,description:`Run project health checks on the current directory`,usage:`kb health [path]`,run:async e=>{let t=p(e.shift());console.log(`Project Health: ${t.path}`),console.log(`─`.repeat(50));for(let e of t.checks){let t=e.status===`pass`?`+`:e.status===`warn`?`~`:`X`;console.log(` [${t}] ${e.name}: ${e.message}`)}console.log(`─`.repeat(50)),console.log(`Score: ${t.score}% — ${t.summary}`)}},{name:`audit`,description:`Run a unified project audit (structure, deps, patterns, health, dead symbols, check)`,usage:`kb audit [path] [--checks structure,dependencies,patterns,health,dead_symbols,check,entry_points] [--detail summary|full]`,run:async t=>{let{store:r,embedder:i}=await e(),a=n(t,`--detail`,`summary`)||`summary`,o=n(t,`--checks`,``),s=o?o.split(`,`).map(e=>e.trim()):void 0,c=await l(r,i,{path:t.shift()||`.`,checks:s,detail:a});if(c.ok){if(console.log(c.summary),c.next&&c.next.length>0){console.log(`
|
|
3
3
|
Suggested next steps:`);for(let e of c.next)console.log(` → ${e.tool}: ${e.reason}`)}}else console.error(c.error?.message??`Audit failed`),process.exitCode=1}},{name:`guide`,description:`Tool discovery — recommend KB tools for a given goal`,usage:`kb guide <goal> [--max N]`,run:async e=>{let t=e.indexOf(`--max`),n=5;t!==-1&&t+1<e.length&&(n=Number.parseInt(e.splice(t,2)[1],10)||5);let r=e.join(` `).trim();r||(console.error(`Usage: kb guide <goal> [--max N]`),console.error(`Example: kb guide "audit this project"`),process.exit(1));let i=f(r,n);console.log(`Workflow: ${i.workflow}`),console.log(` ${i.description}\n`),console.log(`Recommended tools:`);for(let e of i.tools){let t=e.suggestedArgs?` ${JSON.stringify(e.suggestedArgs)}`:``;console.log(` ${e.order}. ${e.tool} — ${e.reason}${t}`)}i.alternativeWorkflows.length>0&&console.log(`\nAlternatives: ${i.alternativeWorkflows.join(`, `)}`)}},{name:`replay`,description:`Show recent tool invocation audit trail`,usage:`kb replay [--last N] [--tool <name>] [--source mcp|cli]`,run:async e=>{let t=h({last:Number.parseInt(e[e.indexOf(`--last`)+1],10)||20,tool:e.includes(`--tool`)?e[e.indexOf(`--tool`)+1]:void 0,source:e.includes(`--source`)?e[e.indexOf(`--source`)+1]:void 0});if(t.length===0){console.log(`No replay entries. 
Activity is logged when tools are invoked.`);return}console.log(`Replay Log (${t.length} entries)\n`);for(let e of t){let t=e.ts.split(`T`)[1]?.split(`.`)[0]??e.ts,n=e.status===`ok`?`✓`:`✗`;console.log(`${t} ${n} ${e.tool} (${e.durationMs}ms) [${e.source}]`),console.log(` in: ${e.input}`),console.log(` out: ${e.output}`)}g()}},{name:`replay-clear`,description:`Clear the replay audit trail`,run:async()=>{m(),console.log(`Replay log cleared.`)}},{name:`tui`,description:`Launch interactive terminal dashboard (human monitoring)`,run:async()=>{try{let{launch:t}=await import(`../../../tui/dist/index.js`),{store:n,embedder:r,config:i}=await e();t({store:n,embedder:r,config:i})}catch(e){throw e.code===`ERR_MODULE_NOT_FOUND`&&(console.error(`TUI requires ink and react. Install them with:
|
|
4
4
|
pnpm add -D ink react @types/react`),process.exit(1)),e}}}];export{y as systemCommands};
|
|
@@ -34,7 +34,8 @@ declare function computePartitionKey(cwd: string): string;
|
|
|
34
34
|
*/
|
|
35
35
|
declare function loadRegistry(): GlobalRegistry;
|
|
36
36
|
/**
|
|
37
|
-
* Save the global registry atomically
|
|
37
|
+
* Save the global registry atomically with file locking.
|
|
38
|
+
* Uses O_CREAT|O_EXCL lock file + write-to-tmp + rename pattern.
|
|
38
39
|
*/
|
|
39
40
|
declare function saveRegistry(registry: GlobalRegistry): void;
|
|
40
41
|
/**
|
|
@@ -55,8 +56,8 @@ declare function listWorkspaces(): RegistryEntry[];
|
|
|
55
56
|
*/
|
|
56
57
|
declare function getPartitionDir(partition: string): string;
|
|
57
58
|
/**
|
|
58
|
-
* Check whether
|
|
59
|
+
* Check whether user-level mode is installed (registry.json exists in ~/.kb-data/).
|
|
59
60
|
*/
|
|
60
|
-
declare function
|
|
61
|
+
declare function isUserInstalled(): boolean;
|
|
61
62
|
//#endregion
|
|
62
|
-
export { GlobalRegistry, RegistryEntry, computePartitionKey, getGlobalDataDir, getPartitionDir,
|
|
63
|
+
export { GlobalRegistry, RegistryEntry, computePartitionKey, getGlobalDataDir, getPartitionDir, isUserInstalled, listWorkspaces, loadRegistry, lookupWorkspace, registerWorkspace, saveRegistry };
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{KB_GLOBAL_PATHS as e}from"./constants.js";import{basename as t,resolve as n}from"node:path";import{createHash as r}from"node:crypto";import{
|
|
1
|
+
import{KB_GLOBAL_PATHS as e}from"./constants.js";import{basename as t,resolve as n}from"node:path";import{createHash as r}from"node:crypto";import{closeSync as i,constants as a,existsSync as o,mkdirSync as s,openSync as c,readFileSync as l,renameSync as u,statSync as d,unlinkSync as f,writeFileSync as p}from"node:fs";import{homedir as m}from"node:os";function h(){return process.env.KB_GLOBAL_DATA_DIR??n(m(),e.root)}function g(e){let i=n(e);return`${t(i).toLowerCase().replace(/[^a-z0-9-]/g,`-`)||`workspace`}-${r(`sha256`).update(i).digest(`hex`).slice(0,8)}`}function _(){let t=n(h(),e.registry);if(!o(t))return{version:1,workspaces:{}};let r=l(t,`utf-8`);return JSON.parse(r)}function v(e,t=5e3){let n=`${e}.lock`,r=Date.now()+t,o=10;for(;Date.now()<r;)try{let e=c(n,a.O_CREAT|a.O_EXCL|a.O_WRONLY);return p(e,`${process.pid}\n`),i(e),n}catch(e){if(e.code!==`EEXIST`)throw e;try{let{mtimeMs:e}=d(n);if(Date.now()-e>3e4){f(n);continue}}catch{}let t=new SharedArrayBuffer(4);Atomics.wait(new Int32Array(t),0,0,o),o=Math.min(o*2,200)}throw Error(`Failed to acquire registry lock after ${t}ms`)}function y(e){try{f(e)}catch{}}function b(t){let r=h();s(r,{recursive:!0});let i=n(r,e.registry),a=v(i);try{let e=`${i}.tmp`;p(e,JSON.stringify(t,null,2),`utf-8`),u(e,i)}finally{y(a)}}function x(e){let t=_(),r=g(e),i=new Date().toISOString();return t.workspaces[r]?t.workspaces[r].lastAccessedAt=i:t.workspaces[r]={partition:r,workspacePath:n(e),registeredAt:i,lastAccessedAt:i},s(w(r),{recursive:!0}),b(t),t.workspaces[r]}function S(e){let t=_(),n=g(e);return t.workspaces[n]}function C(){let e=_();return Object.values(e.workspaces)}function w(e){return n(h(),e)}function T(){return o(n(h(),e.registry))}export{g as computePartitionKey,h as getGlobalDataDir,w as getPartitionDir,T as isUserInstalled,C as listWorkspaces,_ as loadRegistry,S as lookupWorkspace,x as registerWorkspace,b as saveRegistry};
|
|
@@ -2,6 +2,6 @@ import { CATEGORY_PATTERN, CHUNK_SIZES, DEFAULT_CATEGORIES, EMBEDDING_DEFAULTS,
|
|
|
2
2
|
import { CONTENT_TYPES, ChunkMetadata, ContentType, IndexStats, KBConfig, KNOWLEDGE_ORIGINS, KnowledgeOrigin, KnowledgeRecord, RawChunk, SOURCE_TYPES, SearchResult, SourceType } from "./types.js";
|
|
3
3
|
import { contentTypeToSourceType, detectContentType, sourceTypeContentTypes } from "./content-detector.js";
|
|
4
4
|
import { ConfigError, EmbeddingError, IndexError, KBError, StoreError } from "./errors.js";
|
|
5
|
-
import { GlobalRegistry, RegistryEntry, computePartitionKey, getGlobalDataDir, getPartitionDir,
|
|
5
|
+
import { GlobalRegistry, RegistryEntry, computePartitionKey, getGlobalDataDir, getPartitionDir, isUserInstalled, listWorkspaces, loadRegistry, lookupWorkspace, registerWorkspace, saveRegistry } from "./global-registry.js";
|
|
6
6
|
import { LogLevel, createLogger, getLogLevel, resetLogDir, serializeError, setFileSinkEnabled, setLogLevel } from "./logger.js";
|
|
7
|
-
export { CATEGORY_PATTERN, CHUNK_SIZES, CONTENT_TYPES, ChunkMetadata, ConfigError, ContentType, DEFAULT_CATEGORIES, EMBEDDING_DEFAULTS, EmbeddingError, FILE_LIMITS, GlobalRegistry, IndexError, IndexStats, KBConfig, KBError, KB_GLOBAL_PATHS, KB_PATHS, KNOWLEDGE_ORIGINS, KnowledgeOrigin, KnowledgeRecord, LogLevel, RawChunk, RegistryEntry, SEARCH_DEFAULTS, SOURCE_TYPES, STORE_DEFAULTS, SearchResult, SourceType, StoreError, computePartitionKey, contentTypeToSourceType, createLogger, detectContentType, getGlobalDataDir, getLogLevel, getPartitionDir,
|
|
7
|
+
export { CATEGORY_PATTERN, CHUNK_SIZES, CONTENT_TYPES, ChunkMetadata, ConfigError, ContentType, DEFAULT_CATEGORIES, EMBEDDING_DEFAULTS, EmbeddingError, FILE_LIMITS, GlobalRegistry, IndexError, IndexStats, KBConfig, KBError, KB_GLOBAL_PATHS, KB_PATHS, KNOWLEDGE_ORIGINS, KnowledgeOrigin, KnowledgeRecord, LogLevel, RawChunk, RegistryEntry, SEARCH_DEFAULTS, SOURCE_TYPES, STORE_DEFAULTS, SearchResult, SourceType, StoreError, computePartitionKey, contentTypeToSourceType, createLogger, detectContentType, getGlobalDataDir, getLogLevel, getPartitionDir, isUserInstalled, listWorkspaces, loadRegistry, lookupWorkspace, registerWorkspace, resetLogDir, saveRegistry, serializeError, setFileSinkEnabled, setLogLevel, sourceTypeContentTypes };
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{CATEGORY_PATTERN as e,CHUNK_SIZES as t,DEFAULT_CATEGORIES as n,EMBEDDING_DEFAULTS as r,FILE_LIMITS as i,KB_GLOBAL_PATHS as a,KB_PATHS as o,SEARCH_DEFAULTS as s,STORE_DEFAULTS as c}from"./constants.js";import{contentTypeToSourceType as l,detectContentType as u,sourceTypeContentTypes as d}from"./content-detector.js";import{ConfigError as f,EmbeddingError as p,IndexError as m,KBError as h,StoreError as g}from"./errors.js";import{computePartitionKey as _,getGlobalDataDir as v,getPartitionDir as y,
|
|
1
|
+
import{CATEGORY_PATTERN as e,CHUNK_SIZES as t,DEFAULT_CATEGORIES as n,EMBEDDING_DEFAULTS as r,FILE_LIMITS as i,KB_GLOBAL_PATHS as a,KB_PATHS as o,SEARCH_DEFAULTS as s,STORE_DEFAULTS as c}from"./constants.js";import{contentTypeToSourceType as l,detectContentType as u,sourceTypeContentTypes as d}from"./content-detector.js";import{ConfigError as f,EmbeddingError as p,IndexError as m,KBError as h,StoreError as g}from"./errors.js";import{computePartitionKey as _,getGlobalDataDir as v,getPartitionDir as y,isUserInstalled as b,listWorkspaces as x,loadRegistry as S,lookupWorkspace as C,registerWorkspace as w,saveRegistry as T}from"./global-registry.js";import{createLogger as E,getLogLevel as D,resetLogDir as O,serializeError as k,setFileSinkEnabled as A,setLogLevel as j}from"./logger.js";import{CONTENT_TYPES as M,KNOWLEDGE_ORIGINS as N,SOURCE_TYPES as P}from"./types.js";export{e as CATEGORY_PATTERN,t as CHUNK_SIZES,M as CONTENT_TYPES,f as ConfigError,n as DEFAULT_CATEGORIES,r as EMBEDDING_DEFAULTS,p as EmbeddingError,i as FILE_LIMITS,m as IndexError,h as KBError,a as KB_GLOBAL_PATHS,o as KB_PATHS,N as KNOWLEDGE_ORIGINS,s as SEARCH_DEFAULTS,P as SOURCE_TYPES,c as STORE_DEFAULTS,g as StoreError,_ as computePartitionKey,l as contentTypeToSourceType,E as createLogger,u as detectContentType,v as getGlobalDataDir,D as getLogLevel,y as getPartitionDir,b as isUserInstalled,x as listWorkspaces,S as loadRegistry,C as lookupWorkspace,w as registerWorkspace,O as resetLogDir,T as saveRegistry,k as serializeError,A as setFileSinkEnabled,j as setLogLevel,d as sourceTypeContentTypes};
|
|
@@ -12,7 +12,7 @@ interface IEmbedder {
|
|
|
12
12
|
*/
|
|
13
13
|
embedQuery(query: string): Promise<Float32Array>;
|
|
14
14
|
/** Generate embeddings for multiple text strings (batched, for documents/passages) */
|
|
15
|
-
embedBatch(texts: string[]): Promise<Float32Array[]>;
|
|
15
|
+
embedBatch(texts: string[], batchSize?: number): Promise<Float32Array[]>;
|
|
16
16
|
/** The dimensionality of the embedding vectors */
|
|
17
17
|
readonly dimensions: number;
|
|
18
18
|
/** The model identifier */
|
|
@@ -16,7 +16,7 @@ declare class OnnxEmbedder implements IEmbedder {
|
|
|
16
16
|
shutdown(): Promise<void>;
|
|
17
17
|
embed(text: string): Promise<Float32Array>;
|
|
18
18
|
embedQuery(query: string): Promise<Float32Array>;
|
|
19
|
-
embedBatch(texts: string[]): Promise<Float32Array[]>;
|
|
19
|
+
embedBatch(texts: string[], batchSize?: number): Promise<Float32Array[]>;
|
|
20
20
|
}
|
|
21
21
|
//#endregion
|
|
22
22
|
export { OnnxEmbedder };
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{homedir as e}from"node:os";import{join as t}from"node:path";import{env as n,pipeline as r}from"@huggingface/transformers";import{EMBEDDING_DEFAULTS as i}from"../../core/dist/index.js";n.cacheDir=t(e(),`.cache`,`huggingface`,`transformers-js`);var a=class{pipe=null;dimensions;modelId;queryPrefix;constructor(e){this.modelId=e?.model??i.model,this.dimensions=e?.dimensions??i.dimensions,this.queryPrefix=e?.queryPrefix??this.detectQueryPrefix(this.modelId)}detectQueryPrefix(e){let t=e.toLowerCase();return t.includes(`bge`)||t.includes(`mxbai-embed`)?`Represent this sentence for searching relevant passages: `:t.includes(`/e5-`)||t.includes(`multilingual-e5`)?`query: `:``}async initialize(){if(!this.pipe)try{this.pipe=await r(`feature-extraction`,this.modelId,{dtype:`fp32`})}catch(e){throw Error(`Failed to initialize embedding model "${this.modelId}": ${e.message}`)}}async shutdown(){this.pipe=null}async embed(e){this.pipe||await this.initialize();let t=await this.pipe?.(e,{pooling:`mean`,normalize:!0});if(!t)throw Error(`Embedding pipeline returned no output`);return new Float32Array(t.data)}async embedQuery(e){return this.embed(this.queryPrefix+e)}async embedBatch(e){if(e.length===0)return[];this.pipe||await this.initialize();let
|
|
1
|
+
import{homedir as e}from"node:os";import{join as t}from"node:path";import{env as n,pipeline as r}from"@huggingface/transformers";import{EMBEDDING_DEFAULTS as i}from"../../core/dist/index.js";n.cacheDir=t(e(),`.cache`,`huggingface`,`transformers-js`);var a=class{pipe=null;dimensions;modelId;queryPrefix;constructor(e){this.modelId=e?.model??i.model,this.dimensions=e?.dimensions??i.dimensions,this.queryPrefix=e?.queryPrefix??this.detectQueryPrefix(this.modelId)}detectQueryPrefix(e){let t=e.toLowerCase();return t.includes(`bge`)||t.includes(`mxbai-embed`)?`Represent this sentence for searching relevant passages: `:t.includes(`/e5-`)||t.includes(`multilingual-e5`)?`query: `:``}async initialize(){if(!this.pipe)try{this.pipe=await r(`feature-extraction`,this.modelId,{dtype:`fp32`})}catch(e){throw Error(`Failed to initialize embedding model "${this.modelId}": ${e.message}`)}}async shutdown(){this.pipe=null}async embed(e){this.pipe||await this.initialize();let t=await this.pipe?.(e,{pooling:`mean`,normalize:!0});if(!t)throw Error(`Embedding pipeline returned no output`);return new Float32Array(t.data)}async embedQuery(e){return this.embed(this.queryPrefix+e)}async embedBatch(e,t=64){if(e.length===0)return[];this.pipe||await this.initialize();let n=[];for(let r=0;r<e.length;r+=t){let i=e.slice(r,r+t),a=await this.pipe?.(i,{pooling:`mean`,normalize:!0});if(!a)throw Error(`Embedding pipeline returned no output`);if(i.length===1)n.push(new Float32Array(a.data));else for(let e=0;e<i.length;e++){let t=e*this.dimensions,r=a.data.slice(t,t+this.dimensions);n.push(new Float32Array(r))}}return n}};export{a as OnnxEmbedder};
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
//#region packages/indexer/src/hash-cache.d.ts
|
|
2
|
+
/**
|
|
3
|
+
* Persistent file hash cache.
|
|
4
|
+
* Stores path->hash mappings in a JSON file to avoid LanceDB round-trips
|
|
5
|
+
* when checking which files have changed during incremental indexing.
|
|
6
|
+
*/
|
|
7
|
+
declare class FileHashCache {
|
|
8
|
+
private cache;
|
|
9
|
+
private readonly filePath;
|
|
10
|
+
private dirty;
|
|
11
|
+
constructor(storeDir: string);
|
|
12
|
+
/** Load cache from disk. Non-fatal if missing or corrupt. */
|
|
13
|
+
load(): void;
|
|
14
|
+
get(path: string): string | undefined;
|
|
15
|
+
set(path: string, hash: string): void;
|
|
16
|
+
delete(path: string): void;
|
|
17
|
+
/** Persist cache to disk if changed. */
|
|
18
|
+
flush(): void;
|
|
19
|
+
/** Clear all entries and delete file. */
|
|
20
|
+
clear(): void;
|
|
21
|
+
get size(): number;
|
|
22
|
+
}
|
|
23
|
+
//#endregion
|
|
24
|
+
export { FileHashCache };
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
import{resolve as e}from"node:path";import{createLogger as t}from"../../core/dist/index.js";import{existsSync as n,readFileSync as r,writeFileSync as i}from"node:fs";const a=t(`hash-cache`);var o=class{cache;filePath;dirty=!1;constructor(t){this.filePath=e(t,`file-hashes.json`),this.cache=new Map}load(){if(n(this.filePath))try{let e=r(this.filePath,`utf-8`),t=JSON.parse(e);this.cache=new Map(Object.entries(t)),a.info(`Hash cache loaded`,{entries:this.cache.size})}catch(e){a.warn(`Hash cache load failed, starting fresh`,{err:e}),this.cache=new Map}}get(e){return this.cache.get(e)}set(e,t){this.cache.set(e,t),this.dirty=!0}delete(e){this.cache.delete(e)&&(this.dirty=!0)}flush(){if(this.dirty)try{let e={};for(let[t,n]of this.cache)e[t]=n;i(this.filePath,JSON.stringify(e),`utf-8`),this.dirty=!1}catch(e){a.warn(`Hash cache flush failed`,{err:e})}}clear(){this.cache.clear(),this.dirty=!0,this.flush()}get size(){return this.cache.size}};export{o as FileHashCache};
|
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
import { FileHashCache } from "./hash-cache.js";
|
|
1
2
|
import { IGraphStore, IKnowledgeStore } from "@kb/store";
|
|
2
3
|
import { IndexStats, KBConfig } from "@kb/core";
|
|
3
4
|
import { IEmbedder } from "@kb/embeddings";
|
|
@@ -26,15 +27,18 @@ declare class IncrementalIndexer {
|
|
|
26
27
|
private readonly crawler;
|
|
27
28
|
private indexing;
|
|
28
29
|
private graphStore?;
|
|
30
|
+
private hashCache?;
|
|
29
31
|
/** Whether an index operation is currently in progress. */
|
|
30
32
|
get isIndexing(): boolean;
|
|
31
33
|
constructor(embedder: IEmbedder, store: IKnowledgeStore);
|
|
32
34
|
/** Set the graph store for auto-population during indexing and cleanup on re-index. */
|
|
33
35
|
setGraphStore(graphStore: IGraphStore): void;
|
|
36
|
+
/** Set the hash cache for faster incremental checks. */
|
|
37
|
+
setHashCache(cache: FileHashCache): void;
|
|
34
38
|
/**
|
|
35
39
|
* Index all configured sources. Only re-indexes files that have changed.
|
|
36
40
|
* Sources are crawled in parallel, and file processing runs concurrently
|
|
37
|
-
* up to `config.indexing.concurrency` (default:
|
|
41
|
+
* up to `config.indexing.concurrency` (default: 75% of CPU cores, minimum 2).
|
|
38
42
|
*/
|
|
39
43
|
index(config: KBConfig, onProgress?: ProgressCallback): Promise<IndexResult>;
|
|
40
44
|
private doIndex;
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{generateRecordId as e,hashContent as t}from"./file-hasher.js";import{FilesystemCrawler as n}from"./filesystem-crawler.js";import{extractGraph as r}from"./graph-extractor.js";import{KB_PATHS as i,createLogger as a,detectContentType as o,serializeError as s}from"../../core/dist/index.js";import{availableParallelism as c}from"node:os";import{createChunkerSync as l}from"../../chunker/dist/index.js";const u=a(`indexer`);async function d(e,t,n,r){let i=0;async function a(){for(;i<e.length;){let n=i++;try{await t(e[n])}catch(t){r?.(e[n],t)}}}await Promise.all(Array.from({length:Math.min(n,e.length)},()=>a()))}const f=Math.max(
|
|
1
|
+
import{generateRecordId as e,hashContent as t}from"./file-hasher.js";import{FilesystemCrawler as n}from"./filesystem-crawler.js";import{extractGraph as r}from"./graph-extractor.js";import{KB_PATHS as i,createLogger as a,detectContentType as o,serializeError as s}from"../../core/dist/index.js";import{availableParallelism as c}from"node:os";import{createChunkerSync as l}from"../../chunker/dist/index.js";const u=a(`indexer`);async function d(e,t,n,r){let i=0;async function a(){for(;i<e.length;){let n=i++;try{await t(e[n])}catch(t){r?.(e[n],t)}}}await Promise.all(Array.from({length:Math.min(n,e.length)},()=>a()))}const f=Math.max(2,Math.floor(c()*.75));var p=class{crawler;indexing=!1;graphStore;hashCache;get isIndexing(){return this.indexing}constructor(e,t){this.embedder=e,this.store=t,this.crawler=new n}setGraphStore(e){this.graphStore=e}setHashCache(e){this.hashCache=e}async index(e,t){if(this.indexing)throw Error(`Indexing is already in progress`);this.indexing=!0;try{return await this.doIndex(e,t,{})}finally{this.indexing=!1}}async doIndex(n,a,c={}){let p=Date.now(),m=0,h=0,g=0,_=0,v=n.indexing.concurrency??f;a?.({phase:`crawling`,filesTotal:0,filesProcessed:0,chunksTotal:0,chunksProcessed:0});let y=(await Promise.all(n.sources.map(e=>this.crawler.crawl({rootDir:e.path,excludePatterns:e.excludePatterns})))).flat(),b,x;if(c.skipHashCheck)b=y,x=[];else{let e=await this.store.listSourcePaths(),n=new Set(y.map(e=>e.relativePath));x=e.filter(e=>!n.has(e)&&!e.startsWith(`${i.aiCurated}/`)),b=[],await d(y,async e=>{let n=t(e.content);if(this.hashCache){if(this.hashCache.get(e.relativePath)===n){h++;return}}else{let t=await this.store.getBySourcePath(e.relativePath);if(t.length>0&&t[0].fileHash===n){h++;return}}b.push(e)},v,(e,t)=>u.error(`Hash check failed`,{sourcePath:e.relativePath,...s(t)}))}let S=b.length,C=[],w=[],T=0,E=[],D=[],O=new Map,k=0,A=async()=>{if(E.length===0)return;let e=E,t=D,n=O;E=[],D=[],O=new Map,k=0,await this.store.upsert(e,t);for(let[e,t]of 
n)this.hashCache?.set(e,t)},j=async()=>{if(this.graphStore){try{C.length>0&&await this.graphStore.upsertNodes(C),w.length>0&&await this.graphStore.upsertEdges(w)}catch(e){u.warn(`Graph batch flush failed`,s(e))}C=[],w=[],T=0}};return await d(b,async n=>{a?.({phase:`chunking`,filesTotal:S,filesProcessed:m,chunksTotal:g,chunksProcessed:g,currentFile:n.relativePath});let i=o(n.relativePath),d=l(n.extension).chunk(n.content,{sourcePath:n.relativePath,contentType:i});if(d.length===0)return;a?.({phase:`embedding`,filesTotal:S,filesProcessed:m,chunksTotal:g+d.length,chunksProcessed:g,currentFile:n.relativePath});let f=await this.embedder.embedBatch(d.map(e=>e.text)),p=t(n.content),h=d.map((t,r)=>({id:e(n.relativePath,r),content:t.text,sourcePath:t.sourcePath,contentType:t.contentType,headingPath:t.headingPath,chunkIndex:t.chunkIndex,totalChunks:t.totalChunks,startLine:t.startLine,endLine:t.endLine,fileHash:p,indexedAt:new Date().toISOString(),origin:`indexed`,tags:[],version:1}));if(a?.({phase:`storing`,filesTotal:S,filesProcessed:m,chunksTotal:g+d.length,chunksProcessed:g,currentFile:n.relativePath}),E.push(...h),D.push(...f),O.set(n.relativePath,p),k++,k>=20&&await A(),this.graphStore)try{c.graphCleared||await this.graphStore.deleteBySourcePath(n.relativePath);let e=r(n.content,n.relativePath);e.nodes.length>0&&C.push(...e.nodes),e.edges.length>0&&w.push(...e.edges),T++,T>=50&&await j()}catch(e){u.warn(`Graph extraction failed`,{sourcePath:n.relativePath,...s(e)})}m++,g+=d.length},v,(e,t)=>u.error(`Processing failed`,{sourcePath:e.relativePath,...s(t)})),await A(),await j(),x.length>0&&(a?.({phase:`cleanup`,filesTotal:S,filesProcessed:m,chunksTotal:g,chunksProcessed:g}),await d(x,async e=>{await this.store.deleteBySourcePath(e),this.hashCache?.delete(e),this.graphStore&&await this.graphStore.deleteBySourcePath(e).catch(t=>u.warn(`Graph cleanup failed`,{sourcePath:e,...s(t)})),_++},v,(e,t)=>u.error(`Cleanup 
failed`,{sourcePath:e,...s(t)}))),this.hashCache?.flush(),a?.({phase:`done`,filesTotal:S,filesProcessed:m,chunksTotal:g,chunksProcessed:g}),{filesProcessed:m,filesSkipped:h,chunksCreated:g,filesRemoved:_,durationMs:Date.now()-p}}async reindexAll(e,t){if(this.indexing)throw Error(`Indexing is already in progress`);this.indexing=!0;try{if(await this.store.dropTable(),this.graphStore)try{let e=await this.graphStore.getStats();e.nodeCount>0&&(await this.graphStore.clear(),u.info(`Graph store cleared`,{nodeCount:e.nodeCount,edgeCount:e.edgeCount}))}catch(e){u.warn(`Graph store clear failed`,s(e))}return await this.doReindex(e,t)}catch(e){throw this.indexing=!1,e}}async doReindex(e,t){try{return await this.doIndex(e,t,{skipHashCheck:!0,graphCleared:!0})}finally{this.indexing=!1}}async getStats(){return this.store.getStats()}};export{p as IncrementalIndexer};
|
|
@@ -1,5 +1,6 @@
|
|
|
1
1
|
import { generateRecordId, hashContent } from "./file-hasher.js";
|
|
2
2
|
import { CrawlOptions, CrawlResult, FilesystemCrawler } from "./filesystem-crawler.js";
|
|
3
3
|
import { ExtractedGraph, extractGraph } from "./graph-extractor.js";
|
|
4
|
+
import { FileHashCache } from "./hash-cache.js";
|
|
4
5
|
import { IncrementalIndexer, IndexProgress, IndexResult, ProgressCallback } from "./incremental-indexer.js";
|
|
5
|
-
export { type CrawlOptions, type CrawlResult, type ExtractedGraph, FilesystemCrawler, IncrementalIndexer, type IndexProgress, type IndexResult, type ProgressCallback, extractGraph, generateRecordId, hashContent };
|
|
6
|
+
export { type CrawlOptions, type CrawlResult, type ExtractedGraph, FileHashCache, FilesystemCrawler, IncrementalIndexer, type IndexProgress, type IndexResult, type ProgressCallback, extractGraph, generateRecordId, hashContent };
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{generateRecordId as e,hashContent as t}from"./file-hasher.js";import{FilesystemCrawler as n}from"./filesystem-crawler.js";import{extractGraph as r}from"./graph-extractor.js";import{
|
|
1
|
+
import{generateRecordId as e,hashContent as t}from"./file-hasher.js";import{FilesystemCrawler as n}from"./filesystem-crawler.js";import{extractGraph as r}from"./graph-extractor.js";import{FileHashCache as i}from"./hash-cache.js";import{IncrementalIndexer as a}from"./incremental-indexer.js";export{i as FileHashCache,n as FilesystemCrawler,a as IncrementalIndexer,r as extractGraph,e as generateRecordId,t as hashContent};
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{existsSync as e,readFileSync as t}from"node:fs";import{dirname as n,resolve as r}from"node:path";import{fileURLToPath as i}from"node:url";import{KB_PATHS as a,createLogger as o,getPartitionDir as s,
|
|
1
|
+
import{existsSync as e,readFileSync as t}from"node:fs";import{dirname as n,resolve as r}from"node:path";import{fileURLToPath as i}from"node:url";import{KB_PATHS as a,createLogger as o,getPartitionDir as s,isUserInstalled as c,registerWorkspace as l,serializeError as u}from"../../core/dist/index.js";const d=n(i(import.meta.url)),f=o(`server`);function p(e,t,n){let i=r(e),a=r(t);if(!i.startsWith(a))throw Error(`Config ${n} path escapes workspace root: ${e} is not under ${t}`);return i}function m(){let i=process.env.KB_CONFIG_PATH??(e(r(process.cwd(),`kb.config.json`))?r(process.cwd(),`kb.config.json`):r(d,`..`,`..`,`..`,`kb.config.json`));try{let e=t(i,`utf-8`),o=JSON.parse(e);if(!o.sources||!Array.isArray(o.sources)||o.sources.length===0)throw Error(`Config must have at least one source`);if(!o.store?.path)throw Error(`Config must specify store.path`);let s=n(i);return o.sources=o.sources.map(e=>({...e,path:p(r(s,e.path),s,`source`)})),o.store.path=p(r(s,o.store.path),s,`store`),o.curated=o.curated??{path:a.aiCurated},o.curated.path=p(r(s,o.curated.path),s,`curated`),g(o,s),o}catch(e){return f.error(`Failed to load config`,{configPath:i,...u(e)}),f.warn(`Falling back to default configuration`,{configPath:i}),h()}}function h(){let e=process.env.KB_WORKSPACE_ROOT??process.cwd(),t={sources:[{path:e,excludePatterns:[`node_modules/**`,`dist/**`,`.git/**`,`coverage/**`,`*.lock`,`pnpm-lock.yaml`]}],serverName:`knowledge-base`,indexing:{chunkSize:1500,chunkOverlap:200,minChunkSize:100},embedding:{model:`mixedbread-ai/mxbai-embed-large-v1`,dimensions:1024},store:{backend:`lancedb`,path:r(e,a.data)},curated:{path:r(e,a.aiCurated)}};return g(t,e),t}function g(e,t){if(!c())return;let n=t,i=l(n);e.store.path=r(s(i.partition)),e.curated||={path:r(n,a.aiCurated)}}export{m as loadConfig};
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{createLogger as e,getPartitionDir as t,
|
|
1
|
+
import{createLogger as e,getPartitionDir as t,isUserInstalled as n,listWorkspaces as r}from"../../core/dist/index.js";import{createStore as i}from"../../store/dist/index.js";const a=e(`cross-workspace`);function o(e,t){if(!n())return[];let i=r();if(i.length===0)return[];if(e.includes(`*`))return t?i.filter(e=>e.partition!==t):i;let a=[];for(let n of e){let e=i.find(e=>e.partition===n);if(e){e.partition!==t&&a.push(e);continue}let r=i.filter(e=>e.partition!==t&&e.partition.replace(/-[a-f0-9]{8}$/,``)===n.toLowerCase());a.push(...r)}let o=new Set;return a.filter(e=>o.has(e.partition)?!1:(o.add(e.partition),!0))}async function s(e){let n=new Map;for(let r of e)try{let e=await i({backend:`lancedb`,path:t(r.partition)});await e.initialize(),n.set(r.partition,e)}catch(e){a.warn(`Failed to open workspace store`,{partition:r.partition,err:e})}return{stores:n,closeAll:async()=>{for(let[,e]of n)try{await e.close()}catch{}}}}async function c(e,t,n){let r=[...e.entries()].map(async([e,r])=>{try{return(await r.search(t,n)).map(t=>({...t,workspace:e}))}catch(t){return a.warn(`Cross-workspace search failed for partition`,{partition:e,err:t}),[]}});return(await Promise.all(r)).flat().sort((e,t)=>t.score-e.score).slice(0,n.limit)}async function l(e,t,n){let r=[...e.entries()].map(async([e,r])=>{try{return(await r.ftsSearch(t,n)).map(t=>({...t,workspace:e}))}catch(t){return a.warn(`Cross-workspace FTS search failed for partition`,{partition:e,err:t}),[]}});return(await Promise.all(r)).flat().sort((e,t)=>t.score-e.score).slice(0,n.limit)}export{l as fanOutFtsSearch,c as fanOutSearch,s as openWorkspaceStores,o as resolveWorkspaces};
|
|
@@ -1 +1 @@
|
|
|
1
|
-
// ── packages/server/dist/server.js @ 0.1.16 (minified build output; "-" side of the diff) ──
// Minified alias map (as visible in this bundle):
//   Y = initializeKnowledgeBase: constructs OnnxEmbedder (Pe) + initialize, createStore (ze)
//       + initialize, IncrementalIndexer (Le), CuratedKnowledgeManager (e) over curated.path,
//       SqliteGraphStore (Re) attached via indexer.setGraphStore, probes initializeWasm (Ne)
//       (logs regex fallback when unavailable), optionally builds bridge (d), PolicyStore (Ie)
//       and EvolutionCollector (Fe), and stats KB_PATHS.aiKb for onboard completion/mtime.
//   X = createMcpServer: new McpServer(serverName ?? `knowledge-base`, getCurrentVersion) then Z.
//   Z = registerMcpTools: installs the replay interceptor (t) and registers every tool module
//       plus resources (n) and the replay tool (b); bridge/policy/evolution tools only when
//       those components exist.
//   Ve = createServer: eager Y + X; runInitialIndex closure runs indexer.index with progress
//       logging, then createFtsIndex and curated.reindexAll (each failure logged, not fatal);
//       installs SIGINT/SIGTERM shutdown and a 5 s parent-pid liveness poll (process.kill(ppid, 0)).
//   Ue = createLazyServer: registers placeholder tools/resource first (with sendToolListChanged
//       suppressed), swaps in real registrations once Y resolves, then wraps every handler not
//       named in He with a Promise.race timeout (Q = 5000 ms) while indexer.isIndexing; warns on
//       ALL_TOOL_NAMES count mismatch.
//   He = re-index-guard-exempt tool names; $ = ALL_TOOL_NAMES (dot-separated list, split on `.`).
// NOTE(review): the line breaks below (e.g. after `as `, `new `, `return `) mirror the diff
// rendering of this single-line minified file — code is otherwise byte-identical.
import{CuratedKnowledgeManager as e}from"./curated-manager.js";import{installReplayInterceptor as t}from"./replay-interceptor.js";import{registerResources as n}from"./resources/resources.js";import{registerAnalyzeDependenciesTool as r,registerAnalyzeDiagramTool as i,registerAnalyzeEntryPointsTool as a,registerAnalyzePatternsTool as o,registerAnalyzeStructureTool as s,registerAnalyzeSymbolsTool as c,registerBlastRadiusTool as l}from"./tools/analyze.tools.js";import{registerAuditTool as u}from"./tools/audit.tool.js";import{initBridgeComponents as d,registerErPullTool as f,registerErPushTool as p,registerErSyncStatusTool as m}from"./tools/bridge.tools.js";import{registerErEvolveReviewTool as ee}from"./tools/evolution.tools.js";import{registerDigestTool as te,registerEvidenceMapTool as ne,registerForgeClassifyTool as re,registerForgeGroundTool as ie,registerStratumCardTool as ae}from"./tools/forge.tools.js";import{registerForgetTool as oe}from"./tools/forget.tool.js";import{registerGraphTool as se}from"./tools/graph.tool.js";import{registerListTool as ce}from"./tools/list.tool.js";import{registerLookupTool as le}from"./tools/lookup.tool.js";import{registerOnboardTool as ue}from"./tools/onboard.tool.js";import{registerErUpdatePolicyTool as h}from"./tools/policy.tools.js";import{registerProduceKnowledgeTool as g}from"./tools/produce.tool.js";import{registerReadTool as _}from"./tools/read.tool.js";import{registerReindexTool as v}from"./tools/reindex.tool.js";import{registerRememberTool as y}from"./tools/remember.tool.js";import{registerReplayTool as b}from"./tools/replay.tool.js";import{registerSearchTool as x}from"./tools/search.tool.js";import{registerStatusTool as S}from"./tools/status.tool.js";import{registerBatchTool as C,registerCheckTool as w,registerCheckpointTool as T,registerCodemodTool as E,registerCompactTool as D,registerDataTransformTool as O,registerDeadSymbolsTool as k,registerDelegateTool as A,registerDiffParseTool as j,registerEvalTool as 
M,registerFileSummaryTool as N,registerFindTool as P,registerGitContextTool as F,registerGuideTool as I,registerHealthTool as L,registerLaneTool as R,registerParseOutputTool as z,registerProcessTool as B,registerQueueTool as V,registerRenameTool as H,registerScopeMapTool as U,registerStashTool as de,registerSymbolTool as fe,registerTestRunTool as pe,registerTraceTool as me,registerWatchTool as he,registerWebFetchTool as ge,registerWorksetTool as _e}from"./tools/toolkit.tools.js";import{registerUpdateTool as W}from"./tools/update.tool.js";import{registerChangelogTool as ve,registerEncodeTool as ye,registerEnvTool as be,registerHttpTool as xe,registerMeasureTool as Se,registerRegexTestTool as Ce,registerSchemaValidateTool as we,registerSnippetTool as Te,registerTimeTool as Ee,registerWebSearchTool as De}from"./tools/utility.tools.js";import{getCurrentVersion as G}from"./version-check.js";import{existsSync as Oe,statSync as ke}from"node:fs";import{resolve as Ae}from"node:path";import{KB_PATHS as je,createLogger as Me,serializeError as K}from"../../core/dist/index.js";import{initializeWasm as Ne}from"../../chunker/dist/index.js";import{OnnxEmbedder as Pe}from"../../embeddings/dist/index.js";import{EvolutionCollector as Fe,PolicyStore as Ie}from"../../enterprise-bridge/dist/index.js";import{IncrementalIndexer as Le}from"../../indexer/dist/index.js";import{SqliteGraphStore as Re,createStore as ze}from"../../store/dist/index.js";import{FileCache as Be}from"../../tools/dist/index.js";import{McpServer as q}from"@modelcontextprotocol/sdk/server/mcp.js";const J=Me(`server`);async function Y(t){J.info(`Initializing knowledge base components`);let n=new Pe({model:t.embedding.model,dimensions:t.embedding.dimensions});await n.initialize(),J.info(`Embedder loaded`,{modelId:n.modelId,dimensions:n.dimensions});let r=await ze({backend:t.store.backend,path:t.store.path});await r.initialize(),J.info(`Store initialized`);let i=new Le(n,r),a=t.curated.path,o=new e(a,r,n),s=new 
Re({path:t.store.path});await s.initialize(),J.info(`Graph store initialized`),i.setGraphStore(s),await Ne()?J.info(`WASM tree-sitter enabled for AST analysis`):J.warn(`WASM tree-sitter not available; analyzers will use regex fallback`);let c=d(t.er),l=c?new Ie(t.curated.path):void 0;l&&J.info(`Policy store initialized`,{ruleCount:l.getRules().length});let u=c?new Fe:void 0,f=Ae(process.cwd(),je.aiKb),p=Oe(f),m;if(p)try{m=ke(f).mtime.toISOString()}catch{}return J.info(`Onboard state detected`,{onboardComplete:p,onboardTimestamp:m}),{embedder:n,store:r,indexer:i,curated:o,graphStore:s,fileCache:new Be,bridge:c,policyStore:l,evolutionCollector:u,onboardComplete:p,onboardTimestamp:m}}function X(e,t){let n=new q({name:t.serverName??`knowledge-base`,version:G()});return Z(n,e,t),n}function Z(e,d,G){t(e),x(e,d.embedder,d.store,d.graphStore,d.bridge,d.evolutionCollector),le(e,d.store),S(e,d.store,d.graphStore,d.curated,{onboardComplete:d.onboardComplete,onboardTimestamp:d.onboardTimestamp}),v(e,d.indexer,G,d.curated,d.store),y(e,d.curated,d.policyStore,d.evolutionCollector),W(e,d.curated),oe(e,d.curated),_(e,d.curated),ce(e,d.curated),s(e,d.store,d.embedder),r(e,d.store,d.embedder),c(e,d.store,d.embedder),o(e,d.store,d.embedder),a(e,d.store,d.embedder),i(e,d.store,d.embedder),l(e,d.store,d.embedder),g(e),ue(e,d.store,d.embedder),se(e,d.graphStore),u(e,d.store,d.embedder),D(e,d.embedder,d.fileCache),U(e,d.embedder,d.store),P(e,d.embedder,d.store),z(e),_e(e),w(e),C(e,d.embedder,d.store),fe(e,d.embedder,d.store),M(e),pe(e),de(e),F(e),j(e),H(e),E(e),N(e,d.fileCache),T(e),O(e),me(e,d.embedder,d.store),B(e),he(e),k(e,d.embedder,d.store),A(e),L(e),R(e),V(e),ge(e),I(e),ne(e),te(e,d.embedder),re(e),ae(e,d.embedder,d.fileCache),ie(e,d.embedder,d.store),De(e),xe(e),Ce(e),ye(e),Se(e),ve(e),we(e),Te(e),be(e),Ee(e),d.bridge&&(p(e,d.bridge,d.evolutionCollector),f(e,d.bridge),m(e,d.bridge)),d.policyStore&&h(e,d.policyStore),d.evolutionCollector&&ee(e,d.evolutionCollector),n(e,d.store),b(e
)}async function Ve(e){let t=await Y(e),n=X(t,e);J.info(`MCP server configured`,{toolCount:$.length,resourceCount:2});let r=async()=>{try{let n=e.sources.map(e=>e.path).join(`, `);J.info(`Running initial index`,{sourcePaths:n});let r=await t.indexer.index(e,e=>{e.phase===`crawling`||e.phase===`done`||(e.phase===`chunking`&&e.currentFile&&J.debug(`Indexing file`,{current:e.filesProcessed+1,total:e.filesTotal,file:e.currentFile}),e.phase===`cleanup`&&J.debug(`Index cleanup`,{staleEntries:e.filesTotal-e.filesProcessed}))});J.info(`Initial index complete`,{filesProcessed:r.filesProcessed,filesSkipped:r.filesSkipped,chunksCreated:r.chunksCreated,durationMs:r.durationMs});try{await t.store.createFtsIndex()}catch(e){J.warn(`FTS index creation failed`,K(e))}try{let e=await t.curated.reindexAll();J.info(`Curated re-index complete`,{indexed:e.indexed})}catch(e){J.error(`Curated re-index failed`,K(e))}}catch(e){J.error(`Initial index failed; will retry on kb_reindex`,K(e))}},i=async()=>{J.info(`Shutting down`),await t.embedder.shutdown().catch(()=>{}),await t.graphStore.close().catch(()=>{}),await t.store.close(),process.exit(0)};process.on(`SIGINT`,i),process.on(`SIGTERM`,i);let a=process.ppid,o=setInterval(()=>{try{process.kill(a,0)}catch{J.info(`Parent process died; shutting down`,{parentPid:a}),clearInterval(o),i()}},5e3);return o.unref(),{server:n,runInitialIndex:r,shutdown:i}}const He=new 
Set(`batch.changelog.check.checkpoint.codemod.compact.data_transform.delegate.diff_parse.digest.encode.env.eval.evidence_map.file_summary.forge_classify.git_context.graph.guide.health.http.lane.measure.onboard.parse_output.process.queue.read.regex_test.reindex.remember.rename.replay.schema_validate.scope_map.snippet.stash.status.stratum_card.test_run.time.update.forget.list.watch.web_fetch.web_search.workset`.split(`.`)),Q=5e3,$=`analyze_dependencies.analyze_diagram.analyze_entry_points.analyze_patterns.analyze_structure.analyze_symbols.audit.batch.blast_radius.changelog.check.checkpoint.codemod.compact.data_transform.dead_symbols.delegate.diff_parse.digest.encode.env.eval.evidence_map.file_summary.find.forge_classify.forge_ground.forget.git_context.graph.guide.health.http.lane.list.lookup.measure.onboard.parse_output.process.produce_knowledge.queue.read.regex_test.reindex.remember.rename.replay.schema_validate.scope_map.search.snippet.stash.status.stratum_card.symbol.test_run.time.trace.update.watch.web_fetch.web_search.workset`.split(`.`);function Ue(e){let t=new q({name:e.serverName??`knowledge-base`,version:G()}),n=t.sendToolListChanged.bind(t);t.sendToolListChanged=()=>{};let r=$.map(e=>t.registerTool(e,{description:`${e} (initializing...)`,inputSchema:{}},async()=>({content:[{type:`text`,text:`KB is still initializing, please retry in a few seconds.`}]})));t.sendToolListChanged=n;let i=t.resource(`kb-status`,`kb://status`,{description:`Knowledge base status (initializing...)`,mimeType:`text/plain`},async()=>({contents:[{uri:`kb://status`,text:`KB is initializing...`,mimeType:`text/plain`}]})),a,o=new Promise(e=>{a=e}),s=(async()=>{let n=await Y(e),o=t.sendToolListChanged.bind(t);t.sendToolListChanged=()=>{};for(let e of r)e.remove();i.remove(),Z(t,n,e),t.sendToolListChanged=o,t.sendToolListChanged();let s=t._registeredTools??{};for(let[e,t]of Object.entries(s)){if(He.has(e))continue;let r=t.handler;t.handler=async(...t)=>{if(!n.indexer.isIndexing)return 
r(...t);let i=new Promise(t=>setTimeout(()=>t({content:[{type:`text`,text:`⏳ KB is re-indexing. The tool "${e}" timed out waiting for index data (${Q/1e3}s).\n\nThe existing index may be temporarily locked. Please retry shortly — indexing will complete automatically.`}]}),Q));return Promise.race([r(...t),i])}}let c=Object.keys(s).length;c!==$.length&&J.warn(`ALL_TOOL_NAMES count mismatch`,{expectedToolCount:$.length,registeredToolCount:c}),J.info(`MCP server configured`,{toolCount:$.length,resourceCount:2}),a?.(n)})(),c=async()=>{let t=await o;try{let n=e.sources.map(e=>e.path).join(`, `);J.info(`Running initial index`,{sourcePaths:n});let r=await t.indexer.index(e,e=>{e.phase===`crawling`||e.phase===`done`||(e.phase===`chunking`&&e.currentFile&&J.debug(`Indexing file`,{current:e.filesProcessed+1,total:e.filesTotal,file:e.currentFile}),e.phase===`cleanup`&&J.debug(`Index cleanup`,{staleEntries:e.filesTotal-e.filesProcessed}))});J.info(`Initial index complete`,{filesProcessed:r.filesProcessed,filesSkipped:r.filesSkipped,chunksCreated:r.chunksCreated,durationMs:r.durationMs});try{await t.store.createFtsIndex()}catch(e){J.warn(`FTS index creation failed`,K(e))}try{let e=await t.curated.reindexAll();J.info(`Curated re-index complete`,{indexed:e.indexed})}catch(e){J.error(`Curated re-index failed`,K(e))}}catch(e){J.error(`Initial index failed; will retry on kb_reindex`,K(e))}},l=process.ppid,u=setInterval(()=>{try{process.kill(l,0)}catch{J.info(`Parent process died; shutting down`,{parentPid:l}),clearInterval(u),o.then(async e=>{await e.embedder.shutdown().catch(()=>{}),await e.graphStore.close().catch(()=>{}),await e.store.close().catch(()=>{})}).catch(()=>{}).finally(()=>process.exit(0))}},5e3);return u.unref(),{server:t,ready:s,runInitialIndex:c}}export{$ as ALL_TOOL_NAMES,Ue as createLazyServer,X as createMcpServer,Ve as createServer,Y as initializeKnowledgeBase,Z as registerMcpTools};
|
|
1
|
+
// ── packages/server/dist/server.js @ 0.1.18 (minified build output; "+" side of the diff) ──
// Same module shape as the previous build, with reshuffled minified aliases:
//   Y = initializeKnowledgeBase, X = createMcpServer, Z = registerMcpTools,
//   He = createServer, We = createLazyServer, Ue = re-index-guard-exempt tool
//   names, $ = ALL_TOOL_NAMES, Q = 5000 ms guard timeout.
// Functional changes visible in this hunk vs 0.1.16:
//   * FileHashCache (Re, from ../../indexer) is constructed with the store
//     path, `.load()`ed, and attached to the IncrementalIndexer via
//     `i.setHashCache(a)` during initializeKnowledgeBase.
//   * registerBlastRadiusTool (l) and registerSymbolTool (fe) now also
//     receive `d.graphStore` as an extra argument (matches the .d.ts change
//     adding the optional IGraphStore parameter).
// Everything else (lazy placeholder registration, Promise.race re-index
// timeout around non-exempt handlers, SIGINT/SIGTERM shutdown, 5 s
// parent-pid liveness poll) is unchanged in structure.
// NOTE(review): the line breaks below (e.g. after `as `, `new `, `return `)
// mirror the diff rendering of this single-line minified file — code is
// otherwise byte-identical.
import{CuratedKnowledgeManager as e}from"./curated-manager.js";import{installReplayInterceptor as t}from"./replay-interceptor.js";import{registerResources as n}from"./resources/resources.js";import{registerAnalyzeDependenciesTool as r,registerAnalyzeDiagramTool as i,registerAnalyzeEntryPointsTool as a,registerAnalyzePatternsTool as o,registerAnalyzeStructureTool as s,registerAnalyzeSymbolsTool as c,registerBlastRadiusTool as l}from"./tools/analyze.tools.js";import{registerAuditTool as u}from"./tools/audit.tool.js";import{initBridgeComponents as d,registerErPullTool as f,registerErPushTool as p,registerErSyncStatusTool as m}from"./tools/bridge.tools.js";import{registerErEvolveReviewTool as h}from"./tools/evolution.tools.js";import{registerDigestTool as ee,registerEvidenceMapTool as te,registerForgeClassifyTool as ne,registerForgeGroundTool as re,registerStratumCardTool as ie}from"./tools/forge.tools.js";import{registerForgetTool as ae}from"./tools/forget.tool.js";import{registerGraphTool as oe}from"./tools/graph.tool.js";import{registerListTool as se}from"./tools/list.tool.js";import{registerLookupTool as ce}from"./tools/lookup.tool.js";import{registerOnboardTool as le}from"./tools/onboard.tool.js";import{registerErUpdatePolicyTool as ue}from"./tools/policy.tools.js";import{registerProduceKnowledgeTool as g}from"./tools/produce.tool.js";import{registerReadTool as _}from"./tools/read.tool.js";import{registerReindexTool as v}from"./tools/reindex.tool.js";import{registerRememberTool as y}from"./tools/remember.tool.js";import{registerReplayTool as b}from"./tools/replay.tool.js";import{registerSearchTool as x}from"./tools/search.tool.js";import{registerStatusTool as S}from"./tools/status.tool.js";import{registerBatchTool as C,registerCheckTool as w,registerCheckpointTool as T,registerCodemodTool as E,registerCompactTool as D,registerDataTransformTool as O,registerDeadSymbolsTool as k,registerDelegateTool as A,registerDiffParseTool as j,registerEvalTool as 
M,registerFileSummaryTool as N,registerFindTool as P,registerGitContextTool as F,registerGuideTool as I,registerHealthTool as L,registerLaneTool as R,registerParseOutputTool as z,registerProcessTool as B,registerQueueTool as V,registerRenameTool as H,registerScopeMapTool as U,registerStashTool as de,registerSymbolTool as fe,registerTestRunTool as pe,registerTraceTool as me,registerWatchTool as he,registerWebFetchTool as ge,registerWorksetTool as _e}from"./tools/toolkit.tools.js";import{registerUpdateTool as ve}from"./tools/update.tool.js";import{registerChangelogTool as ye,registerEncodeTool as be,registerEnvTool as xe,registerHttpTool as Se,registerMeasureTool as Ce,registerRegexTestTool as we,registerSchemaValidateTool as Te,registerSnippetTool as Ee,registerTimeTool as De,registerWebSearchTool as Oe}from"./tools/utility.tools.js";import{getCurrentVersion as W}from"./version-check.js";import{existsSync as ke,statSync as Ae}from"node:fs";import{resolve as je}from"node:path";import{KB_PATHS as Me,createLogger as Ne,serializeError as G}from"../../core/dist/index.js";import{initializeWasm as Pe}from"../../chunker/dist/index.js";import{OnnxEmbedder as Fe}from"../../embeddings/dist/index.js";import{EvolutionCollector as Ie,PolicyStore as Le}from"../../enterprise-bridge/dist/index.js";import{FileHashCache as Re,IncrementalIndexer as ze}from"../../indexer/dist/index.js";import{SqliteGraphStore as Be,createStore as Ve}from"../../store/dist/index.js";import{FileCache as K}from"../../tools/dist/index.js";import{McpServer as q}from"@modelcontextprotocol/sdk/server/mcp.js";const J=Ne(`server`);async function Y(t){J.info(`Initializing knowledge base components`);let n=new Fe({model:t.embedding.model,dimensions:t.embedding.dimensions});await n.initialize(),J.info(`Embedder loaded`,{modelId:n.modelId,dimensions:n.dimensions});let r=await Ve({backend:t.store.backend,path:t.store.path});await r.initialize(),J.info(`Store initialized`);let i=new ze(n,r),a=new 
Re(t.store.path);a.load(),i.setHashCache(a);let o=t.curated.path,s=new e(o,r,n),c=new Be({path:t.store.path});await c.initialize(),J.info(`Graph store initialized`),i.setGraphStore(c),await Pe()?J.info(`WASM tree-sitter enabled for AST analysis`):J.warn(`WASM tree-sitter not available; analyzers will use regex fallback`);let l=d(t.er),u=l?new Le(t.curated.path):void 0;u&&J.info(`Policy store initialized`,{ruleCount:u.getRules().length});let f=l?new Ie:void 0,p=je(process.cwd(),Me.aiKb),m=ke(p),h;if(m)try{h=Ae(p).mtime.toISOString()}catch{}return J.info(`Onboard state detected`,{onboardComplete:m,onboardTimestamp:h}),{embedder:n,store:r,indexer:i,curated:s,graphStore:c,fileCache:new K,bridge:l,policyStore:u,evolutionCollector:f,onboardComplete:m,onboardTimestamp:h}}function X(e,t){let n=new q({name:t.serverName??`knowledge-base`,version:W()});return Z(n,e,t),n}function Z(e,d,W){t(e),x(e,d.embedder,d.store,d.graphStore,d.bridge,d.evolutionCollector),ce(e,d.store),S(e,d.store,d.graphStore,d.curated,{onboardComplete:d.onboardComplete,onboardTimestamp:d.onboardTimestamp}),v(e,d.indexer,W,d.curated,d.store),y(e,d.curated,d.policyStore,d.evolutionCollector),ve(e,d.curated),ae(e,d.curated),_(e,d.curated),se(e,d.curated),s(e,d.store,d.embedder),r(e,d.store,d.embedder),c(e,d.store,d.embedder),o(e,d.store,d.embedder),a(e,d.store,d.embedder),i(e,d.store,d.embedder),l(e,d.store,d.embedder,d.graphStore),g(e),le(e,d.store,d.embedder),oe(e,d.graphStore),u(e,d.store,d.embedder),D(e,d.embedder,d.fileCache),U(e,d.embedder,d.store),P(e,d.embedder,d.store),z(e),_e(e),w(e),C(e,d.embedder,d.store),fe(e,d.embedder,d.store,d.graphStore),M(e),pe(e),de(e),F(e),j(e),H(e),E(e),N(e,d.fileCache),T(e),O(e),me(e,d.embedder,d.store),B(e),he(e),k(e,d.embedder,d.store),A(e),L(e),R(e),V(e),ge(e),I(e),te(e),ee(e,d.embedder),ne(e),ie(e,d.embedder,d.fileCache),re(e,d.embedder,d.store),Oe(e),Se(e),we(e),be(e),Ce(e),ye(e),Te(e),Ee(e),xe(e),De(e),d.bridge&&(p(e,d.bridge,d.evolutionCollector),f(e,d.bridge),m(
e,d.bridge)),d.policyStore&&ue(e,d.policyStore),d.evolutionCollector&&h(e,d.evolutionCollector),n(e,d.store),b(e)}async function He(e){let t=await Y(e),n=X(t,e);J.info(`MCP server configured`,{toolCount:$.length,resourceCount:2});let r=async()=>{try{let n=e.sources.map(e=>e.path).join(`, `);J.info(`Running initial index`,{sourcePaths:n});let r=await t.indexer.index(e,e=>{e.phase===`crawling`||e.phase===`done`||(e.phase===`chunking`&&e.currentFile&&J.debug(`Indexing file`,{current:e.filesProcessed+1,total:e.filesTotal,file:e.currentFile}),e.phase===`cleanup`&&J.debug(`Index cleanup`,{staleEntries:e.filesTotal-e.filesProcessed}))});J.info(`Initial index complete`,{filesProcessed:r.filesProcessed,filesSkipped:r.filesSkipped,chunksCreated:r.chunksCreated,durationMs:r.durationMs});try{await t.store.createFtsIndex()}catch(e){J.warn(`FTS index creation failed`,G(e))}try{let e=await t.curated.reindexAll();J.info(`Curated re-index complete`,{indexed:e.indexed})}catch(e){J.error(`Curated re-index failed`,G(e))}}catch(e){J.error(`Initial index failed; will retry on kb_reindex`,G(e))}},i=async()=>{J.info(`Shutting down`),await t.embedder.shutdown().catch(()=>{}),await t.graphStore.close().catch(()=>{}),await t.store.close(),process.exit(0)};process.on(`SIGINT`,i),process.on(`SIGTERM`,i);let a=process.ppid,o=setInterval(()=>{try{process.kill(a,0)}catch{J.info(`Parent process died; shutting down`,{parentPid:a}),clearInterval(o),i()}},5e3);return o.unref(),{server:n,runInitialIndex:r,shutdown:i}}const Ue=new 
Set(`batch.changelog.check.checkpoint.codemod.compact.data_transform.delegate.diff_parse.digest.encode.env.eval.evidence_map.file_summary.forge_classify.git_context.graph.guide.health.http.lane.measure.onboard.parse_output.process.queue.read.regex_test.reindex.remember.rename.replay.schema_validate.scope_map.snippet.stash.status.stratum_card.test_run.time.update.forget.list.watch.web_fetch.web_search.workset`.split(`.`)),Q=5e3,$=`analyze_dependencies.analyze_diagram.analyze_entry_points.analyze_patterns.analyze_structure.analyze_symbols.audit.batch.blast_radius.changelog.check.checkpoint.codemod.compact.data_transform.dead_symbols.delegate.diff_parse.digest.encode.env.eval.evidence_map.file_summary.find.forge_classify.forge_ground.forget.git_context.graph.guide.health.http.lane.list.lookup.measure.onboard.parse_output.process.produce_knowledge.queue.read.regex_test.reindex.remember.rename.replay.schema_validate.scope_map.search.snippet.stash.status.stratum_card.symbol.test_run.time.trace.update.watch.web_fetch.web_search.workset`.split(`.`);function We(e){let t=new q({name:e.serverName??`knowledge-base`,version:W()}),n=t.sendToolListChanged.bind(t);t.sendToolListChanged=()=>{};let r=$.map(e=>t.registerTool(e,{description:`${e} (initializing...)`,inputSchema:{}},async()=>({content:[{type:`text`,text:`KB is still initializing, please retry in a few seconds.`}]})));t.sendToolListChanged=n;let i=t.resource(`kb-status`,`kb://status`,{description:`Knowledge base status (initializing...)`,mimeType:`text/plain`},async()=>({contents:[{uri:`kb://status`,text:`KB is initializing...`,mimeType:`text/plain`}]})),a,o=new Promise(e=>{a=e}),s=(async()=>{let n=await Y(e),o=t.sendToolListChanged.bind(t);t.sendToolListChanged=()=>{};for(let e of r)e.remove();i.remove(),Z(t,n,e),t.sendToolListChanged=o,t.sendToolListChanged();let s=t._registeredTools??{};for(let[e,t]of Object.entries(s)){if(Ue.has(e))continue;let r=t.handler;t.handler=async(...t)=>{if(!n.indexer.isIndexing)return 
r(...t);let i=new Promise(t=>setTimeout(()=>t({content:[{type:`text`,text:`⏳ KB is re-indexing. The tool "${e}" timed out waiting for index data (${Q/1e3}s).\n\nThe existing index may be temporarily locked. Please retry shortly — indexing will complete automatically.`}]}),Q));return Promise.race([r(...t),i])}}let c=Object.keys(s).length;c!==$.length&&J.warn(`ALL_TOOL_NAMES count mismatch`,{expectedToolCount:$.length,registeredToolCount:c}),J.info(`MCP server configured`,{toolCount:$.length,resourceCount:2}),a?.(n)})(),c=async()=>{let t=await o;try{let n=e.sources.map(e=>e.path).join(`, `);J.info(`Running initial index`,{sourcePaths:n});let r=await t.indexer.index(e,e=>{e.phase===`crawling`||e.phase===`done`||(e.phase===`chunking`&&e.currentFile&&J.debug(`Indexing file`,{current:e.filesProcessed+1,total:e.filesTotal,file:e.currentFile}),e.phase===`cleanup`&&J.debug(`Index cleanup`,{staleEntries:e.filesTotal-e.filesProcessed}))});J.info(`Initial index complete`,{filesProcessed:r.filesProcessed,filesSkipped:r.filesSkipped,chunksCreated:r.chunksCreated,durationMs:r.durationMs});try{await t.store.createFtsIndex()}catch(e){J.warn(`FTS index creation failed`,G(e))}try{let e=await t.curated.reindexAll();J.info(`Curated re-index complete`,{indexed:e.indexed})}catch(e){J.error(`Curated re-index failed`,G(e))}}catch(e){J.error(`Initial index failed; will retry on kb_reindex`,G(e))}},l=process.ppid,u=setInterval(()=>{try{process.kill(l,0)}catch{J.info(`Parent process died; shutting down`,{parentPid:l}),clearInterval(u),o.then(async e=>{await e.embedder.shutdown().catch(()=>{}),await e.graphStore.close().catch(()=>{}),await e.store.close().catch(()=>{})}).catch(()=>{}).finally(()=>process.exit(0))}},5e3);return u.unref(),{server:t,ready:s,runInitialIndex:c}}export{$ as ALL_TOOL_NAMES,We as createLazyServer,X as createMcpServer,He as createServer,Y as initializeKnowledgeBase,Z as registerMcpTools};
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
|
|
2
2
|
import { IEmbedder } from "@kb/embeddings";
|
|
3
|
-
import { IKnowledgeStore } from "@kb/store";
|
|
3
|
+
import { IGraphStore, IKnowledgeStore } from "@kb/store";
|
|
4
4
|
|
|
5
5
|
//#region packages/server/src/tools/analyze.tools.d.ts
|
|
6
6
|
declare function registerAnalyzeStructureTool(server: McpServer, store: IKnowledgeStore, embedder: IEmbedder): void;
|
|
@@ -9,6 +9,6 @@ declare function registerAnalyzeSymbolsTool(server: McpServer, store: IKnowledge
|
|
|
9
9
|
declare function registerAnalyzePatternsTool(server: McpServer, store: IKnowledgeStore, embedder: IEmbedder): void;
|
|
10
10
|
declare function registerAnalyzeEntryPointsTool(server: McpServer, store: IKnowledgeStore, embedder: IEmbedder): void;
|
|
11
11
|
declare function registerAnalyzeDiagramTool(server: McpServer, store: IKnowledgeStore, embedder: IEmbedder): void;
|
|
12
|
-
declare function registerBlastRadiusTool(server: McpServer, store: IKnowledgeStore, embedder: IEmbedder): void;
|
|
12
|
+
declare function registerBlastRadiusTool(server: McpServer, store: IKnowledgeStore, embedder: IEmbedder, graphStore?: IGraphStore): void;
|
|
13
13
|
//#endregion
|
|
14
14
|
export { registerAnalyzeDependenciesTool, registerAnalyzeDiagramTool, registerAnalyzeEntryPointsTool, registerAnalyzePatternsTool, registerAnalyzeStructureTool, registerAnalyzeSymbolsTool, registerBlastRadiusTool };
|