@vpxa/aikit 0.1.2 → 0.1.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/packages/cli/dist/commands/init/constants.d.ts +3 -1
- package/packages/cli/dist/commands/init/constants.js +1 -1
- package/packages/cli/dist/commands/init/index.js +4 -4
- package/packages/cli/dist/commands/init/scaffold.d.ts +8 -1
- package/packages/cli/dist/commands/init/scaffold.js +1 -1
- package/packages/cli/dist/commands/init/user.js +4 -4
- package/packages/cli/dist/commands/upgrade.js +1 -1
- package/packages/core/dist/global-registry.js +1 -1
- package/packages/core/dist/types.d.ts +2 -0
- package/packages/flows/dist/git.js +1 -1
- package/packages/flows/dist/registry.d.ts +3 -3
- package/packages/flows/dist/registry.js +1 -1
- package/packages/flows/dist/symlinks.js +1 -1
- package/packages/indexer/dist/filesystem-crawler.js +1 -1
- package/packages/indexer/dist/hash-cache.js +1 -1
- package/packages/kb-client/dist/direct-client.d.ts +33 -34
- package/packages/kb-client/dist/index.d.ts +5 -4
- package/packages/kb-client/dist/mcp-client.d.ts +18 -18
- package/packages/kb-client/dist/parsers.d.ts +14 -11
- package/packages/kb-client/dist/types.d.ts +50 -47
- package/packages/present/dist/index.html +26 -26
- package/packages/server/dist/config.js +1 -1
- package/packages/server/dist/idle-timer.d.ts +4 -0
- package/packages/server/dist/idle-timer.js +1 -1
- package/packages/server/dist/index.js +1 -1
- package/packages/server/dist/memory-monitor.d.ts +2 -2
- package/packages/server/dist/memory-monitor.js +1 -1
- package/packages/server/dist/server.d.ts +1 -1
- package/packages/server/dist/server.js +2 -2
- package/packages/server/dist/tool-metadata.js +1 -1
- package/packages/server/dist/tools/config.tool.d.ts +8 -0
- package/packages/server/dist/tools/config.tool.js +12 -0
- package/packages/server/dist/tools/flow.tools.js +1 -1
- package/packages/server/dist/tools/present/browser.js +7 -7
- package/packages/server/dist/tools/present/tool.js +4 -4
- package/packages/server/dist/tools/search.tool.js +4 -4
- package/packages/server/dist/tools/status.tool.js +3 -3
- package/packages/store/dist/sqlite-graph-store.d.ts +3 -0
- package/packages/store/dist/sqlite-graph-store.js +3 -3
- package/packages/tools/dist/checkpoint.js +1 -1
- package/packages/tools/dist/evidence-map.js +2 -2
- package/packages/tools/dist/queue.js +1 -1
- package/packages/tools/dist/restore-points.js +1 -1
- package/packages/tools/dist/schema-validate.js +1 -1
- package/packages/tools/dist/snippet.js +1 -1
- package/packages/tools/dist/stash.js +1 -1
- package/packages/tools/dist/workset.js +1 -1
- package/packages/tui/dist/{App-B2-KJPt4.js → App-DpjN3iS-.js} +1 -1
- package/packages/tui/dist/App.js +1 -1
- package/packages/tui/dist/LogPanel-Db-SeZhR.js +3 -0
- package/packages/tui/dist/index.js +1 -1
- package/packages/tui/dist/panels/LogPanel.js +1 -1
- package/scaffold/general/skills/multi-agents-development/SKILL.md +435 -435
- package/scaffold/general/skills/present/SKILL.md +424 -424
- package/packages/kb-client/dist/__tests__/direct-client.test.d.ts +0 -1
- package/packages/kb-client/dist/__tests__/mcp-client.test.d.ts +0 -1
- package/packages/kb-client/dist/__tests__/parsers.test.d.ts +0 -1
- package/packages/tui/dist/LogPanel-E_1Do4-j.js +0 -3
package/package.json
CHANGED
|
@@ -14,6 +14,8 @@ declare const MCP_SERVER_ENTRY: {
|
|
|
14
14
|
};
|
|
15
15
|
/** Skills shipped with the AI Kit package and installed during init. */
|
|
16
16
|
declare const SKILL_NAMES: readonly ["aikit", "brainstorming", "multi-agents-development", "session-handoff", "requirements-clarity", "lesson-learned", "c4-architecture", "adr-skill", "present"];
|
|
17
|
+
/** Built-in flow directories shipped with the package (under scaffold/flows/). */
|
|
18
|
+
declare const FLOW_DIRS: readonly ["aikit-basic", "aikit-advanced"];
|
|
17
19
|
/**
|
|
18
20
|
* VS Code settings merged into each VS Code-family IDE's settings.json.
|
|
19
21
|
*
|
|
@@ -38,4 +40,4 @@ declare const SKILL_NAMES: readonly ["aikit", "brainstorming", "multi-agents-dev
|
|
|
38
40
|
*/
|
|
39
41
|
declare const VSCODE_SETTINGS: Record<string, unknown>;
|
|
40
42
|
//#endregion
|
|
41
|
-
export { MCP_SERVER_ENTRY, SERVER_NAME, SKILL_NAMES, VSCODE_SETTINGS };
|
|
43
|
+
export { FLOW_DIRS, MCP_SERVER_ENTRY, SERVER_NAME, SKILL_NAMES, VSCODE_SETTINGS };
|
|
@@ -1 +1 @@
|
|
|
1
|
-
const e=`aikit`,t={type:`stdio`,command:`npx`,args:[`-y`,`@vpxa/aikit`,`serve`]},n=[`aikit`,`brainstorming`,`multi-agents-development`,`session-handoff`,`requirements-clarity`,`lesson-learned`,`c4-architecture`,`adr-skill`,`present`],r={"chat.agentFilesLocations":{"~/.claude/agents":!1},"github.copilot.chat.copilotMemory.enabled":!0,"chat.customAgentInSubagent.enabled":!0,"chat.useNestedAgentsMdFiles":!0,"chat.useAgentSkills":!0,"github.copilot.chat.switchAgent.enabled":!0,"workbench.browser.enableChatTools":!0,"chat.mcp.apps.enabled":!0};export{t as MCP_SERVER_ENTRY,e as SERVER_NAME,n as SKILL_NAMES,
|
|
1
|
+
const e=`aikit`,t={type:`stdio`,command:`npx`,args:[`-y`,`@vpxa/aikit`,`serve`]},n=[`aikit`,`brainstorming`,`multi-agents-development`,`session-handoff`,`requirements-clarity`,`lesson-learned`,`c4-architecture`,`adr-skill`,`present`],r=[`aikit-basic`,`aikit-advanced`],i={"chat.agentFilesLocations":{"~/.claude/agents":!1},"github.copilot.chat.copilotMemory.enabled":!0,"chat.customAgentInSubagent.enabled":!0,"chat.useNestedAgentsMdFiles":!0,"chat.useAgentSkills":!0,"github.copilot.chat.switchAgent.enabled":!0,"workbench.browser.enableChatTools":!0,"chat.mcp.apps.enabled":!0};export{r as FLOW_DIRS,t as MCP_SERVER_ENTRY,e as SERVER_NAME,n as SKILL_NAMES,i as VSCODE_SETTINGS};
|
|
@@ -1,5 +1,5 @@
|
|
|
1
|
-
import{SKILL_NAMES as
|
|
2
|
-
AI Kit initialized! Next steps:`),console.log(` aikit reindex Index your codebase`),console.log(` aikit search Search indexed content`),console.log(` aikit serve Start MCP server for IDE integration`),
|
|
3
|
-
Note: User-level AI Kit is also installed. This workspace uses its own local data store.`)}async function
|
|
1
|
+
import{FLOW_DIRS as e,SKILL_NAMES as t}from"./constants.js";import{detectIde as n,getAdapter as r}from"./adapters.js";import{ensureGitignore as i,getServerName as a,writeKbConfig as o}from"./config.js";import{createCuratedDirs as s}from"./curated.js";import{guideFlows as c,guideScaffold as l,guideSkills as u,smartCopyFlows as d,smartCopyScaffold as f,smartCopySkills as p}from"./scaffold.js";import{readFileSync as m}from"node:fs";import{dirname as h,resolve as g}from"node:path";import{fileURLToPath as _}from"node:url";import{isUserInstalled as v}from"../../../../core/dist/index.js";async function y(c){let l=process.cwd();if(!o(l,c.force))return;i(l);let u=a(),y=r(n(l));y.writeMcpConfig(l,u),y.writeInstructions(l,u),y.writeAgentsMd(l,u);let b=g(h(_(import.meta.url)),`..`,`..`,`..`,`..`,`..`),x=JSON.parse(m(g(b,`package.json`),`utf-8`)).version;p(l,b,[...t],x,c.force),d(l,b,[...e],x,c.force),f(l,b,y.scaffoldDir,x,c.force),s(l),console.log(`
|
|
2
|
+
AI Kit initialized! Next steps:`),console.log(` aikit reindex Index your codebase`),console.log(` aikit search Search indexed content`),console.log(` aikit serve Start MCP server for IDE integration`),v()&&console.log(`
|
|
3
|
+
Note: User-level AI Kit is also installed. This workspace uses its own local data store.`)}async function b(e){v()?await x(e):await y(e)}async function x(t){let o=process.cwd(),c=a(),l=r(n(o));l.writeInstructions(o,c),l.writeAgentsMd(o,c);let u=g(h(_(import.meta.url)),`..`,`..`,`..`,`..`,`..`),p=JSON.parse(m(g(u,`package.json`),`utf-8`)).version;f(o,u,l.scaffoldDir,p,t.force),d(o,u,[...e],p,t.force),s(o),i(o),console.log(`
|
|
4
4
|
Workspace scaffolded for user-level AI Kit! Files added:`),console.log(` Instruction files (AGENTS.md, copilot-instructions.md, etc.)`),console.log(` .ai/curated/ directories`),console.log(` .github/agents/ & .github/prompts/`),console.log(`
|
|
5
|
-
The user-level AI Kit server will auto-index this workspace when opened in your IDE.`)}async function
|
|
5
|
+
The user-level AI Kit server will auto-index this workspace when opened in your IDE.`)}async function S(){let i=process.cwd(),a=r(n(i)),o=g(h(_(import.meta.url)),`..`,`..`,`..`,`..`,`..`),s=[...u(i,o,[...t]),...c(i,o,[...e]),...l(i,o,a.scaffoldDir)],d={summary:{total:s.length,new:s.filter(e=>e.status===`new`).length,outdated:s.filter(e=>e.status===`outdated`).length,current:s.filter(e=>e.status===`current`).length},files:s};console.log(JSON.stringify(d,null,2))}export{S as guideProject,y as initProject,x as initScaffoldOnly,b as initSmart};
|
|
@@ -42,5 +42,12 @@ declare function copySkills(cwd: string, pkgRoot: string, skillNames: string[],
|
|
|
42
42
|
declare function guideScaffold(cwd: string, pkgRoot: string, ide: string): GuideFileEntry[];
|
|
43
43
|
/** Generate guide report for skill files — compare source vs destination. */
|
|
44
44
|
declare function guideSkills(cwd: string, pkgRoot: string, skillNames: string[]): GuideFileEntry[];
|
|
45
|
+
/**
|
|
46
|
+
* Smart-copy built-in flow skill files with manifest tracking.
|
|
47
|
+
* Copies from scaffold/flows/<flowName>/skills/ into .github/flows/<flowName>/skills/.
|
|
48
|
+
*/
|
|
49
|
+
declare function smartCopyFlows(cwd: string, pkgRoot: string, flowDirs: string[], version: string, force?: boolean): void;
|
|
50
|
+
/** Generate guide report for flow skill files. */
|
|
51
|
+
declare function guideFlows(cwd: string, pkgRoot: string, flowDirs: string[]): GuideFileEntry[];
|
|
45
52
|
//#endregion
|
|
46
|
-
export { GuideFileEntry, copyDirectoryRecursive, copyScaffold, copySkills, guideScaffold, guideSkills, smartCopyDirectory, smartCopyScaffold, smartCopySkills, smartCopySubdir };
|
|
53
|
+
export { GuideFileEntry, copyDirectoryRecursive, copyScaffold, copySkills, guideFlows, guideScaffold, guideSkills, smartCopyDirectory, smartCopyFlows, smartCopyScaffold, smartCopySkills, smartCopySubdir };
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{mergeFrontmatter as e}from"./frontmatter.js";import{createManifest as t,getFileStatus as n,getUpdateStrategy as r,readManifest as i,recordDeployment as a,writeManifest as o}from"./manifest.js";import{copyFileSync as s,existsSync as c,mkdirSync as l,readFileSync as u,readdirSync as d,statSync as f,writeFileSync as p}from"node:fs";import{dirname as m,resolve as h}from"node:path";function g(e,t,n=``,r=!1){l(t,{recursive:!0});for(let i of d(e)){let a=h(e,i),o=h(t,i),l=n?`${n}/${i}`:i;f(a).isDirectory()?g(a,o,l,r):(r||!c(o))&&s(a,o)}}function _(t,i,o,s=``,g=!1){if(c(t)){l(i,{recursive:!0});for(let v of d(t)){let d=h(t,v),y=h(i,v),b=s?`${s}/${v}`:v;if(f(d).isDirectory()){_(d,y,o,b,g);continue}let x=u(d,`utf-8`);if(g){l(m(y),{recursive:!0}),p(y,x,`utf-8`),a(o,b,x);continue}let S=n(o,b,x);if(S!==`current`){if(S===`new`&&!c(y)){l(m(y),{recursive:!0}),p(y,x,`utf-8`),a(o,b,x);continue}if(S===`new`&&c(y)){a(o,b,x);continue}r(b)===`merge-frontmatter`&&c(y)?p(y,e(x,u(y,`utf-8`)),`utf-8`):(l(m(y),{recursive:!0}),p(y,x,`utf-8`)),a(o,b,x)}}}}function v(e,n,r,a,s=!1){let l=h(n,`scaffold`,r),u=h(e,`.github`),d=h(u,`.aikit-scaffold.json`),f=i(d)??t(a);f.version=a;for(let e of[`agents`,`prompts`]){let t=h(l,e);c(t)&&_(t,h(u,e),f,e,s)}o(d,f)}function y(e,n,r,a,s=!1){let l=h(e,`.github`),u=h(l,`.aikit-scaffold.json`),d=i(u)??t(a);d.version=a;for(let e of r){let t=h(n,`scaffold`,`general`,`skills`,e);c(t)&&_(t,h(l,`skills`,e),d,`skills/${e}`,s)}o(u,d)}function b(e,n,r,a,s=!1){let l=h(e,r);if(!c(l))return;let u=h(n,r),d=h(n,`.aikit-scaffold.json`),f=i(d)??t(a);f.version=a,_(l,u,f,r,s),o(d,f)}function x(e,t,n,r){if(c(e))for(let i of d(e)){let a=h(e,i),o=n?`${n}/${i}`:i;if(f(a).isDirectory())x(a,h(t,i),o,r);else{let e=h(t,i),n=u(a,`utf-8`);c(e)?n===u(e,`utf-8`)?r.push({status:`current`,relativePath:o,sourcePath:a}):r.push({status:`outdated`,relativePath:o,sourcePath:a,content:n}):r.push({status:`new`,relativePath:o,sourcePath:a,content:n})}}}function S(e,t,n,r=!1){let 
i=h(t,`scaffold`,n);for(let t of[`agents`,`prompts`]){let n=h(i,t),a=h(e,`.github`,t);c(n)&&g(n,a,``,r)}}function C(e,t,n,r=!1){for(let i of n){let n=h(t,`scaffold`,`general`,`skills`,i);c(n)&&g(n,h(e,`.github`,`skills`,i),`skills/${i}`,r)}}function w(e,t,n){let r=[],i=h(t,`scaffold`,n);for(let t of[`agents`,`prompts`])x(h(i,t),h(e,`.github`,t),t,r);return r}function T(e,t,n){let r=[];for(let i of n){let n=h(t,`scaffold`,`general`,`skills`,i);c(n)&&x(n,h(e,`.github`,`skills`,i),`skills/${i}`,r)}return r}export{g as copyDirectoryRecursive,S as copyScaffold,C as copySkills,w as guideScaffold,T as guideSkills,_ as smartCopyDirectory,v as smartCopyScaffold,y as smartCopySkills,b as smartCopySubdir};
|
|
1
|
+
import{mergeFrontmatter as e}from"./frontmatter.js";import{createManifest as t,getFileStatus as n,getUpdateStrategy as r,readManifest as i,recordDeployment as a,writeManifest as o}from"./manifest.js";import{copyFileSync as s,existsSync as c,mkdirSync as l,readFileSync as u,readdirSync as d,statSync as f,writeFileSync as p}from"node:fs";import{dirname as m,resolve as h}from"node:path";function g(e,t,n=``,r=!1){l(t,{recursive:!0});for(let i of d(e)){let a=h(e,i),o=h(t,i),l=n?`${n}/${i}`:i;f(a).isDirectory()?g(a,o,l,r):(r||!c(o))&&s(a,o)}}function _(t,i,o,s=``,g=!1){if(c(t)){l(i,{recursive:!0});for(let v of d(t)){let d=h(t,v),y=h(i,v),b=s?`${s}/${v}`:v;if(f(d).isDirectory()){_(d,y,o,b,g);continue}let x=u(d,`utf-8`);if(g){l(m(y),{recursive:!0}),p(y,x,`utf-8`),a(o,b,x);continue}let S=n(o,b,x);if(S!==`current`){if(S===`new`&&!c(y)){l(m(y),{recursive:!0}),p(y,x,`utf-8`),a(o,b,x);continue}if(S===`new`&&c(y)){a(o,b,x);continue}r(b)===`merge-frontmatter`&&c(y)?p(y,e(x,u(y,`utf-8`)),`utf-8`):(l(m(y),{recursive:!0}),p(y,x,`utf-8`)),a(o,b,x)}}}}function v(e,n,r,a,s=!1){let l=h(n,`scaffold`,r),u=h(e,`.github`),d=h(u,`.aikit-scaffold.json`),f=i(d)??t(a);f.version=a;for(let e of[`agents`,`prompts`]){let t=h(l,e);c(t)&&_(t,h(u,e),f,e,s)}o(d,f)}function y(e,n,r,a,s=!1){let l=h(e,`.github`),u=h(l,`.aikit-scaffold.json`),d=i(u)??t(a);d.version=a;for(let e of r){let t=h(n,`scaffold`,`general`,`skills`,e);c(t)&&_(t,h(l,`skills`,e),d,`skills/${e}`,s)}o(u,d)}function b(e,n,r,a,s=!1){let l=h(e,r);if(!c(l))return;let u=h(n,r),d=h(n,`.aikit-scaffold.json`),f=i(d)??t(a);f.version=a,_(l,u,f,r,s),o(d,f)}function x(e,t,n,r){if(c(e))for(let i of d(e)){let a=h(e,i),o=n?`${n}/${i}`:i;if(f(a).isDirectory())x(a,h(t,i),o,r);else{let e=h(t,i),n=u(a,`utf-8`);c(e)?n===u(e,`utf-8`)?r.push({status:`current`,relativePath:o,sourcePath:a}):r.push({status:`outdated`,relativePath:o,sourcePath:a,content:n}):r.push({status:`new`,relativePath:o,sourcePath:a,content:n})}}}function S(e,t,n,r=!1){let 
i=h(t,`scaffold`,n);for(let t of[`agents`,`prompts`]){let n=h(i,t),a=h(e,`.github`,t);c(n)&&g(n,a,``,r)}}function C(e,t,n,r=!1){for(let i of n){let n=h(t,`scaffold`,`general`,`skills`,i);c(n)&&g(n,h(e,`.github`,`skills`,i),`skills/${i}`,r)}}function w(e,t,n){let r=[],i=h(t,`scaffold`,n);for(let t of[`agents`,`prompts`])x(h(i,t),h(e,`.github`,t),t,r);return r}function T(e,t,n){let r=[];for(let i of n){let n=h(t,`scaffold`,`general`,`skills`,i);c(n)&&x(n,h(e,`.github`,`skills`,i),`skills/${i}`,r)}return r}function E(e,n,r,a,s=!1){let l=h(e,`.github`),u=h(l,`.aikit-scaffold.json`),d=i(u)??t(a);d.version=a;for(let e of r){let t=h(n,`scaffold`,`flows`,e,`skills`);c(t)&&_(t,h(l,`flows`,e,`skills`),d,`flows/${e}/skills`,s)}o(u,d)}function D(e,t,n){let r=[];for(let i of n){let n=h(t,`scaffold`,`flows`,i,`skills`);c(n)&&x(n,h(e,`.github`,`flows`,i,`skills`),`flows/${i}/skills`,r)}return r}export{g as copyDirectoryRecursive,S as copyScaffold,C as copySkills,D as guideFlows,w as guideScaffold,T as guideSkills,_ as smartCopyDirectory,E as smartCopyFlows,v as smartCopyScaffold,y as smartCopySkills,b as smartCopySubdir};
|
|
@@ -1,5 +1,5 @@
|
|
|
1
|
-
import{
|
|
2
|
-
No supported IDEs detected. You can manually add the MCP server config.`);else{console.log(`\n Detected ${o.length} IDE(s):`);for(let
|
|
3
|
-
Installing scaffold files:`),
|
|
1
|
+
import{FLOW_DIRS as e,MCP_SERVER_ENTRY as t,SERVER_NAME as n,SKILL_NAMES as r,VSCODE_SETTINGS as i}from"./constants.js";import{buildAgentsMd as a,buildCopilotInstructions as o}from"./templates.js";import{smartCopySubdir as s}from"./scaffold.js";import{existsSync as c,mkdirSync as l,readFileSync as u,writeFileSync as d}from"node:fs";import{dirname as f,resolve as p}from"node:path";import{fileURLToPath as m}from"node:url";import{getGlobalDataDir as h,saveRegistry as g}from"../../../../core/dist/index.js";import{homedir as _}from"node:os";function v(){let e=_(),t=process.platform,n=[],r=p(e,`.copilot`),i=p(r,`instructions`),a=p(e,`.claude`),o=p(e,`.cursor`),s=p(e,`.windsurf`);if(t===`win32`){let t=process.env.APPDATA??p(e,`AppData`,`Roaming`);n.push({ide:`VS Code`,configDir:p(t,`Code`,`User`),mcpConfigPath:p(t,`Code`,`User`,`mcp.json`),globalScaffoldRoot:r,instructionsRoot:i},{ide:`VS Code Insiders`,configDir:p(t,`Code - Insiders`,`User`),mcpConfigPath:p(t,`Code - Insiders`,`User`,`mcp.json`),globalScaffoldRoot:r,instructionsRoot:i},{ide:`VSCodium`,configDir:p(t,`VSCodium`,`User`),mcpConfigPath:p(t,`VSCodium`,`User`,`mcp.json`),globalScaffoldRoot:r,instructionsRoot:i},{ide:`Cursor`,configDir:p(t,`Cursor`,`User`),mcpConfigPath:p(t,`Cursor`,`User`,`mcp.json`),globalScaffoldRoot:o,instructionsRoot:null},{ide:`Cursor Nightly`,configDir:p(t,`Cursor Nightly`,`User`),mcpConfigPath:p(t,`Cursor Nightly`,`User`,`mcp.json`),globalScaffoldRoot:o,instructionsRoot:null},{ide:`Windsurf`,configDir:p(t,`Windsurf`,`User`),mcpConfigPath:p(t,`Windsurf`,`User`,`mcp.json`),globalScaffoldRoot:s,instructionsRoot:null})}else if(t===`darwin`){let t=p(e,`Library`,`Application Support`);n.push({ide:`VS Code`,configDir:p(t,`Code`,`User`),mcpConfigPath:p(t,`Code`,`User`,`mcp.json`),globalScaffoldRoot:r,instructionsRoot:i},{ide:`VS Code Insiders`,configDir:p(t,`Code - Insiders`,`User`),mcpConfigPath:p(t,`Code - 
Insiders`,`User`,`mcp.json`),globalScaffoldRoot:r,instructionsRoot:i},{ide:`VSCodium`,configDir:p(t,`VSCodium`,`User`),mcpConfigPath:p(t,`VSCodium`,`User`,`mcp.json`),globalScaffoldRoot:r,instructionsRoot:i},{ide:`Cursor`,configDir:p(t,`Cursor`,`User`),mcpConfigPath:p(t,`Cursor`,`User`,`mcp.json`),globalScaffoldRoot:o,instructionsRoot:null},{ide:`Cursor Nightly`,configDir:p(t,`Cursor Nightly`,`User`),mcpConfigPath:p(t,`Cursor Nightly`,`User`,`mcp.json`),globalScaffoldRoot:o,instructionsRoot:null},{ide:`Windsurf`,configDir:p(t,`Windsurf`,`User`),mcpConfigPath:p(t,`Windsurf`,`User`,`mcp.json`),globalScaffoldRoot:s,instructionsRoot:null})}else{let t=process.env.XDG_CONFIG_HOME??p(e,`.config`);n.push({ide:`VS Code`,configDir:p(t,`Code`,`User`),mcpConfigPath:p(t,`Code`,`User`,`mcp.json`),globalScaffoldRoot:r,instructionsRoot:i},{ide:`VS Code Insiders`,configDir:p(t,`Code - Insiders`,`User`),mcpConfigPath:p(t,`Code - Insiders`,`User`,`mcp.json`),globalScaffoldRoot:r,instructionsRoot:i},{ide:`VSCodium`,configDir:p(t,`VSCodium`,`User`),mcpConfigPath:p(t,`VSCodium`,`User`,`mcp.json`),globalScaffoldRoot:r,instructionsRoot:i},{ide:`Cursor`,configDir:p(t,`Cursor`,`User`),mcpConfigPath:p(t,`Cursor`,`User`,`mcp.json`),globalScaffoldRoot:o,instructionsRoot:null},{ide:`Cursor Nightly`,configDir:p(t,`Cursor Nightly`,`User`),mcpConfigPath:p(t,`Cursor Nightly`,`User`,`mcp.json`),globalScaffoldRoot:o,instructionsRoot:null},{ide:`Windsurf`,configDir:p(t,`Windsurf`,`User`),mcpConfigPath:p(t,`Windsurf`,`User`,`mcp.json`),globalScaffoldRoot:s,instructionsRoot:null})}return n.push({ide:`Claude Code`,configDir:p(e,`.claude`),mcpConfigPath:p(e,`.claude`,`mcp.json`),globalScaffoldRoot:a,instructionsRoot:null}),n.filter(e=>c(e.configDir))}function y(e,n,r=!1){let{mcpConfigPath:i,configDir:a}=e,o={...t},s={};if(c(i)){try{let e=u(i,`utf-8`);s=JSON.parse(e)}catch{let e=`${i}.bak`;d(e,u(i,`utf-8`),`utf-8`),console.log(` Backed up invalid ${i} to 
${e}`),s={}}if((s.servers??s.mcpServers??{})[n]&&!r){console.log(` ${e.ide}: ${n} already configured (use --force to update)`);return}}let f=new Set([`VS Code`,`VS Code Insiders`,`VSCodium`,`Windsurf`]).has(e.ide)?`servers`:`mcpServers`,p=s[f]??{};p[n]=o,s[f]=p,l(a,{recursive:!0}),d(i,`${JSON.stringify(s,null,2)}\n`,`utf-8`),console.log(` ${e.ide}: configured ${n} in ${i}`)}const b=new Set([`VS Code`,`VS Code Insiders`,`VSCodium`]);function x(e,t=!1){if(!b.has(e.ide))return;let n=p(e.configDir,`settings.json`),r={};if(c(n))try{let e=u(n,`utf-8`);r=JSON.parse(e)}catch{console.log(` ${e.ide}: skipped settings.json (invalid JSON)`);return}let a=!1;for(let[e,n]of Object.entries(i))if(typeof n==`object`&&n){let t=typeof r[e]==`object`&&r[e]!==null?r[e]:{},i={...t,...n};JSON.stringify(i)!==JSON.stringify(t)&&(r[e]=i,a=!0)}else (t||!(e in r))&&(r[e]=n,a=!0);a&&(d(n,`${JSON.stringify(r,null,2)}\n`,`utf-8`),console.log(` ${e.ide}: updated settings.json`))}function S(t,n,i,u,f=!1){let m=new Set;for(let e of n)e.globalScaffoldRoot&&m.add(e.globalScaffoldRoot);if(m.size===0){console.log(` No IDEs with global scaffold support detected.`);return}let h=p(t,`scaffold`,`general`);for(let n of m){s(h,n,`agents`,u,f),s(h,n,`prompts`,u,f);let i=0;for(let e of r)c(p(h,`skills`,e))&&(s(h,n,`skills/${e}`,u,f),i++);for(let r of e){let e=p(t,`scaffold`,`flows`,r);c(p(e,`skills`))&&s(e,p(n,`flows`,r),`skills`,u,f)}console.log(` ${n}: scaffold updated (${i} skills)`)}let g=new Set,_=o(`aikit`,i),v=a(`aikit`,i);for(let e of n){if(!e.globalScaffoldRoot)continue;let t=e.globalScaffoldRoot;if(e.ide===`Claude Code`){let e=p(t,`CLAUDE.md`);d(e,`${_}\n---\n\n${v}`,`utf-8`),g.add(e)}else if(e.ide===`VS Code`||e.ide===`VS Code Insiders`||e.ide===`VSCodium`){let n=e.instructionsRoot??t;l(n,{recursive:!0});let r=p(n,`kb.instructions.md`);g.has(r)||(d(r,`---\napplyTo: "**"\n---\n\n${_}\n---\n\n${v}`,`utf-8`),g.add(r))}else if(e.ide===`Cursor`||e.ide===`Cursor Nightly`){let 
e=p(t,`rules`);l(e,{recursive:!0});let n=p(e,`kb.mdc`);g.has(n)||(d(n,`${_}\n---\n\n${v}`,`utf-8`),g.add(n))}else if(e.ide===`Windsurf`){let e=p(t,`rules`);l(e,{recursive:!0});let n=p(e,`kb.md`);g.has(n)||(d(n,`${_}\n---\n\n${v}`,`utf-8`),g.add(n))}}g.size>0&&console.log(` Instruction files: ${[...g].join(`, `)}`)}async function C(e){let t=n,r=p(f(m(import.meta.url)),`..`,`..`,`..`,`..`,`..`,`package.json`),i=JSON.parse(u(r,`utf-8`)).version;console.log(`Initializing @vpxa/aikit v${i}...\n`);let a=h();l(a,{recursive:!0}),console.log(` Global data store: ${a}`),g({version:1,workspaces:{}}),console.log(` Created registry.json`);let o=v();if(o.length===0)console.log(`
|
|
2
|
+
No supported IDEs detected. You can manually add the MCP server config.`);else{console.log(`\n Detected ${o.length} IDE(s):`);for(let n of o)y(n,t,e.force),x(n,e.force)}let s=p(f(m(import.meta.url)),`..`,`..`,`..`,`..`,`..`);console.log(`
|
|
3
|
+
Installing scaffold files:`),S(s,o,t,i,e.force),console.log(`
|
|
4
4
|
User-level AI Kit installation complete!`),console.log(`
|
|
5
|
-
Next steps:`),console.log(` 1. Open any workspace in your IDE`),console.log(` 2. The AI Kit server will auto-start and index the workspace`),console.log(` 3. Agents, prompts, skills & instructions are available globally`),console.log(` 4. No per-workspace init needed — just open a project and start coding`)}export{
|
|
5
|
+
Next steps:`),console.log(` 1. Open any workspace in your IDE`),console.log(` 2. The AI Kit server will auto-start and index the workspace`),console.log(` 3. Agents, prompts, skills & instructions are available globally`),console.log(` 4. No per-workspace init needed — just open a project and start coding`)}export{v as detectInstalledIdes,C as initUser,S as installGlobalScaffold,y as writeUserLevelMcpConfig,x as writeVscodeSettings};
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{SKILL_NAMES as
|
|
1
|
+
import{FLOW_DIRS as e,SKILL_NAMES as t}from"./init/constants.js";import{existsSync as n,readFileSync as r}from"node:fs";import{dirname as i,resolve as a}from"node:path";import{fileURLToPath as o}from"node:url";const s=[{name:`upgrade`,description:`Upgrade AI Kit agents, prompts, and skills to the latest version (user-level and workspace-level)`,usage:`aikit upgrade`,run:async()=>{let{initUser:s}=await import(`./init/user.js`);if(await s({force:!0}),n(a(process.cwd(),`.github`,`.aikit-scaffold.json`))){let{initScaffoldOnly:s}=await import(`./init/index.js`);if(await s({force:!0}),n(a(process.cwd(),`.github`,`skills`))){let{smartCopySkills:n}=await import(`./init/scaffold.js`),s=a(i(o(import.meta.url)),`..`,`..`,`..`,`..`),c=JSON.parse(r(a(s,`package.json`),`utf-8`)).version;n(process.cwd(),s,[...t],c,!0);let{smartCopyFlows:l}=await import(`./init/scaffold.js`);l(process.cwd(),s,[...e],c,!0)}}}}];export{s as upgradeCommands};
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{AIKIT_GLOBAL_PATHS as e,AIKIT_PATHS as t}from"./constants.js";import{basename as n,resolve as r}from"node:path";import{createHash as i}from"node:crypto";import{closeSync as a,constants as o,existsSync as s,mkdirSync as c,openSync as l,readFileSync as u,renameSync as d,statSync as f,unlinkSync as p,writeFileSync as m}from"node:fs";import{homedir as h}from"node:os";function g(){return process.env.AIKIT_GLOBAL_DATA_DIR??r(h(),e.root)}function _(e){let t=r(e);return`${n(t).toLowerCase().replace(/[^a-z0-9-]/g,`-`)||`workspace`}-${i(`sha256`).update(t).digest(`hex`).slice(0,8)}`}function v(){let t=r(g(),e.registry);if(!s(t))return{version:1,workspaces:{}};let n=u(t,`utf-8`);return JSON.parse(n)}function y(e,t=5e3){let n=`${e}.lock`,r=Date.now()+t,i=10;for(;Date.now()<r;)try{let e=l(n,o.O_CREAT|o.O_EXCL|o.O_WRONLY);return m(e,`${process.pid}\n`),a(e),n}catch(e){if(e.code!==`EEXIST`)throw e;try{let{mtimeMs:e}=f(n);if(Date.now()-e>3e4){p(n);continue}}catch{}let t=new SharedArrayBuffer(4);Atomics.wait(new Int32Array(t),0,0,i),i=Math.min(i*2,200)}throw Error(`Failed to acquire registry lock after ${t}ms`)}function b(e){try{p(e)}catch{}}function x(t){let n=g();c(n,{recursive:!0});let i=r(n,e.registry),a=y(i);try{let e=`${i}.tmp`;m(e,JSON.stringify(t,null,2),`utf-8`),d(e,i)}finally{b(a)}}function S(e){let t=v(),n=_(e),i=new Date().toISOString();return t.workspaces[n]?t.workspaces[n].lastAccessedAt=i:t.workspaces[n]={partition:n,workspacePath:r(e),registeredAt:i,lastAccessedAt:i},c(T(n),{recursive:!0}),x(t),t.workspaces[n]}function C(e){let t=v(),n=_(e);return t.workspaces[n]}function w(){let e=v();return Object.values(e.workspaces)}function T(e){return r(g(),e)}function E(){return s(r(g(),e.registry))}function D(e){return E()?r(T(S(e).partition),`state`):r(e,t.state)}export{_ as computePartitionKey,g as getGlobalDataDir,T as getPartitionDir,E as isUserInstalled,w as listWorkspaces,v as loadRegistry,C as lookupWorkspace,S as registerWorkspace,D as resolveStateDir,x as 
saveRegistry};
|
|
1
|
+
import{AIKIT_GLOBAL_PATHS as e,AIKIT_PATHS as t}from"./constants.js";import{basename as n,resolve as r}from"node:path";import{createHash as i}from"node:crypto";import{closeSync as a,constants as o,existsSync as s,mkdirSync as c,openSync as l,readFileSync as u,renameSync as d,statSync as f,unlinkSync as p,writeFileSync as m}from"node:fs";import{homedir as h}from"node:os";function g(){return process.env.AIKIT_GLOBAL_DATA_DIR??r(h(),e.root)}function _(e){let t=r(e);return`${n(t).toLowerCase().replace(/[^a-z0-9-]/g,`-`)||`workspace`}-${i(`sha256`).update(t).digest(`hex`).slice(0,8)}`}function v(){let t=r(g(),e.registry);if(!s(t))return{version:1,workspaces:{}};let n=u(t,`utf-8`);try{return JSON.parse(n)}catch{return{version:1,workspaces:{}}}}function y(e,t=5e3){let n=`${e}.lock`,r=Date.now()+t,i=10;for(;Date.now()<r;)try{let e=l(n,o.O_CREAT|o.O_EXCL|o.O_WRONLY);return m(e,`${process.pid}\n`),a(e),n}catch(e){if(e.code!==`EEXIST`)throw e;try{let{mtimeMs:e}=f(n);if(Date.now()-e>3e4){p(n);continue}}catch{}let t=new SharedArrayBuffer(4);Atomics.wait(new Int32Array(t),0,0,i),i=Math.min(i*2,200)}throw Error(`Failed to acquire registry lock after ${t}ms`)}function b(e){try{p(e)}catch{}}function x(t){let n=g();c(n,{recursive:!0});let i=r(n,e.registry),a=y(i);try{let e=`${i}.tmp`;m(e,JSON.stringify(t,null,2),`utf-8`),d(e,i)}finally{b(a)}}function S(e){let t=v(),n=_(e),i=new Date().toISOString();return t.workspaces[n]?t.workspaces[n].lastAccessedAt=i:t.workspaces[n]={partition:n,workspacePath:r(e),registeredAt:i,lastAccessedAt:i},c(T(n),{recursive:!0}),x(t),t.workspaces[n]}function C(e){let t=v(),n=_(e);return t.workspaces[n]}function w(){let e=v();return Object.values(e.workspaces)}function T(e){return r(g(),e)}function E(){return s(r(g(),e.registry))}function D(e){return E()?r(T(S(e).partition),`state`):r(e,t.state)}export{_ as computePartitionKey,g as getGlobalDataDir,T as getPartitionDir,E as isUserInstalled,w as listWorkspaces,v as loadRegistry,C as lookupWorkspace,S as 
registerWorkspace,D as resolveStateDir,x as saveRegistry};
|
|
@@ -81,6 +81,8 @@ interface SearchResult {
|
|
|
81
81
|
interface KBConfig {
|
|
82
82
|
/** MCP server name. Defaults to 'kb'. */
|
|
83
83
|
serverName?: string;
|
|
84
|
+
/** Whether to auto-index on startup. Defaults to false. */
|
|
85
|
+
autoIndex?: boolean;
|
|
84
86
|
/**
|
|
85
87
|
* Prefix prepended to every MCP tool name to avoid collisions with other
|
|
86
88
|
* MCP servers. E.g. `"aikit_"` turns `search` → `aikit_search`.
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{cpSync as e,existsSync as t,mkdirSync as n,rmSync as r}from"node:fs";import{basename as i,join as a}from"node:path";import{execSync as o}from"node:child_process";var s=class{constructor(e){this.flowsDir=e}clone(e){let n=this.repoNameFromUrl(e),
|
|
1
|
+
import{cpSync as e,existsSync as t,mkdirSync as n,rmSync as r}from"node:fs";import{basename as i,join as a}from"node:path";import{execSync as o}from"node:child_process";var s=class{constructor(e){this.flowsDir=e}clone(e){let n=this.repoNameFromUrl(e),i=a(this.flowsDir,n);if(t(i))if(!t(a(i,`.git`)))r(i,{recursive:!0,force:!0});else return{success:!1,error:`Flow "${n}" already installed at ${i}. Use update instead.`};try{return this.ensureFlowsDir(),o(`git clone --depth 1 ${e} ${i}`,{stdio:`pipe`,timeout:6e4}),{success:!0,data:i}}catch(e){return{success:!1,error:`Git clone failed: ${e instanceof Error?e.message:String(e)}`}}}update(e){if(!t(e))return{success:!1,error:`Install path not found: ${e}`};try{return o(`git pull --ff-only`,{cwd:e,stdio:`pipe`,timeout:6e4}),{success:!0}}catch(e){return{success:!1,error:`Git pull failed: ${e instanceof Error?e.message:String(e)}`}}}copyLocal(n,r){let i=a(this.flowsDir,r);if(t(i))return{success:!1,error:`Flow "${r}" already installed at ${i}`};try{return this.ensureFlowsDir(),e(n,i,{recursive:!0}),{success:!0,data:i}}catch(e){return{success:!1,error:`Copy failed: ${e instanceof Error?e.message:String(e)}`}}}remove(e){if(!t(e))return{success:!0};try{return r(e,{recursive:!0,force:!0}),{success:!0}}catch(e){return{success:!1,error:`Remove failed: ${e instanceof Error?e.message:String(e)}`}}}runInstallDeps(e){for(let t of e)try{if(t.startsWith(`npm:`)){o(`npx skills add ${t.slice(4)} -g`,{stdio:`pipe`,timeout:12e4});continue}if(t.endsWith(`.git`)||t.includes(`github.com`)){o(`npx skills add ${t} -g`,{stdio:`pipe`,timeout:12e4});continue}return{success:!1,error:`Unknown install entry format: ${t}`}}catch(e){return{success:!1,error:`Install dependency failed for "${t}": ${e instanceof Error?e.message:String(e)}`}}return{success:!0}}repoNameFromUrl(e){return i(e).replace(/\.git$/,``)}ensureFlowsDir(){t(this.flowsDir)||n(this.flowsDir,{recursive:!0})}};export{s as GitInstaller};
|
|
@@ -12,11 +12,11 @@ declare class FlowRegistryManager {
|
|
|
12
12
|
register(entry: FlowRegistryEntry): FlowResult;
|
|
13
13
|
/** Remove a flow from the registry */
|
|
14
14
|
unregister(name: string): FlowResult;
|
|
15
|
-
/** Get a specific flow entry */
|
|
15
|
+
/** Get a specific flow entry (checks disk registry first, then builtins) */
|
|
16
16
|
get(name: string): FlowRegistryEntry | null;
|
|
17
|
-
/** List all registered flows */
|
|
17
|
+
/** List all registered flows (disk + builtins, disk overrides builtins) */
|
|
18
18
|
list(): FlowRegistryEntry[];
|
|
19
|
-
/** Check if a flow is registered */
|
|
19
|
+
/** Check if a flow is registered (disk or builtin) */
|
|
20
20
|
has(name: string): boolean;
|
|
21
21
|
}
|
|
22
22
|
//#endregion
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{existsSync as
|
|
1
|
+
import { getBuiltinFlows } from "./builtins.js";
import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs";
import { dirname } from "node:path";

/**
 * Map each builtin flow onto a synthetic registry entry so builtins can be
 * served through the same API as disk-registered flows.
 */
function builtinEntries() {
  return getBuiltinFlows().map((flow) => ({
    name: flow.manifest.name,
    version: flow.manifest.version,
    source: `builtin`,
    sourceType: `builtin`,
    installPath: flow.scaffoldDir,
    format: `native`,
    registeredAt: `1970-01-01T00:00:00.000Z`,
    updatedAt: `1970-01-01T00:00:00.000Z`,
    manifest: flow.manifest,
  }));
}

/**
 * JSON-file-backed registry of installed flows. Read paths (get/list/has)
 * consult the disk registry first and fall back to builtins; a disk entry
 * shadows a builtin of the same name.
 */
export class FlowRegistryManager {
  registryPath;

  constructor(registryPath) {
    this.registryPath = registryPath;
  }

  /** Load the registry file; a missing or corrupt file yields an empty registry. */
  load() {
    if (!existsSync(this.registryPath)) return { version: 1, flows: {} };
    try {
      return JSON.parse(readFileSync(this.registryPath, `utf-8`));
    } catch {
      return { version: 1, flows: {} };
    }
  }

  /** Persist the registry, creating its parent directory when needed. */
  save(registry) {
    const parent = dirname(this.registryPath);
    if (!existsSync(parent)) mkdirSync(parent, { recursive: true });
    writeFileSync(this.registryPath, JSON.stringify(registry, null, 2), `utf-8`);
  }

  /** Add or replace an entry keyed by its name. */
  register(entry) {
    const registry = this.load();
    registry.flows[entry.name] = entry;
    this.save(registry);
    return { success: true };
  }

  /** Remove an entry from the disk registry (builtins are not removable). */
  unregister(name) {
    const registry = this.load();
    if (!registry.flows[name]) {
      return { success: false, error: `Flow "${name}" not found in registry` };
    }
    delete registry.flows[name];
    this.save(registry);
    return { success: true };
  }

  /** Disk entry wins; otherwise a builtin of the same name, else null. */
  get(name) {
    const fromDisk = this.load().flows[name];
    if (fromDisk) return fromDisk;
    return builtinEntries().find((entry) => entry.name === name) ?? null;
  }

  /** All disk entries plus every builtin not shadowed by a disk entry. */
  list() {
    const registry = this.load();
    const diskNames = new Set(Object.keys(registry.flows));
    const entries = Object.values(registry.flows);
    for (const builtin of builtinEntries()) {
      if (!diskNames.has(builtin.name)) entries.push(builtin);
    }
    return entries;
  }

  /** True when the name exists on disk or among the builtins. */
  has(name) {
    if (name in this.load().flows) return true;
    return builtinEntries().some((entry) => entry.name === name);
  }
}
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{copyFileSync as e,existsSync as t,mkdirSync as n,readdirSync as r,rmdirSync as i,symlinkSync as a,unlinkSync as o}from"node:fs";import{basename as s,dirname as c,join as l,relative as u}from"node:path";var d=class{createSymlinks(r,i,s,d){let f=this.getTargets(r,i);for(let r of f){t(r.baseDir)||n(r.baseDir,{recursive:!0});for(let n of d.agents){let i=l(s,n);if(!t(i))continue;let d=this.getAgentStem(n),f=l(r.baseDir,`${d}${r.extension}`);t(f)&&o(f);let p=u(c(f),i);try{a(p,f,`file`)}catch{e(i,f)}}}}removeSymlinks(e,n){let a=this.getTargets(e,n);for(let e of a)if(t(e.baseDir))try{let t=r(e.baseDir,{withFileTypes:!0});for(let n of t)!n.isFile()&&!n.isSymbolicLink()||o(l(e.baseDir,n.name));r(e.baseDir).length===0&&i(e.baseDir)}catch{}}getTargets(e,t){return[{ide:`copilot`,baseDir:l(e,`.github`,`agents`,`flows`,t),extension:`.agent.md`},{ide:`claude-code`,baseDir:l(e,`.claude`,`agents`,`flows`,t),extension:`.md`}]}getAgentStem(e){return s(e).replace(/\.agent\.md$/,``).replace(/\.md$/,``)}};export{d as SymlinkManager};
|
|
1
|
+
import { copyFileSync, existsSync, mkdirSync, readdirSync, rmSync, rmdirSync, symlinkSync, unlinkSync } from "node:fs";
import { basename, dirname, join, relative } from "node:path";

/**
 * Creates and removes per-IDE agent symlinks for a flow. Each supported IDE
 * gets a directory under the project root mirroring the flow's agent files,
 * preferring relative symlinks and falling back to plain copies where
 * symlinking is unavailable.
 *
 * Fix: stale-link removal previously used `existsSync(linkPath)`, which
 * follows symlinks — a *broken* symlink read as absent, `symlinkSync` then
 * failed with EEXIST and the copy fallback failed too. `rmSync(..., force)`
 * removes the link itself regardless of its target.
 */
export class SymlinkManager {
  /** Link every agent file listed in the manifest into each IDE target dir. */
  createSymlinks(projectRoot, flowName, agentsDir, manifest) {
    for (const target of this.getTargets(projectRoot, flowName)) {
      if (!existsSync(target.baseDir)) mkdirSync(target.baseDir, { recursive: true });
      for (const agent of manifest.agents) {
        const sourcePath = join(agentsDir, agent);
        if (!existsSync(sourcePath)) continue; // manifest may list files that don't exist
        const stem = this.getAgentStem(agent);
        const linkPath = join(target.baseDir, `${stem}${target.extension}`);
        // Remove any pre-existing file or symlink at the destination,
        // including broken symlinks; no error when nothing is there.
        rmSync(linkPath, { force: true });
        const relativeSource = relative(dirname(linkPath), sourcePath);
        try {
          symlinkSync(relativeSource, linkPath, `file`);
        } catch {
          // Symlinks can fail (e.g. Windows without privileges): copy instead.
          try {
            copyFileSync(sourcePath, linkPath);
          } catch (err) {
            console.warn(`Failed to create symlink or copy fallback for ${sourcePath}: ${err instanceof Error ? err.message : String(err)}`);
          }
        }
      }
    }
  }

  /** Delete all linked/copied agent files for a flow, pruning empty dirs. */
  removeSymlinks(projectRoot, flowName) {
    for (const target of this.getTargets(projectRoot, flowName)) {
      if (!existsSync(target.baseDir)) continue;
      try {
        const entries = readdirSync(target.baseDir, { withFileTypes: true });
        for (const entry of entries) {
          if (entry.isFile() || entry.isSymbolicLink()) {
            unlinkSync(join(target.baseDir, entry.name));
          }
        }
        if (readdirSync(target.baseDir).length === 0) rmdirSync(target.baseDir);
      } catch {
        // Best-effort cleanup: ignore races and permission errors.
      }
    }
  }

  /** Per-IDE destination directories and file extensions for a flow. */
  getTargets(projectRoot, flowName) {
    return [
      { ide: `copilot`, baseDir: join(projectRoot, `.github`, `agents`, `flows`, flowName), extension: `.agent.md` },
      { ide: `claude-code`, baseDir: join(projectRoot, `.claude`, `agents`, `flows`, flowName), extension: `.md` },
    ];
  }

  /** File-name stem with a trailing `.agent.md` / `.md` suffix removed. */
  getAgentStem(agentFile) {
    return basename(agentFile).replace(/\.agent\.md$/, ``).replace(/\.md$/, ``);
  }
}
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{lstat as
|
|
1
|
+
// FilesystemCrawler: recursively walks a project tree and collects text
// files for indexing, returning { relativePath, absolutePath, content,
// extension } records via crawl({ rootDir, excludePatterns }).
//
// Traversal rules, as implemented below (code is minified dist output;
// identifiers are mangled):
//  - relative paths are normalized to forward slashes before glob matching;
//    exclusion uses minimatch with { dot: true } so dotfiles match globs;
//  - hidden directories (name starting with ".") are skipped, except the
//    one matching AIKIT_PATHS.ai — the check compares the dirent name to
//    AIKIT_PATHS.ai.slice(1), which presumably means the constant is a
//    dot-prefixed relative path like ".ai" — TODO confirm against core;
//  - symlinked directories are skipped (lstat check; lstat failure also
//    skips), and each visited directory's canonical path (realpathSync)
//    is tracked in a Set so link-induced cycles/duplicates run once;
//  - unreadable directories are skipped: EACCES/EPERM logs a warning via
//    the "indexer" logger, other readdir errors are silently ignored;
//  - files are skipped when: extension is in the static BINARY_EXTENSIONS
//    set, stat size exceeds FILE_LIMITS.maxFileSizeBytes, the UTF-8
//    content contains a NUL byte (binary heuristic), or reading fails.
import{realpathSync as e}from"node:fs";import{lstat as t,readFile as n,readdir as r,stat as i}from"node:fs/promises";import{extname as a,join as o,relative as s}from"node:path";import{AIKIT_PATHS as c,FILE_LIMITS as l,createLogger as u}from"../../core/dist/index.js";import{minimatch as d}from"minimatch";const f=u(`indexer`);var p=class u{static BINARY_EXTENSIONS=new Set(`.node,.so,.dylib,.dll,.wasm,.bin,.exe,.png,.jpg,.jpeg,.gif,.bmp,.ico,.webp,.svg,.mp3,.mp4,.wav,.avi,.mov,.flac,.zip,.gz,.tar,.bz2,.7z,.rar,.pdf,.doc,.docx,.xls,.xlsx,.ppt,.pptx,.ttf,.otf,.woff,.woff2,.eot,.pyc,.class,.o,.obj,.a,.lib`.split(`,`));async crawl(e){let t=[],n=new Set;return await this.walkDir(e.rootDir,e.rootDir,e.excludePatterns,t,n),t}async walkDir(d,p,m,h,g){let _;try{_=await r(d,{withFileTypes:!0})}catch(e){let t=e.code;(t===`EACCES`||t===`EPERM`)&&f.warn(`Permission denied, skipping directory`,{dir:d});return}for(let r of _){let f=o(d,r.name),_=s(p,f).replace(/\\/g,`/`);if(!this.isExcluded(_,m)){if(r.isDirectory()){if(r.name.startsWith(`.`)&&!(r.name===c.ai.slice(1)&&_.startsWith(c.ai)))continue;try{if((await t(f)).isSymbolicLink())continue}catch{continue}let n;try{n=e(f)}catch{continue}if(g.has(n))continue;g.add(n),await this.walkDir(f,p,m,h,g)}else if(r.isFile()){let e=a(r.name).toLowerCase();if(u.BINARY_EXTENSIONS.has(e))continue;try{if((await i(f)).size>l.maxFileSizeBytes)continue;let t=await n(f,`utf-8`);if(t.includes(`\0`))continue;h.push({relativePath:_,absolutePath:f,content:t,extension:e})}catch{}}}}}isExcluded(e,t){return t.some(t=>d(e,t,{dot:!0}))}};export{p as FilesystemCrawler};
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{
|
|
1
|
+
import { existsSync, readFileSync, writeFileSync } from "node:fs";
import { resolve } from "node:path";
import { createLogger } from "../../core/dist/index.js";

const log = createLogger(`hash-cache`);

/**
 * Persistent map of file path -> content hash, stored as JSON in
 * `file-hashes.json` under the given data directory. Writes are batched:
 * mutations set a dirty flag and flush() serializes the whole map at once.
 */
export class FileHashCache {
  cache;
  filePath;
  dirty = false;

  constructor(dataDir) {
    this.filePath = resolve(dataDir, `file-hashes.json`);
    this.cache = new Map();
  }

  /** Read the cache file into memory; corrupt or missing files start empty. */
  load() {
    if (!existsSync(this.filePath)) return;
    try {
      const parsed = JSON.parse(readFileSync(this.filePath, `utf-8`));
      this.cache = new Map(Object.entries(parsed));
      log.info(`Hash cache loaded`, { entries: this.cache.size });
    } catch (err) {
      log.warn(`Hash cache load failed, starting fresh`, { err });
      this.cache = new Map();
    }
  }

  /** Hash for a path, or undefined when unknown. */
  get(path) {
    return this.cache.get(path);
  }

  /** Record a hash and mark the cache dirty. */
  set(path, hash) {
    this.cache.set(path, hash);
    this.dirty = true;
  }

  /** Drop a path; only marks dirty when an entry was actually removed. */
  delete(path) {
    if (this.cache.delete(path)) this.dirty = true;
  }

  /** Write the cache to disk if anything changed since the last flush. */
  flush() {
    if (!this.dirty) return;
    try {
      writeFileSync(this.filePath, JSON.stringify(Object.fromEntries(this.cache)), `utf-8`);
      this.dirty = false;
    } catch (err) {
      log.warn(`Hash cache flush failed`, { err });
    }
  }

  /** Clear all entries and persist the now-empty cache immediately. */
  clear() {
    this.cache.clear();
    this.dirty = true;
    this.flush();
  }

  /** Number of cached entries. */
  get size() {
    return this.cache.size;
  }
}
|
|
@@ -1,38 +1,37 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
import type { IGraphStore, IKnowledgeStore } from '@kb/store';
|
|
7
|
-
import type { IKBClient, KBGraphData, KBKnowledgeEntry, KBSearchResult, KBStatus } from './types.js';
|
|
1
|
+
import { IKBClient, KBGraphData, KBKnowledgeEntry, KBSearchResult, KBStatus } from "./types.js";
|
|
2
|
+
import { IEmbedder } from "../../embeddings/dist/index.js";
|
|
3
|
+
import { IGraphStore, IKnowledgeStore } from "../../store/dist/index.js";
|
|
4
|
+
|
|
5
|
+
//#region packages/kb-client/src/direct-client.d.ts
|
|
8
6
|
interface CuratedEntry {
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
7
|
+
path: string;
|
|
8
|
+
title: string;
|
|
9
|
+
category: string;
|
|
10
|
+
tags: string[];
|
|
11
|
+
content: string;
|
|
14
12
|
}
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
13
|
+
interface DirectClientDeps {
|
|
14
|
+
store: IKnowledgeStore;
|
|
15
|
+
embedder: IEmbedder;
|
|
16
|
+
graphStore?: IGraphStore;
|
|
17
|
+
/** Function to list curated entries */
|
|
18
|
+
listCurated?: () => Promise<CuratedEntry[]>;
|
|
19
|
+
/** Function to read a single curated entry */
|
|
20
|
+
readCurated?: (path: string) => Promise<CuratedEntry | null>;
|
|
23
21
|
}
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
22
|
+
declare class DirectKBClient implements IKBClient {
|
|
23
|
+
private readonly deps;
|
|
24
|
+
constructor(deps: DirectClientDeps);
|
|
25
|
+
getStatus(): Promise<KBStatus>;
|
|
26
|
+
search(query: string, options?: {
|
|
27
|
+
limit?: number;
|
|
28
|
+
mode?: 'hybrid' | 'semantic' | 'keyword';
|
|
29
|
+
}): Promise<KBSearchResult[]>;
|
|
30
|
+
listKnowledge(): Promise<KBKnowledgeEntry[]>;
|
|
31
|
+
readKnowledge(path: string): Promise<KBKnowledgeEntry | null>;
|
|
32
|
+
getGraph(query?: string): Promise<KBGraphData>;
|
|
33
|
+
getFileTree(): Promise<string[]>;
|
|
34
|
+
private getEdgesForNodes;
|
|
37
35
|
}
|
|
38
|
-
|
|
36
|
+
//#endregion
|
|
37
|
+
export { DirectClientDeps, DirectKBClient };
|
|
@@ -1,4 +1,5 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
1
|
+
import { IKBClient, KBGraphData, KBKnowledgeEntry, KBSearchResult, KBStatus } from "./types.js";
|
|
2
|
+
import { DirectClientDeps, DirectKBClient } from "./direct-client.js";
|
|
3
|
+
import { McpKBClient } from "./mcp-client.js";
|
|
4
|
+
import { ParsedContent, extractStructured, extractText, parseToolResult, tryParseJson } from "./parsers.js";
|
|
5
|
+
export { type DirectClientDeps, DirectKBClient, type IKBClient, type KBGraphData, type KBKnowledgeEntry, type KBSearchResult, type KBStatus, McpKBClient, type ParsedContent, extractStructured, extractText, parseToolResult, tryParseJson };
|
|
@@ -1,19 +1,19 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
getGraph(query?: string): Promise<KBGraphData>;
|
|
18
|
-
getFileTree(): Promise<string[]>;
|
|
1
|
+
import { IKBClient, KBGraphData, KBKnowledgeEntry, KBSearchResult, KBStatus } from "./types.js";
|
|
2
|
+
import { Client } from "@modelcontextprotocol/sdk/client/index.js";
|
|
3
|
+
|
|
4
|
+
//#region packages/kb-client/src/mcp-client.d.ts
|
|
5
|
+
declare class McpKBClient implements IKBClient {
|
|
6
|
+
private readonly client;
|
|
7
|
+
constructor(client: Client);
|
|
8
|
+
getStatus(): Promise<KBStatus>;
|
|
9
|
+
search(query: string, options?: {
|
|
10
|
+
limit?: number;
|
|
11
|
+
mode?: 'hybrid' | 'semantic' | 'keyword';
|
|
12
|
+
}): Promise<KBSearchResult[]>;
|
|
13
|
+
listKnowledge(): Promise<KBKnowledgeEntry[]>;
|
|
14
|
+
readKnowledge(path: string): Promise<KBKnowledgeEntry | null>;
|
|
15
|
+
getGraph(query?: string): Promise<KBGraphData>;
|
|
16
|
+
getFileTree(): Promise<string[]>;
|
|
19
17
|
}
|
|
18
|
+
//#endregion
|
|
19
|
+
export { McpKBClient };
|
|
@@ -1,32 +1,35 @@
|
|
|
1
|
+
//#region packages/kb-client/src/parsers.d.ts
|
|
1
2
|
/**
|
|
2
3
|
* Content parsers for MCP tool responses.
|
|
3
4
|
* Used by McpKBClient to parse structuredContent from tool calls.
|
|
4
5
|
*/
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
6
|
+
interface ParsedContent<T = unknown> {
|
|
7
|
+
text: string;
|
|
8
|
+
structured?: T;
|
|
8
9
|
}
|
|
9
10
|
/**
|
|
10
11
|
* Extract text content from an MCP tool result.
|
|
11
12
|
*/
|
|
12
|
-
|
|
13
|
-
|
|
13
|
+
declare function extractText(result: {
|
|
14
|
+
content?: unknown;
|
|
14
15
|
} | null | undefined): string;
|
|
15
16
|
/**
|
|
16
17
|
* Extract structured content from an MCP tool result.
|
|
17
18
|
*/
|
|
18
|
-
|
|
19
|
-
|
|
19
|
+
declare function extractStructured<T>(result: {
|
|
20
|
+
structuredContent?: unknown;
|
|
20
21
|
} | null | undefined): T | undefined;
|
|
21
22
|
/**
|
|
22
23
|
* Parse a tool result, returning both text and structured content.
|
|
23
24
|
*/
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
25
|
+
declare function parseToolResult<T = unknown>(result: {
|
|
26
|
+
content?: unknown;
|
|
27
|
+
structuredContent?: unknown;
|
|
27
28
|
} | null | undefined): ParsedContent<T>;
|
|
28
29
|
/**
|
|
29
30
|
* Try to parse JSON from a text tool result.
|
|
30
31
|
* Returns undefined if parsing fails.
|
|
31
32
|
*/
|
|
32
|
-
|
|
33
|
+
declare function tryParseJson<T = unknown>(text: string): T | undefined;
|
|
34
|
+
//#endregion
|
|
35
|
+
export { ParsedContent, extractStructured, extractText, parseToolResult, tryParseJson };
|
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
//#region packages/kb-client/src/types.d.ts
|
|
1
2
|
/**
|
|
2
3
|
* IKBClient — Unified data access interface for KB consumers.
|
|
3
4
|
*
|
|
@@ -5,55 +6,57 @@
|
|
|
5
6
|
* - DirectKBClient (in-process, used by TUI)
|
|
6
7
|
* - McpKBClient (over MCP transport, used by Dashboard)
|
|
7
8
|
*/
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
9
|
+
interface KBStatus {
|
|
10
|
+
totalRecords: number;
|
|
11
|
+
totalFiles: number;
|
|
12
|
+
lastIndexedAt: string | null;
|
|
13
|
+
onboarded: boolean;
|
|
13
14
|
}
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
15
|
+
interface KBSearchResult {
|
|
16
|
+
sourcePath: string;
|
|
17
|
+
contentType: string;
|
|
18
|
+
score: number;
|
|
19
|
+
content: string;
|
|
20
|
+
headingPath?: string;
|
|
21
|
+
startLine?: number;
|
|
22
|
+
endLine?: number;
|
|
22
23
|
}
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
24
|
+
interface KBKnowledgeEntry {
|
|
25
|
+
path: string;
|
|
26
|
+
title: string;
|
|
27
|
+
category: string;
|
|
28
|
+
tags: string[];
|
|
29
|
+
content: string;
|
|
29
30
|
}
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
31
|
+
interface KBGraphData {
|
|
32
|
+
nodes: Array<{
|
|
33
|
+
id: string;
|
|
34
|
+
name: string;
|
|
35
|
+
type: string;
|
|
36
|
+
sourcePath?: string;
|
|
37
|
+
}>;
|
|
38
|
+
edges: Array<{
|
|
39
|
+
fromId: string;
|
|
40
|
+
toId: string;
|
|
41
|
+
type: string;
|
|
42
|
+
}>;
|
|
42
43
|
}
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
44
|
+
interface IKBClient {
|
|
45
|
+
/** Get KB status. */
|
|
46
|
+
getStatus(): Promise<KBStatus>;
|
|
47
|
+
/** Search the knowledge base. */
|
|
48
|
+
search(query: string, options?: {
|
|
49
|
+
limit?: number;
|
|
50
|
+
mode?: 'hybrid' | 'semantic' | 'keyword';
|
|
51
|
+
}): Promise<KBSearchResult[]>;
|
|
52
|
+
/** List curated knowledge entries. */
|
|
53
|
+
listKnowledge(): Promise<KBKnowledgeEntry[]>;
|
|
54
|
+
/** Read a specific curated entry. */
|
|
55
|
+
readKnowledge(path: string): Promise<KBKnowledgeEntry | null>;
|
|
56
|
+
/** Get knowledge graph data. */
|
|
57
|
+
getGraph(query?: string): Promise<KBGraphData>;
|
|
58
|
+
/** Get file tree of indexed sources. */
|
|
59
|
+
getFileTree(): Promise<string[]>;
|
|
59
60
|
}
|
|
61
|
+
//#endregion
|
|
62
|
+
export { IKBClient, KBGraphData, KBKnowledgeEntry, KBSearchResult, KBStatus };
|