@vpxa/aikit 0.1.2 → 0.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. package/package.json +1 -1
  2. package/packages/core/dist/global-registry.js +1 -1
  3. package/packages/core/dist/types.d.ts +2 -0
  4. package/packages/flows/dist/git.js +1 -1
  5. package/packages/flows/dist/registry.d.ts +3 -3
  6. package/packages/flows/dist/registry.js +1 -1
  7. package/packages/flows/dist/symlinks.js +1 -1
  8. package/packages/indexer/dist/filesystem-crawler.js +1 -1
  9. package/packages/indexer/dist/hash-cache.js +1 -1
  10. package/packages/kb-client/dist/direct-client.d.ts +33 -34
  11. package/packages/kb-client/dist/index.d.ts +5 -4
  12. package/packages/kb-client/dist/mcp-client.d.ts +18 -18
  13. package/packages/kb-client/dist/parsers.d.ts +14 -11
  14. package/packages/kb-client/dist/types.d.ts +50 -47
  15. package/packages/present/dist/index.html +26 -26
  16. package/packages/server/dist/config.js +1 -1
  17. package/packages/server/dist/idle-timer.d.ts +4 -0
  18. package/packages/server/dist/idle-timer.js +1 -1
  19. package/packages/server/dist/index.js +1 -1
  20. package/packages/server/dist/memory-monitor.d.ts +2 -2
  21. package/packages/server/dist/memory-monitor.js +1 -1
  22. package/packages/server/dist/server.d.ts +1 -1
  23. package/packages/server/dist/server.js +2 -2
  24. package/packages/server/dist/tool-metadata.js +1 -1
  25. package/packages/server/dist/tools/config.tool.d.ts +8 -0
  26. package/packages/server/dist/tools/config.tool.js +12 -0
  27. package/packages/server/dist/tools/flow.tools.js +1 -1
  28. package/packages/server/dist/tools/present/browser.js +7 -7
  29. package/packages/server/dist/tools/present/tool.js +4 -4
  30. package/packages/server/dist/tools/search.tool.js +4 -4
  31. package/packages/server/dist/tools/status.tool.js +3 -3
  32. package/packages/store/dist/sqlite-graph-store.d.ts +3 -0
  33. package/packages/store/dist/sqlite-graph-store.js +3 -3
  34. package/packages/tools/dist/checkpoint.js +1 -1
  35. package/packages/tools/dist/evidence-map.js +2 -2
  36. package/packages/tools/dist/queue.js +1 -1
  37. package/packages/tools/dist/restore-points.js +1 -1
  38. package/packages/tools/dist/schema-validate.js +1 -1
  39. package/packages/tools/dist/snippet.js +1 -1
  40. package/packages/tools/dist/stash.js +1 -1
  41. package/packages/tools/dist/workset.js +1 -1
  42. package/packages/tui/dist/{App-B2-KJPt4.js → App-DpjN3iS-.js} +1 -1
  43. package/packages/tui/dist/App.js +1 -1
  44. package/packages/tui/dist/LogPanel-Db-SeZhR.js +3 -0
  45. package/packages/tui/dist/index.js +1 -1
  46. package/packages/tui/dist/panels/LogPanel.js +1 -1
  47. package/scaffold/general/skills/multi-agents-development/SKILL.md +435 -435
  48. package/scaffold/general/skills/present/SKILL.md +424 -424
  49. package/packages/kb-client/dist/__tests__/direct-client.test.d.ts +0 -1
  50. package/packages/kb-client/dist/__tests__/mcp-client.test.d.ts +0 -1
  51. package/packages/kb-client/dist/__tests__/parsers.test.d.ts +0 -1
  52. package/packages/tui/dist/LogPanel-E_1Do4-j.js +0 -3
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@vpxa/aikit",
3
- "version": "0.1.2",
3
+ "version": "0.1.3",
4
4
  "type": "module",
5
5
  "description": "Local-first AI developer toolkit — knowledge base, code analysis, context management, and developer tools for LLM agents",
6
6
  "license": "MIT",
@@ -1 +1 @@
1
- import{AIKIT_GLOBAL_PATHS as e,AIKIT_PATHS as t}from"./constants.js";import{basename as n,resolve as r}from"node:path";import{createHash as i}from"node:crypto";import{closeSync as a,constants as o,existsSync as s,mkdirSync as c,openSync as l,readFileSync as u,renameSync as d,statSync as f,unlinkSync as p,writeFileSync as m}from"node:fs";import{homedir as h}from"node:os";function g(){return process.env.AIKIT_GLOBAL_DATA_DIR??r(h(),e.root)}function _(e){let t=r(e);return`${n(t).toLowerCase().replace(/[^a-z0-9-]/g,`-`)||`workspace`}-${i(`sha256`).update(t).digest(`hex`).slice(0,8)}`}function v(){let t=r(g(),e.registry);if(!s(t))return{version:1,workspaces:{}};let n=u(t,`utf-8`);return JSON.parse(n)}function y(e,t=5e3){let n=`${e}.lock`,r=Date.now()+t,i=10;for(;Date.now()<r;)try{let e=l(n,o.O_CREAT|o.O_EXCL|o.O_WRONLY);return m(e,`${process.pid}\n`),a(e),n}catch(e){if(e.code!==`EEXIST`)throw e;try{let{mtimeMs:e}=f(n);if(Date.now()-e>3e4){p(n);continue}}catch{}let t=new SharedArrayBuffer(4);Atomics.wait(new Int32Array(t),0,0,i),i=Math.min(i*2,200)}throw Error(`Failed to acquire registry lock after ${t}ms`)}function b(e){try{p(e)}catch{}}function x(t){let n=g();c(n,{recursive:!0});let i=r(n,e.registry),a=y(i);try{let e=`${i}.tmp`;m(e,JSON.stringify(t,null,2),`utf-8`),d(e,i)}finally{b(a)}}function S(e){let t=v(),n=_(e),i=new Date().toISOString();return t.workspaces[n]?t.workspaces[n].lastAccessedAt=i:t.workspaces[n]={partition:n,workspacePath:r(e),registeredAt:i,lastAccessedAt:i},c(T(n),{recursive:!0}),x(t),t.workspaces[n]}function C(e){let t=v(),n=_(e);return t.workspaces[n]}function w(){let e=v();return Object.values(e.workspaces)}function T(e){return r(g(),e)}function E(){return s(r(g(),e.registry))}function D(e){return E()?r(T(S(e).partition),`state`):r(e,t.state)}export{_ as computePartitionKey,g as getGlobalDataDir,T as getPartitionDir,E as isUserInstalled,w as listWorkspaces,v as loadRegistry,C as lookupWorkspace,S as registerWorkspace,D as resolveStateDir,x as 
saveRegistry};
1
+ import{AIKIT_GLOBAL_PATHS as e,AIKIT_PATHS as t}from"./constants.js";import{basename as n,resolve as r}from"node:path";import{createHash as i}from"node:crypto";import{closeSync as a,constants as o,existsSync as s,mkdirSync as c,openSync as l,readFileSync as u,renameSync as d,statSync as f,unlinkSync as p,writeFileSync as m}from"node:fs";import{homedir as h}from"node:os";function g(){return process.env.AIKIT_GLOBAL_DATA_DIR??r(h(),e.root)}function _(e){let t=r(e);return`${n(t).toLowerCase().replace(/[^a-z0-9-]/g,`-`)||`workspace`}-${i(`sha256`).update(t).digest(`hex`).slice(0,8)}`}function v(){let t=r(g(),e.registry);if(!s(t))return{version:1,workspaces:{}};let n=u(t,`utf-8`);try{return JSON.parse(n)}catch{return{version:1,workspaces:{}}}}function y(e,t=5e3){let n=`${e}.lock`,r=Date.now()+t,i=10;for(;Date.now()<r;)try{let e=l(n,o.O_CREAT|o.O_EXCL|o.O_WRONLY);return m(e,`${process.pid}\n`),a(e),n}catch(e){if(e.code!==`EEXIST`)throw e;try{let{mtimeMs:e}=f(n);if(Date.now()-e>3e4){p(n);continue}}catch{}let t=new SharedArrayBuffer(4);Atomics.wait(new Int32Array(t),0,0,i),i=Math.min(i*2,200)}throw Error(`Failed to acquire registry lock after ${t}ms`)}function b(e){try{p(e)}catch{}}function x(t){let n=g();c(n,{recursive:!0});let i=r(n,e.registry),a=y(i);try{let e=`${i}.tmp`;m(e,JSON.stringify(t,null,2),`utf-8`),d(e,i)}finally{b(a)}}function S(e){let t=v(),n=_(e),i=new Date().toISOString();return t.workspaces[n]?t.workspaces[n].lastAccessedAt=i:t.workspaces[n]={partition:n,workspacePath:r(e),registeredAt:i,lastAccessedAt:i},c(T(n),{recursive:!0}),x(t),t.workspaces[n]}function C(e){let t=v(),n=_(e);return t.workspaces[n]}function w(){let e=v();return Object.values(e.workspaces)}function T(e){return r(g(),e)}function E(){return s(r(g(),e.registry))}function D(e){return E()?r(T(S(e).partition),`state`):r(e,t.state)}export{_ as computePartitionKey,g as getGlobalDataDir,T as getPartitionDir,E as isUserInstalled,w as listWorkspaces,v as loadRegistry,C as lookupWorkspace,S as 
registerWorkspace,D as resolveStateDir,x as saveRegistry};
@@ -81,6 +81,8 @@ interface SearchResult {
81
81
  interface KBConfig {
82
82
  /** MCP server name. Defaults to 'kb'. */
83
83
  serverName?: string;
84
+ /** Whether to auto-index on startup. Defaults to false. */
85
+ autoIndex?: boolean;
84
86
  /**
85
87
  * Prefix prepended to every MCP tool name to avoid collisions with other
86
88
  * MCP servers. E.g. `"aikit_"` turns `search` → `aikit_search`.
@@ -1 +1 @@
1
- import{cpSync as e,existsSync as t,mkdirSync as n,rmSync as r}from"node:fs";import{basename as i,join as a}from"node:path";import{execSync as o}from"node:child_process";var s=class{constructor(e){this.flowsDir=e}clone(e){let n=this.repoNameFromUrl(e),r=a(this.flowsDir,n);if(t(r))return{success:!1,error:`Flow "${n}" already installed at ${r}. Use update instead.`};try{return this.ensureFlowsDir(),o(`git clone --depth 1 ${e} ${r}`,{stdio:`pipe`,timeout:6e4}),{success:!0,data:r}}catch(e){return{success:!1,error:`Git clone failed: ${e instanceof Error?e.message:String(e)}`}}}update(e){if(!t(e))return{success:!1,error:`Install path not found: ${e}`};try{return o(`git pull --ff-only`,{cwd:e,stdio:`pipe`,timeout:6e4}),{success:!0}}catch(e){return{success:!1,error:`Git pull failed: ${e instanceof Error?e.message:String(e)}`}}}copyLocal(n,r){let i=a(this.flowsDir,r);if(t(i))return{success:!1,error:`Flow "${r}" already installed at ${i}`};try{return this.ensureFlowsDir(),e(n,i,{recursive:!0}),{success:!0,data:i}}catch(e){return{success:!1,error:`Copy failed: ${e instanceof Error?e.message:String(e)}`}}}remove(e){if(!t(e))return{success:!0};try{return r(e,{recursive:!0,force:!0}),{success:!0}}catch(e){return{success:!1,error:`Remove failed: ${e instanceof Error?e.message:String(e)}`}}}runInstallDeps(e){for(let t of e)try{if(t.startsWith(`npm:`)){o(`npx skills add ${t.slice(4)} -g`,{stdio:`pipe`,timeout:12e4});continue}if(t.endsWith(`.git`)||t.includes(`github.com`)){o(`npx skills add ${t} -g`,{stdio:`pipe`,timeout:12e4});continue}return{success:!1,error:`Unknown install entry format: ${t}`}}catch(e){return{success:!1,error:`Install dependency failed for "${t}": ${e instanceof Error?e.message:String(e)}`}}return{success:!0}}repoNameFromUrl(e){return i(e).replace(/\.git$/,``)}ensureFlowsDir(){t(this.flowsDir)||n(this.flowsDir,{recursive:!0})}};export{s as GitInstaller};
1
+ import{cpSync as e,existsSync as t,mkdirSync as n,rmSync as r}from"node:fs";import{basename as i,join as a}from"node:path";import{execSync as o}from"node:child_process";var s=class{constructor(e){this.flowsDir=e}clone(e){let n=this.repoNameFromUrl(e),i=a(this.flowsDir,n);if(t(i))if(!t(a(i,`.git`)))r(i,{recursive:!0,force:!0});else return{success:!1,error:`Flow "${n}" already installed at ${i}. Use update instead.`};try{return this.ensureFlowsDir(),o(`git clone --depth 1 ${e} ${i}`,{stdio:`pipe`,timeout:6e4}),{success:!0,data:i}}catch(e){return{success:!1,error:`Git clone failed: ${e instanceof Error?e.message:String(e)}`}}}update(e){if(!t(e))return{success:!1,error:`Install path not found: ${e}`};try{return o(`git pull --ff-only`,{cwd:e,stdio:`pipe`,timeout:6e4}),{success:!0}}catch(e){return{success:!1,error:`Git pull failed: ${e instanceof Error?e.message:String(e)}`}}}copyLocal(n,r){let i=a(this.flowsDir,r);if(t(i))return{success:!1,error:`Flow "${r}" already installed at ${i}`};try{return this.ensureFlowsDir(),e(n,i,{recursive:!0}),{success:!0,data:i}}catch(e){return{success:!1,error:`Copy failed: ${e instanceof Error?e.message:String(e)}`}}}remove(e){if(!t(e))return{success:!0};try{return r(e,{recursive:!0,force:!0}),{success:!0}}catch(e){return{success:!1,error:`Remove failed: ${e instanceof Error?e.message:String(e)}`}}}runInstallDeps(e){for(let t of e)try{if(t.startsWith(`npm:`)){o(`npx skills add ${t.slice(4)} -g`,{stdio:`pipe`,timeout:12e4});continue}if(t.endsWith(`.git`)||t.includes(`github.com`)){o(`npx skills add ${t} -g`,{stdio:`pipe`,timeout:12e4});continue}return{success:!1,error:`Unknown install entry format: ${t}`}}catch(e){return{success:!1,error:`Install dependency failed for "${t}": ${e instanceof Error?e.message:String(e)}`}}return{success:!0}}repoNameFromUrl(e){return i(e).replace(/\.git$/,``)}ensureFlowsDir(){t(this.flowsDir)||n(this.flowsDir,{recursive:!0})}};export{s as GitInstaller};
@@ -12,11 +12,11 @@ declare class FlowRegistryManager {
12
12
  register(entry: FlowRegistryEntry): FlowResult;
13
13
  /** Remove a flow from the registry */
14
14
  unregister(name: string): FlowResult;
15
- /** Get a specific flow entry */
15
+ /** Get a specific flow entry (checks disk registry first, then builtins) */
16
16
  get(name: string): FlowRegistryEntry | null;
17
- /** List all registered flows */
17
+ /** List all registered flows (disk + builtins, disk overrides builtins) */
18
18
  list(): FlowRegistryEntry[];
19
- /** Check if a flow is registered */
19
+ /** Check if a flow is registered (disk or builtin) */
20
20
  has(name: string): boolean;
21
21
  }
22
22
  //#endregion
@@ -1 +1 @@
1
- import{existsSync as e,mkdirSync as t,readFileSync as n,writeFileSync as r}from"node:fs";import{dirname as i}from"node:path";var a=class{constructor(e){this.registryPath=e}load(){if(!e(this.registryPath))return{version:1,flows:{}};try{let e=n(this.registryPath,`utf-8`);return JSON.parse(e)}catch{return{version:1,flows:{}}}}save(n){let a=i(this.registryPath);e(a)||t(a,{recursive:!0}),r(this.registryPath,JSON.stringify(n,null,2),`utf-8`)}register(e){let t=this.load();return t.flows[e.name]=e,this.save(t),{success:!0}}unregister(e){let t=this.load();return t.flows[e]?(delete t.flows[e],this.save(t),{success:!0}):{success:!1,error:`Flow "${e}" not found in registry`}}get(e){return this.load().flows[e]??null}list(){let e=this.load();return Object.values(e.flows)}has(e){return e in this.load().flows}};export{a as FlowRegistryManager};
1
+ import{getBuiltinFlows as e}from"./builtins.js";import{existsSync as t,mkdirSync as n,readFileSync as r,writeFileSync as i}from"node:fs";import{dirname as a}from"node:path";function o(){return e().map(e=>({name:e.manifest.name,version:e.manifest.version,source:`builtin`,sourceType:`builtin`,installPath:e.scaffoldDir,format:`native`,registeredAt:`1970-01-01T00:00:00.000Z`,updatedAt:`1970-01-01T00:00:00.000Z`,manifest:e.manifest}))}var s=class{constructor(e){this.registryPath=e}load(){if(!t(this.registryPath))return{version:1,flows:{}};try{let e=r(this.registryPath,`utf-8`);return JSON.parse(e)}catch{return{version:1,flows:{}}}}save(e){let r=a(this.registryPath);t(r)||n(r,{recursive:!0}),i(this.registryPath,JSON.stringify(e,null,2),`utf-8`)}register(e){let t=this.load();return t.flows[e.name]=e,this.save(t),{success:!0}}unregister(e){let t=this.load();return t.flows[e]?(delete t.flows[e],this.save(t),{success:!0}):{success:!1,error:`Flow "${e}" not found in registry`}}get(e){return this.load().flows[e]||(o().find(t=>t.name===e)??null)}list(){let e=this.load(),t=new Set(Object.keys(e.flows)),n=Object.values(e.flows);for(let e of o())t.has(e.name)||n.push(e);return n}has(e){return e in this.load().flows?!0:o().some(t=>t.name===e)}};export{s as FlowRegistryManager};
@@ -1 +1 @@
1
- import{copyFileSync as e,existsSync as t,mkdirSync as n,readdirSync as r,rmdirSync as i,symlinkSync as a,unlinkSync as o}from"node:fs";import{basename as s,dirname as c,join as l,relative as u}from"node:path";var d=class{createSymlinks(r,i,s,d){let f=this.getTargets(r,i);for(let r of f){t(r.baseDir)||n(r.baseDir,{recursive:!0});for(let n of d.agents){let i=l(s,n);if(!t(i))continue;let d=this.getAgentStem(n),f=l(r.baseDir,`${d}${r.extension}`);t(f)&&o(f);let p=u(c(f),i);try{a(p,f,`file`)}catch{e(i,f)}}}}removeSymlinks(e,n){let a=this.getTargets(e,n);for(let e of a)if(t(e.baseDir))try{let t=r(e.baseDir,{withFileTypes:!0});for(let n of t)!n.isFile()&&!n.isSymbolicLink()||o(l(e.baseDir,n.name));r(e.baseDir).length===0&&i(e.baseDir)}catch{}}getTargets(e,t){return[{ide:`copilot`,baseDir:l(e,`.github`,`agents`,`flows`,t),extension:`.agent.md`},{ide:`claude-code`,baseDir:l(e,`.claude`,`agents`,`flows`,t),extension:`.md`}]}getAgentStem(e){return s(e).replace(/\.agent\.md$/,``).replace(/\.md$/,``)}};export{d as SymlinkManager};
1
+ import{copyFileSync as e,existsSync as t,mkdirSync as n,readdirSync as r,rmdirSync as i,symlinkSync as a,unlinkSync as o}from"node:fs";import{basename as s,dirname as c,join as l,relative as u}from"node:path";var d=class{createSymlinks(r,i,s,d){let f=this.getTargets(r,i);for(let r of f){t(r.baseDir)||n(r.baseDir,{recursive:!0});for(let n of d.agents){let i=l(s,n);if(!t(i))continue;let d=this.getAgentStem(n),f=l(r.baseDir,`${d}${r.extension}`);t(f)&&o(f);let p=u(c(f),i);try{a(p,f,`file`)}catch{try{e(i,f)}catch(e){console.warn(`Failed to create symlink or copy fallback for ${i}: ${e instanceof Error?e.message:String(e)}`)}}}}}removeSymlinks(e,n){let a=this.getTargets(e,n);for(let e of a)if(t(e.baseDir))try{let t=r(e.baseDir,{withFileTypes:!0});for(let n of t)!n.isFile()&&!n.isSymbolicLink()||o(l(e.baseDir,n.name));r(e.baseDir).length===0&&i(e.baseDir)}catch{}}getTargets(e,t){return[{ide:`copilot`,baseDir:l(e,`.github`,`agents`,`flows`,t),extension:`.agent.md`},{ide:`claude-code`,baseDir:l(e,`.claude`,`agents`,`flows`,t),extension:`.md`}]}getAgentStem(e){return s(e).replace(/\.agent\.md$/,``).replace(/\.md$/,``)}};export{d as SymlinkManager};
@@ -1 +1 @@
1
- import{lstat as e,readFile as t,readdir as n,stat as r}from"node:fs/promises";import{extname as i,join as a,relative as o}from"node:path";import{AIKIT_PATHS as s,FILE_LIMITS as c,createLogger as l}from"../../core/dist/index.js";import{minimatch as u}from"minimatch";const d=l(`indexer`);var f=class l{static BINARY_EXTENSIONS=new Set(`.node,.so,.dylib,.dll,.wasm,.bin,.exe,.png,.jpg,.jpeg,.gif,.bmp,.ico,.webp,.svg,.mp3,.mp4,.wav,.avi,.mov,.flac,.zip,.gz,.tar,.bz2,.7z,.rar,.pdf,.doc,.docx,.xls,.xlsx,.ppt,.pptx,.ttf,.otf,.woff,.woff2,.eot,.pyc,.class,.o,.obj,.a,.lib`.split(`,`));async crawl(e){let t=[],n=new Set;return await this.walkDir(e.rootDir,e.rootDir,e.excludePatterns,t,n),t}async walkDir(u,f,p,m,h){let g;try{g=await n(u,{withFileTypes:!0})}catch(e){let t=e.code;(t===`EACCES`||t===`EPERM`)&&d.warn(`Permission denied, skipping directory`,{dir:u});return}for(let n of g){let d=a(u,n.name),g=o(f,d).replace(/\\/g,`/`);if(!this.isExcluded(g,p)){if(n.isDirectory()){if(n.name.startsWith(`.`)&&!(n.name===s.ai.slice(1)&&g.startsWith(s.ai)))continue;try{if((await e(d)).isSymbolicLink())continue}catch{continue}let t=d;if(h.has(t))continue;h.add(t),await this.walkDir(d,f,p,m,h)}else if(n.isFile()){let e=i(n.name).toLowerCase();if(l.BINARY_EXTENSIONS.has(e))continue;try{if((await r(d)).size>c.maxFileSizeBytes)continue;let n=await t(d,`utf-8`);if(n.includes(`\0`))continue;m.push({relativePath:g,absolutePath:d,content:n,extension:e})}catch{}}}}}isExcluded(e,t){return t.some(t=>u(e,t,{dot:!0}))}};export{f as FilesystemCrawler};
1
+ import{realpathSync as e}from"node:fs";import{lstat as t,readFile as n,readdir as r,stat as i}from"node:fs/promises";import{extname as a,join as o,relative as s}from"node:path";import{AIKIT_PATHS as c,FILE_LIMITS as l,createLogger as u}from"../../core/dist/index.js";import{minimatch as d}from"minimatch";const f=u(`indexer`);var p=class u{static BINARY_EXTENSIONS=new Set(`.node,.so,.dylib,.dll,.wasm,.bin,.exe,.png,.jpg,.jpeg,.gif,.bmp,.ico,.webp,.svg,.mp3,.mp4,.wav,.avi,.mov,.flac,.zip,.gz,.tar,.bz2,.7z,.rar,.pdf,.doc,.docx,.xls,.xlsx,.ppt,.pptx,.ttf,.otf,.woff,.woff2,.eot,.pyc,.class,.o,.obj,.a,.lib`.split(`,`));async crawl(e){let t=[],n=new Set;return await this.walkDir(e.rootDir,e.rootDir,e.excludePatterns,t,n),t}async walkDir(d,p,m,h,g){let _;try{_=await r(d,{withFileTypes:!0})}catch(e){let t=e.code;(t===`EACCES`||t===`EPERM`)&&f.warn(`Permission denied, skipping directory`,{dir:d});return}for(let r of _){let f=o(d,r.name),_=s(p,f).replace(/\\/g,`/`);if(!this.isExcluded(_,m)){if(r.isDirectory()){if(r.name.startsWith(`.`)&&!(r.name===c.ai.slice(1)&&_.startsWith(c.ai)))continue;try{if((await t(f)).isSymbolicLink())continue}catch{continue}let n;try{n=e(f)}catch{continue}if(g.has(n))continue;g.add(n),await this.walkDir(f,p,m,h,g)}else if(r.isFile()){let e=a(r.name).toLowerCase();if(u.BINARY_EXTENSIONS.has(e))continue;try{if((await i(f)).size>l.maxFileSizeBytes)continue;let t=await n(f,`utf-8`);if(t.includes(`\0`))continue;h.push({relativePath:_,absolutePath:f,content:t,extension:e})}catch{}}}}}isExcluded(e,t){return t.some(t=>d(e,t,{dot:!0}))}};export{p as FilesystemCrawler};
@@ -1 +1 @@
1
- import{resolve as e}from"node:path";import{createLogger as t}from"../../core/dist/index.js";import{existsSync as n,readFileSync as r,writeFileSync as i}from"node:fs";const a=t(`hash-cache`);var o=class{cache;filePath;dirty=!1;constructor(t){this.filePath=e(t,`file-hashes.json`),this.cache=new Map}load(){if(n(this.filePath))try{let e=r(this.filePath,`utf-8`),t=JSON.parse(e);this.cache=new Map(Object.entries(t)),a.info(`Hash cache loaded`,{entries:this.cache.size})}catch(e){a.warn(`Hash cache load failed, starting fresh`,{err:e}),this.cache=new Map}}get(e){return this.cache.get(e)}set(e,t){this.cache.set(e,t),this.dirty=!0}delete(e){this.cache.delete(e)&&(this.dirty=!0)}flush(){if(this.dirty)try{let e={};for(let[t,n]of this.cache)e[t]=n;i(this.filePath,JSON.stringify(e),`utf-8`),this.dirty=!1}catch(e){a.warn(`Hash cache flush failed`,{err:e})}}clear(){this.cache.clear(),this.dirty=!0,this.flush()}get size(){return this.cache.size}};export{o as FileHashCache};
1
+ import{existsSync as e,readFileSync as t,writeFileSync as n}from"node:fs";import{resolve as r}from"node:path";import{createLogger as i}from"../../core/dist/index.js";const a=i(`hash-cache`);var o=class{cache;filePath;dirty=!1;constructor(e){this.filePath=r(e,`file-hashes.json`),this.cache=new Map}load(){if(e(this.filePath))try{let e=t(this.filePath,`utf-8`),n=JSON.parse(e);this.cache=new Map(Object.entries(n)),a.info(`Hash cache loaded`,{entries:this.cache.size})}catch(e){a.warn(`Hash cache load failed, starting fresh`,{err:e}),this.cache=new Map}}get(e){return this.cache.get(e)}set(e,t){this.cache.set(e,t),this.dirty=!0}delete(e){this.cache.delete(e)&&(this.dirty=!0)}flush(){if(this.dirty)try{let e={};for(let[t,n]of this.cache)e[t]=n;n(this.filePath,JSON.stringify(e),`utf-8`),this.dirty=!1}catch(e){a.warn(`Hash cache flush failed`,{err:e})}}clear(){this.cache.clear(),this.dirty=!0,this.flush()}get size(){return this.cache.size}};export{o as FileHashCache};
@@ -1,38 +1,37 @@
1
- /**
2
- * DirectKBClient In-process IKBClient using store/embedder directly.
3
- * Used by the TUI and other in-process consumers.
4
- */
5
- import type { IEmbedder } from '@kb/embeddings';
6
- import type { IGraphStore, IKnowledgeStore } from '@kb/store';
7
- import type { IKBClient, KBGraphData, KBKnowledgeEntry, KBSearchResult, KBStatus } from './types.js';
1
+ import { IKBClient, KBGraphData, KBKnowledgeEntry, KBSearchResult, KBStatus } from "./types.js";
2
+ import { IEmbedder } from "../../embeddings/dist/index.js";
3
+ import { IGraphStore, IKnowledgeStore } from "../../store/dist/index.js";
4
+
5
+ //#region packages/kb-client/src/direct-client.d.ts
8
6
  interface CuratedEntry {
9
- path: string;
10
- title: string;
11
- category: string;
12
- tags: string[];
13
- content: string;
7
+ path: string;
8
+ title: string;
9
+ category: string;
10
+ tags: string[];
11
+ content: string;
14
12
  }
15
- export interface DirectClientDeps {
16
- store: IKnowledgeStore;
17
- embedder: IEmbedder;
18
- graphStore?: IGraphStore;
19
- /** Function to list curated entries */
20
- listCurated?: () => Promise<CuratedEntry[]>;
21
- /** Function to read a single curated entry */
22
- readCurated?: (path: string) => Promise<CuratedEntry | null>;
13
+ interface DirectClientDeps {
14
+ store: IKnowledgeStore;
15
+ embedder: IEmbedder;
16
+ graphStore?: IGraphStore;
17
+ /** Function to list curated entries */
18
+ listCurated?: () => Promise<CuratedEntry[]>;
19
+ /** Function to read a single curated entry */
20
+ readCurated?: (path: string) => Promise<CuratedEntry | null>;
23
21
  }
24
- export declare class DirectKBClient implements IKBClient {
25
- private readonly deps;
26
- constructor(deps: DirectClientDeps);
27
- getStatus(): Promise<KBStatus>;
28
- search(query: string, options?: {
29
- limit?: number;
30
- mode?: 'hybrid' | 'semantic' | 'keyword';
31
- }): Promise<KBSearchResult[]>;
32
- listKnowledge(): Promise<KBKnowledgeEntry[]>;
33
- readKnowledge(path: string): Promise<KBKnowledgeEntry | null>;
34
- getGraph(query?: string): Promise<KBGraphData>;
35
- getFileTree(): Promise<string[]>;
36
- private getEdgesForNodes;
22
+ declare class DirectKBClient implements IKBClient {
23
+ private readonly deps;
24
+ constructor(deps: DirectClientDeps);
25
+ getStatus(): Promise<KBStatus>;
26
+ search(query: string, options?: {
27
+ limit?: number;
28
+ mode?: 'hybrid' | 'semantic' | 'keyword';
29
+ }): Promise<KBSearchResult[]>;
30
+ listKnowledge(): Promise<KBKnowledgeEntry[]>;
31
+ readKnowledge(path: string): Promise<KBKnowledgeEntry | null>;
32
+ getGraph(query?: string): Promise<KBGraphData>;
33
+ getFileTree(): Promise<string[]>;
34
+ private getEdgesForNodes;
37
35
  }
38
- export {};
36
+ //#endregion
37
+ export { DirectClientDeps, DirectKBClient };
@@ -1,4 +1,5 @@
1
- export { type DirectClientDeps, DirectKBClient } from './direct-client.js';
2
- export { McpKBClient } from './mcp-client.js';
3
- export { extractStructured, extractText, type ParsedContent, parseToolResult, tryParseJson, } from './parsers.js';
4
- export type { IKBClient, KBGraphData, KBKnowledgeEntry, KBSearchResult, KBStatus, } from './types.js';
1
+ import { IKBClient, KBGraphData, KBKnowledgeEntry, KBSearchResult, KBStatus } from "./types.js";
2
+ import { DirectClientDeps, DirectKBClient } from "./direct-client.js";
3
+ import { McpKBClient } from "./mcp-client.js";
4
+ import { ParsedContent, extractStructured, extractText, parseToolResult, tryParseJson } from "./parsers.js";
5
+ export { type DirectClientDeps, DirectKBClient, type IKBClient, type KBGraphData, type KBKnowledgeEntry, type KBSearchResult, type KBStatus, McpKBClient, type ParsedContent, extractStructured, extractText, parseToolResult, tryParseJson };
@@ -1,19 +1,19 @@
1
- /**
2
- * McpKBClient IKBClient over MCP transport.
3
- * Used by the Dashboard connecting to KB server via HTTP.
4
- */
5
- import type { Client } from '@modelcontextprotocol/sdk/client/index.js';
6
- import type { IKBClient, KBGraphData, KBKnowledgeEntry, KBSearchResult, KBStatus } from './types.js';
7
- export declare class McpKBClient implements IKBClient {
8
- private readonly client;
9
- constructor(client: Client);
10
- getStatus(): Promise<KBStatus>;
11
- search(query: string, options?: {
12
- limit?: number;
13
- mode?: 'hybrid' | 'semantic' | 'keyword';
14
- }): Promise<KBSearchResult[]>;
15
- listKnowledge(): Promise<KBKnowledgeEntry[]>;
16
- readKnowledge(path: string): Promise<KBKnowledgeEntry | null>;
17
- getGraph(query?: string): Promise<KBGraphData>;
18
- getFileTree(): Promise<string[]>;
1
+ import { IKBClient, KBGraphData, KBKnowledgeEntry, KBSearchResult, KBStatus } from "./types.js";
2
+ import { Client } from "@modelcontextprotocol/sdk/client/index.js";
3
+
4
+ //#region packages/kb-client/src/mcp-client.d.ts
5
+ declare class McpKBClient implements IKBClient {
6
+ private readonly client;
7
+ constructor(client: Client);
8
+ getStatus(): Promise<KBStatus>;
9
+ search(query: string, options?: {
10
+ limit?: number;
11
+ mode?: 'hybrid' | 'semantic' | 'keyword';
12
+ }): Promise<KBSearchResult[]>;
13
+ listKnowledge(): Promise<KBKnowledgeEntry[]>;
14
+ readKnowledge(path: string): Promise<KBKnowledgeEntry | null>;
15
+ getGraph(query?: string): Promise<KBGraphData>;
16
+ getFileTree(): Promise<string[]>;
19
17
  }
18
+ //#endregion
19
+ export { McpKBClient };
@@ -1,32 +1,35 @@
1
+ //#region packages/kb-client/src/parsers.d.ts
1
2
  /**
2
3
  * Content parsers for MCP tool responses.
3
4
  * Used by McpKBClient to parse structuredContent from tool calls.
4
5
  */
5
- export interface ParsedContent<T = unknown> {
6
- text: string;
7
- structured?: T;
6
+ interface ParsedContent<T = unknown> {
7
+ text: string;
8
+ structured?: T;
8
9
  }
9
10
  /**
10
11
  * Extract text content from an MCP tool result.
11
12
  */
12
- export declare function extractText(result: {
13
- content?: unknown;
13
+ declare function extractText(result: {
14
+ content?: unknown;
14
15
  } | null | undefined): string;
15
16
  /**
16
17
  * Extract structured content from an MCP tool result.
17
18
  */
18
- export declare function extractStructured<T>(result: {
19
- structuredContent?: unknown;
19
+ declare function extractStructured<T>(result: {
20
+ structuredContent?: unknown;
20
21
  } | null | undefined): T | undefined;
21
22
  /**
22
23
  * Parse a tool result, returning both text and structured content.
23
24
  */
24
- export declare function parseToolResult<T = unknown>(result: {
25
- content?: unknown;
26
- structuredContent?: unknown;
25
+ declare function parseToolResult<T = unknown>(result: {
26
+ content?: unknown;
27
+ structuredContent?: unknown;
27
28
  } | null | undefined): ParsedContent<T>;
28
29
  /**
29
30
  * Try to parse JSON from a text tool result.
30
31
  * Returns undefined if parsing fails.
31
32
  */
32
- export declare function tryParseJson<T = unknown>(text: string): T | undefined;
33
+ declare function tryParseJson<T = unknown>(text: string): T | undefined;
34
+ //#endregion
35
+ export { ParsedContent, extractStructured, extractText, parseToolResult, tryParseJson };
@@ -1,3 +1,4 @@
1
+ //#region packages/kb-client/src/types.d.ts
1
2
  /**
2
3
  * IKBClient — Unified data access interface for KB consumers.
3
4
  *
@@ -5,55 +6,57 @@
5
6
  * - DirectKBClient (in-process, used by TUI)
6
7
  * - McpKBClient (over MCP transport, used by Dashboard)
7
8
  */
8
- export interface KBStatus {
9
- totalRecords: number;
10
- totalFiles: number;
11
- lastIndexedAt: string | null;
12
- onboarded: boolean;
9
+ interface KBStatus {
10
+ totalRecords: number;
11
+ totalFiles: number;
12
+ lastIndexedAt: string | null;
13
+ onboarded: boolean;
13
14
  }
14
- export interface KBSearchResult {
15
- sourcePath: string;
16
- contentType: string;
17
- score: number;
18
- content: string;
19
- headingPath?: string;
20
- startLine?: number;
21
- endLine?: number;
15
+ interface KBSearchResult {
16
+ sourcePath: string;
17
+ contentType: string;
18
+ score: number;
19
+ content: string;
20
+ headingPath?: string;
21
+ startLine?: number;
22
+ endLine?: number;
22
23
  }
23
- export interface KBKnowledgeEntry {
24
- path: string;
25
- title: string;
26
- category: string;
27
- tags: string[];
28
- content: string;
24
+ interface KBKnowledgeEntry {
25
+ path: string;
26
+ title: string;
27
+ category: string;
28
+ tags: string[];
29
+ content: string;
29
30
  }
30
- export interface KBGraphData {
31
- nodes: Array<{
32
- id: string;
33
- name: string;
34
- type: string;
35
- sourcePath?: string;
36
- }>;
37
- edges: Array<{
38
- fromId: string;
39
- toId: string;
40
- type: string;
41
- }>;
31
+ interface KBGraphData {
32
+ nodes: Array<{
33
+ id: string;
34
+ name: string;
35
+ type: string;
36
+ sourcePath?: string;
37
+ }>;
38
+ edges: Array<{
39
+ fromId: string;
40
+ toId: string;
41
+ type: string;
42
+ }>;
42
43
  }
43
- export interface IKBClient {
44
- /** Get KB status. */
45
- getStatus(): Promise<KBStatus>;
46
- /** Search the knowledge base. */
47
- search(query: string, options?: {
48
- limit?: number;
49
- mode?: 'hybrid' | 'semantic' | 'keyword';
50
- }): Promise<KBSearchResult[]>;
51
- /** List curated knowledge entries. */
52
- listKnowledge(): Promise<KBKnowledgeEntry[]>;
53
- /** Read a specific curated entry. */
54
- readKnowledge(path: string): Promise<KBKnowledgeEntry | null>;
55
- /** Get knowledge graph data. */
56
- getGraph(query?: string): Promise<KBGraphData>;
57
- /** Get file tree of indexed sources. */
58
- getFileTree(): Promise<string[]>;
44
+ interface IKBClient {
45
+ /** Get KB status. */
46
+ getStatus(): Promise<KBStatus>;
47
+ /** Search the knowledge base. */
48
+ search(query: string, options?: {
49
+ limit?: number;
50
+ mode?: 'hybrid' | 'semantic' | 'keyword';
51
+ }): Promise<KBSearchResult[]>;
52
+ /** List curated knowledge entries. */
53
+ listKnowledge(): Promise<KBKnowledgeEntry[]>;
54
+ /** Read a specific curated entry. */
55
+ readKnowledge(path: string): Promise<KBKnowledgeEntry | null>;
56
+ /** Get knowledge graph data. */
57
+ getGraph(query?: string): Promise<KBGraphData>;
58
+ /** Get file tree of indexed sources. */
59
+ getFileTree(): Promise<string[]>;
59
60
  }
61
+ //#endregion
62
+ export { IKBClient, KBGraphData, KBKnowledgeEntry, KBSearchResult, KBStatus };