@vpxa/aikit 0.1.7 → 0.1.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@vpxa/aikit",
3
- "version": "0.1.7",
3
+ "version": "0.1.8",
4
4
  "type": "module",
5
5
  "description": "Local-first AI developer toolkit — knowledge base, code analysis, context management, and developer tools for LLM agents",
6
6
  "license": "MIT",
@@ -1,3 +1,3 @@
1
- import{BackgroundTaskScheduler as e}from"./background-task.js";import{clearCompletionCache as t}from"./completions.js";import{CuratedKnowledgeManager as n}from"./curated-manager.js";import{createElicitor as r,noopElicitor as i}from"./elicitor.js";import{IdleTimer as a}from"./idle-timer.js";import{bridgeMcpLogging as o}from"./mcp-logging.js";import{MemoryMonitor as s}from"./memory-monitor.js";import{registerPrompts as c}from"./prompts.js";import{installReplayInterceptor as l}from"./replay-interceptor.js";import{ResourceNotifier as u}from"./resources/resource-notifier.js";import{registerResources as d}from"./resources/resources.js";import{createSamplingClient as f}from"./sampling.js";import{installStructuredContentGuard as p}from"./structured-content-guard.js";import{getToolMeta as m}from"./tool-metadata.js";import{installToolPrefix as h}from"./tool-prefix.js";import{ToolTimeoutError as g,getToolTimeout as _,withTimeout as v}from"./tool-timeout.js";import{registerAnalyzeDependenciesTool as y,registerAnalyzeDiagramTool as b,registerAnalyzeEntryPointsTool as x,registerAnalyzePatternsTool as S,registerAnalyzeStructureTool as ee,registerAnalyzeSymbolsTool as C,registerBlastRadiusTool as w}from"./tools/analyze.tools.js";import{registerAuditTool as T}from"./tools/audit.tool.js";import{registerBrainstormTool as E}from"./tools/brainstorm.tool.js";import{initBridgeComponents as te,registerErPullTool as D,registerErPushTool as O,registerErSyncStatusTool as k}from"./tools/bridge.tools.js";import{registerConfigTool as ne}from"./tools/config.tool.js";import{registerCompactTool as A,registerDeadSymbolsTool as j,registerFileSummaryTool as M,registerFindTool as N,registerScopeMapTool as re,registerSymbolTool as P,registerTraceTool as F}from"./tools/context.tools.js";import{registerErEvolveReviewTool as ie}from"./tools/evolution.tools.js";import{registerBatchTool as ae,registerCheckTool as oe,registerDelegateTool as se,registerEvalTool as ce,registerParseOutputTool as 
le,registerTestRunTool as I}from"./tools/execution.tools.js";import{registerFlowTools as ue}from"./tools/flow.tools.js";import{registerDigestTool as de,registerEvidenceMapTool as L,registerForgeClassifyTool as R,registerForgeGroundTool as fe,registerStratumCardTool as pe}from"./tools/forge.tools.js";import{registerForgetTool as me}from"./tools/forget.tool.js";import{registerGraphTool as he}from"./tools/graph.tool.js";import{registerGuideTool as z,registerHealthTool as B,registerProcessTool as V,registerWatchTool as H,registerWebFetchTool as U}from"./tools/infra.tools.js";import{registerListTool as ge}from"./tools/list.tool.js";import{registerLookupTool as _e}from"./tools/lookup.tool.js";import{registerCodemodTool as W,registerDataTransformTool as G,registerDiffParseTool as K,registerGitContextTool as q,registerRenameTool as ve}from"./tools/manipulation.tools.js";import{registerOnboardTool as ye}from"./tools/onboard.tool.js";import{registerCheckpointTool as be,registerLaneTool as xe,registerQueueTool as Se,registerStashTool as Ce,registerWorksetTool as we}from"./tools/persistence.tools.js";import{registerErUpdatePolicyTool as Te}from"./tools/policy.tools.js";import{registerPresentTool as Ee}from"./tools/present/tool.js";import"./tools/present/index.js";import{registerProduceKnowledgeTool as De}from"./tools/produce.tool.js";import{registerReadTool as Oe}from"./tools/read.tool.js";import{registerReindexTool as ke}from"./tools/reindex.tool.js";import{registerRememberTool as Ae}from"./tools/remember.tool.js";import{registerReplayTool as je}from"./tools/replay.tool.js";import{registerRestoreTool as Me}from"./tools/restore.tool.js";import{registerSearchTool as Ne}from"./tools/search.tool.js";import{getCurrentVersion as Pe}from"./version-check.js";import{registerEarlyStatusTool as Fe,registerStatusTool as Ie}from"./tools/status.tool.js";import{registerUpdateTool as Le}from"./tools/update.tool.js";import{registerChangelogTool as Re,registerEncodeTool as ze,registerEnvTool 
as Be,registerHttpTool as Ve,registerMeasureTool as He,registerRegexTestTool as Ue,registerSchemaValidateTool as We,registerSnippetTool as Ge,registerTimeTool as J,registerWebSearchTool as Ke}from"./tools/utility.tools.js";import{existsSync as qe,statSync as Je}from"node:fs";import{resolve as Ye}from"node:path";import{AIKIT_PATHS as Xe,createLogger as Ze,serializeError as Y}from"../../core/dist/index.js";import{initializeWasm as Qe}from"../../chunker/dist/index.js";import{OnnxEmbedder as $e}from"../../embeddings/dist/index.js";import{EvolutionCollector as et,PolicyStore as tt}from"../../enterprise-bridge/dist/index.js";import{FileHashCache as nt,IncrementalIndexer as rt}from"../../indexer/dist/index.js";import{SqliteGraphStore as it,createStore as at}from"../../store/dist/index.js";import{FileCache as ot}from"../../tools/dist/index.js";import{McpServer as st}from"@modelcontextprotocol/sdk/server/mcp.js";const X=Ze(`server`);async function Z(e){X.info(`Initializing AI Kit components`);let[t,r,i,a]=await Promise.all([(async()=>{let t=new $e({model:e.embedding.model,dimensions:e.embedding.dimensions});return await t.initialize(),X.info(`Embedder loaded`,{modelId:t.modelId,dimensions:t.dimensions}),t})(),(async()=>{let t=await at({backend:e.store.backend,path:e.store.path});return await t.initialize(),X.info(`Store initialized`),t})(),(async()=>{let t=new it({path:e.store.path});return await t.initialize(),X.info(`Graph store initialized`),t})(),(async()=>{let e=await Qe();return e?X.info(`WASM tree-sitter enabled for AST analysis`):X.warn(`WASM tree-sitter not available; analyzers will use regex fallback`),e})()]),o=new rt(t,r),s=new nt(e.store.path);s.load(),o.setHashCache(s);let c=e.curated.path,l=new n(c,r,t);o.setGraphStore(i);let u=te(e.er),d=u?new tt(e.curated.path):void 0;d&&X.info(`Policy store initialized`,{ruleCount:d.getRules().length});let f=u?new et:void 
0,p=Ye(e.sources[0]?.path??process.cwd(),Xe.aiKb),m=qe(p),h=e.onboardDir?qe(e.onboardDir):!1,g=m||h,_,v=m?p:e.onboardDir;if(g&&v)try{_=Je(v).mtime.toISOString()}catch{}return X.info(`Onboard state detected`,{onboardComplete:g,onboardTimestamp:_,aiKbExists:m,onboardDirExists:h}),{embedder:t,store:r,indexer:o,curated:l,graphStore:i,fileCache:new ot,bridge:u,policyStore:d,evolutionCollector:f,onboardComplete:g,onboardTimestamp:_}}function ct(e,t){let n=new st({name:t.serverName??`aikit`,version:Pe()},{capabilities:{logging:{}}});return o(n),h(n,t.toolPrefix??``),Q(n,e,t,r(n),new u(n),f(n)),c(n,{curated:e.curated,store:e.store,graphStore:e.graphStore},t.indexMode),n}function Q(e,t,n,r,i,a,o,s){l(e),p(e),Ne(e,t.embedder,t.store,t.graphStore,t.bridge,t.evolutionCollector,a),_e(e,t.store),Ie(e,t.store,t.graphStore,t.curated,{onboardComplete:t.onboardComplete,onboardTimestamp:t.onboardTimestamp},n,o,s),ne(e,n),ke(e,t.indexer,n,t.curated,t.store,i,o),Ae(e,t.curated,t.policyStore,t.evolutionCollector,i),Le(e,t.curated,i),me(e,t.curated,i),Oe(e,t.curated),ge(e,t.curated),ee(e,t.store,t.embedder),y(e,t.store,t.embedder),C(e,t.store,t.embedder),S(e,t.store,t.embedder),x(e,t.store,t.embedder),b(e,t.store,t.embedder),w(e,t.store,t.embedder,t.graphStore),De(e,n),ye(e,t.store,t.embedder,n),he(e,t.graphStore),T(e,t.store,t.embedder);let 
c=n.sources[0]?.path??process.cwd();A(e,t.embedder,t.fileCache,c),re(e,t.embedder,t.store),N(e,t.embedder,t.store),le(e),we(e),oe(e),ae(e,t.embedder,t.store),P(e,t.embedder,t.store,t.graphStore),ce(e),I(e),Ce(e),q(e),K(e),ve(e),W(e),Me(e),M(e,t.fileCache,c),be(e),G(e),F(e,t.embedder,t.store,t.graphStore),V(e),H(e),j(e,t.embedder,t.store),se(e,a),B(e),xe(e),Se(e),U(e),z(e,o),L(e),de(e,t.embedder),R(e),pe(e,t.embedder,t.fileCache),fe(e,t.embedder,t.store),Ee(e,r),r&&E(e,r),Ke(e),Ve(e),Ue(e),ze(e),He(e),Re(e),We(e),Ge(e),Be(e),J(e),ue(e,n),t.bridge&&(O(e,t.bridge,t.evolutionCollector),D(e,t.bridge),k(e,t.bridge)),t.policyStore&&Te(e,t.policyStore),t.evolutionCollector&&ie(e,t.evolutionCollector),d(e,t.store,t.curated),je(e)}async function lt(e){let t=await Z(e),n=ct(t,e);X.info(`MCP server configured`,{toolCount:$.length,resourceCount:2});let r=async()=>{try{let n=e.sources.map(e=>e.path).join(`, `);X.info(`Running initial index`,{sourcePaths:n});let r=await t.indexer.index(e,e=>{e.phase===`crawling`||e.phase===`done`||(e.phase===`chunking`&&e.currentFile&&X.debug(`Indexing file`,{current:e.filesProcessed+1,total:e.filesTotal,file:e.currentFile}),e.phase===`cleanup`&&X.debug(`Index cleanup`,{staleEntries:e.filesTotal-e.filesProcessed}))});X.info(`Initial index complete`,{filesProcessed:r.filesProcessed,filesSkipped:r.filesSkipped,chunksCreated:r.chunksCreated,durationMs:r.durationMs});try{await t.store.createFtsIndex()}catch(e){X.warn(`FTS index creation failed`,Y(e))}try{let e=await t.curated.reindexAll();X.info(`Curated re-index complete`,{indexed:e.indexed})}catch(e){X.error(`Curated re-index failed`,Y(e))}}catch(e){X.error(`Initial index failed; will retry on aikit_reindex`,Y(e))}},i=async()=>{X.info(`Shutting down`),await Promise.all([t.embedder.shutdown().catch(()=>{}),t.graphStore.close().catch(()=>{}),t.store.close().catch(()=>{})]),process.exit(0)};process.on(`SIGINT`,i),process.on(`SIGTERM`,i);let 
a=process.ppid,o=setInterval(()=>{try{process.kill(a,0)}catch{X.info(`Parent process died; shutting down`,{parentPid:a}),clearInterval(o),i()}},5e3);return o.unref(),{server:n,runInitialIndex:r,shutdown:i}}const ut=new Set(`batch.brainstorm.changelog.check.checkpoint.codemod.compact.config.data_transform.delegate.diff_parse.digest.encode.env.eval.evidence_map.file_summary.forge_classify.git_context.graph.guide.health.http.lane.measure.onboard.parse_output.present.process.produce_knowledge.queue.read.regex_test.reindex.remember.rename.replay.restore.schema_validate.scope_map.snippet.stash.status.stratum_card.test_run.time.update.forget.list.watch.web_fetch.web_search.workset`.split(`.`)),dt=5e3,ft=new Set(`brainstorm.changelog.check.checkpoint.codemod.data_transform.delegate.diff_parse.encode.env.eval.evidence_map.forge_classify.git_context.guide.present.health.http.lane.measure.parse_output.process.produce_knowledge.queue.regex_test.rename.replay.restore.schema_validate.snippet.stash.status.test_run.time.watch.web_fetch.web_search.workset`.split(`.`));function pt(e){oe(e),ce(e),I(e),le(e),se(e),q(e),K(e),ve(e),W(e),G(e),we(e),Ce(e),be(e),Me(e),xe(e),Se(e),B(e),V(e),H(e),U(e),z(e),L(e),R(e),Ee(e),E(e,i),De(e),je(e),Fe(e),Ke(e),Ve(e),Ue(e),ze(e),He(e),Re(e),We(e),Ge(e),Be(e),J(e)}const 
$=`analyze_dependencies.analyze_diagram.analyze_entry_points.analyze_patterns.analyze_structure.analyze_symbols.audit.batch.blast_radius.brainstorm.changelog.check.checkpoint.codemod.compact.config.data_transform.dead_symbols.delegate.diff_parse.digest.encode.env.eval.evidence_map.file_summary.find.flow_info.flow_list.flow_reset.flow_start.flow_status.flow_step.forge_classify.forge_ground.forget.git_context.graph.guide.health.http.lane.list.lookup.measure.onboard.parse_output.present.process.produce_knowledge.queue.read.regex_test.reindex.remember.rename.replay.restore.schema_validate.scope_map.search.snippet.stash.status.stratum_card.symbol.test_run.time.trace.update.watch.web_fetch.web_search.workset`.split(`.`);function mt(n,i){let l=new st({name:n.serverName??`aikit`,version:Pe()},{capabilities:{logging:{}}}),d=`initializing`,p=``,y=!1,b=null,x=null,S=null;function ee(e){if(!e||typeof e!=`object`)return[];let t=e,n=[];for(let e of[`path`,`file`,`source_path`,`sourcePath`,`filePath`]){let r=t[e];typeof r==`string`&&r&&n.push(r)}for(let e of[`changed_files`,`paths`,`files`]){let r=t[e];if(Array.isArray(r))for(let e of r){if(typeof e==`string`){n.push(e);continue}e&&typeof e==`object`&&typeof e.path==`string`&&n.push(e.path)}}if(Array.isArray(t.sources))for(let e of t.sources)e&&typeof e==`object`&&typeof e.path==`string`&&n.push(e.path);return n}let C=()=>d===`failed`?[`❌ AI Kit initialization failed — this tool is unavailable.`,``,p?`Error: ${p}`:``,``,`**35 tools are still available** and fully functional:`,`check, eval, test_run, git_context, health, measure, web_fetch, web_search,`,`regex_test, encode, stash, checkpoint, lane, process, time, env, and more.`,``,`Try restarting the MCP server to retry initialization.`].filter(Boolean).join(`
1
+ import{BackgroundTaskScheduler as e}from"./background-task.js";import{clearCompletionCache as t}from"./completions.js";import{CuratedKnowledgeManager as n}from"./curated-manager.js";import{createElicitor as r,noopElicitor as i}from"./elicitor.js";import{IdleTimer as a}from"./idle-timer.js";import{bridgeMcpLogging as o}from"./mcp-logging.js";import{MemoryMonitor as s}from"./memory-monitor.js";import{registerPrompts as c}from"./prompts.js";import{installReplayInterceptor as l}from"./replay-interceptor.js";import{ResourceNotifier as u}from"./resources/resource-notifier.js";import{registerResources as d}from"./resources/resources.js";import{createSamplingClient as f}from"./sampling.js";import{installStructuredContentGuard as p}from"./structured-content-guard.js";import{getToolMeta as m}from"./tool-metadata.js";import{installToolPrefix as h}from"./tool-prefix.js";import{ToolTimeoutError as g,getToolTimeout as _,withTimeout as v}from"./tool-timeout.js";import{registerAnalyzeDependenciesTool as y,registerAnalyzeDiagramTool as b,registerAnalyzeEntryPointsTool as x,registerAnalyzePatternsTool as S,registerAnalyzeStructureTool as ee,registerAnalyzeSymbolsTool as C,registerBlastRadiusTool as w}from"./tools/analyze.tools.js";import{registerAuditTool as T}from"./tools/audit.tool.js";import{registerBrainstormTool as E}from"./tools/brainstorm.tool.js";import{initBridgeComponents as te,registerErPullTool as D,registerErPushTool as O,registerErSyncStatusTool as k}from"./tools/bridge.tools.js";import{registerConfigTool as ne}from"./tools/config.tool.js";import{registerCompactTool as A,registerDeadSymbolsTool as j,registerFileSummaryTool as M,registerFindTool as N,registerScopeMapTool as re,registerSymbolTool as P,registerTraceTool as F}from"./tools/context.tools.js";import{registerErEvolveReviewTool as ie}from"./tools/evolution.tools.js";import{registerBatchTool as ae,registerCheckTool as oe,registerDelegateTool as se,registerEvalTool as ce,registerParseOutputTool as 
le,registerTestRunTool as I}from"./tools/execution.tools.js";import{registerFlowTools as ue}from"./tools/flow.tools.js";import{registerDigestTool as de,registerEvidenceMapTool as L,registerForgeClassifyTool as R,registerForgeGroundTool as fe,registerStratumCardTool as pe}from"./tools/forge.tools.js";import{registerForgetTool as me}from"./tools/forget.tool.js";import{registerGraphTool as he}from"./tools/graph.tool.js";import{registerGuideTool as z,registerHealthTool as B,registerProcessTool as V,registerWatchTool as H,registerWebFetchTool as U}from"./tools/infra.tools.js";import{registerListTool as ge}from"./tools/list.tool.js";import{registerLookupTool as _e}from"./tools/lookup.tool.js";import{registerCodemodTool as W,registerDataTransformTool as G,registerDiffParseTool as K,registerGitContextTool as q,registerRenameTool as ve}from"./tools/manipulation.tools.js";import{registerOnboardTool as ye}from"./tools/onboard.tool.js";import{registerCheckpointTool as be,registerLaneTool as xe,registerQueueTool as Se,registerStashTool as Ce,registerWorksetTool as we}from"./tools/persistence.tools.js";import{registerErUpdatePolicyTool as Te}from"./tools/policy.tools.js";import{registerPresentTool as Ee}from"./tools/present/tool.js";import"./tools/present/index.js";import{registerProduceKnowledgeTool as De}from"./tools/produce.tool.js";import{registerReadTool as Oe}from"./tools/read.tool.js";import{registerReindexTool as ke}from"./tools/reindex.tool.js";import{registerRememberTool as Ae}from"./tools/remember.tool.js";import{registerReplayTool as je}from"./tools/replay.tool.js";import{registerRestoreTool as Me}from"./tools/restore.tool.js";import{registerSearchTool as Ne}from"./tools/search.tool.js";import{getCurrentVersion as Pe}from"./version-check.js";import{registerEarlyStatusTool as Fe,registerStatusTool as Ie}from"./tools/status.tool.js";import{registerUpdateTool as Le}from"./tools/update.tool.js";import{registerChangelogTool as Re,registerEncodeTool as ze,registerEnvTool 
as Be,registerHttpTool as Ve,registerMeasureTool as He,registerRegexTestTool as Ue,registerSchemaValidateTool as We,registerSnippetTool as Ge,registerTimeTool as J,registerWebSearchTool as Ke}from"./tools/utility.tools.js";import{existsSync as qe,statSync as Je}from"node:fs";import{resolve as Ye}from"node:path";import{AIKIT_PATHS as Xe,createLogger as Ze,serializeError as Y}from"../../core/dist/index.js";import{initializeWasm as Qe}from"../../chunker/dist/index.js";import{OnnxEmbedder as $e}from"../../embeddings/dist/index.js";import{EvolutionCollector as et,PolicyStore as tt}from"../../enterprise-bridge/dist/index.js";import{FileHashCache as nt,IncrementalIndexer as rt}from"../../indexer/dist/index.js";import{SqliteGraphStore as it,createStore as at}from"../../store/dist/index.js";import{FileCache as ot}from"../../tools/dist/index.js";import{completable as st}from"@modelcontextprotocol/sdk/server/completable.js";import{McpServer as ct}from"@modelcontextprotocol/sdk/server/mcp.js";import{z as lt}from"zod";const X=Ze(`server`);async function Z(e){X.info(`Initializing AI Kit components`);let[t,r,i,a]=await Promise.all([(async()=>{let t=new $e({model:e.embedding.model,dimensions:e.embedding.dimensions});return await t.initialize(),X.info(`Embedder loaded`,{modelId:t.modelId,dimensions:t.dimensions}),t})(),(async()=>{let t=await at({backend:e.store.backend,path:e.store.path});return await t.initialize(),X.info(`Store initialized`),t})(),(async()=>{let t=new it({path:e.store.path});return await t.initialize(),X.info(`Graph store initialized`),t})(),(async()=>{let e=await Qe();return e?X.info(`WASM tree-sitter enabled for AST analysis`):X.warn(`WASM tree-sitter not available; analyzers will use regex fallback`),e})()]),o=new rt(t,r),s=new nt(e.store.path);s.load(),o.setHashCache(s);let c=e.curated.path,l=new n(c,r,t);o.setGraphStore(i);let u=te(e.er),d=u?new tt(e.curated.path):void 0;d&&X.info(`Policy store initialized`,{ruleCount:d.getRules().length});let f=u?new 
et:void 0,p=Ye(e.sources[0]?.path??process.cwd(),Xe.aiKb),m=qe(p),h=e.onboardDir?qe(e.onboardDir):!1,g=m||h,_,v=m?p:e.onboardDir;if(g&&v)try{_=Je(v).mtime.toISOString()}catch{}return X.info(`Onboard state detected`,{onboardComplete:g,onboardTimestamp:_,aiKbExists:m,onboardDirExists:h}),{embedder:t,store:r,indexer:o,curated:l,graphStore:i,fileCache:new ot,bridge:u,policyStore:d,evolutionCollector:f,onboardComplete:g,onboardTimestamp:_}}function ut(e,t){let n=new ct({name:t.serverName??`aikit`,version:Pe()},{capabilities:{logging:{}}});return o(n),h(n,t.toolPrefix??``),Q(n,e,t,r(n),new u(n),f(n)),c(n,{curated:e.curated,store:e.store,graphStore:e.graphStore},t.indexMode),n}function Q(e,t,n,r,i,a,o,s){l(e),p(e),Ne(e,t.embedder,t.store,t.graphStore,t.bridge,t.evolutionCollector,a),_e(e,t.store),Ie(e,t.store,t.graphStore,t.curated,{onboardComplete:t.onboardComplete,onboardTimestamp:t.onboardTimestamp},n,o,s),ne(e,n),ke(e,t.indexer,n,t.curated,t.store,i,o),Ae(e,t.curated,t.policyStore,t.evolutionCollector,i),Le(e,t.curated,i),me(e,t.curated,i),Oe(e,t.curated),ge(e,t.curated),ee(e,t.store,t.embedder),y(e,t.store,t.embedder),C(e,t.store,t.embedder),S(e,t.store,t.embedder),x(e,t.store,t.embedder),b(e,t.store,t.embedder),w(e,t.store,t.embedder,t.graphStore),De(e,n),ye(e,t.store,t.embedder,n),he(e,t.graphStore),T(e,t.store,t.embedder);let 
c=n.sources[0]?.path??process.cwd();A(e,t.embedder,t.fileCache,c),re(e,t.embedder,t.store),N(e,t.embedder,t.store),le(e),we(e),oe(e),ae(e,t.embedder,t.store),P(e,t.embedder,t.store,t.graphStore),ce(e),I(e),Ce(e),q(e),K(e),ve(e),W(e),Me(e),M(e,t.fileCache,c),be(e),G(e),F(e,t.embedder,t.store,t.graphStore),V(e),H(e),j(e,t.embedder,t.store),se(e,a),B(e),xe(e),Se(e),U(e),z(e,o),L(e),de(e,t.embedder),R(e),pe(e,t.embedder,t.fileCache),fe(e,t.embedder,t.store),Ee(e,r),r&&E(e,r),Ke(e),Ve(e),Ue(e),ze(e),He(e),Re(e),We(e),Ge(e),Be(e),J(e),ue(e,n),t.bridge&&(O(e,t.bridge,t.evolutionCollector),D(e,t.bridge),k(e,t.bridge)),t.policyStore&&Te(e,t.policyStore),t.evolutionCollector&&ie(e,t.evolutionCollector),d(e,t.store,t.curated),je(e)}async function dt(e){let t=await Z(e),n=ut(t,e);X.info(`MCP server configured`,{toolCount:$.length,resourceCount:2});let r=async()=>{try{let n=e.sources.map(e=>e.path).join(`, `);X.info(`Running initial index`,{sourcePaths:n});let r=await t.indexer.index(e,e=>{e.phase===`crawling`||e.phase===`done`||(e.phase===`chunking`&&e.currentFile&&X.debug(`Indexing file`,{current:e.filesProcessed+1,total:e.filesTotal,file:e.currentFile}),e.phase===`cleanup`&&X.debug(`Index cleanup`,{staleEntries:e.filesTotal-e.filesProcessed}))});X.info(`Initial index complete`,{filesProcessed:r.filesProcessed,filesSkipped:r.filesSkipped,chunksCreated:r.chunksCreated,durationMs:r.durationMs});try{await t.store.createFtsIndex()}catch(e){X.warn(`FTS index creation failed`,Y(e))}try{let e=await t.curated.reindexAll();X.info(`Curated re-index complete`,{indexed:e.indexed})}catch(e){X.error(`Curated re-index failed`,Y(e))}}catch(e){X.error(`Initial index failed; will retry on aikit_reindex`,Y(e))}},i=async()=>{X.info(`Shutting down`),await Promise.all([t.embedder.shutdown().catch(()=>{}),t.graphStore.close().catch(()=>{}),t.store.close().catch(()=>{})]),process.exit(0)};process.on(`SIGINT`,i),process.on(`SIGTERM`,i);let 
a=process.ppid,o=setInterval(()=>{try{process.kill(a,0)}catch{X.info(`Parent process died; shutting down`,{parentPid:a}),clearInterval(o),i()}},5e3);return o.unref(),{server:n,runInitialIndex:r,shutdown:i}}const ft=new Set(`batch.brainstorm.changelog.check.checkpoint.codemod.compact.config.data_transform.delegate.diff_parse.digest.encode.env.eval.evidence_map.file_summary.forge_classify.git_context.graph.guide.health.http.lane.measure.onboard.parse_output.present.process.produce_knowledge.queue.read.regex_test.reindex.remember.rename.replay.restore.schema_validate.scope_map.snippet.stash.status.stratum_card.test_run.time.update.forget.list.watch.web_fetch.web_search.workset`.split(`.`)),pt=5e3,mt=new Set(`brainstorm.changelog.check.checkpoint.codemod.data_transform.delegate.diff_parse.encode.env.eval.evidence_map.forge_classify.git_context.guide.present.health.http.lane.measure.parse_output.process.produce_knowledge.queue.regex_test.rename.replay.restore.schema_validate.snippet.stash.status.test_run.time.watch.web_fetch.web_search.workset`.split(`.`));function ht(e){oe(e),ce(e),I(e),le(e),se(e),q(e),K(e),ve(e),W(e),G(e),we(e),Ce(e),be(e),Me(e),xe(e),Se(e),B(e),V(e),H(e),U(e),z(e),L(e),R(e),Ee(e),E(e,i),De(e),je(e),Fe(e),Ke(e),Ve(e),Ue(e),ze(e),He(e),Re(e),We(e),Ge(e),Be(e),J(e)}const 
$=`analyze_dependencies.analyze_diagram.analyze_entry_points.analyze_patterns.analyze_structure.analyze_symbols.audit.batch.blast_radius.brainstorm.changelog.check.checkpoint.codemod.compact.config.data_transform.dead_symbols.delegate.diff_parse.digest.encode.env.eval.evidence_map.file_summary.find.flow_info.flow_list.flow_reset.flow_start.flow_status.flow_step.forge_classify.forge_ground.forget.git_context.graph.guide.health.http.lane.list.lookup.measure.onboard.parse_output.present.process.produce_knowledge.queue.read.regex_test.reindex.remember.rename.replay.restore.schema_validate.scope_map.search.snippet.stash.status.stratum_card.symbol.test_run.time.trace.update.watch.web_fetch.web_search.workset`.split(`.`);function gt(n,i){let l=new ct({name:n.serverName??`aikit`,version:Pe()},{capabilities:{logging:{}}}),d=`initializing`,p=``,y=!1,b=null,x=null,S=null;function ee(e){if(!e||typeof e!=`object`)return[];let t=e,n=[];for(let e of[`path`,`file`,`source_path`,`sourcePath`,`filePath`]){let r=t[e];typeof r==`string`&&r&&n.push(r)}for(let e of[`changed_files`,`paths`,`files`]){let r=t[e];if(Array.isArray(r))for(let e of r){if(typeof e==`string`){n.push(e);continue}e&&typeof e==`object`&&typeof e.path==`string`&&n.push(e.path)}}if(Array.isArray(t.sources))for(let e of t.sources)e&&typeof e==`object`&&typeof e.path==`string`&&n.push(e.path);return n}let C=()=>d===`failed`?[`❌ AI Kit initialization failed — this tool is unavailable.`,``,p?`Error: ${p}`:``,``,`**35 tools are still available** and fully functional:`,`check, eval, test_run, git_context, health, measure, web_fetch, web_search,`,`regex_test, encode, stash, checkpoint, lane, process, time, env, and more.`,``,`Try restarting the MCP server to retry initialization.`].filter(Boolean).join(`
2
2
  `):[`AI Kit is still initializing (loading embeddings model & store).`,``,`**35 tools are already available** while initialization completes — including:`,`check, eval, test_run, git_context, health, measure, web_fetch, web_search,`,`regex_test, encode, stash, checkpoint, lane, process, time, env, and more.`,``,`This tool requires the AI Kit index. Please retry in a few seconds,`,`or use one of the available tools above in the meantime.`].join(`
3
- `);o(l),h(l,n.toolPrefix??``);let w=l.sendToolListChanged.bind(l);l.sendToolListChanged=()=>{};let T=[];for(let e of $){let t=m(e),n=l.registerTool(e,{title:t.title,description:`${t.title} — initializing, available shortly`,inputSchema:{},annotations:t.annotations},async()=>({content:[{type:`text`,text:C()}]}));ft.has(e)?n.remove():T.push(n)}pt(l),l.sendToolListChanged=w;let E=l.registerResource(`aikit-status`,`aikit://status`,{description:`AI Kit status (initializing...)`,mimeType:`text/plain`},async()=>({contents:[{uri:`aikit://status`,text:`AI Kit is initializing...`,mimeType:`text/plain`}]})),te=l.registerPrompt(`_init`,{description:`AI Kit is initializing prompts...`},async()=>({messages:[{role:`user`,content:{type:`text`,text:C()}}]})),D,O=new Promise(e=>{D=e}),k,ne=new Promise(e=>{k=e}),A=()=>k?.(),j=(async()=>{await ne;let e;try{e=await Z(n)}catch(e){d=`failed`,p=e instanceof Error?e.message:String(e),X.error(`AI Kit initialization failed — server continuing with zero-dep tools only`,{error:p});return}let o=l.sendToolListChanged.bind(l);l.sendToolListChanged=()=>{};let m=l.sendPromptListChanged.bind(l);l.sendPromptListChanged=()=>{};let h=l.sendResourceListChanged.bind(l);l.sendResourceListChanged=()=>{};for(let e of T)e.remove();E.remove(),te.remove();let C=l._registeredTools??{};for(let e of ft)C[e]?.remove();let w=new u(l),O=f(l);Q(l,e,n,r(l),w,O,i,i===`smart`?(()=>{let e=S;return e?.getState?e.getState():null}):null),c(l,{curated:e.curated,store:e.store,graphStore:e.graphStore},i),l.sendToolListChanged=o,l.sendPromptListChanged=m,l.sendResourceListChanged=h,Promise.resolve(l.sendToolListChanged()).catch(()=>{}),Promise.resolve(l.sendPromptListChanged()).catch(()=>{}),Promise.resolve(l.sendResourceListChanged()).catch(()=>{});let k=l._registeredTools??{};for(let[t,n]of Object.entries(k)){if(ut.has(t))continue;let r=n.handler;n.handler=async(...n)=>{if(!e.indexer.isIndexing)return r(...n);let i=y?`re-indexing`:`running initial index`,a=new 
Promise(e=>setTimeout(()=>e({content:[{type:`text`,text:`⏳ AI Kit is ${i}. The tool "${t}" timed out waiting for index data (${dt/1e3}s).\n\nThe existing index may be temporarily locked. Please retry shortly — indexing will complete automatically.`}]}),dt));return Promise.race([r(...n),a])}}for(let[e,t]of Object.entries(k)){let n=t.handler,r=_(e);t.handler=async(...t)=>{try{return await v(()=>n(...t),r,e)}catch(t){if(t instanceof g)return{content:[{type:`text`,text:`⏳ Tool "${e}" timed out after ${r/1e3}s. This may indicate a long-running operation. Please retry or break the task into smaller steps.`}]};throw t}}}let A=Object.keys(k).length;A<$.length&&X.warn(`ALL_TOOL_NAMES count mismatch`,{expectedToolCount:$.length,registeredToolCount:A}),X.info(`MCP server configured`,{toolCount:$.length,resourceCount:4});let j=new s;j.onPressure((e,n)=>{e===`warning`&&t(),e===`critical`&&(X.warn(`Memory pressure critical — consider restarting`,{rssMB:Math.round(n/1024/1024)}),t())}),j.start();let M=new a;x=M,M.onIdle(async()=>{if(N.isRunning||e.indexer.isIndexing){X.info(`Idle cleanup deferred — background tasks still running`),M.touch();return}X.info(`Idle cleanup: closing store and graph connections`);try{await Promise.all([e.store.close().catch(()=>{}),e.graphStore.close().catch(()=>{})])}catch{}}),M.touch();for(let e of Object.values(k)){let t=e.handler;e.handler=async(...e)=>{if(M.touch(),S){let t=ee(e[0]);t.length>0&&S.prioritize(...t)}return t(...e)}}b=e,D?.(e)})(),M=async()=>{let e=await O;x?.setBusy(!0);try{let t=n.sources.map(e=>e.path).join(`, `);X.info(`Running initial index`,{sourcePaths:t});let r=await e.indexer.index(n,e=>{e.phase===`crawling`||e.phase===`done`||(e.phase===`chunking`&&e.currentFile&&X.debug(`Indexing file`,{current:e.filesProcessed+1,total:e.filesTotal,file:e.currentFile}),e.phase===`cleanup`&&X.debug(`Index cleanup`,{staleEntries:e.filesTotal-e.filesProcessed}))});y=!0,X.info(`Initial index 
complete`,{filesProcessed:r.filesProcessed,filesSkipped:r.filesSkipped,chunksCreated:r.chunksCreated,durationMs:r.durationMs});try{await e.store.createFtsIndex()}catch(e){X.warn(`FTS index creation failed`,Y(e))}try{let t=await e.curated.reindexAll();X.info(`Curated re-index complete`,{indexed:t.indexed})}catch(e){X.error(`Curated re-index failed`,Y(e))}}catch(e){X.error(`Initial index failed; will retry on aikit_reindex`,Y(e))}finally{x?.setBusy(!1)}},N=new e,re=()=>N.schedule({name:`initial-index`,fn:M}),P=process.ppid,F=setInterval(()=>{try{process.kill(P,0)}catch{X.info(`Parent process died; shutting down`,{parentPid:P}),clearInterval(F),O.then(async e=>{await Promise.all([e.embedder.shutdown().catch(()=>{}),e.graphStore.close().catch(()=>{}),e.store.close().catch(()=>{})])}).catch(()=>{}).finally(()=>process.exit(0))}},5e3);return F.unref(),{server:l,startInit:A,ready:j,runInitialIndex:re,get kb(){return b},scheduler:N,setSmartScheduler(e){S=e}}}export{$ as ALL_TOOL_NAMES,mt as createLazyServer,ct as createMcpServer,lt as createServer,Z as initializeKnowledgeBase,Q as registerMcpTools};
3
+ `);o(l),h(l,n.toolPrefix??``);let w=l.sendToolListChanged.bind(l);l.sendToolListChanged=()=>{};let T=[];for(let e of $){let t=m(e),n=l.registerTool(e,{title:t.title,description:`${t.title} — initializing, available shortly`,inputSchema:{},annotations:t.annotations},async()=>({content:[{type:`text`,text:C()}]}));mt.has(e)?n.remove():T.push(n)}ht(l),l.sendToolListChanged=w;let E=l.registerResource(`aikit-status`,`aikit://status`,{description:`AI Kit status (initializing...)`,mimeType:`text/plain`},async()=>({contents:[{uri:`aikit://status`,text:`AI Kit is initializing...`,mimeType:`text/plain`}]})),te=l.registerPrompt(`_init`,{description:`AI Kit is initializing prompts...`,argsSchema:{_prime:st(lt.string().optional(),()=>[])}},async()=>({messages:[{role:`user`,content:{type:`text`,text:C()}}]})),D,O=new Promise(e=>{D=e}),k,ne=new Promise(e=>{k=e}),A=()=>k?.(),j=(async()=>{await ne;let e;try{e=await Z(n)}catch(e){d=`failed`,p=e instanceof Error?e.message:String(e),X.error(`AI Kit initialization failed — server continuing with zero-dep tools only`,{error:p});return}let o=l.sendToolListChanged.bind(l);l.sendToolListChanged=()=>{};let m=l.sendPromptListChanged.bind(l);l.sendPromptListChanged=()=>{};let h=l.sendResourceListChanged.bind(l);l.sendResourceListChanged=()=>{};for(let e of T)e.remove();E.remove(),te.remove();let C=l._registeredTools??{};for(let e of mt)C[e]?.remove();let w=new u(l),O=f(l);Q(l,e,n,r(l),w,O,i,i===`smart`?(()=>{let e=S;return e?.getState?e.getState():null}):null),c(l,{curated:e.curated,store:e.store,graphStore:e.graphStore},i),l.sendToolListChanged=o,l.sendPromptListChanged=m,l.sendResourceListChanged=h,Promise.resolve(l.sendToolListChanged()).catch(()=>{}),Promise.resolve(l.sendPromptListChanged()).catch(()=>{}),Promise.resolve(l.sendResourceListChanged()).catch(()=>{});let k=l._registeredTools??{};for(let[t,n]of Object.entries(k)){if(ft.has(t))continue;let r=n.handler;n.handler=async(...n)=>{if(!e.indexer.isIndexing)return r(...n);let 
i=y?`re-indexing`:`running initial index`,a=new Promise(e=>setTimeout(()=>e({content:[{type:`text`,text:`⏳ AI Kit is ${i}. The tool "${t}" timed out waiting for index data (${pt/1e3}s).\n\nThe existing index may be temporarily locked. Please retry shortly — indexing will complete automatically.`}]}),pt));return Promise.race([r(...n),a])}}for(let[e,t]of Object.entries(k)){let n=t.handler,r=_(e);t.handler=async(...t)=>{try{return await v(()=>n(...t),r,e)}catch(t){if(t instanceof g)return{content:[{type:`text`,text:`⏳ Tool "${e}" timed out after ${r/1e3}s. This may indicate a long-running operation. Please retry or break the task into smaller steps.`}]};throw t}}}let A=Object.keys(k).length;A<$.length&&X.warn(`ALL_TOOL_NAMES count mismatch`,{expectedToolCount:$.length,registeredToolCount:A}),X.info(`MCP server configured`,{toolCount:$.length,resourceCount:4});let j=new s;j.onPressure((e,n)=>{e===`warning`&&t(),e===`critical`&&(X.warn(`Memory pressure critical — consider restarting`,{rssMB:Math.round(n/1024/1024)}),t())}),j.start();let M=new a;x=M,M.onIdle(async()=>{if(N.isRunning||e.indexer.isIndexing){X.info(`Idle cleanup deferred — background tasks still running`),M.touch();return}X.info(`Idle cleanup: closing store and graph connections`);try{await Promise.all([e.store.close().catch(()=>{}),e.graphStore.close().catch(()=>{})])}catch{}}),M.touch();for(let e of Object.values(k)){let t=e.handler;e.handler=async(...e)=>{if(M.touch(),S){let t=ee(e[0]);t.length>0&&S.prioritize(...t)}return t(...e)}}b=e,D?.(e)})(),M=async()=>{let e=await O;x?.setBusy(!0);try{let t=n.sources.map(e=>e.path).join(`, `);X.info(`Running initial index`,{sourcePaths:t});let r=await e.indexer.index(n,e=>{e.phase===`crawling`||e.phase===`done`||(e.phase===`chunking`&&e.currentFile&&X.debug(`Indexing file`,{current:e.filesProcessed+1,total:e.filesTotal,file:e.currentFile}),e.phase===`cleanup`&&X.debug(`Index cleanup`,{staleEntries:e.filesTotal-e.filesProcessed}))});y=!0,X.info(`Initial index 
complete`,{filesProcessed:r.filesProcessed,filesSkipped:r.filesSkipped,chunksCreated:r.chunksCreated,durationMs:r.durationMs});try{await e.store.createFtsIndex()}catch(e){X.warn(`FTS index creation failed`,Y(e))}try{let t=await e.curated.reindexAll();X.info(`Curated re-index complete`,{indexed:t.indexed})}catch(e){X.error(`Curated re-index failed`,Y(e))}}catch(e){X.error(`Initial index failed; will retry on aikit_reindex`,Y(e))}finally{x?.setBusy(!1)}},N=new e,re=()=>N.schedule({name:`initial-index`,fn:M}),P=process.ppid,F=setInterval(()=>{try{process.kill(P,0)}catch{X.info(`Parent process died; shutting down`,{parentPid:P}),clearInterval(F),O.then(async e=>{await Promise.all([e.embedder.shutdown().catch(()=>{}),e.graphStore.close().catch(()=>{}),e.store.close().catch(()=>{})])}).catch(()=>{}).finally(()=>process.exit(0))}},5e3);return F.unref(),{server:l,startInit:A,ready:j,runInitialIndex:re,get kb(){return b},scheduler:N,setSmartScheduler(e){S=e}}}export{$ as ALL_TOOL_NAMES,gt as createLazyServer,ut as createMcpServer,dt as createServer,Z as initializeKnowledgeBase,Q as registerMcpTools};
@@ -1,8 +1,8 @@
1
- import{getToolMeta as e}from"../tool-metadata.js";import{AnalyzeStructureOutputSchema as t,BlastRadiusOutputSchema as n}from"../output-schemas.js";import{createHash as r}from"node:crypto";import{z as i}from"zod";import{createLogger as a,serializeError as o}from"../../../core/dist/index.js";import{BlastRadiusAnalyzer as s,DependencyAnalyzer as c,DiagramGenerator as l,EntryPointAnalyzer as u,PatternAnalyzer as d,StructureAnalyzer as f,SymbolAnalyzer as p}from"../../../analyzers/dist/index.js";import{WasmRuntime as m}from"../../../chunker/dist/index.js";import{gitContext as h,truncateToTokenBudget as g}from"../../../tools/dist/index.js";const _=a(`tools`),v=i.number().min(100).max(5e4).optional().describe(`Maximum token budget for the response. When set, output is truncated to fit.`);function y(e,t){return t?g(e,t):e}function b(){let e=[];return m.get()||e.push(`Tree-sitter unavailable — using regex fallback, symbol/pattern confidence reduced`),e.length===0?``:`\n\n> **⚠ Caveats:** ${e.join(`; `)}`}function x(e){return(e??[]).map(e=>{if(typeof e==`string`)return e;if(e&&typeof e==`object`&&`path`in e)return typeof e.path==`string`?e.path:void 0}).filter(e=>!!e)}function S(e){let t=[],n=e.filter(e=>/\.(ts|tsx|js|jsx)$/.test(e)&&/(service|store|model|schema|migration)/i.test(e)),r=e.filter(e=>/\.(ts|tsx|js|jsx)$/.test(e)&&!n.includes(e)),i=e.filter(e=>!/\.(ts|tsx|js|jsx)$/.test(e));return(n.length>0||r.length>0||i.length>0)&&(t.push(`
1
+ import{getToolMeta as e}from"../tool-metadata.js";import{AnalyzeStructureOutputSchema as t,BlastRadiusOutputSchema as n}from"../output-schemas.js";import{z as r}from"zod";import{createHash as i}from"node:crypto";import{createLogger as a,serializeError as o}from"../../../core/dist/index.js";import{BlastRadiusAnalyzer as s,DependencyAnalyzer as c,DiagramGenerator as l,EntryPointAnalyzer as u,PatternAnalyzer as d,StructureAnalyzer as f,SymbolAnalyzer as p}from"../../../analyzers/dist/index.js";import{WasmRuntime as m}from"../../../chunker/dist/index.js";import{gitContext as h,truncateToTokenBudget as g}from"../../../tools/dist/index.js";const _=a(`tools`),v=r.number().min(100).max(5e4).optional().describe(`Maximum token budget for the response. When set, output is truncated to fit.`);function y(e,t){return t?g(e,t):e}function b(){let e=[];return m.get()||e.push(`Tree-sitter unavailable — using regex fallback, symbol/pattern confidence reduced`),e.length===0?``:`\n\n> **⚠ Caveats:** ${e.join(`; `)}`}function x(e){return(e??[]).map(e=>{if(typeof e==`string`)return e;if(e&&typeof e==`object`&&`path`in e)return typeof e.path==`string`?e.path:void 0}).filter(e=>!!e)}function S(e){let t=[],n=e.filter(e=>/\.(ts|tsx|js|jsx)$/.test(e)&&/(service|store|model|schema|migration)/i.test(e)),r=e.filter(e=>/\.(ts|tsx|js|jsx)$/.test(e)&&!n.includes(e)),i=e.filter(e=>!/\.(ts|tsx|js|jsx)$/.test(e));return(n.length>0||r.length>0||i.length>0)&&(t.push(`
2
2
 
3
3
  ### Risk Assessment`),n.length>0&&t.push(`- 🔴 **High risk** (${n.length}): ${n.slice(0,5).map(e=>`\`${e}\``).join(`, `)}`),r.length>0&&t.push(`- 🟡 **Medium risk** (${r.length}): source files`),i.length>0&&t.push(`- 🟢 **Low risk** (${i.length}): non-source files`)),t.join(`
4
4
  `)}function C(e){let t=e.replace(/\\/g,`/`);return/(^|\/)__tests__\/|\.(test|spec)\.[jt]sx?$/i.test(t)?`Tests`:/(controller|route|handler|api)/i.test(t)?`API`:/(service|store|model|schema|migration|repo|repository|db|database)/i.test(t)?`Core/Data`:/\.(ts|tsx|js|jsx)$/.test(t)?`Source`:`Config/Docs`}function w(e){if(e.length===0)return``;let t=[`Core/Data`,`API`,`Source`,`Tests`,`Config/Docs`],n=new Map;for(let t of e){let e=C(t),r=n.get(e)??[];r.push(t),n.set(e,r)}let r=[`
5
5
 
6
6
  ### Layer Classification`];for(let e of t){let t=n.get(e);if(!t?.length)continue;let i=t.slice(0,5).map(e=>`\`${e}\``).join(`, `),a=t.length>5?`, ... and ${t.length-5} more`:``;r.push(`- **${e}** (${t.length}): ${i}${a}`)}return r.join(`
7
- `)}async function T(e,t,n,i,a){try{let o=`produced/analysis/${n}/${r(`sha256`).update(i).digest(`hex`).slice(0,12)}.md`,s=r(`sha256`).update(a).digest(`hex`).slice(0,16),c=new Date().toISOString(),l=a.length>2e3?a.split(/(?=^## )/m).filter(e=>e.trim().length>0):[a],u=l.map((e,t)=>({id:r(`sha256`).update(`${o}::${t}`).digest(`hex`).slice(0,16),content:e.trim(),sourcePath:o,contentType:`produced-knowledge`,chunkIndex:t,totalChunks:l.length,startLine:0,endLine:0,fileHash:s,indexedAt:c,origin:`produced`,tags:[`analysis`,n],category:`analysis`,version:1})),d=await t.embedBatch(u.map(e=>e.content));await e.upsert(u,d),_.info(`Auto-persisted analysis`,{analyzerName:n,chunkCount:u.length})}catch(e){_.warn(`Auto-persist analysis failed`,{analyzerName:n,...o(e)})}}function E(n,r,a){let s=new f,c=e(`analyze_structure`);n.registerTool(`analyze_structure`,{title:c.title,description:`Analyze the file/directory structure of a codebase. Returns an annotated tree with language stats.`,outputSchema:t,inputSchema:{path:i.string().describe(`Root path to analyze`),max_depth:i.number().min(1).max(10).default(6).describe(`Maximum directory depth`),format:i.enum([`json`,`markdown`]).default(`markdown`).describe(`Output format`),max_tokens:v},annotations:c.annotations},async({path:e,max_depth:t,format:n,max_tokens:i})=>{try{let o=await s.analyze(e,{format:n,maxDepth:t,maxTokens:i});T(r,a,`structure`,e,o.output);let c=o.data;return{content:[{type:`text`,text:y(o.output+"\n\n---\n_Analysis auto-saved to KB. 
Next: Use `analyze_dependencies` for import graphs, or `analyze_patterns` to detect architecture patterns._",i)}],structuredContent:{files:c.fileCount??o.meta.fileCount,packages:c.packageCount??0,languages:c.languages??{},tree:o.output.slice(0,2e3)}}}catch(e){return _.error(`Analysis failed`,o(e)),{content:[{type:`text`,text:`Analysis failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}function D(t,n,r){let a=new c,s=e(`analyze_dependencies`);t.registerTool(`analyze_dependencies`,{title:s.title,description:`Analyze import/require dependencies across a codebase. Shows external packages and internal module graph.`,inputSchema:{path:i.string().describe(`Root path to analyze`),format:i.enum([`json`,`markdown`,`mermaid`]).default(`markdown`).describe(`Output format`),max_tokens:v},annotations:s.annotations},async({path:e,format:t,max_tokens:i})=>{try{let o=await a.analyze(e,{format:t});return T(n,r,`dependencies`,e,o.output),{content:[{type:`text`,text:y(o.output+"\n\n---\n_Analysis auto-saved to KB. Next: Use `analyze_symbols` to explore exported symbols, or `analyze_diagram` for visual representation._",i)}]}}catch(e){return _.error(`Analysis failed`,o(e)),{content:[{type:`text`,text:`Analysis failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}function O(t,n,r){let a=new p,s=e(`analyze_symbols`);t.registerTool(`analyze_symbols`,{title:s.title,description:`Extract exported and local symbols (functions, classes, interfaces, types, constants) from a codebase.`,inputSchema:{path:i.string().describe(`Root path to analyze`),filter:i.string().optional().describe(`Filter symbols by name substring`),format:i.enum([`json`,`markdown`]).default(`markdown`).describe(`Output format`)},annotations:s.annotations},async({path:e,filter:t,format:i})=>{try{let o=await a.analyze(e,{format:i,filter:t});return T(n,r,`symbols`,e,o.output),{content:[{type:`text`,text:o.output+b()+"\n\n---\n_Analysis auto-saved to KB. 
Next: Use `analyze_dependencies` to see import relationships, or `search` to find usage patterns._"}]}}catch(e){return _.error(`Analysis failed`,o(e)),{content:[{type:`text`,text:`Analysis failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}function k(t,n,r){let a=new d,s=e(`analyze_patterns`);t.registerTool(`analyze_patterns`,{title:s.title,description:`Detect architectural patterns, frameworks, and conventions in a codebase using directory structure and code heuristics.`,inputSchema:{path:i.string().describe(`Root path to analyze`)},annotations:s.annotations},async({path:e})=>{try{let t=await a.analyze(e);return T(n,r,`patterns`,e,t.output),{content:[{type:`text`,text:t.output+b()+"\n\n---\n_Analysis auto-saved to KB. Next: Use `analyze_entry_points` to find Lambda handlers and main exports, or `produce_knowledge` for full analysis._"}]}}catch(e){return _.error(`Analysis failed`,o(e)),{content:[{type:`text`,text:`Analysis failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}function A(t,n,r){let a=new u,s=e(`analyze_entry_points`);t.registerTool(`analyze_entry_points`,{title:s.title,description:`Find entry points: Lambda handlers, main exports, CLI bins, and server start scripts.`,inputSchema:{path:i.string().describe(`Root path to analyze`)},annotations:s.annotations},async({path:e})=>{try{let t=await a.analyze(e);return T(n,r,`entry-points`,e,t.output),{content:[{type:`text`,text:t.output+b()+"\n\n---\n_Analysis auto-saved to KB. Next: Use `analyze_dependencies` to see what each entry point imports, or `produce_knowledge` for comprehensive analysis._"}]}}catch(e){return _.error(`Analysis failed`,o(e)),{content:[{type:`text`,text:`Analysis failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}function j(t,n,r){let a=new l,s=e(`analyze_diagram`);t.registerTool(`analyze_diagram`,{title:s.title,description:`Generate a Mermaid diagram of the codebase architecture or dependency graph. 
Returns Mermaid flowchart syntax — paste into any Markdown renderer or Mermaid editor to visualize.`,inputSchema:{path:i.string().describe(`Root path to analyze`),diagram_type:i.enum([`architecture`,`dependencies`]).default(`architecture`).describe(`Type of diagram`)},annotations:s.annotations},async({path:e,diagram_type:t})=>{try{let i=await a.analyze(e,{diagramType:t});return T(n,r,`diagram`,e,i.output),{content:[{type:`text`,text:i.output+"\n\n---\n_Analysis auto-saved to KB. Next: Use `analyze_structure` for detailed file tree, or `produce_knowledge` for comprehensive analysis._"}]}}catch(e){return _.error(`Diagram generation failed`,o(e)),{content:[{type:`text`,text:`Diagram generation failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}function M(t,r,a,c){let l=new s,u=e(`blast_radius`);t.registerTool(`blast_radius`,{title:u.title,description:`Given a list of changed files, trace the dependency graph to find all affected files (direct + transitive importers) and their tests. Useful for scoping code reviews and impact analysis.`,outputSchema:n,inputSchema:{path:i.string().describe(`Root path of the codebase`),files:i.array(i.string()).optional().describe(`Changed file paths (relative to root). If omitted, auto-detects from git status.`),max_depth:i.number().min(1).max(20).default(5).describe(`Maximum transitive dependency depth`),format:i.enum([`json`,`markdown`]).default(`markdown`).describe(`Output format`),max_tokens:v},annotations:u.annotations},async({path:e,files:t,max_depth:n,format:i,max_tokens:s})=>{try{let o=t??[];if(o.length===0)try{let t=await h({cwd:e,includeDiff:!1});if(o=Array.from(new Set([...x(t.status?.staged),...x(t.status?.modified),...x(t.status?.untracked)])),o.length===0)return{content:[{type:`text`,text:"No changed files detected from git status. Provide `files` explicitly or make changes first."}]}}catch{return{content:[{type:`text`,text:"Could not detect changed files from git. 
Provide `files` explicitly."}],isError:!0}}let u=await l.analyze(e,{files:o,maxDepth:n,format:i}),d=``;if(c)try{let e=new Set;for(let t of o){let n=await c.findNodes({sourcePath:t,limit:10});for(let t of n){let n=await c.getNeighbors(t.id,{direction:`incoming`,edgeType:`imports`});for(let t of n.nodes){let n=t.sourcePath??t.name;o.includes(n)||e.add(n)}}}e.size>0&&(d=`\n\n### Graph-discovered importers (${e.size})\n`+[...e].slice(0,20).map(e=>`- \`${e}\``).join(`
8
- `),e.size>20&&(d+=`\n- ... and ${e.size-20} more`))}catch{}let f=w(o),p=S(o),m=u.output+d+f+p+b()+"\n\n---\n_Analysis auto-saved to KB. Next: Use `analyze_dependencies` to see the full import graph, or `analyze_symbols` to inspect affected exports._";T(r,a,`blast-radius`,e,m);let g=new Set;if(c)for(let e of o)try{let t=await c.findNodes({sourcePath:e,limit:10});for(let e of t){let t=await c.getNeighbors(e.id,{direction:`incoming`,edgeType:`imports`});for(let e of t.nodes){let t=e.sourcePath??e.name;o.includes(t)||g.add(t)}}}catch{}let _=[...g].map(e=>({path:e,impact:`transitive`,reason:`imports changed file`})),v=o.length>10?`high`:o.length>3?`medium`:`low`;return{content:[{type:`text`,text:y(m,s)}],structuredContent:{changedFiles:o,affectedFiles:_,totalAffected:_.length,riskLevel:v}}}catch(e){return _.error(`Blast radius analysis failed`,o(e)),{content:[{type:`text`,text:`Blast radius analysis failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}export{D as registerAnalyzeDependenciesTool,j as registerAnalyzeDiagramTool,A as registerAnalyzeEntryPointsTool,k as registerAnalyzePatternsTool,E as registerAnalyzeStructureTool,O as registerAnalyzeSymbolsTool,M as registerBlastRadiusTool};
7
+ `)}async function T(e,t,n,r,a){try{let o=`produced/analysis/${n}/${i(`sha256`).update(r).digest(`hex`).slice(0,12)}.md`,s=i(`sha256`).update(a).digest(`hex`).slice(0,16),c=new Date().toISOString(),l=a.length>2e3?a.split(/(?=^## )/m).filter(e=>e.trim().length>0):[a],u=l.map((e,t)=>({id:i(`sha256`).update(`${o}::${t}`).digest(`hex`).slice(0,16),content:e.trim(),sourcePath:o,contentType:`produced-knowledge`,chunkIndex:t,totalChunks:l.length,startLine:0,endLine:0,fileHash:s,indexedAt:c,origin:`produced`,tags:[`analysis`,n],category:`analysis`,version:1})),d=await t.embedBatch(u.map(e=>e.content));await e.upsert(u,d),_.info(`Auto-persisted analysis`,{analyzerName:n,chunkCount:u.length})}catch(e){_.warn(`Auto-persist analysis failed`,{analyzerName:n,...o(e)})}}function E(n,i,a){let s=new f,c=e(`analyze_structure`);n.registerTool(`analyze_structure`,{title:c.title,description:`Analyze the file/directory structure of a codebase. Returns an annotated tree with language stats.`,outputSchema:t,inputSchema:{path:r.string().describe(`Root path to analyze`),max_depth:r.number().min(1).max(10).default(6).describe(`Maximum directory depth`),format:r.enum([`json`,`markdown`]).default(`markdown`).describe(`Output format`),max_tokens:v},annotations:c.annotations},async({path:e,max_depth:t,format:n,max_tokens:r})=>{try{let o=await s.analyze(e,{format:n,maxDepth:t,maxTokens:r});T(i,a,`structure`,e,o.output);let c=o.data;return{content:[{type:`text`,text:y(o.output+"\n\n---\n_Analysis auto-saved to KB. 
Next: Use `analyze_dependencies` for import graphs, or `analyze_patterns` to detect architecture patterns._",r)}],structuredContent:{files:c.fileCount??o.meta.fileCount,packages:c.packageCount??0,languages:c.languages??{},tree:o.output.slice(0,2e3)}}}catch(e){return _.error(`Analysis failed`,o(e)),{content:[{type:`text`,text:`Analysis failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}function D(t,n,i){let a=new c,s=e(`analyze_dependencies`);t.registerTool(`analyze_dependencies`,{title:s.title,description:`Analyze import/require dependencies across a codebase. Shows external packages and internal module graph.`,inputSchema:{path:r.string().describe(`Root path to analyze`),format:r.enum([`json`,`markdown`,`mermaid`]).default(`markdown`).describe(`Output format`),max_tokens:v},annotations:s.annotations},async({path:e,format:t,max_tokens:r})=>{try{let o=await a.analyze(e,{format:t});return T(n,i,`dependencies`,e,o.output),{content:[{type:`text`,text:y(o.output+"\n\n---\n_Analysis auto-saved to KB. Next: Use `analyze_symbols` to explore exported symbols, or `analyze_diagram` for visual representation._",r)}]}}catch(e){return _.error(`Analysis failed`,o(e)),{content:[{type:`text`,text:`Analysis failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}function O(t,n,i){let a=new p,s=e(`analyze_symbols`);t.registerTool(`analyze_symbols`,{title:s.title,description:`Extract exported and local symbols (functions, classes, interfaces, types, constants) from a codebase.`,inputSchema:{path:r.string().describe(`Root path to analyze`),filter:r.string().optional().describe(`Filter symbols by name substring`),format:r.enum([`json`,`markdown`]).default(`markdown`).describe(`Output format`)},annotations:s.annotations},async({path:e,filter:t,format:r})=>{try{let o=await a.analyze(e,{format:r,filter:t});return T(n,i,`symbols`,e,o.output),{content:[{type:`text`,text:o.output+b()+"\n\n---\n_Analysis auto-saved to KB. 
Next: Use `analyze_dependencies` to see import relationships, or `search` to find usage patterns._"}]}}catch(e){return _.error(`Analysis failed`,o(e)),{content:[{type:`text`,text:`Analysis failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}function k(t,n,i){let a=new d,s=e(`analyze_patterns`);t.registerTool(`analyze_patterns`,{title:s.title,description:`Detect architectural patterns, frameworks, and conventions in a codebase using directory structure and code heuristics.`,inputSchema:{path:r.string().describe(`Root path to analyze`)},annotations:s.annotations},async({path:e})=>{try{let t=await a.analyze(e);return T(n,i,`patterns`,e,t.output),{content:[{type:`text`,text:t.output+b()+"\n\n---\n_Analysis auto-saved to KB. Next: Use `analyze_entry_points` to find Lambda handlers and main exports, or `produce_knowledge` for full analysis._"}]}}catch(e){return _.error(`Analysis failed`,o(e)),{content:[{type:`text`,text:`Analysis failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}function A(t,n,i){let a=new u,s=e(`analyze_entry_points`);t.registerTool(`analyze_entry_points`,{title:s.title,description:`Find entry points: Lambda handlers, main exports, CLI bins, and server start scripts.`,inputSchema:{path:r.string().describe(`Root path to analyze`)},annotations:s.annotations},async({path:e})=>{try{let t=await a.analyze(e);return T(n,i,`entry-points`,e,t.output),{content:[{type:`text`,text:t.output+b()+"\n\n---\n_Analysis auto-saved to KB. Next: Use `analyze_dependencies` to see what each entry point imports, or `produce_knowledge` for comprehensive analysis._"}]}}catch(e){return _.error(`Analysis failed`,o(e)),{content:[{type:`text`,text:`Analysis failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}function j(t,n,i){let a=new l,s=e(`analyze_diagram`);t.registerTool(`analyze_diagram`,{title:s.title,description:`Generate a Mermaid diagram of the codebase architecture or dependency graph. 
Returns Mermaid flowchart syntax — paste into any Markdown renderer or Mermaid editor to visualize.`,inputSchema:{path:r.string().describe(`Root path to analyze`),diagram_type:r.enum([`architecture`,`dependencies`]).default(`architecture`).describe(`Type of diagram`)},annotations:s.annotations},async({path:e,diagram_type:t})=>{try{let r=await a.analyze(e,{diagramType:t});return T(n,i,`diagram`,e,r.output),{content:[{type:`text`,text:r.output+"\n\n---\n_Analysis auto-saved to KB. Next: Use `analyze_structure` for detailed file tree, or `produce_knowledge` for comprehensive analysis._"}]}}catch(e){return _.error(`Diagram generation failed`,o(e)),{content:[{type:`text`,text:`Diagram generation failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}function M(t,i,a,c){let l=new s,u=e(`blast_radius`);t.registerTool(`blast_radius`,{title:u.title,description:`Given a list of changed files, trace the dependency graph to find all affected files (direct + transitive importers) and their tests. Useful for scoping code reviews and impact analysis.`,outputSchema:n,inputSchema:{path:r.string().describe(`Root path of the codebase`),files:r.array(r.string()).optional().describe(`Changed file paths (relative to root). If omitted, auto-detects from git status.`),max_depth:r.number().min(1).max(20).default(5).describe(`Maximum transitive dependency depth`),format:r.enum([`json`,`markdown`]).default(`markdown`).describe(`Output format`),max_tokens:v},annotations:u.annotations},async({path:e,files:t,max_depth:n,format:r,max_tokens:s})=>{try{let o=t??[];if(o.length===0)try{let t=await h({cwd:e,includeDiff:!1});if(o=Array.from(new Set([...x(t.status?.staged),...x(t.status?.modified),...x(t.status?.untracked)])),o.length===0)return{content:[{type:`text`,text:"No changed files detected from git status. Provide `files` explicitly or make changes first."}]}}catch{return{content:[{type:`text`,text:"Could not detect changed files from git. 
Provide `files` explicitly."}],isError:!0}}let u=await l.analyze(e,{files:o,maxDepth:n,format:r}),d=``;if(c)try{let e=new Set;for(let t of o){let n=await c.findNodes({sourcePath:t,limit:10});for(let t of n){let n=await c.getNeighbors(t.id,{direction:`incoming`,edgeType:`imports`});for(let t of n.nodes){let n=t.sourcePath??t.name;o.includes(n)||e.add(n)}}}e.size>0&&(d=`\n\n### Graph-discovered importers (${e.size})\n`+[...e].slice(0,20).map(e=>`- \`${e}\``).join(`
8
+ `),e.size>20&&(d+=`\n- ... and ${e.size-20} more`))}catch{}let f=w(o),p=S(o),m=u.output+d+f+p+b()+"\n\n---\n_Analysis auto-saved to KB. Next: Use `analyze_dependencies` to see the full import graph, or `analyze_symbols` to inspect affected exports._";T(i,a,`blast-radius`,e,m);let g=new Set;if(c)for(let e of o)try{let t=await c.findNodes({sourcePath:e,limit:10});for(let e of t){let t=await c.getNeighbors(e.id,{direction:`incoming`,edgeType:`imports`});for(let e of t.nodes){let t=e.sourcePath??e.name;o.includes(t)||g.add(t)}}}catch{}let _=[...g].map(e=>({path:e,impact:`transitive`,reason:`imports changed file`})),v=o.length>10?`high`:o.length>3?`medium`:`low`;return{content:[{type:`text`,text:y(m,s)}],structuredContent:{changedFiles:o,affectedFiles:_,totalAffected:_.length,riskLevel:v}}}catch(e){return _.error(`Blast radius analysis failed`,o(e)),{content:[{type:`text`,text:`Blast radius analysis failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}export{D as registerAnalyzeDependenciesTool,j as registerAnalyzeDiagramTool,A as registerAnalyzeEntryPointsTool,k as registerAnalyzePatternsTool,E as registerAnalyzeStructureTool,O as registerAnalyzeSymbolsTool,M as registerBlastRadiusTool};
@@ -1,2 +1,2 @@
1
- import{getToolMeta as e}from"../tool-metadata.js";import{createTaskRunner as t}from"../task-manager.js";import{createHash as n}from"node:crypto";import{z as r}from"zod";import{createLogger as i,serializeError as a}from"../../../core/dist/index.js";import{onboard as o}from"../../../tools/dist/index.js";const s=i(`tools`);let c=!1;async function l(e,t,r){for(let i of r.steps)if(!(i.status!==`success`||!i.output))try{let a=n(`sha256`).update(r.path).digest(`hex`).slice(0,12),o=`produced/onboard/${i.name}/${a}.md`,s=n(`sha256`).update(i.output).digest(`hex`).slice(0,16),c=new Date().toISOString(),l=i.output.length>2e3?i.output.split(/(?=^## )/m).filter(e=>e.trim().length>0):[i.output],u=l.map((e,t)=>({id:n(`sha256`).update(`${o}::${t}`).digest(`hex`).slice(0,16),content:e.trim(),sourcePath:o,contentType:`produced-knowledge`,chunkIndex:t,totalChunks:l.length,startLine:0,endLine:0,fileHash:s,indexedAt:c,origin:`produced`,tags:[`onboard`,i.name],category:`analysis`,version:1})),d=await t.embedBatch(u.map(e=>e.content));await e.upsert(u,d)}catch(e){s.warn(`Auto-persist onboard step failed`,{stepName:i.name,...a(e)})}}async function u(e,t,r){if(r.autoRemember?.length)for(let i of r.autoRemember)try{let r=n(`sha256`).update(`onboard-remember::${i.title}`).digest(`hex`).slice(0,16),a=new Date().toISOString(),o={id:r,content:`# ${i.title}\n\n${i.content}`,sourcePath:`curated/onboard/${i.category}/${r}.md`,contentType:`curated`,chunkIndex:0,totalChunks:1,startLine:0,endLine:0,fileHash:n(`sha256`).update(i.content).digest(`hex`).slice(0,16),indexedAt:a,origin:`curated`,tags:i.tags,category:i.category,version:1},[s]=await t.embedBatch([o.content]);await e.upsert([o],[s])}catch(e){s.warn(`Auto-persist remember entry failed`,{title:i.title,...a(e)})}}function d(n,i,d,f){let p=e(`onboard`);n.registerTool(`onboard`,{title:p.title,description:`First-time codebase onboarding: runs all analysis tools (structure, dependencies, entry-points, symbols, patterns, diagram) in one command. 
Results are auto-persisted to KB. Use mode=generate to also write structured output to .ai/kb/ directory.`,inputSchema:{path:r.string().describe(`Root path of the codebase to onboard`),mode:r.enum([`memory`,`generate`]).default(`generate`).describe(`Output mode: generate (default) = persist to AI Kit + write .ai/kb/ files; memory = AI Kit vector store only`),out_dir:r.string().optional().describe(`Custom output directory for generate mode (default: <path>/.ai/kb)`)},annotations:p.annotations},async({path:e,mode:n,out_dir:r},p)=>{try{if(c)return{content:[{type:`text`,text:`Onboard is already running. Please wait for it to complete before starting another.`}]};c=!0,s.info(`Starting onboard`,{path:e,mode:n});let m=await o({path:e,mode:n,outDir:r??f?.onboardDir}),h=t(p).createTask(`Onboard`,m.steps.length);for(let e=0;e<m.steps.length;e++){let t=m.steps[e];h.progress(e,`${t.name}: ${t.status}`)}h.complete(`Onboard complete: ${m.steps.filter(e=>e.status===`success`).length}/${m.steps.length} steps succeeded`),l(i,d,m),m.autoRemember?.length&&u(i,d,m).catch(e=>{s.warn(`Auto-persist autoRemember failed`,a(e))});let g=[`## Onboard Complete`,``,`**Path:** \`${m.path}\``,`**Mode:** ${m.mode}`,`**Duration:** ${m.totalDurationMs}ms`,``];m.outDir&&(g.push(`**Output directory:** \`${m.outDir}\``),g.push(``)),g.push(`### Analysis Results`,``);let _=[],v=[];for(let e of m.steps)e.status===`success`?_.push(`- ✓ **${e.name}** (${e.durationMs}ms) — ${e.output.length} chars`):v.push(`- ✗ **${e.name}** — ${e.error}`);g.push(..._),v.length>0&&g.push(``,`### Failed`,``,...v),g.push(``,`---`,``);for(let e of m.steps)e.status===`success`&&g.push(`### ${e.name}`,``,e.output,``,`---`,``);return g.push(`_All results auto-saved to KB.`,m.mode===`generate`?` Files written to \`${m.outDir}\`.`:``," Next: Use `search` to query the knowledge, or `remember` to add custom insights._"),{content:[{type:`text`,text:g.join(`
1
+ import{getToolMeta as e}from"../tool-metadata.js";import{createTaskRunner as t}from"../task-manager.js";import{z as n}from"zod";import{createHash as r}from"node:crypto";import{createLogger as i,serializeError as a}from"../../../core/dist/index.js";import{onboard as o}from"../../../tools/dist/index.js";const s=i(`tools`);let c=!1;async function l(e,t,n){for(let i of n.steps)if(!(i.status!==`success`||!i.output))try{let a=r(`sha256`).update(n.path).digest(`hex`).slice(0,12),o=`produced/onboard/${i.name}/${a}.md`,s=r(`sha256`).update(i.output).digest(`hex`).slice(0,16),c=new Date().toISOString(),l=i.output.length>2e3?i.output.split(/(?=^## )/m).filter(e=>e.trim().length>0):[i.output],u=l.map((e,t)=>({id:r(`sha256`).update(`${o}::${t}`).digest(`hex`).slice(0,16),content:e.trim(),sourcePath:o,contentType:`produced-knowledge`,chunkIndex:t,totalChunks:l.length,startLine:0,endLine:0,fileHash:s,indexedAt:c,origin:`produced`,tags:[`onboard`,i.name],category:`analysis`,version:1})),d=await t.embedBatch(u.map(e=>e.content));await e.upsert(u,d)}catch(e){s.warn(`Auto-persist onboard step failed`,{stepName:i.name,...a(e)})}}async function u(e,t,n){if(n.autoRemember?.length)for(let i of n.autoRemember)try{let n=r(`sha256`).update(`onboard-remember::${i.title}`).digest(`hex`).slice(0,16),a=new Date().toISOString(),o={id:n,content:`# ${i.title}\n\n${i.content}`,sourcePath:`curated/onboard/${i.category}/${n}.md`,contentType:`curated`,chunkIndex:0,totalChunks:1,startLine:0,endLine:0,fileHash:r(`sha256`).update(i.content).digest(`hex`).slice(0,16),indexedAt:a,origin:`curated`,tags:i.tags,category:i.category,version:1},[s]=await t.embedBatch([o.content]);await e.upsert([o],[s])}catch(e){s.warn(`Auto-persist remember entry failed`,{title:i.title,...a(e)})}}function d(r,i,d,f){let p=e(`onboard`);r.registerTool(`onboard`,{title:p.title,description:`First-time codebase onboarding: runs all analysis tools (structure, dependencies, entry-points, symbols, patterns, diagram) in one command. 
Results are auto-persisted to KB. Use mode=generate to also write structured output to .ai/kb/ directory.`,inputSchema:{path:n.string().describe(`Root path of the codebase to onboard`),mode:n.enum([`memory`,`generate`]).default(`generate`).describe(`Output mode: generate (default) = persist to AI Kit + write .ai/kb/ files; memory = AI Kit vector store only`),out_dir:n.string().optional().describe(`Custom output directory for generate mode (default: <path>/.ai/kb)`)},annotations:p.annotations},async({path:e,mode:n,out_dir:r},p)=>{try{if(c)return{content:[{type:`text`,text:`Onboard is already running. Please wait for it to complete before starting another.`}]};c=!0,s.info(`Starting onboard`,{path:e,mode:n});let m=await o({path:e,mode:n,outDir:r??f?.onboardDir}),h=t(p).createTask(`Onboard`,m.steps.length);for(let e=0;e<m.steps.length;e++){let t=m.steps[e];h.progress(e,`${t.name}: ${t.status}`)}h.complete(`Onboard complete: ${m.steps.filter(e=>e.status===`success`).length}/${m.steps.length} steps succeeded`),l(i,d,m),m.autoRemember?.length&&u(i,d,m).catch(e=>{s.warn(`Auto-persist autoRemember failed`,a(e))});let g=[`## Onboard Complete`,``,`**Path:** \`${m.path}\``,`**Mode:** ${m.mode}`,`**Duration:** ${m.totalDurationMs}ms`,``];m.outDir&&(g.push(`**Output directory:** \`${m.outDir}\``),g.push(``)),g.push(`### Analysis Results`,``);let _=[],v=[];for(let e of m.steps)e.status===`success`?_.push(`- ✓ **${e.name}** (${e.durationMs}ms) — ${e.output.length} chars`):v.push(`- ✗ **${e.name}** — ${e.error}`);g.push(..._),v.length>0&&g.push(``,`### Failed`,``,...v),g.push(``,`---`,``);for(let e of m.steps)e.status===`success`&&g.push(`### ${e.name}`,``,e.output,``,`---`,``);return g.push(`_All results auto-saved to KB.`,m.mode===`generate`?` Files written to \`${m.outDir}\`.`:``," Next: Use `search` to query the knowledge, or `remember` to add custom insights._"),{content:[{type:`text`,text:g.join(`
2
2
  `)}]}}catch(e){return s.error(`Onboard failed`,a(e)),{content:[{type:`text`,text:`Onboard failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}finally{c=!1}})}export{d as registerOnboardTool};
@@ -1,5 +1,5 @@
1
- import{getToolMeta as e}from"../tool-metadata.js";import{SearchOutputSchema as t}from"../output-schemas.js";import{fanOutFtsSearch as n,fanOutSearch as r,openWorkspaceStores as i,resolveWorkspaces as a}from"../cross-workspace.js";import{curatedResourceLink as o,extractCuratedPath as s}from"../resource-links.js";import{basename as c}from"node:path";import{stat as l}from"node:fs/promises";import{z as u}from"zod";import{CONTENT_TYPES as d,KNOWLEDGE_ORIGINS as f,SOURCE_TYPES as p,computePartitionKey as m,createLogger as h,serializeError as g}from"../../../core/dist/index.js";import{bookendReorder as _,graphAugmentSearch as v,stashGet as y,truncateToTokenBudget as b}from"../../../tools/dist/index.js";import{mergeResults as x}from"../../../enterprise-bridge/dist/index.js";const S=h(`tools`);function ee(e){let t=[],n=c(process.cwd());n&&t.push(`[project: ${n}]`);let r=y(`__context_boost`);return r&&t.push(`[focus: ${r.value}]`),t.length===0?e:`${t.join(` `)} ${e}`}async function C(e,t,n,r,i){if(!e||t>=e.config.fallbackThreshold&&n.length>0)return{results:n,triggered:!1,cacheHit:!1};let a=!1;try{let t=e.cache.get(r);return t?a=!0:(t=await e.client.search(r,i),t.length>0&&e.cache.set(r,t)),t.length>0?{results:x(n,t,i).map(e=>({record:{id:`er:${e.sourcePath}`,content:e.content,sourcePath:e.source===`er`?`[ER] ${e.sourcePath}`:e.sourcePath,startLine:e.startLine??0,endLine:e.endLine??0,contentType:e.contentType??`documentation`,headingPath:e.headingPath,origin:e.source===`er`?`curated`:e.origin??`indexed`,category:e.category,tags:e.tags??[],chunkIndex:0,totalChunks:1,fileHash:``,indexedAt:new Date().toISOString(),version:1},score:e.score})),triggered:!0,cacheHit:a}:{results:n,triggered:!0,cacheHit:a}}catch(e){return S.warn(`ER fallback failed`,g(e)),{results:n,triggered:!0,cacheHit:a}}}function te(e,t,n=60){let r=new Map;for(let t=0;t<e.length;t++){let i=e[t];r.set(i.record.id,{record:i.record,score:1/(n+t+1)})}for(let e=0;e<t.length;e++){let 
i=t[e],a=r.get(i.record.id);a?a.score+=1/(n+e+1):r.set(i.record.id,{record:i.record,score:1/(n+e+1)})}return[...r.values()].sort((e,t)=>t.score-e.score).map(({record:e,score:t})=>({record:e,score:t}))}function w(e,t){let n=t.toLowerCase().split(/\s+/).filter(e=>e.length>=2);return n.length<2?e:e.map(e=>{let t=e.record.content.toLowerCase();if(t.length>5e3)return e;let r=n.map(e=>{let n=[],r=t.indexOf(e);for(;r!==-1&&n.length<10;)n.push(r),r=t.indexOf(e,r+1);return n});if(r.some(e=>e.length===0))return e;let i=t.length;for(let e of r[0]){let t=e,a=e+n[0].length;for(let i=1;i<r.length;i++){let o=r[i][0],s=Math.abs(o-e);for(let t=1;t<r[i].length;t++){let n=Math.abs(r[i][t]-e);n<s&&(s=n,o=r[i][t])}t=Math.min(t,o),a=Math.max(a,o+n[i].length)}i=Math.min(i,a-t)}let a=1+.25/(1+i/200);return{record:e.record,score:e.score*a}}).sort((e,t)=>t.score-e.score)}function T(e,t,n=8){let r=new Set(t.toLowerCase().split(/\s+/).filter(e=>e.length>=2)),i=new Map,a=e.length;for(let t of e){let e=t.record.content.split(/[^a-zA-Z0-9_]+/).filter(e=>e.length>=3&&!E.has(e.toLowerCase())),n=new Set;for(let t of e){let e=t.toLowerCase();/[_A-Z]/.test(t)&&i.set(`__id__${e}`,1),n.has(e)||(n.add(e),i.set(e,(i.get(e)??0)+1))}}let o=[];for(let[e,t]of i){if(e.startsWith(`__id__`)||r.has(e)||t>a*.8)continue;let n=Math.log(a/t),s=i.has(`__id__${e}`)?1:0,c=e.length>8?.5:0;o.push({term:e,score:n+s+c})}return o.sort((e,t)=>t.score-e.score).slice(0,n).map(e=>e.term)}const E=new Set(`the.and.for.are.but.not.you.all.can.had.her.was.one.our.out.has.have.from.this.that.with.they.been.said.each.which.their.will.other.about.many.then.them.these.some.would.make.like.into.could.time.very.when.come.just.know.take.people.also.back.after.only.more.than.over.such.import.export.const.function.return.true.false.null.undefined.string.number.boolean.void.type.interface`.split(`.`));async function D(e,t){try{let n=await e.getStats();if(!n.lastIndexedAt)return;let r=new Date(n.lastIndexedAt).getTime(),i=Date.now(),a=[...new 
Set(t.map(e=>e.record.sourcePath))].filter(e=>!e.startsWith(`[ER]`)).slice(0,5);if(a.length===0)return;let o=0;for(let e of a)try{(await l(e)).mtimeMs>r&&o++}catch{o++}if(o>0){let e=i-r,t=Math.floor(e/6e4),n=t<1?`<1 min`:`${t} min`;return`> ⚠️ **Index may be stale** — ${o} file(s) modified since last index (${n} ago). Use \`reindex\` to refresh.`}}catch{}}function O(c,l,h,y,x,E,O){let k=e(`search`);c.registerTool(`search`,{title:k.title,description:`Search AI Kit for code, docs, and prior decisions. Default choice for discovery. Modes: hybrid (default), semantic, keyword. For multi-strategy precision queries use find; for a known file path use lookup.`,outputSchema:t,inputSchema:{query:u.string().max(5e3).describe(`Natural language search query`),limit:u.number().min(1).max(20).default(5).describe(`Maximum results to return`),search_mode:u.enum([`hybrid`,`semantic`,`keyword`]).default(`hybrid`).describe(`Search strategy: hybrid (vector + FTS + RRF fusion, default), semantic (vector only), keyword (FTS only)`),content_type:u.enum(d).optional().describe(`Filter by content type`),source_type:u.enum(p).optional().describe(`Coarse filter: "source" (code only), "documentation" (md, curated), "test", "config". Overrides content_type if both set.`),origin:u.enum(f).optional().describe(`Filter by knowledge origin`),category:u.string().optional().describe(`Filter by category (e.g., decisions, patterns, conventions)`),tags:u.array(u.string()).optional().describe(`Filter by tags (returns results matching ANY of the specified tags)`),min_score:u.number().min(0).max(1).default(.25).describe(`Minimum similarity score`),graph_hops:u.number().min(0).max(3).default(1).describe(`Number of graph hops to augment results with connected entities (0 = disabled, 1 = direct connections, 2-3 = deeper traversal). Default 1 provides module/symbol context automatically.`),max_tokens:u.number().min(100).max(5e4).optional().describe(`Maximum token budget for the response. 
When set, output is truncated to fit.`),dedup:u.enum([`file`,`chunk`]).default(`chunk`).describe(`Deduplication mode: "chunk" (default, show all matching chunks) or "file" (collapse chunks from same file into single result with merged line ranges)`),workspaces:u.array(u.string()).optional().describe(`Cross-workspace search: partition names or folder basenames to include. Use ["*"] for all registered workspaces. Only works in user-level install mode.`)},annotations:k.annotations},async({query:e,limit:t,search_mode:c,content_type:u,source_type:d,origin:f,category:p,tags:k,min_score:ne,graph_hops:A,max_tokens:j,dedup:M,workspaces:N})=>{try{let P={limit:t,minScore:ne,contentType:u,sourceType:d,origin:f,category:p,tags:k},F,I=!1,L=!1,R=``,z=ee(e);if(c===`keyword`)F=await h.ftsSearch(e,P),F=F.slice(0,t);else if(c===`semantic`){let n=await l.embedQuery(z);F=await h.search(n,P);let r=await C(x,F[0]?.score??0,F,e,t);F=r.results,I=r.triggered,L=r.cacheHit}else{let n=await l.embedQuery(z),[r,i]=await Promise.all([h.search(n,{...P,limit:t*2}),h.ftsSearch(e,{...P,limit:t*2}).catch(()=>[])]);F=te(r,i).slice(0,t);let a=await C(x,r[0]?.score??0,F,e,t);F=a.results,I=a.triggered,L=a.cacheHit}E&&E.recordSearch(e,I,L),F.length>1&&(F=w(F,e));let B=``;if(N&&N.length>0){let o=a(N,m(process.cwd()));if(o.length>0){let{stores:a,closeAll:s}=await i(o);try{let i;i=c===`keyword`?await n(a,e,{...P,limit:t}):await r(a,await l.embedQuery(e),{...P,limit:t});for(let e of i)F.push({record:{...e.record,sourcePath:`[${e.workspace}] ${e.record.sourcePath}`},score:e.score});F=F.sort((e,t)=>t.score-e.score).slice(0,t),B=` + ${o.length} workspace(s)`}finally{await s()}}}if(M===`file`&&F.length>1){let e=new Map;for(let t of F){let 
n=t.record.sourcePath,r=e.get(n);r?(t.score>r.best.score&&(r.best=t),r.ranges.push({start:t.record.startLine,end:t.record.endLine})):e.set(n,{best:t,ranges:[{start:t.record.startLine,end:t.record.endLine}]})}F=[...e.values()].sort((e,t)=>t.best.score-e.best.score).map(({best:e,ranges:t})=>({record:{...e.record,content:t.length>1?`${e.record.content}\n\n_Matched ${t.length} sections: ${t.sort((e,t)=>e.start-t.start).map(e=>`L${e.start}-${e.end}`).join(`, `)}_`:e.record.content},score:e.score}))}if(F.length===0){if(O?.available)try{let t=(await O.createMessage({prompt:`The search query "${e}" returned 0 results in AI Kit code search. Suggest ONE alternative search query that might find relevant results. Reply with ONLY the alternative query, nothing else.`,systemPrompt:`You are a search query optimizer for AI Kit code search. Generate a single alternative query.`,maxTokens:100})).text.trim().split(`
2
- `)[0].slice(0,500);if(t&&t!==e){let n=await l.embedQuery(t),r=await h.search(n,P);r.length>0&&(F=r,R=`> _Original query "${e}" returned 0 results. Auto-reformulated to "${t}"._\n\n`,S.info(`Smart search fallback succeeded`,{originalQuery:e,altQuery:t,resultCount:r.length}))}}catch(e){S.debug(`Smart search fallback failed`,{error:String(e)})}if(F.length===0)return{content:[{type:`text`,text:`No results found for the given query.`}],structuredContent:{results:[],totalResults:0,searchMode:c,query:e}}}let V,H;if(A>0&&!y&&(H="> **Note:** `graph_hops` was set but no graph store is available. Graph augmentation skipped."),A>0&&y)try{let e=await v(y,F.map(e=>({recordId:e.record.id,score:e.score,sourcePath:e.record.sourcePath})),{hops:A,maxPerHit:5});V=new Map;for(let t of e)if(t.graphContext.nodes.length>0){let e=t.graphContext.nodes.slice(0,5).map(e=>` - **${e.name}** (${e.type})`).join(`
1
+ import{getToolMeta as e}from"../tool-metadata.js";import{SearchOutputSchema as t}from"../output-schemas.js";import{fanOutFtsSearch as n,fanOutSearch as r,openWorkspaceStores as i,resolveWorkspaces as a}from"../cross-workspace.js";import{curatedResourceLink as o,extractCuratedPath as s}from"../resource-links.js";import{basename as c}from"node:path";import{z as l}from"zod";import{stat as u}from"node:fs/promises";import{CONTENT_TYPES as d,KNOWLEDGE_ORIGINS as f,SOURCE_TYPES as p,computePartitionKey as m,createLogger as h,serializeError as g}from"../../../core/dist/index.js";import{bookendReorder as _,graphAugmentSearch as v,stashGet as y,truncateToTokenBudget as b}from"../../../tools/dist/index.js";import{mergeResults as x}from"../../../enterprise-bridge/dist/index.js";const S=h(`tools`);function ee(e){let t=[],n=c(process.cwd());n&&t.push(`[project: ${n}]`);let r=y(`__context_boost`);return r&&t.push(`[focus: ${r.value}]`),t.length===0?e:`${t.join(` `)} ${e}`}async function C(e,t,n,r,i){if(!e||t>=e.config.fallbackThreshold&&n.length>0)return{results:n,triggered:!1,cacheHit:!1};let a=!1;try{let t=e.cache.get(r);return t?a=!0:(t=await e.client.search(r,i),t.length>0&&e.cache.set(r,t)),t.length>0?{results:x(n,t,i).map(e=>({record:{id:`er:${e.sourcePath}`,content:e.content,sourcePath:e.source===`er`?`[ER] ${e.sourcePath}`:e.sourcePath,startLine:e.startLine??0,endLine:e.endLine??0,contentType:e.contentType??`documentation`,headingPath:e.headingPath,origin:e.source===`er`?`curated`:e.origin??`indexed`,category:e.category,tags:e.tags??[],chunkIndex:0,totalChunks:1,fileHash:``,indexedAt:new Date().toISOString(),version:1},score:e.score})),triggered:!0,cacheHit:a}:{results:n,triggered:!0,cacheHit:a}}catch(e){return S.warn(`ER fallback failed`,g(e)),{results:n,triggered:!0,cacheHit:a}}}function te(e,t,n=60){let r=new Map;for(let t=0;t<e.length;t++){let i=e[t];r.set(i.record.id,{record:i.record,score:1/(n+t+1)})}for(let e=0;e<t.length;e++){let 
i=t[e],a=r.get(i.record.id);a?a.score+=1/(n+e+1):r.set(i.record.id,{record:i.record,score:1/(n+e+1)})}return[...r.values()].sort((e,t)=>t.score-e.score).map(({record:e,score:t})=>({record:e,score:t}))}function w(e,t){let n=t.toLowerCase().split(/\s+/).filter(e=>e.length>=2);return n.length<2?e:e.map(e=>{let t=e.record.content.toLowerCase();if(t.length>5e3)return e;let r=n.map(e=>{let n=[],r=t.indexOf(e);for(;r!==-1&&n.length<10;)n.push(r),r=t.indexOf(e,r+1);return n});if(r.some(e=>e.length===0))return e;let i=t.length;for(let e of r[0]){let t=e,a=e+n[0].length;for(let i=1;i<r.length;i++){let o=r[i][0],s=Math.abs(o-e);for(let t=1;t<r[i].length;t++){let n=Math.abs(r[i][t]-e);n<s&&(s=n,o=r[i][t])}t=Math.min(t,o),a=Math.max(a,o+n[i].length)}i=Math.min(i,a-t)}let a=1+.25/(1+i/200);return{record:e.record,score:e.score*a}}).sort((e,t)=>t.score-e.score)}function T(e,t,n=8){let r=new Set(t.toLowerCase().split(/\s+/).filter(e=>e.length>=2)),i=new Map,a=e.length;for(let t of e){let e=t.record.content.split(/[^a-zA-Z0-9_]+/).filter(e=>e.length>=3&&!E.has(e.toLowerCase())),n=new Set;for(let t of e){let e=t.toLowerCase();/[_A-Z]/.test(t)&&i.set(`__id__${e}`,1),n.has(e)||(n.add(e),i.set(e,(i.get(e)??0)+1))}}let o=[];for(let[e,t]of i){if(e.startsWith(`__id__`)||r.has(e)||t>a*.8)continue;let n=Math.log(a/t),s=i.has(`__id__${e}`)?1:0,c=e.length>8?.5:0;o.push({term:e,score:n+s+c})}return o.sort((e,t)=>t.score-e.score).slice(0,n).map(e=>e.term)}const E=new Set(`the.and.for.are.but.not.you.all.can.had.her.was.one.our.out.has.have.from.this.that.with.they.been.said.each.which.their.will.other.about.many.then.them.these.some.would.make.like.into.could.time.very.when.come.just.know.take.people.also.back.after.only.more.than.over.such.import.export.const.function.return.true.false.null.undefined.string.number.boolean.void.type.interface`.split(`.`));async function D(e,t){try{let n=await e.getStats();if(!n.lastIndexedAt)return;let r=new Date(n.lastIndexedAt).getTime(),i=Date.now(),a=[...new 
Set(t.map(e=>e.record.sourcePath))].filter(e=>!e.startsWith(`[ER]`)).slice(0,5);if(a.length===0)return;let o=0;for(let e of a)try{(await u(e)).mtimeMs>r&&o++}catch{o++}if(o>0){let e=i-r,t=Math.floor(e/6e4),n=t<1?`<1 min`:`${t} min`;return`> ⚠️ **Index may be stale** — ${o} file(s) modified since last index (${n} ago). Use \`reindex\` to refresh.`}}catch{}}function O(c,u,h,y,x,E,O){let k=e(`search`);c.registerTool(`search`,{title:k.title,description:`Search AI Kit for code, docs, and prior decisions. Default choice for discovery. Modes: hybrid (default), semantic, keyword. For multi-strategy precision queries use find; for a known file path use lookup.`,outputSchema:t,inputSchema:{query:l.string().max(5e3).describe(`Natural language search query`),limit:l.number().min(1).max(20).default(5).describe(`Maximum results to return`),search_mode:l.enum([`hybrid`,`semantic`,`keyword`]).default(`hybrid`).describe(`Search strategy: hybrid (vector + FTS + RRF fusion, default), semantic (vector only), keyword (FTS only)`),content_type:l.enum(d).optional().describe(`Filter by content type`),source_type:l.enum(p).optional().describe(`Coarse filter: "source" (code only), "documentation" (md, curated), "test", "config". Overrides content_type if both set.`),origin:l.enum(f).optional().describe(`Filter by knowledge origin`),category:l.string().optional().describe(`Filter by category (e.g., decisions, patterns, conventions)`),tags:l.array(l.string()).optional().describe(`Filter by tags (returns results matching ANY of the specified tags)`),min_score:l.number().min(0).max(1).default(.25).describe(`Minimum similarity score`),graph_hops:l.number().min(0).max(3).default(1).describe(`Number of graph hops to augment results with connected entities (0 = disabled, 1 = direct connections, 2-3 = deeper traversal). Default 1 provides module/symbol context automatically.`),max_tokens:l.number().min(100).max(5e4).optional().describe(`Maximum token budget for the response. 
When set, output is truncated to fit.`),dedup:l.enum([`file`,`chunk`]).default(`chunk`).describe(`Deduplication mode: "chunk" (default, show all matching chunks) or "file" (collapse chunks from same file into single result with merged line ranges)`),workspaces:l.array(l.string()).optional().describe(`Cross-workspace search: partition names or folder basenames to include. Use ["*"] for all registered workspaces. Only works in user-level install mode.`)},annotations:k.annotations},async({query:e,limit:t,search_mode:c,content_type:l,source_type:d,origin:f,category:p,tags:k,min_score:ne,graph_hops:A,max_tokens:j,dedup:M,workspaces:N})=>{try{let P={limit:t,minScore:ne,contentType:l,sourceType:d,origin:f,category:p,tags:k},F,I=!1,L=!1,R=``,z=ee(e);if(c===`keyword`)F=await h.ftsSearch(e,P),F=F.slice(0,t);else if(c===`semantic`){let n=await u.embedQuery(z);F=await h.search(n,P);let r=await C(x,F[0]?.score??0,F,e,t);F=r.results,I=r.triggered,L=r.cacheHit}else{let n=await u.embedQuery(z),[r,i]=await Promise.all([h.search(n,{...P,limit:t*2}),h.ftsSearch(e,{...P,limit:t*2}).catch(()=>[])]);F=te(r,i).slice(0,t);let a=await C(x,r[0]?.score??0,F,e,t);F=a.results,I=a.triggered,L=a.cacheHit}E&&E.recordSearch(e,I,L),F.length>1&&(F=w(F,e));let B=``;if(N&&N.length>0){let o=a(N,m(process.cwd()));if(o.length>0){let{stores:a,closeAll:s}=await i(o);try{let i;i=c===`keyword`?await n(a,e,{...P,limit:t}):await r(a,await u.embedQuery(e),{...P,limit:t});for(let e of i)F.push({record:{...e.record,sourcePath:`[${e.workspace}] ${e.record.sourcePath}`},score:e.score});F=F.sort((e,t)=>t.score-e.score).slice(0,t),B=` + ${o.length} workspace(s)`}finally{await s()}}}if(M===`file`&&F.length>1){let e=new Map;for(let t of F){let 
n=t.record.sourcePath,r=e.get(n);r?(t.score>r.best.score&&(r.best=t),r.ranges.push({start:t.record.startLine,end:t.record.endLine})):e.set(n,{best:t,ranges:[{start:t.record.startLine,end:t.record.endLine}]})}F=[...e.values()].sort((e,t)=>t.best.score-e.best.score).map(({best:e,ranges:t})=>({record:{...e.record,content:t.length>1?`${e.record.content}\n\n_Matched ${t.length} sections: ${t.sort((e,t)=>e.start-t.start).map(e=>`L${e.start}-${e.end}`).join(`, `)}_`:e.record.content},score:e.score}))}if(F.length===0){if(O?.available)try{let t=(await O.createMessage({prompt:`The search query "${e}" returned 0 results in AI Kit code search. Suggest ONE alternative search query that might find relevant results. Reply with ONLY the alternative query, nothing else.`,systemPrompt:`You are a search query optimizer for AI Kit code search. Generate a single alternative query.`,maxTokens:100})).text.trim().split(`
2
+ `)[0].slice(0,500);if(t&&t!==e){let n=await u.embedQuery(t),r=await h.search(n,P);r.length>0&&(F=r,R=`> _Original query "${e}" returned 0 results. Auto-reformulated to "${t}"._\n\n`,S.info(`Smart search fallback succeeded`,{originalQuery:e,altQuery:t,resultCount:r.length}))}}catch(e){S.debug(`Smart search fallback failed`,{error:String(e)})}if(F.length===0)return{content:[{type:`text`,text:`No results found for the given query.`}],structuredContent:{results:[],totalResults:0,searchMode:c,query:e}}}let V,H;if(A>0&&!y&&(H="> **Note:** `graph_hops` was set but no graph store is available. Graph augmentation skipped."),A>0&&y)try{let e=await v(y,F.map(e=>({recordId:e.record.id,score:e.score,sourcePath:e.record.sourcePath})),{hops:A,maxPerHit:5});V=new Map;for(let t of e)if(t.graphContext.nodes.length>0){let e=t.graphContext.nodes.slice(0,5).map(e=>` - **${e.name}** (${e.type})`).join(`
3
3
  `),n=t.graphContext.edges.slice(0,5).map(e=>` - ${e.fromId} —[${e.type}]→ ${e.toId}`).join(`
4
4
  `),r=[`- **Graph Context** (${A} hop${A>1?`s`:``}):`];e&&r.push(` Entities:\n${e}`),n&&r.push(` Relationships:\n${n}`),V.set(t.recordId,r.join(`
5
5
  `))}}catch(e){S.warn(`Graph augmentation failed`,g(e)),H=`> **Note:** Graph augmentation failed. Results shown without graph context.`}let U=Date.now();for(let e of F)if(e.record.origin===`curated`&&e.record.indexedAt){let t=U-new Date(e.record.indexedAt).getTime(),n=Math.max(0,t/864e5);e.score*=.95**n}F.sort((e,t)=>t.score-e.score),F=_(F);let W=F.map((e,t)=>{let n=e.record;return`${`### Result ${t+1} (score: ${e.score.toFixed(3)})`}\n${[`- **Source**: ${n.sourcePath}`,n.headingPath?`- **Section**: ${n.headingPath}`:null,`- **Type**: ${n.contentType}`,n.startLine?`- **Lines**: ${n.startLine}-${n.endLine}`:null,n.origin===`indexed`?null:`- **Origin**: ${n.origin}`,n.category?`- **Category**: ${n.category}`:null,n.tags?.length?`- **Tags**: ${n.tags.join(`, `)}`:null,V?.get(n.id)??null].filter(Boolean).join(`
@@ -1,3 +1,3 @@
1
1
  import{getGcStatus as e}from"../auto-gc.js";import{getToolTelemetry as t}from"../replay-interceptor.js";import{getToolMeta as n}from"../tool-metadata.js";import{StatusOutputSchema as r}from"../output-schemas.js";import{autoUpgradeScaffold as i,getCurrentVersion as a,getUpgradeState as o}from"../version-check.js";import{existsSync as s,readFileSync as c,statSync as l}from"node:fs";import{resolve as u}from"node:path";import{AIKIT_PATHS as d,createLogger as f,serializeError as p}from"../../../core/dist/index.js";import{WasmRuntime as m}from"../../../chunker/dist/index.js";import{homedir as h}from"node:os";const g=f(`tools`);function _(e,t,n,r=15e3){let i,a=new Promise(e=>{i=setTimeout(()=>{g.warn(`Status sub-operation "${n}" timed out after ${r}ms`),e({value:t,timedOut:!0})},r)});return Promise.race([e.then(e=>(clearTimeout(i),{value:e,timedOut:!1}),e=>(clearTimeout(i),g.warn(`Status sub-operation "${n}" failed: ${e instanceof Error?e.message:String(e)}`),{value:t,timedOut:!1})),a])}const v=5*6e4;let y=null,b=null;function x(){let e=Date.now();if(y&&e-y.ts<v)return y.value;try{let t=u(h(),`.copilot`,`.aikit-scaffold.json`);if(!s(t))return y={value:null,ts:e},null;let n=JSON.parse(c(t,`utf-8`)).version??null;return y={value:n,ts:e},n}catch{return y={value:null,ts:e},null}}function S(){let e=Date.now();if(b&&e-b.ts<v)return b.value;try{let t=u(process.cwd(),`.github`,`.aikit-scaffold.json`);if(!s(t))return b={value:null,ts:e},null;let n=JSON.parse(c(t,`utf-8`)).version??null;return b={value:n,ts:e},n}catch{return b={value:null,ts:e},null}}function C(e){let t=n(`status`);e.registerTool(`status`,{title:t.title,description:`Get the current status and statistics of the AI Kit index.`,outputSchema:r,annotations:t.annotations},async()=>{let e=a(),t=x(),n=S(),r=t!=null&&t!==e,s=n!=null&&n!==e,c=[`## AI Kit Status`,``,`⏳ **AI Kit is initializing** — index stats will be available shortly.`,``,`### Runtime`,`- **Tree-sitter (WASM)**: ${m.get()?`✅ Available (AST analysis)`:`⚠ 
Unavailable (regex fallback)`}`,``,`### Version`,`- **Server**: ${e}`,`- **Scaffold (user)**: ${t??`not installed`}`,`- **Scaffold (workspace)**: ${n??`not installed`}`];if(r||s){let a=o(),l=[];r&&l.push(`user scaffold v${t}`),s&&l.push(`workspace scaffold v${n}`);let u=l.join(`, `);a.state===`success`?c.push(``,`### ✅ Upgrade Applied`,`- Server v${e} — ${u} auto-upgraded successfully.`,`- _Restart the MCP server to use the updated version._`):a.state===`pending`?c.push(``,`### ⏳ Upgrade In Progress`,`- Server v${e} ≠ ${u}`,`- Auto-upgrade is running in the background…`):a.state===`failed`?(i(),c.push(``,`### ⬆ Upgrade Available (auto-upgrade failed, retrying)`,`- Server v${e} ≠ ${u}`,`- Error: ${a.error??`unknown`}`)):(i(),c.push(``,`### ⬆ Upgrade Available`,`- Server v${e} ≠ ${u}`,`- Auto-upgrade triggered — check again shortly.`))}let l={totalRecords:0,totalFiles:0,lastIndexedAt:null,onboarded:!1,onboardDir:``,contentTypes:{},wasmAvailable:!!m.get(),graphStats:null,curatedCount:0,serverVersion:e,scaffoldVersion:t??null,workspaceScaffoldVersion:n??null,upgradeAvailable:r||s};return{content:[{type:`text`,text:c.join(`
2
- `)}],structuredContent:l}})}function w(c,f,h,v,y,b,C,w){let T=n(`status`);c.registerTool(`status`,{title:T.title,description:`Get the current status and statistics of the AI Kit index.`,outputSchema:r,annotations:T.annotations},async()=>{let n=[];try{let[r,c]=await Promise.all([_(f.getStats(),{totalRecords:0,totalFiles:0,lastIndexedAt:null,contentTypeBreakdown:{}},`store.getStats`),_(f.listSourcePaths(),[],`store.listSourcePaths`)]),p=r.value;r.timedOut&&n.push(`⚠ Index stats timed out — values may be incomplete`);let g=c.value;c.timedOut&&n.push(`⚠ File listing timed out`);let T=null,E=0,D=[`## AI Kit Status`,``,`- **Total Records**: ${p.totalRecords}`,`- **Total Files**: ${p.totalFiles}`,`- **Last Indexed**: ${p.lastIndexedAt??`Never`}`,``,`### Content Types`,...Object.entries(p.contentTypeBreakdown).map(([e,t])=>`- ${e}: ${t}`),``,`### Indexed Files`,...g.slice(0,50).map(e=>`- ${e}`),g.length>50?`\n... and ${g.length-50} more files`:``];if(h)try{let e=await _(h.getStats(),{nodeCount:0,edgeCount:0,nodeTypes:{},edgeTypes:{}},`graphStore.getStats`);if(e.timedOut)n.push(`⚠ Graph stats timed out`),D.push(``,`### Knowledge Graph`,`- Graph stats timed out`);else{let t=e.value;T={nodes:t.nodeCount,edges:t.edgeCount},D.push(``,`### Knowledge Graph`,`- **Nodes**: ${t.nodeCount}`,`- **Edges**: ${t.edgeCount}`,...Object.entries(t.nodeTypes).map(([e,t])=>` - ${e}: ${t}`));try{let e=await _(h.validate(),{valid:!0,danglingEdges:[],orphanNodes:[],stats:{nodeCount:0,edgeCount:0,nodeTypes:{},edgeTypes:{}}},`graphStore.validate`);if(!e.timedOut){let t=e.value;t.valid||D.push(`- **⚠ Integrity Issues**: ${t.danglingEdges.length} dangling edges`),t.orphanNodes.length>0&&D.push(`- **Orphan nodes**: ${t.orphanNodes.length}`)}}catch{}}}catch{D.push(``,`### Knowledge Graph`,`- Graph store unavailable`)}let O=b?.onboardDir??u(process.cwd(),d.aiKb),k=s(O),A=y?.onboardComplete??k;if(D.push(``,`### Onboard Status`,A?`- ✅ Complete${y?.onboardTimestamp?` (last: 
${y.onboardTimestamp})`:``}`:'- ❌ Not run — call `onboard({ path: "." })` to analyze the codebase',`- **Onboard Directory**: \`${O}\``),v)try{let e=await _(v.list(),[],`curated.list`);if(e.timedOut)n.push(`⚠ Curated knowledge listing timed out`),D.push(``,`### Curated Knowledge`,`- Listing timed out`);else{let t=e.value;E=t.length,D.push(``,`### Curated Knowledge`,t.length>0?`- ${t.length} entries`:"- Empty — use `remember()` to persist decisions")}}catch{D.push(``,`### Curated Knowledge`,`- Unable to read curated entries`)}let j=0;if(p.lastIndexedAt){j=new Date(p.lastIndexedAt).getTime();let e=(Date.now()-j)/(1e3*60*60);D.push(``,`### Index Freshness`,e>24?C===`smart`?`- ⚠ Last indexed ${Math.floor(e)}h ago — smart indexing will refresh automatically`:`- ⚠ Last indexed ${Math.floor(e)}h ago — may be stale. Run \`reindex({})\``:`- ✅ Last indexed ${e<1?`less than 1h`:`${Math.floor(e)}h`} ago`)}if(C===`smart`)if(D.push(``,`### Smart Indexing`),w){let e=w();D.push(`- **Mode**: Smart (trickle)`,`- **Status**: ${e.running?`✅ Running`:`⏸ Stopped`}`,`- **Queue**: ${e.queueSize} files pending`,`- **Changed files**: ${e.changedFilesSize} detected`,`- **Interval**: ${Math.round(e.intervalMs/1e3)}s per batch of ${e.batchSize}`)}else D.push(`- **Mode**: Smart (trickle) — scheduler state unavailable`);{try{let e=u(process.cwd(),d.data,`stash`);if(s(e)){let t=l(e).mtimeMs;t>j&&(j=t)}}catch{}let e=[];if(v)try{let t=E>0?await v.list():[];for(let e of t){let t=new Date(e.updated||e.created).getTime();t>j&&(j=t)}e.push(...t.sort((e,t)=>new Date(t.updated).getTime()-new Date(e.updated).getTime()).slice(0,5))}catch{}let t=j>0?Date.now()-j:0;if(t>=144e5){let n=Math.floor(t/36e5);if(D.push(``,`### 🌅 Session Briefing`,`_${n}+ hours since last activity — here's what to pick up:_`,``),e.length>0){D.push(`**Recent decisions/notes:**`);for(let t of e)D.push(`- **${t.title}** (${t.category??`note`}) — ${(t.contentPreview??``).slice(0,80)}…`)}D.push(``,`**Suggested next steps:**`,'- `search({ 
query: "SESSION CHECKPOINT", origin: "curated" })` — find your last checkpoint',"- `restore({})` — resume from a saved checkpoint","- `list()` — browse all stored knowledge")}}D.push(``,`### Runtime`,`- **Tree-sitter (WASM)**: ${m.get()?`✅ Available (AST analysis)`:`⚠ Unavailable (regex fallback)`}`);let M=x(),N=S(),P=a(),F=M!=null&&M!==P,I=N!=null&&N!==P;if(F||I){let e=o(),t=[];F&&t.push(`user scaffold v${M}`),I&&t.push(`workspace scaffold v${N}`);let n=t.join(`, `);e.state===`success`?D.push(``,`### ✅ Upgrade Applied`,`- Server v${P} — ${n} auto-upgraded successfully.`,`- _Restart the MCP server to use the updated version._`):e.state===`pending`?D.push(``,`### ⏳ Upgrade In Progress`,`- Server v${P} ≠ ${n}`,`- Auto-upgrade is running in the background…`):e.state===`failed`?(i(),D.push(``,`### ⬆ Upgrade Available (auto-upgrade failed, retrying)`,`- Server v${P} ≠ ${n}`,`- Error: ${e.error??`unknown`}`)):(i(),D.push(``,`### ⬆ Upgrade Available`,`- Server v${P} ≠ ${n}`,`- Auto-upgrade triggered — check again shortly.`))}n.length>0&&D.push(``,`### ⚠ Warnings`,...n.map(e=>`- ${e}`));let L=t();if(L.length>0){let e=L.sort((e,t)=>t.callCount-e.callCount);D.push(``,`### Tool Usage This Session`,``),D.push(`| Tool | Calls | Tokens In | Tokens Out | Errors | Avg Latency |`),D.push(`|------|-------|-----------|------------|--------|-------------|`);for(let t of e.slice(0,15)){let e=Math.round(t.totalInputChars/4),n=Math.round(t.totalOutputChars/4),r=Math.round(t.totalDurationMs/t.callCount);D.push(`| ${t.tool} | ${t.callCount} | ${e.toLocaleString()} | ${n.toLocaleString()} | ${t.errorCount} | ${r}ms |`)}}let R=e();if(R.bufferSize>=10){let e=R.state===`healthy`?`🟢`:R.state===`degraded`?`🔴`:`🟡`;D.push(``,`### Auto-GC: ${e} ${R.state}`),D.push(`- p95 latency: ${R.p95}ms | buffer: ${R.bufferSize} samples`),R.gcCount>0&&D.push(`- GC cycles triggered: ${R.gcCount}`)}let z=D.join(`
2
+ `)}],structuredContent:l}})}
/* ^ tail of a preceding tool registration — its definition starts before this excerpt. */
/*
 * registerStatusTool — registers the `status` MCP tool on server `c`.
 * Minified parameter roles, inferred ONLY from the call sites below (names are
 * minifier output — TODO confirm against unminified source):
 *   c: MCP server (.registerTool)
 *   f: index store (.getStats / .listSourcePaths)
 *   h: graph store, may be null (.getStats / .validate)
 *   v: curated-knowledge manager, may be null (.list)
 *   y: onboard state (?.onboardComplete / ?.onboardTimestamp)
 *   b: options (?.onboardDir)
 *   C: indexing-mode string, compared against `smart`
 *   w: optional zero-arg getter returning trickle-scheduler state
 *      (note: this parameter shadows the enclosing function's own name `w`)
 * The async handler assembles a markdown report in array D, accumulates warning
 * strings in n, and returns { content, structuredContent }; on failure it logs
 * via g.error and returns { isError: !0 }. Per-subsystem reads are wrapped in
 * _(promise, fallback, label), which presumably applies a timeout and yields
 * { value, timedOut } — verify in ./tool-timeout.js.
 */
function w(c,f,h,v,y,b,C,w){let T=n(`status`);c.registerTool(`status`,{title:T.title,description:`Get the current status and statistics of the AI Kit index.`,outputSchema:r,annotations:T.annotations},async()=>{let n=[];
/* n (shadows a module import) collects warning strings surfaced later under "### ⚠ Warnings". */
try{let[r,c]=await Promise.all([_(f.getStats(),{totalRecords:0,totalFiles:0,lastIndexedAt:null,contentTypeBreakdown:{}},`store.getStats`),_(f.listSourcePaths(),[],`store.listSourcePaths`)]),p=r.value;r.timedOut&&n.push(`⚠ Index stats timed out — values may be incomplete`);let g=c.value;c.timedOut&&n.push(`⚠ File listing timed out`);let T=null,E=0,D=[`## AI Kit Status`,``,`- **Total Records**: ${p.totalRecords}`,`- **Total Files**: ${p.totalFiles}`,`- **Last Indexed**: ${p.lastIndexedAt??`Never`}`,``,`### Content Types`,...Object.entries(p.contentTypeBreakdown).map(([e,t])=>`- ${e}: ${t}`),``,`### Indexed Files`,...g.slice(0,50).map(e=>`- ${e}`),g.length>50?`\n... and ${g.length-50} more files`:``];if(h)try{let e=await _(h.getStats(),{nodeCount:0,edgeCount:0,nodeTypes:{},edgeTypes:{}},`graphStore.getStats`);if(e.timedOut)n.push(`⚠ Graph stats timed out`),D.push(``,`### Knowledge Graph`,`- Graph stats timed out`);else{let t=e.value;T={nodes:t.nodeCount,edges:t.edgeCount},D.push(``,`### Knowledge Graph`,`- **Nodes**: ${t.nodeCount}`,`- **Edges**: ${t.edgeCount}`,...Object.entries(t.nodeTypes).map(([e,t])=>` - ${e}: ${t}`));try{let e=await _(h.validate(),{valid:!0,danglingEdges:[],orphanNodes:[],stats:{nodeCount:0,edgeCount:0,nodeTypes:{},edgeTypes:{}}},`graphStore.validate`);if(!e.timedOut){let t=e.value;t.valid||D.push(`- **⚠ Integrity Issues**: ${t.danglingEdges.length} dangling edges`),t.orphanNodes.length>0&&D.push(`- **Orphan nodes**: ${t.orphanNodes.length}`)}}catch{}}}catch{D.push(``,`### Knowledge Graph`,`- Graph store unavailable`)}let O=b?.onboardDir??u(process.cwd(),d.aiKb),k=s(O),A=y?.onboardComplete??k;if(D.push(``,`### Onboard Status`,A?`- ✅ Complete${y?.onboardTimestamp?` (last: 
${y.onboardTimestamp})`:``}`:'- ❌ Not run — call `onboard({ path: "." })` to analyze the codebase',`- **Onboard Directory**: \`${O}\``),v)try{let e=await _(v.list(),[],`curated.list`);if(e.timedOut)n.push(`⚠ Curated knowledge listing timed out`),D.push(``,`### Curated Knowledge`,`- Listing timed out`);else{let t=e.value;E=t.length,D.push(``,`### Curated Knowledge`,t.length>0?`- ${t.length} entries`:"- Empty — use `remember()` to persist decisions")}}catch{D.push(``,`### Curated Knowledge`,`- Unable to read curated entries`)}let j=0;if(p.lastIndexedAt){j=new Date(p.lastIndexedAt).getTime();let e=(Date.now()-j)/(1e3*60*60);D.push(``,`### Index Freshness`,e>24?C===`smart`?`- ⚠ Last indexed ${Math.floor(e)}h ago — smart indexing will refresh automatically`:`- ⚠ Last indexed ${Math.floor(e)}h ago — may be stale. Run \`reindex({})\``:`- ✅ Last indexed ${e<1?`less than 1h`:`${Math.floor(e)}h`} ago`)}if(C===`smart`)if(D.push(``,`### Smart Indexing`),w){let e=w();e?D.push(`- **Mode**: Smart (trickle)`,`- **Status**: ${e.running?`✅ Running`:`⏸ Stopped`}`,`- **Queue**: ${e.queueSize} files pending`,`- **Changed files**: ${e.changedFilesSize} detected`,`- **Interval**: ${Math.round(e.intervalMs/1e3)}s per batch of ${e.batchSize}`):D.push(`- **Mode**: Smart (trickle)`,`- **Status**: scheduler state unavailable (init may have failed)`)}else D.push(`- **Mode**: Smart (trickle) — scheduler state unavailable`);{try{let e=u(process.cwd(),d.data,`stash`);if(s(e)){let t=l(e).mtimeMs;t>j&&(j=t)}}catch{}let e=[];if(v)try{let t=E>0?await v.list():[];for(let e of t){let t=new Date(e.updated||e.created).getTime();t>j&&(j=t)}e.push(...t.sort((e,t)=>new Date(t.updated).getTime()-new Date(e.updated).getTime()).slice(0,5))}catch{}let t=j>0?Date.now()-j:0;if(t>=144e5){let n=Math.floor(t/36e5);if(D.push(``,`### 🌅 Session Briefing`,`_${n}+ hours since last activity — here's what to pick up:_`,``),e.length>0){D.push(`**Recent decisions/notes:**`);for(let t of e)D.push(`- **${t.title}** 
(${t.category??`note`}) — ${(t.contentPreview??``).slice(0,80)}…`)}D.push(``,`**Suggested next steps:**`,'- `search({ query: "SESSION CHECKPOINT", origin: "curated" })` — find your last checkpoint',"- `restore({})` — resume from a saved checkpoint","- `list()` — browse all stored knowledge")}}D.push(``,`### Runtime`,`- **Tree-sitter (WASM)**: ${m.get()?`✅ Available (AST analysis)`:`⚠ Unavailable (regex fallback)`}`);let M=x(),N=S(),P=a(),F=M!=null&&M!==P,I=N!=null&&N!==P;if(F||I){let e=o(),t=[];F&&t.push(`user scaffold v${M}`),I&&t.push(`workspace scaffold v${N}`);let n=t.join(`, `);e.state===`success`?D.push(``,`### ✅ Upgrade Applied`,`- Server v${P} — ${n} auto-upgraded successfully.`,`- _Restart the MCP server to use the updated version._`):e.state===`pending`?D.push(``,`### ⏳ Upgrade In Progress`,`- Server v${P} ≠ ${n}`,`- Auto-upgrade is running in the background…`):e.state===`failed`?(i(),D.push(``,`### ⬆ Upgrade Available (auto-upgrade failed, retrying)`,`- Server v${P} ≠ ${n}`,`- Error: ${e.error??`unknown`}`)):(i(),D.push(``,`### ⬆ Upgrade Available`,`- Server v${P} ≠ ${n}`,`- Auto-upgrade triggered — check again shortly.`))}n.length>0&&D.push(``,`### ⚠ Warnings`,...n.map(e=>`- ${e}`));let L=t();if(L.length>0){let e=L.sort((e,t)=>t.callCount-e.callCount);D.push(``,`### Tool Usage This Session`,``),D.push(`| Tool | Calls | Tokens In | Tokens Out | Errors | Avg Latency |`),D.push(`|------|-------|-----------|------------|--------|-------------|`);for(let t of e.slice(0,15)){let e=Math.round(t.totalInputChars/4),n=Math.round(t.totalOutputChars/4),r=Math.round(t.totalDurationMs/t.callCount);D.push(`| ${t.tool} | ${t.callCount} | ${e.toLocaleString()} | ${n.toLocaleString()} | ${t.errorCount} | ${r}ms |`)}}let R=e();if(R.bufferSize>=10){let e=R.state===`healthy`?`🟢`:R.state===`degraded`?`🔴`:`🟡`;D.push(``,`### Auto-GC: ${e} ${R.state}`),D.push(`- p95 latency: ${R.p95}ms | buffer: ${R.bufferSize} samples`),R.gcCount>0&&D.push(`- GC cycles triggered: 
${R.gcCount}`)}
/* NOTE(review): the template-literal separator passed to D.join below spans lines
 * containing bare "3" tokens and leading spaces. That pattern matches registry-diff
 * gutter residue (line numbers + indent) rather than intended code — a plain
 * D.join("\n") is almost certainly what ships. Verify against the published tarball
 * before changing anything here; the bytes are preserved as-is. */
let z=D.join(`
3
3
  `),B={totalRecords:p.totalRecords,totalFiles:p.totalFiles,lastIndexedAt:p.lastIndexedAt??null,onboarded:A,onboardDir:O,contentTypes:p.contentTypeBreakdown,wasmAvailable:!!m.get(),graphStats:T,curatedCount:E,serverVersion:P,scaffoldVersion:M??null,workspaceScaffoldVersion:N??null,upgradeAvailable:F||I};return{content:[{type:`text`,text:z+(C===`smart`?"\n\n---\n_Next: Use `search` to query indexed content or `graph(stats)` to explore the knowledge graph. Smart indexing handles updates automatically._":"\n\n---\n_Next: Use `search` to query indexed content, `graph(stats)` to explore the knowledge graph, or `reindex` to refresh the index._")}],structuredContent:B}}catch(e){return g.error(`Status failed`,p(e)),{content:[{type:`text`,text:`Status check failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}
/* Public surface of this module chunk: x/S are scaffold-version getters and C an
 * early-status registrar (all defined earlier in the bundle, outside this excerpt);
 * the function w above is exported as registerStatusTool. */
export{x as getScaffoldVersion,S as getWorkspaceScaffoldVersion,C as registerEarlyStatusTool,w as registerStatusTool};