@vpxa/aikit 0.1.9 → 0.1.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/packages/indexer/dist/smart-index-scheduler.js +1 -1
- package/packages/server/dist/compression-interceptor.d.ts +12 -0
- package/packages/server/dist/compression-interceptor.js +1 -0
- package/packages/server/dist/server.js +2 -2
- package/packages/server/dist/tools/analyze.tools.js +3 -3
- package/packages/server/dist/tools/flow.tools.js +1 -1
- package/packages/server/dist/tools/onboard.tool.js +1 -1
- package/packages/server/dist/tools/present/templates.js +6 -6
- package/packages/server/dist/tools/present/tool.js +1 -1
- package/packages/server/dist/tools/search.tool.js +2 -2
- package/packages/store/dist/lance-store.js +1 -1
- package/packages/tools/dist/compact.d.ts +6 -0
- package/packages/tools/dist/compact.js +2 -2
- package/packages/tools/dist/compress-output.d.ts +38 -0
- package/packages/tools/dist/compress-output.js +1 -0
- package/packages/tools/dist/compression/delta-cache.d.ts +29 -0
- package/packages/tools/dist/compression/delta-cache.js +1 -0
- package/packages/tools/dist/compression/engine.d.ts +34 -0
- package/packages/tools/dist/compression/engine.js +4 -0
- package/packages/tools/dist/compression/litm.d.ts +20 -0
- package/packages/tools/dist/compression/litm.js +1 -0
- package/packages/tools/dist/compression/mmr.d.ts +24 -0
- package/packages/tools/dist/compression/mmr.js +1 -0
- package/packages/tools/dist/compression/rules/docker.d.ts +11 -0
- package/packages/tools/dist/compression/rules/docker.js +3 -0
- package/packages/tools/dist/compression/rules/generic.d.ts +11 -0
- package/packages/tools/dist/compression/rules/generic.js +3 -0
- package/packages/tools/dist/compression/rules/git.d.ts +11 -0
- package/packages/tools/dist/compression/rules/git.js +3 -0
- package/packages/tools/dist/compression/rules/index.d.ts +14 -0
- package/packages/tools/dist/compression/rules/index.js +1 -0
- package/packages/tools/dist/compression/rules/kubectl.d.ts +11 -0
- package/packages/tools/dist/compression/rules/kubectl.js +3 -0
- package/packages/tools/dist/compression/rules/lint.d.ts +11 -0
- package/packages/tools/dist/compression/rules/lint.js +3 -0
- package/packages/tools/dist/compression/rules/npm.d.ts +11 -0
- package/packages/tools/dist/compression/rules/npm.js +3 -0
- package/packages/tools/dist/compression/rules/test-runner.d.ts +11 -0
- package/packages/tools/dist/compression/rules/test-runner.js +3 -0
- package/packages/tools/dist/compression/scoring.d.ts +26 -0
- package/packages/tools/dist/compression/scoring.js +1 -0
- package/packages/tools/dist/compression/types.d.ts +61 -0
- package/packages/tools/dist/compression/types.js +1 -0
- package/packages/tools/dist/digest.d.ts +5 -0
- package/packages/tools/dist/digest.js +4 -4
- package/packages/tools/dist/index.d.ts +5 -1
- package/packages/tools/dist/index.js +1 -1
- package/packages/tools/dist/parse-output.d.ts +3 -1
- package/packages/tools/dist/parse-output.js +1 -1
- package/packages/tools/dist/truncation.d.ts +34 -1
- package/packages/tools/dist/truncation.js +10 -6
- package/packages/tui/dist/{App-DpjN3iS-.js → App-D8fednPY.js} +1 -1
- package/packages/tui/dist/App.js +1 -1
- package/packages/tui/dist/{CuratedPanel-BIamXLNy.js → CuratedPanel-BwkPKdX4.js} +1 -1
- package/packages/tui/dist/LogPanel-C6KagE0H.js +3320 -0
- package/packages/tui/dist/{SearchPanel-CpJGczAc.js → SearchPanel-BbvCaco0.js} +1 -1
- package/packages/tui/dist/{StatusPanel-BAbUxyqQ.js → StatusPanel-BFshFJU9.js} +1 -1
- package/packages/tui/dist/chunk-BSngPjfM.js +2 -0
- package/packages/tui/dist/{devtools-DMOZMn70.js → devtools-owCrRz12.js} +2 -2
- package/packages/tui/dist/hooks/useKBClient.js +1 -1
- package/packages/tui/dist/hooks/usePolling.js +1 -1
- package/packages/tui/dist/index.js +1 -1
- package/packages/tui/dist/{jsx-runtime-y6Gdq5PZ.js → jsx-runtime-B_2ewjsM.js} +7 -7
- package/packages/tui/dist/panels/CuratedPanel.js +1 -1
- package/packages/tui/dist/panels/LogPanel.js +1 -1
- package/packages/tui/dist/panels/SearchPanel.js +1 -1
- package/packages/tui/dist/panels/StatusPanel.js +1 -1
- package/packages/tui/dist/{react-D__J1GQe.js → react-sYq8CyFO.js} +1 -1
- package/packages/tui/dist/{useKBClient-C35iA4uG.js → useKBClient-BkBffCdd.js} +1 -1
- package/packages/tui/dist/{usePolling-BbjnRWgx.js → usePolling-D3y7u5cZ.js} +1 -1
- package/scaffold/adapters/copilot.mjs +4 -1
- package/scaffold/definitions/agents.mjs +32 -8
- package/scaffold/definitions/bodies.mjs +6 -2
- package/scaffold/definitions/protocols.mjs +12 -0
- package/scaffold/general/agents/Code-Reviewer-Alpha.agent.md +7 -0
- package/scaffold/general/agents/Code-Reviewer-Beta.agent.md +7 -0
- package/scaffold/general/agents/Debugger.agent.md +7 -0
- package/scaffold/general/agents/Documenter.agent.md +1 -0
- package/scaffold/general/agents/Explorer.agent.md +6 -0
- package/scaffold/general/agents/Frontend.agent.md +9 -0
- package/scaffold/general/agents/Implementer.agent.md +7 -0
- package/scaffold/general/agents/Orchestrator.agent.md +35 -51
- package/scaffold/general/agents/README.md +3 -3
- package/scaffold/general/agents/Refactor.agent.md +1 -0
- package/scaffold/general/agents/Researcher-Beta.agent.md +2 -2
- package/scaffold/general/agents/Researcher-Delta.agent.md +2 -2
- package/scaffold/general/agents/Researcher-Gamma.agent.md +2 -2
- package/scaffold/general/agents/Security.agent.md +7 -0
- package/scaffold/general/agents/_shared/code-agent-base.md +38 -18
- package/scaffold/general/skills/aikit/SKILL.md +73 -1
- package/packages/tui/dist/LogPanel-Db-SeZhR.js +0 -3
- package/packages/tui/dist/chunk-D6axbAb-.js +0 -2
package/package.json
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
import{statSync as e}from"node:fs";import{createLogger as t}from"../../core/dist/index.js";import{availableParallelism as n,loadavg as r}from"node:os";const i=t(`smart-index`),a=1.5;var o=class{trickleTimer=null;trickleIntervalMs;batchSize;priorityQueue=[];changedFiles=[];lastRefreshTime=0;refreshing=!1;constructor(e,t){this.indexer=e,this.config=t,this.trickleIntervalMs=this.readPositiveIntEnv(`AIKIT_SMART_TRICKLE_MS`,12e4),this.batchSize=this.readPositiveIntEnv(`AIKIT_SMART_BATCH_SIZE`,1)}start(){this.stop(),i.info(`Smart index scheduler started (trickle mode)`,{intervalMs:this.trickleIntervalMs,batchSize:this.batchSize}),this.scheduleTick()}stop(){this.trickleTimer&&=(clearTimeout(this.trickleTimer),null)}prioritize(...t){let n=[...new Set(t.filter(Boolean))].filter(t=>{try{return!e(t).isDirectory()}catch{return
|
|
1
|
+
import{statSync as e}from"node:fs";import{createLogger as t}from"../../core/dist/index.js";import{availableParallelism as n,loadavg as r}from"node:os";const i=t(`smart-index`),a=1.5;var o=class{trickleTimer=null;trickleIntervalMs;batchSize;priorityQueue=[];changedFiles=[];lastRefreshTime=0;refreshing=!1;constructor(e,t){this.indexer=e,this.config=t,this.trickleIntervalMs=this.readPositiveIntEnv(`AIKIT_SMART_TRICKLE_MS`,12e4),this.batchSize=this.readPositiveIntEnv(`AIKIT_SMART_BATCH_SIZE`,1)}start(){this.stop(),i.info(`Smart index scheduler started (trickle mode)`,{intervalMs:this.trickleIntervalMs,batchSize:this.batchSize}),this.scheduleTick()}stop(){this.trickleTimer&&=(clearTimeout(this.trickleTimer),null)}prioritize(...t){let n=[...new Set(t.filter(Boolean))].filter(t=>{try{return!e(t).isDirectory()}catch{return console.debug(`smart-index: skipping non-existent path: ${t}`),!1}});for(let e of n){let t=this.priorityQueue.indexOf(e);t>=0&&this.priorityQueue.splice(t,1)}for(let e of n.reverse())this.priorityQueue.unshift(e);this.priorityQueue.length>500&&(this.priorityQueue.length=500),n.length>0&&i.info(`Files prioritized for trickle indexing`,{added:n.length,queued:this.priorityQueue.length})}getState(){return{mode:`smart`,queueSize:this.priorityQueue.length,changedFilesSize:this.changedFiles.length,intervalMs:this.trickleIntervalMs,batchSize:this.batchSize,running:this.trickleTimer!==null}}readPositiveIntEnv(e,t){let n=Number(process.env[e]);return Number.isFinite(n)&&n>0?n:t}scheduleTick(){this.trickleTimer=setTimeout(()=>void this.tick(),this.trickleIntervalMs),this.trickleTimer.unref&&this.trickleTimer.unref()}async tick(){try{if(this.indexer.isIndexing){i.info(`Skipping trickle tick — indexing already in progress`);return}let e=this.getCpuCount(),t=r()[0];if(e>0&&t/e>a){i.info(`Skipping trickle tick — system load too high`,{load:t.toFixed(2),cpuCount:e,threshold:a});return}let n=await this.pickFiles();if(n.length===0){await 
this.maybeRefreshChangedFiles();return}i.info(`Trickle indexing tick started`,{count:n.length,files:n});let o=await this.indexer.indexFiles(this.config,n);this.changedFiles=this.changedFiles.filter(e=>!n.includes(e)),i.info(`Trickle indexing tick complete`,{filesProcessed:o.filesProcessed,filesSkipped:o.filesSkipped,chunksCreated:o.chunksCreated})}catch(e){i.error(`Trickle indexing tick failed`,{error:String(e)})}finally{this.scheduleTick()}}getCpuCount(){try{return typeof n==`function`?n():4}catch{return 4}}async pickFiles(){let e=[];for(;e.length<this.batchSize&&this.priorityQueue.length>0;){let t=this.priorityQueue.shift();t&&!e.includes(t)&&e.push(t)}if(e.length<this.batchSize)for(await this.maybeRefreshChangedFiles();e.length<this.batchSize&&this.changedFiles.length>0;){let t=this.changedFiles.shift();t&&!e.includes(t)&&e.push(t)}return e}async maybeRefreshChangedFiles(){let e=Date.now();if(!(this.refreshing||this.changedFiles.length>0&&e-this.lastRefreshTime<6e5)){this.refreshing=!0;try{this.changedFiles=await this.indexer.getChangedFiles(this.config),this.lastRefreshTime=e,this.changedFiles.length>0&&i.info(`Refreshed changed files for trickle indexing`,{count:this.changedFiles.length})}catch(e){i.error(`Failed to refresh changed files for trickle indexing`,{error:String(e)})}finally{this.refreshing=!1}}}};export{o as SmartIndexScheduler};
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
|
|
2
|
+
|
|
3
|
+
//#region packages/server/src/compression-interceptor.d.ts
|
|
4
|
+
interface CompressionInterceptorOptions {
|
|
5
|
+
/** Minimum character count before compression kicks in (default: 4000) */
|
|
6
|
+
threshold?: number;
|
|
7
|
+
/** Maximum token budget for compressed output (default: 2000) */
|
|
8
|
+
tokenBudget?: number;
|
|
9
|
+
}
|
|
10
|
+
declare function installCompressionInterceptor(server: McpServer, options?: CompressionInterceptorOptions): void;
|
|
11
|
+
//#endregion
|
|
12
|
+
export { CompressionInterceptorOptions, installCompressionInterceptor };
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
import{compressTerminalOutput as e}from"../../tools/dist/index.js";const t=new Set([`compact`,`digest`,`file_summary`,`stratum_card`,`present`,`web_fetch`,`web_search`]);function n(n,r){let i=r?.threshold??4e3,a=r?.tokenBudget??2e3,o=n.registerTool.bind(n);n.registerTool=(n,r,s)=>t.has(n)?o(n,r,s):o(n,r,async(t,r)=>{let o=await s(t,r);if(!Array.isArray(o.content))return o;let c=0;for(let e of o.content)e.type===`text`&&typeof e.text==`string`&&(c+=e.text.length);if(c<i)return o;let l=o.content.map(t=>{if(t.type!==`text`||typeof t.text!=`string`||t.text.length<i)return t;let r=e({text:t.text,tool:n,tokenBudget:a});return{...t,text:r.text}});return{...o,content:l}})}export{n as installCompressionInterceptor};
|
|
@@ -1,3 +1,3 @@
|
|
|
1
|
-
import{BackgroundTaskScheduler as e}from"./background-task.js";import{clearCompletionCache as t}from"./completions.js";import{CuratedKnowledgeManager as n}from"./curated-manager.js";import{createElicitor as r,noopElicitor as i}from"./elicitor.js";import{IdleTimer as a}from"./idle-timer.js";import{bridgeMcpLogging as o}from"./mcp-logging.js";import{MemoryMonitor as s}from"./memory-monitor.js";import{registerPrompts as c}from"./prompts.js";import{installReplayInterceptor as l}from"./replay-interceptor.js";import{ResourceNotifier as u}from"./resources/resource-notifier.js";import{registerResources as d}from"./resources/resources.js";import{createSamplingClient as f}from"./sampling.js";import{installStructuredContentGuard as p}from"./structured-content-guard.js";import{getToolMeta as m}from"./tool-metadata.js";import{installToolPrefix as h}from"./tool-prefix.js";import{ToolTimeoutError as g,getToolTimeout as _,withTimeout as v}from"./tool-timeout.js";import{registerAnalyzeDependenciesTool as y,registerAnalyzeDiagramTool as b,registerAnalyzeEntryPointsTool as x,registerAnalyzePatternsTool as S,registerAnalyzeStructureTool as ee,registerAnalyzeSymbolsTool as C,registerBlastRadiusTool as w}from"./tools/analyze.tools.js";import{registerAuditTool as T}from"./tools/audit.tool.js";import{registerBrainstormTool as E}from"./tools/brainstorm.tool.js";import{initBridgeComponents as te,registerErPullTool as D,registerErPushTool as O,registerErSyncStatusTool as k}from"./tools/bridge.tools.js";import{registerConfigTool as ne}from"./tools/config.tool.js";import{registerCompactTool as A,registerDeadSymbolsTool as j,registerFileSummaryTool as M,registerFindTool as N,registerScopeMapTool as re,registerSymbolTool as P,registerTraceTool as F}from"./tools/context.tools.js";import{registerErEvolveReviewTool as ie}from"./tools/evolution.tools.js";import{registerBatchTool as ae,registerCheckTool as oe,registerDelegateTool as se,registerEvalTool as ce,registerParseOutputTool as 
le,registerTestRunTool as I}from"./tools/execution.tools.js";import{registerFlowTools as ue}from"./tools/flow.tools.js";import{registerDigestTool as de,registerEvidenceMapTool as L,registerForgeClassifyTool as R,registerForgeGroundTool as fe,registerStratumCardTool as pe}from"./tools/forge.tools.js";import{registerForgetTool as me}from"./tools/forget.tool.js";import{registerGraphTool as he}from"./tools/graph.tool.js";import{registerGuideTool as z,registerHealthTool as B,registerProcessTool as V,registerWatchTool as H,registerWebFetchTool as U}from"./tools/infra.tools.js";import{registerListTool as ge}from"./tools/list.tool.js";import{registerLookupTool as _e}from"./tools/lookup.tool.js";import{registerCodemodTool as W,registerDataTransformTool as G,registerDiffParseTool as K,registerGitContextTool as q,registerRenameTool as ve}from"./tools/manipulation.tools.js";import{registerOnboardTool as ye}from"./tools/onboard.tool.js";import{registerCheckpointTool as be,registerLaneTool as xe,registerQueueTool as Se,registerStashTool as Ce,registerWorksetTool as we}from"./tools/persistence.tools.js";import{registerErUpdatePolicyTool as Te}from"./tools/policy.tools.js";import{registerPresentTool as Ee}from"./tools/present/tool.js";import"./tools/present/index.js";import{registerProduceKnowledgeTool as De}from"./tools/produce.tool.js";import{registerReadTool as Oe}from"./tools/read.tool.js";import{registerReindexTool as ke}from"./tools/reindex.tool.js";import{registerRememberTool as Ae}from"./tools/remember.tool.js";import{registerReplayTool as je}from"./tools/replay.tool.js";import{registerRestoreTool as Me}from"./tools/restore.tool.js";import{registerSearchTool as Ne}from"./tools/search.tool.js";import{getCurrentVersion as Pe}from"./version-check.js";import{registerEarlyStatusTool as Fe,registerStatusTool as Ie}from"./tools/status.tool.js";import{registerUpdateTool as Le}from"./tools/update.tool.js";import{registerChangelogTool as Re,registerEncodeTool as ze,registerEnvTool 
as Be,registerHttpTool as Ve,registerMeasureTool as He,registerRegexTestTool as Ue,registerSchemaValidateTool as We,registerSnippetTool as Ge,registerTimeTool as J,registerWebSearchTool as Ke}from"./tools/utility.tools.js";import{existsSync as qe,statSync as Je}from"node:fs";import{resolve as Ye}from"node:path";import{AIKIT_PATHS as Xe,createLogger as Ze,serializeError as Y}from"../../core/dist/index.js";import{initializeWasm as Qe}from"../../chunker/dist/index.js";import{OnnxEmbedder as $e}from"../../embeddings/dist/index.js";import{EvolutionCollector as et,PolicyStore as tt}from"../../enterprise-bridge/dist/index.js";import{FileHashCache as nt,IncrementalIndexer as rt}from"../../indexer/dist/index.js";import{SqliteGraphStore as it,createStore as at}from"../../store/dist/index.js";import{FileCache as ot}from"../../tools/dist/index.js";import{McpServer as st}from"@modelcontextprotocol/sdk/server/mcp.js";const X=Ze(`server`);async function Z(e){X.info(`Initializing AI Kit components`);let[t,r,i,a]=await Promise.all([(async()=>{let t=new $e({model:e.embedding.model,dimensions:e.embedding.dimensions});return await t.initialize(),X.info(`Embedder loaded`,{modelId:t.modelId,dimensions:t.dimensions}),t})(),(async()=>{let t=await at({backend:e.store.backend,path:e.store.path});return await t.initialize(),X.info(`Store initialized`),t})(),(async()=>{let t=new it({path:e.store.path});return await t.initialize(),X.info(`Graph store initialized`),t})(),(async()=>{let e=await Qe();return e?X.info(`WASM tree-sitter enabled for AST analysis`):X.warn(`WASM tree-sitter not available; analyzers will use regex fallback`),e})()]),o=new rt(t,r),s=new nt(e.store.path);s.load(),o.setHashCache(s);let c=e.curated.path,l=new n(c,r,t);o.setGraphStore(i);let u=te(e.er),d=u?new tt(e.curated.path):void 0;d&&X.info(`Policy store initialized`,{ruleCount:d.getRules().length});let f=u?new et:void 
0,p=Ye(e.sources[0]?.path??process.cwd(),Xe.aiKb),m=qe(p),h=e.onboardDir?qe(e.onboardDir):!1,g=m||h,_,v=m?p:e.onboardDir;if(g&&v)try{_=Je(v).mtime.toISOString()}catch{}return X.info(`Onboard state detected`,{onboardComplete:g,onboardTimestamp:_,aiKbExists:m,onboardDirExists:h}),{embedder:t,store:r,indexer:o,curated:l,graphStore:i,fileCache:new ot,bridge:u,policyStore:d,evolutionCollector:f,onboardComplete:g,onboardTimestamp:_}}function ct(e,t){let n=new st({name:t.serverName??`aikit`,version:Pe()},{capabilities:{logging:{},completions:{}}});return o(n),h(n,t.toolPrefix??``),Q(n,e,t,r(n),new u(n),f(n)),c(n,{curated:e.curated,store:e.store,graphStore:e.graphStore},t.indexMode),n}function Q(e,t,n,r,i,a,o,s){l(e),p(e),Ne(e,t.embedder,t.store,t.graphStore,t.bridge,t.evolutionCollector,a),_e(e,t.store),Ie(e,t.store,t.graphStore,t.curated,{onboardComplete:t.onboardComplete,onboardTimestamp:t.onboardTimestamp},n,o,s),ne(e,n),ke(e,t.indexer,n,t.curated,t.store,i,o),Ae(e,t.curated,t.policyStore,t.evolutionCollector,i),Le(e,t.curated,i),me(e,t.curated,i),Oe(e,t.curated),ge(e,t.curated),ee(e,t.store,t.embedder),y(e,t.store,t.embedder),C(e,t.store,t.embedder),S(e,t.store,t.embedder),x(e,t.store,t.embedder),b(e,t.store,t.embedder),w(e,t.store,t.embedder,t.graphStore),De(e,n),ye(e,t.store,t.embedder,n),he(e,t.graphStore),T(e,t.store,t.embedder);let 
c=n.sources[0]?.path??process.cwd();A(e,t.embedder,t.fileCache,c),re(e,t.embedder,t.store),N(e,t.embedder,t.store),le(e),we(e),oe(e),ae(e,t.embedder,t.store),P(e,t.embedder,t.store,t.graphStore),ce(e),I(e),Ce(e),q(e),K(e),ve(e),W(e),Me(e),M(e,t.fileCache,c),be(e),G(e),F(e,t.embedder,t.store,t.graphStore),V(e),H(e),j(e,t.embedder,t.store),se(e,a),B(e),xe(e),Se(e),U(e),z(e,o),L(e),de(e,t.embedder),R(e),pe(e,t.embedder,t.fileCache),fe(e,t.embedder,t.store),Ee(e,r),r&&E(e,r),Ke(e),Ve(e),Ue(e),ze(e),He(e),Re(e),We(e),Ge(e),Be(e),J(e),ue(e,n),t.bridge&&(O(e,t.bridge,t.evolutionCollector),D(e,t.bridge),k(e,t.bridge)),t.policyStore&&Te(e,t.policyStore),t.evolutionCollector&&ie(e,t.evolutionCollector),d(e,t.store,t.curated),je(e)}async function lt(e){let t=await Z(e),n=ct(t,e);X.info(`MCP server configured`,{toolCount:$.length,resourceCount:2});let r=async()=>{try{let n=e.sources.map(e=>e.path).join(`, `);X.info(`Running initial index`,{sourcePaths:n});let r=await t.indexer.index(e,e=>{e.phase===`crawling`||e.phase===`done`||(e.phase===`chunking`&&e.currentFile&&X.debug(`Indexing file`,{current:e.filesProcessed+1,total:e.filesTotal,file:e.currentFile}),e.phase===`cleanup`&&X.debug(`Index cleanup`,{staleEntries:e.filesTotal-e.filesProcessed}))});X.info(`Initial index complete`,{filesProcessed:r.filesProcessed,filesSkipped:r.filesSkipped,chunksCreated:r.chunksCreated,durationMs:r.durationMs});try{await t.store.createFtsIndex()}catch(e){X.warn(`FTS index creation failed`,Y(e))}try{let e=await t.curated.reindexAll();X.info(`Curated re-index complete`,{indexed:e.indexed})}catch(e){X.error(`Curated re-index failed`,Y(e))}}catch(e){X.error(`Initial index failed; will retry on aikit_reindex`,Y(e))}},i=async()=>{X.info(`Shutting down`),await Promise.all([t.embedder.shutdown().catch(()=>{}),t.graphStore.close().catch(()=>{}),t.store.close().catch(()=>{})]),process.exit(0)};process.on(`SIGINT`,i),process.on(`SIGTERM`,i);let 
a=process.ppid,o=setInterval(()=>{try{process.kill(a,0)}catch{X.info(`Parent process died; shutting down`,{parentPid:a}),clearInterval(o),i()}},5e3);return o.unref(),{server:n,runInitialIndex:r,shutdown:i}}const ut=new Set(`batch.brainstorm.changelog.check.checkpoint.codemod.compact.config.data_transform.delegate.diff_parse.digest.encode.env.eval.evidence_map.file_summary.forge_classify.git_context.graph.guide.health.http.lane.measure.onboard.parse_output.present.process.produce_knowledge.queue.read.regex_test.reindex.remember.rename.replay.restore.schema_validate.scope_map.snippet.stash.status.stratum_card.test_run.time.update.forget.list.watch.web_fetch.web_search.workset`.split(`.`)),dt=5e3,ft=new Set(`brainstorm.changelog.check.checkpoint.codemod.data_transform.delegate.diff_parse.encode.env.eval.evidence_map.forge_classify.git_context.guide.present.health.http.lane.measure.parse_output.process.produce_knowledge.queue.regex_test.rename.replay.restore.schema_validate.snippet.stash.status.test_run.time.watch.web_fetch.web_search.workset`.split(`.`));function pt(e){oe(e),ce(e),I(e),le(e),se(e),q(e),K(e),ve(e),W(e),G(e),we(e),Ce(e),be(e),Me(e),xe(e),Se(e),B(e),V(e),H(e),U(e),z(e),L(e),R(e),Ee(e),E(e,i),De(e),je(e),Fe(e),Ke(e),Ve(e),Ue(e),ze(e),He(e),Re(e),We(e),Ge(e),Be(e),J(e)}const 
$=`analyze_dependencies.analyze_diagram.analyze_entry_points.analyze_patterns.analyze_structure.analyze_symbols.audit.batch.blast_radius.brainstorm.changelog.check.checkpoint.codemod.compact.config.data_transform.dead_symbols.delegate.diff_parse.digest.encode.env.eval.evidence_map.file_summary.find.flow_info.flow_list.flow_reset.flow_start.flow_status.flow_step.forge_classify.forge_ground.forget.git_context.graph.guide.health.http.lane.list.lookup.measure.onboard.parse_output.present.process.produce_knowledge.queue.read.regex_test.reindex.remember.rename.replay.restore.schema_validate.scope_map.search.snippet.stash.status.stratum_card.symbol.test_run.time.trace.update.watch.web_fetch.web_search.workset`.split(`.`);function mt(n,i){let l=new st({name:n.serverName??`aikit`,version:Pe()},{capabilities:{logging:{},completions:{}}}),d=`initializing`,p=``,y=!1,b=null,x=null,S=null;function ee(e){if(!e||typeof e!=`object`)return[];let t=e,n=[];for(let e of[`path`,`file`,`source_path`,`sourcePath`,`filePath`]){let r=t[e];typeof r==`string`&&r&&n.push(r)}for(let e of[`changed_files`,`paths`,`files`]){let r=t[e];if(Array.isArray(r))for(let e of r){if(typeof e==`string`){n.push(e);continue}e&&typeof e==`object`&&typeof e.path==`string`&&n.push(e.path)}}if(Array.isArray(t.sources))for(let e of t.sources)e&&typeof e==`object`&&typeof e.path==`string`&&n.push(e.path);return n}let C=()=>d===`failed`?[`❌ AI Kit initialization failed — this tool is unavailable.`,``,p?`Error: ${p}`:``,``,`**35 tools are still available** and fully functional:`,`check, eval, test_run, git_context, health, measure, web_fetch, web_search,`,`regex_test, encode, stash, checkpoint, lane, process, time, env, and more.`,``,`Try restarting the MCP server to retry initialization.`].filter(Boolean).join(`
|
|
1
|
+
import{BackgroundTaskScheduler as e}from"./background-task.js";import{clearCompletionCache as t}from"./completions.js";import{installCompressionInterceptor as n}from"./compression-interceptor.js";import{CuratedKnowledgeManager as r}from"./curated-manager.js";import{createElicitor as i,noopElicitor as a}from"./elicitor.js";import{IdleTimer as o}from"./idle-timer.js";import{bridgeMcpLogging as s}from"./mcp-logging.js";import{MemoryMonitor as c}from"./memory-monitor.js";import{registerPrompts as l}from"./prompts.js";import{installReplayInterceptor as u}from"./replay-interceptor.js";import{ResourceNotifier as d}from"./resources/resource-notifier.js";import{registerResources as f}from"./resources/resources.js";import{createSamplingClient as p}from"./sampling.js";import{installStructuredContentGuard as m}from"./structured-content-guard.js";import{getToolMeta as h}from"./tool-metadata.js";import{installToolPrefix as g}from"./tool-prefix.js";import{ToolTimeoutError as _,getToolTimeout as v,withTimeout as ee}from"./tool-timeout.js";import{registerAnalyzeDependenciesTool as y,registerAnalyzeDiagramTool as b,registerAnalyzeEntryPointsTool as x,registerAnalyzePatternsTool as te,registerAnalyzeStructureTool as S,registerAnalyzeSymbolsTool as C,registerBlastRadiusTool as w}from"./tools/analyze.tools.js";import{registerAuditTool as ne}from"./tools/audit.tool.js";import{registerBrainstormTool as T}from"./tools/brainstorm.tool.js";import{initBridgeComponents as E,registerErPullTool as D,registerErPushTool as O,registerErSyncStatusTool as re}from"./tools/bridge.tools.js";import{registerConfigTool as k}from"./tools/config.tool.js";import{registerCompactTool as A,registerDeadSymbolsTool as j,registerFileSummaryTool as M,registerFindTool as ie,registerScopeMapTool as N,registerSymbolTool as P,registerTraceTool as ae}from"./tools/context.tools.js";import{registerErEvolveReviewTool as oe}from"./tools/evolution.tools.js";import{registerBatchTool as se,registerCheckTool as 
F,registerDelegateTool as I,registerEvalTool as L,registerParseOutputTool as R,registerTestRunTool as z}from"./tools/execution.tools.js";import{registerFlowTools as ce}from"./tools/flow.tools.js";import{registerDigestTool as le,registerEvidenceMapTool as B,registerForgeClassifyTool as V,registerForgeGroundTool as ue,registerStratumCardTool as de}from"./tools/forge.tools.js";import{registerForgetTool as fe}from"./tools/forget.tool.js";import{registerGraphTool as pe}from"./tools/graph.tool.js";import{registerGuideTool as H,registerHealthTool as U,registerProcessTool as W,registerWatchTool as G,registerWebFetchTool as K}from"./tools/infra.tools.js";import{registerListTool as me}from"./tools/list.tool.js";import{registerLookupTool as he}from"./tools/lookup.tool.js";import{registerCodemodTool as ge,registerDataTransformTool as q,registerDiffParseTool as _e,registerGitContextTool as ve,registerRenameTool as ye}from"./tools/manipulation.tools.js";import{registerOnboardTool as be}from"./tools/onboard.tool.js";import{registerCheckpointTool as xe,registerLaneTool as Se,registerQueueTool as Ce,registerStashTool as we,registerWorksetTool as Te}from"./tools/persistence.tools.js";import{registerErUpdatePolicyTool as Ee}from"./tools/policy.tools.js";import{registerPresentTool as De}from"./tools/present/tool.js";import"./tools/present/index.js";import{registerProduceKnowledgeTool as Oe}from"./tools/produce.tool.js";import{registerReadTool as ke}from"./tools/read.tool.js";import{registerReindexTool as Ae}from"./tools/reindex.tool.js";import{registerRememberTool as je}from"./tools/remember.tool.js";import{registerReplayTool as Me}from"./tools/replay.tool.js";import{registerRestoreTool as Ne}from"./tools/restore.tool.js";import{registerSearchTool as Pe}from"./tools/search.tool.js";import{getCurrentVersion as Fe}from"./version-check.js";import{registerEarlyStatusTool as Ie,registerStatusTool as Le}from"./tools/status.tool.js";import{registerUpdateTool as 
Re}from"./tools/update.tool.js";import{registerChangelogTool as ze,registerEncodeTool as Be,registerEnvTool as Ve,registerHttpTool as He,registerMeasureTool as Ue,registerRegexTestTool as We,registerSchemaValidateTool as Ge,registerSnippetTool as Ke,registerTimeTool as qe,registerWebSearchTool as Je}from"./tools/utility.tools.js";import{existsSync as Ye,statSync as Xe}from"node:fs";import{resolve as Ze}from"node:path";import{AIKIT_PATHS as Qe,createLogger as $e,serializeError as J}from"../../core/dist/index.js";import{initializeWasm as et}from"../../chunker/dist/index.js";import{OnnxEmbedder as tt}from"../../embeddings/dist/index.js";import{EvolutionCollector as nt,PolicyStore as rt}from"../../enterprise-bridge/dist/index.js";import{FileHashCache as it,IncrementalIndexer as at}from"../../indexer/dist/index.js";import{SqliteGraphStore as ot,createStore as st}from"../../store/dist/index.js";import{FileCache as ct}from"../../tools/dist/index.js";import{completable as lt}from"@modelcontextprotocol/sdk/server/completable.js";import{McpServer as ut}from"@modelcontextprotocol/sdk/server/mcp.js";import{z as dt}from"zod";const Y=$e(`server`);async function X(e){Y.info(`Initializing AI Kit components`);let[t,n,i,a]=await Promise.all([(async()=>{let t=new tt({model:e.embedding.model,dimensions:e.embedding.dimensions});return await t.initialize(),Y.info(`Embedder loaded`,{modelId:t.modelId,dimensions:t.dimensions}),t})(),(async()=>{let t=await st({backend:e.store.backend,path:e.store.path});return await t.initialize(),Y.info(`Store initialized`),t})(),(async()=>{let t=new ot({path:e.store.path});return await t.initialize(),Y.info(`Graph store initialized`),t})(),(async()=>{let e=await et();return e?Y.info(`WASM tree-sitter enabled for AST analysis`):Y.warn(`WASM tree-sitter not available; analyzers will use regex fallback`),e})()]),o=new at(t,n),s=new it(e.store.path);s.load(),o.setHashCache(s);let c=e.curated.path,l=new r(c,n,t);o.setGraphStore(i);let u=E(e.er),d=u?new 
rt(e.curated.path):void 0;d&&Y.info(`Policy store initialized`,{ruleCount:d.getRules().length});let f=u?new nt:void 0,p=Ze(e.sources[0]?.path??process.cwd(),Qe.aiKb),m=Ye(p),h=e.onboardDir?Ye(e.onboardDir):!1,g=m||h,_,v=m?p:e.onboardDir;if(g&&v)try{_=Xe(v).mtime.toISOString()}catch{}return Y.info(`Onboard state detected`,{onboardComplete:g,onboardTimestamp:_,aiKbExists:m,onboardDirExists:h}),{embedder:t,store:n,indexer:o,curated:l,graphStore:i,fileCache:new ct,bridge:u,policyStore:d,evolutionCollector:f,onboardComplete:g,onboardTimestamp:_}}function ft(e,t){let n=new ut({name:t.serverName??`aikit`,version:Fe()},{capabilities:{logging:{},completions:{},prompts:{}}});return s(n),g(n,t.toolPrefix??``),Z(n,e,t,i(n),new d(n),p(n)),l(n,{curated:e.curated,store:e.store,graphStore:e.graphStore},t.indexMode),n}function Z(e,t,r,i,a,o,s,c){u(e),m(e),n(e),Pe(e,t.embedder,t.store,t.graphStore,t.bridge,t.evolutionCollector,o),he(e,t.store),Le(e,t.store,t.graphStore,t.curated,{onboardComplete:t.onboardComplete,onboardTimestamp:t.onboardTimestamp},r,s,c),k(e,r),Ae(e,t.indexer,r,t.curated,t.store,a,s),je(e,t.curated,t.policyStore,t.evolutionCollector,a),Re(e,t.curated,a),fe(e,t.curated,a),ke(e,t.curated),me(e,t.curated),S(e,t.store,t.embedder),y(e,t.store,t.embedder),C(e,t.store,t.embedder),te(e,t.store,t.embedder),x(e,t.store,t.embedder),b(e,t.store,t.embedder),w(e,t.store,t.embedder,t.graphStore),Oe(e,r),be(e,t.store,t.embedder,r),pe(e,t.graphStore),ne(e,t.store,t.embedder);let 
l=r.sources[0]?.path??process.cwd();A(e,t.embedder,t.fileCache,l),N(e,t.embedder,t.store),ie(e,t.embedder,t.store),R(e),Te(e),F(e),se(e,t.embedder,t.store),P(e,t.embedder,t.store,t.graphStore),L(e),z(e),we(e),ve(e),_e(e),ye(e),ge(e),Ne(e),M(e,t.fileCache,l),xe(e),q(e),ae(e,t.embedder,t.store,t.graphStore),W(e),G(e),j(e,t.embedder,t.store),I(e,o),U(e),Se(e),Ce(e),K(e),H(e,s),B(e),le(e,t.embedder),V(e),de(e,t.embedder,t.fileCache),ue(e,t.embedder,t.store),De(e,i),i&&T(e,i),Je(e),He(e),We(e),Be(e),Ue(e),ze(e),Ge(e),Ke(e),Ve(e),qe(e),ce(e,r),t.bridge&&(O(e,t.bridge,t.evolutionCollector),D(e,t.bridge),re(e,t.bridge)),t.policyStore&&Ee(e,t.policyStore),t.evolutionCollector&&oe(e,t.evolutionCollector),f(e,t.store,t.curated),Me(e)}async function pt(e){let t=await X(e),n=ft(t,e);Y.info(`MCP server configured`,{toolCount:$.length,resourceCount:2});let r=async()=>{try{let n=e.sources.map(e=>e.path).join(`, `);Y.info(`Running initial index`,{sourcePaths:n});let r=await t.indexer.index(e,e=>{e.phase===`crawling`||e.phase===`done`||(e.phase===`chunking`&&e.currentFile&&Y.debug(`Indexing file`,{current:e.filesProcessed+1,total:e.filesTotal,file:e.currentFile}),e.phase===`cleanup`&&Y.debug(`Index cleanup`,{staleEntries:e.filesTotal-e.filesProcessed}))});Y.info(`Initial index complete`,{filesProcessed:r.filesProcessed,filesSkipped:r.filesSkipped,chunksCreated:r.chunksCreated,durationMs:r.durationMs});try{await t.store.createFtsIndex()}catch(e){Y.warn(`FTS index creation failed`,J(e))}try{let e=await t.curated.reindexAll();Y.info(`Curated re-index complete`,{indexed:e.indexed})}catch(e){Y.error(`Curated re-index failed`,J(e))}}catch(e){Y.error(`Initial index failed; will retry on aikit_reindex`,J(e))}},i=async()=>{Y.info(`Shutting down`),await Promise.all([t.embedder.shutdown().catch(()=>{}),t.graphStore.close().catch(()=>{}),t.store.close().catch(()=>{})]),process.exit(0)};process.on(`SIGINT`,i),process.on(`SIGTERM`,i);let 
a=process.ppid,o=setInterval(()=>{try{process.kill(a,0)}catch{Y.info(`Parent process died; shutting down`,{parentPid:a}),clearInterval(o),i()}},5e3);return o.unref(),{server:n,runInitialIndex:r,shutdown:i}}const mt=new Set(`batch.brainstorm.changelog.check.checkpoint.codemod.compact.config.data_transform.delegate.diff_parse.digest.encode.env.eval.evidence_map.file_summary.forge_classify.git_context.graph.guide.health.http.lane.measure.onboard.parse_output.present.process.produce_knowledge.queue.read.regex_test.reindex.remember.rename.replay.restore.schema_validate.scope_map.snippet.stash.status.stratum_card.test_run.time.update.forget.list.watch.web_fetch.web_search.workset`.split(`.`)),ht=5e3,Q=new Set(`brainstorm.changelog.check.checkpoint.codemod.data_transform.delegate.diff_parse.encode.env.eval.evidence_map.forge_classify.git_context.guide.present.health.http.lane.measure.parse_output.process.produce_knowledge.queue.regex_test.rename.replay.restore.schema_validate.snippet.stash.status.test_run.time.watch.web_fetch.web_search.workset`.split(`.`));function gt(e){F(e),L(e),z(e),R(e),I(e),ve(e),_e(e),ye(e),ge(e),q(e),Te(e),we(e),xe(e),Ne(e),Se(e),Ce(e),U(e),W(e),G(e),K(e),H(e),B(e),V(e),De(e),T(e,a),Oe(e),Me(e),Ie(e),Je(e),He(e),We(e),Be(e),Ue(e),ze(e),Ge(e),Ke(e),Ve(e),qe(e)}const 
$=`analyze_dependencies.analyze_diagram.analyze_entry_points.analyze_patterns.analyze_structure.analyze_symbols.audit.batch.blast_radius.brainstorm.changelog.check.checkpoint.codemod.compact.config.data_transform.dead_symbols.delegate.diff_parse.digest.encode.env.eval.evidence_map.file_summary.find.flow_info.flow_list.flow_reset.flow_start.flow_status.flow_step.forge_classify.forge_ground.forget.git_context.graph.guide.health.http.lane.list.lookup.measure.onboard.parse_output.present.process.produce_knowledge.queue.read.regex_test.reindex.remember.rename.replay.restore.schema_validate.scope_map.search.snippet.stash.status.stratum_card.symbol.test_run.time.trace.update.watch.web_fetch.web_search.workset`.split(`.`);function _t(n,r){let a=new ut({name:n.serverName??`aikit`,version:Fe()},{capabilities:{logging:{},completions:{},prompts:{}}}),u=`initializing`,f=``,m=!1,y=null,b=null,x=null;function te(e){if(!e||typeof e!=`object`)return[];let t=e,n=[];for(let e of[`path`,`file`,`source_path`,`sourcePath`,`filePath`]){let r=t[e];typeof r==`string`&&r&&n.push(r)}for(let e of[`changed_files`,`paths`,`files`]){let r=t[e];if(Array.isArray(r))for(let e of r){if(typeof e==`string`){n.push(e);continue}e&&typeof e==`object`&&typeof e.path==`string`&&n.push(e.path)}}if(Array.isArray(t.sources))for(let e of t.sources)e&&typeof e==`object`&&typeof e.path==`string`&&n.push(e.path);return n}let S=()=>u===`failed`?[`❌ AI Kit initialization failed — this tool is unavailable.`,``,f?`Error: ${f}`:``,``,`**35 tools are still available** and fully functional:`,`check, eval, test_run, git_context, health, measure, web_fetch, web_search,`,`regex_test, encode, stash, checkpoint, lane, process, time, env, and more.`,``,`Try restarting the MCP server to retry initialization.`].filter(Boolean).join(`
|
|
2
2
|
`):[`AI Kit is still initializing (loading embeddings model & store).`,``,`**35 tools are already available** while initialization completes — including:`,`check, eval, test_run, git_context, health, measure, web_fetch, web_search,`,`regex_test, encode, stash, checkpoint, lane, process, time, env, and more.`,``,`This tool requires the AI Kit index. Please retry in a few seconds,`,`or use one of the available tools above in the meantime.`].join(`
|
|
3
|
-
`);
|
|
3
|
+
`);s(a),g(a,n.toolPrefix??``);let C=a.sendToolListChanged.bind(a);a.sendToolListChanged=()=>{};let w=[];for(let e of $){let t=h(e),n=a.registerTool(e,{title:t.title,description:`${t.title} — initializing, available shortly`,inputSchema:{},annotations:t.annotations},async()=>({content:[{type:`text`,text:S()}]}));Q.has(e)?n.remove():w.push(n)}gt(a),a.sendToolListChanged=C;let ne=a.registerResource(`aikit-status`,`aikit://status`,{description:`AI Kit status (initializing...)`,mimeType:`text/plain`},async()=>({contents:[{uri:`aikit://status`,text:`AI Kit is initializing...`,mimeType:`text/plain`}]})),T=a.registerPrompt(`_init`,{description:`Initializing AI Kit…`,argsSchema:{_dummy:lt(dt.string(),()=>[])}},async()=>({messages:[]})),E,D=new Promise(e=>{E=e}),O,re=new Promise(e=>{O=e}),k=()=>O?.(),A=(async()=>{await re;let e;try{e=await X(n)}catch(e){u=`failed`,f=e instanceof Error?e.message:String(e),Y.error(`AI Kit initialization failed — server continuing with zero-dep tools only`,{error:f});return}let s=a.sendToolListChanged.bind(a);a.sendToolListChanged=()=>{};let h=a.sendPromptListChanged.bind(a);a.sendPromptListChanged=()=>{};let g=a.sendResourceListChanged.bind(a);a.sendResourceListChanged=()=>{};for(let e of w)e.remove();ne.remove(),T.remove();let S=a._registeredTools??{};for(let e of Q)S[e]?.remove();let C=new d(a),D=p(a);Z(a,e,n,i(a),C,D,r,r===`smart`?(()=>{let e=x;return e?.getState?e.getState():null}):null),l(a,{curated:e.curated,store:e.store,graphStore:e.graphStore},r),a.sendToolListChanged=s,a.sendPromptListChanged=h,a.sendResourceListChanged=g,Promise.resolve(a.sendToolListChanged()).catch(()=>{}),Promise.resolve(a.sendPromptListChanged()).catch(()=>{}),Promise.resolve(a.sendResourceListChanged()).catch(()=>{});let O=a._registeredTools??{};for(let[t,n]of Object.entries(O)){if(mt.has(t))continue;let r=n.handler;n.handler=async(...n)=>{if(!e.indexer.isIndexing)return r(...n);let i=m?`re-indexing`:`running initial index`,a=new 
Promise(e=>setTimeout(()=>e({content:[{type:`text`,text:`⏳ AI Kit is ${i}. The tool "${t}" timed out waiting for index data (${ht/1e3}s).\n\nThe existing index may be temporarily locked. Please retry shortly — indexing will complete automatically.`}]}),ht));return Promise.race([r(...n),a])}}for(let[e,t]of Object.entries(O)){let n=t.handler,r=v(e);t.handler=async(...t)=>{try{return await ee(()=>n(...t),r,e)}catch(t){if(t instanceof _)return{content:[{type:`text`,text:`⏳ Tool "${e}" timed out after ${r/1e3}s. This may indicate a long-running operation. Please retry or break the task into smaller steps.`}]};throw t}}}let k=Object.keys(O).length;k<$.length&&Y.warn(`ALL_TOOL_NAMES count mismatch`,{expectedToolCount:$.length,registeredToolCount:k}),Y.info(`MCP server configured`,{toolCount:$.length,resourceCount:4});let A=new c;A.onPressure((e,n)=>{e===`warning`&&t(),e===`critical`&&(Y.warn(`Memory pressure critical — consider restarting`,{rssMB:Math.round(n/1024/1024)}),t())}),A.start();let j=new o;b=j,j.onIdle(async()=>{if(M.isRunning||e.indexer.isIndexing){Y.info(`Idle cleanup deferred — background tasks still running`),j.touch();return}Y.info(`Idle cleanup: closing store and graph connections`);try{await Promise.all([e.store.close().catch(()=>{}),e.graphStore.close().catch(()=>{})])}catch{}}),j.touch();for(let e of Object.values(O)){let t=e.handler;e.handler=async(...e)=>{if(j.touch(),x){let t=te(e[0]);t.length>0&&x.prioritize(...t)}return t(...e)}}y=e,E?.(e)})(),j=async()=>{let e=await D;b?.setBusy(!0);try{let t=n.sources.map(e=>e.path).join(`, `);Y.info(`Running initial index`,{sourcePaths:t});let r=await e.indexer.index(n,e=>{e.phase===`crawling`||e.phase===`done`||(e.phase===`chunking`&&e.currentFile&&Y.debug(`Indexing file`,{current:e.filesProcessed+1,total:e.filesTotal,file:e.currentFile}),e.phase===`cleanup`&&Y.debug(`Index cleanup`,{staleEntries:e.filesTotal-e.filesProcessed}))});m=!0,Y.info(`Initial index 
complete`,{filesProcessed:r.filesProcessed,filesSkipped:r.filesSkipped,chunksCreated:r.chunksCreated,durationMs:r.durationMs});try{await e.store.createFtsIndex()}catch(e){Y.warn(`FTS index creation failed`,J(e))}try{let t=await e.curated.reindexAll();Y.info(`Curated re-index complete`,{indexed:t.indexed})}catch(e){Y.error(`Curated re-index failed`,J(e))}}catch(e){Y.error(`Initial index failed; will retry on aikit_reindex`,J(e))}finally{b?.setBusy(!1)}},M=new e,ie=()=>M.schedule({name:`initial-index`,fn:j}),N=process.ppid,P=setInterval(()=>{try{process.kill(N,0)}catch{Y.info(`Parent process died; shutting down`,{parentPid:N}),clearInterval(P),D.then(async e=>{await Promise.all([e.embedder.shutdown().catch(()=>{}),e.graphStore.close().catch(()=>{}),e.store.close().catch(()=>{})])}).catch(()=>{}).finally(()=>process.exit(0))}},5e3);return P.unref(),{server:a,startInit:k,ready:A,runInitialIndex:ie,get kb(){return y},scheduler:M,setSmartScheduler(e){x=e}}}export{$ as ALL_TOOL_NAMES,_t as createLazyServer,ft as createMcpServer,pt as createServer,X as initializeKnowledgeBase,Z as registerMcpTools};
|
|
@@ -1,8 +1,8 @@
|
|
|
1
|
-
import{getToolMeta as e}from"../tool-metadata.js";import{AnalyzeStructureOutputSchema as t,BlastRadiusOutputSchema as n}from"../output-schemas.js";import{
|
|
1
|
+
import{getToolMeta as e}from"../tool-metadata.js";import{AnalyzeStructureOutputSchema as t,BlastRadiusOutputSchema as n}from"../output-schemas.js";import{z as r}from"zod";import{createHash as i}from"node:crypto";import{createLogger as a,serializeError as o}from"../../../core/dist/index.js";import{BlastRadiusAnalyzer as s,DependencyAnalyzer as c,DiagramGenerator as l,EntryPointAnalyzer as u,PatternAnalyzer as d,StructureAnalyzer as f,SymbolAnalyzer as p}from"../../../analyzers/dist/index.js";import{WasmRuntime as m}from"../../../chunker/dist/index.js";import{gitContext as h,truncateToTokenBudget as g}from"../../../tools/dist/index.js";const _=a(`tools`),v=r.number().min(100).max(5e4).optional().describe(`Maximum token budget for the response. When set, output is truncated to fit.`);function y(e,t){return t?g(e,t):e}function b(){let e=[];return m.get()||e.push(`Tree-sitter unavailable — using regex fallback, symbol/pattern confidence reduced`),e.length===0?``:`\n\n> **⚠ Caveats:** ${e.join(`; `)}`}function x(e){return(e??[]).map(e=>{if(typeof e==`string`)return e;if(e&&typeof e==`object`&&`path`in e)return typeof e.path==`string`?e.path:void 0}).filter(e=>!!e)}function S(e){let t=[],n=e.filter(e=>/\.(ts|tsx|js|jsx)$/.test(e)&&/(service|store|model|schema|migration)/i.test(e)),r=e.filter(e=>/\.(ts|tsx|js|jsx)$/.test(e)&&!n.includes(e)),i=e.filter(e=>!/\.(ts|tsx|js|jsx)$/.test(e));return(n.length>0||r.length>0||i.length>0)&&(t.push(`
|
|
2
2
|
|
|
3
3
|
### Risk Assessment`),n.length>0&&t.push(`- 🔴 **High risk** (${n.length}): ${n.slice(0,5).map(e=>`\`${e}\``).join(`, `)}`),r.length>0&&t.push(`- 🟡 **Medium risk** (${r.length}): source files`),i.length>0&&t.push(`- 🟢 **Low risk** (${i.length}): non-source files`)),t.join(`
|
|
4
4
|
`)}function C(e){let t=e.replace(/\\/g,`/`);return/(^|\/)__tests__\/|\.(test|spec)\.[jt]sx?$/i.test(t)?`Tests`:/(controller|route|handler|api)/i.test(t)?`API`:/(service|store|model|schema|migration|repo|repository|db|database)/i.test(t)?`Core/Data`:/\.(ts|tsx|js|jsx)$/.test(t)?`Source`:`Config/Docs`}function w(e){if(e.length===0)return``;let t=[`Core/Data`,`API`,`Source`,`Tests`,`Config/Docs`],n=new Map;for(let t of e){let e=C(t),r=n.get(e)??[];r.push(t),n.set(e,r)}let r=[`
|
|
5
5
|
|
|
6
6
|
### Layer Classification`];for(let e of t){let t=n.get(e);if(!t?.length)continue;let i=t.slice(0,5).map(e=>`\`${e}\``).join(`, `),a=t.length>5?`, ... and ${t.length-5} more`:``;r.push(`- **${e}** (${t.length}): ${i}${a}`)}return r.join(`
|
|
7
|
-
`)}async function T(e,t,n,
|
|
8
|
-
`),e.size>20&&(d+=`\n- ... and ${e.size-20} more`))}catch{}let f=w(o),p=S(o),m=u.output+d+f+p+b()+"\n\n---\n_Analysis auto-saved to KB. Next: Use `analyze_dependencies` to see the full import graph, or `analyze_symbols` to inspect affected exports._";T(
|
|
7
|
+
`)}async function T(e,t,n,r,a){try{let o=`produced/analysis/${n}/${i(`sha256`).update(r).digest(`hex`).slice(0,12)}.md`,s=i(`sha256`).update(a).digest(`hex`).slice(0,16),c=new Date().toISOString(),l=a.length>2e3?a.split(/(?=^## )/m).filter(e=>e.trim().length>0):[a],u=l.map((e,t)=>({id:i(`sha256`).update(`${o}::${t}`).digest(`hex`).slice(0,16),content:e.trim(),sourcePath:o,contentType:`produced-knowledge`,chunkIndex:t,totalChunks:l.length,startLine:0,endLine:0,fileHash:s,indexedAt:c,origin:`produced`,tags:[`analysis`,n],category:`analysis`,version:1})),d=await t.embedBatch(u.map(e=>e.content));await e.upsert(u,d),_.info(`Auto-persisted analysis`,{analyzerName:n,chunkCount:u.length})}catch(e){_.warn(`Auto-persist analysis failed`,{analyzerName:n,...o(e)})}}function E(n,i,a){let s=new f,c=e(`analyze_structure`);n.registerTool(`analyze_structure`,{title:c.title,description:`Analyze the file/directory structure of a codebase. Returns an annotated tree with language stats.`,outputSchema:t,inputSchema:{path:r.string().describe(`Root path to analyze`),max_depth:r.number().min(1).max(10).default(6).describe(`Maximum directory depth`),format:r.enum([`json`,`markdown`]).default(`markdown`).describe(`Output format`),max_tokens:v},annotations:c.annotations},async({path:e,max_depth:t,format:n,max_tokens:r})=>{try{let o=await s.analyze(e,{format:n,maxDepth:t,maxTokens:r});T(i,a,`structure`,e,o.output);let c=o.data;return{content:[{type:`text`,text:y(o.output+"\n\n---\n_Analysis auto-saved to KB. 
Next: Use `analyze_dependencies` for import graphs, or `analyze_patterns` to detect architecture patterns._",r)}],structuredContent:{files:c.fileCount??o.meta.fileCount,packages:c.packageCount??0,languages:c.languages??{},tree:o.output.slice(0,2e3)}}}catch(e){return _.error(`Analysis failed`,o(e)),{content:[{type:`text`,text:`Analysis failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}function D(t,n,i){let a=new c,s=e(`analyze_dependencies`);t.registerTool(`analyze_dependencies`,{title:s.title,description:`Analyze import/require dependencies across a codebase. Shows external packages and internal module graph.`,inputSchema:{path:r.string().describe(`Root path to analyze`),format:r.enum([`json`,`markdown`,`mermaid`]).default(`markdown`).describe(`Output format`),max_tokens:v},annotations:s.annotations},async({path:e,format:t,max_tokens:r})=>{try{let o=await a.analyze(e,{format:t});return T(n,i,`dependencies`,e,o.output),{content:[{type:`text`,text:y(o.output+"\n\n---\n_Analysis auto-saved to KB. Next: Use `analyze_symbols` to explore exported symbols, or `analyze_diagram` for visual representation._",r)}]}}catch(e){return _.error(`Analysis failed`,o(e)),{content:[{type:`text`,text:`Analysis failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}function O(t,n,i){let a=new p,s=e(`analyze_symbols`);t.registerTool(`analyze_symbols`,{title:s.title,description:`Extract exported and local symbols (functions, classes, interfaces, types, constants) from a codebase.`,inputSchema:{path:r.string().describe(`Root path to analyze`),filter:r.string().optional().describe(`Filter symbols by name substring`),format:r.enum([`json`,`markdown`]).default(`markdown`).describe(`Output format`)},annotations:s.annotations},async({path:e,filter:t,format:r})=>{try{let o=await a.analyze(e,{format:r,filter:t});return T(n,i,`symbols`,e,o.output),{content:[{type:`text`,text:o.output+b()+"\n\n---\n_Analysis auto-saved to KB. 
Next: Use `analyze_dependencies` to see import relationships, or `search` to find usage patterns._"}]}}catch(e){return _.error(`Analysis failed`,o(e)),{content:[{type:`text`,text:`Analysis failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}function k(t,n,i){let a=new d,s=e(`analyze_patterns`);t.registerTool(`analyze_patterns`,{title:s.title,description:`Detect architectural patterns, frameworks, and conventions in a codebase using directory structure and code heuristics.`,inputSchema:{path:r.string().describe(`Root path to analyze`)},annotations:s.annotations},async({path:e})=>{try{let t=await a.analyze(e);return T(n,i,`patterns`,e,t.output),{content:[{type:`text`,text:t.output+b()+"\n\n---\n_Analysis auto-saved to KB. Next: Use `analyze_entry_points` to find Lambda handlers and main exports, or `produce_knowledge` for full analysis._"}]}}catch(e){return _.error(`Analysis failed`,o(e)),{content:[{type:`text`,text:`Analysis failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}function A(t,n,i){let a=new u,s=e(`analyze_entry_points`);t.registerTool(`analyze_entry_points`,{title:s.title,description:`Find entry points: Lambda handlers, main exports, CLI bins, and server start scripts.`,inputSchema:{path:r.string().describe(`Root path to analyze`)},annotations:s.annotations},async({path:e})=>{try{let t=await a.analyze(e);return T(n,i,`entry-points`,e,t.output),{content:[{type:`text`,text:t.output+b()+"\n\n---\n_Analysis auto-saved to KB. Next: Use `analyze_dependencies` to see what each entry point imports, or `produce_knowledge` for comprehensive analysis._"}]}}catch(e){return _.error(`Analysis failed`,o(e)),{content:[{type:`text`,text:`Analysis failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}function j(t,n,i){let a=new l,s=e(`analyze_diagram`);t.registerTool(`analyze_diagram`,{title:s.title,description:`Generate a Mermaid diagram of the codebase architecture or dependency graph. 
Returns Mermaid flowchart syntax — paste into any Markdown renderer or Mermaid editor to visualize.`,inputSchema:{path:r.string().describe(`Root path to analyze`),diagram_type:r.enum([`architecture`,`dependencies`]).default(`architecture`).describe(`Type of diagram`)},annotations:s.annotations},async({path:e,diagram_type:t})=>{try{let r=await a.analyze(e,{diagramType:t});return T(n,i,`diagram`,e,r.output),{content:[{type:`text`,text:r.output+"\n\n---\n_Analysis auto-saved to KB. Next: Use `analyze_structure` for detailed file tree, or `produce_knowledge` for comprehensive analysis._"}]}}catch(e){return _.error(`Diagram generation failed`,o(e)),{content:[{type:`text`,text:`Diagram generation failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}function M(t,i,a,c){let l=new s,u=e(`blast_radius`);t.registerTool(`blast_radius`,{title:u.title,description:`Given a list of changed files, trace the dependency graph to find all affected files (direct + transitive importers) and their tests. Useful for scoping code reviews and impact analysis.`,outputSchema:n,inputSchema:{path:r.string().describe(`Root path of the codebase`),files:r.array(r.string()).optional().describe(`Changed file paths (relative to root). If omitted, auto-detects from git status.`),max_depth:r.number().min(1).max(20).default(5).describe(`Maximum transitive dependency depth`),format:r.enum([`json`,`markdown`]).default(`markdown`).describe(`Output format`),max_tokens:v},annotations:u.annotations},async({path:e,files:t,max_depth:n,format:r,max_tokens:s})=>{try{let o=t??[];if(o.length===0)try{let t=await h({cwd:e,includeDiff:!1});if(o=Array.from(new Set([...x(t.status?.staged),...x(t.status?.modified),...x(t.status?.untracked)])),o.length===0)return{content:[{type:`text`,text:"No changed files detected from git status. Provide `files` explicitly or make changes first."}]}}catch{return{content:[{type:`text`,text:"Could not detect changed files from git. 
Provide `files` explicitly."}],isError:!0}}let u=await l.analyze(e,{files:o,maxDepth:n,format:r}),d=``;if(c)try{let e=new Set;for(let t of o){let n=await c.findNodes({sourcePath:t,limit:10});for(let t of n){let n=await c.getNeighbors(t.id,{direction:`incoming`,edgeType:`imports`});for(let t of n.nodes){let n=t.sourcePath??t.name;o.includes(n)||e.add(n)}}}e.size>0&&(d=`\n\n### Graph-discovered importers (${e.size})\n`+[...e].slice(0,20).map(e=>`- \`${e}\``).join(`
|
|
8
|
+
`),e.size>20&&(d+=`\n- ... and ${e.size-20} more`))}catch{}let f=w(o),p=S(o),m=u.output+d+f+p+b()+"\n\n---\n_Analysis auto-saved to KB. Next: Use `analyze_dependencies` to see the full import graph, or `analyze_symbols` to inspect affected exports._";T(i,a,`blast-radius`,e,m);let g=new Set;if(c)for(let e of o)try{let t=await c.findNodes({sourcePath:e,limit:10});for(let e of t){let t=await c.getNeighbors(e.id,{direction:`incoming`,edgeType:`imports`});for(let e of t.nodes){let t=e.sourcePath??e.name;o.includes(t)||g.add(t)}}}catch{}let _=[...g].map(e=>({path:e,impact:`transitive`,reason:`imports changed file`})),v=o.length>10?`high`:o.length>3?`medium`:`low`;return{content:[{type:`text`,text:y(m,s)}],structuredContent:{changedFiles:o,affectedFiles:_,totalAffected:_.length,riskLevel:v}}}catch(e){return _.error(`Blast radius analysis failed`,o(e)),{content:[{type:`text`,text:`Blast radius analysis failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}})}export{D as registerAnalyzeDependenciesTool,j as registerAnalyzeDiagramTool,A as registerAnalyzeEntryPointsTool,k as registerAnalyzePatternsTool,E as registerAnalyzeStructureTool,O as registerAnalyzeSymbolsTool,M as registerBlastRadiusTool};
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{getToolMeta as e}from"../tool-metadata.js";import{
|
|
1
|
+
import{getToolMeta as e}from"../tool-metadata.js";import{basename as t,join as n,resolve as r}from"node:path";import{z as i}from"zod";import{readFile as a}from"node:fs/promises";import{createLogger as o,serializeError as s}from"../../../core/dist/index.js";const c=o(`flow-tools`);function l(e){return{content:[{type:`text`,text:e}]}}function u(e){return e instanceof Error?e.message:String(e)}function d(o,d){let f=d.sources?.[0]?.path??process.cwd(),p=n(d.stateDir??n(d.sources[0].path,`.aikit-state`),`flows`),m=n(p,`registry.json`),h=n(p,`state.json`);function g(e,n){let i;return i=e.sourceType===`builtin`?r(f,`.github`,`flows`,t(e.installPath),n):r(e.installPath,n),i.replaceAll(`\\`,`/`)}function _(e){return e.sourceType===`builtin`?r(f,`.github`,`flows`,t(e.installPath)).replaceAll(`\\`,`/`):e.installPath.replaceAll(`\\`,`/`)}async function v(){let{FlowRegistryManager:e,FlowStateMachine:t}=await import(`../../../flows/dist/index.js`);return{registry:new e(m),stateMachine:new t(h)}}let y=e(`flow_list`);o.registerTool(`flow_list`,{title:y.title,description:`List all installed flows and their steps`,annotations:y.annotations,inputSchema:{}},async()=>{try{let{registry:e,stateMachine:t}=await v(),n=e.list(),r=t.getStatus(),i={flows:n.map(e=>({name:e.name,version:e.version,source:e.source,sourceType:e.sourceType,format:e.format,steps:e.manifest.steps.map(e=>e.id)})),activeFlow:r.success&&r.data?{flow:r.data.flow,status:r.data.status,currentStep:r.data.currentStep}:null};return l(JSON.stringify(i,null,2))}catch(e){return c.error(`flow_list failed`,s(e)),l(`Error: ${u(e)}`)}});let b=e(`flow_info`);o.registerTool(`flow_info`,{title:b.title,description:`Show detailed information about a specific flow`,annotations:b.annotations,inputSchema:{name:i.string().describe(`Flow name to get info for`)}},async({name:e})=>{try{let{registry:t}=await v(),n=t.get(e);if(!n)return l(`Flow "${e}" not found. 
Use flow_list to see available flows.`);let r={name:n.name,version:n.version,description:n.manifest.description,source:n.source,sourceType:n.sourceType,format:n.format,installPath:_(n),registeredAt:n.registeredAt,updatedAt:n.updatedAt,steps:n.manifest.steps.map(e=>({id:e.id,name:e.name,skill:g(n,e.skill),produces:e.produces,requires:e.requires,description:e.description})),agents:n.manifest.agents,artifactsDir:n.manifest.artifacts_dir,install:n.manifest.install};return l(JSON.stringify(r,null,2))}catch(e){return c.error(`flow_info failed`,s(e)),l(`Error: ${u(e)}`)}});let x=e(`flow_start`);o.registerTool(`flow_start`,{title:x.title,description:`Start a flow. Sets the active flow and positions at the first step.`,annotations:x.annotations,inputSchema:{flow:i.string().describe(`Flow name to start (use flow_list to see options)`)}},async({flow:e})=>{try{let{registry:t,stateMachine:n}=await v(),r=t.get(e);if(!r)return l(`Flow "${e}" not found. Use flow_list to see available flows.`);let i=n.start(r.name,r.manifest);if(!i.success||!i.data)return l(`Cannot start: ${i.error}`);let a=i.data,o=r.manifest.steps.find(e=>e.id===a.currentStep),s={started:!0,flow:a.flow,currentStep:a.currentStep,currentStepSkill:r&&o?g(r,o.skill):null,currentStepDescription:o?.description??null,totalSteps:r.manifest.steps.length,stepSequence:r.manifest.steps.map(e=>e.id),artifactsDir:r.manifest.artifacts_dir};return l(JSON.stringify(s,null,2))}catch(e){return c.error(`flow_start failed`,s(e)),l(`Error: ${u(e)}`)}});let S=e(`flow_step`);o.registerTool(`flow_step`,{title:S.title,description:`Advance the active flow: complete current step and move to next, skip current step, or redo current step.`,annotations:S.annotations,inputSchema:{action:i.enum([`next`,`skip`,`redo`]).describe(`next: mark current step done and advance. skip: skip current step. redo: repeat current step.`)}},async({action:e})=>{try{let{registry:t,stateMachine:n}=await v(),r=n.load();if(!r)return l(`No active flow. 
Use flow_start first.`);let i=t.get(r.flow);if(!i)return l(`Flow "${r.flow}" not found in registry.`);let a=n.step(e,i.manifest);if(!a.success||!a.data)return l(`Cannot ${e}: ${a.error}`);let o=a.data,s=o.currentStep?i.manifest.steps.find(e=>e.id===o.currentStep):null,c={flow:o.flow,status:o.status,action:e,currentStep:o.currentStep,currentStepSkill:i&&s?g(i,s.skill):null,currentStepDescription:s?.description??null,completedSteps:o.completedSteps,skippedSteps:o.skippedSteps,totalSteps:i.manifest.steps.length,remaining:i.manifest.steps.filter(e=>!o.completedSteps.includes(e.id)&&!o.skippedSteps.includes(e.id)&&e.id!==o.currentStep).map(e=>e.id)};return l(JSON.stringify(c,null,2))}catch(e){return c.error(`flow_step failed`,s(e)),l(`Error: ${u(e)}`)}});let C=e(`flow_status`);o.registerTool(`flow_status`,{title:C.title,description:`Show the current flow execution state — which flow is active, current step, completed steps, and artifacts.`,annotations:C.annotations,inputSchema:{}},async()=>{try{let{registry:e,stateMachine:t}=await v(),n=t.getStatus();if(!n.success||!n.data)return l(`No active flow. Use flow_start to begin one, or flow_list to see available flows.`);let r=n.data,i=e.get(r.flow),a=i?.manifest.steps.find(e=>e.id===r.currentStep),o=i&&a?g(i,a.skill):null,s={flow:r.flow,status:r.status,currentStep:r.currentStep,currentStepSkill:o,skillPath:o,currentStepDescription:a?.description??null,completedSteps:r.completedSteps,skippedSteps:r.skippedSteps,artifacts:r.artifacts,startedAt:r.startedAt,updatedAt:r.updatedAt,totalSteps:i?.manifest.steps.length??0,progress:i?`${r.completedSteps.length+r.skippedSteps.length}/${i.manifest.steps.length}`:`unknown`};return l(JSON.stringify(s,null,2))}catch(e){return c.error(`flow_status failed`,s(e)),l(`Error: ${u(e)}`)}});let w=e(`flow_read_skill`);o.registerTool(`flow_read_skill`,{title:w.title===`flow_read_skill`?`Flow Read Skill`:w.title,description:`Read the skill or instruction content for a flow step. 
If step is omitted, reads the current step.`,annotations:w.title===`flow_read_skill`?{readOnlyHint:!0,idempotentHint:!0}:w.annotations,inputSchema:{step:i.string().optional().describe(`Step id or name to read. Defaults to the current step.`)}},async({step:e})=>{try{let{registry:t,stateMachine:n}=await v(),r=n.getStatus();if(!r.success||!r.data)return l(`No active flow. Use flow_start to begin one, or flow_list to see available flows.`);let i=r.data,o=t.get(i.flow);if(!o)return l(`Flow "${i.flow}" not found in registry.`);let s=e??i.currentStep;if(!s)return l(`No current step is available for the active flow.`);let c=o.manifest.steps.find(e=>e.id===s||e.name===s);return l(c?await a(g(o,c.skill),`utf-8`):`Step "${s}" not found in flow "${i.flow}".`)}catch(e){return c.error(`flow_read_skill failed`,s(e)),e instanceof Error&&`code`in e&&e.code===`ENOENT`?l(`Could not read skill file: ${e.message}`):l(`Error: ${u(e)}`)}});let T=e(`flow_reset`);o.registerTool(`flow_reset`,{title:T.title,description:`Reset the active flow, clearing all state. Use to start over or switch to a different flow.`,annotations:T.annotations,inputSchema:{}},async()=>{try{let{stateMachine:e}=await v(),t=e.reset();return t.success?l(`Flow state reset. Use flow_start to begin a new flow.`):l(`Reset failed: ${t.error}`)}catch(e){return c.error(`flow_reset failed`,s(e)),l(`Error: ${u(e)}`)}})}export{d as registerFlowTools};
|
|
@@ -1,2 +1,2 @@
|
|
|
1
|
-
import{getToolMeta as e}from"../tool-metadata.js";import{createTaskRunner as t}from"../task-manager.js";import{
|
|
1
|
+
import{getToolMeta as e}from"../tool-metadata.js";import{createTaskRunner as t}from"../task-manager.js";import{z as n}from"zod";import{createHash as r}from"node:crypto";import{createLogger as i,serializeError as a}from"../../../core/dist/index.js";import{onboard as o}from"../../../tools/dist/index.js";const s=i(`tools`);let c=!1;async function l(e,t,n){for(let i of n.steps)if(!(i.status!==`success`||!i.output))try{let a=r(`sha256`).update(n.path).digest(`hex`).slice(0,12),o=`produced/onboard/${i.name}/${a}.md`,s=r(`sha256`).update(i.output).digest(`hex`).slice(0,16),c=new Date().toISOString(),l=i.output.length>2e3?i.output.split(/(?=^## )/m).filter(e=>e.trim().length>0):[i.output],u=l.map((e,t)=>({id:r(`sha256`).update(`${o}::${t}`).digest(`hex`).slice(0,16),content:e.trim(),sourcePath:o,contentType:`produced-knowledge`,chunkIndex:t,totalChunks:l.length,startLine:0,endLine:0,fileHash:s,indexedAt:c,origin:`produced`,tags:[`onboard`,i.name],category:`analysis`,version:1})),d=await t.embedBatch(u.map(e=>e.content));await e.upsert(u,d)}catch(e){s.warn(`Auto-persist onboard step failed`,{stepName:i.name,...a(e)})}}async function u(e,t,n){if(n.autoRemember?.length)for(let i of n.autoRemember)try{let n=r(`sha256`).update(`onboard-remember::${i.title}`).digest(`hex`).slice(0,16),a=new Date().toISOString(),o={id:n,content:`# ${i.title}\n\n${i.content}`,sourcePath:`curated/onboard/${i.category}/${n}.md`,contentType:`curated`,chunkIndex:0,totalChunks:1,startLine:0,endLine:0,fileHash:r(`sha256`).update(i.content).digest(`hex`).slice(0,16),indexedAt:a,origin:`curated`,tags:i.tags,category:i.category,version:1},[s]=await t.embedBatch([o.content]);await e.upsert([o],[s])}catch(e){s.warn(`Auto-persist remember entry failed`,{title:i.title,...a(e)})}}function d(r,i,d,f){let p=e(`onboard`);r.registerTool(`onboard`,{title:p.title,description:`First-time codebase onboarding: runs all analysis tools (structure, dependencies, entry-points, symbols, patterns, diagram) in one command. 
Results are auto-persisted to KB. Use mode=generate to also write structured output to .ai/kb/ directory.`,inputSchema:{path:n.string().describe(`Root path of the codebase to onboard`),mode:n.enum([`memory`,`generate`]).default(`generate`).describe(`Output mode: generate (default) = persist to AI Kit + write .ai/kb/ files; memory = AI Kit vector store only`),out_dir:n.string().optional().describe(`Custom output directory for generate mode (default: <path>/.ai/kb)`)},annotations:p.annotations},async({path:e,mode:n,out_dir:r},p)=>{try{if(c)return{content:[{type:`text`,text:`Onboard is already running. Please wait for it to complete before starting another.`}]};c=!0,s.info(`Starting onboard`,{path:e,mode:n});let m=await o({path:e,mode:n,outDir:r??f?.onboardDir}),h=t(p).createTask(`Onboard`,m.steps.length);for(let e=0;e<m.steps.length;e++){let t=m.steps[e];h.progress(e,`${t.name}: ${t.status}`)}h.complete(`Onboard complete: ${m.steps.filter(e=>e.status===`success`).length}/${m.steps.length} steps succeeded`),l(i,d,m),m.autoRemember?.length&&u(i,d,m).catch(e=>{s.warn(`Auto-persist autoRemember failed`,a(e))});let g=[`## Onboard Complete`,``,`**Path:** \`${m.path}\``,`**Mode:** ${m.mode}`,`**Duration:** ${m.totalDurationMs}ms`,``];m.outDir&&(g.push(`**Output directory:** \`${m.outDir}\``),g.push(``)),g.push(`### Analysis Results`,``);let _=[],v=[];for(let e of m.steps)e.status===`success`?_.push(`- ✓ **${e.name}** (${e.durationMs}ms) — ${e.output.length} chars`):v.push(`- ✗ **${e.name}** — ${e.error}`);g.push(..._),v.length>0&&g.push(``,`### Failed`,``,...v),g.push(``,`---`,``);for(let e of m.steps)e.status===`success`&&g.push(`### ${e.name}`,``,e.output,``,`---`,``);return g.push(`_All results auto-saved to KB.`,m.mode===`generate`?` Files written to \`${m.outDir}\`.`:``," Next: Use `search` to query the knowledge, or `remember` to add custom insights._"),{content:[{type:`text`,text:g.join(`
|
|
2
2
|
`)}]}}catch(e){return s.error(`Onboard failed`,a(e)),{content:[{type:`text`,text:`Onboard failed: ${e instanceof Error?e.message:String(e)}`}],isError:!0}}finally{c=!1}})}export{d as registerOnboardTool};
|
|
@@ -63,8 +63,8 @@ import{escHtml as e}from"../present-utils.js";function t(t,f){let p=typeof f==`s
|
|
|
63
63
|
});
|
|
64
64
|
const order = [...list.children].map(c => c.dataset.id);
|
|
65
65
|
document.getElementById('sortStatus').textContent = 'Reordered: ' + order.join(', ');
|
|
66
|
-
if(window.
|
|
67
|
-
fetch(window.
|
|
66
|
+
if(window.__aikitCallback) {
|
|
67
|
+
fetch(window.__aikitCallback, {method:'POST',headers:{'Content-Type':'application/json'},
|
|
68
68
|
body: JSON.stringify({actionId:'reorder',value:JSON.stringify(order)})}).catch(()=>{});
|
|
69
69
|
}
|
|
70
70
|
});
|
|
@@ -211,8 +211,8 @@ ${a}
|
|
|
211
211
|
});
|
|
212
212
|
document.getElementById('pkApply').addEventListener('click', () => {
|
|
213
213
|
const selected = [...list.querySelectorAll('input:checked')].map(cb => cb.dataset.id);
|
|
214
|
-
if(window.
|
|
215
|
-
fetch(window.
|
|
214
|
+
if(window.__aikitCallback) {
|
|
215
|
+
fetch(window.__aikitCallback, {method:'POST',headers:{'Content-Type':'application/json'},
|
|
216
216
|
body:JSON.stringify({actionId:'pick',value:JSON.stringify(selected)})}).catch(()=>{});
|
|
217
217
|
}
|
|
218
218
|
});
|
|
@@ -347,8 +347,8 @@ ${a}
|
|
|
347
347
|
updatePreview();
|
|
348
348
|
|
|
349
349
|
document.getElementById('fmSubmit').addEventListener('click', () => {
|
|
350
|
-
if(window.
|
|
351
|
-
fetch(window.
|
|
350
|
+
if(window.__aikitCallback) {
|
|
351
|
+
fetch(window.__aikitCallback, {method:'POST',headers:{'Content-Type':'application/json'},
|
|
352
352
|
body:JSON.stringify({actionId:'submit',value:JSON.stringify(getValues())})}).catch(()=>{});
|
|
353
353
|
}
|
|
354
354
|
});
|
|
@@ -15,5 +15,5 @@ import{getToolMeta as e}from"../../tool-metadata.js";import{buildBrowserHtml as
|
|
|
15
15
|
- "html" (default): Rich markdown in chat + embedded UIResource. Use for display-only content (tables, charts, reports, status boards) where no user interaction is needed.
|
|
16
16
|
- "browser": Serves a themed dashboard on a local URL. Use ONLY when you need user interaction back (confirmations, selections, form input). The tool blocks until user clicks an action button, then returns their selection.
|
|
17
17
|
FORMAT RULE: If no user interaction is needed → use "html". If you need user input back → use "browser".
|
|
18
|
-
BROWSER WORKFLOW: After calling present with format "browser", you MUST extract the URL from the response and call openBrowserPage({ url }) to open it in VS Code Simple Browser. A system browser fallback also opens automatically, but always call openBrowserPage yourself.`,annotations:r.annotations,inputSchema:_,_meta:{ui:{resourceUri:h}}},async({format:e,title:t,content:r,actions:i,template:a})=>(e??`html`)===`browser
|
|
18
|
+
BROWSER WORKFLOW: After calling present with format "browser", you MUST extract the URL from the response and call openBrowserPage({ url }) to open it in VS Code Simple Browser. A system browser fallback also opens automatically, but always call openBrowserPage yourself.`,annotations:r.annotations,inputSchema:_,_meta:{ui:{resourceUri:h}}},async({format:e,title:t,content:r,actions:i,template:a})=>(e??`html`)===`browser`||Array.isArray(i)&&i.length>0?await w(t,r,i,n,a):T(t,r,i,a))}async function w(e,r,i,a,o){let s=n(e,r),c=t(e,r,i,o),l=p({uri:`ui://aikit/present-browser.html`,content:{type:`rawHtml`,htmlString:c},encoding:`text`,adapters:{mcpApps:{enabled:!0}}}),u,m,h=Array.isArray(i)?i:[],g=``,_;try{b&&=(b.close(),null),h.length>0&&(u=new Promise(e=>{m=e}));let e=!1;g=await new Promise((t,n)=>{let r=f((t,n)=>{if(e||(e=!0,_&&clearTimeout(_)),t.method===`POST`&&t.url===`/callback`){let e=``;t.on(`data`,t=>{e+=t.toString()}),t.on(`end`,()=>{n.writeHead(200,{"Content-Type":`application/json`,"Access-Control-Allow-Origin":`*`}),n.end(`{"ok":true}`);try{let t=JSON.parse(e);m?.(t)}catch{}});return}if(t.method===`OPTIONS`){n.writeHead(204,{"Access-Control-Allow-Origin":`*`,"Access-Control-Allow-Methods":`POST`,"Access-Control-Allow-Headers":`Content-Type`}),n.end();return}n.writeHead(200,{"Content-Type":`text/html; charset=utf-8`}),n.end(c)});r.listen(0,`127.0.0.1`,()=>{let i=r.address();if(typeof i==`object`&&i){b=r;let n=`http://127.0.0.1:${i.port}`;_=setTimeout(()=>{if(!e)try{d(process.platform===`win32`?`start "" "${n}"`:process.platform===`darwin`?`open "${n}"`:`xdg-open "${n}"`)}catch{}},8e3),_.unref(),t(n)}else n(Error(`Failed to start present server`))}),setTimeout(()=>{r.close(),b===r&&(b=null)},300*1e3).unref()})}catch{}let v=g?`${s}\n\n---\n🌐 **Dashboard opened in browser:** ${g}\n\n**IMPORTANT:** You MUST now open this URL using the \`openBrowserPage\` tool (Simple Browser). If that tool is unavailable, open the URL in the system browser as a fallback. 
If neither works, ask the user to open the URL manually.`:s;if(h.length>0&&a?.available&&u)try{let e=await Promise.race([u,new Promise((e,t)=>setTimeout(()=>t(Error(`timeout`)),300*1e3))]);return{content:[{type:`text`,text:`${v}\n\n✅ **Selected:** ${e.actionId} = \`${e.value}\``},l]}}catch{return{content:[{type:`text`,text:`${v}\n\n⚠️ *No selection received (timed out).*`},l]}}return{content:[{type:`text`,text:v},l]}}function T(e,r,i,a){let o=Array.isArray(i)?i:[],s=n(e,r);if(o.length>0){let e=[``];for(let t=0;t<o.length;t++){let n=o[t],r=typeof n.label==`string`?n.label:`Action ${t+1}`;if(n.type===`select`&&Array.isArray(n.options)){let i=n.options.map(e=>typeof e==`string`?e:e.label).join(`, `);e.push(`${t+1}. **${r}** — choose: ${i}`)}else e.push(`${t+1}. **${r}**`)}s+=`\n${e.join(`
|
|
19
19
|
`)}`}let c=p({uri:`ui://aikit/present-static.html`,content:{type:`rawHtml`,htmlString:t(e,r,i,a)},encoding:`text`,adapters:{mcpApps:{enabled:!0}}});return{content:[{type:`text`,text:s},c],structuredContent:{title:e,content:r,actions:o}}}export{w as formatAsBrowser,T as formatAsHtml,S as getPresentHtml,C as registerPresentTool,x as resolvePresentHtml};
|
|
@@ -1,5 +1,5 @@
|
|
|
1
|
-
import{getToolMeta as e}from"../tool-metadata.js";import{SearchOutputSchema as t}from"../output-schemas.js";import{fanOutFtsSearch as n,fanOutSearch as r,openWorkspaceStores as i,resolveWorkspaces as a}from"../cross-workspace.js";import{curatedResourceLink as o,extractCuratedPath as s}from"../resource-links.js";import{basename as c}from"node:path";import{
|
|
2
|
-
`)[0].slice(0,500);if(t&&t!==e){let n=await
|
|
1
|
+
import{getToolMeta as e}from"../tool-metadata.js";import{SearchOutputSchema as t}from"../output-schemas.js";import{fanOutFtsSearch as n,fanOutSearch as r,openWorkspaceStores as i,resolveWorkspaces as a}from"../cross-workspace.js";import{curatedResourceLink as o,extractCuratedPath as s}from"../resource-links.js";import{basename as c}from"node:path";import{z as l}from"zod";import{stat as u}from"node:fs/promises";import{CONTENT_TYPES as d,KNOWLEDGE_ORIGINS as f,SOURCE_TYPES as p,computePartitionKey as m,createLogger as h,serializeError as g}from"../../../core/dist/index.js";import{bookendReorder as _,graphAugmentSearch as v,stashGet as y,truncateToTokenBudget as b}from"../../../tools/dist/index.js";import{mergeResults as x}from"../../../enterprise-bridge/dist/index.js";const S=h(`tools`);function ee(e){let t=[],n=c(process.cwd());n&&t.push(`[project: ${n}]`);let r=y(`__context_boost`);return r&&t.push(`[focus: ${r.value}]`),t.length===0?e:`${t.join(` `)} ${e}`}async function C(e,t,n,r,i){if(!e||t>=e.config.fallbackThreshold&&n.length>0)return{results:n,triggered:!1,cacheHit:!1};let a=!1;try{let t=e.cache.get(r);return t?a=!0:(t=await e.client.search(r,i),t.length>0&&e.cache.set(r,t)),t.length>0?{results:x(n,t,i).map(e=>({record:{id:`er:${e.sourcePath}`,content:e.content,sourcePath:e.source===`er`?`[ER] ${e.sourcePath}`:e.sourcePath,startLine:e.startLine??0,endLine:e.endLine??0,contentType:e.contentType??`documentation`,headingPath:e.headingPath,origin:e.source===`er`?`curated`:e.origin??`indexed`,category:e.category,tags:e.tags??[],chunkIndex:0,totalChunks:1,fileHash:``,indexedAt:new Date().toISOString(),version:1},score:e.score})),triggered:!0,cacheHit:a}:{results:n,triggered:!0,cacheHit:a}}catch(e){return S.warn(`ER fallback failed`,g(e)),{results:n,triggered:!0,cacheHit:a}}}function te(e,t,n=60){let r=new Map;for(let t=0;t<e.length;t++){let i=e[t];r.set(i.record.id,{record:i.record,score:1/(n+t+1)})}for(let e=0;e<t.length;e++){let 
i=t[e],a=r.get(i.record.id);a?a.score+=1/(n+e+1):r.set(i.record.id,{record:i.record,score:1/(n+e+1)})}return[...r.values()].sort((e,t)=>t.score-e.score).map(({record:e,score:t})=>({record:e,score:t}))}function w(e,t){let n=t.toLowerCase().split(/\s+/).filter(e=>e.length>=2);return n.length<2?e:e.map(e=>{let t=e.record.content.toLowerCase();if(t.length>5e3)return e;let r=n.map(e=>{let n=[],r=t.indexOf(e);for(;r!==-1&&n.length<10;)n.push(r),r=t.indexOf(e,r+1);return n});if(r.some(e=>e.length===0))return e;let i=t.length;for(let e of r[0]){let t=e,a=e+n[0].length;for(let i=1;i<r.length;i++){let o=r[i][0],s=Math.abs(o-e);for(let t=1;t<r[i].length;t++){let n=Math.abs(r[i][t]-e);n<s&&(s=n,o=r[i][t])}t=Math.min(t,o),a=Math.max(a,o+n[i].length)}i=Math.min(i,a-t)}let a=1+.25/(1+i/200);return{record:e.record,score:e.score*a}}).sort((e,t)=>t.score-e.score)}function T(e,t,n=8){let r=new Set(t.toLowerCase().split(/\s+/).filter(e=>e.length>=2)),i=new Map,a=e.length;for(let t of e){let e=t.record.content.split(/[^a-zA-Z0-9_]+/).filter(e=>e.length>=3&&!E.has(e.toLowerCase())),n=new Set;for(let t of e){let e=t.toLowerCase();/[_A-Z]/.test(t)&&i.set(`__id__${e}`,1),n.has(e)||(n.add(e),i.set(e,(i.get(e)??0)+1))}}let o=[];for(let[e,t]of i){if(e.startsWith(`__id__`)||r.has(e)||t>a*.8)continue;let n=Math.log(a/t),s=i.has(`__id__${e}`)?1:0,c=e.length>8?.5:0;o.push({term:e,score:n+s+c})}return o.sort((e,t)=>t.score-e.score).slice(0,n).map(e=>e.term)}const E=new Set(`the.and.for.are.but.not.you.all.can.had.her.was.one.our.out.has.have.from.this.that.with.they.been.said.each.which.their.will.other.about.many.then.them.these.some.would.make.like.into.could.time.very.when.come.just.know.take.people.also.back.after.only.more.than.over.such.import.export.const.function.return.true.false.null.undefined.string.number.boolean.void.type.interface`.split(`.`));async function D(e,t){try{let n=await e.getStats();if(!n.lastIndexedAt)return;let r=new Date(n.lastIndexedAt).getTime(),i=Date.now(),a=[...new 
Set(t.map(e=>e.record.sourcePath))].filter(e=>!e.startsWith(`[ER]`)).slice(0,5);if(a.length===0)return;let o=0;for(let e of a)try{(await u(e)).mtimeMs>r&&o++}catch{o++}if(o>0){let e=i-r,t=Math.floor(e/6e4),n=t<1?`<1 min`:`${t} min`;return`> ⚠️ **Index may be stale** — ${o} file(s) modified since last index (${n} ago). Use \`reindex\` to refresh.`}}catch{}}function O(c,u,h,y,x,E,O){let k=e(`search`);c.registerTool(`search`,{title:k.title,description:`Search AI Kit for code, docs, and prior decisions. Default choice for discovery. Modes: hybrid (default), semantic, keyword. For multi-strategy precision queries use find; for a known file path use lookup.`,outputSchema:t,inputSchema:{query:l.string().max(5e3).describe(`Natural language search query`),limit:l.number().min(1).max(20).default(5).describe(`Maximum results to return`),search_mode:l.enum([`hybrid`,`semantic`,`keyword`]).default(`hybrid`).describe(`Search strategy: hybrid (vector + FTS + RRF fusion, default), semantic (vector only), keyword (FTS only)`),content_type:l.enum(d).optional().describe(`Filter by content type`),source_type:l.enum(p).optional().describe(`Coarse filter: "source" (code only), "documentation" (md, curated), "test", "config". Overrides content_type if both set.`),origin:l.enum(f).optional().describe(`Filter by knowledge origin`),category:l.string().optional().describe(`Filter by category (e.g., decisions, patterns, conventions)`),tags:l.array(l.string()).optional().describe(`Filter by tags (returns results matching ANY of the specified tags)`),min_score:l.number().min(0).max(1).default(.25).describe(`Minimum similarity score`),graph_hops:l.number().min(0).max(3).default(1).describe(`Number of graph hops to augment results with connected entities (0 = disabled, 1 = direct connections, 2-3 = deeper traversal). Default 1 provides module/symbol context automatically.`),max_tokens:l.number().min(100).max(5e4).optional().describe(`Maximum token budget for the response. 
When set, output is truncated to fit.`),dedup:l.enum([`file`,`chunk`]).default(`chunk`).describe(`Deduplication mode: "chunk" (default, show all matching chunks) or "file" (collapse chunks from same file into single result with merged line ranges)`),workspaces:l.array(l.string()).optional().describe(`Cross-workspace search: partition names or folder basenames to include. Use ["*"] for all registered workspaces. Only works in user-level install mode.`)},annotations:k.annotations},async({query:e,limit:t,search_mode:c,content_type:l,source_type:d,origin:f,category:p,tags:k,min_score:ne,graph_hops:A,max_tokens:j,dedup:M,workspaces:N})=>{try{let P={limit:t,minScore:ne,contentType:l,sourceType:d,origin:f,category:p,tags:k},F,I=!1,L=!1,R=``,z=ee(e);if(c===`keyword`)F=await h.ftsSearch(e,P),F=F.slice(0,t);else if(c===`semantic`){let n=await u.embedQuery(z);F=await h.search(n,P);let r=await C(x,F[0]?.score??0,F,e,t);F=r.results,I=r.triggered,L=r.cacheHit}else{let n=await u.embedQuery(z),[r,i]=await Promise.all([h.search(n,{...P,limit:t*2}),h.ftsSearch(e,{...P,limit:t*2}).catch(()=>[])]);F=te(r,i).slice(0,t);let a=await C(x,r[0]?.score??0,F,e,t);F=a.results,I=a.triggered,L=a.cacheHit}E&&E.recordSearch(e,I,L),F.length>1&&(F=w(F,e));let B=``;if(N&&N.length>0){let o=a(N,m(process.cwd()));if(o.length>0){let{stores:a,closeAll:s}=await i(o);try{let i;i=c===`keyword`?await n(a,e,{...P,limit:t}):await r(a,await u.embedQuery(e),{...P,limit:t});for(let e of i)F.push({record:{...e.record,sourcePath:`[${e.workspace}] ${e.record.sourcePath}`},score:e.score});F=F.sort((e,t)=>t.score-e.score).slice(0,t),B=` + ${o.length} workspace(s)`}finally{await s()}}}if(M===`file`&&F.length>1){let e=new Map;for(let t of F){let 
n=t.record.sourcePath,r=e.get(n);r?(t.score>r.best.score&&(r.best=t),r.ranges.push({start:t.record.startLine,end:t.record.endLine})):e.set(n,{best:t,ranges:[{start:t.record.startLine,end:t.record.endLine}]})}F=[...e.values()].sort((e,t)=>t.best.score-e.best.score).map(({best:e,ranges:t})=>({record:{...e.record,content:t.length>1?`${e.record.content}\n\n_Matched ${t.length} sections: ${t.sort((e,t)=>e.start-t.start).map(e=>`L${e.start}-${e.end}`).join(`, `)}_`:e.record.content},score:e.score}))}if(F.length===0){if(O?.available)try{let t=(await O.createMessage({prompt:`The search query "${e}" returned 0 results in AI Kit code search. Suggest ONE alternative search query that might find relevant results. Reply with ONLY the alternative query, nothing else.`,systemPrompt:`You are a search query optimizer for AI Kit code search. Generate a single alternative query.`,maxTokens:100})).text.trim().split(`
|
|
2
|
+
`)[0].slice(0,500);if(t&&t!==e){let n=await u.embedQuery(t),r=await h.search(n,P);r.length>0&&(F=r,R=`> _Original query "${e}" returned 0 results. Auto-reformulated to "${t}"._\n\n`,S.info(`Smart search fallback succeeded`,{originalQuery:e,altQuery:t,resultCount:r.length}))}}catch(e){S.debug(`Smart search fallback failed`,{error:String(e)})}if(F.length===0)return{content:[{type:`text`,text:`No results found for the given query.`}],structuredContent:{results:[],totalResults:0,searchMode:c,query:e}}}let V,H;if(A>0&&!y&&(H="> **Note:** `graph_hops` was set but no graph store is available. Graph augmentation skipped."),A>0&&y)try{let e=await v(y,F.map(e=>({recordId:e.record.id,score:e.score,sourcePath:e.record.sourcePath})),{hops:A,maxPerHit:5});V=new Map;for(let t of e)if(t.graphContext.nodes.length>0){let e=t.graphContext.nodes.slice(0,5).map(e=>` - **${e.name}** (${e.type})`).join(`
|
|
3
3
|
`),n=t.graphContext.edges.slice(0,5).map(e=>` - ${e.fromId} —[${e.type}]→ ${e.toId}`).join(`
|
|
4
4
|
`),r=[`- **Graph Context** (${A} hop${A>1?`s`:``}):`];e&&r.push(` Entities:\n${e}`),n&&r.push(` Relationships:\n${n}`),V.set(t.recordId,r.join(`
|
|
5
5
|
`))}}catch(e){S.warn(`Graph augmentation failed`,g(e)),H=`> **Note:** Graph augmentation failed. Results shown without graph context.`}let U=Date.now();for(let e of F)if(e.record.origin===`curated`&&e.record.indexedAt){let t=U-new Date(e.record.indexedAt).getTime(),n=Math.max(0,t/864e5);e.score*=.95**n}F.sort((e,t)=>t.score-e.score),F=_(F);let W=F.map((e,t)=>{let n=e.record;return`${`### Result ${t+1} (score: ${e.score.toFixed(3)})`}\n${[`- **Source**: ${n.sourcePath}`,n.headingPath?`- **Section**: ${n.headingPath}`:null,`- **Type**: ${n.contentType}`,n.startLine?`- **Lines**: ${n.startLine}-${n.endLine}`:null,n.origin===`indexed`?null:`- **Origin**: ${n.origin}`,n.category?`- **Category**: ${n.category}`:null,n.tags?.length?`- **Tags**: ${n.tags.join(`, `)}`:null,V?.get(n.id)??null].filter(Boolean).join(`
|
|
@@ -1 +1 @@
|
|
|
1
|
-
import{EMBEDDING_DEFAULTS as e,SEARCH_DEFAULTS as t,STORE_DEFAULTS as n,createLogger as r,serializeError as i,sourceTypeContentTypes as a}from"../../core/dist/index.js";import{Index as o,connect as s}from"@lancedb/lancedb";function c(e){if(!e)return[];try{let t=JSON.parse(e);return Array.isArray(t)?t:[]}catch{return[]}}const l=/^[\w.\-/ ]+$/,u=r(`store`);function d(e,t){if(!l.test(e))throw Error(`Invalid ${t} filter value: contains disallowed characters`);return e.replace(/'/g,`''`)}var f=class{db=null;table=null;dbPath;tableName;_draining=!1;_priorityQueue=[];_normalQueue=[];_ftsReady=!1;enqueueWrite(e,t=!1){return new Promise((n,r)=>{let i=async()=>{try{n(await e())}catch(e){r(e)}};t?this._priorityQueue.push(i):this._normalQueue.push(i),this._drain()})}async _drain(){if(!this._draining){this._draining=!0;try{for(;this._priorityQueue.length>0||this._normalQueue.length>0;){let e=this._priorityQueue.shift()??this._normalQueue.shift();e&&await e()}}finally{this._draining=!1}}}constructor(e){this.dbPath=e?.path??n.path,this.tableName=e?.tableName??n.tableName}async initialize(){this.db=await s(this.dbPath),(await this.db.tableNames()).includes(this.tableName)&&(this.table=await this.db.openTable(this.tableName),await this.createFtsIndex())}async upsert(e,t){if(e.length!==0){if(e.length!==t.length)throw Error(`Record count (${e.length}) does not match vector count (${t.length})`);return this.enqueueWrite(()=>this._upsertImpl(e,t))}}async upsertInteractive(e,t){if(e.length!==0){if(e.length!==t.length)throw Error(`Record count (${e.length}) does not match vector count (${t.length})`);return this.enqueueWrite(()=>this._upsertImpl(e,t),!0)}}async _upsertImpl(e,t){let 
n=e.map((e,n)=>({id:e.id,vector:Array.from(t[n]),content:e.content,sourcePath:e.sourcePath,contentType:e.contentType,headingPath:e.headingPath??``,chunkIndex:e.chunkIndex,totalChunks:e.totalChunks,startLine:e.startLine,endLine:e.endLine,fileHash:e.fileHash,indexedAt:e.indexedAt,origin:e.origin,tags:JSON.stringify(e.tags),category:e.category??``,version:e.version}));if(this.table){let t=[...new Set(e.map(e=>e.sourcePath))];for(let e of t)try{await this.table.delete(`sourcePath = '${d(e,`sourcePath`)}'`)}catch{}await this.table.add(n)}else try{this.table=await this.db?.createTable(this.tableName,n)??null}catch(e){if(String(e).includes(`already exists`)&&this.db)this.table=await this.db.openTable(this.tableName),await this.table.add(n);else throw e}}async search(e,n){if(!this.table)return[];let r=n?.limit??t.maxResults,i=n?.minScore??t.minScore,a=this.table.search(e).limit(r*2),o=this.buildFilterString(n);return o&&(a=a.where(o)),(await a.toArray()).map(e=>({record:this.fromLanceRecord(e),score:1-(e._distance??1)})).filter(e=>e.score>=i).slice(0,r)}async createFtsIndex(){return this.enqueueWrite(()=>this._createFtsIndexImpl())}async _createFtsIndexImpl(){if(this.table)try{await this.table.createIndex(`content`,{config:o.fts(),replace:!0}),this._ftsReady=!0,u.info(`FTS index created/updated`,{column:`content`})}catch(e){u.warn(`FTS index creation failed`,i(e))}}async ftsSearch(e,n){if(!this.table||!this._ftsReady)return[];let r=n?.limit??t.maxResults;try{let t=this.table.search(e).limit(r*2),i=this.buildFilterString(n);return i&&(t=t.where(i)),(await t.toArray()).map(e=>({record:this.fromLanceRecord(e),score:e._score??e._relevance_score??0}))}catch(e){return(e instanceof Error?e.message:String(e)).includes(`INVERTED index`)?(u.debug(`FTS search skipped — index not yet available`),this._ftsReady=!1):u.warn(`FTS search failed`,i(e)),[]}}async getById(e){if(!this.table)return null;let t=await this.table.query().where(`id = '${d(e,`id`)}'`).limit(1).toArray();return 
t.length===0?null:this.fromLanceRecord(t[0])}async deleteBySourcePath(e){return this.enqueueWrite(()=>this._deleteBySourcePathImpl(e))}async _deleteBySourcePathImpl(e){if(!this.table)return 0;let t=await this.getBySourcePath(e);return t.length===0?0:(await this.table.delete(`sourcePath = '${d(e,`sourcePath`)}'`),t.length)}async deleteById(e){return this.enqueueWrite(()=>this._deleteByIdImpl(e))}async deleteByIdInteractive(e){return this.enqueueWrite(()=>this._deleteByIdImpl(e),!0)}async _deleteByIdImpl(e){return!this.table||!await this.getById(e)?!1:(await this.table.delete(`id = '${d(e,`id`)}'`),!0)}async getBySourcePath(e){return this.table?(await this.table.query().where(`sourcePath = '${d(e,`sourcePath`)}'`).limit(1e3).toArray()).map(e=>this.fromLanceRecord(e)):[]}async getStats(){if(!this.table)return{totalRecords:0,totalFiles:0,contentTypeBreakdown:{},lastIndexedAt:null,storeBackend:`lancedb`,embeddingModel:e.model};let t=await this.table.countRows(),n=await this.table.query().select([`sourcePath`,`contentType`,`indexedAt`]).limit(1e5).toArray(),r={},i=new Set,a=null;for(let e of n){let t=e;r[t.contentType]=(r[t.contentType]??0)+1,i.add(t.sourcePath),(!a||t.indexedAt>a)&&(a=t.indexedAt)}return{totalRecords:t,totalFiles:i.size,contentTypeBreakdown:r,lastIndexedAt:a,storeBackend:`lancedb`,embeddingModel:e.model}}async listSourcePaths(){if(!this.table)return[];let e=await this.table.query().select([`sourcePath`]).limit(1e5).toArray();return[...new Set(e.map(e=>e.sourcePath))]}async dropTable(){return this.enqueueWrite(()=>this._dropTableImpl())}async _dropTableImpl(){if(this.db&&(await this.db.tableNames()).includes(this.tableName))for(let e=1;e<=3;e++)try{await this.db.dropTable(this.tableName);break}catch(t){if(e===3)throw t;let n=e*500;u.warn(`dropTable attempt failed, retrying`,{attempt:e,delayMs:n}),await new Promise(e=>setTimeout(e,n))}this.table=null}async close(){try{this.db&&typeof this.db.close==`function`&&await 
this.db.close()}catch{}this.table=null,this.db=null}buildFilterString(e){let t=[];if(e?.contentType&&t.push(`contentType = '${d(e.contentType,`contentType`)}'`),e?.sourceType){let n=a(e.sourceType);if(n.length>0){let e=n.map(e=>`'${d(e,`sourceType`)}'`).join(`, `);t.push(`contentType IN (${e})`)}}if(e?.origin&&t.push(`origin = '${d(e.origin,`origin`)}'`),e?.category&&t.push(`category = '${d(e.category,`category`)}'`),e?.tags&&e.tags.length>0){let n=e.tags.map(e=>`tags LIKE '%${d(e,`tag`)}%'`);t.push(`(${n.join(` OR `)})`)}return t.length>0?t.join(` AND `):null}fromLanceRecord(e){return{id:e.id,content:e.content,sourcePath:e.sourcePath,contentType:e.contentType,headingPath:e.headingPath||void 0,chunkIndex:e.chunkIndex,totalChunks:e.totalChunks,startLine:e.startLine,endLine:e.endLine,fileHash:e.fileHash,indexedAt:e.indexedAt,origin:e.origin,tags:c(e.tags),category:e.category||void 0,version:e.version}}};export{f as LanceStore};
|
|
1
|
+
import{EMBEDDING_DEFAULTS as e,SEARCH_DEFAULTS as t,STORE_DEFAULTS as n,createLogger as r,serializeError as i,sourceTypeContentTypes as a}from"../../core/dist/index.js";import{Index as o,connect as s}from"@lancedb/lancedb";function c(e){if(!e)return[];try{let t=JSON.parse(e);return Array.isArray(t)?t:[]}catch{return[]}}const l=/^[\w.\-/ ]+$/,u=r(`store`);function d(e,t){if(!l.test(e))throw Error(`Invalid ${t} filter value: contains disallowed characters`);return e.replace(/'/g,`''`)}var f=class{db=null;table=null;dbPath;tableName;_draining=!1;_priorityQueue=[];_normalQueue=[];_ftsReady=!1;enqueueWrite(e,t=!1){return new Promise((n,r)=>{let i=async()=>{try{n(await e())}catch(e){r(e)}};t?this._priorityQueue.push(i):this._normalQueue.push(i),this._drain()})}async _drain(){if(!this._draining){this._draining=!0;try{for(;this._priorityQueue.length>0||this._normalQueue.length>0;){let e=this._priorityQueue.shift()??this._normalQueue.shift();e&&await e()}}finally{this._draining=!1}}}constructor(e){this.dbPath=e?.path??n.path,this.tableName=e?.tableName??n.tableName}async initialize(){this.db=await s(this.dbPath),(await this.db.tableNames()).includes(this.tableName)&&(this.table=await this.db.openTable(this.tableName),await this.createFtsIndex())}async upsert(e,t){if(e.length!==0){if(e.length!==t.length)throw Error(`Record count (${e.length}) does not match vector count (${t.length})`);return this.enqueueWrite(()=>this._upsertImpl(e,t))}}async upsertInteractive(e,t){if(e.length!==0){if(e.length!==t.length)throw Error(`Record count (${e.length}) does not match vector count (${t.length})`);return this.enqueueWrite(()=>this._upsertImpl(e,t),!0)}}async _upsertImpl(e,t){let 
n=e.map((e,n)=>({id:e.id,vector:Array.from(t[n]),content:e.content,sourcePath:e.sourcePath,contentType:e.contentType,headingPath:e.headingPath??``,chunkIndex:e.chunkIndex,totalChunks:e.totalChunks,startLine:e.startLine,endLine:e.endLine,fileHash:e.fileHash,indexedAt:e.indexedAt,origin:e.origin,tags:JSON.stringify(e.tags),category:e.category??``,version:e.version}));if(this.table){let t=[...new Set(e.map(e=>e.sourcePath))];for(let e of t)try{await this.table.delete(`sourcePath = '${d(e,`sourcePath`)}'`)}catch{}await this.table.add(n)}else try{this.table=await this.db?.createTable(this.tableName,n)??null}catch(e){if(String(e).includes(`already exists`)&&this.db)this.table=await this.db.openTable(this.tableName),await this.table.add(n);else throw e}}async search(e,n){if(!this.table)return[];let r=n?.limit??t.maxResults,i=n?.minScore??t.minScore,a=this.table.search(e).limit(r*2),o=this.buildFilterString(n);return o&&(a=a.where(o)),(await a.toArray()).map(e=>({record:this.fromLanceRecord(e),score:1-(e._distance??1)})).filter(e=>e.score>=i).slice(0,r)}async createFtsIndex(){return this.enqueueWrite(()=>this._createFtsIndexImpl())}async _createFtsIndexImpl(){if(this.table)try{await this.table.createIndex(`content`,{config:o.fts({withPosition:!0}),replace:!0}),this._ftsReady=!0,u.info(`FTS index created/updated`,{column:`content`})}catch(e){u.warn(`FTS index creation failed`,i(e))}}async ftsSearch(e,n){if(!this.table||!this._ftsReady)return[];let r=n?.limit??t.maxResults;try{let t=this.table.search(e).limit(r*2),i=this.buildFilterString(n);return i&&(t=t.where(i)),(await t.toArray()).map(e=>({record:this.fromLanceRecord(e),score:e._score??e._relevance_score??0}))}catch(e){return(e instanceof Error?e.message:String(e)).includes(`INVERTED index`)?(u.debug(`FTS search skipped — index not yet available`),this._ftsReady=!1):u.warn(`FTS search failed`,i(e)),[]}}async getById(e){if(!this.table)return null;let t=await this.table.query().where(`id = 
'${d(e,`id`)}'`).limit(1).toArray();return t.length===0?null:this.fromLanceRecord(t[0])}async deleteBySourcePath(e){return this.enqueueWrite(()=>this._deleteBySourcePathImpl(e))}async _deleteBySourcePathImpl(e){if(!this.table)return 0;let t=await this.getBySourcePath(e);return t.length===0?0:(await this.table.delete(`sourcePath = '${d(e,`sourcePath`)}'`),t.length)}async deleteById(e){return this.enqueueWrite(()=>this._deleteByIdImpl(e))}async deleteByIdInteractive(e){return this.enqueueWrite(()=>this._deleteByIdImpl(e),!0)}async _deleteByIdImpl(e){return!this.table||!await this.getById(e)?!1:(await this.table.delete(`id = '${d(e,`id`)}'`),!0)}async getBySourcePath(e){return this.table?(await this.table.query().where(`sourcePath = '${d(e,`sourcePath`)}'`).limit(1e3).toArray()).map(e=>this.fromLanceRecord(e)):[]}async getStats(){if(!this.table)return{totalRecords:0,totalFiles:0,contentTypeBreakdown:{},lastIndexedAt:null,storeBackend:`lancedb`,embeddingModel:e.model};let t=await this.table.countRows(),n=await this.table.query().select([`sourcePath`,`contentType`,`indexedAt`]).limit(1e5).toArray(),r={},i=new Set,a=null;for(let e of n){let t=e;r[t.contentType]=(r[t.contentType]??0)+1,i.add(t.sourcePath),(!a||t.indexedAt>a)&&(a=t.indexedAt)}return{totalRecords:t,totalFiles:i.size,contentTypeBreakdown:r,lastIndexedAt:a,storeBackend:`lancedb`,embeddingModel:e.model}}async listSourcePaths(){if(!this.table)return[];let e=await this.table.query().select([`sourcePath`]).limit(1e5).toArray();return[...new Set(e.map(e=>e.sourcePath))]}async dropTable(){return this.enqueueWrite(()=>this._dropTableImpl())}async _dropTableImpl(){if(this.db&&(await this.db.tableNames()).includes(this.tableName))for(let e=1;e<=3;e++)try{await this.db.dropTable(this.tableName);break}catch(t){if(e===3)throw t;let n=e*500;u.warn(`dropTable attempt failed, retrying`,{attempt:e,delayMs:n}),await new Promise(e=>setTimeout(e,n))}this.table=null}async close(){try{this.db&&typeof 
this.db.close==`function`&&await this.db.close()}catch{}this.table=null,this.db=null}buildFilterString(e){let t=[];if(e?.contentType&&t.push(`contentType = '${d(e.contentType,`contentType`)}'`),e?.sourceType){let n=a(e.sourceType);if(n.length>0){let e=n.map(e=>`'${d(e,`sourceType`)}'`).join(`, `);t.push(`contentType IN (${e})`)}}if(e?.origin&&t.push(`origin = '${d(e.origin,`origin`)}'`),e?.category&&t.push(`category = '${d(e.category,`category`)}'`),e?.tags&&e.tags.length>0){let n=e.tags.map(e=>`tags LIKE '%${d(e,`tag`)}%'`);t.push(`(${n.join(` OR `)})`)}return t.length>0?t.join(` AND `):null}fromLanceRecord(e){return{id:e.id,content:e.content,sourcePath:e.sourcePath,contentType:e.contentType,headingPath:e.headingPath||void 0,chunkIndex:e.chunkIndex,totalChunks:e.totalChunks,startLine:e.startLine,endLine:e.endLine,fileHash:e.fileHash,indexedAt:e.indexedAt,origin:e.origin,tags:c(e.tags),category:e.category||void 0,version:e.version}}};export{f as LanceStore};
|
|
@@ -19,6 +19,12 @@ interface CompactOptions {
|
|
|
19
19
|
segmentation?: 'paragraph' | 'sentence' | 'line';
|
|
20
20
|
/** Optional file cache — use cached content instead of raw readFile */
|
|
21
21
|
cache?: FileCache;
|
|
22
|
+
/**
|
|
23
|
+
* Content delivery mode:
|
|
24
|
+
* - 'full' (default): Return compressed text as normal
|
|
25
|
+
* - 'delta': Return only changes since last read (for re-reads of same file)
|
|
26
|
+
*/
|
|
27
|
+
mode?: 'full' | 'delta';
|
|
22
28
|
}
|
|
23
29
|
interface CompactResult {
|
|
24
30
|
/** The compressed text */
|
|
@@ -1,3 +1,3 @@
|
|
|
1
|
-
import{bookendReorder as e,cosineSimilarity as t,segment as n}from"./text-utils.js";import{readFile as
|
|
1
|
+
import{bookendReorder as e,cosineSimilarity as t,segment as n}from"./text-utils.js";import{getDelta as r}from"./compression/delta-cache.js";import{scoreLines as i}from"./compression/scoring.js";import{readFile as a,stat as o}from"node:fs/promises";async function s(s,c){let{query:l,maxChars:u=3e3,minScore:d=.3,segmentation:f=`paragraph`}=c,p=c.tokenBudget?c.tokenBudget*4:u,m;if(c.text)m=c.text;else if(c.path){let e;try{e=await o(c.path)}catch(e){let t=e.code;throw t===`ENOENT`?Error(`File not found: ${c.path}. Check the path and try again.`):t===`EACCES`||t===`EPERM`?Error(`Permission denied reading ${c.path}. The file exists but is not accessible.`):e}if(e.isDirectory())throw Error(`Path is a directory: ${c.path}. compact requires a file path, not a directory. Use analyze_structure or find to explore directories.`);if(e.size>1e7)throw Error(`File too large (${(e.size/1e6).toFixed(1)}MB). compact supports files up to 10MB. Consider splitting or using search instead.`);m=c.cache?(await c.cache.get(c.path)).content:await a(c.path,`utf-8`)}else throw Error(`Either "text" or "path" must be provided`);if(c.mode===`delta`&&c.path){let e=r(c.path,m);if(e.isDelta)return{text:e.text,originalChars:m.length,compressedChars:e.text.length,ratio:e.text.length/m.length,segmentsKept:1,segmentsTotal:1}}if(m.length<=p)return{text:m,originalChars:m.length,compressedChars:m.length,ratio:1,segmentsKept:1,segmentsTotal:1};let h=n(m,f);if(h.length===0)return{text:``,originalChars:m.length,compressedChars:0,ratio:0,segmentsKept:0,segmentsTotal:0};let g=await s.embed(l),_=i(h),v=[];for(let e=0;e<h.length;e++){let n=.85*t(g,await s.embed(h[e]))+.15*(_[e]??0);v.push({text:h[e],score:n,index:e})}let y=v.filter(e=>e.score>=d).sort((e,t)=>t.score-e.score),b=[],x=0;for(let e of y){if(x+e.text.length>p){x===0&&(b.push({...e,text:e.text.slice(0,p)}),x=p);break}b.push(e),x+=e.text.length+2}let S=e(b.sort((e,t)=>t.score-e.score)).map(e=>e.text).join(`
|
|
2
2
|
|
|
3
|
-
`);return{text:
|
|
3
|
+
`);return{text:S,originalChars:m.length,compressedChars:S.length,ratio:S.length/m.length,segmentsKept:b.length,segmentsTotal:h.length}}export{s as compact};
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
import { CompressionMode, CompressionResult } from "./compression/types.js";
|
|
2
|
+
import { detectOutputTool } from "./compression/engine.js";
|
|
3
|
+
|
|
4
|
+
//#region packages/tools/src/compress-output.d.ts
|
|
5
|
+
interface CompressOutputOptions {
|
|
6
|
+
/** Raw terminal/shell output text */
|
|
7
|
+
text: string;
|
|
8
|
+
/** Override detected tool (e.g., 'git', 'npm', 'tsc') */
|
|
9
|
+
tool?: string;
|
|
10
|
+
/** Compression mode: structural (default), heuristic, aggressive */
|
|
11
|
+
mode?: CompressionMode;
|
|
12
|
+
/** Maximum output characters (default: 4000) */
|
|
13
|
+
maxChars?: number;
|
|
14
|
+
/** Maximum output tokens (alternative to maxChars, uses 4 chars/token) */
|
|
15
|
+
tokenBudget?: number;
|
|
16
|
+
}
|
|
17
|
+
/**
|
|
18
|
+
* Compress terminal/shell output using intelligent rule-based compression.
|
|
19
|
+
*
|
|
20
|
+
* Automatically detects the tool that produced the output (git, npm, tsc,
|
|
21
|
+
* vitest, docker, kubectl, ESLint, etc.) and applies tool-specific
|
|
22
|
+
* compression strategies that preserve critical information while reducing
|
|
23
|
+
* token count.
|
|
24
|
+
*
|
|
25
|
+
* @example
|
|
26
|
+
* ```ts
|
|
27
|
+
* const result = compressTerminalOutput({
|
|
28
|
+
* text: rawGitDiff,
|
|
29
|
+
* maxChars: 3000,
|
|
30
|
+
* });
|
|
31
|
+
* // result.text contains compressed output
|
|
32
|
+
* // result.rule tells which compression rule was applied
|
|
33
|
+
* // result._meta.truncated tells if output was further truncated
|
|
34
|
+
* ```
|
|
35
|
+
*/
|
|
36
|
+
declare function compressTerminalOutput(options: CompressOutputOptions): CompressionResult;
|
|
37
|
+
//#endregion
|
|
38
|
+
export { CompressOutputOptions, type CompressionMode, type CompressionResult, compressTerminalOutput, detectOutputTool };
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
import{compressOutput as e,detectOutputTool as t}from"./compression/engine.js";import"./compression/rules/index.js";function n(t){let{text:n,tool:r,mode:i=`structural`,maxChars:a,tokenBudget:o}=t;return e(n,{mode:i,maxChars:o?o*4:a??4e3,tool:r})}export{n as compressTerminalOutput,t as detectOutputTool};
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
//#region packages/tools/src/compression/delta-cache.d.ts
|
|
2
|
+
interface DeltaCacheEntry {
|
|
3
|
+
hash: string;
|
|
4
|
+
text: string;
|
|
5
|
+
timestamp: number;
|
|
6
|
+
}
|
|
7
|
+
interface DeltaResult {
|
|
8
|
+
/** The delta text (diff or full text if no prior version) */
|
|
9
|
+
text: string;
|
|
10
|
+
/** Whether this is a delta (true) or full text (false) */
|
|
11
|
+
isDelta: boolean;
|
|
12
|
+
/** Content hash of current version */
|
|
13
|
+
hash: string;
|
|
14
|
+
}
|
|
15
|
+
/**
|
|
16
|
+
* Get delta for a given key (typically file path).
|
|
17
|
+
* Returns unified diff if prior version cached, or full text otherwise.
|
|
18
|
+
*/
|
|
19
|
+
declare function getDelta(key: string, currentText: string): DeltaResult;
|
|
20
|
+
/**
|
|
21
|
+
* Clear the delta cache (useful for testing).
|
|
22
|
+
*/
|
|
23
|
+
declare function clearDeltaCache(): void;
|
|
24
|
+
/**
|
|
25
|
+
* Check if a key has a cached version.
|
|
26
|
+
*/
|
|
27
|
+
declare function hasCachedVersion(key: string): boolean;
|
|
28
|
+
//#endregion
|
|
29
|
+
export { DeltaCacheEntry, DeltaResult, clearDeltaCache, getDelta, hasCachedVersion };
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
import{createHash as e}from"node:crypto";import{createTwoFilesPatch as t}from"diff";import{LRUCache as n}from"lru-cache";const r=new n({max:200,ttl:1e3*60*30});function i(t){return e(`sha256`).update(t).digest(`hex`).slice(0,16)}function a(e,n){let a=i(n),o=r.get(e);if(r.set(e,{hash:a,text:n,timestamp:Date.now()}),!o||o.hash===a)return{text:o?.hash===a?`[No changes since last read]`:n,isDelta:o?.hash===a,hash:a};let s=t(e,e,o.text,n,`previous`,`current`,{context:3});return s.length>=n.length*.8?{text:n,isDelta:!1,hash:a}:{text:s,isDelta:!0,hash:a}}function o(){r.clear()}function s(e){return r.has(e)}export{o as clearDeltaCache,a as getDelta,s as hasCachedVersion};
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import { CompressionContext, CompressionMode, CompressionResult, CompressionRule } from "./types.js";
|
|
2
|
+
|
|
3
|
+
//#region packages/tools/src/compression/engine.d.ts
|
|
4
|
+
/**
|
|
5
|
+
* Register a compression rule. Rules are auto-sorted by priority (highest first).
|
|
6
|
+
*/
|
|
7
|
+
declare function registerRule(rule: CompressionRule): void;
|
|
8
|
+
/**
|
|
9
|
+
* Register multiple compression rules at once.
|
|
10
|
+
*/
|
|
11
|
+
declare function registerRules(newRules: CompressionRule[]): void;
|
|
12
|
+
/**
|
|
13
|
+
* Get all registered rules (for testing/inspection).
|
|
14
|
+
*/
|
|
15
|
+
declare function getRegisteredRules(): readonly CompressionRule[];
|
|
16
|
+
/**
|
|
17
|
+
* Detect the tool that produced the output.
|
|
18
|
+
* Uses heuristic pattern matching on the raw text.
|
|
19
|
+
*/
|
|
20
|
+
declare function detectOutputTool(text: string): string;
|
|
21
|
+
/**
|
|
22
|
+
* Core compression pipeline.
|
|
23
|
+
* 1. Strip ANSI codes
|
|
24
|
+
* 2. Detect tool
|
|
25
|
+
* 3. Find matching rule (priority order)
|
|
26
|
+
* 4. Apply rule or fall back to truncation
|
|
27
|
+
*/
|
|
28
|
+
declare function compressOutput(rawText: string, options?: {
|
|
29
|
+
/** Compression mode (implementation defaults to "structural"). */
mode?: CompressionMode;
|
|
30
|
+
/** Character budget for the output (implementation defaults to 4000). */
maxChars?: number;
|
|
31
|
+
/** Skip heuristic detection and treat the output as coming from this tool. */
tool?: string;
|
|
32
|
+
}): CompressionResult;
|
|
33
|
+
//#endregion
|
|
34
|
+
export { type CompressionContext, type CompressionMode, type CompressionResult, type CompressionRule, compressOutput, detectOutputTool, getRegisteredRules, registerRule, registerRules };
|
|
@@ -0,0 +1,4 @@
|
|
|
1
|
+
import { stripAnsi } from "../parse-output.js";
import { estimateTokens } from "../text-utils.js";

// Registered compression rules, kept sorted by priority (highest first).
const rules = [];

/**
 * Register a compression rule. Rules are auto-sorted by priority (highest first).
 */
function registerRule(rule) {
  rules.push(rule);
  rules.sort((a, b) => b.priority - a.priority);
}

/**
 * Register multiple compression rules at once.
 */
function registerRules(newRules) {
  for (const rule of newRules) registerRule(rule);
}

/**
 * Get all registered rules (for testing/inspection).
 */
function getRegisteredRules() {
  return rules;
}

/**
 * Detect the tool that produced the output.
 * Uses heuristic pattern matching on the raw text. Checks run in order, so
 * the broad substring tests (pnpm, docker) only apply after the more
 * specific line-anchored patterns have failed.
 */
function detectOutputTool(text) {
  if (/^(diff --git|commit [0-9a-f]{7,40}|On branch |Your branch )/m.test(text)) return `git`;
  if (/^\s*[MADRCU?!]{1,2}\s+\S/m.test(text) && /^##\s/m.test(text)) return `git-status`;
  if (/^(npm (warn|ERR!|notice)|added \d+ packages?|up to date)/m.test(text)) return `npm`;
  if (/^(Packages|Progress):/m.test(text) || /pnpm/.test(text)) return `pnpm`;
  if (/✓|✗|PASS|FAIL|Tests?\s+\d+\s+(passed|failed)/m.test(text) || /^(PASS|FAIL)\s+\S/m.test(text)) return `test-runner`;
  if (/^(error TS\d+|warning TS\d+|\S+\.tsx?[(:]\d+)/m.test(text)) return `tsc`;
  if (/^\S+\.\w+:\d+:\d+\s+(error|warning|info)/m.test(text) || /Found \d+ (error|warning)/m.test(text)) return `lint`;
  if (/^(CONTAINER ID|IMAGE|REPOSITORY|Step \d+\/\d+|--->)/m.test(text) || /docker|Dockerfile/i.test(text)) return `docker`;
  if (/^(NAME\s+READY|NAMESPACE\s|kubectl)/m.test(text)) return `kubectl`;
  return `unknown`;
}

/**
 * Core compression pipeline.
 * 1. Strip ANSI codes
 * 2. Detect tool (unless options.tool is given)
 * 3. Find matching rule (priority order)
 * 4. Apply rule or fall back to head/tail line truncation
 */
function compressOutput(rawText, options = {}) {
  const { mode = `structural`, maxChars = 4e3 } = options;
  const text = stripAnsi(rawText);
  const tool = options.tool ?? detectOutputTool(text);
  const originalChars = text.length;

  // Already within budget — pass through untouched.
  if (originalChars <= maxChars) {
    return { text, originalChars, compressedChars: text.length, ratio: 1, rule: `passthrough`, tool, _meta: { truncated: false, mode } };
  }

  const context = { text, tool, mode, maxChars, originalChars };
  for (const rule of rules) {
    if (!rule.match(context)) continue;
    const compressed = rule.compress(context);
    const len = compressed.length;
    // Even a matched rule may overshoot the budget; hard-truncate with a marker.
    const bounded = len > maxChars
      ? `${compressed.slice(0, maxChars - 50)}\n... [truncated ${len - maxChars + 50} chars, ~${estimateTokens(compressed.slice(maxChars - 50))} tokens]`
      : compressed;
    return { text: bounded, originalChars, compressedChars: bounded.length, ratio: bounded.length / originalChars, rule: rule.name, tool, _meta: { truncated: len > maxChars, mode } };
  }

  // Fallback: keep whole lines from the head (~60% of the budget) and from
  // the tail, with an omission marker in between.
  const headBudget = Math.floor(maxChars * 0.6);
  const tailBudget = maxChars - headBudget - 60;
  const lines = text.split(`\n`);

  let head = ``;
  let headCount = 0;
  for (const line of lines) {
    if (head.length + line.length + 1 > headBudget) break;
    head += `${head ? `\n` : ``}${line}`;
    headCount++;
  }

  let tail = ``;
  let tailCount = 0;
  for (let i = lines.length - 1; i >= headCount; i--) {
    if (tail.length + lines[i].length + 1 > tailBudget) break;
    tail = `${lines[i]}${tail ? `\n` : ``}${tail}`;
    tailCount++;
  }

  // BUGFIX: when a single line exceeds both budgets (e.g. minified output on
  // one line), head and tail both came out empty and the result contained
  // only the omission marker with none of the content. Fall back to
  // character-level slicing in that degenerate case.
  if (head.length === 0 && tail.length === 0) {
    head = text.slice(0, headBudget);
    tail = tailBudget > 0 ? text.slice(text.length - tailBudget) : ``;
  }

  const marker = `\n... [${lines.length - headCount - tailCount} lines omitted (${Math.max(0, originalChars - head.length - tail.length)} chars, ~${estimateTokens(text.slice(head.length, Math.max(head.length, text.length - tail.length)))} tokens)] ...\n`;
  const result = `${head}${marker}${tail}`;
  return { text: result, originalChars, compressedChars: result.length, ratio: result.length / originalChars, rule: `fallback-truncation`, tool, _meta: { truncated: true, mode } };
}

export { compressOutput, detectOutputTool, getRegisteredRules, registerRule, registerRules };
|