@vpxa/aikit 0.1.99 → 0.1.101

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. package/README.md +29 -33
  2. package/package.json +2 -2
  3. package/packages/aikit-client/dist/index.d.ts +27 -27
  4. package/packages/aikit-client/dist/index.js +2 -2
  5. package/packages/analyzers/dist/index.js +2 -2
  6. package/packages/cli/dist/constants-__Dx2-7-.js +1 -0
  7. package/packages/cli/dist/index.js +3 -3
  8. package/packages/cli/dist/init-CwpITli0.js +7 -0
  9. package/packages/cli/dist/{templates-DVcEiTlc.js → templates-Dz2d2veK.js} +9 -9
  10. package/packages/cli/dist/{user-Dj8KE0_0.js → user-CpsehvsY.js} +1 -1
  11. package/packages/core/dist/index.d.ts +8 -8
  12. package/packages/core/dist/index.js +1 -1
  13. package/packages/dashboard/dist/assets/{index-C6D-PCp0.js → index-CxrC6OtB.js} +3 -3
  14. package/packages/dashboard/dist/index.html +1 -1
  15. package/packages/enterprise-bridge/dist/index.d.ts +1 -1
  16. package/packages/enterprise-bridge/dist/index.js +1 -1
  17. package/packages/indexer/dist/index.d.ts +6 -6
  18. package/packages/server/dist/index.d.ts +1 -1
  19. package/packages/server/dist/index.js +1 -1
  20. package/packages/server/dist/server-DfCR_Bix.js +1282 -0
  21. package/packages/server/dist/{version-check-AMfxaZUw.js → version-check-Bj07vc5x.js} +1 -1
  22. package/packages/store/dist/index.d.ts +9 -22
  23. package/packages/store/dist/index.js +6 -6
  24. package/packages/tools/dist/index.d.ts +16 -15
  25. package/packages/tools/dist/index.js +6 -6
  26. package/scaffold/definitions/mcp-entry.json +8 -0
  27. package/scaffold/dist/adapters/intellij.mjs +3 -0
  28. package/scaffold/dist/adapters/zed.mjs +4 -0
  29. package/scaffold/dist/definitions/agents.mjs +1 -1
  30. package/scaffold/dist/definitions/bodies.mjs +16 -16
  31. package/scaffold/dist/definitions/flows.mjs +32 -32
  32. package/scaffold/dist/definitions/hooks.mjs +1 -1
  33. package/scaffold/dist/definitions/mcp.mjs +1 -0
  34. package/scaffold/dist/definitions/prompts.mjs +7 -7
  35. package/scaffold/dist/definitions/protocols.mjs +28 -24
  36. package/scaffold/dist/definitions/skills.mjs +67 -67
  37. package/scaffold/dist/definitions/tools.mjs +1 -1
  38. package/packages/cli/dist/constants-BHJ95m41.js +0 -1
  39. package/packages/cli/dist/init-CVtbu7zj.js +0 -7
  40. package/packages/server/dist/server-DZ1V42_x.js +0 -1282
@@ -1 +1 @@
1
- import{existsSync as e,readFileSync as t}from"node:fs";import{dirname as n,resolve as r}from"node:path";import{fileURLToPath as i}from"node:url";import{createLogger as a}from"../../core/dist/index.js";import{homedir as o}from"node:os";import{execFile as s}from"node:child_process";const c=a(`server`);function l(){let e=r(n(i(import.meta.url)),`..`,`..`,`..`,`package.json`);try{return JSON.parse(t(e,`utf-8`)).version??`0.0.0`}catch{return`0.0.0`}}function u(e,t){let n=e.split(`.`).map(Number),r=t.split(`.`).map(Number);for(let e=0;e<3;e++){let t=(n[e]??0)-(r[e]??0);if(t!==0)return t>0?1:-1}return 0}function d(){let e=l();fetch(`https://registry.npmjs.org/@vpxa/aikit/latest`,{signal:AbortSignal.timeout(5e3)}).then(e=>{if(e.ok)return e.json()}).then(t=>{if(!t||typeof t!=`object`)return;let n=t.version;n&&u(e,n)<0&&c.warn(`Update available`,{currentVersion:e,latestVersion:n,updateCommand:`aikit upgrade`})}).catch(()=>{})}function f(){try{let n=r(o(),`.copilot`,`.aikit-scaffold.json`);return e(n)?JSON.parse(t(n,`utf-8`)).version??null:null}catch{return null}}function p(){try{let n=r(process.cwd(),`.github`,`.aikit-scaffold.json`);return e(n)?JSON.parse(t(n,`utf-8`)).version??null:null}catch{return null}}let m=`idle`,h=null;function g(){return{state:m,error:h}}function _(){try{let t=l(),a=f(),o=p();if(!(a!=null&&a!==t)&&!(o!=null&&o!==t)||m===`pending`||m===`success`)return;m=`pending`,h=null,c.info(`Scaffold version mismatch — auto-upgrading`,{serverVersion:t,userScaffoldVersion:a,workspaceScaffoldVersion:o});let u=n(i(import.meta.url)),d=[r(u,`..`,`..`,`..`,`bin`,`aikit.mjs`),r(u,`..`,`bin`,`aikit.mjs`),...process.argv[1]?[r(n(process.argv[1]),`aikit.mjs`)]:[]],g=d.find(t=>e(t));if(!g){m=`failed`,h=`aikit CLI binary not found. 
Tried: ${d.join(`, `)}`,c.warn(`Cannot auto-upgrade: aikit CLI binary not found`,{candidates:d,platform:process.platform});return}s(process.execPath,[g,`upgrade`],{timeout:3e4,windowsHide:!0},(e,t,n)=>{e?(m=`failed`,h=e.message,c.warn(`Auto-upgrade failed`,{error:e.message,stderr:n?.slice(0,500),binPath:g,platform:process.platform})):(m=`success`,h=null,c.info(`Auto-upgrade complete`))}).unref()}catch(e){m=`failed`,h=e instanceof Error?e.message:String(e),c.warn(`Auto-upgrade check failed`,{error:h})}}export{_ as autoUpgradeScaffold,d as checkForUpdates,l as getCurrentVersion,g as getUpgradeState};
1
+ import{existsSync as e,readFileSync as t}from"node:fs";import{dirname as n,resolve as r}from"node:path";import{fileURLToPath as i}from"node:url";import{createLogger as a}from"../../core/dist/index.js";import{execFile as o}from"node:child_process";import{homedir as s}from"node:os";const c=a(`server`);function l(){let e=r(n(i(import.meta.url)),`..`,`..`,`..`,`package.json`);try{return JSON.parse(t(e,`utf-8`)).version??`0.0.0`}catch{return`0.0.0`}}function u(e,t){let n=e.split(`.`).map(Number),r=t.split(`.`).map(Number);for(let e=0;e<3;e++){let t=(n[e]??0)-(r[e]??0);if(t!==0)return t>0?1:-1}return 0}function d(){let e=l();fetch(`https://registry.npmjs.org/@vpxa/aikit/latest`,{signal:AbortSignal.timeout(5e3)}).then(e=>{if(e.ok)return e.json()}).then(t=>{if(!t||typeof t!=`object`)return;let n=t.version;n&&u(e,n)<0&&c.warn(`Update available`,{currentVersion:e,latestVersion:n,updateCommand:`aikit upgrade`})}).catch(()=>{})}function f(){try{let n=r(s(),`.copilot`,`.aikit-scaffold.json`);return e(n)?JSON.parse(t(n,`utf-8`)).version??null:null}catch{return null}}function p(){try{let n=r(process.cwd(),`.github`,`.aikit-scaffold.json`);return e(n)?JSON.parse(t(n,`utf-8`)).version??null:null}catch{return null}}let m=`idle`,h=null;function g(){return{state:m,error:h}}function _(){try{let t=l(),a=f(),s=p();if(!(a!=null&&a!==t)&&!(s!=null&&s!==t)||m===`pending`||m===`success`)return;m=`pending`,h=null,c.info(`Scaffold version mismatch — auto-upgrading`,{serverVersion:t,userScaffoldVersion:a,workspaceScaffoldVersion:s});let u=n(i(import.meta.url)),d=[r(u,`..`,`..`,`..`,`bin`,`aikit.mjs`),r(u,`..`,`bin`,`aikit.mjs`),...process.argv[1]?[r(n(process.argv[1]),`aikit.mjs`)]:[]],g=d.find(t=>e(t));if(!g){m=`failed`,h=`aikit CLI binary not found. 
Tried: ${d.join(`, `)}`,c.warn(`Cannot auto-upgrade: aikit CLI binary not found`,{candidates:d,platform:process.platform});return}o(process.execPath,[g,`upgrade`],{timeout:3e4,windowsHide:!0},(e,t,n)=>{e?(m=`failed`,h=e.message,c.warn(`Auto-upgrade failed`,{error:e.message,stderr:n?.slice(0,500),binPath:g,platform:process.platform})):(m=`success`,h=null,c.info(`Auto-upgrade complete`))}).unref()}catch(e){m=`failed`,h=e instanceof Error?e.message:String(e),c.warn(`Auto-upgrade check failed`,{error:h})}}export{_ as autoUpgradeScaffold,d as checkForUpdates,l as getCurrentVersion,g as getUpgradeState};
@@ -253,13 +253,12 @@ declare class LanceStore implements IKnowledgeStore {
253
253
  //#endregion
254
254
  //#region packages/store/src/sqlite-adapter.d.ts
255
255
  /**
256
- * SQLite adapter abstraction allows swapping between better-sqlite3 (native)
257
- * and sql.js (WASM) without changing consumer code.
256
+ * SQLite adapter abstraction backed by sql.js (WASM).
258
257
  *
259
- * Primary adapter is configurable; the other serves as automatic fallback
260
- * when the primary fails to load (e.g. native binary missing on the platform).
258
+ * The database stays memory-resident while open and is exported back to disk
259
+ * on flush/close so callers keep the same single-file persistence model.
261
260
  */
262
- type SqliteAdapterType = 'better-sqlite3' | 'sql.js';
261
+ type SqliteAdapterType = 'sql.js';
263
262
  interface ISqliteAdapter {
264
263
  /** Which adapter implementation is active */
265
264
  readonly type: SqliteAdapterType;
@@ -278,29 +277,16 @@ interface ISqliteAdapter {
278
277
  /** Close the database connection */
279
278
  close(): void;
280
279
  }
281
- interface SqliteAdapterOptions {
282
- /** Which adapter to try first. Default: 'better-sqlite3' (lower memory usage) */
283
- primary?: SqliteAdapterType;
284
- }
285
- /**
286
- * Create a SQLite adapter with automatic fallback.
287
- *
288
- * Tries the primary adapter first; if it fails to load (missing native binary,
289
- * unsupported platform, etc.), falls back to the other adapter transparently.
290
- *
291
- * Auto-heals NODE_MODULE_VERSION mismatches by re-downloading the correct
292
- * prebuild binary for the current Node.js version.
293
- */
294
- declare function createSqliteAdapter(dbPath: string, options?: SqliteAdapterOptions): Promise<ISqliteAdapter>;
280
+ /** Create the project's SQLite adapter. */
281
+ declare function createSqliteAdapter(dbPath: string): Promise<ISqliteAdapter>;
295
282
  //#endregion
296
283
  //#region packages/store/src/sqlite-graph-store.d.ts
297
284
  declare class SqliteGraphStore implements IGraphStore {
298
285
  private adapter;
286
+ private reopenPromise;
299
287
  private readonly dbPath;
300
- private readonly adapterOptions?;
301
288
  constructor(options?: {
302
289
  path?: string;
303
- sqliteAdapter?: SqliteAdapterOptions;
304
290
  });
305
291
  initialize(): Promise<void>;
306
292
  private configureAdapter;
@@ -312,6 +298,7 @@ declare class SqliteGraphStore implements IGraphStore {
312
298
  * Must be called from any public async method that may be invoked after close().
313
299
  */
314
300
  private ensureOpen;
301
+ private reopenAdapter;
315
302
  private query;
316
303
  private run;
317
304
  upsertNode(node: GraphNode): Promise<void>;
@@ -358,4 +345,4 @@ interface StoreConfig {
358
345
  }
359
346
  declare function createStore(config: StoreConfig): Promise<IKnowledgeStore>;
360
347
  //#endregion
361
- export { type DepthGroupedResult, type GraphEdge, type GraphNode, type GraphStats, type GraphTraversalOptions, type GraphTraversalResult, type GraphValidationResult, type IGraphStore, type IKnowledgeStore, type ISqliteAdapter, LanceStore, type ProcessInfo, type SearchOptions, type SqliteAdapterOptions, type SqliteAdapterType, SqliteGraphStore, type StoreBackend, type StoreConfig, type Symbol360, createSqliteAdapter, createStore };
348
+ export { type DepthGroupedResult, type GraphEdge, type GraphNode, type GraphStats, type GraphTraversalOptions, type GraphTraversalResult, type GraphValidationResult, type IGraphStore, type IKnowledgeStore, type ISqliteAdapter, LanceStore, type ProcessInfo, type SearchOptions, type SqliteAdapterType, SqliteGraphStore, type StoreBackend, type StoreConfig, type Symbol360, createSqliteAdapter, createStore };
@@ -1,4 +1,4 @@
1
- import{t as e}from"./lance-store-CQkljFy3.js";import{createRequire as t}from"node:module";import{AIKIT_PATHS as n}from"../../core/dist/index.js";import{execSync as r}from"node:child_process";import{existsSync as i,mkdirSync as a,readFileSync as o,unlinkSync as s,writeFileSync as c}from"node:fs";import{dirname as l,join as u}from"node:path";var d=class{type=`better-sqlite3`;db=null;async open(e){let t=(await import(`better-sqlite3`)).default;this.db=new t(e)}exec(e){this.getDb().exec(e)}pragma(e){this.getDb().pragma(e)}queryAll(e,t=[]){return this.getDb().prepare(e).all(...t)}run(e,t=[]){this.getDb().prepare(e).run(...t)}flush(){}close(){this.db&&=(this.db.close(),null)}getDb(){if(!this.db)throw Error(`BetterSqliteAdapter: database not opened`);return this.db}},f=class{type=`sql.js`;db=null;dbPath=``;dirty=!1;inTransaction=!1;async open(e){this.dbPath=e;let t=(await import(`sql.js`)).default,n=await t();if(i(e)){let t=o(e);this.db=new n.Database(t)}else this.db=new n.Database}exec(e){let t=e.trimStart().toUpperCase();t.startsWith(`BEGIN`)?this.inTransaction=!0:(t.startsWith(`COMMIT`)||t.startsWith(`ROLLBACK`))&&(this.inTransaction=!1),this.getDb().run(e),this.dirty=!0}pragma(e){this.getDb().exec(`PRAGMA ${e}`)}queryAll(e,t=[]){let n=this.getDb().prepare(e);t.length>0&&n.bind(t);let r=[];for(;n.step();)r.push(n.getAsObject());return n.free(),r}run(e,t=[]){let n=this.getDb(),r=e.trimStart().toUpperCase(),i=!this.inTransaction&&(r.startsWith(`INSERT`)||r.startsWith(`UPDATE`));if(i&&n.run(`SAVEPOINT fk_check`),t.length>0){let r=n.prepare(e);try{r.bind(t),r.step()}finally{r.free()}}else n.run(e);if(i){if(n.exec(`PRAGMA foreign_key_check`).length>0)throw n.run(`ROLLBACK TO fk_check`),n.run(`RELEASE fk_check`),Error(`FOREIGN KEY constraint failed`);n.run(`RELEASE fk_check`)}this.dirty=!0}flush(){if(!this.dirty||!this.db)return;let e=this.db.export();c(this.dbPath,Buffer.from(e)),this.dirty=!1}close(){this.db&&=(this.flush(),this.db.close(),null)}getDb(){if(!this.db)throw 
Error(`SqlJsAdapter: database not opened`);return this.db}};function p(e){let t=e instanceof Error?e.message:String(e);return t.includes(`NODE_MODULE_VERSION`)||t.includes(`was compiled against a different Node.js version`)}function m(e){return e.replace(/\\/g,`/`).match(/^(.+[/\\]npm-cache[/\\]_npx[/\\][^/\\]+)/)?.[1]}function h(e){let t=m(e);if(!t)return;let n=u(t,`package-lock.json`);if(i(n))try{s(n),console.error(`[aikit] Removed corrupted lockfile: ${n}`)}catch{}}function g(){try{let e=t(import.meta.url),n=l(e.resolve(`better-sqlite3/package.json`));console.error(`[aikit] Native module version mismatch detected — re-downloading prebuild for Node ${process.version}...`),h(n);let a,o=[()=>e.resolve(`prebuild-install/bin`),()=>e.resolve(`prebuild-install/bin.js`),()=>u(n,`node_modules`,`prebuild-install`,`bin.js`),()=>u(n,`..`,`prebuild-install`,`bin.js`)];for(let e of o)try{let t=e();if(i(t)){a=t;break}}catch{}return a?(r(`node "${a}" -r napi`,{cwd:n,stdio:`pipe`,timeout:6e4,env:{...process.env,npm_config_loglevel:`silent`}}),console.error(`[aikit] Prebuild re-downloaded successfully`),!0):(console.error(`[aikit] Auto-heal: prebuild-install not found — skipping`),!1)}catch(e){let t=e instanceof Error?e.message:String(e);return console.error(`[aikit] Auto-heal failed: ${t}`),!1}}async function _(e,t){let n=t?.primary??`better-sqlite3`,r=[n,n===`better-sqlite3`?`sql.js`:`better-sqlite3`],i=!1;for(let t of r)try{let r=t===`better-sqlite3`?new d:new f;return await r.open(e),t!==n&&console.error(`[aikit] SQLite adapter: primary "${n}" unavailable, using fallback "${t}"`),r}catch(n){let r=n instanceof Error?n.message:String(n);if(console.error(`[aikit] SQLite adapter "${t}" failed to load: ${r}`),t===`better-sqlite3`&&!i&&p(n)&&(i=!0,g()))try{let t=new d;return await t.open(e),console.error(`[aikit] SQLite adapter "better-sqlite3" loaded after auto-heal`),t}catch(e){let t=e instanceof Error?e.message:String(e);console.error(`[aikit] Retry after heal still failed: 
${t}`)}}throw Error(`[aikit] No SQLite adapter available. Tried: ${r.join(`, `)}. Install either better-sqlite3 or sql.js.`)}var v=class{adapter=null;dbPath;adapterOptions;constructor(e){let t=e?.path??n.data;this.dbPath=u(t,`graph.db`),this.adapterOptions=e?.sqliteAdapter}async initialize(){let e=l(this.dbPath);i(e)||a(e,{recursive:!0}),this.adapter=await _(this.dbPath,this.adapterOptions),this.configureAdapter(this.adapter),this.createTables(this.adapter),this.migrateSchema(this.adapter),this.adapter.flush()}configureAdapter(e){e.pragma(`journal_mode = WAL`),e.pragma(`foreign_keys = ON`)}createTables(e){e.exec(`
1
+ import{t as e}from"./lance-store-CQkljFy3.js";import{createRequire as t}from"node:module";import{AIKIT_PATHS as n}from"../../core/dist/index.js";import{existsSync as r,mkdirSync as i,readFileSync as a,renameSync as o,unlinkSync as s,writeFileSync as c}from"node:fs";import{dirname as l,join as u}from"node:path";const d=t(import.meta.url);function f(e){return d.resolve(`sql.js/dist/${e}`)}var p=class{type=`sql.js`;db=null;dbPath=``;dirty=!1;inTransaction=!1;async open(e){this.dbPath=e;let t=(await import(`sql.js`)).default,n=await t({locateFile:e=>f(e)});if(r(e)){let t=a(e);this.db=new n.Database(t)}else this.db=new n.Database}exec(e){let t=e.trimStart().toUpperCase();this.getDb().run(e),t.startsWith(`BEGIN`)?this.inTransaction=!0:(t.startsWith(`COMMIT`)||t.startsWith(`ROLLBACK`))&&(this.inTransaction=!1),this.dirty=!0}pragma(e){this.getDb().exec(`PRAGMA ${e}`)}queryAll(e,t=[]){let n=this.getDb().prepare(e);try{t.length>0&&n.bind(t);let e=[];for(;n.step();)e.push(n.getAsObject());return e}finally{n.free()}}run(e,t=[]){let n=this.getDb(),r=e.trimStart().toUpperCase(),i=!this.inTransaction&&(r.startsWith(`INSERT`)||r.startsWith(`UPDATE`));i&&n.run(`SAVEPOINT fk_check`);try{if(t.length>0){let r=n.prepare(e);try{r.bind(t),r.step()}finally{r.free()}}else n.run(e);if(i){if(n.exec(`PRAGMA foreign_key_check`).length>0)throw n.run(`ROLLBACK TO fk_check`),n.run(`RELEASE fk_check`),Error(`FOREIGN KEY constraint failed`);n.run(`RELEASE fk_check`)}}catch(e){if(i)try{n.run(`ROLLBACK TO fk_check`),n.run(`RELEASE fk_check`)}catch{}throw e}this.dirty=!0}flush(){if(!this.dirty||!this.db)return;let e=this.db.export(),t=`${this.dbPath}.tmp`;c(t,Buffer.from(e)),o(t,this.dbPath),this.dirty=!1}close(){if(this.db){let e=this.db,t;try{this.flush()}catch(e){t=e;try{s(`${this.dbPath}.tmp`)}catch{}}try{e.close()}finally{this.db=null}if(t)throw t}}getDb(){if(!this.db)throw Error(`SqlJsAdapter: database not opened`);return this.db}};async function m(e){let t=new p;try{return await 
t.open(e),t}catch(e){let t=e instanceof Error?e.message:String(e);throw Error(`[aikit] SQLite adapter "sql.js" failed to load: ${t}`)}}var h=class{adapter=null;reopenPromise=null;dbPath;constructor(e){let t=e?.path??n.data;this.dbPath=u(t,`graph.db`)}async initialize(){let e=l(this.dbPath);r(e)||i(e,{recursive:!0}),this.adapter=await m(this.dbPath),this.configureAdapter(this.adapter),this.createTables(this.adapter),this.migrateSchema(this.adapter),this.adapter.flush()}configureAdapter(e){e.pragma(`journal_mode = WAL`),e.pragma(`foreign_keys = ON`)}createTables(e){e.exec(`
2
2
  CREATE TABLE IF NOT EXISTS nodes (
3
3
  id TEXT PRIMARY KEY,
4
4
  type TEXT NOT NULL,
@@ -39,7 +39,7 @@ import{t as e}from"./lance-store-CQkljFy3.js";import{createRequire as t}from"nod
39
39
  FOREIGN KEY (process_id) REFERENCES processes(id) ON DELETE CASCADE,
40
40
  FOREIGN KEY (node_id) REFERENCES nodes(id) ON DELETE CASCADE
41
41
  )
42
- `),e.exec(`CREATE INDEX IF NOT EXISTS idx_process_steps_node ON process_steps(node_id)`)}migrateSchema(e){for(let t of[`ALTER TABLE edges ADD COLUMN confidence REAL DEFAULT 1.0`,`ALTER TABLE nodes ADD COLUMN community TEXT`])try{e.exec(t)}catch{}e.exec(`CREATE INDEX IF NOT EXISTS idx_nodes_community ON nodes(community)`)}getAdapter(){if(!this.adapter)throw Error(`SqliteGraphStore not initialized — call initialize() first`);return this.adapter}async ensureOpen(){this.adapter||(this.adapter=await _(this.dbPath,this.adapterOptions),this.configureAdapter(this.adapter))}query(e,t=[]){return this.getAdapter().queryAll(e,t)}run(e,t=[]){this.getAdapter().run(e,t)}async upsertNode(e){await this.ensureOpen(),this.run(`INSERT INTO nodes (id, type, name, properties, source_record_id, source_path, created_at, community)
42
+ `),e.exec(`CREATE INDEX IF NOT EXISTS idx_process_steps_node ON process_steps(node_id)`)}migrateSchema(e){for(let t of[`ALTER TABLE edges ADD COLUMN confidence REAL DEFAULT 1.0`,`ALTER TABLE nodes ADD COLUMN community TEXT`])try{e.exec(t)}catch{}e.exec(`CREATE INDEX IF NOT EXISTS idx_nodes_community ON nodes(community)`)}getAdapter(){if(!this.adapter)throw Error(`SqliteGraphStore not initialized — call initialize() first`);return this.adapter}async ensureOpen(){if(this.adapter)return;if(this.reopenPromise)return this.reopenPromise;let e=this.reopenAdapter();this.reopenPromise=e;try{await e}finally{this.reopenPromise===e&&(this.reopenPromise=null)}}async reopenAdapter(){let e=await m(this.dbPath);this.configureAdapter(e),this.adapter=e}query(e,t=[]){return this.getAdapter().queryAll(e,t)}run(e,t=[]){this.getAdapter().run(e,t)}async upsertNode(e){await this.ensureOpen(),this.run(`INSERT INTO nodes (id, type, name, properties, source_record_id, source_path, created_at, community)
43
43
  VALUES (?, ?, ?, ?, ?, ?, ?, ?)
44
44
  ON CONFLICT(id) DO UPDATE SET
45
45
  type = excluded.type, name = excluded.name, properties = excluded.properties,
@@ -57,19 +57,19 @@ import{t as e}from"./lance-store-CQkljFy3.js";import{createRequire as t}from"nod
57
57
  VALUES (?, ?, ?, ?, ?, ?, ?)
58
58
  ON CONFLICT(id) DO UPDATE SET
59
59
  from_id = excluded.from_id, to_id = excluded.to_id,
60
- type = excluded.type, weight = excluded.weight, confidence = excluded.confidence, properties = excluded.properties`,[t.id,t.fromId,t.toId,t.type,t.weight??1,t.confidence??1,JSON.stringify(t.properties??{})]);t.exec(`COMMIT`),t.flush()}catch(e){throw t.exec(`ROLLBACK`),e}finally{t.pragma(`foreign_keys = ON`)}}async getNode(e){await this.ensureOpen();let t=this.query(`SELECT * FROM nodes WHERE id = ?`,[e]);return t.length>0?b(t[0]):null}async getNeighbors(e,t){await this.ensureOpen();let n=t?.direction??`both`,r=t?.edgeType,i=t?.limit??50,a=[],o=[],s=new Set;if(n===`outgoing`||n===`both`){let t=`
60
+ type = excluded.type, weight = excluded.weight, confidence = excluded.confidence, properties = excluded.properties`,[t.id,t.fromId,t.toId,t.type,t.weight??1,t.confidence??1,JSON.stringify(t.properties??{})]);t.exec(`COMMIT`),t.flush()}catch(e){throw t.exec(`ROLLBACK`),e}finally{t.pragma(`foreign_keys = ON`)}}async getNode(e){await this.ensureOpen();let t=this.query(`SELECT * FROM nodes WHERE id = ?`,[e]);return t.length>0?_(t[0]):null}async getNeighbors(e,t){await this.ensureOpen();let n=t?.direction??`both`,r=t?.edgeType,i=t?.limit??50,a=[],o=[],s=new Set;if(n===`outgoing`||n===`both`){let t=`
61
61
  SELECT e.id AS edge_id, e.from_id, e.to_id, e.type AS edge_type, e.weight,
62
62
  e.confidence AS edge_confidence, e.properties AS edge_props,
63
63
  n.id AS node_id, n.type AS node_type, n.name AS node_name, n.properties AS node_props,
64
64
  n.source_record_id AS node_src_rec, n.source_path AS node_src_path,
65
65
  n.created_at AS node_created, n.community AS node_community
66
- FROM edges e JOIN nodes n ON e.to_id = n.id WHERE e.from_id = ?`,n=[e];r&&(t+=` AND e.type = ?`,n.push(r)),t+=` LIMIT ?`,n.push(i);let c=this.query(t,n);for(let e of c)o.push(S(e)),s.has(e.node_id)||(s.add(e.node_id),a.push(C(e)))}if(n===`incoming`||n===`both`){let t=`
66
+ FROM edges e JOIN nodes n ON e.to_id = n.id WHERE e.from_id = ?`,n=[e];r&&(t+=` AND e.type = ?`,n.push(r)),t+=` LIMIT ?`,n.push(i);let c=this.query(t,n);for(let e of c)o.push(y(e)),s.has(e.node_id)||(s.add(e.node_id),a.push(b(e)))}if(n===`incoming`||n===`both`){let t=`
67
67
  SELECT e.id AS edge_id, e.from_id, e.to_id, e.type AS edge_type, e.weight,
68
68
  e.confidence AS edge_confidence, e.properties AS edge_props,
69
69
  n.id AS node_id, n.type AS node_type, n.name AS node_name, n.properties AS node_props,
70
70
  n.source_record_id AS node_src_rec, n.source_path AS node_src_path,
71
71
  n.created_at AS node_created, n.community AS node_community
72
- FROM edges e JOIN nodes n ON e.from_id = n.id WHERE e.to_id = ?`,n=[e];r&&(t+=` AND e.type = ?`,n.push(r)),t+=` LIMIT ?`,n.push(i);let c=this.query(t,n);for(let e of c)o.push(S(e)),s.has(e.node_id)||(s.add(e.node_id),a.push(C(e)))}return{nodes:a,edges:o}}async traverse(e,t){await this.ensureOpen();let n=t?.maxDepth??2,r=t?.direction??`both`,i=t?.edgeType,a=t?.limit??50,o=new Map,s=new Map,c=new Set,l=[{nodeId:e,depth:0}];for(;l.length>0&&o.size<a;){let e=l.shift();if(!e||c.has(e.nodeId)||e.depth>n)continue;c.add(e.nodeId);let t=await this.getNeighbors(e.nodeId,{direction:r,edgeType:i,limit:a-o.size});for(let r of t.nodes)o.has(r.id)||(o.set(r.id,r),e.depth+1<n&&l.push({nodeId:r.id,depth:e.depth+1}));for(let e of t.edges)s.set(e.id,e)}return{nodes:[...o.values()],edges:[...s.values()]}}async findNodes(e){await this.ensureOpen();let t=[],n=[];e.type&&(t.push(`type = ?`),n.push(e.type)),e.namePattern&&(t.push(`name LIKE ?`),n.push(`%${e.namePattern}%`)),e.sourcePath&&(t.push(`source_path = ?`),n.push(e.sourcePath));let r=t.length>0?`WHERE ${t.join(` AND `)}`:``,i=e.limit??100;return this.query(`SELECT * FROM nodes ${r} LIMIT ?`,[...n,i]).map(e=>b(e))}async findEdges(e){await this.ensureOpen();let t=[],n=[];e.type&&(t.push(`type = ?`),n.push(e.type)),e.fromId&&(t.push(`from_id = ?`),n.push(e.fromId)),e.toId&&(t.push(`to_id = ?`),n.push(e.toId));let r=t.length>0?`WHERE ${t.join(` AND `)}`:``,i=e.limit??100;return this.query(`SELECT * FROM edges ${r} LIMIT ?`,[...n,i]).map(e=>x(e))}async deleteNode(e){await this.ensureOpen();let t=this.getAdapter();t.exec(`BEGIN TRANSACTION`);try{this.run(`DELETE FROM edges WHERE from_id = ? 
OR to_id = ?`,[e,e]),this.run(`DELETE FROM nodes WHERE id = ?`,[e]),t.exec(`COMMIT`),t.flush()}catch(e){throw t.exec(`ROLLBACK`),e}}async deleteBySourcePath(e){await this.ensureOpen();let t=this.query(`SELECT id FROM nodes WHERE source_path = ?`,[e]);if(t.length===0)return 0;let n=this.getAdapter();n.exec(`BEGIN TRANSACTION`);try{for(let e of t)this.run(`DELETE FROM edges WHERE from_id = ? OR to_id = ?`,[e.id,e.id]);this.run(`DELETE FROM nodes WHERE source_path = ?`,[e]),n.exec(`COMMIT`),n.flush()}catch(e){throw n.exec(`ROLLBACK`),e}return t.length}async clear(){await this.ensureOpen(),this.run(`DELETE FROM process_steps`),this.run(`DELETE FROM processes`),this.run(`DELETE FROM edges`),this.run(`DELETE FROM nodes`),this.getAdapter().flush()}async getStats(){await this.ensureOpen();let e=this.query(`SELECT COUNT(*) as count FROM nodes`)[0]?.count??0,t=this.query(`SELECT COUNT(*) as count FROM edges`)[0]?.count??0,n=this.query(`SELECT type, COUNT(*) as count FROM nodes GROUP BY type`),r={};for(let e of n)r[e.type]=e.count;let i=this.query(`SELECT type, COUNT(*) as count FROM edges GROUP BY type`),a={};for(let e of i)a[e.type]=e.count;return{nodeCount:e,edgeCount:t,nodeTypes:r,edgeTypes:a}}async validate(){await this.ensureOpen();let e=await this.getStats(),t=this.query(`SELECT e.id AS edgeId,
72
+ FROM edges e JOIN nodes n ON e.from_id = n.id WHERE e.to_id = ?`,n=[e];r&&(t+=` AND e.type = ?`,n.push(r)),t+=` LIMIT ?`,n.push(i);let c=this.query(t,n);for(let e of c)o.push(y(e)),s.has(e.node_id)||(s.add(e.node_id),a.push(b(e)))}return{nodes:a,edges:o}}async traverse(e,t){await this.ensureOpen();let n=t?.maxDepth??2,r=t?.direction??`both`,i=t?.edgeType,a=t?.limit??50,o=new Map,s=new Map,c=new Set,l=[{nodeId:e,depth:0}];for(;l.length>0&&o.size<a;){let e=l.shift();if(!e||c.has(e.nodeId)||e.depth>n)continue;c.add(e.nodeId);let t=await this.getNeighbors(e.nodeId,{direction:r,edgeType:i,limit:a-o.size});for(let r of t.nodes)o.has(r.id)||(o.set(r.id,r),e.depth+1<n&&l.push({nodeId:r.id,depth:e.depth+1}));for(let e of t.edges)s.set(e.id,e)}return{nodes:[...o.values()],edges:[...s.values()]}}async findNodes(e){await this.ensureOpen();let t=[],n=[];e.type&&(t.push(`type = ?`),n.push(e.type)),e.namePattern&&(t.push(`name LIKE ?`),n.push(`%${e.namePattern}%`)),e.sourcePath&&(t.push(`source_path = ?`),n.push(e.sourcePath));let r=t.length>0?`WHERE ${t.join(` AND `)}`:``,i=e.limit??100;return this.query(`SELECT * FROM nodes ${r} LIMIT ?`,[...n,i]).map(e=>_(e))}async findEdges(e){await this.ensureOpen();let t=[],n=[];e.type&&(t.push(`type = ?`),n.push(e.type)),e.fromId&&(t.push(`from_id = ?`),n.push(e.fromId)),e.toId&&(t.push(`to_id = ?`),n.push(e.toId));let r=t.length>0?`WHERE ${t.join(` AND `)}`:``,i=e.limit??100;return this.query(`SELECT * FROM edges ${r} LIMIT ?`,[...n,i]).map(e=>v(e))}async deleteNode(e){await this.ensureOpen();let t=this.getAdapter();t.exec(`BEGIN TRANSACTION`);try{this.run(`DELETE FROM edges WHERE from_id = ? 
OR to_id = ?`,[e,e]),this.run(`DELETE FROM nodes WHERE id = ?`,[e]),t.exec(`COMMIT`),t.flush()}catch(e){throw t.exec(`ROLLBACK`),e}}async deleteBySourcePath(e){await this.ensureOpen();let t=this.query(`SELECT id FROM nodes WHERE source_path = ?`,[e]);if(t.length===0)return 0;let n=this.getAdapter();n.exec(`BEGIN TRANSACTION`);try{for(let e of t)this.run(`DELETE FROM edges WHERE from_id = ? OR to_id = ?`,[e.id,e.id]);this.run(`DELETE FROM nodes WHERE source_path = ?`,[e]),n.exec(`COMMIT`),n.flush()}catch(e){throw n.exec(`ROLLBACK`),e}return t.length}async clear(){await this.ensureOpen();let e=this.getAdapter();e.exec(`BEGIN TRANSACTION`);try{this.run(`DELETE FROM process_steps`),this.run(`DELETE FROM processes`),this.run(`DELETE FROM edges`),this.run(`DELETE FROM nodes`),e.exec(`COMMIT`)}catch(t){throw e.exec(`ROLLBACK`),t}e.flush()}async getStats(){await this.ensureOpen();let e=this.query(`SELECT COUNT(*) as count FROM nodes`)[0]?.count??0,t=this.query(`SELECT COUNT(*) as count FROM edges`)[0]?.count??0,n=this.query(`SELECT type, COUNT(*) as count FROM nodes GROUP BY type`),r={};for(let e of n)r[e.type]=e.count;let i=this.query(`SELECT type, COUNT(*) as count FROM edges GROUP BY type`),a={};for(let e of i)a[e.type]=e.count;return{nodeCount:e,edgeCount:t,nodeTypes:r,edgeTypes:a}}async validate(){await this.ensureOpen();let e=await this.getStats(),t=this.query(`SELECT e.id AS edgeId,
73
73
  CASE
74
74
  WHEN n1.id IS NULL THEN e.from_id
75
75
  WHEN n2.id IS NULL THEN e.to_id
@@ -87,4 +87,4 @@ import{t as e}from"./lance-store-CQkljFy3.js";import{createRequire as t}from"nod
87
87
  VALUES (?, ?, ?, '{}', ?)`,[a,e,t,o]);for(let e=0;e<n.length;e++)this.run(`INSERT INTO process_steps (process_id, node_id, step_order) VALUES (?, ?, ?)`,[a,n[e],e]);s.exec(`COMMIT`),s.flush()}catch(e){throw s.exec(`ROLLBACK`),e}return{id:a,entryNodeId:e,label:t,properties:{},steps:n,createdAt:o}}async getProcesses(e){await this.ensureOpen();let t;t=e?this.query(`SELECT DISTINCT p.id, p.entry_node_id, p.label, p.properties, p.created_at
88
88
  FROM processes p
89
89
  JOIN process_steps ps ON p.id = ps.process_id
90
- WHERE ps.node_id = ?`,[e]):this.query(`SELECT * FROM processes`);let n=[];for(let e of t){let t=this.query(`SELECT node_id FROM process_steps WHERE process_id = ? ORDER BY step_order`,[e.id]);n.push({id:e.id,entryNodeId:e.entry_node_id,label:e.label,properties:y(e.properties),steps:t.map(e=>e.node_id),createdAt:e.created_at})}return n}async deleteProcess(e){await this.ensureOpen();let t=this.getAdapter();t.exec(`BEGIN TRANSACTION`);try{this.run(`DELETE FROM process_steps WHERE process_id = ?`,[e]),this.run(`DELETE FROM processes WHERE id = ?`,[e]),t.exec(`COMMIT`),t.flush()}catch(e){throw t.exec(`ROLLBACK`),e}}async depthGroupedTraversal(e,t=3,n){await this.ensureOpen();let r=n?.direction??`both`,i=n?.edgeType,a=n?.limit??100,o={},s=new Set;s.add(e);let c=[e];for(let e=1;e<=t;e++){let t=[],n=[];for(let e of c){let o=await this.getNeighbors(e,{direction:r,edgeType:i,limit:a});for(let e of o.nodes)s.has(e.id)||(s.add(e.id),t.push(e.id),n.push(e))}if(n.length>0&&(o[e]=n),c=t,c.length===0||s.size>=a)break}return o}async getCohesionScore(e){let t=this.query(`SELECT id FROM nodes WHERE community = ?`,[e]);if(t.length===0)return 0;let n=new Set(t.map(e=>e.id)),r=t.map(()=>`?`).join(`,`),i=t.map(e=>e.id),a=this.query(`SELECT from_id, to_id FROM edges WHERE from_id IN (${r}) OR to_id IN (${r})`,[...i,...i]);if(a.length===0)return 0;let o=0;for(let e of a)n.has(e.from_id)&&n.has(e.to_id)&&o++;return o/a.length}async getSymbol360(e){let t=await this.getNode(e);if(!t)throw Error(`Node '${e}' not found`);let n=await this.findEdges({toId:e}),r=await this.findEdges({fromId:e}),i=await this.getProcesses(e);return{node:t,incoming:n,outgoing:r,community:t.community??null,processes:i}}async close(){this.adapter&&=(this.adapter.close(),null)}};function y(e){if(!e)return{};try{return JSON.parse(e)}catch{return{}}}function b(e){return{id:e.id,type:e.type,name:e.name,properties:y(e.properties),sourceRecordId:e.source_record_id??void 0,sourcePath:e.source_path??void 
0,createdAt:e.created_at,community:e.community??void 0}}function x(e){return{id:e.id,fromId:e.from_id,toId:e.to_id,type:e.type,weight:e.weight??1,confidence:e.confidence??1,properties:y(e.properties)}}function S(e){return{id:e.edge_id,fromId:e.from_id,toId:e.to_id,type:e.edge_type,weight:e.weight??1,confidence:e.edge_confidence??1,properties:y(e.edge_props??`{}`)}}function C(e){return{id:e.node_id,type:e.node_type,name:e.node_name,properties:y(e.node_props??`{}`),sourceRecordId:e.node_src_rec??void 0,sourcePath:e.node_src_path??void 0,createdAt:e.node_created,community:e.node_community??void 0}}async function w(e){switch(e.backend){case`lancedb`:{let{LanceStore:t}=await import(`./lance-store-CQkljFy3.js`).then(e=>e.n);return new t({path:e.path})}default:throw Error(`Unknown store backend: "${e.backend}". Supported: lancedb`)}}export{e as LanceStore,v as SqliteGraphStore,_ as createSqliteAdapter,w as createStore};
90
+ WHERE ps.node_id = ?`,[e]):this.query(`SELECT * FROM processes`);let n=[];for(let e of t){let t=this.query(`SELECT node_id FROM process_steps WHERE process_id = ? ORDER BY step_order`,[e.id]);n.push({id:e.id,entryNodeId:e.entry_node_id,label:e.label,properties:g(e.properties),steps:t.map(e=>e.node_id),createdAt:e.created_at})}return n}async deleteProcess(e){await this.ensureOpen();let t=this.getAdapter();t.exec(`BEGIN TRANSACTION`);try{this.run(`DELETE FROM process_steps WHERE process_id = ?`,[e]),this.run(`DELETE FROM processes WHERE id = ?`,[e]),t.exec(`COMMIT`),t.flush()}catch(e){throw t.exec(`ROLLBACK`),e}}async depthGroupedTraversal(e,t=3,n){await this.ensureOpen();let r=n?.direction??`both`,i=n?.edgeType,a=n?.limit??100,o={},s=new Set;s.add(e);let c=[e];for(let e=1;e<=t;e++){let t=[],n=[];for(let e of c){let o=await this.getNeighbors(e,{direction:r,edgeType:i,limit:a});for(let e of o.nodes)s.has(e.id)||(s.add(e.id),t.push(e.id),n.push(e))}if(n.length>0&&(o[e]=n),c=t,c.length===0||s.size>=a)break}return o}async getCohesionScore(e){await this.ensureOpen();let t=this.query(`SELECT id FROM nodes WHERE community = ?`,[e]);if(t.length===0)return 0;let n=new Set(t.map(e=>e.id)),r=t.map(()=>`?`).join(`,`),i=t.map(e=>e.id),a=this.query(`SELECT from_id, to_id FROM edges WHERE from_id IN (${r}) OR to_id IN (${r})`,[...i,...i]);if(a.length===0)return 0;let o=0;for(let e of a)n.has(e.from_id)&&n.has(e.to_id)&&o++;return o/a.length}async getSymbol360(e){let t=await this.getNode(e);if(!t)throw Error(`Node '${e}' not found`);let n=await this.findEdges({toId:e}),r=await this.findEdges({fromId:e}),i=await this.getProcesses(e);return{node:t,incoming:n,outgoing:r,community:t.community??null,processes:i}}async close(){this.adapter&&=(this.adapter.close(),null)}};function g(e){if(!e)return{};try{return JSON.parse(e)}catch{return{}}}function _(e){return{id:e.id,type:e.type,name:e.name,properties:g(e.properties),sourceRecordId:e.source_record_id??void 
0,sourcePath:e.source_path??void 0,createdAt:e.created_at,community:e.community??void 0}}function v(e){return{id:e.id,fromId:e.from_id,toId:e.to_id,type:e.type,weight:e.weight??1,confidence:e.confidence??1,properties:g(e.properties)}}function y(e){return{id:e.edge_id,fromId:e.from_id,toId:e.to_id,type:e.edge_type,weight:e.weight??1,confidence:e.edge_confidence??1,properties:g(e.edge_props??`{}`)}}function b(e){return{id:e.node_id,type:e.node_type,name:e.node_name,properties:g(e.node_props??`{}`),sourceRecordId:e.node_src_rec??void 0,sourcePath:e.node_src_path??void 0,createdAt:e.node_created,community:e.node_community??void 0}}async function x(e){switch(e.backend){case`lancedb`:{let{LanceStore:t}=await import(`./lance-store-CQkljFy3.js`).then(e=>e.n);return new t({path:e.path})}default:throw Error(`Unknown store backend: "${e.backend}". Supported: lancedb`)}}export{e as LanceStore,h as SqliteGraphStore,m as createSqliteAdapter,x as createStore};
@@ -3,20 +3,20 @@ import { IEmbedder } from "../../embeddings/dist/index.js";
3
3
  import { DepthGroupedResult, GraphEdge, GraphNode, GraphStats, GraphTraversalResult, GraphValidationResult, IGraphStore, IKnowledgeStore, ProcessInfo, Symbol360 } from "../../store/dist/index.js";
4
4
 
5
5
  //#region packages/tools/src/response-envelope.d.ts
6
- interface KBNextHint {
6
+ interface AikitNextHint {
7
7
  tool: string;
8
8
  reason: string;
9
9
  suggested_args?: Record<string, unknown>;
10
10
  }
11
- interface KBError {
12
- code: KBErrorCode;
11
+ interface AikitToolError {
12
+ code: AikitToolErrorCode;
13
13
  category: 'input' | 'runtime' | 'dependency' | 'timeout' | 'not_found';
14
14
  retryable: boolean;
15
15
  message: string;
16
16
  suggestion?: string;
17
17
  }
18
- type KBErrorCode = 'SYMBOL_NOT_FOUND' | 'INDEX_STALE' | 'TREE_SITTER_UNAVAILABLE' | 'PARSE_FAILED' | 'BUDGET_EXCEEDED' | 'PATH_NOT_FOUND' | 'EMBEDDING_COLD_START' | 'ANALYSIS_FAILED';
19
- interface KBResponseMeta {
18
+ type AikitToolErrorCode = 'SYMBOL_NOT_FOUND' | 'INDEX_STALE' | 'TREE_SITTER_UNAVAILABLE' | 'PARSE_FAILED' | 'BUDGET_EXCEEDED' | 'PATH_NOT_FOUND' | 'EMBEDDING_COLD_START' | 'ANALYSIS_FAILED';
19
+ interface AikitResponseMeta {
20
20
  durationMs: number;
21
21
  tokensEstimate: number;
22
22
  detail: TokenBudget;
@@ -24,21 +24,21 @@ interface KBResponseMeta {
24
24
  truncated: boolean;
25
25
  caveats?: string[];
26
26
  }
27
- interface KBResponse<T> {
27
+ interface AikitResponse<T> {
28
28
  ok: boolean;
29
29
  tool: string;
30
30
  summary: string;
31
31
  data?: T;
32
- meta: KBResponseMeta;
33
- next?: KBNextHint[];
34
- error?: KBError;
32
+ meta: AikitResponseMeta;
33
+ next?: AikitNextHint[];
34
+ error?: AikitToolError;
35
35
  }
36
36
  /** Create a success response. */
37
- declare function okResponse<T>(tool: string, summary: string, data: T, meta: Partial<KBResponseMeta> & {
37
+ declare function okResponse<T>(tool: string, summary: string, data: T, meta: Partial<AikitResponseMeta> & {
38
38
  durationMs: number;
39
- }, next?: KBNextHint[]): KBResponse<T>;
39
+ }, next?: AikitNextHint[]): AikitResponse<T>;
40
40
  /** Create an error response. */
41
- declare function errorResponse(tool: string, error: KBError, durationMs: number): KBResponse<never>;
41
+ declare function errorResponse(tool: string, error: AikitToolError, durationMs: number): AikitResponse<never>;
42
42
  //#endregion
43
43
  //#region packages/tools/src/audit.d.ts
44
44
  type AuditCheck = 'structure' | 'dependencies' | 'patterns' | 'health' | 'dead_symbols' | 'check' | 'entry_points';
@@ -98,7 +98,7 @@ interface AuditData {
98
98
  };
99
99
  recommendations: AuditRecommendation[];
100
100
  }
101
- declare function audit(store: IKnowledgeStore, embedder: IEmbedder, options?: AuditOptions): Promise<KBResponse<AuditData>>;
101
+ declare function audit(store: IKnowledgeStore, embedder: IEmbedder, options?: AuditOptions): Promise<AikitResponse<AuditData>>;
102
102
  //#endregion
103
103
  //#region packages/tools/src/batch.d.ts
104
104
  interface BatchOperation {
@@ -1268,8 +1268,9 @@ declare function gitAvailable(cwd: string): boolean;
1268
1268
  * Execute a git command with a 10-second timeout.
1269
1269
  * Returns trimmed stdout on success, undefined on failure.
1270
1270
  * Never throws — failures are logged and return undefined.
1271
+ * Pass silent=true to suppress the warning (e.g. for expected failures like rev-parse on new refs).
1271
1272
  */
1272
- declare function gitExec(args: string[], cwd: string, input?: string): string | undefined;
1273
+ declare function gitExec(args: string[], cwd: string, input?: string, silent?: boolean): string | undefined;
1273
1274
  /**
1274
1275
  * Create a slugified ref segment from a label.
1275
1276
  * Lowercase, replace non-alphanumeric with dashes, trim leading/trailing dashes, max 60 chars.
@@ -2351,4 +2352,4 @@ declare function addToWorkset(name: string, files: string[], cwd?: string): Work
2351
2352
  */
2352
2353
  declare function removeFromWorkset(name: string, files: string[], cwd?: string): Workset | null;
2353
2354
  //#endregion
2354
- export { type AuditCheck, type AuditData, type AuditOptions, type AuditRecommendation, type BatchOperation, type BatchOptions, type BatchResult, type ChangelogEntry, type ChangelogFormat, type ChangelogOptions, type ChangelogResult, type CheckOptions, type CheckResult, type CheckSummaryResult, type Checkpoint, type ClassifyTrigger, type CodemodChange, type CodemodOptions, type CodemodResult, type CodemodRule, type CompactOptions, type CompactResult, type CompressOutputOptions, type CompressionContext, type CompressionMode, type CompressionResult, type CompressionRule, type ConstraintRef, type DeadSymbol, type DeadSymbolOptions, type DeadSymbolResult, type DelegateOptions, type DelegateResult, type DiffChange, type DiffFile, type DiffHunk, type DiffParseOptions, type DigestFieldEntry, type DigestOptions, type DigestResult, type DigestSource, type DogfoodLogEntry, type DogfoodLogGroupedEntry, type DogfoodLogOptions, type DogfoodLogResult, type EncodeOperation, type EncodeOptions, type EncodeResult, type EnvInfoOptions, type EnvInfoResult, type EvalOptions, type EvalResult, type EvidenceEntry, type EvidenceMapAction, type EvidenceMapResult, type EvidenceMapState, type EvidenceStatus, type Example, FileCache, type FileCacheEntry, type FileCacheStats, type FileMetrics, type FileSummaryOptions, type FileSummaryResult, type FindExamplesOptions, type FindExamplesResult, type FindOptions, type FindResult, type FindResults, type ForgeClassifyCeremony, type ForgeClassifyOptions, type ForgeClassifyResult, type ForgeGroundOptions, type ForgeGroundResult, type ForgeTier, GIT_REF_SLUG_PATTERN, type GateConfig, type GateDecision, type GateResult, type GitContextOptions, type GitContextResult, type GraphAugmentOptions, type GraphAugmentedResult, type GraphQueryOptions, type GraphQueryResult, type GuideRecommendation, type GuideResult, type HealthCheck, type HealthResult, type HotspotEntry, type HttpMethod, type HttpRequestOptions, type HttpRequestResult, type KBError, type 
KBErrorCode, type KBNextHint, type KBResponse, type KBResponseMeta, type LaneDiffEntry, type LaneDiffResult, type LaneMergeResult, type LaneMeta, type Lease, type LeaseConflict, type ManagedProcess, type MeasureOptions, type MeasureResult, type OnboardMode, type OnboardOptions, type OnboardResult, type OnboardStepResult, type ParsedError, type ParsedGitStatus, type ParsedOutput, type ParsedTestResult, type ParsedTestSummary, type QueueItem, type QueueState, type RegexTestOptions, type RegexTestResult, type RenameChange, type RenameOptions, type RenameResult, type ReplayEntry, type ReplayOptions, type RestorePoint, type SafetyGate, type SafetyGateResult, type SchemaValidateOptions, type SchemaValidateResult, type ScopeMapEntry, type ScopeMapOptions, type ScopeMapResult, type SessionDigestOptions, type SessionDigestResult, type Snippet, type SnippetAction, type SnippetOptions, type SnippetResult, type StashEntry, type StratumCard, type StratumCardOptions, type StratumCardResult, type SymbolGraphContext, type SymbolInfo, type SymbolOptions, type TestRunOptions, type TestRunResult, type TimeOptions, type TimeResult, type TimeoutAction, type TraceNode, type TraceOptions, type TraceResult, type TransformOptions, type TransformResult, type TypedUnknownSeed, type UnknownType, type ValidationError, type WatchEvent, type WatchHandle, type WatchOptions, type WebFetchMode, type WebFetchOptions, type WebFetchResult, type WebSearchOptions, type WebSearchResult, type WebSearchResultItem, type Workset, acquireLease, addToWorkset, analyzeFile, audit, autoClaimTestFailures, batch, bookendReorder, bpeSurprise, changelog, check, checkpointDiff, checkpointGC, checkpointHistory, checkpointLatest, checkpointList, checkpointLoad, checkpointSave, classifyExitCode, codemod, compact, compressOutput, compressTerminalOutput, cosineSimilarity, createRestorePoint, dataTransform, delegate, delegateListModels, deleteWorkset, detectOutputTool, diffParse, digest, dogfoodLog, encode, envInfo, 
errorResponse, escapeRegExp, estimateTokens, evaluate, evidenceMap, fileSummary, find, findDeadSymbols, findExamples, forgeClassify, forgeGround, formatChangelog, getRegisteredRules, getWorkset, gitAvailable, gitCommitToRef, gitContext, gitExec, graphAugmentSearch, graphQuery, guide, headTailTruncate, health, httpRequest, laneCreate, laneDiff, laneDiscard, laneList, laneMerge, laneStatus, listActiveLeases, listRestorePoints, listWorksets, measure, okResponse, onboard, paragraphTruncate, parseBiome, parseGitStatus, parseOutput, parseSearchResults, parseTsc, parseVitest, processList, processLogs, processStart, processStatus, processStop, queueClear, queueCreate, queueDag, queueDelete, queueDone, queueFail, queueGet, queueList, queueNext, queuePush, regexTest, registerRule, registerRules, releaseLease, removeFromWorkset, rename, replayAppend, replayCapture, replayClear, replayList, replayTrim, resetGitCache, resolvePath, restoreFromPoint, saveWorkset, schemaValidate, scopeMap, scoreLine, scoreLines, segment, sessionDigest, sessionDigestSampling, shannonEntropy, slugForRef, snippet, stashClear, stashDelete, stashGet, stashList, stashSet, stratumCard, summarizeCheckResult, symbol, testRun, timeUtils, trace, truncateToTokenBudget, watchList, watchStart, watchStop, webFetch, webSearch };
2355
+ export { type AikitNextHint, type AikitResponse, type AikitResponseMeta, type AikitToolError, type AikitToolErrorCode, type AuditCheck, type AuditData, type AuditOptions, type AuditRecommendation, type BatchOperation, type BatchOptions, type BatchResult, type ChangelogEntry, type ChangelogFormat, type ChangelogOptions, type ChangelogResult, type CheckOptions, type CheckResult, type CheckSummaryResult, type Checkpoint, type ClassifyTrigger, type CodemodChange, type CodemodOptions, type CodemodResult, type CodemodRule, type CompactOptions, type CompactResult, type CompressOutputOptions, type CompressionContext, type CompressionMode, type CompressionResult, type CompressionRule, type ConstraintRef, type DeadSymbol, type DeadSymbolOptions, type DeadSymbolResult, type DelegateOptions, type DelegateResult, type DiffChange, type DiffFile, type DiffHunk, type DiffParseOptions, type DigestFieldEntry, type DigestOptions, type DigestResult, type DigestSource, type DogfoodLogEntry, type DogfoodLogGroupedEntry, type DogfoodLogOptions, type DogfoodLogResult, type EncodeOperation, type EncodeOptions, type EncodeResult, type EnvInfoOptions, type EnvInfoResult, type EvalOptions, type EvalResult, type EvidenceEntry, type EvidenceMapAction, type EvidenceMapResult, type EvidenceMapState, type EvidenceStatus, type Example, FileCache, type FileCacheEntry, type FileCacheStats, type FileMetrics, type FileSummaryOptions, type FileSummaryResult, type FindExamplesOptions, type FindExamplesResult, type FindOptions, type FindResult, type FindResults, type ForgeClassifyCeremony, type ForgeClassifyOptions, type ForgeClassifyResult, type ForgeGroundOptions, type ForgeGroundResult, type ForgeTier, GIT_REF_SLUG_PATTERN, type GateConfig, type GateDecision, type GateResult, type GitContextOptions, type GitContextResult, type GraphAugmentOptions, type GraphAugmentedResult, type GraphQueryOptions, type GraphQueryResult, type GuideRecommendation, type GuideResult, type HealthCheck, type HealthResult, 
type HotspotEntry, type HttpMethod, type HttpRequestOptions, type HttpRequestResult, type LaneDiffEntry, type LaneDiffResult, type LaneMergeResult, type LaneMeta, type Lease, type LeaseConflict, type ManagedProcess, type MeasureOptions, type MeasureResult, type OnboardMode, type OnboardOptions, type OnboardResult, type OnboardStepResult, type ParsedError, type ParsedGitStatus, type ParsedOutput, type ParsedTestResult, type ParsedTestSummary, type QueueItem, type QueueState, type RegexTestOptions, type RegexTestResult, type RenameChange, type RenameOptions, type RenameResult, type ReplayEntry, type ReplayOptions, type RestorePoint, type SafetyGate, type SafetyGateResult, type SchemaValidateOptions, type SchemaValidateResult, type ScopeMapEntry, type ScopeMapOptions, type ScopeMapResult, type SessionDigestOptions, type SessionDigestResult, type Snippet, type SnippetAction, type SnippetOptions, type SnippetResult, type StashEntry, type StratumCard, type StratumCardOptions, type StratumCardResult, type SymbolGraphContext, type SymbolInfo, type SymbolOptions, type TestRunOptions, type TestRunResult, type TimeOptions, type TimeResult, type TimeoutAction, type TraceNode, type TraceOptions, type TraceResult, type TransformOptions, type TransformResult, type TypedUnknownSeed, type UnknownType, type ValidationError, type WatchEvent, type WatchHandle, type WatchOptions, type WebFetchMode, type WebFetchOptions, type WebFetchResult, type WebSearchOptions, type WebSearchResult, type WebSearchResultItem, type Workset, acquireLease, addToWorkset, analyzeFile, audit, autoClaimTestFailures, batch, bookendReorder, bpeSurprise, changelog, check, checkpointDiff, checkpointGC, checkpointHistory, checkpointLatest, checkpointList, checkpointLoad, checkpointSave, classifyExitCode, codemod, compact, compressOutput, compressTerminalOutput, cosineSimilarity, createRestorePoint, dataTransform, delegate, delegateListModels, deleteWorkset, detectOutputTool, diffParse, digest, dogfoodLog, encode, 
envInfo, errorResponse, escapeRegExp, estimateTokens, evaluate, evidenceMap, fileSummary, find, findDeadSymbols, findExamples, forgeClassify, forgeGround, formatChangelog, getRegisteredRules, getWorkset, gitAvailable, gitCommitToRef, gitContext, gitExec, graphAugmentSearch, graphQuery, guide, headTailTruncate, health, httpRequest, laneCreate, laneDiff, laneDiscard, laneList, laneMerge, laneStatus, listActiveLeases, listRestorePoints, listWorksets, measure, okResponse, onboard, paragraphTruncate, parseBiome, parseGitStatus, parseOutput, parseSearchResults, parseTsc, parseVitest, processList, processLogs, processStart, processStatus, processStop, queueClear, queueCreate, queueDag, queueDelete, queueDone, queueFail, queueGet, queueList, queueNext, queuePush, regexTest, registerRule, registerRules, releaseLease, removeFromWorkset, rename, replayAppend, replayCapture, replayClear, replayList, replayTrim, resetGitCache, resolvePath, restoreFromPoint, saveWorkset, schemaValidate, scopeMap, scoreLine, scoreLines, segment, sessionDigest, sessionDigestSampling, shannonEntropy, slugForRef, snippet, stashClear, stashDelete, stashGet, stashList, stashSet, stratumCard, summarizeCheckResult, symbol, testRun, timeUtils, trace, truncateToTokenBudget, watchList, watchStart, watchStop, webFetch, webSearch };
@@ -9,9 +9,9 @@ import{DependencyAnalyzer as e,DiagramGenerator as t,EntryPointAnalyzer as n,Pat
9
9
  ### Patterns Detected
10
10
  `),n.push(`| Pattern | Confidence | Count |`),n.push(`|---------|-----------|-------|`);for(let t of e.patterns)n.push(`| ${t.name} | ${t.confidence} | ${t.count} |`)}else t===`normal`&&n.push(`\n**Patterns:** ${e.patterns.map(e=>e.name).join(`, `)}`);return n.join(`
11
11
  `)}async function nt(e,t,n={}){let r=Math.max(1,n.concurrency??4),i=[],a=[...e];async function o(e){let n=Date.now();try{let r=await t(e);return{id:e.id,status:`success`,result:r,durationMs:Date.now()-n}}catch(t){return{id:e.id,status:`error`,error:t instanceof Error?t.message:String(t),durationMs:Date.now()-n}}}for(;a.length>0;){let e=a.splice(0,r),t=await Promise.allSettled(e.map(e=>o(e)));for(let n=0;n<t.length;n++){let r=t[n];if(r.status===`fulfilled`){i.push(r.value);continue}i.push({id:e[n]?.id??`unknown`,status:`error`,error:r.reason instanceof Error?r.reason.message:`Promise rejected`,durationMs:0})}}return i}const rt=/^[a-zA-Z0-9_./\-~^@{}]+$/;function it(e){let{from:t,to:n=`HEAD`,format:r=`grouped`,includeBreaking:i=!0,cwd:a=process.cwd()}=e;if(!rt.test(t))throw Error(`Invalid git ref: ${t}`);if(!rt.test(n))throw Error(`Invalid git ref: ${n}`);let o;try{o=u(`git`,[`log`,`${t}..${n}`,`--format=%H%s%b%an%ai`],{cwd:a,encoding:`utf8`,maxBuffer:10*1024*1024,timeout:3e4})}catch{throw Error(`Git log failed. 
Ensure "${t}" and "${n}" are valid refs.`)}let s=o.split(``).map(e=>e.trim()).filter(Boolean).map(e=>{let[t=``,n=``,r=``,i=``,a=``]=e.split(``),o=n.match(/^(\w+)(?:\(([^)]*)\))?(!)?:\s*(.+)/);return{hash:t.slice(0,8),type:o?.[1]??`other`,scope:o?.[2]??``,subject:o?.[4]??n,body:r.trim(),author:i.trim(),date:a.trim().split(` `)[0],breaking:!!(o?.[3]||/BREAKING[\s-]CHANGE/i.test(r))}}),c={},l=0;for(let e of s)c[e.type]=(c[e.type]??0)+1,e.breaking&&l++;return{entries:s,markdown:at(s,r,i),stats:{total:s.length,breaking:l,types:c}}}function at(e,t,n){let r=[`# Changelog`,``];if(n){let t=e.filter(e=>e.breaking);if(t.length>0){r.push(`## Breaking Changes`,``);for(let e of t)r.push(`- ${e.subject} (${e.hash})`);r.push(``)}}if(t===`grouped`){let t={};for(let n of e)t[n.type]||(t[n.type]=[]),t[n.type].push(n);let n=[`feat`,`fix`,`refactor`,`perf`,`test`,`docs`,`chore`],i={feat:`Features`,fix:`Bug Fixes`,refactor:`Refactoring`,perf:`Performance`,test:`Tests`,docs:`Documentation`,chore:`Chores`,other:`Other`};for(let e of[...n,...Object.keys(t).filter(e=>!n.includes(e))])if(t[e]?.length){r.push(`## ${i[e]??e}`,``);for(let n of t[e]){let e=n.scope?`**${n.scope}:** `:``;r.push(`- ${e}${n.subject} (${n.hash})`)}r.push(``)}}else if(t===`chronological`)for(let t of e){let e=t.scope?`(${t.scope}) `:``;r.push(`- \`${t.date}\` ${t.type}: ${e}${t.subject} (${t.hash})`)}else{let t={};for(let n of e){let e=n.scope||`general`;t[e]||(t[e]=[]),t[e].push(n)}for(let[e,n]of Object.entries(t)){r.push(`## ${e}`,``);for(let e of n)r.push(`- ${e.type}: ${e.subject} (${e.hash})`);r.push(``)}}return r.join(`
12
- `)}const ot=/^[a-z0-9][a-z0-9-]*$/,st=new Map;function W(e){let t=w(e),n=st.get(t);if(n!==void 0)return n;try{return u(`git`,[`rev-parse`,`--git-dir`],{cwd:t,timeout:1e4,encoding:`utf8`}),st.set(t,!0),!0}catch{return st.set(t,!1),!1}}function G(e,t,n){try{return u(`git`,e,{cwd:t,timeout:1e4,input:n,encoding:`utf8`}).trim()}catch(t){console.warn(`Git operation failed (${e.join(` `)}): ${t instanceof Error?t.message:String(t)}`);return}}function ct(e){return e.toLowerCase().replace(/[^a-z0-9]+/g,`-`).replace(/^-|-$/g,``).slice(0,60)||`untitled`}function lt(e,t,n,r,i){let a=G([`hash-object`,`-w`,`--stdin`],i,n);if(!a)return;let o=G([`mktree`],i,`100644 blob ${a}\t${t}\n`);if(!o)return;let s=G([`rev-parse`,e],i),c=[`commit-tree`,o];s&&c.push(`-p`,s),c.push(`-m`,r);let l=G(c,i);if(l)return G([`update-ref`,e,l],i)===void 0?void 0:l}function ut(){st.clear()}const dt=`refs/aikit/checkpoints`,ft=ot;function K(e){let t=ct(e);return t===`untitled`?`checkpoint`:t}function pt(e,t,n){if(typeof e!=`string`||e.length<1||e.length>120)throw Error(`Checkpoint label must be 1-120 characters.`);if(!t||typeof t!=`object`||Array.isArray(t))throw Error(`Checkpoint data must be a JSON-serializable object.`);let r;try{r=JSON.stringify(t)}catch{throw Error(`Checkpoint data must be a JSON-serializable object.`)}if(typeof r!=`string`)throw Error(`Checkpoint data must be a JSON-serializable object.`);if(Buffer.byteLength(r,`utf8`)>5e5)throw Error(`Checkpoint data must be 500KB or less when serialized.`);if(n?.notes!==void 0&&Buffer.byteLength(n.notes,`utf8`)>1e4)throw Error(`Checkpoint notes must be 10KB or less.`)}function mt(e,t){let n=K(e.label),r=`${dt}/${n}`;if(ot.test(n))return lt(r,`checkpoint.json`,`${JSON.stringify(e,null,2)}\n`,`checkpoint: ${e.label}`,t)}function ht(e,t){let n=new Set,r=/^\d+-(.+)$/.exec(e);r?.[1]&&n.add(r[1]),n.add(K(e));for(let e of n){if(!ft.test(e))continue;let n=G([`rev-parse`,`${dt}/${e}`],t);if(!n)continue;let 
r=G([`cat-file`,`blob`,`${n}:checkpoint.json`],t);if(r)try{let e=JSON.parse(r);if(typeof e?.id!=`string`||typeof e.label!=`string`||typeof e.createdAt!=`string`||!e.data||typeof e.data!=`object`||Array.isArray(e.data))continue;return pt(e.label,e.data,{notes:e.notes}),{...e,gitSha:n}}catch(e){console.warn(`Corrupt git-backed checkpoint ${n}: ${e instanceof Error?e.message:String(e)}`)}}}function gt(e){let t=w(B(e??process.cwd()),`checkpoints`);return M(t)||N(t,{recursive:!0}),t}function _t(e,t,n){pt(e,t,{notes:n?.notes});let r=K(e),i={id:`${Date.now()}-${r}`,label:e,createdAt:new Date().toISOString(),data:t,files:n?.files,notes:n?.notes},a=w(gt(n?.cwd),`${i.id}.json`),o=`${a}.tmp`;R(o,`${JSON.stringify(i,null,2)}\n`,`utf-8`),I(o,a);let s=n?.cwd??process.cwd();if(W(s)){let e=mt(i,s);e&&(i.gitSha=e)}return i}function vt(e,t){let n=gt(t),r=w(n,`${e}.json`);if(r.startsWith(w(n))){if(!M(r)){let n=t??process.cwd();return W(n)?ht(e,n):void 0}try{return JSON.parse(P(r,`utf-8`))}catch(e){if(e?.code===`ENOENT`)return;console.warn(`Corrupt state file ${r}: ${e instanceof Error?e.message:String(e)}`);return}}}function yt(e){let t=gt(e);return F(t).filter(e=>e.endsWith(`.json`)).flatMap(e=>{let n=w(t,e);try{return[JSON.parse(P(n,`utf-8`))]}catch(e){return e?.code===`ENOENT`||console.warn(`Corrupt state file ${n}: ${e instanceof Error?e.message:String(e)}`),[]}}).sort((e,t)=>t.createdAt.localeCompare(e.createdAt))}function bt(e){return yt(e)[0]}function xt(e,t,n){let r=vt(e,n),i=vt(t,n);if(!r||!i)return;let a=new Set(Object.keys(r.data)),o=new Set(Object.keys(i.data));return{fromId:e,toId:t,added:[...o].filter(e=>!a.has(e)),removed:[...a].filter(e=>!o.has(e)),modified:[...a].filter(e=>o.has(e)).filter(e=>JSON.stringify(r.data[e])!==JSON.stringify(i.data[e]))}}function St(e,t){let n=t?.cwd??process.cwd(),r=t?.limit??20,i=K(e);if(W(n)){let e=`refs/aikit/checkpoints/${i}`,t=G([`log`,`--format=%H %aI %s`,`-n`,String(r),e],n);if(t)return t.trim().split(`
12
+ `)}const ot=/^[a-z0-9][a-z0-9-]*$/,st=new Map;function W(e){let t=w(e),n=st.get(t);if(n!==void 0)return n;try{return u(`git`,[`rev-parse`,`--git-dir`],{cwd:t,timeout:1e4,encoding:`utf8`}),st.set(t,!0),!0}catch{return st.set(t,!1),!1}}function G(e,t,n,r){try{return u(`git`,e,{cwd:t,timeout:1e4,input:n,encoding:`utf8`}).trim()}catch(t){r||console.warn(`Git operation failed (${e.join(` `)}): ${t instanceof Error?t.message:String(t)}`);return}}function ct(e){return e.toLowerCase().replace(/[^a-z0-9]+/g,`-`).replace(/^-|-$/g,``).slice(0,60)||`untitled`}function lt(e,t,n,r,i){let a=G([`hash-object`,`-w`,`--stdin`],i,n);if(!a)return;let o=G([`mktree`],i,`100644 blob ${a}\t${t}\n`);if(!o)return;let s=G([`rev-parse`,e],i,void 0,!0),c=[`commit-tree`,o];s&&c.push(`-p`,s),c.push(`-m`,r);let l=G(c,i);if(l)return G([`update-ref`,e,l],i)===void 0?void 0:l}function ut(){st.clear()}const dt=`refs/aikit/checkpoints`,ft=ot;function K(e){let t=ct(e);return t===`untitled`?`checkpoint`:t}function pt(e,t,n){if(typeof e!=`string`||e.length<1||e.length>120)throw Error(`Checkpoint label must be 1-120 characters.`);if(!t||typeof t!=`object`||Array.isArray(t))throw Error(`Checkpoint data must be a JSON-serializable object.`);let r;try{r=JSON.stringify(t)}catch{throw Error(`Checkpoint data must be a JSON-serializable object.`)}if(typeof r!=`string`)throw Error(`Checkpoint data must be a JSON-serializable object.`);if(Buffer.byteLength(r,`utf8`)>5e5)throw Error(`Checkpoint data must be 500KB or less when serialized.`);if(n?.notes!==void 0&&Buffer.byteLength(n.notes,`utf8`)>1e4)throw Error(`Checkpoint notes must be 10KB or less.`)}function mt(e,t){let n=K(e.label),r=`${dt}/${n}`;if(ot.test(n))return lt(r,`checkpoint.json`,`${JSON.stringify(e,null,2)}\n`,`checkpoint: ${e.label}`,t)}function ht(e,t){let n=new Set,r=/^\d+-(.+)$/.exec(e);r?.[1]&&n.add(r[1]),n.add(K(e));for(let e of n){if(!ft.test(e))continue;let n=G([`rev-parse`,`${dt}/${e}`],t);if(!n)continue;let 
r=G([`cat-file`,`blob`,`${n}:checkpoint.json`],t);if(r)try{let e=JSON.parse(r);if(typeof e?.id!=`string`||typeof e.label!=`string`||typeof e.createdAt!=`string`||!e.data||typeof e.data!=`object`||Array.isArray(e.data))continue;return pt(e.label,e.data,{notes:e.notes}),{...e,gitSha:n}}catch(e){console.warn(`Corrupt git-backed checkpoint ${n}: ${e instanceof Error?e.message:String(e)}`)}}}function gt(e){let t=w(B(e??process.cwd()),`checkpoints`);return M(t)||N(t,{recursive:!0}),t}function _t(e,t,n){pt(e,t,{notes:n?.notes});let r=K(e),i={id:`${Date.now()}-${r}`,label:e,createdAt:new Date().toISOString(),data:t,files:n?.files,notes:n?.notes},a=w(gt(n?.cwd),`${i.id}.json`),o=`${a}.tmp`;R(o,`${JSON.stringify(i,null,2)}\n`,`utf-8`),I(o,a);let s=n?.cwd??process.cwd();if(W(s)){let e=mt(i,s);e&&(i.gitSha=e)}return i}function vt(e,t){let n=gt(t),r=w(n,`${e}.json`);if(r.startsWith(w(n))){if(!M(r)){let n=t??process.cwd();return W(n)?ht(e,n):void 0}try{return JSON.parse(P(r,`utf-8`))}catch(e){if(e?.code===`ENOENT`)return;console.warn(`Corrupt state file ${r}: ${e instanceof Error?e.message:String(e)}`);return}}}function yt(e){let t=gt(e);return F(t).filter(e=>e.endsWith(`.json`)).flatMap(e=>{let n=w(t,e);try{return[JSON.parse(P(n,`utf-8`))]}catch(e){return e?.code===`ENOENT`||console.warn(`Corrupt state file ${n}: ${e instanceof Error?e.message:String(e)}`),[]}}).sort((e,t)=>t.createdAt.localeCompare(e.createdAt))}function bt(e){return yt(e)[0]}function xt(e,t,n){let r=vt(e,n),i=vt(t,n);if(!r||!i)return;let a=new Set(Object.keys(r.data)),o=new Set(Object.keys(i.data));return{fromId:e,toId:t,added:[...o].filter(e=>!a.has(e)),removed:[...a].filter(e=>!o.has(e)),modified:[...a].filter(e=>o.has(e)).filter(e=>JSON.stringify(r.data[e])!==JSON.stringify(i.data[e]))}}function St(e,t){let n=t?.cwd??process.cwd(),r=t?.limit??20,i=K(e);if(W(n)){let e=`refs/aikit/checkpoints/${i}`,t=G([`log`,`--format=%H %aI %s`,`-n`,String(r),e],n);if(t)return t.trim().split(`
13
13
  `).filter(Boolean).map(e=>{let[t,n,...r]=e.split(` `);return{sha:t,id:t.slice(0,12),createdAt:n,label:r.join(` `)}})}return yt(n).filter(e=>K(e.label)===i).slice(0,r).map(e=>({id:e.id,createdAt:e.createdAt,label:e.label}))}function Ct(e){let t=yt(e?.cwd),n=e?.keepLast??10,r=e?.dryRun??!0,i=e?.maxAgeDays===void 0?void 0:Date.now()-e.maxAgeDays*864e5,a=e?.label?K(e.label):void 0,o=new Map;for(let e of t){let t=K(e.label);if(a&&t!==a)continue;let n=o.get(t);n?n.push(e):o.set(t,[e])}let s=[],c=0;for(let e of o.values())e.forEach((e,t)=>{let r=Date.parse(e.createdAt);if(t<n&&!(i!==void 0&&!Number.isNaN(r)&&r<i)){c+=1;return}s.push(e.id)});if(!r&&s.length>0){let t=gt(e?.cwd),n=e?.cwd??process.cwd(),r=new Set(s);for(let e of s){let n=w(t,`${e}.json`);if(n.startsWith(w(t)))try{re(n)}catch(e){if(e?.code===`ENOENT`)continue;console.warn(`Failed to remove checkpoint ${n}: ${e instanceof Error?e.message:String(e)}`)}}if(W(n))for(let[e,t]of o.entries())t.every(e=>r.has(e.id))&&ft.test(e)&&G([`update-ref`,`-d`,`${dt}/${e}`],n)}return{deleted:s.length,kept:c,labels:[...o.keys()],deletedIds:s}}const wt=[`.ts`,`.tsx`,`.js`,`.jsx`],Tt=new Set([`node_modules`,`.git`,`dist`,`build`,`coverage`,`.turbo`,`.cache`,`cdk.out`,z.state]);function Et(e){return e.replace(/\\/g,`/`)}function Dt(e){return e.replace(/[.+^${}()|[\]\\]/g,`\\$&`)}function Ot(e,t){let n=Et(e),r=Et(t).trim();if(!r)return!1;let i=Dt(r).replace(/\*\*/g,`::DOUBLE_STAR::`).replace(/\*/g,`[^/]*`).replace(/\?/g,`[^/]`).replace(/::DOUBLE_STAR::/g,`.*`);return RegExp(`^${i}$`).test(n)}function kt(e,t,n){return t.some(t=>Ot(e,t)?!0:n?Ot(`${e}/`,t):!1)}async function At(e,t,n){let r=[],i=t.map(e=>e.toLowerCase());async function a(t){let o=await m(t);for(let s of o){if(Tt.has(s))continue;let o=S(t,s),c=await g(o),l=Et(C(e,o));if(c.isDirectory()){kt(l,n,!0)||await a(o);continue}kt(l,n,!1)||i.includes(b(s).toLowerCase())&&r.push(o)}}return await a(e),r.sort((e,t)=>e.localeCompare(t)),r}const jt=z.restorePoints;function Mt(){let 
e=S(process.cwd(),jt);return M(e)||N(e,{recursive:!0}),e}function Nt(e,t,n){let r=Mt(),i=`${Date.now()}-${e}`,a={id:i,timestamp:new Date().toISOString(),operation:e,files:t,description:n};R(S(r,`${i}.json`),`${JSON.stringify(a,null,2)}\n`,`utf-8`);let o=F(r).filter(e=>e.endsWith(`.json`)).sort();for(;o.length>50;){let e=o.shift();if(!e)break;try{re(S(r,e))}catch{}}return i}function Pt(){let e=S(process.cwd(),jt);return M(e)?F(e).filter(e=>e.endsWith(`.json`)).sort().reverse().map(t=>{try{return JSON.parse(P(S(e,t),`utf-8`))}catch(n){return console.debug(`Skipping corrupt restore point ${S(e,t)}: ${n instanceof Error?n.message:String(n)}`),null}}).filter(e=>e!==null):[]}async function Ft(e){let t=S(S(process.cwd(),jt),`${e}.json`);if(!M(t))throw Error(`Restore point not found: ${e}`);let n=JSON.parse(P(t,`utf-8`)),r=[];for(let e of n.files){let t=y(e.path);M(t)||N(t,{recursive:!0}),await _(e.path,e.content,`utf-8`),r.push(e.path)}return r}function It(e){return e.replace(/\\/g,`/`)}async function Lt(e){let{rootPath:t,rules:n,extensions:r=wt,exclude:i=[],dryRun:a=!1}=e,o=n.map(e=>({...e,regex:new RegExp(e.pattern,`g`)})),s=await At(t,r,i),c=[],l=new Set,u=0,d=[];for(let e of s){let n=It(C(t,e)),r=await p(e,`utf-8`),i=r.split(/\r?\n/),s=!1;for(let[e,t]of o.entries())if(!(t.fileFilter&&!Ot(n,t.fileFilter)))for(let r=0;r<i.length;r++){let a=i[r];t.regex.lastIndex=0;let o=a.replace(t.regex,t.replacement);a!==o&&(i[r]=o,s=!0,l.add(e),c.push({rule:t.description,path:n,line:r+1,before:a,after:o}))}s&&(u+=1,a||(d.push({path:e,content:r}),await _(e,i.join(`
14
- `),`utf-8`)))}return!a&&d.length>0&&Nt(`codemod`,d,`codemod: ${n.length} rules, ${u} files`),{changes:c,rulesApplied:l.size,filesModified:u,dryRun:a}}const Rt=new se({max:200,ttl:1e3*60*30});function zt(e){return V(`sha256`).update(e).digest(`hex`).slice(0,16)}function Bt(e,t){let n=zt(t),r=Rt.get(e);if(Rt.set(e,{hash:n,text:t,timestamp:Date.now()}),!r||r.hash===n)return{text:r?.hash===n?`[No changes since last read]`:t,isDelta:r?.hash===n,hash:n};let i=oe(e,e,r.text,t,`previous`,`current`,{context:3});return i.length>=t.length*.8?{text:t,isDelta:!1,hash:n}:{text:i,isDelta:!0,hash:n}}const Vt=.6;function Ht(e){if(!e||e.length===0)return 0;let t=ce(e),n=e.length;return n===0?0:Math.min(t.length/n,Vt)/Vt}function Ut(e){if(e.length===0)return[];let t=new Map,n=[];for(let r of e){let e=ce(r);n.push(e);for(let n of e)t.set(n,(t.get(n)??0)+1)}let r=[];for(let i=0;i<e.length;i++){let a=e[i],o=n[i];if(!a||o.length===0){r.push(0);continue}let s=Math.min(o.length/a.length,Vt)/Vt,c=0;for(let e of o)(t.get(e)??0)===1&&c++;let l=c/o.length,u=.6*s+.4*l;r.push(Math.min(u,1))}return r}function Wt(e){if(!e||e.length===0)return 0;let t=new Map;for(let n of e)t.set(n,(t.get(n)??0)+1);let n=0,r=e.length;for(let e of t.values()){let t=e/r;t>0&&(n-=t*Math.log2(t))}return Math.min(n/6.6,1)}function Gt(e){try{return Ht(e)}catch{return Wt(e)}}async function Kt(e,t){let{query:n,maxChars:r=3e3,minScore:i=.3,segmentation:a=`paragraph`}=t,o=t.tokenBudget?t.tokenBudget*4:r,s;if(t.text)s=t.text;else if(t.path){let e;try{e=await g(t.path)}catch(e){let n=e.code;throw n===`ENOENT`?Error(`File not found: ${t.path}. Check the path and try again.`):n===`EACCES`||n===`EPERM`?Error(`Permission denied reading ${t.path}. The file exists but is not accessible.`):e}if(e.isDirectory())throw Error(`Path is a directory: ${t.path}. compact requires a file path, not a directory. Use analyze_structure or find to explore directories.`);if(e.size>1e7)throw Error(`File too large (${(e.size/1e6).toFixed(1)}MB). 
compact supports files up to 10MB. Consider splitting or using search instead.`);s=t.cache?(await t.cache.get(t.path)).content:await p(t.path,`utf-8`)}else throw Error(`Either "text" or "path" must be provided`);if(t.mode===`delta`&&t.path){let e=Bt(t.path,s);if(e.isDelta)return{text:e.text,originalChars:s.length,compressedChars:e.text.length,ratio:e.text.length/s.length,segmentsKept:1,segmentsTotal:1}}if(s.length<=o)return{text:s,originalChars:s.length,compressedChars:s.length,ratio:1,segmentsKept:1,segmentsTotal:1};let c=Je(s,a);if(c.length===0)return{text:``,originalChars:s.length,compressedChars:0,ratio:0,segmentsKept:0,segmentsTotal:0};let l=await e.embed(n),u=Ut(c),d=[];for(let t=0;t<c.length;t++){let n=.85*Ye(l,await e.embed(c[t]))+.15*(u[t]??0);d.push({text:c[t],score:n,index:t})}let f=d.filter(e=>e.score>=i).sort((e,t)=>t.score-e.score),m=[],h=0;for(let e of f){if(h+e.text.length>o){h===0&&(m.push({...e,text:e.text.slice(0,o)}),h=o);break}m.push(e),h+=e.text.length+2}let _=Xe(m.sort((e,t)=>t.score-e.score)).map(e=>e.text).join(`
14
+ `),`utf-8`)))}return!a&&d.length>0&&Nt(`codemod`,d,`codemod: ${n.length} rules, ${u} files`),{changes:c,rulesApplied:l.size,filesModified:u,dryRun:a}}const Rt=new se({max:200,ttl:1e3*60*30});function zt(e){return V(`sha256`).update(e).digest(`hex`).slice(0,16)}function Bt(e,t){let n=zt(t),r=Rt.get(e);if(Rt.set(e,{hash:n,text:t,timestamp:Date.now()}),!r||r.hash===n)return{text:r?.hash===n?`[No changes since last read]`:t,isDelta:r?.hash===n,hash:n};let i=oe(e,e,r.text,t,`previous`,`current`,{context:3});return i.length>=t.length*.8?{text:t,isDelta:!1,hash:n}:{text:i,isDelta:!0,hash:n}}const Vt=.6;function Ht(e){if(!e||e.length===0)return 0;let t=ce(e),n=e.length;return n===0?0:Math.min(t.length/n,Vt)/Vt}function Ut(e){if(e.length===0)return[];let t=new Map,n=[];for(let r of e){let e=ce(r);n.push(e);for(let n of e)t.set(n,(t.get(n)??0)+1)}let r=[];for(let i=0;i<e.length;i++){let a=e[i],o=n[i];if(!a||o.length===0){r.push(0);continue}let s=Math.min(o.length/a.length,Vt)/Vt,c=0;for(let e of o)(t.get(e)??0)===1&&c++;let l=c/o.length,u=.6*s+.4*l;r.push(Math.min(u,1))}return r}function Wt(e){if(!e||e.length===0)return 0;let t=new Map;for(let n of e)t.set(n,(t.get(n)??0)+1);let n=0,r=e.length;for(let e of t.values()){let t=e/r;t>0&&(n-=t*Math.log2(t))}return Math.min(n/6.6,1)}function Gt(e){try{return Ht(e)}catch{return Wt(e)}}async function Kt(e,t){let{query:n,maxChars:r=3e3,minScore:i=.3,segmentation:a=`paragraph`}=t,o=t.tokenBudget?t.tokenBudget*4:r,s;if(t.text)s=t.text;else if(t.path){let e;try{e=await g(t.path)}catch(e){let n=e.code;throw n===`ENOENT`?Error(`File not found: ${t.path}. Check the path and try again.`):n===`EACCES`||n===`EPERM`?Error(`Permission denied reading ${t.path}. The file exists but is not accessible.`):e}if(e.isDirectory())throw Error(`Path is a directory: ${t.path}. compact requires a file path, not a directory. 
Use analyze({ aspect: "structure", path }) or find to explore directories.`);if(e.size>1e7)throw Error(`File too large (${(e.size/1e6).toFixed(1)}MB). compact supports files up to 10MB. Consider splitting or using search instead.`);s=t.cache?(await t.cache.get(t.path)).content:await p(t.path,`utf-8`)}else throw Error(`Either "text" or "path" must be provided`);if(t.mode===`delta`&&t.path){let e=Bt(t.path,s);if(e.isDelta)return{text:e.text,originalChars:s.length,compressedChars:e.text.length,ratio:e.text.length/s.length,segmentsKept:1,segmentsTotal:1}}if(s.length<=o)return{text:s,originalChars:s.length,compressedChars:s.length,ratio:1,segmentsKept:1,segmentsTotal:1};let c=Je(s,a);if(c.length===0)return{text:``,originalChars:s.length,compressedChars:0,ratio:0,segmentsKept:0,segmentsTotal:0};let l=await e.embed(n),u=Ut(c),d=[];for(let t=0;t<c.length;t++){let n=.85*Ye(l,await e.embed(c[t]))+.15*(u[t]??0);d.push({text:c[t],score:n,index:t})}let f=d.filter(e=>e.score>=i).sort((e,t)=>t.score-e.score),m=[],h=0;for(let e of f){if(h+e.text.length>o){h===0&&(m.push({...e,text:e.text.slice(0,o)}),h=o);break}m.push(e),h+=e.text.length+2}let _=Xe(m.sort((e,t)=>t.score-e.score)).map(e=>e.text).join(`
15
15
 
16
16
  `);return{text:_,originalChars:s.length,compressedChars:_.length,ratio:_.length/s.length,segmentsKept:m.length,segmentsTotal:c.length}}const qt=/\b(error|fatal|exception|failed|failure|fail|passed|warn|warning|panic|abort|timeout|critical)\b/i,Jt=[];function Yt(e){Jt.push(e),Jt.sort((e,t)=>t.priority-e.priority)}function Xt(e){for(let t of e)Yt(t)}function Zt(){return Jt}function Qt(e){return/^(diff --git|commit [0-9a-f]{7,40}|On branch |Your branch )/m.test(e)?`git`:/^\s*[MADRCU?!]{1,2}\s+\S/m.test(e)&&/^##\s/m.test(e)?`git-status`:/^(npm (warn|ERR!|notice)|added \d+ packages?|up to date)/m.test(e)?`npm`:/^(Packages|Progress):/m.test(e)||/pnpm/.test(e)?`pnpm`:/✓|✗|PASS|FAIL|Tests?\s+\d+\s+(passed|failed)/m.test(e)||/^(PASS|FAIL)\s+\S/m.test(e)?`test-runner`:/^(error TS\d+|warning TS\d+|\S+\.tsx?[(:]\d+)/m.test(e)?`tsc`:/^\S+\.\w+:\d+:\d+\s+(error|warning|info)/m.test(e)||/Found \d+ (error|warning)/m.test(e)?`lint`:/^(CONTAINER ID|IMAGE|REPOSITORY|Step \d+\/\d+|--->)/m.test(e)||/docker|Dockerfile/i.test(e)?`docker`:/^(NAME\s+READY|NAMESPACE\s|kubectl)/m.test(e)?`kubectl`:`unknown`}function $t(e,t,n){return[`\n... [${e} chars / ~${t} tokens omitted]\n`,`\n... [${e} chars omitted]\n`,`
17
17
  ... [omitted]
@@ -48,15 +48,15 @@ import{DependencyAnalyzer as e,DiagramGenerator as t,EntryPointAnalyzer as n,Pat
48
48
  `)})}return{text:Wn(On(p.map(e=>{let t=c.find(t=>t.id===e.id),n=t?Math.max(0,...t.segments.map(e=>e.score)):0;return{...e,maxScore:n}}))),fields:s,sourceStats:f,totalOriginalChars:l,totalCompressedChars:m,ratio:Gn(l,m)}}function qn(){return S(B(process.cwd()),`logs`)}function Jn(e={}){let{days:t=7,level:n,component:r,limit:i=50}=e,a=qn(),o=new Date,s=new Date(o.getTime()-t*864e5).toISOString().slice(0,10),c=o.toISOString().slice(0,10),l;try{l=F(a).filter(e=>e.endsWith(`.jsonl`)&&e.slice(0,10)>=s).sort()}catch{return{totalEntries:0,groups:[],recent:[],dateRange:{from:s,to:c}}}let u=[];for(let e of l)try{let t=P(S(a,e),`utf-8`);for(let e of t.trim().split(`
49
49
  `))if(e)try{let t=JSON.parse(e);if(n&&t.level!==n||r&&t.component!==r)continue;u.push(t)}catch{}}catch{}let d=new Map;for(let e of u){let t=`${e.component}::${e.msg}`,n=d.get(t);n?(n.count++,e.ts<n.firstSeen&&(n.firstSeen=e.ts),e.ts>n.lastSeen&&(n.lastSeen=e.ts)):d.set(t,{component:e.component,msg:e.msg,level:e.level,count:1,firstSeen:e.ts,lastSeen:e.ts})}let f=[...d.values()].sort((e,t)=>t.count-e.count),p=u.slice(-i);return{totalEntries:u.length,groups:f,recent:p,dateRange:{from:l.length>0?l[0].slice(0,10):s,to:c}}}function Yn(e){let{operation:t,input:n}=e,r;switch(t){case`base64_encode`:r=Buffer.from(n).toString(`base64`);break;case`base64_decode`:r=Buffer.from(n,`base64`).toString(`utf8`);break;case`url_encode`:r=encodeURIComponent(n);break;case`url_decode`:r=decodeURIComponent(n);break;case`sha256`:r=V(`sha256`).update(n).digest(`hex`);break;case`md5`:r=V(`md5`).update(n).digest(`hex`);break;case`jwt_decode`:{let e=n.split(`.`);if(e.length!==3)throw Error(`Invalid JWT: expected 3 dot-separated parts`);let t,i;try{t=JSON.parse(Buffer.from(e[0],`base64url`).toString()),i=JSON.parse(Buffer.from(e[1],`base64url`).toString())}catch{throw Error(`Invalid JWT: header or payload is not valid JSON`)}r=JSON.stringify({header:t,payload:i},null,2);break}case`hex_encode`:r=Buffer.from(n).toString(`hex`);break;case`hex_decode`:r=Buffer.from(n,`hex`).toString(`utf8`);break;default:throw Error(`Unknown operation: ${t}`)}return{output:r,operation:t}}const Xn=[/key/i,/secret/i,/token/i,/password/i,/passwd/i,/credential/i,/private/i,/certificate/i];function Zn(e={}){let{includeEnv:t=!1,filterEnv:n,showSensitive:r=!1}=e,i={system:{platform:me(),arch:ue(),release:he(),hostname:pe(),type:_e(),cpus:de().length,memoryTotalGb:Math.round(ge()/1024**3*10)/10,memoryFreeGb:Math.round(fe()/1024**3*10)/10},runtime:{node:process.versions.node,v8:process.versions.v8},cwd:process.cwd()};if(t){let e={};for(let[t,i]of 
Object.entries(process.env))i&&(n&&!t.toLowerCase().includes(n.toLowerCase())||(!r&&Xn.some(e=>e.test(t))?e[t]=`[REDACTED]`:e[t]=i));i.env=e}return i}function Qn(e){let{code:t,lang:n=`js`,timeout:r=5e3}=e,i=Math.min(Math.max(r,100),1e4),a=Date.now();try{let e=n===`ts`?er(t):t,r=[],o={console:{log:(...e)=>r.push(e.map(String).join(` `)),error:(...e)=>r.push(`[error] ${e.map(String).join(` `)}`),warn:(...e)=>r.push(`[warn] ${e.map(String).join(` `)}`)},setTimeout:void 0,setInterval:void 0,setImmediate:void 0,fetch:void 0,process:void 0,require:void 0,JSON,Math,Date,Array,Object,String,Number,Boolean,Map,Set,RegExp,Error,Promise,parseInt,parseFloat,isNaN,isFinite},s=ve.createContext(o,{codeGeneration:{strings:!1,wasm:!1}}),c=ve.runInContext(e,s,{timeout:i});return{success:!0,output:r.length>0?r.join(`
50
50
  `)+(c===void 0?``:`\n→ ${$n(c)}`):c===void 0?`(no output)`:$n(c),durationMs:Date.now()-a}}catch(e){return{success:!1,output:``,error:e.message,durationMs:Date.now()-a}}}function $n(e){if(e===void 0)return`undefined`;if(e===null)return`null`;if(typeof e==`object`)try{return JSON.stringify(e,null,2)}catch{return String(e)}return String(e)}function er(e){return e.replace(/^\s*import\s+type\s+.*?;\s*$/gm,``).replace(/^\s*(?:export\s+)?interface\s+\w+[^{]*\{[\s\S]*?^\s*}\s*$/gm,``).replace(/^\s*(?:export\s+)?type\s+\w+\s*=.*?;\s*$/gm,``).replace(/([,(]\s*[A-Za-z_$][\w$]*)\s*:\s*[^,)=\n]+/g,`$1`).replace(/\)\s*:\s*[^={\n]+(?=\s*(?:=>|\{))/g,`)`).replace(/\s+as\s+[A-Za-z_$][\w$<>,[\]|&\s.]*/g,``).replace(/<(?:[A-Za-z_$][\w$]*\s*,?\s*)+>(?=\s*\()/g,``)}const tr={maxRetries:3,timeoutAction:`manual`};function nr(e){let t=e?.maxRetries;return{maxRetries:typeof t==`number`&&Number.isFinite(t)&&t>=0?Math.trunc(t):tr.maxRetries,timeoutAction:e?.timeoutAction??tr.timeoutAction}}function rr(e){return{...e,gateConfig:nr(e.gateConfig),gateAttempts:typeof e.gateAttempts==`number`&&Number.isFinite(e.gateAttempts)?Math.max(0,Math.trunc(e.gateAttempts)):0}}function ir(e){return w(B(e??process.cwd()),`evidence-maps.json`)}function ar(e){let t=ir(e);if(!M(t))return{};try{let e=P(t,`utf-8`),n=JSON.parse(e);return Object.fromEntries(Object.entries(n).map(([e,t])=>[e,rr(t)]))}catch(e){return e?.code===`ENOENT`||console.warn(`Corrupt state file ${t}: ${e instanceof Error?e.message:String(e)}`),{}}}function or(e,t){let n=ir(t),r=y(n);M(r)||N(r,{recursive:!0});let i=`${n}.tmp`;R(i,`${JSON.stringify(e,null,2)}\n`,`utf-8`),I(i,n)}function sr(e,t){let n=ar(t),r=n[e];if(!r)throw Error(`Evidence map not found: ${e}`);return{maps:n,state:r}}function cr(e){return e.reduce((e,t)=>Math.max(e,t.id),0)+1}function lr(e){let t=e.trim();if(!t)throw Error(`Claim is required`);if(/\r?\n/.test(t))throw Error(`Claim must be a single line`);return t}function ur(e){return(e??``).replace(/\r?\n/g,` 
`).replace(/\|/g,`\\|`)}function dr(e){let t=[`| # | Claim | Status | Receipt | Critical | Type | Safety |`,`|---|-------|--------|---------|----------|------|--------|`];for(let n of e.entries)t.push(`| ${n.id} | ${ur(n.claim)} | ${n.status} | ${ur(n.receipt)} | ${n.criticalPath?`yes`:`no`} | ${ur(n.unknownType)} | ${ur(n.safetyGate)} |`);return t.join(`
51
- `)}function fr(e){return{total:e.length,verified:e.filter(e=>e.status===`V`).length,assumed:e.filter(e=>e.status===`A`).length,unresolved:e.filter(e=>e.status===`U`).length}}function pr(e){let t=[];for(let n of e.entries)n.status===`V`&&n.receipt.trim()===``&&t.push(`V entry without receipt`),n.status===`A`&&e.tier===`critical`&&n.unknownType===`contract`&&t.push(`Assumed contract at Critical tier — should be Verified`);return t}function mr(e){return`Gate escalation annotation: unresolved entries remain -> ${e.filter(e=>e.status===`U`).map(e=>`#${e.id} ${e.claim}`).join(`; `)}`}function hr(e){let t=e.entries.filter(e=>e.status===`U`),n=e.entries.filter(e=>e.status===`A`),r=[];return t.length>0&&r.push(`Unresolved: ${t.map(e=>`#${e.id} ${e.claim}`).join(`; `)}`),n.length>0&&r.push(`Assumed: ${n.map(e=>`#${e.id} ${e.claim}`).join(`; `)}`),r.length>0?r.join(` | `):void 0}function gr(e){let t=[],n=e.entries.filter(e=>e.status===`V`&&!e.receipt.trim());n.length>0&&t.push(`Provenance: ${n.length} verified claim(s) lack receipts`);let r=e.entries.filter(e=>e.safetyGate===`commitment`&&e.status!==`V`);r.length>0&&t.push(`Commitment: ${r.length} commitment(s) not verified`);let i=e.entries.some(e=>e.safetyGate===`coverage`&&e.status===`U`);return i&&t.push(`Coverage: unresolved coverage entries remain`),{provenance:n.length>0?`fail`:`pass`,commitment:r.length>0?`fail`:`pass`,coverage:i?`fail`:`pass`,failures:t}}function _r(e,t,n){let r=e.entries.filter(e=>e.criticalPath&&e.status===`U`),i=pr(e),a=fr(e.entries),o=r.find(e=>e.unknownType===`contract`),s=Math.max(t.maxRetries-n,0),c=s>0?`iterate`:t.timeoutAction,l=hr(e);if(o)return{verdict:`HARD_BLOCK`,action:c,retriesRemaining:s,summary:l,decision:`HARD_BLOCK`,reason:`Unresolved contract unknown on critical path`,unresolvedCritical:r,warnings:i,stats:a};if(r.length>0)return{verdict:`HOLD`,action:c,retriesRemaining:s,summary:l,decision:`HOLD`,reason:s>0?`Unresolved critical-path unknown — retry available`:`Unresolved 
critical-path unknown — retries exhausted`,unresolvedCritical:r,warnings:i,stats:a,...s===0?{annotation:mr(e.entries)}:{}};let u=vr(e,i,a),d=u.decision===`HOLD`?`HOLD`:`YIELD`;return{verdict:d,...d===`HOLD`?{action:c,retriesRemaining:s,summary:l}:{},decision:u.decision??`YIELD`,reason:u.reason??`All critical-path claims satisfy gate rules`,unresolvedCritical:[],warnings:u.warnings??i,stats:a,...`safetyGates`in u?{safetyGates:u.safetyGates}:{}}}function vr(e,t,n){if(!e.entries.some(e=>e.safetyGate)||e.tier===`floor`)return{};let r=gr(e);return r.failures.length>0?{safetyGates:r,decision:`HOLD`,reason:`Safety gate failure: ${r.failures.join(`; `)}`,warnings:[...t,...r.failures]}:{safetyGates:r}}function yr(e,t){switch(e.action){case`create`:{let n=ar(t),r=new Date().toISOString(),i={taskId:e.taskId,tier:e.tier,entries:[],gateConfig:{...tr},gateAttempts:0,createdAt:r,updatedAt:r};return n[e.taskId]=i,or(n,t),{state:i,formattedMap:dr(i)}}case`add`:{let{maps:n,state:r}=sr(e.taskId,t),i={id:cr(r.entries),claim:lr(e.claim),status:e.status,receipt:e.receipt,criticalPath:e.criticalPath??!1,unknownType:e.unknownType,safetyGate:e.safetyGate};return r.entries.push(i),r.updatedAt=new Date().toISOString(),n[e.taskId]=r,or(n,t),{state:r,entry:i,formattedMap:dr(r)}}case`update`:{let{maps:n,state:r}=sr(e.taskId,t),i=r.entries.find(t=>t.id===e.id);if(!i)throw Error(`Evidence entry not found: ${e.id}`);return i.status=e.status,i.receipt=e.receipt,r.updatedAt=new Date().toISOString(),n[e.taskId]=r,or(n,t),{state:r,entry:i,formattedMap:dr(r)}}case`get`:{let{state:n}=sr(e.taskId,t);return{state:n,formattedMap:dr(n)}}case`gate`:{let{maps:n,state:r}=sr(e.taskId,t),i=nr({...r.gateConfig,...e.maxRetries===void 0?{}:{maxRetries:e.maxRetries},...e.timeoutAction===void 0?{}:{timeoutAction:e.timeoutAction}}),a=e.retryCount===void 
0?r.gateAttempts:Math.max(r.gateAttempts,Math.max(0,Math.trunc(e.retryCount))),o={...r,gateConfig:i},s=_r(o,i,r.entries.some(e=>e.status===`U`||e.status===`A`)?a+1:0),c={...o,gateConfig:i,gateAttempts:s.verdict===`YIELD`?0:a+1,updatedAt:new Date().toISOString()};return n[e.taskId]=c,or(n,t),{state:c,gate:_r(c,i,c.gateAttempts),formattedMap:dr(c)}}case`list`:return{states:Object.values(ar(t)).sort((e,t)=>e.createdAt.localeCompare(t.createdAt))};case`delete`:{let n=ar(t);return e.taskId in n?(delete n[e.taskId],or(n,t),{deleted:!0}):{deleted:!1}}}}function br(e,t,n){let r=[];for(let i of t){let t=yr({action:`add`,taskId:e,claim:`Test failure: ${i}`,status:`U`,receipt:``,criticalPath:!0},n);t.entry&&r.push(t.entry)}return r}var xr=class e{cache=new Map;totalReads=0;cacheHits=0;static MAX_ENTRIES=500;async get(t){let n=w(t);this.totalReads++;let r=await g(n);if(r.isDirectory())throw Error(`Path is a directory: ${t}. Expected a file path, not a directory. Use analyze_structure or find to explore directories.`);let i=r.mtimeMs,a=this.cache.get(n);if(a){if(a.mtimeMs===i)return this.cacheHits++,a.hitCount++,{content:a.content,hash:a.hash,lines:a.lines,estimatedTokens:a.estimatedTokens,hitCount:a.hitCount,changed:!1};let e=await p(n,`utf-8`),t=Sr(e);if(t===a.hash)return this.cacheHits++,a.hitCount++,a.mtimeMs=i,{content:a.content,hash:a.hash,lines:a.lines,estimatedTokens:a.estimatedTokens,hitCount:a.hitCount,changed:!1};let r=e.split(`
51
+ `)}function fr(e){return{total:e.length,verified:e.filter(e=>e.status===`V`).length,assumed:e.filter(e=>e.status===`A`).length,unresolved:e.filter(e=>e.status===`U`).length}}function pr(e){let t=[];for(let n of e.entries)n.status===`V`&&n.receipt.trim()===``&&t.push(`V entry without receipt`),n.status===`A`&&e.tier===`critical`&&n.unknownType===`contract`&&t.push(`Assumed contract at Critical tier — should be Verified`);return t}function mr(e){return`Gate escalation annotation: unresolved entries remain -> ${e.filter(e=>e.status===`U`).map(e=>`#${e.id} ${e.claim}`).join(`; `)}`}function hr(e){let t=e.entries.filter(e=>e.status===`U`),n=e.entries.filter(e=>e.status===`A`),r=[];return t.length>0&&r.push(`Unresolved: ${t.map(e=>`#${e.id} ${e.claim}`).join(`; `)}`),n.length>0&&r.push(`Assumed: ${n.map(e=>`#${e.id} ${e.claim}`).join(`; `)}`),r.length>0?r.join(` | `):void 0}function gr(e){let t=[],n=e.entries.filter(e=>e.status===`V`&&!e.receipt.trim());n.length>0&&t.push(`Provenance: ${n.length} verified claim(s) lack receipts`);let r=e.entries.filter(e=>e.safetyGate===`commitment`&&e.status!==`V`);r.length>0&&t.push(`Commitment: ${r.length} commitment(s) not verified`);let i=e.entries.some(e=>e.safetyGate===`coverage`&&e.status===`U`);return i&&t.push(`Coverage: unresolved coverage entries remain`),{provenance:n.length>0?`fail`:`pass`,commitment:r.length>0?`fail`:`pass`,coverage:i?`fail`:`pass`,failures:t}}function _r(e,t,n){let r=e.entries.filter(e=>e.criticalPath&&e.status===`U`),i=pr(e),a=fr(e.entries),o=r.find(e=>e.unknownType===`contract`),s=Math.max(t.maxRetries-n,0),c=s>0?`iterate`:t.timeoutAction,l=hr(e);if(o)return{verdict:`HARD_BLOCK`,action:c,retriesRemaining:s,summary:l,decision:`HARD_BLOCK`,reason:`Unresolved contract unknown on critical path`,unresolvedCritical:r,warnings:i,stats:a};if(r.length>0)return{verdict:`HOLD`,action:c,retriesRemaining:s,summary:l,decision:`HOLD`,reason:s>0?`Unresolved critical-path unknown — retry available`:`Unresolved 
critical-path unknown — retries exhausted`,unresolvedCritical:r,warnings:i,stats:a,...s===0?{annotation:mr(e.entries)}:{}};let u=vr(e,i,a),d=u.decision===`HOLD`?`HOLD`:`YIELD`;return{verdict:d,...d===`HOLD`?{action:c,retriesRemaining:s,summary:l}:{},decision:u.decision??`YIELD`,reason:u.reason??`All critical-path claims satisfy gate rules`,unresolvedCritical:[],warnings:u.warnings??i,stats:a,...`safetyGates`in u?{safetyGates:u.safetyGates}:{}}}function vr(e,t,n){if(!e.entries.some(e=>e.safetyGate)||e.tier===`floor`)return{};let r=gr(e);return r.failures.length>0?{safetyGates:r,decision:`HOLD`,reason:`Safety gate failure: ${r.failures.join(`; `)}`,warnings:[...t,...r.failures]}:{safetyGates:r}}function yr(e,t){switch(e.action){case`create`:{let n=ar(t),r=new Date().toISOString(),i={taskId:e.taskId,tier:e.tier,entries:[],gateConfig:{...tr},gateAttempts:0,createdAt:r,updatedAt:r};return n[e.taskId]=i,or(n,t),{state:i,formattedMap:dr(i)}}case`add`:{let{maps:n,state:r}=sr(e.taskId,t),i={id:cr(r.entries),claim:lr(e.claim),status:e.status,receipt:e.receipt,criticalPath:e.criticalPath??!1,unknownType:e.unknownType,safetyGate:e.safetyGate};return r.entries.push(i),r.updatedAt=new Date().toISOString(),n[e.taskId]=r,or(n,t),{state:r,entry:i,formattedMap:dr(r)}}case`update`:{let{maps:n,state:r}=sr(e.taskId,t),i=r.entries.find(t=>t.id===e.id);if(!i)throw Error(`Evidence entry not found: ${e.id}`);return i.status=e.status,i.receipt=e.receipt,r.updatedAt=new Date().toISOString(),n[e.taskId]=r,or(n,t),{state:r,entry:i,formattedMap:dr(r)}}case`get`:{let{state:n}=sr(e.taskId,t);return{state:n,formattedMap:dr(n)}}case`gate`:{let{maps:n,state:r}=sr(e.taskId,t),i=nr({...r.gateConfig,...e.maxRetries===void 0?{}:{maxRetries:e.maxRetries},...e.timeoutAction===void 0?{}:{timeoutAction:e.timeoutAction}}),a=e.retryCount===void 
0?r.gateAttempts:Math.max(r.gateAttempts,Math.max(0,Math.trunc(e.retryCount))),o={...r,gateConfig:i},s=_r(o,i,r.entries.some(e=>e.status===`U`||e.status===`A`)?a+1:0),c={...o,gateConfig:i,gateAttempts:s.verdict===`YIELD`?0:a+1,updatedAt:new Date().toISOString()};return n[e.taskId]=c,or(n,t),{state:c,gate:_r(c,i,c.gateAttempts),formattedMap:dr(c)}}case`list`:return{states:Object.values(ar(t)).sort((e,t)=>e.createdAt.localeCompare(t.createdAt))};case`delete`:{let n=ar(t);return e.taskId in n?(delete n[e.taskId],or(n,t),{deleted:!0}):{deleted:!1}}}}function br(e,t,n){let r=[];for(let i of t){let t=yr({action:`add`,taskId:e,claim:`Test failure: ${i}`,status:`U`,receipt:``,criticalPath:!0},n);t.entry&&r.push(t.entry)}return r}var xr=class e{cache=new Map;totalReads=0;cacheHits=0;static MAX_ENTRIES=500;async get(t){let n=w(t);this.totalReads++;let r=await g(n);if(r.isDirectory())throw Error(`Path is a directory: ${t}. Expected a file path, not a directory. Use analyze({ aspect: "structure", path }) or find to explore directories.`);let i=r.mtimeMs,a=this.cache.get(n);if(a){if(a.mtimeMs===i)return this.cacheHits++,a.hitCount++,{content:a.content,hash:a.hash,lines:a.lines,estimatedTokens:a.estimatedTokens,hitCount:a.hitCount,changed:!1};let e=await p(n,`utf-8`),t=Sr(e);if(t===a.hash)return this.cacheHits++,a.hitCount++,a.mtimeMs=i,{content:a.content,hash:a.hash,lines:a.lines,estimatedTokens:a.estimatedTokens,hitCount:a.hitCount,changed:!1};let r=e.split(`
52
52
  `).length,o=U(e);return a.content=e,a.hash=t,a.lines=r,a.estimatedTokens=o,a.hitCount++,a.mtimeMs=i,{content:e,hash:t,lines:r,estimatedTokens:o,hitCount:a.hitCount,changed:!0}}let o=await p(n,`utf-8`),s=Sr(o),c=o.split(`
53
- `).length,l=U(o);if(this.cache.set(n,{content:o,hash:s,lines:c,estimatedTokens:l,hitCount:1,mtimeMs:i}),this.cache.size>e.MAX_ENTRIES){let e=this.cache.keys().next().value;e&&this.cache.delete(e)}return{content:o,hash:s,lines:c,estimatedTokens:l,hitCount:1,changed:!0}}invalidate(e){return this.cache.delete(w(e))}clear(){let e=this.cache.size;return this.cache.clear(),e}stats(){return{totalReads:this.totalReads,cacheHits:this.cacheHits,filesTracked:this.cache.size}}};function Sr(e){return V(`sha256`).update(e).digest(`hex`)}async function Cr(e){let{path:t,previewLines:n=3}=e;if(!e.content){let e;try{e=await g(t)}catch(e){let n=e.code;throw n===`ENOENT`?Error(`File not found: ${t}. Check the path and try again.`):n===`EACCES`||n===`EPERM`?Error(`Permission denied reading ${t}. The file exists but is not accessible.`):e}if(e.isDirectory())throw Error(`Path is a directory: ${t}. file_summary requires a file path, not a directory. Use analyze_structure or find to explore directories.`);if(e.size>1e7)throw Error(`File too large (${(e.size/1e6).toFixed(1)}MB). file_summary supports files up to 10MB. Use search or compact with a query instead.`)}let r=e.content??await p(t,`utf-8`),i=r.split(`
53
+ `).length,l=U(o);if(this.cache.set(n,{content:o,hash:s,lines:c,estimatedTokens:l,hitCount:1,mtimeMs:i}),this.cache.size>e.MAX_ENTRIES){let e=this.cache.keys().next().value;e&&this.cache.delete(e)}return{content:o,hash:s,lines:c,estimatedTokens:l,hitCount:1,changed:!0}}invalidate(e){return this.cache.delete(w(e))}clear(){let e=this.cache.size;return this.cache.clear(),e}stats(){return{totalReads:this.totalReads,cacheHits:this.cacheHits,filesTracked:this.cache.size}}};function Sr(e){return V(`sha256`).update(e).digest(`hex`)}async function Cr(e){let{path:t,previewLines:n=3}=e;if(!e.content){let e;try{e=await g(t)}catch(e){let n=e.code;throw n===`ENOENT`?Error(`File not found: ${t}. Check the path and try again.`):n===`EACCES`||n===`EPERM`?Error(`Permission denied reading ${t}. The file exists but is not accessible.`):e}if(e.isDirectory())throw Error(`Path is a directory: ${t}. file_summary requires a file path, not a directory. Use analyze({ aspect: "structure", path }) or find to explore directories.`);if(e.size>1e7)throw Error(`File too large (${(e.size/1e6).toFixed(1)}MB). file_summary supports files up to 10MB. Use search or compact with a query instead.`)}let r=e.content??await p(t,`utf-8`),i=r.split(`
54
54
  `),a=t.split(`.`).pop()??``,o=b(t);return D.get()&&E.has(o)?wr(t,r,i,a,o):Tr(t,r,i,a)}async function wr(e,t,n,r,i){let[a,o,s]=await Promise.all([k(t,i,e),O(t,i,e),ee(t,i,e).catch(()=>[])]),c=o.map(e=>`import ${e.specifiers.length>0?`{ ${e.specifiers.join(`, `)} }`:`*`} from '${e.source}'`),l=[],u=[],d=[],f=[],p=[];for(let e of a)switch(e.exported&&l.push(e.name),e.kind){case`function`:case`method`:u.push({name:e.name,line:e.line,exported:e.exported,signature:e.signature});break;case`class`:d.push({name:e.name,line:e.line,exported:e.exported,signature:e.signature});break;case`interface`:f.push({name:e.name,line:e.line,exported:e.exported});break;case`type`:p.push({name:e.name,line:e.line,exported:e.exported});break}let m=o.map(e=>({source:e.source,specifiers:e.specifiers,isExternal:e.isExternal})),h=s.map(e=>({caller:e.callerName,callee:e.calleeName,line:e.line}));return{path:e,lines:n.length,language:Er(r),imports:c,exports:l,functions:u,classes:d,interfaces:f,types:p,importDetails:m,callEdges:h.length>0?h:void 0,estimatedTokens:Math.ceil(t.length/4)}}function Tr(e,t,n,r){let i=[],a=[],o=[],s=[],c=[],l=[];for(let e=0;e<n.length;e+=1){let t=n[e],r=e+1;if(/^import\s+.+/.test(t)){i.push(t.trim());continue}let u=t.match(/^export\s+(?:async\s+)?function\s+(\w+)/);if(u){o.push({name:u[1],line:r,exported:!0}),a.push(u[1]);continue}let d=t.match(/^(?:async\s+)?function\s+(\w+)/);if(d){o.push({name:d[1],line:r,exported:!1});continue}let f=t.match(/^(export\s+)?const\s+(\w+)\s*=.*(?:=>|\bfunction\b)/);if(f){let e=!!f[1];o.push({name:f[2],line:r,exported:e}),e&&a.push(f[2]);continue}let p=t.match(/^export\s+const\s+(\w+)\s*=/);if(p){a.push(p[1]);continue}let m=t.match(/^(export\s+)?(?:abstract\s+)?class\s+(\w+)/);if(m){let e=!!m[1];s.push({name:m[2],line:r,exported:e}),e&&a.push(m[2]);continue}let h=t.match(/^(export\s+)?interface\s+(\w+)/);if(h){let e=!!h[1];c.push({name:h[2],line:r,exported:e}),e&&a.push(h[2]);continue}let g=t.match(/^(export\s+)?type\s+(\w+)/);if(g){let 
e=!!g[1];l.push({name:g[2],line:r,exported:e}),e&&a.push(g[2]);continue}let _=t.match(/^export\s+\{(.+)\}/);if(_){let e=_[1].split(`,`).map(e=>e.trim().split(/\s+as\s+/).pop()?.trim()??``).filter(Boolean);a.push(...e)}}return{path:e,lines:n.length,language:Er(r),imports:i,exports:a,functions:o,classes:s,interfaces:c,types:l,estimatedTokens:Math.ceil(t.length/4)}}function Er(e){return{ts:`typescript`,tsx:`typescript-jsx`,js:`javascript`,jsx:`javascript-jsx`,py:`python`,rs:`rust`,go:`go`,java:`java`,rb:`ruby`,md:`markdown`,json:`json`,yaml:`yaml`,yml:`yaml`,css:`css`,html:`html`,sh:`shell`,bash:`shell`}[e]??e}async function Dr(e,t,n){let{query:r,glob:i,pattern:a,limit:o=10,contentType:s,cwd:c=process.cwd()}=n,l=[],u=[],d=new Set,f=[];if(r){l.push(`vector`);let n=await e.embed(r),i={limit:o,contentType:s},a=await t.search(n,i);for(let e of a){let t=`${e.record.sourcePath}:${e.record.startLine}`;d.has(t)||(d.add(t),u.push({path:e.record.sourcePath,source:`vector`,score:e.score,lineRange:{start:e.record.startLine,end:e.record.endLine},preview:e.record.content.slice(0,200)}))}}if(r){l.push(`keyword`);try{let e=await t.ftsSearch(r,{limit:o,contentType:s});for(let t of e){let e=`${t.record.sourcePath}:${t.record.startLine}`;d.has(e)||(d.add(e),u.push({path:t.record.sourcePath,source:`keyword`,score:t.score,lineRange:{start:t.record.startLine,end:t.record.endLine},preview:t.record.content.slice(0,200)}))}}catch(e){f.push({strategy:`keyword`,reason:e instanceof Error?e.message:String(e)})}}if(i){l.push(`glob`);try{let{globSync:e}=await import(`node:fs`),t=e(i,{cwd:c,withFileTypes:!1}),n=new Set([`node_modules`,`.git`,`dist`,`build`,`coverage`,`.turbo`,`.cache`,`cdk.out`,z.state,z.data]),r=t.filter(e=>!e.replace(/\\/g,`/`).split(`/`).some(e=>n.has(e)));for(let e of r.slice(0,o)){let t=`glob:${e}`;d.has(t)||(d.add(t),u.push({path:e,source:`glob`,score:1}))}}catch(e){f.push({strategy:`glob`,reason:e instanceof Error?e.message:String(e)})}}if(a){l.push(`pattern`);try{let 
e=Or(a),n=e?kr(a):a,r=new RegExp(n,`i`),i=e?Ar(a):a,c=await t.ftsSearch(i,{limit:o*2,contentType:s});for(let t of c){let n=e?t.record.sourcePath:t.record.content;if(r.test(n)){let e=`${t.record.sourcePath}:${t.record.startLine}`;d.has(e)||(d.add(e),u.push({path:t.record.sourcePath,source:`pattern`,score:t.score,lineRange:{start:t.record.startLine,end:t.record.endLine},preview:t.record.content.slice(0,200)}))}}}catch(e){f.push({strategy:`pattern`,reason:e instanceof Error?e.message:String(e)})}}return u.sort((e,t)=>t.score-e.score),{results:u.slice(0,o),strategies:l,totalFound:u.length,...f.length>0&&{failedStrategies:f}}}function Or(e){return/\*\*|^[*?]|\/\*|^\*\./.test(e)}function kr(e){let t=`\0DS\0`,n=`\0SS\0`,r=e.replace(/\*\*/g,t).replace(/\*/g,n);return r=r.replace(/[.+^${}()|[\]\\]/g,`\\$&`),r=r.replace(/\?/g,`.`),r=r.replace(new RegExp(t.replace(/\0/g,`\\0`),`g`),`.*`),r=r.replace(new RegExp(n.replace(/\0/g,`\\0`),`g`),`[^/]*`),r}function Ar(e){return e.replace(/\*+/g,` `).replace(/[/\\]/g,` `).trim().split(/\s+/).filter(e=>e.length>1).join(` `)||e}async function jr(e,t,n){let{query:r,limit:i=5,contentType:a}=n,o=`usage example of ${r}`,s=await e.embed(o),c=await t.search(s,{limit:i*3,contentType:a}),l=RegExp(`\\b${H(r)}\\b`,`i`),u=c.filter(e=>l.test(e.record.content));return{query:r,examples:u.map(e=>{let t=e.record.content,n=/export\s+(?:async\s+)?(?:function|class|const|interface|type)\s/.test(t),r=/^\s*import\s/m.test(t),i=/(?:^|[\\/])(test|tests|__tests__|spec)(?:[\\/]|$)/i.test(e.record.sourcePath)||/\.(test|spec)\.[jt]sx?$/i.test(e.record.sourcePath),a=0;n||(a+=.1),r||(a+=.05),i&&(a+=.05);let o=t.split(`
55
55
  `),s=o.findIndex(e=>l.test(e)),c=Math.max(0,s-2),u=Math.min(o.length,s+5),d=o.slice(c,u).join(`
56
56
  `);return{path:e.record.sourcePath,startLine:e.record.startLine,endLine:e.record.endLine,content:d||t.slice(0,300),relevance:Math.min(1,e.score+a),context:i?`test`:n?`definition`:`usage`}}).sort((e,t)=>t.relevance-e.relevance).slice(0,i),totalFound:u.length}}const Mr=new Set([`.ts`,`.tsx`,`.js`,`.jsx`,`.mts`,`.cts`,`.mjs`,`.cjs`]),Nr=new Set([`.git`,z.data,z.state,`.turbo`,`.yarn`,`build`,`coverage`,`dist`,`node_modules`]),Pr=/auth|token|permission|acl|encrypt|secret|credential|jwt|oauth|password/i,Fr=/\b(hash|sign|verify|bcrypt|jwt|decrypt|secret|password)\b/i,Ir=/auth|security|permission|encrypt|secret|credential/i,Lr=/types\.ts$|schema\.ts$|contract\.ts$|\.proto$|openapi|swagger|\.graphql$/i,Rr=/(?:^|\/)(events|contracts|shared)(?:\/|$)/i,zr=/export\s+interface\b|export\s+type\b|export\s+const\s+\w*Schema\w*\s*=\s*z\./i,Br=/schema|contract|migration|breaking.change|api.change/i,Vr=/migration|data.?model|multi.?service|breaking|backward.?compat/i,Hr={floor:{ground:`Parasitic — read target file only`,build:`Implement directly`,break:`Skip`,evidenceMap:`Not required`,gate:`Self-certify`},standard:{ground:`Scope map + blast radius + constraint seed`,build:`TDD — test first, then implement`,break:`Error paths + edge cases`,evidenceMap:`3-8 critical-path entries`,gate:`YIELD/HOLD evaluation`},critical:{ground:`Full scope map + blast radius + trace + patterns + constraint pack`,build:`TDD + contract verification + cross-service validation`,break:`Error paths + edge cases + security dimensions + data-flow verification`,evidenceMap:`Comprehensive — all critical-path claims with receipts`,gate:`Strict YIELD/HOLD/HARD_BLOCK evaluation`}};async function Ur(e){let t=w(e.rootPath),n=e.task.trim(),r=e.files.map(e=>w(t,e)),i=[],a=!1,o=!1;for(let e of r){let r=Gr(e,t),i=qr(e);(Pr.test(r)||Fr.test(i)||Ir.test(n))&&(a=!0),(Lr.test(r)||Rr.test(r)||Br.test(n)||zr.test(i)||await Jr(e))&&(o=!0)}a&&Wr(i,{rule:`security-path`,detail:`Security/auth path, task, or content matched 
security heuristics`,source:`security_auth`}),o&&Wr(i,{rule:`schema-contract`,detail:`Schema or contract path, task, or export shape matched contract heuristics`,source:`schema_contract`});let s=Yr(r,t);s.affectedFiles>5&&Wr(i,{rule:`blast-radius-importers`,detail:`${s.affectedFiles} affected files via direct import scanning`,source:`blast_radius`});let c=[...new Set(r.map(e=>ei(e,t)).filter(e=>!!e))].sort();c.length>=2&&Wr(i,{rule:`cross-package-boundary`,detail:`Files span ${c.length} packages: ${c.join(`, `)}`,source:`cross_package`}),Vr.test(n)&&Wr(i,{rule:`task-hint-critical`,detail:`Task description matched migration or compatibility criticality hints`,source:`task_hint`});let l=i.length>0?`critical`:r.length===1?`floor`:`standard`;l===`floor`&&(a||o)&&(l=`standard`);let u=$r(i),d=l===`floor`&&u.some(e=>e.type===`contract`)?{suggestedTier:`standard`,reason:`Contract-type unknowns detected — Floor may be insufficient`}:void 0;return{tier:l,triggers:i,packagesCrossed:c,hasSchemaChange:o,hasSecurityPath:a,typedUnknownSeeds:u,ceremony:Hr[l],reclassifyHint:d}}function Wr(e,t){e.some(e=>e.rule===t.rule&&e.source===t.source)||e.push(t)}function Gr(e,t){let n=w(t,e),r=C(t,n);return(r&&!r.startsWith(`..`)?r:n).replace(/\\/g,`/`)}function Kr(e){if(!M(e))return!1;try{return L(e).size<=1e5}catch{return!1}}function qr(e){if(!Kr(e))return``;try{return P(e,`utf-8`).split(/\r?\n/).slice(0,200).join(`
57
57
  `)}catch{return``}}async function Jr(e){if(!Kr(e))return!1;try{let t=await Cr({path:e}),n=new Set(t.exports);return t.interfaces.some(e=>n.has(e.name))||t.types.some(e=>n.has(e.name))}catch{return!1}}function Yr(e,t){let n=new Set(e.filter(e=>M(e)));if(n.size===0)return{affectedFiles:e.length,importers:[]};let r=new Set;for(let e of Xr(t)){if(n.has(e)||!Kr(e))continue;let t=qr(e);t&&Zr(t).some(t=>Qr(t,e,n))&&r.add(e)}return{affectedFiles:e.length+r.size,importers:[...r].map(e=>C(t,e).replace(/\\/g,`/`))}}function Xr(e){let t=[];function n(e){let r=[];try{r=F(e)}catch{return}for(let i of r){if(Nr.has(i))continue;let r=w(e,i),a;try{a=L(r)}catch{continue}if(a.isDirectory()){n(r);continue}Mr.has(b(i).toLowerCase())&&t.push(r)}}return n(e),t}function Zr(e){let t=new Set;for(let n of e.matchAll(/(?:from\s+['"]([^'"]+)['"]|import\s+['"]([^'"]+)['"]|require\(\s*['"]([^'"]+)['"]\s*\))/g)){let e=n[1]??n[2]??n[3];e&&t.add(e)}return[...t]}function Qr(e,t,n){if(!e.startsWith(`.`))return!1;let r=w(y(t),e);return[r,`${r}.ts`,`${r}.tsx`,`${r}.js`,`${r}.jsx`,`${r}.mts`,`${r}.cts`,`${r}.mjs`,`${r}.cjs`,w(r,`index.ts`),w(r,`index.tsx`),w(r,`index.js`),w(r,`index.jsx`)].some(e=>n.has(e))}function $r(e){return e.map(e=>{switch(e.source){case`security_auth`:return{description:`Verify auth and security assumptions before yielding`,type:`contract`,suggestedTool:`aikit_search`};case`schema_contract`:return{description:`Confirm schema and contract compatibility`,type:`contract`,suggestedTool:`aikit_schema_validate`};case`blast_radius`:return{description:`Inspect affected importers before delivery`,type:`impact`,suggestedTool:`aikit_blast_radius`};case`cross_package`:return{description:`Assess downstream package impact across boundaries`,type:`impact`,suggestedTool:`aikit_blast_radius`};case`task_hint`:return{description:`Check established conventions for migrations or compatibility work`,type:`convention`,suggestedTool:`aikit_find`};default:return{description:`No explicit unknown routing 
required`,type:`freshness`,suggestedTool:`aikit_lookup`}}})}function ei(e,t){let n=y(w(t,e)),r=w(t);for(;n.length>=r.length;){let e=w(n,`package.json`);if(M(e))try{return JSON.parse(P(e,`utf-8`)).name??C(r,n).replace(/\\/g,`/`)}catch{return C(r,n).replace(/\\/g,`/`)}let t=y(n);if(t===n)break;n=t}}function ti(e){return Math.ceil(e.length/4)}async function ni(e,t,n){let{task:r,maxFiles:i=15,contentType:a,origin:o}=n,s=await e.embed(r),c={limit:i*3,contentType:a,origin:o},l=await t.search(s,c),u=new Map;for(let e of l){let t=e.record.sourcePath,n=u.get(t);n?(n.chunks.push(e),n.totalChars+=e.record.content.length,n.maxScore=Math.max(n.maxScore,e.score)):u.set(t,{chunks:[e],totalChars:e.record.content.length,maxScore:e.score})}let d=[...u.entries()].sort(([,e],[,t])=>t.maxScore-e.maxScore).slice(0,i).map(([e,{chunks:t,maxScore:n}])=>{let r=t.sort((e,t)=>e.record.startLine-t.record.startLine).map(e=>({start:e.record.startLine,end:e.record.endLine,heading:e.record.headingPath})),i=t.sort((e,t)=>t.score-e.score)[0];return{path:e,reason:i.record.headingPath?`Matches: ${i.record.headingPath}`:`Contains relevant ${i.record.contentType} content`,estimatedTokens:0,relevance:n,focusRanges:r}});for(let e of d){let t=u.get(e.path);t&&(e.estimatedTokens=ti(t.chunks.map(e=>e.record.content).join(``)))}let f=d.reduce((e,t)=>e+t.estimatedTokens,0),p=[...d].sort((e,t)=>{let n=e.path.includes(`config`)||e.path.includes(`types`)?-1:0,r=t.path.includes(`config`)||t.path.includes(`types`)?-1:0;return n===r?t.relevance-e.relevance:n-r}).map(e=>e.path),m=[];for(let e of d)e.estimatedTokens<=100?m.push(`aikit_file_summary({ path: "${e.path}" }) → ~${e.estimatedTokens} tokens`):m.push(`aikit_compact({ path: "${e.path}", query: "${r}" }) → ~${Math.ceil(e.estimatedTokens/5)} tokens`);return{task:r,files:d,totalEstimatedTokens:f,readingOrder:p,compactCommands:m}}const ri={floor:{ground:`Parasitic — read target file only`,build:`Implement directly`,break:`Skip`,evidenceMap:`Not 
required`,gate:`Self-certify`},standard:{ground:`Scope map + blast radius + constraint seed`,build:`TDD — test first, then implement`,break:`Error paths + edge cases`,evidenceMap:`3-8 critical-path entries`,gate:`YIELD/HOLD evaluation`},critical:{ground:`Full scope map + blast radius + trace + patterns + constraint pack`,build:`TDD + contract verification + cross-service validation`,break:`Error paths + edge cases + security dimensions + data-flow verification`,evidenceMap:`Comprehensive — all critical-path claims with receipts`,gate:`Strict YIELD/HOLD/HARD_BLOCK evaluation`}};async function ii(e,t,n){let r=n.maxConstraints??3,i=await ai(n),a=await ci(n.files);if(i.tier===`floor`)return hi({tier:i.tier,classifyTriggers:i.classifyTriggers,scopeMap:null,typedUnknownSeeds:i.typedUnknownSeeds,constraints:[],fileSummaries:a,evidenceMapTaskId:null,ceremony:i.ceremony});let[o,s,c]=await Promise.all([oi(e,t,n.task,i.tier),si(e,t,n.task,r),ui(n.rootPath,n.taskId??gi(n.task),i.tier)]);return hi({tier:i.tier,classifyTriggers:i.classifyTriggers,scopeMap:o,typedUnknownSeeds:i.typedUnknownSeeds,constraints:s,fileSummaries:a,evidenceMapTaskId:c,ceremony:i.ceremony})}async function ai(e){if(e.forceTier)return{tier:e.forceTier,classifyTriggers:[],typedUnknownSeeds:[],ceremony:mi(e.forceTier)};try{let t=await Ur({files:e.files,task:e.task,rootPath:e.rootPath});return{tier:t.tier,classifyTriggers:t.triggers,typedUnknownSeeds:t.typedUnknownSeeds,ceremony:t.ceremony}}catch{return{tier:`standard`,classifyTriggers:[],typedUnknownSeeds:[],ceremony:mi(`standard`)}}}async function oi(e,t,n,r){try{return await ni(e,t,{task:n,maxFiles:r===`critical`?20:10})}catch{return null}}async function si(e,t,n,r){try{let i=`decision pattern convention ${n}`,a=typeof e.embedQuery==`function`?await e.embedQuery(i):await e.embed(i);return(await t.search(a,{limit:r,origin:`curated`})).slice(0,r).map(e=>di(e))}catch{return[]}}async function ci(e){return Promise.all(e.map(async e=>li(e)))}async function 
li(e){try{return pi(await Cr({path:e}))}catch(t){return{path:e,exports:[],functions:[],lines:0,error:t instanceof Error?t.message:`Unable to summarize file`}}}async function ui(e,t,n){try{return yr({action:`create`,taskId:t,tier:n},e),t}catch{return null}}function di(e){return{source:e.record.sourcePath,snippet:fi(e.record.content),relevance:e.score}}function fi(e){let t=e.replace(/\s+/g,` `).trim();return t.length<=200?t:`${t.slice(0,197).trimEnd()}...`}function pi(e){return{path:e.path,exports:e.exports,functions:e.functions.map(e=>e.name),lines:e.lines}}function mi(e){return{...ri[e]}}function hi(e){return{...e,estimatedTokens:U(JSON.stringify(e))}}function gi(e){let t=e.toLowerCase().replace(/[^a-z0-9\s]/g,` `).split(/\s+/).filter(Boolean).slice(0,5).join(`-`),n=Date.now().toString(36);return`${t||`task`}-${n}`}const _i=T(l);async function vi(e,t){try{let{stdout:n}=await _i(`git`,e,{cwd:t,timeout:15e3});return n.toString().trim()}catch{return``}}async function yi(e={}){let t=e.cwd??process.cwd(),n=e.commitCount??5,r=await vi([`rev-parse`,`--show-toplevel`],t);if(!r)return{gitRoot:t,branch:`unknown`,status:{staged:[],modified:[],untracked:[]},recentCommits:[]};let i=r,[a,o,s,c]=await Promise.all([vi([`rev-parse`,`--abbrev-ref`,`HEAD`],i),vi([`status`,`--porcelain`],i),vi([`log`,`--max-count=${n}`,`--format=%h|%s|%an|%ai`],i),e.includeDiff?vi([`diff`,`--stat`,`--no-color`],i):Promise.resolve(``)]),l=[],u=[],d=[];for(let e of o.split(`
58
58
  `).filter(Boolean)){let t=e[0],n=e[1],r=e.slice(3).trim();t!==` `&&t!==`?`&&l.push(r),(n===`M`||n===`D`)&&u.push(r),t===`?`&&d.push(r)}let f=s.split(`
59
- `).filter(Boolean).map(e=>{let[t,n,r,i]=e.split(`|`);return{hash:t,message:n,author:r,date:i}});return{gitRoot:r,branch:a||`unknown`,status:{staged:l,modified:u,untracked:d},recentCommits:f,diff:c||void 0}}function bi(e,...t){return`${e}_${V(`sha256`).update(t.join(`|`)).digest(`hex`).slice(0,12)}`}async function xi(e,t){let{action:n}=t;switch(n){case`find_nodes`:{let r=await e.findNodes({type:t.nodeType,namePattern:t.namePattern,sourcePath:t.sourcePath,limit:t.limit});return{action:n,nodes:r,summary:`Found ${r.length} node(s)${t.nodeType?` of type "${t.nodeType}"`:``}${t.namePattern?` matching "${t.namePattern}"`:``}`}}case`find_edges`:{let r=await e.findEdges({type:t.edgeType,fromId:t.fromId,toId:t.toId,limit:t.limit});return{action:n,edges:r,summary:`Found ${r.length} edge(s)${t.edgeType?` of type "${t.edgeType}"`:``}`}}case`neighbors`:{if(!t.nodeId)return{action:n,summary:`Error: nodeId is required for neighbors action`};let r=await e.getNeighbors(t.nodeId,{edgeType:t.edgeType,direction:t.direction,limit:t.limit});return{action:n,nodes:r.nodes,edges:r.edges,summary:`Found ${r.nodes.length} neighbor(s) and ${r.edges.length} edge(s) for node "${t.nodeId}"`}}case`traverse`:{if(!t.nodeId)return{action:n,summary:`Error: nodeId is required for traverse action`};let r=await e.traverse(t.nodeId,{edgeType:t.edgeType,maxDepth:t.maxDepth,direction:t.direction,limit:t.limit});return{action:n,nodes:r.nodes,edges:r.edges,summary:`Traversed ${r.nodes.length} node(s) and ${r.edges.length} edge(s) from "${t.nodeId}" (depth=${t.maxDepth??2})`}}case`stats`:{let t=await e.getStats();return{action:n,stats:t,summary:`Graph: ${t.nodeCount} nodes, ${t.edgeCount} edges. 
Types: ${Object.entries(t.nodeTypes).map(([e,t])=>`${e}(${t})`).join(`, `)||`none`}`}}case`validate`:{let t=await e.validate();return{action:n,validation:t,stats:t.stats,summary:t.valid?`Graph validation passed: ${t.stats.nodeCount} nodes, ${t.stats.edgeCount} edges, ${t.orphanNodes.length} orphan node(s)`:`Graph validation found ${t.danglingEdges.length} dangling edge(s) and ${t.orphanNodes.length} orphan node(s)`}}case`add`:{let r=0,i=0;if(t.nodes&&t.nodes.length>0){let n=t.nodes.map(e=>({id:e.id??bi(`node`,e.type,e.name),type:e.type,name:e.name,properties:e.properties??{},sourceRecordId:e.sourceRecordId,sourcePath:e.sourcePath,createdAt:new Date().toISOString()}));await e.upsertNodes(n),r=n.length}if(t.edges&&t.edges.length>0){let n=t.edges.map(e=>({id:e.id??bi(`edge`,e.fromId,e.toId,e.type),fromId:e.fromId,toId:e.toId,type:e.type,weight:e.weight,properties:e.properties}));await e.upsertEdges(n),i=n.length}return{action:n,nodesAdded:r,edgesAdded:i,summary:`Added ${r} node(s) and ${i} edge(s) to the graph`}}case`delete`:if(t.nodeId)return await e.deleteNode(t.nodeId),{action:n,deleted:1,summary:`Deleted node "${t.nodeId}" and its edges`};if(t.sourcePath){let r=await e.deleteBySourcePath(t.sourcePath);return{action:n,deleted:r,summary:`Deleted ${r} node(s) from source "${t.sourcePath}"`}}return{action:n,summary:`Error: nodeId or sourcePath required for delete action`};case`clear`:{let t=await e.getStats();return await e.clear(),{action:n,deleted:t.nodeCount,summary:`Cleared graph: removed ${t.nodeCount} node(s) and ${t.edgeCount} edge(s)`}}case`detect_communities`:{let t=await e.detectCommunities(),r=Object.values(t).reduce((e,t)=>e+t.length,0);return{action:n,communities:t,summary:`Detected ${Object.keys(t).length} community/communities covering ${r} node(s)`}}case`set_community`:return!t.nodeId||!t.community?{action:n,summary:`Error: nodeId and community are required for set_community action`}:(await 
e.setNodeCommunity(t.nodeId,t.community),{action:n,summary:`Set community "${t.community}" on node "${t.nodeId}"`});case`trace_process`:{if(!t.nodeId||!t.label)return{action:n,summary:`Error: nodeId and label are required for trace_process action`};let r=await e.traceProcess(t.nodeId,t.label);return{action:n,process:r,summary:`Traced process "${r.label}" from node "${t.nodeId}": ${r.steps.length} step(s)`}}case`list_processes`:{let r=await e.getProcesses(t.nodeId);return{action:n,processes:r,summary:`Found ${r.length} process(es)${t.nodeId?` involving node "${t.nodeId}"`:``}`}}case`delete_process`:return t.processId?(await e.deleteProcess(t.processId),{action:n,summary:`Deleted process "${t.processId}"`}):{action:n,summary:`Error: processId is required for delete_process action`};case`depth_traverse`:{if(!t.nodeId)return{action:n,summary:`Error: nodeId is required for depth_traverse action`};let r=await e.depthGroupedTraversal(t.nodeId,t.maxDepth,{edgeType:t.edgeType,direction:t.direction,limit:t.limit}),i=Object.values(r).reduce((e,t)=>e+t.length,0);return{action:n,depthGroups:r,summary:`Depth-grouped traversal from "${t.nodeId}": ${i} node(s) across ${Object.keys(r).length} depth level(s)`}}case`cohesion`:{if(!t.community)return{action:n,summary:`Error: community is required for cohesion action`};let r=await e.getCohesionScore(t.community);return{action:n,cohesionScore:r,summary:`Community "${t.community}" cohesion score: ${(r*100).toFixed(1)}%`}}case`symbol360`:{if(!t.nodeId)return{action:n,summary:`Error: nodeId is required for symbol360 action`};let r=await e.getSymbol360(t.nodeId);return{action:n,symbol360:r,nodes:[r.node],edges:[...r.incoming,...r.outgoing],summary:`360° view of "${r.node.name}": ${r.incoming.length} incoming, ${r.outgoing.length} outgoing, community: ${r.community??`none`}, ${r.processes.length} process(es)`}}default:return{action:n,summary:`Unknown action: ${n}`}}}async function Si(e,t,n){let r=n?.hops??1,i=n?.maxPerHit??5,a=[];for(let o 
of t)try{let t=await e.findNodes({sourcePath:o.sourcePath}),s=[],c=[],l=new Set,u=new Set;for(let a of t.slice(0,i))if(!l.has(a.id)&&(l.add(a.id),s.push(a),r>0)){let t=await e.traverse(a.id,{maxDepth:r,edgeType:n?.edgeType,limit:i});for(let e of t.nodes)l.has(e.id)||(l.add(e.id),s.push(e));for(let e of t.edges)u.has(e.id)||(u.add(e.id),c.push(e))}a.push({recordId:o.recordId,score:o.score,sourcePath:o.sourcePath,graphContext:{nodes:s,edges:c}})}catch{a.push({recordId:o.recordId,score:o.score,sourcePath:o.sourcePath,graphContext:{nodes:[],edges:[]}})}return a}const Ci=[{name:`onboard`,description:`First-time codebase exploration and understanding`,keywords:[`onboard`,`new project`,`understand`,`explore`,`first time`,`getting started`,`learn`,`overview`],tools:[{tool:`status`,reason:`Check index health and record count`,order:1},{tool:`onboard`,reason:`Run all analysis tools in one command`,order:2,suggestedArgs:{path:`.`}},{tool:`search`,reason:`Find specific topics of interest`,order:3,tokenTip:`Add limit:3 or min_score:0.5 to reduce results`},{tool:`graph`,reason:`Module & symbol relationship map — stats for overview, neighbors for exploration`,order:4,suggestedArgs:{action:`stats`}}]},{name:`audit`,description:`Assess project health, quality, and structure`,keywords:[`audit`,`health`,`quality`,`assess`,`review project`,`check quality`,`code quality`,`tech debt`],tools:[{tool:`status`,reason:`Check index freshness`,order:1},{tool:`audit`,reason:`Unified audit report with score and recommendations`,order:2,suggestedArgs:{detail:`efficient`},tokenTip:`detail:'efficient' for score + top issues only`},{tool:`check`,reason:`Typecheck + lint validation`,order:3,tokenTip:`detail:'efficient' for pass/fail only (~300 tokens)`},{tool:`health`,reason:`Detailed health checks on package.json, tsconfig, etc.`,order:4}]},{name:`bugfix`,description:`Diagnose and fix a bug or failing 
test`,keywords:[`bug`,`fix`,`debug`,`error`,`failing`,`broken`,`crash`,`wrong`,`issue`,`problem`,`not working`],tools:[{tool:`parse_output`,reason:`Parse error output from build tools (tsc, vitest, biome)`,order:1},{tool:`symbol`,reason:`Find definition and all references of the failing symbol`,order:2},{tool:`trace`,reason:`Trace call chain backward from the failure point`,order:3,suggestedArgs:{direction:`backward`}},{tool:`graph`,reason:`Understand module context — what imports the failing module`,order:4,suggestedArgs:{action:`neighbors`}},{tool:`search`,reason:`Search for related patterns or similar fixes`,order:5,tokenTip:`Add limit:3 or min_score:0.5 to reduce results`},{tool:`test_run`,reason:`Re-run tests after fix`,order:6,tokenTip:`Shows only failures by default — up to 99% savings`}]},{name:`implement`,description:`Add a new feature or implement a change`,keywords:[`implement`,`add feature`,`new feature`,`build`,`create`,`add`,`develop`,`write code`],tools:[{tool:`scope_map`,reason:`Generate a reading plan for affected files`,order:1,tokenTip:`Reading plan without reading files`},{tool:`search`,reason:`Find related patterns and prior art`,order:2,tokenTip:`Add limit:3 or min_score:0.5 to reduce results`},{tool:`find`,reason:`Find usage examples of similar patterns`,order:3,suggestedArgs:{mode:`examples`}},{tool:`graph`,reason:`Map module dependencies before adding new code`,order:4,suggestedArgs:{action:`neighbors`}},{tool:`symbol`,reason:`Find existing patterns to follow`,order:5},{tool:`trace`,reason:`Understand call chains to integrate with`,order:6},{tool:`lane`,reason:`Declare write-intent lease for coordination in multi-agent scenarios`,order:7,suggestedArgs:{action:`lease`}},{tool:`check`,reason:`Validate after implementation`,order:8,tokenTip:`detail:'efficient' for pass/fail only (~300 tokens)`},{tool:`test_run`,reason:`Run tests to verify`,order:9,tokenTip:`Shows only failures by default — up to 99% savings`},{tool:`blast_radius`,reason:`Check 
impact of changes`,order:10,tokenTip:`Shows only affected files, not full content`}]},{name:`coordinate`,description:`Multi-agent coordination — declare intent, detect conflicts, manage parallel work`,keywords:[`coordinate`,`multi-agent`,`parallel`,`concurrent`,`collision`,`conflict`,`lease`,`dispatch`,`lock`,`intent`,`queue`,`dag`,`dependency`,`wave`,`task order`],tools:[{tool:`queue`,reason:`Create a task queue and push items with dependsOn for DAG ordering`,order:1,suggestedArgs:{action:`push`,name:`tasks`,title:`Task A`,dependsOn:[]}},{tool:`queue`,reason:`View execution waves — topological sort of pending tasks`,order:2,suggestedArgs:{action:`dag`,name:`tasks`}},{tool:`lane`,reason:`Query active leases before dispatching agents`,order:3,suggestedArgs:{action:`leases`}},{tool:`lane`,reason:`Declare write-intent on files before modification`,order:4,suggestedArgs:{action:`lease`}},{tool:`queue`,reason:`Claim next available task (skips blocked items)`,order:5,suggestedArgs:{action:`next`,name:`tasks`}},{tool:`queue`,reason:`Mark task done — auto-unblocks dependents`,order:6,suggestedArgs:{action:`done`,name:`tasks`}},{tool:`lane`,reason:`Release lease after agent completes work`,order:7,suggestedArgs:{action:`unlease`}},{tool:`lane`,reason:`Create isolated copy for parallel exploration`,order:8,suggestedArgs:{action:`create`}},{tool:`blast_radius`,reason:`Assess overlap between parallel tasks`,order:9,tokenTip:`Shows only affected files, not full content`}]},{name:`refactor`,description:`Restructure or clean up existing code`,keywords:[`refactor`,`restructure`,`clean up`,`reorganize`,`rename`,`move`,`extract`,`DRY`,`dead code`],tools:[{tool:`dead_symbols`,reason:`Find unused exports to remove`,order:1},{tool:`graph`,reason:`Map module dependency graph before restructuring`,order:2,suggestedArgs:{action:`neighbors`}},{tool:`trace`,reason:`Understand call chains affected by refactoring`,order:3},{tool:`symbol`,reason:`Find all references before 
renaming`,order:4},{tool:`blast_radius`,reason:`Assess impact before making changes`,order:5,tokenTip:`Shows only affected files, not full content`},{tool:`rename`,reason:`Safe cross-file rename`,order:6},{tool:`check`,reason:`Validate after refactoring`,order:7,tokenTip:`detail:'efficient' for pass/fail only (~300 tokens)`},{tool:`test_run`,reason:`Ensure no regressions`,order:8,tokenTip:`Shows only failures by default — up to 99% savings`}]},{name:`search`,description:`Find specific code, patterns, or information`,keywords:[`find`,`search`,`where`,`locate`,`look for`,`grep`,`which file`,`how does`],tools:[{tool:`search`,reason:`Hybrid semantic + keyword search`,order:1,tokenTip:`Add limit:3 or min_score:0.5 to reduce results`},{tool:`find`,reason:`Federated search with glob and regex`,order:2},{tool:`symbol`,reason:`Resolve a specific symbol definition and references`,order:3},{tool:`graph`,reason:`Explore cross-module import relationships and connected symbols`,order:4,suggestedArgs:{action:`neighbors`}}]},{name:`code-navigation`,description:`Understand code structure, module relationships, and cross-package dependencies`,keywords:[`navigate`,`understand`,`module`,`import`,`dependency`,`relationship`,`call chain`,`who calls`,`who uses`,`connected`,`cross-package`],tools:[{tool:`graph`,reason:`Module import graph — see who imports whom across packages`,order:1,suggestedArgs:{action:`neighbors`}},{tool:`symbol`,reason:`Resolve symbol definitions and all references`,order:2},{tool:`trace`,reason:`Follow call chains forward or backward`,order:3},{tool:`file_summary`,reason:`Structural overview of target files`,order:4,tokenTip:`10x fewer tokens than read_file`}]},{name:`context`,description:`Compress or manage context for efficient LLM interaction`,keywords:[`context`,`compress`,`summarize`,`too long`,`token`,`budget`,`reduce`,`compact`,`handoff`,`session`],tools:[{tool:`file_summary`,reason:`Quick structural overview without reading full file`,order:1,tokenTip:`10x 
fewer tokens than read_file`},{tool:`compact`,reason:`Compress file to relevant sections`,order:2,suggestedArgs:{segmentation:`paragraph`},tokenTip:`Server-side compression — 5-20x reduction`},{tool:`digest`,reason:`Compress multiple sources into budgeted summary`,order:3},{tool:`stratum_card`,reason:`Generate reusable context cards`,order:4},{tool:`session_digest`,reason:`Compress session activity for handoff`,order:5}]},{name:`memory`,description:`Manage persistent knowledge across sessions`,keywords:[`memory`,`remember`,`persist`,`save`,`recall`,`history`,`recover`,`diff`,`orphan`,`audit trail`,`decision`,`convention`,`session`,`checkpoint`],tools:[{tool:`list`,reason:`See all stored knowledge entries`,order:1},{tool:`search`,reason:`Search curated knowledge`,order:2,suggestedArgs:{origin:`curated`},tokenTip:`Add limit:3 or min_score:0.5 to reduce results`},{tool:`remember`,reason:`Store a new decision or pattern`,order:3},{tool:`checkpoint`,reason:`Save/restore session progress`,order:4},{tool:`stash`,reason:`Temporary key-value storage within session`,order:5},{tool:`knowledge_history`,reason:`View git-backed version history of a knowledge entry`,order:6},{tool:`knowledge_diff`,reason:`Compare versions of a knowledge entry`,order:7},{tool:`knowledge_recover`,reason:`Recover a deleted or previous version of knowledge`,order:8},{tool:`knowledge_orphaned`,reason:`Find knowledge entries with broken references`,order:9},{tool:`session_digest`,reason:`Compress session activity into a focused handoff`,order:10}]},{name:`validate`,description:`Run checks, tests, and validation`,keywords:[`validate`,`check`,`test`,`lint`,`typecheck`,`verify`,`CI`,`pass`,`run tests`],tools:[{tool:`check`,reason:`Typecheck + lint in one call`,order:1,suggestedArgs:{detail:`normal`},tokenTip:`detail:'efficient' for pass/fail only (~300 tokens)`},{tool:`test_run`,reason:`Run tests with structured output`,order:2,tokenTip:`Shows only failures by default — up to 99% 
savings`},{tool:`health`,reason:`Project health assessment`,order:3}]},{name:`analyze`,description:`Deep analysis of codebase structure, dependencies, or patterns`,keywords:[`analyze`,`dependency`,`structure`,`pattern`,`architecture`,`diagram`,`entry point`,`import`],tools:[{tool:`analyze_structure`,reason:`Project structure overview`,order:1},{tool:`analyze_dependencies`,reason:`Dependency graph and analysis`,order:2},{tool:`graph`,reason:`Traverse module import graph for cross-package relationships`,order:3,suggestedArgs:{action:`neighbors`}},{tool:`symbol`,reason:`Deep-dive into specific symbols`,order:4},{tool:`trace`,reason:`Follow data flow and call chains`,order:5},{tool:`analyze_patterns`,reason:`Detect code patterns and conventions`,order:6},{tool:`analyze_entry_points`,reason:`Find handlers, exports, and entry points`,order:7},{tool:`analyze_diagram`,reason:`Generate Mermaid diagrams`,order:8}]},{name:`upgrade`,description:`Update AI Kit agents, prompts, skills, and scaffold to the latest version (user-level and workspace-level)`,keywords:[`upgrade`,`update`,`version`,`scaffold`,`outdated`,`mismatch`,`deploy`,`install`,`refresh`],tools:[{tool:`status`,reason:`Check current versions and detect mismatches — auto-triggers upgrade when a version mismatch is found`,order:1},{tool:`reindex`,reason:`Refresh the index after the upgrade completes`,order:2},{tool:`produce_knowledge`,reason:`Regenerate codebase analysis with updated tooling`,order:3,suggestedArgs:{path:`.`}}]},{name:`flow`,description:`Manage development flows — structured step-by-step processes for tasks`,keywords:[`flow`,`workflow`,`step`,`process`,`pipeline`,`lifecycle`,`sequence`,`start flow`,`flow status`],tools:[{tool:`flow_list`,reason:`List all available flows (builtin + custom)`,order:1},{tool:`flow_status`,reason:`Check current active flow and step`,order:2},{tool:`flow_start`,reason:`Start a named flow`,order:3},{tool:`flow_step`,reason:`Advance to next step, skip, or 
redo`,order:4},{tool:`flow_read_instruction`,reason:`Read the current step instruction`,order:5},{tool:`flow_info`,reason:`Get detailed info about a specific flow`,order:6},{tool:`flow_add`,reason:`Install a new custom flow`,order:7},{tool:`flow_update`,reason:`Update an existing custom flow`,order:8},{tool:`flow_remove`,reason:`Remove a custom flow`,order:9},{tool:`flow_reset`,reason:`Clear active flow state to start over`,order:10}]},{name:`web`,description:`Fetch web pages, search the web, or make HTTP API calls`,keywords:[`web`,`fetch`,`url`,`website`,`api`,`http`,`request`,`download`,`scrape`,`browse`,`web search`,`online`],tools:[{tool:`web_search`,reason:`Search the web for information`,order:1},{tool:`web_fetch`,reason:`Fetch and extract content from URLs`,order:2},{tool:`http`,reason:`Make raw HTTP requests to APIs`,order:3}]},{name:`brainstorm`,description:`Interactive ideation and creative exploration for design decisions`,keywords:[`brainstorm`,`ideate`,`ideas`,`explore`,`creative`,`design`,`options`,`approach`,`alternatives`,`think`],tools:[{tool:`brainstorm`,reason:`Interactive structured ideation session`,order:1},{tool:`search`,reason:`Find related patterns and prior art`,order:2,suggestedArgs:{origin:`curated`},tokenTip:`Add limit:3 or min_score:0.5 to reduce results`},{tool:`remember`,reason:`Persist chosen direction as a decision`,order:3,suggestedArgs:{category:`decisions`}}]},{name:`present`,description:`Display rich visual content — dashboards, charts, tables, timelines`,keywords:[`present`,`dashboard`,`chart`,`table`,`visualize`,`display`,`show`,`render`,`report`,`timeline`],tools:[{tool:`present`,reason:`Render rich HTML dashboards, charts, and tables`,order:1},{tool:`analyze_diagram`,reason:`Generate Mermaid diagrams for architecture views`,order:2},{tool:`measure`,reason:`Collect metrics to visualize`,order:3}]},{name:`quality`,description:`FORGE quality gates — classify task complexity, ground requirements, verify 
evidence`,keywords:[`quality`,`forge`,`evidence`,`gate`,`classify`,`ground`,`tier`,`critical`,`verify`,`proof`],tools:[{tool:`forge_classify`,reason:`Determine task tier (Floor/Standard/Critical)`,order:1},{tool:`forge_ground`,reason:`Ground requirements with evidence criteria`,order:2},{tool:`evidence_map`,reason:`Map evidence and run quality gates`,order:3,suggestedArgs:{action:`gate`}},{tool:`check`,reason:`Typecheck + lint validation`,order:4,tokenTip:`detail:'efficient' for pass/fail only (~300 tokens)`},{tool:`test_run`,reason:`Run tests for coverage evidence`,order:5,tokenTip:`Shows only failures by default — up to 99% savings`}]},{name:`transform`,description:`Automated code transformations — codemods, renames, data transforms`,keywords:[`transform`,`codemod`,`rename`,`replace`,`migrate`,`convert`,`data transform`,`rewrite`,`bulk edit`],tools:[{tool:`rename`,reason:`Safe cross-file symbol rename`,order:1},{tool:`codemod`,reason:`Apply AST-level code transformations`,order:2},{tool:`data_transform`,reason:`Transform data between formats`,order:3},{tool:`diff_parse`,reason:`Parse and analyze diffs`,order:4},{tool:`blast_radius`,reason:`Check impact of transformations`,order:5,tokenTip:`Shows only affected files, not full content`}]},{name:`snippet`,description:`Save, search, and reuse code templates and patterns`,keywords:[`snippet`,`template`,`boilerplate`,`pattern`,`reuse`,`save code`,`save template`],tools:[{tool:`snippet`,reason:`Search saved code snippets`,order:1,suggestedArgs:{action:`search`}},{tool:`snippet`,reason:`Save a new code snippet`,order:2,suggestedArgs:{action:`save`}},{tool:`snippet`,reason:`Get a snippet by name`,order:3,suggestedArgs:{action:`get`}},{tool:`find`,reason:`Find usage examples of a pattern in the codebase`,order:4,suggestedArgs:{mode:`examples`}}]},{name:`git`,description:`Git context, changelogs, and version tracking`,keywords:[`git`,`commit`,`diff`,`changelog`,`history`,`branch`,`version`,`checkpoint history`,`audit 
trail`,`version history`,`release`,`changes`,`what changed`],tools:[{tool:`git_context`,reason:`Get git status, diff, and branch info`,order:1},{tool:`changelog`,reason:`Generate changelog from git history`,order:2},{tool:`blast_radius`,reason:`Assess impact of changed files`,order:3,tokenTip:`Shows only affected files, not full content`},{tool:`knowledge_history`,reason:`Git-backed audit trail of knowledge changes`,order:4},{tool:`checkpoint`,reason:`View checkpoint history and diffs (git-backed)`,order:5,suggestedArgs:{action:`history`}}]},{name:`indexing`,description:`Manage smart indexing, trickle mode, and index maintenance`,keywords:[`index`,`indexing`,`smart index`,`trickle`,`reindex`,`index mode`,`index status`,`queue`,`stale index`],tools:[{tool:`status`,reason:`Check index mode, queue size, and freshness`,order:1},{tool:`reindex`,reason:`Force a full reindex (only when smart mode cannot keep up)`,order:2,suggestedArgs:{force:!0}},{tool:`produce_knowledge`,reason:`Regenerate curated resource analysis`,order:3,suggestedArgs:{path:`.`}}]},{name:`token-efficiency`,description:`Reduce token usage across AI Kit tool calls`,keywords:[`token`,`tokens`,`efficient`,`save tokens`,`reduce tokens`,`context`,`budget`,`compress`,`compact`,`verbose`,`terse`,`token budget`,`reduce output`],tools:[{tool:`config`,reason:`Set tokenBudget to control default detail level`,order:1,suggestedArgs:{action:`update`,updates:{tokenBudget:`efficient`}},tokenTip:`Set tokenBudget once — applies to all tools automatically`},{tool:`file_summary`,reason:`Structure-only file view — 10x fewer tokens than read_file`,order:2,tokenTip:`Use instead of read_file for understanding files`},{tool:`compact`,reason:`Server-side compression with query focus`,order:3,suggestedArgs:{query:`<your focus>`},tokenTip:`5-20x token reduction vs reading full file`},{tool:`stratum_card`,reason:`Reusable T1/T2 context cards — 10-100x reduction`,order:4,tokenTip:`T1: ~100 tokens/file, T2: ~300 
tokens/file`},{tool:`digest`,reason:`Compress multiple sources into token-budgeted summary`,order:5,suggestedArgs:{token_budget:2e3},tokenTip:`Replaces reading multiple full files`}]}];function wi(e,t=5,n){let r=e.toLowerCase(),i=Ci.map(e=>{let t=0;for(let n of e.keywords)r.includes(n)&&(t+=n.includes(` `)?2:1);return{workflow:e,score:t}}).filter(e=>e.score>0).sort((e,t)=>t.score-e.score),a=Ci.find(e=>e.name===`search`)??Ci[0],o=i[0]?.workflow??a,s=i.slice(1,4).map(e=>e.workflow.name).filter(e=>e!==o.name),c={workflow:o.name,description:o.description,tools:o.tools.slice(0,t),alternativeWorkflows:s};return n===`smart`&&(c.tools=c.tools.map(e=>e.tool===`reindex`?{...e,reason:`Smart indexing is active — files are indexed automatically. Use reindex({ force: true }) only if the index is severely outdated.`,suggestedArgs:{force:!0}}:e)),c}function Ti(e,t,n=.6){if(e.length<=t)return e;let r=Math.max(0,t-120),i=Math.floor(r*n),a=r-i,o=e.slice(0,i),s=o.lastIndexOf(`
59
+ `).filter(Boolean).map(e=>{let[t,n,r,i]=e.split(`|`);return{hash:t,message:n,author:r,date:i}});return{gitRoot:r,branch:a||`unknown`,status:{staged:l,modified:u,untracked:d},recentCommits:f,diff:c||void 0}}function bi(e,...t){return`${e}_${V(`sha256`).update(t.join(`|`)).digest(`hex`).slice(0,12)}`}async function xi(e,t){let{action:n}=t;switch(n){case`find_nodes`:{let r=await e.findNodes({type:t.nodeType,namePattern:t.namePattern,sourcePath:t.sourcePath,limit:t.limit});return{action:n,nodes:r,summary:`Found ${r.length} node(s)${t.nodeType?` of type "${t.nodeType}"`:``}${t.namePattern?` matching "${t.namePattern}"`:``}`}}case`find_edges`:{let r=await e.findEdges({type:t.edgeType,fromId:t.fromId,toId:t.toId,limit:t.limit});return{action:n,edges:r,summary:`Found ${r.length} edge(s)${t.edgeType?` of type "${t.edgeType}"`:``}`}}case`neighbors`:{if(!t.nodeId)return{action:n,summary:`Error: nodeId is required for neighbors action`};let r=await e.getNeighbors(t.nodeId,{edgeType:t.edgeType,direction:t.direction,limit:t.limit});return{action:n,nodes:r.nodes,edges:r.edges,summary:`Found ${r.nodes.length} neighbor(s) and ${r.edges.length} edge(s) for node "${t.nodeId}"`}}case`traverse`:{if(!t.nodeId)return{action:n,summary:`Error: nodeId is required for traverse action`};let r=await e.traverse(t.nodeId,{edgeType:t.edgeType,maxDepth:t.maxDepth,direction:t.direction,limit:t.limit});return{action:n,nodes:r.nodes,edges:r.edges,summary:`Traversed ${r.nodes.length} node(s) and ${r.edges.length} edge(s) from "${t.nodeId}" (depth=${t.maxDepth??2})`}}case`stats`:{let t=await e.getStats();return{action:n,stats:t,summary:`Graph: ${t.nodeCount} nodes, ${t.edgeCount} edges. 
Types: ${Object.entries(t.nodeTypes).map(([e,t])=>`${e}(${t})`).join(`, `)||`none`}`}}case`validate`:{let t=await e.validate();return{action:n,validation:t,stats:t.stats,summary:t.valid?`Graph validation passed: ${t.stats.nodeCount} nodes, ${t.stats.edgeCount} edges, ${t.orphanNodes.length} orphan node(s)`:`Graph validation found ${t.danglingEdges.length} dangling edge(s) and ${t.orphanNodes.length} orphan node(s)`}}case`add`:{let r=0,i=0;if(t.nodes&&t.nodes.length>0){let n=t.nodes.map(e=>({id:e.id??bi(`node`,e.type,e.name),type:e.type,name:e.name,properties:e.properties??{},sourceRecordId:e.sourceRecordId,sourcePath:e.sourcePath,createdAt:new Date().toISOString()}));await e.upsertNodes(n),r=n.length}if(t.edges&&t.edges.length>0){let n=t.edges.map(e=>({id:e.id??bi(`edge`,e.fromId,e.toId,e.type),fromId:e.fromId,toId:e.toId,type:e.type,weight:e.weight,properties:e.properties}));await e.upsertEdges(n),i=n.length}return{action:n,nodesAdded:r,edgesAdded:i,summary:`Added ${r} node(s) and ${i} edge(s) to the graph`}}case`delete`:if(t.nodeId)return await e.deleteNode(t.nodeId),{action:n,deleted:1,summary:`Deleted node "${t.nodeId}" and its edges`};if(t.sourcePath){let r=await e.deleteBySourcePath(t.sourcePath);return{action:n,deleted:r,summary:`Deleted ${r} node(s) from source "${t.sourcePath}"`}}return{action:n,summary:`Error: nodeId or sourcePath required for delete action`};case`clear`:{let t=await e.getStats();return await e.clear(),{action:n,deleted:t.nodeCount,summary:`Cleared graph: removed ${t.nodeCount} node(s) and ${t.edgeCount} edge(s)`}}case`detect_communities`:{let t=await e.detectCommunities(),r=Object.values(t).reduce((e,t)=>e+t.length,0);return{action:n,communities:t,summary:`Detected ${Object.keys(t).length} community/communities covering ${r} node(s)`}}case`set_community`:return!t.nodeId||!t.community?{action:n,summary:`Error: nodeId and community are required for set_community action`}:(await 
e.setNodeCommunity(t.nodeId,t.community),{action:n,summary:`Set community "${t.community}" on node "${t.nodeId}"`});case`trace_process`:{if(!t.nodeId||!t.label)return{action:n,summary:`Error: nodeId and label are required for trace_process action`};let r=await e.traceProcess(t.nodeId,t.label);return{action:n,process:r,summary:`Traced process "${r.label}" from node "${t.nodeId}": ${r.steps.length} step(s)`}}case`list_processes`:{let r=await e.getProcesses(t.nodeId);return{action:n,processes:r,summary:`Found ${r.length} process(es)${t.nodeId?` involving node "${t.nodeId}"`:``}`}}case`delete_process`:return t.processId?(await e.deleteProcess(t.processId),{action:n,summary:`Deleted process "${t.processId}"`}):{action:n,summary:`Error: processId is required for delete_process action`};case`depth_traverse`:{if(!t.nodeId)return{action:n,summary:`Error: nodeId is required for depth_traverse action`};let r=await e.depthGroupedTraversal(t.nodeId,t.maxDepth,{edgeType:t.edgeType,direction:t.direction,limit:t.limit}),i=Object.values(r).reduce((e,t)=>e+t.length,0);return{action:n,depthGroups:r,summary:`Depth-grouped traversal from "${t.nodeId}": ${i} node(s) across ${Object.keys(r).length} depth level(s)`}}case`cohesion`:{if(!t.community)return{action:n,summary:`Error: community is required for cohesion action`};let r=await e.getCohesionScore(t.community);return{action:n,cohesionScore:r,summary:`Community "${t.community}" cohesion score: ${(r*100).toFixed(1)}%`}}case`symbol360`:{if(!t.nodeId)return{action:n,summary:`Error: nodeId is required for symbol360 action`};let r=await e.getSymbol360(t.nodeId);return{action:n,symbol360:r,nodes:[r.node],edges:[...r.incoming,...r.outgoing],summary:`360° view of "${r.node.name}": ${r.incoming.length} incoming, ${r.outgoing.length} outgoing, community: ${r.community??`none`}, ${r.processes.length} process(es)`}}default:return{action:n,summary:`Unknown action: ${n}`}}}async function Si(e,t,n){let r=n?.hops??1,i=n?.maxPerHit??5,a=[];for(let o 
of t)try{let t=await e.findNodes({sourcePath:o.sourcePath}),s=[],c=[],l=new Set,u=new Set;for(let a of t.slice(0,i))if(!l.has(a.id)&&(l.add(a.id),s.push(a),r>0)){let t=await e.traverse(a.id,{maxDepth:r,edgeType:n?.edgeType,limit:i});for(let e of t.nodes)l.has(e.id)||(l.add(e.id),s.push(e));for(let e of t.edges)u.has(e.id)||(u.add(e.id),c.push(e))}a.push({recordId:o.recordId,score:o.score,sourcePath:o.sourcePath,graphContext:{nodes:s,edges:c}})}catch{a.push({recordId:o.recordId,score:o.score,sourcePath:o.sourcePath,graphContext:{nodes:[],edges:[]}})}return a}const Ci=[{name:`onboard`,description:`First-time codebase exploration and understanding`,keywords:[`onboard`,`new project`,`understand`,`explore`,`first time`,`getting started`,`learn`,`overview`],tools:[{tool:`status`,reason:`Check index health and record count`,order:1},{tool:`onboard`,reason:`Run all analysis tools in one command`,order:2,suggestedArgs:{path:`.`}},{tool:`search`,reason:`Find specific topics of interest`,order:3,tokenTip:`Add limit:3 or min_score:0.5 to reduce results`},{tool:`graph`,reason:`Module & symbol relationship map — stats for overview, neighbors for exploration`,order:4,suggestedArgs:{action:`stats`}}]},{name:`audit`,description:`Assess project health, quality, and structure`,keywords:[`audit`,`health`,`quality`,`assess`,`review project`,`check quality`,`code quality`,`tech debt`],tools:[{tool:`status`,reason:`Check index freshness`,order:1},{tool:`audit`,reason:`Unified audit report with score and recommendations`,order:2,suggestedArgs:{detail:`efficient`},tokenTip:`detail:'efficient' for score + top issues only`},{tool:`check`,reason:`Typecheck + lint validation`,order:3,tokenTip:`detail:'efficient' for pass/fail only (~300 tokens)`},{tool:`health`,reason:`Detailed health checks on package.json, tsconfig, etc.`,order:4}]},{name:`bugfix`,description:`Diagnose and fix a bug or failing 
test`,keywords:[`bug`,`fix`,`debug`,`error`,`failing`,`broken`,`crash`,`wrong`,`issue`,`problem`,`not working`],tools:[{tool:`parse_output`,reason:`Parse error output from build tools (tsc, vitest, biome)`,order:1},{tool:`symbol`,reason:`Find definition and all references of the failing symbol`,order:2},{tool:`trace`,reason:`Trace call chain backward from the failure point`,order:3,suggestedArgs:{direction:`backward`}},{tool:`graph`,reason:`Understand module context — what imports the failing module`,order:4,suggestedArgs:{action:`neighbors`}},{tool:`search`,reason:`Search for related patterns or similar fixes`,order:5,tokenTip:`Add limit:3 or min_score:0.5 to reduce results`},{tool:`test_run`,reason:`Re-run tests after fix`,order:6,tokenTip:`Shows only failures by default — up to 99% savings`}]},{name:`implement`,description:`Add a new feature or implement a change`,keywords:[`implement`,`add feature`,`new feature`,`build`,`create`,`add`,`develop`,`write code`],tools:[{tool:`scope_map`,reason:`Generate a reading plan for affected files`,order:1,tokenTip:`Reading plan without reading files`},{tool:`search`,reason:`Find related patterns and prior art`,order:2,tokenTip:`Add limit:3 or min_score:0.5 to reduce results`},{tool:`find`,reason:`Find usage examples of similar patterns`,order:3,suggestedArgs:{mode:`examples`}},{tool:`graph`,reason:`Map module dependencies before adding new code`,order:4,suggestedArgs:{action:`neighbors`}},{tool:`symbol`,reason:`Find existing patterns to follow`,order:5},{tool:`trace`,reason:`Understand call chains to integrate with`,order:6},{tool:`lane`,reason:`Declare write-intent lease for coordination in multi-agent scenarios`,order:7,suggestedArgs:{action:`lease`}},{tool:`check`,reason:`Validate after implementation`,order:8,tokenTip:`detail:'efficient' for pass/fail only (~300 tokens)`},{tool:`test_run`,reason:`Run tests to verify`,order:9,tokenTip:`Shows only failures by default — up to 99% savings`},{tool:`blast_radius`,reason:`Check 
impact of changes`,order:10,tokenTip:`Shows only affected files, not full content`}]},{name:`coordinate`,description:`Multi-agent coordination — declare intent, detect conflicts, manage parallel work`,keywords:[`coordinate`,`multi-agent`,`parallel`,`concurrent`,`collision`,`conflict`,`lease`,`dispatch`,`lock`,`intent`,`queue`,`dag`,`dependency`,`wave`,`task order`],tools:[{tool:`queue`,reason:`Create a task queue and push items with dependsOn for DAG ordering`,order:1,suggestedArgs:{action:`push`,name:`tasks`,title:`Task A`,dependsOn:[]}},{tool:`queue`,reason:`View execution waves — topological sort of pending tasks`,order:2,suggestedArgs:{action:`dag`,name:`tasks`}},{tool:`lane`,reason:`Query active leases before dispatching agents`,order:3,suggestedArgs:{action:`leases`}},{tool:`lane`,reason:`Declare write-intent on files before modification`,order:4,suggestedArgs:{action:`lease`}},{tool:`queue`,reason:`Claim next available task (skips blocked items)`,order:5,suggestedArgs:{action:`next`,name:`tasks`}},{tool:`queue`,reason:`Mark task done — auto-unblocks dependents`,order:6,suggestedArgs:{action:`done`,name:`tasks`}},{tool:`lane`,reason:`Release lease after agent completes work`,order:7,suggestedArgs:{action:`unlease`}},{tool:`lane`,reason:`Create isolated copy for parallel exploration`,order:8,suggestedArgs:{action:`create`}},{tool:`blast_radius`,reason:`Assess overlap between parallel tasks`,order:9,tokenTip:`Shows only affected files, not full content`}]},{name:`refactor`,description:`Restructure or clean up existing code`,keywords:[`refactor`,`restructure`,`clean up`,`reorganize`,`rename`,`move`,`extract`,`DRY`,`dead code`],tools:[{tool:`dead_symbols`,reason:`Find unused exports to remove`,order:1},{tool:`graph`,reason:`Map module dependency graph before restructuring`,order:2,suggestedArgs:{action:`neighbors`}},{tool:`trace`,reason:`Understand call chains affected by refactoring`,order:3},{tool:`symbol`,reason:`Find all references before 
renaming`,order:4},{tool:`blast_radius`,reason:`Assess impact before making changes`,order:5,tokenTip:`Shows only affected files, not full content`},{tool:`rename`,reason:`Safe cross-file rename`,order:6},{tool:`check`,reason:`Validate after refactoring`,order:7,tokenTip:`detail:'efficient' for pass/fail only (~300 tokens)`},{tool:`test_run`,reason:`Ensure no regressions`,order:8,tokenTip:`Shows only failures by default — up to 99% savings`}]},{name:`search`,description:`Find specific code, patterns, or information`,keywords:[`find`,`search`,`where`,`locate`,`look for`,`grep`,`which file`,`how does`],tools:[{tool:`search`,reason:`Hybrid semantic + keyword search`,order:1,tokenTip:`Add limit:3 or min_score:0.5 to reduce results`},{tool:`find`,reason:`Federated search with glob and regex`,order:2},{tool:`symbol`,reason:`Resolve a specific symbol definition and references`,order:3},{tool:`graph`,reason:`Explore cross-module import relationships and connected symbols`,order:4,suggestedArgs:{action:`neighbors`}}]},{name:`code-navigation`,description:`Understand code structure, module relationships, and cross-package dependencies`,keywords:[`navigate`,`understand`,`module`,`import`,`dependency`,`relationship`,`call chain`,`who calls`,`who uses`,`connected`,`cross-package`],tools:[{tool:`graph`,reason:`Module import graph — see who imports whom across packages`,order:1,suggestedArgs:{action:`neighbors`}},{tool:`symbol`,reason:`Resolve symbol definitions and all references`,order:2},{tool:`trace`,reason:`Follow call chains forward or backward`,order:3},{tool:`file_summary`,reason:`Structural overview of target files`,order:4,tokenTip:`10x fewer tokens than read_file`}]},{name:`context`,description:`Compress or manage context for efficient LLM interaction`,keywords:[`context`,`compress`,`summarize`,`too long`,`token`,`budget`,`reduce`,`compact`,`handoff`,`session`],tools:[{tool:`file_summary`,reason:`Quick structural overview without reading full file`,order:1,tokenTip:`10x 
fewer tokens than read_file`},{tool:`compact`,reason:`Compress file to relevant sections`,order:2,suggestedArgs:{segmentation:`paragraph`},tokenTip:`Server-side compression — 5-20x reduction`},{tool:`digest`,reason:`Compress multiple sources into budgeted summary`,order:3},{tool:`stratum_card`,reason:`Generate reusable context cards`,order:4},{tool:`session_digest`,reason:`Compress session activity for handoff`,order:5}]},{name:`memory`,description:`Manage persistent knowledge across sessions`,keywords:[`memory`,`remember`,`persist`,`save`,`recall`,`history`,`recover`,`diff`,`orphan`,`audit trail`,`decision`,`convention`,`session`,`checkpoint`],tools:[{tool:`list`,reason:`See all stored knowledge entries`,order:1},{tool:`search`,reason:`Search curated knowledge`,order:2,suggestedArgs:{origin:`curated`},tokenTip:`Add limit:3 or min_score:0.5 to reduce results`},{tool:`remember`,reason:`Store a new decision or pattern`,order:3},{tool:`checkpoint`,reason:`Save/restore session progress`,order:4},{tool:`stash`,reason:`Temporary key-value storage within session`,order:5},{tool:`knowledge_history`,reason:`View git-backed version history of a knowledge entry`,order:6},{tool:`knowledge_diff`,reason:`Compare versions of a knowledge entry`,order:7},{tool:`knowledge_recover`,reason:`Recover a deleted or previous version of knowledge`,order:8},{tool:`knowledge_orphaned`,reason:`Find knowledge entries with broken references`,order:9},{tool:`session_digest`,reason:`Compress session activity into a focused handoff`,order:10}]},{name:`validate`,description:`Run checks, tests, and validation`,keywords:[`validate`,`check`,`test`,`lint`,`typecheck`,`verify`,`CI`,`pass`,`run tests`],tools:[{tool:`check`,reason:`Typecheck + lint in one call`,order:1,suggestedArgs:{detail:`normal`},tokenTip:`detail:'efficient' for pass/fail only (~300 tokens)`},{tool:`test_run`,reason:`Run tests with structured output`,order:2,tokenTip:`Shows only failures by default — up to 99% 
savings`},{tool:`health`,reason:`Project health assessment`,order:3}]},{name:`analyze`,description:`Deep analysis of codebase structure, dependencies, or patterns`,keywords:[`analyze`,`dependency`,`structure`,`pattern`,`architecture`,`diagram`,`entry point`,`import`],tools:[{tool:`analyze`,reason:`Project structure overview`,order:1,suggestedArgs:{aspect:`structure`}},{tool:`analyze`,reason:`Dependency graph and analysis`,order:2,suggestedArgs:{aspect:`dependencies`}},{tool:`graph`,reason:`Traverse module import graph for cross-package relationships`,order:3,suggestedArgs:{action:`neighbors`}},{tool:`symbol`,reason:`Deep-dive into specific symbols`,order:4},{tool:`trace`,reason:`Follow data flow and call chains`,order:5},{tool:`analyze`,reason:`Detect code patterns and conventions`,order:6,suggestedArgs:{aspect:`patterns`}},{tool:`analyze`,reason:`Find handlers, exports, and entry points`,order:7,suggestedArgs:{aspect:`entry_points`}},{tool:`analyze`,reason:`Generate Mermaid diagrams`,order:8,suggestedArgs:{aspect:`diagram`}}]},{name:`upgrade`,description:`Update AI Kit agents, prompts, skills, and scaffold to the latest version (user-level and workspace-level)`,keywords:[`upgrade`,`update`,`version`,`scaffold`,`outdated`,`mismatch`,`deploy`,`install`,`refresh`],tools:[{tool:`status`,reason:`Check current versions and detect mismatches — auto-triggers upgrade when a version mismatch is found`,order:1},{tool:`reindex`,reason:`Refresh the index after the upgrade completes`,order:2},{tool:`produce_knowledge`,reason:`Regenerate codebase analysis with updated tooling`,order:3,suggestedArgs:{path:`.`}}]},{name:`flow`,description:`Manage development flows — structured step-by-step processes for tasks`,keywords:[`flow`,`workflow`,`step`,`process`,`pipeline`,`lifecycle`,`sequence`,`start flow`,`flow status`],tools:[{tool:`flow_list`,reason:`List all available flows (builtin + custom)`,order:1},{tool:`flow_status`,reason:`Check current active flow and 
step`,order:2},{tool:`flow_start`,reason:`Start a named flow`,order:3},{tool:`flow_step`,reason:`Advance to next step, skip, or redo`,order:4},{tool:`flow_read_instruction`,reason:`Read the current step instruction`,order:5},{tool:`flow_info`,reason:`Get detailed info about a specific flow`,order:6},{tool:`flow_add`,reason:`Install a new custom flow`,order:7},{tool:`flow_update`,reason:`Update an existing custom flow`,order:8},{tool:`flow_remove`,reason:`Remove a custom flow`,order:9},{tool:`flow_reset`,reason:`Clear active flow state to start over`,order:10}]},{name:`web`,description:`Fetch web pages, search the web, or make HTTP API calls`,keywords:[`web`,`fetch`,`url`,`website`,`api`,`http`,`request`,`download`,`scrape`,`browse`,`web search`,`online`],tools:[{tool:`web_search`,reason:`Search the web for information`,order:1},{tool:`web_fetch`,reason:`Fetch and extract content from URLs`,order:2},{tool:`http`,reason:`Make raw HTTP requests to APIs`,order:3}]},{name:`brainstorm`,description:`Interactive ideation and creative exploration for design decisions`,keywords:[`brainstorm`,`ideate`,`ideas`,`explore`,`creative`,`design`,`options`,`approach`,`alternatives`,`think`],tools:[{tool:`brainstorm`,reason:`Interactive structured ideation session`,order:1},{tool:`search`,reason:`Find related patterns and prior art`,order:2,suggestedArgs:{origin:`curated`},tokenTip:`Add limit:3 or min_score:0.5 to reduce results`},{tool:`remember`,reason:`Persist chosen direction as a decision`,order:3,suggestedArgs:{category:`decisions`}}]},{name:`present`,description:`Display rich visual content — dashboards, charts, tables, timelines`,keywords:[`present`,`dashboard`,`chart`,`table`,`visualize`,`display`,`show`,`render`,`report`,`timeline`],tools:[{tool:`present`,reason:`Render rich HTML dashboards, charts, and tables`,order:1},{tool:`analyze`,reason:`Generate Mermaid diagrams for architecture views`,order:2,suggestedArgs:{aspect:`diagram`}},{tool:`measure`,reason:`Collect metrics 
to visualize`,order:3}]},{name:`quality`,description:`FORGE quality gates — classify task complexity, ground requirements, verify evidence`,keywords:[`quality`,`forge`,`evidence`,`gate`,`classify`,`ground`,`tier`,`critical`,`verify`,`proof`],tools:[{tool:`forge_classify`,reason:`Determine task tier (Floor/Standard/Critical)`,order:1},{tool:`forge_ground`,reason:`Ground requirements with evidence criteria`,order:2},{tool:`evidence_map`,reason:`Map evidence and run quality gates`,order:3,suggestedArgs:{action:`gate`}},{tool:`check`,reason:`Typecheck + lint validation`,order:4,tokenTip:`detail:'efficient' for pass/fail only (~300 tokens)`},{tool:`test_run`,reason:`Run tests for coverage evidence`,order:5,tokenTip:`Shows only failures by default — up to 99% savings`}]},{name:`transform`,description:`Automated code transformations — codemods, renames, data transforms`,keywords:[`transform`,`codemod`,`rename`,`replace`,`migrate`,`convert`,`data transform`,`rewrite`,`bulk edit`],tools:[{tool:`rename`,reason:`Safe cross-file symbol rename`,order:1},{tool:`codemod`,reason:`Apply AST-level code transformations`,order:2},{tool:`data_transform`,reason:`Transform data between formats`,order:3},{tool:`diff_parse`,reason:`Parse and analyze diffs`,order:4},{tool:`blast_radius`,reason:`Check impact of transformations`,order:5,tokenTip:`Shows only affected files, not full content`}]},{name:`snippet`,description:`Save, search, and reuse code templates and patterns`,keywords:[`snippet`,`template`,`boilerplate`,`pattern`,`reuse`,`save code`,`save template`],tools:[{tool:`snippet`,reason:`Search saved code snippets`,order:1,suggestedArgs:{action:`search`}},{tool:`snippet`,reason:`Save a new code snippet`,order:2,suggestedArgs:{action:`save`}},{tool:`snippet`,reason:`Get a snippet by name`,order:3,suggestedArgs:{action:`get`}},{tool:`find`,reason:`Find usage examples of a pattern in the codebase`,order:4,suggestedArgs:{mode:`examples`}}]},{name:`git`,description:`Git context, changelogs, 
and version tracking`,keywords:[`git`,`commit`,`diff`,`changelog`,`history`,`branch`,`version`,`checkpoint history`,`audit trail`,`version history`,`release`,`changes`,`what changed`],tools:[{tool:`git_context`,reason:`Get git status, diff, and branch info`,order:1},{tool:`changelog`,reason:`Generate changelog from git history`,order:2},{tool:`blast_radius`,reason:`Assess impact of changed files`,order:3,tokenTip:`Shows only affected files, not full content`},{tool:`knowledge_history`,reason:`Git-backed audit trail of knowledge changes`,order:4},{tool:`checkpoint`,reason:`View checkpoint history and diffs (git-backed)`,order:5,suggestedArgs:{action:`history`}}]},{name:`indexing`,description:`Manage smart indexing, trickle mode, and index maintenance`,keywords:[`index`,`indexing`,`smart index`,`trickle`,`reindex`,`index mode`,`index status`,`queue`,`stale index`],tools:[{tool:`status`,reason:`Check index mode, queue size, and freshness`,order:1},{tool:`reindex`,reason:`Force a full reindex (only when smart mode cannot keep up)`,order:2,suggestedArgs:{force:!0}},{tool:`produce_knowledge`,reason:`Regenerate curated resource analysis`,order:3,suggestedArgs:{path:`.`}}]},{name:`token-efficiency`,description:`Reduce token usage across AI Kit tool calls`,keywords:[`token`,`tokens`,`efficient`,`save tokens`,`reduce tokens`,`context`,`budget`,`compress`,`compact`,`verbose`,`terse`,`token budget`,`reduce output`],tools:[{tool:`config`,reason:`Set tokenBudget to control default detail level`,order:1,suggestedArgs:{action:`update`,updates:{tokenBudget:`efficient`}},tokenTip:`Set tokenBudget once — applies to all tools automatically`},{tool:`file_summary`,reason:`Structure-only file view — 10x fewer tokens than read_file`,order:2,tokenTip:`Use instead of read_file for understanding files`},{tool:`compact`,reason:`Server-side compression with query focus`,order:3,suggestedArgs:{query:`<your focus>`},tokenTip:`5-20x token reduction vs reading full 
file`},{tool:`stratum_card`,reason:`Reusable T1/T2 context cards — 10-100x reduction`,order:4,tokenTip:`T1: ~100 tokens/file, T2: ~300 tokens/file`},{tool:`digest`,reason:`Compress multiple sources into token-budgeted summary`,order:5,suggestedArgs:{token_budget:2e3},tokenTip:`Replaces reading multiple full files`}]}];function wi(e,t=5,n){let r=e.toLowerCase(),i=Ci.map(e=>{let t=0;for(let n of e.keywords)r.includes(n)&&(t+=n.includes(` `)?2:1);return{workflow:e,score:t}}).filter(e=>e.score>0).sort((e,t)=>t.score-e.score),a=Ci.find(e=>e.name===`search`)??Ci[0],o=i[0]?.workflow??a,s=i.slice(1,4).map(e=>e.workflow.name).filter(e=>e!==o.name),c={workflow:o.name,description:o.description,tools:o.tools.slice(0,t),alternativeWorkflows:s};return n===`smart`&&(c.tools=c.tools.map(e=>e.tool===`reindex`?{...e,reason:`Smart indexing is active — files are indexed automatically. Use reindex({ force: true }) only if the index is severely outdated.`,suggestedArgs:{force:!0}}:e)),c}function Ti(e,t,n=.6){if(e.length<=t)return e;let r=Math.max(0,t-120),i=Math.floor(r*n),a=r-i,o=e.slice(0,i),s=o.lastIndexOf(`
60
60
  `),c=s>0?o.slice(0,s):o,l=e.length-a,u=e.slice(l),d=u.indexOf(`
61
61
  `),f=d>=0?u.slice(d+1):u,p=e.length-c.length-f.length,m=1,h=c.length,g=e.length-f.length;for(let t=h;t<g;t++)e.charCodeAt(t)===10&&m++;return`${c}\n\n[… ${m} lines / ${(p/1024).toFixed(1)}KB truncated — showing first ${c.split(`
62
62
  `).length} + last ${f.split(`
@@ -83,7 +83,7 @@ No cross-package dependencies detected.`;let s=new Set;for(let e of o.keys()){le
83
83
  ---
84
84
 
85
85
  `)}const ba={structure:`Project Structure`,dependencies:`Dependencies`,"entry-points":`Entry Points`,symbols:`Symbols`,patterns:`Patterns`,diagram:`C4 Container Diagram`,"code-map":`Code Map (Module Graph)`,"config-values":`Configuration Values`,"synthesis-guide":`Synthesis Guide`};function xa(e,t,n,r){let i=[`Analysis baselines for **${n}** have been generated.`];t===`generate`?i.push("Individual results are in the sibling `.md` and `.json` files in this directory.",``):i.push(`Results are stored in the AI Kit vector store.`,``);let a=r.get(`symbols`),o=r.get(`dependencies`),s=r.get(`patterns`),c=r.get(`entry-points`),l=a?.totalCount??0,u=a?.exportedCount??0,d=o?.totalImports??0,f=c?.total??0,p=(s?.patterns??[]).map(e=>e.pattern),m=p.some(e=>e.startsWith(`Spring`)),h=p.includes(`AWS CDK`)||p.includes(`CDK IaC`),g=p.includes(`Maven`),_=p.includes(`Serverless`)||f>3,v=o?.external?Object.keys(o.external):[],y=v.some(e=>[`express`,`fastify`,`next`,`react`,`vitest`,`jest`].includes(e))||d>0,b=v.some(e=>[`turbo`,`lerna`,`nx`].includes(e))||p.includes(`Monorepo`);if(i.push(`### Project Profile`,``),i.push(`- **${l} symbols** (${u} exported), **${d} imports**, **${f} entry ${f===1?`point`:`points`}**`),p.length>0&&i.push(`- **Detected**: ${p.slice(0,8).join(`, `)}`),i.push(``),l===0&&d===0&&f===0)return i.push(`> **Note:** This project appears to be empty or contains no analyzable source code.`,`> Run onboard again after adding source files.`),i.join(`
86
- `);let x=e.filter(e=>e.status===`success`),S=e.filter(e=>e.status===`failed`);i.push(`### Completed Analyses`,``);for(let e of x){let n=ba[e.name]??e.name,r=e.output.length>1e3?`${Math.round(e.output.length/1024)}KB`:`${e.output.length}B`;t===`generate`?i.push(`- ✓ [${n}](./${e.name}.md) (${r})`):i.push(`- ✓ ${n} (${r})`)}if(S.length>0){i.push(``,`### Failed Analyses`,``);for(let e of S)i.push(`- ✗ ${e.name}: ${e.error}`)}i.push(``,`### Recommended Reading Order`,``,"1. **Start with** `synthesis-guide.md` (this file) → `entry-points.md` → `patterns.md`","2. **Module graph** via `code-map.md` — cross-package call edges with function names","3. **Architecture** via `diagram.md` (C4 Container) → `dependencies.md`","4. **Browse structure** via `structure.md` for file layout","5. **API surface** via `symbols.md` — file paths + exported symbols (capped at 80KB)","6. **Reference**: `config-values.md` (config reference)",``,`> **Size guidance:** Total output is ~`);let C=x.reduce((e,t)=>e+t.output.length,0)/1024;return i[i.length-1]+=`${Math.round(C)}KB. Focus on code-map.md + entry-points.md + diagram.md (~${Math.round((x.find(e=>e.name===`code-map`)?.output.length??0)/1024+(x.find(e=>e.name===`entry-points`)?.output.length??0)/1024+(x.find(e=>e.name===`diagram`)?.output.length??0)/1024)}KB) for maximum signal-to-token ratio.`,i.push(``,`### Synthesize Knowledge`,``,"Produce the following `aikit_remember` entries:",``),i.push("1. **Architecture Summary** (category: `architecture`)"),b?(i.push(` - Package boundaries, dependency graph between packages`),i.push(` - Shared vs service-specific code`)):_?(i.push(` - Lambda functions, triggers, event flow`),i.push(` - Infrastructure patterns (queues, tables, APIs)`)):m?(i.push(` - Controller → Service → Repository layers`),i.push(` - Spring configuration and profiles`)):(i.push(` - Layer structure, dependency flow`),i.push(` - Key design decisions`)),i.push(``),i.push("2. 
**Domain Model** (category: `architecture`)"),i.push(` - Key entities/types and their relationships`),i.push(` - Data flow from entry points through processing`),i.push(``),i.push("3. **Conventions** (category: `conventions`)"),i.push(` - Naming patterns, file organization, testing approach`),h&&i.push(` - CDK construct patterns and stack organization`),y&&i.push(` - Build tooling, package manager, module system`),g&&i.push(` - Maven module structure, dependency management`),i.push(``,`### Using AI Kit Tools`,``,`This project has an AI Kit MCP server with tools for search, analysis, memory, and more.`,"`aikit init` has already created `.github/copilot-instructions.md` and `AGENTS.md` with the complete tool reference.","If not, run `npx @vpxa/aikit init` to generate them.",``,`**Workflow pattern — use on every task:**`,``,"```",`aikit_search({ query: "your task keywords" }) # Recall prior decisions`,`aikit_scope_map({ task: "what you are doing" }) # Get a reading plan`,`# ... do the work ...`,`aikit_check({}) # Typecheck + lint`,`aikit_test_run({}) # Run tests`,`aikit_remember({ title: "What I learned", category: "decisions" }) # Persist`,"```"),i.join(`
86
+ `);let x=e.filter(e=>e.status===`success`),S=e.filter(e=>e.status===`failed`);i.push(`### Completed Analyses`,``);for(let e of x){let n=ba[e.name]??e.name,r=e.output.length>1e3?`${Math.round(e.output.length/1024)}KB`:`${e.output.length}B`;t===`generate`?i.push(`- ✓ [${n}](./${e.name}.md) (${r})`):i.push(`- ✓ ${n} (${r})`)}if(S.length>0){i.push(``,`### Failed Analyses`,``);for(let e of S)i.push(`- ✗ ${e.name}: ${e.error}`)}i.push(``,`### Recommended Reading Order`,``,"1. **Start with** `synthesis-guide.md` (this file) → `entry-points.md` → `patterns.md`","2. **Module graph** via `code-map.md` — cross-package call edges with function names","3. **Architecture** via `diagram.md` (C4 Container) → `dependencies.md`","4. **Browse structure** via `structure.md` for file layout","5. **API surface** via `symbols.md` — file paths + exported symbols (capped at 80KB)","6. **Reference**: `config-values.md` (config reference)",``,`> **Size guidance:** Total output is ~`);let C=x.reduce((e,t)=>e+t.output.length,0)/1024;return i[i.length-1]+=`${Math.round(C)}KB. Focus on code-map.md + entry-points.md + diagram.md (~${Math.round((x.find(e=>e.name===`code-map`)?.output.length??0)/1024+(x.find(e=>e.name===`entry-points`)?.output.length??0)/1024+(x.find(e=>e.name===`diagram`)?.output.length??0)/1024)}KB) for maximum signal-to-token ratio.`,i.push(``,`### Synthesize Knowledge`,``,'Produce the following `aikit_knowledge` entries with `action: "remember"`:',``),i.push("1. **Architecture Summary** (category: `architecture`)"),b?(i.push(` - Package boundaries, dependency graph between packages`),i.push(` - Shared vs service-specific code`)):_?(i.push(` - Lambda functions, triggers, event flow`),i.push(` - Infrastructure patterns (queues, tables, APIs)`)):m?(i.push(` - Controller → Service → Repository layers`),i.push(` - Spring configuration and profiles`)):(i.push(` - Layer structure, dependency flow`),i.push(` - Key design decisions`)),i.push(``),i.push("2. 
**Domain Model** (category: `architecture`)"),i.push(` - Key entities/types and their relationships`),i.push(` - Data flow from entry points through processing`),i.push(``),i.push("3. **Conventions** (category: `conventions`)"),i.push(` - Naming patterns, file organization, testing approach`),h&&i.push(` - CDK construct patterns and stack organization`),y&&i.push(` - Build tooling, package manager, module system`),g&&i.push(` - Maven module structure, dependency management`),i.push(``,`### Using AI Kit Tools`,``,`This project has an AI Kit MCP server with tools for search, analysis, memory, and more.`,"`aikit init` has already created `.github/copilot-instructions.md` and `AGENTS.md` with the complete tool reference.","If not, run `npx @vpxa/aikit init` to generate them.",``,`**Workflow pattern — use on every task:**`,``,"```",`aikit_search({ query: "your task keywords" }) # Recall prior decisions`,`aikit_scope_map({ task: "what you are doing" }) # Get a reading plan`,`# ... do the work ...`,`aikit_check({}) # Typecheck + lint`,`aikit_test_run({}) # Run tests`,`aikit_knowledge({ action: "remember", title: "What I learned", category: "decisions" }) # Persist`,"```"),i.join(`
87
87
  `)}function Sa(e,t,n){let r=e.get(`dependencies`),i=e.get(`symbols`),a=e.get(`entry-points`),o=[`## Code Map: ${t}\n`];if(!r&&!i)return o.push(`No dependency or symbol data available.`),o.join(`
88
88
  `);let s=r?.reverseGraph??{},c=i?.symbols??[],l=a?.entryPoints??[],u=new Map;for(let e of c){if(!e.exported)continue;let t=e.filePath.replace(/\\/g,`/`);if(J(t))continue;let n=u.get(t);n?n.push({name:e.name,kind:e.kind}):u.set(t,[{name:e.name,kind:e.kind}])}let d=new Map;for(let[e,t]of Object.entries(s)){let n=va(e.replace(/\\/g,`/`),u),r=t.map(e=>e.replace(/\\/g,`/`)).filter(e=>!J(e));if(r.length===0)continue;let i=d.get(n);if(i)for(let e of r)i.add(e);else d.set(n,new Set(r))}let f=new Map;for(let e of l)f.set(e.filePath.replace(/\\/g,`/`),{name:e.name,trigger:e.trigger});let p=new Map,m=new Map;if(n)for(let[e,t]of n){if(J(e))continue;let n=Y(e);for(let[r,i]of t){if(J(r)||n===Y(r))continue;let t=p.get(e);t||(t=new Map,p.set(e,t)),t.set(r,i);let a=m.get(r),o={file:e,symbols:i};a?a.push(o):m.set(r,[o])}}let h=new Set;for(let e of f.keys())h.add(e);for(let e of p.keys())h.add(e);for(let e of m.keys())h.add(e);if(!n)for(let e of u.keys()){let t=d.get(e);t&&t.size>=3&&h.add(e)}let g=new Map;for(let e of h){let t=Y(e),n=g.get(t);n?n.push(e):g.set(t,[e])}let _=[...g.entries()].sort((e,t)=>e[0].localeCompare(t[0])),v=n?`AST call graph`:`import analysis`,y=n?`, ${p.size} cross-package callers`:``;o.push(`**${h.size} key modules** (${v}${y})\n`),o.push(`**Legend:** ⚡ Entry point | 📤 Exports | → Calls (outgoing) | ← Called by (incoming) | ➡ Used by (import)
89
89
  `);for(let[e,t]of _){t.sort(),o.push(`### ${e}/\n`);for(let r of t){let t=u.get(r),i=f.get(r),a=p.get(r),s=m.get(r),c=d.get(r),l=r.startsWith(`${e}/`)?r.slice(e.length+1):r;if(o.push(`**${l}**`),i&&o.push(` ⚡ Entry: \`${i.name}\`${i.trigger?` (${i.trigger})`:``}`),t&&t.length>0){let e=t.slice(0,8).map(e=>`${e.name}`).join(`, `),n=t.length>8?` (+${t.length-8})`:``;o.push(` 📤 ${e}${n}`)}if(a&&a.size>0){let t=[...a.entries()].sort((e,t)=>t[1].length-e[1].length);for(let[n,r]of t.slice(0,4)){let t=n.startsWith(`${e}/`)?n.slice(e.length+1):n;o.push(` → ${t}: ${r.slice(0,5).join(`, `)}${r.length>5?`…`:``}`)}t.length>4&&o.push(` → +${t.length-4} more targets`)}if(s&&s.length>0){for(let t of s.slice(0,4)){let n=t.file.startsWith(`${e}/`)?t.file.slice(e.length+1):t.file;o.push(` ← ${n}: ${t.symbols.slice(0,4).join(`, `)}${t.symbols.length>4?`…`:``}`)}s.length>4&&o.push(` ← +${s.length-4} more callers`)}else if(!n&&c&&c.size>0){let e=[...c].filter(e=>!J(e));e.length<=3?o.push(` ➡ Used by: ${e.join(`, `)}`):o.push(` ➡ Used by: ${e.slice(0,3).join(`, `)} (+${e.length-3} more)`)}o.push(``)}}return o.join(`
@@ -0,0 +1,8 @@
1
+ {
2
+ "serverName": "aikit",
3
+ "command": "node",
4
+ "args": [
5
+ "-e",
6
+ "const{execFileSync:x}=require('child_process');const{renameSync:m}=require('fs');const{join:j}=require('path');const d=process.platform==='win32'?j(process.env.LOCALAPPDATA||'','npm-cache','_npx'):j(require('os').homedir(),'.npm','_npx');const s={stdio:'inherit',shell:true};try{x('npx',['-y','@vpxa/aikit','serve'],s)}catch(e){try{m(d,d+'_'+Date.now())}catch{};x('npx',['-y','@vpxa/aikit','serve'],s)}"
7
+ ]
8
+ }