@vpxa/aikit 0.1.4 → 0.1.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. package/package.json +1 -1
  2. package/packages/cli/dist/aikit-init.js +1 -1
  3. package/packages/cli/dist/commands/init/constants.d.ts +1 -1
  4. package/packages/cli/dist/commands/init/constants.js +1 -1
  5. package/packages/cli/dist/commands/init/user.js +1 -1
  6. package/packages/core/dist/index.d.ts +2 -2
  7. package/packages/core/dist/index.js +1 -1
  8. package/packages/core/dist/types.d.ts +12 -2
  9. package/packages/core/dist/types.js +1 -1
  10. package/packages/indexer/dist/incremental-indexer.d.ts +16 -0
  11. package/packages/indexer/dist/incremental-indexer.js +1 -1
  12. package/packages/indexer/dist/index.d.ts +2 -1
  13. package/packages/indexer/dist/index.js +1 -1
  14. package/packages/indexer/dist/smart-index-scheduler.d.ts +36 -0
  15. package/packages/indexer/dist/smart-index-scheduler.js +1 -0
  16. package/packages/server/dist/config.d.ts +3 -1
  17. package/packages/server/dist/config.js +1 -1
  18. package/packages/server/dist/index.js +1 -1
  19. package/packages/server/dist/server.d.ts +6 -2
  20. package/packages/server/dist/server.js +2 -2
  21. package/packages/server/dist/tools/context.tools.d.ts +1 -1
  22. package/packages/server/dist/tools/context.tools.js +2 -2
  23. package/packages/server/dist/tools/graph.tool.js +7 -2
  24. package/packages/store/dist/graph-store.interface.d.ts +42 -1
  25. package/packages/store/dist/index.d.ts +2 -2
  26. package/packages/store/dist/sqlite-graph-store.d.ts +10 -1
  27. package/packages/store/dist/sqlite-graph-store.js +49 -17
  28. package/packages/tools/dist/graph-query.d.ts +14 -2
  29. package/packages/tools/dist/graph-query.js +1 -1
  30. package/packages/tools/dist/trace.d.ts +16 -2
  31. package/packages/tools/dist/trace.js +2 -2
  32. package/scaffold/definitions/bodies.mjs +24 -0
  33. package/scaffold/definitions/prompts.mjs +22 -3
  34. package/scaffold/definitions/protocols.mjs +22 -0
  35. package/scaffold/general/agents/Architect-Reviewer-Alpha.agent.md +6 -0
  36. package/scaffold/general/agents/Architect-Reviewer-Beta.agent.md +6 -0
  37. package/scaffold/general/agents/Code-Reviewer-Alpha.agent.md +6 -0
  38. package/scaffold/general/agents/Code-Reviewer-Beta.agent.md +6 -0
  39. package/scaffold/general/agents/Debugger.agent.md +6 -0
  40. package/scaffold/general/agents/Documenter.agent.md +6 -0
  41. package/scaffold/general/agents/Explorer.agent.md +6 -0
  42. package/scaffold/general/agents/Frontend.agent.md +6 -0
  43. package/scaffold/general/agents/Implementer.agent.md +6 -0
  44. package/scaffold/general/agents/Orchestrator.agent.md +69 -0
  45. package/scaffold/general/agents/Planner.agent.md +18 -0
  46. package/scaffold/general/agents/Refactor.agent.md +6 -0
  47. package/scaffold/general/agents/Researcher-Alpha.agent.md +6 -0
  48. package/scaffold/general/agents/Researcher-Beta.agent.md +6 -0
  49. package/scaffold/general/agents/Researcher-Delta.agent.md +6 -0
  50. package/scaffold/general/agents/Researcher-Gamma.agent.md +6 -0
  51. package/scaffold/general/agents/Security.agent.md +6 -0
  52. package/scaffold/general/agents/_shared/code-agent-base.md +22 -0
  53. package/scaffold/general/prompts/ask.prompt.md +1 -1
  54. package/scaffold/general/prompts/debug.prompt.md +1 -0
  55. package/scaffold/general/prompts/implement.prompt.md +1 -0
  56. package/scaffold/general/prompts/plan.prompt.md +1 -1
  57. package/scaffold/general/prompts/review.prompt.md +1 -1
  58. package/scaffold/general/skills/frontend-design/SKILL.md +225 -0
  59. package/scaffold/general/skills/react/SKILL.md +297 -0
  60. package/scaffold/general/skills/typescript/SKILL.md +393 -0
@@ -20,6 +20,8 @@ interface GraphNode {
20
20
  sourcePath?: string;
21
21
  /** ISO timestamp */
22
22
  createdAt?: string;
23
+ /** Community/cluster label from community detection */
24
+ community?: string;
23
25
  }
24
26
  /** An edge connecting two nodes */
25
27
  interface GraphEdge {
@@ -33,6 +35,8 @@ interface GraphEdge {
33
35
  type: string;
34
36
  /** Optional weight (0-1) */
35
37
  weight?: number;
38
+ /** Optional confidence score (0-1) for relationship certainty */
39
+ confidence?: number;
36
40
  /** Arbitrary properties */
37
41
  properties?: Record<string, unknown>;
38
42
  }
@@ -69,6 +73,27 @@ interface GraphValidationResult {
69
73
  }>;
70
74
  stats: GraphStats;
71
75
  }
76
+ /** Depth-grouped traversal result — nodes bucketed by distance from start */
77
+ interface DepthGroupedResult {
78
+ [depth: number]: GraphNode[];
79
+ }
80
+ /** Execution flow / process info */
81
+ interface ProcessInfo {
82
+ id: string;
83
+ entryNodeId: string;
84
+ label: string;
85
+ properties: Record<string, unknown>;
86
+ steps: string[];
87
+ createdAt?: string;
88
+ }
89
+ /** 360-degree symbol view — full context around a node */
90
+ interface Symbol360 {
91
+ node: GraphNode;
92
+ incoming: GraphEdge[];
93
+ outgoing: GraphEdge[];
94
+ community: string | null;
95
+ processes: ProcessInfo[];
96
+ }
72
97
  /** Knowledge graph store interface */
73
98
  interface IGraphStore {
74
99
  /** Initialize the store (create tables, indices) */
@@ -111,8 +136,24 @@ interface IGraphStore {
111
136
  getStats(): Promise<GraphStats>;
112
137
  /** Validate graph integrity — check for orphan nodes and dangling edges */
113
138
  validate(): Promise<GraphValidationResult>;
139
+ /** Set community label on a node */
140
+ setNodeCommunity(nodeId: string, community: string): Promise<void>;
141
+ /** Run label-propagation community detection, returns community -> nodeId[] map */
142
+ detectCommunities(): Promise<Record<string, string[]>>;
143
+ /** Trace execution flow from entry node via outgoing 'calls' edges, store as process */
144
+ traceProcess(entryNodeId: string, label: string): Promise<ProcessInfo>;
145
+ /** List processes, optionally filtered to those containing a specific node */
146
+ getProcesses(nodeId?: string): Promise<ProcessInfo[]>;
147
+ /** Delete a process and its steps */
148
+ deleteProcess(processId: string): Promise<void>;
149
+ /** Traverse with results grouped by depth: { 1: [...], 2: [...], 3: [...] } */
150
+ depthGroupedTraversal(startId: string, maxDepth?: number, options?: GraphTraversalOptions): Promise<DepthGroupedResult>;
151
+ /** Cohesion score for a community: internal edges / (internal + external edges) */
152
+ getCohesionScore(community: string): Promise<number>;
153
+ /** 360-degree view of a node: incoming, outgoing, community, processes */
154
+ getSymbol360(nodeId: string): Promise<Symbol360>;
114
155
  /** Close the store */
115
156
  close(): Promise<void>;
116
157
  }
117
158
  //#endregion
118
- export { GraphEdge, GraphNode, GraphStats, GraphTraversalOptions, GraphTraversalResult, GraphValidationResult, IGraphStore };
159
+ export { DepthGroupedResult, GraphEdge, GraphNode, GraphStats, GraphTraversalOptions, GraphTraversalResult, GraphValidationResult, IGraphStore, ProcessInfo, Symbol360 };
@@ -1,6 +1,6 @@
1
- import { GraphEdge, GraphNode, GraphStats, GraphTraversalOptions, GraphTraversalResult, GraphValidationResult, IGraphStore } from "./graph-store.interface.js";
1
+ import { DepthGroupedResult, GraphEdge, GraphNode, GraphStats, GraphTraversalOptions, GraphTraversalResult, GraphValidationResult, IGraphStore, ProcessInfo, Symbol360 } from "./graph-store.interface.js";
2
2
  import { IKnowledgeStore, SearchOptions } from "./store.interface.js";
3
3
  import { LanceStore } from "./lance-store.js";
4
4
  import { SqliteGraphStore } from "./sqlite-graph-store.js";
5
5
  import { StoreBackend, StoreConfig, createStore } from "./store-factory.js";
6
- export { type GraphEdge, type GraphNode, type GraphStats, type GraphTraversalOptions, type GraphTraversalResult, type GraphValidationResult, type IGraphStore, type IKnowledgeStore, LanceStore, type SearchOptions, SqliteGraphStore, type StoreBackend, type StoreConfig, createStore };
6
+ export { type DepthGroupedResult, type GraphEdge, type GraphNode, type GraphStats, type GraphTraversalOptions, type GraphTraversalResult, type GraphValidationResult, type IGraphStore, type IKnowledgeStore, LanceStore, type ProcessInfo, type SearchOptions, SqliteGraphStore, type StoreBackend, type StoreConfig, type Symbol360, createStore };
@@ -1,4 +1,4 @@
1
- import { GraphEdge, GraphNode, GraphStats, GraphTraversalOptions, GraphTraversalResult, GraphValidationResult, IGraphStore } from "./graph-store.interface.js";
1
+ import { DepthGroupedResult, GraphEdge, GraphNode, GraphStats, GraphTraversalOptions, GraphTraversalResult, GraphValidationResult, IGraphStore, ProcessInfo, Symbol360 } from "./graph-store.interface.js";
2
2
 
3
3
  //#region packages/store/src/sqlite-graph-store.d.ts
4
4
  declare class SqliteGraphStore implements IGraphStore {
@@ -12,6 +12,7 @@ declare class SqliteGraphStore implements IGraphStore {
12
12
  initialize(): Promise<void>;
13
13
  private configureDb;
14
14
  private createTables;
15
+ private migrateSchema;
15
16
  private ensureDb;
16
17
  private persist;
17
18
  private markDirty;
@@ -42,6 +43,14 @@ declare class SqliteGraphStore implements IGraphStore {
42
43
  clear(): Promise<void>;
43
44
  getStats(): Promise<GraphStats>;
44
45
  validate(): Promise<GraphValidationResult>;
46
+ setNodeCommunity(nodeId: string, community: string): Promise<void>;
47
+ detectCommunities(): Promise<Record<string, string[]>>;
48
+ traceProcess(entryNodeId: string, label: string): Promise<ProcessInfo>;
49
+ getProcesses(nodeId?: string): Promise<ProcessInfo[]>;
50
+ deleteProcess(processId: string): Promise<void>;
51
+ depthGroupedTraversal(startId: string, maxDepth?: number, options?: GraphTraversalOptions): Promise<DepthGroupedResult>;
52
+ getCohesionScore(community: string): Promise<number>;
53
+ getSymbol360(nodeId: string): Promise<Symbol360>;
45
54
  close(): Promise<void>;
46
55
  }
47
56
  //#endregion
@@ -1,4 +1,4 @@
1
- import{AIKIT_PATHS as e}from"../../core/dist/index.js";import{existsSync as t,mkdirSync as n,readFileSync as r,writeFileSync as i}from"node:fs";import{dirname as a,join as o}from"node:path";var s=class{db=null;dbPath;sqlFactory=null;dirty=!1;constructor(t){this.dbPath=o(t?.path??e.data,`graph.db`)}async initialize(){let e=a(this.dbPath);t(e)||n(e,{recursive:!0});let i=(await import(`sql.js`)).default,o=await i();if(this.sqlFactory=o,t(this.dbPath)){let e=r(this.dbPath);this.db=new o.Database(e)}else this.db=new o.Database;this.configureDb(this.db),this.createTables(this.db),this.persist()}configureDb(e){e.run(`PRAGMA journal_mode = WAL`),e.exec(`PRAGMA foreign_keys = ON;`)}createTables(e){e.run(`
1
+ import{AIKIT_PATHS as e}from"../../core/dist/index.js";import{existsSync as t,mkdirSync as n,readFileSync as r,writeFileSync as i}from"node:fs";import{dirname as a,join as o}from"node:path";var s=class{db=null;dbPath;sqlFactory=null;dirty=!1;constructor(t){this.dbPath=o(t?.path??e.data,`graph.db`)}async initialize(){let e=a(this.dbPath);t(e)||n(e,{recursive:!0});let i=(await import(`sql.js`)).default,o=await i();if(this.sqlFactory=o,t(this.dbPath)){let e=r(this.dbPath);this.db=new o.Database(e)}else this.db=new o.Database;this.configureDb(this.db),this.createTables(this.db),this.migrateSchema(this.db),this.persist()}configureDb(e){e.run(`PRAGMA journal_mode = WAL`),e.exec(`PRAGMA foreign_keys = ON;`)}createTables(e){e.run(`
2
2
  CREATE TABLE IF NOT EXISTS nodes (
3
3
  id TEXT PRIMARY KEY,
4
4
  type TEXT NOT NULL,
@@ -6,7 +6,8 @@ import{AIKIT_PATHS as e}from"../../core/dist/index.js";import{existsSync as t,mk
6
6
  properties TEXT NOT NULL DEFAULT '{}',
7
7
  source_record_id TEXT,
8
8
  source_path TEXT,
9
- created_at TEXT NOT NULL DEFAULT (datetime('now'))
9
+ created_at TEXT NOT NULL DEFAULT (datetime('now')),
10
+ community TEXT
10
11
  )
11
12
  `),e.run(`
12
13
  CREATE TABLE IF NOT EXISTS edges (
@@ -15,35 +16,60 @@ import{AIKIT_PATHS as e}from"../../core/dist/index.js";import{existsSync as t,mk
15
16
  to_id TEXT NOT NULL,
16
17
  type TEXT NOT NULL,
17
18
  weight REAL DEFAULT 1.0,
19
+ confidence REAL DEFAULT 1.0,
18
20
  properties TEXT NOT NULL DEFAULT '{}',
19
21
  FOREIGN KEY (from_id) REFERENCES nodes(id) ON DELETE CASCADE,
20
22
  FOREIGN KEY (to_id) REFERENCES nodes(id) ON DELETE CASCADE
21
23
  )
22
- `),e.run(`CREATE INDEX IF NOT EXISTS idx_nodes_type ON nodes(type)`),e.run(`CREATE INDEX IF NOT EXISTS idx_nodes_name ON nodes(name)`),e.run(`CREATE INDEX IF NOT EXISTS idx_nodes_source_path ON nodes(source_path)`),e.run(`CREATE INDEX IF NOT EXISTS idx_edges_from ON edges(from_id)`),e.run(`CREATE INDEX IF NOT EXISTS idx_edges_to ON edges(to_id)`),e.run(`CREATE INDEX IF NOT EXISTS idx_edges_type ON edges(type)`)}ensureDb(){if(!this.db){if(!this.sqlFactory)throw Error(`Graph store not initialized — call initialize() first`);this.db=t(this.dbPath)?new this.sqlFactory.Database(r(this.dbPath)):new this.sqlFactory.Database,this.configureDb(this.db),this.createTables(this.db)}return this.db}persist(){if(!this.db)return;let e=this.db.export();try{i(this.dbPath,Buffer.from(e))}finally{this.db.exec(`PRAGMA foreign_keys = ON;`)}this.dirty=!1}markDirty(){this.dirty=!0}flushIfDirty(){this.dirty&&this.persist()}query(e,t=[]){let n=this.ensureDb().prepare(e);n.bind(t);let r=[];try{for(;n.step();)r.push(n.getAsObject())}finally{n.free()}return r}run(e,t=[]){this.ensureDb().run(e,t)}async upsertNode(e){this.run(`INSERT INTO nodes (id, type, name, properties, source_record_id, source_path, created_at)
23
- VALUES (?, ?, ?, ?, ?, ?, ?)
24
+ `),e.run(`CREATE INDEX IF NOT EXISTS idx_nodes_type ON nodes(type)`),e.run(`CREATE INDEX IF NOT EXISTS idx_nodes_name ON nodes(name)`),e.run(`CREATE INDEX IF NOT EXISTS idx_nodes_source_path ON nodes(source_path)`),e.run(`CREATE INDEX IF NOT EXISTS idx_edges_from ON edges(from_id)`),e.run(`CREATE INDEX IF NOT EXISTS idx_edges_to ON edges(to_id)`),e.run(`CREATE INDEX IF NOT EXISTS idx_edges_type ON edges(type)`),e.run(`
25
+ CREATE TABLE IF NOT EXISTS processes (
26
+ id TEXT PRIMARY KEY,
27
+ entry_node_id TEXT NOT NULL,
28
+ label TEXT NOT NULL,
29
+ properties TEXT NOT NULL DEFAULT '{}',
30
+ created_at TEXT NOT NULL DEFAULT (datetime('now')),
31
+ FOREIGN KEY (entry_node_id) REFERENCES nodes(id) ON DELETE CASCADE
32
+ )
33
+ `),e.run(`
34
+ CREATE TABLE IF NOT EXISTS process_steps (
35
+ process_id TEXT NOT NULL,
36
+ node_id TEXT NOT NULL,
37
+ step_order INTEGER NOT NULL,
38
+ PRIMARY KEY (process_id, node_id),
39
+ FOREIGN KEY (process_id) REFERENCES processes(id) ON DELETE CASCADE,
40
+ FOREIGN KEY (node_id) REFERENCES nodes(id) ON DELETE CASCADE
41
+ )
42
+ `),e.run(`CREATE INDEX IF NOT EXISTS idx_process_steps_node ON process_steps(node_id)`)}migrateSchema(e){for(let t of[`ALTER TABLE edges ADD COLUMN confidence REAL DEFAULT 1.0`,`ALTER TABLE nodes ADD COLUMN community TEXT`])try{e.run(t)}catch{}e.run(`CREATE INDEX IF NOT EXISTS idx_nodes_community ON nodes(community)`)}ensureDb(){if(!this.db){if(!this.sqlFactory)throw Error(`Graph store not initialized — call initialize() first`);this.db=t(this.dbPath)?new this.sqlFactory.Database(r(this.dbPath)):new this.sqlFactory.Database,this.configureDb(this.db),this.createTables(this.db),this.migrateSchema(this.db)}return this.db}persist(){if(!this.db)return;let e=this.db.export();try{i(this.dbPath,Buffer.from(e))}finally{this.db.exec(`PRAGMA foreign_keys = ON;`)}this.dirty=!1}markDirty(){this.dirty=!0}flushIfDirty(){this.dirty&&this.persist()}query(e,t=[]){let n=this.ensureDb().prepare(e);n.bind(t);let r=[];try{for(;n.step();)r.push(n.getAsObject())}finally{n.free()}return r}run(e,t=[]){this.ensureDb().run(e,t)}async upsertNode(e){this.run(`INSERT INTO nodes (id, type, name, properties, source_record_id, source_path, created_at, community)
43
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?)
24
44
  ON CONFLICT(id) DO UPDATE SET
25
45
  type = excluded.type, name = excluded.name, properties = excluded.properties,
26
- source_record_id = excluded.source_record_id, source_path = excluded.source_path`,[e.id,e.type,e.name,JSON.stringify(e.properties),e.sourceRecordId??null,e.sourcePath??null,e.createdAt??new Date().toISOString()]),this.markDirty(),this.flushIfDirty()}async upsertEdge(e){this.run(`INSERT INTO edges (id, from_id, to_id, type, weight, properties)
27
- VALUES (?, ?, ?, ?, ?, ?)
46
+ source_record_id = excluded.source_record_id, source_path = excluded.source_path,
47
+ community = excluded.community`,[e.id,e.type,e.name,JSON.stringify(e.properties),e.sourceRecordId??null,e.sourcePath??null,e.createdAt??new Date().toISOString(),e.community??null]),this.markDirty(),this.flushIfDirty()}async upsertEdge(e){this.run(`INSERT INTO edges (id, from_id, to_id, type, weight, confidence, properties)
48
+ VALUES (?, ?, ?, ?, ?, ?, ?)
28
49
  ON CONFLICT(id) DO UPDATE SET
29
50
  from_id = excluded.from_id, to_id = excluded.to_id,
30
- type = excluded.type, weight = excluded.weight, properties = excluded.properties`,[e.id,e.fromId,e.toId,e.type,e.weight??1,JSON.stringify(e.properties??{})]),this.markDirty(),this.flushIfDirty()}async upsertNodes(e){if(e.length===0)return;let t=this.ensureDb();t.run(`BEGIN TRANSACTION`);try{for(let t of e)this.run(`INSERT INTO nodes (id, type, name, properties, source_record_id, source_path, created_at)
31
- VALUES (?, ?, ?, ?, ?, ?, ?)
51
+ type = excluded.type, weight = excluded.weight, confidence = excluded.confidence, properties = excluded.properties`,[e.id,e.fromId,e.toId,e.type,e.weight??1,e.confidence??1,JSON.stringify(e.properties??{})]),this.markDirty(),this.flushIfDirty()}async upsertNodes(e){if(e.length===0)return;let t=this.ensureDb();t.run(`BEGIN TRANSACTION`);try{for(let t of e)this.run(`INSERT INTO nodes (id, type, name, properties, source_record_id, source_path, created_at, community)
52
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?)
32
53
  ON CONFLICT(id) DO UPDATE SET
33
54
  type = excluded.type, name = excluded.name, properties = excluded.properties,
34
- source_record_id = excluded.source_record_id, source_path = excluded.source_path`,[t.id,t.type,t.name,JSON.stringify(t.properties),t.sourceRecordId??null,t.sourcePath??null,t.createdAt??new Date().toISOString()]);t.run(`COMMIT`)}catch(e){throw t.run(`ROLLBACK`),e}this.markDirty(),this.flushIfDirty()}async upsertEdges(e){if(e.length===0)return;let t=this.ensureDb();t.run(`PRAGMA foreign_keys = OFF`),t.run(`BEGIN TRANSACTION`);try{for(let t of e)this.run(`INSERT INTO edges (id, from_id, to_id, type, weight, properties)
35
- VALUES (?, ?, ?, ?, ?, ?)
55
+ source_record_id = excluded.source_record_id, source_path = excluded.source_path,
56
+ community = excluded.community`,[t.id,t.type,t.name,JSON.stringify(t.properties),t.sourceRecordId??null,t.sourcePath??null,t.createdAt??new Date().toISOString(),t.community??null]);t.run(`COMMIT`)}catch(e){throw t.run(`ROLLBACK`),e}this.markDirty(),this.flushIfDirty()}async upsertEdges(e){if(e.length===0)return;let t=this.ensureDb();t.run(`PRAGMA foreign_keys = OFF`),t.run(`BEGIN TRANSACTION`);try{for(let t of e)this.run(`INSERT INTO edges (id, from_id, to_id, type, weight, confidence, properties)
57
+ VALUES (?, ?, ?, ?, ?, ?, ?)
36
58
  ON CONFLICT(id) DO UPDATE SET
37
59
  from_id = excluded.from_id, to_id = excluded.to_id,
38
- type = excluded.type, weight = excluded.weight, properties = excluded.properties`,[t.id,t.fromId,t.toId,t.type,t.weight??1,JSON.stringify(t.properties??{})]);t.run(`COMMIT`)}catch(e){throw t.run(`ROLLBACK`),e}finally{t.run(`PRAGMA foreign_keys = ON`)}this.markDirty(),this.flushIfDirty()}async getNode(e){let t=this.query(`SELECT * FROM nodes WHERE id = ?`,[e]);return t.length>0?l(t[0]):null}async getNeighbors(e,t){let n=t?.direction??`both`,r=t?.edgeType,i=t?.limit??50,a=[],o=[],s=new Set;if(n===`outgoing`||n===`both`){let t=`
39
- SELECT e.id AS edge_id, e.from_id, e.to_id, e.type AS edge_type, e.weight, e.properties AS edge_props,
60
+ type = excluded.type, weight = excluded.weight, confidence = excluded.confidence, properties = excluded.properties`,[t.id,t.fromId,t.toId,t.type,t.weight??1,t.confidence??1,JSON.stringify(t.properties??{})]);t.run(`COMMIT`)}catch(e){throw t.run(`ROLLBACK`),e}finally{t.run(`PRAGMA foreign_keys = ON`)}this.markDirty(),this.flushIfDirty()}async getNode(e){let t=this.query(`SELECT * FROM nodes WHERE id = ?`,[e]);return t.length>0?l(t[0]):null}async getNeighbors(e,t){let n=t?.direction??`both`,r=t?.edgeType,i=t?.limit??50,a=[],o=[],s=new Set;if(n===`outgoing`||n===`both`){let t=`
61
+ SELECT e.id AS edge_id, e.from_id, e.to_id, e.type AS edge_type, e.weight,
62
+ e.confidence AS edge_confidence, e.properties AS edge_props,
40
63
  n.id AS node_id, n.type AS node_type, n.name AS node_name, n.properties AS node_props,
41
- n.source_record_id AS node_src_rec, n.source_path AS node_src_path, n.created_at AS node_created
64
+ n.source_record_id AS node_src_rec, n.source_path AS node_src_path,
65
+ n.created_at AS node_created, n.community AS node_community
42
66
  FROM edges e JOIN nodes n ON e.to_id = n.id WHERE e.from_id = ?`,n=[e];r&&(t+=` AND e.type = ?`,n.push(r)),t+=` LIMIT ?`,n.push(i);let c=this.query(t,n);for(let e of c)o.push(d(e)),s.has(e.node_id)||(s.add(e.node_id),a.push(f(e)))}if(n===`incoming`||n===`both`){let t=`
43
- SELECT e.id AS edge_id, e.from_id, e.to_id, e.type AS edge_type, e.weight, e.properties AS edge_props,
67
+ SELECT e.id AS edge_id, e.from_id, e.to_id, e.type AS edge_type, e.weight,
68
+ e.confidence AS edge_confidence, e.properties AS edge_props,
44
69
  n.id AS node_id, n.type AS node_type, n.name AS node_name, n.properties AS node_props,
45
- n.source_record_id AS node_src_rec, n.source_path AS node_src_path, n.created_at AS node_created
46
- FROM edges e JOIN nodes n ON e.from_id = n.id WHERE e.to_id = ?`,n=[e];r&&(t+=` AND e.type = ?`,n.push(r)),t+=` LIMIT ?`,n.push(i);let c=this.query(t,n);for(let e of c)o.push(d(e)),s.has(e.node_id)||(s.add(e.node_id),a.push(f(e)))}return{nodes:a,edges:o}}async traverse(e,t){let n=t?.maxDepth??2,r=t?.direction??`both`,i=t?.edgeType,a=t?.limit??50,o=new Map,s=new Map,c=new Set,l=[{nodeId:e,depth:0}];for(;l.length>0&&o.size<a;){let e=l.shift();if(!e||c.has(e.nodeId)||e.depth>n)continue;c.add(e.nodeId);let t=await this.getNeighbors(e.nodeId,{direction:r,edgeType:i,limit:a-o.size});for(let r of t.nodes)o.has(r.id)||(o.set(r.id,r),e.depth+1<n&&l.push({nodeId:r.id,depth:e.depth+1}));for(let e of t.edges)s.set(e.id,e)}return{nodes:[...o.values()],edges:[...s.values()]}}async findNodes(e){let t=[],n=[];e.type&&(t.push(`type = ?`),n.push(e.type)),e.namePattern&&(t.push(`name LIKE ?`),n.push(`%${e.namePattern}%`)),e.sourcePath&&(t.push(`source_path = ?`),n.push(e.sourcePath));let r=t.length>0?`WHERE ${t.join(` AND `)}`:``,i=e.limit??100;return this.query(`SELECT * FROM nodes ${r} LIMIT ?`,[...n,i]).map(e=>l(e))}async findEdges(e){let t=[],n=[];e.type&&(t.push(`type = ?`),n.push(e.type)),e.fromId&&(t.push(`from_id = ?`),n.push(e.fromId)),e.toId&&(t.push(`to_id = ?`),n.push(e.toId));let r=t.length>0?`WHERE ${t.join(` AND `)}`:``,i=e.limit??100;return this.query(`SELECT * FROM edges ${r} LIMIT ?`,[...n,i]).map(e=>u(e))}async deleteNode(e){let t=this.ensureDb();t.run(`BEGIN TRANSACTION`);try{this.run(`DELETE FROM edges WHERE from_id = ? OR to_id = ?`,[e,e]),this.run(`DELETE FROM nodes WHERE id = ?`,[e]),t.run(`COMMIT`)}catch(e){throw t.run(`ROLLBACK`),e}this.markDirty(),this.flushIfDirty()}async deleteBySourcePath(e){let t=this.query(`SELECT id FROM nodes WHERE source_path = ?`,[e]);if(t.length===0)return 0;let n=this.ensureDb();n.run(`BEGIN TRANSACTION`);try{for(let e of t)this.run(`DELETE FROM edges WHERE from_id = ? 
OR to_id = ?`,[e.id,e.id]);this.run(`DELETE FROM nodes WHERE source_path = ?`,[e]),n.run(`COMMIT`)}catch(e){throw n.run(`ROLLBACK`),e}return this.markDirty(),this.flushIfDirty(),t.length}async clear(){this.run(`DELETE FROM edges`),this.run(`DELETE FROM nodes`),this.markDirty(),this.flushIfDirty()}async getStats(){let e=this.query(`SELECT COUNT(*) as count FROM nodes`)[0]?.count??0,t=this.query(`SELECT COUNT(*) as count FROM edges`)[0]?.count??0,n=this.query(`SELECT type, COUNT(*) as count FROM nodes GROUP BY type`),r={};for(let e of n)r[e.type]=e.count;let i=this.query(`SELECT type, COUNT(*) as count FROM edges GROUP BY type`),a={};for(let e of i)a[e.type]=e.count;return{nodeCount:e,edgeCount:t,nodeTypes:r,edgeTypes:a}}async validate(){let e=await this.getStats(),t=this.query(`SELECT e.id AS edgeId,
70
+ n.source_record_id AS node_src_rec, n.source_path AS node_src_path,
71
+ n.created_at AS node_created, n.community AS node_community
72
+ FROM edges e JOIN nodes n ON e.from_id = n.id WHERE e.to_id = ?`,n=[e];r&&(t+=` AND e.type = ?`,n.push(r)),t+=` LIMIT ?`,n.push(i);let c=this.query(t,n);for(let e of c)o.push(d(e)),s.has(e.node_id)||(s.add(e.node_id),a.push(f(e)))}return{nodes:a,edges:o}}async traverse(e,t){let n=t?.maxDepth??2,r=t?.direction??`both`,i=t?.edgeType,a=t?.limit??50,o=new Map,s=new Map,c=new Set,l=[{nodeId:e,depth:0}];for(;l.length>0&&o.size<a;){let e=l.shift();if(!e||c.has(e.nodeId)||e.depth>n)continue;c.add(e.nodeId);let t=await this.getNeighbors(e.nodeId,{direction:r,edgeType:i,limit:a-o.size});for(let r of t.nodes)o.has(r.id)||(o.set(r.id,r),e.depth+1<n&&l.push({nodeId:r.id,depth:e.depth+1}));for(let e of t.edges)s.set(e.id,e)}return{nodes:[...o.values()],edges:[...s.values()]}}async findNodes(e){let t=[],n=[];e.type&&(t.push(`type = ?`),n.push(e.type)),e.namePattern&&(t.push(`name LIKE ?`),n.push(`%${e.namePattern}%`)),e.sourcePath&&(t.push(`source_path = ?`),n.push(e.sourcePath));let r=t.length>0?`WHERE ${t.join(` AND `)}`:``,i=e.limit??100;return this.query(`SELECT * FROM nodes ${r} LIMIT ?`,[...n,i]).map(e=>l(e))}async findEdges(e){let t=[],n=[];e.type&&(t.push(`type = ?`),n.push(e.type)),e.fromId&&(t.push(`from_id = ?`),n.push(e.fromId)),e.toId&&(t.push(`to_id = ?`),n.push(e.toId));let r=t.length>0?`WHERE ${t.join(` AND `)}`:``,i=e.limit??100;return this.query(`SELECT * FROM edges ${r} LIMIT ?`,[...n,i]).map(e=>u(e))}async deleteNode(e){let t=this.ensureDb();t.run(`BEGIN TRANSACTION`);try{this.run(`DELETE FROM edges WHERE from_id = ? OR to_id = ?`,[e,e]),this.run(`DELETE FROM nodes WHERE id = ?`,[e]),t.run(`COMMIT`)}catch(e){throw t.run(`ROLLBACK`),e}this.markDirty(),this.flushIfDirty()}async deleteBySourcePath(e){let t=this.query(`SELECT id FROM nodes WHERE source_path = ?`,[e]);if(t.length===0)return 0;let n=this.ensureDb();n.run(`BEGIN TRANSACTION`);try{for(let e of t)this.run(`DELETE FROM edges WHERE from_id = ? 
OR to_id = ?`,[e.id,e.id]);this.run(`DELETE FROM nodes WHERE source_path = ?`,[e]),n.run(`COMMIT`)}catch(e){throw n.run(`ROLLBACK`),e}return this.markDirty(),this.flushIfDirty(),t.length}async clear(){this.run(`DELETE FROM process_steps`),this.run(`DELETE FROM processes`),this.run(`DELETE FROM edges`),this.run(`DELETE FROM nodes`),this.markDirty(),this.flushIfDirty()}async getStats(){let e=this.query(`SELECT COUNT(*) as count FROM nodes`)[0]?.count??0,t=this.query(`SELECT COUNT(*) as count FROM edges`)[0]?.count??0,n=this.query(`SELECT type, COUNT(*) as count FROM nodes GROUP BY type`),r={};for(let e of n)r[e.type]=e.count;let i=this.query(`SELECT type, COUNT(*) as count FROM edges GROUP BY type`),a={};for(let e of i)a[e.type]=e.count;return{nodeCount:e,edgeCount:t,nodeTypes:r,edgeTypes:a}}async validate(){let e=await this.getStats(),t=this.query(`SELECT e.id AS edgeId,
47
73
  CASE
48
74
  WHEN n1.id IS NULL THEN e.from_id
49
75
  WHEN n2.id IS NULL THEN e.to_id
@@ -55,4 +81,10 @@ import{AIKIT_PATHS as e}from"../../core/dist/index.js";import{existsSync as t,mk
55
81
  FROM nodes n
56
82
  LEFT JOIN edges e1 ON n.id = e1.from_id
57
83
  LEFT JOIN edges e2 ON n.id = e2.to_id
58
- WHERE e1.id IS NULL AND e2.id IS NULL`).map(e=>e.id);return{valid:t.length===0,orphanNodes:n,danglingEdges:t,stats:e}}async close(){this.db&&=(this.flushIfDirty(),this.db.close(),null)}};function c(e){if(!e)return{};try{return JSON.parse(e)}catch{return{}}}function l(e){return{id:e.id,type:e.type,name:e.name,properties:c(e.properties),sourceRecordId:e.source_record_id??void 0,sourcePath:e.source_path??void 0,createdAt:e.created_at}}function u(e){return{id:e.id,fromId:e.from_id,toId:e.to_id,type:e.type,weight:e.weight??1,properties:c(e.properties)}}function d(e){return{id:e.edge_id,fromId:e.from_id,toId:e.to_id,type:e.edge_type,weight:e.weight??1,properties:c(e.edge_props??`{}`)}}function f(e){return{id:e.node_id,type:e.node_type,name:e.node_name,properties:c(e.node_props??`{}`),sourceRecordId:e.node_src_rec??void 0,sourcePath:e.node_src_path??void 0,createdAt:e.node_created}}export{s as SqliteGraphStore};
84
+ WHERE e1.id IS NULL AND e2.id IS NULL`).map(e=>e.id);return{valid:t.length===0,orphanNodes:n,danglingEdges:t,stats:e}}async setNodeCommunity(e,t){this.run(`UPDATE nodes SET community = ? WHERE id = ?`,[t,e]),this.markDirty(),this.flushIfDirty()}async detectCommunities(){let e=this.query(`SELECT id FROM nodes`),t=new Map;for(let n of e)t.set(n.id,n.id);for(let n=0;n<10;n++){let n=!1,r=new Map(t);for(let i of e){let e=this.query(`SELECT to_id AS neighbor_id FROM edges WHERE from_id = ?
85
+ UNION
86
+ SELECT from_id AS neighbor_id FROM edges WHERE to_id = ?`,[i.id,i.id]),a=new Map,o=t.get(i.id)??i.id;a.set(o,1);for(let n of e){let e=t.get(n.neighbor_id)??n.neighbor_id;a.set(e,(a.get(e)??0)+1)}let s=o,c=0;for(let[e,t]of a)(t>c||t===c&&e<s)&&(c=t,s=e);o!==s&&(r.set(i.id,s),n=!0)}t.clear();for(let[e,n]of r)t.set(e,n);if(!n)break}let n=this.ensureDb();n.run(`BEGIN TRANSACTION`);try{for(let[e,n]of t)this.run(`UPDATE nodes SET community = ? WHERE id = ?`,[n,e]);n.run(`COMMIT`)}catch(e){throw n.run(`ROLLBACK`),e}this.markDirty(),this.flushIfDirty();let r={};for(let[e,n]of t)r[n]||(r[n]=[]),r[n].push(e);return r}async traceProcess(e,t){let n=[],r=new Set,i=[e];for(;i.length>0;){let e=i.shift();if(!e||r.has(e))continue;r.add(e),n.push(e);let t=this.query(`SELECT to_id FROM edges WHERE from_id = ? AND type = 'calls'`,[e]);for(let e of t)r.has(e.to_id)||i.push(e.to_id)}let a=`proc_${Date.now()}_${Math.random().toString(36).slice(2,8)}`,o=new Date().toISOString(),s=this.ensureDb();s.run(`BEGIN TRANSACTION`);try{this.run(`INSERT INTO processes (id, entry_node_id, label, properties, created_at)
87
+ VALUES (?, ?, ?, '{}', ?)`,[a,e,t,o]);for(let e=0;e<n.length;e++)this.run(`INSERT INTO process_steps (process_id, node_id, step_order) VALUES (?, ?, ?)`,[a,n[e],e]);s.run(`COMMIT`)}catch(e){throw s.run(`ROLLBACK`),e}return this.markDirty(),this.flushIfDirty(),{id:a,entryNodeId:e,label:t,properties:{},steps:n,createdAt:o}}async getProcesses(e){let t;t=e?this.query(`SELECT DISTINCT p.id, p.entry_node_id, p.label, p.properties, p.created_at
88
+ FROM processes p
89
+ JOIN process_steps ps ON p.id = ps.process_id
90
+ WHERE ps.node_id = ?`,[e]):this.query(`SELECT * FROM processes`);let n=[];for(let e of t){let t=this.query(`SELECT node_id FROM process_steps WHERE process_id = ? ORDER BY step_order`,[e.id]);n.push({id:e.id,entryNodeId:e.entry_node_id,label:e.label,properties:c(e.properties),steps:t.map(e=>e.node_id),createdAt:e.created_at})}return n}async deleteProcess(e){let t=this.ensureDb();t.run(`BEGIN TRANSACTION`);try{this.run(`DELETE FROM process_steps WHERE process_id = ?`,[e]),this.run(`DELETE FROM processes WHERE id = ?`,[e]),t.run(`COMMIT`)}catch(e){throw t.run(`ROLLBACK`),e}this.markDirty(),this.flushIfDirty()}async depthGroupedTraversal(e,t=3,n){let r=n?.direction??`both`,i=n?.edgeType,a=n?.limit??100,o={},s=new Set;s.add(e);let c=[e];for(let e=1;e<=t;e++){let t=[],n=[];for(let e of c){let o=await this.getNeighbors(e,{direction:r,edgeType:i,limit:a});for(let e of o.nodes)s.has(e.id)||(s.add(e.id),t.push(e.id),n.push(e))}if(n.length>0&&(o[e]=n),c=t,c.length===0||s.size>=a)break}return o}async getCohesionScore(e){let t=this.query(`SELECT id FROM nodes WHERE community = ?`,[e]);if(t.length===0)return 0;let n=new Set(t.map(e=>e.id)),r=t.map(()=>`?`).join(`,`),i=t.map(e=>e.id),a=this.query(`SELECT from_id, to_id FROM edges WHERE from_id IN (${r}) OR to_id IN (${r})`,[...i,...i]);if(a.length===0)return 0;let o=0;for(let e of a)n.has(e.from_id)&&n.has(e.to_id)&&o++;return o/a.length}async getSymbol360(e){let t=await this.getNode(e);if(!t)throw Error(`Node '${e}' not found`);let n=await this.findEdges({toId:e}),r=await this.findEdges({fromId:e}),i=await this.getProcesses(e);return{node:t,incoming:n,outgoing:r,community:t.community??null,processes:i}}async close(){this.db&&=(this.flushIfDirty(),this.db.close(),null)}};function c(e){if(!e)return{};try{return JSON.parse(e)}catch{return{}}}function l(e){return{id:e.id,type:e.type,name:e.name,properties:c(e.properties),sourceRecordId:e.source_record_id??void 0,sourcePath:e.source_path??void 
0,createdAt:e.created_at,community:e.community??void 0}}function u(e){return{id:e.id,fromId:e.from_id,toId:e.to_id,type:e.type,weight:e.weight??1,confidence:e.confidence??1,properties:c(e.properties)}}function d(e){return{id:e.edge_id,fromId:e.from_id,toId:e.to_id,type:e.edge_type,weight:e.weight??1,confidence:e.edge_confidence??1,properties:c(e.edge_props??`{}`)}}function f(e){return{id:e.node_id,type:e.node_type,name:e.node_name,properties:c(e.node_props??`{}`),sourceRecordId:e.node_src_rec??void 0,sourcePath:e.node_src_path??void 0,createdAt:e.node_created,community:e.node_community??void 0}}export{s as SqliteGraphStore};
@@ -1,9 +1,9 @@
1
- import { GraphEdge, GraphNode, GraphStats, GraphTraversalResult, GraphValidationResult, IGraphStore } from "../../store/dist/index.js";
1
+ import { DepthGroupedResult, GraphEdge, GraphNode, GraphStats, GraphTraversalResult, GraphValidationResult, IGraphStore, ProcessInfo, Symbol360 } from "../../store/dist/index.js";
2
2
 
3
3
  //#region packages/tools/src/graph-query.d.ts
4
4
  interface GraphQueryOptions {
5
5
  /** Action: query nodes, traverse from a node, get stats, or add data */
6
- action: 'find_nodes' | 'find_edges' | 'neighbors' | 'traverse' | 'stats' | 'validate' | 'add' | 'delete' | 'clear';
6
+ action: 'find_nodes' | 'find_edges' | 'neighbors' | 'traverse' | 'stats' | 'validate' | 'add' | 'delete' | 'clear' | 'detect_communities' | 'set_community' | 'trace_process' | 'list_processes' | 'delete_process' | 'depth_traverse' | 'cohesion' | 'symbol360';
7
7
  /** Node type filter (for find_nodes) */
8
8
  nodeType?: string;
9
9
  /** Name pattern (LIKE %pattern%) for find_nodes */
@@ -42,6 +42,12 @@ interface GraphQueryOptions {
42
42
  weight?: number;
43
43
  properties?: Record<string, unknown>;
44
44
  }>;
45
+ /** Community label (for set_community, cohesion) */
46
+ community?: string;
47
+ /** Process ID (for delete_process) */
48
+ processId?: string;
49
+ /** Label for process tracing */
50
+ label?: string;
45
51
  }
46
52
  interface GraphQueryResult {
47
53
  action: string;
@@ -49,6 +55,12 @@ interface GraphQueryResult {
49
55
  edges?: GraphEdge[];
50
56
  stats?: GraphStats;
51
57
  validation?: GraphValidationResult;
58
+ communities?: Record<string, string[]>;
59
+ process?: ProcessInfo;
60
+ processes?: ProcessInfo[];
61
+ depthGroups?: DepthGroupedResult;
62
+ cohesionScore?: number;
63
+ symbol360?: Symbol360;
52
64
  nodesAdded?: number;
53
65
  edgesAdded?: number;
54
66
  deleted?: number;
@@ -1 +1 @@
1
- import{createHash as e}from"node:crypto";function t(t,...n){return`${t}_${e(`sha256`).update(n.join(`|`)).digest(`hex`).slice(0,12)}`}async function n(e,n){let{action:r}=n;switch(r){case`find_nodes`:{let t=await e.findNodes({type:n.nodeType,namePattern:n.namePattern,sourcePath:n.sourcePath,limit:n.limit});return{action:r,nodes:t,summary:`Found ${t.length} node(s)${n.nodeType?` of type "${n.nodeType}"`:``}${n.namePattern?` matching "${n.namePattern}"`:``}`}}case`find_edges`:{let t=await e.findEdges({type:n.edgeType,fromId:n.fromId,toId:n.toId,limit:n.limit});return{action:r,edges:t,summary:`Found ${t.length} edge(s)${n.edgeType?` of type "${n.edgeType}"`:``}`}}case`neighbors`:{if(!n.nodeId)return{action:r,summary:`Error: nodeId is required for neighbors action`};let t=await e.getNeighbors(n.nodeId,{edgeType:n.edgeType,direction:n.direction,limit:n.limit});return{action:r,nodes:t.nodes,edges:t.edges,summary:`Found ${t.nodes.length} neighbor(s) and ${t.edges.length} edge(s) for node "${n.nodeId}"`}}case`traverse`:{if(!n.nodeId)return{action:r,summary:`Error: nodeId is required for traverse action`};let t=await e.traverse(n.nodeId,{edgeType:n.edgeType,maxDepth:n.maxDepth,direction:n.direction,limit:n.limit});return{action:r,nodes:t.nodes,edges:t.edges,summary:`Traversed ${t.nodes.length} node(s) and ${t.edges.length} edge(s) from "${n.nodeId}" (depth=${n.maxDepth??2})`}}case`stats`:{let t=await e.getStats();return{action:r,stats:t,summary:`Graph: ${t.nodeCount} nodes, ${t.edgeCount} edges. 
Types: ${Object.entries(t.nodeTypes).map(([e,t])=>`${e}(${t})`).join(`, `)||`none`}`}}case`validate`:{let t=await e.validate();return{action:r,validation:t,stats:t.stats,summary:t.valid?`Graph validation passed: ${t.stats.nodeCount} nodes, ${t.stats.edgeCount} edges, ${t.orphanNodes.length} orphan node(s)`:`Graph validation found ${t.danglingEdges.length} dangling edge(s) and ${t.orphanNodes.length} orphan node(s)`}}case`add`:{let i=0,a=0;if(n.nodes&&n.nodes.length>0){let r=n.nodes.map(e=>({id:e.id??t(`node`,e.type,e.name),type:e.type,name:e.name,properties:e.properties??{},sourceRecordId:e.sourceRecordId,sourcePath:e.sourcePath,createdAt:new Date().toISOString()}));await e.upsertNodes(r),i=r.length}if(n.edges&&n.edges.length>0){let r=n.edges.map(e=>({id:e.id??t(`edge`,e.fromId,e.toId,e.type),fromId:e.fromId,toId:e.toId,type:e.type,weight:e.weight,properties:e.properties}));await e.upsertEdges(r),a=r.length}return{action:r,nodesAdded:i,edgesAdded:a,summary:`Added ${i} node(s) and ${a} edge(s) to the graph`}}case`delete`:if(n.nodeId)return await e.deleteNode(n.nodeId),{action:r,deleted:1,summary:`Deleted node "${n.nodeId}" and its edges`};if(n.sourcePath){let t=await e.deleteBySourcePath(n.sourcePath);return{action:r,deleted:t,summary:`Deleted ${t} node(s) from source "${n.sourcePath}"`}}return{action:r,summary:`Error: nodeId or sourcePath required for delete action`};case`clear`:{let t=await e.getStats();return await e.clear(),{action:r,deleted:t.nodeCount,summary:`Cleared graph: removed ${t.nodeCount} node(s) and ${t.edgeCount} edge(s)`}}default:return{action:r,summary:`Unknown action: ${r}`}}}async function r(e,t,n){let r=n?.hops??1,i=n?.maxPerHit??5,a=[];for(let o of t)try{let t=await e.findNodes({sourcePath:o.sourcePath}),s=[],c=[],l=new Set,u=new Set;for(let a of t.slice(0,i))if(!l.has(a.id)&&(l.add(a.id),s.push(a),r>0)){let t=await e.traverse(a.id,{maxDepth:r,edgeType:n?.edgeType,limit:i});for(let e of t.nodes)l.has(e.id)||(l.add(e.id),s.push(e));for(let e of 
t.edges)u.has(e.id)||(u.add(e.id),c.push(e))}a.push({recordId:o.recordId,score:o.score,sourcePath:o.sourcePath,graphContext:{nodes:s,edges:c}})}catch{a.push({recordId:o.recordId,score:o.score,sourcePath:o.sourcePath,graphContext:{nodes:[],edges:[]}})}return a}export{r as graphAugmentSearch,n as graphQuery};
1
+ import{createHash as e}from"node:crypto";function t(t,...n){return`${t}_${e(`sha256`).update(n.join(`|`)).digest(`hex`).slice(0,12)}`}async function n(e,n){let{action:r}=n;switch(r){case`find_nodes`:{let t=await e.findNodes({type:n.nodeType,namePattern:n.namePattern,sourcePath:n.sourcePath,limit:n.limit});return{action:r,nodes:t,summary:`Found ${t.length} node(s)${n.nodeType?` of type "${n.nodeType}"`:``}${n.namePattern?` matching "${n.namePattern}"`:``}`}}case`find_edges`:{let t=await e.findEdges({type:n.edgeType,fromId:n.fromId,toId:n.toId,limit:n.limit});return{action:r,edges:t,summary:`Found ${t.length} edge(s)${n.edgeType?` of type "${n.edgeType}"`:``}`}}case`neighbors`:{if(!n.nodeId)return{action:r,summary:`Error: nodeId is required for neighbors action`};let t=await e.getNeighbors(n.nodeId,{edgeType:n.edgeType,direction:n.direction,limit:n.limit});return{action:r,nodes:t.nodes,edges:t.edges,summary:`Found ${t.nodes.length} neighbor(s) and ${t.edges.length} edge(s) for node "${n.nodeId}"`}}case`traverse`:{if(!n.nodeId)return{action:r,summary:`Error: nodeId is required for traverse action`};let t=await e.traverse(n.nodeId,{edgeType:n.edgeType,maxDepth:n.maxDepth,direction:n.direction,limit:n.limit});return{action:r,nodes:t.nodes,edges:t.edges,summary:`Traversed ${t.nodes.length} node(s) and ${t.edges.length} edge(s) from "${n.nodeId}" (depth=${n.maxDepth??2})`}}case`stats`:{let t=await e.getStats();return{action:r,stats:t,summary:`Graph: ${t.nodeCount} nodes, ${t.edgeCount} edges. 
Types: ${Object.entries(t.nodeTypes).map(([e,t])=>`${e}(${t})`).join(`, `)||`none`}`}}case`validate`:{let t=await e.validate();return{action:r,validation:t,stats:t.stats,summary:t.valid?`Graph validation passed: ${t.stats.nodeCount} nodes, ${t.stats.edgeCount} edges, ${t.orphanNodes.length} orphan node(s)`:`Graph validation found ${t.danglingEdges.length} dangling edge(s) and ${t.orphanNodes.length} orphan node(s)`}}case`add`:{let i=0,a=0;if(n.nodes&&n.nodes.length>0){let r=n.nodes.map(e=>({id:e.id??t(`node`,e.type,e.name),type:e.type,name:e.name,properties:e.properties??{},sourceRecordId:e.sourceRecordId,sourcePath:e.sourcePath,createdAt:new Date().toISOString()}));await e.upsertNodes(r),i=r.length}if(n.edges&&n.edges.length>0){let r=n.edges.map(e=>({id:e.id??t(`edge`,e.fromId,e.toId,e.type),fromId:e.fromId,toId:e.toId,type:e.type,weight:e.weight,properties:e.properties}));await e.upsertEdges(r),a=r.length}return{action:r,nodesAdded:i,edgesAdded:a,summary:`Added ${i} node(s) and ${a} edge(s) to the graph`}}case`delete`:if(n.nodeId)return await e.deleteNode(n.nodeId),{action:r,deleted:1,summary:`Deleted node "${n.nodeId}" and its edges`};if(n.sourcePath){let t=await e.deleteBySourcePath(n.sourcePath);return{action:r,deleted:t,summary:`Deleted ${t} node(s) from source "${n.sourcePath}"`}}return{action:r,summary:`Error: nodeId or sourcePath required for delete action`};case`clear`:{let t=await e.getStats();return await e.clear(),{action:r,deleted:t.nodeCount,summary:`Cleared graph: removed ${t.nodeCount} node(s) and ${t.edgeCount} edge(s)`}}case`detect_communities`:{let t=await e.detectCommunities(),n=Object.values(t).reduce((e,t)=>e+t.length,0);return{action:r,communities:t,summary:`Detected ${Object.keys(t).length} community/communities covering ${n} node(s)`}}case`set_community`:return!n.nodeId||!n.community?{action:r,summary:`Error: nodeId and community are required for set_community action`}:(await e.setNodeCommunity(n.nodeId,n.community),{action:r,summary:`Set 
community "${n.community}" on node "${n.nodeId}"`});case`trace_process`:{if(!n.nodeId||!n.label)return{action:r,summary:`Error: nodeId and label are required for trace_process action`};let t=await e.traceProcess(n.nodeId,n.label);return{action:r,process:t,summary:`Traced process "${t.label}" from node "${n.nodeId}": ${t.steps.length} step(s)`}}case`list_processes`:{let t=await e.getProcesses(n.nodeId);return{action:r,processes:t,summary:`Found ${t.length} process(es)${n.nodeId?` involving node "${n.nodeId}"`:``}`}}case`delete_process`:return n.processId?(await e.deleteProcess(n.processId),{action:r,summary:`Deleted process "${n.processId}"`}):{action:r,summary:`Error: processId is required for delete_process action`};case`depth_traverse`:{if(!n.nodeId)return{action:r,summary:`Error: nodeId is required for depth_traverse action`};let t=await e.depthGroupedTraversal(n.nodeId,n.maxDepth,{edgeType:n.edgeType,direction:n.direction,limit:n.limit}),i=Object.values(t).reduce((e,t)=>e+t.length,0);return{action:r,depthGroups:t,summary:`Depth-grouped traversal from "${n.nodeId}": ${i} node(s) across ${Object.keys(t).length} depth level(s)`}}case`cohesion`:{if(!n.community)return{action:r,summary:`Error: community is required for cohesion action`};let t=await e.getCohesionScore(n.community);return{action:r,cohesionScore:t,summary:`Community "${n.community}" cohesion score: ${(t*100).toFixed(1)}%`}}case`symbol360`:{if(!n.nodeId)return{action:r,summary:`Error: nodeId is required for symbol360 action`};let t=await e.getSymbol360(n.nodeId);return{action:r,symbol360:t,nodes:[t.node],edges:[...t.incoming,...t.outgoing],summary:`360° view of "${t.node.name}": ${t.incoming.length} incoming, ${t.outgoing.length} outgoing, community: ${t.community??`none`}, ${t.processes.length} process(es)`}}default:return{action:r,summary:`Unknown action: ${r}`}}}async function r(e,t,n){let r=n?.hops??1,i=n?.maxPerHit??5,a=[];for(let o of t)try{let t=await 
e.findNodes({sourcePath:o.sourcePath}),s=[],c=[],l=new Set,u=new Set;for(let a of t.slice(0,i))if(!l.has(a.id)&&(l.add(a.id),s.push(a),r>0)){let t=await e.traverse(a.id,{maxDepth:r,edgeType:n?.edgeType,limit:i});for(let e of t.nodes)l.has(e.id)||(l.add(e.id),s.push(e));for(let e of t.edges)u.has(e.id)||(u.add(e.id),c.push(e))}a.push({recordId:o.recordId,score:o.score,sourcePath:o.sourcePath,graphContext:{nodes:s,edges:c}})}catch{a.push({recordId:o.recordId,score:o.score,sourcePath:o.sourcePath,graphContext:{nodes:[],edges:[]}})}return a}export{r as graphAugmentSearch,n as graphQuery};
@@ -1,5 +1,5 @@
1
1
  import { IEmbedder } from "../../embeddings/dist/index.js";
2
- import { IKnowledgeStore } from "../../store/dist/index.js";
2
+ import { IGraphStore, IKnowledgeStore } from "../../store/dist/index.js";
3
3
 
4
4
  //#region packages/tools/src/trace.d.ts
5
5
  interface TraceOptions {
@@ -9,6 +9,18 @@ interface TraceOptions {
9
9
  direction: 'forward' | 'backward' | 'both';
10
10
  /** Max depth (default: 3) */
11
11
  maxDepth?: number;
12
+ /** Optional graph store for enriching results */
13
+ graphStore?: IGraphStore;
14
+ }
15
+ interface TraceGraphContext {
16
+ /** Module that defines the traced symbol */
17
+ definingModule?: string;
18
+ /** Community/cluster of the traced symbol */
19
+ community?: string;
20
+ /** Modules that import the file containing this symbol */
21
+ importedByModules: string[];
22
+ /** Symbols defined in the same module */
23
+ siblingSymbols: string[];
12
24
  }
13
25
  interface TraceNode {
14
26
  path: string;
@@ -23,7 +35,9 @@ interface TraceResult {
23
35
  direction: string;
24
36
  nodes: TraceNode[];
25
37
  depth: number;
38
+ /** Graph-derived context (when graph store available) */
39
+ graphContext?: TraceGraphContext;
26
40
  }
27
41
  declare function trace(embedder: IEmbedder, store: IKnowledgeStore, options: TraceOptions): Promise<TraceResult>;
28
42
  //#endregion
29
- export { TraceNode, TraceOptions, TraceResult, trace };
43
+ export { TraceGraphContext, TraceNode, TraceOptions, TraceResult, trace };
@@ -1,2 +1,2 @@
1
- import{escapeRegExp as e}from"./regex-utils.js";import{readFile as t}from"node:fs/promises";import{extname as n}from"node:path";import{SUPPORTED_EXTENSIONS as r,WasmRuntime as i,extractCalls as a}from"../../chunker/dist/index.js";async function o(t,n,r){let{start:a,direction:o,maxDepth:d=3}=r,f=[],p=new Set,m=new Set,h=!!i.get(),g=new Map,_=await t.embed(a);if((await n.search(_,{limit:10})).length===0)return{start:a,direction:o,nodes:f,depth:0};let v=[{target:a,depth:0}],y=0;for(;v.length>0;){let r=v.shift();if(!r)break;if(r.depth>=d||p.has(r.target))continue;p.add(r.target);let i=await t.embed(r.target),a=await n.search(i,{limit:20}),u=e(r.target),_=l(r.target);for(let e of a){let t=e.record.sourcePath,n=e.record.content.split(`
2
- `);if(h&&!_){let e=await s(g,t);for(let n of e)o!==`backward`&&n.callerName===r.target&&(c(m,f,{path:t,symbol:n.calleeName,line:n.line,relationship:`calls`,scope:n.callerName}),y=Math.max(y,r.depth+1),v.push({target:n.calleeName,depth:r.depth+1})),o!==`forward`&&n.calleeName===r.target&&(c(m,f,{path:t,symbol:n.callerName,line:n.line,relationship:`called-by`,scope:n.callerName}),y=Math.max(y,r.depth+1),v.push({target:n.callerName,depth:r.depth+1}))}for(let i=0;i<n.length;i+=1){let a=n[i],s=e.record.startLine+i;if(o!==`forward`&&RegExp(_?`from\\s+['"]${u}['"]`:`import\\s+.*\\b${u}\\b.*from\\s+`).test(a)){c(m,f,{path:t,symbol:r.target,line:s,relationship:`imported-by`}),y=Math.max(y,r.depth+1);let e=a.match(/from\s+['"]([^'"]+)['"]/);!_&&e&&v.push({target:e[1],depth:r.depth+1})}o!==`backward`&&(_?RegExp(`from\\s+['"]${u}['"]`).test(a)&&(c(m,f,{path:t,symbol:r.target,line:s,relationship:`imports`}),y=Math.max(y,r.depth+1)):h||RegExp(`\\b${u}\\s*\\(`).test(a)&&!/^\s*(?:export\s+)?(?:async\s+)?function\s/.test(a)&&(c(m,f,{path:t,symbol:r.target,line:s,relationship:`calls`}),y=Math.max(y,r.depth+1))),RegExp(_?`['"]${u}['"]`:`\\b${u}\\b`).test(a)&&!/^\s*import\s/.test(a)&&!/^\s*(?:export\s+)?(?:async\s+)?function\s/.test(a)&&(c(m,f,{path:t,symbol:r.target,line:s,relationship:`references`}),y=Math.max(y,r.depth+1))}}}return{start:a,direction:o,nodes:u(f),depth:y}}async function s(e,i){let o=e.get(i);if(o)return o;let s=n(i);if(!r.has(s))return e.set(i,[]),[];try{let n=await a(await t(i,`utf-8`),s,i);return e.set(i,n),n}catch{return e.set(i,[]),[]}}function c(e,t,n){let r=`${n.path}:${n.line}:${n.relationship}`;e.has(r)||(e.add(r),t.push(n))}function l(e){return/[./\\]/.test(e)}function u(e){let t=new Set;return e.filter(e=>{let n=`${e.path}:${e.line}:${e.relationship}`;return t.has(n)?!1:(t.add(n),!0)})}export{o as trace};
1
+ import{escapeRegExp as e}from"./regex-utils.js";import{readFile as t}from"node:fs/promises";import{extname as n}from"node:path";import{SUPPORTED_EXTENSIONS as r,WasmRuntime as i,extractCalls as a}from"../../chunker/dist/index.js";async function o(t,n,r){let{start:a,direction:o,maxDepth:f=3,graphStore:p}=r,m=[],h=new Set,g=new Set,_=!!i.get(),v=new Map,y=await t.embed(a);if((await n.search(y,{limit:10})).length===0)return{start:a,direction:o,nodes:m,depth:0};let b=[{target:a,depth:0}],x=0;for(;b.length>0;){let r=b.shift();if(!r)break;if(r.depth>=f||h.has(r.target))continue;h.add(r.target);let i=await t.embed(r.target),a=await n.search(i,{limit:20}),s=e(r.target),d=u(r.target);for(let e of a){let t=e.record.sourcePath,n=e.record.content.split(`
2
+ `);if(_&&!d){let e=await c(v,t);for(let n of e)o!==`backward`&&n.callerName===r.target&&(l(g,m,{path:t,symbol:n.calleeName,line:n.line,relationship:`calls`,scope:n.callerName}),x=Math.max(x,r.depth+1),b.push({target:n.calleeName,depth:r.depth+1})),o!==`forward`&&n.calleeName===r.target&&(l(g,m,{path:t,symbol:n.callerName,line:n.line,relationship:`called-by`,scope:n.callerName}),x=Math.max(x,r.depth+1),b.push({target:n.callerName,depth:r.depth+1}))}for(let i=0;i<n.length;i+=1){let a=n[i],c=e.record.startLine+i;if(o!==`forward`&&RegExp(d?`from\\s+['"]${s}['"]`:`import\\s+.*\\b${s}\\b.*from\\s+`).test(a)){l(g,m,{path:t,symbol:r.target,line:c,relationship:`imported-by`}),x=Math.max(x,r.depth+1);let e=a.match(/from\s+['"]([^'"]+)['"]/);!d&&e&&b.push({target:e[1],depth:r.depth+1})}o!==`backward`&&(d?RegExp(`from\\s+['"]${s}['"]`).test(a)&&(l(g,m,{path:t,symbol:r.target,line:c,relationship:`imports`}),x=Math.max(x,r.depth+1)):_||RegExp(`\\b${s}\\s*\\(`).test(a)&&!/^\s*(?:export\s+)?(?:async\s+)?function\s/.test(a)&&(l(g,m,{path:t,symbol:r.target,line:c,relationship:`calls`}),x=Math.max(x,r.depth+1))),RegExp(d?`['"]${s}['"]`:`\\b${s}\\b`).test(a)&&!/^\s*import\s/.test(a)&&!/^\s*(?:export\s+)?(?:async\s+)?function\s/.test(a)&&(l(g,m,{path:t,symbol:r.target,line:c,relationship:`references`}),x=Math.max(x,r.depth+1))}}}let S=p?await s(a,p):void 0;return{start:a,direction:o,nodes:d(m),depth:x,graphContext:S}}async function s(e,t){try{let n=await t.findNodes({namePattern:e,limit:10}),r=n.find(t=>t.name===e&&t.type!==`module`)??n.find(t=>t.name===e)??n[0];if(!r)return;let i={definingModule:r.sourcePath,community:r.community,importedByModules:[],siblingSymbols:[]};try{let e=await t.getSymbol360(r.id);i.community=e.community??i.community,i.definingModule=e.node.sourcePath??i.definingModule}catch{}return i.importedByModules=f((await t.getNeighbors(r.id,{direction:`incoming`,edgeType:`imports`})).nodes.map(e=>e.sourcePath??e.name)),i.definingModule&&(i.siblingSymbols=f((await 
t.findNodes({sourcePath:i.definingModule,limit:50})).filter(e=>e.name!==r.name&&e.type!==`module`).map(e=>`${e.type}:${e.name}`)).slice(0,15)),i.definingModule||i.community||i.importedByModules.length>0||i.siblingSymbols.length>0?i:void 0}catch{return}}async function c(e,i){let o=e.get(i);if(o)return o;let s=n(i);if(!r.has(s))return e.set(i,[]),[];try{let n=await a(await t(i,`utf-8`),s,i);return e.set(i,n),n}catch{return e.set(i,[]),[]}}function l(e,t,n){let r=`${n.path}:${n.line}:${n.relationship}`;e.has(r)||(e.add(r),t.push(n))}function u(e){return/[./\\]/.test(e)}function d(e){let t=new Set;return e.filter(e=>{let n=`${e.path}:${e.line}:${e.relationship}`;return t.has(n)?!1:(t.add(n),!0)})}function f(e){return[...new Set(e.filter(e=>e.length>0))]}export{o as trace};
@@ -111,6 +111,18 @@ Batch 2 (after batch 1):
111
111
  - Charts, tables, dependency graphs → always \`present\`
112
112
  - Short confirmations and questions → normal chat
113
113
 
114
+ ## Subagent Output Relay
115
+
116
+ When subagents complete, their visual outputs (from \`present\`) are NOT visible to the user.
117
+ **You MUST relay key findings:**
118
+
119
+ 1. After every subagent completes, extract key data from the returned text
120
+ 2. If the subagent mentions charts, tables, or visual data → re-present using \`present({ format: "html" })\`
121
+ 3. If the subagent returns structured findings → summarize and present to user
122
+ 4. **Never assume the user saw subagent output** — always relay or re-present
123
+
124
+ **Rule: Every subagent batch completion MUST be followed by a user-visible summary or presentation.**
125
+
114
126
  ## Critical Rules
115
127
 
116
128
  1. 🚫 **ZERO implementation** — never \`editFiles\`/\`createFile\` on source code. Always delegate.
@@ -212,6 +224,18 @@ At session start, check for an active flow:
212
224
  5. **Dependency Graph** — For each phase, list dependencies. Group into parallel batches
213
225
  6. **Present** — Show plan with open questions, complexity estimate, parallel batch layout
214
226
 
227
+ ## Subagent Output Relay
228
+
229
+ When subagents complete, their visual outputs (from \`present\`) are NOT visible to the user.
230
+ **You MUST relay key findings:**
231
+
232
+ 1. After every subagent completes, extract key data from the returned text
233
+ 2. If the subagent mentions charts, tables, or visual data → re-present using \`present({ format: "html" })\`
234
+ 3. If the subagent returns structured findings → summarize and present to user
235
+ 4. **Never assume the user saw subagent output** — always relay or re-present
236
+
237
+ **Rule: Every subagent batch completion MUST be followed by a user-visible summary or presentation.**
238
+
215
239
  ## Output Format
216
240
 
217
241
  \`\`\`markdown
@@ -12,7 +12,7 @@ export const PROMPTS = {
12
12
  ask: {
13
13
  description: 'Quick research question — find answer using AI Kit + web search',
14
14
  agent: 'Researcher-Alpha',
15
- tools: ['search', 'web_search', 'web_fetch', 'compact', 'file_summary', 'remember'],
15
+ tools: ['search', 'web_search', 'web_fetch', 'compact', 'file_summary', 'remember', 'present'],
16
16
  content: `## Quick Research
17
17
 
18
18
  1. **AI Kit Recall** — Search knowledge base for existing answers
@@ -36,6 +36,7 @@ export const PROMPTS = {
36
36
  'parse_output',
37
37
  'compact',
38
38
  'remember',
39
+ 'present',
39
40
  ],
40
41
  content: `## Debug Workflow
41
42
 
@@ -61,6 +62,7 @@ export const PROMPTS = {
61
62
  'blast_radius',
62
63
  'remember',
63
64
  'audit',
65
+ 'present',
64
66
  ],
65
67
  content: `## Implementation Pipeline
66
68
 
@@ -88,6 +90,7 @@ Refer to the Orchestrator agent's full instructions for the detailed workflow.`,
88
90
  'analyze_structure',
89
91
  'analyze_dependencies',
90
92
  'remember',
93
+ 'present',
91
94
  ],
92
95
  content: `## Planning Workflow
93
96
 
@@ -105,7 +108,15 @@ Refer to the Orchestrator agent's full instructions for the detailed workflow.`,
105
108
  description:
106
109
  'Collaborative design session — explore ideas, refine requirements, produce a design spec',
107
110
  agent: 'Orchestrator',
108
- tools: ['search', 'scope_map', 'file_summary', 'compact', 'remember', 'forge_classify'],
111
+ tools: [
112
+ 'search',
113
+ 'scope_map',
114
+ 'file_summary',
115
+ 'compact',
116
+ 'remember',
117
+ 'forge_classify',
118
+ 'present',
119
+ ],
109
120
  content: `## Design Session
110
121
 
111
122
  Enter Phase 0 (Design Gate) directly — the user is requesting a design session.
@@ -121,7 +132,15 @@ Enter Phase 0 (Design Gate) directly — the user is requesting a design session
121
132
  review: {
122
133
  description: 'Dual-model code + architecture review pipeline',
123
134
  agent: 'Orchestrator',
124
- tools: ['search', 'blast_radius', 'check', 'test_run', 'analyze_dependencies', 'remember'],
135
+ tools: [
136
+ 'search',
137
+ 'blast_radius',
138
+ 'check',
139
+ 'test_run',
140
+ 'analyze_dependencies',
141
+ 'remember',
142
+ 'present',
143
+ ],
125
144
  content: `## Review Pipeline
126
145
 
127
146
  ### Step 1: Scope
@@ -11,6 +11,25 @@ export const PROTOCOLS = {
11
11
  > This file contains shared protocols for all code-modifying agents (Implementer, Frontend, Refactor, Debugger). Each agent's definition file contains only its unique identity, constraints, and workflow. **Do not duplicate this content in agent files.**
12
12
 
13
13
 
14
+ ## AI Kit MCP Tool Naming Convention
15
+
16
+ All tool references in these instructions use **short names** (e.g. \`status\`, \`compact\`, \`search\`).
17
+ At runtime, these are MCP tools exposed by the AI Kit server. Depending on your IDE/client, the actual tool name will be prefixed:
18
+
19
+ | Client | Tool naming pattern | Example |
20
+ |--------|-------------------|---------|
21
+ | VS Code Copilot | \`mcp_<serverName>_<tool>\` | \`mcp_aikit_status\` |
22
+ | Claude Code | \`mcp__<serverName>__<tool>\` | \`mcp__aikit__status\` |
23
+ | Other MCP clients | \`<serverName>_<tool>\` or bare \`<tool>\` | \`aikit_status\` or \`status\` |
24
+
25
+ The server name is typically \`aikit\` or \`kb\` — check your MCP configuration.
26
+
27
+ **When these instructions say** \`status({})\` **→ call the MCP tool whose name ends with** \`_status\` **and pass** \`{}\` **as arguments.**
28
+
29
+ If tools are deferred/lazy-loaded, load them first (e.g. in VS Code Copilot: \`tool_search_tool_regex({ pattern: "aikit" })\`).
30
+
31
+ ---
32
+
14
33
  ## Invocation Mode Detection
15
34
 
16
35
  You may be invoked in two modes:
@@ -18,6 +37,9 @@ You may be invoked in two modes:
18
37
  2. **Sub-agent** (via Orchestrator) — you may have limited MCP tool access.
19
38
  The Orchestrator provides context under "## Prior AI Kit Context" in your prompt.
20
39
  If present, skip AI Kit Recall and use the provided context instead.
40
+ **Visual Output:** When running as a sub-agent, do NOT use the \`present\` tool (output won't reach the user).
41
+ Instead, include structured data (tables, findings, metrics) as formatted text in your final response.
42
+ The Orchestrator will re-present relevant content to the user.
21
43
 
22
44
  **Detection:** If your prompt contains "## Prior AI Kit Context", you are in sub-agent mode.
23
45
 
@@ -19,3 +19,9 @@ You are **not** the Code-Reviewer agent. Code-Reviewer handles correctness, test
19
19
  |-------|--------------|
20
20
  | `c4-architecture` | When reviewing architectural diagrams or boundary changes |
21
21
  | `adr-skill` | When the review involves architecture decisions — reference or create ADRs |
22
+
23
+ ## Flows
24
+
25
+ This project uses aikit's pluggable flow system. Check flow status with the `flow_status` MCP tool.
26
+ If a flow is active, follow the current step's skill instructions. Advance with `flow_step({ action: 'next' })`.
27
+ Use `flow_list` to see available flows and `flow_start` to begin one.
@@ -19,3 +19,9 @@ You are **not** the Code-Reviewer agent. Code-Reviewer handles correctness, test
19
19
  |-------|--------------|
20
20
  | `c4-architecture` | When reviewing architectural diagrams or boundary changes |
21
21
  | `adr-skill` | When the review involves architecture decisions — reference or create ADRs |
22
+
23
+ ## Flows
24
+
25
+ This project uses aikit's pluggable flow system. Check flow status with the `flow_status` MCP tool.
26
+ If a flow is active, follow the current step's skill instructions. Advance with `flow_step({ action: 'next' })`.
27
+ Use `flow_list` to see available flows and `flow_start` to begin one.
@@ -10,3 +10,9 @@ model: GPT-5.4 (copilot)
10
10
  You are **Code-Reviewer-Alpha**, the primary Code-Reviewer agent.
11
11
 
12
12
  **Read .github/agents/_shared/code-reviewer-base.md NOW** — it contains your complete workflow and guidelines. All instructions there apply to you.
13
+
14
+ ## Flows
15
+
16
+ This project uses aikit's pluggable flow system. Check flow status with the `flow_status` MCP tool.
17
+ If a flow is active, follow the current step's skill instructions. Advance with `flow_step({ action: 'next' })`.
18
+ Use `flow_list` to see available flows and `flow_start` to begin one.
@@ -10,3 +10,9 @@ model: Claude Opus 4.6 (copilot)
10
10
  You are **Code-Reviewer-Beta**, a variant of Code-Reviewer. Same responsibilities, different model perspective.
11
11
 
12
12
  **Read .github/agents/_shared/code-reviewer-base.md NOW** — it contains your complete workflow and guidelines. All instructions there apply to you.
13
+
14
+ ## Flows
15
+
16
+ This project uses aikit's pluggable flow system. Check flow status with the `flow_status` MCP tool.
17
+ If a flow is active, follow the current step's skill instructions. Advance with `flow_step({ action: 'next' })`.
18
+ Use `flow_list` to see available flows and `flow_start` to begin one.
@@ -32,3 +32,9 @@ You are the **Debugger**, expert debugger that diagnoses issues, traces errors,
32
32
  - **Test the fix** — Every fix must have a test that would have caught the bug
33
33
  - **Verify before asserting** — Don't claim a function has a certain signature without checking via `symbol`. Don't reference a config option without confirming it exists in the codebase
34
34
  - **Break debug loops** — If you apply a fix, test, and get the same error 3 times: your hypothesis is wrong. STOP, discard your current theory, re-examine the error output and trace from a different entry point. Return `ESCALATE` if a fresh approach also fails
35
+
36
+ ## Flows
37
+
38
+ This project uses aikit's pluggable flow system. Check flow status with the `flow_status` MCP tool.
39
+ If a flow is active, follow the current step's skill instructions. Advance with `flow_step({ action: 'next' })`.
40
+ Use `flow_list` to see available flows and `flow_start` to begin one.
@@ -51,3 +51,9 @@ You are the **Documenter**, documentation specialist that creates and maintains
51
51
  |-------|--------------|
52
52
  | `c4-architecture` | When documenting system architecture — generate C4 Mermaid diagrams |
53
53
  | `adr-skill` | When documenting architecture decisions — create or update ADRs |
54
+
55
+ ## Flows
56
+
57
+ This project uses aikit's pluggable flow system. Check flow status with the `flow_status` MCP tool.
58
+ If a flow is active, follow the current step's skill instructions. Advance with `flow_step({ action: 'next' })`.
59
+ Use `flow_list` to see available flows and `flow_start` to begin one.
@@ -61,3 +61,9 @@ You are the **Explorer**, rapid codebase exploration to find files, usages, depe
61
61
  - **Speed over depth** — Provide a useful map quickly, not an exhaustive analysis
62
62
  - **Read-only** — Never create, edit, or delete files
63
63
  - **Structured output** — Always return findings in the format above
64
+
65
+ ## Flows
66
+
67
+ This project uses aikit's pluggable flow system. Check flow status with the `flow_status` MCP tool.
68
+ If a flow is active, follow the current step's skill instructions. Advance with `flow_step({ action: 'next' })`.
69
+ Use `flow_list` to see available flows and `flow_start` to begin one.
@@ -27,3 +27,9 @@ You are the **Frontend**, ui/ux specialist for react, styling, responsive design
27
27
  - **Follow design system** — Use existing tokens, don't create one-off values
28
28
  - **Responsive by default** — Mobile-first, test all breakpoints
29
29
  - **Test-first** — Component tests before implementation
30
+
31
+ ## Flows
32
+
33
+ This project uses aikit's pluggable flow system. Check flow status with the `flow_status` MCP tool.
34
+ If a flow is active, follow the current step's skill instructions. Advance with `flow_step({ action: 'next' })`.
35
+ Use `flow_list` to see available flows and `flow_start` to begin one.
@@ -31,3 +31,9 @@ You are the **Implementer**, persistent implementation agent that writes code fo
31
31
  - **Run `check` after every change** — Catch errors early
32
32
  - **Loop-break** — If the same test fails 3 times with the same error after your fixes, STOP. Re-read the error from scratch, check your assumptions with `trace` or `symbol`, and try a fundamentally different approach. Do not attempt a 4th fix in the same direction
33
33
  - **Think-first for complex tasks** — If a task involves 3+ files or non-obvious logic, outline your approach before writing code. Check existing patterns with `search` first. Design, then implement
34
+
35
+ ## Flows
36
+
37
+ This project uses aikit's pluggable flow system. Check flow status with the `flow_status` MCP tool.
38
+ If a flow is active, follow the current step's skill instructions. Advance with `flow_step({ action: 'next' })`.
39
+ Use `flow_list` to see available flows and `flow_start` to begin one.