dev-mcp-server 0.0.2 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +23 -55
- package/README.md +609 -219
- package/cli.js +486 -160
- package/package.json +2 -2
- package/src/agents/BaseAgent.js +113 -0
- package/src/agents/dreamer.js +165 -0
- package/src/agents/improver.js +175 -0
- package/src/agents/specialists.js +202 -0
- package/src/agents/taskDecomposer.js +176 -0
- package/src/agents/teamCoordinator.js +153 -0
- package/src/api/routes/agents.js +172 -0
- package/src/api/routes/extras.js +115 -0
- package/src/api/routes/git.js +72 -0
- package/src/api/routes/ingest.js +60 -40
- package/src/api/routes/knowledge.js +59 -41
- package/src/api/routes/memory.js +41 -0
- package/src/api/routes/newRoutes.js +168 -0
- package/src/api/routes/pipelines.js +41 -0
- package/src/api/routes/planner.js +54 -0
- package/src/api/routes/query.js +24 -0
- package/src/api/routes/sessions.js +54 -0
- package/src/api/routes/tasks.js +67 -0
- package/src/api/routes/tools.js +85 -0
- package/src/api/routes/v5routes.js +196 -0
- package/src/api/server.js +133 -5
- package/src/context/compactor.js +151 -0
- package/src/context/contextEngineer.js +181 -0
- package/src/context/contextVisualizer.js +140 -0
- package/src/core/conversationEngine.js +231 -0
- package/src/core/indexer.js +169 -143
- package/src/core/ingester.js +141 -126
- package/src/core/queryEngine.js +286 -236
- package/src/cron/cronScheduler.js +260 -0
- package/src/dashboard/index.html +1181 -0
- package/src/lsp/symbolNavigator.js +220 -0
- package/src/memory/memoryManager.js +186 -0
- package/src/memory/teamMemory.js +111 -0
- package/src/messaging/messageBus.js +177 -0
- package/src/monitor/proactiveMonitor.js +337 -0
- package/src/pipelines/pipelineEngine.js +230 -0
- package/src/planner/plannerEngine.js +202 -0
- package/src/plugins/builtin/stats-plugin.js +29 -0
- package/src/plugins/pluginManager.js +144 -0
- package/src/prompts/promptEngineer.js +289 -0
- package/src/sessions/sessionManager.js +166 -0
- package/src/skills/skillsManager.js +263 -0
- package/src/storage/store.js +127 -105
- package/src/tasks/taskManager.js +151 -0
- package/src/tools/BashTool.js +154 -0
- package/src/tools/FileEditTool.js +280 -0
- package/src/tools/GitTool.js +212 -0
- package/src/tools/GrepTool.js +199 -0
- package/src/tools/registry.js +1380 -0
- package/src/utils/costTracker.js +69 -0
- package/src/utils/fileParser.js +176 -153
- package/src/utils/llmClient.js +355 -206
- package/src/watcher/fileWatcher.js +137 -0
- package/src/worktrees/worktreeManager.js +176 -0
|
@@ -0,0 +1,337 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* A background agent that WATCHES your codebase and ALERTS you to problems
|
|
3
|
+
* without being asked. It runs scheduled checks and surfaces findings.
|
|
4
|
+
*
|
|
5
|
+
* Checks it runs automatically:
|
|
6
|
+
* - New TODOs/FIXMEs added since last check
|
|
7
|
+
* - Security-sensitive patterns (hardcoded secrets, missing auth)
|
|
8
|
+
* - Files that changed recently (via git) but weren't re-ingested
|
|
9
|
+
* - Memory inconsistencies (facts that contradict each other)
|
|
10
|
+
* - Tasks that have been open too long
|
|
11
|
+
*/
|
|
12
|
+
|
|
13
|
+
const fs = require('fs');
|
|
14
|
+
const path = require('path');
|
|
15
|
+
const logger = require('../utils/logger');
|
|
16
|
+
const GrepTool = require('../tools/GrepTool');
|
|
17
|
+
const BashTool = require('../tools/BashTool');
|
|
18
|
+
const { TaskManager } = require('../tasks/taskManager');
|
|
19
|
+
const { MemoryManager } = require('../memory/memoryManager');
|
|
20
|
+
const costTracker = require('../utils/costTracker');
|
|
21
|
+
|
|
22
|
+
// On-disk persistence locations, relative to the process working directory.
// (NOTE(review): assumes `data/` exists — the save helpers do not mkdir; confirm
// an earlier bootstrap creates it.)
const MONITOR_FILE = path.join(process.cwd(), 'data', 'monitor-state.json');
const ALERTS_FILE = path.join(process.cwd(), 'data', 'alerts.json');

// ── Alert severity levels ────────────────────────────────────────────────────
const SEVERITY = { INFO: 'info', WARN: 'warn', CRITICAL: 'critical' };

// ── Built-in checks ──────────────────────────────────────────────────────────
// Each entry: { id, name, description, severity, intervalMinutes }.
// `id` is the dispatch key used by ProactiveMonitor._runCheck;
// `intervalMinutes` is how often the scheduler re-runs the check once started.
const BUILTIN_CHECKS = [
  {
    id: 'new-todos',
    name: 'New TODOs/FIXMEs',
    description: 'Detect new TODO/FIXME/HACK comments',
    severity: SEVERITY.INFO,
    intervalMinutes: 60,
  },
  {
    id: 'hardcoded-secrets',
    name: 'Hardcoded Secrets',
    description: 'Scan for API keys, passwords, tokens in code',
    severity: SEVERITY.CRITICAL,
    intervalMinutes: 120,
  },
  {
    id: 'stale-tasks',
    name: 'Stale Tasks',
    description: 'Tasks open more than 7 days without updates',
    severity: SEVERITY.WARN,
    intervalMinutes: 1440, // daily
  },
  {
    id: 'git-drift',
    name: 'Git Drift',
    description: 'Files changed in git but not re-ingested',
    severity: SEVERITY.INFO,
    intervalMinutes: 30,
  },
  {
    id: 'missing-error-handling',
    name: 'Missing Error Handling',
    description: 'Async functions without try/catch',
    severity: SEVERITY.WARN,
    intervalMinutes: 240,
  },
];
|
|
66
|
+
|
|
67
|
+
class ProactiveMonitor {
  constructor() {
    this._isRunning = false;
    this._intervals = new Map(); // check id -> setInterval handle
    this._state = this._loadState();
    this._alerts = this._loadAlerts();
  }

  // Load persisted monitor state ({ lastRun, customChecks }). Best-effort:
  // a missing or corrupt file silently yields a fresh default state.
  _loadState() {
    try { if (fs.existsSync(MONITOR_FILE)) return JSON.parse(fs.readFileSync(MONITOR_FILE, 'utf-8')); } catch { }
    return { lastRun: {}, customChecks: [] };
  }

  // Load persisted alerts ({ alerts: [] }) with the same best-effort semantics.
  _loadAlerts() {
    try { if (fs.existsSync(ALERTS_FILE)) return JSON.parse(fs.readFileSync(ALERTS_FILE, 'utf-8')); } catch { }
    return { alerts: [] };
  }

  _saveState() { fs.writeFileSync(MONITOR_FILE, JSON.stringify(this._state, null, 2)); }
  _saveAlerts() { fs.writeFileSync(ALERTS_FILE, JSON.stringify(this._alerts, null, 2)); }

  /**
   * Start the monitor with scheduled checks.
   * Each built-in check runs on its own interval; first runs are staggered
   * 5s apart so they do not all fire at startup simultaneously.
   * Idempotent: calling start() while running is a no-op.
   * @param {string} [cwd=process.cwd()] - Directory the checks scan.
   */
  start(cwd = process.cwd()) {
    if (this._isRunning) return;
    this._isRunning = true;
    this._cwd = cwd;
    logger.info('[Monitor] 👁️ Proactive monitor started');

    // Schedule each check on its own interval
    for (const check of BUILTIN_CHECKS) {
      const handle = setInterval(
        () => this._runCheck(check),
        check.intervalMinutes * 60 * 1000
      );
      this._intervals.set(check.id, handle);
      // Run immediately after 5s delay (staggered)
      const delay = BUILTIN_CHECKS.indexOf(check) * 5000 + 5000;
      setTimeout(() => this._runCheck(check), delay);
    }
  }

  /** Stop all scheduled checks. Safe to call when not running. */
  stop() {
    for (const handle of this._intervals.values()) clearInterval(handle);
    this._intervals.clear();
    this._isRunning = false;
    logger.info('[Monitor] Monitor stopped');
  }

  /**
   * Run a specific built-in check by ID.
   * @param {string} checkId - One of the BUILTIN_CHECKS ids.
   * @throws {Error} when checkId is not a built-in check.
   */
  async runCheck(checkId) {
    const check = BUILTIN_CHECKS.find(c => c.id === checkId);
    if (!check) throw new Error(`Unknown check: ${checkId}`);
    return this._runCheck(check);
  }

  /** Run ALL built-in checks immediately, sequentially, returning their results. */
  async runAll() {
    const results = [];
    for (const check of BUILTIN_CHECKS) {
      results.push(await this._runCheck(check));
    }
    return results;
  }

  // Dispatch a single check, persist its last-run timestamp, and record an
  // alert when it produced findings. A throwing check is logged (warn), not
  // propagated, so the scheduler keeps running.
  async _runCheck(check) {
    logger.info(`[Monitor] Running check: ${check.name}`);
    const startTime = Date.now();
    let findings = [];

    try {
      switch (check.id) {
        case 'new-todos': findings = await this._checkTodos(); break;
        case 'hardcoded-secrets': findings = await this._checkSecrets(); break;
        case 'stale-tasks': findings = this._checkStaleTasks(); break;
        case 'git-drift': findings = await this._checkGitDrift(); break;
        case 'missing-error-handling': findings = await this._checkMissingErrorHandling(); break;
      }
    } catch (err) {
      logger.warn(`[Monitor] Check "${check.id}" failed: ${err.message}`);
    }

    this._state.lastRun[check.id] = new Date().toISOString();
    this._saveState();

    if (findings.length > 0) {
      // NOTE(review): Date.now()-based ids can collide if two alerts land in
      // the same millisecond — consider crypto.randomUUID() if ids must be unique.
      const alert = {
        id: `alert_${Date.now()}`,
        checkId: check.id,
        checkName: check.name,
        severity: check.severity,
        findings,
        triggeredAt: new Date().toISOString(),
        acknowledged: false,
        durationMs: Date.now() - startTime,
      };
      this._alerts.alerts.push(alert);
      this._saveAlerts();
      logger.warn(`[Monitor] ⚠️ ${check.name}: ${findings.length} finding(s)`);
      return alert;
    }

    logger.info(`[Monitor] ✓ ${check.name}: clean`);
    return { checkId: check.id, findings: [], clean: true };
  }

  // ── Individual check implementations ────────────────────────────────────

  // Report up to 20 TODO/FIXME-style comments found by GrepTool.
  // NOTE(review): despite the 'new-todos' id this reports the CURRENT list,
  // not a diff against the previous run — the old code read the last-run
  // timestamp into an unused local; that dead code has been removed.
  async _checkTodos() {
    const result = await GrepTool.findTodos(this._cwd || process.cwd());
    const findings = [];

    for (const match of result.matches.slice(0, 20)) {
      findings.push({
        file: match.file,
        line: match.lineNumber,
        text: match.line.trim(),
        type: 'todo',
      });
    }
    return findings;
  }

  // Grep for common credential-looking patterns; each pattern is searched
  // independently and capped at 5 matches.
  async _checkSecrets() {
    const secretPatterns = [
      { pattern: '(api_key|apikey|api-key)\\s*[=:]\\s*["\']?[A-Za-z0-9]{20,}', label: 'API key' },
      { pattern: '(password|passwd|pwd)\\s*[=:]\\s*["\'][^"\']{6,}["\']', label: 'Hardcoded password' },
      { pattern: '(secret|token)\\s*[=:]\\s*["\'][A-Za-z0-9+/]{20,}["\']', label: 'Secret/token' },
      { pattern: 'Bearer [A-Za-z0-9\\-_]{20,}', label: 'Bearer token' },
      { pattern: 'sk-[A-Za-z0-9]{30,}', label: 'OpenAI-style API key' },
    ];

    const findings = [];
    for (const { pattern, label } of secretPatterns) {
      try {
        const result = await GrepTool.search(pattern, {
          cwd: this._cwd || process.cwd(),
          maxResults: 5,
          glob: '*.{js,ts,py,env,json}',
        });
        for (const match of result.matches) {
          // Skip .env.example, test files, and our own config
          if (match.file.includes('.example') || match.file.includes('test') || match.file.includes('.bak')) continue;
          findings.push({ file: match.file, line: match.lineNumber, label, text: match.line.trim().slice(0, 80) });
        }
      } catch { } // best-effort: a bad pattern or grep failure skips that pattern only
    }
    return findings;
  }

  // Flag open tasks older than 7 days (age measured from createdAt).
  _checkStaleTasks() {
    const tasks = TaskManager.list({ includeDone: false });
    const findings = [];
    const staleDays = 7;

    for (const task of tasks) {
      const ageDays = (Date.now() - new Date(task.createdAt).getTime()) / 86400000; // ms per day
      if (ageDays > staleDays) {
        findings.push({
          taskId: task.id,
          title: task.title,
          priority: task.priority,
          ageDays: Math.floor(ageDays),
          status: task.status,
        });
      }
    }
    return findings;
  }

  // List git-changed files (last commit, falling back to working-tree status)
  // that are absent from the knowledge base. Returns [] on any git failure.
  async _checkGitDrift() {
    try {
      const result = await BashTool.executeOrThrow(
        'git diff --name-only HEAD~1 HEAD 2>/dev/null || git status --porcelain',
        { cwd: this._cwd || process.cwd() }
      );

      const store = require('../storage/store');
      const ingestedFiles = new Set(store.getIngestedFiles());
      const findings = [];

      for (const line of result.stdout.split('\n').filter(Boolean)) {
        // Strip the porcelain status prefix (e.g. "M ", "?? ") when present.
        const file = line.replace(/^[MAD?]\s+/, '').trim();
        const absFile = path.resolve(this._cwd || process.cwd(), file);
        if (!ingestedFiles.has(absFile) && fs.existsSync(absFile)) {
          findings.push({ file, reason: 'Changed but not in knowledge base' });
        }
      }
      return findings.slice(0, 10);
    } catch {
      return []; // not a git repo / git unavailable — nothing to report
    }
  }

  // Heuristic grep for async functions whose body doesn't open with a try.
  async _checkMissingErrorHandling() {
    try {
      // Find async functions without try/catch
      const result = await GrepTool.search(
        'async\\s+\\w+\\s*\\([^)]*\\)\\s*\\{(?![^}]*try)',
        { cwd: this._cwd || process.cwd(), maxResults: 15, glob: '*.{js,ts}' }
      );

      return result.matches
        .filter(m => !m.file.includes('test') && !m.file.includes('.bak'))
        .slice(0, 10)
        .map(m => ({
          file: m.file,
          line: m.lineNumber,
          text: m.line.trim().slice(0, 80),
          suggestion: 'Wrap with try/catch',
        }));
    } catch { return []; } // lookahead may be unsupported by the grep backend
  }

  // ── Alert management ─────────────────────────────────────────────────────

  /**
   * List alerts, newest first.
   * @param {object} [opts]
   * @param {string} [opts.severity] - Filter to one severity level.
   * @param {boolean} [opts.unacknowledged=false] - Only unacknowledged alerts.
   * @param {number} [opts.limit=50] - Maximum number returned.
   */
  getAlerts(opts = {}) {
    const { severity, unacknowledged = false, limit = 50 } = opts;
    let alerts = [...this._alerts.alerts];
    if (severity) alerts = alerts.filter(a => a.severity === severity);
    if (unacknowledged) alerts = alerts.filter(a => !a.acknowledged);
    return alerts
      .sort((a, b) => new Date(b.triggeredAt) - new Date(a.triggeredAt))
      .slice(0, limit);
  }

  /**
   * Mark one alert acknowledged and persist.
   * @throws {Error} when no alert has the given id.
   */
  acknowledge(alertId) {
    const alert = this._alerts.alerts.find(a => a.id === alertId);
    if (!alert) throw new Error(`Alert not found: ${alertId}`);
    alert.acknowledged = true;
    alert.acknowledgedAt = new Date().toISOString();
    this._saveAlerts();
    return alert;
  }

  /** Acknowledge every outstanding alert; returns how many were updated. */
  acknowledgeAll() {
    const now = new Date().toISOString();
    let count = 0;
    for (const alert of this._alerts.alerts) {
      if (!alert.acknowledged) { alert.acknowledged = true; alert.acknowledgedAt = now; count++; }
    }
    this._saveAlerts();
    return count;
  }

  /** Snapshot of scheduler state, per-check last-run times, and alert counts. */
  getStatus() {
    const alerts = this._alerts.alerts;
    const unacked = alerts.filter(a => !a.acknowledged);
    return {
      isRunning: this._isRunning,
      checks: BUILTIN_CHECKS.map(c => ({
        ...c,
        lastRun: this._state.lastRun[c.id] || null,
      })),
      alerts: {
        total: alerts.length,
        unacknowledged: unacked.length,
        critical: unacked.filter(a => a.severity === 'critical').length,
        warn: unacked.filter(a => a.severity === 'warn').length,
      },
    };
  }
}

// Exported as a singleton instance (under the class name) plus the check metadata.
module.exports = { ProactiveMonitor: new ProactiveMonitor(), SEVERITY, BUILTIN_CHECKS };
|
|
@@ -0,0 +1,230 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Well-structured, multi-step pipelines for common developer workflows.
|
|
3
|
+
* Each pipeline is a sequence of named steps, each producing output
|
|
4
|
+
* that feeds into the next.
|
|
5
|
+
*/
|
|
6
|
+
|
|
7
|
+
const logger = require('../utils/logger');
|
|
8
|
+
const costTracker = require('../utils/costTracker');
|
|
9
|
+
const { MemoryManager } = require('../memory/memoryManager');
|
|
10
|
+
const { TaskManager } = require('../tasks/taskManager');
|
|
11
|
+
const indexer = require('../core/indexer');
|
|
12
|
+
|
|
13
|
+
// ── PIPELINE STEP REGISTRY ─────────────────────────────────────────────────────
|
|
14
|
+
// Each step is a function: (input, context, state) => output
|
|
15
|
+
// Each step is a function: (input, context, state) => output.
// `state` holds prior steps' outputs keyed by step name; heavy dependencies
// are require()d lazily inside each step to avoid import cycles at load time.
const STEP_REGISTRY = {

  // Search the index for documents relevant to the query/task.
  'retrieve-context': async (input, ctx) => {
    const docs = indexer.search(input.query || input.task || input, ctx.topK || 8);
    return { docs, count: docs.length };
  },

  // Re-rank/shape the retrieved docs for the given task and mode.
  'engineer-context': async (input, ctx, state) => {
    const contextEngineer = require('../context/contextEngineer');
    const docs = state['retrieve-context']?.docs || [];
    const engineered = contextEngineer.engineer(docs, input.task, input.mode);
    return engineered;
  },

  // Ask the main query engine directly.
  'query-ai': async (input, ctx, state) => {
    const { QueryEngine } = require('../core/queryEngine');
    const result = await QueryEngine.query(input.task || input.query, {
      mode: input.mode,
      sessionId: ctx.sessionId,
    });
    return result;
  },

  'run-debug-agent': async (input, ctx, state) => {
    const { DebugAgent } = require('../agents/specialists');
    const docs = state['retrieve-context']?.docs || [];
    return DebugAgent.run(input.task || input.error, { context: docs, sessionId: ctx.sessionId });
  },

  'run-security-agent': async (input, ctx, state) => {
    const { SecurityAgent } = require('../agents/specialists');
    const docs = state['retrieve-context']?.docs || [];
    return SecurityAgent.run(input.task, { context: docs, sessionId: ctx.sessionId });
  },

  'run-architecture-agent': async (input, ctx, state) => {
    const { ArchitectureAgent } = require('../agents/specialists');
    const docs = state['retrieve-context']?.docs || [];
    return ArchitectureAgent.run(input.task, { context: docs, sessionId: ctx.sessionId });
  },

  'run-doc-agent': async (input, ctx, state) => {
    const { DocumentationAgent } = require('../agents/specialists');
    const docs = state['retrieve-context']?.docs || [];
    return DocumentationAgent.run(input.task, { context: docs, sessionId: ctx.sessionId });
  },

  'generate-plan': async (input, ctx, state) => {
    const plannerEngine = require('../planner/plannerEngine');
    const docs = state['retrieve-context']?.docs || [];
    return plannerEngine.generatePlan(input.task, docs, ctx.sessionId);
  },

  'decompose-task': async (input, ctx) => {
    const taskDecomposer = require('../agents/taskDecomposer');
    return taskDecomposer.decompose(input.task, ctx.sessionId);
  },

  'git-review': async (input, ctx) => {
    const GitTool = require('../tools/GitTool');
    return GitTool.review({ cwd: input.cwd, focus: input.focus });
  },

  'grep-todos': async (input, ctx) => {
    const GrepTool = require('../tools/GrepTool');
    return GrepTool.findTodos(input.cwd || process.cwd());
  },

  'grep-definitions': async (input, ctx) => {
    const GrepTool = require('../tools/GrepTool');
    return GrepTool.findDefinitions(input.symbol, input.cwd);
  },

  // Auto-create tasks from agent findings (reads the most recent step output).
  'create-tasks': async (input, ctx, state) => {
    const prevResult = Object.values(state).pop();
    const answer = prevResult?.answer || prevResult?.result || '';

    // Extract action items: lines starting with "-", "*", or a numbered
    // marker like "1." / "1)".
    // BUG FIX: the old filter /^[\-\*\d]\s/ required whitespace right after a
    // single digit, so numbered items ("1. Do X") were never matched.
    const actionItems = answer.split('\n')
      .filter(l => /^(?:[-*]|\d+[.)])\s/.test(l.trim()))
      .slice(0, 5)
      .map(l => l.replace(/^[-*\d.\s)]+/, '').trim())
      .filter(l => l.length > 10); // drop fragments too short to be real tasks

    const tasks = actionItems.map(title =>
      TaskManager.create({ title, priority: 'medium', tags: ['pipeline-generated'], linkedQuery: input.task })
    );

    return { tasksCreated: tasks.length, tasks };
  },

  // Persist a summary of the most recent step output as a memory fact
  // (skipped for short/empty answers).
  'save-to-memory': async (input, ctx, state) => {
    const prevResult = Object.values(state).pop();
    const answer = prevResult?.answer || prevResult?.synthesis || '';
    if (answer && answer.length > 50) {
      const mem = MemoryManager.add(
        `Pipeline result for "${(input.task || '').slice(0, 60)}": ${answer.slice(0, 300)}`,
        'fact',
        ['pipeline-result']
      );
      return { saved: true, memoryId: mem.id };
    }
    return { saved: false };
  },
};
|
|
121
|
+
|
|
122
|
+
// ── PRE-BUILT PIPELINES ────────────────────────────────────────────────────────
|
|
123
|
+
// Each entry: { description, steps }. Every step name should exist in
// STEP_REGISTRY — unknown steps are skipped at run time with a warning.
// Step outputs flow forward via the shared `state` object keyed by step name.
const PIPELINES = {
  'debug-pipeline': {
    description: 'Full debug workflow: retrieve context → run debug agent → create fix tasks',
    steps: ['retrieve-context', 'run-debug-agent', 'create-tasks', 'save-to-memory'],
  },
  'security-audit-pipeline': {
    description: 'Security scan: retrieve context → architecture overview → security scan → save findings',
    steps: ['retrieve-context', 'run-architecture-agent', 'run-security-agent', 'create-tasks', 'save-to-memory'],
  },
  'onboarding-pipeline': {
    description: 'New developer onboarding: codebase context → architecture → docs → todo list',
    steps: ['retrieve-context', 'run-architecture-agent', 'run-doc-agent', 'grep-todos', 'save-to-memory'],
  },
  'feature-planning-pipeline': {
    description: 'Plan a feature: retrieve context → decompose task → generate plan → create tasks',
    steps: ['retrieve-context', 'decompose-task', 'generate-plan', 'create-tasks'],
  },
  'code-review-pipeline': {
    description: 'Full code review: git diff → security scan → architecture check → doc suggestions',
    steps: ['retrieve-context', 'git-review', 'run-security-agent', 'save-to-memory'],
  },
  'impact-analysis-pipeline': {
    description: 'Analyse change impact: retrieve context → architecture analysis → debug risks → plan',
    steps: ['retrieve-context', 'run-architecture-agent', 'run-debug-agent', 'generate-plan', 'create-tasks'],
  },
};
|
|
149
|
+
|
|
150
|
+
class PipelineEngine {
  /**
   * Run one of the pre-built pipelines by name.
   * @param {string} pipelineName - Key into PIPELINES.
   * @param {string|object} input - Task text, or an object like { task, query, mode, ... }.
   * @param {object} [options] - { sessionId, topK } forwarded to each step.
   * @throws {Error} when the pipeline name is unknown.
   */
  async run(pipelineName, input, options = {}) {
    const definition = PIPELINES[pipelineName];
    if (!definition) {
      throw new Error(`Unknown pipeline: ${pipelineName}. Available: ${Object.keys(PIPELINES).join(', ')}`);
    }

    logger.info(`[Pipeline] Running "${pipelineName}" with ${definition.steps.length} steps`);
    return this._execute(definition.steps, input, options);
  }

  /**
   * Run an ad-hoc pipeline from an ordered array of step names.
   */
  async runCustom(steps, input, options = {}) {
    logger.info(`[Pipeline] Custom pipeline: [${steps.join(' → ')}]`);
    return this._execute(steps, input, options);
  }

  // Execute the steps in order. Each step's output is stored under its name
  // so later steps can read earlier results; a failing or unknown step is
  // recorded and skipped — the pipeline is resilient and always completes.
  async _execute(steps, input, options = {}) {
    const { sessionId = 'default', topK = 8 } = options;
    const ctx = { sessionId, topK };
    const outputs = {};
    const trace = [];
    const startedAt = Date.now();

    for (const name of steps) {
      const step = STEP_REGISTRY[name];
      if (!step) {
        logger.warn(`[Pipeline] Unknown step: ${name} — skipping`);
        continue;
      }

      logger.info(`[Pipeline] Step: ${name}`);
      const t0 = Date.now();

      try {
        const output = await step(input, ctx, outputs);
        outputs[name] = output;
        trace.push({
          step: name,
          success: true,
          durationMs: Date.now() - t0,
          outputKeys: Object.keys(output || {}),
        });
      } catch (err) {
        logger.error(`[Pipeline] Step "${name}" failed: ${err.message}`);
        trace.push({ step: name, success: false, error: err.message, durationMs: Date.now() - t0 });
        // Continue to next step (resilient pipeline)
      }
    }

    // Final output = result of the last step that succeeded (null if none did).
    let finalOutput = null;
    for (let i = trace.length - 1; i >= 0; i--) {
      if (trace[i].success) {
        finalOutput = outputs[trace[i].step];
        break;
      }
    }

    return {
      pipeline: steps.join(' → '),
      input: typeof input === 'string' ? input : input.task,
      steps: trace,
      state: outputs, // All intermediate results
      finalOutput,
      durationMs: Date.now() - startedAt,
      successCount: trace.filter(s => s.success).length,
      totalSteps: steps.length,
    };
  }

  /** List the pre-built pipelines with their descriptions and step lists. */
  getAvailablePipelines() {
    return Object.entries(PIPELINES).map(([name, def]) => ({
      name, description: def.description, steps: def.steps,
    }));
  }

  /** List every step name usable in a custom pipeline. */
  getAvailableSteps() {
    return Object.keys(STEP_REGISTRY);
  }
}

module.exports = new PipelineEngine();
|