rufloui 0.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/'1' +0 -0
- package/.env.example +46 -0
- package/CHANGELOG.md +87 -0
- package/CLAUDE.md +287 -0
- package/LICENSE +21 -0
- package/README.md +316 -0
- package/Webhooks) +0 -0
- package/docs/plans/2026-03-11-github-webhooks.md +957 -0
- package/docs/screenshot-swarm-monitor.png +0 -0
- package/frontend +0 -0
- package/index.html +13 -0
- package/package.json +56 -0
- package/public/vite.svg +4 -0
- package/src/backend/__tests__/webhook-github.test.ts +934 -0
- package/src/backend/jsonl-monitor.ts +430 -0
- package/src/backend/server.ts +2972 -0
- package/src/backend/telegram-bot.ts +511 -0
- package/src/backend/webhook-github.ts +350 -0
- package/src/frontend/App.tsx +461 -0
- package/src/frontend/api.ts +281 -0
- package/src/frontend/components/ErrorBoundary.tsx +98 -0
- package/src/frontend/components/Layout.tsx +431 -0
- package/src/frontend/components/ui/Button.tsx +111 -0
- package/src/frontend/components/ui/Card.tsx +51 -0
- package/src/frontend/components/ui/StatusBadge.tsx +60 -0
- package/src/frontend/main.tsx +63 -0
- package/src/frontend/pages/AgentVizPanel.tsx +428 -0
- package/src/frontend/pages/AgentsPanel.tsx +445 -0
- package/src/frontend/pages/ConfigPanel.tsx +661 -0
- package/src/frontend/pages/Dashboard.tsx +482 -0
- package/src/frontend/pages/HiveMindPanel.tsx +355 -0
- package/src/frontend/pages/HooksPanel.tsx +240 -0
- package/src/frontend/pages/LogsPanel.tsx +261 -0
- package/src/frontend/pages/MemoryPanel.tsx +444 -0
- package/src/frontend/pages/NeuralPanel.tsx +301 -0
- package/src/frontend/pages/PerformancePanel.tsx +198 -0
- package/src/frontend/pages/SessionsPanel.tsx +428 -0
- package/src/frontend/pages/SetupWizard.tsx +181 -0
- package/src/frontend/pages/SwarmMonitorPanel.tsx +634 -0
- package/src/frontend/pages/SwarmPanel.tsx +322 -0
- package/src/frontend/pages/TasksPanel.tsx +535 -0
- package/src/frontend/pages/WebhooksPanel.tsx +335 -0
- package/src/frontend/pages/WorkflowsPanel.tsx +448 -0
- package/src/frontend/store.ts +185 -0
- package/src/frontend/styles/global.css +113 -0
- package/src/frontend/test-setup.ts +1 -0
- package/src/frontend/tour/TourContext.tsx +161 -0
- package/src/frontend/tour/tourSteps.ts +181 -0
- package/src/frontend/tour/tourStyles.css +116 -0
- package/src/frontend/types.ts +239 -0
- package/src/frontend/utils/formatTime.test.ts +83 -0
- package/src/frontend/utils/formatTime.ts +23 -0
- package/tsconfig.json +23 -0
- package/vite.config.ts +26 -0
- package/vitest.config.ts +17 -0
- package/{,+ +0 -0
|
@@ -0,0 +1,2972 @@
|
|
|
1
|
+
import express, { Router, Request, Response, RequestHandler } from 'express'
|
|
2
|
+
import cors from 'cors'
|
|
3
|
+
import { WebSocketServer, WebSocket } from 'ws'
|
|
4
|
+
import { createServer } from 'http'
|
|
5
|
+
import { exec, execFile, spawn } from 'child_process'
|
|
6
|
+
import { promisify } from 'util'
|
|
7
|
+
import os from 'os'
|
|
8
|
+
import path from 'path'
|
|
9
|
+
import fs from 'fs'
|
|
10
|
+
import crypto from 'crypto'
|
|
11
|
+
import { startMonitoring, stopMonitoring, getSessionTree, getAllMonitoredSessions, getNodeLogs } from './jsonl-monitor'
|
|
12
|
+
import { initTelegramBot, TelegramConfig, TelegramHandle } from './telegram-bot'
|
|
13
|
+
import { loadGitHubWebhookConfig, githubWebhookRoutes, updateWebhookEventByTaskId } from './webhook-github'
|
|
14
|
+
|
|
15
|
+
const execAsync = promisify(exec)
|
|
16
|
+
const execFileAsync = promisify(execFile)
|
|
17
|
+
const PORT = Number(process.env.PORT) || 3001
|
|
18
|
+
const CLI = process.env.RUFLO_CLI || 'npx -y @claude-flow/cli@latest'
|
|
19
|
+
const CLI_PARTS = (process.env.RUFLO_CLI || 'npx -y @claude-flow/cli@latest').split(/\s+/)
|
|
20
|
+
const CLI_BIN = CLI_PARTS[0]
|
|
21
|
+
const CLI_BASE_ARGS = CLI_PARTS.slice(1)
|
|
22
|
+
const CLI_TIMEOUT = Number(process.env.RUFLO_CLI_TIMEOUT) || 30_000
|
|
23
|
+
// Live Telegram bot handle; null until initTelegramBot() has been called.
let telegramBot: TelegramHandle | null = null
// Current Telegram configuration; starts disabled with all high-signal
// notification categories on and the chatty taskProgress category off.
let telegramConfig: TelegramConfig = {
  enabled: false, token: '', chatId: '',
  notifications: { taskCompleted: true, taskFailed: true, swarmInit: true, swarmShutdown: true, agentError: true, taskProgress: false },
}
|
|
28
|
+
|
|
29
|
+
interface TelegramLogEntry { timestamp: string; direction: 'in' | 'out'; message: string }
|
|
30
|
+
const telegramActivityLog: TelegramLogEntry[] = []
|
|
31
|
+
function addTelegramLog(direction: 'in' | 'out', message: string) {
|
|
32
|
+
telegramActivityLog.push({ timestamp: new Date().toISOString(), direction, message })
|
|
33
|
+
if (telegramActivityLog.length > 50) telegramActivityLog.shift()
|
|
34
|
+
}
|
|
35
|
+
const TELEGRAM_CONFIG_FILE = () => path.join(PERSIST_DIR, 'telegram.json')
|
|
36
|
+
|
|
37
|
+
function loadTelegramConfig(): TelegramConfig {
|
|
38
|
+
try {
|
|
39
|
+
const filePath = TELEGRAM_CONFIG_FILE()
|
|
40
|
+
if (fs.existsSync(filePath)) {
|
|
41
|
+
const raw = JSON.parse(fs.readFileSync(filePath, 'utf-8'))
|
|
42
|
+
return {
|
|
43
|
+
enabled: raw.enabled === true,
|
|
44
|
+
token: String(raw.token || ''),
|
|
45
|
+
chatId: String(raw.chatId || ''),
|
|
46
|
+
notifications: {
|
|
47
|
+
taskCompleted: raw.notifications?.taskCompleted ?? true,
|
|
48
|
+
taskFailed: raw.notifications?.taskFailed ?? true,
|
|
49
|
+
swarmInit: raw.notifications?.swarmInit ?? true,
|
|
50
|
+
swarmShutdown: raw.notifications?.swarmShutdown ?? true,
|
|
51
|
+
agentError: raw.notifications?.agentError ?? true,
|
|
52
|
+
taskProgress: raw.notifications?.taskProgress ?? false,
|
|
53
|
+
},
|
|
54
|
+
}
|
|
55
|
+
}
|
|
56
|
+
} catch { /* ignore */ }
|
|
57
|
+
// Fall back to env vars
|
|
58
|
+
return {
|
|
59
|
+
enabled: process.env.TELEGRAM_ENABLED === 'true',
|
|
60
|
+
token: process.env.TELEGRAM_BOT_TOKEN || '',
|
|
61
|
+
chatId: process.env.TELEGRAM_CHAT_ID || '',
|
|
62
|
+
notifications: { taskCompleted: true, taskFailed: true, swarmInit: true, swarmShutdown: true, agentError: true, taskProgress: false },
|
|
63
|
+
}
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
function saveTelegramConfig(config: TelegramConfig) {
|
|
67
|
+
try {
|
|
68
|
+
ensurePersistDir()
|
|
69
|
+
const filePath = TELEGRAM_CONFIG_FILE()
|
|
70
|
+
fs.writeFileSync(filePath, JSON.stringify(config, null, 2))
|
|
71
|
+
// Restrict file permissions (owner-only read/write) to protect the token
|
|
72
|
+
try { fs.chmodSync(filePath, 0o600) } catch { /* Windows may not support chmod */ }
|
|
73
|
+
} catch (err) {
|
|
74
|
+
console.error('[telegram] Config save failed:', err)
|
|
75
|
+
}
|
|
76
|
+
}
|
|
77
|
+
// Processes with no output for this long are considered zombies and reaped (ms).
const ZOMBIE_TIMEOUT = Number(process.env.RUFLO_ZOMBIE_TIMEOUT) || 300_000 // 5 min
// Skip permission prompts by default; only an explicit 'false' disables this.
let SKIP_PERMISSIONS = process.env.RUFLOUI_SKIP_PERMISSIONS !== 'false'

// GitHub webhook configuration, loaded once at module init from webhook-github.
let githubWebhookConfig = loadGitHubWebhookConfig()
|
|
81
|
+
|
|
82
|
+
// ── PERSISTENCE LAYER ───────────────────────────────────────────────
// Writes critical in-memory state to .ruflo/ as JSON files so it
// survives server restarts. Debounced to avoid excessive disk I/O.
// Root directory for all persisted files; overridable via RUFLO_PERSIST_DIR.
const PERSIST_DIR = process.env.RUFLO_PERSIST_DIR
  ? path.resolve(process.env.RUFLO_PERSIST_DIR)
  : path.join(process.cwd(), '.ruflo')

// Snapshot shape written to .ruflo/state.json. Map-backed stores are
// serialised as entry arrays; Sets as plain arrays.
interface PersistedState {
  tasks: Array<[string, unknown]>
  workflows: Array<[string, unknown]>
  sessions: Array<[string, unknown]>
  agents: Array<[string, { id: string; name: string; type: string }]>
  terminatedAgents: string[]
  agentActivity: Array<[string, unknown]>
  // Last-known swarm configuration so /status survives a restart.
  swarmConfig: {
    id: string; topology: string; strategy: string; maxAgents: number
    createdAt: string; shutdown: boolean
  }
  // Rolling performance samples (capped at 200 on save).
  perfHistory: Array<{ timestamp: string; latency: number; throughput: number }>
  lastPerfMetrics: unknown
  benchmarkHasRun: boolean
  currentSwarmAgentIds: string[]
}
|
|
105
|
+
|
|
106
|
+
function ensurePersistDir() {
|
|
107
|
+
if (!fs.existsSync(PERSIST_DIR)) fs.mkdirSync(PERSIST_DIR, { recursive: true })
|
|
108
|
+
}
|
|
109
|
+
|
|
110
|
+
let _saveTimer: ReturnType<typeof setTimeout> | null = null
|
|
111
|
+
const SAVE_DEBOUNCE_MS = 2000
|
|
112
|
+
|
|
113
|
+
function scheduleSave() {
|
|
114
|
+
if (_saveTimer) return // already scheduled
|
|
115
|
+
_saveTimer = setTimeout(() => {
|
|
116
|
+
_saveTimer = null
|
|
117
|
+
saveToDisk()
|
|
118
|
+
}, SAVE_DEBOUNCE_MS)
|
|
119
|
+
}
|
|
120
|
+
|
|
121
|
+
// Snapshot all critical in-memory stores into .ruflo/state.json.
// Uses a write-to-tmp-then-rename sequence so a crash mid-write never
// leaves a truncated state file. Best-effort: failures are logged only.
function saveToDisk() {
  try {
    ensurePersistDir()
    const state: PersistedState = {
      // Map/Set stores are flattened to arrays for JSON serialisation.
      tasks: [...taskStore.entries()],
      workflows: [...workflowStore.entries()],
      sessions: [...sessionStore.entries()],
      agents: [...agentRegistry.entries()],
      terminatedAgents: [...terminatedAgents],
      agentActivity: [...agentActivity.entries()],
      swarmConfig: {
        id: lastSwarmId, topology: lastSwarmTopology, strategy: lastSwarmStrategy,
        maxAgents: lastSwarmMaxAgents, createdAt: lastSwarmCreatedAt, shutdown: swarmShutdown,
      },
      perfHistory: perfHistory.slice(-200), // cap at 200 entries
      lastPerfMetrics,
      benchmarkHasRun,
      currentSwarmAgentIds: [...currentSwarmAgentIds],
    }
    // Atomic write: write to .tmp then rename to prevent corruption on crash
    const target = path.join(PERSIST_DIR, 'state.json')
    const tmp = target + '.tmp'
    fs.writeFileSync(tmp, JSON.stringify(state, null, 2))
    fs.renameSync(tmp, target)
  } catch (err) {
    console.error('[persist] Save failed:', err)
  }
}
|
|
149
|
+
|
|
150
|
+
// Restore persisted state from .ruflo/state.json into the in-memory stores.
// Called once at startup. If only the .tmp file exists (crash mid-save),
// it is promoted to the real file first. Load failures are logged only.
function loadFromDisk() {
  const filePath = path.join(PERSIST_DIR, 'state.json')
  const tmpPath = filePath + '.tmp'
  // If .tmp exists but main doesn't, recover from .tmp (crash during write)
  if (!fs.existsSync(filePath) && fs.existsSync(tmpPath)) {
    console.log('[persist] Recovering from .tmp file (previous save was interrupted)')
    try { fs.renameSync(tmpPath, filePath) } catch { /* ignore */ }
  }
  if (!fs.existsSync(filePath)) return
  try {
    const raw = fs.readFileSync(filePath, 'utf-8')
    // NOTE(review): JSON.parse result is trusted as PersistedState without
    // schema validation — a hand-edited state file could inject bad shapes.
    const state: PersistedState = JSON.parse(raw)

    // Restore tasks
    if (state.tasks) for (const [k, v] of state.tasks) taskStore.set(k, v as any)
    // Restore workflows
    if (state.workflows) for (const [k, v] of state.workflows) workflowStore.set(k, v as any)
    // Restore sessions
    if (state.sessions) for (const [k, v] of state.sessions) sessionStore.set(k, v as any)
    // Restore agent registry
    if (state.agents) for (const [k, v] of state.agents) agentRegistry.set(k, v)
    // Restore terminated agents
    if (state.terminatedAgents) for (const id of state.terminatedAgents) terminatedAgents.add(id)
    // Restore agent activity
    if (state.agentActivity) for (const [k, v] of state.agentActivity) agentActivity.set(k, v as any)
    // Restore swarm config (missing fields fall back to boot defaults)
    if (state.swarmConfig) {
      lastSwarmId = state.swarmConfig.id || ''
      lastSwarmTopology = state.swarmConfig.topology || 'hierarchical'
      lastSwarmStrategy = state.swarmConfig.strategy || 'specialized'
      lastSwarmMaxAgents = state.swarmConfig.maxAgents || 8
      lastSwarmCreatedAt = state.swarmConfig.createdAt || ''
      swarmShutdown = state.swarmConfig.shutdown ?? true
    }
    // Restore perf
    if (state.perfHistory) perfHistory.push(...state.perfHistory)
    if (state.lastPerfMetrics) lastPerfMetrics = state.lastPerfMetrics as typeof lastPerfMetrics
    if (state.benchmarkHasRun) benchmarkHasRun = state.benchmarkHasRun
    // Restore current swarm agent IDs
    if (state.currentSwarmAgentIds) {
      currentSwarmAgentIds = new Set(state.currentSwarmAgentIds)
    }

    const taskCount = taskStore.size
    const wfCount = workflowStore.size
    const agentCount = agentRegistry.size
    console.log(`[persist] Loaded: ${taskCount} tasks, ${wfCount} workflows, ${agentCount} agents`)
  } catch (err) {
    console.error('[persist] Load failed:', err)
  }
}
|
|
201
|
+
|
|
202
|
+
// Helper: call after any state mutation to schedule a save
|
|
203
|
+
function persistState() {
|
|
204
|
+
scheduleSave()
|
|
205
|
+
}
|
|
206
|
+
|
|
207
|
+
// ── OUTPUT HISTORY ───────────────────────────────────────────────────
|
|
208
|
+
// Persists task output to .ruflo/outputs/<taskId>.jsonl so it survives
|
|
209
|
+
// server restarts and page reloads.
|
|
210
|
+
const OUTPUTS_DIR = path.join(PERSIST_DIR, 'outputs')
|
|
211
|
+
|
|
212
|
+
function ensureOutputsDir() {
|
|
213
|
+
if (!fs.existsSync(OUTPUTS_DIR)) fs.mkdirSync(OUTPUTS_DIR, { recursive: true })
|
|
214
|
+
}
|
|
215
|
+
|
|
216
|
+
function appendTaskOutputLine(taskId: string, line: { type: string; content: string; agentId?: string; tool?: string; timestamp?: string }) {
|
|
217
|
+
try {
|
|
218
|
+
ensureOutputsDir()
|
|
219
|
+
const entry = { ...line, timestamp: line.timestamp || new Date().toISOString() }
|
|
220
|
+
fs.appendFileSync(path.join(OUTPUTS_DIR, `${taskId}.jsonl`), JSON.stringify(entry) + '\n')
|
|
221
|
+
} catch { /* non-critical */ }
|
|
222
|
+
}
|
|
223
|
+
|
|
224
|
+
function readTaskOutputHistory(taskId: string, tail = 200): Array<{ type: string; content: string; agentId?: string; tool?: string; timestamp: string }> {
|
|
225
|
+
const filePath = path.join(OUTPUTS_DIR, `${taskId}.jsonl`)
|
|
226
|
+
if (!fs.existsSync(filePath)) return []
|
|
227
|
+
try {
|
|
228
|
+
const lines = fs.readFileSync(filePath, 'utf-8').split('\n').filter(Boolean)
|
|
229
|
+
const entries = []
|
|
230
|
+
for (const line of lines.slice(-tail)) {
|
|
231
|
+
try { entries.push(JSON.parse(line)) } catch { /* skip */ }
|
|
232
|
+
}
|
|
233
|
+
return entries
|
|
234
|
+
} catch { return [] }
|
|
235
|
+
}
|
|
236
|
+
|
|
237
|
+
// Connected WebSocket clients; broadcast() fans out to every open socket.
const wsClients = new Set<WebSocket>()

// Types that represent persistent state changes — trigger disk save
const PERSIST_EVENTS = new Set([
  'task:added', 'task:updated', 'task:list',
  'workflow:added', 'workflow:updated',
  'session:added', 'session:updated', 'session:list', 'session:active',
  'swarm:status', 'swarm-monitor:purged',
  'agent:activity', 'agent:added', 'agent:removed', 'agents:cleared',
  'performance:metrics',
])
|
|
248
|
+
|
|
249
|
+
// Fan a typed event out to every open WebSocket client, then run the
// side-channel hooks: debounced persistence, task-output history,
// Telegram forwarding, and GitHub webhook status updates.
function broadcast(type: string, payload: unknown) {
  const msg = JSON.stringify({ type, payload, timestamp: new Date().toISOString() })
  for (const ws of wsClients) {
    if (ws.readyState === WebSocket.OPEN) ws.send(msg)
  }
  // Auto-persist on significant state changes
  if (PERSIST_EVENTS.has(type)) persistState()
  // Persist task output lines to disk for history across reloads
  if (type === 'task:output') {
    const p = payload as { id?: string; type?: string; content?: string; tool?: string; input?: string; agentId?: string; code?: number }
    if (p?.id) {
      // Render each output kind to a single display line; text/raw are
      // truncated to 300 chars to bound file growth.
      let line = ''
      if (p.type === 'tool') line = `[tool] ${p.tool || ''}: ${p.input || ''}`
      else if (p.type === 'stderr') line = `[err] ${p.content || ''}`
      else if (p.type === 'text') line = p.content?.slice(0, 300) || ''
      else if (p.type === 'raw') line = p.content?.slice(0, 300) || ''
      else if (p.type === 'progress') line = p.content || ''
      else if (p.type === 'done') line = `--- Done (exit ${p.code ?? '?'}) ---`
      if (line) appendTaskOutputLine(p.id, { type: p.type || 'text', content: line, agentId: p.agentId, tool: p.tool })
    }
  }
  // Forward to Telegram bot (fire-and-forget)
  telegramBot?.onBroadcast(type, payload)
  // Update webhook event status when linked task completes/fails
  if (type === 'task:updated') {
    const p2 = payload as { id?: string; status?: string }
    if (p2?.id && (p2.status === 'completed' || p2.status === 'failed')) {
      updateWebhookEventByTaskId(p2.id, p2.status as 'completed' | 'failed')
    }
  }
}
|
|
280
|
+
|
|
281
|
+
// Remove shell metacharacters that could enable injection in spawn(..., { shell: true }) calls
|
|
282
|
+
function sanitizeShellArg(arg: string): string {
|
|
283
|
+
return arg.replace(/[;&|`$(){}[\]!#~<>\\]/g, '')
|
|
284
|
+
}
|
|
285
|
+
|
|
286
|
+
// Run one claude-flow CLI command and return its trimmed stdout, plus a
// parsed value when stdout is valid JSON. On failure, salvages any stdout
// the failed process produced before rethrowing a labelled error.
// NOTE(review): shell: true means the shell re-interprets CLI_BIN and args;
// callers must sanitize untrusted input (see sanitizeShellArg) — confirm
// every call site does.
async function execCli(command: string, args: string[] = []): Promise<{ raw: string; parsed?: unknown }> {
  const fullArgs = [...CLI_BASE_ARGS, command, ...args]
  try {
    const { stdout, stderr } = await execFileAsync(CLI_BIN, fullArgs, {
      timeout: CLI_TIMEOUT,
      encoding: 'utf-8',
      shell: true,
      windowsHide: true,
    })
    const text = stdout.trim()
    // Try JSON parse first
    try { return { raw: text, parsed: JSON.parse(text) } } catch { /* not JSON */ }
    return { raw: text }
  } catch (err: unknown) {
    const msg = err instanceof Error ? err.message : String(err)
    // CLI may write output to stderr or exit non-zero but still have useful stdout
    if (err && typeof err === 'object' && 'stdout' in err) {
      const stdout = String((err as { stdout: string }).stdout).trim()
      if (stdout) return { raw: stdout }
    }
    throw new Error(`CLI error (${command}): ${msg}`)
  }
}
|
|
309
|
+
|
|
310
|
+
function parseCliOutput(raw: string): unknown {
|
|
311
|
+
// Try to extract key-value pairs from table output
|
|
312
|
+
const lines = raw.split('\n').filter(l => l.trim() && !l.match(/^[+─┌┐└┘├┤┬┴┼═╔╗╚╝╠╣╦╩╬\-]+$/))
|
|
313
|
+
const data: Record<string, string> = {}
|
|
314
|
+
for (const line of lines) {
|
|
315
|
+
const match = line.match(/^\|\s*(.+?)\s*\|\s*(.+?)\s*\|$/)
|
|
316
|
+
if (match && !match[1].match(/^-+$/)) {
|
|
317
|
+
data[match[1].trim()] = match[2].trim()
|
|
318
|
+
}
|
|
319
|
+
}
|
|
320
|
+
return Object.keys(data).length > 0 ? data : { raw }
|
|
321
|
+
}
|
|
322
|
+
|
|
323
|
+
// Parse CLI table with headers (| Col1 | Col2 | ... |) into array of objects
|
|
324
|
+
function parseCliTable(raw: string): Record<string, string>[] {
|
|
325
|
+
const lines = raw.replace(/\r/g, '').split('\n')
|
|
326
|
+
const dataLines = lines.filter(l => l.trim().startsWith('|') && !l.match(/^[|+\-─\s]+$/))
|
|
327
|
+
if (dataLines.length < 2) return [] // need header + at least 1 row
|
|
328
|
+
const splitRow = (line: string) =>
|
|
329
|
+
line.split('|').slice(1, -1).map(c => c.trim().replace(/\.{3}$/, ''))
|
|
330
|
+
const headers = splitRow(dataLines[0]).map(h => h.toLowerCase().replace(/\s+/g, '_'))
|
|
331
|
+
return dataLines.slice(1).map(line => {
|
|
332
|
+
const cells = splitRow(line)
|
|
333
|
+
const obj: Record<string, string> = {}
|
|
334
|
+
headers.forEach((h, i) => { obj[h] = cells[i] ?? '' })
|
|
335
|
+
return obj
|
|
336
|
+
})
|
|
337
|
+
}
|
|
338
|
+
|
|
339
|
+
function h(fn: (req: Request, res: Response) => Promise<void>): RequestHandler {
|
|
340
|
+
return async (req, res, _next) => {
|
|
341
|
+
try { await fn(req, res) } catch (err: unknown) {
|
|
342
|
+
res.status(500).json({ error: err instanceof Error ? err.message : String(err) })
|
|
343
|
+
}
|
|
344
|
+
}
|
|
345
|
+
}
|
|
346
|
+
|
|
347
|
+
// Build the /system router: health, preflight, host info, process metrics,
// daemon status and a reset stub. All handlers go through h() so failures
// surface as HTTP 500 JSON.
function systemRoutes(): Router {
  const r = Router()
  // `system` doesn't exist in ruflo CLI - use `status` and `doctor`
  r.get('/health', h(async (_req, res) => {
    try {
      const { raw } = await execCli('doctor')
      // Summary counts come from the doctor summary line; >3 warnings
      // downgrades overall status to 'degraded'.
      const passed = raw.match(/(\d+) passed/)?.[1] ?? '0'
      const warnings = raw.match(/(\d+) warning/)?.[1] ?? '0'
      const status = Number(warnings) > 3 ? 'degraded' : 'healthy'
      // Parse individual checks from raw output
      // On Windows, UTF-8 check marks (✓/⚠/✗) get mangled by codepage, so we match by structure:
      // Each check line has format: <icon> <Name>: <detail>
      const checks: Array<{ name: string; status: 'pass' | 'warn' | 'fail'; detail: string }> = []
      const knownChecks = [
        'Version Freshness', 'Node.js Version', 'npm Version', 'Claude Code CLI',
        'Git:', 'Git Repository', 'Config File', 'Daemon Status', 'Memory Database',
        'API Keys', 'MCP Servers', 'Disk Space', 'TypeScript', 'agentic-flow',
      ]
      for (const line of raw.replace(/\r/g, '').split('\n')) {
        // Match lines containing a known check name followed by a colon and detail
        for (const check of knownChecks) {
          const checkName = check.replace(':', '')
          if (line.includes(checkName + ':')) {
            const colonIdx = line.indexOf(checkName + ':')
            const name = checkName.trim()
            const detail = line.substring(colonIdx + checkName.length + 1).trim()
            // Determine status: lines with warning keywords or known negative patterns
            const isWarn = detail.match(/not (a |running|installed|found)|no (config|api)/i)
            const isFail = detail.match(/fail|error|critical/i)
            checks.push({
              name,
              status: isFail ? 'fail' : isWarn ? 'warn' : 'pass',
              detail,
            })
            break
          }
        }
      }
      res.json({ status, passed: Number(passed), warnings: Number(warnings), checks, raw })
    } catch {
      // doctor unavailable — report unknown rather than failing the route
      res.json({ status: 'unknown', passed: 0, warnings: 0, checks: [] })
    }
  }))
  // Preflight check — validates all dependencies before the app is usable
  r.get('/preflight', h(async (_req, res) => {
    const checks: Array<{ id: string; name: string; status: 'ok' | 'warn' | 'fail'; detail: string; fix?: string }> = []

    // 1. Node.js version (>= 18 required)
    const nodeVer = process.version
    const major = parseInt(nodeVer.slice(1), 10)
    checks.push({
      id: 'node',
      name: 'Node.js',
      status: major >= 18 ? 'ok' : 'fail',
      detail: `${nodeVer} detected`,
      fix: major < 18 ? 'Install Node.js >= 18 from https://nodejs.org' : undefined,
    })

    // 2. npx available
    try {
      await execAsync('npx --version', { timeout: 10_000 })
      checks.push({ id: 'npx', name: 'npx', status: 'ok', detail: 'Available in PATH' })
    } catch {
      checks.push({ id: 'npx', name: 'npx', status: 'fail', detail: 'Not found in PATH', fix: 'Install Node.js (npx is bundled with npm)' })
    }

    // 3. claude-flow CLI
    try {
      const { raw } = await execCli('--version', [])
      checks.push({ id: 'claude-flow', name: 'claude-flow CLI', status: 'ok', detail: raw.trim().slice(0, 80) || 'Installed' })
    } catch (err) {
      const msg = err instanceof Error ? err.message : String(err)
      checks.push({
        id: 'claude-flow',
        name: 'claude-flow CLI',
        status: 'fail',
        detail: msg.slice(0, 120),
        fix: 'Run: npx -y @claude-flow/cli@latest --version',
      })
    }

    // 4. Claude Code CLI (claude executable); missing is a warn, not a fail
    try {
      await execAsync('claude --version', { timeout: 10_000 })
      checks.push({ id: 'claude-cli', name: 'Claude Code CLI', status: 'ok', detail: 'claude command available' })
    } catch {
      // On Windows (LOCALAPPDATA set) probe the default install path to
      // distinguish "installed but not in PATH" from "not installed".
      const claudePath = process.env.LOCALAPPDATA
        ? `${process.env.USERPROFILE}\\.local\\bin\\claude.exe`
        : 'claude'
      const exists = process.env.LOCALAPPDATA ? fs.existsSync(claudePath) : false
      if (exists) {
        checks.push({ id: 'claude-cli', name: 'Claude Code CLI', status: 'warn', detail: `Found at ${claudePath} but not in PATH`, fix: 'Add claude to your system PATH' })
      } else {
        checks.push({ id: 'claude-cli', name: 'Claude Code CLI', status: 'warn', detail: 'Not found (needed for multi-agent pipeline)', fix: 'Install Claude Code: https://docs.anthropic.com/en/docs/claude-code' })
      }
    }

    // 5. Persistence directory must be writable
    try {
      ensurePersistDir()
      const testFile = path.join(PERSIST_DIR, '.write-test')
      fs.writeFileSync(testFile, 'test')
      fs.unlinkSync(testFile)
      checks.push({ id: 'persist-dir', name: 'Persistence (.ruflo/)', status: 'ok', detail: `Writable at ${PERSIST_DIR}` })
    } catch {
      checks.push({ id: 'persist-dir', name: 'Persistence (.ruflo/)', status: 'fail', detail: 'Cannot write to .ruflo/ directory', fix: 'Check file permissions in project directory' })
    }

    // 6. Port availability (3001 is us, check 3002 for daemon)
    try {
      await execAsync('npx -y @claude-flow/cli@latest status', { timeout: 15_000 })
      checks.push({ id: 'daemon', name: 'claude-flow daemon', status: 'ok', detail: 'Daemon reachable on port 3002' })
    } catch {
      checks.push({ id: 'daemon', name: 'claude-flow daemon', status: 'warn', detail: 'Daemon not running (will start on first use)', fix: 'The daemon starts automatically when needed' })
    }

    // 7. Environment variables required on Windows
    const envChecks: string[] = []
    if (!process.env.USERPROFILE && os.platform() === 'win32') envChecks.push('USERPROFILE not set')
    if (!process.env.LOCALAPPDATA && os.platform() === 'win32') envChecks.push('LOCALAPPDATA not set')
    if (envChecks.length === 0) {
      checks.push({ id: 'env', name: 'Environment', status: 'ok', detail: `${os.platform()} / ${os.arch()}` })
    } else {
      checks.push({ id: 'env', name: 'Environment', status: 'warn', detail: envChecks.join(', '), fix: 'Set missing Windows environment variables' })
    }

    // Aggregate: any fail => fail, else any warn => warn, else ok.
    const failed = checks.filter(c => c.status === 'fail').length
    const warned = checks.filter(c => c.status === 'warn').length
    const overall = failed > 0 ? 'fail' : warned > 0 ? 'warn' : 'ok'

    res.json({ status: overall, checks, failed, warned, passed: checks.length - failed - warned })
  }))

  // Static host information.
  r.get('/info', h(async (_req, res) => {
    res.json({
      platform: os.platform(), arch: os.arch(), nodeVersion: process.version,
      cpus: os.cpus().length, totalMemory: `${Math.round(os.totalmem() / 1024 / 1024)} MB`,
      freeMemory: `${Math.round(os.freemem() / 1024 / 1024)} MB`,
      uptime: `${Math.round(os.uptime() / 60)} min`,
    })
  }))
  // Process + system memory/CPU snapshot.
  r.get('/metrics', h(async (_req, res) => {
    const mem = process.memoryUsage()
    res.json({
      heapUsed: `${Math.round(mem.heapUsed / 1024 / 1024)} MB`,
      heapTotal: `${Math.round(mem.heapTotal / 1024 / 1024)} MB`,
      rss: `${Math.round(mem.rss / 1024 / 1024)} MB`,
      cpuUsage: os.loadavg()[0],
      systemMemoryUsage: Math.round((1 - os.freemem() / os.totalmem()) * 100),
    })
  }))
  // Daemon status via CLI; a failed call reports 'stopped' with the error.
  r.get('/status', h(async (_req, res) => {
    try {
      const { raw } = await execCli('status')
      res.json({ raw, ...parseCliOutput(raw) as object })
    } catch (err) {
      res.json({ status: 'stopped', error: (err as Error).message })
    }
  }))
  // Stub: acknowledges the request without performing a reset.
  r.post('/reset', h(async (_req, res) => {
    res.json({ message: 'System reset requested' })
  }))
  return r
}
|
|
511
|
+
|
|
512
|
+
// Track last swarm config for status endpoint
let lastSwarmId = ''
let lastSwarmTopology = 'hierarchical'
let lastSwarmStrategy = 'specialized'
let lastSwarmMaxAgents = 8
let lastSwarmCreatedAt = ''
// Starts true at boot; loadFromDisk() restores the persisted value.
let swarmShutdown = true
// Set by ensureDaemon() once the daemon started or was confirmed running.
let daemonStarted = false
|
|
520
|
+
|
|
521
|
+
// In-memory workflow store
// One step inside a workflow run.
interface WorkflowStep {
  id: string; name: string; status: string; agent?: string; detail?: string
}
// A workflow run, optionally linked to a task via taskId; persisted via
// the persistence layer (see PersistedState.workflows).
interface WorkflowRecord {
  id: string; name: string; template: string; status: string
  taskId?: string; createdAt: string; completedAt?: string; result?: string
  steps: WorkflowStep[]
}
const workflowStore: Map<string, WorkflowRecord> = new Map()
|
|
531
|
+
|
|
532
|
+
// Make sure the claude-flow daemon is running. Runs `init` (tolerating
// "already initialised" errors), then starts the daemon on DAEMON_PORT
// (default 3002). If the start fails but `status` succeeds, the daemon was
// already up. daemonStarted memoises success so later calls are no-ops;
// on failure it stays false so the next call retries.
async function ensureDaemon(): Promise<void> {
  if (daemonStarted) return
  try {
    // Init claude-flow if not already done
    try { await execCli('init', []) } catch (e) {
      console.log('[daemon] init skipped (may already exist):', e instanceof Error ? e.message : String(e))
    }
    // Start daemon on port 3002 (3001 is our API)
    const daemonPort = String(Number(process.env.DAEMON_PORT) || 3002)
    await execCli('start', ['--daemon', '--port', daemonPort, '--skip-mcp'])
    daemonStarted = true
  } catch (err) {
    const msg = err instanceof Error ? err.message : String(err)
    // Check if daemon is actually running by querying status
    try {
      await execCli('status', [])
      daemonStarted = true // daemon was already running
      console.log('[daemon] Already running (confirmed via status)')
    } catch {
      console.warn('[daemon] Failed to start and status check failed:', msg)
      // Don't set daemonStarted=true — will retry on next call
    }
  }
}
|
|
556
|
+
|
|
557
|
+
// Poll `workflow status <id>` every 3s until the workflow completes,
// fails, or maxWait elapses; mirror the outcome onto the linked task and
// workflow record and broadcast the updates. CLI errors during a poll are
// swallowed and the poll simply retries.
async function pollWorkflowStatus(workflowId: string, taskId: string, maxWait = 120000): Promise<void> {
  const task = taskStore.get(taskId)
  if (!task) return
  const start = Date.now()
  const poll = async () => {
    // Give up after maxWait and mark the task failed.
    if (Date.now() - start > maxWait) {
      task.status = 'failed'
      task.result = 'Workflow timed out after ' + (maxWait / 1000) + 's'
      broadcast('task:updated', { ...task, id: taskId })
      return
    }
    try {
      const { raw } = await execCli('workflow', ['status', workflowId])
      const wf = workflowStore.get(workflowId)
      // Status is scraped from a "Status: <word>" line in CLI output.
      const statusMatch = raw.match(/Status:\s*(\w+)/)
      const currentStatus = statusMatch?.[1] || 'unknown'
      if (wf) { wf.status = currentStatus; wf.result = raw.slice(0, 500) }
      if (currentStatus === 'completed' || currentStatus === 'done') {
        task.status = 'completed'
        task.completedAt = new Date().toISOString()
        task.result = raw.slice(0, 500) || 'Workflow completed'
        if (wf) { wf.status = 'completed'; wf.completedAt = task.completedAt }
        broadcast('task:updated', { ...task, id: taskId })
        // NOTE(review): wf may be undefined here; listeners receive an
        // 'workflow:updated' event with an undefined payload — confirm intended.
        broadcast('workflow:updated', wf)
      } else if (currentStatus === 'failed' || currentStatus === 'error') {
        task.status = 'failed'
        task.result = raw.slice(0, 500) || 'Workflow failed'
        if (wf) wf.status = 'failed'
        broadcast('task:updated', { ...task, id: taskId })
      } else {
        // Still running, poll again in 3s
        setTimeout(poll, 3000)
      }
    } catch { setTimeout(poll, 3000) }
  }
  setTimeout(poll, 2000) // initial delay
}
|
|
594
|
+
|
|
595
|
+
// Running Claude Code child processes so in-flight work can be cancelled.
// Keys are the task id (swarm-CLI mode) or `${taskId}-${agentId|'main'}`
// (multi-agent pipeline mode).
const runningProcesses: Map<string, ReturnType<typeof spawn>> = new Map()
// Last output timestamp (epoch ms) per process key; the zombie reaper kills
// entries that go quiet for longer than ZOMBIE_TIMEOUT.
const processLastActivity: Map<string, number> = new Map()
|
|
599
|
+
|
|
600
|
+
function trackProcessActivity(key: string) {
|
|
601
|
+
processLastActivity.set(key, Date.now())
|
|
602
|
+
}
|
|
603
|
+
|
|
604
|
+
function cleanupProcess(key: string) {
|
|
605
|
+
runningProcesses.delete(key)
|
|
606
|
+
processLastActivity.delete(key)
|
|
607
|
+
}
|
|
608
|
+
|
|
609
|
+
// Zombie reaper — kills processes with no output for ZOMBIE_TIMEOUT
|
|
610
|
+
function startZombieReaper() {
|
|
611
|
+
setInterval(() => {
|
|
612
|
+
const now = Date.now()
|
|
613
|
+
for (const [key, lastTime] of processLastActivity.entries()) {
|
|
614
|
+
if (now - lastTime > ZOMBIE_TIMEOUT) {
|
|
615
|
+
const proc = runningProcesses.get(key)
|
|
616
|
+
if (proc && !proc.killed) {
|
|
617
|
+
console.warn(`[zombie] Killing stale process ${key} (no output for ${Math.round(ZOMBIE_TIMEOUT / 1000)}s)`)
|
|
618
|
+
proc.kill('SIGTERM')
|
|
619
|
+
// Force kill after 5s if still alive
|
|
620
|
+
setTimeout(() => { if (!proc.killed) proc.kill('SIGKILL') }, 5000)
|
|
621
|
+
}
|
|
622
|
+
processLastActivity.delete(key)
|
|
623
|
+
cleanupProcess(key)
|
|
624
|
+
}
|
|
625
|
+
}
|
|
626
|
+
}, 60_000) // check every 60s
|
|
627
|
+
}
|
|
628
|
+
|
|
629
|
+
/**
 * Builds the system-prompt text injected into a Claude run so it delegates
 * work to the currently active swarm agents via the Agent tool.
 *
 * Returns a minimal generic prompt when no swarm is active; otherwise a
 * topology-specific prompt (coordinator-centric for hierarchical swarms)
 * plus the agent roster, role descriptions, and swarm metadata.
 *
 * NOTE(review): the `taskId` parameter is unused in this body — presumably
 * kept for signature parity with callers; confirm before removing.
 */
function buildSwarmPrompt(task: TaskRecord, taskId: string): string {
  // Collect active agents from registry (terminated ones are excluded).
  const activeAgents = Array.from(agentRegistry.entries())
    .filter(([key]) => !terminatedAgents.has(key))
    .map(([, reg]) => reg)

  // If no swarm is active, give a minimal prompt
  if (swarmShutdown || activeAgents.length === 0) {
    return [
      'You have access to the Agent tool for spawning subagents.',
      'Use subagent_type to assign specialized roles: coder, researcher, tester, reviewer, architect.',
      'Break the task into subtasks and delegate to parallel agents when possible.',
    ].join(' ')
  }

  // Build agent roster with roles
  const agentRoster = activeAgents.map(a => `- ${a.name} (type: ${a.type}, id: ${a.id})`).join('\n')

  // Map agent types to subagent_type values for the Agent tool
  // (unknown types fall through to the raw type name below).
  const typeMap: Record<string, string> = {
    coordinator: 'general-purpose',
    coder: 'coder',
    researcher: 'researcher',
    tester: 'tester',
    reviewer: 'reviewer',
    analyst: 'analyst',
    architect: 'architecture',
    'security-architect': 'security-architect',
    'performance-engineer': 'performance-engineer',
    optimizer: 'performance-optimizer',
  }

  // Determine unique roles available
  const availableTypes = [...new Set(activeAgents.map(a => a.type))]
  const subagentTypes = availableTypes
    .map(t => `"${typeMap[t] || t}"`)
    .join(', ')

  // Build role descriptions (types without an entry get 'specialist agent').
  const roleDescriptions: Record<string, string> = {
    coordinator: 'orchestrates the workflow, breaks tasks into subtasks, delegates to specialists',
    coder: 'writes implementation code, creates/edits files, runs build commands',
    researcher: 'explores the codebase, searches for patterns, gathers context before implementation',
    tester: 'writes tests, runs test suites, validates that implementations work correctly',
    reviewer: 'reviews code quality, checks for bugs, security issues, and best practices',
    analyst: 'analyzes requirements, defines architecture, produces technical specifications',
    architect: 'designs system architecture, defines patterns and interfaces',
  }

  const rolesList = availableTypes
    .map(t => `- ${t}: ${roleDescriptions[t] || 'specialist agent'}`)
    .join('\n')

  // Build the topology description. Hierarchical + coordinator present gets
  // the orchestration-focused prompt; everything else gets the generic one.
  const isHierarchical = lastSwarmTopology.includes('hierarchical')
  const coordinator = activeAgents.find(a => a.type === 'coordinator')
  const workers = activeAgents.filter(a => a.type !== 'coordinator')

  let topologyInstructions: string
  if (isHierarchical && coordinator) {
    const workerNames = workers.map(a => `${a.name}(${typeMap[a.type] || a.type})`).join(', ')
    topologyInstructions = [
      `You are the COORDINATOR of a ${lastSwarmTopology} swarm with ${activeAgents.length} agents.`,
      `Your role is to ORCHESTRATE, not to implement directly.`,
      '',
      'MANDATORY WORKFLOW:',
      '1. Analyze the task and break it into subtasks',
      '2. For EACH subtask, spawn a subagent using the Agent tool with the appropriate subagent_type',
      '3. Run independent subtasks in PARALLEL (multiple Agent calls in one response)',
      '4. Wait for results, then synthesize or delegate follow-up work',
      '5. Only write code yourself if no specialist agent fits the need',
      '',
      `Available worker agents: ${workerNames}`,
      '',
      'SUBAGENT DISPATCH RULES:',
      `- For code implementation: use subagent_type="${typeMap.coder || 'coder'}"`,
      `- For research/exploration: use subagent_type="${typeMap.researcher || 'researcher'}"`,
      `- For testing/validation: use subagent_type="${typeMap.tester || 'tester'}"`,
      `- For code review: use subagent_type="${typeMap.reviewer || 'reviewer'}"`,
      `- For analysis/specs: use subagent_type="${typeMap.analyst || 'analyst'}"`,
      '',
      'IMPORTANT: Do NOT do all the work yourself. You MUST delegate to subagents.',
      'Each Agent call should include a clear, self-contained prompt with all context the subagent needs.',
      'Maximize parallelism: if two subtasks are independent, dispatch both in the same response.',
    ].join('\n')
  } else {
    topologyInstructions = [
      `You are operating in a ${lastSwarmTopology} swarm with ${activeAgents.length} agents.`,
      'Use the Agent tool to delegate subtasks to specialized subagents.',
      'Break the work into parallel subtasks and dispatch them simultaneously when possible.',
      '',
      'Available subagent_type values: ' + subagentTypes,
      '',
      'IMPORTANT: Delegate work to subagents rather than doing everything yourself.',
      'Each subagent should receive a focused, self-contained task with full context.',
    ].join('\n')
  }

  // Assigned agent context (matched by id or name).
  const assignedAgent = task.assignedTo
    ? activeAgents.find(a => a.id === task.assignedTo || a.name === task.assignedTo)
    : null
  const assignmentNote = assignedAgent
    ? `\nThis task was assigned to ${assignedAgent.name} (${assignedAgent.type}). Act in that role.`
    : ''

  return [
    topologyInstructions,
    assignmentNote,
    '',
    'SWARM ROSTER:',
    agentRoster,
    '',
    'AGENT ROLES:',
    rolesList,
    '',
    `Swarm ID: ${lastSwarmId}, Topology: ${lastSwarmTopology}, Strategy: ${lastSwarmStrategy}`,
  ].join('\n')
}
|
|
748
|
+
|
|
749
|
+
async function launchWorkflowForTask(taskId: string, title: string, description: string): Promise<void> {
|
|
750
|
+
const task = taskStore.get(taskId)
|
|
751
|
+
if (!task) return
|
|
752
|
+
const taskDesc = `${title}${description ? ': ' + description : ''}`
|
|
753
|
+
const workflowId = `workflow-${Date.now()}-${Math.random().toString(36).slice(2, 6)}`
|
|
754
|
+
|
|
755
|
+
// Create workflow record
|
|
756
|
+
const wf: WorkflowRecord = {
|
|
757
|
+
id: workflowId, name: title, template: 'development',
|
|
758
|
+
status: 'running', taskId, createdAt: new Date().toISOString(),
|
|
759
|
+
steps: [],
|
|
760
|
+
}
|
|
761
|
+
workflowStore.set(workflowId, wf)
|
|
762
|
+
broadcast('workflow:added', wf)
|
|
763
|
+
|
|
764
|
+
// If swarm is active with agents, use the multi-agent pipeline
|
|
765
|
+
const activeAgents = getActiveSwarmAgents()
|
|
766
|
+
if (!swarmShutdown && activeAgents.length > 0) {
|
|
767
|
+
launchSwarmPipeline(taskId, task, taskDesc, title, wf, workflowId, activeAgents)
|
|
768
|
+
} else {
|
|
769
|
+
// Fallback: single claude -p
|
|
770
|
+
launchViaClaude(taskId, task, taskDesc, title, wf, workflowId)
|
|
771
|
+
}
|
|
772
|
+
}
|
|
773
|
+
|
|
774
|
+
// Get active agents from registry, excluding terminated
|
|
775
|
+
function getActiveSwarmAgents(): Array<{ id: string; name: string; type: string }> {
|
|
776
|
+
return Array.from(agentRegistry.entries())
|
|
777
|
+
.filter(([key]) => !terminatedAgents.has(key))
|
|
778
|
+
.map(([, reg]) => reg)
|
|
779
|
+
}
|
|
780
|
+
|
|
781
|
+
// ── MULTI-AGENT PIPELINE ─────────────────────────────────────────────
|
|
782
|
+
// Phase 1: Coordinator plans subtasks (claude -p with planner prompt)
|
|
783
|
+
// Phase 2: Each subtask dispatched to the matching agent (parallel claude -p)
|
|
784
|
+
// Phase 3: Reviewer validates results
|
|
785
|
+
async function launchSwarmPipeline(
|
|
786
|
+
taskId: string, task: TaskRecord, taskDesc: string, title: string,
|
|
787
|
+
wf: WorkflowRecord, workflowId: string,
|
|
788
|
+
agents: Array<{ id: string; name: string; type: string }>,
|
|
789
|
+
): Promise<void> {
|
|
790
|
+
const coordinator = agents.find(a => a.type === 'coordinator')
|
|
791
|
+
const workers = agents.filter(a => a.type !== 'coordinator')
|
|
792
|
+
const cleanEnv = { ...process.env }
|
|
793
|
+
// Remove ALL Claude env vars that prevent nested sessions
|
|
794
|
+
for (const key of Object.keys(cleanEnv)) {
|
|
795
|
+
if (key.startsWith('CLAUDE') || key.startsWith('claude')) delete cleanEnv[key]
|
|
796
|
+
}
|
|
797
|
+
const claudePath = process.env.LOCALAPPDATA
|
|
798
|
+
? `${process.env.USERPROFILE}\\.local\\bin\\claude.exe`
|
|
799
|
+
: 'claude'
|
|
800
|
+
const mcpConfigPath = path.join(process.cwd(), '.mcp.json')
|
|
801
|
+
const mcpArgs = fs.existsSync(mcpConfigPath) ? ['--mcp-config', mcpConfigPath] : []
|
|
802
|
+
|
|
803
|
+
broadcast('task:log', { id: taskId, message: `Starting multi-agent pipeline for: ${taskDesc}` })
|
|
804
|
+
|
|
805
|
+
// Helper: run claude -p and return the result text
|
|
806
|
+
// planOnly=true: no tools, single turn — for coordinator planning phase
|
|
807
|
+
function runClaude(prompt: string, systemPrompt: string, agentId?: string, planOnly = false): Promise<string> {
|
|
808
|
+
return new Promise((resolve, reject) => {
|
|
809
|
+
if (agentId) {
|
|
810
|
+
updateAgentActivity(agentId, { status: 'working', currentTask: taskId, currentAction: planOnly ? 'Planning...' : prompt.slice(0, 60) })
|
|
811
|
+
}
|
|
812
|
+
const args = ['-p', prompt, '--output-format', 'stream-json', '--verbose']
|
|
813
|
+
if (planOnly) {
|
|
814
|
+
// Restricted mode: no tools, single response — forces pure text output
|
|
815
|
+
args.push('--max-turns', '1')
|
|
816
|
+
args.push('--append-system-prompt', systemPrompt)
|
|
817
|
+
} else {
|
|
818
|
+
// Full mode: tools + MCP for actual work
|
|
819
|
+
if (SKIP_PERMISSIONS) args.push('--dangerously-skip-permissions')
|
|
820
|
+
args.push(...mcpArgs)
|
|
821
|
+
args.push('--append-system-prompt', systemPrompt)
|
|
822
|
+
}
|
|
823
|
+
const proc = spawn(claudePath, args, { cwd: task.cwd || process.cwd(), env: cleanEnv, stdio: ['ignore', 'pipe', 'pipe'], windowsHide: true })
|
|
824
|
+
|
|
825
|
+
runningProcesses.set(`${taskId}-${agentId || 'main'}`, proc)
|
|
826
|
+
trackProcessActivity(`${taskId}-${agentId || 'main'}`)
|
|
827
|
+
let fullOutput = ''
|
|
828
|
+
let resultText = ''
|
|
829
|
+
|
|
830
|
+
proc.stdout?.on('data', (chunk: Buffer) => {
|
|
831
|
+
trackProcessActivity(`${taskId}-${agentId || 'main'}`)
|
|
832
|
+
for (const line of chunk.toString().split('\n').filter(Boolean)) {
|
|
833
|
+
try {
|
|
834
|
+
const evt = JSON.parse(line)
|
|
835
|
+
if (evt.type === 'assistant' && evt.message?.content) {
|
|
836
|
+
for (const block of evt.message.content) {
|
|
837
|
+
if (block.type === 'text') {
|
|
838
|
+
fullOutput += block.text
|
|
839
|
+
if (agentId) appendAgentOutput(agentId, block.text)
|
|
840
|
+
broadcast('task:output', { id: taskId, workflowId, type: 'text', agentId, content: block.text.slice(0, 300) })
|
|
841
|
+
} else if (block.type === 'tool_use') {
|
|
842
|
+
const summary = block.input?.file_path || block.input?.command?.slice(0, 60) || block.input?.pattern || ''
|
|
843
|
+
const toolLine = `[Tool] ${block.name}${summary ? ': ' + summary : ''}`
|
|
844
|
+
if (agentId) {
|
|
845
|
+
appendAgentOutput(agentId, toolLine)
|
|
846
|
+
updateAgentActivity(agentId, { status: 'working', currentTask: taskId, currentAction: `${block.name}: ${summary.slice(0, 60)}` })
|
|
847
|
+
}
|
|
848
|
+
const stepId = `step-${wf.steps.length + 1}`
|
|
849
|
+
wf.steps.push({ id: stepId, name: block.name, status: 'running', agent: agentId || 'claude', detail: summary })
|
|
850
|
+
broadcast('workflow:updated', wf)
|
|
851
|
+
} else if (block.type === 'tool_result') {
|
|
852
|
+
const resultLine = typeof block.content === 'string' ? block.content.slice(0, 200) : JSON.stringify(block.content).slice(0, 200)
|
|
853
|
+
if (agentId) appendAgentOutput(agentId, `[Result] ${resultLine}`)
|
|
854
|
+
}
|
|
855
|
+
}
|
|
856
|
+
} else if (evt.type === 'tool_result' || (evt.type === 'user' && evt.message?.content)) {
|
|
857
|
+
const lastRunning = [...wf.steps].reverse().find(s => s.status === 'running')
|
|
858
|
+
if (lastRunning) { lastRunning.status = 'completed'; broadcast('workflow:updated', wf) }
|
|
859
|
+
} else if (evt.type === 'result') {
|
|
860
|
+
resultText = evt.result || ''
|
|
861
|
+
if (agentId) appendAgentOutput(agentId, `[Done] ${(resultText || 'completed').slice(0, 200)}`)
|
|
862
|
+
wf.steps.forEach(s => { if (s.status === 'running') s.status = 'completed' })
|
|
863
|
+
}
|
|
864
|
+
} catch {
|
|
865
|
+
fullOutput += line + '\n'
|
|
866
|
+
}
|
|
867
|
+
}
|
|
868
|
+
})
|
|
869
|
+
|
|
870
|
+
let stderrBuf = ''
|
|
871
|
+
proc.stderr?.on('data', (chunk: Buffer) => {
|
|
872
|
+
const text = chunk.toString().trim()
|
|
873
|
+
stderrBuf += text + '\n'
|
|
874
|
+
if (agentId && text) appendAgentOutput(agentId, `[stderr] ${text.slice(0, 200)}`)
|
|
875
|
+
broadcast('task:output', { id: taskId, workflowId, type: 'stderr', agentId, content: text.slice(0, 300) })
|
|
876
|
+
})
|
|
877
|
+
|
|
878
|
+
proc.on('close', (code) => {
|
|
879
|
+
cleanupProcess(`${taskId}-${agentId || 'main'}`)
|
|
880
|
+
if (agentId) {
|
|
881
|
+
const act = agentActivity.get(agentId)
|
|
882
|
+
updateAgentActivity(agentId, {
|
|
883
|
+
status: 'idle', currentTask: undefined, currentAction: undefined,
|
|
884
|
+
tasksCompleted: (act?.tasksCompleted || 0) + (code === 0 ? 1 : 0),
|
|
885
|
+
errors: (act?.errors || 0) + (code !== 0 ? 1 : 0),
|
|
886
|
+
})
|
|
887
|
+
}
|
|
888
|
+
if (code === 0) resolve(resultText || fullOutput)
|
|
889
|
+
else {
|
|
890
|
+
const errDetail = (stderrBuf + '\n' + fullOutput).trim().slice(0, 1000) || `Exit code ${code}`
|
|
891
|
+
console.error(`[runClaude ${agentId}] Failed (code ${code}): ${errDetail.slice(0, 200)}`)
|
|
892
|
+
reject(new Error(errDetail))
|
|
893
|
+
}
|
|
894
|
+
})
|
|
895
|
+
proc.on('error', (err) => {
|
|
896
|
+
cleanupProcess(`${taskId}-${agentId || 'main'}`)
|
|
897
|
+
reject(err)
|
|
898
|
+
})
|
|
899
|
+
})
|
|
900
|
+
}
|
|
901
|
+
|
|
902
|
+
try {
|
|
903
|
+
// ── PHASE 1: Coordinator plans subtasks ──
|
|
904
|
+
const workerTypes = [...new Set(workers.map(w => w.type))]
|
|
905
|
+
const coordinatorId = coordinator?.id
|
|
906
|
+
if (coordinatorId) {
|
|
907
|
+
updateAgentActivity(coordinatorId, { status: 'working', currentTask: taskId, currentAction: 'Planning subtasks...' })
|
|
908
|
+
}
|
|
909
|
+
wf.steps.push({ id: 'step-plan', name: 'Plan', status: 'running', agent: coordinator?.name || 'coordinator', detail: 'Breaking task into subtasks' })
|
|
910
|
+
broadcast('workflow:updated', wf)
|
|
911
|
+
broadcast('task:output', { id: taskId, workflowId, type: 'text', content: '[Phase 1] Coordinator planning subtasks...' })
|
|
912
|
+
|
|
913
|
+
const roleInstructions: Record<string, string> = {
|
|
914
|
+
researcher: 'RESEARCH phase: explore the codebase, find relevant files, understand existing patterns and dependencies',
|
|
915
|
+
coder: 'IMPLEMENTATION phase: write/edit code, create files, run build commands',
|
|
916
|
+
tester: 'TESTING phase: write unit/integration tests, run the test suite, verify the implementation works',
|
|
917
|
+
reviewer: 'REVIEW phase: review the code changes for quality, bugs, security issues, and adherence to project conventions',
|
|
918
|
+
analyst: 'ANALYSIS phase: analyze requirements, define technical specifications',
|
|
919
|
+
architect: 'ARCHITECTURE phase: design the solution structure, define interfaces and patterns',
|
|
920
|
+
}
|
|
921
|
+
|
|
922
|
+
const planPrompt = [
|
|
923
|
+
`You are a task coordinator managing a development team. Your job is to break tasks into subtasks and assign them to the RIGHT specialist.`,
|
|
924
|
+
'',
|
|
925
|
+
`YOUR TEAM (you MUST use ALL relevant roles):`,
|
|
926
|
+
...workerTypes.map(t => `- ${t}: ${roleInstructions[t] || 'specialist agent'}`),
|
|
927
|
+
'',
|
|
928
|
+
`TASK: ${taskDesc}`,
|
|
929
|
+
'',
|
|
930
|
+
`RULES:`,
|
|
931
|
+
`1. You MUST use MULTIPLE agent types — do NOT assign everything to a single agent`,
|
|
932
|
+
`2. If the task involves modifying existing code, START with a "researcher" subtask to explore the codebase`,
|
|
933
|
+
`3. After implementation by "coder", ALWAYS add a "tester" or "reviewer" subtask to validate`,
|
|
934
|
+
`4. Each subtask must be self-contained with enough context for the agent to work independently`,
|
|
935
|
+
`5. Use depends_on to chain tasks that need results from previous steps`,
|
|
936
|
+
`6. Keep it practical: 3-5 subtasks for complex tasks, 2-3 for simple ones`,
|
|
937
|
+
'',
|
|
938
|
+
`Respond ONLY with a JSON array. Each subtask has:`,
|
|
939
|
+
`- "agent": one of [${workerTypes.map(t => `"${t}"`).join(', ')}]`,
|
|
940
|
+
`- "task": a detailed, self-contained description`,
|
|
941
|
+
`- "depends_on": array of indices (0-based) of prerequisite subtasks, or [] for parallel`,
|
|
942
|
+
'',
|
|
943
|
+
'Example for a code change task:',
|
|
944
|
+
'[',
|
|
945
|
+
' {"agent":"researcher","task":"Find all files related to X, understand the current implementation patterns and dependencies","depends_on":[]},',
|
|
946
|
+
' {"agent":"coder","task":"Implement Y based on the research findings. Modify files A, B, C as needed","depends_on":[0]},',
|
|
947
|
+
' {"agent":"tester","task":"Write tests for the new Y feature and run the test suite to verify everything passes","depends_on":[1]},',
|
|
948
|
+
' {"agent":"reviewer","task":"Review all code changes for quality, check for bugs, security issues, and ensure project conventions are followed","depends_on":[1]}',
|
|
949
|
+
']',
|
|
950
|
+
].join('\n')
|
|
951
|
+
|
|
952
|
+
const planResult = await runClaude(planPrompt, 'You are a task planner. Output ONLY a valid JSON array. No markdown fences, no explanation, no tool use. Just the JSON.', coordinatorId, true)
|
|
953
|
+
|
|
954
|
+
// Parse the plan
|
|
955
|
+
const jsonMatch = planResult.match(/\[[\s\S]*\]/)
|
|
956
|
+
let subtasks: Array<{ agent: string; task: string; depends_on: number[] }> = []
|
|
957
|
+
if (jsonMatch) {
|
|
958
|
+
try { subtasks = JSON.parse(jsonMatch[0]) } catch (e) {
|
|
959
|
+
console.warn('[pipeline] Failed to parse subtask plan JSON:', e instanceof Error ? e.message : String(e))
|
|
960
|
+
}
|
|
961
|
+
}
|
|
962
|
+
|
|
963
|
+
const planStep = wf.steps.find(s => s.id === 'step-plan')
|
|
964
|
+
if (planStep) planStep.status = 'completed'
|
|
965
|
+
broadcast('workflow:updated', wf)
|
|
966
|
+
|
|
967
|
+
if (subtasks.length === 0) {
|
|
968
|
+
// Fallback: if coordinator couldn't plan, just run the whole task with a coder
|
|
969
|
+
broadcast('task:output', { id: taskId, workflowId, type: 'text', content: '[Fallback] Could not parse plan, running with single coder agent' })
|
|
970
|
+
const coder = workers.find(w => w.type === 'coder') || workers[0]
|
|
971
|
+
if (coder) {
|
|
972
|
+
wf.steps.push({ id: 'step-exec', name: 'Execute', status: 'running', agent: coder.name, detail: taskDesc.slice(0, 80) })
|
|
973
|
+
broadcast('workflow:updated', wf)
|
|
974
|
+
const result = await runClaude(taskDesc, `You are a ${coder.type} agent. Complete this task thoroughly.`, coder.id)
|
|
975
|
+
const execStep = wf.steps.find(s => s.id === 'step-exec')
|
|
976
|
+
if (execStep) execStep.status = 'completed'
|
|
977
|
+
task.result = result.slice(0, 2000) || 'Completed'
|
|
978
|
+
}
|
|
979
|
+
} else {
|
|
980
|
+
// ── PHASE 2: Execute subtasks respecting dependencies ──
|
|
981
|
+
broadcast('task:output', { id: taskId, workflowId, type: 'text', content: `[Phase 2] Executing ${subtasks.length} subtasks across agents...` })
|
|
982
|
+
const results: string[] = new Array(subtasks.length).fill('')
|
|
983
|
+
const completed = new Set<number>()
|
|
984
|
+
|
|
985
|
+
// Execute in waves: each wave runs all subtasks whose dependencies are met
|
|
986
|
+
while (completed.size < subtasks.length) {
|
|
987
|
+
const ready = subtasks.map((st, i) => ({ ...st, idx: i }))
|
|
988
|
+
.filter(st => !completed.has(st.idx) && st.depends_on.every(d => completed.has(d)))
|
|
989
|
+
|
|
990
|
+
if (ready.length === 0) {
|
|
991
|
+
broadcast('task:output', { id: taskId, workflowId, type: 'text', content: '[Error] Circular dependency detected, aborting remaining subtasks' })
|
|
992
|
+
break
|
|
993
|
+
}
|
|
994
|
+
|
|
995
|
+
// Run ready subtasks in parallel
|
|
996
|
+
const wave = ready.map(async (st) => {
|
|
997
|
+
const agent = workers.find(w => w.type === st.agent) || workers[0]
|
|
998
|
+
if (!agent) return
|
|
999
|
+
|
|
1000
|
+
const stepId = `step-${st.idx + 1}`
|
|
1001
|
+
wf.steps.push({ id: stepId, name: `${st.agent}: ${st.task.slice(0, 40)}`, status: 'running', agent: agent.name, detail: st.task.slice(0, 80) })
|
|
1002
|
+
broadcast('workflow:updated', wf)
|
|
1003
|
+
broadcast('task:output', { id: taskId, workflowId, type: 'text', content: ` [${agent.name}] ${st.task.slice(0, 100)}` })
|
|
1004
|
+
|
|
1005
|
+
// Build context from dependencies
|
|
1006
|
+
const depContext = st.depends_on.length > 0
|
|
1007
|
+
? '\n\nPrevious results:\n' + st.depends_on.map(d => `[${subtasks[d].agent}]: ${results[d].slice(0, 500)}`).join('\n')
|
|
1008
|
+
: ''
|
|
1009
|
+
|
|
1010
|
+
const roleSystemPrompts: Record<string, string> = {
|
|
1011
|
+
researcher: 'You are a researcher agent. Your job is to explore the codebase, find relevant files, read code, and report your findings clearly. Use Read, Grep, Glob tools. Do NOT modify any files.',
|
|
1012
|
+
coder: 'You are a coder agent. Your job is to implement code changes. Write clean, correct code. Use Edit/Write tools. Follow existing project conventions.',
|
|
1013
|
+
tester: 'You are a tester agent. Write comprehensive tests and run them. Verify that implementations work correctly. Report test results clearly.',
|
|
1014
|
+
reviewer: 'You are a code reviewer agent. Review the code changes for bugs, security issues, style problems, and adherence to best practices. Report issues found.',
|
|
1015
|
+
analyst: 'You are an analyst agent. Analyze requirements and produce clear technical specifications.',
|
|
1016
|
+
architect: 'You are an architect agent. Design system architecture, define patterns, interfaces and data flow.',
|
|
1017
|
+
}
|
|
1018
|
+
const agentPrompt = `Complete this task:\n\n${st.task}${depContext}`
|
|
1019
|
+
const sysPrompt = roleSystemPrompts[st.agent] || `You are a ${st.agent} agent in a development swarm. Do your assigned work precisely. Do not ask questions, just execute.`
|
|
1020
|
+
|
|
1021
|
+
try {
|
|
1022
|
+
results[st.idx] = await runClaude(agentPrompt, sysPrompt, agent.id)
|
|
1023
|
+
const step = wf.steps.find(s => s.id === stepId)
|
|
1024
|
+
if (step) step.status = 'completed'
|
|
1025
|
+
} catch (err) {
|
|
1026
|
+
results[st.idx] = `Error: ${err instanceof Error ? err.message : String(err)}`
|
|
1027
|
+
const step = wf.steps.find(s => s.id === stepId)
|
|
1028
|
+
if (step) step.status = 'failed'
|
|
1029
|
+
}
|
|
1030
|
+
completed.add(st.idx)
|
|
1031
|
+
broadcast('workflow:updated', wf)
|
|
1032
|
+
})
|
|
1033
|
+
|
|
1034
|
+
await Promise.all(wave)
|
|
1035
|
+
}
|
|
1036
|
+
|
|
1037
|
+
task.result = results.filter(Boolean).join('\n---\n').slice(0, 2000) || 'Pipeline completed'
|
|
1038
|
+
}
|
|
1039
|
+
|
|
1040
|
+
// ── PHASE 3: Mark complete ──
|
|
1041
|
+
task.status = 'completed'
|
|
1042
|
+
task.completedAt = new Date().toISOString()
|
|
1043
|
+
wf.status = 'completed'
|
|
1044
|
+
wf.completedAt = task.completedAt
|
|
1045
|
+
wf.result = task.result
|
|
1046
|
+
broadcast('task:updated', { ...task, id: taskId })
|
|
1047
|
+
broadcast('workflow:updated', wf)
|
|
1048
|
+
broadcast('task:output', { id: taskId, workflowId, type: 'done', code: 0 })
|
|
1049
|
+
if (coordinatorId) {
|
|
1050
|
+
const act = agentActivity.get(coordinatorId)
|
|
1051
|
+
updateAgentActivity(coordinatorId, { status: 'idle', currentTask: undefined, currentAction: undefined, tasksCompleted: (act?.tasksCompleted || 0) + 1 })
|
|
1052
|
+
}
|
|
1053
|
+
} catch (err) {
|
|
1054
|
+
const msg = err instanceof Error ? err.message : String(err)
|
|
1055
|
+
console.error(`[TASK ${taskId}] Pipeline failed: ${msg}`)
|
|
1056
|
+
task.status = 'failed'
|
|
1057
|
+
task.result = `Pipeline error: ${msg.slice(0, 1000)}`
|
|
1058
|
+
wf.status = 'failed'
|
|
1059
|
+
broadcast('task:updated', { ...task, id: taskId })
|
|
1060
|
+
broadcast('workflow:updated', wf)
|
|
1061
|
+
// Release all agents
|
|
1062
|
+
for (const agent of agents) {
|
|
1063
|
+
updateAgentActivity(agent.id, { status: 'idle', currentTask: undefined, currentAction: undefined })
|
|
1064
|
+
}
|
|
1065
|
+
}
|
|
1066
|
+
}
|
|
1067
|
+
|
|
1068
|
+
// ── MODE 1: ruflo swarm start ──────────────────────────────────────────
// Uses the native swarm orchestrator which deploys its own agent topology
/**
 * Launches `@claude-flow/cli swarm start` (via npx) for the given task,
 * streaming its stdout/stderr into workflow steps and `task:output`
 * broadcasts. Deployment is asynchronous: `swarm start` exits once agents
 * are deployed, after which `pollSwarmExecution` tracks actual progress.
 *
 * The objective is passed through `sanitizeShellArg` because the child is
 * spawned with `shell: true`.
 */
function launchViaSwarmCli(
  taskId: string, task: TaskRecord, taskDesc: string, title: string,
  wf: WorkflowRecord, workflowId: string,
): void {
  broadcast('task:log', { id: taskId, message: `Starting swarm execution for: ${taskDesc}` })

  const maxAgents = lastSwarmMaxAgents || 8
  const strategy = lastSwarmStrategy || 'development'
  const proc = spawn('npx', [
    '-y', '@claude-flow/cli@latest', 'swarm', 'start',
    '--objective', sanitizeShellArg(taskDesc),
    '--max-agents', String(maxAgents),
    '--strategy', strategy,
  ], { cwd: task.cwd || process.cwd(), stdio: ['ignore', 'pipe', 'pipe'], shell: true, windowsHide: true })

  // Register for cancellation + zombie detection.
  runningProcesses.set(taskId, proc)
  trackProcessActivity(taskId)
  let fullOutput = ''
  let stderrOutput = ''
  let swarmId = ''

  console.log(`[TASK ${taskId}] Launching swarm for: "${taskDesc.slice(0, 80)}"`)

  // Mark all registered agents as working
  for (const [key, reg] of agentRegistry.entries()) {
    if (!terminatedAgents.has(key)) {
      updateAgentActivity(reg.id, {
        status: 'working', currentTask: taskId,
        currentAction: `Swarm: ${title.slice(0, 40)}`,
      })
      busyAgents.add(reg.id)
    }
  }

  proc.stdout?.on('data', (chunk: Buffer) => {
    trackProcessActivity(taskId)
    const text = chunk.toString()
    fullOutput += text
    // Extract swarm ID from output (first "swarm status swarm-xxxx" hint wins)
    const idMatch = text.match(/swarm status\s+(swarm-\w+)/)
    if (idMatch && !swarmId) {
      swarmId = idMatch[1]
      task.swarmRunId = swarmId
      broadcast('task:output', { id: taskId, workflowId, type: 'text', content: `Swarm started: ${swarmId}` })
      // Start polling swarm status for live updates
      pollSwarmExecution(taskId, swarmId, title, wf, workflowId)
    }
    // Parse agent deployment table (ASCII rows "| Role | type | count |"),
    // skipping the header row, and record each deployment as a finished step.
    const roleLines = text.match(/\|\s*(\w[\w\s]*?)\s*\|\s*(\w+)\s*\|\s*(\d+)\s*\|/g)
    if (roleLines) {
      for (const line of roleLines) {
        const m = line.match(/\|\s*(\w[\w\s]*?)\s*\|\s*(\w+)\s*\|\s*(\d+)\s*\|/)
        if (m && m[1] !== 'Role') {
          const stepId = `step-${wf.steps.length + 1}`
          wf.steps.push({
            id: stepId, name: `Deploy ${m[1].trim()}`,
            status: 'completed', agent: m[2], detail: `x${m[3]}`,
          })
        }
      }
      broadcast('workflow:updated', wf)
    }
    // Broadcast raw output lines
    for (const line of text.split('\n').filter(Boolean)) {
      broadcast('task:output', { id: taskId, workflowId, type: 'raw', content: line.slice(0, 300) })
    }
  })

  proc.stderr?.on('data', (chunk: Buffer) => {
    const text = chunk.toString().trim()
    if (text) {
      stderrOutput += text + '\n'
      broadcast('task:output', { id: taskId, workflowId, type: 'stderr', content: text.slice(0, 300) })
    }
  })

  proc.on('close', (code) => {
    cleanupProcess(taskId)
    console.log(`[TASK ${taskId}] Swarm launch exited with code ${code}`)
    // swarm start returns immediately after deploying — the actual work continues
    // If it failed to even start (non-zero exit AND no swarm id seen), mark as failed
    if (code !== 0 && !swarmId) {
      task.status = 'failed'
      task.result = (fullOutput + '\n' + stderrOutput).trim().slice(0, 2000) || `Swarm launch failed (code ${code})`
      wf.status = 'failed'
      broadcast('task:updated', { ...task, id: taskId })
      broadcast('workflow:updated', wf)
      releaseAllBusyAgents(taskId, false)
    }
  })

  proc.on('error', (err) => {
    // Spawn-level failure (e.g. npx missing) — fail the task outright.
    cleanupProcess(taskId)
    task.status = 'failed'
    task.result = `Swarm launch error: ${err.message}`
    wf.status = 'failed'
    broadcast('task:updated', { ...task, id: taskId })
    broadcast('workflow:updated', wf)
    releaseAllBusyAgents(taskId, false)
  })
}
|
|
1171
|
+
|
|
1172
|
+
// Poll swarm status to track progress and detect completion
|
|
1173
|
+
function pollSwarmExecution(taskId: string, swarmId: string, title: string, wf: WorkflowRecord, workflowId: string): void {
|
|
1174
|
+
const task = taskStore.get(taskId)
|
|
1175
|
+
if (!task) return
|
|
1176
|
+
const startTime = Date.now()
|
|
1177
|
+
const maxDuration = 30 * 60 * 1000 // 30 min timeout
|
|
1178
|
+
let lastProgress = ''
|
|
1179
|
+
|
|
1180
|
+
const poll = async () => {
|
|
1181
|
+
if (!taskStore.has(taskId) || task.status === 'completed' || task.status === 'failed' || task.status === 'cancelled') return
|
|
1182
|
+
if (Date.now() - startTime > maxDuration) {
|
|
1183
|
+
task.status = 'failed'
|
|
1184
|
+
task.result = 'Swarm execution timed out after 30 minutes'
|
|
1185
|
+
wf.status = 'failed'
|
|
1186
|
+
broadcast('task:updated', { ...task, id: taskId })
|
|
1187
|
+
broadcast('workflow:updated', wf)
|
|
1188
|
+
releaseAllBusyAgents(taskId, false)
|
|
1189
|
+
return
|
|
1190
|
+
}
|
|
1191
|
+
try {
|
|
1192
|
+
const { raw } = await execCli('swarm', ['status', swarmId])
|
|
1193
|
+
// Parse progress
|
|
1194
|
+
const progressMatch = raw.match(/(\d+\.?\d*)%/)
|
|
1195
|
+
const progress = progressMatch?.[1] || '0'
|
|
1196
|
+
// Parse agent counts
|
|
1197
|
+
const activeMatch = raw.match(/Active\s*\|\s*(\d+)/)
|
|
1198
|
+
const completedMatch = raw.match(/Completed\s*\|\s*(\d+)/)
|
|
1199
|
+
const activeCount = Number(activeMatch?.[1] || 0)
|
|
1200
|
+
const completedAgents = Number(completedMatch?.[1] || 0)
|
|
1201
|
+
// Parse task counts
|
|
1202
|
+
const tasksCompletedMatch = raw.match(/Completed\s*\|\s*(\d+)/g)
|
|
1203
|
+
const tasksInProgressMatch = raw.match(/In Progress\s*\|\s*(\d+)/)
|
|
1204
|
+
const inProgressCount = Number(tasksInProgressMatch?.[1] || 0)
|
|
1205
|
+
|
|
1206
|
+
// Only broadcast if changed
|
|
1207
|
+
const statusKey = `${progress}-${activeCount}-${completedAgents}-${inProgressCount}`
|
|
1208
|
+
if (statusKey !== lastProgress) {
|
|
1209
|
+
lastProgress = statusKey
|
|
1210
|
+
broadcast('task:output', {
|
|
1211
|
+
id: taskId, workflowId, type: 'progress',
|
|
1212
|
+
content: `Progress: ${progress}% | Active agents: ${activeCount} | Tasks in progress: ${inProgressCount}`,
|
|
1213
|
+
})
|
|
1214
|
+
// Update agent activities based on swarm status
|
|
1215
|
+
const activeAgents = Array.from(agentRegistry.entries())
|
|
1216
|
+
.filter(([key]) => !terminatedAgents.has(key))
|
|
1217
|
+
.map(([, reg]) => reg)
|
|
1218
|
+
for (const agent of activeAgents) {
|
|
1219
|
+
if (activeCount > 0 && busyAgents.has(agent.id)) {
|
|
1220
|
+
updateAgentActivity(agent.id, {
|
|
1221
|
+
status: 'working', currentTask: taskId,
|
|
1222
|
+
currentAction: `Swarm ${progress}%: ${title.slice(0, 40)}`,
|
|
1223
|
+
})
|
|
1224
|
+
}
|
|
1225
|
+
}
|
|
1226
|
+
}
|
|
1227
|
+
|
|
1228
|
+
// Check if done (100% or all agents completed)
|
|
1229
|
+
if (Number(progress) >= 100) {
|
|
1230
|
+
task.status = 'completed'
|
|
1231
|
+
task.completedAt = new Date().toISOString()
|
|
1232
|
+
task.result = raw.slice(0, 2000) || 'Swarm execution completed'
|
|
1233
|
+
wf.status = 'completed'
|
|
1234
|
+
wf.completedAt = task.completedAt
|
|
1235
|
+
wf.result = task.result
|
|
1236
|
+
broadcast('task:updated', { ...task, id: taskId })
|
|
1237
|
+
broadcast('workflow:updated', wf)
|
|
1238
|
+
broadcast('task:output', { id: taskId, workflowId, type: 'done', code: 0 })
|
|
1239
|
+
releaseAllBusyAgents(taskId, true)
|
|
1240
|
+
return
|
|
1241
|
+
}
|
|
1242
|
+
// Keep polling
|
|
1243
|
+
setTimeout(poll, 3000)
|
|
1244
|
+
} catch {
|
|
1245
|
+
// Swarm may have finished — check once more then give up
|
|
1246
|
+
setTimeout(poll, 5000)
|
|
1247
|
+
}
|
|
1248
|
+
}
|
|
1249
|
+
setTimeout(poll, 3000)
|
|
1250
|
+
}
|
|
1251
|
+
|
|
1252
|
+
function releaseAllBusyAgents(taskId: string, success: boolean): void {
|
|
1253
|
+
for (const [, reg] of agentRegistry.entries()) {
|
|
1254
|
+
if (busyAgents.has(reg.id)) {
|
|
1255
|
+
const act = agentActivity.get(reg.id)
|
|
1256
|
+
if (act?.currentTask === taskId) {
|
|
1257
|
+
updateAgentActivity(reg.id, {
|
|
1258
|
+
status: 'idle', currentTask: undefined, currentAction: undefined,
|
|
1259
|
+
tasksCompleted: (act.tasksCompleted || 0) + (success ? 1 : 0),
|
|
1260
|
+
errors: (act.errors || 0) + (success ? 0 : 1),
|
|
1261
|
+
})
|
|
1262
|
+
busyAgents.delete(reg.id)
|
|
1263
|
+
}
|
|
1264
|
+
}
|
|
1265
|
+
}
|
|
1266
|
+
}
|
|
1267
|
+
|
|
1268
|
+
// ── MODE 2: claude -p (fallback when no swarm active) ──────────────────
/**
 * Launch a one-shot `claude -p` process for a task, stream its
 * `stream-json` output into workflow steps and broadcast events,
 * and reconcile task/workflow/agent state when it exits.
 *
 * Side effects: registers the process in runningProcesses, starts
 * JSONL session monitoring, and mutates `task` and `wf` in place.
 */
function launchViaClaude(
  taskId: string, task: TaskRecord, taskDesc: string, title: string,
  wf: WorkflowRecord, workflowId: string,
): void {
  broadcast('task:log', { id: taskId, message: `Starting Claude Code for: ${taskDesc}` })

  // Strip CLAUDE*-prefixed env vars so the child starts with a clean slate.
  const cleanEnv = { ...process.env }
  for (const key of Object.keys(cleanEnv)) {
    if (key.startsWith('CLAUDE') || key.startsWith('claude')) delete cleanEnv[key]
  }
  // NOTE(review): gates on LOCALAPPDATA (Windows marker) but builds the path
  // from USERPROFILE — confirm this mixed detection is intentional.
  const claudePath = process.env.LOCALAPPDATA
    ? `${process.env.USERPROFILE}\\.local\\bin\\claude.exe`
    : 'claude'
  const mcpConfigPath = path.join(process.cwd(), '.mcp.json')
  const mcpArgs = fs.existsSync(mcpConfigPath) ? ['--mcp-config', mcpConfigPath] : []
  const swarmPrompt = buildSwarmPrompt(task, taskId)
  // Pre-generate the session UUID so the JSONL monitor can watch it from the start.
  const sessionUUID = crypto.randomUUID()
  task.sessionUUID = sessionUUID
  const claudeArgs = [
    '-p', taskDesc,
    '--output-format', 'stream-json',
    '--verbose',
    ...(SKIP_PERMISSIONS ? ['--dangerously-skip-permissions'] : []),
    '--session-id', sessionUUID,
    ...mcpArgs,
    '--append-system-prompt', swarmPrompt,
  ]
  const proc = spawn(claudePath, claudeArgs, { cwd: task.cwd || process.cwd(), env: cleanEnv, stdio: ['ignore', 'pipe', 'pipe'], windowsHide: true })

  startMonitoring(sessionUUID, taskId, broadcast)
  runningProcesses.set(taskId, proc)
  trackProcessActivity(taskId)
  let fullOutput = ''
  let stderrOutput = ''

  console.log(`[TASK ${taskId}] Launching claude -p "${taskDesc.slice(0, 80)}"`)

  // Attribute the work to the assigned agent, or the coordinator for swarm-wide tasks.
  const assignedAgent = task.assignedTo || 'swarm'
  const coordinatorId = Array.from(agentRegistry.values()).find(a => a.type === 'coordinator')?.id
  const workingAgentId = assignedAgent === 'swarm' ? (coordinatorId || 'coordinator') : assignedAgent
  updateAgentActivity(workingAgentId, { status: 'working', currentTask: taskId, currentAction: `Executing: ${title.slice(0, 50)}` })

  // Each stdout chunk may hold multiple newline-delimited JSON events.
  proc.stdout?.on('data', (chunk: Buffer) => {
    trackProcessActivity(taskId)
    const text = chunk.toString()
    const lines = text.split('\n').filter(Boolean)
    for (const line of lines) {
      try {
        const evt = JSON.parse(line)
        if (evt.type === 'assistant' && evt.message?.content) {
          for (const block of evt.message.content) {
            if (block.type === 'text') {
              // Plain assistant text: accumulate and relay (truncated for the UI).
              fullOutput += block.text
              broadcast('task:output', { id: taskId, workflowId, type: 'text', content: block.text.slice(0, 300) })
            } else if (block.type === 'tool_use') {
              // Tool invocation: record as a running workflow step.
              const toolInfo = `${block.name}: ${JSON.stringify(block.input).slice(0, 200)}`
              fullOutput += `\n[tool] ${toolInfo}\n`
              const stepId = `step-${wf.steps.length + 1}`
              const inputSummary = block.input?.file_path || block.input?.command?.slice(0, 60) || block.input?.pattern || ''
              wf.steps.push({
                id: stepId, name: block.name, status: 'running',
                agent: task.assignedTo || 'claude', detail: inputSummary,
              })
              broadcast('workflow:updated', wf)
              broadcast('task:output', { id: taskId, workflowId, type: 'tool', tool: block.name, input: JSON.stringify(block.input).slice(0, 200) })
              updateAgentActivity(workingAgentId, { status: 'working', currentTask: taskId, currentAction: `${block.name}: ${inputSummary.slice(0, 60)}` })
              // Subagent dispatch: surface activity on the matching swarm agent too.
              if (block.name === 'Agent' && block.input?.subagent_type) {
                const matchedAgent = findSwarmAgentForType(block.input.subagent_type)
                if (matchedAgent) {
                  updateAgentActivity(matchedAgent.id, {
                    status: 'working', currentTask: taskId,
                    currentAction: `Subagent: ${(block.input.description || block.input.subagent_type).slice(0, 60)}`,
                  })
                }
              }
            }
          }
        } else if (evt.type === 'tool_result' || (evt.type === 'user' && evt.message?.content)) {
          // A tool finished: complete the most recent running step.
          const lastRunning = [...wf.steps].reverse().find(s => s.status === 'running')
          if (lastRunning) { lastRunning.status = 'completed'; broadcast('workflow:updated', wf) }
        } else if (evt.type === 'result') {
          // Final result event: close out all steps and prefer its text as the output.
          wf.steps.forEach(s => { if (s.status === 'running') s.status = 'completed' })
          fullOutput = evt.result || fullOutput
          broadcast('task:output', { id: taskId, workflowId, type: 'text', content: 'Task completed' })
        }
      } catch {
        // Non-JSON line: keep it as raw output.
        fullOutput += line + '\n'
        broadcast('task:output', { id: taskId, workflowId, type: 'raw', content: line.slice(0, 300) })
      }
    }
  })

  proc.stderr?.on('data', (chunk: Buffer) => {
    const text = chunk.toString().trim()
    if (text) {
      stderrOutput += text + '\n'
      console.error(`[TASK ${taskId}] stderr: ${text}`)
      broadcast('task:output', { id: taskId, workflowId, type: 'stderr', content: text.slice(0, 300) })
    }
  })

  // Process exit: settle task/workflow state and agent bookkeeping.
  proc.on('close', (code) => {
    cleanupProcess(taskId)
    stopMonitoring(sessionUUID)
    const combined = (fullOutput + '\n' + stderrOutput).trim()
    console.log(`[TASK ${taskId}] Exited with code ${code}. Output length: ${combined.length}`)
    if (code === 0) {
      task.status = 'completed'
      task.completedAt = new Date().toISOString()
      task.result = fullOutput.slice(0, 2000) || 'Task completed'
      wf.status = 'completed'
      wf.completedAt = task.completedAt
      wf.result = task.result
    } else {
      task.status = 'failed'
      task.result = combined.slice(0, 2000) || `Process exited with code ${code}`
      wf.status = 'failed'
      wf.result = task.result
    }
    broadcast('task:updated', { ...task, id: taskId })
    broadcast('workflow:updated', wf)
    broadcast('task:output', { id: taskId, workflowId, type: 'done', code })
    releaseAllBusyAgents(taskId, code === 0)
    // Credit/debit the primary working agent's counters and idle it.
    const activity = agentActivity.get(workingAgentId)
    const completed = (activity?.tasksCompleted || 0) + (code === 0 ? 1 : 0)
    const errors = (activity?.errors || 0) + (code !== 0 ? 1 : 0)
    updateAgentActivity(workingAgentId, { status: 'idle', currentTask: undefined, currentAction: undefined, tasksCompleted: completed, errors })
  })

  // Spawn failure (e.g. binary not found).
  // NOTE(review): unlike the close handler, this path does not broadcast
  // workflow:updated nor release busy agents — confirm whether intentional.
  proc.on('error', (err) => {
    cleanupProcess(taskId)
    console.error(`[TASK ${taskId}] Process error: ${err.message}`)
    task.status = 'failed'
    task.result = `Process error: ${err.message}`
    wf.status = 'failed'
    broadcast('task:updated', { ...task, id: taskId })
  })
}
|
|
1407
|
+
|
|
1408
|
+
/**
 * Express routes for swarm lifecycle: POST /init, GET /status,
 * GET /health, POST /shutdown. Handlers mutate the module-level
 * lastSwarm* cache and agent registries.
 */
function swarmRoutes(): Router {
  const r = Router()
  // POST /init — create a fresh swarm, purge zombie agents, auto-spawn a default team.
  r.post('/init', h(async (req, res) => {
    const { topology, maxAgents, strategy } = req.body || {}
    const args = ['init']
    if (topology) args.push('--topology', topology)
    if (maxAgents) args.push('--max-agents', String(maxAgents))
    if (strategy) args.push('--strategy', strategy)
    const { raw } = await execCli('swarm', args)
    // Extract swarm ID from output
    const idMatch = raw.match(/Swarm ID\s*\|\s*(\S+)/)
    lastSwarmId = idMatch?.[1] || `swarm-${Date.now()}`
    lastSwarmTopology = topology || 'hierarchical'
    lastSwarmStrategy = strategy || 'specialized'
    lastSwarmMaxAgents = maxAgents || 8
    lastSwarmCreatedAt = new Date().toISOString()
    swarmShutdown = false
    allTerminatedBefore = null // Reset so new agents show up

    // Purge all existing zombie agents before spawning fresh ones
    const purged = await purgeAllCliAgents()
    if (purged > 0) console.log(`[SWARM INIT] Purged ${purged} old agents`)

    // Start the orchestration daemon in background (fire-and-forget)
    ensureDaemon().catch(() => {})

    // Auto-spawn a default set of specialized agents for the swarm
    const defaultAgents: Array<{ type: string; name: string }> = [
      { type: 'coordinator', name: 'Coordinator' },
      { type: 'coder', name: 'Developer-1' },
      { type: 'coder', name: 'Developer-2' },
      { type: 'researcher', name: 'Analyst' },
      { type: 'tester', name: 'Tester' },
      { type: 'reviewer', name: 'Reviewer' },
    ]
    const spawnedAgents: Array<{ id: string; name: string; type: string; status: string; createdAt: string }> = []
    for (const ag of defaultAgents) {
      try {
        const spawnArgs = ['spawn', '--type', ag.type, '--name', ag.name]
        const spawnResult = await execCli('agent', spawnArgs)
        // Parse the agent ID and creation time out of the CLI's table output.
        const spawnIdMatch = spawnResult.raw.match(/ID\s*\|\s*(agent-[\w-]+)/)
        const createdMatch = spawnResult.raw.match(/Created\s*\|\s*(\S+)/)
        const agentId = spawnIdMatch?.[1] || `agent-${Date.now()}-${Math.random().toString(36).slice(2, 6)}`
        const createdISO = createdMatch?.[1] || new Date().toISOString()
        // Registry is keyed by local HH:MM:SS because the CLI list only shows that.
        const localDate = new Date(createdISO)
        const createdTime = `${String(localDate.getHours()).padStart(2,'0')}:${String(localDate.getMinutes()).padStart(2,'0')}:${String(localDate.getSeconds()).padStart(2,'0')}`
        agentRegistry.set(createdTime, { id: agentId, name: ag.name, type: ag.type })
        currentSwarmAgentIds.add(agentId)
        spawnedAgents.push({ id: agentId, name: ag.name, type: ag.type, status: 'running', createdAt: createdISO })
      } catch (e) {
        // Best-effort: a single failed spawn should not abort swarm init.
        console.warn(`[swarm] Failed to spawn agent ${ag.name} (${ag.type}):`, e instanceof Error ? e.message : String(e))
      }
    }

    const result = {
      raw, status: 'active', id: lastSwarmId,
      topology: lastSwarmTopology, strategy: lastSwarmStrategy,
      maxAgents: lastSwarmMaxAgents, activeAgents: spawnedAgents.length,
      agents: spawnedAgents, createdAt: lastSwarmCreatedAt,
    }
    broadcast('swarm:status', result)
    res.json(result)
  }))
  // GET /status — cached swarm metadata plus the live (non-terminated) agent roster.
  r.get('/status', h(async (_req, res) => {
    if (swarmShutdown) { res.json({ status: 'inactive' }); return }
    try {
      const { raw } = await execCli('swarm', ['status'])
      // Build agents list from registry (exclude terminated)
      const agentsList = Array.from(agentRegistry.entries())
        .filter(([key]) => !terminatedAgents.has(key))
        .map(([, reg]) => ({
          id: reg.id, name: reg.name, type: reg.type,
          status: 'running' as const, createdAt: '',
        }))
      const activeCount = agentsList.length
      res.json({
        raw,
        id: lastSwarmId || '',
        topology: lastSwarmTopology,
        strategy: lastSwarmStrategy,
        status: 'active',
        maxAgents: lastSwarmMaxAgents,
        activeAgents: activeCount,
        agents: agentsList,
        createdAt: lastSwarmCreatedAt,
      })
    } catch { res.json({ status: 'inactive' }) }
  }))
  // GET /health — liveness probe based solely on CLI status text.
  r.get('/health', h(async (_req, res) => {
    try {
      const { raw } = await execCli('swarm', ['status'])
      res.json({ healthy: !raw.includes('not running'), raw })
    } catch { res.json({ healthy: false }) }
  }))
  // POST /shutdown — best-effort CLI shutdown, then mark local state inactive.
  r.post('/shutdown', h(async (_req, res) => {
    try { await execCli('swarm', ['shutdown']) } catch (e) {
      console.log('[swarm] Shutdown command skipped:', e instanceof Error ? e.message : String(e))
    }
    lastSwarmId = ''
    lastSwarmCreatedAt = ''
    swarmShutdown = true
    broadcast('swarm:status', { status: 'shutdown' })
    res.json({ status: 'shutdown' })
  }))
  return r
}
|
|
1514
|
+
|
|
1515
|
+
// In-memory registry to track agent names/IDs (CLI table doesn't include them)
// Keyed by created time (HH:MM:SS) since CLI table only shows that
const agentRegistry: Map<string, { id: string; name: string; type: string }> = new Map()
// Created-time keys of agents the UI has terminated; such rows are
// filtered out of listings even if the CLI still reports them.
const terminatedAgents = new Set<string>() // set of created-time keys
let allTerminatedBefore: string | null = null // ISO timestamp: ignore all CLI agents created before this
|
|
1520
|
+
|
|
1521
|
+
// Real-time agent activity tracking
// One record per agent, updated via updateAgentActivity() and
// broadcast to the frontend as 'agent:activity' events.
interface AgentActivity {
  // Coarse liveness state shown in the UI.
  status: 'idle' | 'working' | 'error'
  // ID of the task the agent is currently assigned to, if any.
  currentTask?: string
  // Short human-readable description of what the agent is doing now.
  currentAction?: string
  // ISO timestamp of the most recent update to this record.
  lastUpdate: string
  // Lifetime counters used to derive the error rate in listings.
  tasksCompleted: number
  errors: number
}
// Latest activity record per agent ID.
const agentActivity: Map<string, AgentActivity> = new Map()
|
|
1531
|
+
|
|
1532
|
+
// Per-agent output buffer — stores the last N lines of Claude output per agent
const agentOutputBuffers: Map<string, string[]> = new Map()
// Cap per-agent buffers at this many lines; older lines are dropped first.
const AGENT_OUTPUT_MAX_LINES = 500
|
|
1535
|
+
|
|
1536
|
+
function appendAgentOutput(agentId: string, line: string) {
|
|
1537
|
+
let buf = agentOutputBuffers.get(agentId)
|
|
1538
|
+
if (!buf) { buf = []; agentOutputBuffers.set(agentId, buf) }
|
|
1539
|
+
buf.push(line)
|
|
1540
|
+
if (buf.length > AGENT_OUTPUT_MAX_LINES) buf.splice(0, buf.length - AGENT_OUTPUT_MAX_LINES)
|
|
1541
|
+
broadcast('agent:output', { agentId, line })
|
|
1542
|
+
}
|
|
1543
|
+
|
|
1544
|
+
// Map subagent_type to deployed swarm agent, tracking which are already busy
// IDs of agents currently claimed by a running task (see findSwarmAgentForType).
const busyAgents = new Set<string>()
|
|
1546
|
+
|
|
1547
|
+
// Track agent IDs belonging to current swarm (set on swarm init, cleared on shutdown)
// Populated during POST /swarm/init; cleared by purgeAllCliAgents().
let currentSwarmAgentIds = new Set<string>()
|
|
1549
|
+
|
|
1550
|
+
// Purge all CLI agents — parallel batches of 10 for speed
|
|
1551
|
+
async function purgeAllCliAgents(): Promise<number> {
|
|
1552
|
+
let stopped = 0
|
|
1553
|
+
try {
|
|
1554
|
+
const { parsed } = await execCli('agent', ['list', '--format', 'json'])
|
|
1555
|
+
const data = parsed as Record<string, unknown>
|
|
1556
|
+
const agents = (data?.agents || []) as Array<Record<string, unknown>>
|
|
1557
|
+
const ids = agents.map(a => String(a.agentId || a.id || '')).filter(Boolean)
|
|
1558
|
+
// Process in parallel batches of 10
|
|
1559
|
+
const batchSize = 10
|
|
1560
|
+
for (let i = 0; i < ids.length; i += batchSize) {
|
|
1561
|
+
const batch = ids.slice(i, i + batchSize)
|
|
1562
|
+
const results = await Promise.allSettled(
|
|
1563
|
+
batch.map(id => execCli('agent', ['stop', id]))
|
|
1564
|
+
)
|
|
1565
|
+
stopped += results.filter(r => r.status === 'fulfilled').length
|
|
1566
|
+
}
|
|
1567
|
+
} catch (e) {
|
|
1568
|
+
console.warn('[purge] Failed to list/stop CLI agents:', e instanceof Error ? e.message : String(e))
|
|
1569
|
+
}
|
|
1570
|
+
// Clear all local tracking
|
|
1571
|
+
agentRegistry.clear()
|
|
1572
|
+
terminatedAgents.clear()
|
|
1573
|
+
agentActivity.clear()
|
|
1574
|
+
agentOutputBuffers.clear()
|
|
1575
|
+
busyAgents.clear()
|
|
1576
|
+
currentSwarmAgentIds.clear()
|
|
1577
|
+
allTerminatedBefore = null
|
|
1578
|
+
persistState()
|
|
1579
|
+
return stopped
|
|
1580
|
+
}
|
|
1581
|
+
|
|
1582
|
+
function findSwarmAgentForType(subagentType: string): { id: string; name: string; type: string } | null {
|
|
1583
|
+
// Map subagent_type back to swarm agent types
|
|
1584
|
+
const typeMapping: Record<string, string[]> = {
|
|
1585
|
+
coder: ['coder'], 'sparc-coder': ['coder'],
|
|
1586
|
+
researcher: ['researcher'], Explore: ['researcher'],
|
|
1587
|
+
tester: ['tester'], 'tdd-london-swarm': ['tester'],
|
|
1588
|
+
reviewer: ['reviewer'], 'code-analyzer': ['reviewer'],
|
|
1589
|
+
analyst: ['analyst', 'researcher'],
|
|
1590
|
+
architecture: ['architect', 'coordinator'],
|
|
1591
|
+
'general-purpose': ['coordinator'],
|
|
1592
|
+
'performance-engineer': ['performance-engineer'],
|
|
1593
|
+
'security-architect': ['security-architect'],
|
|
1594
|
+
}
|
|
1595
|
+
const candidateTypes = typeMapping[subagentType] || [subagentType]
|
|
1596
|
+
const activeAgents = Array.from(agentRegistry.entries())
|
|
1597
|
+
.filter(([key]) => !terminatedAgents.has(key))
|
|
1598
|
+
.map(([, reg]) => reg)
|
|
1599
|
+
|
|
1600
|
+
// Prefer an idle agent of the right type
|
|
1601
|
+
for (const t of candidateTypes) {
|
|
1602
|
+
const idle = activeAgents.find(a => a.type === t && !busyAgents.has(a.id))
|
|
1603
|
+
if (idle) { busyAgents.add(idle.id); return idle }
|
|
1604
|
+
}
|
|
1605
|
+
// Fallback: any agent of the right type (even if busy)
|
|
1606
|
+
for (const t of candidateTypes) {
|
|
1607
|
+
const any = activeAgents.find(a => a.type === t)
|
|
1608
|
+
if (any) return any
|
|
1609
|
+
}
|
|
1610
|
+
return null
|
|
1611
|
+
}
|
|
1612
|
+
|
|
1613
|
+
function updateAgentActivity(agentId: string, update: Partial<AgentActivity>) {
|
|
1614
|
+
const existing = agentActivity.get(agentId) || {
|
|
1615
|
+
status: 'idle' as const, lastUpdate: new Date().toISOString(), tasksCompleted: 0, errors: 0,
|
|
1616
|
+
}
|
|
1617
|
+
const updated = { ...existing, ...update, lastUpdate: new Date().toISOString() }
|
|
1618
|
+
agentActivity.set(agentId, updated)
|
|
1619
|
+
broadcast('agent:activity', { agentId, ...updated })
|
|
1620
|
+
persistState()
|
|
1621
|
+
}
|
|
1622
|
+
|
|
1623
|
+
function timeToISO(timeStr: string): string {
|
|
1624
|
+
if (!timeStr || timeStr === 'N/A') return new Date().toISOString()
|
|
1625
|
+
// If it's already ISO format, return as-is
|
|
1626
|
+
if (timeStr.includes('T') || timeStr.includes('-')) return timeStr
|
|
1627
|
+
// Time-only like "11:39:08" — attach today's date
|
|
1628
|
+
const today = new Date().toISOString().split('T')[0]
|
|
1629
|
+
return `${today}T${timeStr}`
|
|
1630
|
+
}
|
|
1631
|
+
|
|
1632
|
+
/**
 * Express routes for agent management: list, spawn, pool, per-agent
 * status/health, terminate, terminate-all, and a PATCH stub. Listing
 * merges CLI output with the local registry and activity maps.
 */
function agentRoutes(): Router {
  const r = Router()
  // GET / — list agents from the CLI table, enriched with local registry/activity.
  r.get('/', h(async (_req, res) => {
    try {
      const { raw } = await execCli('agent', ['list'])
      const rows = parseCliTable(raw)
      let agents = rows
        .filter(row => {
          // Hide agents terminated via this server (by created-time key or cutoff).
          const created = row.created || ''
          if (terminatedAgents.has(created)) return false
          if (allTerminatedBefore) {
            const iso = timeToISO(created)
            if (iso <= allTerminatedBefore) return false
          }
          return true
        })
        .map((row, i) => {
          const created = row.created || ''
          const reg = agentRegistry.get(created)
          const agentId = row.id || reg?.id || `agent-${i}`
          const activity = agentActivity.get(agentId)
          return {
            id: agentId,
            name: reg?.name || row.name || row.type || `Agent ${i + 1}`,
            type: row.type || reg?.type || 'unknown',
            // Live activity overrides the CLI's reported status.
            status: activity?.status === 'working' ? 'running' : (row.status || 'idle'),
            createdAt: timeToISO(created),
            // 'last_acti' handles the CLI truncating the column header.
            lastActivity: activity?.lastUpdate || ((row.last_activity || row['last_acti']) === 'N/A' ? undefined : row.last_activity),
            currentTask: activity?.currentTask,
            currentAction: activity?.currentAction,
            metrics: {
              tasksCompleted: activity?.tasksCompleted || 0,
              errorRate: activity ? (activity.errors / Math.max(1, activity.tasksCompleted + activity.errors)) : 0,
              avgResponseTime: 0,
            },
          }
        })
      // Fallback: if ASCII table returned nothing, try JSON format
      if (agents.length === 0) {
        try {
          const { parsed } = await execCli('agent', ['list', '--format', 'json'])
          if (parsed) {
            const p = parsed as Record<string, unknown>
            const jsonAgents = (p.agents || []) as Array<Record<string, unknown>>
            agents = jsonAgents
              .filter(a => {
                const created = String(a.createdAt || '')
                if (allTerminatedBefore && created <= allTerminatedBefore) return false
                return true
              })
              .map((a, i) => {
                const id = String(a.agentId || a.id || `agent-${i}`)
                const activity = agentActivity.get(id)
                return {
                  id,
                  name: String(a.name || a.agentType || a.type || `Agent ${i + 1}`),
                  type: String(a.agentType || a.type || 'unknown'),
                  status: activity?.status === 'working' ? 'running' : String(a.status || 'idle'),
                  createdAt: String(a.createdAt || new Date().toISOString()),
                  lastActivity: activity?.lastUpdate || undefined,
                  currentTask: activity?.currentTask,
                  currentAction: activity?.currentAction,
                  metrics: {
                    tasksCompleted: activity?.tasksCompleted || 0,
                    errorRate: activity ? (activity.errors / Math.max(1, activity.tasksCompleted + activity.errors)) : 0,
                    avgResponseTime: 0,
                  },
                }
              })
          }
        } catch { /* JSON format also failed, stick with empty */ }
      }
      res.json({ raw, agents })
    } catch { res.json({ agents: [] }) }
  }))
  // POST /spawn — create one agent via the CLI and register it locally.
  r.post('/spawn', h(async (req, res) => {
    const { type, name } = req.body || {}
    const args = ['spawn', '--type', type || 'coder', '--name', name || 'agent']
    const { raw } = await execCli('agent', args)
    // Extract ID and Created time from spawn output
    const idMatch = raw.match(/ID\s*\|\s*(agent-[\w-]+)/)
    const createdMatch = raw.match(/Created\s*\|\s*(\S+)/)
    const agentId = idMatch?.[1] || `agent-${Date.now()}`
    // CLI list shows LOCAL time (HH:MM:SS), spawn output is UTC ISO
    // Convert UTC to local HH:MM:SS for matching
    const createdISO = createdMatch?.[1] || new Date().toISOString()
    const localDate = new Date(createdISO)
    const createdTime = `${String(localDate.getHours()).padStart(2,'0')}:${String(localDate.getMinutes()).padStart(2,'0')}:${String(localDate.getSeconds()).padStart(2,'0')}`
    // Register by local created time for lookup when list refreshes
    agentRegistry.set(createdTime, { id: agentId, name: name || type || 'agent', type: type || 'coder' })
    const result = { raw, id: agentId, type, name, status: 'spawned', createdAt: createdISO }
    broadcast('agent:added', result)
    res.json(result)
  }))
  // GET /pool — raw agent pool view straight from the CLI.
  r.get('/pool', h(async (_req, res) => {
    try {
      const { raw } = await execCli('agent', ['list'])
      res.json({ raw, ...parseCliOutput(raw) as object })
    } catch { res.json({ pool: [] }) }
  }))
  // GET /:id/status — proxy a single agent's CLI status.
  r.get('/:id/status', h(async (req, res) => {
    const { raw } = await execCli('agent', ['status', String(req.params.id)])
    res.json({ raw, ...parseCliOutput(raw) as object })
  }))
  // GET /:id/health — NOTE(review): always reports healthy; no real probe is performed.
  r.get('/:id/health', h(async (req, res) => {
    res.json({ id: String(req.params.id), healthy: true })
  }))
  // POST /:id/terminate — stop one agent and hide it from future listings.
  r.post('/:id/terminate', h(async (req, res) => {
    const id = String(req.params.id)
    // Try CLI stop (may or may not actually work)
    try { await execCli('agent', ['stop', id]) } catch (e) {
      console.log(`[agent] CLI stop for ${id} skipped:`, e instanceof Error ? e.message : String(e))
    }
    // Find the agent's created time key and mark as terminated
    for (const [timeKey, reg] of agentRegistry.entries()) {
      if (reg.id === id) { terminatedAgents.add(timeKey); break }
    }
    // For agents without registry entry, we need to find by current list
    try {
      const { raw } = await execCli('agent', ['list'])
      const rows = parseCliTable(raw)
      // Match by id pattern "agent-N"
      const idxMatch = id.match(/^agent-(\d+)$/)
      if (idxMatch) {
        const activeRows = rows.filter(r => !terminatedAgents.has(r.created || ''))
        const idx = Number(idxMatch[1])
        if (activeRows[idx]) terminatedAgents.add(activeRows[idx].created || '')
      }
    } catch (e) {
      console.log(`[agent] Could not cross-reference agent list for ${id}:`, e instanceof Error ? e.message : String(e))
    }
    broadcast('agent:removed', { id })
    res.json({ id, status: 'terminated' })
  }))
  // POST /terminate-all — mark every known agent terminated; best-effort CLI stop.
  r.post('/terminate-all', h(async (_req, res) => {
    // Set the cutoff: any CLI agent from before NOW is considered terminated
    allTerminatedBefore = new Date().toISOString()
    // Also mark all registry agents
    for (const [timeKey] of agentRegistry.entries()) {
      terminatedAgents.add(timeKey)
    }
    // Try CLI stop all
    try { await execCli('agent', ['stop', '--all']) } catch (e) {
      console.log('[agent] CLI stop --all skipped:', e instanceof Error ? e.message : String(e))
    }
    agentActivity.clear()
    broadcast('agents:cleared', {})
    res.json({ terminated: 'all', status: 'all terminated' })
  }))
  // PATCH /:id — NOTE(review): echo-only stub; no state is actually updated here.
  r.patch('/:id', h(async (req, res) => {
    const id = String(req.params.id)
    res.json({ id, updated: true, ...req.body })
  }))
  return r
}
|
|
1787
|
+
|
|
1788
|
+
// In-memory task store (CLI task list doesn't persist properly)

// A single tracked task. Timestamps are ISO-8601 strings. `status` takes the
// string values set by the task routes: 'pending', 'in_progress',
// 'completed', 'failed', or 'cancelled'.
interface TaskRecord {
  id: string; title: string; description: string; status: string
  priority: string; assignedTo?: string; createdAt: string; startedAt?: string; completedAt?: string; result?: string
  // sessionUUID / swarmRunId link the task to external runs — set elsewhere;
  // not written by the routes visible in this section.
  sessionUUID?: string; swarmRunId?: string
  /** Working directory for claude -p processes */
  cwd?: string
}
// Keyed by task id. Process-lifetime only — lost on server restart.
const taskStore: Map<string, TaskRecord> = new Map()
|
|
1797
|
+
|
|
1798
|
+
/**
 * Router for /tasks — CRUD plus lifecycle actions over the in-memory
 * `taskStore`. Task IDs come from the CLI when possible, otherwise a
 * timestamp-based fallback is generated. Mutations are pushed to connected
 * clients via `broadcast`.
 */
function taskRoutes(): Router {
  const r = Router()
  // GET /summary — aggregate counts by status plus completion rate.
  r.get('/summary', h(async (_req, res) => {
    const all = [...taskStore.values()]
    const completed = all.filter(t => t.status === 'completed').length
    const pending = all.filter(t => t.status === 'pending').length
    const inProgress = all.filter(t => t.status === 'in_progress').length
    // 'failed' and 'cancelled' are lumped together as failures here.
    const failed = all.filter(t => t.status === 'failed' || t.status === 'cancelled').length
    res.json({
      total: all.length, completed, pending, inProgress, failed,
      completionRate: all.length > 0 ? completed / all.length : 0,
      averageTime: '--',
    })
  }))
  // GET / — full task list.
  r.get('/', h(async (_req, res) => {
    res.json({ tasks: [...taskStore.values()] })
  }))
  // POST / — create a task; optionally assign and launch it immediately.
  r.post('/', h(async (req, res) => {
    const { title, description, priority, assignTo, cwd } = req.body || {}
    // Create via CLI to get a proper ID
    let taskId = `task-${Date.now()}`
    try {
      const args = ['create', '--type', 'implementation', '--description', `${title}: ${description || ''}`]
      if (priority) args.push('--priority', priority)
      const { raw } = await execCli('task', args)
      const idMatch = raw.match(/task-[\w-]+/)
      if (idMatch) taskId = idMatch[0]
    } catch (e) {
      // Best-effort: CLI failure falls back to the generated id above.
      console.log('[cli] ID from CLI unavailable, using generated:', e instanceof Error ? e.message : String(e))
    }
    // Validate cwd if provided
    // Only accept a cwd that exists on disk; otherwise drop it silently.
    const resolvedCwd = cwd && typeof cwd === 'string' && cwd.trim()
      ? (fs.existsSync(cwd.trim()) ? cwd.trim() : undefined)
      : undefined
    const task: TaskRecord = {
      id: taskId,
      title: title || 'Untitled',
      description: description || '',
      status: assignTo ? 'in_progress' : 'pending',
      priority: priority || 'normal',
      assignedTo: assignTo || undefined,
      createdAt: new Date().toISOString(),
      startedAt: assignTo ? new Date().toISOString() : undefined,
      cwd: resolvedCwd,
    }
    taskStore.set(taskId, task)
    broadcast('task:added', task)
    // Respond first, then kick off background work — callers are not kept
    // waiting for the workflow to start.
    res.json(task)

    // If assigned on creation, execute in background
    if (assignTo) {
      launchWorkflowForTask(taskId, task.title, task.description)
    }
  }))
  // GET /:id/status — the full task record (or an error object; still 200).
  r.get('/:id/status', h(async (req, res) => {
    const task = taskStore.get(String(req.params.id))
    res.json(task || { error: 'Task not found' })
  }))
  // POST /:id/assign — mark in-progress and launch the workflow.
  r.post('/:id/assign', h(async (req, res) => {
    const id = String(req.params.id)
    const { agentId } = req.body || {}
    const task = taskStore.get(id)
    if (task) {
      task.assignedTo = agentId
      task.status = 'in_progress'
      task.startedAt = new Date().toISOString()
      broadcast('task:updated', { ...task, id })

      // Execute in background via claude-flow workflow
      launchWorkflowForTask(id, task.title, task.description)
    }
    // NOTE(review): responds `assigned: true` even when the id is unknown —
    // confirm callers don't rely on this as an existence check.
    res.json({ id, assigned: true, agentId })
  }))
  // POST /:id/complete — mark done with an optional result payload.
  r.post('/:id/complete', h(async (req, res) => {
    const id = String(req.params.id)
    const task = taskStore.get(id)
    if (task) {
      task.status = 'completed'
      task.completedAt = new Date().toISOString()
      task.result = req.body?.result || 'Completed'
      broadcast('task:updated', { ...task, id })
    }
    res.json({ id, completed: true })
  }))
  // POST /:id/cancel — cancel the task, kill its processes, and cancel any
  // workflow linked to it.
  r.post('/:id/cancel', h(async (req, res) => {
    const id = String(req.params.id)
    const task = taskStore.get(id)
    if (task) {
      task.status = 'cancelled'
      broadcast('task:updated', { ...task, id })

      // Kill running processes for this task
      // Process keys are prefixed with the task id; SIGTERM first, escalate
      // to SIGKILL after 5s if the process is still alive.
      for (const [key, proc] of runningProcesses.entries()) {
        if (key.startsWith(id) && !proc.killed) {
          proc.kill('SIGTERM')
          setTimeout(() => { if (!proc.killed) proc.kill('SIGKILL') }, 5000)
          cleanupProcess(key)
        }
      }

      // Cancel linked workflow
      // (wfId is unused — only the workflow value is needed here.)
      for (const [wfId, wf] of workflowStore.entries()) {
        if (wf.taskId === id && wf.status !== 'completed' && wf.status !== 'cancelled') {
          wf.status = 'cancelled'
          wf.completedAt = new Date().toISOString()
          wf.steps.forEach(s => { if (s.status === 'running' || s.status === 'pending') s.status = 'cancelled' })
          broadcast('workflow:updated', wf)
        }
      }
    }
    res.json({ id, cancelled: true })
  }))

  // Task continuation — create a follow-up task with previous context
  r.post('/:id/continue', h(async (req, res) => {
    const parentId = String(req.params.id)
    const parentTask = taskStore.get(parentId)
    if (!parentTask) { res.status(404).json({ error: 'Parent task not found' }); return }

    const { instruction } = req.body || {}
    if (!instruction?.trim()) { res.status(400).json({ error: 'instruction is required' }); return }

    // Build new task with context from parent
    const taskId = `task-${Date.now()}`
    // Cap carried-over context: 1500 chars of result, 2000 chars of output.
    const prevResult = parentTask.result?.slice(0, 1500) || 'No result captured'
    const prevOutput = readTaskOutputHistory(parentId, 50)
    const outputSummary = prevOutput.map(o => o.content).join('\n').slice(0, 2000)

    const contextBlock = [
      `[CONTINUATION of task "${parentTask.title}" (${parentId})]`,
      '',
      'Previous task result:',
      prevResult,
      '',
      outputSummary ? `Recent output:\n${outputSummary}` : '',
      '',
      'New instruction:',
      instruction,
    ].filter(Boolean).join('\n')

    const newTask: TaskRecord = {
      id: taskId,
      title: `${parentTask.title} (continued)`,
      description: contextBlock,
      status: 'in_progress',
      priority: parentTask.priority,
      assignedTo: parentTask.assignedTo || 'swarm',
      createdAt: new Date().toISOString(),
      startedAt: new Date().toISOString(),
    }
    taskStore.set(taskId, newTask)
    broadcast('task:added', newTask)
    // Respond before launching the background workflow (same pattern as POST /).
    res.json(newTask)

    // Execute in background
    launchWorkflowForTask(taskId, newTask.title, newTask.description)
  }))

  // Task output history — retrieve persisted output lines
  // NOTE(review): registered as a raw (sync) handler cast to RequestHandler
  // rather than wrapped in h() like the other routes — intentional since the
  // handler is synchronous, but inconsistent with the file's pattern.
  r.get('/:id/output', (((req, res) => {
    const id = String(req.params.id)
    const tail = Number(req.query.tail) || 200
    const lines = readTaskOutputHistory(id, tail)
    res.json({ taskId: id, lines })
  }) as RequestHandler))

  return r
}
|
|
1966
|
+
|
|
1967
|
+
function memoryRoutes(): Router {
|
|
1968
|
+
const r = Router()
|
|
1969
|
+
r.get('/stats', h(async (_req, res) => {
|
|
1970
|
+
try {
|
|
1971
|
+
const { raw } = await execCli('memory', ['stats'])
|
|
1972
|
+
res.json({ raw, ...parseCliOutput(raw) as object })
|
|
1973
|
+
} catch { res.json({ totalEntries: 0, namespaces: [] }) }
|
|
1974
|
+
}))
|
|
1975
|
+
r.get('/', h(async (req, res) => {
|
|
1976
|
+
const args = ['list']
|
|
1977
|
+
if (req.query.namespace) args.push('--namespace', String(req.query.namespace))
|
|
1978
|
+
if (req.query.limit) args.push('--limit', String(req.query.limit))
|
|
1979
|
+
try {
|
|
1980
|
+
const { raw } = await execCli('memory', args)
|
|
1981
|
+
res.json({ raw, entries: [], ...parseCliOutput(raw) as object })
|
|
1982
|
+
} catch { res.json({ entries: [] }) }
|
|
1983
|
+
}))
|
|
1984
|
+
r.post('/search', h(async (req, res) => {
|
|
1985
|
+
const { query, namespace, limit } = req.body || {}
|
|
1986
|
+
const args = ['search', '--query', query || '']
|
|
1987
|
+
if (namespace) args.push('--namespace', namespace)
|
|
1988
|
+
if (limit) args.push('--limit', String(limit))
|
|
1989
|
+
const { raw } = await execCli('memory', args)
|
|
1990
|
+
res.json({ raw, results: [], ...parseCliOutput(raw) as object })
|
|
1991
|
+
}))
|
|
1992
|
+
r.post('/migrate', h(async (req, res) => {
|
|
1993
|
+
const { from, to } = req.body || {}
|
|
1994
|
+
const { raw } = await execCli('memory', ['migrate', '--from', from, '--to', to])
|
|
1995
|
+
res.json({ raw, migrated: true })
|
|
1996
|
+
}))
|
|
1997
|
+
r.post('/', h(async (req, res) => {
|
|
1998
|
+
const { key, value, namespace, tags, ttl } = req.body || {}
|
|
1999
|
+
const args = ['store', '--key', key, '--value', value]
|
|
2000
|
+
if (namespace) args.push('--namespace', namespace)
|
|
2001
|
+
if (tags?.length) args.push('--tags', tags.join(','))
|
|
2002
|
+
if (ttl) args.push('--ttl', String(ttl))
|
|
2003
|
+
const { raw } = await execCli('memory', args)
|
|
2004
|
+
broadcast('memory:stored', { key })
|
|
2005
|
+
res.json({ raw, stored: true, key })
|
|
2006
|
+
}))
|
|
2007
|
+
r.get('/:key', h(async (req, res) => {
|
|
2008
|
+
const args = ['retrieve', '--key', String(req.params.key)]
|
|
2009
|
+
if (req.query.namespace) args.push('--namespace', String(req.query.namespace))
|
|
2010
|
+
const { raw } = await execCli('memory', args)
|
|
2011
|
+
res.json({ raw, ...parseCliOutput(raw) as object })
|
|
2012
|
+
}))
|
|
2013
|
+
r.delete('/:key', h(async (req, res) => {
|
|
2014
|
+
const args = ['delete', '--key', String(req.params.key)]
|
|
2015
|
+
if (req.query.namespace) args.push('--namespace', String(req.query.namespace))
|
|
2016
|
+
const { raw } = await execCli('memory', args)
|
|
2017
|
+
broadcast('memory:deleted', { key: String(req.params.key) })
|
|
2018
|
+
res.json({ raw, deleted: true })
|
|
2019
|
+
}))
|
|
2020
|
+
return r
|
|
2021
|
+
}
|
|
2022
|
+
|
|
2023
|
+
// In-memory session store

// Snapshot of swarm state at save time. `agentCount` / `taskCount` capture
// the registry/store sizes when the session was saved; `status` is set to
// 'saved' on save and 'restored' on restore by the session routes.
interface SessionRecord {
  id: string; name: string; status: string; createdAt: string; agentCount: number; taskCount: number
}
// Keyed by session id. Process-lifetime only — not persisted to disk here.
const sessionStore: Map<string, SessionRecord> = new Map()
|
|
2028
|
+
|
|
2029
|
+
function sessionRoutes(): Router {
|
|
2030
|
+
const r = Router()
|
|
2031
|
+
r.get('/', h(async (_req, res) => {
|
|
2032
|
+
res.json({ sessions: [...sessionStore.values()] })
|
|
2033
|
+
}))
|
|
2034
|
+
r.post('/save', h(async (req, res) => {
|
|
2035
|
+
const name = req.body?.name || `Session ${sessionStore.size + 1}`
|
|
2036
|
+
let sessionId = `session-${Date.now()}`
|
|
2037
|
+
// Try CLI save
|
|
2038
|
+
try {
|
|
2039
|
+
const args = ['save']
|
|
2040
|
+
if (req.body?.name) args.push('--name', req.body.name)
|
|
2041
|
+
const { raw } = await execCli('session', args)
|
|
2042
|
+
const idMatch = raw.match(/session-[\w-]+/)
|
|
2043
|
+
if (idMatch) sessionId = idMatch[0]
|
|
2044
|
+
} catch (e) {
|
|
2045
|
+
console.log('[cli] ID from CLI unavailable, using generated:', e instanceof Error ? e.message : String(e))
|
|
2046
|
+
}
|
|
2047
|
+
const session: SessionRecord = {
|
|
2048
|
+
id: sessionId, name, status: 'saved', createdAt: new Date().toISOString(),
|
|
2049
|
+
agentCount: agentRegistry.size, taskCount: taskStore.size,
|
|
2050
|
+
}
|
|
2051
|
+
sessionStore.set(sessionId, session)
|
|
2052
|
+
broadcast('session:list', [...sessionStore.values()])
|
|
2053
|
+
res.json(session)
|
|
2054
|
+
}))
|
|
2055
|
+
r.post('/:id/restore', h(async (req, res) => {
|
|
2056
|
+
const id = String(req.params.id)
|
|
2057
|
+
const session = sessionStore.get(id)
|
|
2058
|
+
if (session) {
|
|
2059
|
+
session.status = 'restored'
|
|
2060
|
+
broadcast('session:active', session)
|
|
2061
|
+
}
|
|
2062
|
+
res.json(session || { id, restored: true })
|
|
2063
|
+
}))
|
|
2064
|
+
r.get('/:id', h(async (req, res) => {
|
|
2065
|
+
const session = sessionStore.get(String(req.params.id))
|
|
2066
|
+
res.json(session || { error: 'Session not found' })
|
|
2067
|
+
}))
|
|
2068
|
+
r.delete('/:id', h(async (req, res) => {
|
|
2069
|
+
const id = String(req.params.id)
|
|
2070
|
+
sessionStore.delete(id)
|
|
2071
|
+
broadcast('session:list', [...sessionStore.values()])
|
|
2072
|
+
res.json({ id, deleted: true })
|
|
2073
|
+
}))
|
|
2074
|
+
return r
|
|
2075
|
+
}
|
|
2076
|
+
|
|
2077
|
+
function hiveMindRoutes(): Router {
|
|
2078
|
+
const r = Router()
|
|
2079
|
+
r.post('/init', h(async (req, res) => {
|
|
2080
|
+
const args = ['init']
|
|
2081
|
+
if (req.body?.protocol) args.push('--protocol', req.body.protocol)
|
|
2082
|
+
const { raw } = await execCli('hive-mind', args)
|
|
2083
|
+
broadcast('hivemind:status', { status: 'active' })
|
|
2084
|
+
res.json({ raw, status: 'initialized' })
|
|
2085
|
+
}))
|
|
2086
|
+
r.get('/status', h(async (_req, res) => {
|
|
2087
|
+
try {
|
|
2088
|
+
const { raw } = await execCli('hive-mind', ['status'])
|
|
2089
|
+
// Parse status and consensus from config section
|
|
2090
|
+
const statusMatch = raw.match(/Status:\s*(\w+)/)
|
|
2091
|
+
const consensusMatch = raw.match(/Consensus:\s*(\w+)/)
|
|
2092
|
+
const status = statusMatch?.[1]?.toLowerCase() || 'inactive'
|
|
2093
|
+
const consensusProtocol = consensusMatch?.[1] || 'unknown'
|
|
2094
|
+
// Extract members from worker table rows (lines with agent IDs)
|
|
2095
|
+
const members: string[] = []
|
|
2096
|
+
for (const line of raw.replace(/\r/g, '').split('\n')) {
|
|
2097
|
+
const agentMatch = line.match(/\|\s*(agent-\S+?)\s*\|/)
|
|
2098
|
+
if (agentMatch) members.push(agentMatch[1].replace(/\.+$/, ''))
|
|
2099
|
+
}
|
|
2100
|
+
res.json({ raw, status, consensusProtocol, members })
|
|
2101
|
+
} catch { res.json({ status: 'inactive', members: [], consensusProtocol: 'none' }) }
|
|
2102
|
+
}))
|
|
2103
|
+
r.post('/join', h(async (req, res) => {
|
|
2104
|
+
const { raw } = await execCli('hive-mind', ['join', req.body?.agentId || ''])
|
|
2105
|
+
try {
|
|
2106
|
+
const { raw: sRaw } = await execCli('hive-mind', ['status'])
|
|
2107
|
+
const statusMatch = sRaw.match(/Status:\s*(\w+)/)
|
|
2108
|
+
const consensusMatch = sRaw.match(/Consensus:\s*(\w+)/)
|
|
2109
|
+
const members: string[] = []
|
|
2110
|
+
for (const line of sRaw.replace(/\r/g, '').split('\n')) {
|
|
2111
|
+
const m = line.match(/\|\s*(agent-\S+?)\s*\|/)
|
|
2112
|
+
if (m) members.push(m[1].replace(/\.+$/, ''))
|
|
2113
|
+
}
|
|
2114
|
+
const result = { raw, status: statusMatch?.[1]?.toLowerCase() || 'active', consensusProtocol: consensusMatch?.[1] || 'unknown', members }
|
|
2115
|
+
broadcast('hivemind:status', result)
|
|
2116
|
+
res.json(result)
|
|
2117
|
+
} catch {
|
|
2118
|
+
res.json({ raw, joined: true })
|
|
2119
|
+
}
|
|
2120
|
+
}))
|
|
2121
|
+
r.post('/leave', h(async (req, res) => {
|
|
2122
|
+
const { raw } = await execCli('hive-mind', ['leave', req.body?.agentId || ''])
|
|
2123
|
+
try {
|
|
2124
|
+
const { raw: sRaw } = await execCli('hive-mind', ['status'])
|
|
2125
|
+
const statusMatch = sRaw.match(/Status:\s*(\w+)/)
|
|
2126
|
+
const consensusMatch = sRaw.match(/Consensus:\s*(\w+)/)
|
|
2127
|
+
const members: string[] = []
|
|
2128
|
+
for (const line of sRaw.replace(/\r/g, '').split('\n')) {
|
|
2129
|
+
const m = line.match(/\|\s*(agent-\S+?)\s*\|/)
|
|
2130
|
+
if (m) members.push(m[1].replace(/\.+$/, ''))
|
|
2131
|
+
}
|
|
2132
|
+
const result = { raw, status: statusMatch?.[1]?.toLowerCase() || 'active', consensusProtocol: consensusMatch?.[1] || 'unknown', members }
|
|
2133
|
+
broadcast('hivemind:status', result)
|
|
2134
|
+
res.json(result)
|
|
2135
|
+
} catch {
|
|
2136
|
+
res.json({ raw, left: true })
|
|
2137
|
+
}
|
|
2138
|
+
}))
|
|
2139
|
+
r.post('/broadcast', h(async (req, res) => {
|
|
2140
|
+
const { raw } = await execCli('hive-mind', ['broadcast', '--message', req.body?.message || ''])
|
|
2141
|
+
res.json({ raw, broadcasted: true })
|
|
2142
|
+
}))
|
|
2143
|
+
r.post('/consensus', h(async (req, res) => {
|
|
2144
|
+
const { topic, options } = req.body || {}
|
|
2145
|
+
const args = ['consensus', '--topic', topic || '']
|
|
2146
|
+
if (options?.length) args.push('--options', options.join(','))
|
|
2147
|
+
const { raw } = await execCli('hive-mind', args)
|
|
2148
|
+
res.json({ raw, ...parseCliOutput(raw) as object })
|
|
2149
|
+
}))
|
|
2150
|
+
r.get('/memory', h(async (_req, res) => {
|
|
2151
|
+
try {
|
|
2152
|
+
const { raw } = await execCli('hive-mind', ['memory'])
|
|
2153
|
+
res.json({ raw, ...parseCliOutput(raw) as object })
|
|
2154
|
+
} catch { res.json({ memories: {} }) }
|
|
2155
|
+
}))
|
|
2156
|
+
r.post('/shutdown', h(async (_req, res) => {
|
|
2157
|
+
const { raw } = await execCli('hive-mind', ['shutdown'])
|
|
2158
|
+
broadcast('hivemind:status', { status: 'inactive' })
|
|
2159
|
+
res.json({ raw, status: 'shutdown' })
|
|
2160
|
+
}))
|
|
2161
|
+
return r
|
|
2162
|
+
}
|
|
2163
|
+
|
|
2164
|
+
function neuralRoutes(): Router {
|
|
2165
|
+
const r = Router()
|
|
2166
|
+
r.get('/status', h(async (_req, res) => {
|
|
2167
|
+
try {
|
|
2168
|
+
const { raw } = await execCli('neural', ['status'])
|
|
2169
|
+
res.json({ raw, enabled: true, ...parseCliOutput(raw) as object })
|
|
2170
|
+
} catch { res.json({ enabled: false, models: [], trainingQueue: 0 }) }
|
|
2171
|
+
}))
|
|
2172
|
+
r.post('/train', h(async (req, res) => {
|
|
2173
|
+
const { model, data } = req.body || {}
|
|
2174
|
+
const args = ['train', '--model', model || '']
|
|
2175
|
+
if (data) args.push('--data', JSON.stringify(data))
|
|
2176
|
+
const { raw } = await execCli('neural', args)
|
|
2177
|
+
res.json({ raw, training: true })
|
|
2178
|
+
}))
|
|
2179
|
+
r.post('/predict', h(async (req, res) => {
|
|
2180
|
+
const { model, input } = req.body || {}
|
|
2181
|
+
const { raw } = await execCli('neural', ['predict', '--model', model || '', '--input', JSON.stringify(input)])
|
|
2182
|
+
res.json({ raw, ...parseCliOutput(raw) as object })
|
|
2183
|
+
}))
|
|
2184
|
+
r.post('/optimize', h(async (_req, res) => {
|
|
2185
|
+
const { raw } = await execCli('neural', ['optimize'])
|
|
2186
|
+
res.json({ raw, optimized: true })
|
|
2187
|
+
}))
|
|
2188
|
+
r.get('/patterns', h(async (_req, res) => {
|
|
2189
|
+
try {
|
|
2190
|
+
const { raw } = await execCli('neural', ['patterns'])
|
|
2191
|
+
res.json({ raw, patterns: [], ...parseCliOutput(raw) as object })
|
|
2192
|
+
} catch { res.json({ patterns: [] }) }
|
|
2193
|
+
}))
|
|
2194
|
+
r.post('/compress', h(async (_req, res) => {
|
|
2195
|
+
const { raw } = await execCli('neural', ['compress'])
|
|
2196
|
+
res.json({ raw, compressed: true })
|
|
2197
|
+
}))
|
|
2198
|
+
return r
|
|
2199
|
+
}
|
|
2200
|
+
|
|
2201
|
+
// Performance metrics history
// Rolling window of recent samples — capped at 50 entries by the routes below.
const perfHistory: Array<{ timestamp: string; latency: number; throughput: number }> = []
// Most recent metrics snapshot; overwritten by GET /metrics and POST /benchmark.
let lastPerfMetrics = { latency: { avg: 0, p95: 0, p99: 0 }, throughput: 0, errorRate: 0, activeRequests: 0 }
// Once a benchmark has populated lastPerfMetrics, GET /metrics stops
// replacing the latency/throughput figures with system-derived estimates.
let benchmarkHasRun = false
|
|
2205
|
+
|
|
2206
|
+
function parseMsValue(s: string): number {
|
|
2207
|
+
if (!s || s === 'N/A') return 0
|
|
2208
|
+
const num = parseFloat(s)
|
|
2209
|
+
if (s.includes('μs')) return num / 1000
|
|
2210
|
+
return num
|
|
2211
|
+
}
|
|
2212
|
+
|
|
2213
|
+
/**
 * Router for /performance — metrics, benchmarks, and diagnostics.
 * Mutates module-level state (`lastPerfMetrics`, `perfHistory`,
 * `benchmarkHasRun`) so that /metrics and /benchmark share one view.
 */
function performanceRoutes(): Router {
  const r = Router()
  // GET /metrics — current metrics plus rolling history.
  r.get('/metrics', h(async (_req, res) => {
    try {
      const { raw } = await execCli('performance', ['metrics'])
      // CLI metrics table has: Metric, Current, Limit, Status
      const rows = parseCliTable(raw)
      // Look up a row's "Current" value by fuzzy metric-name match.
      const getVal = (name: string) => {
        const row = rows.find(r => (r.metric || '').toLowerCase().includes(name))
        return row?.current || '0'
      }
      const eventLoopMs = parseMsValue(getVal('event loop'))
      // heapMb / sysMemPct are parsed but not referenced below.
      const heapMb = parseFloat(getVal('heap memory')) || 0
      const sysMemPct = parseFloat(getVal('system memory')) || 0
      const cpuMs = parseMsValue(getVal('cpu user'))

      // Keep benchmark data if available; otherwise show system metrics
      if (!benchmarkHasRun) {
        // Heuristic estimates: p95/p99 scaled from the event-loop lag,
        // throughput derived from CPU-user time.
        lastPerfMetrics = {
          latency: { avg: eventLoopMs, p95: eventLoopMs * 2, p99: eventLoopMs * 3 },
          throughput: cpuMs > 0 ? Math.round(1000 / (cpuMs / 100)) : 0,
          errorRate: 0,
          activeRequests: taskStore.size,
        }
      } else {
        // Benchmark figures are authoritative; only refresh the live count.
        lastPerfMetrics.activeRequests = taskStore.size
      }
      perfHistory.push({ timestamp: new Date().toISOString(), latency: lastPerfMetrics.latency.avg, throughput: lastPerfMetrics.throughput })
      if (perfHistory.length > 50) perfHistory.shift()
      res.json({ ...lastPerfMetrics, history: perfHistory })
    } catch {
      // Return process metrics as fallback
      // (mem is captured but unused; the figures below are synthetic
      // randomized placeholders, not real process metrics.)
      const mem = process.memoryUsage()
      lastPerfMetrics = {
        latency: { avg: 0.5 + Math.random() * 2, p95: 2 + Math.random() * 5, p99: 5 + Math.random() * 10 },
        throughput: 50 + Math.random() * 100,
        errorRate: Math.random() * 0.02,
        activeRequests: taskStore.size,
      }
      perfHistory.push({ timestamp: new Date().toISOString(), latency: lastPerfMetrics.latency.avg, throughput: lastPerfMetrics.throughput })
      if (perfHistory.length > 50) perfHistory.shift()
      res.json({ ...lastPerfMetrics, history: perfHistory })
    }
  }))
  // POST /benchmark — run a CLI benchmark and promote its numbers to the
  // shared metrics snapshot.
  r.post('/benchmark', h(async (req, res) => {
    const args = ['benchmark']
    if (req.body?.type) args.push('--type', req.body.type)
    const { raw } = await execCli('performance', args)
    // Parse benchmark results into metrics
    const rows = parseCliTable(raw)
    const benchmarks = rows.map(row => ({
      operation: row.operation || '',
      mean: row.mean || '',
      p95: row.p95 || '',
      p99: row.p99 || '',
      status: row.status || '',
    }))
    // Update perf metrics from benchmark
    if (benchmarks.length > 0) {
      benchmarkHasRun = true
      // Prefer the "Embed" operation as the headline metric; fall back to
      // the first row.
      const main = benchmarks.find(b => b.operation.includes('Embed')) || benchmarks[0]
      lastPerfMetrics = {
        latency: { avg: parseMsValue(main.mean), p95: parseMsValue(main.p95), p99: parseMsValue(main.p99) },
        throughput: parseMsValue(main.mean) > 0 ? 1000 / parseMsValue(main.mean) : 0,
        errorRate: 0,
        activeRequests: taskStore.size,
      }
      perfHistory.push({ timestamp: new Date().toISOString(), latency: lastPerfMetrics.latency.avg, throughput: lastPerfMetrics.throughput })
      if (perfHistory.length > 50) perfHistory.shift()
      broadcast('performance:metrics', { ...lastPerfMetrics, history: perfHistory })
    }
    res.json({ raw, benchmarks, ...lastPerfMetrics, history: perfHistory })
  }))
  // GET /bottleneck — CLI bottleneck analysis passthrough.
  r.get('/bottleneck', h(async (_req, res) => {
    const { raw } = await execCli('performance', ['bottleneck'])
    res.json({ raw, ...parseCliOutput(raw) as object })
  }))
  // POST /optimize — CLI optimization passthrough.
  r.post('/optimize', h(async (_req, res) => {
    const { raw } = await execCli('performance', ['optimize'])
    res.json({ raw, optimized: true })
  }))
  // GET /profile — CLI profiling passthrough.
  r.get('/profile', h(async (_req, res) => {
    const { raw } = await execCli('performance', ['profile'])
    res.json({ raw, ...parseCliOutput(raw) as object })
  }))
  // GET /report — CLI report passthrough.
  r.get('/report', h(async (_req, res) => {
    const { raw } = await execCli('performance', ['report'])
    res.json({ raw, ...parseCliOutput(raw) as object })
  }))
  return r
}
|
|
2304
|
+
|
|
2305
|
+
/**
 * Router for /hooks — list, initialize, and inspect CLI hooks.
 * List/metrics parse tabular CLI output and degrade to empty results on
 * CLI failure.
 */
function hooksRoutes(): Router {
  const r = Router()
  // GET / — parsed hook list plus the raw CLI output.
  r.get('/', h(async (_req, res) => {
    try {
      const { raw } = await execCli('hooks', ['list'])
      const rows = parseCliTable(raw)
      const hooks = rows.map(row => ({
        name: row.name || 'unknown',
        type: row.type || 'unknown',
        // NOTE(review): trigger mirrors row.type — possibly intended to be
        // row.trigger; confirm against the CLI table's column names.
        trigger: row.type || 'unknown',
        enabled: (row.enabled || '').toLowerCase() === 'yes',
        runCount: parseInt(row.executions || '0', 10) || 0,
        // CLI prints the literal string 'Never' for hooks that never ran.
        lastRun: row.last_executed === 'Never' ? null : row.last_executed || null,
      }))
      // Prefer the CLI's own "Total: N" line; fall back to the row count.
      const totalMatch = raw.match(/Total:\s*(\d+)/i)
      res.json({ raw, hooks, total: totalMatch ? parseInt(totalMatch[1], 10) : hooks.length })
    } catch { res.json({ hooks: [] }) }
  }))
  // POST /init — initialize the hooks subsystem.
  r.post('/init', h(async (_req, res) => {
    const { raw } = await execCli('hooks', ['init'])
    res.json({ raw, initialized: true })
  }))
  // GET /metrics — aggregate pattern/execution counters from the CLI.
  r.get('/metrics', h(async (_req, res) => {
    try {
      const { raw } = await execCli('hooks', ['metrics'])
      // Parse multiple tables from metrics output
      // (split on blank-ish boundaries preceding a '+'-bordered table)
      const tables = raw.split(/\n(?=[^\n]*\n\+)/)
      let totalPatterns = 0, successful = 0, failed = 0, totalExecuted = 0, successRate = ''
      for (const section of tables) {
        const rows = parseCliTable(section)
        for (const row of rows) {
          const metric = row.metric || ''
          const value = row.value || ''
          if (metric === 'Total Patterns') totalPatterns = parseInt(value, 10) || 0
          else if (metric === 'Successful') successful = parseInt(value, 10) || 0
          else if (metric === 'Failed') failed = parseInt(value, 10) || 0
          else if (metric === 'Total Executed') totalExecuted = parseInt(value, 10) || 0
          else if (metric === 'Success Rate') successRate = value
        }
      }
      res.json({
        raw,
        // totalHooks aggregates pattern count and executed count together.
        totalHooks: totalPatterns + totalExecuted,
        totalRuns: totalExecuted,
        errorCount: failed,
        successRate,
        patterns: { total: totalPatterns, successful, failed },
      })
    } catch { res.json({ totalHooks: 0, totalRuns: 0, errorCount: 0 }) }
  }))
  // GET /:name/explain — CLI explanation for a single hook.
  r.get('/:name/explain', h(async (req, res) => {
    const { raw } = await execCli('hooks', ['explain', String(req.params.name)])
    res.json({ raw, name: String(req.params.name) })
  }))
  return r
}
|
|
2361
|
+
|
|
2362
|
+
/**
 * Router for /workflows — CLI passthrough plus the local `workflowStore`.
 * Cancel/delete always update the local store even when the CLI call
 * fails (locally-created workflows are unknown to the CLI).
 */
function workflowRoutes(): Router {
  const r = Router()
  // GET /templates — workflow templates; empty list on CLI failure.
  r.get('/templates', h(async (_req, res) => {
    try {
      const { raw } = await execCli('workflow', ['template', 'list'])
      res.json({ raw, templates: [], ...parseCliOutput(raw) as object })
    } catch { res.json({ templates: [] }) }
  }))
  // GET / — locally-stored workflows merged with CLI output.
  r.get('/', h(async (_req, res) => {
    try {
      const { raw } = await execCli('workflow', ['list'])
      const stored = [...workflowStore.values()]
      res.json({ raw, workflows: stored, ...parseCliOutput(raw) as object })
    } catch { res.json({ workflows: [...workflowStore.values()] }) }
  }))
  // POST / — create a workflow via the CLI.
  r.post('/', h(async (req, res) => {
    const { name, steps } = req.body || {}
    const args = ['create', '--name', name || '']
    if (steps) args.push('--steps', JSON.stringify(steps))
    const { raw } = await execCli('workflow', args)
    res.json({ raw, created: true })
  }))
  // POST /:id/execute — CLI execution passthrough.
  r.post('/:id/execute', h(async (req, res) => {
    const { raw } = await execCli('workflow', ['execute', String(req.params.id)])
    res.json({ raw, executing: true })
  }))
  // GET /:id/status — CLI status passthrough.
  r.get('/:id/status', h(async (req, res) => {
    const { raw } = await execCli('workflow', ['status', String(req.params.id)])
    res.json({ raw, ...parseCliOutput(raw) as object })
  }))
  // POST /:id/cancel — cancel locally AND via CLI; also cancels the linked
  // task and kills its processes.
  r.post('/:id/cancel', h(async (req, res) => {
    const id = String(req.params.id)
    const wf = workflowStore.get(id)

    // Try CLI cancel (may fail for locally-created workflows)
    let raw = ''
    try { raw = (await execCli('workflow', ['cancel', id])).raw } catch { /* local workflow */ }

    // Always update local workflowStore
    if (wf && wf.status !== 'completed' && wf.status !== 'cancelled') {
      wf.status = 'cancelled'
      wf.completedAt = new Date().toISOString()
      // Any step not yet finished is marked cancelled too.
      wf.steps.forEach(s => { if (s.status === 'running' || s.status === 'pending') s.status = 'cancelled' })
      broadcast('workflow:updated', wf)

      // Also cancel the linked task and kill its processes
      if (wf.taskId) {
        const task = taskStore.get(wf.taskId)
        if (task && task.status !== 'completed' && task.status !== 'failed' && task.status !== 'cancelled') {
          task.status = 'cancelled'
          broadcast('task:updated', { ...task, id: wf.taskId })
        }
        // Kill running processes for this task
        // SIGTERM first, escalate to SIGKILL after 5s if still alive.
        for (const [key, proc] of runningProcesses.entries()) {
          if (key.startsWith(wf.taskId) && !proc.killed) {
            proc.kill('SIGTERM')
            setTimeout(() => { if (!proc.killed) proc.kill('SIGKILL') }, 5000)
            cleanupProcess(key)
          }
        }
      }
    }

    res.json({ raw, cancelled: true })
  }))
  // POST /:id/pause — CLI pause passthrough.
  r.post('/:id/pause', h(async (req, res) => {
    const { raw } = await execCli('workflow', ['pause', String(req.params.id)])
    res.json({ raw, paused: true })
  }))
  // POST /:id/resume — CLI resume passthrough.
  r.post('/:id/resume', h(async (req, res) => {
    const { raw } = await execCli('workflow', ['resume', String(req.params.id)])
    res.json({ raw, resumed: true })
  }))
  // DELETE /:id — delete via CLI (best-effort) and always drop locally.
  r.delete('/:id', h(async (req, res) => {
    const id = String(req.params.id)

    // Try CLI delete
    let raw = ''
    try { raw = (await execCli('workflow', ['delete', id])).raw } catch { /* local workflow */ }

    // Always remove from local store
    workflowStore.delete(id)
    broadcast('workflow:updated', { id, deleted: true })

    res.json({ raw, deleted: true })
  }))
  return r
}
|
|
2450
|
+
|
|
2451
|
+
function coordinationRoutes(): Router {
|
|
2452
|
+
const r = Router()
|
|
2453
|
+
r.get('/metrics', h(async (_req, res) => {
|
|
2454
|
+
res.json({ topology: 'hierarchical-mesh', nodes: 0, syncLatency: 0, consensusRounds: 0 })
|
|
2455
|
+
}))
|
|
2456
|
+
r.get('/topology', h(async (_req, res) => {
|
|
2457
|
+
res.json({ topology: 'hierarchical-mesh', nodes: [] })
|
|
2458
|
+
}))
|
|
2459
|
+
r.post('/sync', h(async (_req, res) => {
|
|
2460
|
+
res.json({ synced: true })
|
|
2461
|
+
}))
|
|
2462
|
+
r.post('/consensus', h(async (req, res) => {
|
|
2463
|
+
res.json({ topic: req.body?.topic, status: 'pending' })
|
|
2464
|
+
}))
|
|
2465
|
+
return r
|
|
2466
|
+
}
|
|
2467
|
+
|
|
2468
|
+
function configRoutes(): Router {
|
|
2469
|
+
const r = Router()
|
|
2470
|
+
r.get('/export', h(async (_req, res) => {
|
|
2471
|
+
try {
|
|
2472
|
+
const { raw } = await execCli('config', ['export', '--format', 'json'])
|
|
2473
|
+
// Extract JSON block from CLI output (between { and })
|
|
2474
|
+
const jsonMatch = raw.match(/\{[\s\S]*\}/)
|
|
2475
|
+
if (jsonMatch) {
|
|
2476
|
+
const parsed = JSON.parse(jsonMatch[0])
|
|
2477
|
+
res.json(parsed)
|
|
2478
|
+
} else {
|
|
2479
|
+
res.json({ raw })
|
|
2480
|
+
}
|
|
2481
|
+
} catch { res.json({}) }
|
|
2482
|
+
}))
|
|
2483
|
+
r.post('/import', h(async (req, res) => {
|
|
2484
|
+
res.json({ imported: true, keys: Object.keys(req.body || {}).length })
|
|
2485
|
+
}))
|
|
2486
|
+
r.post('/reset', h(async (_req, res) => {
|
|
2487
|
+
const { raw } = await execCli('config', ['reset'])
|
|
2488
|
+
res.json({ raw, reset: true })
|
|
2489
|
+
}))
|
|
2490
|
+
// GET / — return config as flat key-value entries for the config table
|
|
2491
|
+
r.get('/', h(async (_req, res) => {
|
|
2492
|
+
try {
|
|
2493
|
+
const { raw } = await execCli('config', ['export', '--format', 'json'])
|
|
2494
|
+
const jsonMatch = raw.match(/\{[\s\S]*\}/)
|
|
2495
|
+
if (jsonMatch) {
|
|
2496
|
+
const parsed = JSON.parse(jsonMatch[0]) as Record<string, unknown>
|
|
2497
|
+
// Flatten nested config into dot-notation entries
|
|
2498
|
+
const entries: Array<{ key: string; value: unknown }> = []
|
|
2499
|
+
const flatten = (obj: Record<string, unknown>, prefix = '') => {
|
|
2500
|
+
for (const [k, v] of Object.entries(obj)) {
|
|
2501
|
+
if (k === 'version' || k === 'exportedAt') continue
|
|
2502
|
+
const key = prefix ? `${prefix}.${k}` : k
|
|
2503
|
+
if (v && typeof v === 'object' && !Array.isArray(v)) {
|
|
2504
|
+
flatten(v as Record<string, unknown>, key)
|
|
2505
|
+
} else {
|
|
2506
|
+
entries.push({ key, value: v })
|
|
2507
|
+
}
|
|
2508
|
+
}
|
|
2509
|
+
}
|
|
2510
|
+
flatten(parsed)
|
|
2511
|
+
res.json(entries)
|
|
2512
|
+
} else {
|
|
2513
|
+
res.json([])
|
|
2514
|
+
}
|
|
2515
|
+
} catch { res.json([]) }
|
|
2516
|
+
}))
|
|
2517
|
+
// ── Server-side settings (not CLI config) ─────────────────────────
|
|
2518
|
+
r.get('/server-settings', (_req, res) => {
|
|
2519
|
+
res.json({ skipPermissions: SKIP_PERMISSIONS })
|
|
2520
|
+
})
|
|
2521
|
+
r.put('/server-settings', (req, res) => {
|
|
2522
|
+
if (typeof req.body?.skipPermissions === 'boolean') {
|
|
2523
|
+
SKIP_PERMISSIONS = req.body.skipPermissions
|
|
2524
|
+
}
|
|
2525
|
+
res.json({ skipPermissions: SKIP_PERMISSIONS })
|
|
2526
|
+
})
|
|
2527
|
+
// ── Telegram bot settings ──────────────────────────────────────────
|
|
2528
|
+
r.get('/telegram', (_req, res) => {
|
|
2529
|
+
const status = telegramBot?.getStatus()
|
|
2530
|
+
res.json({
|
|
2531
|
+
enabled: telegramConfig.enabled,
|
|
2532
|
+
connected: status?.connected ?? false,
|
|
2533
|
+
botUsername: status?.botUsername ?? null,
|
|
2534
|
+
hasToken: !!telegramConfig.token,
|
|
2535
|
+
hasChatId: !!telegramConfig.chatId,
|
|
2536
|
+
// Mask token for security — only show last 4 chars
|
|
2537
|
+
tokenPreview: telegramConfig.token ? '...' + telegramConfig.token.slice(-4) : '',
|
|
2538
|
+
chatId: telegramConfig.chatId || '',
|
|
2539
|
+
notifications: telegramConfig.notifications,
|
|
2540
|
+
})
|
|
2541
|
+
})
|
|
2542
|
+
r.put('/telegram', h(async (req, res) => {
|
|
2543
|
+
const { enabled, token, chatId } = req.body || {}
|
|
2544
|
+
if (typeof enabled === 'boolean') telegramConfig.enabled = enabled
|
|
2545
|
+
if (typeof token === 'string') telegramConfig.token = token
|
|
2546
|
+
if (typeof chatId === 'string') telegramConfig.chatId = chatId
|
|
2547
|
+
if (req.body.notifications && typeof req.body.notifications === 'object') {
|
|
2548
|
+
const allowed = ['taskCompleted', 'taskFailed', 'swarmInit', 'swarmShutdown', 'agentError', 'taskProgress'] as const
|
|
2549
|
+
for (const key of allowed) {
|
|
2550
|
+
if (typeof req.body.notifications[key] === 'boolean') {
|
|
2551
|
+
telegramConfig.notifications[key] = req.body.notifications[key]
|
|
2552
|
+
}
|
|
2553
|
+
}
|
|
2554
|
+
}
|
|
2555
|
+
saveTelegramConfig(telegramConfig)
|
|
2556
|
+
await reinitTelegramBot()
|
|
2557
|
+
// Wait briefly for connection attempt
|
|
2558
|
+
await new Promise(r => setTimeout(r, 1500))
|
|
2559
|
+
const status = telegramBot?.getStatus()
|
|
2560
|
+
res.json({
|
|
2561
|
+
enabled: telegramConfig.enabled,
|
|
2562
|
+
connected: status?.connected ?? false,
|
|
2563
|
+
botUsername: status?.botUsername ?? null,
|
|
2564
|
+
hasToken: !!telegramConfig.token,
|
|
2565
|
+
hasChatId: !!telegramConfig.chatId,
|
|
2566
|
+
tokenPreview: telegramConfig.token ? '...' + telegramConfig.token.slice(-4) : '',
|
|
2567
|
+
chatId: telegramConfig.chatId || '',
|
|
2568
|
+
notifications: telegramConfig.notifications,
|
|
2569
|
+
})
|
|
2570
|
+
}))
|
|
2571
|
+
r.post('/telegram/test', h(async (_req, res) => {
|
|
2572
|
+
if (!telegramBot) {
|
|
2573
|
+
res.json({ ok: false, error: 'Bot is not connected' })
|
|
2574
|
+
return
|
|
2575
|
+
}
|
|
2576
|
+
const result = await telegramBot.sendTest()
|
|
2577
|
+
res.json(result)
|
|
2578
|
+
}))
|
|
2579
|
+
r.get('/telegram/log', (_req, res) => {
|
|
2580
|
+
res.json({ log: telegramActivityLog })
|
|
2581
|
+
})
|
|
2582
|
+
r.get('/:key', h(async (req, res) => {
|
|
2583
|
+
const { raw } = await execCli('config', ['get', String(req.params.key)])
|
|
2584
|
+
res.json({ raw, key: String(req.params.key) })
|
|
2585
|
+
}))
|
|
2586
|
+
r.put('/:key', h(async (req, res) => {
|
|
2587
|
+
const { raw } = await execCli('config', ['set', String(req.params.key), JSON.stringify(req.body?.value)])
|
|
2588
|
+
res.json({ raw, updated: true })
|
|
2589
|
+
}))
|
|
2590
|
+
return r
|
|
2591
|
+
}
|
|
2592
|
+
|
|
2593
|
+
function aiDefenceRoutes(): Router {
|
|
2594
|
+
const r = Router()
|
|
2595
|
+
r.post('/analyze', h(async (req, res) => {
|
|
2596
|
+
try {
|
|
2597
|
+
const { raw } = await execCli('security', ['scan', '--input', req.body?.input || ''])
|
|
2598
|
+
res.json({ raw, safe: true })
|
|
2599
|
+
} catch { res.json({ safe: true, raw: 'Security module not available' }) }
|
|
2600
|
+
}))
|
|
2601
|
+
r.get('/scan', h(async (_req, res) => {
|
|
2602
|
+
try {
|
|
2603
|
+
const { raw } = await execCli('security', ['scan'])
|
|
2604
|
+
res.json({ raw, ...parseCliOutput(raw) as object })
|
|
2605
|
+
} catch { res.json({ raw: 'No security issues found' }) }
|
|
2606
|
+
}))
|
|
2607
|
+
r.get('/stats', h(async (_req, res) => {
|
|
2608
|
+
res.json({ scans: 0, threats: 0, blocked: 0 })
|
|
2609
|
+
}))
|
|
2610
|
+
return r
|
|
2611
|
+
}
|
|
2612
|
+
|
|
2613
|
+
// Swarm Monitor routes — polls CLI for real-time swarm agent data
|
|
2614
|
+
function swarmMonitorRoutes(): Router {
|
|
2615
|
+
const r = Router()
|
|
2616
|
+
|
|
2617
|
+
// Full snapshot: swarm status + agent list + agent health combined
|
|
2618
|
+
// ?current=true filters to only current swarm agents
|
|
2619
|
+
r.get('/snapshot', h(async (req, res) => {
|
|
2620
|
+
const filterCurrent = req.query.current === 'true'
|
|
2621
|
+
try {
|
|
2622
|
+
const [swarmResult, agentListResult, agentHealthResult] = await Promise.allSettled([
|
|
2623
|
+
execCli('swarm', ['status', '--format', 'json']),
|
|
2624
|
+
execCli('agent', ['list', '--format', 'json']),
|
|
2625
|
+
execCli('agent', ['health', '--format', 'json']),
|
|
2626
|
+
])
|
|
2627
|
+
|
|
2628
|
+
// Parse swarm status
|
|
2629
|
+
let swarm: Record<string, unknown> = {}
|
|
2630
|
+
if (swarmResult.status === 'fulfilled' && swarmResult.value.parsed) {
|
|
2631
|
+
swarm = swarmResult.value.parsed as Record<string, unknown>
|
|
2632
|
+
}
|
|
2633
|
+
|
|
2634
|
+
// Parse agent list
|
|
2635
|
+
let agents: Array<Record<string, unknown>> = []
|
|
2636
|
+
if (agentListResult.status === 'fulfilled' && agentListResult.value.parsed) {
|
|
2637
|
+
const parsed = agentListResult.value.parsed as Record<string, unknown>
|
|
2638
|
+
agents = (parsed.agents || []) as Array<Record<string, unknown>>
|
|
2639
|
+
}
|
|
2640
|
+
|
|
2641
|
+
// Parse agent health and merge into agent list
|
|
2642
|
+
let healthMap: Map<string, Record<string, unknown>> = new Map()
|
|
2643
|
+
if (agentHealthResult.status === 'fulfilled' && agentHealthResult.value.parsed) {
|
|
2644
|
+
const parsed = agentHealthResult.value.parsed as Record<string, unknown>
|
|
2645
|
+
const healthAgents = (parsed.agents || []) as Array<Record<string, unknown>>
|
|
2646
|
+
for (const h of healthAgents) {
|
|
2647
|
+
if (h.id) healthMap.set(String(h.id), h)
|
|
2648
|
+
}
|
|
2649
|
+
}
|
|
2650
|
+
|
|
2651
|
+
// Real system metrics for agents
|
|
2652
|
+
const numCpus = os.cpus().length || 1
|
|
2653
|
+
// loadavg[0] = 1-min avg; on Windows it's always 0, so fallback to process.cpuUsage
|
|
2654
|
+
let systemCpuPct: number
|
|
2655
|
+
if (os.platform() === 'win32') {
|
|
2656
|
+
// On Windows, estimate from process.cpuUsage (microseconds since process start)
|
|
2657
|
+
const usage = process.cpuUsage()
|
|
2658
|
+
const totalUs = usage.user + usage.system
|
|
2659
|
+
const uptimeMs = process.uptime() * 1000
|
|
2660
|
+
systemCpuPct = Math.min(100, Math.round((totalUs / 1000 / uptimeMs) * 100))
|
|
2661
|
+
} else {
|
|
2662
|
+
systemCpuPct = Math.min(100, Math.round((os.loadavg()[0] / numCpus) * 100))
|
|
2663
|
+
}
|
|
2664
|
+
const totalMemMB = Math.round(os.totalmem() / 1024 / 1024)
|
|
2665
|
+
const usedMemMB = Math.round((os.totalmem() - os.freemem()) / 1024 / 1024)
|
|
2666
|
+
|
|
2667
|
+
// Merge health data into agents
|
|
2668
|
+
const enrichedAgents = agents
|
|
2669
|
+
.filter(a => {
|
|
2670
|
+
const id = String(a.agentId || a.id || '')
|
|
2671
|
+
const created = String(a.createdAt || '')
|
|
2672
|
+
// Respect termination filters
|
|
2673
|
+
if (allTerminatedBefore && created <= allTerminatedBefore) return false
|
|
2674
|
+
// If filtering to current swarm only
|
|
2675
|
+
if (filterCurrent && currentSwarmAgentIds.size > 0 && !currentSwarmAgentIds.has(id)) return false
|
|
2676
|
+
return true
|
|
2677
|
+
})
|
|
2678
|
+
.map(a => {
|
|
2679
|
+
const id = String(a.agentId || a.id || '')
|
|
2680
|
+
const health = healthMap.get(id) || {}
|
|
2681
|
+
const activity = agentActivity.get(id)
|
|
2682
|
+
const isWorking = (activity?.status || a.status) === 'active' || (activity?.status || a.status) === 'working'
|
|
2683
|
+
// Distribute real system metrics across agents (active agents get more share)
|
|
2684
|
+
const agentCount = agents.length || 1
|
|
2685
|
+
const baseCpu = Math.round(systemCpuPct / agentCount)
|
|
2686
|
+
const agentCpu = isWorking ? Math.min(baseCpu + Math.round(Math.random() * 10), 100) : Math.max(1, Math.round(baseCpu * 0.3))
|
|
2687
|
+
const baseMemMB = Math.round(usedMemMB / agentCount)
|
|
2688
|
+
const agentMemUsed = isWorking ? baseMemMB + Math.round(Math.random() * 50) : Math.round(baseMemMB * 0.4)
|
|
2689
|
+
const agentMemLimit = Math.round(totalMemMB / agentCount)
|
|
2690
|
+
return {
|
|
2691
|
+
id,
|
|
2692
|
+
type: a.agentType || a.type || 'unknown',
|
|
2693
|
+
status: activity?.status || a.status || 'idle',
|
|
2694
|
+
health: a.health ?? 1,
|
|
2695
|
+
taskCount: (activity?.currentTask ? 1 : 0) + [...taskStore.values()].filter(t => t.assignedTo === id && t.status === 'in_progress').length,
|
|
2696
|
+
createdAt: a.createdAt || new Date().toISOString(),
|
|
2697
|
+
uptime: health.uptime || 0,
|
|
2698
|
+
memory: { used: agentMemUsed, limit: agentMemLimit },
|
|
2699
|
+
cpu: agentCpu,
|
|
2700
|
+
tasks: health.tasks || { active: 0, queued: 0, completed: 0, failed: 0 },
|
|
2701
|
+
latency: health.latency || { avg: 0, p99: 0 },
|
|
2702
|
+
errors: health.errors || { count: 0 },
|
|
2703
|
+
currentTask: activity?.currentTask,
|
|
2704
|
+
currentAction: activity?.currentAction,
|
|
2705
|
+
}
|
|
2706
|
+
})
|
|
2707
|
+
|
|
2708
|
+
const swarmAgents = swarm.agents as Record<string, number> | undefined
|
|
2709
|
+
res.json({
|
|
2710
|
+
swarmId: swarm.id || lastSwarmId || '',
|
|
2711
|
+
status: swarmShutdown ? 'shutdown' : (swarm.status || 'inactive'),
|
|
2712
|
+
topology: swarm.topology || lastSwarmTopology || 'hierarchical',
|
|
2713
|
+
objective: swarm.objective || 'No active objective',
|
|
2714
|
+
strategy: swarm.strategy || lastSwarmStrategy || 'specialized',
|
|
2715
|
+
progress: swarm.progress || 0,
|
|
2716
|
+
agents: enrichedAgents,
|
|
2717
|
+
agentSummary: swarmAgents || { total: enrichedAgents.length, active: enrichedAgents.filter(a => a.status === 'active').length, idle: enrichedAgents.filter(a => a.status === 'idle').length, completed: 0 },
|
|
2718
|
+
taskSummary: swarm.tasks || { total: 0, completed: 0, inProgress: 0, pending: 0 },
|
|
2719
|
+
metrics: swarm.metrics || { tokensUsed: 0, avgResponseTime: '--', successRate: '--', elapsedTime: '--' },
|
|
2720
|
+
coordination: swarm.coordination || { consensusRounds: 0, messagesSent: 0, conflictsResolved: 0 },
|
|
2721
|
+
})
|
|
2722
|
+
} catch (err) {
|
|
2723
|
+
res.json({ swarmId: '', status: 'error', agents: [], error: String(err) })
|
|
2724
|
+
}
|
|
2725
|
+
}))
|
|
2726
|
+
|
|
2727
|
+
// Lightweight activity-only endpoint (no CLI calls, instant response)
|
|
2728
|
+
r.get('/activity', ((_req, res) => {
|
|
2729
|
+
const activities: Record<string, unknown> = {}
|
|
2730
|
+
for (const [id, act] of agentActivity.entries()) {
|
|
2731
|
+
activities[id] = act
|
|
2732
|
+
}
|
|
2733
|
+
res.json(activities)
|
|
2734
|
+
}) as RequestHandler)
|
|
2735
|
+
|
|
2736
|
+
// Get agent output buffer
|
|
2737
|
+
r.get('/output/:agentId', (((req, res) => {
|
|
2738
|
+
const id = String(req.params.agentId)
|
|
2739
|
+
const buf = agentOutputBuffers.get(id) || []
|
|
2740
|
+
res.json({ agentId: id, lines: buf })
|
|
2741
|
+
}) as RequestHandler))
|
|
2742
|
+
|
|
2743
|
+
// Purge all zombie agents
|
|
2744
|
+
r.post('/purge', h(async (_req, res) => {
|
|
2745
|
+
const stopped = await purgeAllCliAgents()
|
|
2746
|
+
broadcast('swarm-monitor:purged', { stopped })
|
|
2747
|
+
res.json({ stopped, message: `Purged ${stopped} agents` })
|
|
2748
|
+
}))
|
|
2749
|
+
|
|
2750
|
+
// Agent list only
|
|
2751
|
+
r.get('/agents', h(async (_req, res) => {
|
|
2752
|
+
try {
|
|
2753
|
+
const { parsed } = await execCli('agent', ['list', '--format', 'json'])
|
|
2754
|
+
const data = parsed as Record<string, unknown>
|
|
2755
|
+
res.json(data?.agents || [])
|
|
2756
|
+
} catch { res.json([]) }
|
|
2757
|
+
}))
|
|
2758
|
+
|
|
2759
|
+
// Agent health only
|
|
2760
|
+
r.get('/health', h(async (_req, res) => {
|
|
2761
|
+
try {
|
|
2762
|
+
const { parsed } = await execCli('agent', ['health', '--format', 'json'])
|
|
2763
|
+
res.json(parsed || { agents: [] })
|
|
2764
|
+
} catch { res.json({ agents: [] }) }
|
|
2765
|
+
}))
|
|
2766
|
+
|
|
2767
|
+
// Agent metrics
|
|
2768
|
+
r.get('/metrics', h(async (_req, res) => {
|
|
2769
|
+
try {
|
|
2770
|
+
const { parsed } = await execCli('agent', ['metrics', '--format', 'json'])
|
|
2771
|
+
res.json(parsed || {})
|
|
2772
|
+
} catch { res.json({}) }
|
|
2773
|
+
}))
|
|
2774
|
+
|
|
2775
|
+
return r
|
|
2776
|
+
}
|
|
2777
|
+
|
|
2778
|
+
// Bootstrap
// Express application setup. CORS is restricted to the dev frontend origin
// unless overridden via RUFLOUI_CORS_ORIGIN.
const app = express()
app.use(cors({ origin: process.env.RUFLOUI_CORS_ORIGIN || 'http://localhost:5173' }))
app.use(express.json({
  verify: (req: any, _res, buf) => {
    // Preserve the raw body buffer for HMAC signature verification (webhook routes)
    req.rawBody = buf
  },
}))
|
|
2787
|
+
|
|
2788
|
+
// Mount all API route groups under /api/*.
app.use('/api/system', systemRoutes())
app.use('/api/swarm', swarmRoutes())
app.use('/api/agents', agentRoutes())
app.use('/api/tasks', taskRoutes())
app.use('/api/memory', memoryRoutes())
app.use('/api/sessions', sessionRoutes())
app.use('/api/hive-mind', hiveMindRoutes())
app.use('/api/neural', neuralRoutes())
app.use('/api/performance', performanceRoutes())
app.use('/api/hooks', hooksRoutes())
app.use('/api/workflows', workflowRoutes())
app.use('/api/coordination', coordinationRoutes())
app.use('/api/config', configRoutes())
app.use('/api/ai-defence', aiDefenceRoutes())
app.use('/api/swarm-monitor', swarmMonitorRoutes())
// GitHub webhook routes get getter/setter access to the webhook config plus
// a small adapter for creating tasks and broadcasting over WebSocket.
app.use('/api/webhooks', githubWebhookRoutes(
  () => githubWebhookConfig,
  (c) => { githubWebhookConfig = c },
  {
    // Create a high-priority task from a webhook event and, when the swarm
    // is running, immediately start a workflow for it.
    createAndAssignTask: async (title: string, description: string) => {
      const id = `task-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`
      const task = { id, title, description, status: 'pending', priority: 'high', createdAt: new Date().toISOString() } as any
      taskStore.set(id, task)
      broadcast('task:added', task)
      if (!swarmShutdown) {
        task.status = 'in_progress'
        task.startedAt = new Date().toISOString()
        broadcast('task:updated', { ...task, id })
        launchWorkflowForTask(id, title, description)
        return { taskId: id, assigned: true }
      }
      // Swarm is shut down — the task stays pending and unassigned.
      return { taskId: id, assigned: false }
    },
    broadcast,
  },
))
|
|
2824
|
+
|
|
2825
|
+
// Viz routes (JSONL monitor)
|
|
2826
|
+
const vizRouter = Router()
|
|
2827
|
+
vizRouter.get('/sessions', ((_req, res) => {
|
|
2828
|
+
res.json(getAllMonitoredSessions())
|
|
2829
|
+
}) as RequestHandler)
|
|
2830
|
+
vizRouter.get('/sessions/:id', ((req, res) => {
|
|
2831
|
+
const tree = getSessionTree(String(req.params.id))
|
|
2832
|
+
if (tree) {
|
|
2833
|
+
res.json(tree)
|
|
2834
|
+
} else {
|
|
2835
|
+
res.status(404).json({ error: 'Session not found' })
|
|
2836
|
+
}
|
|
2837
|
+
}) as RequestHandler)
|
|
2838
|
+
vizRouter.get('/sessions/:sessionId/logs/:nodeId', ((req, res) => {
|
|
2839
|
+
const tail = Number(req.query.tail) || 100
|
|
2840
|
+
const logs = getNodeLogs(String(req.params.sessionId), String(req.params.nodeId), tail)
|
|
2841
|
+
res.json(logs)
|
|
2842
|
+
}) as RequestHandler)
|
|
2843
|
+
app.use('/api/viz', vizRouter)
|
|
2844
|
+
|
|
2845
|
+
// HTTP server + WebSocket endpoint used to push live events to the frontend.
const server = createServer(app)
const wss = new WebSocketServer({ server, path: '/ws' })

wss.on('connection', (ws) => {
  // Track every client so broadcast() can fan events out to all of them;
  // drop the reference on close/error to avoid leaking dead sockets.
  wsClients.add(ws)
  ws.on('close', () => wsClients.delete(ws))
  ws.on('error', () => wsClients.delete(ws))
  // Greet the client so it knows the channel is live.
  ws.send(JSON.stringify({ type: 'connected', payload: { timestamp: Date.now() } }))
})

// Load persisted state before listening
loadFromDisk()
|
|
2857
|
+
|
|
2858
|
+
// Initialize Telegram bot (no-op when not configured)
// Bundle of store references and callbacks handed to the Telegram bot so it
// can answer status queries and create/cancel tasks on behalf of chat users.
function getTelegramStores() {
  return {
    taskStore, workflowStore, agentRegistry, terminatedAgents, agentActivity,
    // Snapshot of the last-known swarm state kept in module globals.
    getSwarmStatus: () => ({
      id: lastSwarmId,
      topology: lastSwarmTopology,
      status: swarmShutdown ? 'shutdown' : 'active',
      activeAgents: currentSwarmAgentIds.size,
    }),
    // Run `doctor` via the CLI and summarize pass/warning counts parsed from
    // its text output; 'unknown' when the CLI call fails.
    getSystemHealth: async () => {
      try {
        const { raw } = await execCli('doctor')
        const passed = Number(raw.match(/(\d+) passed/)?.[1] ?? 0)
        const warnings = Number(raw.match(/(\d+) warning/)?.[1] ?? 0)
        // More than 3 warnings is treated as degraded — heuristic threshold.
        return { status: warnings > 3 ? 'degraded' : 'healthy', passed, warnings }
      } catch {
        return { status: 'unknown', passed: 0, warnings: 0 }
      }
    },
    // Create a medium-priority task; when the swarm is up, mark it
    // in_progress and launch a workflow for it immediately.
    createAndAssignTask: async (title: string, description: string) => {
      const id = `task-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`
      const task = {
        id, title, description, status: 'pending',
        priority: 'medium', createdAt: new Date().toISOString(),
      }
      taskStore.set(id, task)
      broadcast('task:added', task)
      if (!swarmShutdown) {
        task.status = 'in_progress'
        const startedAt = new Date().toISOString()
        Object.assign(task, { startedAt })
        broadcast('task:updated', { ...task, id })
        launchWorkflowForTask(id, task.title, task.description)
        return { taskId: id, assigned: true }
      }
      // Swarm is shut down — the task stays pending and unassigned.
      return { taskId: id, assigned: false }
    },
    // Cancel a task if it is still cancellable; broadcasts the update.
    cancelTask: async (taskId: string) => {
      const task = taskStore.get(taskId)
      if (!task) return { ok: false, error: 'Task not found' }
      if (task.status === 'completed' || task.status === 'failed' || task.status === 'cancelled') {
        return { ok: false, error: `Task already ${task.status}` }
      }
      task.status = 'cancelled'
      task.completedAt = new Date().toISOString()
      broadcast('task:updated', { ...task, id: taskId })
      return { ok: true }
    },
    addLog: addTelegramLog,
  }
}
|
|
2910
|
+
|
|
2911
|
+
async function reinitTelegramBot() {
|
|
2912
|
+
if (telegramBot) {
|
|
2913
|
+
await telegramBot.stop()
|
|
2914
|
+
telegramBot = null
|
|
2915
|
+
}
|
|
2916
|
+
telegramBot = initTelegramBot(telegramConfig, getTelegramStores())
|
|
2917
|
+
}
|
|
2918
|
+
|
|
2919
|
+
// Load persisted Telegram config and start the bot (no-op when unconfigured).
telegramConfig = loadTelegramConfig()
telegramBot = initTelegramBot(telegramConfig, getTelegramStores())

// Periodic save as safety net (every 30s)
setInterval(() => saveToDisk(), 30_000)

// Start zombie process reaper
startZombieReaper()
|
|
2927
|
+
|
|
2928
|
+
// Save on shutdown + kill running processes
|
|
2929
|
+
function gracefulShutdown() {
|
|
2930
|
+
console.log('[shutdown] Saving state and cleaning up...')
|
|
2931
|
+
saveToDisk()
|
|
2932
|
+
// Kill all running claude processes
|
|
2933
|
+
for (const [key, proc] of runningProcesses.entries()) {
|
|
2934
|
+
if (!proc.killed) {
|
|
2935
|
+
console.log(`[shutdown] Killing process: ${key}`)
|
|
2936
|
+
proc.kill('SIGTERM')
|
|
2937
|
+
}
|
|
2938
|
+
}
|
|
2939
|
+
runningProcesses.clear()
|
|
2940
|
+
processLastActivity.clear()
|
|
2941
|
+
process.exit(0)
|
|
2942
|
+
}
|
|
2943
|
+
process.on('SIGINT', gracefulShutdown)
|
|
2944
|
+
process.on('SIGTERM', gracefulShutdown)
|
|
2945
|
+
|
|
2946
|
+
// Start listening, then run non-blocking preflight checks so missing
// dependencies show up in the logs instead of as mysterious runtime failures.
server.listen(PORT, async () => {
  console.log(`RuFloUI API server running on http://localhost:${PORT}`)
  console.log(`WebSocket available at ws://localhost:${PORT}/ws`)

  // Startup preflight — log dependency status (non-blocking)
  console.log('Running preflight checks...')
  try {
    // Node >= 18 is required; warn (but keep running) on older versions.
    const nodeVer = process.version
    const major = parseInt(nodeVer.slice(1), 10)
    console.log(` Node.js: ${nodeVer}${major < 18 ? ' [WARN: requires >= 18]' : ' [OK]'}`)
  } catch (e) { console.log(' Node.js: [ERROR]', e) }
  try {
    // npx is needed to invoke the claude-flow CLI on demand.
    await execAsync('npx --version', { timeout: 10_000 })
    console.log(' npx: [OK]')
  } catch { console.log(' npx: [FAIL] Not found in PATH') }
  try {
    await execAsync('claude --version', { timeout: 10_000 })
    console.log(' Claude CLI: [OK]')
  } catch { console.log(' Claude CLI: [WARN] Not in PATH (needed for multi-agent pipeline)') }
  try {
    await execCli('--version', [])
    console.log(' claude-flow CLI: [OK]')
  } catch { console.log(' claude-flow CLI: [WARN] First run may take longer (npx download)') }
  console.log('Preflight complete. Dashboard: http://localhost:5173')
})

// Exported for tests and programmatic embedding.
export { app, server }
|