agentgui 1.0.837 → 1.0.839
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +3 -0
- package/lib/message-queue.js +62 -0
- package/lib/process-message-rate-limit.js +18 -0
- package/lib/process-message.js +126 -0
- package/lib/routes-agent-actions.js +117 -0
- package/lib/routes-auth-config.js +30 -0
- package/lib/routes-messages.js +139 -0
- package/lib/routes-runs.js +156 -0
- package/lib/routes-scripts.js +135 -0
- package/lib/routes-sessions.js +144 -0
- package/lib/stream-event-handler.js +115 -0
- package/package.json +1 -1
- package/scripts/patch-fsbrowse.js +1 -1
- package/server.js +45 -1706
|
@@ -0,0 +1,156 @@
|
|
|
1
|
+
import os from 'os'; // NOTE(review): unused in this module — kept to preserve the file's import surface; consider removing.

/**
 * Registers HTTP route handlers for the /api/runs and /api/threads/:id/runs
 * endpoints.
 *
 * @param {object} deps - Injected server dependencies:
 *   - sendJSON(req, res, status, body): JSON response helper.
 *   - parseBody(req): async JSON body parser (used by /api/runs/wait).
 *   - queries: database query layer (runs, sessions, conversations, messages).
 *   - broadcastSync(event): WebSocket broadcast helper.
 *   - processMessageWithStreaming(...): starts agent execution (fire-and-forget).
 *   - activeExecutions: Map of conversationId -> { pid, sessionId, ... }.
 *   - activeProcessesByRunId: Map of runId -> child process bookkeeping.
 *   - discoveredAgents: array of available agent descriptors ({ id, ... }).
 *   - STARTUP_CWD: default working directory for new conversations.
 * @returns {object} routes - map of 'METHOD /path' -> handler, plus a
 *   `_match(method, pathOnly)` function that resolves parameterized paths.
 */
export function register(deps) {
  const { sendJSON, parseBody, queries, broadcastSync, processMessageWithStreaming, activeExecutions, activeProcessesByRunId, discoveredAgents, STARTUP_CWD } = deps;

  const routes = {};

  // Statuses in which a run can no longer be cancelled or waited on.
  const TERMINAL_STATUSES = ['success', 'error', 'cancelled'];

  // Reads the full request body and parses it as JSON.
  // Empty or malformed bodies yield {} (best-effort, matching prior behavior).
  async function readJSONBody(req) {
    let body = '';
    for await (const chunk of req) { body += chunk; }
    try { return body ? JSON.parse(body) : {}; } catch { return {}; }
  }

  // Terminates a child's process group with SIGTERM, escalating to SIGKILL
  // after 3s. The negative pid targets the whole group (detached spawn);
  // falls back to signalling the single pid if group kill fails.
  function killProcessTree(pid) {
    try { process.kill(-pid, 'SIGTERM'); } catch { try { process.kill(pid, 'SIGTERM'); } catch {} }
    setTimeout(() => { try { process.kill(-pid, 'SIGKILL'); } catch { try { process.kill(pid, 'SIGKILL'); } catch {} } }, 3000);
  }

  // Long-polls a run every 500ms until it reaches a terminal state (200 with
  // the run) or 30s elapse (408). Polling stops if the client disconnects.
  function pollRunUntilDone(req, res, runId, fallbackStatus) {
    const startTime = Date.now();
    const poll = setInterval(() => {
      const cur = queries.getRun(runId);
      if (cur && TERMINAL_STATUSES.includes(cur.status)) {
        clearInterval(poll);
        sendJSON(req, res, 200, cur);
      } else if (Date.now() - startTime > 30000) {
        clearInterval(poll);
        sendJSON(req, res, 408, { error: 'Run still pending after 30s', run_id: runId, status: cur?.status || fallbackStatus });
      }
    }, 500);
    req.on('close', () => clearInterval(poll));
  }

  // Resolves parameterized paths that cannot be expressed as static keys.
  routes['_match'] = (method, pathOnly) => {
    const key = `${method} ${pathOnly}`;
    if (routes[key]) return routes[key];
    let m;
    if ((m = pathOnly.match(/^\/api\/runs\/([^/]+)$/))) return (req, res) => handleRunById(req, res, m[1]);
    if (method === 'GET' && (m = pathOnly.match(/^\/api\/runs\/([^/]+)\/wait$/))) return (req, res) => handleRunWait(req, res, m[1]);
    if (method === 'GET' && (m = pathOnly.match(/^\/api\/runs\/([^/]+)\/stream$/))) return (req, res) => { res.writeHead(410); res.end(JSON.stringify({ error: 'SSE removed, use WebSocket' })); };
    if (method === 'POST' && (m = pathOnly.match(/^\/api\/runs\/([^/]+)\/cancel$/))) return (req, res) => handleRunCancel(req, res, m[1]);
    if (method === 'POST' && (m = pathOnly.match(/^\/api\/threads\/([^/]+)\/runs\/([^/]+)\/cancel$/))) return (req, res) => handleThreadRunCancel(req, res, m[1], m[2]);
    if (method === 'GET' && (m = pathOnly.match(/^\/api\/threads\/([^/]+)\/runs\/([^/]+)\/wait$/))) return (req, res) => handleThreadRunWait(req, res, m[1], m[2]);
    return null;
  };

  // Creates a stateless run: a fresh conversation + session + user message,
  // then kicks off streaming execution. Responds immediately with the session.
  routes['POST /api/runs'] = async (req, res) => {
    const parsed = await readJSONBody(req);
    const { input, agentId } = parsed;
    if (!input) { sendJSON(req, res, 400, { error: 'Missing input in request body' }); return; }
    const resolvedAgentId = agentId || 'claude-code';
    const resolvedModel = parsed.model || null;
    const cwd = parsed.workingDirectory || STARTUP_CWD;
    const thread = queries.createConversation(resolvedAgentId, 'Stateless Run', cwd);
    const session = queries.createSession(thread.id, resolvedAgentId, 'pending');
    const content = typeof input === 'string' ? input : JSON.stringify(input);
    const message = queries.createMessage(thread.id, 'user', content);
    // Fire-and-forget: progress is delivered over WebSocket, not this response.
    processMessageWithStreaming(thread.id, message.id, session.id, content, resolvedAgentId, resolvedModel);
    sendJSON(req, res, 200, { id: session.id, status: 'pending', started_at: session.started_at, agentId: resolvedAgentId });
  };

  // Lists up to 50 sessions as run summaries (no filter support yet;
  // input/output are not persisted per-run, hence the nulls).
  routes['POST /api/runs/search'] = async (req, res) => {
    const sessions = queries.getAllSessions();
    const runs = sessions.slice(0, 50).map(s => ({ id: s.id, status: s.status, started_at: s.started_at, completed_at: s.completed_at, agentId: s.agentId, input: null, output: null })).reverse();
    sendJSON(req, res, 200, runs);
  };

  // SSE streaming was removed in favor of WebSocket; respond 410 Gone.
  routes['POST /api/runs/stream'] = (req, res) => { res.writeHead(410); res.end(JSON.stringify({ error: 'SSE removed, use WebSocket' })); };

  // Creates a run record for a known discovered agent. Despite the name, this
  // endpoint does not block; it returns the created run immediately.
  routes['POST /api/runs/wait'] = async (req, res) => {
    const body = await parseBody(req);
    const { agent_id, input, config } = body;
    if (!agent_id) { sendJSON(req, res, 422, { error: 'agent_id is required' }); return; }
    const agent = discoveredAgents.find(a => a.id === agent_id);
    if (!agent) { sendJSON(req, res, 404, { error: 'Agent not found' }); return; }
    const run = queries.createRun(agent_id, null, input, config);
    sendJSON(req, res, 200, run);
  };

  // GET: fetch a run by id. POST: resumability check — only pending runs pass
  // (409 otherwise), and the run is echoed back. DELETE: remove the run
  // (204 on success; a throwing delete is reported as 404).
  async function handleRunById(req, res, runId) {
    if (req.method === 'GET') {
      const run = queries.getRun(runId);
      if (!run) { sendJSON(req, res, 404, { error: 'Run not found' }); return; }
      sendJSON(req, res, 200, run);
      return;
    }
    if (req.method === 'POST') {
      const run = queries.getRun(runId);
      if (!run) { sendJSON(req, res, 404, { error: 'Run not found' }); return; }
      if (run.status !== 'pending') { sendJSON(req, res, 409, { error: 'Run is not resumable' }); return; }
      sendJSON(req, res, 200, run);
      return;
    }
    if (req.method === 'DELETE') {
      try { queries.deleteRun(runId); res.writeHead(204); res.end(); } catch { sendJSON(req, res, 404, { error: 'Run not found' }); }
    }
  }

  // Long-polls a run until completion (see pollRunUntilDone).
  async function handleRunWait(req, res, runId) {
    const run = queries.getRun(runId);
    if (!run) { sendJSON(req, res, 404, { error: 'Run not found' }); return; }
    pollRunUntilDone(req, res, runId, run.status);
  }

  // Cancels a run: marks it cancelled in the DB, kills the associated agent
  // process (if its thread has one), errors out the session, clears streaming
  // state, and broadcasts the cancellation.
  async function handleRunCancel(req, res, runId) {
    try {
      const run = queries.getRun(runId);
      if (!run) { sendJSON(req, res, 404, { error: 'Run not found' }); return; }
      if (TERMINAL_STATUSES.includes(run.status)) { sendJSON(req, res, 409, { error: 'Run already completed or cancelled' }); return; }
      const cancelled = queries.cancelRun(runId);
      const threadId = run.thread_id;
      if (threadId) {
        const execution = activeExecutions.get(threadId);
        if (execution?.pid) killProcessTree(execution.pid);
        if (execution?.sessionId) queries.updateSession(execution.sessionId, { status: 'error', error: 'Cancelled by user', completed_at: Date.now() });
        activeExecutions.delete(threadId);
        queries.setIsStreaming(threadId, false);
        broadcastSync({ type: 'streaming_cancelled', sessionId: execution?.sessionId || runId, conversationId: threadId, runId, timestamp: Date.now() });
      }
      sendJSON(req, res, 200, cancelled);
    } catch (err) {
      // Map known query-layer errors onto HTTP statuses; everything else is 500.
      if (err.message === 'Run not found') sendJSON(req, res, 404, { error: err.message });
      else if (err.message.includes('already completed')) sendJSON(req, res, 409, { error: err.message });
      else sendJSON(req, res, 500, { error: err.message });
    }
  }

  // Thread-scoped cancel: same as handleRunCancel but validates that the run
  // belongs to the given thread, also clears activeProcessesByRunId, and emits
  // an additional run_cancelled event.
  async function handleThreadRunCancel(req, res, threadId, runId) {
    try {
      const run = queries.getRun(runId);
      if (!run) { sendJSON(req, res, 404, { error: 'Run not found' }); return; }
      if (run.thread_id !== threadId) { sendJSON(req, res, 400, { error: 'Run does not belong to specified thread' }); return; }
      if (TERMINAL_STATUSES.includes(run.status)) { sendJSON(req, res, 409, { error: 'Run already completed or cancelled' }); return; }
      const cancelled = queries.cancelRun(runId);
      const execution = activeExecutions.get(threadId);
      if (execution?.pid) killProcessTree(execution.pid);
      if (execution?.sessionId) queries.updateSession(execution.sessionId, { status: 'error', error: 'Cancelled by user', completed_at: Date.now() });
      activeExecutions.delete(threadId);
      activeProcessesByRunId.delete(runId);
      queries.setIsStreaming(threadId, false);
      broadcastSync({ type: 'run_cancelled', runId, threadId, sessionId: execution?.sessionId, timestamp: Date.now() });
      broadcastSync({ type: 'streaming_cancelled', sessionId: execution?.sessionId || runId, conversationId: threadId, runId, timestamp: Date.now() });
      sendJSON(req, res, 200, cancelled);
    } catch (err) {
      if (err.message === 'Run not found') sendJSON(req, res, 404, { error: err.message });
      else if (err.message.includes('already completed')) sendJSON(req, res, 409, { error: err.message });
      else sendJSON(req, res, 500, { error: err.message });
    }
  }

  // Thread-scoped wait: validates thread ownership, then long-polls.
  async function handleThreadRunWait(req, res, threadId, runId) {
    const run = queries.getRun(runId);
    if (!run) { sendJSON(req, res, 404, { error: 'Run not found' }); return; }
    if (run.thread_id !== threadId) { sendJSON(req, res, 400, { error: 'Run does not belong to specified thread' }); return; }
    pollRunUntilDone(req, res, runId, run.status);
  }

  return routes;
}
|
@@ -0,0 +1,135 @@
|
|
|
1
|
+
import fs from 'fs';
import path from 'path';
import os from 'os';
import { spawn } from 'child_process';

/**
 * Registers per-conversation script and execution-control routes:
 * listing/running/stopping package.json scripts (start/dev) with output
 * streamed over WebSocket, and cancel/resume/inject for agent executions.
 *
 * @param {object} deps - Injected server dependencies:
 *   - sendJSON(req, res, status, body): JSON response helper.
 *   - parseBody(req): async JSON body parser.
 *   - queries: database query layer.
 *   - broadcastSync(event): WebSocket broadcast helper.
 *   - activeScripts: Map of conversationId -> { process, script, startTime }.
 *   - activeExecutions: Map of conversationId -> { pid, sessionId, ... }.
 *   - processMessageWithStreaming(...): starts agent execution (fire-and-forget).
 *   - STARTUP_CWD: fallback working directory.
 * @returns {object} routes - object exposing `_match(method, pathOnly)`.
 */
export function register(deps) {
  const { sendJSON, parseBody, queries, broadcastSync, activeScripts, activeExecutions, processMessageWithStreaming, STARTUP_CWD } = deps;

  const routes = {};

  // Reads the full request body and parses it as JSON.
  // Empty or malformed bodies yield {} (best-effort, matching prior behavior).
  async function readJSONBody(req) {
    let body = '';
    for await (const chunk of req) { body += chunk; }
    try { return body ? JSON.parse(body) : {}; } catch { return {}; }
  }

  // All routes here are parameterized by conversation id.
  routes['_match'] = (method, pathOnly) => {
    let m;
    if (method === 'GET' && (m = pathOnly.match(/^\/api\/conversations\/([^/]+)\/scripts$/))) return (req, res) => handleScripts(req, res, m[1]);
    if (method === 'POST' && (m = pathOnly.match(/^\/api\/conversations\/([^/]+)\/run-script$/))) return (req, res) => handleRunScript(req, res, m[1]);
    if (method === 'POST' && (m = pathOnly.match(/^\/api\/conversations\/([^/]+)\/stop-script$/))) return (req, res) => handleStopScript(req, res, m[1]);
    if (method === 'GET' && (m = pathOnly.match(/^\/api\/conversations\/([^/]+)\/script-status$/))) return (req, res) => handleScriptStatus(req, res, m[1]);
    if (method === 'POST' && (m = pathOnly.match(/^\/api\/conversations\/([^/]+)\/cancel$/))) return (req, res) => handleCancel(req, res, m[1]);
    if (method === 'POST' && (m = pathOnly.match(/^\/api\/conversations\/([^/]+)\/resume$/))) return (req, res) => handleResume(req, res, m[1]);
    if (method === 'POST' && (m = pathOnly.match(/^\/api\/conversations\/([^/]+)\/inject$/))) return (req, res) => handleInject(req, res, m[1]);
    return null;
  };

  // Reports which package.json scripts (start/dev) exist in the conversation's
  // working directory, and whether one is currently running.
  async function handleScripts(req, res, conversationId) {
    const conv = queries.getConversation(conversationId);
    if (!conv) { sendJSON(req, res, 404, { error: 'Not found' }); return; }
    const wd = conv.workingDirectory || STARTUP_CWD;
    let hasStart = false;
    let hasDev = false;
    try {
      const pkg = JSON.parse(fs.readFileSync(path.join(wd, 'package.json'), 'utf-8'));
      const scripts = pkg.scripts || {};
      hasStart = !!scripts.start;
      hasDev = !!scripts.dev;
    } catch {} // missing/unreadable package.json simply means no scripts
    const running = activeScripts.has(conversationId);
    const runningScript = running ? activeScripts.get(conversationId).script : null;
    sendJSON(req, res, 200, { hasStart, hasDev, running, runningScript });
  }

  // Spawns `npm run <start|dev>` in the conversation's working directory and
  // streams stdout/stderr over WebSocket. One script per conversation at a time.
  async function handleRunScript(req, res, conversationId) {
    const conv = queries.getConversation(conversationId);
    if (!conv) { sendJSON(req, res, 404, { error: 'Not found' }); return; }
    if (activeScripts.has(conversationId)) { sendJSON(req, res, 409, { error: 'Script already running' }); return; }
    const body = await parseBody(req);
    const script = body.script;
    if (script !== 'start' && script !== 'dev') { sendJSON(req, res, 400, { error: 'Invalid script' }); return; }
    const wd = conv.workingDirectory || STARTUP_CWD;
    try {
      const pkg = JSON.parse(fs.readFileSync(path.join(wd, 'package.json'), 'utf-8'));
      if (!pkg.scripts || !pkg.scripts[script]) { sendJSON(req, res, 400, { error: `Script "${script}" not found` }); return; }
    } catch { sendJSON(req, res, 400, { error: 'No package.json' }); return; }
    // Strip server-specific env vars so the child doesn't inherit our config.
    const childEnv = { ...process.env, FORCE_COLOR: '1' };
    delete childEnv.PORT;
    delete childEnv.BASE_URL;
    delete childEnv.HOT_RELOAD;
    const isWindows = os.platform() === 'win32';
    // detached: child gets its own process group so the whole tree can be
    // killed at once (see handleStopScript's negative-pid kill).
    const child = spawn('npm', ['run', script], { cwd: wd, stdio: ['ignore', 'pipe', 'pipe'], detached: true, env: childEnv, shell: isWindows });
    activeScripts.set(conversationId, { process: child, script, startTime: Date.now() });
    broadcastSync({ type: 'script_started', conversationId, script, timestamp: Date.now() });
    const onData = (stream) => (chunk) => broadcastSync({ type: 'script_output', conversationId, data: chunk.toString(), stream, timestamp: Date.now() });
    child.stdout.on('data', onData('stdout'));
    child.stderr.on('data', onData('stderr'));
    child.stdout.on('error', () => {});
    child.stderr.on('error', () => {});
    child.on('error', (err) => { activeScripts.delete(conversationId); broadcastSync({ type: 'script_stopped', conversationId, code: 1, error: err.message, timestamp: Date.now() }); });
    child.on('close', (code) => { activeScripts.delete(conversationId); broadcastSync({ type: 'script_stopped', conversationId, code: code || 0, timestamp: Date.now() }); });
    sendJSON(req, res, 200, { ok: true, script, pid: child.pid });
  }

  // Stops the running script for a conversation by killing its process group;
  // the 'close' handler set in handleRunScript performs cleanup/broadcast.
  async function handleStopScript(req, res, conversationId) {
    const entry = activeScripts.get(conversationId);
    if (!entry) { sendJSON(req, res, 404, { error: 'No running script' }); return; }
    try { process.kill(-entry.process.pid, 'SIGTERM'); } catch { try { entry.process.kill('SIGTERM'); } catch {} }
    sendJSON(req, res, 200, { ok: true });
  }

  // Lightweight status probe for the conversation's script.
  async function handleScriptStatus(req, res, conversationId) {
    const entry = activeScripts.get(conversationId);
    sendJSON(req, res, 200, { running: !!entry, script: entry?.script || null });
  }

  // Force-cancels the conversation's active agent execution (SIGKILL the
  // process group), marks the session interrupted, and notifies clients.
  async function handleCancel(req, res, conversationId) {
    const entry = activeExecutions.get(conversationId);
    if (!entry) { sendJSON(req, res, 404, { error: 'No active execution to cancel' }); return; }
    const { pid, sessionId } = entry;
    if (pid) {
      try { process.kill(-pid, 'SIGKILL'); } catch { try { process.kill(pid, 'SIGKILL'); } catch {} }
    }
    if (sessionId) queries.updateSession(sessionId, { status: 'interrupted', completed_at: Date.now() });
    queries.setIsStreaming(conversationId, false);
    activeExecutions.delete(conversationId);
    broadcastSync({ type: 'streaming_complete', sessionId, conversationId, interrupted: true, timestamp: Date.now() });
    sendJSON(req, res, 200, { ok: true, cancelled: true, conversationId, sessionId });
  }

  // Resumes a conversation with a new user message; refuses (409) if an
  // execution is already running for it.
  async function handleResume(req, res, conversationId) {
    const conv = queries.getConversation(conversationId);
    if (!conv) { sendJSON(req, res, 404, { error: 'Conversation not found' }); return; }
    if (activeExecutions.get(conversationId)) { sendJSON(req, res, 409, { error: 'Conversation already has an active execution' }); return; }
    const parsed = await readJSONBody(req);
    const { content, agentId } = parsed;
    if (!content) { sendJSON(req, res, 400, { error: 'Missing content in request body' }); return; }
    const resolvedAgentId = agentId || conv.agentId || 'claude-code';
    const resolvedModel = parsed.model || conv.model || null;
    const session = queries.createSession(conversationId, resolvedAgentId, 'pending');
    const message = queries.createMessage(conversationId, 'user', content);
    processMessageWithStreaming(conversationId, message.id, session.id, content, resolvedAgentId, resolvedModel);
    sendJSON(req, res, 200, { ok: true, conversationId, sessionId: session.id, messageId: message.id, resumed: true });
  }

  // Injects a user message into a conversation. If no execution is active,
  // also starts a new session processing the injected message.
  async function handleInject(req, res, conversationId) {
    const conv = queries.getConversation(conversationId);
    if (!conv) { sendJSON(req, res, 404, { error: 'Conversation not found' }); return; }
    const parsed = await readJSONBody(req);
    const { content, eager } = parsed;
    if (!content) { sendJSON(req, res, 400, { error: 'Missing content in request body' }); return; }
    const entry = activeExecutions.get(conversationId);
    // NOTE(review): the 409 message says "message queued", but no message is
    // created or queued on this path — confirm intended behavior.
    if (entry && eager) { sendJSON(req, res, 409, { error: 'Cannot eagerly inject while execution is running - message queued' }); return; }
    const message = queries.createMessage(conversationId, 'user', '[INJECTED] ' + content);
    if (!entry) {
      const resolvedAgentId = conv.agentId || 'claude-code';
      const resolvedModel = conv.model || null;
      const session = queries.createSession(conversationId, resolvedAgentId, 'pending');
      processMessageWithStreaming(conversationId, message.id, session.id, message.content, resolvedAgentId, resolvedModel);
    }
    sendJSON(req, res, 200, { ok: true, injected: true, conversationId, messageId: message.id });
  }

  return routes;
}
|
|
@@ -0,0 +1,144 @@
|
|
|
1
|
+
/**
 * Registers read-only routes for conversation/session data: individual
 * messages, session detail + events, full conversation loads, and
 * chunk/event pagination.
 *
 * @param {object} deps - Injected server dependencies:
 *   - queries: database query layer.
 *   - sendJSON(req, res, status, body): JSON response helper.
 *   - activeExecutions: Map of conversationId -> in-flight execution info.
 *   - rateLimitState: Map of conversationId -> rate-limit backoff info.
 *   - debugLog(msg): debug logger.
 * @returns {object} routes - object exposing `_match(method, pathOnly)`.
 */
export function register(deps) {
  const { queries, sendJSON, activeExecutions, rateLimitState, debugLog } = deps;

  const routes = {};

  // Parses an integer query parameter (base 10). Missing or non-numeric
  // values fall back to `def` — previously `parseInt('abc')` produced NaN,
  // which poisoned Math.min/slice arithmetic downstream.
  function intParam(url, name, def) {
    const n = Number.parseInt(url.searchParams.get(name) ?? '', 10);
    return Number.isNaN(n) ? def : n;
  }

  routes['_match'] = (method, pathOnly) => {
    let m;
    if (method === 'GET' && (m = pathOnly.match(/^\/api\/conversations\/([^/]+)\/messages\/([^/]+)$/)))
      return (req, res) => handleGetMessage(req, res, m[1], m[2]);
    if (method === 'GET' && (m = pathOnly.match(/^\/api\/sessions\/([^/]+)$/)))
      return (req, res) => handleGetSession(req, res, m[1]);
    if (method === 'GET' && (m = pathOnly.match(/^\/api\/conversations\/([^/]+)\/full$/)))
      return (req, res) => handleFullLoad(req, res, m[1]);
    if (method === 'GET' && (m = pathOnly.match(/^\/api\/conversations\/([^/]+)\/chunks$/)))
      return (req, res) => handleConvChunks(req, res, m[1]);
    if (method === 'GET' && (m = pathOnly.match(/^\/api\/sessions\/([^/]+)\/chunks$/)))
      return (req, res) => handleSessionChunks(req, res, m[1]);
    if (method === 'GET' && (m = pathOnly.match(/^\/api\/conversations\/([^/]+)\/sessions\/latest$/)))
      return (req, res) => handleLatestSession(req, res, m[1]);
    if (method === 'GET' && (m = pathOnly.match(/^\/api\/sessions\/([^/]+)\/execution$/)))
      return (req, res) => handleExecution(req, res, m[1]);
    return null;
  };

  // Fetches a single message, scoped to its conversation (404 on mismatch).
  async function handleGetMessage(req, res, conversationId, msgId) {
    const msg = queries.getMessage(msgId);
    if (!msg || msg.conversationId !== conversationId) { sendJSON(req, res, 404, { error: 'Not found' }); return; }
    sendJSON(req, res, 200, { message: msg });
  }

  // Fetches a session together with its recorded events.
  async function handleGetSession(req, res, sessionId) {
    const sess = queries.getSession(sessionId);
    if (!sess) { sendJSON(req, res, 404, { error: 'Not found' }); return; }
    const events = queries.getSessionEvents(sessionId);
    sendJSON(req, res, 200, { session: sess, events });
  }

  // One-shot conversation load: conversation record, latest session,
  // streaming flag, recent chunks (bounded unless ?allChunks=1), first
  // page of messages, and any rate-limit backoff state.
  async function handleFullLoad(req, res, conversationId) {
    const conv = queries.getConversation(conversationId);
    if (!conv) { sendJSON(req, res, 404, { error: 'Not found' }); return; }
    const latestSession = queries.getLatestSession(conversationId);
    const isActivelyStreaming = activeExecutions.has(conversationId);
    const url = new URL(req.url, 'http://localhost');
    const chunkLimit = Math.min(intParam(url, 'chunkLimit', 500), 5000);
    const allChunks = url.searchParams.get('allChunks') === '1';
    const totalChunks = queries.getConversationChunkCount(conversationId);
    let chunks;
    if (allChunks || totalChunks <= chunkLimit) {
      chunks = queries.getConversationChunks(conversationId);
    } else {
      chunks = queries.getRecentConversationChunks(conversationId, chunkLimit);
    }
    const msgResult = queries.getPaginatedMessages(conversationId, 100, 0);
    const rateLimitInfo = rateLimitState.get(conversationId) || null;
    sendJSON(req, res, 200, {
      conversation: conv,
      isActivelyStreaming,
      latestSession,
      chunks,
      totalChunks,
      messages: msgResult.messages,
      rateLimitInfo
    });
  }

  // Conversation chunk pagination: ?since=<ts> for incremental sync,
  // ?all=true for a full dump, otherwise the 500 most recent chunks.
  async function handleConvChunks(req, res, conversationId) {
    const conv = queries.getConversation(conversationId);
    if (!conv) { sendJSON(req, res, 404, { error: 'Conversation not found' }); return; }
    const url = new URL(req.url, 'http://localhost');
    const since = intParam(url, 'since', 0);
    const all = url.searchParams.get('all') === 'true';
    const totalChunks = queries.getConversationChunkCount(conversationId);
    let chunks;
    if (since > 0) {
      chunks = queries.getConversationChunksSince(conversationId, since);
    } else if (all) {
      chunks = queries.getConversationChunks(conversationId);
    } else {
      chunks = queries.getRecentConversationChunks(conversationId, 500);
    }
    debugLog(`[chunks] Conv ${conversationId}: ${chunks.length} chunks (total: ${totalChunks})`);
    sendJSON(req, res, 200, { ok: true, chunks, totalChunks });
  }

  // Session chunk pagination: prefers ?sinceSeq (sequence cursor) over
  // ?since (timestamp cursor).
  async function handleSessionChunks(req, res, sessionId) {
    const sess = queries.getSession(sessionId);
    if (!sess) { sendJSON(req, res, 404, { error: 'Session not found' }); return; }
    const url = new URL(req.url, 'http://localhost');
    const sinceSeq = intParam(url, 'sinceSeq', -1);
    const since = intParam(url, 'since', 0);
    let chunks;
    if (sinceSeq >= 0) {
      chunks = queries.getChunksSinceSeq(sessionId, sinceSeq);
    } else {
      chunks = queries.getChunksSince(sessionId, since);
    }
    sendJSON(req, res, 200, { ok: true, chunks });
  }

  // Latest session for a conversation; { session: null } when none exists.
  async function handleLatestSession(req, res, convId) {
    const latestSession = queries.getLatestSession(convId);
    if (!latestSession) { sendJSON(req, res, 200, { session: null }); return; }
    const events = queries.getSessionEvents(latestSession.id);
    sendJSON(req, res, 200, { session: latestSession, events });
  }

  // Paginated, optionally type-filtered view of a session's execution
  // events, with summary metadata. Unknown sessions yield an empty page.
  async function handleExecution(req, res, sessionId) {
    const url = new URL(req.url, 'http://localhost');
    const limit = Math.min(intParam(url, 'limit', 1000), 5000);
    const offset = Math.max(intParam(url, 'offset', 0), 0);
    const filterType = url.searchParams.get('filterType');
    try {
      const session = queries.getSession(sessionId);
      const allChunks = session ? (queries.getChunksSince(sessionId, 0) || []) : [];
      const filtered = filterType ? allChunks.filter(e => e.type === filterType) : allChunks;
      sendJSON(req, res, 200, {
        sessionId,
        events: filtered.slice(offset, offset + limit),
        total: filtered.length,
        limit,
        offset,
        hasMore: offset + limit < filtered.length,
        metadata: {
          status: session?.status || 'unknown',
          startTime: session?.created_at || null,
          duration: session?.completed_at && session?.created_at ? session.completed_at - session.created_at : 0,
          eventCount: filtered.length
        }
      });
    } catch (err) {
      sendJSON(req, res, 400, { error: err.message });
    }
  }

  return routes;
}
|
|
@@ -0,0 +1,115 @@
|
|
|
1
|
+
export function createEventHandler({ queries, activeExecutions, broadcastSync, rateLimitState, batcherRef, sessionId, conversationId, messageId, content, agentId, model, subAgent, ownedSessionIds, allBlocksRef, currentSequenceRef, scheduleRetry, eagerTTS, debugLog, parseRateLimitResetTime }) {
|
|
2
|
+
return function onEvent(parsed) {
|
|
3
|
+
batcherRef.eventCount++;
|
|
4
|
+
const entry = activeExecutions.get(conversationId);
|
|
5
|
+
if (entry) entry.lastActivity = Date.now();
|
|
6
|
+
if (parsed.session_id) {
|
|
7
|
+
ownedSessionIds.add(parsed.session_id);
|
|
8
|
+
if (!batcherRef.resumeSessionId || batcherRef.resumeSessionId !== parsed.session_id) {
|
|
9
|
+
batcherRef.resumeSessionId = parsed.session_id;
|
|
10
|
+
queries.setClaudeSessionId(conversationId, parsed.session_id, sessionId);
|
|
11
|
+
}
|
|
12
|
+
}
|
|
13
|
+
debugLog(`[stream] Event ${batcherRef.eventCount}: type=${parsed.type}`);
|
|
14
|
+
|
|
15
|
+
if (parsed.type === 'system') {
|
|
16
|
+
if (parsed.subtype === 'task_notification') return;
|
|
17
|
+
if (!parsed.model && !parsed.cwd && !parsed.tools) return;
|
|
18
|
+
const block = { type: 'system', subtype: parsed.subtype, model: parsed.model, cwd: parsed.cwd, tools: parsed.tools, session_id: parsed.session_id };
|
|
19
|
+
currentSequenceRef.val++;
|
|
20
|
+
batcherRef.batcher.add(sessionId, conversationId, currentSequenceRef.val, 'system', block);
|
|
21
|
+
broadcastSync({ type: 'streaming_progress', sessionId, conversationId, block, blockRole: 'system', blockIndex: allBlocksRef.val.length, seq: currentSequenceRef.val, timestamp: Date.now() });
|
|
22
|
+
} else if (parsed.type === 'assistant' && parsed.message?.content) {
|
|
23
|
+
for (const block of parsed.message.content) {
|
|
24
|
+
allBlocksRef.val.push(block);
|
|
25
|
+
currentSequenceRef.val++;
|
|
26
|
+
batcherRef.batcher.add(sessionId, conversationId, currentSequenceRef.val, block.type || 'assistant', block);
|
|
27
|
+
broadcastSync({ type: 'streaming_progress', sessionId, conversationId, block, blockRole: 'assistant', blockIndex: allBlocksRef.val.length - 1, seq: currentSequenceRef.val, timestamp: Date.now() });
|
|
28
|
+
if (block.type === 'text' && block.text) {
|
|
29
|
+
const rateLimitMatch = block.text.match(/you'?ve hit your limit|rate limit exceeded/i);
|
|
30
|
+
if (rateLimitMatch) {
|
|
31
|
+
debugLog(`[rate-limit] Detected rate limit message in stream for conv ${conversationId}`);
|
|
32
|
+
const retryAfterSec = parseRateLimitResetTime(block.text);
|
|
33
|
+
const entry2 = activeExecutions.get(conversationId);
|
|
34
|
+
if (entry2 && entry2.pid) { try { process.kill(entry2.pid); } catch (e) {} }
|
|
35
|
+
const existingCount = rateLimitState.get(conversationId)?.retryCount || 0;
|
|
36
|
+
if (existingCount >= 3) {
|
|
37
|
+
batcherRef.batcher.drain();
|
|
38
|
+
activeExecutions.delete(conversationId);
|
|
39
|
+
queries.setIsStreaming(conversationId, false);
|
|
40
|
+
const errMsg = queries.createMessage(conversationId, 'assistant', `Error: Rate limit exceeded after ${existingCount + 1} attempts. Please try again later.`);
|
|
41
|
+
broadcastSync({ type: 'message_created', conversationId, message: errMsg, timestamp: Date.now() });
|
|
42
|
+
broadcastSync({ type: 'streaming_complete', sessionId, conversationId, interrupted: true, timestamp: Date.now() });
|
|
43
|
+
return;
|
|
44
|
+
}
|
|
45
|
+
rateLimitState.set(conversationId, { retryAt: Date.now() + (retryAfterSec * 1000), cooldownMs: retryAfterSec * 1000, retryCount: existingCount + 1, isStreamDetected: true });
|
|
46
|
+
broadcastSync({ type: 'rate_limit_hit', sessionId, conversationId, retryAfterMs: retryAfterSec * 1000, retryAt: Date.now() + (retryAfterSec * 1000), retryCount: 1, timestamp: Date.now() });
|
|
47
|
+
batcherRef.batcher.drain();
|
|
48
|
+
activeExecutions.delete(conversationId);
|
|
49
|
+
queries.setIsStreaming(conversationId, false);
|
|
50
|
+
setTimeout(() => {
|
|
51
|
+
rateLimitState.delete(conversationId);
|
|
52
|
+
broadcastSync({ type: 'rate_limit_clear', conversationId, timestamp: Date.now() });
|
|
53
|
+
scheduleRetry(conversationId, messageId, content, agentId, model, subAgent);
|
|
54
|
+
}, retryAfterSec * 1000);
|
|
55
|
+
return;
|
|
56
|
+
}
|
|
57
|
+
eagerTTS(block.text, conversationId, sessionId);
|
|
58
|
+
}
|
|
59
|
+
}
|
|
60
|
+
} else if (parsed.type === 'user' && parsed.message?.content) {
|
|
61
|
+
for (const block of parsed.message.content) {
|
|
62
|
+
if (block.type === 'tool_result') {
|
|
63
|
+
const toolResultBlock = { type: 'tool_result', tool_use_id: block.tool_use_id, content: typeof block.content === 'string' ? block.content : JSON.stringify(block.content), is_error: block.is_error || false };
|
|
64
|
+
currentSequenceRef.val++;
|
|
65
|
+
batcherRef.batcher.add(sessionId, conversationId, currentSequenceRef.val, 'tool_result', toolResultBlock);
|
|
66
|
+
broadcastSync({ type: 'streaming_progress', sessionId, conversationId, block: toolResultBlock, blockRole: 'tool_result', blockIndex: allBlocksRef.val.length, seq: currentSequenceRef.val, timestamp: Date.now() });
|
|
67
|
+
}
|
|
68
|
+
}
|
|
69
|
+
} else if (parsed.type === 'result') {
|
|
70
|
+
const resultBlock = { type: 'result', subtype: parsed.subtype, duration_ms: parsed.duration_ms, total_cost_usd: parsed.total_cost_usd, num_turns: parsed.num_turns, is_error: parsed.is_error || false, result: parsed.result };
|
|
71
|
+
currentSequenceRef.val++;
|
|
72
|
+
batcherRef.batcher.add(sessionId, conversationId, currentSequenceRef.val, 'result', resultBlock);
|
|
73
|
+
broadcastSync({ type: 'streaming_progress', sessionId, conversationId, block: resultBlock, blockRole: 'result', blockIndex: allBlocksRef.val.length, isResult: true, seq: currentSequenceRef.val, timestamp: Date.now() });
|
|
74
|
+
if (parsed.result) {
|
|
75
|
+
const resultText = typeof parsed.result === 'string' ? parsed.result : JSON.stringify(parsed.result);
|
|
76
|
+
const rlMatch = resultText.match(/you'?ve hit your limit|rate limit exceeded/i);
|
|
77
|
+
if (rlMatch) {
|
|
78
|
+
debugLog(`[rate-limit] Detected rate limit in result for conv ${conversationId}`);
|
|
79
|
+
const retryAfterSec = parseRateLimitResetTime(resultText);
|
|
80
|
+
const entry3 = activeExecutions.get(conversationId);
|
|
81
|
+
if (entry3 && entry3.pid) { try { process.kill(entry3.pid); } catch (e) {} }
|
|
82
|
+
const existingCount2 = rateLimitState.get(conversationId)?.retryCount || 0;
|
|
83
|
+
if (existingCount2 >= 3) {
|
|
84
|
+
batcherRef.batcher.drain();
|
|
85
|
+
activeExecutions.delete(conversationId);
|
|
86
|
+
queries.setIsStreaming(conversationId, false);
|
|
87
|
+
const errMsg2 = queries.createMessage(conversationId, 'assistant', `Error: Rate limit exceeded after ${existingCount2 + 1} attempts. Please try again later.`);
|
|
88
|
+
broadcastSync({ type: 'message_created', conversationId, message: errMsg2, timestamp: Date.now() });
|
|
89
|
+
broadcastSync({ type: 'streaming_complete', sessionId, conversationId, interrupted: true, timestamp: Date.now() });
|
|
90
|
+
return;
|
|
91
|
+
}
|
|
92
|
+
rateLimitState.set(conversationId, { retryAt: Date.now() + (retryAfterSec * 1000), cooldownMs: retryAfterSec * 1000, retryCount: existingCount2 + 1, isStreamDetected: true });
|
|
93
|
+
broadcastSync({ type: 'rate_limit_hit', sessionId, conversationId, retryAfterMs: retryAfterSec * 1000, retryAt: Date.now() + (retryAfterSec * 1000), retryCount: existingCount2 + 1, timestamp: Date.now() });
|
|
94
|
+
batcherRef.batcher.drain();
|
|
95
|
+
activeExecutions.delete(conversationId);
|
|
96
|
+
queries.setIsStreaming(conversationId, false);
|
|
97
|
+
setTimeout(() => {
|
|
98
|
+
rateLimitState.delete(conversationId);
|
|
99
|
+
broadcastSync({ type: 'rate_limit_clear', conversationId, timestamp: Date.now() });
|
|
100
|
+
scheduleRetry(conversationId, messageId, content, agentId, model, subAgent);
|
|
101
|
+
}, retryAfterSec * 1000);
|
|
102
|
+
return;
|
|
103
|
+
}
|
|
104
|
+
if (resultText) eagerTTS(resultText, conversationId, sessionId);
|
|
105
|
+
}
|
|
106
|
+
if (parsed.result && allBlocksRef.val.length === 0) allBlocksRef.val.push({ type: 'text', text: String(parsed.result) });
|
|
107
|
+
} else if (parsed.type === 'tool_status') {
|
|
108
|
+
broadcastSync({ type: 'streaming_progress', sessionId, conversationId, block: { type: 'tool_status', tool_use_id: parsed.tool_use_id, status: parsed.status }, seq: currentSequenceRef.val, timestamp: Date.now() });
|
|
109
|
+
} else if (parsed.type === 'usage') {
|
|
110
|
+
broadcastSync({ type: 'streaming_progress', sessionId, conversationId, block: { type: 'usage', usage: parsed.usage }, seq: currentSequenceRef.val, timestamp: Date.now() });
|
|
111
|
+
} else if (parsed.type === 'plan') {
|
|
112
|
+
broadcastSync({ type: 'streaming_progress', sessionId, conversationId, block: { type: 'plan', entries: parsed.entries }, seq: currentSequenceRef.val, timestamp: Date.now() });
|
|
113
|
+
}
|
|
114
|
+
};
|
|
115
|
+
}
|
package/package.json
CHANGED