agentgui 1.0.837 → 1.0.838
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +1 -0
- package/lib/message-queue.js +62 -0
- package/lib/process-message-rate-limit.js +18 -0
- package/lib/process-message.js +126 -0
- package/lib/stream-event-handler.js +115 -0
- package/package.json +1 -1
- package/scripts/patch-fsbrowse.js +1 -1
- package/server.js +21 -674
package/CHANGELOG.md
CHANGED
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
## [Unreleased]
|
|
2
2
|
|
|
3
3
|
### Refactor
|
|
4
|
+
- Extract processMessageWithStreaming (539L), scheduleRetry, drainMessageQueue, and parseRateLimitResetTime from server.js into lib/process-message.js (127L, createProcessMessage factory), lib/stream-event-handler.js (116L, createEventHandler), lib/message-queue.js (63L, createMessageQueue), lib/process-message-rate-limit.js (19L); all files ≤200L; server.js reduced by ~660L and imports/wires all factories after broadcastSync is created
|
|
4
5
|
- refactor: extract broadcastSync to lib/broadcast.js (createBroadcast factory) and recovery functions to lib/recovery.js (createRecovery factory); server.js reduced from 3419L to 3226L
|
|
5
6
|
- refactor: remove JSDoc and standalone code comments from scripts/patch-fsbrowse.js; reduce from 229L to 200L
|
|
6
7
|
- Split database.js (651L) into database.js (81L) + database-schema.js (176L) + database-migrations.js (150L) + database-migrations-acp.js (134L); all files ≤200L; no circular imports; migration functions receive db as parameter
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
// Factory for conversation message-queue helpers. All collaborators are
// injected so server.js can wire the queue after broadcastSync exists.
// getProcessMessageWithStreaming is a late-bound getter that breaks the
// circular dependency with the process-message factory.
export function createMessageQueue({ queries, activeExecutions, messageQueues, broadcastSync, execMachine, cleanupExecution, debugLog, getProcessMessageWithStreaming }) {
  /**
   * Start a fresh session to re-run a message that previously hit a rate
   * limit. When `content` is missing it is recovered from the conversation's
   * last user message (falling back to the literal 'continue').
   */
  function scheduleRetry(conversationId, messageId, content, agentId, model, subAgent) {
    debugLog(`[rate-limit] scheduleRetry called for conv ${conversationId}, messageId=${messageId}`);
    if (!content) {
      // NOTE(review): the return value of getConversation is discarded here —
      // kept for parity with the original flow; confirm it has no side effects.
      queries.getConversation(conversationId);
      const lastUserMessage = queries.getLastUserMessage(conversationId);
      content = lastUserMessage?.content || 'continue';
      debugLog(`[rate-limit] Recovered content from last message: ${content?.substring?.(0, 50)}...`);
    }
    const retrySession = queries.createSession(conversationId);
    queries.createEvent('session.created', { messageId, sessionId: retrySession.id, retryReason: 'rate_limit' }, conversationId, retrySession.id);
    debugLog(`[rate-limit] Broadcasting streaming_start for retry session ${retrySession.id}`);
    broadcastSync({ type: 'streaming_start', sessionId: retrySession.id, conversationId, messageId, agentId, queueLength: messageQueues.get(conversationId)?.length || 0, timestamp: Date.now() });
    const begunAt = Date.now();
    activeExecutions.set(conversationId, { pid: null, startTime: begunAt, sessionId: retrySession.id, lastActivity: begunAt });
    debugLog(`[rate-limit] Calling processMessageWithStreaming for retry`);
    getProcessMessageWithStreaming()(conversationId, messageId, retrySession.id, content, agentId, model, subAgent)
      .catch(err => {
        debugLog(`[rate-limit] Retry failed: ${err.message}`);
        console.error(`[rate-limit] Retry error for conv ${conversationId}:`, err);
        cleanupExecution(conversationId);
        broadcastSync({ type: 'streaming_error', sessionId: retrySession.id, conversationId, error: `Rate limit retry failed: ${err.message}`, recoverable: false, timestamp: Date.now() });
      });
  }

  /**
   * Pop the next queued message for a conversation — preferring the
   * state-machine queue, falling back to the legacy Map queue — and start
   * streaming it. A failure broadcasts an error and retries the drain after
   * a short delay.
   */
  function drainMessageQueue(conversationId) {
    const machineQueue = execMachine.getQueue(conversationId);
    const legacyQueue = messageQueues.get(conversationId);
    const machineHasItems = machineQueue.length > 0;
    if (!machineHasItems && (!legacyQueue || legacyQueue.length === 0)) return;
    let queued;
    if (machineHasItems) {
      execMachine.send(conversationId, { type: 'COMPLETE' });
      queued = execMachine.getContext(conversationId)?.nextItem;
      // Keep the legacy Map queue mirrored with the machine queue.
      if (legacyQueue && legacyQueue.length > 0) legacyQueue.shift();
      if (legacyQueue && legacyQueue.length === 0) messageQueues.delete(conversationId);
    } else {
      queued = legacyQueue.shift();
      if (legacyQueue.length === 0) messageQueues.delete(conversationId);
    }
    if (!queued) return;
    debugLog(`[queue] Draining next message for ${conversationId}, messageId=${queued.messageId}`);
    const remainingQueueLength = execMachine.getQueue(conversationId).length || messageQueues.get(conversationId)?.length || 0;
    broadcastSync({ type: 'queue_item_dequeued', conversationId, messageId: queued.messageId, queueLength: remainingQueueLength, timestamp: Date.now() });
    const session = queries.createSession(conversationId);
    queries.createEvent('session.created', { messageId: queued.messageId, sessionId: session.id }, conversationId, session.id);
    broadcastSync({ type: 'streaming_start', sessionId: session.id, conversationId, messageId: queued.messageId, agentId: queued.agentId, queueLength: remainingQueueLength, timestamp: Date.now() });
    broadcastSync({ type: 'queue_status', conversationId, queueLength: remainingQueueLength, timestamp: Date.now() });
    const begunAt = Date.now();
    execMachine.send(conversationId, { type: 'START', sessionId: session.id });
    activeExecutions.set(conversationId, { pid: null, startTime: begunAt, sessionId: session.id, lastActivity: begunAt });
    getProcessMessageWithStreaming()(conversationId, queued.messageId, session.id, queued.content, queued.agentId, queued.model, queued.subAgent)
      .catch(err => {
        debugLog(`[queue] Error processing queued message: ${err.message}`);
        cleanupExecution(conversationId);
        broadcastSync({ type: 'streaming_error', sessionId: session.id, conversationId, error: `Queue processing failed: ${err.message}`, recoverable: true, timestamp: Date.now() });
        setTimeout(() => drainMessageQueue(conversationId), 100);
      });
  }

  return { scheduleRetry, drainMessageQueue };
}
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
/**
 * Parse the rate-limit reset time out of a human-readable message such as
 * "…resets at 5pm (UTC)".
 *
 * @param {string} text - Message text to scan for a reset time.
 * @returns {number} Seconds until the reset, clamped to a minimum of 60;
 *   300 when no reset time can be found in the text.
 */
export function parseRateLimitResetTime(text) {
  const m = text.match(/resets?\s+(?:at\s+)?(\d{1,2})(?::(\d{2}))?\s*(am|pm)?\s*\(?(UTC|[A-Z]{2,4})\)?/i);
  if (!m) return 300;
  const [, hourStr, minuteStr, meridiem] = m;
  let hour = Number.parseInt(hourStr, 10);
  const minute = minuteStr ? Number.parseInt(minuteStr, 10) : 0;
  const period = meridiem?.toLowerCase();
  // 12-hour -> 24-hour conversion (12am -> 0; 12pm stays 12).
  if (period === 'pm' && hour !== 12) hour += 12;
  else if (period === 'am' && hour === 12) hour = 0;
  // NOTE(review): the captured timezone (group 4) is ignored — the reset time
  // is always interpreted as UTC. Confirm upstream messages only use UTC.
  const now = new Date();
  const reset = new Date(now);
  reset.setUTCHours(hour, minute, 0, 0);
  // If the parsed time already passed today (UTC), it must mean tomorrow.
  if (reset <= now) reset.setUTCDate(reset.getUTCDate() + 1);
  return Math.max(60, Math.ceil((reset.getTime() - now.getTime()) / 1000));
}
|
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
// Factory that builds the main message-processing entry point. Every
// collaborator (db queries, execution registry, rate-limit state, state
// machine, broadcaster, Claude runner, retry/queue helpers, event-handler
// factory) is injected so server.js can wire it after broadcastSync exists.
// NOTE(review): checkpointManager and logError are accepted but not used in
// this body — confirm whether they are reserved for future use.
export function createProcessMessage({ queries, activeExecutions, rateLimitState, execMachine, broadcastSync, runClaudeWithStreaming, cleanupExecution, checkpointManager, discoveredAgents, ownedSessionIds, STARTUP_CWD, buildSystemPrompt, parseRateLimitResetTime, eagerTTS, touchACP, createChunkBatcher, debugLog, logError, scheduleRetry, drainMessageQueue, createEventHandler }) {
  // Runs one user message through the CLI runner with streamed events.
  // Guards against duplicate executions and rate-limit cooldowns, persists
  // session state, and hands off to retry / queue-drain helpers on exit.
  async function processMessageWithStreaming(conversationId, messageId, sessionId, content, agentId, model, subAgent) {
    const startTime = Date.now();
    touchACP(agentId);
    const conv = queries.getConversation(conversationId);
    // Conversation disappeared: mark the session failed and bail out.
    if (!conv) {
      console.error(`[stream] Conversation ${conversationId} not found, aborting`);
      queries.updateSession(sessionId, { status: 'error', error: 'Conversation not found' });
      queries.setIsStreaming(conversationId, false);
      return;
    }
    // Duplicate-execution guard: another session is already streaming for
    // this conversation. The same sessionId is allowed through (re-entry).
    if (activeExecutions.has(conversationId)) {
      const existing = activeExecutions.get(conversationId);
      if (existing.sessionId !== sessionId) {
        debugLog(`[stream] Conversation ${conversationId} already has active execution (different session), aborting duplicate`);
        return;
      }
    }
    // Rate-limit cooldown guard: never start while a retry is still pending.
    if (rateLimitState.has(conversationId)) {
      const rlState = rateLimitState.get(conversationId);
      if (rlState.retryAt > Date.now()) {
        debugLog(`[stream] Conversation ${conversationId} is in rate limit cooldown, aborting`);
        return;
      }
    }
    // Register the execution before spawning; pid/proc are filled in later
    // via the onPid/onProcess callbacks below.
    activeExecutions.set(conversationId, { pid: null, startTime, sessionId, lastActivity: startTime });
    execMachine.send(conversationId, { type: 'START', sessionId });
    queries.setIsStreaming(conversationId, true);
    queries.updateSession(sessionId, { status: 'active' });
    const batcher = createChunkBatcher(queries, debugLog);
    const cwd = conv?.workingDirectory || STARTUP_CWD;
    // Mutable refs shared with the event handler so it can accumulate state
    // across events without this function losing visibility of it.
    const allBlocksRef = { val: [] };
    const currentSequenceRef = { val: queries.getMaxSequence(sessionId) ?? -1 };
    const batcherRef = { batcher, eventCount: 0, resumeSessionId: conv?.claudeSessionId || null };
    const onEvent = createEventHandler({ queries, activeExecutions, broadcastSync, rateLimitState, batcherRef, sessionId, conversationId, messageId, content, agentId, model, subAgent, ownedSessionIds, allBlocksRef, currentSequenceRef, scheduleRetry, eagerTTS, debugLog, parseRateLimitResetTime });
    try {
      debugLog(`[stream] Starting: conversationId=${conversationId}, sessionId=${sessionId}`);
      // cli-wrapper agents are translated to their underlying ACP id.
      let resolvedAgentId = agentId || 'claude-code';
      const wrapperAgent = discoveredAgents.find(a => a.id === resolvedAgentId && a.protocol === 'cli-wrapper' && a.acpId);
      if (wrapperAgent) resolvedAgentId = wrapperAgent.acpId;
      const resolvedModel = model || conv?.model || null;
      const resolvedSubAgent = subAgent || conv?.subAgent || null;
      const config = {
        verbose: true, outputFormat: 'stream-json', timeout: 1800000, print: true,
        resumeSessionId: batcherRef.resumeSessionId,
        systemPrompt: buildSystemPrompt(agentId, resolvedModel, resolvedSubAgent),
        model: resolvedModel || undefined, subAgent: resolvedSubAgent || undefined, onEvent,
        onPid: (pid) => { const e = activeExecutions.get(conversationId); if (e) e.pid = pid; execMachine.send(conversationId, { type: 'SET_PID', pid }); },
        onProcess: (proc) => { const e = activeExecutions.get(conversationId); if (e) e.proc = proc; execMachine.send(conversationId, { type: 'SET_PROC', proc }); }
      };
      const { outputs, sessionId: claudeSessionId } = await runClaudeWithStreaming(content, cwd, resolvedAgentId, config);
      // If the event handler already detected a rate limit mid-stream it has
      // torn down state and scheduled a retry; skip the success path.
      if (rateLimitState.get(conversationId)?.isStreamDetected) {
        debugLog(`[rate-limit] Rate limit already handled in stream for conv ${conversationId}, skipping success handler`);
        return;
      }
      activeExecutions.delete(conversationId);
      execMachine.send(conversationId, { type: 'COMPLETE' });
      batcher.drain();
      if (claudeSessionId) ownedSessionIds.delete(claudeSessionId);
      debugLog(`[stream] Claude returned ${outputs.length} outputs, sessionId=${claudeSessionId}`);
      queries.updateSession(sessionId, { status: 'complete', response: JSON.stringify({ outputs, eventCount: batcherRef.eventCount }), completed_at: Date.now() });
      broadcastSync({ type: 'streaming_complete', sessionId, conversationId, agentId, eventCount: batcherRef.eventCount, seq: currentSequenceRef.val, timestamp: Date.now() });
      debugLog(`[stream] Completed: ${outputs.length} outputs, ${batcherRef.eventCount} events`);
    } catch (error) {
      const elapsed = Date.now() - startTime;
      debugLog(`[stream] Error after ${elapsed}ms: ${error.message}`);
      const conv2 = queries.getConversation(conversationId);
      if (conv2?.claudeSessionId) ownedSessionIds.delete(conv2.claudeSessionId);
      // Mid-stream rate-limit detection already handled cleanup and retry.
      if (rateLimitState.get(conversationId)?.isStreamDetected) {
        debugLog(`[rate-limit] Rate limit already handled in stream for conv ${conversationId}, skipping catch handler`);
        return;
      }
      // Classify the failure: auth errors are terminal, rate limits retry,
      // anything else falls through to the generic error path below.
      const isAuthError = error.authError || error.nonRetryable || /401|unauthorized|invalid.*auth|invalid.*token|auth.*failed|permission denied|access denied/i.test(error.message);
      const isRateLimit = error.rateLimited || /rate.?limit|429|too many requests|overloaded|throttl/i.test(error.message);
      queries.updateSession(sessionId, { status: 'error', error: error.message, completed_at: Date.now() });
      if (isAuthError) {
        debugLog(`[auth-error] Auth error for conv ${conversationId}: ${error.message}`);
        broadcastSync({ type: 'streaming_error', sessionId, conversationId, error: `Authentication failed: ${error.message}. Please check your API credentials.`, recoverable: false, isAuthError: true, timestamp: Date.now() });
        const errMsg = queries.createMessage(conversationId, 'assistant', `Error: Authentication failed. ${error.message}. Please update your credentials and try again.`);
        broadcastSync({ type: 'message_created', conversationId, message: errMsg, timestamp: Date.now() });
        queries.setIsStreaming(conversationId, false);
        batcher.drain();
        activeExecutions.delete(conversationId);
        return;
      }
      if (isRateLimit) {
        const existingState = rateLimitState.get(conversationId) || {};
        const retryCount = (existingState.retryCount || 0) + 1;
        const maxRateLimitRetries = 3;
        // Retry budget exhausted: surface a terminal error to the client.
        if (retryCount > maxRateLimitRetries) {
          broadcastSync({ type: 'streaming_error', sessionId, conversationId, error: `Rate limit exceeded after ${retryCount} attempts. Please try again later.`, recoverable: false, timestamp: Date.now() });
          const errMsg = queries.createMessage(conversationId, 'assistant', `Error: Rate limit exceeded after ${retryCount} attempts. Please try again later.`);
          broadcastSync({ type: 'message_created', conversationId, message: errMsg, timestamp: Date.now() });
          queries.setIsStreaming(conversationId, false);
          return;
        }
        const cooldownMs = (error.retryAfterSec || 60) * 1000;
        const retryAt = Date.now() + cooldownMs;
        rateLimitState.set(conversationId, { retryAt, cooldownMs, retryCount });
        broadcastSync({ type: 'rate_limit_hit', sessionId, conversationId, retryAfterMs: cooldownMs, retryAt, retryCount, timestamp: Date.now() });
        batcher.drain();
        debugLog(`[rate-limit] Scheduling retry for conv ${conversationId} in ${cooldownMs}ms (attempt ${retryCount + 1})`);
        setTimeout(() => {
          debugLog(`[rate-limit] Timeout fired for conv ${conversationId}, calling scheduleRetry`);
          rateLimitState.delete(conversationId);
          broadcastSync({ type: 'rate_limit_clear', conversationId, timestamp: Date.now() });
          scheduleRetry(conversationId, messageId, content, agentId, model, subAgent);
        }, cooldownMs);
        return;
      }
      // Heuristic: a null exit code with zero events suggests a session
      // resume conflict rather than a genuine failure — skip the error
      // message in that case but still broadcast the flagged error.
      const isSessionConflict = error.exitCode === null && batcherRef.eventCount === 0;
      broadcastSync({ type: 'streaming_error', sessionId, conversationId, error: error.message, isPrematureEnd: error.isPrematureEnd || false, exitCode: error.exitCode, stderrText: error.stderrText, recoverable: elapsed < 60000, isSessionConflict, timestamp: Date.now() });
      if (!isSessionConflict) {
        const errMsg = queries.createMessage(conversationId, 'assistant', `Error: ${error.message}`);
        broadcastSync({ type: 'message_created', conversationId, message: errMsg, timestamp: Date.now() });
      }
    } finally {
      batcher.drain();
      // Skip cleanup while a rate-limit retry is pending; scheduleRetry will
      // take over the execution slot when its cooldown timer fires.
      if (!rateLimitState.has(conversationId)) {
        cleanupExecution(conversationId);
        drainMessageQueue(conversationId);
      }
    }
  }
  return { processMessageWithStreaming };
}
|
|
@@ -0,0 +1,115 @@
|
|
|
1
|
+
// Factory for the per-execution stream event handler. All per-message context
// (session, conversation, mutable refs shared with processMessageWithStreaming)
// is captured in the closure so the returned onEvent can be passed straight
// to the CLI runner's config.
export function createEventHandler({ queries, activeExecutions, broadcastSync, rateLimitState, batcherRef, sessionId, conversationId, messageId, content, agentId, model, subAgent, ownedSessionIds, allBlocksRef, currentSequenceRef, scheduleRetry, eagerTTS, debugLog, parseRateLimitResetTime }) {
  // Handles one parsed stream-json event: persists blocks via the batcher,
  // broadcasts progress, and detects in-band rate-limit messages.
  return function onEvent(parsed) {
    batcherRef.eventCount++;
    // Keep the watchdog's lastActivity fresh for this execution.
    const entry = activeExecutions.get(conversationId);
    if (entry) entry.lastActivity = Date.now();
    // Track session ownership and persist a newly assigned CLI session id.
    if (parsed.session_id) {
      ownedSessionIds.add(parsed.session_id);
      if (!batcherRef.resumeSessionId || batcherRef.resumeSessionId !== parsed.session_id) {
        batcherRef.resumeSessionId = parsed.session_id;
        queries.setClaudeSessionId(conversationId, parsed.session_id, sessionId);
      }
    }
    debugLog(`[stream] Event ${batcherRef.eventCount}: type=${parsed.type}`);

    if (parsed.type === 'system') {
      // Ignore task notifications and empty system events.
      if (parsed.subtype === 'task_notification') return;
      if (!parsed.model && !parsed.cwd && !parsed.tools) return;
      const block = { type: 'system', subtype: parsed.subtype, model: parsed.model, cwd: parsed.cwd, tools: parsed.tools, session_id: parsed.session_id };
      currentSequenceRef.val++;
      batcherRef.batcher.add(sessionId, conversationId, currentSequenceRef.val, 'system', block);
      broadcastSync({ type: 'streaming_progress', sessionId, conversationId, block, blockRole: 'system', blockIndex: allBlocksRef.val.length, seq: currentSequenceRef.val, timestamp: Date.now() });
    } else if (parsed.type === 'assistant' && parsed.message?.content) {
      // Persist and broadcast each assistant content block in order.
      for (const block of parsed.message.content) {
        allBlocksRef.val.push(block);
        currentSequenceRef.val++;
        batcherRef.batcher.add(sessionId, conversationId, currentSequenceRef.val, block.type || 'assistant', block);
        broadcastSync({ type: 'streaming_progress', sessionId, conversationId, block, blockRole: 'assistant', blockIndex: allBlocksRef.val.length - 1, seq: currentSequenceRef.val, timestamp: Date.now() });
        if (block.type === 'text' && block.text) {
          // In-band rate-limit detection: kill the process, tear down, and
          // schedule a retry after the parsed cooldown.
          const rateLimitMatch = block.text.match(/you'?ve hit your limit|rate limit exceeded/i);
          if (rateLimitMatch) {
            debugLog(`[rate-limit] Detected rate limit message in stream for conv ${conversationId}`);
            const retryAfterSec = parseRateLimitResetTime(block.text);
            const entry2 = activeExecutions.get(conversationId);
            // Best-effort kill; the process may already be gone.
            if (entry2 && entry2.pid) { try { process.kill(entry2.pid); } catch (e) {} }
            const existingCount = rateLimitState.get(conversationId)?.retryCount || 0;
            // Retry budget exhausted: surface a terminal error instead.
            if (existingCount >= 3) {
              batcherRef.batcher.drain();
              activeExecutions.delete(conversationId);
              queries.setIsStreaming(conversationId, false);
              const errMsg = queries.createMessage(conversationId, 'assistant', `Error: Rate limit exceeded after ${existingCount + 1} attempts. Please try again later.`);
              broadcastSync({ type: 'message_created', conversationId, message: errMsg, timestamp: Date.now() });
              broadcastSync({ type: 'streaming_complete', sessionId, conversationId, interrupted: true, timestamp: Date.now() });
              return;
            }
            // isStreamDetected tells processMessageWithStreaming to skip its
            // own success/catch handling for this execution.
            rateLimitState.set(conversationId, { retryAt: Date.now() + (retryAfterSec * 1000), cooldownMs: retryAfterSec * 1000, retryCount: existingCount + 1, isStreamDetected: true });
            // NOTE(review): retryCount is hard-coded to 1 in this broadcast,
            // while the result-branch below broadcasts existingCount2 + 1 —
            // confirm whether this should be existingCount + 1.
            broadcastSync({ type: 'rate_limit_hit', sessionId, conversationId, retryAfterMs: retryAfterSec * 1000, retryAt: Date.now() + (retryAfterSec * 1000), retryCount: 1, timestamp: Date.now() });
            batcherRef.batcher.drain();
            activeExecutions.delete(conversationId);
            queries.setIsStreaming(conversationId, false);
            setTimeout(() => {
              rateLimitState.delete(conversationId);
              broadcastSync({ type: 'rate_limit_clear', conversationId, timestamp: Date.now() });
              scheduleRetry(conversationId, messageId, content, agentId, model, subAgent);
            }, retryAfterSec * 1000);
            return;
          }
          eagerTTS(block.text, conversationId, sessionId);
        }
      }
    } else if (parsed.type === 'user' && parsed.message?.content) {
      // Only tool_result blocks from user-role events are persisted.
      for (const block of parsed.message.content) {
        if (block.type === 'tool_result') {
          const toolResultBlock = { type: 'tool_result', tool_use_id: block.tool_use_id, content: typeof block.content === 'string' ? block.content : JSON.stringify(block.content), is_error: block.is_error || false };
          currentSequenceRef.val++;
          batcherRef.batcher.add(sessionId, conversationId, currentSequenceRef.val, 'tool_result', toolResultBlock);
          broadcastSync({ type: 'streaming_progress', sessionId, conversationId, block: toolResultBlock, blockRole: 'tool_result', blockIndex: allBlocksRef.val.length, seq: currentSequenceRef.val, timestamp: Date.now() });
        }
      }
    } else if (parsed.type === 'result') {
      const resultBlock = { type: 'result', subtype: parsed.subtype, duration_ms: parsed.duration_ms, total_cost_usd: parsed.total_cost_usd, num_turns: parsed.num_turns, is_error: parsed.is_error || false, result: parsed.result };
      currentSequenceRef.val++;
      batcherRef.batcher.add(sessionId, conversationId, currentSequenceRef.val, 'result', resultBlock);
      broadcastSync({ type: 'streaming_progress', sessionId, conversationId, block: resultBlock, blockRole: 'result', blockIndex: allBlocksRef.val.length, isResult: true, seq: currentSequenceRef.val, timestamp: Date.now() });
      if (parsed.result) {
        const resultText = typeof parsed.result === 'string' ? parsed.result : JSON.stringify(parsed.result);
        // Same rate-limit detection as the assistant-text branch, but driven
        // from the final result payload.
        const rlMatch = resultText.match(/you'?ve hit your limit|rate limit exceeded/i);
        if (rlMatch) {
          debugLog(`[rate-limit] Detected rate limit in result for conv ${conversationId}`);
          const retryAfterSec = parseRateLimitResetTime(resultText);
          const entry3 = activeExecutions.get(conversationId);
          // Best-effort kill; the process may already be gone.
          if (entry3 && entry3.pid) { try { process.kill(entry3.pid); } catch (e) {} }
          const existingCount2 = rateLimitState.get(conversationId)?.retryCount || 0;
          if (existingCount2 >= 3) {
            batcherRef.batcher.drain();
            activeExecutions.delete(conversationId);
            queries.setIsStreaming(conversationId, false);
            const errMsg2 = queries.createMessage(conversationId, 'assistant', `Error: Rate limit exceeded after ${existingCount2 + 1} attempts. Please try again later.`);
            broadcastSync({ type: 'message_created', conversationId, message: errMsg2, timestamp: Date.now() });
            broadcastSync({ type: 'streaming_complete', sessionId, conversationId, interrupted: true, timestamp: Date.now() });
            return;
          }
          rateLimitState.set(conversationId, { retryAt: Date.now() + (retryAfterSec * 1000), cooldownMs: retryAfterSec * 1000, retryCount: existingCount2 + 1, isStreamDetected: true });
          broadcastSync({ type: 'rate_limit_hit', sessionId, conversationId, retryAfterMs: retryAfterSec * 1000, retryAt: Date.now() + (retryAfterSec * 1000), retryCount: existingCount2 + 1, timestamp: Date.now() });
          batcherRef.batcher.drain();
          activeExecutions.delete(conversationId);
          queries.setIsStreaming(conversationId, false);
          setTimeout(() => {
            rateLimitState.delete(conversationId);
            broadcastSync({ type: 'rate_limit_clear', conversationId, timestamp: Date.now() });
            scheduleRetry(conversationId, messageId, content, agentId, model, subAgent);
          }, retryAfterSec * 1000);
          return;
        }
        if (resultText) eagerTTS(resultText, conversationId, sessionId);
      }
      // Fallback: if no assistant blocks arrived, surface the result text.
      if (parsed.result && allBlocksRef.val.length === 0) allBlocksRef.val.push({ type: 'text', text: String(parsed.result) });
    } else if (parsed.type === 'tool_status') {
      broadcastSync({ type: 'streaming_progress', sessionId, conversationId, block: { type: 'tool_status', tool_use_id: parsed.tool_use_id, status: parsed.status }, seq: currentSequenceRef.val, timestamp: Date.now() });
    } else if (parsed.type === 'usage') {
      broadcastSync({ type: 'streaming_progress', sessionId, conversationId, block: { type: 'usage', usage: parsed.usage }, seq: currentSequenceRef.val, timestamp: Date.now() });
    } else if (parsed.type === 'plan') {
      broadcastSync({ type: 'streaming_progress', sessionId, conversationId, block: { type: 'plan', entries: parsed.entries }, seq: currentSequenceRef.val, timestamp: Date.now() });
    }
  };
}
|
package/package.json
CHANGED
package/server.js
CHANGED
|
@@ -53,6 +53,10 @@ import CheckpointManager from './lib/checkpoint-manager.js';
|
|
|
53
53
|
import { JsonlWatcher } from './lib/jsonl-watcher.js';
|
|
54
54
|
import { createBroadcast } from './lib/broadcast.js';
|
|
55
55
|
import { createRecovery } from './lib/recovery.js';
|
|
56
|
+
import { parseRateLimitResetTime } from './lib/process-message-rate-limit.js';
|
|
57
|
+
import { createEventHandler } from './lib/stream-event-handler.js';
|
|
58
|
+
import { createMessageQueue } from './lib/message-queue.js';
|
|
59
|
+
import { createProcessMessage } from './lib/process-message.js';
|
|
56
60
|
|
|
57
61
|
|
|
58
62
|
process.on('uncaughtException', (err, origin) => {
|
|
@@ -1659,680 +1663,6 @@ function serveFile(filePath, res, req) { return _serveFile(filePath, res, req, _
|
|
|
1659
1663
|
|
|
1660
1664
|
let broadcastSeq = 0;
|
|
1661
1665
|
|
|
1662
|
-
function parseRateLimitResetTime(text) {
|
|
1663
|
-
const match = text.match(/resets?\s+(?:at\s+)?(\d{1,2})(?::(\d{2}))?\s*(am|pm)?\s*\(?(UTC|[A-Z]{2,4})\)?/i);
|
|
1664
|
-
if (!match) return 300;
|
|
1665
|
-
let hours = parseInt(match[1], 10);
|
|
1666
|
-
const minutes = match[2] ? parseInt(match[2], 10) : 0;
|
|
1667
|
-
const period = match[3]?.toLowerCase();
|
|
1668
|
-
if (period === 'pm' && hours !== 12) hours += 12;
|
|
1669
|
-
if (period === 'am' && hours === 12) hours = 0;
|
|
1670
|
-
const now = new Date();
|
|
1671
|
-
const resetTime = new Date(now);
|
|
1672
|
-
resetTime.setUTCHours(hours, minutes, 0, 0);
|
|
1673
|
-
if (resetTime <= now) resetTime.setUTCDate(resetTime.getUTCDate() + 1);
|
|
1674
|
-
return Math.max(60, Math.ceil((resetTime.getTime() - now.getTime()) / 1000));
|
|
1675
|
-
}
|
|
1676
|
-
|
|
1677
|
-
async function processMessageWithStreaming(conversationId, messageId, sessionId, content, agentId, model, subAgent) {
|
|
1678
|
-
const startTime = Date.now();
|
|
1679
|
-
touchACP(agentId);
|
|
1680
|
-
|
|
1681
|
-
const conv = queries.getConversation(conversationId);
|
|
1682
|
-
if (!conv) {
|
|
1683
|
-
console.error(`[stream] Conversation ${conversationId} not found, aborting`);
|
|
1684
|
-
queries.updateSession(sessionId, { status: 'error', error: 'Conversation not found' });
|
|
1685
|
-
queries.setIsStreaming(conversationId, false);
|
|
1686
|
-
return;
|
|
1687
|
-
}
|
|
1688
|
-
|
|
1689
|
-
if (activeExecutions.has(conversationId)) {
|
|
1690
|
-
const existing = activeExecutions.get(conversationId);
|
|
1691
|
-
if (existing.sessionId !== sessionId) {
|
|
1692
|
-
debugLog(`[stream] Conversation ${conversationId} already has active execution (different session), aborting duplicate`);
|
|
1693
|
-
return;
|
|
1694
|
-
}
|
|
1695
|
-
}
|
|
1696
|
-
|
|
1697
|
-
if (rateLimitState.has(conversationId)) {
|
|
1698
|
-
const rlState = rateLimitState.get(conversationId);
|
|
1699
|
-
if (rlState.retryAt > Date.now()) {
|
|
1700
|
-
debugLog(`[stream] Conversation ${conversationId} is in rate limit cooldown, aborting`);
|
|
1701
|
-
return;
|
|
1702
|
-
}
|
|
1703
|
-
}
|
|
1704
|
-
|
|
1705
|
-
activeExecutions.set(conversationId, { pid: null, startTime, sessionId, lastActivity: startTime });
|
|
1706
|
-
execMachine.send(conversationId, { type: 'START', sessionId });
|
|
1707
|
-
queries.setIsStreaming(conversationId, true);
|
|
1708
|
-
queries.updateSession(sessionId, { status: 'active' });
|
|
1709
|
-
const batcher = createChunkBatcher(queries, debugLog);
|
|
1710
|
-
|
|
1711
|
-
try {
|
|
1712
|
-
debugLog(`[stream] Starting: conversationId=${conversationId}, sessionId=${sessionId}`);
|
|
1713
|
-
|
|
1714
|
-
const cwd = conv?.workingDirectory || STARTUP_CWD;
|
|
1715
|
-
let resumeSessionId = conv?.claudeSessionId || null;
|
|
1716
|
-
|
|
1717
|
-
let allBlocks = [];
|
|
1718
|
-
let eventCount = 0;
|
|
1719
|
-
let currentSequence = queries.getMaxSequence(sessionId) ?? -1;
|
|
1720
|
-
|
|
1721
|
-
const onEvent = (parsed) => {
|
|
1722
|
-
eventCount++;
|
|
1723
|
-
const entry = activeExecutions.get(conversationId);
|
|
1724
|
-
if (entry) entry.lastActivity = Date.now();
|
|
1725
|
-
if (parsed.session_id) {
|
|
1726
|
-
ownedSessionIds.add(parsed.session_id);
|
|
1727
|
-
if (!resumeSessionId || resumeSessionId !== parsed.session_id) {
|
|
1728
|
-
resumeSessionId = parsed.session_id;
|
|
1729
|
-
queries.setClaudeSessionId(conversationId, parsed.session_id, sessionId);
|
|
1730
|
-
}
|
|
1731
|
-
}
|
|
1732
|
-
debugLog(`[stream] Event ${eventCount}: type=${parsed.type}`);
|
|
1733
|
-
|
|
1734
|
-
if (parsed.type === 'system') {
|
|
1735
|
-
if (parsed.subtype === 'task_notification') return;
|
|
1736
|
-
if (!parsed.model && !parsed.cwd && !parsed.tools) return;
|
|
1737
|
-
|
|
1738
|
-
const systemBlock = {
|
|
1739
|
-
type: 'system',
|
|
1740
|
-
subtype: parsed.subtype,
|
|
1741
|
-
model: parsed.model,
|
|
1742
|
-
cwd: parsed.cwd,
|
|
1743
|
-
tools: parsed.tools,
|
|
1744
|
-
session_id: parsed.session_id
|
|
1745
|
-
};
|
|
1746
|
-
|
|
1747
|
-
currentSequence++;
|
|
1748
|
-
batcher.add(sessionId, conversationId, currentSequence, 'system', systemBlock);
|
|
1749
|
-
|
|
1750
|
-
broadcastSync({
|
|
1751
|
-
type: 'streaming_progress',
|
|
1752
|
-
sessionId,
|
|
1753
|
-
conversationId,
|
|
1754
|
-
block: systemBlock,
|
|
1755
|
-
blockRole: 'system',
|
|
1756
|
-
blockIndex: allBlocks.length,
|
|
1757
|
-
seq: currentSequence,
|
|
1758
|
-
timestamp: Date.now()
|
|
1759
|
-
});
|
|
1760
|
-
} else if (parsed.type === 'assistant' && parsed.message?.content) {
|
|
1761
|
-
for (const block of parsed.message.content) {
|
|
1762
|
-
allBlocks.push(block);
|
|
1763
|
-
|
|
1764
|
-
currentSequence++;
|
|
1765
|
-
batcher.add(sessionId, conversationId, currentSequence, block.type || 'assistant', block);
|
|
1766
|
-
|
|
1767
|
-
broadcastSync({
|
|
1768
|
-
type: 'streaming_progress',
|
|
1769
|
-
sessionId,
|
|
1770
|
-
conversationId,
|
|
1771
|
-
block,
|
|
1772
|
-
blockRole: 'assistant',
|
|
1773
|
-
blockIndex: allBlocks.length - 1,
|
|
1774
|
-
seq: currentSequence,
|
|
1775
|
-
timestamp: Date.now()
|
|
1776
|
-
});
|
|
1777
|
-
|
|
1778
|
-
if (block.type === 'text' && block.text) {
|
|
1779
|
-
// Check for rate limit message in text content
|
|
1780
|
-
const rateLimitTextMatch = block.text.match(/you'?ve hit your limit|rate limit exceeded/i);
|
|
1781
|
-
if (rateLimitTextMatch) {
|
|
1782
|
-
debugLog(`[rate-limit] Detected rate limit message in stream for conv ${conversationId}`);
|
|
1783
|
-
|
|
1784
|
-
const retryAfterSec = parseRateLimitResetTime(block.text);
|
|
1785
|
-
debugLog(`[rate-limit] Parsed reset time, retry in ${retryAfterSec}s`);
|
|
1786
|
-
|
|
1787
|
-
// Kill the running process
|
|
1788
|
-
const entry = activeExecutions.get(conversationId);
|
|
1789
|
-
if (entry && entry.pid) {
|
|
1790
|
-
try {
|
|
1791
|
-
process.kill(entry.pid);
|
|
1792
|
-
debugLog(`[rate-limit] Killed process ${entry.pid} for conv ${conversationId}`);
|
|
1793
|
-
} catch (e) {
|
|
1794
|
-
debugLog(`[rate-limit] Failed to kill process: ${e.message}`);
|
|
1795
|
-
}
|
|
1796
|
-
}
|
|
1797
|
-
|
|
1798
|
-
// Set flag to stop processing and trigger retry
|
|
1799
|
-
const existingRetryCount = rateLimitState.get(conversationId)?.retryCount || 0;
|
|
1800
|
-
if (existingRetryCount >= 3) {
|
|
1801
|
-
debugLog(`[rate-limit] Conv ${conversationId} stream rate limit hit ${existingRetryCount + 1} times, giving up`);
|
|
1802
|
-
batcher.drain();
|
|
1803
|
-
activeExecutions.delete(conversationId);
|
|
1804
|
-
queries.setIsStreaming(conversationId, false);
|
|
1805
|
-
const errorMessage = queries.createMessage(conversationId, 'assistant', `Error: Rate limit exceeded after ${existingRetryCount + 1} attempts. Please try again later.`);
|
|
1806
|
-
broadcastSync({ type: 'message_created', conversationId, message: errorMessage, timestamp: Date.now() });
|
|
1807
|
-
broadcastSync({ type: 'streaming_complete', sessionId, conversationId, interrupted: true, timestamp: Date.now() });
|
|
1808
|
-
return;
|
|
1809
|
-
}
|
|
1810
|
-
rateLimitState.set(conversationId, {
|
|
1811
|
-
retryAt: Date.now() + (retryAfterSec * 1000),
|
|
1812
|
-
cooldownMs: retryAfterSec * 1000,
|
|
1813
|
-
retryCount: existingRetryCount + 1,
|
|
1814
|
-
isStreamDetected: true
|
|
1815
|
-
});
|
|
1816
|
-
|
|
1817
|
-
// Broadcast rate limit event
|
|
1818
|
-
broadcastSync({
|
|
1819
|
-
type: 'rate_limit_hit',
|
|
1820
|
-
sessionId,
|
|
1821
|
-
conversationId,
|
|
1822
|
-
retryAfterMs: retryAfterSec * 1000,
|
|
1823
|
-
retryAt: Date.now() + (retryAfterSec * 1000),
|
|
1824
|
-
retryCount: 1,
|
|
1825
|
-
timestamp: Date.now()
|
|
1826
|
-
});
|
|
1827
|
-
|
|
1828
|
-
batcher.drain();
|
|
1829
|
-
activeExecutions.delete(conversationId);
|
|
1830
|
-
queries.setIsStreaming(conversationId, false);
|
|
1831
|
-
|
|
1832
|
-
// Schedule retry
|
|
1833
|
-
setTimeout(() => {
|
|
1834
|
-
rateLimitState.delete(conversationId);
|
|
1835
|
-
broadcastSync({
|
|
1836
|
-
type: 'rate_limit_clear',
|
|
1837
|
-
conversationId,
|
|
1838
|
-
timestamp: Date.now()
|
|
1839
|
-
});
|
|
1840
|
-
scheduleRetry(conversationId, messageId, content, agentId, model, subAgent);
|
|
1841
|
-
}, retryAfterSec * 1000);
|
|
1842
|
-
|
|
1843
|
-
return; // Stop processing events
|
|
1844
|
-
}
|
|
1845
|
-
|
|
1846
|
-
eagerTTS(block.text, conversationId, sessionId);
|
|
1847
|
-
}
|
|
1848
|
-
}
|
|
1849
|
-
} else if (parsed.type === 'user' && parsed.message?.content) {
|
|
1850
|
-
for (const block of parsed.message.content) {
|
|
1851
|
-
if (block.type === 'tool_result') {
|
|
1852
|
-
const toolResultBlock = {
|
|
1853
|
-
type: 'tool_result',
|
|
1854
|
-
tool_use_id: block.tool_use_id,
|
|
1855
|
-
content: typeof block.content === 'string' ? block.content : JSON.stringify(block.content),
|
|
1856
|
-
is_error: block.is_error || false
|
|
1857
|
-
};
|
|
1858
|
-
|
|
1859
|
-
currentSequence++;
|
|
1860
|
-
batcher.add(sessionId, conversationId, currentSequence, 'tool_result', toolResultBlock);
|
|
1861
|
-
|
|
1862
|
-
broadcastSync({
|
|
1863
|
-
type: 'streaming_progress',
|
|
1864
|
-
sessionId,
|
|
1865
|
-
conversationId,
|
|
1866
|
-
block: toolResultBlock,
|
|
1867
|
-
blockRole: 'tool_result',
|
|
1868
|
-
blockIndex: allBlocks.length,
|
|
1869
|
-
seq: currentSequence,
|
|
1870
|
-
timestamp: Date.now()
|
|
1871
|
-
});
|
|
1872
|
-
}
|
|
1873
|
-
}
|
|
1874
|
-
} else if (parsed.type === 'result') {
|
|
1875
|
-
const resultBlock = {
|
|
1876
|
-
type: 'result',
|
|
1877
|
-
subtype: parsed.subtype,
|
|
1878
|
-
duration_ms: parsed.duration_ms,
|
|
1879
|
-
total_cost_usd: parsed.total_cost_usd,
|
|
1880
|
-
num_turns: parsed.num_turns,
|
|
1881
|
-
is_error: parsed.is_error || false,
|
|
1882
|
-
result: parsed.result
|
|
1883
|
-
};
|
|
1884
|
-
|
|
1885
|
-
currentSequence++;
|
|
1886
|
-
batcher.add(sessionId, conversationId, currentSequence, 'result', resultBlock);
|
|
1887
|
-
|
|
1888
|
-
broadcastSync({
|
|
1889
|
-
type: 'streaming_progress',
|
|
1890
|
-
sessionId,
|
|
1891
|
-
conversationId,
|
|
1892
|
-
block: resultBlock,
|
|
1893
|
-
blockRole: 'result',
|
|
1894
|
-
blockIndex: allBlocks.length,
|
|
1895
|
-
isResult: true,
|
|
1896
|
-
seq: currentSequence,
|
|
1897
|
-
timestamp: Date.now()
|
|
1898
|
-
});
|
|
1899
|
-
|
|
1900
|
-
if (parsed.result) {
|
|
1901
|
-
const resultText = typeof parsed.result === 'string' ? parsed.result : JSON.stringify(parsed.result);
|
|
1902
|
-
|
|
1903
|
-
// Check for rate limit message in result
|
|
1904
|
-
const rateLimitResultMatch = resultText.match(/you'?ve hit your limit|rate limit exceeded/i);
|
|
1905
|
-
if (rateLimitResultMatch) {
|
|
1906
|
-
debugLog(`[rate-limit] Detected rate limit in result for conv ${conversationId}`);
|
|
1907
|
-
|
|
1908
|
-
const retryAfterSec = parseRateLimitResetTime(resultText);
|
|
1909
|
-
|
|
1910
|
-
const entry = activeExecutions.get(conversationId);
|
|
1911
|
-
if (entry && entry.pid) {
|
|
1912
|
-
try {
|
|
1913
|
-
process.kill(entry.pid);
|
|
1914
|
-
} catch (e) {}
|
|
1915
|
-
}
|
|
1916
|
-
|
|
1917
|
-
const existingRetryCount2 = rateLimitState.get(conversationId)?.retryCount || 0;
|
|
1918
|
-
if (existingRetryCount2 >= 3) {
|
|
1919
|
-
debugLog(`[rate-limit] Conv ${conversationId} result rate limit hit ${existingRetryCount2 + 1} times, giving up`);
|
|
1920
|
-
batcher.drain();
|
|
1921
|
-
activeExecutions.delete(conversationId);
|
|
1922
|
-
queries.setIsStreaming(conversationId, false);
|
|
1923
|
-
const errorMessage = queries.createMessage(conversationId, 'assistant', `Error: Rate limit exceeded after ${existingRetryCount2 + 1} attempts. Please try again later.`);
|
|
1924
|
-
broadcastSync({ type: 'message_created', conversationId, message: errorMessage, timestamp: Date.now() });
|
|
1925
|
-
broadcastSync({ type: 'streaming_complete', sessionId, conversationId, interrupted: true, timestamp: Date.now() });
|
|
1926
|
-
return;
|
|
1927
|
-
}
|
|
1928
|
-
rateLimitState.set(conversationId, {
|
|
1929
|
-
retryAt: Date.now() + (retryAfterSec * 1000),
|
|
1930
|
-
cooldownMs: retryAfterSec * 1000,
|
|
1931
|
-
retryCount: existingRetryCount2 + 1,
|
|
1932
|
-
isStreamDetected: true
|
|
1933
|
-
});
|
|
1934
|
-
|
|
1935
|
-
broadcastSync({
|
|
1936
|
-
type: 'rate_limit_hit',
|
|
1937
|
-
sessionId,
|
|
1938
|
-
conversationId,
|
|
1939
|
-
retryAfterMs: retryAfterSec * 1000,
|
|
1940
|
-
retryAt: Date.now() + (retryAfterSec * 1000),
|
|
1941
|
-
retryCount: existingRetryCount2 + 1,
|
|
1942
|
-
timestamp: Date.now()
|
|
1943
|
-
});
|
|
1944
|
-
|
|
1945
|
-
batcher.drain();
|
|
1946
|
-
activeExecutions.delete(conversationId);
|
|
1947
|
-
queries.setIsStreaming(conversationId, false);
|
|
1948
|
-
|
|
1949
|
-
setTimeout(() => {
|
|
1950
|
-
rateLimitState.delete(conversationId);
|
|
1951
|
-
broadcastSync({
|
|
1952
|
-
type: 'rate_limit_clear',
|
|
1953
|
-
conversationId,
|
|
1954
|
-
timestamp: Date.now()
|
|
1955
|
-
});
|
|
1956
|
-
scheduleRetry(conversationId, messageId, content, agentId, model, subAgent);
|
|
1957
|
-
}, retryAfterSec * 1000);
|
|
1958
|
-
|
|
1959
|
-
return;
|
|
1960
|
-
}
|
|
1961
|
-
|
|
1962
|
-
if (resultText) eagerTTS(resultText, conversationId, sessionId);
|
|
1963
|
-
}
|
|
1964
|
-
|
|
1965
|
-
if (parsed.result && allBlocks.length === 0) {
|
|
1966
|
-
allBlocks.push({ type: 'text', text: String(parsed.result) });
|
|
1967
|
-
}
|
|
1968
|
-
} else if (parsed.type === 'tool_status') {
|
|
1969
|
-
// Handle ACP tool status updates (in_progress, pending)
|
|
1970
|
-
broadcastSync({
|
|
1971
|
-
type: 'streaming_progress',
|
|
1972
|
-
sessionId,
|
|
1973
|
-
conversationId,
|
|
1974
|
-
block: {
|
|
1975
|
-
type: 'tool_status',
|
|
1976
|
-
tool_use_id: parsed.tool_use_id,
|
|
1977
|
-
status: parsed.status
|
|
1978
|
-
},
|
|
1979
|
-
seq: currentSequence,
|
|
1980
|
-
timestamp: Date.now()
|
|
1981
|
-
});
|
|
1982
|
-
} else if (parsed.type === 'usage') {
|
|
1983
|
-
// Handle ACP usage updates
|
|
1984
|
-
broadcastSync({
|
|
1985
|
-
type: 'streaming_progress',
|
|
1986
|
-
sessionId,
|
|
1987
|
-
conversationId,
|
|
1988
|
-
block: {
|
|
1989
|
-
type: 'usage',
|
|
1990
|
-
usage: parsed.usage
|
|
1991
|
-
},
|
|
1992
|
-
seq: currentSequence,
|
|
1993
|
-
timestamp: Date.now()
|
|
1994
|
-
});
|
|
1995
|
-
} else if (parsed.type === 'plan') {
|
|
1996
|
-
// Handle ACP plan updates
|
|
1997
|
-
broadcastSync({
|
|
1998
|
-
type: 'streaming_progress',
|
|
1999
|
-
sessionId,
|
|
2000
|
-
conversationId,
|
|
2001
|
-
block: {
|
|
2002
|
-
type: 'plan',
|
|
2003
|
-
entries: parsed.entries
|
|
2004
|
-
},
|
|
2005
|
-
seq: currentSequence,
|
|
2006
|
-
timestamp: Date.now()
|
|
2007
|
-
});
|
|
2008
|
-
}
|
|
2009
|
-
};
|
|
2010
|
-
|
|
2011
|
-
const resolvedModel = model || conv?.model || null;
|
|
2012
|
-
const resolvedSubAgent = subAgent || conv?.subAgent || null;
|
|
2013
|
-
const unifiedSystemPrompt = buildSystemPrompt(agentId, resolvedModel, resolvedSubAgent);
|
|
2014
|
-
const config = {
|
|
2015
|
-
verbose: true,
|
|
2016
|
-
outputFormat: 'stream-json',
|
|
2017
|
-
timeout: 1800000,
|
|
2018
|
-
print: true,
|
|
2019
|
-
resumeSessionId,
|
|
2020
|
-
systemPrompt: unifiedSystemPrompt,
|
|
2021
|
-
model: resolvedModel || undefined,
|
|
2022
|
-
subAgent: resolvedSubAgent || undefined,
|
|
2023
|
-
onEvent,
|
|
2024
|
-
onPid: (pid) => {
|
|
2025
|
-
const entry = activeExecutions.get(conversationId);
|
|
2026
|
-
if (entry) entry.pid = pid;
|
|
2027
|
-
execMachine.send(conversationId, { type: 'SET_PID', pid });
|
|
2028
|
-
},
|
|
2029
|
-
onProcess: (proc) => {
|
|
2030
|
-
const entry = activeExecutions.get(conversationId);
|
|
2031
|
-
if (entry) entry.proc = proc;
|
|
2032
|
-
execMachine.send(conversationId, { type: 'SET_PROC', proc });
|
|
2033
|
-
}
|
|
2034
|
-
};
|
|
2035
|
-
|
|
2036
|
-
// Resolve cli-wrapper agent IDs (e.g. cli-kilo → kilo) to their underlying registered agent
|
|
2037
|
-
let resolvedAgentId = agentId || 'claude-code';
|
|
2038
|
-
const wrapperAgent = discoveredAgents.find(a => a.id === resolvedAgentId && a.protocol === 'cli-wrapper' && a.acpId);
|
|
2039
|
-
if (wrapperAgent) resolvedAgentId = wrapperAgent.acpId;
|
|
2040
|
-
|
|
2041
|
-
const { outputs, sessionId: claudeSessionId } = await runClaudeWithStreaming(content, cwd, resolvedAgentId, config);
|
|
2042
|
-
|
|
2043
|
-
// Check if rate limit was already handled in stream detection
|
|
2044
|
-
if (rateLimitState.get(conversationId)?.isStreamDetected) {
|
|
2045
|
-
debugLog(`[rate-limit] Rate limit already handled in stream for conv ${conversationId}, skipping success handler`);
|
|
2046
|
-
return;
|
|
2047
|
-
}
|
|
2048
|
-
|
|
2049
|
-
activeExecutions.delete(conversationId);
|
|
2050
|
-
execMachine.send(conversationId, { type: 'COMPLETE' });
|
|
2051
|
-
batcher.drain();
|
|
2052
|
-
if (claudeSessionId) ownedSessionIds.delete(claudeSessionId);
|
|
2053
|
-
debugLog(`[stream] Claude returned ${outputs.length} outputs, sessionId=${claudeSessionId}`);
|
|
2054
|
-
|
|
2055
|
-
debugLog(`[stream] Keeping claudeSessionId=${claudeSessionId} for session continuity`);
|
|
2056
|
-
|
|
2057
|
-
// Mark session as complete
|
|
2058
|
-
queries.updateSession(sessionId, {
|
|
2059
|
-
status: 'complete',
|
|
2060
|
-
response: JSON.stringify({ outputs, eventCount }),
|
|
2061
|
-
completed_at: Date.now()
|
|
2062
|
-
});
|
|
2063
|
-
|
|
2064
|
-
broadcastSync({
|
|
2065
|
-
type: 'streaming_complete',
|
|
2066
|
-
sessionId,
|
|
2067
|
-
conversationId,
|
|
2068
|
-
agentId,
|
|
2069
|
-
eventCount,
|
|
2070
|
-
seq: currentSequence,
|
|
2071
|
-
timestamp: Date.now()
|
|
2072
|
-
});
|
|
2073
|
-
|
|
2074
|
-
debugLog(`[stream] Completed: ${outputs.length} outputs, ${eventCount} events`);
|
|
2075
|
-
} catch (error) {
|
|
2076
|
-
const elapsed = Date.now() - startTime;
|
|
2077
|
-
debugLog(`[stream] Error after ${elapsed}ms: ${error.message}`);
|
|
2078
|
-
const conv2 = queries.getConversation(conversationId);
|
|
2079
|
-
if (conv2?.claudeSessionId) ownedSessionIds.delete(conv2.claudeSessionId);
|
|
2080
|
-
|
|
2081
|
-
// Check if rate limit was already handled in stream detection
|
|
2082
|
-
const existingState = rateLimitState.get(conversationId);
|
|
2083
|
-
if (existingState?.isStreamDetected) {
|
|
2084
|
-
debugLog(`[rate-limit] Rate limit already handled in stream for conv ${conversationId}, skipping catch handler`);
|
|
2085
|
-
return;
|
|
2086
|
-
}
|
|
2087
|
-
|
|
2088
|
-
const isAuthError = error.authError || error.nonRetryable ||
|
|
2089
|
-
/401|unauthorized|invalid.*auth|invalid.*token|auth.*failed|permission denied|access denied/i.test(error.message);
|
|
2090
|
-
|
|
2091
|
-
const isRateLimit = error.rateLimited ||
|
|
2092
|
-
/rate.?limit|429|too many requests|overloaded|throttl/i.test(error.message);
|
|
2093
|
-
|
|
2094
|
-
queries.updateSession(sessionId, {
|
|
2095
|
-
status: 'error',
|
|
2096
|
-
error: error.message,
|
|
2097
|
-
completed_at: Date.now()
|
|
2098
|
-
});
|
|
2099
|
-
|
|
2100
|
-
if (isAuthError) {
|
|
2101
|
-
debugLog(`[auth-error] Auth error for conv ${conversationId}: ${error.message}`);
|
|
2102
|
-
broadcastSync({
|
|
2103
|
-
type: 'streaming_error',
|
|
2104
|
-
sessionId,
|
|
2105
|
-
conversationId,
|
|
2106
|
-
error: `Authentication failed: ${error.message}. Please check your API credentials.`,
|
|
2107
|
-
recoverable: false,
|
|
2108
|
-
isAuthError: true,
|
|
2109
|
-
timestamp: Date.now()
|
|
2110
|
-
});
|
|
2111
|
-
const errorMessage = queries.createMessage(conversationId, 'assistant', `Error: Authentication failed. ${error.message}. Please update your credentials and try again.`);
|
|
2112
|
-
broadcastSync({
|
|
2113
|
-
type: 'message_created',
|
|
2114
|
-
conversationId,
|
|
2115
|
-
message: errorMessage,
|
|
2116
|
-
timestamp: Date.now()
|
|
2117
|
-
});
|
|
2118
|
-
queries.setIsStreaming(conversationId, false);
|
|
2119
|
-
batcher.drain();
|
|
2120
|
-
activeExecutions.delete(conversationId);
|
|
2121
|
-
return;
|
|
2122
|
-
}
|
|
2123
|
-
|
|
2124
|
-
if (isRateLimit) {
|
|
2125
|
-
const existingState = rateLimitState.get(conversationId) || {};
|
|
2126
|
-
const retryCount = (existingState.retryCount || 0) + 1;
|
|
2127
|
-
const maxRateLimitRetries = 3;
|
|
2128
|
-
|
|
2129
|
-
if (retryCount > maxRateLimitRetries) {
|
|
2130
|
-
debugLog(`[rate-limit] Conv ${conversationId} hit rate limit ${retryCount} times, giving up`);
|
|
2131
|
-
broadcastSync({
|
|
2132
|
-
type: 'streaming_error',
|
|
2133
|
-
sessionId,
|
|
2134
|
-
conversationId,
|
|
2135
|
-
error: `Rate limit exceeded after ${retryCount} attempts. Please try again later.`,
|
|
2136
|
-
recoverable: false,
|
|
2137
|
-
timestamp: Date.now()
|
|
2138
|
-
});
|
|
2139
|
-
const errorMessage = queries.createMessage(conversationId, 'assistant', `Error: Rate limit exceeded after ${retryCount} attempts. Please try again later.`);
|
|
2140
|
-
broadcastSync({
|
|
2141
|
-
type: 'message_created',
|
|
2142
|
-
conversationId,
|
|
2143
|
-
message: errorMessage,
|
|
2144
|
-
timestamp: Date.now()
|
|
2145
|
-
});
|
|
2146
|
-
queries.setIsStreaming(conversationId, false);
|
|
2147
|
-
return;
|
|
2148
|
-
}
|
|
2149
|
-
|
|
2150
|
-
const cooldownMs = (error.retryAfterSec || 60) * 1000;
|
|
2151
|
-
const retryAt = Date.now() + cooldownMs;
|
|
2152
|
-
rateLimitState.set(conversationId, { retryAt, cooldownMs, retryCount });
|
|
2153
|
-
debugLog(`[rate-limit] Conv ${conversationId} hit rate limit (attempt ${retryCount}/${maxRateLimitRetries}), retry in ${cooldownMs}ms`);
|
|
2154
|
-
|
|
2155
|
-
broadcastSync({
|
|
2156
|
-
type: 'rate_limit_hit',
|
|
2157
|
-
sessionId,
|
|
2158
|
-
conversationId,
|
|
2159
|
-
retryAfterMs: cooldownMs,
|
|
2160
|
-
retryAt,
|
|
2161
|
-
retryCount,
|
|
2162
|
-
timestamp: Date.now()
|
|
2163
|
-
});
|
|
2164
|
-
|
|
2165
|
-
batcher.drain();
|
|
2166
|
-
|
|
2167
|
-
debugLog(`[rate-limit] Scheduling retry for conv ${conversationId} in ${cooldownMs}ms (attempt ${retryCount + 1})`);
|
|
2168
|
-
|
|
2169
|
-
setTimeout(() => {
|
|
2170
|
-
debugLog(`[rate-limit] Timeout fired for conv ${conversationId}, calling scheduleRetry`);
|
|
2171
|
-
rateLimitState.delete(conversationId);
|
|
2172
|
-
debugLog(`[rate-limit] Conv ${conversationId} cooldown expired, restarting (attempt ${retryCount + 1})`);
|
|
2173
|
-
broadcastSync({
|
|
2174
|
-
type: 'rate_limit_clear',
|
|
2175
|
-
conversationId,
|
|
2176
|
-
timestamp: Date.now()
|
|
2177
|
-
});
|
|
2178
|
-
scheduleRetry(conversationId, messageId, content, agentId, model, subAgent);
|
|
2179
|
-
}, cooldownMs);
|
|
2180
|
-
return;
|
|
2181
|
-
}
|
|
2182
|
-
|
|
2183
|
-
const isSessionConflict = error.exitCode === null && eventCount === 0;
|
|
2184
|
-
|
|
2185
|
-
broadcastSync({
|
|
2186
|
-
type: 'streaming_error',
|
|
2187
|
-
sessionId,
|
|
2188
|
-
conversationId,
|
|
2189
|
-
error: error.message,
|
|
2190
|
-
isPrematureEnd: error.isPrematureEnd || false,
|
|
2191
|
-
exitCode: error.exitCode,
|
|
2192
|
-
stderrText: error.stderrText,
|
|
2193
|
-
recoverable: elapsed < 60000,
|
|
2194
|
-
isSessionConflict,
|
|
2195
|
-
timestamp: Date.now()
|
|
2196
|
-
});
|
|
2197
|
-
|
|
2198
|
-
if (!isSessionConflict) {
|
|
2199
|
-
const errorMessage = queries.createMessage(conversationId, 'assistant', `Error: ${error.message}`);
|
|
2200
|
-
broadcastSync({
|
|
2201
|
-
type: 'message_created',
|
|
2202
|
-
conversationId,
|
|
2203
|
-
message: errorMessage,
|
|
2204
|
-
timestamp: Date.now()
|
|
2205
|
-
});
|
|
2206
|
-
}
|
|
2207
|
-
} finally {
|
|
2208
|
-
batcher.drain();
|
|
2209
|
-
// Use atomic cleanup but only if not in rate limit recovery
|
|
2210
|
-
if (!rateLimitState.has(conversationId)) {
|
|
2211
|
-
cleanupExecution(conversationId);
|
|
2212
|
-
drainMessageQueue(conversationId);
|
|
2213
|
-
}
|
|
2214
|
-
}
|
|
2215
|
-
}
|
|
2216
|
-
|
|
2217
|
-
/**
 * Restart a conversation's execution after a rate-limit cooldown expires.
 *
 * Recovers the prompt text if it was not carried through the retry timer,
 * creates a fresh session, announces `streaming_start` to clients, records
 * the execution as active, and re-invokes processMessageWithStreaming.
 * On retry failure, cleans up execution state and broadcasts a
 * non-recoverable `streaming_error`.
 *
 * @param {string|number} conversationId - Conversation being retried.
 * @param {string|number} messageId - Original user message id.
 * @param {string|null} content - Prompt text; falls back to the last user message when falsy.
 * @param {string} agentId - Agent to run the retry with.
 * @param {string|null} model - Model override, if any.
 * @param {string|null} subAgent - Sub-agent override, if any.
 */
function scheduleRetry(conversationId, messageId, content, agentId, model, subAgent) {
  debugLog(`[rate-limit] scheduleRetry called for conv ${conversationId}, messageId=${messageId}`);

  // Recover the prompt when it was not threaded through the timer.
  // (Fix: the original also called queries.getConversation() here and
  // discarded the result — a dead DB read; removed.)
  let retryContent = content;
  if (!retryContent) {
    const lastMsg = queries.getLastUserMessage(conversationId);
    retryContent = lastMsg?.content || 'continue';
    debugLog(`[rate-limit] Recovered content from last message: ${retryContent?.substring?.(0, 50)}...`);
  }

  // A retry always runs under a brand-new session, tagged with its reason.
  const newSession = queries.createSession(conversationId);
  queries.createEvent('session.created', { messageId, sessionId: newSession.id, retryReason: 'rate_limit' }, conversationId, newSession.id);

  debugLog(`[rate-limit] Broadcasting streaming_start for retry session ${newSession.id}`);
  broadcastSync({
    type: 'streaming_start',
    sessionId: newSession.id,
    conversationId,
    messageId,
    agentId,
    queueLength: messageQueues.get(conversationId)?.length || 0,
    timestamp: Date.now()
  });

  // Register the retry as the conversation's active execution before starting.
  const startTime = Date.now();
  activeExecutions.set(conversationId, { pid: null, startTime, sessionId: newSession.id, lastActivity: startTime });

  debugLog(`[rate-limit] Calling processMessageWithStreaming for retry`);
  processMessageWithStreaming(conversationId, messageId, newSession.id, retryContent, agentId, model, subAgent)
    .catch(err => {
      debugLog(`[rate-limit] Retry failed: ${err.message}`);
      console.error(`[rate-limit] Retry error for conv ${conversationId}:`, err);
      // Clean up state on retry failure
      cleanupExecution(conversationId);
      broadcastSync({
        type: 'streaming_error',
        sessionId: newSession.id,
        conversationId,
        error: `Rate limit retry failed: ${err.message}`,
        recoverable: false,
        timestamp: Date.now()
      });
    });
}
|
|
2261
|
-
|
|
2262
|
-
/**
 * Pop and start the next queued message for a conversation, if any.
 *
 * Two queues exist during the state-machine migration: the execMachine
 * context queue (authoritative) and the legacy messageQueues Map. This
 * drains whichever holds items, keeps the Map mirrored, then opens a new
 * session and kicks off processMessageWithStreaming for the dequeued item.
 */
function drainMessageQueue(conversationId) {
  // Machine context queue is authoritative; fall back to legacy Map
  const machineQueue = execMachine.getQueue(conversationId);
  const mapQueue = messageQueues.get(conversationId);
  if (machineQueue.length === 0 && (!mapQueue || mapQueue.length === 0)) return;

  let next;
  if (machineQueue.length > 0) {
    // Consume from machine via COMPLETE transition (draining state pops nextItem)
    execMachine.send(conversationId, { type: 'COMPLETE' });
    const ctx = execMachine.getContext(conversationId);
    next = ctx?.nextItem;
    // Also keep Map in sync
    if (mapQueue && mapQueue.length > 0) mapQueue.shift();
    if (mapQueue && mapQueue.length === 0) messageQueues.delete(conversationId);
  } else {
    // Legacy path: machine queue empty but the Map still has items.
    next = mapQueue.shift();
    if (mapQueue.length === 0) messageQueues.delete(conversationId);
  }

  // The machine may transition without yielding a nextItem; nothing to run then.
  if (!next) return;

  debugLog(`[queue] Draining next message for ${conversationId}, messageId=${next.messageId}`);

  // NOTE(review): `||` means a machine-queue length of 0 falls through to the
  // Map's length — presumably intentional during the migration, but 0-vs-absent
  // is conflated here; confirm once the Map is retired.
  const remainingQueueLength = execMachine.getQueue(conversationId).length || messageQueues.get(conversationId)?.length || 0;

  // Tell clients the item left the queue before streaming begins.
  broadcastSync({
    type: 'queue_item_dequeued',
    conversationId,
    messageId: next.messageId,
    queueLength: remainingQueueLength,
    timestamp: Date.now()
  });

  // Each dequeued message runs under its own fresh session.
  const session = queries.createSession(conversationId);
  queries.createEvent('session.created', { messageId: next.messageId, sessionId: session.id }, conversationId, session.id);

  broadcastSync({
    type: 'streaming_start',
    sessionId: session.id,
    conversationId,
    messageId: next.messageId,
    agentId: next.agentId,
    queueLength: remainingQueueLength,
    timestamp: Date.now()
  });

  broadcastSync({
    type: 'queue_status',
    conversationId,
    queueLength: remainingQueueLength,
    timestamp: Date.now()
  });

  const startTime = Date.now();
  // Machine START event makes machine authoritative for this execution
  execMachine.send(conversationId, { type: 'START', sessionId: session.id });
  activeExecutions.set(conversationId, { pid: null, startTime, sessionId: session.id, lastActivity: startTime });

  processMessageWithStreaming(conversationId, next.messageId, session.id, next.content, next.agentId, next.model, next.subAgent)
    .catch(err => {
      debugLog(`[queue] Error processing queued message: ${err.message}`);
      cleanupExecution(conversationId);
      broadcastSync({
        type: 'streaming_error',
        sessionId: session.id,
        conversationId,
        error: `Queue processing failed: ${err.message}`,
        recoverable: true,
        timestamp: Date.now()
      });
      // A failed item doesn't block the rest of the queue: try the next one shortly.
      setTimeout(() => drainMessageQueue(conversationId), 100);
    });
}
|
|
2336
1666
|
|
|
2337
1667
|
|
|
2338
1668
|
const wss = new WebSocketServer({
|
|
@@ -2425,6 +1755,23 @@ const broadcastSync = createBroadcast({
|
|
|
2425
1755
|
getSeq: () => ++broadcastSeq
|
|
2426
1756
|
});
|
|
2427
1757
|
|
|
1758
|
+
// Wire up process-message factories now that broadcastSync and all deps are available
|
|
1759
|
+
const _mqDeps = {
|
|
1760
|
+
queries, messageQueues, activeExecutions, rateLimitState, execMachine,
|
|
1761
|
+
broadcastSync, cleanupExecution, debugLog,
|
|
1762
|
+
getProcessMessageWithStreaming: () => processMessageWithStreaming
|
|
1763
|
+
};
|
|
1764
|
+
const { scheduleRetry, drainMessageQueue } = createMessageQueue(_mqDeps);
|
|
1765
|
+
|
|
1766
|
+
const { processMessageWithStreaming } = createProcessMessage({
|
|
1767
|
+
queries, activeExecutions, rateLimitState, execMachine,
|
|
1768
|
+
broadcastSync, runClaudeWithStreaming, cleanupExecution, checkpointManager,
|
|
1769
|
+
discoveredAgents, ownedSessionIds, STARTUP_CWD, buildSystemPrompt,
|
|
1770
|
+
parseRateLimitResetTime, eagerTTS, touchACP, createChunkBatcher,
|
|
1771
|
+
debugLog, logError,
|
|
1772
|
+
scheduleRetry, drainMessageQueue, createEventHandler
|
|
1773
|
+
});
|
|
1774
|
+
|
|
2428
1775
|
// WebSocket protocol router: dispatches incoming WS messages by type.
const wsRouter = new WsRouter();
|
|
2430
1777
|
|