agentgui 1.0.835 → 1.0.836
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +2 -0
- package/lib/broadcast.js +28 -0
- package/lib/recovery.js +166 -0
- package/package.json +1 -1
- package/scripts/patch-fsbrowse.js +0 -28
- package/server.js +22 -381
package/CHANGELOG.md
CHANGED
|
@@ -1,6 +1,8 @@
|
|
|
1
1
|
## [Unreleased]
|
|
2
2
|
|
|
3
3
|
### Refactor
|
|
4
|
+
- refactor: extract broadcastSync to lib/broadcast.js (createBroadcast factory) and recovery functions to lib/recovery.js (createRecovery factory); server.js reduced from 3419L to 3226L
|
|
5
|
+
- refactor: remove JSDoc and standalone code comments from scripts/patch-fsbrowse.js; reduce from 229L to 200L
|
|
4
6
|
- Split database.js (651L) into database.js (81L) + database-schema.js (176L) + database-migrations.js (150L) + database-migrations-acp.js (134L); all files ≤200L; no circular imports; migration functions receive db as parameter
|
|
5
7
|
- Split claude-runner.js (1267L) into claude-runner.js (56L, AgentRunner class+helpers), claude-runner-direct.js (117L, runDirect method), claude-runner-acp.js (156L, runACP+_runACPOnce methods), claude-runner-agents.js (105L, AgentRegistry+registrations using acp-protocol.js), claude-runner-run.js (50L, runClaudeWithStreaming export); server.js updated to import from claude-runner-run.js
|
|
6
8
|
- Split db-queries.js (1413L) into db-queries.js (93L, factory + conv queries), db-queries-messages.js (103L), db-queries-sessions.js (113L), db-queries-events.js (69L), db-queries-del.js (143L), db-queries-cleanup.js (86L), db-queries-import.js (134L), db-queries-streams.js (101L), db-queries-chunks.js (196L), db-queries-chunks2.js (83L), db-queries-voice.js (86L), db-queries-tools.js (128L); all ≤200L; each exports addXxxQueries(q, db, prep, generateId); factory calls all helpers and returns q
|
package/lib/broadcast.js
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
/**
 * Factory for the WebSocket sync-event broadcaster.
 *
 * @param {object} deps
 * @param {Set} deps.syncClients - all currently connected sync WebSocket clients
 * @param {Map} deps.subscriptionIndex - key (sessionId or `conv-<id>`) -> Set of subscribed clients
 * @param {object} deps.wsOptimizer - transport wrapper exposing sendToClient(ws, event)
 * @param {Set<string>} deps.broadcastTypes - event types fanned out to every client
 * @param {function(): number} deps.getSeq - returns the next global sequence number
 * @returns {function(object): void} broadcastSync(event)
 */
export function createBroadcast({ syncClients, subscriptionIndex, wsOptimizer, broadcastTypes, getSeq }) {
  // Send to a single client; a failure on one socket must never abort
  // delivery to the remaining clients, so errors are swallowed here.
  const deliver = (ws, event) => {
    try {
      wsOptimizer.sendToClient(ws, event);
    } catch (_) {}
  };

  /**
   * Dispatch a sync event. Events of a broadcast type go to every connected
   * client; all other events go only to clients subscribed to the event's
   * sessionId and/or conversationId (deduplicated). A sequence number is
   * stamped onto the event if it does not already carry one. All errors are
   * contained so broadcasting can never crash the caller.
   */
  return function broadcastSync(event) {
    try {
      if (!event.seq) {
        event.seq = getSeq();
      }
      if (syncClients.size === 0) return;

      if (broadcastTypes.has(event.type)) {
        for (const ws of syncClients) deliver(ws, event);
        return;
      }

      // Targeted delivery: union of session-level and conversation-level
      // subscribers, using a Set so a doubly-subscribed client gets one copy.
      const recipients = new Set();
      const collect = (key) => {
        const subs = subscriptionIndex.get(key);
        if (subs) {
          for (const ws of subs) recipients.add(ws);
        }
      };
      if (event.sessionId) collect(event.sessionId);
      if (event.conversationId) collect(`conv-${event.conversationId}`);
      for (const ws of recipients) deliver(ws, event);
    } catch (err) {
      console.error('[BROADCAST] Error (contained):', err.message);
    }
  };
}
|
package/lib/recovery.js
ADDED
|
@@ -0,0 +1,166 @@
|
|
|
1
|
+
/**
 * Factory for crash-recovery and agent-health helpers. All collaborators are
 * injected, so this module has no direct imports:
 *  - activeExecutions: Map of conversationId -> { pid, proc?, startTime, sessionId, lastActivity }
 *  - processMessageWithStreaming: async fn that re-runs the agent for a conversation
 *  - queries: DB query facade (sessions, conversations, messages, events, streaming flags)
 *  - broadcastSync: pushes sync events to connected WebSocket clients
 *  - checkpointManager: loadCheckpoint / markSessionResumed / storeCheckpointForDelay
 *  - drainMessageQueue: flushes queued messages for a conversation
 *  - stuckThresholdMs: inactivity window before a running agent is treated as stuck
 *  - noPidGracePeriodMs: how long an execution may run without reporting a PID
 */
export function createRecovery({ activeExecutions, processMessageWithStreaming, queries, broadcastSync, checkpointManager, drainMessageQueue, stuckThresholdMs, noPidGracePeriodMs }) {
  // True if `pid` refers to a live process. Signal 0 checks existence without
  // delivering a signal; EPERM means the process exists but belongs to
  // another user, so it still counts as alive.
  function isProcessAlive(pid) {
    try {
      process.kill(pid, 0);
      return true;
    } catch (err) {
      if (err.code === 'EPERM') return true;
      return false;
    }
  }

  // SIGTERM every tracked agent, then empty the tracking map. Kills the
  // process group first (negative pid); if that fails (e.g. no group, or on
  // platforms without group kill) falls back to the single pid. Also
  // terminates a retained child-process handle, if any. Best-effort: all
  // kill failures are swallowed.
  function killActiveExecutions() {
    for (const [convId, entry] of activeExecutions.entries()) {
      if (entry.pid) {
        try { process.kill(-entry.pid, 'SIGTERM'); } catch { try { process.kill(entry.pid, 'SIGTERM'); } catch (_) {} }
      }
      if (entry.proc) {
        try { entry.proc.kill('SIGTERM'); } catch (_) {}
      }
    }
    activeExecutions.clear();
  }

  // Startup-time cleanup: every session still marked active in the DB cannot
  // really be running (the server just started), so close it out. Sessions
  // started within the last 10 minutes become 'interrupted' (candidates for
  // auto-resume); older ones become 'error'. All streaming flags are cleared
  // unconditionally since nothing is executing yet.
  function recoverStaleSessions() {
    try {
      const RESUME_WINDOW_MS = 600000;
      const cutoff = Date.now() - RESUME_WINDOW_MS;
      const staleSessions = queries.getActiveSessions();
      for (const session of staleSessions) {
        queries.updateSession(session.id, {
          status: session.started_at > cutoff ? 'interrupted' : 'error',
          error: 'Server restarted',
          completed_at: Date.now()
        });
      }
      queries.clearAllStreamingFlags();
      if (staleSessions.length > 0) {
        console.log(`[RECOVERY] Marked ${staleSessions.length} stale session(s); cleared streaming flags`);
      }
    } catch (err) {
      console.error('[RECOVERY] Error:', err.message);
    }
  }

  // Re-run a conversation after an interruption. Used both by markAgentDead
  // (agent died/stuck) and resumeInterruptedStreams (server restart).
  //
  // Steps, in order:
  //  1. Load the conversation (throws if gone) and any checkpoint saved for
  //     the previous session.
  //  2. Mark the previous session 'interrupted' if it isn't already
  //     (queries.getSession is feature-detected — presumably for older query
  //     facades without it; TODO confirm), and flag the checkpoint consumed.
  //  3. Build the resume prompt from the last user message; non-string
  //     content is JSON-stringified, and a missing message falls back to the
  //     literal 'continue'.
  //  4. Create a fresh session + 'session.created' event, register the new
  //     execution (pid unknown until the agent reports it), and broadcast
  //     'streaming_start' with resumed=true.
  //  5. Stash the checkpoint for delayed injection, then actually re-run the
  //     agent via processMessageWithStreaming.
  async function resumeConversation(conversationId, previousSessionId, reason) {
    const conv = queries.getConversation(conversationId);
    if (!conv) throw new Error('Conversation not found');
    const checkpoint = previousSessionId ? checkpointManager.loadCheckpoint(previousSessionId) : null;
    if (previousSessionId) {
      const prev = queries.getSession ? queries.getSession(previousSessionId) : null;
      if (prev && prev.status !== 'interrupted') {
        queries.updateSession(previousSessionId, { status: 'interrupted', error: reason || 'Restarting', completed_at: Date.now() });
      }
      if (checkpoint) {
        checkpointManager.markSessionResumed(previousSessionId);
      }
    }
    const lastMsg = queries.getLastUserMessage(conversationId);
    const promptText = typeof lastMsg?.content === 'string' ? lastMsg.content : JSON.stringify(lastMsg?.content || 'continue');
    const session = queries.createSession(conversationId);
    queries.createEvent('session.created', {
      sessionId: session.id,
      resumeReason: 'interrupted',
      claudeSessionId: conv.claudeSessionId,
      checkpointFrom: previousSessionId || null
    }, conversationId, session.id);
    activeExecutions.set(conversationId, {
      pid: null,
      startTime: Date.now(),
      sessionId: session.id,
      lastActivity: Date.now()
    });
    broadcastSync({
      type: 'streaming_start',
      sessionId: session.id,
      conversationId,
      agentId: conv.agentType,
      resumed: true,
      checkpointAvailable: !!checkpoint,
      timestamp: Date.now()
    });
    if (checkpoint) {
      checkpointManager.storeCheckpointForDelay(conversationId, checkpoint);
      console.log(`[RESUME] Checkpoint stored for conv ${conversationId}`);
    }
    console.log(`[RESUME] Restarting conv ${conversationId} (reason: ${reason})`);
    await processMessageWithStreaming(conversationId, lastMsg?.id || null, session.id, promptText, conv.agentType, conv.model, conv.subAgent);
  }

  // Handle a dead/stuck agent. Removes the execution entry first (idempotent:
  // a second call for the same conversation is a no-op), then either:
  //  - auto-restarts via resumeConversation if the session started less than
  //    10 minutes ago (entries without a startTime never restart), logging
  //    but not rethrowing a failed restart; or
  //  - broadcasts a non-recoverable 'streaming_error' and drains the queued
  //    messages. Note drainMessageQueue runs only in this non-restart branch.
  function markAgentDead(conversationId, entry, reason) {
    if (!activeExecutions.has(conversationId)) return;
    activeExecutions.delete(conversationId);
    const RESUME_WINDOW_MS = 600000;
    const sessionAge = entry.startTime ? Date.now() - entry.startTime : Infinity;
    const shouldRestart = sessionAge < RESUME_WINDOW_MS;
    queries.setIsStreaming(conversationId, false);
    if (entry.sessionId) {
      queries.updateSession(entry.sessionId, {
        status: shouldRestart ? 'interrupted' : 'error',
        error: reason,
        completed_at: Date.now()
      });
    }
    if (shouldRestart) {
      resumeConversation(conversationId, entry.sessionId, reason).catch(err => {
        console.error(`[RESUME] Auto-restart failed for conv ${conversationId}: ${err.message}`);
        queries.setIsStreaming(conversationId, false);
      });
      return;
    }
    broadcastSync({
      type: 'streaming_error',
      sessionId: entry.sessionId,
      conversationId,
      error: reason,
      recoverable: false,
      timestamp: Date.now()
    });
    drainMessageQueue(conversationId);
  }

  // Startup companion to recoverStaleSessions: resume every conversation the
  // DB reports as resumable within the 10-minute window, sequentially, with a
  // 200 ms pause between conversations (presumably to stagger agent spawns —
  // TODO confirm). A failure on one conversation clears its streaming flag
  // and moves on to the next.
  async function resumeInterruptedStreams() {
    try {
      const toResume = queries.getResumableConversations(600000);
      if (toResume.length === 0) return;
      console.log(`[RESUME] Resuming ${toResume.length} interrupted conversation(s)`);
      for (let i = 0; i < toResume.length; i++) {
        const conv = toResume[i];
        try {
          const lastSession = queries.getLatestSession(conv.id);
          await resumeConversation(conv.id, lastSession?.id || null, 'Server restarted');
          if (i < toResume.length - 1) await new Promise(r => setTimeout(r, 200));
        } catch (err) {
          console.error(`[RESUME] Failed to resume conv ${conv.id}: ${err.message}`);
          queries.setIsStreaming(conv.id, false);
        }
      }
    } catch (err) {
      console.error('[RESUME] Error:', err.message);
    }
  }

  // Periodic health sweep over all tracked executions:
  //  - entry has a PID: mark dead if the process no longer exists, or
  //    SIGTERM + mark dead if idle longer than stuckThresholdMs.
  //    NOTE(review): the stuck-reason string hard-codes "30 minutes" while
  //    the threshold itself is injected — misleading if configured otherwise.
  //  - entry has no PID yet: mark dead once it exceeds noPidGracePeriodMs
  //    without the agent ever reporting a PID.
  // markAgentDead deletes entries from activeExecutions, which is safe during
  // Map iteration in JS.
  function performAgentHealthCheck() {
    const now = Date.now();
    for (const [conversationId, entry] of activeExecutions) {
      if (!entry) continue;
      if (entry.pid) {
        if (!isProcessAlive(entry.pid)) {
          console.error(`[HEALTH] Agent PID ${entry.pid} for conv ${conversationId} is dead`);
          markAgentDead(conversationId, entry, 'Agent process died unexpectedly');
        } else if (now - entry.lastActivity > stuckThresholdMs) {
          console.error(`[HEALTH] Agent PID ${entry.pid} for conv ${conversationId} has no activity for ${Math.round((now - entry.lastActivity) / 1000)}s`);
          try { process.kill(entry.pid, 'SIGTERM'); } catch (e) {}
          markAgentDead(conversationId, entry, 'Agent was stuck (no activity for 30 minutes)');
        }
      } else {
        if (now - entry.startTime > noPidGracePeriodMs) {
          console.error(`[HEALTH] Agent for conv ${conversationId} never reported PID after ${Math.round((now - entry.startTime) / 1000)}s`);
          markAgentDead(conversationId, entry, 'Agent failed to start (no PID reported)');
        }
      }
    }
  }

  return { killActiveExecutions, recoverStaleSessions, resumeInterruptedStreams, isProcessAlive, markAgentDead, resumeConversation, performAgentHealthCheck };
}
|
package/scripts/patch-fsbrowse.js
CHANGED
|
@@ -1,16 +1,9 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
|
-
/**
|
|
3
|
-
* Patch script to fix Windows path duplication issue in fsbrowse
|
|
4
|
-
* and sync fsbrowse styling with AgentGUI dark mode theme
|
|
5
|
-
* Fixes: Error ENOENT: no such file or directory, scandir 'C:\C:\dev'
|
|
6
|
-
*/
|
|
7
|
-
|
|
8
2
|
import fs from 'fs';
|
|
9
3
|
import path from 'path';
|
|
10
4
|
import { fileURLToPath } from 'url';
|
|
11
5
|
|
|
12
6
|
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
|
13
|
-
|
|
14
7
|
const fsbrowsePath = path.join(__dirname, '..', 'node_modules', 'fsbrowse', 'index.js');
|
|
15
8
|
|
|
16
9
|
if (!fs.existsSync(fsbrowsePath)) {
|
|
@@ -21,13 +14,11 @@ if (!fs.existsSync(fsbrowsePath)) {
|
|
|
21
14
|
try {
|
|
22
15
|
let content = fs.readFileSync(fsbrowsePath, 'utf8');
|
|
23
16
|
|
|
24
|
-
// Check if patch is already applied
|
|
25
17
|
if (content.includes('sanitizedIsAbsoluteOnDrive')) {
|
|
26
18
|
console.log('[PATCH] fsbrowse Windows path fix already applied');
|
|
27
19
|
process.exit(0);
|
|
28
20
|
}
|
|
29
21
|
|
|
30
|
-
// Replace the makeResolver function with the fixed version
|
|
31
22
|
const oldMakeResolver = `function makeResolver(baseDir) {
|
|
32
23
|
return function resolveWithBaseDir(relPath) {
|
|
33
24
|
const sanitized = sanitizePath(relPath);
|
|
@@ -46,15 +37,10 @@ try {
|
|
|
46
37
|
return function resolveWithBaseDir(relPath) {
|
|
47
38
|
const sanitized = sanitizePath(relPath);
|
|
48
39
|
let fullPath;
|
|
49
|
-
|
|
50
|
-
// Extract drive letter from both paths to check for same-drive duplication on Windows
|
|
51
40
|
const sanitizedDriveLetter = sanitized.match(/^[A-Z]:/i)?.[0];
|
|
52
41
|
const sanitizedIsAbsoluteOnDrive = /^[A-Z]:/i.test(sanitized);
|
|
53
42
|
|
|
54
|
-
// If both paths are on the same Windows drive, strip the drive letter from relPath
|
|
55
|
-
// to avoid duplication like C:\\C:\\dev
|
|
56
43
|
if (baseDriveLetter && sanitizedIsAbsoluteOnDrive && sanitizedDriveLetter === baseDriveLetter) {
|
|
57
|
-
// Remove drive letter and leading slashes to make it relative
|
|
58
44
|
let relativePath = sanitized;
|
|
59
45
|
if (/^[A-Z]:/i.test(relativePath)) {
|
|
60
46
|
relativePath = relativePath.substring(2);
|
|
@@ -65,11 +51,8 @@ try {
|
|
|
65
51
|
fullPath = path.resolve(normalizedBase, sanitized);
|
|
66
52
|
}
|
|
67
53
|
|
|
68
|
-
// Normalize for consistent comparison
|
|
69
54
|
const normalizedFullPath = path.normalize(fullPath);
|
|
70
55
|
const normalizedComparisonBase = path.normalize(normalizedBase);
|
|
71
|
-
|
|
72
|
-
// Check path injection - convert backslashes to forward slashes for comparison
|
|
73
56
|
const normalizedCheck = normalizedFullPath.replace(/\\\\/g, '/');
|
|
74
57
|
const normalizedBaseCheck = normalizedComparisonBase.replace(/\\\\/g, '/');
|
|
75
58
|
|
|
@@ -92,18 +75,14 @@ try {
|
|
|
92
75
|
process.exit(1);
|
|
93
76
|
}
|
|
94
77
|
|
|
95
|
-
// Patch fsbrowse CSS for dark mode theme sync
|
|
96
78
|
const fsbrowseCSSPath = path.join(__dirname, '..', 'node_modules', 'fsbrowse', 'public', 'style.css');
|
|
97
|
-
|
|
98
79
|
if (fs.existsSync(fsbrowseCSSPath)) {
|
|
99
80
|
try {
|
|
100
81
|
let cssContent = fs.readFileSync(fsbrowseCSSPath, 'utf8');
|
|
101
82
|
|
|
102
|
-
// Check if dark mode CSS is already patched
|
|
103
83
|
if (cssContent.includes('html.dark {')) {
|
|
104
84
|
console.log('[PATCH] fsbrowse dark mode CSS already patched');
|
|
105
85
|
} else {
|
|
106
|
-
// Inject dark mode CSS rules
|
|
107
86
|
const darkModeCSS = `/* Light mode - explicit */
|
|
108
87
|
html.light {
|
|
109
88
|
--primary: #3b82f6;
|
|
@@ -145,7 +124,6 @@ html.dark {
|
|
|
145
124
|
}
|
|
146
125
|
}`;
|
|
147
126
|
|
|
148
|
-
// Find the closing brace of :root and insert after it
|
|
149
127
|
cssContent = cssContent.replace(
|
|
150
128
|
/:root \{[\s\S]*?\}\s*@media/,
|
|
151
129
|
match => match.replace('@media', darkModeCSS + '\n@media')
|
|
@@ -159,23 +137,18 @@ html.dark {
|
|
|
159
137
|
}
|
|
160
138
|
}
|
|
161
139
|
|
|
162
|
-
// Patch fsbrowse app.js for theme sync
|
|
163
140
|
const fsbrowseAppJSPath = path.join(__dirname, '..', 'node_modules', 'fsbrowse', 'public', 'app.js');
|
|
164
|
-
|
|
165
141
|
if (fs.existsSync(fsbrowseAppJSPath)) {
|
|
166
142
|
try {
|
|
167
143
|
let appContent = fs.readFileSync(fsbrowseAppJSPath, 'utf8');
|
|
168
144
|
|
|
169
|
-
// Ensure postMessage theme listener is present (storage events don't fire in same-window iframes)
|
|
170
145
|
if (appContent.includes('theme-change')) {
|
|
171
146
|
console.log('[PATCH] fsbrowse postMessage theme sync already present');
|
|
172
147
|
} else if (appContent.includes('setupThemeSync')) {
|
|
173
|
-
// setupThemeSync exists but lacks postMessage support - inject it
|
|
174
148
|
appContent = appContent.replace(
|
|
175
149
|
" // Watch for storage changes from other tabs/windows\n window.addEventListener('storage', e => {\n if (e.key === 'gmgui-theme') syncTheme();\n });",
|
|
176
150
|
" // Watch for storage changes from other tabs/windows\n window.addEventListener('storage', e => {\n if (e.key === 'gmgui-theme') syncTheme();\n });\n\n // Watch for postMessage from parent window (same-window iframes don't receive storage events)\n window.addEventListener('message', e => {\n if (e.data && e.data.type === 'theme-change' && e.data.theme) syncTheme(e.data.theme);\n });"
|
|
177
151
|
);
|
|
178
|
-
// Also make syncTheme accept an explicit theme argument
|
|
179
152
|
appContent = appContent.replace(
|
|
180
153
|
'const syncTheme = () => {\n const theme = localStorage.getItem(\'gmgui-theme\') ||',
|
|
181
154
|
'const syncTheme = (theme) => {\n theme = theme || localStorage.getItem(\'gmgui-theme\') ||'
|
|
@@ -183,7 +156,6 @@ if (fs.existsSync(fsbrowseAppJSPath)) {
|
|
|
183
156
|
fs.writeFileSync(fsbrowseAppJSPath, appContent, 'utf8');
|
|
184
157
|
console.log('[PATCH] fsbrowse postMessage theme sync injected');
|
|
185
158
|
} else {
|
|
186
|
-
// No setupThemeSync at all - inject full method
|
|
187
159
|
const themeSyncMethod = `
|
|
188
160
|
setupThemeSync() {
|
|
189
161
|
// Sync theme from parent window/localStorage if available
|
package/server.js
CHANGED
|
@@ -45,11 +45,14 @@ import { register as registerMsgHandlers } from './lib/ws-handlers-msg.js';
|
|
|
45
45
|
import { startAll as startACPTools, stopAll as stopACPTools, getStatus as getACPStatus, getPort as getACPPort, ensureRunning, queryModels as queryACPModels, touch as touchACP } from './lib/acp-sdk-manager.js';
|
|
46
46
|
import * as execMachine from './lib/execution-machine.js';
|
|
47
47
|
import * as toolInstallMachine from './lib/tool-install-machine.js';
|
|
48
|
+
import { _assetCache, htmlState, generateETag, warmAssetCache, serveFile as _serveFile, createChunkBatcher } from './lib/asset-server.js';
|
|
48
49
|
import { installGMAgentConfigs } from './lib/gm-agent-configs.js';
|
|
49
50
|
import * as toolManager from './lib/tool-manager.js';
|
|
50
51
|
import { pm2Manager } from './lib/pm2-manager.js';
|
|
51
52
|
import CheckpointManager from './lib/checkpoint-manager.js';
|
|
52
53
|
import { JsonlWatcher } from './lib/jsonl-watcher.js';
|
|
54
|
+
import { createBroadcast } from './lib/broadcast.js';
|
|
55
|
+
import { createRecovery } from './lib/recovery.js';
|
|
53
56
|
|
|
54
57
|
|
|
55
58
|
process.on('uncaughtException', (err, origin) => {
|
|
@@ -1651,175 +1654,9 @@ const server = http.createServer(async (req, res) => {
|
|
|
1651
1654
|
}
|
|
1652
1655
|
});
|
|
1653
1656
|
|
|
1654
|
-
const
|
|
1657
|
+
const _assetDeps = { compressAndSend, acceptsEncoding, watch, BASE_URL, PKG_VERSION };
|
|
1658
|
+
function serveFile(filePath, res, req) { return _serveFile(filePath, res, req, _assetDeps); }
|
|
1655
1659
|
|
|
1656
|
-
function generateETag(stats) {
|
|
1657
|
-
return `"${stats.mtimeMs.toString(36)}-${stats.size.toString(36)}"`;
|
|
1658
|
-
}
|
|
1659
|
-
|
|
1660
|
-
// In-memory cache: etag -> { gz: Buffer, raw: Buffer } (bounded to 200 entries)
|
|
1661
|
-
const _assetCache = new LRUCache({ max: 200 });
|
|
1662
|
-
// Cached processed HTML (invalidated on hot-reload or server restart)
|
|
1663
|
-
let _htmlCache = null;
|
|
1664
|
-
let _htmlCacheEtag = null;
|
|
1665
|
-
|
|
1666
|
-
function warmAssetCache() {
|
|
1667
|
-
const dirs = ['js', 'css', 'lib', 'vendor'];
|
|
1668
|
-
let count = 0;
|
|
1669
|
-
for (const dir of dirs) {
|
|
1670
|
-
const full = path.join(staticDir, dir);
|
|
1671
|
-
if (!fs.existsSync(full)) continue;
|
|
1672
|
-
for (const file of fs.readdirSync(full)) {
|
|
1673
|
-
const filePath = path.join(full, file);
|
|
1674
|
-
try {
|
|
1675
|
-
const stats = fs.statSync(filePath);
|
|
1676
|
-
if (!stats.isFile()) continue;
|
|
1677
|
-
const etag = generateETag(stats);
|
|
1678
|
-
if (_assetCache.has(etag)) continue;
|
|
1679
|
-
const raw = fs.readFileSync(filePath);
|
|
1680
|
-
const entry = raw.length < 860 ? { raw, gz: null } : { raw, gz: zlib.gzipSync(raw, { level: 6 }) };
|
|
1681
|
-
_assetCache.set(etag, entry);
|
|
1682
|
-
count++;
|
|
1683
|
-
} catch (_) {}
|
|
1684
|
-
}
|
|
1685
|
-
}
|
|
1686
|
-
for (const file of ['app.js', 'theme.js']) {
|
|
1687
|
-
const filePath = path.join(staticDir, file);
|
|
1688
|
-
try {
|
|
1689
|
-
const stats = fs.statSync(filePath);
|
|
1690
|
-
const etag = generateETag(stats);
|
|
1691
|
-
if (!_assetCache.has(etag)) {
|
|
1692
|
-
const raw = fs.readFileSync(filePath);
|
|
1693
|
-
_assetCache.set(etag, raw.length < 860 ? { raw, gz: null } : { raw, gz: zlib.gzipSync(raw, { level: 6 }) });
|
|
1694
|
-
count++;
|
|
1695
|
-
}
|
|
1696
|
-
} catch (_) {}
|
|
1697
|
-
}
|
|
1698
|
-
if (count > 0) console.log(`[CACHE] Pre-warmed ${count} static assets`);
|
|
1699
|
-
}
|
|
1700
|
-
|
|
1701
|
-
function serveFile(filePath, res, req) {
|
|
1702
|
-
const ext = path.extname(filePath).toLowerCase();
|
|
1703
|
-
const contentType = MIME_TYPES[ext] || 'application/octet-stream';
|
|
1704
|
-
|
|
1705
|
-
if (ext !== '.html') {
|
|
1706
|
-
fs.stat(filePath, (err, stats) => {
|
|
1707
|
-
if (err) { res.writeHead(500); res.end('Server error'); return; }
|
|
1708
|
-
const etag = generateETag(stats);
|
|
1709
|
-
if (req && req.headers['if-none-match'] === etag) {
|
|
1710
|
-
res.writeHead(304);
|
|
1711
|
-
res.end();
|
|
1712
|
-
return;
|
|
1713
|
-
}
|
|
1714
|
-
// Use ETag-based revalidation: browser always checks with server, serves from cache on 304
|
|
1715
|
-
// Avoids stale immutable assets when server files change during development
|
|
1716
|
-
const cacheControl = 'public, no-cache';
|
|
1717
|
-
|
|
1718
|
-
const sendCached = (cached) => {
|
|
1719
|
-
if (acceptsEncoding(req, 'gzip') && cached.gz) {
|
|
1720
|
-
res.writeHead(200, { 'Content-Type': contentType, 'Content-Encoding': 'gzip', 'Content-Length': cached.gz.length, 'ETag': etag, 'Cache-Control': cacheControl });
|
|
1721
|
-
res.end(cached.gz);
|
|
1722
|
-
} else {
|
|
1723
|
-
res.writeHead(200, { 'Content-Type': contentType, 'Content-Length': cached.raw.length, 'ETag': etag, 'Cache-Control': cacheControl });
|
|
1724
|
-
res.end(cached.raw);
|
|
1725
|
-
}
|
|
1726
|
-
};
|
|
1727
|
-
|
|
1728
|
-
const cached = _assetCache.get(etag);
|
|
1729
|
-
if (cached) { sendCached(cached); return; }
|
|
1730
|
-
|
|
1731
|
-
fs.readFile(filePath, (err2, raw) => {
|
|
1732
|
-
if (err2) { res.writeHead(500); res.end('Server error'); return; }
|
|
1733
|
-
if (raw.length < 860) {
|
|
1734
|
-
const entry = { raw, gz: null };
|
|
1735
|
-
_assetCache.set(etag, entry);
|
|
1736
|
-
sendCached(entry);
|
|
1737
|
-
return;
|
|
1738
|
-
}
|
|
1739
|
-
// Pre-compress once with gzip, cache it
|
|
1740
|
-
const gz = zlib.gzipSync(raw, { level: 6 });
|
|
1741
|
-
const entry = { raw, gz };
|
|
1742
|
-
_assetCache.set(etag, entry);
|
|
1743
|
-
sendCached(entry);
|
|
1744
|
-
});
|
|
1745
|
-
});
|
|
1746
|
-
return;
|
|
1747
|
-
}
|
|
1748
|
-
|
|
1749
|
-
// HTML: cache processed result, invalidate when file changes
|
|
1750
|
-
fs.stat(filePath, (err, stats) => {
|
|
1751
|
-
if (err) { res.writeHead(500); res.end('Server error'); return; }
|
|
1752
|
-
const etag = generateETag(stats);
|
|
1753
|
-
if (!watch && _htmlCache && _htmlCacheEtag === etag) {
|
|
1754
|
-
res.writeHead(200, { 'Content-Type': contentType, 'Cache-Control': 'no-store', 'Content-Encoding': 'gzip', 'Content-Length': _htmlCache.length });
|
|
1755
|
-
res.end(_htmlCache);
|
|
1756
|
-
return;
|
|
1757
|
-
}
|
|
1758
|
-
fs.readFile(filePath, (err2, data) => {
|
|
1759
|
-
if (err2) { res.writeHead(500); res.end('Server error'); return; }
|
|
1760
|
-
let content = data.toString();
|
|
1761
|
-
const wsToken = process.env.PASSWORD ? `window.__WS_TOKEN='${process.env.PASSWORD.replace(/'/g, "\\'")}';` : '';
|
|
1762
|
-
const baseTag = `<script>window.__BASE_URL='${BASE_URL}';window.__SERVER_VERSION='${PKG_VERSION}';${wsToken}</script>`;
|
|
1763
|
-
content = content.replace('<head>', `<head>\n <base href="${BASE_URL}/">\n ` + baseTag);
|
|
1764
|
-
content = content.replace(/(href|src)="vendor\//g, `$1="${BASE_URL}/vendor/`);
|
|
1765
|
-
content = content.replace(/(src)="\/gm\/js\//g, `$1="${BASE_URL}/js/`);
|
|
1766
|
-
if (watch) {
|
|
1767
|
-
content += `\n<script>(function(){const ws=new WebSocket((location.protocol==='https:'?'wss://':'ws://')+location.host+'${BASE_URL}/hot-reload');ws.onmessage=e=>{if(JSON.parse(e.data).type==='reload')location.reload()};})();</script>`;
|
|
1768
|
-
}
|
|
1769
|
-
compressAndSend(req, res, 200, contentType, content);
|
|
1770
|
-
if (!watch && acceptsEncoding(req, 'gzip')) {
|
|
1771
|
-
_htmlCache = zlib.gzipSync(Buffer.from(content), { level: 6 });
|
|
1772
|
-
_htmlCacheEtag = etag;
|
|
1773
|
-
}
|
|
1774
|
-
});
|
|
1775
|
-
});
|
|
1776
|
-
}
|
|
1777
|
-
|
|
1778
|
-
function createChunkBatcher() {
|
|
1779
|
-
const pending = [];
|
|
1780
|
-
let timer = null;
|
|
1781
|
-
const BATCH_SIZE = 10;
|
|
1782
|
-
const BATCH_INTERVAL = 50;
|
|
1783
|
-
|
|
1784
|
-
function flush() {
|
|
1785
|
-
if (pending.length === 0) return;
|
|
1786
|
-
const batch = pending.splice(0);
|
|
1787
|
-
try {
|
|
1788
|
-
const tx = queries._db ? queries._db.transaction(() => {
|
|
1789
|
-
for (const c of batch) queries.createChunk(c.sessionId, c.conversationId, c.sequence, c.type, c.data);
|
|
1790
|
-
}) : null;
|
|
1791
|
-
if (tx) { tx(); } else {
|
|
1792
|
-
for (const c of batch) {
|
|
1793
|
-
try { queries.createChunk(c.sessionId, c.conversationId, c.sequence, c.type, c.data); } catch (e) { debugLog(`[chunk] ${e.message}`); }
|
|
1794
|
-
}
|
|
1795
|
-
}
|
|
1796
|
-
} catch (err) {
|
|
1797
|
-
debugLog(`[chunk-batch] Batch write failed: ${err.message}`);
|
|
1798
|
-
for (const c of batch) {
|
|
1799
|
-
try { queries.createChunk(c.sessionId, c.conversationId, c.sequence, c.type, c.data); } catch (_) {}
|
|
1800
|
-
}
|
|
1801
|
-
}
|
|
1802
|
-
}
|
|
1803
|
-
|
|
1804
|
-
function add(sessionId, conversationId, sequence, blockType, blockData) {
|
|
1805
|
-
pending.push({ sessionId, conversationId, sequence, type: blockType, data: blockData });
|
|
1806
|
-
if (pending.length >= BATCH_SIZE) {
|
|
1807
|
-
if (timer) { clearTimeout(timer); timer = null; }
|
|
1808
|
-
flush();
|
|
1809
|
-
} else if (!timer) {
|
|
1810
|
-
timer = setTimeout(() => { timer = null; flush(); }, BATCH_INTERVAL);
|
|
1811
|
-
}
|
|
1812
|
-
}
|
|
1813
|
-
|
|
1814
|
-
function drain() {
|
|
1815
|
-
if (timer) { clearTimeout(timer); timer = null; }
|
|
1816
|
-
flush();
|
|
1817
|
-
}
|
|
1818
|
-
|
|
1819
|
-
return { add, drain };
|
|
1820
|
-
}
|
|
1821
|
-
|
|
1822
|
-
// Global broadcast sequence counter for event ordering
|
|
1823
1660
|
let broadcastSeq = 0;
|
|
1824
1661
|
|
|
1825
1662
|
function parseRateLimitResetTime(text) {
|
|
@@ -2580,36 +2417,13 @@ const BROADCAST_TYPES = new Set([
|
|
|
2580
2417
|
|
|
2581
2418
|
const wsOptimizer = new WSOptimizer();
|
|
2582
2419
|
|
|
2583
|
-
|
|
2584
|
-
|
|
2585
|
-
|
|
2586
|
-
|
|
2587
|
-
|
|
2588
|
-
|
|
2589
|
-
|
|
2590
|
-
const isBroadcast = BROADCAST_TYPES.has(event.type);
|
|
2591
|
-
|
|
2592
|
-
if (syncClients.size > 0) {
|
|
2593
|
-
if (isBroadcast) {
|
|
2594
|
-
for (const ws of syncClients) { try { wsOptimizer.sendToClient(ws, event); } catch (e) {} }
|
|
2595
|
-
} else {
|
|
2596
|
-
const targets = new Set();
|
|
2597
|
-
if (event.sessionId) {
|
|
2598
|
-
const subs = subscriptionIndex.get(event.sessionId);
|
|
2599
|
-
if (subs) for (const ws of subs) targets.add(ws);
|
|
2600
|
-
}
|
|
2601
|
-
if (event.conversationId) {
|
|
2602
|
-
const subs = subscriptionIndex.get(`conv-${event.conversationId}`);
|
|
2603
|
-
if (subs) for (const ws of subs) targets.add(ws);
|
|
2604
|
-
}
|
|
2605
|
-
for (const ws of targets) { try { wsOptimizer.sendToClient(ws, event); } catch (e) {} }
|
|
2606
|
-
}
|
|
2607
|
-
}
|
|
2608
|
-
|
|
2609
|
-
} catch (err) {
|
|
2610
|
-
console.error('[BROADCAST] Error (contained):', err.message);
|
|
2611
|
-
}
|
|
2612
|
-
}
|
|
2420
|
+
const broadcastSync = createBroadcast({
|
|
2421
|
+
syncClients,
|
|
2422
|
+
subscriptionIndex,
|
|
2423
|
+
wsOptimizer,
|
|
2424
|
+
broadcastTypes: BROADCAST_TYPES,
|
|
2425
|
+
getSeq: () => ++broadcastSeq
|
|
2426
|
+
});
|
|
2613
2427
|
|
|
2614
2428
|
// WebSocket protocol router
|
|
2615
2429
|
const wsRouter = new WsRouter();
|
|
@@ -2953,17 +2767,16 @@ if (watch) {
|
|
|
2953
2767
|
} catch (e) { console.error('Watch error:', e.message); }
|
|
2954
2768
|
}
|
|
2955
2769
|
|
|
2956
|
-
|
|
2957
|
-
|
|
2958
|
-
|
|
2959
|
-
|
|
2960
|
-
|
|
2961
|
-
|
|
2962
|
-
|
|
2963
|
-
|
|
2964
|
-
|
|
2965
|
-
|
|
2966
|
-
}
|
|
2770
|
+
const { killActiveExecutions, recoverStaleSessions, resumeInterruptedStreams, isProcessAlive, markAgentDead, resumeConversation, performAgentHealthCheck } = createRecovery({
|
|
2771
|
+
activeExecutions,
|
|
2772
|
+
processMessageWithStreaming,
|
|
2773
|
+
queries,
|
|
2774
|
+
broadcastSync,
|
|
2775
|
+
checkpointManager,
|
|
2776
|
+
drainMessageQueue,
|
|
2777
|
+
stuckThresholdMs: STUCK_AGENT_THRESHOLD_MS,
|
|
2778
|
+
noPidGracePeriodMs: NO_PID_GRACE_PERIOD_MS
|
|
2779
|
+
});
|
|
2967
2780
|
|
|
2968
2781
|
process.on('SIGTERM', () => {
|
|
2969
2782
|
console.log('[SIGNAL] SIGTERM received - graceful shutdown');
|
|
@@ -2992,178 +2805,6 @@ server.on('error', (err) => {
|
|
|
2992
2805
|
}
|
|
2993
2806
|
});
|
|
2994
2807
|
|
|
2995
|
-
// On startup: mark all active/pending sessions as interrupted (server was down, they didn't complete).
|
|
2996
|
-
// Then resumeInterruptedStreams will pick up recent ones for auto-resume.
|
|
2997
|
-
function recoverStaleSessions() {
|
|
2998
|
-
try {
|
|
2999
|
-
const RESUME_WINDOW_MS = 600000;
|
|
3000
|
-
const cutoff = Date.now() - RESUME_WINDOW_MS;
|
|
3001
|
-
const staleSessions = queries.getActiveSessions();
|
|
3002
|
-
for (const session of staleSessions) {
|
|
3003
|
-
queries.updateSession(session.id, {
|
|
3004
|
-
status: session.started_at > cutoff ? 'interrupted' : 'error',
|
|
3005
|
-
error: 'Server restarted',
|
|
3006
|
-
completed_at: Date.now()
|
|
3007
|
-
});
|
|
3008
|
-
}
|
|
3009
|
-
// Clear all isStreaming flags - nothing is running yet
|
|
3010
|
-
queries.clearAllStreamingFlags();
|
|
3011
|
-
if (staleSessions.length > 0) {
|
|
3012
|
-
console.log(`[RECOVERY] Marked ${staleSessions.length} stale session(s); cleared streaming flags`);
|
|
3013
|
-
}
|
|
3014
|
-
} catch (err) {
|
|
3015
|
-
console.error('[RECOVERY] Error:', err.message);
|
|
3016
|
-
}
|
|
3017
|
-
}
|
|
3018
|
-
|
|
3019
|
-
// Resume conversations with recently interrupted sessions (started within 10 min).
|
|
3020
|
-
async function resumeInterruptedStreams() {
|
|
3021
|
-
try {
|
|
3022
|
-
const toResume = queries.getResumableConversations(600000);
|
|
3023
|
-
if (toResume.length === 0) return;
|
|
3024
|
-
console.log(`[RESUME] Resuming ${toResume.length} interrupted conversation(s)`);
|
|
3025
|
-
for (let i = 0; i < toResume.length; i++) {
|
|
3026
|
-
const conv = toResume[i];
|
|
3027
|
-
try {
|
|
3028
|
-
const lastSession = queries.getLatestSession(conv.id);
|
|
3029
|
-
await resumeConversation(conv.id, lastSession?.id || null, 'Server restarted');
|
|
3030
|
-
if (i < toResume.length - 1) await new Promise(r => setTimeout(r, 200));
|
|
3031
|
-
} catch (err) {
|
|
3032
|
-
console.error(`[RESUME] Failed to resume conv ${conv.id}: ${err.message}`);
|
|
3033
|
-
queries.setIsStreaming(conv.id, false);
|
|
3034
|
-
}
|
|
3035
|
-
}
|
|
3036
|
-
} catch (err) {
|
|
3037
|
-
console.error('[RESUME] Error:', err.message);
|
|
3038
|
-
}
|
|
3039
|
-
}
|
|
3040
|
-
|
|
3041
|
-
function isProcessAlive(pid) {
|
|
3042
|
-
try {
|
|
3043
|
-
process.kill(pid, 0);
|
|
3044
|
-
return true;
|
|
3045
|
-
} catch (err) {
|
|
3046
|
-
if (err.code === 'EPERM') return true;
|
|
3047
|
-
return false;
|
|
3048
|
-
}
|
|
3049
|
-
}
|
|
3050
|
-
|
|
3051
|
-
// Handle an agent whose process died, got stuck, or never started. Recent
// runs (started within the resume window) are restarted automatically;
// older ones are reported to clients as a non-recoverable streaming error
// and their queued messages are drained.
function markAgentDead(conversationId, entry, reason) {
  if (!activeExecutions.has(conversationId)) return;
  activeExecutions.delete(conversationId);

  const RESUME_WINDOW_MS = 600000; // 10 minutes
  // Missing startTime means we cannot prove the run is recent — treat as too old.
  const ageMs = entry.startTime ? Date.now() - entry.startTime : Infinity;
  const withinResumeWindow = ageMs < RESUME_WINDOW_MS;

  queries.setIsStreaming(conversationId, false);
  if (entry.sessionId) {
    queries.updateSession(entry.sessionId, {
      status: withinResumeWindow ? 'interrupted' : 'error',
      error: reason,
      completed_at: Date.now()
    });
  }

  if (withinResumeWindow) {
    // Session was recent — restart it automatically
    resumeConversation(conversationId, entry.sessionId, reason).catch((err) => {
      console.error(`[RESUME] Auto-restart failed for conv ${conversationId}: ${err.message}`);
      queries.setIsStreaming(conversationId, false);
    });
    return;
  }

  broadcastSync({
    type: 'streaming_error',
    sessionId: entry.sessionId,
    conversationId,
    error: reason,
    recoverable: false,
    timestamp: Date.now()
  });
  drainMessageQueue(conversationId);
}
// Resume a single conversation after interruption. Used both by markAgentDead and resumeInterruptedStreams.
// Marks the previous session interrupted (if it wasn't already), replays the
// last user message into a fresh session, and restores any saved checkpoint.
async function resumeConversation(conversationId, previousSessionId, reason) {
  const conversation = queries.getConversation(conversationId);
  if (!conversation) throw new Error('Conversation not found');

  const savedCheckpoint = previousSessionId
    ? checkpointManager.loadCheckpoint(previousSessionId)
    : null;

  if (previousSessionId) {
    // Only mark interrupted if not already done
    const previousSession = queries.getSession ? queries.getSession(previousSessionId) : null;
    if (previousSession && previousSession.status !== 'interrupted') {
      queries.updateSession(previousSessionId, {
        status: 'interrupted',
        error: reason || 'Restarting',
        completed_at: Date.now()
      });
    }
    if (savedCheckpoint) {
      checkpointManager.markSessionResumed(previousSessionId);
    }
  }

  // Replay the last user message; fall back to a generic "continue" prompt.
  const lastUserMessage = queries.getLastUserMessage(conversationId);
  const promptText = typeof lastUserMessage?.content === 'string'
    ? lastUserMessage.content
    : JSON.stringify(lastUserMessage?.content || 'continue');

  const newSession = queries.createSession(conversationId);
  queries.createEvent('session.created', {
    sessionId: newSession.id,
    resumeReason: 'interrupted',
    claudeSessionId: conversation.claudeSessionId,
    checkpointFrom: previousSessionId || null
  }, conversationId, newSession.id);

  // Register the execution before spawning so the health check tracks it;
  // pid stays null until the agent reports one.
  activeExecutions.set(conversationId, {
    pid: null,
    startTime: Date.now(),
    sessionId: newSession.id,
    lastActivity: Date.now()
  });

  broadcastSync({
    type: 'streaming_start',
    sessionId: newSession.id,
    conversationId,
    agentId: conversation.agentType,
    resumed: true,
    checkpointAvailable: !!savedCheckpoint,
    timestamp: Date.now()
  });

  if (savedCheckpoint) {
    checkpointManager.storeCheckpointForDelay(conversationId, savedCheckpoint);
    console.log(`[RESUME] Checkpoint stored for conv ${conversationId}`);
  }

  console.log(`[RESUME] Restarting conv ${conversationId} (reason: ${reason})`);
  await processMessageWithStreaming(conversationId, lastUserMessage?.id || null, newSession.id, promptText, conversation.agentType, conversation.model, conversation.subAgent);
}
// Watchdog over active agent executions: reaps dead processes, terminates
// agents with no recent activity, and flags agents that never reported a
// PID within the startup grace period.
function performAgentHealthCheck() {
  const now = Date.now();
  for (const [conversationId, execution] of activeExecutions) {
    if (!execution) continue;

    // No PID yet: allow a startup grace period before declaring failure.
    if (!execution.pid) {
      if (now - execution.startTime > NO_PID_GRACE_PERIOD_MS) {
        debugLog(`[HEALTH] Agent for conv ${conversationId} never reported PID after ${Math.round((now - execution.startTime) / 1000)}s`);
        markAgentDead(conversationId, execution, 'Agent failed to start (no PID reported)');
      }
      continue;
    }

    if (!isProcessAlive(execution.pid)) {
      debugLog(`[HEALTH] Agent PID ${execution.pid} for conv ${conversationId} is dead`);
      markAgentDead(conversationId, execution, 'Agent process died unexpectedly');
      continue;
    }

    const idleMs = now - execution.lastActivity;
    if (idleMs > STUCK_AGENT_THRESHOLD_MS) {
      debugLog(`[HEALTH] Agent PID ${execution.pid} for conv ${conversationId} has no activity for ${Math.round(idleMs / 1000)}s`);
      // Kill stuck agent and clear streaming state
      try { process.kill(execution.pid, 'SIGTERM'); } catch (e) {}
      markAgentDead(conversationId, execution, 'Agent was stuck (no activity for 30 minutes)');
    }
  }
}
|
|
3167
2808
|
let jsonlWatcher = null;
|
|
3168
2809
|
|
|
3169
2810
|
function onServerReady() {
|