@agent-relay/wrapper 2.0.13 → 2.0.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

@@ -20,7 +20,7 @@ import { spawn, ChildProcess } from 'node:child_process';
 import { createConnection, Socket } from 'node:net';
 import { createHash } from 'node:crypto';
 import { join, dirname } from 'node:path';
-import { existsSync, unlinkSync, mkdirSync, symlinkSync, lstatSync, rmSync, watch, readdirSync, readlinkSync } from 'node:fs';
+import { existsSync, unlinkSync, mkdirSync, symlinkSync, lstatSync, rmSync, watch, readdirSync, readlinkSync, writeFileSync, appendFileSync } from 'node:fs';
 import type { FSWatcher } from 'node:fs';
 import { getProjectPaths } from '@agent-relay/config/project-namespace';
 import { getAgentOutboxTemplate } from '@agent-relay/config/relay-file-writer';
@@ -31,7 +31,8 @@ const __filename = fileURLToPath(import.meta.url);
 const __dirname = dirname(__filename);
 import { BaseWrapper, type BaseWrapperConfig } from './base-wrapper.js';
 import { parseSummaryWithDetails, parseSessionEndFromOutput } from './parser.js';
-import type { SendPayload, SendMeta } from '@agent-relay/protocol/types';
+import type { SendPayload, SendMeta, Envelope } from '@agent-relay/protocol/types';
+import type { ChannelMessagePayload } from '@agent-relay/protocol/channels';
 import { findRelayPtyBinary as findRelayPtyBinaryUtil } from '@agent-relay/utils/relay-pty-path';
 import {
   type QueuedMessage,
@@ -80,7 +81,17 @@ interface ShutdownRequest {
   type: 'shutdown';
 }
 
-[1 line removed here (old 83); content not rendered in this diff view]
+/**
+ * Send just Enter key (for stuck input recovery)
+ * Used when message was written to PTY but Enter wasn't processed
+ */
+interface SendEnterRequest {
+  type: 'send_enter';
+  /** Message ID this is for (for tracking) */
+  id: string;
+}
+
+type RelayPtyRequest = InjectRequest | StatusRequest | ShutdownRequest | SendEnterRequest;
 
 /**
  * Response types received from relay-pty socket
@@ -116,12 +127,26 @@ interface ShutdownAckResponse {
   type: 'shutdown_ack';
 }
 
+/**
+ * Response for SendEnter request (stuck input recovery)
+ */
+interface SendEnterResultResponse {
+  type: 'send_enter_result';
+  /** Message ID this is for */
+  id: string;
+  /** Whether Enter was sent successfully */
+  success: boolean;
+  /** Unix timestamp in milliseconds */
+  timestamp: number;
+}
+
 type RelayPtyResponse =
   | InjectResultResponse
   | StatusResponse
   | BackpressureResponse
   | ErrorResponse
-  | ShutdownAckResponse;
+  | ShutdownAckResponse
+  | SendEnterResultResponse;
 
 /**
  * Configuration for RelayPtyOrchestrator
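The new request/response pair above gives the orchestrator a minimal "nudge Enter" handshake with relay-pty. As an illustration only (the id value is invented, and the exact socket framing is an assumption; the diff shows responses being JSON-parsed but not the delimiter), the exchange looks roughly like:

  // Orchestrator -> relay-pty
  const request: SendEnterRequest = { type: 'send_enter', id: 'msg-0f3a9c12' };

  // relay-pty -> orchestrator, matched back to the pending entry by id
  const response: SendEnterResultResponse = {
    type: 'send_enter_result',
    id: 'msg-0f3a9c12',
    success: true,            // Enter was written to the PTY
    timestamp: 1733740800000, // Unix milliseconds
  };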
@@ -196,6 +221,16 @@ export class RelayPtyOrchestrator extends BaseWrapper {
     retryCount: number; // Track retry attempts
     originalBody: string; // Original injection content for retries
   }> = new Map();
+  // Pending SendEnter requests (for stuck input recovery)
+  private pendingSendEnter: Map<string, {
+    resolve: (verified: boolean) => void;
+    timeout: NodeJS.Timeout;
+    from: string;
+    shortId: string;
+    retryCount: number;
+    originalBody: string;
+    originalResolve: (success: boolean) => void; // Original injection promise resolver
+  }> = new Map();
   private backpressureActive = false;
   private readyForMessages = false;
 
@@ -330,18 +365,38 @@ export class RelayPtyOrchestrator extends BaseWrapper {
 
   /**
    * Debug log - only outputs when debug is enabled
+   * Writes to log file to avoid polluting TUI output
    */
   private log(message: string): void {
     if (this.config.debug) {
-[1 line removed here (old 336); content not rendered in this diff view]
+      const logLine = `${new Date().toISOString()} [relay-pty-orchestrator:${this.config.name}] ${message}\n`;
+      try {
+        const logDir = dirname(this._logPath);
+        if (!existsSync(logDir)) {
+          mkdirSync(logDir, { recursive: true });
+        }
+        appendFileSync(this._logPath, logLine);
+      } catch {
+        // Fallback to stderr if file write fails (only during init before _logPath is set)
+      }
     }
   }
 
   /**
    * Error log - always outputs (errors are important)
+   * Writes to log file to avoid polluting TUI output
    */
   private logError(message: string): void {
-[1 line removed here (old 344); content not rendered in this diff view]
+    const logLine = `${new Date().toISOString()} [relay-pty-orchestrator:${this.config.name}] ERROR: ${message}\n`;
+    try {
+      const logDir = dirname(this._logPath);
+      if (!existsSync(logDir)) {
+        mkdirSync(logDir, { recursive: true });
+      }
+      appendFileSync(this._logPath, logLine);
+    } catch {
+      // Fallback to stderr if file write fails (only during init before _logPath is set)
+    }
   }
 
   /**
@@ -503,6 +558,26 @@ export class RelayPtyOrchestrator extends BaseWrapper {
       this.logError(` Failed to set up outbox: ${err.message}`);
     }
 
+    // Write MCP identity file so MCP servers can discover their agent name
+    // This is needed because Claude Code may not pass through env vars to MCP server processes
+    try {
+      const projectPaths = getProjectPaths(this.config.cwd);
+      const identityDir = join(projectPaths.dataDir);
+      if (!existsSync(identityDir)) {
+        mkdirSync(identityDir, { recursive: true });
+      }
+      // Write a per-process identity file (using PPID so MCP server finds parent's identity)
+      const identityPath = join(identityDir, `mcp-identity-${process.pid}`);
+      writeFileSync(identityPath, this.config.name, 'utf-8');
+      this.log(` Wrote MCP identity file: ${identityPath}`);
+
+      // Also write a simple identity file (for single-agent scenarios)
+      const simpleIdentityPath = join(identityDir, 'mcp-identity');
+      writeFileSync(simpleIdentityPath, this.config.name, 'utf-8');
+    } catch (err: any) {
+      this.logError(` Failed to write MCP identity file: ${err.message}`);
+    }
+
     // Find relay-pty binary
     const binaryPath = this.findRelayPtyBinary();
     if (!binaryPath) {
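The identity files written here are the filesystem counterpart to the RELAY_AGENT_NAME environment variable added to the spawn env later in this diff. A rough consumer-side sketch of how an MCP server process might resolve its agent name from the same data directory; the helper name and the PPID-based lookup are illustrative assumptions (note that the hunk above writes the per-process file under the wrapper's own process.pid, while its comment describes the lookup from the child's side via PPID):

  import { existsSync, readFileSync } from 'node:fs';
  import { join } from 'node:path';

  // Hypothetical helper, not part of this package.
  function resolveAgentName(dataDir: string): string | undefined {
    // 1. Env var exported by the wrapper, when it propagates to the MCP process
    if (process.env.RELAY_AGENT_NAME) return process.env.RELAY_AGENT_NAME;
    // 2. Per-process identity file keyed by the parent (wrapper) PID
    const perParent = join(dataDir, `mcp-identity-${process.ppid}`);
    if (existsSync(perParent)) return readFileSync(perParent, 'utf-8').trim();
    // 3. Simple identity file written for single-agent setups
    const simple = join(dataDir, 'mcp-identity');
    if (existsSync(simple)) return readFileSync(simple, 'utf-8').trim();
    return undefined;
  }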
@@ -552,6 +627,12 @@ export class RelayPtyOrchestrator extends BaseWrapper {
     this.stopProtocolMonitor();
     this.stopPeriodicReminder();
 
+    // Clear socket reconnect timer
+    if (this.socketReconnectTimer) {
+      clearTimeout(this.socketReconnectTimer);
+      this.socketReconnectTimer = undefined;
+    }
+
     // Unregister from memory monitor
     this.memoryMonitor.unregister(this.config.name);
     if (this.memoryAlertHandler) {
@@ -683,6 +764,7 @@ export class RelayPtyOrchestrator extends BaseWrapper {
         ...process.env,
         ...this.config.env,
         AGENT_RELAY_NAME: this.config.name,
+        RELAY_AGENT_NAME: this.config.name, // MCP server uses this env var
         AGENT_RELAY_OUTBOX: this._canonicalOutboxPath, // Agents use this for outbox path
         TERM: 'xterm-256color',
       },
@@ -1156,6 +1238,16 @@ export class RelayPtyOrchestrator extends BaseWrapper {
    */
   private attemptSocketConnection(timeout: number): Promise<void> {
     return new Promise((resolve, reject) => {
+      // Clean up any existing socket before creating new one
+      // This prevents orphaned sockets with stale event handlers
+      if (this.socket) {
+        // Remove all listeners to prevent the old socket's 'close' event
+        // from triggering another reconnect cycle
+        this.socket.removeAllListeners();
+        this.socket.destroy();
+        this.socket = undefined;
+      }
+
       const timer = setTimeout(() => {
         reject(new Error('Socket connection timeout'));
       }, timeout);
@@ -1172,9 +1264,19 @@ export class RelayPtyOrchestrator extends BaseWrapper {
         reject(err);
       });
 
+      // Handle 'end' event - server closed its write side (half-close)
+      this.socket.on('end', () => {
+        this.socketConnected = false;
+        this.log(` Socket received end (server closed write side)`);
+      });
+
       this.socket.on('close', () => {
         this.socketConnected = false;
         this.log(` Socket closed`);
+        // Auto-reconnect if not intentionally stopped
+        if (this.running && !this.isGracefulStop) {
+          this.scheduleSocketReconnect();
+        }
       });
 
       // Handle incoming data (responses)
@@ -1213,6 +1315,64 @@ export class RelayPtyOrchestrator extends BaseWrapper {
     this.pendingInjections.clear();
   }
 
+  /** Timer for socket reconnection */
+  private socketReconnectTimer?: NodeJS.Timeout;
+  /** Current reconnection attempt count */
+  private socketReconnectAttempt = 0;
+
+  /**
+   * Schedule a socket reconnection attempt with exponential backoff
+   */
+  private scheduleSocketReconnect(): void {
+    const maxAttempts = this.config.socketReconnectAttempts ?? 3;
+
+    // Clear any existing timer
+    if (this.socketReconnectTimer) {
+      clearTimeout(this.socketReconnectTimer);
+      this.socketReconnectTimer = undefined;
+    }
+
+    if (this.socketReconnectAttempt >= maxAttempts) {
+      this.logError(` Socket reconnect failed after ${maxAttempts} attempts`);
+      // Reset counter for future reconnects (processMessageQueue can trigger new cycle)
+      this.socketReconnectAttempt = 0;
+      // Note: socketReconnectTimer is already undefined, allowing processMessageQueue
+      // to trigger a new reconnection cycle when new messages arrive
+      return;
+    }
+
+    this.socketReconnectAttempt++;
+    const delay = Math.min(1000 * Math.pow(2, this.socketReconnectAttempt - 1), 10000); // Max 10s
+
+    this.log(` Scheduling socket reconnect in ${delay}ms (attempt ${this.socketReconnectAttempt}/${maxAttempts})`);
+
+    this.socketReconnectTimer = setTimeout(async () => {
+      // Clear timer reference now that callback is executing
+      this.socketReconnectTimer = undefined;
+
+      if (!this.running || this.isGracefulStop) {
+        return;
+      }
+
+      try {
+        const timeout = this.config.socketConnectTimeoutMs ?? 5000;
+        await this.attemptSocketConnection(timeout);
+        this.log(` Socket reconnected successfully`);
+        this.socketReconnectAttempt = 0; // Reset on success
+
+        // Process any queued messages that were waiting
+        if (this.messageQueue.length > 0 && !this.isInjecting) {
+          this.log(` Processing ${this.messageQueue.length} queued messages after reconnect`);
+          this.processMessageQueue();
+        }
+      } catch (err: any) {
+        this.logError(` Socket reconnect attempt ${this.socketReconnectAttempt} failed: ${err.message}`);
+        // Schedule another attempt
+        this.scheduleSocketReconnect();
+      }
+    }, delay);
+  }
+
   /**
    * Send a request to the socket and optionally wait for response
    */
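For reference, the delay formula above yields a 1s/2s/4s/8s progression capped at 10s; with the default of 3 attempts (socketReconnectAttempts ?? 3) only the first three delays are ever used before the counter resets and waits for new queue activity to start another cycle:

  // delay(n) = min(1000 * 2^(n - 1), 10000) for attempt n = 1, 2, 3, ...
  const delays = [1, 2, 3, 4, 5].map((n) => Math.min(1000 * 2 ** (n - 1), 10_000));
  console.log(delays); // [1000, 2000, 4000, 8000, 10000]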
@@ -1266,6 +1426,13 @@ export class RelayPtyOrchestrator extends BaseWrapper {
         case 'shutdown_ack':
           this.log(` Shutdown acknowledged`);
           break;
+
+        case 'send_enter_result':
+          // Handle SendEnter result (stuck input recovery)
+          this.handleSendEnterResult(response).catch((err: Error) => {
+            this.logError(` Error handling send_enter result: ${err.message}`);
+          });
+          break;
       }
     } catch (err: any) {
       this.logError(` Failed to parse socket response: ${err.message}`);
@@ -1356,7 +1523,6 @@ export class RelayPtyOrchestrator extends BaseWrapper {
 
     // Check if we should retry
     if (pending.retryCount < INJECTION_CONSTANTS.MAX_RETRIES - 1) {
-      this.log(` Retrying injection (attempt ${pending.retryCount + 2}/${INJECTION_CONSTANTS.MAX_RETRIES})`);
       clearTimeout(pending.timeout);
       this.pendingInjections.delete(response.id);
 
@@ -1382,40 +1548,56 @@ export class RelayPtyOrchestrator extends BaseWrapper {
       return;
     }
 
-      //
-      //
-[25 lines removed here (old 1387-1411); content not rendered in this diff view]
+      // On first retry attempt (retryCount === 0), try SendEnter first
+      // This handles the case where message content was written but Enter wasn't processed
+      if (pending.retryCount === 0) {
+        this.log(` Trying SendEnter first for ${pending.shortId} (stuck input recovery)`);
+
+        // Send just the Enter key
+        const sendEnterRequest: SendEnterRequest = {
+          type: 'send_enter',
+          id: response.id,
+        };
+
+        // Track this SendEnter request for verification
+        const sendEnterTimeout = setTimeout(() => {
+          this.logError(` SendEnter timeout for ${pending.shortId}`);
+          this.pendingSendEnter.delete(response.id);
+          // Fall back to full retry after SendEnter timeout
+          this.doFullRetry(response.id, pending);
+        }, 5000); // 5 second timeout for SendEnter
+
+        this.pendingSendEnter.set(response.id, {
+          resolve: (verified: boolean) => {
+            if (verified) {
+              // SendEnter worked!
+              this.injectionMetrics.successWithRetry++;
+              this.injectionMetrics.total++;
+              pending.resolve(true);
+            } else {
+              // SendEnter didn't work, do full retry
+              this.doFullRetry(response.id, pending);
+            }
+          },
+          timeout: sendEnterTimeout,
+          from: pending.from,
+          shortId: pending.shortId,
+          retryCount: pending.retryCount,
+          originalBody: pending.originalBody,
+          originalResolve: pending.resolve,
+        });
 
-[6 lines removed here (old 1413-1418); content not rendered in this diff view]
+        this.sendSocketRequest(sendEnterRequest).catch((err) => {
+          this.logError(` SendEnter request failed: ${err.message}`);
+          clearTimeout(sendEnterTimeout);
+          this.pendingSendEnter.delete(response.id);
+          // Fall back to full retry
+          this.doFullRetry(response.id, pending);
+        });
+      } else {
+        // On subsequent retries (retryCount > 0), do full retry directly
+        this.doFullRetry(response.id, pending);
+      }
     } else {
       // Max retries exceeded
       this.logError(` Message ${pending.shortId} failed after ${INJECTION_CONSTANTS.MAX_RETRIES} attempts - NOT found in output`);
@@ -1447,6 +1629,102 @@ export class RelayPtyOrchestrator extends BaseWrapper {
     // queued/injecting are intermediate states - wait for final status
   }
 
+  /**
+   * Handle SendEnter result (stuck input recovery)
+   * Called when relay-pty responds to a SendEnter request
+   */
+  private async handleSendEnterResult(response: SendEnterResultResponse): Promise<void> {
+    this.log(` handleSendEnterResult: id=${response.id.substring(0, 8)} success=${response.success}`);
+
+    const pendingEnter = this.pendingSendEnter.get(response.id);
+    if (!pendingEnter) {
+      this.log(` No pending SendEnter found for ${response.id.substring(0, 8)}`);
+      return;
+    }
+
+    clearTimeout(pendingEnter.timeout);
+    this.pendingSendEnter.delete(response.id);
+
+    if (!response.success) {
+      this.log(` SendEnter failed for ${pendingEnter.shortId}, will try full retry`);
+      pendingEnter.resolve(false);
+      return;
+    }
+
+    // SendEnter succeeded - wait and verify
+    this.log(` SendEnter sent for ${pendingEnter.shortId}, waiting to verify...`);
+    await sleep(150); // Give time for Enter to be processed
+
+    // Verify the message appeared in output
+    const verified = await verifyInjection(
+      pendingEnter.shortId,
+      pendingEnter.from,
+      async () => this.getCleanOutput()
+    );
+
+    if (verified) {
+      this.log(` Message ${pendingEnter.shortId} verified after SendEnter ✓`);
+      pendingEnter.resolve(true);
+    } else {
+      this.log(` Message ${pendingEnter.shortId} still not verified after SendEnter, will try full retry`);
+      pendingEnter.resolve(false);
+    }
+  }
+
+  /**
+   * Do a full retry with message content (used when SendEnter fails or for subsequent retries)
+   */
+  private doFullRetry(
+    messageId: string,
+    pending: {
+      resolve: (success: boolean) => void;
+      reject: (error: Error) => void;
+      from: string;
+      shortId: string;
+      retryCount: number;
+      originalBody: string;
+    }
+  ): void {
+    this.log(` Doing full retry for ${pending.shortId} (attempt ${pending.retryCount + 2}/${INJECTION_CONSTANTS.MAX_RETRIES})`);
+
+    // Re-inject by sending another socket request
+    // Prepend [RETRY] to help agent notice this is a retry
+    const retryBody = pending.originalBody.startsWith('[RETRY]')
+      ? pending.originalBody
+      : `[RETRY] ${pending.originalBody}`;
+    const retryRequest: InjectRequest = {
+      type: 'inject',
+      id: messageId,
+      from: pending.from,
+      body: retryBody,
+      priority: 1, // Higher priority for retries
+    };
+
+    // Create new pending entry with incremented retry count
+    const newTimeout = setTimeout(() => {
+      this.logError(` Retry timeout for ${pending.shortId}`);
+      this.pendingInjections.delete(messageId);
+      pending.resolve(false);
+    }, 30000);
+
+    this.pendingInjections.set(messageId, {
+      resolve: pending.resolve,
+      reject: pending.reject,
+      timeout: newTimeout,
+      from: pending.from,
+      shortId: pending.shortId,
+      retryCount: pending.retryCount + 1,
+      originalBody: retryBody,
+    });
+
+    this.sendSocketRequest(retryRequest).catch((err) => {
+      this.logError(` Full retry request failed: ${err.message}`);
+      clearTimeout(newTimeout);
+      this.pendingInjections.delete(messageId);
+      pending.resolve(false);
+    });
+  }
+
   /**
    * Handle backpressure notification
    */
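Taken together with the retry branch earlier in this diff, a failed injection verification now walks a short ladder: the first failure sends only Enter and re-verifies after a 150 ms settle; a SendEnter failure, a 5 s SendEnter timeout, or a still-unverified message falls back to doFullRetry, which re-injects a body prefixed with [RETRY] at priority 1 under a fresh 30 s timeout; once retryCount reaches MAX_RETRIES - 1 the message is given up on. A condensed sketch of that decision (the helper is illustrative, not part of the package):

  type RecoveryStep = 'send_enter' | 'full_retry' | 'give_up';

  // Mirrors the branching above; retryCount is the number of retries already attempted.
  function nextRecoveryStep(retryCount: number, maxRetries: number): RecoveryStep {
    if (retryCount >= maxRetries - 1) return 'give_up'; // max attempts exhausted
    if (retryCount === 0) return 'send_enter';          // first failure: nudge Enter only
    return 'full_retry';                                // later failures: full re-injection
  }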
@@ -1532,14 +1810,46 @@ export class RelayPtyOrchestrator extends BaseWrapper {
    * Process queued messages
    */
   private async processMessageQueue(): Promise<void> {
-[2 lines removed here (old 1535-1536); content not rendered in this diff view]
+    // Debug: Log blocking conditions when queue has messages
+    if (this.messageQueue.length > 0) {
+      if (!this.readyForMessages) {
+        this.log(` Queue blocked: readyForMessages=false (queue=${this.messageQueue.length})`);
+        return;
+      }
+      if (this.backpressureActive) {
+        this.log(` Queue blocked: backpressure active (queue=${this.messageQueue.length})`);
+        return;
+      }
+      if (this.isInjecting) {
+        // Already injecting - the finally block will process next message
+        // But add a safety timeout in case injection gets stuck
+        const elapsed = this.injectionStartTime > 0 ? Date.now() - this.injectionStartTime : 0;
+        if (elapsed > 35000) {
+          this.logError(` Injection stuck for ${elapsed}ms, forcing reset`);
+          this.isInjecting = false;
+          this.injectionStartTime = 0;
+        }
+        return;
+      }
     }
 
     if (this.messageQueue.length === 0) {
       return;
     }
 
+    // Proactively reconnect socket if disconnected and we have messages to send
+    if (!this.socketConnected && !this.socketReconnectTimer) {
+      this.log(` Socket disconnected, triggering reconnect before processing queue`);
+      this.scheduleSocketReconnect();
+      return; // Wait for reconnection to complete
+    }
+
+    if (!this.socketConnected) {
+      // Reconnection in progress, wait for it
+      this.log(` Queue waiting: socket reconnecting (queue=${this.messageQueue.length})`);
+      return;
+    }
+
     // Check if agent is in editor mode - delay injection if so
     const idleResult = this.idleDetector.checkIdle();
     if (idleResult.inEditorMode) {
@@ -1608,6 +1918,24 @@ export class RelayPtyOrchestrator extends BaseWrapper {
     this.processMessageQueue();
   }
 
+  /**
+   * Override handleIncomingChannelMessage to trigger queue processing.
+   * Without this override, channel messages would be queued but processMessageQueue()
+   * would never be called, causing messages to get stuck until the queue monitor runs.
+   */
+  protected override handleIncomingChannelMessage(
+    from: string,
+    channel: string,
+    body: string,
+    envelope: Envelope<ChannelMessagePayload>
+  ): void {
+    this.log(` === CHANNEL MESSAGE RECEIVED: ${envelope.id.substring(0, 8)} from ${from} on ${channel} ===`);
+    this.log(` Body preview: ${body?.substring(0, 100) ?? '(no body)'}...`);
+    super.handleIncomingChannelMessage(from, channel, body, envelope);
+    this.log(` Queue length after add: ${this.messageQueue.length}`);
+    this.processMessageQueue();
+  }
+
   // =========================================================================
   // Queue monitor - Detect and process stuck messages
   // =========================================================================
@@ -2288,6 +2616,10 @@ Then output: \`->relay-file:spawn\`
    */
   async kill(): Promise<void> {
     this.isGracefulStop = true; // Mark as intentional to prevent crash broadcast
+    if (this.socketReconnectTimer) {
+      clearTimeout(this.socketReconnectTimer);
+      this.socketReconnectTimer = undefined;
+    }
     if (this.relayPtyProcess && !this.relayPtyProcess.killed) {
       this.relayPtyProcess.kill('SIGKILL');
     }