@suzuke/agend 0.0.1 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +557 -1
- package/README.zh-TW.md +504 -0
- package/dist/access-path.d.ts +7 -0
- package/dist/access-path.js +12 -0
- package/dist/access-path.js.map +1 -0
- package/dist/approval/approval-server.d.ts +30 -0
- package/dist/approval/approval-server.js +156 -0
- package/dist/approval/approval-server.js.map +1 -0
- package/dist/approval/tmux-prompt-detector.d.ts +34 -0
- package/dist/approval/tmux-prompt-detector.js +264 -0
- package/dist/approval/tmux-prompt-detector.js.map +1 -0
- package/dist/backend/approval-strategy.d.ts +14 -0
- package/dist/backend/approval-strategy.js +2 -0
- package/dist/backend/approval-strategy.js.map +1 -0
- package/dist/backend/claude-code.d.ts +13 -0
- package/dist/backend/claude-code.js +114 -0
- package/dist/backend/claude-code.js.map +1 -0
- package/dist/backend/codex.d.ts +10 -0
- package/dist/backend/codex.js +58 -0
- package/dist/backend/codex.js.map +1 -0
- package/dist/backend/factory.d.ts +2 -0
- package/dist/backend/factory.js +19 -0
- package/dist/backend/factory.js.map +1 -0
- package/dist/backend/gemini-cli.d.ts +10 -0
- package/dist/backend/gemini-cli.js +68 -0
- package/dist/backend/gemini-cli.js.map +1 -0
- package/dist/backend/hook-based-approval.d.ts +20 -0
- package/dist/backend/hook-based-approval.js +41 -0
- package/dist/backend/hook-based-approval.js.map +1 -0
- package/dist/backend/index.d.ts +6 -0
- package/dist/backend/index.js +6 -0
- package/dist/backend/index.js.map +1 -0
- package/dist/backend/opencode.d.ts +10 -0
- package/dist/backend/opencode.js +63 -0
- package/dist/backend/opencode.js.map +1 -0
- package/dist/backend/types.d.ts +26 -0
- package/dist/backend/types.js +2 -0
- package/dist/backend/types.js.map +1 -0
- package/dist/channel/access-manager.d.ts +18 -0
- package/dist/channel/access-manager.js +149 -0
- package/dist/channel/access-manager.js.map +1 -0
- package/dist/channel/adapters/discord.d.ts +45 -0
- package/dist/channel/adapters/discord.js +366 -0
- package/dist/channel/adapters/discord.js.map +1 -0
- package/dist/channel/adapters/telegram.d.ts +58 -0
- package/dist/channel/adapters/telegram.js +569 -0
- package/dist/channel/adapters/telegram.js.map +1 -0
- package/dist/channel/attachment-handler.d.ts +15 -0
- package/dist/channel/attachment-handler.js +55 -0
- package/dist/channel/attachment-handler.js.map +1 -0
- package/dist/channel/factory.d.ts +12 -0
- package/dist/channel/factory.js +38 -0
- package/dist/channel/factory.js.map +1 -0
- package/dist/channel/ipc-bridge.d.ts +26 -0
- package/dist/channel/ipc-bridge.js +170 -0
- package/dist/channel/ipc-bridge.js.map +1 -0
- package/dist/channel/mcp-server.d.ts +10 -0
- package/dist/channel/mcp-server.js +196 -0
- package/dist/channel/mcp-server.js.map +1 -0
- package/dist/channel/mcp-tools.d.ts +909 -0
- package/dist/channel/mcp-tools.js +346 -0
- package/dist/channel/mcp-tools.js.map +1 -0
- package/dist/channel/message-bus.d.ts +17 -0
- package/dist/channel/message-bus.js +86 -0
- package/dist/channel/message-bus.js.map +1 -0
- package/dist/channel/message-queue.d.ts +39 -0
- package/dist/channel/message-queue.js +248 -0
- package/dist/channel/message-queue.js.map +1 -0
- package/dist/channel/tool-router.d.ts +6 -0
- package/dist/channel/tool-router.js +69 -0
- package/dist/channel/tool-router.js.map +1 -0
- package/dist/channel/tool-tracker.d.ts +13 -0
- package/dist/channel/tool-tracker.js +58 -0
- package/dist/channel/tool-tracker.js.map +1 -0
- package/dist/channel/types.d.ts +116 -0
- package/dist/channel/types.js +2 -0
- package/dist/channel/types.js.map +1 -0
- package/dist/cli.d.ts +2 -0
- package/dist/cli.js +782 -0
- package/dist/cli.js.map +1 -0
- package/dist/config.d.ts +8 -0
- package/dist/config.js +85 -0
- package/dist/config.js.map +1 -0
- package/dist/container-manager.d.ts +24 -0
- package/dist/container-manager.js +148 -0
- package/dist/container-manager.js.map +1 -0
- package/dist/context-guardian.d.ts +29 -0
- package/dist/context-guardian.js +123 -0
- package/dist/context-guardian.js.map +1 -0
- package/dist/cost-guard.d.ts +21 -0
- package/dist/cost-guard.js +113 -0
- package/dist/cost-guard.js.map +1 -0
- package/dist/daemon-entry.d.ts +1 -0
- package/dist/daemon-entry.js +29 -0
- package/dist/daemon-entry.js.map +1 -0
- package/dist/daemon.d.ts +88 -0
- package/dist/daemon.js +820 -0
- package/dist/daemon.js.map +1 -0
- package/dist/daily-summary.d.ts +13 -0
- package/dist/daily-summary.js +55 -0
- package/dist/daily-summary.js.map +1 -0
- package/dist/db.d.ts +10 -0
- package/dist/db.js +43 -0
- package/dist/db.js.map +1 -0
- package/dist/event-log.d.ts +22 -0
- package/dist/event-log.js +66 -0
- package/dist/event-log.js.map +1 -0
- package/dist/export-import.d.ts +2 -0
- package/dist/export-import.js +110 -0
- package/dist/export-import.js.map +1 -0
- package/dist/fleet-context.d.ts +36 -0
- package/dist/fleet-context.js +4 -0
- package/dist/fleet-context.js.map +1 -0
- package/dist/fleet-manager.d.ts +115 -0
- package/dist/fleet-manager.js +1742 -0
- package/dist/fleet-manager.js.map +1 -0
- package/dist/fleet-system-prompt.d.ts +11 -0
- package/dist/fleet-system-prompt.js +60 -0
- package/dist/fleet-system-prompt.js.map +1 -0
- package/dist/hang-detector.d.ts +16 -0
- package/dist/hang-detector.js +53 -0
- package/dist/hang-detector.js.map +1 -0
- package/dist/index.d.ts +8 -0
- package/dist/index.js +6 -0
- package/dist/index.js.map +1 -0
- package/dist/install-recorder.d.ts +30 -0
- package/dist/install-recorder.js +159 -0
- package/dist/install-recorder.js.map +1 -0
- package/dist/logger.d.ts +3 -0
- package/dist/logger.js +63 -0
- package/dist/logger.js.map +1 -0
- package/dist/meeting/orchestrator.d.ts +30 -0
- package/dist/meeting/orchestrator.js +355 -0
- package/dist/meeting/orchestrator.js.map +1 -0
- package/dist/meeting/prompt-builder.d.ts +12 -0
- package/dist/meeting/prompt-builder.js +96 -0
- package/dist/meeting/prompt-builder.js.map +1 -0
- package/dist/meeting/role-assigner.d.ts +2 -0
- package/dist/meeting/role-assigner.js +25 -0
- package/dist/meeting/role-assigner.js.map +1 -0
- package/dist/meeting/types.d.ts +21 -0
- package/dist/meeting/types.js +2 -0
- package/dist/meeting/types.js.map +1 -0
- package/dist/meeting-manager.d.ts +10 -0
- package/dist/meeting-manager.js +38 -0
- package/dist/meeting-manager.js.map +1 -0
- package/dist/memory-layer.d.ts +13 -0
- package/dist/memory-layer.js +44 -0
- package/dist/memory-layer.js.map +1 -0
- package/dist/plugin/agend/.claude-plugin/plugin.json +5 -0
- package/dist/plugin/agend/.mcp.json +9 -0
- package/dist/plugin/ccd-channel/.claude-plugin/plugin.json +5 -0
- package/dist/plugin/ccd-channel/.mcp.json +9 -0
- package/dist/process-manager.d.ts +31 -0
- package/dist/process-manager.js +264 -0
- package/dist/process-manager.js.map +1 -0
- package/dist/scheduler/db.d.ts +16 -0
- package/dist/scheduler/db.js +132 -0
- package/dist/scheduler/db.js.map +1 -0
- package/dist/scheduler/db.test.d.ts +1 -0
- package/dist/scheduler/db.test.js +92 -0
- package/dist/scheduler/db.test.js.map +1 -0
- package/dist/scheduler/index.d.ts +4 -0
- package/dist/scheduler/index.js +4 -0
- package/dist/scheduler/index.js.map +1 -0
- package/dist/scheduler/scheduler.d.ts +25 -0
- package/dist/scheduler/scheduler.js +119 -0
- package/dist/scheduler/scheduler.js.map +1 -0
- package/dist/scheduler/scheduler.test.d.ts +1 -0
- package/dist/scheduler/scheduler.test.js +119 -0
- package/dist/scheduler/scheduler.test.js.map +1 -0
- package/dist/scheduler/types.d.ts +47 -0
- package/dist/scheduler/types.js +7 -0
- package/dist/scheduler/types.js.map +1 -0
- package/dist/service-installer.d.ts +14 -0
- package/dist/service-installer.js +91 -0
- package/dist/service-installer.js.map +1 -0
- package/dist/setup-wizard.d.ts +14 -0
- package/dist/setup-wizard.js +517 -0
- package/dist/setup-wizard.js.map +1 -0
- package/dist/stt.d.ts +10 -0
- package/dist/stt.js +33 -0
- package/dist/stt.js.map +1 -0
- package/dist/tmux-manager.d.ts +22 -0
- package/dist/tmux-manager.js +131 -0
- package/dist/tmux-manager.js.map +1 -0
- package/dist/topic-commands.d.ts +22 -0
- package/dist/topic-commands.js +176 -0
- package/dist/topic-commands.js.map +1 -0
- package/dist/transcript-monitor.d.ts +21 -0
- package/dist/transcript-monitor.js +149 -0
- package/dist/transcript-monitor.js.map +1 -0
- package/dist/types.d.ts +153 -0
- package/dist/types.js +2 -0
- package/dist/types.js.map +1 -0
- package/dist/webhook-emitter.d.ts +15 -0
- package/dist/webhook-emitter.js +41 -0
- package/dist/webhook-emitter.js.map +1 -0
- package/package.json +60 -4
- package/templates/launchd.plist.ejs +29 -0
- package/templates/systemd.service.ejs +15 -0
- package/index.js +0 -1
package/dist/daemon.js
ADDED
|
@@ -0,0 +1,820 @@
|
|
|
1
|
+
import { join, dirname } from "node:path";
|
|
2
|
+
import { mkdirSync, writeFileSync, readFileSync, existsSync, unlinkSync } from "node:fs";
|
|
3
|
+
import { fileURLToPath } from "node:url";
|
|
4
|
+
import { EventEmitter } from "node:events";
|
|
5
|
+
import { createLogger } from "./logger.js";
|
|
6
|
+
import { TmuxManager } from "./tmux-manager.js";
|
|
7
|
+
import { TranscriptMonitor } from "./transcript-monitor.js";
|
|
8
|
+
import { ContextGuardian } from "./context-guardian.js";
|
|
9
|
+
import { IpcServer } from "./channel/ipc-bridge.js";
|
|
10
|
+
import { MessageBus } from "./channel/message-bus.js";
|
|
11
|
+
import { routeToolCall } from "./channel/tool-router.js";
|
|
12
|
+
import { generateFleetSystemPrompt } from "./fleet-system-prompt.js";
|
|
13
|
+
import { HangDetector } from "./hang-detector.js";
|
|
14
|
+
const __filename = fileURLToPath(import.meta.url);
|
|
15
|
+
const __dirname = dirname(__filename);
|
|
16
|
+
/**
 * One managed agent instance: owns the tmux window running the agent CLI,
 * the IPC socket bridging the MCP server to this daemon, and the monitors
 * wired around them (transcript monitor, context guardian, hang detector).
 *
 * Emits:
 *  - "restart_complete" — a context-rotation restart finished (with metrics)
 *  - "crash_loop"       — the window keeps dying right after spawn; respawn paused
 */
export class Daemon extends EventEmitter {
    name;                       // instance name (also used as the tmux window / session identity)
    config;                     // instance config (log_level, lightweight, restart_policy, context_guardian, …)
    instanceDir;                // per-instance state directory (pid file, socket, logs, window-id)
    topicMode;                  // when true, cross-instance tools are routed to the fleet manager
    backend;                    // optional backend adapter (may expose cleanup())
    logger;
    tmux = null;                // TmuxManager for this instance's window; null until start()
    ipcServer = null;           // unix-socket IPC server; null until start()
    messageBus;
    transcriptMonitor = null;
    toolTracker = null;
    guardian = null;            // ContextGuardian; null in lightweight mode
    adapter = null;             // channel adapter (Telegram/Discord); set externally
    pendingIpcRequests = new Map(); // key → response handler, resolved by the IPC dispatcher
    // Track chatId/threadId from inbound messages for automatic outbound routing
    lastChatId;
    lastThreadId;
    // Pending ack: react 🫡 on first transcript activity after receiving a message
    pendingAckMessage = null;
    // Tool status tracking for Telegram
    toolStatusMessageId = null; // messageId of the status message being edited in place
    toolStatusLines = [];       // one "⏳/✅ <summary>" line per tracked tool call
    toolStatusDebounce = null;  // debounce timer for sendToolStatus()
    // Session identity: map IPC socket → sessionName (from mcp_ready)
    socketSessionNames = new Map();
    // Crash recovery
    healthCheckTimer = null;
    crashCount = 0;             // crashes since last reset (compared against restart_policy.max_retries)
    lastCrashAt = 0;
    lastSpawnAt = 0;
    rapidCrashCount = 0;        // consecutive deaths within 60s of spawn
    healthCheckPaused = false;  // set when a crash loop is detected
    spawning = false;
    // Context rotation quality tracking
    rotationStartedAt = 0;
    preRotationContextPct = 0;
    hangDetector = null;
    // Model failover: override model on next spawn when rate-limited
    modelOverride;
    // Context rotation v3: ring buffers for daemon-side snapshot
    recentUserMessages = [];
    recentEvents = [];
    recentToolActivity = [];
|
|
60
|
+
/**
 * @param {string} name - instance name (used for routing and tmux window identity)
 * @param {object} config - instance configuration (log_level, lightweight, restart_policy, …)
 * @param {string} instanceDir - directory for this instance's runtime state
 * @param {boolean} [topicMode=false] - enable fleet-manager routing of cross-instance tools
 * @param {object} [backend] - optional backend adapter (may expose cleanup())
 */
constructor(name, config, instanceDir, topicMode = false, backend) {
    super();
    this.name = name;
    this.config = config;
    this.instanceDir = instanceDir;
    this.topicMode = topicMode;
    this.backend = backend;
    this.logger = createLogger(config.log_level);
    this.messageBus = new MessageBus();
    this.messageBus.setLogger(this.logger);
}
|
|
71
|
+
/**
 * Bring the instance fully online: write the pid file, start the IPC server
 * and its dispatchers, spawn a fresh tmux window for the agent, and (unless
 * config.lightweight) wire the transcript monitor, hang detector, context
 * guardian, and the crash-recovery health check.
 */
async start() {
    mkdirSync(this.instanceDir, { recursive: true });
    writeFileSync(join(this.instanceDir, "daemon.pid"), String(process.pid));
    this.logger.info(`Starting ${this.name}`);
    // 1. IPC server — bridge between MCP server (Claude's child) and daemon
    const sockPath = join(this.instanceDir, "channel.sock");
    this.ipcServer = new IpcServer(sockPath, this.logger);
    await this.ipcServer.listen();
    // Permanent IPC dispatcher: routes responses to pending requests by type+id key
    this.ipcServer.on("message", (msg) => {
        const type = msg.type;
        if (!type)
            return;
        // Build lookup key matching the pattern used when registering
        let key;
        if ((type === "fleet_schedule_response" || type === "fleet_outbound_response") && msg.fleetRequestId) {
            key = String(msg.fleetRequestId);
        }
        else if (type === "fleet_outbound_response" && msg.requestId != null) {
            // Legacy key shape for outbound responses carrying a bare requestId
            key = `fleet_out_${msg.requestId}`;
        }
        if (key && this.pendingIpcRequests.has(key)) {
            const handler = this.pendingIpcRequests.get(key);
            this.pendingIpcRequests.delete(key);
            handler(msg);
        }
    });
    // IPC message relay: when daemon wants to push a channel message to Claude,
    // it broadcasts to all IPC clients (the MCP server is one of them).
    // When MCP server sends a tool_call, daemon handles it via the messageBus.
    this.ipcServer.on("message", (msg, socket) => {
        if (msg.type === "tool_call") {
            // MCP server forwarding a Claude tool call (reply, react, edit, download)
            this.handleToolCall(msg, socket);
        }
        else if (msg.type === "mcp_ready") {
            const sessionName = msg.sessionName;
            if (sessionName) {
                this.socketSessionNames.set(socket, sessionName);
                socket.on("close", () => {
                    this.socketSessionNames.delete(socket);
                    // Notify fleet manager so it can clean up sessionRegistry
                    if (sessionName !== this.name) {
                        this.ipcServer?.broadcast({ type: "session_disconnected", sessionName });
                    }
                });
            }
            this.logger.debug({ sessionName }, "MCP channel server connected and ready");
            // Notify FleetManager's IPC client that MCP is ready
            this.ipcServer?.broadcast({ type: "mcp_ready", sessionName });
        }
        else if (msg.type === "query_sessions") {
            // Fleet manager asks for all registered session names (catches sessions
            // that sent mcp_ready before fleet manager connected).
            const sessions = [];
            for (const [s, sessionName] of this.socketSessionNames) {
                if (!s.destroyed && sessionName !== this.name) {
                    // Individual mcp_ready for initial registration path
                    this.ipcServer?.send(socket, { type: "mcp_ready", sessionName });
                    sessions.push(sessionName);
                }
            }
            // Batch response for prune path
            this.ipcServer?.send(socket, { type: "query_sessions_response", sessions });
        }
        else if (msg.type === "fleet_inbound") {
            // Fleet manager routed a message to us (topic mode)
            const meta = msg.meta;
            const targetSession = msg.targetSession;
            // Only update lastChatId/lastThreadId from real Telegram messages (non-empty chat_id).
            // Cross-instance messages have empty chat_id and must not overwrite these.
            if (meta.chat_id)
                this.lastChatId = meta.chat_id;
            if (meta.chat_id && meta.thread_id)
                this.lastThreadId = meta.thread_id;
            this.pushChannelMessage(msg.content, meta, targetSession);
        }
        else if (msg.type === "fleet_schedule_trigger") {
            // Scheduled job fired; inject its message as if it arrived on the channel
            const payload = msg.payload;
            const meta = msg.meta;
            this.lastChatId = meta.chat_id;
            this.lastThreadId = meta.thread_id;
            this.pushChannelMessage(payload.message, meta);
        }
        else if (msg.type === "fleet_tool_status_ack") {
            // Fleet manager sent us the messageId for our tool status message
            this.toolStatusMessageId = msg.messageId;
        }
    });
    // 2. Tmux — ensure session, create window if not alive
    const sessionName = "agend";
    await TmuxManager.ensureSession(sessionName);
    this.tmux = new TmuxManager(sessionName, "");
    // Strategy A: always start fresh Claude window (MCP server has no reconnection)
    // Kill any existing window from previous run
    const windowIdFile = join(this.instanceDir, "window-id");
    if (existsSync(windowIdFile)) {
        const savedId = readFileSync(windowIdFile, "utf-8").trim();
        if (savedId) {
            const oldTmux = new TmuxManager(sessionName, savedId);
            if (await oldTmux.isWindowAlive()) {
                // Persist the session id before killing so --resume can pick it up
                this.saveSessionId();
                await oldTmux.killWindow();
                this.logger.info({ savedId }, "Killed old tmux window for fresh start");
            }
        }
    }
    await this.spawnClaudeWindow();
    if (!this.config.lightweight) {
        // 3. Pipe-pane for prompt detection (best-effort; failure is non-fatal)
        const outputLog = join(this.instanceDir, "output.log");
        await this.tmux.pipeOutput(outputLog).catch(() => { });
        // 4. Transcript monitor
        this.transcriptMonitor = new TranscriptMonitor(this.instanceDir, this.logger);
        // 5. Wire transcript events
        const ackIfPending = () => {
            if (!this.pendingAckMessage || !this.adapter)
                return;
            const { chatId, messageId } = this.pendingAckMessage;
            // Clear before the async react so a second event can't double-ack
            this.pendingAckMessage = null;
            this.adapter.react(chatId, messageId, "🫡")
                .catch(e => this.logger.debug({ err: e.message }, "Ack react failed"));
        };
        this.transcriptMonitor.on("tool_use", (name, input) => {
            this.logger.debug({ tool: name }, "Tool use");
            ackIfPending();
            this.hangDetector?.recordActivity();
            this.recordRecentEvent({ type: "tool_use", name, preview: this.summarizeTool(name, input) });
            this.recordRecentToolActivity(this.summarizeTool(name, input));
        });
        this.transcriptMonitor.on("tool_result", (name, _output) => {
            this.hangDetector?.recordActivity();
            this.recordRecentEvent({ type: "tool_result", name });
        });
        this.transcriptMonitor.on("assistant_text", (text) => {
            this.logger.debug({ text: text.slice(0, 200) }, "Claude response");
            ackIfPending();
            this.hangDetector?.recordActivity();
            this.recordRecentEvent({ type: "assistant_text", preview: text.slice(0, 100) });
        });
        this.transcriptMonitor.startPolling();
        // Hang detector (threshold argument is 15 — presumably minutes; confirm in HangDetector)
        this.hangDetector = new HangDetector(15);
        this.hangDetector.start();
        // 8. Context guardian
        const statusFile = join(this.instanceDir, "statusline.json");
        this.guardian = new ContextGuardian(this.config.context_guardian, this.logger, statusFile);
        this.guardian.startWatching();
        this.guardian.startTimer();
        this.guardian.on("status_update", () => {
            this.saveSessionId();
            this.hangDetector?.recordStatuslineUpdate();
        });
        // v3: daemon-driven restart — no handover prompt, no validation
        this.guardian.on("restart_requested", async (reason) => {
            this.rotationStartedAt = Date.now();
            this.preRotationContextPct = this.readContextPercentage();
            this.logger.info({ reason, context_pct: this.preRotationContextPct }, "Restart requested");
            // Minimal idle barrier: let current step settle (best-effort, not a handover wait)
            await this.waitForIdle(5000);
            // Collect and write daemon-side snapshot
            const snapshot = this.writeRotationSnapshot(reason);
            // Save session id, kill and respawn
            this.saveSessionId();
            await this.tmux?.killWindow();
            this.transcriptMonitor?.resetOffset();
            // Clear ring buffers for new session
            this.recentUserMessages = [];
            this.recentEvents = [];
            this.recentToolActivity = [];
            await this.spawnClaudeWindow();
            // Track restart metrics
            const durationMs = Date.now() - this.rotationStartedAt;
            this.emit("restart_complete", {
                instance: this.name,
                reason,
                pre_restart_context_pct: this.preRotationContextPct,
                restart_duration_ms: durationMs,
                snapshot_user_message_count: snapshot.recent_user_messages?.length ?? 0,
                snapshot_event_count: snapshot.recent_events?.length ?? 0,
            });
            this.guardian?.markRestartComplete();
            this.logger.info({ reason, duration_ms: durationMs }, "Restart complete — fresh Claude session started");
        });
    }
    // Set AGEND_SOCKET_PATH env for MCP server
    process.env.AGEND_SOCKET_PATH = sockPath;
    // 10. Health check — detect crashed tmux window and respawn
    if (!this.config.lightweight) {
        this.startHealthCheck();
    }
    this.logger.info(`${this.name} ready`);
}
|
|
264
|
+
/**
 * Periodic (30s) crash-recovery loop: if the tmux window has died, respawn it
 * with backoff, honoring restart_policy (max_retries, backoff strategy,
 * reset_after window). Pauses entirely on a detected crash loop.
 */
startHealthCheck() {
    const { max_retries, backoff, reset_after } = this.config.restart_policy;
    if (max_retries <= 0)
        return; // restart disabled
    const scheduleNext = () => {
        this.healthCheckTimer = setTimeout(async () => {
            // Skip the check while a restart/spawn is in flight or recovery is paused
            if (!this.tmux || this.guardian?.state === "RESTARTING" || this.spawning || this.healthCheckPaused) {
                scheduleNext();
                return;
            }
            const alive = await this.tmux.isWindowAlive();
            if (alive) {
                scheduleNext();
                return;
            }
            // Detect rapid crash: window died within 60s of spawn
            if (this.lastSpawnAt > 0 && Date.now() - this.lastSpawnAt < 60_000) {
                this.rapidCrashCount++;
            }
            else {
                this.rapidCrashCount = 0;
            }
            // NOTE(review): >= 1 pauses after the FIRST rapid crash, though the log
            // message says "keeps crashing" — confirm the threshold is intentional.
            if (this.rapidCrashCount >= 1) {
                this.healthCheckPaused = true;
                this.logger.error({ rapidCrashCount: this.rapidCrashCount }, "Claude keeps crashing shortly after launch (possible rate limit) — pausing respawn");
                this.emit("crash_loop", this.name);
                return; // don't schedule next — paused
            }
            // Reset crash count if enough time has passed
            if (reset_after > 0 && Date.now() - this.lastCrashAt > reset_after) {
                this.crashCount = 0;
            }
            this.crashCount++;
            this.lastCrashAt = Date.now();
            if (this.crashCount > max_retries) {
                this.logger.error({ crashCount: this.crashCount, maxRetries: max_retries }, "Max crash retries exceeded — not respawning");
                return; // don't schedule next — given up
            }
            // Calculate backoff delay (exponential capped at 60s, else linear)
            const delay = backoff === "exponential"
                ? Math.min(1000 * Math.pow(2, this.crashCount - 1), 60_000)
                : 1000 * this.crashCount;
            this.logger.warn({ crashCount: this.crashCount, delay }, "Claude window died — respawning after backoff");
            await new Promise(r => setTimeout(r, delay));
            try {
                this.saveSessionId();
                this.transcriptMonitor?.resetOffset();
                // Kill any same-name windows before respawn to prevent orphans
                const windows = await TmuxManager.listWindows("agend");
                for (const w of windows) {
                    if (w.name === this.name) {
                        const tm = new TmuxManager("agend", w.id);
                        await tm.killWindow();
                    }
                }
                await this.spawnClaudeWindow();
                this.logger.info("Respawned Claude window after crash");
            }
            catch (err) {
                this.logger.error({ err }, "Failed to respawn Claude window");
            }
            scheduleNext();
        }, 30_000);
    };
    scheduleNext();
}
|
|
330
|
+
/**
 * Graceful shutdown: cancel timers, stop monitors and the adapter, close the
 * IPC server, kill the tmux window (session id saved first so the next start
 * can resume), run backend cleanup, and remove the pid/window-id files.
 */
async stop() {
    this.logger.info("Stopping daemon instance");
    if (this.healthCheckTimer) {
        clearTimeout(this.healthCheckTimer);
        this.healthCheckTimer = null;
    }
    if (this.toolStatusDebounce) {
        clearTimeout(this.toolStatusDebounce);
        this.toolStatusDebounce = null;
    }
    this.pendingIpcRequests.clear();
    this.hangDetector?.stop();
    this.transcriptMonitor?.stop();
    this.guardian?.stop();
    if (this.adapter)
        await this.adapter.stop();
    await this.ipcServer?.close();
    // Strategy A: kill window on stop, resume via --resume on next start
    // MCP server has no reconnection → keeping window alive would leave
    // Claude without channel/approval connectivity
    if (this.tmux) {
        this.saveSessionId();
        await this.tmux.killWindow();
        const windowIdFile = join(this.instanceDir, "window-id");
        try {
            unlinkSync(windowIdFile);
        }
        catch (e) {
            // Best-effort: the file may already be gone
            this.logger.debug({ err: e }, "Failed to remove window-id file");
        }
    }
    // Clean up backend config files
    if (this.backend?.cleanup) {
        this.backend.cleanup(this.buildBackendConfig());
    }
    const pidPath = join(this.instanceDir, "daemon.pid");
    try {
        unlinkSync(pidPath);
    }
    catch (e) {
        // Best-effort: the file may already be gone
        this.logger.debug({ err: e }, "Failed to remove PID file");
    }
}
|
|
373
|
+
/** @returns {HangDetector|null} the hang detector, or null in lightweight mode / before start() */
getHangDetector() {
    return this.hangDetector;
}
|
|
376
|
+
/** @returns {MessageBus} the instance's message bus (created in the constructor) */
getMessageBus() {
    return this.messageBus;
}
|
|
379
|
+
// ── Tool status tracking ──────────────────────────────────────
|
|
380
|
+
summarizeTool(name, input) {
|
|
381
|
+
const inp = input;
|
|
382
|
+
if (!inp)
|
|
383
|
+
return name;
|
|
384
|
+
if (name === "Read")
|
|
385
|
+
return `Read ${inp.file_path ?? ""}`;
|
|
386
|
+
if (name === "Edit")
|
|
387
|
+
return `Edit ${inp.file_path ?? ""}`;
|
|
388
|
+
if (name === "Write")
|
|
389
|
+
return `Write ${inp.file_path ?? ""}`;
|
|
390
|
+
if (name === "Bash")
|
|
391
|
+
return `$ ${String(inp.command ?? "").slice(0, 50)}`;
|
|
392
|
+
if (name === "Glob")
|
|
393
|
+
return `Glob ${inp.pattern ?? ""}`;
|
|
394
|
+
if (name === "Grep")
|
|
395
|
+
return `Grep ${inp.pattern ?? ""}`;
|
|
396
|
+
if (name === "Agent")
|
|
397
|
+
return "Agent (subagent)";
|
|
398
|
+
if (name.startsWith("mcp__agend__"))
|
|
399
|
+
return ""; // skip channel tools
|
|
400
|
+
return name;
|
|
401
|
+
}
|
|
402
|
+
addToolStatus(name, input, state) {
|
|
403
|
+
const summary = this.summarizeTool(name, input);
|
|
404
|
+
if (!summary)
|
|
405
|
+
return; // skip empty (e.g., channel tools)
|
|
406
|
+
if (state === "running") {
|
|
407
|
+
this.toolStatusLines.push(`⏳ ${summary}`);
|
|
408
|
+
}
|
|
409
|
+
else {
|
|
410
|
+
// Mark the last matching tool as done
|
|
411
|
+
for (let i = this.toolStatusLines.length - 1; i >= 0; i--) {
|
|
412
|
+
if (this.toolStatusLines[i].includes(name) && this.toolStatusLines[i].startsWith("⏳")) {
|
|
413
|
+
this.toolStatusLines[i] = this.toolStatusLines[i].replace("⏳", "✅");
|
|
414
|
+
break;
|
|
415
|
+
}
|
|
416
|
+
}
|
|
417
|
+
}
|
|
418
|
+
this.debouncedSendToolStatus();
|
|
419
|
+
}
|
|
420
|
+
/** Debounce tool status updates to avoid Telegram rate limits (500ms trailing edge) */
debouncedSendToolStatus() {
    // Restart the timer on every call so only the last update in a burst fires
    if (this.toolStatusDebounce)
        clearTimeout(this.toolStatusDebounce);
    this.toolStatusDebounce = setTimeout(() => this.sendToolStatus(), 500);
}
|
|
426
|
+
sendToolStatus() {
|
|
427
|
+
const text = this.toolStatusLines.join("\n");
|
|
428
|
+
if (!text)
|
|
429
|
+
return;
|
|
430
|
+
this.ipcServer?.broadcast({
|
|
431
|
+
type: "fleet_tool_status",
|
|
432
|
+
instanceName: this.name,
|
|
433
|
+
text,
|
|
434
|
+
editMessageId: this.toolStatusMessageId,
|
|
435
|
+
});
|
|
436
|
+
}
|
|
437
|
+
/** Called by fleet manager when tool status message is sent (returns messageId) */
setToolStatusMessageId(messageId) {
    // Subsequent sendToolStatus() broadcasts will edit this message in place
    this.toolStatusMessageId = messageId;
}
|
|
441
|
+
/**
|
|
442
|
+
* Push an inbound channel message to a specific MCP session.
|
|
443
|
+
* If targetSession is provided, only send to the matching socket.
|
|
444
|
+
* Otherwise send to the instance's own session (this.name).
|
|
445
|
+
*/
|
|
446
|
+
pushChannelMessage(content, meta, _targetSession) {
|
|
447
|
+
if (!this.tmux) {
|
|
448
|
+
this.logger.warn("Cannot push channel message: tmux not running");
|
|
449
|
+
return;
|
|
450
|
+
}
|
|
451
|
+
this.hangDetector?.recordInbound();
|
|
452
|
+
// v3: record user messages for rotation snapshot
|
|
453
|
+
this.recordRecentUserMessage(content, meta);
|
|
454
|
+
// Format message with metadata prefix for the agent
|
|
455
|
+
const user = meta.user || "unknown";
|
|
456
|
+
const fromInstance = meta.from_instance;
|
|
457
|
+
let formatted;
|
|
458
|
+
if (fromInstance) {
|
|
459
|
+
// Cross-instance message
|
|
460
|
+
formatted = `[from:${fromInstance}] ${content}`;
|
|
461
|
+
}
|
|
462
|
+
else {
|
|
463
|
+
// User message from Telegram/Discord
|
|
464
|
+
formatted = `[user:${user}] ${content}`;
|
|
465
|
+
}
|
|
466
|
+
this.tmux.pasteText(formatted).catch(err => {
|
|
467
|
+
this.logger.error({ err }, "Failed to paste message to tmux");
|
|
468
|
+
});
|
|
469
|
+
this.logger.debug({ user: meta.user, text: content.slice(0, 100) }, "Pushed channel message via tmux");
|
|
470
|
+
}
|
|
471
|
+
/** Find the IPC socket for a given sessionName */
|
|
472
|
+
findSocketBySession(sessionName) {
|
|
473
|
+
for (const [socket, name] of this.socketSessionNames) {
|
|
474
|
+
if (name === sessionName && !socket.destroyed)
|
|
475
|
+
return socket;
|
|
476
|
+
}
|
|
477
|
+
return undefined;
|
|
478
|
+
}
|
|
479
|
+
/**
|
|
480
|
+
* Handle a tool call from the MCP server (forwarded by Claude).
|
|
481
|
+
* Routes to the channel adapter via MessageBus.
|
|
482
|
+
*/
|
|
483
|
+
handleToolCall(msg, socket) {
|
|
484
|
+
const tool = msg.tool;
|
|
485
|
+
const args = (msg.args ?? {});
|
|
486
|
+
const requestId = msg.requestId;
|
|
487
|
+
this.logger.debug({ tool, requestId }, "Tool call from MCP server");
|
|
488
|
+
// For now, log and respond. Full adapter routing will be wired in fleet manager.
|
|
489
|
+
const respond = (result, error) => {
|
|
490
|
+
this.ipcServer?.send(socket, { requestId, result, error });
|
|
491
|
+
};
|
|
492
|
+
// Schedule tools → route to fleet manager
|
|
493
|
+
const CROSS_INSTANCE_TOOLS = new Set(["send_to_instance", "list_instances", "start_instance", "create_instance", "delete_instance", "request_information", "delegate_task", "report_result", "describe_instance"]);
|
|
494
|
+
const SCHEDULE_TOOLS = new Set(["create_schedule", "list_schedules", "update_schedule", "delete_schedule"]);
|
|
495
|
+
if (SCHEDULE_TOOLS.has(tool)) {
|
|
496
|
+
const typeMap = {
|
|
497
|
+
create_schedule: "fleet_schedule_create",
|
|
498
|
+
list_schedules: "fleet_schedule_list",
|
|
499
|
+
update_schedule: "fleet_schedule_update",
|
|
500
|
+
delete_schedule: "fleet_schedule_delete",
|
|
501
|
+
};
|
|
502
|
+
// Use fleetRequestId (not requestId) to avoid MCP server resolving the
|
|
503
|
+
// pending tool call prematurely when it receives the broadcast.
|
|
504
|
+
const fleetReqId = `sched_${requestId}`;
|
|
505
|
+
this.ipcServer?.broadcast({
|
|
506
|
+
type: typeMap[tool],
|
|
507
|
+
payload: args,
|
|
508
|
+
meta: { chat_id: this.lastChatId, thread_id: this.lastThreadId, instance_name: this.name },
|
|
509
|
+
fleetRequestId: fleetReqId,
|
|
510
|
+
});
|
|
511
|
+
// Wait for fleet_schedule_response via pending request map
|
|
512
|
+
const timeout = setTimeout(() => {
|
|
513
|
+
this.pendingIpcRequests.delete(fleetReqId);
|
|
514
|
+
respond(null, "Schedule operation timed out after 30s");
|
|
515
|
+
}, 30_000);
|
|
516
|
+
this.pendingIpcRequests.set(fleetReqId, (respMsg) => {
|
|
517
|
+
clearTimeout(timeout);
|
|
518
|
+
respond(respMsg.result, respMsg.error);
|
|
519
|
+
});
|
|
520
|
+
return;
|
|
521
|
+
}
|
|
522
|
+
if (CROSS_INSTANCE_TOOLS.has(tool)) {
|
|
523
|
+
// Route to fleet manager via IPC (topic mode only)
|
|
524
|
+
if (this.topicMode && this.ipcServer) {
|
|
525
|
+
// Use fleetRequestId (not requestId) to avoid MCP server resolving the
|
|
526
|
+
// pending tool call prematurely when it receives the broadcast.
|
|
527
|
+
const fleetReqId = `xmsg_${requestId}`;
|
|
528
|
+
const senderSessionName = this.socketSessionNames.get(socket);
|
|
529
|
+
this.ipcServer.broadcast({
|
|
530
|
+
type: "fleet_outbound",
|
|
531
|
+
tool,
|
|
532
|
+
args,
|
|
533
|
+
fleetRequestId: fleetReqId,
|
|
534
|
+
senderSessionName,
|
|
535
|
+
});
|
|
536
|
+
const crossTimeoutMs = (tool === "start_instance" || tool === "create_instance") ? 60_000 : 30_000;
|
|
537
|
+
const timeout = setTimeout(() => {
|
|
538
|
+
this.pendingIpcRequests.delete(fleetReqId);
|
|
539
|
+
respond(null, `Cross-instance operation timed out after ${crossTimeoutMs / 1000}s`);
|
|
540
|
+
}, crossTimeoutMs);
|
|
541
|
+
this.pendingIpcRequests.set(fleetReqId, (respMsg) => {
|
|
542
|
+
clearTimeout(timeout);
|
|
543
|
+
respond(respMsg.result, respMsg.error);
|
|
544
|
+
});
|
|
545
|
+
}
|
|
546
|
+
else {
|
|
547
|
+
respond(null, "Cross-instance messaging requires topic mode");
|
|
548
|
+
}
|
|
549
|
+
return;
|
|
550
|
+
}
|
|
551
|
+
// Route to adapter via MessageBus
|
|
552
|
+
const adapters = this.messageBus.getAllAdapters();
|
|
553
|
+
if (adapters.length === 0) {
|
|
554
|
+
// Topic mode: forward to fleet manager via IPC (fleet manager connected as IPC client)
|
|
555
|
+
// The fleet manager's IPC client receives this and routes to shared adapter.
|
|
556
|
+
// Use fleetRequestId (not requestId) to avoid other MCP sessions on this daemon
|
|
557
|
+
// from prematurely resolving their pending requests when they receive the broadcast.
|
|
558
|
+
const fleetReqId = `tool_${requestId}`;
|
|
559
|
+
const outboundKey = fleetReqId;
|
|
560
|
+
this.ipcServer?.broadcast({ type: "fleet_outbound", tool, args, fleetRequestId: fleetReqId });
|
|
561
|
+
const timeout = setTimeout(() => {
|
|
562
|
+
this.pendingIpcRequests.delete(outboundKey);
|
|
563
|
+
respond(null, "Fleet outbound timed out after 30s");
|
|
564
|
+
}, 30_000);
|
|
565
|
+
this.pendingIpcRequests.set(outboundKey, (respMsg) => {
|
|
566
|
+
clearTimeout(timeout);
|
|
567
|
+
respond(respMsg.result, respMsg.error);
|
|
568
|
+
});
|
|
569
|
+
return;
|
|
570
|
+
}
|
|
571
|
+
const adapter = adapters[0];
|
|
572
|
+
if (!routeToolCall(adapter, tool, args, this.lastThreadId, respond)) {
|
|
573
|
+
respond(null, `Unknown tool: ${tool}`);
|
|
574
|
+
}
|
|
575
|
+
}
|
|
576
|
+
/** Build config object for the CLI backend */
|
|
577
|
+
buildBackendConfig() {
|
|
578
|
+
const sockPath = join(this.instanceDir, "channel.sock");
|
|
579
|
+
let serverJs = join(__dirname, "channel", "mcp-server.js");
|
|
580
|
+
if (!existsSync(serverJs)) {
|
|
581
|
+
serverJs = join(__dirname, "..", "dist", "channel", "mcp-server.js");
|
|
582
|
+
}
|
|
583
|
+
return {
|
|
584
|
+
workingDirectory: this.config.working_directory,
|
|
585
|
+
instanceDir: this.instanceDir,
|
|
586
|
+
instanceName: this.name,
|
|
587
|
+
mcpServers: {
|
|
588
|
+
"agend": {
|
|
589
|
+
command: "node",
|
|
590
|
+
args: [serverJs],
|
|
591
|
+
env: { AGEND_SOCKET_PATH: sockPath },
|
|
592
|
+
},
|
|
593
|
+
},
|
|
594
|
+
systemPrompt: this.buildSystemPrompt(),
|
|
595
|
+
skipPermissions: this.config.skipPermissions,
|
|
596
|
+
model: this.modelOverride ?? this.config.model,
|
|
597
|
+
};
|
|
598
|
+
}
|
|
599
|
+
/** Combine fleet context with user-configured system prompt + previous session snapshot */
|
|
600
|
+
buildSystemPrompt() {
|
|
601
|
+
const fleetContext = generateFleetSystemPrompt({
|
|
602
|
+
instanceName: this.name,
|
|
603
|
+
workingDirectory: this.config.working_directory,
|
|
604
|
+
});
|
|
605
|
+
let prompt = fleetContext;
|
|
606
|
+
if (this.config.systemPrompt) {
|
|
607
|
+
prompt += "\n\n" + this.config.systemPrompt;
|
|
608
|
+
}
|
|
609
|
+
// v3: inject previous session snapshot
|
|
610
|
+
const snapshotBlock = this.buildSnapshotPrompt();
|
|
611
|
+
if (snapshotBlock) {
|
|
612
|
+
prompt += "\n\n" + snapshotBlock;
|
|
613
|
+
}
|
|
614
|
+
return prompt;
|
|
615
|
+
}
|
|
616
|
+
/** Spawn (or respawn) a Claude window in tmux */
|
|
617
|
+
async spawnClaudeWindow() {
|
|
618
|
+
this.spawning = true;
|
|
619
|
+
try {
|
|
620
|
+
// Clear tool status from previous session
|
|
621
|
+
this.toolStatusLines = [];
|
|
622
|
+
this.toolStatusMessageId = null;
|
|
623
|
+
if (!this.backend) {
|
|
624
|
+
throw new Error("No backend configured — cannot spawn Claude window");
|
|
625
|
+
}
|
|
626
|
+
const backendConfig = this.buildBackendConfig();
|
|
627
|
+
this.backend.writeConfig(backendConfig);
|
|
628
|
+
// Inject AGEND_INSTANCE_NAME via shell env (not .mcp.json) so internal sessions
|
|
629
|
+
// are distinguishable from external sessions sharing the same .mcp.json
|
|
630
|
+
let claudeCmd = `AGEND_INSTANCE_NAME=${this.name} ` + this.backend.buildCommand(backendConfig);
|
|
631
|
+
const windowId = await this.tmux.createWindow(claudeCmd, this.config.working_directory, this.name);
|
|
632
|
+
const windowIdFile = join(this.instanceDir, "window-id");
|
|
633
|
+
writeFileSync(windowIdFile, windowId);
|
|
634
|
+
// Smart wait: poll tmux pane for prompt indicators, press Enter when found.
|
|
635
|
+
// Minimum 3s wait to let CLI initialize, then poll up to 10s.
|
|
636
|
+
await new Promise(r => setTimeout(r, 3000));
|
|
637
|
+
const deadline = Date.now() + 7_000;
|
|
638
|
+
let prompted = false;
|
|
639
|
+
while (Date.now() < deadline) {
|
|
640
|
+
await new Promise(r => setTimeout(r, 500));
|
|
641
|
+
try {
|
|
642
|
+
const pane = await this.tmux.capturePane();
|
|
643
|
+
// Confirmation prompts that need Enter
|
|
644
|
+
if (/Do you want|Yes.*No|Trust|trust|Enter to confirm|New MCP server/i.test(pane)) {
|
|
645
|
+
prompted = true;
|
|
646
|
+
break;
|
|
647
|
+
}
|
|
648
|
+
// CLI is ready (status bar visible = fully loaded)
|
|
649
|
+
if (/bypass permissions|tokens|ok\s*$/m.test(pane)) {
|
|
650
|
+
break; // ready, no Enter needed
|
|
651
|
+
}
|
|
652
|
+
}
|
|
653
|
+
catch {
|
|
654
|
+
break;
|
|
655
|
+
}
|
|
656
|
+
}
|
|
657
|
+
if (prompted) {
|
|
658
|
+
try {
|
|
659
|
+
await this.tmux.sendSpecialKey("Enter");
|
|
660
|
+
}
|
|
661
|
+
catch { /* window may have exited */ }
|
|
662
|
+
}
|
|
663
|
+
this.lastSpawnAt = Date.now();
|
|
664
|
+
}
|
|
665
|
+
finally {
|
|
666
|
+
this.spawning = false;
|
|
667
|
+
}
|
|
668
|
+
}
|
|
669
|
+
saveSessionId() {
|
|
670
|
+
const sid = this.backend?.getSessionId();
|
|
671
|
+
if (sid) {
|
|
672
|
+
writeFileSync(join(this.instanceDir, "session-id"), sid);
|
|
673
|
+
}
|
|
674
|
+
}
|
|
675
|
+
readContextPercentage() {
|
|
676
|
+
return this.backend?.getContextUsage() ?? 0;
|
|
677
|
+
}
|
|
678
|
+
/** Set a model override for next spawn (used by failover logic) */
|
|
679
|
+
setModelOverride(model) {
|
|
680
|
+
this.modelOverride = model;
|
|
681
|
+
}
|
|
682
|
+
/** Get the currently active model override */
|
|
683
|
+
getModelOverride() {
|
|
684
|
+
return this.modelOverride;
|
|
685
|
+
}
|
|
686
|
+
/** Public wrapper for graceful restart — wait for instance to be idle. */
|
|
687
|
+
waitForIdle(quietMs = 5000) {
|
|
688
|
+
return new Promise((resolve) => {
|
|
689
|
+
const events = ["tool_use", "tool_result", "assistant_text"];
|
|
690
|
+
let timer;
|
|
691
|
+
const done = () => {
|
|
692
|
+
events.forEach(e => this.transcriptMonitor?.removeListener(e, reset));
|
|
693
|
+
resolve();
|
|
694
|
+
};
|
|
695
|
+
const reset = () => {
|
|
696
|
+
clearTimeout(timer);
|
|
697
|
+
timer = setTimeout(done, quietMs);
|
|
698
|
+
};
|
|
699
|
+
timer = setTimeout(done, quietMs);
|
|
700
|
+
events.forEach(e => this.transcriptMonitor?.on(e, reset));
|
|
701
|
+
});
|
|
702
|
+
}
|
|
703
|
+
// ── Context Rotation v3: Ring buffers ─────────────────────────
|
|
704
|
+
recordRecentUserMessage(content, meta) {
|
|
705
|
+
// Only record real user messages, not cross-instance messages
|
|
706
|
+
if (!meta.user || meta.user.startsWith("instance:"))
|
|
707
|
+
return;
|
|
708
|
+
this.recentUserMessages.push({
|
|
709
|
+
text: content.slice(0, 200),
|
|
710
|
+
ts: meta.ts ?? new Date().toISOString(),
|
|
711
|
+
});
|
|
712
|
+
if (this.recentUserMessages.length > 10)
|
|
713
|
+
this.recentUserMessages.shift();
|
|
714
|
+
}
|
|
715
|
+
recordRecentEvent(event) {
|
|
716
|
+
this.recentEvents.push(event);
|
|
717
|
+
if (this.recentEvents.length > 15)
|
|
718
|
+
this.recentEvents.shift();
|
|
719
|
+
}
|
|
720
|
+
recordRecentToolActivity(summary) {
|
|
721
|
+
if (!summary)
|
|
722
|
+
return;
|
|
723
|
+
this.recentToolActivity.push(summary);
|
|
724
|
+
if (this.recentToolActivity.length > 10)
|
|
725
|
+
this.recentToolActivity.shift();
|
|
726
|
+
}
|
|
727
|
+
// ── Context Rotation v3: Snapshot writer ──────────────────────
|
|
728
|
+
writeRotationSnapshot(reason) {
|
|
729
|
+
const statusline = this.readStatuslineData();
|
|
730
|
+
const snapshot = {
|
|
731
|
+
instance: this.name,
|
|
732
|
+
reason,
|
|
733
|
+
created_at: new Date().toISOString(),
|
|
734
|
+
working_directory: this.config.working_directory,
|
|
735
|
+
session_id: this.backend?.getSessionId() ?? null,
|
|
736
|
+
context_pct: this.readContextPercentage(),
|
|
737
|
+
recent_user_messages: [...this.recentUserMessages],
|
|
738
|
+
recent_events: [...this.recentEvents],
|
|
739
|
+
recent_tool_activity: [...this.recentToolActivity],
|
|
740
|
+
last_statusline: statusline ? {
|
|
741
|
+
model: statusline.model?.display_name,
|
|
742
|
+
cost_usd: statusline.cost?.total_cost_usd,
|
|
743
|
+
five_hour_pct: statusline.rate_limits?.five_hour?.used_percentage,
|
|
744
|
+
seven_day_pct: statusline.rate_limits?.seven_day?.used_percentage,
|
|
745
|
+
} : undefined,
|
|
746
|
+
};
|
|
747
|
+
const snapshotPath = join(this.instanceDir, "rotation-state.json");
|
|
748
|
+
writeFileSync(snapshotPath, JSON.stringify(snapshot, null, 2));
|
|
749
|
+
this.logger.info({
|
|
750
|
+
reason,
|
|
751
|
+
context_pct: snapshot.context_pct,
|
|
752
|
+
user_msg_count: snapshot.recent_user_messages?.length ?? 0,
|
|
753
|
+
event_count: snapshot.recent_events?.length ?? 0,
|
|
754
|
+
}, "Snapshot written");
|
|
755
|
+
return snapshot;
|
|
756
|
+
}
|
|
757
|
+
readStatuslineData() {
|
|
758
|
+
try {
|
|
759
|
+
const sf = join(this.instanceDir, "statusline.json");
|
|
760
|
+
return JSON.parse(readFileSync(sf, "utf-8"));
|
|
761
|
+
}
|
|
762
|
+
catch {
|
|
763
|
+
return null;
|
|
764
|
+
}
|
|
765
|
+
}
|
|
766
|
+
// ── Context Rotation v3: Prompt injection ─────────────────────
|
|
767
|
+
buildSnapshotPrompt() {
|
|
768
|
+
const snapshotPath = join(this.instanceDir, "rotation-state.json");
|
|
769
|
+
try {
|
|
770
|
+
if (!existsSync(snapshotPath))
|
|
771
|
+
return null;
|
|
772
|
+
const snapshot = JSON.parse(readFileSync(snapshotPath, "utf-8"));
|
|
773
|
+
// Single-consume: delete after reading so it's not re-injected on
|
|
774
|
+
// crash respawn, manual restart, or future rotations.
|
|
775
|
+
try {
|
|
776
|
+
unlinkSync(snapshotPath);
|
|
777
|
+
}
|
|
778
|
+
catch { /* best-effort */ }
|
|
779
|
+
const lines = ["## Previous Session Snapshot", ""];
|
|
780
|
+
lines.push(`Restart reason: ${snapshot.reason}`);
|
|
781
|
+
if (snapshot.context_pct != null)
|
|
782
|
+
lines.push(`Previous context usage: ${snapshot.context_pct}%`);
|
|
783
|
+
if (snapshot.session_id)
|
|
784
|
+
lines.push(`Previous session id: ${snapshot.session_id}`);
|
|
785
|
+
lines.push(`Working directory: ${snapshot.working_directory}`);
|
|
786
|
+
lines.push("");
|
|
787
|
+
if (snapshot.recent_user_messages && snapshot.recent_user_messages.length > 0) {
|
|
788
|
+
lines.push("Recent user messages:");
|
|
789
|
+
for (const msg of snapshot.recent_user_messages) {
|
|
790
|
+
lines.push(`- ${msg.text}`);
|
|
791
|
+
}
|
|
792
|
+
lines.push("");
|
|
793
|
+
}
|
|
794
|
+
if (snapshot.recent_events && snapshot.recent_events.length > 0) {
|
|
795
|
+
lines.push("Recent activity:");
|
|
796
|
+
for (const ev of snapshot.recent_events) {
|
|
797
|
+
if (ev.type === "assistant_text") {
|
|
798
|
+
lines.push(`- Assistant: ${ev.preview}`);
|
|
799
|
+
}
|
|
800
|
+
else {
|
|
801
|
+
lines.push(`- ${ev.name}${ev.preview ? `: ${ev.preview}` : ""}`);
|
|
802
|
+
}
|
|
803
|
+
}
|
|
804
|
+
lines.push("");
|
|
805
|
+
}
|
|
806
|
+
lines.push("Instruction:");
|
|
807
|
+
lines.push("Resume work from this snapshot when relevant. Do not assume anything not stated here.");
|
|
808
|
+
// Enforce 2000-char budget
|
|
809
|
+
let result = lines.join("\n");
|
|
810
|
+
if (result.length > 2000) {
|
|
811
|
+
result = result.slice(0, 1997) + "...";
|
|
812
|
+
}
|
|
813
|
+
return result;
|
|
814
|
+
}
|
|
815
|
+
catch {
|
|
816
|
+
return null;
|
|
817
|
+
}
|
|
818
|
+
}
|
|
819
|
+
}
|
|
820
|
+
//# sourceMappingURL=daemon.js.map
|