@geravant/sinain 1.0.19 → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +10 -1
- package/cli.js +176 -0
- package/install.js +11 -2
- package/launcher.js +622 -0
- package/openclaw.plugin.json +4 -0
- package/pack-prepare.js +48 -0
- package/package.json +24 -5
- package/sense_client/README.md +82 -0
- package/sense_client/__init__.py +1 -0
- package/sense_client/__main__.py +462 -0
- package/sense_client/app_detector.py +54 -0
- package/sense_client/app_detector_win.py +83 -0
- package/sense_client/capture.py +215 -0
- package/sense_client/capture_win.py +88 -0
- package/sense_client/change_detector.py +86 -0
- package/sense_client/config.py +64 -0
- package/sense_client/gate.py +145 -0
- package/sense_client/ocr.py +347 -0
- package/sense_client/privacy.py +65 -0
- package/sense_client/requirements.txt +13 -0
- package/sense_client/roi_extractor.py +84 -0
- package/sense_client/sender.py +173 -0
- package/sense_client/tests/__init__.py +0 -0
- package/sense_client/tests/test_stream1_optimizations.py +234 -0
- package/setup-overlay.js +82 -0
- package/sinain-agent/.env.example +17 -0
- package/sinain-agent/CLAUDE.md +80 -0
- package/sinain-agent/mcp-config.json +12 -0
- package/sinain-agent/run.sh +248 -0
- package/sinain-core/.env.example +93 -0
- package/sinain-core/package-lock.json +552 -0
- package/sinain-core/package.json +21 -0
- package/sinain-core/src/agent/analyzer.ts +366 -0
- package/sinain-core/src/agent/context-window.ts +172 -0
- package/sinain-core/src/agent/loop.ts +404 -0
- package/sinain-core/src/agent/situation-writer.ts +187 -0
- package/sinain-core/src/agent/traits.ts +520 -0
- package/sinain-core/src/audio/capture-spawner-macos.ts +44 -0
- package/sinain-core/src/audio/capture-spawner-win.ts +37 -0
- package/sinain-core/src/audio/capture-spawner.ts +14 -0
- package/sinain-core/src/audio/pipeline.ts +335 -0
- package/sinain-core/src/audio/transcription-local.ts +141 -0
- package/sinain-core/src/audio/transcription.ts +278 -0
- package/sinain-core/src/buffers/feed-buffer.ts +71 -0
- package/sinain-core/src/buffers/sense-buffer.ts +425 -0
- package/sinain-core/src/config.ts +245 -0
- package/sinain-core/src/escalation/escalation-slot.ts +136 -0
- package/sinain-core/src/escalation/escalator.ts +812 -0
- package/sinain-core/src/escalation/message-builder.ts +323 -0
- package/sinain-core/src/escalation/openclaw-ws.ts +726 -0
- package/sinain-core/src/escalation/scorer.ts +166 -0
- package/sinain-core/src/index.ts +507 -0
- package/sinain-core/src/learning/feedback-store.ts +253 -0
- package/sinain-core/src/learning/signal-collector.ts +218 -0
- package/sinain-core/src/log.ts +24 -0
- package/sinain-core/src/overlay/commands.ts +126 -0
- package/sinain-core/src/overlay/ws-handler.ts +267 -0
- package/sinain-core/src/privacy/index.ts +18 -0
- package/sinain-core/src/privacy/presets.ts +40 -0
- package/sinain-core/src/privacy/redact.ts +92 -0
- package/sinain-core/src/profiler.ts +181 -0
- package/sinain-core/src/recorder.ts +186 -0
- package/sinain-core/src/server.ts +417 -0
- package/sinain-core/src/trace/trace-store.ts +73 -0
- package/sinain-core/src/trace/tracer.ts +94 -0
- package/sinain-core/src/types.ts +427 -0
- package/sinain-core/src/util/dedup.ts +48 -0
- package/sinain-core/src/util/task-store.ts +84 -0
- package/sinain-core/tsconfig.json +18 -0
- package/sinain-knowledge/data/git-store.ts +2 -0
- package/sinain-mcp-server/index.ts +337 -0
- package/sinain-mcp-server/package.json +19 -0
- package/sinain-mcp-server/tsconfig.json +15 -0
|
@@ -0,0 +1,812 @@
|
|
|
1
|
+
import type { AgentEntry, ContextWindow, EscalationConfig, OpenClawConfig, FeedItem, SpawnTaskMessage, SpawnTaskStatus } from "../types.js";
|
|
2
|
+
import type { FeedBuffer } from "../buffers/feed-buffer.js";
|
|
3
|
+
import type { WsHandler } from "../overlay/ws-handler.js";
|
|
4
|
+
import type { Profiler } from "../profiler.js";
|
|
5
|
+
import type { FeedbackStore } from "../learning/feedback-store.js";
|
|
6
|
+
import type { SignalCollector } from "../learning/signal-collector.js";
|
|
7
|
+
import { randomUUID, createHash } from "node:crypto";
|
|
8
|
+
import { OpenClawWsClient } from "./openclaw-ws.js";
|
|
9
|
+
import { EscalationSlot } from "./escalation-slot.js";
|
|
10
|
+
import type { SlotEntry, QueueFeedbackCtx } from "./escalation-slot.js";
|
|
11
|
+
import { shouldEscalate, calculateEscalationScore } from "./scorer.js";
|
|
12
|
+
import { isCodingContext, buildEscalationMessage } from "./message-builder.js";
|
|
13
|
+
import { loadPendingTasks, savePendingTasks, type PendingTaskEntry } from "../util/task-store.js";
|
|
14
|
+
import { log, warn, error } from "../log.js";
|
|
15
|
+
|
|
16
|
+
/**
 * An escalation parked for HTTP-transport pickup (single slot, newest wins).
 * Produced by onAgentAnalysis when transport is "http" (or "auto" with WS down)
 * and consumed via getPendingHttp / respondHttp.
 */
export interface HttpPendingEscalation {
  // Slot id — sha256(sessionKey + tick ts) truncated to 16 hex chars.
  id: string;
  // Fully built escalation message text.
  message: string;
  // Total escalation score from the scorer at build time.
  score: number;
  // Whether the context window looked like a coding session at build time.
  codingContext: boolean;
  // Timestamp of the originating agent tick (ms epoch).
  ts: number;
  // Context needed to record feedback once a response arrives; may be absent.
  feedbackCtx: QueueFeedbackCtx | undefined;
}
|
|
24
|
+
|
|
25
|
+
// Log tag prefix used by every log/warn/error call in this module.
const TAG = "escalation";
|
|
26
|
+
|
|
27
|
+
/**
 * Dependencies injected into the Escalator.
 * Optional members (profiler, feedbackStore, signalCollector) are feature
 * hooks — all uses in this class are null-guarded.
 */
export interface EscalatorDeps {
  // HUD feed buffer; receives error/status lines surfaced to the user.
  feedBuffer: FeedBuffer;
  // Overlay WS handler; broadcasts the same lines to connected overlays.
  wsHandler: WsHandler;
  // Mode / transport / cooldown / stale thresholds (mutated at runtime by setMode).
  escalationConfig: EscalationConfig;
  // Gateway connection settings: sessionKey, token, WS URL.
  openclawConfig: OpenClawConfig;
  // Optional metrics sink (gauge calls throughout this class).
  profiler?: Profiler;
  // Optional learning store used by sendFeedbackSummary.
  feedbackStore?: FeedbackStore;
  // Optional signal collector; late-bound via setSignalCollector.
  signalCollector?: SignalCollector;
}
|
|
36
|
+
|
|
37
|
+
/**
 * Orchestrates escalation decisions and message delivery.
 * Combines scorer (should we escalate?) + message builder (what to send) +
 * OpenClaw WS delivery (how to send) into a single coordinator.
 *
 * Delivery uses a two-phase protocol:
 *  Phase 1 (10s): await "accepted" frame → delivery confirmed, worker unblocks
 *  Phase 2 (120s): await final frame → response arrives async, never trips circuit
 *
 * OutboundQueue persists messages to disk for crash recovery and
 * provides content-hash idempotency keys for gateway-level dedup.
 */
export class Escalator {
  // Gateway WS client; created in the constructor from openclawConfig.
  private wsClient: OpenClawWsClient;
  // Single-slot delivery queue for WS-transport escalations.
  private slot: EscalationSlot;
  // Single-slot holder for HTTP-transport escalations (newest wins).
  private httpPending: HttpPendingEscalation | null = null;

  // Initialized to "now" so the stale timer does not fire at startup.
  private lastEscalationTs = Date.now();
  // Digest of the last escalated tick; fed back into shouldEscalate for dedup.
  private lastEscalatedDigest = "";

  // Spawn deduplication state
  private lastSpawnFingerprint = "";
  private lastSpawnTs = 0;
  private static readonly SPAWN_COOLDOWN_MS = 60_000; // 60 seconds between duplicate spawns

  // Prevent concurrent spawn RPCs (sibling spawns only — never blocks regular escalations)
  private spawnInFlight = false;

  // Track pending spawn tasks for result fetching (persisted to disk)
  private pendingSpawnTasks: Map<string, PendingTaskEntry>;

  // Cap concurrent polling loops to limit RPC load
  private static readonly MAX_CONCURRENT_POLLS = 5;
  private activePolls = 0;
  // Task ids waiting for a free poll slot; drained by finishPoll().
  private pollQueue: string[] = [];

  // Store context from last escalation for response handling
  private lastEscalationContext: ContextWindow | null = null;

  // Counters exposed through getStats() for /health.
  private stats = {
    totalEscalations: 0,
    totalResponses: 0,
    totalErrors: 0,
    totalNoReply: 0,
    lastEscalationTs: 0,
    lastResponseTs: 0,
    // Health metrics
    totalTimeouts: 0,
    totalDirectResponses: 0,
    totalSpawnResponses: 0,
    avgResponseMs: 0,
    consecutiveTimeouts: 0,
    lastTimeoutTs: 0,
  };

  // Running byte count of outbound escalation traffic (gauged to the profiler).
  private outboundBytes = 0;
|
|
93
|
+
|
|
94
|
+
  /**
   * Wires the WS client and the escalation slot together.
   * Slot callbacks update health counters and byte gauges; pending spawn
   * tasks are reloaded from disk for crash recovery.
   */
  constructor(private deps: EscalatorDeps) {
    this.wsClient = new OpenClawWsClient(deps.openclawConfig);
    this.slot = new EscalationSlot(this.wsClient, deps.openclawConfig, {
      // Final response for a delivered escalation (handler defined elsewhere in this class).
      onResponse: (result, entry, latencyMs) => this.handleEscalationResponse(result, entry, latencyMs),
      // Phase 1 ("accepted" frame) never arrived — track timeout health.
      onPhase1Failure: (isTimeout) => {
        if (isTimeout) {
          this.stats.totalTimeouts++;
          this.stats.consecutiveTimeouts++;
          this.stats.lastTimeoutTs = Date.now();
          this.deps.profiler?.gauge("escalation.totalTimeouts", this.stats.totalTimeouts);
          if (this.stats.consecutiveTimeouts >= 3) {
            warn(TAG, `⚠ ${this.stats.consecutiveTimeouts} consecutive Phase 1 timeouts`);
          }
        }
      },
      // Accumulate outbound traffic for the network gauge.
      onOutboundBytes: (n) => {
        this.outboundBytes += n;
        this.deps.profiler?.gauge("network.escalationOutBytes", this.outboundBytes);
      },
    });
    // Load pending tasks from disk (crash recovery)
    this.pendingSpawnTasks = loadPendingTasks();
    // Attempt delivery on every WS reconnect
    this.wsClient.on("connected", () => this.slot.onConnected());
  }
|
|
119
|
+
|
|
120
|
+
  /**
   * Late-bind the signal collector (created after AgentLoop).
   * Simply replaces deps.signalCollector; all readers null-guard it.
   */
  setSignalCollector(sc: SignalCollector): void {
    this.deps.signalCollector = sc;
  }
|
|
124
|
+
|
|
125
|
+
/** Start the WS connection to OpenClaw (skipped when transport=http). */
|
|
126
|
+
start(): void {
|
|
127
|
+
if (this.deps.escalationConfig.mode !== "off" && this.deps.escalationConfig.transport !== "http") {
|
|
128
|
+
this.wsClient.connect();
|
|
129
|
+
const tokenHash = this.deps.openclawConfig.gatewayToken
|
|
130
|
+
? createHash("sha256").update(this.deps.openclawConfig.gatewayToken).digest("hex").slice(0, 12)
|
|
131
|
+
: "none";
|
|
132
|
+
log(TAG, `mode: ${this.deps.escalationConfig.mode}, tokenHash: ${tokenHash}, wsUrl: ${this.deps.openclawConfig.gatewayWsUrl}`);
|
|
133
|
+
}
|
|
134
|
+
}
|
|
135
|
+
|
|
136
|
+
  /** Stop and disconnect the gateway WS client. */
  stop(): void {
    this.wsClient.disconnect();
  }
|
|
140
|
+
|
|
141
|
+
/** Update escalation mode at runtime. */
|
|
142
|
+
setMode(mode: EscalatorDeps["escalationConfig"]["mode"]): void {
|
|
143
|
+
const wasOff = this.deps.escalationConfig.mode === "off";
|
|
144
|
+
this.deps.escalationConfig.mode = mode;
|
|
145
|
+
if (mode !== "off" && !this.wsClient.isConnected) {
|
|
146
|
+
this.wsClient.resetConnection();
|
|
147
|
+
}
|
|
148
|
+
if (mode === "off") {
|
|
149
|
+
this.wsClient.disconnect();
|
|
150
|
+
}
|
|
151
|
+
// Reset stale timer when transitioning from "off" to active (prevents immediate stale)
|
|
152
|
+
if (wasOff && mode !== "off") {
|
|
153
|
+
this.lastEscalationTs = Date.now();
|
|
154
|
+
}
|
|
155
|
+
log(TAG, `mode changed to: ${mode}`);
|
|
156
|
+
}
|
|
157
|
+
|
|
158
|
+
  /**
   * Called after every agent analysis tick.
   * Decides whether to escalate and enqueues the message for delivery.
   *
   * @param entry          The agent tick (digest, hud, id, ts).
   * @param contextWindow  Current context window used for scoring/message building.
   */
  onAgentAnalysis(entry: AgentEntry, contextWindow: ContextWindow): void {
    // Skip WS escalations when circuit is open (HTTP transport bypasses this)
    const transport = this.deps.escalationConfig.transport;
    if (this.wsClient.isCircuitOpen && transport !== "http") {
      log(TAG, `tick #${entry.id}: skipped — circuit breaker open`);
      return;
    }

    // Scorer consumes the current cooldown/dedup state held on this instance.
    const { escalate, score, stale } = shouldEscalate(
      entry.digest,
      entry.hud,
      contextWindow,
      this.deps.escalationConfig.mode,
      this.lastEscalationTs,
      this.deps.escalationConfig.cooldownMs,
      this.lastEscalatedDigest,
      this.deps.escalationConfig.staleMs,
    );

    if (!escalate) {
      log(TAG, `tick #${entry.id}: not escalating (mode=${this.deps.escalationConfig.mode}, score=${score.total}, hud="${entry.hud.slice(0, 40)}")`);
      return;
    }

    // Mark cooldown immediately — before building/delivering, so a slow
    // delivery cannot let a second tick escalate inside the cooldown window.
    this.stats.totalEscalations++;
    this.deps.profiler?.gauge("escalation.totalEscalations", this.stats.totalEscalations);
    this.lastEscalationTs = Date.now();
    this.stats.lastEscalationTs = Date.now();
    this.lastEscalatedDigest = entry.digest;

    const staleTag = stale ? ", STALE" : "";
    const wsState = this.wsClient.isConnected ? "ws=connected" : "ws=disconnected";
    log(TAG, `escalating tick #${entry.id} (score=${score.total}, reasons=[${score.reasons.join(",")}]${staleTag}, ${wsState})`);

    // Store context for response handling (used in pushResponse for coding-context max-length)
    this.lastEscalationContext = contextWindow;

    const escalationReason = score.reasons.join(", ");
    const message = buildEscalationMessage(
      entry.digest,
      contextWindow,
      entry,
      this.deps.escalationConfig.mode,
      escalationReason,
    );

    // Deterministic slot id: sha256(sessionKey + tick ts), 16 hex chars.
    const slotId = createHash("sha256").update(this.deps.openclawConfig.sessionKey + entry.ts).digest("hex").slice(0, 16);
    const slotEntry: SlotEntry = {
      id: slotId,
      message,
      sessionKey: this.deps.openclawConfig.sessionKey,
      feedbackCtx: {
        tickId: entry.id,
        hud: entry.hud,
        currentApp: contextWindow.currentApp,
        escalationScore: score.total,
        escalationReasons: score.reasons,
        codingContext: isCodingContext(contextWindow).coding,
        digest: entry.digest,
      },
      ts: entry.ts,
    };

    // "auto" falls back to HTTP only while the WS is down.
    const useHttp = transport === "http" || (transport === "auto" && !this.wsClient.isConnected);

    if (useHttp) {
      // Store in HTTP pending slot (newest wins, like EscalationSlot)
      this.httpPending = {
        id: slotId,
        message,
        score: score.total,
        codingContext: isCodingContext(contextWindow).coding,
        ts: entry.ts,
        feedbackCtx: slotEntry.feedbackCtx,
      };
      log(TAG, `tick #${entry.id} → httpPending id=${slotId} (transport=${transport})`);
    } else {
      log(TAG, `tick #${entry.id} → slot.insert id=${slotId} depth=${this.slot.depth}`);
      this.slot.insert(slotEntry);
    }
  }
|
|
244
|
+
|
|
245
|
+
/** Push fresh SITUATION.md content to the gateway server (fire-and-forget). */
|
|
246
|
+
pushSituationMd(content: string): void {
|
|
247
|
+
if (!this.wsClient.isConnected) return;
|
|
248
|
+
this.wsClient.sendRpc("situation.update", { content }, 10_000)
|
|
249
|
+
.catch((err: any) => warn(TAG, `situation.update rpc failed: ${err.message}`));
|
|
250
|
+
}
|
|
251
|
+
|
|
252
|
+
/** Send a direct user message to OpenClaw. */
|
|
253
|
+
async sendDirect(text: string): Promise<void> {
|
|
254
|
+
const idemKey = `direct-${Date.now()}`;
|
|
255
|
+
if (this.wsClient.isConnected) {
|
|
256
|
+
try {
|
|
257
|
+
await this.wsClient.sendAgentRpc(text, idemKey, this.deps.openclawConfig.sessionKey);
|
|
258
|
+
return;
|
|
259
|
+
} catch (err: any) {
|
|
260
|
+
warn(TAG, `sendDirect RPC failed: ${err.message}`);
|
|
261
|
+
}
|
|
262
|
+
}
|
|
263
|
+
// WS disconnected or RPC failed — surface error to HUD
|
|
264
|
+
const errMsg = `[⚠] Gateway disconnected — message not sent`;
|
|
265
|
+
this.deps.feedBuffer.push(errMsg, "normal", "openclaw", "stream");
|
|
266
|
+
this.deps.wsHandler.broadcast(errMsg, "normal", "stream");
|
|
267
|
+
}
|
|
268
|
+
|
|
269
|
+
  /**
   * Send a periodic feedback summary to the OpenClaw agent.
   * Called on a timer from index.ts when learning is enabled.
   * Returns true if the summary was sent successfully.
   *
   * Skips (returns false) when: no feedback store, WS disconnected, fewer
   * than 3 records total, or none of the recent records carry signals.
   */
  async sendFeedbackSummary(): Promise<boolean> {
    if (!this.deps.feedbackStore) return false;
    if (!this.wsClient.isConnected) return false;

    const stats = this.deps.feedbackStore.getStats();
    // getStats() returns a loosely-typed record — cast the fields we read.
    const totalRecords = stats.totalRecords as number;
    if (totalRecords < 3) return false;

    const recent = this.deps.feedbackStore.queryRecent(5);
    // Only records that actually carry a signal are worth summarizing.
    const withSignals = recent.filter(r => r.signals.compositeScore !== 0 || r.signals.errorCleared !== null);
    if (withSignals.length === 0) return false;

    // Format compact summary
    const topTags = (stats.topTags as [string, number][] || [])
      .slice(0, 5)
      .map(([tag, count]) => `${tag} (${count})`)
      .join(", ");

    // One line per recent record: ✓/✗, score, tags, and notable outcomes.
    const recentLines = withSignals.slice(0, 5).map(r => {
      const ok = r.signals.compositeScore >= 0.2;
      const icon = ok ? "✓" : "✗";
      const score = r.signals.compositeScore.toFixed(2);
      const tags = r.tags.slice(0, 3).join(", ");
      const details: string[] = [];
      if (r.signals.errorCleared === true) details.push("error cleared");
      if (r.signals.errorCleared === false) details.push("error persisted");
      if (r.signals.noReEscalation === true) details.push("no re-escalation");
      if (r.signals.noReEscalation === false) details.push("re-escalated");
      if (r.signals.quickAppSwitch === true) details.push("quick switch");
      return ` ${icon} ${score} [${tags}]${details.length > 0 ? " — " + details.join(", ") : ""}`;
    });

    const message = `[sinain-core:feedback-summary]

Escalations: ${totalRecords} | Avg score: ${stats.avgCompositeScore ?? "n/a"} | Avg latency: ${stats.avgLatencyMs ?? "n/a"}ms
Top tags: ${topTags || "none"}

Recent (last ${withSignals.length}):
${recentLines.join("\n")}`;

    const idemKey = `feedback-summary-${Date.now()}`;
    try {
      await this.wsClient.sendAgentRpc(message, idemKey, this.deps.openclawConfig.sessionKey);
      log(TAG, `feedback summary sent (${totalRecords} records, ${withSignals.length} with signals)`);
      return true;
    } catch (err: any) {
      warn(TAG, `feedback summary send failed: ${err.message}`);
      return false;
    }
  }
|
|
324
|
+
|
|
325
|
+
  /** Return the current HTTP pending escalation (or null when none is parked). */
  getPendingHttp(): HttpPendingEscalation | null {
    return this.httpPending;
  }
|
|
329
|
+
|
|
330
|
+
/** Respond to an HTTP pending escalation. */
|
|
331
|
+
respondHttp(id: string, response: string): { ok: boolean; error?: string } {
|
|
332
|
+
if (!this.httpPending) {
|
|
333
|
+
return { ok: false, error: "no pending escalation" };
|
|
334
|
+
}
|
|
335
|
+
if (this.httpPending.id !== id) {
|
|
336
|
+
return { ok: false, error: `id mismatch: expected ${this.httpPending.id}` };
|
|
337
|
+
}
|
|
338
|
+
|
|
339
|
+
this.pushResponse(response, this.lastEscalationContext);
|
|
340
|
+
|
|
341
|
+
// Record feedback (async, non-blocking)
|
|
342
|
+
if (this.httpPending.feedbackCtx) {
|
|
343
|
+
const { digest, ...ctx } = this.httpPending.feedbackCtx;
|
|
344
|
+
this.recordFeedback(ctx, digest, this.httpPending.message, response, Date.now() - this.httpPending.ts);
|
|
345
|
+
}
|
|
346
|
+
|
|
347
|
+
log(TAG, `httpPending id=${id} responded (${response.length} chars)`);
|
|
348
|
+
this.httpPending = null;
|
|
349
|
+
return { ok: true };
|
|
350
|
+
}
|
|
351
|
+
|
|
352
|
+
  /** Whether the gateway WS client is currently connected. */
  get isGatewayConnected(): boolean {
    return this.wsClient.isConnected;
  }
|
|
356
|
+
|
|
357
|
+
  /** Force-reconnect the gateway WS client (resets its connection state). */
  reconnectGateway(): void {
    this.wsClient.resetConnection();
  }
|
|
361
|
+
|
|
362
|
+
/** Get stats for /health. */
|
|
363
|
+
getStats(): Record<string, unknown> {
|
|
364
|
+
return {
|
|
365
|
+
mode: this.deps.escalationConfig.mode,
|
|
366
|
+
transport: this.deps.escalationConfig.transport,
|
|
367
|
+
gatewayConnected: this.wsClient.isConnected,
|
|
368
|
+
circuitOpen: this.wsClient.isCircuitOpen,
|
|
369
|
+
slotDepth: this.slot.depth,
|
|
370
|
+
slotInFlight: this.slot.inFlightId,
|
|
371
|
+
httpPendingId: this.httpPending?.id ?? null,
|
|
372
|
+
spawnInFlight: this.spawnInFlight,
|
|
373
|
+
cooldownMs: this.deps.escalationConfig.cooldownMs,
|
|
374
|
+
staleMs: this.deps.escalationConfig.staleMs,
|
|
375
|
+
pendingSpawnTasks: this.pendingSpawnTasks.size,
|
|
376
|
+
...this.stats,
|
|
377
|
+
};
|
|
378
|
+
}
|
|
379
|
+
|
|
380
|
+
/**
|
|
381
|
+
* Dispatch a task to a spawned subagent via direct child session addressing.
|
|
382
|
+
* Creates a unique child session key and sends the task directly to the gateway
|
|
383
|
+
* agent RPC — bypassing the main session to avoid dedup/NO_REPLY issues.
|
|
384
|
+
*/
|
|
385
|
+
async dispatchSpawnTask(task: string, label?: string): Promise<void> {
|
|
386
|
+
// Prevent sibling spawn RPCs from piling up (independent from escalation queue)
|
|
387
|
+
if (this.spawnInFlight) {
|
|
388
|
+
log(TAG, `spawn-task skipped — spawn RPC already in-flight`);
|
|
389
|
+
return;
|
|
390
|
+
}
|
|
391
|
+
|
|
392
|
+
// --- Fingerprint dedup — hash the task content ---
|
|
393
|
+
const fingerprint = createHash("sha256").update(task.trim()).digest("hex").slice(0, 16);
|
|
394
|
+
const now = Date.now();
|
|
395
|
+
|
|
396
|
+
if (fingerprint === this.lastSpawnFingerprint &&
|
|
397
|
+
now - this.lastSpawnTs < Escalator.SPAWN_COOLDOWN_MS) {
|
|
398
|
+
log(TAG, `spawn-task skipped (duplicate fingerprint ${fingerprint})`);
|
|
399
|
+
return;
|
|
400
|
+
}
|
|
401
|
+
|
|
402
|
+
this.lastSpawnFingerprint = fingerprint;
|
|
403
|
+
this.lastSpawnTs = now;
|
|
404
|
+
|
|
405
|
+
const taskId = `spawn-${Date.now()}`;
|
|
406
|
+
const startedAt = Date.now();
|
|
407
|
+
const labelStr = label ? ` (label: "${label}")` : "";
|
|
408
|
+
const idemKey = `spawn-task-${Date.now()}`;
|
|
409
|
+
|
|
410
|
+
// Generate a unique child session key — bypasses the main agent entirely
|
|
411
|
+
const childSessionKey = `agent:main:subagent:${randomUUID()}`;
|
|
412
|
+
const mainSessionKey = this.deps.openclawConfig.sessionKey;
|
|
413
|
+
|
|
414
|
+
this.outboundBytes += Buffer.byteLength(task);
|
|
415
|
+
this.deps.profiler?.gauge("network.escalationOutBytes", this.outboundBytes);
|
|
416
|
+
log(TAG, `dispatching spawn-task${labelStr} → child=${childSessionKey}: "${task.slice(0, 80)}..."`);
|
|
417
|
+
|
|
418
|
+
// ★ Broadcast "spawned" BEFORE the RPC — TSK tab shows ··· immediately
|
|
419
|
+
this.broadcastTaskEvent(taskId, "spawned", label, startedAt);
|
|
420
|
+
|
|
421
|
+
if (!this.wsClient.isConnected) {
|
|
422
|
+
warn(TAG, `spawn-task ${taskId}: WS disconnected — cannot dispatch`);
|
|
423
|
+
this.broadcastTaskEvent(taskId, "failed", label, startedAt);
|
|
424
|
+
return;
|
|
425
|
+
}
|
|
426
|
+
|
|
427
|
+
// ★ Set spawnInFlight BEFORE first await — cleared in finally regardless of outcome.
|
|
428
|
+
// Dedicated lane flag: never touches the escalation queue so regular escalations
|
|
429
|
+
// continue unblocked while this spawn RPC is pending.
|
|
430
|
+
this.spawnInFlight = true;
|
|
431
|
+
try {
|
|
432
|
+
// Send directly to a new child session via the gateway agent RPC
|
|
433
|
+
const result = await this.wsClient.sendRpc("agent", {
|
|
434
|
+
message: task,
|
|
435
|
+
sessionKey: childSessionKey,
|
|
436
|
+
lane: "subagent",
|
|
437
|
+
extraSystemPrompt: this.buildChildSystemPrompt(task, label),
|
|
438
|
+
deliver: false,
|
|
439
|
+
spawnedBy: mainSessionKey,
|
|
440
|
+
idempotencyKey: idemKey,
|
|
441
|
+
label: label || undefined,
|
|
442
|
+
}, 45_000, { expectFinal: true });
|
|
443
|
+
|
|
444
|
+
log(TAG, `spawn-task RPC response: ${JSON.stringify(result).slice(0, 500)}`);
|
|
445
|
+
this.stats.totalSpawnResponses++;
|
|
446
|
+
|
|
447
|
+
// Extract result — child agent actually ran the task and returned content
|
|
448
|
+
const payloads = result?.payload?.result?.payloads;
|
|
449
|
+
const runId = result?.payload?.runId || taskId;
|
|
450
|
+
|
|
451
|
+
if (Array.isArray(payloads) && payloads.length > 0) {
|
|
452
|
+
const output = payloads.map((pl: any) => pl.text || "").join("\n").trim();
|
|
453
|
+
if (output) {
|
|
454
|
+
this.pushResponse(`${label || "Background task"}:\n${output}`);
|
|
455
|
+
this.broadcastTaskEvent(taskId, "completed", label, startedAt, output);
|
|
456
|
+
} else {
|
|
457
|
+
log(TAG, `spawn-task: ${payloads.length} payloads but empty text, trying chat.history`);
|
|
458
|
+
const historyText = await this.fetchChildResult(childSessionKey);
|
|
459
|
+
this.broadcastTaskEvent(taskId, "completed", label, startedAt,
|
|
460
|
+
historyText || "task completed (no output)");
|
|
461
|
+
if (historyText) {
|
|
462
|
+
this.pushResponse(`${label || "Background task"}:\n${historyText}`);
|
|
463
|
+
}
|
|
464
|
+
}
|
|
465
|
+
} else {
|
|
466
|
+
// No payloads — fallback: fetch from chat.history on child session
|
|
467
|
+
log(TAG, `spawn-task: no payloads, fetching chat.history for child=${childSessionKey}`);
|
|
468
|
+
const historyText = await this.fetchChildResult(childSessionKey);
|
|
469
|
+
if (historyText) {
|
|
470
|
+
this.pushResponse(`${label || "Background task"}:\n${historyText}`);
|
|
471
|
+
this.broadcastTaskEvent(taskId, "completed", label, startedAt, historyText);
|
|
472
|
+
} else {
|
|
473
|
+
this.broadcastTaskEvent(taskId, "completed", label, startedAt,
|
|
474
|
+
"task completed (no output captured)");
|
|
475
|
+
}
|
|
476
|
+
}
|
|
477
|
+
|
|
478
|
+
// Persist for crash recovery (no polling needed — result already in hand)
|
|
479
|
+
this.pendingSpawnTasks.set(taskId, {
|
|
480
|
+
runId,
|
|
481
|
+
childSessionKey,
|
|
482
|
+
label,
|
|
483
|
+
startedAt,
|
|
484
|
+
pollingEmitted: false,
|
|
485
|
+
});
|
|
486
|
+
savePendingTasks(this.pendingSpawnTasks);
|
|
487
|
+
|
|
488
|
+
// Clean up immediately since we already have the result
|
|
489
|
+
this.pendingSpawnTasks.delete(taskId);
|
|
490
|
+
savePendingTasks(this.pendingSpawnTasks);
|
|
491
|
+
} catch (err: any) {
|
|
492
|
+
error(TAG, `spawn-task failed: ${err.message}`);
|
|
493
|
+
this.broadcastTaskEvent(taskId, "failed", label, startedAt);
|
|
494
|
+
} finally {
|
|
495
|
+
this.spawnInFlight = false;
|
|
496
|
+
}
|
|
497
|
+
}
|
|
498
|
+
|
|
499
|
+
/** Build a focused system prompt for the child subagent. */
|
|
500
|
+
private buildChildSystemPrompt(task: string, label?: string): string {
|
|
501
|
+
return [
|
|
502
|
+
"# Subagent Context",
|
|
503
|
+
"",
|
|
504
|
+
"You are a **subagent** spawned for a specific task.",
|
|
505
|
+
"",
|
|
506
|
+
"## Your Role",
|
|
507
|
+
`- Task: ${task.replace(/\s+/g, " ").trim().slice(0, 500)}`,
|
|
508
|
+
"- Complete this task. That's your entire purpose.",
|
|
509
|
+
"",
|
|
510
|
+
"## Rules",
|
|
511
|
+
"1. Stay focused — do your assigned task, nothing else",
|
|
512
|
+
"2. Your final message will be reported to the requester",
|
|
513
|
+
"3. Be concise but informative",
|
|
514
|
+
"",
|
|
515
|
+
label ? `Label: ${label}` : "",
|
|
516
|
+
].filter(Boolean).join("\n");
|
|
517
|
+
}
|
|
518
|
+
|
|
519
|
+
/** Fetch the latest assistant reply from a child session's chat history. */
|
|
520
|
+
private async fetchChildResult(childSessionKey: string): Promise<string | null> {
|
|
521
|
+
try {
|
|
522
|
+
const historyResult = await this.wsClient.sendRpc("chat.history", {
|
|
523
|
+
sessionKey: childSessionKey,
|
|
524
|
+
limit: 10,
|
|
525
|
+
}, 10_000);
|
|
526
|
+
return this.extractLatestAssistantReply(historyResult);
|
|
527
|
+
} catch (err: any) {
|
|
528
|
+
warn(TAG, `chat.history fetch failed for ${childSessionKey}: ${err.message}`);
|
|
529
|
+
return null;
|
|
530
|
+
}
|
|
531
|
+
}
|
|
532
|
+
|
|
533
|
+
  /**
   * Poll for task completion and push result to HUD (preserved for crash recovery).
   * Self-rescheduling loop via setTimeout; exits through finishPoll() on
   * timeout, completion, or failure. Concurrency is capped by
   * MAX_CONCURRENT_POLLS — excess task ids wait in pollQueue.
   *
   * @param taskId Key into pendingSpawnTasks for the task to track.
   */
  private async pollTaskCompletion(taskId: string): Promise<void> {
    // Enforce concurrency cap — queue excess tasks
    if (this.activePolls >= Escalator.MAX_CONCURRENT_POLLS) {
      log(TAG, `poll queued (${this.activePolls} active): taskId=${taskId}`);
      this.pollQueue.push(taskId);
      return;
    }

    // Slot is claimed synchronously (before any await) so the cap holds.
    this.activePolls++;
    this.deps.profiler?.gauge("escalation.activePolls", this.activePolls);

    const task = this.pendingSpawnTasks.get(taskId);
    if (!task) {
      // Task vanished (already resolved elsewhere) — release the slot.
      this.finishPoll();
      return;
    }

    const maxWaitMs = 5 * 60 * 1000; // 5 minutes
    const pollIntervalMs = 5000; // 5 seconds

    const poll = async (): Promise<void> => {
      // Overall deadline is measured from the task's original start time.
      const elapsed = Date.now() - task.startedAt;
      if (elapsed > maxWaitMs) {
        log(TAG, `spawn-task timeout: taskId=${taskId}`);
        this.broadcastTaskEvent(taskId, "timeout", task.label, task.startedAt);
        this.pendingSpawnTasks.delete(taskId);
        savePendingTasks(this.pendingSpawnTasks);
        this.finishPoll();
        return;
      }

      if (!this.wsClient.isConnected) {
        // Retry later (slot stays claimed while we wait for reconnect)
        setTimeout(() => poll(), pollIntervalMs);
        return;
      }

      try {
        // Wait for completion (short timeout to poll periodically)
        const waitResult = await this.wsClient.sendRpc("agent.wait", {
          runId: task.runId,
          timeoutMs: pollIntervalMs,
        }, pollIntervalMs + 2000);

        // Debug: log the poll result
        log(TAG, `poll result: taskId=${taskId}, status=${waitResult?.payload?.status}, ok=${waitResult?.ok}`);

        // Accept multiple completion statuses
        const completedStatuses = ["ok", "completed", "done", "finished", "success"];
        const status = waitResult?.payload?.status;

        if (waitResult?.ok && completedStatuses.includes(status)) {
          log(TAG, `spawn-task completed: taskId=${taskId}, status=${status}`);

          // Fetch the result from chat history
          const historyResult = await this.wsClient.sendRpc("chat.history", {
            sessionKey: task.childSessionKey,
            limit: 10,
          }, 10000);

          const resultText = this.extractLatestAssistantReply(historyResult);
          if (resultText) {
            const labelDisplay = task.label || "Background task";
            this.pushResponse(`${labelDisplay}:\n${resultText}`);
          } else {
            log(TAG, `spawn-task completed but no result text: taskId=${taskId}`);
          }

          this.broadcastTaskEvent(taskId, "completed", task.label, task.startedAt, resultText ?? undefined);
          this.pendingSpawnTasks.delete(taskId);
          savePendingTasks(this.pendingSpawnTasks);
          this.finishPoll();
          return;
        }

        if (waitResult?.payload?.status === "error" || waitResult?.payload?.status === "failed") {
          log(TAG, `spawn-task failed: taskId=${taskId}, error=${waitResult?.payload?.error || "unknown"}`);
          this.broadcastTaskEvent(taskId, "failed", task.label, task.startedAt);
          this.pendingSpawnTasks.delete(taskId);
          savePendingTasks(this.pendingSpawnTasks);
          this.finishPoll();
          return;
        }

        // Status is "timeout" or still running — emit polling once
        if (!task.pollingEmitted) {
          task.pollingEmitted = true;
          this.broadcastTaskEvent(taskId, "polling", task.label, task.startedAt);
        }
        setTimeout(() => poll(), 1000);
      } catch (err: any) {
        warn(TAG, `poll error for taskId=${taskId}: ${err.message}`);
        // Retry on transient errors
        setTimeout(() => poll(), pollIntervalMs);
      }
    };

    // Start polling
    poll();
  }
|
|
634
|
+
|
|
635
|
+
/** Decrement active polls and drain the queue. */
|
|
636
|
+
private finishPoll(): void {
|
|
637
|
+
this.activePolls--;
|
|
638
|
+
this.deps.profiler?.gauge("escalation.activePolls", this.activePolls);
|
|
639
|
+
this.deps.profiler?.gauge("escalation.pendingSpawns", this.pendingSpawnTasks.size);
|
|
640
|
+
// Drain queued tasks
|
|
641
|
+
while (this.pollQueue.length > 0 && this.activePolls < Escalator.MAX_CONCURRENT_POLLS) {
|
|
642
|
+
const nextId = this.pollQueue.shift()!;
|
|
643
|
+
if (this.pendingSpawnTasks.has(nextId)) {
|
|
644
|
+
log(TAG, `poll dequeued: taskId=${nextId}`);
|
|
645
|
+
this.pollTaskCompletion(nextId);
|
|
646
|
+
}
|
|
647
|
+
}
|
|
648
|
+
}
|
|
649
|
+
|
|
650
|
+
/** Extract the latest assistant reply from chat history. */
|
|
651
|
+
private extractLatestAssistantReply(historyResult: any): string | null {
|
|
652
|
+
// Try multiple paths to find messages (different API response formats)
|
|
653
|
+
const messages = historyResult?.payload?.messages
|
|
654
|
+
|| historyResult?.messages
|
|
655
|
+
|| historyResult?.payload?.result?.messages
|
|
656
|
+
|| historyResult?.result?.messages;
|
|
657
|
+
|
|
658
|
+
// Debug: log what we found
|
|
659
|
+
log(TAG, `extractLatestAssistantReply: messages=${Array.isArray(messages) ? messages.length : "none"}`);
|
|
660
|
+
|
|
661
|
+
if (!Array.isArray(messages)) {
|
|
662
|
+
// Maybe it's a direct text response
|
|
663
|
+
if (typeof historyResult?.payload?.text === "string") {
|
|
664
|
+
log(TAG, `extractLatestAssistantReply: found payload.text`);
|
|
665
|
+
return historyResult.payload.text;
|
|
666
|
+
}
|
|
667
|
+
if (typeof historyResult?.text === "string") {
|
|
668
|
+
log(TAG, `extractLatestAssistantReply: found text`);
|
|
669
|
+
return historyResult.text;
|
|
670
|
+
}
|
|
671
|
+
if (typeof historyResult?.payload?.result?.text === "string") {
|
|
672
|
+
log(TAG, `extractLatestAssistantReply: found payload.result.text`);
|
|
673
|
+
return historyResult.payload.result.text;
|
|
674
|
+
}
|
|
675
|
+
log(TAG, `extractLatestAssistantReply: no messages array found, historyResult keys=${Object.keys(historyResult || {}).join(",")}`);
|
|
676
|
+
return null;
|
|
677
|
+
}
|
|
678
|
+
|
|
679
|
+
// Find the last assistant message
|
|
680
|
+
for (let i = messages.length - 1; i >= 0; i--) {
|
|
681
|
+
const msg = messages[i];
|
|
682
|
+
if (msg?.role === "assistant") {
|
|
683
|
+
// Extract text content
|
|
684
|
+
if (typeof msg.content === "string") return msg.content;
|
|
685
|
+
if (Array.isArray(msg.content)) {
|
|
686
|
+
const textPart = msg.content.find((p: any) => p.type === "text");
|
|
687
|
+
if (textPart?.text) return textPart.text;
|
|
688
|
+
}
|
|
689
|
+
}
|
|
690
|
+
}
|
|
691
|
+
|
|
692
|
+
log(TAG, `extractLatestAssistantReply: no assistant message found in ${messages.length} messages`);
|
|
693
|
+
return null;
|
|
694
|
+
}
|
|
695
|
+
|
|
696
|
+
// ── Private ──
|
|
697
|
+
|
|
698
|
+
/**
 * Process the agent response arriving in Phase 2 (called by EscalationSlot callback).
 *
 * On a successful RPC, updates latency/response stats, extracts the agent's
 * text payloads, pushes a visible response (unless the agent answered
 * NO_REPLY or with empty text), and records a feedback entry when the slot
 * carries feedback context. On an explicit RPC error, surfaces the error in
 * the feed and bumps the error counters.
 *
 * @param result       RPC envelope — `{ ok, payload }` on success, `{ ok: false, error }` on failure.
 * @param entry        The originating slot entry (message + optional feedback context).
 * @param rpcLatencyMs Round-trip latency of the RPC, fed into the latency EMA.
 */
private handleEscalationResponse(result: any, entry: SlotEntry, rpcLatencyMs: number): void {
  if (result?.ok && result.payload) {
    const p = result.payload;
    log(TAG, `WS RPC ok → runId=${p.runId}, status=${p.status}, latency=${rpcLatencyMs}ms`);

    this.stats.totalDirectResponses++;
    this.stats.consecutiveTimeouts = 0;
    // EMA α=0.2: smooths latency while reacting to sustained changes
    this.stats.avgResponseMs = this.stats.avgResponseMs === 0
      ? rpcLatencyMs
      : this.stats.avgResponseMs * 0.8 + rpcLatencyMs * 0.2;

    // responseText is captured outside the branch so the feedback record
    // below sees whatever text was extracted (or "" when none was).
    const payloads = p.result?.payloads;
    let responseText = "";
    if (Array.isArray(payloads) && payloads.length > 0) {
      const output = payloads.map((pl: any) => pl.text || "").join("\n").trim();
      responseText = output;
      // A literal "NO_REPLY…" answer (or empty text) is treated as silence.
      if (output && !output.startsWith("NO_REPLY")) {
        this.pushResponse(output, this.lastEscalationContext);
      } else {
        this.stats.totalNoReply++;
        this.deps.profiler?.gauge("escalation.totalNoReply", this.stats.totalNoReply);
        log(TAG, output ? `agent returned NO_REPLY as text — silent` : `empty text in ${payloads.length} payloads`);
      }
    } else {
      // No payloads array at all — counted as a NO_REPLY.
      this.stats.totalNoReply++;
      this.deps.profiler?.gauge("escalation.totalNoReply", this.stats.totalNoReply);
      log(TAG, "agent returned NO_REPLY — silent");
    }

    // Record feedback (async, non-blocking)
    if (entry.feedbackCtx) {
      const { digest, ...ctx } = entry.feedbackCtx;
      this.recordFeedback(ctx, digest, entry.message, responseText, rpcLatencyMs);
    }
  } else if (result && !result.ok) {
    const errDetail = JSON.stringify(result.error || result.payload);
    log(TAG, `agent RPC error: ${errDetail}`);
    this.pushError(errDetail);
    this.stats.totalErrors++;
    this.deps.profiler?.gauge("escalation.errors", this.stats.totalErrors);
  }
  // NOTE(review): a null/undefined result falls through silently here —
  // presumably the slot's timeout path reports it elsewhere; confirm.
}
|
|
742
|
+
|
|
743
|
+
private broadcastTaskEvent(
|
|
744
|
+
taskId: string,
|
|
745
|
+
status: SpawnTaskStatus,
|
|
746
|
+
label?: string,
|
|
747
|
+
startedAt?: number,
|
|
748
|
+
resultPreview?: string,
|
|
749
|
+
): void {
|
|
750
|
+
const now = Date.now();
|
|
751
|
+
const isTerminal = status === "completed" || status === "failed" || status === "timeout";
|
|
752
|
+
const msg: SpawnTaskMessage = {
|
|
753
|
+
type: "spawn_task",
|
|
754
|
+
taskId,
|
|
755
|
+
label: label || "Background task",
|
|
756
|
+
status,
|
|
757
|
+
startedAt: startedAt || now,
|
|
758
|
+
...(isTerminal ? { completedAt: now } : {}),
|
|
759
|
+
...(resultPreview ? { resultPreview: resultPreview.slice(0, 200) } : {}),
|
|
760
|
+
};
|
|
761
|
+
log(TAG, `broadcast spawn_task: taskId=${taskId}, status=${status}, clients=${this.deps.wsHandler.clientCount}`);
|
|
762
|
+
this.deps.wsHandler.broadcastRaw(msg);
|
|
763
|
+
}
|
|
764
|
+
|
|
765
|
+
private pushResponse(output: string, context?: ContextWindow | null): void {
|
|
766
|
+
// Allow longer responses for coding contexts
|
|
767
|
+
const { coding } = context ? isCodingContext(context) : { coding: false };
|
|
768
|
+
const maxLen = coding ? 4000 : 3000;
|
|
769
|
+
|
|
770
|
+
const text = `[🤖] ${output.trim().slice(0, maxLen)}`;
|
|
771
|
+
this.deps.feedBuffer.push(text, "high", "openclaw", "agent");
|
|
772
|
+
this.deps.wsHandler.broadcast(text, "high", "agent");
|
|
773
|
+
this.stats.totalResponses++;
|
|
774
|
+
this.deps.profiler?.gauge("escalation.totalResponses", this.stats.totalResponses);
|
|
775
|
+
this.stats.lastResponseTs = Date.now();
|
|
776
|
+
log(TAG, `response pushed (coding=${coding}, maxLen=${maxLen}): "${output.slice(0, 80)}..."`);
|
|
777
|
+
}
|
|
778
|
+
|
|
779
|
+
private pushError(detail: string): void {
|
|
780
|
+
const text = `[\ud83e\udd16 err] ${detail.slice(0, 500)}`;
|
|
781
|
+
this.deps.feedBuffer.push(text, "normal", "openclaw", "stream");
|
|
782
|
+
}
|
|
783
|
+
|
|
784
|
+
/** Record a feedback entry after successful escalation. Safe — never throws. */
|
|
785
|
+
private recordFeedback(
|
|
786
|
+
ctx: { tickId: number; hud: string; currentApp: string; escalationScore: number; escalationReasons: string[]; codingContext: boolean } | undefined,
|
|
787
|
+
digest: string,
|
|
788
|
+
escalationMessage: string,
|
|
789
|
+
openclawResponse: string,
|
|
790
|
+
responseLatencyMs: number,
|
|
791
|
+
): void {
|
|
792
|
+
if (!ctx || !this.deps.feedbackStore || !this.deps.signalCollector) return;
|
|
793
|
+
try {
|
|
794
|
+
const record = this.deps.feedbackStore.createRecord({
|
|
795
|
+
tickId: ctx.tickId,
|
|
796
|
+
digest,
|
|
797
|
+
hud: ctx.hud,
|
|
798
|
+
currentApp: ctx.currentApp,
|
|
799
|
+
escalationScore: ctx.escalationScore,
|
|
800
|
+
escalationReasons: ctx.escalationReasons,
|
|
801
|
+
codingContext: ctx.codingContext,
|
|
802
|
+
escalationMessage,
|
|
803
|
+
openclawResponse,
|
|
804
|
+
responseLatencyMs,
|
|
805
|
+
});
|
|
806
|
+
this.deps.feedbackStore.append(record);
|
|
807
|
+
this.deps.signalCollector.schedule(record);
|
|
808
|
+
} catch (err: any) {
|
|
809
|
+
warn(TAG, `feedback record failed: ${err.message}`);
|
|
810
|
+
}
|
|
811
|
+
}
|
|
812
|
+
}
|