metame-cli 1.5.19 → 1.5.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.js +157 -80
- package/package.json +2 -2
- package/scripts/bin/bootstrap-worktree.sh +20 -0
- package/scripts/core/audit.js +190 -0
- package/scripts/core/handoff.js +780 -0
- package/scripts/core/handoff.test.js +1074 -0
- package/scripts/core/memory-model.js +183 -0
- package/scripts/core/memory-model.test.js +486 -0
- package/scripts/core/reactive-paths.js +44 -0
- package/scripts/core/reactive-paths.test.js +35 -0
- package/scripts/core/reactive-prompt.js +51 -0
- package/scripts/core/reactive-prompt.test.js +88 -0
- package/scripts/core/reactive-signal.js +40 -0
- package/scripts/core/reactive-signal.test.js +88 -0
- package/scripts/core/thread-chat-id.js +52 -0
- package/scripts/core/thread-chat-id.test.js +113 -0
- package/scripts/daemon-bridges.js +92 -38
- package/scripts/daemon-claude-engine.js +373 -444
- package/scripts/daemon-command-router.js +82 -8
- package/scripts/daemon-engine-runtime.js +7 -10
- package/scripts/daemon-reactive-lifecycle.js +100 -33
- package/scripts/daemon-session-commands.js +133 -43
- package/scripts/daemon-session-store.js +300 -82
- package/scripts/daemon-team-dispatch.js +16 -16
- package/scripts/daemon.js +21 -175
- package/scripts/deploy-manifest.js +90 -0
- package/scripts/docs/maintenance-manual.md +14 -11
- package/scripts/docs/pointer-map.md +13 -4
- package/scripts/feishu-adapter.js +31 -27
- package/scripts/hooks/intent-engine.js +6 -3
- package/scripts/hooks/intent-memory-recall.js +1 -0
- package/scripts/hooks/intent-perpetual.js +1 -1
- package/scripts/memory-extract.js +5 -97
- package/scripts/memory-gc.js +35 -90
- package/scripts/memory-migrate-v2.js +304 -0
- package/scripts/memory-nightly-reflect.js +40 -41
- package/scripts/memory.js +340 -859
- package/scripts/migrate-reactive-paths.js +122 -0
- package/scripts/signal-capture.js +4 -0
- package/scripts/sync-plugin.js +56 -0
|
@@ -0,0 +1,1074 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const { describe, it } = require('node:test');
|
|
4
|
+
const assert = require('node:assert/strict');
|
|
5
|
+
const path = require('path');
|
|
6
|
+
|
|
7
|
+
const { EventEmitter } = require('events');
|
|
8
|
+
const handoff = require('./handoff');
|
|
9
|
+
const { createPlatformSpawn, terminateChildProcess, stopStreamingLifecycle, abortStreamingChildLifecycle, setActiveChildProcess, clearActiveChildProcess, acquireStreamingChild, buildStreamingResult, resolveStreamingClosePayload, accumulateStreamingStderr, splitStreamingStdoutChunk, buildStreamFlushPayload, buildToolOverlayPayload, buildMilestoneOverlayPayload, finalizePersistentStreamingTurn, writeStreamingChildInput, parseStreamingEvents, applyStreamingMetadata, applyStreamingToolState, applyStreamingContentState, createStreamingWatchdog, runAsyncCommand } = handoff;
|
|
10
|
+
const { resolveNodeEntry, escalateKill, resetReusableChildListeners, destroyChildStdin, recordToolUsage, reduceStreamingWaitState, applyStreamingTextResult } = handoff._internal;
|
|
11
|
+
|
|
12
|
+
// resolveNodeEntry: finds the real JS entry point referenced by a Windows .cmd shim.
describe('resolveNodeEntry', () => {
  it('extracts the node entry from a cmd wrapper', () => {
    // Stub fs serves a canned shim script and only acknowledges the target entry file.
    const stubFs = {
      readFileSync: () => '@echo off\n"%dp0%bin\\entry.js" %*\n',
      existsSync: (file) => file === 'C:\\tools\\bin\\entry.js',
    };
    const resolved = resolveNodeEntry(stubFs, path.win32, 'C:\\tools\\claude.cmd');
    assert.equal(resolved, 'C:\\tools\\bin\\entry.js');
  });
});
|
|
24
|
+
|
|
25
|
+
// createPlatformSpawn: platform-aware spawn factory behavior.
describe('createPlatformSpawn', () => {
  it('uses node entry directly for cmd-like tools on windows', () => {
    const recorded = [];
    const spawnStub = (cmd, args, options) => {
      recorded.push({ cmd, args, options });
      return { cmd, args, options };
    };
    // Note: named `wrapper` here to avoid shadowing the module-level `handoff` import.
    const wrapper = createPlatformSpawn({
      fs: {
        readFileSync: () => '@echo off\n"%dp0%runner.js" %*\n',
        existsSync: (file) => file === 'C:\\tools\\runner.js' || file === 'C:\\tools\\codex.cmd',
      },
      path: path.win32,
      spawn: spawnStub,
      execSync: () => 'C:\\tools\\codex.cmd\n',
      processPlatform: 'win32',
      processExecPath: 'C:\\Program Files\\nodejs\\node.exe',
      claudeBin: 'claude',
    });

    wrapper.spawn('codex', ['exec'], { cwd: 'C:\\repo' });

    assert.equal(recorded.length, 1);
    const [call] = recorded;
    assert.equal(call.cmd, 'C:\\Program Files\\nodejs\\node.exe');
    assert.deepEqual(call.args, ['C:\\tools\\runner.js', 'exec']);
    assert.equal(call.options.windowsHide, true);
  });

  it('passes through unchanged on non-windows', () => {
    const recorded = [];
    const spawnStub = (cmd, args, options) => {
      recorded.push({ cmd, args, options });
      return { cmd, args, options };
    };
    // On darwin neither fs nor execSync may be consulted; both stubs throw if touched.
    const wrapper = createPlatformSpawn({
      fs: {
        readFileSync: () => { throw new Error('should not read files'); },
        existsSync: () => false,
      },
      path,
      spawn: spawnStub,
      execSync: () => { throw new Error('should not run'); },
      processPlatform: 'darwin',
      processExecPath: process.execPath,
      claudeBin: 'claude',
    });

    wrapper.spawn('claude', ['-p'], { cwd: '/tmp' });

    assert.equal(recorded.length, 1);
    const [call] = recorded;
    assert.equal(call.cmd, 'claude');
    assert.deepEqual(call.args, ['-p']);
    assert.deepEqual(call.options, { cwd: '/tmp' });
  });
});
|
|
80
|
+
|
|
81
|
+
// terminateChildProcess: signal delivery with process-group fallback.
describe('terminateChildProcess', () => {
  it('falls back to child.kill when process group signal throws', () => {
    const received = [];
    const realKill = process.kill;
    process.kill = () => { throw new Error('no group'); };
    try {
      const proc = {
        pid: 123,
        kill: (signal) => { received.push(signal); },
      };
      assert.equal(terminateChildProcess(proc, 'SIGTERM'), true);
      assert.deepEqual(received, ['SIGTERM']);
    } finally {
      // Always restore the global even if an assertion throws.
      process.kill = realKill;
    }
  });

  it('skips process-group kill when useProcessGroup is false', () => {
    const received = [];
    const realKill = process.kill;
    process.kill = () => { throw new Error('should not be called'); };
    try {
      const proc = {
        pid: 123,
        kill: (signal) => { received.push(signal); },
      };
      assert.equal(terminateChildProcess(proc, 'SIGTERM', { useProcessGroup: false }), true);
      assert.deepEqual(received, ['SIGTERM']);
    } finally {
      process.kill = realKill;
    }
  });
});
|
|
114
|
+
|
|
115
|
+
// escalateKill: SIGTERM followed by a timed SIGKILL escalation.
describe('escalateKill', () => {
  it('schedules a SIGKILL escalation timer', async () => {
    const signals = [];
    const realKill = process.kill;
    process.kill = (_pid, signal) => { signals.push(signal); };
    try {
      const proc = { pid: 321, kill() {} };
      const { timer } = escalateKill(proc, 'SIGTERM', 10);
      // Wait past the 10ms escalation window so SIGKILL fires.
      await new Promise((resolve) => setTimeout(resolve, 30));
      clearTimeout(timer);
      assert.deepEqual(signals, ['SIGTERM', 'SIGKILL']);
    } finally {
      process.kill = realKill;
    }
  });

  it('uses child.kill directly when process groups are disabled', async () => {
    const received = [];
    const realKill = process.kill;
    process.kill = () => { throw new Error('should not be called'); };
    try {
      const proc = {
        pid: 456,
        kill: (signal) => { received.push(signal); },
      };
      const { timer } = escalateKill(proc, 'SIGTERM', 10, { useProcessGroup: false });
      await new Promise((resolve) => setTimeout(resolve, 30));
      clearTimeout(timer);
      assert.deepEqual(received, ['SIGTERM', 'SIGKILL']);
    } finally {
      process.kill = realKill;
    }
  });
});
|
|
149
|
+
|
|
150
|
+
// resetReusableChildListeners: scrubs every listener a previous turn attached.
describe('resetReusableChildListeners', () => {
  it('clears reused child stream and lifecycle listeners', () => {
    const proc = new EventEmitter();
    proc.stdout = new EventEmitter();
    proc.stderr = new EventEmitter();
    proc.stdin = new EventEmitter();
    // Attach one listener per channel the helper is expected to clear.
    proc.stdout.on('data', () => {});
    proc.stderr.on('data', () => {});
    proc.stdin.on('error', () => {});
    proc.on('close', () => {});
    proc.on('error', () => {});

    const returned = resetReusableChildListeners(proc);

    assert.equal(returned, proc);
    for (const [emitter, event] of [
      [proc.stdout, 'data'],
      [proc.stderr, 'data'],
      [proc.stdin, 'error'],
      [proc, 'close'],
      [proc, 'error'],
    ]) {
      assert.equal(emitter.listenerCount(event), 0);
    }
  });
});
|
|
172
|
+
|
|
173
|
+
// destroyChildStdin: best-effort stdin teardown with a boolean outcome.
describe('destroyChildStdin', () => {
  it('destroys stdin when available', () => {
    let destroyed = false;
    const proc = {
      stdin: {
        destroy() { destroyed = true; },
      },
    };

    assert.equal(destroyChildStdin(proc), true);
    assert.equal(destroyed, true);
  });

  it('returns false when stdin destroy is unavailable', () => {
    // Missing destroy() and missing child must both be non-fatal.
    assert.equal(destroyChildStdin({ stdin: {} }), false);
    assert.equal(destroyChildStdin(null), false);
  });
});
|
|
191
|
+
|
|
192
|
+
// Lifecycle cleanup helpers: watchdog stop, timer clearing, and abort bookkeeping.
describe('streaming lifecycle cleanup', () => {
  it('stops the watchdog and clears the milestone timer', async () => {
    let stopped = false;
    let timerFired = false;
    const milestoneTimer = setTimeout(() => { timerFired = true; }, 20);

    stopStreamingLifecycle({ stop() { stopped = true; } }, milestoneTimer);
    // Wait beyond the timer's 20ms deadline to prove it was cancelled.
    await new Promise((resolve) => setTimeout(resolve, 40));

    assert.equal(stopped, true);
    assert.equal(timerFired, false);
  });

  it('aborts stdin-driven streaming cleanup without leaving the active child registered', () => {
    const registry = new Map([['chat-1', { child: { pid: 1 } }]]);
    let persisted = 0;
    let destroyed = false;
    let abortReason = null;
    const milestoneTimer = setTimeout(() => {}, 1000);

    abortStreamingChildLifecycle({
      child: {
        stdin: {
          destroy() { destroyed = true; },
        },
      },
      watchdog: {
        abort(reason) { abortReason = reason; },
      },
      milestoneTimer,
      activeProcesses: registry,
      saveActivePids: () => { persisted += 1; },
      chatId: 'chat-1',
      reason: 'stdin',
    });

    clearTimeout(milestoneTimer);
    assert.equal(destroyed, true);
    assert.equal(abortReason, 'stdin');
    assert.equal(registry.has('chat-1'), false);
    assert.equal(persisted, 1);
  });
});
|
|
235
|
+
|
|
236
|
+
// set/clearActiveChildProcess: registry mutation plus pid-snapshot persistence.
describe('active child tracking', () => {
  it('stores an active child entry and persists the pid snapshot', () => {
    const registry = new Map();
    let persisted = 0;
    const entry = { child: { pid: 1 }, engine: 'claude' };

    assert.equal(setActiveChildProcess(registry, () => { persisted += 1; }, 'chat-1', entry), true);
    assert.equal(registry.get('chat-1'), entry);
    assert.equal(persisted, 1);
  });

  it('clears an active child entry and persists the pid snapshot', () => {
    const registry = new Map([['chat-1', { child: { pid: 1 } }]]);
    let persisted = 0;

    assert.equal(clearActiveChildProcess(registry, () => { persisted += 1; }, 'chat-1'), true);
    assert.equal(registry.has('chat-1'), false);
    assert.equal(persisted, 1);
  });
});
|
|
256
|
+
|
|
257
|
+
// acquireStreamingChild: warm-pool reuse vs. fresh detached spawn.
describe('acquireStreamingChild', () => {
  it('reuses a warm child after resetting listeners', () => {
    const warm = new EventEmitter();
    warm.stdout = new EventEmitter();
    warm.stderr = new EventEmitter();
    warm.stdin = new EventEmitter();
    // Stale listeners from the previous turn; reuse must strip them all.
    warm.stdout.on('data', () => {});
    warm.stderr.on('data', () => {});
    warm.stdin.on('error', () => {});
    warm.on('close', () => {});
    warm.on('error', () => {});

    const acquired = acquireStreamingChild({
      warmChild: warm,
      spawn() { throw new Error('should not spawn'); },
    });

    assert.equal(acquired.child, warm);
    assert.equal(acquired.reused, true);
    for (const [emitter, event] of [
      [warm.stdout, 'data'],
      [warm.stderr, 'data'],
      [warm.stdin, 'error'],
      [warm, 'close'],
      [warm, 'error'],
    ]) {
      assert.equal(emitter.listenerCount(event), 0);
    }
  });

  it('spawns a fresh detached child when no warm child is available', () => {
    const recorded = [];
    const spawned = { pid: 123 };
    const acquired = acquireStreamingChild({
      spawn(binary, args, options) {
        recorded.push({ binary, args, options });
        return spawned;
      },
      binary: 'claude',
      args: ['-p'],
      cwd: '/tmp',
      env: { A: '1' },
      useDetached: true,
    });

    assert.equal(acquired.child, spawned);
    assert.equal(acquired.reused, false);
    assert.deepEqual(recorded, [{
      binary: 'claude',
      args: ['-p'],
      options: {
        cwd: '/tmp',
        stdio: ['pipe', 'pipe', 'pipe'],
        detached: true,
        env: { A: '1' },
      },
    }]);
  });
});
|
|
312
|
+
|
|
313
|
+
// buildStreamingResult: default-filling and override merging for turn results.
describe('buildStreamingResult', () => {
  it('fills default streaming metadata fields', () => {
    const result = buildStreamingResult({ output: 'ok', error: null });
    assert.deepEqual(result, {
      output: 'ok',
      error: null,
      files: [],
      toolUsageLog: [],
      usage: null,
      sessionId: '',
    });
  });

  it('allows overrides for additional result fields', () => {
    const result = buildStreamingResult(
      { output: null, error: 'boom', files: ['a'], toolUsageLog: [{ tool: 'Read' }], usage: { input: 1 }, sessionId: 'sid' },
      { timedOut: true, errorCode: 'INTERRUPTED' }
    );
    assert.deepEqual(result, {
      output: null,
      error: 'boom',
      files: ['a'],
      toolUsageLog: [{ tool: 'Read' }],
      usage: { input: 1 },
      sessionId: 'sid',
      timedOut: true,
      errorCode: 'INTERRUPTED',
    });
  });
});
|
|
347
|
+
|
|
348
|
+
// resolveStreamingClosePayload: classification of child-exit outcomes.
describe('resolveStreamingClosePayload', () => {
  const formatTimeoutWindowLabel = (timeoutMs, kind) => `${kind}:${timeoutMs}`;
  const emptyStream = { finalResult: '', finalUsage: null, observedSessionId: '', writtenFiles: [], toolUsageLog: [] };
  const defaultTimeout = { startTime: Date.now(), idleTimeoutMs: 1000, toolTimeoutMs: 1000, hardCeilingMs: 60000, formatTimeoutWindowLabel };
  // Shared watchdog stub representing "never killed".
  const idleWatchdog = { isKilled: () => false, getKilledReason: () => null };

  it('maps interrupted merge-pause exits to the merge pause error code', () => {
    const payload = resolveStreamingClosePayload({
      code: 1,
      streamState: { finalResult: 'partial', finalUsage: null, observedSessionId: 'sess', writtenFiles: ['/tmp/out.txt'], toolUsageLog: [{ tool: 'Write', context: 'out.txt' }] },
      wasAborted: true,
      abortReason: 'merge-pause',
      watchdog: idleWatchdog,
      timeoutConfig: defaultTimeout,
      classifiedError: null,
      stderr: '',
    });

    assert.equal(payload.error, 'Paused for merge');
    assert.equal(payload.errorCode, 'INTERRUPTED_MERGE_PAUSE');
    assert.equal(payload.output, 'partial');
  });

  it('keeps interrupted zero-exit payloads nullable when no output was produced', () => {
    const payload = resolveStreamingClosePayload({
      code: 0,
      streamState: emptyStream,
      wasAborted: true,
      abortReason: 'user-stop',
      watchdog: idleWatchdog,
      timeoutConfig: defaultTimeout,
      classifiedError: null,
      stderr: '',
    });

    assert.equal(payload.output, null);
    assert.equal(payload.errorCode, 'INTERRUPTED_USER');
  });

  it('marks watchdog timeouts as timedOut results', () => {
    const payload = resolveStreamingClosePayload({
      code: 1,
      streamState: emptyStream,
      wasAborted: false,
      abortReason: '',
      stdinFailureError: null,
      watchdog: { isKilled: () => true, getKilledReason: () => 'tool' },
      // startTime pushed two minutes back so the elapsed window exceeds the ceiling.
      timeoutConfig: { startTime: Date.now() - 2 * 60000, idleTimeoutMs: 1000, toolTimeoutMs: 2000, hardCeilingMs: 60000, formatTimeoutWindowLabel },
      classifiedError: null,
      stderr: '',
    });

    assert.equal(payload.timedOut, true);
    assert.match(payload.error, /工具执行tool:2000超时/);
  });

  it('prefers classified engine errors over raw stderr on non-zero exit', () => {
    const payload = resolveStreamingClosePayload({
      code: 2,
      streamState: emptyStream,
      watchdog: idleWatchdog,
      timeoutConfig: defaultTimeout,
      classifiedError: { message: 'friendly message', code: 'EXEC_FAILURE' },
      stderr: 'raw stderr',
    });

    assert.equal(payload.error, 'friendly message');
    assert.equal(payload.errorCode, 'EXEC_FAILURE');
  });
});
|
|
417
|
+
|
|
418
|
+
// accumulateStreamingStderr: stderr accumulation and first-error capture.
describe('accumulateStreamingStderr', () => {
  it('appends stderr chunks and captures the first classified error', () => {
    const afterFirst = accumulateStreamingStderr(
      { stderr: '', classifiedError: null },
      'model not found',
      {
        classifyError: (chunk) => ({ message: `classified:${chunk}`, code: 'EXEC_FAILURE' }),
      }
    );
    // A later classifier result must not displace the first captured error.
    const afterSecond = accumulateStreamingStderr(
      afterFirst,
      ' raw stderr',
      {
        classifyError: () => ({ message: 'should not replace', code: 'OTHER' }),
      }
    );

    assert.equal(afterSecond.stderr, 'model not found raw stderr');
    assert.deepEqual(afterSecond.classifiedError, { message: 'classified:model not found', code: 'EXEC_FAILURE' });
  });

  it('flags API-looking stderr chunks via isApiError', () => {
    const state = accumulateStreamingStderr(
      { stderr: '', classifiedError: null },
      '400 invalid model request',
      {}
    );

    assert.equal(state.stderr, '400 invalid model request');
    assert.equal(state.isApiError, true);
  });

  it('does not flag non-API stderr as isApiError', () => {
    const state = accumulateStreamingStderr(
      { stderr: '', classifiedError: null },
      'normal debug output',
      {}
    );
    assert.equal(state.isApiError, false);
  });
});
|
|
459
|
+
|
|
460
|
+
// splitStreamingStdoutChunk: newline framing with carry-over buffering.
describe('splitStreamingStdoutChunk', () => {
  it('returns complete lines and preserves the trailing partial buffer', () => {
    const framed = splitStreamingStdoutChunk('partial', ' line 1\nline 2\ntrail');
    assert.deepEqual(framed, {
      lines: ['partial line 1', 'line 2'],
      buffer: 'trail',
    });
  });

  it('keeps the full chunk buffered when no newline is present', () => {
    const framed = splitStreamingStdoutChunk('', 'no newline yet');
    assert.deepEqual(framed, {
      lines: [],
      buffer: 'no newline yet',
    });
  });
});
|
|
481
|
+
|
|
482
|
+
// buildStreamFlushPayload: throttled flush decisions for streamed text.
describe('buildStreamFlushPayload', () => {
  it('skips flushes for empty stream text', () => {
    const decision = buildStreamFlushPayload({ streamText: ' ', lastFlushAt: 10 }, { now: 20, throttleMs: 5 });
    assert.deepEqual(decision, { shouldFlush: false, lastFlushAt: 10 });
  });

  it('throttles non-forced flushes within the throttle window', () => {
    // now - lastFlushAt (100ms) is below throttleMs (150ms), so no flush.
    const decision = buildStreamFlushPayload({ streamText: 'hello', lastFlushAt: 100 }, { now: 200, throttleMs: 150 });
    assert.deepEqual(decision, { shouldFlush: false, lastFlushAt: 100 });
  });

  it('builds a stream payload when flush is allowed', () => {
    const decision = buildStreamFlushPayload({ streamText: 'hello', lastFlushAt: 100 }, { now: 300, throttleMs: 150 });
    assert.deepEqual(decision, { shouldFlush: true, lastFlushAt: 300, payload: '__STREAM_TEXT__hello' });
  });
});
|
|
504
|
+
|
|
505
|
+
// buildToolOverlayPayload: throttled tool-status overlays for chat streaming.
describe('buildToolOverlayPayload', () => {
  const toolEmoji = { default: '•', Write: '✍️', Skill: '🧠' };

  it('suppresses tool overlays inside the throttle window', () => {
    const decision = buildToolOverlayPayload({
      toolName: 'Write',
      toolInput: { file_path: '/tmp/out.txt' },
      lastStatusTime: 100,
      now: 150,
      throttleMs: 100,
      toolEmoji,
      pathModule: path,
    });
    assert.deepEqual(decision, { shouldEmit: false, lastStatusTime: 100 });
  });

  it('builds overlay payloads with streamed text context', () => {
    const decision = buildToolOverlayPayload({
      toolName: 'Write',
      toolInput: { file_path: '/tmp/out.txt' },
      streamText: 'partial output',
      lastStatusTime: 100,
      now: 500,
      throttleMs: 100,
      toolEmoji,
      pathModule: path,
    });

    assert.equal(decision.shouldEmit, true);
    assert.equal(decision.lastStatusTime, 500);
    assert.match(decision.payload, /^__TOOL_OVERLAY__partial output\n\n> ✍️ Write: 「out/);
  });

  it('formats playwright MCP tools as browser actions', () => {
    const decision = buildToolOverlayPayload({
      toolName: 'mcp__playwright__open_page',
      toolInput: {},
      lastStatusTime: 0,
      now: 500,
      throttleMs: 100,
      toolEmoji,
      pathModule: path,
    });

    assert.equal(decision.payload, '🌐 Browser: 「open page」');
  });
});
|
|
554
|
+
|
|
555
|
+
// recordToolUsage: tool log entries plus written-file tracking, with a cap.
describe('recordToolUsage', () => {
  it('records tool context and tracks written files once', () => {
    const next = recordToolUsage(
      { toolUsageLog: [], writtenFiles: ['/tmp/existing.txt'] },
      {
        toolName: 'Write',
        toolInput: { file_path: '/tmp/out.txt' },
        pathModule: path,
      }
    );

    assert.deepEqual(next.toolUsageLog, [{ tool: 'Write', context: 'out.txt' }]);
    assert.deepEqual(next.writtenFiles, ['/tmp/existing.txt', '/tmp/out.txt']);
  });

  it('caps tool usage entries but still tracks file writes', () => {
    // Log already at the 50-entry cap; the file write must still be recorded.
    const next = recordToolUsage(
      { toolUsageLog: new Array(50).fill({ tool: 'Read' }), writtenFiles: [] },
      {
        toolName: 'Write',
        toolInput: { file_path: '/tmp/out.txt' },
        pathModule: path,
        maxEntries: 50,
      }
    );

    assert.equal(next.toolUsageLog.length, 50);
    assert.deepEqual(next.writtenFiles, ['/tmp/out.txt']);
  });
});
|
|
585
|
+
|
|
586
|
+
// buildMilestoneOverlayPayload: periodic progress messages, plain or overlaid.
describe('buildMilestoneOverlayPayload', () => {
  it('builds a plain milestone message without stream text', () => {
    const message = buildMilestoneOverlayPayload({
      elapsedMin: 7,
      toolCallCount: 2,
      writtenFiles: ['/tmp/a.txt'],
      toolUsageLog: [{ tool: 'Write', context: 'a.txt' }],
    });
    assert.equal(message, '⏳ 已运行 7 分钟 | 调用 2 次工具 | 修改 1 个文件 | 最近: Write a.txt');
  });

  it('wraps milestone text as an overlay when stream text exists', () => {
    const message = buildMilestoneOverlayPayload({
      elapsedMin: 2,
      toolCallCount: 0,
      writtenFiles: [],
      toolUsageLog: [],
      streamText: 'partial output',
    });
    assert.equal(message, '__TOOL_OVERLAY__partial output\n\n> ⏳ 已运行 2 分钟');
  });
});
|
|
612
|
+
|
|
613
|
+
// finalizePersistentStreamingTurn: end-of-turn bookkeeping for warm children.
describe('finalizePersistentStreamingTurn', () => {
  it('stops lifecycle, clears active state, stores the warm child, and returns the final result', () => {
    let stopped = false;
    const registry = new Map([['chat-1', { child: { pid: 1 } }]]);
    let persisted = 0;
    const stored = [];
    const milestoneTimer = setTimeout(() => {}, 1000);
    const proc = { killed: false, exitCode: null };

    const result = finalizePersistentStreamingTurn({
      watchdog: { stop() { stopped = true; } },
      milestoneTimer,
      activeProcesses: registry,
      saveActivePids: () => { persisted += 1; },
      chatId: 'chat-1',
      warmPool: {
        storeWarm(key, child, meta) { stored.push({ key, proc: child, meta }); },
      },
      warmSessionKey: 'warm-1',
      child: proc,
      observedSessionId: 'sess-1',
      cwd: '/tmp/project',
      output: 'ok',
      files: ['/tmp/out.txt'],
      toolUsageLog: [{ tool: 'Write', context: 'out.txt' }],
      usage: { input_tokens: 1, output_tokens: 2 },
    });

    clearTimeout(milestoneTimer);
    assert.equal(stopped, true);
    assert.equal(registry.has('chat-1'), false);
    assert.equal(persisted, 1);
    assert.deepEqual(stored, [{
      key: 'warm-1',
      proc,
      meta: { sessionId: 'sess-1', cwd: '/tmp/project' },
    }]);
    assert.deepEqual(result, {
      output: 'ok',
      error: null,
      files: ['/tmp/out.txt'],
      toolUsageLog: [{ tool: 'Write', context: 'out.txt' }],
      usage: { input_tokens: 1, output_tokens: 2 },
      sessionId: 'sess-1',
    });
  });
});
|
|
660
|
+
|
|
661
|
+
// writeStreamingChildInput: stdin delivery in persistent vs. one-shot modes.
describe('writeStreamingChildInput', () => {
  it('writes stream-json input through the warm pool in persistent mode', () => {
    const written = [];
    const proc = {
      stdin: {
        write: (chunk) => { written.push(chunk); },
        end: () => { written.push('END'); },
      },
    };
    const outcome = writeStreamingChildInput({
      child: proc,
      input: 'hello',
      isPersistent: true,
      warmPool: { buildStreamMessage: (input, sessionId) => `MSG:${sessionId}:${input}` },
      observedSessionId: 'sess-1',
    });

    // Persistent mode keeps stdin open — no 'END' marker expected.
    assert.deepEqual(written, ['MSG:sess-1:hello']);
    assert.deepEqual(outcome, { mode: 'persistent' });
  });

  it('writes plain stdin and ends in one-shot mode', () => {
    const written = [];
    const proc = {
      stdin: {
        write: (chunk) => { written.push(chunk); },
        end: () => { written.push('END'); },
      },
    };
    const outcome = writeStreamingChildInput({
      child: proc,
      input: 'hello',
      isPersistent: false,
      warmPool: null,
    });

    assert.deepEqual(written, ['hello', 'END']);
    assert.deepEqual(outcome, { mode: 'oneshot' });
  });
});
|
|
701
|
+
|
|
702
|
+
// parseStreamingEvents: tolerant wrapper around a line parser.
describe('parseStreamingEvents', () => {
  it('returns parser output when parsing succeeds', () => {
    const events = parseStreamingEvents((line) => [JSON.parse(line)], '{"type":"text","text":"ok"}');
    assert.deepEqual(events, [{ type: 'text', text: 'ok' }]);
  });

  it('returns an empty list when the parser throws', () => {
    // A bad line must be swallowed, not propagated.
    const events = parseStreamingEvents(() => { throw new Error('bad line'); }, 'bad');
    assert.deepEqual(events, []);
  });
});
|
|
717
|
+
|
|
718
|
+
// reduceStreamingWaitState: tool-wait state machine transitions.
describe('reduceStreamingWaitState', () => {
  it('enters tool-waiting mode on tool_use', () => {
    const next = reduceStreamingWaitState(false, 'tool_use');
    assert.deepEqual(next, { waitingForTool: true, shouldUpdateWatchdog: true, watchdogWaiting: true });
  });

  it('clears tool-waiting mode on text output', () => {
    const next = reduceStreamingWaitState(true, 'text');
    assert.deepEqual(next, { waitingForTool: false, shouldUpdateWatchdog: true, watchdogWaiting: false });
  });

  it('keeps state unchanged when no wait transition applies', () => {
    const next = reduceStreamingWaitState(false, 'session');
    assert.deepEqual(next, { waitingForTool: false, shouldUpdateWatchdog: false, watchdogWaiting: false });
  });
});
|
|
740
|
+
|
|
741
|
+
// applyStreamingTextResult: text accumulation and done-event fallback.
describe('applyStreamingTextResult', () => {
  it('appends streamed text chunks with paragraph separators', () => {
    const next = applyStreamingTextResult(
      { finalResult: 'first', streamText: 'first' },
      { eventType: 'text', text: 'second' }
    );
    assert.deepEqual(next, { finalResult: 'first\n\nsecond', streamText: 'first\n\nsecond' });
  });

  it('uses done.result as a fallback when no text has streamed', () => {
    const next = applyStreamingTextResult(
      { finalResult: '', streamText: '' },
      { eventType: 'done', doneResult: 'tool-only result' }
    );
    assert.deepEqual(next, { finalResult: 'tool-only result', streamText: 'tool-only result' });
  });
});
|
|
762
|
+
|
|
763
|
+
// applyStreamingMetadata: session-id capture and classified-error capture.
describe('applyStreamingMetadata', () => {
  it('updates the observed session id on session events', () => {
    const next = applyStreamingMetadata(
      { observedSessionId: '', classifiedError: null },
      { type: 'session', sessionId: 'sess-1' }
    );
    assert.deepEqual(next, { observedSessionId: 'sess-1', classifiedError: null });
  });

  it('captures classified error events without disturbing the session id', () => {
    const errorEvent = { type: 'error', message: 'boom', code: 'EXEC_FAILURE' };
    const next = applyStreamingMetadata(
      { observedSessionId: 'sess-1', classifiedError: null },
      errorEvent
    );
    assert.deepEqual(next, { observedSessionId: 'sess-1', classifiedError: errorEvent });
  });
});
|
|
785
|
+
|
|
786
|
+
// applyStreamingToolState: reduces tool_use/tool_result events into tool-call
// bookkeeping (count, usage log, written files) plus watchdog wait flags.
describe('applyStreamingToolState', () => {
  it('updates tool state on tool_use events', () => {
    const opts = { pathModule: path, maxEntries: 50 };
    const prior = { waitingForTool: false, toolCallCount: 0, toolUsageLog: [], writtenFiles: [] };
    const event = { type: 'tool_use', toolName: 'Write', toolInput: { file_path: '/tmp/out.txt' } };
    assert.deepEqual(applyStreamingToolState(prior, event, opts), {
      toolCallCount: 1,
      waitingForTool: true,
      shouldUpdateWatchdog: true,
      watchdogWaiting: true,
      toolUsageLog: [{ tool: 'Write', context: 'out.txt' }],
      writtenFiles: ['/tmp/out.txt'],
      toolName: 'Write',
      toolInput: { file_path: '/tmp/out.txt' },
    });
  });

  it('only clears wait state on tool_result events', () => {
    const opts = { pathModule: path, maxEntries: 50 };
    const prior = {
      waitingForTool: true,
      toolCallCount: 2,
      toolUsageLog: [{ tool: 'Write', context: 'out.txt' }],
      writtenFiles: ['/tmp/out.txt'],
    };
    assert.deepEqual(applyStreamingToolState(prior, { type: 'tool_result' }, opts), {
      toolCallCount: 2,
      waitingForTool: false,
      shouldUpdateWatchdog: true,
      watchdogWaiting: false,
      toolUsageLog: [{ tool: 'Write', context: 'out.txt' }],
      writtenFiles: ['/tmp/out.txt'],
      toolName: 'Tool',
      toolInput: {},
    });
  });
});
|
|
832
|
+
|
|
833
|
+
// applyStreamingContentState: combined text/usage reducer — text events append
// and clear the tool wait; done events capture usage and force a flush.
describe('applyStreamingContentState', () => {
  it('updates text content state and clears tool wait on text events', () => {
    const prior = { finalResult: 'first', streamText: 'first', waitingForTool: true, finalUsage: null };
    const next = applyStreamingContentState(prior, { type: 'text', text: 'second' });
    assert.deepEqual(next, {
      finalResult: 'first\n\nsecond',
      streamText: 'first\n\nsecond',
      waitingForTool: false,
      shouldUpdateWatchdog: true,
      watchdogWaiting: false,
      finalUsage: null,
      shouldFlush: true,
      flushForce: false,
    });
  });

  it('captures final usage and forces a flush on done events', () => {
    const prior = { finalResult: '', streamText: '', waitingForTool: true, finalUsage: null };
    const event = { type: 'done', result: 'tool-only result', usage: { input_tokens: 1, output_tokens: 2 } };
    assert.deepEqual(applyStreamingContentState(prior, event), {
      finalResult: 'tool-only result',
      streamText: 'tool-only result',
      waitingForTool: false,
      shouldUpdateWatchdog: true,
      watchdogWaiting: false,
      finalUsage: { input_tokens: 1, output_tokens: 2 },
      shouldFlush: true,
      flushForce: true,
    });
  });
});
|
|
872
|
+
|
|
873
|
+
// createStreamingWatchdog: kills the child process when idle/tool/ceiling
// timeout windows elapse, or immediately via abort(), recording the reason.
describe('createStreamingWatchdog', () => {
  const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

  it('kills for idle timeout and records the reason', async () => {
    const killSignals = [];
    const child = { pid: 123, kill(signal) { killSignals.push(signal); } };
    const watchdog = createStreamingWatchdog({
      child,
      idleTimeoutMs: 10,
      toolTimeoutMs: 30,
      useProcessGroup: false,
    });

    await sleep(30);
    watchdog.stop();

    assert.equal(watchdog.isKilled(), true);
    assert.equal(watchdog.getKilledReason(), 'idle');
    assert.deepEqual(killSignals, ['SIGTERM']);
  });

  it('switches to tool timeout window when waiting for a tool', async () => {
    const killSignals = [];
    const child = { pid: 456, kill(signal) { killSignals.push(signal); } };
    const watchdog = createStreamingWatchdog({
      child,
      idleTimeoutMs: 10,
      toolTimeoutMs: 40,
      useProcessGroup: false,
    });

    watchdog.setWaitingForTool(true);
    await sleep(20);
    // Still inside the (longer) tool window, so no kill yet.
    assert.equal(watchdog.isKilled(), false);
    await sleep(40);
    watchdog.stop();

    assert.equal(watchdog.isKilled(), true);
    assert.equal(watchdog.getKilledReason(), 'tool');
    assert.deepEqual(killSignals, ['SIGTERM']);
  });

  it('aborts immediately with a custom reason', () => {
    const killSignals = [];
    const reasons = [];
    const child = { pid: 789, kill(signal) { killSignals.push(signal); } };
    const watchdog = createStreamingWatchdog({
      child,
      idleTimeoutMs: 1000,
      toolTimeoutMs: 1000,
      useProcessGroup: false,
      onKill(reason) { reasons.push(reason); },
    });

    watchdog.abort('stdin');
    watchdog.stop();

    assert.equal(watchdog.isKilled(), true);
    assert.equal(watchdog.getKilledReason(), 'stdin');
    assert.deepEqual(reasons, ['stdin']);
    assert.deepEqual(killSignals, ['SIGTERM']);
  });

  it('kills for ceiling timeout and records the reason', async () => {
    const killSignals = [];
    const child = { pid: 654, kill(signal) { killSignals.push(signal); } };
    const watchdog = createStreamingWatchdog({
      child,
      idleTimeoutMs: 1000,
      toolTimeoutMs: 1000,
      ceilingTimeoutMs: 10,
      useProcessGroup: false,
    });

    await sleep(30);
    watchdog.stop();

    assert.equal(watchdog.isKilled(), true);
    assert.equal(watchdog.getKilledReason(), 'ceiling');
    assert.deepEqual(killSignals, ['SIGTERM']);
  });
});
|
|
953
|
+
|
|
954
|
+
// runAsyncCommand: spawns via an injected spawn(), gathers stdout, and resolves
// { output, error } on close — stdin failures kill the child but still wait
// for the close event rather than rejecting early.
describe('runAsyncCommand', () => {
  it('collects stdout and resolves successful output', async () => {
    let onClose = null;
    const stubChild = {
      stdout: { on(_event, handler) { handler(Buffer.from('hello\n')); } },
      stderr: { on() {} },
      stdin: { write() {}, end() {} },
      on(event, handler) {
        if (event === 'close') onClose = handler;
      },
    };
    const pending = runAsyncCommand({
      spawn: () => stubChild,
      cmd: 'claude',
      args: ['-p'],
      cwd: '/tmp',
      env: {},
    });
    onClose(0);
    assert.deepEqual(await pending, { output: 'hello', error: null });
  });

  it('uses the provided spawn error formatter', async () => {
    let onError = null;
    const stubChild = {
      stdout: { on() {} },
      stderr: { on() {} },
      stdin: { write() {}, end() {} },
      on(event, handler) {
        if (event === 'error') onError = handler;
      },
    };
    const pending = runAsyncCommand({
      spawn: () => stubChild,
      cmd: 'claude',
      args: ['-p'],
      cwd: '/tmp',
      env: {},
      formatSpawnError: () => 'formatted',
    });
    onError(new Error('boom'));
    assert.deepEqual(await pending, { output: null, error: 'formatted' });
  });

  it('handles stdin write exceptions without hanging', async () => {
    let onClose = null;
    const killCalls = [];
    const stubChild = {
      pid: 777,
      stdout: { on() {} },
      stderr: { on() {} },
      stdin: {
        on() {},
        write() { throw new Error('EPIPE'); },
        end() {},
      },
      kill(signal) { killCalls.push(signal); },
      on(event, handler) {
        if (event === 'close') onClose = handler;
      },
    };
    const pending = runAsyncCommand({
      spawn: () => stubChild,
      cmd: 'claude',
      args: ['-p'],
      cwd: '/tmp',
      env: {},
      formatSpawnError: (err) => err.message,
    });
    let settled = false;
    pending.then(() => { settled = true; });
    await Promise.resolve();
    // The write failure alone must not settle the promise — close does.
    assert.equal(settled, false);
    onClose(1);
    assert.deepEqual(await pending, { output: null, error: 'EPIPE' });
    assert.deepEqual(killCalls, ['SIGTERM']);
    assert.equal(typeof onClose, 'function');
  });

  it('handles stdin error events without hanging', async () => {
    let onStdinError = null;
    let onClose = null;
    const killCalls = [];
    const stubChild = {
      pid: 888,
      stdout: { on() {} },
      stderr: { on() {} },
      stdin: {
        on(event, handler) {
          if (event === 'error') onStdinError = handler;
        },
        write() {},
        end() {},
      },
      kill(signal) { killCalls.push(signal); },
      on(event, handler) {
        if (event === 'close') onClose = handler;
      },
    };
    const pending = runAsyncCommand({
      spawn: () => stubChild,
      cmd: 'claude',
      args: ['-p'],
      cwd: '/tmp',
      env: {},
      formatSpawnError: (err) => err.message,
    });
    onStdinError(new Error('ERR_STREAM_DESTROYED'));
    let settled = false;
    pending.then(() => { settled = true; });
    await Promise.resolve();
    // Same contract: an async stdin 'error' event must not settle early.
    assert.equal(settled, false);
    onClose(1);
    assert.deepEqual(await pending, { output: null, error: 'ERR_STREAM_DESTROYED' });
    assert.deepEqual(killCalls, ['SIGTERM']);
  });
});
|