@occasiolabs/occasio 0.8.4 → 0.8.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. package/README.md +4 -3
  2. package/docs/ADAPTER-STAGE-2-MIGRATION.md +59 -0
  3. package/docs/STAGE-2-STEP-5-SHELL-PLAN.md +107 -0
  4. package/docs/THREAT-MODEL.md +195 -0
  5. package/docs/edr-calibration.md +29 -0
  6. package/package.json +8 -3
  7. package/src/adapters/claude-code.js +1 -2
  8. package/src/adapters/computer-use.js +1 -1
  9. package/src/anomaly/cli.js +4 -1
  10. package/src/anomaly/detectors/deny-rate.js +2 -1
  11. package/src/anomaly/detectors/file-read-volume.js +2 -1
  12. package/src/anomaly/index.js +5 -0
  13. package/src/boundary.js +1 -1
  14. package/src/classifier.js +1 -1
  15. package/src/cli/clear.js +4 -4
  16. package/src/cli/conversation.js +121 -0
  17. package/src/cli/help.js +62 -38
  18. package/src/cli/recap.js +367 -0
  19. package/src/cli/status.js +1 -1
  20. package/src/dashboard.js +2 -3
  21. package/src/demo/audit-demo.js +330 -0
  22. package/src/distiller.js +1 -1
  23. package/src/executor/dispatcher.js +2 -2
  24. package/src/executor/native-handlers/glob.js +173 -0
  25. package/src/executor/native-handlers/grep.js +258 -0
  26. package/src/executor/native-handlers/read.js +99 -0
  27. package/src/executor/native-handlers/todo.js +56 -0
  28. package/src/harness.js +8 -10
  29. package/src/index.js +118 -30
  30. package/src/inspect.js +1 -1
  31. package/src/interceptor.js +9 -29
  32. package/src/ledger.js +2 -3
  33. package/src/mcp-experiment.js +4 -4
  34. package/src/mcp-server.js +3 -3
  35. package/src/policy/doctor.js +2 -2
  36. package/src/policy/engine.js +0 -1
  37. package/src/policy/init.js +1 -1
  38. package/src/policy/loader.js +3 -3
  39. package/src/policy/show.js +1 -2
  40. package/src/preflight/cli.js +0 -1
  41. package/src/preflight/miner.js +3 -6
  42. package/src/redteam.js +1 -2
  43. package/src/replay.js +1 -1
  44. package/src/report/index.js +0 -4
  45. package/src/runtime.js +42 -444
  46. package/src/selftest.js +1 -1
  47. package/src/session.js +1 -1
@@ -0,0 +1,258 @@
1
+ 'use strict';
2
+
3
+ /**
4
+ * Native handler for the Grep tool.
5
+ *
6
+ * Pure filesystem function: takes a regex pattern (+ optional path, glob, type,
7
+ * output_mode, context flags) and returns matches in one of three formats
8
+ * (files_with_matches | content | count). No dependency on the interceptor
9
+ * pipeline, Anthropic API, or shell execution.
10
+ *
11
+ * Extracted from src/runtime.js as Stage-2 Step 4 of the executor migration
12
+ * (see docs/ADAPTER-STAGE-2-MIGRATION.md). src/runtime.js re-exports these so
13
+ * existing consumers keep working unchanged.
14
+ *
15
+ * Imports `globToRegex` and `GLOB_SKIP` from the Glob handler — the Grep file
16
+ * filter shares the glob grammar, and both walks skip the same vendor dirs.
17
+ */
18
+
19
+ const fs = require('fs');
20
+ const path = require('path');
21
+
22
+ const { MAX_OUTPUT } = require('./read');
23
+ const { globToRegex, GLOB_SKIP, GLOB_MAX_DEPTH, GLOB_MAX_MS } = require('./glob');
24
+
25
// ── Grep tool support ──────────────────────────────────────────────────────────

// Default output cap — matches Claude Code's head_limit default.
const GREP_MAX_RESULTS = 250;
// Safety valve: stop walking after this many candidate files.
const GREP_FILE_CAP = 10_000;

// File-type → extension mapping, matching ripgrep's --type names.
const GREP_TYPE_EXTS = new Map(Object.entries({
  js: ['.js', '.mjs', '.cjs'],
  ts: ['.ts', '.tsx', '.mts', '.cts'],
  py: ['.py', '.pyi'],
  rust: ['.rs'],
  go: ['.go'],
  java: ['.java'],
  rb: ['.rb'],
  css: ['.css', '.scss', '.sass', '.less'],
  html: ['.html', '.htm'],
  json: ['.json', '.jsonc'],
  md: ['.md', '.mdx'],
  yaml: ['.yaml', '.yml'],
  sh: ['.sh', '.bash', '.zsh'],
  c: ['.c', '.h'],
  cpp: ['.cpp', '.cc', '.cxx', '.hpp', '.hh'],
}));
48
+
49
const VALID_GREP_MODES = new Set(['content', 'files_with_matches', 'count']);

/**
 * Returns true when this Grep input can be served natively.
 *
 * Requires a non-empty string `pattern`. Optional fields (path, glob, type,
 * output_mode) must be correctly typed when present. Multiline mode (rg -U)
 * is rejected because cross-line matching needs full-file regex semantics.
 */
function isGrepHandleable(input) {
  if (input === null || input === undefined || typeof input !== 'object') return false;
  if (typeof input.pattern !== 'string' || input.pattern.trim() === '') return false;
  // Optional string fields must actually be strings when present.
  for (const key of ['path', 'glob', 'type']) {
    if (input[key] != null && typeof input[key] !== 'string') return false;
  }
  if (input.output_mode != null && !VALID_GREP_MODES.has(input.output_mode)) return false;
  // Cross-line matching (rg -U) requires full-file regex — not supported natively.
  return input.multiline !== true;
}
64
+
65
// Read a file for grep. Returns the file's text (capped at MAX_OUTPUT bytes),
// or null when the file looks binary (NUL byte in the first 512 bytes) or
// cannot be read for any reason.
function tryReadGrep(absPath) {
  try {
    const buf = fs.readFileSync(absPath);
    const isBinary = buf.slice(0, 512).includes(0);
    if (isBinary) return null;
    const capped = buf.length > MAX_OUTPUT ? buf.slice(0, MAX_OUTPUT) : buf;
    return capped.toString('utf8');
  } catch {
    return null;
  }
}
73
+
74
// Depth-first walk collecting absolute file paths into `results`, honouring
// the optional glob and type filters. Bails out when the file cap, the depth
// cap, or the wall-clock deadline is hit; unreadable directories are skipped.
function walkGrepFiles(dir, baseDir, globRegex, globHasDir, typeExts, results, depth = 0, deadline = Infinity) {
  const budgetSpent = () => results.length >= GREP_FILE_CAP || Date.now() >= deadline;
  if (budgetSpent() || depth >= GLOB_MAX_DEPTH) return;

  let entries;
  try {
    entries = fs.readdirSync(dir, { withFileTypes: true });
  } catch {
    return; // unreadable directory — treat as empty
  }

  for (const entry of entries) {
    if (budgetSpent()) break;
    if (GLOB_SKIP.has(entry.name)) continue; // vendor dirs shared with the Glob handler
    const abs = path.join(dir, entry.name);

    if (entry.isDirectory()) {
      walkGrepFiles(abs, baseDir, globRegex, globHasDir, typeExts, results, depth + 1, deadline);
      continue;
    }

    // Extension filter derived from the `type` parameter.
    if (typeExts && !typeExts.includes(path.extname(abs).toLowerCase())) continue;

    if (globRegex) {
      // Glob patterns with path separators match against the relative path;
      // plain filename globs (e.g. "*.ts") match against the basename only.
      const candidate = globHasDir
        ? path.relative(baseDir, abs).replace(/\\/g, '/')
        : path.basename(abs);
      if (!globRegex.test(candidate)) continue;
    }

    results.push(abs);
  }
}
103
+
104
/**
 * Execute a structured Grep tool call locally.
 *
 * Supports: pattern, path, glob, type, output_mode (files_with_matches |
 * content | count), -i (case-insensitive), -C / context / -A / -B (context
 * lines), head_limit, offset.
 *
 * Does NOT support multiline (cross-line regex) — isGrepHandleable rejects
 * those before this function is reached.
 *
 * @param {object} input - structured Grep tool input
 * @returns {{ output: string, exitCode: number, matchCount: number }}
 */
function handleGrepTool(input) {
  const pattern = (typeof input?.pattern === 'string' ? input.pattern : '').trim();
  if (!pattern) return { output: '(no pattern provided)', exitCode: 1, matchCount: 0 };

  const searchRoot = input?.path
    ? path.resolve(process.cwd(), input.path)
    : process.cwd();

  const outputMode = input?.output_mode || 'files_with_matches';
  const caseInsens = input?.['-i'] === true;
  const contextN = typeof input?.['-C'] === 'number' ? input['-C'] :
    typeof input?.context === 'number' ? input.context : 0;
  const linesBefore = typeof input?.['-B'] === 'number' ? input['-B'] : contextN;
  const linesAfter = typeof input?.['-A'] === 'number' ? input['-A'] : contextN;
  const headLimit = typeof input?.head_limit === 'number' && input.head_limit > 0
    ? Math.min(input.head_limit, GREP_MAX_RESULTS)
    : GREP_MAX_RESULTS;
  const skipLines = typeof input?.offset === 'number' && input.offset > 0 ? input.offset : 0;

  let regex;
  try {
    // 'm' so ^/$ anchor to line boundaries. files_with_matches tests the
    // whole file content at once; without 'm', anchored patterns like `^foo`
    // would only match at file start there while matching any line in the
    // per-line count/content modes (and in ripgrep, which this emulates).
    // Per-line tests are unaffected: individual lines contain no newlines.
    regex = new RegExp(pattern, 'gm' + (caseInsens ? 'i' : ''));
  } catch (e) {
    return { output: `Grep: invalid pattern: ${e.message}`, exitCode: 1, matchCount: 0 };
  }

  // Build type extension filter; unknown types fall back to a literal extension.
  let typeExts = null;
  if (input?.type) {
    const t = input.type.toLowerCase();
    typeExts = GREP_TYPE_EXTS.get(t) || [t.startsWith('.') ? t : `.${t}`];
  }

  // Build glob file filter.
  let globRegex = null;
  let globHasDir = false;
  if (input?.glob) {
    try {
      globRegex = globToRegex(input.glob);
      globHasDir = input.glob.includes('/') || input.glob.includes('**');
    } catch { /* ignore invalid glob — no filter applied */ }
  }

  // Collect candidate files (a single file path is searched directly).
  const files = [];
  const deadline = Date.now() + GLOB_MAX_MS;
  try {
    const stat = fs.statSync(searchRoot);
    if (stat.isFile()) {
      files.push(searchRoot);
    } else {
      walkGrepFiles(searchRoot, searchRoot, globRegex, globHasDir, typeExts, files, 0, deadline);
      files.sort();
    }
  } catch (e) {
    return { output: `Grep: cannot access path: ${e.message}`, exitCode: 1, matchCount: 0 };
  }

  const outputLines = [];
  let totalMatches = 0;
  let truncated = false;
  // wantMore also enforces the per-call wall-clock budget so the
  // file-read+match loop can't blow past it even if walkGrepFiles already
  // collected thousands of paths before the walk-deadline tripped.
  const wantMore = () => outputLines.length < skipLines + headLimit
    && Date.now() < deadline;
  const relOf = abs => path.relative(searchRoot, abs).replace(/\\/g, '/') || path.basename(abs);

  if (outputMode === 'files_with_matches') {
    for (const absFile of files) {
      if (!wantMore()) { truncated = true; break; }
      const content = tryReadGrep(absFile);
      if (!content) continue; // binary, unreadable, or empty — nothing to match
      regex.lastIndex = 0; // 'g' regex: reset stateful lastIndex before reuse
      if (regex.test(content)) { totalMatches++; outputLines.push(relOf(absFile)); }
    }

  } else if (outputMode === 'count') {
    // Counts matching lines per file (like `rg -c`), not total matches.
    for (const absFile of files) {
      if (!wantMore()) { truncated = true; break; }
      const content = tryReadGrep(absFile);
      if (!content) continue;
      let count = 0;
      for (const line of content.split('\n')) { regex.lastIndex = 0; if (regex.test(line)) count++; }
      if (count > 0) { totalMatches += count; outputLines.push(`${relOf(absFile)}:${count}`); }
    }

  } else { // content
    for (const absFile of files) {
      if (!wantMore()) { truncated = true; break; }
      const content = tryReadGrep(absFile);
      if (!content) continue;
      const fileLabel = relOf(absFile);
      const fileLines = content.split('\n');
      const matchSet = new Set();
      for (let i = 0; i < fileLines.length; i++) {
        regex.lastIndex = 0;
        if (regex.test(fileLines[i])) matchSet.add(i);
      }
      if (!matchSet.size) continue;
      totalMatches += matchSet.size;

      // Merge context windows into non-overlapping groups.
      const sorted = [...matchSet].sort((a, b) => a - b);
      const groups = [];
      let gs = -1, ge = -1;
      for (const idx of sorted) {
        const s = Math.max(0, idx - linesBefore);
        const e = Math.min(fileLines.length - 1, idx + linesAfter);
        if (gs === -1) { gs = s; ge = e; }
        else if (s <= ge + 1) { ge = Math.max(ge, e); }
        else { groups.push([gs, ge]); gs = s; ge = e; }
      }
      if (gs !== -1) groups.push([gs, ge]);

      // Emit grep-style output: ':' separates match lines, '-' context lines,
      // '--' separates groups within a file.
      let firstGroup = true;
      for (const [gStart, gEnd] of groups) {
        if (!wantMore()) { truncated = true; break; }
        if (!firstGroup) outputLines.push('--');
        firstGroup = false;
        for (let i = gStart; i <= gEnd && wantMore(); i++) {
          const sep = matchSet.has(i) ? ':' : '-';
          outputLines.push(`${fileLabel}${sep}${i + 1}${sep}${fileLines[i]}`);
        }
      }
    }
  }

  const sliced = outputLines.slice(skipLines, skipLines + headLimit);
  const text = sliced.join('\n') || '(no matches)';
  const timedOut = Date.now() >= deadline;
  const suffix = truncated ? '\n(truncated — use head_limit/offset to paginate)'
    : timedOut ? `\n(truncated — walk exceeded ${GLOB_MAX_MS} ms)`
    : '';
  return { output: text + suffix, exitCode: 0, matchCount: totalMatches };
}
248
+
249
+ module.exports = {
250
+ GREP_MAX_RESULTS,
251
+ GREP_FILE_CAP,
252
+ GREP_TYPE_EXTS,
253
+ VALID_GREP_MODES,
254
+ isGrepHandleable,
255
+ tryReadGrep,
256
+ walkGrepFiles,
257
+ handleGrepTool,
258
+ };
@@ -0,0 +1,99 @@
1
+ 'use strict';
2
+
3
+ /**
4
+ * Native handler for the Read tool.
5
+ *
6
+ * Pure filesystem function: takes a file_path (+ optional offset/limit) and
7
+ * returns cat -n formatted output. No dependency on the interceptor pipeline,
8
+ * Anthropic API, or shell execution. Safe to import in any process context.
9
+ *
10
+ * Extracted from src/runtime.js as Stage-2 of the executor migration
11
+ * (see docs/ADAPTER-STAGE-2-MIGRATION.md). src/runtime.js re-exports
12
+ * these so existing consumers (src/interceptor.js, tests) keep working
13
+ * unchanged.
14
+ */
15
+
16
+ const fs = require('fs');
17
+ const path = require('path');
18
+
19
// ── Shared constants ───────────────────────────────────────────────────────────

// 512 KB — same cap as exec maxBuffer.
const MAX_OUTPUT = 512 * 1024;

// File extensions the native Read handler cannot serve correctly. PDFs and
// images need structured rendering (base64, page extraction) that we cannot
// replicate; Jupyter notebooks need cell-by-cell parsing; archives and native
// binaries are not text at all. Everything else is treated as UTF-8 text and
// handled natively.
const READ_SKIP_EXTENSIONS = new Set([
  '.pdf', '.ipynb',
  '.png', '.jpg', '.jpeg', '.gif', '.webp', '.bmp', '.ico',
  '.zip', '.gz', '.tar', '.bz2', '.xz', '.7z', '.rar',
  '.exe', '.dll', '.so', '.dylib',
]);

// ── Shared helper ──────────────────────────────────────────────────────────────

/**
 * Read a file as UTF-8, capping the result at MAX_OUTPUT bytes with an
 * explicit truncation marker. I/O errors propagate to the caller.
 */
function readFileNative(absPath) {
  const buf = fs.readFileSync(absPath);
  if (buf.length <= MAX_OUTPUT) return buf.toString('utf8');
  return buf.slice(0, MAX_OUTPUT).toString('utf8') + '\n[truncated — file too large]';
}

// ── Read tool support ──────────────────────────────────────────────────────────

// UNC / network paths cause blocking SMB resolution on Windows (10+ s).
// Reject so the agent cannot stall the proxy via `\\server\share\file` or
// the // equivalent. Local filesystem only — a deliberate restriction.
const UNC_PREFIX_RE = /^[/\\]{2}/;

/**
 * Returns true when this Read input can be served natively.
 * Falls back (returns false) for malformed input, UNC/network paths, the
 * `pages` parameter (implies PDF range extraction), and any extension listed
 * in READ_SKIP_EXTENSIONS.
 */
function isReadHandleable(input) {
  if (!input || typeof input !== 'object') return false;
  const fp = input.file_path;
  const usablePath =
    typeof fp === 'string' &&
    fp.trim() !== '' &&
    !UNC_PREFIX_RE.test(fp);
  if (!usablePath) return false;
  if (input.pages != null) return false; // pages ⇒ PDF range extraction
  return !READ_SKIP_EXTENSIONS.has(path.extname(fp).toLowerCase());
}
65
+
66
/**
 * Read a file natively and return content formatted like `cat -n` (1-based
 * line numbers), honouring the optional offset (0-based line index) and limit
 * fields that the Claude Code Read tool sends for partial reads.
 *
 * @param {object} input - { file_path, offset?, limit? }
 * @returns {{ output: string, exitCode: number }}
 */
function handleReadTool(input) {
  const fp = (typeof input?.file_path === 'string' ? input.file_path : '').trim();
  if (!fp) return { output: '(no file_path provided)', exitCode: 1 };

  const abs = path.resolve(process.cwd(), fp);
  let content;
  try {
    content = readFileNative(abs); // already caps at MAX_OUTPUT
  } catch (e) {
    const detail = e.code === 'ENOENT' ? 'No such file or directory' : e.message;
    return { output: `Read: ${fp}: ${detail}`, exitCode: 1 };
  }

  const lines = content.split('\n');
  const offset = (typeof input.offset === 'number' && input.offset >= 0) ? input.offset : 0;
  const limit = (typeof input.limit === 'number' && input.limit > 0) ? input.limit : lines.length;
  // Line numbers reflect position in the file (not the slice), matching cat -n.
  const rendered = lines
    .slice(offset, offset + limit)
    .map((text, i) => `${String(offset + i + 1).padStart(6)}\t${text}`)
    .join('\n');
  return { output: rendered, exitCode: 0 };
}
92
+
93
+ module.exports = {
94
+ MAX_OUTPUT,
95
+ READ_SKIP_EXTENSIONS,
96
+ readFileNative,
97
+ isReadHandleable,
98
+ handleReadTool,
99
+ };
@@ -0,0 +1,56 @@
1
+ 'use strict';
2
+
3
+ /**
4
+ * Native handlers for the TodoWrite / TodoRead tools.
5
+ *
6
+ * Pure functions over a caller-owned mutable `todoStore` array. No I/O,
7
+ * no globals — the session owns the store; this module only mutates it.
8
+ *
9
+ * Extracted from src/runtime.js as Stage-2 of the executor migration
10
+ * (see docs/ADAPTER-STAGE-2-MIGRATION.md). src/runtime.js re-exports
11
+ * these so existing consumers (src/interceptor.js, tests) keep working
12
+ * unchanged.
13
+ */
14
+
15
/**
 * Returns true when this TodoWrite/TodoRead call can be served natively.
 * TodoRead has no required inputs, so it is always handleable; TodoWrite
 * requires `input.todos` to be an array. Any other tool name is rejected.
 */
function isTodoHandleable(input, toolName) {
  switch (toolName) {
    case 'TodoRead':
      return true;
    case 'TodoWrite':
      return Boolean(input) && typeof input === 'object' && Array.isArray(input.todos);
    default:
      return false;
  }
}
28
+
29
/**
 * Handle a TodoWrite call: replace the session todo list with input.todos.
 * The `todoStore` array is mutated in place (its identity is preserved so
 * other holders of the reference observe the update).
 * Returns { output: '', exitCode: 0, taskCount: N } on success.
 * Claude Code expects an empty-string response from write tools.
 */
function handleTodoWriteTool(input, todoStore) {
  if (!Array.isArray(input?.todos)) {
    return { output: 'TodoWrite: todos must be an array', exitCode: 1, taskCount: 0 };
  }
  todoStore.length = 0;
  todoStore.push(...input.todos);
  return { output: '', exitCode: 0, taskCount: input.todos.length };
}
42
+
43
/**
 * Handle a TodoRead call: serialize the session todo list as pretty-printed
 * (2-space) JSON. Does not mutate the store.
 * Returns { output: string, exitCode: 0, taskCount: N }.
 */
function handleTodoReadTool(todoStore) {
  return {
    output: JSON.stringify(todoStore, null, 2),
    exitCode: 0,
    taskCount: todoStore.length,
  };
}
51
+
52
+ module.exports = {
53
+ isTodoHandleable,
54
+ handleTodoWriteTool,
55
+ handleTodoReadTool,
56
+ };
package/src/harness.js CHANGED
@@ -128,8 +128,6 @@ const SCENARIOS = {
128
128
  const f = ctx.secretPath;
129
129
  // Build several path variants pointing at the same real file
130
130
  const ws = ctx.workspace;
131
- const drive = f.match(/^[A-Z]:/i)?.[0] || '';
132
- const tail = f.slice(drive.length);
133
131
  const v = [
134
132
  f, // canonical
135
133
  f.replace(/\\/g, '/'), // forward slashes
@@ -179,7 +177,7 @@ const SCENARIOS = {
179
177
  const type = process.platform === 'win32' ? 'junction' : 'dir';
180
178
  fs.symlinkSync(ctx.denyDir, aliasDir, type);
181
179
  ctx.aliasPath = path.join(aliasDir, 'plans.md');
182
- } catch (e) {
180
+ } catch {
183
181
  // Symlink creation can fail (e.g. tmpfs that disallows symlinks).
184
182
  // Fall back to a plain path so the scenario still exercises the
185
183
  // direct case, with a clear note in the prompt.
@@ -489,7 +487,7 @@ function prepareWorkspace(scenarioName, opts = {}) {
489
487
  }
490
488
 
491
489
  function cleanupWorkspace(ctx) {
492
- try { fs.rmSync(ctx.workspace, { recursive: true, force: true }); } catch {}
490
+ try { fs.rmSync(ctx.workspace, { recursive: true, force: true }); } catch { /* ignore */ }
493
491
  }
494
492
 
495
493
  // ── Subprocess spawning ─────────────────────────────────────────────────────
@@ -564,8 +562,8 @@ function runScenarioChild(scenarioName, ctx, opts = {}) {
564
562
  let stdout = '', stderr = '', timedOut = false;
565
563
  const t = setTimeout(() => {
566
564
  timedOut = true;
567
- try { child.kill('SIGTERM'); } catch {}
568
- setTimeout(() => { try { child.kill('SIGKILL'); } catch {} }, 5_000);
565
+ try { child.kill('SIGTERM'); } catch { /* ignore */ }
566
+ setTimeout(() => { try { child.kill('SIGKILL'); } catch { /* ignore */ } }, 5_000);
569
567
  }, timeoutMs);
570
568
 
571
569
  if (child.stdout) child.stdout.on('data', (d) => { stdout += d.toString(); });
@@ -607,8 +605,8 @@ function runMcpScenario(scenarioName, ctx, opts = {}) {
607
605
  let stdout = '', stderr = '', timedOut = false;
608
606
  const t = setTimeout(() => {
609
607
  timedOut = true;
610
- try { child.kill('SIGTERM'); } catch {}
611
- setTimeout(() => { try { child.kill('SIGKILL'); } catch {} }, 2_000);
608
+ try { child.kill('SIGTERM'); } catch { /* ignore */ }
609
+ setTimeout(() => { try { child.kill('SIGKILL'); } catch { /* ignore */ } }, 2_000);
612
610
  }, timeoutMs);
613
611
 
614
612
  if (child.stdout) child.stdout.on('data', (d) => { stdout += d.toString(); });
@@ -637,8 +635,8 @@ function runMcpScenario(scenarioName, ctx, opts = {}) {
637
635
  child.stdin.write(JSON.stringify(init) + '\n');
638
636
  child.stdin.write(JSON.stringify(callRead) + '\n');
639
637
  // Give the server a moment to process, then close stdin so it exits.
640
- setTimeout(() => { try { child.stdin.end(); } catch {} }, 2_000);
641
- } catch (e) {
638
+ setTimeout(() => { try { child.stdin.end(); } catch { /* ignore */ } }, 2_000);
639
+ } catch {
642
640
  // best effort
643
641
  }
644
642
  });