@visorcraft/idlehands 0.9.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (197) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +30 -0
  3. package/dist/agent.js +2604 -0
  4. package/dist/agent.js.map +1 -0
  5. package/dist/anton/controller.js +341 -0
  6. package/dist/anton/controller.js.map +1 -0
  7. package/dist/anton/lock.js +110 -0
  8. package/dist/anton/lock.js.map +1 -0
  9. package/dist/anton/parser.js +303 -0
  10. package/dist/anton/parser.js.map +1 -0
  11. package/dist/anton/prompt.js +203 -0
  12. package/dist/anton/prompt.js.map +1 -0
  13. package/dist/anton/reporter.js +119 -0
  14. package/dist/anton/reporter.js.map +1 -0
  15. package/dist/anton/session.js +51 -0
  16. package/dist/anton/session.js.map +1 -0
  17. package/dist/anton/types.js +7 -0
  18. package/dist/anton/types.js.map +1 -0
  19. package/dist/anton/verifier.js +263 -0
  20. package/dist/anton/verifier.js.map +1 -0
  21. package/dist/bench/compare.js +239 -0
  22. package/dist/bench/compare.js.map +1 -0
  23. package/dist/bench/debug_hooks.js +17 -0
  24. package/dist/bench/debug_hooks.js.map +1 -0
  25. package/dist/bench/json_extract.js +22 -0
  26. package/dist/bench/json_extract.js.map +1 -0
  27. package/dist/bench/openclaw.js +86 -0
  28. package/dist/bench/openclaw.js.map +1 -0
  29. package/dist/bench/report.js +116 -0
  30. package/dist/bench/report.js.map +1 -0
  31. package/dist/bench/runner.js +312 -0
  32. package/dist/bench/runner.js.map +1 -0
  33. package/dist/bench/types.js +2 -0
  34. package/dist/bench/types.js.map +1 -0
  35. package/dist/bot/commands.js +444 -0
  36. package/dist/bot/commands.js.map +1 -0
  37. package/dist/bot/confirm-discord.js +133 -0
  38. package/dist/bot/confirm-discord.js.map +1 -0
  39. package/dist/bot/confirm-telegram.js +290 -0
  40. package/dist/bot/confirm-telegram.js.map +1 -0
  41. package/dist/bot/discord.js +826 -0
  42. package/dist/bot/discord.js.map +1 -0
  43. package/dist/bot/format.js +210 -0
  44. package/dist/bot/format.js.map +1 -0
  45. package/dist/bot/session-manager.js +270 -0
  46. package/dist/bot/session-manager.js.map +1 -0
  47. package/dist/bot/telegram.js +678 -0
  48. package/dist/bot/telegram.js.map +1 -0
  49. package/dist/cli/agent-turn.js +45 -0
  50. package/dist/cli/agent-turn.js.map +1 -0
  51. package/dist/cli/args.js +236 -0
  52. package/dist/cli/args.js.map +1 -0
  53. package/dist/cli/bot.js +252 -0
  54. package/dist/cli/bot.js.map +1 -0
  55. package/dist/cli/build-repl-context.js +365 -0
  56. package/dist/cli/build-repl-context.js.map +1 -0
  57. package/dist/cli/command-registry.js +20 -0
  58. package/dist/cli/command-registry.js.map +1 -0
  59. package/dist/cli/commands/anton.js +271 -0
  60. package/dist/cli/commands/anton.js.map +1 -0
  61. package/dist/cli/commands/editing.js +328 -0
  62. package/dist/cli/commands/editing.js.map +1 -0
  63. package/dist/cli/commands/model.js +274 -0
  64. package/dist/cli/commands/model.js.map +1 -0
  65. package/dist/cli/commands/project.js +255 -0
  66. package/dist/cli/commands/project.js.map +1 -0
  67. package/dist/cli/commands/runtime.js +63 -0
  68. package/dist/cli/commands/runtime.js.map +1 -0
  69. package/dist/cli/commands/session.js +281 -0
  70. package/dist/cli/commands/session.js.map +1 -0
  71. package/dist/cli/commands/tools.js +126 -0
  72. package/dist/cli/commands/tools.js.map +1 -0
  73. package/dist/cli/commands/trifecta.js +221 -0
  74. package/dist/cli/commands/trifecta.js.map +1 -0
  75. package/dist/cli/commands/tui.js +17 -0
  76. package/dist/cli/commands/tui.js.map +1 -0
  77. package/dist/cli/init.js +222 -0
  78. package/dist/cli/init.js.map +1 -0
  79. package/dist/cli/input.js +360 -0
  80. package/dist/cli/input.js.map +1 -0
  81. package/dist/cli/oneshot.js +254 -0
  82. package/dist/cli/oneshot.js.map +1 -0
  83. package/dist/cli/repl-context.js +2 -0
  84. package/dist/cli/repl-context.js.map +1 -0
  85. package/dist/cli/runtime-cmds.js +811 -0
  86. package/dist/cli/runtime-cmds.js.map +1 -0
  87. package/dist/cli/service.js +145 -0
  88. package/dist/cli/service.js.map +1 -0
  89. package/dist/cli/session-state.js +130 -0
  90. package/dist/cli/session-state.js.map +1 -0
  91. package/dist/cli/setup.js +815 -0
  92. package/dist/cli/setup.js.map +1 -0
  93. package/dist/cli/shell.js +79 -0
  94. package/dist/cli/shell.js.map +1 -0
  95. package/dist/cli/status.js +392 -0
  96. package/dist/cli/status.js.map +1 -0
  97. package/dist/cli/watch.js +33 -0
  98. package/dist/cli/watch.js.map +1 -0
  99. package/dist/client.js +676 -0
  100. package/dist/client.js.map +1 -0
  101. package/dist/commands.js +194 -0
  102. package/dist/commands.js.map +1 -0
  103. package/dist/config.js +507 -0
  104. package/dist/config.js.map +1 -0
  105. package/dist/confirm/auto.js +13 -0
  106. package/dist/confirm/auto.js.map +1 -0
  107. package/dist/confirm/headless.js +41 -0
  108. package/dist/confirm/headless.js.map +1 -0
  109. package/dist/confirm/terminal.js +90 -0
  110. package/dist/confirm/terminal.js.map +1 -0
  111. package/dist/context.js +49 -0
  112. package/dist/context.js.map +1 -0
  113. package/dist/git.js +136 -0
  114. package/dist/git.js.map +1 -0
  115. package/dist/harnesses.js +171 -0
  116. package/dist/harnesses.js.map +1 -0
  117. package/dist/history.js +139 -0
  118. package/dist/history.js.map +1 -0
  119. package/dist/index.js +700 -0
  120. package/dist/index.js.map +1 -0
  121. package/dist/indexer.js +374 -0
  122. package/dist/indexer.js.map +1 -0
  123. package/dist/jsonrpc.js +76 -0
  124. package/dist/jsonrpc.js.map +1 -0
  125. package/dist/lens.js +525 -0
  126. package/dist/lens.js.map +1 -0
  127. package/dist/lsp.js +605 -0
  128. package/dist/lsp.js.map +1 -0
  129. package/dist/markdown.js +275 -0
  130. package/dist/markdown.js.map +1 -0
  131. package/dist/mcp.js +554 -0
  132. package/dist/mcp.js.map +1 -0
  133. package/dist/recovery.js +178 -0
  134. package/dist/recovery.js.map +1 -0
  135. package/dist/replay.js +132 -0
  136. package/dist/replay.js.map +1 -0
  137. package/dist/replay_cli.js +24 -0
  138. package/dist/replay_cli.js.map +1 -0
  139. package/dist/runtime/executor.js +418 -0
  140. package/dist/runtime/executor.js.map +1 -0
  141. package/dist/runtime/planner.js +197 -0
  142. package/dist/runtime/planner.js.map +1 -0
  143. package/dist/runtime/store.js +289 -0
  144. package/dist/runtime/store.js.map +1 -0
  145. package/dist/runtime/types.js +2 -0
  146. package/dist/runtime/types.js.map +1 -0
  147. package/dist/safety.js +446 -0
  148. package/dist/safety.js.map +1 -0
  149. package/dist/spinner.js +224 -0
  150. package/dist/spinner.js.map +1 -0
  151. package/dist/sys/context.js +124 -0
  152. package/dist/sys/context.js.map +1 -0
  153. package/dist/sys/snapshot.sh +97 -0
  154. package/dist/term.js +61 -0
  155. package/dist/term.js.map +1 -0
  156. package/dist/themes.js +135 -0
  157. package/dist/themes.js.map +1 -0
  158. package/dist/tools.js +1114 -0
  159. package/dist/tools.js.map +1 -0
  160. package/dist/tui/branch-picker.js +65 -0
  161. package/dist/tui/branch-picker.js.map +1 -0
  162. package/dist/tui/command-handler.js +108 -0
  163. package/dist/tui/command-handler.js.map +1 -0
  164. package/dist/tui/confirm.js +90 -0
  165. package/dist/tui/confirm.js.map +1 -0
  166. package/dist/tui/controller.js +463 -0
  167. package/dist/tui/controller.js.map +1 -0
  168. package/dist/tui/event-bridge.js +44 -0
  169. package/dist/tui/event-bridge.js.map +1 -0
  170. package/dist/tui/events.js +2 -0
  171. package/dist/tui/events.js.map +1 -0
  172. package/dist/tui/keymap.js +144 -0
  173. package/dist/tui/keymap.js.map +1 -0
  174. package/dist/tui/layout.js +11 -0
  175. package/dist/tui/layout.js.map +1 -0
  176. package/dist/tui/render.js +186 -0
  177. package/dist/tui/render.js.map +1 -0
  178. package/dist/tui/screen.js +48 -0
  179. package/dist/tui/screen.js.map +1 -0
  180. package/dist/tui/state.js +167 -0
  181. package/dist/tui/state.js.map +1 -0
  182. package/dist/tui/theme.js +70 -0
  183. package/dist/tui/theme.js.map +1 -0
  184. package/dist/tui/types.js +2 -0
  185. package/dist/tui/types.js.map +1 -0
  186. package/dist/types.js +2 -0
  187. package/dist/types.js.map +1 -0
  188. package/dist/upgrade.js +412 -0
  189. package/dist/upgrade.js.map +1 -0
  190. package/dist/utils.js +87 -0
  191. package/dist/utils.js.map +1 -0
  192. package/dist/vault.js +520 -0
  193. package/dist/vault.js.map +1 -0
  194. package/dist/vim.js +160 -0
  195. package/dist/vim.js.map +1 -0
  196. package/package.json +67 -0
  197. package/src/sys/snapshot.sh +97 -0
package/dist/tools.js ADDED
@@ -0,0 +1,1114 @@
1
+ import fs from 'node:fs/promises';
2
+ import path from 'node:path';
3
+ import crypto from 'node:crypto';
4
+ import { spawn, spawnSync } from 'node:child_process';
5
+ import { checkExecSafety, checkPathSafety, isProtectedDeleteTarget } from './safety.js';
6
+ import { sys_context as sysContextTool } from './sys/context.js';
7
+ import { stateDir, shellEscape, BASH_PATH } from './utils.js';
8
// Keep at most this many timestamped backups per edited file (enforced by rotateBackups()).
const DEFAULT_MAX_BACKUPS_PER_FILE = 5;
// Default byte cap for captured command output (consumer is outside this excerpt — TODO confirm usage).
const DEFAULT_MAX_EXEC_BYTES = 16384;
// One-shot latch so loadNodePty() prints its "node-pty unavailable" warning only once per process.
let ptyUnavailableWarned = false;
11
/**
 * Dynamically load the optional `node-pty` native module.
 * Returns the module on success, or null when it is not installed/buildable.
 * The "unavailable" warning is emitted at most once per process.
 */
async function loadNodePty() {
    try {
        return await import('node-pty');
    }
    catch {
        if (ptyUnavailableWarned)
            return null;
        ptyUnavailableWarned = true;
        console.error('[warn] node-pty not available; interactive sudo is disabled. Install build tools (python3, make, g++) and reinstall to enable it.');
        return null;
    }
}
24
/** Best-effort MIME type guess from magic bytes + extension (§7/§8). */
function guessMimeType(filePath, buf) {
    // Check well-known magic-byte signatures before any extension fallback.
    const bytesAt = (offset, bytes) => bytes.every((b, i) => buf[offset + i] === b);
    if (buf.length >= 4) {
        if (bytesAt(0, [0x89, 0x50, 0x4e, 0x47]))
            return 'image/png';
        if (bytesAt(0, [0xff, 0xd8, 0xff]))
            return 'image/jpeg';
        if (bytesAt(0, [0x47, 0x49, 0x46]))
            return 'image/gif';
        if (bytesAt(0, [0x52, 0x49, 0x46, 0x46]) && buf.length >= 12 && bytesAt(8, [0x57, 0x45, 0x42, 0x50]))
            return 'image/webp'; // RIFF container with a WEBP payload
        if (bytesAt(0, [0x25, 0x50, 0x44, 0x46]))
            return 'application/pdf';
        if (bytesAt(0, [0x50, 0x4b, 0x03, 0x04]))
            return 'application/zip';
        if (bytesAt(0, [0x7f, 0x45, 0x4c, 0x46]))
            return 'application/x-elf';
        if (bytesAt(0, [0x1f, 0x8b]))
            return 'application/gzip';
    }
    // No magic match — fall back to the file extension.
    const byExtension = {
        '.png': 'image/png', '.jpg': 'image/jpeg', '.jpeg': 'image/jpeg', '.gif': 'image/gif',
        '.webp': 'image/webp', '.svg': 'image/svg+xml', '.pdf': 'application/pdf',
        '.zip': 'application/zip', '.gz': 'application/gzip', '.tar': 'application/x-tar',
        '.wasm': 'application/wasm', '.so': 'application/x-sharedlib',
        '.exe': 'application/x-executable', '.o': 'application/x-object',
    };
    return byExtension[path.extname(filePath).toLowerCase()] ?? 'application/octet-stream';
}
56
/** Directory where timestamped file backups live (under the app state dir). */
function defaultBackupDir() {
    const root = stateDir();
    return path.join(root, 'backups');
}
59
/** Hex-encoded SHA-256 digest of the given string/buffer. */
function sha256(s) {
    const hasher = crypto.createHash('sha256');
    hasher.update(s);
    return hasher.digest('hex');
}
62
/** Stable per-file backup key: the SHA-256 hex digest of the absolute path. */
function keyFromPath(absPath) {
    return crypto.createHash('sha256').update(absPath).digest('hex');
}
65
/**
 * Resolve backup locations for `absPath`.
 * Returns { bdir, key, keyDir }: the backup root (ctx.backupDir override or
 * the default), the path's hash key, and the nested per-file directory.
 */
function backupDirForPath(ctx, absPath) {
    const root = ctx.backupDir ?? defaultBackupDir();
    const hashKey = keyFromPath(absPath);
    return { bdir: root, key: hashKey, keyDir: path.join(root, hashKey) };
}
70
/** Filesystem-safe timestamp: ISO-8601 with ':' and '.' swapped for '-'. */
function formatBackupTs() {
    const iso = new Date().toISOString();
    return iso.replaceAll(':', '-').replaceAll('.', '-');
}
73
/**
 * Restore the most recent backup of `absPath` created by backupFile().
 * Looks in the nested per-file key directory first, then falls back to the
 * older flat layout (`<key>.<ts>` files directly under the backup root).
 * The current file content is itself backed up before being overwritten.
 * @param {string} absPath file to restore
 * @param {object} ctx tool context (backupDir override, backup limits)
 * @returns {Promise<string>} human-readable result message
 * @throws when no backup exists for the path
 */
async function restoreLatestBackup(absPath, ctx) {
    const { key, keyDir } = backupDirForPath(ctx, absPath);
    const legacyDir = ctx.backupDir ?? defaultBackupDir();
    // Newest *.bak in a directory; timestamped names sort lexicographically,
    // so sort().reverse()[0] is the most recent. Missing dir → undefined.
    const latestInDir = async (dir) => {
        const ents = await fs.readdir(dir, { withFileTypes: true }).catch(() => []);
        return ents
            .filter((e) => e.isFile())
            .map((e) => e.name)
            .filter((n) => n.endsWith('.bak'))
            .sort()
            .reverse()[0];
    };
    let bakDir = keyDir;
    let bakFile = await latestInDir(keyDir);
    if (!bakFile) {
        // Compatibility with older flat backup format (without nested key dir).
        // Legacy names start with the hash key; .json sidecars are metadata, not backups.
        const ents = await fs.readdir(legacyDir, { withFileTypes: true }).catch(() => []);
        const legacy = ents
            .filter((e) => e.isFile())
            .map((e) => e.name)
            .filter((n) => n.startsWith(`${key}.`) && !n.endsWith('.json'))
            .sort()
            .reverse()[0];
        if (legacy) {
            bakFile = legacy;
            bakDir = legacyDir;
        }
    }
    if (!bakFile) {
        throw new Error(`undo: no backups found for ${absPath} in ${legacyDir}`);
    }
    const bakPath = path.join(bakDir, bakFile);
    const buf = await fs.readFile(bakPath);
    // backup current file before restoring
    await backupFile(absPath, ctx);
    await atomicWrite(absPath, buf);
    return `restored ${absPath} from backup ${bakPath}`;
}
111
/**
 * Remove ANSI terminal escape sequences from a string.
 * Handles CSI sequences (colors, cursor movement, erase, scroll), OSC
 * sequences terminated by either BEL or ST (ESC-\ — used by modern terminals
 * for titles and hyperlinks), charset selection, and common single-character
 * escapes. Fix over previous version: ST-terminated OSC sequences were left
 * in the output because only the BEL terminator was recognized.
 * @param {string} s raw terminal output
 * @returns {string} the text with escape sequences removed
 */
function stripAnsi(s) {
    // eslint-disable-next-line no-control-regex
    return s
        .replace(/\u001b\[[0-9;]*[A-Za-z]/g, '') // CSI sequences (SGR, cursor, erase, scroll, etc.)
        .replace(/\u001b\][^\u0007\u001b]*(?:\u0007|\u001b\\)/g, '') // OSC sequences (BEL- or ST-terminated)
        .replace(/\u001b[()][AB012]/g, '') // Character set selection
        .replace(/\u001b[=>Nc7-9]/g, ''); // Other common single-char escapes
}
119
/**
 * Collapse consecutive duplicate lines into one line plus a
 * "[repeated N more times]" marker. Lines longer than `maxLineLen` are first
 * clipped with a trailing ellipsis; duplicate detection compares clipped text.
 */
function dedupeRepeats(lines, maxLineLen = 400) {
    const result = [];
    let current = null;
    let seen = 0;
    // Emit the pending run: the line itself, then a marker when it repeated.
    const emit = () => {
        if (current == null)
            return;
        result.push(current);
        if (seen > 1)
            result.push(`[repeated ${seen - 1} more times]`);
    };
    for (const raw of lines) {
        const clipped = raw.length > maxLineLen ? raw.slice(0, maxLineLen) + '…' : raw;
        if (clipped === current) {
            seen += 1;
        }
        else {
            emit();
            current = clipped;
            seen = 1;
        }
    }
    emit();
    return result;
}
144
/**
 * Collapse multi-line stack traces in captured output.
 * A run of frame lines ("  at ..." — Node/V8 style) is reduced to its first
 * frame plus a "[N more frames]" marker; an Error/Exception/"Caused by" line
 * encountered between frames is kept as the run's trailing message. All other
 * lines pass through unchanged, in order.
 * @param {string[]} lines output split into lines
 * @returns {string[]} lines with stack runs collapsed
 */
function collapseStackTraces(lines) {
    const out = [];
    // State for the stack run currently being accumulated.
    let inStack = false;
    let stackCount = 0;
    let firstFrame = '';
    let lastError = '';
    const isStackFrame = (l) => /^\s+at\s/.test(l);
    // Emit the accumulated run (first frame, frame-count marker, trailing
    // error line) and reset state. No-op when not inside a run.
    const flush = () => {
        if (!inStack)
            return;
        if (firstFrame)
            out.push(firstFrame);
        if (stackCount > 1)
            out.push(` [${stackCount - 1} more frames]`);
        if (lastError)
            out.push(lastError);
        inStack = false;
        stackCount = 0;
        firstFrame = '';
        lastError = '';
    };
    for (const line of lines) {
        if (isStackFrame(line)) {
            if (!inStack) {
                inStack = true;
                stackCount = 1;
                firstFrame = line;
            }
            else {
                stackCount++;
            }
        }
        else {
            if (inStack) {
                // Lines between stack frames that look like error messages
                // extend the current run instead of terminating it ("Caused by" chains).
                if (/^\w*(Error|Exception|Caused by)/.test(line.trim())) {
                    lastError = line;
                    continue;
                }
                flush();
            }
            out.push(line);
        }
    }
    // Flush a run that reaches the end of the input.
    flush();
    return out;
}
191
/**
 * Truncate a string to at most `maxBytes` of UTF-8, appending a note with the
 * total byte size. Fix over previous version: the cut no longer lands in the
 * middle of a multi-byte UTF-8 sequence, which previously decoded as U+FFFD
 * replacement characters at the truncation point.
 * @param {string} s input text
 * @param {number} maxBytes byte budget for the kept prefix
 * @param {number} [totalBytesHint] true total size when `s` is already partial
 * @returns {{text: string, truncated: boolean}}
 */
function truncateBytes(s, maxBytes, totalBytesHint) {
    const b = Buffer.from(s, 'utf8');
    const total = typeof totalBytesHint === 'number' && Number.isFinite(totalBytesHint) ? totalBytesHint : b.length;
    if (b.length <= maxBytes)
        return { text: s, truncated: false };
    // Cut at the byte budget, then back off past any incomplete trailing
    // UTF-8 sequence (continuation bytes match 0b10xxxxxx).
    let end = maxBytes;
    while (end > 0 && (b[end] & 0xc0) === 0x80)
        end--;
    const cut = b.subarray(0, end);
    return { text: cut.toString('utf8') + `\n[truncated, ${total} bytes total]`, truncated: true };
}
200
/**
 * Prune old backups for `absPath` so at most `ctx.maxBackupsPerFile`
 * (default DEFAULT_MAX_BACKUPS_PER_FILE) remain. Backup filenames begin with
 * an ISO-like timestamp, so a lexicographic sort orders oldest → newest.
 * A limit of 0 or less disables pruning. Sidecar .meta.json files are removed
 * alongside their .bak files; deletion failures are ignored (best-effort).
 */
async function rotateBackups(absPath, ctx) {
    const { keyDir } = backupDirForPath(ctx, absPath);
    const keep = ctx.maxBackupsPerFile ?? DEFAULT_MAX_BACKUPS_PER_FILE;
    if (keep <= 0)
        return;
    await fs.mkdir(keyDir, { recursive: true });
    const entries = await fs.readdir(keyDir, { withFileTypes: true }).catch(() => []);
    const bakNames = entries
        .filter((ent) => ent.isFile() && ent.name.endsWith('.bak'))
        .map((ent) => ent.name)
        .sort(); // oldest → newest due to ISO timestamp
    const excess = bakNames.length - keep;
    if (excess <= 0)
        return;
    for (const name of bakNames.slice(0, excess)) {
        const bakPath = path.join(keyDir, name);
        const metaPath = path.join(keyDir, `${name.replace(/\.bak$/, '')}.meta.json`);
        await fs.rm(bakPath, { force: true }).catch(() => { });
        await fs.rm(metaPath, { force: true }).catch(() => { });
    }
}
220
/**
 * Snapshot the current content of `absPath` into the per-file backup dir
 * before a mutation. Writes a timestamped .bak copy plus a .meta.json sidecar
 * (original path, timestamp, size, pre-edit sha256), then prunes old backups.
 * No-op when the path does not exist or is not a regular file.
 * @param {string} absPath file about to be modified
 * @param {object} ctx tool context (backupDir override, maxBackupsPerFile)
 */
async function backupFile(absPath, ctx) {
    const { bdir, keyDir } = backupDirForPath(ctx, absPath);
    await fs.mkdir(bdir, { recursive: true });
    await fs.mkdir(keyDir, { recursive: true });
    // Auto-create .gitignore in state dir to prevent backups from being committed
    const gitignorePath = path.join(bdir, '.gitignore');
    await fs.writeFile(gitignorePath, '*\n', { flag: 'wx' }).catch(() => { });
    // 'wx' flag = create only if doesn't exist, silently skip if it does
    const st = await fs.stat(absPath).catch(() => null);
    if (!st || !st.isFile())
        return;
    const content = await fs.readFile(absPath);
    const hash = crypto.createHash('sha256').update(content).digest('hex');
    const ts = formatBackupTs();
    const bakName = `${ts}.bak`;
    const metaName = `${ts}.meta.json`;
    const bakPath = path.join(keyDir, bakName);
    const metaPath = path.join(keyDir, metaName);
    await fs.writeFile(bakPath, content);
    await fs.writeFile(metaPath, JSON.stringify({ original_path: absPath, timestamp: ts, size: st.size, sha256_before: hash }, null, 2) + '\n', 'utf8');
    // Enforce the per-file backup retention limit after adding the new one.
    await rotateBackups(absPath, ctx);
}
242
/**
 * Record a mutation checkpoint with the replay subsystem, if configured.
 * When a lens is available and both before/after buffers are present, a
 * summarized diff is attached to the checkpoint as its note (failures fall
 * back to a note-less checkpoint). Returns '' on success or when replay is
 * disabled; on checkpoint failure returns a " replay_skipped: …" suffix for
 * the caller's result string instead of throwing.
 */
async function checkpointReplay(ctx, payload) {
    if (!ctx.replay)
        return '';
    let note;
    const canSummarize = Boolean(ctx.lens && payload.before && payload.after);
    if (canSummarize) {
        try {
            const beforeText = payload.before.toString('utf8');
            const afterText = payload.after.toString('utf8');
            note = await ctx.lens.summarizeDiffToText(beforeText, afterText, payload.filePath);
        }
        catch {
            // ignore and fallback to raw checkpoint
        }
    }
    try {
        await ctx.replay.checkpoint({ ...payload, note });
        return '';
    }
    catch (e) {
        return ` replay_skipped: ${e?.message ?? String(e)}`;
    }
}
262
/**
 * Atomically replace (or create) `absPath` with `data`.
 * Writes to a unique temp file in the same directory, preserves the existing
 * file's permission bits, then rename()s into place so readers never observe
 * a partially-written file. Fix over previous version: on write/chmod/rename
 * failure the temp file is now removed instead of being left behind.
 * @param {string} absPath destination path (parent dirs are created)
 * @param {string|Buffer} data file contents
 */
export async function atomicWrite(absPath, data) {
    const dir = path.dirname(absPath);
    await fs.mkdir(dir, { recursive: true });
    // Capture original permissions before overwriting
    const origStat = await fs.stat(absPath).catch(() => null);
    const origMode = origStat?.mode;
    const tmp = path.join(dir, `.${path.basename(absPath)}.idlehands.tmp.${process.pid}.${Date.now()}`);
    try {
        await fs.writeFile(tmp, data);
        // Restore original file mode bits if the file existed
        if (origMode != null) {
            await fs.chmod(tmp, origMode & 0o7777).catch(() => { });
        }
        await fs.rename(tmp, absPath);
    }
    catch (err) {
        // Don't leave an orphaned temp file behind; propagate the original error.
        await fs.rm(tmp, { force: true }).catch(() => { });
        throw err;
    }
}
276
/**
 * Restore the most recent backup for a file (the "undo" tool).
 * Uses args.path when provided, otherwise the last path edited this session.
 * Requires confirmation unless --no-confirm/--yolo is active; honors dry-run.
 */
export async function undo_path(ctx, args) {
    const requested = args?.path === undefined ? undefined : String(args.path);
    const target = requested ? resolvePath(ctx, requested) : ctx.lastEditedPath;
    if (!target)
        throw new Error('undo: missing path');
    if (!ctx.noConfirm) {
        if (!ctx.confirm) {
            throw new Error('undo: confirmation required (run with --no-confirm/--yolo or in interactive mode)');
        }
        const approved = await ctx.confirm(`Restore latest backup for:\n ${target}\nThis will overwrite the current file. Proceed? (y/N) `);
        if (!approved)
            return 'undo: cancelled';
    }
    if (ctx.dryRun)
        return `dry-run: would restore latest backup for ${target}`;
    return await restoreLatestBackup(target, ctx);
}
293
/**
 * Tool: read a text file with line numbers, with several projections:
 *  - directory guard: returns a helpful message instead of a cryptic EISDIR
 *  - files over 100KB: first 100KB returned line-numbered, marked TRUNCATED
 *  - binary files (NUL byte in first 512 bytes): type summary only
 *  - `search`: window of `context` lines around the first match, plus a list
 *    of all matching line numbers
 *  - `offset`/`limit`: a specific 1-based line range (limit clamped to 10..500)
 *  - large files (200+ lines) without search/offset: optional lens projection
 * Always returns a string; never raw binary.
 * @param {object} ctx tool context (cwd resolution, optional lens)
 * @param {object} args { path, offset?, limit?, search?, context? }
 */
export async function read_file(ctx, args) {
    // resolvePath is defined elsewhere in this file — presumably resolves
    // relative paths against ctx.cwd (TODO confirm against its definition).
    const p = resolvePath(ctx, args?.path);
    const offset = args?.offset ? Number(args.offset) : undefined;
    const limit = Math.max(10, Math.min(args?.limit ? Number(args.limit) : 100, 500));
    const search = typeof args?.search === 'string' ? args.search : undefined;
    const context = args?.context ? Number(args.context) : 10;
    const maxBytes = 100 * 1024;
    if (!p)
        throw new Error('read_file: missing path');
    // Detect directories early with a helpful message instead of cryptic EISDIR
    try {
        const stat = await fs.stat(p);
        if (stat.isDirectory()) {
            return `read_file: "${p}" is a directory, not a file. Use list_dir to see its contents, or search_files to find specific code.`;
        }
    }
    catch (e) {
        // stat failure (ENOENT etc.) — let readFile handle it for the standard error path
    }
    const buf = await fs.readFile(p).catch((e) => {
        throw new Error(`read_file: cannot read ${p}: ${e?.message ?? String(e)}`);
    });
    if (buf.length > maxBytes) {
        // Truncate gracefully instead of throwing
        const truncText = buf.subarray(0, maxBytes).toString('utf8');
        const truncLines = truncText.split(/\r?\n/);
        const numbered = truncLines.map((l, i) => `${String(i + 1).padStart(4)}| ${l}`).join('\n');
        return `# ${p} [TRUNCATED: ${buf.length} bytes, showing first ${maxBytes}]\n${numbered}`;
    }
    // Binary detection: NUL byte in first 512 bytes (§8)
    for (let i = 0; i < Math.min(buf.length, 512); i++) {
        if (buf[i] === 0) {
            const mimeGuess = guessMimeType(p, buf);
            return `[binary file, ${buf.length} bytes, detected type: ${mimeGuess}]`;
        }
    }
    const text = buf.toString('utf8');
    const lines = text.split(/\r?\n/);
    // Default window: lines 1..limit. Search/offset below override this.
    let start = 1;
    let end = Math.min(lines.length, limit);
    if (search) {
        // Substring (not regex) search; collect every matching 1-based line number.
        const matchLines = [];
        for (let i = 0; i < lines.length; i++) {
            if (lines[i].includes(search))
                matchLines.push(i + 1);
        }
        if (!matchLines.length) {
            return `# ${p}\n# search not found: ${JSON.stringify(search)}\n# file has ${lines.length} lines`;
        }
        // Window is centered on the FIRST match only; other matches are listed in the header.
        const firstIdx = matchLines[0];
        start = Math.max(1, firstIdx - context);
        end = Math.min(lines.length, firstIdx + context);
        const out = [];
        out.push(`# ${p}`);
        out.push(`# matches at lines: ${matchLines.join(', ')}${matchLines.length > 20 ? ' [truncated]' : ''}`);
        for (let ln = start; ln <= end; ln++) {
            out.push(`${String(ln).padStart(6, ' ')}| ${lines[ln - 1] ?? ''}`);
        }
        if (end < lines.length)
            out.push(`# ... (${lines.length - end} more lines)`);
        return out.join('\n');
    }
    else if (offset && offset >= 1) {
        start = offset;
        end = Math.min(lines.length, offset + limit - 1);
    }
    // Lens projection: only for large files (200+ lines) without search/offset.
    // Small files should return full content — the agent needs exact text for edits,
    // and a structural skeleton for a 15-line file just wastes a tool call.
    if (!search && !offset && lines.length >= 200) {
        try {
            const lensSummary = await ctx.lens?.projectFile(p, text);
            if (lensSummary)
                return lensSummary;
        }
        catch (e) {
            // Lens parse failure should not block read_file.
            console.warn(`[warn] lens unavailable for ${p}: ${e?.message ?? 'failed'}`);
        }
    }
    // Plain line-numbered window (default or offset-based).
    const out = [];
    out.push(`# ${p}`);
    for (let ln = start; ln <= end; ln++) {
        out.push(`${String(ln).padStart(6, ' ')}| ${lines[ln - 1] ?? ''}`);
    }
    if (end < lines.length)
        out.push(`# ... (${lines.length - end} more lines)`);
    return out.join('\n');
}
382
/**
 * Tool: read several files in one call. `args.requests` is an array of
 * read_file argument objects; results are concatenated, blank-line separated,
 * in request order (reads are sequential).
 */
export async function read_files(ctx, args) {
    const requests = Array.isArray(args?.requests) ? args.requests : [];
    if (requests.length === 0)
        throw new Error('read_files: missing requests[]');
    const sections = [];
    for (const request of requests) {
        const body = await read_file(ctx, request);
        sections.push(body, '');
    }
    return sections.join('\n');
}
393
/**
 * Tool: create or overwrite a file atomically.
 * Safety gates, in order: out-of-cwd creation block (editing existing files
 * outside cwd is allowed with a warning), checkPathSafety tiers (forbidden →
 * throw; cautious → interactive confirm), dry-run short-circuit, optional
 * /etc snapshot in sys mode, then backup → atomic write → replay checkpoint.
 * `args.content` may be a string or a JSON-serializable object (stringified
 * with 2-space indent).
 * @returns {Promise<string>} human-readable result message
 */
export async function write_file(ctx, args) {
    const p = resolvePath(ctx, args?.path);
    // Content may arrive as a string (normal) or as a parsed JSON object
    // (when llama-server's XML parser auto-parses JSON content values).
    const raw = args?.content;
    const content = typeof raw === 'string' ? raw
        : (raw != null && typeof raw === 'object' ? JSON.stringify(raw, null, 2) : undefined);
    if (!p)
        throw new Error('write_file: missing path');
    if (content == null)
        throw new Error('write_file: missing content (got ' + typeof raw + ')');
    // Out-of-cwd enforcement: block creating NEW files outside cwd, warn on editing existing ones.
    const cwdWarning = checkCwdWarning('write_file', p, ctx);
    if (cwdWarning) {
        // Check if the file already exists — only allow editing existing files outside cwd
        const exists = await fs.stat(p).then(() => true, () => false);
        if (!exists) {
            throw new Error(`write_file: BLOCKED — cannot create new file "${p}" outside the working directory "${path.resolve(ctx.cwd)}". Use relative paths to create files within your project.`);
        }
    }
    // Path safety check (Phase 9)
    const pathVerdict = checkPathSafety(p);
    if (pathVerdict.tier === 'forbidden') {
        throw new Error(`write_file: ${pathVerdict.reason}`);
    }
    if (pathVerdict.tier === 'cautious' && !ctx.noConfirm) {
        if (ctx.confirm) {
            const ok = await ctx.confirm(pathVerdict.prompt || `Write to ${p}?`, { tool: 'write_file', args: { path: p } });
            if (!ok)
                throw new Error(`write_file: cancelled by user (${pathVerdict.reason})`);
        }
        else {
            // Cautious path but no way to ask the user — refuse rather than proceed.
            throw new Error(`write_file: blocked (${pathVerdict.reason}) without --no-confirm/--yolo`);
        }
    }
    if (ctx.dryRun)
        return `dry-run: would write ${p} (${Buffer.byteLength(content, 'utf8')} bytes)${cwdWarning}`;
    // Phase 9d: snapshot /etc/ files before editing
    if (ctx.mode === 'sys' && ctx.vault) {
        await snapshotBeforeEdit(ctx.vault, p).catch(() => { });
    }
    // Capture pre-image for the replay checkpoint; missing file reads as empty.
    const beforeBuf = await fs.readFile(p).catch(() => Buffer.from(''));
    await backupFile(p, ctx);
    await atomicWrite(p, content);
    ctx.onMutation?.(p);
    const afterBuf = Buffer.from(content, 'utf8');
    const replayNote = await checkpointReplay(ctx, { op: 'write_file', filePath: p, before: beforeBuf, after: afterBuf });
    return `wrote ${p} (${Buffer.byteLength(content, 'utf8')} bytes)${replayNote}${cwdWarning}`;
}
442
/**
 * Tool: insert text at a 1-based line position in a file (line = -1 appends).
 * Applies the same safety gates as write_file (checkPathSafety tiers,
 * confirmation, dry-run, sys-mode snapshot), preserves the file's existing
 * newline style (CRLF vs LF), and handles the empty-file and
 * trailing-newline-append edge cases explicitly. Backs up, writes atomically,
 * and records a replay checkpoint.
 * @returns {Promise<string>} human-readable result message
 */
export async function insert_file(ctx, args) {
    const p = resolvePath(ctx, args?.path);
    const line = Number(args?.line);
    // Text may arrive as a string or as a parsed JSON object (stringified if so).
    const rawText = args?.text;
    const text = typeof rawText === 'string' ? rawText
        : (rawText != null && typeof rawText === 'object' ? JSON.stringify(rawText, null, 2) : undefined);
    if (!p)
        throw new Error('insert_file: missing path');
    if (!Number.isFinite(line))
        throw new Error('insert_file: missing/invalid line');
    if (text == null)
        throw new Error('insert_file: missing text (got ' + typeof rawText + ')');
    // Path safety check (Phase 9)
    const pathVerdict = checkPathSafety(p);
    if (pathVerdict.tier === 'forbidden') {
        throw new Error(`insert_file: ${pathVerdict.reason}`);
    }
    if (pathVerdict.tier === 'cautious' && !ctx.noConfirm) {
        if (ctx.confirm) {
            const ok = await ctx.confirm(pathVerdict.prompt || `Insert into ${p}?`, { tool: 'insert_file', args: { path: p } });
            if (!ok)
                throw new Error(`insert_file: cancelled by user (${pathVerdict.reason})`);
        }
        else {
            throw new Error(`insert_file: blocked (${pathVerdict.reason}) without --no-confirm/--yolo`);
        }
    }
    if (ctx.dryRun)
        return `dry-run: would insert into ${p} at line=${line} (${Buffer.byteLength(text, 'utf8')} bytes)`;
    // Phase 9d: snapshot /etc/ files before editing
    if (ctx.mode === 'sys' && ctx.vault) {
        await snapshotBeforeEdit(ctx.vault, p).catch(() => { });
    }
    // Missing file reads as empty (insert then creates it).
    const beforeText = await fs.readFile(p, 'utf8').catch(() => '');
    // Detect original newline style
    const eol = beforeText.includes('\r\n') ? '\r\n' : '\n';
    // Handle empty file: just write the inserted text directly (avoid spurious leading newline).
    if (beforeText === '') {
        const out = text;
        await backupFile(p, ctx);
        await atomicWrite(p, out);
        ctx.onMutation?.(p);
        const replayNote = await checkpointReplay(ctx, {
            op: 'insert_file',
            filePath: p,
            before: Buffer.from(beforeText, 'utf8'),
            after: Buffer.from(out, 'utf8')
        });
        const cwdWarning = checkCwdWarning('insert_file', p, ctx);
        return `inserted into ${p} at 0${replayNote}${cwdWarning}`;
    }
    const lines = beforeText.split(/\r?\n/);
    // Clamp the insertion index into [0, lines.length]; -1 means append.
    let idx;
    if (line === -1)
        idx = lines.length;
    else
        idx = Math.max(0, Math.min(lines.length, line));
    // When appending to a file that ends with a newline, the split produces a
    // trailing empty element (e.g. "a\n" → ["a",""]). Inserting at lines.length
    // pushes content AFTER that empty element, producing a double-newline on rejoin.
    // Fix: when appending (line === -1) and the last element is empty (trailing newline),
    // insert before the trailing empty element instead.
    if (line === -1 && lines.length > 0 && lines[lines.length - 1] === '') {
        idx = lines.length - 1;
    }
    const insertLines = text.split(/\r?\n/);
    lines.splice(idx, 0, ...insertLines);
    // Rejoin with the file's original newline style.
    const out = lines.join(eol);
    await backupFile(p, ctx);
    await atomicWrite(p, out);
    ctx.onMutation?.(p);
    const replayNote = await checkpointReplay(ctx, {
        op: 'insert_file',
        filePath: p,
        before: Buffer.from(beforeText, 'utf8'),
        after: Buffer.from(out, 'utf8')
    });
    const cwdWarning = checkCwdWarning('insert_file', p, ctx);
    return `inserted into ${p} at ${idx}${replayNote}${cwdWarning}`;
}
522
/**
 * edit_file tool: replace an occurrence of `old_text` with `new_text` in a file.
 *
 * args:
 *   path        - file path (resolved against ctx.cwd)
 *   old_text    - exact text to find; plain objects are JSON-stringified
 *   new_text    - replacement text; plain objects are JSON-stringified
 *   replace_all - when truthy, replace every occurrence; otherwise only the first
 *
 * Honors ctx.dryRun (no write), path-safety tiers (Phase 9), and sys-mode
 * /etc snapshots (Phase 9d). On a miss, the thrown error carries a
 * closest-match hint (bigram similarity) so the caller can retry with exact text.
 * Returns a human-readable summary string on success.
 */
export async function edit_file(ctx, args) {
    const p = resolvePath(ctx, args?.path);
    // Models sometimes send structured values; accept objects by JSON-stringifying.
    const rawOld = args?.old_text;
    const oldText = typeof rawOld === 'string' ? rawOld
        : (rawOld != null && typeof rawOld === 'object' ? JSON.stringify(rawOld, null, 2) : undefined);
    const rawNew = args?.new_text;
    const newText = typeof rawNew === 'string' ? rawNew
        : (rawNew != null && typeof rawNew === 'object' ? JSON.stringify(rawNew, null, 2) : undefined);
    const replaceAll = Boolean(args?.replace_all);
    if (!p)
        throw new Error('edit_file: missing path');
    if (oldText == null)
        throw new Error('edit_file: missing old_text');
    if (newText == null)
        throw new Error('edit_file: missing new_text');
    // Fix: an empty old_text used to slip past the null check. indexOf('') is 0,
    // which silently prepended new_text — and with replace_all, split('')/join()
    // interleaved new_text between every character. Reject it explicitly.
    if (oldText === '')
        throw new Error('edit_file: old_text must not be empty');
    // Path safety check (Phase 9)
    const pathVerdict = checkPathSafety(p);
    if (pathVerdict.tier === 'forbidden') {
        throw new Error(`edit_file: ${pathVerdict.reason}`);
    }
    // Cautious paths require interactive confirmation unless --no-confirm/--yolo.
    if (pathVerdict.tier === 'cautious' && !ctx.noConfirm) {
        if (ctx.confirm) {
            const ok = await ctx.confirm(pathVerdict.prompt || `Edit ${p}?`, { tool: 'edit_file', args: { path: p, old_text: oldText, new_text: newText } });
            if (!ok)
                throw new Error(`edit_file: cancelled by user (${pathVerdict.reason})`);
        }
        else {
            throw new Error(`edit_file: blocked (${pathVerdict.reason}) without --no-confirm/--yolo`);
        }
    }
    // Phase 9d: snapshot /etc/ files before editing (best-effort; failures ignored)
    if (ctx.mode === 'sys' && ctx.vault) {
        await snapshotBeforeEdit(ctx.vault, p).catch(() => { });
    }
    const cur = await fs.readFile(p, 'utf8').catch((e) => {
        throw new Error(`edit_file: cannot read ${p}: ${e?.message ?? String(e)}`);
    });
    const idx = cur.indexOf(oldText);
    if (idx === -1) {
        // old_text not found — build a "closest match" hint so the model can
        // self-correct instead of blindly re-reading the whole file.
        const normalize = (s) => s.replace(/\s+/g, ' ').trim().toLowerCase();
        const needle = normalize(oldText);
        const curLines = cur.split(/\r?\n/);
        const needleLines = oldText.split(/\r?\n/).length;
        let bestScore = 0;
        let bestLine = -1;
        let bestText = '';
        for (let i = 0; i < curLines.length; i++) {
            // Slide a window of the same number of lines as old_text.
            const windowEnd = Math.min(curLines.length, i + needleLines);
            const window = curLines.slice(i, windowEnd).join('\n');
            const normWindow = normalize(window);
            // Similarity: matching character bigrams (handles differences anywhere, not just prefix).
            const score = bigramSimilarity(needle, normWindow);
            if (score > bestScore) {
                bestScore = score;
                bestLine = i + 1;
                bestText = window;
            }
        }
        let hint = '';
        if (bestScore > 0.3 && bestLine > 0) {
            const preview = bestText.length > 600 ? bestText.slice(0, 600) + '…' : bestText;
            hint = `\nClosest match at line ${bestLine} (${Math.round(bestScore * 100)}% similarity):\n${preview}`;
        }
        else if (!cur.trim()) {
            hint = `\nFile is empty.`;
        }
        else {
            hint = `\nFile head (first 400 chars):\n${cur.slice(0, 400)}`;
        }
        throw new Error(`edit_file: old_text not found in ${p}. Re-read the file and retry with exact text.${hint}`);
    }
    const next = replaceAll ? cur.split(oldText).join(newText) : cur.slice(0, idx) + newText + cur.slice(idx + oldText.length);
    if (ctx.dryRun)
        return `dry-run: would edit ${p} (replace_all=${replaceAll})`;
    await backupFile(p, ctx);
    await atomicWrite(p, next);
    ctx.onMutation?.(p);
    // Record before/after for checkpoint replay; note is appended to the result.
    const replayNote = await checkpointReplay(ctx, {
        op: 'edit_file',
        filePath: p,
        before: Buffer.from(cur, 'utf8'),
        after: Buffer.from(next, 'utf8')
    });
    const cwdWarning = checkCwdWarning('edit_file', p, ctx);
    return `edited ${p} (replace_all=${replaceAll})${replayNote}${cwdWarning}`;
}
610
/**
 * list_dir tool: list directory entries as `kind\tsize\tpath` lines.
 *
 * args:
 *   path        - directory to list (default '.', resolved against ctx.cwd)
 *   recursive   - when truthy, descend into subdirectories (max depth 3)
 *   max_entries - cap on emitted entries (default 200, clamped to [1, 500])
 *
 * Sizes come from lstat (symlinks are not followed); unreadable stats show 0.
 */
export async function list_dir(ctx, args) {
    const p = resolvePath(ctx, args?.path ?? '.');
    const recursive = Boolean(args?.recursive);
    // Fix: a non-numeric max_entries (e.g. "abc") used to become NaN, making the
    // `count >= maxEntries` cap never fire — an unbounded walk. Validate and
    // clamp to [1, 500], defaulting to 200 for missing/invalid/non-positive input.
    const requested = Number(args?.max_entries);
    const maxEntries = Number.isFinite(requested) && requested > 0
        ? Math.min(Math.floor(requested), 500)
        : 200;
    if (!p)
        throw new Error('list_dir: missing path');
    const lines = [];
    let count = 0;
    // Depth-first walk; recursion depth is capped at 3 to bound output.
    async function walk(dir, depth) {
        if (count >= maxEntries)
            return;
        const ents = await fs.readdir(dir, { withFileTypes: true }).catch((e) => {
            throw new Error(`list_dir: cannot read ${dir}: ${e?.message ?? String(e)}`);
        });
        for (const ent of ents) {
            if (count >= maxEntries)
                return;
            const full = path.join(dir, ent.name);
            const st = await fs.lstat(full).catch(() => null);
            const kind = ent.isDirectory() ? 'dir' : ent.isSymbolicLink() ? 'link' : 'file';
            lines.push(`${kind}\t${st?.size ?? 0}\t${full}`);
            count++;
            if (recursive && ent.isDirectory() && depth < 3) {
                await walk(full, depth + 1);
            }
        }
    }
    await walk(p, 0);
    if (count >= maxEntries)
        lines.push(`[truncated after ${maxEntries} entries]`);
    return lines.join('\n');
}
642
/**
 * search_files tool: grep file contents under `args.path` for the regex
 * `args.pattern`, optionally filtered by a simple `args.include` glob
 * (only '*.ext' / exact names — see globishMatch).
 *
 * Strategy: try ripgrep first (fast, bounded); on rg error, exec failure, or
 * unparsable output, fall back to a manual recursive walk with a JS RegExp.
 * Results are `file:line:text` lines, capped at max_results (<= 100, default 50).
 * The "no matches" message deliberately instructs the model NOT to fall back
 * to reading files one by one.
 */
export async function search_files(ctx, args) {
    const root = resolvePath(ctx, args?.path ?? '.');
    const pattern = typeof args?.pattern === 'string' ? args.pattern : undefined;
    const include = typeof args?.include === 'string' ? args.include : undefined;
    const maxResults = Math.min(args?.max_results ? Number(args.max_results) : 50, 100);
    if (!root)
        throw new Error('search_files: missing path');
    if (!pattern)
        throw new Error('search_files: missing pattern');
    // Prefer rg if available (fast, bounded output)
    if (await hasRg()) {
        const cmd = ['rg', '-n', '--no-heading', '--color', 'never', pattern, root];
        if (include)
            // -g must precede the pattern; splice it right after 'rg'.
            cmd.splice(1, 0, '-g', include);
        try {
            // exec() returns a JSON envelope {rc, out, err, truncated} as a string.
            const rawJson = await exec(ctx, { command: cmd.map(shellEscape).join(' '), timeout: 30 });
            const parsed = JSON.parse(rawJson);
            // rg exits 1 when no matches found (not an error), 2+ for real errors.
            if (parsed.rc === 1 && !parsed.out?.trim()) {
                return `No matches for pattern "${pattern}" in ${root}. STOP — do NOT read files individually to search. Try a broader regex pattern, different keywords, or use exec: grep -rn "keyword" ${root}`;
            }
            if (parsed.rc >= 2) {
                // Real rg error — fall through to regex fallback below
            }
            else {
                const rgOutput = parsed.out ?? '';
                if (rgOutput) {
                    const lines = rgOutput.split(/\r?\n/).filter(Boolean).slice(0, maxResults);
                    if (lines.length >= maxResults)
                        lines.push(`[truncated after ${maxResults} results]`);
                    return lines.join('\n');
                }
                // NOTE(review): rc === 0 with empty output falls through to the slow
                // fallback — presumably unreachable for rg, but confirm intent.
            }
        }
        catch {
            // JSON parse failed or exec error — fall through to regex fallback
        }
    }
    // Slow fallback: manual recursive walk (depth <= 6), skipping common build
    // directories and binary files, applying the pattern line by line.
    const re = new RegExp(pattern);
    const out = [];
    async function walk(dir, depth) {
        if (out.length >= maxResults)
            return;
        const ents = await fs.readdir(dir, { withFileTypes: true }).catch(() => []);
        for (const ent of ents) {
            if (out.length >= maxResults)
                return;
            const full = path.join(dir, ent.name);
            if (ent.isDirectory()) {
                // Skip dependency/VCS/build trees — huge and rarely the target.
                if (ent.name === 'node_modules' || ent.name === '.git' || ent.name === 'dist' || ent.name === 'build')
                    continue;
                if (depth < 6)
                    await walk(full, depth + 1);
                continue;
            }
            if (!ent.isFile())
                continue;
            if (include && !globishMatch(ent.name, include))
                continue;
            // Skip binary files (NUL byte in first 512 bytes)
            const rawBuf = await fs.readFile(full).catch(() => null);
            if (!rawBuf)
                continue;
            let isBinary = false;
            for (let bi = 0; bi < Math.min(rawBuf.length, 512); bi++) {
                if (rawBuf[bi] === 0) {
                    isBinary = true;
                    break;
                }
            }
            if (isBinary)
                continue;
            const buf = rawBuf.toString('utf8');
            const lines = buf.split(/\r?\n/);
            for (let i = 0; i < lines.length; i++) {
                // `re` has no /g flag, so .test is stateless per line.
                if (re.test(lines[i])) {
                    out.push(`${full}:${i + 1}:${lines[i]}`);
                    if (out.length >= maxResults)
                        return;
                }
            }
        }
    }
    await walk(root, 0);
    if (out.length >= maxResults)
        out.push(`[truncated after ${maxResults} results]`);
    const result = out.join('\n');
    if (!result)
        return `No matches for pattern "${pattern}" in ${root}. STOP — do NOT read files individually to search. Try a broader regex pattern, different keywords, or use exec: grep -rn "keyword" ${root}`;
    return result;
}
734
/**
 * exec tool: run a shell command and return a JSON envelope string
 * `{rc, out, err, truncated}` (plus a cwd warning suffix when applicable).
 *
 * Pipeline: cwd containment checks → safety-tier verdict (forbidden/cautious)
 * → dry-run short-circuit → sudo handling (non-TTY probe or interactive pty)
 * → spawn via BASH_PATH → capped capture → ANSI strip / dedupe / stack-trace
 * collapse → byte-truncation with markers.
 *
 * Timeouts: default 30s (60s in sys mode), clamped to 120s; on expiry the
 * child gets SIGKILL and err is annotated. ctx.signal (Ctrl+C) also kills.
 */
export async function exec(ctx, args) {
    const command = typeof args?.command === 'string' ? args.command : undefined;
    const cwd = args?.cwd ? resolvePath(ctx, args.cwd) : ctx.cwd;
    const defaultTimeout = ctx.mode === 'sys' ? 60 : 30;
    const timeout = Math.min(args?.timeout ? Number(args.timeout) : defaultTimeout, 120);
    if (!command)
        throw new Error('exec: missing command');
    // Out-of-cwd enforcement: block exec cwd or `cd` navigating outside the project.
    const absCwd = path.resolve(ctx.cwd);
    // NOTE(review): execCwdWarning is declared but never reassigned in this
    // chunk — the suffixes appended below are always empty. Confirm intent.
    let execCwdWarning = '';
    if (args?.cwd) {
        const absExecCwd = path.resolve(cwd);
        if (!absExecCwd.startsWith(absCwd + path.sep) && absExecCwd !== absCwd) {
            throw new Error(`exec: BLOCKED — cwd "${absExecCwd}" is outside the working directory "${absCwd}". Use relative paths and work within the project directory.`);
        }
    }
    if (command) {
        // Detect `cd /absolute/path` anywhere in the command
        const cdPattern = /\bcd\s+(['"]?)(\/[^\s'";&|]+)\1/g;
        let cdMatch;
        while ((cdMatch = cdPattern.exec(command)) !== null) {
            const cdTarget = path.resolve(cdMatch[2]);
            if (!cdTarget.startsWith(absCwd + path.sep) && cdTarget !== absCwd) {
                throw new Error(`exec: BLOCKED — command navigates to "${cdTarget}" which is outside the working directory "${absCwd}". Use relative paths and work within the project directory.`);
            }
        }
        // Detect absolute paths in file-creating commands (mkdir, cat >, tee, touch, etc.)
        // that target directories outside cwd — HARD BLOCK
        const absPathPattern = /(?:mkdir|cat\s*>|tee|touch|cp|mv)\s+(?:-\S+\s+)*(['"]?)(\/[^\s'";&|]+)\1/g;
        let apMatch;
        while ((apMatch = absPathPattern.exec(command)) !== null) {
            const absTarget = path.resolve(apMatch[2]);
            if (!absTarget.startsWith(absCwd + path.sep) && absTarget !== absCwd) {
                throw new Error(`exec: BLOCKED — command targets "${absTarget}" which is outside the working directory "${absCwd}". Use relative paths to work within the project directory.`);
            }
        }
    }
    // ── Safety tier check (Phase 9) ──
    const verdict = checkExecSafety(command);
    // Forbidden: ALWAYS blocked, even in yolo/noConfirm mode. No override.
    if (verdict.tier === 'forbidden') {
        throw new Error(`exec: ${verdict.reason} — command: ${command}`);
    }
    // Extra protection: block rm targeting protected root directories
    if (isProtectedDeleteTarget(command)) {
        throw new Error(`exec: BLOCKED: rm targeting protected directory — command: ${command}`);
    }
    // Cautious: require confirmation unless yolo/noConfirm
    if (verdict.tier === 'cautious' && !ctx.noConfirm) {
        if (ctx.confirm) {
            const ok = await ctx.confirm(verdict.prompt || `About to run:\n\n${command}\n\nProceed? (y/N) `, { tool: 'exec', args: { command } });
            if (!ok) {
                throw new Error(`exec: cancelled by user (${verdict.reason}): ${command}`);
            }
        }
        else {
            throw new Error(`exec: blocked (${verdict.reason}) without --no-confirm/--yolo: ${command}`);
        }
    }
    if (ctx.dryRun)
        return `dry-run: would exec in ${cwd}: ${command}`;
    // ── Sudo handling (Phase 9c) ──
    // Non-TTY: probe for NOPASSWD / cached credentials before running.
    if (/^\s*sudo\s/.test(command) && !process.stdin.isTTY) {
        try {
            // `sudo -n true` fails fast if a password would be required.
            const probe = spawnSync('sudo', ['-n', 'true'], { timeout: 5000, stdio: 'ignore' });
            if (probe.status !== 0) {
                throw new Error('exec: sudo requires a TTY for password input, but stdin is not a TTY. ' +
                    'Options: run idlehands interactively, configure NOPASSWD for this command, or pre-cache sudo credentials.');
            }
        }
        catch (e) {
            if (e.message?.includes('sudo requires a TTY'))
                throw e;
            // spawnSync error (sudo not found, etc.) — let the actual command fail naturally
        }
    }
    // maxBytes bounds the model-visible output; captureLimit bounds what we
    // buffer in memory (larger, so dedupe/collapse has material to work with).
    const maxBytes = ctx.maxExecBytes ?? DEFAULT_MAX_EXEC_BYTES;
    const captureLimit = ctx.maxExecCaptureBytes ?? Math.max(maxBytes * 64, 256 * 1024);
    // TTY interactive sudo path (Phase 9c): use node-pty when available.
    if (/^\s*sudo\s/.test(command) && process.stdin.isTTY) {
        const pty = await loadNodePty();
        if (!pty) {
            throw new Error('exec: interactive sudo requires node-pty, but it is not installed. Install optional dependency `node-pty` (build tools: python3, make, g++) or use non-interactive sudo (NOPASSWD/cached credentials).');
        }
        return await execWithPty({
            pty,
            command,
            cwd,
            timeout,
            maxBytes,
            captureLimit,
            signal: ctx.signal,
        }) + execCwdWarning;
    }
    // Spawn with `shell: BASH_PATH` — lets Node.js resolve/launch the shell,
    // avoiding ENOENT issues with explicit bash paths in certain environments.
    const child = spawn(command, [], {
        cwd,
        stdio: ['ignore', 'pipe', 'pipe'],
        shell: BASH_PATH,
    });
    const outChunks = [];
    const errChunks = [];
    let outSeen = 0;      // total stdout bytes the child produced
    let errSeen = 0;      // total stderr bytes the child produced
    let outCaptured = 0;  // stdout bytes actually buffered (<= captureLimit)
    let errCaptured = 0;  // stderr bytes actually buffered (<= captureLimit)
    let killed = false;
    const killTimer = setTimeout(() => {
        killed = true;
        child.kill('SIGKILL');
    }, Math.max(1, timeout) * 1000);
    // §11: kill child process if parent abort signal fires (Ctrl+C).
    const onAbort = () => { killed = true; child.kill('SIGKILL'); };
    ctx.signal?.addEventListener('abort', onAbort, { once: true });
    // Buffer a chunk, counting every byte seen but keeping at most captureLimit.
    const pushCapped = (chunks, buf, kind) => {
        const n = buf.length;
        if (kind === 'out')
            outSeen += n;
        else
            errSeen += n;
        const captured = kind === 'out' ? outCaptured : errCaptured;
        const remaining = captureLimit - captured;
        if (remaining <= 0)
            return;
        const take = n <= remaining ? buf : buf.subarray(0, remaining);
        chunks.push(Buffer.from(take));
        if (kind === 'out')
            outCaptured += take.length;
        else
            errCaptured += take.length;
    };
    child.stdout.on('data', (d) => pushCapped(outChunks, d, 'out'));
    child.stderr.on('data', (d) => pushCapped(errChunks, d, 'err'));
    const rc = await new Promise((resolve, reject) => {
        child.on('error', (err) => {
            clearTimeout(killTimer);
            ctx.signal?.removeEventListener('abort', onAbort);
            reject(new Error(`exec: failed to spawn shell: ${err.message} (${err.code ?? 'unknown'})`));
        });
        child.on('close', (code) => resolve(code ?? 0));
    });
    clearTimeout(killTimer);
    ctx.signal?.removeEventListener('abort', onAbort);
    // Post-process: strip ANSI escapes, collapse repeated lines and long stack
    // traces, then truncate to the model-visible byte budget.
    const outRaw = stripAnsi(Buffer.concat(outChunks).toString('utf8'));
    const errRaw = stripAnsi(Buffer.concat(errChunks).toString('utf8'));
    const outLines = collapseStackTraces(dedupeRepeats(outRaw.split(/\r?\n/))).join('\n').trimEnd();
    const errLines = collapseStackTraces(dedupeRepeats(errRaw.split(/\r?\n/))).join('\n').trimEnd();
    const outT = truncateBytes(outLines, maxBytes, outSeen);
    const errT = truncateBytes(errLines, maxBytes, errSeen);
    let outText = outT.text;
    let errText = errT.text;
    const capOut = outSeen > outCaptured;
    const capErr = errSeen > errCaptured;
    // If we had to cap capture but the post-processed output ended up short
    // (e.g., massive repeated output collapsed), still surface that truncation.
    if (capOut && !outT.truncated) {
        outText = truncateBytes(outText + `\n[capture truncated, ${outSeen} bytes total]`, maxBytes, outSeen).text;
    }
    if (capErr && !errT.truncated) {
        errText = truncateBytes(errText + `\n[capture truncated, ${errSeen} bytes total]`, maxBytes, errSeen).text;
    }
    if (killed) {
        errText = (errText ? errText + '\n' : '') + `[killed after ${timeout}s timeout]`;
    }
    const result = { rc, out: outText, err: errText, truncated: outT.truncated || errT.truncated || capOut || capErr };
    // Phase 9d: auto-note system changes in sys mode
    if (ctx.mode === 'sys' && ctx.vault && rc === 0) {
        autoNoteSysChange(ctx.vault, command, outText).catch(() => { });
    }
    return JSON.stringify(result) + execCwdWarning;
}
907
/**
 * Run `command` under a pseudo-terminal (node-pty) so interactive prompts
 * (e.g. sudo password entry) work. Output is echoed live to stdout when
 * attached to a TTY, captured up to `captureLimit` bytes, post-processed
 * (ANSI strip, dedupe, stack-trace collapse), and truncated to `maxBytes`.
 * Returns the same JSON envelope string as exec(): {rc, out, err, truncated}.
 * A pty merges stdout and stderr, so `err` only carries the timeout marker.
 */
async function execWithPty(args) {
    const { pty, command, cwd, timeout, maxBytes, captureLimit, signal } = args;
    const proc = pty.spawn(BASH_PATH, ['-lc', command], {
        name: 'xterm-color',
        cwd,
        cols: 120,
        rows: 30,
        env: process.env,
    });
    const chunks = [];
    let seen = 0;      // total bytes the child produced
    let captured = 0;  // bytes actually kept (<= captureLimit)
    let killed = false;
    const onDataDisposable = proc.onData((data) => {
        // Real-time stream for interactive UX
        if (process.stdout.isTTY) {
            process.stdout.write(data);
        }
        const n = Buffer.byteLength(data, 'utf8');
        seen += n;
        const remaining = captureLimit - captured;
        if (remaining <= 0)
            return;
        if (n <= remaining) {
            chunks.push(data);
            captured += n;
        }
        else {
            // Partial fit: keep only the bytes that still fit under the cap.
            const buf = Buffer.from(data, 'utf8');
            const slice = buf.subarray(0, remaining).toString('utf8');
            chunks.push(slice);
            captured += Buffer.byteLength(slice, 'utf8');
        }
    });
    // Shared kill path for both the timeout and the abort signal.
    const kill = () => {
        killed = true;
        try {
            proc.kill();
        }
        catch {
            // ignore
        }
    };
    const killTimer = setTimeout(kill, Math.max(1, timeout) * 1000);
    const onAbort = () => kill();
    signal?.addEventListener('abort', onAbort, { once: true });
    const rc = await new Promise((resolve) => {
        proc.onExit((e) => resolve(Number(e?.exitCode ?? 0)));
    });
    clearTimeout(killTimer);
    signal?.removeEventListener('abort', onAbort);
    // Detach the onData listener (node-pty returns a disposable).
    onDataDisposable?.dispose?.();
    const raw = stripAnsi(chunks.join(''));
    const lines = collapseStackTraces(dedupeRepeats(raw.split(/\r?\n/))).join('\n').trimEnd();
    const outT = truncateBytes(lines, maxBytes, seen);
    let outText = outT.text;
    const cap = seen > captured;
    // Capture was capped but post-processing shrank the text below the budget —
    // still surface the truncation to the model.
    if (cap && !outT.truncated) {
        outText = truncateBytes(outText + `\n[capture truncated, ${seen} bytes total]`, maxBytes, seen).text;
    }
    let errText = '';
    if (killed) {
        errText = `[killed after ${timeout}s timeout]`;
    }
    const result = {
        rc,
        out: outText,
        err: errText,
        truncated: outT.truncated || cap || killed,
    };
    return JSON.stringify(result);
}
979
/**
 * vault_note tool: persist a key/value note into the Vault.
 * Trims the key, requires a string value, honors ctx.dryRun, and throws
 * when the key/value is missing or the vault is disabled.
 * Returns "vault_note: saved <id>" on success.
 */
export async function vault_note(ctx, args) {
    const rawKey = args?.key;
    const key = typeof rawKey === 'string' ? rawKey.trim() : '';
    if (key === '')
        throw new Error('vault_note: missing key');
    const value = typeof args?.value === 'string' ? args.value : undefined;
    if (value == null)
        throw new Error('vault_note: missing value');
    if (ctx.dryRun)
        return `dry-run: would add vault note ${JSON.stringify(key)}`;
    if (!ctx.vault) {
        throw new Error('vault_note: vault disabled');
    }
    const noteId = await ctx.vault.note(key, String(value));
    return `vault_note: saved ${noteId}`;
}
994
/**
 * vault_search tool: query the Vault and render matches one per line as
 * `<updatedAt> <title> "<snippet>"`, where title is `note:<key>` or
 * `tool:<name>`. Returns status strings (not errors) when the query is
 * missing, the vault is disabled, or nothing matched.
 * `args.limit` is clamped to [1, 50]; invalid/missing limits default to 8.
 */
export async function vault_search(ctx, args) {
    const query = typeof args?.query === 'string' ? args.query.trim() : '';
    const rawLimit = Number(args?.limit);
    if (!query)
        return 'vault_search: missing query';
    const n = Number.isFinite(rawLimit) && rawLimit > 0
        ? Math.min(50, Math.max(1, Math.floor(rawLimit)))
        : 8;
    if (!ctx.vault)
        return 'vault disabled';
    const hits = await ctx.vault.search(query, n);
    if (hits.length === 0) {
        return `vault_search: no results for ${JSON.stringify(query)}`;
    }
    const rendered = [];
    for (const hit of hits) {
        const title = hit.kind === 'note' ? `note:${hit.key}` : `tool:${hit.tool || hit.key || 'unknown'}`;
        const body = hit.value ?? hit.snippet ?? hit.content ?? '';
        // Collapse whitespace and clip to keep each line compact.
        const short = body.replace(/\s+/g, ' ').slice(0, 160);
        rendered.push(`${hit.updatedAt} ${title} ${JSON.stringify(short)}`);
    }
    return rendered.join('\n');
}
1014
/**
 * Phase 9: sys_context tool (mode-gated in the agent tool schema).
 * Thin wrapper — all behavior lives in the imported sysContextTool helper.
 */
export async function sys_context(ctx, args) {
    return await sysContextTool(ctx, args);
}
1018
/**
 * Resolve a user-supplied path against the session working directory.
 * Throws 'missing path' for non-string or blank input; otherwise always
 * returns an absolute path (relative inputs are joined onto ctx.cwd).
 */
function resolvePath(ctx, p) {
    const usable = typeof p === 'string' && p.trim() !== '';
    if (!usable)
        throw new Error('missing path');
    return path.resolve(ctx.cwd, p);
}
1023
/**
 * Check if a resolved path is outside the working directory.
 * Returns a model-visible warning string if so (and logs to the console),
 * or an empty string when the path is inside — or equal to — the cwd.
 */
function checkCwdWarning(tool, resolvedPath, ctx) {
    const absCwd = path.resolve(ctx.cwd);
    const inside = resolvedPath === absCwd || resolvedPath.startsWith(absCwd + path.sep);
    if (inside)
        return '';
    console.warn(`[warning] ${tool}: path "${resolvedPath}" is outside the working directory "${absCwd}".`);
    return `\n[WARNING] Path "${resolvedPath}" is OUTSIDE the working directory "${absCwd}". You MUST use relative paths and work within the project directory. Do NOT create or edit files outside the cwd.`;
}
1035
/**
 * Detect whether ripgrep is available: check the common /usr/bin/rg location
 * first, then fall back to a `command -v rg` probe through the shell PATH.
 */
async function hasRg() {
    try {
        await fs.access('/usr/bin/rg');
        return true;
    }
    catch {
        // Not at the well-known location — fall through to the PATH probe.
    }
    return new Promise((resolve) => {
        const probe = spawn(BASH_PATH, ['-lc', 'command -v rg >/dev/null 2>&1'], { stdio: 'ignore' });
        probe.on('error', () => resolve(false));
        probe.on('close', (code) => resolve(code === 0));
    });
}
1049
/**
 * Sørensen–Dice coefficient over character bigrams.
 * 0 = no shared bigrams, 1 = identical bigram multisets.
 * Strings shorter than 2 chars (both sides) compare by strict equality.
 */
function bigramSimilarity(a, b) {
    if (a.length < 2 && b.length < 2)
        return a === b ? 1 : 0;
    // Multiset of adjacent character pairs.
    const countBigrams = (s) => {
        const counts = new Map();
        for (let i = 0; i + 1 < s.length; i++) {
            const pair = s.slice(i, i + 2);
            counts.set(pair, (counts.get(pair) ?? 0) + 1);
        }
        return counts;
    };
    const left = countBigrams(a);
    const right = countBigrams(b);
    let shared = 0;
    for (const [pair, n] of left) {
        shared += Math.min(n, right.get(pair) ?? 0);
    }
    const total = (a.length - 1) + (b.length - 1);
    return total > 0 ? (2 * shared) / total : 0;
}
1070
/**
 * Minimal glob matcher used by search_files' include filter.
 * Supports exactly two forms: a literal filename match, and a single
 * leading-star extension pattern like '*.js' (matched as an endsWith).
 */
function globishMatch(name, glob) {
    if (name === glob)
        return true;
    const extPattern = /^\*\.(.+)$/.exec(glob);
    return extPattern ? name.endsWith(`.${extPattern[1]}`) : false;
}
1079
// ---------------------------------------------------------------------------
// Phase 9d: System memory helpers
// ---------------------------------------------------------------------------
/** Patterns that indicate system-modifying commands worth auto-noting. */
const SYS_CHANGE_PATTERNS = [
    /\b(apt|apt-get|dnf|yum|pacman|pip|npm)\s+(install|remove|purge|upgrade|update)\b/i, // package managers
    /\bsystemctl\s+(start|stop|restart|enable|disable)\b/i, // service state changes
    /\bufw\s+(allow|deny|delete|enable|disable)\b/i, // firewall (ufw)
    /\biptables\s+(-A|-I|-D)\b/i, // firewall (iptables rule edits)
    /\buseradd\b/i,
    /\buserdel\b/i,
    /\bcrontab\b/i,
];
/**
 * Auto-note significant system changes to Vault (sys mode only).
 * No-op when the command matches none of SYS_CHANGE_PATTERNS.
 * The note key is `sys:<first 80 chars of command>`; output is clipped to 200.
 */
async function autoNoteSysChange(vault, command, output) {
    if (!SYS_CHANGE_PATTERNS.some((p) => p.test(command)))
        return;
    const summary = output.length > 200 ? `${output.slice(0, 197)}...` : output;
    await vault.note(`sys:${command.slice(0, 80)}`, `Command: ${command}\nOutput: ${summary}`);
}
1101
/**
 * Snapshot a file's contents to Vault before editing (for /etc/ config tracking).
 * Only acts on paths under /etc/; the snippet is clipped to 500 chars.
 * Best-effort: any read or note failure is swallowed (file may not exist yet).
 */
export async function snapshotBeforeEdit(vault, filePath) {
    if (!filePath.startsWith('/etc/'))
        return;
    try {
        const current = await fs.readFile(filePath, 'utf8');
        const clipped = current.length > 500 ? `${current.slice(0, 497)}...` : current;
        await vault.note(`sys:pre-edit:${filePath}`, `Snapshot before edit:\n${clipped}`);
    }
    catch {
        // File doesn't exist yet or isn't readable — skip the snapshot silently.
    }
}
1114
+ //# sourceMappingURL=tools.js.map