vibehacker 4.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/tools.js ADDED
@@ -0,0 +1,588 @@
1
+ 'use strict';
2
+
3
+ const fs = require('fs').promises;
4
+ const fsSync = require('fs');
5
+ const path = require('path');
6
+ const { exec } = require('child_process');
7
+ const { promisify } = require('util');
8
+ const execAsync = promisify(exec);
9
+
10
// ── File State Management ────────────────────────────────────────────────────
// Tracks read files, file mtimes, and edit history for undo support.

const _fileState = {
  read: new Set(),     // absolute paths that have been read this session
  mtimes: new Map(),   // absolute path → mtime (ms) captured at last read
  edits: [],           // edit journal: [{ path, before, after, ts }]
  modified: new Set(), // absolute paths of files modified this session
};

// Remember that a file was read, optionally capturing its mtime so a later
// edit can detect that the file changed on disk in the meantime.
function markRead(absPath, mtime) {
  _fileState.read.add(absPath);
  if (mtime) {
    _fileState.mtimes.set(absPath, mtime);
  }
}

// Whether read_file has been called on this path during the session.
function hasBeenRead(absPath) {
  return _fileState.read.has(absPath);
}

// Append an entry to the edit journal (bounded to the 50 most recent entries)
// and flag the file as modified.
function recordEdit(absPath, before, after) {
  _fileState.edits.push({ path: absPath, before, after, ts: Date.now() });
  _fileState.modified.add(absPath);
  while (_fileState.edits.length > 50) {
    _fileState.edits.shift();
  }
}

// All files modified this session, as a fresh array.
function getModifiedFiles() {
  return Array.from(_fileState.modified);
}

// Most recent journal entry, or null when nothing has been edited yet.
function getLastEdit() {
  const { edits } = _fileState;
  return edits.length ? edits[edits.length - 1] : null;
}
36
+
37
// ── Tool Call Parser — extracts ALL tool calls from response ─────────────────

// Each entry pairs a global regex that recognizes one XML tool block with an
// `extract` function converting the regex match into an args object.
const TOOL_PATTERNS = [
  {
    name: 'read_file',
    regex: /<read_file>\s*<path>([\s\S]*?)<\/path>(?:\s*<offset>([\s\S]*?)<\/offset>)?(?:\s*<limit>([\s\S]*?)<\/limit>)?\s*<\/read_file>/g,
    extract(m) {
      const offset = m[2] ? parseInt(m[2].trim(), 10) : undefined;
      const limit = m[3] ? parseInt(m[3].trim(), 10) : undefined;
      return { path: m[1].trim(), offset, limit };
    },
  },
  {
    name: 'edit_file',
    regex: /<edit_file>\s*<path>([\s\S]*?)<\/path>\s*<old_string>([\s\S]*?)<\/old_string>\s*<new_string>([\s\S]*?)<\/new_string>(?:\s*<replace_all>([\s\S]*?)<\/replace_all>)?\s*<\/edit_file>/g,
    extract(m) {
      return {
        path: m[1].trim(),
        old_string: m[2],
        new_string: m[3],
        replace_all: m[4]?.trim() === 'true',
      };
    },
  },
  {
    name: 'write_file',
    regex: /<write_file>\s*<path>([\s\S]*?)<\/path>\s*<content>([\s\S]*?)<\/content>\s*<\/write_file>/g,
    extract(m) {
      return { path: m[1].trim(), content: m[2] };
    },
  },
  {
    name: 'execute_command',
    regex: /<execute_command>\s*<command>([\s\S]*?)<\/command>\s*<\/execute_command>/g,
    extract(m) {
      return { command: m[1].trim() };
    },
  },
  {
    name: 'glob',
    regex: /<glob>\s*<pattern>([\s\S]*?)<\/pattern>(?:\s*<path>([\s\S]*?)<\/path>)?\s*<\/glob>/g,
    extract(m) {
      return { pattern: m[1].trim(), path: m[2]?.trim() };
    },
  },
  {
    name: 'grep',
    regex: /<grep>\s*<pattern>([\s\S]*?)<\/pattern>(?:\s*<path>([\s\S]*?)<\/path>)?(?:\s*<file_pattern>([\s\S]*?)<\/file_pattern>)?\s*<\/grep>/g,
    extract(m) {
      return { pattern: m[1].trim(), path: m[2]?.trim(), filePattern: m[3]?.trim() };
    },
  },
  {
    name: 'list_files',
    regex: /<list_files>\s*<path>([\s\S]*?)<\/path>(?:\s*<recursive>([\s\S]*?)<\/recursive>)?\s*<\/list_files>/g,
    extract(m) {
      return { path: m[1].trim(), recursive: m[2]?.trim() === 'true' };
    },
  },
  {
    name: 'search_files',
    regex: /<search_files>\s*<path>([\s\S]*?)<\/path>\s*<pattern>([\s\S]*?)<\/pattern>(?:\s*<file_pattern>([\s\S]*?)<\/file_pattern>)?\s*<\/search_files>/g,
    extract(m) {
      return { path: m[1].trim(), pattern: m[2].trim(), filePattern: m[3]?.trim() };
    },
  },
  {
    name: 'create_directory',
    regex: /<create_directory>\s*<path>([\s\S]*?)<\/path>\s*<\/create_directory>/g,
    extract(m) {
      return { path: m[1].trim() };
    },
  },
  {
    name: 'delete_file',
    regex: /<delete_file>\s*<path>([\s\S]*?)<\/path>\s*<\/delete_file>/g,
    extract(m) {
      return { path: m[1].trim() };
    },
  },
];

// Export tool names for XmlStreamFilter sync
const TOOL_TAG_NAMES = TOOL_PATTERNS.map((p) => p.name);
94
+
95
// Parse every tool invocation present in `text`, de-duplicated by raw XML
// text and ordered by position of first appearance.
function parseToolCalls(text) {
  const seen = new Set();
  const calls = [];
  for (const pattern of TOOL_PATTERNS) {
    pattern.regex.lastIndex = 0; // global regexes are stateful — always rewind
    for (let m = pattern.regex.exec(text); m !== null; m = pattern.regex.exec(text)) {
      const raw = m[0];
      if (seen.has(raw)) continue;
      seen.add(raw);
      calls.push({ name: pattern.name, args: pattern.extract(m), raw, index: m.index });
    }
  }
  return calls.sort((a, b) => a.index - b.index);
}
110
+
111
// Convenience wrapper: the first tool call found in `text`, or null if none.
function parseToolCall(text) {
  const [first] = parseToolCalls(text);
  return first ?? null;
}
115
+
116
// ── Tool Execution ───────────────────────────────────────────────────────────

/**
 * Execute a single parsed tool call against the filesystem / shell.
 * @param {{name: string, args: Object}} toolCall - output of parseToolCalls().
 * @param {string} cwd - base directory; all relative paths resolve against it.
 * @returns {Promise<string>} result text for the model. Errors are returned as
 *   "[Error] ..." strings rather than thrown so the agent loop can recover.
 */
async function executeTool(toolCall, cwd) {
  const { name, args } = toolCall;
  const resolvePath = (p) => path.resolve(cwd, p);

  try {
    switch (name) {

      case 'read_file': {
        const fullPath = resolvePath(args.path);
        let stat;
        try { stat = await fs.stat(fullPath); } catch (e) {
          return `[Error] File not found: ${args.path}\nCheck the path and try again. Use glob or list_files to find the correct path.`;
        }
        // Record the read (and mtime) so edit_file can enforce read-before-edit
        // and detect concurrent on-disk modification.
        markRead(fullPath, stat.mtimeMs);

        const content = await fs.readFile(fullPath, 'utf8');
        let lines = content.split('\n');
        const totalLines = lines.length;

        // offset is 1-based from the caller; default limit shrinks for files
        // over 500KB so the whole file is not dumped into context.
        const offset = args.offset ? Math.max(0, args.offset - 1) : 0;
        const limit = args.limit || (stat.size > 500 * 1024 ? 300 : lines.length);

        if (offset > 0 || limit < lines.length) {
          lines = lines.slice(offset, offset + limit);
        }

        // Right-align line numbers like a pager.
        const pad = String(offset + lines.length).length;
        const numbered = lines.map((line, i) => `${String(offset + i + 1).padStart(pad)}│ ${line}`).join('\n');
        const rangeNote = (offset > 0 || limit < totalLines)
          ? ` | lines ${offset + 1}-${offset + lines.length} of ${totalLines}`
          : '';

        return `[File: ${args.path} | ${totalLines} lines | ${stat.size} bytes${rangeNote}]\n${numbered}`;
      }

      case 'edit_file': {
        const fullPath = resolvePath(args.path);

        // Check file exists
        let stat;
        try { stat = await fs.stat(fullPath); } catch (_) {
          return `[Error] File not found: ${args.path}\nUse write_file to create new files. Use glob to find existing files.`;
        }

        // Enforce read-before-edit
        if (!hasBeenRead(fullPath)) {
          return `[Error] Must read_file before editing: ${args.path}\nRead the file first so you can see its current contents and craft an accurate edit.`;
        }

        // Conflict detection — check if file changed since we last read it.
        // The 1s slack absorbs our own writes and coarse filesystem timestamps.
        const lastMtime = _fileState.mtimes.get(fullPath);
        if (lastMtime && stat.mtimeMs > lastMtime + 1000) {
          markRead(fullPath, stat.mtimeMs); // update mtime
          return `[Error] File changed on disk since last read: ${args.path}\nAnother process modified this file. Read it again to see the current contents.`;
        }

        const content = await fs.readFile(fullPath, 'utf8');

        if (args.old_string === args.new_string) {
          return `[Error] old_string and new_string are identical — no change needed.`;
        }

        if (args.replace_all) {
          // Split once: the segment count gives both the occurrence count and
          // the replacement (the original split the file twice).
          const segments = content.split(args.old_string);
          const count = segments.length - 1;
          if (count === 0) {
            return `[Error] old_string not found in ${args.path}\nThe text you're trying to replace doesn't exist. Read the file again — the content may have changed.`;
          }
          const newContent = segments.join(args.new_string);
          recordEdit(fullPath, content, newContent);
          await fs.writeFile(fullPath, newContent, 'utf8');
          markRead(fullPath, Date.now());
          return `[Edited: ${args.path} | replaced ${count} occurrence${count > 1 ? 's' : ''}]`;
        }

        // Single replacement — old_string must be unique in the file.
        const firstIdx = content.indexOf(args.old_string);
        if (firstIdx === -1) {
          // Help the agent recover
          const suggestion = args.old_string.length > 50
            ? 'Try using a shorter, more specific snippet from the file.'
            : 'Check whitespace, indentation (tabs vs spaces), and line endings.';
          return `[Error] old_string not found in ${args.path}\n${suggestion}\nRead the file again to see exact current contents.`;
        }
        const lastIdx = content.lastIndexOf(args.old_string);
        if (firstIdx !== lastIdx) {
          return `[Error] old_string matches ${content.split(args.old_string).length - 1} locations in ${args.path}\nInclude more surrounding context to make it unique, or use <replace_all>true</replace_all>.`;
        }

        const newContent = content.substring(0, firstIdx) + args.new_string + content.substring(firstIdx + args.old_string.length);
        recordEdit(fullPath, content, newContent);
        await fs.writeFile(fullPath, newContent, 'utf8');
        markRead(fullPath, Date.now());

        // Generate a unified diff of just the replaced span.
        const oldLines = args.old_string.split('\n');
        const newLines = args.new_string.split('\n');
        const editLineStart = content.substring(0, firstIdx).split('\n').length;

        let diff = `[Edited: ${args.path}]\n`;
        diff += `--- ${args.path}\n+++ ${args.path}\n`;
        diff += `@@ -${editLineStart},${oldLines.length} +${editLineStart},${newLines.length} @@\n`;
        for (const l of oldLines) diff += `- ${l}\n`;
        for (const l of newLines) diff += `+ ${l}\n`;

        return diff;
      }

      case 'write_file': {
        const fullPath = resolvePath(args.path);

        // Journal the previous contents of ANY existing file so the overwrite
        // can be undone. (Bug fix: previously only files that had NOT been
        // read were journaled, so overwrites of already-read files were
        // silently missing from the undo history.)
        let existed = false;
        try {
          await fs.stat(fullPath);
          existed = true;
          try {
            const old = await fs.readFile(fullPath, 'utf8');
            recordEdit(fullPath, old, args.content);
          } catch (_) {
            // Previous content unreadable (e.g. binary) — overwrite anyway.
          }
        } catch (_) {}

        await fs.mkdir(path.dirname(fullPath), { recursive: true });
        await fs.writeFile(fullPath, args.content, 'utf8');
        markRead(fullPath, Date.now());
        if (!existed) recordEdit(fullPath, null, args.content);

        const bytes = Buffer.byteLength(args.content, 'utf8');
        const lines = args.content.split('\n').length;
        return `[${existed ? 'Overwritten' : 'Created'}: ${args.path} | ${lines} lines | ${bytes} bytes]`;
      }

      case 'execute_command': {
        const startTime = Date.now();
        try {
          const { stdout, stderr } = await execAsync(args.command, {
            cwd,
            timeout: 120000,
            maxBuffer: 8 * 1024 * 1024,
            env: { ...process.env, FORCE_COLOR: '0', NO_COLOR: '1' }, // suppress ANSI color noise
          });
          const elapsed = ((Date.now() - startTime) / 1000).toFixed(1);
          let out = '';
          if (stdout) out += stdout;
          if (stderr) out += (out ? '\n[stderr]\n' : '[stderr]\n') + stderr;
          // Keep head and tail of very long output.
          if (out.length > 24000) {
            out = out.substring(0, 12000) + '\n\n[... truncated ...]\n\n' + out.substring(out.length - 8000);
          }
          return `[Command: ${args.command} | ${elapsed}s | exit 0]\n${out || '(no output)'}`;
        } catch (err) {
          const elapsed = ((Date.now() - startTime) / 1000).toFixed(1);
          let errOut = `[Command failed: ${args.command} | ${elapsed}s | exit ${err.code || 1}]\n`;
          if (err.killed) errOut += '[Timed out after 2 minutes]\n';
          if (err.stdout) errOut += err.stdout.substring(0, 12000);
          if (err.stderr) errOut += '\n[stderr]\n' + err.stderr.substring(0, 8000);
          if (!err.stdout && !err.stderr) errOut += err.message;
          return errOut.substring(0, 24000);
        }
      }

      case 'glob': {
        const searchPath = args.path ? resolvePath(args.path) : cwd;
        const results = await globSearch(searchPath, args.pattern);
        if (!results.length) return `[Glob: ${args.pattern} in ${args.path || '.'}]\nNo files matched. Try a broader pattern like **/* or check the path.`;
        const rel = results.map(f => path.relative(cwd, f));
        return `[Glob: ${args.pattern} | ${rel.length} files]\n${rel.join('\n')}`;
      }

      case 'grep': {
        const searchPath = args.path ? resolvePath(args.path) : cwd;
        const results = await grepSearch(searchPath, args.pattern, args.filePattern || '*', cwd);
        if (!results.length) return `[Grep: "${args.pattern}" in ${args.path || '.'}]\nNo matches. Try a different pattern or broader file filter.`;
        const truncNote = results.length >= 200 ? '\n[... results capped at 200]' : '';
        return `[Grep: "${args.pattern}" | ${results.length} matches]\n${results.slice(0, 200).join('\n')}${truncNote}`;
      }

      case 'list_files': {
        const fullPath = resolvePath(args.path);
        try { await fs.stat(fullPath); } catch (_) {
          return `[Error] Directory not found: ${args.path}`;
        }
        if (args.recursive) {
          const files = await walkDir(fullPath, cwd);
          return `[Directory: ${args.path} | ${files.length} entries]\n${files.join('\n')}`;
        }
        const entries = await fs.readdir(fullPath, { withFileTypes: true });
        const formatted = entries.map(e => `${e.isDirectory() ? '📁' : '📄'} ${e.name}`);
        return `[Directory: ${args.path} | ${entries.length} items]\n${formatted.join('\n')}`;
      }

      case 'search_files': {
        // Alias of grep (same engine, different header in the output).
        const fullPath = resolvePath(args.path);
        const results = await grepSearch(fullPath, args.pattern, args.filePattern || '*', cwd);
        if (!results.length) return `[Search: "${args.pattern}" in ${args.path}]\nNo matches found.`;
        return `[Search: "${args.pattern}" | ${results.length} matches]\n${results.slice(0, 200).join('\n')}`;
      }

      case 'create_directory': {
        const fullPath = resolvePath(args.path);
        await fs.mkdir(fullPath, { recursive: true });
        return `[Created: ${args.path}]`;
      }

      case 'delete_file': {
        const fullPath = resolvePath(args.path);
        let stat;
        try { stat = await fs.stat(fullPath); } catch (_) {
          return `[Error] Path not found: ${args.path}`;
        }
        if (stat.isDirectory()) {
          await fs.rm(fullPath, { recursive: true, force: true });
          return `[Deleted directory: ${args.path}]`;
        }
        // Journal the file contents so the deletion can be undone.
        try {
          const old = await fs.readFile(fullPath, 'utf8');
          recordEdit(fullPath, old, null);
        } catch (_) {}
        await fs.unlink(fullPath);
        return `[Deleted: ${args.path}]`;
      }

      default:
        return `[Error] Unknown tool: ${name}`;
    }
  } catch (err) {
    return `[Error: ${name}] ${err.message}\nIf this is unexpected, try a different approach.`;
  }
}
349
+
350
// ── Glob Search ──────────────────────────────────────────────────────────────

/**
 * Minimal glob: walks `dir` matching file NAMES against the last path segment
 * of `pattern` (directory segments other than `**` are ignored). A `**`
 * anywhere in the pattern enables unlimited recursion; otherwise the walk
 * descends one level only. Results are capped at ~500 files.
 * (Bug fix: the cap previously only aborted the current directory frame, so
 * ancestor frames kept scanning the whole tree after the cap was hit.)
 * @param {string} dir - absolute directory to search.
 * @param {string} pattern - glob pattern, e.g. "**\/*.js" or "*.json".
 * @returns {Promise<string[]>} absolute paths of matching files.
 */
async function globSearch(dir, pattern) {
  const results = [];
  const MAX_RESULTS = 500;
  const SKIP = new Set(['.git', 'node_modules', '.next', '__pycache__', '.venv', 'venv', 'env',
    'dist', 'build', '.cache', 'coverage', '.tox', '.mypy_cache', '.pytest_cache',
    'vendor', 'target', '.gradle', '.idea', '.vs', '.vscode']);

  const parts = pattern.replace(/\\/g, '/').split('/');
  const isDeep = parts.includes('**');
  const filePart = parts[parts.length - 1];

  async function walk(currentDir, depth) {
    if (results.length > MAX_RESULTS) return; // hard cap — stops every frame
    let entries;
    try { entries = await fs.readdir(currentDir, { withFileTypes: true }); } catch (_) { return; }
    for (const entry of entries) {
      if (results.length > MAX_RESULTS) return;
      if (SKIP.has(entry.name)) continue;
      // Hidden entries are skipped except a few commonly-edited dotfiles.
      if (entry.name.startsWith('.') && entry.name !== '.env' && entry.name !== '.gitignore' && entry.name !== '.eslintrc.js') continue;
      const full = path.join(currentDir, entry.name);
      if (entry.isDirectory()) {
        if (isDeep || depth === 0) await walk(full, depth + 1);
      } else if (matchGlob(entry.name, filePart)) {
        results.push(full);
      }
    }
  }
  await walk(dir, 0);
  return results;
}
380
+
381
// ── Grep Search — Parallel File I/O ──────────────────────────────────────────

// Extensions that are always binary; grep skips these without reading them.
const BINARY_EXTS = new Set([
  '.png', '.jpg', '.jpeg', '.gif', '.ico', '.bmp', '.webp', '.svg',
  '.woff', '.woff2', '.ttf', '.eot', '.otf', '.mp3', '.mp4', '.avi',
  '.mov', '.zip', '.gz', '.tar', '.rar', '.7z', '.pdf', '.exe', '.dll',
  '.so', '.dylib', '.o', '.obj', '.class', '.pyc', '.wasm', '.db',
  '.sqlite', '.lock',
]);
387
+
388
// Regex content search across files under `dir`. Walks the tree first (no
// reads), then reads and scans candidates in parallel batches of 8. Returns
// "rel:line: text" strings; collection stops once ~200 matches are found.
async function grepSearch(dir, pattern, filePattern, baseCwd) {
  const results = [];
  const SKIP = new Set(['.git', 'node_modules', '.next', '__pycache__', '.venv', 'dist', 'build', '.cache',
    'coverage', 'vendor', 'target', '.gradle', '.idea', '.vs']);

  if (typeof pattern !== 'string' || pattern.length > 300) return ['[grep: pattern too long]'];
  if (/(\([^)]*[+*][^)]*\))[+*]/.test(pattern)) return ['[grep: pattern rejected — ReDoS risk]'];
  // Validate the pattern once up front so a malformed regex fails fast.
  try {
    new RegExp(pattern, 'gi');
  } catch (e) {
    return [`[grep: invalid regex — ${e.message}]`];
  }

  // Phase 1: Collect all candidate file paths (fast walk, no reads)
  const candidates = [];
  async function gather(current) {
    let entries;
    try {
      entries = await fs.readdir(current, { withFileTypes: true });
    } catch (_) {
      return;
    }
    for (const entry of entries) {
      const hidden = entry.name.startsWith('.') && entry.name !== '.env';
      if (SKIP.has(entry.name) || hidden) continue;
      const fullPath = path.join(current, entry.name);
      if (entry.isDirectory()) {
        await gather(fullPath);
      } else if (matchGlob(entry.name, filePattern) && !BINARY_EXTS.has(path.extname(entry.name).toLowerCase())) {
        candidates.push(fullPath);
      }
      if (candidates.length > 2000) return; // cap file count
    }
  }
  await gather(dir);

  // Phase 2: Read and search files in parallel batches (8 concurrent)
  const scanOne = async (fullPath) => {
    try {
      const stat = await fs.stat(fullPath);
      if (stat.size > 3 * 1024 * 1024) return; // skip >3MB
      const content = await fs.readFile(fullPath, 'utf8');
      if (content.includes('\0')) return; // binary check
      const rel = path.relative(baseCwd || dir, fullPath);
      // Use fresh regex per file (avoid shared lastIndex issues)
      const fileRegex = new RegExp(pattern, 'gi');
      const lines = content.split('\n');
      for (let lineNo = 0; lineNo < lines.length; lineNo++) {
        const raw = lines[lineNo];
        const line = raw.length > 500 ? raw.substring(0, 500) : raw;
        if (fileRegex.test(line)) {
          results.push(`${rel}:${lineNo + 1}: ${line.trim().substring(0, 150)}`);
          fileRegex.lastIndex = 0;
          if (results.length >= 200) return;
        }
      }
    } catch (_) {}
  };
  const BATCH = 8;
  for (let i = 0; i < candidates.length && results.length < 200; i += BATCH) {
    await Promise.all(candidates.slice(i, i + BATCH).map(scanOne));
  }

  return results;
}
446
+
447
// ── Helpers ──────────────────────────────────────────────────────────────────

// Recursively list a directory as "📁 rel/" / "📄 rel" strings relative to
// `cwd`, skipping hidden entries (except .env) and common build directories.
// Output is soft-capped around 1000 entries with a truncation marker.
async function walkDir(dir, cwd) {
  let entries;
  try {
    entries = await fs.readdir(dir, { withFileTypes: true });
  } catch (_) {
    return [];
  }
  const SKIP = new Set(['.git', 'node_modules', '.next', '__pycache__', '.venv', 'dist', 'build', '.cache', 'coverage']);
  const results = [];
  for (const entry of entries) {
    const isHidden = entry.name.startsWith('.') && entry.name !== '.env';
    if (isHidden || SKIP.has(entry.name)) continue;
    const abs = path.join(dir, entry.name);
    const rel = path.relative(cwd, abs);
    if (entry.isDirectory()) {
      results.push(`📁 ${rel}/`);
      // Stop recursing into new subtrees once the listing is already large.
      if (results.length < 800) {
        results.push(...await walkDir(abs, cwd));
      }
    } else {
      results.push(`📄 ${rel}`);
    }
    if (results.length > 1000) {
      results.push('... (truncated)');
      break;
    }
  }
  return results;
}
468
+
469
+ function matchGlob(filename, pattern) {
470
+ if (!pattern || pattern === '*') return true;
471
+ if (pattern.startsWith('*.')) return filename.endsWith(pattern.slice(1));
472
+ const braceMatch = pattern.match(/^\*\.\{(.+)\}$/);
473
+ if (braceMatch) {
474
+ return braceMatch[1].split(',').some(e => filename.endsWith('.' + e.trim()));
475
+ }
476
+ return filename === pattern;
477
+ }
478
+
479
// ── Tool Documentation ──────────────────────────────────────────────────────

// Prompt text injected into the model's context describing the XML tool
// syntax. Runtime string — keep the tag names and argument elements in sync
// with TOOL_PATTERNS; do not edit wording without updating the parser.
const TOOL_DOCS = `
## Available Tools

Use XML blocks to interact with the filesystem and shell. You may use MULTIPLE tools in a single response.

### read_file
Read file contents with line numbers. Use offset/limit for large files.
\`\`\`xml
<read_file>
<path>src/main.js</path>
</read_file>
\`\`\`
Partial read:
\`\`\`xml
<read_file>
<path>src/main.js</path>
<offset>50</offset>
<limit>50</limit>
</read_file>
\`\`\`

### edit_file ← PREFERRED for modifying existing files
Surgical string replacement. MUST read_file first. old_string must be unique.
\`\`\`xml
<edit_file>
<path>src/main.js</path>
<old_string>const x = 1;</old_string>
<new_string>const x = 42;</new_string>
</edit_file>
\`\`\`
Replace all occurrences:
\`\`\`xml
<edit_file>
<path>src/main.js</path>
<old_string>oldName</old_string>
<new_string>newName</new_string>
<replace_all>true</replace_all>
</edit_file>
\`\`\`

### write_file
Create new files or complete rewrites only. Prefer edit_file for modifications.
\`\`\`xml
<write_file>
<path>src/new-file.js</path>
<content>
// complete file contents
</content>
</write_file>
\`\`\`

### execute_command
Run shell commands (2 min timeout). Non-interactive only.
\`\`\`xml
<execute_command>
<command>npm test</command>
</execute_command>
\`\`\`

### grep
Fast regex content search across files.
\`\`\`xml
<grep>
<pattern>function main</pattern>
<path>src</path>
<file_pattern>*.js</file_pattern>
</grep>
\`\`\`

### glob
Find files by pattern.
\`\`\`xml
<glob>
<pattern>**/*.js</pattern>
<path>src</path>
</glob>
\`\`\`

### list_files
List directory contents.
\`\`\`xml
<list_files>
<path>.</path>
<recursive>true</recursive>
</list_files>
\`\`\`

### search_files
Search file contents (alias for grep).

### create_directory
Create directories recursively.
\`\`\`xml
<create_directory>
<path>src/utils</path>
</create_directory>
\`\`\`

### delete_file
Delete a file or directory.
\`\`\`xml
<delete_file>
<path>temp/old-file.txt</path>
</delete_file>
\`\`\`
`;
587
+
588
// Public API: parser, executor, prompt docs, tag list, and session-state accessors.
module.exports = { parseToolCall, parseToolCalls, executeTool, TOOL_DOCS, TOOL_TAG_NAMES, getModifiedFiles, getLastEdit };