dev-mcp-server 0.0.2 → 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. package/.env.example +23 -55
  2. package/README.md +609 -219
  3. package/cli.js +486 -160
  4. package/package.json +2 -2
  5. package/src/agents/BaseAgent.js +113 -0
  6. package/src/agents/dreamer.js +165 -0
  7. package/src/agents/improver.js +175 -0
  8. package/src/agents/specialists.js +202 -0
  9. package/src/agents/taskDecomposer.js +176 -0
  10. package/src/agents/teamCoordinator.js +153 -0
  11. package/src/api/routes/agents.js +172 -0
  12. package/src/api/routes/extras.js +115 -0
  13. package/src/api/routes/git.js +72 -0
  14. package/src/api/routes/ingest.js +60 -40
  15. package/src/api/routes/knowledge.js +59 -41
  16. package/src/api/routes/memory.js +41 -0
  17. package/src/api/routes/newRoutes.js +168 -0
  18. package/src/api/routes/pipelines.js +41 -0
  19. package/src/api/routes/planner.js +54 -0
  20. package/src/api/routes/query.js +24 -0
  21. package/src/api/routes/sessions.js +54 -0
  22. package/src/api/routes/tasks.js +67 -0
  23. package/src/api/routes/tools.js +85 -0
  24. package/src/api/routes/v5routes.js +196 -0
  25. package/src/api/server.js +133 -5
  26. package/src/context/compactor.js +151 -0
  27. package/src/context/contextEngineer.js +181 -0
  28. package/src/context/contextVisualizer.js +140 -0
  29. package/src/core/conversationEngine.js +231 -0
  30. package/src/core/indexer.js +169 -143
  31. package/src/core/ingester.js +141 -126
  32. package/src/core/queryEngine.js +286 -236
  33. package/src/cron/cronScheduler.js +260 -0
  34. package/src/dashboard/index.html +1181 -0
  35. package/src/lsp/symbolNavigator.js +220 -0
  36. package/src/memory/memoryManager.js +186 -0
  37. package/src/memory/teamMemory.js +111 -0
  38. package/src/messaging/messageBus.js +177 -0
  39. package/src/monitor/proactiveMonitor.js +337 -0
  40. package/src/pipelines/pipelineEngine.js +230 -0
  41. package/src/planner/plannerEngine.js +202 -0
  42. package/src/plugins/builtin/stats-plugin.js +29 -0
  43. package/src/plugins/pluginManager.js +144 -0
  44. package/src/prompts/promptEngineer.js +289 -0
  45. package/src/sessions/sessionManager.js +166 -0
  46. package/src/skills/skillsManager.js +263 -0
  47. package/src/storage/store.js +127 -105
  48. package/src/tasks/taskManager.js +151 -0
  49. package/src/tools/BashTool.js +154 -0
  50. package/src/tools/FileEditTool.js +280 -0
  51. package/src/tools/GitTool.js +212 -0
  52. package/src/tools/GrepTool.js +199 -0
  53. package/src/tools/registry.js +1380 -0
  54. package/src/utils/costTracker.js +69 -0
  55. package/src/utils/fileParser.js +176 -153
  56. package/src/utils/llmClient.js +355 -206
  57. package/src/watcher/fileWatcher.js +137 -0
  58. package/src/worktrees/worktreeManager.js +176 -0
@@ -0,0 +1,1380 @@
1
+ 'use strict';
2
+ /**
3
+ * Single source of truth for every tool in the system.
4
+ * Each tool exports:
5
+ * - schema : Anthropic-compatible tool definition (name, description, input_schema)
6
+ * - execute : async (input) => string — called when the agent invokes the tool
7
+ * - group : string — tool category
8
+ * - safe : bool — whether it requires permission (false = always allowed)
9
+ */
10
+
11
+ const { execSync, exec } = require('child_process');
12
+ const { promisify } = require('util');
13
+ const execAsync = promisify(exec);
14
+ const fs = require('fs');
15
+ const path = require('path');
16
+ const os = require('os');
17
+ const http = require('http');
18
+ const https = require('https');
19
+ const logger = require('../utils/logger');
20
+
21
+ // ─── helpers ───────────────────────────────────────────────────────────────────
22
// Run a shell command and return combined stdout/stderr as one string.
// Never rejects: failures come back as an "[error exit N]" string so tool
// executors can hand the text straight to the agent.
//   cmd  - shell command line to execute
//   cwd  - working directory (defaults to process.cwd())
//   opts - { allowNonZero?: bool, timeout?: ms } — timeout was previously
//          hard-coded to 30 s; it is now overridable per call (the `bash`
//          tool already accepts a timeout input) while keeping 30 s default.
const run = async (cmd, cwd, opts = {}) => {
  try {
    const { stdout, stderr } = await execAsync(cmd, {
      cwd: cwd || process.cwd(),
      timeout: opts.timeout || 30000,
      maxBuffer: 4 * 1024 * 1024,
    });
    return (stdout || '') + (stderr ? `\n[stderr]: ${stderr}` : '');
  } catch (e) {
    // exit 1 from diff means "files differ" — that IS the output, not an error
    if (opts.allowNonZero || e.code === 1) return (e.stdout || '') + (e.stderr ? `\n[stderr]: ${e.stderr}` : '');
    return `[error exit ${e.code}]: ${e.stderr || e.stdout || e.message}`;
  }
};
32
+
33
+ // Safely parse JSON that may already be an object (API sends objects, not strings)
34
// Lenient JSON parse: API callers sometimes hand us an already-parsed object
// instead of a string. Returns a [value, error] pair — exactly one is non-null.
const safeParseJSON = (val) => {
  if (val == null) return [null, 'Value is null/undefined'];
  if (typeof val === 'object') return [val, null]; // nothing to parse
  if (typeof val !== 'string') return [null, `Expected string or object, got ${typeof val}`];
  try {
    return [JSON.parse(val), null];
  } catch (e) {
    return [null, e.message];
  }
};
40
+
41
// Minimal GET over Node's built-in http/https (no external deps).
// Resolves { status, headers, body } with the body capped at 8 KB;
// rejects on network error or when the socket times out.
const fetchUrl = (url, opts = {}) =>
  new Promise((resolve, reject) => {
    const client = url.startsWith('https') ? https : http;
    const req = client.get(
      url,
      { headers: { 'User-Agent': 'dev-mcp/4.0' }, timeout: opts.timeout || 10000 },
      (res) => {
        const chunks = [];
        res.on('data', (chunk) => chunks.push(chunk));
        res.on('end', () => {
          resolve({ status: res.statusCode, headers: res.headers, body: chunks.join('').slice(0, 8000) });
        });
      }
    );
    req.on('error', reject);
    req.on('timeout', () => {
      req.destroy();
      reject(new Error(`Timeout: ${url}`));
    });
  });
52
+
53
// Read a UTF-8 text file with two guards: it must exist and must be under
// 300 KB. Throws (rather than returning an error string) so the calling
// tool executor surfaces the failure.
const readFile = (fp) => {
  const abs = path.resolve(fp);
  if (!fs.existsSync(abs)) throw new Error(`File not found: ${abs}`);
  const { size } = fs.statSync(abs);
  if (size > 300 * 1024) throw new Error(`File too large: ${(size / 1024).toFixed(0)}KB`);
  return fs.readFileSync(abs, 'utf-8');
};
60
+
61
+ // ─── TOOL DEFINITIONS ──────────────────────────────────────────────────────────
62
+
63
+ const TOOLS = [
64
+
65
+ // ── 1. BASH ──────────────────────────────────────────────────────────────────
66
+ {
67
+ group: 'execution',
68
+ safe: false,
69
+ schema: {
70
+ name: 'bash',
71
+ description: 'Execute a shell command. Returns stdout + stderr. Use for running scripts, builds, tests.',
72
+ input_schema: {
73
+ type: 'object',
74
+ properties: {
75
+ command: { type: 'string', description: 'The shell command to run' },
76
+ cwd: { type: 'string', description: 'Working directory (optional)' },
77
+ timeout: { type: 'number', description: 'Timeout in ms (default 30000)' },
78
+ },
79
+ required: ['command'],
80
+ },
81
+ },
82
+ execute: async ({ command, cwd, timeout }) => {
83
+ const DANGEROUS = [/rm\s+-rf?\s+\//, /sudo\s+rm/, /mkfs\./, /dd\s+if=/, /:(){ :|:& };:/];
84
+ if (DANGEROUS.some(p => p.test(command))) return '[BLOCKED] Dangerous command pattern detected';
85
+ return run(command, cwd);
86
+ },
87
+ },
88
+
89
+ // ── 2. FILE READ ─────────────────────────────────────────────────────────────
90
+ {
91
+ group: 'files',
92
+ safe: true,
93
+ schema: {
94
+ name: 'file_read',
95
+ description: 'Read a file from disk. Returns contents. Supports line ranges.',
96
+ input_schema: {
97
+ type: 'object',
98
+ properties: {
99
+ path: { type: 'string', description: 'File path' },
100
+ start_line: { type: 'number', description: 'Start line (1-indexed, optional)' },
101
+ end_line: { type: 'number', description: 'End line (optional)' },
102
+ },
103
+ required: ['path'],
104
+ },
105
+ },
106
+ execute: async ({ path: fp, start_line, end_line }) => {
107
+ const content = readFile(fp);
108
+ if (!start_line && !end_line) return content;
109
+ const lines = content.split('\n');
110
+ return lines.slice((start_line || 1) - 1, end_line || lines.length).join('\n');
111
+ },
112
+ },
113
+
114
+ // ── 3. FILE WRITE ─────────────────────────────────────────────────────────────
115
+ {
116
+ group: 'files',
117
+ safe: false,
118
+ schema: {
119
+ name: 'file_write',
120
+ description: 'Write or overwrite a file. Creates directories if needed.',
121
+ input_schema: {
122
+ type: 'object',
123
+ properties: {
124
+ path: { type: 'string', description: 'File path to write' },
125
+ content: { type: 'string', description: 'File contents' },
126
+ backup: { type: 'boolean', description: 'Create .bak backup first (default true)' },
127
+ },
128
+ required: ['path', 'content'],
129
+ },
130
+ },
131
+ execute: async ({ path: fp, content, backup = true }) => {
132
+ const abs = path.resolve(fp);
133
+ fs.mkdirSync(path.dirname(abs), { recursive: true });
134
+ if (backup && fs.existsSync(abs)) fs.writeFileSync(abs + '.bak', fs.readFileSync(abs));
135
+ fs.writeFileSync(abs, content, 'utf-8');
136
+ return `Written: ${abs} (${content.length} chars)`;
137
+ },
138
+ },
139
+
140
+ // ── 4. FILE EDIT (str_replace) ────────────────────────────────────────────────
141
+ {
142
+ group: 'files',
143
+ safe: false,
144
+ schema: {
145
+ name: 'file_edit',
146
+ description: 'Apply a precise string replacement in a file. oldStr must be unique in the file.',
147
+ input_schema: {
148
+ type: 'object',
149
+ properties: {
150
+ path: { type: 'string' },
151
+ old_str: { type: 'string', description: 'Exact string to find and replace (must be unique)' },
152
+ new_str: { type: 'string', description: 'Replacement string' },
153
+ },
154
+ required: ['path', 'old_str', 'new_str'],
155
+ },
156
+ },
157
+ execute: async ({ path: fp, old_str, new_str }) => {
158
+ const FileEditTool = require('./FileEditTool');
159
+ const r = await FileEditTool.strReplace(fp, old_str, new_str, { backup: true });
160
+ return r.diff?.summary || 'Edit applied';
161
+ },
162
+ },
163
+
164
+ // ── 5. FILE DELETE ────────────────────────────────────────────────────────────
165
+ {
166
+ group: 'files',
167
+ safe: false,
168
+ schema: {
169
+ name: 'file_delete',
170
+ description: 'Delete a file (moves to .bak first for safety).',
171
+ input_schema: {
172
+ type: 'object',
173
+ properties: { path: { type: 'string' } },
174
+ required: ['path'],
175
+ },
176
+ },
177
+ execute: async ({ path: fp }) => {
178
+ const abs = path.resolve(fp);
179
+ if (!fs.existsSync(abs)) return 'File not found';
180
+ fs.renameSync(abs, abs + '.deleted_' + Date.now());
181
+ return `Deleted (backed up): ${abs}`;
182
+ },
183
+ },
184
+
185
+ // ── 6. DIRECTORY LIST ─────────────────────────────────────────────────────────
186
+ {
187
+ group: 'files',
188
+ safe: true,
189
+ schema: {
190
+ name: 'dir_list',
191
+ description: 'List files and directories. Respects .gitignore patterns.',
192
+ input_schema: {
193
+ type: 'object',
194
+ properties: {
195
+ path: { type: 'string', description: 'Directory path (default: cwd)' },
196
+ recursive: { type: 'boolean', description: 'List recursively' },
197
+ pattern: { type: 'string', description: 'Glob pattern filter (e.g. *.js)' },
198
+ },
199
+ required: [],
200
+ },
201
+ },
202
+ execute: async ({ path: dirPath = '.', recursive = false, pattern }) => {
203
+ const abs = path.resolve(dirPath);
204
+ if (!fs.existsSync(abs)) return `Directory not found: ${abs}`;
205
+ const flag = recursive ? '-R' : '';
206
+ const cmd = pattern ? `find ${abs} -name "${pattern}" | head -100` : `ls ${flag} ${abs} | head -200`;
207
+ return run(cmd);
208
+ },
209
+ },
210
+
211
+ // ── 7. GREP ───────────────────────────────────────────────────────────────────
212
+ {
213
+ group: 'search',
214
+ safe: true,
215
+ schema: {
216
+ name: 'grep',
217
+ description: 'Search for a pattern across files using ripgrep (or grep fallback).',
218
+ input_schema: {
219
+ type: 'object',
220
+ properties: {
221
+ pattern: { type: 'string', description: 'Search pattern (regex or literal)' },
222
+ path: { type: 'string', description: 'Directory to search (default: cwd)' },
223
+ glob: { type: 'string', description: 'File glob (e.g. "*.js")' },
224
+ ignore_case: { type: 'boolean' },
225
+ max_results: { type: 'number', description: 'Max results (default 30)' },
226
+ },
227
+ required: ['pattern'],
228
+ },
229
+ },
230
+ execute: async ({ pattern, path: p = '.', glob, ignore_case, max_results = 30 }) => {
231
+ const GrepTool = require('./GrepTool');
232
+ const result = await GrepTool.search(pattern, { cwd: p, glob, ignoreCase: ignore_case, maxResults: max_results });
233
+ return result.matches.map(m => `${m.file}:${m.lineNumber}: ${m.line.trim()}`).join('\n') || 'No matches';
234
+ },
235
+ },
236
+
237
+ // ── 8. FIND FILES ─────────────────────────────────────────────────────────────
238
+ {
239
+ group: 'search',
240
+ safe: true,
241
+ schema: {
242
+ name: 'find_files',
243
+ description: 'Find files by name pattern or extension.',
244
+ input_schema: {
245
+ type: 'object',
246
+ properties: {
247
+ pattern: { type: 'string', description: 'Filename pattern (e.g. "*.test.js", "config*")' },
248
+ path: { type: 'string', description: 'Root directory' },
249
+ type: { type: 'string', enum: ['file', 'dir', 'any'], description: 'Type filter' },
250
+ },
251
+ required: ['pattern'],
252
+ },
253
+ },
254
+ execute: async ({ pattern, path: p = '.', type = 'file' }) => {
255
+ const typeFlag = type === 'file' ? '-type f' : type === 'dir' ? '-type d' : '';
256
+ return run(`find ${path.resolve(p)} ${typeFlag} -name "${pattern}" 2>/dev/null | head -50`);
257
+ },
258
+ },
259
+
260
+ // ── 9. GIT STATUS ─────────────────────────────────────────────────────────────
261
+ {
262
+ group: 'git',
263
+ safe: true,
264
+ schema: {
265
+ name: 'git_status',
266
+ description: 'Get git status: current branch, staged/unstaged changes, recent commits.',
267
+ input_schema: { type: 'object', properties: { cwd: { type: 'string' } }, required: [] },
268
+ },
269
+ execute: async ({ cwd }) => {
270
+ const GitTool = require('./GitTool');
271
+ const s = await GitTool.status(cwd);
272
+ return JSON.stringify(s, null, 2);
273
+ },
274
+ },
275
+
276
+ // ── 10. GIT DIFF ──────────────────────────────────────────────────────────────
277
+ {
278
+ group: 'git',
279
+ safe: true,
280
+ schema: {
281
+ name: 'git_diff',
282
+ description: 'Show git diff for staged or unstaged changes.',
283
+ input_schema: {
284
+ type: 'object',
285
+ properties: {
286
+ staged: { type: 'boolean', description: 'Show staged diff (default: unstaged)' },
287
+ file: { type: 'string', description: 'Specific file path (optional)' },
288
+ cwd: { type: 'string' },
289
+ },
290
+ required: [],
291
+ },
292
+ },
293
+ execute: async ({ staged = false, file, cwd }) => {
294
+ const GitTool = require('./GitTool');
295
+ const r = await GitTool.diff({ staged, file, cwd });
296
+ return r.diff || 'No changes';
297
+ },
298
+ },
299
+
300
+ // ── 11. GIT LOG ───────────────────────────────────────────────────────────────
301
+ {
302
+ group: 'git',
303
+ safe: true,
304
+ schema: {
305
+ name: 'git_log',
306
+ description: 'Show recent git commit log.',
307
+ input_schema: {
308
+ type: 'object',
309
+ properties: {
310
+ limit: { type: 'number', description: 'Number of commits (default 10)' },
311
+ file: { type: 'string', description: 'Filter by file' },
312
+ cwd: { type: 'string' },
313
+ },
314
+ required: [],
315
+ },
316
+ },
317
+ execute: async ({ limit = 10, file, cwd }) => {
318
+ const GitTool = require('./GitTool');
319
+ const commits = await GitTool.log({ limit, file, oneline: true, cwd });
320
+ return commits.join('\n');
321
+ },
322
+ },
323
+
324
+ // ── 12. GIT COMMIT ────────────────────────────────────────────────────────────
325
+ {
326
+ group: 'git',
327
+ safe: false,
328
+ schema: {
329
+ name: 'git_commit',
330
+ description: 'Stage files and create a commit. Leave message empty for AI-generated message.',
331
+ input_schema: {
332
+ type: 'object',
333
+ properties: {
334
+ message: { type: 'string', description: 'Commit message (optional, AI will generate if omitted)' },
335
+ files: { type: 'array', items: { type: 'string' }, description: 'Files to stage (default ["."])' },
336
+ cwd: { type: 'string' },
337
+ },
338
+ required: [],
339
+ },
340
+ },
341
+ execute: async ({ message, files = ['.'], cwd }) => {
342
+ const GitTool = require('./GitTool');
343
+ const r = await GitTool.commit({ message, files, autoMessage: !message, cwd });
344
+ return r.success ? `Committed: "${r.message}"` : r.message;
345
+ },
346
+ },
347
+
348
+ // ── 13. GIT BRANCHES ──────────────────────────────────────────────────────────
349
+ {
350
+ group: 'git',
351
+ safe: true,
352
+ schema: {
353
+ name: 'git_branches',
354
+ description: 'List all git branches.',
355
+ input_schema: { type: 'object', properties: { cwd: { type: 'string' } }, required: [] },
356
+ },
357
+ execute: async ({ cwd }) => {
358
+ const GitTool = require('./GitTool');
359
+ const branches = await GitTool.branches(cwd);
360
+ return branches.map(b => `${b.current ? '* ' : ' '}${b.name}`).join('\n');
361
+ },
362
+ },
363
+
364
+ // ── 14. HTTP REQUEST ──────────────────────────────────────────────────────────
365
+ {
366
+ group: 'network',
367
+ safe: true,
368
+ schema: {
369
+ name: 'http_request',
370
+ description: 'Make an HTTP/HTTPS GET request and return the response body.',
371
+ input_schema: {
372
+ type: 'object',
373
+ properties: {
374
+ url: { type: 'string', description: 'Full URL including protocol' },
375
+ timeout: { type: 'number', description: 'Timeout in ms (default 10000)' },
376
+ },
377
+ required: ['url'],
378
+ },
379
+ },
380
+ execute: async ({ url, timeout }) => {
381
+ const r = await fetchUrl(url, { timeout });
382
+ return `Status: ${r.status}\n\n${r.body}`;
383
+ },
384
+ },
385
+
386
+ // ── 15. JSON PARSE / QUERY ────────────────────────────────────────────────────
387
+ {
388
+ group: 'data',
389
+ safe: true,
390
+ schema: {
391
+ name: 'json_query',
392
+ description: 'Parse JSON and extract a value using a dot-path (e.g. "data.users.0.name").',
393
+ input_schema: {
394
+ type: 'object',
395
+ properties: {
396
+ json: { type: 'string', description: 'JSON string or file path' },
397
+ path: { type: 'string', description: 'Dot-path query (e.g. "users.0.name"). Empty = pretty-print all.' },
398
+ file: { type: 'string', description: 'Alternatively, path to a JSON file' },
399
+ },
400
+ required: [],
401
+ },
402
+ },
403
+ execute: async ({ json, path: query, file }) => {
404
+ let data;
405
+ if (file) json = readFile(file);
406
+ const [parsed, err] = safeParseJSON(json);
407
+ if (err) return `Invalid JSON: ${err}`;
408
+ data = parsed;
409
+ if (!query) return JSON.stringify(data, null, 2).slice(0, 4000);
410
+ const parts = query.split('.');
411
+ let cur = data;
412
+ for (const p of parts) {
413
+ if (cur == null) return 'undefined';
414
+ cur = cur[p] ?? cur[parseInt(p)];
415
+ }
416
+ return typeof cur === 'object' ? JSON.stringify(cur, null, 2) : String(cur);
417
+ },
418
+ },
419
+
420
+ // ── 16. JSON TRANSFORM ────────────────────────────────────────────────────────
421
+ {
422
+ group: 'data',
423
+ safe: true,
424
+ schema: {
425
+ name: 'json_transform',
426
+ description: 'Transform JSON data: filter, map, sort, pick fields.',
427
+ input_schema: {
428
+ type: 'object',
429
+ properties: {
430
+ json: { type: 'string', description: 'JSON array string' },
431
+ filter: { type: 'string', description: 'Filter expression on item (e.g. "item.age > 18")' },
432
+ fields: { type: 'array', items: { type: 'string' }, description: 'Fields to pick' },
433
+ sort: { type: 'string', description: 'Field to sort by' },
434
+ limit: { type: 'number', description: 'Max items to return' },
435
+ },
436
+ required: ['json'],
437
+ },
438
+ },
439
+ execute: async ({ json, filter, fields, sort, limit }) => {
440
+ const [parsed, err] = safeParseJSON(json);
441
+ if (err) return `Invalid JSON: ${err}`;
442
+ let arr = parsed;
443
+ if (!Array.isArray(arr)) return 'Input must be a JSON array';
444
+ if (filter) {
445
+ try { arr = arr.filter(item => eval(`(item => ${filter})(item)`)); } catch { }
446
+ }
447
+ if (sort) arr = arr.sort((a, b) => a[sort] > b[sort] ? 1 : -1);
448
+ if (limit) arr = arr.slice(0, limit);
449
+ if (fields?.length) arr = arr.map(item => Object.fromEntries(fields.map(f => [f, item[f]])));
450
+ return JSON.stringify(arr, null, 2).slice(0, 4000);
451
+ },
452
+ },
453
+
454
+ // ── 17. ENV READ ──────────────────────────────────────────────────────────────
455
+ {
456
+ group: 'config',
457
+ safe: true,
458
+ schema: {
459
+ name: 'env_read',
460
+ description: 'Read environment variables or .env file contents (masks secrets).',
461
+ input_schema: {
462
+ type: 'object',
463
+ properties: {
464
+ file: { type: 'string', description: '.env file path (optional)' },
465
+ key: { type: 'string', description: 'Specific key to read (optional)' },
466
+ mask: { type: 'boolean', description: 'Mask secret values (default true)' },
467
+ },
468
+ required: [],
469
+ },
470
+ },
471
+ execute: async ({ file, key, mask = true }) => {
472
+ if (key) {
473
+ const v = process.env[key];
474
+ return v ? (mask && (key.toLowerCase().includes('key') || key.toLowerCase().includes('secret') || key.toLowerCase().includes('token')) ? v.slice(0, 6) + '***' : v) : 'Not set';
475
+ }
476
+ const source = file ? readFile(file) : Object.entries(process.env).map(([k, v]) => `${k}=${v}`).join('\n');
477
+ const maskFn = mask ? (line) => {
478
+ const [k, ...rest] = line.split('=');
479
+ const v = rest.join('=');
480
+ const isSensitive = /key|secret|token|pass|pwd|api/i.test(k);
481
+ return isSensitive ? `${k}=${v.slice(0, 4)}***` : line;
482
+ } : (l) => l;
483
+ return source.split('\n').map(maskFn).filter(l => l && !l.startsWith('#')).join('\n');
484
+ },
485
+ },
486
+
487
+ // ── 18. NPM / PACKAGE ─────────────────────────────────────────────────────────
488
+ {
489
+ group: 'package',
490
+ safe: true,
491
+ schema: {
492
+ name: 'npm_info',
493
+ description: 'Get npm package info: list deps, check outdated, audit for vulnerabilities.',
494
+ input_schema: {
495
+ type: 'object',
496
+ properties: {
497
+ action: { type: 'string', enum: ['list', 'outdated', 'audit', 'info'], description: 'Action to perform' },
498
+ package: { type: 'string', description: 'Package name (for "info" action)' },
499
+ cwd: { type: 'string' },
500
+ },
501
+ required: ['action'],
502
+ },
503
+ },
504
+ execute: async ({ action, package: pkg, cwd }) => {
505
+ const cmds = {
506
+ list: 'npm list --depth=0 2>/dev/null',
507
+ outdated: 'npm outdated --json 2>/dev/null || echo "{}"',
508
+ audit: 'npm audit --json 2>/dev/null | head -100',
509
+ info: pkg ? `npm show ${pkg} description version homepage` : 'npm show --help',
510
+ };
511
+ return run(cmds[action] || `npm ${action}`, cwd);
512
+ },
513
+ },
514
+
515
+ // ── 19. RUN TESTS ─────────────────────────────────────────────────────────────
516
+ {
517
+ group: 'testing',
518
+ safe: true,
519
+ schema: {
520
+ name: 'run_tests',
521
+ description: 'Run test suite (Jest, Mocha, etc). Can target specific files or patterns.',
522
+ input_schema: {
523
+ type: 'object',
524
+ properties: {
525
+ pattern: { type: 'string', description: 'Test file pattern or name (optional)' },
526
+ framework: { type: 'string', enum: ['jest', 'mocha', 'auto'], description: 'Test framework' },
527
+ coverage: { type: 'boolean', description: 'Include coverage report' },
528
+ cwd: { type: 'string' },
529
+ },
530
+ required: [],
531
+ },
532
+ },
533
+ execute: async ({ pattern, framework = 'auto', coverage, cwd }) => {
534
+ let cmd;
535
+ if (framework === 'auto') {
536
+ const pkgPath = path.join(cwd || process.cwd(), 'package.json');
537
+ const pkg = fs.existsSync(pkgPath) ? JSON.parse(fs.readFileSync(pkgPath, 'utf-8')) : {};
538
+ const hasJest = pkg.dependencies?.jest || pkg.devDependencies?.jest || pkg.scripts?.test?.includes('jest');
539
+ const hasMocha = pkg.devDependencies?.mocha || pkg.scripts?.test?.includes('mocha');
540
+ framework = hasJest ? 'jest' : hasMocha ? 'mocha' : 'jest';
541
+ }
542
+ const coverageFlag = coverage ? (framework === 'jest' ? '--coverage' : '--reporter html') : '';
543
+ const patternFlag = pattern ? (framework === 'jest' ? `"${pattern}"` : `--grep "${pattern}"`) : '';
544
+ cmd = `npx ${framework} ${patternFlag} ${coverageFlag} --passWithNoTests 2>&1 | tail -50`;
545
+ return run(cmd, cwd);
546
+ },
547
+ },
548
+
549
+ // ── 20. LINT ──────────────────────────────────────────────────────────────────
550
+ {
551
+ group: 'code-quality',
552
+ safe: true,
553
+ schema: {
554
+ name: 'lint',
555
+ description: 'Run ESLint on files. Returns errors and warnings.',
556
+ input_schema: {
557
+ type: 'object',
558
+ properties: {
559
+ path: { type: 'string', description: 'File or directory to lint' },
560
+ fix: { type: 'boolean', description: 'Auto-fix fixable issues' },
561
+ format: { type: 'string', enum: ['compact', 'json', 'stylish'], description: 'Output format' },
562
+ cwd: { type: 'string' },
563
+ },
564
+ required: [],
565
+ },
566
+ },
567
+ execute: async ({ path: p = '.', fix, format = 'compact', cwd }) => {
568
+ const fixFlag = fix ? '--fix' : '';
569
+ return run(`npx eslint ${p} ${fixFlag} --format ${format} 2>&1 | head -80`, cwd);
570
+ },
571
+ },
572
+
573
+ // ── 21. FORMAT CODE ───────────────────────────────────────────────────────────
574
+ {
575
+ group: 'code-quality',
576
+ safe: false,
577
+ schema: {
578
+ name: 'format_code',
579
+ description: 'Format code with Prettier. Returns formatted output or writes in place.',
580
+ input_schema: {
581
+ type: 'object',
582
+ properties: {
583
+ path: { type: 'string', description: 'File to format' },
584
+ write: { type: 'boolean', description: 'Write to file (default false = print only)' },
585
+ parser: { type: 'string', description: 'Parser: babel, typescript, json, markdown, etc.' },
586
+ },
587
+ required: ['path'],
588
+ },
589
+ },
590
+ execute: async ({ path: p, write, parser }) => {
591
+ const writeFlag = write ? '--write' : '--check';
592
+ const parserFlag = parser ? `--parser ${parser}` : '';
593
+ return run(`npx prettier ${writeFlag} ${parserFlag} "${p}" 2>&1 | head -30`);
594
+ },
595
+ },
596
+
597
+ // ── 22. REGEX TEST ────────────────────────────────────────────────────────────
598
+ {
599
+ group: 'data',
600
+ safe: true,
601
+ schema: {
602
+ name: 'regex_test',
603
+ description: 'Test a regex pattern against input text. Returns matches and groups.',
604
+ input_schema: {
605
+ type: 'object',
606
+ properties: {
607
+ pattern: { type: 'string', description: 'Regex pattern (without delimiters)' },
608
+ input: { type: 'string', description: 'Input text to match against' },
609
+ flags: { type: 'string', description: 'Regex flags (g, i, m, s)' },
610
+ all_matches: { type: 'boolean', description: 'Return all matches (default first only)' },
611
+ },
612
+ required: ['pattern', 'input'],
613
+ },
614
+ },
615
+ execute: async ({ pattern, input, flags = 'gm', all_matches = true }) => {
616
+ const regex = new RegExp(pattern, flags);
617
+ if (all_matches) {
618
+ const matches = [...input.matchAll(new RegExp(pattern, flags.includes('g') ? flags : flags + 'g'))];
619
+ if (!matches.length) return 'No matches';
620
+ return matches.slice(0, 20).map((m, i) => `Match ${i + 1}: "${m[0]}"${m.slice(1).length ? ' groups: ' + JSON.stringify(m.slice(1)) : ''} at index ${m.index}`).join('\n');
621
+ }
622
+ const m = input.match(regex);
623
+ return m ? `Match: "${m[0]}"${m.slice(1).length ? '\nGroups: ' + JSON.stringify(m.slice(1)) : ''}` : 'No match';
624
+ },
625
+ },
626
+
627
+ // ── 23. CRYPTO / HASH ─────────────────────────────────────────────────────────
628
+ {
629
+ group: 'data',
630
+ safe: true,
631
+ schema: {
632
+ name: 'crypto_hash',
633
+ description: 'Hash, encode, or decode data. Supports md5, sha256, sha512, base64.',
634
+ input_schema: {
635
+ type: 'object',
636
+ properties: {
637
+ algorithm: { type: 'string', enum: ['md5', 'sha256', 'sha512', 'sha1', 'base64-encode', 'base64-decode', 'hex'] },
638
+ input: { type: 'string', description: 'Input string' },
639
+ },
640
+ required: ['algorithm', 'input'],
641
+ },
642
+ },
643
+ execute: async ({ algorithm, input }) => {
644
+ const crypto = require('crypto');
645
+ if (algorithm.startsWith('base64')) {
646
+ return algorithm === 'base64-encode' ? Buffer.from(input).toString('base64') : Buffer.from(input, 'base64').toString('utf-8');
647
+ }
648
+ if (algorithm === 'hex') return Buffer.from(input).toString('hex');
649
+ return crypto.createHash(algorithm).update(input).digest('hex');
650
+ },
651
+ },
652
+
653
+ // ── 24. DATE / TIME ───────────────────────────────────────────────────────────
654
+ {
655
+ group: 'data',
656
+ safe: true,
657
+ schema: {
658
+ name: 'datetime',
659
+ description: 'Date/time operations: format, parse, calculate differences, timezone convert.',
660
+ input_schema: {
661
+ type: 'object',
662
+ properties: {
663
+ action: { type: 'string', enum: ['now', 'format', 'diff', 'add', 'parse'] },
664
+ date: { type: 'string', description: 'Date string (ISO 8601 or natural)' },
665
+ date2: { type: 'string', description: 'Second date for diff' },
666
+ format: { type: 'string', description: 'Output format (ISO, locale, unix, relative)' },
667
+ amount: { type: 'number', description: 'Amount for add/subtract' },
668
+ unit: { type: 'string', description: 'Unit: days, hours, minutes, months, years' },
669
+ },
670
+ required: ['action'],
671
+ },
672
+ },
673
+ execute: async ({ action, date, date2, format = 'ISO', amount, unit }) => {
674
+ const d1 = date ? new Date(date) : new Date();
675
+ const fmtDate = (d) => {
676
+ if (format === 'unix') return Math.floor(d.getTime() / 1000).toString();
677
+ if (format === 'locale') return d.toLocaleString();
678
+ if (format === 'relative') {
679
+ const diff = Math.abs(Date.now() - d.getTime());
680
+ const mins = Math.floor(diff / 60000);
681
+ if (mins < 60) return `${mins} minutes ago`;
682
+ if (mins < 1440) return `${Math.floor(mins / 60)} hours ago`;
683
+ return `${Math.floor(mins / 1440)} days ago`;
684
+ }
685
+ return d.toISOString();
686
+ };
687
+ if (action === 'now') return fmtDate(new Date());
688
+ if (action === 'format') return fmtDate(d1);
689
+ if (action === 'parse') return JSON.stringify({ iso: d1.toISOString(), unix: Math.floor(d1.getTime() / 1000), valid: !isNaN(d1) });
690
+ if (action === 'diff') { const diff = Math.abs(new Date(date2) - d1); return `${Math.floor(diff / 86400000)} days, ${Math.floor((diff % 86400000) / 3600000)} hours`; }
691
+ if (action === 'add') {
692
+ const units = { minutes: 60000, hours: 3600000, days: 86400000, weeks: 604800000, months: 30 * 86400000, years: 365 * 86400000 };
693
+ return fmtDate(new Date(d1.getTime() + amount * (units[unit] || 86400000)));
694
+ }
695
+ return fmtDate(d1);
696
+ },
697
+ },
698
+
699
+ // ── 25. SYSTEM METRICS ────────────────────────────────────────────────────────
700
+ {
701
+ group: 'system',
702
+ safe: true,
703
+ schema: {
704
+ name: 'system_info',
705
+ description: 'Get system information: CPU, memory, disk usage, Node.js runtime info.',
706
+ input_schema: {
707
+ type: 'object',
708
+ properties: {
709
+ metric: { type: 'string', enum: ['all', 'cpu', 'memory', 'disk', 'node', 'processes'] },
710
+ },
711
+ required: [],
712
+ },
713
+ },
714
+ execute: async ({ metric = 'all' }) => {
715
+ const info = {};
716
+ if (metric === 'all' || metric === 'memory') {
717
+ info.memory = { total: `${(os.totalmem() / 1e9).toFixed(1)}GB`, free: `${(os.freemem() / 1e9).toFixed(1)}GB`, used: `${((os.totalmem() - os.freemem()) / 1e9).toFixed(1)}GB` };
718
+ }
719
+ if (metric === 'all' || metric === 'cpu') {
720
+ info.cpu = { model: os.cpus()[0]?.model, cores: os.cpus().length, loadAvg: os.loadavg().map(l => l.toFixed(2)) };
721
+ }
722
+ if (metric === 'all' || metric === 'node') {
723
+ info.node = { version: process.version, uptime: `${(process.uptime() / 60).toFixed(0)}min`, memory: process.memoryUsage() };
724
+ }
725
+ if (metric === 'all' || metric === 'disk') {
726
+ info.disk = await run('df -h / | tail -1');
727
+ }
728
+ if (metric === 'processes') {
729
+ return run('ps aux --sort=-%cpu | head -15');
730
+ }
731
+ return JSON.stringify(info, null, 2);
732
+ },
733
+ },
734
+
735
+ // ── 26. NETWORK CHECK ─────────────────────────────────────────────────────────
736
+ {
737
+ group: 'network',
738
+ safe: true,
739
+ schema: {
740
+ name: 'network_check',
741
+ description: 'Network diagnostics: ping, port check, DNS lookup.',
742
+ input_schema: {
743
+ type: 'object',
744
+ properties: {
745
+ action: { type: 'string', enum: ['ping', 'port', 'dns', 'trace'] },
746
+ host: { type: 'string' },
747
+ port: { type: 'number' },
748
+ },
749
+ required: ['action', 'host'],
750
+ },
751
+ },
752
+ execute: async ({ action, host, port }) => {
753
+ const cmds = {
754
+ ping: `ping -c 3 ${host} 2>&1 | tail -5`,
755
+ port: `nc -zv ${host} ${port || 80} 2>&1`,
756
+ dns: `nslookup ${host} 2>&1 | head -10`,
757
+ trace: `traceroute -m 10 ${host} 2>&1 | head -15`,
758
+ };
759
+ return run(cmds[action] || `ping -c 1 ${host}`);
760
+ },
761
+ },
762
+
763
+ // ── 27. DOCKER ────────────────────────────────────────────────────────────────
764
+ {
765
+ group: 'infrastructure',
766
+ safe: true,
767
+ schema: {
768
+ name: 'docker',
769
+ description: 'Docker container management: list, logs, exec, inspect.',
770
+ input_schema: {
771
+ type: 'object',
772
+ properties: {
773
+ action: { type: 'string', enum: ['ps', 'logs', 'exec', 'inspect', 'images', 'stats'] },
774
+ container: { type: 'string', description: 'Container name or ID' },
775
+ command: { type: 'string', description: 'Command for exec action' },
776
+ lines: { type: 'number', description: 'Lines for logs (default 50)' },
777
+ },
778
+ required: ['action'],
779
+ },
780
+ },
781
+ execute: async ({ action, container, command, lines = 50 }) => {
782
+ const cmds = {
783
+ ps: 'docker ps --format "table {{.Names}}\t{{.Status}}\t{{.Ports}}" 2>&1',
784
+ images: 'docker images --format "table {{.Repository}}\t{{.Tag}}\t{{.Size}}" 2>&1',
785
+ stats: 'docker stats --no-stream --format "table {{.Name}}\t{{.CPUPerc}}\t{{.MemUsage}}" 2>&1',
786
+ logs: container ? `docker logs --tail ${lines} ${container} 2>&1` : 'Specify container',
787
+ exec: container && command ? `docker exec ${container} ${command} 2>&1` : 'Specify container and command',
788
+ inspect: container ? `docker inspect ${container} 2>&1 | head -60` : 'Specify container',
789
+ };
790
+ return run(cmds[action] || 'docker info');
791
+ },
792
+ },
793
+
794
+ // ── 28. LOG ANALYSIS ──────────────────────────────────────────────────────────
795
+ {
796
+ group: 'analysis',
797
+ safe: true,
798
+ schema: {
799
+ name: 'log_analyze',
800
+ description: 'Analyze log files: find errors, extract patterns, summarize levels.',
801
+ input_schema: {
802
+ type: 'object',
803
+ properties: {
804
+ path: { type: 'string', description: 'Log file path' },
805
+ level: { type: 'string', enum: ['ERROR', 'WARN', 'INFO', 'all'], description: 'Filter by level' },
806
+ pattern: { type: 'string', description: 'Custom grep pattern' },
807
+ tail: { type: 'number', description: 'Last N lines (default 100)' },
808
+ stats: { type: 'boolean', description: 'Return level statistics instead of lines' },
809
+ },
810
+ required: ['path'],
811
+ },
812
+ },
813
+ execute: async ({ path: fp, level = 'all', pattern, tail = 100, stats }) => {
814
+ const content = readFile(fp);
815
+ const lines = content.split('\n');
816
+ if (stats) {
817
+ const counts = { ERROR: 0, WARN: 0, INFO: 0, DEBUG: 0, OTHER: 0 };
818
+ for (const l of lines) {
819
+ if (/error/i.test(l)) counts.ERROR++;
820
+ else if (/warn/i.test(l)) counts.WARN++;
821
+ else if (/info/i.test(l)) counts.INFO++;
822
+ else if (/debug/i.test(l)) counts.DEBUG++;
823
+ else counts.OTHER++;
824
+ }
825
+ return JSON.stringify(counts, null, 2);
826
+ }
827
+ let filtered = lines;
828
+ if (level !== 'all') filtered = filtered.filter(l => new RegExp(level, 'i').test(l));
829
+ if (pattern) filtered = filtered.filter(l => l.includes(pattern));
830
+ return filtered.slice(-tail).join('\n');
831
+ },
832
+ },
833
+
834
+ // ── 29. DEPENDENCY ANALYSIS ───────────────────────────────────────────────────
835
+ {
836
+ group: 'analysis',
837
+ safe: true,
838
+ schema: {
839
+ name: 'dependency_analysis',
840
+ description: 'Analyze project dependencies: unused packages, circular deps, security issues.',
841
+ input_schema: {
842
+ type: 'object',
843
+ properties: {
844
+ action: { type: 'string', enum: ['list', 'unused', 'circular', 'size', 'vulnerabilities'] },
845
+ cwd: { type: 'string' },
846
+ },
847
+ required: ['action'],
848
+ },
849
+ },
850
+ execute: async ({ action, cwd }) => {
851
+ const cmds = {
852
+ list: 'cat package.json | node -e "const p=require(\'./package.json\');console.log(JSON.stringify({deps:Object.keys(p.dependencies||{}),dev:Object.keys(p.devDependencies||{})},null,2))"',
853
+ unused: 'npx depcheck 2>&1 | head -30',
854
+ circular: 'npx madge --circular . 2>&1 | head -30',
855
+ size: 'npx cost-of-modules --no-install 2>&1 | head -20',
856
+ vulnerabilities: 'npm audit --json 2>&1 | node -e "let d=\'\';process.stdin.on(\'data\',c=>d+=c);process.stdin.on(\'end\',()=>{try{const a=JSON.parse(d);console.log(JSON.stringify({critical:a.metadata?.vulnerabilities?.critical,high:a.metadata?.vulnerabilities?.high,moderate:a.metadata?.vulnerabilities?.moderate},null,2))}catch{console.log(d.slice(0,500))}})"',
857
+ };
858
+ return run(cmds[action] || 'npm list', cwd);
859
+ },
860
+ },
861
+
862
+ // ── 30. CODE COMPLEXITY ───────────────────────────────────────────────────────
863
+ {
864
+ group: 'analysis',
865
+ safe: true,
866
+ schema: {
867
+ name: 'code_complexity',
868
+ description: 'Analyze code complexity: function lengths, nesting depth, duplicates.',
869
+ input_schema: {
870
+ type: 'object',
871
+ properties: {
872
+ path: { type: 'string', description: 'File or directory to analyze' },
873
+ },
874
+ required: ['path'],
875
+ },
876
+ },
877
+ execute: async ({ path: fp }) => {
878
+ const abs = path.resolve(fp);
879
+ if (!fs.existsSync(abs)) return 'Path not found';
880
+ const isFile = fs.statSync(abs).isFile();
881
+ const content = isFile ? readFile(abs) : '';
882
+ if (!content) return `Directory analysis: ${await run(`find ${abs} -name "*.js" | xargs wc -l 2>/dev/null | sort -rn | head -20`)}`;
883
+ const lines = content.split('\n');
884
+ const fns = lines.filter(l => /function\s+\w+|const\s+\w+\s*=\s*(?:async\s*)?\(|=>\s*\{/.test(l)).length;
885
+ const maxDepth = Math.max(...lines.map(l => (l.match(/^\s+/)?.[0].length || 0) / 2));
886
+ const longFns = lines.reduce((acc, l, i) => { if (l.match(/function|=>/) && lines.slice(i, i + 50).length > 40) acc.push(i + 1); return acc; }, []);
887
+ return JSON.stringify({ file: fp, lines: lines.length, functions: fns, maxNestingDepth: Math.floor(maxDepth), potentiallyLongFunctions: longFns.slice(0, 5) }, null, 2);
888
+ },
889
+ },
890
+
891
+ // ── 31. API TEST ──────────────────────────────────────────────────────────────
892
+ {
893
+ group: 'testing',
894
+ safe: true,
895
+ schema: {
896
+ name: 'api_test',
897
+ description: 'Test a REST API endpoint. Supports GET, POST, PUT, DELETE with headers and body.',
898
+ input_schema: {
899
+ type: 'object',
900
+ properties: {
901
+ url: { type: 'string' },
902
+ method: { type: 'string', enum: ['GET', 'POST', 'PUT', 'PATCH', 'DELETE'] },
903
+ body: { type: 'string', description: 'JSON request body' },
904
+ headers: { type: 'string', description: 'JSON headers object' },
905
+ expect_status: { type: 'number', description: 'Expected HTTP status code' },
906
+ },
907
+ required: ['url'],
908
+ },
909
+ },
910
+ execute: async ({ url, method = 'GET', body, headers, expect_status }) => {
911
+ const headerStr = headers ? Object.entries(JSON.parse(headers)).map(([k, v]) => `-H "${k}: ${v}"`).join(' ') : '-H "Content-Type: application/json"';
912
+ const bodyStr = body ? `-d '${body}'` : '';
913
+ const cmd = `curl -s -o /tmp/mcp_api_response.txt -w "%{http_code}" -X ${method} ${headerStr} ${bodyStr} "${url}" 2>&1`;
914
+ const statusCode = (await run(cmd)).trim();
915
+ let responseBody = '';
916
+ try { responseBody = fs.readFileSync('/tmp/mcp_api_response.txt', 'utf-8').slice(0, 2000); } catch { }
917
+ const passed = !expect_status || parseInt(statusCode) === expect_status;
918
+ return `Status: ${statusCode} ${passed ? '✓' : `✗ (expected ${expect_status})`}\n\nResponse:\n${responseBody}`;
919
+ },
920
+ },
921
+
922
+ // ── 32. MOCK GENERATOR ────────────────────────────────────────────────────────
923
+ {
924
+ group: 'testing',
925
+ safe: true,
926
+ schema: {
927
+ name: 'mock_generate',
928
+ description: 'Generate mock/fixture data based on a JSON schema or example object.',
929
+ input_schema: {
930
+ type: 'object',
931
+ properties: {
932
+ schema: { type: 'string', description: 'JSON Schema or example object to base mock on' },
933
+ count: { type: 'number', description: 'Number of mock objects to generate (default 3)' },
934
+ format: { type: 'string', enum: ['json', 'js', 'ts'], description: 'Output format' },
935
+ },
936
+ required: ['schema'],
937
+ },
938
+ },
939
+ execute: async ({ schema, count = 3, format = 'json' }) => {
940
+ const [example, err] = safeParseJSON(schema);
941
+ if (err) return `Invalid JSON schema: ${err}`;
942
+ const generateValue = (v, key = '') => {
943
+ if (typeof v === 'string') {
944
+ const k = key.toLowerCase();
945
+ if (k.includes('email') || v.includes('email')) return 'user@example.com';
946
+ if (k.includes('name') || v.includes('name')) return 'John Doe';
947
+ if (k.includes('id') || v.includes('id')) return `id_${Math.random().toString(36).slice(2, 8)}`;
948
+ if (k.includes('url') || v.includes('http')) return 'https://example.com';
949
+ if (k.includes('date') || v.includes('date')) return new Date().toISOString().slice(0, 10);
950
+ return `sample_${Math.random().toString(36).slice(2, 6)}`;
951
+ }
952
+ if (typeof v === 'number') return Math.floor(Math.random() * 100);
953
+ if (typeof v === 'boolean') return Math.random() > 0.5;
954
+ if (Array.isArray(v)) return v.length > 0 ? [generateValue(v[0])] : [];
955
+ if (typeof v === 'object' && v !== null) {
956
+ return Object.fromEntries(Object.entries(v).map(([k, val]) => [k, generateValue(val, k)]));
957
+ }
958
+ return v;
959
+ };
960
+ const mocks = Array.from({ length: Math.min(count, 20) }, () => generateValue(example));
961
+ return format === 'json' ? JSON.stringify(mocks, null, 2) : `const mocks = ${JSON.stringify(mocks, null, 2)};\nmodule.exports = mocks;`;
962
+ },
963
+ },
964
+
965
+ // ── 33. SCHEMA VALIDATE ───────────────────────────────────────────────────────
966
+ {
967
+ group: 'data',
968
+ safe: true,
969
+ schema: {
970
+ name: 'schema_validate',
971
+ description: 'Validate JSON data against a JSON Schema.',
972
+ input_schema: {
973
+ type: 'object',
974
+ properties: {
975
+ data: { type: 'string', description: 'JSON data to validate' },
976
+ schema: { type: 'string', description: 'JSON Schema object' },
977
+ },
978
+ required: ['data', 'schema'],
979
+ },
980
+ },
981
+ execute: async ({ data, schema }) => {
982
+ const [d, de] = safeParseJSON(data);
983
+ if (de) return `Invalid data: ${de}`;
984
+ const [s, se] = safeParseJSON(schema);
985
+ if (se) return `Invalid schema: ${se}`;
986
+ const errors = [];
987
+ const validate = (obj, sch, prefix = '') => {
988
+ if (sch.type) {
989
+ const t = Array.isArray(obj) ? 'array' : typeof obj;
990
+ if (t !== sch.type) errors.push(`${prefix || 'root'}: expected ${sch.type}, got ${t}`);
991
+ }
992
+ if (sch.required && typeof obj === 'object' && obj !== null) {
993
+ for (const req of sch.required) {
994
+ if (!(req in obj)) errors.push(`${prefix}: missing required field "${req}"`);
995
+ }
996
+ }
997
+ if (sch.properties && typeof obj === 'object') {
998
+ for (const [k, v] of Object.entries(sch.properties)) {
999
+ if (obj[k] !== undefined) validate(obj[k], v, `${prefix}.${k}`);
1000
+ }
1001
+ }
1002
+ };
1003
+ validate(d, s);
1004
+ return errors.length === 0 ? '✓ Valid' : `✗ ${errors.length} error(s):\n${errors.join('\n')}`;
1005
+ },
1006
+ },
1007
+
1008
+ // ── 34. DIAGRAM GENERATOR ─────────────────────────────────────────────────────
1009
+ {
1010
+ group: 'documentation',
1011
+ safe: true,
1012
+ schema: {
1013
+ name: 'generate_diagram',
1014
+ description: 'Generate a Mermaid diagram from code structure, module deps, or sequence.',
1015
+ input_schema: {
1016
+ type: 'object',
1017
+ properties: {
1018
+ type: { type: 'string', enum: ['flowchart', 'sequence', 'class', 'er', 'gantt', 'mindmap'] },
1019
+ content: { type: 'string', description: 'Code or description to diagram' },
1020
+ title: { type: 'string', description: 'Optional diagram title' },
1021
+ },
1022
+ required: ['type', 'content'],
1023
+ },
1024
+ },
1025
+ execute: async ({ type, content, title }) => {
1026
+ const titleLine = title ? `\n title ${title}` : '';
1027
+ const templates = {
1028
+ flowchart: `flowchart TD${titleLine}\n${content}`,
1029
+ sequence: `sequenceDiagram${titleLine}\n${content}`,
1030
+ class: `classDiagram${titleLine}\n${content}`,
1031
+ er: `erDiagram${titleLine}\n${content}`,
1032
+ gantt: `gantt${titleLine}\n dateFormat YYYY-MM-DD\n${content}`,
1033
+ mindmap: `mindmap${titleLine}\n${content}`,
1034
+ };
1035
+ return `\`\`\`mermaid\n${templates[type] || content}\n\`\`\``;
1036
+ },
1037
+ },
1038
+
1039
+ // ── 35. CHANGELOG GENERATOR ───────────────────────────────────────────────────
1040
+ {
1041
+ group: 'documentation',
1042
+ safe: true,
1043
+ schema: {
1044
+ name: 'generate_changelog',
1045
+ description: 'Generate a CHANGELOG from git commit history.',
1046
+ input_schema: {
1047
+ type: 'object',
1048
+ properties: {
1049
+ from: { type: 'string', description: 'From tag/commit (optional)' },
1050
+ to: { type: 'string', description: 'To tag/commit (default HEAD)' },
1051
+ version: { type: 'string', description: 'Version label for this release' },
1052
+ cwd: { type: 'string' },
1053
+ },
1054
+ required: [],
1055
+ },
1056
+ },
1057
+ execute: async ({ from, to = 'HEAD', version = 'Unreleased', cwd }) => {
1058
+ const range = from ? `${from}..${to}` : '-20';
1059
+ const log = await run(`git log ${range} --pretty=format:"%s" 2>/dev/null`, cwd);
1060
+ const commits = log.split('\n').filter(Boolean);
1061
+ const grouped = { feat: [], fix: [], refactor: [], docs: [], other: [] };
1062
+ for (const c of commits) {
1063
+ if (c.startsWith('feat')) grouped.feat.push(c);
1064
+ else if (c.startsWith('fix')) grouped.fix.push(c);
1065
+ else if (c.startsWith('refactor')) grouped.refactor.push(c);
1066
+ else if (c.startsWith('docs')) grouped.docs.push(c);
1067
+ else grouped.other.push(c);
1068
+ }
1069
+ const lines = [`## ${version} (${new Date().toISOString().slice(0, 10)})`, ''];
1070
+ if (grouped.feat.length) lines.push('### Features', ...grouped.feat.map(c => `- ${c}`), '');
1071
+ if (grouped.fix.length) lines.push('### Bug Fixes', ...grouped.fix.map(c => `- ${c}`), '');
1072
+ if (grouped.refactor.length) lines.push('### Refactors', ...grouped.refactor.map(c => `- ${c}`), '');
1073
+ if (grouped.docs.length) lines.push('### Documentation', ...grouped.docs.map(c => `- ${c}`), '');
1074
+ if (grouped.other.length) lines.push('### Other', ...grouped.other.map(c => `- ${c}`), '');
1075
+ return lines.join('\n');
1076
+ },
1077
+ },
1078
+
1079
+ // ── 36. TOKEN COUNTER ─────────────────────────────────────────────────────────
1080
+ {
1081
+ group: 'ai',
1082
+ safe: true,
1083
+ schema: {
1084
+ name: 'token_count',
1085
+ description: 'Estimate token count for text (4 chars ≈ 1 token). Useful for context budget planning.',
1086
+ input_schema: {
1087
+ type: 'object',
1088
+ properties: {
1089
+ text: { type: 'string', description: 'Text to count' },
1090
+ model: { type: 'string', description: 'Model name (affects cost estimate)' },
1091
+ },
1092
+ required: ['text'],
1093
+ },
1094
+ },
1095
+ execute: async ({ text, model = 'claude-opus-4-5' }) => {
1096
+ const tokens = Math.ceil(text.length / 4);
1097
+ const pricing = { 'claude-opus-4-5': { input: 15 }, 'claude-haiku-4-5-20251001': { input: 0.25 }, 'claude-sonnet-4-5': { input: 3 } };
1098
+ const price = pricing[model]?.input || 15;
1099
+ const cost = (tokens / 1_000_000) * price;
1100
+ return JSON.stringify({ tokens, chars: text.length, estimatedCostUsd: `$${cost.toFixed(6)}`, model, budget: { of128k: `${(tokens / 128000 * 100).toFixed(1)}%`, of200k: `${(tokens / 200000 * 100).toFixed(1)}%` } }, null, 2);
1101
+ },
1102
+ },
1103
+
1104
+ // ── 37. THINK (chain-of-thought) ──────────────────────────────────────────────
1105
+ {
1106
+ group: 'ai',
1107
+ safe: true,
1108
+ schema: {
1109
+ name: 'think',
1110
+ description: 'Internal reasoning tool. Use this to think step-by-step before answering. Output is private scratchpad.',
1111
+ input_schema: {
1112
+ type: 'object',
1113
+ properties: {
1114
+ thought: { type: 'string', description: 'Your internal reasoning, analysis, or chain-of-thought' },
1115
+ },
1116
+ required: ['thought'],
1117
+ },
1118
+ },
1119
+ execute: async ({ thought }) => {
1120
+ logger.info(`[Think] ${thought.slice(0, 80)}`);
1121
+ return `Thought recorded. Continue with your analysis.`;
1122
+ },
1123
+ },
1124
+
1125
+ // ── 38. SLEEP / WAIT ──────────────────────────────────────────────────────────
1126
+ {
1127
+ group: 'control',
1128
+ safe: true,
1129
+ schema: {
1130
+ name: 'sleep',
1131
+ description: 'Wait for a specified duration. Useful for rate-limiting or polling.',
1132
+ input_schema: {
1133
+ type: 'object',
1134
+ properties: {
1135
+ ms: { type: 'number', description: 'Milliseconds to wait (max 5000)' },
1136
+ reason: { type: 'string', description: 'Why are you waiting' },
1137
+ },
1138
+ required: ['ms'],
1139
+ },
1140
+ },
1141
+ execute: async ({ ms, reason }) => {
1142
+ await new Promise(r => setTimeout(r, Math.min(ms, 5000)));
1143
+ return `Waited ${ms}ms${reason ? ': ' + reason : ''}`;
1144
+ },
1145
+ },
1146
+
1147
+ // ── 39. KNOWLEDGE BASE SEARCH ─────────────────────────────────────────────────
1148
+ {
1149
+ group: 'knowledge',
1150
+ safe: true,
1151
+ schema: {
1152
+ name: 'kb_search',
1153
+ description: 'Search the ingested codebase knowledge base for relevant context.',
1154
+ input_schema: {
1155
+ type: 'object',
1156
+ properties: {
1157
+ query: { type: 'string', description: 'Search query' },
1158
+ top_k: { type: 'number', description: 'Number of results (default 6)' },
1159
+ kind: { type: 'string', description: 'Filter by kind: code, log, config, documentation' },
1160
+ },
1161
+ required: ['query'],
1162
+ },
1163
+ },
1164
+ execute: async ({ query, top_k = 6, kind }) => {
1165
+ const indexer = require('../core/indexer');
1166
+ const results = indexer.search(query, top_k, kind ? { kind } : {});
1167
+ if (!results.length) return 'No relevant context found in knowledge base.';
1168
+ return results.map((r, i) =>
1169
+ `[${i + 1}] ${r.filename} (${r.kind}) score:${r.relevanceScore}\n${r.content.slice(0, 400)}`
1170
+ ).join('\n\n---\n\n');
1171
+ },
1172
+ },
1173
+
1174
+ // ── 40. MEMORY SEARCH ─────────────────────────────────────────────────────────
1175
+ {
1176
+ group: 'knowledge',
1177
+ safe: true,
1178
+ schema: {
1179
+ name: 'memory_search',
1180
+ description: 'Search persistent memory for relevant facts about this codebase.',
1181
+ input_schema: {
1182
+ type: 'object',
1183
+ properties: {
1184
+ query: { type: 'string', description: 'Search query' },
1185
+ limit: { type: 'number', description: 'Max results (default 5)' },
1186
+ },
1187
+ required: ['query'],
1188
+ },
1189
+ },
1190
+ execute: async ({ query, limit = 5 }) => {
1191
+ const { MemoryManager } = require('../memory/memoryManager');
1192
+ const mems = MemoryManager.getRelevant(query, limit);
1193
+ if (!mems.length) return 'No relevant memories found.';
1194
+ return mems.map(m => `[${m.type}] ${m.content}`).join('\n');
1195
+ },
1196
+ },
1197
+
1198
+ // ── 41. TASK MANAGER ──────────────────────────────────────────────────────────
1199
+ {
1200
+ group: 'tasks',
1201
+ safe: true,
1202
+ schema: {
1203
+ name: 'task_manage',
1204
+ description: 'Create, update, or list tasks.',
1205
+ input_schema: {
1206
+ type: 'object',
1207
+ properties: {
1208
+ action: { type: 'string', enum: ['list', 'create', 'done', 'update'] },
1209
+ title: { type: 'string', description: 'Task title (for create)' },
1210
+ id: { type: 'number', description: 'Task ID (for done/update)' },
1211
+ priority: { type: 'string', enum: ['low', 'medium', 'high', 'critical'] },
1212
+ status: { type: 'string', enum: ['todo', 'in_progress', 'done', 'blocked'] },
1213
+ },
1214
+ required: ['action'],
1215
+ },
1216
+ },
1217
+ execute: async ({ action, title, id, priority, status }) => {
1218
+ const { TaskManager } = require('../tasks/taskManager');
1219
+ if (action === 'list') return JSON.stringify(TaskManager.list().map(t => ({ id: t.id, title: t.title, status: t.status, priority: t.priority })), null, 2);
1220
+ if (action === 'create') return JSON.stringify(TaskManager.create({ title, priority: priority || 'medium' }));
1221
+ if (action === 'done') return JSON.stringify(TaskManager.update(id, { status: 'done' }));
1222
+ if (action === 'update') return JSON.stringify(TaskManager.update(id, { status, priority }));
1223
+ return 'Unknown action';
1224
+ },
1225
+ },
1226
+
1227
+ // ── 42. SYMBOL NAVIGATE ───────────────────────────────────────────────────────
1228
+ {
1229
+ group: 'navigation',
1230
+ safe: true,
1231
+ schema: {
1232
+ name: 'symbol_navigate',
1233
+ description: 'Navigate code symbols: find definitions, references, or file outline.',
1234
+ input_schema: {
1235
+ type: 'object',
1236
+ properties: {
1237
+ action: { type: 'string', enum: ['definition', 'references', 'outline', 'workspace'] },
1238
+ symbol: { type: 'string', description: 'Symbol name to look up' },
1239
+ file: { type: 'string', description: 'File path for outline action' },
1240
+ cwd: { type: 'string' },
1241
+ },
1242
+ required: ['action'],
1243
+ },
1244
+ },
1245
+ execute: async ({ action, symbol, file, cwd }) => {
1246
+ const nav = require('../lsp/symbolNavigator');
1247
+ if (action === 'definition') return JSON.stringify((await nav.goToDefinition(symbol, cwd)).definitions.slice(0, 5), null, 2);
1248
+ if (action === 'references') { const r = await nav.findReferences(symbol, cwd); return `${r.total} references\n${r.references.slice(0, 10).map(r => `${r.file}:${r.line} ${r.text}`).join('\n')}`; }
1249
+ if (action === 'outline') return JSON.stringify((await nav.outline(file)).symbols, null, 2);
1250
+ if (action === 'workspace') return JSON.stringify((await nav.workspaceSymbols(symbol, cwd)).symbols.slice(0, 15), null, 2);
1251
+ return 'Unknown action';
1252
+ },
1253
+ },
1254
+
1255
+ // ── 43. TEXT DIFF ─────────────────────────────────────────────────────────────
1256
+ {
1257
+ group: 'analysis',
1258
+ safe: true,
1259
+ schema: {
1260
+ name: 'text_diff',
1261
+ description: 'Compare two files or strings and return their diff.',
1262
+ input_schema: {
1263
+ type: 'object',
1264
+ properties: {
1265
+ file_a: { type: 'string', description: 'First file path' },
1266
+ file_b: { type: 'string', description: 'Second file path' },
1267
+ text_a: { type: 'string', description: 'First text (alternative to file_a)' },
1268
+ text_b: { type: 'string', description: 'Second text (alternative to file_b)' },
1269
+ context: { type: 'number', description: 'Context lines (default 3)' },
1270
+ },
1271
+ required: [],
1272
+ },
1273
+ },
1274
+ execute: async ({ file_a, file_b, text_a, text_b, context = 3 }) => {
1275
+ // diff returns exit 1 when files differ — that's normal output, not an error
1276
+ // Use -U N (unified context lines) — more portable than --context=N
1277
+ if (file_a && file_b) {
1278
+ const result = await run(`diff -U ${context} "${path.resolve(file_a)}" "${path.resolve(file_b)}"`, null, { allowNonZero: true });
1279
+ return result || '(no differences)';
1280
+ }
1281
+ if (text_a !== undefined && text_b !== undefined) {
1282
+ fs.writeFileSync('/tmp/mcp_diff_a.txt', String(text_a));
1283
+ fs.writeFileSync('/tmp/mcp_diff_b.txt', String(text_b));
1284
+ const result = await run(`diff -U ${context} /tmp/mcp_diff_a.txt /tmp/mcp_diff_b.txt`, null, { allowNonZero: true });
1285
+ return result || '(no differences)';
1286
+ }
1287
+ return 'Provide file_a + file_b or text_a + text_b';
1288
+ },
1289
+ },
1290
+
1291
+ // ── 44. PROCESS MANAGER ───────────────────────────────────────────────────────
1292
+ {
1293
+ group: 'system',
1294
+ safe: true,
1295
+ schema: {
1296
+ name: 'process_info',
1297
+ description: 'List processes or find processes by name/port.',
1298
+ input_schema: {
1299
+ type: 'object',
1300
+ properties: {
1301
+ action: { type: 'string', enum: ['list', 'find', 'port'] },
1302
+ name: { type: 'string', description: 'Process name to search' },
1303
+ port: { type: 'number', description: 'Port to find process for' },
1304
+ },
1305
+ required: ['action'],
1306
+ },
1307
+ },
1308
+ execute: async ({ action, name, port }) => {
1309
+ const cmds = {
1310
+ list: 'ps aux --sort=-%cpu | head -20 2>&1',
1311
+ find: `pgrep -la "${name}" 2>&1 | head -10`,
1312
+ port: `lsof -i :${port} 2>&1 | head -10`,
1313
+ };
1314
+ return run(cmds[action] || 'ps aux | head -10');
1315
+ },
1316
+ },
1317
+
1318
+ // ── 45. SKILL RUNNER ──────────────────────────────────────────────────────────
1319
+ {
1320
+ group: 'ai',
1321
+ safe: true,
1322
+ schema: {
1323
+ name: 'run_skill',
1324
+ description: 'Execute a named skill (add-error-handling, document-function, check-security, etc.)',
1325
+ input_schema: {
1326
+ type: 'object',
1327
+ properties: {
1328
+ skill: { type: 'string', description: 'Skill name' },
1329
+ target: { type: 'string', description: 'Target (function, file, module)' },
1330
+ },
1331
+ required: ['skill', 'target'],
1332
+ },
1333
+ },
1334
+ execute: async ({ skill, target }) => {
1335
+ const skillsManager = require('../skills/skillsManager');
1336
+ const result = await skillsManager.run(skill, target);
1337
+ return result.result || result.error || 'No result';
1338
+ },
1339
+ },
1340
+
1341
+ ];
1342
+
1343
// ─── REGISTRY API ──────────────────────────────────────────────────────────────
/**
 * Facade over the TOOLS array: schema export (Anthropic format), group lookup,
 * and guarded execution. Exported below as a singleton.
 */
class ToolRegistry {
  constructor() {
    // Name → tool definition, for O(1) dispatch in execute().
    this._tools = new Map(TOOLS.map((tool) => [tool.schema.name, tool]));
  }

  /** All tool schemas in Anthropic format; `names` optionally restricts the set. */
  schemas(names) {
    const selected = names
      ? TOOLS.filter((tool) => names.includes(tool.schema.name))
      : TOOLS;
    return selected.map((tool) => tool.schema);
  }

  /** Names of all tools belonging to `group`. */
  byGroup(group) {
    return TOOLS
      .filter((tool) => tool.group === group)
      .map((tool) => tool.schema.name);
  }

  /**
   * Execute a tool call by name. Always resolves to a string: tool output,
   * an "Unknown tool" notice, or an error report (never throws — tool errors
   * are logged and returned so the agent loop can continue).
   */
  async execute(name, input) {
    const tool = this._tools.get(name);
    if (!tool) return `Unknown tool: ${name}`;
    try {
      const result = await tool.execute(input);
      if (typeof result === 'string') return result;
      return JSON.stringify(result);
    } catch (err) {
      logger.error(`[Tool:${name}] ${err.message}`);
      return `[Error in ${name}]: ${err.message}`;
    }
  }

  /** Summaries (name, group, safe flag, description) of every registered tool. */
  list() {
    return TOOLS.map((tool) => ({
      name: tool.schema.name,
      group: tool.group,
      safe: tool.safe,
      description: tool.schema.description,
    }));
  }

  /** Total number of registered tools. */
  get count() { return TOOLS.length; }
}

module.exports = new ToolRegistry();