ted-mosby 1.0.0 → 1.1.1

This diff shows the changes between publicly released versions of this package as published to one of the supported registries. It is provided for informational purposes only.
@@ -1,4 +1,5 @@
1
1
  import { query, tool, createSdkMcpServer } from '@anthropic-ai/claude-agent-sdk';
2
+ import Anthropic from '@anthropic-ai/sdk';
2
3
  import { z } from 'zod';
3
4
  import * as fs from 'fs';
4
5
  import * as path from 'path';
@@ -8,6 +9,8 @@ import { PermissionManager } from './permissions.js';
8
9
  import { MCPConfigManager } from './mcp-config.js';
9
10
  import { RAGSystem } from './rag/index.js';
10
11
  import { WIKI_SYSTEM_PROMPT } from './prompts/wiki-system.js';
12
+ // Store generation options for use in tool server
13
+ let currentGenerationOptions = null;
11
14
  export class ArchitecturalWikiAgent {
12
15
  config;
13
16
  permissionManager;
@@ -33,6 +36,8 @@ export class ArchitecturalWikiAgent {
33
36
  * Generate wiki documentation for a repository
34
37
  */
35
38
  async *generateWiki(options) {
39
+ // Store options for tool server to access
40
+ currentGenerationOptions = options;
36
41
  // Phase 1: Clone or access repository
37
42
  yield { type: 'phase', message: 'Preparing repository', progress: 0 };
38
43
  this.repoPath = await this.prepareRepository(options.repoUrl, options.accessToken);
@@ -45,7 +50,8 @@ export class ArchitecturalWikiAgent {
45
50
  yield { type: 'phase', message: 'Indexing codebase for semantic search', progress: 10 };
46
51
  this.ragSystem = new RAGSystem({
47
52
  storePath: path.join(this.outputDir, '.ted-mosby-cache'),
48
- repoPath: this.repoPath
53
+ repoPath: this.repoPath,
54
+ maxChunks: options.maxChunks // Limit chunks for large codebases
49
55
  });
50
56
  await this.ragSystem.indexRepository();
51
57
  yield { type: 'step', message: `Indexed ${this.ragSystem.getDocumentCount()} code chunks` };
@@ -56,7 +62,18 @@ export class ArchitecturalWikiAgent {
56
62
  const agentOptions = this.buildAgentOptions(options);
57
63
  const prompt = this.buildGenerationPrompt(options);
58
64
  // Stream agent execution using simple string prompt
65
+ // Track if we've yielded the complete event (wiki is done even if subprocess exits uncleanly)
66
+ let wikiComplete = false;
59
67
  try {
68
+ if (options.verbose) {
69
+ console.log('[Debug] Starting agent with options:', JSON.stringify({
70
+ model: agentOptions.model,
71
+ cwd: agentOptions.cwd,
72
+ maxTurns: agentOptions.maxTurns,
73
+ allowedTools: agentOptions.allowedTools?.length,
74
+ ragChunks: this.ragSystem?.getDocumentCount()
75
+ }, null, 2));
76
+ }
60
77
  const queryResult = query({
61
78
  prompt,
62
79
  options: agentOptions
@@ -65,27 +82,903 @@ export class ArchitecturalWikiAgent {
65
82
  // Capture session ID
66
83
  if (message.type === 'system' && message.subtype === 'init') {
67
84
  this.sessionId = message.session_id;
85
+ if (options.verbose) {
86
+ console.log('[Debug] Agent initialized, session:', this.sessionId);
87
+ }
68
88
  }
69
89
  // Log errors from the agent
70
90
  if (message.type === 'system' && message.subtype === 'error') {
71
- console.error('Agent error:', message.error || message);
91
+ console.error('Agent error:', JSON.stringify(message, null, 2));
92
+ }
93
+ // Log tool calls in verbose mode
94
+ if (options.verbose && message.type === 'assistant' && message.tool_use) {
95
+ const tools = message.tool_use;
96
+ console.log('[Debug] Tool calls:', tools.map((t) => t.name).join(', '));
97
+ }
98
+ // Check if agent has finished its work (result message indicates completion)
99
+ if (message.type === 'result' && message.subtype === 'success') {
100
+ wikiComplete = true;
72
101
  }
73
102
  yield message;
74
103
  }
75
104
  yield { type: 'complete', message: 'Wiki generation complete', progress: 100 };
76
105
  }
77
106
  catch (err) {
78
- // Capture stderr if available
107
+ // If wiki was already complete when the error occurred, treat as success
108
+ // This handles the case where the subprocess exits during cleanup
109
+ if (wikiComplete) {
110
+ if (options.verbose) {
111
+ console.log('[Debug] Wiki generation completed successfully (subprocess cleanup error ignored)');
112
+ }
113
+ yield { type: 'complete', message: 'Wiki generation complete', progress: 100 };
114
+ return; // Don't throw - wiki is done
115
+ }
116
+ // Enhanced error capture for actual failures
79
117
  console.error('Query error details:', err.message);
118
+ // Log full error object for debugging
119
+ if (options.verbose) {
120
+ console.error('[Debug] Full error:', JSON.stringify(err, Object.getOwnPropertyNames(err), 2));
121
+ }
80
122
  if (err.stderr) {
81
123
  console.error('Stderr:', err.stderr);
82
124
  }
125
+ if (err.stdout) {
126
+ console.error('Stdout:', err.stdout);
127
+ }
83
128
  if (err.cause) {
84
129
  console.error('Cause:', err.cause);
85
130
  }
131
+ if (err.code) {
132
+ console.error('Exit code:', err.code);
133
+ }
134
+ if (err.signal) {
135
+ console.error('Signal:', err.signal);
136
+ }
137
+ // Check for common issues
138
+ if (err.message?.includes('exit')) {
139
+ console.error('\nPossible causes:');
140
+ console.error(' 1. MCP server failed to start (check npx is available)');
141
+ console.error(' 2. Out of memory (try with smaller codebase or increase Node memory)');
142
+ console.error(' 3. API rate limit exceeded');
143
+ console.error('\nTry running with --verbose for more details.');
144
+ console.error('\nTip: For large codebases, try --max-chunks 5000 to limit index size.');
145
+ }
146
+ throw err;
147
+ }
148
+ }
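The changes above thread two new options (maxChunks and verbose) through generateWiki and tolerate a noisy subprocess exit after the result message. A minimal usage sketch of the streaming interface, not part of the released diff; the constructor arguments are left unspecified and the repo URL and paths are placeholders:

```js
// Hedged sketch: drive generateWiki and log its streamed progress events.
import { ArchitecturalWikiAgent } from './wiki-agent.js';

const agent = new ArchitecturalWikiAgent(/* config: not shown in this diff */);

for await (const event of agent.generateWiki({
  repoUrl: 'https://github.com/example/repo', // placeholder
  outputDir: './wiki-out',                    // placeholder
  maxChunks: 5000,  // new in 1.1.x: cap the RAG index for large codebases
  verbose: true     // new in 1.1.x: emit the [Debug] logging shown above
})) {
  if (event.type === 'phase' || event.type === 'step' || event.type === 'complete') {
    console.log(`[${event.type}] ${event.message}`);
  }
}
```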
149
+ /**
150
+ * Generate wiki in batched mode for very large codebases.
151
+ * Processes the codebase in sequential batches to avoid memory issues.
152
+ */
153
+ async *generateWikiBatched(options) {
154
+ const batchSize = options.batchSize || 3000; // Default 3000 chunks per batch
155
+ // Store options for tool server to access
156
+ currentGenerationOptions = options;
157
+ // Phase 1: Clone or access repository
158
+ yield { type: 'phase', message: 'Preparing repository', progress: 0 };
159
+ this.repoPath = await this.prepareRepository(options.repoUrl, options.accessToken);
160
+ this.outputDir = path.resolve(options.outputDir);
161
+ // Ensure output directory exists
162
+ if (!fs.existsSync(this.outputDir)) {
163
+ fs.mkdirSync(this.outputDir, { recursive: true });
164
+ }
165
+ // Phase 2: Discover codebase size
166
+ yield { type: 'phase', message: 'Analyzing codebase size', progress: 5 };
167
+ const tempRag = new RAGSystem({
168
+ storePath: path.join(this.outputDir, '.ted-mosby-cache'),
169
+ repoPath: this.repoPath
170
+ });
171
+ const discovery = await tempRag.discoverChunkCount();
172
+ const totalBatches = Math.ceil(discovery.chunks / batchSize);
173
+ yield {
174
+ type: 'step',
175
+ message: `Found ${discovery.files} files, ${discovery.chunks} chunks → ${totalBatches} batch(es) of ${batchSize}`
176
+ };
177
+ // Phase 3: Process batches sequentially
178
+ for (let batchNum = 0; batchNum < totalBatches; batchNum++) {
179
+ const batchProgress = 10 + (batchNum / totalBatches) * 60; // 10-70% for batches
180
+ yield {
181
+ type: 'phase',
182
+ message: `Processing batch ${batchNum + 1}/${totalBatches}`,
183
+ progress: batchProgress
184
+ };
185
+ // Create fresh RAG system for this batch (memory isolation)
186
+ this.ragSystem = new RAGSystem({
187
+ storePath: path.join(this.outputDir, '.ted-mosby-cache'),
188
+ repoPath: this.repoPath
189
+ });
190
+ // Index this batch
191
+ const batchInfo = await this.ragSystem.indexBatch(batchNum, batchSize);
192
+ if (batchInfo.chunksInBatch === 0) {
193
+ yield { type: 'step', message: `Batch ${batchNum + 1} empty, skipping` };
194
+ continue;
195
+ }
196
+ yield {
197
+ type: 'step',
198
+ message: `Batch ${batchNum + 1}: indexed chunks ${batchInfo.batchStart + 1}-${batchInfo.batchEnd}`
199
+ };
200
+ }
201
+ // Phase 4: Finalize the index
202
+ // For batched mode, we load metadata only (embeddings were generated per batch but not persisted to FAISS)
203
+ // This uses keyword search which is slower but doesn't require re-generating all embeddings
204
+ yield { type: 'phase', message: 'Finalizing search index', progress: 70 };
205
+ this.ragSystem = new RAGSystem({
206
+ storePath: path.join(this.outputDir, '.ted-mosby-cache'),
207
+ repoPath: this.repoPath
208
+ });
209
+ // Load the accumulated metadata - keyword search will be used since no FAISS index exists
210
+ await this.ragSystem.loadMetadataOnly();
211
+ const searchMode = this.ragSystem['index'] ? 'vector search' : 'keyword search';
212
+ yield { type: 'step', message: `Final index: ${this.ragSystem.getDocumentCount()} chunks loaded (${searchMode})` };
213
+ // Recreate tool server with finalized RAG system
214
+ this.customServer = this.createCustomToolServer();
215
+ // Phase 5: Run agent to generate wiki (single session now that index is ready)
216
+ yield { type: 'phase', message: 'Generating architectural documentation', progress: 75 };
217
+ const agentOptions = this.buildAgentOptions(options);
218
+ const prompt = this.buildGenerationPrompt(options);
219
+ let wikiComplete = false;
220
+ let messageCount = 0;
221
+ try {
222
+ console.log('[Batched] Starting agent with', this.ragSystem.getDocumentCount(), 'chunks indexed');
223
+ console.log('[Batched] RAG system status:', {
224
+ documentCount: this.ragSystem.getDocumentCount(),
225
+ hasIndex: this.ragSystem['index'] !== null,
226
+ metadataSize: this.ragSystem['metadata'].size
227
+ });
228
+ console.log('[Batched] Agent options:', JSON.stringify({
229
+ model: agentOptions.model,
230
+ maxTurns: agentOptions.maxTurns,
231
+ mcpServers: Object.keys(agentOptions.mcpServers || {}),
232
+ allowedTools: agentOptions.allowedTools?.length,
233
+ cwd: agentOptions.cwd,
234
+ permissionMode: agentOptions.permissionMode
235
+ }, null, 2));
236
+ console.log('[Batched] Prompt length:', prompt.length, 'chars');
237
+ console.log('[Batched] System prompt length:', WIKI_SYSTEM_PROMPT.length, 'chars');
238
+ const queryResult = query({
239
+ prompt,
240
+ options: agentOptions
241
+ });
242
+ for await (const message of queryResult) {
243
+ messageCount++;
244
+ // Log every message type for debugging
245
+ const subtype = message.subtype || 'none';
246
+ console.log(`[Batched] Message ${messageCount}: type=${message.type}, subtype=${subtype}`);
247
+ if (message.type === 'system' && subtype === 'init') {
248
+ this.sessionId = message.session_id;
249
+ console.log('[Batched] Agent session started:', this.sessionId);
250
+ }
251
+ // Log assistant messages with content preview
252
+ if (message.type === 'assistant') {
253
+ const content = message.message?.content || [];
254
+ const textParts = content.filter((c) => c.type === 'text');
255
+ const toolUses = content.filter((c) => c.type === 'tool_use');
256
+ if (textParts.length > 0) {
257
+ const textPreview = textParts[0].text?.slice(0, 200) || '';
258
+ console.log('[Batched] Assistant text:', textPreview + (textPreview.length >= 200 ? '...' : ''));
259
+ }
260
+ if (toolUses.length > 0) {
261
+ console.log('[Batched] Tool calls:', toolUses.map((t) => t.name).join(', '));
262
+ }
263
+ if (content.length === 0) {
264
+ console.log('[Batched] Assistant message with empty content');
265
+ }
266
+ }
267
+ // Log user/tool results
268
+ if (message.type === 'user') {
269
+ const content = message.message?.content || [];
270
+ const toolResults = content.filter((c) => c.type === 'tool_result');
271
+ if (toolResults.length > 0) {
272
+ console.log('[Batched] Tool results:', toolResults.map((t) => `${t.tool_use_id}: ${t.is_error ? 'ERROR' : 'ok'}`).join(', '));
273
+ }
274
+ }
275
+ // Log system errors
276
+ if (message.type === 'system' && subtype === 'error') {
277
+ console.error('[Batched] Agent error:', JSON.stringify(message, null, 2));
278
+ }
279
+ // Log result messages
280
+ if (message.type === 'result') {
281
+ console.log('[Batched] Result:', subtype, JSON.stringify(message).slice(0, 500));
282
+ if (subtype === 'success') {
283
+ wikiComplete = true;
284
+ console.log('[Batched] Agent completed successfully after', messageCount, 'messages');
285
+ }
286
+ }
287
+ yield message;
288
+ }
289
+ if (!wikiComplete) {
290
+ console.log('[Batched] Warning: Agent finished without success signal after', messageCount, 'messages');
291
+ }
292
+ yield { type: 'complete', message: 'Wiki generation complete', progress: 100 };
293
+ }
294
+ catch (err) {
295
+ console.log('[Batched] Agent error after', messageCount, 'messages:', err.message);
296
+ console.log('[Batched] Error details:', {
297
+ name: err.name,
298
+ code: err.code,
299
+ signal: err.signal,
300
+ stderr: err.stderr?.slice?.(0, 500),
301
+ stdout: err.stdout?.slice?.(0, 500)
302
+ });
303
+ if (wikiComplete) {
304
+ console.log('[Batched] Wiki was already complete, treating as success');
305
+ yield { type: 'complete', message: 'Wiki generation complete', progress: 100 };
306
+ return;
307
+ }
308
+ console.error('Query error:', err.message);
309
+ if (err.message?.includes('exit')) {
310
+ console.error('\nThe agent subprocess exited unexpectedly.');
311
+ console.error('Tips:');
312
+ console.error(' 1. Try running with --verbose for more details');
313
+ console.error(' 2. Try reducing --batch-size (e.g., --batch-size 2000)');
314
+ console.error(' 3. Check if the wiki has partial content in', this.outputDir);
315
+ }
316
+ throw err;
317
+ }
318
+ }
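The batched path is driven by discoverChunkCount() and indexBatch(batchNum, batchSize). A small sketch of the same partitioning arithmetic, with placeholder counts, to make the batch boundaries explicit (it assumes chunks are sliced contiguously, as the messages above suggest):

```js
// Sketch of the batch partitioning used by generateWikiBatched (placeholder numbers).
const batchSize = 3000;                         // default from options.batchSize
const discovery = { files: 420, chunks: 7200 }; // placeholder discovery result
const totalBatches = Math.ceil(discovery.chunks / batchSize); // => 3

for (let batchNum = 0; batchNum < totalBatches; batchNum++) {
  const batchStart = batchNum * batchSize;
  const batchEnd = Math.min(batchStart + batchSize, discovery.chunks);
  console.log(`batch ${batchNum + 1}/${totalBatches}: chunks ${batchStart + 1}-${batchEnd}`);
}
```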
319
+ /**
320
+ * Run only the agent part using an existing cached index.
321
+ * Useful for debugging agent behavior without re-indexing.
322
+ */
323
+ async *generateWikiAgentOnly(options) {
324
+ currentGenerationOptions = options;
325
+ // Setup paths
326
+ yield { type: 'phase', message: 'Loading existing index', progress: 0 };
327
+ this.repoPath = await this.prepareRepository(options.repoUrl, options.accessToken);
328
+ this.outputDir = path.resolve(options.outputDir);
329
+ const cachePath = path.join(this.outputDir, '.ted-mosby-cache');
330
+ const metadataPath = path.join(cachePath, 'metadata.json');
331
+ if (!fs.existsSync(metadataPath)) {
332
+ throw new Error(`No cached index found at ${cachePath}. Run without --skip-index first to build the index.`);
333
+ }
334
+ // Ensure output directory exists
335
+ if (!fs.existsSync(this.outputDir)) {
336
+ fs.mkdirSync(this.outputDir, { recursive: true });
337
+ }
338
+ // Load existing RAG index
339
+ this.ragSystem = new RAGSystem({
340
+ storePath: cachePath,
341
+ repoPath: this.repoPath
342
+ });
343
+ await this.ragSystem.loadMetadataOnly();
344
+ const searchMode = this.ragSystem['index'] ? 'vector search' : 'keyword search';
345
+ yield {
346
+ type: 'step',
347
+ message: `Loaded ${this.ragSystem.getDocumentCount()} chunks (${searchMode})`
348
+ };
349
+ // Recreate tool server with RAG system
350
+ this.customServer = this.createCustomToolServer();
351
+ // Run the agent
352
+ yield { type: 'phase', message: 'Running agent (debug mode)', progress: 50 };
353
+ const agentOptions = this.buildAgentOptions(options);
354
+ const prompt = this.buildGenerationPrompt(options);
355
+ // Enhanced debug output
356
+ console.log('\n' + '='.repeat(60));
357
+ console.log('[AgentOnly] DEBUG MODE - Detailed Agent Execution');
358
+ console.log('='.repeat(60));
359
+ console.log('[AgentOnly] Repository:', this.repoPath);
360
+ console.log('[AgentOnly] Output:', this.outputDir);
361
+ console.log('[AgentOnly] Chunks loaded:', this.ragSystem.getDocumentCount());
362
+ console.log('[AgentOnly] Search mode:', searchMode);
363
+ console.log('[AgentOnly] Model:', agentOptions.model);
364
+ console.log('[AgentOnly] Max turns:', agentOptions.maxTurns);
365
+ console.log('[AgentOnly] Permission mode:', agentOptions.permissionMode);
366
+ console.log('[AgentOnly] CWD:', agentOptions.cwd);
367
+ console.log('[AgentOnly] MCP servers:', Object.keys(agentOptions.mcpServers || {}));
368
+ console.log('[AgentOnly] Allowed tools count:', agentOptions.allowedTools?.length);
369
+ console.log('[AgentOnly] Allowed tools:', agentOptions.allowedTools);
370
+ console.log('[AgentOnly] System prompt length:', WIKI_SYSTEM_PROMPT.length, 'chars');
371
+ console.log('[AgentOnly] User prompt length:', prompt.length, 'chars');
372
+ console.log('[AgentOnly] User prompt preview:', prompt.slice(0, 500));
373
+ console.log('='.repeat(60) + '\n');
374
+ let wikiComplete = false;
375
+ let messageCount = 0;
376
+ let toolCallCount = 0;
377
+ try {
378
+ console.log('[AgentOnly] Calling query()...');
379
+ const queryResult = query({
380
+ prompt,
381
+ options: agentOptions
382
+ });
383
+ console.log('[AgentOnly] Got queryResult, starting iteration...');
384
+ for await (const message of queryResult) {
385
+ messageCount++;
386
+ const subtype = message.subtype || 'none';
387
+ const timestamp = new Date().toISOString().split('T')[1];
388
+ console.log(`\n[AgentOnly] === Message ${messageCount} ===`);
389
+ console.log(`[AgentOnly] Time: ${timestamp}`);
390
+ console.log(`[AgentOnly] Type: ${message.type}`);
391
+ console.log(`[AgentOnly] Subtype: ${subtype}`);
392
+ // System messages
393
+ if (message.type === 'system') {
394
+ if (subtype === 'init') {
395
+ this.sessionId = message.session_id;
396
+ console.log(`[AgentOnly] Session ID: ${this.sessionId}`);
397
+ console.log(`[AgentOnly] Tools available: ${message.tools?.length || 'unknown'}`);
398
+ }
399
+ else if (subtype === 'error') {
400
+ console.error('[AgentOnly] SYSTEM ERROR:');
401
+ console.error(JSON.stringify(message, null, 2));
402
+ }
403
+ else {
404
+ console.log(`[AgentOnly] System message:`, JSON.stringify(message, null, 2).slice(0, 500));
405
+ }
406
+ }
407
+ // Assistant messages
408
+ if (message.type === 'assistant') {
409
+ const content = message.message?.content || [];
410
+ console.log(`[AgentOnly] Content blocks: ${content.length}`);
411
+ for (const block of content) {
412
+ if (block.type === 'text') {
413
+ console.log(`[AgentOnly] TEXT (${block.text?.length || 0} chars):`);
414
+ console.log(block.text?.slice(0, 300) + (block.text?.length > 300 ? '...' : ''));
415
+ }
416
+ else if (block.type === 'tool_use') {
417
+ toolCallCount++;
418
+ console.log(`[AgentOnly] TOOL_USE #${toolCallCount}:`);
419
+ console.log(` Name: ${block.name}`);
420
+ console.log(` ID: ${block.id}`);
421
+ console.log(` Input: ${JSON.stringify(block.input).slice(0, 200)}`);
422
+ }
423
+ else {
424
+ console.log(`[AgentOnly] Block type: ${block.type}`);
425
+ }
426
+ }
427
+ if (content.length === 0) {
428
+ console.log('[AgentOnly] WARNING: Assistant message with no content!');
429
+ console.log('[AgentOnly] Full message:', JSON.stringify(message, null, 2));
430
+ }
431
+ }
432
+ // User messages (tool results)
433
+ if (message.type === 'user') {
434
+ const content = message.message?.content || [];
435
+ const toolResults = content.filter((c) => c.type === 'tool_result');
436
+ console.log(`[AgentOnly] Tool results: ${toolResults.length}`);
437
+ for (const tr of toolResults) {
438
+ const resultPreview = typeof tr.content === 'string'
439
+ ? tr.content.slice(0, 200)
440
+ : JSON.stringify(tr.content).slice(0, 200);
441
+ console.log(` ${tr.tool_use_id}: ${tr.is_error ? 'ERROR' : 'ok'} - ${resultPreview}`);
442
+ }
443
+ }
444
+ // Result messages
445
+ if (message.type === 'result') {
446
+ console.log(`[AgentOnly] RESULT: ${subtype}`);
447
+ console.log(`[AgentOnly] Full result:`, JSON.stringify(message, null, 2).slice(0, 1000));
448
+ if (subtype === 'success') {
449
+ wikiComplete = true;
450
+ }
451
+ }
452
+ yield message;
453
+ }
454
+ console.log('\n' + '='.repeat(60));
455
+ console.log('[AgentOnly] AGENT FINISHED');
456
+ console.log(`[AgentOnly] Total messages: ${messageCount}`);
457
+ console.log(`[AgentOnly] Total tool calls: ${toolCallCount}`);
458
+ console.log(`[AgentOnly] Wiki complete: ${wikiComplete}`);
459
+ console.log('='.repeat(60) + '\n');
460
+ if (!wikiComplete) {
461
+ console.log('[AgentOnly] WARNING: Agent finished without success signal!');
462
+ }
463
+ if (toolCallCount === 0) {
464
+ console.log('[AgentOnly] WARNING: No tool calls were made! The agent may not have access to tools.');
465
+ }
466
+ yield { type: 'complete', message: 'Agent-only run complete', progress: 100 };
467
+ }
468
+ catch (err) {
469
+ console.log('\n' + '='.repeat(60));
470
+ console.log('[AgentOnly] AGENT ERROR');
471
+ console.log(`[AgentOnly] Messages before error: ${messageCount}`);
472
+ console.log(`[AgentOnly] Tool calls before error: ${toolCallCount}`);
473
+ console.log('[AgentOnly] Error:', err.message);
474
+ console.log('[AgentOnly] Error name:', err.name);
475
+ console.log('[AgentOnly] Error code:', err.code);
476
+ console.log('[AgentOnly] Error signal:', err.signal);
477
+ if (err.stderr)
478
+ console.log('[AgentOnly] Stderr:', err.stderr.slice(0, 1000));
479
+ if (err.stdout)
480
+ console.log('[AgentOnly] Stdout:', err.stdout.slice(0, 1000));
481
+ if (err.stack)
482
+ console.log('[AgentOnly] Stack:', err.stack);
483
+ console.log('='.repeat(60) + '\n');
484
+ if (wikiComplete) {
485
+ console.log('[AgentOnly] Wiki was complete before error, treating as success');
486
+ yield { type: 'complete', message: 'Agent-only run complete (with cleanup error)', progress: 100 };
487
+ return;
488
+ }
489
+ throw err;
490
+ }
491
+ }
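generateWikiAgentOnly only works when a previous run has left metadata in the cache directory. A short sketch of that precondition, reusing the agent instance from the earlier sketch and assuming the same .ted-mosby-cache layout shown above (paths are placeholders):

```js
// Sketch: check for the cached index before invoking the agent-only debug path.
import * as fs from 'fs';
import * as path from 'path';

const outputDir = path.resolve('./wiki-out'); // placeholder
const metadataPath = path.join(outputDir, '.ted-mosby-cache', 'metadata.json');

if (fs.existsSync(metadataPath)) {
  for await (const event of agent.generateWikiAgentOnly({
    repoUrl: 'https://github.com/example/repo', // placeholder
    outputDir
  })) {
    console.log(event.type, event.message ?? '');
  }
} else {
  console.error('No cached index found; run a full generation first.');
}
```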
492
+ /**
493
+ * Generate wiki using Anthropic API directly (bypasses Claude Code billing check).
494
+ * This method handles tool calls manually and uses your ANTHROPIC_API_KEY credits.
495
+ */
496
+ async *generateWikiDirectApi(options) {
497
+ currentGenerationOptions = options;
498
+ // Phase 1: Prepare repository
499
+ yield { type: 'phase', message: 'Preparing repository', progress: 0 };
500
+ this.repoPath = await this.prepareRepository(options.repoUrl, options.accessToken);
501
+ this.outputDir = path.resolve(options.outputDir);
502
+ if (!fs.existsSync(this.outputDir)) {
503
+ fs.mkdirSync(this.outputDir, { recursive: true });
504
+ }
505
+ // Phase 2: Index codebase (or load existing)
506
+ const cachePath = path.join(this.outputDir, '.ted-mosby-cache');
507
+ const metadataPath = path.join(cachePath, 'metadata.json');
508
+ if (options.skipIndex && fs.existsSync(metadataPath)) {
509
+ yield { type: 'phase', message: 'Loading existing index', progress: 10 };
510
+ this.ragSystem = new RAGSystem({
511
+ storePath: cachePath,
512
+ repoPath: this.repoPath
513
+ });
514
+ await this.ragSystem.loadMetadataOnly();
515
+ }
516
+ else {
517
+ yield { type: 'phase', message: 'Indexing codebase', progress: 10 };
518
+ this.ragSystem = new RAGSystem({
519
+ storePath: cachePath,
520
+ repoPath: this.repoPath,
521
+ maxChunks: options.maxChunks
522
+ });
523
+ await this.ragSystem.indexRepository();
524
+ }
525
+ const searchMode = this.ragSystem['index'] ? 'vector search' : 'keyword search';
526
+ yield { type: 'step', message: `Loaded ${this.ragSystem.getDocumentCount()} chunks (${searchMode})` };
527
+ // Phase 3: Run direct API agent
528
+ yield { type: 'phase', message: 'Generating wiki (Direct API mode)', progress: 20 };
529
+ const client = new Anthropic();
530
+ const model = options.model || 'claude-sonnet-4-20250514';
531
+ const maxTurns = options.maxTurns || 200;
532
+ const prompt = this.buildGenerationPrompt(options);
533
+ // Define tools for the API
534
+ const tools = this.buildDirectApiTools();
535
+ console.log('\n' + '='.repeat(60));
536
+ console.log('[DirectAPI] Starting direct API mode');
537
+ console.log('[DirectAPI] Model:', model);
538
+ console.log('[DirectAPI] Max turns:', maxTurns);
539
+ console.log('[DirectAPI] Tools:', tools.length);
540
+ console.log('[DirectAPI] Repository:', this.repoPath);
541
+ console.log('[DirectAPI] Output:', this.outputDir);
542
+ console.log('[DirectAPI] Chunks:', this.ragSystem.getDocumentCount());
543
+ console.log('='.repeat(60) + '\n');
544
+ // Build initial messages
545
+ const messages = [
546
+ { role: 'user', content: prompt }
547
+ ];
548
+ let turnCount = 0;
549
+ let totalToolCalls = 0;
550
+ let done = false;
551
+ try {
552
+ while (!done && turnCount < maxTurns) {
553
+ turnCount++;
554
+ const progress = 20 + Math.min(70, (turnCount / maxTurns) * 70);
555
+ yield { type: 'step', message: `Turn ${turnCount}/${maxTurns}`, progress };
556
+ if (options.verbose) {
557
+ console.log(`\n[DirectAPI] === Turn ${turnCount} ===`);
558
+ }
559
+ // Call the API
560
+ const response = await client.messages.create({
561
+ model,
562
+ max_tokens: 8192,
563
+ system: WIKI_SYSTEM_PROMPT,
564
+ tools,
565
+ messages
566
+ });
567
+ if (options.verbose) {
568
+ console.log(`[DirectAPI] Stop reason: ${response.stop_reason}`);
569
+ console.log(`[DirectAPI] Usage: input=${response.usage.input_tokens}, output=${response.usage.output_tokens}`);
570
+ }
571
+ // Process response content
572
+ const assistantContent = [];
573
+ const toolResults = [];
574
+ for (const block of response.content) {
575
+ assistantContent.push(block);
576
+ if (block.type === 'text') {
577
+ if (options.verbose) {
578
+ console.log(`[DirectAPI] Text: ${block.text.slice(0, 200)}${block.text.length > 200 ? '...' : ''}`);
579
+ }
580
+ yield { type: 'assistant', content: block.text };
581
+ }
582
+ else if (block.type === 'tool_use') {
583
+ totalToolCalls++;
584
+ if (options.verbose) {
585
+ console.log(`[DirectAPI] Tool call #${totalToolCalls}: ${block.name}`);
586
+ console.log(`[DirectAPI] Input: ${JSON.stringify(block.input).slice(0, 200)}`);
587
+ }
588
+ // Execute the tool
589
+ const result = await this.executeDirectApiTool(block.name, block.input);
590
+ if (options.verbose) {
591
+ console.log(`[DirectAPI] Result: ${result.slice(0, 200)}${result.length > 200 ? '...' : ''}`);
592
+ }
593
+ toolResults.push({
594
+ type: 'tool_result',
595
+ tool_use_id: block.id,
596
+ content: result
597
+ });
598
+ }
599
+ }
600
+ // Add assistant message
601
+ messages.push({ role: 'assistant', content: assistantContent });
602
+ // If there were tool uses, add results and continue
603
+ if (toolResults.length > 0) {
604
+ messages.push({ role: 'user', content: toolResults });
605
+ }
606
+ // Check if we're done
607
+ if (response.stop_reason === 'end_turn' && toolResults.length === 0) {
608
+ done = true;
609
+ console.log(`[DirectAPI] Agent finished naturally after ${turnCount} turns`);
610
+ }
611
+ else if (response.stop_reason === 'max_tokens') {
612
+ console.log(`[DirectAPI] Warning: Hit max_tokens, continuing...`);
613
+ }
614
+ }
615
+ if (!done && turnCount >= maxTurns) {
616
+ console.log(`[DirectAPI] Warning: Reached max turns (${maxTurns})`);
617
+ yield { type: 'error', message: `Reached maximum turns (${maxTurns})` };
618
+ }
619
+ console.log('\n' + '='.repeat(60));
620
+ console.log('[DirectAPI] Initial generation complete');
621
+ console.log(`[DirectAPI] Total turns: ${turnCount}`);
622
+ console.log(`[DirectAPI] Total tool calls: ${totalToolCalls}`);
623
+ console.log('='.repeat(60) + '\n');
624
+ // Phase 4: Verification loop - keep generating until all links are valid
625
+ yield { type: 'phase', message: 'Verifying wiki completeness', progress: 90 };
626
+ let verificationAttempts = 0;
627
+ const maxVerificationAttempts = 5; // Prevent infinite loops
628
+ while (verificationAttempts < maxVerificationAttempts) {
629
+ verificationAttempts++;
630
+ const verification = await this.verifyWikiCompleteness(this.outputDir);
631
+ console.log(`[DirectAPI] Verification #${verificationAttempts}: ${verification.totalPages} pages, ${verification.brokenLinks.length} broken links`);
632
+ if (verification.isComplete) {
633
+ console.log('[DirectAPI] Wiki is complete! All links are valid.');
634
+ break;
635
+ }
636
+ yield {
637
+ type: 'step',
638
+ message: `Found ${verification.brokenLinks.length} broken links, generating missing pages...`,
639
+ progress: 90 + verificationAttempts
640
+ };
641
+ // Get unique missing pages
642
+ const missingPages = [...new Set(verification.brokenLinks.map(l => l.target))];
643
+ console.log(`[DirectAPI] Missing pages: ${missingPages.join(', ')}`);
644
+ // Generate missing pages
645
+ const continuationPrompt = `Continue generating wiki pages. The following pages are referenced but do not exist:
646
+
647
+ ${missingPages.map(p => `- ${p}`).join('\n')}
648
+
649
+ For EACH missing page:
650
+ 1. Use search_codebase to find relevant code for that topic
651
+ 2. Use read_file to read the specific source files
652
+ 3. Use write_wiki_page to create the page with proper source traceability
653
+
654
+ Remember: Every architectural concept MUST include file:line references to the source code.
655
+ Create all ${missingPages.length} missing pages now.`;
656
+ // Reset for continuation
657
+ const continuationMessages = [
658
+ { role: 'user', content: continuationPrompt }
659
+ ];
660
+ let continuationTurns = 0;
661
+ const maxContinuationTurns = Math.min(50, maxTurns - turnCount); // Don't exceed remaining turns
662
+ while (continuationTurns < maxContinuationTurns) {
663
+ continuationTurns++;
664
+ turnCount++;
665
+ const response = await client.messages.create({
666
+ model,
667
+ max_tokens: 8192,
668
+ system: WIKI_SYSTEM_PROMPT,
669
+ tools,
670
+ messages: continuationMessages
671
+ });
672
+ if (options.verbose) {
673
+ console.log(`[DirectAPI] Continuation turn ${continuationTurns}: ${response.stop_reason}`);
674
+ }
675
+ const assistantContent = [];
676
+ const toolResults = [];
677
+ for (const block of response.content) {
678
+ assistantContent.push(block);
679
+ if (block.type === 'tool_use') {
680
+ totalToolCalls++;
681
+ const result = await this.executeDirectApiTool(block.name, block.input);
682
+ toolResults.push({
683
+ type: 'tool_result',
684
+ tool_use_id: block.id,
685
+ content: result
686
+ });
687
+ if (options.verbose) {
688
+ console.log(`[DirectAPI] Tool: ${block.name}`);
689
+ }
690
+ }
691
+ }
692
+ continuationMessages.push({ role: 'assistant', content: assistantContent });
693
+ if (toolResults.length > 0) {
694
+ continuationMessages.push({ role: 'user', content: toolResults });
695
+ }
696
+ if (response.stop_reason === 'end_turn' && toolResults.length === 0) {
697
+ break;
698
+ }
699
+ }
700
+ }
701
+ if (verificationAttempts >= maxVerificationAttempts) {
702
+ console.log(`[DirectAPI] Warning: Max verification attempts (${maxVerificationAttempts}) reached`);
703
+ const finalCheck = await this.verifyWikiCompleteness(this.outputDir);
704
+ if (!finalCheck.isComplete) {
705
+ console.log(`[DirectAPI] ${finalCheck.brokenLinks.length} broken links remain`);
706
+ yield { type: 'step', message: `Warning: ${finalCheck.brokenLinks.length} broken links remain`, progress: 95 };
707
+ }
708
+ }
709
+ console.log('\n' + '='.repeat(60));
710
+ console.log('[DirectAPI] GENERATION COMPLETE');
711
+ console.log(`[DirectAPI] Total turns: ${turnCount}`);
712
+ console.log(`[DirectAPI] Total tool calls: ${totalToolCalls}`);
713
+ console.log('='.repeat(60) + '\n');
714
+ yield { type: 'complete', message: 'Wiki generation complete (Direct API)', progress: 100 };
715
+ }
716
+ catch (err) {
717
+ console.error('[DirectAPI] Error:', err.message);
718
+ if (options.verbose) {
719
+ console.error('[DirectAPI] Full error:', err);
720
+ }
721
+ yield { type: 'error', message: err.message };
86
722
  throw err;
87
723
  }
88
724
  }
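Stripped of logging and the verification pass, the direct-API path above is a standard manual tool-use loop against the Anthropic Messages API. A condensed sketch of that control flow, reusing the agent instance from the earlier sketches and substituting a placeholder system prompt for WIKI_SYSTEM_PROMPT:

```js
// Sketch of the manual tool-use loop behind generateWikiDirectApi.
import Anthropic from '@anthropic-ai/sdk';

const client = new Anthropic(); // reads ANTHROPIC_API_KEY from the environment
const tools = agent.buildDirectApiTools();
const messages = [{ role: 'user', content: 'Document this repository.' }];

for (let turn = 0; turn < 200; turn++) {
  const response = await client.messages.create({
    model: 'claude-sonnet-4-20250514',
    max_tokens: 8192,
    system: 'You are an architecture documentation agent.', // placeholder
    tools,
    messages
  });

  // Execute every tool call and collect the results for the next turn.
  const toolResults = [];
  for (const block of response.content) {
    if (block.type === 'tool_use') {
      const result = await agent.executeDirectApiTool(block.name, block.input);
      toolResults.push({ type: 'tool_result', tool_use_id: block.id, content: result });
    }
  }

  messages.push({ role: 'assistant', content: response.content });
  if (toolResults.length === 0 && response.stop_reason === 'end_turn') break;
  if (toolResults.length > 0) messages.push({ role: 'user', content: toolResults });
}
```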
725
+ /**
726
+ * Build tool definitions for direct API calls
727
+ */
728
+ buildDirectApiTools() {
729
+ return [
730
+ // Filesystem tools
731
+ {
732
+ name: 'read_file',
733
+ description: 'Read the contents of a file from the repository',
734
+ input_schema: {
735
+ type: 'object',
736
+ properties: {
737
+ path: { type: 'string', description: 'Path to the file relative to repository root' }
738
+ },
739
+ required: ['path']
740
+ }
741
+ },
742
+ {
743
+ name: 'list_directory',
744
+ description: 'List files and directories in a path',
745
+ input_schema: {
746
+ type: 'object',
747
+ properties: {
748
+ path: { type: 'string', description: 'Path to the directory relative to repository root' }
749
+ },
750
+ required: ['path']
751
+ }
752
+ },
753
+ {
754
+ name: 'directory_tree',
755
+ description: 'Get a tree view of the directory structure',
756
+ input_schema: {
757
+ type: 'object',
758
+ properties: {
759
+ path: { type: 'string', description: 'Path to the directory relative to repository root' },
760
+ depth: { type: 'number', description: 'Maximum depth to traverse (default 3)' }
761
+ },
762
+ required: ['path']
763
+ }
764
+ },
765
+ // RAG/Wiki tools
766
+ {
767
+ name: 'search_codebase',
768
+ description: 'Semantic search over the codebase using embeddings. Returns relevant code snippets with file paths and line numbers.',
769
+ input_schema: {
770
+ type: 'object',
771
+ properties: {
772
+ query: { type: 'string', description: 'Natural language search query' },
773
+ maxResults: { type: 'number', description: 'Maximum results (default 10)' }
774
+ },
775
+ required: ['query']
776
+ }
777
+ },
778
+ {
779
+ name: 'write_wiki_page',
780
+ description: 'Write a wiki documentation page to the output directory',
781
+ input_schema: {
782
+ type: 'object',
783
+ properties: {
784
+ pagePath: { type: 'string', description: 'Path relative to wiki root (e.g., "architecture/overview.md")' },
785
+ title: { type: 'string', description: 'Page title' },
786
+ content: { type: 'string', description: 'Full markdown content (excluding H1 title)' },
787
+ description: { type: 'string', description: 'Brief page description' },
788
+ sources: {
789
+ type: 'array',
790
+ items: { type: 'string' },
791
+ description: 'Source files referenced'
792
+ }
793
+ },
794
+ required: ['pagePath', 'title', 'content']
795
+ }
796
+ },
797
+ {
798
+ name: 'analyze_code_structure',
799
+ description: 'Analyze a code file to extract functions, classes, imports, and exports',
800
+ input_schema: {
801
+ type: 'object',
802
+ properties: {
803
+ filePath: { type: 'string', description: 'Path to the file to analyze' }
804
+ },
805
+ required: ['filePath']
806
+ }
807
+ },
808
+ {
809
+ name: 'verify_wiki_completeness',
810
+ description: 'Check for broken internal links in the wiki. Returns missing pages that need to be created.',
811
+ input_schema: {
812
+ type: 'object',
813
+ properties: {}
814
+ }
815
+ },
816
+ {
817
+ name: 'list_wiki_pages',
818
+ description: 'List all wiki pages that have been created',
819
+ input_schema: {
820
+ type: 'object',
821
+ properties: {}
822
+ }
823
+ }
824
+ ];
825
+ }
826
+ /**
827
+ * Execute a tool call for direct API mode
828
+ */
829
+ async executeDirectApiTool(name, input) {
830
+ try {
831
+ switch (name) {
832
+ case 'read_file': {
833
+ const filePath = path.join(this.repoPath, input.path);
834
+ if (!fs.existsSync(filePath)) {
835
+ return `Error: File not found: ${input.path}`;
836
+ }
837
+ const content = fs.readFileSync(filePath, 'utf-8');
838
+ return content;
839
+ }
840
+ case 'list_directory': {
841
+ const dirPath = path.join(this.repoPath, input.path || '');
842
+ if (!fs.existsSync(dirPath)) {
843
+ return `Error: Directory not found: ${input.path}`;
844
+ }
845
+ const entries = fs.readdirSync(dirPath, { withFileTypes: true });
846
+ const result = entries.map(e => {
847
+ const type = e.isDirectory() ? '[DIR]' : '[FILE]';
848
+ return `${type} ${e.name}`;
849
+ }).join('\n');
850
+ return result;
851
+ }
852
+ case 'directory_tree': {
853
+ const dirPath = path.join(this.repoPath, input.path || '');
854
+ const maxDepth = input.depth || 3;
855
+ const tree = this.buildDirectoryTree(dirPath, maxDepth, 0);
856
+ return tree;
857
+ }
858
+ case 'search_codebase': {
859
+ if (!this.ragSystem) {
860
+ return 'Error: RAG system not initialized';
861
+ }
862
+ const maxResults = input.maxResults || 10;
863
+ const results = await this.ragSystem.search(input.query, { maxResults });
864
+ if (results.length === 0) {
865
+ return 'No relevant code found for this query.';
866
+ }
867
+ const formatted = results.map((r, i) => `### Result ${i + 1} (score: ${r.score.toFixed(3)})\n` +
868
+ `**Source:** \`${r.filePath}:${r.startLine}-${r.endLine}\`\n\n` +
869
+ '```' + (r.language || '') + '\n' + r.content + '\n```').join('\n\n');
870
+ return `Found ${results.length} relevant code snippets:\n\n${formatted}`;
871
+ }
872
+ case 'write_wiki_page': {
873
+ const fullPath = path.join(this.outputDir, input.pagePath);
874
+ const dir = path.dirname(fullPath);
875
+ if (!fs.existsSync(dir)) {
876
+ fs.mkdirSync(dir, { recursive: true });
877
+ }
878
+ const frontmatterData = {
879
+ title: input.title,
880
+ generated: new Date().toISOString(),
881
+ description: input.description,
882
+ sources: input.sources
883
+ };
884
+ const fullContent = matter.stringify(`# ${input.title}\n\n${input.content}`, frontmatterData);
885
+ fs.writeFileSync(fullPath, fullContent, 'utf-8');
886
+ return `Successfully wrote wiki page: ${input.pagePath}`;
887
+ }
888
+ case 'analyze_code_structure': {
889
+ const filePath = path.join(this.repoPath, input.filePath);
890
+ if (!fs.existsSync(filePath)) {
891
+ return `Error: File not found: ${input.filePath}`;
892
+ }
893
+ const content = fs.readFileSync(filePath, 'utf-8');
894
+ const lines = content.split('\n');
895
+ const ext = path.extname(input.filePath);
896
+ const analysis = {
897
+ functions: [],
898
+ classes: [],
899
+ imports: [],
900
+ exports: []
901
+ };
902
+ if (['.ts', '.tsx', '.js', '.jsx'].includes(ext)) {
903
+ lines.forEach((line, idx) => {
904
+ const lineNum = idx + 1;
905
+ const funcMatch = line.match(/(?:export\s+)?(?:async\s+)?function\s+(\w+)/);
906
+ if (funcMatch)
907
+ analysis.functions.push({ name: funcMatch[1], line: lineNum });
908
+ const classMatch = line.match(/(?:export\s+)?class\s+(\w+)/);
909
+ if (classMatch)
910
+ analysis.classes.push({ name: classMatch[1], line: lineNum });
911
+ const importMatch = line.match(/import\s+.*\s+from\s+['"]([^'"]+)['"]/);
912
+ if (importMatch)
913
+ analysis.imports.push({ module: importMatch[1], line: lineNum });
914
+ const exportMatch = line.match(/export\s+(?:default\s+)?(?:class|function|const|let|var|interface|type)\s+(\w+)/);
915
+ if (exportMatch)
916
+ analysis.exports.push({ name: exportMatch[1], line: lineNum });
917
+ });
918
+ }
919
+ let output = `# Code Analysis: ${input.filePath}\n`;
920
+ output += `Lines: ${lines.length}\n\n`;
921
+ output += `Functions (${analysis.functions.length}): ${analysis.functions.map(f => `${f.name}:${f.line}`).join(', ')}\n`;
922
+ output += `Classes (${analysis.classes.length}): ${analysis.classes.map(c => `${c.name}:${c.line}`).join(', ')}\n`;
923
+ output += `Imports (${analysis.imports.length}): ${analysis.imports.map(i => i.module).join(', ')}\n`;
924
+ output += `Exports (${analysis.exports.length}): ${analysis.exports.map(e => e.name).join(', ')}`;
925
+ return output;
926
+ }
927
+ case 'verify_wiki_completeness': {
928
+ const result = await this.verifyWikiCompleteness(this.outputDir);
929
+ let response = `# Wiki Completeness Report\n\n`;
930
+ response += `Total pages: ${result.totalPages}\n`;
931
+ response += `Missing pages: ${result.missingPages.length}\n`;
932
+ response += `Broken links: ${result.brokenLinks.length}\n\n`;
933
+ if (result.isComplete) {
934
+ response += '✅ All internal links are valid! The wiki is complete.';
935
+ }
936
+ else {
937
+ response += '❌ Missing Pages (MUST CREATE):\n\n';
938
+ for (const link of result.brokenLinks) {
939
+ response += `- ${link.target} (referenced from ${link.source})\n`;
940
+ }
941
+ }
942
+ return response;
943
+ }
944
+ case 'list_wiki_pages': {
945
+ const wikiFiles = this.findAllWikiFiles(this.outputDir);
946
+ const pages = wikiFiles.map(f => {
947
+ const relativePath = path.relative(this.outputDir, f).replace(/\\/g, '/');
948
+ return relativePath;
949
+ });
950
+ return `Wiki Pages (${pages.length} total):\n${pages.sort().map(p => `- ${p}`).join('\n')}`;
951
+ }
952
+ default:
953
+ return `Error: Unknown tool: ${name}`;
954
+ }
955
+ }
956
+ catch (error) {
957
+ return `Error executing ${name}: ${error instanceof Error ? error.message : String(error)}`;
958
+ }
959
+ }
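The analyze_code_structure branch above is a purely regex-based pass over JS/TS sources. A small sketch applying the same patterns to a few made-up lines, to show what they do and do not catch:

```js
// Sketch: the analyze_code_structure regexes applied to sample lines.
const sample = [
  "import { z } from 'zod';",
  "export class ArchitecturalWikiAgent {",
  "export async function generateWiki(options) {"
];

sample.forEach((line, idx) => {
  const fn  = line.match(/(?:export\s+)?(?:async\s+)?function\s+(\w+)/);
  const cls = line.match(/(?:export\s+)?class\s+(\w+)/);
  const imp = line.match(/import\s+.*\s+from\s+['"]([^'"]+)['"]/);
  if (fn)  console.log(`function ${fn[1]} at line ${idx + 1}`);
  if (cls) console.log(`class ${cls[1]} at line ${idx + 1}`);
  if (imp) console.log(`import from '${imp[1]}' at line ${idx + 1}`);
});
```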
960
+ /**
961
+ * Build a directory tree string
962
+ */
963
+ buildDirectoryTree(dirPath, maxDepth, currentDepth, prefix = '') {
964
+ if (currentDepth >= maxDepth || !fs.existsSync(dirPath)) {
965
+ return '';
966
+ }
967
+ const entries = fs.readdirSync(dirPath, { withFileTypes: true })
968
+ .filter(e => !e.name.startsWith('.') && e.name !== 'node_modules');
969
+ let result = '';
970
+ entries.forEach((entry, index) => {
971
+ const isLast = index === entries.length - 1;
972
+ const connector = isLast ? '└── ' : '├── ';
973
+ const childPrefix = isLast ? ' ' : '│ ';
974
+ result += `${prefix}${connector}${entry.name}\n`;
975
+ if (entry.isDirectory()) {
976
+ const childPath = path.join(dirPath, entry.name);
977
+ result += this.buildDirectoryTree(childPath, maxDepth, currentDepth + 1, prefix + childPrefix);
978
+ }
979
+ });
980
+ return result;
981
+ }
89
982
  /**
90
983
  * Estimate generation time and cost without making API calls
91
984
  */
@@ -238,9 +1131,11 @@ export class ArchitecturalWikiAgent {
238
1131
  // Custom tedmosby tools
239
1132
  'mcp__tedmosby__search_codebase',
240
1133
  'mcp__tedmosby__write_wiki_page',
241
- 'mcp__tedmosby__analyze_code_structure'
1134
+ 'mcp__tedmosby__analyze_code_structure',
1135
+ 'mcp__tedmosby__verify_wiki_completeness',
1136
+ 'mcp__tedmosby__list_wiki_pages'
242
1137
  ],
243
- maxTurns: 200,
1138
+ maxTurns: wikiOptions.maxTurns || 200,
244
1139
  permissionMode: 'acceptEdits',
245
1140
  includePartialMessages: true,
246
1141
  // Capture stderr from Claude Code subprocess
@@ -256,6 +1151,33 @@ export class ArchitecturalWikiAgent {
256
1151
  const configNote = options.configPath && fs.existsSync(options.configPath)
257
1152
  ? `\n\nConfiguration file provided at: ${options.configPath}\nPlease read it first to understand the wiki structure requirements.`
258
1153
  : '';
1154
+ // Continuation mode - only generate missing pages
1155
+ if (options.missingPages && options.missingPages.length > 0) {
1156
+ const missingList = options.missingPages.map(p => `- ${p}`).join('\n');
1157
+ return `Continue generating the architectural documentation wiki. Some pages are missing and need to be created.
1158
+
1159
+ **Repository:** ${options.repoUrl}
1160
+ **Output Directory:** ${this.outputDir}
1161
+
1162
+ ## Missing Pages That MUST Be Created
1163
+
1164
+ The following wiki pages are referenced but do not exist. You MUST create each of these:
1165
+
1166
+ ${missingList}
1167
+
1168
+ ## Instructions
1169
+
1170
+ 1. First, use \`mcp__tedmosby__list_wiki_pages\` to see what pages already exist
1171
+ 2. For each missing page above:
1172
+ - Use \`mcp__tedmosby__search_codebase\` to find relevant code for that topic
1173
+ - Use \`mcp__filesystem__read_file\` to read the specific source files
1174
+ - Use \`mcp__tedmosby__write_wiki_page\` to create the page with proper source traceability
1175
+ 3. After creating all pages, use \`mcp__tedmosby__verify_wiki_completeness\` to confirm all links are valid
1176
+
1177
+ Remember: Every architectural concept MUST include file:line references to the source code.
1178
+ Do NOT modify existing pages unless they have broken internal links that need fixing.`;
1179
+ }
1180
+ // Full generation mode
259
1181
  return `Generate a comprehensive architectural documentation wiki for this repository.
260
1182
 
261
1183
  **Repository:** ${options.repoUrl}
@@ -269,6 +1191,11 @@ Begin by:
269
1191
  3. Planning the wiki structure
270
1192
  4. Generating documentation with source code traceability
271
1193
 
1194
+ **IMPORTANT:** After generating all pages, you MUST:
1195
+ 5. Use \`mcp__tedmosby__verify_wiki_completeness\` to check for broken links
1196
+ 6. If any pages are missing, create them immediately
1197
+ 7. Repeat verification until all links are valid
1198
+
272
1199
  Remember: Every architectural concept MUST include file:line references to the source code.`;
273
1200
  }
274
1201
  /**
@@ -277,9 +1204,11 @@ Remember: Every architectural concept MUST include file:line references to the s
277
1204
  createCustomToolServer() {
278
1205
  const tools = [];
279
1206
  // Tool 1: search_codebase - RAG-powered semantic search
1207
+ // Get configured max results (default 10, can be limited for large codebases)
1208
+ const configuredMaxResults = currentGenerationOptions?.maxSearchResults || 10;
280
1209
  tools.push(tool('search_codebase', 'Semantic search over the codebase using embeddings. Returns relevant code snippets with file paths and line numbers. Use this to find code related to architectural concepts you are documenting.', {
281
1210
  query: z.string().describe('Natural language search query (e.g., "authentication handling", "database connection")'),
282
- maxResults: z.number().min(1).max(20).optional().default(10).describe('Maximum number of results to return'),
1211
+ maxResults: z.number().min(1).max(20).optional().default(configuredMaxResults).describe('Maximum number of results to return'),
283
1212
  fileTypes: z.array(z.string()).optional().describe('Filter by file extensions (e.g., [".ts", ".js"])'),
284
1213
  excludeTests: z.boolean().optional().default(true).describe('Exclude test files from results')
285
1214
  }, async (args) => {
@@ -292,8 +1221,10 @@ Remember: Every architectural concept MUST include file:line references to the s
292
1221
  };
293
1222
  }
294
1223
  try {
1224
+ // Apply configured limit
1225
+ const effectiveMaxResults = Math.min(args.maxResults || configuredMaxResults, configuredMaxResults);
295
1226
  const results = await this.ragSystem.search(args.query, {
296
- maxResults: args.maxResults || 10,
1227
+ maxResults: effectiveMaxResults,
297
1228
  fileTypes: args.fileTypes,
298
1229
  excludeTests: args.excludeTests ?? true
299
1230
  });
@@ -542,11 +1473,206 @@ Remember: Every architectural concept MUST include file:line references to the s
542
1473
  };
543
1474
  }
544
1475
  }));
1476
+ // Tool 4: verify_wiki_completeness - Check for broken internal links and missing pages
1477
+ tools.push(tool('verify_wiki_completeness', 'Verify that all internal links in the wiki resolve to actual pages. Returns a list of missing pages that need to be created. ALWAYS run this after generating wiki pages to ensure completeness.', {
1478
+ fixBrokenLinks: z.boolean().optional().default(false).describe('If true, returns suggested content for missing pages')
1479
+ }, async (args) => {
1480
+ try {
1481
+ const wikiFiles = this.findAllWikiFiles(this.outputDir);
1482
+ const existingPages = new Set(wikiFiles.map(f => path.relative(this.outputDir, f).replace(/\\/g, '/')));
1483
+ const brokenLinks = [];
1484
+ const allReferencedPages = new Set();
1485
+ // Scan all wiki files for internal links
1486
+ for (const file of wikiFiles) {
1487
+ const content = fs.readFileSync(file, 'utf-8');
1488
+ const relativePath = path.relative(this.outputDir, file).replace(/\\/g, '/');
1489
+ const fileDir = path.dirname(file);
1490
+ // Find all markdown links
1491
+ const linkRegex = /\[([^\]]+)\]\(([^)]+)\)/g;
1492
+ let match;
1493
+ while ((match = linkRegex.exec(content)) !== null) {
1494
+ const linkText = match[1];
1495
+ const linkPath = match[2];
1496
+ // Skip external links, anchors, and source code references
1497
+ if (linkPath.startsWith('http://') ||
1498
+ linkPath.startsWith('https://') ||
1499
+ linkPath.startsWith('#') ||
1500
+ linkPath.includes('github.com') ||
1501
+ linkPath.match(/\.(ts|js|py|go|rs|java|tsx|jsx)[:L#]/)) {
1502
+ continue;
1503
+ }
1504
+ // Resolve internal markdown links
1505
+ if (linkPath.endsWith('.md') || linkPath.includes('.md#')) {
1506
+ const cleanPath = linkPath.split('#')[0];
1507
+ const resolvedPath = path.resolve(fileDir, cleanPath);
1508
+ const relativeResolved = path.relative(this.outputDir, resolvedPath).replace(/\\/g, '/');
1509
+ allReferencedPages.add(relativeResolved);
1510
+ if (!fs.existsSync(resolvedPath)) {
1511
+ brokenLinks.push({
1512
+ sourcePage: relativePath,
1513
+ targetLink: cleanPath,
1514
+ linkText
1515
+ });
1516
+ }
1517
+ }
1518
+ }
1519
+ }
1520
+ // Find missing pages (referenced but not created)
1521
+ const missingPages = [...allReferencedPages].filter(p => !existingPages.has(p));
1522
+ // Build response
1523
+ let response = `# Wiki Completeness Report\n\n`;
1524
+ response += `**Total pages:** ${existingPages.size}\n`;
1525
+ response += `**Referenced pages:** ${allReferencedPages.size}\n`;
1526
+ response += `**Missing pages:** ${missingPages.length}\n`;
1527
+ response += `**Broken links:** ${brokenLinks.length}\n\n`;
1528
+ if (brokenLinks.length === 0) {
1529
+ response += `✅ **All internal links are valid!** The wiki is complete.\n`;
1530
+ }
1531
+ else {
1532
+ response += `## ❌ Missing Pages (MUST CREATE)\n\n`;
1533
+ response += `The following pages are referenced but do not exist. You MUST create these pages:\n\n`;
1534
+ // Group by missing page
1535
+ const byMissingPage = new Map();
1536
+ for (const link of brokenLinks) {
1537
+ const key = link.targetLink;
1538
+ if (!byMissingPage.has(key)) {
1539
+ byMissingPage.set(key, []);
1540
+ }
1541
+ byMissingPage.get(key).push({ source: link.sourcePage, text: link.linkText });
1542
+ }
1543
+ for (const [missingPage, references] of byMissingPage) {
1544
+ response += `### ${missingPage}\n`;
1545
+ response += `Referenced by:\n`;
1546
+ for (const ref of references) {
1547
+ response += `- \`${ref.source}\` (link text: "${ref.text}")\n`;
1548
+ }
1549
+ response += `\n`;
1550
+ }
1551
+ response += `\n## Action Required\n\n`;
1552
+ response += `Use \`mcp__tedmosby__write_wiki_page\` to create each missing page above.\n`;
1553
+ response += `After creating all pages, run \`mcp__tedmosby__verify_wiki_completeness\` again to confirm.\n`;
1554
+ }
1555
+ return {
1556
+ content: [{
1557
+ type: 'text',
1558
+ text: response
1559
+ }]
1560
+ };
1561
+ }
1562
+ catch (error) {
1563
+ return {
1564
+ content: [{
1565
+ type: 'text',
1566
+ text: `Verification error: ${error instanceof Error ? error.message : String(error)}`
1567
+ }]
1568
+ };
1569
+ }
1570
+ }));
1571
+ // Tool 5: list_wiki_pages - List all created wiki pages
1572
+ tools.push(tool('list_wiki_pages', 'List all wiki pages that have been created in the output directory.', {}, async () => {
1573
+ try {
1574
+ const wikiFiles = this.findAllWikiFiles(this.outputDir);
1575
+ const pages = wikiFiles.map(f => {
1576
+ const relativePath = path.relative(this.outputDir, f).replace(/\\/g, '/');
1577
+ const content = fs.readFileSync(f, 'utf-8');
1578
+ const titleMatch = content.match(/^#\s+(.+)$/m);
1579
+ return {
1580
+ path: relativePath,
1581
+ title: titleMatch ? titleMatch[1] : path.basename(f, '.md')
1582
+ };
1583
+ });
1584
+ let response = `# Wiki Pages (${pages.length} total)\n\n`;
1585
+ for (const page of pages.sort((a, b) => a.path.localeCompare(b.path))) {
1586
+ response += `- \`${page.path}\` - ${page.title}\n`;
1587
+ }
1588
+ return {
1589
+ content: [{
1590
+ type: 'text',
1591
+ text: response
1592
+ }]
1593
+ };
1594
+ }
1595
+ catch (error) {
1596
+ return {
1597
+ content: [{
1598
+ type: 'text',
1599
+ text: `Error listing pages: ${error instanceof Error ? error.message : String(error)}`
1600
+ }]
1601
+ };
1602
+ }
1603
+ }));
545
1604
  return createSdkMcpServer({
546
1605
  name: 'tedmosby',
547
1606
  version: '1.0.0',
548
1607
  tools
549
1608
  });
550
1609
  }
1610
+ /**
1611
+ * Find all markdown files in a directory recursively
1612
+ */
1613
+ findAllWikiFiles(dir) {
1614
+ const files = [];
1615
+ if (!fs.existsSync(dir)) {
1616
+ return files;
1617
+ }
1618
+ const entries = fs.readdirSync(dir, { withFileTypes: true });
1619
+ for (const entry of entries) {
1620
+ const fullPath = path.join(dir, entry.name);
1621
+ if (entry.isDirectory() && !entry.name.startsWith('.')) {
1622
+ files.push(...this.findAllWikiFiles(fullPath));
1623
+ }
1624
+ else if (entry.isFile() && entry.name.endsWith('.md')) {
1625
+ files.push(fullPath);
1626
+ }
1627
+ }
1628
+ return files;
1629
+ }
1630
+ /**
1631
+ * Verify wiki completeness and return missing pages
1632
+ */
1633
+ async verifyWikiCompleteness(wikiDir) {
1634
+ const wikiFiles = this.findAllWikiFiles(wikiDir);
1635
+ const existingPages = new Set(wikiFiles.map(f => path.relative(wikiDir, f).replace(/\\/g, '/')));
1636
+ const brokenLinks = [];
1637
+ const allReferencedPages = new Set();
1638
+ for (const file of wikiFiles) {
1639
+ const content = fs.readFileSync(file, 'utf-8');
1640
+ const relativePath = path.relative(wikiDir, file).replace(/\\/g, '/');
1641
+ const fileDir = path.dirname(file);
1642
+ const linkRegex = /\[([^\]]+)\]\(([^)]+)\)/g;
1643
+ let match;
1644
+ while ((match = linkRegex.exec(content)) !== null) {
1645
+ const linkText = match[1];
1646
+ const linkPath = match[2];
1647
+ if (linkPath.startsWith('http://') ||
1648
+ linkPath.startsWith('https://') ||
1649
+ linkPath.startsWith('#') ||
1650
+ linkPath.includes('github.com') ||
1651
+ linkPath.match(/\.(ts|js|py|go|rs|java|tsx|jsx)[:L#]/)) {
1652
+ continue;
1653
+ }
1654
+ if (linkPath.endsWith('.md') || linkPath.includes('.md#')) {
1655
+ const cleanPath = linkPath.split('#')[0];
1656
+ const resolvedPath = path.resolve(fileDir, cleanPath);
1657
+ const relativeResolved = path.relative(wikiDir, resolvedPath).replace(/\\/g, '/');
1658
+ allReferencedPages.add(relativeResolved);
1659
+ if (!fs.existsSync(resolvedPath)) {
1660
+ brokenLinks.push({
1661
+ source: relativePath,
1662
+ target: cleanPath,
1663
+ linkText
1664
+ });
1665
+ }
1666
+ }
1667
+ }
1668
+ }
1669
+ const missingPages = [...allReferencedPages].filter(p => !existingPages.has(p));
1670
+ return {
1671
+ totalPages: existingPages.size,
1672
+ brokenLinks,
1673
+ missingPages,
1674
+ isComplete: brokenLinks.length === 0
1675
+ };
1676
+ }
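Both the verify_wiki_completeness tool and the helper above use the same markdown-link scan. A minimal sketch of that resolution step in isolation, with a made-up page and link:

```js
// Sketch: resolve an internal markdown link the way verifyWikiCompleteness does.
import * as fs from 'fs';
import * as path from 'path';

const wikiDir = path.resolve('./wiki-out');                        // placeholder
const sourcePage = path.join(wikiDir, 'architecture/overview.md'); // placeholder
const content = '[Data flow](./data-flow.md#pipeline)';            // placeholder link

const linkRegex = /\[([^\]]+)\]\(([^)]+)\)/g;
let match;
while ((match = linkRegex.exec(content)) !== null) {
  const target = match[2].split('#')[0];                  // strip the anchor
  const resolved = path.resolve(path.dirname(sourcePage), target);
  const exists = fs.existsSync(resolved);
  console.log(`${match[1]} -> ${path.relative(wikiDir, resolved)}: ${exists ? 'ok' : 'missing'}`);
}
```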
551
1677
  }
552
1678
  //# sourceMappingURL=wiki-agent.js.map