claude-all-hands 1.0.2 → 1.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35) hide show
  1. package/.claude/agents/curator.md +1 -5
  2. package/.claude/agents/documentation-taxonomist.md +255 -0
  3. package/.claude/agents/documentation-writer.md +366 -0
  4. package/.claude/agents/surveyor.md +1 -1
  5. package/.claude/commands/continue.md +12 -10
  6. package/.claude/commands/create-skill.md +2 -2
  7. package/.claude/commands/create-specialist.md +3 -3
  8. package/.claude/commands/debug.md +5 -5
  9. package/.claude/commands/docs-adjust.md +214 -0
  10. package/.claude/commands/docs-audit.md +172 -0
  11. package/.claude/commands/docs-init.md +210 -0
  12. package/.claude/commands/plan.md +6 -6
  13. package/.claude/commands/whats-next.md +2 -2
  14. package/.claude/envoy/README.md +5 -5
  15. package/.claude/envoy/package-lock.json +216 -10
  16. package/.claude/envoy/package.json +9 -0
  17. package/.claude/envoy/src/commands/docs.ts +881 -0
  18. package/.claude/envoy/src/commands/knowledge.ts +33 -42
  19. package/.claude/envoy/src/lib/ast-queries.ts +261 -0
  20. package/.claude/envoy/src/lib/knowledge.ts +176 -124
  21. package/.claude/envoy/src/lib/tree-sitter-utils.ts +301 -0
  22. package/.claude/envoy/src/types/tree-sitter.d.ts +76 -0
  23. package/.claude/hooks/scripts/enforce_research_fetch.py +1 -1
  24. package/.claude/protocols/bug-discovery.yaml +1 -1
  25. package/.claude/protocols/discovery.yaml +1 -1
  26. package/.claude/settings.json +4 -3
  27. package/.claude/skills/discovery-mode/SKILL.md +7 -7
  28. package/.claude/skills/documentation-taxonomy/SKILL.md +287 -0
  29. package/.claude/skills/implementation-mode/SKILL.md +7 -7
  30. package/.claude/skills/knowledge-discovery/SKILL.md +178 -0
  31. package/bin/cli.js +41 -1
  32. package/package.json +1 -1
  33. package/.claude/agents/documentor.md +0 -147
  34. package/.claude/commands/audit-docs.md +0 -94
  35. package/.claude/commands/create-docs.md +0 -100
@@ -0,0 +1,881 @@
1
+ /**
2
+ * Documentation commands - symbol reference formatting and validation.
3
+ *
4
+ * Commands:
5
+ * envoy docs format-reference <file> <symbol>
6
+ * envoy docs validate
7
+ * envoy docs complexity <path>
8
+ * envoy docs tree <path>
9
+ */
10
+
11
+ import { Command } from "commander";
12
+ import { execSync, spawnSync } from "child_process";
13
+ import { existsSync, readFileSync, readdirSync, statSync } from "fs";
14
+ import { join, relative, extname, dirname } from "path";
15
+ import matter from "gray-matter";
16
+ import { BaseCommand, CommandResult } from "./base.js";
17
+ import {
18
+ findSymbol,
19
+ symbolExists,
20
+ getFileComplexity,
21
+ } from "../lib/tree-sitter-utils.js";
22
+ import { getSupportedExtensions } from "../lib/ast-queries.js";
23
+
24
+ const getProjectRoot = (): string => {
25
+ try {
26
+ return execSync("git rev-parse --show-toplevel", { encoding: "utf-8" }).trim();
27
+ } catch {
28
+ return process.cwd();
29
+ }
30
+ };
31
+
32
+ /**
33
+ * Get most recent commit hash for a line range using git blame.
34
+ * Uses -t flag to get timestamps directly from blame output (single subprocess).
35
+ */
36
+ const getMostRecentHashForRange = (
37
+ filePath: string,
38
+ startLine: number,
39
+ endLine: number,
40
+ cwd: string
41
+ ): { hash: string; success: boolean } => {
42
+ const blameResult = spawnSync(
43
+ "git",
44
+ ["blame", "-L", `${startLine},${endLine}`, "--porcelain", "-t", filePath],
45
+ { encoding: "utf-8", cwd }
46
+ );
47
+
48
+ if (blameResult.status !== 0) {
49
+ return { hash: "0000000", success: false };
50
+ }
51
+
52
+ const lines = blameResult.stdout.split("\n");
53
+ let mostRecentHash = "0000000";
54
+ let mostRecentTime = 0;
55
+ let currentHash = "";
56
+
57
+ for (const line of lines) {
58
+ if (/^[a-f0-9]{40}/.test(line)) {
59
+ currentHash = line.substring(0, 7);
60
+ } else if (line.startsWith("committer-time ") && currentHash) {
61
+ const timestamp = parseInt(line.substring(15), 10);
62
+ if (timestamp > mostRecentTime) {
63
+ mostRecentTime = timestamp;
64
+ mostRecentHash = currentHash;
65
+ }
66
+ }
67
+ }
68
+
69
+ return { hash: mostRecentHash, success: true };
70
+ };
71
+
72
+ /**
73
+ * Get most recent commit hash for entire file.
74
+ * Used for non-AST files where we can't identify symbol line ranges.
75
+ */
76
+ const getMostRecentHashForFile = (
77
+ filePath: string,
78
+ cwd: string
79
+ ): { hash: string; success: boolean } => {
80
+ const logResult = spawnSync(
81
+ "git",
82
+ ["log", "-1", "--format=%h", "--", filePath],
83
+ { encoding: "utf-8", cwd }
84
+ );
85
+
86
+ if (logResult.status !== 0 || !logResult.stdout.trim()) {
87
+ return { hash: "0000000", success: false };
88
+ }
89
+
90
+ return { hash: logResult.stdout.trim().substring(0, 7), success: true };
91
+ };
92
+
93
+ /**
94
+ * Format a symbol reference with git blame hash.
95
+ * Output: [ref:file:symbol:hash] for AST-supported files with symbol
96
+ * Output: [ref:file::hash] for file-only refs (no symbol or non-AST files)
97
+ */
98
+ class FormatReferenceCommand extends BaseCommand {
99
+ readonly name = "format-reference";
100
+ readonly description = "Format symbol reference with git blame hash";
101
+
102
+ defineArguments(cmd: Command): void {
103
+ cmd
104
+ .argument("<file>", "Path to source file")
105
+ .argument("[symbol]", "Symbol name (optional for non-AST files)");
106
+ }
107
+
108
+ async execute(args: Record<string, unknown>): Promise<CommandResult> {
109
+ const file = args.file as string;
110
+ const symbolName = args.symbol as string | undefined;
111
+
112
+ if (!file) {
113
+ return this.error("validation_error", "file is required");
114
+ }
115
+
116
+ const projectRoot = getProjectRoot();
117
+ const absolutePath = file.startsWith("/") ? file : join(projectRoot, file);
118
+ const relativePath = relative(projectRoot, absolutePath);
119
+
120
+ if (!existsSync(absolutePath)) {
121
+ return this.error("file_not_found", `File not found: ${relativePath}`);
122
+ }
123
+
124
+ // Check if file type is AST-supported
125
+ const ext = extname(absolutePath);
126
+ const supported = getSupportedExtensions();
127
+ const isAstSupported = supported.includes(ext);
128
+
129
+ // File-only reference (no symbol provided or non-AST file)
130
+ if (!symbolName) {
131
+ const { hash: fileHash, success } = getMostRecentHashForFile(
132
+ absolutePath,
133
+ projectRoot
134
+ );
135
+
136
+ if (!success || fileHash === "0000000") {
137
+ return this.error(
138
+ "uncommitted_file",
139
+ `File ${relativePath} has uncommitted changes or no git history`,
140
+ "Commit all changes before generating references: git add -A && git commit"
141
+ );
142
+ }
143
+
144
+ const reference = `[ref:${relativePath}::${fileHash}]`;
145
+
146
+ return this.success({
147
+ reference,
148
+ file: relativePath,
149
+ symbol: null,
150
+ hash: fileHash,
151
+ type: "file-only",
152
+ ast_supported: isAstSupported,
153
+ });
154
+ }
155
+
156
+ // Symbol reference - requires AST support
157
+ if (!isAstSupported) {
158
+ return this.error(
159
+ "unsupported_file_type",
160
+ `File type ${ext} does not support symbol references`,
161
+ `Use file-only reference: envoy docs format-reference ${file}`
162
+ );
163
+ }
164
+
165
+ // Find symbol in file
166
+ const symbol = await findSymbol(absolutePath, symbolName);
167
+
168
+ if (!symbol) {
169
+ return this.error(
170
+ "symbol_not_found",
171
+ `Symbol '${symbolName}' not found in ${relativePath}`,
172
+ "Check symbol name spelling and ensure it's a top-level declaration"
173
+ );
174
+ }
175
+
176
+ // Get git blame hash for symbol's line range
177
+ const { hash: mostRecentHash, success } = getMostRecentHashForRange(
178
+ absolutePath,
179
+ symbol.startLine,
180
+ symbol.endLine,
181
+ projectRoot
182
+ );
183
+
184
+ if (!success || mostRecentHash === "0000000") {
185
+ return this.error(
186
+ "uncommitted_file",
187
+ `File ${relativePath} has uncommitted changes or no git history`,
188
+ "Commit all changes before generating references: git add -A && git commit"
189
+ );
190
+ }
191
+
192
+ const reference = `[ref:${relativePath}:${symbolName}:${mostRecentHash}]`;
193
+
194
+ return this.success({
195
+ reference,
196
+ file: relativePath,
197
+ symbol: symbolName,
198
+ hash: mostRecentHash,
199
+ line_range: { start: symbol.startLine, end: symbol.endLine },
200
+ symbol_type: symbol.type,
201
+ type: "symbol",
202
+ });
203
+ }
204
+ }
205
+
206
/**
 * Validate all documentation references.
 *
 * Supports two formats:
 *   [ref:file:symbol:hash] - symbol reference (AST-supported files)
 *   [ref:file::hash] - file-only reference (any file type)
 *
 * Runs three passes over every markdown file under --path:
 *   1. front-matter checks (required 'description', optional 'relevant_files'),
 *   2. extraction of [ref:...] references plus content lint (placeholder
 *      hashes, fenced code blocks, capability-list tables),
 *   3. per-reference validation against the working tree and git history.
 * Results are reported both grouped per doc file (by_doc_file) and as legacy
 * flat arrays.
 */
class ValidateCommand extends BaseCommand {
  readonly name = "validate";
  readonly description = "Validate symbol references in documentation";

  defineArguments(cmd: Command): void {
    cmd.option("--path <path>", "Specific docs path to validate", "docs/");
  }

  async execute(args: Record<string, unknown>): Promise<CommandResult> {
    const docsPath = args.path as string || "docs/";
    const projectRoot = getProjectRoot();
    const absoluteDocsPath = docsPath.startsWith("/")
      ? docsPath
      : join(projectRoot, docsPath);

    // A missing docs directory is not an error: report an empty, clean result.
    if (!existsSync(absoluteDocsPath)) {
      return this.success({
        message: "No docs directory found",
        stale: [],
        invalid: [],
        frontmatter_errors: [],
        total_refs: 0,
        total_files: 0,
      });
    }

    // One extracted [ref:...] occurrence, tagged with the doc file it came from.
    interface RefInfo {
      file: string;
      reference: string;
      refFile: string;
      refSymbol: string | null; // null for file-only refs
      refHash: string;
      isFileOnly: boolean;
    }

    const refs: RefInfo[] = [];

    // Recursively find all markdown files
    const findMarkdownFiles = (dir: string): string[] => {
      const files: string[] = [];
      const entries = readdirSync(dir);
      for (const entry of entries) {
        const fullPath = join(dir, entry);
        const stat = statSync(fullPath);
        if (stat.isDirectory()) {
          files.push(...findMarkdownFiles(fullPath));
        } else if (entry.endsWith(".md")) {
          files.push(fullPath);
        }
      }
      return files;
    };

    const mdFiles = findMarkdownFiles(absoluteDocsPath);

    // Group all issues by doc file for easy delegation to documentation-writers
    interface DocFileIssues {
      stale: Array<{
        reference: string;
        file_path: string;
        symbol_name: string | null;
        stored_hash: string;
        current_hash: string;
        ref_type: "symbol" | "file-only";
      }>;
      invalid: Array<{
        reference: string;
        reason: string;
      }>;
      // NOTE: holds a single reason; when several front-matter checks fail
      // for one file, later assignments overwrite earlier ones and only the
      // last reason is kept here (the flat frontmatterErrors array keeps all).
      frontmatter_error: string | null;
      placeholder_errors: Array<string>;
      inline_code_block_count: number;
      has_capability_list_warning: boolean;
    }

    const byDocFile: Record<string, DocFileIssues> = {};

    // Lazily create the per-doc-file issue bucket with empty/neutral defaults.
    const getOrCreateDocEntry = (docFile: string): DocFileIssues => {
      if (!byDocFile[docFile]) {
        byDocFile[docFile] = {
          stale: [],
          invalid: [],
          frontmatter_error: null,
          placeholder_errors: [],
          inline_code_block_count: 0,
          has_capability_list_warning: false,
        };
      }
      return byDocFile[docFile];
    };

    // Legacy flat arrays for backward compat
    const frontmatterErrors: Array<{
      doc_file: string;
      reason: string;
    }> = [];

    const placeholderErrors: Array<{
      doc_file: string;
      count: number;
      examples: string[];
      reason: string;
    }> = [];

    const inlineCodeErrors: Array<{
      doc_file: string;
      block_count: number;
      reason: string;
    }> = [];

    const capabilityListWarnings: Array<{
      doc_file: string;
      reason: string;
    }> = [];

    // Pass 1: front-matter validation. A file with no front matter at all is
    // skipped entirely (continue) — its refs are still scanned in pass 2.
    for (const mdFile of mdFiles) {
      const content = readFileSync(mdFile, "utf-8");
      const relPath = relative(projectRoot, mdFile);

      // Check for front matter block
      if (!content.startsWith("---")) {
        const reason = "Missing front matter (file must start with ---)";
        frontmatterErrors.push({ doc_file: relPath, reason });
        getOrCreateDocEntry(relPath).frontmatter_error = reason;
        continue;
      }

      try {
        const parsed = matter(content);

        // Check for required description field
        if (!parsed.data.description || typeof parsed.data.description !== "string") {
          const reason = "Missing or invalid 'description' field in front matter";
          frontmatterErrors.push({ doc_file: relPath, reason });
          getOrCreateDocEntry(relPath).frontmatter_error = reason;
        } else if (parsed.data.description.trim() === "") {
          const reason = "Empty 'description' field in front matter";
          frontmatterErrors.push({ doc_file: relPath, reason });
          getOrCreateDocEntry(relPath).frontmatter_error = reason;
        }

        // Validate relevant_files if present
        if (parsed.data.relevant_files !== undefined) {
          if (!Array.isArray(parsed.data.relevant_files)) {
            const reason = "'relevant_files' must be an array";
            frontmatterErrors.push({ doc_file: relPath, reason });
            getOrCreateDocEntry(relPath).frontmatter_error = reason;
          }
        }
      } catch {
        const reason = "Invalid front matter syntax";
        frontmatterErrors.push({ doc_file: relPath, reason });
        getOrCreateDocEntry(relPath).frontmatter_error = reason;
      }
    }

    // Extract refs from all markdown files
    // Matches both [ref:file:symbol:hash] and [ref:file::hash]
    // (a global regex is safe to reuse across files: exec() resets lastIndex
    // to 0 once it returns null, and the while-loop always drains to null).
    const refPattern = /\[ref:([^:\]]+):([^:\]]*):([a-f0-9]+)\]/g;

    // Pass 2: reference extraction + content lint per file.
    for (const mdFile of mdFiles) {
      const content = readFileSync(mdFile, "utf-8");
      const relPath = relative(projectRoot, mdFile);
      let match;
      while ((match = refPattern.exec(content)) !== null) {
        // Empty middle group means the [ref:file::hash] file-only form.
        const isFileOnly = match[2] === "";
        refs.push({
          file: relPath,
          reference: match[0],
          refFile: match[1],
          refSymbol: isFileOnly ? null : match[2],
          refHash: match[3],
          isFileOnly,
        });
      }

      // Placeholder hash detection
      const placeholderPattern = /\[ref:[^\]]+:(abc123[0-9]?|123456[0-9]?|000000[0-9]?|hash[a-f0-9]{0,4}|test[a-f0-9]{0,4})\]/gi;
      const placeholderMatches = content.match(placeholderPattern);
      if (placeholderMatches) {
        placeholderErrors.push({
          doc_file: relPath,
          count: placeholderMatches.length,
          examples: placeholderMatches.slice(0, 3),
          reason: "Placeholder hashes detected - writer didn't use format-reference",
        });
        const entry = getOrCreateDocEntry(relPath);
        // Assignment (not append) is fine: each doc file is visited once here.
        entry.placeholder_errors = placeholderMatches;
      }

      // Inline code block detection (fenced code blocks in documentation)
      // Counts fence lines, so one ``` ... ``` block contributes 2.
      const codeBlockPattern = /^```[a-z0-9_+-]*$/gm;
      const codeBlockMatches = content.match(codeBlockPattern);
      if (codeBlockMatches && codeBlockMatches.length > 0) {
        inlineCodeErrors.push({
          doc_file: relPath,
          block_count: codeBlockMatches.length,
          reason: "Documentation contains inline code blocks",
        });
        getOrCreateDocEntry(relPath).inline_code_block_count = codeBlockMatches.length;
      }

      // Capability list detection (tables with Command/Purpose headers)
      const capabilityTablePattern = /\|\s*(Command|Option|Flag)\s*\|.*\|\s*(Purpose|Description)\s*\|/i;
      if (capabilityTablePattern.test(content)) {
        capabilityListWarnings.push({
          doc_file: relPath,
          reason: "Possible capability list table detected",
        });
        getOrCreateDocEntry(relPath).has_capability_list_warning = true;
      }
    }

    // Legacy flat arrays for backward compat
    const stale: Array<{
      doc_file: string;
      reference: string;
      stored_hash: string;
      current_hash: string;
      ref_type: "symbol" | "file-only";
    }> = [];

    const invalid: Array<{
      doc_file: string;
      reference: string;
      reason: string;
    }> = [];

    // Pass 3: validate each reference. Every failure is recorded twice —
    // in the flat array and in the per-doc-file bucket.
    for (const ref of refs) {
      const absoluteRefFile = join(projectRoot, ref.refFile);

      // Check if file exists
      if (!existsSync(absoluteRefFile)) {
        const reason = "File not found";
        invalid.push({
          doc_file: ref.file,
          reference: ref.reference,
          reason,
        });
        getOrCreateDocEntry(ref.file).invalid.push({
          reference: ref.reference,
          reason,
        });
        continue;
      }

      // File-only reference: check file hash staleness
      if (ref.isFileOnly) {
        const { hash: currentHash, success } = getMostRecentHashForFile(
          absoluteRefFile,
          projectRoot
        );

        if (!success) {
          const reason = "Git hash lookup failed";
          invalid.push({
            doc_file: ref.file,
            reference: ref.reference,
            reason,
          });
          getOrCreateDocEntry(ref.file).invalid.push({
            reference: ref.reference,
            reason,
          });
          continue;
        }

        if (currentHash !== ref.refHash) {
          stale.push({
            doc_file: ref.file,
            reference: ref.reference,
            stored_hash: ref.refHash,
            current_hash: currentHash,
            ref_type: "file-only",
          });
          getOrCreateDocEntry(ref.file).stale.push({
            reference: ref.reference,
            file_path: ref.refFile,
            symbol_name: null,
            stored_hash: ref.refHash,
            current_hash: currentHash,
            ref_type: "file-only",
          });
        }
        continue;
      }

      // Symbol reference: validate symbol exists and check hash
      const ext = extname(absoluteRefFile);
      const supported = getSupportedExtensions();
      if (!supported.includes(ext)) {
        const reason = `File type ${ext} does not support symbol references`;
        invalid.push({
          doc_file: ref.file,
          reference: ref.reference,
          reason,
        });
        getOrCreateDocEntry(ref.file).invalid.push({
          reference: ref.reference,
          reason,
        });
        continue;
      }

      // Check if symbol exists
      // (refSymbol is non-null here: isFileOnly was handled above)
      const symbolFound = await symbolExists(absoluteRefFile, ref.refSymbol!);
      if (!symbolFound) {
        const reason = "Symbol not found";
        invalid.push({
          doc_file: ref.file,
          reference: ref.reference,
          reason,
        });
        getOrCreateDocEntry(ref.file).invalid.push({
          reference: ref.reference,
          reason,
        });
        continue;
      }

      // Get current hash for symbol
      const symbol = await findSymbol(absoluteRefFile, ref.refSymbol!);
      if (!symbol) {
        continue; // Already validated above
      }

      const { hash: mostRecentHash, success } = getMostRecentHashForRange(
        absoluteRefFile,
        symbol.startLine,
        symbol.endLine,
        projectRoot
      );

      if (!success) {
        const reason = "Git blame failed";
        invalid.push({
          doc_file: ref.file,
          reference: ref.reference,
          reason,
        });
        getOrCreateDocEntry(ref.file).invalid.push({
          reference: ref.reference,
          reason,
        });
        continue;
      }

      if (mostRecentHash !== ref.refHash) {
        stale.push({
          doc_file: ref.file,
          reference: ref.reference,
          stored_hash: ref.refHash,
          current_hash: mostRecentHash,
          ref_type: "symbol",
        });
        getOrCreateDocEntry(ref.file).stale.push({
          reference: ref.reference,
          file_path: ref.refFile,
          symbol_name: ref.refSymbol,
          stored_hash: ref.refHash,
          current_hash: mostRecentHash,
          ref_type: "symbol",
        });
      }
    }

    // Capability-list tables are warnings only; everything else is an error.
    const hasErrors = frontmatterErrors.length > 0 || stale.length > 0 || invalid.length > 0 ||
      placeholderErrors.length > 0 || inlineCodeErrors.length > 0;
    const hasWarnings = capabilityListWarnings.length > 0;
    const fileOnlyRefs = refs.filter(r => r.isFileOnly).length;
    const symbolRefs = refs.filter(r => !r.isFileOnly).length;

    let message: string;
    if (hasErrors) {
      const parts: string[] = [];
      if (frontmatterErrors.length > 0) parts.push(`${frontmatterErrors.length} front matter errors`);
      if (invalid.length > 0) parts.push(`${invalid.length} invalid refs`);
      if (stale.length > 0) parts.push(`${stale.length} stale refs`);
      if (placeholderErrors.length > 0) parts.push(`${placeholderErrors.length} placeholder hashes`);
      if (inlineCodeErrors.length > 0) parts.push(`${inlineCodeErrors.length} inline code blocks`);
      message = `Validation found issues: ${parts.join(", ")}`;
    } else if (hasWarnings) {
      message = `Validated ${mdFiles.length} files with ${capabilityListWarnings.length} warnings`;
    } else {
      message = `Validated ${mdFiles.length} files and ${refs.length} references (${symbolRefs} symbol, ${fileOnlyRefs} file-only)`;
    }

    // Filter byDocFile to only include docs with actual issues
    const byDocFileFiltered: Record<string, DocFileIssues> = {};
    for (const [docFile, issues] of Object.entries(byDocFile)) {
      const hasIssues =
        issues.stale.length > 0 ||
        issues.invalid.length > 0 ||
        issues.frontmatter_error !== null ||
        issues.placeholder_errors.length > 0 ||
        issues.inline_code_block_count > 0 ||
        issues.has_capability_list_warning;
      if (hasIssues) {
        byDocFileFiltered[docFile] = issues;
      }
    }

    return this.success({
      message,
      total_files: mdFiles.length,
      total_refs: refs.length,
      symbol_refs: symbolRefs,
      file_only_refs: fileOnlyRefs,
      frontmatter_error_count: frontmatterErrors.length,
      stale_count: stale.length,
      invalid_count: invalid.length,
      placeholder_error_count: placeholderErrors.length,
      inline_code_error_count: inlineCodeErrors.length,
      capability_list_warning_count: capabilityListWarnings.length,
      // Grouped by doc file for easy delegation to documentation-writers
      by_doc_file: byDocFileFiltered,
      // Legacy flat arrays for backward compat
      frontmatter_errors: frontmatterErrors,
      stale,
      invalid,
      placeholder_errors: placeholderErrors,
      inline_code_errors: inlineCodeErrors,
      capability_list_warnings: capabilityListWarnings,
    });
  }
}
639
+
640
+ /**
641
+ * Get complexity metrics for a file or directory.
642
+ */
643
+ class ComplexityCommand extends BaseCommand {
644
+ readonly name = "complexity";
645
+ readonly description = "Get complexity metrics for file or directory";
646
+
647
+ defineArguments(cmd: Command): void {
648
+ cmd.argument("<path>", "File or directory path");
649
+ }
650
+
651
+ async execute(args: Record<string, unknown>): Promise<CommandResult> {
652
+ const pathArg = args.path as string;
653
+
654
+ if (!pathArg) {
655
+ return this.error("validation_error", "path is required");
656
+ }
657
+
658
+ const projectRoot = getProjectRoot();
659
+ const absolutePath = pathArg.startsWith("/")
660
+ ? pathArg
661
+ : join(projectRoot, pathArg);
662
+ const relativePath = relative(projectRoot, absolutePath);
663
+
664
+ if (!existsSync(absolutePath)) {
665
+ return this.error("path_not_found", `Path not found: ${relativePath}`);
666
+ }
667
+
668
+ const stat = statSync(absolutePath);
669
+
670
+ if (stat.isFile()) {
671
+ // Single file complexity
672
+ const metrics = await getFileComplexity(absolutePath);
673
+ if (!metrics) {
674
+ return this.error("parse_error", `Could not parse ${relativePath}`);
675
+ }
676
+
677
+ return this.success({
678
+ path: relativePath,
679
+ type: "file",
680
+ metrics,
681
+ estimated_tokens: Math.ceil(metrics.lines * 10), // Rough estimate
682
+ });
683
+ }
684
+
685
+ // Directory complexity
686
+ const supported = getSupportedExtensions();
687
+ const files: string[] = [];
688
+
689
+ const findSourceFiles = (dir: string): void => {
690
+ const entries = readdirSync(dir);
691
+ for (const entry of entries) {
692
+ if (entry.startsWith(".") || entry === "node_modules") continue;
693
+ const fullPath = join(dir, entry);
694
+ const entryStat = statSync(fullPath);
695
+ if (entryStat.isDirectory()) {
696
+ findSourceFiles(fullPath);
697
+ } else if (supported.includes(extname(entry))) {
698
+ files.push(fullPath);
699
+ }
700
+ }
701
+ };
702
+
703
+ findSourceFiles(absolutePath);
704
+
705
+ let totalLines = 0;
706
+ let totalImports = 0;
707
+ let totalExports = 0;
708
+ let totalFunctions = 0;
709
+ let totalClasses = 0;
710
+
711
+ for (const file of files) {
712
+ const metrics = await getFileComplexity(file);
713
+ if (metrics) {
714
+ totalLines += metrics.lines;
715
+ totalImports += metrics.imports;
716
+ totalExports += metrics.exports;
717
+ totalFunctions += metrics.functions;
718
+ totalClasses += metrics.classes;
719
+ }
720
+ }
721
+
722
+ return this.success({
723
+ path: relativePath,
724
+ type: "directory",
725
+ file_count: files.length,
726
+ metrics: {
727
+ lines: totalLines,
728
+ imports: totalImports,
729
+ exports: totalExports,
730
+ functions: totalFunctions,
731
+ classes: totalClasses,
732
+ },
733
+ estimated_tokens: Math.ceil(totalLines * 10),
734
+ });
735
+ }
736
+ }
737
+
738
+ /**
739
+ * Get tree structure with documentation coverage.
740
+ */
741
+ class TreeCommand extends BaseCommand {
742
+ readonly name = "tree";
743
+ readonly description = "Get tree structure with doc coverage indicators";
744
+
745
+ defineArguments(cmd: Command): void {
746
+ cmd
747
+ .argument("<path>", "Directory path")
748
+ .option("--depth <n>", "Max depth to traverse", "3");
749
+ }
750
+
751
+ async execute(args: Record<string, unknown>): Promise<CommandResult> {
752
+ const pathArg = args.path as string;
753
+ const maxDepth = parseInt(args.depth as string || "3", 10);
754
+
755
+ if (!pathArg) {
756
+ return this.error("validation_error", "path is required");
757
+ }
758
+
759
+ const projectRoot = getProjectRoot();
760
+ const absolutePath = pathArg.startsWith("/")
761
+ ? pathArg
762
+ : join(projectRoot, pathArg);
763
+ const relativePath = relative(projectRoot, absolutePath);
764
+
765
+ if (!existsSync(absolutePath)) {
766
+ return this.error("path_not_found", `Path not found: ${relativePath}`);
767
+ }
768
+
769
+ const stat = statSync(absolutePath);
770
+ if (!stat.isDirectory()) {
771
+ return this.error("not_directory", `${relativePath} is not a directory`);
772
+ }
773
+
774
+ const docsPath = join(projectRoot, "docs");
775
+
776
+ interface TreeNode {
777
+ name: string;
778
+ type: "file" | "directory";
779
+ has_docs: boolean;
780
+ doc_path?: string;
781
+ children?: TreeNode[];
782
+ }
783
+
784
+ const buildTree = (dir: string, depth: number): TreeNode[] => {
785
+ if (depth <= 0) return [];
786
+
787
+ const entries = readdirSync(dir);
788
+ const nodes: TreeNode[] = [];
789
+
790
+ for (const entry of entries) {
791
+ if (entry.startsWith(".") || entry === "node_modules") continue;
792
+
793
+ const fullPath = join(dir, entry);
794
+ const entryRelPath = relative(projectRoot, fullPath);
795
+ const entryStat = statSync(fullPath);
796
+
797
+ // Check for docs coverage
798
+ // Convention: docs/path/to/dir.md or docs/path/to/file.md
799
+ const possibleDocPaths = [
800
+ join(docsPath, entryRelPath + ".md"),
801
+ join(docsPath, dirname(entryRelPath), entry.replace(extname(entry), ".md")),
802
+ join(docsPath, entryRelPath, "README.md"),
803
+ join(docsPath, entryRelPath, "index.md"),
804
+ ];
805
+
806
+ let hasDoc = false;
807
+ let docPath: string | undefined;
808
+ for (const dp of possibleDocPaths) {
809
+ if (existsSync(dp)) {
810
+ hasDoc = true;
811
+ docPath = relative(projectRoot, dp);
812
+ break;
813
+ }
814
+ }
815
+
816
+ if (entryStat.isDirectory()) {
817
+ const children = buildTree(fullPath, depth - 1);
818
+ nodes.push({
819
+ name: entry,
820
+ type: "directory",
821
+ has_docs: hasDoc,
822
+ doc_path: docPath,
823
+ children: children.length > 0 ? children : undefined,
824
+ });
825
+ } else {
826
+ const ext = extname(entry);
827
+ const supported = getSupportedExtensions();
828
+ if (supported.includes(ext)) {
829
+ nodes.push({
830
+ name: entry,
831
+ type: "file",
832
+ has_docs: hasDoc,
833
+ doc_path: docPath,
834
+ });
835
+ }
836
+ }
837
+ }
838
+
839
+ return nodes;
840
+ };
841
+
842
+ const tree = buildTree(absolutePath, maxDepth);
843
+
844
+ // Calculate coverage stats
845
+ const countNodes = (nodes: TreeNode[]): { total: number; covered: number } => {
846
+ let total = 0;
847
+ let covered = 0;
848
+ for (const node of nodes) {
849
+ total++;
850
+ if (node.has_docs) covered++;
851
+ if (node.children) {
852
+ const childStats = countNodes(node.children);
853
+ total += childStats.total;
854
+ covered += childStats.covered;
855
+ }
856
+ }
857
+ return { total, covered };
858
+ };
859
+
860
+ const stats = countNodes(tree);
861
+
862
+ return this.success({
863
+ path: relativePath,
864
+ tree,
865
+ coverage: {
866
+ total: stats.total,
867
+ covered: stats.covered,
868
+ percentage: stats.total > 0
869
+ ? Math.round((stats.covered / stats.total) * 100)
870
+ : 0,
871
+ },
872
+ });
873
+ }
874
+ }
875
+
876
/**
 * Registry of `envoy docs` subcommands: maps each CLI subcommand name to
 * the command class that implements it.
 */
export const COMMANDS = {
  "format-reference": FormatReferenceCommand,
  validate: ValidateCommand,
  complexity: ComplexityCommand,
  tree: TreeCommand,
};