@doccov/cli 0.2.1 → 0.4.0

package/dist/cli.js CHANGED
@@ -18,7 +18,7 @@ var __toESM = (mod, isNodeMode, target) => {
  };
  var __require = /* @__PURE__ */ createRequire(import.meta.url);
 
- // src/config/openpkg-config.ts
+ // src/config/doccov-config.ts
  import { access } from "node:fs/promises";
  import path from "node:path";
  import { pathToFileURL } from "node:url";
@@ -52,20 +52,14 @@ var normalizeConfig = (input) => {
  };
  };
 
- // src/config/openpkg-config.ts
+ // src/config/doccov-config.ts
  var DOCCOV_CONFIG_FILENAMES = [
  "doccov.config.ts",
  "doccov.config.mts",
  "doccov.config.cts",
  "doccov.config.js",
  "doccov.config.mjs",
- "doccov.config.cjs",
- "openpkg.config.ts",
- "openpkg.config.mts",
- "openpkg.config.cts",
- "openpkg.config.js",
- "openpkg.config.mjs",
- "openpkg.config.cjs"
+ "doccov.config.cjs"
  ];
  var fileExists = async (filePath) => {
  try {
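
With the legacy openpkg.config.* filenames dropped in 0.4.0, only the doccov.config.* names listed above are discovered. A minimal sketch of such a config, assuming defineConfig (the identity helper bundled from src/config/index.ts) is re-exported from the package entry — the import path and exact config shape are assumptions; the include/exclude keys mirror the filter options that mergeFilterOptions later merges with the --include/--exclude CLI flags:

// doccov.config.ts — illustrative sketch; import path and config surface are assumptions
import { defineConfig } from "@doccov/cli";

export default defineConfig({
  // Merged with the CLI --include / --exclude flags by mergeFilterOptions below.
  include: ["DocCov", "loadDocCovConfig"],
  exclude: ["internal*"],
});
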
@@ -126,24 +120,97 @@ ${formatIssues(issues)}`);
126
120
  ...normalized
127
121
  };
128
122
  };
129
- var loadOpenPkgConfigInternal = loadDocCovConfig;
130
- var loadOpenPkgConfig = loadDocCovConfig;
131
123
 
132
124
  // src/config/index.ts
133
125
  var defineConfig = (config) => config;
134
126
  // src/cli.ts
135
- import { readFileSync as readFileSync8 } from "node:fs";
136
- import * as path11 from "node:path";
127
+ import { readFileSync as readFileSync9 } from "node:fs";
128
+ import * as path13 from "node:path";
137
129
  import { fileURLToPath } from "node:url";
138
130
  import { Command } from "commander";
139
131
 
140
132
  // src/commands/check.ts
141
133
  import * as fs2 from "node:fs";
142
134
  import * as path3 from "node:path";
143
- import { DocCov } from "@doccov/sdk";
135
+ import {
136
+ DocCov,
137
+ detectExampleAssertionFailures,
138
+ detectExampleRuntimeErrors,
139
+ hasNonAssertionComments,
140
+ parseAssertions,
141
+ runExamplesWithPackage
142
+ } from "@doccov/sdk";
144
143
  import chalk from "chalk";
145
144
  import ora from "ora";
146
145
 
146
+ // src/utils/llm-assertion-parser.ts
147
+ import { createAnthropic } from "@ai-sdk/anthropic";
148
+ import { createOpenAI } from "@ai-sdk/openai";
149
+ import { generateObject } from "ai";
150
+ import { z as z2 } from "zod";
151
+ var AssertionParseSchema = z2.object({
152
+ assertions: z2.array(z2.object({
153
+ lineNumber: z2.number().describe("1-indexed line number where the assertion appears"),
154
+ expected: z2.string().describe("The expected output value"),
155
+ originalComment: z2.string().describe("The original comment text"),
156
+ suggestedSyntax: z2.string().describe("The line rewritten with standard // => value syntax")
157
+ })).describe("List of assertion-like comments found in the code"),
158
+ hasAssertions: z2.boolean().describe("Whether any assertion-like comments were found")
159
+ });
160
+ var ASSERTION_PARSE_PROMPT = (code) => `Analyze this TypeScript/JavaScript example code for assertion-like comments.
161
+
162
+ Look for comments that appear to specify expected output values, such as:
163
+ - "// should be 3"
164
+ - "// returns 5"
165
+ - "// outputs: hello"
166
+ - "// expected: [1, 2, 3]"
167
+ - "// 42" (bare value after console.log)
168
+ - "// result: true"
169
+
170
+ Do NOT include:
171
+ - Regular code comments that explain what the code does
172
+ - Comments that are instructions or documentation
173
+ - Comments with // => (already using standard syntax)
174
+
175
+ For each assertion found, extract:
176
+ 1. The line number (1-indexed)
177
+ 2. The expected value (just the value, not the comment prefix)
178
+ 3. The original comment text
179
+ 4. A suggested rewrite of the ENTIRE line using "// => value" syntax
180
+
181
+ Code:
182
+ \`\`\`
183
+ ${code}
184
+ \`\`\``;
185
+ function getModel() {
186
+ const provider = process.env.DOCCOV_LLM_PROVIDER?.toLowerCase();
187
+ if (provider === "anthropic" || process.env.ANTHROPIC_API_KEY) {
188
+ const anthropic = createAnthropic();
189
+ return anthropic("claude-sonnet-4-20250514");
190
+ }
191
+ const openai = createOpenAI();
192
+ return openai("gpt-4o-mini");
193
+ }
194
+ function isLLMAssertionParsingAvailable() {
195
+ return Boolean(process.env.OPENAI_API_KEY || process.env.ANTHROPIC_API_KEY);
196
+ }
197
+ async function parseAssertionsWithLLM(code) {
198
+ if (!isLLMAssertionParsingAvailable()) {
199
+ return null;
200
+ }
201
+ try {
202
+ const model = getModel();
203
+ const { object } = await generateObject({
204
+ model,
205
+ schema: AssertionParseSchema,
206
+ prompt: ASSERTION_PARSE_PROMPT(code)
207
+ });
208
+ return object;
209
+ } catch {
210
+ return null;
211
+ }
212
+ }
213
+
147
214
  // src/utils/package-utils.ts
148
215
  import * as fs from "node:fs";
149
216
  import * as path2 from "node:path";
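
The new llm-assertion-parser above only kicks in when an example's comments look like expectations but do not use the "// => value" form that the regex-based parseAssertions recognizes. A hedged sketch of the two styles inside an @example block (how the expected values are matched against captured stdout lives in @doccov/sdk and is assumed here):

/**
 * Adds two numbers.
 *
 * @example
 * // Standard syntax: picked up directly by parseAssertions().
 * console.log(add(1, 2)); // => 3
 *
 * // Loose phrasing: parseAssertions() ignores this, so the LLM pass
 * // (parseAssertionsWithLLM) would flag it and suggest "// => 4".
 * console.log(add(2, 2)); // should be 4
 */
export function add(a: number, b: number): number {
  return a + b;
}
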
@@ -294,7 +361,7 @@ function registerCheckCommand(program, dependencies = {}) {
294
361
  ...defaultDependencies,
295
362
  ...dependencies
296
363
  };
297
- program.command("check [entry]").description("Fail if documentation coverage falls below a threshold").option("--cwd <dir>", "Working directory", process.cwd()).option("--package <name>", "Target package name (for monorepos)").option("--min-coverage <percentage>", "Minimum docs coverage percentage (0-100)", (value) => Number(value)).option("--require-examples", "Require at least one @example for every export").option("--no-external-types", "Skip external type resolution from node_modules").action(async (entry, options) => {
364
+ program.command("check [entry]").description("Fail if documentation coverage falls below a threshold").option("--cwd <dir>", "Working directory", process.cwd()).option("--package <name>", "Target package name (for monorepos)").option("--min-coverage <percentage>", "Minimum docs coverage percentage (0-100)", (value) => Number(value)).option("--require-examples", "Require at least one @example for every export").option("--run-examples", "Execute @example blocks and fail on runtime errors").option("--ignore-drift", "Do not fail on documentation drift").option("--skip-resolve", "Skip external type resolution from node_modules").action(async (entry, options) => {
298
365
  try {
299
366
  let targetDir = options.cwd;
300
367
  let entryFile = entry;
@@ -312,12 +379,13 @@ function registerCheckCommand(program, dependencies = {}) {
312
379
  } else {
313
380
  entryFile = path3.resolve(targetDir, entryFile);
314
381
  if (fs2.existsSync(entryFile) && fs2.statSync(entryFile).isDirectory()) {
382
+ targetDir = entryFile;
315
383
  entryFile = await findEntryPoint(entryFile, true);
316
384
  log(chalk.gray(`Auto-detected entry point: ${entryFile}`));
317
385
  }
318
386
  }
319
387
  const minCoverage = clampCoverage(options.minCoverage ?? 80);
320
- const resolveExternalTypes = options.externalTypes !== false;
388
+ const resolveExternalTypes = !options.skipResolve;
321
389
  const spinnerInstance = spinner("Analyzing documentation coverage...");
322
390
  spinnerInstance.start();
323
391
  let specResult;
@@ -333,13 +401,132 @@ function registerCheckCommand(program, dependencies = {}) {
333
401
  throw new Error("Failed to analyze documentation coverage.");
334
402
  }
335
403
  const spec = specResult.spec;
404
+ const warnings = specResult.diagnostics.filter((d) => d.severity === "warning");
405
+ const infos = specResult.diagnostics.filter((d) => d.severity === "info");
406
+ if (warnings.length > 0 || infos.length > 0) {
407
+ log("");
408
+ for (const diag of warnings) {
409
+ log(chalk.yellow(`⚠ ${diag.message}`));
410
+ if (diag.suggestion) {
411
+ log(chalk.gray(` ${diag.suggestion}`));
412
+ }
413
+ }
414
+ for (const diag of infos) {
415
+ log(chalk.cyan(`ℹ ${diag.message}`));
416
+ if (diag.suggestion) {
417
+ log(chalk.gray(` ${diag.suggestion}`));
418
+ }
419
+ }
420
+ log("");
421
+ }
422
+ const runtimeDrifts = [];
423
+ if (options.runExamples) {
424
+ const allExamples = [];
425
+ for (const entry2 of spec.exports ?? []) {
426
+ if (entry2.examples && entry2.examples.length > 0) {
427
+ allExamples.push({ exportName: entry2.name, examples: entry2.examples });
428
+ }
429
+ }
430
+ if (allExamples.length === 0) {
431
+ log(chalk.gray("No @example blocks found"));
432
+ } else {
433
+ const examplesSpinner = spinner("Installing package for examples...");
434
+ examplesSpinner.start();
435
+ const flatExamples = allExamples.flatMap((e) => e.examples);
436
+ const packageResult = await runExamplesWithPackage(flatExamples, {
437
+ packagePath: targetDir,
438
+ timeout: 5000,
439
+ installTimeout: 60000,
440
+ cwd: targetDir
441
+ });
442
+ if (!packageResult.installSuccess) {
443
+ examplesSpinner.fail(`Package install failed: ${packageResult.installError}`);
444
+ log(chalk.yellow("Skipping example execution. Ensure the package is built."));
445
+ } else {
446
+ examplesSpinner.text = "Running @example blocks...";
447
+ let examplesRun = 0;
448
+ let examplesFailed = 0;
449
+ let exampleIndex = 0;
450
+ for (const { exportName, examples } of allExamples) {
451
+ const entryResults = new Map;
452
+ for (let i = 0;i < examples.length; i++) {
453
+ const result = packageResult.results.get(exampleIndex);
454
+ if (result) {
455
+ entryResults.set(i, result);
456
+ examplesRun++;
457
+ if (!result.success)
458
+ examplesFailed++;
459
+ }
460
+ exampleIndex++;
461
+ }
462
+ const entry2 = (spec.exports ?? []).find((e) => e.name === exportName);
463
+ if (entry2) {
464
+ const runtimeErrorDrifts = detectExampleRuntimeErrors(entry2, entryResults);
465
+ for (const drift of runtimeErrorDrifts) {
466
+ runtimeDrifts.push({
467
+ name: entry2.name,
468
+ issue: drift.issue,
469
+ suggestion: drift.suggestion
470
+ });
471
+ }
472
+ const assertionDrifts = detectExampleAssertionFailures(entry2, entryResults);
473
+ for (const drift of assertionDrifts) {
474
+ runtimeDrifts.push({
475
+ name: entry2.name,
476
+ issue: drift.issue,
477
+ suggestion: drift.suggestion
478
+ });
479
+ }
480
+ if (isLLMAssertionParsingAvailable() && entry2.examples) {
481
+ for (let exIdx = 0;exIdx < entry2.examples.length; exIdx++) {
482
+ const example = entry2.examples[exIdx];
483
+ const result = entryResults.get(exIdx);
484
+ if (!result?.success || typeof example !== "string")
485
+ continue;
486
+ const regexAssertions = parseAssertions(example);
487
+ if (regexAssertions.length === 0 && hasNonAssertionComments(example)) {
488
+ const llmResult = await parseAssertionsWithLLM(example);
489
+ if (llmResult?.hasAssertions && llmResult.assertions.length > 0) {
490
+ const stdoutLines = result.stdout.split(`
491
+ `).map((l) => l.trim()).filter((l) => l.length > 0);
492
+ for (let aIdx = 0;aIdx < llmResult.assertions.length; aIdx++) {
493
+ const assertion = llmResult.assertions[aIdx];
494
+ const actual = stdoutLines[aIdx];
495
+ if (actual === undefined) {
496
+ runtimeDrifts.push({
497
+ name: entry2.name,
498
+ issue: `Assertion expected "${assertion.expected}" but no output was produced`,
499
+ suggestion: `Consider using standard syntax: ${assertion.suggestedSyntax}`
500
+ });
501
+ } else if (assertion.expected.trim() !== actual.trim()) {
502
+ runtimeDrifts.push({
503
+ name: entry2.name,
504
+ issue: `Assertion failed: expected "${assertion.expected}" but got "${actual}"`,
505
+ suggestion: `Consider using standard syntax: ${assertion.suggestedSyntax}`
506
+ });
507
+ }
508
+ }
509
+ }
510
+ }
511
+ }
512
+ }
513
+ }
514
+ }
515
+ if (examplesFailed > 0) {
516
+ examplesSpinner.fail(`${examplesFailed}/${examplesRun} example(s) failed`);
517
+ } else {
518
+ examplesSpinner.succeed(`${examplesRun} example(s) passed`);
519
+ }
520
+ }
521
+ }
522
+ }
336
523
  const coverageScore = spec.docs?.coverageScore ?? 0;
337
524
  const failingExports = collectFailingExports(spec.exports ?? [], minCoverage);
338
525
  const missingExamples = options.requireExamples ? failingExports.filter((item) => item.missing?.includes("examples")) : [];
339
- const driftExports = collectDrift(spec.exports ?? []);
526
+ const driftExports = [...collectDrift(spec.exports ?? []), ...runtimeDrifts];
340
527
  const coverageFailed = coverageScore < minCoverage;
341
528
  const hasMissingExamples = missingExamples.length > 0;
342
- const hasDrift = driftExports.length > 0;
529
+ const hasDrift = !options.ignoreDrift && driftExports.length > 0;
343
530
  if (!coverageFailed && !hasMissingExamples && !hasDrift) {
344
531
  log(chalk.green(`✓ Docs coverage ${coverageScore}% (min ${minCoverage}%)`));
345
532
  if (failingExports.length > 0) {
@@ -348,6 +535,16 @@ function registerCheckCommand(program, dependencies = {}) {
348
535
  log(chalk.gray(` • ${name}: missing ${missing?.join(", ")}`));
349
536
  }
350
537
  }
538
+ if (options.ignoreDrift && driftExports.length > 0) {
539
+ log("");
540
+ log(chalk.yellow(`⚠️ ${driftExports.length} drift issue(s) detected (ignored):`));
541
+ for (const drift of driftExports.slice(0, 10)) {
542
+ log(chalk.yellow(` • ${drift.name}: ${drift.issue}`));
543
+ if (drift.suggestion) {
544
+ log(chalk.gray(` Suggestion: ${drift.suggestion}`));
545
+ }
546
+ }
547
+ }
351
548
  return;
352
549
  }
353
550
  error("");
@@ -471,7 +668,7 @@ function loadSpec(filePath, readFileSync3) {
471
668
  throw new Error(`Failed to parse ${filePath}: ${parseError instanceof Error ? parseError.message : parseError}`);
472
669
  }
473
670
  }
474
- function printTextDiff(diff, log, error) {
671
+ function printTextDiff(diff, log, _error) {
475
672
  log("");
476
673
  log(chalk2.bold("DocCov Diff Report"));
477
674
  log("─".repeat(40));
@@ -536,16 +733,215 @@ function printTextDiff(diff, log, error) {
536
733
  log("");
537
734
  }
538
735
 
539
- // src/commands/generate.ts
736
+ // src/commands/fix.ts
540
737
  import * as fs4 from "node:fs";
541
738
  import * as path5 from "node:path";
542
- import { DocCov as DocCov2 } from "@doccov/sdk";
543
- import { normalize, validateSpec } from "@openpkg-ts/spec";
544
- import chalk4 from "chalk";
739
+ import {
740
+ applyEdits,
741
+ categorizeDrifts,
742
+ createSourceFile,
743
+ DocCov as DocCov2,
744
+ findJSDocLocation,
745
+ generateFixesForExport,
746
+ mergeFixes,
747
+ parseJSDocToPatch,
748
+ serializeJSDoc
749
+ } from "@doccov/sdk";
750
+ import chalk3 from "chalk";
545
751
  import ora2 from "ora";
752
+ var defaultDependencies3 = {
753
+ createDocCov: (options) => new DocCov2(options),
754
+ spinner: (text) => ora2(text),
755
+ log: console.log,
756
+ error: console.error
757
+ };
758
+ function collectDrifts(exports) {
759
+ const results = [];
760
+ for (const exp of exports) {
761
+ const drifts = exp.docs?.drift ?? [];
762
+ for (const drift of drifts) {
763
+ results.push({ export: exp, drift });
764
+ }
765
+ }
766
+ return results;
767
+ }
768
+ function filterDriftsByType(drifts, onlyTypes) {
769
+ if (!onlyTypes)
770
+ return drifts;
771
+ const allowedTypes = new Set(onlyTypes.split(",").map((t) => t.trim()));
772
+ return drifts.filter((d) => allowedTypes.has(d.drift.type));
773
+ }
774
+ function groupByExport(drifts) {
775
+ const map = new Map;
776
+ for (const { export: exp, drift } of drifts) {
777
+ const existing = map.get(exp) ?? [];
778
+ existing.push(drift);
779
+ map.set(exp, existing);
780
+ }
781
+ return map;
782
+ }
783
+ function registerFixCommand(program, dependencies = {}) {
784
+ const { createDocCov, spinner, log, error } = {
785
+ ...defaultDependencies3,
786
+ ...dependencies
787
+ };
788
+ program.command("fix [entry]").description("Automatically fix documentation drift").option("--cwd <dir>", "Working directory", process.cwd()).option("--package <name>", "Target package name (for monorepos)").option("--dry-run", "Preview changes without writing").option("--only <types>", "Only fix specific drift types (comma-separated)").option("--skip-resolve", "Skip external type resolution from node_modules").action(async (entry, options) => {
789
+ try {
790
+ let targetDir = options.cwd;
791
+ let entryFile = entry;
792
+ if (options.package) {
793
+ const packageDir = await findPackageInMonorepo(options.cwd, options.package);
794
+ if (!packageDir) {
795
+ throw new Error(`Package "${options.package}" not found in monorepo`);
796
+ }
797
+ targetDir = packageDir;
798
+ log(chalk3.gray(`Found package at ${path5.relative(options.cwd, packageDir)}`));
799
+ }
800
+ if (!entryFile) {
801
+ entryFile = await findEntryPoint(targetDir, true);
802
+ log(chalk3.gray(`Auto-detected entry point: ${path5.relative(targetDir, entryFile)}`));
803
+ } else {
804
+ entryFile = path5.resolve(targetDir, entryFile);
805
+ if (fs4.existsSync(entryFile) && fs4.statSync(entryFile).isDirectory()) {
806
+ targetDir = entryFile;
807
+ entryFile = await findEntryPoint(entryFile, true);
808
+ log(chalk3.gray(`Auto-detected entry point: ${entryFile}`));
809
+ }
810
+ }
811
+ const resolveExternalTypes = !options.skipResolve;
812
+ const analyzeSpinner = spinner("Analyzing documentation...");
813
+ analyzeSpinner.start();
814
+ const doccov = createDocCov({ resolveExternalTypes });
815
+ const result = await doccov.analyzeFileWithDiagnostics(entryFile);
816
+ const spec = result.spec;
817
+ analyzeSpinner.succeed("Analysis complete");
818
+ const allDrifts = collectDrifts(spec.exports ?? []);
819
+ if (allDrifts.length === 0) {
820
+ log(chalk3.green("No drift issues found. Documentation is in sync!"));
821
+ return;
822
+ }
823
+ const filteredDrifts = filterDriftsByType(allDrifts, options.only);
824
+ if (filteredDrifts.length === 0) {
825
+ log(chalk3.yellow("No matching drift issues for the specified types."));
826
+ return;
827
+ }
828
+ const { fixable, nonFixable } = categorizeDrifts(filteredDrifts.map((d) => d.drift));
829
+ if (fixable.length === 0) {
830
+ log(chalk3.yellow(`Found ${nonFixable.length} drift issue(s), but none are auto-fixable.`));
831
+ log(chalk3.gray("Non-fixable drift types require manual intervention:"));
832
+ for (const drift of nonFixable.slice(0, 5)) {
833
+ log(chalk3.gray(` • ${drift.type}: ${drift.issue}`));
834
+ }
835
+ return;
836
+ }
837
+ log("");
838
+ log(chalk3.bold(`Found ${fixable.length} fixable issue(s)`));
839
+ if (nonFixable.length > 0) {
840
+ log(chalk3.gray(`(${nonFixable.length} non-fixable issue(s) skipped)`));
841
+ }
842
+ log("");
843
+ const groupedDrifts = groupByExport(filteredDrifts.filter((d) => fixable.includes(d.drift)));
844
+ const edits = [];
845
+ const editsByFile = new Map;
846
+ for (const [exp, drifts] of groupedDrifts) {
847
+ if (!exp.source?.file) {
848
+ log(chalk3.gray(` Skipping ${exp.name}: no source location`));
849
+ continue;
850
+ }
851
+ if (exp.source.file.endsWith(".d.ts")) {
852
+ log(chalk3.gray(` Skipping ${exp.name}: declaration file`));
853
+ continue;
854
+ }
855
+ const filePath = path5.resolve(targetDir, exp.source.file);
856
+ if (!fs4.existsSync(filePath)) {
857
+ log(chalk3.gray(` Skipping ${exp.name}: file not found`));
858
+ continue;
859
+ }
860
+ const sourceFile = createSourceFile(filePath);
861
+ const location = findJSDocLocation(sourceFile, exp.name, exp.source.line);
862
+ if (!location) {
863
+ log(chalk3.gray(` Skipping ${exp.name}: could not find declaration`));
864
+ continue;
865
+ }
866
+ let existingPatch = {};
867
+ if (location.hasExisting && location.existingJSDoc) {
868
+ existingPatch = parseJSDocToPatch(location.existingJSDoc);
869
+ }
870
+ const fixes = generateFixesForExport({ ...exp, docs: { ...exp.docs, drift: drifts } }, existingPatch);
871
+ if (fixes.length === 0)
872
+ continue;
873
+ const mergedPatch = mergeFixes(fixes, existingPatch);
874
+ const newJSDoc = serializeJSDoc(mergedPatch, location.indent);
875
+ const edit = {
876
+ filePath,
877
+ symbolName: exp.name,
878
+ startLine: location.startLine,
879
+ endLine: location.endLine,
880
+ hasExisting: location.hasExisting,
881
+ existingJSDoc: location.existingJSDoc,
882
+ newJSDoc,
883
+ indent: location.indent
884
+ };
885
+ edits.push(edit);
886
+ const fileEdits = editsByFile.get(filePath) ?? [];
887
+ fileEdits.push({ export: exp, edit, fixes, existingPatch });
888
+ editsByFile.set(filePath, fileEdits);
889
+ }
890
+ if (edits.length === 0) {
891
+ log(chalk3.yellow("No edits could be generated."));
892
+ return;
893
+ }
894
+ if (options.dryRun) {
895
+ log(chalk3.bold("Dry run - changes that would be made:"));
896
+ log("");
897
+ for (const [filePath, fileEdits] of editsByFile) {
898
+ const relativePath = path5.relative(targetDir, filePath);
899
+ log(chalk3.cyan(` ${relativePath}:`));
900
+ for (const { export: exp, edit, fixes } of fileEdits) {
901
+ const lineInfo = edit.hasExisting ? `lines ${edit.startLine + 1}-${edit.endLine + 1}` : `line ${edit.startLine + 1}`;
902
+ log(` ${chalk3.bold(exp.name)} [${lineInfo}]`);
903
+ for (const fix of fixes) {
904
+ log(chalk3.green(` + ${fix.description}`));
905
+ }
906
+ }
907
+ log("");
908
+ }
909
+ log(chalk3.gray("Run without --dry-run to apply these changes."));
910
+ } else {
911
+ const applySpinner = spinner("Applying fixes...");
912
+ applySpinner.start();
913
+ const result2 = await applyEdits(edits);
914
+ if (result2.errors.length > 0) {
915
+ applySpinner.warn("Some fixes could not be applied");
916
+ for (const err of result2.errors) {
917
+ error(chalk3.red(` ${err.file}: ${err.error}`));
918
+ }
919
+ } else {
920
+ applySpinner.succeed(`Applied ${result2.editsApplied} fix(es) to ${result2.filesModified} file(s)`);
921
+ }
922
+ log("");
923
+ for (const [filePath, fileEdits] of editsByFile) {
924
+ const relativePath = path5.relative(targetDir, filePath);
925
+ log(chalk3.green(` ✓ ${relativePath}: ${fileEdits.length} fix(es)`));
926
+ }
927
+ }
928
+ } catch (commandError) {
929
+ error(chalk3.red("Error:"), commandError instanceof Error ? commandError.message : commandError);
930
+ process.exitCode = 1;
931
+ }
932
+ });
933
+ }
934
+
935
+ // src/commands/generate.ts
936
+ import * as fs5 from "node:fs";
937
+ import * as path6 from "node:path";
938
+ import { DocCov as DocCov3 } from "@doccov/sdk";
939
+ import { normalize, validateSpec } from "@openpkg-ts/spec";
940
+ import chalk5 from "chalk";
941
+ import ora3 from "ora";
546
942
 
547
943
  // src/utils/filter-options.ts
548
- import chalk3 from "chalk";
944
+ import chalk4 from "chalk";
549
945
  var unique = (values) => Array.from(new Set(values));
550
946
  var parseListFlag = (value) => {
551
947
  if (!value) {
@@ -555,7 +951,7 @@ var parseListFlag = (value) => {
555
951
  const normalized = rawItems.flatMap((item) => String(item).split(",")).map((item) => item.trim()).filter(Boolean);
556
952
  return normalized.length > 0 ? unique(normalized) : undefined;
557
953
  };
558
- var formatList = (label, values) => `${label}: ${values.map((value) => chalk3.cyan(value)).join(", ")}`;
954
+ var formatList = (label, values) => `${label}: ${values.map((value) => chalk4.cyan(value)).join(", ")}`;
559
955
  var mergeFilterOptions = (config, cliOptions) => {
560
956
  const messages = [];
561
957
  const configInclude = config?.include;
@@ -595,10 +991,10 @@ var mergeFilterOptions = (config, cliOptions) => {
595
991
  };
596
992
 
597
993
  // src/commands/generate.ts
598
- var defaultDependencies3 = {
599
- createDocCov: (options) => new DocCov2(options),
600
- writeFileSync: fs4.writeFileSync,
601
- spinner: (text) => ora2(text),
994
+ var defaultDependencies4 = {
995
+ createDocCov: (options) => new DocCov3(options),
996
+ writeFileSync: fs5.writeFileSync,
997
+ spinner: (text) => ora3(text),
602
998
  log: console.log,
603
999
  error: console.error
604
1000
  };
@@ -617,17 +1013,17 @@ function stripDocsFields(spec) {
617
1013
  }
618
1014
  function formatDiagnosticOutput(prefix, diagnostic, baseDir) {
619
1015
  const location = diagnostic.location;
620
- const relativePath = location?.file ? path5.relative(baseDir, location.file) || location.file : undefined;
621
- const locationText = location && relativePath ? chalk4.gray(`${relativePath}:${location.line ?? 1}:${location.column ?? 1}`) : null;
1016
+ const relativePath = location?.file ? path6.relative(baseDir, location.file) || location.file : undefined;
1017
+ const locationText = location && relativePath ? chalk5.gray(`${relativePath}:${location.line ?? 1}:${location.column ?? 1}`) : null;
622
1018
  const locationPrefix = locationText ? `${locationText} ` : "";
623
1019
  return `${prefix} ${locationPrefix}${diagnostic.message}`;
624
1020
  }
625
1021
  function registerGenerateCommand(program, dependencies = {}) {
626
1022
  const { createDocCov, writeFileSync: writeFileSync2, spinner, log, error } = {
627
- ...defaultDependencies3,
1023
+ ...defaultDependencies4,
628
1024
  ...dependencies
629
1025
  };
630
- program.command("generate [entry]").description("Generate OpenPkg specification for documentation coverage analysis").option("-o, --output <file>", "Output file", "openpkg.json").option("-p, --package <name>", "Target package name (for monorepos)").option("--cwd <dir>", "Working directory", process.cwd()).option("--no-external-types", "Skip external type resolution from node_modules").option("--include <ids>", "Filter exports by identifier (comma-separated or repeated)").option("--exclude <ids>", "Exclude exports by identifier (comma-separated or repeated)").option("--show-diagnostics", "Print TypeScript diagnostics from analysis").option("--no-docs", "Omit docs coverage fields from output (pure structural spec)").option("-y, --yes", "Skip all prompts and use defaults").action(async (entry, options) => {
1026
+ program.command("generate [entry]").description("Generate OpenPkg specification for documentation coverage analysis").option("-o, --output <file>", "Output file", "openpkg.json").option("-p, --package <name>", "Target package name (for monorepos)").option("--cwd <dir>", "Working directory", process.cwd()).option("--skip-resolve", "Skip external type resolution from node_modules").option("--include <ids>", "Filter exports by identifier (comma-separated or repeated)").option("--exclude <ids>", "Exclude exports by identifier (comma-separated or repeated)").option("--show-diagnostics", "Print TypeScript diagnostics from analysis").option("--no-docs", "Omit docs coverage fields from output (pure structural spec)").option("-y, --yes", "Skip all prompts and use defaults").action(async (entry, options) => {
631
1027
  try {
632
1028
  let targetDir = options.cwd;
633
1029
  let entryFile = entry;
@@ -637,19 +1033,19 @@ function registerGenerateCommand(program, dependencies = {}) {
637
1033
  throw new Error(`Package "${options.package}" not found in monorepo`);
638
1034
  }
639
1035
  targetDir = packageDir;
640
- log(chalk4.gray(`Found package at ${path5.relative(options.cwd, packageDir)}`));
1036
+ log(chalk5.gray(`Found package at ${path6.relative(options.cwd, packageDir)}`));
641
1037
  }
642
1038
  if (!entryFile) {
643
1039
  entryFile = await findEntryPoint(targetDir, true);
644
- log(chalk4.gray(`Auto-detected entry point: ${path5.relative(targetDir, entryFile)}`));
1040
+ log(chalk5.gray(`Auto-detected entry point: ${path6.relative(targetDir, entryFile)}`));
645
1041
  } else {
646
- entryFile = path5.resolve(targetDir, entryFile);
647
- if (fs4.existsSync(entryFile) && fs4.statSync(entryFile).isDirectory()) {
1042
+ entryFile = path6.resolve(targetDir, entryFile);
1043
+ if (fs5.existsSync(entryFile) && fs5.statSync(entryFile).isDirectory()) {
648
1044
  entryFile = await findEntryPoint(entryFile, true);
649
- log(chalk4.gray(`Auto-detected entry point: ${entryFile}`));
1045
+ log(chalk5.gray(`Auto-detected entry point: ${entryFile}`));
650
1046
  }
651
1047
  }
652
- const resolveExternalTypes = options.externalTypes !== false;
1048
+ const resolveExternalTypes = !options.skipResolve;
653
1049
  const cliFilters = {
654
1050
  include: parseListFlag(options.include),
655
1051
  exclude: parseListFlag(options.exclude)
@@ -658,15 +1054,15 @@ function registerGenerateCommand(program, dependencies = {}) {
658
1054
  try {
659
1055
  config = await loadDocCovConfig(targetDir);
660
1056
  if (config?.filePath) {
661
- log(chalk4.gray(`Loaded configuration from ${path5.relative(targetDir, config.filePath)}`));
1057
+ log(chalk5.gray(`Loaded configuration from ${path6.relative(targetDir, config.filePath)}`));
662
1058
  }
663
1059
  } catch (configError) {
664
- error(chalk4.red("Failed to load DocCov config:"), configError instanceof Error ? configError.message : configError);
1060
+ error(chalk5.red("Failed to load DocCov config:"), configError instanceof Error ? configError.message : configError);
665
1061
  process.exit(1);
666
1062
  }
667
1063
  const resolvedFilters = mergeFilterOptions(config, cliFilters);
668
1064
  for (const message of resolvedFilters.messages) {
669
- log(chalk4.gray(`• ${message}`));
1065
+ log(chalk5.gray(`• ${message}`));
670
1066
  }
671
1067
  const spinnerInstance = spinner("Generating OpenPkg spec...");
672
1068
  spinnerInstance.start();
@@ -690,7 +1086,7 @@ function registerGenerateCommand(program, dependencies = {}) {
690
1086
  if (!result) {
691
1087
  throw new Error("Failed to produce an OpenPkg spec.");
692
1088
  }
693
- const outputPath = path5.resolve(process.cwd(), options.output);
1089
+ const outputPath = path6.resolve(process.cwd(), options.output);
694
1090
  let normalized = normalize(result.spec);
695
1091
  if (options.docs === false) {
696
1092
  normalized = stripDocsFields(normalized);
@@ -699,85 +1095,85 @@ function registerGenerateCommand(program, dependencies = {}) {
699
1095
  if (!validation.ok) {
700
1096
  spinnerInstance.fail("Spec failed schema validation");
701
1097
  for (const err of validation.errors) {
702
- error(chalk4.red(`schema: ${err.instancePath || "/"} ${err.message}`));
1098
+ error(chalk5.red(`schema: ${err.instancePath || "/"} ${err.message}`));
703
1099
  }
704
1100
  process.exit(1);
705
1101
  }
706
1102
  writeFileSync2(outputPath, JSON.stringify(normalized, null, 2));
707
- log(chalk4.green(`✓ Generated ${options.output}`));
708
- log(chalk4.gray(` ${getArrayLength(normalized.exports)} exports`));
709
- log(chalk4.gray(` ${getArrayLength(normalized.types)} types`));
1103
+ log(chalk5.green(`✓ Generated ${options.output}`));
1104
+ log(chalk5.gray(` ${getArrayLength(normalized.exports)} exports`));
1105
+ log(chalk5.gray(` ${getArrayLength(normalized.types)} types`));
710
1106
  if (options.showDiagnostics && result.diagnostics.length > 0) {
711
1107
  log("");
712
- log(chalk4.bold("Diagnostics"));
1108
+ log(chalk5.bold("Diagnostics"));
713
1109
  for (const diagnostic of result.diagnostics) {
714
- const prefix = diagnostic.severity === "error" ? chalk4.red("✖") : diagnostic.severity === "warning" ? chalk4.yellow("⚠") : chalk4.cyan("ℹ");
1110
+ const prefix = diagnostic.severity === "error" ? chalk5.red("✖") : diagnostic.severity === "warning" ? chalk5.yellow("⚠") : chalk5.cyan("ℹ");
715
1111
  log(formatDiagnosticOutput(prefix, diagnostic, targetDir));
716
1112
  }
717
1113
  }
718
1114
  } catch (commandError) {
719
- error(chalk4.red("Error:"), commandError instanceof Error ? commandError.message : commandError);
1115
+ error(chalk5.red("Error:"), commandError instanceof Error ? commandError.message : commandError);
720
1116
  process.exit(1);
721
1117
  }
722
1118
  });
723
1119
  }
724
1120
 
725
1121
  // src/commands/init.ts
726
- import * as fs5 from "node:fs";
727
- import * as path6 from "node:path";
728
- import chalk5 from "chalk";
729
- var defaultDependencies4 = {
730
- fileExists: fs5.existsSync,
731
- writeFileSync: fs5.writeFileSync,
732
- readFileSync: fs5.readFileSync,
1122
+ import * as fs6 from "node:fs";
1123
+ import * as path7 from "node:path";
1124
+ import chalk6 from "chalk";
1125
+ var defaultDependencies5 = {
1126
+ fileExists: fs6.existsSync,
1127
+ writeFileSync: fs6.writeFileSync,
1128
+ readFileSync: fs6.readFileSync,
733
1129
  log: console.log,
734
1130
  error: console.error
735
1131
  };
736
1132
  function registerInitCommand(program, dependencies = {}) {
737
1133
  const { fileExists: fileExists2, writeFileSync: writeFileSync3, readFileSync: readFileSync4, log, error } = {
738
- ...defaultDependencies4,
1134
+ ...defaultDependencies5,
739
1135
  ...dependencies
740
1136
  };
741
1137
  program.command("init").description("Create a DocCov configuration file").option("--cwd <dir>", "Working directory", process.cwd()).option("--format <format>", "Config format: auto, mjs, js, cjs", "auto").action((options) => {
742
- const cwd = path6.resolve(options.cwd);
1138
+ const cwd = path7.resolve(options.cwd);
743
1139
  const formatOption = String(options.format ?? "auto").toLowerCase();
744
1140
  if (!isValidFormat(formatOption)) {
745
- error(chalk5.red(`Invalid format "${formatOption}". Use auto, mjs, js, or cjs.`));
1141
+ error(chalk6.red(`Invalid format "${formatOption}". Use auto, mjs, js, or cjs.`));
746
1142
  process.exitCode = 1;
747
1143
  return;
748
1144
  }
749
1145
  const existing = findExistingConfig(cwd, fileExists2);
750
1146
  if (existing) {
751
- error(chalk5.red(`A DocCov config already exists at ${path6.relative(cwd, existing) || "./doccov.config.*"}.`));
1147
+ error(chalk6.red(`A DocCov config already exists at ${path7.relative(cwd, existing) || "./doccov.config.*"}.`));
752
1148
  process.exitCode = 1;
753
1149
  return;
754
1150
  }
755
1151
  const packageType = detectPackageType(cwd, fileExists2, readFileSync4);
756
1152
  const targetFormat = resolveFormat(formatOption, packageType);
757
1153
  if (targetFormat === "js" && packageType !== "module") {
758
- log(chalk5.yellow('Package is not marked as "type": "module"; creating doccov.config.js may require enabling ESM.'));
1154
+ log(chalk6.yellow('Package is not marked as "type": "module"; creating doccov.config.js may require enabling ESM.'));
759
1155
  }
760
1156
  const fileName = `doccov.config.${targetFormat}`;
761
- const outputPath = path6.join(cwd, fileName);
1157
+ const outputPath = path7.join(cwd, fileName);
762
1158
  if (fileExists2(outputPath)) {
763
- error(chalk5.red(`Cannot create ${fileName}; file already exists.`));
1159
+ error(chalk6.red(`Cannot create ${fileName}; file already exists.`));
764
1160
  process.exitCode = 1;
765
1161
  return;
766
1162
  }
767
1163
  const template = buildTemplate(targetFormat);
768
1164
  writeFileSync3(outputPath, template, { encoding: "utf8" });
769
- log(chalk5.green(`✓ Created ${path6.relative(process.cwd(), outputPath)}`));
1165
+ log(chalk6.green(`✓ Created ${path7.relative(process.cwd(), outputPath)}`));
770
1166
  });
771
1167
  }
772
1168
  var isValidFormat = (value) => {
773
1169
  return value === "auto" || value === "mjs" || value === "js" || value === "cjs";
774
1170
  };
775
1171
  var findExistingConfig = (cwd, fileExists2) => {
776
- let current = path6.resolve(cwd);
777
- const { root } = path6.parse(current);
1172
+ let current = path7.resolve(cwd);
1173
+ const { root } = path7.parse(current);
778
1174
  while (true) {
779
1175
  for (const candidate of DOCCOV_CONFIG_FILENAMES) {
780
- const candidatePath = path6.join(current, candidate);
1176
+ const candidatePath = path7.join(current, candidate);
781
1177
  if (fileExists2(candidatePath)) {
782
1178
  return candidatePath;
783
1179
  }
@@ -785,7 +1181,7 @@ var findExistingConfig = (cwd, fileExists2) => {
785
1181
  if (current === root) {
786
1182
  break;
787
1183
  }
788
- current = path6.dirname(current);
1184
+ current = path7.dirname(current);
789
1185
  }
790
1186
  return null;
791
1187
  };
@@ -807,17 +1203,17 @@ var detectPackageType = (cwd, fileExists2, readFileSync4) => {
807
1203
  return;
808
1204
  };
809
1205
  var findNearestPackageJson = (cwd, fileExists2) => {
810
- let current = path6.resolve(cwd);
811
- const { root } = path6.parse(current);
1206
+ let current = path7.resolve(cwd);
1207
+ const { root } = path7.parse(current);
812
1208
  while (true) {
813
- const candidate = path6.join(current, "package.json");
1209
+ const candidate = path7.join(current, "package.json");
814
1210
  if (fileExists2(candidate)) {
815
1211
  return candidate;
816
1212
  }
817
1213
  if (current === root) {
818
1214
  break;
819
1215
  }
820
- current = path6.dirname(current);
1216
+ current = path7.dirname(current);
821
1217
  }
822
1218
  return null;
823
1219
  };
@@ -852,26 +1248,256 @@ var buildTemplate = (format) => {
852
1248
  `);
853
1249
  };
854
1250
 
1251
+ // src/commands/report.ts
1252
+ import * as fs7 from "node:fs";
1253
+ import * as path8 from "node:path";
1254
+ import { DocCov as DocCov4 } from "@doccov/sdk";
1255
+ import chalk7 from "chalk";
1256
+ import ora4 from "ora";
1257
+
1258
+ // src/reports/markdown.ts
1259
+ function bar(pct, width = 10) {
1260
+ const filled = Math.round(pct / 100 * width);
1261
+ return "█".repeat(filled) + "░".repeat(width - filled);
1262
+ }
1263
+ function renderMarkdown(stats, options = {}) {
1264
+ const limit = options.limit ?? 20;
1265
+ const lines = [];
1266
+ lines.push(`# DocCov Report: ${stats.packageName}@${stats.version}`);
1267
+ lines.push("");
1268
+ lines.push(`**Coverage: ${stats.coverageScore}%** \`${bar(stats.coverageScore)}\``);
1269
+ lines.push("");
1270
+ lines.push("| Metric | Value |");
1271
+ lines.push("|--------|-------|");
1272
+ lines.push(`| Exports | ${stats.totalExports} |`);
1273
+ lines.push(`| Fully documented | ${stats.fullyDocumented} |`);
1274
+ lines.push(`| Partially documented | ${stats.partiallyDocumented} |`);
1275
+ lines.push(`| Undocumented | ${stats.undocumented} |`);
1276
+ lines.push(`| Drift issues | ${stats.driftCount} |`);
1277
+ lines.push("");
1278
+ lines.push("## Coverage by Signal");
1279
+ lines.push("");
1280
+ lines.push("| Signal | Coverage |");
1281
+ lines.push("|--------|----------|");
1282
+ for (const [sig, s] of Object.entries(stats.signalCoverage)) {
1283
+ lines.push(`| ${sig} | ${s.pct}% \`${bar(s.pct, 8)}\` |`);
1284
+ }
1285
+ if (stats.byKind.length > 0) {
1286
+ lines.push("");
1287
+ lines.push("## Coverage by Kind");
1288
+ lines.push("");
1289
+ lines.push("| Kind | Count | Avg Score |");
1290
+ lines.push("|------|-------|-----------|");
1291
+ for (const k of stats.byKind) {
1292
+ lines.push(`| ${k.kind} | ${k.count} | ${k.avgScore}% |`);
1293
+ }
1294
+ }
1295
+ const lowExports = stats.exports.filter((e) => e.score < 100).slice(0, limit);
1296
+ if (lowExports.length > 0) {
1297
+ lines.push("");
1298
+ lines.push("## Lowest Coverage Exports");
1299
+ lines.push("");
1300
+ lines.push("| Export | Kind | Score | Missing |");
1301
+ lines.push("|--------|------|-------|---------|");
1302
+ for (const e of lowExports) {
1303
+ lines.push(`| \`${e.name}\` | ${e.kind} | ${e.score}% | ${e.missing.join(", ") || "-"} |`);
1304
+ }
1305
+ const totalLow = stats.exports.filter((e) => e.score < 100).length;
1306
+ if (totalLow > limit) {
1307
+ lines.push(`| ... | | | ${totalLow - limit} more |`);
1308
+ }
1309
+ }
1310
+ if (stats.driftIssues.length > 0) {
1311
+ lines.push("");
1312
+ lines.push("## Drift Issues");
1313
+ lines.push("");
1314
+ lines.push("| Export | Type | Issue |");
1315
+ lines.push("|--------|------|-------|");
1316
+ for (const d of stats.driftIssues.slice(0, limit)) {
1317
+ const hint = d.suggestion ? ` → ${d.suggestion}` : "";
1318
+ lines.push(`| \`${d.exportName}\` | ${d.type} | ${d.issue}${hint} |`);
1319
+ }
1320
+ }
1321
+ lines.push("");
1322
+ lines.push("---");
1323
+ lines.push("*Generated by [DocCov](https://doccov.com)*");
1324
+ return lines.join(`
1325
+ `);
1326
+ }
1327
+
1328
+ // src/reports/html.ts
1329
+ function escapeHtml(s) {
1330
+ return s.replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
1331
+ }
1332
+ function renderHtml(stats, options = {}) {
1333
+ const md = renderMarkdown(stats, options);
1334
+ return `<!DOCTYPE html>
1335
+ <html lang="en">
1336
+ <head>
1337
+ <meta charset="UTF-8">
1338
+ <meta name="viewport" content="width=device-width, initial-scale=1.0">
1339
+ <title>DocCov Report: ${escapeHtml(stats.packageName)}</title>
1340
+ <style>
1341
+ :root { --bg: #0d1117; --fg: #c9d1d9; --border: #30363d; --accent: #58a6ff; }
1342
+ body { font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', sans-serif; background: var(--bg); color: var(--fg); max-width: 900px; margin: 0 auto; padding: 2rem; line-height: 1.6; }
1343
+ h1, h2 { border-bottom: 1px solid var(--border); padding-bottom: 0.5rem; }
1344
+ table { border-collapse: collapse; width: 100%; margin: 1rem 0; }
1345
+ th, td { border: 1px solid var(--border); padding: 0.5rem 1rem; text-align: left; }
1346
+ th { background: #161b22; }
1347
+ code { background: #161b22; padding: 0.2rem 0.4rem; border-radius: 4px; font-size: 0.9em; }
1348
+ a { color: var(--accent); }
1349
+ </style>
1350
+ </head>
1351
+ <body>
1352
+ <pre style="white-space: pre-wrap; font-family: inherit;">${escapeHtml(md)}</pre>
1353
+ </body>
1354
+ </html>`;
1355
+ }
1356
+ // src/reports/stats.ts
1357
+ function computeStats(spec) {
1358
+ const exports = spec.exports ?? [];
1359
+ const signals = {
1360
+ description: { covered: 0, total: 0 },
1361
+ params: { covered: 0, total: 0 },
1362
+ returns: { covered: 0, total: 0 },
1363
+ examples: { covered: 0, total: 0 }
1364
+ };
1365
+ const kindMap = new Map;
1366
+ const driftIssues = [];
1367
+ let fullyDocumented = 0;
1368
+ let partiallyDocumented = 0;
1369
+ let undocumented = 0;
1370
+ for (const exp of exports) {
1371
+ const score = exp.docs?.coverageScore ?? 0;
1372
+ const missing = exp.docs?.missing ?? [];
1373
+ for (const sig of ["description", "params", "returns", "examples"]) {
1374
+ signals[sig].total++;
1375
+ if (!missing.includes(sig))
1376
+ signals[sig].covered++;
1377
+ }
1378
+ const kindEntry = kindMap.get(exp.kind) ?? { count: 0, totalScore: 0 };
1379
+ kindEntry.count++;
1380
+ kindEntry.totalScore += score;
1381
+ kindMap.set(exp.kind, kindEntry);
1382
+ if (score === 100)
1383
+ fullyDocumented++;
1384
+ else if (score > 0)
1385
+ partiallyDocumented++;
1386
+ else
1387
+ undocumented++;
1388
+ for (const d of exp.docs?.drift ?? []) {
1389
+ driftIssues.push({
1390
+ exportName: exp.name,
1391
+ type: d.type,
1392
+ issue: d.issue,
1393
+ suggestion: d.suggestion
1394
+ });
1395
+ }
1396
+ }
1397
+ const signalCoverage = Object.fromEntries(Object.entries(signals).map(([k, v]) => [
1398
+ k,
1399
+ { ...v, pct: v.total ? Math.round(v.covered / v.total * 100) : 0 }
1400
+ ]));
1401
+ const byKind = Array.from(kindMap.entries()).map(([kind, { count, totalScore }]) => ({
1402
+ kind,
1403
+ count,
1404
+ avgScore: Math.round(totalScore / count)
1405
+ })).sort((a, b) => b.count - a.count);
1406
+ const sortedExports = exports.map((e) => ({
1407
+ name: e.name,
1408
+ kind: e.kind,
1409
+ score: e.docs?.coverageScore ?? 0,
1410
+ missing: e.docs?.missing ?? []
1411
+ })).sort((a, b) => a.score - b.score);
1412
+ return {
1413
+ packageName: spec.meta.name ?? "unknown",
1414
+ version: spec.meta.version ?? "0.0.0",
1415
+ coverageScore: spec.docs?.coverageScore ?? 0,
1416
+ totalExports: exports.length,
1417
+ fullyDocumented,
1418
+ partiallyDocumented,
1419
+ undocumented,
1420
+ driftCount: driftIssues.length,
1421
+ signalCoverage,
1422
+ byKind,
1423
+ exports: sortedExports,
1424
+ driftIssues
1425
+ };
1426
+ }
1427
+ // src/commands/report.ts
1428
+ function registerReportCommand(program) {
1429
+ program.command("report [entry]").description("Generate a documentation coverage report").option("--cwd <dir>", "Working directory", process.cwd()).option("--package <name>", "Target package name (for monorepos)").option("--spec <file>", "Use existing openpkg.json instead of analyzing").option("--output <format>", "Output format: markdown, html, json", "markdown").option("--out <file>", "Write to file instead of stdout").option("--limit <n>", "Max exports to show in tables", "20").option("--skip-resolve", "Skip external type resolution from node_modules").action(async (entry, options) => {
1430
+ try {
1431
+ let spec;
1432
+ if (options.spec) {
1433
+ const specPath = path8.resolve(options.cwd, options.spec);
1434
+ spec = JSON.parse(fs7.readFileSync(specPath, "utf-8"));
1435
+ } else {
1436
+ let targetDir = options.cwd;
1437
+ let entryFile = entry;
1438
+ if (options.package) {
1439
+ const packageDir = await findPackageInMonorepo(options.cwd, options.package);
1440
+ if (!packageDir)
1441
+ throw new Error(`Package "${options.package}" not found`);
1442
+ targetDir = packageDir;
1443
+ }
1444
+ if (!entryFile) {
1445
+ entryFile = await findEntryPoint(targetDir, true);
1446
+ } else {
1447
+ entryFile = path8.resolve(targetDir, entryFile);
1448
+ }
1449
+ const spinner = ora4("Analyzing...").start();
1450
+ const resolveExternalTypes = !options.skipResolve;
1451
+ const doccov = new DocCov4({ resolveExternalTypes });
1452
+ const result = await doccov.analyzeFileWithDiagnostics(entryFile);
1453
+ spinner.succeed("Analysis complete");
1454
+ spec = result.spec;
1455
+ }
1456
+ const stats = computeStats(spec);
1457
+ const format = options.output;
1458
+ const limit = parseInt(options.limit, 10) || 20;
1459
+ let output;
1460
+ if (format === "json") {
1461
+ output = JSON.stringify(stats, null, 2);
1462
+ } else if (format === "html") {
1463
+ output = renderHtml(stats, { limit });
1464
+ } else {
1465
+ output = renderMarkdown(stats, { limit });
1466
+ }
1467
+ if (options.out) {
1468
+ const outPath = path8.resolve(options.cwd, options.out);
1469
+ fs7.writeFileSync(outPath, output);
1470
+ console.log(chalk7.green(`Report written to ${outPath}`));
1471
+ } else {
1472
+ console.log(output);
1473
+ }
1474
+ } catch (err) {
1475
+ console.error(chalk7.red("Error:"), err instanceof Error ? err.message : err);
1476
+ process.exitCode = 1;
1477
+ }
1478
+ });
1479
+ }
1480
+
855
1481
  // src/commands/scan.ts
856
- import * as fs9 from "node:fs";
1482
+ import * as fs11 from "node:fs";
857
1483
  import * as os from "node:os";
858
- import * as path10 from "node:path";
859
- import { DocCov as DocCov3 } from "@doccov/sdk";
860
- import chalk6 from "chalk";
861
- import ora3 from "ora";
1484
+ import * as path12 from "node:path";
1485
+ import { DocCov as DocCov5 } from "@doccov/sdk";
1486
+ import chalk8 from "chalk";
1487
+ import ora5 from "ora";
862
1488
  import { simpleGit } from "simple-git";
863
1489
 
864
1490
  // src/utils/entry-detection.ts
865
- import * as fs6 from "node:fs";
866
- import * as path7 from "node:path";
1491
+ import * as fs8 from "node:fs";
1492
+ import * as path9 from "node:path";
867
1493
  function detectEntryPoint(repoDir) {
868
- const pkgPath = path7.join(repoDir, "package.json");
869
- if (!fs6.existsSync(pkgPath)) {
1494
+ const pkgPath = path9.join(repoDir, "package.json");
1495
+ if (!fs8.existsSync(pkgPath)) {
870
1496
  throw new Error("No package.json found - not a valid npm package");
871
1497
  }
872
1498
  let pkg;
873
1499
  try {
874
- pkg = JSON.parse(fs6.readFileSync(pkgPath, "utf-8"));
1500
+ pkg = JSON.parse(fs8.readFileSync(pkgPath, "utf-8"));
875
1501
  } catch {
876
1502
  throw new Error("Failed to parse package.json");
877
1503
  }
@@ -921,7 +1547,7 @@ function detectEntryPoint(repoDir) {
921
1547
  "source/index.ts"
922
1548
  ];
923
1549
  for (const p of commonPaths) {
924
- if (fs6.existsSync(path7.join(repoDir, p))) {
1550
+ if (fs8.existsSync(path9.join(repoDir, p))) {
925
1551
  return { entryPath: p, source: "fallback" };
926
1552
  }
927
1553
  }
@@ -930,7 +1556,7 @@ function detectEntryPoint(repoDir) {
930
1556
  function resolveToTs(baseDir, filePath) {
931
1557
  const normalized = filePath.replace(/^\.\//, "");
932
1558
  if (normalized.endsWith(".ts") || normalized.endsWith(".tsx")) {
933
- if (fs6.existsSync(path7.join(baseDir, normalized))) {
1559
+ if (fs8.existsSync(path9.join(baseDir, normalized))) {
934
1560
  return normalized;
935
1561
  }
936
1562
  }
@@ -955,11 +1581,11 @@ function resolveToTs(baseDir, filePath) {
955
1581
  candidates.push(normalized.replace(/\.d\.ts$/, ".ts"));
956
1582
  candidates.push(normalized.replace(/\.js$/, ".tsx"));
957
1583
  if (normalized.endsWith(".d.ts")) {
958
- const baseName = path7.basename(normalized, ".d.ts");
1584
+ const baseName = path9.basename(normalized, ".d.ts");
959
1585
  candidates.push(`src/${baseName}.ts`);
960
1586
  }
961
1587
  for (const candidate of candidates) {
962
- if (fs6.existsSync(path7.join(baseDir, candidate))) {
1588
+ if (fs8.existsSync(path9.join(baseDir, candidate))) {
963
1589
  return candidate;
964
1590
  }
965
1591
  }
@@ -997,17 +1623,17 @@ function buildDisplayUrl(parsed) {
997
1623
  }
998
1624
 
999
1625
  // src/utils/llm-build-plan.ts
1000
- import * as fs7 from "node:fs";
1001
- import * as path8 from "node:path";
1002
- import { createAnthropic } from "@ai-sdk/anthropic";
1003
- import { createOpenAI } from "@ai-sdk/openai";
1004
- import { generateObject } from "ai";
1005
- import { z as z2 } from "zod";
1006
- var BuildPlanSchema = z2.object({
1007
- installCommand: z2.string().optional().describe("Additional install command if needed"),
1008
- buildCommands: z2.array(z2.string()).describe('Build steps to run, e.g. ["npm run build:wasm"]'),
1009
- entryPoint: z2.string().describe("Path to TS/TSX entry file after build"),
1010
- notes: z2.string().optional().describe("Caveats or warnings")
1626
+ import * as fs9 from "node:fs";
1627
+ import * as path10 from "node:path";
1628
+ import { createAnthropic as createAnthropic2 } from "@ai-sdk/anthropic";
1629
+ import { createOpenAI as createOpenAI2 } from "@ai-sdk/openai";
1630
+ import { generateObject as generateObject2 } from "ai";
1631
+ import { z as z3 } from "zod";
1632
+ var BuildPlanSchema = z3.object({
1633
+ installCommand: z3.string().optional().describe("Additional install command if needed"),
1634
+ buildCommands: z3.array(z3.string()).describe('Build steps to run, e.g. ["npm run build:wasm"]'),
1635
+ entryPoint: z3.string().describe("Path to TS/TSX entry file after build"),
1636
+ notes: z3.string().optional().describe("Caveats or warnings")
1011
1637
  });
1012
1638
  var CONTEXT_FILES = [
1013
1639
  "package.json",
@@ -1022,22 +1648,22 @@ var CONTEXT_FILES = [
1022
1648
  "wasm-pack.json"
1023
1649
  ];
1024
1650
  var MAX_FILE_CHARS = 2000;
1025
- function getModel() {
1651
+ function getModel2() {
1026
1652
  const provider = process.env.DOCCOV_LLM_PROVIDER?.toLowerCase();
1027
1653
  if (provider === "anthropic" || process.env.ANTHROPIC_API_KEY) {
1028
- const anthropic = createAnthropic();
1654
+ const anthropic = createAnthropic2();
1029
1655
  return anthropic("claude-sonnet-4-20250514");
1030
1656
  }
1031
- const openai = createOpenAI();
1657
+ const openai = createOpenAI2();
1032
1658
  return openai("gpt-4o-mini");
1033
1659
  }
1034
1660
  async function gatherContextFiles(repoDir) {
1035
1661
  const sections = [];
1036
1662
  for (const fileName of CONTEXT_FILES) {
1037
- const filePath = path8.join(repoDir, fileName);
1038
- if (fs7.existsSync(filePath)) {
1663
+ const filePath = path10.join(repoDir, fileName);
1664
+ if (fs9.existsSync(filePath)) {
1039
1665
  try {
1040
- let content = fs7.readFileSync(filePath, "utf-8");
1666
+ let content = fs9.readFileSync(filePath, "utf-8");
1041
1667
  if (content.length > MAX_FILE_CHARS) {
1042
1668
  content = `${content.slice(0, MAX_FILE_CHARS)}
1043
1669
  ... (truncated)`;
@@ -1079,8 +1705,8 @@ async function generateBuildPlan(repoDir) {
1079
1705
  if (!context.trim()) {
1080
1706
  return null;
1081
1707
  }
1082
- const model = getModel();
1083
- const { object } = await generateObject({
1708
+ const model = getModel2();
1709
+ const { object } = await generateObject2({
1084
1710
  model,
1085
1711
  schema: BuildPlanSchema,
1086
1712
  prompt: BUILD_PLAN_PROMPT(context)
@@ -1089,17 +1715,17 @@ async function generateBuildPlan(repoDir) {
1089
1715
  }
1090
1716
 
1091
1717
  // src/utils/monorepo-detection.ts
1092
- import * as fs8 from "node:fs";
1093
- import * as path9 from "node:path";
1718
+ import * as fs10 from "node:fs";
1719
+ import * as path11 from "node:path";
1094
1720
  import { glob } from "glob";
1095
1721
  async function detectMonorepo(repoDir) {
1096
- const pkgPath = path9.join(repoDir, "package.json");
1097
- if (!fs8.existsSync(pkgPath)) {
1722
+ const pkgPath = path11.join(repoDir, "package.json");
1723
+ if (!fs10.existsSync(pkgPath)) {
1098
1724
  return { isMonorepo: false, packages: [], type: "none" };
1099
1725
  }
1100
1726
  let pkg;
1101
1727
  try {
1102
- pkg = JSON.parse(fs8.readFileSync(pkgPath, "utf-8"));
1728
+ pkg = JSON.parse(fs10.readFileSync(pkgPath, "utf-8"));
1103
1729
  } catch {
1104
1730
  return { isMonorepo: false, packages: [], type: "none" };
1105
1731
  }
@@ -1108,16 +1734,16 @@ async function detectMonorepo(repoDir) {
1108
1734
  const packages = await resolveWorkspacePackages(repoDir, patterns);
1109
1735
  return { isMonorepo: packages.length > 0, packages, type: "npm" };
1110
1736
  }
1111
- const pnpmPath = path9.join(repoDir, "pnpm-workspace.yaml");
1112
- if (fs8.existsSync(pnpmPath)) {
1737
+ const pnpmPath = path11.join(repoDir, "pnpm-workspace.yaml");
1738
+ if (fs10.existsSync(pnpmPath)) {
1113
1739
  const patterns = parsePnpmWorkspace(pnpmPath);
1114
1740
  const packages = await resolveWorkspacePackages(repoDir, patterns);
1115
1741
  return { isMonorepo: packages.length > 0, packages, type: "pnpm" };
1116
1742
  }
1117
- const lernaPath = path9.join(repoDir, "lerna.json");
1118
- if (fs8.existsSync(lernaPath)) {
1743
+ const lernaPath = path11.join(repoDir, "lerna.json");
1744
+ if (fs10.existsSync(lernaPath)) {
1119
1745
  try {
1120
- const lerna = JSON.parse(fs8.readFileSync(lernaPath, "utf-8"));
1746
+ const lerna = JSON.parse(fs10.readFileSync(lernaPath, "utf-8"));
1121
1747
  const patterns = lerna.packages ?? ["packages/*"];
1122
1748
  const packages = await resolveWorkspacePackages(repoDir, patterns);
1123
1749
  return { isMonorepo: packages.length > 0, packages, type: "lerna" };
@@ -1139,7 +1765,7 @@ function extractWorkspacePatterns(workspaces) {
1139
1765
  }
1140
1766
  function parsePnpmWorkspace(filePath) {
1141
1767
  try {
1142
- const content = fs8.readFileSync(filePath, "utf-8");
1768
+ const content = fs10.readFileSync(filePath, "utf-8");
1143
1769
  const match = content.match(/packages:\s*\n((?:\s+-\s+.+\n?)+)/);
1144
1770
  if (match) {
1145
1771
  const lines = match[1].split(`
@@ -1159,13 +1785,13 @@ async function resolveWorkspacePackages(repoDir, patterns) {
1159
1785
  absolute: false
1160
1786
  });
1161
1787
  for (const match of matches) {
1162
- const pkgJsonPath = path9.join(repoDir, match, "package.json");
1163
- if (fs8.existsSync(pkgJsonPath)) {
1788
+ const pkgJsonPath = path11.join(repoDir, match, "package.json");
1789
+ if (fs10.existsSync(pkgJsonPath)) {
1164
1790
  try {
1165
- const pkgJson = JSON.parse(fs8.readFileSync(pkgJsonPath, "utf-8"));
1791
+ const pkgJson = JSON.parse(fs10.readFileSync(pkgJsonPath, "utf-8"));
1166
1792
  packages.push({
1167
- name: pkgJson.name ?? path9.basename(match),
1168
- path: path9.join(repoDir, match),
1793
+ name: pkgJson.name ?? path11.basename(match),
1794
+ path: path11.join(repoDir, match),
1169
1795
  relativePath: match
1170
1796
  });
1171
1797
  } catch {}
@@ -1192,29 +1818,29 @@ function formatPackageList(packages, limit = 10) {
1192
1818
  }
1193
1819
 
1194
1820
  // src/commands/scan.ts
1195
- var defaultDependencies5 = {
1196
- createDocCov: (options) => new DocCov3(options),
1197
- spinner: (text) => ora3(text),
1821
+ var defaultDependencies6 = {
1822
+ createDocCov: (options) => new DocCov5(options),
1823
+ spinner: (text) => ora5(text),
1198
1824
  log: console.log,
1199
1825
  error: console.error
1200
1826
  };
1201
1827
  function registerScanCommand(program, dependencies = {}) {
1202
1828
  const { createDocCov, spinner, log, error } = {
1203
- ...defaultDependencies5,
1829
+ ...defaultDependencies6,
1204
1830
  ...dependencies
1205
1831
  };
1206
- program.command("scan <url>").description("Analyze docs coverage for any public GitHub repository").option("--ref <branch>", "Branch or tag to analyze").option("--package <name>", "Target package in monorepo").option("--output <format>", "Output format: text or json", "text").option("--no-cleanup", "Keep cloned repo (for debugging)").option("--skip-install", "Skip dependency installation (faster, but may limit type resolution)").option("--save-spec <path>", "Save full OpenPkg spec to file").action(async (url, options) => {
1832
+ program.command("scan <url>").description("Analyze docs coverage for any public GitHub repository").option("--ref <branch>", "Branch or tag to analyze").option("--package <name>", "Target package in monorepo").option("--output <format>", "Output format: text or json", "text").option("--no-cleanup", "Keep cloned repo (for debugging)").option("--skip-install", "Skip dependency installation (faster, but may limit type resolution)").option("--skip-resolve", "Skip external type resolution from node_modules").option("--save-spec <path>", "Save full OpenPkg spec to file").action(async (url, options) => {
1207
1833
  let tempDir;
1208
1834
  try {
1209
1835
  const parsed = parseGitHubUrl(url, options.ref ?? "main");
1210
1836
  const cloneUrl = buildCloneUrl(parsed);
1211
1837
  const displayUrl = buildDisplayUrl(parsed);
1212
1838
  log("");
1213
- log(chalk6.bold(`Scanning ${displayUrl}`));
1214
- log(chalk6.gray(`Branch/tag: ${parsed.ref}`));
1839
+ log(chalk8.bold(`Scanning ${displayUrl}`));
1840
+ log(chalk8.gray(`Branch/tag: ${parsed.ref}`));
1215
1841
  log("");
1216
- tempDir = path10.join(os.tmpdir(), `doccov-scan-${Date.now()}-${Math.random().toString(36).slice(2)}`);
1217
- fs9.mkdirSync(tempDir, { recursive: true });
1842
+ tempDir = path12.join(os.tmpdir(), `doccov-scan-${Date.now()}-${Math.random().toString(36).slice(2)}`);
1843
+ fs11.mkdirSync(tempDir, { recursive: true });
1218
1844
  const cloneSpinner = spinner(`Cloning ${parsed.owner}/${parsed.repo}...`);
1219
1845
  cloneSpinner.start();
1220
1846
  try {
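Note: registerScanCommand keeps its injectable-dependency seam (the defaultDependencies6 object spread with caller-supplied overrides); this hunk mostly renumbers the bundled aliases and adds the --skip-resolve option. A rough sketch of how that seam could be exercised, assuming registerScanCommand were importable from the package and given a commander Command instance:

import { Command } from "commander";

// Hypothetical test harness: capture CLI output instead of printing it.
const captured: string[] = [];
registerScanCommand(new Command(), {
  log: (msg = "") => captured.push(String(msg)),
  error: (...parts: unknown[]) => captured.push(parts.join(" ")),
});
// createDocCov and spinner fall back to the defaults shown above.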
@@ -1239,7 +1865,7 @@ function registerScanCommand(program, dependencies = {}) {
1239
1865
  throw new Error(`Clone failed: ${message}`);
1240
1866
  }
1241
1867
  if (options.skipInstall) {
1242
- log(chalk6.gray("Skipping dependency installation (--skip-install)"));
1868
+ log(chalk8.gray("Skipping dependency installation (--skip-install)"));
1243
1869
  } else {
1244
1870
  const installSpinner = spinner("Installing dependencies...");
1245
1871
  installSpinner.start();
@@ -1255,7 +1881,7 @@ function registerScanCommand(program, dependencies = {}) {
1255
1881
  ];
1256
1882
  let installed = false;
1257
1883
  for (const { file, cmd } of lockfileCommands) {
1258
- if (fs9.existsSync(path10.join(tempDir, file))) {
1884
+ if (fs11.existsSync(path12.join(tempDir, file))) {
1259
1885
  try {
1260
1886
  execSync(cmd, {
1261
1887
  cwd: tempDir,
@@ -1302,14 +1928,14 @@ function registerScanCommand(program, dependencies = {}) {
1302
1928
  } else {
1303
1929
  installSpinner.warn("Could not install dependencies (analysis may be limited)");
1304
1930
  for (const err of installErrors) {
1305
- log(chalk6.gray(` ${err}`));
1931
+ log(chalk8.gray(` ${err}`));
1306
1932
  }
1307
1933
  }
1308
1934
  } catch (outerError) {
1309
1935
  const msg = outerError instanceof Error ? outerError.message : String(outerError);
1310
1936
  installSpinner.warn(`Could not install dependencies: ${msg.slice(0, 100)}`);
1311
1937
  for (const err of installErrors) {
1312
- log(chalk6.gray(` ${err}`));
1938
+ log(chalk8.gray(` ${err}`));
1313
1939
  }
1314
1940
  }
1315
1941
  }
@@ -1319,7 +1945,7 @@ function registerScanCommand(program, dependencies = {}) {
1319
1945
  if (mono.isMonorepo) {
1320
1946
  if (!options.package) {
1321
1947
  error("");
1322
- error(chalk6.red(`Monorepo detected with ${mono.packages.length} packages. Specify target with --package:`));
1948
+ error(chalk8.red(`Monorepo detected with ${mono.packages.length} packages. Specify target with --package:`));
1323
1949
  error("");
1324
1950
  error(formatPackageList(mono.packages));
1325
1951
  error("");
@@ -1328,7 +1954,7 @@ function registerScanCommand(program, dependencies = {}) {
1328
1954
  const pkg = await findPackage(tempDir, options.package);
1329
1955
  if (!pkg) {
1330
1956
  error("");
1331
- error(chalk6.red(`Package "${options.package}" not found. Available packages:`));
1957
+ error(chalk8.red(`Package "${options.package}" not found. Available packages:`));
1332
1958
  error("");
1333
1959
  error(formatPackageList(mono.packages));
1334
1960
  error("");
@@ -1336,7 +1962,7 @@ function registerScanCommand(program, dependencies = {}) {
1336
1962
  }
1337
1963
  targetDir = pkg.path;
1338
1964
  packageName = pkg.name;
1339
- log(chalk6.gray(`Analyzing package: ${packageName}`));
1965
+ log(chalk8.gray(`Analyzing package: ${packageName}`));
1340
1966
  }
1341
1967
  const entrySpinner = spinner("Detecting entry point...");
1342
1968
  entrySpinner.start();
@@ -1345,15 +1971,15 @@ function registerScanCommand(program, dependencies = {}) {
1345
1971
  if (!entryFile.endsWith(".d.ts"))
1346
1972
  return false;
1347
1973
  const cargoLocations = [
1348
- path10.join(pkgDir, "Cargo.toml"),
1349
- path10.join(repoRoot, "Cargo.toml")
1974
+ path12.join(pkgDir, "Cargo.toml"),
1975
+ path12.join(repoRoot, "Cargo.toml")
1350
1976
  ];
1351
- const hasCargoToml = cargoLocations.some((p) => fs9.existsSync(p));
1977
+ const hasCargoToml = cargoLocations.some((p) => fs11.existsSync(p));
1352
1978
  const checkWasmScripts = (dir) => {
1353
- const pkgPath = path10.join(dir, "package.json");
1354
- if (fs9.existsSync(pkgPath)) {
1979
+ const pkgPath = path12.join(dir, "package.json");
1980
+ if (fs11.existsSync(pkgPath)) {
1355
1981
  try {
1356
- const pkg = JSON.parse(fs9.readFileSync(pkgPath, "utf-8"));
1982
+ const pkg = JSON.parse(fs11.readFileSync(pkgPath, "utf-8"));
1357
1983
  const scripts = Object.values(pkg.scripts ?? {}).join(" ");
1358
1984
  return scripts.includes("wasm-pack") || scripts.includes("wasm");
1359
1985
  } catch {}
@@ -1373,24 +1999,24 @@ function registerScanCommand(program, dependencies = {}) {
1373
1999
  if (plan.buildCommands.length > 0) {
1374
2000
  const { execSync } = await import("node:child_process");
1375
2001
  for (const cmd of plan.buildCommands) {
1376
- log(chalk6.gray(` Running: ${cmd}`));
2002
+ log(chalk8.gray(` Running: ${cmd}`));
1377
2003
  try {
1378
2004
  execSync(cmd, { cwd: targetDir, stdio: "pipe", timeout: 300000 });
1379
2005
  } catch (buildError) {
1380
2006
  buildFailed = true;
1381
2007
  const msg = buildError instanceof Error ? buildError.message : String(buildError);
1382
2008
  if (msg.includes("rustc") || msg.includes("cargo") || msg.includes("wasm-pack")) {
1383
- log(chalk6.yellow(` ⚠ Build requires Rust toolchain (not available)`));
2009
+ log(chalk8.yellow(` ⚠ Build requires Rust toolchain (not available)`));
1384
2010
  } else if (msg.includes("rimraf") || msg.includes("command not found")) {
1385
- log(chalk6.yellow(` ⚠ Build failed: missing dependencies`));
2011
+ log(chalk8.yellow(` ⚠ Build failed: missing dependencies`));
1386
2012
  } else {
1387
- log(chalk6.yellow(` ⚠ Build failed: ${msg.slice(0, 80)}`));
2013
+ log(chalk8.yellow(` ⚠ Build failed: ${msg.slice(0, 80)}`));
1388
2014
  }
1389
2015
  }
1390
2016
  }
1391
2017
  }
1392
2018
  if (plan.notes) {
1393
- log(chalk6.gray(` Note: ${plan.notes}`));
2019
+ log(chalk8.gray(` Note: ${plan.notes}`));
1394
2020
  }
1395
2021
  return plan.entryPoint;
1396
2022
  };
@@ -1400,26 +2026,26 @@ function registerScanCommand(program, dependencies = {}) {
1400
2026
  entrySpinner.text = "Detected .d.ts entry with WASM indicators...";
1401
2027
  const llmEntry = await runLlmFallback("WASM project detected");
1402
2028
  if (llmEntry) {
1403
- entryPath = path10.join(targetDir, llmEntry);
2029
+ entryPath = path12.join(targetDir, llmEntry);
1404
2030
  if (buildFailed) {
1405
2031
  entrySpinner.succeed(`Entry point: ${llmEntry} (using pre-committed declarations)`);
1406
- log(chalk6.gray(" Coverage may be limited - generated .d.ts files typically lack JSDoc"));
2032
+ log(chalk8.gray(" Coverage may be limited - generated .d.ts files typically lack JSDoc"));
1407
2033
  } else {
1408
2034
  entrySpinner.succeed(`Entry point: ${llmEntry} (from LLM fallback - WASM project)`);
1409
2035
  }
1410
2036
  } else {
1411
- entryPath = path10.join(targetDir, entry.entryPath);
2037
+ entryPath = path12.join(targetDir, entry.entryPath);
1412
2038
  entrySpinner.succeed(`Entry point: ${entry.entryPath} (from ${entry.source})`);
1413
- log(chalk6.yellow(" ⚠ WASM project detected but no API key - analysis may be limited"));
2039
+ log(chalk8.yellow(" ⚠ WASM project detected but no API key - analysis may be limited"));
1414
2040
  }
1415
2041
  } else {
1416
- entryPath = path10.join(targetDir, entry.entryPath);
2042
+ entryPath = path12.join(targetDir, entry.entryPath);
1417
2043
  entrySpinner.succeed(`Entry point: ${entry.entryPath} (from ${entry.source})`);
1418
2044
  }
1419
2045
  } catch (entryError) {
1420
2046
  const llmEntry = await runLlmFallback("Heuristics failed");
1421
2047
  if (llmEntry) {
1422
- entryPath = path10.join(targetDir, llmEntry);
2048
+ entryPath = path12.join(targetDir, llmEntry);
1423
2049
  entrySpinner.succeed(`Entry point: ${llmEntry} (from LLM fallback)`);
1424
2050
  } else {
1425
2051
  entrySpinner.fail("Could not detect entry point (set OPENAI_API_KEY for smart fallback)");
@@ -1430,7 +2056,8 @@ function registerScanCommand(program, dependencies = {}) {
1430
2056
  analyzeSpinner.start();
1431
2057
  let result;
1432
2058
  try {
1433
- const doccov = createDocCov({ resolveExternalTypes: true });
2059
+ const resolveExternalTypes = !options.skipResolve;
2060
+ const doccov = createDocCov({ resolveExternalTypes });
1434
2061
  result = await doccov.analyzeFileWithDiagnostics(entryPath);
1435
2062
  analyzeSpinner.succeed("Analysis complete");
1436
2063
  } catch (analysisError) {
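Note: this is where the new --skip-resolve flag (added to the scan command definition earlier in this diff) takes effect. A condensed sketch of the mapping, with an illustrative invocation:

// e.g. doccov scan https://github.com/owner/repo --skip-resolve
// commander exposes the flag as options.skipResolve
const resolveExternalTypes = !options.skipResolve;
const doccov = createDocCov({ resolveExternalTypes }); // false => external types from node_modules are not resolved
const result = await doccov.analyzeFileWithDiagnostics(entryPath);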
@@ -1440,9 +2067,9 @@ function registerScanCommand(program, dependencies = {}) {
1440
2067
  const spec = result.spec;
1441
2068
  const coverageScore = spec.docs?.coverageScore ?? 0;
1442
2069
  if (options.saveSpec) {
1443
- const specPath = path10.resolve(process.cwd(), options.saveSpec);
1444
- fs9.writeFileSync(specPath, JSON.stringify(spec, null, 2));
1445
- log(chalk6.green(`✓ Saved spec to ${options.saveSpec}`));
2070
+ const specPath = path12.resolve(process.cwd(), options.saveSpec);
2071
+ fs11.writeFileSync(specPath, JSON.stringify(spec, null, 2));
2072
+ log(chalk8.green(`✓ Saved spec to ${options.saveSpec}`));
1446
2073
  }
1447
2074
  const undocumented = [];
1448
2075
  const driftIssues = [];
@@ -1479,7 +2106,7 @@ function registerScanCommand(program, dependencies = {}) {
1479
2106
  printTextResult(scanResult, log);
1480
2107
  }
1481
2108
  } catch (commandError) {
1482
- error(chalk6.red("Error:"), commandError instanceof Error ? commandError.message : commandError);
2109
+ error(chalk8.red("Error:"), commandError instanceof Error ? commandError.message : commandError);
1483
2110
  process.exitCode = 1;
1484
2111
  } finally {
1485
2112
  if (tempDir && options.cleanup !== false) {
@@ -1489,46 +2116,46 @@ function registerScanCommand(program, dependencies = {}) {
1489
2116
  stdio: "ignore"
1490
2117
  }).unref();
1491
2118
  } else if (tempDir) {
1492
- log(chalk6.gray(`Repo preserved at: ${tempDir}`));
2119
+ log(chalk8.gray(`Repo preserved at: ${tempDir}`));
1493
2120
  }
1494
2121
  }
1495
2122
  });
1496
2123
  }
1497
2124
  function printTextResult(result, log) {
1498
2125
  log("");
1499
- log(chalk6.bold("DocCov Scan Results"));
2126
+ log(chalk8.bold("DocCov Scan Results"));
1500
2127
  log("─".repeat(40));
1501
2128
  const repoName = result.packageName ? `${result.owner}/${result.repo} (${result.packageName})` : `${result.owner}/${result.repo}`;
1502
- log(`Repository: ${chalk6.cyan(repoName)}`);
1503
- log(`Branch: ${chalk6.gray(result.ref)}`);
2129
+ log(`Repository: ${chalk8.cyan(repoName)}`);
2130
+ log(`Branch: ${chalk8.gray(result.ref)}`);
1504
2131
  log("");
1505
- const coverageColor = result.coverage >= 80 ? chalk6.green : result.coverage >= 50 ? chalk6.yellow : chalk6.red;
1506
- log(chalk6.bold("Coverage"));
2132
+ const coverageColor = result.coverage >= 80 ? chalk8.green : result.coverage >= 50 ? chalk8.yellow : chalk8.red;
2133
+ log(chalk8.bold("Coverage"));
1507
2134
  log(` ${coverageColor(`${result.coverage}%`)}`);
1508
2135
  log("");
1509
- log(chalk6.bold("Stats"));
2136
+ log(chalk8.bold("Stats"));
1510
2137
  log(` ${result.exportCount} exports`);
1511
2138
  log(` ${result.typeCount} types`);
1512
2139
  log(` ${result.undocumented.length} undocumented`);
1513
2140
  log(` ${result.driftCount} drift issues`);
1514
2141
  if (result.undocumented.length > 0) {
1515
2142
  log("");
1516
- log(chalk6.bold("Undocumented Exports"));
2143
+ log(chalk8.bold("Undocumented Exports"));
1517
2144
  for (const name of result.undocumented.slice(0, 10)) {
1518
- log(chalk6.yellow(` ! ${name}`));
2145
+ log(chalk8.yellow(` ! ${name}`));
1519
2146
  }
1520
2147
  if (result.undocumented.length > 10) {
1521
- log(chalk6.gray(` ... and ${result.undocumented.length - 10} more`));
2148
+ log(chalk8.gray(` ... and ${result.undocumented.length - 10} more`));
1522
2149
  }
1523
2150
  }
1524
2151
  if (result.drift.length > 0) {
1525
2152
  log("");
1526
- log(chalk6.bold("Drift Issues"));
2153
+ log(chalk8.bold("Drift Issues"));
1527
2154
  for (const d of result.drift.slice(0, 5)) {
1528
- log(chalk6.red(` • ${d.export}: ${d.issue}`));
2155
+ log(chalk8.red(` • ${d.export}: ${d.issue}`));
1529
2156
  }
1530
2157
  if (result.drift.length > 5) {
1531
- log(chalk6.gray(` ... and ${result.drift.length - 5} more`));
2158
+ log(chalk8.gray(` ... and ${result.drift.length - 5} more`));
1532
2159
  }
1533
2160
  }
1534
2161
  log("");
@@ -1536,14 +2163,16 @@ function printTextResult(result, log) {
1536
2163
 
1537
2164
  // src/cli.ts
1538
2165
  var __filename2 = fileURLToPath(import.meta.url);
1539
- var __dirname2 = path11.dirname(__filename2);
1540
- var packageJson = JSON.parse(readFileSync8(path11.join(__dirname2, "../package.json"), "utf-8"));
2166
+ var __dirname2 = path13.dirname(__filename2);
2167
+ var packageJson = JSON.parse(readFileSync9(path13.join(__dirname2, "../package.json"), "utf-8"));
1541
2168
  var program = new Command;
1542
2169
  program.name("doccov").description("DocCov - Documentation coverage and drift detection for TypeScript").version(packageJson.version);
1543
2170
  registerGenerateCommand(program);
1544
2171
  registerCheckCommand(program);
1545
2172
  registerDiffCommand(program);
2173
+ registerFixCommand(program);
1546
2174
  registerInitCommand(program);
2175
+ registerReportCommand(program);
1547
2176
  registerScanCommand(program);
1548
2177
  program.command("*", { hidden: true }).action(() => {
1549
2178
  program.outputHelp();