@doccov/cli 0.3.0 → 0.4.0

This diff shows the content of publicly available package versions as released to one of the supported registries; it is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
Files changed (3)
  1. package/README.md +24 -48
  2. package/dist/cli.js +789 -162
  3. package/package.json +3 -3
package/dist/cli.js CHANGED
@@ -124,18 +124,93 @@ ${formatIssues(issues)}`);
124
124
  // src/config/index.ts
125
125
  var defineConfig = (config) => config;
126
126
  // src/cli.ts
127
- import { readFileSync as readFileSync8 } from "node:fs";
128
- import * as path11 from "node:path";
127
+ import { readFileSync as readFileSync9 } from "node:fs";
128
+ import * as path13 from "node:path";
129
129
  import { fileURLToPath } from "node:url";
130
130
  import { Command } from "commander";
131
131
 
132
132
  // src/commands/check.ts
133
133
  import * as fs2 from "node:fs";
134
134
  import * as path3 from "node:path";
135
- import { DocCov } from "@doccov/sdk";
135
+ import {
136
+ DocCov,
137
+ detectExampleAssertionFailures,
138
+ detectExampleRuntimeErrors,
139
+ hasNonAssertionComments,
140
+ parseAssertions,
141
+ runExamplesWithPackage
142
+ } from "@doccov/sdk";
136
143
  import chalk from "chalk";
137
144
  import ora from "ora";
138
145
 
146
+ // src/utils/llm-assertion-parser.ts
147
+ import { createAnthropic } from "@ai-sdk/anthropic";
148
+ import { createOpenAI } from "@ai-sdk/openai";
149
+ import { generateObject } from "ai";
150
+ import { z as z2 } from "zod";
151
+ var AssertionParseSchema = z2.object({
152
+ assertions: z2.array(z2.object({
153
+ lineNumber: z2.number().describe("1-indexed line number where the assertion appears"),
154
+ expected: z2.string().describe("The expected output value"),
155
+ originalComment: z2.string().describe("The original comment text"),
156
+ suggestedSyntax: z2.string().describe("The line rewritten with standard // => value syntax")
157
+ })).describe("List of assertion-like comments found in the code"),
158
+ hasAssertions: z2.boolean().describe("Whether any assertion-like comments were found")
159
+ });
160
+ var ASSERTION_PARSE_PROMPT = (code) => `Analyze this TypeScript/JavaScript example code for assertion-like comments.
161
+
162
+ Look for comments that appear to specify expected output values, such as:
163
+ - "// should be 3"
164
+ - "// returns 5"
165
+ - "// outputs: hello"
166
+ - "// expected: [1, 2, 3]"
167
+ - "// 42" (bare value after console.log)
168
+ - "// result: true"
169
+
170
+ Do NOT include:
171
+ - Regular code comments that explain what the code does
172
+ - Comments that are instructions or documentation
173
+ - Comments with // => (already using standard syntax)
174
+
175
+ For each assertion found, extract:
176
+ 1. The line number (1-indexed)
177
+ 2. The expected value (just the value, not the comment prefix)
178
+ 3. The original comment text
179
+ 4. A suggested rewrite of the ENTIRE line using "// => value" syntax
180
+
181
+ Code:
182
+ \`\`\`
183
+ ${code}
184
+ \`\`\``;
185
+ function getModel() {
186
+ const provider = process.env.DOCCOV_LLM_PROVIDER?.toLowerCase();
187
+ if (provider === "anthropic" || process.env.ANTHROPIC_API_KEY) {
188
+ const anthropic = createAnthropic();
189
+ return anthropic("claude-sonnet-4-20250514");
190
+ }
191
+ const openai = createOpenAI();
192
+ return openai("gpt-4o-mini");
193
+ }
194
+ function isLLMAssertionParsingAvailable() {
195
+ return Boolean(process.env.OPENAI_API_KEY || process.env.ANTHROPIC_API_KEY);
196
+ }
197
+ async function parseAssertionsWithLLM(code) {
198
+ if (!isLLMAssertionParsingAvailable()) {
199
+ return null;
200
+ }
201
+ try {
202
+ const model = getModel();
203
+ const { object } = await generateObject({
204
+ model,
205
+ schema: AssertionParseSchema,
206
+ prompt: ASSERTION_PARSE_PROMPT(code)
207
+ });
208
+ return object;
209
+ } catch {
210
+ return null;
211
+ }
212
+ }
213
+
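The new llm-assertion-parser module above is only exercised when an API key is present; a minimal usage sketch (the sample snippet and returned values are illustrative, not real output):

```ts
const example = [
  "const sum = add(1, 2);",
  "console.log(sum); // should be 3",
].join("\n");

// Returns null when neither OPENAI_API_KEY nor ANTHROPIC_API_KEY is set,
// or when the structured-output call fails; callers fall back to regex parsing.
const parsed = await parseAssertionsWithLLM(example);
if (parsed?.hasAssertions) {
  for (const a of parsed.assertions) {
    // Expected shape per AssertionParseSchema, e.g.
    // { lineNumber: 2, expected: "3", originalComment: "// should be 3",
    //   suggestedSyntax: "console.log(sum); // => 3" }
    console.log(a.suggestedSyntax);
  }
}
```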
139
214
  // src/utils/package-utils.ts
140
215
  import * as fs from "node:fs";
141
216
  import * as path2 from "node:path";
@@ -286,7 +361,7 @@ function registerCheckCommand(program, dependencies = {}) {
286
361
  ...defaultDependencies,
287
362
  ...dependencies
288
363
  };
289
- program.command("check [entry]").description("Fail if documentation coverage falls below a threshold").option("--cwd <dir>", "Working directory", process.cwd()).option("--package <name>", "Target package name (for monorepos)").option("--min-coverage <percentage>", "Minimum docs coverage percentage (0-100)", (value) => Number(value)).option("--require-examples", "Require at least one @example for every export").option("--ignore-drift", "Do not fail on documentation drift").option("--no-external-types", "Skip external type resolution from node_modules").action(async (entry, options) => {
364
+ program.command("check [entry]").description("Fail if documentation coverage falls below a threshold").option("--cwd <dir>", "Working directory", process.cwd()).option("--package <name>", "Target package name (for monorepos)").option("--min-coverage <percentage>", "Minimum docs coverage percentage (0-100)", (value) => Number(value)).option("--require-examples", "Require at least one @example for every export").option("--run-examples", "Execute @example blocks and fail on runtime errors").option("--ignore-drift", "Do not fail on documentation drift").option("--skip-resolve", "Skip external type resolution from node_modules").action(async (entry, options) => {
290
365
  try {
291
366
  let targetDir = options.cwd;
292
367
  let entryFile = entry;
@@ -304,12 +379,13 @@ function registerCheckCommand(program, dependencies = {}) {
304
379
  } else {
305
380
  entryFile = path3.resolve(targetDir, entryFile);
306
381
  if (fs2.existsSync(entryFile) && fs2.statSync(entryFile).isDirectory()) {
382
+ targetDir = entryFile;
307
383
  entryFile = await findEntryPoint(entryFile, true);
308
384
  log(chalk.gray(`Auto-detected entry point: ${entryFile}`));
309
385
  }
310
386
  }
311
387
  const minCoverage = clampCoverage(options.minCoverage ?? 80);
312
- const resolveExternalTypes = options.externalTypes !== false;
388
+ const resolveExternalTypes = !options.skipResolve;
313
389
  const spinnerInstance = spinner("Analyzing documentation coverage...");
314
390
  spinnerInstance.start();
315
391
  let specResult;
@@ -325,10 +401,129 @@ function registerCheckCommand(program, dependencies = {}) {
325
401
  throw new Error("Failed to analyze documentation coverage.");
326
402
  }
327
403
  const spec = specResult.spec;
404
+ const warnings = specResult.diagnostics.filter((d) => d.severity === "warning");
405
+ const infos = specResult.diagnostics.filter((d) => d.severity === "info");
406
+ if (warnings.length > 0 || infos.length > 0) {
407
+ log("");
408
+ for (const diag of warnings) {
409
+ log(chalk.yellow(`⚠ ${diag.message}`));
410
+ if (diag.suggestion) {
411
+ log(chalk.gray(` ${diag.suggestion}`));
412
+ }
413
+ }
414
+ for (const diag of infos) {
415
+ log(chalk.cyan(`ℹ ${diag.message}`));
416
+ if (diag.suggestion) {
417
+ log(chalk.gray(` ${diag.suggestion}`));
418
+ }
419
+ }
420
+ log("");
421
+ }
422
+ const runtimeDrifts = [];
423
+ if (options.runExamples) {
424
+ const allExamples = [];
425
+ for (const entry2 of spec.exports ?? []) {
426
+ if (entry2.examples && entry2.examples.length > 0) {
427
+ allExamples.push({ exportName: entry2.name, examples: entry2.examples });
428
+ }
429
+ }
430
+ if (allExamples.length === 0) {
431
+ log(chalk.gray("No @example blocks found"));
432
+ } else {
433
+ const examplesSpinner = spinner("Installing package for examples...");
434
+ examplesSpinner.start();
435
+ const flatExamples = allExamples.flatMap((e) => e.examples);
436
+ const packageResult = await runExamplesWithPackage(flatExamples, {
437
+ packagePath: targetDir,
438
+ timeout: 5000,
439
+ installTimeout: 60000,
440
+ cwd: targetDir
441
+ });
442
+ if (!packageResult.installSuccess) {
443
+ examplesSpinner.fail(`Package install failed: ${packageResult.installError}`);
444
+ log(chalk.yellow("Skipping example execution. Ensure the package is built."));
445
+ } else {
446
+ examplesSpinner.text = "Running @example blocks...";
447
+ let examplesRun = 0;
448
+ let examplesFailed = 0;
449
+ let exampleIndex = 0;
450
+ for (const { exportName, examples } of allExamples) {
451
+ const entryResults = new Map;
452
+ for (let i = 0;i < examples.length; i++) {
453
+ const result = packageResult.results.get(exampleIndex);
454
+ if (result) {
455
+ entryResults.set(i, result);
456
+ examplesRun++;
457
+ if (!result.success)
458
+ examplesFailed++;
459
+ }
460
+ exampleIndex++;
461
+ }
462
+ const entry2 = (spec.exports ?? []).find((e) => e.name === exportName);
463
+ if (entry2) {
464
+ const runtimeErrorDrifts = detectExampleRuntimeErrors(entry2, entryResults);
465
+ for (const drift of runtimeErrorDrifts) {
466
+ runtimeDrifts.push({
467
+ name: entry2.name,
468
+ issue: drift.issue,
469
+ suggestion: drift.suggestion
470
+ });
471
+ }
472
+ const assertionDrifts = detectExampleAssertionFailures(entry2, entryResults);
473
+ for (const drift of assertionDrifts) {
474
+ runtimeDrifts.push({
475
+ name: entry2.name,
476
+ issue: drift.issue,
477
+ suggestion: drift.suggestion
478
+ });
479
+ }
480
+ if (isLLMAssertionParsingAvailable() && entry2.examples) {
481
+ for (let exIdx = 0;exIdx < entry2.examples.length; exIdx++) {
482
+ const example = entry2.examples[exIdx];
483
+ const result = entryResults.get(exIdx);
484
+ if (!result?.success || typeof example !== "string")
485
+ continue;
486
+ const regexAssertions = parseAssertions(example);
487
+ if (regexAssertions.length === 0 && hasNonAssertionComments(example)) {
488
+ const llmResult = await parseAssertionsWithLLM(example);
489
+ if (llmResult?.hasAssertions && llmResult.assertions.length > 0) {
490
+ const stdoutLines = result.stdout.split(`
491
+ `).map((l) => l.trim()).filter((l) => l.length > 0);
492
+ for (let aIdx = 0;aIdx < llmResult.assertions.length; aIdx++) {
493
+ const assertion = llmResult.assertions[aIdx];
494
+ const actual = stdoutLines[aIdx];
495
+ if (actual === undefined) {
496
+ runtimeDrifts.push({
497
+ name: entry2.name,
498
+ issue: `Assertion expected "${assertion.expected}" but no output was produced`,
499
+ suggestion: `Consider using standard syntax: ${assertion.suggestedSyntax}`
500
+ });
501
+ } else if (assertion.expected.trim() !== actual.trim()) {
502
+ runtimeDrifts.push({
503
+ name: entry2.name,
504
+ issue: `Assertion failed: expected "${assertion.expected}" but got "${actual}"`,
505
+ suggestion: `Consider using standard syntax: ${assertion.suggestedSyntax}`
506
+ });
507
+ }
508
+ }
509
+ }
510
+ }
511
+ }
512
+ }
513
+ }
514
+ }
515
+ if (examplesFailed > 0) {
516
+ examplesSpinner.fail(`${examplesFailed}/${examplesRun} example(s) failed`);
517
+ } else {
518
+ examplesSpinner.succeed(`${examplesRun} example(s) passed`);
519
+ }
520
+ }
521
+ }
522
+ }
328
523
  const coverageScore = spec.docs?.coverageScore ?? 0;
329
524
  const failingExports = collectFailingExports(spec.exports ?? [], minCoverage);
330
525
  const missingExamples = options.requireExamples ? failingExports.filter((item) => item.missing?.includes("examples")) : [];
331
- const driftExports = collectDrift(spec.exports ?? []);
526
+ const driftExports = [...collectDrift(spec.exports ?? []), ...runtimeDrifts];
332
527
  const coverageFailed = coverageScore < minCoverage;
333
528
  const hasMissingExamples = missingExamples.length > 0;
334
529
  const hasDrift = !options.ignoreDrift && driftExports.length > 0;
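For context on the new `--run-examples` path in the hunk above: each `@example` block is executed against a temporary install of the package (5 s per example, 60 s install timeout), runtime errors and failed `// => value` assertions are collected into `runtimeDrifts`, and non-standard comments such as `// should be 3` go through the LLM fallback when a key is available. A hedged sketch of the kind of example it consumes (the `add` export and package name are placeholders):

```ts
/**
 * Adds two numbers.
 *
 * @example
 * import { add } from "my-pkg"; // "my-pkg" is a placeholder name
 * console.log(add(2, 3)); // => 5
 */
export function add(a: number, b: number): number {
  return a + b;
}
```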
@@ -538,16 +733,215 @@ function printTextDiff(diff, log, _error) {
538
733
  log("");
539
734
  }
540
735
 
541
- // src/commands/generate.ts
736
+ // src/commands/fix.ts
542
737
  import * as fs4 from "node:fs";
543
738
  import * as path5 from "node:path";
544
- import { DocCov as DocCov2 } from "@doccov/sdk";
545
- import { normalize, validateSpec } from "@openpkg-ts/spec";
546
- import chalk4 from "chalk";
739
+ import {
740
+ applyEdits,
741
+ categorizeDrifts,
742
+ createSourceFile,
743
+ DocCov as DocCov2,
744
+ findJSDocLocation,
745
+ generateFixesForExport,
746
+ mergeFixes,
747
+ parseJSDocToPatch,
748
+ serializeJSDoc
749
+ } from "@doccov/sdk";
750
+ import chalk3 from "chalk";
547
751
  import ora2 from "ora";
752
+ var defaultDependencies3 = {
753
+ createDocCov: (options) => new DocCov2(options),
754
+ spinner: (text) => ora2(text),
755
+ log: console.log,
756
+ error: console.error
757
+ };
758
+ function collectDrifts(exports) {
759
+ const results = [];
760
+ for (const exp of exports) {
761
+ const drifts = exp.docs?.drift ?? [];
762
+ for (const drift of drifts) {
763
+ results.push({ export: exp, drift });
764
+ }
765
+ }
766
+ return results;
767
+ }
768
+ function filterDriftsByType(drifts, onlyTypes) {
769
+ if (!onlyTypes)
770
+ return drifts;
771
+ const allowedTypes = new Set(onlyTypes.split(",").map((t) => t.trim()));
772
+ return drifts.filter((d) => allowedTypes.has(d.drift.type));
773
+ }
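A small sketch of how the `--only` filter above behaves; the drift type strings here are hypothetical placeholders, not necessarily the SDK's real identifiers:

```ts
const drifts = [
  { export: { name: "add" }, drift: { type: "param-mismatch", issue: "..." } },
  { export: { name: "sub" }, drift: { type: "missing-return", issue: "..." } },
];

// --only is comma-separated and trimmed, so "param-mismatch, missing-return"
// keeps both entries; a single value keeps only matching drift types.
filterDriftsByType(drifts, "param-mismatch").length; // => 1
filterDriftsByType(drifts, undefined).length;        // => 2 (no filter applied)
```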
774
+ function groupByExport(drifts) {
775
+ const map = new Map;
776
+ for (const { export: exp, drift } of drifts) {
777
+ const existing = map.get(exp) ?? [];
778
+ existing.push(drift);
779
+ map.set(exp, existing);
780
+ }
781
+ return map;
782
+ }
783
+ function registerFixCommand(program, dependencies = {}) {
784
+ const { createDocCov, spinner, log, error } = {
785
+ ...defaultDependencies3,
786
+ ...dependencies
787
+ };
788
+ program.command("fix [entry]").description("Automatically fix documentation drift").option("--cwd <dir>", "Working directory", process.cwd()).option("--package <name>", "Target package name (for monorepos)").option("--dry-run", "Preview changes without writing").option("--only <types>", "Only fix specific drift types (comma-separated)").option("--skip-resolve", "Skip external type resolution from node_modules").action(async (entry, options) => {
789
+ try {
790
+ let targetDir = options.cwd;
791
+ let entryFile = entry;
792
+ if (options.package) {
793
+ const packageDir = await findPackageInMonorepo(options.cwd, options.package);
794
+ if (!packageDir) {
795
+ throw new Error(`Package "${options.package}" not found in monorepo`);
796
+ }
797
+ targetDir = packageDir;
798
+ log(chalk3.gray(`Found package at ${path5.relative(options.cwd, packageDir)}`));
799
+ }
800
+ if (!entryFile) {
801
+ entryFile = await findEntryPoint(targetDir, true);
802
+ log(chalk3.gray(`Auto-detected entry point: ${path5.relative(targetDir, entryFile)}`));
803
+ } else {
804
+ entryFile = path5.resolve(targetDir, entryFile);
805
+ if (fs4.existsSync(entryFile) && fs4.statSync(entryFile).isDirectory()) {
806
+ targetDir = entryFile;
807
+ entryFile = await findEntryPoint(entryFile, true);
808
+ log(chalk3.gray(`Auto-detected entry point: ${entryFile}`));
809
+ }
810
+ }
811
+ const resolveExternalTypes = !options.skipResolve;
812
+ const analyzeSpinner = spinner("Analyzing documentation...");
813
+ analyzeSpinner.start();
814
+ const doccov = createDocCov({ resolveExternalTypes });
815
+ const result = await doccov.analyzeFileWithDiagnostics(entryFile);
816
+ const spec = result.spec;
817
+ analyzeSpinner.succeed("Analysis complete");
818
+ const allDrifts = collectDrifts(spec.exports ?? []);
819
+ if (allDrifts.length === 0) {
820
+ log(chalk3.green("No drift issues found. Documentation is in sync!"));
821
+ return;
822
+ }
823
+ const filteredDrifts = filterDriftsByType(allDrifts, options.only);
824
+ if (filteredDrifts.length === 0) {
825
+ log(chalk3.yellow("No matching drift issues for the specified types."));
826
+ return;
827
+ }
828
+ const { fixable, nonFixable } = categorizeDrifts(filteredDrifts.map((d) => d.drift));
829
+ if (fixable.length === 0) {
830
+ log(chalk3.yellow(`Found ${nonFixable.length} drift issue(s), but none are auto-fixable.`));
831
+ log(chalk3.gray("Non-fixable drift types require manual intervention:"));
832
+ for (const drift of nonFixable.slice(0, 5)) {
833
+ log(chalk3.gray(` • ${drift.type}: ${drift.issue}`));
834
+ }
835
+ return;
836
+ }
837
+ log("");
838
+ log(chalk3.bold(`Found ${fixable.length} fixable issue(s)`));
839
+ if (nonFixable.length > 0) {
840
+ log(chalk3.gray(`(${nonFixable.length} non-fixable issue(s) skipped)`));
841
+ }
842
+ log("");
843
+ const groupedDrifts = groupByExport(filteredDrifts.filter((d) => fixable.includes(d.drift)));
844
+ const edits = [];
845
+ const editsByFile = new Map;
846
+ for (const [exp, drifts] of groupedDrifts) {
847
+ if (!exp.source?.file) {
848
+ log(chalk3.gray(` Skipping ${exp.name}: no source location`));
849
+ continue;
850
+ }
851
+ if (exp.source.file.endsWith(".d.ts")) {
852
+ log(chalk3.gray(` Skipping ${exp.name}: declaration file`));
853
+ continue;
854
+ }
855
+ const filePath = path5.resolve(targetDir, exp.source.file);
856
+ if (!fs4.existsSync(filePath)) {
857
+ log(chalk3.gray(` Skipping ${exp.name}: file not found`));
858
+ continue;
859
+ }
860
+ const sourceFile = createSourceFile(filePath);
861
+ const location = findJSDocLocation(sourceFile, exp.name, exp.source.line);
862
+ if (!location) {
863
+ log(chalk3.gray(` Skipping ${exp.name}: could not find declaration`));
864
+ continue;
865
+ }
866
+ let existingPatch = {};
867
+ if (location.hasExisting && location.existingJSDoc) {
868
+ existingPatch = parseJSDocToPatch(location.existingJSDoc);
869
+ }
870
+ const fixes = generateFixesForExport({ ...exp, docs: { ...exp.docs, drift: drifts } }, existingPatch);
871
+ if (fixes.length === 0)
872
+ continue;
873
+ const mergedPatch = mergeFixes(fixes, existingPatch);
874
+ const newJSDoc = serializeJSDoc(mergedPatch, location.indent);
875
+ const edit = {
876
+ filePath,
877
+ symbolName: exp.name,
878
+ startLine: location.startLine,
879
+ endLine: location.endLine,
880
+ hasExisting: location.hasExisting,
881
+ existingJSDoc: location.existingJSDoc,
882
+ newJSDoc,
883
+ indent: location.indent
884
+ };
885
+ edits.push(edit);
886
+ const fileEdits = editsByFile.get(filePath) ?? [];
887
+ fileEdits.push({ export: exp, edit, fixes, existingPatch });
888
+ editsByFile.set(filePath, fileEdits);
889
+ }
890
+ if (edits.length === 0) {
891
+ log(chalk3.yellow("No edits could be generated."));
892
+ return;
893
+ }
894
+ if (options.dryRun) {
895
+ log(chalk3.bold("Dry run - changes that would be made:"));
896
+ log("");
897
+ for (const [filePath, fileEdits] of editsByFile) {
898
+ const relativePath = path5.relative(targetDir, filePath);
899
+ log(chalk3.cyan(` ${relativePath}:`));
900
+ for (const { export: exp, edit, fixes } of fileEdits) {
901
+ const lineInfo = edit.hasExisting ? `lines ${edit.startLine + 1}-${edit.endLine + 1}` : `line ${edit.startLine + 1}`;
902
+ log(` ${chalk3.bold(exp.name)} [${lineInfo}]`);
903
+ for (const fix of fixes) {
904
+ log(chalk3.green(` + ${fix.description}`));
905
+ }
906
+ }
907
+ log("");
908
+ }
909
+ log(chalk3.gray("Run without --dry-run to apply these changes."));
910
+ } else {
911
+ const applySpinner = spinner("Applying fixes...");
912
+ applySpinner.start();
913
+ const result2 = await applyEdits(edits);
914
+ if (result2.errors.length > 0) {
915
+ applySpinner.warn("Some fixes could not be applied");
916
+ for (const err of result2.errors) {
917
+ error(chalk3.red(` ${err.file}: ${err.error}`));
918
+ }
919
+ } else {
920
+ applySpinner.succeed(`Applied ${result2.editsApplied} fix(es) to ${result2.filesModified} file(s)`);
921
+ }
922
+ log("");
923
+ for (const [filePath, fileEdits] of editsByFile) {
924
+ const relativePath = path5.relative(targetDir, filePath);
925
+ log(chalk3.green(` ✓ ${relativePath}: ${fileEdits.length} fix(es)`));
926
+ }
927
+ }
928
+ } catch (commandError) {
929
+ error(chalk3.red("Error:"), commandError instanceof Error ? commandError.message : commandError);
930
+ process.exitCode = 1;
931
+ }
932
+ });
933
+ }
934
+
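The new fix command above chains several @doccov/sdk helpers; a condensed sketch of that pipeline for a single export (shapes follow the bundled code here, but the exact SDK types are assumptions):

```ts
import {
  applyEdits,
  createSourceFile,
  findJSDocLocation,
  generateFixesForExport,
  mergeFixes,
  parseJSDocToPatch,
  serializeJSDoc,
} from "@doccov/sdk";

async function fixOneExport(exp: any, filePath: string) {
  const sourceFile = createSourceFile(filePath);
  const location = findJSDocLocation(sourceFile, exp.name, exp.source.line);
  if (!location) return; // declaration not found: the command skips the export

  // Preserve whatever JSDoc already exists, then layer generated fixes on top.
  const existingPatch =
    location.hasExisting && location.existingJSDoc
      ? parseJSDocToPatch(location.existingJSDoc)
      : {};
  const fixes = generateFixesForExport(exp, existingPatch);
  if (fixes.length === 0) return;

  const newJSDoc = serializeJSDoc(mergeFixes(fixes, existingPatch), location.indent);
  await applyEdits([{
    filePath,
    symbolName: exp.name,
    startLine: location.startLine,
    endLine: location.endLine,
    hasExisting: location.hasExisting,
    existingJSDoc: location.existingJSDoc,
    newJSDoc,
    indent: location.indent,
  }]);
}
```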
935
+ // src/commands/generate.ts
936
+ import * as fs5 from "node:fs";
937
+ import * as path6 from "node:path";
938
+ import { DocCov as DocCov3 } from "@doccov/sdk";
939
+ import { normalize, validateSpec } from "@openpkg-ts/spec";
940
+ import chalk5 from "chalk";
941
+ import ora3 from "ora";
548
942
 
549
943
  // src/utils/filter-options.ts
550
- import chalk3 from "chalk";
944
+ import chalk4 from "chalk";
551
945
  var unique = (values) => Array.from(new Set(values));
552
946
  var parseListFlag = (value) => {
553
947
  if (!value) {
@@ -557,7 +951,7 @@ var parseListFlag = (value) => {
557
951
  const normalized = rawItems.flatMap((item) => String(item).split(",")).map((item) => item.trim()).filter(Boolean);
558
952
  return normalized.length > 0 ? unique(normalized) : undefined;
559
953
  };
560
- var formatList = (label, values) => `${label}: ${values.map((value) => chalk3.cyan(value)).join(", ")}`;
954
+ var formatList = (label, values) => `${label}: ${values.map((value) => chalk4.cyan(value)).join(", ")}`;
561
955
  var mergeFilterOptions = (config, cliOptions) => {
562
956
  const messages = [];
563
957
  const configInclude = config?.include;
@@ -597,10 +991,10 @@ var mergeFilterOptions = (config, cliOptions) => {
597
991
  };
598
992
 
599
993
  // src/commands/generate.ts
600
- var defaultDependencies3 = {
601
- createDocCov: (options) => new DocCov2(options),
602
- writeFileSync: fs4.writeFileSync,
603
- spinner: (text) => ora2(text),
994
+ var defaultDependencies4 = {
995
+ createDocCov: (options) => new DocCov3(options),
996
+ writeFileSync: fs5.writeFileSync,
997
+ spinner: (text) => ora3(text),
604
998
  log: console.log,
605
999
  error: console.error
606
1000
  };
@@ -619,17 +1013,17 @@ function stripDocsFields(spec) {
619
1013
  }
620
1014
  function formatDiagnosticOutput(prefix, diagnostic, baseDir) {
621
1015
  const location = diagnostic.location;
622
- const relativePath = location?.file ? path5.relative(baseDir, location.file) || location.file : undefined;
623
- const locationText = location && relativePath ? chalk4.gray(`${relativePath}:${location.line ?? 1}:${location.column ?? 1}`) : null;
1016
+ const relativePath = location?.file ? path6.relative(baseDir, location.file) || location.file : undefined;
1017
+ const locationText = location && relativePath ? chalk5.gray(`${relativePath}:${location.line ?? 1}:${location.column ?? 1}`) : null;
624
1018
  const locationPrefix = locationText ? `${locationText} ` : "";
625
1019
  return `${prefix} ${locationPrefix}${diagnostic.message}`;
626
1020
  }
627
1021
  function registerGenerateCommand(program, dependencies = {}) {
628
1022
  const { createDocCov, writeFileSync: writeFileSync2, spinner, log, error } = {
629
- ...defaultDependencies3,
1023
+ ...defaultDependencies4,
630
1024
  ...dependencies
631
1025
  };
632
- program.command("generate [entry]").description("Generate OpenPkg specification for documentation coverage analysis").option("-o, --output <file>", "Output file", "openpkg.json").option("-p, --package <name>", "Target package name (for monorepos)").option("--cwd <dir>", "Working directory", process.cwd()).option("--no-external-types", "Skip external type resolution from node_modules").option("--include <ids>", "Filter exports by identifier (comma-separated or repeated)").option("--exclude <ids>", "Exclude exports by identifier (comma-separated or repeated)").option("--show-diagnostics", "Print TypeScript diagnostics from analysis").option("--no-docs", "Omit docs coverage fields from output (pure structural spec)").option("-y, --yes", "Skip all prompts and use defaults").action(async (entry, options) => {
1026
+ program.command("generate [entry]").description("Generate OpenPkg specification for documentation coverage analysis").option("-o, --output <file>", "Output file", "openpkg.json").option("-p, --package <name>", "Target package name (for monorepos)").option("--cwd <dir>", "Working directory", process.cwd()).option("--skip-resolve", "Skip external type resolution from node_modules").option("--include <ids>", "Filter exports by identifier (comma-separated or repeated)").option("--exclude <ids>", "Exclude exports by identifier (comma-separated or repeated)").option("--show-diagnostics", "Print TypeScript diagnostics from analysis").option("--no-docs", "Omit docs coverage fields from output (pure structural spec)").option("-y, --yes", "Skip all prompts and use defaults").action(async (entry, options) => {
633
1027
  try {
634
1028
  let targetDir = options.cwd;
635
1029
  let entryFile = entry;
@@ -639,19 +1033,19 @@ function registerGenerateCommand(program, dependencies = {}) {
639
1033
  throw new Error(`Package "${options.package}" not found in monorepo`);
640
1034
  }
641
1035
  targetDir = packageDir;
642
- log(chalk4.gray(`Found package at ${path5.relative(options.cwd, packageDir)}`));
1036
+ log(chalk5.gray(`Found package at ${path6.relative(options.cwd, packageDir)}`));
643
1037
  }
644
1038
  if (!entryFile) {
645
1039
  entryFile = await findEntryPoint(targetDir, true);
646
- log(chalk4.gray(`Auto-detected entry point: ${path5.relative(targetDir, entryFile)}`));
1040
+ log(chalk5.gray(`Auto-detected entry point: ${path6.relative(targetDir, entryFile)}`));
647
1041
  } else {
648
- entryFile = path5.resolve(targetDir, entryFile);
649
- if (fs4.existsSync(entryFile) && fs4.statSync(entryFile).isDirectory()) {
1042
+ entryFile = path6.resolve(targetDir, entryFile);
1043
+ if (fs5.existsSync(entryFile) && fs5.statSync(entryFile).isDirectory()) {
650
1044
  entryFile = await findEntryPoint(entryFile, true);
651
- log(chalk4.gray(`Auto-detected entry point: ${entryFile}`));
1045
+ log(chalk5.gray(`Auto-detected entry point: ${entryFile}`));
652
1046
  }
653
1047
  }
654
- const resolveExternalTypes = options.externalTypes !== false;
1048
+ const resolveExternalTypes = !options.skipResolve;
655
1049
  const cliFilters = {
656
1050
  include: parseListFlag(options.include),
657
1051
  exclude: parseListFlag(options.exclude)
@@ -660,15 +1054,15 @@ function registerGenerateCommand(program, dependencies = {}) {
660
1054
  try {
661
1055
  config = await loadDocCovConfig(targetDir);
662
1056
  if (config?.filePath) {
663
- log(chalk4.gray(`Loaded configuration from ${path5.relative(targetDir, config.filePath)}`));
1057
+ log(chalk5.gray(`Loaded configuration from ${path6.relative(targetDir, config.filePath)}`));
664
1058
  }
665
1059
  } catch (configError) {
666
- error(chalk4.red("Failed to load DocCov config:"), configError instanceof Error ? configError.message : configError);
1060
+ error(chalk5.red("Failed to load DocCov config:"), configError instanceof Error ? configError.message : configError);
667
1061
  process.exit(1);
668
1062
  }
669
1063
  const resolvedFilters = mergeFilterOptions(config, cliFilters);
670
1064
  for (const message of resolvedFilters.messages) {
671
- log(chalk4.gray(`• ${message}`));
1065
+ log(chalk5.gray(`• ${message}`));
672
1066
  }
673
1067
  const spinnerInstance = spinner("Generating OpenPkg spec...");
674
1068
  spinnerInstance.start();
@@ -692,7 +1086,7 @@ function registerGenerateCommand(program, dependencies = {}) {
692
1086
  if (!result) {
693
1087
  throw new Error("Failed to produce an OpenPkg spec.");
694
1088
  }
695
- const outputPath = path5.resolve(process.cwd(), options.output);
1089
+ const outputPath = path6.resolve(process.cwd(), options.output);
696
1090
  let normalized = normalize(result.spec);
697
1091
  if (options.docs === false) {
698
1092
  normalized = stripDocsFields(normalized);
@@ -701,85 +1095,85 @@ function registerGenerateCommand(program, dependencies = {}) {
701
1095
  if (!validation.ok) {
702
1096
  spinnerInstance.fail("Spec failed schema validation");
703
1097
  for (const err of validation.errors) {
704
- error(chalk4.red(`schema: ${err.instancePath || "/"} ${err.message}`));
1098
+ error(chalk5.red(`schema: ${err.instancePath || "/"} ${err.message}`));
705
1099
  }
706
1100
  process.exit(1);
707
1101
  }
708
1102
  writeFileSync2(outputPath, JSON.stringify(normalized, null, 2));
709
- log(chalk4.green(`✓ Generated ${options.output}`));
710
- log(chalk4.gray(` ${getArrayLength(normalized.exports)} exports`));
711
- log(chalk4.gray(` ${getArrayLength(normalized.types)} types`));
1103
+ log(chalk5.green(`✓ Generated ${options.output}`));
1104
+ log(chalk5.gray(` ${getArrayLength(normalized.exports)} exports`));
1105
+ log(chalk5.gray(` ${getArrayLength(normalized.types)} types`));
712
1106
  if (options.showDiagnostics && result.diagnostics.length > 0) {
713
1107
  log("");
714
- log(chalk4.bold("Diagnostics"));
1108
+ log(chalk5.bold("Diagnostics"));
715
1109
  for (const diagnostic of result.diagnostics) {
716
- const prefix = diagnostic.severity === "error" ? chalk4.red("✖") : diagnostic.severity === "warning" ? chalk4.yellow("⚠") : chalk4.cyan("ℹ");
1110
+ const prefix = diagnostic.severity === "error" ? chalk5.red("✖") : diagnostic.severity === "warning" ? chalk5.yellow("⚠") : chalk5.cyan("ℹ");
717
1111
  log(formatDiagnosticOutput(prefix, diagnostic, targetDir));
718
1112
  }
719
1113
  }
720
1114
  } catch (commandError) {
721
- error(chalk4.red("Error:"), commandError instanceof Error ? commandError.message : commandError);
1115
+ error(chalk5.red("Error:"), commandError instanceof Error ? commandError.message : commandError);
722
1116
  process.exit(1);
723
1117
  }
724
1118
  });
725
1119
  }
726
1120
 
727
1121
  // src/commands/init.ts
728
- import * as fs5 from "node:fs";
729
- import * as path6 from "node:path";
730
- import chalk5 from "chalk";
731
- var defaultDependencies4 = {
732
- fileExists: fs5.existsSync,
733
- writeFileSync: fs5.writeFileSync,
734
- readFileSync: fs5.readFileSync,
1122
+ import * as fs6 from "node:fs";
1123
+ import * as path7 from "node:path";
1124
+ import chalk6 from "chalk";
1125
+ var defaultDependencies5 = {
1126
+ fileExists: fs6.existsSync,
1127
+ writeFileSync: fs6.writeFileSync,
1128
+ readFileSync: fs6.readFileSync,
735
1129
  log: console.log,
736
1130
  error: console.error
737
1131
  };
738
1132
  function registerInitCommand(program, dependencies = {}) {
739
1133
  const { fileExists: fileExists2, writeFileSync: writeFileSync3, readFileSync: readFileSync4, log, error } = {
740
- ...defaultDependencies4,
1134
+ ...defaultDependencies5,
741
1135
  ...dependencies
742
1136
  };
743
1137
  program.command("init").description("Create a DocCov configuration file").option("--cwd <dir>", "Working directory", process.cwd()).option("--format <format>", "Config format: auto, mjs, js, cjs", "auto").action((options) => {
744
- const cwd = path6.resolve(options.cwd);
1138
+ const cwd = path7.resolve(options.cwd);
745
1139
  const formatOption = String(options.format ?? "auto").toLowerCase();
746
1140
  if (!isValidFormat(formatOption)) {
747
- error(chalk5.red(`Invalid format "${formatOption}". Use auto, mjs, js, or cjs.`));
1141
+ error(chalk6.red(`Invalid format "${formatOption}". Use auto, mjs, js, or cjs.`));
748
1142
  process.exitCode = 1;
749
1143
  return;
750
1144
  }
751
1145
  const existing = findExistingConfig(cwd, fileExists2);
752
1146
  if (existing) {
753
- error(chalk5.red(`A DocCov config already exists at ${path6.relative(cwd, existing) || "./doccov.config.*"}.`));
1147
+ error(chalk6.red(`A DocCov config already exists at ${path7.relative(cwd, existing) || "./doccov.config.*"}.`));
754
1148
  process.exitCode = 1;
755
1149
  return;
756
1150
  }
757
1151
  const packageType = detectPackageType(cwd, fileExists2, readFileSync4);
758
1152
  const targetFormat = resolveFormat(formatOption, packageType);
759
1153
  if (targetFormat === "js" && packageType !== "module") {
760
- log(chalk5.yellow('Package is not marked as "type": "module"; creating doccov.config.js may require enabling ESM.'));
1154
+ log(chalk6.yellow('Package is not marked as "type": "module"; creating doccov.config.js may require enabling ESM.'));
761
1155
  }
762
1156
  const fileName = `doccov.config.${targetFormat}`;
763
- const outputPath = path6.join(cwd, fileName);
1157
+ const outputPath = path7.join(cwd, fileName);
764
1158
  if (fileExists2(outputPath)) {
765
- error(chalk5.red(`Cannot create ${fileName}; file already exists.`));
1159
+ error(chalk6.red(`Cannot create ${fileName}; file already exists.`));
766
1160
  process.exitCode = 1;
767
1161
  return;
768
1162
  }
769
1163
  const template = buildTemplate(targetFormat);
770
1164
  writeFileSync3(outputPath, template, { encoding: "utf8" });
771
- log(chalk5.green(`✓ Created ${path6.relative(process.cwd(), outputPath)}`));
1165
+ log(chalk6.green(`✓ Created ${path7.relative(process.cwd(), outputPath)}`));
772
1166
  });
773
1167
  }
774
1168
  var isValidFormat = (value) => {
775
1169
  return value === "auto" || value === "mjs" || value === "js" || value === "cjs";
776
1170
  };
777
1171
  var findExistingConfig = (cwd, fileExists2) => {
778
- let current = path6.resolve(cwd);
779
- const { root } = path6.parse(current);
1172
+ let current = path7.resolve(cwd);
1173
+ const { root } = path7.parse(current);
780
1174
  while (true) {
781
1175
  for (const candidate of DOCCOV_CONFIG_FILENAMES) {
782
- const candidatePath = path6.join(current, candidate);
1176
+ const candidatePath = path7.join(current, candidate);
783
1177
  if (fileExists2(candidatePath)) {
784
1178
  return candidatePath;
785
1179
  }
@@ -787,7 +1181,7 @@ var findExistingConfig = (cwd, fileExists2) => {
787
1181
  if (current === root) {
788
1182
  break;
789
1183
  }
790
- current = path6.dirname(current);
1184
+ current = path7.dirname(current);
791
1185
  }
792
1186
  return null;
793
1187
  };
@@ -809,17 +1203,17 @@ var detectPackageType = (cwd, fileExists2, readFileSync4) => {
809
1203
  return;
810
1204
  };
811
1205
  var findNearestPackageJson = (cwd, fileExists2) => {
812
- let current = path6.resolve(cwd);
813
- const { root } = path6.parse(current);
1206
+ let current = path7.resolve(cwd);
1207
+ const { root } = path7.parse(current);
814
1208
  while (true) {
815
- const candidate = path6.join(current, "package.json");
1209
+ const candidate = path7.join(current, "package.json");
816
1210
  if (fileExists2(candidate)) {
817
1211
  return candidate;
818
1212
  }
819
1213
  if (current === root) {
820
1214
  break;
821
1215
  }
822
- current = path6.dirname(current);
1216
+ current = path7.dirname(current);
823
1217
  }
824
1218
  return null;
825
1219
  };
@@ -854,26 +1248,256 @@ var buildTemplate = (format) => {
854
1248
  `);
855
1249
  };
856
1250
 
1251
+ // src/commands/report.ts
1252
+ import * as fs7 from "node:fs";
1253
+ import * as path8 from "node:path";
1254
+ import { DocCov as DocCov4 } from "@doccov/sdk";
1255
+ import chalk7 from "chalk";
1256
+ import ora4 from "ora";
1257
+
1258
+ // src/reports/markdown.ts
1259
+ function bar(pct, width = 10) {
1260
+ const filled = Math.round(pct / 100 * width);
1261
+ return "█".repeat(filled) + "░".repeat(width - filled);
1262
+ }
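A quick worked example of the bar() helper above (illustrative values):

```ts
bar(50);    // => "█████░░░░░"  Math.round(50 / 100 * 10) = 5 filled cells
bar(83, 8); // => "███████░"    Math.round(83 / 100 * 8)  = 7 filled cells
```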
1263
+ function renderMarkdown(stats, options = {}) {
1264
+ const limit = options.limit ?? 20;
1265
+ const lines = [];
1266
+ lines.push(`# DocCov Report: ${stats.packageName}@${stats.version}`);
1267
+ lines.push("");
1268
+ lines.push(`**Coverage: ${stats.coverageScore}%** \`${bar(stats.coverageScore)}\``);
1269
+ lines.push("");
1270
+ lines.push("| Metric | Value |");
1271
+ lines.push("|--------|-------|");
1272
+ lines.push(`| Exports | ${stats.totalExports} |`);
1273
+ lines.push(`| Fully documented | ${stats.fullyDocumented} |`);
1274
+ lines.push(`| Partially documented | ${stats.partiallyDocumented} |`);
1275
+ lines.push(`| Undocumented | ${stats.undocumented} |`);
1276
+ lines.push(`| Drift issues | ${stats.driftCount} |`);
1277
+ lines.push("");
1278
+ lines.push("## Coverage by Signal");
1279
+ lines.push("");
1280
+ lines.push("| Signal | Coverage |");
1281
+ lines.push("|--------|----------|");
1282
+ for (const [sig, s] of Object.entries(stats.signalCoverage)) {
1283
+ lines.push(`| ${sig} | ${s.pct}% \`${bar(s.pct, 8)}\` |`);
1284
+ }
1285
+ if (stats.byKind.length > 0) {
1286
+ lines.push("");
1287
+ lines.push("## Coverage by Kind");
1288
+ lines.push("");
1289
+ lines.push("| Kind | Count | Avg Score |");
1290
+ lines.push("|------|-------|-----------|");
1291
+ for (const k of stats.byKind) {
1292
+ lines.push(`| ${k.kind} | ${k.count} | ${k.avgScore}% |`);
1293
+ }
1294
+ }
1295
+ const lowExports = stats.exports.filter((e) => e.score < 100).slice(0, limit);
1296
+ if (lowExports.length > 0) {
1297
+ lines.push("");
1298
+ lines.push("## Lowest Coverage Exports");
1299
+ lines.push("");
1300
+ lines.push("| Export | Kind | Score | Missing |");
1301
+ lines.push("|--------|------|-------|---------|");
1302
+ for (const e of lowExports) {
1303
+ lines.push(`| \`${e.name}\` | ${e.kind} | ${e.score}% | ${e.missing.join(", ") || "-"} |`);
1304
+ }
1305
+ const totalLow = stats.exports.filter((e) => e.score < 100).length;
1306
+ if (totalLow > limit) {
1307
+ lines.push(`| ... | | | ${totalLow - limit} more |`);
1308
+ }
1309
+ }
1310
+ if (stats.driftIssues.length > 0) {
1311
+ lines.push("");
1312
+ lines.push("## Drift Issues");
1313
+ lines.push("");
1314
+ lines.push("| Export | Type | Issue |");
1315
+ lines.push("|--------|------|-------|");
1316
+ for (const d of stats.driftIssues.slice(0, limit)) {
1317
+ const hint = d.suggestion ? ` → ${d.suggestion}` : "";
1318
+ lines.push(`| \`${d.exportName}\` | ${d.type} | ${d.issue}${hint} |`);
1319
+ }
1320
+ }
1321
+ lines.push("");
1322
+ lines.push("---");
1323
+ lines.push("*Generated by [DocCov](https://doccov.com)*");
1324
+ return lines.join(`
1325
+ `);
1326
+ }
1327
+
1328
+ // src/reports/html.ts
1329
+ function escapeHtml(s) {
1330
+ return s.replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
1331
+ }
1332
+ function renderHtml(stats, options = {}) {
1333
+ const md = renderMarkdown(stats, options);
1334
+ return `<!DOCTYPE html>
1335
+ <html lang="en">
1336
+ <head>
1337
+ <meta charset="UTF-8">
1338
+ <meta name="viewport" content="width=device-width, initial-scale=1.0">
1339
+ <title>DocCov Report: ${escapeHtml(stats.packageName)}</title>
1340
+ <style>
1341
+ :root { --bg: #0d1117; --fg: #c9d1d9; --border: #30363d; --accent: #58a6ff; }
1342
+ body { font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', sans-serif; background: var(--bg); color: var(--fg); max-width: 900px; margin: 0 auto; padding: 2rem; line-height: 1.6; }
1343
+ h1, h2 { border-bottom: 1px solid var(--border); padding-bottom: 0.5rem; }
1344
+ table { border-collapse: collapse; width: 100%; margin: 1rem 0; }
1345
+ th, td { border: 1px solid var(--border); padding: 0.5rem 1rem; text-align: left; }
1346
+ th { background: #161b22; }
1347
+ code { background: #161b22; padding: 0.2rem 0.4rem; border-radius: 4px; font-size: 0.9em; }
1348
+ a { color: var(--accent); }
1349
+ </style>
1350
+ </head>
1351
+ <body>
1352
+ <pre style="white-space: pre-wrap; font-family: inherit;">${escapeHtml(md)}</pre>
1353
+ </body>
1354
+ </html>`;
1355
+ }
1356
+ // src/reports/stats.ts
1357
+ function computeStats(spec) {
1358
+ const exports = spec.exports ?? [];
1359
+ const signals = {
1360
+ description: { covered: 0, total: 0 },
1361
+ params: { covered: 0, total: 0 },
1362
+ returns: { covered: 0, total: 0 },
1363
+ examples: { covered: 0, total: 0 }
1364
+ };
1365
+ const kindMap = new Map;
1366
+ const driftIssues = [];
1367
+ let fullyDocumented = 0;
1368
+ let partiallyDocumented = 0;
1369
+ let undocumented = 0;
1370
+ for (const exp of exports) {
1371
+ const score = exp.docs?.coverageScore ?? 0;
1372
+ const missing = exp.docs?.missing ?? [];
1373
+ for (const sig of ["description", "params", "returns", "examples"]) {
1374
+ signals[sig].total++;
1375
+ if (!missing.includes(sig))
1376
+ signals[sig].covered++;
1377
+ }
1378
+ const kindEntry = kindMap.get(exp.kind) ?? { count: 0, totalScore: 0 };
1379
+ kindEntry.count++;
1380
+ kindEntry.totalScore += score;
1381
+ kindMap.set(exp.kind, kindEntry);
1382
+ if (score === 100)
1383
+ fullyDocumented++;
1384
+ else if (score > 0)
1385
+ partiallyDocumented++;
1386
+ else
1387
+ undocumented++;
1388
+ for (const d of exp.docs?.drift ?? []) {
1389
+ driftIssues.push({
1390
+ exportName: exp.name,
1391
+ type: d.type,
1392
+ issue: d.issue,
1393
+ suggestion: d.suggestion
1394
+ });
1395
+ }
1396
+ }
1397
+ const signalCoverage = Object.fromEntries(Object.entries(signals).map(([k, v]) => [
1398
+ k,
1399
+ { ...v, pct: v.total ? Math.round(v.covered / v.total * 100) : 0 }
1400
+ ]));
1401
+ const byKind = Array.from(kindMap.entries()).map(([kind, { count, totalScore }]) => ({
1402
+ kind,
1403
+ count,
1404
+ avgScore: Math.round(totalScore / count)
1405
+ })).sort((a, b) => b.count - a.count);
1406
+ const sortedExports = exports.map((e) => ({
1407
+ name: e.name,
1408
+ kind: e.kind,
1409
+ score: e.docs?.coverageScore ?? 0,
1410
+ missing: e.docs?.missing ?? []
1411
+ })).sort((a, b) => a.score - b.score);
1412
+ return {
1413
+ packageName: spec.meta.name ?? "unknown",
1414
+ version: spec.meta.version ?? "0.0.0",
1415
+ coverageScore: spec.docs?.coverageScore ?? 0,
1416
+ totalExports: exports.length,
1417
+ fullyDocumented,
1418
+ partiallyDocumented,
1419
+ undocumented,
1420
+ driftCount: driftIssues.length,
1421
+ signalCoverage,
1422
+ byKind,
1423
+ exports: sortedExports,
1424
+ driftIssues
1425
+ };
1426
+ }
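To tie the report pieces together, a minimal sketch feeding a hand-written spec through computeStats and renderMarkdown (the package name, exports, and scores are placeholders; the shape mirrors what computeStats reads above):

```ts
const spec = {
  meta: { name: "my-pkg", version: "1.2.3" },
  docs: { coverageScore: 50 },
  exports: [
    { name: "add", kind: "function", docs: { coverageScore: 100, missing: [], drift: [] } },
    { name: "sub", kind: "function", docs: { coverageScore: 0, missing: ["description", "examples"], drift: [] } },
  ],
};

const stats = computeStats(spec);
// stats.fullyDocumented === 1, stats.undocumented === 1, stats.driftCount === 0

console.log(renderMarkdown(stats, { limit: 20 }));
// First lines of the rendered report:
//   # DocCov Report: my-pkg@1.2.3
//
//   **Coverage: 50%** `█████░░░░░`
```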
1427
+ // src/commands/report.ts
1428
+ function registerReportCommand(program) {
1429
+ program.command("report [entry]").description("Generate a documentation coverage report").option("--cwd <dir>", "Working directory", process.cwd()).option("--package <name>", "Target package name (for monorepos)").option("--spec <file>", "Use existing openpkg.json instead of analyzing").option("--output <format>", "Output format: markdown, html, json", "markdown").option("--out <file>", "Write to file instead of stdout").option("--limit <n>", "Max exports to show in tables", "20").option("--skip-resolve", "Skip external type resolution from node_modules").action(async (entry, options) => {
1430
+ try {
1431
+ let spec;
1432
+ if (options.spec) {
1433
+ const specPath = path8.resolve(options.cwd, options.spec);
1434
+ spec = JSON.parse(fs7.readFileSync(specPath, "utf-8"));
1435
+ } else {
1436
+ let targetDir = options.cwd;
1437
+ let entryFile = entry;
1438
+ if (options.package) {
1439
+ const packageDir = await findPackageInMonorepo(options.cwd, options.package);
1440
+ if (!packageDir)
1441
+ throw new Error(`Package "${options.package}" not found`);
1442
+ targetDir = packageDir;
1443
+ }
1444
+ if (!entryFile) {
1445
+ entryFile = await findEntryPoint(targetDir, true);
1446
+ } else {
1447
+ entryFile = path8.resolve(targetDir, entryFile);
1448
+ }
1449
+ const spinner = ora4("Analyzing...").start();
1450
+ const resolveExternalTypes = !options.skipResolve;
1451
+ const doccov = new DocCov4({ resolveExternalTypes });
1452
+ const result = await doccov.analyzeFileWithDiagnostics(entryFile);
1453
+ spinner.succeed("Analysis complete");
1454
+ spec = result.spec;
1455
+ }
1456
+ const stats = computeStats(spec);
1457
+ const format = options.output;
1458
+ const limit = parseInt(options.limit, 10) || 20;
1459
+ let output;
1460
+ if (format === "json") {
1461
+ output = JSON.stringify(stats, null, 2);
1462
+ } else if (format === "html") {
1463
+ output = renderHtml(stats, { limit });
1464
+ } else {
1465
+ output = renderMarkdown(stats, { limit });
1466
+ }
1467
+ if (options.out) {
1468
+ const outPath = path8.resolve(options.cwd, options.out);
1469
+ fs7.writeFileSync(outPath, output);
1470
+ console.log(chalk7.green(`Report written to ${outPath}`));
1471
+ } else {
1472
+ console.log(output);
1473
+ }
1474
+ } catch (err) {
1475
+ console.error(chalk7.red("Error:"), err instanceof Error ? err.message : err);
1476
+ process.exitCode = 1;
1477
+ }
1478
+ });
1479
+ }
1480
+
857
1481
  // src/commands/scan.ts
858
- import * as fs9 from "node:fs";
1482
+ import * as fs11 from "node:fs";
859
1483
  import * as os from "node:os";
860
- import * as path10 from "node:path";
861
- import { DocCov as DocCov3 } from "@doccov/sdk";
862
- import chalk6 from "chalk";
863
- import ora3 from "ora";
1484
+ import * as path12 from "node:path";
1485
+ import { DocCov as DocCov5 } from "@doccov/sdk";
1486
+ import chalk8 from "chalk";
1487
+ import ora5 from "ora";
864
1488
  import { simpleGit } from "simple-git";
865
1489
 
866
1490
  // src/utils/entry-detection.ts
867
- import * as fs6 from "node:fs";
868
- import * as path7 from "node:path";
1491
+ import * as fs8 from "node:fs";
1492
+ import * as path9 from "node:path";
869
1493
  function detectEntryPoint(repoDir) {
870
- const pkgPath = path7.join(repoDir, "package.json");
871
- if (!fs6.existsSync(pkgPath)) {
1494
+ const pkgPath = path9.join(repoDir, "package.json");
1495
+ if (!fs8.existsSync(pkgPath)) {
872
1496
  throw new Error("No package.json found - not a valid npm package");
873
1497
  }
874
1498
  let pkg;
875
1499
  try {
876
- pkg = JSON.parse(fs6.readFileSync(pkgPath, "utf-8"));
1500
+ pkg = JSON.parse(fs8.readFileSync(pkgPath, "utf-8"));
877
1501
  } catch {
878
1502
  throw new Error("Failed to parse package.json");
879
1503
  }
@@ -923,7 +1547,7 @@ function detectEntryPoint(repoDir) {
923
1547
  "source/index.ts"
924
1548
  ];
925
1549
  for (const p of commonPaths) {
926
- if (fs6.existsSync(path7.join(repoDir, p))) {
1550
+ if (fs8.existsSync(path9.join(repoDir, p))) {
927
1551
  return { entryPath: p, source: "fallback" };
928
1552
  }
929
1553
  }
@@ -932,7 +1556,7 @@ function detectEntryPoint(repoDir) {
932
1556
  function resolveToTs(baseDir, filePath) {
933
1557
  const normalized = filePath.replace(/^\.\//, "");
934
1558
  if (normalized.endsWith(".ts") || normalized.endsWith(".tsx")) {
935
- if (fs6.existsSync(path7.join(baseDir, normalized))) {
1559
+ if (fs8.existsSync(path9.join(baseDir, normalized))) {
936
1560
  return normalized;
937
1561
  }
938
1562
  }
@@ -957,11 +1581,11 @@ function resolveToTs(baseDir, filePath) {
957
1581
  candidates.push(normalized.replace(/\.d\.ts$/, ".ts"));
958
1582
  candidates.push(normalized.replace(/\.js$/, ".tsx"));
959
1583
  if (normalized.endsWith(".d.ts")) {
960
- const baseName = path7.basename(normalized, ".d.ts");
1584
+ const baseName = path9.basename(normalized, ".d.ts");
961
1585
  candidates.push(`src/${baseName}.ts`);
962
1586
  }
963
1587
  for (const candidate of candidates) {
964
- if (fs6.existsSync(path7.join(baseDir, candidate))) {
1588
+ if (fs8.existsSync(path9.join(baseDir, candidate))) {
965
1589
  return candidate;
966
1590
  }
967
1591
  }
@@ -999,17 +1623,17 @@ function buildDisplayUrl(parsed) {
999
1623
  }
1000
1624
 
1001
1625
  // src/utils/llm-build-plan.ts
1002
- import * as fs7 from "node:fs";
1003
- import * as path8 from "node:path";
1004
- import { createAnthropic } from "@ai-sdk/anthropic";
1005
- import { createOpenAI } from "@ai-sdk/openai";
1006
- import { generateObject } from "ai";
1007
- import { z as z2 } from "zod";
1008
- var BuildPlanSchema = z2.object({
1009
- installCommand: z2.string().optional().describe("Additional install command if needed"),
1010
- buildCommands: z2.array(z2.string()).describe('Build steps to run, e.g. ["npm run build:wasm"]'),
1011
- entryPoint: z2.string().describe("Path to TS/TSX entry file after build"),
1012
- notes: z2.string().optional().describe("Caveats or warnings")
1626
+ import * as fs9 from "node:fs";
1627
+ import * as path10 from "node:path";
1628
+ import { createAnthropic as createAnthropic2 } from "@ai-sdk/anthropic";
1629
+ import { createOpenAI as createOpenAI2 } from "@ai-sdk/openai";
1630
+ import { generateObject as generateObject2 } from "ai";
1631
+ import { z as z3 } from "zod";
1632
+ var BuildPlanSchema = z3.object({
1633
+ installCommand: z3.string().optional().describe("Additional install command if needed"),
1634
+ buildCommands: z3.array(z3.string()).describe('Build steps to run, e.g. ["npm run build:wasm"]'),
1635
+ entryPoint: z3.string().describe("Path to TS/TSX entry file after build"),
1636
+ notes: z3.string().optional().describe("Caveats or warnings")
1013
1637
  });
1014
1638
  var CONTEXT_FILES = [
1015
1639
  "package.json",
@@ -1024,22 +1648,22 @@ var CONTEXT_FILES = [
1024
1648
  "wasm-pack.json"
1025
1649
  ];
1026
1650
  var MAX_FILE_CHARS = 2000;
1027
- function getModel() {
1651
+ function getModel2() {
1028
1652
  const provider = process.env.DOCCOV_LLM_PROVIDER?.toLowerCase();
1029
1653
  if (provider === "anthropic" || process.env.ANTHROPIC_API_KEY) {
1030
- const anthropic = createAnthropic();
1654
+ const anthropic = createAnthropic2();
1031
1655
  return anthropic("claude-sonnet-4-20250514");
1032
1656
  }
1033
- const openai = createOpenAI();
1657
+ const openai = createOpenAI2();
1034
1658
  return openai("gpt-4o-mini");
1035
1659
  }
1036
1660
  async function gatherContextFiles(repoDir) {
1037
1661
  const sections = [];
1038
1662
  for (const fileName of CONTEXT_FILES) {
1039
- const filePath = path8.join(repoDir, fileName);
1040
- if (fs7.existsSync(filePath)) {
1663
+ const filePath = path10.join(repoDir, fileName);
1664
+ if (fs9.existsSync(filePath)) {
1041
1665
  try {
1042
- let content = fs7.readFileSync(filePath, "utf-8");
1666
+ let content = fs9.readFileSync(filePath, "utf-8");
1043
1667
  if (content.length > MAX_FILE_CHARS) {
1044
1668
  content = `${content.slice(0, MAX_FILE_CHARS)}
1045
1669
  ... (truncated)`;
@@ -1081,8 +1705,8 @@ async function generateBuildPlan(repoDir) {
1081
1705
  if (!context.trim()) {
1082
1706
  return null;
1083
1707
  }
1084
- const model = getModel();
1085
- const { object } = await generateObject({
1708
+ const model = getModel2();
1709
+ const { object } = await generateObject2({
1086
1710
  model,
1087
1711
  schema: BuildPlanSchema,
1088
1712
  prompt: BUILD_PLAN_PROMPT(context)
@@ -1091,17 +1715,17 @@ async function generateBuildPlan(repoDir) {
1091
1715
  }
1092
1716
 
1093
1717
  // src/utils/monorepo-detection.ts
1094
- import * as fs8 from "node:fs";
1095
- import * as path9 from "node:path";
1718
+ import * as fs10 from "node:fs";
1719
+ import * as path11 from "node:path";
1096
1720
  import { glob } from "glob";
1097
1721
  async function detectMonorepo(repoDir) {
1098
- const pkgPath = path9.join(repoDir, "package.json");
1099
- if (!fs8.existsSync(pkgPath)) {
1722
+ const pkgPath = path11.join(repoDir, "package.json");
1723
+ if (!fs10.existsSync(pkgPath)) {
1100
1724
  return { isMonorepo: false, packages: [], type: "none" };
1101
1725
  }
1102
1726
  let pkg;
1103
1727
  try {
1104
- pkg = JSON.parse(fs8.readFileSync(pkgPath, "utf-8"));
1728
+ pkg = JSON.parse(fs10.readFileSync(pkgPath, "utf-8"));
1105
1729
  } catch {
1106
1730
  return { isMonorepo: false, packages: [], type: "none" };
1107
1731
  }
@@ -1110,16 +1734,16 @@ async function detectMonorepo(repoDir) {
1110
1734
  const packages = await resolveWorkspacePackages(repoDir, patterns);
1111
1735
  return { isMonorepo: packages.length > 0, packages, type: "npm" };
1112
1736
  }
1113
- const pnpmPath = path9.join(repoDir, "pnpm-workspace.yaml");
1114
- if (fs8.existsSync(pnpmPath)) {
1737
+ const pnpmPath = path11.join(repoDir, "pnpm-workspace.yaml");
1738
+ if (fs10.existsSync(pnpmPath)) {
1115
1739
  const patterns = parsePnpmWorkspace(pnpmPath);
1116
1740
  const packages = await resolveWorkspacePackages(repoDir, patterns);
1117
1741
  return { isMonorepo: packages.length > 0, packages, type: "pnpm" };
1118
1742
  }
1119
- const lernaPath = path9.join(repoDir, "lerna.json");
1120
- if (fs8.existsSync(lernaPath)) {
1743
+ const lernaPath = path11.join(repoDir, "lerna.json");
1744
+ if (fs10.existsSync(lernaPath)) {
1121
1745
  try {
1122
- const lerna = JSON.parse(fs8.readFileSync(lernaPath, "utf-8"));
1746
+ const lerna = JSON.parse(fs10.readFileSync(lernaPath, "utf-8"));
1123
1747
  const patterns = lerna.packages ?? ["packages/*"];
1124
1748
  const packages = await resolveWorkspacePackages(repoDir, patterns);
1125
1749
  return { isMonorepo: packages.length > 0, packages, type: "lerna" };
@@ -1141,7 +1765,7 @@ function extractWorkspacePatterns(workspaces) {
1141
1765
  }
1142
1766
  function parsePnpmWorkspace(filePath) {
1143
1767
  try {
1144
- const content = fs8.readFileSync(filePath, "utf-8");
1768
+ const content = fs10.readFileSync(filePath, "utf-8");
1145
1769
  const match = content.match(/packages:\s*\n((?:\s+-\s+.+\n?)+)/);
1146
1770
  if (match) {
1147
1771
  const lines = match[1].split(`
@@ -1161,13 +1785,13 @@ async function resolveWorkspacePackages(repoDir, patterns) {
1161
1785
  absolute: false
1162
1786
  });
1163
1787
  for (const match of matches) {
1164
- const pkgJsonPath = path9.join(repoDir, match, "package.json");
1165
- if (fs8.existsSync(pkgJsonPath)) {
1788
+ const pkgJsonPath = path11.join(repoDir, match, "package.json");
1789
+ if (fs10.existsSync(pkgJsonPath)) {
1166
1790
  try {
1167
- const pkgJson = JSON.parse(fs8.readFileSync(pkgJsonPath, "utf-8"));
1791
+ const pkgJson = JSON.parse(fs10.readFileSync(pkgJsonPath, "utf-8"));
1168
1792
  packages.push({
1169
- name: pkgJson.name ?? path9.basename(match),
1170
- path: path9.join(repoDir, match),
1793
+ name: pkgJson.name ?? path11.basename(match),
1794
+ path: path11.join(repoDir, match),
1171
1795
  relativePath: match
1172
1796
  });
1173
1797
  } catch {}
@@ -1194,29 +1818,29 @@ function formatPackageList(packages, limit = 10) {
1194
1818
  }
1195
1819
 
1196
1820
  // src/commands/scan.ts
1197
- var defaultDependencies5 = {
1198
- createDocCov: (options) => new DocCov3(options),
1199
- spinner: (text) => ora3(text),
1821
+ var defaultDependencies6 = {
1822
+ createDocCov: (options) => new DocCov5(options),
1823
+ spinner: (text) => ora5(text),
1200
1824
  log: console.log,
1201
1825
  error: console.error
1202
1826
  };
1203
1827
  function registerScanCommand(program, dependencies = {}) {
1204
1828
  const { createDocCov, spinner, log, error } = {
1205
- ...defaultDependencies5,
1829
+ ...defaultDependencies6,
1206
1830
  ...dependencies
1207
1831
  };
1208
- program.command("scan <url>").description("Analyze docs coverage for any public GitHub repository").option("--ref <branch>", "Branch or tag to analyze").option("--package <name>", "Target package in monorepo").option("--output <format>", "Output format: text or json", "text").option("--no-cleanup", "Keep cloned repo (for debugging)").option("--skip-install", "Skip dependency installation (faster, but may limit type resolution)").option("--save-spec <path>", "Save full OpenPkg spec to file").action(async (url, options) => {
1832
+ program.command("scan <url>").description("Analyze docs coverage for any public GitHub repository").option("--ref <branch>", "Branch or tag to analyze").option("--package <name>", "Target package in monorepo").option("--output <format>", "Output format: text or json", "text").option("--no-cleanup", "Keep cloned repo (for debugging)").option("--skip-install", "Skip dependency installation (faster, but may limit type resolution)").option("--skip-resolve", "Skip external type resolution from node_modules").option("--save-spec <path>", "Save full OpenPkg spec to file").action(async (url, options) => {
1209
1833
  let tempDir;
1210
1834
  try {
1211
1835
  const parsed = parseGitHubUrl(url, options.ref ?? "main");
1212
1836
  const cloneUrl = buildCloneUrl(parsed);
1213
1837
  const displayUrl = buildDisplayUrl(parsed);
1214
1838
  log("");
1215
- log(chalk6.bold(`Scanning ${displayUrl}`));
1216
- log(chalk6.gray(`Branch/tag: ${parsed.ref}`));
1839
+ log(chalk8.bold(`Scanning ${displayUrl}`));
1840
+ log(chalk8.gray(`Branch/tag: ${parsed.ref}`));
1217
1841
  log("");
1218
- tempDir = path10.join(os.tmpdir(), `doccov-scan-${Date.now()}-${Math.random().toString(36).slice(2)}`);
1219
- fs9.mkdirSync(tempDir, { recursive: true });
1842
+ tempDir = path12.join(os.tmpdir(), `doccov-scan-${Date.now()}-${Math.random().toString(36).slice(2)}`);
1843
+ fs11.mkdirSync(tempDir, { recursive: true });
1220
1844
  const cloneSpinner = spinner(`Cloning ${parsed.owner}/${parsed.repo}...`);
1221
1845
  cloneSpinner.start();
1222
1846
  try {
@@ -1241,7 +1865,7 @@ function registerScanCommand(program, dependencies = {}) {
1241
1865
  throw new Error(`Clone failed: ${message}`);
1242
1866
  }
1243
1867
  if (options.skipInstall) {
1244
- log(chalk6.gray("Skipping dependency installation (--skip-install)"));
1868
+ log(chalk8.gray("Skipping dependency installation (--skip-install)"));
1245
1869
  } else {
1246
1870
  const installSpinner = spinner("Installing dependencies...");
1247
1871
  installSpinner.start();
@@ -1257,7 +1881,7 @@ function registerScanCommand(program, dependencies = {}) {
  ];
  let installed = false;
  for (const { file, cmd } of lockfileCommands) {
- if (fs9.existsSync(path10.join(tempDir, file))) {
+ if (fs11.existsSync(path12.join(tempDir, file))) {
  try {
  execSync(cmd, {
  cwd: tempDir,
@@ -1304,14 +1928,14 @@ function registerScanCommand(program, dependencies = {}) {
  } else {
  installSpinner.warn("Could not install dependencies (analysis may be limited)");
  for (const err of installErrors) {
- log(chalk6.gray(` ${err}`));
+ log(chalk8.gray(` ${err}`));
  }
  }
  } catch (outerError) {
  const msg = outerError instanceof Error ? outerError.message : String(outerError);
  installSpinner.warn(`Could not install dependencies: ${msg.slice(0, 100)}`);
  for (const err of installErrors) {
- log(chalk6.gray(` ${err}`));
+ log(chalk8.gray(` ${err}`));
  }
  }
  }
@@ -1321,7 +1945,7 @@ function registerScanCommand(program, dependencies = {}) {
  if (mono.isMonorepo) {
  if (!options.package) {
  error("");
- error(chalk6.red(`Monorepo detected with ${mono.packages.length} packages. Specify target with --package:`));
+ error(chalk8.red(`Monorepo detected with ${mono.packages.length} packages. Specify target with --package:`));
  error("");
  error(formatPackageList(mono.packages));
  error("");
@@ -1330,7 +1954,7 @@ function registerScanCommand(program, dependencies = {}) {
  const pkg = await findPackage(tempDir, options.package);
  if (!pkg) {
  error("");
- error(chalk6.red(`Package "${options.package}" not found. Available packages:`));
+ error(chalk8.red(`Package "${options.package}" not found. Available packages:`));
  error("");
  error(formatPackageList(mono.packages));
  error("");
@@ -1338,7 +1962,7 @@ function registerScanCommand(program, dependencies = {}) {
  }
  targetDir = pkg.path;
  packageName = pkg.name;
- log(chalk6.gray(`Analyzing package: ${packageName}`));
+ log(chalk8.gray(`Analyzing package: ${packageName}`));
  }
  const entrySpinner = spinner("Detecting entry point...");
  entrySpinner.start();
@@ -1347,15 +1971,15 @@ function registerScanCommand(program, dependencies = {}) {
  if (!entryFile.endsWith(".d.ts"))
  return false;
  const cargoLocations = [
- path10.join(pkgDir, "Cargo.toml"),
- path10.join(repoRoot, "Cargo.toml")
+ path12.join(pkgDir, "Cargo.toml"),
+ path12.join(repoRoot, "Cargo.toml")
  ];
- const hasCargoToml = cargoLocations.some((p) => fs9.existsSync(p));
+ const hasCargoToml = cargoLocations.some((p) => fs11.existsSync(p));
  const checkWasmScripts = (dir) => {
- const pkgPath = path10.join(dir, "package.json");
- if (fs9.existsSync(pkgPath)) {
+ const pkgPath = path12.join(dir, "package.json");
+ if (fs11.existsSync(pkgPath)) {
  try {
- const pkg = JSON.parse(fs9.readFileSync(pkgPath, "utf-8"));
+ const pkg = JSON.parse(fs11.readFileSync(pkgPath, "utf-8"));
  const scripts = Object.values(pkg.scripts ?? {}).join(" ");
  return scripts.includes("wasm-pack") || scripts.includes("wasm");
  } catch {}
@@ -1375,24 +1999,24 @@ function registerScanCommand(program, dependencies = {}) {
  if (plan.buildCommands.length > 0) {
  const { execSync } = await import("node:child_process");
  for (const cmd of plan.buildCommands) {
- log(chalk6.gray(` Running: ${cmd}`));
+ log(chalk8.gray(` Running: ${cmd}`));
  try {
  execSync(cmd, { cwd: targetDir, stdio: "pipe", timeout: 300000 });
  } catch (buildError) {
  buildFailed = true;
  const msg = buildError instanceof Error ? buildError.message : String(buildError);
  if (msg.includes("rustc") || msg.includes("cargo") || msg.includes("wasm-pack")) {
- log(chalk6.yellow(` ⚠ Build requires Rust toolchain (not available)`));
+ log(chalk8.yellow(` ⚠ Build requires Rust toolchain (not available)`));
  } else if (msg.includes("rimraf") || msg.includes("command not found")) {
- log(chalk6.yellow(` ⚠ Build failed: missing dependencies`));
+ log(chalk8.yellow(` ⚠ Build failed: missing dependencies`));
  } else {
- log(chalk6.yellow(` ⚠ Build failed: ${msg.slice(0, 80)}`));
+ log(chalk8.yellow(` ⚠ Build failed: ${msg.slice(0, 80)}`));
  }
  }
  }
  }
  if (plan.notes) {
- log(chalk6.gray(` Note: ${plan.notes}`));
+ log(chalk8.gray(` Note: ${plan.notes}`));
  }
  return plan.entryPoint;
  };
@@ -1402,26 +2026,26 @@ function registerScanCommand(program, dependencies = {}) {
  entrySpinner.text = "Detected .d.ts entry with WASM indicators...";
  const llmEntry = await runLlmFallback("WASM project detected");
  if (llmEntry) {
- entryPath = path10.join(targetDir, llmEntry);
+ entryPath = path12.join(targetDir, llmEntry);
  if (buildFailed) {
  entrySpinner.succeed(`Entry point: ${llmEntry} (using pre-committed declarations)`);
- log(chalk6.gray(" Coverage may be limited - generated .d.ts files typically lack JSDoc"));
+ log(chalk8.gray(" Coverage may be limited - generated .d.ts files typically lack JSDoc"));
  } else {
  entrySpinner.succeed(`Entry point: ${llmEntry} (from LLM fallback - WASM project)`);
  }
  } else {
- entryPath = path10.join(targetDir, entry.entryPath);
+ entryPath = path12.join(targetDir, entry.entryPath);
  entrySpinner.succeed(`Entry point: ${entry.entryPath} (from ${entry.source})`);
- log(chalk6.yellow(" ⚠ WASM project detected but no API key - analysis may be limited"));
+ log(chalk8.yellow(" ⚠ WASM project detected but no API key - analysis may be limited"));
  }
  } else {
- entryPath = path10.join(targetDir, entry.entryPath);
+ entryPath = path12.join(targetDir, entry.entryPath);
  entrySpinner.succeed(`Entry point: ${entry.entryPath} (from ${entry.source})`);
  }
  } catch (entryError) {
  const llmEntry = await runLlmFallback("Heuristics failed");
  if (llmEntry) {
- entryPath = path10.join(targetDir, llmEntry);
+ entryPath = path12.join(targetDir, llmEntry);
  entrySpinner.succeed(`Entry point: ${llmEntry} (from LLM fallback)`);
  } else {
  entrySpinner.fail("Could not detect entry point (set OPENAI_API_KEY for smart fallback)");
@@ -1432,7 +2056,8 @@ function registerScanCommand(program, dependencies = {}) {
  analyzeSpinner.start();
  let result;
  try {
- const doccov = createDocCov({ resolveExternalTypes: true });
+ const resolveExternalTypes = !options.skipResolve;
+ const doccov = createDocCov({ resolveExternalTypes });
  result = await doccov.analyzeFileWithDiagnostics(entryPath);
  analyzeSpinner.succeed("Analysis complete");
  } catch (analysisError) {
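In the hunk above, the analyze step now derives `resolveExternalTypes` from the new `--skip-resolve` flag instead of hard-coding `true`. A minimal sketch of driving the SDK directly with the same toggle, assuming only the surface visible in this diff (`DocCov` from `@doccov/sdk`, a `resolveExternalTypes` constructor option, `analyzeFileWithDiagnostics`, and `result.spec`); the helper name and typings here are illustrative:

```ts
// Sketch: analyzing an entry file with external type resolution toggled,
// mirroring what `doccov scan --skip-resolve` does in the hunk above.
// Assumption: only the APIs named in this diff exist as shown; everything else is illustrative.
import { DocCov } from "@doccov/sdk";

async function analyze(entryPath: string, skipResolve: boolean) {
  const resolveExternalTypes = !skipResolve; // --skip-resolve => false
  const doccov = new DocCov({ resolveExternalTypes });
  const result = await doccov.analyzeFileWithDiagnostics(entryPath);
  // The CLI reads result.spec.docs?.coverageScore in the next hunk.
  return result.spec;
}
```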
@@ -1442,9 +2067,9 @@ function registerScanCommand(program, dependencies = {}) {
  const spec = result.spec;
  const coverageScore = spec.docs?.coverageScore ?? 0;
  if (options.saveSpec) {
- const specPath = path10.resolve(process.cwd(), options.saveSpec);
- fs9.writeFileSync(specPath, JSON.stringify(spec, null, 2));
- log(chalk6.green(`✓ Saved spec to ${options.saveSpec}`));
+ const specPath = path12.resolve(process.cwd(), options.saveSpec);
+ fs11.writeFileSync(specPath, JSON.stringify(spec, null, 2));
+ log(chalk8.green(`✓ Saved spec to ${options.saveSpec}`));
  }
  const undocumented = [];
  const driftIssues = [];
@@ -1481,7 +2106,7 @@ function registerScanCommand(program, dependencies = {}) {
  printTextResult(scanResult, log);
  }
  } catch (commandError) {
- error(chalk6.red("Error:"), commandError instanceof Error ? commandError.message : commandError);
+ error(chalk8.red("Error:"), commandError instanceof Error ? commandError.message : commandError);
  process.exitCode = 1;
  } finally {
  if (tempDir && options.cleanup !== false) {
@@ -1491,46 +2116,46 @@ function registerScanCommand(program, dependencies = {}) {
  stdio: "ignore"
  }).unref();
  } else if (tempDir) {
- log(chalk6.gray(`Repo preserved at: ${tempDir}`));
+ log(chalk8.gray(`Repo preserved at: ${tempDir}`));
  }
  }
  });
  }
  function printTextResult(result, log) {
  log("");
- log(chalk6.bold("DocCov Scan Results"));
+ log(chalk8.bold("DocCov Scan Results"));
  log("─".repeat(40));
  const repoName = result.packageName ? `${result.owner}/${result.repo} (${result.packageName})` : `${result.owner}/${result.repo}`;
- log(`Repository: ${chalk6.cyan(repoName)}`);
- log(`Branch: ${chalk6.gray(result.ref)}`);
+ log(`Repository: ${chalk8.cyan(repoName)}`);
+ log(`Branch: ${chalk8.gray(result.ref)}`);
  log("");
- const coverageColor = result.coverage >= 80 ? chalk6.green : result.coverage >= 50 ? chalk6.yellow : chalk6.red;
- log(chalk6.bold("Coverage"));
+ const coverageColor = result.coverage >= 80 ? chalk8.green : result.coverage >= 50 ? chalk8.yellow : chalk8.red;
+ log(chalk8.bold("Coverage"));
  log(` ${coverageColor(`${result.coverage}%`)}`);
  log("");
- log(chalk6.bold("Stats"));
+ log(chalk8.bold("Stats"));
  log(` ${result.exportCount} exports`);
  log(` ${result.typeCount} types`);
  log(` ${result.undocumented.length} undocumented`);
  log(` ${result.driftCount} drift issues`);
  if (result.undocumented.length > 0) {
  log("");
- log(chalk6.bold("Undocumented Exports"));
+ log(chalk8.bold("Undocumented Exports"));
  for (const name of result.undocumented.slice(0, 10)) {
- log(chalk6.yellow(` ! ${name}`));
+ log(chalk8.yellow(` ! ${name}`));
  }
  if (result.undocumented.length > 10) {
- log(chalk6.gray(` ... and ${result.undocumented.length - 10} more`));
+ log(chalk8.gray(` ... and ${result.undocumented.length - 10} more`));
  }
  }
  if (result.drift.length > 0) {
  log("");
- log(chalk6.bold("Drift Issues"));
+ log(chalk8.bold("Drift Issues"));
  for (const d of result.drift.slice(0, 5)) {
- log(chalk6.red(` • ${d.export}: ${d.issue}`));
+ log(chalk8.red(` • ${d.export}: ${d.issue}`));
  }
  if (result.drift.length > 5) {
- log(chalk6.gray(` ... and ${result.drift.length - 5} more`));
+ log(chalk8.gray(` ... and ${result.drift.length - 5} more`));
  }
  }
  log("");
@@ -1538,14 +2163,16 @@ function printTextResult(result, log) {

  // src/cli.ts
  var __filename2 = fileURLToPath(import.meta.url);
- var __dirname2 = path11.dirname(__filename2);
- var packageJson = JSON.parse(readFileSync8(path11.join(__dirname2, "../package.json"), "utf-8"));
+ var __dirname2 = path13.dirname(__filename2);
+ var packageJson = JSON.parse(readFileSync9(path13.join(__dirname2, "../package.json"), "utf-8"));
  var program = new Command;
  program.name("doccov").description("DocCov - Documentation coverage and drift detection for TypeScript").version(packageJson.version);
  registerGenerateCommand(program);
  registerCheckCommand(program);
  registerDiffCommand(program);
+ registerFixCommand(program);
  registerInitCommand(program);
+ registerReportCommand(program);
  registerScanCommand(program);
  program.command("*", { hidden: true }).action(() => {
  program.outputHelp();