@doccov/cli 0.4.7 → 0.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli.js CHANGED
@@ -29,10 +29,15 @@ var stringList = z.union([
  z.string(),
  z.array(z.string())
  ]);
+ var docsConfigSchema = z.object({
+ include: stringList.optional(),
+ exclude: stringList.optional()
+ });
  var docCovConfigSchema = z.object({
  include: stringList.optional(),
  exclude: stringList.optional(),
- plugins: z.array(z.unknown()).optional()
+ plugins: z.array(z.unknown()).optional(),
+ docs: docsConfigSchema.optional()
  });
  var normalizeList = (value) => {
  if (!value) {
@@ -45,10 +50,22 @@ var normalizeList = (value) => {
  var normalizeConfig = (input) => {
  const include = normalizeList(input.include);
  const exclude = normalizeList(input.exclude);
+ let docs;
+ if (input.docs) {
+ const docsInclude = normalizeList(input.docs.include);
+ const docsExclude = normalizeList(input.docs.exclude);
+ if (docsInclude || docsExclude) {
+ docs = {
+ include: docsInclude,
+ exclude: docsExclude
+ };
+ }
+ }
  return {
  include,
  exclude,
- plugins: input.plugins
+ plugins: input.plugins,
+ docs
  };
  };

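The new docs block mirrors the top-level include/exclude filters and feeds the markdown-impact scanning used by the diff command. A minimal config sketch, assuming defineConfig is exported from the package as suggested by src/config/index.ts below; the glob patterns are illustrative, not from the package:

// doccov.config.mjs (sketch; patterns are illustrative)
import { defineConfig } from "@doccov/cli";

export default defineConfig({
  include: ["src/**"],
  exclude: ["src/internal/**"],
  docs: {
    include: ["README.md", "docs/**/*.md"],
    exclude: ["docs/archive/**"]
  }
});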
@@ -124,21 +141,33 @@ ${formatIssues(issues)}`);
  // src/config/index.ts
  var defineConfig = (config) => config;
  // src/cli.ts
- import { readFileSync as readFileSync9 } from "node:fs";
- import * as path13 from "node:path";
+ import { readFileSync as readFileSync5 } from "node:fs";
+ import * as path9 from "node:path";
  import { fileURLToPath } from "node:url";
  import { Command } from "commander";

  // src/commands/check.ts
- import * as fs2 from "node:fs";
- import * as path3 from "node:path";
+ import * as fs from "node:fs";
+ import * as path2 from "node:path";
  import {
+ applyEdits,
+ categorizeDrifts,
+ createSourceFile,
  DocCov,
+ detectEntryPoint,
  detectExampleAssertionFailures,
  detectExampleRuntimeErrors,
+ detectMonorepo,
+ findPackageByName,
+ findJSDocLocation,
+ generateFixesForExport,
  hasNonAssertionComments,
+ mergeFixes,
+ NodeFileSystem,
  parseAssertions,
- runExamplesWithPackage
+ parseJSDocToPatch,
+ runExamplesWithPackage,
+ serializeJSDoc
  } from "@doccov/sdk";
  import chalk from "chalk";
  import ora from "ora";
@@ -211,191 +240,77 @@ async function parseAssertionsWithLLM(code) {
  }
  }

- // src/utils/package-utils.ts
- import * as fs from "node:fs";
- import * as path2 from "node:path";
- async function findEntryPoint(packageDir, preferSource = false) {
- const packageJsonPath = path2.join(packageDir, "package.json");
- if (!fs.existsSync(packageJsonPath)) {
- return findDefaultEntryPoint(packageDir);
- }
- const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, "utf-8"));
- if (preferSource) {
- const srcIndex = path2.join(packageDir, "src/index.ts");
- if (fs.existsSync(srcIndex)) {
- return srcIndex;
- }
- }
- if (!preferSource && (packageJson.types || packageJson.typings)) {
- const typesPath = path2.join(packageDir, packageJson.types || packageJson.typings);
- if (fs.existsSync(typesPath)) {
- return typesPath;
- }
- }
- if (packageJson.exports) {
- const exportPath = resolveExportsField(packageJson.exports, packageDir);
- if (exportPath) {
- return exportPath;
- }
- }
- if (packageJson.main) {
- const mainBase = packageJson.main.replace(/\.(js|mjs|cjs)$/, "");
- const dtsPath = path2.join(packageDir, `${mainBase}.d.ts`);
- if (fs.existsSync(dtsPath)) {
- return dtsPath;
- }
- const tsPath = path2.join(packageDir, `${mainBase}.ts`);
- if (fs.existsSync(tsPath)) {
- return tsPath;
- }
- const mainPath = path2.join(packageDir, packageJson.main);
- if (fs.existsSync(mainPath) && fs.statSync(mainPath).isDirectory()) {
- const indexDts = path2.join(mainPath, "index.d.ts");
- const indexTs = path2.join(mainPath, "index.ts");
- if (fs.existsSync(indexDts))
- return indexDts;
- if (fs.existsSync(indexTs))
- return indexTs;
- }
- }
- return findDefaultEntryPoint(packageDir);
- }
- function resolveExportsField(exports, packageDir) {
- if (typeof exports === "string") {
- return findTypeScriptFile(path2.join(packageDir, exports));
- }
- if (typeof exports === "object" && exports !== null && "." in exports) {
- const dotExport = exports["."];
- if (typeof dotExport === "string") {
- return findTypeScriptFile(path2.join(packageDir, dotExport));
- }
- if (dotExport && typeof dotExport === "object") {
- const dotRecord = dotExport;
- const typesEntry = dotRecord.types;
- if (typeof typesEntry === "string") {
- const typesPath = path2.join(packageDir, typesEntry);
- if (fs.existsSync(typesPath)) {
- return typesPath;
- }
- }
- for (const condition of ["import", "require", "default"]) {
- const target = dotRecord[condition];
- if (typeof target === "string") {
- const result = findTypeScriptFile(path2.join(packageDir, target));
- if (result)
- return result;
- }
- }
- }
- }
- return null;
- }
- function findTypeScriptFile(jsPath) {
- if (!fs.existsSync(jsPath))
- return null;
- const dtsPath = jsPath.replace(/\.(js|mjs|cjs)$/, ".d.ts");
- if (fs.existsSync(dtsPath)) {
- return dtsPath;
- }
- const tsPath = jsPath.replace(/\.(js|mjs|cjs)$/, ".ts");
- if (fs.existsSync(tsPath)) {
- return tsPath;
- }
- return null;
- }
- async function findDefaultEntryPoint(packageDir) {
- const candidates = [
- "dist/index.d.ts",
- "dist/index.ts",
- "lib/index.d.ts",
- "lib/index.ts",
- "src/index.ts",
- "index.d.ts",
- "index.ts"
- ];
- for (const candidate of candidates) {
- const fullPath = path2.join(packageDir, candidate);
- if (fs.existsSync(fullPath)) {
- return fullPath;
- }
- }
- throw new Error(`Could not find entry point in ${packageDir}`);
- }
- async function findPackageInMonorepo(rootDir, packageName) {
- const rootPackageJsonPath = path2.join(rootDir, "package.json");
- if (!fs.existsSync(rootPackageJsonPath)) {
- return null;
- }
- const rootPackageJson = JSON.parse(fs.readFileSync(rootPackageJsonPath, "utf-8"));
- if (rootPackageJson.name === packageName) {
- return rootDir;
- }
- let workspacePatterns = Array.isArray(rootPackageJson.workspaces) ? rootPackageJson.workspaces : rootPackageJson.workspaces?.packages || [];
- if (workspacePatterns.length === 0) {
- const pnpmWorkspacePath = path2.join(rootDir, "pnpm-workspace.yaml");
- if (fs.existsSync(pnpmWorkspacePath)) {
- const content = fs.readFileSync(pnpmWorkspacePath, "utf-8");
- const packagesMatch = content.match(/packages:\s*\n((?:\s*-\s*.+\n?)+)/);
- if (packagesMatch) {
- workspacePatterns = packagesMatch[1].split(`
- `).map((line) => line.replace(/^\s*-\s*['"]?/, "").replace(/['"]?\s*$/, "")).filter((line) => line.length > 0);
- }
- }
- }
- for (const pattern of workspacePatterns) {
- const searchPath = path2.join(rootDir, pattern.replace("/**", "").replace("/*", ""));
- if (fs.existsSync(searchPath) && fs.statSync(searchPath).isDirectory()) {
- const entries = fs.readdirSync(searchPath, { withFileTypes: true });
- for (const entry of entries) {
- if (entry.isDirectory()) {
- const packagePath = path2.join(searchPath, entry.name);
- const packageJsonPath = path2.join(packagePath, "package.json");
- if (fs.existsSync(packageJsonPath)) {
- const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, "utf-8"));
- if (packageJson.name === packageName) {
- return packagePath;
- }
- }
- }
- }
- }
- }
- return null;
- }
-
  // src/commands/check.ts
  var defaultDependencies = {
  createDocCov: (options) => new DocCov(options),
- spinner: (text) => ora(text),
+ spinner: (text) => ora({
+ text,
+ discardStdin: false,
+ hideCursor: true
+ }),
  log: console.log,
  error: console.error
  };
+ function collectDriftsFromExports(exports) {
+ const results = [];
+ for (const exp of exports) {
+ for (const drift of exp.docs?.drift ?? []) {
+ results.push({ export: exp, drift });
+ }
+ }
+ return results;
+ }
+ function filterDriftsByType(drifts, onlyTypes) {
+ if (!onlyTypes)
+ return drifts;
+ const allowedTypes = new Set(onlyTypes.split(",").map((t) => t.trim()));
+ return drifts.filter((d) => allowedTypes.has(d.drift.type));
+ }
+ function groupByExport(drifts) {
+ const map = new Map;
+ for (const { export: exp, drift } of drifts) {
+ const existing = map.get(exp) ?? [];
+ existing.push(drift);
+ map.set(exp, existing);
+ }
+ return map;
+ }
  function registerCheckCommand(program, dependencies = {}) {
  const { createDocCov, spinner, log, error } = {
  ...defaultDependencies,
  ...dependencies
  };
- program.command("check [entry]").description("Fail if documentation coverage falls below a threshold").option("--cwd <dir>", "Working directory", process.cwd()).option("--package <name>", "Target package name (for monorepos)").option("--min-coverage <percentage>", "Minimum docs coverage percentage (0-100)", (value) => Number(value)).option("--require-examples", "Require at least one @example for every export").option("--run-examples", "Execute @example blocks and fail on runtime errors").option("--ignore-drift", "Do not fail on documentation drift").option("--skip-resolve", "Skip external type resolution from node_modules").action(async (entry, options) => {
+ program.command("check [entry]").description("Fail if documentation coverage falls below a threshold").option("--cwd <dir>", "Working directory", process.cwd()).option("--package <name>", "Target package name (for monorepos)").option("--min-coverage <percentage>", "Minimum docs coverage percentage (0-100)", (value) => Number(value)).option("--require-examples", "Require at least one @example for every export").option("--run-examples", "Execute @example blocks and fail on runtime errors").option("--ignore-drift", "Do not fail on documentation drift").option("--skip-resolve", "Skip external type resolution from node_modules").option("--write", "Auto-fix drift issues").option("--only <types>", "Only fix specific drift types (comma-separated)").option("--dry-run", "Preview fixes without writing (requires --write)").action(async (entry, options) => {
  try {
  let targetDir = options.cwd;
  let entryFile = entry;
+ const fileSystem = new NodeFileSystem(options.cwd);
  if (options.package) {
- const packageDir = await findPackageInMonorepo(options.cwd, options.package);
- if (!packageDir) {
- throw new Error(`Package "${options.package}" not found in monorepo`);
+ const mono = await detectMonorepo(fileSystem);
+ if (!mono.isMonorepo) {
+ throw new Error(`Not a monorepo. Remove --package flag for single-package repos.`);
+ }
+ const pkg = findPackageByName(mono.packages, options.package);
+ if (!pkg) {
+ const available = mono.packages.map((p) => p.name).join(", ");
+ throw new Error(`Package "${options.package}" not found. Available: ${available}`);
  }
- targetDir = packageDir;
- log(chalk.gray(`Found package at ${path3.relative(options.cwd, packageDir)}`));
+ targetDir = path2.join(options.cwd, pkg.path);
+ log(chalk.gray(`Found package at ${pkg.path}`));
  }
  if (!entryFile) {
- entryFile = await findEntryPoint(targetDir, true);
- log(chalk.gray(`Auto-detected entry point: ${path3.relative(targetDir, entryFile)}`));
+ const targetFs = new NodeFileSystem(targetDir);
+ const detected = await detectEntryPoint(targetFs);
+ entryFile = path2.join(targetDir, detected.path);
+ log(chalk.gray(`Auto-detected entry point: ${detected.path} (from ${detected.source})`));
  } else {
- entryFile = path3.resolve(targetDir, entryFile);
- if (fs2.existsSync(entryFile) && fs2.statSync(entryFile).isDirectory()) {
+ entryFile = path2.resolve(targetDir, entryFile);
+ if (fs.existsSync(entryFile) && fs.statSync(entryFile).isDirectory()) {
  targetDir = entryFile;
- entryFile = await findEntryPoint(entryFile, true);
- log(chalk.gray(`Auto-detected entry point: ${entryFile}`));
+ const dirFs = new NodeFileSystem(entryFile);
+ const detected = await detectEntryPoint(dirFs);
+ entryFile = path2.join(entryFile, detected.path);
+ log(chalk.gray(`Auto-detected entry point: ${detected.path}`));
  }
  }
  const minCoverage = clampCoverage(options.minCoverage ?? 80);
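check now resolves monorepo packages and entry points through the SDK and gains --write, --only, and --dry-run. Typical invocations might look like this (the package name is a placeholder, and <drift-types> stands for whatever comma-separated drift types apply to your project):

doccov check --package @acme/core --min-coverage 90
doccov check --write --dry-run
doccov check --write --only <drift-types>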
@@ -537,7 +452,116 @@ function registerCheckCommand(program, dependencies = {}) {
  const coverageScore = spec.docs?.coverageScore ?? 0;
  const failingExports = collectFailingExports(spec.exports ?? [], minCoverage);
  const missingExamples = options.requireExamples ? failingExports.filter((item) => item.missing?.includes("examples")) : [];
- const driftExports = [...collectDrift(spec.exports ?? []), ...runtimeDrifts];
+ let driftExports = [...collectDrift(spec.exports ?? []), ...runtimeDrifts];
+ const fixedDriftKeys = new Set;
+ if (options.write && driftExports.length > 0) {
+ const allDrifts = collectDriftsFromExports(spec.exports ?? []);
+ const filteredDrifts = filterDriftsByType(allDrifts, options.only);
+ if (filteredDrifts.length === 0 && options.only) {
+ log(chalk.yellow("No matching drift issues for the specified types."));
+ } else if (filteredDrifts.length > 0) {
+ const { fixable, nonFixable } = categorizeDrifts(filteredDrifts.map((d) => d.drift));
+ if (fixable.length === 0) {
+ log(chalk.yellow(`Found ${nonFixable.length} drift issue(s), but none are auto-fixable.`));
+ } else {
+ log("");
+ log(chalk.bold(`Found ${fixable.length} fixable issue(s)`));
+ if (nonFixable.length > 0) {
+ log(chalk.gray(`(${nonFixable.length} non-fixable issue(s) skipped)`));
+ }
+ log("");
+ const groupedDrifts = groupByExport(filteredDrifts.filter((d) => fixable.includes(d.drift)));
+ const edits = [];
+ const editsByFile = new Map;
+ for (const [exp, drifts] of groupedDrifts) {
+ if (!exp.source?.file) {
+ log(chalk.gray(` Skipping ${exp.name}: no source location`));
+ continue;
+ }
+ if (exp.source.file.endsWith(".d.ts")) {
+ log(chalk.gray(` Skipping ${exp.name}: declaration file`));
+ continue;
+ }
+ const filePath = path2.resolve(targetDir, exp.source.file);
+ if (!fs.existsSync(filePath)) {
+ log(chalk.gray(` Skipping ${exp.name}: file not found`));
+ continue;
+ }
+ const sourceFile = createSourceFile(filePath);
+ const location = findJSDocLocation(sourceFile, exp.name, exp.source.line);
+ if (!location) {
+ log(chalk.gray(` Skipping ${exp.name}: could not find declaration`));
+ continue;
+ }
+ let existingPatch = {};
+ if (location.hasExisting && location.existingJSDoc) {
+ existingPatch = parseJSDocToPatch(location.existingJSDoc);
+ }
+ const fixes = generateFixesForExport({ ...exp, docs: { ...exp.docs, drift: drifts } }, existingPatch);
+ if (fixes.length === 0)
+ continue;
+ for (const drift of drifts) {
+ fixedDriftKeys.add(`${exp.name}:${drift.issue}`);
+ }
+ const mergedPatch = mergeFixes(fixes, existingPatch);
+ const newJSDoc = serializeJSDoc(mergedPatch, location.indent);
+ const edit = {
+ filePath,
+ symbolName: exp.name,
+ startLine: location.startLine,
+ endLine: location.endLine,
+ hasExisting: location.hasExisting,
+ existingJSDoc: location.existingJSDoc,
+ newJSDoc,
+ indent: location.indent
+ };
+ edits.push(edit);
+ const fileEdits = editsByFile.get(filePath) ?? [];
+ fileEdits.push({ export: exp, edit, fixes, existingPatch });
+ editsByFile.set(filePath, fileEdits);
+ }
+ if (edits.length > 0) {
+ if (options.dryRun) {
+ log(chalk.bold("Dry run - changes that would be made:"));
+ log("");
+ for (const [filePath, fileEdits] of editsByFile) {
+ const relativePath = path2.relative(targetDir, filePath);
+ log(chalk.cyan(` ${relativePath}:`));
+ for (const { export: exp, edit, fixes } of fileEdits) {
+ const lineInfo = edit.hasExisting ? `lines ${edit.startLine + 1}-${edit.endLine + 1}` : `line ${edit.startLine + 1}`;
+ log(` ${chalk.bold(exp.name)} [${lineInfo}]`);
+ for (const fix of fixes) {
+ log(chalk.green(` + ${fix.description}`));
+ }
+ }
+ log("");
+ }
+ log(chalk.gray("Run without --dry-run to apply these changes."));
+ } else {
+ const applySpinner = spinner("Applying fixes...");
+ applySpinner.start();
+ const applyResult = await applyEdits(edits);
+ if (applyResult.errors.length > 0) {
+ applySpinner.warn("Some fixes could not be applied");
+ for (const err of applyResult.errors) {
+ error(chalk.red(` ${err.file}: ${err.error}`));
+ }
+ } else {
+ applySpinner.succeed(`Applied ${applyResult.editsApplied} fix(es) to ${applyResult.filesModified} file(s)`);
+ }
+ log("");
+ for (const [filePath, fileEdits] of editsByFile) {
+ const relativePath = path2.relative(targetDir, filePath);
+ log(chalk.green(` ✓ ${relativePath}: ${fileEdits.length} fix(es)`));
+ }
+ }
+ }
+ }
+ }
+ if (!options.dryRun) {
+ driftExports = driftExports.filter((d) => !fixedDriftKeys.has(`${d.name}:${d.issue}`));
+ }
+ }
  const coverageFailed = coverageScore < minCoverage;
  const hasMissingExamples = missingExamples.length > 0;
  const hasDrift = !options.ignoreDrift && driftExports.length > 0;
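When --write is set, check runs the drift-fix pipeline inline: locate the declaration's JSDoc, parse it into a patch, generate and merge fixes, serialize, and apply. A condensed sketch of that order using the @doccov/sdk functions imported above (skip cases and error handling omitted; exp, drifts, and filePath come from the analyzed spec):

import {
  applyEdits, createSourceFile, findJSDocLocation, generateFixesForExport,
  mergeFixes, parseJSDocToPatch, serializeJSDoc
} from "@doccov/sdk";

// locate the JSDoc block for one export
const sourceFile = createSourceFile(filePath);
const location = findJSDocLocation(sourceFile, exp.name, exp.source.line);
// fold existing tags into a patch, then layer generated fixes on top
const existingPatch = location.existingJSDoc ? parseJSDocToPatch(location.existingJSDoc) : {};
const fixes = generateFixesForExport({ ...exp, docs: { ...exp.docs, drift: drifts } }, existingPatch);
const newJSDoc = serializeJSDoc(mergeFixes(fixes, existingPatch), location.indent);
// write the rebuilt comment back to disk
await applyEdits([{
  filePath, symbolName: exp.name,
  startLine: location.startLine, endLine: location.endLine,
  hasExisting: location.hasExisting, existingJSDoc: location.existingJSDoc,
  newJSDoc, indent: location.indent
}]);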
@@ -627,30 +651,131 @@ function collectDrift(exportsList) {
  }
  }

  // src/commands/diff.ts
- import * as fs3 from "node:fs";
- import * as path4 from "node:path";
- import { diffSpec } from "@openpkg-ts/spec";
+ import * as fs2 from "node:fs";
+ import * as path3 from "node:path";
+ import {
+ diffSpecWithDocs,
+ getDocsImpactSummary,
+ hasDocsImpact,
+ parseMarkdownFiles
+ } from "@doccov/sdk";
  import chalk2 from "chalk";
+ import { glob } from "glob";
+
+ // src/utils/docs-impact-ai.ts
+ import { createAnthropic as createAnthropic2 } from "@ai-sdk/anthropic";
+ import { createOpenAI as createOpenAI2 } from "@ai-sdk/openai";
+ import { generateObject as generateObject2, generateText } from "ai";
+ import { z as z3 } from "zod";
+ var CodeBlockUsageSchema = z3.object({
+ isImpacted: z3.boolean().describe("Whether the code block is affected by the change"),
+ reason: z3.string().describe("Explanation of why/why not the code is impacted"),
+ usageType: z3.enum(["direct-call", "import-only", "indirect", "not-used"]).describe("How the export is used in this code block"),
+ suggestedFix: z3.string().optional().describe("If impacted, the suggested code change"),
+ confidence: z3.enum(["high", "medium", "low"]).describe("Confidence level of the analysis")
+ });
+ var MultiBlockAnalysisSchema = z3.object({
+ groups: z3.array(z3.object({
+ blockIndices: z3.array(z3.number()).describe("Indices of blocks that should run together"),
+ reason: z3.string().describe("Why these blocks are related")
+ })).describe("Groups of related code blocks"),
+ skippedBlocks: z3.array(z3.number()).describe("Indices of blocks that should be skipped (incomplete/illustrative)")
+ });
+ function getModel2() {
+ const provider = process.env.DOCCOV_LLM_PROVIDER?.toLowerCase();
+ if (provider === "anthropic" || process.env.ANTHROPIC_API_KEY) {
+ const anthropic = createAnthropic2();
+ return anthropic("claude-sonnet-4-20250514");
+ }
+ const openai = createOpenAI2();
+ return openai("gpt-4o-mini");
+ }
+ function isAIDocsAnalysisAvailable() {
+ return Boolean(process.env.OPENAI_API_KEY || process.env.ANTHROPIC_API_KEY);
+ }
+ async function generateImpactSummary(impacts) {
+ if (!isAIDocsAnalysisAvailable()) {
+ return null;
+ }
+ if (impacts.length === 0) {
+ return "No documentation impacts detected.";
+ }
+ try {
+ const { text } = await generateText({
+ model: getModel2(),
+ prompt: `Summarize these documentation impacts for a GitHub PR comment.
+
+ Impacts:
+ ${impacts.map((i) => `- ${i.file}: ${i.exportName} (${i.changeType})`).join(`
+ `)}
+
+ Write a brief, actionable summary (2-3 sentences) explaining:
+ 1. How many files/references are affected
+ 2. What type of updates are needed
+ 3. Priority recommendation
+
+ Keep it concise and developer-friendly.`
+ });
+ return text.trim();
+ } catch {
+ return null;
+ }
+ }
+
+ // src/commands/diff.ts
  var defaultDependencies2 = {
- readFileSync: fs3.readFileSync,
+ readFileSync: fs2.readFileSync,
  log: console.log,
  error: console.error
  };
  function registerDiffCommand(program, dependencies = {}) {
- const { readFileSync: readFileSync3, log, error } = {
+ const { readFileSync: readFileSync2, log, error } = {
  ...defaultDependencies2,
  ...dependencies
  };
- program.command("diff <base> <head>").description("Compare two OpenPkg specs and report coverage delta").option("--output <format>", "Output format: json or text", "text").option("--fail-on-regression", "Exit with error if coverage regressed").option("--fail-on-drift", "Exit with error if new drift was introduced").action((base, head, options) => {
+ program.command("diff <base> <head>").description("Compare two OpenPkg specs and report coverage delta").option("--output <format>", "Output format: json or text", "text").option("--fail-on-regression", "Exit with error if coverage regressed").option("--fail-on-drift", "Exit with error if new drift was introduced").option("--docs <glob>", "Glob pattern for markdown docs to check for impact", collect, []).option("--fail-on-docs-impact", "Exit with error if docs need updates").option("--ai", "Use AI for deeper analysis and fix suggestions").action(async (base, head, options) => {
  try {
- const baseSpec = loadSpec(base, readFileSync3);
- const headSpec = loadSpec(head, readFileSync3);
- const diff = diffSpec(baseSpec, headSpec);
+ const baseSpec = loadSpec(base, readFileSync2);
+ const headSpec = loadSpec(head, readFileSync2);
+ let markdownFiles;
+ let docsPatterns = options.docs;
+ if (!docsPatterns || docsPatterns.length === 0) {
+ const configResult = await loadDocCovConfig(process.cwd());
+ if (configResult.config?.docs?.include) {
+ docsPatterns = configResult.config.docs.include;
+ log(chalk2.gray(`Using docs patterns from config: ${docsPatterns.join(", ")}`));
+ }
+ }
+ if (docsPatterns && docsPatterns.length > 0) {
+ markdownFiles = await loadMarkdownFiles(docsPatterns);
+ }
+ const diff = diffSpecWithDocs(baseSpec, headSpec, { markdownFiles });
  const format = options.output ?? "text";
  if (format === "json") {
  log(JSON.stringify(diff, null, 2));
  } else {
  printTextDiff(diff, log, error);
+ if (options.ai && diff.docsImpact && hasDocsImpact(diff)) {
+ if (!isAIDocsAnalysisAvailable()) {
+ log(chalk2.yellow(`
+ ⚠ AI analysis unavailable (set OPENAI_API_KEY or ANTHROPIC_API_KEY)`));
+ } else {
+ log(chalk2.gray(`
+ Generating AI summary...`));
+ const impacts = diff.docsImpact.impactedFiles.flatMap((f) => f.references.map((r) => ({
+ file: f.file,
+ exportName: r.exportName,
+ changeType: r.changeType,
+ context: r.context
+ })));
+ const summary = await generateImpactSummary(impacts);
+ if (summary) {
+ log("");
+ log(chalk2.bold("AI Summary"));
+ log(chalk2.cyan(` ${summary}`));
+ }
+ }
+ }
  }
  if (options.failOnRegression && diff.coverageDelta < 0) {
  error(chalk2.red(`
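diff gains repeatable --docs globs (falling back to docs.include from the config), --fail-on-docs-impact, and --ai. A sketch of a CI-style invocation (the spec file names are illustrative):

doccov diff openpkg.base.json openpkg.head.json --docs "README.md" --docs "docs/**/*.md" --fail-on-docs-impact

The --ai summary only runs when OPENAI_API_KEY or ANTHROPIC_API_KEY is set; setting DOCCOV_LLM_PROVIDER=anthropic forces the Anthropic model.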
@@ -664,19 +789,42 @@ ${diff.driftIntroduced} new drift issue(s) introduced`));
  process.exitCode = 1;
  return;
  }
+ if (options.failOnDocsImpact && hasDocsImpact(diff)) {
+ const summary = getDocsImpactSummary(diff);
+ error(chalk2.red(`
+ ${summary.totalIssues} docs issue(s) require attention`));
+ process.exitCode = 1;
+ return;
+ }
  } catch (commandError) {
  error(chalk2.red("Error:"), commandError instanceof Error ? commandError.message : commandError);
  process.exitCode = 1;
  }
  });
  }
- function loadSpec(filePath, readFileSync3) {
- const resolvedPath = path4.resolve(filePath);
- if (!fs3.existsSync(resolvedPath)) {
+ function collect(value, previous) {
+ return previous.concat([value]);
+ }
+ async function loadMarkdownFiles(patterns) {
+ const files = [];
+ for (const pattern of patterns) {
+ const matches = await glob(pattern, { nodir: true });
+ for (const filePath of matches) {
+ try {
+ const content = fs2.readFileSync(filePath, "utf-8");
+ files.push({ path: filePath, content });
+ } catch {}
+ }
+ }
+ return parseMarkdownFiles(files);
+ }
+ function loadSpec(filePath, readFileSync2) {
+ const resolvedPath = path3.resolve(filePath);
+ if (!fs2.existsSync(resolvedPath)) {
  throw new Error(`File not found: ${filePath}`);
  }
  try {
- const content = readFileSync3(resolvedPath, "utf-8");
+ const content = readFileSync2(resolvedPath, "utf-8");
  return JSON.parse(content);
  } catch (parseError) {
  throw new Error(`Failed to parse ${filePath}: ${parseError instanceof Error ? parseError.message : parseError}`);
@@ -744,218 +892,61 @@ function printTextDiff(diff, log, _error) {
  log(chalk2.green(` -${diff.driftResolved} drift issue(s) resolved`));
  }
  }
- log("");
- }
-
- // src/commands/fix.ts
- import * as fs4 from "node:fs";
- import * as path5 from "node:path";
- import {
- applyEdits,
- categorizeDrifts,
- createSourceFile,
- DocCov as DocCov2,
- findJSDocLocation,
- generateFixesForExport,
- mergeFixes,
- parseJSDocToPatch,
- serializeJSDoc
- } from "@doccov/sdk";
- import chalk3 from "chalk";
- import ora2 from "ora";
- var defaultDependencies3 = {
- createDocCov: (options) => new DocCov2(options),
- spinner: (text) => ora2(text),
- log: console.log,
- error: console.error
- };
- function collectDrifts(exports) {
- const results = [];
- for (const exp of exports) {
- const drifts = exp.docs?.drift ?? [];
- for (const drift of drifts) {
- results.push({ export: exp, drift });
- }
- }
- return results;
- }
- function filterDriftsByType(drifts, onlyTypes) {
- if (!onlyTypes)
- return drifts;
- const allowedTypes = new Set(onlyTypes.split(",").map((t) => t.trim()));
- return drifts.filter((d) => allowedTypes.has(d.drift.type));
- }
- function groupByExport(drifts) {
- const map = new Map;
- for (const { export: exp, drift } of drifts) {
- const existing = map.get(exp) ?? [];
- existing.push(drift);
- map.set(exp, existing);
- }
- return map;
- }
- function registerFixCommand(program, dependencies = {}) {
- const { createDocCov, spinner, log, error } = {
- ...defaultDependencies3,
- ...dependencies
- };
- program.command("fix [entry]").description("Automatically fix documentation drift").option("--cwd <dir>", "Working directory", process.cwd()).option("--package <name>", "Target package name (for monorepos)").option("--dry-run", "Preview changes without writing").option("--only <types>", "Only fix specific drift types (comma-separated)").option("--skip-resolve", "Skip external type resolution from node_modules").action(async (entry, options) => {
- try {
- let targetDir = options.cwd;
- let entryFile = entry;
- if (options.package) {
- const packageDir = await findPackageInMonorepo(options.cwd, options.package);
- if (!packageDir) {
- throw new Error(`Package "${options.package}" not found in monorepo`);
- }
- targetDir = packageDir;
- log(chalk3.gray(`Found package at ${path5.relative(options.cwd, packageDir)}`));
- }
- if (!entryFile) {
- entryFile = await findEntryPoint(targetDir, true);
- log(chalk3.gray(`Auto-detected entry point: ${path5.relative(targetDir, entryFile)}`));
- } else {
- entryFile = path5.resolve(targetDir, entryFile);
- if (fs4.existsSync(entryFile) && fs4.statSync(entryFile).isDirectory()) {
- targetDir = entryFile;
- entryFile = await findEntryPoint(entryFile, true);
- log(chalk3.gray(`Auto-detected entry point: ${entryFile}`));
+ if (diff.docsImpact) {
+ log("");
+ log(chalk2.bold("Docs Impact"));
+ const { impactedFiles, missingDocs, stats } = diff.docsImpact;
+ log(chalk2.gray(` Scanned ${stats.filesScanned} file(s), ${stats.codeBlocksFound} code block(s)`));
+ if (impactedFiles.length > 0) {
+ log("");
+ log(chalk2.yellow(` ${impactedFiles.length} file(s) need updates:`));
+ for (const file of impactedFiles.slice(0, 10)) {
+ log(chalk2.yellow(` \uD83D\uDCC4 ${file.file}`));
+ for (const ref of file.references.slice(0, 3)) {
+ const changeLabel = ref.changeType === "signature-changed" ? "signature changed" : ref.changeType === "removed" ? "removed" : "deprecated";
+ log(chalk2.gray(` Line ${ref.line}: ${ref.exportName} (${changeLabel})`));
  }
- }
- const resolveExternalTypes = !options.skipResolve;
- const analyzeSpinner = spinner("Analyzing documentation...");
- analyzeSpinner.start();
- const doccov = createDocCov({ resolveExternalTypes });
- const result = await doccov.analyzeFileWithDiagnostics(entryFile);
- const spec = result.spec;
- analyzeSpinner.succeed("Analysis complete");
- const allDrifts = collectDrifts(spec.exports ?? []);
- if (allDrifts.length === 0) {
- log(chalk3.green("No drift issues found. Documentation is in sync!"));
- return;
- }
- const filteredDrifts = filterDriftsByType(allDrifts, options.only);
- if (filteredDrifts.length === 0) {
- log(chalk3.yellow("No matching drift issues for the specified types."));
- return;
- }
- const { fixable, nonFixable } = categorizeDrifts(filteredDrifts.map((d) => d.drift));
- if (fixable.length === 0) {
- log(chalk3.yellow(`Found ${nonFixable.length} drift issue(s), but none are auto-fixable.`));
- log(chalk3.gray("Non-fixable drift types require manual intervention:"));
- for (const drift of nonFixable.slice(0, 5)) {
- log(chalk3.gray(` • ${drift.type}: ${drift.issue}`));
+ if (file.references.length > 3) {
+ log(chalk2.gray(` ... and ${file.references.length - 3} more reference(s)`));
  }
- return;
  }
- log("");
- log(chalk3.bold(`Found ${fixable.length} fixable issue(s)`));
- if (nonFixable.length > 0) {
- log(chalk3.gray(`(${nonFixable.length} non-fixable issue(s) skipped)`));
+ if (impactedFiles.length > 10) {
+ log(chalk2.gray(` ... and ${impactedFiles.length - 10} more file(s)`));
  }
+ }
+ if (missingDocs.length > 0) {
  log("");
- const groupedDrifts = groupByExport(filteredDrifts.filter((d) => fixable.includes(d.drift)));
- const edits = [];
- const editsByFile = new Map;
- for (const [exp, drifts] of groupedDrifts) {
- if (!exp.source?.file) {
- log(chalk3.gray(` Skipping ${exp.name}: no source location`));
- continue;
- }
- if (exp.source.file.endsWith(".d.ts")) {
- log(chalk3.gray(` Skipping ${exp.name}: declaration file`));
- continue;
- }
- const filePath = path5.resolve(targetDir, exp.source.file);
- if (!fs4.existsSync(filePath)) {
- log(chalk3.gray(` Skipping ${exp.name}: file not found`));
- continue;
- }
- const sourceFile = createSourceFile(filePath);
- const location = findJSDocLocation(sourceFile, exp.name, exp.source.line);
- if (!location) {
- log(chalk3.gray(` Skipping ${exp.name}: could not find declaration`));
- continue;
- }
- let existingPatch = {};
- if (location.hasExisting && location.existingJSDoc) {
- existingPatch = parseJSDocToPatch(location.existingJSDoc);
- }
- const fixes = generateFixesForExport({ ...exp, docs: { ...exp.docs, drift: drifts } }, existingPatch);
- if (fixes.length === 0)
- continue;
- const mergedPatch = mergeFixes(fixes, existingPatch);
- const newJSDoc = serializeJSDoc(mergedPatch, location.indent);
- const edit = {
- filePath,
- symbolName: exp.name,
- startLine: location.startLine,
- endLine: location.endLine,
- hasExisting: location.hasExisting,
- existingJSDoc: location.existingJSDoc,
- newJSDoc,
- indent: location.indent
- };
- edits.push(edit);
- const fileEdits = editsByFile.get(filePath) ?? [];
- fileEdits.push({ export: exp, edit, fixes, existingPatch });
- editsByFile.set(filePath, fileEdits);
+ log(chalk2.yellow(` ${missingDocs.length} new export(s) missing docs:`));
+ for (const name of missingDocs.slice(0, 5)) {
+ log(chalk2.yellow(` • ${name}`));
  }
- if (edits.length === 0) {
- log(chalk3.yellow("No edits could be generated."));
- return;
+ if (missingDocs.length > 5) {
+ log(chalk2.gray(` ... and ${missingDocs.length - 5} more`));
  }
- if (options.dryRun) {
- log(chalk3.bold("Dry run - changes that would be made:"));
- log("");
- for (const [filePath, fileEdits] of editsByFile) {
- const relativePath = path5.relative(targetDir, filePath);
- log(chalk3.cyan(` ${relativePath}:`));
- for (const { export: exp, edit, fixes } of fileEdits) {
- const lineInfo = edit.hasExisting ? `lines ${edit.startLine + 1}-${edit.endLine + 1}` : `line ${edit.startLine + 1}`;
- log(` ${chalk3.bold(exp.name)} [${lineInfo}]`);
- for (const fix of fixes) {
- log(chalk3.green(` + ${fix.description}`));
- }
- }
- log("");
- }
- log(chalk3.gray("Run without --dry-run to apply these changes."));
- } else {
- const applySpinner = spinner("Applying fixes...");
- applySpinner.start();
- const result2 = await applyEdits(edits);
- if (result2.errors.length > 0) {
- applySpinner.warn("Some fixes could not be applied");
- for (const err of result2.errors) {
- error(chalk3.red(` ${err.file}: ${err.error}`));
- }
- } else {
- applySpinner.succeed(`Applied ${result2.editsApplied} fix(es) to ${result2.filesModified} file(s)`);
- }
- log("");
- for (const [filePath, fileEdits] of editsByFile) {
- const relativePath = path5.relative(targetDir, filePath);
- log(chalk3.green(` ✓ ${relativePath}: ${fileEdits.length} fix(es)`));
- }
- }
- } catch (commandError) {
- error(chalk3.red("Error:"), commandError instanceof Error ? commandError.message : commandError);
- process.exitCode = 1;
  }
- });
+ if (impactedFiles.length === 0 && missingDocs.length === 0) {
+ log(chalk2.green(" ✓ No docs impact detected"));
+ }
+ }
+ log("");
  }

  // src/commands/generate.ts
- import * as fs6 from "node:fs";
- import * as path7 from "node:path";
- import { DocCov as DocCov3 } from "@doccov/sdk";
+ import * as fs3 from "node:fs";
+ import * as path4 from "node:path";
+ import {
+ DocCov as DocCov2,
+ detectEntryPoint as detectEntryPoint2,
+ detectMonorepo as detectMonorepo2,
+ findPackageByName as findPackageByName2,
+ NodeFileSystem as NodeFileSystem2
+ } from "@doccov/sdk";
  import { normalize, validateSpec } from "@openpkg-ts/spec";
- import chalk5 from "chalk";
- import ora3 from "ora";
+ import chalk4 from "chalk";
+ import ora2 from "ora";

  // src/utils/filter-options.ts
- import chalk4 from "chalk";
+ import chalk3 from "chalk";
  var unique = (values) => Array.from(new Set(values));
  var parseListFlag = (value) => {
  if (!value) {
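The standalone fix command (src/commands/fix.ts, removed above) is folded into check: its --dry-run and --only options carry over, gated behind the new --write flag. Roughly equivalent invocations (sketch):

doccov fix --dry-run            # 0.4.7
doccov check --write --dry-run  # 0.5.1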
@@ -965,7 +956,7 @@ var parseListFlag = (value) => {
  const normalized = rawItems.flatMap((item) => String(item).split(",")).map((item) => item.trim()).filter(Boolean);
  return normalized.length > 0 ? unique(normalized) : undefined;
  };
- var formatList = (label, values) => `${label}: ${values.map((value) => chalk4.cyan(value)).join(", ")}`;
+ var formatList = (label, values) => `${label}: ${values.map((value) => chalk3.cyan(value)).join(", ")}`;
  var mergeFilterOptions = (config, cliOptions) => {
  const messages = [];
  const configInclude = config?.include;
@@ -1004,119 +995,15 @@ var mergeFilterOptions = (config, cliOptions) => {
  };
  };

- // src/utils/entry-detection.ts
- import * as fs5 from "node:fs";
- import * as path6 from "node:path";
- function detectEntryPoint(repoDir) {
- const pkgPath = path6.join(repoDir, "package.json");
- if (!fs5.existsSync(pkgPath)) {
- throw new Error("No package.json found - not a valid npm package");
- }
- let pkg;
- try {
- pkg = JSON.parse(fs5.readFileSync(pkgPath, "utf-8"));
- } catch {
- throw new Error("Failed to parse package.json");
- }
- if (typeof pkg.types === "string") {
- const resolved = resolveToTs(repoDir, pkg.types);
- if (resolved) {
- return { entryPath: resolved, source: "types" };
- }
- }
- if (typeof pkg.typings === "string") {
- const resolved = resolveToTs(repoDir, pkg.typings);
- if (resolved) {
- return { entryPath: resolved, source: "types" };
- }
- }
- const exports = pkg.exports;
- if (exports) {
- const mainExport = exports["."];
- if (typeof mainExport === "object" && mainExport !== null) {
- const exportObj = mainExport;
- if (typeof exportObj.types === "string") {
- const resolved = resolveToTs(repoDir, exportObj.types);
- if (resolved) {
- return { entryPath: resolved, source: "exports" };
- }
- }
- }
- }
- if (typeof pkg.main === "string") {
- const resolved = resolveToTs(repoDir, pkg.main);
- if (resolved) {
- return { entryPath: resolved, source: "main" };
- }
- }
- if (typeof pkg.module === "string") {
- const resolved = resolveToTs(repoDir, pkg.module);
- if (resolved) {
- return { entryPath: resolved, source: "module" };
- }
- }
- const commonPaths = [
- "src/index.ts",
- "src/index.tsx",
- "src/main.ts",
- "index.ts",
- "lib/index.ts",
- "source/index.ts"
- ];
- for (const p of commonPaths) {
- if (fs5.existsSync(path6.join(repoDir, p))) {
- return { entryPath: p, source: "fallback" };
- }
- }
- throw new Error("Could not detect TypeScript entry point. No types field in package.json and no common entry paths found.");
- }
- function resolveToTs(baseDir, filePath) {
- const normalized = filePath.replace(/^\.\//, "");
- const isSourceTs = normalized.endsWith(".ts") && !normalized.endsWith(".d.ts") || normalized.endsWith(".tsx");
- if (isSourceTs) {
- if (fs5.existsSync(path6.join(baseDir, normalized))) {
- return normalized;
- }
- }
- const candidates = [];
- if (normalized.startsWith("dist/")) {
- const srcPath = normalized.replace(/^dist\//, "src/");
- candidates.push(srcPath.replace(/\.js$/, ".ts"));
- candidates.push(srcPath.replace(/\.d\.ts$/, ".ts"));
- candidates.push(srcPath.replace(/\.js$/, ".tsx"));
- }
- if (normalized.startsWith("build/")) {
- const srcPath = normalized.replace(/^build\//, "src/");
- candidates.push(srcPath.replace(/\.js$/, ".ts"));
- candidates.push(srcPath.replace(/\.d\.ts$/, ".ts"));
- }
- if (normalized.startsWith("lib/")) {
- const srcPath = normalized.replace(/^lib\//, "src/");
- candidates.push(srcPath.replace(/\.js$/, ".ts"));
- candidates.push(srcPath.replace(/\.d\.ts$/, ".ts"));
- }
- candidates.push(normalized.replace(/\.js$/, ".ts"));
- candidates.push(normalized.replace(/\.d\.ts$/, ".ts"));
- candidates.push(normalized.replace(/\.js$/, ".tsx"));
- if (normalized.endsWith(".d.ts")) {
- const baseName = path6.basename(normalized, ".d.ts");
- candidates.push(`src/${baseName}.ts`);
- }
- for (const candidate of candidates) {
- if (candidate.endsWith(".d.ts"))
- continue;
- if (fs5.existsSync(path6.join(baseDir, candidate))) {
- return candidate;
- }
- }
- return;
- }
-
  // src/commands/generate.ts
- var defaultDependencies4 = {
- createDocCov: (options) => new DocCov3(options),
- writeFileSync: fs6.writeFileSync,
- spinner: (text) => ora3(text),
+ var defaultDependencies3 = {
+ createDocCov: (options) => new DocCov2(options),
+ writeFileSync: fs3.writeFileSync,
+ spinner: (text) => ora2({
+ text,
+ discardStdin: false,
+ hideCursor: true
+ }),
  log: console.log,
  error: console.error
  };
@@ -1135,38 +1022,46 @@ function stripDocsFields(spec) {
  }
  function formatDiagnosticOutput(prefix, diagnostic, baseDir) {
  const location = diagnostic.location;
- const relativePath = location?.file ? path7.relative(baseDir, location.file) || location.file : undefined;
- const locationText = location && relativePath ? chalk5.gray(`${relativePath}:${location.line ?? 1}:${location.column ?? 1}`) : null;
+ const relativePath = location?.file ? path4.relative(baseDir, location.file) || location.file : undefined;
+ const locationText = location && relativePath ? chalk4.gray(`${relativePath}:${location.line ?? 1}:${location.column ?? 1}`) : null;
  const locationPrefix = locationText ? `${locationText} ` : "";
  return `${prefix} ${locationPrefix}${diagnostic.message}`;
  }
  function registerGenerateCommand(program, dependencies = {}) {
  const { createDocCov, writeFileSync: writeFileSync2, spinner, log, error } = {
- ...defaultDependencies4,
+ ...defaultDependencies3,
  ...dependencies
  };
  program.command("generate [entry]").description("Generate OpenPkg specification for documentation coverage analysis").option("-o, --output <file>", "Output file", "openpkg.json").option("-p, --package <name>", "Target package name (for monorepos)").option("--cwd <dir>", "Working directory", process.cwd()).option("--skip-resolve", "Skip external type resolution from node_modules").option("--include <ids>", "Filter exports by identifier (comma-separated or repeated)").option("--exclude <ids>", "Exclude exports by identifier (comma-separated or repeated)").option("--show-diagnostics", "Print TypeScript diagnostics from analysis").option("--no-docs", "Omit docs coverage fields from output (pure structural spec)").option("-y, --yes", "Skip all prompts and use defaults").action(async (entry, options) => {
  try {
  let targetDir = options.cwd;
  let entryFile = entry;
+ const fileSystem = new NodeFileSystem2(options.cwd);
  if (options.package) {
- const packageDir = await findPackageInMonorepo(options.cwd, options.package);
- if (!packageDir) {
- throw new Error(`Package "${options.package}" not found in monorepo`);
+ const mono = await detectMonorepo2(fileSystem);
+ if (!mono.isMonorepo) {
+ throw new Error(`Not a monorepo. Remove --package flag for single-package repos.`);
  }
- targetDir = packageDir;
- log(chalk5.gray(`Found package at ${path7.relative(options.cwd, packageDir)}`));
+ const pkg = findPackageByName2(mono.packages, options.package);
+ if (!pkg) {
+ const available = mono.packages.map((p) => p.name).join(", ");
+ throw new Error(`Package "${options.package}" not found. Available: ${available}`);
+ }
+ targetDir = path4.join(options.cwd, pkg.path);
+ log(chalk4.gray(`Found package at ${pkg.path}`));
  }
  if (!entryFile) {
- const detected = detectEntryPoint(targetDir);
- entryFile = path7.join(targetDir, detected.entryPath);
- log(chalk5.gray(`Auto-detected entry point: ${detected.entryPath} (from ${detected.source})`));
+ const targetFs = new NodeFileSystem2(targetDir);
+ const detected = await detectEntryPoint2(targetFs);
+ entryFile = path4.join(targetDir, detected.path);
+ log(chalk4.gray(`Auto-detected entry point: ${detected.path} (from ${detected.source})`));
  } else {
- entryFile = path7.resolve(targetDir, entryFile);
- if (fs6.existsSync(entryFile) && fs6.statSync(entryFile).isDirectory()) {
- const detected = detectEntryPoint(entryFile);
- entryFile = path7.join(entryFile, detected.entryPath);
- log(chalk5.gray(`Auto-detected entry point: ${detected.entryPath} (from ${detected.source})`));
+ entryFile = path4.resolve(targetDir, entryFile);
+ if (fs3.existsSync(entryFile) && fs3.statSync(entryFile).isDirectory()) {
+ const dirFs = new NodeFileSystem2(entryFile);
+ const detected = await detectEntryPoint2(dirFs);
+ entryFile = path4.join(entryFile, detected.path);
+ log(chalk4.gray(`Auto-detected entry point: ${detected.path} (from ${detected.source})`));
  }
  }
  const resolveExternalTypes = !options.skipResolve;
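Entry-point and monorepo detection now live in @doccov/sdk behind a file-system abstraction rather than in per-command CLI utilities. A minimal sketch of that shared API as it is used in this bundle ("@acme/core" is an illustrative name):

import { NodeFileSystem, detectMonorepo, findPackageByName, detectEntryPoint } from "@doccov/sdk";

const fsys = new NodeFileSystem(process.cwd());
const mono = await detectMonorepo(fsys);
const pkg = mono.isMonorepo ? findPackageByName(mono.packages, "@acme/core") : null;
const targetDir = pkg ? pkg.path : process.cwd();
const detected = await detectEntryPoint(new NodeFileSystem(targetDir));
console.log(detected.path, detected.source); // e.g. { path, source } as logged by the commands above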
@@ -1178,15 +1073,15 @@ function registerGenerateCommand(program, dependencies = {}) {
  try {
  config = await loadDocCovConfig(targetDir);
  if (config?.filePath) {
- log(chalk5.gray(`Loaded configuration from ${path7.relative(targetDir, config.filePath)}`));
+ log(chalk4.gray(`Loaded configuration from ${path4.relative(targetDir, config.filePath)}`));
  }
  } catch (configError) {
- error(chalk5.red("Failed to load DocCov config:"), configError instanceof Error ? configError.message : configError);
+ error(chalk4.red("Failed to load DocCov config:"), configError instanceof Error ? configError.message : configError);
  process.exit(1);
  }
  const resolvedFilters = mergeFilterOptions(config, cliFilters);
  for (const message of resolvedFilters.messages) {
- log(chalk5.gray(`• ${message}`));
+ log(chalk4.gray(`• ${message}`));
  }
  const spinnerInstance = spinner("Generating OpenPkg spec...");
  spinnerInstance.start();
@@ -1210,7 +1105,7 @@ function registerGenerateCommand(program, dependencies = {}) {
  if (!result) {
  throw new Error("Failed to produce an OpenPkg spec.");
  }
- const outputPath = path7.resolve(process.cwd(), options.output);
+ const outputPath = path4.resolve(process.cwd(), options.output);
  let normalized = normalize(result.spec);
  if (options.docs === false) {
  normalized = stripDocsFields(normalized);
@@ -1219,85 +1114,85 @@ function registerGenerateCommand(program, dependencies = {}) {
  if (!validation.ok) {
  spinnerInstance.fail("Spec failed schema validation");
  for (const err of validation.errors) {
- error(chalk5.red(`schema: ${err.instancePath || "/"} ${err.message}`));
+ error(chalk4.red(`schema: ${err.instancePath || "/"} ${err.message}`));
  }
  process.exit(1);
  }
  writeFileSync2(outputPath, JSON.stringify(normalized, null, 2));
- log(chalk5.green(`✓ Generated ${options.output}`));
- log(chalk5.gray(` ${getArrayLength(normalized.exports)} exports`));
- log(chalk5.gray(` ${getArrayLength(normalized.types)} types`));
+ log(chalk4.green(`✓ Generated ${options.output}`));
+ log(chalk4.gray(` ${getArrayLength(normalized.exports)} exports`));
+ log(chalk4.gray(` ${getArrayLength(normalized.types)} types`));
  if (options.showDiagnostics && result.diagnostics.length > 0) {
  log("");
- log(chalk5.bold("Diagnostics"));
+ log(chalk4.bold("Diagnostics"));
  for (const diagnostic of result.diagnostics) {
- const prefix = diagnostic.severity === "error" ? chalk5.red("✖") : diagnostic.severity === "warning" ? chalk5.yellow("⚠") : chalk5.cyan("ℹ");
+ const prefix = diagnostic.severity === "error" ? chalk4.red("✖") : diagnostic.severity === "warning" ? chalk4.yellow("⚠") : chalk4.cyan("ℹ");
  log(formatDiagnosticOutput(prefix, diagnostic, targetDir));
  }
  }
  } catch (commandError) {
- error(chalk5.red("Error:"), commandError instanceof Error ? commandError.message : commandError);
+ error(chalk4.red("Error:"), commandError instanceof Error ? commandError.message : commandError);
  process.exit(1);
  }
  });
  }

  // src/commands/init.ts
- import * as fs7 from "node:fs";
- import * as path8 from "node:path";
- import chalk6 from "chalk";
- var defaultDependencies5 = {
- fileExists: fs7.existsSync,
- writeFileSync: fs7.writeFileSync,
- readFileSync: fs7.readFileSync,
+ import * as fs4 from "node:fs";
+ import * as path5 from "node:path";
+ import chalk5 from "chalk";
+ var defaultDependencies4 = {
+ fileExists: fs4.existsSync,
+ writeFileSync: fs4.writeFileSync,
+ readFileSync: fs4.readFileSync,
  log: console.log,
  error: console.error
  };
  function registerInitCommand(program, dependencies = {}) {
- const { fileExists: fileExists2, writeFileSync: writeFileSync3, readFileSync: readFileSync5, log, error } = {
- ...defaultDependencies5,
+ const { fileExists: fileExists2, writeFileSync: writeFileSync3, readFileSync: readFileSync3, log, error } = {
+ ...defaultDependencies4,
  ...dependencies
  };
  program.command("init").description("Create a DocCov configuration file").option("--cwd <dir>", "Working directory", process.cwd()).option("--format <format>", "Config format: auto, mjs, js, cjs", "auto").action((options) => {
- const cwd = path8.resolve(options.cwd);
+ const cwd = path5.resolve(options.cwd);
  const formatOption = String(options.format ?? "auto").toLowerCase();
  if (!isValidFormat(formatOption)) {
- error(chalk6.red(`Invalid format "${formatOption}". Use auto, mjs, js, or cjs.`));
+ error(chalk5.red(`Invalid format "${formatOption}". Use auto, mjs, js, or cjs.`));
  process.exitCode = 1;
  return;
  }
  const existing = findExistingConfig(cwd, fileExists2);
  if (existing) {
- error(chalk6.red(`A DocCov config already exists at ${path8.relative(cwd, existing) || "./doccov.config.*"}.`));
+ error(chalk5.red(`A DocCov config already exists at ${path5.relative(cwd, existing) || "./doccov.config.*"}.`));
  process.exitCode = 1;
  return;
  }
- const packageType = detectPackageType(cwd, fileExists2, readFileSync5);
+ const packageType = detectPackageType(cwd, fileExists2, readFileSync3);
  const targetFormat = resolveFormat(formatOption, packageType);
  if (targetFormat === "js" && packageType !== "module") {
- log(chalk6.yellow('Package is not marked as "type": "module"; creating doccov.config.js may require enabling ESM.'));
+ log(chalk5.yellow('Package is not marked as "type": "module"; creating doccov.config.js may require enabling ESM.'));
  }
  const fileName = `doccov.config.${targetFormat}`;
- const outputPath = path8.join(cwd, fileName);
+ const outputPath = path5.join(cwd, fileName);
  if (fileExists2(outputPath)) {
- error(chalk6.red(`Cannot create ${fileName}; file already exists.`));
+ error(chalk5.red(`Cannot create ${fileName}; file already exists.`));
  process.exitCode = 1;
  return;
  }
  const template = buildTemplate(targetFormat);
  writeFileSync3(outputPath, template, { encoding: "utf8" });
- log(chalk6.green(`✓ Created ${path8.relative(process.cwd(), outputPath)}`));
+ log(chalk5.green(`✓ Created ${path5.relative(process.cwd(), outputPath)}`));
  });
  }
  var isValidFormat = (value) => {
  return value === "auto" || value === "mjs" || value === "js" || value === "cjs";
  };
  var findExistingConfig = (cwd, fileExists2) => {
- let current = path8.resolve(cwd);
- const { root } = path8.parse(current);
+ let current = path5.resolve(cwd);
+ const { root } = path5.parse(current);
  while (true) {
  for (const candidate of DOCCOV_CONFIG_FILENAMES) {
- const candidatePath = path8.join(current, candidate);
+ const candidatePath = path5.join(current, candidate);
  if (fileExists2(candidatePath)) {
  return candidatePath;
  }
@@ -1305,17 +1200,17 @@ var findExistingConfig = (cwd, fileExists2) => {
  if (current === root) {
  break;
  }
- current = path8.dirname(current);
+ current = path5.dirname(current);
  }
  return null;
  };
- var detectPackageType = (cwd, fileExists2, readFileSync5) => {
+ var detectPackageType = (cwd, fileExists2, readFileSync3) => {
  const packageJsonPath = findNearestPackageJson(cwd, fileExists2);
  if (!packageJsonPath) {
  return;
  }
  try {
- const raw = readFileSync5(packageJsonPath, "utf8");
+ const raw = readFileSync3(packageJsonPath, "utf8");
  const parsed = JSON.parse(raw);
  if (parsed.type === "module") {
  return "module";
@@ -1327,17 +1222,17 @@ var detectPackageType = (cwd, fileExists2, readFileSync5) => {
  return;
  };
  var findNearestPackageJson = (cwd, fileExists2) => {
- let current = path8.resolve(cwd);
- const { root } = path8.parse(current);
+ let current = path5.resolve(cwd);
+ const { root } = path5.parse(current);
  while (true) {
- const candidate = path8.join(current, "package.json");
+ const candidate = path5.join(current, "package.json");
  if (fileExists2(candidate)) {
  return candidate;
  }
  if (current === root) {
  break;
  }
- current = path8.dirname(current);
+ current = path5.dirname(current);
  }
  return null;
  };
@@ -1373,11 +1268,17 @@ var buildTemplate = (format) => {
  };

  // src/commands/report.ts
- import * as fs8 from "node:fs";
- import * as path9 from "node:path";
- import { DocCov as DocCov4 } from "@doccov/sdk";
- import chalk7 from "chalk";
- import ora4 from "ora";
+ import * as fs5 from "node:fs";
+ import * as path6 from "node:path";
+ import {
+ DocCov as DocCov3,
+ detectEntryPoint as detectEntryPoint3,
+ detectMonorepo as detectMonorepo3,
+ findPackageByName as findPackageByName3,
+ NodeFileSystem as NodeFileSystem3
+ } from "@doccov/sdk";
+ import chalk6 from "chalk";
+ import ora3 from "ora";

  // src/reports/markdown.ts
  function bar(pct, width = 10) {
@@ -1554,25 +1455,38 @@ function registerReportCommand(program) {
1554
1455
  try {
1555
1456
  let spec;
1556
1457
  if (options.spec) {
1557
- const specPath = path9.resolve(options.cwd, options.spec);
1558
- spec = JSON.parse(fs8.readFileSync(specPath, "utf-8"));
1458
+ const specPath = path6.resolve(options.cwd, options.spec);
1459
+ spec = JSON.parse(fs5.readFileSync(specPath, "utf-8"));
1559
1460
  } else {
1560
1461
  let targetDir = options.cwd;
1561
1462
  let entryFile = entry;
1463
+ const fileSystem = new NodeFileSystem3(options.cwd);
1562
1464
  if (options.package) {
1563
- const packageDir = await findPackageInMonorepo(options.cwd, options.package);
1564
- if (!packageDir)
1565
- throw new Error(`Package "${options.package}" not found`);
1566
- targetDir = packageDir;
1465
+ const mono = await detectMonorepo3(fileSystem);
1466
+ if (!mono.isMonorepo) {
1467
+ throw new Error(`Not a monorepo. Remove --package flag for single-package repos.`);
1468
+ }
1469
+ const pkg = findPackageByName3(mono.packages, options.package);
1470
+ if (!pkg) {
1471
+ const available = mono.packages.map((p) => p.name).join(", ");
1472
+ throw new Error(`Package "${options.package}" not found. Available: ${available}`);
1473
+ }
1474
+ targetDir = path6.join(options.cwd, pkg.path);
1567
1475
  }
1568
1476
  if (!entryFile) {
1569
- entryFile = await findEntryPoint(targetDir, true);
1477
+ const targetFs = new NodeFileSystem3(targetDir);
1478
+ const detected = await detectEntryPoint3(targetFs);
1479
+ entryFile = path6.join(targetDir, detected.path);
1570
1480
  } else {
1571
- entryFile = path9.resolve(targetDir, entryFile);
1481
+ entryFile = path6.resolve(targetDir, entryFile);
1572
1482
  }
1573
- const spinner = ora4("Analyzing...").start();
1483
+ const spinner = ora3({
1484
+ text: "Analyzing...",
1485
+ discardStdin: false,
1486
+ hideCursor: true
1487
+ }).start();
1574
1488
  const resolveExternalTypes = !options.skipResolve;
1575
- const doccov = new DocCov4({ resolveExternalTypes });
1489
+ const doccov = new DocCov3({ resolveExternalTypes });
1576
1490
  const result = await doccov.analyzeFileWithDiagnostics(entryFile);
1577
1491
  spinner.succeed("Analysis complete");
1578
1492
  spec = result.spec;
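A note on the spinner change above: ora is now constructed with an options object instead of the string shorthand. text, discardStdin, and hideCursor are documented ora options; discardStdin defaults to true, which lets ora swallow stray keypresses but is known to interfere with stdin in some piped and CI environments, so the CLI opts out. A condensed sketch of the pattern:

    import ora from "ora";

    // Equivalent to ora("Analyzing...").start(), but with stdin left alone
    // so piped/CI sessions are not affected by ora's stdin handling.
    const spinner = ora({
      text: "Analyzing...",
      discardStdin: false,
      hideCursor: true
    }).start();
    spinner.succeed("Analysis complete");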
@@ -1589,26 +1503,36 @@ function registerReportCommand(program) {
  output = renderMarkdown(stats, { limit });
  }
  if (options.out) {
- const outPath = path9.resolve(options.cwd, options.out);
- fs8.writeFileSync(outPath, output);
- console.log(chalk7.green(`Report written to ${outPath}`));
+ const outPath = path6.resolve(options.cwd, options.out);
+ fs5.writeFileSync(outPath, output);
+ console.log(chalk6.green(`Report written to ${outPath}`));
  } else {
  console.log(output);
  }
  } catch (err) {
- console.error(chalk7.red("Error:"), err instanceof Error ? err.message : err);
+ console.error(chalk6.red("Error:"), err instanceof Error ? err.message : err);
  process.exitCode = 1;
  }
  });
  }

  // src/commands/scan.ts
- import * as fs11 from "node:fs";
+ import * as fs7 from "node:fs";
  import * as os from "node:os";
- import * as path12 from "node:path";
- import { DocCov as DocCov5 } from "@doccov/sdk";
- import chalk8 from "chalk";
- import ora5 from "ora";
+ import * as path8 from "node:path";
+ import {
+ DocCov as DocCov4,
+ detectBuildInfo,
+ detectEntryPoint as detectEntryPoint4,
+ detectMonorepo as detectMonorepo4,
+ detectPackageManager,
+ findPackageByName as findPackageByName4,
+ formatPackageList,
+ getInstallCommand,
+ NodeFileSystem as NodeFileSystem4
+ } from "@doccov/sdk";
+ import chalk7 from "chalk";
+ import ora4 from "ora";
  import { simpleGit } from "simple-git";

  // src/utils/github-url.ts
@@ -1642,17 +1566,17 @@ function buildDisplayUrl(parsed) {
  }

  // src/utils/llm-build-plan.ts
- import * as fs9 from "node:fs";
- import * as path10 from "node:path";
- import { createAnthropic as createAnthropic2 } from "@ai-sdk/anthropic";
- import { createOpenAI as createOpenAI2 } from "@ai-sdk/openai";
- import { generateObject as generateObject2 } from "ai";
- import { z as z3 } from "zod";
- var BuildPlanSchema = z3.object({
- installCommand: z3.string().optional().describe("Additional install command if needed"),
- buildCommands: z3.array(z3.string()).describe('Build steps to run, e.g. ["npm run build:wasm"]'),
- entryPoint: z3.string().describe("Path to TS/TSX entry file after build"),
- notes: z3.string().optional().describe("Caveats or warnings")
+ import * as fs6 from "node:fs";
+ import * as path7 from "node:path";
+ import { createAnthropic as createAnthropic3 } from "@ai-sdk/anthropic";
+ import { createOpenAI as createOpenAI3 } from "@ai-sdk/openai";
+ import { generateObject as generateObject3 } from "ai";
+ import { z as z4 } from "zod";
+ var BuildPlanSchema = z4.object({
+ installCommand: z4.string().optional().describe("Additional install command if needed"),
+ buildCommands: z4.array(z4.string()).describe('Build steps to run, e.g. ["npm run build:wasm"]'),
+ entryPoint: z4.string().describe("Path to TS/TSX entry file after build"),
+ notes: z4.string().optional().describe("Caveats or warnings")
  });
  var CONTEXT_FILES = [
  "package.json",
@@ -1667,22 +1591,22 @@ var CONTEXT_FILES = [
  "wasm-pack.json"
  ];
  var MAX_FILE_CHARS = 2000;
- function getModel2() {
+ function getModel3() {
  const provider = process.env.DOCCOV_LLM_PROVIDER?.toLowerCase();
  if (provider === "anthropic" || process.env.ANTHROPIC_API_KEY) {
- const anthropic = createAnthropic2();
+ const anthropic = createAnthropic3();
  return anthropic("claude-sonnet-4-20250514");
  }
- const openai = createOpenAI2();
+ const openai = createOpenAI3();
  return openai("gpt-4o-mini");
  }
  async function gatherContextFiles(repoDir) {
  const sections = [];
  for (const fileName of CONTEXT_FILES) {
- const filePath = path10.join(repoDir, fileName);
- if (fs9.existsSync(filePath)) {
+ const filePath = path7.join(repoDir, fileName);
+ if (fs6.existsSync(filePath)) {
  try {
- let content = fs9.readFileSync(filePath, "utf-8");
+ let content = fs6.readFileSync(filePath, "utf-8");
  if (content.length > MAX_FILE_CHARS) {
  content = `${content.slice(0, MAX_FILE_CHARS)}
  ... (truncated)`;
@@ -1724,8 +1648,8 @@ async function generateBuildPlan(repoDir) {
  if (!context.trim()) {
  return null;
  }
- const model = getModel2();
- const { object } = await generateObject2({
+ const model = getModel3();
+ const { object } = await generateObject3({
  model,
  schema: BuildPlanSchema,
  prompt: BUILD_PLAN_PROMPT(context)
@@ -1733,119 +1657,20 @@ async function generateBuildPlan(repoDir) {
  return object;
  }
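For reference, a build plan that satisfies BuildPlanSchema above would have this shape (values are illustrative, not taken from the package):

    const examplePlan = {
      installCommand: "npm install --ignore-scripts",               // optional
      buildCommands: ["npm run build:wasm"],                        // may be empty
      entryPoint: "pkg/index.d.ts",
      notes: "Requires the Rust toolchain for a from-source build." // optional
    };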

- // src/utils/monorepo-detection.ts
- import * as fs10 from "node:fs";
- import * as path11 from "node:path";
- import { glob } from "glob";
- async function detectMonorepo(repoDir) {
- const pkgPath = path11.join(repoDir, "package.json");
- if (!fs10.existsSync(pkgPath)) {
- return { isMonorepo: false, packages: [], type: "none" };
- }
- let pkg;
- try {
- pkg = JSON.parse(fs10.readFileSync(pkgPath, "utf-8"));
- } catch {
- return { isMonorepo: false, packages: [], type: "none" };
- }
- if (pkg.workspaces) {
- const patterns = extractWorkspacePatterns(pkg.workspaces);
- const packages = await resolveWorkspacePackages(repoDir, patterns);
- return { isMonorepo: packages.length > 0, packages, type: "npm" };
- }
- const pnpmPath = path11.join(repoDir, "pnpm-workspace.yaml");
- if (fs10.existsSync(pnpmPath)) {
- const patterns = parsePnpmWorkspace(pnpmPath);
- const packages = await resolveWorkspacePackages(repoDir, patterns);
- return { isMonorepo: packages.length > 0, packages, type: "pnpm" };
- }
- const lernaPath = path11.join(repoDir, "lerna.json");
- if (fs10.existsSync(lernaPath)) {
- try {
- const lerna = JSON.parse(fs10.readFileSync(lernaPath, "utf-8"));
- const patterns = lerna.packages ?? ["packages/*"];
- const packages = await resolveWorkspacePackages(repoDir, patterns);
- return { isMonorepo: packages.length > 0, packages, type: "lerna" };
- } catch {}
- }
- return { isMonorepo: false, packages: [], type: "none" };
- }
- function extractWorkspacePatterns(workspaces) {
- if (Array.isArray(workspaces)) {
- return workspaces.filter((w) => typeof w === "string");
- }
- if (typeof workspaces === "object" && workspaces !== null) {
- const ws = workspaces;
- if (Array.isArray(ws.packages)) {
- return ws.packages.filter((w) => typeof w === "string");
- }
- }
- return [];
- }
- function parsePnpmWorkspace(filePath) {
- try {
- const content = fs10.readFileSync(filePath, "utf-8");
- const match = content.match(/packages:\s*\n((?:\s+-\s+.+\n?)+)/);
- if (match) {
- const lines = match[1].split(`
- `);
- return lines.map((line) => line.replace(/^\s+-\s+['"]?/, "").replace(/['"]?\s*$/, "")).filter(Boolean);
- }
- } catch {}
- return ["packages/*"];
- }
- async function resolveWorkspacePackages(repoDir, patterns) {
- const packages = [];
- for (const pattern of patterns) {
- const normalizedPattern = pattern.endsWith("/") ? pattern.slice(0, -1) : pattern;
- try {
- const matches = await glob(normalizedPattern, {
- cwd: repoDir,
- absolute: false
- });
- for (const match of matches) {
- const pkgJsonPath = path11.join(repoDir, match, "package.json");
- if (fs10.existsSync(pkgJsonPath)) {
- try {
- const pkgJson = JSON.parse(fs10.readFileSync(pkgJsonPath, "utf-8"));
- packages.push({
- name: pkgJson.name ?? path11.basename(match),
- path: path11.join(repoDir, match),
- relativePath: match
- });
- } catch {}
- }
- }
- } catch {}
- }
- return packages.sort((a, b) => a.name.localeCompare(b.name));
- }
- async function findPackage(repoDir, packageName) {
- const mono = await detectMonorepo(repoDir);
- if (!mono.isMonorepo) {
- return;
- }
- return mono.packages.find((pkg) => pkg.name === packageName || pkg.relativePath === packageName);
- }
- function formatPackageList(packages, limit = 10) {
- const lines = packages.slice(0, limit).map((pkg) => ` --package ${pkg.name}`);
- if (packages.length > limit) {
- lines.push(` ... and ${packages.length - limit} more`);
- }
- return lines.join(`
- `);
- }
-
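The whole src/utils/monorepo-detection.ts module above is removed in 0.5.1: detectMonorepo, findPackageByName, and formatPackageList are now imported from @doccov/sdk (see the import hunks earlier in this diff). Two call-site differences are visible below: findPackageByName is synchronous and takes the already-detected package list, and pkg.path is now repo-relative, so callers join it onto the repo root themselves. A sketch of the new pattern as used in this file (the SDK internals are not part of this diff):

    const fileSystem = new NodeFileSystem4(tempDir);
    const mono = await detectMonorepo4(fileSystem); // { isMonorepo, packages, ... }
    const pkg = findPackageByName4(mono.packages, options.package);
    if (pkg) {
      // pkg.path is relative to the repo root in the SDK version
      targetDir = path8.join(tempDir, pkg.path);
    }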
  // src/commands/scan.ts
- var defaultDependencies6 = {
- createDocCov: (options) => new DocCov5(options),
- spinner: (text) => ora5(text),
+ var defaultDependencies5 = {
+ createDocCov: (options) => new DocCov4(options),
+ spinner: (text) => ora4({
+ text,
+ discardStdin: false,
+ hideCursor: true
+ }),
  log: console.log,
  error: console.error
  };
  function registerScanCommand(program, dependencies = {}) {
  const { createDocCov, spinner, log, error } = {
- ...defaultDependencies6,
+ ...defaultDependencies5,
  ...dependencies
  };
  program.command("scan <url>").description("Analyze docs coverage for any public GitHub repository").option("--ref <branch>", "Branch or tag to analyze").option("--package <name>", "Target package in monorepo").option("--output <format>", "Output format: text or json", "text").option("--no-cleanup", "Keep cloned repo (for debugging)").option("--skip-install", "Skip dependency installation (faster, but may limit type resolution)").option("--skip-resolve", "Skip external type resolution from node_modules").option("--save-spec <path>", "Save full OpenPkg spec to file").action(async (url, options) => {
@@ -1855,65 +1680,75 @@ function registerScanCommand(program, dependencies = {}) {
  const cloneUrl = buildCloneUrl(parsed);
  const displayUrl = buildDisplayUrl(parsed);
  log("");
- log(chalk8.bold(`Scanning ${displayUrl}`));
- log(chalk8.gray(`Branch/tag: ${parsed.ref}`));
+ log(chalk7.bold(`Scanning ${displayUrl}`));
+ log(chalk7.gray(`Branch/tag: ${parsed.ref}`));
  log("");
- tempDir = path12.join(os.tmpdir(), `doccov-scan-${Date.now()}-${Math.random().toString(36).slice(2)}`);
- fs11.mkdirSync(tempDir, { recursive: true });
+ tempDir = path8.join(os.tmpdir(), `doccov-scan-${Date.now()}-${Math.random().toString(36).slice(2)}`);
+ fs7.mkdirSync(tempDir, { recursive: true });
  const cloneSpinner = spinner(`Cloning ${parsed.owner}/${parsed.repo}...`);
  cloneSpinner.start();
  try {
- const git = simpleGit();
- await git.clone(cloneUrl, tempDir, [
- "--depth",
- "1",
- "--branch",
- parsed.ref,
- "--single-branch"
- ]);
+ const git = simpleGit({
+ timeout: {
+ block: 30000
+ }
+ });
+ const originalEnv = { ...process.env };
+ process.env.GIT_TERMINAL_PROMPT = "0";
+ process.env.GIT_ASKPASS = "echo";
+ try {
+ await git.clone(cloneUrl, tempDir, [
+ "--depth",
+ "1",
+ "--branch",
+ parsed.ref,
+ "--single-branch"
+ ]);
+ } finally {
+ process.env = originalEnv;
+ }
  cloneSpinner.succeed(`Cloned ${parsed.owner}/${parsed.repo}`);
  } catch (cloneError) {
  cloneSpinner.fail("Failed to clone repository");
  const message = cloneError instanceof Error ? cloneError.message : String(cloneError);
+ if (message.includes("Authentication failed") || message.includes("could not read Username") || message.includes("terminal prompts disabled") || message.includes("Invalid username or password") || message.includes("Permission denied")) {
+ throw new Error(`Authentication required: This repository appears to be private. ` + `Public repositories only are currently supported.
+ ` + `Repository: ${displayUrl}`);
+ }
  if (message.includes("not found") || message.includes("404")) {
- throw new Error(`Repository not accessible or does not exist: ${displayUrl}`);
+ throw new Error(`Repository not accessible or does not exist: ${displayUrl}
+ ` + `Note: Private repositories are not currently supported.`);
  }
  if (message.includes("Could not find remote branch")) {
  throw new Error(`Branch or tag not found: ${parsed.ref}`);
  }
  throw new Error(`Clone failed: ${message}`);
  }
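The rewritten clone above hardens the scan against hangs: simple-git's timeout.block option kills a git step that produces no output for 30 seconds, and GIT_TERMINAL_PROMPT=0 plus a no-op GIT_ASKPASS are standard git mechanisms that make credential prompts fail immediately, which is what turns a private repository into the fast "Authentication required" error handled in the catch block. Note that the env mutation is process-global; it is safe here because only one clone runs at a time, and the finally block restores the original environment. A reusable sketch of the same pattern (not part of the package):

    async function cloneQuietly(git, url, dir, ref) {
      const saved = { ...process.env };
      process.env.GIT_TERMINAL_PROMPT = "0"; // git: fail instead of prompting
      process.env.GIT_ASKPASS = "echo";      // askpass helper returning an empty answer
      try {
        await git.clone(url, dir, ["--depth", "1", "--branch", ref, "--single-branch"]);
      } finally {
        process.env = saved; // undo the global mutation
      }
    }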
+ const fileSystem = new NodeFileSystem4(tempDir);
  if (options.skipInstall) {
- log(chalk8.gray("Skipping dependency installation (--skip-install)"));
+ log(chalk7.gray("Skipping dependency installation (--skip-install)"));
  } else {
  const installSpinner = spinner("Installing dependencies...");
  installSpinner.start();
  const installErrors = [];
  try {
  const { execSync } = await import("node:child_process");
- const lockfileCommands = [
- { file: "pnpm-lock.yaml", cmd: "pnpm install --frozen-lockfile" },
- { file: "bun.lock", cmd: "bun install --frozen-lockfile" },
- { file: "bun.lockb", cmd: "bun install --frozen-lockfile" },
- { file: "yarn.lock", cmd: "yarn install --frozen-lockfile" },
- { file: "package-lock.json", cmd: "npm install --legacy-peer-deps" }
- ];
+ const pmInfo = await detectPackageManager(fileSystem);
+ const installCmd = getInstallCommand(pmInfo);
+ const cmdString = installCmd.join(" ");
  let installed = false;
- for (const { file, cmd } of lockfileCommands) {
- if (fs11.existsSync(path12.join(tempDir, file))) {
- try {
- execSync(cmd, {
- cwd: tempDir,
- stdio: "pipe",
- timeout: 180000
- });
- installed = true;
- break;
- } catch (cmdError) {
- const stderr = cmdError?.stderr?.toString() ?? "";
- const msg = cmdError instanceof Error ? cmdError.message : String(cmdError);
- installErrors.push(`[${cmd}] ${stderr.slice(0, 150) || msg.slice(0, 150)}`);
- }
- }
+ if (pmInfo.lockfile) {
+ try {
+ execSync(cmdString, {
+ cwd: tempDir,
+ stdio: "pipe",
+ timeout: 180000
+ });
+ installed = true;
+ } catch (cmdError) {
+ const stderr = cmdError?.stderr?.toString() ?? "";
+ const msg = cmdError instanceof Error ? cmdError.message : String(cmdError);
+ installErrors.push(`[${cmdString}] ${stderr.slice(0, 150) || msg.slice(0, 150)}`);
  }
  }
  if (!installed) {
@@ -1947,67 +1782,46 @@ function registerScanCommand(program, dependencies = {}) {
  } else {
  installSpinner.warn("Could not install dependencies (analysis may be limited)");
  for (const err of installErrors) {
- log(chalk8.gray(`  ${err}`));
+ log(chalk7.gray(`  ${err}`));
  }
  }
  } catch (outerError) {
  const msg = outerError instanceof Error ? outerError.message : String(outerError);
  installSpinner.warn(`Could not install dependencies: ${msg.slice(0, 100)}`);
  for (const err of installErrors) {
- log(chalk8.gray(`  ${err}`));
+ log(chalk7.gray(`  ${err}`));
  }
  }
  }
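The hardcoded lockfileCommands table above is replaced by detectPackageManager and getInstallCommand from @doccov/sdk. The SDK implementation is not part of this diff; judging from the removed table and the new call sites (pmInfo.lockfile, an argv array joined with spaces), the mapping is presumably equivalent to something like:

    // Hypothetical sketch inferred from the removed table; not the SDK source.
    const LOCKFILE_INSTALL = {
      "pnpm-lock.yaml": ["pnpm", "install", "--frozen-lockfile"],
      "bun.lock": ["bun", "install", "--frozen-lockfile"],
      "bun.lockb": ["bun", "install", "--frozen-lockfile"],
      "yarn.lock": ["yarn", "install", "--frozen-lockfile"],
      "package-lock.json": ["npm", "install", "--legacy-peer-deps"]
    };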
  let targetDir = tempDir;
  let packageName;
- const mono = await detectMonorepo(tempDir);
+ const mono = await detectMonorepo4(fileSystem);
  if (mono.isMonorepo) {
  if (!options.package) {
  error("");
- error(chalk8.red(`Monorepo detected with ${mono.packages.length} packages. Specify target with --package:`));
+ error(chalk7.red(`Monorepo detected with ${mono.packages.length} packages. Specify target with --package:`));
  error("");
  error(formatPackageList(mono.packages));
  error("");
  throw new Error("Monorepo requires --package flag");
  }
- const pkg = await findPackage(tempDir, options.package);
+ const pkg = findPackageByName4(mono.packages, options.package);
  if (!pkg) {
  error("");
- error(chalk8.red(`Package "${options.package}" not found. Available packages:`));
+ error(chalk7.red(`Package "${options.package}" not found. Available packages:`));
  error("");
  error(formatPackageList(mono.packages));
  error("");
  throw new Error(`Package not found: ${options.package}`);
  }
- targetDir = pkg.path;
+ targetDir = path8.join(tempDir, pkg.path);
  packageName = pkg.name;
- log(chalk8.gray(`Analyzing package: ${packageName}`));
+ log(chalk7.gray(`Analyzing package: ${packageName}`));
  }
  const entrySpinner = spinner("Detecting entry point...");
  entrySpinner.start();
  let entryPath;
- const needsBuildStep = (pkgDir, repoRoot, entryFile) => {
- if (!entryFile.endsWith(".d.ts"))
- return false;
- const cargoLocations = [
- path12.join(pkgDir, "Cargo.toml"),
- path12.join(repoRoot, "Cargo.toml")
- ];
- const hasCargoToml = cargoLocations.some((p) => fs11.existsSync(p));
- const checkWasmScripts = (dir) => {
- const pkgPath = path12.join(dir, "package.json");
- if (fs11.existsSync(pkgPath)) {
- try {
- const pkg = JSON.parse(fs11.readFileSync(pkgPath, "utf-8"));
- const scripts = Object.values(pkg.scripts ?? {}).join(" ");
- return scripts.includes("wasm-pack") || scripts.includes("wasm");
- } catch {}
- }
- return false;
- };
- const hasWasmPackScript = checkWasmScripts(pkgDir) || checkWasmScripts(repoRoot);
- return hasCargoToml || hasWasmPackScript;
- };
+ const targetFs = mono.isMonorepo ? new NodeFileSystem4(targetDir) : fileSystem;
  let buildFailed = false;
  const runLlmFallback = async (reason) => {
  entrySpinner.text = `${reason}, trying LLM fallback...`;
@@ -2018,53 +1832,55 @@ function registerScanCommand(program, dependencies = {}) {
  if (plan.buildCommands.length > 0) {
  const { execSync } = await import("node:child_process");
  for (const cmd of plan.buildCommands) {
- log(chalk8.gray(`  Running: ${cmd}`));
+ log(chalk7.gray(`  Running: ${cmd}`));
  try {
  execSync(cmd, { cwd: targetDir, stdio: "pipe", timeout: 300000 });
  } catch (buildError) {
  buildFailed = true;
  const msg = buildError instanceof Error ? buildError.message : String(buildError);
  if (msg.includes("rustc") || msg.includes("cargo") || msg.includes("wasm-pack")) {
- log(chalk8.yellow(`  ⚠ Build requires Rust toolchain (not available)`));
+ log(chalk7.yellow(`  ⚠ Build requires Rust toolchain (not available)`));
  } else if (msg.includes("rimraf") || msg.includes("command not found")) {
- log(chalk8.yellow(`  ⚠ Build failed: missing dependencies`));
+ log(chalk7.yellow(`  ⚠ Build failed: missing dependencies`));
  } else {
- log(chalk8.yellow(`  ⚠ Build failed: ${msg.slice(0, 80)}`));
+ log(chalk7.yellow(`  ⚠ Build failed: ${msg.slice(0, 80)}`));
  }
  }
  }
  }
  if (plan.notes) {
- log(chalk8.gray(`  Note: ${plan.notes}`));
+ log(chalk7.gray(`  Note: ${plan.notes}`));
  }
  return plan.entryPoint;
  };
  try {
- const entry = detectEntryPoint(targetDir);
- if (needsBuildStep(targetDir, tempDir, entry.entryPath)) {
+ const entry = await detectEntryPoint4(targetFs);
+ const buildInfo = await detectBuildInfo(targetFs);
+ const needsBuildStep = entry.isDeclarationOnly && buildInfo.exoticIndicators.wasm;
+ if (needsBuildStep) {
  entrySpinner.text = "Detected .d.ts entry with WASM indicators...";
  const llmEntry = await runLlmFallback("WASM project detected");
  if (llmEntry) {
- entryPath = path12.join(targetDir, llmEntry);
+ entryPath = path8.join(targetDir, llmEntry);
  if (buildFailed) {
  entrySpinner.succeed(`Entry point: ${llmEntry} (using pre-committed declarations)`);
- log(chalk8.gray("  Coverage may be limited - generated .d.ts files typically lack JSDoc"));
+ log(chalk7.gray("  Coverage may be limited - generated .d.ts files typically lack JSDoc"));
  } else {
  entrySpinner.succeed(`Entry point: ${llmEntry} (from LLM fallback - WASM project)`);
  }
  } else {
- entryPath = path12.join(targetDir, entry.entryPath);
- entrySpinner.succeed(`Entry point: ${entry.entryPath} (from ${entry.source})`);
- log(chalk8.yellow("  ⚠ WASM project detected but no API key - analysis may be limited"));
+ entryPath = path8.join(targetDir, entry.path);
+ entrySpinner.succeed(`Entry point: ${entry.path} (from ${entry.source})`);
+ log(chalk7.yellow("  ⚠ WASM project detected but no API key - analysis may be limited"));
  }
  } else {
- entryPath = path12.join(targetDir, entry.entryPath);
- entrySpinner.succeed(`Entry point: ${entry.entryPath} (from ${entry.source})`);
+ entryPath = path8.join(targetDir, entry.path);
+ entrySpinner.succeed(`Entry point: ${entry.path} (from ${entry.source})`);
  }
  } catch (entryError) {
  const llmEntry = await runLlmFallback("Heuristics failed");
  if (llmEntry) {
- entryPath = path12.join(targetDir, llmEntry);
+ entryPath = path8.join(targetDir, llmEntry);
  entrySpinner.succeed(`Entry point: ${llmEntry} (from LLM fallback)`);
  } else {
  entrySpinner.fail("Could not detect entry point (set OPENAI_API_KEY for smart fallback)");
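Entry-point detection also moves to the SDK in this hunk. From the call sites, detectEntryPoint works against a NodeFileSystem and resolves to an object exposing at least path, source, and isDeclarationOnly (the removed helper returned entryPath instead of path), while detectBuildInfo reports WASM hints via exoticIndicators.wasm. A sketch of consuming that result (field names taken from the code above; other fields may exist):

    const entry = await detectEntryPoint4(targetFs);
    // e.g. { path: "src/index.ts", source: "package.json", isDeclarationOnly: false }
    const buildInfo = await detectBuildInfo(targetFs);
    if (entry.isDeclarationOnly && buildInfo.exoticIndicators.wasm) {
      // .d.ts-only entry in a WASM project: try the LLM build-plan fallback
    }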
@@ -2086,9 +1902,9 @@ function registerScanCommand(program, dependencies = {}) {
  const spec = result.spec;
  const coverageScore = spec.docs?.coverageScore ?? 0;
  if (options.saveSpec) {
- const specPath = path12.resolve(process.cwd(), options.saveSpec);
- fs11.writeFileSync(specPath, JSON.stringify(spec, null, 2));
- log(chalk8.green(`✓ Saved spec to ${options.saveSpec}`));
+ const specPath = path8.resolve(process.cwd(), options.saveSpec);
+ fs7.writeFileSync(specPath, JSON.stringify(spec, null, 2));
+ log(chalk7.green(`✓ Saved spec to ${options.saveSpec}`));
  }
  const undocumented = [];
  const driftIssues = [];
@@ -2125,7 +1941,7 @@ function registerScanCommand(program, dependencies = {}) {
  printTextResult(scanResult, log);
  }
  } catch (commandError) {
- error(chalk8.red("Error:"), commandError instanceof Error ? commandError.message : commandError);
+ error(chalk7.red("Error:"), commandError instanceof Error ? commandError.message : commandError);
  process.exitCode = 1;
  } finally {
  if (tempDir && options.cleanup !== false) {
@@ -2135,46 +1951,46 @@ function registerScanCommand(program, dependencies = {}) {
  stdio: "ignore"
  }).unref();
  } else if (tempDir) {
- log(chalk8.gray(`Repo preserved at: ${tempDir}`));
+ log(chalk7.gray(`Repo preserved at: ${tempDir}`));
  }
  }
  });
  }
  function printTextResult(result, log) {
  log("");
- log(chalk8.bold("DocCov Scan Results"));
+ log(chalk7.bold("DocCov Scan Results"));
  log("─".repeat(40));
  const repoName = result.packageName ? `${result.owner}/${result.repo} (${result.packageName})` : `${result.owner}/${result.repo}`;
- log(`Repository: ${chalk8.cyan(repoName)}`);
- log(`Branch: ${chalk8.gray(result.ref)}`);
+ log(`Repository: ${chalk7.cyan(repoName)}`);
+ log(`Branch: ${chalk7.gray(result.ref)}`);
  log("");
- const coverageColor = result.coverage >= 80 ? chalk8.green : result.coverage >= 50 ? chalk8.yellow : chalk8.red;
- log(chalk8.bold("Coverage"));
+ const coverageColor = result.coverage >= 80 ? chalk7.green : result.coverage >= 50 ? chalk7.yellow : chalk7.red;
+ log(chalk7.bold("Coverage"));
  log(`  ${coverageColor(`${result.coverage}%`)}`);
  log("");
- log(chalk8.bold("Stats"));
+ log(chalk7.bold("Stats"));
  log(`  ${result.exportCount} exports`);
  log(`  ${result.typeCount} types`);
  log(`  ${result.undocumented.length} undocumented`);
  log(`  ${result.driftCount} drift issues`);
  if (result.undocumented.length > 0) {
  log("");
- log(chalk8.bold("Undocumented Exports"));
+ log(chalk7.bold("Undocumented Exports"));
  for (const name of result.undocumented.slice(0, 10)) {
- log(chalk8.yellow(`  ! ${name}`));
+ log(chalk7.yellow(`  ! ${name}`));
  }
  if (result.undocumented.length > 10) {
- log(chalk8.gray(`  ... and ${result.undocumented.length - 10} more`));
+ log(chalk7.gray(`  ... and ${result.undocumented.length - 10} more`));
  }
  }
  if (result.drift.length > 0) {
  log("");
- log(chalk8.bold("Drift Issues"));
+ log(chalk7.bold("Drift Issues"));
  for (const d of result.drift.slice(0, 5)) {
- log(chalk8.red(`  • ${d.export}: ${d.issue}`));
+ log(chalk7.red(`  • ${d.export}: ${d.issue}`));
  }
  if (result.drift.length > 5) {
- log(chalk8.gray(`  ... and ${result.drift.length - 5} more`));
+ log(chalk7.gray(`  ... and ${result.drift.length - 5} more`));
  }
  }
  log("");
@@ -2182,14 +1998,13 @@ function printTextResult(result, log) {

  // src/cli.ts
  var __filename2 = fileURLToPath(import.meta.url);
- var __dirname2 = path13.dirname(__filename2);
- var packageJson = JSON.parse(readFileSync9(path13.join(__dirname2, "../package.json"), "utf-8"));
+ var __dirname2 = path9.dirname(__filename2);
+ var packageJson = JSON.parse(readFileSync5(path9.join(__dirname2, "../package.json"), "utf-8"));
  var program = new Command;
  program.name("doccov").description("DocCov - Documentation coverage and drift detection for TypeScript").version(packageJson.version);
  registerGenerateCommand(program);
  registerCheckCommand(program);
  registerDiffCommand(program);
- registerFixCommand(program);
  registerInitCommand(program);
  registerReportCommand(program);
  registerScanCommand(program);