@doccov/cli 0.4.6 → 0.5.0

This diff compares the published contents of two publicly available package versions as they appear in their public registry, and is provided for informational purposes only.
package/dist/cli.js CHANGED
@@ -29,10 +29,15 @@ var stringList = z.union([
  z.string(),
  z.array(z.string())
  ]);
+ var docsConfigSchema = z.object({
+ include: stringList.optional(),
+ exclude: stringList.optional()
+ });
  var docCovConfigSchema = z.object({
  include: stringList.optional(),
  exclude: stringList.optional(),
- plugins: z.array(z.unknown()).optional()
+ plugins: z.array(z.unknown()).optional(),
+ docs: docsConfigSchema.optional()
  });
  var normalizeList = (value) => {
  if (!value) {
@@ -45,10 +50,22 @@ var normalizeList = (value) => {
  var normalizeConfig = (input) => {
  const include = normalizeList(input.include);
  const exclude = normalizeList(input.exclude);
+ let docs;
+ if (input.docs) {
+ const docsInclude = normalizeList(input.docs.include);
+ const docsExclude = normalizeList(input.docs.exclude);
+ if (docsInclude || docsExclude) {
+ docs = {
+ include: docsInclude,
+ exclude: docsExclude
+ };
+ }
+ }
  return {
  include,
  exclude,
- plugins: input.plugins
+ plugins: input.plugins,
+ docs
  };
  };
 
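The two hunks above add an optional docs block to the config schema and thread it through normalizeConfig. A minimal sketch of a config that exercises the new field — the file name follows the doccov.config.* convention used by the init command later in this diff, and the glob patterns are placeholders:

    // doccov.config.mjs — hedged example; the patterns are illustrative
    export default {
      include: ["src/**"],
      exclude: ["src/internal/**"],
      // New in 0.5.0: markdown docs that `doccov diff` scans for impact
      docs: {
        include: ["docs/**/*.md", "README.md"],
      },
    };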
@@ -124,21 +141,33 @@ ${formatIssues(issues)}`);
  // src/config/index.ts
  var defineConfig = (config) => config;
  // src/cli.ts
- import { readFileSync as readFileSync9 } from "node:fs";
- import * as path13 from "node:path";
+ import { readFileSync as readFileSync5 } from "node:fs";
+ import * as path9 from "node:path";
  import { fileURLToPath } from "node:url";
  import { Command } from "commander";
 
  // src/commands/check.ts
- import * as fs2 from "node:fs";
- import * as path3 from "node:path";
+ import * as fs from "node:fs";
+ import * as path2 from "node:path";
  import {
+ applyEdits,
+ categorizeDrifts,
+ createSourceFile,
  DocCov,
+ detectEntryPoint,
  detectExampleAssertionFailures,
  detectExampleRuntimeErrors,
+ detectMonorepo,
+ findPackageByName,
+ findJSDocLocation,
+ generateFixesForExport,
  hasNonAssertionComments,
+ mergeFixes,
+ NodeFileSystem,
  parseAssertions,
- runExamplesWithPackage
+ parseJSDocToPatch,
+ runExamplesWithPackage,
+ serializeJSDoc
  } from "@doccov/sdk";
  import chalk from "chalk";
  import ora from "ora";
@@ -211,144 +240,6 @@ async function parseAssertionsWithLLM(code) {
  }
  }
 
- // src/utils/package-utils.ts
- import * as fs from "node:fs";
- import * as path2 from "node:path";
- async function findEntryPoint(packageDir, preferSource = false) {
- const packageJsonPath = path2.join(packageDir, "package.json");
- if (!fs.existsSync(packageJsonPath)) {
- return findDefaultEntryPoint(packageDir);
- }
- const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, "utf-8"));
- if (preferSource) {
- const srcIndex = path2.join(packageDir, "src/index.ts");
- if (fs.existsSync(srcIndex)) {
- return srcIndex;
- }
- }
- if (!preferSource && (packageJson.types || packageJson.typings)) {
- const typesPath = path2.join(packageDir, packageJson.types || packageJson.typings);
- if (fs.existsSync(typesPath)) {
- return typesPath;
- }
- }
- if (packageJson.exports) {
- const exportPath = resolveExportsField(packageJson.exports, packageDir);
- if (exportPath) {
- return exportPath;
- }
- }
- if (packageJson.main) {
- const mainBase = packageJson.main.replace(/\.(js|mjs|cjs)$/, "");
- const dtsPath = path2.join(packageDir, `${mainBase}.d.ts`);
- if (fs.existsSync(dtsPath)) {
- return dtsPath;
- }
- const tsPath = path2.join(packageDir, `${mainBase}.ts`);
- if (fs.existsSync(tsPath)) {
- return tsPath;
- }
- const mainPath = path2.join(packageDir, packageJson.main);
- if (fs.existsSync(mainPath) && fs.statSync(mainPath).isDirectory()) {
- const indexDts = path2.join(mainPath, "index.d.ts");
- const indexTs = path2.join(mainPath, "index.ts");
- if (fs.existsSync(indexDts))
- return indexDts;
- if (fs.existsSync(indexTs))
- return indexTs;
- }
- }
- return findDefaultEntryPoint(packageDir);
- }
- function resolveExportsField(exports, packageDir) {
- if (typeof exports === "string") {
- return findTypeScriptFile(path2.join(packageDir, exports));
- }
- if (typeof exports === "object" && exports !== null && "." in exports) {
- const dotExport = exports["."];
- if (typeof dotExport === "string") {
- return findTypeScriptFile(path2.join(packageDir, dotExport));
- }
- if (dotExport && typeof dotExport === "object") {
- const dotRecord = dotExport;
- const typesEntry = dotRecord.types;
- if (typeof typesEntry === "string") {
- const typesPath = path2.join(packageDir, typesEntry);
- if (fs.existsSync(typesPath)) {
- return typesPath;
- }
- }
- for (const condition of ["import", "require", "default"]) {
- const target = dotRecord[condition];
- if (typeof target === "string") {
- const result = findTypeScriptFile(path2.join(packageDir, target));
- if (result)
- return result;
- }
- }
- }
- }
- return null;
- }
- function findTypeScriptFile(jsPath) {
- if (!fs.existsSync(jsPath))
- return null;
- const dtsPath = jsPath.replace(/\.(js|mjs|cjs)$/, ".d.ts");
- if (fs.existsSync(dtsPath)) {
- return dtsPath;
- }
- const tsPath = jsPath.replace(/\.(js|mjs|cjs)$/, ".ts");
- if (fs.existsSync(tsPath)) {
- return tsPath;
- }
- return null;
- }
- async function findDefaultEntryPoint(packageDir) {
- const candidates = [
- "dist/index.d.ts",
- "dist/index.ts",
- "lib/index.d.ts",
- "lib/index.ts",
- "src/index.ts",
- "index.d.ts",
- "index.ts"
- ];
- for (const candidate of candidates) {
- const fullPath = path2.join(packageDir, candidate);
- if (fs.existsSync(fullPath)) {
- return fullPath;
- }
- }
- throw new Error(`Could not find entry point in ${packageDir}`);
- }
- async function findPackageInMonorepo(rootDir, packageName) {
- const rootPackageJsonPath = path2.join(rootDir, "package.json");
- if (!fs.existsSync(rootPackageJsonPath)) {
- return null;
- }
- const rootPackageJson = JSON.parse(fs.readFileSync(rootPackageJsonPath, "utf-8"));
- const workspacePatterns = Array.isArray(rootPackageJson.workspaces) ? rootPackageJson.workspaces : rootPackageJson.workspaces?.packages || [];
- for (const pattern of workspacePatterns) {
- const searchPath = path2.join(rootDir, pattern.replace("/**", "").replace("/*", ""));
- if (fs.existsSync(searchPath) && fs.statSync(searchPath).isDirectory()) {
- const entries = fs.readdirSync(searchPath, { withFileTypes: true });
- for (const entry of entries) {
- if (entry.isDirectory()) {
- const packagePath = path2.join(searchPath, entry.name);
- const packageJsonPath = path2.join(packagePath, "package.json");
- if (fs.existsSync(packageJsonPath)) {
- const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, "utf-8"));
- if (packageJson.name === packageName) {
- return packagePath;
- }
- }
- }
- }
- }
- }
- return null;
- }
-
  // src/commands/check.ts
  var defaultDependencies = {
  createDocCov: (options) => new DocCov(options),
@@ -356,32 +247,66 @@ var defaultDependencies = {
  log: console.log,
  error: console.error
  };
+ function collectDriftsFromExports(exports) {
+ const results = [];
+ for (const exp of exports) {
+ for (const drift of exp.docs?.drift ?? []) {
+ results.push({ export: exp, drift });
+ }
+ }
+ return results;
+ }
+ function filterDriftsByType(drifts, onlyTypes) {
+ if (!onlyTypes)
+ return drifts;
+ const allowedTypes = new Set(onlyTypes.split(",").map((t) => t.trim()));
+ return drifts.filter((d) => allowedTypes.has(d.drift.type));
+ }
+ function groupByExport(drifts) {
+ const map = new Map;
+ for (const { export: exp, drift } of drifts) {
+ const existing = map.get(exp) ?? [];
+ existing.push(drift);
+ map.set(exp, existing);
+ }
+ return map;
+ }
  function registerCheckCommand(program, dependencies = {}) {
  const { createDocCov, spinner, log, error } = {
  ...defaultDependencies,
  ...dependencies
  };
- program.command("check [entry]").description("Fail if documentation coverage falls below a threshold").option("--cwd <dir>", "Working directory", process.cwd()).option("--package <name>", "Target package name (for monorepos)").option("--min-coverage <percentage>", "Minimum docs coverage percentage (0-100)", (value) => Number(value)).option("--require-examples", "Require at least one @example for every export").option("--run-examples", "Execute @example blocks and fail on runtime errors").option("--ignore-drift", "Do not fail on documentation drift").option("--skip-resolve", "Skip external type resolution from node_modules").action(async (entry, options) => {
+ program.command("check [entry]").description("Fail if documentation coverage falls below a threshold").option("--cwd <dir>", "Working directory", process.cwd()).option("--package <name>", "Target package name (for monorepos)").option("--min-coverage <percentage>", "Minimum docs coverage percentage (0-100)", (value) => Number(value)).option("--require-examples", "Require at least one @example for every export").option("--run-examples", "Execute @example blocks and fail on runtime errors").option("--ignore-drift", "Do not fail on documentation drift").option("--skip-resolve", "Skip external type resolution from node_modules").option("--write", "Auto-fix drift issues").option("--only <types>", "Only fix specific drift types (comma-separated)").option("--dry-run", "Preview fixes without writing (requires --write)").action(async (entry, options) => {
  try {
  let targetDir = options.cwd;
  let entryFile = entry;
+ const fileSystem = new NodeFileSystem(options.cwd);
  if (options.package) {
- const packageDir = await findPackageInMonorepo(options.cwd, options.package);
- if (!packageDir) {
- throw new Error(`Package "${options.package}" not found in monorepo`);
+ const mono = await detectMonorepo(fileSystem);
+ if (!mono.isMonorepo) {
+ throw new Error(`Not a monorepo. Remove --package flag for single-package repos.`);
  }
- targetDir = packageDir;
- log(chalk.gray(`Found package at ${path3.relative(options.cwd, packageDir)}`));
+ const pkg = findPackageByName(mono.packages, options.package);
+ if (!pkg) {
+ const available = mono.packages.map((p) => p.name).join(", ");
+ throw new Error(`Package "${options.package}" not found. Available: ${available}`);
+ }
+ targetDir = path2.join(options.cwd, pkg.path);
+ log(chalk.gray(`Found package at ${pkg.path}`));
  }
  if (!entryFile) {
- entryFile = await findEntryPoint(targetDir, true);
- log(chalk.gray(`Auto-detected entry point: ${path3.relative(targetDir, entryFile)}`));
+ const targetFs = new NodeFileSystem(targetDir);
+ const detected = await detectEntryPoint(targetFs);
+ entryFile = path2.join(targetDir, detected.path);
+ log(chalk.gray(`Auto-detected entry point: ${detected.path} (from ${detected.source})`));
  } else {
- entryFile = path3.resolve(targetDir, entryFile);
- if (fs2.existsSync(entryFile) && fs2.statSync(entryFile).isDirectory()) {
+ entryFile = path2.resolve(targetDir, entryFile);
+ if (fs.existsSync(entryFile) && fs.statSync(entryFile).isDirectory()) {
  targetDir = entryFile;
- entryFile = await findEntryPoint(entryFile, true);
- log(chalk.gray(`Auto-detected entry point: ${entryFile}`));
+ const dirFs = new NodeFileSystem(entryFile);
+ const detected = await detectEntryPoint(dirFs);
+ entryFile = path2.join(entryFile, detected.path);
+ log(chalk.gray(`Auto-detected entry point: ${detected.path}`));
  }
  }
  const minCoverage = clampCoverage(options.minCoverage ?? 80);
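Here check stops using the CLI-local findPackageInMonorepo/findEntryPoint helpers (deleted above) and delegates to the SDK. A hedged sketch of the same resolution flow in isolation, using only the calls visible in this hunk; the package name is a placeholder:

    import * as path from "node:path";
    import {
      detectEntryPoint,
      detectMonorepo,
      findPackageByName,
      NodeFileSystem
    } from "@doccov/sdk";

    // Resolve a workspace package, then its entry point, as `check` does above.
    const fsAdapter = new NodeFileSystem(process.cwd());
    const mono = await detectMonorepo(fsAdapter);
    if (mono.isMonorepo) {
      const pkg = findPackageByName(mono.packages, "@example/pkg"); // placeholder name
      if (pkg) {
        const pkgDir = path.join(process.cwd(), pkg.path);
        const detected = await detectEntryPoint(new NodeFileSystem(pkgDir));
        console.log(path.join(pkgDir, detected.path), `(from ${detected.source})`);
      }
    }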
@@ -523,7 +448,116 @@ function registerCheckCommand(program, dependencies = {}) {
  const coverageScore = spec.docs?.coverageScore ?? 0;
  const failingExports = collectFailingExports(spec.exports ?? [], minCoverage);
  const missingExamples = options.requireExamples ? failingExports.filter((item) => item.missing?.includes("examples")) : [];
- const driftExports = [...collectDrift(spec.exports ?? []), ...runtimeDrifts];
+ let driftExports = [...collectDrift(spec.exports ?? []), ...runtimeDrifts];
+ const fixedDriftKeys = new Set;
+ if (options.write && driftExports.length > 0) {
+ const allDrifts = collectDriftsFromExports(spec.exports ?? []);
+ const filteredDrifts = filterDriftsByType(allDrifts, options.only);
+ if (filteredDrifts.length === 0 && options.only) {
+ log(chalk.yellow("No matching drift issues for the specified types."));
+ } else if (filteredDrifts.length > 0) {
+ const { fixable, nonFixable } = categorizeDrifts(filteredDrifts.map((d) => d.drift));
+ if (fixable.length === 0) {
+ log(chalk.yellow(`Found ${nonFixable.length} drift issue(s), but none are auto-fixable.`));
+ } else {
+ log("");
+ log(chalk.bold(`Found ${fixable.length} fixable issue(s)`));
+ if (nonFixable.length > 0) {
+ log(chalk.gray(`(${nonFixable.length} non-fixable issue(s) skipped)`));
+ }
+ log("");
+ const groupedDrifts = groupByExport(filteredDrifts.filter((d) => fixable.includes(d.drift)));
+ const edits = [];
+ const editsByFile = new Map;
+ for (const [exp, drifts] of groupedDrifts) {
+ if (!exp.source?.file) {
+ log(chalk.gray(` Skipping ${exp.name}: no source location`));
+ continue;
+ }
+ if (exp.source.file.endsWith(".d.ts")) {
+ log(chalk.gray(` Skipping ${exp.name}: declaration file`));
+ continue;
+ }
+ const filePath = path2.resolve(targetDir, exp.source.file);
+ if (!fs.existsSync(filePath)) {
+ log(chalk.gray(` Skipping ${exp.name}: file not found`));
+ continue;
+ }
+ const sourceFile = createSourceFile(filePath);
+ const location = findJSDocLocation(sourceFile, exp.name, exp.source.line);
+ if (!location) {
+ log(chalk.gray(` Skipping ${exp.name}: could not find declaration`));
+ continue;
+ }
+ let existingPatch = {};
+ if (location.hasExisting && location.existingJSDoc) {
+ existingPatch = parseJSDocToPatch(location.existingJSDoc);
+ }
+ const fixes = generateFixesForExport({ ...exp, docs: { ...exp.docs, drift: drifts } }, existingPatch);
+ if (fixes.length === 0)
+ continue;
+ for (const drift of drifts) {
+ fixedDriftKeys.add(`${exp.name}:${drift.issue}`);
+ }
+ const mergedPatch = mergeFixes(fixes, existingPatch);
+ const newJSDoc = serializeJSDoc(mergedPatch, location.indent);
+ const edit = {
+ filePath,
+ symbolName: exp.name,
+ startLine: location.startLine,
+ endLine: location.endLine,
+ hasExisting: location.hasExisting,
+ existingJSDoc: location.existingJSDoc,
+ newJSDoc,
+ indent: location.indent
+ };
+ edits.push(edit);
+ const fileEdits = editsByFile.get(filePath) ?? [];
+ fileEdits.push({ export: exp, edit, fixes, existingPatch });
+ editsByFile.set(filePath, fileEdits);
+ }
+ if (edits.length > 0) {
+ if (options.dryRun) {
+ log(chalk.bold("Dry run - changes that would be made:"));
+ log("");
+ for (const [filePath, fileEdits] of editsByFile) {
+ const relativePath = path2.relative(targetDir, filePath);
+ log(chalk.cyan(` ${relativePath}:`));
+ for (const { export: exp, edit, fixes } of fileEdits) {
+ const lineInfo = edit.hasExisting ? `lines ${edit.startLine + 1}-${edit.endLine + 1}` : `line ${edit.startLine + 1}`;
+ log(` ${chalk.bold(exp.name)} [${lineInfo}]`);
+ for (const fix of fixes) {
+ log(chalk.green(` + ${fix.description}`));
+ }
+ }
+ log("");
+ }
+ log(chalk.gray("Run without --dry-run to apply these changes."));
+ } else {
+ const applySpinner = spinner("Applying fixes...");
+ applySpinner.start();
+ const applyResult = await applyEdits(edits);
+ if (applyResult.errors.length > 0) {
+ applySpinner.warn("Some fixes could not be applied");
+ for (const err of applyResult.errors) {
+ error(chalk.red(` ${err.file}: ${err.error}`));
+ }
+ } else {
+ applySpinner.succeed(`Applied ${applyResult.editsApplied} fix(es) to ${applyResult.filesModified} file(s)`);
+ }
+ log("");
+ for (const [filePath, fileEdits] of editsByFile) {
+ const relativePath = path2.relative(targetDir, filePath);
+ log(chalk.green(` ✓ ${relativePath}: ${fileEdits.length} fix(es)`));
+ }
+ }
+ }
+ }
+ }
+ if (!options.dryRun) {
+ driftExports = driftExports.filter((d) => !fixedDriftKeys.has(`${d.name}:${d.issue}`));
+ }
+ }
  const coverageFailed = coverageScore < minCoverage;
  const hasMissingExamples = missingExamples.length > 0;
  const hasDrift = !options.ignoreDrift && driftExports.length > 0;
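This hunk moves auto-fixing into check behind the new --write flag (doccov check --write, optionally with --dry-run and --only <types>), reusing the helper trio defined earlier in this file. A hedged sketch of how those helpers compose; the export and drift values are toy data shaped like the docs.drift entries this code reads, and the type name is illustrative:

    // Toy input mirroring the shape consumed above.
    const exportsList = [
      { name: "parseAssertions", docs: { drift: [{ type: "param-mismatch", issue: "example issue" }] } },
    ];
    const all = collectDriftsFromExports(exportsList);          // [{ export, drift }]
    const filtered = filterDriftsByType(all, "param-mismatch"); // what --only does
    const grouped = groupByExport(filtered);                    // Map<export, drift[]>
    for (const [exp, drifts] of grouped) {
      console.log(exp.name, drifts.length);
    }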
@@ -613,30 +647,131 @@ function collectDrift(exportsList) {
  }
 
  // src/commands/diff.ts
- import * as fs3 from "node:fs";
- import * as path4 from "node:path";
- import { diffSpec } from "@openpkg-ts/spec";
+ import * as fs2 from "node:fs";
+ import * as path3 from "node:path";
+ import {
+ diffSpecWithDocs,
+ getDocsImpactSummary,
+ hasDocsImpact,
+ parseMarkdownFiles
+ } from "@doccov/sdk";
  import chalk2 from "chalk";
+ import { glob } from "glob";
+
+ // src/utils/docs-impact-ai.ts
+ import { createAnthropic as createAnthropic2 } from "@ai-sdk/anthropic";
+ import { createOpenAI as createOpenAI2 } from "@ai-sdk/openai";
+ import { generateObject as generateObject2, generateText } from "ai";
+ import { z as z3 } from "zod";
+ var CodeBlockUsageSchema = z3.object({
+ isImpacted: z3.boolean().describe("Whether the code block is affected by the change"),
+ reason: z3.string().describe("Explanation of why/why not the code is impacted"),
+ usageType: z3.enum(["direct-call", "import-only", "indirect", "not-used"]).describe("How the export is used in this code block"),
+ suggestedFix: z3.string().optional().describe("If impacted, the suggested code change"),
+ confidence: z3.enum(["high", "medium", "low"]).describe("Confidence level of the analysis")
+ });
+ var MultiBlockAnalysisSchema = z3.object({
+ groups: z3.array(z3.object({
+ blockIndices: z3.array(z3.number()).describe("Indices of blocks that should run together"),
+ reason: z3.string().describe("Why these blocks are related")
+ })).describe("Groups of related code blocks"),
+ skippedBlocks: z3.array(z3.number()).describe("Indices of blocks that should be skipped (incomplete/illustrative)")
+ });
+ function getModel2() {
+ const provider = process.env.DOCCOV_LLM_PROVIDER?.toLowerCase();
+ if (provider === "anthropic" || process.env.ANTHROPIC_API_KEY) {
+ const anthropic = createAnthropic2();
+ return anthropic("claude-sonnet-4-20250514");
+ }
+ const openai = createOpenAI2();
+ return openai("gpt-4o-mini");
+ }
+ function isAIDocsAnalysisAvailable() {
+ return Boolean(process.env.OPENAI_API_KEY || process.env.ANTHROPIC_API_KEY);
+ }
+ async function generateImpactSummary(impacts) {
+ if (!isAIDocsAnalysisAvailable()) {
+ return null;
+ }
+ if (impacts.length === 0) {
+ return "No documentation impacts detected.";
+ }
+ try {
+ const { text } = await generateText({
+ model: getModel2(),
+ prompt: `Summarize these documentation impacts for a GitHub PR comment.
+
+ Impacts:
+ ${impacts.map((i) => `- ${i.file}: ${i.exportName} (${i.changeType})`).join(`
+ `)}
+
+ Write a brief, actionable summary (2-3 sentences) explaining:
+ 1. How many files/references are affected
+ 2. What type of updates are needed
+ 3. Priority recommendation
+
+ Keep it concise and developer-friendly.`
+ });
+ return text.trim();
+ } catch {
+ return null;
+ }
+ }
+
+ // src/commands/diff.ts
  var defaultDependencies2 = {
- readFileSync: fs3.readFileSync,
+ readFileSync: fs2.readFileSync,
  log: console.log,
  error: console.error
  };
  function registerDiffCommand(program, dependencies = {}) {
- const { readFileSync: readFileSync3, log, error } = {
+ const { readFileSync: readFileSync2, log, error } = {
  ...defaultDependencies2,
  ...dependencies
  };
- program.command("diff <base> <head>").description("Compare two OpenPkg specs and report coverage delta").option("--output <format>", "Output format: json or text", "text").option("--fail-on-regression", "Exit with error if coverage regressed").option("--fail-on-drift", "Exit with error if new drift was introduced").action((base, head, options) => {
+ program.command("diff <base> <head>").description("Compare two OpenPkg specs and report coverage delta").option("--output <format>", "Output format: json or text", "text").option("--fail-on-regression", "Exit with error if coverage regressed").option("--fail-on-drift", "Exit with error if new drift was introduced").option("--docs <glob>", "Glob pattern for markdown docs to check for impact", collect, []).option("--fail-on-docs-impact", "Exit with error if docs need updates").option("--ai", "Use AI for deeper analysis and fix suggestions").action(async (base, head, options) => {
  try {
- const baseSpec = loadSpec(base, readFileSync3);
- const headSpec = loadSpec(head, readFileSync3);
- const diff = diffSpec(baseSpec, headSpec);
+ const baseSpec = loadSpec(base, readFileSync2);
+ const headSpec = loadSpec(head, readFileSync2);
+ let markdownFiles;
+ let docsPatterns = options.docs;
+ if (!docsPatterns || docsPatterns.length === 0) {
+ const configResult = await loadDocCovConfig(process.cwd());
+ if (configResult.config?.docs?.include) {
+ docsPatterns = configResult.config.docs.include;
+ log(chalk2.gray(`Using docs patterns from config: ${docsPatterns.join(", ")}`));
+ }
+ }
+ if (docsPatterns && docsPatterns.length > 0) {
+ markdownFiles = await loadMarkdownFiles(docsPatterns);
+ }
+ const diff = diffSpecWithDocs(baseSpec, headSpec, { markdownFiles });
  const format = options.output ?? "text";
  if (format === "json") {
  log(JSON.stringify(diff, null, 2));
  } else {
  printTextDiff(diff, log, error);
+ if (options.ai && diff.docsImpact && hasDocsImpact(diff)) {
+ if (!isAIDocsAnalysisAvailable()) {
+ log(chalk2.yellow(`
+ ⚠ AI analysis unavailable (set OPENAI_API_KEY or ANTHROPIC_API_KEY)`));
+ } else {
+ log(chalk2.gray(`
+ Generating AI summary...`));
+ const impacts = diff.docsImpact.impactedFiles.flatMap((f) => f.references.map((r) => ({
+ file: f.file,
+ exportName: r.exportName,
+ changeType: r.changeType,
+ context: r.context
+ })));
+ const summary = await generateImpactSummary(impacts);
+ if (summary) {
+ log("");
+ log(chalk2.bold("AI Summary"));
+ log(chalk2.cyan(` ${summary}`));
+ }
+ }
+ }
  }
  if (options.failOnRegression && diff.coverageDelta < 0) {
  error(chalk2.red(`
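The new --ai path gates on isAIDocsAnalysisAvailable() and picks a model in getModel2(): Anthropic's claude-sonnet-4-20250514 when DOCCOV_LLM_PROVIDER=anthropic or ANTHROPIC_API_KEY is set, otherwise OpenAI's gpt-4o-mini. A hedged sketch of that gate (these helpers are internal to the bundle, so the checks are restated rather than imported):

    // Example invocations, shown as comments:
    //   doccov diff base.json head.json --docs "docs/**/*.md" --ai
    //   DOCCOV_LLM_PROVIDER=anthropic doccov diff base.json head.json --ai
    const aiAvailable = Boolean(process.env.OPENAI_API_KEY || process.env.ANTHROPIC_API_KEY);
    console.log(aiAvailable ? "AI summary will run" : "AI summary will be skipped");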
@@ -650,19 +785,42 @@ ${diff.driftIntroduced} new drift issue(s) introduced`));
  process.exitCode = 1;
  return;
  }
+ if (options.failOnDocsImpact && hasDocsImpact(diff)) {
+ const summary = getDocsImpactSummary(diff);
+ error(chalk2.red(`
+ ${summary.totalIssues} docs issue(s) require attention`));
+ process.exitCode = 1;
+ return;
+ }
  } catch (commandError) {
  error(chalk2.red("Error:"), commandError instanceof Error ? commandError.message : commandError);
  process.exitCode = 1;
  }
  });
  }
- function loadSpec(filePath, readFileSync3) {
- const resolvedPath = path4.resolve(filePath);
- if (!fs3.existsSync(resolvedPath)) {
+ function collect(value, previous) {
+ return previous.concat([value]);
+ }
+ async function loadMarkdownFiles(patterns) {
+ const files = [];
+ for (const pattern of patterns) {
+ const matches = await glob(pattern, { nodir: true });
+ for (const filePath of matches) {
+ try {
+ const content = fs2.readFileSync(filePath, "utf-8");
+ files.push({ path: filePath, content });
+ } catch {}
+ }
+ }
+ return parseMarkdownFiles(files);
+ }
+ function loadSpec(filePath, readFileSync2) {
+ const resolvedPath = path3.resolve(filePath);
+ if (!fs2.existsSync(resolvedPath)) {
  throw new Error(`File not found: ${filePath}`);
  }
  try {
- const content = readFileSync3(resolvedPath, "utf-8");
+ const content = readFileSync2(resolvedPath, "utf-8");
  return JSON.parse(content);
  } catch (parseError) {
  throw new Error(`Failed to parse ${filePath}: ${parseError instanceof Error ? parseError.message : parseError}`);
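With loadMarkdownFiles and the reworked loadSpec in place, the docs-impact pipeline is also reachable directly from the SDK. A hedged sketch using only the imports visible in this file (parseMarkdownFiles, diffSpecWithDocs, hasDocsImpact); the file names are placeholders:

    import { readFileSync } from "node:fs";
    import { diffSpecWithDocs, hasDocsImpact, parseMarkdownFiles } from "@doccov/sdk";

    const baseSpec = JSON.parse(readFileSync("openpkg.base.json", "utf-8"));
    const headSpec = JSON.parse(readFileSync("openpkg.head.json", "utf-8"));
    // parseMarkdownFiles takes { path, content } records, as loadMarkdownFiles does above.
    const markdownFiles = parseMarkdownFiles([
      { path: "README.md", content: readFileSync("README.md", "utf-8") },
    ]);
    const diff = diffSpecWithDocs(baseSpec, headSpec, { markdownFiles });
    if (hasDocsImpact(diff)) {
      console.log(diff.docsImpact.impactedFiles.map((f) => f.file));
    }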
@@ -730,218 +888,61 @@ function printTextDiff(diff, log, _error) {
  log(chalk2.green(` -${diff.driftResolved} drift issue(s) resolved`));
  }
  }
- log("");
- }
-
- // src/commands/fix.ts
- import * as fs4 from "node:fs";
- import * as path5 from "node:path";
- import {
- applyEdits,
- categorizeDrifts,
- createSourceFile,
- DocCov as DocCov2,
- findJSDocLocation,
- generateFixesForExport,
- mergeFixes,
- parseJSDocToPatch,
- serializeJSDoc
- } from "@doccov/sdk";
- import chalk3 from "chalk";
- import ora2 from "ora";
- var defaultDependencies3 = {
- createDocCov: (options) => new DocCov2(options),
- spinner: (text) => ora2(text),
- log: console.log,
- error: console.error
- };
- function collectDrifts(exports) {
- const results = [];
- for (const exp of exports) {
- const drifts = exp.docs?.drift ?? [];
- for (const drift of drifts) {
- results.push({ export: exp, drift });
- }
- }
- return results;
- }
- function filterDriftsByType(drifts, onlyTypes) {
- if (!onlyTypes)
- return drifts;
- const allowedTypes = new Set(onlyTypes.split(",").map((t) => t.trim()));
- return drifts.filter((d) => allowedTypes.has(d.drift.type));
- }
- function groupByExport(drifts) {
- const map = new Map;
- for (const { export: exp, drift } of drifts) {
- const existing = map.get(exp) ?? [];
- existing.push(drift);
- map.set(exp, existing);
- }
- return map;
- }
- function registerFixCommand(program, dependencies = {}) {
- const { createDocCov, spinner, log, error } = {
- ...defaultDependencies3,
- ...dependencies
- };
- program.command("fix [entry]").description("Automatically fix documentation drift").option("--cwd <dir>", "Working directory", process.cwd()).option("--package <name>", "Target package name (for monorepos)").option("--dry-run", "Preview changes without writing").option("--only <types>", "Only fix specific drift types (comma-separated)").option("--skip-resolve", "Skip external type resolution from node_modules").action(async (entry, options) => {
- try {
- let targetDir = options.cwd;
- let entryFile = entry;
- if (options.package) {
- const packageDir = await findPackageInMonorepo(options.cwd, options.package);
- if (!packageDir) {
- throw new Error(`Package "${options.package}" not found in monorepo`);
+ if (diff.docsImpact) {
+ log("");
+ log(chalk2.bold("Docs Impact"));
+ const { impactedFiles, missingDocs, stats } = diff.docsImpact;
+ log(chalk2.gray(` Scanned ${stats.filesScanned} file(s), ${stats.codeBlocksFound} code block(s)`));
+ if (impactedFiles.length > 0) {
+ log("");
+ log(chalk2.yellow(` ${impactedFiles.length} file(s) need updates:`));
+ for (const file of impactedFiles.slice(0, 10)) {
+ log(chalk2.yellow(` \uD83D\uDCC4 ${file.file}`));
+ for (const ref of file.references.slice(0, 3)) {
+ const changeLabel = ref.changeType === "signature-changed" ? "signature changed" : ref.changeType === "removed" ? "removed" : "deprecated";
+ log(chalk2.gray(` Line ${ref.line}: ${ref.exportName} (${changeLabel})`));
  }
- targetDir = packageDir;
- log(chalk3.gray(`Found package at ${path5.relative(options.cwd, packageDir)}`));
- }
- if (!entryFile) {
- entryFile = await findEntryPoint(targetDir, true);
- log(chalk3.gray(`Auto-detected entry point: ${path5.relative(targetDir, entryFile)}`));
- } else {
- entryFile = path5.resolve(targetDir, entryFile);
- if (fs4.existsSync(entryFile) && fs4.statSync(entryFile).isDirectory()) {
- targetDir = entryFile;
- entryFile = await findEntryPoint(entryFile, true);
- log(chalk3.gray(`Auto-detected entry point: ${entryFile}`));
+ if (file.references.length > 3) {
+ log(chalk2.gray(` ... and ${file.references.length - 3} more reference(s)`));
  }
  }
- const resolveExternalTypes = !options.skipResolve;
- const analyzeSpinner = spinner("Analyzing documentation...");
- analyzeSpinner.start();
- const doccov = createDocCov({ resolveExternalTypes });
- const result = await doccov.analyzeFileWithDiagnostics(entryFile);
- const spec = result.spec;
- analyzeSpinner.succeed("Analysis complete");
- const allDrifts = collectDrifts(spec.exports ?? []);
- if (allDrifts.length === 0) {
- log(chalk3.green("No drift issues found. Documentation is in sync!"));
- return;
- }
- const filteredDrifts = filterDriftsByType(allDrifts, options.only);
- if (filteredDrifts.length === 0) {
- log(chalk3.yellow("No matching drift issues for the specified types."));
- return;
- }
- const { fixable, nonFixable } = categorizeDrifts(filteredDrifts.map((d) => d.drift));
- if (fixable.length === 0) {
- log(chalk3.yellow(`Found ${nonFixable.length} drift issue(s), but none are auto-fixable.`));
- log(chalk3.gray("Non-fixable drift types require manual intervention:"));
- for (const drift of nonFixable.slice(0, 5)) {
- log(chalk3.gray(` • ${drift.type}: ${drift.issue}`));
- }
- return;
+ if (impactedFiles.length > 10) {
+ log(chalk2.gray(` ... and ${impactedFiles.length - 10} more file(s)`));
  }
+ }
+ if (missingDocs.length > 0) {
  log("");
- log(chalk3.bold(`Found ${fixable.length} fixable issue(s)`));
- if (nonFixable.length > 0) {
- log(chalk3.gray(`(${nonFixable.length} non-fixable issue(s) skipped)`));
+ log(chalk2.yellow(` ${missingDocs.length} new export(s) missing docs:`));
+ for (const name of missingDocs.slice(0, 5)) {
+ log(chalk2.yellow(`${name}`));
  }
- log("");
- const groupedDrifts = groupByExport(filteredDrifts.filter((d) => fixable.includes(d.drift)));
- const edits = [];
- const editsByFile = new Map;
- for (const [exp, drifts] of groupedDrifts) {
- if (!exp.source?.file) {
- log(chalk3.gray(` Skipping ${exp.name}: no source location`));
- continue;
- }
- if (exp.source.file.endsWith(".d.ts")) {
- log(chalk3.gray(` Skipping ${exp.name}: declaration file`));
- continue;
- }
- const filePath = path5.resolve(targetDir, exp.source.file);
- if (!fs4.existsSync(filePath)) {
- log(chalk3.gray(` Skipping ${exp.name}: file not found`));
- continue;
- }
- const sourceFile = createSourceFile(filePath);
- const location = findJSDocLocation(sourceFile, exp.name, exp.source.line);
- if (!location) {
- log(chalk3.gray(` Skipping ${exp.name}: could not find declaration`));
- continue;
- }
- let existingPatch = {};
- if (location.hasExisting && location.existingJSDoc) {
- existingPatch = parseJSDocToPatch(location.existingJSDoc);
- }
- const fixes = generateFixesForExport({ ...exp, docs: { ...exp.docs, drift: drifts } }, existingPatch);
- if (fixes.length === 0)
- continue;
- const mergedPatch = mergeFixes(fixes, existingPatch);
- const newJSDoc = serializeJSDoc(mergedPatch, location.indent);
- const edit = {
- filePath,
- symbolName: exp.name,
- startLine: location.startLine,
- endLine: location.endLine,
- hasExisting: location.hasExisting,
- existingJSDoc: location.existingJSDoc,
- newJSDoc,
- indent: location.indent
- };
- edits.push(edit);
- const fileEdits = editsByFile.get(filePath) ?? [];
- fileEdits.push({ export: exp, edit, fixes, existingPatch });
- editsByFile.set(filePath, fileEdits);
+ if (missingDocs.length > 5) {
+ log(chalk2.gray(` ... and ${missingDocs.length - 5} more`));
  }
- if (edits.length === 0) {
- log(chalk3.yellow("No edits could be generated."));
- return;
- }
- if (options.dryRun) {
- log(chalk3.bold("Dry run - changes that would be made:"));
- log("");
- for (const [filePath, fileEdits] of editsByFile) {
- const relativePath = path5.relative(targetDir, filePath);
- log(chalk3.cyan(` ${relativePath}:`));
- for (const { export: exp, edit, fixes } of fileEdits) {
- const lineInfo = edit.hasExisting ? `lines ${edit.startLine + 1}-${edit.endLine + 1}` : `line ${edit.startLine + 1}`;
- log(` ${chalk3.bold(exp.name)} [${lineInfo}]`);
- for (const fix of fixes) {
- log(chalk3.green(` + ${fix.description}`));
- }
- }
- log("");
- }
- log(chalk3.gray("Run without --dry-run to apply these changes."));
- } else {
- const applySpinner = spinner("Applying fixes...");
- applySpinner.start();
- const result2 = await applyEdits(edits);
- if (result2.errors.length > 0) {
- applySpinner.warn("Some fixes could not be applied");
- for (const err of result2.errors) {
- error(chalk3.red(` ${err.file}: ${err.error}`));
- }
- } else {
- applySpinner.succeed(`Applied ${result2.editsApplied} fix(es) to ${result2.filesModified} file(s)`);
- }
- log("");
- for (const [filePath, fileEdits] of editsByFile) {
- const relativePath = path5.relative(targetDir, filePath);
- log(chalk3.green(` ✓ ${relativePath}: ${fileEdits.length} fix(es)`));
- }
- }
- } catch (commandError) {
- error(chalk3.red("Error:"), commandError instanceof Error ? commandError.message : commandError);
- process.exitCode = 1;
  }
- });
+ if (impactedFiles.length === 0 && missingDocs.length === 0) {
+ log(chalk2.green(" ✓ No docs impact detected"));
+ }
+ }
+ log("");
  }
 
  // src/commands/generate.ts
- import * as fs6 from "node:fs";
- import * as path7 from "node:path";
- import { DocCov as DocCov3 } from "@doccov/sdk";
+ import * as fs3 from "node:fs";
+ import * as path4 from "node:path";
+ import {
+ DocCov as DocCov2,
+ detectEntryPoint as detectEntryPoint2,
+ detectMonorepo as detectMonorepo2,
+ findPackageByName as findPackageByName2,
+ NodeFileSystem as NodeFileSystem2
+ } from "@doccov/sdk";
  import { normalize, validateSpec } from "@openpkg-ts/spec";
- import chalk5 from "chalk";
- import ora3 from "ora";
+ import chalk4 from "chalk";
+ import ora2 from "ora";
 
  // src/utils/filter-options.ts
- import chalk4 from "chalk";
+ import chalk3 from "chalk";
  var unique = (values) => Array.from(new Set(values));
  var parseListFlag = (value) => {
  if (!value) {
@@ -951,7 +952,7 @@ var parseListFlag = (value) => {
  const normalized = rawItems.flatMap((item) => String(item).split(",")).map((item) => item.trim()).filter(Boolean);
  return normalized.length > 0 ? unique(normalized) : undefined;
  };
- var formatList = (label, values) => `${label}: ${values.map((value) => chalk4.cyan(value)).join(", ")}`;
+ var formatList = (label, values) => `${label}: ${values.map((value) => chalk3.cyan(value)).join(", ")}`;
  var mergeFilterOptions = (config, cliOptions) => {
  const messages = [];
  const configInclude = config?.include;
@@ -990,119 +991,11 @@ var mergeFilterOptions = (config, cliOptions) => {
  };
  };
 
- // src/utils/entry-detection.ts
- import * as fs5 from "node:fs";
- import * as path6 from "node:path";
- function detectEntryPoint(repoDir) {
- const pkgPath = path6.join(repoDir, "package.json");
- if (!fs5.existsSync(pkgPath)) {
- throw new Error("No package.json found - not a valid npm package");
- }
- let pkg;
- try {
- pkg = JSON.parse(fs5.readFileSync(pkgPath, "utf-8"));
- } catch {
- throw new Error("Failed to parse package.json");
- }
- if (typeof pkg.types === "string") {
- const resolved = resolveToTs(repoDir, pkg.types);
- if (resolved) {
- return { entryPath: resolved, source: "types" };
- }
- }
- if (typeof pkg.typings === "string") {
- const resolved = resolveToTs(repoDir, pkg.typings);
- if (resolved) {
- return { entryPath: resolved, source: "types" };
- }
- }
- const exports = pkg.exports;
- if (exports) {
- const mainExport = exports["."];
- if (typeof mainExport === "object" && mainExport !== null) {
- const exportObj = mainExport;
- if (typeof exportObj.types === "string") {
- const resolved = resolveToTs(repoDir, exportObj.types);
- if (resolved) {
- return { entryPath: resolved, source: "exports" };
- }
- }
- }
- }
- if (typeof pkg.main === "string") {
- const resolved = resolveToTs(repoDir, pkg.main);
- if (resolved) {
- return { entryPath: resolved, source: "main" };
- }
- }
- if (typeof pkg.module === "string") {
- const resolved = resolveToTs(repoDir, pkg.module);
- if (resolved) {
- return { entryPath: resolved, source: "module" };
- }
- }
- const commonPaths = [
- "src/index.ts",
- "src/index.tsx",
- "src/main.ts",
- "index.ts",
- "lib/index.ts",
- "source/index.ts"
- ];
- for (const p of commonPaths) {
- if (fs5.existsSync(path6.join(repoDir, p))) {
- return { entryPath: p, source: "fallback" };
- }
- }
- throw new Error("Could not detect TypeScript entry point. No types field in package.json and no common entry paths found.");
- }
- function resolveToTs(baseDir, filePath) {
- const normalized = filePath.replace(/^\.\//, "");
- const isSourceTs = normalized.endsWith(".ts") && !normalized.endsWith(".d.ts") || normalized.endsWith(".tsx");
- if (isSourceTs) {
- if (fs5.existsSync(path6.join(baseDir, normalized))) {
- return normalized;
- }
- }
- const candidates = [];
- if (normalized.startsWith("dist/")) {
- const srcPath = normalized.replace(/^dist\//, "src/");
- candidates.push(srcPath.replace(/\.js$/, ".ts"));
- candidates.push(srcPath.replace(/\.d\.ts$/, ".ts"));
- candidates.push(srcPath.replace(/\.js$/, ".tsx"));
- }
- if (normalized.startsWith("build/")) {
- const srcPath = normalized.replace(/^build\//, "src/");
- candidates.push(srcPath.replace(/\.js$/, ".ts"));
- candidates.push(srcPath.replace(/\.d\.ts$/, ".ts"));
- }
- if (normalized.startsWith("lib/")) {
- const srcPath = normalized.replace(/^lib\//, "src/");
- candidates.push(srcPath.replace(/\.js$/, ".ts"));
- candidates.push(srcPath.replace(/\.d\.ts$/, ".ts"));
- }
- candidates.push(normalized.replace(/\.js$/, ".ts"));
- candidates.push(normalized.replace(/\.d\.ts$/, ".ts"));
- candidates.push(normalized.replace(/\.js$/, ".tsx"));
- if (normalized.endsWith(".d.ts")) {
- const baseName = path6.basename(normalized, ".d.ts");
- candidates.push(`src/${baseName}.ts`);
- }
- for (const candidate of candidates) {
- if (candidate.endsWith(".d.ts"))
- continue;
- if (fs5.existsSync(path6.join(baseDir, candidate))) {
- return candidate;
- }
- }
- return;
- }
-
  // src/commands/generate.ts
- var defaultDependencies4 = {
- createDocCov: (options) => new DocCov3(options),
- writeFileSync: fs6.writeFileSync,
- spinner: (text) => ora3(text),
+ var defaultDependencies3 = {
+ createDocCov: (options) => new DocCov2(options),
+ writeFileSync: fs3.writeFileSync,
+ spinner: (text) => ora2(text),
  log: console.log,
  error: console.error
  };
@@ -1121,38 +1014,46 @@ function stripDocsFields(spec) {
  }
  function formatDiagnosticOutput(prefix, diagnostic, baseDir) {
  const location = diagnostic.location;
- const relativePath = location?.file ? path7.relative(baseDir, location.file) || location.file : undefined;
- const locationText = location && relativePath ? chalk5.gray(`${relativePath}:${location.line ?? 1}:${location.column ?? 1}`) : null;
+ const relativePath = location?.file ? path4.relative(baseDir, location.file) || location.file : undefined;
+ const locationText = location && relativePath ? chalk4.gray(`${relativePath}:${location.line ?? 1}:${location.column ?? 1}`) : null;
  const locationPrefix = locationText ? `${locationText} ` : "";
  return `${prefix} ${locationPrefix}${diagnostic.message}`;
  }
  function registerGenerateCommand(program, dependencies = {}) {
  const { createDocCov, writeFileSync: writeFileSync2, spinner, log, error } = {
- ...defaultDependencies4,
+ ...defaultDependencies3,
  ...dependencies
  };
  program.command("generate [entry]").description("Generate OpenPkg specification for documentation coverage analysis").option("-o, --output <file>", "Output file", "openpkg.json").option("-p, --package <name>", "Target package name (for monorepos)").option("--cwd <dir>", "Working directory", process.cwd()).option("--skip-resolve", "Skip external type resolution from node_modules").option("--include <ids>", "Filter exports by identifier (comma-separated or repeated)").option("--exclude <ids>", "Exclude exports by identifier (comma-separated or repeated)").option("--show-diagnostics", "Print TypeScript diagnostics from analysis").option("--no-docs", "Omit docs coverage fields from output (pure structural spec)").option("-y, --yes", "Skip all prompts and use defaults").action(async (entry, options) => {
  try {
  let targetDir = options.cwd;
  let entryFile = entry;
+ const fileSystem = new NodeFileSystem2(options.cwd);
  if (options.package) {
- const packageDir = await findPackageInMonorepo(options.cwd, options.package);
- if (!packageDir) {
- throw new Error(`Package "${options.package}" not found in monorepo`);
+ const mono = await detectMonorepo2(fileSystem);
+ if (!mono.isMonorepo) {
+ throw new Error(`Not a monorepo. Remove --package flag for single-package repos.`);
+ }
+ const pkg = findPackageByName2(mono.packages, options.package);
+ if (!pkg) {
+ const available = mono.packages.map((p) => p.name).join(", ");
+ throw new Error(`Package "${options.package}" not found. Available: ${available}`);
  }
- targetDir = packageDir;
- log(chalk5.gray(`Found package at ${path7.relative(options.cwd, packageDir)}`));
+ targetDir = path4.join(options.cwd, pkg.path);
+ log(chalk4.gray(`Found package at ${pkg.path}`));
  }
  if (!entryFile) {
- const detected = detectEntryPoint(targetDir);
- entryFile = path7.join(targetDir, detected.entryPath);
- log(chalk5.gray(`Auto-detected entry point: ${detected.entryPath} (from ${detected.source})`));
+ const targetFs = new NodeFileSystem2(targetDir);
+ const detected = await detectEntryPoint2(targetFs);
+ entryFile = path4.join(targetDir, detected.path);
+ log(chalk4.gray(`Auto-detected entry point: ${detected.path} (from ${detected.source})`));
  } else {
- entryFile = path7.resolve(targetDir, entryFile);
- if (fs6.existsSync(entryFile) && fs6.statSync(entryFile).isDirectory()) {
- const detected = detectEntryPoint(entryFile);
- entryFile = path7.join(entryFile, detected.entryPath);
- log(chalk5.gray(`Auto-detected entry point: ${detected.entryPath} (from ${detected.source})`));
+ entryFile = path4.resolve(targetDir, entryFile);
+ if (fs3.existsSync(entryFile) && fs3.statSync(entryFile).isDirectory()) {
+ const dirFs = new NodeFileSystem2(entryFile);
+ const detected = await detectEntryPoint2(dirFs);
+ entryFile = path4.join(entryFile, detected.path);
+ log(chalk4.gray(`Auto-detected entry point: ${detected.path} (from ${detected.source})`));
  }
  }
  const resolveExternalTypes = !options.skipResolve;
@@ -1164,15 +1065,15 @@ function registerGenerateCommand(program, dependencies = {}) {
  try {
  config = await loadDocCovConfig(targetDir);
  if (config?.filePath) {
- log(chalk5.gray(`Loaded configuration from ${path7.relative(targetDir, config.filePath)}`));
+ log(chalk4.gray(`Loaded configuration from ${path4.relative(targetDir, config.filePath)}`));
  }
  } catch (configError) {
- error(chalk5.red("Failed to load DocCov config:"), configError instanceof Error ? configError.message : configError);
+ error(chalk4.red("Failed to load DocCov config:"), configError instanceof Error ? configError.message : configError);
  process.exit(1);
  }
  const resolvedFilters = mergeFilterOptions(config, cliFilters);
  for (const message of resolvedFilters.messages) {
- log(chalk5.gray(`• ${message}`));
+ log(chalk4.gray(`• ${message}`));
  }
  const spinnerInstance = spinner("Generating OpenPkg spec...");
  spinnerInstance.start();
@@ -1196,7 +1097,7 @@ function registerGenerateCommand(program, dependencies = {}) {
  if (!result) {
  throw new Error("Failed to produce an OpenPkg spec.");
  }
- const outputPath = path7.resolve(process.cwd(), options.output);
+ const outputPath = path4.resolve(process.cwd(), options.output);
  let normalized = normalize(result.spec);
  if (options.docs === false) {
  normalized = stripDocsFields(normalized);
@@ -1205,85 +1106,85 @@ function registerGenerateCommand(program, dependencies = {}) {
  if (!validation.ok) {
  spinnerInstance.fail("Spec failed schema validation");
  for (const err of validation.errors) {
- error(chalk5.red(`schema: ${err.instancePath || "/"} ${err.message}`));
+ error(chalk4.red(`schema: ${err.instancePath || "/"} ${err.message}`));
  }
  process.exit(1);
  }
  writeFileSync2(outputPath, JSON.stringify(normalized, null, 2));
- log(chalk5.green(`✓ Generated ${options.output}`));
- log(chalk5.gray(` ${getArrayLength(normalized.exports)} exports`));
- log(chalk5.gray(` ${getArrayLength(normalized.types)} types`));
+ log(chalk4.green(`✓ Generated ${options.output}`));
+ log(chalk4.gray(` ${getArrayLength(normalized.exports)} exports`));
+ log(chalk4.gray(` ${getArrayLength(normalized.types)} types`));
  if (options.showDiagnostics && result.diagnostics.length > 0) {
  log("");
- log(chalk5.bold("Diagnostics"));
+ log(chalk4.bold("Diagnostics"));
  for (const diagnostic of result.diagnostics) {
- const prefix = diagnostic.severity === "error" ? chalk5.red("✖") : diagnostic.severity === "warning" ? chalk5.yellow("⚠") : chalk5.cyan("ℹ");
+ const prefix = diagnostic.severity === "error" ? chalk4.red("✖") : diagnostic.severity === "warning" ? chalk4.yellow("⚠") : chalk4.cyan("ℹ");
  log(formatDiagnosticOutput(prefix, diagnostic, targetDir));
  }
  }
  } catch (commandError) {
- error(chalk5.red("Error:"), commandError instanceof Error ? commandError.message : commandError);
+ error(chalk4.red("Error:"), commandError instanceof Error ? commandError.message : commandError);
  process.exit(1);
  }
  });
  }
 
  // src/commands/init.ts
- import * as fs7 from "node:fs";
- import * as path8 from "node:path";
- import chalk6 from "chalk";
- var defaultDependencies5 = {
- fileExists: fs7.existsSync,
- writeFileSync: fs7.writeFileSync,
- readFileSync: fs7.readFileSync,
+ import * as fs4 from "node:fs";
+ import * as path5 from "node:path";
+ import chalk5 from "chalk";
+ var defaultDependencies4 = {
+ fileExists: fs4.existsSync,
+ writeFileSync: fs4.writeFileSync,
+ readFileSync: fs4.readFileSync,
  log: console.log,
  error: console.error
  };
  function registerInitCommand(program, dependencies = {}) {
- const { fileExists: fileExists2, writeFileSync: writeFileSync3, readFileSync: readFileSync5, log, error } = {
- ...defaultDependencies5,
+ const { fileExists: fileExists2, writeFileSync: writeFileSync3, readFileSync: readFileSync3, log, error } = {
+ ...defaultDependencies4,
  ...dependencies
  };
  program.command("init").description("Create a DocCov configuration file").option("--cwd <dir>", "Working directory", process.cwd()).option("--format <format>", "Config format: auto, mjs, js, cjs", "auto").action((options) => {
- const cwd = path8.resolve(options.cwd);
+ const cwd = path5.resolve(options.cwd);
  const formatOption = String(options.format ?? "auto").toLowerCase();
  if (!isValidFormat(formatOption)) {
- error(chalk6.red(`Invalid format "${formatOption}". Use auto, mjs, js, or cjs.`));
+ error(chalk5.red(`Invalid format "${formatOption}". Use auto, mjs, js, or cjs.`));
  process.exitCode = 1;
  return;
  }
  const existing = findExistingConfig(cwd, fileExists2);
  if (existing) {
- error(chalk6.red(`A DocCov config already exists at ${path8.relative(cwd, existing) || "./doccov.config.*"}.`));
+ error(chalk5.red(`A DocCov config already exists at ${path5.relative(cwd, existing) || "./doccov.config.*"}.`));
  process.exitCode = 1;
  return;
  }
- const packageType = detectPackageType(cwd, fileExists2, readFileSync5);
+ const packageType = detectPackageType(cwd, fileExists2, readFileSync3);
  const targetFormat = resolveFormat(formatOption, packageType);
  if (targetFormat === "js" && packageType !== "module") {
- log(chalk6.yellow('Package is not marked as "type": "module"; creating doccov.config.js may require enabling ESM.'));
+ log(chalk5.yellow('Package is not marked as "type": "module"; creating doccov.config.js may require enabling ESM.'));
  }
  const fileName = `doccov.config.${targetFormat}`;
- const outputPath = path8.join(cwd, fileName);
+ const outputPath = path5.join(cwd, fileName);
  if (fileExists2(outputPath)) {
- error(chalk6.red(`Cannot create ${fileName}; file already exists.`));
+ error(chalk5.red(`Cannot create ${fileName}; file already exists.`));
  process.exitCode = 1;
  return;
  }
  const template = buildTemplate(targetFormat);
  writeFileSync3(outputPath, template, { encoding: "utf8" });
- log(chalk6.green(`✓ Created ${path8.relative(process.cwd(), outputPath)}`));
+ log(chalk5.green(`✓ Created ${path5.relative(process.cwd(), outputPath)}`));
  });
  }
  var isValidFormat = (value) => {
  return value === "auto" || value === "mjs" || value === "js" || value === "cjs";
  };
  var findExistingConfig = (cwd, fileExists2) => {
- let current = path8.resolve(cwd);
- const { root } = path8.parse(current);
+ let current = path5.resolve(cwd);
+ const { root } = path5.parse(current);
  while (true) {
  for (const candidate of DOCCOV_CONFIG_FILENAMES) {
- const candidatePath = path8.join(current, candidate);
+ const candidatePath = path5.join(current, candidate);
  if (fileExists2(candidatePath)) {
  return candidatePath;
  }
@@ -1291,17 +1192,17 @@ var findExistingConfig = (cwd, fileExists2) => {
  if (current === root) {
  break;
  }
- current = path8.dirname(current);
+ current = path5.dirname(current);
  }
  return null;
  };
- var detectPackageType = (cwd, fileExists2, readFileSync5) => {
+ var detectPackageType = (cwd, fileExists2, readFileSync3) => {
  const packageJsonPath = findNearestPackageJson(cwd, fileExists2);
  if (!packageJsonPath) {
  return;
  }
  try {
- const raw = readFileSync5(packageJsonPath, "utf8");
+ const raw = readFileSync3(packageJsonPath, "utf8");
  const parsed = JSON.parse(raw);
  if (parsed.type === "module") {
  return "module";
@@ -1313,17 +1214,17 @@ var detectPackageType = (cwd, fileExists2, readFileSync5) => {
  return;
  };
  var findNearestPackageJson = (cwd, fileExists2) => {
- let current = path8.resolve(cwd);
- const { root } = path8.parse(current);
+ let current = path5.resolve(cwd);
+ const { root } = path5.parse(current);
  while (true) {
- const candidate = path8.join(current, "package.json");
+ const candidate = path5.join(current, "package.json");
  if (fileExists2(candidate)) {
  return candidate;
  }
  if (current === root) {
  break;
  }
- current = path8.dirname(current);
+ current = path5.dirname(current);
  }
  return null;
  };
@@ -1359,11 +1260,17 @@ var buildTemplate = (format) => {
  };
 
  // src/commands/report.ts
- import * as fs8 from "node:fs";
- import * as path9 from "node:path";
- import { DocCov as DocCov4 } from "@doccov/sdk";
- import chalk7 from "chalk";
- import ora4 from "ora";
+ import * as fs5 from "node:fs";
+ import * as path6 from "node:path";
+ import {
+ DocCov as DocCov3,
+ detectEntryPoint as detectEntryPoint3,
+ detectMonorepo as detectMonorepo3,
+ findPackageByName as findPackageByName3,
+ NodeFileSystem as NodeFileSystem3
+ } from "@doccov/sdk";
+ import chalk6 from "chalk";
+ import ora3 from "ora";
 
  // src/reports/markdown.ts
  function bar(pct, width = 10) {
@@ -1540,25 +1447,34 @@ function registerReportCommand(program) {
  try {
  let spec;
  if (options.spec) {
- const specPath = path9.resolve(options.cwd, options.spec);
- spec = JSON.parse(fs8.readFileSync(specPath, "utf-8"));
+ const specPath = path6.resolve(options.cwd, options.spec);
+ spec = JSON.parse(fs5.readFileSync(specPath, "utf-8"));
  } else {
  let targetDir = options.cwd;
  let entryFile = entry;
+ const fileSystem = new NodeFileSystem3(options.cwd);
  if (options.package) {
- const packageDir = await findPackageInMonorepo(options.cwd, options.package);
- if (!packageDir)
- throw new Error(`Package "${options.package}" not found`);
- targetDir = packageDir;
+ const mono = await detectMonorepo3(fileSystem);
+ if (!mono.isMonorepo) {
+ throw new Error(`Not a monorepo. Remove --package flag for single-package repos.`);
+ }
+ const pkg = findPackageByName3(mono.packages, options.package);
+ if (!pkg) {
+ const available = mono.packages.map((p) => p.name).join(", ");
+ throw new Error(`Package "${options.package}" not found. Available: ${available}`);
+ }
+ targetDir = path6.join(options.cwd, pkg.path);
  }
  if (!entryFile) {
- entryFile = await findEntryPoint(targetDir, true);
+ const targetFs = new NodeFileSystem3(targetDir);
+ const detected = await detectEntryPoint3(targetFs);
+ entryFile = path6.join(targetDir, detected.path);
  } else {
- entryFile = path9.resolve(targetDir, entryFile);
+ entryFile = path6.resolve(targetDir, entryFile);
  }
- const spinner = ora4("Analyzing...").start();
+ const spinner = ora3("Analyzing...").start();
  const resolveExternalTypes = !options.skipResolve;
- const doccov = new DocCov4({ resolveExternalTypes });
+ const doccov = new DocCov3({ resolveExternalTypes });
  const result = await doccov.analyzeFileWithDiagnostics(entryFile);
  spinner.succeed("Analysis complete");
  spec = result.spec;
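Note: this hunk is the substantive change to the report command: the CLI's own findPackageInMonorepo/findEntryPoint helpers give way to SDK primitives operating on a NodeFileSystem. The SDK's type declarations are not part of this diff, so the following is a sketch with shapes inferred from the call sites above (mono.isMonorepo, mono.packages, pkg.path, detected.path), not from SDK typings:

// Sketch of the new resolution flow in registerReportCommand.
import {
  detectEntryPoint,
  detectMonorepo,
  findPackageByName,
  NodeFileSystem
} from "@doccov/sdk";
import * as path from "node:path";

async function resolveEntry(cwd: string, pkgName?: string): Promise<string> {
  let targetDir = cwd;
  if (pkgName) {
    const mono = await detectMonorepo(new NodeFileSystem(cwd));
    if (!mono.isMonorepo) throw new Error("--package is only valid in a monorepo");
    const pkg = findPackageByName(mono.packages, pkgName);
    if (!pkg) throw new Error(`Package "${pkgName}" not found`);
    targetDir = path.join(cwd, pkg.path); // pkg.path is now repo-relative
  }
  const detected = await detectEntryPoint(new NodeFileSystem(targetDir));
  return path.join(targetDir, detected.path);
}

Also visible in the hunk: --package now fails fast with a clear error in single-package repos, and the not-found error lists the available package names.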
@@ -1575,26 +1491,36 @@ function registerReportCommand(program) {
  output = renderMarkdown(stats, { limit });
  }
  if (options.out) {
- const outPath = path9.resolve(options.cwd, options.out);
- fs8.writeFileSync(outPath, output);
- console.log(chalk7.green(`Report written to ${outPath}`));
+ const outPath = path6.resolve(options.cwd, options.out);
+ fs5.writeFileSync(outPath, output);
+ console.log(chalk6.green(`Report written to ${outPath}`));
  } else {
  console.log(output);
  }
  } catch (err) {
- console.error(chalk7.red("Error:"), err instanceof Error ? err.message : err);
+ console.error(chalk6.red("Error:"), err instanceof Error ? err.message : err);
  process.exitCode = 1;
  }
  });
  }

  // src/commands/scan.ts
- import * as fs11 from "node:fs";
+ import * as fs7 from "node:fs";
  import * as os from "node:os";
- import * as path12 from "node:path";
- import { DocCov as DocCov5 } from "@doccov/sdk";
- import chalk8 from "chalk";
- import ora5 from "ora";
+ import * as path8 from "node:path";
+ import {
+ DocCov as DocCov4,
+ detectBuildInfo,
+ detectEntryPoint as detectEntryPoint4,
+ detectMonorepo as detectMonorepo4,
+ detectPackageManager,
+ findPackageByName as findPackageByName4,
+ formatPackageList,
+ getInstallCommand,
+ NodeFileSystem as NodeFileSystem4
+ } from "@doccov/sdk";
+ import chalk7 from "chalk";
+ import ora4 from "ora";
  import { simpleGit } from "simple-git";

  // src/utils/github-url.ts
@@ -1628,17 +1554,17 @@ function buildDisplayUrl(parsed) {
  }

  // src/utils/llm-build-plan.ts
- import * as fs9 from "node:fs";
- import * as path10 from "node:path";
- import { createAnthropic as createAnthropic2 } from "@ai-sdk/anthropic";
- import { createOpenAI as createOpenAI2 } from "@ai-sdk/openai";
- import { generateObject as generateObject2 } from "ai";
- import { z as z3 } from "zod";
- var BuildPlanSchema = z3.object({
- installCommand: z3.string().optional().describe("Additional install command if needed"),
- buildCommands: z3.array(z3.string()).describe('Build steps to run, e.g. ["npm run build:wasm"]'),
- entryPoint: z3.string().describe("Path to TS/TSX entry file after build"),
- notes: z3.string().optional().describe("Caveats or warnings")
+ import * as fs6 from "node:fs";
+ import * as path7 from "node:path";
+ import { createAnthropic as createAnthropic3 } from "@ai-sdk/anthropic";
+ import { createOpenAI as createOpenAI3 } from "@ai-sdk/openai";
+ import { generateObject as generateObject3 } from "ai";
+ import { z as z4 } from "zod";
+ var BuildPlanSchema = z4.object({
+ installCommand: z4.string().optional().describe("Additional install command if needed"),
+ buildCommands: z4.array(z4.string()).describe('Build steps to run, e.g. ["npm run build:wasm"]'),
+ entryPoint: z4.string().describe("Path to TS/TSX entry file after build"),
+ notes: z4.string().optional().describe("Caveats or warnings")
  });
  var CONTEXT_FILES = [
  "package.json",
@@ -1653,22 +1579,22 @@ var CONTEXT_FILES = [
  "wasm-pack.json"
  ];
  var MAX_FILE_CHARS = 2000;
- function getModel2() {
+ function getModel3() {
  const provider = process.env.DOCCOV_LLM_PROVIDER?.toLowerCase();
  if (provider === "anthropic" || process.env.ANTHROPIC_API_KEY) {
- const anthropic = createAnthropic2();
+ const anthropic = createAnthropic3();
  return anthropic("claude-sonnet-4-20250514");
  }
- const openai = createOpenAI2();
+ const openai = createOpenAI3();
  return openai("gpt-4o-mini");
  }
  async function gatherContextFiles(repoDir) {
  const sections = [];
  for (const fileName of CONTEXT_FILES) {
- const filePath = path10.join(repoDir, fileName);
- if (fs9.existsSync(filePath)) {
+ const filePath = path7.join(repoDir, fileName);
+ if (fs6.existsSync(filePath)) {
  try {
- let content = fs9.readFileSync(filePath, "utf-8");
+ let content = fs6.readFileSync(filePath, "utf-8");
  if (content.length > MAX_FILE_CHARS) {
  content = `${content.slice(0, MAX_FILE_CHARS)}
  ... (truncated)`;
@@ -1710,8 +1636,8 @@ async function generateBuildPlan(repoDir) {
  if (!context.trim()) {
  return null;
  }
- const model = getModel2();
- const { object } = await generateObject2({
+ const model = getModel3();
+ const { object } = await generateObject3({
  model,
  schema: BuildPlanSchema,
  prompt: BUILD_PLAN_PROMPT(context)
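Note: the llm-build-plan module is untouched apart from alias renumbering (getModel2 → getModel3, generateObject2 → generateObject3, z3 → z4). The technique it relies on — constraining the model's output to a typed object with the AI SDK's generateObject plus a zod schema — is worth isolating; a minimal sketch, with the prompt text reduced to a placeholder:

// Minimal sketch of the structured-output pattern above; BuildPlanSchema
// mirrors the schema in this diff, the prompt string is illustrative only.
import { createOpenAI } from "@ai-sdk/openai";
import { generateObject } from "ai";
import { z } from "zod";

const BuildPlanSchema = z.object({
  installCommand: z.string().optional(),
  buildCommands: z.array(z.string()),
  entryPoint: z.string(),
  notes: z.string().optional()
});

async function planBuild(context: string) {
  const openai = createOpenAI();
  const { object } = await generateObject({
    model: openai("gpt-4o-mini"),
    schema: BuildPlanSchema,
    prompt: `Plan the build for this repository:\n\n${context}`
  });
  return object; // typed as z.infer<typeof BuildPlanSchema>
}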
@@ -1719,119 +1645,16 @@ async function generateBuildPlan(repoDir) {
  return object;
  }

- // src/utils/monorepo-detection.ts
- import * as fs10 from "node:fs";
- import * as path11 from "node:path";
- import { glob } from "glob";
- async function detectMonorepo(repoDir) {
- const pkgPath = path11.join(repoDir, "package.json");
- if (!fs10.existsSync(pkgPath)) {
- return { isMonorepo: false, packages: [], type: "none" };
- }
- let pkg;
- try {
- pkg = JSON.parse(fs10.readFileSync(pkgPath, "utf-8"));
- } catch {
- return { isMonorepo: false, packages: [], type: "none" };
- }
- if (pkg.workspaces) {
- const patterns = extractWorkspacePatterns(pkg.workspaces);
- const packages = await resolveWorkspacePackages(repoDir, patterns);
- return { isMonorepo: packages.length > 0, packages, type: "npm" };
- }
- const pnpmPath = path11.join(repoDir, "pnpm-workspace.yaml");
- if (fs10.existsSync(pnpmPath)) {
- const patterns = parsePnpmWorkspace(pnpmPath);
- const packages = await resolveWorkspacePackages(repoDir, patterns);
- return { isMonorepo: packages.length > 0, packages, type: "pnpm" };
- }
- const lernaPath = path11.join(repoDir, "lerna.json");
- if (fs10.existsSync(lernaPath)) {
- try {
- const lerna = JSON.parse(fs10.readFileSync(lernaPath, "utf-8"));
- const patterns = lerna.packages ?? ["packages/*"];
- const packages = await resolveWorkspacePackages(repoDir, patterns);
- return { isMonorepo: packages.length > 0, packages, type: "lerna" };
- } catch {}
- }
- return { isMonorepo: false, packages: [], type: "none" };
- }
- function extractWorkspacePatterns(workspaces) {
- if (Array.isArray(workspaces)) {
- return workspaces.filter((w) => typeof w === "string");
- }
- if (typeof workspaces === "object" && workspaces !== null) {
- const ws = workspaces;
- if (Array.isArray(ws.packages)) {
- return ws.packages.filter((w) => typeof w === "string");
- }
- }
- return [];
- }
- function parsePnpmWorkspace(filePath) {
- try {
- const content = fs10.readFileSync(filePath, "utf-8");
- const match = content.match(/packages:\s*\n((?:\s+-\s+.+\n?)+)/);
- if (match) {
- const lines = match[1].split(`
- `);
- return lines.map((line) => line.replace(/^\s+-\s+['"]?/, "").replace(/['"]?\s*$/, "")).filter(Boolean);
- }
- } catch {}
- return ["packages/*"];
- }
- async function resolveWorkspacePackages(repoDir, patterns) {
- const packages = [];
- for (const pattern of patterns) {
- const normalizedPattern = pattern.endsWith("/") ? pattern.slice(0, -1) : pattern;
- try {
- const matches = await glob(normalizedPattern, {
- cwd: repoDir,
- absolute: false
- });
- for (const match of matches) {
- const pkgJsonPath = path11.join(repoDir, match, "package.json");
- if (fs10.existsSync(pkgJsonPath)) {
- try {
- const pkgJson = JSON.parse(fs10.readFileSync(pkgJsonPath, "utf-8"));
- packages.push({
- name: pkgJson.name ?? path11.basename(match),
- path: path11.join(repoDir, match),
- relativePath: match
- });
- } catch {}
- }
- }
- } catch {}
- }
- return packages.sort((a, b) => a.name.localeCompare(b.name));
- }
- async function findPackage(repoDir, packageName) {
- const mono = await detectMonorepo(repoDir);
- if (!mono.isMonorepo) {
- return;
- }
- return mono.packages.find((pkg) => pkg.name === packageName || pkg.relativePath === packageName);
- }
- function formatPackageList(packages, limit = 10) {
- const lines = packages.slice(0, limit).map((pkg) => `  --package ${pkg.name}`);
- if (packages.length > limit) {
- lines.push(`  ... and ${packages.length - limit} more`);
- }
- return lines.join(`
- `);
- }
-
  // src/commands/scan.ts
- var defaultDependencies6 = {
- createDocCov: (options) => new DocCov5(options),
- spinner: (text) => ora5(text),
+ var defaultDependencies5 = {
+ createDocCov: (options) => new DocCov4(options),
+ spinner: (text) => ora4(text),
  log: console.log,
  error: console.error
  };
  function registerScanCommand(program, dependencies = {}) {
  const { createDocCov, spinner, log, error } = {
- ...defaultDependencies6,
+ ...defaultDependencies5,
  ...dependencies
  };
  program.command("scan <url>").description("Analyze docs coverage for any public GitHub repository").option("--ref <branch>", "Branch or tag to analyze").option("--package <name>", "Target package in monorepo").option("--output <format>", "Output format: text or json", "text").option("--no-cleanup", "Keep cloned repo (for debugging)").option("--skip-install", "Skip dependency installation (faster, but may limit type resolution)").option("--skip-resolve", "Skip external type resolution from node_modules").option("--save-spec <path>", "Save full OpenPkg spec to file").action(async (url, options) => {
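Note: the hunk above deletes the CLI's entire src/utils/monorepo-detection.ts — workspace-pattern extraction for npm/pnpm/lerna, glob-based package resolution, findPackage, and formatPackageList — now that equivalents ship in @doccov/sdk. Two call-site differences follow from the removal, visible in the hunks below (aliases de-numbered for readability):

// Before (local helper): every lookup re-ran detection and returned
// absolute paths, so callers assigned pkg.path directly.
//   const pkg = await findPackage(tempDir, name);
//   targetDir = pkg.path;
// After (SDK): detection runs once, lookups are a pure search over
// mono.packages, and pkg.path is repo-relative.
//   const mono = await detectMonorepo(new NodeFileSystem(tempDir));
//   const pkg = findPackageByName(mono.packages, name);
//   targetDir = path.join(tempDir, pkg.path);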
@@ -1841,11 +1664,11 @@ function registerScanCommand(program, dependencies = {}) {
  const cloneUrl = buildCloneUrl(parsed);
  const displayUrl = buildDisplayUrl(parsed);
  log("");
- log(chalk8.bold(`Scanning ${displayUrl}`));
- log(chalk8.gray(`Branch/tag: ${parsed.ref}`));
+ log(chalk7.bold(`Scanning ${displayUrl}`));
+ log(chalk7.gray(`Branch/tag: ${parsed.ref}`));
  log("");
- tempDir = path12.join(os.tmpdir(), `doccov-scan-${Date.now()}-${Math.random().toString(36).slice(2)}`);
- fs11.mkdirSync(tempDir, { recursive: true });
+ tempDir = path8.join(os.tmpdir(), `doccov-scan-${Date.now()}-${Math.random().toString(36).slice(2)}`);
+ fs7.mkdirSync(tempDir, { recursive: true });
  const cloneSpinner = spinner(`Cloning ${parsed.owner}/${parsed.repo}...`);
  cloneSpinner.start();
  try {
@@ -1869,37 +1692,31 @@ function registerScanCommand(program, dependencies = {}) {
  }
  throw new Error(`Clone failed: ${message}`);
  }
+ const fileSystem = new NodeFileSystem4(tempDir);
  if (options.skipInstall) {
- log(chalk8.gray("Skipping dependency installation (--skip-install)"));
+ log(chalk7.gray("Skipping dependency installation (--skip-install)"));
  } else {
  const installSpinner = spinner("Installing dependencies...");
  installSpinner.start();
  const installErrors = [];
  try {
  const { execSync } = await import("node:child_process");
- const lockfileCommands = [
- { file: "pnpm-lock.yaml", cmd: "pnpm install --frozen-lockfile" },
- { file: "bun.lock", cmd: "bun install --frozen-lockfile" },
- { file: "bun.lockb", cmd: "bun install --frozen-lockfile" },
- { file: "yarn.lock", cmd: "yarn install --frozen-lockfile" },
- { file: "package-lock.json", cmd: "npm install --legacy-peer-deps" }
- ];
+ const pmInfo = await detectPackageManager(fileSystem);
+ const installCmd = getInstallCommand(pmInfo);
+ const cmdString = installCmd.join(" ");
  let installed = false;
- for (const { file, cmd } of lockfileCommands) {
- if (fs11.existsSync(path12.join(tempDir, file))) {
- try {
- execSync(cmd, {
- cwd: tempDir,
- stdio: "pipe",
- timeout: 180000
- });
- installed = true;
- break;
- } catch (cmdError) {
- const stderr = cmdError?.stderr?.toString() ?? "";
- const msg = cmdError instanceof Error ? cmdError.message : String(cmdError);
- installErrors.push(`[${cmd}] ${stderr.slice(0, 150) || msg.slice(0, 150)}`);
- }
+ if (pmInfo.lockfile) {
+ try {
+ execSync(cmdString, {
+ cwd: tempDir,
+ stdio: "pipe",
+ timeout: 180000
+ });
+ installed = true;
+ } catch (cmdError) {
+ const stderr = cmdError?.stderr?.toString() ?? "";
+ const msg = cmdError instanceof Error ? cmdError.message : String(cmdError);
+ installErrors.push(`[${cmdString}] ${stderr.slice(0, 150) || msg.slice(0, 150)}`);
  }
  }
  if (!installed) {
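Note: the hand-rolled lockfile table (pnpm-lock.yaml, bun.lock, bun.lockb, yarn.lock, package-lock.json, each with its own install command) is replaced by the SDK's detectPackageManager/getInstallCommand pair. Behavior shifts slightly: the old loop tried every matching lockfile's command until one succeeded, whereas the new code commits to a single detected package manager and attempts one install. Judging from the join(" ") at the call site, getInstallCommand returns the command as an argv-style array — a rough sketch under that assumption:

// Rough sketch of the new install path; pmInfo.lockfile and the argv-array
// return of getInstallCommand are inferred from the call sites above.
import { detectPackageManager, getInstallCommand, NodeFileSystem } from "@doccov/sdk";
import { execSync } from "node:child_process";

async function installDeps(repoDir: string): Promise<boolean> {
  const pmInfo = await detectPackageManager(new NodeFileSystem(repoDir));
  if (!pmInfo.lockfile) return false; // no lockfile detected: skip install
  const cmd = getInstallCommand(pmInfo).join(" "); // e.g. "pnpm install --frozen-lockfile"
  execSync(cmd, { cwd: repoDir, stdio: "pipe", timeout: 180000 });
  return true;
}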
@@ -1933,67 +1750,46 @@ function registerScanCommand(program, dependencies = {}) {
  } else {
  installSpinner.warn("Could not install dependencies (analysis may be limited)");
  for (const err of installErrors) {
- log(chalk8.gray(`  ${err}`));
+ log(chalk7.gray(`  ${err}`));
  }
  }
  } catch (outerError) {
  const msg = outerError instanceof Error ? outerError.message : String(outerError);
  installSpinner.warn(`Could not install dependencies: ${msg.slice(0, 100)}`);
  for (const err of installErrors) {
- log(chalk8.gray(`  ${err}`));
+ log(chalk7.gray(`  ${err}`));
  }
  }
  }
  let targetDir = tempDir;
  let packageName;
- const mono = await detectMonorepo(tempDir);
+ const mono = await detectMonorepo4(fileSystem);
  if (mono.isMonorepo) {
  if (!options.package) {
  error("");
- error(chalk8.red(`Monorepo detected with ${mono.packages.length} packages. Specify target with --package:`));
+ error(chalk7.red(`Monorepo detected with ${mono.packages.length} packages. Specify target with --package:`));
  error("");
  error(formatPackageList(mono.packages));
  error("");
  throw new Error("Monorepo requires --package flag");
  }
- const pkg = await findPackage(tempDir, options.package);
+ const pkg = findPackageByName4(mono.packages, options.package);
  if (!pkg) {
  error("");
- error(chalk8.red(`Package "${options.package}" not found. Available packages:`));
+ error(chalk7.red(`Package "${options.package}" not found. Available packages:`));
  error("");
  error(formatPackageList(mono.packages));
  error("");
  throw new Error(`Package not found: ${options.package}`);
  }
- targetDir = pkg.path;
+ targetDir = path8.join(tempDir, pkg.path);
  packageName = pkg.name;
- log(chalk8.gray(`Analyzing package: ${packageName}`));
+ log(chalk7.gray(`Analyzing package: ${packageName}`));
  }
  const entrySpinner = spinner("Detecting entry point...");
  entrySpinner.start();
  let entryPath;
- const needsBuildStep = (pkgDir, repoRoot, entryFile) => {
- if (!entryFile.endsWith(".d.ts"))
- return false;
- const cargoLocations = [
- path12.join(pkgDir, "Cargo.toml"),
- path12.join(repoRoot, "Cargo.toml")
- ];
- const hasCargoToml = cargoLocations.some((p) => fs11.existsSync(p));
- const checkWasmScripts = (dir) => {
- const pkgPath = path12.join(dir, "package.json");
- if (fs11.existsSync(pkgPath)) {
- try {
- const pkg = JSON.parse(fs11.readFileSync(pkgPath, "utf-8"));
- const scripts = Object.values(pkg.scripts ?? {}).join(" ");
- return scripts.includes("wasm-pack") || scripts.includes("wasm");
- } catch {}
- }
- return false;
- };
- const hasWasmPackScript = checkWasmScripts(pkgDir) || checkWasmScripts(repoRoot);
- return hasCargoToml || hasWasmPackScript;
- };
+ const targetFs = mono.isMonorepo ? new NodeFileSystem4(targetDir) : fileSystem;
  let buildFailed = false;
  const runLlmFallback = async (reason) => {
  entrySpinner.text = `${reason}, trying LLM fallback...`;
@@ -2004,53 +1800,55 @@ function registerScanCommand(program, dependencies = {}) {
  if (plan.buildCommands.length > 0) {
  const { execSync } = await import("node:child_process");
  for (const cmd of plan.buildCommands) {
- log(chalk8.gray(`  Running: ${cmd}`));
+ log(chalk7.gray(`  Running: ${cmd}`));
  try {
  execSync(cmd, { cwd: targetDir, stdio: "pipe", timeout: 300000 });
  } catch (buildError) {
  buildFailed = true;
  const msg = buildError instanceof Error ? buildError.message : String(buildError);
  if (msg.includes("rustc") || msg.includes("cargo") || msg.includes("wasm-pack")) {
- log(chalk8.yellow(`  ⚠ Build requires Rust toolchain (not available)`));
+ log(chalk7.yellow(`  ⚠ Build requires Rust toolchain (not available)`));
  } else if (msg.includes("rimraf") || msg.includes("command not found")) {
- log(chalk8.yellow(`  ⚠ Build failed: missing dependencies`));
+ log(chalk7.yellow(`  ⚠ Build failed: missing dependencies`));
  } else {
- log(chalk8.yellow(`  ⚠ Build failed: ${msg.slice(0, 80)}`));
+ log(chalk7.yellow(`  ⚠ Build failed: ${msg.slice(0, 80)}`));
  }
  }
  }
  }
  if (plan.notes) {
- log(chalk8.gray(`  Note: ${plan.notes}`));
+ log(chalk7.gray(`  Note: ${plan.notes}`));
  }
  return plan.entryPoint;
  };
  try {
- const entry = detectEntryPoint(targetDir);
- if (needsBuildStep(targetDir, tempDir, entry.entryPath)) {
+ const entry = await detectEntryPoint4(targetFs);
+ const buildInfo = await detectBuildInfo(targetFs);
+ const needsBuildStep = entry.isDeclarationOnly && buildInfo.exoticIndicators.wasm;
+ if (needsBuildStep) {
  entrySpinner.text = "Detected .d.ts entry with WASM indicators...";
  const llmEntry = await runLlmFallback("WASM project detected");
  if (llmEntry) {
- entryPath = path12.join(targetDir, llmEntry);
+ entryPath = path8.join(targetDir, llmEntry);
  if (buildFailed) {
  entrySpinner.succeed(`Entry point: ${llmEntry} (using pre-committed declarations)`);
- log(chalk8.gray("  Coverage may be limited - generated .d.ts files typically lack JSDoc"));
+ log(chalk7.gray("  Coverage may be limited - generated .d.ts files typically lack JSDoc"));
  } else {
  entrySpinner.succeed(`Entry point: ${llmEntry} (from LLM fallback - WASM project)`);
  }
  } else {
- entryPath = path12.join(targetDir, entry.entryPath);
- entrySpinner.succeed(`Entry point: ${entry.entryPath} (from ${entry.source})`);
- log(chalk8.yellow("  ⚠ WASM project detected but no API key - analysis may be limited"));
+ entryPath = path8.join(targetDir, entry.path);
+ entrySpinner.succeed(`Entry point: ${entry.path} (from ${entry.source})`);
+ log(chalk7.yellow("  ⚠ WASM project detected but no API key - analysis may be limited"));
  }
  } else {
- entryPath = path12.join(targetDir, entry.entryPath);
- entrySpinner.succeed(`Entry point: ${entry.entryPath} (from ${entry.source})`);
+ entryPath = path8.join(targetDir, entry.path);
+ entrySpinner.succeed(`Entry point: ${entry.path} (from ${entry.source})`);
  }
  } catch (entryError) {
  const llmEntry = await runLlmFallback("Heuristics failed");
  if (llmEntry) {
- entryPath = path12.join(targetDir, llmEntry);
+ entryPath = path8.join(targetDir, llmEntry);
  entrySpinner.succeed(`Entry point: ${llmEntry} (from LLM fallback)`);
  } else {
  entrySpinner.fail("Could not detect entry point (set OPENAI_API_KEY for smart fallback)");
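Note: entry-point detection follows the same migration. The local needsBuildStep closure — which searched for Cargo.toml files and grepped package.json scripts for wasm-pack — collapses into detectEntryPoint plus detectBuildInfo, and the detected-entry field is renamed entry.entryPath → entry.path. The WASM gate as now composed in the diff (targetFs is the SDK filesystem for the target directory; any fields beyond those used here are not shown in this diff):

// As composed above: a declaration-only entry combined with WASM indicators
// routes to the LLM build-plan fallback instead of direct analysis.
const entry = await detectEntryPoint(targetFs); // exposes path, source, isDeclarationOnly
const buildInfo = await detectBuildInfo(targetFs); // exposes exoticIndicators.wasm
if (entry.isDeclarationOnly && buildInfo.exoticIndicators.wasm) {
  // .d.ts-only entry that still needs a build step (e.g. wasm-pack output)
}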
@@ -2072,9 +1870,9 @@ function registerScanCommand(program, dependencies = {}) {
  const spec = result.spec;
  const coverageScore = spec.docs?.coverageScore ?? 0;
  if (options.saveSpec) {
- const specPath = path12.resolve(process.cwd(), options.saveSpec);
- fs11.writeFileSync(specPath, JSON.stringify(spec, null, 2));
- log(chalk8.green(`✓ Saved spec to ${options.saveSpec}`));
+ const specPath = path8.resolve(process.cwd(), options.saveSpec);
+ fs7.writeFileSync(specPath, JSON.stringify(spec, null, 2));
+ log(chalk7.green(`✓ Saved spec to ${options.saveSpec}`));
  }
  const undocumented = [];
  const driftIssues = [];
@@ -2111,7 +1909,7 @@ function registerScanCommand(program, dependencies = {}) {
  printTextResult(scanResult, log);
  }
  } catch (commandError) {
- error(chalk8.red("Error:"), commandError instanceof Error ? commandError.message : commandError);
+ error(chalk7.red("Error:"), commandError instanceof Error ? commandError.message : commandError);
  process.exitCode = 1;
  } finally {
  if (tempDir && options.cleanup !== false) {
@@ -2121,46 +1919,46 @@ function registerScanCommand(program, dependencies = {}) {
  stdio: "ignore"
  }).unref();
  } else if (tempDir) {
- log(chalk8.gray(`Repo preserved at: ${tempDir}`));
+ log(chalk7.gray(`Repo preserved at: ${tempDir}`));
  }
  }
  });
  }
  function printTextResult(result, log) {
  log("");
- log(chalk8.bold("DocCov Scan Results"));
+ log(chalk7.bold("DocCov Scan Results"));
  log("─".repeat(40));
  const repoName = result.packageName ? `${result.owner}/${result.repo} (${result.packageName})` : `${result.owner}/${result.repo}`;
- log(`Repository: ${chalk8.cyan(repoName)}`);
- log(`Branch: ${chalk8.gray(result.ref)}`);
+ log(`Repository: ${chalk7.cyan(repoName)}`);
+ log(`Branch: ${chalk7.gray(result.ref)}`);
  log("");
- const coverageColor = result.coverage >= 80 ? chalk8.green : result.coverage >= 50 ? chalk8.yellow : chalk8.red;
- log(chalk8.bold("Coverage"));
+ const coverageColor = result.coverage >= 80 ? chalk7.green : result.coverage >= 50 ? chalk7.yellow : chalk7.red;
+ log(chalk7.bold("Coverage"));
  log(`  ${coverageColor(`${result.coverage}%`)}`);
  log("");
- log(chalk8.bold("Stats"));
+ log(chalk7.bold("Stats"));
  log(`  ${result.exportCount} exports`);
  log(`  ${result.typeCount} types`);
  log(`  ${result.undocumented.length} undocumented`);
  log(`  ${result.driftCount} drift issues`);
  if (result.undocumented.length > 0) {
  log("");
- log(chalk8.bold("Undocumented Exports"));
+ log(chalk7.bold("Undocumented Exports"));
  for (const name of result.undocumented.slice(0, 10)) {
- log(chalk8.yellow(`  ! ${name}`));
+ log(chalk7.yellow(`  ! ${name}`));
  }
  if (result.undocumented.length > 10) {
- log(chalk8.gray(`  ... and ${result.undocumented.length - 10} more`));
+ log(chalk7.gray(`  ... and ${result.undocumented.length - 10} more`));
  }
  }
  if (result.drift.length > 0) {
  log("");
- log(chalk8.bold("Drift Issues"));
+ log(chalk7.bold("Drift Issues"));
  for (const d of result.drift.slice(0, 5)) {
- log(chalk8.red(`  • ${d.export}: ${d.issue}`));
+ log(chalk7.red(`  • ${d.export}: ${d.issue}`));
  }
  if (result.drift.length > 5) {
- log(chalk8.gray(`  ... and ${result.drift.length - 5} more`));
+ log(chalk7.gray(`  ... and ${result.drift.length - 5} more`));
  }
  }
  log("");
@@ -2168,14 +1966,13 @@ function printTextResult(result, log) {

  // src/cli.ts
  var __filename2 = fileURLToPath(import.meta.url);
- var __dirname2 = path13.dirname(__filename2);
- var packageJson = JSON.parse(readFileSync9(path13.join(__dirname2, "../package.json"), "utf-8"));
+ var __dirname2 = path9.dirname(__filename2);
+ var packageJson = JSON.parse(readFileSync5(path9.join(__dirname2, "../package.json"), "utf-8"));
  var program = new Command;
  program.name("doccov").description("DocCov - Documentation coverage and drift detection for TypeScript").version(packageJson.version);
  registerGenerateCommand(program);
  registerCheckCommand(program);
  registerDiffCommand(program);
- registerFixCommand(program);
  registerInitCommand(program);
  registerReportCommand(program);
  registerScanCommand(program);
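Note: the final hunk drops registerFixCommand(program) from the CLI wiring, so 0.5.0 no longer registers a standalone fix subcommand; the other command registrations are unchanged. Taken together, the pattern across this file is consolidation: repo heuristics (monorepo layout, package-manager choice, entry-point detection) move out of the CLI bundle and into @doccov/sdk.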