@doccov/cli 0.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli.js ADDED
@@ -0,0 +1,1551 @@
1
+ #!/usr/bin/env node
2
+ import { createRequire } from "node:module";
3
// esbuild-generated interop helpers: shorthand aliases for Object statics
// used by the CJS->ESM shims below.
var __create = Object.create;
var __getProtoOf = Object.getPrototypeOf;
var __defProp = Object.defineProperty;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Wrap a CommonJS module record so it looks like an ES-module namespace:
// named exports become live getters, and (unless the module is already ESM)
// the whole module object is exposed as `default`.
var __toESM = (mod, isNodeMode, target) => {
  // Start from an object sharing the module's prototype (null-safe).
  target = mod != null ? __create(__getProtoOf(mod)) : {};
  // For CJS modules (or when Node-mode interop is forced), attach the
  // original module as a non-writable `default` property.
  const to = isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target;
  // Re-export every own property as a live getter, skipping names already
  // present (e.g. `default`).
  for (let key of __getOwnPropNames(mod))
    if (!__hasOwnProp.call(to, key))
      __defProp(to, key, {
        get: () => mod[key],
        enumerable: true
      });
  return to;
};
// CommonJS `require` usable from this ESM bundle.
var __require = /* @__PURE__ */ createRequire(import.meta.url);
20
+
21
+ // src/config/openpkg-config.ts
22
+ import { access } from "node:fs/promises";
23
+ import path from "node:path";
24
+ import { pathToFileURL } from "node:url";
25
+
26
+ // src/config/schema.ts
27
+ import { z } from "zod";
28
// A filter value may be given as a single string or a list of strings.
var stringList = z.union([
  z.string(),
  z.array(z.string())
]);
// Schema for doccov.config.* / openpkg.config.* files. All fields are
// optional; `plugins` entries are opaque at this layer (validated later).
var docCovConfigSchema = z.object({
  include: stringList.optional(),
  exclude: stringList.optional(),
  plugins: z.array(z.unknown()).optional()
});
37
// Coerce a string-or-array filter value into a trimmed, non-empty string
// array. Returns undefined for missing input or when nothing remains after
// trimming and dropping blanks.
var normalizeList = (value) => {
  if (!value) {
    return;
  }
  const items = (Array.isArray(value) ? value : [value])
    .map((entry) => entry.trim())
    .filter(Boolean);
  return items.length > 0 ? items : undefined;
};
45
// Normalize a validated raw config: clean up the include/exclude lists and
// pass plugins through untouched.
var normalizeConfig = (input) => ({
  include: normalizeList(input.include),
  exclude: normalizeList(input.exclude),
  plugins: input.plugins
});
54
+
55
+ // src/config/openpkg-config.ts
56
// Recognized config file names, in lookup priority order: the DocCov name
// first (legacy OpenPkg name second), each with TS variants before JS.
var DOCCOV_CONFIG_FILENAMES = ["doccov", "openpkg"].flatMap((base) =>
  ["ts", "mts", "cts", "js", "mjs", "cjs"].map((ext) => `${base}.config.${ext}`)
);
70
// Async existence probe: resolves true when `access` succeeds and false on
// any failure (missing file, permission error, etc.).
var fileExists = async (filePath) => {
  return access(filePath).then(
    () => true,
    () => false
  );
};
78
// Walk from `cwd` up to the filesystem root, returning the first config
// file found (candidates checked in DOCCOV_CONFIG_FILENAMES priority order
// within each directory), or null when none exists.
var findConfigFile = async (cwd) => {
  let dir = path.resolve(cwd);
  const { root } = path.parse(dir);
  for (;;) {
    for (const fileName of DOCCOV_CONFIG_FILENAMES) {
      const candidatePath = path.join(dir, fileName);
      if (await fileExists(candidatePath)) {
        return candidatePath;
      }
    }
    if (dir === root) {
      return null;
    }
    dir = path.dirname(dir);
  }
};
94
// Dynamically import a config module. A timestamp query parameter busts
// Node's ESM module cache so repeated loads within one process pick up
// file changes.
var importConfigModule = async (absolutePath) => {
  const fileUrl = pathToFileURL(absolutePath);
  fileUrl.searchParams.set("t", Date.now().toString());
  const module = await import(fileUrl.href);
  // Prefer `export default`, then a named `config` export, otherwise the
  // whole namespace object.
  return module?.default ?? module?.config ?? module;
};
100
// Render validation issue strings as a newline-separated bullet list.
var formatIssues = (issues) => {
  const bullets = [];
  for (const issue of issues) {
    bullets.push(`- ${issue}`);
  }
  return bullets.join("\n");
};
102
// Locate, import, validate, and normalize the nearest DocCov config file.
// Returns null when no config file exists; throws with a descriptive
// message on import failure or schema-validation failure.
var loadDocCovConfig = async (cwd) => {
  const configPath = await findConfigFile(cwd);
  if (!configPath) {
    return null;
  }
  let loaded;
  try {
    loaded = await importConfigModule(configPath);
  } catch (loadError) {
    const reason = loadError instanceof Error ? loadError.message : String(loadError);
    throw new Error(`Failed to load DocCov config at ${configPath}: ${reason}`);
  }
  const parsed = docCovConfigSchema.safeParse(loaded);
  if (!parsed.success) {
    // Prefix each issue with its object path (or "(root)" for top level).
    const issues = parsed.error.issues.map(({ path: issuePath, message }) => {
      const label = issuePath.length > 0 ? issuePath.join(".") : "(root)";
      return `${label}: ${message}`;
    });
    throw new Error(`Invalid DocCov configuration at ${configPath}.\n${formatIssues(issues)}`);
  }
  return {
    filePath: configPath,
    ...normalizeConfig(parsed.data)
  };
};
129
// Backward-compatible aliases: earlier releases exposed the loader under
// the OpenPkg name; both resolve to the same implementation.
var loadOpenPkgConfigInternal = loadDocCovConfig;
var loadOpenPkgConfig = loadDocCovConfig;

// src/config/index.ts
// Identity helper: gives config files editor autocomplete/type inference
// without changing the object at runtime.
var defineConfig = (config) => config;
134
+ // src/cli.ts
135
+ import { readFileSync as readFileSync8 } from "node:fs";
136
+ import * as path11 from "node:path";
137
+ import { fileURLToPath } from "node:url";
138
+ import { Command } from "commander";
139
+
140
+ // src/commands/check.ts
141
+ import * as fs2 from "node:fs";
142
+ import * as path3 from "node:path";
143
+ import { DocCov } from "@doccov/sdk";
144
+ import chalk from "chalk";
145
+ import ora from "ora";
146
+
147
+ // src/utils/package-utils.ts
148
+ import * as fs from "node:fs";
149
+ import * as path2 from "node:path";
150
/**
 * Resolve the best TypeScript entry point for a package directory.
 *
 * Resolution precedence (order is significant):
 *   1. no package.json            -> conventional fallback locations
 *   2. preferSource               -> src/index.ts when present
 *   3. !preferSource              -> package.json "types"/"typings" field
 *   4. package.json "exports"     -> via resolveExportsField
 *   5. package.json "main"        -> sibling .d.ts / .ts, or index files
 *                                    when "main" points at a directory
 *   6. otherwise                  -> conventional fallback locations
 *
 * @param {string} packageDir - Absolute or relative package root.
 * @param {boolean} preferSource - Prefer src/ TypeScript over built types.
 * @returns {Promise<string>} Path to the detected entry file.
 * @throws If no entry point can be found (via findDefaultEntryPoint).
 */
async function findEntryPoint(packageDir, preferSource = false) {
  const packageJsonPath = path2.join(packageDir, "package.json");
  if (!fs.existsSync(packageJsonPath)) {
    return findDefaultEntryPoint(packageDir);
  }
  const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, "utf-8"));
  if (preferSource) {
    const srcIndex = path2.join(packageDir, "src/index.ts");
    if (fs.existsSync(srcIndex)) {
      return srcIndex;
    }
  }
  // Built type declarations are only honored when source is not preferred.
  if (!preferSource && (packageJson.types || packageJson.typings)) {
    const typesPath = path2.join(packageDir, packageJson.types || packageJson.typings);
    if (fs.existsSync(typesPath)) {
      return typesPath;
    }
  }
  if (packageJson.exports) {
    const exportPath = resolveExportsField(packageJson.exports, packageDir);
    if (exportPath) {
      return exportPath;
    }
  }
  if (packageJson.main) {
    // Try TypeScript siblings of the "main" JS file (foo.js -> foo.d.ts/.ts).
    const mainBase = packageJson.main.replace(/\.(js|mjs|cjs)$/, "");
    const dtsPath = path2.join(packageDir, `${mainBase}.d.ts`);
    if (fs.existsSync(dtsPath)) {
      return dtsPath;
    }
    const tsPath = path2.join(packageDir, `${mainBase}.ts`);
    if (fs.existsSync(tsPath)) {
      return tsPath;
    }
    // "main" may name a directory; look for index declaration/source inside.
    const mainPath = path2.join(packageDir, packageJson.main);
    if (fs.existsSync(mainPath) && fs.statSync(mainPath).isDirectory()) {
      const indexDts = path2.join(mainPath, "index.d.ts");
      const indexTs = path2.join(mainPath, "index.ts");
      if (fs.existsSync(indexDts))
        return indexDts;
      if (fs.existsSync(indexTs))
        return indexTs;
    }
  }
  return findDefaultEntryPoint(packageDir);
}
196
// Resolve a package.json "exports" field to a TypeScript entry file.
// Handles the string shorthand and the "." entry (string or conditional
// object). Within a conditional object, an explicit "types" path wins,
// then "import"/"require"/"default" targets are tried in that order.
// Returns null when nothing resolves.
function resolveExportsField(exports, packageDir) {
  if (typeof exports === "string") {
    return findTypeScriptFile(path2.join(packageDir, exports));
  }
  if (typeof exports !== "object" || exports === null || !("." in exports)) {
    return null;
  }
  const dotExport = exports["."];
  if (typeof dotExport === "string") {
    return findTypeScriptFile(path2.join(packageDir, dotExport));
  }
  if (!dotExport || typeof dotExport !== "object") {
    return null;
  }
  const typesEntry = dotExport.types;
  if (typeof typesEntry === "string") {
    const typesPath = path2.join(packageDir, typesEntry);
    if (fs.existsSync(typesPath)) {
      return typesPath;
    }
  }
  for (const condition of ["import", "require", "default"]) {
    const target = dotExport[condition];
    if (typeof target !== "string") {
      continue;
    }
    const resolved = findTypeScriptFile(path2.join(packageDir, target));
    if (resolved) {
      return resolved;
    }
  }
  return null;
}
226
// Given an existing JS file path, find its TypeScript counterpart by
// swapping the .js/.mjs/.cjs extension for .d.ts, then .ts. Returns null
// when the JS file itself is missing or no counterpart exists.
function findTypeScriptFile(jsPath) {
  if (!fs.existsSync(jsPath)) {
    return null;
  }
  for (const replacement of [".d.ts", ".ts"]) {
    const candidate = jsPath.replace(/\.(js|mjs|cjs)$/, replacement);
    if (fs.existsSync(candidate)) {
      return candidate;
    }
  }
  return null;
}
239
// Fall back to conventional entry locations, checked in priority order
// (built declarations first, then sources, then package-root index files).
// Throws when none of the candidates exist.
async function findDefaultEntryPoint(packageDir) {
  const candidates = [
    "dist/index.d.ts",
    "dist/index.ts",
    "lib/index.d.ts",
    "lib/index.ts",
    "src/index.ts",
    "index.d.ts",
    "index.ts"
  ];
  const match = candidates
    .map((candidate) => path2.join(packageDir, candidate))
    .find((fullPath) => fs.existsSync(fullPath));
  if (match) {
    return match;
  }
  throw new Error(`Could not find entry point in ${packageDir}`);
}
257
// Search a monorepo's workspace directories for a package whose
// package.json "name" matches `packageName`. Supports both the array and
// `{ packages: [...] }` forms of the root "workspaces" field; glob
// suffixes (/* and /**) are stripped and the remaining directory is
// scanned one level deep. Returns the package directory or null.
async function findPackageInMonorepo(rootDir, packageName) {
  const rootPackageJsonPath = path2.join(rootDir, "package.json");
  if (!fs.existsSync(rootPackageJsonPath)) {
    return null;
  }
  const rootPackageJson = JSON.parse(fs.readFileSync(rootPackageJsonPath, "utf-8"));
  const workspaces = rootPackageJson.workspaces;
  const workspacePatterns = Array.isArray(workspaces) ? workspaces : workspaces?.packages || [];
  for (const pattern of workspacePatterns) {
    const baseDir = path2.join(rootDir, pattern.replace("/**", "").replace("/*", ""));
    if (!fs.existsSync(baseDir) || !fs.statSync(baseDir).isDirectory()) {
      continue;
    }
    for (const entry of fs.readdirSync(baseDir, { withFileTypes: true })) {
      if (!entry.isDirectory()) {
        continue;
      }
      const packagePath = path2.join(baseDir, entry.name);
      const packageJsonPath = path2.join(packagePath, "package.json");
      if (!fs.existsSync(packageJsonPath)) {
        continue;
      }
      const manifest = JSON.parse(fs.readFileSync(packageJsonPath, "utf-8"));
      if (manifest.name === packageName) {
        return packagePath;
      }
    }
  }
  return null;
}
284
+
285
+ // src/commands/check.ts
286
// Default collaborators for the `check` command; tests can override any of
// these via the `dependencies` argument of registerCheckCommand.
var defaultDependencies = {
  createDocCov: (options) => new DocCov(options),
  spinner: (text) => ora(text),
  log: console.log,
  error: console.error
};
292
/**
 * Register the `check [entry]` command on a commander program.
 *
 * Analyzes documentation coverage for a package entry point and throws
 * (so the CLI exits non-zero) when the coverage score falls below
 * --min-coverage, when --require-examples is set and exports lack
 * examples, or when documentation drift is detected.
 *
 * @param {import("commander").Command} program
 * @param {object} dependencies - Optional overrides for defaultDependencies
 *   (createDocCov, spinner, log, error) used in tests.
 */
function registerCheckCommand(program, dependencies = {}) {
  const { createDocCov, spinner, log, error } = {
    ...defaultDependencies,
    ...dependencies
  };
  program.command("check [entry]").description("Fail if documentation coverage falls below a threshold").option("--cwd <dir>", "Working directory", process.cwd()).option("--package <name>", "Target package name (for monorepos)").option("--min-coverage <percentage>", "Minimum docs coverage percentage (0-100)", (value) => Number(value)).option("--require-examples", "Require at least one @example for every export").option("--no-external-types", "Skip external type resolution from node_modules").action(async (entry, options) => {
    try {
      let targetDir = options.cwd;
      let entryFile = entry;
      // Monorepo mode: resolve the named workspace package first.
      if (options.package) {
        const packageDir = await findPackageInMonorepo(options.cwd, options.package);
        if (!packageDir) {
          throw new Error(`Package "${options.package}" not found in monorepo`);
        }
        targetDir = packageDir;
        log(chalk.gray(`Found package at ${path3.relative(options.cwd, packageDir)}`));
      }
      // Resolve the entry file: auto-detect when omitted, or descend into
      // a directory argument.
      if (!entryFile) {
        entryFile = await findEntryPoint(targetDir, true);
        log(chalk.gray(`Auto-detected entry point: ${path3.relative(targetDir, entryFile)}`));
      } else {
        entryFile = path3.resolve(targetDir, entryFile);
        if (fs2.existsSync(entryFile) && fs2.statSync(entryFile).isDirectory()) {
          entryFile = await findEntryPoint(entryFile, true);
          log(chalk.gray(`Auto-detected entry point: ${entryFile}`));
        }
      }
      // Threshold defaults to 80 and is clamped to [0, 100].
      const minCoverage = clampCoverage(options.minCoverage ?? 80);
      const resolveExternalTypes = options.externalTypes !== false;
      const spinnerInstance = spinner("Analyzing documentation coverage...");
      spinnerInstance.start();
      let specResult;
      try {
        const doccov = createDocCov({ resolveExternalTypes });
        specResult = await doccov.analyzeFileWithDiagnostics(entryFile);
        spinnerInstance.succeed("Documentation analysis complete");
      } catch (analysisError) {
        spinnerInstance.fail("Failed to analyze documentation coverage");
        throw analysisError;
      }
      if (!specResult) {
        throw new Error("Failed to analyze documentation coverage.");
      }
      const spec = specResult.spec;
      const coverageScore = spec.docs?.coverageScore ?? 0;
      const failingExports = collectFailingExports(spec.exports ?? [], minCoverage);
      // Example enforcement only applies when the flag was passed.
      const missingExamples = options.requireExamples ? failingExports.filter((item) => item.missing?.includes("examples")) : [];
      const driftExports = collectDrift(spec.exports ?? []);
      const coverageFailed = coverageScore < minCoverage;
      const hasMissingExamples = missingExamples.length > 0;
      const hasDrift = driftExports.length > 0;
      // Success path: report, optionally list partially-documented exports.
      if (!coverageFailed && !hasMissingExamples && !hasDrift) {
        log(chalk.green(`✓ Docs coverage ${coverageScore}% (min ${minCoverage}%)`));
        if (failingExports.length > 0) {
          log(chalk.gray("Some exports have partial docs:"));
          for (const { name, missing } of failingExports.slice(0, 10)) {
            log(chalk.gray(` • ${name}: missing ${missing?.join(", ")}`));
          }
        }
        return;
      }
      // Failure path: print a summary, then details (capped at 10 each).
      error("");
      if (coverageFailed) {
        error(chalk.red(`Docs coverage ${coverageScore}% fell below required ${minCoverage}%.`));
      }
      if (hasMissingExamples) {
        error(chalk.red(`${missingExamples.length} export(s) missing examples (required via --require-examples)`));
      }
      if (failingExports.length > 0 || driftExports.length > 0) {
        error("");
        error(chalk.bold("Missing documentation details:"));
        for (const { name, missing } of failingExports.slice(0, 10)) {
          error(chalk.red(` • ${name}: missing ${missing?.join(", ")}`));
        }
        for (const drift of driftExports.slice(0, 10)) {
          error(chalk.red(` • ${drift.name}: ${drift.issue}`));
          if (drift.suggestion) {
            error(chalk.yellow(` Suggestion: ${drift.suggestion}`));
          }
        }
      }
      throw new Error("Documentation coverage requirements not met.");
    } catch (commandError) {
      // Surface the error and rethrow (wrapped if needed) so the CLI's
      // top-level handler sets a non-zero exit code.
      error(chalk.red("Error:"), commandError instanceof Error ? commandError.message : commandError);
      throw commandError instanceof Error ? commandError : new Error(String(commandError));
    }
  });
}
380
// Normalize a user-supplied coverage threshold: NaN falls back to the
// default of 80; otherwise round to the nearest integer and clamp to
// the [0, 100] range.
function clampCoverage(value) {
  if (Number.isNaN(value)) {
    return 80;
  }
  const rounded = Math.round(value);
  if (rounded < 0) {
    return 0;
  }
  if (rounded > 100) {
    return 100;
  }
  return rounded;
}
386
// Collect exports that fail the docs bar: coverage score below the
// threshold, or a non-empty list of missing doc elements. Each offender
// carries its name and (possibly undefined) missing list.
function collectFailingExports(exportsList, minCoverage) {
  const offenders = [];
  for (const item of exportsList) {
    const score = item.docs?.coverageScore ?? 0;
    const missing = item.docs?.missing;
    const belowThreshold = score < minCoverage;
    const hasMissing = Boolean(missing && missing.length > 0);
    if (belowThreshold || hasMissing) {
      offenders.push({ name: item.name, missing });
    }
  }
  return offenders;
}
400
// Flatten every drift signal across all exports into one list of
// { name, issue, suggestion } records, defaulting the issue text when a
// signal does not provide one.
function collectDrift(exportsList) {
  const drifts = [];
  for (const item of exportsList) {
    for (const signal of item.docs?.drift ?? []) {
      drifts.push({
        name: item.name,
        issue: signal.issue ?? "Documentation drift detected.",
        suggestion: signal.suggestion
      });
    }
  }
  return drifts;
}
417
+
418
+ // src/commands/diff.ts
419
+ import * as fs3 from "node:fs";
420
+ import * as path4 from "node:path";
421
+ import { diffSpec } from "@openpkg-ts/spec";
422
+ import chalk2 from "chalk";
423
// Default collaborators for the `diff` command; tests can override any of
// these via the `dependencies` argument of registerDiffCommand.
var defaultDependencies2 = {
  readFileSync: fs3.readFileSync,
  log: console.log,
  error: console.error
};
428
/**
 * Register the `diff <base> <head>` command on a commander program.
 *
 * Loads two OpenPkg spec JSON files, diffs them with diffSpec, and prints
 * the result as text (default) or JSON. Sets process.exitCode = 1 when
 * --fail-on-regression / --fail-on-drift trip, or on any load error.
 *
 * @param {import("commander").Command} program
 * @param {object} dependencies - Optional overrides for defaultDependencies2
 *   (readFileSync, log, error) used in tests.
 */
function registerDiffCommand(program, dependencies = {}) {
  const { readFileSync: readFileSync3, log, error } = {
    ...defaultDependencies2,
    ...dependencies
  };
  program.command("diff <base> <head>").description("Compare two OpenPkg specs and report coverage delta").option("--output <format>", "Output format: json or text", "text").option("--fail-on-regression", "Exit with error if coverage regressed").option("--fail-on-drift", "Exit with error if new drift was introduced").action((base, head, options) => {
    try {
      const baseSpec = loadSpec(base, readFileSync3);
      const headSpec = loadSpec(head, readFileSync3);
      const diff = diffSpec(baseSpec, headSpec);
      const format = options.output ?? "text";
      if (format === "json") {
        log(JSON.stringify(diff, null, 2));
      } else {
        printTextDiff(diff, log, error);
      }
      // Negative delta means head has lower coverage than base.
      if (options.failOnRegression && diff.coverageDelta < 0) {
        error(chalk2.red(`
Coverage regressed by ${Math.abs(diff.coverageDelta)}%`));
        process.exitCode = 1;
        return;
      }
      if (options.failOnDrift && diff.driftIntroduced > 0) {
        error(chalk2.red(`
${diff.driftIntroduced} new drift issue(s) introduced`));
        process.exitCode = 1;
        return;
      }
    } catch (commandError) {
      error(chalk2.red("Error:"), commandError instanceof Error ? commandError.message : commandError);
      process.exitCode = 1;
    }
  });
}
462
// Read and JSON-parse a spec file. Throws "File not found" when the path
// does not exist, and wraps read/parse failures in a descriptive error.
// `readFileSync3` is injected so tests can stub file contents.
function loadSpec(filePath, readFileSync3) {
  const resolvedPath = path4.resolve(filePath);
  if (!fs3.existsSync(resolvedPath)) {
    throw new Error(`File not found: ${filePath}`);
  }
  try {
    return JSON.parse(readFileSync3(resolvedPath, "utf-8"));
  } catch (parseError) {
    const reason = parseError instanceof Error ? parseError.message : parseError;
    throw new Error(`Failed to parse ${filePath}: ${reason}`);
  }
}
474
/**
 * Print a human-readable diff report: coverage delta, API changes
 * (breaking / new exports), docs health, and drift. Item lists are capped
 * at 5 entries with a "... and N more" trailer.
 *
 * @param {object} diff - Result of diffSpec.
 * @param {Function} log - stdout sink.
 * @param {Function} error - stderr sink (currently unused here; kept for
 *   signature compatibility with callers).
 */
function printTextDiff(diff, log, error) {
  log("");
  log(chalk2.bold("DocCov Diff Report"));
  log("─".repeat(40));
  // Color/symbol/sign all keyed off the direction of the coverage change.
  const coverageColor = diff.coverageDelta > 0 ? chalk2.green : diff.coverageDelta < 0 ? chalk2.red : chalk2.gray;
  const coverageSymbol = diff.coverageDelta > 0 ? "↑" : diff.coverageDelta < 0 ? "↓" : "→";
  const deltaStr = diff.coverageDelta > 0 ? `+${diff.coverageDelta}` : String(diff.coverageDelta);
  log("");
  log(chalk2.bold("Coverage"));
  log(` ${diff.oldCoverage}% ${coverageSymbol} ${diff.newCoverage}% ${coverageColor(`(${deltaStr}%)`)}`);
  if (diff.breaking.length > 0 || diff.nonBreaking.length > 0) {
    log("");
    log(chalk2.bold("API Changes"));
    if (diff.breaking.length > 0) {
      log(chalk2.red(` ${diff.breaking.length} breaking change(s)`));
      for (const id of diff.breaking.slice(0, 5)) {
        log(chalk2.red(` - ${id}`));
      }
      if (diff.breaking.length > 5) {
        log(chalk2.gray(` ... and ${diff.breaking.length - 5} more`));
      }
    }
    if (diff.nonBreaking.length > 0) {
      log(chalk2.green(` ${diff.nonBreaking.length} new export(s)`));
      for (const id of diff.nonBreaking.slice(0, 5)) {
        log(chalk2.green(` + ${id}`));
      }
      if (diff.nonBreaking.length > 5) {
        log(chalk2.gray(` ... and ${diff.nonBreaking.length - 5} more`));
      }
    }
  }
  log("");
  log(chalk2.bold("Docs Health"));
  if (diff.newUndocumented.length > 0) {
    log(chalk2.yellow(` ${diff.newUndocumented.length} new undocumented export(s)`));
    for (const id of diff.newUndocumented.slice(0, 5)) {
      log(chalk2.yellow(` ! ${id}`));
    }
    if (diff.newUndocumented.length > 5) {
      log(chalk2.gray(` ... and ${diff.newUndocumented.length - 5} more`));
    }
  }
  if (diff.improvedExports.length > 0) {
    log(chalk2.green(` ${diff.improvedExports.length} export(s) improved docs`));
  }
  if (diff.regressedExports.length > 0) {
    log(chalk2.red(` ${diff.regressedExports.length} export(s) regressed docs`));
    for (const id of diff.regressedExports.slice(0, 5)) {
      log(chalk2.red(` ↓ ${id}`));
    }
  }
  // Drift section only appears when something changed in either direction.
  if (diff.driftIntroduced > 0 || diff.driftResolved > 0) {
    log("");
    log(chalk2.bold("Drift"));
    if (diff.driftIntroduced > 0) {
      log(chalk2.red(` +${diff.driftIntroduced} new drift issue(s)`));
    }
    if (diff.driftResolved > 0) {
      log(chalk2.green(` -${diff.driftResolved} drift issue(s) resolved`));
    }
  }
  log("");
}
538
+
539
+ // src/commands/generate.ts
540
+ import * as fs4 from "node:fs";
541
+ import * as path5 from "node:path";
542
+ import { DocCov as DocCov2 } from "@doccov/sdk";
543
+ import { normalize, validateSpec } from "@openpkg-ts/spec";
544
+ import chalk4 from "chalk";
545
+ import ora2 from "ora";
546
+
547
+ // src/utils/filter-options.ts
548
+ import chalk3 from "chalk";
549
// Dedupe while preserving first-seen order.
var unique = (values) => [...new Set(values)];
550
// Parse a repeatable/comma-separated CLI flag into a deduped string array.
// Accepts a single value or an array of values; each value may itself be a
// comma-separated list. Returns undefined when empty or absent.
var parseListFlag = (value) => {
  if (!value) {
    return;
  }
  const rawItems = Array.isArray(value) ? value : [value];
  const cleaned = rawItems
    .flatMap((item) => String(item).split(","))
    .map((piece) => piece.trim())
    .filter(Boolean);
  // Dedupe preserving first-seen order (inlined `unique`).
  return cleaned.length > 0 ? [...new Set(cleaned)] : undefined;
};
558
// Render "label: v1, v2" with each value cyan-highlighted for CLI output.
var formatList = (label, values) => `${label}: ${values.map((value) => chalk3.cyan(value)).join(", ")}`;
559
/**
 * Merge include/exclude export filters from the config file with CLI flags.
 *
 * Semantics: a CLI include narrows (intersects) a config include; a CLI
 * exclude is additive to a config exclude. `source` records provenance:
 * "config", "cli", or "combined". `messages` collects human-readable notes
 * about which filters were applied and where they came from.
 *
 * Fix: the include branch previously derived `source` from the merged
 * include array, which is always truthy after assignment, so CLI-only
 * include filters were mislabeled "combined". It now checks whether config
 * already contributed, mirroring the exclude branch.
 *
 * @param {object|null} config - Loaded DocCov config (or null).
 * @param {{include?: string[], exclude?: string[]}} cliOptions - Parsed CLI flags.
 * @returns {{include?: string[], exclude?: string[], source?: string, messages: string[]}}
 */
var mergeFilterOptions = (config, cliOptions) => {
  const messages = [];
  const configInclude = config?.include;
  const configExclude = config?.exclude;
  const cliInclude = cliOptions.include;
  const cliExclude = cliOptions.exclude;
  let include = configInclude;
  let exclude = configExclude;
  let source = include || exclude ? "config" : undefined;
  if (configInclude) {
    messages.push(formatList("include filters from config", configInclude));
  }
  if (configExclude) {
    messages.push(formatList("exclude filters from config", configExclude));
  }
  if (cliInclude) {
    // Intersect with the config include when present; otherwise take the
    // CLI list as-is.
    include = include ? include.filter((item) => cliInclude.includes(item)) : cliInclude;
    // BUGFIX: provenance depends on whether config already contributed,
    // not on the merged array (which is always truthy here).
    source = source ? "combined" : "cli";
    messages.push(formatList("apply include filters from CLI", cliInclude));
  }
  if (cliExclude) {
    // Excludes are additive across config and CLI.
    exclude = exclude ? unique([...exclude, ...cliExclude]) : cliExclude;
    source = source ? "combined" : "cli";
    messages.push(formatList("apply exclude filters from CLI", cliExclude));
  }
  include = include ? unique(include) : undefined;
  exclude = exclude ? unique(exclude) : undefined;
  if (!include && !exclude) {
    return { messages };
  }
  return {
    include,
    exclude,
    source,
    messages
  };
};
596
+
597
+ // src/commands/generate.ts
598
// Default collaborators for the `generate` command; tests can override any
// of these via the `dependencies` argument of registerGenerateCommand.
var defaultDependencies3 = {
  createDocCov: (options) => new DocCov2(options),
  writeFileSync: fs4.writeFileSync,
  spinner: (text) => ora2(text),
  log: console.log,
  error: console.error
};
605
// Length of an array-valued field; 0 for anything that is not an array.
function getArrayLength(value) {
  if (!Array.isArray(value)) {
    return 0;
  }
  return value.length;
}
608
// Produce a structural copy of the spec with every `docs` field removed:
// the root-level one and the per-export ones. The input is not mutated.
function stripDocsFields(spec) {
  const { docs: _rootDocs, ...specWithoutDocs } = spec;
  const strippedExports = spec.exports?.map((exportEntry) => {
    const { docs: _exportDocs, ...keep } = exportEntry;
    return keep;
  });
  return {
    ...specWithoutDocs,
    exports: strippedExports
  };
}
618
// Format one diagnostic line as "<prefix> [file:line:col ]<message>".
// The gray location segment appears only when the diagnostic carries a
// file; line/column default to 1. Paths are shown relative to baseDir,
// falling back to the absolute path when the relative form is empty.
function formatDiagnosticOutput(prefix, diagnostic, baseDir) {
  const { location } = diagnostic;
  let locationPrefix = "";
  if (location?.file) {
    const relativePath = path5.relative(baseDir, location.file) || location.file;
    const locationText = chalk4.gray(`${relativePath}:${location.line ?? 1}:${location.column ?? 1}`);
    locationPrefix = `${locationText} `;
  }
  return `${prefix} ${locationPrefix}${diagnostic.message}`;
}
625
/**
 * Register the `generate [entry]` command on a commander program.
 *
 * Analyzes a package entry point, normalizes and schema-validates the
 * resulting OpenPkg spec, and writes it to --output (default openpkg.json).
 * Supports monorepo package targeting, include/exclude filters merged from
 * config and CLI, optional stripping of docs fields (--no-docs), and
 * optional diagnostics output. Exits the process with code 1 on config
 * load failure, schema validation failure, or any other error.
 *
 * @param {import("commander").Command} program
 * @param {object} dependencies - Optional overrides for defaultDependencies3
 *   (createDocCov, writeFileSync, spinner, log, error) used in tests.
 */
function registerGenerateCommand(program, dependencies = {}) {
  const { createDocCov, writeFileSync: writeFileSync2, spinner, log, error } = {
    ...defaultDependencies3,
    ...dependencies
  };
  program.command("generate [entry]").description("Generate OpenPkg specification for documentation coverage analysis").option("-o, --output <file>", "Output file", "openpkg.json").option("-p, --package <name>", "Target package name (for monorepos)").option("--cwd <dir>", "Working directory", process.cwd()).option("--no-external-types", "Skip external type resolution from node_modules").option("--include <ids>", "Filter exports by identifier (comma-separated or repeated)").option("--exclude <ids>", "Exclude exports by identifier (comma-separated or repeated)").option("--show-diagnostics", "Print TypeScript diagnostics from analysis").option("--no-docs", "Omit docs coverage fields from output (pure structural spec)").option("-y, --yes", "Skip all prompts and use defaults").action(async (entry, options) => {
    try {
      let targetDir = options.cwd;
      let entryFile = entry;
      // Monorepo mode: resolve the named workspace package first.
      if (options.package) {
        const packageDir = await findPackageInMonorepo(options.cwd, options.package);
        if (!packageDir) {
          throw new Error(`Package "${options.package}" not found in monorepo`);
        }
        targetDir = packageDir;
        log(chalk4.gray(`Found package at ${path5.relative(options.cwd, packageDir)}`));
      }
      // Resolve the entry file: auto-detect when omitted, or descend into
      // a directory argument.
      if (!entryFile) {
        entryFile = await findEntryPoint(targetDir, true);
        log(chalk4.gray(`Auto-detected entry point: ${path5.relative(targetDir, entryFile)}`));
      } else {
        entryFile = path5.resolve(targetDir, entryFile);
        if (fs4.existsSync(entryFile) && fs4.statSync(entryFile).isDirectory()) {
          entryFile = await findEntryPoint(entryFile, true);
          log(chalk4.gray(`Auto-detected entry point: ${entryFile}`));
        }
      }
      const resolveExternalTypes = options.externalTypes !== false;
      const cliFilters = {
        include: parseListFlag(options.include),
        exclude: parseListFlag(options.exclude)
      };
      // Config load failure is fatal: a present-but-broken config should
      // not be silently ignored.
      let config = null;
      try {
        config = await loadDocCovConfig(targetDir);
        if (config?.filePath) {
          log(chalk4.gray(`Loaded configuration from ${path5.relative(targetDir, config.filePath)}`));
        }
      } catch (configError) {
        error(chalk4.red("Failed to load DocCov config:"), configError instanceof Error ? configError.message : configError);
        process.exit(1);
      }
      const resolvedFilters = mergeFilterOptions(config, cliFilters);
      for (const message of resolvedFilters.messages) {
        log(chalk4.gray(`• ${message}`));
      }
      const spinnerInstance = spinner("Generating OpenPkg spec...");
      spinnerInstance.start();
      let result;
      try {
        const doccov = createDocCov({
          resolveExternalTypes
        });
        // Only pass a filters object when at least one filter is active.
        const analyzeOptions = resolvedFilters.include || resolvedFilters.exclude ? {
          filters: {
            include: resolvedFilters.include,
            exclude: resolvedFilters.exclude
          }
        } : {};
        result = await doccov.analyzeFileWithDiagnostics(entryFile, analyzeOptions);
        spinnerInstance.succeed("Generated OpenPkg spec");
      } catch (generationError) {
        spinnerInstance.fail("Failed to generate spec");
        throw generationError;
      }
      if (!result) {
        throw new Error("Failed to produce an OpenPkg spec.");
      }
      // Output path is resolved against the invocation cwd, not targetDir.
      const outputPath = path5.resolve(process.cwd(), options.output);
      let normalized = normalize(result.spec);
      if (options.docs === false) {
        normalized = stripDocsFields(normalized);
      }
      const validation = validateSpec(normalized);
      if (!validation.ok) {
        spinnerInstance.fail("Spec failed schema validation");
        for (const err of validation.errors) {
          error(chalk4.red(`schema: ${err.instancePath || "/"} ${err.message}`));
        }
        process.exit(1);
      }
      writeFileSync2(outputPath, JSON.stringify(normalized, null, 2));
      log(chalk4.green(`✓ Generated ${options.output}`));
      log(chalk4.gray(` ${getArrayLength(normalized.exports)} exports`));
      log(chalk4.gray(` ${getArrayLength(normalized.types)} types`));
      if (options.showDiagnostics && result.diagnostics.length > 0) {
        log("");
        log(chalk4.bold("Diagnostics"));
        for (const diagnostic of result.diagnostics) {
          // Severity icon: red ✖ (error), yellow ⚠ (warning), cyan ℹ (info).
          const prefix = diagnostic.severity === "error" ? chalk4.red("✖") : diagnostic.severity === "warning" ? chalk4.yellow("⚠") : chalk4.cyan("ℹ");
          log(formatDiagnosticOutput(prefix, diagnostic, targetDir));
        }
      }
    } catch (commandError) {
      error(chalk4.red("Error:"), commandError instanceof Error ? commandError.message : commandError);
      process.exit(1);
    }
  });
}
724
+
725
+ // src/commands/init.ts
726
+ import * as fs5 from "node:fs";
727
+ import * as path6 from "node:path";
728
+ import chalk5 from "chalk";
729
// Default collaborators for the `init` command; tests can override any of
// these via the `dependencies` argument of registerInitCommand.
var defaultDependencies4 = {
  fileExists: fs5.existsSync,
  writeFileSync: fs5.writeFileSync,
  readFileSync: fs5.readFileSync,
  log: console.log,
  error: console.error
};
736
/**
 * Register the `init` command on a commander program.
 *
 * Creates a doccov.config.<format> template in --cwd. Refuses to run when
 * a config already exists anywhere up the directory tree, validates the
 * --format flag, and picks js vs mjs automatically based on the nearest
 * package.json "type" field when --format is "auto". Failures set
 * process.exitCode = 1 rather than throwing.
 *
 * @param {import("commander").Command} program
 * @param {object} dependencies - Optional overrides for defaultDependencies4
 *   (fileExists, writeFileSync, readFileSync, log, error) used in tests.
 */
function registerInitCommand(program, dependencies = {}) {
  const { fileExists: fileExists2, writeFileSync: writeFileSync3, readFileSync: readFileSync4, log, error } = {
    ...defaultDependencies4,
    ...dependencies
  };
  program.command("init").description("Create a DocCov configuration file").option("--cwd <dir>", "Working directory", process.cwd()).option("--format <format>", "Config format: auto, mjs, js, cjs", "auto").action((options) => {
    const cwd = path6.resolve(options.cwd);
    const formatOption = String(options.format ?? "auto").toLowerCase();
    if (!isValidFormat(formatOption)) {
      error(chalk5.red(`Invalid format "${formatOption}". Use auto, mjs, js, or cjs.`));
      process.exitCode = 1;
      return;
    }
    // Any existing config (here or in an ancestor directory) blocks init.
    const existing = findExistingConfig(cwd, fileExists2);
    if (existing) {
      error(chalk5.red(`A DocCov config already exists at ${path6.relative(cwd, existing) || "./doccov.config.*"}.`));
      process.exitCode = 1;
      return;
    }
    const packageType = detectPackageType(cwd, fileExists2, readFileSync4);
    const targetFormat = resolveFormat(formatOption, packageType);
    // A plain .js config uses ESM syntax, which needs "type": "module".
    if (targetFormat === "js" && packageType !== "module") {
      log(chalk5.yellow('Package is not marked as "type": "module"; creating doccov.config.js may require enabling ESM.'));
    }
    const fileName = `doccov.config.${targetFormat}`;
    const outputPath = path6.join(cwd, fileName);
    if (fileExists2(outputPath)) {
      error(chalk5.red(`Cannot create ${fileName}; file already exists.`));
      process.exitCode = 1;
      return;
    }
    const template = buildTemplate(targetFormat);
    writeFileSync3(outputPath, template, { encoding: "utf8" });
    log(chalk5.green(`✓ Created ${path6.relative(process.cwd(), outputPath)}`));
  });
}
772
// True when the --format flag names a supported config format.
var isValidFormat = (value) => ["auto", "mjs", "js", "cjs"].includes(value);
775
// Synchronous counterpart to findConfigFile: walk from `cwd` up to the
// filesystem root using the injected existence predicate, returning the
// first config file found or null.
var findExistingConfig = (cwd, fileExists2) => {
  for (let dir = path6.resolve(cwd); ; dir = path6.dirname(dir)) {
    for (const name of DOCCOV_CONFIG_FILENAMES) {
      const candidatePath = path6.join(dir, name);
      if (fileExists2(candidatePath)) {
        return candidatePath;
      }
    }
    if (dir === path6.parse(dir).root) {
      return null;
    }
  }
};
792
// Determine the module system of the nearest package.json: "module",
// "commonjs", or undefined when there is no package.json, it cannot be
// parsed, or its "type" field is absent/unrecognized.
var detectPackageType = (cwd, fileExists2, readFileSync4) => {
  const packageJsonPath = findNearestPackageJson(cwd, fileExists2);
  if (!packageJsonPath) {
    return;
  }
  try {
    const parsed = JSON.parse(readFileSync4(packageJsonPath, "utf8"));
    switch (parsed.type) {
      case "module":
        return "module";
      case "commonjs":
        return "commonjs";
      default:
        return;
    }
  } catch {
    // Unreadable or invalid package.json: treat the type as unknown.
    return;
  }
};
809
// Locate the closest package.json at or above `cwd`; null when none exists
// all the way up to the filesystem root.
var findNearestPackageJson = (cwd, fileExists2) => {
  let dir = path6.resolve(cwd);
  const stopAt = path6.parse(dir).root;
  for (;;) {
    const manifest = path6.join(dir, "package.json");
    if (fileExists2(manifest)) {
      return manifest;
    }
    if (dir === stopAt) {
      return null;
    }
    dir = path6.dirname(dir);
  }
};
824
// Map the user-facing format option to a concrete file extension.
// "auto" picks "js" for ESM packages ("type": "module") and "mjs" otherwise;
// explicit formats pass through unchanged.
var resolveFormat = (format, packageType) => {
  if (format !== "auto") {
    return format;
  }
  return packageType === "module" ? "js" : "mjs";
};
830
// Produce starter config-file contents for the requested format.
// CJS templates use require/module.exports; every other format gets ESM
// import/export syntax.
var buildTemplate = (format) => {
  const isCjs = format === "cjs";
  const header = isCjs
    ? "const { defineConfig } = require('@doccov/cli/config');"
    : "import { defineConfig } from '@doccov/cli/config';";
  const opener = isCjs ? "module.exports = defineConfig({" : "export default defineConfig({";
  const lines = [
    header,
    "",
    opener,
    " include: [],",
    " exclude: [],",
    "});",
    ""
  ];
  return lines.join("\n");
};
854
+
855
+ // src/commands/scan.ts
856
+ import * as fs9 from "node:fs";
857
+ import * as os from "node:os";
858
+ import * as path10 from "node:path";
859
+ import { DocCov as DocCov3 } from "@doccov/sdk";
860
+ import chalk6 from "chalk";
861
+ import ora3 from "ora";
862
+ import { simpleGit } from "simple-git";
863
+
864
+ // src/utils/entry-detection.ts
865
+ import * as fs6 from "node:fs";
866
+ import * as path7 from "node:path";
867
// Determine the TypeScript entry point of the package rooted at `repoDir`.
// Checks, in priority order: package.json "types"/"typings", the "." export's
// "types" condition, "main", "module", then a list of conventional source
// locations. Returns { entryPath, source }; throws when nothing resolves.
function detectEntryPoint(repoDir) {
  const manifestPath = path7.join(repoDir, "package.json");
  if (!fs6.existsSync(manifestPath)) {
    throw new Error("No package.json found - not a valid npm package");
  }
  let manifest;
  try {
    manifest = JSON.parse(fs6.readFileSync(manifestPath, "utf-8"));
  } catch {
    throw new Error("Failed to parse package.json");
  }
  // The "." export's "types" condition, only when present as an object form.
  const rootExport = manifest.exports?.["."];
  const exportTypes = typeof rootExport === "object" && rootExport !== null ? rootExport.types : undefined;
  // Candidate fields in priority order, paired with the source label reported.
  const fieldCandidates = [
    [manifest.types, "types"],
    [manifest.typings, "types"],
    [exportTypes, "exports"],
    [manifest.main, "main"],
    [manifest.module, "module"]
  ];
  for (const [value, source] of fieldCandidates) {
    if (typeof value !== "string") {
      continue;
    }
    const entryPath = resolveToTs(repoDir, value);
    if (entryPath) {
      return { entryPath, source };
    }
  }
  // Last resort: conventional entry-point locations.
  const fallbackPaths = [
    "src/index.ts",
    "src/index.tsx",
    "src/main.ts",
    "index.ts",
    "lib/index.ts",
    "source/index.ts"
  ];
  for (const candidate of fallbackPaths) {
    if (fs6.existsSync(path7.join(repoDir, candidate))) {
      return { entryPath: candidate, source: "fallback" };
    }
  }
  throw new Error("Could not detect TypeScript entry point. No types field in package.json and no common entry paths found.");
}
930
// Map a published artifact path (e.g. "dist/index.js") back to a likely
// TypeScript source file under `baseDir`. Returns the relative path of the
// first candidate that exists on disk, or undefined when none do.
function resolveToTs(baseDir, filePath) {
  const rel = filePath.replace(/^\.\//, "");
  const existsHere = (p) => fs6.existsSync(path7.join(baseDir, p));
  // Already a TS/TSX file: accept it directly when present.
  if ((rel.endsWith(".ts") || rel.endsWith(".tsx")) && existsHere(rel)) {
    return rel;
  }
  const candidates = [];
  // Output directories commonly mirror a src/ tree; try the mapped source.
  const outputDirMappings = [
    { prefix: /^dist\//, tryTsx: true },
    { prefix: /^build\//, tryTsx: false },
    { prefix: /^lib\//, tryTsx: false }
  ];
  for (const { prefix, tryTsx } of outputDirMappings) {
    if (prefix.test(rel)) {
      const mapped = rel.replace(prefix, "src/");
      candidates.push(mapped.replace(/\.js$/, ".ts"));
      candidates.push(mapped.replace(/\.d\.ts$/, ".ts"));
      if (tryTsx) {
        candidates.push(mapped.replace(/\.js$/, ".tsx"));
      }
    }
  }
  // Same-directory extension swaps.
  candidates.push(rel.replace(/\.js$/, ".ts"));
  candidates.push(rel.replace(/\.d\.ts$/, ".ts"));
  candidates.push(rel.replace(/\.js$/, ".tsx"));
  // Declaration files often correspond to src/<name>.ts.
  if (rel.endsWith(".d.ts")) {
    candidates.push(`src/${path7.basename(rel, ".d.ts")}.ts`);
  }
  return candidates.find(existsHere);
}
968
+
969
+ // src/utils/github-url.ts
970
// Parse "owner/repo", a github.com URL, or a git@ remote into its parts.
// A "/tree/<ref>" or "/blob/<ref>" suffix overrides `defaultRef`; the ref
// may itself contain slashes (e.g. "feature/x"). Throws on empty or
// unparseable input.
function parseGitHubUrl(input, defaultRef = "main") {
  const raw = input.trim();
  if (!raw) {
    throw new Error("GitHub URL cannot be empty");
  }
  const cleaned = raw
    .replace(/^https?:\/\//, "")
    .replace(/^git@github\.com:/, "")
    .replace(/\.git$/, "")
    .replace(/^github\.com\//, "");
  const segments = cleaned.split("/").filter(Boolean);
  if (segments.length < 2) {
    throw new Error(`Invalid GitHub URL format: "${input}". Expected owner/repo or https://github.com/owner/repo`);
  }
  const [owner, repo, marker, ...refParts] = segments;
  let ref = defaultRef;
  if (refParts.length > 0 && (marker === "tree" || marker === "blob")) {
    ref = refParts.join("/");
  }
  if (!owner || !repo) {
    throw new Error(`Could not parse owner/repo from: "${input}"`);
  }
  return { owner, repo, ref };
}
992
// HTTPS clone URL for a parsed GitHub repository.
function buildCloneUrl({ owner, repo }) {
  return `https://github.com/${owner}/${repo}.git`;
}
995
// Short human-readable repository label (no protocol, no .git suffix).
function buildDisplayUrl({ owner, repo }) {
  return `github.com/${owner}/${repo}`;
}
998
+
999
+ // src/utils/llm-build-plan.ts
1000
+ import * as fs7 from "node:fs";
1001
+ import * as path8 from "node:path";
1002
+ import { createAnthropic } from "@ai-sdk/anthropic";
1003
+ import { createOpenAI } from "@ai-sdk/openai";
1004
+ import { generateObject } from "ai";
1005
+ import { z as z2 } from "zod";
1006
// Shape of the LLM-produced build plan used when heuristic entry-point
// detection fails (see generateBuildPlan below).
var BuildPlanSchema = z2.object({
  installCommand: z2.string().optional().describe("Additional install command if needed"),
  buildCommands: z2.array(z2.string()).describe('Build steps to run, e.g. ["npm run build:wasm"]'),
  entryPoint: z2.string().describe("Path to TS/TSX entry file after build"),
  notes: z2.string().optional().describe("Caveats or warnings")
});
// Files read from the repo root to give the model build context; missing
// files are simply skipped.
var CONTEXT_FILES = [
  "package.json",
  "README.md",
  "README",
  "tsconfig.json",
  "Cargo.toml",
  ".nvmrc",
  ".node-version",
  "pnpm-workspace.yaml",
  "lerna.json",
  "wasm-pack.json"
];
// Per-file character cap on context sent to the model.
var MAX_FILE_CHARS = 2000;
1025
// Pick an LLM for build-plan generation: Anthropic when explicitly selected
// via DOCCOV_LLM_PROVIDER or when an ANTHROPIC_API_KEY is present; otherwise
// OpenAI.
function getModel() {
  const provider = process.env.DOCCOV_LLM_PROVIDER?.toLowerCase();
  const useAnthropic = provider === "anthropic" || Boolean(process.env.ANTHROPIC_API_KEY);
  if (useAnthropic) {
    return createAnthropic()("claude-sonnet-4-20250514");
  }
  return createOpenAI()("gpt-4o-mini");
}
1034
// Concatenate known context files from `repoDir` into one prompt-ready
// string, truncating each file to MAX_FILE_CHARS. Missing or unreadable
// files are skipped silently (best-effort context gathering).
async function gatherContextFiles(repoDir) {
  const sections = [];
  for (const fileName of CONTEXT_FILES) {
    const fullPath = path8.join(repoDir, fileName);
    if (!fs7.existsSync(fullPath)) {
      continue;
    }
    try {
      let body = fs7.readFileSync(fullPath, "utf-8");
      if (body.length > MAX_FILE_CHARS) {
        body = `${body.slice(0, MAX_FILE_CHARS)}\n... (truncated)`;
      }
      sections.push(`--- ${fileName} ---\n${body}`);
    } catch {}
  }
  return sections.join("\n\n");
}
1054
// Prompt sent to the LLM when heuristic entry-point detection fails; the
// gathered repo files are interpolated into the <files> section.
var BUILD_PLAN_PROMPT = (contextBlock) => `Analyze this project to determine how to build it for TypeScript API analysis.

The standard entry detection failed. This might be a WASM project, unusual monorepo, or require a build step before the TypeScript entry point exists.

<files>
${contextBlock}
</files>

Return:
- buildCommands: Commands to run in order (e.g., ["npm run build:wasm", "npm run build"]). Empty array if no build needed.
- entryPoint: Path to the TypeScript entry file AFTER build completes (e.g., "src/index.ts" or "pkg/index.d.ts")
- installCommand: Additional install command if needed beyond what was already run
- notes: Any caveats (e.g., "requires Rust/wasm-pack installed")

Important:
- Look for build scripts in package.json that might generate TypeScript bindings
- Check README for build instructions
- For WASM projects, look for wasm-pack or similar tooling
- The entry point should be a .ts, .tsx, or .d.ts file`;
1073
// Ask an LLM for a build plan for the repo at `repoDir`. Returns null when
// no API key is configured or when no context files could be gathered.
async function generateBuildPlan(repoDir) {
  if (!process.env.OPENAI_API_KEY && !process.env.ANTHROPIC_API_KEY) {
    return null;
  }
  const context = await gatherContextFiles(repoDir);
  if (!context.trim()) {
    return null;
  }
  const { object } = await generateObject({
    model: getModel(),
    schema: BuildPlanSchema,
    prompt: BUILD_PLAN_PROMPT(context)
  });
  return object;
}
1090
+
1091
+ // src/utils/monorepo-detection.ts
1092
+ import * as fs8 from "node:fs";
1093
+ import * as path9 from "node:path";
1094
+ import { glob } from "glob";
1095
// Identify monorepo tooling at `repoDir` and enumerate its workspace
// packages. Checks package.json "workspaces" first, then pnpm-workspace.yaml,
// then lerna.json; anything else is reported as a non-monorepo.
async function detectMonorepo(repoDir) {
  const notAMonorepo = () => ({ isMonorepo: false, packages: [], type: "none" });
  const manifestPath = path9.join(repoDir, "package.json");
  if (!fs8.existsSync(manifestPath)) {
    return notAMonorepo();
  }
  let manifest;
  try {
    manifest = JSON.parse(fs8.readFileSync(manifestPath, "utf-8"));
  } catch {
    return notAMonorepo();
  }
  if (manifest.workspaces) {
    const packages = await resolveWorkspacePackages(repoDir, extractWorkspacePatterns(manifest.workspaces));
    return { isMonorepo: packages.length > 0, packages, type: "npm" };
  }
  const pnpmManifest = path9.join(repoDir, "pnpm-workspace.yaml");
  if (fs8.existsSync(pnpmManifest)) {
    const packages = await resolveWorkspacePackages(repoDir, parsePnpmWorkspace(pnpmManifest));
    return { isMonorepo: packages.length > 0, packages, type: "pnpm" };
  }
  const lernaManifest = path9.join(repoDir, "lerna.json");
  if (fs8.existsSync(lernaManifest)) {
    try {
      const lerna = JSON.parse(fs8.readFileSync(lernaManifest, "utf-8"));
      const packages = await resolveWorkspacePackages(repoDir, lerna.packages ?? ["packages/*"]);
      return { isMonorepo: packages.length > 0, packages, type: "lerna" };
    } catch {}
  }
  return notAMonorepo();
}
1128
// Normalize a package.json "workspaces" value — either an array of patterns
// or the object form { packages: [...] } — into an array of string globs.
// Non-string entries and unrecognized shapes yield an empty list.
function extractWorkspacePatterns(workspaces) {
  const onlyStrings = (list) => list.filter((entry) => typeof entry === "string");
  if (Array.isArray(workspaces)) {
    return onlyStrings(workspaces);
  }
  if (workspaces !== null && typeof workspaces === "object" && Array.isArray(workspaces.packages)) {
    return onlyStrings(workspaces.packages);
  }
  return [];
}
1140
// Extract the `packages:` list from a pnpm-workspace.yaml using a light
// regex scan (no YAML parser is pulled in). Falls back to ["packages/*"]
// on read failure or when no packages block is found.
function parsePnpmWorkspace(filePath) {
  try {
    const yaml = fs8.readFileSync(filePath, "utf-8");
    const block = yaml.match(/packages:\s*\n((?:\s+-\s+.+\n?)+)/);
    if (block) {
      return block[1]
        .split("\n")
        .map((entry) => entry.replace(/^\s+-\s+['"]?/, "").replace(/['"]?\s*$/, ""))
        .filter(Boolean);
    }
  } catch {}
  return ["packages/*"];
}
1152
// Expand workspace glob patterns into package descriptors, keeping only
// directories that contain a parseable package.json. Unreadable manifests
// and failed globs are skipped. Result is sorted by package name.
async function resolveWorkspacePackages(repoDir, patterns) {
  const packages = [];
  for (const pattern of patterns) {
    const cleanPattern = pattern.replace(/\/$/, "");
    try {
      const matches = await glob(cleanPattern, {
        cwd: repoDir,
        absolute: false
      });
      for (const relativePath of matches) {
        const manifestPath = path9.join(repoDir, relativePath, "package.json");
        if (!fs8.existsSync(manifestPath)) {
          continue;
        }
        try {
          const manifest = JSON.parse(fs8.readFileSync(manifestPath, "utf-8"));
          packages.push({
            // Fall back to the directory name for unnamed packages.
            name: manifest.name ?? path9.basename(relativePath),
            path: path9.join(repoDir, relativePath),
            relativePath
          });
        } catch {}
      }
    } catch {}
  }
  return packages.sort((a, b) => a.name.localeCompare(b.name));
}
1178
// Find a workspace package by exact name or workspace-relative path.
// Resolves to undefined for non-monorepos or unknown packages.
async function findPackage(repoDir, packageName) {
  const { isMonorepo, packages } = await detectMonorepo(repoDir);
  if (!isMonorepo) {
    return;
  }
  return packages.find((pkg) => pkg.name === packageName || pkg.relativePath === packageName);
}
1185
// Render up to `limit` package names as suggested --package flags, with a
// trailing "... and N more" line when the list is truncated.
function formatPackageList(packages, limit = 10) {
  const shown = packages.slice(0, limit).map((pkg) => ` --package ${pkg.name}`);
  const hidden = packages.length - limit;
  if (hidden > 0) {
    shown.push(` ... and ${hidden} more`);
  }
  return shown.join("\n");
}
1193
+
1194
+ // src/commands/scan.ts
1195
// Default injectable dependencies for the scan command; tests can override
// any of these via registerScanCommand's second argument.
var defaultDependencies5 = {
  createDocCov: (options) => new DocCov3(options),
  spinner: (text) => ora3(text),
  log: console.log,
  error: console.error
};
1201
// Register `doccov scan <url>`: clone a public GitHub repository, install its
// dependencies, detect the TypeScript entry point (with an optional
// LLM-backed fallback for unusual projects), run DocCov analysis, and print
// the results as text or JSON. `dependencies` lets tests inject fakes for
// the SDK factory, spinner, and loggers.
function registerScanCommand(program, dependencies = {}) {
  const { createDocCov, spinner, log, error } = {
    ...defaultDependencies5,
    ...dependencies
  };
  program.command("scan <url>").description("Analyze docs coverage for any public GitHub repository").option("--ref <branch>", "Branch or tag to analyze").option("--package <name>", "Target package in monorepo").option("--output <format>", "Output format: text or json", "text").option("--no-cleanup", "Keep cloned repo (for debugging)").option("--skip-install", "Skip dependency installation (faster, but may limit type resolution)").option("--save-spec <path>", "Save full OpenPkg spec to file").action(async (url, options) => {
    let tempDir;
    try {
      const parsed = parseGitHubUrl(url, options.ref ?? "main");
      const cloneUrl = buildCloneUrl(parsed);
      const displayUrl = buildDisplayUrl(parsed);
      log("");
      log(chalk6.bold(`Scanning ${displayUrl}`));
      log(chalk6.gray(`Branch/tag: ${parsed.ref}`));
      log("");
      // Unique scratch directory per scan; removed in `finally` unless
      // --no-cleanup was passed.
      tempDir = path10.join(os.tmpdir(), `doccov-scan-${Date.now()}-${Math.random().toString(36).slice(2)}`);
      fs9.mkdirSync(tempDir, { recursive: true });
      const cloneSpinner = spinner(`Cloning ${parsed.owner}/${parsed.repo}...`);
      cloneSpinner.start();
      try {
        const git = simpleGit();
        // Shallow, single-branch clone keeps large repositories fast.
        await git.clone(cloneUrl, tempDir, [
          "--depth",
          "1",
          "--branch",
          parsed.ref,
          "--single-branch"
        ]);
        cloneSpinner.succeed(`Cloned ${parsed.owner}/${parsed.repo}`);
      } catch (cloneError) {
        cloneSpinner.fail("Failed to clone repository");
        const message = cloneError instanceof Error ? cloneError.message : String(cloneError);
        // Map common git failure modes to actionable error messages.
        if (message.includes("not found") || message.includes("404")) {
          throw new Error(`Repository not accessible or does not exist: ${displayUrl}`);
        }
        if (message.includes("Could not find remote branch")) {
          throw new Error(`Branch or tag not found: ${parsed.ref}`);
        }
        throw new Error(`Clone failed: ${message}`);
      }
      if (options.skipInstall) {
        log(chalk6.gray("Skipping dependency installation (--skip-install)"));
      } else {
        const installSpinner = spinner("Installing dependencies...");
        installSpinner.start();
        const installErrors = [];
        try {
          const { execSync } = await import("node:child_process");
          // Prefer the package manager matching the repo's lockfile.
          const lockfileCommands = [
            { file: "pnpm-lock.yaml", cmd: "pnpm install --frozen-lockfile" },
            { file: "bun.lock", cmd: "bun install --frozen-lockfile" },
            { file: "bun.lockb", cmd: "bun install --frozen-lockfile" },
            { file: "yarn.lock", cmd: "yarn install --frozen-lockfile" },
            { file: "package-lock.json", cmd: "npm install --legacy-peer-deps" }
          ];
          let installed = false;
          for (const { file, cmd } of lockfileCommands) {
            if (fs9.existsSync(path10.join(tempDir, file))) {
              try {
                execSync(cmd, {
                  cwd: tempDir,
                  stdio: "pipe",
                  timeout: 180000
                });
                installed = true;
                break;
              } catch (cmdError) {
                // Record a truncated failure reason; keep trying alternatives.
                const stderr = cmdError?.stderr?.toString() ?? "";
                const msg = cmdError instanceof Error ? cmdError.message : String(cmdError);
                installErrors.push(`[${cmd}] ${stderr.slice(0, 150) || msg.slice(0, 150)}`);
              }
            }
          }
          if (!installed) {
            // No lockfile matched (or its install failed): try bun, then npm
            // with scripts disabled as the final fallback.
            try {
              execSync("bun install", {
                cwd: tempDir,
                stdio: "pipe",
                timeout: 120000
              });
              installed = true;
            } catch (bunError) {
              const stderr = bunError?.stderr?.toString() ?? "";
              const msg = bunError instanceof Error ? bunError.message : String(bunError);
              installErrors.push(`[bun install] ${stderr.slice(0, 150) || msg.slice(0, 150)}`);
              try {
                execSync("npm install --legacy-peer-deps --ignore-scripts", {
                  cwd: tempDir,
                  stdio: "pipe",
                  timeout: 180000
                });
                installed = true;
              } catch (npmError) {
                const npmStderr = npmError?.stderr?.toString() ?? "";
                const npmMsg = npmError instanceof Error ? npmError.message : String(npmError);
                installErrors.push(`[npm install] ${npmStderr.slice(0, 150) || npmMsg.slice(0, 150)}`);
              }
            }
          }
          if (installed) {
            installSpinner.succeed("Dependencies installed");
          } else {
            // Install failure is non-fatal: analysis proceeds with whatever
            // type information is resolvable without node_modules.
            installSpinner.warn("Could not install dependencies (analysis may be limited)");
            for (const err of installErrors) {
              log(chalk6.gray(` ${err}`));
            }
          }
        } catch (outerError) {
          const msg = outerError instanceof Error ? outerError.message : String(outerError);
          installSpinner.warn(`Could not install dependencies: ${msg.slice(0, 100)}`);
          for (const err of installErrors) {
            log(chalk6.gray(` ${err}`));
          }
        }
      }
      // Monorepos require an explicit --package to pick the analysis target.
      let targetDir = tempDir;
      let packageName;
      const mono = await detectMonorepo(tempDir);
      if (mono.isMonorepo) {
        if (!options.package) {
          error("");
          error(chalk6.red(`Monorepo detected with ${mono.packages.length} packages. Specify target with --package:`));
          error("");
          error(formatPackageList(mono.packages));
          error("");
          throw new Error("Monorepo requires --package flag");
        }
        const pkg = await findPackage(tempDir, options.package);
        if (!pkg) {
          error("");
          error(chalk6.red(`Package "${options.package}" not found. Available packages:`));
          error("");
          error(formatPackageList(mono.packages));
          error("");
          throw new Error(`Package not found: ${options.package}`);
        }
        targetDir = pkg.path;
        packageName = pkg.name;
        log(chalk6.gray(`Analyzing package: ${packageName}`));
      }
      const entrySpinner = spinner("Detecting entry point...");
      entrySpinner.start();
      let entryPath;
      // Heuristic: a .d.ts entry alongside a Cargo.toml or wasm-ish npm
      // scripts suggests the real TS entry only exists after a build step.
      const needsBuildStep = (pkgDir, repoRoot, entryFile) => {
        if (!entryFile.endsWith(".d.ts"))
          return false;
        const cargoLocations = [
          path10.join(pkgDir, "Cargo.toml"),
          path10.join(repoRoot, "Cargo.toml")
        ];
        const hasCargoToml = cargoLocations.some((p) => fs9.existsSync(p));
        const checkWasmScripts = (dir) => {
          const pkgPath = path10.join(dir, "package.json");
          if (fs9.existsSync(pkgPath)) {
            try {
              const pkg = JSON.parse(fs9.readFileSync(pkgPath, "utf-8"));
              const scripts = Object.values(pkg.scripts ?? {}).join(" ");
              return scripts.includes("wasm-pack") || scripts.includes("wasm");
            } catch {}
          }
          return false;
        };
        const hasWasmPackScript = checkWasmScripts(pkgDir) || checkWasmScripts(repoRoot);
        return hasCargoToml || hasWasmPackScript;
      };
      let buildFailed = false;
      // Ask the LLM for a build plan, run its build commands (best-effort),
      // and return the entry point it proposes — or null when unavailable
      // (no API key / no context). Build failures set `buildFailed` but do
      // not abort: pre-committed declarations may still be analyzable.
      const runLlmFallback = async (reason) => {
        entrySpinner.text = `${reason}, trying LLM fallback...`;
        const plan = await generateBuildPlan(targetDir);
        if (!plan) {
          return null;
        }
        if (plan.buildCommands.length > 0) {
          const { execSync } = await import("node:child_process");
          for (const cmd of plan.buildCommands) {
            log(chalk6.gray(` Running: ${cmd}`));
            try {
              execSync(cmd, { cwd: targetDir, stdio: "pipe", timeout: 300000 });
            } catch (buildError) {
              buildFailed = true;
              const msg = buildError instanceof Error ? buildError.message : String(buildError);
              if (msg.includes("rustc") || msg.includes("cargo") || msg.includes("wasm-pack")) {
                log(chalk6.yellow(` ⚠ Build requires Rust toolchain (not available)`));
              } else if (msg.includes("rimraf") || msg.includes("command not found")) {
                log(chalk6.yellow(` ⚠ Build failed: missing dependencies`));
              } else {
                log(chalk6.yellow(` ⚠ Build failed: ${msg.slice(0, 80)}`));
              }
            }
          }
        }
        if (plan.notes) {
          log(chalk6.gray(` Note: ${plan.notes}`));
        }
        return plan.entryPoint;
      };
      try {
        const entry = detectEntryPoint(targetDir);
        if (needsBuildStep(targetDir, tempDir, entry.entryPath)) {
          entrySpinner.text = "Detected .d.ts entry with WASM indicators...";
          const llmEntry = await runLlmFallback("WASM project detected");
          if (llmEntry) {
            entryPath = path10.join(targetDir, llmEntry);
            if (buildFailed) {
              entrySpinner.succeed(`Entry point: ${llmEntry} (using pre-committed declarations)`);
              log(chalk6.gray(" Coverage may be limited - generated .d.ts files typically lack JSDoc"));
            } else {
              entrySpinner.succeed(`Entry point: ${llmEntry} (from LLM fallback - WASM project)`);
            }
          } else {
            // No LLM available: fall back to the heuristic entry as-is.
            entryPath = path10.join(targetDir, entry.entryPath);
            entrySpinner.succeed(`Entry point: ${entry.entryPath} (from ${entry.source})`);
            log(chalk6.yellow(" ⚠ WASM project detected but no API key - analysis may be limited"));
          }
        } else {
          entryPath = path10.join(targetDir, entry.entryPath);
          entrySpinner.succeed(`Entry point: ${entry.entryPath} (from ${entry.source})`);
        }
      } catch (entryError) {
        // Heuristics failed outright; the LLM fallback is the last resort.
        const llmEntry = await runLlmFallback("Heuristics failed");
        if (llmEntry) {
          entryPath = path10.join(targetDir, llmEntry);
          entrySpinner.succeed(`Entry point: ${llmEntry} (from LLM fallback)`);
        } else {
          entrySpinner.fail("Could not detect entry point (set OPENAI_API_KEY for smart fallback)");
          throw entryError;
        }
      }
      const analyzeSpinner = spinner("Analyzing documentation coverage...");
      analyzeSpinner.start();
      let result;
      try {
        const doccov = createDocCov({ resolveExternalTypes: true });
        result = await doccov.analyzeFileWithDiagnostics(entryPath);
        analyzeSpinner.succeed("Analysis complete");
      } catch (analysisError) {
        analyzeSpinner.fail("Analysis failed");
        throw analysisError;
      }
      const spec = result.spec;
      const coverageScore = spec.docs?.coverageScore ?? 0;
      if (options.saveSpec) {
        const specPath = path10.resolve(process.cwd(), options.saveSpec);
        fs9.writeFileSync(specPath, JSON.stringify(spec, null, 2));
        log(chalk6.green(`✓ Saved spec to ${options.saveSpec}`));
      }
      // Collect undocumented exports and doc-drift issues from the spec.
      const undocumented = [];
      const driftIssues = [];
      for (const exp of spec.exports ?? []) {
        const expDocs = exp.docs;
        if (!expDocs)
          continue;
        if ((expDocs.missing?.length ?? 0) > 0 || (expDocs.coverageScore ?? 0) < 100) {
          undocumented.push(exp.name);
        }
        for (const d of expDocs.drift ?? []) {
          driftIssues.push({
            export: exp.name,
            type: d.type,
            issue: d.issue
          });
        }
      }
      const scanResult = {
        owner: parsed.owner,
        repo: parsed.repo,
        ref: parsed.ref,
        packageName,
        coverage: coverageScore,
        exportCount: spec.exports?.length ?? 0,
        typeCount: spec.types?.length ?? 0,
        driftCount: driftIssues.length,
        undocumented,
        drift: driftIssues
      };
      if (options.output === "json") {
        log(JSON.stringify(scanResult, null, 2));
      } else {
        printTextResult(scanResult, log);
      }
    } catch (commandError) {
      error(chalk6.red("Error:"), commandError instanceof Error ? commandError.message : commandError);
      process.exitCode = 1;
    } finally {
      // Commander maps --no-cleanup to options.cleanup === false.
      if (tempDir && options.cleanup !== false) {
        // Detached `rm -rf` so cleanup does not block process exit.
        const { spawn } = await import("node:child_process");
        spawn("rm", ["-rf", tempDir], {
          detached: true,
          stdio: "ignore"
        }).unref();
      } else if (tempDir) {
        log(chalk6.gray(`Repo preserved at: ${tempDir}`));
      }
    }
  });
}
1497
// Pretty-print a scan result through the injected `log` sink: repo header,
// coverage (colored by threshold), stats, and truncated lists of
// undocumented exports and drift issues.
function printTextResult(result, log) {
  const repoLabel = result.packageName
    ? `${result.owner}/${result.repo} (${result.packageName})`
    : `${result.owner}/${result.repo}`;
  // Green at >= 80%, yellow at >= 50%, red below.
  const pickCoverageColor = (value) => {
    if (value >= 80) {
      return chalk6.green;
    }
    if (value >= 50) {
      return chalk6.yellow;
    }
    return chalk6.red;
  };
  log("");
  log(chalk6.bold("DocCov Scan Results"));
  log("─".repeat(40));
  log(`Repository: ${chalk6.cyan(repoLabel)}`);
  log(`Branch: ${chalk6.gray(result.ref)}`);
  log("");
  log(chalk6.bold("Coverage"));
  log(` ${pickCoverageColor(result.coverage)(`${result.coverage}%`)}`);
  log("");
  log(chalk6.bold("Stats"));
  log(` ${result.exportCount} exports`);
  log(` ${result.typeCount} types`);
  log(` ${result.undocumented.length} undocumented`);
  log(` ${result.driftCount} drift issues`);
  if (result.undocumented.length > 0) {
    log("");
    log(chalk6.bold("Undocumented Exports"));
    for (const name of result.undocumented.slice(0, 10)) {
      log(chalk6.yellow(` ! ${name}`));
    }
    if (result.undocumented.length > 10) {
      log(chalk6.gray(` ... and ${result.undocumented.length - 10} more`));
    }
  }
  if (result.drift.length > 0) {
    log("");
    log(chalk6.bold("Drift Issues"));
    for (const d of result.drift.slice(0, 5)) {
      log(chalk6.red(` • ${d.export}: ${d.issue}`));
    }
    if (result.drift.length > 5) {
      log(chalk6.gray(` ... and ${result.drift.length - 5} more`));
    }
  }
  log("");
}
1536
+
1537
+ // src/cli.ts
1538
// CLI bootstrap: resolve this module's directory so package.json can be
// read relative to the built output (dist/).
var __filename2 = fileURLToPath(import.meta.url);
var __dirname2 = path11.dirname(__filename2);
// The CLI version is sourced from the package manifest one level up.
var packageJson = JSON.parse(readFileSync8(path11.join(__dirname2, "../package.json"), "utf-8"));
var program = new Command;
program.name("doccov").description("DocCov - Documentation coverage and drift detection for TypeScript").version(packageJson.version);
registerGenerateCommand(program);
registerCheckCommand(program);
registerDiffCommand(program);
registerInitCommand(program);
registerScanCommand(program);
// Unknown commands fall through to help output.
program.command("*", { hidden: true }).action(() => {
  program.outputHelp();
});
// NOTE(review): parseAsync's returned promise is not awaited or caught —
// presumably command actions handle their own errors and set exitCode;
// confirm a rejection here cannot surface as an unhandled rejection.
program.parseAsync();