@doccov/cli 0.4.7 → 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.js +575 -792
- package/dist/config/index.d.ts +7 -7
- package/dist/config/index.js +19 -2
- package/package.json +2 -2
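
The headline change in this release is a new optional `docs` block in the config schema (see `docsConfigSchema` in the diff below) plus `--docs`, `--fail-on-docs-impact`, `--ai`, `--write`, `--only`, and `--dry-run` flags on the `diff` and `check` commands. As a rough sketch of how the new field might be used — assuming `defineConfig` is re-exported from the package entry, which this diff of the bundled CLI does not confirm, and with placeholder glob values — a config could look like:

    // doccov.config.mjs — hypothetical example of the docs option added in 0.5.0
    import { defineConfig } from "@doccov/cli";

    export default defineConfig({
      include: ["src/**"],            // exports to analyze
      exclude: ["src/internal/**"],
      docs: {
        // markdown globs; `doccov diff` falls back to these when --docs is not passed
        include: ["README.md", "docs/**/*.md"],
      },
    });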
package/dist/cli.js
CHANGED
@@ -29,10 +29,15 @@ var stringList = z.union([
  z.string(),
  z.array(z.string())
  ]);
+ var docsConfigSchema = z.object({
+ include: stringList.optional(),
+ exclude: stringList.optional()
+ });
  var docCovConfigSchema = z.object({
  include: stringList.optional(),
  exclude: stringList.optional(),
- plugins: z.array(z.unknown()).optional()
+ plugins: z.array(z.unknown()).optional(),
+ docs: docsConfigSchema.optional()
  });
  var normalizeList = (value) => {
  if (!value) {
@@ -45,10 +50,22 @@ var normalizeList = (value) => {
  var normalizeConfig = (input) => {
  const include = normalizeList(input.include);
  const exclude = normalizeList(input.exclude);
+ let docs;
+ if (input.docs) {
+ const docsInclude = normalizeList(input.docs.include);
+ const docsExclude = normalizeList(input.docs.exclude);
+ if (docsInclude || docsExclude) {
+ docs = {
+ include: docsInclude,
+ exclude: docsExclude
+ };
+ }
+ }
  return {
  include,
  exclude,
- plugins: input.plugins
+ plugins: input.plugins,
+ docs
  };
  };

@@ -124,21 +141,33 @@ ${formatIssues(issues)}`);
  // src/config/index.ts
  var defineConfig = (config) => config;
  // src/cli.ts
- import { readFileSync as
- import * as
+ import { readFileSync as readFileSync5 } from "node:fs";
+ import * as path9 from "node:path";
  import { fileURLToPath } from "node:url";
  import { Command } from "commander";

  // src/commands/check.ts
- import * as
- import * as
+ import * as fs from "node:fs";
+ import * as path2 from "node:path";
  import {
+ applyEdits,
+ categorizeDrifts,
+ createSourceFile,
  DocCov,
+ detectEntryPoint,
  detectExampleAssertionFailures,
  detectExampleRuntimeErrors,
+ detectMonorepo,
+ findPackageByName,
+ findJSDocLocation,
+ generateFixesForExport,
  hasNonAssertionComments,
+ mergeFixes,
+ NodeFileSystem,
  parseAssertions,
-
+ parseJSDocToPatch,
+ runExamplesWithPackage,
+ serializeJSDoc
  } from "@doccov/sdk";
  import chalk from "chalk";
  import ora from "ora";
@@ -211,158 +240,6 @@ async function parseAssertionsWithLLM(code) {
  }
  }

- // src/utils/package-utils.ts
- import * as fs from "node:fs";
- import * as path2 from "node:path";
- async function findEntryPoint(packageDir, preferSource = false) {
- const packageJsonPath = path2.join(packageDir, "package.json");
- if (!fs.existsSync(packageJsonPath)) {
- return findDefaultEntryPoint(packageDir);
- }
- const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, "utf-8"));
- if (preferSource) {
- const srcIndex = path2.join(packageDir, "src/index.ts");
- if (fs.existsSync(srcIndex)) {
- return srcIndex;
- }
- }
- if (!preferSource && (packageJson.types || packageJson.typings)) {
- const typesPath = path2.join(packageDir, packageJson.types || packageJson.typings);
- if (fs.existsSync(typesPath)) {
- return typesPath;
- }
- }
- if (packageJson.exports) {
- const exportPath = resolveExportsField(packageJson.exports, packageDir);
- if (exportPath) {
- return exportPath;
- }
- }
- if (packageJson.main) {
- const mainBase = packageJson.main.replace(/\.(js|mjs|cjs)$/, "");
- const dtsPath = path2.join(packageDir, `${mainBase}.d.ts`);
- if (fs.existsSync(dtsPath)) {
- return dtsPath;
- }
- const tsPath = path2.join(packageDir, `${mainBase}.ts`);
- if (fs.existsSync(tsPath)) {
- return tsPath;
- }
- const mainPath = path2.join(packageDir, packageJson.main);
- if (fs.existsSync(mainPath) && fs.statSync(mainPath).isDirectory()) {
- const indexDts = path2.join(mainPath, "index.d.ts");
- const indexTs = path2.join(mainPath, "index.ts");
- if (fs.existsSync(indexDts))
- return indexDts;
- if (fs.existsSync(indexTs))
- return indexTs;
- }
- }
- return findDefaultEntryPoint(packageDir);
- }
- function resolveExportsField(exports, packageDir) {
- if (typeof exports === "string") {
- return findTypeScriptFile(path2.join(packageDir, exports));
- }
- if (typeof exports === "object" && exports !== null && "." in exports) {
- const dotExport = exports["."];
- if (typeof dotExport === "string") {
- return findTypeScriptFile(path2.join(packageDir, dotExport));
- }
- if (dotExport && typeof dotExport === "object") {
- const dotRecord = dotExport;
- const typesEntry = dotRecord.types;
- if (typeof typesEntry === "string") {
- const typesPath = path2.join(packageDir, typesEntry);
- if (fs.existsSync(typesPath)) {
- return typesPath;
- }
- }
- for (const condition of ["import", "require", "default"]) {
- const target = dotRecord[condition];
- if (typeof target === "string") {
- const result = findTypeScriptFile(path2.join(packageDir, target));
- if (result)
- return result;
- }
- }
- }
- }
- return null;
- }
- function findTypeScriptFile(jsPath) {
- if (!fs.existsSync(jsPath))
- return null;
- const dtsPath = jsPath.replace(/\.(js|mjs|cjs)$/, ".d.ts");
- if (fs.existsSync(dtsPath)) {
- return dtsPath;
- }
- const tsPath = jsPath.replace(/\.(js|mjs|cjs)$/, ".ts");
- if (fs.existsSync(tsPath)) {
- return tsPath;
- }
- return null;
- }
- async function findDefaultEntryPoint(packageDir) {
- const candidates = [
- "dist/index.d.ts",
- "dist/index.ts",
- "lib/index.d.ts",
- "lib/index.ts",
- "src/index.ts",
- "index.d.ts",
- "index.ts"
- ];
- for (const candidate of candidates) {
- const fullPath = path2.join(packageDir, candidate);
- if (fs.existsSync(fullPath)) {
- return fullPath;
- }
- }
- throw new Error(`Could not find entry point in ${packageDir}`);
- }
- async function findPackageInMonorepo(rootDir, packageName) {
- const rootPackageJsonPath = path2.join(rootDir, "package.json");
- if (!fs.existsSync(rootPackageJsonPath)) {
- return null;
- }
- const rootPackageJson = JSON.parse(fs.readFileSync(rootPackageJsonPath, "utf-8"));
- if (rootPackageJson.name === packageName) {
- return rootDir;
- }
- let workspacePatterns = Array.isArray(rootPackageJson.workspaces) ? rootPackageJson.workspaces : rootPackageJson.workspaces?.packages || [];
- if (workspacePatterns.length === 0) {
- const pnpmWorkspacePath = path2.join(rootDir, "pnpm-workspace.yaml");
- if (fs.existsSync(pnpmWorkspacePath)) {
- const content = fs.readFileSync(pnpmWorkspacePath, "utf-8");
- const packagesMatch = content.match(/packages:\s*\n((?:\s*-\s*.+\n?)+)/);
- if (packagesMatch) {
- workspacePatterns = packagesMatch[1].split(`
- `).map((line) => line.replace(/^\s*-\s*['"]?/, "").replace(/['"]?\s*$/, "")).filter((line) => line.length > 0);
- }
- }
- }
- for (const pattern of workspacePatterns) {
- const searchPath = path2.join(rootDir, pattern.replace("/**", "").replace("/*", ""));
- if (fs.existsSync(searchPath) && fs.statSync(searchPath).isDirectory()) {
- const entries = fs.readdirSync(searchPath, { withFileTypes: true });
- for (const entry of entries) {
- if (entry.isDirectory()) {
- const packagePath = path2.join(searchPath, entry.name);
- const packageJsonPath = path2.join(packagePath, "package.json");
- if (fs.existsSync(packageJsonPath)) {
- const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, "utf-8"));
- if (packageJson.name === packageName) {
- return packagePath;
- }
- }
- }
- }
- }
- }
- return null;
- }
-
  // src/commands/check.ts
  var defaultDependencies = {
  createDocCov: (options) => new DocCov(options),
@@ -370,32 +247,66 @@ var defaultDependencies = {
  log: console.log,
  error: console.error
  };
+ function collectDriftsFromExports(exports) {
+ const results = [];
+ for (const exp of exports) {
+ for (const drift of exp.docs?.drift ?? []) {
+ results.push({ export: exp, drift });
+ }
+ }
+ return results;
+ }
+ function filterDriftsByType(drifts, onlyTypes) {
+ if (!onlyTypes)
+ return drifts;
+ const allowedTypes = new Set(onlyTypes.split(",").map((t) => t.trim()));
+ return drifts.filter((d) => allowedTypes.has(d.drift.type));
+ }
+ function groupByExport(drifts) {
+ const map = new Map;
+ for (const { export: exp, drift } of drifts) {
+ const existing = map.get(exp) ?? [];
+ existing.push(drift);
+ map.set(exp, existing);
+ }
+ return map;
+ }
  function registerCheckCommand(program, dependencies = {}) {
  const { createDocCov, spinner, log, error } = {
  ...defaultDependencies,
  ...dependencies
  };
- program.command("check [entry]").description("Fail if documentation coverage falls below a threshold").option("--cwd <dir>", "Working directory", process.cwd()).option("--package <name>", "Target package name (for monorepos)").option("--min-coverage <percentage>", "Minimum docs coverage percentage (0-100)", (value) => Number(value)).option("--require-examples", "Require at least one @example for every export").option("--run-examples", "Execute @example blocks and fail on runtime errors").option("--ignore-drift", "Do not fail on documentation drift").option("--skip-resolve", "Skip external type resolution from node_modules").action(async (entry, options) => {
+ program.command("check [entry]").description("Fail if documentation coverage falls below a threshold").option("--cwd <dir>", "Working directory", process.cwd()).option("--package <name>", "Target package name (for monorepos)").option("--min-coverage <percentage>", "Minimum docs coverage percentage (0-100)", (value) => Number(value)).option("--require-examples", "Require at least one @example for every export").option("--run-examples", "Execute @example blocks and fail on runtime errors").option("--ignore-drift", "Do not fail on documentation drift").option("--skip-resolve", "Skip external type resolution from node_modules").option("--write", "Auto-fix drift issues").option("--only <types>", "Only fix specific drift types (comma-separated)").option("--dry-run", "Preview fixes without writing (requires --write)").action(async (entry, options) => {
  try {
  let targetDir = options.cwd;
  let entryFile = entry;
+ const fileSystem = new NodeFileSystem(options.cwd);
  if (options.package) {
- const
- if (!
- throw new Error(`
+ const mono = await detectMonorepo(fileSystem);
+ if (!mono.isMonorepo) {
+ throw new Error(`Not a monorepo. Remove --package flag for single-package repos.`);
  }
-
-
+ const pkg = findPackageByName(mono.packages, options.package);
+ if (!pkg) {
+ const available = mono.packages.map((p) => p.name).join(", ");
+ throw new Error(`Package "${options.package}" not found. Available: ${available}`);
+ }
+ targetDir = path2.join(options.cwd, pkg.path);
+ log(chalk.gray(`Found package at ${pkg.path}`));
  }
  if (!entryFile) {
-
-
+ const targetFs = new NodeFileSystem(targetDir);
+ const detected = await detectEntryPoint(targetFs);
+ entryFile = path2.join(targetDir, detected.path);
+ log(chalk.gray(`Auto-detected entry point: ${detected.path} (from ${detected.source})`));
  } else {
- entryFile =
- if (
+ entryFile = path2.resolve(targetDir, entryFile);
+ if (fs.existsSync(entryFile) && fs.statSync(entryFile).isDirectory()) {
  targetDir = entryFile;
-
-
+ const dirFs = new NodeFileSystem(entryFile);
+ const detected = await detectEntryPoint(dirFs);
+ entryFile = path2.join(entryFile, detected.path);
+ log(chalk.gray(`Auto-detected entry point: ${detected.path}`));
  }
  }
  const minCoverage = clampCoverage(options.minCoverage ?? 80);
@@ -537,7 +448,116 @@ function registerCheckCommand(program, dependencies = {}) {
  const coverageScore = spec.docs?.coverageScore ?? 0;
  const failingExports = collectFailingExports(spec.exports ?? [], minCoverage);
  const missingExamples = options.requireExamples ? failingExports.filter((item) => item.missing?.includes("examples")) : [];
-
+ let driftExports = [...collectDrift(spec.exports ?? []), ...runtimeDrifts];
+ const fixedDriftKeys = new Set;
+ if (options.write && driftExports.length > 0) {
+ const allDrifts = collectDriftsFromExports(spec.exports ?? []);
+ const filteredDrifts = filterDriftsByType(allDrifts, options.only);
+ if (filteredDrifts.length === 0 && options.only) {
+ log(chalk.yellow("No matching drift issues for the specified types."));
+ } else if (filteredDrifts.length > 0) {
+ const { fixable, nonFixable } = categorizeDrifts(filteredDrifts.map((d) => d.drift));
+ if (fixable.length === 0) {
+ log(chalk.yellow(`Found ${nonFixable.length} drift issue(s), but none are auto-fixable.`));
+ } else {
+ log("");
+ log(chalk.bold(`Found ${fixable.length} fixable issue(s)`));
+ if (nonFixable.length > 0) {
+ log(chalk.gray(`(${nonFixable.length} non-fixable issue(s) skipped)`));
+ }
+ log("");
+ const groupedDrifts = groupByExport(filteredDrifts.filter((d) => fixable.includes(d.drift)));
+ const edits = [];
+ const editsByFile = new Map;
+ for (const [exp, drifts] of groupedDrifts) {
+ if (!exp.source?.file) {
+ log(chalk.gray(` Skipping ${exp.name}: no source location`));
+ continue;
+ }
+ if (exp.source.file.endsWith(".d.ts")) {
+ log(chalk.gray(` Skipping ${exp.name}: declaration file`));
+ continue;
+ }
+ const filePath = path2.resolve(targetDir, exp.source.file);
+ if (!fs.existsSync(filePath)) {
+ log(chalk.gray(` Skipping ${exp.name}: file not found`));
+ continue;
+ }
+ const sourceFile = createSourceFile(filePath);
+ const location = findJSDocLocation(sourceFile, exp.name, exp.source.line);
+ if (!location) {
+ log(chalk.gray(` Skipping ${exp.name}: could not find declaration`));
+ continue;
+ }
+ let existingPatch = {};
+ if (location.hasExisting && location.existingJSDoc) {
+ existingPatch = parseJSDocToPatch(location.existingJSDoc);
+ }
+ const fixes = generateFixesForExport({ ...exp, docs: { ...exp.docs, drift: drifts } }, existingPatch);
+ if (fixes.length === 0)
+ continue;
+ for (const drift of drifts) {
+ fixedDriftKeys.add(`${exp.name}:${drift.issue}`);
+ }
+ const mergedPatch = mergeFixes(fixes, existingPatch);
+ const newJSDoc = serializeJSDoc(mergedPatch, location.indent);
+ const edit = {
+ filePath,
+ symbolName: exp.name,
+ startLine: location.startLine,
+ endLine: location.endLine,
+ hasExisting: location.hasExisting,
+ existingJSDoc: location.existingJSDoc,
+ newJSDoc,
+ indent: location.indent
+ };
+ edits.push(edit);
+ const fileEdits = editsByFile.get(filePath) ?? [];
+ fileEdits.push({ export: exp, edit, fixes, existingPatch });
+ editsByFile.set(filePath, fileEdits);
+ }
+ if (edits.length > 0) {
+ if (options.dryRun) {
+ log(chalk.bold("Dry run - changes that would be made:"));
+ log("");
+ for (const [filePath, fileEdits] of editsByFile) {
+ const relativePath = path2.relative(targetDir, filePath);
+ log(chalk.cyan(` ${relativePath}:`));
+ for (const { export: exp, edit, fixes } of fileEdits) {
+ const lineInfo = edit.hasExisting ? `lines ${edit.startLine + 1}-${edit.endLine + 1}` : `line ${edit.startLine + 1}`;
+ log(` ${chalk.bold(exp.name)} [${lineInfo}]`);
+ for (const fix of fixes) {
+ log(chalk.green(` + ${fix.description}`));
+ }
+ }
+ log("");
+ }
+ log(chalk.gray("Run without --dry-run to apply these changes."));
+ } else {
+ const applySpinner = spinner("Applying fixes...");
+ applySpinner.start();
+ const applyResult = await applyEdits(edits);
+ if (applyResult.errors.length > 0) {
+ applySpinner.warn("Some fixes could not be applied");
+ for (const err of applyResult.errors) {
+ error(chalk.red(` ${err.file}: ${err.error}`));
+ }
+ } else {
+ applySpinner.succeed(`Applied ${applyResult.editsApplied} fix(es) to ${applyResult.filesModified} file(s)`);
+ }
+ log("");
+ for (const [filePath, fileEdits] of editsByFile) {
+ const relativePath = path2.relative(targetDir, filePath);
+ log(chalk.green(` ✓ ${relativePath}: ${fileEdits.length} fix(es)`));
+ }
+ }
+ }
+ }
+ }
+ if (!options.dryRun) {
+ driftExports = driftExports.filter((d) => !fixedDriftKeys.has(`${d.name}:${d.issue}`));
+ }
+ }
  const coverageFailed = coverageScore < minCoverage;
  const hasMissingExamples = missingExamples.length > 0;
  const hasDrift = !options.ignoreDrift && driftExports.length > 0;
@@ -627,30 +647,131 @@ function collectDrift(exportsList) {
  }

  // src/commands/diff.ts
- import * as
- import * as
- import {
+ import * as fs2 from "node:fs";
+ import * as path3 from "node:path";
+ import {
+ diffSpecWithDocs,
+ getDocsImpactSummary,
+ hasDocsImpact,
+ parseMarkdownFiles
+ } from "@doccov/sdk";
  import chalk2 from "chalk";
+ import { glob } from "glob";
+
+ // src/utils/docs-impact-ai.ts
+ import { createAnthropic as createAnthropic2 } from "@ai-sdk/anthropic";
+ import { createOpenAI as createOpenAI2 } from "@ai-sdk/openai";
+ import { generateObject as generateObject2, generateText } from "ai";
+ import { z as z3 } from "zod";
+ var CodeBlockUsageSchema = z3.object({
+ isImpacted: z3.boolean().describe("Whether the code block is affected by the change"),
+ reason: z3.string().describe("Explanation of why/why not the code is impacted"),
+ usageType: z3.enum(["direct-call", "import-only", "indirect", "not-used"]).describe("How the export is used in this code block"),
+ suggestedFix: z3.string().optional().describe("If impacted, the suggested code change"),
+ confidence: z3.enum(["high", "medium", "low"]).describe("Confidence level of the analysis")
+ });
+ var MultiBlockAnalysisSchema = z3.object({
+ groups: z3.array(z3.object({
+ blockIndices: z3.array(z3.number()).describe("Indices of blocks that should run together"),
+ reason: z3.string().describe("Why these blocks are related")
+ })).describe("Groups of related code blocks"),
+ skippedBlocks: z3.array(z3.number()).describe("Indices of blocks that should be skipped (incomplete/illustrative)")
+ });
+ function getModel2() {
+ const provider = process.env.DOCCOV_LLM_PROVIDER?.toLowerCase();
+ if (provider === "anthropic" || process.env.ANTHROPIC_API_KEY) {
+ const anthropic = createAnthropic2();
+ return anthropic("claude-sonnet-4-20250514");
+ }
+ const openai = createOpenAI2();
+ return openai("gpt-4o-mini");
+ }
+ function isAIDocsAnalysisAvailable() {
+ return Boolean(process.env.OPENAI_API_KEY || process.env.ANTHROPIC_API_KEY);
+ }
+ async function generateImpactSummary(impacts) {
+ if (!isAIDocsAnalysisAvailable()) {
+ return null;
+ }
+ if (impacts.length === 0) {
+ return "No documentation impacts detected.";
+ }
+ try {
+ const { text } = await generateText({
+ model: getModel2(),
+ prompt: `Summarize these documentation impacts for a GitHub PR comment.
+
+ Impacts:
+ ${impacts.map((i) => `- ${i.file}: ${i.exportName} (${i.changeType})`).join(`
+ `)}
+
+ Write a brief, actionable summary (2-3 sentences) explaining:
+ 1. How many files/references are affected
+ 2. What type of updates are needed
+ 3. Priority recommendation
+
+ Keep it concise and developer-friendly.`
+ });
+ return text.trim();
+ } catch {
+ return null;
+ }
+ }
+
+ // src/commands/diff.ts
  var defaultDependencies2 = {
- readFileSync:
+ readFileSync: fs2.readFileSync,
  log: console.log,
  error: console.error
  };
  function registerDiffCommand(program, dependencies = {}) {
- const { readFileSync:
+ const { readFileSync: readFileSync2, log, error } = {
  ...defaultDependencies2,
  ...dependencies
  };
- program.command("diff <base> <head>").description("Compare two OpenPkg specs and report coverage delta").option("--output <format>", "Output format: json or text", "text").option("--fail-on-regression", "Exit with error if coverage regressed").option("--fail-on-drift", "Exit with error if new drift was introduced").action((base, head, options) => {
+ program.command("diff <base> <head>").description("Compare two OpenPkg specs and report coverage delta").option("--output <format>", "Output format: json or text", "text").option("--fail-on-regression", "Exit with error if coverage regressed").option("--fail-on-drift", "Exit with error if new drift was introduced").option("--docs <glob>", "Glob pattern for markdown docs to check for impact", collect, []).option("--fail-on-docs-impact", "Exit with error if docs need updates").option("--ai", "Use AI for deeper analysis and fix suggestions").action(async (base, head, options) => {
  try {
- const baseSpec = loadSpec(base,
- const headSpec = loadSpec(head,
-
+ const baseSpec = loadSpec(base, readFileSync2);
+ const headSpec = loadSpec(head, readFileSync2);
+ let markdownFiles;
+ let docsPatterns = options.docs;
+ if (!docsPatterns || docsPatterns.length === 0) {
+ const configResult = await loadDocCovConfig(process.cwd());
+ if (configResult.config?.docs?.include) {
+ docsPatterns = configResult.config.docs.include;
+ log(chalk2.gray(`Using docs patterns from config: ${docsPatterns.join(", ")}`));
+ }
+ }
+ if (docsPatterns && docsPatterns.length > 0) {
+ markdownFiles = await loadMarkdownFiles(docsPatterns);
+ }
+ const diff = diffSpecWithDocs(baseSpec, headSpec, { markdownFiles });
  const format = options.output ?? "text";
  if (format === "json") {
  log(JSON.stringify(diff, null, 2));
  } else {
  printTextDiff(diff, log, error);
+ if (options.ai && diff.docsImpact && hasDocsImpact(diff)) {
+ if (!isAIDocsAnalysisAvailable()) {
+ log(chalk2.yellow(`
+ ⚠ AI analysis unavailable (set OPENAI_API_KEY or ANTHROPIC_API_KEY)`));
+ } else {
+ log(chalk2.gray(`
+ Generating AI summary...`));
+ const impacts = diff.docsImpact.impactedFiles.flatMap((f) => f.references.map((r) => ({
+ file: f.file,
+ exportName: r.exportName,
+ changeType: r.changeType,
+ context: r.context
+ })));
+ const summary = await generateImpactSummary(impacts);
+ if (summary) {
+ log("");
+ log(chalk2.bold("AI Summary"));
+ log(chalk2.cyan(` ${summary}`));
+ }
+ }
+ }
  }
  if (options.failOnRegression && diff.coverageDelta < 0) {
  error(chalk2.red(`
@@ -664,19 +785,42 @@ ${diff.driftIntroduced} new drift issue(s) introduced`));
  process.exitCode = 1;
  return;
  }
+ if (options.failOnDocsImpact && hasDocsImpact(diff)) {
+ const summary = getDocsImpactSummary(diff);
+ error(chalk2.red(`
+ ${summary.totalIssues} docs issue(s) require attention`));
+ process.exitCode = 1;
+ return;
+ }
  } catch (commandError) {
  error(chalk2.red("Error:"), commandError instanceof Error ? commandError.message : commandError);
  process.exitCode = 1;
  }
  });
  }
- function
-
-
+ function collect(value, previous) {
+ return previous.concat([value]);
+ }
+ async function loadMarkdownFiles(patterns) {
+ const files = [];
+ for (const pattern of patterns) {
+ const matches = await glob(pattern, { nodir: true });
+ for (const filePath of matches) {
+ try {
+ const content = fs2.readFileSync(filePath, "utf-8");
+ files.push({ path: filePath, content });
+ } catch {}
+ }
+ }
+ return parseMarkdownFiles(files);
+ }
+ function loadSpec(filePath, readFileSync2) {
+ const resolvedPath = path3.resolve(filePath);
+ if (!fs2.existsSync(resolvedPath)) {
  throw new Error(`File not found: ${filePath}`);
  }
  try {
- const content =
+ const content = readFileSync2(resolvedPath, "utf-8");
  return JSON.parse(content);
  } catch (parseError) {
  throw new Error(`Failed to parse ${filePath}: ${parseError instanceof Error ? parseError.message : parseError}`);
@@ -744,218 +888,61 @@ function printTextDiff(diff, log, _error) {
  log(chalk2.green(` -${diff.driftResolved} drift issue(s) resolved`));
  }
  }
-
-
-
-
-
-
-
-
-
-
-
-
-
- mergeFixes,
- parseJSDocToPatch,
- serializeJSDoc
- } from "@doccov/sdk";
- import chalk3 from "chalk";
- import ora2 from "ora";
- var defaultDependencies3 = {
- createDocCov: (options) => new DocCov2(options),
- spinner: (text) => ora2(text),
- log: console.log,
- error: console.error
- };
- function collectDrifts(exports) {
- const results = [];
- for (const exp of exports) {
- const drifts = exp.docs?.drift ?? [];
- for (const drift of drifts) {
- results.push({ export: exp, drift });
- }
- }
- return results;
- }
- function filterDriftsByType(drifts, onlyTypes) {
- if (!onlyTypes)
- return drifts;
- const allowedTypes = new Set(onlyTypes.split(",").map((t) => t.trim()));
- return drifts.filter((d) => allowedTypes.has(d.drift.type));
- }
- function groupByExport(drifts) {
- const map = new Map;
- for (const { export: exp, drift } of drifts) {
- const existing = map.get(exp) ?? [];
- existing.push(drift);
- map.set(exp, existing);
- }
- return map;
- }
- function registerFixCommand(program, dependencies = {}) {
- const { createDocCov, spinner, log, error } = {
- ...defaultDependencies3,
- ...dependencies
- };
- program.command("fix [entry]").description("Automatically fix documentation drift").option("--cwd <dir>", "Working directory", process.cwd()).option("--package <name>", "Target package name (for monorepos)").option("--dry-run", "Preview changes without writing").option("--only <types>", "Only fix specific drift types (comma-separated)").option("--skip-resolve", "Skip external type resolution from node_modules").action(async (entry, options) => {
- try {
- let targetDir = options.cwd;
- let entryFile = entry;
- if (options.package) {
- const packageDir = await findPackageInMonorepo(options.cwd, options.package);
- if (!packageDir) {
- throw new Error(`Package "${options.package}" not found in monorepo`);
+ if (diff.docsImpact) {
+ log("");
+ log(chalk2.bold("Docs Impact"));
+ const { impactedFiles, missingDocs, stats } = diff.docsImpact;
+ log(chalk2.gray(` Scanned ${stats.filesScanned} file(s), ${stats.codeBlocksFound} code block(s)`));
+ if (impactedFiles.length > 0) {
+ log("");
+ log(chalk2.yellow(` ${impactedFiles.length} file(s) need updates:`));
+ for (const file of impactedFiles.slice(0, 10)) {
+ log(chalk2.yellow(` \uD83D\uDCC4 ${file.file}`));
+ for (const ref of file.references.slice(0, 3)) {
+ const changeLabel = ref.changeType === "signature-changed" ? "signature changed" : ref.changeType === "removed" ? "removed" : "deprecated";
+ log(chalk2.gray(` Line ${ref.line}: ${ref.exportName} (${changeLabel})`));
  }
-
-
- }
- if (!entryFile) {
- entryFile = await findEntryPoint(targetDir, true);
- log(chalk3.gray(`Auto-detected entry point: ${path5.relative(targetDir, entryFile)}`));
- } else {
- entryFile = path5.resolve(targetDir, entryFile);
- if (fs4.existsSync(entryFile) && fs4.statSync(entryFile).isDirectory()) {
- targetDir = entryFile;
- entryFile = await findEntryPoint(entryFile, true);
- log(chalk3.gray(`Auto-detected entry point: ${entryFile}`));
+ if (file.references.length > 3) {
+ log(chalk2.gray(` ... and ${file.references.length - 3} more reference(s)`));
  }
  }
-
-
- analyzeSpinner.start();
- const doccov = createDocCov({ resolveExternalTypes });
- const result = await doccov.analyzeFileWithDiagnostics(entryFile);
- const spec = result.spec;
- analyzeSpinner.succeed("Analysis complete");
- const allDrifts = collectDrifts(spec.exports ?? []);
- if (allDrifts.length === 0) {
- log(chalk3.green("No drift issues found. Documentation is in sync!"));
- return;
- }
- const filteredDrifts = filterDriftsByType(allDrifts, options.only);
- if (filteredDrifts.length === 0) {
- log(chalk3.yellow("No matching drift issues for the specified types."));
- return;
- }
- const { fixable, nonFixable } = categorizeDrifts(filteredDrifts.map((d) => d.drift));
- if (fixable.length === 0) {
- log(chalk3.yellow(`Found ${nonFixable.length} drift issue(s), but none are auto-fixable.`));
- log(chalk3.gray("Non-fixable drift types require manual intervention:"));
- for (const drift of nonFixable.slice(0, 5)) {
- log(chalk3.gray(` • ${drift.type}: ${drift.issue}`));
- }
- return;
+ if (impactedFiles.length > 10) {
+ log(chalk2.gray(` ... and ${impactedFiles.length - 10} more file(s)`));
  }
+ }
+ if (missingDocs.length > 0) {
  log("");
- log(
-
- log(
+ log(chalk2.yellow(` ${missingDocs.length} new export(s) missing docs:`));
+ for (const name of missingDocs.slice(0, 5)) {
+ log(chalk2.yellow(` • ${name}`));
  }
-
-
- const edits = [];
- const editsByFile = new Map;
- for (const [exp, drifts] of groupedDrifts) {
- if (!exp.source?.file) {
- log(chalk3.gray(` Skipping ${exp.name}: no source location`));
- continue;
- }
- if (exp.source.file.endsWith(".d.ts")) {
- log(chalk3.gray(` Skipping ${exp.name}: declaration file`));
- continue;
- }
- const filePath = path5.resolve(targetDir, exp.source.file);
- if (!fs4.existsSync(filePath)) {
- log(chalk3.gray(` Skipping ${exp.name}: file not found`));
- continue;
- }
- const sourceFile = createSourceFile(filePath);
- const location = findJSDocLocation(sourceFile, exp.name, exp.source.line);
- if (!location) {
- log(chalk3.gray(` Skipping ${exp.name}: could not find declaration`));
- continue;
- }
- let existingPatch = {};
- if (location.hasExisting && location.existingJSDoc) {
- existingPatch = parseJSDocToPatch(location.existingJSDoc);
- }
- const fixes = generateFixesForExport({ ...exp, docs: { ...exp.docs, drift: drifts } }, existingPatch);
- if (fixes.length === 0)
- continue;
- const mergedPatch = mergeFixes(fixes, existingPatch);
- const newJSDoc = serializeJSDoc(mergedPatch, location.indent);
- const edit = {
- filePath,
- symbolName: exp.name,
- startLine: location.startLine,
- endLine: location.endLine,
- hasExisting: location.hasExisting,
- existingJSDoc: location.existingJSDoc,
- newJSDoc,
- indent: location.indent
- };
- edits.push(edit);
- const fileEdits = editsByFile.get(filePath) ?? [];
- fileEdits.push({ export: exp, edit, fixes, existingPatch });
- editsByFile.set(filePath, fileEdits);
- }
- if (edits.length === 0) {
- log(chalk3.yellow("No edits could be generated."));
- return;
- }
- if (options.dryRun) {
- log(chalk3.bold("Dry run - changes that would be made:"));
- log("");
- for (const [filePath, fileEdits] of editsByFile) {
- const relativePath = path5.relative(targetDir, filePath);
- log(chalk3.cyan(` ${relativePath}:`));
- for (const { export: exp, edit, fixes } of fileEdits) {
- const lineInfo = edit.hasExisting ? `lines ${edit.startLine + 1}-${edit.endLine + 1}` : `line ${edit.startLine + 1}`;
- log(` ${chalk3.bold(exp.name)} [${lineInfo}]`);
- for (const fix of fixes) {
- log(chalk3.green(` + ${fix.description}`));
- }
- }
- log("");
- }
- log(chalk3.gray("Run without --dry-run to apply these changes."));
- } else {
- const applySpinner = spinner("Applying fixes...");
- applySpinner.start();
- const result2 = await applyEdits(edits);
- if (result2.errors.length > 0) {
- applySpinner.warn("Some fixes could not be applied");
- for (const err of result2.errors) {
- error(chalk3.red(` ${err.file}: ${err.error}`));
- }
- } else {
- applySpinner.succeed(`Applied ${result2.editsApplied} fix(es) to ${result2.filesModified} file(s)`);
- }
- log("");
- for (const [filePath, fileEdits] of editsByFile) {
- const relativePath = path5.relative(targetDir, filePath);
- log(chalk3.green(` ✓ ${relativePath}: ${fileEdits.length} fix(es)`));
- }
+ if (missingDocs.length > 5) {
+ log(chalk2.gray(` ... and ${missingDocs.length - 5} more`));
  }
- } catch (commandError) {
- error(chalk3.red("Error:"), commandError instanceof Error ? commandError.message : commandError);
- process.exitCode = 1;
  }
-
+ if (impactedFiles.length === 0 && missingDocs.length === 0) {
+ log(chalk2.green(" ✓ No docs impact detected"));
+ }
+ }
+ log("");
  }

  // src/commands/generate.ts
- import * as
- import * as
- import {
+ import * as fs3 from "node:fs";
+ import * as path4 from "node:path";
+ import {
+ DocCov as DocCov2,
+ detectEntryPoint as detectEntryPoint2,
+ detectMonorepo as detectMonorepo2,
+ findPackageByName as findPackageByName2,
+ NodeFileSystem as NodeFileSystem2
+ } from "@doccov/sdk";
  import { normalize, validateSpec } from "@openpkg-ts/spec";
- import
- import
+ import chalk4 from "chalk";
+ import ora2 from "ora";

  // src/utils/filter-options.ts
- import
+ import chalk3 from "chalk";
  var unique = (values) => Array.from(new Set(values));
  var parseListFlag = (value) => {
  if (!value) {
@@ -965,7 +952,7 @@ var parseListFlag = (value) => {
  const normalized = rawItems.flatMap((item) => String(item).split(",")).map((item) => item.trim()).filter(Boolean);
  return normalized.length > 0 ? unique(normalized) : undefined;
  };
- var formatList = (label, values) => `${label}: ${values.map((value) =>
+ var formatList = (label, values) => `${label}: ${values.map((value) => chalk3.cyan(value)).join(", ")}`;
  var mergeFilterOptions = (config, cliOptions) => {
  const messages = [];
  const configInclude = config?.include;
@@ -1004,119 +991,11 @@ var mergeFilterOptions = (config, cliOptions) => {
  };
  };

- // src/utils/entry-detection.ts
- import * as fs5 from "node:fs";
- import * as path6 from "node:path";
- function detectEntryPoint(repoDir) {
- const pkgPath = path6.join(repoDir, "package.json");
- if (!fs5.existsSync(pkgPath)) {
- throw new Error("No package.json found - not a valid npm package");
- }
- let pkg;
- try {
- pkg = JSON.parse(fs5.readFileSync(pkgPath, "utf-8"));
- } catch {
- throw new Error("Failed to parse package.json");
- }
- if (typeof pkg.types === "string") {
- const resolved = resolveToTs(repoDir, pkg.types);
- if (resolved) {
- return { entryPath: resolved, source: "types" };
- }
- }
- if (typeof pkg.typings === "string") {
- const resolved = resolveToTs(repoDir, pkg.typings);
- if (resolved) {
- return { entryPath: resolved, source: "types" };
- }
- }
- const exports = pkg.exports;
- if (exports) {
- const mainExport = exports["."];
- if (typeof mainExport === "object" && mainExport !== null) {
- const exportObj = mainExport;
- if (typeof exportObj.types === "string") {
- const resolved = resolveToTs(repoDir, exportObj.types);
- if (resolved) {
- return { entryPath: resolved, source: "exports" };
- }
- }
- }
- }
- if (typeof pkg.main === "string") {
- const resolved = resolveToTs(repoDir, pkg.main);
- if (resolved) {
- return { entryPath: resolved, source: "main" };
- }
- }
- if (typeof pkg.module === "string") {
- const resolved = resolveToTs(repoDir, pkg.module);
- if (resolved) {
- return { entryPath: resolved, source: "module" };
- }
- }
- const commonPaths = [
- "src/index.ts",
- "src/index.tsx",
- "src/main.ts",
- "index.ts",
- "lib/index.ts",
- "source/index.ts"
- ];
- for (const p of commonPaths) {
- if (fs5.existsSync(path6.join(repoDir, p))) {
- return { entryPath: p, source: "fallback" };
- }
- }
- throw new Error("Could not detect TypeScript entry point. No types field in package.json and no common entry paths found.");
- }
- function resolveToTs(baseDir, filePath) {
- const normalized = filePath.replace(/^\.\//, "");
- const isSourceTs = normalized.endsWith(".ts") && !normalized.endsWith(".d.ts") || normalized.endsWith(".tsx");
- if (isSourceTs) {
- if (fs5.existsSync(path6.join(baseDir, normalized))) {
- return normalized;
- }
- }
- const candidates = [];
- if (normalized.startsWith("dist/")) {
- const srcPath = normalized.replace(/^dist\//, "src/");
- candidates.push(srcPath.replace(/\.js$/, ".ts"));
- candidates.push(srcPath.replace(/\.d\.ts$/, ".ts"));
- candidates.push(srcPath.replace(/\.js$/, ".tsx"));
- }
- if (normalized.startsWith("build/")) {
- const srcPath = normalized.replace(/^build\//, "src/");
- candidates.push(srcPath.replace(/\.js$/, ".ts"));
- candidates.push(srcPath.replace(/\.d\.ts$/, ".ts"));
- }
- if (normalized.startsWith("lib/")) {
- const srcPath = normalized.replace(/^lib\//, "src/");
- candidates.push(srcPath.replace(/\.js$/, ".ts"));
- candidates.push(srcPath.replace(/\.d\.ts$/, ".ts"));
- }
- candidates.push(normalized.replace(/\.js$/, ".ts"));
- candidates.push(normalized.replace(/\.d\.ts$/, ".ts"));
- candidates.push(normalized.replace(/\.js$/, ".tsx"));
- if (normalized.endsWith(".d.ts")) {
- const baseName = path6.basename(normalized, ".d.ts");
- candidates.push(`src/${baseName}.ts`);
- }
- for (const candidate of candidates) {
- if (candidate.endsWith(".d.ts"))
- continue;
- if (fs5.existsSync(path6.join(baseDir, candidate))) {
- return candidate;
- }
- }
- return;
- }
-
  // src/commands/generate.ts
- var
- createDocCov: (options) => new
- writeFileSync:
- spinner: (text) =>
+ var defaultDependencies3 = {
+ createDocCov: (options) => new DocCov2(options),
+ writeFileSync: fs3.writeFileSync,
+ spinner: (text) => ora2(text),
  log: console.log,
  error: console.error
  };
@@ -1135,38 +1014,46 @@ function stripDocsFields(spec) {
  }
  function formatDiagnosticOutput(prefix, diagnostic, baseDir) {
  const location = diagnostic.location;
- const relativePath = location?.file ?
- const locationText = location && relativePath ?
+ const relativePath = location?.file ? path4.relative(baseDir, location.file) || location.file : undefined;
+ const locationText = location && relativePath ? chalk4.gray(`${relativePath}:${location.line ?? 1}:${location.column ?? 1}`) : null;
  const locationPrefix = locationText ? `${locationText} ` : "";
  return `${prefix} ${locationPrefix}${diagnostic.message}`;
  }
  function registerGenerateCommand(program, dependencies = {}) {
  const { createDocCov, writeFileSync: writeFileSync2, spinner, log, error } = {
- ...
+ ...defaultDependencies3,
  ...dependencies
  };
  program.command("generate [entry]").description("Generate OpenPkg specification for documentation coverage analysis").option("-o, --output <file>", "Output file", "openpkg.json").option("-p, --package <name>", "Target package name (for monorepos)").option("--cwd <dir>", "Working directory", process.cwd()).option("--skip-resolve", "Skip external type resolution from node_modules").option("--include <ids>", "Filter exports by identifier (comma-separated or repeated)").option("--exclude <ids>", "Exclude exports by identifier (comma-separated or repeated)").option("--show-diagnostics", "Print TypeScript diagnostics from analysis").option("--no-docs", "Omit docs coverage fields from output (pure structural spec)").option("-y, --yes", "Skip all prompts and use defaults").action(async (entry, options) => {
  try {
  let targetDir = options.cwd;
  let entryFile = entry;
+ const fileSystem = new NodeFileSystem2(options.cwd);
  if (options.package) {
- const
- if (!
- throw new Error(`
+ const mono = await detectMonorepo2(fileSystem);
+ if (!mono.isMonorepo) {
+ throw new Error(`Not a monorepo. Remove --package flag for single-package repos.`);
+ }
+ const pkg = findPackageByName2(mono.packages, options.package);
+ if (!pkg) {
+ const available = mono.packages.map((p) => p.name).join(", ");
+ throw new Error(`Package "${options.package}" not found. Available: ${available}`);
  }
- targetDir =
- log(
+ targetDir = path4.join(options.cwd, pkg.path);
+ log(chalk4.gray(`Found package at ${pkg.path}`));
  }
  if (!entryFile) {
- const
-
-
+ const targetFs = new NodeFileSystem2(targetDir);
+ const detected = await detectEntryPoint2(targetFs);
+ entryFile = path4.join(targetDir, detected.path);
+ log(chalk4.gray(`Auto-detected entry point: ${detected.path} (from ${detected.source})`));
  } else {
- entryFile =
- if (
- const
-
-
+ entryFile = path4.resolve(targetDir, entryFile);
+ if (fs3.existsSync(entryFile) && fs3.statSync(entryFile).isDirectory()) {
+ const dirFs = new NodeFileSystem2(entryFile);
+ const detected = await detectEntryPoint2(dirFs);
+ entryFile = path4.join(entryFile, detected.path);
+ log(chalk4.gray(`Auto-detected entry point: ${detected.path} (from ${detected.source})`));
  }
  }
  const resolveExternalTypes = !options.skipResolve;
@@ -1178,15 +1065,15 @@ function registerGenerateCommand(program, dependencies = {}) {
  try {
  config = await loadDocCovConfig(targetDir);
  if (config?.filePath) {
- log(
+ log(chalk4.gray(`Loaded configuration from ${path4.relative(targetDir, config.filePath)}`));
  }
  } catch (configError) {
- error(
+ error(chalk4.red("Failed to load DocCov config:"), configError instanceof Error ? configError.message : configError);
  process.exit(1);
  }
  const resolvedFilters = mergeFilterOptions(config, cliFilters);
  for (const message of resolvedFilters.messages) {
- log(
+ log(chalk4.gray(`• ${message}`));
  }
  const spinnerInstance = spinner("Generating OpenPkg spec...");
  spinnerInstance.start();
@@ -1210,7 +1097,7 @@ function registerGenerateCommand(program, dependencies = {}) {
  if (!result) {
  throw new Error("Failed to produce an OpenPkg spec.");
  }
- const outputPath =
+ const outputPath = path4.resolve(process.cwd(), options.output);
  let normalized = normalize(result.spec);
  if (options.docs === false) {
  normalized = stripDocsFields(normalized);
@@ -1219,85 +1106,85 @@ function registerGenerateCommand(program, dependencies = {}) {
  if (!validation.ok) {
  spinnerInstance.fail("Spec failed schema validation");
  for (const err of validation.errors) {
- error(
+ error(chalk4.red(`schema: ${err.instancePath || "/"} ${err.message}`));
  }
  process.exit(1);
  }
  writeFileSync2(outputPath, JSON.stringify(normalized, null, 2));
- log(
- log(
- log(
+ log(chalk4.green(`✓ Generated ${options.output}`));
+ log(chalk4.gray(` ${getArrayLength(normalized.exports)} exports`));
+ log(chalk4.gray(` ${getArrayLength(normalized.types)} types`));
  if (options.showDiagnostics && result.diagnostics.length > 0) {
  log("");
- log(
+ log(chalk4.bold("Diagnostics"));
  for (const diagnostic of result.diagnostics) {
- const prefix = diagnostic.severity === "error" ?
+ const prefix = diagnostic.severity === "error" ? chalk4.red("✖") : diagnostic.severity === "warning" ? chalk4.yellow("⚠") : chalk4.cyan("ℹ");
  log(formatDiagnosticOutput(prefix, diagnostic, targetDir));
  }
  }
  } catch (commandError) {
- error(
+ error(chalk4.red("Error:"), commandError instanceof Error ? commandError.message : commandError);
  process.exit(1);
  }
  });
  }

  // src/commands/init.ts
- import * as
- import * as
- import
- var
- fileExists:
- writeFileSync:
- readFileSync:
+ import * as fs4 from "node:fs";
+ import * as path5 from "node:path";
+ import chalk5 from "chalk";
+ var defaultDependencies4 = {
+ fileExists: fs4.existsSync,
+ writeFileSync: fs4.writeFileSync,
+ readFileSync: fs4.readFileSync,
  log: console.log,
  error: console.error
  };
  function registerInitCommand(program, dependencies = {}) {
- const { fileExists: fileExists2, writeFileSync: writeFileSync3, readFileSync:
- ...
+ const { fileExists: fileExists2, writeFileSync: writeFileSync3, readFileSync: readFileSync3, log, error } = {
+ ...defaultDependencies4,
  ...dependencies
  };
  program.command("init").description("Create a DocCov configuration file").option("--cwd <dir>", "Working directory", process.cwd()).option("--format <format>", "Config format: auto, mjs, js, cjs", "auto").action((options) => {
- const cwd =
+ const cwd = path5.resolve(options.cwd);
  const formatOption = String(options.format ?? "auto").toLowerCase();
  if (!isValidFormat(formatOption)) {
- error(
+ error(chalk5.red(`Invalid format "${formatOption}". Use auto, mjs, js, or cjs.`));
  process.exitCode = 1;
  return;
  }
  const existing = findExistingConfig(cwd, fileExists2);
  if (existing) {
- error(
+ error(chalk5.red(`A DocCov config already exists at ${path5.relative(cwd, existing) || "./doccov.config.*"}.`));
  process.exitCode = 1;
  return;
  }
- const packageType = detectPackageType(cwd, fileExists2,
+ const packageType = detectPackageType(cwd, fileExists2, readFileSync3);
  const targetFormat = resolveFormat(formatOption, packageType);
  if (targetFormat === "js" && packageType !== "module") {
- log(
+ log(chalk5.yellow('Package is not marked as "type": "module"; creating doccov.config.js may require enabling ESM.'));
  }
  const fileName = `doccov.config.${targetFormat}`;
- const outputPath =
+ const outputPath = path5.join(cwd, fileName);
  if (fileExists2(outputPath)) {
- error(
+ error(chalk5.red(`Cannot create ${fileName}; file already exists.`));
  process.exitCode = 1;
  return;
  }
  const template = buildTemplate(targetFormat);
  writeFileSync3(outputPath, template, { encoding: "utf8" });
- log(
+ log(chalk5.green(`✓ Created ${path5.relative(process.cwd(), outputPath)}`));
  });
  }
  var isValidFormat = (value) => {
  return value === "auto" || value === "mjs" || value === "js" || value === "cjs";
  };
  var findExistingConfig = (cwd, fileExists2) => {
- let current =
- const { root } =
+ let current = path5.resolve(cwd);
+ const { root } = path5.parse(current);
  while (true) {
  for (const candidate of DOCCOV_CONFIG_FILENAMES) {
- const candidatePath =
+ const candidatePath = path5.join(current, candidate);
  if (fileExists2(candidatePath)) {
  return candidatePath;
  }
@@ -1305,17 +1192,17 @@ var findExistingConfig = (cwd, fileExists2) => {
if (current === root) {
break;
}
- current =
+ current = path5.dirname(current);
}
return null;
};
- var detectPackageType = (cwd, fileExists2,
+ var detectPackageType = (cwd, fileExists2, readFileSync3) => {
const packageJsonPath = findNearestPackageJson(cwd, fileExists2);
if (!packageJsonPath) {
return;
}
try {
- const raw =
+ const raw = readFileSync3(packageJsonPath, "utf8");
const parsed = JSON.parse(raw);
if (parsed.type === "module") {
return "module";
@@ -1327,17 +1214,17 @@ var detectPackageType = (cwd, fileExists2, readFileSync5) => {
return;
};
var findNearestPackageJson = (cwd, fileExists2) => {
- let current =
- const { root } =
+ let current = path5.resolve(cwd);
+ const { root } = path5.parse(current);
while (true) {
- const candidate =
+ const candidate = path5.join(current, "package.json");
if (fileExists2(candidate)) {
return candidate;
}
if (current === root) {
break;
}
- current =
+ current = path5.dirname(current);
}
return null;
};
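For context, registerInitCommand keeps its dependency-injection seam: the callbacks in defaultDependencies4 can be overridden, so the rewritten init flow can be exercised without touching the real file system. A minimal sketch, assuming the harness below (the stubbed callbacks, the captured files map, and the /tmp/demo path are illustrative, not part of the package):

import { Command } from "commander";

// Hypothetical test harness: capture writes in memory instead of writing to disk.
const files = new Map();
const program = new Command();
registerInitCommand(program, {
  fileExists: (p) => files.has(p),
  writeFileSync: (p, content) => files.set(p, content),
  readFileSync: () => JSON.stringify({ type: "module" }),
  log: console.log,
  error: console.error
});

// Parse only the user-supplied arguments (commander's { from: "user" } mode).
program.parse(["init", "--format", "mjs", "--cwd", "/tmp/demo"], { from: "user" });
// files now holds "/tmp/demo/doccov.config.mjs" -> the generated template.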
@@ -1373,11 +1260,17 @@ var buildTemplate = (format) => {
};

// src/commands/report.ts
- import * as
- import * as
- import {
-
-
+ import * as fs5 from "node:fs";
+ import * as path6 from "node:path";
+ import {
+ DocCov as DocCov3,
+ detectEntryPoint as detectEntryPoint3,
+ detectMonorepo as detectMonorepo3,
+ findPackageByName as findPackageByName3,
+ NodeFileSystem as NodeFileSystem3
+ } from "@doccov/sdk";
+ import chalk6 from "chalk";
+ import ora3 from "ora";

// src/reports/markdown.ts
function bar(pct, width = 10) {
@@ -1554,25 +1447,34 @@ function registerReportCommand(program) {
try {
let spec;
if (options.spec) {
- const specPath =
- spec = JSON.parse(
+ const specPath = path6.resolve(options.cwd, options.spec);
+ spec = JSON.parse(fs5.readFileSync(specPath, "utf-8"));
} else {
let targetDir = options.cwd;
let entryFile = entry;
+ const fileSystem = new NodeFileSystem3(options.cwd);
if (options.package) {
- const
- if (!
- throw new Error(`
-
+ const mono = await detectMonorepo3(fileSystem);
+ if (!mono.isMonorepo) {
+ throw new Error(`Not a monorepo. Remove --package flag for single-package repos.`);
+ }
+ const pkg = findPackageByName3(mono.packages, options.package);
+ if (!pkg) {
+ const available = mono.packages.map((p) => p.name).join(", ");
+ throw new Error(`Package "${options.package}" not found. Available: ${available}`);
+ }
+ targetDir = path6.join(options.cwd, pkg.path);
}
if (!entryFile) {
-
+ const targetFs = new NodeFileSystem3(targetDir);
+ const detected = await detectEntryPoint3(targetFs);
+ entryFile = path6.join(targetDir, detected.path);
} else {
- entryFile =
+ entryFile = path6.resolve(targetDir, entryFile);
}
- const spinner =
+ const spinner = ora3("Analyzing...").start();
const resolveExternalTypes = !options.skipResolve;
- const doccov = new
+ const doccov = new DocCov3({ resolveExternalTypes });
const result = await doccov.analyzeFileWithDiagnostics(entryFile);
spinner.succeed("Analysis complete");
spec = result.spec;
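The report command now resolves monorepo packages and entry points through @doccov/sdk rather than local helpers. A hedged sketch of the same flow driven directly from the SDK, using only the calls and fields visible in this hunk (the "@doccov/sdk" package name passed to findPackageByName is illustrative; any return-shape detail beyond mono.packages, pkg.path, and detected.path is an assumption):

import { DocCov, NodeFileSystem, detectMonorepo, findPackageByName, detectEntryPoint } from "@doccov/sdk";
import * as path from "node:path";

const cwd = process.cwd();
const fileSystem = new NodeFileSystem(cwd);
const mono = await detectMonorepo(fileSystem);
// Pick a workspace package by name when the repo is a monorepo.
const pkg = mono.isMonorepo ? findPackageByName(mono.packages, "@doccov/sdk") : undefined;
const targetDir = pkg ? path.join(cwd, pkg.path) : cwd;
const detected = await detectEntryPoint(new NodeFileSystem(targetDir));
const doccov = new DocCov({ resolveExternalTypes: true });
const result = await doccov.analyzeFileWithDiagnostics(path.join(targetDir, detected.path));
console.log(result.spec.docs?.coverageScore);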
@@ -1589,26 +1491,36 @@ function registerReportCommand(program) {
output = renderMarkdown(stats, { limit });
}
if (options.out) {
- const outPath =
-
- console.log(
+ const outPath = path6.resolve(options.cwd, options.out);
+ fs5.writeFileSync(outPath, output);
+ console.log(chalk6.green(`Report written to ${outPath}`));
} else {
console.log(output);
}
} catch (err) {
- console.error(
+ console.error(chalk6.red("Error:"), err instanceof Error ? err.message : err);
process.exitCode = 1;
}
});
}

// src/commands/scan.ts
- import * as
+ import * as fs7 from "node:fs";
import * as os from "node:os";
- import * as
- import {
-
-
+ import * as path8 from "node:path";
+ import {
+ DocCov as DocCov4,
+ detectBuildInfo,
+ detectEntryPoint as detectEntryPoint4,
+ detectMonorepo as detectMonorepo4,
+ detectPackageManager,
+ findPackageByName as findPackageByName4,
+ formatPackageList,
+ getInstallCommand,
+ NodeFileSystem as NodeFileSystem4
+ } from "@doccov/sdk";
+ import chalk7 from "chalk";
+ import ora4 from "ora";
import { simpleGit } from "simple-git";

// src/utils/github-url.ts
@@ -1642,17 +1554,17 @@ function buildDisplayUrl(parsed) {
}

// src/utils/llm-build-plan.ts
- import * as
- import * as
- import { createAnthropic as
- import { createOpenAI as
- import { generateObject as
- import { z as
- var BuildPlanSchema =
- installCommand:
- buildCommands:
- entryPoint:
- notes:
+ import * as fs6 from "node:fs";
+ import * as path7 from "node:path";
+ import { createAnthropic as createAnthropic3 } from "@ai-sdk/anthropic";
+ import { createOpenAI as createOpenAI3 } from "@ai-sdk/openai";
+ import { generateObject as generateObject3 } from "ai";
+ import { z as z4 } from "zod";
+ var BuildPlanSchema = z4.object({
+ installCommand: z4.string().optional().describe("Additional install command if needed"),
+ buildCommands: z4.array(z4.string()).describe('Build steps to run, e.g. ["npm run build:wasm"]'),
+ entryPoint: z4.string().describe("Path to TS/TSX entry file after build"),
+ notes: z4.string().optional().describe("Caveats or warnings")
});
var CONTEXT_FILES = [
"package.json",
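BuildPlanSchema constrains the structured output of the LLM build-plan fallback. A hedged sketch of an object that would satisfy it, with field names taken from the schema above (the concrete commands, paths, and notes are illustrative, not taken from any real repository):

// Example value that parses against BuildPlanSchema.
const examplePlan = {
  installCommand: "npm install",           // optional extra install step
  buildCommands: ["npm run build:wasm"],   // build steps run before analysis
  entryPoint: "pkg/index.d.ts",            // TS/TSX entry file after the build
  notes: "Requires the Rust toolchain for wasm-pack"
};
BuildPlanSchema.parse(examplePlan); // zod validation; throws if the shape is wrong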
@@ -1667,22 +1579,22 @@ var CONTEXT_FILES = [
"wasm-pack.json"
];
var MAX_FILE_CHARS = 2000;
- function
+ function getModel3() {
const provider = process.env.DOCCOV_LLM_PROVIDER?.toLowerCase();
if (provider === "anthropic" || process.env.ANTHROPIC_API_KEY) {
- const anthropic =
+ const anthropic = createAnthropic3();
return anthropic("claude-sonnet-4-20250514");
}
- const openai =
+ const openai = createOpenAI3();
return openai("gpt-4o-mini");
}
async function gatherContextFiles(repoDir) {
const sections = [];
for (const fileName of CONTEXT_FILES) {
- const filePath =
- if (
+ const filePath = path7.join(repoDir, fileName);
+ if (fs6.existsSync(filePath)) {
try {
- let content =
+ let content = fs6.readFileSync(filePath, "utf-8");
if (content.length > MAX_FILE_CHARS) {
content = `${content.slice(0, MAX_FILE_CHARS)}
... (truncated)`;
@@ -1724,8 +1636,8 @@ async function generateBuildPlan(repoDir) {
if (!context.trim()) {
return null;
}
- const model =
- const { object } = await
+ const model = getModel3();
+ const { object } = await generateObject3({
model,
schema: BuildPlanSchema,
prompt: BUILD_PLAN_PROMPT(context)
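generateBuildPlan picks a provider from the environment (Anthropic when DOCCOV_LLM_PROVIDER=anthropic or ANTHROPIC_API_KEY is set, otherwise OpenAI's gpt-4o-mini) and requests a schema-shaped object through the AI SDK's generateObject. A minimal sketch of the same pattern, assuming only the ai, @ai-sdk/openai, and zod APIs shown above (the prompt text and PlanSchema fields here are illustrative):

import { generateObject } from "ai";
import { createOpenAI } from "@ai-sdk/openai";
import { z } from "zod";

const PlanSchema = z.object({
  buildCommands: z.array(z.string()),
  entryPoint: z.string()
});

const openai = createOpenAI(); // reads OPENAI_API_KEY from the environment
const { object: plan } = await generateObject({
  model: openai("gpt-4o-mini"),
  schema: PlanSchema,
  prompt: "Given this repo's package.json, list the build commands and the TypeScript entry point."
});
console.log(plan.buildCommands, plan.entryPoint);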
@@ -1733,119 +1645,16 @@ async function generateBuildPlan(repoDir) {
return object;
}

- // src/utils/monorepo-detection.ts
- import * as fs10 from "node:fs";
- import * as path11 from "node:path";
- import { glob } from "glob";
- async function detectMonorepo(repoDir) {
- const pkgPath = path11.join(repoDir, "package.json");
- if (!fs10.existsSync(pkgPath)) {
- return { isMonorepo: false, packages: [], type: "none" };
- }
- let pkg;
- try {
- pkg = JSON.parse(fs10.readFileSync(pkgPath, "utf-8"));
- } catch {
- return { isMonorepo: false, packages: [], type: "none" };
- }
- if (pkg.workspaces) {
- const patterns = extractWorkspacePatterns(pkg.workspaces);
- const packages = await resolveWorkspacePackages(repoDir, patterns);
- return { isMonorepo: packages.length > 0, packages, type: "npm" };
- }
- const pnpmPath = path11.join(repoDir, "pnpm-workspace.yaml");
- if (fs10.existsSync(pnpmPath)) {
- const patterns = parsePnpmWorkspace(pnpmPath);
- const packages = await resolveWorkspacePackages(repoDir, patterns);
- return { isMonorepo: packages.length > 0, packages, type: "pnpm" };
- }
- const lernaPath = path11.join(repoDir, "lerna.json");
- if (fs10.existsSync(lernaPath)) {
- try {
- const lerna = JSON.parse(fs10.readFileSync(lernaPath, "utf-8"));
- const patterns = lerna.packages ?? ["packages/*"];
- const packages = await resolveWorkspacePackages(repoDir, patterns);
- return { isMonorepo: packages.length > 0, packages, type: "lerna" };
- } catch {}
- }
- return { isMonorepo: false, packages: [], type: "none" };
- }
- function extractWorkspacePatterns(workspaces) {
- if (Array.isArray(workspaces)) {
- return workspaces.filter((w) => typeof w === "string");
- }
- if (typeof workspaces === "object" && workspaces !== null) {
- const ws = workspaces;
- if (Array.isArray(ws.packages)) {
- return ws.packages.filter((w) => typeof w === "string");
- }
- }
- return [];
- }
- function parsePnpmWorkspace(filePath) {
- try {
- const content = fs10.readFileSync(filePath, "utf-8");
- const match = content.match(/packages:\s*\n((?:\s+-\s+.+\n?)+)/);
- if (match) {
- const lines = match[1].split(`
- `);
- return lines.map((line) => line.replace(/^\s+-\s+['"]?/, "").replace(/['"]?\s*$/, "")).filter(Boolean);
- }
- } catch {}
- return ["packages/*"];
- }
- async function resolveWorkspacePackages(repoDir, patterns) {
- const packages = [];
- for (const pattern of patterns) {
- const normalizedPattern = pattern.endsWith("/") ? pattern.slice(0, -1) : pattern;
- try {
- const matches = await glob(normalizedPattern, {
- cwd: repoDir,
- absolute: false
- });
- for (const match of matches) {
- const pkgJsonPath = path11.join(repoDir, match, "package.json");
- if (fs10.existsSync(pkgJsonPath)) {
- try {
- const pkgJson = JSON.parse(fs10.readFileSync(pkgJsonPath, "utf-8"));
- packages.push({
- name: pkgJson.name ?? path11.basename(match),
- path: path11.join(repoDir, match),
- relativePath: match
- });
- } catch {}
- }
- }
- } catch {}
- }
- return packages.sort((a, b) => a.name.localeCompare(b.name));
- }
- async function findPackage(repoDir, packageName) {
- const mono = await detectMonorepo(repoDir);
- if (!mono.isMonorepo) {
- return;
- }
- return mono.packages.find((pkg) => pkg.name === packageName || pkg.relativePath === packageName);
- }
- function formatPackageList(packages, limit = 10) {
- const lines = packages.slice(0, limit).map((pkg) => `  --package ${pkg.name}`);
- if (packages.length > limit) {
- lines.push(`  ... and ${packages.length - limit} more`);
- }
- return lines.join(`
- `);
- }
-
// src/commands/scan.ts
- var
- createDocCov: (options) => new
- spinner: (text) =>
+ var defaultDependencies5 = {
+ createDocCov: (options) => new DocCov4(options),
+ spinner: (text) => ora4(text),
log: console.log,
error: console.error
};
function registerScanCommand(program, dependencies = {}) {
const { createDocCov, spinner, log, error } = {
- ...
+ ...defaultDependencies5,
...dependencies
};
program.command("scan <url>").description("Analyze docs coverage for any public GitHub repository").option("--ref <branch>", "Branch or tag to analyze").option("--package <name>", "Target package in monorepo").option("--output <format>", "Output format: text or json", "text").option("--no-cleanup", "Keep cloned repo (for debugging)").option("--skip-install", "Skip dependency installation (faster, but may limit type resolution)").option("--skip-resolve", "Skip external type resolution from node_modules").option("--save-spec <path>", "Save full OpenPkg spec to file").action(async (url, options) => {
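The local monorepo-detection module removed above (glob-based workspace resolution, findPackage, formatPackageList) is superseded by the SDK equivalents imported at the top of scan.ts, as the later hunks show. A hedged sketch of the replacement flow, using only calls visible in this diff; the repoDir path and the package name are placeholders, and any workspace-pattern details now live inside @doccov/sdk:

import { NodeFileSystem, detectMonorepo, findPackageByName, formatPackageList } from "@doccov/sdk";

const repoDir = "/tmp/cloned-repo"; // illustrative clone location
const fileSystem = new NodeFileSystem(repoDir);
const mono = await detectMonorepo(fileSystem);
if (mono.isMonorepo) {
  console.log(`Found ${mono.packages.length} workspace packages:`);
  console.log(formatPackageList(mono.packages));
  const pkg = findPackageByName(mono.packages, "my-package"); // name is illustrative
  // Note: later hunks join pkg.path onto the clone dir, so pkg.path appears to be repo-relative here.
}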
@@ -1855,11 +1664,11 @@ function registerScanCommand(program, dependencies = {}) {
const cloneUrl = buildCloneUrl(parsed);
const displayUrl = buildDisplayUrl(parsed);
log("");
- log(
- log(
+ log(chalk7.bold(`Scanning ${displayUrl}`));
+ log(chalk7.gray(`Branch/tag: ${parsed.ref}`));
log("");
- tempDir =
-
+ tempDir = path8.join(os.tmpdir(), `doccov-scan-${Date.now()}-${Math.random().toString(36).slice(2)}`);
+ fs7.mkdirSync(tempDir, { recursive: true });
const cloneSpinner = spinner(`Cloning ${parsed.owner}/${parsed.repo}...`);
cloneSpinner.start();
try {
@@ -1883,37 +1692,31 @@ function registerScanCommand(program, dependencies = {}) {
}
throw new Error(`Clone failed: ${message}`);
}
+ const fileSystem = new NodeFileSystem4(tempDir);
if (options.skipInstall) {
- log(
+ log(chalk7.gray("Skipping dependency installation (--skip-install)"));
} else {
const installSpinner = spinner("Installing dependencies...");
installSpinner.start();
const installErrors = [];
try {
const { execSync } = await import("node:child_process");
- const
-
-
- { file: "bun.lockb", cmd: "bun install --frozen-lockfile" },
- { file: "yarn.lock", cmd: "yarn install --frozen-lockfile" },
- { file: "package-lock.json", cmd: "npm install --legacy-peer-deps" }
- ];
+ const pmInfo = await detectPackageManager(fileSystem);
+ const installCmd = getInstallCommand(pmInfo);
+ const cmdString = installCmd.join(" ");
let installed = false;
-
-
-
-
-
-
-
-
-
-
-
-
- const msg = cmdError instanceof Error ? cmdError.message : String(cmdError);
- installErrors.push(`[${cmd}] ${stderr.slice(0, 150) || msg.slice(0, 150)}`);
- }
+ if (pmInfo.lockfile) {
+ try {
+ execSync(cmdString, {
+ cwd: tempDir,
+ stdio: "pipe",
+ timeout: 180000
+ });
+ installed = true;
+ } catch (cmdError) {
+ const stderr = cmdError?.stderr?.toString() ?? "";
+ const msg = cmdError instanceof Error ? cmdError.message : String(cmdError);
+ installErrors.push(`[${cmdString}] ${stderr.slice(0, 150) || msg.slice(0, 150)}`);
}
}
if (!installed) {
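Dependency installation no longer walks a hard-coded lockfile-to-command table; the scan command asks the SDK which package manager the cloned repo uses and what its install command is. A hedged sketch of that pair of calls as used here (pmInfo.lockfile and the token-array return of getInstallCommand are inferred from the surrounding code; repoDir is a placeholder):

import { execSync } from "node:child_process";
import { NodeFileSystem, detectPackageManager, getInstallCommand } from "@doccov/sdk";

const repoDir = "/tmp/cloned-repo"; // illustrative clone location
const fileSystem = new NodeFileSystem(repoDir);
const pmInfo = await detectPackageManager(fileSystem); // picks npm/yarn/pnpm/bun from the repo's lockfile
if (pmInfo.lockfile) {
  const installCmd = getInstallCommand(pmInfo); // array of command tokens
  execSync(installCmd.join(" "), { cwd: repoDir, stdio: "pipe", timeout: 180000 });
}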
@@ -1947,67 +1750,46 @@ function registerScanCommand(program, dependencies = {}) {
} else {
installSpinner.warn("Could not install dependencies (analysis may be limited)");
for (const err of installErrors) {
- log(
+ log(chalk7.gray(`  ${err}`));
}
}
} catch (outerError) {
const msg = outerError instanceof Error ? outerError.message : String(outerError);
installSpinner.warn(`Could not install dependencies: ${msg.slice(0, 100)}`);
for (const err of installErrors) {
- log(
+ log(chalk7.gray(`  ${err}`));
}
}
}
let targetDir = tempDir;
let packageName;
- const mono = await
+ const mono = await detectMonorepo4(fileSystem);
if (mono.isMonorepo) {
if (!options.package) {
error("");
- error(
+ error(chalk7.red(`Monorepo detected with ${mono.packages.length} packages. Specify target with --package:`));
error("");
error(formatPackageList(mono.packages));
error("");
throw new Error("Monorepo requires --package flag");
}
- const pkg =
+ const pkg = findPackageByName4(mono.packages, options.package);
if (!pkg) {
error("");
- error(
+ error(chalk7.red(`Package "${options.package}" not found. Available packages:`));
error("");
error(formatPackageList(mono.packages));
error("");
throw new Error(`Package not found: ${options.package}`);
}
- targetDir = pkg.path;
+ targetDir = path8.join(tempDir, pkg.path);
packageName = pkg.name;
- log(
+ log(chalk7.gray(`Analyzing package: ${packageName}`));
}
const entrySpinner = spinner("Detecting entry point...");
entrySpinner.start();
let entryPath;
- const
- if (!entryFile.endsWith(".d.ts"))
- return false;
- const cargoLocations = [
- path12.join(pkgDir, "Cargo.toml"),
- path12.join(repoRoot, "Cargo.toml")
- ];
- const hasCargoToml = cargoLocations.some((p) => fs11.existsSync(p));
- const checkWasmScripts = (dir) => {
- const pkgPath = path12.join(dir, "package.json");
- if (fs11.existsSync(pkgPath)) {
- try {
- const pkg = JSON.parse(fs11.readFileSync(pkgPath, "utf-8"));
- const scripts = Object.values(pkg.scripts ?? {}).join(" ");
- return scripts.includes("wasm-pack") || scripts.includes("wasm");
- } catch {}
- }
- return false;
- };
- const hasWasmPackScript = checkWasmScripts(pkgDir) || checkWasmScripts(repoRoot);
- return hasCargoToml || hasWasmPackScript;
- };
+ const targetFs = mono.isMonorepo ? new NodeFileSystem4(targetDir) : fileSystem;
let buildFailed = false;
const runLlmFallback = async (reason) => {
entrySpinner.text = `${reason}, trying LLM fallback...`;
@@ -2018,53 +1800,55 @@ function registerScanCommand(program, dependencies = {}) {
if (plan.buildCommands.length > 0) {
const { execSync } = await import("node:child_process");
for (const cmd of plan.buildCommands) {
- log(
+ log(chalk7.gray(`  Running: ${cmd}`));
try {
execSync(cmd, { cwd: targetDir, stdio: "pipe", timeout: 300000 });
} catch (buildError) {
buildFailed = true;
const msg = buildError instanceof Error ? buildError.message : String(buildError);
if (msg.includes("rustc") || msg.includes("cargo") || msg.includes("wasm-pack")) {
- log(
+ log(chalk7.yellow(`  ⚠ Build requires Rust toolchain (not available)`));
} else if (msg.includes("rimraf") || msg.includes("command not found")) {
- log(
+ log(chalk7.yellow(`  ⚠ Build failed: missing dependencies`));
} else {
- log(
+ log(chalk7.yellow(`  ⚠ Build failed: ${msg.slice(0, 80)}`));
}
}
}
}
if (plan.notes) {
- log(
+ log(chalk7.gray(`  Note: ${plan.notes}`));
}
return plan.entryPoint;
};
try {
- const entry =
-
+ const entry = await detectEntryPoint4(targetFs);
+ const buildInfo = await detectBuildInfo(targetFs);
+ const needsBuildStep = entry.isDeclarationOnly && buildInfo.exoticIndicators.wasm;
+ if (needsBuildStep) {
entrySpinner.text = "Detected .d.ts entry with WASM indicators...";
const llmEntry = await runLlmFallback("WASM project detected");
if (llmEntry) {
- entryPath =
+ entryPath = path8.join(targetDir, llmEntry);
if (buildFailed) {
entrySpinner.succeed(`Entry point: ${llmEntry} (using pre-committed declarations)`);
- log(
+ log(chalk7.gray(" Coverage may be limited - generated .d.ts files typically lack JSDoc"));
} else {
entrySpinner.succeed(`Entry point: ${llmEntry} (from LLM fallback - WASM project)`);
}
} else {
- entryPath =
- entrySpinner.succeed(`Entry point: ${entry.
- log(
+ entryPath = path8.join(targetDir, entry.path);
+ entrySpinner.succeed(`Entry point: ${entry.path} (from ${entry.source})`);
+ log(chalk7.yellow(" ⚠ WASM project detected but no API key - analysis may be limited"));
}
} else {
- entryPath =
- entrySpinner.succeed(`Entry point: ${entry.
+ entryPath = path8.join(targetDir, entry.path);
+ entrySpinner.succeed(`Entry point: ${entry.path} (from ${entry.source})`);
}
} catch (entryError) {
const llmEntry = await runLlmFallback("Heuristics failed");
if (llmEntry) {
- entryPath =
+ entryPath = path8.join(targetDir, llmEntry);
entrySpinner.succeed(`Entry point: ${llmEntry} (from LLM fallback)`);
} else {
entrySpinner.fail("Could not detect entry point (set OPENAI_API_KEY for smart fallback)");
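Entry-point detection now combines two SDK probes: detectEntryPoint (which reports the candidate path, its source, and whether it is declaration-only) and detectBuildInfo (whose exoticIndicators flag WASM-style builds); only when both a .d.ts-only entry and WASM indicators are present does the LLM fallback run. A hedged sketch of that decision, reusing only the fields visible in this hunk (targetDir is a placeholder):

import { NodeFileSystem, detectEntryPoint, detectBuildInfo } from "@doccov/sdk";

const targetDir = "/tmp/doccov-scan-demo/packages/core"; // illustrative
const targetFs = new NodeFileSystem(targetDir);
const entry = await detectEntryPoint(targetFs);     // exposes path, source, isDeclarationOnly
const buildInfo = await detectBuildInfo(targetFs);  // exposes exoticIndicators.wasm

if (entry.isDeclarationOnly && buildInfo.exoticIndicators.wasm) {
  // WASM-style package that ships only .d.ts: fall back to an LLM-generated build plan.
} else {
  console.log(`Entry point: ${entry.path} (from ${entry.source})`);
}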
@@ -2086,9 +1870,9 @@ function registerScanCommand(program, dependencies = {}) {
const spec = result.spec;
const coverageScore = spec.docs?.coverageScore ?? 0;
if (options.saveSpec) {
- const specPath =
-
- log(
+ const specPath = path8.resolve(process.cwd(), options.saveSpec);
+ fs7.writeFileSync(specPath, JSON.stringify(spec, null, 2));
+ log(chalk7.green(`✓ Saved spec to ${options.saveSpec}`));
}
const undocumented = [];
const driftIssues = [];
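--save-spec writes the full OpenPkg spec as pretty-printed JSON, so the coverage score can be re-read later without re-running the analysis. A small sketch of consuming that file; the doccov.spec.json filename is illustrative and simply mirrors whatever path was passed to --save-spec:

import { readFileSync } from "node:fs";

// Read a spec previously saved with: doccov scan <url> --save-spec doccov.spec.json
const spec = JSON.parse(readFileSync("doccov.spec.json", "utf-8"));
console.log(`Coverage: ${spec.docs?.coverageScore ?? 0}%`);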
@@ -2125,7 +1909,7 @@ function registerScanCommand(program, dependencies = {}) {
printTextResult(scanResult, log);
}
} catch (commandError) {
- error(
+ error(chalk7.red("Error:"), commandError instanceof Error ? commandError.message : commandError);
process.exitCode = 1;
} finally {
if (tempDir && options.cleanup !== false) {
@@ -2135,46 +1919,46 @@ function registerScanCommand(program, dependencies = {}) {
stdio: "ignore"
}).unref();
} else if (tempDir) {
- log(
+ log(chalk7.gray(`Repo preserved at: ${tempDir}`));
}
}
});
}
function printTextResult(result, log) {
log("");
- log(
+ log(chalk7.bold("DocCov Scan Results"));
log("─".repeat(40));
const repoName = result.packageName ? `${result.owner}/${result.repo} (${result.packageName})` : `${result.owner}/${result.repo}`;
- log(`Repository: ${
- log(`Branch: ${
+ log(`Repository: ${chalk7.cyan(repoName)}`);
+ log(`Branch: ${chalk7.gray(result.ref)}`);
log("");
- const coverageColor = result.coverage >= 80 ?
- log(
+ const coverageColor = result.coverage >= 80 ? chalk7.green : result.coverage >= 50 ? chalk7.yellow : chalk7.red;
+ log(chalk7.bold("Coverage"));
log(`  ${coverageColor(`${result.coverage}%`)}`);
log("");
- log(
+ log(chalk7.bold("Stats"));
log(`  ${result.exportCount} exports`);
log(`  ${result.typeCount} types`);
log(`  ${result.undocumented.length} undocumented`);
log(`  ${result.driftCount} drift issues`);
if (result.undocumented.length > 0) {
log("");
- log(
+ log(chalk7.bold("Undocumented Exports"));
for (const name of result.undocumented.slice(0, 10)) {
- log(
+ log(chalk7.yellow(`  ! ${name}`));
}
if (result.undocumented.length > 10) {
- log(
+ log(chalk7.gray(`  ... and ${result.undocumented.length - 10} more`));
}
}
if (result.drift.length > 0) {
log("");
- log(
+ log(chalk7.bold("Drift Issues"));
for (const d of result.drift.slice(0, 5)) {
- log(
+ log(chalk7.red(`  • ${d.export}: ${d.issue}`));
}
if (result.drift.length > 5) {
- log(
+ log(chalk7.gray(`  ... and ${result.drift.length - 5} more`));
}
}
log("");
@@ -2182,14 +1966,13 @@ function printTextResult(result, log) {

// src/cli.ts
var __filename2 = fileURLToPath(import.meta.url);
- var __dirname2 =
- var packageJson = JSON.parse(
+ var __dirname2 = path9.dirname(__filename2);
+ var packageJson = JSON.parse(readFileSync5(path9.join(__dirname2, "../package.json"), "utf-8"));
var program = new Command;
program.name("doccov").description("DocCov - Documentation coverage and drift detection for TypeScript").version(packageJson.version);
registerGenerateCommand(program);
registerCheckCommand(program);
registerDiffCommand(program);
- registerFixCommand(program);
registerInitCommand(program);
registerReportCommand(program);
registerScanCommand(program);