jsdoczoom 1.2.1 → 1.2.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cache.js +4 -0
- package/dist/cli.js +30 -2
- package/dist/debug.js +61 -0
- package/dist/drilldown.js +48 -5
- package/dist/eslint-engine.js +7 -2
- package/dist/file-discovery.js +80 -7
- package/dist/lint.js +15 -4
- package/dist/search.js +47 -21
- package/dist/type-declarations.js +6 -1
- package/dist/validate.js +17 -4
- package/package.json +3 -1
- package/types/debug.d.ts +46 -0
- package/types/file-discovery.d.ts +3 -9
- package/types/search.d.ts +2 -0
package/dist/cache.js
CHANGED
|
@@ -10,6 +10,7 @@
|
|
|
10
10
|
import { createHash } from "node:crypto";
|
|
11
11
|
import { mkdir, readFile, rename, writeFile } from "node:fs/promises";
|
|
12
12
|
import { join } from "node:path";
|
|
13
|
+
import { debugCache, recordCacheHit, recordCacheMiss } from "./debug.js";
|
|
13
14
|
/**
|
|
14
15
|
* Compute a SHA-256 content hash for the given string.
|
|
15
16
|
*
|
|
@@ -89,8 +90,11 @@ export async function processWithCache(config, mode, content, compute) {
|
|
|
89
90
|
await ensureCacheDir(config, mode);
|
|
90
91
|
const cached = await readCacheEntry(config, mode, hash);
|
|
91
92
|
if (cached !== null) {
|
|
93
|
+
recordCacheHit();
|
|
92
94
|
return cached;
|
|
93
95
|
}
|
|
96
|
+
recordCacheMiss();
|
|
97
|
+
debugCache("[MISS] mode=%s hash=%s", mode, hash.slice(0, 8));
|
|
94
98
|
const result = await compute();
|
|
95
99
|
await writeCacheEntry(config, mode, hash, result);
|
|
96
100
|
return result;
|
package/dist/cli.js
CHANGED
|
@@ -2,6 +2,12 @@
|
|
|
2
2
|
import { readFile } from "node:fs/promises";
|
|
3
3
|
import { dirname, resolve } from "node:path";
|
|
4
4
|
import { fileURLToPath } from "node:url";
|
|
5
|
+
import {
|
|
6
|
+
createDebug,
|
|
7
|
+
debugDiscovery,
|
|
8
|
+
flushCacheSummary,
|
|
9
|
+
time,
|
|
10
|
+
} from "./debug.js";
|
|
5
11
|
import { drilldown, drilldownFiles } from "./drilldown.js";
|
|
6
12
|
import { JsdocError } from "./errors.js";
|
|
7
13
|
import { discoverFiles } from "./file-discovery.js";
|
|
@@ -41,6 +47,7 @@ Options:
|
|
|
41
47
|
--disable-cache Skip all cache operations
|
|
42
48
|
--cache-directory Override cache directory (default: system temp)
|
|
43
49
|
--explain-rule R Explain a lint rule with examples (e.g. jsdoc/informative-docs)
|
|
50
|
+
--debug Enable debug timing output (stderr). Equivalent to DEBUG=jsdoczoom:*
|
|
44
51
|
|
|
45
52
|
Selector:
|
|
46
53
|
A glob pattern or file path, optionally with @depth suffix (1-4).
|
|
@@ -116,6 +123,7 @@ function parseArgs(args) {
|
|
|
116
123
|
selectorArg: undefined,
|
|
117
124
|
extraArgs: [],
|
|
118
125
|
searchQuery: undefined,
|
|
126
|
+
debug: false,
|
|
119
127
|
};
|
|
120
128
|
for (let i = 0; i < args.length; i++) {
|
|
121
129
|
const arg = args[i];
|
|
@@ -156,6 +164,10 @@ function parseArgs(args) {
|
|
|
156
164
|
parsed.disableCache = true;
|
|
157
165
|
continue;
|
|
158
166
|
}
|
|
167
|
+
if (arg === "--debug") {
|
|
168
|
+
parsed.debug = true;
|
|
169
|
+
continue;
|
|
170
|
+
}
|
|
159
171
|
// Value flags
|
|
160
172
|
if (arg === "--limit") {
|
|
161
173
|
const { value, nextIndex } = parseValueFlag(args, i);
|
|
@@ -226,6 +238,7 @@ async function processFileList(
|
|
|
226
238
|
json,
|
|
227
239
|
pretty,
|
|
228
240
|
limit,
|
|
241
|
+
gitignore,
|
|
229
242
|
cwd,
|
|
230
243
|
cacheConfig,
|
|
231
244
|
searchQuery,
|
|
@@ -237,6 +250,7 @@ async function processFileList(
|
|
|
237
250
|
filePaths,
|
|
238
251
|
searchQuery,
|
|
239
252
|
cwd,
|
|
253
|
+
gitignore,
|
|
240
254
|
limit,
|
|
241
255
|
cacheConfig,
|
|
242
256
|
);
|
|
@@ -257,6 +271,7 @@ async function processFileList(
|
|
|
257
271
|
limit,
|
|
258
272
|
cacheConfig,
|
|
259
273
|
);
|
|
274
|
+
flushCacheSummary(`drilldown ${filePaths.length} files`);
|
|
260
275
|
writeDrilldownResult(result, json, pretty);
|
|
261
276
|
}
|
|
262
277
|
}
|
|
@@ -271,6 +286,7 @@ async function processStdin(
|
|
|
271
286
|
json,
|
|
272
287
|
pretty,
|
|
273
288
|
limit,
|
|
289
|
+
gitignore,
|
|
274
290
|
cwd,
|
|
275
291
|
cacheConfig,
|
|
276
292
|
searchQuery,
|
|
@@ -284,6 +300,7 @@ async function processStdin(
|
|
|
284
300
|
json,
|
|
285
301
|
pretty,
|
|
286
302
|
limit,
|
|
303
|
+
gitignore,
|
|
287
304
|
cwd,
|
|
288
305
|
cacheConfig,
|
|
289
306
|
searchQuery,
|
|
@@ -453,6 +470,9 @@ function validateModeCombinations(parsed, json) {
|
|
|
453
470
|
*/
|
|
454
471
|
export async function main(args, stdin) {
|
|
455
472
|
const json = args.includes("--json");
|
|
473
|
+
if (args.includes("--debug")) {
|
|
474
|
+
createDebug.enable("jsdoczoom:*");
|
|
475
|
+
}
|
|
456
476
|
try {
|
|
457
477
|
const parsed = parseArgs(args);
|
|
458
478
|
if (await handleEarlyExitFlags(parsed, json)) return;
|
|
@@ -471,6 +491,7 @@ export async function main(args, stdin) {
|
|
|
471
491
|
parsed.json,
|
|
472
492
|
parsed.pretty,
|
|
473
493
|
parsed.limit,
|
|
494
|
+
parsed.gitignore,
|
|
474
495
|
cwd,
|
|
475
496
|
cacheConfig,
|
|
476
497
|
parsed.searchQuery,
|
|
@@ -482,10 +503,16 @@ export async function main(args, stdin) {
|
|
|
482
503
|
...(parsed.selectorArg ? [parsed.selectorArg] : []),
|
|
483
504
|
...parsed.extraArgs,
|
|
484
505
|
];
|
|
485
|
-
const fileLists = await Promise.all(allArgPaths.map((p) => discoverFiles(p, cwd, parsed.gitignore)));
|
|
486
|
-
|
|
506
|
+
const fileLists = await time(
|
|
507
|
+
debugDiscovery,
|
|
508
|
+
`discover ${allArgPaths.length} paths`,
|
|
509
|
+
() =>
|
|
510
|
+
Promise.all(
|
|
511
|
+
allArgPaths.map((p) => discoverFiles(p, cwd, parsed.gitignore)),
|
|
512
|
+
),
|
|
487
513
|
);
|
|
488
514
|
const filePaths = [...new Set(fileLists.flat())];
|
|
515
|
+
debugDiscovery("discover total=%d unique files", filePaths.length);
|
|
489
516
|
await processFileList(
|
|
490
517
|
filePaths,
|
|
491
518
|
parsed.selectorArg,
|
|
@@ -494,6 +521,7 @@ export async function main(args, stdin) {
|
|
|
494
521
|
parsed.json,
|
|
495
522
|
parsed.pretty,
|
|
496
523
|
parsed.limit,
|
|
524
|
+
parsed.gitignore,
|
|
497
525
|
cwd,
|
|
498
526
|
cacheConfig,
|
|
499
527
|
parsed.searchQuery,
|
package/dist/debug.js
ADDED
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Shared debug instances and timing utilities for jsdoczoom.
|
|
3
|
+
*
|
|
4
|
+
* Activate with DEBUG=jsdoczoom:* or via the --debug CLI flag.
|
|
5
|
+
* Each namespace maps to a source module. Delta timing (+NNNms) is
|
|
6
|
+
* provided automatically by the debug package between calls on the
|
|
7
|
+
* same namespace.
|
|
8
|
+
*
|
|
9
|
+
* @summary Namespaced debug loggers and perf_hooks timing helper
|
|
10
|
+
*/
|
|
11
|
+
import { performance } from "node:perf_hooks";
|
|
12
|
+
import createDebug from "debug";
|
|
13
|
+
export const debugDiscovery = createDebug("jsdoczoom:discovery");
|
|
14
|
+
export const debugSearch = createDebug("jsdoczoom:search");
|
|
15
|
+
export const debugCache = createDebug("jsdoczoom:cache");
|
|
16
|
+
export const debugEslint = createDebug("jsdoczoom:eslint");
|
|
17
|
+
export const debugLint = createDebug("jsdoczoom:lint");
|
|
18
|
+
export const debugValidate = createDebug("jsdoczoom:validate");
|
|
19
|
+
export const debugBarrel = createDebug("jsdoczoom:barrel");
|
|
20
|
+
export const debugDrilldown = createDebug("jsdoczoom:drilldown");
|
|
21
|
+
export const debugTs = createDebug("jsdoczoom:ts");
|
|
22
|
+
export { createDebug };
|
|
23
|
+
/** Accumulated cache hits since last flushCacheSummary call. */
|
|
24
|
+
let _cacheHits = 0;
|
|
25
|
+
/** Accumulated cache misses since last flushCacheSummary call. */
|
|
26
|
+
let _cacheMisses = 0;
|
|
27
|
+
/** Record a cache hit without emitting a log line. */
|
|
28
|
+
export function recordCacheHit() {
|
|
29
|
+
_cacheHits++;
|
|
30
|
+
}
|
|
31
|
+
/** Record a cache miss without emitting a log line. */
|
|
32
|
+
export function recordCacheMiss() {
|
|
33
|
+
_cacheMisses++;
|
|
34
|
+
}
|
|
35
|
+
/**
|
|
36
|
+
* Emit a single cache summary line and reset the counters.
|
|
37
|
+
* Call this after each batch of processWithCache operations.
|
|
38
|
+
*
|
|
39
|
+
* @param label - Describes the batch (e.g. "search 326 files")
|
|
40
|
+
*/
|
|
41
|
+
export function flushCacheSummary(label) {
|
|
42
|
+
if (_cacheHits === 0 && _cacheMisses === 0) return;
|
|
43
|
+
debugCache("[SUMMARY] %s hits=%d misses=%d", label, _cacheHits, _cacheMisses);
|
|
44
|
+
_cacheHits = 0;
|
|
45
|
+
_cacheMisses = 0;
|
|
46
|
+
}
|
|
47
|
+
/**
|
|
48
|
+
* Wrap an async operation with start/done timing logs.
|
|
49
|
+
*
|
|
50
|
+
* @param dbg - Debug instance to log to
|
|
51
|
+
* @param label - Label shown in start/done messages
|
|
52
|
+
* @param fn - Async operation to time
|
|
53
|
+
* @returns Result of fn
|
|
54
|
+
*/
|
|
55
|
+
export async function time(dbg, label, fn) {
|
|
56
|
+
const t0 = performance.now();
|
|
57
|
+
dbg("%s start", label);
|
|
58
|
+
const result = await fn();
|
|
59
|
+
dbg("%s done %sms", label, (performance.now() - t0).toFixed(1));
|
|
60
|
+
return result;
|
|
61
|
+
}
|
package/dist/drilldown.js
CHANGED
|
@@ -2,10 +2,14 @@ import { readFile } from "node:fs/promises";
|
|
|
2
2
|
import { dirname, relative } from "node:path";
|
|
3
3
|
import { getBarrelChildren, isBarrel } from "./barrel.js";
|
|
4
4
|
import { processWithCache } from "./cache.js";
|
|
5
|
+
import { debugDrilldown } from "./debug.js";
|
|
5
6
|
import { JsdocError } from "./errors.js";
|
|
6
7
|
import { discoverFiles, loadGitignore } from "./file-discovery.js";
|
|
7
8
|
import { parseFileSummaries } from "./jsdoc-parser.js";
|
|
8
|
-
import {
|
|
9
|
+
import {
|
|
10
|
+
generateTypeDeclarations,
|
|
11
|
+
splitDeclarations,
|
|
12
|
+
} from "./type-declarations.js";
|
|
9
13
|
import { DEFAULT_CACHE_DIR } from "./types.js";
|
|
10
14
|
|
|
11
15
|
/** Terminal level (1-indexed): 1=summary, 2=description, 3=type declarations, 4=full file. */
|
|
@@ -27,16 +31,31 @@ const DEFAULT_CACHE_CONFIG = {
|
|
|
27
31
|
* Type declarations and file content are pre-computed and passed in to support
|
|
28
32
|
* async cache integration.
|
|
29
33
|
*/
|
|
34
|
+
/**
|
|
35
|
+
* Build the text for level 3 (type declarations).
|
|
36
|
+
* Description is rendered as plain markdown prose; declaration blocks
|
|
37
|
+
* (those starting with a `// L` annotation) are each wrapped in a
|
|
38
|
+
* typescript code fence. The module-level chunk (JSDoc + imports) is dropped.
|
|
39
|
+
*/
|
|
40
|
+
function buildLevel3Text(description, typeDeclarations) {
|
|
41
|
+
const parts = [];
|
|
42
|
+
if (description !== null) parts.push(description);
|
|
43
|
+
if (typeDeclarations.length > 0) {
|
|
44
|
+
const fenced = splitDeclarations(typeDeclarations)
|
|
45
|
+
.filter((chunk) => chunk.startsWith("// L"))
|
|
46
|
+
.map((chunk) => `\`\`\`typescript\n${chunk}\n\`\`\``)
|
|
47
|
+
.join("\n\n");
|
|
48
|
+
if (fenced) parts.push(fenced);
|
|
49
|
+
}
|
|
50
|
+
return parts.join("\n\n");
|
|
51
|
+
}
|
|
30
52
|
function buildLevels(info, typeDeclarations, fileContent) {
|
|
31
53
|
const { summary, description } = info;
|
|
32
54
|
return [
|
|
33
55
|
summary !== null ? { text: summary } : null,
|
|
34
56
|
description !== null ? { text: description } : null,
|
|
35
57
|
{
|
|
36
|
-
text:
|
|
37
|
-
typeDeclarations.length > 0
|
|
38
|
-
? `\`\`\`typescript\n${typeDeclarations}\n\`\`\``
|
|
39
|
-
: typeDeclarations,
|
|
58
|
+
text: buildLevel3Text(description, typeDeclarations),
|
|
40
59
|
},
|
|
41
60
|
{ text: `\`\`\`typescript\n${fileContent}\n\`\`\`` },
|
|
42
61
|
];
|
|
@@ -83,6 +102,12 @@ function processFile(
|
|
|
83
102
|
effectiveDepth < TERMINAL_LEVEL &&
|
|
84
103
|
levels[effectiveDepth - 1] === null
|
|
85
104
|
) {
|
|
105
|
+
debugDrilldown(
|
|
106
|
+
"depth advance: depth=%d → %d (null level) file=%s",
|
|
107
|
+
effectiveDepth,
|
|
108
|
+
effectiveDepth + 1,
|
|
109
|
+
idPath,
|
|
110
|
+
);
|
|
86
111
|
effectiveDepth++;
|
|
87
112
|
}
|
|
88
113
|
const level = levels[effectiveDepth - 1];
|
|
@@ -238,6 +263,10 @@ async function processBarrelAtDepth(barrel, depth, cwd, config) {
|
|
|
238
263
|
// Null-skip: if depth 2 but no description, advance to transition depth
|
|
239
264
|
let effectiveDepth = depth;
|
|
240
265
|
if (effectiveDepth === 2 && !barrel.hasDescription) {
|
|
266
|
+
debugDrilldown(
|
|
267
|
+
"barrel depth advance: depth=2 → 3 (no description) barrel=%s",
|
|
268
|
+
relative(cwd, barrel.path),
|
|
269
|
+
);
|
|
241
270
|
effectiveDepth = 3;
|
|
242
271
|
}
|
|
243
272
|
if (effectiveDepth < 3) {
|
|
@@ -255,6 +284,13 @@ async function processBarrelAtDepth(barrel, depth, cwd, config) {
|
|
|
255
284
|
}
|
|
256
285
|
// Barrel transitions: barrel disappears, children appear
|
|
257
286
|
const childDepth = effectiveDepth - 2;
|
|
287
|
+
debugDrilldown(
|
|
288
|
+
"barrel transition: barrel=%s depth=%d → children=%d at childDepth=%d",
|
|
289
|
+
relative(cwd, barrel.path),
|
|
290
|
+
effectiveDepth,
|
|
291
|
+
barrel.children.length,
|
|
292
|
+
childDepth,
|
|
293
|
+
);
|
|
258
294
|
return collectSafeResults(barrel.children, childDepth, cwd, config);
|
|
259
295
|
}
|
|
260
296
|
/**
|
|
@@ -295,6 +331,13 @@ async function processGlobWithBarrels(files, depth, cwd, config) {
|
|
|
295
331
|
config,
|
|
296
332
|
);
|
|
297
333
|
const gatedFiles = buildGatedFileSet(barrelInfos);
|
|
334
|
+
debugDrilldown(
|
|
335
|
+
"barrel gating: barrels=%d non-barrels=%d gated=%d depth=%d",
|
|
336
|
+
barrelPaths.length,
|
|
337
|
+
nonBarrelPaths.length,
|
|
338
|
+
gatedFiles.size,
|
|
339
|
+
depth,
|
|
340
|
+
);
|
|
298
341
|
const results = [...barrelErrors];
|
|
299
342
|
const barrelResults = await Promise.all(
|
|
300
343
|
barrelInfos
|
package/dist/eslint-engine.js
CHANGED
|
@@ -6,6 +6,7 @@
|
|
|
6
6
|
import tsParser from "@typescript-eslint/parser";
|
|
7
7
|
import { ESLint } from "eslint";
|
|
8
8
|
import jsdocPlugin from "eslint-plugin-jsdoc";
|
|
9
|
+
import { debugEslint, time } from "./debug.js";
|
|
9
10
|
import plugin from "./eslint-plugin.js";
|
|
10
11
|
|
|
11
12
|
/** Common invalid JSDoc tags and their recommended replacements */
|
|
@@ -115,7 +116,9 @@ export function createLintLinter(cwd) {
|
|
|
115
116
|
* @returns Simplified message list with ruleId, messageId, and fatal flag
|
|
116
117
|
*/
|
|
117
118
|
export async function lintFileForValidation(eslint, sourceText, filePath) {
|
|
118
|
-
const results = await eslint.lintText(sourceText, { filePath });
|
|
119
|
+
const results = await time(debugEslint, `validate ${filePath}`, () =>
|
|
120
|
+
eslint.lintText(sourceText, { filePath }),
|
|
121
|
+
);
|
|
119
122
|
if (results.length === 0) return [];
|
|
120
123
|
return results[0].messages.map((msg) => ({
|
|
121
124
|
ruleId: msg.ruleId,
|
|
@@ -204,7 +207,9 @@ function extractSymbolName(sourceText, line) {
|
|
|
204
207
|
* @returns Array of lint diagnostics with line, column, rule, message, and severity
|
|
205
208
|
*/
|
|
206
209
|
export async function lintFileForLint(eslint, sourceText, filePath) {
|
|
207
|
-
const results = await eslint.lintText(sourceText, { filePath });
|
|
210
|
+
const results = await time(debugEslint, `lint ${filePath}`, () =>
|
|
211
|
+
eslint.lintText(sourceText, { filePath }),
|
|
212
|
+
);
|
|
208
213
|
if (results.length === 0) return [];
|
|
209
214
|
return results[0].messages.map((msg) => {
|
|
210
215
|
const diagnostic = {
|
package/dist/file-discovery.js
CHANGED
|
@@ -1,24 +1,43 @@
|
|
|
1
|
-
import { readFile, stat } from "node:fs/promises";
|
|
1
|
+
import { readFile, realpath, stat } from "node:fs/promises";
|
|
2
2
|
import { dirname, join, relative, resolve } from "node:path";
|
|
3
3
|
import { glob } from "glob";
|
|
4
4
|
import ignore from "ignore";
|
|
5
|
+
import { debugDiscovery } from "./debug.js";
|
|
5
6
|
import { JsdocError } from "./errors.js";
|
|
7
|
+
|
|
6
8
|
/**
|
|
7
9
|
* Walks .gitignore files from cwd to filesystem root, building an ignore
|
|
8
10
|
* filter that glob results pass through. Direct-path lookups bypass the
|
|
9
|
-
* filter since the user explicitly named the file.
|
|
10
|
-
*
|
|
11
|
+
* filter since the user explicitly named the file. Results are cached per
|
|
12
|
+
* cwd for the process lifetime so concurrent discoverFiles calls sharing a
|
|
13
|
+
* cwd only walk the filesystem once.
|
|
11
14
|
*
|
|
12
15
|
* @summary Resolve selector patterns to absolute file paths with gitignore filtering
|
|
13
16
|
*/
|
|
17
|
+
/** Process-lifetime cache: cwd → in-flight or settled Ignore promise. */
|
|
18
|
+
const gitignoreCache = new Map();
|
|
14
19
|
/**
|
|
15
20
|
* Walk from `cwd` up to the filesystem root, collecting .gitignore entries.
|
|
16
|
-
* Returns an Ignore instance loaded with all discovered rules.
|
|
21
|
+
* Returns an Ignore instance loaded with all discovered rules. Results are
|
|
22
|
+
* cached per cwd so repeated calls (e.g. from concurrent discoverFiles calls)
|
|
23
|
+
* only perform the filesystem walk once.
|
|
17
24
|
*/
|
|
18
|
-
export async function loadGitignore(cwd) {
|
|
25
|
+
export function loadGitignore(cwd) {
|
|
26
|
+
const key = resolve(cwd);
|
|
27
|
+
const cached = gitignoreCache.get(key);
|
|
28
|
+
if (cached !== undefined) {
|
|
29
|
+
return cached;
|
|
30
|
+
}
|
|
31
|
+
const promise = loadGitignoreUncached(key);
|
|
32
|
+
gitignoreCache.set(key, promise);
|
|
33
|
+
return promise;
|
|
34
|
+
}
|
|
35
|
+
async function loadGitignoreUncached(cwd) {
|
|
19
36
|
const ig = ignore();
|
|
20
37
|
let dir = resolve(cwd);
|
|
38
|
+
let walkDepth = 0;
|
|
21
39
|
while (true) {
|
|
40
|
+
debugDiscovery("gitignore walk depth=%d dir=%s", walkDepth, dir);
|
|
22
41
|
const gitignorePath = join(dir, ".gitignore");
|
|
23
42
|
try {
|
|
24
43
|
const content = await readFile(gitignorePath, "utf-8");
|
|
@@ -27,6 +46,12 @@ export async function loadGitignore(cwd) {
|
|
|
27
46
|
.split("\n")
|
|
28
47
|
.map((l) => l.trim())
|
|
29
48
|
.filter((l) => l && !l.startsWith("#"));
|
|
49
|
+
debugDiscovery(
|
|
50
|
+
"gitignore depth=%d loaded %d rules from %s",
|
|
51
|
+
walkDepth,
|
|
52
|
+
lines.length,
|
|
53
|
+
gitignorePath,
|
|
54
|
+
);
|
|
30
55
|
for (const line of lines) {
|
|
31
56
|
// Prefix rules from ancestor .gitignore files so paths are
|
|
32
57
|
// relative to `cwd`, which is where glob results are anchored.
|
|
@@ -36,8 +61,15 @@ export async function loadGitignore(cwd) {
|
|
|
36
61
|
// No .gitignore at this level, continue walking up
|
|
37
62
|
}
|
|
38
63
|
const parent = dirname(dir);
|
|
39
|
-
if (parent === dir)
|
|
64
|
+
if (parent === dir) {
|
|
65
|
+
debugDiscovery(
|
|
66
|
+
"gitignore walk complete at depth=%d (reached root)",
|
|
67
|
+
walkDepth,
|
|
68
|
+
);
|
|
69
|
+
break;
|
|
70
|
+
}
|
|
40
71
|
dir = parent;
|
|
72
|
+
walkDepth++;
|
|
41
73
|
}
|
|
42
74
|
return ig;
|
|
43
75
|
}
|
|
@@ -57,15 +89,52 @@ export async function loadGitignore(cwd) {
|
|
|
57
89
|
export async function discoverFiles(pattern, cwd, gitignore = true) {
|
|
58
90
|
const hasGlobChars = /[*?[\]{]/.test(pattern);
|
|
59
91
|
if (hasGlobChars) {
|
|
92
|
+
debugDiscovery("glob pattern=%s", pattern);
|
|
60
93
|
const matches = await glob(pattern, { cwd, absolute: true });
|
|
61
94
|
let filtered = matches.filter(
|
|
62
95
|
(f) => (f.endsWith(".ts") || f.endsWith(".tsx")) && !f.endsWith(".d.ts"),
|
|
63
96
|
);
|
|
64
97
|
if (gitignore) {
|
|
98
|
+
const preFilter = filtered.length;
|
|
65
99
|
const ig = await loadGitignore(cwd);
|
|
66
100
|
filtered = filtered.filter((abs) => !ig.ignores(relative(cwd, abs)));
|
|
101
|
+
debugDiscovery(
|
|
102
|
+
"glob done pattern=%s matched=%d after-gitignore=%d",
|
|
103
|
+
pattern,
|
|
104
|
+
preFilter,
|
|
105
|
+
filtered.length,
|
|
106
|
+
);
|
|
107
|
+
} else {
|
|
108
|
+
debugDiscovery(
|
|
109
|
+
"glob done pattern=%s matched=%d",
|
|
110
|
+
pattern,
|
|
111
|
+
filtered.length,
|
|
112
|
+
);
|
|
67
113
|
}
|
|
68
|
-
|
|
114
|
+
// Deduplicate by realpath so symlinks to the same physical file are
|
|
115
|
+
// processed only once. Glob prevents infinite cycles but can still
|
|
116
|
+
// return the same inode at multiple paths (e.g. via directory symlinks).
|
|
117
|
+
const withRealpaths = await Promise.all(
|
|
118
|
+
filtered.map(async (abs) => ({
|
|
119
|
+
abs,
|
|
120
|
+
real: await realpath(abs).catch(() => abs),
|
|
121
|
+
})),
|
|
122
|
+
);
|
|
123
|
+
const seen = new Set();
|
|
124
|
+
const deduped = [];
|
|
125
|
+
for (const { abs, real } of withRealpaths) {
|
|
126
|
+
if (seen.has(real)) {
|
|
127
|
+
debugDiscovery(
|
|
128
|
+
"symlink dedup: skipping %s (same realpath as earlier entry %s)",
|
|
129
|
+
abs,
|
|
130
|
+
real,
|
|
131
|
+
);
|
|
132
|
+
continue;
|
|
133
|
+
}
|
|
134
|
+
seen.add(real);
|
|
135
|
+
deduped.push(abs);
|
|
136
|
+
}
|
|
137
|
+
return deduped.sort();
|
|
69
138
|
}
|
|
70
139
|
// Direct path
|
|
71
140
|
const resolved = resolve(cwd, pattern);
|
|
@@ -76,6 +145,10 @@ export async function discoverFiles(pattern, cwd, gitignore = true) {
|
|
|
76
145
|
throw new JsdocError("FILE_NOT_FOUND", `File not found: ${pattern}`);
|
|
77
146
|
}
|
|
78
147
|
if (statResult.isDirectory()) {
|
|
148
|
+
debugDiscovery(
|
|
149
|
+
"discoverFiles recursing: directory path=%s → glob",
|
|
150
|
+
resolved,
|
|
151
|
+
);
|
|
79
152
|
return discoverFiles(`${resolved}/**`, cwd, gitignore);
|
|
80
153
|
}
|
|
81
154
|
return [resolved];
|
package/dist/lint.js
CHANGED
|
@@ -12,6 +12,7 @@ import { readFile } from "node:fs/promises";
|
|
|
12
12
|
import { relative } from "node:path";
|
|
13
13
|
import { findMissingBarrels } from "./barrel.js";
|
|
14
14
|
import { processWithCache } from "./cache.js";
|
|
15
|
+
import { debugLint, flushCacheSummary, time } from "./debug.js";
|
|
15
16
|
import { JsdocError } from "./errors.js";
|
|
16
17
|
import { createLintLinter, lintFileForLint } from "./eslint-engine.js";
|
|
17
18
|
import { discoverFiles } from "./file-discovery.js";
|
|
@@ -98,9 +99,14 @@ export async function lint(
|
|
|
98
99
|
}
|
|
99
100
|
const tsFiles = files.filter((f) => f.endsWith(".ts") || f.endsWith(".tsx"));
|
|
100
101
|
const eslint = createLintLinter(cwd);
|
|
101
|
-
|
|
102
|
-
|
|
102
|
+
debugLint("lint start files=%d pattern=%s", tsFiles.length, selector.pattern);
|
|
103
|
+
const fileResults = await time(
|
|
104
|
+
debugLint,
|
|
105
|
+
`lint ${tsFiles.length} files`,
|
|
106
|
+
() =>
|
|
107
|
+
Promise.all(tsFiles.map((f) => lintSingleFile(eslint, f, cwd, config))),
|
|
103
108
|
);
|
|
109
|
+
flushCacheSummary(`lint ${tsFiles.length} files`);
|
|
104
110
|
const missingBarrels = await findMissingBarrels(tsFiles, cwd);
|
|
105
111
|
return buildLintResult(fileResults, tsFiles.length, limit, missingBarrels);
|
|
106
112
|
}
|
|
@@ -126,9 +132,14 @@ export async function lintFiles(
|
|
|
126
132
|
(f) => f.endsWith(".ts") || f.endsWith(".tsx"),
|
|
127
133
|
);
|
|
128
134
|
const eslint = createLintLinter(cwd);
|
|
129
|
-
|
|
130
|
-
|
|
135
|
+
debugLint("lintFiles start files=%d", tsFiles.length);
|
|
136
|
+
const fileResults = await time(
|
|
137
|
+
debugLint,
|
|
138
|
+
`lintFiles ${tsFiles.length} files`,
|
|
139
|
+
() =>
|
|
140
|
+
Promise.all(tsFiles.map((f) => lintSingleFile(eslint, f, cwd, config))),
|
|
131
141
|
);
|
|
142
|
+
flushCacheSummary(`lintFiles ${tsFiles.length} files`);
|
|
132
143
|
const missingBarrels = await findMissingBarrels(tsFiles, cwd);
|
|
133
144
|
return buildLintResult(fileResults, tsFiles.length, limit, missingBarrels);
|
|
134
145
|
}
|
package/dist/search.js
CHANGED
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
import { readFile } from "node:fs/promises";
|
|
2
2
|
import { relative } from "node:path";
|
|
3
3
|
import { processWithCache } from "./cache.js";
|
|
4
|
+
import { debugSearch, flushCacheSummary, time } from "./debug.js";
|
|
4
5
|
import { JsdocError } from "./errors.js";
|
|
5
6
|
import { discoverFiles, loadGitignore } from "./file-discovery.js";
|
|
6
7
|
import { parseFileSummaries } from "./jsdoc-parser.js";
|
|
@@ -53,6 +54,7 @@ async function processFileSafe(filePath, regex, cwd, config) {
|
|
|
53
54
|
);
|
|
54
55
|
// Level 1: filename/path match — fall back through levels like drilldown
|
|
55
56
|
if (regex.test(idPath)) {
|
|
57
|
+
debugSearch("depth=1 path match file=%s", idPath);
|
|
56
58
|
if (info.summary !== null) {
|
|
57
59
|
return { next_id: `${idPath}@2`, text: info.summary };
|
|
58
60
|
}
|
|
@@ -76,10 +78,12 @@ async function processFileSafe(filePath, regex, cwd, config) {
|
|
|
76
78
|
}
|
|
77
79
|
// Level 2: summary match
|
|
78
80
|
if (info.summary !== null && regex.test(info.summary)) {
|
|
81
|
+
debugSearch("depth=2 summary match file=%s", idPath);
|
|
79
82
|
return { next_id: `${idPath}@2`, text: info.summary };
|
|
80
83
|
}
|
|
81
84
|
// Level 3a: description match
|
|
82
85
|
if (info.description !== null && regex.test(info.description)) {
|
|
86
|
+
debugSearch("depth=3a description match file=%s", idPath);
|
|
83
87
|
return { next_id: `${idPath}@3`, text: info.description };
|
|
84
88
|
}
|
|
85
89
|
// Level 3b: type declaration match
|
|
@@ -93,6 +97,11 @@ async function processFileSafe(filePath, regex, cwd, config) {
|
|
|
93
97
|
const chunks = splitDeclarations(dts);
|
|
94
98
|
const matching = chunks.filter((c) => regex.test(c));
|
|
95
99
|
if (matching.length > 0) {
|
|
100
|
+
debugSearch(
|
|
101
|
+
"depth=3b type-decl match file=%s chunks=%d",
|
|
102
|
+
idPath,
|
|
103
|
+
matching.length,
|
|
104
|
+
);
|
|
96
105
|
return {
|
|
97
106
|
next_id: `${idPath}@3`,
|
|
98
107
|
text: matching
|
|
@@ -110,12 +119,18 @@ async function processFileSafe(filePath, regex, cwd, config) {
|
|
|
110
119
|
);
|
|
111
120
|
const matchingBlocks = allBlocks.filter((b) => regex.test(b.blockText));
|
|
112
121
|
if (matchingBlocks.length > 0) {
|
|
122
|
+
debugSearch(
|
|
123
|
+
"depth=4 source-block match file=%s blocks=%d",
|
|
124
|
+
idPath,
|
|
125
|
+
matchingBlocks.length,
|
|
126
|
+
);
|
|
113
127
|
const fenced = matchingBlocks
|
|
114
128
|
.map((b) => `\`\`\`typescript\n${b.annotation}\n${b.blockText}\n\`\`\``)
|
|
115
129
|
.join("\n\n");
|
|
116
130
|
return { id: `${idPath}@4`, text: fenced };
|
|
117
131
|
}
|
|
118
132
|
if (regex.test(content)) {
|
|
133
|
+
debugSearch("depth=4 full-source match file=%s", idPath);
|
|
119
134
|
return { id: `${idPath}@4`, text: `\`\`\`typescript\n${content}\n\`\`\`` };
|
|
120
135
|
}
|
|
121
136
|
return null; // no match
|
|
@@ -148,20 +163,27 @@ function applyLimit(sorted, limit) {
|
|
|
148
163
|
* Files with parse errors are silently skipped.
|
|
149
164
|
*/
|
|
150
165
|
async function searchFileList(files, regex, cwd, limit, config) {
|
|
151
|
-
const results = await
|
|
152
|
-
|
|
153
|
-
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
|
|
166
|
+
const results = await time(
|
|
167
|
+
debugSearch,
|
|
168
|
+
`search ${files.length} files query=${regex.source}`,
|
|
169
|
+
() =>
|
|
170
|
+
Promise.all(
|
|
171
|
+
files.map(async (filePath) => {
|
|
172
|
+
try {
|
|
173
|
+
return await processFileSafe(filePath, regex, cwd, config);
|
|
174
|
+
} catch (error) {
|
|
175
|
+
if (error instanceof JsdocError && error.code === "PARSE_ERROR") {
|
|
176
|
+
return null;
|
|
177
|
+
}
|
|
178
|
+
throw error;
|
|
179
|
+
}
|
|
180
|
+
}),
|
|
181
|
+
),
|
|
162
182
|
);
|
|
183
|
+
flushCacheSummary(`search ${files.length} files`);
|
|
163
184
|
const matched = results.filter((r) => r !== null);
|
|
164
185
|
const sorted = matched.sort((a, b) => sortKey(a).localeCompare(sortKey(b)));
|
|
186
|
+
debugSearch("matches=%d", matched.length);
|
|
165
187
|
return applyLimit(sorted, limit);
|
|
166
188
|
}
|
|
167
189
|
/**
|
|
@@ -200,6 +222,7 @@ export async function search(
|
|
|
200
222
|
* @param filePaths - Array of absolute file paths
|
|
201
223
|
* @param query - Regex query string (case-insensitive)
|
|
202
224
|
* @param cwd - Working directory for relative path output
|
|
225
|
+
* @param gitignore - Whether to respect .gitignore rules (default true)
|
|
203
226
|
* @param limit - Maximum number of results to return (default 100)
|
|
204
227
|
* @param config - Cache configuration
|
|
205
228
|
* @throws {JsdocError} INVALID_SELECTOR for invalid regex
|
|
@@ -208,19 +231,22 @@ export async function searchFiles(
|
|
|
208
231
|
filePaths,
|
|
209
232
|
query,
|
|
210
233
|
cwd,
|
|
234
|
+
gitignore = true,
|
|
211
235
|
limit = 100,
|
|
212
236
|
config = DEFAULT_CACHE_CONFIG,
|
|
213
237
|
) {
|
|
214
238
|
const regex = compileRegex(query);
|
|
215
|
-
|
|
216
|
-
|
|
217
|
-
|
|
218
|
-
|
|
219
|
-
|
|
220
|
-
|
|
221
|
-
|
|
222
|
-
|
|
223
|
-
|
|
224
|
-
|
|
239
|
+
let tsFiles = filePaths.filter(
|
|
240
|
+
(f) => (f.endsWith(".ts") || f.endsWith(".tsx")) && !f.endsWith(".d.ts"),
|
|
241
|
+
);
|
|
242
|
+
if (gitignore) {
|
|
243
|
+
const ig = await loadGitignore(cwd);
|
|
244
|
+
tsFiles = tsFiles.filter((f) => {
|
|
245
|
+
const rel = relative(cwd, f);
|
|
246
|
+
// Files outside cwd (traversal paths) are beyond the gitignore scope
|
|
247
|
+
if (rel.startsWith("..")) return true;
|
|
248
|
+
return !ig.ignores(rel);
|
|
249
|
+
});
|
|
250
|
+
}
|
|
225
251
|
return searchFileList(tsFiles, regex, cwd, limit, config);
|
|
226
252
|
}
|
|
package/dist/type-declarations.js
CHANGED
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
import { readFile } from "node:fs/promises";
|
|
2
2
|
import { dirname } from "node:path";
|
|
3
3
|
import ts from "typescript";
|
|
4
|
+
import { debugTs, time } from "./debug.js";
|
|
4
5
|
import { JsdocError } from "./errors.js";
|
|
5
6
|
|
|
6
7
|
/**
|
|
@@ -432,7 +433,11 @@ export async function generateTypeDeclarations(filePath) {
|
|
|
432
433
|
throw new JsdocError("PARSE_ERROR", `Failed to parse file: ${filePath}`);
|
|
433
434
|
}
|
|
434
435
|
// Get emit output using the language service
|
|
435
|
-
const emitOutput = service.getEmitOutput(filePath, true);
|
|
436
|
+
const emitOutput = await time(
|
|
437
|
+
debugTs,
|
|
438
|
+
`getEmitOutput ${filePath}`,
|
|
439
|
+
async () => service.getEmitOutput(filePath, true),
|
|
440
|
+
);
|
|
436
441
|
// Find the .d.ts output file
|
|
437
442
|
const dtsFile = emitOutput.outputFiles.find((file) =>
|
|
438
443
|
file.name.endsWith(".d.ts"),
|
package/dist/validate.js
CHANGED
|
@@ -2,6 +2,7 @@ import { readFile } from "node:fs/promises";
|
|
|
2
2
|
import { relative } from "node:path";
|
|
3
3
|
import { findMissingBarrels } from "./barrel.js";
|
|
4
4
|
import { processWithCache } from "./cache.js";
|
|
5
|
+
import { debugValidate, flushCacheSummary, time } from "./debug.js";
|
|
5
6
|
import { JsdocError } from "./errors.js";
|
|
6
7
|
import {
|
|
7
8
|
createValidationLinter,
|
|
@@ -105,9 +106,17 @@ export async function validate(
|
|
|
105
106
|
);
|
|
106
107
|
}
|
|
107
108
|
const eslint = createValidationLinter();
|
|
108
|
-
|
|
109
|
-
|
|
109
|
+
debugValidate(
|
|
110
|
+
"validate start files=%d pattern=%s",
|
|
111
|
+
files.length,
|
|
112
|
+
selector.pattern,
|
|
110
113
|
);
|
|
114
|
+
const statuses = await time(
|
|
115
|
+
debugValidate,
|
|
116
|
+
`validate ${files.length} files`,
|
|
117
|
+
() => Promise.all(files.map((f) => classifyFile(eslint, f, cwd, config))),
|
|
118
|
+
);
|
|
119
|
+
flushCacheSummary(`validate ${files.length} files`);
|
|
111
120
|
const missingBarrels = await findMissingBarrels(files, cwd);
|
|
112
121
|
return buildGroupedResult(statuses, missingBarrels, limit);
|
|
113
122
|
}
|
|
@@ -132,9 +141,13 @@ export async function validateFiles(
|
|
|
132
141
|
(f) => f.endsWith(".ts") || f.endsWith(".tsx"),
|
|
133
142
|
);
|
|
134
143
|
const eslint = createValidationLinter();
|
|
135
|
-
|
|
136
|
-
|
|
144
|
+
debugValidate("validateFiles start files=%d", tsFiles.length);
|
|
145
|
+
const statuses = await time(
|
|
146
|
+
debugValidate,
|
|
147
|
+
`validateFiles ${tsFiles.length} files`,
|
|
148
|
+
() => Promise.all(tsFiles.map((f) => classifyFile(eslint, f, cwd, config))),
|
|
137
149
|
);
|
|
150
|
+
flushCacheSummary(`validateFiles ${tsFiles.length} files`);
|
|
138
151
|
const missingBarrels = await findMissingBarrels(tsFiles, cwd);
|
|
139
152
|
return buildGroupedResult(statuses, missingBarrels, limit);
|
|
140
153
|
}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "jsdoczoom",
|
|
3
|
-
"version": "1.2.
|
|
3
|
+
"version": "1.2.3",
|
|
4
4
|
"description": "CLI tool for extracting JSDoc summaries at configurable depths",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"sideEffects": false,
|
|
@@ -43,6 +43,7 @@
|
|
|
43
43
|
},
|
|
44
44
|
"dependencies": {
|
|
45
45
|
"@typescript-eslint/parser": "^8.55.0",
|
|
46
|
+
"debug": "^4.4.3",
|
|
46
47
|
"eslint": "^9.0.0",
|
|
47
48
|
"eslint-plugin-jsdoc": "^62.5.5",
|
|
48
49
|
"glob": "^13.0.3",
|
|
@@ -51,6 +52,7 @@
|
|
|
51
52
|
},
|
|
52
53
|
"devDependencies": {
|
|
53
54
|
"@biomejs/biome": "2.4.1",
|
|
55
|
+
"@types/debug": "^4.1.12",
|
|
54
56
|
"@types/node": "^25.2.3",
|
|
55
57
|
"vitest": "4.0.18"
|
|
56
58
|
}
|
package/types/debug.d.ts
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Shared debug instances and timing utilities for jsdoczoom.
|
|
3
|
+
*
|
|
4
|
+
* Activate with DEBUG=jsdoczoom:* or via the --debug CLI flag.
|
|
5
|
+
* Each namespace maps to a source module. Delta timing (+NNNms) is
|
|
6
|
+
* provided automatically by the debug package between calls on the
|
|
7
|
+
* same namespace.
|
|
8
|
+
*
|
|
9
|
+
* @summary Namespaced debug loggers and perf_hooks timing helper
|
|
10
|
+
*/
|
|
11
|
+
import type { Debugger } from "debug";
|
|
12
|
+
import createDebug from "debug";
|
|
13
|
+
export declare const debugDiscovery: Debugger;
|
|
14
|
+
export declare const debugSearch: Debugger;
|
|
15
|
+
export declare const debugCache: Debugger;
|
|
16
|
+
export declare const debugEslint: Debugger;
|
|
17
|
+
export declare const debugLint: Debugger;
|
|
18
|
+
export declare const debugValidate: Debugger;
|
|
19
|
+
export declare const debugBarrel: Debugger;
|
|
20
|
+
export declare const debugDrilldown: Debugger;
|
|
21
|
+
export declare const debugTs: Debugger;
|
|
22
|
+
export { createDebug };
|
|
23
|
+
/** Record a cache hit without emitting a log line. */
|
|
24
|
+
export declare function recordCacheHit(): void;
|
|
25
|
+
/** Record a cache miss without emitting a log line. */
|
|
26
|
+
export declare function recordCacheMiss(): void;
|
|
27
|
+
/**
|
|
28
|
+
* Emit a single cache summary line and reset the counters.
|
|
29
|
+
* Call this after each batch of processWithCache operations.
|
|
30
|
+
*
|
|
31
|
+
* @param label - Describes the batch (e.g. "search 326 files")
|
|
32
|
+
*/
|
|
33
|
+
export declare function flushCacheSummary(label: string): void;
|
|
34
|
+
/**
|
|
35
|
+
* Wrap an async operation with start/done timing logs.
|
|
36
|
+
*
|
|
37
|
+
* @param dbg - Debug instance to log to
|
|
38
|
+
* @param label - Label shown in start/done messages
|
|
39
|
+
* @param fn - Async operation to time
|
|
40
|
+
* @returns Result of fn
|
|
41
|
+
*/
|
|
42
|
+
export declare function time<T>(
|
|
43
|
+
dbg: Debugger,
|
|
44
|
+
label: string,
|
|
45
|
+
fn: () => Promise<T>,
|
|
46
|
+
): Promise<T>;
|
|
@@ -1,15 +1,9 @@
|
|
|
1
1
|
import { type Ignore } from "ignore";
|
|
2
|
-
/**
|
|
3
|
-
* Walks .gitignore files from cwd to filesystem root, building an ignore
|
|
4
|
-
* filter that glob results pass through. Direct-path lookups bypass the
|
|
5
|
-
* filter since the user explicitly named the file. The ignore instance is
|
|
6
|
-
* created per call -- no caching -- because cwd may differ between invocations.
|
|
7
|
-
*
|
|
8
|
-
* @summary Resolve selector patterns to absolute file paths with gitignore filtering
|
|
9
|
-
*/
|
|
10
2
|
/**
|
|
11
3
|
* Walk from `cwd` up to the filesystem root, collecting .gitignore entries.
|
|
12
|
-
* Returns an Ignore instance loaded with all discovered rules.
|
|
4
|
+
* Returns an Ignore instance loaded with all discovered rules. Results are
|
|
5
|
+
* cached per cwd so repeated calls (e.g. from concurrent discoverFiles calls)
|
|
6
|
+
* only perform the filesystem walk once.
|
|
13
7
|
*/
|
|
14
8
|
export declare function loadGitignore(cwd: string): Promise<Ignore>;
|
|
15
9
|
/**
|
package/types/search.d.ts
CHANGED
|
@@ -31,6 +31,7 @@ export declare function search(
|
|
|
31
31
|
* @param filePaths - Array of absolute file paths
|
|
32
32
|
* @param query - Regex query string (case-insensitive)
|
|
33
33
|
* @param cwd - Working directory for relative path output
|
|
34
|
+
* @param gitignore - Whether to respect .gitignore rules (default true)
|
|
34
35
|
* @param limit - Maximum number of results to return (default 100)
|
|
35
36
|
* @param config - Cache configuration
|
|
36
37
|
* @throws {JsdocError} INVALID_SELECTOR for invalid regex
|
|
@@ -39,6 +40,7 @@ export declare function searchFiles(
|
|
|
39
40
|
filePaths: string[],
|
|
40
41
|
query: string,
|
|
41
42
|
cwd: string,
|
|
43
|
+
gitignore?: boolean,
|
|
42
44
|
limit?: number,
|
|
43
45
|
config?: CacheConfig,
|
|
44
46
|
): Promise<DrilldownResult>;
|