jsdoczoom 1.2.0 → 1.2.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cache.js CHANGED
@@ -10,6 +10,7 @@
10
10
  import { createHash } from "node:crypto";
11
11
  import { mkdir, readFile, rename, writeFile } from "node:fs/promises";
12
12
  import { join } from "node:path";
13
+ import { debugCache, recordCacheHit, recordCacheMiss } from "./debug.js";
13
14
  /**
14
15
  * Compute a SHA-256 content hash for the given string.
15
16
  *
@@ -89,8 +90,11 @@ export async function processWithCache(config, mode, content, compute) {
89
90
  await ensureCacheDir(config, mode);
90
91
  const cached = await readCacheEntry(config, mode, hash);
91
92
  if (cached !== null) {
93
+ recordCacheHit();
92
94
  return cached;
93
95
  }
96
+ recordCacheMiss();
97
+ debugCache("[MISS] mode=%s hash=%s", mode, hash.slice(0, 8));
94
98
  const result = await compute();
95
99
  await writeCacheEntry(config, mode, hash, result);
96
100
  return result;
package/dist/cli.js CHANGED
@@ -2,8 +2,15 @@
2
2
  import { readFile } from "node:fs/promises";
3
3
  import { dirname, resolve } from "node:path";
4
4
  import { fileURLToPath } from "node:url";
5
+ import {
6
+ createDebug,
7
+ debugDiscovery,
8
+ flushCacheSummary,
9
+ time,
10
+ } from "./debug.js";
5
11
  import { drilldown, drilldownFiles } from "./drilldown.js";
6
12
  import { JsdocError } from "./errors.js";
13
+ import { discoverFiles } from "./file-discovery.js";
7
14
  import { lint, lintFiles } from "./lint.js";
8
15
  import { search, searchFiles } from "./search.js";
9
16
  import { parseSelector } from "./selector.js";
@@ -40,6 +47,7 @@ Options:
40
47
  --disable-cache Skip all cache operations
41
48
  --cache-directory Override cache directory (default: system temp)
42
49
  --explain-rule R Explain a lint rule with examples (e.g. jsdoc/informative-docs)
50
+ --debug Enable debug timing output (stderr). Equivalent to DEBUG=jsdoczoom:*
43
51
 
44
52
  Selector:
45
53
  A glob pattern or file path, optionally with @depth suffix (1-4).
@@ -113,7 +121,9 @@ function parseArgs(args) {
113
121
  cacheDirectory: undefined,
114
122
  explainRule: undefined,
115
123
  selectorArg: undefined,
124
+ extraArgs: [],
116
125
  searchQuery: undefined,
126
+ debug: false,
117
127
  };
118
128
  for (let i = 0; i < args.length; i++) {
119
129
  const arg = args[i];
@@ -154,6 +164,10 @@ function parseArgs(args) {
154
164
  parsed.disableCache = true;
155
165
  continue;
156
166
  }
167
+ if (arg === "--debug") {
168
+ parsed.debug = true;
169
+ continue;
170
+ }
157
171
  // Value flags
158
172
  if (arg === "--limit") {
159
173
  const { value, nextIndex } = parseValueFlag(args, i);
@@ -189,6 +203,8 @@ function parseArgs(args) {
189
203
  // Positional selector arg
190
204
  if (parsed.selectorArg === undefined) {
191
205
  parsed.selectorArg = arg;
206
+ } else {
207
+ parsed.extraArgs.push(arg);
192
208
  }
193
209
  }
194
210
  return parsed;
@@ -212,28 +228,29 @@ function extractDepthFromArg(selectorArg) {
212
228
  return parsed.depth;
213
229
  }
214
230
  /**
215
- * Process stdin mode: file paths piped in.
231
+ * Process an explicit list of resolved file paths.
216
232
  */
217
- async function processStdin(
218
- stdin,
233
+ async function processFileList(
234
+ filePaths,
219
235
  selectorArg,
220
236
  checkMode,
221
237
  lintMode,
222
238
  json,
223
239
  pretty,
224
240
  limit,
241
+ gitignore,
225
242
  cwd,
226
243
  cacheConfig,
227
244
  searchQuery,
228
245
  ) {
229
- const stdinPaths = parseStdinPaths(stdin, cwd);
230
246
  const depth =
231
247
  selectorArg !== undefined ? extractDepthFromArg(selectorArg) : undefined;
232
248
  if (searchQuery !== undefined) {
233
249
  const result = await searchFiles(
234
- stdinPaths,
250
+ filePaths,
235
251
  searchQuery,
236
252
  cwd,
253
+ gitignore,
237
254
  limit,
238
255
  cacheConfig,
239
256
  );
@@ -241,22 +258,54 @@ async function processStdin(
241
258
  return;
242
259
  }
243
260
  if (lintMode) {
244
- const result = await lintFiles(stdinPaths, cwd, limit, cacheConfig);
261
+ const result = await lintFiles(filePaths, cwd, limit, cacheConfig);
245
262
  writeLintResult(result, pretty);
246
263
  } else if (checkMode) {
247
- const result = await validateFiles(stdinPaths, cwd, limit, cacheConfig);
264
+ const result = await validateFiles(filePaths, cwd, limit, cacheConfig);
248
265
  writeValidationResult(result, pretty);
249
266
  } else {
250
267
  const result = await drilldownFiles(
251
- stdinPaths,
268
+ filePaths,
252
269
  depth,
253
270
  cwd,
254
271
  limit,
255
272
  cacheConfig,
256
273
  );
274
+ flushCacheSummary(`drilldown ${filePaths.length} files`);
257
275
  writeDrilldownResult(result, json, pretty);
258
276
  }
259
277
  }
278
+ /**
279
+ * Process stdin mode: file paths piped in.
280
+ */
281
+ async function processStdin(
282
+ stdin,
283
+ selectorArg,
284
+ checkMode,
285
+ lintMode,
286
+ json,
287
+ pretty,
288
+ limit,
289
+ gitignore,
290
+ cwd,
291
+ cacheConfig,
292
+ searchQuery,
293
+ ) {
294
+ const stdinPaths = parseStdinPaths(stdin, cwd);
295
+ await processFileList(
296
+ stdinPaths,
297
+ selectorArg,
298
+ checkMode,
299
+ lintMode,
300
+ json,
301
+ pretty,
302
+ limit,
303
+ gitignore,
304
+ cwd,
305
+ cacheConfig,
306
+ searchQuery,
307
+ );
308
+ }
260
309
  /**
261
310
  * Process selector mode: glob or path argument.
262
311
  */
@@ -305,19 +354,27 @@ async function processSelector(
305
354
  }
306
355
  }
307
356
  /**
308
- * Write an error to stderr as JSON and set exit code.
357
+ * Write an error to stderr as JSON or plain text depending on the json flag.
309
358
  */
310
- function writeError(error) {
359
+ function writeError(error, json) {
360
+ process.exitCode = 1;
361
+ if (json) {
362
+ if (error instanceof JsdocError) {
363
+ void process.stderr.write(`${JSON.stringify(error.toJSON())}\n`);
364
+ return;
365
+ }
366
+ const message = error instanceof Error ? error.message : String(error);
367
+ void process.stderr.write(
368
+ `${JSON.stringify({ error: { code: "INTERNAL_ERROR", message } })}\n`,
369
+ );
370
+ return;
371
+ }
311
372
  if (error instanceof JsdocError) {
312
- void process.stderr.write(`${JSON.stringify(error.toJSON())}\n`);
313
- process.exitCode = 1;
373
+ void process.stderr.write(`Error [${error.code}]: ${error.message}\n`);
314
374
  return;
315
375
  }
316
376
  const message = error instanceof Error ? error.message : String(error);
317
- void process.stderr.write(
318
- `${JSON.stringify({ error: { code: "INTERNAL_ERROR", message } })}\n`,
319
- );
320
- process.exitCode = 1;
377
+ void process.stderr.write(`Error: ${message}\n`);
321
378
  }
322
379
  /**
323
380
  * Handle --help flag by printing help text.
@@ -346,7 +403,7 @@ function handleSkill() {
346
403
  /**
347
404
  * Handle --explain-rule flag by printing rule explanation.
348
405
  */
349
- function handleExplainRule(ruleName) {
406
+ function handleExplainRule(ruleName, json) {
350
407
  const explanation = RULE_EXPLANATIONS[ruleName];
351
408
  if (explanation) {
352
409
  void process.stdout.write(explanation);
@@ -358,13 +415,14 @@ function handleExplainRule(ruleName) {
358
415
  "INVALID_SELECTOR",
359
416
  `Unknown rule: ${ruleName}. Available rules: ${available}`,
360
417
  ),
418
+ json,
361
419
  );
362
420
  }
363
421
  /**
364
422
  * Handle early-exit flags that print output and return without processing files.
365
423
  * Returns true if an early-exit flag was handled.
366
424
  */
367
- async function handleEarlyExitFlags(parsed) {
425
+ async function handleEarlyExitFlags(parsed, json) {
368
426
  if (parsed.help) {
369
427
  handleHelp();
370
428
  return true;
@@ -378,7 +436,7 @@ async function handleEarlyExitFlags(parsed) {
378
436
  return true;
379
437
  }
380
438
  if (parsed.explainRule !== undefined) {
381
- handleExplainRule(parsed.explainRule);
439
+ handleExplainRule(parsed.explainRule, json);
382
440
  return true;
383
441
  }
384
442
  return false;
@@ -387,10 +445,11 @@ async function handleEarlyExitFlags(parsed) {
387
445
  * Validate that mode flags are not used in incompatible combinations.
388
446
  * Returns true if validation passed (no conflicts), false if an error was written.
389
447
  */
390
- function validateModeCombinations(parsed) {
448
+ function validateModeCombinations(parsed, json) {
391
449
  if (parsed.checkMode && parsed.lintMode) {
392
450
  writeError(
393
451
  new JsdocError("INVALID_SELECTOR", "Cannot use -c and -l together"),
452
+ json,
394
453
  );
395
454
  return false;
396
455
  }
@@ -400,6 +459,7 @@ function validateModeCombinations(parsed) {
400
459
  ) {
401
460
  writeError(
402
461
  new JsdocError("INVALID_SELECTOR", "Cannot use --search with -c or -l"),
462
+ json,
403
463
  );
404
464
  return false;
405
465
  }
@@ -409,10 +469,14 @@ function validateModeCombinations(parsed) {
409
469
  * Main CLI entry point. Exported for testability.
410
470
  */
411
471
  export async function main(args, stdin) {
472
+ const json = args.includes("--json");
473
+ if (args.includes("--debug")) {
474
+ createDebug.enable("jsdoczoom:*");
475
+ }
412
476
  try {
413
477
  const parsed = parseArgs(args);
414
- if (await handleEarlyExitFlags(parsed)) return;
415
- if (!validateModeCombinations(parsed)) return;
478
+ if (await handleEarlyExitFlags(parsed, json)) return;
479
+ if (!validateModeCombinations(parsed, json)) return;
416
480
  const cacheConfig = {
417
481
  enabled: !parsed.disableCache,
418
482
  directory: parsed.cacheDirectory ?? DEFAULT_CACHE_DIR,
@@ -427,6 +491,37 @@ export async function main(args, stdin) {
427
491
  parsed.json,
428
492
  parsed.pretty,
429
493
  parsed.limit,
494
+ parsed.gitignore,
495
+ cwd,
496
+ cacheConfig,
497
+ parsed.searchQuery,
498
+ );
499
+ } else if (parsed.extraArgs.length > 0) {
500
+ // Multiple positional args (e.g. shell-expanded glob): expand each to
501
+ // .ts/.tsx files via discoverFiles (handles directories recursively)
502
+ const allArgPaths = [
503
+ ...(parsed.selectorArg ? [parsed.selectorArg] : []),
504
+ ...parsed.extraArgs,
505
+ ];
506
+ const fileLists = await time(
507
+ debugDiscovery,
508
+ `discover ${allArgPaths.length} paths`,
509
+ () =>
510
+ Promise.all(
511
+ allArgPaths.map((p) => discoverFiles(p, cwd, parsed.gitignore)),
512
+ ),
513
+ );
514
+ const filePaths = [...new Set(fileLists.flat())];
515
+ debugDiscovery("discover total=%d unique files", filePaths.length);
516
+ await processFileList(
517
+ filePaths,
518
+ parsed.selectorArg,
519
+ parsed.checkMode,
520
+ parsed.lintMode,
521
+ parsed.json,
522
+ parsed.pretty,
523
+ parsed.limit,
524
+ parsed.gitignore,
430
525
  cwd,
431
526
  cacheConfig,
432
527
  parsed.searchQuery,
@@ -446,7 +541,7 @@ export async function main(args, stdin) {
446
541
  );
447
542
  }
448
543
  } catch (error) {
449
- writeError(error);
544
+ writeError(error, json);
450
545
  }
451
546
  }
452
547
  /**
package/dist/debug.js ADDED
@@ -0,0 +1,61 @@
1
+ /**
2
+ * Shared debug instances and timing utilities for jsdoczoom.
3
+ *
4
+ * Activate with DEBUG=jsdoczoom:* or via the --debug CLI flag.
5
+ * Each namespace maps to a source module. Delta timing (+NNNms) is
6
+ * provided automatically by the debug package between calls on the
7
+ * same namespace.
8
+ *
9
+ * @summary Namespaced debug loggers and perf_hooks timing helper
10
+ */
11
+ import { performance } from "node:perf_hooks";
12
+ import createDebug from "debug";
13
+ export const debugDiscovery = createDebug("jsdoczoom:discovery");
14
+ export const debugSearch = createDebug("jsdoczoom:search");
15
+ export const debugCache = createDebug("jsdoczoom:cache");
16
+ export const debugEslint = createDebug("jsdoczoom:eslint");
17
+ export const debugLint = createDebug("jsdoczoom:lint");
18
+ export const debugValidate = createDebug("jsdoczoom:validate");
19
+ export const debugBarrel = createDebug("jsdoczoom:barrel");
20
+ export const debugDrilldown = createDebug("jsdoczoom:drilldown");
21
+ export const debugTs = createDebug("jsdoczoom:ts");
22
+ export { createDebug };
23
+ /** Accumulated cache hits since last flushCacheSummary call. */
24
+ let _cacheHits = 0;
25
+ /** Accumulated cache misses since last flushCacheSummary call. */
26
+ let _cacheMisses = 0;
27
+ /** Record a cache hit without emitting a log line. */
28
+ export function recordCacheHit() {
29
+ _cacheHits++;
30
+ }
31
+ /** Record a cache miss without emitting a log line. */
32
+ export function recordCacheMiss() {
33
+ _cacheMisses++;
34
+ }
35
+ /**
36
+ * Emit a single cache summary line and reset the counters.
37
+ * Call this after each batch of processWithCache operations.
38
+ *
39
+ * @param label - Describes the batch (e.g. "search 326 files")
40
+ */
41
+ export function flushCacheSummary(label) {
42
+ if (_cacheHits === 0 && _cacheMisses === 0) return;
43
+ debugCache("[SUMMARY] %s hits=%d misses=%d", label, _cacheHits, _cacheMisses);
44
+ _cacheHits = 0;
45
+ _cacheMisses = 0;
46
+ }
47
+ /**
48
+ * Wrap an async operation with start/done timing logs.
49
+ *
50
+ * @param dbg - Debug instance to log to
51
+ * @param label - Label shown in start/done messages
52
+ * @param fn - Async operation to time
53
+ * @returns Result of fn
54
+ */
55
+ export async function time(dbg, label, fn) {
56
+ const t0 = performance.now();
57
+ dbg("%s start", label);
58
+ const result = await fn();
59
+ dbg("%s done %sms", label, (performance.now() - t0).toFixed(1));
60
+ return result;
61
+ }
package/dist/drilldown.js CHANGED
@@ -2,6 +2,7 @@ import { readFile } from "node:fs/promises";
2
2
  import { dirname, relative } from "node:path";
3
3
  import { getBarrelChildren, isBarrel } from "./barrel.js";
4
4
  import { processWithCache } from "./cache.js";
5
+ import { debugDrilldown } from "./debug.js";
5
6
  import { JsdocError } from "./errors.js";
6
7
  import { discoverFiles, loadGitignore } from "./file-discovery.js";
7
8
  import { parseFileSummaries } from "./jsdoc-parser.js";
@@ -83,6 +84,12 @@ function processFile(
83
84
  effectiveDepth < TERMINAL_LEVEL &&
84
85
  levels[effectiveDepth - 1] === null
85
86
  ) {
87
+ debugDrilldown(
88
+ "depth advance: depth=%d → %d (null level) file=%s",
89
+ effectiveDepth,
90
+ effectiveDepth + 1,
91
+ idPath,
92
+ );
86
93
  effectiveDepth++;
87
94
  }
88
95
  const level = levels[effectiveDepth - 1];
@@ -238,6 +245,10 @@ async function processBarrelAtDepth(barrel, depth, cwd, config) {
238
245
  // Null-skip: if depth 2 but no description, advance to transition depth
239
246
  let effectiveDepth = depth;
240
247
  if (effectiveDepth === 2 && !barrel.hasDescription) {
248
+ debugDrilldown(
249
+ "barrel depth advance: depth=2 → 3 (no description) barrel=%s",
250
+ relative(cwd, barrel.path),
251
+ );
241
252
  effectiveDepth = 3;
242
253
  }
243
254
  if (effectiveDepth < 3) {
@@ -255,6 +266,13 @@ async function processBarrelAtDepth(barrel, depth, cwd, config) {
255
266
  }
256
267
  // Barrel transitions: barrel disappears, children appear
257
268
  const childDepth = effectiveDepth - 2;
269
+ debugDrilldown(
270
+ "barrel transition: barrel=%s depth=%d → children=%d at childDepth=%d",
271
+ relative(cwd, barrel.path),
272
+ effectiveDepth,
273
+ barrel.children.length,
274
+ childDepth,
275
+ );
258
276
  return collectSafeResults(barrel.children, childDepth, cwd, config);
259
277
  }
260
278
  /**
@@ -295,6 +313,13 @@ async function processGlobWithBarrels(files, depth, cwd, config) {
295
313
  config,
296
314
  );
297
315
  const gatedFiles = buildGatedFileSet(barrelInfos);
316
+ debugDrilldown(
317
+ "barrel gating: barrels=%d non-barrels=%d gated=%d depth=%d",
318
+ barrelPaths.length,
319
+ nonBarrelPaths.length,
320
+ gatedFiles.size,
321
+ depth,
322
+ );
298
323
  const results = [...barrelErrors];
299
324
  const barrelResults = await Promise.all(
300
325
  barrelInfos
package/dist/eslint-engine.js CHANGED
@@ -6,6 +6,7 @@
6
6
  import tsParser from "@typescript-eslint/parser";
7
7
  import { ESLint } from "eslint";
8
8
  import jsdocPlugin from "eslint-plugin-jsdoc";
9
+ import { debugEslint, time } from "./debug.js";
9
10
  import plugin from "./eslint-plugin.js";
10
11
 
11
12
  /** Common invalid JSDoc tags and their recommended replacements */
@@ -115,7 +116,9 @@ export function createLintLinter(cwd) {
115
116
  * @returns Simplified message list with ruleId, messageId, and fatal flag
116
117
  */
117
118
  export async function lintFileForValidation(eslint, sourceText, filePath) {
118
- const results = await eslint.lintText(sourceText, { filePath });
119
+ const results = await time(debugEslint, `validate ${filePath}`, () =>
120
+ eslint.lintText(sourceText, { filePath }),
121
+ );
119
122
  if (results.length === 0) return [];
120
123
  return results[0].messages.map((msg) => ({
121
124
  ruleId: msg.ruleId,
@@ -204,7 +207,9 @@ function extractSymbolName(sourceText, line) {
204
207
  * @returns Array of lint diagnostics with line, column, rule, message, and severity
205
208
  */
206
209
  export async function lintFileForLint(eslint, sourceText, filePath) {
207
- const results = await eslint.lintText(sourceText, { filePath });
210
+ const results = await time(debugEslint, `lint ${filePath}`, () =>
211
+ eslint.lintText(sourceText, { filePath }),
212
+ );
208
213
  if (results.length === 0) return [];
209
214
  return results[0].messages.map((msg) => {
210
215
  const diagnostic = {
package/dist/file-discovery.js CHANGED
@@ -1,24 +1,43 @@
1
- import { readFile, stat } from "node:fs/promises";
1
+ import { readFile, realpath, stat } from "node:fs/promises";
2
2
  import { dirname, join, relative, resolve } from "node:path";
3
3
  import { glob } from "glob";
4
4
  import ignore from "ignore";
5
+ import { debugDiscovery } from "./debug.js";
5
6
  import { JsdocError } from "./errors.js";
7
+
6
8
  /**
7
9
  * Walks .gitignore files from cwd to filesystem root, building an ignore
8
10
  * filter that glob results pass through. Direct-path lookups bypass the
9
- * filter since the user explicitly named the file. The ignore instance is
10
- * created per call -- no caching -- because cwd may differ between invocations.
11
+ * filter since the user explicitly named the file. Results are cached per
12
+ * cwd for the process lifetime so concurrent discoverFiles calls sharing a
13
+ * cwd only walk the filesystem once.
11
14
  *
12
15
  * @summary Resolve selector patterns to absolute file paths with gitignore filtering
13
16
  */
17
+ /** Process-lifetime cache: cwd → in-flight or settled Ignore promise. */
18
+ const gitignoreCache = new Map();
14
19
  /**
15
20
  * Walk from `cwd` up to the filesystem root, collecting .gitignore entries.
16
- * Returns an Ignore instance loaded with all discovered rules.
21
+ * Returns an Ignore instance loaded with all discovered rules. Results are
22
+ * cached per cwd so repeated calls (e.g. from concurrent discoverFiles calls)
23
+ * only perform the filesystem walk once.
17
24
  */
18
- export async function loadGitignore(cwd) {
25
+ export function loadGitignore(cwd) {
26
+ const key = resolve(cwd);
27
+ const cached = gitignoreCache.get(key);
28
+ if (cached !== undefined) {
29
+ return cached;
30
+ }
31
+ const promise = loadGitignoreUncached(key);
32
+ gitignoreCache.set(key, promise);
33
+ return promise;
34
+ }
35
+ async function loadGitignoreUncached(cwd) {
19
36
  const ig = ignore();
20
37
  let dir = resolve(cwd);
38
+ let walkDepth = 0;
21
39
  while (true) {
40
+ debugDiscovery("gitignore walk depth=%d dir=%s", walkDepth, dir);
22
41
  const gitignorePath = join(dir, ".gitignore");
23
42
  try {
24
43
  const content = await readFile(gitignorePath, "utf-8");
@@ -27,6 +46,12 @@ export async function loadGitignore(cwd) {
27
46
  .split("\n")
28
47
  .map((l) => l.trim())
29
48
  .filter((l) => l && !l.startsWith("#"));
49
+ debugDiscovery(
50
+ "gitignore depth=%d loaded %d rules from %s",
51
+ walkDepth,
52
+ lines.length,
53
+ gitignorePath,
54
+ );
30
55
  for (const line of lines) {
31
56
  // Prefix rules from ancestor .gitignore files so paths are
32
57
  // relative to `cwd`, which is where glob results are anchored.
@@ -36,8 +61,15 @@ export async function loadGitignore(cwd) {
36
61
  // No .gitignore at this level, continue walking up
37
62
  }
38
63
  const parent = dirname(dir);
39
- if (parent === dir) break;
64
+ if (parent === dir) {
65
+ debugDiscovery(
66
+ "gitignore walk complete at depth=%d (reached root)",
67
+ walkDepth,
68
+ );
69
+ break;
70
+ }
40
71
  dir = parent;
72
+ walkDepth++;
41
73
  }
42
74
  return ig;
43
75
  }
@@ -57,15 +89,52 @@ export async function loadGitignore(cwd) {
57
89
  export async function discoverFiles(pattern, cwd, gitignore = true) {
58
90
  const hasGlobChars = /[*?[\]{]/.test(pattern);
59
91
  if (hasGlobChars) {
92
+ debugDiscovery("glob pattern=%s", pattern);
60
93
  const matches = await glob(pattern, { cwd, absolute: true });
61
94
  let filtered = matches.filter(
62
95
  (f) => (f.endsWith(".ts") || f.endsWith(".tsx")) && !f.endsWith(".d.ts"),
63
96
  );
64
97
  if (gitignore) {
98
+ const preFilter = filtered.length;
65
99
  const ig = await loadGitignore(cwd);
66
100
  filtered = filtered.filter((abs) => !ig.ignores(relative(cwd, abs)));
101
+ debugDiscovery(
102
+ "glob done pattern=%s matched=%d after-gitignore=%d",
103
+ pattern,
104
+ preFilter,
105
+ filtered.length,
106
+ );
107
+ } else {
108
+ debugDiscovery(
109
+ "glob done pattern=%s matched=%d",
110
+ pattern,
111
+ filtered.length,
112
+ );
67
113
  }
68
- return filtered.sort();
114
+ // Deduplicate by realpath so symlinks to the same physical file are
115
+ // processed only once. Glob prevents infinite cycles but can still
116
+ // return the same inode at multiple paths (e.g. via directory symlinks).
117
+ const withRealpaths = await Promise.all(
118
+ filtered.map(async (abs) => ({
119
+ abs,
120
+ real: await realpath(abs).catch(() => abs),
121
+ })),
122
+ );
123
+ const seen = new Set();
124
+ const deduped = [];
125
+ for (const { abs, real } of withRealpaths) {
126
+ if (seen.has(real)) {
127
+ debugDiscovery(
128
+ "symlink dedup: skipping %s (same realpath as earlier entry %s)",
129
+ abs,
130
+ real,
131
+ );
132
+ continue;
133
+ }
134
+ seen.add(real);
135
+ deduped.push(abs);
136
+ }
137
+ return deduped.sort();
69
138
  }
70
139
  // Direct path
71
140
  const resolved = resolve(cwd, pattern);
@@ -76,6 +145,10 @@ export async function discoverFiles(pattern, cwd, gitignore = true) {
76
145
  throw new JsdocError("FILE_NOT_FOUND", `File not found: ${pattern}`);
77
146
  }
78
147
  if (statResult.isDirectory()) {
148
+ debugDiscovery(
149
+ "discoverFiles recursing: directory path=%s → glob",
150
+ resolved,
151
+ );
79
152
  return discoverFiles(`${resolved}/**`, cwd, gitignore);
80
153
  }
81
154
  return [resolved];
package/dist/lint.js CHANGED
@@ -12,6 +12,7 @@ import { readFile } from "node:fs/promises";
12
12
  import { relative } from "node:path";
13
13
  import { findMissingBarrels } from "./barrel.js";
14
14
  import { processWithCache } from "./cache.js";
15
+ import { debugLint, flushCacheSummary, time } from "./debug.js";
15
16
  import { JsdocError } from "./errors.js";
16
17
  import { createLintLinter, lintFileForLint } from "./eslint-engine.js";
17
18
  import { discoverFiles } from "./file-discovery.js";
@@ -98,9 +99,14 @@ export async function lint(
98
99
  }
99
100
  const tsFiles = files.filter((f) => f.endsWith(".ts") || f.endsWith(".tsx"));
100
101
  const eslint = createLintLinter(cwd);
101
- const fileResults = await Promise.all(
102
- tsFiles.map((f) => lintSingleFile(eslint, f, cwd, config)),
102
+ debugLint("lint start files=%d pattern=%s", tsFiles.length, selector.pattern);
103
+ const fileResults = await time(
104
+ debugLint,
105
+ `lint ${tsFiles.length} files`,
106
+ () =>
107
+ Promise.all(tsFiles.map((f) => lintSingleFile(eslint, f, cwd, config))),
103
108
  );
109
+ flushCacheSummary(`lint ${tsFiles.length} files`);
104
110
  const missingBarrels = await findMissingBarrels(tsFiles, cwd);
105
111
  return buildLintResult(fileResults, tsFiles.length, limit, missingBarrels);
106
112
  }
@@ -126,9 +132,14 @@ export async function lintFiles(
126
132
  (f) => f.endsWith(".ts") || f.endsWith(".tsx"),
127
133
  );
128
134
  const eslint = createLintLinter(cwd);
129
- const fileResults = await Promise.all(
130
- tsFiles.map((f) => lintSingleFile(eslint, f, cwd, config)),
135
+ debugLint("lintFiles start files=%d", tsFiles.length);
136
+ const fileResults = await time(
137
+ debugLint,
138
+ `lintFiles ${tsFiles.length} files`,
139
+ () =>
140
+ Promise.all(tsFiles.map((f) => lintSingleFile(eslint, f, cwd, config))),
131
141
  );
142
+ flushCacheSummary(`lintFiles ${tsFiles.length} files`);
132
143
  const missingBarrels = await findMissingBarrels(tsFiles, cwd);
133
144
  return buildLintResult(fileResults, tsFiles.length, limit, missingBarrels);
134
145
  }
package/dist/search.js CHANGED
@@ -1,6 +1,7 @@
1
1
  import { readFile } from "node:fs/promises";
2
2
  import { relative } from "node:path";
3
3
  import { processWithCache } from "./cache.js";
4
+ import { debugSearch, flushCacheSummary, time } from "./debug.js";
4
5
  import { JsdocError } from "./errors.js";
5
6
  import { discoverFiles, loadGitignore } from "./file-discovery.js";
6
7
  import { parseFileSummaries } from "./jsdoc-parser.js";
@@ -53,6 +54,7 @@ async function processFileSafe(filePath, regex, cwd, config) {
53
54
  );
54
55
  // Level 1: filename/path match — fall back through levels like drilldown
55
56
  if (regex.test(idPath)) {
57
+ debugSearch("depth=1 path match file=%s", idPath);
56
58
  if (info.summary !== null) {
57
59
  return { next_id: `${idPath}@2`, text: info.summary };
58
60
  }
@@ -76,10 +78,12 @@ async function processFileSafe(filePath, regex, cwd, config) {
76
78
  }
77
79
  // Level 2: summary match
78
80
  if (info.summary !== null && regex.test(info.summary)) {
81
+ debugSearch("depth=2 summary match file=%s", idPath);
79
82
  return { next_id: `${idPath}@2`, text: info.summary };
80
83
  }
81
84
  // Level 3a: description match
82
85
  if (info.description !== null && regex.test(info.description)) {
86
+ debugSearch("depth=3a description match file=%s", idPath);
83
87
  return { next_id: `${idPath}@3`, text: info.description };
84
88
  }
85
89
  // Level 3b: type declaration match
@@ -93,6 +97,11 @@ async function processFileSafe(filePath, regex, cwd, config) {
93
97
  const chunks = splitDeclarations(dts);
94
98
  const matching = chunks.filter((c) => regex.test(c));
95
99
  if (matching.length > 0) {
100
+ debugSearch(
101
+ "depth=3b type-decl match file=%s chunks=%d",
102
+ idPath,
103
+ matching.length,
104
+ );
96
105
  return {
97
106
  next_id: `${idPath}@3`,
98
107
  text: matching
@@ -110,12 +119,18 @@ async function processFileSafe(filePath, regex, cwd, config) {
110
119
  );
111
120
  const matchingBlocks = allBlocks.filter((b) => regex.test(b.blockText));
112
121
  if (matchingBlocks.length > 0) {
122
+ debugSearch(
123
+ "depth=4 source-block match file=%s blocks=%d",
124
+ idPath,
125
+ matchingBlocks.length,
126
+ );
113
127
  const fenced = matchingBlocks
114
128
  .map((b) => `\`\`\`typescript\n${b.annotation}\n${b.blockText}\n\`\`\``)
115
129
  .join("\n\n");
116
130
  return { id: `${idPath}@4`, text: fenced };
117
131
  }
118
132
  if (regex.test(content)) {
133
+ debugSearch("depth=4 full-source match file=%s", idPath);
119
134
  return { id: `${idPath}@4`, text: `\`\`\`typescript\n${content}\n\`\`\`` };
120
135
  }
121
136
  return null; // no match
@@ -148,20 +163,27 @@ function applyLimit(sorted, limit) {
148
163
  * Files with parse errors are silently skipped.
149
164
  */
150
165
  async function searchFileList(files, regex, cwd, limit, config) {
151
- const results = await Promise.all(
152
- files.map(async (filePath) => {
153
- try {
154
- return await processFileSafe(filePath, regex, cwd, config);
155
- } catch (error) {
156
- if (error instanceof JsdocError && error.code === "PARSE_ERROR") {
157
- return null;
158
- }
159
- throw error;
160
- }
161
- }),
166
+ const results = await time(
167
+ debugSearch,
168
+ `search ${files.length} files query=${regex.source}`,
169
+ () =>
170
+ Promise.all(
171
+ files.map(async (filePath) => {
172
+ try {
173
+ return await processFileSafe(filePath, regex, cwd, config);
174
+ } catch (error) {
175
+ if (error instanceof JsdocError && error.code === "PARSE_ERROR") {
176
+ return null;
177
+ }
178
+ throw error;
179
+ }
180
+ }),
181
+ ),
162
182
  );
183
+ flushCacheSummary(`search ${files.length} files`);
163
184
  const matched = results.filter((r) => r !== null);
164
185
  const sorted = matched.sort((a, b) => sortKey(a).localeCompare(sortKey(b)));
186
+ debugSearch("matches=%d", matched.length);
165
187
  return applyLimit(sorted, limit);
166
188
  }
167
189
  /**
@@ -200,6 +222,7 @@ export async function search(
200
222
  * @param filePaths - Array of absolute file paths
201
223
  * @param query - Regex query string (case-insensitive)
202
224
  * @param cwd - Working directory for relative path output
225
+ * @param gitignore - Whether to respect .gitignore rules (default true)
203
226
  * @param limit - Maximum number of results to return (default 100)
204
227
  * @param config - Cache configuration
205
228
  * @throws {JsdocError} INVALID_SELECTOR for invalid regex
@@ -208,19 +231,22 @@ export async function searchFiles(
208
231
  filePaths,
209
232
  query,
210
233
  cwd,
234
+ gitignore = true,
211
235
  limit = 100,
212
236
  config = DEFAULT_CACHE_CONFIG,
213
237
  ) {
214
238
  const regex = compileRegex(query);
215
- const ig = await loadGitignore(cwd);
216
- const tsFiles = filePaths.filter((f) => {
217
- if (!(f.endsWith(".ts") || f.endsWith(".tsx")) || f.endsWith(".d.ts")) {
218
- return false;
219
- }
220
- const rel = relative(cwd, f);
221
- // Files outside cwd (traversal paths) are beyond the gitignore scope
222
- if (rel.startsWith("..")) return true;
223
- return !ig.ignores(rel);
224
- });
239
+ let tsFiles = filePaths.filter(
240
+ (f) => (f.endsWith(".ts") || f.endsWith(".tsx")) && !f.endsWith(".d.ts"),
241
+ );
242
+ if (gitignore) {
243
+ const ig = await loadGitignore(cwd);
244
+ tsFiles = tsFiles.filter((f) => {
245
+ const rel = relative(cwd, f);
246
+ // Files outside cwd (traversal paths) are beyond the gitignore scope
247
+ if (rel.startsWith("..")) return true;
248
+ return !ig.ignores(rel);
249
+ });
250
+ }
225
251
  return searchFileList(tsFiles, regex, cwd, limit, config);
226
252
  }
@@ -1,6 +1,7 @@
1
1
  import { readFile } from "node:fs/promises";
2
2
  import { dirname } from "node:path";
3
3
  import ts from "typescript";
4
+ import { debugTs, time } from "./debug.js";
4
5
  import { JsdocError } from "./errors.js";
5
6
 
6
7
  /**
@@ -432,7 +433,11 @@ export async function generateTypeDeclarations(filePath) {
432
433
  throw new JsdocError("PARSE_ERROR", `Failed to parse file: ${filePath}`);
433
434
  }
434
435
  // Get emit output using the language service
435
- const emitOutput = service.getEmitOutput(filePath, true); // true = emitOnlyDtsFiles
436
+ const emitOutput = await time(
437
+ debugTs,
438
+ `getEmitOutput ${filePath}`,
439
+ async () => service.getEmitOutput(filePath, true),
440
+ );
436
441
  // Find the .d.ts output file
437
442
  const dtsFile = emitOutput.outputFiles.find((file) =>
438
443
  file.name.endsWith(".d.ts"),
package/dist/validate.js CHANGED
@@ -2,6 +2,7 @@ import { readFile } from "node:fs/promises";
2
2
  import { relative } from "node:path";
3
3
  import { findMissingBarrels } from "./barrel.js";
4
4
  import { processWithCache } from "./cache.js";
5
+ import { debugValidate, flushCacheSummary, time } from "./debug.js";
5
6
  import { JsdocError } from "./errors.js";
6
7
  import {
7
8
  createValidationLinter,
@@ -105,9 +106,17 @@ export async function validate(
105
106
  );
106
107
  }
107
108
  const eslint = createValidationLinter();
108
- const statuses = await Promise.all(
109
- files.map((f) => classifyFile(eslint, f, cwd, config)),
109
+ debugValidate(
110
+ "validate start files=%d pattern=%s",
111
+ files.length,
112
+ selector.pattern,
110
113
  );
114
+ const statuses = await time(
115
+ debugValidate,
116
+ `validate ${files.length} files`,
117
+ () => Promise.all(files.map((f) => classifyFile(eslint, f, cwd, config))),
118
+ );
119
+ flushCacheSummary(`validate ${files.length} files`);
111
120
  const missingBarrels = await findMissingBarrels(files, cwd);
112
121
  return buildGroupedResult(statuses, missingBarrels, limit);
113
122
  }
@@ -132,9 +141,13 @@ export async function validateFiles(
132
141
  (f) => f.endsWith(".ts") || f.endsWith(".tsx"),
133
142
  );
134
143
  const eslint = createValidationLinter();
135
- const statuses = await Promise.all(
136
- tsFiles.map((f) => classifyFile(eslint, f, cwd, config)),
144
+ debugValidate("validateFiles start files=%d", tsFiles.length);
145
+ const statuses = await time(
146
+ debugValidate,
147
+ `validateFiles ${tsFiles.length} files`,
148
+ () => Promise.all(tsFiles.map((f) => classifyFile(eslint, f, cwd, config))),
137
149
  );
150
+ flushCacheSummary(`validateFiles ${tsFiles.length} files`);
138
151
  const missingBarrels = await findMissingBarrels(tsFiles, cwd);
139
152
  return buildGroupedResult(statuses, missingBarrels, limit);
140
153
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "jsdoczoom",
3
- "version": "1.2.0",
3
+ "version": "1.2.2",
4
4
  "description": "CLI tool for extracting JSDoc summaries at configurable depths",
5
5
  "type": "module",
6
6
  "sideEffects": false,
@@ -43,6 +43,7 @@
43
43
  },
44
44
  "dependencies": {
45
45
  "@typescript-eslint/parser": "^8.55.0",
46
+ "debug": "^4.4.3",
46
47
  "eslint": "^9.0.0",
47
48
  "eslint-plugin-jsdoc": "^62.5.5",
48
49
  "glob": "^13.0.3",
@@ -51,6 +52,7 @@
51
52
  },
52
53
  "devDependencies": {
53
54
  "@biomejs/biome": "2.4.1",
55
+ "@types/debug": "^4.1.12",
54
56
  "@types/node": "^25.2.3",
55
57
  "vitest": "4.0.18"
56
58
  }
@@ -0,0 +1,46 @@
1
+ /**
2
+ * Shared debug instances and timing utilities for jsdoczoom.
3
+ *
4
+ * Activate with DEBUG=jsdoczoom:* or via the --debug CLI flag.
5
+ * Each namespace maps to a source module. Delta timing (+NNNms) is
6
+ * provided automatically by the debug package between calls on the
7
+ * same namespace.
8
+ *
9
+ * @summary Namespaced debug loggers and perf_hooks timing helper
10
+ */
11
+ import createDebug from "debug";
12
+ import type { Debugger } from "debug";
13
+ export declare const debugDiscovery: createDebug.Debugger;
14
+ export declare const debugSearch: createDebug.Debugger;
15
+ export declare const debugCache: createDebug.Debugger;
16
+ export declare const debugEslint: createDebug.Debugger;
17
+ export declare const debugLint: createDebug.Debugger;
18
+ export declare const debugValidate: createDebug.Debugger;
19
+ export declare const debugBarrel: createDebug.Debugger;
20
+ export declare const debugDrilldown: createDebug.Debugger;
21
+ export declare const debugTs: createDebug.Debugger;
22
+ export { createDebug };
23
+ /** Record a cache hit without emitting a log line. */
24
+ export declare function recordCacheHit(): void;
25
+ /** Record a cache miss without emitting a log line. */
26
+ export declare function recordCacheMiss(): void;
27
+ /**
28
+ * Emit a single cache summary line and reset the counters.
29
+ * Call this after each batch of processWithCache operations.
30
+ *
31
+ * @param label - Describes the batch (e.g. "search 326 files")
32
+ */
33
+ export declare function flushCacheSummary(label: string): void;
34
+ /**
35
+ * Wrap an async operation with start/done timing logs.
36
+ *
37
+ * @param dbg - Debug instance to log to
38
+ * @param label - Label shown in start/done messages
39
+ * @param fn - Async operation to time
40
+ * @returns Result of fn
41
+ */
42
+ export declare function time<T>(
43
+ dbg: Debugger,
44
+ label: string,
45
+ fn: () => Promise<T>,
46
+ ): Promise<T>;
@@ -1,15 +1,9 @@
1
1
  import { type Ignore } from "ignore";
2
- /**
3
- * Walks .gitignore files from cwd to filesystem root, building an ignore
4
- * filter that glob results pass through. Direct-path lookups bypass the
5
- * filter since the user explicitly named the file. The ignore instance is
6
- * created per call -- no caching -- because cwd may differ between invocations.
7
- *
8
- * @summary Resolve selector patterns to absolute file paths with gitignore filtering
9
- */
10
2
  /**
11
3
  * Walk from `cwd` up to the filesystem root, collecting .gitignore entries.
12
- * Returns an Ignore instance loaded with all discovered rules.
4
+ * Returns an Ignore instance loaded with all discovered rules. Results are
5
+ * cached per cwd so repeated calls (e.g. from concurrent discoverFiles calls)
6
+ * only perform the filesystem walk once.
13
7
  */
14
8
  export declare function loadGitignore(cwd: string): Promise<Ignore>;
15
9
  /**
package/types/search.d.ts CHANGED
@@ -31,6 +31,7 @@ export declare function search(
31
31
  * @param filePaths - Array of absolute file paths
32
32
  * @param query - Regex query string (case-insensitive)
33
33
  * @param cwd - Working directory for relative path output
34
+ * @param gitignore - Whether to respect .gitignore rules (default true)
34
35
  * @param limit - Maximum number of results to return (default 100)
35
36
  * @param config - Cache configuration
36
37
  * @throws {JsdocError} INVALID_SELECTOR for invalid regex
@@ -39,6 +40,7 @@ export declare function searchFiles(
39
40
  filePaths: string[],
40
41
  query: string,
41
42
  cwd: string,
43
+ gitignore?: boolean,
42
44
  limit?: number,
43
45
  config?: CacheConfig,
44
46
  ): Promise<DrilldownResult>;