jsdoczoom 0.4.21 → 1.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/barrel.js +47 -28
- package/dist/cli.js +140 -54
- package/dist/drilldown.js +24 -16
- package/dist/file-discovery.js +15 -10
- package/dist/index.js +7 -1
- package/dist/jsdoc-parser.js +3 -3
- package/dist/lint.js +3 -3
- package/dist/search.js +226 -0
- package/dist/skill-text.js +18 -0
- package/dist/text-format.js +42 -0
- package/dist/type-declarations.js +247 -3
- package/dist/validate.js +3 -3
- package/package.json +1 -1
- package/types/barrel.d.ts +2 -2
- package/types/file-discovery.d.ts +2 -2
- package/types/index.d.ts +7 -1
- package/types/jsdoc-parser.d.ts +3 -1
- package/types/search.d.ts +44 -0
- package/types/skill-text.d.ts +1 -1
- package/types/text-format.d.ts +6 -0
- package/types/type-declarations.d.ts +53 -1
- package/types/types.d.ts +1 -0
package/dist/lint.js
CHANGED
|
@@ -89,7 +89,7 @@ export async function lint(
|
|
|
89
89
|
gitignore = true,
|
|
90
90
|
config = { enabled: true, directory: DEFAULT_CACHE_DIR },
|
|
91
91
|
) {
|
|
92
|
-
const files = discoverFiles(selector.pattern, cwd, gitignore);
|
|
92
|
+
const files = await discoverFiles(selector.pattern, cwd, gitignore);
|
|
93
93
|
if (files.length === 0 && selector.type === "glob") {
|
|
94
94
|
throw new JsdocError(
|
|
95
95
|
"NO_FILES_MATCHED",
|
|
@@ -101,7 +101,7 @@ export async function lint(
|
|
|
101
101
|
const fileResults = await Promise.all(
|
|
102
102
|
tsFiles.map((f) => lintSingleFile(eslint, f, cwd, config)),
|
|
103
103
|
);
|
|
104
|
-
const missingBarrels = findMissingBarrels(tsFiles, cwd);
|
|
104
|
+
const missingBarrels = await findMissingBarrels(tsFiles, cwd);
|
|
105
105
|
return buildLintResult(fileResults, tsFiles.length, limit, missingBarrels);
|
|
106
106
|
}
|
|
107
107
|
/**
|
|
@@ -129,6 +129,6 @@ export async function lintFiles(
|
|
|
129
129
|
const fileResults = await Promise.all(
|
|
130
130
|
tsFiles.map((f) => lintSingleFile(eslint, f, cwd, config)),
|
|
131
131
|
);
|
|
132
|
-
const missingBarrels = findMissingBarrels(tsFiles, cwd);
|
|
132
|
+
const missingBarrels = await findMissingBarrels(tsFiles, cwd);
|
|
133
133
|
return buildLintResult(fileResults, tsFiles.length, limit, missingBarrels);
|
|
134
134
|
}
|
package/dist/search.js
ADDED
|
@@ -0,0 +1,226 @@
|
|
|
1
|
+
import { readFile } from "node:fs/promises";
|
|
2
|
+
import { relative } from "node:path";
|
|
3
|
+
import { processWithCache } from "./cache.js";
|
|
4
|
+
import { JsdocError } from "./errors.js";
|
|
5
|
+
import { discoverFiles, loadGitignore } from "./file-discovery.js";
|
|
6
|
+
import { parseFileSummaries } from "./jsdoc-parser.js";
|
|
7
|
+
import {
|
|
8
|
+
extractAllSourceBlocks,
|
|
9
|
+
generateTypeDeclarations,
|
|
10
|
+
splitDeclarations,
|
|
11
|
+
} from "./type-declarations.js";
|
|
12
|
+
import { DEFAULT_CACHE_DIR } from "./types.js";
|
|
13
|
+
|
|
14
|
+
/**
 * Searches TypeScript files for a regex query, returning results at the
 * shallowest matching depth level. Processes each file through a 4-level
 * matching hierarchy: filename/path, summary, description/declarations,
 * and full source content.
 *
 * @summary Search TypeScript documentation and source by regex query
 */
/** Default cache configuration used when no config is provided to search/searchFiles. */
const DEFAULT_CACHE_CONFIG = {
  enabled: true,
  directory: DEFAULT_CACHE_DIR, // shared default from types.js
};
|
|
27
|
+
/**
 * Compute the display id path for a file: the path relative to `cwd`.
 * Unlike drilldown ids, no barrel-to-directory conversion is applied.
 */
function displayPath(filePath, cwd) {
  const rel = relative(cwd, filePath);
  return rel;
}
|
|
34
|
+
/**
 * Extract the sort key from an output entry: `next_id` for drillable
 * entries, `id` for terminal ones.
 */
function sortKey(entry) {
  return "next_id" in entry ? entry.next_id : entry.id;
}
|
|
41
|
+
/**
 * Process a single file through the 4-level search hierarchy:
 *   1. filename/path, 2. @summary text, 3. description / type declarations,
 *   4. full source content.
 * Returns an output entry if the file matches the query at any level
 * (`next_id` entries can be drilled into further; `id` entries are terminal),
 * or null if there is no match. PARSE_ERROR exceptions are rethrown
 * for the caller to handle silently.
 *
 * @param filePath - Absolute path to the TypeScript source file
 * @param regex - Compiled case-insensitive query regex
 * @param cwd - Working directory used for relative display paths
 * @param config - Cache configuration forwarded to processWithCache
 */
async function processFileSafe(filePath, regex, cwd, config) {
  const content = await readFile(filePath, "utf-8");
  const idPath = displayPath(filePath, cwd);
  // Parse summaries once for levels 1-3a; cache is keyed by file content.
  const info = await processWithCache(config, "drilldown", content, () =>
    parseFileSummaries(filePath),
  );
  // Level 1: filename/path match — fall back through levels like drilldown,
  // returning the richest available representation of the matched file.
  if (regex.test(idPath)) {
    if (info.summary !== null) {
      return { next_id: `${idPath}@2`, text: info.summary };
    }
    if (info.description !== null) {
      return { next_id: `${idPath}@3`, text: info.description };
    }
    // No file-level JSDoc — try type declarations before raw source.
    // The "\0typedecl" suffix keeps this cache entry distinct from the
    // summary-parse entry for the same content.
    const dts = await processWithCache(
      config,
      "drilldown",
      `${content}\0typedecl`,
      () => generateTypeDeclarations(filePath),
    );
    if (dts.length > 0) {
      const chunks = splitDeclarations(dts);
      return {
        next_id: `${idPath}@4`,
        text: chunks.map((c) => `\`\`\`typescript\n${c}\n\`\`\``).join("\n\n"),
      };
    }
    // Terminal: whole source as one fenced code block.
    return { id: `${idPath}@4`, text: `\`\`\`typescript\n${content}\n\`\`\`` };
  }
  // Level 2: summary match
  if (info.summary !== null && regex.test(info.summary)) {
    return { next_id: `${idPath}@2`, text: info.summary };
  }
  // Level 3a: description match
  if (info.description !== null && regex.test(info.description)) {
    return { next_id: `${idPath}@3`, text: info.description };
  }
  // Level 3b: type declaration match — only chunks that themselves match
  // the query are returned.
  const dts = await processWithCache(
    config,
    "drilldown",
    `${content}\0typedecl`,
    () => generateTypeDeclarations(filePath),
  );
  if (dts.length > 0) {
    const chunks = splitDeclarations(dts);
    const matching = chunks.filter((c) => regex.test(c));
    if (matching.length > 0) {
      return {
        next_id: `${idPath}@3`,
        text: matching
          .map((c) => `\`\`\`typescript\n${c}\n\`\`\``)
          .join("\n\n"),
      };
    }
  }
  // Level 4: source match — cache block extraction, filter blocks by regex
  // ("\0sourceblocks" suffix distinguishes this cache entry).
  const allBlocks = await processWithCache(
    config,
    "drilldown",
    `${content}\0sourceblocks`,
    () => extractAllSourceBlocks(filePath, content),
  );
  const matchingBlocks = allBlocks.filter((b) => regex.test(b.blockText));
  if (matchingBlocks.length > 0) {
    const fenced = matchingBlocks
      .map((b) => `\`\`\`typescript\n${b.annotation}\n${b.blockText}\n\`\`\``)
      .join("\n\n");
    return { id: `${idPath}@4`, text: fenced };
  }
  // Fallback: query matches only outside top-level blocks (e.g. import lines).
  if (regex.test(content)) {
    return { id: `${idPath}@4`, text: `\`\`\`typescript\n${content}\n\`\`\`` };
  }
  return null; // no match at any level
}
|
|
123
|
+
/**
 * Compile a query string into a case-insensitive regex.
 *
 * @throws {JsdocError} INVALID_SELECTOR when the query is not valid regex syntax
 */
function compileRegex(query) {
  let compiled;
  try {
    compiled = new RegExp(query, "i");
  } catch (_error) {
    throw new JsdocError("INVALID_SELECTOR", `Invalid regex: ${query}`);
  }
  return compiled;
}
|
|
134
|
+
/**
 * Apply limit/truncation to sorted results.
 * The `total` field is only present when the list was actually cut short.
 */
function applyLimit(sorted, limit) {
  const total = sorted.length;
  if (total <= limit) {
    return { items: sorted.slice(0, limit), truncated: false };
  }
  return { items: sorted.slice(0, limit), truncated: true, total };
}
|
|
146
|
+
/**
 * Process a list of file paths through the search hierarchy and return
 * sorted, limit-capped results. Files that fail to parse are skipped.
 */
async function searchFileList(files, regex, cwd, limit, config) {
  const perFile = files.map(async (filePath) => {
    try {
      return await processFileSafe(filePath, regex, cwd, config);
    } catch (error) {
      // PARSE_ERROR is expected for malformed files — skip them silently.
      if (error instanceof JsdocError && error.code === "PARSE_ERROR") {
        return null;
      }
      throw error;
    }
  });
  const settled = await Promise.all(perFile);
  const matched = settled.filter((entry) => entry !== null);
  matched.sort((a, b) => sortKey(a).localeCompare(sortKey(b)));
  return applyLimit(matched, limit);
}
|
|
167
|
+
/**
 * Search TypeScript files matching a selector for a regex query.
 *
 * Discovers files from the selector pattern, runs each through the
 * 4-level matching hierarchy, and returns results at the shallowest
 * matching depth. Files with parse errors are silently skipped.
 *
 * @param selector - Parsed selector with type and pattern
 * @param query - Regex query string (case-insensitive)
 * @param cwd - Working directory for file resolution
 * @param gitignore - Whether to respect .gitignore rules (default true)
 * @param limit - Maximum number of results to return (default 100)
 * @param config - Cache configuration
 * @throws {JsdocError} INVALID_SELECTOR for invalid regex
 */
export async function search(
  selector,
  query,
  cwd,
  gitignore = true,
  limit = 100,
  config = DEFAULT_CACHE_CONFIG,
) {
  // Compile first so an invalid regex fails before any filesystem work.
  const compiled = compileRegex(query);
  const discovered = await discoverFiles(selector.pattern, cwd, gitignore);
  return searchFileList(discovered, compiled, cwd, limit, config);
}
|
|
194
|
+
/**
 * Search an explicit list of file paths for a regex query.
 *
 * Used for stdin input. Keeps only .ts/.tsx files (excluding .d.ts) that
 * are not gitignored, then runs each through the 4-level matching hierarchy.
 *
 * @param filePaths - Array of absolute file paths
 * @param query - Regex query string (case-insensitive)
 * @param cwd - Working directory for relative path output
 * @param limit - Maximum number of results to return (default 100)
 * @param config - Cache configuration
 * @throws {JsdocError} INVALID_SELECTOR for invalid regex
 */
export async function searchFiles(
  filePaths,
  query,
  cwd,
  limit = 100,
  config = DEFAULT_CACHE_CONFIG,
) {
  const compiled = compileRegex(query);
  const ig = await loadGitignore(cwd);
  const candidates = filePaths.filter((filePath) => {
    const isTs = filePath.endsWith(".ts") || filePath.endsWith(".tsx");
    if (!isTs || filePath.endsWith(".d.ts")) return false;
    const rel = relative(cwd, filePath);
    // Paths above cwd (traversal paths) are beyond this gitignore's scope.
    if (rel.startsWith("..")) return true;
    return !ig.ignores(rel);
  });
  return searchFileList(candidates, compiled, cwd, limit, config);
}
|
package/dist/skill-text.js
CHANGED
|
@@ -368,6 +368,24 @@ function parse(input: string | Buffer): number {
|
|
|
368
368
|
}
|
|
369
369
|
\`\`\`
|
|
370
370
|
|
|
371
|
+
## Text output and piping
|
|
372
|
+
|
|
373
|
+
jsdoczoom outputs human-readable text by default. Each item has a \`# path\` header followed by content. This makes it composable with standard Unix tools:
|
|
374
|
+
|
|
375
|
+
\`\`\`sh
|
|
376
|
+
jsdoczoom src/utils.ts@3 | grep "functionName" # find symbol + source line
|
|
377
|
+
jsdoczoom src/utils.ts@3 | grep "// L" # list all declarations with lines
|
|
378
|
+
jsdoczoom src/**/*.ts | grep "^#" # list all file summaries
|
|
379
|
+
grep -rl "term" src/ --include="*.ts" | jsdoczoom # describe matching files
|
|
380
|
+
\`\`\`
|
|
381
|
+
|
|
382
|
+
Type declarations include source line annotations (\`// LN\` or \`// LN-LM\` for ranges), so you can locate implementations in the source file without a separate search step.
|
|
383
|
+
|
|
384
|
+
For machine-parseable output, use \`--json\`:
|
|
385
|
+
|
|
386
|
+
\`\`\`sh
|
|
387
|
+
jsdoczoom --json src/**/*.ts | jq '.items[].text'
|
|
388
|
+
\`\`\`
|
|
371
389
|
`;
|
|
372
390
|
/** Explanation text for each lint rule, used by --explain-rule */
|
|
373
391
|
export const RULE_EXPLANATIONS = {
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
/**
 * Formats drilldown results as human-readable text for shell piping.
 * Each item gets a `# id` header line followed by its content.
 * Items are separated by blank lines.
 *
 * @summary Plain-text output formatter for drilldown results
 */
/**
 * Format a single output entry as a text block (header + content).
 * Error entries render as `# id [CODE]` followed by the message; drillable
 * entries (`next_id`) list their children when present; terminal entries
 * render the id header and trimmed text.
 */
function formatEntry(entry) {
  if ("error" in entry) {
    return `# ${entry.id} [${entry.error.code}]\n${entry.error.message}`;
  }
  const text = entry.text.trimEnd();
  if ("next_id" in entry) {
    const lines = [`# ${entry.next_id}`];
    const children = entry.children;
    // Fix: require a non-empty list — an empty array is truthy, and the
    // original emitted a dangling "## children: " line for it.
    if (children && children.length > 0) {
      lines.push(`## children: ${children.join(", ")}`);
    }
    if (text) lines.push("", text);
    return lines.join("\n");
  }
  // Terminal item
  if (!text) return `# ${entry.id}`;
  return `# ${entry.id}\n\n${text}`;
}
|
|
29
|
+
/**
 * Format a DrilldownResult as plain text for CLI output.
 * Entry blocks are separated by two blank lines; a truncation footer is
 * appended when results were capped. Always ends with a trailing newline.
 */
export function formatTextOutput(result) {
  const parts = [result.items.map(formatEntry).join("\n\n\n")];
  if (result.truncated) {
    parts.push(
      result.total !== undefined
        ? `\n\n# truncated (showing ${result.items.length} of ${result.total})`
        : "\n\n# truncated",
    );
  }
  return `${parts.join("")}\n`;
}
|
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import {
|
|
1
|
+
import { readFile } from "node:fs/promises";
|
|
2
2
|
import { dirname } from "node:path";
|
|
3
3
|
import ts from "typescript";
|
|
4
4
|
import { JsdocError } from "./errors.js";
|
|
@@ -153,6 +153,247 @@ export function resetCache() {
|
|
|
153
153
|
compilerOptionsCache.clear();
|
|
154
154
|
serviceCache.clear();
|
|
155
155
|
}
|
|
156
|
+
/**
 * Extract exported symbol names from a top-level statement.
 * Returns an array of names (variable statements may have multiple;
 * anonymous functions/classes and unsupported kinds yield none).
 */
function exportedNamesOf(statement) {
  if (ts.isVariableStatement(statement)) {
    const names = [];
    for (const decl of statement.declarationList.declarations) {
      if (ts.isIdentifier(decl.name)) names.push(decl.name.text);
    }
    return names;
  }
  const hasName =
    ts.isTypeAliasDeclaration(statement) ||
    ts.isInterfaceDeclaration(statement) ||
    ts.isEnumDeclaration(statement) ||
    ((ts.isFunctionDeclaration(statement) ||
      ts.isClassDeclaration(statement)) &&
      statement.name !== undefined);
  return hasName ? [statement.name.text] : [];
}
|
177
|
+
/**
 * Return true if the statement carries an `export` modifier.
 */
function isExportedStatement(statement) {
  if (!ts.canHaveModifiers(statement)) return false;
  const modifiers = ts.getModifiers(statement);
  if (!modifiers) return false;
  return modifiers.some((m) => m.kind === ts.SyntaxKind.ExportKeyword);
}
|
188
|
+
/**
 * Build a map of exported symbol names to their 1-based source line ranges.
 * Re-reads and syntax-parses the source file (no type checking), then walks
 * top-level statements looking for export modifiers.
 *
 * @param filePath - Absolute path to the TypeScript source file
 * @returns Map of exported name -> { start, end } (1-based, inclusive)
 */
async function buildSourceLineMap(filePath) {
  const content = await readFile(filePath, "utf-8");
  const sourceFile = ts.createSourceFile(
    filePath,
    content,
    ts.ScriptTarget.Latest,
    true, // setParentNodes
  );
  const map = new Map();
  for (const statement of sourceFile.statements) {
    if (!isExportedStatement(statement)) continue;
    // Positions are 0-based; convert to 1-based line numbers.
    const start =
      sourceFile.getLineAndCharacterOfPosition(statement.getStart(sourceFile))
        .line + 1;
    const end =
      sourceFile.getLineAndCharacterOfPosition(statement.getEnd()).line + 1;
    for (const name of exportedNamesOf(statement)) {
      map.set(name, { start, end });
    }
  }
  return map;
}
|
|
214
|
+
/**
 * Regex matching the start of a top-level export declaration in .d.ts output.
 * Captures the declared identifier (group 1); tolerates optional `default`,
 * `declare`, and `abstract` modifiers before the declaration keyword.
 */
const DECL_PATTERN =
  /^export\s+(?:default\s+)?(?:declare\s+)?(?:abstract\s+)?(?:type|interface|function|const|let|var|class|enum)\s+(\w+)/;
|
|
217
|
+
/**
 * Format a source range as a GitHub-style line annotation:
 * `// L7` for a single line, `// L3-L9` for a multi-line range.
 */
function formatLineRef(range) {
  if (range.start === range.end) return `// L${range.start}`;
  return `// L${range.start}-L${range.end}`;
}
|
|
225
|
+
/**
 * Find the start of the JSDoc block above a declaration line.
 * Returns the declaration line index itself when no JSDoc is found.
 * NOTE(review): if the preceding line closes a non-JSDoc block comment, the
 * backward scan may latch onto an earlier unrelated opener — assumed not to
 * occur in generated .d.ts text; verify if inputs change.
 */
function findChunkStart(lines, declLine) {
  if (declLine === 0) return 0;
  const above = lines[declLine - 1]?.trimEnd();
  if (above !== undefined && above.endsWith("*/")) {
    // Multi-line JSDoc: scan upward for the opening marker.
    for (let j = declLine - 1; j >= 0; j--) {
      if (lines[j].trimStart().startsWith("/**")) return j;
    }
    return declLine;
  }
  if (above !== undefined && above.trimStart().startsWith("/**")) {
    // Single-line JSDoc opener directly above the declaration.
    return declLine - 1;
  }
  return declLine;
}
|
|
242
|
+
/**
 * Annotate .d.ts text with source line references for each top-level declaration.
 * Inserts GitHub-style `// LN` or `// LN-LM` on a separate line before each
 * declaration chunk, with a blank line above for visual separation.
 * Returns the input unchanged when the source exports nothing or when no
 * declaration line can be matched back to a source range.
 *
 * @param dtsText - Generated declaration text to annotate
 * @param filePath - Source file used to compute line ranges
 */
async function annotateWithSourceLines(dtsText, filePath) {
  const lineMap = await buildSourceLineMap(filePath);
  if (lineMap.size === 0) return dtsText;
  const lines = dtsText.split("\n");
  // First pass: map chunk start lines (JSDoc opener or the declaration
  // line itself) to their formatted annotations.
  const annotations = new Map();
  for (let i = 0; i < lines.length; i++) {
    const match = lines[i].match(DECL_PATTERN);
    if (!match) continue;
    const range = lineMap.get(match[1]);
    if (!range) continue; // declared name not found among exported symbols
    annotations.set(findChunkStart(lines, i), formatLineRef(range));
  }
  if (annotations.size === 0) return dtsText;
  // Second pass: build output with annotations inserted
  const result = [];
  for (let i = 0; i < lines.length; i++) {
    const annotation = annotations.get(i);
    if (annotation) {
      // Blank line separator before annotation (if there's content above)
      if (result.length > 0 && result[result.length - 1]?.trim() !== "") {
        result.push("");
      }
      result.push(annotation);
    }
    result.push(lines[i]);
  }
  return result.join("\n");
}
|
|
276
|
+
/**
 * Splits annotated .d.ts text into individual declaration chunks.
 *
 * A chunk boundary is a blank line followed by a line annotation (`// L`)
 * or a declaration keyword (`export`, `declare`). Each chunk is a complete
 * declaration including its preceding JSDoc comment and line annotation,
 * with trailing whitespace trimmed. Empty chunks are dropped.
 *
 * @param dtsText - The annotated .d.ts text to split
 * @returns Array of declaration chunks
 */
export function splitDeclarations(dtsText) {
  if (dtsText === "") return [];
  const lines = dtsText.split("\n");
  const chunks = [];
  let chunkStart = 0;
  const flush = (endExclusive) => {
    const chunk = lines.slice(chunkStart, endExclusive).join("\n").trimEnd();
    if (chunk !== "") chunks.push(chunk);
  };
  for (let i = 1; i < lines.length; i++) {
    // Only a blank line can precede a split point.
    if (lines[i - 1].trim() !== "") continue;
    const next = lines[i].trimStart();
    const startsChunk =
      next.startsWith("// L") ||
      next.startsWith("export") ||
      next.startsWith("declare");
    if (startsChunk) {
      flush(i - 1);
      chunkStart = i;
    }
  }
  flush(lines.length);
  return chunks;
}
|
|
314
|
+
/**
 * Find the 0-based line index where a leading JSDoc block starts above a
 * statement. Skips blank lines directly above the statement, then recognizes
 * either a closing comment marker (walking up to the opener) or a single-line
 * JSDoc. Returns `statementLine` itself when no leading JSDoc is found.
 */
function findJsdocStart(lines, statementLine) {
  // Walk past blank lines immediately above the statement.
  let cursor = statementLine - 1;
  while (cursor >= 0 && lines[cursor].trim() === "") cursor--;
  if (cursor < 0) return statementLine;
  const candidate = lines[cursor].trimEnd();
  if (candidate.endsWith("*/")) {
    // Multi-line JSDoc: walk back to the opening marker.
    for (let j = cursor; j >= 0; j--) {
      if (lines[j].trimStart().startsWith("/**")) return j;
    }
    return statementLine;
  }
  // Single-line JSDoc opener directly above.
  if (candidate.trimStart().startsWith("/**")) return cursor;
  return statementLine;
}
|
|
336
|
+
/**
 * Extracts all top-level source blocks from a TypeScript file.
 * Walks all top-level statements (not just exported ones), includes each
 * statement's leading JSDoc comment, and annotates every block with a
 * `// LN` / `// LN-LM` source line reference. Import declarations are
 * excluded.
 *
 * Returns an empty array for blank files.
 *
 * @param filePath - Absolute path to the TypeScript source file
 * @param content - Pre-read file content (avoids redundant disk read)
 * @returns Array of { annotation, blockText } objects
 */
export function extractAllSourceBlocks(filePath, content) {
  if (content.trim() === "") return [];
  const lines = content.split("\n");
  const sourceFile = ts.createSourceFile(
    filePath,
    content,
    ts.ScriptTarget.Latest,
    true, // setParentNodes
  );
  const blocks = [];
  const seen = new Set();
  for (const statement of sourceFile.statements) {
    // Skip import declarations — callers fall back to full file for import matches
    if (ts.isImportDeclaration(statement)) continue;
    const endLine =
      sourceFile.getLineAndCharacterOfPosition(statement.getEnd()).line + 1;
    const stmtStartLine = sourceFile.getLineAndCharacterOfPosition(
      statement.getStart(sourceFile),
    ).line; // 0-based
    const jsdocStartLine = findJsdocStart(lines, stmtStartLine); // 0-based
    // Skip if we've already included this block (deduplication by start position)
    if (seen.has(jsdocStartLine)) continue;
    seen.add(jsdocStartLine);
    // Extract the source text for this block (lines are 0-based, endLine is 1-based)
    const blockText = lines.slice(jsdocStartLine, endLine).join("\n");
    const startLine1based = jsdocStartLine + 1; // 1-based
    const annotation = formatLineRef({ start: startLine1based, end: endLine });
    blocks.push({ annotation, blockText });
  }
  return blocks;
}
|
|
378
|
+
/**
 * Extracts top-level source blocks from a file that match a regex.
 *
 * Delegates to extractAllSourceBlocks (all top-level statements with their
 * leading JSDoc), keeps only blocks whose source text matches the regex,
 * and joins the annotated matches with blank-line separators.
 *
 * @param filePath - Absolute path to the TypeScript source file
 * @param content - Pre-read file content (avoids redundant disk read)
 * @param regex - Regular expression tested against each block's source text
 * @returns Annotated matching blocks joined with blank lines, or null if no match
 */
export function extractSourceBlocks(filePath, content, regex) {
  const matching = extractAllSourceBlocks(filePath, content).filter((block) =>
    regex.test(block.blockText),
  );
  if (matching.length === 0) return null;
  return matching
    .map((block) => `${block.annotation}\n${block.blockText}`)
    .join("\n\n");
}
|
|
156
397
|
/**
|
|
157
398
|
* Generates TypeScript declaration output from a source file.
|
|
158
399
|
*
|
|
@@ -172,10 +413,10 @@ export function resetCache() {
|
|
|
172
413
|
* @returns The declaration output as a string
|
|
173
414
|
* @throws {JsdocError} If the file cannot be read or parsed
|
|
174
415
|
*/
|
|
175
|
-
export function generateTypeDeclarations(filePath) {
|
|
416
|
+
export async function generateTypeDeclarations(filePath) {
|
|
176
417
|
// Verify the file exists and throw FILE_NOT_FOUND for any read errors
|
|
177
418
|
try {
|
|
178
|
-
|
|
419
|
+
await readFile(filePath, "utf-8");
|
|
179
420
|
} catch (_error) {
|
|
180
421
|
throw new JsdocError("FILE_NOT_FOUND", `Failed to read file: ${filePath}`);
|
|
181
422
|
}
|
|
@@ -208,5 +449,8 @@ export function generateTypeDeclarations(filePath) {
|
|
|
208
449
|
if (withoutComments === "export {};") {
|
|
209
450
|
cleaned = "";
|
|
210
451
|
}
|
|
452
|
+
if (cleaned.length > 0) {
|
|
453
|
+
cleaned = await annotateWithSourceLines(cleaned, filePath);
|
|
454
|
+
}
|
|
211
455
|
return cleaned;
|
|
212
456
|
}
|
package/dist/validate.js
CHANGED
|
@@ -97,7 +97,7 @@ export async function validate(
|
|
|
97
97
|
gitignore = true,
|
|
98
98
|
config = { enabled: true, directory: DEFAULT_CACHE_DIR },
|
|
99
99
|
) {
|
|
100
|
-
const files = discoverFiles(selector.pattern, cwd, gitignore);
|
|
100
|
+
const files = await discoverFiles(selector.pattern, cwd, gitignore);
|
|
101
101
|
if (files.length === 0) {
|
|
102
102
|
throw new JsdocError(
|
|
103
103
|
"NO_FILES_MATCHED",
|
|
@@ -108,7 +108,7 @@ export async function validate(
|
|
|
108
108
|
const statuses = await Promise.all(
|
|
109
109
|
files.map((f) => classifyFile(eslint, f, cwd, config)),
|
|
110
110
|
);
|
|
111
|
-
const missingBarrels = findMissingBarrels(files, cwd);
|
|
111
|
+
const missingBarrels = await findMissingBarrels(files, cwd);
|
|
112
112
|
return buildGroupedResult(statuses, missingBarrels, limit);
|
|
113
113
|
}
|
|
114
114
|
/**
|
|
@@ -135,6 +135,6 @@ export async function validateFiles(
|
|
|
135
135
|
const statuses = await Promise.all(
|
|
136
136
|
tsFiles.map((f) => classifyFile(eslint, f, cwd, config)),
|
|
137
137
|
);
|
|
138
|
-
const missingBarrels = findMissingBarrels(tsFiles, cwd);
|
|
138
|
+
const missingBarrels = await findMissingBarrels(tsFiles, cwd);
|
|
139
139
|
return buildGroupedResult(statuses, missingBarrels, limit);
|
|
140
140
|
}
|
package/package.json
CHANGED
package/types/barrel.d.ts
CHANGED
|
@@ -32,7 +32,7 @@ export declare function isBarrel(filePath: string): boolean;
|
|
|
32
32
|
export declare function getBarrelChildren(
|
|
33
33
|
barrelPath: string,
|
|
34
34
|
_cwd: string,
|
|
35
|
-
): string[]
|
|
35
|
+
): Promise<string[]>;
|
|
36
36
|
/** Minimum number of .ts/.tsx files in a directory to require a barrel. */
|
|
37
37
|
export declare const BARREL_THRESHOLD = 3;
|
|
38
38
|
/**
|
|
@@ -42,4 +42,4 @@ export declare const BARREL_THRESHOLD = 3;
|
|
|
42
42
|
export declare function findMissingBarrels(
|
|
43
43
|
filePaths: string[],
|
|
44
44
|
cwd: string,
|
|
45
|
-
): string[]
|
|
45
|
+
): Promise<string[]>;
|
|
@@ -11,7 +11,7 @@ import { type Ignore } from "ignore";
|
|
|
11
11
|
* Walk from `cwd` up to the filesystem root, collecting .gitignore entries.
|
|
12
12
|
* Returns an Ignore instance loaded with all discovered rules.
|
|
13
13
|
*/
|
|
14
|
-
export declare function loadGitignore(cwd: string): Ignore
|
|
14
|
+
export declare function loadGitignore(cwd: string): Promise<Ignore>;
|
|
15
15
|
/**
|
|
16
16
|
* Resolve a selector pattern to a list of .ts/.tsx file paths.
|
|
17
17
|
*
|
|
@@ -29,4 +29,4 @@ export declare function discoverFiles(
|
|
|
29
29
|
pattern: string,
|
|
30
30
|
cwd: string,
|
|
31
31
|
gitignore?: boolean,
|
|
32
|
-
): string[]
|
|
32
|
+
): Promise<string[]>;
|
package/types/index.d.ts
CHANGED
|
@@ -13,8 +13,14 @@ export { JsdocError } from "./errors.js";
|
|
|
13
13
|
export { discoverFiles } from "./file-discovery.js";
|
|
14
14
|
export { extractFileJsdoc, parseFileSummaries } from "./jsdoc-parser.js";
|
|
15
15
|
export { lint, lintFiles } from "./lint.js";
|
|
16
|
+
export { search, searchFiles } from "./search.js";
|
|
16
17
|
export { parseSelector } from "./selector.js";
|
|
17
|
-
export {
|
|
18
|
+
export { formatTextOutput } from "./text-format.js";
|
|
19
|
+
export {
|
|
20
|
+
extractSourceBlocks,
|
|
21
|
+
generateTypeDeclarations,
|
|
22
|
+
splitDeclarations,
|
|
23
|
+
} from "./type-declarations.js";
|
|
18
24
|
export {
|
|
19
25
|
type CacheConfig,
|
|
20
26
|
type CacheOperationMode,
|
package/types/jsdoc-parser.d.ts
CHANGED
|
@@ -24,4 +24,6 @@ export declare function extractFileJsdoc(
|
|
|
24
24
|
* Reads the file, extracts the first file-level JSDoc block, and parses the first
|
|
25
25
|
* @summary tag and free-text description.
|
|
26
26
|
*/
|
|
27
|
-
export declare function parseFileSummaries(
|
|
27
|
+
export declare function parseFileSummaries(
|
|
28
|
+
filePath: string,
|
|
29
|
+
): Promise<ParsedFileInfo>;
|