@bfra.me/doc-sync 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +288 -0
- package/lib/chunk-6NKAJT2M.js +1233 -0
- package/lib/chunk-DR6UG237.js +1027 -0
- package/lib/chunk-G5KKGJYO.js +1560 -0
- package/lib/chunk-ROLA7SBB.js +12 -0
- package/lib/cli/index.d.ts +1 -0
- package/lib/cli/index.js +397 -0
- package/lib/generators/index.d.ts +170 -0
- package/lib/generators/index.js +76 -0
- package/lib/index.d.ts +141 -0
- package/lib/index.js +118 -0
- package/lib/parsers/index.d.ts +264 -0
- package/lib/parsers/index.js +113 -0
- package/lib/types.d.ts +388 -0
- package/lib/types.js +7 -0
- package/package.json +99 -0
- package/src/cli/commands/index.ts +3 -0
- package/src/cli/commands/sync.ts +146 -0
- package/src/cli/commands/validate.ts +151 -0
- package/src/cli/commands/watch.ts +74 -0
- package/src/cli/index.ts +71 -0
- package/src/cli/types.ts +19 -0
- package/src/cli/ui.ts +123 -0
- package/src/generators/api-reference-generator.ts +268 -0
- package/src/generators/code-example-formatter.ts +313 -0
- package/src/generators/component-mapper.ts +383 -0
- package/src/generators/content-merger.ts +295 -0
- package/src/generators/frontmatter-generator.ts +277 -0
- package/src/generators/index.ts +56 -0
- package/src/generators/mdx-generator.ts +289 -0
- package/src/index.ts +131 -0
- package/src/orchestrator/index.ts +21 -0
- package/src/orchestrator/package-scanner.ts +276 -0
- package/src/orchestrator/sync-orchestrator.ts +382 -0
- package/src/orchestrator/validation-pipeline.ts +328 -0
- package/src/parsers/export-analyzer.ts +335 -0
- package/src/parsers/guards.ts +350 -0
- package/src/parsers/index.ts +82 -0
- package/src/parsers/jsdoc-extractor.ts +313 -0
- package/src/parsers/package-info.ts +267 -0
- package/src/parsers/readme-parser.ts +334 -0
- package/src/parsers/typescript-parser.ts +299 -0
- package/src/types.ts +423 -0
- package/src/utils/index.ts +13 -0
- package/src/utils/safe-patterns.ts +280 -0
- package/src/utils/sanitization.ts +164 -0
- package/src/watcher/change-detector.ts +138 -0
- package/src/watcher/debouncer.ts +168 -0
- package/src/watcher/file-watcher.ts +164 -0
- package/src/watcher/index.ts +27 -0
|
@@ -0,0 +1,334 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @bfra.me/doc-sync/parsers/readme-parser - README Markdown parser with section extraction
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
import type {Root, RootContent} from 'mdast'
|
|
6
|
+
|
|
7
|
+
import type {ParseError, ParseResult, ReadmeContent, ReadmeSection} from '../types'
|
|
8
|
+
|
|
9
|
+
import {err, ok} from '@bfra.me/es/result'
|
|
10
|
+
import remarkParse from 'remark-parse'
|
|
11
|
+
import {unified} from 'unified'
|
|
12
|
+
|
|
13
|
+
/**
 * Options for parsing README files
 */
export interface ReadmeParserOptions {
  // When false, skip building the section tree; the result's `sections` is empty.
  // Defaults to extracting sections.
  readonly extractSections?: boolean
  // When false, the raw markdown text is omitted (`raw` becomes '').
  // Defaults to preserving the original text.
  readonly preserveRaw?: boolean
}
|
|
20
|
+
|
|
21
|
+
/**
|
|
22
|
+
* Parses a README markdown string into structured content
|
|
23
|
+
*/
|
|
24
|
+
export function parseReadme(
|
|
25
|
+
content: string,
|
|
26
|
+
options?: ReadmeParserOptions,
|
|
27
|
+
): ParseResult<ReadmeContent> {
|
|
28
|
+
try {
|
|
29
|
+
const processor = unified().use(remarkParse)
|
|
30
|
+
const tree = processor.parse(content)
|
|
31
|
+
|
|
32
|
+
const title = extractTitle(tree)
|
|
33
|
+
const preamble = extractPreamble(tree)
|
|
34
|
+
const sections =
|
|
35
|
+
options?.extractSections === false ? ([] as readonly ReadmeSection[]) : extractSections(tree)
|
|
36
|
+
|
|
37
|
+
return ok({
|
|
38
|
+
...(title !== undefined && {title}),
|
|
39
|
+
...(preamble !== undefined && {preamble}),
|
|
40
|
+
sections,
|
|
41
|
+
raw: options?.preserveRaw === false ? '' : content,
|
|
42
|
+
})
|
|
43
|
+
} catch (error) {
|
|
44
|
+
return err({
|
|
45
|
+
code: 'INVALID_SYNTAX',
|
|
46
|
+
message: 'Failed to parse README content',
|
|
47
|
+
cause: error,
|
|
48
|
+
} satisfies ParseError)
|
|
49
|
+
}
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
/**
|
|
53
|
+
* Parses a README file from the file system
|
|
54
|
+
*/
|
|
55
|
+
export async function parseReadmeFile(
|
|
56
|
+
filePath: string,
|
|
57
|
+
options?: ReadmeParserOptions,
|
|
58
|
+
): Promise<ParseResult<ReadmeContent>> {
|
|
59
|
+
try {
|
|
60
|
+
const fs = await import('node:fs/promises')
|
|
61
|
+
const content = await fs.readFile(filePath, 'utf-8')
|
|
62
|
+
return parseReadme(content, options)
|
|
63
|
+
} catch (error) {
|
|
64
|
+
if (error instanceof Error && 'code' in error && error.code === 'ENOENT') {
|
|
65
|
+
return err({
|
|
66
|
+
code: 'FILE_NOT_FOUND',
|
|
67
|
+
message: `README file not found: ${filePath}`,
|
|
68
|
+
filePath,
|
|
69
|
+
cause: error,
|
|
70
|
+
} satisfies ParseError)
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
return err({
|
|
74
|
+
code: 'READ_ERROR',
|
|
75
|
+
message: `Failed to read README file: ${filePath}`,
|
|
76
|
+
filePath,
|
|
77
|
+
cause: error,
|
|
78
|
+
} satisfies ParseError)
|
|
79
|
+
}
|
|
80
|
+
}
|
|
81
|
+
|
|
82
|
+
function extractTitle(tree: Root): string | undefined {
|
|
83
|
+
for (const node of tree.children) {
|
|
84
|
+
if (node.type === 'heading' && node.depth === 1) {
|
|
85
|
+
return extractTextFromNode(node)
|
|
86
|
+
}
|
|
87
|
+
}
|
|
88
|
+
return undefined
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
function extractPreamble(tree: Root): string | undefined {
|
|
92
|
+
const preambleNodes: RootContent[] = []
|
|
93
|
+
|
|
94
|
+
for (const node of tree.children) {
|
|
95
|
+
if (node.type === 'heading') {
|
|
96
|
+
break
|
|
97
|
+
}
|
|
98
|
+
preambleNodes.push(node)
|
|
99
|
+
}
|
|
100
|
+
|
|
101
|
+
if (preambleNodes.length === 0) {
|
|
102
|
+
return undefined
|
|
103
|
+
}
|
|
104
|
+
|
|
105
|
+
const text = preambleNodes.map(extractTextFromNode).join('\n\n').trim()
|
|
106
|
+
return text.length > 0 ? text : undefined
|
|
107
|
+
}
|
|
108
|
+
|
|
109
|
+
function extractSections(tree: Root): readonly ReadmeSection[] {
|
|
110
|
+
const sections: ReadmeSection[] = []
|
|
111
|
+
const stack: {level: number; section: ReadmeSection & {children: ReadmeSection[]}}[] = []
|
|
112
|
+
|
|
113
|
+
for (let i = 0; i < tree.children.length; i++) {
|
|
114
|
+
const node = tree.children[i]
|
|
115
|
+
|
|
116
|
+
if (node !== undefined && node.type === 'heading') {
|
|
117
|
+
const heading = extractTextFromNode(node)
|
|
118
|
+
const level = node.depth
|
|
119
|
+
const content = extractSectionContent(tree, i + 1)
|
|
120
|
+
|
|
121
|
+
const newSection: ReadmeSection & {children: ReadmeSection[]} = {
|
|
122
|
+
heading,
|
|
123
|
+
level,
|
|
124
|
+
content,
|
|
125
|
+
children: [],
|
|
126
|
+
}
|
|
127
|
+
|
|
128
|
+
// Pop sections from stack that are at same or higher level
|
|
129
|
+
while (stack.length > 0 && (stack.at(-1)?.level ?? 0) >= level) {
|
|
130
|
+
stack.pop()
|
|
131
|
+
}
|
|
132
|
+
|
|
133
|
+
if (stack.length === 0) {
|
|
134
|
+
// Top-level section
|
|
135
|
+
sections.push(newSection)
|
|
136
|
+
} else {
|
|
137
|
+
// Nested section
|
|
138
|
+
const parent = stack.at(-1)
|
|
139
|
+
parent?.section.children.push(newSection)
|
|
140
|
+
}
|
|
141
|
+
|
|
142
|
+
stack.push({level, section: newSection})
|
|
143
|
+
}
|
|
144
|
+
}
|
|
145
|
+
|
|
146
|
+
return freezeSections(sections)
|
|
147
|
+
}
|
|
148
|
+
|
|
149
|
+
function freezeSections(sections: ReadmeSection[]): readonly ReadmeSection[] {
|
|
150
|
+
return sections.map(s => ({
|
|
151
|
+
heading: s.heading,
|
|
152
|
+
level: s.level,
|
|
153
|
+
content: s.content,
|
|
154
|
+
children: freezeSections([...s.children]),
|
|
155
|
+
}))
|
|
156
|
+
}
|
|
157
|
+
|
|
158
|
+
function extractSectionContent(tree: Root, startIndex: number): string {
|
|
159
|
+
const contentNodes: RootContent[] = []
|
|
160
|
+
|
|
161
|
+
for (let i = startIndex; i < tree.children.length; i++) {
|
|
162
|
+
const node = tree.children[i]
|
|
163
|
+
if (node === undefined || node.type === 'heading') {
|
|
164
|
+
break
|
|
165
|
+
}
|
|
166
|
+
contentNodes.push(node)
|
|
167
|
+
}
|
|
168
|
+
|
|
169
|
+
return contentNodes.map(serializeNode).join('\n\n').trim()
|
|
170
|
+
}
|
|
171
|
+
|
|
172
|
+
function extractTextFromNode(node: RootContent): string {
|
|
173
|
+
if ('value' in node && typeof node.value === 'string') {
|
|
174
|
+
return node.value
|
|
175
|
+
}
|
|
176
|
+
|
|
177
|
+
if ('children' in node && Array.isArray(node.children)) {
|
|
178
|
+
return (node.children as RootContent[]).map(extractTextFromNode).join('')
|
|
179
|
+
}
|
|
180
|
+
|
|
181
|
+
return ''
|
|
182
|
+
}
|
|
183
|
+
|
|
184
|
+
function serializeNode(node: RootContent): string {
|
|
185
|
+
if (node.type === 'paragraph') {
|
|
186
|
+
return extractTextFromNode(node)
|
|
187
|
+
}
|
|
188
|
+
|
|
189
|
+
if (node.type === 'heading') {
|
|
190
|
+
const prefix = '#'.repeat(node.depth)
|
|
191
|
+
return `${prefix} ${extractTextFromNode(node)}`
|
|
192
|
+
}
|
|
193
|
+
|
|
194
|
+
if (node.type === 'code') {
|
|
195
|
+
return `\`\`\`${node.lang ?? ''}\n${node.value}\n\`\`\``
|
|
196
|
+
}
|
|
197
|
+
|
|
198
|
+
if (node.type === 'blockquote') {
|
|
199
|
+
return (node.children as RootContent[]).map(c => `> ${serializeNode(c)}`).join('\n')
|
|
200
|
+
}
|
|
201
|
+
|
|
202
|
+
if (node.type === 'list') {
|
|
203
|
+
const items = node.children
|
|
204
|
+
.map((item, index) => {
|
|
205
|
+
if (item.type !== 'listItem') return ''
|
|
206
|
+
const prefix = node.ordered === true ? `${index + 1}. ` : '- '
|
|
207
|
+
const content = (item.children as RootContent[]).map(serializeNode).join('\n')
|
|
208
|
+
return `${prefix}${content}`
|
|
209
|
+
})
|
|
210
|
+
.filter(s => s.length > 0)
|
|
211
|
+
return items.join('\n')
|
|
212
|
+
}
|
|
213
|
+
|
|
214
|
+
if (node.type === 'thematicBreak') {
|
|
215
|
+
return '---'
|
|
216
|
+
}
|
|
217
|
+
|
|
218
|
+
if (node.type === 'html') {
|
|
219
|
+
return node.value
|
|
220
|
+
}
|
|
221
|
+
|
|
222
|
+
if (node.type === 'table') {
|
|
223
|
+
return serializeTable(node)
|
|
224
|
+
}
|
|
225
|
+
|
|
226
|
+
// For all other node types, extract text content
|
|
227
|
+
return extractTextFromNode(node)
|
|
228
|
+
}
|
|
229
|
+
|
|
230
|
+
function serializeTable(node: RootContent): string {
|
|
231
|
+
if (node.type !== 'table' || !('children' in node)) {
|
|
232
|
+
return ''
|
|
233
|
+
}
|
|
234
|
+
|
|
235
|
+
const rows = node.children
|
|
236
|
+
.filter((row): row is (typeof node.children)[number] => row.type === 'tableRow')
|
|
237
|
+
.map(row => {
|
|
238
|
+
const cells = (row.children as RootContent[])
|
|
239
|
+
.filter((cell): cell is RootContent => cell.type === 'tableCell')
|
|
240
|
+
.map(cell => extractTextFromNode(cell))
|
|
241
|
+
return `| ${cells.join(' | ')} |`
|
|
242
|
+
})
|
|
243
|
+
|
|
244
|
+
if (rows.length === 0) {
|
|
245
|
+
return ''
|
|
246
|
+
}
|
|
247
|
+
|
|
248
|
+
// Insert separator after header row
|
|
249
|
+
const headerRow = rows[0]
|
|
250
|
+
if (headerRow !== undefined) {
|
|
251
|
+
const columnCount = (headerRow.match(/\|/g)?.length ?? 2) - 1
|
|
252
|
+
const separator = `|${' --- |'.repeat(columnCount)}`
|
|
253
|
+
rows.splice(1, 0, separator)
|
|
254
|
+
}
|
|
255
|
+
|
|
256
|
+
return rows.join('\n')
|
|
257
|
+
}
|
|
258
|
+
|
|
259
|
+
/**
|
|
260
|
+
* Finds a section by heading text (case-insensitive)
|
|
261
|
+
*/
|
|
262
|
+
export function findSection(
|
|
263
|
+
content: ReadmeContent,
|
|
264
|
+
headingText: string,
|
|
265
|
+
): ReadmeSection | undefined {
|
|
266
|
+
const normalizedSearch = headingText.toLowerCase()
|
|
267
|
+
|
|
268
|
+
function searchInSections(sections: readonly ReadmeSection[]): ReadmeSection | undefined {
|
|
269
|
+
for (const section of sections) {
|
|
270
|
+
if (section.heading.toLowerCase() === normalizedSearch) {
|
|
271
|
+
return section
|
|
272
|
+
}
|
|
273
|
+
|
|
274
|
+
const found = searchInSections(section.children)
|
|
275
|
+
if (found !== undefined) {
|
|
276
|
+
return found
|
|
277
|
+
}
|
|
278
|
+
}
|
|
279
|
+
return undefined
|
|
280
|
+
}
|
|
281
|
+
|
|
282
|
+
return searchInSections(content.sections)
|
|
283
|
+
}
|
|
284
|
+
|
|
285
|
+
/**
|
|
286
|
+
* Gets all sections at a specific heading level
|
|
287
|
+
*/
|
|
288
|
+
export function getSectionsByLevel(
|
|
289
|
+
content: ReadmeContent,
|
|
290
|
+
level: number,
|
|
291
|
+
): readonly ReadmeSection[] {
|
|
292
|
+
const result: ReadmeSection[] = []
|
|
293
|
+
|
|
294
|
+
function collectAtLevel(sections: readonly ReadmeSection[]): void {
|
|
295
|
+
for (const section of sections) {
|
|
296
|
+
if (section.level === level) {
|
|
297
|
+
result.push(section)
|
|
298
|
+
}
|
|
299
|
+
collectAtLevel(section.children)
|
|
300
|
+
}
|
|
301
|
+
}
|
|
302
|
+
|
|
303
|
+
collectAtLevel(content.sections)
|
|
304
|
+
return result
|
|
305
|
+
}
|
|
306
|
+
|
|
307
|
+
/**
|
|
308
|
+
* Flattens all sections into a single array
|
|
309
|
+
*/
|
|
310
|
+
export function flattenSections(content: ReadmeContent): readonly ReadmeSection[] {
|
|
311
|
+
const result: ReadmeSection[] = []
|
|
312
|
+
|
|
313
|
+
function collect(sections: readonly ReadmeSection[]): void {
|
|
314
|
+
for (const section of sections) {
|
|
315
|
+
result.push(section)
|
|
316
|
+
collect(section.children)
|
|
317
|
+
}
|
|
318
|
+
}
|
|
319
|
+
|
|
320
|
+
collect(content.sections)
|
|
321
|
+
return result
|
|
322
|
+
}
|
|
323
|
+
|
|
324
|
+
/**
|
|
325
|
+
* Gets the table of contents as a flat list
|
|
326
|
+
*/
|
|
327
|
+
export function getTableOfContents(
|
|
328
|
+
content: ReadmeContent,
|
|
329
|
+
): readonly {readonly heading: string; readonly level: number}[] {
|
|
330
|
+
return flattenSections(content).map(s => ({
|
|
331
|
+
heading: s.heading,
|
|
332
|
+
level: s.level,
|
|
333
|
+
}))
|
|
334
|
+
}
|
|
@@ -0,0 +1,299 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @bfra.me/doc-sync/parsers/typescript-parser - TypeScript source file parser using ts-morph
|
|
3
|
+
*/
|
|
4
|
+
|
|
5
|
+
import type {
|
|
6
|
+
ExportedFunction,
|
|
7
|
+
ExportedType,
|
|
8
|
+
FunctionParameter,
|
|
9
|
+
PackageAPI,
|
|
10
|
+
ParseError,
|
|
11
|
+
ParseResult,
|
|
12
|
+
ReExport,
|
|
13
|
+
} from '../types'
|
|
14
|
+
|
|
15
|
+
import {err, ok} from '@bfra.me/es/result'
|
|
16
|
+
import {Project, type SourceFile} from 'ts-morph'
|
|
17
|
+
|
|
18
|
+
import {extractJSDocInfo} from './jsdoc-extractor'
|
|
19
|
+
|
|
20
|
+
/**
 * Options for parsing TypeScript source files
 */
export interface TypeScriptParserOptions {
  // Path to a tsconfig.json, forwarded to the ts-morph Project.
  readonly tsConfigPath?: string
  // Extra compiler options merged over the defaults (declaration: true).
  readonly compilerOptions?: Record<string, unknown>
}
|
|
27
|
+
|
|
28
|
+
/**
|
|
29
|
+
* Creates a ts-morph project instance for analyzing source files
|
|
30
|
+
*/
|
|
31
|
+
export function createProject(options?: TypeScriptParserOptions): Project {
|
|
32
|
+
return new Project({
|
|
33
|
+
tsConfigFilePath: options?.tsConfigPath,
|
|
34
|
+
compilerOptions: {
|
|
35
|
+
declaration: true,
|
|
36
|
+
...(options?.compilerOptions as object),
|
|
37
|
+
},
|
|
38
|
+
skipAddingFilesFromTsConfig: true,
|
|
39
|
+
})
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
/**
|
|
43
|
+
* Parses a TypeScript source file and extracts its structure
|
|
44
|
+
*/
|
|
45
|
+
export function parseSourceFile(project: Project, filePath: string): ParseResult<SourceFile> {
|
|
46
|
+
try {
|
|
47
|
+
const sourceFile = project.addSourceFileAtPath(filePath)
|
|
48
|
+
return ok(sourceFile)
|
|
49
|
+
} catch (error) {
|
|
50
|
+
return err({
|
|
51
|
+
code: 'FILE_NOT_FOUND',
|
|
52
|
+
message: `Failed to parse source file: ${filePath}`,
|
|
53
|
+
filePath,
|
|
54
|
+
cause: error,
|
|
55
|
+
} satisfies ParseError)
|
|
56
|
+
}
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
/**
|
|
60
|
+
* Parses TypeScript source content from a string
|
|
61
|
+
*/
|
|
62
|
+
export function parseSourceContent(
|
|
63
|
+
project: Project,
|
|
64
|
+
content: string,
|
|
65
|
+
virtualPath = 'virtual.ts',
|
|
66
|
+
): ParseResult<SourceFile> {
|
|
67
|
+
try {
|
|
68
|
+
const sourceFile = project.createSourceFile(virtualPath, content, {
|
|
69
|
+
overwrite: true,
|
|
70
|
+
})
|
|
71
|
+
return ok(sourceFile)
|
|
72
|
+
} catch (error) {
|
|
73
|
+
return err({
|
|
74
|
+
code: 'INVALID_SYNTAX',
|
|
75
|
+
message: 'Failed to parse TypeScript content',
|
|
76
|
+
filePath: virtualPath,
|
|
77
|
+
cause: error,
|
|
78
|
+
} satisfies ParseError)
|
|
79
|
+
}
|
|
80
|
+
}
|
|
81
|
+
|
|
82
|
+
/**
|
|
83
|
+
* Extracts exported functions from a source file
|
|
84
|
+
*/
|
|
85
|
+
export function extractExportedFunctions(sourceFile: SourceFile): readonly ExportedFunction[] {
|
|
86
|
+
const functions: ExportedFunction[] = []
|
|
87
|
+
|
|
88
|
+
for (const func of sourceFile.getFunctions()) {
|
|
89
|
+
if (!func.isExported()) continue
|
|
90
|
+
|
|
91
|
+
const name = func.getName() ?? 'default'
|
|
92
|
+
const parameters = extractFunctionParameters(func)
|
|
93
|
+
const returnType = func.getReturnType().getText()
|
|
94
|
+
const jsdoc = extractJSDocInfo(func)
|
|
95
|
+
|
|
96
|
+
functions.push({
|
|
97
|
+
name,
|
|
98
|
+
jsdoc,
|
|
99
|
+
signature: func.getSignature()?.getDeclaration().getText() ?? func.getText(),
|
|
100
|
+
isAsync: func.isAsync(),
|
|
101
|
+
isGenerator: func.isGenerator(),
|
|
102
|
+
parameters,
|
|
103
|
+
returnType,
|
|
104
|
+
isDefault: func.isDefaultExport(),
|
|
105
|
+
})
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
return functions
|
|
109
|
+
}
|
|
110
|
+
|
|
111
|
+
function extractFunctionParameters(
|
|
112
|
+
func: ReturnType<SourceFile['getFunctions']>[number],
|
|
113
|
+
): readonly FunctionParameter[] {
|
|
114
|
+
return func.getParameters().map(param => ({
|
|
115
|
+
name: param.getName(),
|
|
116
|
+
type: param.getType().getText(),
|
|
117
|
+
optional: param.isOptional(),
|
|
118
|
+
defaultValue: param.getInitializer()?.getText(),
|
|
119
|
+
}))
|
|
120
|
+
}
|
|
121
|
+
|
|
122
|
+
/**
|
|
123
|
+
* Extracts exported types and interfaces from a source file
|
|
124
|
+
*/
|
|
125
|
+
export function extractExportedTypes(sourceFile: SourceFile): readonly ExportedType[] {
|
|
126
|
+
const types: ExportedType[] = []
|
|
127
|
+
|
|
128
|
+
// Extract interfaces
|
|
129
|
+
for (const iface of sourceFile.getInterfaces()) {
|
|
130
|
+
if (!iface.isExported()) continue
|
|
131
|
+
|
|
132
|
+
const jsdoc = extractJSDocInfo(iface)
|
|
133
|
+
const typeParams = iface.getTypeParameters().map(tp => tp.getText())
|
|
134
|
+
|
|
135
|
+
types.push({
|
|
136
|
+
name: iface.getName(),
|
|
137
|
+
jsdoc,
|
|
138
|
+
definition: iface.getText(),
|
|
139
|
+
kind: 'interface',
|
|
140
|
+
isDefault: iface.isDefaultExport(),
|
|
141
|
+
typeParameters: typeParams.length > 0 ? typeParams : undefined,
|
|
142
|
+
})
|
|
143
|
+
}
|
|
144
|
+
|
|
145
|
+
// Extract type aliases
|
|
146
|
+
for (const typeAlias of sourceFile.getTypeAliases()) {
|
|
147
|
+
if (!typeAlias.isExported()) continue
|
|
148
|
+
|
|
149
|
+
const jsdoc = extractJSDocInfo(typeAlias)
|
|
150
|
+
const typeParams = typeAlias.getTypeParameters().map(tp => tp.getText())
|
|
151
|
+
|
|
152
|
+
types.push({
|
|
153
|
+
name: typeAlias.getName(),
|
|
154
|
+
jsdoc,
|
|
155
|
+
definition: typeAlias.getText(),
|
|
156
|
+
kind: 'type',
|
|
157
|
+
isDefault: typeAlias.isDefaultExport(),
|
|
158
|
+
typeParameters: typeParams.length > 0 ? typeParams : undefined,
|
|
159
|
+
})
|
|
160
|
+
}
|
|
161
|
+
|
|
162
|
+
// Extract enums
|
|
163
|
+
for (const enumDecl of sourceFile.getEnums()) {
|
|
164
|
+
if (!enumDecl.isExported()) continue
|
|
165
|
+
|
|
166
|
+
const jsdoc = extractJSDocInfo(enumDecl)
|
|
167
|
+
|
|
168
|
+
types.push({
|
|
169
|
+
name: enumDecl.getName(),
|
|
170
|
+
jsdoc,
|
|
171
|
+
definition: enumDecl.getText(),
|
|
172
|
+
kind: 'enum',
|
|
173
|
+
isDefault: enumDecl.isDefaultExport(),
|
|
174
|
+
})
|
|
175
|
+
}
|
|
176
|
+
|
|
177
|
+
// Extract classes
|
|
178
|
+
for (const classDecl of sourceFile.getClasses()) {
|
|
179
|
+
if (!classDecl.isExported()) continue
|
|
180
|
+
|
|
181
|
+
const name = classDecl.getName()
|
|
182
|
+
if (name === undefined) continue
|
|
183
|
+
|
|
184
|
+
const jsdoc = extractJSDocInfo(classDecl)
|
|
185
|
+
const typeParams = classDecl.getTypeParameters().map(tp => tp.getText())
|
|
186
|
+
|
|
187
|
+
types.push({
|
|
188
|
+
name,
|
|
189
|
+
jsdoc,
|
|
190
|
+
definition: classDecl.getText(),
|
|
191
|
+
kind: 'class',
|
|
192
|
+
isDefault: classDecl.isDefaultExport(),
|
|
193
|
+
typeParameters: typeParams.length > 0 ? typeParams : undefined,
|
|
194
|
+
})
|
|
195
|
+
}
|
|
196
|
+
|
|
197
|
+
return types
|
|
198
|
+
}
|
|
199
|
+
|
|
200
|
+
/**
|
|
201
|
+
* Extracts re-export statements from a source file
|
|
202
|
+
*/
|
|
203
|
+
export function extractReExports(sourceFile: SourceFile): readonly ReExport[] {
|
|
204
|
+
const reExports: ReExport[] = []
|
|
205
|
+
|
|
206
|
+
for (const exportDecl of sourceFile.getExportDeclarations()) {
|
|
207
|
+
const moduleSpecifier = exportDecl.getModuleSpecifierValue()
|
|
208
|
+
if (moduleSpecifier === undefined || moduleSpecifier.length === 0) continue
|
|
209
|
+
|
|
210
|
+
if (exportDecl.isNamespaceExport()) {
|
|
211
|
+
const namespaceExport = exportDecl.getNamespaceExport()
|
|
212
|
+
reExports.push({
|
|
213
|
+
from: moduleSpecifier,
|
|
214
|
+
exports: '*',
|
|
215
|
+
alias: namespaceExport?.getName(),
|
|
216
|
+
})
|
|
217
|
+
} else {
|
|
218
|
+
const namedExports = exportDecl.getNamedExports().map(ne => {
|
|
219
|
+
const aliasNode = ne.getAliasNode()
|
|
220
|
+
const alias = aliasNode === undefined ? undefined : aliasNode.getText()
|
|
221
|
+
const name = ne.getName()
|
|
222
|
+
return alias === undefined ? name : `${name} as ${alias}`
|
|
223
|
+
})
|
|
224
|
+
|
|
225
|
+
if (namedExports.length > 0) {
|
|
226
|
+
reExports.push({
|
|
227
|
+
from: moduleSpecifier,
|
|
228
|
+
exports: namedExports,
|
|
229
|
+
})
|
|
230
|
+
}
|
|
231
|
+
}
|
|
232
|
+
}
|
|
233
|
+
|
|
234
|
+
return reExports
|
|
235
|
+
}
|
|
236
|
+
|
|
237
|
+
/**
|
|
238
|
+
* Extracts the complete API surface from a source file
|
|
239
|
+
*/
|
|
240
|
+
export function extractPackageAPI(sourceFile: SourceFile): PackageAPI {
|
|
241
|
+
return {
|
|
242
|
+
functions: extractExportedFunctions(sourceFile),
|
|
243
|
+
types: extractExportedTypes(sourceFile),
|
|
244
|
+
reExports: extractReExports(sourceFile),
|
|
245
|
+
}
|
|
246
|
+
}
|
|
247
|
+
|
|
248
|
+
/**
|
|
249
|
+
* Parses and analyzes a TypeScript file, returning the complete API
|
|
250
|
+
*/
|
|
251
|
+
export function analyzeTypeScriptFile(
|
|
252
|
+
filePath: string,
|
|
253
|
+
options?: TypeScriptParserOptions,
|
|
254
|
+
): ParseResult<PackageAPI> {
|
|
255
|
+
const project = createProject(options)
|
|
256
|
+
const sourceFileResult = parseSourceFile(project, filePath)
|
|
257
|
+
|
|
258
|
+
if (!sourceFileResult.success) {
|
|
259
|
+
return sourceFileResult
|
|
260
|
+
}
|
|
261
|
+
|
|
262
|
+
try {
|
|
263
|
+
const api = extractPackageAPI(sourceFileResult.data)
|
|
264
|
+
return ok(api)
|
|
265
|
+
} catch (error) {
|
|
266
|
+
return err({
|
|
267
|
+
code: 'INVALID_SYNTAX',
|
|
268
|
+
message: `Failed to analyze TypeScript file: ${filePath}`,
|
|
269
|
+
filePath,
|
|
270
|
+
cause: error,
|
|
271
|
+
} satisfies ParseError)
|
|
272
|
+
}
|
|
273
|
+
}
|
|
274
|
+
|
|
275
|
+
/**
|
|
276
|
+
* Analyzes TypeScript content from a string
|
|
277
|
+
*/
|
|
278
|
+
export function analyzeTypeScriptContent(
|
|
279
|
+
content: string,
|
|
280
|
+
options?: TypeScriptParserOptions,
|
|
281
|
+
): ParseResult<PackageAPI> {
|
|
282
|
+
const project = createProject(options)
|
|
283
|
+
const sourceFileResult = parseSourceContent(project, content)
|
|
284
|
+
|
|
285
|
+
if (!sourceFileResult.success) {
|
|
286
|
+
return sourceFileResult
|
|
287
|
+
}
|
|
288
|
+
|
|
289
|
+
try {
|
|
290
|
+
const api = extractPackageAPI(sourceFileResult.data)
|
|
291
|
+
return ok(api)
|
|
292
|
+
} catch (error) {
|
|
293
|
+
return err({
|
|
294
|
+
code: 'INVALID_SYNTAX',
|
|
295
|
+
message: 'Failed to analyze TypeScript content',
|
|
296
|
+
cause: error,
|
|
297
|
+
} satisfies ParseError)
|
|
298
|
+
}
|
|
299
|
+
}
|