@kubb/fabric-core 0.2.19 → 0.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{Fabric-CVe8cc8b.d.ts → Fabric-RmoYWGrr.d.cts} +4 -4
- package/dist/{Fabric-BezqNTQ9.d.cts → Fabric-cIhiQpgN.d.ts} +4 -4
- package/dist/defineProperty-DwFON4j7.cjs +367 -0
- package/dist/defineProperty-DwFON4j7.cjs.map +1 -0
- package/dist/defineProperty-fiNt9UhD.js +325 -0
- package/dist/defineProperty-fiNt9UhD.js.map +1 -0
- package/dist/{getRelativePath-C6lvNCs7.cjs → getRelativePath-eCdp2Z8M.cjs} +1 -2
- package/dist/{getRelativePath-C6lvNCs7.cjs.map → getRelativePath-eCdp2Z8M.cjs.map} +1 -1
- package/dist/index.cjs +20 -21
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.js +20 -19
- package/dist/index.js.map +1 -1
- package/dist/parsers/typescript.cjs +2 -2
- package/dist/parsers/typescript.d.cts +2 -2
- package/dist/parsers/typescript.d.ts +2 -2
- package/dist/parsers/typescript.js +1 -1
- package/dist/parsers.cjs +2 -2
- package/dist/parsers.d.cts +2 -2
- package/dist/parsers.d.ts +2 -2
- package/dist/parsers.js +1 -1
- package/dist/plugins.cjs +79 -46
- package/dist/plugins.cjs.map +1 -1
- package/dist/plugins.d.cts +1 -1
- package/dist/plugins.d.ts +1 -1
- package/dist/plugins.js +78 -43
- package/dist/plugins.js.map +1 -1
- package/dist/types.d.cts +1 -1
- package/dist/types.d.ts +1 -1
- package/dist/{typescriptParser-B5SxjtvV.d.ts → typescriptParser-BjqVuRHF.d.cts} +3 -14
- package/dist/{typescriptParser-CWT7zCJy.js → typescriptParser-CvJg4PQJ.js} +27 -45
- package/dist/typescriptParser-CvJg4PQJ.js.map +1 -0
- package/dist/{typescriptParser-PfAO0SSm.d.cts → typescriptParser-Cy9_9o6I.d.ts} +3 -14
- package/dist/{typescriptParser-CNHO6H2_.cjs → typescriptParser-D6-3Z7Lj.cjs} +28 -46
- package/dist/typescriptParser-D6-3Z7Lj.cjs.map +1 -0
- package/package.json +1 -1
- package/src/Fabric.ts +1 -1
- package/src/FileManager.ts +2 -2
- package/src/FileProcessor.ts +8 -15
- package/src/createFile.ts +110 -57
- package/src/defineFabric.ts +15 -3
- package/src/parsers/typescriptParser.ts +55 -73
- package/src/plugins/barrelPlugin.ts +63 -36
- package/src/utils/TreeNode.ts +54 -27
- package/dist/defineProperty-DZi5DvrW.cjs +0 -390
- package/dist/defineProperty-DZi5DvrW.cjs.map +0 -1
- package/dist/defineProperty-DcP1vZ2K.js +0 -346
- package/dist/defineProperty-DcP1vZ2K.js.map +0 -1
- package/dist/typescriptParser-CNHO6H2_.cjs.map +0 -1
- package/dist/typescriptParser-CWT7zCJy.js.map +0 -1
package/src/createFile.ts
CHANGED
@@ -1,7 +1,7 @@
 import { createHash } from 'node:crypto'
 import path from 'node:path'
 import { orderBy } from 'natural-orderby'
-import {
+import { uniqueBy } from 'remeda'
 import type * as KubbFile from './KubbFile.ts'
 import { trimExtName } from './utils/trimExtName.ts'

@@ -10,53 +10,78 @@ export function combineSources(sources: Array<KubbFile.Source>): Array<KubbFile.
 }

 export function combineExports(exports: Array<KubbFile.Export>): Array<KubbFile.Export> {
-
+  const sorted = orderBy(exports, [
     (v) => !!Array.isArray(v.name),
     (v) => !v.isTypeOnly,
     (v) => v.path,
     (v) => !!v.name,
     (v) => (Array.isArray(v.name) ? orderBy(v.name) : v.name),
-  ])
-
-
-
-
-
-
-
-
-
+  ])
+
+  const prev: Array<KubbFile.Export> = []
+  // Map to track items by path for O(1) lookup
+  const pathMap = new Map<string, KubbFile.Export>()
+  // Map to track unique items by path+name+isTypeOnly+asAlias
+  const uniqueMap = new Map<string, KubbFile.Export>()
+  // Map to track items by path+name where isTypeOnly=true (for type-only check)
+  const pathNameTypeTrueMap = new Map<string, KubbFile.Export>()
+
+  for (const curr of sorted) {
+    const name = curr.name
+    const pathKey = curr.path
+    const prevByPath = pathMap.get(pathKey)
+
+    // Create unique key for path+name+isTypeOnly
+    const nameKey = Array.isArray(name) ? JSON.stringify(name) : name || ''
+    const pathNameTypeKey = `${pathKey}:${nameKey}:${curr.isTypeOnly}`
+    // Check if there's already an item with the same path+name but with isTypeOnly=true
+    const pathNameKey = `${pathKey}:${nameKey}`
+    const prevByPathAndIsTypeOnly = pathNameTypeTrueMap.get(pathNameKey)
+
+    if (prevByPathAndIsTypeOnly) {
+      // we already have an export that has the same path and name but uses `isTypeOnly` (export type ...)
+      continue
+    }

-
-
-
+    // Create unique key for path+name+isTypeOnly+asAlias
+    const uniqueKey = `${pathNameTypeKey}:${curr.asAlias || ''}`
+    const uniquePrev = uniqueMap.get(uniqueKey)

-
-
-
-
+    // we already have an item that was unique enough or name field is empty or prev asAlias is set but current has no changes
+    if (uniquePrev || (Array.isArray(name) && !name.length) || (prevByPath?.asAlias && !curr.asAlias)) {
+      continue
+    }

-
-
-
-
-
-
-
-
+    if (!prevByPath) {
+      const newItem = {
+        ...curr,
+        name: Array.isArray(name) ? [...new Set(name)] : name,
+      }
+      prev.push(newItem)
+      pathMap.set(pathKey, newItem)
+      uniqueMap.set(uniqueKey, newItem)
+      // Track items with isTypeOnly=true for the type-only check
+      if (newItem.isTypeOnly) {
+        pathNameTypeTrueMap.set(pathNameKey, newItem)
       }
+      continue
+    }

-
-
-
+    // merge all names when prev and current both have the same isTypeOnly set
+    if (prevByPath && Array.isArray(prevByPath.name) && Array.isArray(curr.name) && prevByPath.isTypeOnly === curr.isTypeOnly) {
+      prevByPath.name = [...new Set([...prevByPath.name, ...curr.name])]
+      continue
+    }

-
-
+    prev.push(curr)
+    uniqueMap.set(uniqueKey, curr)
+    // Track items with isTypeOnly=true for the type-only check
+    if (curr.isTypeOnly) {
+      pathNameTypeTrueMap.set(pathNameKey, curr)
+    }
+  }

-
-    },
-    [] as Array<KubbFile.Export>,
-  )
+  return prev
 }

 export function combineImports(imports: Array<KubbFile.Import>, exports: Array<KubbFile.Export>, source?: string): Array<KubbFile.Import> {

@@ -96,18 +121,28 @@ export function combineImports(imports: Array<KubbFile.Import>, exports: Array<K
     return isUsed
   }

-
+  const sorted = orderBy(imports, [
     (v) => !!Array.isArray(v.name),
     (v) => !v.isTypeOnly,
     (v) => v.path,
     (v) => !!v.name,
     (v) => (Array.isArray(v.name) ? orderBy(v.name) : v.name),
-  ])
+  ])
+
+  const prev: Array<KubbFile.Import> = []
+  // Map to track items by path+isTypeOnly for O(1) lookup
+  const pathTypeMap = new Map<string, KubbFile.Import>()
+  // Map to track unique items by path+name+isTypeOnly
+  const uniqueMap = new Map<string, KubbFile.Import>()
+  // Map to track items by path+name where isTypeOnly=true (for type-only check)
+  const pathNameTypeTrueMap = new Map<string, KubbFile.Import>()
+
+  for (const curr of sorted) {
     let name = Array.isArray(curr.name) ? [...new Set(curr.name)] : curr.name

     if (curr.path === curr.root) {
       // root and path are the same file, remove the "./" import
-
+      continue
     }

     // merge all names and check if the importName is being used in the generated source and if not filter those imports out

@@ -115,45 +150,63 @@ export function combineImports(imports: Array<KubbFile.Import>, exports: Array<K
       name = name.filter((item) => (typeof item === 'string' ? hasImportInSource(item) : hasImportInSource(item.propertyName)))
     }

-    const
-    const
-
+    const pathTypeKey = `${curr.path}:${curr.isTypeOnly}`
+    const prevByPath = pathTypeMap.get(pathTypeKey)
+
+    // Create key for name comparison
+    const nameKey = Array.isArray(name) ? JSON.stringify(name) : name || ''
+    const pathNameTypeKey = `${curr.path}:${nameKey}:${curr.isTypeOnly}`
+    const uniquePrev = uniqueMap.get(pathNameTypeKey)
+    // Check if there's already an item with the same path+name but with isTypeOnly=true
+    const pathNameKey = `${curr.path}:${nameKey}`
+    const prevByPathNameAndIsTypeOnly = pathNameTypeTrueMap.get(pathNameKey)

     if (prevByPathNameAndIsTypeOnly) {
-      // we already have an
-
+      // we already have an import that has the same path and name but uses `isTypeOnly` (import type ...)
+      continue
     }

     // already unique enough or name is empty
     if (uniquePrev || (Array.isArray(name) && !name.length)) {
-
+      continue
     }

     // new item, append name
     if (!prevByPath) {
-
-      ...
-
-
-
-
-
+      const newItem = {
+        ...curr,
+        name,
+      }
+      prev.push(newItem)
+      pathTypeMap.set(pathTypeKey, newItem)
+      uniqueMap.set(pathNameTypeKey, newItem)
+      // Track items with isTypeOnly=true for the type-only check
+      if (newItem.isTypeOnly) {
+        pathNameTypeTrueMap.set(pathNameKey, newItem)
+      }
+      continue
     }

     // merge all names when prev and current both have the same isTypeOnly set
     if (prevByPath && Array.isArray(prevByPath.name) && Array.isArray(name) && prevByPath.isTypeOnly === curr.isTypeOnly) {
       prevByPath.name = [...new Set([...prevByPath.name, ...name])]
-
-      return prev
+      continue
     }

     // no import was found in the source, ignore import
     if (!Array.isArray(name) && name && !hasImportInSource(name)) {
-
+      continue
     }

-
-
+    prev.push(curr)
+    uniqueMap.set(pathNameTypeKey, curr)
+    // Track items with isTypeOnly=true for the type-only check
+    if (curr.isTypeOnly) {
+      pathNameTypeTrueMap.set(pathNameKey, curr)
+    }
+  }
+
+  return prev
 }

 /**
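Note: combineExports and combineImports now deduplicate with Map lookups keyed on path, name, isTypeOnly and asAlias instead of the previous reduce-based accumulation. A minimal standalone sketch of that keying idea (the Exp shape and dedupeExports helper are illustrative only, not the package's actual types):

// Illustrative sketch of key-based export deduplication.
type Exp = { path: string; name?: string | Array<string>; isTypeOnly?: boolean; asAlias?: boolean }

function dedupeExports(items: Array<Exp>): Array<Exp> {
  const result: Array<Exp> = []
  const seen = new Map<string, Exp>()

  for (const item of items) {
    // Array names are serialized so ['a', 'b'] and ['a', 'b'] collapse to one key
    const nameKey = Array.isArray(item.name) ? JSON.stringify(item.name) : item.name || ''
    // path + name + isTypeOnly + asAlias identifies one export statement
    const key = `${item.path}:${nameKey}:${item.isTypeOnly}:${item.asAlias || ''}`
    if (seen.has(key)) {
      continue
    }
    seen.set(key, item)
    result.push(item)
  }

  return result
}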
package/src/defineFabric.ts
CHANGED
@@ -1,6 +1,7 @@
 import { isFunction } from 'remeda'
 import type { Fabric, FabricConfig, FabricContext, FabricEvents, FabricOptions } from './Fabric.ts'
 import { FileManager } from './FileManager.ts'
+import type * as KubbFile from './KubbFile.ts'
 import type { Parser } from './parsers/types.ts'
 import type { Plugin } from './plugins/types.ts'
 import { AsyncEventEmitter } from './utils/AsyncEventEmitter.ts'

@@ -30,7 +31,8 @@ export function defineFabric<T extends FabricOptions>(init?: FabricInitializer<T
   function create(config: FabricConfig<T> = { mode: 'sequential' } as FabricConfig<T>): Fabric<T> {
     const events = new AsyncEventEmitter<FabricEvents>()
     const installedPlugins = new Set<Plugin<any>>()
-    const installedParsers = new
+    const installedParsers = new Map<KubbFile.Extname, Parser<any>>()
+    const installedParserNames = new Set<string>()
     const fileManager = new FileManager({ events })

     const context: FabricContext<T> = {

@@ -76,10 +78,20 @@ export function defineFabric<T extends FabricOptions>(init?: FabricInitializer<T
       }

       if (pluginOrParser.type === 'parser') {
-        if (
+        if (installedParserNames.has(pluginOrParser.name)) {
           console.warn(`Parser "${pluginOrParser.name}" already applied.`)
         } else {
-
+          installedParserNames.add(pluginOrParser.name)
+        }
+
+        if (pluginOrParser.extNames) {
+          for (const extName of pluginOrParser.extNames) {
+            const existing = installedParsers.get(extName)
+            if (existing && existing.name !== pluginOrParser.name) {
+              console.warn(`Parser "${pluginOrParser.name}" is overriding parser "${existing.name}" for extension "${extName}".`)
+            }
+            installedParsers.set(extName, pluginOrParser)
+          }
         }
       }

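Note: parsers are now tracked both by name and per file extension, and registering a second parser for an extension logs an override warning. A minimal standalone sketch of that registry behaviour (ParserLike and registerParser are illustrative names, not the package's API):

// Illustrative sketch of the per-extension parser registry with override warnings.
type ParserLike = { name: string; extNames?: Array<string> }

const installedParsers = new Map<string, ParserLike>()

function registerParser(parser: ParserLike): void {
  for (const extName of parser.extNames ?? []) {
    const existing = installedParsers.get(extName)
    if (existing && existing.name !== parser.name) {
      console.warn(`Parser "${parser.name}" is overriding parser "${existing.name}" for extension "${extName}".`)
    }
    installedParsers.set(extName, parser)
  }
}

registerParser({ name: 'typescript', extNames: ['.ts', '.js'] })
registerParser({ name: 'custom', extNames: ['.ts'] }) // warns: "custom" overrides "typescript" for ".ts"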
package/src/parsers/typescriptParser.ts
CHANGED
@@ -6,29 +6,11 @@ import { createParser } from './createParser.ts'

 const { factory } = ts

-type PrintOptions = {
-  source?: string
-  baseName?: string
-  scriptKind?: ts.ScriptKind
-}
-
-/**
- * Escaped new lines in code with block comments so they can be restored by {@link restoreNewLines}
- */
-const escapeNewLines = (code: string) => code.replace(/\n\n/g, '\n/* :newline: */')
-
-/**
- * Reverses {@link escapeNewLines} and restores new lines
- */
-const restoreNewLines = (code: string) => code.replace(/\/\* :newline: \*\//g, '\n')
-
 /**
  * Convert AST TypeScript/TSX nodes to a string based on the TypeScript printer.
- * Ensures consistent output across environments.
- * Also works as a formatter when `source` is provided without `elements`.
  */
-export function print(elements: Array<ts.Node>
-  const sourceFile = ts.createSourceFile(
+export function print(...elements: Array<ts.Node>): string {
+  const sourceFile = ts.createSourceFile('print.tsx', '', ts.ScriptTarget.ES2022, true, ts.ScriptKind.TSX)

   const printer = ts.createPrinter({
     omitTrailingSemicolon: true,

@@ -37,18 +19,15 @@ export function print(elements: Array<ts.Node> = [], { source = '', baseName = '
     noEmitHelpers: true,
   })

-
-
-
-
-  const nodes = elements.filter(Boolean).sort((a, b) => (a.pos ?? 0) - (b.pos ?? 0))
-    output = printer.printList(ts.ListFormat.MultiLine, factory.createNodeArray(nodes), sourceFile)
-  } else {
-    // Format the whole file
-    output = printer.printFile(sourceFile)
+  for (const node of elements) {
+    if (node.kind === ts.SyntaxKind.Unknown) {
+      console.error('⚠️ Unknown node found:', node)
+    }
   }

-
+  const output = printer.printList(ts.ListFormat.MultiLine, factory.createNodeArray(elements.filter(Boolean)), sourceFile)
+
+  return output.replace(/\r\n/g, '\n')
 }

 export function createImport({

@@ -66,43 +45,38 @@ export function createImport({
 }) {
   const resolvePath = root ? getRelativePath(root, path) : path

+  // Namespace or default import
   if (!Array.isArray(name)) {
-    let importPropertyName: ts.Identifier | undefined = factory.createIdentifier(name)
-    let importName: ts.NamedImportBindings | undefined
-
     if (isNameSpace) {
-
-
+      return factory.createImportDeclaration(
+        undefined,
+        factory.createImportClause(isTypeOnly, undefined, factory.createNamespaceImport(factory.createIdentifier(name))),
+        factory.createStringLiteral(resolvePath),
+        undefined,
+      )
     }

     return factory.createImportDeclaration(
       undefined,
-      factory.createImportClause(isTypeOnly,
+      factory.createImportClause(isTypeOnly, factory.createIdentifier(name), undefined),
       factory.createStringLiteral(resolvePath),
       undefined,
     )
   }

+  // Named imports
+  const specifiers = name.map((item) => {
+    if (typeof item === 'object') {
+      const { propertyName, name: alias } = item
+      return factory.createImportSpecifier(false, alias ? factory.createIdentifier(propertyName) : undefined, factory.createIdentifier(alias ?? propertyName))
+    }
+
+    return factory.createImportSpecifier(false, undefined, factory.createIdentifier(item))
+  })
+
   return factory.createImportDeclaration(
     undefined,
-    factory.createImportClause(
-      isTypeOnly,
-      undefined,
-      factory.createNamedImports(
-        name.map((item) => {
-          if (typeof item === 'object') {
-            const obj = item as { propertyName: string; name?: string }
-            if (obj.name) {
-              return factory.createImportSpecifier(false, factory.createIdentifier(obj.propertyName), factory.createIdentifier(obj.name))
-            }
-
-            return factory.createImportSpecifier(false, undefined, factory.createIdentifier(obj.propertyName))
-          }
-
-          return factory.createImportSpecifier(false, undefined, factory.createIdentifier(item))
-        }),
-      ),
-    ),
+    factory.createImportClause(isTypeOnly, undefined, factory.createNamedImports(specifiers)),
     factory.createStringLiteral(resolvePath),
     undefined,
   )

@@ -153,36 +127,44 @@ export const typescriptParser = createParser({
   extNames: ['.ts', '.js'],
   install() {},
   async parse(file, options = { extname: '.ts' }) {
-    const
+    const sourceParts: Array<string> = []
+    for (const item of file.sources) {
+      if (item.value) {
+        sourceParts.push(item.value)
+      }
+    }
+    const source = sourceParts.join('\n\n')

-    const importNodes =
-
-
-
+    const importNodes: Array<ts.ImportDeclaration> = []
+    for (const item of file.imports) {
+      const importPath = item.root ? getRelativePath(item.root, item.path) : item.path
+      const hasExtname = !!path.extname(importPath)

-
+      importNodes.push(
+        createImport({
          name: item.name,
          path: options.extname && hasExtname ? `${trimExtName(importPath)}${options.extname}` : item.root ? trimExtName(importPath) : importPath,
          isTypeOnly: item.isTypeOnly,
-        })
-
-
-
-    const exportNodes = file.exports
-      .map((item) => {
-        const exportPath = item.path
+        }),
+      )
+    }

-
+    const exportNodes: Array<ts.ExportDeclaration> = []
+    for (const item of file.exports) {
+      const exportPath = item.path
+      const hasExtname = !!path.extname(exportPath)

-
+      exportNodes.push(
+        createExport({
          name: item.name,
          path: options.extname && hasExtname ? `${trimExtName(item.path)}${options.extname}` : trimExtName(item.path),
          isTypeOnly: item.isTypeOnly,
          asAlias: item.asAlias,
-        })
-
-
+        }),
+      )
+    }

-
+    const parts = [file.banner, print(...importNodes, ...exportNodes), source, file.footer].filter((segment): segment is string => segment != null)
+    return parts.join('\n')
   },
 })
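Note: print() is now variadic and always prints the given nodes with the TypeScript printer; the old source/baseName formatting path was removed. A rough standalone equivalent using only the public typescript API (printNodes and the example statement are illustrative, not part of the package):

import ts from 'typescript'

// Illustrative standalone version of a variadic node printer.
function printNodes(...elements: Array<ts.Node>): string {
  const sourceFile = ts.createSourceFile('print.tsx', '', ts.ScriptTarget.ES2022, true, ts.ScriptKind.TSX)
  const printer = ts.createPrinter({ omitTrailingSemicolon: true })
  const output = printer.printList(ts.ListFormat.MultiLine, ts.factory.createNodeArray(elements.filter(Boolean)), sourceFile)
  return output.replace(/\r\n/g, '\n')
}

// Example: build and print `export const answer = 42`.
const statement = ts.factory.createVariableStatement(
  [ts.factory.createModifier(ts.SyntaxKind.ExportKeyword)],
  ts.factory.createVariableDeclarationList(
    [ts.factory.createVariableDeclaration('answer', undefined, undefined, ts.factory.createNumericLiteral(42))],
    ts.NodeFlags.Const,
  ),
)
console.log(printNodes(statement))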
package/src/plugins/barrelPlugin.ts
CHANGED
@@ -50,6 +50,20 @@ export function getBarrelFiles({ files, root, mode }: GetBarrelFilesOptions): Ar
     return []
   }

+  const indexableSourcesMap = new Map<KubbFile.File, Array<KubbFile.Source>>()
+
+  for (const file of files) {
+    const indexableSources: Array<KubbFile.Source> = []
+    for (const source of file.sources || []) {
+      if (source.isIndexable && source.name) {
+        indexableSources.push(source)
+      }
+    }
+    if (indexableSources.length > 0) {
+      indexableSourcesMap.set(file, indexableSources)
+    }
+  }
+
   const cachedFiles = new Map<KubbFile.Path, KubbFile.File>()
   const dedupe = new Map<KubbFile.Path, Set<string>>()

@@ -82,41 +96,40 @@ export function getBarrelFiles({ files, root, mode }: GetBarrelFilesOptions): Ar

     const seen = dedupe.get(barrelPath)!

-
-    node.leaves.forEach((leaf) => {
+    for (const leaf of node.leaves) {
       const file = leaf.data.file
-      if (!file) {
-
+      if (!file || !file.path) {
+        continue
       }

-      const
-
-
-
-      }
+      const indexableSources = indexableSourcesMap.get(file)
+      if (!indexableSources) {
+        continue
+      }

+      for (const source of indexableSources) {
         const key = `${source.name}|${source.isTypeOnly ? '1' : '0'}`
         if (seen.has(key)) {
-
+          continue
         }
         seen.add(key)

         // Always compute relative path from the parent directory to the file path
         barrelFile!.exports!.push({
-          name: [source.name],
+          name: [source.name!],
           path: getRelativePath(parentPath, file.path),
           isTypeOnly: source.isTypeOnly,
         })

         barrelFile!.sources.push({
-          name: source.name
+          name: source.name!,
           isTypeOnly: source.isTypeOnly,
           value: '', // TODO use parser to generate import
           isExportable: mode === 'all' || mode === 'named',
           isIndexable: mode === 'all' || mode === 'named',
         })
-      }
-    }
+      }
+    }
     })

     const result = [...cachedFiles.values()]

@@ -162,32 +175,46 @@ export const barrelPlugin = createPlugin<Options, ExtendOptions>({

     const rootPath = path.resolve(root, 'index.ts')

-    const barrelFiles
-
-
+    const barrelFiles: Array<KubbFile.ResolvedFile> = []
+    for (const file of ctx.files) {
+      for (const source of file.sources) {
+        if (source.isIndexable) {
+          barrelFiles.push(file)
+
+          break
+        }
+      }
+    }
+
+    const fileTypeCache = new Map<KubbFile.ResolvedFile, boolean>()
+    for (const file of barrelFiles) {
+      fileTypeCache.set(
+        file,
+        file.sources.every((source) => source.isTypeOnly),
+      )
+    }
+
+    const exports: Array<KubbFile.Export> = []
+    for (const file of barrelFiles) {
+      const containsOnlyTypes = fileTypeCache.get(file) ?? false
+
+      for (const source of file.sources) {
+        if (!file.path || !source.isIndexable) {
+          continue
+        }
+
+        exports.push({
+          name: mode === 'all' ? undefined : [source.name],
+          path: getRelativePath(rootPath, file.path),
+          isTypeOnly: mode === 'all' ? containsOnlyTypes : source.isTypeOnly,
+        } as KubbFile.Export)
+      }
+    }

     const entryFile = createFile({
       path: rootPath,
       baseName: 'index.ts',
-      exports
-        .flatMap((file) => {
-          const containsOnlyTypes = file.sources.every((source) => source.isTypeOnly)
-
-          return file.sources
-            ?.map((source) => {
-              if (!file.path || !source.isIndexable) {
-                return undefined
-              }
-
-              return {
-                name: mode === 'all' ? undefined : [source.name],
-                path: getRelativePath(rootPath, file.path),
-                isTypeOnly: mode === 'all' ? containsOnlyTypes : source.isTypeOnly,
-              } as KubbFile.Export
-            })
-            .filter(Boolean)
-        })
-        .filter(Boolean),
+      exports,
       sources: [],
     })
