@kubb/fabric-core 0.2.18 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{Fabric-CVe8cc8b.d.ts → Fabric-RmoYWGrr.d.cts} +4 -4
- package/dist/{Fabric-BezqNTQ9.d.cts → Fabric-cIhiQpgN.d.ts} +4 -4
- package/dist/defineProperty-DwFON4j7.cjs +367 -0
- package/dist/defineProperty-DwFON4j7.cjs.map +1 -0
- package/dist/defineProperty-fiNt9UhD.js +325 -0
- package/dist/defineProperty-fiNt9UhD.js.map +1 -0
- package/dist/{getRelativePath-C6lvNCs7.cjs → getRelativePath-eCdp2Z8M.cjs} +1 -2
- package/dist/{getRelativePath-C6lvNCs7.cjs.map → getRelativePath-eCdp2Z8M.cjs.map} +1 -1
- package/dist/index.cjs +26 -27
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.js +26 -25
- package/dist/index.js.map +1 -1
- package/dist/parsers/typescript.cjs +2 -2
- package/dist/parsers/typescript.d.cts +2 -2
- package/dist/parsers/typescript.d.ts +2 -2
- package/dist/parsers/typescript.js +1 -1
- package/dist/parsers.cjs +2 -2
- package/dist/parsers.d.cts +2 -2
- package/dist/parsers.d.ts +2 -2
- package/dist/parsers.js +1 -1
- package/dist/plugins.cjs +79 -46
- package/dist/plugins.cjs.map +1 -1
- package/dist/plugins.d.cts +1 -1
- package/dist/plugins.d.ts +1 -1
- package/dist/plugins.js +78 -43
- package/dist/plugins.js.map +1 -1
- package/dist/types.d.cts +1 -1
- package/dist/types.d.ts +1 -1
- package/dist/{typescriptParser-CWT7zCJy.js → typescriptParser-BFhqWjdo.js} +20 -35
- package/dist/typescriptParser-BFhqWjdo.js.map +1 -0
- package/dist/{typescriptParser-B5SxjtvV.d.ts → typescriptParser-BjqVuRHF.d.cts} +3 -14
- package/dist/{typescriptParser-PfAO0SSm.d.cts → typescriptParser-Cy9_9o6I.d.ts} +3 -14
- package/dist/{typescriptParser-CNHO6H2_.cjs → typescriptParser-DJxEGCz3.cjs} +21 -36
- package/dist/typescriptParser-DJxEGCz3.cjs.map +1 -0
- package/package.json +1 -1
- package/src/Fabric.ts +1 -1
- package/src/FileManager.ts +8 -8
- package/src/FileProcessor.ts +8 -15
- package/src/createFile.ts +110 -57
- package/src/defineFabric.ts +15 -3
- package/src/parsers/typescriptParser.ts +33 -50
- package/src/plugins/barrelPlugin.ts +63 -36
- package/src/utils/TreeNode.ts +54 -27
- package/dist/defineProperty-DZi5DvrW.cjs +0 -390
- package/dist/defineProperty-DZi5DvrW.cjs.map +0 -1
- package/dist/defineProperty-DcP1vZ2K.js +0 -346
- package/dist/defineProperty-DcP1vZ2K.js.map +0 -1
- package/dist/typescriptParser-CNHO6H2_.cjs.map +0 -1
- package/dist/typescriptParser-CWT7zCJy.js.map +0 -1
package/dist/typescriptParser-BFhqWjdo.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"typescriptParser-BFhqWjdo.js","names":["path","importPropertyName: ts.Identifier | undefined","importName: ts.NamedImportBindings | undefined","sourceParts: Array<string>","importNodes: Array<ts.ImportDeclaration>","exportNodes: Array<ts.ExportDeclaration>"],"sources":["../src/parsers/typescriptParser.ts"],"sourcesContent":["import path from 'node:path'\nimport ts from 'typescript'\nimport { getRelativePath } from '../utils/getRelativePath.ts'\nimport { trimExtName } from '../utils/trimExtName.ts'\nimport { createParser } from './createParser.ts'\n\nconst { factory } = ts\n\n/**\n * Convert AST TypeScript/TSX nodes to a string based on the TypeScript printer.\n */\nexport function print(...elements: Array<ts.Node>): string {\n const sourceFile = ts.createSourceFile('print.tsx', '', ts.ScriptTarget.ES2022, true, ts.ScriptKind.TSX)\n\n const printer = ts.createPrinter({\n omitTrailingSemicolon: true,\n newLine: ts.NewLineKind.LineFeed,\n removeComments: false,\n noEmitHelpers: true,\n })\n\n const output = printer.printList(ts.ListFormat.MultiLine, factory.createNodeArray(elements), sourceFile)\n\n return output.replace(/\\r\\n/g, '\\n')\n}\n\nexport function createImport({\n name,\n path,\n root,\n isTypeOnly = false,\n isNameSpace = false,\n}: {\n name: string | Array<string | { propertyName: string; name?: string }>\n path: string\n root?: string\n isTypeOnly?: boolean\n isNameSpace?: boolean\n}) {\n const resolvePath = root ? getRelativePath(root, path) : path\n\n if (!Array.isArray(name)) {\n let importPropertyName: ts.Identifier | undefined = factory.createIdentifier(name)\n let importName: ts.NamedImportBindings | undefined\n\n if (isNameSpace) {\n importPropertyName = undefined\n importName = factory.createNamespaceImport(factory.createIdentifier(name))\n }\n\n return factory.createImportDeclaration(\n undefined,\n factory.createImportClause(isTypeOnly, importPropertyName, importName),\n factory.createStringLiteral(resolvePath),\n undefined,\n )\n }\n\n return factory.createImportDeclaration(\n undefined,\n factory.createImportClause(\n isTypeOnly,\n undefined,\n factory.createNamedImports(\n name.map((item) => {\n if (typeof item === 'object') {\n const obj = item as { propertyName: string; name?: string }\n if (obj.name) {\n return factory.createImportSpecifier(false, factory.createIdentifier(obj.propertyName), factory.createIdentifier(obj.name))\n }\n\n return factory.createImportSpecifier(false, undefined, factory.createIdentifier(obj.propertyName))\n }\n\n return factory.createImportSpecifier(false, undefined, factory.createIdentifier(item))\n }),\n ),\n ),\n factory.createStringLiteral(resolvePath),\n undefined,\n )\n}\n\nexport function createExport({\n path,\n asAlias,\n isTypeOnly = false,\n name,\n}: {\n path: string\n asAlias?: boolean\n isTypeOnly?: boolean\n name?: string | Array<ts.Identifier | string>\n}) {\n if (name && !Array.isArray(name) && !asAlias) {\n console.warn(`When using name as string, asAlias should be true ${name}`)\n }\n\n if (!Array.isArray(name)) {\n const parsedName = name?.match(/^\\d/) ? `_${name?.slice(1)}` : name\n\n return factory.createExportDeclaration(\n undefined,\n isTypeOnly,\n asAlias && parsedName ? 
factory.createNamespaceExport(factory.createIdentifier(parsedName)) : undefined,\n factory.createStringLiteral(path),\n undefined,\n )\n }\n\n return factory.createExportDeclaration(\n undefined,\n isTypeOnly,\n factory.createNamedExports(\n name.map((propertyName) => {\n return factory.createExportSpecifier(false, undefined, typeof propertyName === 'string' ? factory.createIdentifier(propertyName) : propertyName)\n }),\n ),\n factory.createStringLiteral(path),\n undefined,\n )\n}\n\nexport const typescriptParser = createParser({\n name: 'typescript',\n extNames: ['.ts', '.js'],\n install() {},\n async parse(file, options = { extname: '.ts' }) {\n const sourceParts: Array<string> = []\n for (const item of file.sources) {\n if (item.value) {\n sourceParts.push(item.value)\n }\n }\n const source = sourceParts.join('\\n\\n')\n\n const importNodes: Array<ts.ImportDeclaration> = []\n for (const item of file.imports) {\n const importPath = item.root ? getRelativePath(item.root, item.path) : item.path\n const hasExtname = !!path.extname(importPath)\n\n importNodes.push(\n createImport({\n name: item.name,\n path: options.extname && hasExtname ? `${trimExtName(importPath)}${options.extname}` : item.root ? trimExtName(importPath) : importPath,\n isTypeOnly: item.isTypeOnly,\n }),\n )\n }\n\n const exportNodes: Array<ts.ExportDeclaration> = []\n for (const item of file.exports) {\n const exportPath = item.path\n const hasExtname = !!path.extname(exportPath)\n\n exportNodes.push(\n createExport({\n name: item.name,\n path: options.extname && hasExtname ? `${trimExtName(item.path)}${options.extname}` : trimExtName(item.path),\n isTypeOnly: item.isTypeOnly,\n asAlias: item.asAlias,\n }),\n )\n }\n\n const parts = [file.banner, print(...importNodes, ...exportNodes), source, file.footer].filter(\n (segment): segment is string => segment != null,\n )\n return parts.join('\\n')\n 
},\n})\n"],"mappings":";;;;;;;AAMA,MAAM,EAAE,YAAY;;;;AAKpB,SAAgB,MAAM,GAAG,UAAkC;CACzD,MAAM,aAAa,GAAG,iBAAiB,aAAa,IAAI,GAAG,aAAa,QAAQ,MAAM,GAAG,WAAW,IAAI;AAWxG,QATgB,GAAG,cAAc;EAC/B,uBAAuB;EACvB,SAAS,GAAG,YAAY;EACxB,gBAAgB;EAChB,eAAe;EAChB,CAAC,CAEqB,UAAU,GAAG,WAAW,WAAW,QAAQ,gBAAgB,SAAS,EAAE,WAAW,CAE1F,QAAQ,SAAS,KAAK;;AAGtC,SAAgB,aAAa,EAC3B,MACA,cACA,MACA,aAAa,OACb,cAAc,SAOb;CACD,MAAM,cAAc,OAAO,gBAAgB,MAAMA,OAAK,GAAGA;AAEzD,KAAI,CAAC,MAAM,QAAQ,KAAK,EAAE;EACxB,IAAIC,qBAAgD,QAAQ,iBAAiB,KAAK;EAClF,IAAIC;AAEJ,MAAI,aAAa;AACf,wBAAqB;AACrB,gBAAa,QAAQ,sBAAsB,QAAQ,iBAAiB,KAAK,CAAC;;AAG5E,SAAO,QAAQ,wBACb,QACA,QAAQ,mBAAmB,YAAY,oBAAoB,WAAW,EACtE,QAAQ,oBAAoB,YAAY,EACxC,OACD;;AAGH,QAAO,QAAQ,wBACb,QACA,QAAQ,mBACN,YACA,QACA,QAAQ,mBACN,KAAK,KAAK,SAAS;AACjB,MAAI,OAAO,SAAS,UAAU;GAC5B,MAAM,MAAM;AACZ,OAAI,IAAI,KACN,QAAO,QAAQ,sBAAsB,OAAO,QAAQ,iBAAiB,IAAI,aAAa,EAAE,QAAQ,iBAAiB,IAAI,KAAK,CAAC;AAG7H,UAAO,QAAQ,sBAAsB,OAAO,QAAW,QAAQ,iBAAiB,IAAI,aAAa,CAAC;;AAGpG,SAAO,QAAQ,sBAAsB,OAAO,QAAW,QAAQ,iBAAiB,KAAK,CAAC;GACtF,CACH,CACF,EACD,QAAQ,oBAAoB,YAAY,EACxC,OACD;;AAGH,SAAgB,aAAa,EAC3B,cACA,SACA,aAAa,OACb,QAMC;AACD,KAAI,QAAQ,CAAC,MAAM,QAAQ,KAAK,IAAI,CAAC,QACnC,SAAQ,KAAK,qDAAqD,OAAO;AAG3E,KAAI,CAAC,MAAM,QAAQ,KAAK,EAAE;EACxB,MAAM,0DAAa,KAAM,MAAM,MAAM,IAAG,gDAAI,KAAM,MAAM,EAAE,KAAK;AAE/D,SAAO,QAAQ,wBACb,QACA,YACA,WAAW,aAAa,QAAQ,sBAAsB,QAAQ,iBAAiB,WAAW,CAAC,GAAG,QAC9F,QAAQ,oBAAoBF,OAAK,EACjC,OACD;;AAGH,QAAO,QAAQ,wBACb,QACA,YACA,QAAQ,mBACN,KAAK,KAAK,iBAAiB;AACzB,SAAO,QAAQ,sBAAsB,OAAO,QAAW,OAAO,iBAAiB,WAAW,QAAQ,iBAAiB,aAAa,GAAG,aAAa;GAChJ,CACH,EACD,QAAQ,oBAAoBA,OAAK,EACjC,OACD;;AAGH,MAAa,mBAAmB,aAAa;CAC3C,MAAM;CACN,UAAU,CAAC,OAAO,MAAM;CACxB,UAAU;CACV,MAAM,MAAM,MAAM,UAAU,EAAE,SAAS,OAAO,EAAE;EAC9C,MAAMG,cAA6B,EAAE;AACrC,OAAK,MAAM,QAAQ,KAAK,QACtB,KAAI,KAAK,MACP,aAAY,KAAK,KAAK,MAAM;EAGhC,MAAM,SAAS,YAAY,KAAK,OAAO;EAEvC,MAAMC,cAA2C,EAAE;AACnD,OAAK,MAAM,QAAQ,KAAK,SAAS;GAC/B,MAAM,aAAa,KAAK,OAAO,gBAAgB,KAAK,MAAM,KAAK,KAAK,GAAG,KAAK;GAC5E,MAAM,aAAa,CAAC,CAAC,KAAK,QAAQ,WAAW;AAE7C,eAAY,KACV,aAAa;IACX,MAAM,KAAK;IACX,MAAM,QAAQ,WAAW,aAAa,GAAG,YAAY,WAAW,GAAG,QAAQ,YAAY,KAAK,OAAO,YAAY,WAAW,GAAG;IAC7H,YAAY,KAAK;IAClB,CAAC,CACH;;EAGH,MAAMC,cAA2C,EAAE;AACnD,OAAK,MAAM,QAAQ,KAAK,SAAS;GAC/B,MAAM,aAAa,KAAK;GACxB,MAAM,aAAa,CAAC,CAAC,KAAK,QAAQ,WAAW;AAE7C,eAAY,KACV,aAAa;IACX,MAAM,KAAK;IACX,MAAM,QAAQ,WAAW,aAAa,GAAG,YAAY,KAAK,KAAK,GAAG,QAAQ,YAAY,YAAY,KAAK,KAAK;IAC5G,YAAY,KAAK;IACjB,SAAS,KAAK;IACf,CAAC,CACH;;AAMH,SAHc;GAAC,KAAK;GAAQ,MAAM,GAAG,aAAa,GAAG,YAAY;GAAE;GAAQ,KAAK;GAAO,CAAC,QACrF,YAA+B,WAAW,KAC5C,CACY,KAAK,KAAK;;CAE1B,CAAC"}
package/dist/{typescriptParser-B5SxjtvV.d.ts → typescriptParser-BjqVuRHF.d.cts}
RENAMED
@@ -1,22 +1,11 @@
-import { u as Parser } from "./Fabric-
+import { u as Parser } from "./Fabric-RmoYWGrr.cjs";
 import ts from "typescript";

 //#region src/parsers/typescriptParser.d.ts
-type PrintOptions = {
-  source?: string;
-  baseName?: string;
-  scriptKind?: ts.ScriptKind;
-};
 /**
  * Convert AST TypeScript/TSX nodes to a string based on the TypeScript printer.
- * Ensures consistent output across environments.
- * Also works as a formatter when `source` is provided without `elements`.
  */
-declare function print(elements
-  source,
-  baseName,
-  scriptKind
-}?: PrintOptions): string;
+declare function print(...elements: Array<ts.Node>): string;
 declare function createImport({
   name,
   path,
@@ -47,4 +36,4 @@ declare function createExport({
 declare const typescriptParser: Parser<[], any>;
 //#endregion
 export { typescriptParser as i, createImport as n, print as r, createExport as t };
-//# sourceMappingURL=typescriptParser-
+//# sourceMappingURL=typescriptParser-BjqVuRHF.d.cts.map
package/dist/{typescriptParser-PfAO0SSm.d.cts → typescriptParser-Cy9_9o6I.d.ts}
RENAMED
@@ -1,22 +1,11 @@
-import { u as Parser } from "./Fabric-
+import { u as Parser } from "./Fabric-cIhiQpgN.js";
 import ts from "typescript";

 //#region src/parsers/typescriptParser.d.ts
-type PrintOptions = {
-  source?: string;
-  baseName?: string;
-  scriptKind?: ts.ScriptKind;
-};
 /**
  * Convert AST TypeScript/TSX nodes to a string based on the TypeScript printer.
- * Ensures consistent output across environments.
- * Also works as a formatter when `source` is provided without `elements`.
  */
-declare function print(elements
-  source,
-  baseName,
-  scriptKind
-}?: PrintOptions): string;
+declare function print(...elements: Array<ts.Node>): string;
 declare function createImport({
   name,
   path,
@@ -47,4 +36,4 @@ declare function createExport({
 declare const typescriptParser: Parser<[], any>;
 //#endregion
 export { typescriptParser as i, createImport as n, print as r, createExport as t };
-//# sourceMappingURL=typescriptParser-
+//# sourceMappingURL=typescriptParser-Cy9_9o6I.d.ts.map
package/dist/{typescriptParser-CNHO6H2_.cjs → typescriptParser-DJxEGCz3.cjs}
RENAMED
@@ -1,6 +1,6 @@
 const require_trimExtName = require('./trimExtName-Bb4zGVF1.cjs');
 const require_createParser = require('./createParser-C4IkyTs5.cjs');
-const require_getRelativePath = require('./getRelativePath-
+const require_getRelativePath = require('./getRelativePath-eCdp2Z8M.cjs');
 let node_path = require("node:path");
 node_path = require_trimExtName.__toESM(node_path);
 let typescript = require("typescript");
@@ -9,35 +9,16 @@ typescript = require_trimExtName.__toESM(typescript);
 //#region src/parsers/typescriptParser.ts
 const { factory } = typescript.default;
 /**
- * Escaped new lines in code with block comments so they can be restored by {@link restoreNewLines}
- */
-const escapeNewLines = (code) => code.replace(/\n\n/g, "\n/* :newline: */");
-/**
- * Reverses {@link escapeNewLines} and restores new lines
- */
-const restoreNewLines = (code) => code.replace(/\/\* :newline: \*\//g, "\n");
-/**
  * Convert AST TypeScript/TSX nodes to a string based on the TypeScript printer.
- * Ensures consistent output across environments.
- * Also works as a formatter when `source` is provided without `elements`.
  */
-function print(elements
-  const sourceFile = typescript.default.createSourceFile(
-
+function print(...elements) {
+  const sourceFile = typescript.default.createSourceFile("print.tsx", "", typescript.default.ScriptTarget.ES2022, true, typescript.default.ScriptKind.TSX);
+  return typescript.default.createPrinter({
     omitTrailingSemicolon: true,
     newLine: typescript.default.NewLineKind.LineFeed,
     removeComments: false,
     noEmitHelpers: true
-  });
-  let output;
-  if (elements.length > 0) {
-    const nodes = elements.filter(Boolean).sort((a, b) => {
-      var _a$pos, _b$pos;
-      return ((_a$pos = a.pos) !== null && _a$pos !== void 0 ? _a$pos : 0) - ((_b$pos = b.pos) !== null && _b$pos !== void 0 ? _b$pos : 0);
-    });
-    output = printer.printList(typescript.default.ListFormat.MultiLine, factory.createNodeArray(nodes), sourceFile);
-  } else output = printer.printFile(sourceFile);
-  return restoreNewLines(output).replace(/\r\n/g, "\n");
+  }).printList(typescript.default.ListFormat.MultiLine, factory.createNodeArray(elements), sourceFile).replace(/\r\n/g, "\n");
 }
 function createImport({ name, path: path$1, root, isTypeOnly = false, isNameSpace = false }) {
   const resolvePath = root ? require_getRelativePath.getRelativePath(root, path$1) : path$1;
@@ -74,32 +55,36 @@ const typescriptParser = require_createParser.createParser({
   extNames: [".ts", ".js"],
   install() {},
   async parse(file, options = { extname: ".ts" }) {
-    const
-    const
+    const sourceParts = [];
+    for (const item of file.sources) if (item.value) sourceParts.push(item.value);
+    const source = sourceParts.join("\n\n");
+    const importNodes = [];
+    for (const item of file.imports) {
       const importPath = item.root ? require_getRelativePath.getRelativePath(item.root, item.path) : item.path;
       const hasExtname = !!node_path.default.extname(importPath);
-
+      importNodes.push(createImport({
         name: item.name,
         path: options.extname && hasExtname ? `${require_trimExtName.trimExtName(importPath)}${options.extname}` : item.root ? require_trimExtName.trimExtName(importPath) : importPath,
         isTypeOnly: item.isTypeOnly
-      });
-    }
-    const exportNodes =
+      }));
+    }
+    const exportNodes = [];
+    for (const item of file.exports) {
       const exportPath = item.path;
       const hasExtname = !!node_path.default.extname(exportPath);
-
+      exportNodes.push(createExport({
        name: item.name,
        path: options.extname && hasExtname ? `${require_trimExtName.trimExtName(item.path)}${options.extname}` : require_trimExtName.trimExtName(item.path),
        isTypeOnly: item.isTypeOnly,
        asAlias: item.asAlias
-      });
-    }
+      }));
+    }
     return [
       file.banner,
-      print(
+      print(...importNodes, ...exportNodes),
      source,
      file.footer
-    ].join("\n");
+    ].filter((segment) => segment != null).join("\n");
   }
 });

@@ -128,4 +113,4 @@ Object.defineProperty(exports, 'typescriptParser', {
     return typescriptParser;
   }
 });
-//# sourceMappingURL=typescriptParser-
+//# sourceMappingURL=typescriptParser-DJxEGCz3.cjs.map
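The change to the compiled parser above mirrors the rewrite of print in src/parsers/typescriptParser.ts (its full source is embedded in the source maps included in this diff): the PrintOptions object, the ":newline:" escape/restore helpers, and the node sorting are gone, and print now simply runs the TypeScript printer over a rest-parameter list of nodes. Below is a minimal, self-contained sketch of the new behavior using only the typescript package; the print body is copied from the source shown in the source map, while the import node and its specifier names are illustrative and not taken from the package.

import ts from 'typescript'

const { factory } = ts

// Same printer configuration as the new print() in typescriptParser.ts.
function print(...elements: Array<ts.Node>): string {
  const sourceFile = ts.createSourceFile('print.tsx', '', ts.ScriptTarget.ES2022, true, ts.ScriptKind.TSX)

  const printer = ts.createPrinter({
    omitTrailingSemicolon: true,
    newLine: ts.NewLineKind.LineFeed,
    removeComments: false,
    noEmitHelpers: true,
  })

  return printer.printList(ts.ListFormat.MultiLine, factory.createNodeArray(elements), sourceFile).replace(/\r\n/g, '\n')
}

// Build an illustrative `import type { Pet } from "./Pet.ts"` node and print it.
const importNode = factory.createImportDeclaration(
  undefined,
  factory.createImportClause(
    true,
    undefined,
    factory.createNamedImports([factory.createImportSpecifier(false, undefined, factory.createIdentifier('Pet'))]),
  ),
  factory.createStringLiteral('./Pet.ts'),
  undefined,
)

console.log(print(importNode)) // prints the import declaration as TypeScript source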
package/dist/typescriptParser-DJxEGCz3.cjs.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"typescriptParser-DJxEGCz3.cjs","names":["ts","getRelativePath","path","importPropertyName: ts.Identifier | undefined","importName: ts.NamedImportBindings | undefined","createParser","sourceParts: Array<string>","importNodes: Array<ts.ImportDeclaration>","trimExtName","exportNodes: Array<ts.ExportDeclaration>"],"sources":["../src/parsers/typescriptParser.ts"],"sourcesContent":["import path from 'node:path'\nimport ts from 'typescript'\nimport { getRelativePath } from '../utils/getRelativePath.ts'\nimport { trimExtName } from '../utils/trimExtName.ts'\nimport { createParser } from './createParser.ts'\n\nconst { factory } = ts\n\n/**\n * Convert AST TypeScript/TSX nodes to a string based on the TypeScript printer.\n */\nexport function print(...elements: Array<ts.Node>): string {\n const sourceFile = ts.createSourceFile('print.tsx', '', ts.ScriptTarget.ES2022, true, ts.ScriptKind.TSX)\n\n const printer = ts.createPrinter({\n omitTrailingSemicolon: true,\n newLine: ts.NewLineKind.LineFeed,\n removeComments: false,\n noEmitHelpers: true,\n })\n\n const output = printer.printList(ts.ListFormat.MultiLine, factory.createNodeArray(elements), sourceFile)\n\n return output.replace(/\\r\\n/g, '\\n')\n}\n\nexport function createImport({\n name,\n path,\n root,\n isTypeOnly = false,\n isNameSpace = false,\n}: {\n name: string | Array<string | { propertyName: string; name?: string }>\n path: string\n root?: string\n isTypeOnly?: boolean\n isNameSpace?: boolean\n}) {\n const resolvePath = root ? getRelativePath(root, path) : path\n\n if (!Array.isArray(name)) {\n let importPropertyName: ts.Identifier | undefined = factory.createIdentifier(name)\n let importName: ts.NamedImportBindings | undefined\n\n if (isNameSpace) {\n importPropertyName = undefined\n importName = factory.createNamespaceImport(factory.createIdentifier(name))\n }\n\n return factory.createImportDeclaration(\n undefined,\n factory.createImportClause(isTypeOnly, importPropertyName, importName),\n factory.createStringLiteral(resolvePath),\n undefined,\n )\n }\n\n return factory.createImportDeclaration(\n undefined,\n factory.createImportClause(\n isTypeOnly,\n undefined,\n factory.createNamedImports(\n name.map((item) => {\n if (typeof item === 'object') {\n const obj = item as { propertyName: string; name?: string }\n if (obj.name) {\n return factory.createImportSpecifier(false, factory.createIdentifier(obj.propertyName), factory.createIdentifier(obj.name))\n }\n\n return factory.createImportSpecifier(false, undefined, factory.createIdentifier(obj.propertyName))\n }\n\n return factory.createImportSpecifier(false, undefined, factory.createIdentifier(item))\n }),\n ),\n ),\n factory.createStringLiteral(resolvePath),\n undefined,\n )\n}\n\nexport function createExport({\n path,\n asAlias,\n isTypeOnly = false,\n name,\n}: {\n path: string\n asAlias?: boolean\n isTypeOnly?: boolean\n name?: string | Array<ts.Identifier | string>\n}) {\n if (name && !Array.isArray(name) && !asAlias) {\n console.warn(`When using name as string, asAlias should be true ${name}`)\n }\n\n if (!Array.isArray(name)) {\n const parsedName = name?.match(/^\\d/) ? `_${name?.slice(1)}` : name\n\n return factory.createExportDeclaration(\n undefined,\n isTypeOnly,\n asAlias && parsedName ? 
factory.createNamespaceExport(factory.createIdentifier(parsedName)) : undefined,\n factory.createStringLiteral(path),\n undefined,\n )\n }\n\n return factory.createExportDeclaration(\n undefined,\n isTypeOnly,\n factory.createNamedExports(\n name.map((propertyName) => {\n return factory.createExportSpecifier(false, undefined, typeof propertyName === 'string' ? factory.createIdentifier(propertyName) : propertyName)\n }),\n ),\n factory.createStringLiteral(path),\n undefined,\n )\n}\n\nexport const typescriptParser = createParser({\n name: 'typescript',\n extNames: ['.ts', '.js'],\n install() {},\n async parse(file, options = { extname: '.ts' }) {\n const sourceParts: Array<string> = []\n for (const item of file.sources) {\n if (item.value) {\n sourceParts.push(item.value)\n }\n }\n const source = sourceParts.join('\\n\\n')\n\n const importNodes: Array<ts.ImportDeclaration> = []\n for (const item of file.imports) {\n const importPath = item.root ? getRelativePath(item.root, item.path) : item.path\n const hasExtname = !!path.extname(importPath)\n\n importNodes.push(\n createImport({\n name: item.name,\n path: options.extname && hasExtname ? `${trimExtName(importPath)}${options.extname}` : item.root ? trimExtName(importPath) : importPath,\n isTypeOnly: item.isTypeOnly,\n }),\n )\n }\n\n const exportNodes: Array<ts.ExportDeclaration> = []\n for (const item of file.exports) {\n const exportPath = item.path\n const hasExtname = !!path.extname(exportPath)\n\n exportNodes.push(\n createExport({\n name: item.name,\n path: options.extname && hasExtname ? `${trimExtName(item.path)}${options.extname}` : trimExtName(item.path),\n isTypeOnly: item.isTypeOnly,\n asAlias: item.asAlias,\n }),\n )\n }\n\n const parts = [file.banner, print(...importNodes, ...exportNodes), source, file.footer].filter(\n (segment): segment is string => segment != null,\n )\n return parts.join('\\n')\n 
},\n})\n"],"mappings":";;;;;;;;;AAMA,MAAM,EAAE,YAAYA;;;;AAKpB,SAAgB,MAAM,GAAG,UAAkC;CACzD,MAAM,aAAaA,mBAAG,iBAAiB,aAAa,IAAIA,mBAAG,aAAa,QAAQ,MAAMA,mBAAG,WAAW,IAAI;AAWxG,QATgBA,mBAAG,cAAc;EAC/B,uBAAuB;EACvB,SAASA,mBAAG,YAAY;EACxB,gBAAgB;EAChB,eAAe;EAChB,CAAC,CAEqB,UAAUA,mBAAG,WAAW,WAAW,QAAQ,gBAAgB,SAAS,EAAE,WAAW,CAE1F,QAAQ,SAAS,KAAK;;AAGtC,SAAgB,aAAa,EAC3B,MACA,cACA,MACA,aAAa,OACb,cAAc,SAOb;CACD,MAAM,cAAc,OAAOC,wCAAgB,MAAMC,OAAK,GAAGA;AAEzD,KAAI,CAAC,MAAM,QAAQ,KAAK,EAAE;EACxB,IAAIC,qBAAgD,QAAQ,iBAAiB,KAAK;EAClF,IAAIC;AAEJ,MAAI,aAAa;AACf,wBAAqB;AACrB,gBAAa,QAAQ,sBAAsB,QAAQ,iBAAiB,KAAK,CAAC;;AAG5E,SAAO,QAAQ,wBACb,QACA,QAAQ,mBAAmB,YAAY,oBAAoB,WAAW,EACtE,QAAQ,oBAAoB,YAAY,EACxC,OACD;;AAGH,QAAO,QAAQ,wBACb,QACA,QAAQ,mBACN,YACA,QACA,QAAQ,mBACN,KAAK,KAAK,SAAS;AACjB,MAAI,OAAO,SAAS,UAAU;GAC5B,MAAM,MAAM;AACZ,OAAI,IAAI,KACN,QAAO,QAAQ,sBAAsB,OAAO,QAAQ,iBAAiB,IAAI,aAAa,EAAE,QAAQ,iBAAiB,IAAI,KAAK,CAAC;AAG7H,UAAO,QAAQ,sBAAsB,OAAO,QAAW,QAAQ,iBAAiB,IAAI,aAAa,CAAC;;AAGpG,SAAO,QAAQ,sBAAsB,OAAO,QAAW,QAAQ,iBAAiB,KAAK,CAAC;GACtF,CACH,CACF,EACD,QAAQ,oBAAoB,YAAY,EACxC,OACD;;AAGH,SAAgB,aAAa,EAC3B,cACA,SACA,aAAa,OACb,QAMC;AACD,KAAI,QAAQ,CAAC,MAAM,QAAQ,KAAK,IAAI,CAAC,QACnC,SAAQ,KAAK,qDAAqD,OAAO;AAG3E,KAAI,CAAC,MAAM,QAAQ,KAAK,EAAE;EACxB,MAAM,0DAAa,KAAM,MAAM,MAAM,IAAG,gDAAI,KAAM,MAAM,EAAE,KAAK;AAE/D,SAAO,QAAQ,wBACb,QACA,YACA,WAAW,aAAa,QAAQ,sBAAsB,QAAQ,iBAAiB,WAAW,CAAC,GAAG,QAC9F,QAAQ,oBAAoBF,OAAK,EACjC,OACD;;AAGH,QAAO,QAAQ,wBACb,QACA,YACA,QAAQ,mBACN,KAAK,KAAK,iBAAiB;AACzB,SAAO,QAAQ,sBAAsB,OAAO,QAAW,OAAO,iBAAiB,WAAW,QAAQ,iBAAiB,aAAa,GAAG,aAAa;GAChJ,CACH,EACD,QAAQ,oBAAoBA,OAAK,EACjC,OACD;;AAGH,MAAa,mBAAmBG,kCAAa;CAC3C,MAAM;CACN,UAAU,CAAC,OAAO,MAAM;CACxB,UAAU;CACV,MAAM,MAAM,MAAM,UAAU,EAAE,SAAS,OAAO,EAAE;EAC9C,MAAMC,cAA6B,EAAE;AACrC,OAAK,MAAM,QAAQ,KAAK,QACtB,KAAI,KAAK,MACP,aAAY,KAAK,KAAK,MAAM;EAGhC,MAAM,SAAS,YAAY,KAAK,OAAO;EAEvC,MAAMC,cAA2C,EAAE;AACnD,OAAK,MAAM,QAAQ,KAAK,SAAS;GAC/B,MAAM,aAAa,KAAK,OAAON,wCAAgB,KAAK,MAAM,KAAK,KAAK,GAAG,KAAK;GAC5E,MAAM,aAAa,CAAC,CAACC,kBAAK,QAAQ,WAAW;AAE7C,eAAY,KACV,aAAa;IACX,MAAM,KAAK;IACX,MAAM,QAAQ,WAAW,aAAa,GAAGM,gCAAY,WAAW,GAAG,QAAQ,YAAY,KAAK,OAAOA,gCAAY,WAAW,GAAG;IAC7H,YAAY,KAAK;IAClB,CAAC,CACH;;EAGH,MAAMC,cAA2C,EAAE;AACnD,OAAK,MAAM,QAAQ,KAAK,SAAS;GAC/B,MAAM,aAAa,KAAK;GACxB,MAAM,aAAa,CAAC,CAACP,kBAAK,QAAQ,WAAW;AAE7C,eAAY,KACV,aAAa;IACX,MAAM,KAAK;IACX,MAAM,QAAQ,WAAW,aAAa,GAAGM,gCAAY,KAAK,KAAK,GAAG,QAAQ,YAAYA,gCAAY,KAAK,KAAK;IAC5G,YAAY,KAAK;IACjB,SAAS,KAAK;IACf,CAAC,CACH;;AAMH,SAHc;GAAC,KAAK;GAAQ,MAAM,GAAG,aAAa,GAAG,YAAY;GAAE;GAAQ,KAAK;GAAO,CAAC,QACrF,YAA+B,WAAW,KAC5C,CACY,KAAK,KAAK;;CAE1B,CAAC"}
package/package.json
CHANGED
package/src/Fabric.ts
CHANGED
@@ -101,7 +101,7 @@ export interface FabricContext<T extends FabricOptions = FabricOptions> extends

   /** Track installed plugins and parsers to prevent duplicates. */
   installedPlugins: Set<Plugin>
-  installedParsers:
+  installedParsers: Map<KubbFile.Extname, Parser>
 }

 /**
package/src/FileManager.ts
CHANGED
@@ -33,14 +33,14 @@ export class FileManager {
     return this
   }

-
-
+  #resolvePath(file: KubbFile.File): KubbFile.File {
+    this.events.emit('file:resolve:path', { file })

     return file
   }

-
-
+  #resolveName(file: KubbFile.File): KubbFile.File {
+    this.events.emit('file:resolve:name', { file })

     return file
   }
@@ -62,8 +62,8 @@ export class FileManager {
     for (let file of mergedFiles.values()) {
       const existing = this.#cache.get(file.path)

-      file =
-      file =
+      file = this.#resolveName(file)
+      file = this.#resolvePath(file)

       const merged = existing ? mergeFile(existing, file) : file
       const resolvedFile = createFile(merged)
@@ -100,7 +100,7 @@ export class FileManager {

   get files(): Array<KubbFile.ResolvedFile> {
     if (this.#filesCache) {
-      return
+      return this.#filesCache
     }

     const cachedKeys = this.#cache.keys()
@@ -119,7 +119,7 @@ export class FileManager {

     this.#filesCache = files

-    return
+    return files
   }

   //TODO add test and check if write of FileManager contains the newly added file
package/src/FileProcessor.ts
CHANGED
@@ -6,7 +6,7 @@ import type { Parser } from './parsers/types.ts'
 import { AsyncEventEmitter } from './utils/AsyncEventEmitter.ts'

 export type ProcessFilesProps = {
-  parsers?:
+  parsers?: Map<KubbFile.Extname, Parser>
   extension?: Record<KubbFile.Extname, KubbFile.Extname | ''>
   dryRun?: boolean
   /**
@@ -16,7 +16,7 @@ export type ProcessFilesProps = {
 }

 type GetParseOptions = {
-  parsers?:
+  parsers?: Map<KubbFile.Extname, Parser>
   extension?: Record<KubbFile.Extname, KubbFile.Extname | ''>
 }

@@ -47,13 +47,7 @@ export class FileProcessor {
       return defaultParser.parse(file, { extname: parseExtName })
     }

-
-    for (const item of parsers) {
-      if (item.extNames?.includes(file.extname)) {
-        parser = item
-        break
-      }
-    }
+    const parser = parsers.get(file.extname)

     if (!parser) {
       return defaultParser.parse(file, { extname: parseExtName })
@@ -68,26 +62,25 @@ export class FileProcessor {
   ): Promise<KubbFile.ResolvedFile[]> {
     await this.events.emit('process:start', { files })

-    let processed = 0
     const total = files.length
+    let processed = 0

     const processOne = async (resolvedFile: KubbFile.ResolvedFile, index: number) => {
-      const percentage = (processed / total) * 100
-
       await this.events.emit('file:start', { file: resolvedFile, index, total })

       const source = dryRun ? undefined : await this.parse(resolvedFile, { extension, parsers })

+      const currentProcessed = ++processed
+      const percentage = (currentProcessed / total) * 100
+
       await this.events.emit('process:progress', {
         file: resolvedFile,
         source,
-        processed,
+        processed: currentProcessed,
         percentage,
         total,
       })

-      processed++
-
       await this.events.emit('file:end', { file: resolvedFile, index, total })
     }

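The FileProcessor diff above swaps the linear scan over a parser list for a Map keyed by file extension, so parser lookup becomes a single parsers.get(file.extname) with the default parser as fallback. Below is a minimal sketch of that lookup pattern; the Parser shape, the defaultParser fallback, and the Extname alias are simplified stand-ins, not the package's actual types.

type Extname = `.${string}`

interface Parser {
  name: string
  extNames?: Array<Extname>
  parse(file: { extname: Extname; source: string }): Promise<string>
}

// Illustrative fallback that returns the file source untouched.
const defaultParser: Parser = {
  name: 'default',
  async parse(file) {
    return file.source
  },
}

// Before: for (const item of parsers) { if (item.extNames?.includes(file.extname)) ... }
// After: a single O(1) Map lookup keyed by extension.
function resolveParser(parsers: Map<Extname, Parser>, extname: Extname): Parser {
  return parsers.get(extname) ?? defaultParser
}

const parsers = new Map<Extname, Parser>([
  ['.ts', { name: 'typescript', extNames: ['.ts', '.js'], parse: async (f) => f.source }],
])

console.log(resolveParser(parsers, '.ts').name) // typescript
console.log(resolveParser(parsers, '.css').name) // default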
package/src/createFile.ts
CHANGED
@@ -1,7 +1,7 @@
 import { createHash } from 'node:crypto'
 import path from 'node:path'
 import { orderBy } from 'natural-orderby'
-import {
+import { uniqueBy } from 'remeda'
 import type * as KubbFile from './KubbFile.ts'
 import { trimExtName } from './utils/trimExtName.ts'

@@ -10,53 +10,78 @@ export function combineSources(sources: Array<KubbFile.Source>): Array<KubbFile.
 }

 export function combineExports(exports: Array<KubbFile.Export>): Array<KubbFile.Export> {
-
+  const sorted = orderBy(exports, [
     (v) => !!Array.isArray(v.name),
     (v) => !v.isTypeOnly,
     (v) => v.path,
     (v) => !!v.name,
     (v) => (Array.isArray(v.name) ? orderBy(v.name) : v.name),
-  ])
-
-
-
-
-
-
-
-
-
+  ])
+
+  const prev: Array<KubbFile.Export> = []
+  // Map to track items by path for O(1) lookup
+  const pathMap = new Map<string, KubbFile.Export>()
+  // Map to track unique items by path+name+isTypeOnly+asAlias
+  const uniqueMap = new Map<string, KubbFile.Export>()
+  // Map to track items by path+name where isTypeOnly=true (for type-only check)
+  const pathNameTypeTrueMap = new Map<string, KubbFile.Export>()
+
+  for (const curr of sorted) {
+    const name = curr.name
+    const pathKey = curr.path
+    const prevByPath = pathMap.get(pathKey)
+
+    // Create unique key for path+name+isTypeOnly
+    const nameKey = Array.isArray(name) ? JSON.stringify(name) : name || ''
+    const pathNameTypeKey = `${pathKey}:${nameKey}:${curr.isTypeOnly}`
+    // Check if there's already an item with the same path+name but with isTypeOnly=true
+    const pathNameKey = `${pathKey}:${nameKey}`
+    const prevByPathAndIsTypeOnly = pathNameTypeTrueMap.get(pathNameKey)
+
+    if (prevByPathAndIsTypeOnly) {
+      // we already have an export that has the same path and name but uses `isTypeOnly` (export type ...)
+      continue
+    }

-
-
-
+    // Create unique key for path+name+isTypeOnly+asAlias
+    const uniqueKey = `${pathNameTypeKey}:${curr.asAlias || ''}`
+    const uniquePrev = uniqueMap.get(uniqueKey)

-
-
-
-
+    // we already have an item that was unique enough or name field is empty or prev asAlias is set but current has no changes
+    if (uniquePrev || (Array.isArray(name) && !name.length) || (prevByPath?.asAlias && !curr.asAlias)) {
+      continue
+    }

-
-
-
-
-
-
-
-
+    if (!prevByPath) {
+      const newItem = {
+        ...curr,
+        name: Array.isArray(name) ? [...new Set(name)] : name,
+      }
+      prev.push(newItem)
+      pathMap.set(pathKey, newItem)
+      uniqueMap.set(uniqueKey, newItem)
+      // Track items with isTypeOnly=true for the type-only check
+      if (newItem.isTypeOnly) {
+        pathNameTypeTrueMap.set(pathNameKey, newItem)
       }
+      continue
+    }

-
-
-
+    // merge all names when prev and current both have the same isTypeOnly set
+    if (prevByPath && Array.isArray(prevByPath.name) && Array.isArray(curr.name) && prevByPath.isTypeOnly === curr.isTypeOnly) {
+      prevByPath.name = [...new Set([...prevByPath.name, ...curr.name])]
+      continue
+    }

-
-
+    prev.push(curr)
+    uniqueMap.set(uniqueKey, curr)
+    // Track items with isTypeOnly=true for the type-only check
+    if (curr.isTypeOnly) {
+      pathNameTypeTrueMap.set(pathNameKey, curr)
+    }
+  }

-
-    },
-    [] as Array<KubbFile.Export>,
-  )
+  return prev
 }

 export function combineImports(imports: Array<KubbFile.Import>, exports: Array<KubbFile.Export>, source?: string): Array<KubbFile.Import> {
@@ -96,18 +121,28 @@ export function combineImports(imports: Array<KubbFile.Import>, exports: Array<K
     return isUsed
   }

-
+  const sorted = orderBy(imports, [
     (v) => !!Array.isArray(v.name),
     (v) => !v.isTypeOnly,
     (v) => v.path,
     (v) => !!v.name,
     (v) => (Array.isArray(v.name) ? orderBy(v.name) : v.name),
-  ])
+  ])
+
+  const prev: Array<KubbFile.Import> = []
+  // Map to track items by path+isTypeOnly for O(1) lookup
+  const pathTypeMap = new Map<string, KubbFile.Import>()
+  // Map to track unique items by path+name+isTypeOnly
+  const uniqueMap = new Map<string, KubbFile.Import>()
+  // Map to track items by path+name where isTypeOnly=true (for type-only check)
+  const pathNameTypeTrueMap = new Map<string, KubbFile.Import>()
+
+  for (const curr of sorted) {
     let name = Array.isArray(curr.name) ? [...new Set(curr.name)] : curr.name

     if (curr.path === curr.root) {
       // root and path are the same file, remove the "./" import
-
+      continue
     }

     // merge all names and check if the importName is being used in the generated source and if not filter those imports out
@@ -115,45 +150,63 @@ export function combineImports(imports: Array<KubbFile.Import>, exports: Array<K
       name = name.filter((item) => (typeof item === 'string' ? hasImportInSource(item) : hasImportInSource(item.propertyName)))
     }

-    const
-    const
-
+    const pathTypeKey = `${curr.path}:${curr.isTypeOnly}`
+    const prevByPath = pathTypeMap.get(pathTypeKey)
+
+    // Create key for name comparison
+    const nameKey = Array.isArray(name) ? JSON.stringify(name) : name || ''
+    const pathNameTypeKey = `${curr.path}:${nameKey}:${curr.isTypeOnly}`
+    const uniquePrev = uniqueMap.get(pathNameTypeKey)
+    // Check if there's already an item with the same path+name but with isTypeOnly=true
+    const pathNameKey = `${curr.path}:${nameKey}`
+    const prevByPathNameAndIsTypeOnly = pathNameTypeTrueMap.get(pathNameKey)

     if (prevByPathNameAndIsTypeOnly) {
-      // we already have an
-
+      // we already have an import that has the same path and name but uses `isTypeOnly` (import type ...)
+      continue
     }

     // already unique enough or name is empty
     if (uniquePrev || (Array.isArray(name) && !name.length)) {
-
+      continue
     }

     // new item, append name
     if (!prevByPath) {
-
-      ...
-
-
-
-
-
+      const newItem = {
+        ...curr,
+        name,
+      }
+      prev.push(newItem)
+      pathTypeMap.set(pathTypeKey, newItem)
+      uniqueMap.set(pathNameTypeKey, newItem)
+      // Track items with isTypeOnly=true for the type-only check
+      if (newItem.isTypeOnly) {
+        pathNameTypeTrueMap.set(pathNameKey, newItem)
+      }
+      continue
     }

     // merge all names when prev and current both have the same isTypeOnly set
     if (prevByPath && Array.isArray(prevByPath.name) && Array.isArray(name) && prevByPath.isTypeOnly === curr.isTypeOnly) {
       prevByPath.name = [...new Set([...prevByPath.name, ...name])]
-
-      return prev
+      continue
     }

     // no import was found in the source, ignore import
     if (!Array.isArray(name) && name && !hasImportInSource(name)) {
-
+      continue
     }

-
-
+    prev.push(curr)
+    uniqueMap.set(pathNameTypeKey, curr)
+    // Track items with isTypeOnly=true for the type-only check
+    if (curr.isTypeOnly) {
+      pathNameTypeTrueMap.set(pathNameKey, curr)
+    }
+  }
+
+  return prev
 }

 /**
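The createFile.ts diff above replaces the previous reduce-based combineExports/combineImports with explicit Maps keyed by composite strings built from path, a serialized name, and isTypeOnly, which turns the duplicate and "already covered by a type-only export" checks into O(1) lookups. Below is a minimal sketch of that keying idea with a simplified Export shape (not the package's KubbFile.Export type); it shows only the dedup keys, not the sorting or name-merging logic.

type Export = {
  path: string
  name?: string | Array<string>
  isTypeOnly?: boolean
  asAlias?: boolean
}

// Mirrors the composite-key construction used by the new combineExports.
function dedupeExports(exports: Array<Export>): Array<Export> {
  const result: Array<Export> = []
  const uniqueMap = new Map<string, Export>()
  const typeOnlyByPathName = new Map<string, Export>()

  for (const curr of exports) {
    const nameKey = Array.isArray(curr.name) ? JSON.stringify(curr.name) : curr.name || ''
    const pathNameKey = `${curr.path}:${nameKey}`
    const uniqueKey = `${pathNameKey}:${curr.isTypeOnly}:${curr.asAlias || ''}`

    // Skip when the same path+name is already covered by a type-only export.
    if (typeOnlyByPathName.has(pathNameKey)) continue
    // Skip exact duplicates.
    if (uniqueMap.has(uniqueKey)) continue

    result.push(curr)
    uniqueMap.set(uniqueKey, curr)
    if (curr.isTypeOnly) typeOnlyByPathName.set(pathNameKey, curr)
  }

  return result
}

// Both later entries are dropped: the first type-only export already covers this path+name.
const combined = dedupeExports([
  { path: './models.ts', name: ['Pet'], isTypeOnly: true },
  { path: './models.ts', name: ['Pet'], isTypeOnly: true },
  { path: './models.ts', name: ['Pet'], isTypeOnly: false },
])
console.log(combined.length) // 1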
package/src/defineFabric.ts
CHANGED
@@ -1,6 +1,7 @@
 import { isFunction } from 'remeda'
 import type { Fabric, FabricConfig, FabricContext, FabricEvents, FabricOptions } from './Fabric.ts'
 import { FileManager } from './FileManager.ts'
+import type * as KubbFile from './KubbFile.ts'
 import type { Parser } from './parsers/types.ts'
 import type { Plugin } from './plugins/types.ts'
 import { AsyncEventEmitter } from './utils/AsyncEventEmitter.ts'
@@ -30,7 +31,8 @@ export function defineFabric<T extends FabricOptions>(init?: FabricInitializer<T
   function create(config: FabricConfig<T> = { mode: 'sequential' } as FabricConfig<T>): Fabric<T> {
     const events = new AsyncEventEmitter<FabricEvents>()
     const installedPlugins = new Set<Plugin<any>>()
-    const installedParsers = new
+    const installedParsers = new Map<KubbFile.Extname, Parser<any>>()
+    const installedParserNames = new Set<string>()
     const fileManager = new FileManager({ events })

     const context: FabricContext<T> = {
@@ -76,10 +78,20 @@ export function defineFabric<T extends FabricOptions>(init?: FabricInitializer<T
       }

       if (pluginOrParser.type === 'parser') {
-        if (
+        if (installedParserNames.has(pluginOrParser.name)) {
           console.warn(`Parser "${pluginOrParser.name}" already applied.`)
         } else {
-
+          installedParserNames.add(pluginOrParser.name)
+        }
+
+        if (pluginOrParser.extNames) {
+          for (const extName of pluginOrParser.extNames) {
+            const existing = installedParsers.get(extName)
+            if (existing && existing.name !== pluginOrParser.name) {
+              console.warn(`Parser "${pluginOrParser.name}" is overriding parser "${existing.name}" for extension "${extName}".`)
+            }
+            installedParsers.set(extName, pluginOrParser)
+          }
         }
       }

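The defineFabric.ts diff above keeps a Set of installed parser names for the duplicate warning and registers each parser in a Map under every extension it declares, warning when a later parser takes over an extension from an earlier one. Below is a minimal sketch of that registration flow with a trimmed-down Parser shape; the useParser helper and the "esbuild" parser name are illustrative, not part of the package.

interface Parser {
  name: string
  extNames?: Array<string>
}

const installedParsers = new Map<string, Parser>()
const installedParserNames = new Set<string>()

function useParser(parser: Parser): void {
  // Warn once per parser name, as the diff shows for duplicate applications.
  if (installedParserNames.has(parser.name)) {
    console.warn(`Parser "${parser.name}" already applied.`)
  } else {
    installedParserNames.add(parser.name)
  }

  // Register the parser for every extension it declares, warning on overrides.
  if (parser.extNames) {
    for (const extName of parser.extNames) {
      const existing = installedParsers.get(extName)
      if (existing && existing.name !== parser.name) {
        console.warn(`Parser "${parser.name}" is overriding parser "${existing.name}" for extension "${extName}".`)
      }
      installedParsers.set(extName, parser)
    }
  }
}

useParser({ name: 'typescript', extNames: ['.ts', '.js'] })
useParser({ name: 'esbuild', extNames: ['.js'] }) // warns: overriding "typescript" for ".js"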