@kubb/fabric-core 0.8.0 → 0.9.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{defineProperty-B-I0UxsW.js → defineProperty-CS9Uk_6Q.js} +5 -5
- package/dist/defineProperty-CS9Uk_6Q.js.map +1 -0
- package/dist/{defineProperty-Db0mqPmh.cjs → defineProperty-hUmuXj5B.cjs} +5 -5
- package/dist/defineProperty-hUmuXj5B.cjs.map +1 -0
- package/dist/index.cjs +2 -2
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +2 -2
- package/dist/index.js.map +1 -1
- package/dist/parsers/typescript.cjs +16 -0
- package/dist/parsers/typescript.cjs.map +1 -1
- package/dist/parsers/typescript.d.cts +8 -1
- package/dist/parsers/typescript.d.ts +8 -1
- package/dist/parsers/typescript.js +15 -1
- package/dist/parsers/typescript.js.map +1 -1
- package/dist/plugins.cjs +1 -1
- package/dist/plugins.js +1 -1
- package/package.json +2 -2
- package/src/parsers/typescriptParser.ts +25 -0
- package/dist/defineProperty-B-I0UxsW.js.map +0 -1
- package/dist/defineProperty-Db0mqPmh.cjs.map +0 -1
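Nearly all of the `dist/` churn above is mechanical: the vendored remeda copy moved from 2.32.2 to 2.33.1 (visible in the `//#region` paths and source maps below), which re-hashes the shared `defineProperty-*` chunks and touches every file that references them. The only source-level change in the summary is `package/src/parsers/typescriptParser.ts` (+25 lines) with its matching `dist/parsers/typescript.*` outputs, plus the usual `package.json` version bump. For orientation, the sketch below (illustrative only; the `Source` shape and sample data are assumptions) shows what the vendored `uniqueBy` helper inside those renamed chunks does, since `createFile.ts` builds its deduplication on it:

```ts
// Illustration only: remeda's `uniqueBy` keeps the first element for every
// distinct key returned by the callback. The key here is a simplified version
// of the one `combineSources` uses in the embedded createFile.ts source below.
import { uniqueBy } from 'remeda'

type Source = { name?: string; value?: string; isTypeOnly?: boolean }

const sources: Source[] = [
  { name: 'Pet', value: 'export type Pet = { id: number }', isTypeOnly: true },
  { name: 'Pet', value: 'export type Pet = { id: number }', isTypeOnly: true }, // duplicate -> dropped
  { value: "console.log('unnamed block')" }, // unnamed -> keyed by its value, kept
]

const deduped = uniqueBy(sources, (obj) => `${obj.name ?? obj.value ?? ''}:${obj.isTypeOnly ?? false}`)
// deduped.length === 2
```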

package/dist/{defineProperty-B-I0UxsW.js → defineProperty-CS9Uk_6Q.js}
CHANGED

@@ -3,7 +3,7 @@ import { orderBy } from "natural-orderby";
 import { createHash } from "node:crypto";
 import path from "node:path";

-//#region ../../node_modules/.pnpm/remeda@2.
+//#region ../../node_modules/.pnpm/remeda@2.33.1/node_modules/remeda/dist/utilityEvaluators-D7O7U3FY.js
 const e = {
 done: !0,
 hasNext: !1
@@ -17,7 +17,7 @@ const e = {
 });

 //#endregion
-//#region ../../node_modules/.pnpm/remeda@2.
+//#region ../../node_modules/.pnpm/remeda@2.33.1/node_modules/remeda/dist/pipe-CRsqNjLF.js
 function t$1(e$1, ...t$3) {
 let a = e$1, o = t$3.map((e$2) => `lazy` in e$2 ? r$1(e$2) : void 0), s = 0;
 for (; s < t$3.length;) {
@@ -70,7 +70,7 @@ function i(e$1) {
 }

 //#endregion
-//#region ../../node_modules/.pnpm/remeda@2.
+//#region ../../node_modules/.pnpm/remeda@2.33.1/node_modules/remeda/dist/purryFromLazy-C1RBdBlx.js
 function t(t$3, n$3) {
 let r$3 = n$3.length - t$3.length;
 if (r$3 === 1) {
@@ -91,7 +91,7 @@ function t(t$3, n$3) {
 }

 //#endregion
-//#region ../../node_modules/.pnpm/remeda@2.
+//#region ../../node_modules/.pnpm/remeda@2.33.1/node_modules/remeda/dist/uniqueBy.js
 function n(...e$1) {
 return t(r, e$1);
 }
@@ -321,4 +321,4 @@ function _defineProperty(e$1, r$3, t$3) {

 //#endregion
 export { _classPrivateFieldInitSpec as a, _assertClassBrand as i, _classPrivateFieldSet2 as n, _checkPrivateRedeclaration as o, _classPrivateFieldGet2 as r, createFile as s, _defineProperty as t };
-//# sourceMappingURL=defineProperty-
+//# sourceMappingURL=defineProperty-CS9Uk_6Q.js.map

package/dist/defineProperty-CS9Uk_6Q.js.map
ADDED

@@ -0,0 +1 @@
+
{"version":3,"file":"defineProperty-CS9Uk_6Q.js","names":["t","n","r","e","t","e","r","n","i","r","n","t","i","e","e","n","t","r","i","uniqueBy","prev: Array<KubbFile.Export>","prev: Array<KubbFile.Import>"],"sources":["../../../node_modules/.pnpm/remeda@2.33.1/node_modules/remeda/dist/utilityEvaluators-D7O7U3FY.js","../../../node_modules/.pnpm/remeda@2.33.1/node_modules/remeda/dist/pipe-CRsqNjLF.js","../../../node_modules/.pnpm/remeda@2.33.1/node_modules/remeda/dist/purryFromLazy-C1RBdBlx.js","../../../node_modules/.pnpm/remeda@2.33.1/node_modules/remeda/dist/uniqueBy.js","../src/createFile.ts"],"sourcesContent":["const e={done:!0,hasNext:!1},t={done:!1,hasNext:!1},n=()=>e,r=e=>({hasNext:!0,next:e,done:!1});export{n,r,t};\n//# sourceMappingURL=utilityEvaluators-D7O7U3FY.js.map","import{t as e}from\"./utilityEvaluators-D7O7U3FY.js\";function t(e,...t){let a=e,o=t.map(e=>`lazy`in e?r(e):void 0),s=0;for(;s<t.length;){if(o[s]===void 0||!i(a)){let e=t[s];a=e(a),s+=1;continue}let e=[];for(let n=s;n<t.length;n++){let t=o[n];if(t===void 0||(e.push(t),t.isSingle))break}let r=[];for(let t of a)if(n(t,r,e))break;let{isSingle:c}=e.at(-1);a=c?r[0]:r,s+=e.length}return a}function n(t,r,i){if(i.length===0)return r.push(t),!1;let a=t,o=e,s=!1;for(let[e,t]of i.entries()){let{index:c,items:l}=t;if(l.push(a),o=t(a,c,l),t.index+=1,o.hasNext){if(o.hasMany??!1){for(let t of o.next)if(n(t,r,i.slice(e+1)))return!0;return s}a=o.next}if(!o.hasNext)break;o.done&&(s=!0)}return o.hasNext&&r.push(a),s}function r(e){let{lazy:t,lazyArgs:n}=e,r=t(...n);return Object.assign(r,{isSingle:t.single??!1,index:0,items:[]})}function i(e){return typeof e==`string`||typeof e==`object`&&!!e&&Symbol.iterator in e}export{t};\n//# sourceMappingURL=pipe-CRsqNjLF.js.map","import{t as e}from\"./pipe-CRsqNjLF.js\";function t(t,n){let r=n.length-t.length;if(r===1){let[r,...i]=n;return e(r,{lazy:t,lazyArgs:i})}if(r===0){let r={lazy:t,lazyArgs:n};return Object.assign(t=>e(t,r),r)}throw Error(`Wrong number of arguments`)}export{t};\n//# sourceMappingURL=purryFromLazy-C1RBdBlx.js.map","import{t as e}from\"./utilityEvaluators-D7O7U3FY.js\";import{t}from\"./purryFromLazy-C1RBdBlx.js\";function n(...e){return t(r,e)}function r(t){let n=t,r=new Set;return(t,i,a)=>{let o=n(t,i,a);return r.has(o)?e:(r.add(o),{done:!1,hasNext:!0,next:t})}}export{n as uniqueBy};\n//# sourceMappingURL=uniqueBy.js.map","import { createHash } from 'node:crypto'\nimport path from 'node:path'\nimport { orderBy } from 'natural-orderby'\nimport { uniqueBy } from 'remeda'\nimport type * as KubbFile from './KubbFile.ts'\nimport { trimExtName } from './utils/trimExtName.ts'\n\nexport function combineSources(sources: Array<KubbFile.Source>): Array<KubbFile.Source> {\n return uniqueBy(sources, (obj) => {\n // For named sources, deduplicate by name, isExportable, and isTypeOnly\n // For unnamed sources, include the value to avoid deduplicating different code blocks\n // If both name and value are undefined, use an empty string as the unique identifier\n const uniqueId = obj.name ?? obj.value ?? ''\n const isExportable = obj.isExportable ?? false\n const isTypeOnly = obj.isTypeOnly ?? false\n return `${uniqueId}:${isExportable}:${isTypeOnly}`\n })\n}\n\nexport function combineExports(exports: Array<KubbFile.Export>): Array<KubbFile.Export> {\n const sorted = orderBy(exports, [\n (v) => !!Array.isArray(v.name),\n (v) => !v.isTypeOnly,\n (v) => v.path,\n (v) => !!v.name,\n (v) => (Array.isArray(v.name) ? 
orderBy(v.name) : v.name),\n ])\n\n const prev: Array<KubbFile.Export> = []\n // Map to track items by path for O(1) lookup\n const pathMap = new Map<string, KubbFile.Export>()\n // Map to track unique items by path+name+isTypeOnly+asAlias\n const uniqueMap = new Map<string, KubbFile.Export>()\n // Map to track items by path+name where isTypeOnly=true (for type-only check)\n const pathNameTypeTrueMap = new Map<string, KubbFile.Export>()\n\n for (const curr of sorted) {\n const name = curr.name\n const pathKey = curr.path\n const prevByPath = pathMap.get(pathKey)\n\n // Create unique key for path+name+isTypeOnly\n const nameKey = Array.isArray(name) ? JSON.stringify(name) : name || ''\n const pathNameTypeKey = `${pathKey}:${nameKey}:${curr.isTypeOnly}`\n // Check if there's already an item with the same path+name but with isTypeOnly=true\n const pathNameKey = `${pathKey}:${nameKey}`\n const prevByPathAndIsTypeOnly = pathNameTypeTrueMap.get(pathNameKey)\n\n if (prevByPathAndIsTypeOnly) {\n // we already have an export that has the same path and name but uses `isTypeOnly` (export type ...)\n continue\n }\n\n // Create unique key for path+name+isTypeOnly+asAlias\n const uniqueKey = `${pathNameTypeKey}:${curr.asAlias || ''}`\n const uniquePrev = uniqueMap.get(uniqueKey)\n\n // we already have an item that was unique enough or name field is empty or prev asAlias is set but current has no changes\n if (uniquePrev || (Array.isArray(name) && !name.length) || (prevByPath?.asAlias && !curr.asAlias)) {\n continue\n }\n\n if (!prevByPath) {\n const newItem = {\n ...curr,\n name: Array.isArray(name) ? [...new Set(name)] : name,\n }\n prev.push(newItem)\n pathMap.set(pathKey, newItem)\n uniqueMap.set(uniqueKey, newItem)\n // Track items with isTypeOnly=true for the type-only check\n if (newItem.isTypeOnly) {\n pathNameTypeTrueMap.set(pathNameKey, newItem)\n }\n continue\n }\n\n // merge all names when prev and current both have the same isTypeOnly set\n if (prevByPath && Array.isArray(prevByPath.name) && Array.isArray(curr.name) && prevByPath.isTypeOnly === curr.isTypeOnly) {\n prevByPath.name = [...new Set([...prevByPath.name, ...curr.name])]\n continue\n }\n\n prev.push(curr)\n uniqueMap.set(uniqueKey, curr)\n // Track items with isTypeOnly=true for the type-only check\n if (curr.isTypeOnly) {\n pathNameTypeTrueMap.set(pathNameKey, curr)\n }\n }\n\n return prev\n}\n\nexport function combineImports(imports: Array<KubbFile.Import>, exports: Array<KubbFile.Export>, source?: string): Array<KubbFile.Import> {\n const exportedNameLookup = new Set<string>()\n for (const item of exports) {\n const { name } = item\n if (!name) {\n continue\n }\n\n if (Array.isArray(name)) {\n for (const value of name) {\n if (value) {\n exportedNameLookup.add(value)\n }\n }\n continue\n }\n\n exportedNameLookup.add(name)\n }\n\n const usageCache = new Map<string, boolean>()\n const hasImportInSource = (importName: string): boolean => {\n if (!source) {\n return true\n }\n\n const cached = usageCache.get(importName)\n if (cached !== undefined) {\n return cached\n }\n\n const isUsed = source.includes(importName) || exportedNameLookup.has(importName)\n usageCache.set(importName, isUsed)\n\n return isUsed\n }\n\n const sorted = orderBy(imports, [\n (v) => !!Array.isArray(v.name),\n (v) => !v.isTypeOnly,\n (v) => v.path,\n (v) => !!v.name,\n (v) => (Array.isArray(v.name) ? 
orderBy(v.name) : v.name),\n ])\n\n const prev: Array<KubbFile.Import> = []\n // Map to track items by path+isTypeOnly for O(1) lookup\n const pathTypeMap = new Map<string, KubbFile.Import>()\n // Map to track unique items by path+name+isTypeOnly\n const uniqueMap = new Map<string, KubbFile.Import>()\n // Map to track items by path+name where isTypeOnly=true (for type-only check)\n const pathNameTypeTrueMap = new Map<string, KubbFile.Import>()\n\n for (const curr of sorted) {\n let name = Array.isArray(curr.name) ? [...new Set(curr.name)] : curr.name\n\n if (curr.path === curr.root) {\n // root and path are the same file, remove the \"./\" import\n continue\n }\n\n // merge all names and check if the importName is being used in the generated source and if not filter those imports out\n if (Array.isArray(name)) {\n name = name.filter((item) => (typeof item === 'string' ? hasImportInSource(item) : hasImportInSource(item.propertyName)))\n }\n\n const pathTypeKey = `${curr.path}:${curr.isTypeOnly}`\n const prevByPath = pathTypeMap.get(pathTypeKey)\n\n // Create key for name comparison\n const nameKey = Array.isArray(name) ? JSON.stringify(name) : name || ''\n const pathNameTypeKey = `${curr.path}:${nameKey}:${curr.isTypeOnly}`\n const uniquePrev = uniqueMap.get(pathNameTypeKey)\n // Check if there's already an item with the same path+name but with isTypeOnly=true\n const pathNameKey = `${curr.path}:${nameKey}`\n const prevByPathNameAndIsTypeOnly = pathNameTypeTrueMap.get(pathNameKey)\n\n if (prevByPathNameAndIsTypeOnly) {\n // we already have an import that has the same path and name but uses `isTypeOnly` (import type ...)\n continue\n }\n\n // already unique enough or name is empty\n if (uniquePrev || (Array.isArray(name) && !name.length)) {\n continue\n }\n\n // new item, append name\n if (!prevByPath) {\n const newItem = {\n ...curr,\n name,\n }\n prev.push(newItem)\n pathTypeMap.set(pathTypeKey, newItem)\n uniqueMap.set(pathNameTypeKey, newItem)\n // Track items with isTypeOnly=true for the type-only check\n if (newItem.isTypeOnly) {\n pathNameTypeTrueMap.set(pathNameKey, newItem)\n }\n continue\n }\n\n // merge all names when prev and current both have the same isTypeOnly set\n if (prevByPath && Array.isArray(prevByPath.name) && Array.isArray(name) && prevByPath.isTypeOnly === curr.isTypeOnly) {\n prevByPath.name = [...new Set([...prevByPath.name, ...name])]\n continue\n }\n\n // no import was found in the source, ignore import\n if (!Array.isArray(name) && name && !hasImportInSource(name)) {\n continue\n }\n\n prev.push(curr)\n uniqueMap.set(pathNameTypeKey, curr)\n // Track items with isTypeOnly=true for the type-only check\n if (curr.isTypeOnly) {\n pathNameTypeTrueMap.set(pathNameKey, curr)\n }\n }\n\n return prev\n}\n\n/**\n * Helper to create a file with name and id set\n */\nexport function createFile<TMeta extends object = object>(file: KubbFile.File<TMeta>): KubbFile.ResolvedFile<TMeta> {\n const extname = path.extname(file.baseName) as KubbFile.Extname\n if (!extname) {\n throw new Error(`No extname found for ${file.baseName}`)\n }\n\n const source = file.sources.map((item) => item.value).join('\\n\\n')\n const exports = file.exports?.length ? combineExports(file.exports) : []\n const imports = file.imports?.length && source ? combineImports(file.imports, exports, source) : []\n const sources = file.sources?.length ? 
combineSources(file.sources) : []\n\n return {\n ...file,\n id: createHash('sha256').update(file.path).digest('hex'),\n name: trimExtName(file.baseName),\n extname,\n imports: imports,\n exports: exports,\n sources: sources,\n meta: file.meta || ({} as TMeta),\n }\n}\n"],"x_google_ignoreList":[0,1,2,3],"mappings":";;;;;;AAAA,MAAM,IAAE;CAAC,MAAK,CAAC;CAAE,SAAQ,CAAC;CAAE,EAACA,MAAE;CAAC,MAAK,CAAC;CAAE,SAAQ,CAAC;CAAE,EAACC,YAAM,GAAEC,OAAE,SAAI;CAAC,SAAQ,CAAC;CAAE,MAAKC;CAAE,MAAK,CAAC;CAAE;;;;ACAzC,SAASC,IAAE,KAAE,GAAGA,KAAE;CAAC,IAAI,IAAEC,KAAE,IAAED,IAAE,KAAI,QAAG,UAASC,MAAEC,IAAED,IAAE,GAAC,KAAK,EAAE,EAAC,IAAE;AAAE,QAAK,IAAED,IAAE,SAAQ;AAAC,MAAG,EAAE,OAAK,KAAK,KAAG,CAAC,EAAE,EAAE,EAAC;GAAC,IAAIC,MAAED,IAAE;AAAG,OAAEC,IAAE,EAAE,EAAC,KAAG;AAAE;;EAAS,IAAIA,MAAE,EAAE;AAAC,OAAI,IAAIE,MAAE,GAAEA,MAAEH,IAAE,QAAO,OAAI;GAAC,IAAIA,MAAE,EAAEG;AAAG,OAAGH,QAAI,KAAK,MAAIC,IAAE,KAAKD,IAAE,EAACA,IAAE,UAAU;;EAAM,IAAIE,MAAE,EAAE;AAAC,OAAI,IAAIF,OAAK,EAAE,KAAGG,IAAEH,KAAEE,KAAED,IAAE,CAAC;EAAM,IAAG,EAAC,UAAS,MAAGA,IAAE,GAAG,GAAG;AAAC,MAAE,IAAEC,IAAE,KAAGA,KAAE,KAAGD,IAAE;;AAAO,QAAO;;AAAE,SAASE,IAAE,KAAE,KAAE,KAAE;AAAC,KAAGC,IAAE,WAAS,EAAE,QAAOF,IAAE,KAAKF,IAAE,EAAC,CAAC;CAAE,IAAI,IAAEA,KAAE,IAAEC,KAAE,IAAE,CAAC;AAAE,MAAI,IAAG,CAACA,KAAED,QAAKI,IAAE,SAAS,EAAC;EAAC,IAAG,EAAC,OAAM,GAAE,OAAM,MAAGJ;AAAE,MAAG,EAAE,KAAK,EAAE,EAAC,IAAEA,IAAE,GAAE,GAAE,EAAE,EAAC,IAAE,SAAO,GAAE,EAAE,SAAQ;;AAAC,qBAAG,EAAE,0DAAS,CAAC,GAAE;AAAC,SAAI,IAAIA,OAAK,EAAE,KAAK,KAAGG,IAAEH,KAAEE,KAAEE,IAAE,MAAMH,MAAE,EAAE,CAAC,CAAC,QAAM,CAAC;AAAE,WAAO;;AAAE,OAAE,EAAE;;AAAK,MAAG,CAAC,EAAE,QAAQ;AAAM,IAAE,SAAO,IAAE,CAAC;;AAAG,QAAO,EAAE,WAASC,IAAE,KAAK,EAAE,EAAC;;AAAE,SAASA,IAAE,KAAE;;CAAC,IAAG,EAAC,MAAKF,KAAE,UAASG,QAAGF,KAAEC,MAAEF,IAAE,GAAGG,IAAE;AAAC,QAAO,OAAO,OAAOD,KAAE;EAAC,uBAASF,IAAE,uDAAQ,CAAC;EAAE,OAAM;EAAE,OAAM,EAAE;EAAC,CAAC;;AAAC,SAAS,EAAE,KAAE;AAAC,QAAO,OAAOC,OAAG,YAAU,OAAOA,OAAG,YAAU,CAAC,CAACA,OAAG,OAAO,YAAYA;;;;;ACAj2B,SAAS,EAAE,KAAE,KAAE;CAAC,IAAII,MAAEC,IAAE,SAAOC,IAAE;AAAO,KAAGF,QAAI,GAAE;EAAC,IAAG,CAACA,KAAE,GAAGG,OAAGF;AAAE,SAAOG,IAAEJ,KAAE;GAAC,MAAKE;GAAE,UAASC;GAAE,CAAC;;AAAC,KAAGH,QAAI,GAAE;EAAC,IAAIA,MAAE;GAAC,MAAKE;GAAE,UAASD;GAAE;AAAC,SAAO,OAAO,QAAO,QAAGG,IAAEF,KAAEF,IAAE,EAACA,IAAE;;AAAC,OAAM,MAAM,4BAA4B;;;;;ACAtJ,SAAS,EAAE,GAAGK,KAAE;AAAC,QAAO,EAAE,GAAEA,IAAE;;AAAC,SAAS,EAAE,KAAE;CAAC,IAAIC,MAAEC,KAAEC,sBAAE,IAAI,KAAG;AAAC,SAAO,KAAE,KAAE,MAAI;EAAC,IAAI,IAAEF,IAAEC,KAAEE,KAAE,EAAE;AAAC,SAAOD,IAAE,IAAI,EAAE,GAACH,OAAGG,IAAE,IAAI,EAAE,EAAC;GAAC,MAAK,CAAC;GAAE,SAAQ,CAAC;GAAE,MAAKD;GAAE;;;;;;ACOpP,SAAgB,eAAe,SAAyD;AACtF,QAAOG,EAAS,UAAU,QAAQ;;AAOhC,SAAO,wBAHU,IAAI,qDAAQ,IAAI,4CAAS,GAGvB,wBAFE,IAAI,6EAAgB,MAEN,sBADhB,IAAI,uEAAc;GAErC;;AAGJ,SAAgB,eAAe,SAAyD;CACtF,MAAM,SAAS,QAAQ,SAAS;GAC7B,MAAM,CAAC,CAAC,MAAM,QAAQ,EAAE,KAAK;GAC7B,MAAM,CAAC,EAAE;GACT,MAAM,EAAE;GACR,MAAM,CAAC,CAAC,EAAE;GACV,MAAO,MAAM,QAAQ,EAAE,KAAK,GAAG,QAAQ,EAAE,KAAK,GAAG,EAAE;EACrD,CAAC;CAEF,MAAMC,OAA+B,EAAE;CAEvC,MAAM,0BAAU,IAAI,KAA8B;CAElD,MAAM,4BAAY,IAAI,KAA8B;CAEpD,MAAM,sCAAsB,IAAI,KAA8B;AAE9D,MAAK,MAAM,QAAQ,QAAQ;EACzB,MAAM,OAAO,KAAK;EAClB,MAAM,UAAU,KAAK;EACrB,MAAM,aAAa,QAAQ,IAAI,QAAQ;EAGvC,MAAM,UAAU,MAAM,QAAQ,KAAK,GAAG,KAAK,UAAU,KAAK,GAAG,QAAQ;EACrE,MAAM,kBAAkB,GAAG,QAAQ,GAAG,QAAQ,GAAG,KAAK;EAEtD,MAAM,cAAc,GAAG,QAAQ,GAAG;AAGlC,MAFgC,oBAAoB,IAAI,YAAY,CAIlE;EAIF,MAAM,YAAY,GAAG,gBAAgB,GAAG,KAAK,WAAW;AAIxD,MAHmB,UAAU,IAAI,UAAU,IAGxB,MAAM,QAAQ,KAAK,IAAI,CAAC,KAAK,mEAAY,WAAY,YAAW,CAAC,KAAK,QACvF;AAGF,MAAI,CAAC,YAAY;GACf,MAAM,UAAU;IACd,GAAG;IACH,MAAM,MAAM,QAAQ,KAAK,GAAG,CAAC,GAAG,IAAI,IAAI,KAAK,CAAC,GAAG;IAClD;AACD,QAAK,KAAK,QAAQ;AAClB,WAAQ,IAAI,SAAS,QAAQ;AAC7B,aAAU
,IAAI,WAAW,QAAQ;AAEjC,OAAI,QAAQ,WACV,qBAAoB,IAAI,aAAa,QAAQ;AAE/C;;AAIF,MAAI,cAAc,MAAM,QAAQ,WAAW,KAAK,IAAI,MAAM,QAAQ,KAAK,KAAK,IAAI,WAAW,eAAe,KAAK,YAAY;AACzH,cAAW,OAAO,CAAC,GAAG,IAAI,IAAI,CAAC,GAAG,WAAW,MAAM,GAAG,KAAK,KAAK,CAAC,CAAC;AAClE;;AAGF,OAAK,KAAK,KAAK;AACf,YAAU,IAAI,WAAW,KAAK;AAE9B,MAAI,KAAK,WACP,qBAAoB,IAAI,aAAa,KAAK;;AAI9C,QAAO;;AAGT,SAAgB,eAAe,SAAiC,SAAiC,QAAyC;CACxI,MAAM,qCAAqB,IAAI,KAAa;AAC5C,MAAK,MAAM,QAAQ,SAAS;EAC1B,MAAM,EAAE,SAAS;AACjB,MAAI,CAAC,KACH;AAGF,MAAI,MAAM,QAAQ,KAAK,EAAE;AACvB,QAAK,MAAM,SAAS,KAClB,KAAI,MACF,oBAAmB,IAAI,MAAM;AAGjC;;AAGF,qBAAmB,IAAI,KAAK;;CAG9B,MAAM,6BAAa,IAAI,KAAsB;CAC7C,MAAM,qBAAqB,eAAgC;AACzD,MAAI,CAAC,OACH,QAAO;EAGT,MAAM,SAAS,WAAW,IAAI,WAAW;AACzC,MAAI,WAAW,OACb,QAAO;EAGT,MAAM,SAAS,OAAO,SAAS,WAAW,IAAI,mBAAmB,IAAI,WAAW;AAChF,aAAW,IAAI,YAAY,OAAO;AAElC,SAAO;;CAGT,MAAM,SAAS,QAAQ,SAAS;GAC7B,MAAM,CAAC,CAAC,MAAM,QAAQ,EAAE,KAAK;GAC7B,MAAM,CAAC,EAAE;GACT,MAAM,EAAE;GACR,MAAM,CAAC,CAAC,EAAE;GACV,MAAO,MAAM,QAAQ,EAAE,KAAK,GAAG,QAAQ,EAAE,KAAK,GAAG,EAAE;EACrD,CAAC;CAEF,MAAMC,OAA+B,EAAE;CAEvC,MAAM,8BAAc,IAAI,KAA8B;CAEtD,MAAM,4BAAY,IAAI,KAA8B;CAEpD,MAAM,sCAAsB,IAAI,KAA8B;AAE9D,MAAK,MAAM,QAAQ,QAAQ;EACzB,IAAI,OAAO,MAAM,QAAQ,KAAK,KAAK,GAAG,CAAC,GAAG,IAAI,IAAI,KAAK,KAAK,CAAC,GAAG,KAAK;AAErE,MAAI,KAAK,SAAS,KAAK,KAErB;AAIF,MAAI,MAAM,QAAQ,KAAK,CACrB,QAAO,KAAK,QAAQ,SAAU,OAAO,SAAS,WAAW,kBAAkB,KAAK,GAAG,kBAAkB,KAAK,aAAa,CAAE;EAG3H,MAAM,cAAc,GAAG,KAAK,KAAK,GAAG,KAAK;EACzC,MAAM,aAAa,YAAY,IAAI,YAAY;EAG/C,MAAM,UAAU,MAAM,QAAQ,KAAK,GAAG,KAAK,UAAU,KAAK,GAAG,QAAQ;EACrE,MAAM,kBAAkB,GAAG,KAAK,KAAK,GAAG,QAAQ,GAAG,KAAK;EACxD,MAAM,aAAa,UAAU,IAAI,gBAAgB;EAEjD,MAAM,cAAc,GAAG,KAAK,KAAK,GAAG;AAGpC,MAFoC,oBAAoB,IAAI,YAAY,CAItE;AAIF,MAAI,cAAe,MAAM,QAAQ,KAAK,IAAI,CAAC,KAAK,OAC9C;AAIF,MAAI,CAAC,YAAY;GACf,MAAM,UAAU;IACd,GAAG;IACH;IACD;AACD,QAAK,KAAK,QAAQ;AAClB,eAAY,IAAI,aAAa,QAAQ;AACrC,aAAU,IAAI,iBAAiB,QAAQ;AAEvC,OAAI,QAAQ,WACV,qBAAoB,IAAI,aAAa,QAAQ;AAE/C;;AAIF,MAAI,cAAc,MAAM,QAAQ,WAAW,KAAK,IAAI,MAAM,QAAQ,KAAK,IAAI,WAAW,eAAe,KAAK,YAAY;AACpH,cAAW,OAAO,CAAC,GAAG,IAAI,IAAI,CAAC,GAAG,WAAW,MAAM,GAAG,KAAK,CAAC,CAAC;AAC7D;;AAIF,MAAI,CAAC,MAAM,QAAQ,KAAK,IAAI,QAAQ,CAAC,kBAAkB,KAAK,CAC1D;AAGF,OAAK,KAAK,KAAK;AACf,YAAU,IAAI,iBAAiB,KAAK;AAEpC,MAAI,KAAK,WACP,qBAAoB,IAAI,aAAa,KAAK;;AAI9C,QAAO;;;;;AAMT,SAAgB,WAA0C,MAA0D;;CAClH,MAAM,UAAU,KAAK,QAAQ,KAAK,SAAS;AAC3C,KAAI,CAAC,QACH,OAAM,IAAI,MAAM,wBAAwB,KAAK,WAAW;CAG1D,MAAM,SAAS,KAAK,QAAQ,KAAK,SAAS,KAAK,MAAM,CAAC,KAAK,OAAO;CAClE,MAAM,4BAAU,KAAK,uEAAS,UAAS,eAAe,KAAK,QAAQ,GAAG,EAAE;CACxE,MAAM,4BAAU,KAAK,uEAAS,WAAU,SAAS,eAAe,KAAK,SAAS,SAAS,OAAO,GAAG,EAAE;CACnG,MAAM,4BAAU,KAAK,uEAAS,UAAS,eAAe,KAAK,QAAQ,GAAG,EAAE;AAExE,QAAO;EACL,GAAG;EACH,IAAI,WAAW,SAAS,CAAC,OAAO,KAAK,KAAK,CAAC,OAAO,MAAM;EACxD,MAAM,YAAY,KAAK,SAAS;EAChC;EACS;EACA;EACA;EACT,MAAM,KAAK,QAAS,EAAE;EACvB"}
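The map added above embeds the full `src/createFile.ts`, which also shows how `combineImports` prunes imports: an import name is kept only if it occurs in the generated source text or is re-exported, with the lookup cached per name. Below is a hedged, standalone sketch of that check; the `Import` type is a stand-in for `KubbFile.Import`, and the real implementation additionally merges and deduplicates entries by path and `isTypeOnly`:

```ts
// Hedged sketch of the unused-import check visible in the embedded createFile.ts.
// Only the "is the name referenced in the generated source?" filter is shown.
type Import = { name: string | Array<string>; path: string; isTypeOnly?: boolean }

function dropUnusedImports(imports: Array<Import>, source: string): Array<Import> {
  const isUsed = (importName: string) => source.includes(importName)

  return imports
    .map((imp) => ({
      ...imp,
      // for array-style imports, drop the individual names that never appear in the source
      name: Array.isArray(imp.name) ? imp.name.filter(isUsed) : imp.name,
    }))
    .filter((imp) => (Array.isArray(imp.name) ? imp.name.length > 0 : isUsed(imp.name)))
}

const kept = dropUnusedImports(
  [{ name: ['Pet', 'Order'], path: './models.ts', isTypeOnly: true }],
  'export function getPet(): Pet { return { id: 1 } }',
)
// kept -> [{ name: ['Pet'], path: './models.ts', isTypeOnly: true }]
```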

package/dist/{defineProperty-Db0mqPmh.cjs → defineProperty-hUmuXj5B.cjs}
CHANGED

@@ -4,7 +4,7 @@ let node_crypto = require("node:crypto");
 let node_path = require("node:path");
 node_path = require_trimExtName.__toESM(node_path);

-//#region ../../node_modules/.pnpm/remeda@2.
+//#region ../../node_modules/.pnpm/remeda@2.33.1/node_modules/remeda/dist/utilityEvaluators-D7O7U3FY.js
 const e = {
 done: !0,
 hasNext: !1
@@ -18,7 +18,7 @@ const e = {
 });

 //#endregion
-//#region ../../node_modules/.pnpm/remeda@2.
+//#region ../../node_modules/.pnpm/remeda@2.33.1/node_modules/remeda/dist/pipe-CRsqNjLF.js
 function t$1(e$1, ...t$3) {
 let a = e$1, o = t$3.map((e$2) => `lazy` in e$2 ? r$1(e$2) : void 0), s = 0;
 for (; s < t$3.length;) {
@@ -71,7 +71,7 @@ function i(e$1) {
 }

 //#endregion
-//#region ../../node_modules/.pnpm/remeda@2.
+//#region ../../node_modules/.pnpm/remeda@2.33.1/node_modules/remeda/dist/purryFromLazy-C1RBdBlx.js
 function t(t$3, n$3) {
 let r$3 = n$3.length - t$3.length;
 if (r$3 === 1) {
@@ -92,7 +92,7 @@ function t(t$3, n$3) {
 }

 //#endregion
-//#region ../../node_modules/.pnpm/remeda@2.
+//#region ../../node_modules/.pnpm/remeda@2.33.1/node_modules/remeda/dist/uniqueBy.js
 function n(...e$1) {
 return t(r, e$1);
 }
@@ -363,4 +363,4 @@ Object.defineProperty(exports, 'createFile', {
 return createFile;
 }
 });
-//# sourceMappingURL=defineProperty-
+//# sourceMappingURL=defineProperty-hUmuXj5B.cjs.map

package/dist/defineProperty-hUmuXj5B.cjs.map
ADDED

@@ -0,0 +1 @@
+
{"version":3,"file":"defineProperty-hUmuXj5B.cjs","names":["t","n","r","e","t","e","r","n","i","r","n","t","i","e","e","n","t","r","i","uniqueBy","exports","prev: Array<KubbFile.Export>","prev: Array<KubbFile.Import>","path","trimExtName"],"sources":["../../../node_modules/.pnpm/remeda@2.33.1/node_modules/remeda/dist/utilityEvaluators-D7O7U3FY.js","../../../node_modules/.pnpm/remeda@2.33.1/node_modules/remeda/dist/pipe-CRsqNjLF.js","../../../node_modules/.pnpm/remeda@2.33.1/node_modules/remeda/dist/purryFromLazy-C1RBdBlx.js","../../../node_modules/.pnpm/remeda@2.33.1/node_modules/remeda/dist/uniqueBy.js","../src/createFile.ts"],"sourcesContent":["const e={done:!0,hasNext:!1},t={done:!1,hasNext:!1},n=()=>e,r=e=>({hasNext:!0,next:e,done:!1});export{n,r,t};\n//# sourceMappingURL=utilityEvaluators-D7O7U3FY.js.map","import{t as e}from\"./utilityEvaluators-D7O7U3FY.js\";function t(e,...t){let a=e,o=t.map(e=>`lazy`in e?r(e):void 0),s=0;for(;s<t.length;){if(o[s]===void 0||!i(a)){let e=t[s];a=e(a),s+=1;continue}let e=[];for(let n=s;n<t.length;n++){let t=o[n];if(t===void 0||(e.push(t),t.isSingle))break}let r=[];for(let t of a)if(n(t,r,e))break;let{isSingle:c}=e.at(-1);a=c?r[0]:r,s+=e.length}return a}function n(t,r,i){if(i.length===0)return r.push(t),!1;let a=t,o=e,s=!1;for(let[e,t]of i.entries()){let{index:c,items:l}=t;if(l.push(a),o=t(a,c,l),t.index+=1,o.hasNext){if(o.hasMany??!1){for(let t of o.next)if(n(t,r,i.slice(e+1)))return!0;return s}a=o.next}if(!o.hasNext)break;o.done&&(s=!0)}return o.hasNext&&r.push(a),s}function r(e){let{lazy:t,lazyArgs:n}=e,r=t(...n);return Object.assign(r,{isSingle:t.single??!1,index:0,items:[]})}function i(e){return typeof e==`string`||typeof e==`object`&&!!e&&Symbol.iterator in e}export{t};\n//# sourceMappingURL=pipe-CRsqNjLF.js.map","import{t as e}from\"./pipe-CRsqNjLF.js\";function t(t,n){let r=n.length-t.length;if(r===1){let[r,...i]=n;return e(r,{lazy:t,lazyArgs:i})}if(r===0){let r={lazy:t,lazyArgs:n};return Object.assign(t=>e(t,r),r)}throw Error(`Wrong number of arguments`)}export{t};\n//# sourceMappingURL=purryFromLazy-C1RBdBlx.js.map","import{t as e}from\"./utilityEvaluators-D7O7U3FY.js\";import{t}from\"./purryFromLazy-C1RBdBlx.js\";function n(...e){return t(r,e)}function r(t){let n=t,r=new Set;return(t,i,a)=>{let o=n(t,i,a);return r.has(o)?e:(r.add(o),{done:!1,hasNext:!0,next:t})}}export{n as uniqueBy};\n//# sourceMappingURL=uniqueBy.js.map","import { createHash } from 'node:crypto'\nimport path from 'node:path'\nimport { orderBy } from 'natural-orderby'\nimport { uniqueBy } from 'remeda'\nimport type * as KubbFile from './KubbFile.ts'\nimport { trimExtName } from './utils/trimExtName.ts'\n\nexport function combineSources(sources: Array<KubbFile.Source>): Array<KubbFile.Source> {\n return uniqueBy(sources, (obj) => {\n // For named sources, deduplicate by name, isExportable, and isTypeOnly\n // For unnamed sources, include the value to avoid deduplicating different code blocks\n // If both name and value are undefined, use an empty string as the unique identifier\n const uniqueId = obj.name ?? obj.value ?? ''\n const isExportable = obj.isExportable ?? false\n const isTypeOnly = obj.isTypeOnly ?? false\n return `${uniqueId}:${isExportable}:${isTypeOnly}`\n })\n}\n\nexport function combineExports(exports: Array<KubbFile.Export>): Array<KubbFile.Export> {\n const sorted = orderBy(exports, [\n (v) => !!Array.isArray(v.name),\n (v) => !v.isTypeOnly,\n (v) => v.path,\n (v) => !!v.name,\n (v) => (Array.isArray(v.name) ? 
orderBy(v.name) : v.name),\n ])\n\n const prev: Array<KubbFile.Export> = []\n // Map to track items by path for O(1) lookup\n const pathMap = new Map<string, KubbFile.Export>()\n // Map to track unique items by path+name+isTypeOnly+asAlias\n const uniqueMap = new Map<string, KubbFile.Export>()\n // Map to track items by path+name where isTypeOnly=true (for type-only check)\n const pathNameTypeTrueMap = new Map<string, KubbFile.Export>()\n\n for (const curr of sorted) {\n const name = curr.name\n const pathKey = curr.path\n const prevByPath = pathMap.get(pathKey)\n\n // Create unique key for path+name+isTypeOnly\n const nameKey = Array.isArray(name) ? JSON.stringify(name) : name || ''\n const pathNameTypeKey = `${pathKey}:${nameKey}:${curr.isTypeOnly}`\n // Check if there's already an item with the same path+name but with isTypeOnly=true\n const pathNameKey = `${pathKey}:${nameKey}`\n const prevByPathAndIsTypeOnly = pathNameTypeTrueMap.get(pathNameKey)\n\n if (prevByPathAndIsTypeOnly) {\n // we already have an export that has the same path and name but uses `isTypeOnly` (export type ...)\n continue\n }\n\n // Create unique key for path+name+isTypeOnly+asAlias\n const uniqueKey = `${pathNameTypeKey}:${curr.asAlias || ''}`\n const uniquePrev = uniqueMap.get(uniqueKey)\n\n // we already have an item that was unique enough or name field is empty or prev asAlias is set but current has no changes\n if (uniquePrev || (Array.isArray(name) && !name.length) || (prevByPath?.asAlias && !curr.asAlias)) {\n continue\n }\n\n if (!prevByPath) {\n const newItem = {\n ...curr,\n name: Array.isArray(name) ? [...new Set(name)] : name,\n }\n prev.push(newItem)\n pathMap.set(pathKey, newItem)\n uniqueMap.set(uniqueKey, newItem)\n // Track items with isTypeOnly=true for the type-only check\n if (newItem.isTypeOnly) {\n pathNameTypeTrueMap.set(pathNameKey, newItem)\n }\n continue\n }\n\n // merge all names when prev and current both have the same isTypeOnly set\n if (prevByPath && Array.isArray(prevByPath.name) && Array.isArray(curr.name) && prevByPath.isTypeOnly === curr.isTypeOnly) {\n prevByPath.name = [...new Set([...prevByPath.name, ...curr.name])]\n continue\n }\n\n prev.push(curr)\n uniqueMap.set(uniqueKey, curr)\n // Track items with isTypeOnly=true for the type-only check\n if (curr.isTypeOnly) {\n pathNameTypeTrueMap.set(pathNameKey, curr)\n }\n }\n\n return prev\n}\n\nexport function combineImports(imports: Array<KubbFile.Import>, exports: Array<KubbFile.Export>, source?: string): Array<KubbFile.Import> {\n const exportedNameLookup = new Set<string>()\n for (const item of exports) {\n const { name } = item\n if (!name) {\n continue\n }\n\n if (Array.isArray(name)) {\n for (const value of name) {\n if (value) {\n exportedNameLookup.add(value)\n }\n }\n continue\n }\n\n exportedNameLookup.add(name)\n }\n\n const usageCache = new Map<string, boolean>()\n const hasImportInSource = (importName: string): boolean => {\n if (!source) {\n return true\n }\n\n const cached = usageCache.get(importName)\n if (cached !== undefined) {\n return cached\n }\n\n const isUsed = source.includes(importName) || exportedNameLookup.has(importName)\n usageCache.set(importName, isUsed)\n\n return isUsed\n }\n\n const sorted = orderBy(imports, [\n (v) => !!Array.isArray(v.name),\n (v) => !v.isTypeOnly,\n (v) => v.path,\n (v) => !!v.name,\n (v) => (Array.isArray(v.name) ? 
orderBy(v.name) : v.name),\n ])\n\n const prev: Array<KubbFile.Import> = []\n // Map to track items by path+isTypeOnly for O(1) lookup\n const pathTypeMap = new Map<string, KubbFile.Import>()\n // Map to track unique items by path+name+isTypeOnly\n const uniqueMap = new Map<string, KubbFile.Import>()\n // Map to track items by path+name where isTypeOnly=true (for type-only check)\n const pathNameTypeTrueMap = new Map<string, KubbFile.Import>()\n\n for (const curr of sorted) {\n let name = Array.isArray(curr.name) ? [...new Set(curr.name)] : curr.name\n\n if (curr.path === curr.root) {\n // root and path are the same file, remove the \"./\" import\n continue\n }\n\n // merge all names and check if the importName is being used in the generated source and if not filter those imports out\n if (Array.isArray(name)) {\n name = name.filter((item) => (typeof item === 'string' ? hasImportInSource(item) : hasImportInSource(item.propertyName)))\n }\n\n const pathTypeKey = `${curr.path}:${curr.isTypeOnly}`\n const prevByPath = pathTypeMap.get(pathTypeKey)\n\n // Create key for name comparison\n const nameKey = Array.isArray(name) ? JSON.stringify(name) : name || ''\n const pathNameTypeKey = `${curr.path}:${nameKey}:${curr.isTypeOnly}`\n const uniquePrev = uniqueMap.get(pathNameTypeKey)\n // Check if there's already an item with the same path+name but with isTypeOnly=true\n const pathNameKey = `${curr.path}:${nameKey}`\n const prevByPathNameAndIsTypeOnly = pathNameTypeTrueMap.get(pathNameKey)\n\n if (prevByPathNameAndIsTypeOnly) {\n // we already have an import that has the same path and name but uses `isTypeOnly` (import type ...)\n continue\n }\n\n // already unique enough or name is empty\n if (uniquePrev || (Array.isArray(name) && !name.length)) {\n continue\n }\n\n // new item, append name\n if (!prevByPath) {\n const newItem = {\n ...curr,\n name,\n }\n prev.push(newItem)\n pathTypeMap.set(pathTypeKey, newItem)\n uniqueMap.set(pathNameTypeKey, newItem)\n // Track items with isTypeOnly=true for the type-only check\n if (newItem.isTypeOnly) {\n pathNameTypeTrueMap.set(pathNameKey, newItem)\n }\n continue\n }\n\n // merge all names when prev and current both have the same isTypeOnly set\n if (prevByPath && Array.isArray(prevByPath.name) && Array.isArray(name) && prevByPath.isTypeOnly === curr.isTypeOnly) {\n prevByPath.name = [...new Set([...prevByPath.name, ...name])]\n continue\n }\n\n // no import was found in the source, ignore import\n if (!Array.isArray(name) && name && !hasImportInSource(name)) {\n continue\n }\n\n prev.push(curr)\n uniqueMap.set(pathNameTypeKey, curr)\n // Track items with isTypeOnly=true for the type-only check\n if (curr.isTypeOnly) {\n pathNameTypeTrueMap.set(pathNameKey, curr)\n }\n }\n\n return prev\n}\n\n/**\n * Helper to create a file with name and id set\n */\nexport function createFile<TMeta extends object = object>(file: KubbFile.File<TMeta>): KubbFile.ResolvedFile<TMeta> {\n const extname = path.extname(file.baseName) as KubbFile.Extname\n if (!extname) {\n throw new Error(`No extname found for ${file.baseName}`)\n }\n\n const source = file.sources.map((item) => item.value).join('\\n\\n')\n const exports = file.exports?.length ? combineExports(file.exports) : []\n const imports = file.imports?.length && source ? combineImports(file.imports, exports, source) : []\n const sources = file.sources?.length ? 
combineSources(file.sources) : []\n\n return {\n ...file,\n id: createHash('sha256').update(file.path).digest('hex'),\n name: trimExtName(file.baseName),\n extname,\n imports: imports,\n exports: exports,\n sources: sources,\n meta: file.meta || ({} as TMeta),\n }\n}\n"],"x_google_ignoreList":[0,1,2,3],"mappings":";;;;;;;AAAA,MAAM,IAAE;CAAC,MAAK,CAAC;CAAE,SAAQ,CAAC;CAAE,EAACA,MAAE;CAAC,MAAK,CAAC;CAAE,SAAQ,CAAC;CAAE,EAACC,YAAM,GAAEC,OAAE,SAAI;CAAC,SAAQ,CAAC;CAAE,MAAKC;CAAE,MAAK,CAAC;CAAE;;;;ACAzC,SAASC,IAAE,KAAE,GAAGA,KAAE;CAAC,IAAI,IAAEC,KAAE,IAAED,IAAE,KAAI,QAAG,UAASC,MAAEC,IAAED,IAAE,GAAC,KAAK,EAAE,EAAC,IAAE;AAAE,QAAK,IAAED,IAAE,SAAQ;AAAC,MAAG,EAAE,OAAK,KAAK,KAAG,CAAC,EAAE,EAAE,EAAC;GAAC,IAAIC,MAAED,IAAE;AAAG,OAAEC,IAAE,EAAE,EAAC,KAAG;AAAE;;EAAS,IAAIA,MAAE,EAAE;AAAC,OAAI,IAAIE,MAAE,GAAEA,MAAEH,IAAE,QAAO,OAAI;GAAC,IAAIA,MAAE,EAAEG;AAAG,OAAGH,QAAI,KAAK,MAAIC,IAAE,KAAKD,IAAE,EAACA,IAAE,UAAU;;EAAM,IAAIE,MAAE,EAAE;AAAC,OAAI,IAAIF,OAAK,EAAE,KAAGG,IAAEH,KAAEE,KAAED,IAAE,CAAC;EAAM,IAAG,EAAC,UAAS,MAAGA,IAAE,GAAG,GAAG;AAAC,MAAE,IAAEC,IAAE,KAAGA,KAAE,KAAGD,IAAE;;AAAO,QAAO;;AAAE,SAASE,IAAE,KAAE,KAAE,KAAE;AAAC,KAAGC,IAAE,WAAS,EAAE,QAAOF,IAAE,KAAKF,IAAE,EAAC,CAAC;CAAE,IAAI,IAAEA,KAAE,IAAEC,KAAE,IAAE,CAAC;AAAE,MAAI,IAAG,CAACA,KAAED,QAAKI,IAAE,SAAS,EAAC;EAAC,IAAG,EAAC,OAAM,GAAE,OAAM,MAAGJ;AAAE,MAAG,EAAE,KAAK,EAAE,EAAC,IAAEA,IAAE,GAAE,GAAE,EAAE,EAAC,IAAE,SAAO,GAAE,EAAE,SAAQ;;AAAC,qBAAG,EAAE,0DAAS,CAAC,GAAE;AAAC,SAAI,IAAIA,OAAK,EAAE,KAAK,KAAGG,IAAEH,KAAEE,KAAEE,IAAE,MAAMH,MAAE,EAAE,CAAC,CAAC,QAAM,CAAC;AAAE,WAAO;;AAAE,OAAE,EAAE;;AAAK,MAAG,CAAC,EAAE,QAAQ;AAAM,IAAE,SAAO,IAAE,CAAC;;AAAG,QAAO,EAAE,WAASC,IAAE,KAAK,EAAE,EAAC;;AAAE,SAASA,IAAE,KAAE;;CAAC,IAAG,EAAC,MAAKF,KAAE,UAASG,QAAGF,KAAEC,MAAEF,IAAE,GAAGG,IAAE;AAAC,QAAO,OAAO,OAAOD,KAAE;EAAC,uBAASF,IAAE,uDAAQ,CAAC;EAAE,OAAM;EAAE,OAAM,EAAE;EAAC,CAAC;;AAAC,SAAS,EAAE,KAAE;AAAC,QAAO,OAAOC,OAAG,YAAU,OAAOA,OAAG,YAAU,CAAC,CAACA,OAAG,OAAO,YAAYA;;;;;ACAj2B,SAAS,EAAE,KAAE,KAAE;CAAC,IAAII,MAAEC,IAAE,SAAOC,IAAE;AAAO,KAAGF,QAAI,GAAE;EAAC,IAAG,CAACA,KAAE,GAAGG,OAAGF;AAAE,SAAOG,IAAEJ,KAAE;GAAC,MAAKE;GAAE,UAASC;GAAE,CAAC;;AAAC,KAAGH,QAAI,GAAE;EAAC,IAAIA,MAAE;GAAC,MAAKE;GAAE,UAASD;GAAE;AAAC,SAAO,OAAO,QAAO,QAAGG,IAAEF,KAAEF,IAAE,EAACA,IAAE;;AAAC,OAAM,MAAM,4BAA4B;;;;;ACAtJ,SAAS,EAAE,GAAGK,KAAE;AAAC,QAAO,EAAE,GAAEA,IAAE;;AAAC,SAAS,EAAE,KAAE;CAAC,IAAIC,MAAEC,KAAEC,sBAAE,IAAI,KAAG;AAAC,SAAO,KAAE,KAAE,MAAI;EAAC,IAAI,IAAEF,IAAEC,KAAEE,KAAE,EAAE;AAAC,SAAOD,IAAE,IAAI,EAAE,GAACH,OAAGG,IAAE,IAAI,EAAE,EAAC;GAAC,MAAK,CAAC;GAAE,SAAQ,CAAC;GAAE,MAAKD;GAAE;;;;;;ACOpP,SAAgB,eAAe,SAAyD;AACtF,QAAOG,EAAS,UAAU,QAAQ;;AAOhC,SAAO,wBAHU,IAAI,qDAAQ,IAAI,4CAAS,GAGvB,wBAFE,IAAI,6EAAgB,MAEN,sBADhB,IAAI,uEAAc;GAErC;;AAGJ,SAAgB,eAAe,WAAyD;CACtF,MAAM,sCAAiBC,WAAS;GAC7B,MAAM,CAAC,CAAC,MAAM,QAAQ,EAAE,KAAK;GAC7B,MAAM,CAAC,EAAE;GACT,MAAM,EAAE;GACR,MAAM,CAAC,CAAC,EAAE;GACV,MAAO,MAAM,QAAQ,EAAE,KAAK,gCAAW,EAAE,KAAK,GAAG,EAAE;EACrD,CAAC;CAEF,MAAMC,OAA+B,EAAE;CAEvC,MAAM,0BAAU,IAAI,KAA8B;CAElD,MAAM,4BAAY,IAAI,KAA8B;CAEpD,MAAM,sCAAsB,IAAI,KAA8B;AAE9D,MAAK,MAAM,QAAQ,QAAQ;EACzB,MAAM,OAAO,KAAK;EAClB,MAAM,UAAU,KAAK;EACrB,MAAM,aAAa,QAAQ,IAAI,QAAQ;EAGvC,MAAM,UAAU,MAAM,QAAQ,KAAK,GAAG,KAAK,UAAU,KAAK,GAAG,QAAQ;EACrE,MAAM,kBAAkB,GAAG,QAAQ,GAAG,QAAQ,GAAG,KAAK;EAEtD,MAAM,cAAc,GAAG,QAAQ,GAAG;AAGlC,MAFgC,oBAAoB,IAAI,YAAY,CAIlE;EAIF,MAAM,YAAY,GAAG,gBAAgB,GAAG,KAAK,WAAW;AAIxD,MAHmB,UAAU,IAAI,UAAU,IAGxB,MAAM,QAAQ,KAAK,IAAI,CAAC,KAAK,mEAAY,WAAY,YAAW,CAAC,KAAK,QACvF;AAGF,MAAI,CAAC,YAAY;GACf,MAAM,UAAU;IACd,GAAG;IACH,MAAM,MAAM,QAAQ,KAAK,GAAG,CAAC,GAAG,IAAI,IAAI,KAAK,CAAC,GAAG;IAClD;AACD,QAAK,KAAK,QAAQ;AAClB,WAAQ,IAAI,SAAS,QAAQ;AAC7B,aAAU,IAAI
,WAAW,QAAQ;AAEjC,OAAI,QAAQ,WACV,qBAAoB,IAAI,aAAa,QAAQ;AAE/C;;AAIF,MAAI,cAAc,MAAM,QAAQ,WAAW,KAAK,IAAI,MAAM,QAAQ,KAAK,KAAK,IAAI,WAAW,eAAe,KAAK,YAAY;AACzH,cAAW,OAAO,CAAC,GAAG,IAAI,IAAI,CAAC,GAAG,WAAW,MAAM,GAAG,KAAK,KAAK,CAAC,CAAC;AAClE;;AAGF,OAAK,KAAK,KAAK;AACf,YAAU,IAAI,WAAW,KAAK;AAE9B,MAAI,KAAK,WACP,qBAAoB,IAAI,aAAa,KAAK;;AAI9C,QAAO;;AAGT,SAAgB,eAAe,SAAiC,WAAiC,QAAyC;CACxI,MAAM,qCAAqB,IAAI,KAAa;AAC5C,MAAK,MAAM,QAAQD,WAAS;EAC1B,MAAM,EAAE,SAAS;AACjB,MAAI,CAAC,KACH;AAGF,MAAI,MAAM,QAAQ,KAAK,EAAE;AACvB,QAAK,MAAM,SAAS,KAClB,KAAI,MACF,oBAAmB,IAAI,MAAM;AAGjC;;AAGF,qBAAmB,IAAI,KAAK;;CAG9B,MAAM,6BAAa,IAAI,KAAsB;CAC7C,MAAM,qBAAqB,eAAgC;AACzD,MAAI,CAAC,OACH,QAAO;EAGT,MAAM,SAAS,WAAW,IAAI,WAAW;AACzC,MAAI,WAAW,OACb,QAAO;EAGT,MAAM,SAAS,OAAO,SAAS,WAAW,IAAI,mBAAmB,IAAI,WAAW;AAChF,aAAW,IAAI,YAAY,OAAO;AAElC,SAAO;;CAGT,MAAM,sCAAiB,SAAS;GAC7B,MAAM,CAAC,CAAC,MAAM,QAAQ,EAAE,KAAK;GAC7B,MAAM,CAAC,EAAE;GACT,MAAM,EAAE;GACR,MAAM,CAAC,CAAC,EAAE;GACV,MAAO,MAAM,QAAQ,EAAE,KAAK,gCAAW,EAAE,KAAK,GAAG,EAAE;EACrD,CAAC;CAEF,MAAME,OAA+B,EAAE;CAEvC,MAAM,8BAAc,IAAI,KAA8B;CAEtD,MAAM,4BAAY,IAAI,KAA8B;CAEpD,MAAM,sCAAsB,IAAI,KAA8B;AAE9D,MAAK,MAAM,QAAQ,QAAQ;EACzB,IAAI,OAAO,MAAM,QAAQ,KAAK,KAAK,GAAG,CAAC,GAAG,IAAI,IAAI,KAAK,KAAK,CAAC,GAAG,KAAK;AAErE,MAAI,KAAK,SAAS,KAAK,KAErB;AAIF,MAAI,MAAM,QAAQ,KAAK,CACrB,QAAO,KAAK,QAAQ,SAAU,OAAO,SAAS,WAAW,kBAAkB,KAAK,GAAG,kBAAkB,KAAK,aAAa,CAAE;EAG3H,MAAM,cAAc,GAAG,KAAK,KAAK,GAAG,KAAK;EACzC,MAAM,aAAa,YAAY,IAAI,YAAY;EAG/C,MAAM,UAAU,MAAM,QAAQ,KAAK,GAAG,KAAK,UAAU,KAAK,GAAG,QAAQ;EACrE,MAAM,kBAAkB,GAAG,KAAK,KAAK,GAAG,QAAQ,GAAG,KAAK;EACxD,MAAM,aAAa,UAAU,IAAI,gBAAgB;EAEjD,MAAM,cAAc,GAAG,KAAK,KAAK,GAAG;AAGpC,MAFoC,oBAAoB,IAAI,YAAY,CAItE;AAIF,MAAI,cAAe,MAAM,QAAQ,KAAK,IAAI,CAAC,KAAK,OAC9C;AAIF,MAAI,CAAC,YAAY;GACf,MAAM,UAAU;IACd,GAAG;IACH;IACD;AACD,QAAK,KAAK,QAAQ;AAClB,eAAY,IAAI,aAAa,QAAQ;AACrC,aAAU,IAAI,iBAAiB,QAAQ;AAEvC,OAAI,QAAQ,WACV,qBAAoB,IAAI,aAAa,QAAQ;AAE/C;;AAIF,MAAI,cAAc,MAAM,QAAQ,WAAW,KAAK,IAAI,MAAM,QAAQ,KAAK,IAAI,WAAW,eAAe,KAAK,YAAY;AACpH,cAAW,OAAO,CAAC,GAAG,IAAI,IAAI,CAAC,GAAG,WAAW,MAAM,GAAG,KAAK,CAAC,CAAC;AAC7D;;AAIF,MAAI,CAAC,MAAM,QAAQ,KAAK,IAAI,QAAQ,CAAC,kBAAkB,KAAK,CAC1D;AAGF,OAAK,KAAK,KAAK;AACf,YAAU,IAAI,iBAAiB,KAAK;AAEpC,MAAI,KAAK,WACP,qBAAoB,IAAI,aAAa,KAAK;;AAI9C,QAAO;;;;;AAMT,SAAgB,WAA0C,MAA0D;;CAClH,MAAM,UAAUC,kBAAK,QAAQ,KAAK,SAAS;AAC3C,KAAI,CAAC,QACH,OAAM,IAAI,MAAM,wBAAwB,KAAK,WAAW;CAG1D,MAAM,SAAS,KAAK,QAAQ,KAAK,SAAS,KAAK,MAAM,CAAC,KAAK,OAAO;CAClE,MAAMH,8BAAU,KAAK,uEAAS,UAAS,eAAe,KAAK,QAAQ,GAAG,EAAE;CACxE,MAAM,4BAAU,KAAK,uEAAS,WAAU,SAAS,eAAe,KAAK,SAASA,WAAS,OAAO,GAAG,EAAE;CACnG,MAAM,4BAAU,KAAK,uEAAS,UAAS,eAAe,KAAK,QAAQ,GAAG,EAAE;AAExE,QAAO;EACL,GAAG;EACH,gCAAe,SAAS,CAAC,OAAO,KAAK,KAAK,CAAC,OAAO,MAAM;EACxD,MAAMI,gCAAY,KAAK,SAAS;EAChC;EACS;EACT,SAASJ;EACA;EACT,MAAM,KAAK,QAAS,EAAE;EACvB"}
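This CJS map embeds the same `createFile.ts` source, from which the shape of `createFile` can be read directly: it hashes the file path into `id`, trims the extension off `baseName` for `name`, runs the combine helpers over sources, exports and imports, and throws if `baseName` has no extension. A hedged usage sketch follows; the import specifier and the exact `KubbFile.File` fields are assumptions:

```ts
// Hedged sketch based on the createFile() source embedded in the map above.
// Assumption: createFile is re-exported from the package entry point.
import { createFile } from '@kubb/fabric-core'

const file = createFile({
  path: '/gen/models/pet.ts',
  baseName: 'pet.ts', // must carry an extension, otherwise createFile throws
  sources: [{ name: 'Pet', value: 'export type Pet = { id: number }', isExportable: true, isTypeOnly: true }],
})

// Derived fields, per the embedded source:
// file.id      -> sha256 hex digest of '/gen/models/pet.ts'
// file.name    -> 'pet' (baseName with the extension trimmed)
// file.extname -> '.ts'
```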
package/dist/index.cjs
CHANGED
@@ -1,12 +1,12 @@
 const require_trimExtName = require('./trimExtName-DaBSwMN-.cjs');
-const require_defineProperty = require('./defineProperty-
+const require_defineProperty = require('./defineProperty-hUmuXj5B.cjs');
 const require_defaultParser = require('./defaultParser-CIF-0xIK.cjs');
 let natural_orderby = require("natural-orderby");
 let p_limit = require("p-limit");
 p_limit = require_trimExtName.__toESM(p_limit);
 let node_events = require("node:events");

-//#region ../../node_modules/.pnpm/remeda@2.
+//#region ../../node_modules/.pnpm/remeda@2.33.1/node_modules/remeda/dist/isFunction.js
 const e = (e$1) => typeof e$1 == `function`;

 //#endregion
package/dist/index.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.cjs","names":["e","NodeEventEmitter","errors: Error[]","defaultParser","resolvedFiles: Array<KubbFile.ResolvedFile>","createFile","trimExtName","files: Array<KubbFile.ResolvedFile>","context: FabricContext<T>","fabric: Fabric<T>","isFunction"],"sources":["../../../node_modules/.pnpm/remeda@2.32.2/node_modules/remeda/dist/isFunction-yJCcR1YM.js","../src/utils/AsyncEventEmitter.ts","../src/FileProcessor.ts","../src/utils/Cache.ts","../src/FileManager.ts","../src/createFabric.ts"],"sourcesContent":["const e=e=>typeof e==`function`;export{e as t};\n//# sourceMappingURL=isFunction-yJCcR1YM.js.map","import { EventEmitter as NodeEventEmitter } from 'node:events'\nimport type { FabricMode } from '../Fabric.ts'\n\ntype Options = {\n mode?: FabricMode\n maxListener?: number\n}\n\nexport class AsyncEventEmitter<TEvents extends Record<string, any>> {\n constructor({ maxListener = 100, mode = 'sequential' }: Options = {}) {\n this.#emitter.setMaxListeners(maxListener)\n this.#mode = mode\n }\n\n #emitter = new NodeEventEmitter()\n #mode: FabricMode\n\n async emit<TEventName extends keyof TEvents & string>(eventName: TEventName, ...eventArgs: TEvents[TEventName]): Promise<void> {\n const listeners = this.#emitter.listeners(eventName) as Array<(...args: TEvents[TEventName]) => any>\n\n if (listeners.length === 0) {\n return\n }\n\n const errors: Error[] = []\n\n if (this.#mode === 'sequential') {\n // Run listeners one by one, in order\n for (const listener of listeners) {\n try {\n await listener(...eventArgs)\n } catch (err) {\n const error = err instanceof Error ? err : new Error(String(err))\n errors.push(error)\n }\n }\n } else {\n // Run all listeners concurrently\n const promises = listeners.map(async (listener) => {\n try {\n await listener(...eventArgs)\n } catch (err) {\n const error = err instanceof Error ? 
err : new Error(String(err))\n errors.push(error)\n }\n })\n await Promise.all(promises)\n }\n\n if (errors.length === 1) {\n throw errors[0]\n }\n\n if (errors.length > 1) {\n throw new AggregateError(errors, `Errors in async listeners for \"${eventName}\"`)\n }\n }\n\n on<TEventName extends keyof TEvents & string>(eventName: TEventName, handler: (...eventArg: TEvents[TEventName]) => void): void {\n this.#emitter.on(eventName, handler as any)\n }\n\n onOnce<TEventName extends keyof TEvents & string>(eventName: TEventName, handler: (...eventArgs: TEvents[TEventName]) => void): void {\n const wrapper = (...args: TEvents[TEventName]) => {\n this.off(eventName, wrapper)\n handler(...args)\n }\n this.on(eventName, wrapper)\n }\n\n off<TEventName extends keyof TEvents & string>(eventName: TEventName, handler: (...eventArg: TEvents[TEventName]) => void): void {\n this.#emitter.off(eventName, handler as any)\n }\n\n removeAll(): void {\n this.#emitter.removeAllListeners()\n }\n}\n","import pLimit from 'p-limit'\nimport type { FabricEvents, FabricMode } from './Fabric.ts'\nimport type * as KubbFile from './KubbFile.ts'\nimport { defaultParser } from './parsers/defaultParser.ts'\nimport type { Parser } from './parsers/types.ts'\nimport { AsyncEventEmitter } from './utils/AsyncEventEmitter.ts'\n\nexport type ProcessFilesProps = {\n parsers?: Map<KubbFile.Extname, Parser>\n extension?: Record<KubbFile.Extname, KubbFile.Extname | ''>\n dryRun?: boolean\n /**\n * @default 'sequential'\n */\n mode?: FabricMode\n}\n\ntype GetParseOptions = {\n parsers?: Map<KubbFile.Extname, Parser>\n extension?: Record<KubbFile.Extname, KubbFile.Extname | ''>\n}\n\ntype Options = {\n events?: AsyncEventEmitter<FabricEvents>\n}\n\nexport class FileProcessor {\n #limit = pLimit(100)\n events: AsyncEventEmitter<FabricEvents>\n\n constructor({ events = new AsyncEventEmitter<FabricEvents>() }: Options = {}) {\n this.events = events\n\n return this\n }\n\n async parse(file: KubbFile.ResolvedFile, { parsers, extension }: GetParseOptions = {}): Promise<string> {\n const parseExtName = extension?.[file.extname] || undefined\n\n if (!parsers) {\n console.warn('No parsers provided, using default parser. If you want to use a specific parser, please provide it in the options.')\n\n return defaultParser.parse(file, { extname: parseExtName })\n }\n\n if (!file.extname) {\n return defaultParser.parse(file, { extname: parseExtName })\n }\n\n const parser = parsers.get(file.extname)\n\n if (!parser) {\n return defaultParser.parse(file, { extname: parseExtName })\n }\n\n return parser.parse(file, { extname: parseExtName })\n }\n\n async run(\n files: Array<KubbFile.ResolvedFile>,\n { parsers, mode = 'sequential', dryRun, extension }: ProcessFilesProps = {},\n ): Promise<KubbFile.ResolvedFile[]> {\n await this.events.emit('files:processing:start', files)\n\n const total = files.length\n let processed = 0\n\n const processOne = async (resolvedFile: KubbFile.ResolvedFile, index: number) => {\n await this.events.emit('file:processing:start', resolvedFile, index, total)\n\n const source = dryRun ? 
undefined : await this.parse(resolvedFile, { extension, parsers })\n\n const currentProcessed = ++processed\n const percentage = (currentProcessed / total) * 100\n\n await this.events.emit('file:processing:update', {\n file: resolvedFile,\n source,\n processed: currentProcessed,\n percentage,\n total,\n })\n\n await this.events.emit('file:processing:end', resolvedFile, index, total)\n }\n\n if (mode === 'sequential') {\n async function* asyncFiles() {\n for (let index = 0; index < files.length; index++) {\n yield [files[index], index] as const\n }\n }\n\n for await (const [file, index] of asyncFiles()) {\n if (file) {\n await processOne(file, index)\n }\n }\n } else {\n const promises = files.map((resolvedFile, index) => this.#limit(() => processOne(resolvedFile, index)))\n await Promise.all(promises)\n }\n\n await this.events.emit('files:processing:end', files)\n\n return files\n }\n}\n","export class Cache<T> {\n #buffer = new Map<string, T>()\n\n get(key: string): T | null {\n return this.#buffer.get(key) ?? null\n }\n\n set(key: string, value: T): void {\n this.#buffer.set(key, value)\n }\n\n delete(key: string): void {\n this.#buffer.delete(key)\n }\n\n clear(): void {\n this.#buffer.clear()\n }\n\n keys(): string[] {\n return [...this.#buffer.keys()]\n }\n\n values(): Array<T> {\n return [...this.#buffer.values()]\n }\n\n flush(): void {\n // No-op for base cache\n }\n}\n","import { orderBy } from 'natural-orderby'\nimport { createFile } from './createFile.ts'\nimport type { FabricEvents } from './Fabric.ts'\nimport { FileProcessor, type ProcessFilesProps } from './FileProcessor.ts'\nimport type * as KubbFile from './KubbFile.ts'\nimport { AsyncEventEmitter } from './utils/AsyncEventEmitter.ts'\nimport { Cache } from './utils/Cache.ts'\nimport { trimExtName } from './utils/trimExtName.ts'\n\nfunction mergeFile<TMeta extends object = object>(a: KubbFile.File<TMeta>, b: KubbFile.File<TMeta>): KubbFile.File<TMeta> {\n return {\n ...a,\n sources: [...(a.sources || []), ...(b.sources || [])],\n imports: [...(a.imports || []), ...(b.imports || [])],\n exports: [...(a.exports || []), ...(b.exports || [])],\n }\n}\n\ntype Options = {\n events?: AsyncEventEmitter<FabricEvents>\n}\n\nexport class FileManager {\n #cache = new Cache<KubbFile.ResolvedFile>()\n #filesCache: Array<KubbFile.ResolvedFile> | null = null\n events: AsyncEventEmitter<FabricEvents>\n processor: FileProcessor\n\n constructor({ events = new AsyncEventEmitter<FabricEvents>() }: Options = {}) {\n this.processor = new FileProcessor({ events })\n\n this.events = events\n return this\n }\n\n #resolvePath(file: KubbFile.File): KubbFile.File {\n this.events.emit('file:resolve:path', file)\n\n return file\n }\n\n #resolveName(file: KubbFile.File): KubbFile.File {\n this.events.emit('file:resolve:name', file)\n\n return file\n }\n\n async add(...files: Array<KubbFile.File>) {\n const resolvedFiles: Array<KubbFile.ResolvedFile> = []\n\n const mergedFiles = new Map<string, KubbFile.File>()\n\n files.forEach((file) => {\n const existing = mergedFiles.get(file.path)\n if (existing) {\n mergedFiles.set(file.path, mergeFile(existing, file))\n } else {\n mergedFiles.set(file.path, file)\n }\n })\n\n for (let file of mergedFiles.values()) {\n file = this.#resolveName(file)\n file = this.#resolvePath(file)\n\n const resolvedFile = createFile(file)\n\n this.#cache.set(resolvedFile.path, resolvedFile)\n this.flush()\n\n resolvedFiles.push(resolvedFile)\n }\n\n await this.events.emit('files:added', resolvedFiles)\n\n return resolvedFiles\n 
}\n\n async upsert(...files: Array<KubbFile.File>) {\n const resolvedFiles: Array<KubbFile.ResolvedFile> = []\n\n const mergedFiles = new Map<string, KubbFile.File>()\n\n files.forEach((file) => {\n const existing = mergedFiles.get(file.path)\n if (existing) {\n mergedFiles.set(file.path, mergeFile(existing, file))\n } else {\n mergedFiles.set(file.path, file)\n }\n })\n\n for (let file of mergedFiles.values()) {\n const existing = this.#cache.get(file.path)\n\n file = this.#resolveName(file)\n file = this.#resolvePath(file)\n\n const merged = existing ? mergeFile(existing, file) : file\n const resolvedFile = createFile(merged)\n\n this.#cache.set(resolvedFile.path, resolvedFile)\n this.flush()\n\n resolvedFiles.push(resolvedFile)\n }\n\n await this.events.emit('files:added', resolvedFiles)\n\n return resolvedFiles\n }\n\n flush() {\n this.#filesCache = null\n this.#cache.flush()\n }\n\n getByPath(path: KubbFile.Path): KubbFile.ResolvedFile | null {\n return this.#cache.get(path)\n }\n\n deleteByPath(path: KubbFile.Path): void {\n this.#cache.delete(path)\n this.#filesCache = null\n }\n\n clear(): void {\n this.#cache.clear()\n this.#filesCache = null\n }\n\n get files(): Array<KubbFile.ResolvedFile> {\n if (this.#filesCache) {\n return this.#filesCache\n }\n\n const cachedKeys = this.#cache.keys()\n\n // order by path length and if file is a barrel file\n const keys = orderBy(cachedKeys, [(v) => v.length, (v) => trimExtName(v).endsWith('index')])\n\n const files: Array<KubbFile.ResolvedFile> = []\n\n for (const key of keys) {\n const file = this.#cache.get(key)\n if (file) {\n files.push(file)\n }\n }\n\n this.#filesCache = files\n\n return files\n }\n\n //TODO add test and check if write of FileManager contains the newly added file\n async write(options: ProcessFilesProps): Promise<KubbFile.ResolvedFile[]> {\n await this.events.emit('files:writing:start', this.files)\n\n const resolvedFiles = await this.processor.run(this.files, options)\n\n this.clear()\n\n await this.events.emit('files:writing:end', resolvedFiles)\n\n return resolvedFiles\n }\n}\n","import { isFunction } from 'remeda'\nimport type { Fabric, FabricConfig, FabricContext, FabricEvents, FabricOptions } from './Fabric.ts'\nimport { FileManager } from './FileManager.ts'\nimport type * as KubbFile from './KubbFile.ts'\nimport type { Parser } from './parsers/types.ts'\nimport type { Plugin } from './plugins/types.ts'\nimport { AsyncEventEmitter } from './utils/AsyncEventEmitter.ts'\n\n/**\n * Creates a new Fabric instance\n *\n * @example\n * const fabric = createFabric()\n * fabric.use(myPlugin())\n */\nexport function createFabric<T extends FabricOptions>(config: FabricConfig<T> = { mode: 'sequential' } as FabricConfig<T>): Fabric<T> {\n const events = new AsyncEventEmitter<FabricEvents>()\n const installedPlugins = new Set<Plugin<any>>()\n const installedParsers = new Map<KubbFile.Extname, Parser<any>>()\n const installedParserNames = new Set<string>()\n const fileManager = new FileManager({ events })\n\n const context: FabricContext<T> = {\n get files() {\n return fileManager.files\n },\n async addFile(...files) {\n await fileManager.add(...files)\n },\n config,\n fileManager,\n installedPlugins,\n installedParsers,\n on: events.on.bind(events),\n off: events.off.bind(events),\n onOnce: events.onOnce.bind(events),\n removeAll: events.removeAll.bind(events),\n emit: events.emit.bind(events),\n } as FabricContext<T>\n\n const fabric: Fabric<T> = {\n context,\n get files() {\n return fileManager.files\n },\n async 
addFile(...files) {\n await fileManager.add(...files)\n },\n async upsertFile(...files) {\n await fileManager.upsert(...files)\n },\n async use(pluginOrParser, ...options) {\n if (pluginOrParser.type === 'plugin') {\n if (installedPlugins.has(pluginOrParser)) {\n console.warn(`Plugin \"${pluginOrParser.name}\" already applied.`)\n } else {\n installedPlugins.add(pluginOrParser)\n }\n\n if (isFunction(pluginOrParser.inject)) {\n const injecter = pluginOrParser.inject\n\n const injected = (injecter as any)(context, ...options)\n Object.assign(fabric, injected)\n }\n }\n\n if (pluginOrParser.type === 'parser') {\n if (installedParserNames.has(pluginOrParser.name)) {\n console.warn(`Parser \"${pluginOrParser.name}\" already applied.`)\n } else {\n installedParserNames.add(pluginOrParser.name)\n }\n\n if (pluginOrParser.extNames) {\n for (const extName of pluginOrParser.extNames) {\n const existing = installedParsers.get(extName)\n if (existing && existing.name !== pluginOrParser.name) {\n console.warn(`Parser \"${pluginOrParser.name}\" is overriding parser \"${existing.name}\" for extension \"${extName}\".`)\n }\n installedParsers.set(extName, pluginOrParser)\n }\n }\n }\n\n if (isFunction(pluginOrParser.install)) {\n const installer = pluginOrParser.install\n\n await (installer as any)(context, ...options)\n }\n\n return fabric\n },\n } as Fabric<T>\n\n return fabric\n}\n"],"x_google_ignoreList":[0],"mappings":";;;;;;;;;AAAA,MAAM,KAAE,QAAG,OAAOA,OAAG;;;;;;ACQrB,IAAa,oBAAb,MAAoE;CAClE,YAAY,EAAE,cAAc,KAAK,OAAO,iBAA0B,EAAE,EAAE;oEAK3D,IAAIC,0BAAkB;;AAJ/B,+DAAa,CAAC,gBAAgB,YAAY;AAC1C,6DAAa,KAAI;;CAMnB,MAAM,KAAgD,WAAuB,GAAG,WAA+C;EAC7H,MAAM,oEAAY,KAAa,CAAC,UAAU,UAAU;AAEpD,MAAI,UAAU,WAAW,EACvB;EAGF,MAAMC,SAAkB,EAAE;AAE1B,2DAAI,KAAU,KAAK,aAEjB,MAAK,MAAM,YAAY,UACrB,KAAI;AACF,SAAM,SAAS,GAAG,UAAU;WACrB,KAAK;GACZ,MAAM,QAAQ,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,IAAI,CAAC;AACjE,UAAO,KAAK,MAAM;;OAGjB;GAEL,MAAM,WAAW,UAAU,IAAI,OAAO,aAAa;AACjD,QAAI;AACF,WAAM,SAAS,GAAG,UAAU;aACrB,KAAK;KACZ,MAAM,QAAQ,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,IAAI,CAAC;AACjE,YAAO,KAAK,MAAM;;KAEpB;AACF,SAAM,QAAQ,IAAI,SAAS;;AAG7B,MAAI,OAAO,WAAW,EACpB,OAAM,OAAO;AAGf,MAAI,OAAO,SAAS,EAClB,OAAM,IAAI,eAAe,QAAQ,kCAAkC,UAAU,GAAG;;CAIpF,GAA8C,WAAuB,SAA2D;AAC9H,+DAAa,CAAC,GAAG,WAAW,QAAe;;CAG7C,OAAkD,WAAuB,SAA4D;EACnI,MAAM,WAAW,GAAG,SAA8B;AAChD,QAAK,IAAI,WAAW,QAAQ;AAC5B,WAAQ,GAAG,KAAK;;AAElB,OAAK,GAAG,WAAW,QAAQ;;CAG7B,IAA+C,WAAuB,SAA2D;AAC/H,+DAAa,CAAC,IAAI,WAAW,QAAe;;CAG9C,YAAkB;AAChB,+DAAa,CAAC,oBAAoB;;;;;;;ACjDtC,IAAa,gBAAb,MAA2B;CAIzB,YAAY,EAAE,SAAS,IAAI,mBAAiC,KAAc,EAAE,EAAE;uFAH9D,IAAI;+CACpB;AAGE,OAAK,SAAS;AAEd,SAAO;;CAGT,MAAM,MAAM,MAA6B,EAAE,SAAS,cAA+B,EAAE,EAAmB;EACtG,MAAM,sEAAe,UAAY,KAAK,aAAY;AAElD,MAAI,CAAC,SAAS;AACZ,WAAQ,KAAK,qHAAqH;AAElI,UAAOC,oCAAc,MAAM,MAAM,EAAE,SAAS,cAAc,CAAC;;AAG7D,MAAI,CAAC,KAAK,QACR,QAAOA,oCAAc,MAAM,MAAM,EAAE,SAAS,cAAc,CAAC;EAG7D,MAAM,SAAS,QAAQ,IAAI,KAAK,QAAQ;AAExC,MAAI,CAAC,OACH,QAAOA,oCAAc,MAAM,MAAM,EAAE,SAAS,cAAc,CAAC;AAG7D,SAAO,OAAO,MAAM,MAAM,EAAE,SAAS,cAAc,CAAC;;CAGtD,MAAM,IACJ,OACA,EAAE,SAAS,OAAO,cAAc,QAAQ,cAAiC,EAAE,EACzC;AAClC,QAAM,KAAK,OAAO,KAAK,0BAA0B,MAAM;EAEvD,MAAM,QAAQ,MAAM;EACpB,IAAI,YAAY;EAEhB,MAAM,aAAa,OAAO,cAAqC,UAAkB;AAC/E,SAAM,KAAK,OAAO,KAAK,yBAAyB,cAAc,OAAO,MAAM;GAE3E,MAAM,SAAS,SAAS,SAAY,MAAM,KAAK,MAAM,cAAc;IAAE;IAAW;IAAS,CAAC;GAE1F,MAAM,mBAAmB,EAAE;GAC3B,MAAM,aAAc,mBAAmB,QAAS;AAEhD,SAAM,KAAK,OAAO,KAAK,0BAA0B;IAC/C,MAAM;IACN;IACA,WAAW;IACX;IACA;IACD,CAAC;AAEF,SAAM,KAAK,OAAO,KAAK,uBAAuB,cAAc,OAAO,MAAM;;AAG3E,MAAI,SAAS,cAAc;GACzB,gBAAgB,aAAa;AAC3B,SAAK,IAAI,QAAQ,GAAG,QAAQ,MAAM,QAAQ,QACxC,OAAM,CAAC,MAAM,QAAQ,
MAAM;;AAI/B,cAAW,MAAM,CAAC,MAAM,UAAU,YAAY,CAC5C,KAAI,KACF,OAAM,WAAW,MAAM,MAAM;SAG5B;GACL,MAAM,WAAW,MAAM,KAAK,cAAc,gEAAU,KAAW,kBAAO,WAAW,cAAc,MAAM,CAAC,CAAC;AACvG,SAAM,QAAQ,IAAI,SAAS;;AAG7B,QAAM,KAAK,OAAO,KAAK,wBAAwB,MAAM;AAErD,SAAO;;;;;;;ACzGX,IAAa,QAAb,MAAsB;;mFACV,IAAI,KAAgB;;CAE9B,IAAI,KAAuB;;AACzB,oFAAO,KAAY,CAAC,IAAI,IAAI,+DAAI;;CAGlC,IAAI,KAAa,OAAgB;AAC/B,8DAAY,CAAC,IAAI,KAAK,MAAM;;CAG9B,OAAO,KAAmB;AACxB,8DAAY,CAAC,OAAO,IAAI;;CAG1B,QAAc;AACZ,8DAAY,CAAC,OAAO;;CAGtB,OAAiB;AACf,SAAO,CAAC,0DAAG,KAAY,CAAC,MAAM,CAAC;;CAGjC,SAAmB;AACjB,SAAO,CAAC,0DAAG,KAAY,CAAC,QAAQ,CAAC;;CAGnC,QAAc;;;;;;;;;;;AClBhB,SAAS,UAAyC,GAAyB,GAA+C;AACxH,QAAO;EACL,GAAG;EACH,SAAS,CAAC,GAAI,EAAE,WAAW,EAAE,EAAG,GAAI,EAAE,WAAW,EAAE,CAAE;EACrD,SAAS,CAAC,GAAI,EAAE,WAAW,EAAE,EAAG,GAAI,EAAE,WAAW,EAAE,CAAE;EACrD,SAAS,CAAC,GAAI,EAAE,WAAW,EAAE,EAAG,GAAI,EAAE,WAAW,EAAE,CAAE;EACtD;;;;;AAOH,IAAa,cAAb,MAAyB;CAMvB,YAAY,EAAE,SAAS,IAAI,mBAAiC,KAAc,EAAE,EAAE;;kEALrE,IAAI,OAA8B;uEACQ;+CACnD;+CACA;AAGE,OAAK,YAAY,IAAI,cAAc,EAAE,QAAQ,CAAC;AAE9C,OAAK,SAAS;AACd,SAAO;;CAeT,MAAM,IAAI,GAAG,OAA6B;EACxC,MAAMC,gBAA8C,EAAE;EAEtD,MAAM,8BAAc,IAAI,KAA4B;AAEpD,QAAM,SAAS,SAAS;GACtB,MAAM,WAAW,YAAY,IAAI,KAAK,KAAK;AAC3C,OAAI,SACF,aAAY,IAAI,KAAK,MAAM,UAAU,UAAU,KAAK,CAAC;OAErD,aAAY,IAAI,KAAK,MAAM,KAAK;IAElC;AAEF,OAAK,IAAI,QAAQ,YAAY,QAAQ,EAAE;AACrC,uEAAO,mBAAiB,YAAC,KAAK;AAC9B,uEAAO,mBAAiB,YAAC,KAAK;GAE9B,MAAM,eAAeC,kCAAW,KAAK;AAErC,8DAAW,CAAC,IAAI,aAAa,MAAM,aAAa;AAChD,QAAK,OAAO;AAEZ,iBAAc,KAAK,aAAa;;AAGlC,QAAM,KAAK,OAAO,KAAK,eAAe,cAAc;AAEpD,SAAO;;CAGT,MAAM,OAAO,GAAG,OAA6B;EAC3C,MAAMD,gBAA8C,EAAE;EAEtD,MAAM,8BAAc,IAAI,KAA4B;AAEpD,QAAM,SAAS,SAAS;GACtB,MAAM,WAAW,YAAY,IAAI,KAAK,KAAK;AAC3C,OAAI,SACF,aAAY,IAAI,KAAK,MAAM,UAAU,UAAU,KAAK,CAAC;OAErD,aAAY,IAAI,KAAK,MAAM,KAAK;IAElC;AAEF,OAAK,IAAI,QAAQ,YAAY,QAAQ,EAAE;GACrC,MAAM,iEAAW,KAAW,CAAC,IAAI,KAAK,KAAK;AAE3C,uEAAO,mBAAiB,YAAC,KAAK;AAC9B,uEAAO,mBAAiB,YAAC,KAAK;GAG9B,MAAM,eAAeC,kCADN,WAAW,UAAU,UAAU,KAAK,GAAG,KACf;AAEvC,8DAAW,CAAC,IAAI,aAAa,MAAM,aAAa;AAChD,QAAK,OAAO;AAEZ,iBAAc,KAAK,aAAa;;AAGlC,QAAM,KAAK,OAAO,KAAK,eAAe,cAAc;AAEpD,SAAO;;CAGT,QAAQ;AACN,mEAAmB,KAAI;AACvB,6DAAW,CAAC,OAAO;;CAGrB,UAAU,MAAmD;AAC3D,+DAAO,KAAW,CAAC,IAAI,KAAK;;CAG9B,aAAa,MAA2B;AACtC,6DAAW,CAAC,OAAO,KAAK;AACxB,mEAAmB,KAAI;;CAGzB,QAAc;AACZ,6DAAW,CAAC,OAAO;AACnB,mEAAmB,KAAI;;CAGzB,IAAI,QAAsC;AACxC,iEAAI,KAAgB,CAClB,mEAAO,KAAgB;EAMzB,MAAM,0FAHa,KAAW,CAAC,MAAM,EAGJ,EAAE,MAAM,EAAE,SAAS,MAAMC,gCAAY,EAAE,CAAC,SAAS,QAAQ,CAAC,CAAC;EAE5F,MAAMC,QAAsC,EAAE;AAE9C,OAAK,MAAM,OAAO,MAAM;GACtB,MAAM,6DAAO,KAAW,CAAC,IAAI,IAAI;AACjC,OAAI,KACF,OAAM,KAAK,KAAK;;AAIpB,mEAAmB,MAAK;AAExB,SAAO;;CAIT,MAAM,MAAM,SAA8D;AACxE,QAAM,KAAK,OAAO,KAAK,uBAAuB,KAAK,MAAM;EAEzD,MAAM,gBAAgB,MAAM,KAAK,UAAU,IAAI,KAAK,OAAO,QAAQ;AAEnE,OAAK,OAAO;AAEZ,QAAM,KAAK,OAAO,KAAK,qBAAqB,cAAc;AAE1D,SAAO;;;AAlIT,sBAAa,MAAoC;AAC/C,MAAK,OAAO,KAAK,qBAAqB,KAAK;AAE3C,QAAO;;AAGT,sBAAa,MAAoC;AAC/C,MAAK,OAAO,KAAK,qBAAqB,KAAK;AAE3C,QAAO;;;;;;;;;;;;AC7BX,SAAgB,aAAsC,SAA0B,EAAE,MAAM,cAAc,EAAgC;CACpI,MAAM,SAAS,IAAI,mBAAiC;CACpD,MAAM,mCAAmB,IAAI,KAAkB;CAC/C,MAAM,mCAAmB,IAAI,KAAoC;CACjE,MAAM,uCAAuB,IAAI,KAAa;CAC9C,MAAM,cAAc,IAAI,YAAY,EAAE,QAAQ,CAAC;CAE/C,MAAMC,UAA4B;EAChC,IAAI,QAAQ;AACV,UAAO,YAAY;;EAErB,MAAM,QAAQ,GAAG,OAAO;AACtB,SAAM,YAAY,IAAI,GAAG,MAAM;;EAEjC;EACA;EACA;EACA;EACA,IAAI,OAAO,GAAG,KAAK,OAAO;EAC1B,KAAK,OAAO,IAAI,KAAK,OAAO;EAC5B,QAAQ,OAAO,OAAO,KAAK,OAAO;EAClC,WAAW,OAAO,UAAU,KAAK,OAAO;EACxC,MAAM,OAAO,KAAK,KAAK,OAAO;EAC/B;CAED,MAAMC,SAAoB;EACxB;EACA,IAAI,QAAQ;AACV,UAAO,YAAY;;EAErB,MAAM,QAAQ,GAAG,OAAO;AACtB,SAAM,YAAY,IAAI,GAAG,MAAM;;EAEjC,MAAM,WAAW,GAAG,OAAO;AACzB,SAAM,YAAY,OAAO,GAAG,MAAM;;EAEpC,MAAM,IAAI,gBAAgB
,GAAG,SAAS;AACpC,OAAI,eAAe,SAAS,UAAU;AACpC,QAAI,iBAAiB,IAAI,eAAe,CACtC,SAAQ,KAAK,WAAW,eAAe,KAAK,oBAAoB;QAEhE,kBAAiB,IAAI,eAAe;AAGtC,QAAIC,EAAW,eAAe,OAAO,EAAE;KACrC,MAAM,WAAW,eAAe;KAEhC,MAAM,WAAY,SAAiB,SAAS,GAAG,QAAQ;AACvD,YAAO,OAAO,QAAQ,SAAS;;;AAInC,OAAI,eAAe,SAAS,UAAU;AACpC,QAAI,qBAAqB,IAAI,eAAe,KAAK,CAC/C,SAAQ,KAAK,WAAW,eAAe,KAAK,oBAAoB;QAEhE,sBAAqB,IAAI,eAAe,KAAK;AAG/C,QAAI,eAAe,SACjB,MAAK,MAAM,WAAW,eAAe,UAAU;KAC7C,MAAM,WAAW,iBAAiB,IAAI,QAAQ;AAC9C,SAAI,YAAY,SAAS,SAAS,eAAe,KAC/C,SAAQ,KAAK,WAAW,eAAe,KAAK,0BAA0B,SAAS,KAAK,mBAAmB,QAAQ,IAAI;AAErH,sBAAiB,IAAI,SAAS,eAAe;;;AAKnD,OAAIA,EAAW,eAAe,QAAQ,EAAE;IACtC,MAAM,YAAY,eAAe;AAEjC,UAAO,UAAkB,SAAS,GAAG,QAAQ;;AAG/C,UAAO;;EAEV;AAED,QAAO"}
|
|
1
|
+
{"version":3,"file":"index.cjs","names":["e","NodeEventEmitter","errors: Error[]","defaultParser","resolvedFiles: Array<KubbFile.ResolvedFile>","createFile","trimExtName","files: Array<KubbFile.ResolvedFile>","context: FabricContext<T>","fabric: Fabric<T>","isFunction"],"sources":["../../../node_modules/.pnpm/remeda@2.33.1/node_modules/remeda/dist/isFunction.js","../src/utils/AsyncEventEmitter.ts","../src/FileProcessor.ts","../src/utils/Cache.ts","../src/FileManager.ts","../src/createFabric.ts"],"sourcesContent":["const e=e=>typeof e==`function`;export{e as isFunction};\n//# sourceMappingURL=isFunction.js.map","import { EventEmitter as NodeEventEmitter } from 'node:events'\nimport type { FabricMode } from '../Fabric.ts'\n\ntype Options = {\n mode?: FabricMode\n maxListener?: number\n}\n\nexport class AsyncEventEmitter<TEvents extends Record<string, any>> {\n constructor({ maxListener = 100, mode = 'sequential' }: Options = {}) {\n this.#emitter.setMaxListeners(maxListener)\n this.#mode = mode\n }\n\n #emitter = new NodeEventEmitter()\n #mode: FabricMode\n\n async emit<TEventName extends keyof TEvents & string>(eventName: TEventName, ...eventArgs: TEvents[TEventName]): Promise<void> {\n const listeners = this.#emitter.listeners(eventName) as Array<(...args: TEvents[TEventName]) => any>\n\n if (listeners.length === 0) {\n return\n }\n\n const errors: Error[] = []\n\n if (this.#mode === 'sequential') {\n // Run listeners one by one, in order\n for (const listener of listeners) {\n try {\n await listener(...eventArgs)\n } catch (err) {\n const error = err instanceof Error ? err : new Error(String(err))\n errors.push(error)\n }\n }\n } else {\n // Run all listeners concurrently\n const promises = listeners.map(async (listener) => {\n try {\n await listener(...eventArgs)\n } catch (err) {\n const error = err instanceof Error ? 
err : new Error(String(err))\n errors.push(error)\n }\n })\n await Promise.all(promises)\n }\n\n if (errors.length === 1) {\n throw errors[0]\n }\n\n if (errors.length > 1) {\n throw new AggregateError(errors, `Errors in async listeners for \"${eventName}\"`)\n }\n }\n\n on<TEventName extends keyof TEvents & string>(eventName: TEventName, handler: (...eventArg: TEvents[TEventName]) => void): void {\n this.#emitter.on(eventName, handler as any)\n }\n\n onOnce<TEventName extends keyof TEvents & string>(eventName: TEventName, handler: (...eventArgs: TEvents[TEventName]) => void): void {\n const wrapper = (...args: TEvents[TEventName]) => {\n this.off(eventName, wrapper)\n handler(...args)\n }\n this.on(eventName, wrapper)\n }\n\n off<TEventName extends keyof TEvents & string>(eventName: TEventName, handler: (...eventArg: TEvents[TEventName]) => void): void {\n this.#emitter.off(eventName, handler as any)\n }\n\n removeAll(): void {\n this.#emitter.removeAllListeners()\n }\n}\n","import pLimit from 'p-limit'\nimport type { FabricEvents, FabricMode } from './Fabric.ts'\nimport type * as KubbFile from './KubbFile.ts'\nimport { defaultParser } from './parsers/defaultParser.ts'\nimport type { Parser } from './parsers/types.ts'\nimport { AsyncEventEmitter } from './utils/AsyncEventEmitter.ts'\n\nexport type ProcessFilesProps = {\n parsers?: Map<KubbFile.Extname, Parser>\n extension?: Record<KubbFile.Extname, KubbFile.Extname | ''>\n dryRun?: boolean\n /**\n * @default 'sequential'\n */\n mode?: FabricMode\n}\n\ntype GetParseOptions = {\n parsers?: Map<KubbFile.Extname, Parser>\n extension?: Record<KubbFile.Extname, KubbFile.Extname | ''>\n}\n\ntype Options = {\n events?: AsyncEventEmitter<FabricEvents>\n}\n\nexport class FileProcessor {\n #limit = pLimit(100)\n events: AsyncEventEmitter<FabricEvents>\n\n constructor({ events = new AsyncEventEmitter<FabricEvents>() }: Options = {}) {\n this.events = events\n\n return this\n }\n\n async parse(file: KubbFile.ResolvedFile, { parsers, extension }: GetParseOptions = {}): Promise<string> {\n const parseExtName = extension?.[file.extname] || undefined\n\n if (!parsers) {\n console.warn('No parsers provided, using default parser. If you want to use a specific parser, please provide it in the options.')\n\n return defaultParser.parse(file, { extname: parseExtName })\n }\n\n if (!file.extname) {\n return defaultParser.parse(file, { extname: parseExtName })\n }\n\n const parser = parsers.get(file.extname)\n\n if (!parser) {\n return defaultParser.parse(file, { extname: parseExtName })\n }\n\n return parser.parse(file, { extname: parseExtName })\n }\n\n async run(\n files: Array<KubbFile.ResolvedFile>,\n { parsers, mode = 'sequential', dryRun, extension }: ProcessFilesProps = {},\n ): Promise<KubbFile.ResolvedFile[]> {\n await this.events.emit('files:processing:start', files)\n\n const total = files.length\n let processed = 0\n\n const processOne = async (resolvedFile: KubbFile.ResolvedFile, index: number) => {\n await this.events.emit('file:processing:start', resolvedFile, index, total)\n\n const source = dryRun ? 
undefined : await this.parse(resolvedFile, { extension, parsers })\n\n const currentProcessed = ++processed\n const percentage = (currentProcessed / total) * 100\n\n await this.events.emit('file:processing:update', {\n file: resolvedFile,\n source,\n processed: currentProcessed,\n percentage,\n total,\n })\n\n await this.events.emit('file:processing:end', resolvedFile, index, total)\n }\n\n if (mode === 'sequential') {\n async function* asyncFiles() {\n for (let index = 0; index < files.length; index++) {\n yield [files[index], index] as const\n }\n }\n\n for await (const [file, index] of asyncFiles()) {\n if (file) {\n await processOne(file, index)\n }\n }\n } else {\n const promises = files.map((resolvedFile, index) => this.#limit(() => processOne(resolvedFile, index)))\n await Promise.all(promises)\n }\n\n await this.events.emit('files:processing:end', files)\n\n return files\n }\n}\n","export class Cache<T> {\n #buffer = new Map<string, T>()\n\n get(key: string): T | null {\n return this.#buffer.get(key) ?? null\n }\n\n set(key: string, value: T): void {\n this.#buffer.set(key, value)\n }\n\n delete(key: string): void {\n this.#buffer.delete(key)\n }\n\n clear(): void {\n this.#buffer.clear()\n }\n\n keys(): string[] {\n return [...this.#buffer.keys()]\n }\n\n values(): Array<T> {\n return [...this.#buffer.values()]\n }\n\n flush(): void {\n // No-op for base cache\n }\n}\n","import { orderBy } from 'natural-orderby'\nimport { createFile } from './createFile.ts'\nimport type { FabricEvents } from './Fabric.ts'\nimport { FileProcessor, type ProcessFilesProps } from './FileProcessor.ts'\nimport type * as KubbFile from './KubbFile.ts'\nimport { AsyncEventEmitter } from './utils/AsyncEventEmitter.ts'\nimport { Cache } from './utils/Cache.ts'\nimport { trimExtName } from './utils/trimExtName.ts'\n\nfunction mergeFile<TMeta extends object = object>(a: KubbFile.File<TMeta>, b: KubbFile.File<TMeta>): KubbFile.File<TMeta> {\n return {\n ...a,\n sources: [...(a.sources || []), ...(b.sources || [])],\n imports: [...(a.imports || []), ...(b.imports || [])],\n exports: [...(a.exports || []), ...(b.exports || [])],\n }\n}\n\ntype Options = {\n events?: AsyncEventEmitter<FabricEvents>\n}\n\nexport class FileManager {\n #cache = new Cache<KubbFile.ResolvedFile>()\n #filesCache: Array<KubbFile.ResolvedFile> | null = null\n events: AsyncEventEmitter<FabricEvents>\n processor: FileProcessor\n\n constructor({ events = new AsyncEventEmitter<FabricEvents>() }: Options = {}) {\n this.processor = new FileProcessor({ events })\n\n this.events = events\n return this\n }\n\n #resolvePath(file: KubbFile.File): KubbFile.File {\n this.events.emit('file:resolve:path', file)\n\n return file\n }\n\n #resolveName(file: KubbFile.File): KubbFile.File {\n this.events.emit('file:resolve:name', file)\n\n return file\n }\n\n async add(...files: Array<KubbFile.File>) {\n const resolvedFiles: Array<KubbFile.ResolvedFile> = []\n\n const mergedFiles = new Map<string, KubbFile.File>()\n\n files.forEach((file) => {\n const existing = mergedFiles.get(file.path)\n if (existing) {\n mergedFiles.set(file.path, mergeFile(existing, file))\n } else {\n mergedFiles.set(file.path, file)\n }\n })\n\n for (let file of mergedFiles.values()) {\n file = this.#resolveName(file)\n file = this.#resolvePath(file)\n\n const resolvedFile = createFile(file)\n\n this.#cache.set(resolvedFile.path, resolvedFile)\n this.flush()\n\n resolvedFiles.push(resolvedFile)\n }\n\n await this.events.emit('files:added', resolvedFiles)\n\n return resolvedFiles\n 
}\n\n async upsert(...files: Array<KubbFile.File>) {\n const resolvedFiles: Array<KubbFile.ResolvedFile> = []\n\n const mergedFiles = new Map<string, KubbFile.File>()\n\n files.forEach((file) => {\n const existing = mergedFiles.get(file.path)\n if (existing) {\n mergedFiles.set(file.path, mergeFile(existing, file))\n } else {\n mergedFiles.set(file.path, file)\n }\n })\n\n for (let file of mergedFiles.values()) {\n const existing = this.#cache.get(file.path)\n\n file = this.#resolveName(file)\n file = this.#resolvePath(file)\n\n const merged = existing ? mergeFile(existing, file) : file\n const resolvedFile = createFile(merged)\n\n this.#cache.set(resolvedFile.path, resolvedFile)\n this.flush()\n\n resolvedFiles.push(resolvedFile)\n }\n\n await this.events.emit('files:added', resolvedFiles)\n\n return resolvedFiles\n }\n\n flush() {\n this.#filesCache = null\n this.#cache.flush()\n }\n\n getByPath(path: KubbFile.Path): KubbFile.ResolvedFile | null {\n return this.#cache.get(path)\n }\n\n deleteByPath(path: KubbFile.Path): void {\n this.#cache.delete(path)\n this.#filesCache = null\n }\n\n clear(): void {\n this.#cache.clear()\n this.#filesCache = null\n }\n\n get files(): Array<KubbFile.ResolvedFile> {\n if (this.#filesCache) {\n return this.#filesCache\n }\n\n const cachedKeys = this.#cache.keys()\n\n // order by path length and if file is a barrel file\n const keys = orderBy(cachedKeys, [(v) => v.length, (v) => trimExtName(v).endsWith('index')])\n\n const files: Array<KubbFile.ResolvedFile> = []\n\n for (const key of keys) {\n const file = this.#cache.get(key)\n if (file) {\n files.push(file)\n }\n }\n\n this.#filesCache = files\n\n return files\n }\n\n //TODO add test and check if write of FileManager contains the newly added file\n async write(options: ProcessFilesProps): Promise<KubbFile.ResolvedFile[]> {\n await this.events.emit('files:writing:start', this.files)\n\n const resolvedFiles = await this.processor.run(this.files, options)\n\n this.clear()\n\n await this.events.emit('files:writing:end', resolvedFiles)\n\n return resolvedFiles\n }\n}\n","import { isFunction } from 'remeda'\nimport type { Fabric, FabricConfig, FabricContext, FabricEvents, FabricOptions } from './Fabric.ts'\nimport { FileManager } from './FileManager.ts'\nimport type * as KubbFile from './KubbFile.ts'\nimport type { Parser } from './parsers/types.ts'\nimport type { Plugin } from './plugins/types.ts'\nimport { AsyncEventEmitter } from './utils/AsyncEventEmitter.ts'\n\n/**\n * Creates a new Fabric instance\n *\n * @example\n * const fabric = createFabric()\n * fabric.use(myPlugin())\n */\nexport function createFabric<T extends FabricOptions>(config: FabricConfig<T> = { mode: 'sequential' } as FabricConfig<T>): Fabric<T> {\n const events = new AsyncEventEmitter<FabricEvents>()\n const installedPlugins = new Set<Plugin<any>>()\n const installedParsers = new Map<KubbFile.Extname, Parser<any>>()\n const installedParserNames = new Set<string>()\n const fileManager = new FileManager({ events })\n\n const context: FabricContext<T> = {\n get files() {\n return fileManager.files\n },\n async addFile(...files) {\n await fileManager.add(...files)\n },\n config,\n fileManager,\n installedPlugins,\n installedParsers,\n on: events.on.bind(events),\n off: events.off.bind(events),\n onOnce: events.onOnce.bind(events),\n removeAll: events.removeAll.bind(events),\n emit: events.emit.bind(events),\n } as FabricContext<T>\n\n const fabric: Fabric<T> = {\n context,\n get files() {\n return fileManager.files\n },\n async 
addFile(...files) {\n await fileManager.add(...files)\n },\n async upsertFile(...files) {\n await fileManager.upsert(...files)\n },\n async use(pluginOrParser, ...options) {\n if (pluginOrParser.type === 'plugin') {\n if (installedPlugins.has(pluginOrParser)) {\n console.warn(`Plugin \"${pluginOrParser.name}\" already applied.`)\n } else {\n installedPlugins.add(pluginOrParser)\n }\n\n if (isFunction(pluginOrParser.inject)) {\n const injecter = pluginOrParser.inject\n\n const injected = (injecter as any)(context, ...options)\n Object.assign(fabric, injected)\n }\n }\n\n if (pluginOrParser.type === 'parser') {\n if (installedParserNames.has(pluginOrParser.name)) {\n console.warn(`Parser \"${pluginOrParser.name}\" already applied.`)\n } else {\n installedParserNames.add(pluginOrParser.name)\n }\n\n if (pluginOrParser.extNames) {\n for (const extName of pluginOrParser.extNames) {\n const existing = installedParsers.get(extName)\n if (existing && existing.name !== pluginOrParser.name) {\n console.warn(`Parser \"${pluginOrParser.name}\" is overriding parser \"${existing.name}\" for extension \"${extName}\".`)\n }\n installedParsers.set(extName, pluginOrParser)\n }\n }\n }\n\n if (isFunction(pluginOrParser.install)) {\n const installer = pluginOrParser.install\n\n await (installer as any)(context, ...options)\n }\n\n return fabric\n },\n } as Fabric<T>\n\n return fabric\n}\n"],"x_google_ignoreList":[0],"mappings":";;;;;;;;;AAAA,MAAM,KAAE,QAAG,OAAOA,OAAG;;;;;;ACQrB,IAAa,oBAAb,MAAoE;CAClE,YAAY,EAAE,cAAc,KAAK,OAAO,iBAA0B,EAAE,EAAE;oEAK3D,IAAIC,0BAAkB;;AAJ/B,+DAAa,CAAC,gBAAgB,YAAY;AAC1C,6DAAa,KAAI;;CAMnB,MAAM,KAAgD,WAAuB,GAAG,WAA+C;EAC7H,MAAM,oEAAY,KAAa,CAAC,UAAU,UAAU;AAEpD,MAAI,UAAU,WAAW,EACvB;EAGF,MAAMC,SAAkB,EAAE;AAE1B,2DAAI,KAAU,KAAK,aAEjB,MAAK,MAAM,YAAY,UACrB,KAAI;AACF,SAAM,SAAS,GAAG,UAAU;WACrB,KAAK;GACZ,MAAM,QAAQ,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,IAAI,CAAC;AACjE,UAAO,KAAK,MAAM;;OAGjB;GAEL,MAAM,WAAW,UAAU,IAAI,OAAO,aAAa;AACjD,QAAI;AACF,WAAM,SAAS,GAAG,UAAU;aACrB,KAAK;KACZ,MAAM,QAAQ,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,IAAI,CAAC;AACjE,YAAO,KAAK,MAAM;;KAEpB;AACF,SAAM,QAAQ,IAAI,SAAS;;AAG7B,MAAI,OAAO,WAAW,EACpB,OAAM,OAAO;AAGf,MAAI,OAAO,SAAS,EAClB,OAAM,IAAI,eAAe,QAAQ,kCAAkC,UAAU,GAAG;;CAIpF,GAA8C,WAAuB,SAA2D;AAC9H,+DAAa,CAAC,GAAG,WAAW,QAAe;;CAG7C,OAAkD,WAAuB,SAA4D;EACnI,MAAM,WAAW,GAAG,SAA8B;AAChD,QAAK,IAAI,WAAW,QAAQ;AAC5B,WAAQ,GAAG,KAAK;;AAElB,OAAK,GAAG,WAAW,QAAQ;;CAG7B,IAA+C,WAAuB,SAA2D;AAC/H,+DAAa,CAAC,IAAI,WAAW,QAAe;;CAG9C,YAAkB;AAChB,+DAAa,CAAC,oBAAoB;;;;;;;ACjDtC,IAAa,gBAAb,MAA2B;CAIzB,YAAY,EAAE,SAAS,IAAI,mBAAiC,KAAc,EAAE,EAAE;uFAH9D,IAAI;+CACpB;AAGE,OAAK,SAAS;AAEd,SAAO;;CAGT,MAAM,MAAM,MAA6B,EAAE,SAAS,cAA+B,EAAE,EAAmB;EACtG,MAAM,sEAAe,UAAY,KAAK,aAAY;AAElD,MAAI,CAAC,SAAS;AACZ,WAAQ,KAAK,qHAAqH;AAElI,UAAOC,oCAAc,MAAM,MAAM,EAAE,SAAS,cAAc,CAAC;;AAG7D,MAAI,CAAC,KAAK,QACR,QAAOA,oCAAc,MAAM,MAAM,EAAE,SAAS,cAAc,CAAC;EAG7D,MAAM,SAAS,QAAQ,IAAI,KAAK,QAAQ;AAExC,MAAI,CAAC,OACH,QAAOA,oCAAc,MAAM,MAAM,EAAE,SAAS,cAAc,CAAC;AAG7D,SAAO,OAAO,MAAM,MAAM,EAAE,SAAS,cAAc,CAAC;;CAGtD,MAAM,IACJ,OACA,EAAE,SAAS,OAAO,cAAc,QAAQ,cAAiC,EAAE,EACzC;AAClC,QAAM,KAAK,OAAO,KAAK,0BAA0B,MAAM;EAEvD,MAAM,QAAQ,MAAM;EACpB,IAAI,YAAY;EAEhB,MAAM,aAAa,OAAO,cAAqC,UAAkB;AAC/E,SAAM,KAAK,OAAO,KAAK,yBAAyB,cAAc,OAAO,MAAM;GAE3E,MAAM,SAAS,SAAS,SAAY,MAAM,KAAK,MAAM,cAAc;IAAE;IAAW;IAAS,CAAC;GAE1F,MAAM,mBAAmB,EAAE;GAC3B,MAAM,aAAc,mBAAmB,QAAS;AAEhD,SAAM,KAAK,OAAO,KAAK,0BAA0B;IAC/C,MAAM;IACN;IACA,WAAW;IACX;IACA;IACD,CAAC;AAEF,SAAM,KAAK,OAAO,KAAK,uBAAuB,cAAc,OAAO,MAAM;;AAG3E,MAAI,SAAS,cAAc;GACzB,gBAAgB,aAAa;AAC3B,SAAK,IAAI,QAAQ,GAAG,QAAQ,MAAM,QAAQ,QACxC,OAAM,CAAC,MAAM,QAAQ,
MAAM;;AAI/B,cAAW,MAAM,CAAC,MAAM,UAAU,YAAY,CAC5C,KAAI,KACF,OAAM,WAAW,MAAM,MAAM;SAG5B;GACL,MAAM,WAAW,MAAM,KAAK,cAAc,gEAAU,KAAW,kBAAO,WAAW,cAAc,MAAM,CAAC,CAAC;AACvG,SAAM,QAAQ,IAAI,SAAS;;AAG7B,QAAM,KAAK,OAAO,KAAK,wBAAwB,MAAM;AAErD,SAAO;;;;;;;ACzGX,IAAa,QAAb,MAAsB;;mFACV,IAAI,KAAgB;;CAE9B,IAAI,KAAuB;;AACzB,oFAAO,KAAY,CAAC,IAAI,IAAI,+DAAI;;CAGlC,IAAI,KAAa,OAAgB;AAC/B,8DAAY,CAAC,IAAI,KAAK,MAAM;;CAG9B,OAAO,KAAmB;AACxB,8DAAY,CAAC,OAAO,IAAI;;CAG1B,QAAc;AACZ,8DAAY,CAAC,OAAO;;CAGtB,OAAiB;AACf,SAAO,CAAC,0DAAG,KAAY,CAAC,MAAM,CAAC;;CAGjC,SAAmB;AACjB,SAAO,CAAC,0DAAG,KAAY,CAAC,QAAQ,CAAC;;CAGnC,QAAc;;;;;;;;;;;AClBhB,SAAS,UAAyC,GAAyB,GAA+C;AACxH,QAAO;EACL,GAAG;EACH,SAAS,CAAC,GAAI,EAAE,WAAW,EAAE,EAAG,GAAI,EAAE,WAAW,EAAE,CAAE;EACrD,SAAS,CAAC,GAAI,EAAE,WAAW,EAAE,EAAG,GAAI,EAAE,WAAW,EAAE,CAAE;EACrD,SAAS,CAAC,GAAI,EAAE,WAAW,EAAE,EAAG,GAAI,EAAE,WAAW,EAAE,CAAE;EACtD;;;;;AAOH,IAAa,cAAb,MAAyB;CAMvB,YAAY,EAAE,SAAS,IAAI,mBAAiC,KAAc,EAAE,EAAE;;kEALrE,IAAI,OAA8B;uEACQ;+CACnD;+CACA;AAGE,OAAK,YAAY,IAAI,cAAc,EAAE,QAAQ,CAAC;AAE9C,OAAK,SAAS;AACd,SAAO;;CAeT,MAAM,IAAI,GAAG,OAA6B;EACxC,MAAMC,gBAA8C,EAAE;EAEtD,MAAM,8BAAc,IAAI,KAA4B;AAEpD,QAAM,SAAS,SAAS;GACtB,MAAM,WAAW,YAAY,IAAI,KAAK,KAAK;AAC3C,OAAI,SACF,aAAY,IAAI,KAAK,MAAM,UAAU,UAAU,KAAK,CAAC;OAErD,aAAY,IAAI,KAAK,MAAM,KAAK;IAElC;AAEF,OAAK,IAAI,QAAQ,YAAY,QAAQ,EAAE;AACrC,uEAAO,mBAAiB,YAAC,KAAK;AAC9B,uEAAO,mBAAiB,YAAC,KAAK;GAE9B,MAAM,eAAeC,kCAAW,KAAK;AAErC,8DAAW,CAAC,IAAI,aAAa,MAAM,aAAa;AAChD,QAAK,OAAO;AAEZ,iBAAc,KAAK,aAAa;;AAGlC,QAAM,KAAK,OAAO,KAAK,eAAe,cAAc;AAEpD,SAAO;;CAGT,MAAM,OAAO,GAAG,OAA6B;EAC3C,MAAMD,gBAA8C,EAAE;EAEtD,MAAM,8BAAc,IAAI,KAA4B;AAEpD,QAAM,SAAS,SAAS;GACtB,MAAM,WAAW,YAAY,IAAI,KAAK,KAAK;AAC3C,OAAI,SACF,aAAY,IAAI,KAAK,MAAM,UAAU,UAAU,KAAK,CAAC;OAErD,aAAY,IAAI,KAAK,MAAM,KAAK;IAElC;AAEF,OAAK,IAAI,QAAQ,YAAY,QAAQ,EAAE;GACrC,MAAM,iEAAW,KAAW,CAAC,IAAI,KAAK,KAAK;AAE3C,uEAAO,mBAAiB,YAAC,KAAK;AAC9B,uEAAO,mBAAiB,YAAC,KAAK;GAG9B,MAAM,eAAeC,kCADN,WAAW,UAAU,UAAU,KAAK,GAAG,KACf;AAEvC,8DAAW,CAAC,IAAI,aAAa,MAAM,aAAa;AAChD,QAAK,OAAO;AAEZ,iBAAc,KAAK,aAAa;;AAGlC,QAAM,KAAK,OAAO,KAAK,eAAe,cAAc;AAEpD,SAAO;;CAGT,QAAQ;AACN,mEAAmB,KAAI;AACvB,6DAAW,CAAC,OAAO;;CAGrB,UAAU,MAAmD;AAC3D,+DAAO,KAAW,CAAC,IAAI,KAAK;;CAG9B,aAAa,MAA2B;AACtC,6DAAW,CAAC,OAAO,KAAK;AACxB,mEAAmB,KAAI;;CAGzB,QAAc;AACZ,6DAAW,CAAC,OAAO;AACnB,mEAAmB,KAAI;;CAGzB,IAAI,QAAsC;AACxC,iEAAI,KAAgB,CAClB,mEAAO,KAAgB;EAMzB,MAAM,0FAHa,KAAW,CAAC,MAAM,EAGJ,EAAE,MAAM,EAAE,SAAS,MAAMC,gCAAY,EAAE,CAAC,SAAS,QAAQ,CAAC,CAAC;EAE5F,MAAMC,QAAsC,EAAE;AAE9C,OAAK,MAAM,OAAO,MAAM;GACtB,MAAM,6DAAO,KAAW,CAAC,IAAI,IAAI;AACjC,OAAI,KACF,OAAM,KAAK,KAAK;;AAIpB,mEAAmB,MAAK;AAExB,SAAO;;CAIT,MAAM,MAAM,SAA8D;AACxE,QAAM,KAAK,OAAO,KAAK,uBAAuB,KAAK,MAAM;EAEzD,MAAM,gBAAgB,MAAM,KAAK,UAAU,IAAI,KAAK,OAAO,QAAQ;AAEnE,OAAK,OAAO;AAEZ,QAAM,KAAK,OAAO,KAAK,qBAAqB,cAAc;AAE1D,SAAO;;;AAlIT,sBAAa,MAAoC;AAC/C,MAAK,OAAO,KAAK,qBAAqB,KAAK;AAE3C,QAAO;;AAGT,sBAAa,MAAoC;AAC/C,MAAK,OAAO,KAAK,qBAAqB,KAAK;AAE3C,QAAO;;;;;;;;;;;;AC7BX,SAAgB,aAAsC,SAA0B,EAAE,MAAM,cAAc,EAAgC;CACpI,MAAM,SAAS,IAAI,mBAAiC;CACpD,MAAM,mCAAmB,IAAI,KAAkB;CAC/C,MAAM,mCAAmB,IAAI,KAAoC;CACjE,MAAM,uCAAuB,IAAI,KAAa;CAC9C,MAAM,cAAc,IAAI,YAAY,EAAE,QAAQ,CAAC;CAE/C,MAAMC,UAA4B;EAChC,IAAI,QAAQ;AACV,UAAO,YAAY;;EAErB,MAAM,QAAQ,GAAG,OAAO;AACtB,SAAM,YAAY,IAAI,GAAG,MAAM;;EAEjC;EACA;EACA;EACA;EACA,IAAI,OAAO,GAAG,KAAK,OAAO;EAC1B,KAAK,OAAO,IAAI,KAAK,OAAO;EAC5B,QAAQ,OAAO,OAAO,KAAK,OAAO;EAClC,WAAW,OAAO,UAAU,KAAK,OAAO;EACxC,MAAM,OAAO,KAAK,KAAK,OAAO;EAC/B;CAED,MAAMC,SAAoB;EACxB;EACA,IAAI,QAAQ;AACV,UAAO,YAAY;;EAErB,MAAM,QAAQ,GAAG,OAAO;AACtB,SAAM,YAAY,IAAI,GAAG,MAAM;;EAEjC,MAAM,WAAW,GAAG,OAAO;AACzB,SAAM,YAAY,OAAO,GAAG,MAAM;;EAEpC,MAAM,IAAI,gBAAgB
,GAAG,SAAS;AACpC,OAAI,eAAe,SAAS,UAAU;AACpC,QAAI,iBAAiB,IAAI,eAAe,CACtC,SAAQ,KAAK,WAAW,eAAe,KAAK,oBAAoB;QAEhE,kBAAiB,IAAI,eAAe;AAGtC,QAAIC,EAAW,eAAe,OAAO,EAAE;KACrC,MAAM,WAAW,eAAe;KAEhC,MAAM,WAAY,SAAiB,SAAS,GAAG,QAAQ;AACvD,YAAO,OAAO,QAAQ,SAAS;;;AAInC,OAAI,eAAe,SAAS,UAAU;AACpC,QAAI,qBAAqB,IAAI,eAAe,KAAK,CAC/C,SAAQ,KAAK,WAAW,eAAe,KAAK,oBAAoB;QAEhE,sBAAqB,IAAI,eAAe,KAAK;AAG/C,QAAI,eAAe,SACjB,MAAK,MAAM,WAAW,eAAe,UAAU;KAC7C,MAAM,WAAW,iBAAiB,IAAI,QAAQ;AAC9C,SAAI,YAAY,SAAS,SAAS,eAAe,KAC/C,SAAQ,KAAK,WAAW,eAAe,KAAK,0BAA0B,SAAS,KAAK,mBAAmB,QAAQ,IAAI;AAErH,sBAAiB,IAAI,SAAS,eAAe;;;AAKnD,OAAIA,EAAW,eAAe,QAAQ,EAAE;IACtC,MAAM,YAAY,eAAe;AAEjC,UAAO,UAAkB,SAAS,GAAG,QAAQ;;AAG/C,UAAO;;EAEV;AAED,QAAO"}
|
package/dist/index.js
CHANGED
|
@@ -1,11 +1,11 @@
|
|
|
1
|
-
import { a as _classPrivateFieldInitSpec, i as _assertClassBrand, n as _classPrivateFieldSet2, o as _checkPrivateRedeclaration, r as _classPrivateFieldGet2, s as createFile, t as _defineProperty } from "./defineProperty-
|
|
1
|
+
import { a as _classPrivateFieldInitSpec, i as _assertClassBrand, n as _classPrivateFieldSet2, o as _checkPrivateRedeclaration, r as _classPrivateFieldGet2, s as createFile, t as _defineProperty } from "./defineProperty-CS9Uk_6Q.js";
|
|
2
2
|
import { t as trimExtName } from "./trimExtName-Dq2Z7SCT.js";
|
|
3
3
|
import { t as defaultParser } from "./defaultParser-DPHcM2NR.js";
|
|
4
4
|
import { orderBy } from "natural-orderby";
|
|
5
5
|
import pLimit from "p-limit";
|
|
6
6
|
import { EventEmitter } from "node:events";
|
|
7
7
|
|
|
8
|
-
//#region ../../node_modules/.pnpm/remeda@2.
|
|
8
|
+
//#region ../../node_modules/.pnpm/remeda@2.33.1/node_modules/remeda/dist/isFunction.js
|
|
9
9
|
const e = (e$1) => typeof e$1 == `function`;
|
|
10
10
|
|
|
11
11
|
//#endregion
|
package/dist/index.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.js","names":["e","NodeEventEmitter","errors: Error[]","resolvedFiles: Array<KubbFile.ResolvedFile>","files: Array<KubbFile.ResolvedFile>","context: FabricContext<T>","fabric: Fabric<T>","isFunction"],"sources":["../../../node_modules/.pnpm/remeda@2.32.2/node_modules/remeda/dist/isFunction-yJCcR1YM.js","../src/utils/AsyncEventEmitter.ts","../src/FileProcessor.ts","../src/utils/Cache.ts","../src/FileManager.ts","../src/createFabric.ts"],"sourcesContent":["const e=e=>typeof e==`function`;export{e as t};\n//# sourceMappingURL=isFunction-yJCcR1YM.js.map","import { EventEmitter as NodeEventEmitter } from 'node:events'\nimport type { FabricMode } from '../Fabric.ts'\n\ntype Options = {\n mode?: FabricMode\n maxListener?: number\n}\n\nexport class AsyncEventEmitter<TEvents extends Record<string, any>> {\n constructor({ maxListener = 100, mode = 'sequential' }: Options = {}) {\n this.#emitter.setMaxListeners(maxListener)\n this.#mode = mode\n }\n\n #emitter = new NodeEventEmitter()\n #mode: FabricMode\n\n async emit<TEventName extends keyof TEvents & string>(eventName: TEventName, ...eventArgs: TEvents[TEventName]): Promise<void> {\n const listeners = this.#emitter.listeners(eventName) as Array<(...args: TEvents[TEventName]) => any>\n\n if (listeners.length === 0) {\n return\n }\n\n const errors: Error[] = []\n\n if (this.#mode === 'sequential') {\n // Run listeners one by one, in order\n for (const listener of listeners) {\n try {\n await listener(...eventArgs)\n } catch (err) {\n const error = err instanceof Error ? err : new Error(String(err))\n errors.push(error)\n }\n }\n } else {\n // Run all listeners concurrently\n const promises = listeners.map(async (listener) => {\n try {\n await listener(...eventArgs)\n } catch (err) {\n const error = err instanceof Error ? 
err : new Error(String(err))\n errors.push(error)\n }\n })\n await Promise.all(promises)\n }\n\n if (errors.length === 1) {\n throw errors[0]\n }\n\n if (errors.length > 1) {\n throw new AggregateError(errors, `Errors in async listeners for \"${eventName}\"`)\n }\n }\n\n on<TEventName extends keyof TEvents & string>(eventName: TEventName, handler: (...eventArg: TEvents[TEventName]) => void): void {\n this.#emitter.on(eventName, handler as any)\n }\n\n onOnce<TEventName extends keyof TEvents & string>(eventName: TEventName, handler: (...eventArgs: TEvents[TEventName]) => void): void {\n const wrapper = (...args: TEvents[TEventName]) => {\n this.off(eventName, wrapper)\n handler(...args)\n }\n this.on(eventName, wrapper)\n }\n\n off<TEventName extends keyof TEvents & string>(eventName: TEventName, handler: (...eventArg: TEvents[TEventName]) => void): void {\n this.#emitter.off(eventName, handler as any)\n }\n\n removeAll(): void {\n this.#emitter.removeAllListeners()\n }\n}\n","import pLimit from 'p-limit'\nimport type { FabricEvents, FabricMode } from './Fabric.ts'\nimport type * as KubbFile from './KubbFile.ts'\nimport { defaultParser } from './parsers/defaultParser.ts'\nimport type { Parser } from './parsers/types.ts'\nimport { AsyncEventEmitter } from './utils/AsyncEventEmitter.ts'\n\nexport type ProcessFilesProps = {\n parsers?: Map<KubbFile.Extname, Parser>\n extension?: Record<KubbFile.Extname, KubbFile.Extname | ''>\n dryRun?: boolean\n /**\n * @default 'sequential'\n */\n mode?: FabricMode\n}\n\ntype GetParseOptions = {\n parsers?: Map<KubbFile.Extname, Parser>\n extension?: Record<KubbFile.Extname, KubbFile.Extname | ''>\n}\n\ntype Options = {\n events?: AsyncEventEmitter<FabricEvents>\n}\n\nexport class FileProcessor {\n #limit = pLimit(100)\n events: AsyncEventEmitter<FabricEvents>\n\n constructor({ events = new AsyncEventEmitter<FabricEvents>() }: Options = {}) {\n this.events = events\n\n return this\n }\n\n async parse(file: KubbFile.ResolvedFile, { parsers, extension }: GetParseOptions = {}): Promise<string> {\n const parseExtName = extension?.[file.extname] || undefined\n\n if (!parsers) {\n console.warn('No parsers provided, using default parser. If you want to use a specific parser, please provide it in the options.')\n\n return defaultParser.parse(file, { extname: parseExtName })\n }\n\n if (!file.extname) {\n return defaultParser.parse(file, { extname: parseExtName })\n }\n\n const parser = parsers.get(file.extname)\n\n if (!parser) {\n return defaultParser.parse(file, { extname: parseExtName })\n }\n\n return parser.parse(file, { extname: parseExtName })\n }\n\n async run(\n files: Array<KubbFile.ResolvedFile>,\n { parsers, mode = 'sequential', dryRun, extension }: ProcessFilesProps = {},\n ): Promise<KubbFile.ResolvedFile[]> {\n await this.events.emit('files:processing:start', files)\n\n const total = files.length\n let processed = 0\n\n const processOne = async (resolvedFile: KubbFile.ResolvedFile, index: number) => {\n await this.events.emit('file:processing:start', resolvedFile, index, total)\n\n const source = dryRun ? 
undefined : await this.parse(resolvedFile, { extension, parsers })\n\n const currentProcessed = ++processed\n const percentage = (currentProcessed / total) * 100\n\n await this.events.emit('file:processing:update', {\n file: resolvedFile,\n source,\n processed: currentProcessed,\n percentage,\n total,\n })\n\n await this.events.emit('file:processing:end', resolvedFile, index, total)\n }\n\n if (mode === 'sequential') {\n async function* asyncFiles() {\n for (let index = 0; index < files.length; index++) {\n yield [files[index], index] as const\n }\n }\n\n for await (const [file, index] of asyncFiles()) {\n if (file) {\n await processOne(file, index)\n }\n }\n } else {\n const promises = files.map((resolvedFile, index) => this.#limit(() => processOne(resolvedFile, index)))\n await Promise.all(promises)\n }\n\n await this.events.emit('files:processing:end', files)\n\n return files\n }\n}\n","export class Cache<T> {\n #buffer = new Map<string, T>()\n\n get(key: string): T | null {\n return this.#buffer.get(key) ?? null\n }\n\n set(key: string, value: T): void {\n this.#buffer.set(key, value)\n }\n\n delete(key: string): void {\n this.#buffer.delete(key)\n }\n\n clear(): void {\n this.#buffer.clear()\n }\n\n keys(): string[] {\n return [...this.#buffer.keys()]\n }\n\n values(): Array<T> {\n return [...this.#buffer.values()]\n }\n\n flush(): void {\n // No-op for base cache\n }\n}\n","import { orderBy } from 'natural-orderby'\nimport { createFile } from './createFile.ts'\nimport type { FabricEvents } from './Fabric.ts'\nimport { FileProcessor, type ProcessFilesProps } from './FileProcessor.ts'\nimport type * as KubbFile from './KubbFile.ts'\nimport { AsyncEventEmitter } from './utils/AsyncEventEmitter.ts'\nimport { Cache } from './utils/Cache.ts'\nimport { trimExtName } from './utils/trimExtName.ts'\n\nfunction mergeFile<TMeta extends object = object>(a: KubbFile.File<TMeta>, b: KubbFile.File<TMeta>): KubbFile.File<TMeta> {\n return {\n ...a,\n sources: [...(a.sources || []), ...(b.sources || [])],\n imports: [...(a.imports || []), ...(b.imports || [])],\n exports: [...(a.exports || []), ...(b.exports || [])],\n }\n}\n\ntype Options = {\n events?: AsyncEventEmitter<FabricEvents>\n}\n\nexport class FileManager {\n #cache = new Cache<KubbFile.ResolvedFile>()\n #filesCache: Array<KubbFile.ResolvedFile> | null = null\n events: AsyncEventEmitter<FabricEvents>\n processor: FileProcessor\n\n constructor({ events = new AsyncEventEmitter<FabricEvents>() }: Options = {}) {\n this.processor = new FileProcessor({ events })\n\n this.events = events\n return this\n }\n\n #resolvePath(file: KubbFile.File): KubbFile.File {\n this.events.emit('file:resolve:path', file)\n\n return file\n }\n\n #resolveName(file: KubbFile.File): KubbFile.File {\n this.events.emit('file:resolve:name', file)\n\n return file\n }\n\n async add(...files: Array<KubbFile.File>) {\n const resolvedFiles: Array<KubbFile.ResolvedFile> = []\n\n const mergedFiles = new Map<string, KubbFile.File>()\n\n files.forEach((file) => {\n const existing = mergedFiles.get(file.path)\n if (existing) {\n mergedFiles.set(file.path, mergeFile(existing, file))\n } else {\n mergedFiles.set(file.path, file)\n }\n })\n\n for (let file of mergedFiles.values()) {\n file = this.#resolveName(file)\n file = this.#resolvePath(file)\n\n const resolvedFile = createFile(file)\n\n this.#cache.set(resolvedFile.path, resolvedFile)\n this.flush()\n\n resolvedFiles.push(resolvedFile)\n }\n\n await this.events.emit('files:added', resolvedFiles)\n\n return resolvedFiles\n 
}\n\n async upsert(...files: Array<KubbFile.File>) {\n const resolvedFiles: Array<KubbFile.ResolvedFile> = []\n\n const mergedFiles = new Map<string, KubbFile.File>()\n\n files.forEach((file) => {\n const existing = mergedFiles.get(file.path)\n if (existing) {\n mergedFiles.set(file.path, mergeFile(existing, file))\n } else {\n mergedFiles.set(file.path, file)\n }\n })\n\n for (let file of mergedFiles.values()) {\n const existing = this.#cache.get(file.path)\n\n file = this.#resolveName(file)\n file = this.#resolvePath(file)\n\n const merged = existing ? mergeFile(existing, file) : file\n const resolvedFile = createFile(merged)\n\n this.#cache.set(resolvedFile.path, resolvedFile)\n this.flush()\n\n resolvedFiles.push(resolvedFile)\n }\n\n await this.events.emit('files:added', resolvedFiles)\n\n return resolvedFiles\n }\n\n flush() {\n this.#filesCache = null\n this.#cache.flush()\n }\n\n getByPath(path: KubbFile.Path): KubbFile.ResolvedFile | null {\n return this.#cache.get(path)\n }\n\n deleteByPath(path: KubbFile.Path): void {\n this.#cache.delete(path)\n this.#filesCache = null\n }\n\n clear(): void {\n this.#cache.clear()\n this.#filesCache = null\n }\n\n get files(): Array<KubbFile.ResolvedFile> {\n if (this.#filesCache) {\n return this.#filesCache\n }\n\n const cachedKeys = this.#cache.keys()\n\n // order by path length and if file is a barrel file\n const keys = orderBy(cachedKeys, [(v) => v.length, (v) => trimExtName(v).endsWith('index')])\n\n const files: Array<KubbFile.ResolvedFile> = []\n\n for (const key of keys) {\n const file = this.#cache.get(key)\n if (file) {\n files.push(file)\n }\n }\n\n this.#filesCache = files\n\n return files\n }\n\n //TODO add test and check if write of FileManager contains the newly added file\n async write(options: ProcessFilesProps): Promise<KubbFile.ResolvedFile[]> {\n await this.events.emit('files:writing:start', this.files)\n\n const resolvedFiles = await this.processor.run(this.files, options)\n\n this.clear()\n\n await this.events.emit('files:writing:end', resolvedFiles)\n\n return resolvedFiles\n }\n}\n","import { isFunction } from 'remeda'\nimport type { Fabric, FabricConfig, FabricContext, FabricEvents, FabricOptions } from './Fabric.ts'\nimport { FileManager } from './FileManager.ts'\nimport type * as KubbFile from './KubbFile.ts'\nimport type { Parser } from './parsers/types.ts'\nimport type { Plugin } from './plugins/types.ts'\nimport { AsyncEventEmitter } from './utils/AsyncEventEmitter.ts'\n\n/**\n * Creates a new Fabric instance\n *\n * @example\n * const fabric = createFabric()\n * fabric.use(myPlugin())\n */\nexport function createFabric<T extends FabricOptions>(config: FabricConfig<T> = { mode: 'sequential' } as FabricConfig<T>): Fabric<T> {\n const events = new AsyncEventEmitter<FabricEvents>()\n const installedPlugins = new Set<Plugin<any>>()\n const installedParsers = new Map<KubbFile.Extname, Parser<any>>()\n const installedParserNames = new Set<string>()\n const fileManager = new FileManager({ events })\n\n const context: FabricContext<T> = {\n get files() {\n return fileManager.files\n },\n async addFile(...files) {\n await fileManager.add(...files)\n },\n config,\n fileManager,\n installedPlugins,\n installedParsers,\n on: events.on.bind(events),\n off: events.off.bind(events),\n onOnce: events.onOnce.bind(events),\n removeAll: events.removeAll.bind(events),\n emit: events.emit.bind(events),\n } as FabricContext<T>\n\n const fabric: Fabric<T> = {\n context,\n get files() {\n return fileManager.files\n },\n async 
addFile(...files) {\n await fileManager.add(...files)\n },\n async upsertFile(...files) {\n await fileManager.upsert(...files)\n },\n async use(pluginOrParser, ...options) {\n if (pluginOrParser.type === 'plugin') {\n if (installedPlugins.has(pluginOrParser)) {\n console.warn(`Plugin \"${pluginOrParser.name}\" already applied.`)\n } else {\n installedPlugins.add(pluginOrParser)\n }\n\n if (isFunction(pluginOrParser.inject)) {\n const injecter = pluginOrParser.inject\n\n const injected = (injecter as any)(context, ...options)\n Object.assign(fabric, injected)\n }\n }\n\n if (pluginOrParser.type === 'parser') {\n if (installedParserNames.has(pluginOrParser.name)) {\n console.warn(`Parser \"${pluginOrParser.name}\" already applied.`)\n } else {\n installedParserNames.add(pluginOrParser.name)\n }\n\n if (pluginOrParser.extNames) {\n for (const extName of pluginOrParser.extNames) {\n const existing = installedParsers.get(extName)\n if (existing && existing.name !== pluginOrParser.name) {\n console.warn(`Parser \"${pluginOrParser.name}\" is overriding parser \"${existing.name}\" for extension \"${extName}\".`)\n }\n installedParsers.set(extName, pluginOrParser)\n }\n }\n }\n\n if (isFunction(pluginOrParser.install)) {\n const installer = pluginOrParser.install\n\n await (installer as any)(context, ...options)\n }\n\n return fabric\n },\n } as Fabric<T>\n\n return fabric\n}\n"],"x_google_ignoreList":[0],"mappings":";;;;;;;;AAAA,MAAM,KAAE,QAAG,OAAOA,OAAG;;;;;;ACQrB,IAAa,oBAAb,MAAoE;CAClE,YAAY,EAAE,cAAc,KAAK,OAAO,iBAA0B,EAAE,EAAE;6CAK3D,IAAIC,cAAkB;;AAJ/B,wCAAa,CAAC,gBAAgB,YAAY;AAC1C,sCAAa,KAAI;;CAMnB,MAAM,KAAgD,WAAuB,GAAG,WAA+C;EAC7H,MAAM,6CAAY,KAAa,CAAC,UAAU,UAAU;AAEpD,MAAI,UAAU,WAAW,EACvB;EAGF,MAAMC,SAAkB,EAAE;AAE1B,oCAAI,KAAU,KAAK,aAEjB,MAAK,MAAM,YAAY,UACrB,KAAI;AACF,SAAM,SAAS,GAAG,UAAU;WACrB,KAAK;GACZ,MAAM,QAAQ,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,IAAI,CAAC;AACjE,UAAO,KAAK,MAAM;;OAGjB;GAEL,MAAM,WAAW,UAAU,IAAI,OAAO,aAAa;AACjD,QAAI;AACF,WAAM,SAAS,GAAG,UAAU;aACrB,KAAK;KACZ,MAAM,QAAQ,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,IAAI,CAAC;AACjE,YAAO,KAAK,MAAM;;KAEpB;AACF,SAAM,QAAQ,IAAI,SAAS;;AAG7B,MAAI,OAAO,WAAW,EACpB,OAAM,OAAO;AAGf,MAAI,OAAO,SAAS,EAClB,OAAM,IAAI,eAAe,QAAQ,kCAAkC,UAAU,GAAG;;CAIpF,GAA8C,WAAuB,SAA2D;AAC9H,wCAAa,CAAC,GAAG,WAAW,QAAe;;CAG7C,OAAkD,WAAuB,SAA4D;EACnI,MAAM,WAAW,GAAG,SAA8B;AAChD,QAAK,IAAI,WAAW,QAAQ;AAC5B,WAAQ,GAAG,KAAK;;AAElB,OAAK,GAAG,WAAW,QAAQ;;CAG7B,IAA+C,WAAuB,SAA2D;AAC/H,wCAAa,CAAC,IAAI,WAAW,QAAe;;CAG9C,YAAkB;AAChB,wCAAa,CAAC,oBAAoB;;;;;;;ACjDtC,IAAa,gBAAb,MAA2B;CAIzB,YAAY,EAAE,SAAS,IAAI,mBAAiC,KAAc,EAAE,EAAE;2CAHrE,OAAO,IAAI;wBACpB;AAGE,OAAK,SAAS;AAEd,SAAO;;CAGT,MAAM,MAAM,MAA6B,EAAE,SAAS,cAA+B,EAAE,EAAmB;EACtG,MAAM,sEAAe,UAAY,KAAK,aAAY;AAElD,MAAI,CAAC,SAAS;AACZ,WAAQ,KAAK,qHAAqH;AAElI,UAAO,cAAc,MAAM,MAAM,EAAE,SAAS,cAAc,CAAC;;AAG7D,MAAI,CAAC,KAAK,QACR,QAAO,cAAc,MAAM,MAAM,EAAE,SAAS,cAAc,CAAC;EAG7D,MAAM,SAAS,QAAQ,IAAI,KAAK,QAAQ;AAExC,MAAI,CAAC,OACH,QAAO,cAAc,MAAM,MAAM,EAAE,SAAS,cAAc,CAAC;AAG7D,SAAO,OAAO,MAAM,MAAM,EAAE,SAAS,cAAc,CAAC;;CAGtD,MAAM,IACJ,OACA,EAAE,SAAS,OAAO,cAAc,QAAQ,cAAiC,EAAE,EACzC;AAClC,QAAM,KAAK,OAAO,KAAK,0BAA0B,MAAM;EAEvD,MAAM,QAAQ,MAAM;EACpB,IAAI,YAAY;EAEhB,MAAM,aAAa,OAAO,cAAqC,UAAkB;AAC/E,SAAM,KAAK,OAAO,KAAK,yBAAyB,cAAc,OAAO,MAAM;GAE3E,MAAM,SAAS,SAAS,SAAY,MAAM,KAAK,MAAM,cAAc;IAAE;IAAW;IAAS,CAAC;GAE1F,MAAM,mBAAmB,EAAE;GAC3B,MAAM,aAAc,mBAAmB,QAAS;AAEhD,SAAM,KAAK,OAAO,KAAK,0BAA0B;IAC/C,MAAM;IACN;IACA,WAAW;IACX;IACA;IACD,CAAC;AAEF,SAAM,KAAK,OAAO,KAAK,uBAAuB,cAAc,OAAO,MAAM;;AAG3E,MAAI,SAAS,cAAc;GACzB,gBAAgB,aAAa;AAC3B,SAAK,IAAI,QAAQ,GAAG,QAAQ,MAAM,QAAQ,QACxC,OAAM,CAAC,MAAM,QAAQ,MAA
M;;AAI/B,cAAW,MAAM,CAAC,MAAM,UAAU,YAAY,CAC5C,KAAI,KACF,OAAM,WAAW,MAAM,MAAM;SAG5B;GACL,MAAM,WAAW,MAAM,KAAK,cAAc,yCAAU,KAAW,kBAAO,WAAW,cAAc,MAAM,CAAC,CAAC;AACvG,SAAM,QAAQ,IAAI,SAAS;;AAG7B,QAAM,KAAK,OAAO,KAAK,wBAAwB,MAAM;AAErD,SAAO;;;;;;;ACzGX,IAAa,QAAb,MAAsB;;4DACV,IAAI,KAAgB;;CAE9B,IAAI,KAAuB;;AACzB,6DAAO,KAAY,CAAC,IAAI,IAAI,+DAAI;;CAGlC,IAAI,KAAa,OAAgB;AAC/B,uCAAY,CAAC,IAAI,KAAK,MAAM;;CAG9B,OAAO,KAAmB;AACxB,uCAAY,CAAC,OAAO,IAAI;;CAG1B,QAAc;AACZ,uCAAY,CAAC,OAAO;;CAGtB,OAAiB;AACf,SAAO,CAAC,mCAAG,KAAY,CAAC,MAAM,CAAC;;CAGjC,SAAmB;AACjB,SAAO,CAAC,mCAAG,KAAY,CAAC,QAAQ,CAAC;;CAGnC,QAAc;;;;;;;;;;;AClBhB,SAAS,UAAyC,GAAyB,GAA+C;AACxH,QAAO;EACL,GAAG;EACH,SAAS,CAAC,GAAI,EAAE,WAAW,EAAE,EAAG,GAAI,EAAE,WAAW,EAAE,CAAE;EACrD,SAAS,CAAC,GAAI,EAAE,WAAW,EAAE,EAAG,GAAI,EAAE,WAAW,EAAE,CAAE;EACrD,SAAS,CAAC,GAAI,EAAE,WAAW,EAAE,EAAG,GAAI,EAAE,WAAW,EAAE,CAAE;EACtD;;;;;AAOH,IAAa,cAAb,MAAyB;CAMvB,YAAY,EAAE,SAAS,IAAI,mBAAiC,KAAc,EAAE,EAAE;;2CALrE,IAAI,OAA8B;gDACQ;wBACnD;wBACA;AAGE,OAAK,YAAY,IAAI,cAAc,EAAE,QAAQ,CAAC;AAE9C,OAAK,SAAS;AACd,SAAO;;CAeT,MAAM,IAAI,GAAG,OAA6B;EACxC,MAAMC,gBAA8C,EAAE;EAEtD,MAAM,8BAAc,IAAI,KAA4B;AAEpD,QAAM,SAAS,SAAS;GACtB,MAAM,WAAW,YAAY,IAAI,KAAK,KAAK;AAC3C,OAAI,SACF,aAAY,IAAI,KAAK,MAAM,UAAU,UAAU,KAAK,CAAC;OAErD,aAAY,IAAI,KAAK,MAAM,KAAK;IAElC;AAEF,OAAK,IAAI,QAAQ,YAAY,QAAQ,EAAE;AACrC,gDAAO,mBAAiB,YAAC,KAAK;AAC9B,gDAAO,mBAAiB,YAAC,KAAK;GAE9B,MAAM,eAAe,WAAW,KAAK;AAErC,uCAAW,CAAC,IAAI,aAAa,MAAM,aAAa;AAChD,QAAK,OAAO;AAEZ,iBAAc,KAAK,aAAa;;AAGlC,QAAM,KAAK,OAAO,KAAK,eAAe,cAAc;AAEpD,SAAO;;CAGT,MAAM,OAAO,GAAG,OAA6B;EAC3C,MAAMA,gBAA8C,EAAE;EAEtD,MAAM,8BAAc,IAAI,KAA4B;AAEpD,QAAM,SAAS,SAAS;GACtB,MAAM,WAAW,YAAY,IAAI,KAAK,KAAK;AAC3C,OAAI,SACF,aAAY,IAAI,KAAK,MAAM,UAAU,UAAU,KAAK,CAAC;OAErD,aAAY,IAAI,KAAK,MAAM,KAAK;IAElC;AAEF,OAAK,IAAI,QAAQ,YAAY,QAAQ,EAAE;GACrC,MAAM,0CAAW,KAAW,CAAC,IAAI,KAAK,KAAK;AAE3C,gDAAO,mBAAiB,YAAC,KAAK;AAC9B,gDAAO,mBAAiB,YAAC,KAAK;GAG9B,MAAM,eAAe,WADN,WAAW,UAAU,UAAU,KAAK,GAAG,KACf;AAEvC,uCAAW,CAAC,IAAI,aAAa,MAAM,aAAa;AAChD,QAAK,OAAO;AAEZ,iBAAc,KAAK,aAAa;;AAGlC,QAAM,KAAK,OAAO,KAAK,eAAe,cAAc;AAEpD,SAAO;;CAGT,QAAQ;AACN,4CAAmB,KAAI;AACvB,sCAAW,CAAC,OAAO;;CAGrB,UAAU,MAAmD;AAC3D,wCAAO,KAAW,CAAC,IAAI,KAAK;;CAG9B,aAAa,MAA2B;AACtC,sCAAW,CAAC,OAAO,KAAK;AACxB,4CAAmB,KAAI;;CAGzB,QAAc;AACZ,sCAAW,CAAC,OAAO;AACnB,4CAAmB,KAAI;;CAGzB,IAAI,QAAsC;AACxC,0CAAI,KAAgB,CAClB,4CAAO,KAAgB;EAMzB,MAAM,OAAO,uCAHM,KAAW,CAAC,MAAM,EAGJ,EAAE,MAAM,EAAE,SAAS,MAAM,YAAY,EAAE,CAAC,SAAS,QAAQ,CAAC,CAAC;EAE5F,MAAMC,QAAsC,EAAE;AAE9C,OAAK,MAAM,OAAO,MAAM;GACtB,MAAM,sCAAO,KAAW,CAAC,IAAI,IAAI;AACjC,OAAI,KACF,OAAM,KAAK,KAAK;;AAIpB,4CAAmB,MAAK;AAExB,SAAO;;CAIT,MAAM,MAAM,SAA8D;AACxE,QAAM,KAAK,OAAO,KAAK,uBAAuB,KAAK,MAAM;EAEzD,MAAM,gBAAgB,MAAM,KAAK,UAAU,IAAI,KAAK,OAAO,QAAQ;AAEnE,OAAK,OAAO;AAEZ,QAAM,KAAK,OAAO,KAAK,qBAAqB,cAAc;AAE1D,SAAO;;;AAlIT,sBAAa,MAAoC;AAC/C,MAAK,OAAO,KAAK,qBAAqB,KAAK;AAE3C,QAAO;;AAGT,sBAAa,MAAoC;AAC/C,MAAK,OAAO,KAAK,qBAAqB,KAAK;AAE3C,QAAO;;;;;;;;;;;;AC7BX,SAAgB,aAAsC,SAA0B,EAAE,MAAM,cAAc,EAAgC;CACpI,MAAM,SAAS,IAAI,mBAAiC;CACpD,MAAM,mCAAmB,IAAI,KAAkB;CAC/C,MAAM,mCAAmB,IAAI,KAAoC;CACjE,MAAM,uCAAuB,IAAI,KAAa;CAC9C,MAAM,cAAc,IAAI,YAAY,EAAE,QAAQ,CAAC;CAE/C,MAAMC,UAA4B;EAChC,IAAI,QAAQ;AACV,UAAO,YAAY;;EAErB,MAAM,QAAQ,GAAG,OAAO;AACtB,SAAM,YAAY,IAAI,GAAG,MAAM;;EAEjC;EACA;EACA;EACA;EACA,IAAI,OAAO,GAAG,KAAK,OAAO;EAC1B,KAAK,OAAO,IAAI,KAAK,OAAO;EAC5B,QAAQ,OAAO,OAAO,KAAK,OAAO;EAClC,WAAW,OAAO,UAAU,KAAK,OAAO;EACxC,MAAM,OAAO,KAAK,KAAK,OAAO;EAC/B;CAED,MAAMC,SAAoB;EACxB;EACA,IAAI,QAAQ;AACV,UAAO,YAAY;;EAErB,MAAM,QAAQ,GAAG,OAAO;AACtB,SAAM,YAAY,IAAI,GAAG,MAAM;;EAEjC,MAAM,WAAW,GAAG,OAAO;AACzB,SAAM,YAAY,OAAO,GAAG,MAAM;;EAEpC,MAAM,IAAI,gBAAgB,GAA
G,SAAS;AACpC,OAAI,eAAe,SAAS,UAAU;AACpC,QAAI,iBAAiB,IAAI,eAAe,CACtC,SAAQ,KAAK,WAAW,eAAe,KAAK,oBAAoB;QAEhE,kBAAiB,IAAI,eAAe;AAGtC,QAAIC,EAAW,eAAe,OAAO,EAAE;KACrC,MAAM,WAAW,eAAe;KAEhC,MAAM,WAAY,SAAiB,SAAS,GAAG,QAAQ;AACvD,YAAO,OAAO,QAAQ,SAAS;;;AAInC,OAAI,eAAe,SAAS,UAAU;AACpC,QAAI,qBAAqB,IAAI,eAAe,KAAK,CAC/C,SAAQ,KAAK,WAAW,eAAe,KAAK,oBAAoB;QAEhE,sBAAqB,IAAI,eAAe,KAAK;AAG/C,QAAI,eAAe,SACjB,MAAK,MAAM,WAAW,eAAe,UAAU;KAC7C,MAAM,WAAW,iBAAiB,IAAI,QAAQ;AAC9C,SAAI,YAAY,SAAS,SAAS,eAAe,KAC/C,SAAQ,KAAK,WAAW,eAAe,KAAK,0BAA0B,SAAS,KAAK,mBAAmB,QAAQ,IAAI;AAErH,sBAAiB,IAAI,SAAS,eAAe;;;AAKnD,OAAIA,EAAW,eAAe,QAAQ,EAAE;IACtC,MAAM,YAAY,eAAe;AAEjC,UAAO,UAAkB,SAAS,GAAG,QAAQ;;AAG/C,UAAO;;EAEV;AAED,QAAO"}
|
|
1
|
+
{"version":3,"file":"index.js","names":["e","NodeEventEmitter","errors: Error[]","resolvedFiles: Array<KubbFile.ResolvedFile>","files: Array<KubbFile.ResolvedFile>","context: FabricContext<T>","fabric: Fabric<T>","isFunction"],"sources":["../../../node_modules/.pnpm/remeda@2.33.1/node_modules/remeda/dist/isFunction.js","../src/utils/AsyncEventEmitter.ts","../src/FileProcessor.ts","../src/utils/Cache.ts","../src/FileManager.ts","../src/createFabric.ts"],"sourcesContent":["const e=e=>typeof e==`function`;export{e as isFunction};\n//# sourceMappingURL=isFunction.js.map","import { EventEmitter as NodeEventEmitter } from 'node:events'\nimport type { FabricMode } from '../Fabric.ts'\n\ntype Options = {\n mode?: FabricMode\n maxListener?: number\n}\n\nexport class AsyncEventEmitter<TEvents extends Record<string, any>> {\n constructor({ maxListener = 100, mode = 'sequential' }: Options = {}) {\n this.#emitter.setMaxListeners(maxListener)\n this.#mode = mode\n }\n\n #emitter = new NodeEventEmitter()\n #mode: FabricMode\n\n async emit<TEventName extends keyof TEvents & string>(eventName: TEventName, ...eventArgs: TEvents[TEventName]): Promise<void> {\n const listeners = this.#emitter.listeners(eventName) as Array<(...args: TEvents[TEventName]) => any>\n\n if (listeners.length === 0) {\n return\n }\n\n const errors: Error[] = []\n\n if (this.#mode === 'sequential') {\n // Run listeners one by one, in order\n for (const listener of listeners) {\n try {\n await listener(...eventArgs)\n } catch (err) {\n const error = err instanceof Error ? err : new Error(String(err))\n errors.push(error)\n }\n }\n } else {\n // Run all listeners concurrently\n const promises = listeners.map(async (listener) => {\n try {\n await listener(...eventArgs)\n } catch (err) {\n const error = err instanceof Error ? 
err : new Error(String(err))\n errors.push(error)\n }\n })\n await Promise.all(promises)\n }\n\n if (errors.length === 1) {\n throw errors[0]\n }\n\n if (errors.length > 1) {\n throw new AggregateError(errors, `Errors in async listeners for \"${eventName}\"`)\n }\n }\n\n on<TEventName extends keyof TEvents & string>(eventName: TEventName, handler: (...eventArg: TEvents[TEventName]) => void): void {\n this.#emitter.on(eventName, handler as any)\n }\n\n onOnce<TEventName extends keyof TEvents & string>(eventName: TEventName, handler: (...eventArgs: TEvents[TEventName]) => void): void {\n const wrapper = (...args: TEvents[TEventName]) => {\n this.off(eventName, wrapper)\n handler(...args)\n }\n this.on(eventName, wrapper)\n }\n\n off<TEventName extends keyof TEvents & string>(eventName: TEventName, handler: (...eventArg: TEvents[TEventName]) => void): void {\n this.#emitter.off(eventName, handler as any)\n }\n\n removeAll(): void {\n this.#emitter.removeAllListeners()\n }\n}\n","import pLimit from 'p-limit'\nimport type { FabricEvents, FabricMode } from './Fabric.ts'\nimport type * as KubbFile from './KubbFile.ts'\nimport { defaultParser } from './parsers/defaultParser.ts'\nimport type { Parser } from './parsers/types.ts'\nimport { AsyncEventEmitter } from './utils/AsyncEventEmitter.ts'\n\nexport type ProcessFilesProps = {\n parsers?: Map<KubbFile.Extname, Parser>\n extension?: Record<KubbFile.Extname, KubbFile.Extname | ''>\n dryRun?: boolean\n /**\n * @default 'sequential'\n */\n mode?: FabricMode\n}\n\ntype GetParseOptions = {\n parsers?: Map<KubbFile.Extname, Parser>\n extension?: Record<KubbFile.Extname, KubbFile.Extname | ''>\n}\n\ntype Options = {\n events?: AsyncEventEmitter<FabricEvents>\n}\n\nexport class FileProcessor {\n #limit = pLimit(100)\n events: AsyncEventEmitter<FabricEvents>\n\n constructor({ events = new AsyncEventEmitter<FabricEvents>() }: Options = {}) {\n this.events = events\n\n return this\n }\n\n async parse(file: KubbFile.ResolvedFile, { parsers, extension }: GetParseOptions = {}): Promise<string> {\n const parseExtName = extension?.[file.extname] || undefined\n\n if (!parsers) {\n console.warn('No parsers provided, using default parser. If you want to use a specific parser, please provide it in the options.')\n\n return defaultParser.parse(file, { extname: parseExtName })\n }\n\n if (!file.extname) {\n return defaultParser.parse(file, { extname: parseExtName })\n }\n\n const parser = parsers.get(file.extname)\n\n if (!parser) {\n return defaultParser.parse(file, { extname: parseExtName })\n }\n\n return parser.parse(file, { extname: parseExtName })\n }\n\n async run(\n files: Array<KubbFile.ResolvedFile>,\n { parsers, mode = 'sequential', dryRun, extension }: ProcessFilesProps = {},\n ): Promise<KubbFile.ResolvedFile[]> {\n await this.events.emit('files:processing:start', files)\n\n const total = files.length\n let processed = 0\n\n const processOne = async (resolvedFile: KubbFile.ResolvedFile, index: number) => {\n await this.events.emit('file:processing:start', resolvedFile, index, total)\n\n const source = dryRun ? 
undefined : await this.parse(resolvedFile, { extension, parsers })\n\n const currentProcessed = ++processed\n const percentage = (currentProcessed / total) * 100\n\n await this.events.emit('file:processing:update', {\n file: resolvedFile,\n source,\n processed: currentProcessed,\n percentage,\n total,\n })\n\n await this.events.emit('file:processing:end', resolvedFile, index, total)\n }\n\n if (mode === 'sequential') {\n async function* asyncFiles() {\n for (let index = 0; index < files.length; index++) {\n yield [files[index], index] as const\n }\n }\n\n for await (const [file, index] of asyncFiles()) {\n if (file) {\n await processOne(file, index)\n }\n }\n } else {\n const promises = files.map((resolvedFile, index) => this.#limit(() => processOne(resolvedFile, index)))\n await Promise.all(promises)\n }\n\n await this.events.emit('files:processing:end', files)\n\n return files\n }\n}\n","export class Cache<T> {\n #buffer = new Map<string, T>()\n\n get(key: string): T | null {\n return this.#buffer.get(key) ?? null\n }\n\n set(key: string, value: T): void {\n this.#buffer.set(key, value)\n }\n\n delete(key: string): void {\n this.#buffer.delete(key)\n }\n\n clear(): void {\n this.#buffer.clear()\n }\n\n keys(): string[] {\n return [...this.#buffer.keys()]\n }\n\n values(): Array<T> {\n return [...this.#buffer.values()]\n }\n\n flush(): void {\n // No-op for base cache\n }\n}\n","import { orderBy } from 'natural-orderby'\nimport { createFile } from './createFile.ts'\nimport type { FabricEvents } from './Fabric.ts'\nimport { FileProcessor, type ProcessFilesProps } from './FileProcessor.ts'\nimport type * as KubbFile from './KubbFile.ts'\nimport { AsyncEventEmitter } from './utils/AsyncEventEmitter.ts'\nimport { Cache } from './utils/Cache.ts'\nimport { trimExtName } from './utils/trimExtName.ts'\n\nfunction mergeFile<TMeta extends object = object>(a: KubbFile.File<TMeta>, b: KubbFile.File<TMeta>): KubbFile.File<TMeta> {\n return {\n ...a,\n sources: [...(a.sources || []), ...(b.sources || [])],\n imports: [...(a.imports || []), ...(b.imports || [])],\n exports: [...(a.exports || []), ...(b.exports || [])],\n }\n}\n\ntype Options = {\n events?: AsyncEventEmitter<FabricEvents>\n}\n\nexport class FileManager {\n #cache = new Cache<KubbFile.ResolvedFile>()\n #filesCache: Array<KubbFile.ResolvedFile> | null = null\n events: AsyncEventEmitter<FabricEvents>\n processor: FileProcessor\n\n constructor({ events = new AsyncEventEmitter<FabricEvents>() }: Options = {}) {\n this.processor = new FileProcessor({ events })\n\n this.events = events\n return this\n }\n\n #resolvePath(file: KubbFile.File): KubbFile.File {\n this.events.emit('file:resolve:path', file)\n\n return file\n }\n\n #resolveName(file: KubbFile.File): KubbFile.File {\n this.events.emit('file:resolve:name', file)\n\n return file\n }\n\n async add(...files: Array<KubbFile.File>) {\n const resolvedFiles: Array<KubbFile.ResolvedFile> = []\n\n const mergedFiles = new Map<string, KubbFile.File>()\n\n files.forEach((file) => {\n const existing = mergedFiles.get(file.path)\n if (existing) {\n mergedFiles.set(file.path, mergeFile(existing, file))\n } else {\n mergedFiles.set(file.path, file)\n }\n })\n\n for (let file of mergedFiles.values()) {\n file = this.#resolveName(file)\n file = this.#resolvePath(file)\n\n const resolvedFile = createFile(file)\n\n this.#cache.set(resolvedFile.path, resolvedFile)\n this.flush()\n\n resolvedFiles.push(resolvedFile)\n }\n\n await this.events.emit('files:added', resolvedFiles)\n\n return resolvedFiles\n 
}\n\n async upsert(...files: Array<KubbFile.File>) {\n const resolvedFiles: Array<KubbFile.ResolvedFile> = []\n\n const mergedFiles = new Map<string, KubbFile.File>()\n\n files.forEach((file) => {\n const existing = mergedFiles.get(file.path)\n if (existing) {\n mergedFiles.set(file.path, mergeFile(existing, file))\n } else {\n mergedFiles.set(file.path, file)\n }\n })\n\n for (let file of mergedFiles.values()) {\n const existing = this.#cache.get(file.path)\n\n file = this.#resolveName(file)\n file = this.#resolvePath(file)\n\n const merged = existing ? mergeFile(existing, file) : file\n const resolvedFile = createFile(merged)\n\n this.#cache.set(resolvedFile.path, resolvedFile)\n this.flush()\n\n resolvedFiles.push(resolvedFile)\n }\n\n await this.events.emit('files:added', resolvedFiles)\n\n return resolvedFiles\n }\n\n flush() {\n this.#filesCache = null\n this.#cache.flush()\n }\n\n getByPath(path: KubbFile.Path): KubbFile.ResolvedFile | null {\n return this.#cache.get(path)\n }\n\n deleteByPath(path: KubbFile.Path): void {\n this.#cache.delete(path)\n this.#filesCache = null\n }\n\n clear(): void {\n this.#cache.clear()\n this.#filesCache = null\n }\n\n get files(): Array<KubbFile.ResolvedFile> {\n if (this.#filesCache) {\n return this.#filesCache\n }\n\n const cachedKeys = this.#cache.keys()\n\n // order by path length and if file is a barrel file\n const keys = orderBy(cachedKeys, [(v) => v.length, (v) => trimExtName(v).endsWith('index')])\n\n const files: Array<KubbFile.ResolvedFile> = []\n\n for (const key of keys) {\n const file = this.#cache.get(key)\n if (file) {\n files.push(file)\n }\n }\n\n this.#filesCache = files\n\n return files\n }\n\n //TODO add test and check if write of FileManager contains the newly added file\n async write(options: ProcessFilesProps): Promise<KubbFile.ResolvedFile[]> {\n await this.events.emit('files:writing:start', this.files)\n\n const resolvedFiles = await this.processor.run(this.files, options)\n\n this.clear()\n\n await this.events.emit('files:writing:end', resolvedFiles)\n\n return resolvedFiles\n }\n}\n","import { isFunction } from 'remeda'\nimport type { Fabric, FabricConfig, FabricContext, FabricEvents, FabricOptions } from './Fabric.ts'\nimport { FileManager } from './FileManager.ts'\nimport type * as KubbFile from './KubbFile.ts'\nimport type { Parser } from './parsers/types.ts'\nimport type { Plugin } from './plugins/types.ts'\nimport { AsyncEventEmitter } from './utils/AsyncEventEmitter.ts'\n\n/**\n * Creates a new Fabric instance\n *\n * @example\n * const fabric = createFabric()\n * fabric.use(myPlugin())\n */\nexport function createFabric<T extends FabricOptions>(config: FabricConfig<T> = { mode: 'sequential' } as FabricConfig<T>): Fabric<T> {\n const events = new AsyncEventEmitter<FabricEvents>()\n const installedPlugins = new Set<Plugin<any>>()\n const installedParsers = new Map<KubbFile.Extname, Parser<any>>()\n const installedParserNames = new Set<string>()\n const fileManager = new FileManager({ events })\n\n const context: FabricContext<T> = {\n get files() {\n return fileManager.files\n },\n async addFile(...files) {\n await fileManager.add(...files)\n },\n config,\n fileManager,\n installedPlugins,\n installedParsers,\n on: events.on.bind(events),\n off: events.off.bind(events),\n onOnce: events.onOnce.bind(events),\n removeAll: events.removeAll.bind(events),\n emit: events.emit.bind(events),\n } as FabricContext<T>\n\n const fabric: Fabric<T> = {\n context,\n get files() {\n return fileManager.files\n },\n async 
addFile(...files) {\n await fileManager.add(...files)\n },\n async upsertFile(...files) {\n await fileManager.upsert(...files)\n },\n async use(pluginOrParser, ...options) {\n if (pluginOrParser.type === 'plugin') {\n if (installedPlugins.has(pluginOrParser)) {\n console.warn(`Plugin \"${pluginOrParser.name}\" already applied.`)\n } else {\n installedPlugins.add(pluginOrParser)\n }\n\n if (isFunction(pluginOrParser.inject)) {\n const injecter = pluginOrParser.inject\n\n const injected = (injecter as any)(context, ...options)\n Object.assign(fabric, injected)\n }\n }\n\n if (pluginOrParser.type === 'parser') {\n if (installedParserNames.has(pluginOrParser.name)) {\n console.warn(`Parser \"${pluginOrParser.name}\" already applied.`)\n } else {\n installedParserNames.add(pluginOrParser.name)\n }\n\n if (pluginOrParser.extNames) {\n for (const extName of pluginOrParser.extNames) {\n const existing = installedParsers.get(extName)\n if (existing && existing.name !== pluginOrParser.name) {\n console.warn(`Parser \"${pluginOrParser.name}\" is overriding parser \"${existing.name}\" for extension \"${extName}\".`)\n }\n installedParsers.set(extName, pluginOrParser)\n }\n }\n }\n\n if (isFunction(pluginOrParser.install)) {\n const installer = pluginOrParser.install\n\n await (installer as any)(context, ...options)\n }\n\n return fabric\n },\n } as Fabric<T>\n\n return fabric\n}\n"],"x_google_ignoreList":[0],"mappings":";;;;;;;;AAAA,MAAM,KAAE,QAAG,OAAOA,OAAG;;;;;;ACQrB,IAAa,oBAAb,MAAoE;CAClE,YAAY,EAAE,cAAc,KAAK,OAAO,iBAA0B,EAAE,EAAE;6CAK3D,IAAIC,cAAkB;;AAJ/B,wCAAa,CAAC,gBAAgB,YAAY;AAC1C,sCAAa,KAAI;;CAMnB,MAAM,KAAgD,WAAuB,GAAG,WAA+C;EAC7H,MAAM,6CAAY,KAAa,CAAC,UAAU,UAAU;AAEpD,MAAI,UAAU,WAAW,EACvB;EAGF,MAAMC,SAAkB,EAAE;AAE1B,oCAAI,KAAU,KAAK,aAEjB,MAAK,MAAM,YAAY,UACrB,KAAI;AACF,SAAM,SAAS,GAAG,UAAU;WACrB,KAAK;GACZ,MAAM,QAAQ,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,IAAI,CAAC;AACjE,UAAO,KAAK,MAAM;;OAGjB;GAEL,MAAM,WAAW,UAAU,IAAI,OAAO,aAAa;AACjD,QAAI;AACF,WAAM,SAAS,GAAG,UAAU;aACrB,KAAK;KACZ,MAAM,QAAQ,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,IAAI,CAAC;AACjE,YAAO,KAAK,MAAM;;KAEpB;AACF,SAAM,QAAQ,IAAI,SAAS;;AAG7B,MAAI,OAAO,WAAW,EACpB,OAAM,OAAO;AAGf,MAAI,OAAO,SAAS,EAClB,OAAM,IAAI,eAAe,QAAQ,kCAAkC,UAAU,GAAG;;CAIpF,GAA8C,WAAuB,SAA2D;AAC9H,wCAAa,CAAC,GAAG,WAAW,QAAe;;CAG7C,OAAkD,WAAuB,SAA4D;EACnI,MAAM,WAAW,GAAG,SAA8B;AAChD,QAAK,IAAI,WAAW,QAAQ;AAC5B,WAAQ,GAAG,KAAK;;AAElB,OAAK,GAAG,WAAW,QAAQ;;CAG7B,IAA+C,WAAuB,SAA2D;AAC/H,wCAAa,CAAC,IAAI,WAAW,QAAe;;CAG9C,YAAkB;AAChB,wCAAa,CAAC,oBAAoB;;;;;;;ACjDtC,IAAa,gBAAb,MAA2B;CAIzB,YAAY,EAAE,SAAS,IAAI,mBAAiC,KAAc,EAAE,EAAE;2CAHrE,OAAO,IAAI;wBACpB;AAGE,OAAK,SAAS;AAEd,SAAO;;CAGT,MAAM,MAAM,MAA6B,EAAE,SAAS,cAA+B,EAAE,EAAmB;EACtG,MAAM,sEAAe,UAAY,KAAK,aAAY;AAElD,MAAI,CAAC,SAAS;AACZ,WAAQ,KAAK,qHAAqH;AAElI,UAAO,cAAc,MAAM,MAAM,EAAE,SAAS,cAAc,CAAC;;AAG7D,MAAI,CAAC,KAAK,QACR,QAAO,cAAc,MAAM,MAAM,EAAE,SAAS,cAAc,CAAC;EAG7D,MAAM,SAAS,QAAQ,IAAI,KAAK,QAAQ;AAExC,MAAI,CAAC,OACH,QAAO,cAAc,MAAM,MAAM,EAAE,SAAS,cAAc,CAAC;AAG7D,SAAO,OAAO,MAAM,MAAM,EAAE,SAAS,cAAc,CAAC;;CAGtD,MAAM,IACJ,OACA,EAAE,SAAS,OAAO,cAAc,QAAQ,cAAiC,EAAE,EACzC;AAClC,QAAM,KAAK,OAAO,KAAK,0BAA0B,MAAM;EAEvD,MAAM,QAAQ,MAAM;EACpB,IAAI,YAAY;EAEhB,MAAM,aAAa,OAAO,cAAqC,UAAkB;AAC/E,SAAM,KAAK,OAAO,KAAK,yBAAyB,cAAc,OAAO,MAAM;GAE3E,MAAM,SAAS,SAAS,SAAY,MAAM,KAAK,MAAM,cAAc;IAAE;IAAW;IAAS,CAAC;GAE1F,MAAM,mBAAmB,EAAE;GAC3B,MAAM,aAAc,mBAAmB,QAAS;AAEhD,SAAM,KAAK,OAAO,KAAK,0BAA0B;IAC/C,MAAM;IACN;IACA,WAAW;IACX;IACA;IACD,CAAC;AAEF,SAAM,KAAK,OAAO,KAAK,uBAAuB,cAAc,OAAO,MAAM;;AAG3E,MAAI,SAAS,cAAc;GACzB,gBAAgB,aAAa;AAC3B,SAAK,IAAI,QAAQ,GAAG,QAAQ,MAAM,QAAQ,QACxC,OAAM,CAAC,MAAM,QAAQ,MAA
M;;AAI/B,cAAW,MAAM,CAAC,MAAM,UAAU,YAAY,CAC5C,KAAI,KACF,OAAM,WAAW,MAAM,MAAM;SAG5B;GACL,MAAM,WAAW,MAAM,KAAK,cAAc,yCAAU,KAAW,kBAAO,WAAW,cAAc,MAAM,CAAC,CAAC;AACvG,SAAM,QAAQ,IAAI,SAAS;;AAG7B,QAAM,KAAK,OAAO,KAAK,wBAAwB,MAAM;AAErD,SAAO;;;;;;;ACzGX,IAAa,QAAb,MAAsB;;4DACV,IAAI,KAAgB;;CAE9B,IAAI,KAAuB;;AACzB,6DAAO,KAAY,CAAC,IAAI,IAAI,+DAAI;;CAGlC,IAAI,KAAa,OAAgB;AAC/B,uCAAY,CAAC,IAAI,KAAK,MAAM;;CAG9B,OAAO,KAAmB;AACxB,uCAAY,CAAC,OAAO,IAAI;;CAG1B,QAAc;AACZ,uCAAY,CAAC,OAAO;;CAGtB,OAAiB;AACf,SAAO,CAAC,mCAAG,KAAY,CAAC,MAAM,CAAC;;CAGjC,SAAmB;AACjB,SAAO,CAAC,mCAAG,KAAY,CAAC,QAAQ,CAAC;;CAGnC,QAAc;;;;;;;;;;;AClBhB,SAAS,UAAyC,GAAyB,GAA+C;AACxH,QAAO;EACL,GAAG;EACH,SAAS,CAAC,GAAI,EAAE,WAAW,EAAE,EAAG,GAAI,EAAE,WAAW,EAAE,CAAE;EACrD,SAAS,CAAC,GAAI,EAAE,WAAW,EAAE,EAAG,GAAI,EAAE,WAAW,EAAE,CAAE;EACrD,SAAS,CAAC,GAAI,EAAE,WAAW,EAAE,EAAG,GAAI,EAAE,WAAW,EAAE,CAAE;EACtD;;;;;AAOH,IAAa,cAAb,MAAyB;CAMvB,YAAY,EAAE,SAAS,IAAI,mBAAiC,KAAc,EAAE,EAAE;;2CALrE,IAAI,OAA8B;gDACQ;wBACnD;wBACA;AAGE,OAAK,YAAY,IAAI,cAAc,EAAE,QAAQ,CAAC;AAE9C,OAAK,SAAS;AACd,SAAO;;CAeT,MAAM,IAAI,GAAG,OAA6B;EACxC,MAAMC,gBAA8C,EAAE;EAEtD,MAAM,8BAAc,IAAI,KAA4B;AAEpD,QAAM,SAAS,SAAS;GACtB,MAAM,WAAW,YAAY,IAAI,KAAK,KAAK;AAC3C,OAAI,SACF,aAAY,IAAI,KAAK,MAAM,UAAU,UAAU,KAAK,CAAC;OAErD,aAAY,IAAI,KAAK,MAAM,KAAK;IAElC;AAEF,OAAK,IAAI,QAAQ,YAAY,QAAQ,EAAE;AACrC,gDAAO,mBAAiB,YAAC,KAAK;AAC9B,gDAAO,mBAAiB,YAAC,KAAK;GAE9B,MAAM,eAAe,WAAW,KAAK;AAErC,uCAAW,CAAC,IAAI,aAAa,MAAM,aAAa;AAChD,QAAK,OAAO;AAEZ,iBAAc,KAAK,aAAa;;AAGlC,QAAM,KAAK,OAAO,KAAK,eAAe,cAAc;AAEpD,SAAO;;CAGT,MAAM,OAAO,GAAG,OAA6B;EAC3C,MAAMA,gBAA8C,EAAE;EAEtD,MAAM,8BAAc,IAAI,KAA4B;AAEpD,QAAM,SAAS,SAAS;GACtB,MAAM,WAAW,YAAY,IAAI,KAAK,KAAK;AAC3C,OAAI,SACF,aAAY,IAAI,KAAK,MAAM,UAAU,UAAU,KAAK,CAAC;OAErD,aAAY,IAAI,KAAK,MAAM,KAAK;IAElC;AAEF,OAAK,IAAI,QAAQ,YAAY,QAAQ,EAAE;GACrC,MAAM,0CAAW,KAAW,CAAC,IAAI,KAAK,KAAK;AAE3C,gDAAO,mBAAiB,YAAC,KAAK;AAC9B,gDAAO,mBAAiB,YAAC,KAAK;GAG9B,MAAM,eAAe,WADN,WAAW,UAAU,UAAU,KAAK,GAAG,KACf;AAEvC,uCAAW,CAAC,IAAI,aAAa,MAAM,aAAa;AAChD,QAAK,OAAO;AAEZ,iBAAc,KAAK,aAAa;;AAGlC,QAAM,KAAK,OAAO,KAAK,eAAe,cAAc;AAEpD,SAAO;;CAGT,QAAQ;AACN,4CAAmB,KAAI;AACvB,sCAAW,CAAC,OAAO;;CAGrB,UAAU,MAAmD;AAC3D,wCAAO,KAAW,CAAC,IAAI,KAAK;;CAG9B,aAAa,MAA2B;AACtC,sCAAW,CAAC,OAAO,KAAK;AACxB,4CAAmB,KAAI;;CAGzB,QAAc;AACZ,sCAAW,CAAC,OAAO;AACnB,4CAAmB,KAAI;;CAGzB,IAAI,QAAsC;AACxC,0CAAI,KAAgB,CAClB,4CAAO,KAAgB;EAMzB,MAAM,OAAO,uCAHM,KAAW,CAAC,MAAM,EAGJ,EAAE,MAAM,EAAE,SAAS,MAAM,YAAY,EAAE,CAAC,SAAS,QAAQ,CAAC,CAAC;EAE5F,MAAMC,QAAsC,EAAE;AAE9C,OAAK,MAAM,OAAO,MAAM;GACtB,MAAM,sCAAO,KAAW,CAAC,IAAI,IAAI;AACjC,OAAI,KACF,OAAM,KAAK,KAAK;;AAIpB,4CAAmB,MAAK;AAExB,SAAO;;CAIT,MAAM,MAAM,SAA8D;AACxE,QAAM,KAAK,OAAO,KAAK,uBAAuB,KAAK,MAAM;EAEzD,MAAM,gBAAgB,MAAM,KAAK,UAAU,IAAI,KAAK,OAAO,QAAQ;AAEnE,OAAK,OAAO;AAEZ,QAAM,KAAK,OAAO,KAAK,qBAAqB,cAAc;AAE1D,SAAO;;;AAlIT,sBAAa,MAAoC;AAC/C,MAAK,OAAO,KAAK,qBAAqB,KAAK;AAE3C,QAAO;;AAGT,sBAAa,MAAoC;AAC/C,MAAK,OAAO,KAAK,qBAAqB,KAAK;AAE3C,QAAO;;;;;;;;;;;;AC7BX,SAAgB,aAAsC,SAA0B,EAAE,MAAM,cAAc,EAAgC;CACpI,MAAM,SAAS,IAAI,mBAAiC;CACpD,MAAM,mCAAmB,IAAI,KAAkB;CAC/C,MAAM,mCAAmB,IAAI,KAAoC;CACjE,MAAM,uCAAuB,IAAI,KAAa;CAC9C,MAAM,cAAc,IAAI,YAAY,EAAE,QAAQ,CAAC;CAE/C,MAAMC,UAA4B;EAChC,IAAI,QAAQ;AACV,UAAO,YAAY;;EAErB,MAAM,QAAQ,GAAG,OAAO;AACtB,SAAM,YAAY,IAAI,GAAG,MAAM;;EAEjC;EACA;EACA;EACA;EACA,IAAI,OAAO,GAAG,KAAK,OAAO;EAC1B,KAAK,OAAO,IAAI,KAAK,OAAO;EAC5B,QAAQ,OAAO,OAAO,KAAK,OAAO;EAClC,WAAW,OAAO,UAAU,KAAK,OAAO;EACxC,MAAM,OAAO,KAAK,KAAK,OAAO;EAC/B;CAED,MAAMC,SAAoB;EACxB;EACA,IAAI,QAAQ;AACV,UAAO,YAAY;;EAErB,MAAM,QAAQ,GAAG,OAAO;AACtB,SAAM,YAAY,IAAI,GAAG,MAAM;;EAEjC,MAAM,WAAW,GAAG,OAAO;AACzB,SAAM,YAAY,OAAO,GAAG,MAAM;;EAEpC,MAAM,IAAI,gBAAgB,GAA
G,SAAS;AACpC,OAAI,eAAe,SAAS,UAAU;AACpC,QAAI,iBAAiB,IAAI,eAAe,CACtC,SAAQ,KAAK,WAAW,eAAe,KAAK,oBAAoB;QAEhE,kBAAiB,IAAI,eAAe;AAGtC,QAAIC,EAAW,eAAe,OAAO,EAAE;KACrC,MAAM,WAAW,eAAe;KAEhC,MAAM,WAAY,SAAiB,SAAS,GAAG,QAAQ;AACvD,YAAO,OAAO,QAAQ,SAAS;;;AAInC,OAAI,eAAe,SAAS,UAAU;AACpC,QAAI,qBAAqB,IAAI,eAAe,KAAK,CAC/C,SAAQ,KAAK,WAAW,eAAe,KAAK,oBAAoB;QAEhE,sBAAqB,IAAI,eAAe,KAAK;AAG/C,QAAI,eAAe,SACjB,MAAK,MAAM,WAAW,eAAe,UAAU;KAC7C,MAAM,WAAW,iBAAiB,IAAI,QAAQ;AAC9C,SAAI,YAAY,SAAS,SAAS,eAAe,KAC/C,SAAQ,KAAK,WAAW,eAAe,KAAK,0BAA0B,SAAS,KAAK,mBAAmB,QAAQ,IAAI;AAErH,sBAAiB,IAAI,SAAS,eAAe;;;AAKnD,OAAIA,EAAW,eAAe,QAAQ,EAAE;IACtC,MAAM,YAAY,eAAe;AAEjC,UAAO,UAAkB,SAAS,GAAG,QAAQ;;AAG/C,UAAO;;EAEV;AAED,QAAO"}
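The source embedded in the map content above includes the Fabric `use()` implementation: plugins and parsers are deduplicated with a console warning, parsers are registered per file extension (a later parser overrides an earlier one for the same extension, again with a warning), and an `install` hook is awaited when present. A hypothetical consumer-side sketch; the subpath export and the shape of the Fabric instance are assumptions, not shown in this diff:

```ts
import { typescriptParser } from '@kubb/fabric-core/parsers/typescript' // assumed subpath export

// Stand-in for the Fabric instance returned by the package's factory (not shown in this diff).
declare const fabric: {
  use(pluginOrParser: unknown, ...options: unknown[]): Promise<unknown>
}

await fabric.use(typescriptParser) // registers the parser for its extNames ('.ts', '.js')
await fabric.use(typescriptParser) // logs: Parser "typescript" already applied.
```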
|
package/dist/parsers/typescript.cjs
CHANGED
|
@@ -9,6 +9,16 @@ typescript = require_trimExtName.__toESM(typescript);
|
|
|
9
9
|
//#region src/parsers/typescriptParser.ts
|
|
10
10
|
const { factory } = typescript.default;
|
|
11
11
|
/**
|
|
12
|
+
* Validates TypeScript AST nodes before printing to catch invalid nodes early.
|
|
13
|
+
* Throws an error if any node has SyntaxKind.Unknown which would cause the TypeScript printer to crash.
|
|
14
|
+
*/
|
|
15
|
+
function validateNodes(...nodes) {
|
|
16
|
+
for (const node of nodes) {
|
|
17
|
+
if (!node) throw new Error("Attempted to print undefined or null TypeScript node");
|
|
18
|
+
if (node.kind === typescript.default.SyntaxKind.Unknown) throw new Error(`Invalid TypeScript AST node detected with SyntaxKind.Unknown. This typically indicates a schema pattern that couldn't be properly converted to TypeScript. Node: ${JSON.stringify(node, null, 2)}`);
|
|
19
|
+
}
|
|
20
|
+
}
|
|
21
|
+
/**
|
|
12
22
|
* Convert AST TypeScript/TSX nodes to a string based on the TypeScript printer.
|
|
13
23
|
*/
|
|
14
24
|
function print(...elements) {
|
|
@@ -22,6 +32,10 @@ function print(...elements) {
|
|
|
22
32
|
for (const node of elements) if (node.kind === typescript.default.SyntaxKind.Unknown) console.error("⚠️ Unknown node found:", node);
|
|
23
33
|
return printer.printList(typescript.default.ListFormat.MultiLine, factory.createNodeArray(elements.filter(Boolean)), sourceFile).replace(/\r\n/g, "\n");
|
|
24
34
|
}
|
|
35
|
+
function safePrint(...elements) {
|
|
36
|
+
validateNodes(...elements);
|
|
37
|
+
return print(...elements);
|
|
38
|
+
}
|
|
25
39
|
function createImport({ name, path: path$1, root, isTypeOnly = false, isNameSpace = false }) {
|
|
26
40
|
const resolvePath = root ? require_getRelativePath.getRelativePath(root, path$1) : path$1;
|
|
27
41
|
if (!Array.isArray(name)) {
|
|
@@ -89,5 +103,7 @@ const typescriptParser = require_defineParser.defineParser({
|
|
|
89
103
|
exports.createExport = createExport;
|
|
90
104
|
exports.createImport = createImport;
|
|
91
105
|
exports.print = print;
|
|
106
|
+
exports.safePrint = safePrint;
|
|
92
107
|
exports.typescriptParser = typescriptParser;
|
|
108
|
+
exports.validateNodes = validateNodes;
|
|
93
109
|
//# sourceMappingURL=typescript.cjs.map
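The hunks above add `validateNodes` and `safePrint` to the compiled CJS output alongside the existing `print`. A minimal sketch of the difference in behavior, assuming the package exposes this module as a `parsers/typescript` subpath export (not confirmed by this diff):

```ts
import ts from 'typescript'
import { safePrint } from '@kubb/fabric-core/parsers/typescript' // assumed subpath export

const id = ts.factory.createIdentifier('hello')

console.log(safePrint(id)) // valid nodes behave exactly like print(): "hello"

try {
  // With a missing node, safePrint fails fast with a descriptive message
  // (plain print() would instead hit a TypeError when it reads `.kind`).
  safePrint(id, undefined as unknown as ts.Node)
} catch (err) {
  console.error((err as Error).message) // "Attempted to print undefined or null TypeScript node"
}
```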
|
package/dist/parsers/typescript.cjs.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"typescript.cjs","names":["ts","getRelativePath","path","defineParser","sourceParts: Array<string>","importNodes: Array<ts.ImportDeclaration>","trimExtName","exportNodes: Array<ts.ExportDeclaration>"],"sources":["../../src/parsers/typescriptParser.ts"],"sourcesContent":["import path from 'node:path'\nimport ts from 'typescript'\nimport { getRelativePath } from '../utils/getRelativePath.ts'\nimport { trimExtName } from '../utils/trimExtName.ts'\nimport { defineParser } from './defineParser.ts'\n\nconst { factory } = ts\n\n/**\n * Convert AST TypeScript/TSX nodes to a string based on the TypeScript printer.\n */\nexport function print(...elements: Array<ts.Node>): string {\n const sourceFile = ts.createSourceFile('print.tsx', '', ts.ScriptTarget.ES2022, true, ts.ScriptKind.TSX)\n\n const printer = ts.createPrinter({\n omitTrailingSemicolon: true,\n newLine: ts.NewLineKind.LineFeed,\n removeComments: false,\n noEmitHelpers: true,\n })\n\n for (const node of elements) {\n if (node.kind === ts.SyntaxKind.Unknown) {\n console.error('⚠️ Unknown node found:', node)\n }\n }\n\n const output = printer.printList(ts.ListFormat.MultiLine, factory.createNodeArray(elements.filter(Boolean)), sourceFile)\n\n return output.replace(/\\r\\n/g, '\\n')\n}\n\nexport function createImport({\n name,\n path,\n root,\n isTypeOnly = false,\n isNameSpace = false,\n}: {\n name: string | Array<string | { propertyName: string; name?: string }>\n path: string\n root?: string\n isTypeOnly?: boolean\n isNameSpace?: boolean\n}) {\n const resolvePath = root ? getRelativePath(root, path) : path\n\n // Namespace or default import\n if (!Array.isArray(name)) {\n if (isNameSpace) {\n return factory.createImportDeclaration(\n undefined,\n factory.createImportClause(isTypeOnly, undefined, factory.createNamespaceImport(factory.createIdentifier(name))),\n factory.createStringLiteral(resolvePath),\n undefined,\n )\n }\n\n return factory.createImportDeclaration(\n undefined,\n factory.createImportClause(isTypeOnly, factory.createIdentifier(name), undefined),\n factory.createStringLiteral(resolvePath),\n undefined,\n )\n }\n\n // Named imports\n const specifiers = name.map((item) => {\n if (typeof item === 'object') {\n const { propertyName, name: alias } = item\n return factory.createImportSpecifier(false, alias ? factory.createIdentifier(propertyName) : undefined, factory.createIdentifier(alias ?? propertyName))\n }\n\n return factory.createImportSpecifier(false, undefined, factory.createIdentifier(item))\n })\n\n return factory.createImportDeclaration(\n undefined,\n factory.createImportClause(isTypeOnly, undefined, factory.createNamedImports(specifiers)),\n factory.createStringLiteral(resolvePath),\n undefined,\n )\n}\n\nexport function createExport({\n path,\n asAlias,\n isTypeOnly = false,\n name,\n}: {\n path: string\n asAlias?: boolean\n isTypeOnly?: boolean\n name?: string | Array<ts.Identifier | string>\n}) {\n if (name && !Array.isArray(name) && !asAlias) {\n console.warn(`When using name as string, asAlias should be true ${name}`)\n }\n\n if (!Array.isArray(name)) {\n const parsedName = name?.match(/^\\d/) ? `_${name?.slice(1)}` : name\n\n return factory.createExportDeclaration(\n undefined,\n isTypeOnly,\n asAlias && parsedName ? 
factory.createNamespaceExport(factory.createIdentifier(parsedName)) : undefined,\n factory.createStringLiteral(path),\n undefined,\n )\n }\n\n return factory.createExportDeclaration(\n undefined,\n isTypeOnly,\n factory.createNamedExports(\n name.map((propertyName) => {\n return factory.createExportSpecifier(false, undefined, typeof propertyName === 'string' ? factory.createIdentifier(propertyName) : propertyName)\n }),\n ),\n factory.createStringLiteral(path),\n undefined,\n )\n}\n\nexport const typescriptParser = defineParser({\n name: 'typescript',\n extNames: ['.ts', '.js'],\n install() {},\n async parse(file, options = { extname: '.ts' }) {\n const sourceParts: Array<string> = []\n for (const item of file.sources) {\n if (item.value) {\n sourceParts.push(item.value)\n }\n }\n const source = sourceParts.join('\\n\\n')\n\n const importNodes: Array<ts.ImportDeclaration> = []\n for (const item of file.imports) {\n const importPath = item.root ? getRelativePath(item.root, item.path) : item.path\n const hasExtname = !!path.extname(importPath)\n\n importNodes.push(\n createImport({\n name: item.name,\n path: options.extname && hasExtname ? `${trimExtName(importPath)}${options.extname}` : item.root ? trimExtName(importPath) : importPath,\n isTypeOnly: item.isTypeOnly,\n }),\n )\n }\n\n const exportNodes: Array<ts.ExportDeclaration> = []\n for (const item of file.exports) {\n const exportPath = item.path\n const hasExtname = !!path.extname(exportPath)\n\n exportNodes.push(\n createExport({\n name: item.name,\n path: options.extname && hasExtname ? `${trimExtName(item.path)}${options.extname}` : trimExtName(item.path),\n isTypeOnly: item.isTypeOnly,\n asAlias: item.asAlias,\n }),\n )\n }\n\n const parts = [file.banner, print(...importNodes, ...exportNodes), source, file.footer].filter((segment): segment is string => segment != null)\n return parts.join('\\n')\n },\n})\n"],"mappings":";;;;;;;;;AAMA,MAAM,EAAE,YAAYA
|
|
1
|
+
{"version":3,"file":"typescript.cjs","names":["ts","getRelativePath","path","defineParser","sourceParts: Array<string>","importNodes: Array<ts.ImportDeclaration>","trimExtName","exportNodes: Array<ts.ExportDeclaration>"],"sources":["../../src/parsers/typescriptParser.ts"],"sourcesContent":["import path from 'node:path'\nimport ts from 'typescript'\nimport { getRelativePath } from '../utils/getRelativePath.ts'\nimport { trimExtName } from '../utils/trimExtName.ts'\nimport { defineParser } from './defineParser.ts'\n\nconst { factory } = ts\n\n/**\n * Validates TypeScript AST nodes before printing to catch invalid nodes early.\n * Throws an error if any node has SyntaxKind.Unknown which would cause the TypeScript printer to crash.\n */\nexport function validateNodes(...nodes: ts.Node[]): void {\n for (const node of nodes) {\n if (!node) {\n throw new Error('Attempted to print undefined or null TypeScript node')\n }\n if (node.kind === ts.SyntaxKind.Unknown) {\n throw new Error(\n 'Invalid TypeScript AST node detected with SyntaxKind.Unknown. ' +\n `This typically indicates a schema pattern that couldn't be properly converted to TypeScript. ` +\n `Node: ${JSON.stringify(node, null, 2)}`,\n )\n }\n }\n}\n\n/**\n * Convert AST TypeScript/TSX nodes to a string based on the TypeScript printer.\n */\nexport function print(...elements: Array<ts.Node>): string {\n const sourceFile = ts.createSourceFile('print.tsx', '', ts.ScriptTarget.ES2022, true, ts.ScriptKind.TSX)\n\n const printer = ts.createPrinter({\n omitTrailingSemicolon: true,\n newLine: ts.NewLineKind.LineFeed,\n removeComments: false,\n noEmitHelpers: true,\n })\n\n for (const node of elements) {\n if (node.kind === ts.SyntaxKind.Unknown) {\n console.error('⚠️ Unknown node found:', node)\n }\n }\n\n const output = printer.printList(ts.ListFormat.MultiLine, factory.createNodeArray(elements.filter(Boolean)), sourceFile)\n\n return output.replace(/\\r\\n/g, '\\n')\n}\n\nexport function safePrint(...elements: Array<ts.Node>): string {\n validateNodes(...elements)\n\n return print(...elements)\n}\n\nexport function createImport({\n name,\n path,\n root,\n isTypeOnly = false,\n isNameSpace = false,\n}: {\n name: string | Array<string | { propertyName: string; name?: string }>\n path: string\n root?: string\n isTypeOnly?: boolean\n isNameSpace?: boolean\n}) {\n const resolvePath = root ? getRelativePath(root, path) : path\n\n // Namespace or default import\n if (!Array.isArray(name)) {\n if (isNameSpace) {\n return factory.createImportDeclaration(\n undefined,\n factory.createImportClause(isTypeOnly, undefined, factory.createNamespaceImport(factory.createIdentifier(name))),\n factory.createStringLiteral(resolvePath),\n undefined,\n )\n }\n\n return factory.createImportDeclaration(\n undefined,\n factory.createImportClause(isTypeOnly, factory.createIdentifier(name), undefined),\n factory.createStringLiteral(resolvePath),\n undefined,\n )\n }\n\n // Named imports\n const specifiers = name.map((item) => {\n if (typeof item === 'object') {\n const { propertyName, name: alias } = item\n return factory.createImportSpecifier(false, alias ? factory.createIdentifier(propertyName) : undefined, factory.createIdentifier(alias ?? 
propertyName))\n }\n\n return factory.createImportSpecifier(false, undefined, factory.createIdentifier(item))\n })\n\n return factory.createImportDeclaration(\n undefined,\n factory.createImportClause(isTypeOnly, undefined, factory.createNamedImports(specifiers)),\n factory.createStringLiteral(resolvePath),\n undefined,\n )\n}\n\nexport function createExport({\n path,\n asAlias,\n isTypeOnly = false,\n name,\n}: {\n path: string\n asAlias?: boolean\n isTypeOnly?: boolean\n name?: string | Array<ts.Identifier | string>\n}) {\n if (name && !Array.isArray(name) && !asAlias) {\n console.warn(`When using name as string, asAlias should be true ${name}`)\n }\n\n if (!Array.isArray(name)) {\n const parsedName = name?.match(/^\\d/) ? `_${name?.slice(1)}` : name\n\n return factory.createExportDeclaration(\n undefined,\n isTypeOnly,\n asAlias && parsedName ? factory.createNamespaceExport(factory.createIdentifier(parsedName)) : undefined,\n factory.createStringLiteral(path),\n undefined,\n )\n }\n\n return factory.createExportDeclaration(\n undefined,\n isTypeOnly,\n factory.createNamedExports(\n name.map((propertyName) => {\n return factory.createExportSpecifier(false, undefined, typeof propertyName === 'string' ? factory.createIdentifier(propertyName) : propertyName)\n }),\n ),\n factory.createStringLiteral(path),\n undefined,\n )\n}\n\nexport const typescriptParser = defineParser({\n name: 'typescript',\n extNames: ['.ts', '.js'],\n install() {},\n async parse(file, options = { extname: '.ts' }) {\n const sourceParts: Array<string> = []\n for (const item of file.sources) {\n if (item.value) {\n sourceParts.push(item.value)\n }\n }\n const source = sourceParts.join('\\n\\n')\n\n const importNodes: Array<ts.ImportDeclaration> = []\n for (const item of file.imports) {\n const importPath = item.root ? getRelativePath(item.root, item.path) : item.path\n const hasExtname = !!path.extname(importPath)\n\n importNodes.push(\n createImport({\n name: item.name,\n path: options.extname && hasExtname ? `${trimExtName(importPath)}${options.extname}` : item.root ? trimExtName(importPath) : importPath,\n isTypeOnly: item.isTypeOnly,\n }),\n )\n }\n\n const exportNodes: Array<ts.ExportDeclaration> = []\n for (const item of file.exports) {\n const exportPath = item.path\n const hasExtname = !!path.extname(exportPath)\n\n exportNodes.push(\n createExport({\n name: item.name,\n path: options.extname && hasExtname ? 
`${trimExtName(item.path)}${options.extname}` : trimExtName(item.path),\n isTypeOnly: item.isTypeOnly,\n asAlias: item.asAlias,\n }),\n )\n }\n\n const parts = [file.banner, print(...importNodes, ...exportNodes), source, file.footer].filter((segment): segment is string => segment != null)\n return parts.join('\\n')\n },\n})\n"],"mappings":";;;;;;;;;AAMA,MAAM,EAAE,YAAYA;;;;;AAMpB,SAAgB,cAAc,GAAG,OAAwB;AACvD,MAAK,MAAM,QAAQ,OAAO;AACxB,MAAI,CAAC,KACH,OAAM,IAAI,MAAM,uDAAuD;AAEzE,MAAI,KAAK,SAASA,mBAAG,WAAW,QAC9B,OAAM,IAAI,MACR,oKAEW,KAAK,UAAU,MAAM,MAAM,EAAE,GACzC;;;;;;AAQP,SAAgB,MAAM,GAAG,UAAkC;CACzD,MAAM,aAAaA,mBAAG,iBAAiB,aAAa,IAAIA,mBAAG,aAAa,QAAQ,MAAMA,mBAAG,WAAW,IAAI;CAExG,MAAM,UAAUA,mBAAG,cAAc;EAC/B,uBAAuB;EACvB,SAASA,mBAAG,YAAY;EACxB,gBAAgB;EAChB,eAAe;EAChB,CAAC;AAEF,MAAK,MAAM,QAAQ,SACjB,KAAI,KAAK,SAASA,mBAAG,WAAW,QAC9B,SAAQ,MAAM,0BAA0B,KAAK;AAMjD,QAFe,QAAQ,UAAUA,mBAAG,WAAW,WAAW,QAAQ,gBAAgB,SAAS,OAAO,QAAQ,CAAC,EAAE,WAAW,CAE1G,QAAQ,SAAS,KAAK;;AAGtC,SAAgB,UAAU,GAAG,UAAkC;AAC7D,eAAc,GAAG,SAAS;AAE1B,QAAO,MAAM,GAAG,SAAS;;AAG3B,SAAgB,aAAa,EAC3B,MACA,cACA,MACA,aAAa,OACb,cAAc,SAOb;CACD,MAAM,cAAc,OAAOC,wCAAgB,MAAMC,OAAK,GAAGA;AAGzD,KAAI,CAAC,MAAM,QAAQ,KAAK,EAAE;AACxB,MAAI,YACF,QAAO,QAAQ,wBACb,QACA,QAAQ,mBAAmB,YAAY,QAAW,QAAQ,sBAAsB,QAAQ,iBAAiB,KAAK,CAAC,CAAC,EAChH,QAAQ,oBAAoB,YAAY,EACxC,OACD;AAGH,SAAO,QAAQ,wBACb,QACA,QAAQ,mBAAmB,YAAY,QAAQ,iBAAiB,KAAK,EAAE,OAAU,EACjF,QAAQ,oBAAoB,YAAY,EACxC,OACD;;CAIH,MAAM,aAAa,KAAK,KAAK,SAAS;AACpC,MAAI,OAAO,SAAS,UAAU;GAC5B,MAAM,EAAE,cAAc,MAAM,UAAU;AACtC,UAAO,QAAQ,sBAAsB,OAAO,QAAQ,QAAQ,iBAAiB,aAAa,GAAG,QAAW,QAAQ,iBAAiB,6CAAS,aAAa,CAAC;;AAG1J,SAAO,QAAQ,sBAAsB,OAAO,QAAW,QAAQ,iBAAiB,KAAK,CAAC;GACtF;AAEF,QAAO,QAAQ,wBACb,QACA,QAAQ,mBAAmB,YAAY,QAAW,QAAQ,mBAAmB,WAAW,CAAC,EACzF,QAAQ,oBAAoB,YAAY,EACxC,OACD;;AAGH,SAAgB,aAAa,EAC3B,cACA,SACA,aAAa,OACb,QAMC;AACD,KAAI,QAAQ,CAAC,MAAM,QAAQ,KAAK,IAAI,CAAC,QACnC,SAAQ,KAAK,qDAAqD,OAAO;AAG3E,KAAI,CAAC,MAAM,QAAQ,KAAK,EAAE;EACxB,MAAM,0DAAa,KAAM,MAAM,MAAM,IAAG,gDAAI,KAAM,MAAM,EAAE,KAAK;AAE/D,SAAO,QAAQ,wBACb,QACA,YACA,WAAW,aAAa,QAAQ,sBAAsB,QAAQ,iBAAiB,WAAW,CAAC,GAAG,QAC9F,QAAQ,oBAAoBA,OAAK,EACjC,OACD;;AAGH,QAAO,QAAQ,wBACb,QACA,YACA,QAAQ,mBACN,KAAK,KAAK,iBAAiB;AACzB,SAAO,QAAQ,sBAAsB,OAAO,QAAW,OAAO,iBAAiB,WAAW,QAAQ,iBAAiB,aAAa,GAAG,aAAa;GAChJ,CACH,EACD,QAAQ,oBAAoBA,OAAK,EACjC,OACD;;AAGH,MAAa,mBAAmBC,kCAAa;CAC3C,MAAM;CACN,UAAU,CAAC,OAAO,MAAM;CACxB,UAAU;CACV,MAAM,MAAM,MAAM,UAAU,EAAE,SAAS,OAAO,EAAE;EAC9C,MAAMC,cAA6B,EAAE;AACrC,OAAK,MAAM,QAAQ,KAAK,QACtB,KAAI,KAAK,MACP,aAAY,KAAK,KAAK,MAAM;EAGhC,MAAM,SAAS,YAAY,KAAK,OAAO;EAEvC,MAAMC,cAA2C,EAAE;AACnD,OAAK,MAAM,QAAQ,KAAK,SAAS;GAC/B,MAAM,aAAa,KAAK,OAAOJ,wCAAgB,KAAK,MAAM,KAAK,KAAK,GAAG,KAAK;GAC5E,MAAM,aAAa,CAAC,CAACC,kBAAK,QAAQ,WAAW;AAE7C,eAAY,KACV,aAAa;IACX,MAAM,KAAK;IACX,MAAM,QAAQ,WAAW,aAAa,GAAGI,gCAAY,WAAW,GAAG,QAAQ,YAAY,KAAK,OAAOA,gCAAY,WAAW,GAAG;IAC7H,YAAY,KAAK;IAClB,CAAC,CACH;;EAGH,MAAMC,cAA2C,EAAE;AACnD,OAAK,MAAM,QAAQ,KAAK,SAAS;GAC/B,MAAM,aAAa,KAAK;GACxB,MAAM,aAAa,CAAC,CAACL,kBAAK,QAAQ,WAAW;AAE7C,eAAY,KACV,aAAa;IACX,MAAM,KAAK;IACX,MAAM,QAAQ,WAAW,aAAa,GAAGI,gCAAY,KAAK,KAAK,GAAG,QAAQ,YAAYA,gCAAY,KAAK,KAAK;IAC5G,YAAY,KAAK;IACjB,SAAS,KAAK;IACf,CAAC,CACH;;AAIH,SADc;GAAC,KAAK;GAAQ,MAAM,GAAG,aAAa,GAAG,YAAY;GAAE;GAAQ,KAAK;GAAO,CAAC,QAAQ,YAA+B,WAAW,KAAK,CAClI,KAAK,KAAK;;CAE1B,CAAC"}
|
package/dist/parsers/typescript.d.cts
CHANGED
|
@@ -2,10 +2,17 @@ import { u as Parser } from "../Fabric-BXYWK9V4.cjs";
|
|
|
2
2
|
import ts from "typescript";
|
|
3
3
|
|
|
4
4
|
//#region src/parsers/typescriptParser.d.ts
|
|
5
|
+
|
|
6
|
+
/**
|
|
7
|
+
* Validates TypeScript AST nodes before printing to catch invalid nodes early.
|
|
8
|
+
* Throws an error if any node has SyntaxKind.Unknown which would cause the TypeScript printer to crash.
|
|
9
|
+
*/
|
|
10
|
+
declare function validateNodes(...nodes: ts.Node[]): void;
|
|
5
11
|
/**
|
|
6
12
|
* Convert AST TypeScript/TSX nodes to a string based on the TypeScript printer.
|
|
7
13
|
*/
|
|
8
14
|
declare function print(...elements: Array<ts.Node>): string;
|
|
15
|
+
declare function safePrint(...elements: Array<ts.Node>): string;
|
|
9
16
|
declare function createImport({
|
|
10
17
|
name,
|
|
11
18
|
path,
|
|
@@ -35,5 +42,5 @@ declare function createExport({
|
|
|
35
42
|
}): ts.ExportDeclaration;
|
|
36
43
|
declare const typescriptParser: Parser<[], any>;
|
|
37
44
|
//#endregion
|
|
38
|
-
export { createExport, createImport, print, typescriptParser };
|
|
45
|
+
export { createExport, createImport, print, safePrint, typescriptParser, validateNodes };
|
|
39
46
|
//# sourceMappingURL=typescript.d.cts.map
|
package/dist/parsers/typescript.d.ts
CHANGED
|
@@ -2,10 +2,17 @@ import { u as Parser } from "../Fabric-CDFwTDyP.js";
|
|
|
2
2
|
import ts from "typescript";
|
|
3
3
|
|
|
4
4
|
//#region src/parsers/typescriptParser.d.ts
|
|
5
|
+
|
|
6
|
+
/**
|
|
7
|
+
* Validates TypeScript AST nodes before printing to catch invalid nodes early.
|
|
8
|
+
* Throws an error if any node has SyntaxKind.Unknown which would cause the TypeScript printer to crash.
|
|
9
|
+
*/
|
|
10
|
+
declare function validateNodes(...nodes: ts.Node[]): void;
|
|
5
11
|
/**
|
|
6
12
|
* Convert AST TypeScript/TSX nodes to a string based on the TypeScript printer.
|
|
7
13
|
*/
|
|
8
14
|
declare function print(...elements: Array<ts.Node>): string;
|
|
15
|
+
declare function safePrint(...elements: Array<ts.Node>): string;
|
|
9
16
|
declare function createImport({
|
|
10
17
|
name,
|
|
11
18
|
path,
|
|
@@ -35,5 +42,5 @@ declare function createExport({
|
|
|
35
42
|
}): ts.ExportDeclaration;
|
|
36
43
|
declare const typescriptParser: Parser<[], any>;
|
|
37
44
|
//#endregion
|
|
38
|
-
export { createExport, createImport, print, typescriptParser };
|
|
45
|
+
export { createExport, createImport, print, safePrint, typescriptParser, validateNodes };
|
|
39
46
|
//# sourceMappingURL=typescript.d.ts.map
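The updated declarations now expose `validateNodes` and `safePrint` next to `createImport`, `createExport`, and `print`. A typed usage sketch based on those signatures; the subpath import is an assumption:

```ts
import type ts from 'typescript'
import { createExport, createImport, safePrint } from '@kubb/fabric-core/parsers/typescript' // assumed subpath export

const importDecl: ts.ImportDeclaration = createImport({
  name: ['useQuery', { propertyName: 'QueryKey', name: 'PetQueryKey' }],
  path: './hooks/useQuery.ts',
  isTypeOnly: false,
})

const exportDecl: ts.ExportDeclaration = createExport({
  path: './models/Pet.ts',
  isTypeOnly: true,
})

// safePrint validates both declarations before delegating to print()
const header: string = safePrint(importDecl, exportDecl)
```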
|
package/dist/parsers/typescript.js
CHANGED
|
@@ -7,6 +7,16 @@ import ts from "typescript";
|
|
|
7
7
|
//#region src/parsers/typescriptParser.ts
|
|
8
8
|
const { factory } = ts;
|
|
9
9
|
/**
|
|
10
|
+
* Validates TypeScript AST nodes before printing to catch invalid nodes early.
|
|
11
|
+
* Throws an error if any node has SyntaxKind.Unknown which would cause the TypeScript printer to crash.
|
|
12
|
+
*/
|
|
13
|
+
function validateNodes(...nodes) {
|
|
14
|
+
for (const node of nodes) {
|
|
15
|
+
if (!node) throw new Error("Attempted to print undefined or null TypeScript node");
|
|
16
|
+
if (node.kind === ts.SyntaxKind.Unknown) throw new Error(`Invalid TypeScript AST node detected with SyntaxKind.Unknown. This typically indicates a schema pattern that couldn't be properly converted to TypeScript. Node: ${JSON.stringify(node, null, 2)}`);
|
|
17
|
+
}
|
|
18
|
+
}
|
|
19
|
+
/**
|
|
10
20
|
* Convert AST TypeScript/TSX nodes to a string based on the TypeScript printer.
|
|
11
21
|
*/
|
|
12
22
|
function print(...elements) {
|
|
@@ -20,6 +30,10 @@ function print(...elements) {
|
|
|
20
30
|
for (const node of elements) if (node.kind === ts.SyntaxKind.Unknown) console.error("⚠️ Unknown node found:", node);
|
|
21
31
|
return printer.printList(ts.ListFormat.MultiLine, factory.createNodeArray(elements.filter(Boolean)), sourceFile).replace(/\r\n/g, "\n");
|
|
22
32
|
}
|
|
33
|
+
function safePrint(...elements) {
|
|
34
|
+
validateNodes(...elements);
|
|
35
|
+
return print(...elements);
|
|
36
|
+
}
|
|
23
37
|
function createImport({ name, path: path$1, root, isTypeOnly = false, isNameSpace = false }) {
|
|
24
38
|
const resolvePath = root ? getRelativePath(root, path$1) : path$1;
|
|
25
39
|
if (!Array.isArray(name)) {
|
|
@@ -84,5 +98,5 @@ const typescriptParser = defineParser({
|
|
|
84
98
|
});
|
|
85
99
|
|
|
86
100
|
//#endregion
|
|
87
|
-
export { createExport, createImport, print, typescriptParser };
|
|
101
|
+
export { createExport, createImport, print, safePrint, typescriptParser, validateNodes };
|
|
88
102
|
//# sourceMappingURL=typescript.js.map
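The ESM build mirrors the CJS changes, so `validateNodes` can also be used on its own as an eager guard in a custom pipeline. A hypothetical sketch; the import path is assumed, and node construction uses the typescript@5.9.3 factory API pinned in this package:

```ts
import ts from 'typescript'
import { print, validateNodes } from '@kubb/fabric-core/parsers/typescript' // assumed subpath export

// Validate eagerly where the nodes are produced, so a bad factory call is
// reported close to its source instead of deep inside the printer.
function buildPetModel(): ts.Node[] {
  const nodes = [
    ts.factory.createTypeAliasDeclaration(
      undefined,
      'Pet',
      undefined,
      ts.factory.createTypeLiteralNode([
        ts.factory.createPropertySignature(
          undefined,
          'name',
          undefined,
          ts.factory.createKeywordTypeNode(ts.SyntaxKind.StringKeyword),
        ),
      ]),
    ),
  ]
  validateNodes(...nodes) // throws early if a node is missing or has SyntaxKind.Unknown
  return nodes
}

console.log(print(...buildPetModel()))
```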
|
package/dist/parsers/typescript.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"typescript.js","names":["path","sourceParts: Array<string>","importNodes: Array<ts.ImportDeclaration>","exportNodes: Array<ts.ExportDeclaration>"],"sources":["../../src/parsers/typescriptParser.ts"],"sourcesContent":["import path from 'node:path'\nimport ts from 'typescript'\nimport { getRelativePath } from '../utils/getRelativePath.ts'\nimport { trimExtName } from '../utils/trimExtName.ts'\nimport { defineParser } from './defineParser.ts'\n\nconst { factory } = ts\n\n/**\n * Convert AST TypeScript/TSX nodes to a string based on the TypeScript printer.\n */\nexport function print(...elements: Array<ts.Node>): string {\n const sourceFile = ts.createSourceFile('print.tsx', '', ts.ScriptTarget.ES2022, true, ts.ScriptKind.TSX)\n\n const printer = ts.createPrinter({\n omitTrailingSemicolon: true,\n newLine: ts.NewLineKind.LineFeed,\n removeComments: false,\n noEmitHelpers: true,\n })\n\n for (const node of elements) {\n if (node.kind === ts.SyntaxKind.Unknown) {\n console.error('⚠️ Unknown node found:', node)\n }\n }\n\n const output = printer.printList(ts.ListFormat.MultiLine, factory.createNodeArray(elements.filter(Boolean)), sourceFile)\n\n return output.replace(/\\r\\n/g, '\\n')\n}\n\nexport function createImport({\n name,\n path,\n root,\n isTypeOnly = false,\n isNameSpace = false,\n}: {\n name: string | Array<string | { propertyName: string; name?: string }>\n path: string\n root?: string\n isTypeOnly?: boolean\n isNameSpace?: boolean\n}) {\n const resolvePath = root ? getRelativePath(root, path) : path\n\n // Namespace or default import\n if (!Array.isArray(name)) {\n if (isNameSpace) {\n return factory.createImportDeclaration(\n undefined,\n factory.createImportClause(isTypeOnly, undefined, factory.createNamespaceImport(factory.createIdentifier(name))),\n factory.createStringLiteral(resolvePath),\n undefined,\n )\n }\n\n return factory.createImportDeclaration(\n undefined,\n factory.createImportClause(isTypeOnly, factory.createIdentifier(name), undefined),\n factory.createStringLiteral(resolvePath),\n undefined,\n )\n }\n\n // Named imports\n const specifiers = name.map((item) => {\n if (typeof item === 'object') {\n const { propertyName, name: alias } = item\n return factory.createImportSpecifier(false, alias ? factory.createIdentifier(propertyName) : undefined, factory.createIdentifier(alias ?? propertyName))\n }\n\n return factory.createImportSpecifier(false, undefined, factory.createIdentifier(item))\n })\n\n return factory.createImportDeclaration(\n undefined,\n factory.createImportClause(isTypeOnly, undefined, factory.createNamedImports(specifiers)),\n factory.createStringLiteral(resolvePath),\n undefined,\n )\n}\n\nexport function createExport({\n path,\n asAlias,\n isTypeOnly = false,\n name,\n}: {\n path: string\n asAlias?: boolean\n isTypeOnly?: boolean\n name?: string | Array<ts.Identifier | string>\n}) {\n if (name && !Array.isArray(name) && !asAlias) {\n console.warn(`When using name as string, asAlias should be true ${name}`)\n }\n\n if (!Array.isArray(name)) {\n const parsedName = name?.match(/^\\d/) ? `_${name?.slice(1)}` : name\n\n return factory.createExportDeclaration(\n undefined,\n isTypeOnly,\n asAlias && parsedName ? 
factory.createNamespaceExport(factory.createIdentifier(parsedName)) : undefined,\n factory.createStringLiteral(path),\n undefined,\n )\n }\n\n return factory.createExportDeclaration(\n undefined,\n isTypeOnly,\n factory.createNamedExports(\n name.map((propertyName) => {\n return factory.createExportSpecifier(false, undefined, typeof propertyName === 'string' ? factory.createIdentifier(propertyName) : propertyName)\n }),\n ),\n factory.createStringLiteral(path),\n undefined,\n )\n}\n\nexport const typescriptParser = defineParser({\n name: 'typescript',\n extNames: ['.ts', '.js'],\n install() {},\n async parse(file, options = { extname: '.ts' }) {\n const sourceParts: Array<string> = []\n for (const item of file.sources) {\n if (item.value) {\n sourceParts.push(item.value)\n }\n }\n const source = sourceParts.join('\\n\\n')\n\n const importNodes: Array<ts.ImportDeclaration> = []\n for (const item of file.imports) {\n const importPath = item.root ? getRelativePath(item.root, item.path) : item.path\n const hasExtname = !!path.extname(importPath)\n\n importNodes.push(\n createImport({\n name: item.name,\n path: options.extname && hasExtname ? `${trimExtName(importPath)}${options.extname}` : item.root ? trimExtName(importPath) : importPath,\n isTypeOnly: item.isTypeOnly,\n }),\n )\n }\n\n const exportNodes: Array<ts.ExportDeclaration> = []\n for (const item of file.exports) {\n const exportPath = item.path\n const hasExtname = !!path.extname(exportPath)\n\n exportNodes.push(\n createExport({\n name: item.name,\n path: options.extname && hasExtname ? `${trimExtName(item.path)}${options.extname}` : trimExtName(item.path),\n isTypeOnly: item.isTypeOnly,\n asAlias: item.asAlias,\n }),\n )\n }\n\n const parts = [file.banner, print(...importNodes, ...exportNodes), source, file.footer].filter((segment): segment is string => segment != null)\n return parts.join('\\n')\n },\n})\n"],"mappings":";;;;;;;AAMA,MAAM,EAAE,YAAY
|
|
1
|
+
{"version":3,"file":"typescript.js","names":["path","sourceParts: Array<string>","importNodes: Array<ts.ImportDeclaration>","exportNodes: Array<ts.ExportDeclaration>"],"sources":["../../src/parsers/typescriptParser.ts"],"sourcesContent":["import path from 'node:path'\nimport ts from 'typescript'\nimport { getRelativePath } from '../utils/getRelativePath.ts'\nimport { trimExtName } from '../utils/trimExtName.ts'\nimport { defineParser } from './defineParser.ts'\n\nconst { factory } = ts\n\n/**\n * Validates TypeScript AST nodes before printing to catch invalid nodes early.\n * Throws an error if any node has SyntaxKind.Unknown which would cause the TypeScript printer to crash.\n */\nexport function validateNodes(...nodes: ts.Node[]): void {\n for (const node of nodes) {\n if (!node) {\n throw new Error('Attempted to print undefined or null TypeScript node')\n }\n if (node.kind === ts.SyntaxKind.Unknown) {\n throw new Error(\n 'Invalid TypeScript AST node detected with SyntaxKind.Unknown. ' +\n `This typically indicates a schema pattern that couldn't be properly converted to TypeScript. ` +\n `Node: ${JSON.stringify(node, null, 2)}`,\n )\n }\n }\n}\n\n/**\n * Convert AST TypeScript/TSX nodes to a string based on the TypeScript printer.\n */\nexport function print(...elements: Array<ts.Node>): string {\n const sourceFile = ts.createSourceFile('print.tsx', '', ts.ScriptTarget.ES2022, true, ts.ScriptKind.TSX)\n\n const printer = ts.createPrinter({\n omitTrailingSemicolon: true,\n newLine: ts.NewLineKind.LineFeed,\n removeComments: false,\n noEmitHelpers: true,\n })\n\n for (const node of elements) {\n if (node.kind === ts.SyntaxKind.Unknown) {\n console.error('⚠️ Unknown node found:', node)\n }\n }\n\n const output = printer.printList(ts.ListFormat.MultiLine, factory.createNodeArray(elements.filter(Boolean)), sourceFile)\n\n return output.replace(/\\r\\n/g, '\\n')\n}\n\nexport function safePrint(...elements: Array<ts.Node>): string {\n validateNodes(...elements)\n\n return print(...elements)\n}\n\nexport function createImport({\n name,\n path,\n root,\n isTypeOnly = false,\n isNameSpace = false,\n}: {\n name: string | Array<string | { propertyName: string; name?: string }>\n path: string\n root?: string\n isTypeOnly?: boolean\n isNameSpace?: boolean\n}) {\n const resolvePath = root ? getRelativePath(root, path) : path\n\n // Namespace or default import\n if (!Array.isArray(name)) {\n if (isNameSpace) {\n return factory.createImportDeclaration(\n undefined,\n factory.createImportClause(isTypeOnly, undefined, factory.createNamespaceImport(factory.createIdentifier(name))),\n factory.createStringLiteral(resolvePath),\n undefined,\n )\n }\n\n return factory.createImportDeclaration(\n undefined,\n factory.createImportClause(isTypeOnly, factory.createIdentifier(name), undefined),\n factory.createStringLiteral(resolvePath),\n undefined,\n )\n }\n\n // Named imports\n const specifiers = name.map((item) => {\n if (typeof item === 'object') {\n const { propertyName, name: alias } = item\n return factory.createImportSpecifier(false, alias ? factory.createIdentifier(propertyName) : undefined, factory.createIdentifier(alias ?? 
propertyName))\n }\n\n return factory.createImportSpecifier(false, undefined, factory.createIdentifier(item))\n })\n\n return factory.createImportDeclaration(\n undefined,\n factory.createImportClause(isTypeOnly, undefined, factory.createNamedImports(specifiers)),\n factory.createStringLiteral(resolvePath),\n undefined,\n )\n}\n\nexport function createExport({\n path,\n asAlias,\n isTypeOnly = false,\n name,\n}: {\n path: string\n asAlias?: boolean\n isTypeOnly?: boolean\n name?: string | Array<ts.Identifier | string>\n}) {\n if (name && !Array.isArray(name) && !asAlias) {\n console.warn(`When using name as string, asAlias should be true ${name}`)\n }\n\n if (!Array.isArray(name)) {\n const parsedName = name?.match(/^\\d/) ? `_${name?.slice(1)}` : name\n\n return factory.createExportDeclaration(\n undefined,\n isTypeOnly,\n asAlias && parsedName ? factory.createNamespaceExport(factory.createIdentifier(parsedName)) : undefined,\n factory.createStringLiteral(path),\n undefined,\n )\n }\n\n return factory.createExportDeclaration(\n undefined,\n isTypeOnly,\n factory.createNamedExports(\n name.map((propertyName) => {\n return factory.createExportSpecifier(false, undefined, typeof propertyName === 'string' ? factory.createIdentifier(propertyName) : propertyName)\n }),\n ),\n factory.createStringLiteral(path),\n undefined,\n )\n}\n\nexport const typescriptParser = defineParser({\n name: 'typescript',\n extNames: ['.ts', '.js'],\n install() {},\n async parse(file, options = { extname: '.ts' }) {\n const sourceParts: Array<string> = []\n for (const item of file.sources) {\n if (item.value) {\n sourceParts.push(item.value)\n }\n }\n const source = sourceParts.join('\\n\\n')\n\n const importNodes: Array<ts.ImportDeclaration> = []\n for (const item of file.imports) {\n const importPath = item.root ? getRelativePath(item.root, item.path) : item.path\n const hasExtname = !!path.extname(importPath)\n\n importNodes.push(\n createImport({\n name: item.name,\n path: options.extname && hasExtname ? `${trimExtName(importPath)}${options.extname}` : item.root ? trimExtName(importPath) : importPath,\n isTypeOnly: item.isTypeOnly,\n }),\n )\n }\n\n const exportNodes: Array<ts.ExportDeclaration> = []\n for (const item of file.exports) {\n const exportPath = item.path\n const hasExtname = !!path.extname(exportPath)\n\n exportNodes.push(\n createExport({\n name: item.name,\n path: options.extname && hasExtname ? 
`${trimExtName(item.path)}${options.extname}` : trimExtName(item.path),\n isTypeOnly: item.isTypeOnly,\n asAlias: item.asAlias,\n }),\n )\n }\n\n const parts = [file.banner, print(...importNodes, ...exportNodes), source, file.footer].filter((segment): segment is string => segment != null)\n return parts.join('\\n')\n },\n})\n"],"mappings":";;;;;;;AAMA,MAAM,EAAE,YAAY;;;;;AAMpB,SAAgB,cAAc,GAAG,OAAwB;AACvD,MAAK,MAAM,QAAQ,OAAO;AACxB,MAAI,CAAC,KACH,OAAM,IAAI,MAAM,uDAAuD;AAEzE,MAAI,KAAK,SAAS,GAAG,WAAW,QAC9B,OAAM,IAAI,MACR,oKAEW,KAAK,UAAU,MAAM,MAAM,EAAE,GACzC;;;;;;AAQP,SAAgB,MAAM,GAAG,UAAkC;CACzD,MAAM,aAAa,GAAG,iBAAiB,aAAa,IAAI,GAAG,aAAa,QAAQ,MAAM,GAAG,WAAW,IAAI;CAExG,MAAM,UAAU,GAAG,cAAc;EAC/B,uBAAuB;EACvB,SAAS,GAAG,YAAY;EACxB,gBAAgB;EAChB,eAAe;EAChB,CAAC;AAEF,MAAK,MAAM,QAAQ,SACjB,KAAI,KAAK,SAAS,GAAG,WAAW,QAC9B,SAAQ,MAAM,0BAA0B,KAAK;AAMjD,QAFe,QAAQ,UAAU,GAAG,WAAW,WAAW,QAAQ,gBAAgB,SAAS,OAAO,QAAQ,CAAC,EAAE,WAAW,CAE1G,QAAQ,SAAS,KAAK;;AAGtC,SAAgB,UAAU,GAAG,UAAkC;AAC7D,eAAc,GAAG,SAAS;AAE1B,QAAO,MAAM,GAAG,SAAS;;AAG3B,SAAgB,aAAa,EAC3B,MACA,cACA,MACA,aAAa,OACb,cAAc,SAOb;CACD,MAAM,cAAc,OAAO,gBAAgB,MAAMA,OAAK,GAAGA;AAGzD,KAAI,CAAC,MAAM,QAAQ,KAAK,EAAE;AACxB,MAAI,YACF,QAAO,QAAQ,wBACb,QACA,QAAQ,mBAAmB,YAAY,QAAW,QAAQ,sBAAsB,QAAQ,iBAAiB,KAAK,CAAC,CAAC,EAChH,QAAQ,oBAAoB,YAAY,EACxC,OACD;AAGH,SAAO,QAAQ,wBACb,QACA,QAAQ,mBAAmB,YAAY,QAAQ,iBAAiB,KAAK,EAAE,OAAU,EACjF,QAAQ,oBAAoB,YAAY,EACxC,OACD;;CAIH,MAAM,aAAa,KAAK,KAAK,SAAS;AACpC,MAAI,OAAO,SAAS,UAAU;GAC5B,MAAM,EAAE,cAAc,MAAM,UAAU;AACtC,UAAO,QAAQ,sBAAsB,OAAO,QAAQ,QAAQ,iBAAiB,aAAa,GAAG,QAAW,QAAQ,iBAAiB,6CAAS,aAAa,CAAC;;AAG1J,SAAO,QAAQ,sBAAsB,OAAO,QAAW,QAAQ,iBAAiB,KAAK,CAAC;GACtF;AAEF,QAAO,QAAQ,wBACb,QACA,QAAQ,mBAAmB,YAAY,QAAW,QAAQ,mBAAmB,WAAW,CAAC,EACzF,QAAQ,oBAAoB,YAAY,EACxC,OACD;;AAGH,SAAgB,aAAa,EAC3B,cACA,SACA,aAAa,OACb,QAMC;AACD,KAAI,QAAQ,CAAC,MAAM,QAAQ,KAAK,IAAI,CAAC,QACnC,SAAQ,KAAK,qDAAqD,OAAO;AAG3E,KAAI,CAAC,MAAM,QAAQ,KAAK,EAAE;EACxB,MAAM,0DAAa,KAAM,MAAM,MAAM,IAAG,gDAAI,KAAM,MAAM,EAAE,KAAK;AAE/D,SAAO,QAAQ,wBACb,QACA,YACA,WAAW,aAAa,QAAQ,sBAAsB,QAAQ,iBAAiB,WAAW,CAAC,GAAG,QAC9F,QAAQ,oBAAoBA,OAAK,EACjC,OACD;;AAGH,QAAO,QAAQ,wBACb,QACA,YACA,QAAQ,mBACN,KAAK,KAAK,iBAAiB;AACzB,SAAO,QAAQ,sBAAsB,OAAO,QAAW,OAAO,iBAAiB,WAAW,QAAQ,iBAAiB,aAAa,GAAG,aAAa;GAChJ,CACH,EACD,QAAQ,oBAAoBA,OAAK,EACjC,OACD;;AAGH,MAAa,mBAAmB,aAAa;CAC3C,MAAM;CACN,UAAU,CAAC,OAAO,MAAM;CACxB,UAAU;CACV,MAAM,MAAM,MAAM,UAAU,EAAE,SAAS,OAAO,EAAE;EAC9C,MAAMC,cAA6B,EAAE;AACrC,OAAK,MAAM,QAAQ,KAAK,QACtB,KAAI,KAAK,MACP,aAAY,KAAK,KAAK,MAAM;EAGhC,MAAM,SAAS,YAAY,KAAK,OAAO;EAEvC,MAAMC,cAA2C,EAAE;AACnD,OAAK,MAAM,QAAQ,KAAK,SAAS;GAC/B,MAAM,aAAa,KAAK,OAAO,gBAAgB,KAAK,MAAM,KAAK,KAAK,GAAG,KAAK;GAC5E,MAAM,aAAa,CAAC,CAAC,KAAK,QAAQ,WAAW;AAE7C,eAAY,KACV,aAAa;IACX,MAAM,KAAK;IACX,MAAM,QAAQ,WAAW,aAAa,GAAG,YAAY,WAAW,GAAG,QAAQ,YAAY,KAAK,OAAO,YAAY,WAAW,GAAG;IAC7H,YAAY,KAAK;IAClB,CAAC,CACH;;EAGH,MAAMC,cAA2C,EAAE;AACnD,OAAK,MAAM,QAAQ,KAAK,SAAS;GAC/B,MAAM,aAAa,KAAK;GACxB,MAAM,aAAa,CAAC,CAAC,KAAK,QAAQ,WAAW;AAE7C,eAAY,KACV,aAAa;IACX,MAAM,KAAK;IACX,MAAM,QAAQ,WAAW,aAAa,GAAG,YAAY,KAAK,KAAK,GAAG,QAAQ,YAAY,YAAY,KAAK,KAAK;IAC5G,YAAY,KAAK;IACjB,SAAS,KAAK;IACf,CAAC,CACH;;AAIH,SADc;GAAC,KAAK;GAAQ,MAAM,GAAG,aAAa,GAAG,YAAY;GAAE;GAAQ,KAAK;GAAO,CAAC,QAAQ,YAA+B,WAAW,KAAK,CAClI,KAAK,KAAK;;CAE1B,CAAC"}
|
package/dist/plugins.cjs
CHANGED
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
const require_trimExtName = require('./trimExtName-DaBSwMN-.cjs');
|
|
2
|
-
const require_defineProperty = require('./defineProperty-
|
|
2
|
+
const require_defineProperty = require('./defineProperty-hUmuXj5B.cjs');
|
|
3
3
|
const require_getRelativePath = require('./getRelativePath-CLj7Ou6d.cjs');
|
|
4
4
|
let node_path = require("node:path");
|
|
5
5
|
node_path = require_trimExtName.__toESM(node_path);
|
package/dist/plugins.js
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { a as _classPrivateFieldInitSpec, n as _classPrivateFieldSet2, r as _classPrivateFieldGet2, s as createFile, t as _defineProperty } from "./defineProperty-
|
|
1
|
+
import { a as _classPrivateFieldInitSpec, n as _classPrivateFieldSet2, r as _classPrivateFieldGet2, s as createFile, t as _defineProperty } from "./defineProperty-CS9Uk_6Q.js";
|
|
2
2
|
import { t as getRelativePath } from "./getRelativePath-DayVrg5k.js";
|
|
3
3
|
import path, { relative, resolve } from "node:path";
|
|
4
4
|
import fs from "fs-extra";
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@kubb/fabric-core",
|
|
3
|
-
"version": "0.
|
|
3
|
+
"version": "0.9.1",
|
|
4
4
|
"description": "Core functionality for Kubb's plugin-based code generation system, providing the foundation for transforming OpenAPI specifications.",
|
|
5
5
|
"keywords": [
|
|
6
6
|
"typescript",
|
|
@@ -86,7 +86,7 @@
|
|
|
86
86
|
"picocolors": "^1.1.1",
|
|
87
87
|
"serve-handler": "^6.1.6",
|
|
88
88
|
"typescript": "5.9.3",
|
|
89
|
-
"ws": "^8.
|
|
89
|
+
"ws": "^8.19.0"
|
|
90
90
|
},
|
|
91
91
|
"devDependencies": {
|
|
92
92
|
"@types/fs-extra": "^11.0.4",
|
package/src/parsers/typescriptParser.ts
CHANGED
|
@@ -6,6 +6,25 @@ import { defineParser } from './defineParser.ts'
|
|
|
6
6
|
|
|
7
7
|
const { factory } = ts
|
|
8
8
|
|
|
9
|
+
/**
|
|
10
|
+
* Validates TypeScript AST nodes before printing to catch invalid nodes early.
|
|
11
|
+
* Throws an error if any node has SyntaxKind.Unknown which would cause the TypeScript printer to crash.
|
|
12
|
+
*/
|
|
13
|
+
export function validateNodes(...nodes: ts.Node[]): void {
|
|
14
|
+
for (const node of nodes) {
|
|
15
|
+
if (!node) {
|
|
16
|
+
throw new Error('Attempted to print undefined or null TypeScript node')
|
|
17
|
+
}
|
|
18
|
+
if (node.kind === ts.SyntaxKind.Unknown) {
|
|
19
|
+
throw new Error(
|
|
20
|
+
'Invalid TypeScript AST node detected with SyntaxKind.Unknown. ' +
|
|
21
|
+
`This typically indicates a schema pattern that couldn't be properly converted to TypeScript. ` +
|
|
22
|
+
`Node: ${JSON.stringify(node, null, 2)}`,
|
|
23
|
+
)
|
|
24
|
+
}
|
|
25
|
+
}
|
|
26
|
+
}
|
|
27
|
+
|
|
9
28
|
/**
|
|
10
29
|
* Convert AST TypeScript/TSX nodes to a string based on the TypeScript printer.
|
|
11
30
|
*/
|
|
@@ -30,6 +49,12 @@ export function print(...elements: Array<ts.Node>): string {
|
|
|
30
49
|
return output.replace(/\r\n/g, '\n')
|
|
31
50
|
}
|
|
32
51
|
|
|
52
|
+
export function safePrint(...elements: Array<ts.Node>): string {
|
|
53
|
+
validateNodes(...elements)
|
|
54
|
+
|
|
55
|
+
return print(...elements)
|
|
56
|
+
}
|
|
57
|
+
|
|
33
58
|
export function createImport({
|
|
34
59
|
name,
|
|
35
60
|
path,
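The source-level hunks above show the actual implementation: `safePrint` runs `validateNodes` and then delegates to `print`. A caller can catch the validation error and attach its own context, for example which generated file was being printed. A hypothetical wrapper, assuming a relative import of the parser module:

```ts
import type ts from 'typescript'
import { safePrint } from './typescriptParser' // hypothetical relative import

// Re-throw validation failures with the target file name attached, so a
// SyntaxKind.Unknown produced by an unsupported schema pattern is easy to trace.
export function printForFile(baseName: string, ...nodes: ts.Node[]): string {
  try {
    return safePrint(...nodes)
  } catch (err) {
    throw new Error(`Failed to print ${baseName}: ${(err as Error).message}`)
  }
}
```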
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"file":"defineProperty-B-I0UxsW.js","names":["t","n","r","e","t","e","r","n","i","r","n","t","i","e","e","n","t","r","i","uniqueBy","prev: Array<KubbFile.Export>","prev: Array<KubbFile.Import>"],"sources":["../../../node_modules/.pnpm/remeda@2.32.2/node_modules/remeda/dist/utilityEvaluators-XvjlcSPu.js","../../../node_modules/.pnpm/remeda@2.32.2/node_modules/remeda/dist/pipe-BVeXEJGy.js","../../../node_modules/.pnpm/remeda@2.32.2/node_modules/remeda/dist/purryFromLazy-ackWJ5sq.js","../../../node_modules/.pnpm/remeda@2.32.2/node_modules/remeda/dist/uniqueBy-rVfNPL42.js","../src/createFile.ts"],"sourcesContent":["const e={done:!0,hasNext:!1},t={done:!1,hasNext:!1},n=()=>e,r=e=>({hasNext:!0,next:e,done:!1});export{n,r,t};\n//# sourceMappingURL=utilityEvaluators-XvjlcSPu.js.map","import{t as e}from\"./utilityEvaluators-XvjlcSPu.js\";function t(e,...t){let a=e,o=t.map(e=>`lazy`in e?r(e):void 0),s=0;for(;s<t.length;){if(o[s]===void 0||!i(a)){let e=t[s];a=e(a),s+=1;continue}let e=[];for(let n=s;n<t.length;n++){let t=o[n];if(t===void 0||(e.push(t),t.isSingle))break}let r=[];for(let t of a)if(n(t,r,e))break;let{isSingle:c}=e.at(-1);a=c?r[0]:r,s+=e.length}return a}function n(t,r,i){if(i.length===0)return r.push(t),!1;let a=t,o=e,s=!1;for(let[e,t]of i.entries()){let{index:c,items:l}=t;if(l.push(a),o=t(a,c,l),t.index+=1,o.hasNext){if(o.hasMany??!1){for(let t of o.next)if(n(t,r,i.slice(e+1)))return!0;return s}a=o.next}if(!o.hasNext)break;o.done&&(s=!0)}return o.hasNext&&r.push(a),s}function r(e){let{lazy:t,lazyArgs:n}=e,r=t(...n);return Object.assign(r,{isSingle:t.single??!1,index:0,items:[]})}function i(e){return typeof e==`string`||typeof e==`object`&&!!e&&Symbol.iterator in e}export{t};\n//# sourceMappingURL=pipe-BVeXEJGy.js.map","import{t as e}from\"./pipe-BVeXEJGy.js\";function t(t,n){let r=n.length-t.length;if(r===1){let[r,...i]=n;return e(r,{lazy:t,lazyArgs:i})}if(r===0){let r={lazy:t,lazyArgs:n};return Object.assign(t=>e(t,r),r)}throw Error(`Wrong number of arguments`)}export{t};\n//# sourceMappingURL=purryFromLazy-ackWJ5sq.js.map","import{t as e}from\"./utilityEvaluators-XvjlcSPu.js\";import{t}from\"./purryFromLazy-ackWJ5sq.js\";function n(...e){return t(r,e)}function r(t){let n=t,r=new Set;return(t,i,a)=>{let o=n(t,i,a);return r.has(o)?e:(r.add(o),{done:!1,hasNext:!0,next:t})}}export{n as t};\n//# sourceMappingURL=uniqueBy-rVfNPL42.js.map","import { createHash } from 'node:crypto'\nimport path from 'node:path'\nimport { orderBy } from 'natural-orderby'\nimport { uniqueBy } from 'remeda'\nimport type * as KubbFile from './KubbFile.ts'\nimport { trimExtName } from './utils/trimExtName.ts'\n\nexport function combineSources(sources: Array<KubbFile.Source>): Array<KubbFile.Source> {\n return uniqueBy(sources, (obj) => {\n // For named sources, deduplicate by name, isExportable, and isTypeOnly\n // For unnamed sources, include the value to avoid deduplicating different code blocks\n // If both name and value are undefined, use an empty string as the unique identifier\n const uniqueId = obj.name ?? obj.value ?? ''\n const isExportable = obj.isExportable ?? false\n const isTypeOnly = obj.isTypeOnly ?? false\n return `${uniqueId}:${isExportable}:${isTypeOnly}`\n })\n}\n\nexport function combineExports(exports: Array<KubbFile.Export>): Array<KubbFile.Export> {\n const sorted = orderBy(exports, [\n (v) => !!Array.isArray(v.name),\n (v) => !v.isTypeOnly,\n (v) => v.path,\n (v) => !!v.name,\n (v) => (Array.isArray(v.name) ? 
orderBy(v.name) : v.name),\n ])\n\n const prev: Array<KubbFile.Export> = []\n // Map to track items by path for O(1) lookup\n const pathMap = new Map<string, KubbFile.Export>()\n // Map to track unique items by path+name+isTypeOnly+asAlias\n const uniqueMap = new Map<string, KubbFile.Export>()\n // Map to track items by path+name where isTypeOnly=true (for type-only check)\n const pathNameTypeTrueMap = new Map<string, KubbFile.Export>()\n\n for (const curr of sorted) {\n const name = curr.name\n const pathKey = curr.path\n const prevByPath = pathMap.get(pathKey)\n\n // Create unique key for path+name+isTypeOnly\n const nameKey = Array.isArray(name) ? JSON.stringify(name) : name || ''\n const pathNameTypeKey = `${pathKey}:${nameKey}:${curr.isTypeOnly}`\n // Check if there's already an item with the same path+name but with isTypeOnly=true\n const pathNameKey = `${pathKey}:${nameKey}`\n const prevByPathAndIsTypeOnly = pathNameTypeTrueMap.get(pathNameKey)\n\n if (prevByPathAndIsTypeOnly) {\n // we already have an export that has the same path and name but uses `isTypeOnly` (export type ...)\n continue\n }\n\n // Create unique key for path+name+isTypeOnly+asAlias\n const uniqueKey = `${pathNameTypeKey}:${curr.asAlias || ''}`\n const uniquePrev = uniqueMap.get(uniqueKey)\n\n // we already have an item that was unique enough or name field is empty or prev asAlias is set but current has no changes\n if (uniquePrev || (Array.isArray(name) && !name.length) || (prevByPath?.asAlias && !curr.asAlias)) {\n continue\n }\n\n if (!prevByPath) {\n const newItem = {\n ...curr,\n name: Array.isArray(name) ? [...new Set(name)] : name,\n }\n prev.push(newItem)\n pathMap.set(pathKey, newItem)\n uniqueMap.set(uniqueKey, newItem)\n // Track items with isTypeOnly=true for the type-only check\n if (newItem.isTypeOnly) {\n pathNameTypeTrueMap.set(pathNameKey, newItem)\n }\n continue\n }\n\n // merge all names when prev and current both have the same isTypeOnly set\n if (prevByPath && Array.isArray(prevByPath.name) && Array.isArray(curr.name) && prevByPath.isTypeOnly === curr.isTypeOnly) {\n prevByPath.name = [...new Set([...prevByPath.name, ...curr.name])]\n continue\n }\n\n prev.push(curr)\n uniqueMap.set(uniqueKey, curr)\n // Track items with isTypeOnly=true for the type-only check\n if (curr.isTypeOnly) {\n pathNameTypeTrueMap.set(pathNameKey, curr)\n }\n }\n\n return prev\n}\n\nexport function combineImports(imports: Array<KubbFile.Import>, exports: Array<KubbFile.Export>, source?: string): Array<KubbFile.Import> {\n const exportedNameLookup = new Set<string>()\n for (const item of exports) {\n const { name } = item\n if (!name) {\n continue\n }\n\n if (Array.isArray(name)) {\n for (const value of name) {\n if (value) {\n exportedNameLookup.add(value)\n }\n }\n continue\n }\n\n exportedNameLookup.add(name)\n }\n\n const usageCache = new Map<string, boolean>()\n const hasImportInSource = (importName: string): boolean => {\n if (!source) {\n return true\n }\n\n const cached = usageCache.get(importName)\n if (cached !== undefined) {\n return cached\n }\n\n const isUsed = source.includes(importName) || exportedNameLookup.has(importName)\n usageCache.set(importName, isUsed)\n\n return isUsed\n }\n\n const sorted = orderBy(imports, [\n (v) => !!Array.isArray(v.name),\n (v) => !v.isTypeOnly,\n (v) => v.path,\n (v) => !!v.name,\n (v) => (Array.isArray(v.name) ? 
orderBy(v.name) : v.name),\n ])\n\n const prev: Array<KubbFile.Import> = []\n // Map to track items by path+isTypeOnly for O(1) lookup\n const pathTypeMap = new Map<string, KubbFile.Import>()\n // Map to track unique items by path+name+isTypeOnly\n const uniqueMap = new Map<string, KubbFile.Import>()\n // Map to track items by path+name where isTypeOnly=true (for type-only check)\n const pathNameTypeTrueMap = new Map<string, KubbFile.Import>()\n\n for (const curr of sorted) {\n let name = Array.isArray(curr.name) ? [...new Set(curr.name)] : curr.name\n\n if (curr.path === curr.root) {\n // root and path are the same file, remove the \"./\" import\n continue\n }\n\n // merge all names and check if the importName is being used in the generated source and if not filter those imports out\n if (Array.isArray(name)) {\n name = name.filter((item) => (typeof item === 'string' ? hasImportInSource(item) : hasImportInSource(item.propertyName)))\n }\n\n const pathTypeKey = `${curr.path}:${curr.isTypeOnly}`\n const prevByPath = pathTypeMap.get(pathTypeKey)\n\n // Create key for name comparison\n const nameKey = Array.isArray(name) ? JSON.stringify(name) : name || ''\n const pathNameTypeKey = `${curr.path}:${nameKey}:${curr.isTypeOnly}`\n const uniquePrev = uniqueMap.get(pathNameTypeKey)\n // Check if there's already an item with the same path+name but with isTypeOnly=true\n const pathNameKey = `${curr.path}:${nameKey}`\n const prevByPathNameAndIsTypeOnly = pathNameTypeTrueMap.get(pathNameKey)\n\n if (prevByPathNameAndIsTypeOnly) {\n // we already have an import that has the same path and name but uses `isTypeOnly` (import type ...)\n continue\n }\n\n // already unique enough or name is empty\n if (uniquePrev || (Array.isArray(name) && !name.length)) {\n continue\n }\n\n // new item, append name\n if (!prevByPath) {\n const newItem = {\n ...curr,\n name,\n }\n prev.push(newItem)\n pathTypeMap.set(pathTypeKey, newItem)\n uniqueMap.set(pathNameTypeKey, newItem)\n // Track items with isTypeOnly=true for the type-only check\n if (newItem.isTypeOnly) {\n pathNameTypeTrueMap.set(pathNameKey, newItem)\n }\n continue\n }\n\n // merge all names when prev and current both have the same isTypeOnly set\n if (prevByPath && Array.isArray(prevByPath.name) && Array.isArray(name) && prevByPath.isTypeOnly === curr.isTypeOnly) {\n prevByPath.name = [...new Set([...prevByPath.name, ...name])]\n continue\n }\n\n // no import was found in the source, ignore import\n if (!Array.isArray(name) && name && !hasImportInSource(name)) {\n continue\n }\n\n prev.push(curr)\n uniqueMap.set(pathNameTypeKey, curr)\n // Track items with isTypeOnly=true for the type-only check\n if (curr.isTypeOnly) {\n pathNameTypeTrueMap.set(pathNameKey, curr)\n }\n }\n\n return prev\n}\n\n/**\n * Helper to create a file with name and id set\n */\nexport function createFile<TMeta extends object = object>(file: KubbFile.File<TMeta>): KubbFile.ResolvedFile<TMeta> {\n const extname = path.extname(file.baseName) as KubbFile.Extname\n if (!extname) {\n throw new Error(`No extname found for ${file.baseName}`)\n }\n\n const source = file.sources.map((item) => item.value).join('\\n\\n')\n const exports = file.exports?.length ? combineExports(file.exports) : []\n const imports = file.imports?.length && source ? combineImports(file.imports, exports, source) : []\n const sources = file.sources?.length ? 
combineSources(file.sources) : []\n\n return {\n ...file,\n id: createHash('sha256').update(file.path).digest('hex'),\n name: trimExtName(file.baseName),\n extname,\n imports: imports,\n exports: exports,\n sources: sources,\n meta: file.meta || ({} as TMeta),\n }\n}\n"],"x_google_ignoreList":[0,1,2,3],"mappings":";;;;;;AAAA,MAAM,IAAE;CAAC,MAAK,CAAC;CAAE,SAAQ,CAAC;CAAE,EAACA,MAAE;CAAC,MAAK,CAAC;CAAE,SAAQ,CAAC;CAAE,EAACC,YAAM,GAAEC,OAAE,SAAI;CAAC,SAAQ,CAAC;CAAE,MAAKC;CAAE,MAAK,CAAC;CAAE;;;;ACAzC,SAASC,IAAE,KAAE,GAAGA,KAAE;CAAC,IAAI,IAAEC,KAAE,IAAED,IAAE,KAAI,QAAG,UAASC,MAAEC,IAAED,IAAE,GAAC,KAAK,EAAE,EAAC,IAAE;AAAE,QAAK,IAAED,IAAE,SAAQ;AAAC,MAAG,EAAE,OAAK,KAAK,KAAG,CAAC,EAAE,EAAE,EAAC;GAAC,IAAIC,MAAED,IAAE;AAAG,OAAEC,IAAE,EAAE,EAAC,KAAG;AAAE;;EAAS,IAAIA,MAAE,EAAE;AAAC,OAAI,IAAIE,MAAE,GAAEA,MAAEH,IAAE,QAAO,OAAI;GAAC,IAAIA,MAAE,EAAEG;AAAG,OAAGH,QAAI,KAAK,MAAIC,IAAE,KAAKD,IAAE,EAACA,IAAE,UAAU;;EAAM,IAAIE,MAAE,EAAE;AAAC,OAAI,IAAIF,OAAK,EAAE,KAAGG,IAAEH,KAAEE,KAAED,IAAE,CAAC;EAAM,IAAG,EAAC,UAAS,MAAGA,IAAE,GAAG,GAAG;AAAC,MAAE,IAAEC,IAAE,KAAGA,KAAE,KAAGD,IAAE;;AAAO,QAAO;;AAAE,SAASE,IAAE,KAAE,KAAE,KAAE;AAAC,KAAGC,IAAE,WAAS,EAAE,QAAOF,IAAE,KAAKF,IAAE,EAAC,CAAC;CAAE,IAAI,IAAEA,KAAE,IAAEC,KAAE,IAAE,CAAC;AAAE,MAAI,IAAG,CAACA,KAAED,QAAKI,IAAE,SAAS,EAAC;EAAC,IAAG,EAAC,OAAM,GAAE,OAAM,MAAGJ;AAAE,MAAG,EAAE,KAAK,EAAE,EAAC,IAAEA,IAAE,GAAE,GAAE,EAAE,EAAC,IAAE,SAAO,GAAE,EAAE,SAAQ;;AAAC,qBAAG,EAAE,0DAAS,CAAC,GAAE;AAAC,SAAI,IAAIA,OAAK,EAAE,KAAK,KAAGG,IAAEH,KAAEE,KAAEE,IAAE,MAAMH,MAAE,EAAE,CAAC,CAAC,QAAM,CAAC;AAAE,WAAO;;AAAE,OAAE,EAAE;;AAAK,MAAG,CAAC,EAAE,QAAQ;AAAM,IAAE,SAAO,IAAE,CAAC;;AAAG,QAAO,EAAE,WAASC,IAAE,KAAK,EAAE,EAAC;;AAAE,SAASA,IAAE,KAAE;;CAAC,IAAG,EAAC,MAAKF,KAAE,UAASG,QAAGF,KAAEC,MAAEF,IAAE,GAAGG,IAAE;AAAC,QAAO,OAAO,OAAOD,KAAE;EAAC,uBAASF,IAAE,uDAAQ,CAAC;EAAE,OAAM;EAAE,OAAM,EAAE;EAAC,CAAC;;AAAC,SAAS,EAAE,KAAE;AAAC,QAAO,OAAOC,OAAG,YAAU,OAAOA,OAAG,YAAU,CAAC,CAACA,OAAG,OAAO,YAAYA;;;;;ACAj2B,SAAS,EAAE,KAAE,KAAE;CAAC,IAAII,MAAEC,IAAE,SAAOC,IAAE;AAAO,KAAGF,QAAI,GAAE;EAAC,IAAG,CAACA,KAAE,GAAGG,OAAGF;AAAE,SAAOG,IAAEJ,KAAE;GAAC,MAAKE;GAAE,UAASC;GAAE,CAAC;;AAAC,KAAGH,QAAI,GAAE;EAAC,IAAIA,MAAE;GAAC,MAAKE;GAAE,UAASD;GAAE;AAAC,SAAO,OAAO,QAAO,QAAGG,IAAEF,KAAEF,IAAE,EAACA,IAAE;;AAAC,OAAM,MAAM,4BAA4B;;;;;ACAtJ,SAAS,EAAE,GAAGK,KAAE;AAAC,QAAO,EAAE,GAAEA,IAAE;;AAAC,SAAS,EAAE,KAAE;CAAC,IAAIC,MAAEC,KAAEC,sBAAE,IAAI,KAAG;AAAC,SAAO,KAAE,KAAE,MAAI;EAAC,IAAI,IAAEF,IAAEC,KAAEE,KAAE,EAAE;AAAC,SAAOD,IAAE,IAAI,EAAE,GAACH,OAAGG,IAAE,IAAI,EAAE,EAAC;GAAC,MAAK,CAAC;GAAE,SAAQ,CAAC;GAAE,MAAKD;GAAE;;;;;;ACOpP,SAAgB,eAAe,SAAyD;AACtF,QAAOG,EAAS,UAAU,QAAQ;;AAOhC,SAAO,wBAHU,IAAI,qDAAQ,IAAI,4CAAS,GAGvB,wBAFE,IAAI,6EAAgB,MAEN,sBADhB,IAAI,uEAAc;GAErC;;AAGJ,SAAgB,eAAe,SAAyD;CACtF,MAAM,SAAS,QAAQ,SAAS;GAC7B,MAAM,CAAC,CAAC,MAAM,QAAQ,EAAE,KAAK;GAC7B,MAAM,CAAC,EAAE;GACT,MAAM,EAAE;GACR,MAAM,CAAC,CAAC,EAAE;GACV,MAAO,MAAM,QAAQ,EAAE,KAAK,GAAG,QAAQ,EAAE,KAAK,GAAG,EAAE;EACrD,CAAC;CAEF,MAAMC,OAA+B,EAAE;CAEvC,MAAM,0BAAU,IAAI,KAA8B;CAElD,MAAM,4BAAY,IAAI,KAA8B;CAEpD,MAAM,sCAAsB,IAAI,KAA8B;AAE9D,MAAK,MAAM,QAAQ,QAAQ;EACzB,MAAM,OAAO,KAAK;EAClB,MAAM,UAAU,KAAK;EACrB,MAAM,aAAa,QAAQ,IAAI,QAAQ;EAGvC,MAAM,UAAU,MAAM,QAAQ,KAAK,GAAG,KAAK,UAAU,KAAK,GAAG,QAAQ;EACrE,MAAM,kBAAkB,GAAG,QAAQ,GAAG,QAAQ,GAAG,KAAK;EAEtD,MAAM,cAAc,GAAG,QAAQ,GAAG;AAGlC,MAFgC,oBAAoB,IAAI,YAAY,CAIlE;EAIF,MAAM,YAAY,GAAG,gBAAgB,GAAG,KAAK,WAAW;AAIxD,MAHmB,UAAU,IAAI,UAAU,IAGxB,MAAM,QAAQ,KAAK,IAAI,CAAC,KAAK,mEAAY,WAAY,YAAW,CAAC,KAAK,QACvF;AAGF,MAAI,CAAC,YAAY;GACf,MAAM,UAAU;IACd,GAAG;IACH,MAAM,MAAM,QAAQ,KAAK,GAAG,CAAC,GAAG,IAAI,IAAI,KAAK,CAAC,GAAG;IAClD;AACD,QAAK,KAAK,QAAQ;AAClB,WAAQ,IAAI,SAAS,QAAQ;AAC7B,aAAU
,IAAI,WAAW,QAAQ;AAEjC,OAAI,QAAQ,WACV,qBAAoB,IAAI,aAAa,QAAQ;AAE/C;;AAIF,MAAI,cAAc,MAAM,QAAQ,WAAW,KAAK,IAAI,MAAM,QAAQ,KAAK,KAAK,IAAI,WAAW,eAAe,KAAK,YAAY;AACzH,cAAW,OAAO,CAAC,GAAG,IAAI,IAAI,CAAC,GAAG,WAAW,MAAM,GAAG,KAAK,KAAK,CAAC,CAAC;AAClE;;AAGF,OAAK,KAAK,KAAK;AACf,YAAU,IAAI,WAAW,KAAK;AAE9B,MAAI,KAAK,WACP,qBAAoB,IAAI,aAAa,KAAK;;AAI9C,QAAO;;AAGT,SAAgB,eAAe,SAAiC,SAAiC,QAAyC;CACxI,MAAM,qCAAqB,IAAI,KAAa;AAC5C,MAAK,MAAM,QAAQ,SAAS;EAC1B,MAAM,EAAE,SAAS;AACjB,MAAI,CAAC,KACH;AAGF,MAAI,MAAM,QAAQ,KAAK,EAAE;AACvB,QAAK,MAAM,SAAS,KAClB,KAAI,MACF,oBAAmB,IAAI,MAAM;AAGjC;;AAGF,qBAAmB,IAAI,KAAK;;CAG9B,MAAM,6BAAa,IAAI,KAAsB;CAC7C,MAAM,qBAAqB,eAAgC;AACzD,MAAI,CAAC,OACH,QAAO;EAGT,MAAM,SAAS,WAAW,IAAI,WAAW;AACzC,MAAI,WAAW,OACb,QAAO;EAGT,MAAM,SAAS,OAAO,SAAS,WAAW,IAAI,mBAAmB,IAAI,WAAW;AAChF,aAAW,IAAI,YAAY,OAAO;AAElC,SAAO;;CAGT,MAAM,SAAS,QAAQ,SAAS;GAC7B,MAAM,CAAC,CAAC,MAAM,QAAQ,EAAE,KAAK;GAC7B,MAAM,CAAC,EAAE;GACT,MAAM,EAAE;GACR,MAAM,CAAC,CAAC,EAAE;GACV,MAAO,MAAM,QAAQ,EAAE,KAAK,GAAG,QAAQ,EAAE,KAAK,GAAG,EAAE;EACrD,CAAC;CAEF,MAAMC,OAA+B,EAAE;CAEvC,MAAM,8BAAc,IAAI,KAA8B;CAEtD,MAAM,4BAAY,IAAI,KAA8B;CAEpD,MAAM,sCAAsB,IAAI,KAA8B;AAE9D,MAAK,MAAM,QAAQ,QAAQ;EACzB,IAAI,OAAO,MAAM,QAAQ,KAAK,KAAK,GAAG,CAAC,GAAG,IAAI,IAAI,KAAK,KAAK,CAAC,GAAG,KAAK;AAErE,MAAI,KAAK,SAAS,KAAK,KAErB;AAIF,MAAI,MAAM,QAAQ,KAAK,CACrB,QAAO,KAAK,QAAQ,SAAU,OAAO,SAAS,WAAW,kBAAkB,KAAK,GAAG,kBAAkB,KAAK,aAAa,CAAE;EAG3H,MAAM,cAAc,GAAG,KAAK,KAAK,GAAG,KAAK;EACzC,MAAM,aAAa,YAAY,IAAI,YAAY;EAG/C,MAAM,UAAU,MAAM,QAAQ,KAAK,GAAG,KAAK,UAAU,KAAK,GAAG,QAAQ;EACrE,MAAM,kBAAkB,GAAG,KAAK,KAAK,GAAG,QAAQ,GAAG,KAAK;EACxD,MAAM,aAAa,UAAU,IAAI,gBAAgB;EAEjD,MAAM,cAAc,GAAG,KAAK,KAAK,GAAG;AAGpC,MAFoC,oBAAoB,IAAI,YAAY,CAItE;AAIF,MAAI,cAAe,MAAM,QAAQ,KAAK,IAAI,CAAC,KAAK,OAC9C;AAIF,MAAI,CAAC,YAAY;GACf,MAAM,UAAU;IACd,GAAG;IACH;IACD;AACD,QAAK,KAAK,QAAQ;AAClB,eAAY,IAAI,aAAa,QAAQ;AACrC,aAAU,IAAI,iBAAiB,QAAQ;AAEvC,OAAI,QAAQ,WACV,qBAAoB,IAAI,aAAa,QAAQ;AAE/C;;AAIF,MAAI,cAAc,MAAM,QAAQ,WAAW,KAAK,IAAI,MAAM,QAAQ,KAAK,IAAI,WAAW,eAAe,KAAK,YAAY;AACpH,cAAW,OAAO,CAAC,GAAG,IAAI,IAAI,CAAC,GAAG,WAAW,MAAM,GAAG,KAAK,CAAC,CAAC;AAC7D;;AAIF,MAAI,CAAC,MAAM,QAAQ,KAAK,IAAI,QAAQ,CAAC,kBAAkB,KAAK,CAC1D;AAGF,OAAK,KAAK,KAAK;AACf,YAAU,IAAI,iBAAiB,KAAK;AAEpC,MAAI,KAAK,WACP,qBAAoB,IAAI,aAAa,KAAK;;AAI9C,QAAO;;;;;AAMT,SAAgB,WAA0C,MAA0D;;CAClH,MAAM,UAAU,KAAK,QAAQ,KAAK,SAAS;AAC3C,KAAI,CAAC,QACH,OAAM,IAAI,MAAM,wBAAwB,KAAK,WAAW;CAG1D,MAAM,SAAS,KAAK,QAAQ,KAAK,SAAS,KAAK,MAAM,CAAC,KAAK,OAAO;CAClE,MAAM,4BAAU,KAAK,uEAAS,UAAS,eAAe,KAAK,QAAQ,GAAG,EAAE;CACxE,MAAM,4BAAU,KAAK,uEAAS,WAAU,SAAS,eAAe,KAAK,SAAS,SAAS,OAAO,GAAG,EAAE;CACnG,MAAM,4BAAU,KAAK,uEAAS,UAAS,eAAe,KAAK,QAAQ,GAAG,EAAE;AAExE,QAAO;EACL,GAAG;EACH,IAAI,WAAW,SAAS,CAAC,OAAO,KAAK,KAAK,CAAC,OAAO,MAAM;EACxD,MAAM,YAAY,KAAK,SAAS;EAChC;EACS;EACA;EACA;EACT,MAAM,KAAK,QAAS,EAAE;EACvB"}
@@ -1 +0,0 @@
-
{"version":3,"file":"defineProperty-Db0mqPmh.cjs","names":["t","n","r","e","t","e","r","n","i","r","n","t","i","e","e","n","t","r","i","uniqueBy","exports","prev: Array<KubbFile.Export>","prev: Array<KubbFile.Import>","path","trimExtName"],"sources":["../../../node_modules/.pnpm/remeda@2.32.2/node_modules/remeda/dist/utilityEvaluators-XvjlcSPu.js","../../../node_modules/.pnpm/remeda@2.32.2/node_modules/remeda/dist/pipe-BVeXEJGy.js","../../../node_modules/.pnpm/remeda@2.32.2/node_modules/remeda/dist/purryFromLazy-ackWJ5sq.js","../../../node_modules/.pnpm/remeda@2.32.2/node_modules/remeda/dist/uniqueBy-rVfNPL42.js","../src/createFile.ts"],"sourcesContent":["const e={done:!0,hasNext:!1},t={done:!1,hasNext:!1},n=()=>e,r=e=>({hasNext:!0,next:e,done:!1});export{n,r,t};\n//# sourceMappingURL=utilityEvaluators-XvjlcSPu.js.map","import{t as e}from\"./utilityEvaluators-XvjlcSPu.js\";function t(e,...t){let a=e,o=t.map(e=>`lazy`in e?r(e):void 0),s=0;for(;s<t.length;){if(o[s]===void 0||!i(a)){let e=t[s];a=e(a),s+=1;continue}let e=[];for(let n=s;n<t.length;n++){let t=o[n];if(t===void 0||(e.push(t),t.isSingle))break}let r=[];for(let t of a)if(n(t,r,e))break;let{isSingle:c}=e.at(-1);a=c?r[0]:r,s+=e.length}return a}function n(t,r,i){if(i.length===0)return r.push(t),!1;let a=t,o=e,s=!1;for(let[e,t]of i.entries()){let{index:c,items:l}=t;if(l.push(a),o=t(a,c,l),t.index+=1,o.hasNext){if(o.hasMany??!1){for(let t of o.next)if(n(t,r,i.slice(e+1)))return!0;return s}a=o.next}if(!o.hasNext)break;o.done&&(s=!0)}return o.hasNext&&r.push(a),s}function r(e){let{lazy:t,lazyArgs:n}=e,r=t(...n);return Object.assign(r,{isSingle:t.single??!1,index:0,items:[]})}function i(e){return typeof e==`string`||typeof e==`object`&&!!e&&Symbol.iterator in e}export{t};\n//# sourceMappingURL=pipe-BVeXEJGy.js.map","import{t as e}from\"./pipe-BVeXEJGy.js\";function t(t,n){let r=n.length-t.length;if(r===1){let[r,...i]=n;return e(r,{lazy:t,lazyArgs:i})}if(r===0){let r={lazy:t,lazyArgs:n};return Object.assign(t=>e(t,r),r)}throw Error(`Wrong number of arguments`)}export{t};\n//# sourceMappingURL=purryFromLazy-ackWJ5sq.js.map","import{t as e}from\"./utilityEvaluators-XvjlcSPu.js\";import{t}from\"./purryFromLazy-ackWJ5sq.js\";function n(...e){return t(r,e)}function r(t){let n=t,r=new Set;return(t,i,a)=>{let o=n(t,i,a);return r.has(o)?e:(r.add(o),{done:!1,hasNext:!0,next:t})}}export{n as t};\n//# sourceMappingURL=uniqueBy-rVfNPL42.js.map","import { createHash } from 'node:crypto'\nimport path from 'node:path'\nimport { orderBy } from 'natural-orderby'\nimport { uniqueBy } from 'remeda'\nimport type * as KubbFile from './KubbFile.ts'\nimport { trimExtName } from './utils/trimExtName.ts'\n\nexport function combineSources(sources: Array<KubbFile.Source>): Array<KubbFile.Source> {\n return uniqueBy(sources, (obj) => {\n // For named sources, deduplicate by name, isExportable, and isTypeOnly\n // For unnamed sources, include the value to avoid deduplicating different code blocks\n // If both name and value are undefined, use an empty string as the unique identifier\n const uniqueId = obj.name ?? obj.value ?? ''\n const isExportable = obj.isExportable ?? false\n const isTypeOnly = obj.isTypeOnly ?? false\n return `${uniqueId}:${isExportable}:${isTypeOnly}`\n })\n}\n\nexport function combineExports(exports: Array<KubbFile.Export>): Array<KubbFile.Export> {\n const sorted = orderBy(exports, [\n (v) => !!Array.isArray(v.name),\n (v) => !v.isTypeOnly,\n (v) => v.path,\n (v) => !!v.name,\n (v) => (Array.isArray(v.name) ? 
orderBy(v.name) : v.name),\n ])\n\n const prev: Array<KubbFile.Export> = []\n // Map to track items by path for O(1) lookup\n const pathMap = new Map<string, KubbFile.Export>()\n // Map to track unique items by path+name+isTypeOnly+asAlias\n const uniqueMap = new Map<string, KubbFile.Export>()\n // Map to track items by path+name where isTypeOnly=true (for type-only check)\n const pathNameTypeTrueMap = new Map<string, KubbFile.Export>()\n\n for (const curr of sorted) {\n const name = curr.name\n const pathKey = curr.path\n const prevByPath = pathMap.get(pathKey)\n\n // Create unique key for path+name+isTypeOnly\n const nameKey = Array.isArray(name) ? JSON.stringify(name) : name || ''\n const pathNameTypeKey = `${pathKey}:${nameKey}:${curr.isTypeOnly}`\n // Check if there's already an item with the same path+name but with isTypeOnly=true\n const pathNameKey = `${pathKey}:${nameKey}`\n const prevByPathAndIsTypeOnly = pathNameTypeTrueMap.get(pathNameKey)\n\n if (prevByPathAndIsTypeOnly) {\n // we already have an export that has the same path and name but uses `isTypeOnly` (export type ...)\n continue\n }\n\n // Create unique key for path+name+isTypeOnly+asAlias\n const uniqueKey = `${pathNameTypeKey}:${curr.asAlias || ''}`\n const uniquePrev = uniqueMap.get(uniqueKey)\n\n // we already have an item that was unique enough or name field is empty or prev asAlias is set but current has no changes\n if (uniquePrev || (Array.isArray(name) && !name.length) || (prevByPath?.asAlias && !curr.asAlias)) {\n continue\n }\n\n if (!prevByPath) {\n const newItem = {\n ...curr,\n name: Array.isArray(name) ? [...new Set(name)] : name,\n }\n prev.push(newItem)\n pathMap.set(pathKey, newItem)\n uniqueMap.set(uniqueKey, newItem)\n // Track items with isTypeOnly=true for the type-only check\n if (newItem.isTypeOnly) {\n pathNameTypeTrueMap.set(pathNameKey, newItem)\n }\n continue\n }\n\n // merge all names when prev and current both have the same isTypeOnly set\n if (prevByPath && Array.isArray(prevByPath.name) && Array.isArray(curr.name) && prevByPath.isTypeOnly === curr.isTypeOnly) {\n prevByPath.name = [...new Set([...prevByPath.name, ...curr.name])]\n continue\n }\n\n prev.push(curr)\n uniqueMap.set(uniqueKey, curr)\n // Track items with isTypeOnly=true for the type-only check\n if (curr.isTypeOnly) {\n pathNameTypeTrueMap.set(pathNameKey, curr)\n }\n }\n\n return prev\n}\n\nexport function combineImports(imports: Array<KubbFile.Import>, exports: Array<KubbFile.Export>, source?: string): Array<KubbFile.Import> {\n const exportedNameLookup = new Set<string>()\n for (const item of exports) {\n const { name } = item\n if (!name) {\n continue\n }\n\n if (Array.isArray(name)) {\n for (const value of name) {\n if (value) {\n exportedNameLookup.add(value)\n }\n }\n continue\n }\n\n exportedNameLookup.add(name)\n }\n\n const usageCache = new Map<string, boolean>()\n const hasImportInSource = (importName: string): boolean => {\n if (!source) {\n return true\n }\n\n const cached = usageCache.get(importName)\n if (cached !== undefined) {\n return cached\n }\n\n const isUsed = source.includes(importName) || exportedNameLookup.has(importName)\n usageCache.set(importName, isUsed)\n\n return isUsed\n }\n\n const sorted = orderBy(imports, [\n (v) => !!Array.isArray(v.name),\n (v) => !v.isTypeOnly,\n (v) => v.path,\n (v) => !!v.name,\n (v) => (Array.isArray(v.name) ? 
orderBy(v.name) : v.name),\n ])\n\n const prev: Array<KubbFile.Import> = []\n // Map to track items by path+isTypeOnly for O(1) lookup\n const pathTypeMap = new Map<string, KubbFile.Import>()\n // Map to track unique items by path+name+isTypeOnly\n const uniqueMap = new Map<string, KubbFile.Import>()\n // Map to track items by path+name where isTypeOnly=true (for type-only check)\n const pathNameTypeTrueMap = new Map<string, KubbFile.Import>()\n\n for (const curr of sorted) {\n let name = Array.isArray(curr.name) ? [...new Set(curr.name)] : curr.name\n\n if (curr.path === curr.root) {\n // root and path are the same file, remove the \"./\" import\n continue\n }\n\n // merge all names and check if the importName is being used in the generated source and if not filter those imports out\n if (Array.isArray(name)) {\n name = name.filter((item) => (typeof item === 'string' ? hasImportInSource(item) : hasImportInSource(item.propertyName)))\n }\n\n const pathTypeKey = `${curr.path}:${curr.isTypeOnly}`\n const prevByPath = pathTypeMap.get(pathTypeKey)\n\n // Create key for name comparison\n const nameKey = Array.isArray(name) ? JSON.stringify(name) : name || ''\n const pathNameTypeKey = `${curr.path}:${nameKey}:${curr.isTypeOnly}`\n const uniquePrev = uniqueMap.get(pathNameTypeKey)\n // Check if there's already an item with the same path+name but with isTypeOnly=true\n const pathNameKey = `${curr.path}:${nameKey}`\n const prevByPathNameAndIsTypeOnly = pathNameTypeTrueMap.get(pathNameKey)\n\n if (prevByPathNameAndIsTypeOnly) {\n // we already have an import that has the same path and name but uses `isTypeOnly` (import type ...)\n continue\n }\n\n // already unique enough or name is empty\n if (uniquePrev || (Array.isArray(name) && !name.length)) {\n continue\n }\n\n // new item, append name\n if (!prevByPath) {\n const newItem = {\n ...curr,\n name,\n }\n prev.push(newItem)\n pathTypeMap.set(pathTypeKey, newItem)\n uniqueMap.set(pathNameTypeKey, newItem)\n // Track items with isTypeOnly=true for the type-only check\n if (newItem.isTypeOnly) {\n pathNameTypeTrueMap.set(pathNameKey, newItem)\n }\n continue\n }\n\n // merge all names when prev and current both have the same isTypeOnly set\n if (prevByPath && Array.isArray(prevByPath.name) && Array.isArray(name) && prevByPath.isTypeOnly === curr.isTypeOnly) {\n prevByPath.name = [...new Set([...prevByPath.name, ...name])]\n continue\n }\n\n // no import was found in the source, ignore import\n if (!Array.isArray(name) && name && !hasImportInSource(name)) {\n continue\n }\n\n prev.push(curr)\n uniqueMap.set(pathNameTypeKey, curr)\n // Track items with isTypeOnly=true for the type-only check\n if (curr.isTypeOnly) {\n pathNameTypeTrueMap.set(pathNameKey, curr)\n }\n }\n\n return prev\n}\n\n/**\n * Helper to create a file with name and id set\n */\nexport function createFile<TMeta extends object = object>(file: KubbFile.File<TMeta>): KubbFile.ResolvedFile<TMeta> {\n const extname = path.extname(file.baseName) as KubbFile.Extname\n if (!extname) {\n throw new Error(`No extname found for ${file.baseName}`)\n }\n\n const source = file.sources.map((item) => item.value).join('\\n\\n')\n const exports = file.exports?.length ? combineExports(file.exports) : []\n const imports = file.imports?.length && source ? combineImports(file.imports, exports, source) : []\n const sources = file.sources?.length ? 
combineSources(file.sources) : []\n\n return {\n ...file,\n id: createHash('sha256').update(file.path).digest('hex'),\n name: trimExtName(file.baseName),\n extname,\n imports: imports,\n exports: exports,\n sources: sources,\n meta: file.meta || ({} as TMeta),\n }\n}\n"],"x_google_ignoreList":[0,1,2,3],"mappings":";;;;;;;AAAA,MAAM,IAAE;CAAC,MAAK,CAAC;CAAE,SAAQ,CAAC;CAAE,EAACA,MAAE;CAAC,MAAK,CAAC;CAAE,SAAQ,CAAC;CAAE,EAACC,YAAM,GAAEC,OAAE,SAAI;CAAC,SAAQ,CAAC;CAAE,MAAKC;CAAE,MAAK,CAAC;CAAE;;;;ACAzC,SAASC,IAAE,KAAE,GAAGA,KAAE;CAAC,IAAI,IAAEC,KAAE,IAAED,IAAE,KAAI,QAAG,UAASC,MAAEC,IAAED,IAAE,GAAC,KAAK,EAAE,EAAC,IAAE;AAAE,QAAK,IAAED,IAAE,SAAQ;AAAC,MAAG,EAAE,OAAK,KAAK,KAAG,CAAC,EAAE,EAAE,EAAC;GAAC,IAAIC,MAAED,IAAE;AAAG,OAAEC,IAAE,EAAE,EAAC,KAAG;AAAE;;EAAS,IAAIA,MAAE,EAAE;AAAC,OAAI,IAAIE,MAAE,GAAEA,MAAEH,IAAE,QAAO,OAAI;GAAC,IAAIA,MAAE,EAAEG;AAAG,OAAGH,QAAI,KAAK,MAAIC,IAAE,KAAKD,IAAE,EAACA,IAAE,UAAU;;EAAM,IAAIE,MAAE,EAAE;AAAC,OAAI,IAAIF,OAAK,EAAE,KAAGG,IAAEH,KAAEE,KAAED,IAAE,CAAC;EAAM,IAAG,EAAC,UAAS,MAAGA,IAAE,GAAG,GAAG;AAAC,MAAE,IAAEC,IAAE,KAAGA,KAAE,KAAGD,IAAE;;AAAO,QAAO;;AAAE,SAASE,IAAE,KAAE,KAAE,KAAE;AAAC,KAAGC,IAAE,WAAS,EAAE,QAAOF,IAAE,KAAKF,IAAE,EAAC,CAAC;CAAE,IAAI,IAAEA,KAAE,IAAEC,KAAE,IAAE,CAAC;AAAE,MAAI,IAAG,CAACA,KAAED,QAAKI,IAAE,SAAS,EAAC;EAAC,IAAG,EAAC,OAAM,GAAE,OAAM,MAAGJ;AAAE,MAAG,EAAE,KAAK,EAAE,EAAC,IAAEA,IAAE,GAAE,GAAE,EAAE,EAAC,IAAE,SAAO,GAAE,EAAE,SAAQ;;AAAC,qBAAG,EAAE,0DAAS,CAAC,GAAE;AAAC,SAAI,IAAIA,OAAK,EAAE,KAAK,KAAGG,IAAEH,KAAEE,KAAEE,IAAE,MAAMH,MAAE,EAAE,CAAC,CAAC,QAAM,CAAC;AAAE,WAAO;;AAAE,OAAE,EAAE;;AAAK,MAAG,CAAC,EAAE,QAAQ;AAAM,IAAE,SAAO,IAAE,CAAC;;AAAG,QAAO,EAAE,WAASC,IAAE,KAAK,EAAE,EAAC;;AAAE,SAASA,IAAE,KAAE;;CAAC,IAAG,EAAC,MAAKF,KAAE,UAASG,QAAGF,KAAEC,MAAEF,IAAE,GAAGG,IAAE;AAAC,QAAO,OAAO,OAAOD,KAAE;EAAC,uBAASF,IAAE,uDAAQ,CAAC;EAAE,OAAM;EAAE,OAAM,EAAE;EAAC,CAAC;;AAAC,SAAS,EAAE,KAAE;AAAC,QAAO,OAAOC,OAAG,YAAU,OAAOA,OAAG,YAAU,CAAC,CAACA,OAAG,OAAO,YAAYA;;;;;ACAj2B,SAAS,EAAE,KAAE,KAAE;CAAC,IAAII,MAAEC,IAAE,SAAOC,IAAE;AAAO,KAAGF,QAAI,GAAE;EAAC,IAAG,CAACA,KAAE,GAAGG,OAAGF;AAAE,SAAOG,IAAEJ,KAAE;GAAC,MAAKE;GAAE,UAASC;GAAE,CAAC;;AAAC,KAAGH,QAAI,GAAE;EAAC,IAAIA,MAAE;GAAC,MAAKE;GAAE,UAASD;GAAE;AAAC,SAAO,OAAO,QAAO,QAAGG,IAAEF,KAAEF,IAAE,EAACA,IAAE;;AAAC,OAAM,MAAM,4BAA4B;;;;;ACAtJ,SAAS,EAAE,GAAGK,KAAE;AAAC,QAAO,EAAE,GAAEA,IAAE;;AAAC,SAAS,EAAE,KAAE;CAAC,IAAIC,MAAEC,KAAEC,sBAAE,IAAI,KAAG;AAAC,SAAO,KAAE,KAAE,MAAI;EAAC,IAAI,IAAEF,IAAEC,KAAEE,KAAE,EAAE;AAAC,SAAOD,IAAE,IAAI,EAAE,GAACH,OAAGG,IAAE,IAAI,EAAE,EAAC;GAAC,MAAK,CAAC;GAAE,SAAQ,CAAC;GAAE,MAAKD;GAAE;;;;;;ACOpP,SAAgB,eAAe,SAAyD;AACtF,QAAOG,EAAS,UAAU,QAAQ;;AAOhC,SAAO,wBAHU,IAAI,qDAAQ,IAAI,4CAAS,GAGvB,wBAFE,IAAI,6EAAgB,MAEN,sBADhB,IAAI,uEAAc;GAErC;;AAGJ,SAAgB,eAAe,WAAyD;CACtF,MAAM,sCAAiBC,WAAS;GAC7B,MAAM,CAAC,CAAC,MAAM,QAAQ,EAAE,KAAK;GAC7B,MAAM,CAAC,EAAE;GACT,MAAM,EAAE;GACR,MAAM,CAAC,CAAC,EAAE;GACV,MAAO,MAAM,QAAQ,EAAE,KAAK,gCAAW,EAAE,KAAK,GAAG,EAAE;EACrD,CAAC;CAEF,MAAMC,OAA+B,EAAE;CAEvC,MAAM,0BAAU,IAAI,KAA8B;CAElD,MAAM,4BAAY,IAAI,KAA8B;CAEpD,MAAM,sCAAsB,IAAI,KAA8B;AAE9D,MAAK,MAAM,QAAQ,QAAQ;EACzB,MAAM,OAAO,KAAK;EAClB,MAAM,UAAU,KAAK;EACrB,MAAM,aAAa,QAAQ,IAAI,QAAQ;EAGvC,MAAM,UAAU,MAAM,QAAQ,KAAK,GAAG,KAAK,UAAU,KAAK,GAAG,QAAQ;EACrE,MAAM,kBAAkB,GAAG,QAAQ,GAAG,QAAQ,GAAG,KAAK;EAEtD,MAAM,cAAc,GAAG,QAAQ,GAAG;AAGlC,MAFgC,oBAAoB,IAAI,YAAY,CAIlE;EAIF,MAAM,YAAY,GAAG,gBAAgB,GAAG,KAAK,WAAW;AAIxD,MAHmB,UAAU,IAAI,UAAU,IAGxB,MAAM,QAAQ,KAAK,IAAI,CAAC,KAAK,mEAAY,WAAY,YAAW,CAAC,KAAK,QACvF;AAGF,MAAI,CAAC,YAAY;GACf,MAAM,UAAU;IACd,GAAG;IACH,MAAM,MAAM,QAAQ,KAAK,GAAG,CAAC,GAAG,IAAI,IAAI,KAAK,CAAC,GAAG;IAClD;AACD,QAAK,KAAK,QAAQ;AAClB,WAAQ,IAAI,SAAS,QAAQ;AAC7B,aAAU,IAAI
,WAAW,QAAQ;AAEjC,OAAI,QAAQ,WACV,qBAAoB,IAAI,aAAa,QAAQ;AAE/C;;AAIF,MAAI,cAAc,MAAM,QAAQ,WAAW,KAAK,IAAI,MAAM,QAAQ,KAAK,KAAK,IAAI,WAAW,eAAe,KAAK,YAAY;AACzH,cAAW,OAAO,CAAC,GAAG,IAAI,IAAI,CAAC,GAAG,WAAW,MAAM,GAAG,KAAK,KAAK,CAAC,CAAC;AAClE;;AAGF,OAAK,KAAK,KAAK;AACf,YAAU,IAAI,WAAW,KAAK;AAE9B,MAAI,KAAK,WACP,qBAAoB,IAAI,aAAa,KAAK;;AAI9C,QAAO;;AAGT,SAAgB,eAAe,SAAiC,WAAiC,QAAyC;CACxI,MAAM,qCAAqB,IAAI,KAAa;AAC5C,MAAK,MAAM,QAAQD,WAAS;EAC1B,MAAM,EAAE,SAAS;AACjB,MAAI,CAAC,KACH;AAGF,MAAI,MAAM,QAAQ,KAAK,EAAE;AACvB,QAAK,MAAM,SAAS,KAClB,KAAI,MACF,oBAAmB,IAAI,MAAM;AAGjC;;AAGF,qBAAmB,IAAI,KAAK;;CAG9B,MAAM,6BAAa,IAAI,KAAsB;CAC7C,MAAM,qBAAqB,eAAgC;AACzD,MAAI,CAAC,OACH,QAAO;EAGT,MAAM,SAAS,WAAW,IAAI,WAAW;AACzC,MAAI,WAAW,OACb,QAAO;EAGT,MAAM,SAAS,OAAO,SAAS,WAAW,IAAI,mBAAmB,IAAI,WAAW;AAChF,aAAW,IAAI,YAAY,OAAO;AAElC,SAAO;;CAGT,MAAM,sCAAiB,SAAS;GAC7B,MAAM,CAAC,CAAC,MAAM,QAAQ,EAAE,KAAK;GAC7B,MAAM,CAAC,EAAE;GACT,MAAM,EAAE;GACR,MAAM,CAAC,CAAC,EAAE;GACV,MAAO,MAAM,QAAQ,EAAE,KAAK,gCAAW,EAAE,KAAK,GAAG,EAAE;EACrD,CAAC;CAEF,MAAME,OAA+B,EAAE;CAEvC,MAAM,8BAAc,IAAI,KAA8B;CAEtD,MAAM,4BAAY,IAAI,KAA8B;CAEpD,MAAM,sCAAsB,IAAI,KAA8B;AAE9D,MAAK,MAAM,QAAQ,QAAQ;EACzB,IAAI,OAAO,MAAM,QAAQ,KAAK,KAAK,GAAG,CAAC,GAAG,IAAI,IAAI,KAAK,KAAK,CAAC,GAAG,KAAK;AAErE,MAAI,KAAK,SAAS,KAAK,KAErB;AAIF,MAAI,MAAM,QAAQ,KAAK,CACrB,QAAO,KAAK,QAAQ,SAAU,OAAO,SAAS,WAAW,kBAAkB,KAAK,GAAG,kBAAkB,KAAK,aAAa,CAAE;EAG3H,MAAM,cAAc,GAAG,KAAK,KAAK,GAAG,KAAK;EACzC,MAAM,aAAa,YAAY,IAAI,YAAY;EAG/C,MAAM,UAAU,MAAM,QAAQ,KAAK,GAAG,KAAK,UAAU,KAAK,GAAG,QAAQ;EACrE,MAAM,kBAAkB,GAAG,KAAK,KAAK,GAAG,QAAQ,GAAG,KAAK;EACxD,MAAM,aAAa,UAAU,IAAI,gBAAgB;EAEjD,MAAM,cAAc,GAAG,KAAK,KAAK,GAAG;AAGpC,MAFoC,oBAAoB,IAAI,YAAY,CAItE;AAIF,MAAI,cAAe,MAAM,QAAQ,KAAK,IAAI,CAAC,KAAK,OAC9C;AAIF,MAAI,CAAC,YAAY;GACf,MAAM,UAAU;IACd,GAAG;IACH;IACD;AACD,QAAK,KAAK,QAAQ;AAClB,eAAY,IAAI,aAAa,QAAQ;AACrC,aAAU,IAAI,iBAAiB,QAAQ;AAEvC,OAAI,QAAQ,WACV,qBAAoB,IAAI,aAAa,QAAQ;AAE/C;;AAIF,MAAI,cAAc,MAAM,QAAQ,WAAW,KAAK,IAAI,MAAM,QAAQ,KAAK,IAAI,WAAW,eAAe,KAAK,YAAY;AACpH,cAAW,OAAO,CAAC,GAAG,IAAI,IAAI,CAAC,GAAG,WAAW,MAAM,GAAG,KAAK,CAAC,CAAC;AAC7D;;AAIF,MAAI,CAAC,MAAM,QAAQ,KAAK,IAAI,QAAQ,CAAC,kBAAkB,KAAK,CAC1D;AAGF,OAAK,KAAK,KAAK;AACf,YAAU,IAAI,iBAAiB,KAAK;AAEpC,MAAI,KAAK,WACP,qBAAoB,IAAI,aAAa,KAAK;;AAI9C,QAAO;;;;;AAMT,SAAgB,WAA0C,MAA0D;;CAClH,MAAM,UAAUC,kBAAK,QAAQ,KAAK,SAAS;AAC3C,KAAI,CAAC,QACH,OAAM,IAAI,MAAM,wBAAwB,KAAK,WAAW;CAG1D,MAAM,SAAS,KAAK,QAAQ,KAAK,SAAS,KAAK,MAAM,CAAC,KAAK,OAAO;CAClE,MAAMH,8BAAU,KAAK,uEAAS,UAAS,eAAe,KAAK,QAAQ,GAAG,EAAE;CACxE,MAAM,4BAAU,KAAK,uEAAS,WAAU,SAAS,eAAe,KAAK,SAASA,WAAS,OAAO,GAAG,EAAE;CACnG,MAAM,4BAAU,KAAK,uEAAS,UAAS,eAAe,KAAK,QAAQ,GAAG,EAAE;AAExE,QAAO;EACL,GAAG;EACH,gCAAe,SAAS,CAAC,OAAO,KAAK,KAAK,CAAC,OAAO,MAAM;EACxD,MAAMI,gCAAY,KAAK,SAAS;EAChC;EACS;EACT,SAASJ;EACA;EACT,MAAM,KAAK,QAAS,EAAE;EACvB"}
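The removed defineProperty-Db0mqPmh.cjs.map above references the remeda 2.32.2 helper chunks and, like the added map, inlines src/createFile.ts in its sourcesContent. That inlined source spells out the deduplication rule combineSources applies through remeda's uniqueBy: named sources are keyed by name plus their isExportable/isTypeOnly flags, unnamed sources fall back to their value so distinct code blocks survive, and a source with neither keys to the empty string. A small sketch of that rule follows, using a simplified stand-in for the KubbFile.Source type (an assumption for illustration only).

import { uniqueBy } from 'remeda'

// Simplified stand-in for KubbFile.Source; the real type is imported from ./KubbFile.ts.
type Source = {
  name?: string
  value?: string
  isExportable?: boolean
  isTypeOnly?: boolean
}

// Same key as the inlined combineSources: `${name ?? value ?? ''}:${isExportable}:${isTypeOnly}`.
function dedupeSources(sources: Array<Source>): Array<Source> {
  return uniqueBy(sources, (obj) => {
    const uniqueId = obj.name ?? obj.value ?? ''
    const isExportable = obj.isExportable ?? false
    const isTypeOnly = obj.isTypeOnly ?? false
    return `${uniqueId}:${isExportable}:${isTypeOnly}`
  })
}

// The two identical named sources collapse to one entry; both unnamed blocks are
// kept because their values differ.
dedupeSources([
  { name: 'Pet', value: 'export type Pet = { id: number }', isExportable: true, isTypeOnly: true },
  { name: 'Pet', value: 'export type Pet = { id: number }', isExportable: true, isTypeOnly: true },
  { value: 'const a = 1' },
  { value: 'const b = 2' },
])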