@kubb/fabric-core 0.1.0 → 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. package/dist/createFileParser-BD8yn0LT.cjs +14 -0
  2. package/dist/createFileParser-BD8yn0LT.cjs.map +1 -0
  3. package/dist/createFileParser-Cix3AMLd.js +8 -0
  4. package/dist/createFileParser-Cix3AMLd.js.map +1 -0
  5. package/dist/default-DCpuPmrL.js +10 -0
  6. package/dist/default-DCpuPmrL.js.map +1 -0
  7. package/dist/default-DNBu_jsL.cjs +15 -0
  8. package/dist/default-DNBu_jsL.cjs.map +1 -0
  9. package/dist/{defineApp-Bg7JewJQ.d.ts → defineApp-CZYKsxTp.d.ts} +43 -10
  10. package/dist/{defineApp-DKW3IRO8.d.cts → defineApp-c9lWJ96_.d.cts} +43 -10
  11. package/dist/index.cjs +107 -40
  12. package/dist/index.cjs.map +1 -1
  13. package/dist/index.d.cts +13 -4
  14. package/dist/index.d.ts +13 -4
  15. package/dist/index.js +97 -33
  16. package/dist/index.js.map +1 -1
  17. package/dist/parsers/default.cjs +4 -0
  18. package/dist/parsers/default.d.cts +8 -0
  19. package/dist/parsers/default.d.ts +8 -0
  20. package/dist/parsers/default.js +4 -0
  21. package/dist/parsers/tsx.cjs +4 -2
  22. package/dist/parsers/tsx.d.cts +2 -2
  23. package/dist/parsers/tsx.d.ts +2 -2
  24. package/dist/parsers/tsx.js +3 -1
  25. package/dist/parsers/typescript.cjs +6 -5
  26. package/dist/parsers/typescript.d.cts +2 -2
  27. package/dist/parsers/typescript.d.ts +2 -2
  28. package/dist/parsers/typescript.js +2 -1
  29. package/dist/tsx-BSUaIML3.cjs +16 -0
  30. package/dist/tsx-BSUaIML3.cjs.map +1 -0
  31. package/dist/tsx-DBAk9dqS.js +11 -0
  32. package/dist/tsx-DBAk9dqS.js.map +1 -0
  33. package/dist/types-CkbelZaS.d.ts +15 -0
  34. package/dist/types-GueHciQ3.d.cts +15 -0
  35. package/dist/types.d.cts +1 -1
  36. package/dist/types.d.ts +1 -1
  37. package/dist/{parser-CWB_OBtr.js → typescript-C60gWBu8.js} +4 -34
  38. package/dist/typescript-C60gWBu8.js.map +1 -0
  39. package/dist/{parser-QF8j8-pj.cjs → typescript-Z90jN87k.cjs} +5 -47
  40. package/dist/typescript-Z90jN87k.cjs.map +1 -0
  41. package/package.json +8 -1
  42. package/src/FileManager.ts +14 -193
  43. package/src/FileProcessor.ts +86 -0
  44. package/src/createFile.ts +167 -0
  45. package/src/defineApp.ts +5 -5
  46. package/src/index.ts +3 -1
  47. package/src/parsers/createFileParser.ts +5 -0
  48. package/src/parsers/default.ts +7 -0
  49. package/src/parsers/tsx.ts +1 -1
  50. package/src/parsers/types.ts +12 -0
  51. package/src/parsers/typescript.ts +1 -1
  52. package/src/utils/EventEmitter.ts +23 -0
  53. package/dist/parser-CWB_OBtr.js.map +0 -1
  54. package/dist/parser-D64DdV1v.d.cts +0 -21
  55. package/dist/parser-QF8j8-pj.cjs.map +0 -1
  56. package/dist/parser-yYqnryUV.d.ts +0 -21
  57. package/src/parsers/parser.ts +0 -56
package/dist/{parser-QF8j8-pj.cjs → typescript-Z90jN87k.cjs} CHANGED
@@ -21,14 +21,15 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
  }) : target, mod));

  //#endregion
- let typescript = require("typescript");
- typescript = __toESM(typescript);
+ const require_createFileParser = require('./createFileParser-BD8yn0LT.cjs');
  let node_path = require("node:path");
  node_path = __toESM(node_path);
  let fs_extra = require("fs-extra");
  fs_extra = __toESM(fs_extra);
  let js_runtime = require("js-runtime");
  js_runtime = __toESM(js_runtime);
+ let typescript = require("typescript");
+ typescript = __toESM(typescript);

  //#region src/fs.ts
  async function write(path$1, data, options = {}) {
@@ -142,7 +143,7 @@ function createExport({ path: path$1, asAlias, isTypeOnly = false, name }) {
  return factory.createExportSpecifier(false, void 0, typeof propertyName === "string" ? factory.createIdentifier(propertyName) : propertyName);
  })), factory.createStringLiteral(path$1), void 0);
  }
- const typeScriptParser = createFileParser({ async print(file, options = { extname: ".ts" }) {
+ const typeScriptParser = require_createFileParser.createFileParser({ async print(file, options = { extname: ".ts" }) {
  const source = file.sources.map((item) => item.value).join("\n\n");
  const importNodes = file.imports.map((item) => {
  const importPath = item.root ? getRelativePath(item.root, item.path) : item.path;
@@ -171,37 +172,6 @@ const typeScriptParser = createFileParser({ async print(file, options = { extnam
  ].join("\n");
  } });

- //#endregion
- //#region src/parsers/tsx.ts
- const tsxParser = createFileParser({ async print(file, options = { extname: ".tsx" }) {
- return typeScriptParser.print(file, options);
- } });
-
- //#endregion
- //#region src/parsers/parser.ts
- function createFileParser(parser) {
- return parser;
- }
- const defaultParser = createFileParser({ async print(file) {
- return file.sources.map((item) => item.value).join("\n\n");
- } });
- const parsers = {
- ".ts": typeScriptParser,
- ".js": typeScriptParser,
- ".jsx": tsxParser,
- ".tsx": tsxParser,
- ".json": defaultParser
- };
- async function parseFile(file, { extname } = {}) {
- async function getFileParser(extname$1) {
- if (!extname$1) return defaultParser;
- const parser = parsers[extname$1];
- if (!parser) console.warn(`[parser] No parser found for ${extname$1}, default parser will be used`);
- return parser || defaultParser;
- }
- return (await getFileParser(file.extname)).print(file, { extname });
- }
-
  //#endregion
  Object.defineProperty(exports, '__toESM', {
  enumerable: true,
@@ -221,12 +191,6 @@ Object.defineProperty(exports, 'createImport', {
  return createImport;
  }
  });
- Object.defineProperty(exports, 'parseFile', {
- enumerable: true,
- get: function () {
- return parseFile;
- }
- });
  Object.defineProperty(exports, 'print', {
  enumerable: true,
  get: function () {
@@ -239,12 +203,6 @@ Object.defineProperty(exports, 'trimExtName', {
  return trimExtName;
  }
  });
- Object.defineProperty(exports, 'tsxParser', {
- enumerable: true,
- get: function () {
- return tsxParser;
- }
- });
  Object.defineProperty(exports, 'typeScriptParser', {
  enumerable: true,
  get: function () {
@@ -257,4 +215,4 @@ Object.defineProperty(exports, 'write', {
  return write;
  }
  });
- //# sourceMappingURL=parser-QF8j8-pj.cjs.map
+ //# sourceMappingURL=typescript-Z90jN87k.cjs.map
package/dist/typescript-Z90jN87k.cjs.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"typescript-Z90jN87k.cjs","names":["fs","path","data","ts","output: string","path","importPropertyName: ts.Identifier | undefined","importName: ts.NamedImportBindings | undefined","createFileParser"],"sources":["../src/fs.ts","../src/parsers/typescript.ts"],"sourcesContent":["import { normalize, relative, resolve } from 'node:path'\nimport fs from 'fs-extra'\nimport { switcher } from 'js-runtime'\n\ntype Options = { sanity?: boolean }\n\nexport async function write(path: string, data: string, options: Options = {}): Promise<string | undefined> {\n if (data.trim() === '') {\n return undefined\n }\n return switcher(\n {\n node: async (path: string, data: string, { sanity }: Options) => {\n try {\n const oldContent = await fs.readFile(resolve(path), {\n encoding: 'utf-8',\n })\n if (oldContent?.toString() === data?.toString()) {\n return\n }\n } catch (_err) {\n /* empty */\n }\n\n await fs.outputFile(resolve(path), data, { encoding: 'utf-8' })\n\n if (sanity) {\n const savedData = await fs.readFile(resolve(path), {\n encoding: 'utf-8',\n })\n\n if (savedData?.toString() !== data?.toString()) {\n throw new Error(`Sanity check failed for ${path}\\n\\nData[${data.length}]:\\n${data}\\n\\nSaved[${savedData.length}]:\\n${savedData}\\n`)\n }\n\n return savedData\n }\n\n return data\n },\n bun: async (path: string, data: string, { sanity }: Options) => {\n try {\n await Bun.write(resolve(path), data)\n\n if (sanity) {\n const file = Bun.file(resolve(path))\n const savedData = await file.text()\n\n if (savedData?.toString() !== data?.toString()) {\n throw new Error(`Sanity check failed for ${path}\\n\\nData[${path.length}]:\\n${path}\\n\\nSaved[${savedData.length}]:\\n${savedData}\\n`)\n }\n\n return savedData\n }\n\n return data\n } catch (e) {\n console.error(e)\n }\n },\n },\n 'node',\n )(path, data.trim(), options)\n}\n\nexport async function read(path: string): Promise<string> {\n return switcher(\n {\n node: async (path: string) => {\n return fs.readFile(path, { encoding: 'utf8' })\n },\n bun: async (path: string) => {\n const file = Bun.file(path)\n\n return file.text()\n },\n },\n 'node',\n )(path)\n}\n\nexport function readSync(path: string): string {\n return switcher(\n {\n node: (path: string) => {\n return fs.readFileSync(path, { encoding: 'utf8' })\n },\n bun: () => {\n throw new Error('Bun cannot read sync')\n },\n },\n 'node',\n )(path)\n}\n\nexport async function exists(path: string): Promise<boolean> {\n return switcher(\n {\n node: async (path: string) => {\n return fs.pathExists(path)\n },\n bun: async (path: string) => {\n const file = Bun.file(path)\n\n return file.exists()\n },\n },\n 'node',\n )(path)\n}\n\nexport function existsSync(path: string): boolean {\n return switcher(\n {\n node: (path: string) => {\n return fs.pathExistsSync(path)\n },\n bun: () => {\n throw new Error('Bun cannot read sync')\n },\n },\n 'node',\n )(path)\n}\n\nexport async function clean(path: string): Promise<void> {\n return fs.remove(path)\n}\n\nexport async function unlink(path: string): Promise<void> {\n return fs.unlink(path)\n}\n\nfunction slash(path: string, platform: 'windows' | 'mac' | 'linux' = 'linux') {\n const isWindowsPath = /^\\\\\\\\\\?\\\\/.test(path)\n const normalizedPath = normalize(path)\n\n if (['linux', 'mac'].includes(platform) && !isWindowsPath) {\n // linux and mac\n return normalizedPath.replaceAll(/\\\\/g, '/').replace('../', '')\n }\n\n // windows\n return normalizedPath.replaceAll(/\\\\/g, '/').replace('../', '')\n}\n\nexport function 
trimExtName(text: string): string {\n return text.replace(/\\.[^/.]+$/, '')\n}\n\nexport function getRelativePath(rootDir?: string | null, filePath?: string | null, platform: 'windows' | 'mac' | 'linux' = 'linux'): string {\n if (!rootDir || !filePath) {\n throw new Error(`Root and file should be filled in when retrieving the relativePath, ${rootDir || ''} ${filePath || ''}`)\n }\n\n const relativePath = relative(rootDir, filePath)\n\n // On Windows, paths are separated with a \"\\\"\n // However, web browsers use \"/\" no matter the platform\n const slashedPath = slash(relativePath, platform)\n\n if (slashedPath.startsWith('../')) {\n return slashedPath\n }\n\n return `./${slashedPath}`\n}\n","import ts from 'typescript'\nimport { getRelativePath, trimExtName } from '../fs.ts'\nimport path from 'node:path'\nimport { createFileParser } from './createFileParser.ts'\n\nconst { factory } = ts\n\ntype PrintOptions = {\n source?: string\n baseName?: string\n scriptKind?: ts.ScriptKind\n}\n\n/**\n * Escaped new lines in code with block comments so they can be restored by {@link restoreNewLines}\n */\nconst escapeNewLines = (code: string) => code.replace(/\\n\\n/g, '\\n/* :newline: */')\n\n/**\n * Reverses {@link escapeNewLines} and restores new lines\n */\nconst restoreNewLines = (code: string) => code.replace(/\\/\\* :newline: \\*\\//g, '\\n')\n\n/**\n * Convert AST TypeScript/TSX nodes to a string based on the TypeScript printer.\n * Ensures consistent output across environments.\n * Also works as a formatter when `source` is provided without `elements`.\n */\nexport function print(elements: Array<ts.Node> = [], { source = '', baseName = 'print.tsx', scriptKind = ts.ScriptKind.TSX }: PrintOptions = {}): string {\n const sourceFile = ts.createSourceFile(baseName, escapeNewLines(source), ts.ScriptTarget.ES2022, true, scriptKind)\n\n const printer = ts.createPrinter({\n omitTrailingSemicolon: true,\n newLine: ts.NewLineKind.LineFeed,\n removeComments: false,\n noEmitHelpers: true,\n })\n\n let output: string\n\n if (elements.length > 0) {\n // Print only provided nodes\n const nodes = elements.filter(Boolean).sort((a, b) => (a.pos ?? 0) - (b.pos ?? 0))\n output = printer.printList(ts.ListFormat.MultiLine, factory.createNodeArray(nodes), sourceFile)\n } else {\n // Format the whole file\n output = printer.printFile(sourceFile)\n }\n\n return restoreNewLines(output).replace(/\\r\\n/g, '\\n')\n}\n\nexport function createImport({\n name,\n path,\n root,\n isTypeOnly = false,\n isNameSpace = false,\n}: {\n name: string | Array<string | { propertyName: string; name?: string }>\n path: string\n root?: string\n isTypeOnly?: boolean\n isNameSpace?: boolean\n}) {\n const resolvePath = root ? 
getRelativePath(root, path) : path\n\n if (!Array.isArray(name)) {\n let importPropertyName: ts.Identifier | undefined = factory.createIdentifier(name)\n let importName: ts.NamedImportBindings | undefined\n\n if (isNameSpace) {\n importPropertyName = undefined\n importName = factory.createNamespaceImport(factory.createIdentifier(name))\n }\n\n return factory.createImportDeclaration(\n undefined,\n factory.createImportClause(isTypeOnly, importPropertyName, importName),\n factory.createStringLiteral(resolvePath),\n undefined,\n )\n }\n\n return factory.createImportDeclaration(\n undefined,\n factory.createImportClause(\n isTypeOnly,\n undefined,\n factory.createNamedImports(\n name.map((item) => {\n if (typeof item === 'object') {\n const obj = item as { propertyName: string; name?: string }\n if (obj.name) {\n return factory.createImportSpecifier(false, factory.createIdentifier(obj.propertyName), factory.createIdentifier(obj.name))\n }\n\n return factory.createImportSpecifier(false, undefined, factory.createIdentifier(obj.propertyName))\n }\n\n return factory.createImportSpecifier(false, undefined, factory.createIdentifier(item))\n }),\n ),\n ),\n factory.createStringLiteral(resolvePath),\n undefined,\n )\n}\n\nexport function createExport({\n path,\n asAlias,\n isTypeOnly = false,\n name,\n}: {\n path: string\n asAlias?: boolean\n isTypeOnly?: boolean\n name?: string | Array<ts.Identifier | string>\n}) {\n if (name && !Array.isArray(name) && !asAlias) {\n console.warn(`When using name as string, asAlias should be true ${name}`)\n }\n\n if (!Array.isArray(name)) {\n const parsedName = name?.match(/^\\d/) ? `_${name?.slice(1)}` : name\n\n return factory.createExportDeclaration(\n undefined,\n isTypeOnly,\n asAlias && parsedName ? factory.createNamespaceExport(factory.createIdentifier(parsedName)) : undefined,\n factory.createStringLiteral(path),\n undefined,\n )\n }\n\n return factory.createExportDeclaration(\n undefined,\n isTypeOnly,\n factory.createNamedExports(\n name.map((propertyName) => {\n return factory.createExportSpecifier(false, undefined, typeof propertyName === 'string' ? factory.createIdentifier(propertyName) : propertyName)\n }),\n ),\n factory.createStringLiteral(path),\n undefined,\n )\n}\n\nexport const typeScriptParser = createFileParser({\n async print(file, options = { extname: '.ts' }) {\n const source = file.sources.map((item) => item.value).join('\\n\\n')\n\n const importNodes = file.imports\n .map((item) => {\n const importPath = item.root ? getRelativePath(item.root, item.path) : item.path\n const hasExtname = !!path.extname(importPath)\n\n return createImport({\n name: item.name,\n path: options.extname && hasExtname ? `${trimExtName(importPath)}${options.extname}` : item.root ? trimExtName(importPath) : importPath,\n isTypeOnly: item.isTypeOnly,\n })\n })\n .filter(Boolean)\n\n const exportNodes = file.exports\n .map((item) => {\n const exportPath = item.path\n\n const hasExtname = !!path.extname(exportPath)\n\n return createExport({\n name: item.name,\n path: options.extname && hasExtname ? 
`${trimExtName(item.path)}${options.extname}` : trimExtName(item.path),\n isTypeOnly: item.isTypeOnly,\n asAlias: item.asAlias,\n })\n })\n .filter(Boolean)\n\n return [file.banner, print([...importNodes, ...exportNodes]), source, file.footer].join('\\n')\n },\n})\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAMA,eAAsB,MAAM,QAAc,MAAc,UAAmB,EAAE,EAA+B;AAC1G,KAAI,KAAK,MAAM,KAAK,GAClB;AAEF,iCACE;EACE,MAAM,OAAO,QAAc,QAAc,EAAE,aAAsB;AAC/D,OAAI;IACF,MAAM,aAAa,MAAMA,iBAAG,gCAAiBC,OAAK,EAAE,EAClD,UAAU,SACX,CAAC;AACF,iEAAI,WAAY,UAAU,uDAAKC,OAAM,UAAU,EAC7C;YAEK,MAAM;AAIf,SAAMF,iBAAG,kCAAmBC,OAAK,EAAEC,QAAM,EAAE,UAAU,SAAS,CAAC;AAE/D,OAAI,QAAQ;IACV,MAAM,YAAY,MAAMF,iBAAG,gCAAiBC,OAAK,EAAE,EACjD,UAAU,SACX,CAAC;AAEF,+DAAI,UAAW,UAAU,uDAAKC,OAAM,UAAU,EAC5C,OAAM,IAAI,MAAM,2BAA2BD,OAAK,WAAWC,OAAK,OAAO,MAAMA,OAAK,YAAY,UAAU,OAAO,MAAM,UAAU,IAAI;AAGrI,WAAO;;AAGT,UAAOA;;EAET,KAAK,OAAO,QAAc,QAAc,EAAE,aAAsB;AAC9D,OAAI;AACF,UAAM,IAAI,6BAAcD,OAAK,EAAEC,OAAK;AAEpC,QAAI,QAAQ;KAEV,MAAM,YAAY,MADL,IAAI,4BAAaD,OAAK,CAAC,CACP,MAAM;AAEnC,gEAAI,UAAW,UAAU,uDAAKC,OAAM,UAAU,EAC5C,OAAM,IAAI,MAAM,2BAA2BD,OAAK,WAAWA,OAAK,OAAO,MAAMA,OAAK,YAAY,UAAU,OAAO,MAAM,UAAU,IAAI;AAGrI,YAAO;;AAGT,WAAOC;YACA,GAAG;AACV,YAAQ,MAAM,EAAE;;;EAGrB,EACD,OACD,CAACD,QAAM,KAAK,MAAM,EAAE,QAAQ;;AAuE/B,SAAS,MAAM,QAAc,WAAwC,SAAS;CAC5E,MAAM,gBAAgB,YAAY,KAAKA,OAAK;CAC5C,MAAM,0CAA2BA,OAAK;AAEtC,KAAI,CAAC,SAAS,MAAM,CAAC,SAAS,SAAS,IAAI,CAAC,cAE1C,QAAO,eAAe,WAAW,OAAO,IAAI,CAAC,QAAQ,OAAO,GAAG;AAIjE,QAAO,eAAe,WAAW,OAAO,IAAI,CAAC,QAAQ,OAAO,GAAG;;AAGjE,SAAgB,YAAY,MAAsB;AAChD,QAAO,KAAK,QAAQ,aAAa,GAAG;;AAGtC,SAAgB,gBAAgB,SAAyB,UAA0B,WAAwC,SAAiB;AAC1I,KAAI,CAAC,WAAW,CAAC,SACf,OAAM,IAAI,MAAM,uEAAuE,WAAW,GAAG,GAAG,YAAY,KAAK;CAO3H,MAAM,cAAc,8BAJU,SAAS,SAAS,EAIR,SAAS;AAEjD,KAAI,YAAY,WAAW,MAAM,CAC/B,QAAO;AAGT,QAAO,KAAK;;;;;AChKd,MAAM,EAAE,YAAYE;;;;AAWpB,MAAM,kBAAkB,SAAiB,KAAK,QAAQ,SAAS,oBAAoB;;;;AAKnF,MAAM,mBAAmB,SAAiB,KAAK,QAAQ,wBAAwB,KAAK;;;;;;AAOpF,SAAgB,MAAM,WAA2B,EAAE,EAAE,EAAE,SAAS,IAAI,WAAW,aAAa,aAAaA,mBAAG,WAAW,QAAsB,EAAE,EAAU;CACvJ,MAAM,aAAaA,mBAAG,iBAAiB,UAAU,eAAe,OAAO,EAAEA,mBAAG,aAAa,QAAQ,MAAM,WAAW;CAElH,MAAM,UAAUA,mBAAG,cAAc;EAC/B,uBAAuB;EACvB,SAASA,mBAAG,YAAY;EACxB,gBAAgB;EAChB,eAAe;EAChB,CAAC;CAEF,IAAIC;AAEJ,KAAI,SAAS,SAAS,GAAG;EAEvB,MAAM,QAAQ,SAAS,OAAO,QAAQ,CAAC,MAAM,GAAG,MAAM;;qBAAC,EAAE,8CAAO,gBAAM,EAAE,8CAAO;IAAG;AAClF,WAAS,QAAQ,UAAUD,mBAAG,WAAW,WAAW,QAAQ,gBAAgB,MAAM,EAAE,WAAW;OAG/F,UAAS,QAAQ,UAAU,WAAW;AAGxC,QAAO,gBAAgB,OAAO,CAAC,QAAQ,SAAS,KAAK;;AAGvD,SAAgB,aAAa,EAC3B,MACA,cACA,MACA,aAAa,OACb,cAAc,SAOb;CACD,MAAM,cAAc,OAAO,gBAAgB,MAAME,OAAK,GAAGA;AAEzD,KAAI,CAAC,MAAM,QAAQ,KAAK,EAAE;EACxB,IAAIC,qBAAgD,QAAQ,iBAAiB,KAAK;EAClF,IAAIC;AAEJ,MAAI,aAAa;AACf,wBAAqB;AACrB,gBAAa,QAAQ,sBAAsB,QAAQ,iBAAiB,KAAK,CAAC;;AAG5E,SAAO,QAAQ,wBACb,QACA,QAAQ,mBAAmB,YAAY,oBAAoB,WAAW,EACtE,QAAQ,oBAAoB,YAAY,EACxC,OACD;;AAGH,QAAO,QAAQ,wBACb,QACA,QAAQ,mBACN,YACA,QACA,QAAQ,mBACN,KAAK,KAAK,SAAS;AACjB,MAAI,OAAO,SAAS,UAAU;GAC5B,MAAM,MAAM;AACZ,OAAI,IAAI,KACN,QAAO,QAAQ,sBAAsB,OAAO,QAAQ,iBAAiB,IAAI,aAAa,EAAE,QAAQ,iBAAiB,IAAI,KAAK,CAAC;AAG7H,UAAO,QAAQ,sBAAsB,OAAO,QAAW,QAAQ,iBAAiB,IAAI,aAAa,CAAC;;AAGpG,SAAO,QAAQ,sBAAsB,OAAO,QAAW,QAAQ,iBAAiB,KAAK,CAAC;GACtF,CACH,CACF,EACD,QAAQ,oBAAoB,YAAY,EACxC,OACD;;AAGH,SAAgB,aAAa,EAC3B,cACA,SACA,aAAa,OACb,QAMC;AACD,KAAI,QAAQ,CAAC,MAAM,QAAQ,KAAK,IAAI,CAAC,QACnC,SAAQ,KAAK,qDAAqD,OAAO;AAG3E,KAAI,CAAC,MAAM,QAAQ,KAAK,EAAE;EACxB,MAAM,0DAAa,KAAM,MAAM,MAAM,IAAG,gDAAI,KAAM,MAAM,EAAE,KAAK;AAE/D,SAAO,QAAQ,wBACb,QACA,YACA,WAAW,aAAa,QAAQ,sBAAsB,QAAQ,iBAAiB,WAAW,CAAC,GAAG,QAC9F,QAAQ,oBAAoBF,OAAK,EACjC,OACD;;AAGH,QAAO,QAAQ,wBACb,QACA,YACA,QAAQ,mBACN,KAAK,KAAK,iBAAiB;AACzB,SAA
O,QAAQ,sBAAsB,OAAO,QAAW,OAAO,iBAAiB,WAAW,QAAQ,iBAAiB,aAAa,GAAG,aAAa;GAChJ,CACH,EACD,QAAQ,oBAAoBA,OAAK,EACjC,OACD;;AAGH,MAAa,mBAAmBG,0CAAiB,EAC/C,MAAM,MAAM,MAAM,UAAU,EAAE,SAAS,OAAO,EAAE;CAC9C,MAAM,SAAS,KAAK,QAAQ,KAAK,SAAS,KAAK,MAAM,CAAC,KAAK,OAAO;CAElE,MAAM,cAAc,KAAK,QACtB,KAAK,SAAS;EACb,MAAM,aAAa,KAAK,OAAO,gBAAgB,KAAK,MAAM,KAAK,KAAK,GAAG,KAAK;EAC5E,MAAM,aAAa,CAAC,CAACH,kBAAK,QAAQ,WAAW;AAE7C,SAAO,aAAa;GAClB,MAAM,KAAK;GACX,MAAM,QAAQ,WAAW,aAAa,GAAG,YAAY,WAAW,GAAG,QAAQ,YAAY,KAAK,OAAO,YAAY,WAAW,GAAG;GAC7H,YAAY,KAAK;GAClB,CAAC;GACF,CACD,OAAO,QAAQ;CAElB,MAAM,cAAc,KAAK,QACtB,KAAK,SAAS;EACb,MAAM,aAAa,KAAK;EAExB,MAAM,aAAa,CAAC,CAACA,kBAAK,QAAQ,WAAW;AAE7C,SAAO,aAAa;GAClB,MAAM,KAAK;GACX,MAAM,QAAQ,WAAW,aAAa,GAAG,YAAY,KAAK,KAAK,GAAG,QAAQ,YAAY,YAAY,KAAK,KAAK;GAC5G,YAAY,KAAK;GACjB,SAAS,KAAK;GACf,CAAC;GACF,CACD,OAAO,QAAQ;AAElB,QAAO;EAAC,KAAK;EAAQ,MAAM,CAAC,GAAG,aAAa,GAAG,YAAY,CAAC;EAAE;EAAQ,KAAK;EAAO,CAAC,KAAK,KAAK;GAEhG,CAAC"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@kubb/fabric-core",
- "version": "0.1.0",
+ "version": "0.1.1",
  "description": "Core functionality for Kubb's plugin-based code generation system, providing the foundation for transforming OpenAPI specifications.",
  "keywords": [
  "typescript",
@@ -26,6 +26,10 @@
  "import": "./dist/index.js",
  "require": "./dist/index.cjs"
  },
+ "./parsers/default": {
+ "import": "./dist/parsers/default.js",
+ "require": "./dist/parsers/default.cjs"
+ },
  "./parsers/tsx": {
  "import": "./dist/parsers/tsx.js",
  "require": "./dist/parsers/tsx.cjs"
@@ -51,6 +55,9 @@
  "parsers/tsx": [
  "./dist/parsers/tsx.d.ts"
  ],
+ "parsers/default": [
+ "./dist/parsers/default.d.ts"
+ ],
  "types": [
  "./dist/types.d.ts"
  ]
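Note: besides the version bump, the exports map (and typesVersions) gains a ./parsers/default subpath in 0.1.1. A consumer sketch follows; it assumes the subpath re-exports the defaultParser that FileProcessor.ts imports internally (the dist file contents are not shown in this diff), and the file object is hypothetical.

import { defaultParser } from '@kubb/fabric-core/parsers/default'

// Hypothetical file shape; the real KubbFile.ResolvedFile type ships with the package.
const file = {
  path: 'gen/pets.json',
  baseName: 'pets.json',
  extname: '.json',
  sources: [{ value: '{ "name": "pets" }' }],
  imports: [],
  exports: [],
} as any

// The default parser simply joins the file's source values (see src/parsers/default.ts
// and the defaultParser removed from the bundled chunk above).
const output = await defaultParser.print(file)
console.log(output)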
package/src/FileManager.ts CHANGED
@@ -1,25 +1,11 @@
- import pLimit from 'p-limit'
-
  import type * as KubbFile from './KubbFile.ts'
- import { parseFile } from './parsers/parser.ts'
  import { Cache } from './utils/Cache.ts'
- import { trimExtName, write } from './fs.ts'
- import { createHash } from 'node:crypto'
- import path from 'node:path'
+ import { trimExtName } from './fs.ts'
  import { orderBy } from 'natural-orderby'
- import { isDeepEqual, uniqueBy } from 'remeda'
-
- type WriteFilesProps = {
- extension?: Record<KubbFile.Extname, KubbFile.Extname | ''>
- dryRun?: boolean
- }
+ import { createFile } from './createFile.ts'
+ import { FileProcessor, type ProcessFilesProps } from './FileProcessor.ts'

- function hashObject(obj: Record<string, unknown>): string {
- const str = JSON.stringify(obj, Object.keys(obj).sort())
- return createHash('sha256').update(str).digest('hex')
- }
-
- export function mergeFile<TMeta extends object = object>(a: KubbFile.File<TMeta>, b: KubbFile.File<TMeta>): KubbFile.File<TMeta> {
+ function mergeFile<TMeta extends object = object>(a: KubbFile.File<TMeta>, b: KubbFile.File<TMeta>): KubbFile.File<TMeta> {
  return {
  ...a,
  sources: [...(a.sources || []), ...(b.sources || [])],
@@ -28,165 +14,9 @@ export function mergeFile<TMeta extends object = object>(a: KubbFile.File<TMeta>
  }
  }

- export function combineSources(sources: Array<KubbFile.Source>): Array<KubbFile.Source> {
- return uniqueBy(sources, (obj) => [obj.name, obj.isExportable, obj.isTypeOnly] as const)
- }
-
- export function combineExports(exports: Array<KubbFile.Export>): Array<KubbFile.Export> {
- return orderBy(exports, [
- (v) => !!Array.isArray(v.name),
- (v) => !v.isTypeOnly,
- (v) => v.path,
- (v) => !!v.name,
- (v) => (Array.isArray(v.name) ? orderBy(v.name) : v.name),
- ]).reduce(
- (prev, curr) => {
- const name = curr.name
- const prevByPath = prev.findLast((imp) => imp.path === curr.path)
- const prevByPathAndIsTypeOnly = prev.findLast((imp) => imp.path === curr.path && isDeepEqual(imp.name, name) && imp.isTypeOnly)
-
- if (prevByPathAndIsTypeOnly) {
- // we already have an export that has the same path but uses `isTypeOnly` (export type ...)
- return prev
- }
-
- const uniquePrev = prev.findLast(
- (imp) => imp.path === curr.path && isDeepEqual(imp.name, name) && imp.isTypeOnly === curr.isTypeOnly && imp.asAlias === curr.asAlias,
- )
-
- // we already have an item that was unique enough or name field is empty or prev asAlias is set but current has no changes
- if (uniquePrev || (Array.isArray(name) && !name.length) || (prevByPath?.asAlias && !curr.asAlias)) {
- return prev
- }
-
- if (!prevByPath) {
- return [
- ...prev,
- {
- ...curr,
- name: Array.isArray(name) ? [...new Set(name)] : name,
- },
- ]
- }
-
- // merge all names when prev and current both have the same isTypeOnly set
- if (prevByPath && Array.isArray(prevByPath.name) && Array.isArray(curr.name) && prevByPath.isTypeOnly === curr.isTypeOnly) {
- prevByPath.name = [...new Set([...prevByPath.name, ...curr.name])]
-
- return prev
- }
-
- return [...prev, curr]
- },
- [] as Array<KubbFile.Export>,
- )
- }
-
- export function combineImports(imports: Array<KubbFile.Import>, exports: Array<KubbFile.Export>, source?: string): Array<KubbFile.Import> {
- return orderBy(imports, [
- (v) => !!Array.isArray(v.name),
- (v) => !v.isTypeOnly,
- (v) => v.path,
- (v) => !!v.name,
- (v) => (Array.isArray(v.name) ? orderBy(v.name) : v.name),
- ]).reduce(
- (prev, curr) => {
- let name = Array.isArray(curr.name) ? [...new Set(curr.name)] : curr.name
-
- const hasImportInSource = (importName: string) => {
- if (!source) {
- return true
- }
-
- const checker = (name?: string) => {
- return name && source.includes(name)
- }
-
- return checker(importName) || exports.some(({ name }) => (Array.isArray(name) ? name.some(checker) : checker(name)))
- }
-
- if (curr.path === curr.root) {
- // root and path are the same file, remove the "./" import
- return prev
- }
-
- // merge all names and check if the importName is being used in the generated source and if not filter those imports out
- if (Array.isArray(name)) {
- name = name.filter((item) => (typeof item === 'string' ? hasImportInSource(item) : hasImportInSource(item.propertyName)))
- }
-
- const prevByPath = prev.findLast((imp) => imp.path === curr.path && imp.isTypeOnly === curr.isTypeOnly)
- const uniquePrev = prev.findLast((imp) => imp.path === curr.path && isDeepEqual(imp.name, name) && imp.isTypeOnly === curr.isTypeOnly)
- const prevByPathNameAndIsTypeOnly = prev.findLast((imp) => imp.path === curr.path && isDeepEqual(imp.name, name) && imp.isTypeOnly)
-
- if (prevByPathNameAndIsTypeOnly) {
- // we already have an export that has the same path but uses `isTypeOnly` (import type ...)
- return prev
- }
-
- // already unique enough or name is empty
- if (uniquePrev || (Array.isArray(name) && !name.length)) {
- return prev
- }
-
- // new item, append name
- if (!prevByPath) {
- return [
- ...prev,
- {
- ...curr,
- name,
- },
- ]
- }
-
- // merge all names when prev and current both have the same isTypeOnly set
- if (prevByPath && Array.isArray(prevByPath.name) && Array.isArray(name) && prevByPath.isTypeOnly === curr.isTypeOnly) {
- prevByPath.name = [...new Set([...prevByPath.name, ...name])]
-
- return prev
- }
-
- // no import was found in the source, ignore import
- if (!Array.isArray(name) && name && !hasImportInSource(name)) {
- return prev
- }
-
- return [...prev, curr]
- },
- [] as Array<KubbFile.Import>,
- )
- }
-
- /**
- * Helper to create a file with name and id set
- */
- export function createFile<TMeta extends object = object>(file: KubbFile.File<TMeta>): KubbFile.ResolvedFile<TMeta> {
- const extname = path.extname(file.baseName) as KubbFile.Extname
- if (!extname) {
- throw new Error(`No extname found for ${file.baseName}`)
- }
-
- const source = file.sources.map((item) => item.value).join('\n\n')
- const exports = file.exports?.length ? combineExports(file.exports) : []
- const imports = file.imports?.length && source ? combineImports(file.imports, exports, source) : []
- const sources = file.sources?.length ? combineSources(file.sources) : []
-
- return {
- ...file,
- id: hashObject({ path: file.path }),
- name: trimExtName(file.baseName),
- extname,
- imports: imports,
- exports: exports,
- sources: sources,
- meta: file.meta || ({} as TMeta),
- }
- }
-
  export class FileManager {
  #cache = new Cache<KubbFile.ResolvedFile>()
- #limit = pLimit(100)
+ #processor = new FileProcessor()

  constructor() {
  return this
@@ -237,7 +67,7 @@ export class FileManager {
  this.#cache.clear()
  }

- getFiles(): Array<KubbFile.ResolvedFile> {
+ get files(): Array<KubbFile.ResolvedFile> {
  const cachedKeys = this.#cache.keys()

  // order by path length and if file is a barrel file
@@ -248,23 +78,14 @@ export class FileManager {
  return files.filter(Boolean)
  }

- async processFiles({ dryRun, extension }: WriteFilesProps): Promise<Array<KubbFile.ResolvedFile>> {
- const files = this.getFiles()
-
- const promises = files.map((resolvedFile) => {
- return this.#limit(async () => {
- const extname = extension ? extension[resolvedFile.extname] || undefined : resolvedFile.extname
+ get processor() {
+ const files = this.files
+ const processor = this.#processor

- if (!dryRun) {
- const source = await parseFile(resolvedFile, { extname })
-
- await write(resolvedFile.path, source, { sanity: false })
- }
- })
- })
-
- await Promise.all(promises)
-
- return files
+ return {
+ async run(options: ProcessFilesProps) {
+ return processor.run(files, options)
+ },
+ }
  }
  }
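Note: FileManager now delegates writing to a FileProcessor and exposes files as a getter instead of getFiles()/processFiles(). A migration sketch, assuming FileManager is re-exported from the package root and that files were added to the manager beforehand (the add API itself is not part of this diff):

import { FileManager } from '@kubb/fabric-core' // assumed re-export via src/index.ts

const fileManager = new FileManager()

// 0.1.0: const files = fileManager.getFiles()
const files = fileManager.files

// 0.1.0: await fileManager.processFiles({ dryRun: true })
// 0.1.1: the processor getter wraps FileProcessor#run over the manager's current files.
await fileManager.processor.run({ dryRun: true }) // pass `extension` to remap extnames in emitted paths

console.log(files.length)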
package/src/FileProcessor.ts ADDED
@@ -0,0 +1,86 @@
+ import type * as KubbFile from './KubbFile.ts'
+ import { EventEmitter } from './utils/EventEmitter.ts'
+ import { write } from './fs.ts'
+ import pLimit from 'p-limit'
+ import type { Parser } from './parsers/types.ts'
+ import { typeScriptParser } from './parsers/typescript.ts'
+ import { tsxParser } from './parsers/tsx.ts'
+ import { defaultParser } from './parsers/default.ts'
+
+ type FileProcessorEvents = {
+ start: [{ files: KubbFile.ResolvedFile[] }]
+ finish: [{ files: KubbFile.ResolvedFile[] }]
+ 'file:start': [{ file: KubbFile.ResolvedFile }]
+ 'file:finish': [{ file: KubbFile.ResolvedFile }]
+ }
+
+ export type ProcessFilesProps = {
+ extension?: Record<KubbFile.Extname, KubbFile.Extname | ''>
+ dryRun?: boolean
+ }
+
+ type GetSourceOptions = {
+ extname?: KubbFile.Extname
+ }
+
+ async function getParser<TMeta extends object = object>(extname: KubbFile.Extname | undefined): Promise<Parser<TMeta>> {
+ const parsers: Record<KubbFile.Extname, Parser<any>> = {
+ '.ts': typeScriptParser,
+ '.js': typeScriptParser,
+ '.jsx': tsxParser,
+ '.tsx': tsxParser,
+ '.json': defaultParser,
+ }
+
+ if (!extname) {
+ return defaultParser
+ }
+
+ const parser = parsers[extname]
+
+ if (!parser) {
+ console.warn(`[parser] No parser found for ${extname}, default parser will be used`)
+ }
+
+ return parser || defaultParser
+ }
+
+ export async function parseFile(file: KubbFile.ResolvedFile, { extname }: GetSourceOptions = {}): Promise<string> {
+ const parser = await getParser(file.extname)
+
+ return parser.print(file, { extname })
+ }
+
+ export class FileProcessor extends EventEmitter<FileProcessorEvents> {
+ #limit = pLimit(100)
+
+ constructor(maxListener = 1000) {
+ super(maxListener)
+ return this
+ }
+
+ async run(files: Array<KubbFile.ResolvedFile>, { dryRun, extension }: ProcessFilesProps): Promise<KubbFile.ResolvedFile[]> {
+ this.emit('start', { files })
+
+ const promises = files.map((resolvedFile) =>
+ this.#limit(async () => {
+ const extname = extension?.[resolvedFile.extname] || undefined
+
+ this.emit('file:start', { file: resolvedFile })
+
+ if (!dryRun) {
+ const source = await parseFile(resolvedFile, { extname })
+ await write(resolvedFile.path, source, { sanity: false })
+ }
+
+ this.emit('file:finish', { file: resolvedFile })
+ }),
+ )
+
+ await Promise.all(promises)
+
+ this.emit('finish', { files })
+
+ return files
+ }
+ }
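Note: FileProcessor is new in 0.1.1; it parses and writes files with a concurrency limit of 100 and emits start/finish plus per-file events. A usage sketch, assuming the custom EventEmitter in src/utils/EventEmitter.ts exposes an on(event, listener) subscription method (its API is not shown in this diff) and that FileProcessor is re-exported from the package root:

import { FileProcessor } from '@kubb/fabric-core' // assumed re-export

const processor = new FileProcessor()

// Assumed subscription API, mirroring Node's EventEmitter.
processor.on('file:finish', ({ file }) => {
  console.log(`written ${file.path}`)
})

const resolvedFiles: any[] = [] // normally FileManager#files

// dryRun: true resolves and emits events without touching the filesystem.
await processor.run(resolvedFiles, { dryRun: true })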
package/src/createFile.ts ADDED
@@ -0,0 +1,167 @@
+ import type * as KubbFile from './KubbFile.ts'
+ import { trimExtName } from './fs.ts'
+ import { createHash } from 'node:crypto'
+ import path from 'node:path'
+ import { isDeepEqual, uniqueBy } from 'remeda'
+ import { orderBy } from 'natural-orderby'
+
+ function hashObject(obj: Record<string, unknown>): string {
+ const str = JSON.stringify(obj, Object.keys(obj).sort())
+ return createHash('sha256').update(str).digest('hex')
+ }
+
+ export function combineSources(sources: Array<KubbFile.Source>): Array<KubbFile.Source> {
+ return uniqueBy(sources, (obj) => [obj.name, obj.isExportable, obj.isTypeOnly] as const)
+ }
+
+ export function combineExports(exports: Array<KubbFile.Export>): Array<KubbFile.Export> {
+ return orderBy(exports, [
+ (v) => !!Array.isArray(v.name),
+ (v) => !v.isTypeOnly,
+ (v) => v.path,
+ (v) => !!v.name,
+ (v) => (Array.isArray(v.name) ? orderBy(v.name) : v.name),
+ ]).reduce(
+ (prev, curr) => {
+ const name = curr.name
+ const prevByPath = prev.findLast((imp) => imp.path === curr.path)
+ const prevByPathAndIsTypeOnly = prev.findLast((imp) => imp.path === curr.path && isDeepEqual(imp.name, name) && imp.isTypeOnly)
+
+ if (prevByPathAndIsTypeOnly) {
+ // we already have an export that has the same path but uses `isTypeOnly` (export type ...)
+ return prev
+ }
+
+ const uniquePrev = prev.findLast(
+ (imp) => imp.path === curr.path && isDeepEqual(imp.name, name) && imp.isTypeOnly === curr.isTypeOnly && imp.asAlias === curr.asAlias,
+ )
+
+ // we already have an item that was unique enough or name field is empty or prev asAlias is set but current has no changes
+ if (uniquePrev || (Array.isArray(name) && !name.length) || (prevByPath?.asAlias && !curr.asAlias)) {
+ return prev
+ }
+
+ if (!prevByPath) {
+ return [
+ ...prev,
+ {
+ ...curr,
+ name: Array.isArray(name) ? [...new Set(name)] : name,
+ },
+ ]
+ }
+
+ // merge all names when prev and current both have the same isTypeOnly set
+ if (prevByPath && Array.isArray(prevByPath.name) && Array.isArray(curr.name) && prevByPath.isTypeOnly === curr.isTypeOnly) {
+ prevByPath.name = [...new Set([...prevByPath.name, ...curr.name])]
+
+ return prev
+ }
+
+ return [...prev, curr]
+ },
+ [] as Array<KubbFile.Export>,
+ )
+ }
+
+ export function combineImports(imports: Array<KubbFile.Import>, exports: Array<KubbFile.Export>, source?: string): Array<KubbFile.Import> {
+ return orderBy(imports, [
+ (v) => !!Array.isArray(v.name),
+ (v) => !v.isTypeOnly,
+ (v) => v.path,
+ (v) => !!v.name,
+ (v) => (Array.isArray(v.name) ? orderBy(v.name) : v.name),
+ ]).reduce(
+ (prev, curr) => {
+ let name = Array.isArray(curr.name) ? [...new Set(curr.name)] : curr.name
+
+ const hasImportInSource = (importName: string) => {
+ if (!source) {
+ return true
+ }
+
+ const checker = (name?: string) => {
+ return name && source.includes(name)
+ }
+
+ return checker(importName) || exports.some(({ name }) => (Array.isArray(name) ? name.some(checker) : checker(name)))
+ }
+
+ if (curr.path === curr.root) {
+ // root and path are the same file, remove the "./" import
+ return prev
+ }
+
+ // merge all names and check if the importName is being used in the generated source and if not filter those imports out
+ if (Array.isArray(name)) {
+ name = name.filter((item) => (typeof item === 'string' ? hasImportInSource(item) : hasImportInSource(item.propertyName)))
+ }
+
+ const prevByPath = prev.findLast((imp) => imp.path === curr.path && imp.isTypeOnly === curr.isTypeOnly)
+ const uniquePrev = prev.findLast((imp) => imp.path === curr.path && isDeepEqual(imp.name, name) && imp.isTypeOnly === curr.isTypeOnly)
+ const prevByPathNameAndIsTypeOnly = prev.findLast((imp) => imp.path === curr.path && isDeepEqual(imp.name, name) && imp.isTypeOnly)
+
+ if (prevByPathNameAndIsTypeOnly) {
+ // we already have an export that has the same path but uses `isTypeOnly` (import type ...)
+ return prev
+ }
+
+ // already unique enough or name is empty
+ if (uniquePrev || (Array.isArray(name) && !name.length)) {
+ return prev
+ }
+
+ // new item, append name
+ if (!prevByPath) {
+ return [
+ ...prev,
+ {
+ ...curr,
+ name,
+ },
+ ]
+ }
+
+ // merge all names when prev and current both have the same isTypeOnly set
+ if (prevByPath && Array.isArray(prevByPath.name) && Array.isArray(name) && prevByPath.isTypeOnly === curr.isTypeOnly) {
+ prevByPath.name = [...new Set([...prevByPath.name, ...name])]
+
+ return prev
+ }
+
+ // no import was found in the source, ignore import
+ if (!Array.isArray(name) && name && !hasImportInSource(name)) {
+ return prev
+ }
+
+ return [...prev, curr]
+ },
+ [] as Array<KubbFile.Import>,
+ )
+ }
+
+ /**
+ * Helper to create a file with name and id set
+ */
+ export function createFile<TMeta extends object = object>(file: KubbFile.File<TMeta>): KubbFile.ResolvedFile<TMeta> {
+ const extname = path.extname(file.baseName) as KubbFile.Extname
+ if (!extname) {
+ throw new Error(`No extname found for ${file.baseName}`)
+ }
+
+ const source = file.sources.map((item) => item.value).join('\n\n')
+ const exports = file.exports?.length ? combineExports(file.exports) : []
+ const imports = file.imports?.length && source ? combineImports(file.imports, exports, source) : []
+ const sources = file.sources?.length ? combineSources(file.sources) : []
+
+ return {
+ ...file,
+ id: hashObject({ path: file.path }),
+ name: trimExtName(file.baseName),
+ extname,
+ imports: imports,
+ exports: exports,
+ sources: sources,
+ meta: file.meta || ({} as TMeta),
+ }
+ }
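Note: createFile (moved out of FileManager.ts unchanged) resolves a KubbFile.File into a ResolvedFile: it hashes the path into an id, strips the extension for name, and deduplicates sources, imports, and exports. A sketch with a hypothetical input object; the field names follow the code above, but the full KubbFile.File type lives in KubbFile.ts, which is not part of this diff, and the package-root import is an assumption.

import { createFile } from '@kubb/fabric-core' // assumed re-export via src/index.ts

// Shape is illustrative; see KubbFile.ts for the full File type.
const resolved = createFile({
  path: 'src/gen/models/Pet.ts',
  baseName: 'Pet.ts',
  sources: [{ name: 'Pet', value: 'export type Pet = { name: string }', isExportable: true, isTypeOnly: true }],
  imports: [],
  exports: [],
})

console.log(resolved.id)      // sha256 hash of { path: file.path }
console.log(resolved.name)    // 'Pet' (baseName without extension)
console.log(resolved.extname) // '.ts'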
package/src/defineApp.ts CHANGED
@@ -40,7 +40,7 @@ export interface App {
  _component: Component
  render(): Promise<void>
  renderToString(): Promise<string>
- getFiles(): Promise<Array<KubbFile.ResolvedFile>>
+ files: Array<KubbFile.ResolvedFile>
  use<Options>(plugin: Plugin<Options>, options: NoInfer<Options>): this
  write(options?: WriteOptions): Promise<void>
  addFile(...files: Array<KubbFile.File>): Promise<void>
@@ -63,7 +63,7 @@ export function defineApp<THostElement, TContext extends AppContext>(instance: R
  context.fileManager.clear()
  },
  get files() {
- return fileManager.getFiles()
+ return fileManager.files
  },
  } as TContext

@@ -81,8 +81,8 @@ export function defineApp<THostElement, TContext extends AppContext>(instance: R
  async renderToString() {
  return renderToString()
  },
- async getFiles() {
- return fileManager.getFiles()
+ get files() {
+ return fileManager.files
  },
  waitUntilExit,
  addFile: context.addFile,
@@ -92,7 +92,7 @@ export function defineApp<THostElement, TContext extends AppContext>(instance: R
  dryRun: false,
  },
  ) {
- await fileManager.processFiles({
+ await fileManager.processor.run({
  extension: options.extension,
  dryRun: options.dryRun,
  })
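Note: at the App level the change is the same rename: getFiles() becomes a files property and write() now routes through fileManager.processor.run(). A migration sketch, assuming the App interface is re-exported from the package root and an app instance produced by defineApp (renderer and component setup omitted):

import type { App } from '@kubb/fabric-core' // assumed re-export of the App interface

declare const app: App // created elsewhere via defineApp(...)

// 0.1.0: const files = await app.getFiles()
const files = app.files

// write() still accepts extension/dryRun and now delegates to fileManager.processor.run(...)
await app.write({ dryRun: false })

console.log(files.map((file) => file.path))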