@intlayer/chokidar 2.0.0 → 2.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/transpiler/declaration_file_to_dictionary/i18n_dictionary/buildI18nDictionary.cjs.map +1 -1
- package/dist/cjs/transpiler/declaration_file_to_dictionary/intlayer_dictionary/buildIntlayerDictionary.cjs +3 -3
- package/dist/cjs/transpiler/declaration_file_to_dictionary/intlayer_dictionary/buildIntlayerDictionary.cjs.map +1 -1
- package/dist/cjs/transpiler/declaration_file_to_dictionary/intlayer_dictionary/extractNestedJSON.cjs.map +1 -1
- package/dist/cjs/transpiler/declaration_file_to_dictionary/intlayer_dictionary/extractNestedJSON.d.ts +2 -2
- package/dist/cjs/transpiler/declaration_file_to_dictionary/intlayer_dictionary/loadContentDeclaration.cjs.map +1 -1
- package/dist/cjs/transpiler/declaration_file_to_dictionary/intlayer_dictionary/loadContentDeclaration.d.ts +2 -2
- package/dist/cjs/transpiler/declaration_file_to_dictionary/intlayer_dictionary/processContentDeclaration.cjs +8 -1
- package/dist/cjs/transpiler/declaration_file_to_dictionary/intlayer_dictionary/processContentDeclaration.cjs.map +1 -1
- package/dist/cjs/transpiler/declaration_file_to_dictionary/intlayer_dictionary/processContentDeclaration.d.ts +3 -3
- package/dist/cjs/transpiler/dictionary_to_type/createType.cjs +38 -52
- package/dist/cjs/transpiler/dictionary_to_type/createType.cjs.map +1 -1
- package/dist/cjs/transpiler/dictionary_to_type/createType.d.ts +3 -3
- package/dist/esm/transpiler/declaration_file_to_dictionary/i18n_dictionary/buildI18nDictionary.mjs.map +1 -1
- package/dist/esm/transpiler/declaration_file_to_dictionary/intlayer_dictionary/buildIntlayerDictionary.mjs +3 -3
- package/dist/esm/transpiler/declaration_file_to_dictionary/intlayer_dictionary/buildIntlayerDictionary.mjs.map +1 -1
- package/dist/esm/transpiler/declaration_file_to_dictionary/intlayer_dictionary/extractNestedJSON.d.mts +2 -2
- package/dist/esm/transpiler/declaration_file_to_dictionary/intlayer_dictionary/extractNestedJSON.mjs.map +1 -1
- package/dist/esm/transpiler/declaration_file_to_dictionary/intlayer_dictionary/loadContentDeclaration.d.mts +2 -2
- package/dist/esm/transpiler/declaration_file_to_dictionary/intlayer_dictionary/loadContentDeclaration.mjs.map +1 -1
- package/dist/esm/transpiler/declaration_file_to_dictionary/intlayer_dictionary/processContentDeclaration.d.mts +3 -3
- package/dist/esm/transpiler/declaration_file_to_dictionary/intlayer_dictionary/processContentDeclaration.mjs +8 -1
- package/dist/esm/transpiler/declaration_file_to_dictionary/intlayer_dictionary/processContentDeclaration.mjs.map +1 -1
- package/dist/esm/transpiler/dictionary_to_type/createType.d.mts +3 -3
- package/dist/esm/transpiler/dictionary_to_type/createType.mjs +38 -52
- package/dist/esm/transpiler/dictionary_to_type/createType.mjs.map +1 -1
- package/package.json +4 -6
- package/src/chokidar/index.ts +0 -1
- package/src/chokidar/watcher.ts +0 -75
- package/src/transpiler/declaration_file_to_dictionary/i18n_dictionary/buildI18nDictionary.ts +0 -95
- package/src/transpiler/declaration_file_to_dictionary/i18n_dictionary/convertContentDeclarationInto18nDictionaries.ts +0 -91
- package/src/transpiler/declaration_file_to_dictionary/i18n_dictionary/convertPluralsValues.ts +0 -22
- package/src/transpiler/declaration_file_to_dictionary/i18n_dictionary/index.ts +0 -1
- package/src/transpiler/declaration_file_to_dictionary/index.ts +0 -21
- package/src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/buildIntlayerDictionary.ts +0 -70
- package/src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/extractNestedJSON.ts +0 -60
- package/src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/index.ts +0 -3
- package/src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/loadContentDeclaration.ts +0 -114
- package/src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/processContentDeclaration.ts +0 -65
- package/src/transpiler/dictionary_to_main/createDictionaryList.ts +0 -65
- package/src/transpiler/dictionary_to_main/index.ts +0 -1
- package/src/transpiler/dictionary_to_type/createModuleAugmentation.ts +0 -102
- package/src/transpiler/dictionary_to_type/createType.ts +0 -162
- package/src/transpiler/dictionary_to_type/index.ts +0 -2
- package/src/utils.ts +0 -26
@@ -1 +1 @@
-
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/i18n_dictionary/buildI18nDictionary.ts"],"sourcesContent":["import { mkdir, writeFile } from 'fs/promises';\nimport { resolve } from 'path';\nimport { getConfiguration } from '@intlayer/config';\nimport type {
+
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/i18n_dictionary/buildI18nDictionary.ts"],"sourcesContent":["import { mkdir, writeFile } from 'fs/promises';\nimport { resolve } from 'path';\nimport { getConfiguration } from '@intlayer/config';\nimport type { DeclarationContent } from '@intlayer/core';\nimport {\n processContentDeclaration,\n extractObjectsWithId,\n} from '../intlayer_dictionary/index';\nimport {\n type I18nDictionariesOutput,\n createI18nDictionaries,\n} from './convertContentDeclarationInto18nDictionaries';\n\nconst { content } = getConfiguration();\nconst { i18nDictionariesDir } = content;\n\ntype DictionariesDeclaration = Record<string, I18nDictionariesOutput>;\n\n/**\n * This function writes the dictionaries to the file system\n */\nconst writeDictionary = async (\n dictionariesDeclaration: DictionariesDeclaration\n) => {\n const resultDictionariesPaths: string[] = [];\n\n for (const [nameSpace, localContent] of Object.entries(\n dictionariesDeclaration\n )) {\n for await (const [locale, content] of Object.entries(localContent)) {\n const contentString = JSON.stringify(content);\n\n const outputFileName = `${nameSpace}.json`;\n const resultDirPath = resolve(i18nDictionariesDir, locale);\n const resultFilePath = resolve(resultDirPath, outputFileName);\n\n // Create the dictionaries folder if it doesn't exist\n await mkdir(resultDirPath, { recursive: true });\n\n // Create the json file\n await writeFile(resultFilePath, contentString, 'utf8').catch((err) => {\n console.error(`Error creating ${outputFileName}:`, err);\n });\n\n resultDictionariesPaths.push(resultFilePath);\n }\n }\n\n return resultDictionariesPaths;\n};\n\n/**\n * This function transpile content declaration to i18n dictionaries\n */\nexport const buildI18nDictionary = async (\n contentDeclarationsPaths: string[] | string\n) => {\n const resultDictionariesPaths: string[] = [];\n\n if (typeof contentDeclarationsPaths === 'string') {\n contentDeclarationsPaths = [contentDeclarationsPaths];\n }\n\n for await (const contentDeclarationPath of contentDeclarationsPaths) {\n const result = await processContentDeclaration(contentDeclarationPath);\n\n if (!result) {\n continue;\n }\n\n const nestedContent: DeclarationContent[] = extractObjectsWithId(result);\n\n // Create dictionaries for each nested content and format them\n const dictionariesDeclaration: DictionariesDeclaration =\n nestedContent.reduce((acc, content) => {\n const id: string = content.id;\n const i18Content = createI18nDictionaries(content);\n\n return {\n ...acc,\n [id]: i18Content,\n };\n }, {});\n\n // Write the dictionaries to the file system\n const dictionariesPaths: string[] = await writeDictionary(\n dictionariesDeclaration\n );\n\n // Add the paths to the result\n resultDictionariesPaths.push(...dictionariesPaths);\n }\n\n return 
resultDictionariesPaths;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAiC;AACjC,kBAAwB;AACxB,oBAAiC;AAEjC,iCAGO;AACP,0DAGO;AAEP,MAAM,EAAE,QAAQ,QAAI,gCAAiB;AACrC,MAAM,EAAE,oBAAoB,IAAI;AAOhC,MAAM,kBAAkB,OACtB,4BACG;AACH,QAAM,0BAAoC,CAAC;AAE3C,aAAW,CAAC,WAAW,YAAY,KAAK,OAAO;AAAA,IAC7C;AAAA,EACF,GAAG;AACD,qBAAiB,CAAC,QAAQA,QAAO,KAAK,OAAO,QAAQ,YAAY,GAAG;AAClE,YAAM,gBAAgB,KAAK,UAAUA,QAAO;AAE5C,YAAM,iBAAiB,GAAG,SAAS;AACnC,YAAM,oBAAgB,qBAAQ,qBAAqB,MAAM;AACzD,YAAM,qBAAiB,qBAAQ,eAAe,cAAc;AAG5D,gBAAM,uBAAM,eAAe,EAAE,WAAW,KAAK,CAAC;AAG9C,gBAAM,2BAAU,gBAAgB,eAAe,MAAM,EAAE,MAAM,CAAC,QAAQ;AACpE,gBAAQ,MAAM,kBAAkB,cAAc,KAAK,GAAG;AAAA,MACxD,CAAC;AAED,8BAAwB,KAAK,cAAc;AAAA,IAC7C;AAAA,EACF;AAEA,SAAO;AACT;AAKO,MAAM,sBAAsB,OACjC,6BACG;AACH,QAAM,0BAAoC,CAAC;AAE3C,MAAI,OAAO,6BAA6B,UAAU;AAChD,+BAA2B,CAAC,wBAAwB;AAAA,EACtD;AAEA,mBAAiB,0BAA0B,0BAA0B;AACnE,UAAM,SAAS,UAAM,sDAA0B,sBAAsB;AAErE,QAAI,CAAC,QAAQ;AACX;AAAA,IACF;AAEA,UAAM,oBAAsC,iDAAqB,MAAM;AAGvE,UAAM,0BACJ,cAAc,OAAO,CAAC,KAAKA,aAAY;AACrC,YAAM,KAAaA,SAAQ;AAC3B,YAAM,iBAAa,4EAAuBA,QAAO;AAEjD,aAAO;AAAA,QACL,GAAG;AAAA,QACH,CAAC,EAAE,GAAG;AAAA,MACR;AAAA,IACF,GAAG,CAAC,CAAC;AAGP,UAAM,oBAA8B,MAAM;AAAA,MACxC;AAAA,IACF;AAGA,4BAAwB,KAAK,GAAG,iBAAiB;AAAA,EACnD;AAEA,SAAO;AACT;","names":["content"]}
@@ -30,9 +30,9 @@ const { content } = (0, import_config.getConfiguration)();
 const { dictionariesDir } = content;
 const writeDictionary = async (dictionaries) => {
 const resultDictionariesPaths = [];
-for await (const
-const contentString = JSON.stringify(
-const id =
+for await (const dictionaryContent of dictionaries) {
+const contentString = JSON.stringify(dictionaryContent);
+const id = dictionaryContent.id;
 const outputFileName = `${id}.json`;
 const resultFilePath = (0, import_path.resolve)(dictionariesDir, outputFileName);
 await (0, import_promises.writeFile)(resultFilePath, contentString, "utf8").catch((err) => {
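The compiled `buildIntlayerDictionary` hunk above now serialises each dictionary whole and names the output file after its `id`. A minimal sketch of the rewritten `writeDictionary`, with the dictionaries directory passed in as a parameter instead of read from `getConfiguration()` as the real module does (the id and path in the trailing comment are illustrative):

```ts
import { writeFile } from 'fs/promises';
import { resolve } from 'path';

type DeclarationContent = { id: string } & Record<string, unknown>;

// Sketch only: the package resolves `dictionariesDir` from @intlayer/config.
const writeDictionary = async (
  dictionaries: DeclarationContent[],
  dictionariesDir: string
): Promise<string[]> => {
  const resultDictionariesPaths: string[] = [];
  for (const dictionaryContent of dictionaries) {
    // Each dictionary is written as <dictionariesDir>/<id>.json
    const outputFileName = `${dictionaryContent.id}.json`;
    const resultFilePath = resolve(dictionariesDir, outputFileName);
    await writeFile(resultFilePath, JSON.stringify(dictionaryContent), 'utf8').catch((err) =>
      console.error(`Error creating ${outputFileName}:`, err)
    );
    resultDictionariesPaths.push(resultFilePath);
  }
  return resultDictionariesPaths;
};

// e.g. writeDictionary([{ id: 'home-page', title: 'Welcome' }], '.intlayer/dictionary')
// would emit .intlayer/dictionary/home-page.json
```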
@@ -1 +1 @@
-
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/buildIntlayerDictionary.ts"],"sourcesContent":["import { mkdir, writeFile } from 'fs/promises';\nimport { resolve } from 'path';\nimport { getConfiguration } from '@intlayer/config';\nimport type {
+
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/buildIntlayerDictionary.ts"],"sourcesContent":["import { mkdir, writeFile } from 'fs/promises';\nimport { resolve } from 'path';\nimport { getConfiguration } from '@intlayer/config';\nimport type { DeclarationContent } from '@intlayer/core';\nimport { extractObjectsWithId } from './extractNestedJSON';\nimport { processContentDeclaration } from './processContentDeclaration';\n\nconst { content } = getConfiguration();\nconst { dictionariesDir } = content;\n\nconst writeDictionary = async (dictionaries: DeclarationContent[]) => {\n const resultDictionariesPaths: string[] = [];\n\n for await (const dictionaryContent of dictionaries) {\n const contentString = JSON.stringify(dictionaryContent);\n\n const id = dictionaryContent.id;\n const outputFileName = `${id}.json`;\n const resultFilePath = resolve(dictionariesDir, outputFileName);\n\n // Create the json file\n await writeFile(resultFilePath, contentString, 'utf8').catch((err) => {\n console.error(`Error creating ${outputFileName}:`, err);\n });\n\n resultDictionariesPaths.push(resultFilePath);\n }\n\n return resultDictionariesPaths;\n};\n\n/**\n * This function transpile the bundled code to to make dictionaries as JSON files\n */\nexport const buildIntlayerDictionary = async (\n contentDeclarationsPaths: string[] | string\n) => {\n const resultDictionariesPaths: string[] = [];\n\n if (typeof contentDeclarationsPaths === 'string') {\n contentDeclarationsPaths = [contentDeclarationsPaths];\n }\n\n // Create the dictionaries folder if it doesn't exist\n await mkdir(resolve(dictionariesDir), { recursive: true });\n\n for await (const contentDeclarationPath of contentDeclarationsPaths) {\n const result = await processContentDeclaration(contentDeclarationPath);\n\n if (!result) {\n continue;\n }\n\n const nestedContent: DeclarationContent[] = extractObjectsWithId(result);\n\n const contentWithFilePath: DeclarationContent[] = nestedContent.map(\n (content) => ({\n ...content,\n filePath: contentDeclarationPath,\n })\n );\n\n const dictionariesPaths: string[] =\n await writeDictionary(contentWithFilePath);\n\n resultDictionariesPaths.push(...dictionariesPaths);\n }\n\n return resultDictionariesPaths;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAiC;AACjC,kBAAwB;AACxB,oBAAiC;AAEjC,+BAAqC;AACrC,uCAA0C;AAE1C,MAAM,EAAE,QAAQ,QAAI,gCAAiB;AACrC,MAAM,EAAE,gBAAgB,IAAI;AAE5B,MAAM,kBAAkB,OAAO,iBAAuC;AACpE,QAAM,0BAAoC,CAAC;AAE3C,mBAAiB,qBAAqB,cAAc;AAClD,UAAM,gBAAgB,KAAK,UAAU,iBAAiB;AAEtD,UAAM,KAAK,kBAAkB;AAC7B,UAAM,iBAAiB,GAAG,EAAE;AAC5B,UAAM,qBAAiB,qBAAQ,iBAAiB,cAAc;AAG9D,cAAM,2BAAU,gBAAgB,eAAe,MAAM,EAAE,MAAM,CAAC,QAAQ;AACpE,cAAQ,MAAM,kBAAkB,cAAc,KAAK,GAAG;AAAA,IACxD,CAAC;AAED,4BAAwB,KAAK,cAAc;AAAA,EAC7C;AAEA,SAAO;AACT;AAKO,MAAM,0BAA0B,OACrC,6BACG;AACH,QAAM,0BAAoC,CAAC;AAE3C,MAAI,OAAO,6BAA6B,UAAU;AAChD,+BAA2B,CAAC,wBAAwB;AAAA,EACtD;AAGA,YAAM,2BAAM,qBAAQ,eAAe,GAAG,EAAE,WAAW,KAAK,CAAC;AAEzD,mBAAiB,0BAA0B,0BAA0B;AACnE,UAAM,SAAS,UAAM,4DAA0B,sBAAsB;AAErE,QAAI,CAAC,QAAQ;AACX;AAAA,IACF;AAEA,UAAM,oBAAsC,+CAAqB,MAAM;AAEvE,UAAM,sBAA4C,cAAc;AAAA,MAC9D,CAACA,cAAa;AAAA,QACZ,GAAGA;AAAA,QACH,UAAU;AAAA,MACZ;AAAA,IACF;AAEA,UAAM,oBACJ,MAAM,gBAAgB,mBAAmB;AAE3C,4BAAwB,KAAK,GAAG,iBAAiB;AAAA,EACnD;AAEA,SAAO;AACT;","names":["content"]}
@@ -1 +1 @@
-
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/extractNestedJSON.ts"],"sourcesContent":["import type { Content,
+
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/extractNestedJSON.ts"],"sourcesContent":["import type { Content, DeclarationContent } from '@intlayer/core';\n\n/**\n *\n * This function extracts all nested objects with an 'id' field from the input object and returns them as an array\n *\n * Example:\n *\n * const input = {\n * id: '1',\n * name: 'John Doe',\n * address: {\n * id: '2',\n * street: '123 Main St',\n * city: 'Springfield',\n * state: 'IL'\n * },\n * };\n * const result = extractObjectsWithId(input);\n * console.log(result);\n *\n * Output:\n *\n * [{\n * id: '1',\n * name: 'John Doe',\n * address: {\n * id: '2',\n * street: '123 Main St',\n * city: 'Springfield',\n * state: 'IL'\n * }\n * },\n * {\n * id: '2',\n * street: '123 Main St',\n * city: 'Springfield',\n * state: 'IL'\n * }]\n *\n */\nexport const extractObjectsWithId = (\n input: DeclarationContent\n): DeclarationContent[] => {\n // Function to recursively search and extract nested objects with an 'id'\n const search = (obj: Content, results: DeclarationContent[]): void => {\n if (obj && typeof obj === 'object') {\n if (Object.hasOwn(obj, 'id')) {\n results.push(obj as DeclarationContent);\n }\n for (const key of Object.keys(obj)) {\n if (typeof obj[key] === 'object') {\n search(obj[key] as Content, results);\n }\n }\n }\n };\n\n const results: DeclarationContent[] = [];\n search(input, results);\n return results;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAyCO,MAAM,uBAAuB,CAClC,UACyB;AAEzB,QAAM,SAAS,CAAC,KAAcA,aAAwC;AACpE,QAAI,OAAO,OAAO,QAAQ,UAAU;AAClC,UAAI,OAAO,OAAO,KAAK,IAAI,GAAG;AAC5B,QAAAA,SAAQ,KAAK,GAAyB;AAAA,MACxC;AACA,iBAAW,OAAO,OAAO,KAAK,GAAG,GAAG;AAClC,YAAI,OAAO,IAAI,GAAG,MAAM,UAAU;AAChC,iBAAO,IAAI,GAAG,GAAcA,QAAO;AAAA,QACrC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,UAAgC,CAAC;AACvC,SAAO,OAAO,OAAO;AACrB,SAAO;AACT;","names":["results"]}
@@ -1,4 +1,4 @@
-import {
+import { DeclarationContent } from '@intlayer/core';
 
 /**
 *
@@ -39,6 +39,6 @@ import { ContentModule } from '@intlayer/core';
 * }]
 *
 */
-declare const extractObjectsWithId: (input:
+declare const extractObjectsWithId: (input: DeclarationContent) => DeclarationContent[];
 
 export { extractObjectsWithId };
@@ -1 +1 @@
-
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/loadContentDeclaration.ts"],"sourcesContent":["/* eslint-disable @typescript-eslint/no-var-requires */\nimport { createRequire } from 'module';\nimport { type Context, runInNewContext } from 'vm';\nimport type {
+
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/loadContentDeclaration.ts"],"sourcesContent":["/* eslint-disable @typescript-eslint/no-var-requires */\nimport { createRequire } from 'module';\nimport { type Context, runInNewContext } from 'vm';\nimport type { DeclarationContent } from '@intlayer/core';\nimport { type BuildOptions, buildSync, type BuildResult } from 'esbuild';\n\nconst isESModule = typeof import.meta.url === 'string';\n\nconst sandboxContext: Context = {\n exports: {\n default: {},\n },\n module: {\n exports: {},\n },\n console,\n require: isESModule ? createRequire(import.meta.url) : require,\n};\n\nconst transformationOption: BuildOptions = {\n loader: {\n '.js': 'js',\n '.jsx': 'jsx',\n '.mjs': 'js',\n '.ts': 'ts',\n '.tsx': 'tsx',\n '.cjs': 'js',\n '.json': 'json',\n },\n format: 'cjs', // Output format as commonjs\n target: 'es2017',\n packages: 'external',\n write: false,\n bundle: true,\n};\n\nconst filterValidContentDeclaration = (\n contentDeclaration: DeclarationContent\n): DeclarationContent => {\n // @TODO Implement filtering of valid content declaration\n return contentDeclaration;\n};\n\n/**\n * Load the content declaration from the given path\n *\n * Accepts JSON, JS, MJS and TS files as configuration\n */\nexport const loadContentDeclaration = (\n contentDeclarationFilePath: string\n): DeclarationContent | undefined => {\n let contentDeclaration: DeclarationContent | undefined = undefined;\n\n const fileExtension = contentDeclarationFilePath.split('.').pop() ?? '';\n\n try {\n if (fileExtension === 'json') {\n // Assume JSON\n return require(contentDeclarationFilePath);\n }\n\n // Rest is JS, MJS or TS\n\n const moduleResult: BuildResult = buildSync({\n entryPoints: [contentDeclarationFilePath],\n\n ...transformationOption,\n });\n\n const moduleResultString = moduleResult.outputFiles?.[0].text;\n\n if (!moduleResultString) {\n console.error('Configuration file could not be loaded.');\n return undefined;\n }\n\n runInNewContext(moduleResultString, sandboxContext);\n\n if (\n sandboxContext.exports.default &&\n Object.keys(sandboxContext.exports.default).length > 0\n ) {\n // ES Module\n contentDeclaration = sandboxContext.exports.default;\n } else if (\n sandboxContext.module.exports.defaults &&\n Object.keys(sandboxContext.module.exports.defaults).length > 0\n ) {\n // CommonJS\n contentDeclaration = sandboxContext.module.exports.default;\n } else if (\n sandboxContext.module.exports.default &&\n Object.keys(sandboxContext.module.exports.default).length > 0\n ) {\n // ES Module\n contentDeclaration = sandboxContext.module.exports.default;\n } else if (\n sandboxContext.module.exports &&\n Object.keys(sandboxContext.module.exports).length > 0\n ) {\n // Other\n contentDeclaration = sandboxContext.module.exports;\n }\n\n if (typeof contentDeclaration === 'undefined') {\n console.error('Configuration file could not be loaded.');\n return undefined;\n }\n\n return filterValidContentDeclaration(contentDeclaration);\n } catch (error) {\n console.error('Error:', error);\n 
}\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,oBAA8B;AAC9B,gBAA8C;AAE9C,qBAA+D;AAJ/D;AAMA,MAAM,aAAa,OAAO,YAAY,QAAQ;AAE9C,MAAM,iBAA0B;AAAA,EAC9B,SAAS;AAAA,IACP,SAAS,CAAC;AAAA,EACZ;AAAA,EACA,QAAQ;AAAA,IACN,SAAS,CAAC;AAAA,EACZ;AAAA,EACA;AAAA,EACA,SAAS,iBAAa,6BAAc,YAAY,GAAG,IAAI;AACzD;AAEA,MAAM,uBAAqC;AAAA,EACzC,QAAQ;AAAA,IACN,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,SAAS;AAAA,EACX;AAAA,EACA,QAAQ;AAAA;AAAA,EACR,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,OAAO;AAAA,EACP,QAAQ;AACV;AAEA,MAAM,gCAAgC,CACpC,uBACuB;AAEvB,SAAO;AACT;AAOO,MAAM,yBAAyB,CACpC,+BACmC;AACnC,MAAI,qBAAqD;AAEzD,QAAM,gBAAgB,2BAA2B,MAAM,GAAG,EAAE,IAAI,KAAK;AAErE,MAAI;AACF,QAAI,kBAAkB,QAAQ;AAE5B,aAAO,QAAQ,0BAA0B;AAAA,IAC3C;AAIA,UAAM,mBAA4B,0BAAU;AAAA,MAC1C,aAAa,CAAC,0BAA0B;AAAA,MAExC,GAAG;AAAA,IACL,CAAC;AAED,UAAM,qBAAqB,aAAa,cAAc,CAAC,EAAE;AAEzD,QAAI,CAAC,oBAAoB;AACvB,cAAQ,MAAM,yCAAyC;AACvD,aAAO;AAAA,IACT;AAEA,mCAAgB,oBAAoB,cAAc;AAElD,QACE,eAAe,QAAQ,WACvB,OAAO,KAAK,eAAe,QAAQ,OAAO,EAAE,SAAS,GACrD;AAEA,2BAAqB,eAAe,QAAQ;AAAA,IAC9C,WACE,eAAe,OAAO,QAAQ,YAC9B,OAAO,KAAK,eAAe,OAAO,QAAQ,QAAQ,EAAE,SAAS,GAC7D;AAEA,2BAAqB,eAAe,OAAO,QAAQ;AAAA,IACrD,WACE,eAAe,OAAO,QAAQ,WAC9B,OAAO,KAAK,eAAe,OAAO,QAAQ,OAAO,EAAE,SAAS,GAC5D;AAEA,2BAAqB,eAAe,OAAO,QAAQ;AAAA,IACrD,WACE,eAAe,OAAO,WACtB,OAAO,KAAK,eAAe,OAAO,OAAO,EAAE,SAAS,GACpD;AAEA,2BAAqB,eAAe,OAAO;AAAA,IAC7C;AAEA,QAAI,OAAO,uBAAuB,aAAa;AAC7C,cAAQ,MAAM,yCAAyC;AACvD,aAAO;AAAA,IACT;AAEA,WAAO,8BAA8B,kBAAkB;AAAA,EACzD,SAAS,OAAO;AACd,YAAQ,MAAM,UAAU,KAAK;AAAA,EAC/B;AACF;","names":[]}
@@ -1,10 +1,10 @@
-import {
+import { DeclarationContent } from '@intlayer/core';
 
 /**
 * Load the content declaration from the given path
 *
 * Accepts JSON, JS, MJS and TS files as configuration
 */
-declare const loadContentDeclaration: (contentDeclarationFilePath: string) =>
+declare const loadContentDeclaration: (contentDeclarationFilePath: string) => DeclarationContent | undefined;
 
 export { loadContentDeclaration };
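`loadContentDeclaration` now declares a `DeclarationContent | undefined` return type. The loading strategy itself, visible in the source map shown above, bundles the declaration file to in-memory CommonJS with esbuild and evaluates it in a `vm` sandbox before reading the exported value. A condensed sketch of that approach (the `loadModule` name is made up, export resolution is simplified, and an ES-module context is assumed for `createRequire`):

```ts
import { createRequire } from 'module';
import { runInNewContext } from 'vm';
import { buildSync } from 'esbuild';

const loadModule = (filePath: string): unknown => {
  const sandbox: Record<string, any> = {
    exports: { default: {} },
    module: { exports: {} },
    console,
    // Assumes an ES-module context; the real code falls back to plain `require`.
    require: createRequire(import.meta.url),
  };

  // Bundle the JS/MJS/TS declaration file to CommonJS in memory.
  const { outputFiles } = buildSync({
    entryPoints: [filePath],
    format: 'cjs',
    target: 'es2017',
    packages: 'external',
    bundle: true,
    write: false,
  });
  const code = outputFiles?.[0]?.text;
  if (!code) return undefined;

  // Evaluate the bundle in the sandbox and pick up whichever export was populated.
  runInNewContext(code, sandbox);
  const candidates = [
    sandbox.exports.default,
    sandbox.module.exports.default,
    sandbox.module.exports,
  ];
  // The real implementation checks each candidate for own keys before accepting it.
  return candidates.find((value) => value && Object.keys(value).length > 0);
};
```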
@@ -29,7 +29,14 @@ const processFunctionResults = async (entry) => {
 const result = {};
 for (const key of Object.keys(entry)) {
 const field = entry?.[key];
-
+const isArray = Array.isArray(field);
+if (typeof field === "object" && isArray) {
+result[key] = await Promise.all(
+field.map(async (el) => {
+return await processFunctionResults(el);
+})
+);
+} else if (typeof field === "object") {
 result[key] = await processFunctionResults(
 field
 );
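This is the behavioural change behind the `+8 -1` on `processContentDeclaration`: array fields get their own branch, each element is run through `processFunctionResults`, and the results are collected with `Promise.all`, so arrays keep their shape in the output. A simplified model of the new traversal (types loosened to `unknown`, the real code batches async function fields through a promise list instead of awaiting them one by one, and the sample data is illustrative):

```ts
const processFunctionResults = async (entry: unknown): Promise<unknown> => {
  if (entry && typeof entry === 'object') {
    const result: Record<string, unknown> = {};
    for (const key of Object.keys(entry)) {
      const field = (entry as Record<string, unknown>)[key];
      if (Array.isArray(field)) {
        // New branch: each array element is processed recursively.
        result[key] = await Promise.all(field.map((el) => processFunctionResults(el)));
      } else if (field && typeof field === 'object') {
        result[key] = await processFunctionResults(field);
      } else if (typeof field === 'function') {
        // Function fields (sync or async) are replaced by their resolved value.
        result[key] = await (field as () => unknown)();
      } else {
        result[key] = field;
      }
    }
    return result;
  }
  return entry;
};

// Example: function fields nested inside array elements are resolved while the
// array itself stays an array.
processFunctionResults({
  id: 'team',
  members: [
    { name: 'Alice', bio: async () => 'Maintains the watcher' },
    { name: 'Bob', bio: () => 'Works on the transpiler' },
  ],
}).then((result) => console.log(result));
// → { id: 'team', members: [ { name: 'Alice', bio: 'Maintains the watcher' },
//                            { name: 'Bob', bio: 'Works on the transpiler' } ] }
```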
@@ -1 +1 @@
-
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/processContentDeclaration.ts"],"sourcesContent":["import { resolve } from 'path';\nimport type {\n Content,\n
+
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/processContentDeclaration.ts"],"sourcesContent":["import { resolve } from 'path';\nimport type {\n Content,\n DeclarationContent,\n FlatContent,\n FlatContentValue,\n} from '@intlayer/core';\nimport { loadContentDeclaration } from './loadContentDeclaration';\n\n/**\n * Function to replace function and async function fields with their results in the object\n */\nconst processFunctionResults = async (entry: Content): Promise<FlatContent> => {\n if (entry && typeof entry === 'object') {\n const promises: Promise<void>[] = [];\n const result: FlatContent = {};\n\n for (const key of Object.keys(entry)) {\n const field = entry?.[key];\n const isArray = Array.isArray(field);\n\n if (typeof field === 'object' && isArray) {\n result[key] = (await Promise.all(\n field.map(async (el) => {\n return await processFunctionResults(el as Content);\n })\n )) as unknown as FlatContentValue;\n } else if (typeof field === 'object') {\n result[key] = (await processFunctionResults(\n field as Content\n )) as FlatContentValue;\n } else if (typeof field === 'function') {\n // Wait for the function to resolve if it's an async function\n const promise = (async () => {\n // Execute the function and await the result if it's a Promise\n const value = await field();\n\n result[key] = value as FlatContentValue;\n })();\n promises.push(promise);\n } else {\n result[key] = field as FlatContentValue;\n }\n }\n\n // Wait for all async operations to complete\n await Promise.all(promises);\n\n return result;\n }\n\n return entry;\n};\n\n/**\n * Function to load, process the module and return the Intlayer DeclarationContent from the module file\n */\nexport const processContentDeclaration = async (file: string) => {\n try {\n const functionPath = resolve(file);\n const entry = loadContentDeclaration(functionPath);\n\n if (!entry) {\n console.error('No entry found in module:', functionPath);\n return;\n }\n\n return (await processFunctionResults(entry)) as DeclarationContent;\n } catch (error) {\n console.error('Error processing module:', error);\n }\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAOxB,oCAAuC;AAKvC,MAAM,yBAAyB,OAAO,UAAyC;AAC7E,MAAI,SAAS,OAAO,UAAU,UAAU;AACtC,UAAM,WAA4B,CAAC;AACnC,UAAM,SAAsB,CAAC;AAE7B,eAAW,OAAO,OAAO,KAAK,KAAK,GAAG;AACpC,YAAM,QAAQ,QAAQ,GAAG;AACzB,YAAM,UAAU,MAAM,QAAQ,KAAK;AAEnC,UAAI,OAAO,UAAU,YAAY,SAAS;AACxC,eAAO,GAAG,IAAK,MAAM,QAAQ;AAAA,UAC3B,MAAM,IAAI,OAAO,OAAO;AACtB,mBAAO,MAAM,uBAAuB,EAAa;AAAA,UACnD,CAAC;AAAA,QACH;AAAA,MACF,WAAW,OAAO,UAAU,UAAU;AACpC,eAAO,GAAG,IAAK,MAAM;AAAA,UACnB;AAAA,QACF;AAAA,MACF,WAAW,OAAO,UAAU,YAAY;AAEtC,cAAM,WAAW,YAAY;AAE3B,gBAAM,QAAQ,MAAM,MAAM;AAE1B,iBAAO,GAAG,IAAI;AAAA,QAChB,GAAG;AACH,iBAAS,KAAK,OAAO;AAAA,MACvB,OAAO;AACL,eAAO,GAAG,IAAI;AAAA,MAChB;AAAA,IACF;AAGA,UAAM,QAAQ,IAAI,QAAQ;AAE1B,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAKO,MAAM,4BAA4B,OAAO,SAAiB;AAC/D,MAAI;AACF,UAAM,mBAAe,qBAAQ,IAAI;AACjC,UAAM,YAAQ,sDAAuB,YAAY;AAEjD,QAAI,CAAC,OAAO;AACV,cAAQ,MAAM,6BAA6B,YAAY;AACvD;AAAA,IACF;AAEA,WAAQ,MAAM,uBAAuB,KAAK;AAAA,EAC5C,SAAS,OAAO;AACd,YAAQ,MAAM,4BAA4B,KAAK;AAAA,EACjD;AACF;","names":[]}
@@ -1,8 +1,8 @@
-import {
+import { DeclarationContent } from '@intlayer/core';
 
 /**
-* Function to load, process the module and return the Intlayer
+* Function to load, process the module and return the Intlayer DeclarationContent from the module file
 */
-declare const processContentDeclaration: (file: string) => Promise<
+declare const processContentDeclaration: (file: string) => Promise<DeclarationContent | undefined>;
 
 export { processContentDeclaration };
@@ -30,17 +30,17 @@ var import_config = require("@intlayer/config");
 var import_core = require("@intlayer/core");
 var import_createModuleAugmentation = require('./createModuleAugmentation.cjs');
 const import_meta = {};
-const { content
+const { content } = (0, import_config.getConfiguration)();
 const { typesDir } = content;
 const isESModule = typeof import_meta.url === "string";
 const requireFunction = isESModule ? (0, import_module.createRequire)(import_meta.url) : require;
+const getFirstValue = (obj) => Object.values(obj)[0];
 const generateTypeScriptType = (obj) => {
 let typeDefinition = ``;
 const typeName = (0, import_createModuleAugmentation.getTypeName)(obj.id);
-typeDefinition += `export type ${typeName} =
-`;
+typeDefinition += `export type ${typeName} = `;
 typeDefinition += generateTypeScriptTypeContent(obj);
-typeDefinition += "
+typeDefinition += ";\n\n";
 return typeDefinition;
 };
 const isReactNode = (node) => typeof node?.key !== "undefined" && typeof node?.props !== "undefined";
@@ -52,54 +52,40 @@ const generateTypeScriptTypeContent = (obj) => {
 if (isReactNodeValue) {
 return `JSX.Element`;
 }
-
+if (
+// Check if the value is a typed node
+obj.nodeType === import_core.NodeType.Translation
+) {
+const { nodeType, ...content2 } = obj;
+const languageValue = getFirstValue(
+content2
+);
+const tsType = generateTypeScriptTypeContent(languageValue);
+return `${tsType}`;
+} else if (
+// Check if the value is a typed node
+obj.nodeType === import_core.NodeType.Enumeration
+) {
+const { nodeType, ...content2 } = obj;
+const quantifiedValue = getFirstValue(
+content2
+);
+const tsType = generateTypeScriptTypeContent(quantifiedValue);
+return `(quantity: number) => ${tsType}`;
+} else if (Array.isArray(obj)) {
+const arrayType = generateTypeScriptTypeContent(obj[0]);
+return `${arrayType}[]`;
+}
+let typeDefinition = "{";
 for (const [key, value] of Object.entries(obj)) {
-const
-
-
-
-
-const tsType = generateTypeScriptTypeContent(
-value?.[internationalization.defaultLocale]
-);
-typeDefinition += ` ${key}: ${tsType},
-`;
-} else if (
-// Check if the value is a typed node
-typeof value === "object" && nodeType === import_core.NodeType.Enumeration
-) {
-const tsType = generateTypeScriptTypeContent(
-value?.[internationalization.defaultLocale]
-);
-typeDefinition += ` ${key}: (quantity: number) => ${tsType},
-`;
-} else if (
-// Check if the value is a nested object
-typeof value === "object"
-) {
-const isArray = Array.isArray(value);
-if (isArray) {
-const arrayType = generateTypeScriptTypeContent(value);
-typeDefinition += ` ${key}: ${arrayType}[],
-`;
-} else {
-const nestedType = generateTypeScriptTypeContent(value);
-typeDefinition += ` ${key}: {${nestedType}},
-`;
-}
-} else if (
-// Check if the value is an 'id'
-typeof value === "string" && key === "id"
-) {
-const tsType = `"${value}"`;
-typeDefinition += ` ${key}: ${tsType},
-`;
-} else {
-const tsType = typeof value;
-typeDefinition += ` ${key}: ${tsType},
-`;
+const isLast = Object.keys(obj).indexOf(key) === Object.keys(obj).length - 1;
+const nestedType = generateTypeScriptTypeContent(value);
+typeDefinition += `'${key}': ${nestedType}`;
+if (!isLast) {
+typeDefinition += ",";
 }
 }
+typeDefinition += "}";
 return typeDefinition;
 };
 const createTypes = (dictionariesPaths) => {
@@ -108,9 +94,9 @@ const createTypes = (dictionariesPaths) => {
 (0, import_fs.mkdirSync)(typesDir, { recursive: true });
 }
 for (const dictionaryPath of dictionariesPaths) {
-const
-const dictionaryName =
-const typeDefinition = generateTypeScriptType(
+const dictionary = requireFunction(dictionaryPath);
+const dictionaryName = dictionary.id;
+const typeDefinition = generateTypeScriptType(dictionary);
 const outputPath = (0, import_path.resolve)(typesDir, `${dictionaryName}.d.ts`);
 (0, import_fs.writeFileSync)(outputPath, typeDefinition);
 resultTypesPaths.push(outputPath);
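Taken together, the `createType` hunks replace the old per-key `nodeType` switch with one recursive walk: translation nodes collapse to the type of their first locale value, enumeration nodes become `(quantity: number) => ...`, arrays are typed from their first element, and primitives map to `typeof`. A self-contained approximation of that walk (the literal `nodeType` strings and the sample dictionary are assumptions, the JSX/React-node case is omitted, and the real code wraps the result in `export type <Name> = ...;` using `getTypeName` and `NodeType` from the package's own modules):

```ts
type DictionaryValue =
  | string
  | number
  | boolean
  | DictionaryValue[]
  | { [key: string]: DictionaryValue };

const generateContent = (obj: DictionaryValue): string => {
  if (typeof obj !== 'object' || obj === null) {
    return `${typeof obj}`;
  }
  const nodeType = (obj as { nodeType?: string }).nodeType;
  if (nodeType === 'translation' || nodeType === 'enumeration') {
    // Typed nodes collapse to the type of their first value.
    const { nodeType: _ignored, ...content } = obj as { [key: string]: DictionaryValue };
    const inner = generateContent(Object.values(content)[0]);
    return nodeType === 'enumeration' ? `(quantity: number) => ${inner}` : inner;
  }
  if (Array.isArray(obj)) {
    // Arrays are typed from their first element.
    return `${generateContent(obj[0])}[]`;
  }
  // Plain objects recurse key by key, quoting the keys.
  const entries = Object.entries(obj).map(([key, value]) => `'${key}': ${generateContent(value)}`);
  return `{${entries.join(',')}}`;
};

console.log(
  generateContent({
    id: 'cart',
    title: { nodeType: 'translation', en: 'Cart', fr: 'Panier' },
    itemCount: { nodeType: 'enumeration', '1': 'One item', '>1': 'Several items' },
  })
);
// → {'id': string,'title': string,'itemCount': (quantity: number) => string}
```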
@@ -1 +1 @@
-
{"version":3,"sources":["../../../../src/transpiler/dictionary_to_type/createType.ts"],"sourcesContent":["import { existsSync, mkdirSync, writeFileSync } from 'fs';\nimport { createRequire } from 'module';\nimport { resolve } from 'path';\nimport { getConfiguration } from '@intlayer/config';\nimport {\n NodeType,\n type
+
{"version":3,"sources":["../../../../src/transpiler/dictionary_to_type/createType.ts"],"sourcesContent":["import { existsSync, mkdirSync, writeFileSync } from 'fs';\nimport { createRequire } from 'module';\nimport { resolve } from 'path';\nimport { getConfiguration } from '@intlayer/config';\nimport {\n NodeType,\n type Dictionary,\n type DictionaryValue,\n type TypedNode,\n} from '@intlayer/core';\nimport { getTypeName } from './createModuleAugmentation';\n\nconst { content } = getConfiguration();\nconst { typesDir } = content;\n\nconst isESModule = typeof import.meta.url === 'string';\nconst requireFunction = isESModule ? createRequire(import.meta.url) : require;\n\nconst getFirstValue = (obj: Record<string, DictionaryValue>): DictionaryValue =>\n Object.values(obj)[0];\n\n/**\n *\n * This function generates a TypeScript type definition from a JSON object\n *\n * Example:\n *\n * const input = {\n * id: '1',\n * name: 'John Doe',\n * address: {\n * id: '2',\n * street: '123 Main St',\n * city: 'Springfield',\n * }\n * };\n *\n * const result = generateTypeScriptType(input, 'RootObject');\n * console.log(result);\n *\n * Output:\n *\n * type RootObject = {\n * id: '1',\n * name: string,\n * address: {\n * id: '2',\n * street: string,\n * city: string,\n * },\n * };\n *\n */\nexport const generateTypeScriptType = (obj: Dictionary): string => {\n let typeDefinition = ``;\n\n const typeName = getTypeName(obj.id);\n\n typeDefinition += `export type ${typeName} = `;\n typeDefinition += generateTypeScriptTypeContent(obj as DictionaryValue);\n typeDefinition += ';\\n\\n';\n\n return typeDefinition;\n};\n\nconst isReactNode = (node: Record<string, unknown>): boolean =>\n typeof node?.key !== 'undefined' && typeof node?.props !== 'undefined';\n\n// eslint-disable-next-line sonarjs/cognitive-complexity\nexport const generateTypeScriptTypeContent = (obj: DictionaryValue): string => {\n if (typeof obj !== 'object' || obj === null) {\n return `${typeof obj}`;\n }\n\n const isReactNodeValue = isReactNode(obj as Record<string, unknown>);\n\n if (isReactNodeValue) {\n // ReactNode handling\n return `JSX.Element`;\n }\n\n if (\n // Check if the value is a typed node\n (obj as TypedNode).nodeType === NodeType.Translation\n ) {\n const { nodeType, ...content } = obj as TypedNode;\n\n const languageValue: DictionaryValue = getFirstValue(\n content as Record<string, DictionaryValue>\n );\n\n const tsType = generateTypeScriptTypeContent(languageValue);\n return `${tsType}`;\n } else if (\n // Check if the value is a typed node\n (obj as TypedNode).nodeType === NodeType.Enumeration\n ) {\n const { nodeType, ...content } = obj as TypedNode;\n\n const quantifiedValue: DictionaryValue = getFirstValue(\n content as Record<string, DictionaryValue>\n );\n\n const tsType = generateTypeScriptTypeContent(quantifiedValue);\n\n return `(quantity: number) => ${tsType}`;\n } else if (Array.isArray(obj)) {\n // Array handling (simplified, assumes non-empty arrays with uniform type)\n const arrayType = generateTypeScriptTypeContent(obj[0] as DictionaryValue);\n\n return `${arrayType}[]`;\n }\n\n let typeDefinition = '{';\n // Nested object, recurse\n for (const [key, value] of Object.entries(obj)) {\n const isLast =\n Object.keys(obj).indexOf(key) === Object.keys(obj).length - 1;\n\n const nestedType = generateTypeScriptTypeContent(value as DictionaryValue);\n\n typeDefinition += `'${key}': ${nestedType}`;\n\n if (!isLast) {\n typeDefinition += ',';\n }\n }\n typeDefinition += '}';\n return typeDefinition;\n};\n\n/**\n * This 
function generates a TypeScript type definition from a JSON object\n */\nexport const createTypes = (dictionariesPaths: string[]): string[] => {\n const resultTypesPaths: string[] = [];\n\n // Create type folders if they don't exist\n if (!existsSync(typesDir)) {\n mkdirSync(typesDir, { recursive: true });\n }\n\n for (const dictionaryPath of dictionariesPaths) {\n const dictionary: Dictionary = requireFunction(dictionaryPath);\n const dictionaryName: string = dictionary.id;\n const typeDefinition: string = generateTypeScriptType(dictionary);\n\n const outputPath: string = resolve(typesDir, `${dictionaryName}.d.ts`);\n\n writeFileSync(outputPath, typeDefinition);\n\n resultTypesPaths.push(outputPath);\n }\n\n return resultTypesPaths;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAAqD;AACrD,oBAA8B;AAC9B,kBAAwB;AACxB,oBAAiC;AACjC,kBAKO;AACP,sCAA4B;AAV5B;AAYA,MAAM,EAAE,QAAQ,QAAI,gCAAiB;AACrC,MAAM,EAAE,SAAS,IAAI;AAErB,MAAM,aAAa,OAAO,YAAY,QAAQ;AAC9C,MAAM,kBAAkB,iBAAa,6BAAc,YAAY,GAAG,IAAI;AAEtE,MAAM,gBAAgB,CAAC,QACrB,OAAO,OAAO,GAAG,EAAE,CAAC;AAkCf,MAAM,yBAAyB,CAAC,QAA4B;AACjE,MAAI,iBAAiB;AAErB,QAAM,eAAW,6CAAY,IAAI,EAAE;AAEnC,oBAAkB,eAAe,QAAQ;AACzC,oBAAkB,8BAA8B,GAAsB;AACtE,oBAAkB;AAElB,SAAO;AACT;AAEA,MAAM,cAAc,CAAC,SACnB,OAAO,MAAM,QAAQ,eAAe,OAAO,MAAM,UAAU;AAGtD,MAAM,gCAAgC,CAAC,QAAiC;AAC7E,MAAI,OAAO,QAAQ,YAAY,QAAQ,MAAM;AAC3C,WAAO,GAAG,OAAO,GAAG;AAAA,EACtB;AAEA,QAAM,mBAAmB,YAAY,GAA8B;AAEnE,MAAI,kBAAkB;AAEpB,WAAO;AAAA,EACT;AAEA;AAAA;AAAA,IAEG,IAAkB,aAAa,qBAAS;AAAA,IACzC;AACA,UAAM,EAAE,UAAU,GAAGA,SAAQ,IAAI;AAEjC,UAAM,gBAAiC;AAAA,MACrCA;AAAA,IACF;AAEA,UAAM,SAAS,8BAA8B,aAAa;AAC1D,WAAO,GAAG,MAAM;AAAA,EAClB;AAAA;AAAA,IAEG,IAAkB,aAAa,qBAAS;AAAA,IACzC;AACA,UAAM,EAAE,UAAU,GAAGA,SAAQ,IAAI;AAEjC,UAAM,kBAAmC;AAAA,MACvCA;AAAA,IACF;AAEA,UAAM,SAAS,8BAA8B,eAAe;AAE5D,WAAO,yBAAyB,MAAM;AAAA,EACxC,WAAW,MAAM,QAAQ,GAAG,GAAG;AAE7B,UAAM,YAAY,8BAA8B,IAAI,CAAC,CAAoB;AAEzE,WAAO,GAAG,SAAS;AAAA,EACrB;AAEA,MAAI,iBAAiB;AAErB,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,GAAG,GAAG;AAC9C,UAAM,SACJ,OAAO,KAAK,GAAG,EAAE,QAAQ,GAAG,MAAM,OAAO,KAAK,GAAG,EAAE,SAAS;AAE9D,UAAM,aAAa,8BAA8B,KAAwB;AAEzE,sBAAkB,IAAI,GAAG,MAAM,UAAU;AAEzC,QAAI,CAAC,QAAQ;AACX,wBAAkB;AAAA,IACpB;AAAA,EACF;AACA,oBAAkB;AAClB,SAAO;AACT;AAKO,MAAM,cAAc,CAAC,sBAA0C;AACpE,QAAM,mBAA6B,CAAC;AAGpC,MAAI,KAAC,sBAAW,QAAQ,GAAG;AACzB,6BAAU,UAAU,EAAE,WAAW,KAAK,CAAC;AAAA,EACzC;AAEA,aAAW,kBAAkB,mBAAmB;AAC9C,UAAM,aAAyB,gBAAgB,cAAc;AAC7D,UAAM,iBAAyB,WAAW;AAC1C,UAAM,iBAAyB,uBAAuB,UAAU;AAEhE,UAAM,iBAAqB,qBAAQ,UAAU,GAAG,cAAc,OAAO;AAErE,iCAAc,YAAY,cAAc;AAExC,qBAAiB,KAAK,UAAU;AAAA,EAClC;AAEA,SAAO;AACT;","names":["content"]}
@@ -1,4 +1,4 @@
-import {
+import { Dictionary, DictionaryValue } from '@intlayer/core';
 
 /**
 *
@@ -32,8 +32,8 @@ import { ContentModule, Content } from '@intlayer/core';
 * };
 *
 */
-declare const generateTypeScriptType: (obj:
-declare const generateTypeScriptTypeContent: (obj:
+declare const generateTypeScriptType: (obj: Dictionary) => string;
+declare const generateTypeScriptTypeContent: (obj: DictionaryValue) => string;
 /**
 * This function generates a TypeScript type definition from a JSON object
 */
@@ -1 +1 @@
-
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/i18n_dictionary/buildI18nDictionary.ts"],"sourcesContent":["import { mkdir, writeFile } from 'fs/promises';\nimport { resolve } from 'path';\nimport { getConfiguration } from '@intlayer/config';\nimport type {
+
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/i18n_dictionary/buildI18nDictionary.ts"],"sourcesContent":["import { mkdir, writeFile } from 'fs/promises';\nimport { resolve } from 'path';\nimport { getConfiguration } from '@intlayer/config';\nimport type { DeclarationContent } from '@intlayer/core';\nimport {\n processContentDeclaration,\n extractObjectsWithId,\n} from '../intlayer_dictionary/index';\nimport {\n type I18nDictionariesOutput,\n createI18nDictionaries,\n} from './convertContentDeclarationInto18nDictionaries';\n\nconst { content } = getConfiguration();\nconst { i18nDictionariesDir } = content;\n\ntype DictionariesDeclaration = Record<string, I18nDictionariesOutput>;\n\n/**\n * This function writes the dictionaries to the file system\n */\nconst writeDictionary = async (\n dictionariesDeclaration: DictionariesDeclaration\n) => {\n const resultDictionariesPaths: string[] = [];\n\n for (const [nameSpace, localContent] of Object.entries(\n dictionariesDeclaration\n )) {\n for await (const [locale, content] of Object.entries(localContent)) {\n const contentString = JSON.stringify(content);\n\n const outputFileName = `${nameSpace}.json`;\n const resultDirPath = resolve(i18nDictionariesDir, locale);\n const resultFilePath = resolve(resultDirPath, outputFileName);\n\n // Create the dictionaries folder if it doesn't exist\n await mkdir(resultDirPath, { recursive: true });\n\n // Create the json file\n await writeFile(resultFilePath, contentString, 'utf8').catch((err) => {\n console.error(`Error creating ${outputFileName}:`, err);\n });\n\n resultDictionariesPaths.push(resultFilePath);\n }\n }\n\n return resultDictionariesPaths;\n};\n\n/**\n * This function transpile content declaration to i18n dictionaries\n */\nexport const buildI18nDictionary = async (\n contentDeclarationsPaths: string[] | string\n) => {\n const resultDictionariesPaths: string[] = [];\n\n if (typeof contentDeclarationsPaths === 'string') {\n contentDeclarationsPaths = [contentDeclarationsPaths];\n }\n\n for await (const contentDeclarationPath of contentDeclarationsPaths) {\n const result = await processContentDeclaration(contentDeclarationPath);\n\n if (!result) {\n continue;\n }\n\n const nestedContent: DeclarationContent[] = extractObjectsWithId(result);\n\n // Create dictionaries for each nested content and format them\n const dictionariesDeclaration: DictionariesDeclaration =\n nestedContent.reduce((acc, content) => {\n const id: string = content.id;\n const i18Content = createI18nDictionaries(content);\n\n return {\n ...acc,\n [id]: i18Content,\n };\n }, {});\n\n // Write the dictionaries to the file system\n const dictionariesPaths: string[] = await writeDictionary(\n dictionariesDeclaration\n );\n\n // Add the paths to the result\n resultDictionariesPaths.push(...dictionariesPaths);\n }\n\n return 
resultDictionariesPaths;\n};\n"],"mappings":"AAAA,SAAS,OAAO,iBAAiB;AACjC,SAAS,eAAe;AACxB,SAAS,wBAAwB;AAEjC;AAAA,EACE;AAAA,EACA;AAAA,OACK;AACP;AAAA,EAEE;AAAA,OACK;AAEP,MAAM,EAAE,QAAQ,IAAI,iBAAiB;AACrC,MAAM,EAAE,oBAAoB,IAAI;AAOhC,MAAM,kBAAkB,OACtB,4BACG;AACH,QAAM,0BAAoC,CAAC;AAE3C,aAAW,CAAC,WAAW,YAAY,KAAK,OAAO;AAAA,IAC7C;AAAA,EACF,GAAG;AACD,qBAAiB,CAAC,QAAQA,QAAO,KAAK,OAAO,QAAQ,YAAY,GAAG;AAClE,YAAM,gBAAgB,KAAK,UAAUA,QAAO;AAE5C,YAAM,iBAAiB,GAAG,SAAS;AACnC,YAAM,gBAAgB,QAAQ,qBAAqB,MAAM;AACzD,YAAM,iBAAiB,QAAQ,eAAe,cAAc;AAG5D,YAAM,MAAM,eAAe,EAAE,WAAW,KAAK,CAAC;AAG9C,YAAM,UAAU,gBAAgB,eAAe,MAAM,EAAE,MAAM,CAAC,QAAQ;AACpE,gBAAQ,MAAM,kBAAkB,cAAc,KAAK,GAAG;AAAA,MACxD,CAAC;AAED,8BAAwB,KAAK,cAAc;AAAA,IAC7C;AAAA,EACF;AAEA,SAAO;AACT;AAKO,MAAM,sBAAsB,OACjC,6BACG;AACH,QAAM,0BAAoC,CAAC;AAE3C,MAAI,OAAO,6BAA6B,UAAU;AAChD,+BAA2B,CAAC,wBAAwB;AAAA,EACtD;AAEA,mBAAiB,0BAA0B,0BAA0B;AACnE,UAAM,SAAS,MAAM,0BAA0B,sBAAsB;AAErE,QAAI,CAAC,QAAQ;AACX;AAAA,IACF;AAEA,UAAM,gBAAsC,qBAAqB,MAAM;AAGvE,UAAM,0BACJ,cAAc,OAAO,CAAC,KAAKA,aAAY;AACrC,YAAM,KAAaA,SAAQ;AAC3B,YAAM,aAAa,uBAAuBA,QAAO;AAEjD,aAAO;AAAA,QACL,GAAG;AAAA,QACH,CAAC,EAAE,GAAG;AAAA,MACR;AAAA,IACF,GAAG,CAAC,CAAC;AAGP,UAAM,oBAA8B,MAAM;AAAA,MACxC;AAAA,IACF;AAGA,4BAAwB,KAAK,GAAG,iBAAiB;AAAA,EACnD;AAEA,SAAO;AACT;","names":["content"]}
@@ -7,9 +7,9 @@ const { content } = getConfiguration();
 const { dictionariesDir } = content;
 const writeDictionary = async (dictionaries) => {
 const resultDictionariesPaths = [];
-for await (const
-const contentString = JSON.stringify(
-const id =
+for await (const dictionaryContent of dictionaries) {
+const contentString = JSON.stringify(dictionaryContent);
+const id = dictionaryContent.id;
 const outputFileName = `${id}.json`;
 const resultFilePath = resolve(dictionariesDir, outputFileName);
 await writeFile(resultFilePath, contentString, "utf8").catch((err) => {
@@ -1 +1 @@
-
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/buildIntlayerDictionary.ts"],"sourcesContent":["import { mkdir, writeFile } from 'fs/promises';\nimport { resolve } from 'path';\nimport { getConfiguration } from '@intlayer/config';\nimport type {
+
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/buildIntlayerDictionary.ts"],"sourcesContent":["import { mkdir, writeFile } from 'fs/promises';\nimport { resolve } from 'path';\nimport { getConfiguration } from '@intlayer/config';\nimport type { DeclarationContent } from '@intlayer/core';\nimport { extractObjectsWithId } from './extractNestedJSON';\nimport { processContentDeclaration } from './processContentDeclaration';\n\nconst { content } = getConfiguration();\nconst { dictionariesDir } = content;\n\nconst writeDictionary = async (dictionaries: DeclarationContent[]) => {\n const resultDictionariesPaths: string[] = [];\n\n for await (const dictionaryContent of dictionaries) {\n const contentString = JSON.stringify(dictionaryContent);\n\n const id = dictionaryContent.id;\n const outputFileName = `${id}.json`;\n const resultFilePath = resolve(dictionariesDir, outputFileName);\n\n // Create the json file\n await writeFile(resultFilePath, contentString, 'utf8').catch((err) => {\n console.error(`Error creating ${outputFileName}:`, err);\n });\n\n resultDictionariesPaths.push(resultFilePath);\n }\n\n return resultDictionariesPaths;\n};\n\n/**\n * This function transpile the bundled code to to make dictionaries as JSON files\n */\nexport const buildIntlayerDictionary = async (\n contentDeclarationsPaths: string[] | string\n) => {\n const resultDictionariesPaths: string[] = [];\n\n if (typeof contentDeclarationsPaths === 'string') {\n contentDeclarationsPaths = [contentDeclarationsPaths];\n }\n\n // Create the dictionaries folder if it doesn't exist\n await mkdir(resolve(dictionariesDir), { recursive: true });\n\n for await (const contentDeclarationPath of contentDeclarationsPaths) {\n const result = await processContentDeclaration(contentDeclarationPath);\n\n if (!result) {\n continue;\n }\n\n const nestedContent: DeclarationContent[] = extractObjectsWithId(result);\n\n const contentWithFilePath: DeclarationContent[] = nestedContent.map(\n (content) => ({\n ...content,\n filePath: contentDeclarationPath,\n })\n );\n\n const dictionariesPaths: string[] =\n await writeDictionary(contentWithFilePath);\n\n resultDictionariesPaths.push(...dictionariesPaths);\n }\n\n return resultDictionariesPaths;\n};\n"],"mappings":"AAAA,SAAS,OAAO,iBAAiB;AACjC,SAAS,eAAe;AACxB,SAAS,wBAAwB;AAEjC,SAAS,4BAA4B;AACrC,SAAS,iCAAiC;AAE1C,MAAM,EAAE,QAAQ,IAAI,iBAAiB;AACrC,MAAM,EAAE,gBAAgB,IAAI;AAE5B,MAAM,kBAAkB,OAAO,iBAAuC;AACpE,QAAM,0BAAoC,CAAC;AAE3C,mBAAiB,qBAAqB,cAAc;AAClD,UAAM,gBAAgB,KAAK,UAAU,iBAAiB;AAEtD,UAAM,KAAK,kBAAkB;AAC7B,UAAM,iBAAiB,GAAG,EAAE;AAC5B,UAAM,iBAAiB,QAAQ,iBAAiB,cAAc;AAG9D,UAAM,UAAU,gBAAgB,eAAe,MAAM,EAAE,MAAM,CAAC,QAAQ;AACpE,cAAQ,MAAM,kBAAkB,cAAc,KAAK,GAAG;AAAA,IACxD,CAAC;AAED,4BAAwB,KAAK,cAAc;AAAA,EAC7C;AAEA,SAAO;AACT;AAKO,MAAM,0BAA0B,OACrC,6BACG;AACH,QAAM,0BAAoC,CAAC;AAE3C,MAAI,OAAO,6BAA6B,UAAU;AAChD,+BAA2B,CAAC,wBAAwB;AAAA,EACtD;AAGA,QAAM,MAAM,QAAQ,eAAe,GAAG,EAAE,WAAW,KAAK,CAAC;AAEzD,mBAAiB,0BAA0B,0BAA0B;AACnE,UAAM,SAAS,MAAM,0BAA0B,sBAAsB;AAErE,QAAI,CAAC,QAAQ;AACX;AAAA,IACF;AAEA,UAAM,gBAAsC,qBAAqB,MAAM;AAEvE,UAAM,sBAA4C,cAAc;AAAA,MAC9D,CAACA,cAAa;AAAA,QACZ,GAAGA;AAAA,QACH,UAAU;AAAA,MACZ;AAAA,IACF;AAEA,UAAM,oBACJ,MAAM,gBAAgB,mBAAmB;AAE3C,4BAAwB,KAAK,GAAG,iBAAiB;AAAA,EACnD;AAEA,SAAO;AACT;","names":["content"]}
@@ -1,4 +1,4 @@
-import {
+import { DeclarationContent } from '@intlayer/core';
 
 /**
 *
@@ -39,6 +39,6 @@ import { ContentModule } from '@intlayer/core';
 * }]
 *
 */
-declare const extractObjectsWithId: (input:
+declare const extractObjectsWithId: (input: DeclarationContent) => DeclarationContent[];
 
 export { extractObjectsWithId };
@@ -1 +1 @@
-
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/extractNestedJSON.ts"],"sourcesContent":["import type { Content,
+
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/extractNestedJSON.ts"],"sourcesContent":["import type { Content, DeclarationContent } from '@intlayer/core';\n\n/**\n *\n * This function extracts all nested objects with an 'id' field from the input object and returns them as an array\n *\n * Example:\n *\n * const input = {\n * id: '1',\n * name: 'John Doe',\n * address: {\n * id: '2',\n * street: '123 Main St',\n * city: 'Springfield',\n * state: 'IL'\n * },\n * };\n * const result = extractObjectsWithId(input);\n * console.log(result);\n *\n * Output:\n *\n * [{\n * id: '1',\n * name: 'John Doe',\n * address: {\n * id: '2',\n * street: '123 Main St',\n * city: 'Springfield',\n * state: 'IL'\n * }\n * },\n * {\n * id: '2',\n * street: '123 Main St',\n * city: 'Springfield',\n * state: 'IL'\n * }]\n *\n */\nexport const extractObjectsWithId = (\n input: DeclarationContent\n): DeclarationContent[] => {\n // Function to recursively search and extract nested objects with an 'id'\n const search = (obj: Content, results: DeclarationContent[]): void => {\n if (obj && typeof obj === 'object') {\n if (Object.hasOwn(obj, 'id')) {\n results.push(obj as DeclarationContent);\n }\n for (const key of Object.keys(obj)) {\n if (typeof obj[key] === 'object') {\n search(obj[key] as Content, results);\n }\n }\n }\n };\n\n const results: DeclarationContent[] = [];\n search(input, results);\n return results;\n};\n"],"mappings":"AAyCO,MAAM,uBAAuB,CAClC,UACyB;AAEzB,QAAM,SAAS,CAAC,KAAcA,aAAwC;AACpE,QAAI,OAAO,OAAO,QAAQ,UAAU;AAClC,UAAI,OAAO,OAAO,KAAK,IAAI,GAAG;AAC5B,QAAAA,SAAQ,KAAK,GAAyB;AAAA,MACxC;AACA,iBAAW,OAAO,OAAO,KAAK,GAAG,GAAG;AAClC,YAAI,OAAO,IAAI,GAAG,MAAM,UAAU;AAChC,iBAAO,IAAI,GAAG,GAAcA,QAAO;AAAA,QACrC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,UAAgC,CAAC;AACvC,SAAO,OAAO,OAAO;AACrB,SAAO;AACT;","names":["results"]}
@@ -1,10 +1,10 @@
-import {
+import { DeclarationContent } from '@intlayer/core';
 
 /**
 * Load the content declaration from the given path
 *
 * Accepts JSON, JS, MJS and TS files as configuration
 */
-declare const loadContentDeclaration: (contentDeclarationFilePath: string) =>
+declare const loadContentDeclaration: (contentDeclarationFilePath: string) => DeclarationContent | undefined;
 
 export { loadContentDeclaration };
@@ -1 +1 @@
-
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/loadContentDeclaration.ts"],"sourcesContent":["/* eslint-disable @typescript-eslint/no-var-requires */\nimport { createRequire } from 'module';\nimport { type Context, runInNewContext } from 'vm';\nimport type {
+
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/loadContentDeclaration.ts"],"sourcesContent":["/* eslint-disable @typescript-eslint/no-var-requires */\nimport { createRequire } from 'module';\nimport { type Context, runInNewContext } from 'vm';\nimport type { DeclarationContent } from '@intlayer/core';\nimport { type BuildOptions, buildSync, type BuildResult } from 'esbuild';\n\nconst isESModule = typeof import.meta.url === 'string';\n\nconst sandboxContext: Context = {\n exports: {\n default: {},\n },\n module: {\n exports: {},\n },\n console,\n require: isESModule ? createRequire(import.meta.url) : require,\n};\n\nconst transformationOption: BuildOptions = {\n loader: {\n '.js': 'js',\n '.jsx': 'jsx',\n '.mjs': 'js',\n '.ts': 'ts',\n '.tsx': 'tsx',\n '.cjs': 'js',\n '.json': 'json',\n },\n format: 'cjs', // Output format as commonjs\n target: 'es2017',\n packages: 'external',\n write: false,\n bundle: true,\n};\n\nconst filterValidContentDeclaration = (\n contentDeclaration: DeclarationContent\n): DeclarationContent => {\n // @TODO Implement filtering of valid content declaration\n return contentDeclaration;\n};\n\n/**\n * Load the content declaration from the given path\n *\n * Accepts JSON, JS, MJS and TS files as configuration\n */\nexport const loadContentDeclaration = (\n contentDeclarationFilePath: string\n): DeclarationContent | undefined => {\n let contentDeclaration: DeclarationContent | undefined = undefined;\n\n const fileExtension = contentDeclarationFilePath.split('.').pop() ?? '';\n\n try {\n if (fileExtension === 'json') {\n // Assume JSON\n return require(contentDeclarationFilePath);\n }\n\n // Rest is JS, MJS or TS\n\n const moduleResult: BuildResult = buildSync({\n entryPoints: [contentDeclarationFilePath],\n\n ...transformationOption,\n });\n\n const moduleResultString = moduleResult.outputFiles?.[0].text;\n\n if (!moduleResultString) {\n console.error('Configuration file could not be loaded.');\n return undefined;\n }\n\n runInNewContext(moduleResultString, sandboxContext);\n\n if (\n sandboxContext.exports.default &&\n Object.keys(sandboxContext.exports.default).length > 0\n ) {\n // ES Module\n contentDeclaration = sandboxContext.exports.default;\n } else if (\n sandboxContext.module.exports.defaults &&\n Object.keys(sandboxContext.module.exports.defaults).length > 0\n ) {\n // CommonJS\n contentDeclaration = sandboxContext.module.exports.default;\n } else if (\n sandboxContext.module.exports.default &&\n Object.keys(sandboxContext.module.exports.default).length > 0\n ) {\n // ES Module\n contentDeclaration = sandboxContext.module.exports.default;\n } else if (\n sandboxContext.module.exports &&\n Object.keys(sandboxContext.module.exports).length > 0\n ) {\n // Other\n contentDeclaration = sandboxContext.module.exports;\n }\n\n if (typeof contentDeclaration === 'undefined') {\n console.error('Configuration file could not be loaded.');\n return undefined;\n }\n\n return filterValidContentDeclaration(contentDeclaration);\n } catch (error) {\n console.error('Error:', error);\n 
}\n};\n"],"mappings":"AACA,SAAS,qBAAqB;AAC9B,SAAuB,uBAAuB;AAE9C,SAA4B,iBAAmC;AAE/D,MAAM,aAAa,OAAO,YAAY,QAAQ;AAE9C,MAAM,iBAA0B;AAAA,EAC9B,SAAS;AAAA,IACP,SAAS,CAAC;AAAA,EACZ;AAAA,EACA,QAAQ;AAAA,IACN,SAAS,CAAC;AAAA,EACZ;AAAA,EACA;AAAA,EACA,SAAS,aAAa,cAAc,YAAY,GAAG,IAAI;AACzD;AAEA,MAAM,uBAAqC;AAAA,EACzC,QAAQ;AAAA,IACN,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,SAAS;AAAA,EACX;AAAA,EACA,QAAQ;AAAA;AAAA,EACR,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,OAAO;AAAA,EACP,QAAQ;AACV;AAEA,MAAM,gCAAgC,CACpC,uBACuB;AAEvB,SAAO;AACT;AAOO,MAAM,yBAAyB,CACpC,+BACmC;AACnC,MAAI,qBAAqD;AAEzD,QAAM,gBAAgB,2BAA2B,MAAM,GAAG,EAAE,IAAI,KAAK;AAErE,MAAI;AACF,QAAI,kBAAkB,QAAQ;AAE5B,aAAO,QAAQ,0BAA0B;AAAA,IAC3C;AAIA,UAAM,eAA4B,UAAU;AAAA,MAC1C,aAAa,CAAC,0BAA0B;AAAA,MAExC,GAAG;AAAA,IACL,CAAC;AAED,UAAM,qBAAqB,aAAa,cAAc,CAAC,EAAE;AAEzD,QAAI,CAAC,oBAAoB;AACvB,cAAQ,MAAM,yCAAyC;AACvD,aAAO;AAAA,IACT;AAEA,oBAAgB,oBAAoB,cAAc;AAElD,QACE,eAAe,QAAQ,WACvB,OAAO,KAAK,eAAe,QAAQ,OAAO,EAAE,SAAS,GACrD;AAEA,2BAAqB,eAAe,QAAQ;AAAA,IAC9C,WACE,eAAe,OAAO,QAAQ,YAC9B,OAAO,KAAK,eAAe,OAAO,QAAQ,QAAQ,EAAE,SAAS,GAC7D;AAEA,2BAAqB,eAAe,OAAO,QAAQ;AAAA,IACrD,WACE,eAAe,OAAO,QAAQ,WAC9B,OAAO,KAAK,eAAe,OAAO,QAAQ,OAAO,EAAE,SAAS,GAC5D;AAEA,2BAAqB,eAAe,OAAO,QAAQ;AAAA,IACrD,WACE,eAAe,OAAO,WACtB,OAAO,KAAK,eAAe,OAAO,OAAO,EAAE,SAAS,GACpD;AAEA,2BAAqB,eAAe,OAAO;AAAA,IAC7C;AAEA,QAAI,OAAO,uBAAuB,aAAa;AAC7C,cAAQ,MAAM,yCAAyC;AACvD,aAAO;AAAA,IACT;AAEA,WAAO,8BAA8B,kBAAkB;AAAA,EACzD,SAAS,OAAO;AACd,YAAQ,MAAM,UAAU,KAAK;AAAA,EAC/B;AACF;","names":[]}
@@ -1,8 +1,8 @@
-import {
+import { DeclarationContent } from '@intlayer/core';
 
 /**
-* Function to load, process the module and return the Intlayer
+* Function to load, process the module and return the Intlayer DeclarationContent from the module file
 */
-declare const processContentDeclaration: (file: string) => Promise<
+declare const processContentDeclaration: (file: string) => Promise<DeclarationContent | undefined>;
 
 export { processContentDeclaration };
@@ -6,7 +6,14 @@ const processFunctionResults = async (entry) => {
 const result = {};
 for (const key of Object.keys(entry)) {
 const field = entry?.[key];
-
+const isArray = Array.isArray(field);
+if (typeof field === "object" && isArray) {
+result[key] = await Promise.all(
+field.map(async (el) => {
+return await processFunctionResults(el);
+})
+);
+} else if (typeof field === "object") {
 result[key] = await processFunctionResults(
 field
 );
@@ -1 +1 @@
-
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/processContentDeclaration.ts"],"sourcesContent":["import { resolve } from 'path';\nimport type {\n Content,\n
+
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/processContentDeclaration.ts"],"sourcesContent":["import { resolve } from 'path';\nimport type {\n Content,\n DeclarationContent,\n FlatContent,\n FlatContentValue,\n} from '@intlayer/core';\nimport { loadContentDeclaration } from './loadContentDeclaration';\n\n/**\n * Function to replace function and async function fields with their results in the object\n */\nconst processFunctionResults = async (entry: Content): Promise<FlatContent> => {\n if (entry && typeof entry === 'object') {\n const promises: Promise<void>[] = [];\n const result: FlatContent = {};\n\n for (const key of Object.keys(entry)) {\n const field = entry?.[key];\n const isArray = Array.isArray(field);\n\n if (typeof field === 'object' && isArray) {\n result[key] = (await Promise.all(\n field.map(async (el) => {\n return await processFunctionResults(el as Content);\n })\n )) as unknown as FlatContentValue;\n } else if (typeof field === 'object') {\n result[key] = (await processFunctionResults(\n field as Content\n )) as FlatContentValue;\n } else if (typeof field === 'function') {\n // Wait for the function to resolve if it's an async function\n const promise = (async () => {\n // Execute the function and await the result if it's a Promise\n const value = await field();\n\n result[key] = value as FlatContentValue;\n })();\n promises.push(promise);\n } else {\n result[key] = field as FlatContentValue;\n }\n }\n\n // Wait for all async operations to complete\n await Promise.all(promises);\n\n return result;\n }\n\n return entry;\n};\n\n/**\n * Function to load, process the module and return the Intlayer DeclarationContent from the module file\n */\nexport const processContentDeclaration = async (file: string) => {\n try {\n const functionPath = resolve(file);\n const entry = loadContentDeclaration(functionPath);\n\n if (!entry) {\n console.error('No entry found in module:', functionPath);\n return;\n }\n\n return (await processFunctionResults(entry)) as DeclarationContent;\n } catch (error) {\n console.error('Error processing module:', error);\n }\n};\n"],"mappings":"AAAA,SAAS,eAAe;AAOxB,SAAS,8BAA8B;AAKvC,MAAM,yBAAyB,OAAO,UAAyC;AAC7E,MAAI,SAAS,OAAO,UAAU,UAAU;AACtC,UAAM,WAA4B,CAAC;AACnC,UAAM,SAAsB,CAAC;AAE7B,eAAW,OAAO,OAAO,KAAK,KAAK,GAAG;AACpC,YAAM,QAAQ,QAAQ,GAAG;AACzB,YAAM,UAAU,MAAM,QAAQ,KAAK;AAEnC,UAAI,OAAO,UAAU,YAAY,SAAS;AACxC,eAAO,GAAG,IAAK,MAAM,QAAQ;AAAA,UAC3B,MAAM,IAAI,OAAO,OAAO;AACtB,mBAAO,MAAM,uBAAuB,EAAa;AAAA,UACnD,CAAC;AAAA,QACH;AAAA,MACF,WAAW,OAAO,UAAU,UAAU;AACpC,eAAO,GAAG,IAAK,MAAM;AAAA,UACnB;AAAA,QACF;AAAA,MACF,WAAW,OAAO,UAAU,YAAY;AAEtC,cAAM,WAAW,YAAY;AAE3B,gBAAM,QAAQ,MAAM,MAAM;AAE1B,iBAAO,GAAG,IAAI;AAAA,QAChB,GAAG;AACH,iBAAS,KAAK,OAAO;AAAA,MACvB,OAAO;AACL,eAAO,GAAG,IAAI;AAAA,MAChB;AAAA,IACF;AAGA,UAAM,QAAQ,IAAI,QAAQ;AAE1B,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAKO,MAAM,4BAA4B,OAAO,SAAiB;AAC/D,MAAI;AACF,UAAM,eAAe,QAAQ,IAAI;AACjC,UAAM,QAAQ,uBAAuB,YAAY;AAEjD,QAAI,CAAC,OAAO;AACV,cAAQ,MAAM,6BAA6B,YAAY;AACvD;AAAA,IACF;AAEA,WAAQ,MAAM,uBAAuB,KAAK;AAAA,EAC5C,SAAS,OAAO;AACd,YAAQ,MAAM,4BAA4B,KAAK;AAAA,EACjD;AACF;","names":[]}
@@ -1,4 +1,4 @@
-import {
+import { Dictionary, DictionaryValue } from '@intlayer/core';
 
 /**
 *
@@ -32,8 +32,8 @@ import { ContentModule, Content } from '@intlayer/core';
 * };
 *
 */
-declare const generateTypeScriptType: (obj:
-declare const generateTypeScriptTypeContent: (obj:
+declare const generateTypeScriptType: (obj: Dictionary) => string;
+declare const generateTypeScriptTypeContent: (obj: DictionaryValue) => string;
 /**
 * This function generates a TypeScript type definition from a JSON object
 */