@intlayer/chokidar 1.2.1 → 2.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. package/dist/cjs/transpiler/declaration_file_to_dictionary/i18n_dictionary/buildI18nDictionary.cjs.map +1 -1
  2. package/dist/cjs/transpiler/declaration_file_to_dictionary/intlayer_dictionary/buildIntlayerDictionary.cjs +10 -4
  3. package/dist/cjs/transpiler/declaration_file_to_dictionary/intlayer_dictionary/buildIntlayerDictionary.cjs.map +1 -1
  4. package/dist/cjs/transpiler/declaration_file_to_dictionary/intlayer_dictionary/extractNestedJSON.cjs.map +1 -1
  5. package/dist/cjs/transpiler/declaration_file_to_dictionary/intlayer_dictionary/extractNestedJSON.d.ts +2 -2
  6. package/dist/cjs/transpiler/declaration_file_to_dictionary/intlayer_dictionary/loadContentDeclaration.cjs.map +1 -1
  7. package/dist/cjs/transpiler/declaration_file_to_dictionary/intlayer_dictionary/loadContentDeclaration.d.ts +2 -2
  8. package/dist/cjs/transpiler/declaration_file_to_dictionary/intlayer_dictionary/processContentDeclaration.cjs +8 -1
  9. package/dist/cjs/transpiler/declaration_file_to_dictionary/intlayer_dictionary/processContentDeclaration.cjs.map +1 -1
  10. package/dist/cjs/transpiler/declaration_file_to_dictionary/intlayer_dictionary/processContentDeclaration.d.ts +3 -3
  11. package/dist/cjs/transpiler/dictionary_to_type/createModuleAugmentation.cjs +12 -9
  12. package/dist/cjs/transpiler/dictionary_to_type/createModuleAugmentation.cjs.map +1 -1
  13. package/dist/cjs/transpiler/dictionary_to_type/createType.cjs +38 -52
  14. package/dist/cjs/transpiler/dictionary_to_type/createType.cjs.map +1 -1
  15. package/dist/cjs/transpiler/dictionary_to_type/createType.d.ts +3 -3
  16. package/dist/esm/transpiler/declaration_file_to_dictionary/i18n_dictionary/buildI18nDictionary.mjs.map +1 -1
  17. package/dist/esm/transpiler/declaration_file_to_dictionary/intlayer_dictionary/buildIntlayerDictionary.mjs +10 -4
  18. package/dist/esm/transpiler/declaration_file_to_dictionary/intlayer_dictionary/buildIntlayerDictionary.mjs.map +1 -1
  19. package/dist/esm/transpiler/declaration_file_to_dictionary/intlayer_dictionary/extractNestedJSON.d.mts +2 -2
  20. package/dist/esm/transpiler/declaration_file_to_dictionary/intlayer_dictionary/extractNestedJSON.mjs.map +1 -1
  21. package/dist/esm/transpiler/declaration_file_to_dictionary/intlayer_dictionary/loadContentDeclaration.d.mts +2 -2
  22. package/dist/esm/transpiler/declaration_file_to_dictionary/intlayer_dictionary/loadContentDeclaration.mjs.map +1 -1
  23. package/dist/esm/transpiler/declaration_file_to_dictionary/intlayer_dictionary/processContentDeclaration.d.mts +3 -3
  24. package/dist/esm/transpiler/declaration_file_to_dictionary/intlayer_dictionary/processContentDeclaration.mjs +8 -1
  25. package/dist/esm/transpiler/declaration_file_to_dictionary/intlayer_dictionary/processContentDeclaration.mjs.map +1 -1
  26. package/dist/esm/transpiler/dictionary_to_type/createModuleAugmentation.mjs +12 -9
  27. package/dist/esm/transpiler/dictionary_to_type/createModuleAugmentation.mjs.map +1 -1
  28. package/dist/esm/transpiler/dictionary_to_type/createType.d.mts +3 -3
  29. package/dist/esm/transpiler/dictionary_to_type/createType.mjs +38 -52
  30. package/dist/esm/transpiler/dictionary_to_type/createType.mjs.map +1 -1
  31. package/package.json +4 -6
  32. package/src/chokidar/index.ts +0 -1
  33. package/src/chokidar/watcher.ts +0 -75
  34. package/src/transpiler/declaration_file_to_dictionary/i18n_dictionary/buildI18nDictionary.ts +0 -95
  35. package/src/transpiler/declaration_file_to_dictionary/i18n_dictionary/convertContentDeclarationInto18nDictionaries.ts +0 -91
  36. package/src/transpiler/declaration_file_to_dictionary/i18n_dictionary/convertPluralsValues.ts +0 -22
  37. package/src/transpiler/declaration_file_to_dictionary/i18n_dictionary/index.ts +0 -1
  38. package/src/transpiler/declaration_file_to_dictionary/index.ts +0 -21
  39. package/src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/buildIntlayerDictionary.ts +0 -62
  40. package/src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/extractNestedJSON.ts +0 -60
  41. package/src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/index.ts +0 -3
  42. package/src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/loadContentDeclaration.ts +0 -114
  43. package/src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/processContentDeclaration.ts +0 -65
  44. package/src/transpiler/dictionary_to_main/createDictionaryList.ts +0 -65
  45. package/src/transpiler/dictionary_to_main/index.ts +0 -1
  46. package/src/transpiler/dictionary_to_type/createModuleAugmentation.ts +0 -95
  47. package/src/transpiler/dictionary_to_type/createType.ts +0 -162
  48. package/src/transpiler/dictionary_to_type/index.ts +0 -2
  49. package/src/utils.ts +0 -26
@@ -1 +1 @@
- {"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/i18n_dictionary/buildI18nDictionary.ts"],"sourcesContent":["import { mkdir, writeFile } from 'fs/promises';\nimport { resolve } from 'path';\nimport { getConfiguration } from '@intlayer/config';\nimport type { ContentModule } from '@intlayer/core';\nimport {\n processContentDeclaration,\n extractObjectsWithId,\n} from '../intlayer_dictionary/index';\nimport {\n type I18nDictionariesOutput,\n createI18nDictionaries,\n} from './convertContentDeclarationInto18nDictionaries';\n\nconst { content } = getConfiguration();\nconst { i18nDictionariesDir } = content;\n\ntype DictionariesDeclaration = Record<string, I18nDictionariesOutput>;\n\n/**\n * This function writes the dictionaries to the file system\n */\nconst writeDictionary = async (\n dictionariesDeclaration: DictionariesDeclaration\n) => {\n const resultDictionariesPaths: string[] = [];\n\n for (const [nameSpace, localContent] of Object.entries(\n dictionariesDeclaration\n )) {\n for await (const [locale, content] of Object.entries(localContent)) {\n const contentString = JSON.stringify(content);\n\n const outputFileName = `${nameSpace}.json`;\n const resultDirPath = resolve(i18nDictionariesDir, locale);\n const resultFilePath = resolve(resultDirPath, outputFileName);\n\n // Create the dictionaries folder if it doesn't exist\n await mkdir(resultDirPath, { recursive: true });\n\n // Create the json file\n await writeFile(resultFilePath, contentString, 'utf8').catch((err) => {\n console.error(`Error creating ${outputFileName}:`, err);\n });\n\n resultDictionariesPaths.push(resultFilePath);\n }\n }\n\n return resultDictionariesPaths;\n};\n\n/**\n * This function transpile content declaration to i18n dictionaries\n */\nexport const buildI18nDictionary = async (\n contentDeclarationsPaths: string[] | string\n) => {\n const resultDictionariesPaths: string[] = [];\n\n if (typeof contentDeclarationsPaths === 'string') {\n contentDeclarationsPaths = [contentDeclarationsPaths];\n }\n\n for await (const contentDeclarationPath of contentDeclarationsPaths) {\n const result = await processContentDeclaration(contentDeclarationPath);\n\n if (!result) {\n continue;\n }\n\n const nestedContent: ContentModule[] = extractObjectsWithId(result);\n\n // Create dictionaries for each nested content and format them\n const dictionariesDeclaration: DictionariesDeclaration =\n nestedContent.reduce((acc, content) => {\n const id = content.id;\n const i18Content = createI18nDictionaries(content);\n\n return {\n ...acc,\n [id]: i18Content,\n };\n }, {});\n\n // Write the dictionaries to the file system\n const dictionariesPaths: string[] = await writeDictionary(\n dictionariesDeclaration\n );\n\n // Add the paths to the result\n resultDictionariesPaths.push(...dictionariesPaths);\n }\n\n return 
resultDictionariesPaths;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAiC;AACjC,kBAAwB;AACxB,oBAAiC;AAEjC,iCAGO;AACP,0DAGO;AAEP,MAAM,EAAE,QAAQ,QAAI,gCAAiB;AACrC,MAAM,EAAE,oBAAoB,IAAI;AAOhC,MAAM,kBAAkB,OACtB,4BACG;AACH,QAAM,0BAAoC,CAAC;AAE3C,aAAW,CAAC,WAAW,YAAY,KAAK,OAAO;AAAA,IAC7C;AAAA,EACF,GAAG;AACD,qBAAiB,CAAC,QAAQA,QAAO,KAAK,OAAO,QAAQ,YAAY,GAAG;AAClE,YAAM,gBAAgB,KAAK,UAAUA,QAAO;AAE5C,YAAM,iBAAiB,GAAG,SAAS;AACnC,YAAM,oBAAgB,qBAAQ,qBAAqB,MAAM;AACzD,YAAM,qBAAiB,qBAAQ,eAAe,cAAc;AAG5D,gBAAM,uBAAM,eAAe,EAAE,WAAW,KAAK,CAAC;AAG9C,gBAAM,2BAAU,gBAAgB,eAAe,MAAM,EAAE,MAAM,CAAC,QAAQ;AACpE,gBAAQ,MAAM,kBAAkB,cAAc,KAAK,GAAG;AAAA,MACxD,CAAC;AAED,8BAAwB,KAAK,cAAc;AAAA,IAC7C;AAAA,EACF;AAEA,SAAO;AACT;AAKO,MAAM,sBAAsB,OACjC,6BACG;AACH,QAAM,0BAAoC,CAAC;AAE3C,MAAI,OAAO,6BAA6B,UAAU;AAChD,+BAA2B,CAAC,wBAAwB;AAAA,EACtD;AAEA,mBAAiB,0BAA0B,0BAA0B;AACnE,UAAM,SAAS,UAAM,sDAA0B,sBAAsB;AAErE,QAAI,CAAC,QAAQ;AACX;AAAA,IACF;AAEA,UAAM,oBAAiC,iDAAqB,MAAM;AAGlE,UAAM,0BACJ,cAAc,OAAO,CAAC,KAAKA,aAAY;AACrC,YAAM,KAAKA,SAAQ;AACnB,YAAM,iBAAa,4EAAuBA,QAAO;AAEjD,aAAO;AAAA,QACL,GAAG;AAAA,QACH,CAAC,EAAE,GAAG;AAAA,MACR;AAAA,IACF,GAAG,CAAC,CAAC;AAGP,UAAM,oBAA8B,MAAM;AAAA,MACxC;AAAA,IACF;AAGA,4BAAwB,KAAK,GAAG,iBAAiB;AAAA,EACnD;AAEA,SAAO;AACT;","names":["content"]}
+ {"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/i18n_dictionary/buildI18nDictionary.ts"],"sourcesContent":["import { mkdir, writeFile } from 'fs/promises';\nimport { resolve } from 'path';\nimport { getConfiguration } from '@intlayer/config';\nimport type { DeclarationContent } from '@intlayer/core';\nimport {\n processContentDeclaration,\n extractObjectsWithId,\n} from '../intlayer_dictionary/index';\nimport {\n type I18nDictionariesOutput,\n createI18nDictionaries,\n} from './convertContentDeclarationInto18nDictionaries';\n\nconst { content } = getConfiguration();\nconst { i18nDictionariesDir } = content;\n\ntype DictionariesDeclaration = Record<string, I18nDictionariesOutput>;\n\n/**\n * This function writes the dictionaries to the file system\n */\nconst writeDictionary = async (\n dictionariesDeclaration: DictionariesDeclaration\n) => {\n const resultDictionariesPaths: string[] = [];\n\n for (const [nameSpace, localContent] of Object.entries(\n dictionariesDeclaration\n )) {\n for await (const [locale, content] of Object.entries(localContent)) {\n const contentString = JSON.stringify(content);\n\n const outputFileName = `${nameSpace}.json`;\n const resultDirPath = resolve(i18nDictionariesDir, locale);\n const resultFilePath = resolve(resultDirPath, outputFileName);\n\n // Create the dictionaries folder if it doesn't exist\n await mkdir(resultDirPath, { recursive: true });\n\n // Create the json file\n await writeFile(resultFilePath, contentString, 'utf8').catch((err) => {\n console.error(`Error creating ${outputFileName}:`, err);\n });\n\n resultDictionariesPaths.push(resultFilePath);\n }\n }\n\n return resultDictionariesPaths;\n};\n\n/**\n * This function transpile content declaration to i18n dictionaries\n */\nexport const buildI18nDictionary = async (\n contentDeclarationsPaths: string[] | string\n) => {\n const resultDictionariesPaths: string[] = [];\n\n if (typeof contentDeclarationsPaths === 'string') {\n contentDeclarationsPaths = [contentDeclarationsPaths];\n }\n\n for await (const contentDeclarationPath of contentDeclarationsPaths) {\n const result = await processContentDeclaration(contentDeclarationPath);\n\n if (!result) {\n continue;\n }\n\n const nestedContent: DeclarationContent[] = extractObjectsWithId(result);\n\n // Create dictionaries for each nested content and format them\n const dictionariesDeclaration: DictionariesDeclaration =\n nestedContent.reduce((acc, content) => {\n const id: string = content.id;\n const i18Content = createI18nDictionaries(content);\n\n return {\n ...acc,\n [id]: i18Content,\n };\n }, {});\n\n // Write the dictionaries to the file system\n const dictionariesPaths: string[] = await writeDictionary(\n dictionariesDeclaration\n );\n\n // Add the paths to the result\n resultDictionariesPaths.push(...dictionariesPaths);\n }\n\n return 
resultDictionariesPaths;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAiC;AACjC,kBAAwB;AACxB,oBAAiC;AAEjC,iCAGO;AACP,0DAGO;AAEP,MAAM,EAAE,QAAQ,QAAI,gCAAiB;AACrC,MAAM,EAAE,oBAAoB,IAAI;AAOhC,MAAM,kBAAkB,OACtB,4BACG;AACH,QAAM,0BAAoC,CAAC;AAE3C,aAAW,CAAC,WAAW,YAAY,KAAK,OAAO;AAAA,IAC7C;AAAA,EACF,GAAG;AACD,qBAAiB,CAAC,QAAQA,QAAO,KAAK,OAAO,QAAQ,YAAY,GAAG;AAClE,YAAM,gBAAgB,KAAK,UAAUA,QAAO;AAE5C,YAAM,iBAAiB,GAAG,SAAS;AACnC,YAAM,oBAAgB,qBAAQ,qBAAqB,MAAM;AACzD,YAAM,qBAAiB,qBAAQ,eAAe,cAAc;AAG5D,gBAAM,uBAAM,eAAe,EAAE,WAAW,KAAK,CAAC;AAG9C,gBAAM,2BAAU,gBAAgB,eAAe,MAAM,EAAE,MAAM,CAAC,QAAQ;AACpE,gBAAQ,MAAM,kBAAkB,cAAc,KAAK,GAAG;AAAA,MACxD,CAAC;AAED,8BAAwB,KAAK,cAAc;AAAA,IAC7C;AAAA,EACF;AAEA,SAAO;AACT;AAKO,MAAM,sBAAsB,OACjC,6BACG;AACH,QAAM,0BAAoC,CAAC;AAE3C,MAAI,OAAO,6BAA6B,UAAU;AAChD,+BAA2B,CAAC,wBAAwB;AAAA,EACtD;AAEA,mBAAiB,0BAA0B,0BAA0B;AACnE,UAAM,SAAS,UAAM,sDAA0B,sBAAsB;AAErE,QAAI,CAAC,QAAQ;AACX;AAAA,IACF;AAEA,UAAM,oBAAsC,iDAAqB,MAAM;AAGvE,UAAM,0BACJ,cAAc,OAAO,CAAC,KAAKA,aAAY;AACrC,YAAM,KAAaA,SAAQ;AAC3B,YAAM,iBAAa,4EAAuBA,QAAO;AAEjD,aAAO;AAAA,QACL,GAAG;AAAA,QACH,CAAC,EAAE,GAAG;AAAA,MACR;AAAA,IACF,GAAG,CAAC,CAAC;AAGP,UAAM,oBAA8B,MAAM;AAAA,MACxC;AAAA,IACF;AAGA,4BAAwB,KAAK,GAAG,iBAAiB;AAAA,EACnD;AAEA,SAAO;AACT;","names":["content"]}
@@ -30,9 +30,9 @@ const { content } = (0, import_config.getConfiguration)();
  const { dictionariesDir } = content;
  const writeDictionary = async (dictionaries) => {
  const resultDictionariesPaths = [];
- for await (const content2 of dictionaries) {
- const contentString = JSON.stringify(content2);
- const id = content2.id;
+ for await (const dictionaryContent of dictionaries) {
+ const contentString = JSON.stringify(dictionaryContent);
+ const id = dictionaryContent.id;
  const outputFileName = `${id}.json`;
  const resultFilePath = (0, import_path.resolve)(dictionariesDir, outputFileName);
  await (0, import_promises.writeFile)(resultFilePath, contentString, "utf8").catch((err) => {
@@ -54,7 +54,13 @@ const buildIntlayerDictionary = async (contentDeclarationsPaths) => {
  continue;
  }
  const nestedContent = (0, import_extractNestedJSON.extractObjectsWithId)(result);
- const dictionariesPaths = await writeDictionary(nestedContent);
+ const contentWithFilePath = nestedContent.map(
+ (content2) => ({
+ ...content2,
+ filePath: contentDeclarationPath
+ })
+ );
+ const dictionariesPaths = await writeDictionary(contentWithFilePath);
  resultDictionariesPaths.push(...dictionariesPaths);
  }
  return resultDictionariesPaths;
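The new mapping step tags every extracted dictionary with the path of the content declaration it came from before writeDictionary serialises it. A minimal TypeScript sketch of the effect, with an illustrative dictionary and path (only `id` and `filePath` are meaningful to the package; the other field is made up):

// Hypothetical dictionary extracted from a content declaration file.
const nestedContent = [{ id: 'home', title: 'Welcome' }];
const contentDeclarationPath = '/project/src/home.content.ts';

// Mirrors the added map(): each dictionary now records its source file
// before being written to <dictionariesDir>/<id>.json.
const contentWithFilePath = nestedContent.map((content) => ({
  ...content,
  filePath: contentDeclarationPath,
}));

console.log(JSON.stringify(contentWithFilePath[0]));
// → {"id":"home","title":"Welcome","filePath":"/project/src/home.content.ts"}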
@@ -1 +1 @@
- {"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/buildIntlayerDictionary.ts"],"sourcesContent":["import { mkdir, writeFile } from 'fs/promises';\nimport { resolve } from 'path';\nimport { getConfiguration } from '@intlayer/config';\nimport type { ContentModule } from '@intlayer/core';\nimport { extractObjectsWithId } from './extractNestedJSON';\nimport { processContentDeclaration } from './processContentDeclaration';\n\nconst { content } = getConfiguration();\nconst { dictionariesDir } = content;\n\nconst writeDictionary = async (dictionaries: ContentModule[]) => {\n const resultDictionariesPaths: string[] = [];\n\n for await (const content of dictionaries) {\n const contentString = JSON.stringify(content);\n\n const id = content.id;\n const outputFileName = `${id}.json`;\n const resultFilePath = resolve(dictionariesDir, outputFileName);\n\n // Create the json file\n await writeFile(resultFilePath, contentString, 'utf8').catch((err) => {\n console.error(`Error creating ${outputFileName}:`, err);\n });\n\n resultDictionariesPaths.push(resultFilePath);\n }\n\n return resultDictionariesPaths;\n};\n\n/**\n * This function transpile the bundled code to to make dictionaries as JSON files\n */\nexport const buildIntlayerDictionary = async (\n contentDeclarationsPaths: string[] | string\n) => {\n const resultDictionariesPaths: string[] = [];\n\n if (typeof contentDeclarationsPaths === 'string') {\n contentDeclarationsPaths = [contentDeclarationsPaths];\n }\n\n // Create the dictionaries folder if it doesn't exist\n await mkdir(resolve(dictionariesDir), { recursive: true });\n\n for await (const contentDeclarationPath of contentDeclarationsPaths) {\n const result = await processContentDeclaration(contentDeclarationPath);\n\n if (!result) {\n continue;\n }\n\n const nestedContent: ContentModule[] = extractObjectsWithId(result);\n\n const dictionariesPaths: string[] = await writeDictionary(nestedContent);\n\n resultDictionariesPaths.push(...dictionariesPaths);\n }\n\n return resultDictionariesPaths;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAiC;AACjC,kBAAwB;AACxB,oBAAiC;AAEjC,+BAAqC;AACrC,uCAA0C;AAE1C,MAAM,EAAE,QAAQ,QAAI,gCAAiB;AACrC,MAAM,EAAE,gBAAgB,IAAI;AAE5B,MAAM,kBAAkB,OAAO,iBAAkC;AAC/D,QAAM,0BAAoC,CAAC;AAE3C,mBAAiBA,YAAW,cAAc;AACxC,UAAM,gBAAgB,KAAK,UAAUA,QAAO;AAE5C,UAAM,KAAKA,SAAQ;AACnB,UAAM,iBAAiB,GAAG,EAAE;AAC5B,UAAM,qBAAiB,qBAAQ,iBAAiB,cAAc;AAG9D,cAAM,2BAAU,gBAAgB,eAAe,MAAM,EAAE,MAAM,CAAC,QAAQ;AACpE,cAAQ,MAAM,kBAAkB,cAAc,KAAK,GAAG;AAAA,IACxD,CAAC;AAED,4BAAwB,KAAK,cAAc;AAAA,EAC7C;AAEA,SAAO;AACT;AAKO,MAAM,0BAA0B,OACrC,6BACG;AACH,QAAM,0BAAoC,CAAC;AAE3C,MAAI,OAAO,6BAA6B,UAAU;AAChD,+BAA2B,CAAC,wBAAwB;AAAA,EACtD;AAGA,YAAM,2BAAM,qBAAQ,eAAe,GAAG,EAAE,WAAW,KAAK,CAAC;AAEzD,mBAAiB,0BAA0B,0BAA0B;AACnE,UAAM,SAAS,UAAM,4DAA0B,sBAAsB;AAErE,QAAI,CAAC,QAAQ;AACX;AAAA,IACF;AAEA,UAAM,oBAAiC,+CAAqB,MAAM;AAElE,UAAM,oBAA8B,MAAM,gBAAgB,aAAa;AAEvE,4BAAwB,KAAK,GAAG,iBAAiB;AAAA,EACnD;AAEA,SAAO;AACT;","names":["content"]}
+ {"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/buildIntlayerDictionary.ts"],"sourcesContent":["import { mkdir, writeFile } from 'fs/promises';\nimport { resolve } from 'path';\nimport { getConfiguration } from '@intlayer/config';\nimport type { DeclarationContent } from '@intlayer/core';\nimport { extractObjectsWithId } from './extractNestedJSON';\nimport { processContentDeclaration } from './processContentDeclaration';\n\nconst { content } = getConfiguration();\nconst { dictionariesDir } = content;\n\nconst writeDictionary = async (dictionaries: DeclarationContent[]) => {\n const resultDictionariesPaths: string[] = [];\n\n for await (const dictionaryContent of dictionaries) {\n const contentString = JSON.stringify(dictionaryContent);\n\n const id = dictionaryContent.id;\n const outputFileName = `${id}.json`;\n const resultFilePath = resolve(dictionariesDir, outputFileName);\n\n // Create the json file\n await writeFile(resultFilePath, contentString, 'utf8').catch((err) => {\n console.error(`Error creating ${outputFileName}:`, err);\n });\n\n resultDictionariesPaths.push(resultFilePath);\n }\n\n return resultDictionariesPaths;\n};\n\n/**\n * This function transpile the bundled code to to make dictionaries as JSON files\n */\nexport const buildIntlayerDictionary = async (\n contentDeclarationsPaths: string[] | string\n) => {\n const resultDictionariesPaths: string[] = [];\n\n if (typeof contentDeclarationsPaths === 'string') {\n contentDeclarationsPaths = [contentDeclarationsPaths];\n }\n\n // Create the dictionaries folder if it doesn't exist\n await mkdir(resolve(dictionariesDir), { recursive: true });\n\n for await (const contentDeclarationPath of contentDeclarationsPaths) {\n const result = await processContentDeclaration(contentDeclarationPath);\n\n if (!result) {\n continue;\n }\n\n const nestedContent: DeclarationContent[] = extractObjectsWithId(result);\n\n const contentWithFilePath: DeclarationContent[] = nestedContent.map(\n (content) => ({\n ...content,\n filePath: contentDeclarationPath,\n })\n );\n\n const dictionariesPaths: string[] =\n await writeDictionary(contentWithFilePath);\n\n resultDictionariesPaths.push(...dictionariesPaths);\n }\n\n return resultDictionariesPaths;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAiC;AACjC,kBAAwB;AACxB,oBAAiC;AAEjC,+BAAqC;AACrC,uCAA0C;AAE1C,MAAM,EAAE,QAAQ,QAAI,gCAAiB;AACrC,MAAM,EAAE,gBAAgB,IAAI;AAE5B,MAAM,kBAAkB,OAAO,iBAAuC;AACpE,QAAM,0BAAoC,CAAC;AAE3C,mBAAiB,qBAAqB,cAAc;AAClD,UAAM,gBAAgB,KAAK,UAAU,iBAAiB;AAEtD,UAAM,KAAK,kBAAkB;AAC7B,UAAM,iBAAiB,GAAG,EAAE;AAC5B,UAAM,qBAAiB,qBAAQ,iBAAiB,cAAc;AAG9D,cAAM,2BAAU,gBAAgB,eAAe,MAAM,EAAE,MAAM,CAAC,QAAQ;AACpE,cAAQ,MAAM,kBAAkB,cAAc,KAAK,GAAG;AAAA,IACxD,CAAC;AAED,4BAAwB,KAAK,cAAc;AAAA,EAC7C;AAEA,SAAO;AACT;AAKO,MAAM,0BAA0B,OACrC,6BACG;AACH,QAAM,0BAAoC,CAAC;AAE3C,MAAI,OAAO,6BAA6B,UAAU;AAChD,+BAA2B,CAAC,wBAAwB;AAAA,EACtD;AAGA,YAAM,2BAAM,qBAAQ,eAAe,GAAG,EAAE,WAAW,KAAK,CAAC;AAEzD,mBAAiB,0BAA0B,0BAA0B;AACnE,UAAM,SAAS,UAAM,4DAA0B,sBAAsB;AAErE,QAAI,CAAC,QAAQ;AACX;AAAA,IACF;AAEA,UAAM,oBAAsC,+CAAqB,MAAM;AAEvE,UAAM,sBAA4C,cAAc;AAAA,MAC9D,CAACA,cAAa;AAAA,QACZ,GAAGA;AAAA,QACH,UAAU;AAAA,MACZ;AAAA,IACF;AAEA,UAAM,oBACJ,MAAM,gBAAgB,mBAAmB;AAE3C,4BAAwB,KAAK,GAAG,iBAAiB;AAAA,EACnD;AAEA,SAAO;AACT;","names":["content"]}
@@ -1 +1 @@
- {"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/extractNestedJSON.ts"],"sourcesContent":["import type { Content, ContentModule } from '@intlayer/core';\n\n/**\n *\n * This function extracts all nested objects with an 'id' field from the input object and returns them as an array\n *\n * Example:\n *\n * const input = {\n * id: '1',\n * name: 'John Doe',\n * address: {\n * id: '2',\n * street: '123 Main St',\n * city: 'Springfield',\n * state: 'IL'\n * },\n * };\n * const result = extractObjectsWithId(input);\n * console.log(result);\n *\n * Output:\n *\n * [{\n * id: '1',\n * name: 'John Doe',\n * address: {\n * id: '2',\n * street: '123 Main St',\n * city: 'Springfield',\n * state: 'IL'\n * }\n * },\n * {\n * id: '2',\n * street: '123 Main St',\n * city: 'Springfield',\n * state: 'IL'\n * }]\n *\n */\nexport const extractObjectsWithId = (input: ContentModule): ContentModule[] => {\n // Function to recursively search and extract nested objects with an 'id'\n const search = (obj: Content, results: ContentModule[]): void => {\n if (obj && typeof obj === 'object') {\n if (Object.hasOwn(obj, 'id')) {\n results.push(obj as ContentModule);\n }\n for (const key of Object.keys(obj)) {\n if (typeof obj[key] === 'object') {\n search(obj[key] as Content, results);\n }\n }\n }\n };\n\n const results: ContentModule[] = [];\n search(input, results);\n return results;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAyCO,MAAM,uBAAuB,CAAC,UAA0C;AAE7E,QAAM,SAAS,CAAC,KAAcA,aAAmC;AAC/D,QAAI,OAAO,OAAO,QAAQ,UAAU;AAClC,UAAI,OAAO,OAAO,KAAK,IAAI,GAAG;AAC5B,QAAAA,SAAQ,KAAK,GAAoB;AAAA,MACnC;AACA,iBAAW,OAAO,OAAO,KAAK,GAAG,GAAG;AAClC,YAAI,OAAO,IAAI,GAAG,MAAM,UAAU;AAChC,iBAAO,IAAI,GAAG,GAAcA,QAAO;AAAA,QACrC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,UAA2B,CAAC;AAClC,SAAO,OAAO,OAAO;AACrB,SAAO;AACT;","names":["results"]}
+ {"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/extractNestedJSON.ts"],"sourcesContent":["import type { Content, DeclarationContent } from '@intlayer/core';\n\n/**\n *\n * This function extracts all nested objects with an 'id' field from the input object and returns them as an array\n *\n * Example:\n *\n * const input = {\n * id: '1',\n * name: 'John Doe',\n * address: {\n * id: '2',\n * street: '123 Main St',\n * city: 'Springfield',\n * state: 'IL'\n * },\n * };\n * const result = extractObjectsWithId(input);\n * console.log(result);\n *\n * Output:\n *\n * [{\n * id: '1',\n * name: 'John Doe',\n * address: {\n * id: '2',\n * street: '123 Main St',\n * city: 'Springfield',\n * state: 'IL'\n * }\n * },\n * {\n * id: '2',\n * street: '123 Main St',\n * city: 'Springfield',\n * state: 'IL'\n * }]\n *\n */\nexport const extractObjectsWithId = (\n input: DeclarationContent\n): DeclarationContent[] => {\n // Function to recursively search and extract nested objects with an 'id'\n const search = (obj: Content, results: DeclarationContent[]): void => {\n if (obj && typeof obj === 'object') {\n if (Object.hasOwn(obj, 'id')) {\n results.push(obj as DeclarationContent);\n }\n for (const key of Object.keys(obj)) {\n if (typeof obj[key] === 'object') {\n search(obj[key] as Content, results);\n }\n }\n }\n };\n\n const results: DeclarationContent[] = [];\n search(input, results);\n return results;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAyCO,MAAM,uBAAuB,CAClC,UACyB;AAEzB,QAAM,SAAS,CAAC,KAAcA,aAAwC;AACpE,QAAI,OAAO,OAAO,QAAQ,UAAU;AAClC,UAAI,OAAO,OAAO,KAAK,IAAI,GAAG;AAC5B,QAAAA,SAAQ,KAAK,GAAyB;AAAA,MACxC;AACA,iBAAW,OAAO,OAAO,KAAK,GAAG,GAAG;AAClC,YAAI,OAAO,IAAI,GAAG,MAAM,UAAU;AAChC,iBAAO,IAAI,GAAG,GAAcA,QAAO;AAAA,QACrC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,UAAgC,CAAC;AACvC,SAAO,OAAO,OAAO;AACrB,SAAO;AACT;","names":["results"]}
@@ -1,4 +1,4 @@
- import { ContentModule } from '@intlayer/core';
+ import { DeclarationContent } from '@intlayer/core';
 
  /**
  *
@@ -39,6 +39,6 @@ import { ContentModule } from '@intlayer/core';
  * }]
  *
  */
- declare const extractObjectsWithId: (input: ContentModule) => ContentModule[];
+ declare const extractObjectsWithId: (input: DeclarationContent) => DeclarationContent[];
 
  export { extractObjectsWithId };
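Only the type name changes here; the behaviour is unchanged. A type-level usage sketch of the renamed signature, reusing the example from the function's own JSDoc (the cast is there only because the exact shape of DeclarationContent is not shown in this diff):

import type { DeclarationContent } from '@intlayer/core';

// Signature as declared in the updated .d.ts above.
declare const extractObjectsWithId: (
  input: DeclarationContent
) => DeclarationContent[];

const input = {
  id: '1',
  name: 'John Doe',
  address: { id: '2', street: '123 Main St', city: 'Springfield', state: 'IL' },
};

// Returns the root object plus every nested object carrying an `id`:
// [ { id: '1', name: 'John Doe', address: { ... } },
//   { id: '2', street: '123 Main St', city: 'Springfield', state: 'IL' } ]
const result = extractObjectsWithId(input as unknown as DeclarationContent);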
@@ -1 +1 @@
- {"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/loadContentDeclaration.ts"],"sourcesContent":["/* eslint-disable @typescript-eslint/no-var-requires */\nimport { createRequire } from 'module';\nimport { type Context, runInNewContext } from 'vm';\nimport type { ContentModule } from '@intlayer/core';\nimport { type BuildOptions, buildSync, type BuildResult } from 'esbuild';\n\nconst isESModule = typeof import.meta.url === 'string';\n\nconst sandboxContext: Context = {\n exports: {\n default: {},\n },\n module: {\n exports: {},\n },\n console,\n require: isESModule ? createRequire(import.meta.url) : require,\n};\n\nconst transformationOption: BuildOptions = {\n loader: {\n '.js': 'js',\n '.jsx': 'jsx',\n '.mjs': 'js',\n '.ts': 'ts',\n '.tsx': 'tsx',\n '.cjs': 'js',\n '.json': 'json',\n },\n format: 'cjs', // Output format as commonjs\n target: 'es2017',\n packages: 'external',\n write: false,\n bundle: true,\n};\n\nconst filterValidContentDeclaration = (\n contentDeclaration: ContentModule\n): ContentModule => {\n // @TODO Implement filtering of valid content declaration\n return contentDeclaration;\n};\n\n/**\n * Load the content declaration from the given path\n *\n * Accepts JSON, JS, MJS and TS files as configuration\n */\nexport const loadContentDeclaration = (\n contentDeclarationFilePath: string\n): ContentModule | undefined => {\n let contentDeclaration: ContentModule | undefined = undefined;\n\n const fileExtension = contentDeclarationFilePath.split('.').pop() ?? '';\n\n try {\n if (fileExtension === 'json') {\n // Assume JSON\n return require(contentDeclarationFilePath);\n }\n\n // Rest is JS, MJS or TS\n\n const moduleResult: BuildResult = buildSync({\n entryPoints: [contentDeclarationFilePath],\n\n ...transformationOption,\n });\n\n const moduleResultString = moduleResult.outputFiles?.[0].text;\n\n if (!moduleResultString) {\n console.error('Configuration file could not be loaded.');\n return undefined;\n }\n\n runInNewContext(moduleResultString, sandboxContext);\n\n if (\n sandboxContext.exports.default &&\n Object.keys(sandboxContext.exports.default).length > 0\n ) {\n // ES Module\n contentDeclaration = sandboxContext.exports.default;\n } else if (\n sandboxContext.module.exports.defaults &&\n Object.keys(sandboxContext.module.exports.defaults).length > 0\n ) {\n // CommonJS\n contentDeclaration = sandboxContext.module.exports.default;\n } else if (\n sandboxContext.module.exports.default &&\n Object.keys(sandboxContext.module.exports.default).length > 0\n ) {\n // ES Module\n contentDeclaration = sandboxContext.module.exports.default;\n } else if (\n sandboxContext.module.exports &&\n Object.keys(sandboxContext.module.exports).length > 0\n ) {\n // Other\n contentDeclaration = sandboxContext.module.exports;\n }\n\n if (typeof contentDeclaration === 'undefined') {\n console.error('Configuration file could not be loaded.');\n return undefined;\n }\n\n return filterValidContentDeclaration(contentDeclaration);\n } catch (error) {\n console.error('Error:', error);\n 
}\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,oBAA8B;AAC9B,gBAA8C;AAE9C,qBAA+D;AAJ/D;AAMA,MAAM,aAAa,OAAO,YAAY,QAAQ;AAE9C,MAAM,iBAA0B;AAAA,EAC9B,SAAS;AAAA,IACP,SAAS,CAAC;AAAA,EACZ;AAAA,EACA,QAAQ;AAAA,IACN,SAAS,CAAC;AAAA,EACZ;AAAA,EACA;AAAA,EACA,SAAS,iBAAa,6BAAc,YAAY,GAAG,IAAI;AACzD;AAEA,MAAM,uBAAqC;AAAA,EACzC,QAAQ;AAAA,IACN,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,SAAS;AAAA,EACX;AAAA,EACA,QAAQ;AAAA;AAAA,EACR,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,OAAO;AAAA,EACP,QAAQ;AACV;AAEA,MAAM,gCAAgC,CACpC,uBACkB;AAElB,SAAO;AACT;AAOO,MAAM,yBAAyB,CACpC,+BAC8B;AAC9B,MAAI,qBAAgD;AAEpD,QAAM,gBAAgB,2BAA2B,MAAM,GAAG,EAAE,IAAI,KAAK;AAErE,MAAI;AACF,QAAI,kBAAkB,QAAQ;AAE5B,aAAO,QAAQ,0BAA0B;AAAA,IAC3C;AAIA,UAAM,mBAA4B,0BAAU;AAAA,MAC1C,aAAa,CAAC,0BAA0B;AAAA,MAExC,GAAG;AAAA,IACL,CAAC;AAED,UAAM,qBAAqB,aAAa,cAAc,CAAC,EAAE;AAEzD,QAAI,CAAC,oBAAoB;AACvB,cAAQ,MAAM,yCAAyC;AACvD,aAAO;AAAA,IACT;AAEA,mCAAgB,oBAAoB,cAAc;AAElD,QACE,eAAe,QAAQ,WACvB,OAAO,KAAK,eAAe,QAAQ,OAAO,EAAE,SAAS,GACrD;AAEA,2BAAqB,eAAe,QAAQ;AAAA,IAC9C,WACE,eAAe,OAAO,QAAQ,YAC9B,OAAO,KAAK,eAAe,OAAO,QAAQ,QAAQ,EAAE,SAAS,GAC7D;AAEA,2BAAqB,eAAe,OAAO,QAAQ;AAAA,IACrD,WACE,eAAe,OAAO,QAAQ,WAC9B,OAAO,KAAK,eAAe,OAAO,QAAQ,OAAO,EAAE,SAAS,GAC5D;AAEA,2BAAqB,eAAe,OAAO,QAAQ;AAAA,IACrD,WACE,eAAe,OAAO,WACtB,OAAO,KAAK,eAAe,OAAO,OAAO,EAAE,SAAS,GACpD;AAEA,2BAAqB,eAAe,OAAO;AAAA,IAC7C;AAEA,QAAI,OAAO,uBAAuB,aAAa;AAC7C,cAAQ,MAAM,yCAAyC;AACvD,aAAO;AAAA,IACT;AAEA,WAAO,8BAA8B,kBAAkB;AAAA,EACzD,SAAS,OAAO;AACd,YAAQ,MAAM,UAAU,KAAK;AAAA,EAC/B;AACF;","names":[]}
+ {"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/loadContentDeclaration.ts"],"sourcesContent":["/* eslint-disable @typescript-eslint/no-var-requires */\nimport { createRequire } from 'module';\nimport { type Context, runInNewContext } from 'vm';\nimport type { DeclarationContent } from '@intlayer/core';\nimport { type BuildOptions, buildSync, type BuildResult } from 'esbuild';\n\nconst isESModule = typeof import.meta.url === 'string';\n\nconst sandboxContext: Context = {\n exports: {\n default: {},\n },\n module: {\n exports: {},\n },\n console,\n require: isESModule ? createRequire(import.meta.url) : require,\n};\n\nconst transformationOption: BuildOptions = {\n loader: {\n '.js': 'js',\n '.jsx': 'jsx',\n '.mjs': 'js',\n '.ts': 'ts',\n '.tsx': 'tsx',\n '.cjs': 'js',\n '.json': 'json',\n },\n format: 'cjs', // Output format as commonjs\n target: 'es2017',\n packages: 'external',\n write: false,\n bundle: true,\n};\n\nconst filterValidContentDeclaration = (\n contentDeclaration: DeclarationContent\n): DeclarationContent => {\n // @TODO Implement filtering of valid content declaration\n return contentDeclaration;\n};\n\n/**\n * Load the content declaration from the given path\n *\n * Accepts JSON, JS, MJS and TS files as configuration\n */\nexport const loadContentDeclaration = (\n contentDeclarationFilePath: string\n): DeclarationContent | undefined => {\n let contentDeclaration: DeclarationContent | undefined = undefined;\n\n const fileExtension = contentDeclarationFilePath.split('.').pop() ?? '';\n\n try {\n if (fileExtension === 'json') {\n // Assume JSON\n return require(contentDeclarationFilePath);\n }\n\n // Rest is JS, MJS or TS\n\n const moduleResult: BuildResult = buildSync({\n entryPoints: [contentDeclarationFilePath],\n\n ...transformationOption,\n });\n\n const moduleResultString = moduleResult.outputFiles?.[0].text;\n\n if (!moduleResultString) {\n console.error('Configuration file could not be loaded.');\n return undefined;\n }\n\n runInNewContext(moduleResultString, sandboxContext);\n\n if (\n sandboxContext.exports.default &&\n Object.keys(sandboxContext.exports.default).length > 0\n ) {\n // ES Module\n contentDeclaration = sandboxContext.exports.default;\n } else if (\n sandboxContext.module.exports.defaults &&\n Object.keys(sandboxContext.module.exports.defaults).length > 0\n ) {\n // CommonJS\n contentDeclaration = sandboxContext.module.exports.default;\n } else if (\n sandboxContext.module.exports.default &&\n Object.keys(sandboxContext.module.exports.default).length > 0\n ) {\n // ES Module\n contentDeclaration = sandboxContext.module.exports.default;\n } else if (\n sandboxContext.module.exports &&\n Object.keys(sandboxContext.module.exports).length > 0\n ) {\n // Other\n contentDeclaration = sandboxContext.module.exports;\n }\n\n if (typeof contentDeclaration === 'undefined') {\n console.error('Configuration file could not be loaded.');\n return undefined;\n }\n\n return filterValidContentDeclaration(contentDeclaration);\n } catch (error) {\n console.error('Error:', error);\n 
}\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,oBAA8B;AAC9B,gBAA8C;AAE9C,qBAA+D;AAJ/D;AAMA,MAAM,aAAa,OAAO,YAAY,QAAQ;AAE9C,MAAM,iBAA0B;AAAA,EAC9B,SAAS;AAAA,IACP,SAAS,CAAC;AAAA,EACZ;AAAA,EACA,QAAQ;AAAA,IACN,SAAS,CAAC;AAAA,EACZ;AAAA,EACA;AAAA,EACA,SAAS,iBAAa,6BAAc,YAAY,GAAG,IAAI;AACzD;AAEA,MAAM,uBAAqC;AAAA,EACzC,QAAQ;AAAA,IACN,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,SAAS;AAAA,EACX;AAAA,EACA,QAAQ;AAAA;AAAA,EACR,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,OAAO;AAAA,EACP,QAAQ;AACV;AAEA,MAAM,gCAAgC,CACpC,uBACuB;AAEvB,SAAO;AACT;AAOO,MAAM,yBAAyB,CACpC,+BACmC;AACnC,MAAI,qBAAqD;AAEzD,QAAM,gBAAgB,2BAA2B,MAAM,GAAG,EAAE,IAAI,KAAK;AAErE,MAAI;AACF,QAAI,kBAAkB,QAAQ;AAE5B,aAAO,QAAQ,0BAA0B;AAAA,IAC3C;AAIA,UAAM,mBAA4B,0BAAU;AAAA,MAC1C,aAAa,CAAC,0BAA0B;AAAA,MAExC,GAAG;AAAA,IACL,CAAC;AAED,UAAM,qBAAqB,aAAa,cAAc,CAAC,EAAE;AAEzD,QAAI,CAAC,oBAAoB;AACvB,cAAQ,MAAM,yCAAyC;AACvD,aAAO;AAAA,IACT;AAEA,mCAAgB,oBAAoB,cAAc;AAElD,QACE,eAAe,QAAQ,WACvB,OAAO,KAAK,eAAe,QAAQ,OAAO,EAAE,SAAS,GACrD;AAEA,2BAAqB,eAAe,QAAQ;AAAA,IAC9C,WACE,eAAe,OAAO,QAAQ,YAC9B,OAAO,KAAK,eAAe,OAAO,QAAQ,QAAQ,EAAE,SAAS,GAC7D;AAEA,2BAAqB,eAAe,OAAO,QAAQ;AAAA,IACrD,WACE,eAAe,OAAO,QAAQ,WAC9B,OAAO,KAAK,eAAe,OAAO,QAAQ,OAAO,EAAE,SAAS,GAC5D;AAEA,2BAAqB,eAAe,OAAO,QAAQ;AAAA,IACrD,WACE,eAAe,OAAO,WACtB,OAAO,KAAK,eAAe,OAAO,OAAO,EAAE,SAAS,GACpD;AAEA,2BAAqB,eAAe,OAAO;AAAA,IAC7C;AAEA,QAAI,OAAO,uBAAuB,aAAa;AAC7C,cAAQ,MAAM,yCAAyC;AACvD,aAAO;AAAA,IACT;AAEA,WAAO,8BAA8B,kBAAkB;AAAA,EACzD,SAAS,OAAO;AACd,YAAQ,MAAM,UAAU,KAAK;AAAA,EAC/B;AACF;","names":[]}
@@ -1,10 +1,10 @@
- import { ContentModule } from '@intlayer/core';
+ import { DeclarationContent } from '@intlayer/core';
 
  /**
  * Load the content declaration from the given path
  *
  * Accepts JSON, JS, MJS and TS files as configuration
  */
- declare const loadContentDeclaration: (contentDeclarationFilePath: string) => ContentModule | undefined;
+ declare const loadContentDeclaration: (contentDeclarationFilePath: string) => DeclarationContent | undefined;
 
  export { loadContentDeclaration };
@@ -29,7 +29,14 @@ const processFunctionResults = async (entry) => {
  const result = {};
  for (const key of Object.keys(entry)) {
  const field = entry?.[key];
- if (typeof field === "object") {
+ const isArray = Array.isArray(field);
+ if (typeof field === "object" && isArray) {
+ result[key] = await Promise.all(
+ field.map(async (el) => {
+ return await processFunctionResults(el);
+ })
+ );
+ } else if (typeof field === "object") {
  result[key] = await processFunctionResults(
  field
  );
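Arrays are now resolved element by element instead of being passed through the plain-object branch, so function fields nested inside arrays get executed as well. A condensed, renamed sketch of the idea (resolveFields is not the package's API; the real logic is the processFunctionResults shown above, which also handles typed content nodes):

const resolveFields = async (entry: unknown): Promise<unknown> => {
  if (!entry || typeof entry !== 'object') return entry;

  if (Array.isArray(entry)) {
    // New behaviour: each array element is resolved recursively, in parallel.
    return Promise.all(entry.map((el) => resolveFields(el)));
  }

  const result: Record<string, unknown> = {};
  for (const [key, field] of Object.entries(entry)) {
    if (typeof field === 'function') {
      // Functions (sync or async) are replaced by their awaited result.
      result[key] = await (field as () => unknown)();
    } else {
      result[key] = await resolveFields(field);
    }
  }
  return result;
};

// Async content inside an array is now resolved before the dictionary is written.
resolveFields({ faq: [{ question: async () => 'What is Intlayer?' }] }).then(
  (resolved) => console.log(JSON.stringify(resolved))
);
// → {"faq":[{"question":"What is Intlayer?"}]}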
@@ -1 +1 @@
- {"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/processContentDeclaration.ts"],"sourcesContent":["import { resolve } from 'path';\nimport type {\n Content,\n ContentModule,\n FlatContent,\n FlatContentValue,\n} from '@intlayer/core';\nimport { loadContentDeclaration } from './loadContentDeclaration';\n\n/**\n * Function to replace function and async function fields with their results in the object\n */\nconst processFunctionResults = async (entry: Content): Promise<FlatContent> => {\n if (entry && typeof entry === 'object') {\n const promises: Promise<void>[] = [];\n const result: FlatContent = {};\n\n for (const key of Object.keys(entry)) {\n const field = entry?.[key];\n\n if (typeof field === 'object') {\n result[key] = (await processFunctionResults(\n field as Content\n )) as FlatContentValue;\n } else if (typeof field === 'function') {\n // Wait for the function to resolve if it's an async function\n const promise = (async () => {\n // Execute the function and await the result if it's a Promise\n const value = await field();\n\n result[key] = value as FlatContentValue;\n })();\n promises.push(promise);\n } else {\n result[key] = field as FlatContentValue;\n }\n }\n\n // Wait for all async operations to complete\n await Promise.all(promises);\n\n return result;\n }\n\n return entry;\n};\n\n/**\n * Function to load, process the module and return the Intlayer ContentModule from the module file\n */\nexport const processContentDeclaration = async (file: string) => {\n try {\n const functionPath = resolve(file);\n const entry = loadContentDeclaration(functionPath);\n\n if (!entry) {\n console.error('No entry found in module:', functionPath);\n return;\n }\n\n return (await processFunctionResults(entry)) as ContentModule;\n } catch (error) {\n console.error('Error processing module:', error);\n }\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAOxB,oCAAuC;AAKvC,MAAM,yBAAyB,OAAO,UAAyC;AAC7E,MAAI,SAAS,OAAO,UAAU,UAAU;AACtC,UAAM,WAA4B,CAAC;AACnC,UAAM,SAAsB,CAAC;AAE7B,eAAW,OAAO,OAAO,KAAK,KAAK,GAAG;AACpC,YAAM,QAAQ,QAAQ,GAAG;AAEzB,UAAI,OAAO,UAAU,UAAU;AAC7B,eAAO,GAAG,IAAK,MAAM;AAAA,UACnB;AAAA,QACF;AAAA,MACF,WAAW,OAAO,UAAU,YAAY;AAEtC,cAAM,WAAW,YAAY;AAE3B,gBAAM,QAAQ,MAAM,MAAM;AAE1B,iBAAO,GAAG,IAAI;AAAA,QAChB,GAAG;AACH,iBAAS,KAAK,OAAO;AAAA,MACvB,OAAO;AACL,eAAO,GAAG,IAAI;AAAA,MAChB;AAAA,IACF;AAGA,UAAM,QAAQ,IAAI,QAAQ;AAE1B,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAKO,MAAM,4BAA4B,OAAO,SAAiB;AAC/D,MAAI;AACF,UAAM,mBAAe,qBAAQ,IAAI;AACjC,UAAM,YAAQ,sDAAuB,YAAY;AAEjD,QAAI,CAAC,OAAO;AACV,cAAQ,MAAM,6BAA6B,YAAY;AACvD;AAAA,IACF;AAEA,WAAQ,MAAM,uBAAuB,KAAK;AAAA,EAC5C,SAAS,OAAO;AACd,YAAQ,MAAM,4BAA4B,KAAK;AAAA,EACjD;AACF;","names":[]}
+ {"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/processContentDeclaration.ts"],"sourcesContent":["import { resolve } from 'path';\nimport type {\n Content,\n DeclarationContent,\n FlatContent,\n FlatContentValue,\n} from '@intlayer/core';\nimport { loadContentDeclaration } from './loadContentDeclaration';\n\n/**\n * Function to replace function and async function fields with their results in the object\n */\nconst processFunctionResults = async (entry: Content): Promise<FlatContent> => {\n if (entry && typeof entry === 'object') {\n const promises: Promise<void>[] = [];\n const result: FlatContent = {};\n\n for (const key of Object.keys(entry)) {\n const field = entry?.[key];\n const isArray = Array.isArray(field);\n\n if (typeof field === 'object' && isArray) {\n result[key] = (await Promise.all(\n field.map(async (el) => {\n return await processFunctionResults(el as Content);\n })\n )) as unknown as FlatContentValue;\n } else if (typeof field === 'object') {\n result[key] = (await processFunctionResults(\n field as Content\n )) as FlatContentValue;\n } else if (typeof field === 'function') {\n // Wait for the function to resolve if it's an async function\n const promise = (async () => {\n // Execute the function and await the result if it's a Promise\n const value = await field();\n\n result[key] = value as FlatContentValue;\n })();\n promises.push(promise);\n } else {\n result[key] = field as FlatContentValue;\n }\n }\n\n // Wait for all async operations to complete\n await Promise.all(promises);\n\n return result;\n }\n\n return entry;\n};\n\n/**\n * Function to load, process the module and return the Intlayer DeclarationContent from the module file\n */\nexport const processContentDeclaration = async (file: string) => {\n try {\n const functionPath = resolve(file);\n const entry = loadContentDeclaration(functionPath);\n\n if (!entry) {\n console.error('No entry found in module:', functionPath);\n return;\n }\n\n return (await processFunctionResults(entry)) as DeclarationContent;\n } catch (error) {\n console.error('Error processing module:', error);\n }\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAOxB,oCAAuC;AAKvC,MAAM,yBAAyB,OAAO,UAAyC;AAC7E,MAAI,SAAS,OAAO,UAAU,UAAU;AACtC,UAAM,WAA4B,CAAC;AACnC,UAAM,SAAsB,CAAC;AAE7B,eAAW,OAAO,OAAO,KAAK,KAAK,GAAG;AACpC,YAAM,QAAQ,QAAQ,GAAG;AACzB,YAAM,UAAU,MAAM,QAAQ,KAAK;AAEnC,UAAI,OAAO,UAAU,YAAY,SAAS;AACxC,eAAO,GAAG,IAAK,MAAM,QAAQ;AAAA,UAC3B,MAAM,IAAI,OAAO,OAAO;AACtB,mBAAO,MAAM,uBAAuB,EAAa;AAAA,UACnD,CAAC;AAAA,QACH;AAAA,MACF,WAAW,OAAO,UAAU,UAAU;AACpC,eAAO,GAAG,IAAK,MAAM;AAAA,UACnB;AAAA,QACF;AAAA,MACF,WAAW,OAAO,UAAU,YAAY;AAEtC,cAAM,WAAW,YAAY;AAE3B,gBAAM,QAAQ,MAAM,MAAM;AAE1B,iBAAO,GAAG,IAAI;AAAA,QAChB,GAAG;AACH,iBAAS,KAAK,OAAO;AAAA,MACvB,OAAO;AACL,eAAO,GAAG,IAAI;AAAA,MAChB;AAAA,IACF;AAGA,UAAM,QAAQ,IAAI,QAAQ;AAE1B,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAKO,MAAM,4BAA4B,OAAO,SAAiB;AAC/D,MAAI;AACF,UAAM,mBAAe,qBAAQ,IAAI;AACjC,UAAM,YAAQ,sDAAuB,YAAY;AAEjD,QAAI,CAAC,OAAO;AACV,cAAQ,MAAM,6BAA6B,YAAY;AACvD;AAAA,IACF;AAEA,WAAQ,MAAM,uBAAuB,KAAK;AAAA,EAC5C,SAAS,OAAO;AACd,YAAQ,MAAM,4BAA4B,KAAK;AAAA,EACjD;AACF;","names":[]}
@@ -1,8 +1,8 @@
- import { ContentModule } from '@intlayer/core';
+ import { DeclarationContent } from '@intlayer/core';
 
  /**
- * Function to load, process the module and return the Intlayer ContentModule from the module file
+ * Function to load, process the module and return the Intlayer DeclarationContent from the module file
  */
- declare const processContentDeclaration: (file: string) => Promise<ContentModule | undefined>;
+ declare const processContentDeclaration: (file: string) => Promise<DeclarationContent | undefined>;
 
  export { processContentDeclaration };
@@ -29,10 +29,10 @@ var import_glob = require("glob");
  var import_utils = require('../../utils.cjs');
  const { content, internationalization } = (0, import_config.getConfiguration)();
  const { typesDir, moduleAugmentationDir } = content;
- const { locales } = internationalization;
+ const { locales, strictMode } = internationalization;
  const getTypeName = (id) => (0, import_utils.transformToCamelCase)(`${id}Content`);
  const generateTypeIndexContent = (typeFiles) => {
- let content2 = "/* eslint-disable */\nimport 'intlayer';\nimport { Locales } from '@intlayer/config'\n";
+ let content2 = "/* eslint-disable */\nimport { Locales } from 'intlayer'\n";
  const dictionariesRef = typeFiles.map((dictionaryPath) => ({
  relativePath: (0, import_path.relative)(moduleAugmentationDir, dictionaryPath),
  id: (0, import_path.basename)(dictionaryPath, ".d.ts"),
@@ -50,10 +50,11 @@ const generateTypeIndexContent = (typeFiles) => {
  const formatLocales = locales.map((locale) => {
  for (const key in import_config.Locales) {
  if (import_config.Locales[key] === locale) {
- return ` ${key} = '${locale}'`;
+ return `Locales.${key}`;
  }
  }
- }).join(",\n");
+ }).join(" | ");
+ const strictModeRecord = strictMode === "strict" ? `interface IConfigLocales<Content> extends Record<ExtractedLocales, Content> {}` : strictMode === "required_only" ? `interface IConfigLocales<Content> extends Record<ExtractedLocales, Content>, Partial<Record<ExcludedLocales, Content>> {}` : `interface IConfigLocales<Content> extends Partial<Record<Locales, Content>> {}`;
  content2 += `declare module 'intlayer' {
  `;
  content2 += ` interface IntlayerDictionaryTypesConnector {
@@ -61,14 +62,16 @@ ${formattedDictionaryMap}
  }
 
  `;
- content2 += ` enum ConfigLocales {
- ${formatLocales}
- };
+ content2 += ` type ConfigLocales = ${formatLocales};
+ `;
+ content2 += ` type ExtractedLocales = Extract<Locales, ConfigLocales>;
+ `;
+ content2 += ` type ExcludedLocales = Exclude<Locales, ConfigLocales>;
 
  `;
- content2 += ` interface IConfigLocales<Content> extends Record<ConfigLocales, Content> {}
+ content2 += ` ${strictModeRecord}
  `;
- content2 += `};`;
+ content2 += `}`;
  return content2;
  };
  const createModuleAugmentation = () => {
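The module augmentation now emits a union type plus Extract/Exclude helpers instead of a local enum, and the shape of IConfigLocales depends on strictMode: 'strict' requires every configured locale, 'required_only' requires the configured locales and leaves the others optional, and anything else makes all locales optional. A hedged sketch of the generated intlayer.d.ts for two configured locales with strictMode 'required_only' (dictionary connector entries omitted; the locale names are illustrative):

/* eslint-disable */
import { Locales } from 'intlayer'

declare module 'intlayer' {
  // interface IntlayerDictionaryTypesConnector { ... }  // one entry per dictionary, omitted here

  type ConfigLocales = Locales.ENGLISH | Locales.FRENCH;
  type ExtractedLocales = Extract<Locales, ConfigLocales>;
  type ExcludedLocales = Exclude<Locales, ConfigLocales>;

  interface IConfigLocales<Content>
    extends Record<ExtractedLocales, Content>,
      Partial<Record<ExcludedLocales, Content>> {}
}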
@@ -1 +1 @@
- {"version":3,"sources":["../../../../src/transpiler/dictionary_to_type/createModuleAugmentation.ts"],"sourcesContent":["import { existsSync, mkdirSync, writeFileSync } from 'fs';\nimport { basename, join, relative } from 'path';\nimport { Locales, getConfiguration } from '@intlayer/config';\nimport { sync } from 'glob';\nimport { getFileHash, transformToCamelCase } from '../../utils';\n\nconst { content, internationalization } = getConfiguration();\nconst { typesDir, moduleAugmentationDir } = content;\nconst { locales } = internationalization;\n\nexport const getTypeName = (id: string): string =>\n transformToCamelCase(`${id}Content`);\n\n/**\n * This function generates the content of the module augmentation file\n */\nconst generateTypeIndexContent = (typeFiles: string[]): string => {\n let content =\n \"/* eslint-disable */\\nimport 'intlayer';\\nimport { Locales } from '@intlayer/config'\\n\";\n\n const dictionariesRef = typeFiles.map((dictionaryPath) => ({\n relativePath: relative(moduleAugmentationDir, dictionaryPath),\n id: basename(dictionaryPath, '.d.ts'), // Get the base name as the dictionary id\n hash: `_${getFileHash(dictionaryPath)}`, // Get the hash of the dictionary to avoid conflicts\n }));\n\n // Import all dictionaries\n dictionariesRef.forEach((dictionary) => {\n const typeName = getTypeName(dictionary.id);\n content += `import type { ${typeName} as ${dictionary.hash} } from '${dictionary.relativePath}';\\n`;\n });\n\n content += '\\n';\n\n // Format Dictionary Map\n const formattedDictionaryMap: string = dictionariesRef\n .map((dictionary) => ` \"${dictionary.id}\": ${dictionary.hash};`)\n .join('\\n');\n\n const formatLocales = locales\n .map((locale) => {\n for (const key in Locales) {\n if (Locales[key as keyof typeof Locales] === locale) {\n return ` ${key} = '${locale}'`;\n }\n }\n })\n .join(',\\n');\n\n /**\n * Write the module augmentation to extend the intlayer module with the dictionaries types\n * Will suggest the type resulting of the dictionaries\n *\n * declare module 'intlayer' {\n * interface IntlayerDictionaryTypesConnector = {\n * dictionaries: {\n * id: DictionaryType;\n * }\n * }\n *\n * enum ConfigLocales {\n * ENGLISH = 'en',\n * FRENCH = 'fr',\n * SPANISH = 'es',\n * }\n *\n * interface IConfigLocales<Content> extends Record<ConfigLocales, Content> {}\n *\n * }\n * See https://www.typescriptlang.org/docs/handbook/declaration-merging.html#module-augmentation\n */\n content += `declare module 'intlayer' {\\n`;\n content += ` interface IntlayerDictionaryTypesConnector {\\n${formattedDictionaryMap}\\n }\\n\\n`;\n content += ` enum ConfigLocales {\\n${formatLocales}\\n };\\n\\n`;\n content += ` interface IConfigLocales<Content> extends Record<ConfigLocales, Content> {}\\n`;\n content += `};`;\n\n return content;\n};\n\n/**\n * This function generates a index file merging all the types\n */\nexport const createModuleAugmentation = () => {\n // Create main directory if it doesn't exist\n if (!existsSync(moduleAugmentationDir)) {\n mkdirSync(moduleAugmentationDir, { recursive: true });\n }\n\n const dictionaries: string[] = sync(`${typesDir}/**/*.d.ts`);\n // Create the dictionary list file\n\n const tsContent = generateTypeIndexContent(dictionaries);\n writeFileSync(join(moduleAugmentationDir, 'intlayer.d.ts'), 
tsContent);\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAAqD;AACrD,kBAAyC;AACzC,oBAA0C;AAC1C,kBAAqB;AACrB,mBAAkD;AAElD,MAAM,EAAE,SAAS,qBAAqB,QAAI,gCAAiB;AAC3D,MAAM,EAAE,UAAU,sBAAsB,IAAI;AAC5C,MAAM,EAAE,QAAQ,IAAI;AAEb,MAAM,cAAc,CAAC,WAC1B,mCAAqB,GAAG,EAAE,SAAS;AAKrC,MAAM,2BAA2B,CAAC,cAAgC;AAChE,MAAIA,WACF;AAEF,QAAM,kBAAkB,UAAU,IAAI,CAAC,oBAAoB;AAAA,IACzD,kBAAc,sBAAS,uBAAuB,cAAc;AAAA,IAC5D,QAAI,sBAAS,gBAAgB,OAAO;AAAA;AAAA,IACpC,MAAM,QAAI,0BAAY,cAAc,CAAC;AAAA;AAAA,EACvC,EAAE;AAGF,kBAAgB,QAAQ,CAAC,eAAe;AACtC,UAAM,WAAW,YAAY,WAAW,EAAE;AAC1C,IAAAA,YAAW,iBAAiB,QAAQ,OAAO,WAAW,IAAI,YAAY,WAAW,YAAY;AAAA;AAAA,EAC/F,CAAC;AAED,EAAAA,YAAW;AAGX,QAAM,yBAAiC,gBACpC,IAAI,CAAC,eAAe,QAAQ,WAAW,EAAE,MAAM,WAAW,IAAI,GAAG,EACjE,KAAK,IAAI;AAEZ,QAAM,gBAAgB,QACnB,IAAI,CAAC,WAAW;AACf,eAAW,OAAO,uBAAS;AACzB,UAAI,sBAAQ,GAA2B,MAAM,QAAQ;AACnD,eAAO,OAAO,GAAG,OAAO,MAAM;AAAA,MAChC;AAAA,IACF;AAAA,EACF,CAAC,EACA,KAAK,KAAK;AAwBb,EAAAA,YAAW;AAAA;AACX,EAAAA,YAAW;AAAA,EAAmD,sBAAsB;AAAA;AAAA;AAAA;AACpF,EAAAA,YAAW;AAAA,EAA2B,aAAa;AAAA;AAAA;AAAA;AACnD,EAAAA,YAAW;AAAA;AACX,EAAAA,YAAW;AAEX,SAAOA;AACT;AAKO,MAAM,2BAA2B,MAAM;AAE5C,MAAI,KAAC,sBAAW,qBAAqB,GAAG;AACtC,6BAAU,uBAAuB,EAAE,WAAW,KAAK,CAAC;AAAA,EACtD;AAEA,QAAM,mBAAyB,kBAAK,GAAG,QAAQ,YAAY;AAG3D,QAAM,YAAY,yBAAyB,YAAY;AACvD,mCAAc,kBAAK,uBAAuB,eAAe,GAAG,SAAS;AACvE;","names":["content"]}
+ {"version":3,"sources":["../../../../src/transpiler/dictionary_to_type/createModuleAugmentation.ts"],"sourcesContent":["import { existsSync, mkdirSync, writeFileSync } from 'fs';\nimport { basename, join, relative } from 'path';\nimport { Locales, getConfiguration } from '@intlayer/config';\nimport { sync } from 'glob';\nimport { getFileHash, transformToCamelCase } from '../../utils';\n\nconst { content, internationalization } = getConfiguration();\nconst { typesDir, moduleAugmentationDir } = content;\nconst { locales, strictMode } = internationalization;\n\nexport const getTypeName = (id: string): string =>\n transformToCamelCase(`${id}Content`);\n\n/**\n * This function generates the content of the module augmentation file\n */\nconst generateTypeIndexContent = (typeFiles: string[]): string => {\n let content = \"/* eslint-disable */\\nimport { Locales } from 'intlayer'\\n\";\n\n const dictionariesRef = typeFiles.map((dictionaryPath) => ({\n relativePath: relative(moduleAugmentationDir, dictionaryPath),\n id: basename(dictionaryPath, '.d.ts'), // Get the base name as the dictionary id\n hash: `_${getFileHash(dictionaryPath)}`, // Get the hash of the dictionary to avoid conflicts\n }));\n\n // Import all dictionaries\n dictionariesRef.forEach((dictionary) => {\n const typeName = getTypeName(dictionary.id);\n content += `import type { ${typeName} as ${dictionary.hash} } from '${dictionary.relativePath}';\\n`;\n });\n\n content += '\\n';\n\n // Format Dictionary Map\n const formattedDictionaryMap: string = dictionariesRef\n .map((dictionary) => ` \"${dictionary.id}\": ${dictionary.hash};`)\n .join('\\n');\n\n const formatLocales = locales\n .map((locale) => {\n for (const key in Locales) {\n if (Locales[key as keyof typeof Locales] === locale) {\n return `Locales.${key}`;\n }\n }\n })\n .join(' | ');\n\n const strictModeRecord =\n strictMode === 'strict'\n ? `interface IConfigLocales<Content> extends Record<ExtractedLocales, Content> {}`\n : strictMode === 'required_only'\n ? 
`interface IConfigLocales<Content> extends Record<ExtractedLocales, Content>, Partial<Record<ExcludedLocales, Content>> {}`\n : `interface IConfigLocales<Content> extends Partial<Record<Locales, Content>> {}`;\n\n /**\n * Write the module augmentation to extend the intlayer module with the dictionaries types\n * Will suggest the type resulting of the dictionaries\n *\n * declare module 'intlayer' {\n * interface IntlayerDictionaryTypesConnector = {\n * dictionaries: {\n * id: DictionaryType;\n * }\n * }\n *\n * type ConfigLocales = Locales.ENGLISH | Locales.FRENCH | Locales.SPANISH;\n * type ExtractedLocales = Extract<Locales, ConfigLocales>;\n * type ExcludedLocales = Exclude<Locales, ConfigLocales>;\n *\n * interface IConfigLocales<Content> extends Record<ExtractedLocales, Content>, Partial<Record<ExcludedLocales, Content>> {}\n *\n *\n * }\n * See https://www.typescriptlang.org/docs/handbook/declaration-merging.html#module-augmentation\n */\n content += `declare module 'intlayer' {\\n`;\n content += ` interface IntlayerDictionaryTypesConnector {\\n${formattedDictionaryMap}\\n }\\n\\n`;\n content += ` type ConfigLocales = ${formatLocales};\\n`;\n content += ` type ExtractedLocales = Extract<Locales, ConfigLocales>;\\n`;\n content += ` type ExcludedLocales = Exclude<Locales, ConfigLocales>;\\n\\n`;\n content += ` ${strictModeRecord}\\n`;\n content += `}`;\n\n return content;\n};\n\n/**\n * This function generates a index file merging all the types\n */\nexport const createModuleAugmentation = () => {\n // Create main directory if it doesn't exist\n if (!existsSync(moduleAugmentationDir)) {\n mkdirSync(moduleAugmentationDir, { recursive: true });\n }\n\n const dictionaries: string[] = sync(`${typesDir}/**/*.d.ts`);\n // Create the dictionary list file\n\n const tsContent = generateTypeIndexContent(dictionaries);\n writeFileSync(join(moduleAugmentationDir, 'intlayer.d.ts'), tsContent);\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAAqD;AACrD,kBAAyC;AACzC,oBAA0C;AAC1C,kBAAqB;AACrB,mBAAkD;AAElD,MAAM,EAAE,SAAS,qBAAqB,QAAI,gCAAiB;AAC3D,MAAM,EAAE,UAAU,sBAAsB,IAAI;AAC5C,MAAM,EAAE,SAAS,WAAW,IAAI;AAEzB,MAAM,cAAc,CAAC,WAC1B,mCAAqB,GAAG,EAAE,SAAS;AAKrC,MAAM,2BAA2B,CAAC,cAAgC;AAChE,MAAIA,WAAU;AAEd,QAAM,kBAAkB,UAAU,IAAI,CAAC,oBAAoB;AAAA,IACzD,kBAAc,sBAAS,uBAAuB,cAAc;AAAA,IAC5D,QAAI,sBAAS,gBAAgB,OAAO;AAAA;AAAA,IACpC,MAAM,QAAI,0BAAY,cAAc,CAAC;AAAA;AAAA,EACvC,EAAE;AAGF,kBAAgB,QAAQ,CAAC,eAAe;AACtC,UAAM,WAAW,YAAY,WAAW,EAAE;AAC1C,IAAAA,YAAW,iBAAiB,QAAQ,OAAO,WAAW,IAAI,YAAY,WAAW,YAAY;AAAA;AAAA,EAC/F,CAAC;AAED,EAAAA,YAAW;AAGX,QAAM,yBAAiC,gBACpC,IAAI,CAAC,eAAe,QAAQ,WAAW,EAAE,MAAM,WAAW,IAAI,GAAG,EACjE,KAAK,IAAI;AAEZ,QAAM,gBAAgB,QACnB,IAAI,CAAC,WAAW;AACf,eAAW,OAAO,uBAAS;AACzB,UAAI,sBAAQ,GAA2B,MAAM,QAAQ;AACnD,eAAO,WAAW,GAAG;AAAA,MACvB;AAAA,IACF;AAAA,EACF,CAAC,EACA,KAAK,KAAK;AAEb,QAAM,mBACJ,eAAe,WACX,mFACA,eAAe,kBACb,8HACA;AAuBR,EAAAA,YAAW;AAAA;AACX,EAAAA,YAAW;AAAA,EAAmD,sBAAsB;AAAA;AAAA;AAAA;AACpF,EAAAA,YAAW,0BAA0B,aAAa;AAAA;AAClD,EAAAA,YAAW;AAAA;AACX,EAAAA,YAAW;AAAA;AAAA;AACX,EAAAA,YAAW,KAAK,gBAAgB;AAAA;AAChC,EAAAA,YAAW;AAEX,SAAOA;AACT;AAKO,MAAM,2BAA2B,MAAM;AAE5C,MAAI,KAAC,sBAAW,qBAAqB,GAAG;AACtC,6BAAU,uBAAuB,EAAE,WAAW,KAAK,CAAC;AAAA,EACtD;AAEA,QAAM,mBAAyB,kBAAK,GAAG,QAAQ,YAAY;AAG3D,QAAM,YAAY,yBAAyB,YAAY;AACvD,mCAAc,kBAAK,uBAAuB,eAAe,GAAG,SAAS;AACvE;","names":["content"]}
@@ -30,17 +30,17 @@ var import_config = require("@intlayer/config");
  var import_core = require("@intlayer/core");
  var import_createModuleAugmentation = require('./createModuleAugmentation.cjs');
  const import_meta = {};
- const { content, internationalization } = (0, import_config.getConfiguration)();
+ const { content } = (0, import_config.getConfiguration)();
  const { typesDir } = content;
  const isESModule = typeof import_meta.url === "string";
  const requireFunction = isESModule ? (0, import_module.createRequire)(import_meta.url) : require;
+ const getFirstValue = (obj) => Object.values(obj)[0];
  const generateTypeScriptType = (obj) => {
  let typeDefinition = ``;
  const typeName = (0, import_createModuleAugmentation.getTypeName)(obj.id);
- typeDefinition += `export type ${typeName} = {
- `;
+ typeDefinition += `export type ${typeName} = `;
  typeDefinition += generateTypeScriptTypeContent(obj);
- typeDefinition += "};\n\n";
+ typeDefinition += ";\n\n";
  return typeDefinition;
  };
  const isReactNode = (node) => typeof node?.key !== "undefined" && typeof node?.props !== "undefined";
@@ -52,54 +52,40 @@ const generateTypeScriptTypeContent = (obj) => {
  if (isReactNodeValue) {
  return `JSX.Element`;
  }
- let typeDefinition = ``;
+ if (
+ // Check if the value is a typed node
+ obj.nodeType === import_core.NodeType.Translation
+ ) {
+ const { nodeType, ...content2 } = obj;
+ const languageValue = getFirstValue(
+ content2
+ );
+ const tsType = generateTypeScriptTypeContent(languageValue);
+ return `${tsType}`;
+ } else if (
+ // Check if the value is a typed node
+ obj.nodeType === import_core.NodeType.Enumeration
+ ) {
+ const { nodeType, ...content2 } = obj;
+ const quantifiedValue = getFirstValue(
+ content2
+ );
+ const tsType = generateTypeScriptTypeContent(quantifiedValue);
+ return `(quantity: number) => ${tsType}`;
+ } else if (Array.isArray(obj)) {
+ const arrayType = generateTypeScriptTypeContent(obj[0]);
+ return `${arrayType}[]`;
+ }
+ let typeDefinition = "{";
  for (const [key, value] of Object.entries(obj)) {
- const nodeType = value?.nodeType;
- if (
- // Check if the value is a typed node
- typeof value === "object" && nodeType === import_core.NodeType.Translation
- ) {
- const tsType = generateTypeScriptTypeContent(
- value?.[internationalization.defaultLocale]
- );
- typeDefinition += ` ${key}: ${tsType},
- `;
- } else if (
- // Check if the value is a typed node
- typeof value === "object" && nodeType === import_core.NodeType.Enumeration
- ) {
- const tsType = generateTypeScriptTypeContent(
- value?.[internationalization.defaultLocale]
- );
- typeDefinition += ` ${key}: (quantity: number) => ${tsType},
- `;
- } else if (
- // Check if the value is a nested object
- typeof value === "object"
- ) {
- const isArray = Array.isArray(value);
- if (isArray) {
- const arrayType = generateTypeScriptTypeContent(value);
- typeDefinition += ` ${key}: ${arrayType}[],
- `;
- } else {
- const nestedType = generateTypeScriptTypeContent(value);
- typeDefinition += ` ${key}: {${nestedType}},
- `;
- }
- } else if (
- // Check if the value is an 'id'
- typeof value === "string" && key === "id"
- ) {
- const tsType = `"${value}"`;
- typeDefinition += ` ${key}: ${tsType},
- `;
- } else {
- const tsType = typeof value;
- typeDefinition += ` ${key}: ${tsType},
- `;
+ const isLast = Object.keys(obj).indexOf(key) === Object.keys(obj).length - 1;
+ const nestedType = generateTypeScriptTypeContent(value);
+ typeDefinition += `'${key}': ${nestedType}`;
+ if (!isLast) {
+ typeDefinition += ",";
  }
  }
+ typeDefinition += "}";
  return typeDefinition;
  };
  const createTypes = (dictionariesPaths) => {
@@ -108,9 +94,9 @@ const createTypes = (dictionariesPaths) => {
  (0, import_fs.mkdirSync)(typesDir, { recursive: true });
  }
  for (const dictionaryPath of dictionariesPaths) {
- const contentModule = requireFunction(dictionaryPath);
- const dictionaryName = contentModule.id;
- const typeDefinition = generateTypeScriptType(contentModule);
+ const dictionary = requireFunction(dictionaryPath);
+ const dictionaryName = dictionary.id;
+ const typeDefinition = generateTypeScriptType(dictionary);
  const outputPath = (0, import_path.resolve)(typesDir, `${dictionaryName}.d.ts`);
  (0, import_fs.writeFileSync)(outputPath, typeDefinition);
  resultTypesPaths.push(outputPath);
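Taken together, the createType.cjs hunks above rework how the generated declaration body is built: the old version walked each key and resolved types through internationalization.defaultLocale from the configuration, while the new version recurses over the value itself, collapsing a NodeType.Translation node to the type of its first locale entry, a NodeType.Enumeration node to (quantity: number) => T, and an array to T[]. A minimal sketch of the emitted .d.ts output under this change, assuming a hypothetical dictionary with id 'my-content' holding one translated string and one enumerated string (the type name MyContent is illustrative; the real name comes from getTypeName(obj.id)):

// Hypothetical input declaration (illustrative, not from the package's fixtures):
// { id: 'my-content', title: <Translation en/fr>, count: <Enumeration '1' / '>1'> }

// 1.2.1-style output: one line per key, default locale read from the config.
export type MyContent = {
  id: "my-content",
  title: string,
  count: (quantity: number) => string,
};

// 2.0.1-style output: a single expression with quoted keys; the first locale value
// supplies the type, so no configuration lookup is needed.
export type MyContent = {'id': string,'title': string,'count': (quantity: number) => string};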
@@ -1 +1 @@
- {"version":3,"sources":["../../../../src/transpiler/dictionary_to_type/createType.ts"],"sourcesContent":["import { existsSync, mkdirSync, writeFileSync } from 'fs';\nimport { createRequire } from 'module';\nimport { resolve } from 'path';\nimport { getConfiguration } from '@intlayer/config';\nimport {\n NodeType,\n type Content,\n type ContentModule,\n type TypedNode,\n} from '@intlayer/core';\nimport { getTypeName } from './createModuleAugmentation';\n\nconst { content, internationalization } = getConfiguration();\nconst { typesDir } = content;\n\nconst isESModule = typeof import.meta.url === 'string';\nconst requireFunction = isESModule ? createRequire(import.meta.url) : require;\n\n/**\n *\n * This function generates a TypeScript type definition from a JSON object\n *\n * Example:\n *\n * const input = {\n * id: '1',\n * name: 'John Doe',\n * address: {\n * id: '2',\n * street: '123 Main St',\n * city: 'Springfield',\n * }\n * };\n *\n * const result = generateTypeScriptType(input, 'RootObject');\n * console.log(result);\n *\n * Output:\n *\n * type RootObject = {\n * id: '1',\n * name: string,\n * address: {\n * id: '2',\n * street: string,\n * city: string,\n * },\n * };\n *\n */\nexport const generateTypeScriptType = (obj: ContentModule): string => {\n let typeDefinition = ``;\n\n const typeName = getTypeName(obj.id);\n\n typeDefinition += `export type ${typeName} = {\\n`;\n typeDefinition += generateTypeScriptTypeContent(obj);\n typeDefinition += '};\\n\\n';\n\n return typeDefinition;\n};\n\nconst isReactNode = (node: Record<string, unknown>): boolean =>\n typeof node?.key !== 'undefined' && typeof node?.props !== 'undefined';\n\n// eslint-disable-next-line sonarjs/cognitive-complexity\nexport const generateTypeScriptTypeContent = (obj: Content): string => {\n if (typeof obj !== 'object' || obj === null) {\n return `${typeof obj}`;\n }\n\n const isReactNodeValue = isReactNode(obj as Record<string, unknown>);\n\n if (isReactNodeValue) {\n // ReactNode handling\n return `JSX.Element`;\n }\n\n let typeDefinition = ``;\n for (const [key, value] of Object.entries(obj)) {\n const nodeType: NodeType | undefined = (value as TypedNode)?.nodeType;\n type ValueKey = keyof typeof value;\n\n if (\n // Check if the value is a typed node\n typeof value === 'object' &&\n nodeType === NodeType.Translation\n ) {\n const tsType = generateTypeScriptTypeContent(\n value?.[internationalization.defaultLocale as ValueKey]\n );\n typeDefinition += ` ${key}: ${tsType},\\n`;\n } else if (\n // Check if the value is a typed node\n typeof value === 'object' &&\n nodeType === NodeType.Enumeration\n ) {\n const tsType = generateTypeScriptTypeContent(\n value?.[internationalization.defaultLocale as ValueKey] as Content\n );\n\n typeDefinition += ` ${key}: (quantity: number) => ${tsType},\\n`;\n } else if (\n // Check if the value is a nested object\n typeof value === 'object'\n ) {\n const isArray = Array.isArray(value);\n\n if (isArray) {\n // Array handling (simplified, assumes non-empty arrays with uniform type)\n const arrayType = generateTypeScriptTypeContent(value as Content);\n\n typeDefinition += ` ${key}: ${arrayType}[],\\n`;\n } else {\n // Nested object, recurse\n const nestedType = generateTypeScriptTypeContent(value as Content);\n\n typeDefinition += ` ${key}: {${nestedType}},\\n`;\n }\n } else if (\n // Check if the value is an 'id'\n typeof value === 'string' &&\n key === 'id'\n ) {\n // Special handling for 'id' field\n const tsType = `\"${value}\"`;\n typeDefinition += ` ${key}: 
${tsType},\\n`;\n } else {\n // Primitive type\n const tsType = typeof value;\n typeDefinition += ` ${key}: ${tsType},\\n`;\n }\n }\n\n return typeDefinition;\n};\n\n/**\n * This function generates a TypeScript type definition from a JSON object\n */\nexport const createTypes = (dictionariesPaths: string[]): string[] => {\n const resultTypesPaths: string[] = [];\n\n // Create type folders if they don't exist\n if (!existsSync(typesDir)) {\n mkdirSync(typesDir, { recursive: true });\n }\n\n for (const dictionaryPath of dictionariesPaths) {\n const contentModule: ContentModule = requireFunction(dictionaryPath);\n const dictionaryName: string = contentModule.id;\n const typeDefinition: string = generateTypeScriptType(contentModule);\n\n const outputPath = resolve(typesDir, `${dictionaryName}.d.ts`);\n\n writeFileSync(outputPath, typeDefinition);\n\n resultTypesPaths.push(outputPath);\n }\n\n return resultTypesPaths;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAAqD;AACrD,oBAA8B;AAC9B,kBAAwB;AACxB,oBAAiC;AACjC,kBAKO;AACP,sCAA4B;AAV5B;AAYA,MAAM,EAAE,SAAS,qBAAqB,QAAI,gCAAiB;AAC3D,MAAM,EAAE,SAAS,IAAI;AAErB,MAAM,aAAa,OAAO,YAAY,QAAQ;AAC9C,MAAM,kBAAkB,iBAAa,6BAAc,YAAY,GAAG,IAAI;AAkC/D,MAAM,yBAAyB,CAAC,QAA+B;AACpE,MAAI,iBAAiB;AAErB,QAAM,eAAW,6CAAY,IAAI,EAAE;AAEnC,oBAAkB,eAAe,QAAQ;AAAA;AACzC,oBAAkB,8BAA8B,GAAG;AACnD,oBAAkB;AAElB,SAAO;AACT;AAEA,MAAM,cAAc,CAAC,SACnB,OAAO,MAAM,QAAQ,eAAe,OAAO,MAAM,UAAU;AAGtD,MAAM,gCAAgC,CAAC,QAAyB;AACrE,MAAI,OAAO,QAAQ,YAAY,QAAQ,MAAM;AAC3C,WAAO,GAAG,OAAO,GAAG;AAAA,EACtB;AAEA,QAAM,mBAAmB,YAAY,GAA8B;AAEnE,MAAI,kBAAkB;AAEpB,WAAO;AAAA,EACT;AAEA,MAAI,iBAAiB;AACrB,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,GAAG,GAAG;AAC9C,UAAM,WAAkC,OAAqB;AAG7D;AAAA;AAAA,MAEE,OAAO,UAAU,YACjB,aAAa,qBAAS;AAAA,MACtB;AACA,YAAM,SAAS;AAAA,QACb,QAAQ,qBAAqB,aAAyB;AAAA,MACxD;AACA,wBAAkB,KAAK,GAAG,KAAK,MAAM;AAAA;AAAA,IACvC;AAAA;AAAA,MAEE,OAAO,UAAU,YACjB,aAAa,qBAAS;AAAA,MACtB;AACA,YAAM,SAAS;AAAA,QACb,QAAQ,qBAAqB,aAAyB;AAAA,MACxD;AAEA,wBAAkB,KAAK,GAAG,2BAA2B,MAAM;AAAA;AAAA,IAC7D;AAAA;AAAA,MAEE,OAAO,UAAU;AAAA,MACjB;AACA,YAAM,UAAU,MAAM,QAAQ,KAAK;AAEnC,UAAI,SAAS;AAEX,cAAM,YAAY,8BAA8B,KAAgB;AAEhE,0BAAkB,KAAK,GAAG,KAAK,SAAS;AAAA;AAAA,MAC1C,OAAO;AAEL,cAAM,aAAa,8BAA8B,KAAgB;AAEjE,0BAAkB,KAAK,GAAG,MAAM,UAAU;AAAA;AAAA,MAC5C;AAAA,IACF;AAAA;AAAA,MAEE,OAAO,UAAU,YACjB,QAAQ;AAAA,MACR;AAEA,YAAM,SAAS,IAAI,KAAK;AACxB,wBAAkB,KAAK,GAAG,KAAK,MAAM;AAAA;AAAA,IACvC,OAAO;AAEL,YAAM,SAAS,OAAO;AACtB,wBAAkB,KAAK,GAAG,KAAK,MAAM;AAAA;AAAA,IACvC;AAAA,EACF;AAEA,SAAO;AACT;AAKO,MAAM,cAAc,CAAC,sBAA0C;AACpE,QAAM,mBAA6B,CAAC;AAGpC,MAAI,KAAC,sBAAW,QAAQ,GAAG;AACzB,6BAAU,UAAU,EAAE,WAAW,KAAK,CAAC;AAAA,EACzC;AAEA,aAAW,kBAAkB,mBAAmB;AAC9C,UAAM,gBAA+B,gBAAgB,cAAc;AACnE,UAAM,iBAAyB,cAAc;AAC7C,UAAM,iBAAyB,uBAAuB,aAAa;AAEnE,UAAM,iBAAa,qBAAQ,UAAU,GAAG,cAAc,OAAO;AAE7D,iCAAc,YAAY,cAAc;AAExC,qBAAiB,KAAK,UAAU;AAAA,EAClC;AAEA,SAAO;AACT;","names":[]}
+ {"version":3,"sources":["../../../../src/transpiler/dictionary_to_type/createType.ts"],"sourcesContent":["import { existsSync, mkdirSync, writeFileSync } from 'fs';\nimport { createRequire } from 'module';\nimport { resolve } from 'path';\nimport { getConfiguration } from '@intlayer/config';\nimport {\n NodeType,\n type Dictionary,\n type DictionaryValue,\n type TypedNode,\n} from '@intlayer/core';\nimport { getTypeName } from './createModuleAugmentation';\n\nconst { content } = getConfiguration();\nconst { typesDir } = content;\n\nconst isESModule = typeof import.meta.url === 'string';\nconst requireFunction = isESModule ? createRequire(import.meta.url) : require;\n\nconst getFirstValue = (obj: Record<string, DictionaryValue>): DictionaryValue =>\n Object.values(obj)[0];\n\n/**\n *\n * This function generates a TypeScript type definition from a JSON object\n *\n * Example:\n *\n * const input = {\n * id: '1',\n * name: 'John Doe',\n * address: {\n * id: '2',\n * street: '123 Main St',\n * city: 'Springfield',\n * }\n * };\n *\n * const result = generateTypeScriptType(input, 'RootObject');\n * console.log(result);\n *\n * Output:\n *\n * type RootObject = {\n * id: '1',\n * name: string,\n * address: {\n * id: '2',\n * street: string,\n * city: string,\n * },\n * };\n *\n */\nexport const generateTypeScriptType = (obj: Dictionary): string => {\n let typeDefinition = ``;\n\n const typeName = getTypeName(obj.id);\n\n typeDefinition += `export type ${typeName} = `;\n typeDefinition += generateTypeScriptTypeContent(obj as DictionaryValue);\n typeDefinition += ';\\n\\n';\n\n return typeDefinition;\n};\n\nconst isReactNode = (node: Record<string, unknown>): boolean =>\n typeof node?.key !== 'undefined' && typeof node?.props !== 'undefined';\n\n// eslint-disable-next-line sonarjs/cognitive-complexity\nexport const generateTypeScriptTypeContent = (obj: DictionaryValue): string => {\n if (typeof obj !== 'object' || obj === null) {\n return `${typeof obj}`;\n }\n\n const isReactNodeValue = isReactNode(obj as Record<string, unknown>);\n\n if (isReactNodeValue) {\n // ReactNode handling\n return `JSX.Element`;\n }\n\n if (\n // Check if the value is a typed node\n (obj as TypedNode).nodeType === NodeType.Translation\n ) {\n const { nodeType, ...content } = obj as TypedNode;\n\n const languageValue: DictionaryValue = getFirstValue(\n content as Record<string, DictionaryValue>\n );\n\n const tsType = generateTypeScriptTypeContent(languageValue);\n return `${tsType}`;\n } else if (\n // Check if the value is a typed node\n (obj as TypedNode).nodeType === NodeType.Enumeration\n ) {\n const { nodeType, ...content } = obj as TypedNode;\n\n const quantifiedValue: DictionaryValue = getFirstValue(\n content as Record<string, DictionaryValue>\n );\n\n const tsType = generateTypeScriptTypeContent(quantifiedValue);\n\n return `(quantity: number) => ${tsType}`;\n } else if (Array.isArray(obj)) {\n // Array handling (simplified, assumes non-empty arrays with uniform type)\n const arrayType = generateTypeScriptTypeContent(obj[0] as DictionaryValue);\n\n return `${arrayType}[]`;\n }\n\n let typeDefinition = '{';\n // Nested object, recurse\n for (const [key, value] of Object.entries(obj)) {\n const isLast =\n Object.keys(obj).indexOf(key) === Object.keys(obj).length - 1;\n\n const nestedType = generateTypeScriptTypeContent(value as DictionaryValue);\n\n typeDefinition += `'${key}': ${nestedType}`;\n\n if (!isLast) {\n typeDefinition += ',';\n }\n }\n typeDefinition += '}';\n return typeDefinition;\n};\n\n/**\n * 
This function generates a TypeScript type definition from a JSON object\n */\nexport const createTypes = (dictionariesPaths: string[]): string[] => {\n const resultTypesPaths: string[] = [];\n\n // Create type folders if they don't exist\n if (!existsSync(typesDir)) {\n mkdirSync(typesDir, { recursive: true });\n }\n\n for (const dictionaryPath of dictionariesPaths) {\n const dictionary: Dictionary = requireFunction(dictionaryPath);\n const dictionaryName: string = dictionary.id;\n const typeDefinition: string = generateTypeScriptType(dictionary);\n\n const outputPath: string = resolve(typesDir, `${dictionaryName}.d.ts`);\n\n writeFileSync(outputPath, typeDefinition);\n\n resultTypesPaths.push(outputPath);\n }\n\n return resultTypesPaths;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAAqD;AACrD,oBAA8B;AAC9B,kBAAwB;AACxB,oBAAiC;AACjC,kBAKO;AACP,sCAA4B;AAV5B;AAYA,MAAM,EAAE,QAAQ,QAAI,gCAAiB;AACrC,MAAM,EAAE,SAAS,IAAI;AAErB,MAAM,aAAa,OAAO,YAAY,QAAQ;AAC9C,MAAM,kBAAkB,iBAAa,6BAAc,YAAY,GAAG,IAAI;AAEtE,MAAM,gBAAgB,CAAC,QACrB,OAAO,OAAO,GAAG,EAAE,CAAC;AAkCf,MAAM,yBAAyB,CAAC,QAA4B;AACjE,MAAI,iBAAiB;AAErB,QAAM,eAAW,6CAAY,IAAI,EAAE;AAEnC,oBAAkB,eAAe,QAAQ;AACzC,oBAAkB,8BAA8B,GAAsB;AACtE,oBAAkB;AAElB,SAAO;AACT;AAEA,MAAM,cAAc,CAAC,SACnB,OAAO,MAAM,QAAQ,eAAe,OAAO,MAAM,UAAU;AAGtD,MAAM,gCAAgC,CAAC,QAAiC;AAC7E,MAAI,OAAO,QAAQ,YAAY,QAAQ,MAAM;AAC3C,WAAO,GAAG,OAAO,GAAG;AAAA,EACtB;AAEA,QAAM,mBAAmB,YAAY,GAA8B;AAEnE,MAAI,kBAAkB;AAEpB,WAAO;AAAA,EACT;AAEA;AAAA;AAAA,IAEG,IAAkB,aAAa,qBAAS;AAAA,IACzC;AACA,UAAM,EAAE,UAAU,GAAGA,SAAQ,IAAI;AAEjC,UAAM,gBAAiC;AAAA,MACrCA;AAAA,IACF;AAEA,UAAM,SAAS,8BAA8B,aAAa;AAC1D,WAAO,GAAG,MAAM;AAAA,EAClB;AAAA;AAAA,IAEG,IAAkB,aAAa,qBAAS;AAAA,IACzC;AACA,UAAM,EAAE,UAAU,GAAGA,SAAQ,IAAI;AAEjC,UAAM,kBAAmC;AAAA,MACvCA;AAAA,IACF;AAEA,UAAM,SAAS,8BAA8B,eAAe;AAE5D,WAAO,yBAAyB,MAAM;AAAA,EACxC,WAAW,MAAM,QAAQ,GAAG,GAAG;AAE7B,UAAM,YAAY,8BAA8B,IAAI,CAAC,CAAoB;AAEzE,WAAO,GAAG,SAAS;AAAA,EACrB;AAEA,MAAI,iBAAiB;AAErB,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,GAAG,GAAG;AAC9C,UAAM,SACJ,OAAO,KAAK,GAAG,EAAE,QAAQ,GAAG,MAAM,OAAO,KAAK,GAAG,EAAE,SAAS;AAE9D,UAAM,aAAa,8BAA8B,KAAwB;AAEzE,sBAAkB,IAAI,GAAG,MAAM,UAAU;AAEzC,QAAI,CAAC,QAAQ;AACX,wBAAkB;AAAA,IACpB;AAAA,EACF;AACA,oBAAkB;AAClB,SAAO;AACT;AAKO,MAAM,cAAc,CAAC,sBAA0C;AACpE,QAAM,mBAA6B,CAAC;AAGpC,MAAI,KAAC,sBAAW,QAAQ,GAAG;AACzB,6BAAU,UAAU,EAAE,WAAW,KAAK,CAAC;AAAA,EACzC;AAEA,aAAW,kBAAkB,mBAAmB;AAC9C,UAAM,aAAyB,gBAAgB,cAAc;AAC7D,UAAM,iBAAyB,WAAW;AAC1C,UAAM,iBAAyB,uBAAuB,UAAU;AAEhE,UAAM,iBAAqB,qBAAQ,UAAU,GAAG,cAAc,OAAO;AAErE,iCAAc,YAAY,cAAc;AAExC,qBAAiB,KAAK,UAAU;AAAA,EAClC;AAEA,SAAO;AACT;","names":["content"]}
@@ -1,4 +1,4 @@
- import { ContentModule, Content } from '@intlayer/core';
+ import { Dictionary, DictionaryValue } from '@intlayer/core';

  /**
  *
@@ -32,8 +32,8 @@ import { ContentModule, Content } from '@intlayer/core';
  * };
  *
  */
- declare const generateTypeScriptType: (obj: ContentModule) => string;
- declare const generateTypeScriptTypeContent: (obj: Content) => string;
+ declare const generateTypeScriptType: (obj: Dictionary) => string;
+ declare const generateTypeScriptTypeContent: (obj: DictionaryValue) => string;
  /**
  * This function generates a TypeScript type definition from a JSON object
  */
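The public typings follow the same rename as the runtime code: ContentModule and Content from @intlayer/core become Dictionary and DictionaryValue. A hedged migration sketch for code that called these helpers directly; the root-level re-export from @intlayer/chokidar is an assumption, so adjust the import path to wherever your build exposes them:

// Before (1.2.1):
// import type { ContentModule } from '@intlayer/core';
// const typeDef: string = generateTypeScriptType(contentModule as ContentModule);

// After (2.0.1):
import type { Dictionary, DictionaryValue } from '@intlayer/core';
import { generateTypeScriptType, generateTypeScriptTypeContent } from '@intlayer/chokidar';

declare const dictionary: Dictionary; // loaded elsewhere, e.g. via require(dictionaryPath)
const fullType: string = generateTypeScriptType(dictionary);
const bodyOnly: string = generateTypeScriptTypeContent(dictionary as DictionaryValue);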
@@ -1 +1 @@
- {"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/i18n_dictionary/buildI18nDictionary.ts"],"sourcesContent":["import { mkdir, writeFile } from 'fs/promises';\nimport { resolve } from 'path';\nimport { getConfiguration } from '@intlayer/config';\nimport type { ContentModule } from '@intlayer/core';\nimport {\n processContentDeclaration,\n extractObjectsWithId,\n} from '../intlayer_dictionary/index';\nimport {\n type I18nDictionariesOutput,\n createI18nDictionaries,\n} from './convertContentDeclarationInto18nDictionaries';\n\nconst { content } = getConfiguration();\nconst { i18nDictionariesDir } = content;\n\ntype DictionariesDeclaration = Record<string, I18nDictionariesOutput>;\n\n/**\n * This function writes the dictionaries to the file system\n */\nconst writeDictionary = async (\n dictionariesDeclaration: DictionariesDeclaration\n) => {\n const resultDictionariesPaths: string[] = [];\n\n for (const [nameSpace, localContent] of Object.entries(\n dictionariesDeclaration\n )) {\n for await (const [locale, content] of Object.entries(localContent)) {\n const contentString = JSON.stringify(content);\n\n const outputFileName = `${nameSpace}.json`;\n const resultDirPath = resolve(i18nDictionariesDir, locale);\n const resultFilePath = resolve(resultDirPath, outputFileName);\n\n // Create the dictionaries folder if it doesn't exist\n await mkdir(resultDirPath, { recursive: true });\n\n // Create the json file\n await writeFile(resultFilePath, contentString, 'utf8').catch((err) => {\n console.error(`Error creating ${outputFileName}:`, err);\n });\n\n resultDictionariesPaths.push(resultFilePath);\n }\n }\n\n return resultDictionariesPaths;\n};\n\n/**\n * This function transpile content declaration to i18n dictionaries\n */\nexport const buildI18nDictionary = async (\n contentDeclarationsPaths: string[] | string\n) => {\n const resultDictionariesPaths: string[] = [];\n\n if (typeof contentDeclarationsPaths === 'string') {\n contentDeclarationsPaths = [contentDeclarationsPaths];\n }\n\n for await (const contentDeclarationPath of contentDeclarationsPaths) {\n const result = await processContentDeclaration(contentDeclarationPath);\n\n if (!result) {\n continue;\n }\n\n const nestedContent: ContentModule[] = extractObjectsWithId(result);\n\n // Create dictionaries for each nested content and format them\n const dictionariesDeclaration: DictionariesDeclaration =\n nestedContent.reduce((acc, content) => {\n const id = content.id;\n const i18Content = createI18nDictionaries(content);\n\n return {\n ...acc,\n [id]: i18Content,\n };\n }, {});\n\n // Write the dictionaries to the file system\n const dictionariesPaths: string[] = await writeDictionary(\n dictionariesDeclaration\n );\n\n // Add the paths to the result\n resultDictionariesPaths.push(...dictionariesPaths);\n }\n\n return 
resultDictionariesPaths;\n};\n"],"mappings":"AAAA,SAAS,OAAO,iBAAiB;AACjC,SAAS,eAAe;AACxB,SAAS,wBAAwB;AAEjC;AAAA,EACE;AAAA,EACA;AAAA,OACK;AACP;AAAA,EAEE;AAAA,OACK;AAEP,MAAM,EAAE,QAAQ,IAAI,iBAAiB;AACrC,MAAM,EAAE,oBAAoB,IAAI;AAOhC,MAAM,kBAAkB,OACtB,4BACG;AACH,QAAM,0BAAoC,CAAC;AAE3C,aAAW,CAAC,WAAW,YAAY,KAAK,OAAO;AAAA,IAC7C;AAAA,EACF,GAAG;AACD,qBAAiB,CAAC,QAAQA,QAAO,KAAK,OAAO,QAAQ,YAAY,GAAG;AAClE,YAAM,gBAAgB,KAAK,UAAUA,QAAO;AAE5C,YAAM,iBAAiB,GAAG,SAAS;AACnC,YAAM,gBAAgB,QAAQ,qBAAqB,MAAM;AACzD,YAAM,iBAAiB,QAAQ,eAAe,cAAc;AAG5D,YAAM,MAAM,eAAe,EAAE,WAAW,KAAK,CAAC;AAG9C,YAAM,UAAU,gBAAgB,eAAe,MAAM,EAAE,MAAM,CAAC,QAAQ;AACpE,gBAAQ,MAAM,kBAAkB,cAAc,KAAK,GAAG;AAAA,MACxD,CAAC;AAED,8BAAwB,KAAK,cAAc;AAAA,IAC7C;AAAA,EACF;AAEA,SAAO;AACT;AAKO,MAAM,sBAAsB,OACjC,6BACG;AACH,QAAM,0BAAoC,CAAC;AAE3C,MAAI,OAAO,6BAA6B,UAAU;AAChD,+BAA2B,CAAC,wBAAwB;AAAA,EACtD;AAEA,mBAAiB,0BAA0B,0BAA0B;AACnE,UAAM,SAAS,MAAM,0BAA0B,sBAAsB;AAErE,QAAI,CAAC,QAAQ;AACX;AAAA,IACF;AAEA,UAAM,gBAAiC,qBAAqB,MAAM;AAGlE,UAAM,0BACJ,cAAc,OAAO,CAAC,KAAKA,aAAY;AACrC,YAAM,KAAKA,SAAQ;AACnB,YAAM,aAAa,uBAAuBA,QAAO;AAEjD,aAAO;AAAA,QACL,GAAG;AAAA,QACH,CAAC,EAAE,GAAG;AAAA,MACR;AAAA,IACF,GAAG,CAAC,CAAC;AAGP,UAAM,oBAA8B,MAAM;AAAA,MACxC;AAAA,IACF;AAGA,4BAAwB,KAAK,GAAG,iBAAiB;AAAA,EACnD;AAEA,SAAO;AACT;","names":["content"]}
+ {"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/i18n_dictionary/buildI18nDictionary.ts"],"sourcesContent":["import { mkdir, writeFile } from 'fs/promises';\nimport { resolve } from 'path';\nimport { getConfiguration } from '@intlayer/config';\nimport type { DeclarationContent } from '@intlayer/core';\nimport {\n processContentDeclaration,\n extractObjectsWithId,\n} from '../intlayer_dictionary/index';\nimport {\n type I18nDictionariesOutput,\n createI18nDictionaries,\n} from './convertContentDeclarationInto18nDictionaries';\n\nconst { content } = getConfiguration();\nconst { i18nDictionariesDir } = content;\n\ntype DictionariesDeclaration = Record<string, I18nDictionariesOutput>;\n\n/**\n * This function writes the dictionaries to the file system\n */\nconst writeDictionary = async (\n dictionariesDeclaration: DictionariesDeclaration\n) => {\n const resultDictionariesPaths: string[] = [];\n\n for (const [nameSpace, localContent] of Object.entries(\n dictionariesDeclaration\n )) {\n for await (const [locale, content] of Object.entries(localContent)) {\n const contentString = JSON.stringify(content);\n\n const outputFileName = `${nameSpace}.json`;\n const resultDirPath = resolve(i18nDictionariesDir, locale);\n const resultFilePath = resolve(resultDirPath, outputFileName);\n\n // Create the dictionaries folder if it doesn't exist\n await mkdir(resultDirPath, { recursive: true });\n\n // Create the json file\n await writeFile(resultFilePath, contentString, 'utf8').catch((err) => {\n console.error(`Error creating ${outputFileName}:`, err);\n });\n\n resultDictionariesPaths.push(resultFilePath);\n }\n }\n\n return resultDictionariesPaths;\n};\n\n/**\n * This function transpile content declaration to i18n dictionaries\n */\nexport const buildI18nDictionary = async (\n contentDeclarationsPaths: string[] | string\n) => {\n const resultDictionariesPaths: string[] = [];\n\n if (typeof contentDeclarationsPaths === 'string') {\n contentDeclarationsPaths = [contentDeclarationsPaths];\n }\n\n for await (const contentDeclarationPath of contentDeclarationsPaths) {\n const result = await processContentDeclaration(contentDeclarationPath);\n\n if (!result) {\n continue;\n }\n\n const nestedContent: DeclarationContent[] = extractObjectsWithId(result);\n\n // Create dictionaries for each nested content and format them\n const dictionariesDeclaration: DictionariesDeclaration =\n nestedContent.reduce((acc, content) => {\n const id: string = content.id;\n const i18Content = createI18nDictionaries(content);\n\n return {\n ...acc,\n [id]: i18Content,\n };\n }, {});\n\n // Write the dictionaries to the file system\n const dictionariesPaths: string[] = await writeDictionary(\n dictionariesDeclaration\n );\n\n // Add the paths to the result\n resultDictionariesPaths.push(...dictionariesPaths);\n }\n\n return 
resultDictionariesPaths;\n};\n"],"mappings":"AAAA,SAAS,OAAO,iBAAiB;AACjC,SAAS,eAAe;AACxB,SAAS,wBAAwB;AAEjC;AAAA,EACE;AAAA,EACA;AAAA,OACK;AACP;AAAA,EAEE;AAAA,OACK;AAEP,MAAM,EAAE,QAAQ,IAAI,iBAAiB;AACrC,MAAM,EAAE,oBAAoB,IAAI;AAOhC,MAAM,kBAAkB,OACtB,4BACG;AACH,QAAM,0BAAoC,CAAC;AAE3C,aAAW,CAAC,WAAW,YAAY,KAAK,OAAO;AAAA,IAC7C;AAAA,EACF,GAAG;AACD,qBAAiB,CAAC,QAAQA,QAAO,KAAK,OAAO,QAAQ,YAAY,GAAG;AAClE,YAAM,gBAAgB,KAAK,UAAUA,QAAO;AAE5C,YAAM,iBAAiB,GAAG,SAAS;AACnC,YAAM,gBAAgB,QAAQ,qBAAqB,MAAM;AACzD,YAAM,iBAAiB,QAAQ,eAAe,cAAc;AAG5D,YAAM,MAAM,eAAe,EAAE,WAAW,KAAK,CAAC;AAG9C,YAAM,UAAU,gBAAgB,eAAe,MAAM,EAAE,MAAM,CAAC,QAAQ;AACpE,gBAAQ,MAAM,kBAAkB,cAAc,KAAK,GAAG;AAAA,MACxD,CAAC;AAED,8BAAwB,KAAK,cAAc;AAAA,IAC7C;AAAA,EACF;AAEA,SAAO;AACT;AAKO,MAAM,sBAAsB,OACjC,6BACG;AACH,QAAM,0BAAoC,CAAC;AAE3C,MAAI,OAAO,6BAA6B,UAAU;AAChD,+BAA2B,CAAC,wBAAwB;AAAA,EACtD;AAEA,mBAAiB,0BAA0B,0BAA0B;AACnE,UAAM,SAAS,MAAM,0BAA0B,sBAAsB;AAErE,QAAI,CAAC,QAAQ;AACX;AAAA,IACF;AAEA,UAAM,gBAAsC,qBAAqB,MAAM;AAGvE,UAAM,0BACJ,cAAc,OAAO,CAAC,KAAKA,aAAY;AACrC,YAAM,KAAaA,SAAQ;AAC3B,YAAM,aAAa,uBAAuBA,QAAO;AAEjD,aAAO;AAAA,QACL,GAAG;AAAA,QACH,CAAC,EAAE,GAAG;AAAA,MACR;AAAA,IACF,GAAG,CAAC,CAAC;AAGP,UAAM,oBAA8B,MAAM;AAAA,MACxC;AAAA,IACF;AAGA,4BAAwB,KAAK,GAAG,iBAAiB;AAAA,EACnD;AAEA,SAAO;AACT;","names":["content"]}
@@ -7,9 +7,9 @@ const { content } = getConfiguration();
  const { dictionariesDir } = content;
  const writeDictionary = async (dictionaries) => {
  const resultDictionariesPaths = [];
- for await (const content2 of dictionaries) {
- const contentString = JSON.stringify(content2);
- const id = content2.id;
+ for await (const dictionaryContent of dictionaries) {
+ const contentString = JSON.stringify(dictionaryContent);
+ const id = dictionaryContent.id;
  const outputFileName = `${id}.json`;
  const resultFilePath = resolve(dictionariesDir, outputFileName);
  await writeFile(resultFilePath, contentString, "utf8").catch((err) => {
@@ -31,7 +31,13 @@ const buildIntlayerDictionary = async (contentDeclarationsPaths) => {
  continue;
  }
  const nestedContent = extractObjectsWithId(result);
- const dictionariesPaths = await writeDictionary(nestedContent);
+ const contentWithFilePath = nestedContent.map(
+ (content2) => ({
+ ...content2,
+ filePath: contentDeclarationPath
+ })
+ );
+ const dictionariesPaths = await writeDictionary(contentWithFilePath);
  resultDictionariesPaths.push(...dictionariesPaths);
  }
  return resultDictionariesPaths;
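Beyond the content2 to dictionaryContent rename, the second hunk is the behavioral change in this file: each extracted dictionary is now tagged with the path of the content declaration it was built from before being written to dictionariesDir as <id>.json. A sketch of the written file, assuming a hypothetical declaration with id 'home' located at ./src/home.content.ts (every field other than filePath is illustrative):

// dictionaries/home.json after 2.0.1 -- sketch, not actual package output
const writtenDictionary = {
  id: 'home',                        // from the content declaration
  title: 'Welcome',                  // declaration content passes through unchanged
  filePath: './src/home.content.ts', // new in 2.0.1: the declaration file this entry came from
};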
@@ -1 +1 @@
- {"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/buildIntlayerDictionary.ts"],"sourcesContent":["import { mkdir, writeFile } from 'fs/promises';\nimport { resolve } from 'path';\nimport { getConfiguration } from '@intlayer/config';\nimport type { ContentModule } from '@intlayer/core';\nimport { extractObjectsWithId } from './extractNestedJSON';\nimport { processContentDeclaration } from './processContentDeclaration';\n\nconst { content } = getConfiguration();\nconst { dictionariesDir } = content;\n\nconst writeDictionary = async (dictionaries: ContentModule[]) => {\n const resultDictionariesPaths: string[] = [];\n\n for await (const content of dictionaries) {\n const contentString = JSON.stringify(content);\n\n const id = content.id;\n const outputFileName = `${id}.json`;\n const resultFilePath = resolve(dictionariesDir, outputFileName);\n\n // Create the json file\n await writeFile(resultFilePath, contentString, 'utf8').catch((err) => {\n console.error(`Error creating ${outputFileName}:`, err);\n });\n\n resultDictionariesPaths.push(resultFilePath);\n }\n\n return resultDictionariesPaths;\n};\n\n/**\n * This function transpile the bundled code to to make dictionaries as JSON files\n */\nexport const buildIntlayerDictionary = async (\n contentDeclarationsPaths: string[] | string\n) => {\n const resultDictionariesPaths: string[] = [];\n\n if (typeof contentDeclarationsPaths === 'string') {\n contentDeclarationsPaths = [contentDeclarationsPaths];\n }\n\n // Create the dictionaries folder if it doesn't exist\n await mkdir(resolve(dictionariesDir), { recursive: true });\n\n for await (const contentDeclarationPath of contentDeclarationsPaths) {\n const result = await processContentDeclaration(contentDeclarationPath);\n\n if (!result) {\n continue;\n }\n\n const nestedContent: ContentModule[] = extractObjectsWithId(result);\n\n const dictionariesPaths: string[] = await writeDictionary(nestedContent);\n\n resultDictionariesPaths.push(...dictionariesPaths);\n }\n\n return resultDictionariesPaths;\n};\n"],"mappings":"AAAA,SAAS,OAAO,iBAAiB;AACjC,SAAS,eAAe;AACxB,SAAS,wBAAwB;AAEjC,SAAS,4BAA4B;AACrC,SAAS,iCAAiC;AAE1C,MAAM,EAAE,QAAQ,IAAI,iBAAiB;AACrC,MAAM,EAAE,gBAAgB,IAAI;AAE5B,MAAM,kBAAkB,OAAO,iBAAkC;AAC/D,QAAM,0BAAoC,CAAC;AAE3C,mBAAiBA,YAAW,cAAc;AACxC,UAAM,gBAAgB,KAAK,UAAUA,QAAO;AAE5C,UAAM,KAAKA,SAAQ;AACnB,UAAM,iBAAiB,GAAG,EAAE;AAC5B,UAAM,iBAAiB,QAAQ,iBAAiB,cAAc;AAG9D,UAAM,UAAU,gBAAgB,eAAe,MAAM,EAAE,MAAM,CAAC,QAAQ;AACpE,cAAQ,MAAM,kBAAkB,cAAc,KAAK,GAAG;AAAA,IACxD,CAAC;AAED,4BAAwB,KAAK,cAAc;AAAA,EAC7C;AAEA,SAAO;AACT;AAKO,MAAM,0BAA0B,OACrC,6BACG;AACH,QAAM,0BAAoC,CAAC;AAE3C,MAAI,OAAO,6BAA6B,UAAU;AAChD,+BAA2B,CAAC,wBAAwB;AAAA,EACtD;AAGA,QAAM,MAAM,QAAQ,eAAe,GAAG,EAAE,WAAW,KAAK,CAAC;AAEzD,mBAAiB,0BAA0B,0BAA0B;AACnE,UAAM,SAAS,MAAM,0BAA0B,sBAAsB;AAErE,QAAI,CAAC,QAAQ;AACX;AAAA,IACF;AAEA,UAAM,gBAAiC,qBAAqB,MAAM;AAElE,UAAM,oBAA8B,MAAM,gBAAgB,aAAa;AAEvE,4BAAwB,KAAK,GAAG,iBAAiB;AAAA,EACnD;AAEA,SAAO;AACT;","names":["content"]}
+ {"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/buildIntlayerDictionary.ts"],"sourcesContent":["import { mkdir, writeFile } from 'fs/promises';\nimport { resolve } from 'path';\nimport { getConfiguration } from '@intlayer/config';\nimport type { DeclarationContent } from '@intlayer/core';\nimport { extractObjectsWithId } from './extractNestedJSON';\nimport { processContentDeclaration } from './processContentDeclaration';\n\nconst { content } = getConfiguration();\nconst { dictionariesDir } = content;\n\nconst writeDictionary = async (dictionaries: DeclarationContent[]) => {\n const resultDictionariesPaths: string[] = [];\n\n for await (const dictionaryContent of dictionaries) {\n const contentString = JSON.stringify(dictionaryContent);\n\n const id = dictionaryContent.id;\n const outputFileName = `${id}.json`;\n const resultFilePath = resolve(dictionariesDir, outputFileName);\n\n // Create the json file\n await writeFile(resultFilePath, contentString, 'utf8').catch((err) => {\n console.error(`Error creating ${outputFileName}:`, err);\n });\n\n resultDictionariesPaths.push(resultFilePath);\n }\n\n return resultDictionariesPaths;\n};\n\n/**\n * This function transpile the bundled code to to make dictionaries as JSON files\n */\nexport const buildIntlayerDictionary = async (\n contentDeclarationsPaths: string[] | string\n) => {\n const resultDictionariesPaths: string[] = [];\n\n if (typeof contentDeclarationsPaths === 'string') {\n contentDeclarationsPaths = [contentDeclarationsPaths];\n }\n\n // Create the dictionaries folder if it doesn't exist\n await mkdir(resolve(dictionariesDir), { recursive: true });\n\n for await (const contentDeclarationPath of contentDeclarationsPaths) {\n const result = await processContentDeclaration(contentDeclarationPath);\n\n if (!result) {\n continue;\n }\n\n const nestedContent: DeclarationContent[] = extractObjectsWithId(result);\n\n const contentWithFilePath: DeclarationContent[] = nestedContent.map(\n (content) => ({\n ...content,\n filePath: contentDeclarationPath,\n })\n );\n\n const dictionariesPaths: string[] =\n await writeDictionary(contentWithFilePath);\n\n resultDictionariesPaths.push(...dictionariesPaths);\n }\n\n return resultDictionariesPaths;\n};\n"],"mappings":"AAAA,SAAS,OAAO,iBAAiB;AACjC,SAAS,eAAe;AACxB,SAAS,wBAAwB;AAEjC,SAAS,4BAA4B;AACrC,SAAS,iCAAiC;AAE1C,MAAM,EAAE,QAAQ,IAAI,iBAAiB;AACrC,MAAM,EAAE,gBAAgB,IAAI;AAE5B,MAAM,kBAAkB,OAAO,iBAAuC;AACpE,QAAM,0BAAoC,CAAC;AAE3C,mBAAiB,qBAAqB,cAAc;AAClD,UAAM,gBAAgB,KAAK,UAAU,iBAAiB;AAEtD,UAAM,KAAK,kBAAkB;AAC7B,UAAM,iBAAiB,GAAG,EAAE;AAC5B,UAAM,iBAAiB,QAAQ,iBAAiB,cAAc;AAG9D,UAAM,UAAU,gBAAgB,eAAe,MAAM,EAAE,MAAM,CAAC,QAAQ;AACpE,cAAQ,MAAM,kBAAkB,cAAc,KAAK,GAAG;AAAA,IACxD,CAAC;AAED,4BAAwB,KAAK,cAAc;AAAA,EAC7C;AAEA,SAAO;AACT;AAKO,MAAM,0BAA0B,OACrC,6BACG;AACH,QAAM,0BAAoC,CAAC;AAE3C,MAAI,OAAO,6BAA6B,UAAU;AAChD,+BAA2B,CAAC,wBAAwB;AAAA,EACtD;AAGA,QAAM,MAAM,QAAQ,eAAe,GAAG,EAAE,WAAW,KAAK,CAAC;AAEzD,mBAAiB,0BAA0B,0BAA0B;AACnE,UAAM,SAAS,MAAM,0BAA0B,sBAAsB;AAErE,QAAI,CAAC,QAAQ;AACX;AAAA,IACF;AAEA,UAAM,gBAAsC,qBAAqB,MAAM;AAEvE,UAAM,sBAA4C,cAAc;AAAA,MAC9D,CAACA,cAAa;AAAA,QACZ,GAAGA;AAAA,QACH,UAAU;AAAA,MACZ;AAAA,IACF;AAEA,UAAM,oBACJ,MAAM,gBAAgB,mBAAmB;AAE3C,4BAAwB,KAAK,GAAG,iBAAiB;AAAA,EACnD;AAEA,SAAO;AACT;","names":["content"]}
@@ -1,4 +1,4 @@
- import { ContentModule } from '@intlayer/core';
+ import { DeclarationContent } from '@intlayer/core';

  /**
  *
@@ -39,6 +39,6 @@ import { ContentModule } from '@intlayer/core';
  * }]
  *
  */
- declare const extractObjectsWithId: (input: ContentModule) => ContentModule[];
+ declare const extractObjectsWithId: (input: DeclarationContent) => DeclarationContent[];

  export { extractObjectsWithId };
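Only the typings change here; extraction still collects every nested object that carries an id field. A short usage sketch under the new names (the input object is hypothetical, and the root-level import is an assumption; the helper may need to be imported from its module path instead):

import type { DeclarationContent } from '@intlayer/core';
import { extractObjectsWithId } from '@intlayer/chokidar';

// Hypothetical nested declaration where both the root and a nested object carry an id.
declare const pageDeclaration: DeclarationContent;

const flattened: DeclarationContent[] = extractObjectsWithId(pageDeclaration);
// -> one entry per object with an 'id', e.g. [{ id: 'page', ... }, { id: 'hero', ... }]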
@@ -1 +1 @@
- {"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/extractNestedJSON.ts"],"sourcesContent":["import type { Content, ContentModule } from '@intlayer/core';\n\n/**\n *\n * This function extracts all nested objects with an 'id' field from the input object and returns them as an array\n *\n * Example:\n *\n * const input = {\n * id: '1',\n * name: 'John Doe',\n * address: {\n * id: '2',\n * street: '123 Main St',\n * city: 'Springfield',\n * state: 'IL'\n * },\n * };\n * const result = extractObjectsWithId(input);\n * console.log(result);\n *\n * Output:\n *\n * [{\n * id: '1',\n * name: 'John Doe',\n * address: {\n * id: '2',\n * street: '123 Main St',\n * city: 'Springfield',\n * state: 'IL'\n * }\n * },\n * {\n * id: '2',\n * street: '123 Main St',\n * city: 'Springfield',\n * state: 'IL'\n * }]\n *\n */\nexport const extractObjectsWithId = (input: ContentModule): ContentModule[] => {\n // Function to recursively search and extract nested objects with an 'id'\n const search = (obj: Content, results: ContentModule[]): void => {\n if (obj && typeof obj === 'object') {\n if (Object.hasOwn(obj, 'id')) {\n results.push(obj as ContentModule);\n }\n for (const key of Object.keys(obj)) {\n if (typeof obj[key] === 'object') {\n search(obj[key] as Content, results);\n }\n }\n }\n };\n\n const results: ContentModule[] = [];\n search(input, results);\n return results;\n};\n"],"mappings":"AAyCO,MAAM,uBAAuB,CAAC,UAA0C;AAE7E,QAAM,SAAS,CAAC,KAAcA,aAAmC;AAC/D,QAAI,OAAO,OAAO,QAAQ,UAAU;AAClC,UAAI,OAAO,OAAO,KAAK,IAAI,GAAG;AAC5B,QAAAA,SAAQ,KAAK,GAAoB;AAAA,MACnC;AACA,iBAAW,OAAO,OAAO,KAAK,GAAG,GAAG;AAClC,YAAI,OAAO,IAAI,GAAG,MAAM,UAAU;AAChC,iBAAO,IAAI,GAAG,GAAcA,QAAO;AAAA,QACrC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,UAA2B,CAAC;AAClC,SAAO,OAAO,OAAO;AACrB,SAAO;AACT;","names":["results"]}
+ {"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/extractNestedJSON.ts"],"sourcesContent":["import type { Content, DeclarationContent } from '@intlayer/core';\n\n/**\n *\n * This function extracts all nested objects with an 'id' field from the input object and returns them as an array\n *\n * Example:\n *\n * const input = {\n * id: '1',\n * name: 'John Doe',\n * address: {\n * id: '2',\n * street: '123 Main St',\n * city: 'Springfield',\n * state: 'IL'\n * },\n * };\n * const result = extractObjectsWithId(input);\n * console.log(result);\n *\n * Output:\n *\n * [{\n * id: '1',\n * name: 'John Doe',\n * address: {\n * id: '2',\n * street: '123 Main St',\n * city: 'Springfield',\n * state: 'IL'\n * }\n * },\n * {\n * id: '2',\n * street: '123 Main St',\n * city: 'Springfield',\n * state: 'IL'\n * }]\n *\n */\nexport const extractObjectsWithId = (\n input: DeclarationContent\n): DeclarationContent[] => {\n // Function to recursively search and extract nested objects with an 'id'\n const search = (obj: Content, results: DeclarationContent[]): void => {\n if (obj && typeof obj === 'object') {\n if (Object.hasOwn(obj, 'id')) {\n results.push(obj as DeclarationContent);\n }\n for (const key of Object.keys(obj)) {\n if (typeof obj[key] === 'object') {\n search(obj[key] as Content, results);\n }\n }\n }\n };\n\n const results: DeclarationContent[] = [];\n search(input, results);\n return results;\n};\n"],"mappings":"AAyCO,MAAM,uBAAuB,CAClC,UACyB;AAEzB,QAAM,SAAS,CAAC,KAAcA,aAAwC;AACpE,QAAI,OAAO,OAAO,QAAQ,UAAU;AAClC,UAAI,OAAO,OAAO,KAAK,IAAI,GAAG;AAC5B,QAAAA,SAAQ,KAAK,GAAyB;AAAA,MACxC;AACA,iBAAW,OAAO,OAAO,KAAK,GAAG,GAAG;AAClC,YAAI,OAAO,IAAI,GAAG,MAAM,UAAU;AAChC,iBAAO,IAAI,GAAG,GAAcA,QAAO;AAAA,QACrC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,UAAgC,CAAC;AACvC,SAAO,OAAO,OAAO;AACrB,SAAO;AACT;","names":["results"]}
@@ -1,10 +1,10 @@
- import { ContentModule } from '@intlayer/core';
+ import { DeclarationContent } from '@intlayer/core';

  /**
  * Load the content declaration from the given path
  *
  * Accepts JSON, JS, MJS and TS files as configuration
  */
- declare const loadContentDeclaration: (contentDeclarationFilePath: string) => ContentModule | undefined;
+ declare const loadContentDeclaration: (contentDeclarationFilePath: string) => DeclarationContent | undefined;

  export { loadContentDeclaration };
@@ -1 +1 @@
- {"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/loadContentDeclaration.ts"],"sourcesContent":["/* eslint-disable @typescript-eslint/no-var-requires */\nimport { createRequire } from 'module';\nimport { type Context, runInNewContext } from 'vm';\nimport type { ContentModule } from '@intlayer/core';\nimport { type BuildOptions, buildSync, type BuildResult } from 'esbuild';\n\nconst isESModule = typeof import.meta.url === 'string';\n\nconst sandboxContext: Context = {\n exports: {\n default: {},\n },\n module: {\n exports: {},\n },\n console,\n require: isESModule ? createRequire(import.meta.url) : require,\n};\n\nconst transformationOption: BuildOptions = {\n loader: {\n '.js': 'js',\n '.jsx': 'jsx',\n '.mjs': 'js',\n '.ts': 'ts',\n '.tsx': 'tsx',\n '.cjs': 'js',\n '.json': 'json',\n },\n format: 'cjs', // Output format as commonjs\n target: 'es2017',\n packages: 'external',\n write: false,\n bundle: true,\n};\n\nconst filterValidContentDeclaration = (\n contentDeclaration: ContentModule\n): ContentModule => {\n // @TODO Implement filtering of valid content declaration\n return contentDeclaration;\n};\n\n/**\n * Load the content declaration from the given path\n *\n * Accepts JSON, JS, MJS and TS files as configuration\n */\nexport const loadContentDeclaration = (\n contentDeclarationFilePath: string\n): ContentModule | undefined => {\n let contentDeclaration: ContentModule | undefined = undefined;\n\n const fileExtension = contentDeclarationFilePath.split('.').pop() ?? '';\n\n try {\n if (fileExtension === 'json') {\n // Assume JSON\n return require(contentDeclarationFilePath);\n }\n\n // Rest is JS, MJS or TS\n\n const moduleResult: BuildResult = buildSync({\n entryPoints: [contentDeclarationFilePath],\n\n ...transformationOption,\n });\n\n const moduleResultString = moduleResult.outputFiles?.[0].text;\n\n if (!moduleResultString) {\n console.error('Configuration file could not be loaded.');\n return undefined;\n }\n\n runInNewContext(moduleResultString, sandboxContext);\n\n if (\n sandboxContext.exports.default &&\n Object.keys(sandboxContext.exports.default).length > 0\n ) {\n // ES Module\n contentDeclaration = sandboxContext.exports.default;\n } else if (\n sandboxContext.module.exports.defaults &&\n Object.keys(sandboxContext.module.exports.defaults).length > 0\n ) {\n // CommonJS\n contentDeclaration = sandboxContext.module.exports.default;\n } else if (\n sandboxContext.module.exports.default &&\n Object.keys(sandboxContext.module.exports.default).length > 0\n ) {\n // ES Module\n contentDeclaration = sandboxContext.module.exports.default;\n } else if (\n sandboxContext.module.exports &&\n Object.keys(sandboxContext.module.exports).length > 0\n ) {\n // Other\n contentDeclaration = sandboxContext.module.exports;\n }\n\n if (typeof contentDeclaration === 'undefined') {\n console.error('Configuration file could not be loaded.');\n return undefined;\n }\n\n return filterValidContentDeclaration(contentDeclaration);\n } catch (error) {\n console.error('Error:', error);\n 
}\n};\n"],"mappings":"AACA,SAAS,qBAAqB;AAC9B,SAAuB,uBAAuB;AAE9C,SAA4B,iBAAmC;AAE/D,MAAM,aAAa,OAAO,YAAY,QAAQ;AAE9C,MAAM,iBAA0B;AAAA,EAC9B,SAAS;AAAA,IACP,SAAS,CAAC;AAAA,EACZ;AAAA,EACA,QAAQ;AAAA,IACN,SAAS,CAAC;AAAA,EACZ;AAAA,EACA;AAAA,EACA,SAAS,aAAa,cAAc,YAAY,GAAG,IAAI;AACzD;AAEA,MAAM,uBAAqC;AAAA,EACzC,QAAQ;AAAA,IACN,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,SAAS;AAAA,EACX;AAAA,EACA,QAAQ;AAAA;AAAA,EACR,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,OAAO;AAAA,EACP,QAAQ;AACV;AAEA,MAAM,gCAAgC,CACpC,uBACkB;AAElB,SAAO;AACT;AAOO,MAAM,yBAAyB,CACpC,+BAC8B;AAC9B,MAAI,qBAAgD;AAEpD,QAAM,gBAAgB,2BAA2B,MAAM,GAAG,EAAE,IAAI,KAAK;AAErE,MAAI;AACF,QAAI,kBAAkB,QAAQ;AAE5B,aAAO,QAAQ,0BAA0B;AAAA,IAC3C;AAIA,UAAM,eAA4B,UAAU;AAAA,MAC1C,aAAa,CAAC,0BAA0B;AAAA,MAExC,GAAG;AAAA,IACL,CAAC;AAED,UAAM,qBAAqB,aAAa,cAAc,CAAC,EAAE;AAEzD,QAAI,CAAC,oBAAoB;AACvB,cAAQ,MAAM,yCAAyC;AACvD,aAAO;AAAA,IACT;AAEA,oBAAgB,oBAAoB,cAAc;AAElD,QACE,eAAe,QAAQ,WACvB,OAAO,KAAK,eAAe,QAAQ,OAAO,EAAE,SAAS,GACrD;AAEA,2BAAqB,eAAe,QAAQ;AAAA,IAC9C,WACE,eAAe,OAAO,QAAQ,YAC9B,OAAO,KAAK,eAAe,OAAO,QAAQ,QAAQ,EAAE,SAAS,GAC7D;AAEA,2BAAqB,eAAe,OAAO,QAAQ;AAAA,IACrD,WACE,eAAe,OAAO,QAAQ,WAC9B,OAAO,KAAK,eAAe,OAAO,QAAQ,OAAO,EAAE,SAAS,GAC5D;AAEA,2BAAqB,eAAe,OAAO,QAAQ;AAAA,IACrD,WACE,eAAe,OAAO,WACtB,OAAO,KAAK,eAAe,OAAO,OAAO,EAAE,SAAS,GACpD;AAEA,2BAAqB,eAAe,OAAO;AAAA,IAC7C;AAEA,QAAI,OAAO,uBAAuB,aAAa;AAC7C,cAAQ,MAAM,yCAAyC;AACvD,aAAO;AAAA,IACT;AAEA,WAAO,8BAA8B,kBAAkB;AAAA,EACzD,SAAS,OAAO;AACd,YAAQ,MAAM,UAAU,KAAK;AAAA,EAC/B;AACF;","names":[]}
+ {"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/loadContentDeclaration.ts"],"sourcesContent":["/* eslint-disable @typescript-eslint/no-var-requires */\nimport { createRequire } from 'module';\nimport { type Context, runInNewContext } from 'vm';\nimport type { DeclarationContent } from '@intlayer/core';\nimport { type BuildOptions, buildSync, type BuildResult } from 'esbuild';\n\nconst isESModule = typeof import.meta.url === 'string';\n\nconst sandboxContext: Context = {\n exports: {\n default: {},\n },\n module: {\n exports: {},\n },\n console,\n require: isESModule ? createRequire(import.meta.url) : require,\n};\n\nconst transformationOption: BuildOptions = {\n loader: {\n '.js': 'js',\n '.jsx': 'jsx',\n '.mjs': 'js',\n '.ts': 'ts',\n '.tsx': 'tsx',\n '.cjs': 'js',\n '.json': 'json',\n },\n format: 'cjs', // Output format as commonjs\n target: 'es2017',\n packages: 'external',\n write: false,\n bundle: true,\n};\n\nconst filterValidContentDeclaration = (\n contentDeclaration: DeclarationContent\n): DeclarationContent => {\n // @TODO Implement filtering of valid content declaration\n return contentDeclaration;\n};\n\n/**\n * Load the content declaration from the given path\n *\n * Accepts JSON, JS, MJS and TS files as configuration\n */\nexport const loadContentDeclaration = (\n contentDeclarationFilePath: string\n): DeclarationContent | undefined => {\n let contentDeclaration: DeclarationContent | undefined = undefined;\n\n const fileExtension = contentDeclarationFilePath.split('.').pop() ?? '';\n\n try {\n if (fileExtension === 'json') {\n // Assume JSON\n return require(contentDeclarationFilePath);\n }\n\n // Rest is JS, MJS or TS\n\n const moduleResult: BuildResult = buildSync({\n entryPoints: [contentDeclarationFilePath],\n\n ...transformationOption,\n });\n\n const moduleResultString = moduleResult.outputFiles?.[0].text;\n\n if (!moduleResultString) {\n console.error('Configuration file could not be loaded.');\n return undefined;\n }\n\n runInNewContext(moduleResultString, sandboxContext);\n\n if (\n sandboxContext.exports.default &&\n Object.keys(sandboxContext.exports.default).length > 0\n ) {\n // ES Module\n contentDeclaration = sandboxContext.exports.default;\n } else if (\n sandboxContext.module.exports.defaults &&\n Object.keys(sandboxContext.module.exports.defaults).length > 0\n ) {\n // CommonJS\n contentDeclaration = sandboxContext.module.exports.default;\n } else if (\n sandboxContext.module.exports.default &&\n Object.keys(sandboxContext.module.exports.default).length > 0\n ) {\n // ES Module\n contentDeclaration = sandboxContext.module.exports.default;\n } else if (\n sandboxContext.module.exports &&\n Object.keys(sandboxContext.module.exports).length > 0\n ) {\n // Other\n contentDeclaration = sandboxContext.module.exports;\n }\n\n if (typeof contentDeclaration === 'undefined') {\n console.error('Configuration file could not be loaded.');\n return undefined;\n }\n\n return filterValidContentDeclaration(contentDeclaration);\n } catch (error) {\n console.error('Error:', error);\n 
}\n};\n"],"mappings":"AACA,SAAS,qBAAqB;AAC9B,SAAuB,uBAAuB;AAE9C,SAA4B,iBAAmC;AAE/D,MAAM,aAAa,OAAO,YAAY,QAAQ;AAE9C,MAAM,iBAA0B;AAAA,EAC9B,SAAS;AAAA,IACP,SAAS,CAAC;AAAA,EACZ;AAAA,EACA,QAAQ;AAAA,IACN,SAAS,CAAC;AAAA,EACZ;AAAA,EACA;AAAA,EACA,SAAS,aAAa,cAAc,YAAY,GAAG,IAAI;AACzD;AAEA,MAAM,uBAAqC;AAAA,EACzC,QAAQ;AAAA,IACN,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,SAAS;AAAA,EACX;AAAA,EACA,QAAQ;AAAA;AAAA,EACR,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,OAAO;AAAA,EACP,QAAQ;AACV;AAEA,MAAM,gCAAgC,CACpC,uBACuB;AAEvB,SAAO;AACT;AAOO,MAAM,yBAAyB,CACpC,+BACmC;AACnC,MAAI,qBAAqD;AAEzD,QAAM,gBAAgB,2BAA2B,MAAM,GAAG,EAAE,IAAI,KAAK;AAErE,MAAI;AACF,QAAI,kBAAkB,QAAQ;AAE5B,aAAO,QAAQ,0BAA0B;AAAA,IAC3C;AAIA,UAAM,eAA4B,UAAU;AAAA,MAC1C,aAAa,CAAC,0BAA0B;AAAA,MAExC,GAAG;AAAA,IACL,CAAC;AAED,UAAM,qBAAqB,aAAa,cAAc,CAAC,EAAE;AAEzD,QAAI,CAAC,oBAAoB;AACvB,cAAQ,MAAM,yCAAyC;AACvD,aAAO;AAAA,IACT;AAEA,oBAAgB,oBAAoB,cAAc;AAElD,QACE,eAAe,QAAQ,WACvB,OAAO,KAAK,eAAe,QAAQ,OAAO,EAAE,SAAS,GACrD;AAEA,2BAAqB,eAAe,QAAQ;AAAA,IAC9C,WACE,eAAe,OAAO,QAAQ,YAC9B,OAAO,KAAK,eAAe,OAAO,QAAQ,QAAQ,EAAE,SAAS,GAC7D;AAEA,2BAAqB,eAAe,OAAO,QAAQ;AAAA,IACrD,WACE,eAAe,OAAO,QAAQ,WAC9B,OAAO,KAAK,eAAe,OAAO,QAAQ,OAAO,EAAE,SAAS,GAC5D;AAEA,2BAAqB,eAAe,OAAO,QAAQ;AAAA,IACrD,WACE,eAAe,OAAO,WACtB,OAAO,KAAK,eAAe,OAAO,OAAO,EAAE,SAAS,GACpD;AAEA,2BAAqB,eAAe,OAAO;AAAA,IAC7C;AAEA,QAAI,OAAO,uBAAuB,aAAa;AAC7C,cAAQ,MAAM,yCAAyC;AACvD,aAAO;AAAA,IACT;AAEA,WAAO,8BAA8B,kBAAkB;AAAA,EACzD,SAAS,OAAO;AACd,YAAQ,MAAM,UAAU,KAAK;AAAA,EAC/B;AACF;","names":[]}