@intlayer/chokidar 5.5.11 → 5.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (84)
  1. package/README.md +1 -1
  2. package/dist/cjs/fetchDistantDictionaries.cjs +1 -1
  3. package/dist/cjs/fetchDistantDictionaries.cjs.map +1 -1
  4. package/dist/cjs/fetchDistantDictionaryKeys.cjs +1 -1
  5. package/dist/cjs/fetchDistantDictionaryKeys.cjs.map +1 -1
  6. package/dist/cjs/getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.cjs +2 -2
  7. package/dist/cjs/getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.cjs.map +1 -1
  8. package/dist/cjs/index.cjs +16 -2
  9. package/dist/cjs/index.cjs.map +1 -1
  10. package/dist/cjs/listGitFiles.cjs +69 -2
  11. package/dist/cjs/listGitFiles.cjs.map +1 -1
  12. package/dist/cjs/loadDictionaries/loadDictionaries.cjs +18 -5
  13. package/dist/cjs/loadDictionaries/loadDictionaries.cjs.map +1 -1
  14. package/dist/cjs/log.cjs +3 -3
  15. package/dist/cjs/log.cjs.map +1 -1
  16. package/dist/cjs/prepareIntlayer.cjs +4 -2
  17. package/dist/cjs/prepareIntlayer.cjs.map +1 -1
  18. package/dist/cjs/transpiler/declaration_file_to_dictionary/i18next_dictionary/buildI18nextDictionary.cjs +1 -1
  19. package/dist/cjs/transpiler/declaration_file_to_dictionary/i18next_dictionary/buildI18nextDictionary.cjs.map +1 -1
  20. package/dist/cjs/transpiler/dictionary_to_main/generateDictionaryListContent.cjs +2 -2
  21. package/dist/cjs/transpiler/dictionary_to_main/generateDictionaryListContent.cjs.map +1 -1
  22. package/dist/cjs/transpiler/dictionary_to_type/createModuleAugmentation.cjs +4 -3
  23. package/dist/cjs/transpiler/dictionary_to_type/createModuleAugmentation.cjs.map +1 -1
  24. package/dist/cjs/{utils.cjs → utils/getFileHash.cjs} +6 -22
  25. package/dist/cjs/utils/getFileHash.cjs.map +1 -0
  26. package/dist/cjs/utils/kebabCaseToCamelCase.cjs +39 -0
  27. package/dist/cjs/utils/kebabCaseToCamelCase.cjs.map +1 -0
  28. package/dist/cjs/utils/runOnce.cjs +58 -0
  29. package/dist/cjs/utils/runOnce.cjs.map +1 -0
  30. package/dist/cjs/utils/sortAlphabetically.cjs +29 -0
  31. package/dist/cjs/utils/sortAlphabetically.cjs.map +1 -0
  32. package/dist/esm/fetchDistantDictionaries.mjs +2 -2
  33. package/dist/esm/fetchDistantDictionaries.mjs.map +1 -1
  34. package/dist/esm/fetchDistantDictionaryKeys.mjs +1 -1
  35. package/dist/esm/fetchDistantDictionaryKeys.mjs.map +1 -1
  36. package/dist/esm/getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.mjs +1 -1
  37. package/dist/esm/getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.mjs.map +1 -1
  38. package/dist/esm/index.mjs +12 -2
  39. package/dist/esm/index.mjs.map +1 -1
  40. package/dist/esm/listGitFiles.mjs +67 -1
  41. package/dist/esm/listGitFiles.mjs.map +1 -1
  42. package/dist/esm/loadDictionaries/loadDictionaries.mjs +17 -4
  43. package/dist/esm/loadDictionaries/loadDictionaries.mjs.map +1 -1
  44. package/dist/esm/log.mjs +2 -2
  45. package/dist/esm/log.mjs.map +1 -1
  46. package/dist/esm/prepareIntlayer.mjs +4 -2
  47. package/dist/esm/prepareIntlayer.mjs.map +1 -1
  48. package/dist/esm/transpiler/declaration_file_to_dictionary/i18next_dictionary/buildI18nextDictionary.mjs +1 -1
  49. package/dist/esm/transpiler/declaration_file_to_dictionary/i18next_dictionary/buildI18nextDictionary.mjs.map +1 -1
  50. package/dist/esm/transpiler/dictionary_to_main/generateDictionaryListContent.mjs +1 -1
  51. package/dist/esm/transpiler/dictionary_to_main/generateDictionaryListContent.mjs.map +1 -1
  52. package/dist/esm/transpiler/dictionary_to_type/createModuleAugmentation.mjs +2 -1
  53. package/dist/esm/transpiler/dictionary_to_type/createModuleAugmentation.mjs.map +1 -1
  54. package/dist/esm/utils/getFileHash.mjs +9 -0
  55. package/dist/esm/utils/getFileHash.mjs.map +1 -0
  56. package/dist/esm/{utils.mjs → utils/kebabCaseToCamelCase.mjs} +2 -10
  57. package/dist/esm/utils/kebabCaseToCamelCase.mjs.map +1 -0
  58. package/dist/esm/utils/runOnce.mjs +34 -0
  59. package/dist/esm/utils/runOnce.mjs.map +1 -0
  60. package/dist/esm/utils/sortAlphabetically.mjs +5 -0
  61. package/dist/esm/utils/sortAlphabetically.mjs.map +1 -0
  62. package/dist/types/index.d.ts +6 -2
  63. package/dist/types/index.d.ts.map +1 -1
  64. package/dist/types/listGitFiles.d.ts +6 -0
  65. package/dist/types/listGitFiles.d.ts.map +1 -1
  66. package/dist/types/loadDictionaries/loadDictionaries.d.ts.map +1 -1
  67. package/dist/types/log.d.ts.map +1 -1
  68. package/dist/types/prepareIntlayer.d.ts +1 -1
  69. package/dist/types/prepareIntlayer.d.ts.map +1 -1
  70. package/dist/types/transpiler/declaration_file_to_dictionary/i18next_dictionary/buildI18nextDictionary.d.ts.map +1 -1
  71. package/dist/types/transpiler/dictionary_to_type/createModuleAugmentation.d.ts.map +1 -1
  72. package/dist/types/utils/getFileHash.d.ts +2 -0
  73. package/dist/types/utils/getFileHash.d.ts.map +1 -0
  74. package/dist/types/utils/kebabCaseToCamelCase.d.ts +2 -0
  75. package/dist/types/utils/kebabCaseToCamelCase.d.ts.map +1 -0
  76. package/dist/types/utils/runOnce.d.ts +24 -0
  77. package/dist/types/utils/runOnce.d.ts.map +1 -0
  78. package/dist/types/utils/sortAlphabetically.d.ts +2 -0
  79. package/dist/types/utils/sortAlphabetically.d.ts.map +1 -0
  80. package/package.json +13 -13
  81. package/dist/cjs/utils.cjs.map +0 -1
  82. package/dist/esm/utils.mjs.map +0 -1
  83. package/dist/types/utils.d.ts +0 -4
  84. package/dist/types/utils.d.ts.map +0 -1
@@ -36,8 +36,9 @@ var import_config = require("@intlayer/config");
  var import_fast_glob = __toESM(require("fast-glob"));
  var import_fs = require("fs");
  var import_path = require("path");
- var import_utils = require('../../utils.cjs');
- const getTypeName = (key) => `${(0, import_utils.kebabCaseToCamelCase)(key)}Content`;
+ var import_getFileHash = require('../../utils/getFileHash.cjs');
+ var import_kebabCaseToCamelCase = require('../../utils/kebabCaseToCamelCase.cjs');
+ const getTypeName = (key) => `${(0, import_kebabCaseToCamelCase.kebabCaseToCamelCase)(key)}Content`;
  const formatLocales = (locales) => locales.map((locale) => {
  for (const key in import_config.Locales) {
  if (import_config.Locales[key] === locale) {
@@ -54,7 +55,7 @@ const generateTypeIndexContent = (typeFiles, configuration = (0, import_config.g
  relativePath: `./${(0, import_path.relative)(moduleAugmentationDir, dictionaryPath)}`,
  id: (0, import_path.basename)(dictionaryPath, (0, import_path.extname)(dictionaryPath)),
  // Get the base name as the dictionary id (without the extension)
- hash: `_${(0, import_utils.getFileHash)(dictionaryPath)}`
+ hash: `_${(0, import_getFileHash.getFileHash)(dictionaryPath)}`
  // Get the hash of the dictionary to avoid conflicts
  }));
  dictionariesRef.forEach((dictionary) => {
@@ -1 +1 @@
- {"version":3,"sources":["../../../../src/transpiler/dictionary_to_type/createModuleAugmentation.ts"],"sourcesContent":["import { Locales, getConfiguration, normalizePath } from '@intlayer/config';\nimport fg from 'fast-glob';\nimport { existsSync, mkdirSync, writeFileSync } from 'fs';\nimport { basename, extname, join, relative } from 'path';\nimport { getFileHash, kebabCaseToCamelCase } from '../../utils';\n\nexport const getTypeName = (key: string): string =>\n `${kebabCaseToCamelCase(key)}Content`;\n\nconst formatLocales = (locales: Locales[]): string =>\n locales\n .map((locale) => {\n for (const key in Locales) {\n if (Locales[key as keyof typeof Locales] === locale) {\n return `Locales.${key}`;\n }\n }\n })\n .join(' | ');\n\n/**\n * This function generates the content of the module augmentation file\n */\nconst generateTypeIndexContent = (\n typeFiles: string[],\n configuration = getConfiguration()\n): string => {\n const { content, internationalization } = configuration;\n const { moduleAugmentationDir } = content;\n const { locales, requiredLocales, strictMode } = internationalization;\n\n let fileContent =\n \"/* eslint-disable */\\nimport { Locales } from 'intlayer';\\n\";\n\n const dictionariesRef = typeFiles.map((dictionaryPath) => ({\n relativePath: `./${relative(moduleAugmentationDir, dictionaryPath)}`,\n id: basename(dictionaryPath, extname(dictionaryPath)), // Get the base name as the dictionary id (without the extension)\n hash: `_${getFileHash(dictionaryPath)}`, // Get the hash of the dictionary to avoid conflicts\n }));\n\n // Import all dictionaries\n dictionariesRef.forEach((dictionary) => {\n fileContent += `import ${dictionary.hash} from '${dictionary.relativePath}';\\n`;\n });\n\n fileContent += '\\n';\n\n // Format Dictionary Map\n const formattedDictionaryMap: string = dictionariesRef\n .map((dictionary) => ` \"${dictionary.id}\": typeof ${dictionary.hash};`)\n .join('\\n');\n\n const requiredLocalesValues =\n requiredLocales.length > 0\n ? requiredLocales.filter((locale) =>\n locales.map((locale) => String(locale)).includes(String(locale))\n )\n : locales;\n\n const formattedLocales = formatLocales(locales);\n const formattedRequiredLocales = formatLocales(requiredLocalesValues);\n\n const strictModeRecord =\n strictMode === 'strict'\n ? `interface IConfigLocales<Content> extends Record<DeclaredLocales, Content> {}`\n : strictMode === 'inclusive'\n ? 
`interface IConfigLocales<Content> extends Record<ExtractedLocales, Content>, Partial<Record<ExcludedLocales, Content>> {}`\n : `interface IConfigLocales<Content> extends Partial<Record<Locales, Content>> {}`;\n\n /**\n * Write the module augmentation to extend the intlayer module with the dictionaries types\n * Will suggest the type resulting of the dictionaries\n *\n * declare module 'intlayer' {\n * interface IntlayerDictionaryTypesConnector = {\n * dictionaries: {\n * id: DictionaryType;\n * }\n * }\n *\n * type ConfigLocales = Locales.ENGLISH | Locales.FRENCH | Locales.SPANISH;\n * type ExtractedLocales = Extract<Locales, ConfigLocales>;\n *\n * interface IConfigLocales<Content> extends Record<ExtractedLocales, Content>, Partial<Record<ExcludedLocales, Content>> {}\n *\n *\n * }\n * See https://www.typescriptlang.org/docs/handbook/declaration-merging.html#module-augmentation\n */\n fileContent += `declare module 'intlayer' {\\n`;\n fileContent += ` interface IntlayerDictionaryTypesConnector {\\n${formattedDictionaryMap}\\n }\\n\\n`;\n fileContent += ` type DeclaredLocales = ${formattedLocales};\\n`;\n fileContent += ` type RequiredLocales = ${formattedRequiredLocales};\\n`;\n fileContent += ` type ExtractedLocales = Extract<Locales, RequiredLocales>;\\n`;\n fileContent += ` type ExcludedLocales = Exclude<Locales, RequiredLocales>;\\n`;\n fileContent += ` ${strictModeRecord}\\n`;\n fileContent += `}`;\n\n return fileContent;\n};\n\n/**\n * This function generates a index file merging all the types\n */\nexport const createModuleAugmentation = (\n configuration = getConfiguration()\n) => {\n const { moduleAugmentationDir, typesDir } = configuration.content;\n\n // Create main directory if it doesn't exist\n if (!existsSync(moduleAugmentationDir)) {\n mkdirSync(moduleAugmentationDir, { recursive: true });\n }\n\n const dictionariesTypesDefinitions: string[] = fg.sync(\n normalizePath(`${typesDir}/*.ts`),\n {\n ignore: ['**/*.d.ts'],\n }\n );\n // Create the dictionary list file\n\n const tsContent = generateTypeIndexContent(dictionariesTypesDefinitions);\n writeFileSync(join(moduleAugmentationDir, 'intlayer.d.ts'), 
tsContent);\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAyD;AACzD,uBAAe;AACf,gBAAqD;AACrD,kBAAkD;AAClD,mBAAkD;AAE3C,MAAM,cAAc,CAAC,QAC1B,OAAG,mCAAqB,GAAG,CAAC;AAE9B,MAAM,gBAAgB,CAAC,YACrB,QACG,IAAI,CAAC,WAAW;AACf,aAAW,OAAO,uBAAS;AACzB,QAAI,sBAAQ,GAA2B,MAAM,QAAQ;AACnD,aAAO,WAAW,GAAG;AAAA,IACvB;AAAA,EACF;AACF,CAAC,EACA,KAAK,KAAK;AAKf,MAAM,2BAA2B,CAC/B,WACA,oBAAgB,gCAAiB,MACtB;AACX,QAAM,EAAE,SAAS,qBAAqB,IAAI;AAC1C,QAAM,EAAE,sBAAsB,IAAI;AAClC,QAAM,EAAE,SAAS,iBAAiB,WAAW,IAAI;AAEjD,MAAI,cACF;AAEF,QAAM,kBAAkB,UAAU,IAAI,CAAC,oBAAoB;AAAA,IACzD,cAAc,SAAK,sBAAS,uBAAuB,cAAc,CAAC;AAAA,IAClE,QAAI,sBAAS,oBAAgB,qBAAQ,cAAc,CAAC;AAAA;AAAA,IACpD,MAAM,QAAI,0BAAY,cAAc,CAAC;AAAA;AAAA,EACvC,EAAE;AAGF,kBAAgB,QAAQ,CAAC,eAAe;AACtC,mBAAe,UAAU,WAAW,IAAI,UAAU,WAAW,YAAY;AAAA;AAAA,EAC3E,CAAC;AAED,iBAAe;AAGf,QAAM,yBAAiC,gBACpC,IAAI,CAAC,eAAe,QAAQ,WAAW,EAAE,aAAa,WAAW,IAAI,GAAG,EACxE,KAAK,IAAI;AAEZ,QAAM,wBACJ,gBAAgB,SAAS,IACrB,gBAAgB;AAAA,IAAO,CAAC,WACtB,QAAQ,IAAI,CAACA,YAAW,OAAOA,OAAM,CAAC,EAAE,SAAS,OAAO,MAAM,CAAC;AAAA,EACjE,IACA;AAEN,QAAM,mBAAmB,cAAc,OAAO;AAC9C,QAAM,2BAA2B,cAAc,qBAAqB;AAEpE,QAAM,mBACJ,eAAe,WACX,kFACA,eAAe,cACb,8HACA;AAsBR,iBAAe;AAAA;AACf,iBAAe;AAAA,EAAmD,sBAAsB;AAAA;AAAA;AAAA;AACxF,iBAAe,4BAA4B,gBAAgB;AAAA;AAC3D,iBAAe,4BAA4B,wBAAwB;AAAA;AACnE,iBAAe;AAAA;AACf,iBAAe;AAAA;AACf,iBAAe,KAAK,gBAAgB;AAAA;AACpC,iBAAe;AAEf,SAAO;AACT;AAKO,MAAM,2BAA2B,CACtC,oBAAgB,gCAAiB,MAC9B;AACH,QAAM,EAAE,uBAAuB,SAAS,IAAI,cAAc;AAG1D,MAAI,KAAC,sBAAW,qBAAqB,GAAG;AACtC,6BAAU,uBAAuB,EAAE,WAAW,KAAK,CAAC;AAAA,EACtD;AAEA,QAAM,+BAAyC,iBAAAC,QAAG;AAAA,QAChD,6BAAc,GAAG,QAAQ,OAAO;AAAA,IAChC;AAAA,MACE,QAAQ,CAAC,WAAW;AAAA,IACtB;AAAA,EACF;AAGA,QAAM,YAAY,yBAAyB,4BAA4B;AACvE,mCAAc,kBAAK,uBAAuB,eAAe,GAAG,SAAS;AACvE;","names":["locale","fg"]}
+ {"version":3,"sources":["../../../../src/transpiler/dictionary_to_type/createModuleAugmentation.ts"],"sourcesContent":["import { Locales, getConfiguration, normalizePath } from '@intlayer/config';\nimport fg from 'fast-glob';\nimport { existsSync, mkdirSync, writeFileSync } from 'fs';\nimport { basename, extname, join, relative } from 'path';\nimport { getFileHash } from '../../utils/getFileHash';\nimport { kebabCaseToCamelCase } from '../../utils/kebabCaseToCamelCase';\n\nexport const getTypeName = (key: string): string =>\n `${kebabCaseToCamelCase(key)}Content`;\n\nconst formatLocales = (locales: Locales[]): string =>\n locales\n .map((locale) => {\n for (const key in Locales) {\n if (Locales[key as keyof typeof Locales] === locale) {\n return `Locales.${key}`;\n }\n }\n })\n .join(' | ');\n\n/**\n * This function generates the content of the module augmentation file\n */\nconst generateTypeIndexContent = (\n typeFiles: string[],\n configuration = getConfiguration()\n): string => {\n const { content, internationalization } = configuration;\n const { moduleAugmentationDir } = content;\n const { locales, requiredLocales, strictMode } = internationalization;\n\n let fileContent =\n \"/* eslint-disable */\\nimport { Locales } from 'intlayer';\\n\";\n\n const dictionariesRef = typeFiles.map((dictionaryPath) => ({\n relativePath: `./${relative(moduleAugmentationDir, dictionaryPath)}`,\n id: basename(dictionaryPath, extname(dictionaryPath)), // Get the base name as the dictionary id (without the extension)\n hash: `_${getFileHash(dictionaryPath)}`, // Get the hash of the dictionary to avoid conflicts\n }));\n\n // Import all dictionaries\n dictionariesRef.forEach((dictionary) => {\n fileContent += `import ${dictionary.hash} from '${dictionary.relativePath}';\\n`;\n });\n\n fileContent += '\\n';\n\n // Format Dictionary Map\n const formattedDictionaryMap: string = dictionariesRef\n .map((dictionary) => ` \"${dictionary.id}\": typeof ${dictionary.hash};`)\n .join('\\n');\n\n const requiredLocalesValues =\n requiredLocales.length > 0\n ? requiredLocales.filter((locale) =>\n locales.map((locale) => String(locale)).includes(String(locale))\n )\n : locales;\n\n const formattedLocales = formatLocales(locales);\n const formattedRequiredLocales = formatLocales(requiredLocalesValues);\n\n const strictModeRecord =\n strictMode === 'strict'\n ? `interface IConfigLocales<Content> extends Record<DeclaredLocales, Content> {}`\n : strictMode === 'inclusive'\n ? 
`interface IConfigLocales<Content> extends Record<ExtractedLocales, Content>, Partial<Record<ExcludedLocales, Content>> {}`\n : `interface IConfigLocales<Content> extends Partial<Record<Locales, Content>> {}`;\n\n /**\n * Write the module augmentation to extend the intlayer module with the dictionaries types\n * Will suggest the type resulting of the dictionaries\n *\n * declare module 'intlayer' {\n * interface IntlayerDictionaryTypesConnector = {\n * dictionaries: {\n * id: DictionaryType;\n * }\n * }\n *\n * type ConfigLocales = Locales.ENGLISH | Locales.FRENCH | Locales.SPANISH;\n * type ExtractedLocales = Extract<Locales, ConfigLocales>;\n *\n * interface IConfigLocales<Content> extends Record<ExtractedLocales, Content>, Partial<Record<ExcludedLocales, Content>> {}\n *\n *\n * }\n * See https://www.typescriptlang.org/docs/handbook/declaration-merging.html#module-augmentation\n */\n fileContent += `declare module 'intlayer' {\\n`;\n fileContent += ` interface IntlayerDictionaryTypesConnector {\\n${formattedDictionaryMap}\\n }\\n\\n`;\n fileContent += ` type DeclaredLocales = ${formattedLocales};\\n`;\n fileContent += ` type RequiredLocales = ${formattedRequiredLocales};\\n`;\n fileContent += ` type ExtractedLocales = Extract<Locales, RequiredLocales>;\\n`;\n fileContent += ` type ExcludedLocales = Exclude<Locales, RequiredLocales>;\\n`;\n fileContent += ` ${strictModeRecord}\\n`;\n fileContent += `}`;\n\n return fileContent;\n};\n\n/**\n * This function generates a index file merging all the types\n */\nexport const createModuleAugmentation = (\n configuration = getConfiguration()\n) => {\n const { moduleAugmentationDir, typesDir } = configuration.content;\n\n // Create main directory if it doesn't exist\n if (!existsSync(moduleAugmentationDir)) {\n mkdirSync(moduleAugmentationDir, { recursive: true });\n }\n\n const dictionariesTypesDefinitions: string[] = fg.sync(\n normalizePath(`${typesDir}/*.ts`),\n {\n ignore: ['**/*.d.ts'],\n }\n );\n // Create the dictionary list file\n\n const tsContent = generateTypeIndexContent(dictionariesTypesDefinitions);\n writeFileSync(join(moduleAugmentationDir, 'intlayer.d.ts'), 
tsContent);\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAyD;AACzD,uBAAe;AACf,gBAAqD;AACrD,kBAAkD;AAClD,yBAA4B;AAC5B,kCAAqC;AAE9B,MAAM,cAAc,CAAC,QAC1B,OAAG,kDAAqB,GAAG,CAAC;AAE9B,MAAM,gBAAgB,CAAC,YACrB,QACG,IAAI,CAAC,WAAW;AACf,aAAW,OAAO,uBAAS;AACzB,QAAI,sBAAQ,GAA2B,MAAM,QAAQ;AACnD,aAAO,WAAW,GAAG;AAAA,IACvB;AAAA,EACF;AACF,CAAC,EACA,KAAK,KAAK;AAKf,MAAM,2BAA2B,CAC/B,WACA,oBAAgB,gCAAiB,MACtB;AACX,QAAM,EAAE,SAAS,qBAAqB,IAAI;AAC1C,QAAM,EAAE,sBAAsB,IAAI;AAClC,QAAM,EAAE,SAAS,iBAAiB,WAAW,IAAI;AAEjD,MAAI,cACF;AAEF,QAAM,kBAAkB,UAAU,IAAI,CAAC,oBAAoB;AAAA,IACzD,cAAc,SAAK,sBAAS,uBAAuB,cAAc,CAAC;AAAA,IAClE,QAAI,sBAAS,oBAAgB,qBAAQ,cAAc,CAAC;AAAA;AAAA,IACpD,MAAM,QAAI,gCAAY,cAAc,CAAC;AAAA;AAAA,EACvC,EAAE;AAGF,kBAAgB,QAAQ,CAAC,eAAe;AACtC,mBAAe,UAAU,WAAW,IAAI,UAAU,WAAW,YAAY;AAAA;AAAA,EAC3E,CAAC;AAED,iBAAe;AAGf,QAAM,yBAAiC,gBACpC,IAAI,CAAC,eAAe,QAAQ,WAAW,EAAE,aAAa,WAAW,IAAI,GAAG,EACxE,KAAK,IAAI;AAEZ,QAAM,wBACJ,gBAAgB,SAAS,IACrB,gBAAgB;AAAA,IAAO,CAAC,WACtB,QAAQ,IAAI,CAACA,YAAW,OAAOA,OAAM,CAAC,EAAE,SAAS,OAAO,MAAM,CAAC;AAAA,EACjE,IACA;AAEN,QAAM,mBAAmB,cAAc,OAAO;AAC9C,QAAM,2BAA2B,cAAc,qBAAqB;AAEpE,QAAM,mBACJ,eAAe,WACX,kFACA,eAAe,cACb,8HACA;AAsBR,iBAAe;AAAA;AACf,iBAAe;AAAA,EAAmD,sBAAsB;AAAA;AAAA;AAAA;AACxF,iBAAe,4BAA4B,gBAAgB;AAAA;AAC3D,iBAAe,4BAA4B,wBAAwB;AAAA;AACnE,iBAAe;AAAA;AACf,iBAAe;AAAA;AACf,iBAAe,KAAK,gBAAgB;AAAA;AACpC,iBAAe;AAEf,SAAO;AACT;AAKO,MAAM,2BAA2B,CACtC,oBAAgB,gCAAiB,MAC9B;AACH,QAAM,EAAE,uBAAuB,SAAS,IAAI,cAAc;AAG1D,MAAI,KAAC,sBAAW,qBAAqB,GAAG;AACtC,6BAAU,uBAAuB,EAAE,WAAW,KAAK,CAAC;AAAA,EACtD;AAEA,QAAM,+BAAyC,iBAAAC,QAAG;AAAA,QAChD,6BAAc,GAAG,QAAQ,OAAO;AAAA,IAChC;AAAA,MACE,QAAQ,CAAC,WAAW;AAAA,IACtB;AAAA,EACF;AAGA,QAAM,YAAY,yBAAyB,4BAA4B;AACvE,mCAAc,kBAAK,uBAAuB,eAAe,GAAG,SAAS;AACvE;","names":["locale","fg"]}
@@ -26,34 +26,18 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
  mod
  ));
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
- var utils_exports = {};
- __export(utils_exports, {
- getFileHash: () => getFileHash,
- kebabCaseToCamelCase: () => kebabCaseToCamelCase,
- sortAlphabetically: () => sortAlphabetically
+ var getFileHash_exports = {};
+ __export(getFileHash_exports, {
+ getFileHash: () => getFileHash
  });
- module.exports = __toCommonJS(utils_exports);
+ module.exports = __toCommonJS(getFileHash_exports);
  var import_crypto_js = __toESM(require("crypto-js"));
  const getFileHash = (filePath) => {
  const hash = import_crypto_js.default.SHA3(filePath);
  return hash.toString(import_crypto_js.default.enc.Base64).replace(/[^A-Z\d]/gi, "").substring(0, 20);
  };
- const kebabCaseToCamelCase = (name) => {
- return name.split(/[^a-zA-Z0-9]+/).filter(Boolean).map((word) => {
- const lowerWord = word.toLowerCase();
- let capitalized = lowerWord.charAt(0).toUpperCase() + lowerWord.slice(1);
- capitalized = capitalized.replace(
- /(\d)([a-z])/g,
- (_, number, char) => number + char.toUpperCase()
- );
- return capitalized;
- }).join("");
- };
- const sortAlphabetically = (a, b) => a.localeCompare(b);
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
- getFileHash,
- kebabCaseToCamelCase,
- sortAlphabetically
+ getFileHash
  });
- //# sourceMappingURL=utils.cjs.map
+ //# sourceMappingURL=getFileHash.cjs.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../../src/utils/getFileHash.ts"],"sourcesContent":["import crypto from 'crypto-js';\n\nexport const getFileHash = (filePath: string) => {\n const hash = crypto.SHA3(filePath);\n\n return hash\n .toString(crypto.enc.Base64)\n .replace(/[^A-Z\\d]/gi, '')\n .substring(0, 20);\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,uBAAmB;AAEZ,MAAM,cAAc,CAAC,aAAqB;AAC/C,QAAM,OAAO,iBAAAA,QAAO,KAAK,QAAQ;AAEjC,SAAO,KACJ,SAAS,iBAAAA,QAAO,IAAI,MAAM,EAC1B,QAAQ,cAAc,EAAE,EACxB,UAAU,GAAG,EAAE;AACpB;","names":["crypto"]}
@@ -0,0 +1,39 @@
+ "use strict";
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+ };
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+ var kebabCaseToCamelCase_exports = {};
+ __export(kebabCaseToCamelCase_exports, {
+ kebabCaseToCamelCase: () => kebabCaseToCamelCase
+ });
+ module.exports = __toCommonJS(kebabCaseToCamelCase_exports);
+ const kebabCaseToCamelCase = (name) => {
+ return name.split(/[^a-zA-Z0-9]+/).filter(Boolean).map((word) => {
+ const lowerWord = word.toLowerCase();
+ let capitalized = lowerWord.charAt(0).toUpperCase() + lowerWord.slice(1);
+ capitalized = capitalized.replace(
+ /(\d)([a-z])/g,
+ (_, number, char) => number + char.toUpperCase()
+ );
+ return capitalized;
+ }).join("");
+ };
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+ kebabCaseToCamelCase
+ });
+ //# sourceMappingURL=kebabCaseToCamelCase.cjs.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../../src/utils/kebabCaseToCamelCase.ts"],"sourcesContent":["export const kebabCaseToCamelCase = (name: string): string => {\n return name\n .split(/[^a-zA-Z0-9]+/) // Split on any non-alphanumeric character\n .filter(Boolean) // Remove any empty strings\n .map((word) => {\n // Convert the entire word to lowercase first\n const lowerWord = word.toLowerCase();\n\n // Capitalize the first character\n let capitalized = lowerWord.charAt(0).toUpperCase() + lowerWord.slice(1);\n\n // Capitalize any letter that follows a number\n capitalized = capitalized.replace(\n /(\\d)([a-z])/g,\n (_, number, char) => number + char.toUpperCase()\n );\n\n return capitalized;\n })\n .join(''); // Concatenate all parts into a single string\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAO,MAAM,uBAAuB,CAAC,SAAyB;AAC5D,SAAO,KACJ,MAAM,eAAe,EACrB,OAAO,OAAO,EACd,IAAI,CAAC,SAAS;AAEb,UAAM,YAAY,KAAK,YAAY;AAGnC,QAAI,cAAc,UAAU,OAAO,CAAC,EAAE,YAAY,IAAI,UAAU,MAAM,CAAC;AAGvE,kBAAc,YAAY;AAAA,MACxB;AAAA,MACA,CAAC,GAAG,QAAQ,SAAS,SAAS,KAAK,YAAY;AAAA,IACjD;AAEA,WAAO;AAAA,EACT,CAAC,EACA,KAAK,EAAE;AACZ;","names":[]}
@@ -0,0 +1,58 @@
+ "use strict";
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+ };
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+ var runOnce_exports = {};
+ __export(runOnce_exports, {
+ runOnce: () => runOnce
+ });
+ module.exports = __toCommonJS(runOnce_exports);
+ var import_promises = require("fs/promises");
+ var import_path = require("path");
+ const runOnce = async (sentinelFilePath, callback, cacheTimeoutMs = 60 * 1e3) => {
+ const currentTimestamp = Date.now();
+ const timeoutDuration = cacheTimeoutMs;
+ try {
+ const sentinelStats = await (0, import_promises.stat)(sentinelFilePath);
+ const sentinelAge = currentTimestamp - sentinelStats.mtime.getTime();
+ if (sentinelAge > timeoutDuration) {
+ await (0, import_promises.unlink)(sentinelFilePath);
+ } else {
+ return;
+ }
+ } catch (err) {
+ if (err.code === "ENOENT") {
+ } else {
+ throw err;
+ }
+ }
+ try {
+ await (0, import_promises.mkdir)((0, import_path.dirname)(sentinelFilePath), { recursive: true });
+ await (0, import_promises.writeFile)(sentinelFilePath, String(currentTimestamp), { flag: "wx" });
+ } catch (err) {
+ if (err.code === "EEXIST") {
+ return;
+ }
+ throw err;
+ }
+ await callback();
+ };
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+ runOnce
+ });
+ //# sourceMappingURL=runOnce.cjs.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../../src/utils/runOnce.ts"],"sourcesContent":["import { mkdir, stat, unlink, writeFile } from 'fs/promises';\nimport { dirname } from 'path';\n\n/**\n * Ensures a callback function runs only once within a specified time window across multiple processes.\n * Uses a sentinel file to coordinate execution and prevent duplicate work.\n *\n * @param sentinelFilePath - Path to the sentinel file used for coordination\n * @param callback - The function to execute (should be async)\n * @param cacheTimeoutMs - Time window in milliseconds during which the sentinel is considered valid (default: 60000ms = 1 minute)\n *\n * @example\n * ```typescript\n * await runPrepareIntlayerOnce(\n * '/tmp/intlayer-sentinel',\n * async () => {\n * // Your initialization logic here\n * await prepareIntlayer();\n * },\n * 30 * 1000 // 30 seconds cache\n * );\n * ```\n *\n * @throws {Error} When there are unexpected filesystem errors\n */\nexport const runOnce = async (\n sentinelFilePath: string,\n callback: () => void | Promise<void>,\n cacheTimeoutMs: number = 60 * 1000 // 1 minute in milliseconds\n) => {\n const currentTimestamp = Date.now();\n const timeoutDuration = cacheTimeoutMs;\n\n try {\n // Check if sentinel file exists and get its stats\n const sentinelStats = await stat(sentinelFilePath);\n const sentinelAge = currentTimestamp - sentinelStats.mtime.getTime();\n\n // If sentinel is older than the timeout, delete it and rebuild\n if (sentinelAge > timeoutDuration) {\n await unlink(sentinelFilePath);\n // Fall through to create new sentinel and rebuild\n } else {\n // Sentinel is recent, no need to rebuild\n return;\n }\n } catch (err: any) {\n if (err.code === 'ENOENT') {\n // File doesn't exist, continue to create it\n } else {\n throw err; // unexpected FS error\n }\n }\n\n try {\n // Ensure the directory exists before writing the file\n await mkdir(dirname(sentinelFilePath), { recursive: true });\n\n // O_EXCL ensures only the *first* process can create the file\n await writeFile(sentinelFilePath, String(currentTimestamp), { flag: 'wx' });\n } catch (err: any) {\n if (err.code === 'EEXIST') {\n // Another process already created it → we're done\n return;\n }\n throw err; // unexpected FS error\n }\n\n await callback();\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAA+C;AAC/C,kBAAwB;AAwBjB,MAAM,UAAU,OACrB,kBACA,UACA,iBAAyB,KAAK,QAC3B;AACH,QAAM,mBAAmB,KAAK,IAAI;AAClC,QAAM,kBAAkB;AAExB,MAAI;AAEF,UAAM,gBAAgB,UAAM,sBAAK,gBAAgB;AACjD,UAAM,cAAc,mBAAmB,cAAc,MAAM,QAAQ;AAGnE,QAAI,cAAc,iBAAiB;AACjC,gBAAM,wBAAO,gBAAgB;AAAA,IAE/B,OAAO;AAEL;AAAA,IACF;AAAA,EACF,SAAS,KAAU;AACjB,QAAI,IAAI,SAAS,UAAU;AAAA,IAE3B,OAAO;AACL,YAAM;AAAA,IACR;AAAA,EACF;AAEA,MAAI;AAEF,cAAM,2BAAM,qBAAQ,gBAAgB,GAAG,EAAE,WAAW,KAAK,CAAC;AAG1D,cAAM,2BAAU,kBAAkB,OAAO,gBAAgB,GAAG,EAAE,MAAM,KAAK,CAAC;AAAA,EAC5E,SAAS,KAAU;AACjB,QAAI,IAAI,SAAS,UAAU;AAEzB;AAAA,IACF;AACA,UAAM;AAAA,EACR;AAEA,QAAM,SAAS;AACjB;","names":[]}
@@ -0,0 +1,29 @@
+ "use strict";
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+ };
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+ var sortAlphabetically_exports = {};
+ __export(sortAlphabetically_exports, {
+ sortAlphabetically: () => sortAlphabetically
+ });
+ module.exports = __toCommonJS(sortAlphabetically_exports);
+ const sortAlphabetically = (a, b) => a.localeCompare(b);
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+ sortAlphabetically
+ });
+ //# sourceMappingURL=sortAlphabetically.cjs.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../../src/utils/sortAlphabetically.ts"],"sourcesContent":["export const sortAlphabetically = (a: string, b: string) => a.localeCompare(b);\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAO,MAAM,qBAAqB,CAAC,GAAW,MAAc,EAAE,cAAc,CAAC;","names":[]}
@@ -1,4 +1,4 @@
- import { getAuthAPI, getDictionaryAPI } from "@intlayer/api";
+ import { getDictionaryAPI, getOAuthAPI } from "@intlayer/api";
  import { getAppLogger, getConfiguration } from "@intlayer/config";
  import pLimit from "p-limit";
  import { logger } from "./log.mjs";
@@ -7,7 +7,7 @@ const fetchDistantDictionaries = async (options) => {
  const appLogger = getAppLogger(config);
  try {
  const { clientId, clientSecret } = config.editor;
- const authAPI = getAuthAPI(void 0, config);
+ const authAPI = getOAuthAPI(config);
  const dictionaryAPI = getDictionaryAPI(void 0, config);
  if (!clientId || !clientSecret) {
  throw new Error(
@@ -1 +1 @@
- {"version":3,"sources":["../../src/fetchDistantDictionaries.ts"],"sourcesContent":["import { getAuthAPI, getDictionaryAPI } from '@intlayer/api';\n// @ts-ignore @intlayer/backend is not build yet\nimport type { DictionaryAPI } from '@intlayer/backend';\nimport { getAppLogger, getConfiguration } from '@intlayer/config';\nimport pLimit from 'p-limit';\nimport { logger } from './log';\n\ntype FetchDistantDictionariesOptions = {\n dictionaryKeys: string[];\n newDictionariesPath?: string;\n logPrefix?: string;\n};\n\n/**\n * Fetch distant dictionaries and update the logger with their statuses.\n */\nexport const fetchDistantDictionaries = async (\n options: FetchDistantDictionariesOptions\n): Promise<DictionaryAPI[]> => {\n const config = getConfiguration();\n const appLogger = getAppLogger(config);\n try {\n const { clientId, clientSecret } = config.editor;\n const authAPI = getAuthAPI(undefined, config);\n const dictionaryAPI = getDictionaryAPI(undefined, config);\n\n if (!clientId || !clientSecret) {\n throw new Error(\n 'Missing OAuth2 client ID or client secret. To get access token go to https://intlayer.org/dashboard/project.'\n );\n }\n\n const oAuth2TokenResult = await authAPI.getOAuth2AccessToken();\n\n const oAuth2AccessToken = oAuth2TokenResult.data?.accessToken;\n\n const distantDictionariesKeys = options.dictionaryKeys;\n\n // Process dictionaries in parallel with a concurrency limit\n const limit = pLimit(5); // Limit the number of concurrent requests\n\n const processDictionary = async (\n dictionaryKey: string\n ): Promise<DictionaryAPI | undefined> => {\n logger.updateStatus([\n {\n dictionaryKey,\n type: 'distant',\n status: { status: 'fetching' },\n },\n ]);\n\n try {\n // Fetch the dictionary\n const getDictionaryResult = await dictionaryAPI.getDictionary(\n dictionaryKey,\n undefined,\n {\n ...(oAuth2AccessToken && {\n headers: {\n Authorization: `Bearer ${oAuth2AccessToken}`,\n },\n }),\n }\n );\n\n const distantDictionary = getDictionaryResult.data;\n\n if (!distantDictionary) {\n throw new Error(`Dictionary ${dictionaryKey} not found on remote`);\n }\n\n logger.updateStatus([\n { dictionaryKey, type: 'distant', status: { status: 'imported' } },\n ]);\n\n return distantDictionary;\n } catch (error) {\n logger.updateStatus([\n {\n dictionaryKey,\n type: 'distant',\n status: {\n status: 'error',\n error: error as Error,\n errorMessage: `${options?.logPrefix ?? 
''}Error fetching dictionary ${dictionaryKey}: ${error}`,\n },\n },\n ]);\n return undefined;\n }\n };\n\n const fetchPromises = distantDictionariesKeys.map((dictionaryKey) =>\n limit(async () => await processDictionary(dictionaryKey))\n );\n\n const result = await Promise.all(fetchPromises);\n\n // Output any error messages\n const statuses = logger.getStatuses();\n for (const statusObj of statuses) {\n const currentState = statusObj.state.find((s) => s.type === 'distant');\n if (currentState && currentState.errorMessage) {\n appLogger(currentState.errorMessage, { level: 'error' });\n }\n }\n\n // Remove undefined values\n const filteredResult = result.filter(\n (dict): dict is DictionaryAPI => dict !== undefined\n );\n\n return filteredResult;\n } catch (error) {\n appLogger(error, { level: 'error' });\n return [];\n }\n};\n"],"mappings":"AAAA,SAAS,YAAY,wBAAwB;AAG7C,SAAS,cAAc,wBAAwB;AAC/C,OAAO,YAAY;AACnB,SAAS,cAAc;AAWhB,MAAM,2BAA2B,OACtC,YAC6B;AAC7B,QAAM,SAAS,iBAAiB;AAChC,QAAM,YAAY,aAAa,MAAM;AACrC,MAAI;AACF,UAAM,EAAE,UAAU,aAAa,IAAI,OAAO;AAC1C,UAAM,UAAU,WAAW,QAAW,MAAM;AAC5C,UAAM,gBAAgB,iBAAiB,QAAW,MAAM;AAExD,QAAI,CAAC,YAAY,CAAC,cAAc;AAC9B,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,UAAM,oBAAoB,MAAM,QAAQ,qBAAqB;AAE7D,UAAM,oBAAoB,kBAAkB,MAAM;AAElD,UAAM,0BAA0B,QAAQ;AAGxC,UAAM,QAAQ,OAAO,CAAC;AAEtB,UAAM,oBAAoB,OACxB,kBACuC;AACvC,aAAO,aAAa;AAAA,QAClB;AAAA,UACE;AAAA,UACA,MAAM;AAAA,UACN,QAAQ,EAAE,QAAQ,WAAW;AAAA,QAC/B;AAAA,MACF,CAAC;AAED,UAAI;AAEF,cAAM,sBAAsB,MAAM,cAAc;AAAA,UAC9C;AAAA,UACA;AAAA,UACA;AAAA,YACE,GAAI,qBAAqB;AAAA,cACvB,SAAS;AAAA,gBACP,eAAe,UAAU,iBAAiB;AAAA,cAC5C;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,cAAM,oBAAoB,oBAAoB;AAE9C,YAAI,CAAC,mBAAmB;AACtB,gBAAM,IAAI,MAAM,cAAc,aAAa,sBAAsB;AAAA,QACnE;AAEA,eAAO,aAAa;AAAA,UAClB,EAAE,eAAe,MAAM,WAAW,QAAQ,EAAE,QAAQ,WAAW,EAAE;AAAA,QACnE,CAAC;AAED,eAAO;AAAA,MACT,SAAS,OAAO;AACd,eAAO,aAAa;AAAA,UAClB;AAAA,YACE;AAAA,YACA,MAAM;AAAA,YACN,QAAQ;AAAA,cACN,QAAQ;AAAA,cACR;AAAA,cACA,cAAc,GAAG,SAAS,aAAa,EAAE,6BAA6B,aAAa,KAAK,KAAK;AAAA,YAC/F;AAAA,UACF;AAAA,QACF,CAAC;AACD,eAAO;AAAA,MACT;AAAA,IACF;AAEA,UAAM,gBAAgB,wBAAwB;AAAA,MAAI,CAAC,kBACjD,MAAM,YAAY,MAAM,kBAAkB,aAAa,CAAC;AAAA,IAC1D;AAEA,UAAM,SAAS,MAAM,QAAQ,IAAI,aAAa;AAG9C,UAAM,WAAW,OAAO,YAAY;AACpC,eAAW,aAAa,UAAU;AAChC,YAAM,eAAe,UAAU,MAAM,KAAK,CAAC,MAAM,EAAE,SAAS,SAAS;AACrE,UAAI,gBAAgB,aAAa,cAAc;AAC7C,kBAAU,aAAa,cAAc,EAAE,OAAO,QAAQ,CAAC;AAAA,MACzD;AAAA,IACF;AAGA,UAAM,iBAAiB,OAAO;AAAA,MAC5B,CAAC,SAAgC,SAAS;AAAA,IAC5C;AAEA,WAAO;AAAA,EACT,SAAS,OAAO;AACd,cAAU,OAAO,EAAE,OAAO,QAAQ,CAAC;AACnC,WAAO,CAAC;AAAA,EACV;AACF;","names":[]}
+ {"version":3,"sources":["../../src/fetchDistantDictionaries.ts"],"sourcesContent":["import { getDictionaryAPI, getOAuthAPI } from '@intlayer/api';\n// @ts-ignore @intlayer/backend is not build yet\nimport type { DictionaryAPI } from '@intlayer/backend';\nimport { getAppLogger, getConfiguration } from '@intlayer/config';\nimport pLimit from 'p-limit';\nimport { logger } from './log';\n\ntype FetchDistantDictionariesOptions = {\n dictionaryKeys: string[];\n newDictionariesPath?: string;\n logPrefix?: string;\n};\n\n/**\n * Fetch distant dictionaries and update the logger with their statuses.\n */\nexport const fetchDistantDictionaries = async (\n options: FetchDistantDictionariesOptions\n): Promise<DictionaryAPI[]> => {\n const config = getConfiguration();\n const appLogger = getAppLogger(config);\n try {\n const { clientId, clientSecret } = config.editor;\n const authAPI = getOAuthAPI(config);\n const dictionaryAPI = getDictionaryAPI(undefined, config);\n\n if (!clientId || !clientSecret) {\n throw new Error(\n 'Missing OAuth2 client ID or client secret. To get access token go to https://intlayer.org/dashboard/project.'\n );\n }\n\n const oAuth2TokenResult = await authAPI.getOAuth2AccessToken();\n\n const oAuth2AccessToken = oAuth2TokenResult.data?.accessToken;\n\n const distantDictionariesKeys = options.dictionaryKeys;\n\n // Process dictionaries in parallel with a concurrency limit\n const limit = pLimit(5); // Limit the number of concurrent requests\n\n const processDictionary = async (\n dictionaryKey: string\n ): Promise<DictionaryAPI | undefined> => {\n logger.updateStatus([\n {\n dictionaryKey,\n type: 'distant',\n status: { status: 'fetching' },\n },\n ]);\n\n try {\n // Fetch the dictionary\n const getDictionaryResult = await dictionaryAPI.getDictionary(\n dictionaryKey,\n undefined,\n {\n ...(oAuth2AccessToken && {\n headers: {\n Authorization: `Bearer ${oAuth2AccessToken}`,\n },\n }),\n }\n );\n\n const distantDictionary = getDictionaryResult.data;\n\n if (!distantDictionary) {\n throw new Error(`Dictionary ${dictionaryKey} not found on remote`);\n }\n\n logger.updateStatus([\n { dictionaryKey, type: 'distant', status: { status: 'imported' } },\n ]);\n\n return distantDictionary;\n } catch (error) {\n logger.updateStatus([\n {\n dictionaryKey,\n type: 'distant',\n status: {\n status: 'error',\n error: error as Error,\n errorMessage: `${options?.logPrefix ?? 
''}Error fetching dictionary ${dictionaryKey}: ${error}`,\n },\n },\n ]);\n return undefined;\n }\n };\n\n const fetchPromises = distantDictionariesKeys.map((dictionaryKey) =>\n limit(async () => await processDictionary(dictionaryKey))\n );\n\n const result = await Promise.all(fetchPromises);\n\n // Output any error messages\n const statuses = logger.getStatuses();\n for (const statusObj of statuses) {\n const currentState = statusObj.state.find((s) => s.type === 'distant');\n if (currentState && currentState.errorMessage) {\n appLogger(currentState.errorMessage, { level: 'error' });\n }\n }\n\n // Remove undefined values\n const filteredResult = result.filter(\n (dict): dict is DictionaryAPI => dict !== undefined\n );\n\n return filteredResult;\n } catch (error) {\n appLogger(error, { level: 'error' });\n return [];\n }\n};\n"],"mappings":"AAAA,SAAS,kBAAkB,mBAAmB;AAG9C,SAAS,cAAc,wBAAwB;AAC/C,OAAO,YAAY;AACnB,SAAS,cAAc;AAWhB,MAAM,2BAA2B,OACtC,YAC6B;AAC7B,QAAM,SAAS,iBAAiB;AAChC,QAAM,YAAY,aAAa,MAAM;AACrC,MAAI;AACF,UAAM,EAAE,UAAU,aAAa,IAAI,OAAO;AAC1C,UAAM,UAAU,YAAY,MAAM;AAClC,UAAM,gBAAgB,iBAAiB,QAAW,MAAM;AAExD,QAAI,CAAC,YAAY,CAAC,cAAc;AAC9B,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,UAAM,oBAAoB,MAAM,QAAQ,qBAAqB;AAE7D,UAAM,oBAAoB,kBAAkB,MAAM;AAElD,UAAM,0BAA0B,QAAQ;AAGxC,UAAM,QAAQ,OAAO,CAAC;AAEtB,UAAM,oBAAoB,OACxB,kBACuC;AACvC,aAAO,aAAa;AAAA,QAClB;AAAA,UACE;AAAA,UACA,MAAM;AAAA,UACN,QAAQ,EAAE,QAAQ,WAAW;AAAA,QAC/B;AAAA,MACF,CAAC;AAED,UAAI;AAEF,cAAM,sBAAsB,MAAM,cAAc;AAAA,UAC9C;AAAA,UACA;AAAA,UACA;AAAA,YACE,GAAI,qBAAqB;AAAA,cACvB,SAAS;AAAA,gBACP,eAAe,UAAU,iBAAiB;AAAA,cAC5C;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,cAAM,oBAAoB,oBAAoB;AAE9C,YAAI,CAAC,mBAAmB;AACtB,gBAAM,IAAI,MAAM,cAAc,aAAa,sBAAsB;AAAA,QACnE;AAEA,eAAO,aAAa;AAAA,UAClB,EAAE,eAAe,MAAM,WAAW,QAAQ,EAAE,QAAQ,WAAW,EAAE;AAAA,QACnE,CAAC;AAED,eAAO;AAAA,MACT,SAAS,OAAO;AACd,eAAO,aAAa;AAAA,UAClB;AAAA,YACE;AAAA,YACA,MAAM;AAAA,YACN,QAAQ;AAAA,cACN,QAAQ;AAAA,cACR;AAAA,cACA,cAAc,GAAG,SAAS,aAAa,EAAE,6BAA6B,aAAa,KAAK,KAAK;AAAA,YAC/F;AAAA,UACF;AAAA,QACF,CAAC;AACD,eAAO;AAAA,MACT;AAAA,IACF;AAEA,UAAM,gBAAgB,wBAAwB;AAAA,MAAI,CAAC,kBACjD,MAAM,YAAY,MAAM,kBAAkB,aAAa,CAAC;AAAA,IAC1D;AAEA,UAAM,SAAS,MAAM,QAAQ,IAAI,aAAa;AAG9C,UAAM,WAAW,OAAO,YAAY;AACpC,eAAW,aAAa,UAAU;AAChC,YAAM,eAAe,UAAU,MAAM,KAAK,CAAC,MAAM,EAAE,SAAS,SAAS;AACrE,UAAI,gBAAgB,aAAa,cAAc;AAC7C,kBAAU,aAAa,cAAc,EAAE,OAAO,QAAQ,CAAC;AAAA,MACzD;AAAA,IACF;AAGA,UAAM,iBAAiB,OAAO;AAAA,MAC5B,CAAC,SAAgC,SAAS;AAAA,IAC5C;AAEA,WAAO;AAAA,EACT,SAAS,OAAO;AACd,cAAU,OAAO,EAAE,OAAO,QAAQ,CAAC;AACnC,WAAO,CAAC;AAAA,EACV;AACF;","names":[]}
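For readers tracking the API change: token retrieval moves from getAuthAPI(undefined, config) to a dedicated OAuth surface. A minimal sketch of the updated call shape, based only on the call sites visible in this diff:

  import { getOAuthAPI } from '@intlayer/api';
  import { getConfiguration } from '@intlayer/config';

  const config = getConfiguration();
  const authAPI = getOAuthAPI(config);

  // Same access-token flow as before, under the renamed namespace.
  const oAuth2TokenResult = await authAPI.getOAuth2AccessToken();
  const accessToken = oAuth2TokenResult.data?.accessToken;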
@@ -8,7 +8,7 @@ const fetchDistantDictionaryKeys = async (configuration = getConfiguration()) =>
  );
  }
  const intlayerAPI = getIntlayerAPI(void 0, configuration);
- const oAuth2TokenResult = await intlayerAPI.auth.getOAuth2AccessToken();
+ const oAuth2TokenResult = await intlayerAPI.oAuth.getOAuth2AccessToken();
  const oAuth2AccessToken = oAuth2TokenResult.data?.accessToken;
  const getDictionariesKeysResult = await intlayerAPI.dictionary.getDictionariesKeys({
  ...oAuth2AccessToken && {
@@ -1 +1 @@
- {"version":3,"sources":["../../src/fetchDistantDictionaryKeys.ts"],"sourcesContent":["import { getIntlayerAPI } from '@intlayer/api';\nimport { getConfiguration, type IntlayerConfig } from '@intlayer/config';\n\nexport const fetchDistantDictionaryKeys = async (\n configuration: IntlayerConfig = getConfiguration()\n): Promise<string[]> => {\n const { clientId, clientSecret } = configuration.editor;\n\n if (!clientId || !clientSecret) {\n throw new Error(\n 'Missing OAuth2 client ID or client secret. To get access token go to https://intlayer.org/dashboard/project.'\n );\n }\n\n const intlayerAPI = getIntlayerAPI(undefined, configuration);\n\n const oAuth2TokenResult = await intlayerAPI.auth.getOAuth2AccessToken();\n\n const oAuth2AccessToken = oAuth2TokenResult.data?.accessToken;\n\n // Get the list of dictionary keys\n const getDictionariesKeysResult =\n await intlayerAPI.dictionary.getDictionariesKeys({\n ...(oAuth2AccessToken && {\n headers: {\n Authorization: `Bearer ${oAuth2AccessToken}`,\n },\n }),\n });\n\n if (!getDictionariesKeysResult.data) {\n throw new Error('No distant dictionaries found');\n }\n\n const distantDictionariesKeys: string[] = getDictionariesKeysResult.data;\n\n // Apply any filtering if needed\n return distantDictionariesKeys;\n};\n"],"mappings":"AAAA,SAAS,sBAAsB;AAC/B,SAAS,wBAA6C;AAE/C,MAAM,6BAA6B,OACxC,gBAAgC,iBAAiB,MAC3B;AACtB,QAAM,EAAE,UAAU,aAAa,IAAI,cAAc;AAEjD,MAAI,CAAC,YAAY,CAAC,cAAc;AAC9B,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,cAAc,eAAe,QAAW,aAAa;AAE3D,QAAM,oBAAoB,MAAM,YAAY,KAAK,qBAAqB;AAEtE,QAAM,oBAAoB,kBAAkB,MAAM;AAGlD,QAAM,4BACJ,MAAM,YAAY,WAAW,oBAAoB;AAAA,IAC/C,GAAI,qBAAqB;AAAA,MACvB,SAAS;AAAA,QACP,eAAe,UAAU,iBAAiB;AAAA,MAC5C;AAAA,IACF;AAAA,EACF,CAAC;AAEH,MAAI,CAAC,0BAA0B,MAAM;AACnC,UAAM,IAAI,MAAM,+BAA+B;AAAA,EACjD;AAEA,QAAM,0BAAoC,0BAA0B;AAGpE,SAAO;AACT;","names":[]}
+ {"version":3,"sources":["../../src/fetchDistantDictionaryKeys.ts"],"sourcesContent":["import { getIntlayerAPI } from '@intlayer/api';\nimport { getConfiguration, type IntlayerConfig } from '@intlayer/config';\n\nexport const fetchDistantDictionaryKeys = async (\n configuration: IntlayerConfig = getConfiguration()\n): Promise<string[]> => {\n const { clientId, clientSecret } = configuration.editor;\n\n if (!clientId || !clientSecret) {\n throw new Error(\n 'Missing OAuth2 client ID or client secret. To get access token go to https://intlayer.org/dashboard/project.'\n );\n }\n\n const intlayerAPI = getIntlayerAPI(undefined, configuration);\n\n const oAuth2TokenResult = await intlayerAPI.oAuth.getOAuth2AccessToken();\n\n const oAuth2AccessToken = oAuth2TokenResult.data?.accessToken;\n\n // Get the list of dictionary keys\n const getDictionariesKeysResult =\n await intlayerAPI.dictionary.getDictionariesKeys({\n ...(oAuth2AccessToken && {\n headers: {\n Authorization: `Bearer ${oAuth2AccessToken}`,\n },\n }),\n });\n\n if (!getDictionariesKeysResult.data) {\n throw new Error('No distant dictionaries found');\n }\n\n const distantDictionariesKeys: string[] = getDictionariesKeysResult.data;\n\n // Apply any filtering if needed\n return distantDictionariesKeys;\n};\n"],"mappings":"AAAA,SAAS,sBAAsB;AAC/B,SAAS,wBAA6C;AAE/C,MAAM,6BAA6B,OACxC,gBAAgC,iBAAiB,MAC3B;AACtB,QAAM,EAAE,UAAU,aAAa,IAAI,cAAc;AAEjD,MAAI,CAAC,YAAY,CAAC,cAAc;AAC9B,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,cAAc,eAAe,QAAW,aAAa;AAE3D,QAAM,oBAAoB,MAAM,YAAY,MAAM,qBAAqB;AAEvE,QAAM,oBAAoB,kBAAkB,MAAM;AAGlD,QAAM,4BACJ,MAAM,YAAY,WAAW,oBAAoB;AAAA,IAC/C,GAAI,qBAAqB;AAAA,MACvB,SAAS;AAAA,QACP,eAAe,UAAU,iBAAiB;AAAA,MAC5C;AAAA,IACF;AAAA,EACF,CAAC;AAEH,MAAI,CAAC,0BAA0B,MAAM;AACnC,UAAM,IAAI,MAAM,+BAA+B;AAAA,EACjD;AAEA,QAAM,0BAAoC,0BAA0B;AAGpE,SAAO;AACT;","names":[]}
@@ -1,7 +1,7 @@
  import { readFile } from "fs/promises";
  import { join } from "path";
  import { fileURLToPath } from "url";
- import { kebabCaseToCamelCase } from "../utils.mjs";
+ import { kebabCaseToCamelCase } from "../utils/kebabCaseToCamelCase.mjs";
  const getContentDeclarationFileTemplate = async (key, format, fileParams = {}) => {
  const dirname = __dirname ?? fileURLToPath(import.meta.url);
  let fileTemplate = "./esmTemplate.md";
@@ -1 +1 @@
- {"version":3,"sources":["../../../src/getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.ts"],"sourcesContent":["import { readFile } from 'fs/promises';\nimport { join } from 'path';\nimport { fileURLToPath } from 'url';\nimport { kebabCaseToCamelCase } from '../utils';\n\nexport const getContentDeclarationFileTemplate = async (\n key: string,\n format: 'ts' | 'cjs' | 'esm',\n fileParams: Record<string, any> = {}\n) => {\n const dirname = __dirname ?? fileURLToPath(import.meta.url);\n\n let fileTemplate = './esmTemplate.md';\n\n if (format === 'ts') {\n fileTemplate = './tsTemplate.md';\n } else if (format === 'cjs') {\n fileTemplate = './cjsTemplate.md';\n }\n\n const fileContent = await readFile(join(dirname, fileTemplate), 'utf-8');\n const camelCaseKey = kebabCaseToCamelCase(key);\n const nonCapitalizedCamelCaseKey =\n camelCaseKey.charAt(0).toLowerCase() + camelCaseKey.slice(1);\n\n const fileParmsString = Object.entries(fileParams)\n .filter(([, value]) => value !== undefined)\n .map(([key, value]) => {\n if (typeof value === 'object') {\n return `\\n${key}: ${JSON.stringify(value)},`;\n }\n\n if (typeof value === 'boolean' || typeof value === 'number') {\n return `\\n${key}: ${value},`;\n }\n\n if (typeof value === 'string') {\n return `\\n${key}: '${value}',`;\n }\n\n return `\\n${key}: ${value},`;\n })\n .join('');\n\n return fileContent\n .replace('{{key}}', key)\n .replaceAll('{{name}}', nonCapitalizedCamelCaseKey)\n .replace('{{fileParams}}', fileParmsString);\n};\n"],"mappings":"AAAA,SAAS,gBAAgB;AACzB,SAAS,YAAY;AACrB,SAAS,qBAAqB;AAC9B,SAAS,4BAA4B;AAE9B,MAAM,oCAAoC,OAC/C,KACA,QACA,aAAkC,CAAC,MAChC;AACH,QAAM,UAAU,aAAa,cAAc,YAAY,GAAG;AAE1D,MAAI,eAAe;AAEnB,MAAI,WAAW,MAAM;AACnB,mBAAe;AAAA,EACjB,WAAW,WAAW,OAAO;AAC3B,mBAAe;AAAA,EACjB;AAEA,QAAM,cAAc,MAAM,SAAS,KAAK,SAAS,YAAY,GAAG,OAAO;AACvE,QAAM,eAAe,qBAAqB,GAAG;AAC7C,QAAM,6BACJ,aAAa,OAAO,CAAC,EAAE,YAAY,IAAI,aAAa,MAAM,CAAC;AAE7D,QAAM,kBAAkB,OAAO,QAAQ,UAAU,EAC9C,OAAO,CAAC,CAAC,EAAE,KAAK,MAAM,UAAU,MAAS,EACzC,IAAI,CAAC,CAACA,MAAK,KAAK,MAAM;AACrB,QAAI,OAAO,UAAU,UAAU;AAC7B,aAAO;AAAA,EAAKA,IAAG,KAAK,KAAK,UAAU,KAAK,CAAC;AAAA,IAC3C;AAEA,QAAI,OAAO,UAAU,aAAa,OAAO,UAAU,UAAU;AAC3D,aAAO;AAAA,EAAKA,IAAG,KAAK,KAAK;AAAA,IAC3B;AAEA,QAAI,OAAO,UAAU,UAAU;AAC7B,aAAO;AAAA,EAAKA,IAAG,MAAM,KAAK;AAAA,IAC5B;AAEA,WAAO;AAAA,EAAKA,IAAG,KAAK,KAAK;AAAA,EAC3B,CAAC,EACA,KAAK,EAAE;AAEV,SAAO,YACJ,QAAQ,WAAW,GAAG,EACtB,WAAW,YAAY,0BAA0B,EACjD,QAAQ,kBAAkB,eAAe;AAC9C;","names":["key"]}
+ {"version":3,"sources":["../../../src/getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.ts"],"sourcesContent":["import { readFile } from 'fs/promises';\nimport { join } from 'path';\nimport { fileURLToPath } from 'url';\nimport { kebabCaseToCamelCase } from '../utils/kebabCaseToCamelCase';\n\nexport const getContentDeclarationFileTemplate = async (\n key: string,\n format: 'ts' | 'cjs' | 'esm',\n fileParams: Record<string, any> = {}\n) => {\n const dirname = __dirname ?? fileURLToPath(import.meta.url);\n\n let fileTemplate = './esmTemplate.md';\n\n if (format === 'ts') {\n fileTemplate = './tsTemplate.md';\n } else if (format === 'cjs') {\n fileTemplate = './cjsTemplate.md';\n }\n\n const fileContent = await readFile(join(dirname, fileTemplate), 'utf-8');\n const camelCaseKey = kebabCaseToCamelCase(key);\n const nonCapitalizedCamelCaseKey =\n camelCaseKey.charAt(0).toLowerCase() + camelCaseKey.slice(1);\n\n const fileParmsString = Object.entries(fileParams)\n .filter(([, value]) => value !== undefined)\n .map(([key, value]) => {\n if (typeof value === 'object') {\n return `\\n${key}: ${JSON.stringify(value)},`;\n }\n\n if (typeof value === 'boolean' || typeof value === 'number') {\n return `\\n${key}: ${value},`;\n }\n\n if (typeof value === 'string') {\n return `\\n${key}: '${value}',`;\n }\n\n return `\\n${key}: ${value},`;\n })\n .join('');\n\n return fileContent\n .replace('{{key}}', key)\n .replaceAll('{{name}}', nonCapitalizedCamelCaseKey)\n .replace('{{fileParams}}', fileParmsString);\n};\n"],"mappings":"AAAA,SAAS,gBAAgB;AACzB,SAAS,YAAY;AACrB,SAAS,qBAAqB;AAC9B,SAAS,4BAA4B;AAE9B,MAAM,oCAAoC,OAC/C,KACA,QACA,aAAkC,CAAC,MAChC;AACH,QAAM,UAAU,aAAa,cAAc,YAAY,GAAG;AAE1D,MAAI,eAAe;AAEnB,MAAI,WAAW,MAAM;AACnB,mBAAe;AAAA,EACjB,WAAW,WAAW,OAAO;AAC3B,mBAAe;AAAA,EACjB;AAEA,QAAM,cAAc,MAAM,SAAS,KAAK,SAAS,YAAY,GAAG,OAAO;AACvE,QAAM,eAAe,qBAAqB,GAAG;AAC7C,QAAM,6BACJ,aAAa,OAAO,CAAC,EAAE,YAAY,IAAI,aAAa,MAAM,CAAC;AAE7D,QAAM,kBAAkB,OAAO,QAAQ,UAAU,EAC9C,OAAO,CAAC,CAAC,EAAE,KAAK,MAAM,UAAU,MAAS,EACzC,IAAI,CAAC,CAACA,MAAK,KAAK,MAAM;AACrB,QAAI,OAAO,UAAU,UAAU;AAC7B,aAAO;AAAA,EAAKA,IAAG,KAAK,KAAK,UAAU,KAAK,CAAC;AAAA,IAC3C;AAEA,QAAI,OAAO,UAAU,aAAa,OAAO,UAAU,UAAU;AAC3D,aAAO;AAAA,EAAKA,IAAG,KAAK,KAAK;AAAA,IAC3B;AAEA,QAAI,OAAO,UAAU,UAAU;AAC7B,aAAO;AAAA,EAAKA,IAAG,MAAM,KAAK;AAAA,IAC5B;AAEA,WAAO;AAAA,EAAKA,IAAG,KAAK,KAAK;AAAA,EAC3B,CAAC,EACA,KAAK,EAAE;AAEV,SAAO,YACJ,QAAQ,WAAW,GAAG,EACtB,WAAW,YAAY,0BAA0B,EACjD,QAAQ,kBAAkB,eAAe;AAC9C;","names":["key"]}
@@ -13,7 +13,8 @@ import { getBuiltUnmergedDictionariesPath } from "./getBuiltUnmergedDictionaries
  import { getFilteredLocalesContent } from "./getFilteredLocalesContent.mjs";
  import { listDictionaries } from "./listDictionariesPath.mjs";
  import {
- listGitFiles
+ listGitFiles,
+ listGitLines
  } from "./listGitFiles.mjs";
  import {
  loadDictionaries,
@@ -31,7 +32,11 @@ import {
  generateDictionaryListContent
  } from "./transpiler/dictionary_to_main/index.mjs";
  import { createModuleAugmentation } from "./transpiler/dictionary_to_type/createModuleAugmentation.mjs";
- import { getFileHash } from "./utils.mjs";
+ import { getFileHash } from "./utils/getFileHash.mjs";
+ import { kebabCaseToCamelCase } from "./utils/kebabCaseToCamelCase.mjs";
+ import { resolveObjectPromises } from "./utils/resolveObjectPromises.mjs";
+ import { runOnce } from "./utils/runOnce.mjs";
+ import { sortAlphabetically } from "./utils/sortAlphabetically.mjs";
  import {
  writeContentDeclaration
  } from "./writeContentDeclaration/index.mjs";
@@ -51,8 +56,10 @@ export {
  getFilteredLocalesContent,
  handleAdditionalContentDeclarationFile,
  handleContentDeclarationFileChange,
+ kebabCaseToCamelCase,
  listDictionaries,
  listGitFiles,
+ listGitLines,
  loadDictionaries,
  loadDistantDictionaries,
  loadLocalDictionaries,
@@ -61,6 +68,9 @@ export {
  prepareIntlayer,
  processPerLocaleDictionary,
  reduceDictionaryContent,
+ resolveObjectPromises,
+ runOnce,
+ sortAlphabetically,
  watch,
  writeContentDeclaration
  };
@@ -1 +1 @@
- {"version":3,"sources":["../../src/index.ts"],"sourcesContent":["export { checkDictionaryChanges } from './checkDictionaryChanges';\nexport {\n buildAndWatchIntlayer,\n handleAdditionalContentDeclarationFile,\n handleContentDeclarationFileChange,\n watch,\n} from './chokidar/watcher';\nexport { cleanOutputDir } from './cleanOutputDir';\nexport { fetchDistantDictionaries } from './fetchDistantDictionaries';\nexport { fetchDistantDictionaryKeys } from './fetchDistantDictionaryKeys';\nexport { getBuiltDictionariesPath } from './getBuiltDictionariesPath';\nexport { getBuiltUnmergedDictionariesPath } from './getBuiltUnmergedDictionariesPath';\nexport { getFilteredLocalesContent } from './getFilteredLocalesContent';\nexport { listDictionaries } from './listDictionariesPath';\nexport {\n listGitFiles,\n type DiffMode,\n type ListGitFilesOptions,\n} from './listGitFiles';\nexport {\n loadDictionaries,\n loadDistantDictionaries,\n loadLocalDictionaries,\n} from './loadDictionaries/index';\nexport { mergeDictionaries } from './mergeDictionaries';\nexport { prepareContentDeclaration } from './prepareContentDeclaration';\nexport { prepareIntlayer } from './prepareIntlayer';\nexport { processPerLocaleDictionary } from './processPerLocaleDictionary';\nexport { reduceDictionaryContent } from './reduceDictionaryContent/reduceDictionaryContent';\nexport { buildDictionary } from './transpiler/declaration_file_to_dictionary/index';\nexport {\n createDictionaryEntryPoint,\n generateDictionaryListContent,\n} from './transpiler/dictionary_to_main';\nexport { createModuleAugmentation } from './transpiler/dictionary_to_type/createModuleAugmentation';\nexport { getFileHash } from './utils';\nexport {\n writeContentDeclaration,\n type DictionaryStatus,\n} from './writeContentDeclaration';\n"],"mappings":"AAAA,SAAS,8BAA8B;AACvC;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,SAAS,sBAAsB;AAC/B,SAAS,gCAAgC;AACzC,SAAS,kCAAkC;AAC3C,SAAS,gCAAgC;AACzC,SAAS,wCAAwC;AACjD,SAAS,iCAAiC;AAC1C,SAAS,wBAAwB;AACjC;AAAA,EACE;AAAA,OAGK;AACP;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,SAAS,yBAAyB;AAClC,SAAS,iCAAiC;AAC1C,SAAS,uBAAuB;AAChC,SAAS,kCAAkC;AAC3C,SAAS,+BAA+B;AACxC,SAAS,uBAAuB;AAChC;AAAA,EACE;AAAA,EACA;AAAA,OACK;AACP,SAAS,gCAAgC;AACzC,SAAS,mBAAmB;AAC5B;AAAA,EACE;AAAA,OAEK;","names":[]}
+ {"version":3,"sources":["../../src/index.ts"],"sourcesContent":["export { checkDictionaryChanges } from './checkDictionaryChanges';\nexport {\n buildAndWatchIntlayer,\n handleAdditionalContentDeclarationFile,\n handleContentDeclarationFileChange,\n watch,\n} from './chokidar/watcher';\nexport { cleanOutputDir } from './cleanOutputDir';\nexport { fetchDistantDictionaries } from './fetchDistantDictionaries';\nexport { fetchDistantDictionaryKeys } from './fetchDistantDictionaryKeys';\nexport { getBuiltDictionariesPath } from './getBuiltDictionariesPath';\nexport { getBuiltUnmergedDictionariesPath } from './getBuiltUnmergedDictionariesPath';\nexport { getFilteredLocalesContent } from './getFilteredLocalesContent';\nexport { listDictionaries } from './listDictionariesPath';\nexport {\n listGitFiles,\n listGitLines,\n type DiffMode,\n type ListGitFilesOptions,\n type ListGitLinesOptions,\n} from './listGitFiles';\nexport {\n loadDictionaries,\n loadDistantDictionaries,\n loadLocalDictionaries,\n} from './loadDictionaries/index';\nexport { mergeDictionaries } from './mergeDictionaries';\nexport { prepareContentDeclaration } from './prepareContentDeclaration';\nexport { prepareIntlayer } from './prepareIntlayer';\nexport { processPerLocaleDictionary } from './processPerLocaleDictionary';\nexport { reduceDictionaryContent } from './reduceDictionaryContent/reduceDictionaryContent';\nexport { buildDictionary } from './transpiler/declaration_file_to_dictionary/index';\nexport {\n createDictionaryEntryPoint,\n generateDictionaryListContent,\n} from './transpiler/dictionary_to_main';\nexport { createModuleAugmentation } from './transpiler/dictionary_to_type/createModuleAugmentation';\nexport { getFileHash } from './utils/getFileHash';\nexport { kebabCaseToCamelCase } from './utils/kebabCaseToCamelCase';\nexport { resolveObjectPromises } from './utils/resolveObjectPromises';\nexport { runOnce } from './utils/runOnce';\nexport { sortAlphabetically } from './utils/sortAlphabetically';\nexport {\n writeContentDeclaration,\n type DictionaryStatus,\n} from './writeContentDeclaration';\n"],"mappings":"AAAA,SAAS,8BAA8B;AACvC;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,SAAS,sBAAsB;AAC/B,SAAS,gCAAgC;AACzC,SAAS,kCAAkC;AAC3C,SAAS,gCAAgC;AACzC,SAAS,wCAAwC;AACjD,SAAS,iCAAiC;AAC1C,SAAS,wBAAwB;AACjC;AAAA,EACE;AAAA,EACA;AAAA,OAIK;AACP;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,SAAS,yBAAyB;AAClC,SAAS,iCAAiC;AAC1C,SAAS,uBAAuB;AAChC,SAAS,kCAAkC;AAC3C,SAAS,+BAA+B;AACxC,SAAS,uBAAuB;AAChC;AAAA,EACE;AAAA,EACA;AAAA,OACK;AACP,SAAS,gCAAgC;AACzC,SAAS,mBAAmB;AAC5B,SAAS,4BAA4B;AACrC,SAAS,6BAA6B;AACtC,SAAS,eAAe;AACxB,SAAS,0BAA0B;AACnC;AAAA,EACE;AAAA,OAEK;","names":[]}
package/dist/esm/listGitFiles.mjs CHANGED
@@ -1,5 +1,6 @@
  import { getAppLogger } from "@intlayer/config";
  import configuration from "@intlayer/config/built";
+ import { readFileSync } from "fs";
  import { join } from "path";
  import simpleGit from "simple-git";
  const getGitRootDir = async () => {
@@ -60,7 +61,72 @@ const listGitFiles = async ({
  console.warn("Failed to get changes list:", error);
  }
  };
+ const listGitLines = async (filePath, {
+ mode,
+ baseRef = "origin/main",
+ currentRef = "HEAD"
+ // HEAD points to the current branch's latest commit
+ }) => {
+ const git = simpleGit();
+ const changedLines = /* @__PURE__ */ new Set();
+ const collectLinesFromDiff = (diffOutput) => {
+ const hunkRegex = /@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@/g;
+ let match;
+ while ((match = hunkRegex.exec(diffOutput)) !== null) {
+ const oldCount = match[2] ? Number(match[2]) : 1;
+ const newStart = Number(match[3]);
+ const newCount = match[4] ? Number(match[4]) : 1;
+ if (newCount > 0) {
+ for (let i = 0; i < newCount; i++) {
+ changedLines.add(newStart + i);
+ }
+ }
+ if (oldCount > 0 && newCount === 0) {
+ if (newStart > 1) {
+ changedLines.add(newStart - 1);
+ }
+ changedLines.add(newStart);
+ }
+ }
+ };
+ if (mode.includes("untracked")) {
+ const status = await git.status();
+ const isUntracked = status.not_added.includes(filePath);
+ if (isUntracked) {
+ try {
+ const content = readFileSync(filePath, "utf-8");
+ content.split("\n").forEach((_, idx) => changedLines.add(idx + 1));
+ } catch {
+ }
+ }
+ }
+ if (mode.includes("uncommitted")) {
+ const diffOutput = await git.diff(["--unified=0", "HEAD", "--", filePath]);
+ collectLinesFromDiff(diffOutput);
+ }
+ if (mode.includes("unpushed")) {
+ const diffOutput = await git.diff([
+ "--unified=0",
+ "@{push}...HEAD",
+ "--",
+ filePath
+ ]);
+ collectLinesFromDiff(diffOutput);
+ }
+ if (mode.includes("gitDiff")) {
+ await git.fetch(baseRef);
+ const diffOutput = await git.diff([
+ "--unified=0",
+ `${baseRef}...${currentRef}`,
+ "--",
+ filePath
+ ]);
+ collectLinesFromDiff(diffOutput);
+ }
+ return Array.from(changedLines).sort((a, b) => a - b);
+ };
  export {
- listGitFiles
+ listGitFiles,
+ listGitLines
  };
  //# sourceMappingURL=listGitFiles.mjs.map
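Note on the hunk parsing added above: `collectLinesFromDiff` reads the `@@ -oldStart,oldCount +newStart,newCount @@` headers that `git diff --unified=0` emits and records the affected new-file line numbers. A minimal sketch of what the regex extracts, using a made-up hunk header (the sample values are illustrative, not taken from this diff):

```ts
// Hunk headers from `git diff --unified=0` look like `@@ -a,b +c,d @@`.
// Same regex shape as the one added in listGitFiles.mjs above.
const hunkRegex = /@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@/g;

const sampleHeader = '@@ -10,2 +12,3 @@'; // made-up header for illustration
const match = hunkRegex.exec(sampleHeader);

if (match) {
  const newStart = Number(match[3]); // 12
  const newCount = match[4] ? Number(match[4]) : 1; // 3
  // Lines 12, 13 and 14 of the new file belong to this hunk.
  console.log(Array.from({ length: newCount }, (_, i) => newStart + i));
}
```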
package/dist/esm/listGitFiles.mjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../src/listGitFiles.ts"],"sourcesContent":["import { getAppLogger } from '@intlayer/config';\nimport configuration from '@intlayer/config/built';\nimport { join } from 'path';\nimport simpleGit from 'simple-git';\n\nexport type DiffMode = 'gitDiff' | 'uncommitted' | 'unpushed' | 'untracked';\n\nconst getGitRootDir = async (): Promise<string | null> => {\n try {\n const git = simpleGit();\n const rootDir = await git.revparse(['--show-toplevel']);\n return rootDir.trim();\n } catch (error) {\n const appLogger = getAppLogger(configuration);\n appLogger('Error getting git root directory:' + error, {\n level: 'error',\n });\n return null;\n }\n};\n\nexport type ListGitFilesOptions = {\n mode: DiffMode[];\n baseRef?: string;\n currentRef?: string;\n absolute?: boolean;\n};\n\nexport const listGitFiles = async ({\n mode,\n baseRef = 'origin/main',\n currentRef = 'HEAD', // HEAD points to the current branch's latest commit\n absolute = true,\n}: ListGitFilesOptions) => {\n try {\n const git = simpleGit();\n const diff: Set<string> = new Set();\n\n if (mode.includes('untracked')) {\n const status = await git.status();\n status.not_added.forEach((f) => diff.add(f));\n }\n\n if (mode.includes('uncommitted')) {\n // Get uncommitted changes\n const uncommittedDiff = await git.diff(['--name-only', 'HEAD']);\n\n const uncommittedFiles = uncommittedDiff.split('\\n').filter(Boolean);\n\n uncommittedFiles.forEach((file) => diff.add(file));\n }\n\n if (mode.includes('unpushed')) {\n // Get unpushed commits\n const unpushedDiff = await git.diff(['--name-only', '@{push}...HEAD']);\n\n const unpushedFiles = unpushedDiff.split('\\n').filter(Boolean);\n\n unpushedFiles.forEach((file) => diff.add(file));\n }\n\n if (mode.includes('gitDiff')) {\n // Get the base branch (usually main/master) from CI environment\n\n await git.fetch(baseRef);\n\n const diffBranch = await git.diff([\n '--name-only',\n `${baseRef}...${currentRef}`,\n ]);\n\n const gitDiffFiles = diffBranch.split('\\n').filter(Boolean);\n\n gitDiffFiles.forEach((file) => diff.add(file));\n }\n\n if (absolute) {\n const gitRootDir = await getGitRootDir();\n if (!gitRootDir) {\n return [];\n }\n return Array.from(diff).map((file) => join(gitRootDir, file));\n }\n\n return Array.from(diff);\n } catch (error) {\n console.warn('Failed to get changes list:', error);\n 
}\n};\n"],"mappings":"AAAA,SAAS,oBAAoB;AAC7B,OAAO,mBAAmB;AAC1B,SAAS,YAAY;AACrB,OAAO,eAAe;AAItB,MAAM,gBAAgB,YAAoC;AACxD,MAAI;AACF,UAAM,MAAM,UAAU;AACtB,UAAM,UAAU,MAAM,IAAI,SAAS,CAAC,iBAAiB,CAAC;AACtD,WAAO,QAAQ,KAAK;AAAA,EACtB,SAAS,OAAO;AACd,UAAM,YAAY,aAAa,aAAa;AAC5C,cAAU,sCAAsC,OAAO;AAAA,MACrD,OAAO;AAAA,IACT,CAAC;AACD,WAAO;AAAA,EACT;AACF;AASO,MAAM,eAAe,OAAO;AAAA,EACjC;AAAA,EACA,UAAU;AAAA,EACV,aAAa;AAAA;AAAA,EACb,WAAW;AACb,MAA2B;AACzB,MAAI;AACF,UAAM,MAAM,UAAU;AACtB,UAAM,OAAoB,oBAAI,IAAI;AAElC,QAAI,KAAK,SAAS,WAAW,GAAG;AAC9B,YAAM,SAAS,MAAM,IAAI,OAAO;AAChC,aAAO,UAAU,QAAQ,CAAC,MAAM,KAAK,IAAI,CAAC,CAAC;AAAA,IAC7C;AAEA,QAAI,KAAK,SAAS,aAAa,GAAG;AAEhC,YAAM,kBAAkB,MAAM,IAAI,KAAK,CAAC,eAAe,MAAM,CAAC;AAE9D,YAAM,mBAAmB,gBAAgB,MAAM,IAAI,EAAE,OAAO,OAAO;AAEnE,uBAAiB,QAAQ,CAAC,SAAS,KAAK,IAAI,IAAI,CAAC;AAAA,IACnD;AAEA,QAAI,KAAK,SAAS,UAAU,GAAG;AAE7B,YAAM,eAAe,MAAM,IAAI,KAAK,CAAC,eAAe,gBAAgB,CAAC;AAErE,YAAM,gBAAgB,aAAa,MAAM,IAAI,EAAE,OAAO,OAAO;AAE7D,oBAAc,QAAQ,CAAC,SAAS,KAAK,IAAI,IAAI,CAAC;AAAA,IAChD;AAEA,QAAI,KAAK,SAAS,SAAS,GAAG;AAG5B,YAAM,IAAI,MAAM,OAAO;AAEvB,YAAM,aAAa,MAAM,IAAI,KAAK;AAAA,QAChC;AAAA,QACA,GAAG,OAAO,MAAM,UAAU;AAAA,MAC5B,CAAC;AAED,YAAM,eAAe,WAAW,MAAM,IAAI,EAAE,OAAO,OAAO;AAE1D,mBAAa,QAAQ,CAAC,SAAS,KAAK,IAAI,IAAI,CAAC;AAAA,IAC/C;AAEA,QAAI,UAAU;AACZ,YAAM,aAAa,MAAM,cAAc;AACvC,UAAI,CAAC,YAAY;AACf,eAAO,CAAC;AAAA,MACV;AACA,aAAO,MAAM,KAAK,IAAI,EAAE,IAAI,CAAC,SAAS,KAAK,YAAY,IAAI,CAAC;AAAA,IAC9D;AAEA,WAAO,MAAM,KAAK,IAAI;AAAA,EACxB,SAAS,OAAO;AACd,YAAQ,KAAK,+BAA+B,KAAK;AAAA,EACnD;AACF;","names":[]}
+ {"version":3,"sources":["../../src/listGitFiles.ts"],"sourcesContent":["import { getAppLogger } from '@intlayer/config';\nimport configuration from '@intlayer/config/built';\nimport { readFileSync } from 'fs';\nimport { join } from 'path';\nimport simpleGit from 'simple-git';\n\nexport type DiffMode = 'gitDiff' | 'uncommitted' | 'unpushed' | 'untracked';\n\nconst getGitRootDir = async (): Promise<string | null> => {\n try {\n const git = simpleGit();\n const rootDir = await git.revparse(['--show-toplevel']);\n return rootDir.trim();\n } catch (error) {\n const appLogger = getAppLogger(configuration);\n appLogger('Error getting git root directory:' + error, {\n level: 'error',\n });\n return null;\n }\n};\n\nexport type ListGitFilesOptions = {\n mode: DiffMode[];\n baseRef?: string;\n currentRef?: string;\n absolute?: boolean;\n};\n\nexport const listGitFiles = async ({\n mode,\n baseRef = 'origin/main',\n currentRef = 'HEAD', // HEAD points to the current branch's latest commit\n absolute = true,\n}: ListGitFilesOptions) => {\n try {\n const git = simpleGit();\n const diff: Set<string> = new Set();\n\n if (mode.includes('untracked')) {\n const status = await git.status();\n status.not_added.forEach((f) => diff.add(f));\n }\n\n if (mode.includes('uncommitted')) {\n // Get uncommitted changes\n const uncommittedDiff = await git.diff(['--name-only', 'HEAD']);\n\n const uncommittedFiles = uncommittedDiff.split('\\n').filter(Boolean);\n\n uncommittedFiles.forEach((file) => diff.add(file));\n }\n\n if (mode.includes('unpushed')) {\n // Get unpushed commits\n const unpushedDiff = await git.diff(['--name-only', '@{push}...HEAD']);\n\n const unpushedFiles = unpushedDiff.split('\\n').filter(Boolean);\n\n unpushedFiles.forEach((file) => diff.add(file));\n }\n\n if (mode.includes('gitDiff')) {\n // Get the base branch (usually main/master) from CI environment\n\n await git.fetch(baseRef);\n\n const diffBranch = await git.diff([\n '--name-only',\n `${baseRef}...${currentRef}`,\n ]);\n\n const gitDiffFiles = diffBranch.split('\\n').filter(Boolean);\n\n gitDiffFiles.forEach((file) => diff.add(file));\n }\n\n if (absolute) {\n const gitRootDir = await getGitRootDir();\n if (!gitRootDir) {\n return [];\n }\n return Array.from(diff).map((file) => join(gitRootDir, file));\n }\n\n return Array.from(diff);\n } catch (error) {\n console.warn('Failed to get changes list:', error);\n }\n};\n\nexport type ListGitLinesOptions = {\n mode: DiffMode[];\n baseRef?: string;\n currentRef?: string;\n};\n\nexport const listGitLines = async (\n filePath: string,\n {\n mode,\n baseRef = 'origin/main',\n currentRef = 'HEAD', // HEAD points to the current branch's latest commit\n }: ListGitLinesOptions\n): Promise<number[]> => {\n const git = simpleGit();\n // We collect **line numbers** (1-based) that were modified/added by the diff.\n // Using a Set ensures uniqueness when the same line is reported by several modes.\n const changedLines: Set<number> = new Set();\n\n /**\n * Extracts line numbers from a diff generated with `--unified=0`.\n * Each hunk header looks like: @@ -<oldStart>,<oldCount> +<newStart>,<newCount> @@\n * We consider both the \"+\" (new) side for additions and the \"-\" (old) side for deletions.\n * For deletions, we add the line before and after the deletion point in the current file.\n */\n const collectLinesFromDiff = (diffOutput: string) => {\n const hunkRegex = /@@ -(\\d+)(?:,(\\d+))? \\+(\\d+)(?:,(\\d+))? 
@@/g;\n let match: RegExpExecArray | null;\n\n while ((match = hunkRegex.exec(diffOutput)) !== null) {\n const oldCount = match[2] ? Number(match[2]) : 1;\n const newStart = Number(match[3]);\n const newCount = match[4] ? Number(match[4]) : 1;\n\n // Handle additions/modifications (+ side)\n if (newCount > 0) {\n for (let i = 0; i < newCount; i++) {\n changedLines.add(newStart + i);\n }\n }\n\n // Handle deletions (- side)\n if (oldCount > 0 && newCount === 0) {\n // For deletions, add the line before and after the deletion point\n // The deletion point in the new file is at newStart\n if (newStart > 1) {\n changedLines.add(newStart - 1); // Line before deletion\n }\n changedLines.add(newStart); // Line after deletion (if it exists)\n }\n }\n };\n\n // 1. Handle untracked files – when a file is untracked its entire content is new.\n if (mode.includes('untracked')) {\n const status = await git.status();\n const isUntracked = status.not_added.includes(filePath);\n if (isUntracked) {\n try {\n const content = readFileSync(filePath, 'utf-8');\n content.split('\\n').forEach((_, idx) => changedLines.add(idx + 1));\n } catch {\n // ignore read errors – file may have been deleted, etc.\n }\n }\n }\n\n // 2. Uncommitted changes (working tree vs HEAD)\n if (mode.includes('uncommitted')) {\n const diffOutput = await git.diff(['--unified=0', 'HEAD', '--', filePath]);\n collectLinesFromDiff(diffOutput);\n }\n\n // 3. Unpushed commits – compare local branch to its upstream\n if (mode.includes('unpushed')) {\n const diffOutput = await git.diff([\n '--unified=0',\n '@{push}...HEAD',\n '--',\n filePath,\n ]);\n collectLinesFromDiff(diffOutput);\n }\n\n // 4. Regular git diff between baseRef and currentRef (e.g., CI pull-request diff)\n if (mode.includes('gitDiff')) {\n await git.fetch(baseRef);\n const diffOutput = await git.diff([\n '--unified=0',\n `${baseRef}...${currentRef}`,\n '--',\n filePath,\n ]);\n collectLinesFromDiff(diffOutput);\n }\n\n // Return the list sorted for convenience\n return Array.from(changedLines).sort((a, b) => a - 
b);\n};\n"],"mappings":"AAAA,SAAS,oBAAoB;AAC7B,OAAO,mBAAmB;AAC1B,SAAS,oBAAoB;AAC7B,SAAS,YAAY;AACrB,OAAO,eAAe;AAItB,MAAM,gBAAgB,YAAoC;AACxD,MAAI;AACF,UAAM,MAAM,UAAU;AACtB,UAAM,UAAU,MAAM,IAAI,SAAS,CAAC,iBAAiB,CAAC;AACtD,WAAO,QAAQ,KAAK;AAAA,EACtB,SAAS,OAAO;AACd,UAAM,YAAY,aAAa,aAAa;AAC5C,cAAU,sCAAsC,OAAO;AAAA,MACrD,OAAO;AAAA,IACT,CAAC;AACD,WAAO;AAAA,EACT;AACF;AASO,MAAM,eAAe,OAAO;AAAA,EACjC;AAAA,EACA,UAAU;AAAA,EACV,aAAa;AAAA;AAAA,EACb,WAAW;AACb,MAA2B;AACzB,MAAI;AACF,UAAM,MAAM,UAAU;AACtB,UAAM,OAAoB,oBAAI,IAAI;AAElC,QAAI,KAAK,SAAS,WAAW,GAAG;AAC9B,YAAM,SAAS,MAAM,IAAI,OAAO;AAChC,aAAO,UAAU,QAAQ,CAAC,MAAM,KAAK,IAAI,CAAC,CAAC;AAAA,IAC7C;AAEA,QAAI,KAAK,SAAS,aAAa,GAAG;AAEhC,YAAM,kBAAkB,MAAM,IAAI,KAAK,CAAC,eAAe,MAAM,CAAC;AAE9D,YAAM,mBAAmB,gBAAgB,MAAM,IAAI,EAAE,OAAO,OAAO;AAEnE,uBAAiB,QAAQ,CAAC,SAAS,KAAK,IAAI,IAAI,CAAC;AAAA,IACnD;AAEA,QAAI,KAAK,SAAS,UAAU,GAAG;AAE7B,YAAM,eAAe,MAAM,IAAI,KAAK,CAAC,eAAe,gBAAgB,CAAC;AAErE,YAAM,gBAAgB,aAAa,MAAM,IAAI,EAAE,OAAO,OAAO;AAE7D,oBAAc,QAAQ,CAAC,SAAS,KAAK,IAAI,IAAI,CAAC;AAAA,IAChD;AAEA,QAAI,KAAK,SAAS,SAAS,GAAG;AAG5B,YAAM,IAAI,MAAM,OAAO;AAEvB,YAAM,aAAa,MAAM,IAAI,KAAK;AAAA,QAChC;AAAA,QACA,GAAG,OAAO,MAAM,UAAU;AAAA,MAC5B,CAAC;AAED,YAAM,eAAe,WAAW,MAAM,IAAI,EAAE,OAAO,OAAO;AAE1D,mBAAa,QAAQ,CAAC,SAAS,KAAK,IAAI,IAAI,CAAC;AAAA,IAC/C;AAEA,QAAI,UAAU;AACZ,YAAM,aAAa,MAAM,cAAc;AACvC,UAAI,CAAC,YAAY;AACf,eAAO,CAAC;AAAA,MACV;AACA,aAAO,MAAM,KAAK,IAAI,EAAE,IAAI,CAAC,SAAS,KAAK,YAAY,IAAI,CAAC;AAAA,IAC9D;AAEA,WAAO,MAAM,KAAK,IAAI;AAAA,EACxB,SAAS,OAAO;AACd,YAAQ,KAAK,+BAA+B,KAAK;AAAA,EACnD;AACF;AAQO,MAAM,eAAe,OAC1B,UACA;AAAA,EACE;AAAA,EACA,UAAU;AAAA,EACV,aAAa;AAAA;AACf,MACsB;AACtB,QAAM,MAAM,UAAU;AAGtB,QAAM,eAA4B,oBAAI,IAAI;AAQ1C,QAAM,uBAAuB,CAAC,eAAuB;AACnD,UAAM,YAAY;AAClB,QAAI;AAEJ,YAAQ,QAAQ,UAAU,KAAK,UAAU,OAAO,MAAM;AACpD,YAAM,WAAW,MAAM,CAAC,IAAI,OAAO,MAAM,CAAC,CAAC,IAAI;AAC/C,YAAM,WAAW,OAAO,MAAM,CAAC,CAAC;AAChC,YAAM,WAAW,MAAM,CAAC,IAAI,OAAO,MAAM,CAAC,CAAC,IAAI;AAG/C,UAAI,WAAW,GAAG;AAChB,iBAAS,IAAI,GAAG,IAAI,UAAU,KAAK;AACjC,uBAAa,IAAI,WAAW,CAAC;AAAA,QAC/B;AAAA,MACF;AAGA,UAAI,WAAW,KAAK,aAAa,GAAG;AAGlC,YAAI,WAAW,GAAG;AAChB,uBAAa,IAAI,WAAW,CAAC;AAAA,QAC/B;AACA,qBAAa,IAAI,QAAQ;AAAA,MAC3B;AAAA,IACF;AAAA,EACF;AAGA,MAAI,KAAK,SAAS,WAAW,GAAG;AAC9B,UAAM,SAAS,MAAM,IAAI,OAAO;AAChC,UAAM,cAAc,OAAO,UAAU,SAAS,QAAQ;AACtD,QAAI,aAAa;AACf,UAAI;AACF,cAAM,UAAU,aAAa,UAAU,OAAO;AAC9C,gBAAQ,MAAM,IAAI,EAAE,QAAQ,CAAC,GAAG,QAAQ,aAAa,IAAI,MAAM,CAAC,CAAC;AAAA,MACnE,QAAQ;AAAA,MAER;AAAA,IACF;AAAA,EACF;AAGA,MAAI,KAAK,SAAS,aAAa,GAAG;AAChC,UAAM,aAAa,MAAM,IAAI,KAAK,CAAC,eAAe,QAAQ,MAAM,QAAQ,CAAC;AACzE,yBAAqB,UAAU;AAAA,EACjC;AAGA,MAAI,KAAK,SAAS,UAAU,GAAG;AAC7B,UAAM,aAAa,MAAM,IAAI,KAAK;AAAA,MAChC;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AACD,yBAAqB,UAAU;AAAA,EACjC;AAGA,MAAI,KAAK,SAAS,SAAS,GAAG;AAC5B,UAAM,IAAI,MAAM,OAAO;AACvB,UAAM,aAAa,MAAM,IAAI,KAAK;AAAA,MAChC;AAAA,MACA,GAAG,OAAO,MAAM,UAAU;AAAA,MAC1B;AAAA,MACA;AAAA,IACF,CAAC;AACD,yBAAqB,UAAU;AAAA,EACjC;AAGA,SAAO,MAAM,KAAK,YAAY,EAAE,KAAK,CAAC,GAAG,MAAM,IAAI,CAAC;AACtD;","names":[]}
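The new `listGitLines` helper is also re-exported from the package entry point (see the updated `index` source map earlier in this diff), alongside `listGitFiles`. A usage sketch under that assumption; the `reportChangedLines` wrapper and the chosen modes are illustrative only:

```ts
import { listGitFiles, listGitLines, type DiffMode } from '@intlayer/chokidar';

// Hypothetical consumer: report which lines changed in the working tree.
const reportChangedLines = async () => {
  const mode: DiffMode[] = ['uncommitted', 'untracked'];

  // Repo-relative paths (assuming the process runs from the repository root),
  // so they can be passed straight to listGitLines. listGitFiles resolves to
  // undefined when the git lookup fails, hence the fallback.
  const changedFiles = (await listGitFiles({ mode, absolute: false })) ?? [];

  for (const filePath of changedFiles) {
    // 1-based line numbers, deduplicated and sorted ascending.
    const lines = await listGitLines(filePath, { mode });
    console.log(`${filePath}: ${lines.join(', ')}`);
  }
};

await reportChangedLines();
```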
package/dist/esm/loadDictionaries/loadDictionaries.mjs CHANGED
@@ -5,7 +5,7 @@ import {
  } from "@intlayer/config";
  import { fetchDistantDictionaryKeys } from "../fetchDistantDictionaryKeys.mjs";
  import { logger } from "../log.mjs";
- import { sortAlphabetically } from "../utils.mjs";
+ import { sortAlphabetically } from "../utils/sortAlphabetically.mjs";
  import { loadContentDeclarations } from "./loadContentDeclaration.mjs";
  import { loadDistantDictionaries } from "./loadDistantDictionaries.mjs";
  const loadDictionaries = async (contentDeclarationsPaths, configuration = getConfiguration(), projectRequire = ESMxCJSRequire) => {
@@ -18,10 +18,23 @@ const loadDictionaries = async (contentDeclarationsPaths, configuration = getCon
  files,
  projectRequire
  );
- const localDictionaryKeys = localDictionaries.map((dict) => dict.key).filter(Boolean);
+ const filteredLocalDictionaries = localDictionaries.filter((dict) => {
+ const hasKey = Boolean(dict.key);
+ const hasContent = Boolean(dict.content);
+ if (!hasContent) {
+ console.error(
+ "Content declaration has no exported content",
+ dict.filePath
+ );
+ } else if (!hasKey) {
+ console.error("Content declaration has no key", dict.filePath);
+ }
+ return hasKey && hasContent;
+ });
+ const localDictionaryKeys = filteredLocalDictionaries.map((dict) => dict.key).filter(Boolean);
  logger.init(localDictionaryKeys, []);
  logger.updateStatus(
- localDictionaries.map((dict) => ({
+ filteredLocalDictionaries.map((dict) => ({
  dictionaryKey: dict.key,
  type: "local",
  status: { status: "built" }
@@ -44,7 +57,7 @@ const loadDictionaries = async (contentDeclarationsPaths, configuration = getCon
  }
  }
  logger.stop();
- return [...localDictionaries, ...distantDictionaries];
+ return [...filteredLocalDictionaries, ...distantDictionaries];
  } catch (error) {
  logger.stop();
  throw error;
package/dist/esm/loadDictionaries/loadDictionaries.mjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../../src/loadDictionaries/loadDictionaries.ts"],"sourcesContent":["// @ts-ignore @intlayer/backend is not build yet\nimport type { DictionaryAPI } from '@intlayer/backend';\nimport {\n ESMxCJSRequire,\n getAppLogger,\n getConfiguration,\n} from '@intlayer/config';\nimport type { Dictionary } from '@intlayer/core';\nimport { fetchDistantDictionaryKeys } from '../fetchDistantDictionaryKeys';\nimport { logger } from '../log';\nimport { sortAlphabetically } from '../utils';\nimport { loadContentDeclarations } from './loadContentDeclaration';\nimport { loadDistantDictionaries } from './loadDistantDictionaries';\n\nexport const loadDictionaries = async (\n contentDeclarationsPaths: string[] | string,\n configuration = getConfiguration(),\n projectRequire = ESMxCJSRequire\n): Promise<Dictionary[]> => {\n try {\n const appLogger = getAppLogger(configuration);\n const { editor } = configuration;\n\n appLogger('Dictionaries:', { isVerbose: true });\n\n const files = Array.isArray(contentDeclarationsPaths)\n ? contentDeclarationsPaths\n : [contentDeclarationsPaths];\n\n const localDictionaries: Dictionary[] = await loadContentDeclarations(\n files,\n projectRequire\n );\n const localDictionaryKeys = localDictionaries\n .map((dict) => dict.key)\n .filter(Boolean); // Remove empty or undefined keys\n\n // Initialize the logger with both local and distant dictionaries\n logger.init(localDictionaryKeys, []);\n\n // Update logger statuses for local dictionaries\n logger.updateStatus(\n localDictionaries.map((dict) => ({\n dictionaryKey: dict.key,\n type: 'local',\n status: { status: 'built' },\n }))\n );\n\n let distantDictionaries: DictionaryAPI[] = [];\n let distantDictionaryKeys: string[] = [];\n\n if (editor.clientId && editor.clientSecret) {\n try {\n // Fetch distant dictionary keys\n distantDictionaryKeys = await fetchDistantDictionaryKeys();\n\n const orderedDistantDictionaryKeys =\n distantDictionaryKeys.sort(sortAlphabetically);\n\n // Add distant dictionaries to the logger\n logger.addDictionaryKeys('distant', orderedDistantDictionaryKeys);\n\n // Fetch distant dictionaries\n distantDictionaries = await loadDistantDictionaries({\n dictionaryKeys: orderedDistantDictionaryKeys,\n });\n } catch (_error) {\n appLogger('Error during fetching distant dictionaries', {\n level: 'error',\n });\n }\n }\n\n // Ensure the logger is stopped\n logger.stop();\n\n return [...localDictionaries, ...distantDictionaries];\n } catch (error) {\n // Ensure the logger is stopped\n logger.stop();\n\n throw error; // Re-throw the error after logging\n 
}\n};\n"],"mappings":"AAEA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OACK;AAEP,SAAS,kCAAkC;AAC3C,SAAS,cAAc;AACvB,SAAS,0BAA0B;AACnC,SAAS,+BAA+B;AACxC,SAAS,+BAA+B;AAEjC,MAAM,mBAAmB,OAC9B,0BACA,gBAAgB,iBAAiB,GACjC,iBAAiB,mBACS;AAC1B,MAAI;AACF,UAAM,YAAY,aAAa,aAAa;AAC5C,UAAM,EAAE,OAAO,IAAI;AAEnB,cAAU,iBAAiB,EAAE,WAAW,KAAK,CAAC;AAE9C,UAAM,QAAQ,MAAM,QAAQ,wBAAwB,IAChD,2BACA,CAAC,wBAAwB;AAE7B,UAAM,oBAAkC,MAAM;AAAA,MAC5C;AAAA,MACA;AAAA,IACF;AACA,UAAM,sBAAsB,kBACzB,IAAI,CAAC,SAAS,KAAK,GAAG,EACtB,OAAO,OAAO;AAGjB,WAAO,KAAK,qBAAqB,CAAC,CAAC;AAGnC,WAAO;AAAA,MACL,kBAAkB,IAAI,CAAC,UAAU;AAAA,QAC/B,eAAe,KAAK;AAAA,QACpB,MAAM;AAAA,QACN,QAAQ,EAAE,QAAQ,QAAQ;AAAA,MAC5B,EAAE;AAAA,IACJ;AAEA,QAAI,sBAAuC,CAAC;AAC5C,QAAI,wBAAkC,CAAC;AAEvC,QAAI,OAAO,YAAY,OAAO,cAAc;AAC1C,UAAI;AAEF,gCAAwB,MAAM,2BAA2B;AAEzD,cAAM,+BACJ,sBAAsB,KAAK,kBAAkB;AAG/C,eAAO,kBAAkB,WAAW,4BAA4B;AAGhE,8BAAsB,MAAM,wBAAwB;AAAA,UAClD,gBAAgB;AAAA,QAClB,CAAC;AAAA,MACH,SAAS,QAAQ;AACf,kBAAU,8CAA8C;AAAA,UACtD,OAAO;AAAA,QACT,CAAC;AAAA,MACH;AAAA,IACF;AAGA,WAAO,KAAK;AAEZ,WAAO,CAAC,GAAG,mBAAmB,GAAG,mBAAmB;AAAA,EACtD,SAAS,OAAO;AAEd,WAAO,KAAK;AAEZ,UAAM;AAAA,EACR;AACF;","names":[]}
+ {"version":3,"sources":["../../../src/loadDictionaries/loadDictionaries.ts"],"sourcesContent":["// @ts-ignore @intlayer/backend is not build yet\nimport type { DictionaryAPI } from '@intlayer/backend';\nimport {\n ESMxCJSRequire,\n getAppLogger,\n getConfiguration,\n} from '@intlayer/config';\nimport type { Dictionary } from '@intlayer/core';\nimport { fetchDistantDictionaryKeys } from '../fetchDistantDictionaryKeys';\nimport { logger } from '../log';\nimport { sortAlphabetically } from '../utils/sortAlphabetically';\nimport { loadContentDeclarations } from './loadContentDeclaration';\nimport { loadDistantDictionaries } from './loadDistantDictionaries';\n\nexport const loadDictionaries = async (\n contentDeclarationsPaths: string[] | string,\n configuration = getConfiguration(),\n projectRequire = ESMxCJSRequire\n): Promise<Dictionary[]> => {\n try {\n const appLogger = getAppLogger(configuration);\n const { editor } = configuration;\n\n appLogger('Dictionaries:', { isVerbose: true });\n\n const files = Array.isArray(contentDeclarationsPaths)\n ? contentDeclarationsPaths\n : [contentDeclarationsPaths];\n\n const localDictionaries: Dictionary[] = await loadContentDeclarations(\n files,\n projectRequire\n );\n\n const filteredLocalDictionaries = localDictionaries.filter((dict) => {\n const hasKey = Boolean(dict.key);\n const hasContent = Boolean(dict.content);\n\n if (!hasContent) {\n console.error(\n 'Content declaration has no exported content',\n dict.filePath\n );\n } else if (!hasKey) {\n console.error('Content declaration has no key', dict.filePath);\n }\n\n return hasKey && hasContent;\n });\n\n const localDictionaryKeys = filteredLocalDictionaries\n .map((dict) => dict.key)\n .filter(Boolean); // Remove empty or undefined keys\n\n // Initialize the logger with both local and distant dictionaries\n logger.init(localDictionaryKeys, []);\n\n // Update logger statuses for local dictionaries\n logger.updateStatus(\n filteredLocalDictionaries.map((dict) => ({\n dictionaryKey: dict.key,\n type: 'local',\n status: { status: 'built' },\n }))\n );\n\n let distantDictionaries: DictionaryAPI[] = [];\n let distantDictionaryKeys: string[] = [];\n\n if (editor.clientId && editor.clientSecret) {\n try {\n // Fetch distant dictionary keys\n distantDictionaryKeys = await fetchDistantDictionaryKeys();\n\n const orderedDistantDictionaryKeys =\n distantDictionaryKeys.sort(sortAlphabetically);\n\n // Add distant dictionaries to the logger\n logger.addDictionaryKeys('distant', orderedDistantDictionaryKeys);\n\n // Fetch distant dictionaries\n distantDictionaries = await loadDistantDictionaries({\n dictionaryKeys: orderedDistantDictionaryKeys,\n });\n } catch (_error) {\n appLogger('Error during fetching distant dictionaries', {\n level: 'error',\n });\n }\n }\n\n // Ensure the logger is stopped\n logger.stop();\n\n return [...filteredLocalDictionaries, ...distantDictionaries];\n } catch (error) {\n // Ensure the logger is stopped\n logger.stop();\n\n throw error; // Re-throw the error after logging\n 
}\n};\n"],"mappings":"AAEA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OACK;AAEP,SAAS,kCAAkC;AAC3C,SAAS,cAAc;AACvB,SAAS,0BAA0B;AACnC,SAAS,+BAA+B;AACxC,SAAS,+BAA+B;AAEjC,MAAM,mBAAmB,OAC9B,0BACA,gBAAgB,iBAAiB,GACjC,iBAAiB,mBACS;AAC1B,MAAI;AACF,UAAM,YAAY,aAAa,aAAa;AAC5C,UAAM,EAAE,OAAO,IAAI;AAEnB,cAAU,iBAAiB,EAAE,WAAW,KAAK,CAAC;AAE9C,UAAM,QAAQ,MAAM,QAAQ,wBAAwB,IAChD,2BACA,CAAC,wBAAwB;AAE7B,UAAM,oBAAkC,MAAM;AAAA,MAC5C;AAAA,MACA;AAAA,IACF;AAEA,UAAM,4BAA4B,kBAAkB,OAAO,CAAC,SAAS;AACnE,YAAM,SAAS,QAAQ,KAAK,GAAG;AAC/B,YAAM,aAAa,QAAQ,KAAK,OAAO;AAEvC,UAAI,CAAC,YAAY;AACf,gBAAQ;AAAA,UACN;AAAA,UACA,KAAK;AAAA,QACP;AAAA,MACF,WAAW,CAAC,QAAQ;AAClB,gBAAQ,MAAM,kCAAkC,KAAK,QAAQ;AAAA,MAC/D;AAEA,aAAO,UAAU;AAAA,IACnB,CAAC;AAED,UAAM,sBAAsB,0BACzB,IAAI,CAAC,SAAS,KAAK,GAAG,EACtB,OAAO,OAAO;AAGjB,WAAO,KAAK,qBAAqB,CAAC,CAAC;AAGnC,WAAO;AAAA,MACL,0BAA0B,IAAI,CAAC,UAAU;AAAA,QACvC,eAAe,KAAK;AAAA,QACpB,MAAM;AAAA,QACN,QAAQ,EAAE,QAAQ,QAAQ;AAAA,MAC5B,EAAE;AAAA,IACJ;AAEA,QAAI,sBAAuC,CAAC;AAC5C,QAAI,wBAAkC,CAAC;AAEvC,QAAI,OAAO,YAAY,OAAO,cAAc;AAC1C,UAAI;AAEF,gCAAwB,MAAM,2BAA2B;AAEzD,cAAM,+BACJ,sBAAsB,KAAK,kBAAkB;AAG/C,eAAO,kBAAkB,WAAW,4BAA4B;AAGhE,8BAAsB,MAAM,wBAAwB;AAAA,UAClD,gBAAgB;AAAA,QAClB,CAAC;AAAA,MACH,SAAS,QAAQ;AACf,kBAAU,8CAA8C;AAAA,UACtD,OAAO;AAAA,QACT,CAAC;AAAA,MACH;AAAA,IACF;AAGA,WAAO,KAAK;AAEZ,WAAO,CAAC,GAAG,2BAA2B,GAAG,mBAAmB;AAAA,EAC9D,SAAS,OAAO;AAEd,WAAO,KAAK;AAEZ,UAAM;AAAA,EACR;AACF;","names":[]}
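The `loadDictionaries` change above now drops any local content declaration that lacks a `key` or exported `content`, logging the offending file path instead of forwarding the entry to the logger and merge steps. A minimal sketch of a declaration that satisfies the new filter, assuming the usual `{ key, content }` shape of `Dictionary` from `@intlayer/core` (the key and content values are illustrative):

```ts
import type { Dictionary } from '@intlayer/core';

// Both `key` and `content` must be truthy; otherwise loadDictionaries now logs
// "Content declaration has no key" / "...no exported content" and skips the file.
const appContent: Dictionary = {
  key: 'app',
  content: {
    title: 'My application',
  },
};

export default appContent;
```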
package/dist/esm/log.mjs CHANGED
@@ -1,6 +1,6 @@
- import readline from "readline";
  import { getConfiguration } from "@intlayer/config";
- import { sortAlphabetically } from "./utils.mjs";
+ import readline from "readline";
+ import { sortAlphabetically } from "./utils/sortAlphabetically.mjs";
  const LINE_DETECTOR = "\u200B\u200B\u200B";
  const SPINNER_FRAMES = ["\u280B", "\u2819", "\u2839", "\u2838", "\u283C", "\u2834", "\u2826", "\u2827", "\u2807", "\u280F"];
  const RESET = "\x1B[0m";