@intlayer/chokidar 5.6.0 → 5.7.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71)
  1. package/dist/cjs/fetchDistantDictionaries.cjs +1 -1
  2. package/dist/cjs/fetchDistantDictionaries.cjs.map +1 -1
  3. package/dist/cjs/fetchDistantDictionaryKeys.cjs +1 -1
  4. package/dist/cjs/fetchDistantDictionaryKeys.cjs.map +1 -1
  5. package/dist/cjs/getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.cjs +2 -2
  6. package/dist/cjs/getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.cjs.map +1 -1
  7. package/dist/cjs/index.cjs +14 -2
  8. package/dist/cjs/index.cjs.map +1 -1
  9. package/dist/cjs/loadDictionaries/loadDictionaries.cjs +18 -5
  10. package/dist/cjs/loadDictionaries/loadDictionaries.cjs.map +1 -1
  11. package/dist/cjs/log.cjs +3 -3
  12. package/dist/cjs/log.cjs.map +1 -1
  13. package/dist/cjs/transpiler/declaration_file_to_dictionary/i18next_dictionary/buildI18nextDictionary.cjs +1 -1
  14. package/dist/cjs/transpiler/declaration_file_to_dictionary/i18next_dictionary/buildI18nextDictionary.cjs.map +1 -1
  15. package/dist/cjs/transpiler/dictionary_to_main/generateDictionaryListContent.cjs +2 -2
  16. package/dist/cjs/transpiler/dictionary_to_main/generateDictionaryListContent.cjs.map +1 -1
  17. package/dist/cjs/transpiler/dictionary_to_type/createModuleAugmentation.cjs +4 -3
  18. package/dist/cjs/transpiler/dictionary_to_type/createModuleAugmentation.cjs.map +1 -1
  19. package/dist/cjs/{utils.cjs → utils/getFileHash.cjs} +6 -22
  20. package/dist/cjs/utils/getFileHash.cjs.map +1 -0
  21. package/dist/cjs/utils/kebabCaseToCamelCase.cjs +39 -0
  22. package/dist/cjs/utils/kebabCaseToCamelCase.cjs.map +1 -0
  23. package/dist/cjs/utils/runOnce.cjs +58 -0
  24. package/dist/cjs/utils/runOnce.cjs.map +1 -0
  25. package/dist/cjs/utils/sortAlphabetically.cjs +29 -0
  26. package/dist/cjs/utils/sortAlphabetically.cjs.map +1 -0
  27. package/dist/esm/fetchDistantDictionaries.mjs +2 -2
  28. package/dist/esm/fetchDistantDictionaries.mjs.map +1 -1
  29. package/dist/esm/fetchDistantDictionaryKeys.mjs +1 -1
  30. package/dist/esm/fetchDistantDictionaryKeys.mjs.map +1 -1
  31. package/dist/esm/getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.mjs +1 -1
  32. package/dist/esm/getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.mjs.map +1 -1
  33. package/dist/esm/index.mjs +9 -1
  34. package/dist/esm/index.mjs.map +1 -1
  35. package/dist/esm/loadDictionaries/loadDictionaries.mjs +17 -4
  36. package/dist/esm/loadDictionaries/loadDictionaries.mjs.map +1 -1
  37. package/dist/esm/log.mjs +2 -2
  38. package/dist/esm/log.mjs.map +1 -1
  39. package/dist/esm/transpiler/declaration_file_to_dictionary/i18next_dictionary/buildI18nextDictionary.mjs +1 -1
  40. package/dist/esm/transpiler/declaration_file_to_dictionary/i18next_dictionary/buildI18nextDictionary.mjs.map +1 -1
  41. package/dist/esm/transpiler/dictionary_to_main/generateDictionaryListContent.mjs +1 -1
  42. package/dist/esm/transpiler/dictionary_to_main/generateDictionaryListContent.mjs.map +1 -1
  43. package/dist/esm/transpiler/dictionary_to_type/createModuleAugmentation.mjs +2 -1
  44. package/dist/esm/transpiler/dictionary_to_type/createModuleAugmentation.mjs.map +1 -1
  45. package/dist/esm/utils/getFileHash.mjs +9 -0
  46. package/dist/esm/utils/getFileHash.mjs.map +1 -0
  47. package/dist/esm/{utils.mjs → utils/kebabCaseToCamelCase.mjs} +2 -10
  48. package/dist/esm/utils/kebabCaseToCamelCase.mjs.map +1 -0
  49. package/dist/esm/utils/runOnce.mjs +34 -0
  50. package/dist/esm/utils/runOnce.mjs.map +1 -0
  51. package/dist/esm/utils/sortAlphabetically.mjs +5 -0
  52. package/dist/esm/utils/sortAlphabetically.mjs.map +1 -0
  53. package/dist/types/index.d.ts +5 -1
  54. package/dist/types/index.d.ts.map +1 -1
  55. package/dist/types/loadDictionaries/loadDictionaries.d.ts.map +1 -1
  56. package/dist/types/log.d.ts.map +1 -1
  57. package/dist/types/transpiler/declaration_file_to_dictionary/i18next_dictionary/buildI18nextDictionary.d.ts.map +1 -1
  58. package/dist/types/transpiler/dictionary_to_type/createModuleAugmentation.d.ts.map +1 -1
  59. package/dist/types/utils/getFileHash.d.ts +2 -0
  60. package/dist/types/utils/getFileHash.d.ts.map +1 -0
  61. package/dist/types/utils/kebabCaseToCamelCase.d.ts +2 -0
  62. package/dist/types/utils/kebabCaseToCamelCase.d.ts.map +1 -0
  63. package/dist/types/utils/runOnce.d.ts +24 -0
  64. package/dist/types/utils/runOnce.d.ts.map +1 -0
  65. package/dist/types/utils/sortAlphabetically.d.ts +2 -0
  66. package/dist/types/utils/sortAlphabetically.d.ts.map +1 -0
  67. package/package.json +13 -13
  68. package/dist/cjs/utils.cjs.map +0 -1
  69. package/dist/esm/utils.mjs.map +0 -1
  70. package/dist/types/utils.d.ts +0 -4
  71. package/dist/types/utils.d.ts.map +0 -1
package/dist/cjs/utils/runOnce.cjs ADDED
@@ -0,0 +1,58 @@
+ "use strict";
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+ };
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+ var runOnce_exports = {};
+ __export(runOnce_exports, {
+ runOnce: () => runOnce
+ });
+ module.exports = __toCommonJS(runOnce_exports);
+ var import_promises = require("fs/promises");
+ var import_path = require("path");
+ const runOnce = async (sentinelFilePath, callback, cacheTimeoutMs = 60 * 1e3) => {
+ const currentTimestamp = Date.now();
+ const timeoutDuration = cacheTimeoutMs;
+ try {
+ const sentinelStats = await (0, import_promises.stat)(sentinelFilePath);
+ const sentinelAge = currentTimestamp - sentinelStats.mtime.getTime();
+ if (sentinelAge > timeoutDuration) {
+ await (0, import_promises.unlink)(sentinelFilePath);
+ } else {
+ return;
+ }
+ } catch (err) {
+ if (err.code === "ENOENT") {
+ } else {
+ throw err;
+ }
+ }
+ try {
+ await (0, import_promises.mkdir)((0, import_path.dirname)(sentinelFilePath), { recursive: true });
+ await (0, import_promises.writeFile)(sentinelFilePath, String(currentTimestamp), { flag: "wx" });
+ } catch (err) {
+ if (err.code === "EEXIST") {
+ return;
+ }
+ throw err;
+ }
+ await callback();
+ };
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+ runOnce
+ });
+ //# sourceMappingURL=runOnce.cjs.map
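The new `runOnce` utility coordinates work across processes through a sentinel file: if the sentinel exists and is younger than `cacheTimeoutMs`, the callback is skipped; otherwise the sentinel is recreated with the `wx` flag so only the first process runs the callback. A minimal usage sketch, based on the JSDoc shipped in the source map below; the sentinel path and the callback body are hypothetical, and `runOnce` is imported from the package root as added in the `index.mjs` diff further down:

```typescript
import { tmpdir } from 'os';
import { join } from 'path';
// Re-exported from the package root as of 5.7.x (see the index.mjs diff below).
import { runOnce } from '@intlayer/chokidar';

// Hypothetical sentinel location; any writable path works.
const sentinelFilePath = join(tmpdir(), 'intlayer-prepare-sentinel');

await runOnce(
  sentinelFilePath,
  async () => {
    // Work that should run at most once per time window, even when several
    // processes (CLI, dev server, watcher) start at the same time.
  },
  30 * 1000 // optional cache window in ms; defaults to 60 * 1000 per the source
);
```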
package/dist/cjs/utils/runOnce.cjs.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../../src/utils/runOnce.ts"],"sourcesContent":["import { mkdir, stat, unlink, writeFile } from 'fs/promises';\nimport { dirname } from 'path';\n\n/**\n * Ensures a callback function runs only once within a specified time window across multiple processes.\n * Uses a sentinel file to coordinate execution and prevent duplicate work.\n *\n * @param sentinelFilePath - Path to the sentinel file used for coordination\n * @param callback - The function to execute (should be async)\n * @param cacheTimeoutMs - Time window in milliseconds during which the sentinel is considered valid (default: 60000ms = 1 minute)\n *\n * @example\n * ```typescript\n * await runPrepareIntlayerOnce(\n * '/tmp/intlayer-sentinel',\n * async () => {\n * // Your initialization logic here\n * await prepareIntlayer();\n * },\n * 30 * 1000 // 30 seconds cache\n * );\n * ```\n *\n * @throws {Error} When there are unexpected filesystem errors\n */\nexport const runOnce = async (\n sentinelFilePath: string,\n callback: () => void | Promise<void>,\n cacheTimeoutMs: number = 60 * 1000 // 1 minute in milliseconds\n) => {\n const currentTimestamp = Date.now();\n const timeoutDuration = cacheTimeoutMs;\n\n try {\n // Check if sentinel file exists and get its stats\n const sentinelStats = await stat(sentinelFilePath);\n const sentinelAge = currentTimestamp - sentinelStats.mtime.getTime();\n\n // If sentinel is older than the timeout, delete it and rebuild\n if (sentinelAge > timeoutDuration) {\n await unlink(sentinelFilePath);\n // Fall through to create new sentinel and rebuild\n } else {\n // Sentinel is recent, no need to rebuild\n return;\n }\n } catch (err: any) {\n if (err.code === 'ENOENT') {\n // File doesn't exist, continue to create it\n } else {\n throw err; // unexpected FS error\n }\n }\n\n try {\n // Ensure the directory exists before writing the file\n await mkdir(dirname(sentinelFilePath), { recursive: true });\n\n // O_EXCL ensures only the *first* process can create the file\n await writeFile(sentinelFilePath, String(currentTimestamp), { flag: 'wx' });\n } catch (err: any) {\n if (err.code === 'EEXIST') {\n // Another process already created it → we're done\n return;\n }\n throw err; // unexpected FS error\n }\n\n await callback();\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAA+C;AAC/C,kBAAwB;AAwBjB,MAAM,UAAU,OACrB,kBACA,UACA,iBAAyB,KAAK,QAC3B;AACH,QAAM,mBAAmB,KAAK,IAAI;AAClC,QAAM,kBAAkB;AAExB,MAAI;AAEF,UAAM,gBAAgB,UAAM,sBAAK,gBAAgB;AACjD,UAAM,cAAc,mBAAmB,cAAc,MAAM,QAAQ;AAGnE,QAAI,cAAc,iBAAiB;AACjC,gBAAM,wBAAO,gBAAgB;AAAA,IAE/B,OAAO;AAEL;AAAA,IACF;AAAA,EACF,SAAS,KAAU;AACjB,QAAI,IAAI,SAAS,UAAU;AAAA,IAE3B,OAAO;AACL,YAAM;AAAA,IACR;AAAA,EACF;AAEA,MAAI;AAEF,cAAM,2BAAM,qBAAQ,gBAAgB,GAAG,EAAE,WAAW,KAAK,CAAC;AAG1D,cAAM,2BAAU,kBAAkB,OAAO,gBAAgB,GAAG,EAAE,MAAM,KAAK,CAAC;AAAA,EAC5E,SAAS,KAAU;AACjB,QAAI,IAAI,SAAS,UAAU;AAEzB;AAAA,IACF;AACA,UAAM;AAAA,EACR;AAEA,QAAM,SAAS;AACjB;","names":[]}
package/dist/cjs/utils/sortAlphabetically.cjs ADDED
@@ -0,0 +1,29 @@
+ "use strict";
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+ };
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+ var sortAlphabetically_exports = {};
+ __export(sortAlphabetically_exports, {
+ sortAlphabetically: () => sortAlphabetically
+ });
+ module.exports = __toCommonJS(sortAlphabetically_exports);
+ const sortAlphabetically = (a, b) => a.localeCompare(b);
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+ sortAlphabetically
+ });
+ //# sourceMappingURL=sortAlphabetically.cjs.map
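`sortAlphabetically` is a plain `localeCompare` comparator, used further down in `loadDictionaries.mjs` to order distant dictionary keys. A quick sketch of the intended use; the example keys are hypothetical:

```typescript
import { sortAlphabetically } from '@intlayer/chokidar';

// Comparator backed by String.prototype.localeCompare.
const keys = ['nav-bar', 'about', 'footer'].sort(sortAlphabetically);
// → ['about', 'footer', 'nav-bar']
```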
package/dist/cjs/utils/sortAlphabetically.cjs.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../../src/utils/sortAlphabetically.ts"],"sourcesContent":["export const sortAlphabetically = (a: string, b: string) => a.localeCompare(b);\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAO,MAAM,qBAAqB,CAAC,GAAW,MAAc,EAAE,cAAc,CAAC;","names":[]}
package/dist/esm/fetchDistantDictionaries.mjs CHANGED
@@ -1,4 +1,4 @@
- import { getAuthAPI, getDictionaryAPI } from "@intlayer/api";
+ import { getDictionaryAPI, getOAuthAPI } from "@intlayer/api";
  import { getAppLogger, getConfiguration } from "@intlayer/config";
  import pLimit from "p-limit";
  import { logger } from "./log.mjs";
@@ -7,7 +7,7 @@ const fetchDistantDictionaries = async (options) => {
  const appLogger = getAppLogger(config);
  try {
  const { clientId, clientSecret } = config.editor;
- const authAPI = getAuthAPI(void 0, config);
+ const authAPI = getOAuthAPI(config);
  const dictionaryAPI = getDictionaryAPI(void 0, config);
  if (!clientId || !clientSecret) {
  throw new Error(
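The auth client construction changes from `getAuthAPI(undefined, config)` to `getOAuthAPI(config)`. For code that mirrors this call site, a sketch of the 5.7.x shape, assuming `getOAuthAPI` takes the configuration as its only argument, exactly as shown in the hunk above:

```typescript
import { getOAuthAPI } from '@intlayer/api';
import { getConfiguration } from '@intlayer/config';

const config = getConfiguration();

// 5.6.x: const authAPI = getAuthAPI(undefined, config);
const authAPI = getOAuthAPI(config);

// Token retrieval is unchanged in this file.
const oAuth2TokenResult = await authAPI.getOAuth2AccessToken();
const accessToken = oAuth2TokenResult.data?.accessToken;
```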
package/dist/esm/fetchDistantDictionaries.mjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../src/fetchDistantDictionaries.ts"],"sourcesContent":["import { getAuthAPI, getDictionaryAPI } from '@intlayer/api';\n// @ts-ignore @intlayer/backend is not build yet\nimport type { DictionaryAPI } from '@intlayer/backend';\nimport { getAppLogger, getConfiguration } from '@intlayer/config';\nimport pLimit from 'p-limit';\nimport { logger } from './log';\n\ntype FetchDistantDictionariesOptions = {\n dictionaryKeys: string[];\n newDictionariesPath?: string;\n logPrefix?: string;\n};\n\n/**\n * Fetch distant dictionaries and update the logger with their statuses.\n */\nexport const fetchDistantDictionaries = async (\n options: FetchDistantDictionariesOptions\n): Promise<DictionaryAPI[]> => {\n const config = getConfiguration();\n const appLogger = getAppLogger(config);\n try {\n const { clientId, clientSecret } = config.editor;\n const authAPI = getAuthAPI(undefined, config);\n const dictionaryAPI = getDictionaryAPI(undefined, config);\n\n if (!clientId || !clientSecret) {\n throw new Error(\n 'Missing OAuth2 client ID or client secret. To get access token go to https://intlayer.org/dashboard/project.'\n );\n }\n\n const oAuth2TokenResult = await authAPI.getOAuth2AccessToken();\n\n const oAuth2AccessToken = oAuth2TokenResult.data?.accessToken;\n\n const distantDictionariesKeys = options.dictionaryKeys;\n\n // Process dictionaries in parallel with a concurrency limit\n const limit = pLimit(5); // Limit the number of concurrent requests\n\n const processDictionary = async (\n dictionaryKey: string\n ): Promise<DictionaryAPI | undefined> => {\n logger.updateStatus([\n {\n dictionaryKey,\n type: 'distant',\n status: { status: 'fetching' },\n },\n ]);\n\n try {\n // Fetch the dictionary\n const getDictionaryResult = await dictionaryAPI.getDictionary(\n dictionaryKey,\n undefined,\n {\n ...(oAuth2AccessToken && {\n headers: {\n Authorization: `Bearer ${oAuth2AccessToken}`,\n },\n }),\n }\n );\n\n const distantDictionary = getDictionaryResult.data;\n\n if (!distantDictionary) {\n throw new Error(`Dictionary ${dictionaryKey} not found on remote`);\n }\n\n logger.updateStatus([\n { dictionaryKey, type: 'distant', status: { status: 'imported' } },\n ]);\n\n return distantDictionary;\n } catch (error) {\n logger.updateStatus([\n {\n dictionaryKey,\n type: 'distant',\n status: {\n status: 'error',\n error: error as Error,\n errorMessage: `${options?.logPrefix ?? 
''}Error fetching dictionary ${dictionaryKey}: ${error}`,\n },\n },\n ]);\n return undefined;\n }\n };\n\n const fetchPromises = distantDictionariesKeys.map((dictionaryKey) =>\n limit(async () => await processDictionary(dictionaryKey))\n );\n\n const result = await Promise.all(fetchPromises);\n\n // Output any error messages\n const statuses = logger.getStatuses();\n for (const statusObj of statuses) {\n const currentState = statusObj.state.find((s) => s.type === 'distant');\n if (currentState && currentState.errorMessage) {\n appLogger(currentState.errorMessage, { level: 'error' });\n }\n }\n\n // Remove undefined values\n const filteredResult = result.filter(\n (dict): dict is DictionaryAPI => dict !== undefined\n );\n\n return filteredResult;\n } catch (error) {\n appLogger(error, { level: 'error' });\n return [];\n }\n};\n"],"mappings":"AAAA,SAAS,YAAY,wBAAwB;AAG7C,SAAS,cAAc,wBAAwB;AAC/C,OAAO,YAAY;AACnB,SAAS,cAAc;AAWhB,MAAM,2BAA2B,OACtC,YAC6B;AAC7B,QAAM,SAAS,iBAAiB;AAChC,QAAM,YAAY,aAAa,MAAM;AACrC,MAAI;AACF,UAAM,EAAE,UAAU,aAAa,IAAI,OAAO;AAC1C,UAAM,UAAU,WAAW,QAAW,MAAM;AAC5C,UAAM,gBAAgB,iBAAiB,QAAW,MAAM;AAExD,QAAI,CAAC,YAAY,CAAC,cAAc;AAC9B,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,UAAM,oBAAoB,MAAM,QAAQ,qBAAqB;AAE7D,UAAM,oBAAoB,kBAAkB,MAAM;AAElD,UAAM,0BAA0B,QAAQ;AAGxC,UAAM,QAAQ,OAAO,CAAC;AAEtB,UAAM,oBAAoB,OACxB,kBACuC;AACvC,aAAO,aAAa;AAAA,QAClB;AAAA,UACE;AAAA,UACA,MAAM;AAAA,UACN,QAAQ,EAAE,QAAQ,WAAW;AAAA,QAC/B;AAAA,MACF,CAAC;AAED,UAAI;AAEF,cAAM,sBAAsB,MAAM,cAAc;AAAA,UAC9C;AAAA,UACA;AAAA,UACA;AAAA,YACE,GAAI,qBAAqB;AAAA,cACvB,SAAS;AAAA,gBACP,eAAe,UAAU,iBAAiB;AAAA,cAC5C;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,cAAM,oBAAoB,oBAAoB;AAE9C,YAAI,CAAC,mBAAmB;AACtB,gBAAM,IAAI,MAAM,cAAc,aAAa,sBAAsB;AAAA,QACnE;AAEA,eAAO,aAAa;AAAA,UAClB,EAAE,eAAe,MAAM,WAAW,QAAQ,EAAE,QAAQ,WAAW,EAAE;AAAA,QACnE,CAAC;AAED,eAAO;AAAA,MACT,SAAS,OAAO;AACd,eAAO,aAAa;AAAA,UAClB;AAAA,YACE;AAAA,YACA,MAAM;AAAA,YACN,QAAQ;AAAA,cACN,QAAQ;AAAA,cACR;AAAA,cACA,cAAc,GAAG,SAAS,aAAa,EAAE,6BAA6B,aAAa,KAAK,KAAK;AAAA,YAC/F;AAAA,UACF;AAAA,QACF,CAAC;AACD,eAAO;AAAA,MACT;AAAA,IACF;AAEA,UAAM,gBAAgB,wBAAwB;AAAA,MAAI,CAAC,kBACjD,MAAM,YAAY,MAAM,kBAAkB,aAAa,CAAC;AAAA,IAC1D;AAEA,UAAM,SAAS,MAAM,QAAQ,IAAI,aAAa;AAG9C,UAAM,WAAW,OAAO,YAAY;AACpC,eAAW,aAAa,UAAU;AAChC,YAAM,eAAe,UAAU,MAAM,KAAK,CAAC,MAAM,EAAE,SAAS,SAAS;AACrE,UAAI,gBAAgB,aAAa,cAAc;AAC7C,kBAAU,aAAa,cAAc,EAAE,OAAO,QAAQ,CAAC;AAAA,MACzD;AAAA,IACF;AAGA,UAAM,iBAAiB,OAAO;AAAA,MAC5B,CAAC,SAAgC,SAAS;AAAA,IAC5C;AAEA,WAAO;AAAA,EACT,SAAS,OAAO;AACd,cAAU,OAAO,EAAE,OAAO,QAAQ,CAAC;AACnC,WAAO,CAAC;AAAA,EACV;AACF;","names":[]}
+ {"version":3,"sources":["../../src/fetchDistantDictionaries.ts"],"sourcesContent":["import { getDictionaryAPI, getOAuthAPI } from '@intlayer/api';\n// @ts-ignore @intlayer/backend is not build yet\nimport type { DictionaryAPI } from '@intlayer/backend';\nimport { getAppLogger, getConfiguration } from '@intlayer/config';\nimport pLimit from 'p-limit';\nimport { logger } from './log';\n\ntype FetchDistantDictionariesOptions = {\n dictionaryKeys: string[];\n newDictionariesPath?: string;\n logPrefix?: string;\n};\n\n/**\n * Fetch distant dictionaries and update the logger with their statuses.\n */\nexport const fetchDistantDictionaries = async (\n options: FetchDistantDictionariesOptions\n): Promise<DictionaryAPI[]> => {\n const config = getConfiguration();\n const appLogger = getAppLogger(config);\n try {\n const { clientId, clientSecret } = config.editor;\n const authAPI = getOAuthAPI(config);\n const dictionaryAPI = getDictionaryAPI(undefined, config);\n\n if (!clientId || !clientSecret) {\n throw new Error(\n 'Missing OAuth2 client ID or client secret. To get access token go to https://intlayer.org/dashboard/project.'\n );\n }\n\n const oAuth2TokenResult = await authAPI.getOAuth2AccessToken();\n\n const oAuth2AccessToken = oAuth2TokenResult.data?.accessToken;\n\n const distantDictionariesKeys = options.dictionaryKeys;\n\n // Process dictionaries in parallel with a concurrency limit\n const limit = pLimit(5); // Limit the number of concurrent requests\n\n const processDictionary = async (\n dictionaryKey: string\n ): Promise<DictionaryAPI | undefined> => {\n logger.updateStatus([\n {\n dictionaryKey,\n type: 'distant',\n status: { status: 'fetching' },\n },\n ]);\n\n try {\n // Fetch the dictionary\n const getDictionaryResult = await dictionaryAPI.getDictionary(\n dictionaryKey,\n undefined,\n {\n ...(oAuth2AccessToken && {\n headers: {\n Authorization: `Bearer ${oAuth2AccessToken}`,\n },\n }),\n }\n );\n\n const distantDictionary = getDictionaryResult.data;\n\n if (!distantDictionary) {\n throw new Error(`Dictionary ${dictionaryKey} not found on remote`);\n }\n\n logger.updateStatus([\n { dictionaryKey, type: 'distant', status: { status: 'imported' } },\n ]);\n\n return distantDictionary;\n } catch (error) {\n logger.updateStatus([\n {\n dictionaryKey,\n type: 'distant',\n status: {\n status: 'error',\n error: error as Error,\n errorMessage: `${options?.logPrefix ?? 
''}Error fetching dictionary ${dictionaryKey}: ${error}`,\n },\n },\n ]);\n return undefined;\n }\n };\n\n const fetchPromises = distantDictionariesKeys.map((dictionaryKey) =>\n limit(async () => await processDictionary(dictionaryKey))\n );\n\n const result = await Promise.all(fetchPromises);\n\n // Output any error messages\n const statuses = logger.getStatuses();\n for (const statusObj of statuses) {\n const currentState = statusObj.state.find((s) => s.type === 'distant');\n if (currentState && currentState.errorMessage) {\n appLogger(currentState.errorMessage, { level: 'error' });\n }\n }\n\n // Remove undefined values\n const filteredResult = result.filter(\n (dict): dict is DictionaryAPI => dict !== undefined\n );\n\n return filteredResult;\n } catch (error) {\n appLogger(error, { level: 'error' });\n return [];\n }\n};\n"],"mappings":"AAAA,SAAS,kBAAkB,mBAAmB;AAG9C,SAAS,cAAc,wBAAwB;AAC/C,OAAO,YAAY;AACnB,SAAS,cAAc;AAWhB,MAAM,2BAA2B,OACtC,YAC6B;AAC7B,QAAM,SAAS,iBAAiB;AAChC,QAAM,YAAY,aAAa,MAAM;AACrC,MAAI;AACF,UAAM,EAAE,UAAU,aAAa,IAAI,OAAO;AAC1C,UAAM,UAAU,YAAY,MAAM;AAClC,UAAM,gBAAgB,iBAAiB,QAAW,MAAM;AAExD,QAAI,CAAC,YAAY,CAAC,cAAc;AAC9B,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,UAAM,oBAAoB,MAAM,QAAQ,qBAAqB;AAE7D,UAAM,oBAAoB,kBAAkB,MAAM;AAElD,UAAM,0BAA0B,QAAQ;AAGxC,UAAM,QAAQ,OAAO,CAAC;AAEtB,UAAM,oBAAoB,OACxB,kBACuC;AACvC,aAAO,aAAa;AAAA,QAClB;AAAA,UACE;AAAA,UACA,MAAM;AAAA,UACN,QAAQ,EAAE,QAAQ,WAAW;AAAA,QAC/B;AAAA,MACF,CAAC;AAED,UAAI;AAEF,cAAM,sBAAsB,MAAM,cAAc;AAAA,UAC9C;AAAA,UACA;AAAA,UACA;AAAA,YACE,GAAI,qBAAqB;AAAA,cACvB,SAAS;AAAA,gBACP,eAAe,UAAU,iBAAiB;AAAA,cAC5C;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,cAAM,oBAAoB,oBAAoB;AAE9C,YAAI,CAAC,mBAAmB;AACtB,gBAAM,IAAI,MAAM,cAAc,aAAa,sBAAsB;AAAA,QACnE;AAEA,eAAO,aAAa;AAAA,UAClB,EAAE,eAAe,MAAM,WAAW,QAAQ,EAAE,QAAQ,WAAW,EAAE;AAAA,QACnE,CAAC;AAED,eAAO;AAAA,MACT,SAAS,OAAO;AACd,eAAO,aAAa;AAAA,UAClB;AAAA,YACE;AAAA,YACA,MAAM;AAAA,YACN,QAAQ;AAAA,cACN,QAAQ;AAAA,cACR;AAAA,cACA,cAAc,GAAG,SAAS,aAAa,EAAE,6BAA6B,aAAa,KAAK,KAAK;AAAA,YAC/F;AAAA,UACF;AAAA,QACF,CAAC;AACD,eAAO;AAAA,MACT;AAAA,IACF;AAEA,UAAM,gBAAgB,wBAAwB;AAAA,MAAI,CAAC,kBACjD,MAAM,YAAY,MAAM,kBAAkB,aAAa,CAAC;AAAA,IAC1D;AAEA,UAAM,SAAS,MAAM,QAAQ,IAAI,aAAa;AAG9C,UAAM,WAAW,OAAO,YAAY;AACpC,eAAW,aAAa,UAAU;AAChC,YAAM,eAAe,UAAU,MAAM,KAAK,CAAC,MAAM,EAAE,SAAS,SAAS;AACrE,UAAI,gBAAgB,aAAa,cAAc;AAC7C,kBAAU,aAAa,cAAc,EAAE,OAAO,QAAQ,CAAC;AAAA,MACzD;AAAA,IACF;AAGA,UAAM,iBAAiB,OAAO;AAAA,MAC5B,CAAC,SAAgC,SAAS;AAAA,IAC5C;AAEA,WAAO;AAAA,EACT,SAAS,OAAO;AACd,cAAU,OAAO,EAAE,OAAO,QAAQ,CAAC;AACnC,WAAO,CAAC;AAAA,EACV;AACF;","names":[]}
package/dist/esm/fetchDistantDictionaryKeys.mjs CHANGED
@@ -8,7 +8,7 @@ const fetchDistantDictionaryKeys = async (configuration = getConfiguration()) =>
  );
  }
  const intlayerAPI = getIntlayerAPI(void 0, configuration);
- const oAuth2TokenResult = await intlayerAPI.auth.getOAuth2AccessToken();
+ const oAuth2TokenResult = await intlayerAPI.oAuth.getOAuth2AccessToken();
  const oAuth2AccessToken = oAuth2TokenResult.data?.accessToken;
  const getDictionariesKeysResult = await intlayerAPI.dictionary.getDictionariesKeys({
  ...oAuth2AccessToken && {
package/dist/esm/fetchDistantDictionaryKeys.mjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../src/fetchDistantDictionaryKeys.ts"],"sourcesContent":["import { getIntlayerAPI } from '@intlayer/api';\nimport { getConfiguration, type IntlayerConfig } from '@intlayer/config';\n\nexport const fetchDistantDictionaryKeys = async (\n configuration: IntlayerConfig = getConfiguration()\n): Promise<string[]> => {\n const { clientId, clientSecret } = configuration.editor;\n\n if (!clientId || !clientSecret) {\n throw new Error(\n 'Missing OAuth2 client ID or client secret. To get access token go to https://intlayer.org/dashboard/project.'\n );\n }\n\n const intlayerAPI = getIntlayerAPI(undefined, configuration);\n\n const oAuth2TokenResult = await intlayerAPI.auth.getOAuth2AccessToken();\n\n const oAuth2AccessToken = oAuth2TokenResult.data?.accessToken;\n\n // Get the list of dictionary keys\n const getDictionariesKeysResult =\n await intlayerAPI.dictionary.getDictionariesKeys({\n ...(oAuth2AccessToken && {\n headers: {\n Authorization: `Bearer ${oAuth2AccessToken}`,\n },\n }),\n });\n\n if (!getDictionariesKeysResult.data) {\n throw new Error('No distant dictionaries found');\n }\n\n const distantDictionariesKeys: string[] = getDictionariesKeysResult.data;\n\n // Apply any filtering if needed\n return distantDictionariesKeys;\n};\n"],"mappings":"AAAA,SAAS,sBAAsB;AAC/B,SAAS,wBAA6C;AAE/C,MAAM,6BAA6B,OACxC,gBAAgC,iBAAiB,MAC3B;AACtB,QAAM,EAAE,UAAU,aAAa,IAAI,cAAc;AAEjD,MAAI,CAAC,YAAY,CAAC,cAAc;AAC9B,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,cAAc,eAAe,QAAW,aAAa;AAE3D,QAAM,oBAAoB,MAAM,YAAY,KAAK,qBAAqB;AAEtE,QAAM,oBAAoB,kBAAkB,MAAM;AAGlD,QAAM,4BACJ,MAAM,YAAY,WAAW,oBAAoB;AAAA,IAC/C,GAAI,qBAAqB;AAAA,MACvB,SAAS;AAAA,QACP,eAAe,UAAU,iBAAiB;AAAA,MAC5C;AAAA,IACF;AAAA,EACF,CAAC;AAEH,MAAI,CAAC,0BAA0B,MAAM;AACnC,UAAM,IAAI,MAAM,+BAA+B;AAAA,EACjD;AAEA,QAAM,0BAAoC,0BAA0B;AAGpE,SAAO;AACT;","names":[]}
+ {"version":3,"sources":["../../src/fetchDistantDictionaryKeys.ts"],"sourcesContent":["import { getIntlayerAPI } from '@intlayer/api';\nimport { getConfiguration, type IntlayerConfig } from '@intlayer/config';\n\nexport const fetchDistantDictionaryKeys = async (\n configuration: IntlayerConfig = getConfiguration()\n): Promise<string[]> => {\n const { clientId, clientSecret } = configuration.editor;\n\n if (!clientId || !clientSecret) {\n throw new Error(\n 'Missing OAuth2 client ID or client secret. To get access token go to https://intlayer.org/dashboard/project.'\n );\n }\n\n const intlayerAPI = getIntlayerAPI(undefined, configuration);\n\n const oAuth2TokenResult = await intlayerAPI.oAuth.getOAuth2AccessToken();\n\n const oAuth2AccessToken = oAuth2TokenResult.data?.accessToken;\n\n // Get the list of dictionary keys\n const getDictionariesKeysResult =\n await intlayerAPI.dictionary.getDictionariesKeys({\n ...(oAuth2AccessToken && {\n headers: {\n Authorization: `Bearer ${oAuth2AccessToken}`,\n },\n }),\n });\n\n if (!getDictionariesKeysResult.data) {\n throw new Error('No distant dictionaries found');\n }\n\n const distantDictionariesKeys: string[] = getDictionariesKeysResult.data;\n\n // Apply any filtering if needed\n return distantDictionariesKeys;\n};\n"],"mappings":"AAAA,SAAS,sBAAsB;AAC/B,SAAS,wBAA6C;AAE/C,MAAM,6BAA6B,OACxC,gBAAgC,iBAAiB,MAC3B;AACtB,QAAM,EAAE,UAAU,aAAa,IAAI,cAAc;AAEjD,MAAI,CAAC,YAAY,CAAC,cAAc;AAC9B,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,cAAc,eAAe,QAAW,aAAa;AAE3D,QAAM,oBAAoB,MAAM,YAAY,MAAM,qBAAqB;AAEvE,QAAM,oBAAoB,kBAAkB,MAAM;AAGlD,QAAM,4BACJ,MAAM,YAAY,WAAW,oBAAoB;AAAA,IAC/C,GAAI,qBAAqB;AAAA,MACvB,SAAS;AAAA,QACP,eAAe,UAAU,iBAAiB;AAAA,MAC5C;AAAA,IACF;AAAA,EACF,CAAC;AAEH,MAAI,CAAC,0BAA0B,MAAM;AACnC,UAAM,IAAI,MAAM,+BAA+B;AAAA,EACjD;AAEA,QAAM,0BAAoC,0BAA0B;AAGpE,SAAO;AACT;","names":[]}
package/dist/esm/getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.mjs CHANGED
@@ -1,7 +1,7 @@
  import { readFile } from "fs/promises";
  import { join } from "path";
  import { fileURLToPath } from "url";
- import { kebabCaseToCamelCase } from "../utils.mjs";
+ import { kebabCaseToCamelCase } from "../utils/kebabCaseToCamelCase.mjs";
  const getContentDeclarationFileTemplate = async (key, format, fileParams = {}) => {
  const dirname = __dirname ?? fileURLToPath(import.meta.url);
  let fileTemplate = "./esmTemplate.md";
package/dist/esm/getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.mjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../../src/getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.ts"],"sourcesContent":["import { readFile } from 'fs/promises';\nimport { join } from 'path';\nimport { fileURLToPath } from 'url';\nimport { kebabCaseToCamelCase } from '../utils';\n\nexport const getContentDeclarationFileTemplate = async (\n key: string,\n format: 'ts' | 'cjs' | 'esm',\n fileParams: Record<string, any> = {}\n) => {\n const dirname = __dirname ?? fileURLToPath(import.meta.url);\n\n let fileTemplate = './esmTemplate.md';\n\n if (format === 'ts') {\n fileTemplate = './tsTemplate.md';\n } else if (format === 'cjs') {\n fileTemplate = './cjsTemplate.md';\n }\n\n const fileContent = await readFile(join(dirname, fileTemplate), 'utf-8');\n const camelCaseKey = kebabCaseToCamelCase(key);\n const nonCapitalizedCamelCaseKey =\n camelCaseKey.charAt(0).toLowerCase() + camelCaseKey.slice(1);\n\n const fileParmsString = Object.entries(fileParams)\n .filter(([, value]) => value !== undefined)\n .map(([key, value]) => {\n if (typeof value === 'object') {\n return `\\n${key}: ${JSON.stringify(value)},`;\n }\n\n if (typeof value === 'boolean' || typeof value === 'number') {\n return `\\n${key}: ${value},`;\n }\n\n if (typeof value === 'string') {\n return `\\n${key}: '${value}',`;\n }\n\n return `\\n${key}: ${value},`;\n })\n .join('');\n\n return fileContent\n .replace('{{key}}', key)\n .replaceAll('{{name}}', nonCapitalizedCamelCaseKey)\n .replace('{{fileParams}}', fileParmsString);\n};\n"],"mappings":"AAAA,SAAS,gBAAgB;AACzB,SAAS,YAAY;AACrB,SAAS,qBAAqB;AAC9B,SAAS,4BAA4B;AAE9B,MAAM,oCAAoC,OAC/C,KACA,QACA,aAAkC,CAAC,MAChC;AACH,QAAM,UAAU,aAAa,cAAc,YAAY,GAAG;AAE1D,MAAI,eAAe;AAEnB,MAAI,WAAW,MAAM;AACnB,mBAAe;AAAA,EACjB,WAAW,WAAW,OAAO;AAC3B,mBAAe;AAAA,EACjB;AAEA,QAAM,cAAc,MAAM,SAAS,KAAK,SAAS,YAAY,GAAG,OAAO;AACvE,QAAM,eAAe,qBAAqB,GAAG;AAC7C,QAAM,6BACJ,aAAa,OAAO,CAAC,EAAE,YAAY,IAAI,aAAa,MAAM,CAAC;AAE7D,QAAM,kBAAkB,OAAO,QAAQ,UAAU,EAC9C,OAAO,CAAC,CAAC,EAAE,KAAK,MAAM,UAAU,MAAS,EACzC,IAAI,CAAC,CAACA,MAAK,KAAK,MAAM;AACrB,QAAI,OAAO,UAAU,UAAU;AAC7B,aAAO;AAAA,EAAKA,IAAG,KAAK,KAAK,UAAU,KAAK,CAAC;AAAA,IAC3C;AAEA,QAAI,OAAO,UAAU,aAAa,OAAO,UAAU,UAAU;AAC3D,aAAO;AAAA,EAAKA,IAAG,KAAK,KAAK;AAAA,IAC3B;AAEA,QAAI,OAAO,UAAU,UAAU;AAC7B,aAAO;AAAA,EAAKA,IAAG,MAAM,KAAK;AAAA,IAC5B;AAEA,WAAO;AAAA,EAAKA,IAAG,KAAK,KAAK;AAAA,EAC3B,CAAC,EACA,KAAK,EAAE;AAEV,SAAO,YACJ,QAAQ,WAAW,GAAG,EACtB,WAAW,YAAY,0BAA0B,EACjD,QAAQ,kBAAkB,eAAe;AAC9C;","names":["key"]}
+ {"version":3,"sources":["../../../src/getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.ts"],"sourcesContent":["import { readFile } from 'fs/promises';\nimport { join } from 'path';\nimport { fileURLToPath } from 'url';\nimport { kebabCaseToCamelCase } from '../utils/kebabCaseToCamelCase';\n\nexport const getContentDeclarationFileTemplate = async (\n key: string,\n format: 'ts' | 'cjs' | 'esm',\n fileParams: Record<string, any> = {}\n) => {\n const dirname = __dirname ?? fileURLToPath(import.meta.url);\n\n let fileTemplate = './esmTemplate.md';\n\n if (format === 'ts') {\n fileTemplate = './tsTemplate.md';\n } else if (format === 'cjs') {\n fileTemplate = './cjsTemplate.md';\n }\n\n const fileContent = await readFile(join(dirname, fileTemplate), 'utf-8');\n const camelCaseKey = kebabCaseToCamelCase(key);\n const nonCapitalizedCamelCaseKey =\n camelCaseKey.charAt(0).toLowerCase() + camelCaseKey.slice(1);\n\n const fileParmsString = Object.entries(fileParams)\n .filter(([, value]) => value !== undefined)\n .map(([key, value]) => {\n if (typeof value === 'object') {\n return `\\n${key}: ${JSON.stringify(value)},`;\n }\n\n if (typeof value === 'boolean' || typeof value === 'number') {\n return `\\n${key}: ${value},`;\n }\n\n if (typeof value === 'string') {\n return `\\n${key}: '${value}',`;\n }\n\n return `\\n${key}: ${value},`;\n })\n .join('');\n\n return fileContent\n .replace('{{key}}', key)\n .replaceAll('{{name}}', nonCapitalizedCamelCaseKey)\n .replace('{{fileParams}}', fileParmsString);\n};\n"],"mappings":"AAAA,SAAS,gBAAgB;AACzB,SAAS,YAAY;AACrB,SAAS,qBAAqB;AAC9B,SAAS,4BAA4B;AAE9B,MAAM,oCAAoC,OAC/C,KACA,QACA,aAAkC,CAAC,MAChC;AACH,QAAM,UAAU,aAAa,cAAc,YAAY,GAAG;AAE1D,MAAI,eAAe;AAEnB,MAAI,WAAW,MAAM;AACnB,mBAAe;AAAA,EACjB,WAAW,WAAW,OAAO;AAC3B,mBAAe;AAAA,EACjB;AAEA,QAAM,cAAc,MAAM,SAAS,KAAK,SAAS,YAAY,GAAG,OAAO;AACvE,QAAM,eAAe,qBAAqB,GAAG;AAC7C,QAAM,6BACJ,aAAa,OAAO,CAAC,EAAE,YAAY,IAAI,aAAa,MAAM,CAAC;AAE7D,QAAM,kBAAkB,OAAO,QAAQ,UAAU,EAC9C,OAAO,CAAC,CAAC,EAAE,KAAK,MAAM,UAAU,MAAS,EACzC,IAAI,CAAC,CAACA,MAAK,KAAK,MAAM;AACrB,QAAI,OAAO,UAAU,UAAU;AAC7B,aAAO;AAAA,EAAKA,IAAG,KAAK,KAAK,UAAU,KAAK,CAAC;AAAA,IAC3C;AAEA,QAAI,OAAO,UAAU,aAAa,OAAO,UAAU,UAAU;AAC3D,aAAO;AAAA,EAAKA,IAAG,KAAK,KAAK;AAAA,IAC3B;AAEA,QAAI,OAAO,UAAU,UAAU;AAC7B,aAAO;AAAA,EAAKA,IAAG,MAAM,KAAK;AAAA,IAC5B;AAEA,WAAO;AAAA,EAAKA,IAAG,KAAK,KAAK;AAAA,EAC3B,CAAC,EACA,KAAK,EAAE;AAEV,SAAO,YACJ,QAAQ,WAAW,GAAG,EACtB,WAAW,YAAY,0BAA0B,EACjD,QAAQ,kBAAkB,eAAe;AAC9C;","names":["key"]}
package/dist/esm/index.mjs CHANGED
@@ -32,7 +32,11 @@ import {
  generateDictionaryListContent
  } from "./transpiler/dictionary_to_main/index.mjs";
  import { createModuleAugmentation } from "./transpiler/dictionary_to_type/createModuleAugmentation.mjs";
- import { getFileHash } from "./utils.mjs";
+ import { getFileHash } from "./utils/getFileHash.mjs";
+ import { kebabCaseToCamelCase } from "./utils/kebabCaseToCamelCase.mjs";
+ import { resolveObjectPromises } from "./utils/resolveObjectPromises.mjs";
+ import { runOnce } from "./utils/runOnce.mjs";
+ import { sortAlphabetically } from "./utils/sortAlphabetically.mjs";
  import {
  writeContentDeclaration
  } from "./writeContentDeclaration/index.mjs";
@@ -52,6 +56,7 @@ export {
  getFilteredLocalesContent,
  handleAdditionalContentDeclarationFile,
  handleContentDeclarationFileChange,
+ kebabCaseToCamelCase,
  listDictionaries,
  listGitFiles,
  listGitLines,
@@ -63,6 +68,9 @@ export {
  prepareIntlayer,
  processPerLocaleDictionary,
  reduceDictionaryContent,
+ resolveObjectPromises,
+ runOnce,
+ sortAlphabetically,
  watch,
  writeContentDeclaration
  };
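Net effect of the `index.mjs` changes: the single `utils` module is split into per-function files, and the utilities are re-exported from the package root (`getFileHash` was already exported in 5.6.0; `kebabCaseToCamelCase`, `resolveObjectPromises`, `runOnce`, and `sortAlphabetically` are new root exports in 5.7.x). A sketch of the resulting import surface, matching the export list above:

```typescript
import {
  getFileHash,
  kebabCaseToCamelCase,
  resolveObjectPromises,
  runOnce,
  sortAlphabetically,
} from '@intlayer/chokidar';
```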
package/dist/esm/index.mjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../src/index.ts"],"sourcesContent":["export { checkDictionaryChanges } from './checkDictionaryChanges';\nexport {\n buildAndWatchIntlayer,\n handleAdditionalContentDeclarationFile,\n handleContentDeclarationFileChange,\n watch,\n} from './chokidar/watcher';\nexport { cleanOutputDir } from './cleanOutputDir';\nexport { fetchDistantDictionaries } from './fetchDistantDictionaries';\nexport { fetchDistantDictionaryKeys } from './fetchDistantDictionaryKeys';\nexport { getBuiltDictionariesPath } from './getBuiltDictionariesPath';\nexport { getBuiltUnmergedDictionariesPath } from './getBuiltUnmergedDictionariesPath';\nexport { getFilteredLocalesContent } from './getFilteredLocalesContent';\nexport { listDictionaries } from './listDictionariesPath';\nexport {\n listGitFiles,\n listGitLines,\n type DiffMode,\n type ListGitFilesOptions,\n type ListGitLinesOptions,\n} from './listGitFiles';\nexport {\n loadDictionaries,\n loadDistantDictionaries,\n loadLocalDictionaries,\n} from './loadDictionaries/index';\nexport { mergeDictionaries } from './mergeDictionaries';\nexport { prepareContentDeclaration } from './prepareContentDeclaration';\nexport { prepareIntlayer } from './prepareIntlayer';\nexport { processPerLocaleDictionary } from './processPerLocaleDictionary';\nexport { reduceDictionaryContent } from './reduceDictionaryContent/reduceDictionaryContent';\nexport { buildDictionary } from './transpiler/declaration_file_to_dictionary/index';\nexport {\n createDictionaryEntryPoint,\n generateDictionaryListContent,\n} from './transpiler/dictionary_to_main';\nexport { createModuleAugmentation } from './transpiler/dictionary_to_type/createModuleAugmentation';\nexport { getFileHash } from './utils';\nexport {\n writeContentDeclaration,\n type DictionaryStatus,\n} from './writeContentDeclaration';\n"],"mappings":"AAAA,SAAS,8BAA8B;AACvC;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,SAAS,sBAAsB;AAC/B,SAAS,gCAAgC;AACzC,SAAS,kCAAkC;AAC3C,SAAS,gCAAgC;AACzC,SAAS,wCAAwC;AACjD,SAAS,iCAAiC;AAC1C,SAAS,wBAAwB;AACjC;AAAA,EACE;AAAA,EACA;AAAA,OAIK;AACP;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,SAAS,yBAAyB;AAClC,SAAS,iCAAiC;AAC1C,SAAS,uBAAuB;AAChC,SAAS,kCAAkC;AAC3C,SAAS,+BAA+B;AACxC,SAAS,uBAAuB;AAChC;AAAA,EACE;AAAA,EACA;AAAA,OACK;AACP,SAAS,gCAAgC;AACzC,SAAS,mBAAmB;AAC5B;AAAA,EACE;AAAA,OAEK;","names":[]}
+ {"version":3,"sources":["../../src/index.ts"],"sourcesContent":["export { checkDictionaryChanges } from './checkDictionaryChanges';\nexport {\n buildAndWatchIntlayer,\n handleAdditionalContentDeclarationFile,\n handleContentDeclarationFileChange,\n watch,\n} from './chokidar/watcher';\nexport { cleanOutputDir } from './cleanOutputDir';\nexport { fetchDistantDictionaries } from './fetchDistantDictionaries';\nexport { fetchDistantDictionaryKeys } from './fetchDistantDictionaryKeys';\nexport { getBuiltDictionariesPath } from './getBuiltDictionariesPath';\nexport { getBuiltUnmergedDictionariesPath } from './getBuiltUnmergedDictionariesPath';\nexport { getFilteredLocalesContent } from './getFilteredLocalesContent';\nexport { listDictionaries } from './listDictionariesPath';\nexport {\n listGitFiles,\n listGitLines,\n type DiffMode,\n type ListGitFilesOptions,\n type ListGitLinesOptions,\n} from './listGitFiles';\nexport {\n loadDictionaries,\n loadDistantDictionaries,\n loadLocalDictionaries,\n} from './loadDictionaries/index';\nexport { mergeDictionaries } from './mergeDictionaries';\nexport { prepareContentDeclaration } from './prepareContentDeclaration';\nexport { prepareIntlayer } from './prepareIntlayer';\nexport { processPerLocaleDictionary } from './processPerLocaleDictionary';\nexport { reduceDictionaryContent } from './reduceDictionaryContent/reduceDictionaryContent';\nexport { buildDictionary } from './transpiler/declaration_file_to_dictionary/index';\nexport {\n createDictionaryEntryPoint,\n generateDictionaryListContent,\n} from './transpiler/dictionary_to_main';\nexport { createModuleAugmentation } from './transpiler/dictionary_to_type/createModuleAugmentation';\nexport { getFileHash } from './utils/getFileHash';\nexport { kebabCaseToCamelCase } from './utils/kebabCaseToCamelCase';\nexport { resolveObjectPromises } from './utils/resolveObjectPromises';\nexport { runOnce } from './utils/runOnce';\nexport { sortAlphabetically } from './utils/sortAlphabetically';\nexport {\n writeContentDeclaration,\n type DictionaryStatus,\n} from './writeContentDeclaration';\n"],"mappings":"AAAA,SAAS,8BAA8B;AACvC;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,SAAS,sBAAsB;AAC/B,SAAS,gCAAgC;AACzC,SAAS,kCAAkC;AAC3C,SAAS,gCAAgC;AACzC,SAAS,wCAAwC;AACjD,SAAS,iCAAiC;AAC1C,SAAS,wBAAwB;AACjC;AAAA,EACE;AAAA,EACA;AAAA,OAIK;AACP;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,SAAS,yBAAyB;AAClC,SAAS,iCAAiC;AAC1C,SAAS,uBAAuB;AAChC,SAAS,kCAAkC;AAC3C,SAAS,+BAA+B;AACxC,SAAS,uBAAuB;AAChC;AAAA,EACE;AAAA,EACA;AAAA,OACK;AACP,SAAS,gCAAgC;AACzC,SAAS,mBAAmB;AAC5B,SAAS,4BAA4B;AACrC,SAAS,6BAA6B;AACtC,SAAS,eAAe;AACxB,SAAS,0BAA0B;AACnC;AAAA,EACE;AAAA,OAEK;","names":[]}
package/dist/esm/loadDictionaries/loadDictionaries.mjs CHANGED
@@ -5,7 +5,7 @@ import {
  } from "@intlayer/config";
  import { fetchDistantDictionaryKeys } from "../fetchDistantDictionaryKeys.mjs";
  import { logger } from "../log.mjs";
- import { sortAlphabetically } from "../utils.mjs";
+ import { sortAlphabetically } from "../utils/sortAlphabetically.mjs";
  import { loadContentDeclarations } from "./loadContentDeclaration.mjs";
  import { loadDistantDictionaries } from "./loadDistantDictionaries.mjs";
  const loadDictionaries = async (contentDeclarationsPaths, configuration = getConfiguration(), projectRequire = ESMxCJSRequire) => {
@@ -18,10 +18,23 @@ const loadDictionaries = async (contentDeclarationsPaths, configuration = getCon
  files,
  projectRequire
  );
- const localDictionaryKeys = localDictionaries.map((dict) => dict.key).filter(Boolean);
+ const filteredLocalDictionaries = localDictionaries.filter((dict) => {
+ const hasKey = Boolean(dict.key);
+ const hasContent = Boolean(dict.content);
+ if (!hasContent) {
+ console.error(
+ "Content declaration has no exported content",
+ dict.filePath
+ );
+ } else if (!hasKey) {
+ console.error("Content declaration has no key", dict.filePath);
+ }
+ return hasKey && hasContent;
+ });
+ const localDictionaryKeys = filteredLocalDictionaries.map((dict) => dict.key).filter(Boolean);
  logger.init(localDictionaryKeys, []);
  logger.updateStatus(
- localDictionaries.map((dict) => ({
+ filteredLocalDictionaries.map((dict) => ({
  dictionaryKey: dict.key,
  type: "local",
  status: { status: "built" }
@@ -44,7 +57,7 @@ const loadDictionaries = async (contentDeclarationsPaths, configuration = getCon
  }
  }
  logger.stop();
- return [...localDictionaries, ...distantDictionaries];
+ return [...filteredLocalDictionaries, ...distantDictionaries];
  } catch (error) {
  logger.stop();
  throw error;
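`loadDictionaries` now drops local content declarations that export no `content` or declare no `key` (logging a `console.error` with the file path) instead of passing them through to the logger and the merged result. A hypothetical illustration of the new filter predicate; the declaration objects and file paths below are made up:

```typescript
// Hypothetical local declarations; only the first one survives the filter.
const declarations = [
  { key: 'home-page', content: { title: 'Home' }, filePath: './home.content.ts' },
  { key: 'broken', content: undefined, filePath: './broken.content.ts' },        // "no exported content"
  { key: undefined, content: { title: 'Hi' }, filePath: './no-key.content.ts' }, // "no key"
];

const kept = declarations.filter((dict) => Boolean(dict.key) && Boolean(dict.content));
// kept.length === 1
```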
package/dist/esm/loadDictionaries/loadDictionaries.mjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../../src/loadDictionaries/loadDictionaries.ts"],"sourcesContent":["// @ts-ignore @intlayer/backend is not build yet\nimport type { DictionaryAPI } from '@intlayer/backend';\nimport {\n ESMxCJSRequire,\n getAppLogger,\n getConfiguration,\n} from '@intlayer/config';\nimport type { Dictionary } from '@intlayer/core';\nimport { fetchDistantDictionaryKeys } from '../fetchDistantDictionaryKeys';\nimport { logger } from '../log';\nimport { sortAlphabetically } from '../utils';\nimport { loadContentDeclarations } from './loadContentDeclaration';\nimport { loadDistantDictionaries } from './loadDistantDictionaries';\n\nexport const loadDictionaries = async (\n contentDeclarationsPaths: string[] | string,\n configuration = getConfiguration(),\n projectRequire = ESMxCJSRequire\n): Promise<Dictionary[]> => {\n try {\n const appLogger = getAppLogger(configuration);\n const { editor } = configuration;\n\n appLogger('Dictionaries:', { isVerbose: true });\n\n const files = Array.isArray(contentDeclarationsPaths)\n ? contentDeclarationsPaths\n : [contentDeclarationsPaths];\n\n const localDictionaries: Dictionary[] = await loadContentDeclarations(\n files,\n projectRequire\n );\n const localDictionaryKeys = localDictionaries\n .map((dict) => dict.key)\n .filter(Boolean); // Remove empty or undefined keys\n\n // Initialize the logger with both local and distant dictionaries\n logger.init(localDictionaryKeys, []);\n\n // Update logger statuses for local dictionaries\n logger.updateStatus(\n localDictionaries.map((dict) => ({\n dictionaryKey: dict.key,\n type: 'local',\n status: { status: 'built' },\n }))\n );\n\n let distantDictionaries: DictionaryAPI[] = [];\n let distantDictionaryKeys: string[] = [];\n\n if (editor.clientId && editor.clientSecret) {\n try {\n // Fetch distant dictionary keys\n distantDictionaryKeys = await fetchDistantDictionaryKeys();\n\n const orderedDistantDictionaryKeys =\n distantDictionaryKeys.sort(sortAlphabetically);\n\n // Add distant dictionaries to the logger\n logger.addDictionaryKeys('distant', orderedDistantDictionaryKeys);\n\n // Fetch distant dictionaries\n distantDictionaries = await loadDistantDictionaries({\n dictionaryKeys: orderedDistantDictionaryKeys,\n });\n } catch (_error) {\n appLogger('Error during fetching distant dictionaries', {\n level: 'error',\n });\n }\n }\n\n // Ensure the logger is stopped\n logger.stop();\n\n return [...localDictionaries, ...distantDictionaries];\n } catch (error) {\n // Ensure the logger is stopped\n logger.stop();\n\n throw error; // Re-throw the error after logging\n 
}\n};\n"],"mappings":"AAEA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OACK;AAEP,SAAS,kCAAkC;AAC3C,SAAS,cAAc;AACvB,SAAS,0BAA0B;AACnC,SAAS,+BAA+B;AACxC,SAAS,+BAA+B;AAEjC,MAAM,mBAAmB,OAC9B,0BACA,gBAAgB,iBAAiB,GACjC,iBAAiB,mBACS;AAC1B,MAAI;AACF,UAAM,YAAY,aAAa,aAAa;AAC5C,UAAM,EAAE,OAAO,IAAI;AAEnB,cAAU,iBAAiB,EAAE,WAAW,KAAK,CAAC;AAE9C,UAAM,QAAQ,MAAM,QAAQ,wBAAwB,IAChD,2BACA,CAAC,wBAAwB;AAE7B,UAAM,oBAAkC,MAAM;AAAA,MAC5C;AAAA,MACA;AAAA,IACF;AACA,UAAM,sBAAsB,kBACzB,IAAI,CAAC,SAAS,KAAK,GAAG,EACtB,OAAO,OAAO;AAGjB,WAAO,KAAK,qBAAqB,CAAC,CAAC;AAGnC,WAAO;AAAA,MACL,kBAAkB,IAAI,CAAC,UAAU;AAAA,QAC/B,eAAe,KAAK;AAAA,QACpB,MAAM;AAAA,QACN,QAAQ,EAAE,QAAQ,QAAQ;AAAA,MAC5B,EAAE;AAAA,IACJ;AAEA,QAAI,sBAAuC,CAAC;AAC5C,QAAI,wBAAkC,CAAC;AAEvC,QAAI,OAAO,YAAY,OAAO,cAAc;AAC1C,UAAI;AAEF,gCAAwB,MAAM,2BAA2B;AAEzD,cAAM,+BACJ,sBAAsB,KAAK,kBAAkB;AAG/C,eAAO,kBAAkB,WAAW,4BAA4B;AAGhE,8BAAsB,MAAM,wBAAwB;AAAA,UAClD,gBAAgB;AAAA,QAClB,CAAC;AAAA,MACH,SAAS,QAAQ;AACf,kBAAU,8CAA8C;AAAA,UACtD,OAAO;AAAA,QACT,CAAC;AAAA,MACH;AAAA,IACF;AAGA,WAAO,KAAK;AAEZ,WAAO,CAAC,GAAG,mBAAmB,GAAG,mBAAmB;AAAA,EACtD,SAAS,OAAO;AAEd,WAAO,KAAK;AAEZ,UAAM;AAAA,EACR;AACF;","names":[]}
+ {"version":3,"sources":["../../../src/loadDictionaries/loadDictionaries.ts"],"sourcesContent":["// @ts-ignore @intlayer/backend is not build yet\nimport type { DictionaryAPI } from '@intlayer/backend';\nimport {\n ESMxCJSRequire,\n getAppLogger,\n getConfiguration,\n} from '@intlayer/config';\nimport type { Dictionary } from '@intlayer/core';\nimport { fetchDistantDictionaryKeys } from '../fetchDistantDictionaryKeys';\nimport { logger } from '../log';\nimport { sortAlphabetically } from '../utils/sortAlphabetically';\nimport { loadContentDeclarations } from './loadContentDeclaration';\nimport { loadDistantDictionaries } from './loadDistantDictionaries';\n\nexport const loadDictionaries = async (\n contentDeclarationsPaths: string[] | string,\n configuration = getConfiguration(),\n projectRequire = ESMxCJSRequire\n): Promise<Dictionary[]> => {\n try {\n const appLogger = getAppLogger(configuration);\n const { editor } = configuration;\n\n appLogger('Dictionaries:', { isVerbose: true });\n\n const files = Array.isArray(contentDeclarationsPaths)\n ? contentDeclarationsPaths\n : [contentDeclarationsPaths];\n\n const localDictionaries: Dictionary[] = await loadContentDeclarations(\n files,\n projectRequire\n );\n\n const filteredLocalDictionaries = localDictionaries.filter((dict) => {\n const hasKey = Boolean(dict.key);\n const hasContent = Boolean(dict.content);\n\n if (!hasContent) {\n console.error(\n 'Content declaration has no exported content',\n dict.filePath\n );\n } else if (!hasKey) {\n console.error('Content declaration has no key', dict.filePath);\n }\n\n return hasKey && hasContent;\n });\n\n const localDictionaryKeys = filteredLocalDictionaries\n .map((dict) => dict.key)\n .filter(Boolean); // Remove empty or undefined keys\n\n // Initialize the logger with both local and distant dictionaries\n logger.init(localDictionaryKeys, []);\n\n // Update logger statuses for local dictionaries\n logger.updateStatus(\n filteredLocalDictionaries.map((dict) => ({\n dictionaryKey: dict.key,\n type: 'local',\n status: { status: 'built' },\n }))\n );\n\n let distantDictionaries: DictionaryAPI[] = [];\n let distantDictionaryKeys: string[] = [];\n\n if (editor.clientId && editor.clientSecret) {\n try {\n // Fetch distant dictionary keys\n distantDictionaryKeys = await fetchDistantDictionaryKeys();\n\n const orderedDistantDictionaryKeys =\n distantDictionaryKeys.sort(sortAlphabetically);\n\n // Add distant dictionaries to the logger\n logger.addDictionaryKeys('distant', orderedDistantDictionaryKeys);\n\n // Fetch distant dictionaries\n distantDictionaries = await loadDistantDictionaries({\n dictionaryKeys: orderedDistantDictionaryKeys,\n });\n } catch (_error) {\n appLogger('Error during fetching distant dictionaries', {\n level: 'error',\n });\n }\n }\n\n // Ensure the logger is stopped\n logger.stop();\n\n return [...filteredLocalDictionaries, ...distantDictionaries];\n } catch (error) {\n // Ensure the logger is stopped\n logger.stop();\n\n throw error; // Re-throw the error after logging\n 
}\n};\n"],"mappings":"AAEA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OACK;AAEP,SAAS,kCAAkC;AAC3C,SAAS,cAAc;AACvB,SAAS,0BAA0B;AACnC,SAAS,+BAA+B;AACxC,SAAS,+BAA+B;AAEjC,MAAM,mBAAmB,OAC9B,0BACA,gBAAgB,iBAAiB,GACjC,iBAAiB,mBACS;AAC1B,MAAI;AACF,UAAM,YAAY,aAAa,aAAa;AAC5C,UAAM,EAAE,OAAO,IAAI;AAEnB,cAAU,iBAAiB,EAAE,WAAW,KAAK,CAAC;AAE9C,UAAM,QAAQ,MAAM,QAAQ,wBAAwB,IAChD,2BACA,CAAC,wBAAwB;AAE7B,UAAM,oBAAkC,MAAM;AAAA,MAC5C;AAAA,MACA;AAAA,IACF;AAEA,UAAM,4BAA4B,kBAAkB,OAAO,CAAC,SAAS;AACnE,YAAM,SAAS,QAAQ,KAAK,GAAG;AAC/B,YAAM,aAAa,QAAQ,KAAK,OAAO;AAEvC,UAAI,CAAC,YAAY;AACf,gBAAQ;AAAA,UACN;AAAA,UACA,KAAK;AAAA,QACP;AAAA,MACF,WAAW,CAAC,QAAQ;AAClB,gBAAQ,MAAM,kCAAkC,KAAK,QAAQ;AAAA,MAC/D;AAEA,aAAO,UAAU;AAAA,IACnB,CAAC;AAED,UAAM,sBAAsB,0BACzB,IAAI,CAAC,SAAS,KAAK,GAAG,EACtB,OAAO,OAAO;AAGjB,WAAO,KAAK,qBAAqB,CAAC,CAAC;AAGnC,WAAO;AAAA,MACL,0BAA0B,IAAI,CAAC,UAAU;AAAA,QACvC,eAAe,KAAK;AAAA,QACpB,MAAM;AAAA,QACN,QAAQ,EAAE,QAAQ,QAAQ;AAAA,MAC5B,EAAE;AAAA,IACJ;AAEA,QAAI,sBAAuC,CAAC;AAC5C,QAAI,wBAAkC,CAAC;AAEvC,QAAI,OAAO,YAAY,OAAO,cAAc;AAC1C,UAAI;AAEF,gCAAwB,MAAM,2BAA2B;AAEzD,cAAM,+BACJ,sBAAsB,KAAK,kBAAkB;AAG/C,eAAO,kBAAkB,WAAW,4BAA4B;AAGhE,8BAAsB,MAAM,wBAAwB;AAAA,UAClD,gBAAgB;AAAA,QAClB,CAAC;AAAA,MACH,SAAS,QAAQ;AACf,kBAAU,8CAA8C;AAAA,UACtD,OAAO;AAAA,QACT,CAAC;AAAA,MACH;AAAA,IACF;AAGA,WAAO,KAAK;AAEZ,WAAO,CAAC,GAAG,2BAA2B,GAAG,mBAAmB;AAAA,EAC9D,SAAS,OAAO;AAEd,WAAO,KAAK;AAEZ,UAAM;AAAA,EACR;AACF;","names":[]}
package/dist/esm/log.mjs CHANGED
@@ -1,6 +1,6 @@
- import readline from "readline";
  import { getConfiguration } from "@intlayer/config";
- import { sortAlphabetically } from "./utils.mjs";
+ import readline from "readline";
+ import { sortAlphabetically } from "./utils/sortAlphabetically.mjs";
  const LINE_DETECTOR = "\u200B\u200B\u200B";
  const SPINNER_FRAMES = ["\u280B", "\u2819", "\u2839", "\u2838", "\u283C", "\u2834", "\u2826", "\u2827", "\u2807", "\u280F"];
  const RESET = "\x1B[0m";
package/dist/esm/log.mjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../src/log.ts"],"sourcesContent":["import readline from 'readline';\nimport { type IntlayerConfig, getConfiguration } from '@intlayer/config';\nimport { sortAlphabetically } from './utils';\n\nexport type State = {\n type: 'local' | 'distant';\n status: 'pending' | 'fetching' | 'fetched' | 'error' | 'imported' | 'built';\n icon?: string;\n error?: Error;\n errorMessage?: string;\n spinnerFrameIndex?: number;\n};\n\nexport type DictionariesStatus = {\n dictionaryKey: string;\n state: State[];\n};\n\nconst LINE_DETECTOR = '\\u200B\\u200B\\u200B'; // Three zero-width spaces\nconst SPINNER_FRAMES = ['⠋', '⠙', '⠹', '⠸', '⠼', '⠴', '⠦', '⠧', '⠇', '⠏'];\n\n// ANSI color codes\nconst RESET = '\\x1b[0m';\nconst GREEN = '\\x1b[32m';\nconst RED = '\\x1b[31m';\nconst BLUE = '\\x1b[34m';\nconst GREY = '\\x1b[90m';\nconst GREY_DARK = '\\x1b[90m';\n\nclass Logger {\n private dictionariesStatuses: DictionariesStatus[] = [];\n private spinnerTimer: NodeJS.Timeout | null = null;\n private maxDictionaryKeyLength: number = 0;\n private previousLineCount: number = 0;\n private lineDifCounter: number = 0;\n private originalStdoutWrite:\n | ((\n chunk: string | Uint8Array, // `chunk` can be either a string or a Uint8Array\n encoding?: BufferEncoding, // `encoding` is optional and should be a BufferEncoding\n callback?: (err?: Error | null) => void // `callback` is optional and a function\n ) => boolean)\n | null = null;\n private extraLines: number = 0;\n private isUpdating: boolean = false;\n private config?: IntlayerConfig;\n\n // Singleton instance\n private static instance: Logger;\n\n private constructor() {}\n\n public static getInstance(): Logger {\n if (!Logger.instance) {\n Logger.instance = new Logger();\n }\n return Logger.instance;\n }\n\n public init(\n localDictionariesKeys: string[],\n distantDictionariesKeys: string[],\n configuration: IntlayerConfig = getConfiguration()\n ) {\n this.config = configuration;\n\n const allKeys = Array.from(\n new Set(\n [...localDictionariesKeys, ...distantDictionariesKeys].sort(\n sortAlphabetically\n )\n )\n );\n\n this.maxDictionaryKeyLength = allKeys.reduce(\n (max, key) => (key.length > max ? 
key.length : max),\n 0\n );\n\n this.dictionariesStatuses = allKeys.map((dictionaryKey) => {\n const states: State[] = [];\n\n if (localDictionariesKeys.includes(dictionaryKey)) {\n states.push({\n type: 'local',\n status: 'pending',\n spinnerFrameIndex: 0,\n });\n }\n if (distantDictionariesKeys.includes(dictionaryKey)) {\n states.push({\n type: 'distant',\n status: 'pending',\n spinnerFrameIndex: 0,\n });\n }\n\n return {\n dictionaryKey,\n state: states,\n };\n });\n\n // Start spinner timer\n this.startSpinner();\n\n // Update all status lines (this will output the initial statuses)\n this.updateAllStatusLines();\n }\n\n // New method to add dictionary keys after initialization\n public addDictionaryKeys(\n type: 'local' | 'distant',\n dictionaryKeys: string[]\n ) {\n for (const dictionaryKey of dictionaryKeys) {\n // Update maxDictionaryKeyLength if the new key is longer\n if (dictionaryKey.length > this.maxDictionaryKeyLength) {\n this.maxDictionaryKeyLength = dictionaryKey.length;\n }\n\n let statusObj = this.dictionariesStatuses.find(\n (ds) => ds.dictionaryKey === dictionaryKey\n );\n\n if (!statusObj) {\n // If the dictionaryKey doesn't exist yet, create a new DictionariesStatus\n\n statusObj = {\n dictionaryKey,\n state: [],\n };\n this.dictionariesStatuses.push(statusObj);\n\n const newState: State = {\n type,\n status: 'pending',\n spinnerFrameIndex: 0,\n };\n statusObj.state.push(newState);\n } else {\n const existingState = statusObj.state.find((s) => s.type === type);\n if (!existingState) {\n // Add new state for the type\n\n const newState: State = {\n type,\n status: 'pending',\n spinnerFrameIndex: 0,\n };\n statusObj.state.push(newState);\n }\n }\n }\n\n // Call updateAllStatusLines() to refresh the output\n this.updateAllStatusLines();\n }\n\n private startSpinner() {\n if (!this.spinnerTimer) {\n this.spinnerTimer = setInterval(() => {\n this.updateAllStatusLines();\n }, 100); // Update every 100ms\n }\n }\n\n private stopSpinner() {\n if (this.spinnerTimer) {\n clearInterval(this.spinnerTimer);\n this.spinnerTimer = null;\n }\n }\n\n // Method to update the terminal output\n private updateOutput(content: string) {\n if (this.config?.log.mode !== 'verbose') return;\n\n // Monkey-patch process.stdout.write to keep track of extra lines\n if (!this.originalStdoutWrite) {\n this.originalStdoutWrite = process.stdout.write.bind(process.stdout);\n this.extraLines = 0;\n\n const write = (\n chunk: string | Uint8Array, // `chunk` can be either a string or a Uint8Array\n encoding?: BufferEncoding, // `encoding` is optional and should be a BufferEncoding\n callback?: (err?: Error | null) => void // `callback` is optional and a function\n ) => {\n const str = typeof chunk === 'string' ? chunk : chunk.toString();\n const newLines = (str.match(/\\n/g) ?? 
[]).length;\n\n // If the write is not initiated by Logger's updateOutput method\n if (!this.isUpdating) {\n this.extraLines += newLines;\n }\n\n return this.originalStdoutWrite!(chunk, encoding, callback);\n };\n\n process.stdout.write = write as typeof process.stdout.write;\n }\n\n // Set a flag to indicate that updateOutput is running\n this.isUpdating = true;\n\n // Adjust lineDifCounter if LINE_DETECTOR is not the first line\n const contentLines = content.split('\\n');\n const indexOfLineDetector = contentLines.indexOf(LINE_DETECTOR.trim());\n\n if (indexOfLineDetector > 0) {\n // LINE_DETECTOR is not at the first line\n this.lineDifCounter = indexOfLineDetector;\n } else {\n this.lineDifCounter = 0;\n }\n\n // Calculate total lines to move up\n const totalLinesToMoveUp =\n this.previousLineCount + this.lineDifCounter + this.extraLines;\n\n // Move cursor up by totalLinesToMoveUp\n readline.moveCursor(process.stdout, 0, -totalLinesToMoveUp);\n\n // Clear all lines downwards\n readline.clearScreenDown(process.stdout);\n\n // Write the updated content\n contentLines.forEach((line) => {\n process.stdout.write(`${line}\\x1b[K\\n`);\n });\n\n // Update previousLineCount\n this.previousLineCount = contentLines.length;\n\n // Reset extraLines counter and updating flag\n this.extraLines = 0;\n this.isUpdating = false;\n }\n\n public stop() {\n this.stopSpinner();\n }\n\n public updateStatus(\n dictionaries: {\n dictionaryKey: string;\n type: 'local' | 'distant';\n status: Partial<State>;\n }[]\n ) {\n for (const { dictionaryKey, type, status } of dictionaries) {\n const statusObj = this.dictionariesStatuses.find(\n (ds) => ds.dictionaryKey === dictionaryKey\n );\n if (statusObj) {\n const state = statusObj.state.find((s) => s.type === type);\n if (state) {\n // Update existing state\n Object.assign(state, status);\n } else {\n // If the state for this type doesn't exist yet, add it\n if (status.status === undefined) {\n status.status = 'pending'; // Provide default status\n }\n const newState: State = {\n type,\n status: status.status,\n icon: status.icon ?? '',\n error: status.error,\n errorMessage: status.errorMessage,\n spinnerFrameIndex: status.spinnerFrameIndex ?? 0,\n };\n statusObj.state.push(newState);\n }\n } else {\n // If the status object doesn't exist, create it\n const newState: State = {\n type,\n status: status.status ?? 'pending',\n icon: status.icon ?? '',\n error: status.error,\n errorMessage: status.errorMessage,\n spinnerFrameIndex: status.spinnerFrameIndex ?? 0,\n };\n this.dictionariesStatuses.push({\n dictionaryKey,\n state: [newState],\n });\n }\n }\n\n // Update the display after status change\n this.updateAllStatusLines();\n }\n\n private getStatusIcon(status: string): string {\n const statusIcons: Record<string, string> = {\n pending: '⏲',\n fetching: '', // Spinner handled separately\n built: '✔',\n imported: '✔',\n error: '✖',\n };\n return statusIcons[status] ?? '';\n }\n\n private getStatusLine(statusObj: DictionariesStatus): string {\n const keyPadding =\n this.maxDictionaryKeyLength - statusObj.dictionaryKey.length;\n const paddedKey = `${statusObj.dictionaryKey}${' '.repeat(keyPadding)}`;\n\n const states = statusObj.state.map((state) => {\n let colorStart = '';\n let colorEnd = '';\n let icon = this.getStatusIcon(state.status);\n if (state.status === 'fetching') {\n // Use spinner frame\n icon = SPINNER_FRAMES[state.spinnerFrameIndex! 
% SPINNER_FRAMES.length];\n colorStart = BLUE;\n colorEnd = RESET;\n } else if (state.status === 'error') {\n colorStart = RED;\n colorEnd = RESET;\n } else if (state.status === 'imported' || state.status === 'built') {\n colorStart = GREEN;\n colorEnd = RESET;\n } else {\n colorStart = GREY;\n colorEnd = RESET;\n }\n\n // Format the status block\n const statusBlock = `${GREY_DARK}[${state.type}: ${colorStart}${icon} ${state.status}${GREY_DARK}]${colorEnd}`;\n\n return `${colorStart}${statusBlock}${colorEnd}`;\n });\n\n return `${this.config?.log.prefix}- ${paddedKey} ${states.join(' ')}`;\n }\n\n // Replace logUpdate calls with your custom methods\n private updateAllStatusLines() {\n const terminalHeight = process.stdout.rows;\n const maxVisibleLines = terminalHeight - 1;\n\n const lines = this.dictionariesStatuses.map((statusObj) => {\n statusObj.state.forEach((state) => {\n if (state.status === 'fetching') {\n // Update spinner frame\n state.spinnerFrameIndex =\n (state.spinnerFrameIndex! + 1) % SPINNER_FRAMES.length;\n }\n });\n return this.getStatusLine(statusObj);\n });\n\n let content;\n\n if (lines.length > maxVisibleLines) {\n const visibleLines = lines.slice(0, maxVisibleLines - 5);\n const summary = `${this.config?.log.prefix}... and ${lines.length - visibleLines.length} more`;\n content = LINE_DETECTOR + visibleLines.join('\\n') + '\\n' + summary;\n } else {\n content = lines.join('\\n');\n }\n\n this.updateOutput(content);\n }\n\n public getStatuses() {\n return this.dictionariesStatuses;\n }\n}\n\nexport const logger = Logger.getInstance();\n"],"mappings":"AAAA,OAAO,cAAc;AACrB,SAA8B,wBAAwB;AACtD,SAAS,0BAA0B;AAgBnC,MAAM,gBAAgB;AACtB,MAAM,iBAAiB,CAAC,UAAK,UAAK,UAAK,UAAK,UAAK,UAAK,UAAK,UAAK,UAAK,QAAG;AAGxE,MAAM,QAAQ;AACd,MAAM,QAAQ;AACd,MAAM,MAAM;AACZ,MAAM,OAAO;AACb,MAAM,OAAO;AACb,MAAM,YAAY;AAElB,MAAM,OAAO;AAAA,EACH,uBAA6C,CAAC;AAAA,EAC9C,eAAsC;AAAA,EACtC,yBAAiC;AAAA,EACjC,oBAA4B;AAAA,EAC5B,iBAAyB;AAAA,EACzB,sBAMG;AAAA,EACH,aAAqB;AAAA,EACrB,aAAsB;AAAA,EACtB;AAAA;AAAA,EAGR,OAAe;AAAA,EAEP,cAAc;AAAA,EAAC;AAAA,EAEvB,OAAc,cAAsB;AAClC,QAAI,CAAC,OAAO,UAAU;AACpB,aAAO,WAAW,IAAI,OAAO;AAAA,IAC/B;AACA,WAAO,OAAO;AAAA,EAChB;AAAA,EAEO,KACL,uBACA,yBACA,gBAAgC,iBAAiB,GACjD;AACA,SAAK,SAAS;AAEd,UAAM,UAAU,MAAM;AAAA,MACpB,IAAI;AAAA,QACF,CAAC,GAAG,uBAAuB,GAAG,uBAAuB,EAAE;AAAA,UACrD;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,SAAK,yBAAyB,QAAQ;AAAA,MACpC,CAAC,KAAK,QAAS,IAAI,SAAS,MAAM,IAAI,SAAS;AAAA,MAC/C;AAAA,IACF;AAEA,SAAK,uBAAuB,QAAQ,IAAI,CAAC,kBAAkB;AACzD,YAAM,SAAkB,CAAC;AAEzB,UAAI,sBAAsB,SAAS,aAAa,GAAG;AACjD,eAAO,KAAK;AAAA,UACV,MAAM;AAAA,UACN,QAAQ;AAAA,UACR,mBAAmB;AAAA,QACrB,CAAC;AAAA,MACH;AACA,UAAI,wBAAwB,SAAS,aAAa,GAAG;AACnD,eAAO,KAAK;AAAA,UACV,MAAM;AAAA,UACN,QAAQ;AAAA,UACR,mBAAmB;AAAA,QACrB,CAAC;AAAA,MACH;AAEA,aAAO;AAAA,QACL;AAAA,QACA,OAAO;AAAA,MACT;AAAA,IACF,CAAC;AAGD,SAAK,aAAa;AAGlB,SAAK,qBAAqB;AAAA,EAC5B;AAAA;AAAA,EAGO,kBACL,MACA,gBACA;AACA,eAAW,iBAAiB,gBAAgB;AAE1C,UAAI,cAAc,SAAS,KAAK,wBAAwB;AACtD,aAAK,yBAAyB,cAAc;AAAA,MAC9C;AAEA,UAAI,YAAY,KAAK,qBAAqB;AAAA,QACxC,CAAC,OAAO,GAAG,kBAAkB;AAAA,MAC/B;AAEA,UAAI,CAAC,WAAW;AAGd,oBAAY;AAAA,UACV;AAAA,UACA,OAAO,CAAC;AAAA,QACV;AACA,aAAK,qBAAqB,KAAK,SAAS;AAExC,cAAM,WAAkB;AAAA,UACtB;AAAA,UACA,QAAQ;AAAA,UACR,mBAAmB;AAAA,QACrB;AACA,kBAAU,MAAM,KAAK,QAAQ;AAAA,MAC/B,OAAO;AACL,cAAM,gBAAgB,UAAU,MAAM,KAAK,CAAC,MAAM,EAAE,SAAS,IAAI;AACjE,YAAI,CAAC,eAAe;AAGlB,gBAAM,WAAkB;AAAA,YACtB;AAAA,YACA,QAAQ;AAAA,YACR,mBAAmB;AAAA,UACrB;AACA,oBAAU,MAAM,KAAK,QAAQ;AAAA,QAC/B;AAAA,MACF;AAAA,IACF;AAGA,SAAK,qBAAqB;AAAA,EAC5B;AAAA,EAEQ,eAAe;AACrB,QAAI,CAAC,KAAK,cAAc;AACtB,WAAK,eAAe,YAAY,MAAM;AACpC
,aAAK,qBAAqB;AAAA,MAC5B,GAAG,GAAG;AAAA,IACR;AAAA,EACF;AAAA,EAEQ,cAAc;AACpB,QAAI,KAAK,cAAc;AACrB,oBAAc,KAAK,YAAY;AAC/B,WAAK,eAAe;AAAA,IACtB;AAAA,EACF;AAAA;AAAA,EAGQ,aAAa,SAAiB;AACpC,QAAI,KAAK,QAAQ,IAAI,SAAS,UAAW;AAGzC,QAAI,CAAC,KAAK,qBAAqB;AAC7B,WAAK,sBAAsB,QAAQ,OAAO,MAAM,KAAK,QAAQ,MAAM;AACnE,WAAK,aAAa;AAElB,YAAM,QAAQ,CACZ,OACA,UACA,aACG;AACH,cAAM,MAAM,OAAO,UAAU,WAAW,QAAQ,MAAM,SAAS;AAC/D,cAAM,YAAY,IAAI,MAAM,KAAK,KAAK,CAAC,GAAG;AAG1C,YAAI,CAAC,KAAK,YAAY;AACpB,eAAK,cAAc;AAAA,QACrB;AAEA,eAAO,KAAK,oBAAqB,OAAO,UAAU,QAAQ;AAAA,MAC5D;AAEA,cAAQ,OAAO,QAAQ;AAAA,IACzB;AAGA,SAAK,aAAa;AAGlB,UAAM,eAAe,QAAQ,MAAM,IAAI;AACvC,UAAM,sBAAsB,aAAa,QAAQ,cAAc,KAAK,CAAC;AAErE,QAAI,sBAAsB,GAAG;AAE3B,WAAK,iBAAiB;AAAA,IACxB,OAAO;AACL,WAAK,iBAAiB;AAAA,IACxB;AAGA,UAAM,qBACJ,KAAK,oBAAoB,KAAK,iBAAiB,KAAK;AAGtD,aAAS,WAAW,QAAQ,QAAQ,GAAG,CAAC,kBAAkB;AAG1D,aAAS,gBAAgB,QAAQ,MAAM;AAGvC,iBAAa,QAAQ,CAAC,SAAS;AAC7B,cAAQ,OAAO,MAAM,GAAG,IAAI;AAAA,CAAU;AAAA,IACxC,CAAC;AAGD,SAAK,oBAAoB,aAAa;AAGtC,SAAK,aAAa;AAClB,SAAK,aAAa;AAAA,EACpB;AAAA,EAEO,OAAO;AACZ,SAAK,YAAY;AAAA,EACnB;AAAA,EAEO,aACL,cAKA;AACA,eAAW,EAAE,eAAe,MAAM,OAAO,KAAK,cAAc;AAC1D,YAAM,YAAY,KAAK,qBAAqB;AAAA,QAC1C,CAAC,OAAO,GAAG,kBAAkB;AAAA,MAC/B;AACA,UAAI,WAAW;AACb,cAAM,QAAQ,UAAU,MAAM,KAAK,CAAC,MAAM,EAAE,SAAS,IAAI;AACzD,YAAI,OAAO;AAET,iBAAO,OAAO,OAAO,MAAM;AAAA,QAC7B,OAAO;AAEL,cAAI,OAAO,WAAW,QAAW;AAC/B,mBAAO,SAAS;AAAA,UAClB;AACA,gBAAM,WAAkB;AAAA,YACtB;AAAA,YACA,QAAQ,OAAO;AAAA,YACf,MAAM,OAAO,QAAQ;AAAA,YACrB,OAAO,OAAO;AAAA,YACd,cAAc,OAAO;AAAA,YACrB,mBAAmB,OAAO,qBAAqB;AAAA,UACjD;AACA,oBAAU,MAAM,KAAK,QAAQ;AAAA,QAC/B;AAAA,MACF,OAAO;AAEL,cAAM,WAAkB;AAAA,UACtB;AAAA,UACA,QAAQ,OAAO,UAAU;AAAA,UACzB,MAAM,OAAO,QAAQ;AAAA,UACrB,OAAO,OAAO;AAAA,UACd,cAAc,OAAO;AAAA,UACrB,mBAAmB,OAAO,qBAAqB;AAAA,QACjD;AACA,aAAK,qBAAqB,KAAK;AAAA,UAC7B;AAAA,UACA,OAAO,CAAC,QAAQ;AAAA,QAClB,CAAC;AAAA,MACH;AAAA,IACF;AAGA,SAAK,qBAAqB;AAAA,EAC5B;AAAA,EAEQ,cAAc,QAAwB;AAC5C,UAAM,cAAsC;AAAA,MAC1C,SAAS;AAAA,MACT,UAAU;AAAA;AAAA,MACV,OAAO;AAAA,MACP,UAAU;AAAA,MACV,OAAO;AAAA,IACT;AACA,WAAO,YAAY,MAAM,KAAK;AAAA,EAChC;AAAA,EAEQ,cAAc,WAAuC;AAC3D,UAAM,aACJ,KAAK,yBAAyB,UAAU,cAAc;AACxD,UAAM,YAAY,GAAG,UAAU,aAAa,GAAG,IAAI,OAAO,UAAU,CAAC;AAErE,UAAM,SAAS,UAAU,MAAM,IAAI,CAAC,UAAU;AAC5C,UAAI,aAAa;AACjB,UAAI,WAAW;AACf,UAAI,OAAO,KAAK,cAAc,MAAM,MAAM;AAC1C,UAAI,MAAM,WAAW,YAAY;AAE/B,eAAO,eAAe,MAAM,oBAAqB,eAAe,MAAM;AACtE,qBAAa;AACb,mBAAW;AAAA,MACb,WAAW,MAAM,WAAW,SAAS;AACnC,qBAAa;AACb,mBAAW;AAAA,MACb,WAAW,MAAM,WAAW,cAAc,MAAM,WAAW,SAAS;AAClE,qBAAa;AACb,mBAAW;AAAA,MACb,OAAO;AACL,qBAAa;AACb,mBAAW;AAAA,MACb;AAGA,YAAM,cAAc,GAAG,SAAS,IAAI,MAAM,IAAI,KAAK,UAAU,GAAG,IAAI,IAAI,MAAM,MAAM,GAAG,SAAS,IAAI,QAAQ;AAE5G,aAAO,GAAG,UAAU,GAAG,WAAW,GAAG,QAAQ;AAAA,IAC/C,CAAC;AAED,WAAO,GAAG,KAAK,QAAQ,IAAI,MAAM,KAAK,SAAS,IAAI,OAAO,KAAK,GAAG,CAAC;AAAA,EACrE;AAAA;AAAA,EAGQ,uBAAuB;AAC7B,UAAM,iBAAiB,QAAQ,OAAO;AACtC,UAAM,kBAAkB,iBAAiB;AAEzC,UAAM,QAAQ,KAAK,qBAAqB,IAAI,CAAC,cAAc;AACzD,gBAAU,MAAM,QAAQ,CAAC,UAAU;AACjC,YAAI,MAAM,WAAW,YAAY;AAE/B,gBAAM,qBACH,MAAM,oBAAqB,KAAK,eAAe;AAAA,QACpD;AAAA,MACF,CAAC;AACD,aAAO,KAAK,cAAc,SAAS;AAAA,IACrC,CAAC;AAED,QAAI;AAEJ,QAAI,MAAM,SAAS,iBAAiB;AAClC,YAAM,eAAe,MAAM,MAAM,GAAG,kBAAkB,CAAC;AACvD,YAAM,UAAU,GAAG,KAAK,QAAQ,IAAI,MAAM,WAAW,MAAM,SAAS,aAAa,MAAM;AACvF,gBAAU,gBAAgB,aAAa,KAAK,IAAI,IAAI,OAAO;AAAA,IAC7D,OAAO;AACL,gBAAU,MAAM,KAAK,IAAI;AAAA,IAC3B;AAEA,SAAK,aAAa,OAAO;AAAA,EAC3B;AAAA,EAEO,cAAc;AACnB,WAAO,KAAK;AAAA,EACd;AACF;AAEO,MAAM,SAAS,OAAO,YAAY;","names":[]}
+ {"version":3,"sources":["../../src/log.ts"],"sourcesContent":["import { type IntlayerConfig, getConfiguration } from '@intlayer/config';\nimport readline from 'readline';\nimport { sortAlphabetically } from './utils/sortAlphabetically';\n\nexport type State = {\n type: 'local' | 'distant';\n status: 'pending' | 'fetching' | 'fetched' | 'error' | 'imported' | 'built';\n icon?: string;\n error?: Error;\n errorMessage?: string;\n spinnerFrameIndex?: number;\n};\n\nexport type DictionariesStatus = {\n dictionaryKey: string;\n state: State[];\n};\n\nconst LINE_DETECTOR = '\\u200B\\u200B\\u200B'; // Three zero-width spaces\nconst SPINNER_FRAMES = ['⠋', '⠙', '⠹', '⠸', '⠼', '⠴', '⠦', '⠧', '⠇', '⠏'];\n\n// ANSI color codes\nconst RESET = '\\x1b[0m';\nconst GREEN = '\\x1b[32m';\nconst RED = '\\x1b[31m';\nconst BLUE = '\\x1b[34m';\nconst GREY = '\\x1b[90m';\nconst GREY_DARK = '\\x1b[90m';\n\nclass Logger {\n private dictionariesStatuses: DictionariesStatus[] = [];\n private spinnerTimer: NodeJS.Timeout | null = null;\n private maxDictionaryKeyLength: number = 0;\n private previousLineCount: number = 0;\n private lineDifCounter: number = 0;\n private originalStdoutWrite:\n | ((\n chunk: string | Uint8Array, // `chunk` can be either a string or a Uint8Array\n encoding?: BufferEncoding, // `encoding` is optional and should be a BufferEncoding\n callback?: (err?: Error | null) => void // `callback` is optional and a function\n ) => boolean)\n | null = null;\n private extraLines: number = 0;\n private isUpdating: boolean = false;\n private config?: IntlayerConfig;\n\n // Singleton instance\n private static instance: Logger;\n\n private constructor() {}\n\n public static getInstance(): Logger {\n if (!Logger.instance) {\n Logger.instance = new Logger();\n }\n return Logger.instance;\n }\n\n public init(\n localDictionariesKeys: string[],\n distantDictionariesKeys: string[],\n configuration: IntlayerConfig = getConfiguration()\n ) {\n this.config = configuration;\n\n const allKeys = Array.from(\n new Set(\n [...localDictionariesKeys, ...distantDictionariesKeys].sort(\n sortAlphabetically\n )\n )\n );\n\n this.maxDictionaryKeyLength = allKeys.reduce(\n (max, key) => (key.length > max ? 
key.length : max),\n 0\n );\n\n this.dictionariesStatuses = allKeys.map((dictionaryKey) => {\n const states: State[] = [];\n\n if (localDictionariesKeys.includes(dictionaryKey)) {\n states.push({\n type: 'local',\n status: 'pending',\n spinnerFrameIndex: 0,\n });\n }\n if (distantDictionariesKeys.includes(dictionaryKey)) {\n states.push({\n type: 'distant',\n status: 'pending',\n spinnerFrameIndex: 0,\n });\n }\n\n return {\n dictionaryKey,\n state: states,\n };\n });\n\n // Start spinner timer\n this.startSpinner();\n\n // Update all status lines (this will output the initial statuses)\n this.updateAllStatusLines();\n }\n\n // New method to add dictionary keys after initialization\n public addDictionaryKeys(\n type: 'local' | 'distant',\n dictionaryKeys: string[]\n ) {\n for (const dictionaryKey of dictionaryKeys) {\n // Update maxDictionaryKeyLength if the new key is longer\n if (dictionaryKey.length > this.maxDictionaryKeyLength) {\n this.maxDictionaryKeyLength = dictionaryKey.length;\n }\n\n let statusObj = this.dictionariesStatuses.find(\n (ds) => ds.dictionaryKey === dictionaryKey\n );\n\n if (!statusObj) {\n // If the dictionaryKey doesn't exist yet, create a new DictionariesStatus\n\n statusObj = {\n dictionaryKey,\n state: [],\n };\n this.dictionariesStatuses.push(statusObj);\n\n const newState: State = {\n type,\n status: 'pending',\n spinnerFrameIndex: 0,\n };\n statusObj.state.push(newState);\n } else {\n const existingState = statusObj.state.find((s) => s.type === type);\n if (!existingState) {\n // Add new state for the type\n\n const newState: State = {\n type,\n status: 'pending',\n spinnerFrameIndex: 0,\n };\n statusObj.state.push(newState);\n }\n }\n }\n\n // Call updateAllStatusLines() to refresh the output\n this.updateAllStatusLines();\n }\n\n private startSpinner() {\n if (!this.spinnerTimer) {\n this.spinnerTimer = setInterval(() => {\n this.updateAllStatusLines();\n }, 100); // Update every 100ms\n }\n }\n\n private stopSpinner() {\n if (this.spinnerTimer) {\n clearInterval(this.spinnerTimer);\n this.spinnerTimer = null;\n }\n }\n\n // Method to update the terminal output\n private updateOutput(content: string) {\n if (this.config?.log.mode !== 'verbose') return;\n\n // Monkey-patch process.stdout.write to keep track of extra lines\n if (!this.originalStdoutWrite) {\n this.originalStdoutWrite = process.stdout.write.bind(process.stdout);\n this.extraLines = 0;\n\n const write = (\n chunk: string | Uint8Array, // `chunk` can be either a string or a Uint8Array\n encoding?: BufferEncoding, // `encoding` is optional and should be a BufferEncoding\n callback?: (err?: Error | null) => void // `callback` is optional and a function\n ) => {\n const str = typeof chunk === 'string' ? chunk : chunk.toString();\n const newLines = (str.match(/\\n/g) ?? 
[]).length;\n\n // If the write is not initiated by Logger's updateOutput method\n if (!this.isUpdating) {\n this.extraLines += newLines;\n }\n\n return this.originalStdoutWrite!(chunk, encoding, callback);\n };\n\n process.stdout.write = write as typeof process.stdout.write;\n }\n\n // Set a flag to indicate that updateOutput is running\n this.isUpdating = true;\n\n // Adjust lineDifCounter if LINE_DETECTOR is not the first line\n const contentLines = content.split('\\n');\n const indexOfLineDetector = contentLines.indexOf(LINE_DETECTOR.trim());\n\n if (indexOfLineDetector > 0) {\n // LINE_DETECTOR is not at the first line\n this.lineDifCounter = indexOfLineDetector;\n } else {\n this.lineDifCounter = 0;\n }\n\n // Calculate total lines to move up\n const totalLinesToMoveUp =\n this.previousLineCount + this.lineDifCounter + this.extraLines;\n\n // Move cursor up by totalLinesToMoveUp\n readline.moveCursor(process.stdout, 0, -totalLinesToMoveUp);\n\n // Clear all lines downwards\n readline.clearScreenDown(process.stdout);\n\n // Write the updated content\n contentLines.forEach((line) => {\n process.stdout.write(`${line}\\x1b[K\\n`);\n });\n\n // Update previousLineCount\n this.previousLineCount = contentLines.length;\n\n // Reset extraLines counter and updating flag\n this.extraLines = 0;\n this.isUpdating = false;\n }\n\n public stop() {\n this.stopSpinner();\n }\n\n public updateStatus(\n dictionaries: {\n dictionaryKey: string;\n type: 'local' | 'distant';\n status: Partial<State>;\n }[]\n ) {\n for (const { dictionaryKey, type, status } of dictionaries) {\n const statusObj = this.dictionariesStatuses.find(\n (ds) => ds.dictionaryKey === dictionaryKey\n );\n if (statusObj) {\n const state = statusObj.state.find((s) => s.type === type);\n if (state) {\n // Update existing state\n Object.assign(state, status);\n } else {\n // If the state for this type doesn't exist yet, add it\n if (status.status === undefined) {\n status.status = 'pending'; // Provide default status\n }\n const newState: State = {\n type,\n status: status.status,\n icon: status.icon ?? '',\n error: status.error,\n errorMessage: status.errorMessage,\n spinnerFrameIndex: status.spinnerFrameIndex ?? 0,\n };\n statusObj.state.push(newState);\n }\n } else {\n // If the status object doesn't exist, create it\n const newState: State = {\n type,\n status: status.status ?? 'pending',\n icon: status.icon ?? '',\n error: status.error,\n errorMessage: status.errorMessage,\n spinnerFrameIndex: status.spinnerFrameIndex ?? 0,\n };\n this.dictionariesStatuses.push({\n dictionaryKey,\n state: [newState],\n });\n }\n }\n\n // Update the display after status change\n this.updateAllStatusLines();\n }\n\n private getStatusIcon(status: string): string {\n const statusIcons: Record<string, string> = {\n pending: '⏲',\n fetching: '', // Spinner handled separately\n built: '✔',\n imported: '✔',\n error: '✖',\n };\n return statusIcons[status] ?? '';\n }\n\n private getStatusLine(statusObj: DictionariesStatus): string {\n const keyPadding =\n this.maxDictionaryKeyLength - statusObj.dictionaryKey.length;\n const paddedKey = `${statusObj.dictionaryKey}${' '.repeat(keyPadding)}`;\n\n const states = statusObj.state.map((state) => {\n let colorStart = '';\n let colorEnd = '';\n let icon = this.getStatusIcon(state.status);\n if (state.status === 'fetching') {\n // Use spinner frame\n icon = SPINNER_FRAMES[state.spinnerFrameIndex! 
% SPINNER_FRAMES.length];\n colorStart = BLUE;\n colorEnd = RESET;\n } else if (state.status === 'error') {\n colorStart = RED;\n colorEnd = RESET;\n } else if (state.status === 'imported' || state.status === 'built') {\n colorStart = GREEN;\n colorEnd = RESET;\n } else {\n colorStart = GREY;\n colorEnd = RESET;\n }\n\n // Format the status block\n const statusBlock = `${GREY_DARK}[${state.type}: ${colorStart}${icon} ${state.status}${GREY_DARK}]${colorEnd}`;\n\n return `${colorStart}${statusBlock}${colorEnd}`;\n });\n\n return `${this.config?.log.prefix}- ${paddedKey} ${states.join(' ')}`;\n }\n\n // Replace logUpdate calls with your custom methods\n private updateAllStatusLines() {\n const terminalHeight = process.stdout.rows;\n const maxVisibleLines = terminalHeight - 1;\n\n const lines = this.dictionariesStatuses.map((statusObj) => {\n statusObj.state.forEach((state) => {\n if (state.status === 'fetching') {\n // Update spinner frame\n state.spinnerFrameIndex =\n (state.spinnerFrameIndex! + 1) % SPINNER_FRAMES.length;\n }\n });\n return this.getStatusLine(statusObj);\n });\n\n let content;\n\n if (lines.length > maxVisibleLines) {\n const visibleLines = lines.slice(0, maxVisibleLines - 5);\n const summary = `${this.config?.log.prefix}... and ${lines.length - visibleLines.length} more`;\n content = LINE_DETECTOR + visibleLines.join('\\n') + '\\n' + summary;\n } else {\n content = lines.join('\\n');\n }\n\n this.updateOutput(content);\n }\n\n public getStatuses() {\n return this.dictionariesStatuses;\n }\n}\n\nexport const logger = Logger.getInstance();\n"],"mappings":"AAAA,SAA8B,wBAAwB;AACtD,OAAO,cAAc;AACrB,SAAS,0BAA0B;AAgBnC,MAAM,gBAAgB;AACtB,MAAM,iBAAiB,CAAC,UAAK,UAAK,UAAK,UAAK,UAAK,UAAK,UAAK,UAAK,UAAK,QAAG;AAGxE,MAAM,QAAQ;AACd,MAAM,QAAQ;AACd,MAAM,MAAM;AACZ,MAAM,OAAO;AACb,MAAM,OAAO;AACb,MAAM,YAAY;AAElB,MAAM,OAAO;AAAA,EACH,uBAA6C,CAAC;AAAA,EAC9C,eAAsC;AAAA,EACtC,yBAAiC;AAAA,EACjC,oBAA4B;AAAA,EAC5B,iBAAyB;AAAA,EACzB,sBAMG;AAAA,EACH,aAAqB;AAAA,EACrB,aAAsB;AAAA,EACtB;AAAA;AAAA,EAGR,OAAe;AAAA,EAEP,cAAc;AAAA,EAAC;AAAA,EAEvB,OAAc,cAAsB;AAClC,QAAI,CAAC,OAAO,UAAU;AACpB,aAAO,WAAW,IAAI,OAAO;AAAA,IAC/B;AACA,WAAO,OAAO;AAAA,EAChB;AAAA,EAEO,KACL,uBACA,yBACA,gBAAgC,iBAAiB,GACjD;AACA,SAAK,SAAS;AAEd,UAAM,UAAU,MAAM;AAAA,MACpB,IAAI;AAAA,QACF,CAAC,GAAG,uBAAuB,GAAG,uBAAuB,EAAE;AAAA,UACrD;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,SAAK,yBAAyB,QAAQ;AAAA,MACpC,CAAC,KAAK,QAAS,IAAI,SAAS,MAAM,IAAI,SAAS;AAAA,MAC/C;AAAA,IACF;AAEA,SAAK,uBAAuB,QAAQ,IAAI,CAAC,kBAAkB;AACzD,YAAM,SAAkB,CAAC;AAEzB,UAAI,sBAAsB,SAAS,aAAa,GAAG;AACjD,eAAO,KAAK;AAAA,UACV,MAAM;AAAA,UACN,QAAQ;AAAA,UACR,mBAAmB;AAAA,QACrB,CAAC;AAAA,MACH;AACA,UAAI,wBAAwB,SAAS,aAAa,GAAG;AACnD,eAAO,KAAK;AAAA,UACV,MAAM;AAAA,UACN,QAAQ;AAAA,UACR,mBAAmB;AAAA,QACrB,CAAC;AAAA,MACH;AAEA,aAAO;AAAA,QACL;AAAA,QACA,OAAO;AAAA,MACT;AAAA,IACF,CAAC;AAGD,SAAK,aAAa;AAGlB,SAAK,qBAAqB;AAAA,EAC5B;AAAA;AAAA,EAGO,kBACL,MACA,gBACA;AACA,eAAW,iBAAiB,gBAAgB;AAE1C,UAAI,cAAc,SAAS,KAAK,wBAAwB;AACtD,aAAK,yBAAyB,cAAc;AAAA,MAC9C;AAEA,UAAI,YAAY,KAAK,qBAAqB;AAAA,QACxC,CAAC,OAAO,GAAG,kBAAkB;AAAA,MAC/B;AAEA,UAAI,CAAC,WAAW;AAGd,oBAAY;AAAA,UACV;AAAA,UACA,OAAO,CAAC;AAAA,QACV;AACA,aAAK,qBAAqB,KAAK,SAAS;AAExC,cAAM,WAAkB;AAAA,UACtB;AAAA,UACA,QAAQ;AAAA,UACR,mBAAmB;AAAA,QACrB;AACA,kBAAU,MAAM,KAAK,QAAQ;AAAA,MAC/B,OAAO;AACL,cAAM,gBAAgB,UAAU,MAAM,KAAK,CAAC,MAAM,EAAE,SAAS,IAAI;AACjE,YAAI,CAAC,eAAe;AAGlB,gBAAM,WAAkB;AAAA,YACtB;AAAA,YACA,QAAQ;AAAA,YACR,mBAAmB;AAAA,UACrB;AACA,oBAAU,MAAM,KAAK,QAAQ;AAAA,QAC/B;AAAA,MACF;AAAA,IACF;AAGA,SAAK,qBAAqB;AAAA,EAC5B;AAAA,EAEQ,eAAe;AACrB,QAAI,CAAC,KAAK,cAAc;AACtB,WAAK,eAAe,YAAY,MAAM;AACpC
,aAAK,qBAAqB;AAAA,MAC5B,GAAG,GAAG;AAAA,IACR;AAAA,EACF;AAAA,EAEQ,cAAc;AACpB,QAAI,KAAK,cAAc;AACrB,oBAAc,KAAK,YAAY;AAC/B,WAAK,eAAe;AAAA,IACtB;AAAA,EACF;AAAA;AAAA,EAGQ,aAAa,SAAiB;AACpC,QAAI,KAAK,QAAQ,IAAI,SAAS,UAAW;AAGzC,QAAI,CAAC,KAAK,qBAAqB;AAC7B,WAAK,sBAAsB,QAAQ,OAAO,MAAM,KAAK,QAAQ,MAAM;AACnE,WAAK,aAAa;AAElB,YAAM,QAAQ,CACZ,OACA,UACA,aACG;AACH,cAAM,MAAM,OAAO,UAAU,WAAW,QAAQ,MAAM,SAAS;AAC/D,cAAM,YAAY,IAAI,MAAM,KAAK,KAAK,CAAC,GAAG;AAG1C,YAAI,CAAC,KAAK,YAAY;AACpB,eAAK,cAAc;AAAA,QACrB;AAEA,eAAO,KAAK,oBAAqB,OAAO,UAAU,QAAQ;AAAA,MAC5D;AAEA,cAAQ,OAAO,QAAQ;AAAA,IACzB;AAGA,SAAK,aAAa;AAGlB,UAAM,eAAe,QAAQ,MAAM,IAAI;AACvC,UAAM,sBAAsB,aAAa,QAAQ,cAAc,KAAK,CAAC;AAErE,QAAI,sBAAsB,GAAG;AAE3B,WAAK,iBAAiB;AAAA,IACxB,OAAO;AACL,WAAK,iBAAiB;AAAA,IACxB;AAGA,UAAM,qBACJ,KAAK,oBAAoB,KAAK,iBAAiB,KAAK;AAGtD,aAAS,WAAW,QAAQ,QAAQ,GAAG,CAAC,kBAAkB;AAG1D,aAAS,gBAAgB,QAAQ,MAAM;AAGvC,iBAAa,QAAQ,CAAC,SAAS;AAC7B,cAAQ,OAAO,MAAM,GAAG,IAAI;AAAA,CAAU;AAAA,IACxC,CAAC;AAGD,SAAK,oBAAoB,aAAa;AAGtC,SAAK,aAAa;AAClB,SAAK,aAAa;AAAA,EACpB;AAAA,EAEO,OAAO;AACZ,SAAK,YAAY;AAAA,EACnB;AAAA,EAEO,aACL,cAKA;AACA,eAAW,EAAE,eAAe,MAAM,OAAO,KAAK,cAAc;AAC1D,YAAM,YAAY,KAAK,qBAAqB;AAAA,QAC1C,CAAC,OAAO,GAAG,kBAAkB;AAAA,MAC/B;AACA,UAAI,WAAW;AACb,cAAM,QAAQ,UAAU,MAAM,KAAK,CAAC,MAAM,EAAE,SAAS,IAAI;AACzD,YAAI,OAAO;AAET,iBAAO,OAAO,OAAO,MAAM;AAAA,QAC7B,OAAO;AAEL,cAAI,OAAO,WAAW,QAAW;AAC/B,mBAAO,SAAS;AAAA,UAClB;AACA,gBAAM,WAAkB;AAAA,YACtB;AAAA,YACA,QAAQ,OAAO;AAAA,YACf,MAAM,OAAO,QAAQ;AAAA,YACrB,OAAO,OAAO;AAAA,YACd,cAAc,OAAO;AAAA,YACrB,mBAAmB,OAAO,qBAAqB;AAAA,UACjD;AACA,oBAAU,MAAM,KAAK,QAAQ;AAAA,QAC/B;AAAA,MACF,OAAO;AAEL,cAAM,WAAkB;AAAA,UACtB;AAAA,UACA,QAAQ,OAAO,UAAU;AAAA,UACzB,MAAM,OAAO,QAAQ;AAAA,UACrB,OAAO,OAAO;AAAA,UACd,cAAc,OAAO;AAAA,UACrB,mBAAmB,OAAO,qBAAqB;AAAA,QACjD;AACA,aAAK,qBAAqB,KAAK;AAAA,UAC7B;AAAA,UACA,OAAO,CAAC,QAAQ;AAAA,QAClB,CAAC;AAAA,MACH;AAAA,IACF;AAGA,SAAK,qBAAqB;AAAA,EAC5B;AAAA,EAEQ,cAAc,QAAwB;AAC5C,UAAM,cAAsC;AAAA,MAC1C,SAAS;AAAA,MACT,UAAU;AAAA;AAAA,MACV,OAAO;AAAA,MACP,UAAU;AAAA,MACV,OAAO;AAAA,IACT;AACA,WAAO,YAAY,MAAM,KAAK;AAAA,EAChC;AAAA,EAEQ,cAAc,WAAuC;AAC3D,UAAM,aACJ,KAAK,yBAAyB,UAAU,cAAc;AACxD,UAAM,YAAY,GAAG,UAAU,aAAa,GAAG,IAAI,OAAO,UAAU,CAAC;AAErE,UAAM,SAAS,UAAU,MAAM,IAAI,CAAC,UAAU;AAC5C,UAAI,aAAa;AACjB,UAAI,WAAW;AACf,UAAI,OAAO,KAAK,cAAc,MAAM,MAAM;AAC1C,UAAI,MAAM,WAAW,YAAY;AAE/B,eAAO,eAAe,MAAM,oBAAqB,eAAe,MAAM;AACtE,qBAAa;AACb,mBAAW;AAAA,MACb,WAAW,MAAM,WAAW,SAAS;AACnC,qBAAa;AACb,mBAAW;AAAA,MACb,WAAW,MAAM,WAAW,cAAc,MAAM,WAAW,SAAS;AAClE,qBAAa;AACb,mBAAW;AAAA,MACb,OAAO;AACL,qBAAa;AACb,mBAAW;AAAA,MACb;AAGA,YAAM,cAAc,GAAG,SAAS,IAAI,MAAM,IAAI,KAAK,UAAU,GAAG,IAAI,IAAI,MAAM,MAAM,GAAG,SAAS,IAAI,QAAQ;AAE5G,aAAO,GAAG,UAAU,GAAG,WAAW,GAAG,QAAQ;AAAA,IAC/C,CAAC;AAED,WAAO,GAAG,KAAK,QAAQ,IAAI,MAAM,KAAK,SAAS,IAAI,OAAO,KAAK,GAAG,CAAC;AAAA,EACrE;AAAA;AAAA,EAGQ,uBAAuB;AAC7B,UAAM,iBAAiB,QAAQ,OAAO;AACtC,UAAM,kBAAkB,iBAAiB;AAEzC,UAAM,QAAQ,KAAK,qBAAqB,IAAI,CAAC,cAAc;AACzD,gBAAU,MAAM,QAAQ,CAAC,UAAU;AACjC,YAAI,MAAM,WAAW,YAAY;AAE/B,gBAAM,qBACH,MAAM,oBAAqB,KAAK,eAAe;AAAA,QACpD;AAAA,MACF,CAAC;AACD,aAAO,KAAK,cAAc,SAAS;AAAA,IACrC,CAAC;AAED,QAAI;AAEJ,QAAI,MAAM,SAAS,iBAAiB;AAClC,YAAM,eAAe,MAAM,MAAM,GAAG,kBAAkB,CAAC;AACvD,YAAM,UAAU,GAAG,KAAK,QAAQ,IAAI,MAAM,WAAW,MAAM,SAAS,aAAa,MAAM;AACvF,gBAAU,gBAAgB,aAAa,KAAK,IAAI,IAAI,OAAO;AAAA,IAC7D,OAAO;AACL,gBAAU,MAAM,KAAK,IAAI;AAAA,IAC3B;AAEA,SAAK,aAAa,OAAO;AAAA,EAC3B;AAAA,EAEO,cAAc;AACnB,WAAO,KAAK;AAAA,EACd;AACF;AAEO,MAAM,SAAS,OAAO,YAAY;","names":[]}
@@ -1,8 +1,8 @@
+ import { getConfiguration } from "@intlayer/config";
  import { createI18nextDictionaries } from "./convertContentDeclarationInto18nDictionaries.mjs";
  import {
  writeDictionary
  } from "./writeDictionary.mjs";
- import { getConfiguration } from "@intlayer/config";
  const buildI18nDictionary = async (contentDeclarations, configuration = getConfiguration()) => {
  const dictionariesDeclaration = contentDeclarations.reduce((acc, dictionary) => {
  const { key, content } = dictionary;
@@ -1 +1 @@
- {"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/i18next_dictionary/buildI18nextDictionary.ts"],"sourcesContent":["import type { Dictionary } from '@intlayer/core';\nimport { createI18nextDictionaries } from './convertContentDeclarationInto18nDictionaries';\nimport {\n writeDictionary,\n type DictionariesDeclaration,\n} from './writeDictionary';\nimport { getConfiguration } from '@intlayer/config';\n\n/**\n * This function transpile content declaration to i18n dictionaries\n */\nexport const buildI18nDictionary = async (\n contentDeclarations: Dictionary[],\n configuration = getConfiguration()\n) => {\n // Create dictionaries for each nested content and format them\n const dictionariesDeclaration: DictionariesDeclaration =\n contentDeclarations.reduce((acc, dictionary) => {\n const { key, content } = dictionary;\n const i18Content = createI18nextDictionaries(content, configuration);\n\n return {\n ...acc,\n [key]: i18Content,\n };\n }, {});\n\n // Write the dictionaries to the file system\n const dictionariesPaths: string[] = await writeDictionary(\n dictionariesDeclaration,\n configuration\n );\n\n return dictionariesPaths;\n};\n"],"mappings":"AACA,SAAS,iCAAiC;AAC1C;AAAA,EACE;AAAA,OAEK;AACP,SAAS,wBAAwB;AAK1B,MAAM,sBAAsB,OACjC,qBACA,gBAAgB,iBAAiB,MAC9B;AAEH,QAAM,0BACJ,oBAAoB,OAAO,CAAC,KAAK,eAAe;AAC9C,UAAM,EAAE,KAAK,QAAQ,IAAI;AACzB,UAAM,aAAa,0BAA0B,SAAS,aAAa;AAEnE,WAAO;AAAA,MACL,GAAG;AAAA,MACH,CAAC,GAAG,GAAG;AAAA,IACT;AAAA,EACF,GAAG,CAAC,CAAC;AAGP,QAAM,oBAA8B,MAAM;AAAA,IACxC;AAAA,IACA;AAAA,EACF;AAEA,SAAO;AACT;","names":[]}
+ {"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/i18next_dictionary/buildI18nextDictionary.ts"],"sourcesContent":["import { getConfiguration } from '@intlayer/config';\nimport type { Dictionary } from '@intlayer/core';\nimport { createI18nextDictionaries } from './convertContentDeclarationInto18nDictionaries';\nimport {\n writeDictionary,\n type DictionariesDeclaration,\n} from './writeDictionary';\n\n/**\n * This function transpile content declaration to i18n dictionaries\n */\nexport const buildI18nDictionary = async (\n contentDeclarations: Dictionary[],\n configuration = getConfiguration()\n) => {\n // Create dictionaries for each nested content and format them\n const dictionariesDeclaration: DictionariesDeclaration =\n contentDeclarations.reduce((acc, dictionary) => {\n const { key, content } = dictionary;\n const i18Content = createI18nextDictionaries(content, configuration);\n\n return {\n ...acc,\n [key]: i18Content,\n };\n }, {});\n\n // Write the dictionaries to the file system\n const dictionariesPaths: string[] = await writeDictionary(\n dictionariesDeclaration,\n configuration\n );\n\n return dictionariesPaths;\n};\n"],"mappings":"AAAA,SAAS,wBAAwB;AAEjC,SAAS,iCAAiC;AAC1C;AAAA,EACE;AAAA,OAEK;AAKA,MAAM,sBAAsB,OACjC,qBACA,gBAAgB,iBAAiB,MAC9B;AAEH,QAAM,0BACJ,oBAAoB,OAAO,CAAC,KAAK,eAAe;AAC9C,UAAM,EAAE,KAAK,QAAQ,IAAI;AACzB,UAAM,aAAa,0BAA0B,SAAS,aAAa;AAEnE,WAAO;AAAA,MACL,GAAG;AAAA,MACH,CAAC,GAAG,GAAG;AAAA,IACT;AAAA,EACF,GAAG,CAAC,CAAC;AAGP,QAAM,oBAA8B,MAAM;AAAA,IACxC;AAAA,IACA;AAAA,EACF;AAEA,SAAO;AACT;","names":[]}
@@ -1,6 +1,6 @@
  import { getConfiguration, normalizePath } from "@intlayer/config";
  import { basename, extname, relative } from "path";
- import { getFileHash } from "../../utils.mjs";
+ import { getFileHash } from "../../utils/getFileHash.mjs";
  const generateDictionaryListContent = (dictionaries, format = "esm", configuration = getConfiguration()) => {
  const { mainDir } = configuration.content;
  let content = "";
@@ -1 +1 @@
- {"version":3,"sources":["../../../../src/transpiler/dictionary_to_main/generateDictionaryListContent.ts"],"sourcesContent":["import { getConfiguration, normalizePath } from '@intlayer/config';\nimport { basename, extname, relative } from 'path';\nimport { getFileHash } from '../../utils';\n\n/**\n * This function generates the content of the dictionary list file\n */\nexport const generateDictionaryListContent = (\n dictionaries: string[],\n format: 'cjs' | 'esm' = 'esm',\n configuration = getConfiguration()\n): string => {\n const { mainDir } = configuration.content;\n\n let content = '';\n\n const dictionariesRef = dictionaries.map((dictionaryPath) => ({\n relativePath: normalizePath(relative(mainDir, dictionaryPath)),\n id: basename(dictionaryPath, extname(dictionaryPath)), // Get the base name as the dictionary id\n hash: `_${getFileHash(dictionaryPath)}`, // Get the hash of the dictionary to avoid conflicts\n }));\n\n // Import all dictionaries\n dictionariesRef.forEach((dictionary) => {\n if (format === 'esm')\n content += `import ${dictionary.hash} from '${dictionary.relativePath}' with { type: 'json' };\\n`;\n if (format === 'cjs')\n content += `const ${dictionary.hash} = require('${dictionary.relativePath}');\\n`;\n });\n\n content += '\\n';\n\n // Format Dictionary Map\n const formattedDictionaryMap: string = dictionariesRef\n .map((dictionary) => ` \"${dictionary.id}\": ${dictionary.hash}`)\n .join(',\\n');\n\n if (format === 'esm')\n content += `export default {\\n${formattedDictionaryMap}\\n};\\n`;\n if (format === 'cjs')\n content += `module.exports = {\\n${formattedDictionaryMap}\\n};\\n`;\n\n return content;\n};\n"],"mappings":"AAAA,SAAS,kBAAkB,qBAAqB;AAChD,SAAS,UAAU,SAAS,gBAAgB;AAC5C,SAAS,mBAAmB;AAKrB,MAAM,gCAAgC,CAC3C,cACA,SAAwB,OACxB,gBAAgB,iBAAiB,MACtB;AACX,QAAM,EAAE,QAAQ,IAAI,cAAc;AAElC,MAAI,UAAU;AAEd,QAAM,kBAAkB,aAAa,IAAI,CAAC,oBAAoB;AAAA,IAC5D,cAAc,cAAc,SAAS,SAAS,cAAc,CAAC;AAAA,IAC7D,IAAI,SAAS,gBAAgB,QAAQ,cAAc,CAAC;AAAA;AAAA,IACpD,MAAM,IAAI,YAAY,cAAc,CAAC;AAAA;AAAA,EACvC,EAAE;AAGF,kBAAgB,QAAQ,CAAC,eAAe;AACtC,QAAI,WAAW;AACb,iBAAW,UAAU,WAAW,IAAI,UAAU,WAAW,YAAY;AAAA;AACvE,QAAI,WAAW;AACb,iBAAW,SAAS,WAAW,IAAI,eAAe,WAAW,YAAY;AAAA;AAAA,EAC7E,CAAC;AAED,aAAW;AAGX,QAAM,yBAAiC,gBACpC,IAAI,CAAC,eAAe,MAAM,WAAW,EAAE,MAAM,WAAW,IAAI,EAAE,EAC9D,KAAK,KAAK;AAEb,MAAI,WAAW;AACb,eAAW;AAAA,EAAqB,sBAAsB;AAAA;AAAA;AACxD,MAAI,WAAW;AACb,eAAW;AAAA,EAAuB,sBAAsB;AAAA;AAAA;AAE1D,SAAO;AACT;","names":[]}
+ {"version":3,"sources":["../../../../src/transpiler/dictionary_to_main/generateDictionaryListContent.ts"],"sourcesContent":["import { getConfiguration, normalizePath } from '@intlayer/config';\nimport { basename, extname, relative } from 'path';\nimport { getFileHash } from '../../utils/getFileHash';\n\n/**\n * This function generates the content of the dictionary list file\n */\nexport const generateDictionaryListContent = (\n dictionaries: string[],\n format: 'cjs' | 'esm' = 'esm',\n configuration = getConfiguration()\n): string => {\n const { mainDir } = configuration.content;\n\n let content = '';\n\n const dictionariesRef = dictionaries.map((dictionaryPath) => ({\n relativePath: normalizePath(relative(mainDir, dictionaryPath)),\n id: basename(dictionaryPath, extname(dictionaryPath)), // Get the base name as the dictionary id\n hash: `_${getFileHash(dictionaryPath)}`, // Get the hash of the dictionary to avoid conflicts\n }));\n\n // Import all dictionaries\n dictionariesRef.forEach((dictionary) => {\n if (format === 'esm')\n content += `import ${dictionary.hash} from '${dictionary.relativePath}' with { type: 'json' };\\n`;\n if (format === 'cjs')\n content += `const ${dictionary.hash} = require('${dictionary.relativePath}');\\n`;\n });\n\n content += '\\n';\n\n // Format Dictionary Map\n const formattedDictionaryMap: string = dictionariesRef\n .map((dictionary) => ` \"${dictionary.id}\": ${dictionary.hash}`)\n .join(',\\n');\n\n if (format === 'esm')\n content += `export default {\\n${formattedDictionaryMap}\\n};\\n`;\n if (format === 'cjs')\n content += `module.exports = {\\n${formattedDictionaryMap}\\n};\\n`;\n\n return content;\n};\n"],"mappings":"AAAA,SAAS,kBAAkB,qBAAqB;AAChD,SAAS,UAAU,SAAS,gBAAgB;AAC5C,SAAS,mBAAmB;AAKrB,MAAM,gCAAgC,CAC3C,cACA,SAAwB,OACxB,gBAAgB,iBAAiB,MACtB;AACX,QAAM,EAAE,QAAQ,IAAI,cAAc;AAElC,MAAI,UAAU;AAEd,QAAM,kBAAkB,aAAa,IAAI,CAAC,oBAAoB;AAAA,IAC5D,cAAc,cAAc,SAAS,SAAS,cAAc,CAAC;AAAA,IAC7D,IAAI,SAAS,gBAAgB,QAAQ,cAAc,CAAC;AAAA;AAAA,IACpD,MAAM,IAAI,YAAY,cAAc,CAAC;AAAA;AAAA,EACvC,EAAE;AAGF,kBAAgB,QAAQ,CAAC,eAAe;AACtC,QAAI,WAAW;AACb,iBAAW,UAAU,WAAW,IAAI,UAAU,WAAW,YAAY;AAAA;AACvE,QAAI,WAAW;AACb,iBAAW,SAAS,WAAW,IAAI,eAAe,WAAW,YAAY;AAAA;AAAA,EAC7E,CAAC;AAED,aAAW;AAGX,QAAM,yBAAiC,gBACpC,IAAI,CAAC,eAAe,MAAM,WAAW,EAAE,MAAM,WAAW,IAAI,EAAE,EAC9D,KAAK,KAAK;AAEb,MAAI,WAAW;AACb,eAAW;AAAA,EAAqB,sBAAsB;AAAA;AAAA;AACxD,MAAI,WAAW;AACb,eAAW;AAAA,EAAuB,sBAAsB;AAAA;AAAA;AAE1D,SAAO;AACT;","names":[]}
@@ -2,7 +2,8 @@ import { Locales, getConfiguration, normalizePath } from "@intlayer/config";
  import fg from "fast-glob";
  import { existsSync, mkdirSync, writeFileSync } from "fs";
  import { basename, extname, join, relative } from "path";
- import { getFileHash, kebabCaseToCamelCase } from "../../utils.mjs";
+ import { getFileHash } from "../../utils/getFileHash.mjs";
+ import { kebabCaseToCamelCase } from "../../utils/kebabCaseToCamelCase.mjs";
  const getTypeName = (key) => `${kebabCaseToCamelCase(key)}Content`;
  const formatLocales = (locales) => locales.map((locale) => {
  for (const key in Locales) {
@@ -1 +1 @@
- {"version":3,"sources":["../../../../src/transpiler/dictionary_to_type/createModuleAugmentation.ts"],"sourcesContent":["import { Locales, getConfiguration, normalizePath } from '@intlayer/config';\nimport fg from 'fast-glob';\nimport { existsSync, mkdirSync, writeFileSync } from 'fs';\nimport { basename, extname, join, relative } from 'path';\nimport { getFileHash, kebabCaseToCamelCase } from '../../utils';\n\nexport const getTypeName = (key: string): string =>\n `${kebabCaseToCamelCase(key)}Content`;\n\nconst formatLocales = (locales: Locales[]): string =>\n locales\n .map((locale) => {\n for (const key in Locales) {\n if (Locales[key as keyof typeof Locales] === locale) {\n return `Locales.${key}`;\n }\n }\n })\n .join(' | ');\n\n/**\n * This function generates the content of the module augmentation file\n */\nconst generateTypeIndexContent = (\n typeFiles: string[],\n configuration = getConfiguration()\n): string => {\n const { content, internationalization } = configuration;\n const { moduleAugmentationDir } = content;\n const { locales, requiredLocales, strictMode } = internationalization;\n\n let fileContent =\n \"/* eslint-disable */\\nimport { Locales } from 'intlayer';\\n\";\n\n const dictionariesRef = typeFiles.map((dictionaryPath) => ({\n relativePath: `./${relative(moduleAugmentationDir, dictionaryPath)}`,\n id: basename(dictionaryPath, extname(dictionaryPath)), // Get the base name as the dictionary id (without the extension)\n hash: `_${getFileHash(dictionaryPath)}`, // Get the hash of the dictionary to avoid conflicts\n }));\n\n // Import all dictionaries\n dictionariesRef.forEach((dictionary) => {\n fileContent += `import ${dictionary.hash} from '${dictionary.relativePath}';\\n`;\n });\n\n fileContent += '\\n';\n\n // Format Dictionary Map\n const formattedDictionaryMap: string = dictionariesRef\n .map((dictionary) => ` \"${dictionary.id}\": typeof ${dictionary.hash};`)\n .join('\\n');\n\n const requiredLocalesValues =\n requiredLocales.length > 0\n ? requiredLocales.filter((locale) =>\n locales.map((locale) => String(locale)).includes(String(locale))\n )\n : locales;\n\n const formattedLocales = formatLocales(locales);\n const formattedRequiredLocales = formatLocales(requiredLocalesValues);\n\n const strictModeRecord =\n strictMode === 'strict'\n ? `interface IConfigLocales<Content> extends Record<DeclaredLocales, Content> {}`\n : strictMode === 'inclusive'\n ? 
`interface IConfigLocales<Content> extends Record<ExtractedLocales, Content>, Partial<Record<ExcludedLocales, Content>> {}`\n : `interface IConfigLocales<Content> extends Partial<Record<Locales, Content>> {}`;\n\n /**\n * Write the module augmentation to extend the intlayer module with the dictionaries types\n * Will suggest the type resulting of the dictionaries\n *\n * declare module 'intlayer' {\n * interface IntlayerDictionaryTypesConnector = {\n * dictionaries: {\n * id: DictionaryType;\n * }\n * }\n *\n * type ConfigLocales = Locales.ENGLISH | Locales.FRENCH | Locales.SPANISH;\n * type ExtractedLocales = Extract<Locales, ConfigLocales>;\n *\n * interface IConfigLocales<Content> extends Record<ExtractedLocales, Content>, Partial<Record<ExcludedLocales, Content>> {}\n *\n *\n * }\n * See https://www.typescriptlang.org/docs/handbook/declaration-merging.html#module-augmentation\n */\n fileContent += `declare module 'intlayer' {\\n`;\n fileContent += ` interface IntlayerDictionaryTypesConnector {\\n${formattedDictionaryMap}\\n }\\n\\n`;\n fileContent += ` type DeclaredLocales = ${formattedLocales};\\n`;\n fileContent += ` type RequiredLocales = ${formattedRequiredLocales};\\n`;\n fileContent += ` type ExtractedLocales = Extract<Locales, RequiredLocales>;\\n`;\n fileContent += ` type ExcludedLocales = Exclude<Locales, RequiredLocales>;\\n`;\n fileContent += ` ${strictModeRecord}\\n`;\n fileContent += `}`;\n\n return fileContent;\n};\n\n/**\n * This function generates a index file merging all the types\n */\nexport const createModuleAugmentation = (\n configuration = getConfiguration()\n) => {\n const { moduleAugmentationDir, typesDir } = configuration.content;\n\n // Create main directory if it doesn't exist\n if (!existsSync(moduleAugmentationDir)) {\n mkdirSync(moduleAugmentationDir, { recursive: true });\n }\n\n const dictionariesTypesDefinitions: string[] = fg.sync(\n normalizePath(`${typesDir}/*.ts`),\n {\n ignore: ['**/*.d.ts'],\n }\n );\n // Create the dictionary list file\n\n const tsContent = generateTypeIndexContent(dictionariesTypesDefinitions);\n writeFileSync(join(moduleAugmentationDir, 'intlayer.d.ts'), 
tsContent);\n};\n"],"mappings":"AAAA,SAAS,SAAS,kBAAkB,qBAAqB;AACzD,OAAO,QAAQ;AACf,SAAS,YAAY,WAAW,qBAAqB;AACrD,SAAS,UAAU,SAAS,MAAM,gBAAgB;AAClD,SAAS,aAAa,4BAA4B;AAE3C,MAAM,cAAc,CAAC,QAC1B,GAAG,qBAAqB,GAAG,CAAC;AAE9B,MAAM,gBAAgB,CAAC,YACrB,QACG,IAAI,CAAC,WAAW;AACf,aAAW,OAAO,SAAS;AACzB,QAAI,QAAQ,GAA2B,MAAM,QAAQ;AACnD,aAAO,WAAW,GAAG;AAAA,IACvB;AAAA,EACF;AACF,CAAC,EACA,KAAK,KAAK;AAKf,MAAM,2BAA2B,CAC/B,WACA,gBAAgB,iBAAiB,MACtB;AACX,QAAM,EAAE,SAAS,qBAAqB,IAAI;AAC1C,QAAM,EAAE,sBAAsB,IAAI;AAClC,QAAM,EAAE,SAAS,iBAAiB,WAAW,IAAI;AAEjD,MAAI,cACF;AAEF,QAAM,kBAAkB,UAAU,IAAI,CAAC,oBAAoB;AAAA,IACzD,cAAc,KAAK,SAAS,uBAAuB,cAAc,CAAC;AAAA,IAClE,IAAI,SAAS,gBAAgB,QAAQ,cAAc,CAAC;AAAA;AAAA,IACpD,MAAM,IAAI,YAAY,cAAc,CAAC;AAAA;AAAA,EACvC,EAAE;AAGF,kBAAgB,QAAQ,CAAC,eAAe;AACtC,mBAAe,UAAU,WAAW,IAAI,UAAU,WAAW,YAAY;AAAA;AAAA,EAC3E,CAAC;AAED,iBAAe;AAGf,QAAM,yBAAiC,gBACpC,IAAI,CAAC,eAAe,QAAQ,WAAW,EAAE,aAAa,WAAW,IAAI,GAAG,EACxE,KAAK,IAAI;AAEZ,QAAM,wBACJ,gBAAgB,SAAS,IACrB,gBAAgB;AAAA,IAAO,CAAC,WACtB,QAAQ,IAAI,CAACA,YAAW,OAAOA,OAAM,CAAC,EAAE,SAAS,OAAO,MAAM,CAAC;AAAA,EACjE,IACA;AAEN,QAAM,mBAAmB,cAAc,OAAO;AAC9C,QAAM,2BAA2B,cAAc,qBAAqB;AAEpE,QAAM,mBACJ,eAAe,WACX,kFACA,eAAe,cACb,8HACA;AAsBR,iBAAe;AAAA;AACf,iBAAe;AAAA,EAAmD,sBAAsB;AAAA;AAAA;AAAA;AACxF,iBAAe,4BAA4B,gBAAgB;AAAA;AAC3D,iBAAe,4BAA4B,wBAAwB;AAAA;AACnE,iBAAe;AAAA;AACf,iBAAe;AAAA;AACf,iBAAe,KAAK,gBAAgB;AAAA;AACpC,iBAAe;AAEf,SAAO;AACT;AAKO,MAAM,2BAA2B,CACtC,gBAAgB,iBAAiB,MAC9B;AACH,QAAM,EAAE,uBAAuB,SAAS,IAAI,cAAc;AAG1D,MAAI,CAAC,WAAW,qBAAqB,GAAG;AACtC,cAAU,uBAAuB,EAAE,WAAW,KAAK,CAAC;AAAA,EACtD;AAEA,QAAM,+BAAyC,GAAG;AAAA,IAChD,cAAc,GAAG,QAAQ,OAAO;AAAA,IAChC;AAAA,MACE,QAAQ,CAAC,WAAW;AAAA,IACtB;AAAA,EACF;AAGA,QAAM,YAAY,yBAAyB,4BAA4B;AACvE,gBAAc,KAAK,uBAAuB,eAAe,GAAG,SAAS;AACvE;","names":["locale"]}
+ {"version":3,"sources":["../../../../src/transpiler/dictionary_to_type/createModuleAugmentation.ts"],"sourcesContent":["import { Locales, getConfiguration, normalizePath } from '@intlayer/config';\nimport fg from 'fast-glob';\nimport { existsSync, mkdirSync, writeFileSync } from 'fs';\nimport { basename, extname, join, relative } from 'path';\nimport { getFileHash } from '../../utils/getFileHash';\nimport { kebabCaseToCamelCase } from '../../utils/kebabCaseToCamelCase';\n\nexport const getTypeName = (key: string): string =>\n `${kebabCaseToCamelCase(key)}Content`;\n\nconst formatLocales = (locales: Locales[]): string =>\n locales\n .map((locale) => {\n for (const key in Locales) {\n if (Locales[key as keyof typeof Locales] === locale) {\n return `Locales.${key}`;\n }\n }\n })\n .join(' | ');\n\n/**\n * This function generates the content of the module augmentation file\n */\nconst generateTypeIndexContent = (\n typeFiles: string[],\n configuration = getConfiguration()\n): string => {\n const { content, internationalization } = configuration;\n const { moduleAugmentationDir } = content;\n const { locales, requiredLocales, strictMode } = internationalization;\n\n let fileContent =\n \"/* eslint-disable */\\nimport { Locales } from 'intlayer';\\n\";\n\n const dictionariesRef = typeFiles.map((dictionaryPath) => ({\n relativePath: `./${relative(moduleAugmentationDir, dictionaryPath)}`,\n id: basename(dictionaryPath, extname(dictionaryPath)), // Get the base name as the dictionary id (without the extension)\n hash: `_${getFileHash(dictionaryPath)}`, // Get the hash of the dictionary to avoid conflicts\n }));\n\n // Import all dictionaries\n dictionariesRef.forEach((dictionary) => {\n fileContent += `import ${dictionary.hash} from '${dictionary.relativePath}';\\n`;\n });\n\n fileContent += '\\n';\n\n // Format Dictionary Map\n const formattedDictionaryMap: string = dictionariesRef\n .map((dictionary) => ` \"${dictionary.id}\": typeof ${dictionary.hash};`)\n .join('\\n');\n\n const requiredLocalesValues =\n requiredLocales.length > 0\n ? requiredLocales.filter((locale) =>\n locales.map((locale) => String(locale)).includes(String(locale))\n )\n : locales;\n\n const formattedLocales = formatLocales(locales);\n const formattedRequiredLocales = formatLocales(requiredLocalesValues);\n\n const strictModeRecord =\n strictMode === 'strict'\n ? `interface IConfigLocales<Content> extends Record<DeclaredLocales, Content> {}`\n : strictMode === 'inclusive'\n ? 
`interface IConfigLocales<Content> extends Record<ExtractedLocales, Content>, Partial<Record<ExcludedLocales, Content>> {}`\n : `interface IConfigLocales<Content> extends Partial<Record<Locales, Content>> {}`;\n\n /**\n * Write the module augmentation to extend the intlayer module with the dictionaries types\n * Will suggest the type resulting of the dictionaries\n *\n * declare module 'intlayer' {\n * interface IntlayerDictionaryTypesConnector = {\n * dictionaries: {\n * id: DictionaryType;\n * }\n * }\n *\n * type ConfigLocales = Locales.ENGLISH | Locales.FRENCH | Locales.SPANISH;\n * type ExtractedLocales = Extract<Locales, ConfigLocales>;\n *\n * interface IConfigLocales<Content> extends Record<ExtractedLocales, Content>, Partial<Record<ExcludedLocales, Content>> {}\n *\n *\n * }\n * See https://www.typescriptlang.org/docs/handbook/declaration-merging.html#module-augmentation\n */\n fileContent += `declare module 'intlayer' {\\n`;\n fileContent += ` interface IntlayerDictionaryTypesConnector {\\n${formattedDictionaryMap}\\n }\\n\\n`;\n fileContent += ` type DeclaredLocales = ${formattedLocales};\\n`;\n fileContent += ` type RequiredLocales = ${formattedRequiredLocales};\\n`;\n fileContent += ` type ExtractedLocales = Extract<Locales, RequiredLocales>;\\n`;\n fileContent += ` type ExcludedLocales = Exclude<Locales, RequiredLocales>;\\n`;\n fileContent += ` ${strictModeRecord}\\n`;\n fileContent += `}`;\n\n return fileContent;\n};\n\n/**\n * This function generates a index file merging all the types\n */\nexport const createModuleAugmentation = (\n configuration = getConfiguration()\n) => {\n const { moduleAugmentationDir, typesDir } = configuration.content;\n\n // Create main directory if it doesn't exist\n if (!existsSync(moduleAugmentationDir)) {\n mkdirSync(moduleAugmentationDir, { recursive: true });\n }\n\n const dictionariesTypesDefinitions: string[] = fg.sync(\n normalizePath(`${typesDir}/*.ts`),\n {\n ignore: ['**/*.d.ts'],\n }\n );\n // Create the dictionary list file\n\n const tsContent = generateTypeIndexContent(dictionariesTypesDefinitions);\n writeFileSync(join(moduleAugmentationDir, 'intlayer.d.ts'), 
tsContent);\n};\n"],"mappings":"AAAA,SAAS,SAAS,kBAAkB,qBAAqB;AACzD,OAAO,QAAQ;AACf,SAAS,YAAY,WAAW,qBAAqB;AACrD,SAAS,UAAU,SAAS,MAAM,gBAAgB;AAClD,SAAS,mBAAmB;AAC5B,SAAS,4BAA4B;AAE9B,MAAM,cAAc,CAAC,QAC1B,GAAG,qBAAqB,GAAG,CAAC;AAE9B,MAAM,gBAAgB,CAAC,YACrB,QACG,IAAI,CAAC,WAAW;AACf,aAAW,OAAO,SAAS;AACzB,QAAI,QAAQ,GAA2B,MAAM,QAAQ;AACnD,aAAO,WAAW,GAAG;AAAA,IACvB;AAAA,EACF;AACF,CAAC,EACA,KAAK,KAAK;AAKf,MAAM,2BAA2B,CAC/B,WACA,gBAAgB,iBAAiB,MACtB;AACX,QAAM,EAAE,SAAS,qBAAqB,IAAI;AAC1C,QAAM,EAAE,sBAAsB,IAAI;AAClC,QAAM,EAAE,SAAS,iBAAiB,WAAW,IAAI;AAEjD,MAAI,cACF;AAEF,QAAM,kBAAkB,UAAU,IAAI,CAAC,oBAAoB;AAAA,IACzD,cAAc,KAAK,SAAS,uBAAuB,cAAc,CAAC;AAAA,IAClE,IAAI,SAAS,gBAAgB,QAAQ,cAAc,CAAC;AAAA;AAAA,IACpD,MAAM,IAAI,YAAY,cAAc,CAAC;AAAA;AAAA,EACvC,EAAE;AAGF,kBAAgB,QAAQ,CAAC,eAAe;AACtC,mBAAe,UAAU,WAAW,IAAI,UAAU,WAAW,YAAY;AAAA;AAAA,EAC3E,CAAC;AAED,iBAAe;AAGf,QAAM,yBAAiC,gBACpC,IAAI,CAAC,eAAe,QAAQ,WAAW,EAAE,aAAa,WAAW,IAAI,GAAG,EACxE,KAAK,IAAI;AAEZ,QAAM,wBACJ,gBAAgB,SAAS,IACrB,gBAAgB;AAAA,IAAO,CAAC,WACtB,QAAQ,IAAI,CAACA,YAAW,OAAOA,OAAM,CAAC,EAAE,SAAS,OAAO,MAAM,CAAC;AAAA,EACjE,IACA;AAEN,QAAM,mBAAmB,cAAc,OAAO;AAC9C,QAAM,2BAA2B,cAAc,qBAAqB;AAEpE,QAAM,mBACJ,eAAe,WACX,kFACA,eAAe,cACb,8HACA;AAsBR,iBAAe;AAAA;AACf,iBAAe;AAAA,EAAmD,sBAAsB;AAAA;AAAA;AAAA;AACxF,iBAAe,4BAA4B,gBAAgB;AAAA;AAC3D,iBAAe,4BAA4B,wBAAwB;AAAA;AACnE,iBAAe;AAAA;AACf,iBAAe;AAAA;AACf,iBAAe,KAAK,gBAAgB;AAAA;AACpC,iBAAe;AAEf,SAAO;AACT;AAKO,MAAM,2BAA2B,CACtC,gBAAgB,iBAAiB,MAC9B;AACH,QAAM,EAAE,uBAAuB,SAAS,IAAI,cAAc;AAG1D,MAAI,CAAC,WAAW,qBAAqB,GAAG;AACtC,cAAU,uBAAuB,EAAE,WAAW,KAAK,CAAC;AAAA,EACtD;AAEA,QAAM,+BAAyC,GAAG;AAAA,IAChD,cAAc,GAAG,QAAQ,OAAO;AAAA,IAChC;AAAA,MACE,QAAQ,CAAC,WAAW;AAAA,IACtB;AAAA,EACF;AAGA,QAAM,YAAY,yBAAyB,4BAA4B;AACvE,gBAAc,KAAK,uBAAuB,eAAe,GAAG,SAAS;AACvE;","names":["locale"]}
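Here too the hunk only splits the old `utils` import in two; the embedded source spells out the template of the `intlayer.d.ts` file that `createModuleAugmentation` writes. A sketch of that output for a hypothetical project with two dictionaries, English and French locales, and `strictMode: 'strict'` (hash identifiers invented):

/* eslint-disable */
import { Locales } from 'intlayer';
import _HashHome from './types/home.ts';   // hypothetical hash identifier
import _HashAbout from './types/about.ts'; // hypothetical hash identifier

declare module 'intlayer' {
  interface IntlayerDictionaryTypesConnector {
    "home": typeof _HashHome;
    "about": typeof _HashAbout;
  }

  type DeclaredLocales = Locales.ENGLISH | Locales.FRENCH;
  type RequiredLocales = Locales.ENGLISH | Locales.FRENCH;
  type ExtractedLocales = Extract<Locales, RequiredLocales>;
  type ExcludedLocales = Exclude<Locales, RequiredLocales>;
  interface IConfigLocales<Content> extends Record<DeclaredLocales, Content> {}
}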
@@ -0,0 +1,9 @@
+ import crypto from "crypto-js";
+ const getFileHash = (filePath) => {
+ const hash = crypto.SHA3(filePath);
+ return hash.toString(crypto.enc.Base64).replace(/[^A-Z\d]/gi, "").substring(0, 20);
+ };
+ export {
+ getFileHash
+ };
+ //# sourceMappingURL=getFileHash.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../../src/utils/getFileHash.ts"],"sourcesContent":["import crypto from 'crypto-js';\n\nexport const getFileHash = (filePath: string) => {\n const hash = crypto.SHA3(filePath);\n\n return hash\n .toString(crypto.enc.Base64)\n .replace(/[^A-Z\\d]/gi, '')\n .substring(0, 20);\n};\n"],"mappings":"AAAA,OAAO,YAAY;AAEZ,MAAM,cAAc,CAAC,aAAqB;AAC/C,QAAM,OAAO,OAAO,KAAK,QAAQ;AAEjC,SAAO,KACJ,SAAS,OAAO,IAAI,MAAM,EAC1B,QAAQ,cAAc,EAAE,EACxB,UAAU,GAAG,EAAE;AACpB;","names":[]}
@@ -1,8 +1,3 @@
- import crypto from "crypto-js";
- const getFileHash = (filePath) => {
- const hash = crypto.SHA3(filePath);
- return hash.toString(crypto.enc.Base64).replace(/[^A-Z\d]/gi, "").substring(0, 20);
- };
  const kebabCaseToCamelCase = (name) => {
  return name.split(/[^a-zA-Z0-9]+/).filter(Boolean).map((word) => {
  const lowerWord = word.toLowerCase();
@@ -14,10 +9,7 @@ const kebabCaseToCamelCase = (name) => {
  return capitalized;
  }).join("");
  };
- const sortAlphabetically = (a, b) => a.localeCompare(b);
  export {
- getFileHash,
- kebabCaseToCamelCase,
- sortAlphabetically
+ kebabCaseToCamelCase
  };
- //# sourceMappingURL=utils.mjs.map
+ //# sourceMappingURL=kebabCaseToCamelCase.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../../src/utils/kebabCaseToCamelCase.ts"],"sourcesContent":["export const kebabCaseToCamelCase = (name: string): string => {\n return name\n .split(/[^a-zA-Z0-9]+/) // Split on any non-alphanumeric character\n .filter(Boolean) // Remove any empty strings\n .map((word) => {\n // Convert the entire word to lowercase first\n const lowerWord = word.toLowerCase();\n\n // Capitalize the first character\n let capitalized = lowerWord.charAt(0).toUpperCase() + lowerWord.slice(1);\n\n // Capitalize any letter that follows a number\n capitalized = capitalized.replace(\n /(\\d)([a-z])/g,\n (_, number, char) => number + char.toUpperCase()\n );\n\n return capitalized;\n })\n .join(''); // Concatenate all parts into a single string\n};\n"],"mappings":"AAAO,MAAM,uBAAuB,CAAC,SAAyB;AAC5D,SAAO,KACJ,MAAM,eAAe,EACrB,OAAO,OAAO,EACd,IAAI,CAAC,SAAS;AAEb,UAAM,YAAY,KAAK,YAAY;AAGnC,QAAI,cAAc,UAAU,OAAO,CAAC,EAAE,YAAY,IAAI,UAAU,MAAM,CAAC;AAGvE,kBAAc,YAAY;AAAA,MACxB;AAAA,MACA,CAAC,GAAG,QAAQ,SAAS,SAAS,KAAK,YAAY;AAAA,IACjD;AAEA,WAAO;AAAA,EACT,CAAC,EACA,KAAK,EAAE;AACZ;","names":[]}
@@ -0,0 +1,34 @@
+ import { mkdir, stat, unlink, writeFile } from "fs/promises";
+ import { dirname } from "path";
+ const runOnce = async (sentinelFilePath, callback, cacheTimeoutMs = 60 * 1e3) => {
+ const currentTimestamp = Date.now();
+ const timeoutDuration = cacheTimeoutMs;
+ try {
+ const sentinelStats = await stat(sentinelFilePath);
+ const sentinelAge = currentTimestamp - sentinelStats.mtime.getTime();
+ if (sentinelAge > timeoutDuration) {
+ await unlink(sentinelFilePath);
+ } else {
+ return;
+ }
+ } catch (err) {
+ if (err.code === "ENOENT") {
+ } else {
+ throw err;
+ }
+ }
+ try {
+ await mkdir(dirname(sentinelFilePath), { recursive: true });
+ await writeFile(sentinelFilePath, String(currentTimestamp), { flag: "wx" });
+ } catch (err) {
+ if (err.code === "EEXIST") {
+ return;
+ }
+ throw err;
+ }
+ await callback();
+ };
+ export {
+ runOnce
+ };
+ //# sourceMappingURL=runOnce.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../../src/utils/runOnce.ts"],"sourcesContent":["import { mkdir, stat, unlink, writeFile } from 'fs/promises';\nimport { dirname } from 'path';\n\n/**\n * Ensures a callback function runs only once within a specified time window across multiple processes.\n * Uses a sentinel file to coordinate execution and prevent duplicate work.\n *\n * @param sentinelFilePath - Path to the sentinel file used for coordination\n * @param callback - The function to execute (should be async)\n * @param cacheTimeoutMs - Time window in milliseconds during which the sentinel is considered valid (default: 60000ms = 1 minute)\n *\n * @example\n * ```typescript\n * await runPrepareIntlayerOnce(\n * '/tmp/intlayer-sentinel',\n * async () => {\n * // Your initialization logic here\n * await prepareIntlayer();\n * },\n * 30 * 1000 // 30 seconds cache\n * );\n * ```\n *\n * @throws {Error} When there are unexpected filesystem errors\n */\nexport const runOnce = async (\n sentinelFilePath: string,\n callback: () => void | Promise<void>,\n cacheTimeoutMs: number = 60 * 1000 // 1 minute in milliseconds\n) => {\n const currentTimestamp = Date.now();\n const timeoutDuration = cacheTimeoutMs;\n\n try {\n // Check if sentinel file exists and get its stats\n const sentinelStats = await stat(sentinelFilePath);\n const sentinelAge = currentTimestamp - sentinelStats.mtime.getTime();\n\n // If sentinel is older than the timeout, delete it and rebuild\n if (sentinelAge > timeoutDuration) {\n await unlink(sentinelFilePath);\n // Fall through to create new sentinel and rebuild\n } else {\n // Sentinel is recent, no need to rebuild\n return;\n }\n } catch (err: any) {\n if (err.code === 'ENOENT') {\n // File doesn't exist, continue to create it\n } else {\n throw err; // unexpected FS error\n }\n }\n\n try {\n // Ensure the directory exists before writing the file\n await mkdir(dirname(sentinelFilePath), { recursive: true });\n\n // O_EXCL ensures only the *first* process can create the file\n await writeFile(sentinelFilePath, String(currentTimestamp), { flag: 'wx' });\n } catch (err: any) {\n if (err.code === 'EEXIST') {\n // Another process already created it → we're done\n return;\n }\n throw err; // unexpected FS error\n }\n\n await callback();\n};\n"],"mappings":"AAAA,SAAS,OAAO,MAAM,QAAQ,iBAAiB;AAC/C,SAAS,eAAe;AAwBjB,MAAM,UAAU,OACrB,kBACA,UACA,iBAAyB,KAAK,QAC3B;AACH,QAAM,mBAAmB,KAAK,IAAI;AAClC,QAAM,kBAAkB;AAExB,MAAI;AAEF,UAAM,gBAAgB,MAAM,KAAK,gBAAgB;AACjD,UAAM,cAAc,mBAAmB,cAAc,MAAM,QAAQ;AAGnE,QAAI,cAAc,iBAAiB;AACjC,YAAM,OAAO,gBAAgB;AAAA,IAE/B,OAAO;AAEL;AAAA,IACF;AAAA,EACF,SAAS,KAAU;AACjB,QAAI,IAAI,SAAS,UAAU;AAAA,IAE3B,OAAO;AACL,YAAM;AAAA,IACR;AAAA,EACF;AAEA,MAAI;AAEF,UAAM,MAAM,QAAQ,gBAAgB,GAAG,EAAE,WAAW,KAAK,CAAC;AAG1D,UAAM,UAAU,kBAAkB,OAAO,gBAAgB,GAAG,EAAE,MAAM,KAAK,CAAC;AAAA,EAC5E,SAAS,KAAU;AACjB,QAAI,IAAI,SAAS,UAAU;AAEzB;AAAA,IACF;AACA,UAAM;AAAA,EACR;AAEA,QAAM,SAAS;AACjB;","names":[]}
@@ -0,0 +1,5 @@
+ const sortAlphabetically = (a, b) => a.localeCompare(b);
+ export {
+ sortAlphabetically
+ };
+ //# sourceMappingURL=sortAlphabetically.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../../src/utils/sortAlphabetically.ts"],"sourcesContent":["export const sortAlphabetically = (a: string, b: string) => a.localeCompare(b);\n"],"mappings":"AAAO,MAAM,qBAAqB,CAAC,GAAW,MAAc,EAAE,cAAc,CAAC;","names":[]}