@intlayer/cli 7.5.10 → 7.5.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (107)
  1. package/dist/cjs/ci.cjs +73 -0
  2. package/dist/cjs/ci.cjs.map +1 -0
  3. package/dist/cjs/cli.cjs +41 -6
  4. package/dist/cjs/cli.cjs.map +1 -1
  5. package/dist/cjs/editor.cjs +1 -1
  6. package/dist/cjs/index.cjs +4 -5
  7. package/dist/cjs/listContentDeclaration.cjs +6 -2
  8. package/dist/cjs/listContentDeclaration.cjs.map +1 -1
  9. package/dist/cjs/listProjects.cjs +28 -0
  10. package/dist/cjs/listProjects.cjs.map +1 -0
  11. package/dist/cjs/{reviewDoc.cjs → reviewDoc/reviewDoc.cjs} +17 -15
  12. package/dist/cjs/reviewDoc/reviewDoc.cjs.map +1 -0
  13. package/dist/cjs/{reviewDocBlockAware.cjs → reviewDoc/reviewDocBlockAware.cjs} +12 -8
  14. package/dist/cjs/reviewDoc/reviewDocBlockAware.cjs.map +1 -0
  15. package/dist/cjs/translateDoc/index.cjs +8 -0
  16. package/dist/cjs/translateDoc/translateDoc.cjs +74 -0
  17. package/dist/cjs/translateDoc/translateDoc.cjs.map +1 -0
  18. package/dist/cjs/translateDoc/translateFile.cjs +103 -0
  19. package/dist/cjs/translateDoc/translateFile.cjs.map +1 -0
  20. package/dist/cjs/translateDoc/types.cjs +0 -0
  21. package/dist/cjs/translateDoc/validation.cjs +49 -0
  22. package/dist/cjs/translateDoc/validation.cjs.map +1 -0
  23. package/dist/cjs/translation-alignment/planActions.cjs +2 -4
  24. package/dist/cjs/translation-alignment/planActions.cjs.map +1 -1
  25. package/dist/cjs/translation-alignment/segmentDocument.cjs +35 -101
  26. package/dist/cjs/translation-alignment/segmentDocument.cjs.map +1 -1
  27. package/dist/cjs/utils/checkAccess.cjs +2 -1
  28. package/dist/cjs/utils/checkAccess.cjs.map +1 -1
  29. package/dist/cjs/utils/setupAI.cjs +20 -11
  30. package/dist/cjs/utils/setupAI.cjs.map +1 -1
  31. package/dist/esm/auth/login.mjs +16 -16
  32. package/dist/esm/auth/login.mjs.map +1 -1
  33. package/dist/esm/ci.mjs +72 -0
  34. package/dist/esm/ci.mjs.map +1 -0
  35. package/dist/esm/cli.mjs +39 -4
  36. package/dist/esm/cli.mjs.map +1 -1
  37. package/dist/esm/editor.mjs +1 -1
  38. package/dist/esm/index.mjs +3 -3
  39. package/dist/esm/listContentDeclaration.mjs +6 -2
  40. package/dist/esm/listContentDeclaration.mjs.map +1 -1
  41. package/dist/esm/listProjects.mjs +27 -0
  42. package/dist/esm/listProjects.mjs.map +1 -0
  43. package/dist/esm/pull.mjs +6 -6
  44. package/dist/esm/pull.mjs.map +1 -1
  45. package/dist/esm/push/push.mjs +7 -7
  46. package/dist/esm/push/push.mjs.map +1 -1
  47. package/dist/esm/{reviewDoc.mjs → reviewDoc/reviewDoc.mjs} +14 -12
  48. package/dist/esm/reviewDoc/reviewDoc.mjs.map +1 -0
  49. package/dist/esm/{reviewDocBlockAware.mjs → reviewDoc/reviewDocBlockAware.mjs} +11 -7
  50. package/dist/esm/reviewDoc/reviewDocBlockAware.mjs.map +1 -0
  51. package/dist/esm/translateDoc/index.mjs +5 -0
  52. package/dist/esm/translateDoc/translateDoc.mjs +72 -0
  53. package/dist/esm/translateDoc/translateDoc.mjs.map +1 -0
  54. package/dist/esm/translateDoc/translateFile.mjs +102 -0
  55. package/dist/esm/translateDoc/translateFile.mjs.map +1 -0
  56. package/dist/esm/translateDoc/types.mjs +0 -0
  57. package/dist/esm/translateDoc/validation.mjs +47 -0
  58. package/dist/esm/translateDoc/validation.mjs.map +1 -0
  59. package/dist/esm/translation-alignment/planActions.mjs +2 -4
  60. package/dist/esm/translation-alignment/planActions.mjs.map +1 -1
  61. package/dist/esm/translation-alignment/segmentDocument.mjs +35 -101
  62. package/dist/esm/translation-alignment/segmentDocument.mjs.map +1 -1
  63. package/dist/esm/utils/checkAccess.mjs +2 -1
  64. package/dist/esm/utils/checkAccess.mjs.map +1 -1
  65. package/dist/esm/utils/setupAI.mjs +20 -11
  66. package/dist/esm/utils/setupAI.mjs.map +1 -1
  67. package/dist/types/ci.d.ts +5 -0
  68. package/dist/types/ci.d.ts.map +1 -0
  69. package/dist/types/cli.d.ts.map +1 -1
  70. package/dist/types/index.d.ts +3 -3
  71. package/dist/types/listContentDeclaration.d.ts +2 -0
  72. package/dist/types/listContentDeclaration.d.ts.map +1 -1
  73. package/dist/types/listProjects.d.ts +11 -0
  74. package/dist/types/listProjects.d.ts.map +1 -0
  75. package/dist/types/pull.d.ts.map +1 -1
  76. package/dist/types/pushConfig.d.ts.map +1 -1
  77. package/dist/types/{reviewDoc.d.ts → reviewDoc/reviewDoc.d.ts} +1 -1
  78. package/dist/types/reviewDoc/reviewDoc.d.ts.map +1 -0
  79. package/dist/types/{reviewDocBlockAware.d.ts → reviewDoc/reviewDocBlockAware.d.ts} +2 -2
  80. package/dist/types/reviewDoc/reviewDocBlockAware.d.ts.map +1 -0
  81. package/dist/types/translateDoc/index.d.ts +5 -0
  82. package/dist/types/translateDoc/translateDoc.d.ts +21 -0
  83. package/dist/types/translateDoc/translateDoc.d.ts.map +1 -0
  84. package/dist/types/translateDoc/translateFile.d.ts +21 -0
  85. package/dist/types/translateDoc/translateFile.d.ts.map +1 -0
  86. package/dist/types/translateDoc/types.d.ts +47 -0
  87. package/dist/types/translateDoc/types.d.ts.map +1 -0
  88. package/dist/types/translateDoc/validation.d.ts +16 -0
  89. package/dist/types/translateDoc/validation.d.ts.map +1 -0
  90. package/dist/types/translation-alignment/planActions.d.ts +2 -2
  91. package/dist/types/translation-alignment/planActions.d.ts.map +1 -1
  92. package/dist/types/translation-alignment/rebuildDocument.d.ts.map +1 -1
  93. package/dist/types/translation-alignment/segmentDocument.d.ts.map +1 -1
  94. package/dist/types/utils/setupAI.d.ts.map +1 -1
  95. package/package.json +11 -10
  96. package/dist/cjs/reviewDoc.cjs.map +0 -1
  97. package/dist/cjs/reviewDocBlockAware.cjs.map +0 -1
  98. package/dist/cjs/translateDoc.cjs +0 -132
  99. package/dist/cjs/translateDoc.cjs.map +0 -1
  100. package/dist/esm/reviewDoc.mjs.map +0 -1
  101. package/dist/esm/reviewDocBlockAware.mjs.map +0 -1
  102. package/dist/esm/translateDoc.mjs +0 -129
  103. package/dist/esm/translateDoc.mjs.map +0 -1
  104. package/dist/types/reviewDoc.d.ts.map +0 -1
  105. package/dist/types/reviewDocBlockAware.d.ts.map +0 -1
  106. package/dist/types/translateDoc.d.ts +0 -47
  107. package/dist/types/translateDoc.d.ts.map +0 -1
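Taken together, the list shows four structural changes: a new ci entry point and a new listProjects command are added; the former single-file translateDoc module is split into a translateDoc/ directory (translateDoc, translateFile, validation, types, plus an index barrel); reviewDoc and reviewDocBlockAware move under reviewDoc/; and translation-alignment/segmentDocument is substantially slimmed down (+35 -101). The resulting dist/esm layout for the affected modules, read directly off the list above:

    dist/esm/
      ci.mjs                     (new)
      listProjects.mjs           (new)
      reviewDoc/
        reviewDoc.mjs            (moved from dist/esm/reviewDoc.mjs)
        reviewDocBlockAware.mjs  (moved from dist/esm/reviewDocBlockAware.mjs)
      translateDoc/
        index.mjs
        translateDoc.mjs
        translateFile.mjs
        types.mjs
        validation.mjs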
package/dist/esm/listProjects.mjs ADDED
@@ -0,0 +1,27 @@
+ import { listProjects } from "@intlayer/chokidar";
+ import { relative } from "node:path";
+
+ //#region src/listProjects.ts
+ const listProjectsCommand = async (options) => {
+ const { searchDir, projectsPath } = await listProjects(options);
+ const projectsRelativePath = projectsPath.map((projectPath) => options?.absolute ? projectPath : relative(searchDir, projectPath)).map((projectPath) => projectPath === "" ? "." : projectPath);
+ if (options?.json) {
+ console.dir(projectsRelativePath, {
+ depth: null,
+ arrayLimit: null
+ });
+ return;
+ }
+ if (projectsPath.length === 0) {
+ console.log("No Intlayer projects found.");
+ return;
+ }
+ console.log(`Found ${projectsPath.length} Intlayer project(s):\n`);
+ projectsPath.forEach((project) => {
+ console.log(` - ${project}`);
+ });
+ };
+
+ //#endregion
+ export { listProjectsCommand };
+ //# sourceMappingURL=listProjects.mjs.map
package/dist/esm/listProjects.mjs.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"listProjects.mjs","names":[],"sources":["../../src/listProjects.ts"],"sourcesContent":["import { relative } from 'node:path';\nimport type { ListProjectsOptions } from '@intlayer/chokidar';\nimport { listProjects } from '@intlayer/chokidar';\n\nexport type ListProjectsCommandOptions = ListProjectsOptions & {\n json?: boolean;\n absolute?: boolean;\n};\n\nexport const listProjectsCommand = async (\n options?: ListProjectsCommandOptions\n) => {\n const { searchDir, projectsPath } = await listProjects(options);\n\n const projectsRelativePath = projectsPath\n .map((projectPath) =>\n options?.absolute ? projectPath : relative(searchDir, projectPath)\n )\n .map((projectPath) => (projectPath === '' ? '.' : projectPath));\n\n if (options?.json) {\n console.dir(projectsRelativePath, { depth: null, arrayLimit: null });\n return;\n }\n\n if (projectsPath.length === 0) {\n console.log('No Intlayer projects found.');\n return;\n }\n\n console.log(`Found ${projectsPath.length} Intlayer project(s):\\n`);\n projectsPath.forEach((project) => {\n console.log(` - ${project}`);\n });\n};\n"],"mappings":";;;;AASA,MAAa,sBAAsB,OACjC,YACG;CACH,MAAM,EAAE,WAAW,iBAAiB,MAAM,aAAa,QAAQ;CAE/D,MAAM,uBAAuB,aAC1B,KAAK,gBACJ,SAAS,WAAW,cAAc,SAAS,WAAW,YAAY,CACnE,CACA,KAAK,gBAAiB,gBAAgB,KAAK,MAAM,YAAa;AAEjE,KAAI,SAAS,MAAM;AACjB,UAAQ,IAAI,sBAAsB;GAAE,OAAO;GAAM,YAAY;GAAM,CAAC;AACpE;;AAGF,KAAI,aAAa,WAAW,GAAG;AAC7B,UAAQ,IAAI,8BAA8B;AAC1C;;AAGF,SAAQ,IAAI,SAAS,aAAa,OAAO,yBAAyB;AAClE,cAAa,SAAS,YAAY;AAChC,UAAQ,IAAI,OAAO,UAAU;GAC7B"}
package/dist/esm/pull.mjs CHANGED
@@ -51,8 +51,8 @@ const pull = async (options) => {
  dictionaryKey,
  status: "pending"
  }))];
- const logger = new PullLogger();
- logger.update(dictionariesStatuses.map((s) => ({
+ const logger$1 = new PullLogger();
+ logger$1.update(dictionariesStatuses.map((s) => ({
  dictionaryKey: s.dictionaryKey,
  status: s.status
  })));
@@ -61,7 +61,7 @@ const pull = async (options) => {
  const isCached = statusObj.status === "imported" || statusObj.status === "up-to-date";
  if (!isCached) {
  statusObj.status = "fetching";
- logger.update([{
+ logger$1.update([{
  dictionaryKey: statusObj.dictionaryKey,
  status: "fetching"
  }]);
@@ -73,7 +73,7 @@ const pull = async (options) => {
  if (!sourceDictionary) throw new Error(`Dictionary ${statusObj.dictionaryKey} not found on remote`);
  const { status } = await writeContentDeclaration(sourceDictionary, config, options);
  statusObj.status = status;
- logger.update([{
+ logger$1.update([{
  dictionaryKey: statusObj.dictionaryKey,
  status
  }]);
@@ -82,14 +82,14 @@ const pull = async (options) => {
  statusObj.status = "error";
  statusObj.error = error;
  statusObj.errorMessage = `Error fetching dictionary ${statusObj.dictionaryKey}: ${error}`;
- logger.update([{
+ logger$1.update([{
  dictionaryKey: statusObj.dictionaryKey,
  status: "error"
  }]);
  }
  };
  await parallelize(dictionariesStatuses, processDictionary, 5);
- logger.finish();
+ logger$1.finish();
  const iconFor = (status) => {
  switch (status) {
  case "fetched":
package/dist/esm/pull.mjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"pull.mjs","names":["distantDictionariesUpdateTimeStamp: Record<string, number>","remoteDictionariesRecord: Record<string, any>","dictionariesStatuses: DictionariesStatus[]","successfullyFetchedDictionaries: Dictionary[]","sourceDictionary: Dictionary | undefined"],"sources":["../../src/pull.ts"],"sourcesContent":["import { existsSync } from 'node:fs';\nimport { join } from 'node:path';\nimport { getIntlayerAPIProxy } from '@intlayer/api';\nimport {\n type DictionaryStatus,\n parallelize,\n writeContentDeclaration,\n} from '@intlayer/chokidar';\nimport {\n ANSIColors,\n type GetConfigurationOptions,\n getAppLogger,\n getConfiguration,\n getProjectRequire,\n} from '@intlayer/config';\nimport type { Dictionary } from '@intlayer/types';\nimport { PullLogger, type PullStatus } from './push/pullLog';\nimport { checkCMSAuth } from './utils/checkAccess';\n\ntype PullOptions = {\n dictionaries?: string[];\n newDictionariesPath?: string;\n configOptions?: GetConfigurationOptions;\n};\n\ntype DictionariesStatus = {\n dictionaryKey: string;\n status: DictionaryStatus | 'pending' | 'fetching' | 'error';\n error?: Error;\n errorMessage?: string;\n};\n\n/**\n * Fetch distant dictionaries and write them locally,\n * with progress indicators and concurrency control.\n */\nexport const pull = async (options?: PullOptions): Promise<void> => {\n const appLogger = getAppLogger(options?.configOptions?.override);\n\n try {\n const config = getConfiguration(options?.configOptions);\n\n const hasCMSAuth = await checkCMSAuth(config);\n\n if (!hasCMSAuth) return;\n\n const intlayerAPI = getIntlayerAPIProxy(undefined, config);\n\n // Get remote update timestamps map\n const getDictionariesUpdateTimestampResult =\n await intlayerAPI.dictionary.getDictionariesUpdateTimestamp();\n\n if (!getDictionariesUpdateTimestampResult.data) {\n throw new Error('No distant dictionaries found');\n }\n\n let distantDictionariesUpdateTimeStamp: Record<string, number> =\n getDictionariesUpdateTimestampResult.data;\n\n // Optional filtering by requested dictionaries\n if (options?.dictionaries) {\n distantDictionariesUpdateTimeStamp = Object.fromEntries(\n Object.entries(distantDictionariesUpdateTimeStamp).filter(([key]) =>\n options.dictionaries?.includes(key)\n )\n );\n }\n\n // Load local cached remote dictionaries (if any)\n const remoteDictionariesPath = join(\n config.content.mainDir,\n 'remote_dictionaries.cjs'\n );\n const requireFunction = config.build?.require ?? getProjectRequire();\n const remoteDictionariesRecord: Record<string, any> = existsSync(\n remoteDictionariesPath\n )\n ? (requireFunction(remoteDictionariesPath) as any)\n : {};\n\n // Determine which keys need fetching by comparing updatedAt with local cache\n const entries = Object.entries(distantDictionariesUpdateTimeStamp);\n const keysToFetch = entries\n .filter(([key, remoteUpdatedAt]) => {\n if (!remoteUpdatedAt) return true;\n const local = (remoteDictionariesRecord as any)[key];\n if (!local) return true;\n const localUpdatedAtRaw = (local as any)?.updatedAt as\n | number\n | string\n | undefined;\n const localUpdatedAt =\n typeof localUpdatedAtRaw === 'number'\n ? localUpdatedAtRaw\n : localUpdatedAtRaw\n ? 
new Date(localUpdatedAtRaw).getTime()\n : undefined;\n if (typeof localUpdatedAt !== 'number') return true;\n return remoteUpdatedAt > localUpdatedAt;\n })\n .map(([key]) => key);\n\n const cachedKeys = entries\n .filter(([key, remoteUpdatedAt]) => {\n const local = (remoteDictionariesRecord as any)[key];\n const localUpdatedAtRaw = (local as any)?.updatedAt as\n | number\n | string\n | undefined;\n const localUpdatedAt =\n typeof localUpdatedAtRaw === 'number'\n ? localUpdatedAtRaw\n : localUpdatedAtRaw\n ? new Date(localUpdatedAtRaw).getTime()\n : undefined;\n return (\n typeof localUpdatedAt === 'number' &&\n typeof remoteUpdatedAt === 'number' &&\n localUpdatedAt >= remoteUpdatedAt\n );\n })\n .map(([key]) => key);\n\n // Check if dictionaries list is empty\n if (entries.length === 0) {\n appLogger('No dictionaries to fetch', {\n level: 'error',\n });\n return;\n }\n\n appLogger('Fetching dictionaries:');\n\n // Prepare dictionaries statuses\n const dictionariesStatuses: DictionariesStatus[] = [\n ...cachedKeys.map((dictionaryKey) => ({\n dictionaryKey,\n status: 'imported' as DictionaryStatus,\n })),\n ...keysToFetch.map((dictionaryKey) => ({\n dictionaryKey,\n status: 'pending' as const,\n })),\n ];\n\n // Initialize aggregated logger\n const logger = new PullLogger();\n logger.update(\n dictionariesStatuses.map<PullStatus>((s) => ({\n dictionaryKey: s.dictionaryKey,\n status: s.status,\n }))\n );\n\n const successfullyFetchedDictionaries: Dictionary[] = [];\n\n const processDictionary = async (\n statusObj: DictionariesStatus\n ): Promise<void> => {\n const isCached =\n statusObj.status === 'imported' || statusObj.status === 'up-to-date';\n\n if (!isCached) {\n statusObj.status = 'fetching';\n logger.update([\n { dictionaryKey: statusObj.dictionaryKey, status: 'fetching' },\n ]);\n }\n\n try {\n let sourceDictionary: Dictionary | undefined;\n\n if (isCached) {\n sourceDictionary = remoteDictionariesRecord[\n statusObj.dictionaryKey\n ] as Dictionary | undefined;\n }\n\n if (!sourceDictionary) {\n // Fetch the dictionary\n const getDictionaryResult =\n await intlayerAPI.dictionary.getDictionary(statusObj.dictionaryKey);\n\n sourceDictionary = getDictionaryResult.data as Dictionary | undefined;\n }\n\n if (!sourceDictionary) {\n throw new Error(\n `Dictionary ${statusObj.dictionaryKey} not found on remote`\n );\n }\n\n // Now, write the dictionary to local file\n const { status } = await writeContentDeclaration(\n sourceDictionary,\n config,\n options\n );\n\n statusObj.status = status;\n logger.update([{ dictionaryKey: statusObj.dictionaryKey, status }]);\n\n successfullyFetchedDictionaries.push(sourceDictionary);\n } catch (error) {\n statusObj.status = 'error';\n statusObj.error = error as Error;\n statusObj.errorMessage = `Error fetching dictionary ${statusObj.dictionaryKey}: ${error}`;\n logger.update([\n { dictionaryKey: statusObj.dictionaryKey, status: 'error' },\n ]);\n }\n };\n\n // Process dictionaries in parallel with concurrency limit\n await parallelize(dictionariesStatuses, processDictionary, 5);\n\n // Stop the logger and render final state\n logger.finish();\n\n // Per-dictionary summary\n const iconFor = (status: DictionariesStatus['status']) => {\n switch (status) {\n case 'fetched':\n case 'imported':\n case 'updated':\n case 'up-to-date':\n case 'reimported in JSON':\n case 'new content file':\n return '✔';\n case 'error':\n return '✖';\n default:\n return '⏲';\n }\n };\n\n const colorFor = (status: DictionariesStatus['status']) => {\n switch (status) {\n case 
'fetched':\n case 'imported':\n case 'updated':\n case 'up-to-date':\n return ANSIColors.GREEN;\n case 'reimported in JSON':\n case 'new content file':\n return ANSIColors.YELLOW;\n case 'error':\n return ANSIColors.RED;\n default:\n return ANSIColors.BLUE;\n }\n };\n\n for (const s of dictionariesStatuses) {\n const icon = iconFor(s.status);\n const color = colorFor(s.status);\n appLogger(\n ` - ${s.dictionaryKey} ${ANSIColors.GREY}[${color}${icon} ${s.status}${ANSIColors.GREY}]${ANSIColors.RESET}`\n );\n }\n\n // Output any error messages\n for (const statusObj of dictionariesStatuses) {\n if (statusObj.errorMessage) {\n appLogger(statusObj.errorMessage, {\n level: 'error',\n });\n }\n }\n } catch (error) {\n appLogger(error, {\n level: 'error',\n });\n }\n};\n"],"mappings":";;;;;;;;;;;;;AAoCA,MAAa,OAAO,OAAO,YAAyC;CAClE,MAAM,YAAY,aAAa,SAAS,eAAe,SAAS;AAEhE,KAAI;EACF,MAAM,SAAS,iBAAiB,SAAS,cAAc;AAIvD,MAAI,CAFe,MAAM,aAAa,OAAO,CAE5B;EAEjB,MAAM,cAAc,oBAAoB,QAAW,OAAO;EAG1D,MAAM,uCACJ,MAAM,YAAY,WAAW,gCAAgC;AAE/D,MAAI,CAAC,qCAAqC,KACxC,OAAM,IAAI,MAAM,gCAAgC;EAGlD,IAAIA,qCACF,qCAAqC;AAGvC,MAAI,SAAS,aACX,sCAAqC,OAAO,YAC1C,OAAO,QAAQ,mCAAmC,CAAC,QAAQ,CAAC,SAC1D,QAAQ,cAAc,SAAS,IAAI,CACpC,CACF;EAIH,MAAM,yBAAyB,KAC7B,OAAO,QAAQ,SACf,0BACD;EACD,MAAM,kBAAkB,OAAO,OAAO,WAAW,mBAAmB;EACpE,MAAMC,2BAAgD,WACpD,uBACD,GACI,gBAAgB,uBAAuB,GACxC,EAAE;EAGN,MAAM,UAAU,OAAO,QAAQ,mCAAmC;EAClE,MAAM,cAAc,QACjB,QAAQ,CAAC,KAAK,qBAAqB;AAClC,OAAI,CAAC,gBAAiB,QAAO;GAC7B,MAAM,QAAS,yBAAiC;AAChD,OAAI,CAAC,MAAO,QAAO;GACnB,MAAM,oBAAqB,OAAe;GAI1C,MAAM,iBACJ,OAAO,sBAAsB,WACzB,oBACA,oBACE,IAAI,KAAK,kBAAkB,CAAC,SAAS,GACrC;AACR,OAAI,OAAO,mBAAmB,SAAU,QAAO;AAC/C,UAAO,kBAAkB;IACzB,CACD,KAAK,CAAC,SAAS,IAAI;EAEtB,MAAM,aAAa,QAChB,QAAQ,CAAC,KAAK,qBAAqB;GAElC,MAAM,oBADS,yBAAiC,MACN;GAI1C,MAAM,iBACJ,OAAO,sBAAsB,WACzB,oBACA,oBACE,IAAI,KAAK,kBAAkB,CAAC,SAAS,GACrC;AACR,UACE,OAAO,mBAAmB,YAC1B,OAAO,oBAAoB,YAC3B,kBAAkB;IAEpB,CACD,KAAK,CAAC,SAAS,IAAI;AAGtB,MAAI,QAAQ,WAAW,GAAG;AACxB,aAAU,4BAA4B,EACpC,OAAO,SACR,CAAC;AACF;;AAGF,YAAU,yBAAyB;EAGnC,MAAMC,uBAA6C,CACjD,GAAG,WAAW,KAAK,mBAAmB;GACpC;GACA,QAAQ;GACT,EAAE,EACH,GAAG,YAAY,KAAK,mBAAmB;GACrC;GACA,QAAQ;GACT,EAAE,CACJ;EAGD,MAAM,SAAS,IAAI,YAAY;AAC/B,SAAO,OACL,qBAAqB,KAAiB,OAAO;GAC3C,eAAe,EAAE;GACjB,QAAQ,EAAE;GACX,EAAE,CACJ;EAED,MAAMC,kCAAgD,EAAE;EAExD,MAAM,oBAAoB,OACxB,cACkB;GAClB,MAAM,WACJ,UAAU,WAAW,cAAc,UAAU,WAAW;AAE1D,OAAI,CAAC,UAAU;AACb,cAAU,SAAS;AACnB,WAAO,OAAO,CACZ;KAAE,eAAe,UAAU;KAAe,QAAQ;KAAY,CAC/D,CAAC;;AAGJ,OAAI;IACF,IAAIC;AAEJ,QAAI,SACF,oBAAmB,yBACjB,UAAU;AAId,QAAI,CAAC,iBAKH,qBAFE,MAAM,YAAY,WAAW,cAAc,UAAU,cAAc,EAE9B;AAGzC,QAAI,CAAC,iBACH,OAAM,IAAI,MACR,cAAc,UAAU,cAAc,sBACvC;IAIH,MAAM,EAAE,WAAW,MAAM,wBACvB,kBACA,QACA,QACD;AAED,cAAU,SAAS;AACnB,WAAO,OAAO,CAAC;KAAE,eAAe,UAAU;KAAe;KAAQ,CAAC,CAAC;AAEnE,oCAAgC,KAAK,iBAAiB;YAC/C,OAAO;AACd,cAAU,SAAS;AACnB,cAAU,QAAQ;AAClB,cAAU,eAAe,6BAA6B,UAAU,cAAc,IAAI;AAClF,WAAO,OAAO,CACZ;KAAE,eAAe,UAAU;KAAe,QAAQ;KAAS,CAC5D,CAAC;;;AAKN,QAAM,YAAY,sBAAsB,mBAAmB,EAAE;AAG7D,SAAO,QAAQ;EAGf,MAAM,WAAW,WAAyC;AACxD,WAAQ,QAAR;IACE,KAAK;IACL,KAAK;IACL,KAAK;IACL,KAAK;IACL,KAAK;IACL,KAAK,mBACH,QAAO;IACT,KAAK,QACH,QAAO;IACT,QACE,QAAO;;;EAIb,MAAM,YAAY,WAAyC;AACzD,WAAQ,QAAR;IACE,KAAK;IACL,KAAK;IACL,KAAK;IACL,KAAK,aACH,QAAO,WAAW;IACpB,KAAK;IACL,KAAK,mBACH,QAAO,WAAW;IACpB,KAAK,QACH,QAAO,WAAW;IACpB,QACE,QAAO,WAAW;;;AAIxB,OAAK,MAAM,KAAK,sBAAsB;GACpC,MAAM,OAAO,QAAQ,EAAE,OAAO;GAC9B,MAAM,QAAQ,SAAS,EAAE,OAAO;AAChC,aACE,MAAM,EAAE,cAAc,GAAG,WAAW,KAAK,GAAG,QAAQ,KAAK,GAAG,EAAE,SAAS,WAAW,KAAK,GAAG,WAAW,QACtG;;AAIH,OAAK,MAAM,aAAa,qBACtB,KAAI,UAAU,aACZ,WAAU,UAAU,cAAc,EAChC,OAAO,SACR,CAAC;UAGC,OAAO;AACd,
YAAU,OAAO,EACf,OAAO,SACR,CAAC"}
+ {"version":3,"file":"pull.mjs","names":["distantDictionariesUpdateTimeStamp: Record<string, number>","remoteDictionariesRecord: Record<string, any>","dictionariesStatuses: DictionariesStatus[]","logger","successfullyFetchedDictionaries: Dictionary[]","sourceDictionary: Dictionary | undefined"],"sources":["../../src/pull.ts"],"sourcesContent":["import { existsSync } from 'node:fs';\nimport { join } from 'node:path';\nimport { getIntlayerAPIProxy } from '@intlayer/api';\nimport {\n type DictionaryStatus,\n parallelize,\n writeContentDeclaration,\n} from '@intlayer/chokidar';\nimport {\n ANSIColors,\n type GetConfigurationOptions,\n getAppLogger,\n getConfiguration,\n getProjectRequire,\n} from '@intlayer/config';\nimport type { Dictionary } from '@intlayer/types';\nimport { PullLogger, type PullStatus } from './push/pullLog';\nimport { checkCMSAuth } from './utils/checkAccess';\n\ntype PullOptions = {\n dictionaries?: string[];\n newDictionariesPath?: string;\n configOptions?: GetConfigurationOptions;\n};\n\ntype DictionariesStatus = {\n dictionaryKey: string;\n status: DictionaryStatus | 'pending' | 'fetching' | 'error';\n error?: Error;\n errorMessage?: string;\n};\n\n/**\n * Fetch distant dictionaries and write them locally,\n * with progress indicators and concurrency control.\n */\nexport const pull = async (options?: PullOptions): Promise<void> => {\n const appLogger = getAppLogger(options?.configOptions?.override);\n\n try {\n const config = getConfiguration(options?.configOptions);\n\n const hasCMSAuth = await checkCMSAuth(config);\n\n if (!hasCMSAuth) return;\n\n const intlayerAPI = getIntlayerAPIProxy(undefined, config);\n\n // Get remote update timestamps map\n const getDictionariesUpdateTimestampResult =\n await intlayerAPI.dictionary.getDictionariesUpdateTimestamp();\n\n if (!getDictionariesUpdateTimestampResult.data) {\n throw new Error('No distant dictionaries found');\n }\n\n let distantDictionariesUpdateTimeStamp: Record<string, number> =\n getDictionariesUpdateTimestampResult.data;\n\n // Optional filtering by requested dictionaries\n if (options?.dictionaries) {\n distantDictionariesUpdateTimeStamp = Object.fromEntries(\n Object.entries(distantDictionariesUpdateTimeStamp).filter(([key]) =>\n options.dictionaries?.includes(key)\n )\n );\n }\n\n // Load local cached remote dictionaries (if any)\n const remoteDictionariesPath = join(\n config.content.mainDir,\n 'remote_dictionaries.cjs'\n );\n const requireFunction = config.build?.require ?? getProjectRequire();\n const remoteDictionariesRecord: Record<string, any> = existsSync(\n remoteDictionariesPath\n )\n ? (requireFunction(remoteDictionariesPath) as any)\n : {};\n\n // Determine which keys need fetching by comparing updatedAt with local cache\n const entries = Object.entries(distantDictionariesUpdateTimeStamp);\n const keysToFetch = entries\n .filter(([key, remoteUpdatedAt]) => {\n if (!remoteUpdatedAt) return true;\n const local = (remoteDictionariesRecord as any)[key];\n if (!local) return true;\n const localUpdatedAtRaw = (local as any)?.updatedAt as\n | number\n | string\n | undefined;\n const localUpdatedAt =\n typeof localUpdatedAtRaw === 'number'\n ? localUpdatedAtRaw\n : localUpdatedAtRaw\n ? 
new Date(localUpdatedAtRaw).getTime()\n : undefined;\n if (typeof localUpdatedAt !== 'number') return true;\n return remoteUpdatedAt > localUpdatedAt;\n })\n .map(([key]) => key);\n\n const cachedKeys = entries\n .filter(([key, remoteUpdatedAt]) => {\n const local = (remoteDictionariesRecord as any)[key];\n const localUpdatedAtRaw = (local as any)?.updatedAt as\n | number\n | string\n | undefined;\n const localUpdatedAt =\n typeof localUpdatedAtRaw === 'number'\n ? localUpdatedAtRaw\n : localUpdatedAtRaw\n ? new Date(localUpdatedAtRaw).getTime()\n : undefined;\n return (\n typeof localUpdatedAt === 'number' &&\n typeof remoteUpdatedAt === 'number' &&\n localUpdatedAt >= remoteUpdatedAt\n );\n })\n .map(([key]) => key);\n\n // Check if dictionaries list is empty\n if (entries.length === 0) {\n appLogger('No dictionaries to fetch', {\n level: 'error',\n });\n return;\n }\n\n appLogger('Fetching dictionaries:');\n\n // Prepare dictionaries statuses\n const dictionariesStatuses: DictionariesStatus[] = [\n ...cachedKeys.map((dictionaryKey) => ({\n dictionaryKey,\n status: 'imported' as DictionaryStatus,\n })),\n ...keysToFetch.map((dictionaryKey) => ({\n dictionaryKey,\n status: 'pending' as const,\n })),\n ];\n\n // Initialize aggregated logger\n const logger = new PullLogger();\n logger.update(\n dictionariesStatuses.map<PullStatus>((s) => ({\n dictionaryKey: s.dictionaryKey,\n status: s.status,\n }))\n );\n\n const successfullyFetchedDictionaries: Dictionary[] = [];\n\n const processDictionary = async (\n statusObj: DictionariesStatus\n ): Promise<void> => {\n const isCached =\n statusObj.status === 'imported' || statusObj.status === 'up-to-date';\n\n if (!isCached) {\n statusObj.status = 'fetching';\n logger.update([\n { dictionaryKey: statusObj.dictionaryKey, status: 'fetching' },\n ]);\n }\n\n try {\n let sourceDictionary: Dictionary | undefined;\n\n if (isCached) {\n sourceDictionary = remoteDictionariesRecord[\n statusObj.dictionaryKey\n ] as Dictionary | undefined;\n }\n\n if (!sourceDictionary) {\n // Fetch the dictionary\n const getDictionaryResult =\n await intlayerAPI.dictionary.getDictionary(statusObj.dictionaryKey);\n\n sourceDictionary = getDictionaryResult.data as Dictionary | undefined;\n }\n\n if (!sourceDictionary) {\n throw new Error(\n `Dictionary ${statusObj.dictionaryKey} not found on remote`\n );\n }\n\n // Now, write the dictionary to local file\n const { status } = await writeContentDeclaration(\n sourceDictionary,\n config,\n options\n );\n\n statusObj.status = status;\n logger.update([{ dictionaryKey: statusObj.dictionaryKey, status }]);\n\n successfullyFetchedDictionaries.push(sourceDictionary);\n } catch (error) {\n statusObj.status = 'error';\n statusObj.error = error as Error;\n statusObj.errorMessage = `Error fetching dictionary ${statusObj.dictionaryKey}: ${error}`;\n logger.update([\n { dictionaryKey: statusObj.dictionaryKey, status: 'error' },\n ]);\n }\n };\n\n // Process dictionaries in parallel with concurrency limit\n await parallelize(dictionariesStatuses, processDictionary, 5);\n\n // Stop the logger and render final state\n logger.finish();\n\n // Per-dictionary summary\n const iconFor = (status: DictionariesStatus['status']) => {\n switch (status) {\n case 'fetched':\n case 'imported':\n case 'updated':\n case 'up-to-date':\n case 'reimported in JSON':\n case 'new content file':\n return '✔';\n case 'error':\n return '✖';\n default:\n return '⏲';\n }\n };\n\n const colorFor = (status: DictionariesStatus['status']) => {\n switch (status) {\n case 
'fetched':\n case 'imported':\n case 'updated':\n case 'up-to-date':\n return ANSIColors.GREEN;\n case 'reimported in JSON':\n case 'new content file':\n return ANSIColors.YELLOW;\n case 'error':\n return ANSIColors.RED;\n default:\n return ANSIColors.BLUE;\n }\n };\n\n for (const s of dictionariesStatuses) {\n const icon = iconFor(s.status);\n const color = colorFor(s.status);\n appLogger(\n ` - ${s.dictionaryKey} ${ANSIColors.GREY}[${color}${icon} ${s.status}${ANSIColors.GREY}]${ANSIColors.RESET}`\n );\n }\n\n // Output any error messages\n for (const statusObj of dictionariesStatuses) {\n if (statusObj.errorMessage) {\n appLogger(statusObj.errorMessage, {\n level: 'error',\n });\n }\n }\n } catch (error) {\n appLogger(error, {\n level: 'error',\n });\n }\n};\n"],"mappings":";;;;;;;;;;;;;AAoCA,MAAa,OAAO,OAAO,YAAyC;CAClE,MAAM,YAAY,aAAa,SAAS,eAAe,SAAS;AAEhE,KAAI;EACF,MAAM,SAAS,iBAAiB,SAAS,cAAc;AAIvD,MAAI,CAFe,MAAM,aAAa,OAAO,CAE5B;EAEjB,MAAM,cAAc,oBAAoB,QAAW,OAAO;EAG1D,MAAM,uCACJ,MAAM,YAAY,WAAW,gCAAgC;AAE/D,MAAI,CAAC,qCAAqC,KACxC,OAAM,IAAI,MAAM,gCAAgC;EAGlD,IAAIA,qCACF,qCAAqC;AAGvC,MAAI,SAAS,aACX,sCAAqC,OAAO,YAC1C,OAAO,QAAQ,mCAAmC,CAAC,QAAQ,CAAC,SAC1D,QAAQ,cAAc,SAAS,IAAI,CACpC,CACF;EAIH,MAAM,yBAAyB,KAC7B,OAAO,QAAQ,SACf,0BACD;EACD,MAAM,kBAAkB,OAAO,OAAO,WAAW,mBAAmB;EACpE,MAAMC,2BAAgD,WACpD,uBACD,GACI,gBAAgB,uBAAuB,GACxC,EAAE;EAGN,MAAM,UAAU,OAAO,QAAQ,mCAAmC;EAClE,MAAM,cAAc,QACjB,QAAQ,CAAC,KAAK,qBAAqB;AAClC,OAAI,CAAC,gBAAiB,QAAO;GAC7B,MAAM,QAAS,yBAAiC;AAChD,OAAI,CAAC,MAAO,QAAO;GACnB,MAAM,oBAAqB,OAAe;GAI1C,MAAM,iBACJ,OAAO,sBAAsB,WACzB,oBACA,oBACE,IAAI,KAAK,kBAAkB,CAAC,SAAS,GACrC;AACR,OAAI,OAAO,mBAAmB,SAAU,QAAO;AAC/C,UAAO,kBAAkB;IACzB,CACD,KAAK,CAAC,SAAS,IAAI;EAEtB,MAAM,aAAa,QAChB,QAAQ,CAAC,KAAK,qBAAqB;GAElC,MAAM,oBADS,yBAAiC,MACN;GAI1C,MAAM,iBACJ,OAAO,sBAAsB,WACzB,oBACA,oBACE,IAAI,KAAK,kBAAkB,CAAC,SAAS,GACrC;AACR,UACE,OAAO,mBAAmB,YAC1B,OAAO,oBAAoB,YAC3B,kBAAkB;IAEpB,CACD,KAAK,CAAC,SAAS,IAAI;AAGtB,MAAI,QAAQ,WAAW,GAAG;AACxB,aAAU,4BAA4B,EACpC,OAAO,SACR,CAAC;AACF;;AAGF,YAAU,yBAAyB;EAGnC,MAAMC,uBAA6C,CACjD,GAAG,WAAW,KAAK,mBAAmB;GACpC;GACA,QAAQ;GACT,EAAE,EACH,GAAG,YAAY,KAAK,mBAAmB;GACrC;GACA,QAAQ;GACT,EAAE,CACJ;EAGD,MAAMC,WAAS,IAAI,YAAY;AAC/B,WAAO,OACL,qBAAqB,KAAiB,OAAO;GAC3C,eAAe,EAAE;GACjB,QAAQ,EAAE;GACX,EAAE,CACJ;EAED,MAAMC,kCAAgD,EAAE;EAExD,MAAM,oBAAoB,OACxB,cACkB;GAClB,MAAM,WACJ,UAAU,WAAW,cAAc,UAAU,WAAW;AAE1D,OAAI,CAAC,UAAU;AACb,cAAU,SAAS;AACnB,aAAO,OAAO,CACZ;KAAE,eAAe,UAAU;KAAe,QAAQ;KAAY,CAC/D,CAAC;;AAGJ,OAAI;IACF,IAAIC;AAEJ,QAAI,SACF,oBAAmB,yBACjB,UAAU;AAId,QAAI,CAAC,iBAKH,qBAFE,MAAM,YAAY,WAAW,cAAc,UAAU,cAAc,EAE9B;AAGzC,QAAI,CAAC,iBACH,OAAM,IAAI,MACR,cAAc,UAAU,cAAc,sBACvC;IAIH,MAAM,EAAE,WAAW,MAAM,wBACvB,kBACA,QACA,QACD;AAED,cAAU,SAAS;AACnB,aAAO,OAAO,CAAC;KAAE,eAAe,UAAU;KAAe;KAAQ,CAAC,CAAC;AAEnE,oCAAgC,KAAK,iBAAiB;YAC/C,OAAO;AACd,cAAU,SAAS;AACnB,cAAU,QAAQ;AAClB,cAAU,eAAe,6BAA6B,UAAU,cAAc,IAAI;AAClF,aAAO,OAAO,CACZ;KAAE,eAAe,UAAU;KAAe,QAAQ;KAAS,CAC5D,CAAC;;;AAKN,QAAM,YAAY,sBAAsB,mBAAmB,EAAE;AAG7D,WAAO,QAAQ;EAGf,MAAM,WAAW,WAAyC;AACxD,WAAQ,QAAR;IACE,KAAK;IACL,KAAK;IACL,KAAK;IACL,KAAK;IACL,KAAK;IACL,KAAK,mBACH,QAAO;IACT,KAAK,QACH,QAAO;IACT,QACE,QAAO;;;EAIb,MAAM,YAAY,WAAyC;AACzD,WAAQ,QAAR;IACE,KAAK;IACL,KAAK;IACL,KAAK;IACL,KAAK,aACH,QAAO,WAAW;IACpB,KAAK;IACL,KAAK,mBACH,QAAO,WAAW;IACpB,KAAK,QACH,QAAO,WAAW;IACpB,QACE,QAAO,WAAW;;;AAIxB,OAAK,MAAM,KAAK,sBAAsB;GACpC,MAAM,OAAO,QAAQ,EAAE,OAAO;GAC9B,MAAM,QAAQ,SAAS,EAAE,OAAO;AAChC,aACE,MAAM,EAAE,cAAc,GAAG,WAAW,KAAK,GAAG,QAAQ,KAAK,GAAG,EAAE,SAAS,WAAW,KAAK,GAAG,WAAW,QACtG;;AAIH,OAAK,MAAM,aAAa,qBACtB,KAAI,UAAU,aACZ,WAAU,UAAU,cAAc,EAChC,OAAO,SACR,CAAC;UAGC,OAAO;AACd
,YAAU,OAAO,EACf,OAAO,SACR,CAAC"}
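The only substantive edit in pull.mjs (and, below, push.mjs) is the rename of the local logger binding to logger$1; the sourcemap's names array gains a "logger" entry accordingly. This is a scope-hoisting artifact of the bundler rather than a behavioral change: when two module-scope bindings would share a name after flattening, the bundler suffixes one with $1 and rewrites its references. A self-contained illustration of the pattern (hypothetical modules, not the actual intlayer source):

    // Two hypothetical pre-bundle modules each declare their own `logger`;
    // after scope hoisting both land in one module scope, so the bundler
    // renames the second binding to `logger$1` and rewrites its call sites.
    const logger = { log: (msg: string) => console.log("[module a]", msg) };
    const logger$1 = { log: (msg: string) => console.log("[module b]", msg) };

    logger.log("call site originally in module a");
    logger$1.log("call site originally in module b");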
package/dist/esm/push/push.mjs CHANGED
@@ -62,15 +62,15 @@ const push = async (options) => {
  dictionary,
  status: "pending"
  }));
- const logger = new PushLogger();
- logger.update(dictionariesStatuses.map((s) => ({
+ const logger$1 = new PushLogger();
+ logger$1.update(dictionariesStatuses.map((s) => ({
  dictionaryKey: s.dictionary.key,
  status: "pending"
  })));
  const successfullyPushedDictionaries = [];
  const processDictionary = async (statusObj) => {
  statusObj.status = "pushing";
- logger.update([{
+ logger$1.update([{
  dictionaryKey: statusObj.dictionary.key,
  status: "pushing"
  }]);
@@ -90,14 +90,14 @@ const push = async (options) => {
  if (updatedDictionaries.some((dictionary) => dictionary.key === statusObj.dictionary.key)) {
  statusObj.status = "modified";
  successfullyPushedDictionaries.push(statusObj.dictionary);
- logger.update([{
+ logger$1.update([{
  dictionaryKey: statusObj.dictionary.key,
  status: "modified"
  }]);
  } else if (newDictionaries.some((dictionary) => dictionary.key === statusObj.dictionary.key)) {
  statusObj.status = "pushed";
  successfullyPushedDictionaries.push(statusObj.dictionary);
- logger.update([{
+ logger$1.update([{
  dictionaryKey: statusObj.dictionary.key,
  status: "pushed"
  }]);
@@ -106,14 +106,14 @@ const push = async (options) => {
  statusObj.status = "error";
  statusObj.error = error;
  statusObj.errorMessage = `Error pushing dictionary ${statusObj.dictionary.key}: ${error}`;
- logger.update([{
+ logger$1.update([{
  dictionaryKey: statusObj.dictionary.key,
  status: "error"
  }]);
  }
  };
  await parallelize(dictionariesStatuses, processDictionary, 5);
- logger.finish();
+ logger$1.finish();
  for (const dictionaryStatus of dictionariesStatuses) {
  const { icon, color } = getIconAndColor(dictionaryStatus.status);
  appLogger(` - ${dictionaryStatus.dictionary.key} ${ANSIColors.GREY}[${color}${icon} ${dictionaryStatus.status}${ANSIColors.GREY}]${ANSIColors.RESET}`);
package/dist/esm/push/push.mjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"push.mjs","names":["dictionaries: Dictionary[]","existingDictionariesKeys: string[]","dictionariesStatuses: DictionariesStatus[]","successfullyPushedDictionaries: Dictionary[]","filePathsSet: Set<string>"],"sources":["../../../src/push/push.ts"],"sourcesContent":["import * as fsPromises from 'node:fs/promises';\nimport { join } from 'node:path';\nimport * as readline from 'node:readline';\nimport { getIntlayerAPIProxy } from '@intlayer/api';\nimport {\n formatPath,\n type ListGitFilesOptions,\n listGitFiles,\n parallelize,\n prepareIntlayer,\n writeContentDeclaration,\n} from '@intlayer/chokidar';\nimport {\n ANSIColors,\n type GetConfigurationOptions,\n getAppLogger,\n getConfiguration,\n} from '@intlayer/config';\nimport type { Dictionary } from '@intlayer/types';\nimport { getUnmergedDictionaries } from '@intlayer/unmerged-dictionaries-entry';\nimport { PushLogger, type PushStatus } from '../pushLog';\nimport { checkCMSAuth } from '../utils/checkAccess';\n\ntype PushOptions = {\n deleteLocaleDictionary?: boolean;\n keepLocaleDictionary?: boolean;\n dictionaries?: string[];\n gitOptions?: ListGitFilesOptions;\n configOptions?: GetConfigurationOptions;\n build?: boolean;\n};\n\ntype DictionariesStatus = {\n dictionary: Dictionary;\n status: 'pending' | 'pushing' | 'modified' | 'pushed' | 'unknown' | 'error';\n error?: Error;\n errorMessage?: string;\n};\n\n// Print per-dictionary summary similar to loadDictionaries\nconst statusIconsAndColors = {\n pushed: { icon: '✔', color: ANSIColors.GREEN },\n modified: { icon: '✔', color: ANSIColors.GREEN },\n error: { icon: '✖', color: ANSIColors.RED },\n default: { icon: '⏲', color: ANSIColors.BLUE },\n};\n\nconst getIconAndColor = (status: DictionariesStatus['status']) => {\n return (\n statusIconsAndColors[status as keyof typeof statusIconsAndColors] ??\n statusIconsAndColors.default\n );\n};\n\n/**\n * Get all local dictionaries and push them simultaneously.\n */\nexport const push = async (options?: PushOptions): Promise<void> => {\n const config = getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(config, {\n config: {\n prefix: '',\n },\n });\n\n if (options?.build === true) {\n await prepareIntlayer(config, { forceRun: true });\n } else if (typeof options?.build === 'undefined') {\n await prepareIntlayer(config);\n }\n\n try {\n const hasCMSAuth = await checkCMSAuth(config);\n\n if (!hasCMSAuth) return;\n\n const intlayerAPI = getIntlayerAPIProxy(undefined, config);\n\n const unmergedDictionariesRecord = getUnmergedDictionaries(config);\n let dictionaries: Dictionary[] = Object.values(\n unmergedDictionariesRecord\n ).flat();\n const existingDictionariesKeys: string[] = Object.keys(\n unmergedDictionariesRecord\n );\n\n if (options?.dictionaries) {\n // Check if the provided dictionaries exist\n const noneExistingDictionariesOption = options.dictionaries.filter(\n (dictionaryId) => !existingDictionariesKeys.includes(dictionaryId)\n );\n\n if (noneExistingDictionariesOption.length > 0) {\n appLogger(\n `The following dictionaries do not exist: ${noneExistingDictionariesOption.join(\n ', '\n )} and have been ignored.`,\n {\n level: 'error',\n }\n );\n }\n\n // Filter the dictionaries from the provided list of IDs\n dictionaries = dictionaries.filter((dictionary) =>\n options.dictionaries?.includes(dictionary.key)\n );\n }\n\n if (options?.gitOptions) {\n const gitFiles = await listGitFiles(options.gitOptions);\n\n dictionaries = dictionaries.filter((dictionary) =>\n gitFiles.includes(\n 
join(config.content.baseDir, dictionary.filePath ?? '')\n )\n );\n }\n\n // Check if the dictionaries list is empty\n if (dictionaries.length === 0) {\n appLogger('No local dictionaries found', {\n level: 'error',\n });\n return;\n }\n\n appLogger('Pushing dictionaries:');\n\n // Prepare dictionaries statuses\n const dictionariesStatuses: DictionariesStatus[] = dictionaries.map(\n (dictionary) => ({\n dictionary,\n status: 'pending',\n })\n );\n\n // Initialize aggregated logger similar to loadDictionaries\n const logger = new PushLogger();\n logger.update(\n dictionariesStatuses.map<PushStatus>((s) => ({\n dictionaryKey: s.dictionary.key,\n status: 'pending',\n }))\n );\n\n const successfullyPushedDictionaries: Dictionary[] = [];\n\n const processDictionary = async (\n statusObj: DictionariesStatus\n ): Promise<void> => {\n statusObj.status = 'pushing';\n logger.update([\n { dictionaryKey: statusObj.dictionary.key, status: 'pushing' },\n ]);\n\n try {\n const pushResult = await intlayerAPI.dictionary.pushDictionaries([\n statusObj.dictionary,\n ]);\n\n const updatedDictionaries = pushResult.data?.updatedDictionaries ?? [];\n const newDictionaries = pushResult.data?.newDictionaries ?? [];\n\n const allDictionaries = [...updatedDictionaries, ...newDictionaries];\n\n for (const remoteDictionaryData of allDictionaries) {\n const localDictionary = unmergedDictionariesRecord[\n remoteDictionaryData.key\n ]?.find(\n (dictionary) => dictionary.localId === remoteDictionaryData.localId\n );\n\n if (!localDictionary) continue;\n\n await writeContentDeclaration(\n { ...localDictionary, id: remoteDictionaryData.id },\n config\n );\n }\n\n if (\n updatedDictionaries.some(\n (dictionary) => dictionary.key === statusObj.dictionary.key\n )\n ) {\n statusObj.status = 'modified';\n successfullyPushedDictionaries.push(statusObj.dictionary);\n logger.update([\n { dictionaryKey: statusObj.dictionary.key, status: 'modified' },\n ]);\n } else if (\n newDictionaries.some(\n (dictionary) => dictionary.key === statusObj.dictionary.key\n )\n ) {\n statusObj.status = 'pushed';\n successfullyPushedDictionaries.push(statusObj.dictionary);\n logger.update([\n { dictionaryKey: statusObj.dictionary.key, status: 'pushed' },\n ]);\n } else {\n statusObj.status = 'unknown';\n }\n } catch (error) {\n statusObj.status = 'error';\n statusObj.error = error as Error;\n statusObj.errorMessage = `Error pushing dictionary ${statusObj.dictionary.key}: ${error}`;\n logger.update([\n { dictionaryKey: statusObj.dictionary.key, status: 'error' },\n ]);\n }\n };\n\n // Process dictionaries in parallel with a concurrency limit (reuse parallelize)\n await parallelize(dictionariesStatuses, processDictionary, 5);\n\n // Stop the logger and render final state\n logger.finish();\n\n for (const dictionaryStatus of dictionariesStatuses) {\n const { icon, color } = getIconAndColor(dictionaryStatus.status);\n appLogger(\n ` - ${dictionaryStatus.dictionary.key} ${ANSIColors.GREY}[${color}${icon} ${dictionaryStatus.status}${ANSIColors.GREY}]${ANSIColors.RESET}`\n );\n }\n\n // Output any error messages\n for (const statusObj of dictionariesStatuses) {\n if (statusObj.errorMessage) {\n appLogger(statusObj.errorMessage, {\n level: 'error',\n });\n }\n }\n\n // Handle delete or keep options\n const deleteOption = options?.deleteLocaleDictionary;\n const keepOption = options?.keepLocaleDictionary;\n\n if (deleteOption && keepOption) {\n throw new Error(\n 'Cannot specify both --deleteLocaleDictionary and --keepLocaleDictionary options.'\n );\n }\n\n if 
(deleteOption) {\n // Delete only the successfully pushed dictionaries\n await deleteLocalDictionaries(successfullyPushedDictionaries, options);\n } else if (keepOption) {\n // Do nothing, keep the local dictionaries\n } else {\n // Ask the user\n const answer = await askUser(\n 'Do you want to delete the local dictionaries that were successfully pushed? (yes/no): '\n );\n if (answer.toLowerCase() === 'yes' || answer.toLowerCase() === 'y') {\n await deleteLocalDictionaries(successfullyPushedDictionaries, options);\n }\n }\n } catch (error) {\n appLogger(error, {\n level: 'error',\n });\n }\n};\n\nconst askUser = (question: string): Promise<string> => {\n const rl = readline.createInterface({\n input: process.stdin,\n output: process.stdout,\n });\n return new Promise((resolve) => {\n rl.question(question, (answer: string) => {\n rl.close();\n resolve(answer);\n });\n });\n};\n\nconst deleteLocalDictionaries = async (\n dictionariesToDelete: Dictionary[],\n options?: PushOptions\n): Promise<void> => {\n const config = getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(config, {\n config: {\n prefix: '',\n },\n });\n\n // Use a Set to collect all unique file paths\n const filePathsSet: Set<string> = new Set();\n\n for (const dictionary of dictionariesToDelete) {\n const { filePath } = dictionary;\n\n if (!filePath) {\n appLogger(`Dictionary ${dictionary.key} does not have a file path`, {\n level: 'error',\n });\n continue;\n }\n\n filePathsSet.add(filePath);\n }\n\n for (const filePath of filePathsSet) {\n try {\n const stats = await fsPromises.lstat(filePath);\n\n if (stats.isFile()) {\n await fsPromises.unlink(filePath);\n appLogger(`Deleted file ${formatPath(filePath)}`, {});\n } else if (stats.isDirectory()) {\n appLogger(`Path is a directory ${formatPath(filePath)}, skipping.`, {});\n } else {\n appLogger(\n `Unknown file type for ${formatPath(filePath)}, skipping.`,\n {}\n );\n }\n } catch (err) {\n appLogger(`Error deleting ${formatPath(filePath)}: ${err}`, {\n level: 'error',\n });\n }\n 
}\n};\n"],"mappings":";;;;;;;;;;;AAwCA,MAAM,uBAAuB;CAC3B,QAAQ;EAAE,MAAM;EAAK,OAAO,WAAW;EAAO;CAC9C,UAAU;EAAE,MAAM;EAAK,OAAO,WAAW;EAAO;CAChD,OAAO;EAAE,MAAM;EAAK,OAAO,WAAW;EAAK;CAC3C,SAAS;EAAE,MAAM;EAAK,OAAO,WAAW;EAAM;CAC/C;AAED,MAAM,mBAAmB,WAAyC;AAChE,QACE,qBAAqB,WACrB,qBAAqB;;;;;AAOzB,MAAa,OAAO,OAAO,YAAyC;CAClE,MAAM,SAAS,iBAAiB,SAAS,cAAc;CACvD,MAAM,YAAY,aAAa,QAAQ,EACrC,QAAQ,EACN,QAAQ,IACT,EACF,CAAC;AAEF,KAAI,SAAS,UAAU,KACrB,OAAM,gBAAgB,QAAQ,EAAE,UAAU,MAAM,CAAC;UACxC,OAAO,SAAS,UAAU,YACnC,OAAM,gBAAgB,OAAO;AAG/B,KAAI;AAGF,MAAI,CAFe,MAAM,aAAa,OAAO,CAE5B;EAEjB,MAAM,cAAc,oBAAoB,QAAW,OAAO;EAE1D,MAAM,6BAA6B,wBAAwB,OAAO;EAClE,IAAIA,eAA6B,OAAO,OACtC,2BACD,CAAC,MAAM;EACR,MAAMC,2BAAqC,OAAO,KAChD,2BACD;AAED,MAAI,SAAS,cAAc;GAEzB,MAAM,iCAAiC,QAAQ,aAAa,QACzD,iBAAiB,CAAC,yBAAyB,SAAS,aAAa,CACnE;AAED,OAAI,+BAA+B,SAAS,EAC1C,WACE,4CAA4C,+BAA+B,KACzE,KACD,CAAC,0BACF,EACE,OAAO,SACR,CACF;AAIH,kBAAe,aAAa,QAAQ,eAClC,QAAQ,cAAc,SAAS,WAAW,IAAI,CAC/C;;AAGH,MAAI,SAAS,YAAY;GACvB,MAAM,WAAW,MAAM,aAAa,QAAQ,WAAW;AAEvD,kBAAe,aAAa,QAAQ,eAClC,SAAS,SACP,KAAK,OAAO,QAAQ,SAAS,WAAW,YAAY,GAAG,CACxD,CACF;;AAIH,MAAI,aAAa,WAAW,GAAG;AAC7B,aAAU,+BAA+B,EACvC,OAAO,SACR,CAAC;AACF;;AAGF,YAAU,wBAAwB;EAGlC,MAAMC,uBAA6C,aAAa,KAC7D,gBAAgB;GACf;GACA,QAAQ;GACT,EACF;EAGD,MAAM,SAAS,IAAI,YAAY;AAC/B,SAAO,OACL,qBAAqB,KAAiB,OAAO;GAC3C,eAAe,EAAE,WAAW;GAC5B,QAAQ;GACT,EAAE,CACJ;EAED,MAAMC,iCAA+C,EAAE;EAEvD,MAAM,oBAAoB,OACxB,cACkB;AAClB,aAAU,SAAS;AACnB,UAAO,OAAO,CACZ;IAAE,eAAe,UAAU,WAAW;IAAK,QAAQ;IAAW,CAC/D,CAAC;AAEF,OAAI;IACF,MAAM,aAAa,MAAM,YAAY,WAAW,iBAAiB,CAC/D,UAAU,WACX,CAAC;IAEF,MAAM,sBAAsB,WAAW,MAAM,uBAAuB,EAAE;IACtE,MAAM,kBAAkB,WAAW,MAAM,mBAAmB,EAAE;IAE9D,MAAM,kBAAkB,CAAC,GAAG,qBAAqB,GAAG,gBAAgB;AAEpE,SAAK,MAAM,wBAAwB,iBAAiB;KAClD,MAAM,kBAAkB,2BACtB,qBAAqB,MACpB,MACA,eAAe,WAAW,YAAY,qBAAqB,QAC7D;AAED,SAAI,CAAC,gBAAiB;AAEtB,WAAM,wBACJ;MAAE,GAAG;MAAiB,IAAI,qBAAqB;MAAI,EACnD,OACD;;AAGH,QACE,oBAAoB,MACjB,eAAe,WAAW,QAAQ,UAAU,WAAW,IACzD,EACD;AACA,eAAU,SAAS;AACnB,oCAA+B,KAAK,UAAU,WAAW;AACzD,YAAO,OAAO,CACZ;MAAE,eAAe,UAAU,WAAW;MAAK,QAAQ;MAAY,CAChE,CAAC;eAEF,gBAAgB,MACb,eAAe,WAAW,QAAQ,UAAU,WAAW,IACzD,EACD;AACA,eAAU,SAAS;AACnB,oCAA+B,KAAK,UAAU,WAAW;AACzD,YAAO,OAAO,CACZ;MAAE,eAAe,UAAU,WAAW;MAAK,QAAQ;MAAU,CAC9D,CAAC;UAEF,WAAU,SAAS;YAEd,OAAO;AACd,cAAU,SAAS;AACnB,cAAU,QAAQ;AAClB,cAAU,eAAe,4BAA4B,UAAU,WAAW,IAAI,IAAI;AAClF,WAAO,OAAO,CACZ;KAAE,eAAe,UAAU,WAAW;KAAK,QAAQ;KAAS,CAC7D,CAAC;;;AAKN,QAAM,YAAY,sBAAsB,mBAAmB,EAAE;AAG7D,SAAO,QAAQ;AAEf,OAAK,MAAM,oBAAoB,sBAAsB;GACnD,MAAM,EAAE,MAAM,UAAU,gBAAgB,iBAAiB,OAAO;AAChE,aACE,MAAM,iBAAiB,WAAW,IAAI,GAAG,WAAW,KAAK,GAAG,QAAQ,KAAK,GAAG,iBAAiB,SAAS,WAAW,KAAK,GAAG,WAAW,QACrI;;AAIH,OAAK,MAAM,aAAa,qBACtB,KAAI,UAAU,aACZ,WAAU,UAAU,cAAc,EAChC,OAAO,SACR,CAAC;EAKN,MAAM,eAAe,SAAS;EAC9B,MAAM,aAAa,SAAS;AAE5B,MAAI,gBAAgB,WAClB,OAAM,IAAI,MACR,mFACD;AAGH,MAAI,aAEF,OAAM,wBAAwB,gCAAgC,QAAQ;WAC7D,YAAY,QAEhB;GAEL,MAAM,SAAS,MAAM,QACnB,yFACD;AACD,OAAI,OAAO,aAAa,KAAK,SAAS,OAAO,aAAa,KAAK,IAC7D,OAAM,wBAAwB,gCAAgC,QAAQ;;UAGnE,OAAO;AACd,YAAU,OAAO,EACf,OAAO,SACR,CAAC;;;AAIN,MAAM,WAAW,aAAsC;CACrD,MAAM,KAAK,SAAS,gBAAgB;EAClC,OAAO,QAAQ;EACf,QAAQ,QAAQ;EACjB,CAAC;AACF,QAAO,IAAI,SAAS,cAAY;AAC9B,KAAG,SAAS,WAAW,WAAmB;AACxC,MAAG,OAAO;AACV,aAAQ,OAAO;IACf;GACF;;AAGJ,MAAM,0BAA0B,OAC9B,sBACA,YACkB;CAElB,MAAM,YAAY,aADH,iBAAiB,SAAS,cAAc,EAChB,EACrC,QAAQ,EACN,QAAQ,IACT,EACF,CAAC;CAGF,MAAMC,+BAA4B,IAAI,KAAK;AAE3C,MAAK,MAAM,cAAc,sBAAsB;EAC7C,MAAM,EAAE,aAAa;AAErB,MAAI,CAAC,UAAU;AACb,aAAU,cAAc,WAAW,IAAI,6BAA6B,EAClE,OAAO,SACR,CAAC;AACF;;AAGF,eAAa,IAAI,SAAS;;AAG5B,MAAK,MAAM,YAAY,aACrB,KAAI;EACF,MAAM,QAAQ,MAAM,WAAW,MAAM,SAAS;AAE9C,MAAI,MAAM,QAAQ,EAAE;AAClB,SAAM,WAAW,OAAO,SAAS;AACjC
,aAAU,gBAAgB,WAAW,SAAS,IAAI,EAAE,CAAC;aAC5C,MAAM,aAAa,CAC5B,WAAU,uBAAuB,WAAW,SAAS,CAAC,cAAc,EAAE,CAAC;MAEvE,WACE,yBAAyB,WAAW,SAAS,CAAC,cAC9C,EAAE,CACH;UAEI,KAAK;AACZ,YAAU,kBAAkB,WAAW,SAAS,CAAC,IAAI,OAAO,EAC1D,OAAO,SACR,CAAC"}
+ {"version":3,"file":"push.mjs","names":["dictionaries: Dictionary[]","existingDictionariesKeys: string[]","dictionariesStatuses: DictionariesStatus[]","logger","successfullyPushedDictionaries: Dictionary[]","filePathsSet: Set<string>"],"sources":["../../../src/push/push.ts"],"sourcesContent":["import * as fsPromises from 'node:fs/promises';\nimport { join } from 'node:path';\nimport * as readline from 'node:readline';\nimport { getIntlayerAPIProxy } from '@intlayer/api';\nimport {\n formatPath,\n type ListGitFilesOptions,\n listGitFiles,\n parallelize,\n prepareIntlayer,\n writeContentDeclaration,\n} from '@intlayer/chokidar';\nimport {\n ANSIColors,\n type GetConfigurationOptions,\n getAppLogger,\n getConfiguration,\n} from '@intlayer/config';\nimport type { Dictionary } from '@intlayer/types';\nimport { getUnmergedDictionaries } from '@intlayer/unmerged-dictionaries-entry';\nimport { PushLogger, type PushStatus } from '../pushLog';\nimport { checkCMSAuth } from '../utils/checkAccess';\n\ntype PushOptions = {\n deleteLocaleDictionary?: boolean;\n keepLocaleDictionary?: boolean;\n dictionaries?: string[];\n gitOptions?: ListGitFilesOptions;\n configOptions?: GetConfigurationOptions;\n build?: boolean;\n};\n\ntype DictionariesStatus = {\n dictionary: Dictionary;\n status: 'pending' | 'pushing' | 'modified' | 'pushed' | 'unknown' | 'error';\n error?: Error;\n errorMessage?: string;\n};\n\n// Print per-dictionary summary similar to loadDictionaries\nconst statusIconsAndColors = {\n pushed: { icon: '✔', color: ANSIColors.GREEN },\n modified: { icon: '✔', color: ANSIColors.GREEN },\n error: { icon: '✖', color: ANSIColors.RED },\n default: { icon: '⏲', color: ANSIColors.BLUE },\n};\n\nconst getIconAndColor = (status: DictionariesStatus['status']) => {\n return (\n statusIconsAndColors[status as keyof typeof statusIconsAndColors] ??\n statusIconsAndColors.default\n );\n};\n\n/**\n * Get all local dictionaries and push them simultaneously.\n */\nexport const push = async (options?: PushOptions): Promise<void> => {\n const config = getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(config, {\n config: {\n prefix: '',\n },\n });\n\n if (options?.build === true) {\n await prepareIntlayer(config, { forceRun: true });\n } else if (typeof options?.build === 'undefined') {\n await prepareIntlayer(config);\n }\n\n try {\n const hasCMSAuth = await checkCMSAuth(config);\n\n if (!hasCMSAuth) return;\n\n const intlayerAPI = getIntlayerAPIProxy(undefined, config);\n\n const unmergedDictionariesRecord = getUnmergedDictionaries(config);\n let dictionaries: Dictionary[] = Object.values(\n unmergedDictionariesRecord\n ).flat();\n const existingDictionariesKeys: string[] = Object.keys(\n unmergedDictionariesRecord\n );\n\n if (options?.dictionaries) {\n // Check if the provided dictionaries exist\n const noneExistingDictionariesOption = options.dictionaries.filter(\n (dictionaryId) => !existingDictionariesKeys.includes(dictionaryId)\n );\n\n if (noneExistingDictionariesOption.length > 0) {\n appLogger(\n `The following dictionaries do not exist: ${noneExistingDictionariesOption.join(\n ', '\n )} and have been ignored.`,\n {\n level: 'error',\n }\n );\n }\n\n // Filter the dictionaries from the provided list of IDs\n dictionaries = dictionaries.filter((dictionary) =>\n options.dictionaries?.includes(dictionary.key)\n );\n }\n\n if (options?.gitOptions) {\n const gitFiles = await listGitFiles(options.gitOptions);\n\n dictionaries = dictionaries.filter((dictionary) =>\n gitFiles.includes(\n 
join(config.content.baseDir, dictionary.filePath ?? '')\n )\n );\n }\n\n // Check if the dictionaries list is empty\n if (dictionaries.length === 0) {\n appLogger('No local dictionaries found', {\n level: 'error',\n });\n return;\n }\n\n appLogger('Pushing dictionaries:');\n\n // Prepare dictionaries statuses\n const dictionariesStatuses: DictionariesStatus[] = dictionaries.map(\n (dictionary) => ({\n dictionary,\n status: 'pending',\n })\n );\n\n // Initialize aggregated logger similar to loadDictionaries\n const logger = new PushLogger();\n logger.update(\n dictionariesStatuses.map<PushStatus>((s) => ({\n dictionaryKey: s.dictionary.key,\n status: 'pending',\n }))\n );\n\n const successfullyPushedDictionaries: Dictionary[] = [];\n\n const processDictionary = async (\n statusObj: DictionariesStatus\n ): Promise<void> => {\n statusObj.status = 'pushing';\n logger.update([\n { dictionaryKey: statusObj.dictionary.key, status: 'pushing' },\n ]);\n\n try {\n const pushResult = await intlayerAPI.dictionary.pushDictionaries([\n statusObj.dictionary,\n ]);\n\n const updatedDictionaries = pushResult.data?.updatedDictionaries ?? [];\n const newDictionaries = pushResult.data?.newDictionaries ?? [];\n\n const allDictionaries = [...updatedDictionaries, ...newDictionaries];\n\n for (const remoteDictionaryData of allDictionaries) {\n const localDictionary = unmergedDictionariesRecord[\n remoteDictionaryData.key\n ]?.find(\n (dictionary) => dictionary.localId === remoteDictionaryData.localId\n );\n\n if (!localDictionary) continue;\n\n await writeContentDeclaration(\n { ...localDictionary, id: remoteDictionaryData.id },\n config\n );\n }\n\n if (\n updatedDictionaries.some(\n (dictionary) => dictionary.key === statusObj.dictionary.key\n )\n ) {\n statusObj.status = 'modified';\n successfullyPushedDictionaries.push(statusObj.dictionary);\n logger.update([\n { dictionaryKey: statusObj.dictionary.key, status: 'modified' },\n ]);\n } else if (\n newDictionaries.some(\n (dictionary) => dictionary.key === statusObj.dictionary.key\n )\n ) {\n statusObj.status = 'pushed';\n successfullyPushedDictionaries.push(statusObj.dictionary);\n logger.update([\n { dictionaryKey: statusObj.dictionary.key, status: 'pushed' },\n ]);\n } else {\n statusObj.status = 'unknown';\n }\n } catch (error) {\n statusObj.status = 'error';\n statusObj.error = error as Error;\n statusObj.errorMessage = `Error pushing dictionary ${statusObj.dictionary.key}: ${error}`;\n logger.update([\n { dictionaryKey: statusObj.dictionary.key, status: 'error' },\n ]);\n }\n };\n\n // Process dictionaries in parallel with a concurrency limit (reuse parallelize)\n await parallelize(dictionariesStatuses, processDictionary, 5);\n\n // Stop the logger and render final state\n logger.finish();\n\n for (const dictionaryStatus of dictionariesStatuses) {\n const { icon, color } = getIconAndColor(dictionaryStatus.status);\n appLogger(\n ` - ${dictionaryStatus.dictionary.key} ${ANSIColors.GREY}[${color}${icon} ${dictionaryStatus.status}${ANSIColors.GREY}]${ANSIColors.RESET}`\n );\n }\n\n // Output any error messages\n for (const statusObj of dictionariesStatuses) {\n if (statusObj.errorMessage) {\n appLogger(statusObj.errorMessage, {\n level: 'error',\n });\n }\n }\n\n // Handle delete or keep options\n const deleteOption = options?.deleteLocaleDictionary;\n const keepOption = options?.keepLocaleDictionary;\n\n if (deleteOption && keepOption) {\n throw new Error(\n 'Cannot specify both --deleteLocaleDictionary and --keepLocaleDictionary options.'\n );\n }\n\n if 
(deleteOption) {\n // Delete only the successfully pushed dictionaries\n await deleteLocalDictionaries(successfullyPushedDictionaries, options);\n } else if (keepOption) {\n // Do nothing, keep the local dictionaries\n } else {\n // Ask the user\n const answer = await askUser(\n 'Do you want to delete the local dictionaries that were successfully pushed? (yes/no): '\n );\n if (answer.toLowerCase() === 'yes' || answer.toLowerCase() === 'y') {\n await deleteLocalDictionaries(successfullyPushedDictionaries, options);\n }\n }\n } catch (error) {\n appLogger(error, {\n level: 'error',\n });\n }\n};\n\nconst askUser = (question: string): Promise<string> => {\n const rl = readline.createInterface({\n input: process.stdin,\n output: process.stdout,\n });\n return new Promise((resolve) => {\n rl.question(question, (answer: string) => {\n rl.close();\n resolve(answer);\n });\n });\n};\n\nconst deleteLocalDictionaries = async (\n dictionariesToDelete: Dictionary[],\n options?: PushOptions\n): Promise<void> => {\n const config = getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(config, {\n config: {\n prefix: '',\n },\n });\n\n // Use a Set to collect all unique file paths\n const filePathsSet: Set<string> = new Set();\n\n for (const dictionary of dictionariesToDelete) {\n const { filePath } = dictionary;\n\n if (!filePath) {\n appLogger(`Dictionary ${dictionary.key} does not have a file path`, {\n level: 'error',\n });\n continue;\n }\n\n filePathsSet.add(filePath);\n }\n\n for (const filePath of filePathsSet) {\n try {\n const stats = await fsPromises.lstat(filePath);\n\n if (stats.isFile()) {\n await fsPromises.unlink(filePath);\n appLogger(`Deleted file ${formatPath(filePath)}`, {});\n } else if (stats.isDirectory()) {\n appLogger(`Path is a directory ${formatPath(filePath)}, skipping.`, {});\n } else {\n appLogger(\n `Unknown file type for ${formatPath(filePath)}, skipping.`,\n {}\n );\n }\n } catch (err) {\n appLogger(`Error deleting ${formatPath(filePath)}: ${err}`, {\n level: 'error',\n });\n }\n 
}\n};\n"],"mappings":";;;;;;;;;;;AAwCA,MAAM,uBAAuB;CAC3B,QAAQ;EAAE,MAAM;EAAK,OAAO,WAAW;EAAO;CAC9C,UAAU;EAAE,MAAM;EAAK,OAAO,WAAW;EAAO;CAChD,OAAO;EAAE,MAAM;EAAK,OAAO,WAAW;EAAK;CAC3C,SAAS;EAAE,MAAM;EAAK,OAAO,WAAW;EAAM;CAC/C;AAED,MAAM,mBAAmB,WAAyC;AAChE,QACE,qBAAqB,WACrB,qBAAqB;;;;;AAOzB,MAAa,OAAO,OAAO,YAAyC;CAClE,MAAM,SAAS,iBAAiB,SAAS,cAAc;CACvD,MAAM,YAAY,aAAa,QAAQ,EACrC,QAAQ,EACN,QAAQ,IACT,EACF,CAAC;AAEF,KAAI,SAAS,UAAU,KACrB,OAAM,gBAAgB,QAAQ,EAAE,UAAU,MAAM,CAAC;UACxC,OAAO,SAAS,UAAU,YACnC,OAAM,gBAAgB,OAAO;AAG/B,KAAI;AAGF,MAAI,CAFe,MAAM,aAAa,OAAO,CAE5B;EAEjB,MAAM,cAAc,oBAAoB,QAAW,OAAO;EAE1D,MAAM,6BAA6B,wBAAwB,OAAO;EAClE,IAAIA,eAA6B,OAAO,OACtC,2BACD,CAAC,MAAM;EACR,MAAMC,2BAAqC,OAAO,KAChD,2BACD;AAED,MAAI,SAAS,cAAc;GAEzB,MAAM,iCAAiC,QAAQ,aAAa,QACzD,iBAAiB,CAAC,yBAAyB,SAAS,aAAa,CACnE;AAED,OAAI,+BAA+B,SAAS,EAC1C,WACE,4CAA4C,+BAA+B,KACzE,KACD,CAAC,0BACF,EACE,OAAO,SACR,CACF;AAIH,kBAAe,aAAa,QAAQ,eAClC,QAAQ,cAAc,SAAS,WAAW,IAAI,CAC/C;;AAGH,MAAI,SAAS,YAAY;GACvB,MAAM,WAAW,MAAM,aAAa,QAAQ,WAAW;AAEvD,kBAAe,aAAa,QAAQ,eAClC,SAAS,SACP,KAAK,OAAO,QAAQ,SAAS,WAAW,YAAY,GAAG,CACxD,CACF;;AAIH,MAAI,aAAa,WAAW,GAAG;AAC7B,aAAU,+BAA+B,EACvC,OAAO,SACR,CAAC;AACF;;AAGF,YAAU,wBAAwB;EAGlC,MAAMC,uBAA6C,aAAa,KAC7D,gBAAgB;GACf;GACA,QAAQ;GACT,EACF;EAGD,MAAMC,WAAS,IAAI,YAAY;AAC/B,WAAO,OACL,qBAAqB,KAAiB,OAAO;GAC3C,eAAe,EAAE,WAAW;GAC5B,QAAQ;GACT,EAAE,CACJ;EAED,MAAMC,iCAA+C,EAAE;EAEvD,MAAM,oBAAoB,OACxB,cACkB;AAClB,aAAU,SAAS;AACnB,YAAO,OAAO,CACZ;IAAE,eAAe,UAAU,WAAW;IAAK,QAAQ;IAAW,CAC/D,CAAC;AAEF,OAAI;IACF,MAAM,aAAa,MAAM,YAAY,WAAW,iBAAiB,CAC/D,UAAU,WACX,CAAC;IAEF,MAAM,sBAAsB,WAAW,MAAM,uBAAuB,EAAE;IACtE,MAAM,kBAAkB,WAAW,MAAM,mBAAmB,EAAE;IAE9D,MAAM,kBAAkB,CAAC,GAAG,qBAAqB,GAAG,gBAAgB;AAEpE,SAAK,MAAM,wBAAwB,iBAAiB;KAClD,MAAM,kBAAkB,2BACtB,qBAAqB,MACpB,MACA,eAAe,WAAW,YAAY,qBAAqB,QAC7D;AAED,SAAI,CAAC,gBAAiB;AAEtB,WAAM,wBACJ;MAAE,GAAG;MAAiB,IAAI,qBAAqB;MAAI,EACnD,OACD;;AAGH,QACE,oBAAoB,MACjB,eAAe,WAAW,QAAQ,UAAU,WAAW,IACzD,EACD;AACA,eAAU,SAAS;AACnB,oCAA+B,KAAK,UAAU,WAAW;AACzD,cAAO,OAAO,CACZ;MAAE,eAAe,UAAU,WAAW;MAAK,QAAQ;MAAY,CAChE,CAAC;eAEF,gBAAgB,MACb,eAAe,WAAW,QAAQ,UAAU,WAAW,IACzD,EACD;AACA,eAAU,SAAS;AACnB,oCAA+B,KAAK,UAAU,WAAW;AACzD,cAAO,OAAO,CACZ;MAAE,eAAe,UAAU,WAAW;MAAK,QAAQ;MAAU,CAC9D,CAAC;UAEF,WAAU,SAAS;YAEd,OAAO;AACd,cAAU,SAAS;AACnB,cAAU,QAAQ;AAClB,cAAU,eAAe,4BAA4B,UAAU,WAAW,IAAI,IAAI;AAClF,aAAO,OAAO,CACZ;KAAE,eAAe,UAAU,WAAW;KAAK,QAAQ;KAAS,CAC7D,CAAC;;;AAKN,QAAM,YAAY,sBAAsB,mBAAmB,EAAE;AAG7D,WAAO,QAAQ;AAEf,OAAK,MAAM,oBAAoB,sBAAsB;GACnD,MAAM,EAAE,MAAM,UAAU,gBAAgB,iBAAiB,OAAO;AAChE,aACE,MAAM,iBAAiB,WAAW,IAAI,GAAG,WAAW,KAAK,GAAG,QAAQ,KAAK,GAAG,iBAAiB,SAAS,WAAW,KAAK,GAAG,WAAW,QACrI;;AAIH,OAAK,MAAM,aAAa,qBACtB,KAAI,UAAU,aACZ,WAAU,UAAU,cAAc,EAChC,OAAO,SACR,CAAC;EAKN,MAAM,eAAe,SAAS;EAC9B,MAAM,aAAa,SAAS;AAE5B,MAAI,gBAAgB,WAClB,OAAM,IAAI,MACR,mFACD;AAGH,MAAI,aAEF,OAAM,wBAAwB,gCAAgC,QAAQ;WAC7D,YAAY,QAEhB;GAEL,MAAM,SAAS,MAAM,QACnB,yFACD;AACD,OAAI,OAAO,aAAa,KAAK,SAAS,OAAO,aAAa,KAAK,IAC7D,OAAM,wBAAwB,gCAAgC,QAAQ;;UAGnE,OAAO;AACd,YAAU,OAAO,EACf,OAAO,SACR,CAAC;;;AAIN,MAAM,WAAW,aAAsC;CACrD,MAAM,KAAK,SAAS,gBAAgB;EAClC,OAAO,QAAQ;EACf,QAAQ,QAAQ;EACjB,CAAC;AACF,QAAO,IAAI,SAAS,cAAY;AAC9B,KAAG,SAAS,WAAW,WAAmB;AACxC,MAAG,OAAO;AACV,aAAQ,OAAO;IACf;GACF;;AAGJ,MAAM,0BAA0B,OAC9B,sBACA,YACkB;CAElB,MAAM,YAAY,aADH,iBAAiB,SAAS,cAAc,EAChB,EACrC,QAAQ,EACN,QAAQ,IACT,EACF,CAAC;CAGF,MAAMC,+BAA4B,IAAI,KAAK;AAE3C,MAAK,MAAM,cAAc,sBAAsB;EAC7C,MAAM,EAAE,aAAa;AAErB,MAAI,CAAC,UAAU;AACb,aAAU,cAAc,WAAW,IAAI,6BAA6B,EAClE,OAAO,SACR,CAAC;AACF;;AAGF,eAAa,IAAI,SAAS;;AAG5B,MAAK,MAAM,YAAY,aACrB,KAAI;EACF,MAAM,QAAQ,MAAM,WAAW,MAAM,SAAS;AAE9C,MAAI,MAAM,QAAQ,EAAE;AAClB,SAAM,WAAW,OAAO,SAAS;AACj
C,aAAU,gBAAgB,WAAW,SAAS,IAAI,EAAE,CAAC;aAC5C,MAAM,aAAa,CAC5B,WAAU,uBAAuB,WAAW,SAAS,CAAC,cAAc,EAAE,CAAC;MAEvE,WACE,yBAAyB,WAAW,SAAS,CAAC,cAC9C,EAAE,CACH;UAEI,KAAK;AACZ,YAAU,kBAAkB,WAAW,SAAS,CAAC,IAAI,OAAO,EAC1D,OAAO,SACR,CAAC"}
@@ -1,14 +1,14 @@
- import { setupAI } from "./utils/setupAI.mjs";
+ import { setupAI } from "../utils/setupAI.mjs";
+ import { checkFileModifiedRange } from "../utils/checkFileModifiedRange.mjs";
+ import { getOutputFilePath } from "../utils/getOutputFilePath.mjs";
  import { reviewFileBlockAware } from "./reviewDocBlockAware.mjs";
- import { checkFileModifiedRange } from "./utils/checkFileModifiedRange.mjs";
- import { getOutputFilePath } from "./utils/getOutputFilePath.mjs";
  import { formatLocale, formatPath, listGitFiles, listGitLines, parallelize } from "@intlayer/chokidar";
  import { ANSIColors, colorize, colorizeNumber, getAppLogger, getConfiguration } from "@intlayer/config";
  import { join, relative } from "node:path";
  import { existsSync } from "node:fs";
  import fg from "fast-glob";

- //#region src/reviewDoc.ts
+ //#region src/reviewDoc/reviewDoc.ts
  /**
  * Main audit function: scans all .md files in "en/" (unless you specified DOC_LIST),
  * then audits them to each locale in LOCALE_LIST.
@@ -41,14 +41,16 @@ const reviewDoc = async ({ docPattern, locales, excludedGlobPattern, baseLocale,
  appLogger(`${colorize("⊘", ANSIColors.YELLOW)} File ${formatPath(relativePath)} already exists, skipping.`);
  return;
  }
- const fileModificationData = checkFileModifiedRange(outputFilePath, {
- skipIfModifiedBefore,
- skipIfModifiedAfter
- });
- if (fileModificationData.isSkipped) {
- appLogger(fileModificationData.message);
- return;
- }
+ if (existsSync(outputFilePath)) {
+ const fileModificationData = checkFileModifiedRange(outputFilePath, {
+ skipIfModifiedBefore,
+ skipIfModifiedAfter
+ });
+ if (fileModificationData.isSkipped) {
+ appLogger(fileModificationData.message);
+ return;
+ }
+ } else if (skipIfModifiedBefore || skipIfModifiedAfter) appLogger(`${colorize("!", ANSIColors.YELLOW)} File ${formatPath(outputFilePath)} does not exist, skipping modification date check.`);
  let changedLines;
  if (gitOptions) {
  const gitChangedLines = await listGitLines(absoluteBaseFilePath, gitOptions);
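The behavioural fix in this hunk: 7.5.10 called checkFileModifiedRange unconditionally, so a not-yet-created output file was checked against a modification date it does not have. 7.5.12 only runs the check when the file exists, and logs a warning when a date filter was requested for a missing file. A minimal TypeScript sketch of the new guard, with checkFileModifiedRange declared rather than imported (its { isSkipped, message } return shape is taken from this diff):

import { existsSync } from "node:fs";

// Assumed shape of the internal helper, per the calls shown in this hunk.
declare function checkFileModifiedRange(
  path: string,
  range: {
    skipIfModifiedBefore?: number | string | Date;
    skipIfModifiedAfter?: number | string | Date;
  }
): { isSkipped: boolean; message: string };

// Returns true when the caller should skip this output file.
const shouldSkipByModificationDate = (
  outputFilePath: string,
  skipIfModifiedBefore: number | string | Date | undefined,
  skipIfModifiedAfter: number | string | Date | undefined,
  log: (message: string) => void
): boolean => {
  if (existsSync(outputFilePath)) {
    const result = checkFileModifiedRange(outputFilePath, {
      skipIfModifiedBefore,
      skipIfModifiedAfter,
    });
    if (result.isSkipped) {
      log(result.message);
      return true;
    }
  } else if (skipIfModifiedBefore || skipIfModifiedAfter) {
    // No file on disk means no mtime to compare; warn instead of skipping.
    log(`File ${outputFilePath} does not exist, skipping modification date check.`);
  }
  return false;
};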
@@ -0,0 +1 @@
+ {"version":3,"file":"reviewDoc.mjs","names":["docList: string[]","changedLines: number[] | undefined"],"sources":["../../../src/reviewDoc/reviewDoc.ts"],"sourcesContent":["import { existsSync } from 'node:fs';\nimport { join, relative } from 'node:path';\nimport type { AIOptions } from '@intlayer/api';\nimport {\n formatLocale,\n formatPath,\n type ListGitFilesOptions,\n listGitFiles,\n listGitLines,\n parallelize,\n} from '@intlayer/chokidar';\nimport {\n ANSIColors,\n colorize,\n colorizeNumber,\n type GetConfigurationOptions,\n getAppLogger,\n getConfiguration,\n} from '@intlayer/config';\nimport type { Locale } from '@intlayer/types';\nimport fg from 'fast-glob';\nimport { checkFileModifiedRange } from '../utils/checkFileModifiedRange';\nimport { getOutputFilePath } from '../utils/getOutputFilePath';\nimport { setupAI } from '../utils/setupAI';\nimport { reviewFileBlockAware } from './reviewDocBlockAware';\n\ntype ReviewDocOptions = {\n docPattern: string[];\n locales: Locale[];\n excludedGlobPattern: string[];\n baseLocale: Locale;\n aiOptions?: AIOptions;\n nbSimultaneousFileProcessed?: number;\n configOptions?: GetConfigurationOptions;\n customInstructions?: string;\n skipIfModifiedBefore?: number | string | Date;\n skipIfModifiedAfter?: number | string | Date;\n skipIfExists?: boolean;\n gitOptions?: ListGitFilesOptions;\n};\n\n/**\n * Main audit function: scans all .md files in \"en/\" (unless you specified DOC_LIST),\n * then audits them to each locale in LOCALE_LIST.\n */\nexport const reviewDoc = async ({\n docPattern,\n locales,\n excludedGlobPattern,\n baseLocale,\n aiOptions,\n nbSimultaneousFileProcessed,\n configOptions,\n customInstructions,\n skipIfModifiedBefore,\n skipIfModifiedAfter,\n skipIfExists,\n gitOptions,\n}: ReviewDocOptions) => {\n const configuration = getConfiguration(configOptions);\n const appLogger = getAppLogger(configuration);\n\n const aiResult = await setupAI(configuration, aiOptions);\n\n if (!aiResult?.hasAIAccess) return;\n\n const { aiClient, aiConfig } = aiResult;\n\n if (nbSimultaneousFileProcessed && nbSimultaneousFileProcessed > 10) {\n appLogger(\n `Warning: nbSimultaneousFileProcessed is set to ${nbSimultaneousFileProcessed}, which is greater than 10. 
Setting it to 10.`\n );\n nbSimultaneousFileProcessed = 10; // Limit the number of simultaneous file processed to 10\n }\n\n let docList: string[] = await fg(docPattern, {\n ignore: excludedGlobPattern,\n });\n\n if (gitOptions) {\n const gitChangedFiles = await listGitFiles(gitOptions);\n\n if (gitChangedFiles) {\n // Convert dictionary file paths to be relative to git root for comparison\n\n // Filter dictionaries based on git changed files\n docList = docList.filter((path) =>\n gitChangedFiles.some((gitFile) => join(process.cwd(), path) === gitFile)\n );\n }\n }\n\n // OAuth handled by API proxy internally\n\n appLogger(`Base locale is ${formatLocale(baseLocale)}`);\n appLogger(\n `Reviewing ${colorizeNumber(locales.length)} locales: [ ${formatLocale(locales)} ]`\n );\n\n appLogger(`Reviewing ${colorizeNumber(docList.length)} files:`);\n appLogger(docList.map((path) => ` - ${formatPath(path)}\\n`));\n\n // Create all tasks to be processed\n const allTasks = docList.flatMap((docPath) =>\n locales.map((locale) => async () => {\n appLogger(\n `Reviewing file: ${formatPath(docPath)} to ${formatLocale(locale)}`\n );\n\n const absoluteBaseFilePath = join(configuration.content.baseDir, docPath);\n const outputFilePath = getOutputFilePath(\n absoluteBaseFilePath,\n locale,\n baseLocale\n );\n\n // Skip if file exists and skipIfExists option is enabled\n if (skipIfExists && existsSync(outputFilePath)) {\n const relativePath = relative(\n configuration.content.baseDir,\n outputFilePath\n );\n appLogger(\n `${colorize('⊘', ANSIColors.YELLOW)} File ${formatPath(relativePath)} already exists, skipping.`\n );\n return;\n }\n\n // Check modification range only if the file exists\n if (existsSync(outputFilePath)) {\n const fileModificationData = checkFileModifiedRange(outputFilePath, {\n skipIfModifiedBefore,\n skipIfModifiedAfter,\n });\n\n if (fileModificationData.isSkipped) {\n appLogger(fileModificationData.message);\n return;\n }\n } else if (skipIfModifiedBefore || skipIfModifiedAfter) {\n // Log if we intended to check modification time but couldn't because the file doesn't exist\n appLogger(\n `${colorize('!', ANSIColors.YELLOW)} File ${formatPath(outputFilePath)} does not exist, skipping modification date check.`\n );\n }\n\n let changedLines: number[] | undefined;\n // FIXED: Enable git optimization that was previously commented out\n if (gitOptions) {\n const gitChangedLines = await listGitLines(\n absoluteBaseFilePath,\n gitOptions\n );\n\n appLogger(`Git changed lines: ${gitChangedLines.join(', ')}`);\n changedLines = gitChangedLines;\n }\n\n await reviewFileBlockAware(\n absoluteBaseFilePath,\n outputFilePath,\n locale as Locale,\n baseLocale,\n aiOptions,\n configOptions,\n customInstructions,\n changedLines,\n aiClient,\n aiConfig\n );\n })\n );\n\n await parallelize(\n allTasks,\n (task) => task(),\n nbSimultaneousFileProcessed ?? 
3\n );\n};\n"],"mappings":";;;;;;;;;;;;;;;AA6CA,MAAa,YAAY,OAAO,EAC9B,YACA,SACA,qBACA,YACA,WACA,6BACA,eACA,oBACA,sBACA,qBACA,cACA,iBACsB;CACtB,MAAM,gBAAgB,iBAAiB,cAAc;CACrD,MAAM,YAAY,aAAa,cAAc;CAE7C,MAAM,WAAW,MAAM,QAAQ,eAAe,UAAU;AAExD,KAAI,CAAC,UAAU,YAAa;CAE5B,MAAM,EAAE,UAAU,aAAa;AAE/B,KAAI,+BAA+B,8BAA8B,IAAI;AACnE,YACE,kDAAkD,4BAA4B,+CAC/E;AACD,gCAA8B;;CAGhC,IAAIA,UAAoB,MAAM,GAAG,YAAY,EAC3C,QAAQ,qBACT,CAAC;AAEF,KAAI,YAAY;EACd,MAAM,kBAAkB,MAAM,aAAa,WAAW;AAEtD,MAAI,gBAIF,WAAU,QAAQ,QAAQ,SACxB,gBAAgB,MAAM,YAAY,KAAK,QAAQ,KAAK,EAAE,KAAK,KAAK,QAAQ,CACzE;;AAML,WAAU,kBAAkB,aAAa,WAAW,GAAG;AACvD,WACE,aAAa,eAAe,QAAQ,OAAO,CAAC,cAAc,aAAa,QAAQ,CAAC,IACjF;AAED,WAAU,aAAa,eAAe,QAAQ,OAAO,CAAC,SAAS;AAC/D,WAAU,QAAQ,KAAK,SAAS,MAAM,WAAW,KAAK,CAAC,IAAI,CAAC;AAyE5D,OAAM,YAtEW,QAAQ,SAAS,YAChC,QAAQ,KAAK,WAAW,YAAY;AAClC,YACE,mBAAmB,WAAW,QAAQ,CAAC,MAAM,aAAa,OAAO,GAClE;EAED,MAAM,uBAAuB,KAAK,cAAc,QAAQ,SAAS,QAAQ;EACzE,MAAM,iBAAiB,kBACrB,sBACA,QACA,WACD;AAGD,MAAI,gBAAgB,WAAW,eAAe,EAAE;GAC9C,MAAM,eAAe,SACnB,cAAc,QAAQ,SACtB,eACD;AACD,aACE,GAAG,SAAS,KAAK,WAAW,OAAO,CAAC,QAAQ,WAAW,aAAa,CAAC,4BACtE;AACD;;AAIF,MAAI,WAAW,eAAe,EAAE;GAC9B,MAAM,uBAAuB,uBAAuB,gBAAgB;IAClE;IACA;IACD,CAAC;AAEF,OAAI,qBAAqB,WAAW;AAClC,cAAU,qBAAqB,QAAQ;AACvC;;aAEO,wBAAwB,oBAEjC,WACE,GAAG,SAAS,KAAK,WAAW,OAAO,CAAC,QAAQ,WAAW,eAAe,CAAC,oDACxE;EAGH,IAAIC;AAEJ,MAAI,YAAY;GACd,MAAM,kBAAkB,MAAM,aAC5B,sBACA,WACD;AAED,aAAU,sBAAsB,gBAAgB,KAAK,KAAK,GAAG;AAC7D,kBAAe;;AAGjB,QAAM,qBACJ,sBACA,gBACA,QACA,YACA,WACA,eACA,oBACA,cACA,UACA,SACD;GACD,CACH,GAIE,SAAS,MAAM,EAChB,+BAA+B,EAChC"}
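The map above carries the full reviewDoc.ts source, which shows the fan-out strategy: every docPath × locale pair becomes one async task, and the tasks are drained with bounded concurrency (clamped to 10, defaulting to 3). A sketch of that pattern, where runParallel is a hypothetical stand-in for @intlayer/chokidar's parallelize:

type Task = () => Promise<void>;

// One task per (file, locale) pair, mirroring how reviewDoc builds allTasks.
const buildTasks = (
  docList: string[],
  locales: string[],
  reviewOne: (docPath: string, locale: string) => Promise<void>
): Task[] =>
  docList.flatMap((docPath) =>
    locales.map((locale) => () => reviewOne(docPath, locale))
  );

// Stand-in for parallelize: N workers pull from a single shared queue.
const runParallel = async (tasks: Task[], concurrency: number): Promise<void> => {
  const queue = [...tasks];
  const workers = Array.from(
    { length: Math.min(concurrency, queue.length) },
    async () => {
      for (let task = queue.shift(); task; task = queue.shift()) {
        await task();
      }
    }
  );
  await Promise.all(workers);
};

// reviewDoc caps nbSimultaneousFileProcessed at 10 and falls back to 3:
// await runParallel(buildTasks(docList, locales, reviewOne), Math.min(n ?? 3, 10));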
@@ -1,8 +1,9 @@
- import { readAsset } from "./_virtual/_utils_asset.mjs";
- import { mergeReviewedSegments } from "./translation-alignment/rebuildDocument.mjs";
- import { buildAlignmentPlan } from "./translation-alignment/pipeline.mjs";
- import { chunkInference } from "./utils/chunkInference.mjs";
- import { fixChunkStartEndChars } from "./utils/fixChunkStartEndChars.mjs";
+ import { readAsset } from "../_virtual/_utils_asset.mjs";
+ import { sanitizeChunk, validateTranslation } from "../translateDoc/validation.mjs";
+ import { mergeReviewedSegments } from "../translation-alignment/rebuildDocument.mjs";
+ import { buildAlignmentPlan } from "../translation-alignment/pipeline.mjs";
+ import { chunkInference } from "../utils/chunkInference.mjs";
+ import { fixChunkStartEndChars } from "../utils/fixChunkStartEndChars.mjs";
  import { formatLocale, formatPath } from "@intlayer/chokidar";
  import { ANSIColors, colon, colorize, colorizeNumber, getAppLogger, getConfiguration, retryManager } from "@intlayer/config";
  import { dirname } from "node:path";
@@ -11,7 +12,7 @@ import { mkdirSync, writeFileSync } from "node:fs";
  import { readFile } from "node:fs/promises";
  import { Locales } from "@intlayer/types";

- //#region src/reviewDocBlockAware.ts
+ //#region src/reviewDoc/reviewDocBlockAware.ts
  /**
  * Review a file using block-aware alignment.
  * This approach:
@@ -74,7 +75,10 @@ const reviewFileBlockAware = async (baseFilePath, outputFilePath, locale, baseLo
  }
  ], aiOptions, configuration, aiClient, aiConfig);
  applicationLogger(`${prefix}${colorizeNumber(result.tokenUsed)} tokens used - Block ${colorizeNumber(segmentNumber)} of ${colorizeNumber(segmentsToReview.length)}`);
- return fixChunkStartEndChars(result?.fileContent, englishBlock.content);
+ let processedChunk = sanitizeChunk(result?.fileContent, englishBlock.content);
+ processedChunk = fixChunkStartEndChars(processedChunk, englishBlock.content);
+ if (!validateTranslation(englishBlock.content, processedChunk, applicationLogger)) throw new Error("Validation failed for chunk (structure or length mismatch). Retrying...");
+ return processedChunk;
  })();
  reviewedSegmentsMap.set(segment.actionIndex, reviewedChunkResult);
  }
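The three added lines change the failure mode of a review pass: instead of writing whatever the model returned, the chunk is first sanitized, then character-aligned, then validated, and a validation failure throws so that the surrounding retryManager wrapper re-runs the inference. A sketch of that pipeline, with the three helpers declared rather than imported (their call shapes are taken from this hunk):

// Assumed signatures, matching the calls in this hunk.
declare function sanitizeChunk(chunk: string | undefined, reference: string): string;
declare function fixChunkStartEndChars(chunk: string, reference: string): string;
declare function validateTranslation(
  source: string,
  translated: string,
  log: (message: string) => void
): boolean;

const postProcessReviewedChunk = (
  rawOutput: string | undefined,
  sourceBlock: string,
  log: (message: string) => void
): string => {
  // 1. Strip model artifacts such as stray Markdown code-fence wrappers.
  let chunk = sanitizeChunk(rawOutput, sourceBlock);
  // 2. Re-align leading/trailing characters with the source block.
  chunk = fixChunkStartEndChars(chunk, sourceBlock);
  // 3. Reject structurally broken output; throwing makes retryManager retry.
  if (!validateTranslation(sourceBlock, chunk, log)) {
    throw new Error(
      "Validation failed for chunk (structure or length mismatch). Retrying..."
    );
  }
  return chunk;
};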
@@ -0,0 +1 @@
+ {"version":3,"file":"reviewDocBlockAware.mjs","names":[],"sources":["../../../src/reviewDoc/reviewDocBlockAware.ts"],"sourcesContent":["import { mkdirSync, writeFileSync } from 'node:fs';\nimport { readFile } from 'node:fs/promises';\nimport { dirname } from 'node:path';\nimport { readAsset } from 'utils:asset';\nimport type { AIConfig } from '@intlayer/ai';\nimport type { AIOptions } from '@intlayer/api';\nimport { formatLocale, formatPath } from '@intlayer/chokidar';\nimport {\n ANSIColors,\n colon,\n colorize,\n colorizeNumber,\n type GetConfigurationOptions,\n getAppLogger,\n getConfiguration,\n retryManager,\n} from '@intlayer/config';\nimport { getLocaleName } from '@intlayer/core';\nimport { type Locale, Locales } from '@intlayer/types';\nimport { sanitizeChunk, validateTranslation } from '../translateDoc/validation';\nimport {\n buildAlignmentPlan,\n mergeReviewedSegments,\n} from '../translation-alignment/pipeline';\nimport { chunkInference } from '../utils/chunkInference';\nimport { fixChunkStartEndChars } from '../utils/fixChunkStartEndChars';\nimport type { AIClient } from '../utils/setupAI';\n\n/**\n * Review a file using block-aware alignment.\n * This approach:\n * 1. Segments both English and French documents into semantic blocks\n * 2. Aligns blocks using structure (special chars, numbers) and context\n * 3. Detects which blocks changed, were added, or deleted\n * 4. Only sends changed/new blocks to AI for translation\n * 5. Handles reordering automatically\n */\nexport const reviewFileBlockAware = async (\n baseFilePath: string,\n outputFilePath: string,\n locale: Locale,\n baseLocale: Locale,\n aiOptions?: AIOptions,\n configOptions?: GetConfigurationOptions,\n customInstructions?: string,\n changedLines?: number[],\n aiClient?: AIClient,\n aiConfig?: AIConfig\n) => {\n const configuration = getConfiguration(configOptions);\n const applicationLogger = getAppLogger(configuration);\n\n const englishText = await readFile(baseFilePath, 'utf-8');\n const frenchText = await readFile(outputFilePath, 'utf-8').catch(() => '');\n\n const basePrompt = readAsset('./prompts/REVIEW_PROMPT.md', 'utf-8')\n .replaceAll('{{localeName}}', `${formatLocale(locale, false)}`)\n .replaceAll('{{baseLocaleName}}', `${formatLocale(baseLocale, false)}`)\n .replace('{{applicationContext}}', aiOptions?.applicationContext ?? '-')\n .replace('{{customInstructions}}', customInstructions ?? '-');\n\n const filePrefixText = `${ANSIColors.GREY_DARK}[${formatPath(baseFilePath)}${ANSIColors.GREY_DARK}] `;\n const filePrefix = [\n colon(filePrefixText, { colSize: 40 }),\n `→ ${ANSIColors.RESET}`,\n ].join('');\n const prefixText = `${ANSIColors.GREY_DARK}[${formatPath(baseFilePath)}${ANSIColors.GREY_DARK}][${formatLocale(locale)}${ANSIColors.GREY_DARK}] `;\n const prefix = [\n colon(prefixText, { colSize: 40 }),\n `→ ${ANSIColors.RESET}`,\n ].join('');\n\n // Build block-aware alignment and plan\n const { englishBlocks, frenchBlocks, plan, segmentsToReview } =\n buildAlignmentPlan({\n englishText,\n frenchText,\n changedLines,\n });\n\n applicationLogger(\n `${filePrefix}Block-aware alignment complete. 
Total blocks: EN=${colorizeNumber(englishBlocks.length)}, FR=${colorizeNumber(frenchBlocks.length)}`\n );\n applicationLogger(\n `${filePrefix}Actions: reuse=${colorizeNumber(plan.actions.filter((a) => a.kind === 'reuse').length)}, review=${colorizeNumber(plan.actions.filter((a) => a.kind === 'review').length)}, new=${colorizeNumber(plan.actions.filter((a) => a.kind === 'insert_new').length)}, delete=${colorizeNumber(plan.actions.filter((a) => a.kind === 'delete').length)}`\n );\n\n if (segmentsToReview.length === 0) {\n applicationLogger(\n `${filePrefix}No segments need review, reusing existing translation`\n );\n mkdirSync(dirname(outputFilePath), { recursive: true });\n writeFileSync(\n outputFilePath,\n mergeReviewedSegments(plan, frenchBlocks, new Map())\n );\n applicationLogger(\n `${colorize('✔', ANSIColors.GREEN)} File ${formatPath(outputFilePath)} updated successfully (no changes needed).`\n );\n return;\n }\n\n applicationLogger(\n `${filePrefix}Segments to review: ${colorizeNumber(segmentsToReview.length)}`\n );\n\n // Review segments that need AI translation\n const reviewedSegmentsMap = new Map<number, string>();\n\n for (const segment of segmentsToReview) {\n const segmentNumber = segmentsToReview.indexOf(segment) + 1;\n const englishBlock = segment.englishBlock;\n\n const getBaseChunkContextPrompt = () =>\n `**BLOCK ${segmentNumber} of ${segmentsToReview.length}** is the base block in ${formatLocale(baseLocale, false)} as reference.\\n` +\n `///chunksStart///\\n` +\n englishBlock.content +\n `///chunksEnd///`;\n\n const getFrenchChunkPrompt = () =>\n `**BLOCK ${segmentNumber} of ${segmentsToReview.length}** is the current block to review in ${formatLocale(locale, false)}.\\n` +\n `///chunksStart///\\n` +\n (segment.frenchBlockText ?? '') +\n `///chunksEnd///`;\n\n const reviewedChunkResult = await retryManager(async () => {\n const result = await chunkInference(\n [\n { role: 'system', content: basePrompt },\n { role: 'system', content: getBaseChunkContextPrompt() },\n { role: 'system', content: getFrenchChunkPrompt() },\n {\n role: 'system',\n content: `The next user message will be the **BLOCK ${colorizeNumber(segmentNumber)} of ${colorizeNumber(segmentsToReview.length)}** that should be translated in ${getLocaleName(locale, Locales.ENGLISH)} (${locale}).`,\n },\n { role: 'user', content: englishBlock.content },\n ],\n aiOptions,\n configuration,\n aiClient,\n aiConfig\n );\n\n applicationLogger(\n `${prefix}${colorizeNumber(result.tokenUsed)} tokens used - Block ${colorizeNumber(segmentNumber)} of ${colorizeNumber(segmentsToReview.length)}`\n );\n\n // Sanitize artifacts (e.g. Markdown code block wrappers)\n let processedChunk = sanitizeChunk(\n result?.fileContent,\n englishBlock.content\n );\n\n // Fix start/end characters\n processedChunk = fixChunkStartEndChars(\n processedChunk,\n englishBlock.content\n );\n\n // Validate Translation (YAML, Code fences, Length ratio)\n const isValid = validateTranslation(\n englishBlock.content,\n processedChunk,\n applicationLogger\n );\n\n if (!isValid) {\n throw new Error(\n 'Validation failed for chunk (structure or length mismatch). 
Retrying...'\n );\n }\n\n return processedChunk;\n })();\n\n reviewedSegmentsMap.set(segment.actionIndex, reviewedChunkResult);\n }\n\n // Merge reviewed segments back into final document\n const finalFrenchOutput = mergeReviewedSegments(\n plan,\n frenchBlocks,\n reviewedSegmentsMap\n );\n\n mkdirSync(dirname(outputFilePath), { recursive: true });\n writeFileSync(outputFilePath, finalFrenchOutput);\n\n applicationLogger(\n `${colorize('✔', ANSIColors.GREEN)} File ${formatPath(outputFilePath)} created/updated successfully.`\n );\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAqCA,MAAa,uBAAuB,OAClC,cACA,gBACA,QACA,YACA,WACA,eACA,oBACA,cACA,UACA,aACG;CACH,MAAM,gBAAgB,iBAAiB,cAAc;CACrD,MAAM,oBAAoB,aAAa,cAAc;CAErD,MAAM,cAAc,MAAM,SAAS,cAAc,QAAQ;CACzD,MAAM,aAAa,MAAM,SAAS,gBAAgB,QAAQ,CAAC,YAAY,GAAG;CAE1E,MAAM,aAAa,UAAU,8BAA8B,QAAQ,CAChE,WAAW,kBAAkB,GAAG,aAAa,QAAQ,MAAM,GAAG,CAC9D,WAAW,sBAAsB,GAAG,aAAa,YAAY,MAAM,GAAG,CACtE,QAAQ,0BAA0B,WAAW,sBAAsB,IAAI,CACvE,QAAQ,0BAA0B,sBAAsB,IAAI;CAG/D,MAAM,aAAa,CACjB,MAFqB,GAAG,WAAW,UAAU,GAAG,WAAW,aAAa,GAAG,WAAW,UAAU,KAE1E,EAAE,SAAS,IAAI,CAAC,EACtC,KAAK,WAAW,QACjB,CAAC,KAAK,GAAG;CAEV,MAAM,SAAS,CACb,MAFiB,GAAG,WAAW,UAAU,GAAG,WAAW,aAAa,GAAG,WAAW,UAAU,IAAI,aAAa,OAAO,GAAG,WAAW,UAAU,KAE1H,EAAE,SAAS,IAAI,CAAC,EAClC,KAAK,WAAW,QACjB,CAAC,KAAK,GAAG;CAGV,MAAM,EAAE,eAAe,cAAc,MAAM,qBACzC,mBAAmB;EACjB;EACA;EACA;EACD,CAAC;AAEJ,mBACE,GAAG,WAAW,mDAAmD,eAAe,cAAc,OAAO,CAAC,OAAO,eAAe,aAAa,OAAO,GACjJ;AACD,mBACE,GAAG,WAAW,iBAAiB,eAAe,KAAK,QAAQ,QAAQ,MAAM,EAAE,SAAS,QAAQ,CAAC,OAAO,CAAC,WAAW,eAAe,KAAK,QAAQ,QAAQ,MAAM,EAAE,SAAS,SAAS,CAAC,OAAO,CAAC,QAAQ,eAAe,KAAK,QAAQ,QAAQ,MAAM,EAAE,SAAS,aAAa,CAAC,OAAO,CAAC,WAAW,eAAe,KAAK,QAAQ,QAAQ,MAAM,EAAE,SAAS,SAAS,CAAC,OAAO,GAC5V;AAED,KAAI,iBAAiB,WAAW,GAAG;AACjC,oBACE,GAAG,WAAW,uDACf;AACD,YAAU,QAAQ,eAAe,EAAE,EAAE,WAAW,MAAM,CAAC;AACvD,gBACE,gBACA,sBAAsB,MAAM,8BAAc,IAAI,KAAK,CAAC,CACrD;AACD,oBACE,GAAG,SAAS,KAAK,WAAW,MAAM,CAAC,QAAQ,WAAW,eAAe,CAAC,4CACvE;AACD;;AAGF,mBACE,GAAG,WAAW,sBAAsB,eAAe,iBAAiB,OAAO,GAC5E;CAGD,MAAM,sCAAsB,IAAI,KAAqB;AAErD,MAAK,MAAM,WAAW,kBAAkB;EACtC,MAAM,gBAAgB,iBAAiB,QAAQ,QAAQ,GAAG;EAC1D,MAAM,eAAe,QAAQ;EAE7B,MAAM,kCACJ,WAAW,cAAc,MAAM,iBAAiB,OAAO,0BAA0B,aAAa,YAAY,MAAM,CAAC,uCAEjH,aAAa,UACb;EAEF,MAAM,6BACJ,WAAW,cAAc,MAAM,iBAAiB,OAAO,uCAAuC,aAAa,QAAQ,MAAM,CAAC,2BAEzH,QAAQ,mBAAmB,MAC5B;EAEF,MAAM,sBAAsB,MAAM,aAAa,YAAY;GACzD,MAAM,SAAS,MAAM,eACnB;IACE;KAAE,MAAM;KAAU,SAAS;KAAY;IACvC;KAAE,MAAM;KAAU,SAAS,2BAA2B;KAAE;IACxD;KAAE,MAAM;KAAU,SAAS,sBAAsB;KAAE;IACnD;KACE,MAAM;KACN,SAAS,6CAA6C,eAAe,cAAc,CAAC,MAAM,eAAe,iBAAiB,OAAO,CAAC,kCAAkC,cAAc,QAAQ,QAAQ,QAAQ,CAAC,IAAI,OAAO;KACvN;IACD;KAAE,MAAM;KAAQ,SAAS,aAAa;KAAS;IAChD,EACD,WACA,eACA,UACA,SACD;AAED,qBACE,GAAG,SAAS,eAAe,OAAO,UAAU,CAAC,uBAAuB,eAAe,cAAc,CAAC,MAAM,eAAe,iBAAiB,OAAO,GAChJ;GAGD,IAAI,iBAAiB,cACnB,QAAQ,aACR,aAAa,QACd;AAGD,oBAAiB,sBACf,gBACA,aAAa,QACd;AASD,OAAI,CANY,oBACd,aAAa,SACb,gBACA,kBACD,CAGC,OAAM,IAAI,MACR,0EACD;AAGH,UAAO;IACP,EAAE;AAEJ,sBAAoB,IAAI,QAAQ,aAAa,oBAAoB;;CAInE,MAAM,oBAAoB,sBACxB,MACA,cACA,oBACD;AAED,WAAU,QAAQ,eAAe,EAAE,EAAE,WAAW,MAAM,CAAC;AACvD,eAAc,gBAAgB,kBAAkB;AAEhD,mBACE,GAAG,SAAS,KAAK,WAAW,MAAM,CAAC,QAAQ,WAAW,eAAe,CAAC,gCACvE"}
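The sourcesContent above also documents the block-aware plan that drives the review: blocks are classified as reuse, review, insert_new, or delete, and only segments needing review reach the AI. A minimal sketch of the action shape implied by the logging in that source (the real type lives in the translation-alignment module and likely carries more fields):

// Hypothetical minimal shape; the real plan actions carry block payloads.
type AlignmentActionKind = "reuse" | "review" | "insert_new" | "delete";
type AlignmentAction = { kind: AlignmentActionKind };

const summarizeActions = (
  actions: AlignmentAction[]
): Record<AlignmentActionKind, number> => {
  const counts: Record<AlignmentActionKind, number> = {
    reuse: 0,
    review: 0,
    insert_new: 0,
    delete: 0,
  };
  for (const action of actions) {
    counts[action.kind] += 1;
  }
  return counts;
};

// Per the source above, "reuse" keeps the existing translation; only
// changed or new blocks are sent to the model for re-translation.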
@@ -0,0 +1,5 @@
+ import { sanitizeChunk, validateTranslation } from "./validation.mjs";
+ import { translateFile } from "./translateFile.mjs";
+ import { translateDoc } from "./translateDoc.mjs";
+
+ export { sanitizeChunk, translateDoc, translateFile, validateTranslation };
@@ -0,0 +1,72 @@
+ import { setupAI } from "../utils/setupAI.mjs";
+ import { checkFileModifiedRange } from "../utils/checkFileModifiedRange.mjs";
+ import { getOutputFilePath } from "../utils/getOutputFilePath.mjs";
+ import { translateFile } from "./translateFile.mjs";
+ import { listGitFiles, pLimit, parallelize } from "@intlayer/chokidar";
+ import { ANSIColors, colorize, colorizeNumber, getAppLogger, getConfiguration } from "@intlayer/config";
+ import { dirname, join } from "node:path";
+ import { existsSync, mkdirSync, writeFileSync } from "node:fs";
+ import fg from "fast-glob";
+ import { performance } from "node:perf_hooks";
+
+ //#region src/translateDoc/translateDoc.ts
+ const translateDoc = async ({ docPattern, locales, excludedGlobPattern, baseLocale, aiOptions, nbSimultaneousFileProcessed = 20, configOptions, customInstructions, skipIfModifiedBefore, skipIfModifiedAfter, skipIfExists, gitOptions, flushStrategy = "incremental" }) => {
+ const configuration = getConfiguration(configOptions);
+ const appLogger = getAppLogger(configuration);
+ const maxConcurrentChunks = nbSimultaneousFileProcessed;
+ const globalChunkLimiter = pLimit(maxConcurrentChunks);
+ let docList = await fg(docPattern, { ignore: excludedGlobPattern });
+ const aiResult = await setupAI(configuration, aiOptions);
+ if (!aiResult?.hasAIAccess) return;
+ const { aiClient, aiConfig } = aiResult;
+ if (gitOptions) {
+ const gitChangedFiles = await listGitFiles(gitOptions);
+ if (gitChangedFiles) docList = docList.filter((path) => gitChangedFiles.some((gitFile) => join(process.cwd(), path) === gitFile));
+ }
+ const batchStartTime = performance.now();
+ appLogger(`Translating ${colorizeNumber(docList.length)} files to ${colorizeNumber(locales.length)} locales. \nGlobal Concurrency: ${colorizeNumber(maxConcurrentChunks)} chunks in parallel.`);
+ const errorState = {
+ count: 0,
+ maxErrors: 5,
+ shouldStop: false
+ };
+ await parallelize(docList.flatMap((docPath) => locales.map((locale) => async () => {
+ if (errorState.shouldStop) return;
+ const absoluteBaseFilePath = join(configuration.content.baseDir, docPath);
+ const outputFilePath = getOutputFilePath(absoluteBaseFilePath, locale, baseLocale);
+ if (skipIfExists && existsSync(outputFilePath)) return;
+ if (flushStrategy === "incremental" && !existsSync(outputFilePath)) {
+ mkdirSync(dirname(outputFilePath), { recursive: true });
+ writeFileSync(outputFilePath, "");
+ }
+ const fileModificationData = checkFileModifiedRange(outputFilePath, {
+ skipIfModifiedBefore,
+ skipIfModifiedAfter
+ });
+ if (fileModificationData.isSkipped) {
+ appLogger(fileModificationData.message);
+ return;
+ }
+ await translateFile({
+ baseFilePath: absoluteBaseFilePath,
+ outputFilePath,
+ locale,
+ baseLocale,
+ configuration,
+ errorState,
+ aiOptions,
+ customInstructions,
+ aiClient,
+ aiConfig,
+ flushStrategy,
+ limit: globalChunkLimiter
+ });
+ })), (task) => task(), 50);
+ const batchDuration = ((performance.now() - batchStartTime) / 1e3).toFixed(2);
+ if (errorState.count > 0) appLogger(`Finished with ${errorState.count} errors in ${batchDuration}s.`);
+ else appLogger(`${colorize("✔", ANSIColors.GREEN)} Batch completed successfully in ${colorizeNumber(batchDuration)}s.`);
+ };
+
+ //#endregion
+ export { translateDoc };
+ //# sourceMappingURL=translateDoc.mjs.map
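The notable design choice in the new translateDoc is that concurrency is bounded at the chunk level, not the file level: one pLimit queue (globalChunkLimiter) is shared by every file × locale task, while up to 50 files are opened in parallel just to keep that queue saturated. A sketch of a pLimit-style limiter written inline (the real one is re-exported by @intlayer/chokidar), showing why in-flight AI requests stay bounded no matter how many files are open:

type Limit = <T>(fn: () => Promise<T>) => Promise<T>;

// Inline stand-in for pLimit: at most `concurrency` callbacks run at once.
const makeLimit = (concurrency: number): Limit => {
  let active = 0;
  const queue: Array<() => void> = [];
  const acquire = (): Promise<void> =>
    new Promise((resolve) => {
      if (active < concurrency) {
        active += 1;
        resolve();
      } else {
        // Park the caller until a running task releases its slot.
        queue.push(() => {
          active += 1;
          resolve();
        });
      }
    });
  const release = (): void => {
    active -= 1;
    queue.shift()?.(); // Wake the next queued task, if any.
  };
  return async (fn) => {
    await acquire();
    try {
      return await fn();
    } finally {
      release();
    }
  };
};

// One queue for the whole batch; every file merely enqueues its chunks:
// const limit = makeLimit(20);
// await Promise.all(chunks.map((chunk) => limit(() => translateChunk(chunk))));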
@@ -0,0 +1 @@
+ {"version":3,"file":"translateDoc.mjs","names":["docList: string[]","errorState: ErrorState"],"sources":["../../../src/translateDoc/translateDoc.ts"],"sourcesContent":["import { existsSync, mkdirSync, writeFileSync } from 'node:fs';\nimport { dirname, join } from 'node:path';\nimport { performance } from 'node:perf_hooks';\nimport { listGitFiles, parallelize, pLimit } from '@intlayer/chokidar';\nimport {\n ANSIColors,\n colorize,\n colorizeNumber,\n getAppLogger,\n getConfiguration,\n} from '@intlayer/config';\nimport type { Locale } from '@intlayer/types';\nimport fg from 'fast-glob';\nimport { checkFileModifiedRange } from '../utils/checkFileModifiedRange';\nimport { getOutputFilePath } from '../utils/getOutputFilePath';\nimport { setupAI } from '../utils/setupAI';\nimport { translateFile } from './translateFile';\nimport type { ErrorState, TranslateDocOptions } from './types';\n\nexport const translateDoc = async ({\n docPattern,\n locales,\n excludedGlobPattern,\n baseLocale,\n aiOptions,\n nbSimultaneousFileProcessed = 20, // Default to a higher concurrency for chunks\n configOptions,\n customInstructions,\n skipIfModifiedBefore,\n skipIfModifiedAfter,\n skipIfExists,\n gitOptions,\n flushStrategy = 'incremental',\n}: TranslateDocOptions) => {\n const configuration = getConfiguration(configOptions);\n const appLogger = getAppLogger(configuration);\n\n // 1. GLOBAL QUEUE SETUP\n // We use pLimit to create a single bottleneck for AI requests.\n // This queue is shared across all files and locales.\n const maxConcurrentChunks = nbSimultaneousFileProcessed;\n const globalChunkLimiter = pLimit(maxConcurrentChunks);\n\n let docList: string[] = await fg(docPattern, {\n ignore: excludedGlobPattern,\n });\n\n const aiResult = await setupAI(configuration, aiOptions);\n if (!aiResult?.hasAIAccess) return;\n const { aiClient, aiConfig } = aiResult;\n\n if (gitOptions) {\n const gitChangedFiles = await listGitFiles(gitOptions);\n if (gitChangedFiles) {\n docList = docList.filter((path) =>\n gitChangedFiles.some((gitFile) => join(process.cwd(), path) === gitFile)\n );\n }\n }\n\n const batchStartTime = performance.now();\n\n appLogger(\n `Translating ${colorizeNumber(docList.length)} files to ${colorizeNumber(locales.length)} locales. \\n` +\n `Global Concurrency: ${colorizeNumber(maxConcurrentChunks)} chunks in parallel.`\n );\n\n const errorState: ErrorState = {\n count: 0,\n maxErrors: 5,\n shouldStop: false,\n };\n\n // 2. 
FLATTENED TASK LIST\n // We create a task for every File x Locale combination.\n const allTasks = docList.flatMap((docPath) =>\n locales.map((locale) => async () => {\n if (errorState.shouldStop) return;\n\n const absoluteBaseFilePath = join(configuration.content.baseDir, docPath);\n const outputFilePath = getOutputFilePath(\n absoluteBaseFilePath,\n locale,\n baseLocale\n );\n\n // Skip logic\n if (skipIfExists && existsSync(outputFilePath)) return;\n\n if (flushStrategy === 'incremental' && !existsSync(outputFilePath)) {\n mkdirSync(dirname(outputFilePath), { recursive: true });\n writeFileSync(outputFilePath, '');\n }\n\n const fileModificationData = checkFileModifiedRange(outputFilePath, {\n skipIfModifiedBefore,\n skipIfModifiedAfter,\n });\n\n if (fileModificationData.isSkipped) {\n appLogger(fileModificationData.message);\n return;\n }\n\n // Execute translation using the SHARED limiter\n await translateFile({\n baseFilePath: absoluteBaseFilePath,\n outputFilePath,\n locale: locale as Locale,\n baseLocale,\n configuration,\n errorState,\n aiOptions,\n customInstructions,\n aiClient,\n aiConfig,\n flushStrategy,\n limit: globalChunkLimiter, // Pass the global queue\n });\n })\n );\n\n // 3. HIGH-THROUGHPUT FILE OPENER\n // We open many files simultaneously (e.g., 50) to ensure the global chunk queue\n // is always saturated with work.\n // If we open too few files, the chunk queue might drain faster than we can read new files.\n const FILE_OPEN_LIMIT = 50;\n\n await parallelize(allTasks, (task) => task(), FILE_OPEN_LIMIT);\n\n const batchEndTime = performance.now();\n const batchDuration = ((batchEndTime - batchStartTime) / 1000).toFixed(2);\n\n if (errorState.count > 0) {\n appLogger(`Finished with ${errorState.count} errors in ${batchDuration}s.`);\n } else {\n appLogger(\n `${colorize('✔', ANSIColors.GREEN)} Batch completed successfully in ${colorizeNumber(batchDuration)}s.`\n );\n }\n};\n"],"mappings":";;;;;;;;;;;;AAmBA,MAAa,eAAe,OAAO,EACjC,YACA,SACA,qBACA,YACA,WACA,8BAA8B,IAC9B,eACA,oBACA,sBACA,qBACA,cACA,YACA,gBAAgB,oBACS;CACzB,MAAM,gBAAgB,iBAAiB,cAAc;CACrD,MAAM,YAAY,aAAa,cAAc;CAK7C,MAAM,sBAAsB;CAC5B,MAAM,qBAAqB,OAAO,oBAAoB;CAEtD,IAAIA,UAAoB,MAAM,GAAG,YAAY,EAC3C,QAAQ,qBACT,CAAC;CAEF,MAAM,WAAW,MAAM,QAAQ,eAAe,UAAU;AACxD,KAAI,CAAC,UAAU,YAAa;CAC5B,MAAM,EAAE,UAAU,aAAa;AAE/B,KAAI,YAAY;EACd,MAAM,kBAAkB,MAAM,aAAa,WAAW;AACtD,MAAI,gBACF,WAAU,QAAQ,QAAQ,SACxB,gBAAgB,MAAM,YAAY,KAAK,QAAQ,KAAK,EAAE,KAAK,KAAK,QAAQ,CACzE;;CAIL,MAAM,iBAAiB,YAAY,KAAK;AAExC,WACE,eAAe,eAAe,QAAQ,OAAO,CAAC,YAAY,eAAe,QAAQ,OAAO,CAAC,kCAChE,eAAe,oBAAoB,CAAC,sBAC9D;CAED,MAAMC,aAAyB;EAC7B,OAAO;EACP,WAAW;EACX,YAAY;EACb;AAyDD,OAAM,YArDW,QAAQ,SAAS,YAChC,QAAQ,KAAK,WAAW,YAAY;AAClC,MAAI,WAAW,WAAY;EAE3B,MAAM,uBAAuB,KAAK,cAAc,QAAQ,SAAS,QAAQ;EACzE,MAAM,iBAAiB,kBACrB,sBACA,QACA,WACD;AAGD,MAAI,gBAAgB,WAAW,eAAe,CAAE;AAEhD,MAAI,kBAAkB,iBAAiB,CAAC,WAAW,eAAe,EAAE;AAClE,aAAU,QAAQ,eAAe,EAAE,EAAE,WAAW,MAAM,CAAC;AACvD,iBAAc,gBAAgB,GAAG;;EAGnC,MAAM,uBAAuB,uBAAuB,gBAAgB;GAClE;GACA;GACD,CAAC;AAEF,MAAI,qBAAqB,WAAW;AAClC,aAAU,qBAAqB,QAAQ;AACvC;;AAIF,QAAM,cAAc;GAClB,cAAc;GACd;GACQ;GACR;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACA,OAAO;GACR,CAAC;GACF,CACH,GAQ4B,SAAS,MAAM,EAFpB,GAEsC;CAG9D,MAAM,kBADe,YAAY,KAAK,GACC,kBAAkB,KAAM,QAAQ,EAAE;AAEzE,KAAI,WAAW,QAAQ,EACrB,WAAU,iBAAiB,WAAW,MAAM,aAAa,cAAc,IAAI;KAE3E,WACE,GAAG,SAAS,KAAK,WAAW,MAAM,CAAC,mCAAmC,eAAe,cAAc,CAAC,IACrG"}
@@ -0,0 +1,102 @@
+ import { readAsset } from "../_virtual/_utils_asset.mjs";
+ import { sanitizeChunk, validateTranslation } from "./validation.mjs";
+ import { chunkInference } from "../utils/chunkInference.mjs";
+ import { fixChunkStartEndChars } from "../utils/fixChunkStartEndChars.mjs";
+ import { chunkText } from "../utils/calculateChunks.mjs";
+ import { formatLocale, formatPath } from "@intlayer/chokidar";
+ import { ANSIColors, colon, colorize, colorizeNumber, getAppLogger, retryManager } from "@intlayer/config";
+ import { dirname, relative } from "node:path";
+ import { mkdirSync, writeFileSync } from "node:fs";
+ import { readFile } from "node:fs/promises";
+ import { performance } from "node:perf_hooks";
+
+ //#region src/translateDoc/translateFile.ts
+ const translateFile = async ({ baseFilePath, outputFilePath, locale, baseLocale, configuration, errorState, aiOptions, customInstructions, aiClient, aiConfig, flushStrategy = "incremental", onChunkReceive, limit }) => {
+ if (errorState.shouldStop) return null;
+ const appLogger = getAppLogger(configuration, { config: { prefix: "" } });
+ const fileStartTime = performance.now();
+ try {
+ const chunks = chunkText(await readFile(baseFilePath, "utf-8"));
+ const totalChunks = chunks.length;
+ const filePrefix = `${colon(`${ANSIColors.GREY_DARK}[${formatPath(baseFilePath)}${ANSIColors.GREY_DARK}] `, { colSize: 40 })}${ANSIColors.RESET}`;
+ const prefix = `${colon(`${ANSIColors.GREY_DARK}[${formatPath(baseFilePath)}${ANSIColors.GREY_DARK}][${formatLocale(locale)}${ANSIColors.GREY_DARK}] `, { colSize: 40 })}${ANSIColors.RESET}`;
+ appLogger(`${filePrefix}Split into ${colorizeNumber(totalChunks)} chunks. Queuing...`);
+ const basePrompt = readAsset("./prompts/TRANSLATE_PROMPT.md", "utf-8").replaceAll("{{localeName}}", `${formatLocale(locale, false)}`).replaceAll("{{baseLocaleName}}", `${formatLocale(baseLocale, false)}`).replace("{{applicationContext}}", aiOptions?.applicationContext ?? "-").replace("{{customInstructions}}", customInstructions ?? "-");
+ const translatedParts = new Array(totalChunks).fill("");
+ const runTask = limit ?? ((fn) => fn());
+ const tasks = chunks.map((chunk, i) => runTask(async () => {
+ if (errorState.shouldStop) return null;
+ const chunkLogger = getAppLogger(configuration, { config: { prefix: `${prefix} ${ANSIColors.GREY_DARK}[${i + 1}/${totalChunks}] ${ANSIColors.RESET}` } });
+ const chunkStartTime = performance.now();
+ const isFirstChunk = i === 0;
+ const fileToTranslateCurrentChunk = chunk.content;
+ const getPrevChunkPrompt = () => `>>> CONTEXT: PREVIOUS SOURCE CONTENT <<<\n\`\`\`\n` + (chunks[i - 1]?.content ?? "") + `\n\`\`\`\n>>> END PREVIOUS CONTEXT <<<`;
+ const getBaseChunkContextPrompt = () => `>>> CONTEXT: NEXT CONTENT <<<\n\`\`\`\n` + (chunks[i + 1]?.content ?? "") + `\n\`\`\`\n>>> END NEXT CONTEXT <<<`;
+ chunkLogger("Process started");
+ const { content: translatedChunk, tokens } = await retryManager(async () => {
+ const result = await chunkInference([
+ {
+ role: "system",
+ content: basePrompt
+ },
+ ...chunks[i + 1] ? [{
+ role: "system",
+ content: getBaseChunkContextPrompt()
+ }] : [],
+ ...isFirstChunk ? [] : [{
+ role: "system",
+ content: getPrevChunkPrompt()
+ }],
+ {
+ role: "system",
+ content: [`You are translating TARGET CHUNK (${i + 1}/${totalChunks}).`, `Translate ONLY the target chunk. Preserve frontmatter/code exactly.`].join("\n")
+ },
+ {
+ role: "user",
+ content: `>>> TARGET CHUNK START <<<\n${fileToTranslateCurrentChunk}\n>>> TARGET CHUNK END <<<`
+ }
+ ], aiOptions, configuration, aiClient, aiConfig);
+ let processedChunk = sanitizeChunk(result?.fileContent, fileToTranslateCurrentChunk);
+ processedChunk = fixChunkStartEndChars(processedChunk, fileToTranslateCurrentChunk);
+ if (!validateTranslation(fileToTranslateCurrentChunk, processedChunk, chunkLogger)) throw new Error(`Validation failed for chunk ${i + 1}/${totalChunks}`);
+ return {
+ content: processedChunk,
+ tokens: result.tokenUsed
+ };
+ })();
+ const chunkDuration = (performance.now() - chunkStartTime).toFixed(0);
+ translatedParts[i] = translatedChunk;
+ if (onChunkReceive) onChunkReceive(translatedChunk, i, totalChunks);
+ if (flushStrategy === "incremental") {
+ if (translatedParts.slice(0, i + 1).every((p) => p && p !== "")) {
+ let endIdx = 0;
+ while (endIdx < totalChunks && translatedParts[endIdx] && translatedParts[endIdx] !== "") endIdx++;
+ const currentContent = translatedParts.slice(0, endIdx).join("");
+ mkdirSync(dirname(outputFilePath), { recursive: true });
+ writeFileSync(outputFilePath, currentContent);
+ }
+ }
+ chunkLogger([`${colorizeNumber(tokens)} tokens used `, `${ANSIColors.GREY_DARK}in ${colorizeNumber(chunkDuration)}ms${ANSIColors.RESET}`].join(""));
+ }));
+ await Promise.all(tasks);
+ const fullContent = translatedParts.join("");
+ if (flushStrategy === "end" || flushStrategy === "incremental") {
+ mkdirSync(dirname(outputFilePath), { recursive: true });
+ writeFileSync(outputFilePath, fullContent);
+ }
+ const totalDuration = ((performance.now() - fileStartTime) / 1e3).toFixed(2);
+ const relativePath = relative(configuration.content.baseDir, outputFilePath);
+ appLogger(`${colorize("✔", ANSIColors.GREEN)} File ${formatPath(relativePath)} completed in ${colorizeNumber(totalDuration)}s.`);
+ return fullContent;
+ } catch (error) {
+ errorState.count++;
+ const errorMessage = error?.message ?? JSON.stringify(error);
+ appLogger(`${colorize("✖", ANSIColors.RED)} Error: ${errorMessage}`);
+ if (errorState.count >= errorState.maxErrors) errorState.shouldStop = true;
+ return null;
+ }
+ };
+
+ //#endregion
+ export { translateFile };
+ //# sourceMappingURL=translateFile.mjs.map
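translateFile's incremental flush is the part worth reading twice: chunks complete out of order, so after each completion the code writes only the longest fully-translated prefix, guaranteeing that the partial file on disk never contains holes. A condensed sketch of that rule, where writeOut is a stand-in for the mkdirSync plus writeFileSync pair used above:

// parts[i] is "" until chunk i finishes, mirroring translatedParts above.
const flushContiguousPrefix = (
  parts: string[],
  finishedIndex: number,
  writeOut: (content: string) => void
): void => {
  // Skip the write if any chunk before this one is still pending;
  // flushing now would leave a hole in the output file.
  for (let i = 0; i <= finishedIndex; i += 1) {
    if (parts[i] === "") return;
  }
  // The prefix may extend past finishedIndex if later chunks already landed.
  let end = finishedIndex + 1;
  while (end < parts.length && parts[end] !== "") {
    end += 1;
  }
  writeOut(parts.slice(0, end).join(""));
};

// Example: with parts = ["a", "b", "", "d"], finishing index 1 writes "ab";
// when chunk 2 arrives later, the whole "abcd" is flushed in one go.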
@@ -0,0 +1 @@
+ {"version":3,"file":"translateFile.mjs","names":["translatedParts: string[]","error: any"],"sources":["../../../src/translateDoc/translateFile.ts"],"sourcesContent":["import { mkdirSync, writeFileSync } from 'node:fs';\nimport { readFile } from 'node:fs/promises';\nimport { dirname, relative } from 'node:path';\nimport { performance } from 'node:perf_hooks';\nimport { readAsset } from 'utils:asset';\nimport { formatLocale, formatPath } from '@intlayer/chokidar';\nimport {\n ANSIColors,\n colon,\n colorize,\n colorizeNumber,\n getAppLogger,\n retryManager,\n} from '@intlayer/config';\nimport { chunkText } from '../utils/calculateChunks';\nimport { chunkInference } from '../utils/chunkInference';\nimport { fixChunkStartEndChars } from '../utils/fixChunkStartEndChars';\nimport type { TranslateFileOptions } from './types';\nimport { sanitizeChunk, validateTranslation } from './validation';\n\nexport const translateFile = async ({\n baseFilePath,\n outputFilePath,\n locale,\n baseLocale,\n configuration,\n errorState,\n aiOptions,\n customInstructions,\n aiClient,\n aiConfig,\n flushStrategy = 'incremental',\n onChunkReceive,\n limit, // The Global Limiter\n}: TranslateFileOptions): Promise<string | null> => {\n if (errorState.shouldStop) return null;\n\n const appLogger = getAppLogger(configuration, { config: { prefix: '' } });\n const fileStartTime = performance.now();\n\n try {\n const fileContent = await readFile(baseFilePath, 'utf-8');\n const chunks = chunkText(fileContent);\n const totalChunks = chunks.length;\n\n const filePrefixText = `${ANSIColors.GREY_DARK}[${formatPath(baseFilePath)}${ANSIColors.GREY_DARK}] `;\n const filePrefix = `${colon(filePrefixText, { colSize: 40 })}${ANSIColors.RESET}`;\n const prefixText = `${ANSIColors.GREY_DARK}[${formatPath(baseFilePath)}${ANSIColors.GREY_DARK}][${formatLocale(locale)}${ANSIColors.GREY_DARK}] `;\n const prefix = `${colon(prefixText, { colSize: 40 })}${ANSIColors.RESET}`;\n\n appLogger(\n `${filePrefix}Split into ${colorizeNumber(totalChunks)} chunks. Queuing...`\n );\n\n const basePrompt = readAsset('./prompts/TRANSLATE_PROMPT.md', 'utf-8')\n .replaceAll('{{localeName}}', `${formatLocale(locale, false)}`)\n .replaceAll('{{baseLocaleName}}', `${formatLocale(baseLocale, false)}`)\n .replace('{{applicationContext}}', aiOptions?.applicationContext ?? '-')\n .replace('{{customInstructions}}', customInstructions ?? '-');\n\n const translatedParts: string[] = new Array(totalChunks).fill('');\n\n // Fallback if no limiter is provided (runs immediately)\n const runTask = limit ?? ((fn) => fn());\n\n // MAP CHUNKS TO GLOBAL TASKS\n // This pushes ALL chunks for this file into the Global Queue immediately.\n // They will execute whenever the global concurrency slots open up.\n const tasks = chunks.map((chunk, i) =>\n runTask(async () => {\n if (errorState.shouldStop) return null;\n\n const chunkLogger = getAppLogger(configuration, {\n config: {\n prefix: `${prefix} ${ANSIColors.GREY_DARK}[${i + 1}/${totalChunks}] ${ANSIColors.RESET}`,\n },\n });\n\n const chunkStartTime = performance.now();\n const isFirstChunk = i === 0;\n const fileToTranslateCurrentChunk = chunk.content;\n\n // Context Preparation\n const getPrevChunkPrompt = () =>\n `>>> CONTEXT: PREVIOUS SOURCE CONTENT <<<\\n\\`\\`\\`\\n` +\n (chunks[i - 1]?.content ?? '') +\n `\\n\\`\\`\\`\\n>>> END PREVIOUS CONTEXT <<<`;\n\n const getBaseChunkContextPrompt = () =>\n `>>> CONTEXT: NEXT CONTENT <<<\\n\\`\\`\\`\\n` +\n (chunks[i + 1]?.content ?? 
'') +\n `\\n\\`\\`\\`\\n>>> END NEXT CONTEXT <<<`;\n\n chunkLogger('Process started');\n\n const chunkTranslation = retryManager(async () => {\n const result = await chunkInference(\n [\n { role: 'system', content: basePrompt },\n ...(chunks[i + 1]\n ? [\n {\n role: 'system',\n content: getBaseChunkContextPrompt(),\n } as const,\n ]\n : []),\n ...(isFirstChunk\n ? []\n : [{ role: 'system', content: getPrevChunkPrompt() } as const]),\n {\n role: 'system',\n content: [\n `You are translating TARGET CHUNK (${i + 1}/${totalChunks}).`,\n `Translate ONLY the target chunk. Preserve frontmatter/code exactly.`,\n ].join('\\n'),\n },\n {\n role: 'user',\n content: `>>> TARGET CHUNK START <<<\\n${fileToTranslateCurrentChunk}\\n>>> TARGET CHUNK END <<<`,\n },\n ],\n aiOptions,\n configuration,\n aiClient,\n aiConfig\n );\n\n let processedChunk = sanitizeChunk(\n result?.fileContent,\n fileToTranslateCurrentChunk\n );\n processedChunk = fixChunkStartEndChars(\n processedChunk,\n fileToTranslateCurrentChunk\n );\n\n const isValid = validateTranslation(\n fileToTranslateCurrentChunk,\n processedChunk,\n chunkLogger\n );\n\n if (!isValid) {\n // Throwing an error here signals retryManager to try again\n throw new Error(\n `Validation failed for chunk ${i + 1}/${totalChunks}`\n );\n }\n\n return { content: processedChunk, tokens: result.tokenUsed };\n });\n\n const { content: translatedChunk, tokens } = await chunkTranslation();\n const chunkEndTime = performance.now();\n const chunkDuration = (chunkEndTime - chunkStartTime).toFixed(0);\n\n // Store Result\n translatedParts[i] = translatedChunk;\n\n if (onChunkReceive) {\n onChunkReceive(translatedChunk, i, totalChunks);\n }\n\n // Incremental Flush Strategy\n if (flushStrategy === 'incremental') {\n const isContiguous = translatedParts\n .slice(0, i + 1)\n .every((p) => p && p !== '');\n\n if (isContiguous) {\n let endIdx = 0;\n while (\n endIdx < totalChunks &&\n translatedParts[endIdx] &&\n translatedParts[endIdx] !== ''\n ) {\n endIdx++;\n }\n const currentContent = translatedParts.slice(0, endIdx).join('');\n // Write asynchronously/sync is fine here as node handles file locks reasonably well for single process\n mkdirSync(dirname(outputFilePath), { recursive: true });\n writeFileSync(outputFilePath, currentContent);\n }\n }\n\n chunkLogger(\n [\n `${colorizeNumber(tokens)} tokens used `,\n `${ANSIColors.GREY_DARK}in ${colorizeNumber(chunkDuration)}ms${ANSIColors.RESET}`,\n ].join('')\n );\n })\n );\n\n // Wait for all chunks for this specific file/locale to finish\n await Promise.all(tasks);\n\n // Final Flush\n const fullContent = translatedParts.join('');\n if (flushStrategy === 'end' || flushStrategy === 'incremental') {\n mkdirSync(dirname(outputFilePath), { recursive: true });\n writeFileSync(outputFilePath, fullContent);\n }\n\n const fileEndTime = performance.now();\n const totalDuration = ((fileEndTime - fileStartTime) / 1000).toFixed(2);\n const relativePath = relative(\n configuration.content.baseDir,\n outputFilePath\n );\n\n appLogger(\n `${colorize('✔', ANSIColors.GREEN)} File ${formatPath(relativePath)} completed in ${colorizeNumber(totalDuration)}s.`\n );\n\n return fullContent;\n } catch (error: any) {\n errorState.count++;\n const errorMessage = error?.message ?? 
JSON.stringify(error);\n appLogger(`${colorize('✖', ANSIColors.RED)} Error: ${errorMessage}`);\n if (errorState.count >= errorState.maxErrors) errorState.shouldStop = true;\n return null;\n }\n};\n"],"mappings":";;;;;;;;;;;;;AAoBA,MAAa,gBAAgB,OAAO,EAClC,cACA,gBACA,QACA,YACA,eACA,YACA,WACA,oBACA,UACA,UACA,gBAAgB,eAChB,gBACA,YACkD;AAClD,KAAI,WAAW,WAAY,QAAO;CAElC,MAAM,YAAY,aAAa,eAAe,EAAE,QAAQ,EAAE,QAAQ,IAAI,EAAE,CAAC;CACzE,MAAM,gBAAgB,YAAY,KAAK;AAEvC,KAAI;EAEF,MAAM,SAAS,UADK,MAAM,SAAS,cAAc,QAAQ,CACpB;EACrC,MAAM,cAAc,OAAO;EAG3B,MAAM,aAAa,GAAG,MADC,GAAG,WAAW,UAAU,GAAG,WAAW,aAAa,GAAG,WAAW,UAAU,KACtD,EAAE,SAAS,IAAI,CAAC,GAAG,WAAW;EAE1E,MAAM,SAAS,GAAG,MADC,GAAG,WAAW,UAAU,GAAG,WAAW,aAAa,GAAG,WAAW,UAAU,IAAI,aAAa,OAAO,GAAG,WAAW,UAAU,KAC1G,EAAE,SAAS,IAAI,CAAC,GAAG,WAAW;AAElE,YACE,GAAG,WAAW,aAAa,eAAe,YAAY,CAAC,qBACxD;EAED,MAAM,aAAa,UAAU,iCAAiC,QAAQ,CACnE,WAAW,kBAAkB,GAAG,aAAa,QAAQ,MAAM,GAAG,CAC9D,WAAW,sBAAsB,GAAG,aAAa,YAAY,MAAM,GAAG,CACtE,QAAQ,0BAA0B,WAAW,sBAAsB,IAAI,CACvE,QAAQ,0BAA0B,sBAAsB,IAAI;EAE/D,MAAMA,kBAA4B,IAAI,MAAM,YAAY,CAAC,KAAK,GAAG;EAGjE,MAAM,UAAU,WAAW,OAAO,IAAI;EAKtC,MAAM,QAAQ,OAAO,KAAK,OAAO,MAC/B,QAAQ,YAAY;AAClB,OAAI,WAAW,WAAY,QAAO;GAElC,MAAM,cAAc,aAAa,eAAe,EAC9C,QAAQ,EACN,QAAQ,GAAG,OAAO,IAAI,WAAW,UAAU,GAAG,IAAI,EAAE,GAAG,YAAY,IAAI,WAAW,SACnF,EACF,CAAC;GAEF,MAAM,iBAAiB,YAAY,KAAK;GACxC,MAAM,eAAe,MAAM;GAC3B,MAAM,8BAA8B,MAAM;GAG1C,MAAM,2BACJ,wDACC,OAAO,IAAI,IAAI,WAAW,MAC3B;GAEF,MAAM,kCACJ,6CACC,OAAO,IAAI,IAAI,WAAW,MAC3B;AAEF,eAAY,kBAAkB;GA4D9B,MAAM,EAAE,SAAS,iBAAiB,WAAW,MA1DpB,aAAa,YAAY;IAChD,MAAM,SAAS,MAAM,eACnB;KACE;MAAE,MAAM;MAAU,SAAS;MAAY;KACvC,GAAI,OAAO,IAAI,KACX,CACE;MACE,MAAM;MACN,SAAS,2BAA2B;MACrC,CACF,GACD,EAAE;KACN,GAAI,eACA,EAAE,GACF,CAAC;MAAE,MAAM;MAAU,SAAS,oBAAoB;MAAE,CAAU;KAChE;MACE,MAAM;MACN,SAAS,CACP,qCAAqC,IAAI,EAAE,GAAG,YAAY,KAC1D,sEACD,CAAC,KAAK,KAAK;MACb;KACD;MACE,MAAM;MACN,SAAS,+BAA+B,4BAA4B;MACrE;KACF,EACD,WACA,eACA,UACA,SACD;IAED,IAAI,iBAAiB,cACnB,QAAQ,aACR,4BACD;AACD,qBAAiB,sBACf,gBACA,4BACD;AAQD,QAAI,CANY,oBACd,6BACA,gBACA,YACD,CAIC,OAAM,IAAI,MACR,+BAA+B,IAAI,EAAE,GAAG,cACzC;AAGH,WAAO;KAAE,SAAS;KAAgB,QAAQ,OAAO;KAAW;KAC5D,EAEmE;GAErE,MAAM,iBADe,YAAY,KAAK,GACA,gBAAgB,QAAQ,EAAE;AAGhE,mBAAgB,KAAK;AAErB,OAAI,eACF,gBAAe,iBAAiB,GAAG,YAAY;AAIjD,OAAI,kBAAkB,eAKpB;QAJqB,gBAClB,MAAM,GAAG,IAAI,EAAE,CACf,OAAO,MAAM,KAAK,MAAM,GAAG,EAEZ;KAChB,IAAI,SAAS;AACb,YACE,SAAS,eACT,gBAAgB,WAChB,gBAAgB,YAAY,GAE5B;KAEF,MAAM,iBAAiB,gBAAgB,MAAM,GAAG,OAAO,CAAC,KAAK,GAAG;AAEhE,eAAU,QAAQ,eAAe,EAAE,EAAE,WAAW,MAAM,CAAC;AACvD,mBAAc,gBAAgB,eAAe;;;AAIjD,eACE,CACE,GAAG,eAAe,OAAO,CAAC,gBAC1B,GAAG,WAAW,UAAU,KAAK,eAAe,cAAc,CAAC,IAAI,WAAW,QAC3E,CAAC,KAAK,GAAG,CACX;IACD,CACH;AAGD,QAAM,QAAQ,IAAI,MAAM;EAGxB,MAAM,cAAc,gBAAgB,KAAK,GAAG;AAC5C,MAAI,kBAAkB,SAAS,kBAAkB,eAAe;AAC9D,aAAU,QAAQ,eAAe,EAAE,EAAE,WAAW,MAAM,CAAC;AACvD,iBAAc,gBAAgB,YAAY;;EAI5C,MAAM,kBADc,YAAY,KAAK,GACC,iBAAiB,KAAM,QAAQ,EAAE;EACvE,MAAM,eAAe,SACnB,cAAc,QAAQ,SACtB,eACD;AAED,YACE,GAAG,SAAS,KAAK,WAAW,MAAM,CAAC,QAAQ,WAAW,aAAa,CAAC,gBAAgB,eAAe,cAAc,CAAC,IACnH;AAED,SAAO;UACAC,OAAY;AACnB,aAAW;EACX,MAAM,eAAe,OAAO,WAAW,KAAK,UAAU,MAAM;AAC5D,YAAU,GAAG,SAAS,KAAK,WAAW,IAAI,CAAC,UAAU,eAAe;AACpE,MAAI,WAAW,SAAS,WAAW,UAAW,YAAW,aAAa;AACtE,SAAO"}