@intlayer/cli 7.5.10 → 7.5.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (107)
  1. package/dist/cjs/ci.cjs +73 -0
  2. package/dist/cjs/ci.cjs.map +1 -0
  3. package/dist/cjs/cli.cjs +41 -6
  4. package/dist/cjs/cli.cjs.map +1 -1
  5. package/dist/cjs/editor.cjs +1 -1
  6. package/dist/cjs/index.cjs +4 -5
  7. package/dist/cjs/listContentDeclaration.cjs +6 -2
  8. package/dist/cjs/listContentDeclaration.cjs.map +1 -1
  9. package/dist/cjs/listProjects.cjs +28 -0
  10. package/dist/cjs/listProjects.cjs.map +1 -0
  11. package/dist/cjs/{reviewDoc.cjs → reviewDoc/reviewDoc.cjs} +17 -15
  12. package/dist/cjs/reviewDoc/reviewDoc.cjs.map +1 -0
  13. package/dist/cjs/{reviewDocBlockAware.cjs → reviewDoc/reviewDocBlockAware.cjs} +12 -8
  14. package/dist/cjs/reviewDoc/reviewDocBlockAware.cjs.map +1 -0
  15. package/dist/cjs/translateDoc/index.cjs +8 -0
  16. package/dist/cjs/translateDoc/translateDoc.cjs +74 -0
  17. package/dist/cjs/translateDoc/translateDoc.cjs.map +1 -0
  18. package/dist/cjs/translateDoc/translateFile.cjs +103 -0
  19. package/dist/cjs/translateDoc/translateFile.cjs.map +1 -0
  20. package/dist/cjs/translateDoc/types.cjs +0 -0
  21. package/dist/cjs/translateDoc/validation.cjs +49 -0
  22. package/dist/cjs/translateDoc/validation.cjs.map +1 -0
  23. package/dist/cjs/translation-alignment/planActions.cjs +2 -4
  24. package/dist/cjs/translation-alignment/planActions.cjs.map +1 -1
  25. package/dist/cjs/translation-alignment/segmentDocument.cjs +35 -101
  26. package/dist/cjs/translation-alignment/segmentDocument.cjs.map +1 -1
  27. package/dist/cjs/utils/checkAccess.cjs +2 -1
  28. package/dist/cjs/utils/checkAccess.cjs.map +1 -1
  29. package/dist/cjs/utils/setupAI.cjs +20 -11
  30. package/dist/cjs/utils/setupAI.cjs.map +1 -1
  31. package/dist/esm/auth/login.mjs +16 -16
  32. package/dist/esm/auth/login.mjs.map +1 -1
  33. package/dist/esm/ci.mjs +72 -0
  34. package/dist/esm/ci.mjs.map +1 -0
  35. package/dist/esm/cli.mjs +39 -4
  36. package/dist/esm/cli.mjs.map +1 -1
  37. package/dist/esm/editor.mjs +1 -1
  38. package/dist/esm/index.mjs +3 -3
  39. package/dist/esm/listContentDeclaration.mjs +6 -2
  40. package/dist/esm/listContentDeclaration.mjs.map +1 -1
  41. package/dist/esm/listProjects.mjs +27 -0
  42. package/dist/esm/listProjects.mjs.map +1 -0
  43. package/dist/esm/pull.mjs +6 -6
  44. package/dist/esm/pull.mjs.map +1 -1
  45. package/dist/esm/push/push.mjs +7 -7
  46. package/dist/esm/push/push.mjs.map +1 -1
  47. package/dist/esm/{reviewDoc.mjs → reviewDoc/reviewDoc.mjs} +14 -12
  48. package/dist/esm/reviewDoc/reviewDoc.mjs.map +1 -0
  49. package/dist/esm/{reviewDocBlockAware.mjs → reviewDoc/reviewDocBlockAware.mjs} +11 -7
  50. package/dist/esm/reviewDoc/reviewDocBlockAware.mjs.map +1 -0
  51. package/dist/esm/translateDoc/index.mjs +5 -0
  52. package/dist/esm/translateDoc/translateDoc.mjs +72 -0
  53. package/dist/esm/translateDoc/translateDoc.mjs.map +1 -0
  54. package/dist/esm/translateDoc/translateFile.mjs +102 -0
  55. package/dist/esm/translateDoc/translateFile.mjs.map +1 -0
  56. package/dist/esm/translateDoc/types.mjs +0 -0
  57. package/dist/esm/translateDoc/validation.mjs +47 -0
  58. package/dist/esm/translateDoc/validation.mjs.map +1 -0
  59. package/dist/esm/translation-alignment/planActions.mjs +2 -4
  60. package/dist/esm/translation-alignment/planActions.mjs.map +1 -1
  61. package/dist/esm/translation-alignment/segmentDocument.mjs +35 -101
  62. package/dist/esm/translation-alignment/segmentDocument.mjs.map +1 -1
  63. package/dist/esm/utils/checkAccess.mjs +2 -1
  64. package/dist/esm/utils/checkAccess.mjs.map +1 -1
  65. package/dist/esm/utils/setupAI.mjs +20 -11
  66. package/dist/esm/utils/setupAI.mjs.map +1 -1
  67. package/dist/types/ci.d.ts +5 -0
  68. package/dist/types/ci.d.ts.map +1 -0
  69. package/dist/types/cli.d.ts.map +1 -1
  70. package/dist/types/index.d.ts +3 -3
  71. package/dist/types/listContentDeclaration.d.ts +2 -0
  72. package/dist/types/listContentDeclaration.d.ts.map +1 -1
  73. package/dist/types/listProjects.d.ts +11 -0
  74. package/dist/types/listProjects.d.ts.map +1 -0
  75. package/dist/types/pull.d.ts.map +1 -1
  76. package/dist/types/pushConfig.d.ts.map +1 -1
  77. package/dist/types/{reviewDoc.d.ts → reviewDoc/reviewDoc.d.ts} +1 -1
  78. package/dist/types/reviewDoc/reviewDoc.d.ts.map +1 -0
  79. package/dist/types/{reviewDocBlockAware.d.ts → reviewDoc/reviewDocBlockAware.d.ts} +2 -2
  80. package/dist/types/reviewDoc/reviewDocBlockAware.d.ts.map +1 -0
  81. package/dist/types/translateDoc/index.d.ts +5 -0
  82. package/dist/types/translateDoc/translateDoc.d.ts +21 -0
  83. package/dist/types/translateDoc/translateDoc.d.ts.map +1 -0
  84. package/dist/types/translateDoc/translateFile.d.ts +21 -0
  85. package/dist/types/translateDoc/translateFile.d.ts.map +1 -0
  86. package/dist/types/translateDoc/types.d.ts +47 -0
  87. package/dist/types/translateDoc/types.d.ts.map +1 -0
  88. package/dist/types/translateDoc/validation.d.ts +16 -0
  89. package/dist/types/translateDoc/validation.d.ts.map +1 -0
  90. package/dist/types/translation-alignment/planActions.d.ts +2 -2
  91. package/dist/types/translation-alignment/planActions.d.ts.map +1 -1
  92. package/dist/types/translation-alignment/rebuildDocument.d.ts.map +1 -1
  93. package/dist/types/translation-alignment/segmentDocument.d.ts.map +1 -1
  94. package/dist/types/utils/setupAI.d.ts.map +1 -1
  95. package/package.json +11 -10
  96. package/dist/cjs/reviewDoc.cjs.map +0 -1
  97. package/dist/cjs/reviewDocBlockAware.cjs.map +0 -1
  98. package/dist/cjs/translateDoc.cjs +0 -132
  99. package/dist/cjs/translateDoc.cjs.map +0 -1
  100. package/dist/esm/reviewDoc.mjs.map +0 -1
  101. package/dist/esm/reviewDocBlockAware.mjs.map +0 -1
  102. package/dist/esm/translateDoc.mjs +0 -129
  103. package/dist/esm/translateDoc.mjs.map +0 -1
  104. package/dist/types/reviewDoc.d.ts.map +0 -1
  105. package/dist/types/reviewDocBlockAware.d.ts.map +0 -1
  106. package/dist/types/translateDoc.d.ts +0 -47
  107. package/dist/types/translateDoc.d.ts.map +0 -1
@@ -0,0 +1 @@
+ {"version":3,"file":"reviewDocBlockAware.cjs","names":["readAsset","ANSIColors","buildAlignmentPlan","mergeReviewedSegments","chunkInference","Locales","sanitizeChunk","fixChunkStartEndChars","validateTranslation"],"sources":["../../../src/reviewDoc/reviewDocBlockAware.ts"],"sourcesContent":["import { mkdirSync, writeFileSync } from 'node:fs';\nimport { readFile } from 'node:fs/promises';\nimport { dirname } from 'node:path';\nimport { readAsset } from 'utils:asset';\nimport type { AIConfig } from '@intlayer/ai';\nimport type { AIOptions } from '@intlayer/api';\nimport { formatLocale, formatPath } from '@intlayer/chokidar';\nimport {\n ANSIColors,\n colon,\n colorize,\n colorizeNumber,\n type GetConfigurationOptions,\n getAppLogger,\n getConfiguration,\n retryManager,\n} from '@intlayer/config';\nimport { getLocaleName } from '@intlayer/core';\nimport { type Locale, Locales } from '@intlayer/types';\nimport { sanitizeChunk, validateTranslation } from '../translateDoc/validation';\nimport {\n buildAlignmentPlan,\n mergeReviewedSegments,\n} from '../translation-alignment/pipeline';\nimport { chunkInference } from '../utils/chunkInference';\nimport { fixChunkStartEndChars } from '../utils/fixChunkStartEndChars';\nimport type { AIClient } from '../utils/setupAI';\n\n/**\n * Review a file using block-aware alignment.\n * This approach:\n * 1. Segments both English and French documents into semantic blocks\n * 2. Aligns blocks using structure (special chars, numbers) and context\n * 3. Detects which blocks changed, were added, or deleted\n * 4. Only sends changed/new blocks to AI for translation\n * 5. Handles reordering automatically\n */\nexport const reviewFileBlockAware = async (\n baseFilePath: string,\n outputFilePath: string,\n locale: Locale,\n baseLocale: Locale,\n aiOptions?: AIOptions,\n configOptions?: GetConfigurationOptions,\n customInstructions?: string,\n changedLines?: number[],\n aiClient?: AIClient,\n aiConfig?: AIConfig\n) => {\n const configuration = getConfiguration(configOptions);\n const applicationLogger = getAppLogger(configuration);\n\n const englishText = await readFile(baseFilePath, 'utf-8');\n const frenchText = await readFile(outputFilePath, 'utf-8').catch(() => '');\n\n const basePrompt = readAsset('./prompts/REVIEW_PROMPT.md', 'utf-8')\n .replaceAll('{{localeName}}', `${formatLocale(locale, false)}`)\n .replaceAll('{{baseLocaleName}}', `${formatLocale(baseLocale, false)}`)\n .replace('{{applicationContext}}', aiOptions?.applicationContext ?? '-')\n .replace('{{customInstructions}}', customInstructions ?? '-');\n\n const filePrefixText = `${ANSIColors.GREY_DARK}[${formatPath(baseFilePath)}${ANSIColors.GREY_DARK}] `;\n const filePrefix = [\n colon(filePrefixText, { colSize: 40 }),\n `→ ${ANSIColors.RESET}`,\n ].join('');\n const prefixText = `${ANSIColors.GREY_DARK}[${formatPath(baseFilePath)}${ANSIColors.GREY_DARK}][${formatLocale(locale)}${ANSIColors.GREY_DARK}] `;\n const prefix = [\n colon(prefixText, { colSize: 40 }),\n `→ ${ANSIColors.RESET}`,\n ].join('');\n\n // Build block-aware alignment and plan\n const { englishBlocks, frenchBlocks, plan, segmentsToReview } =\n buildAlignmentPlan({\n englishText,\n frenchText,\n changedLines,\n });\n\n applicationLogger(\n `${filePrefix}Block-aware alignment complete. 
Total blocks: EN=${colorizeNumber(englishBlocks.length)}, FR=${colorizeNumber(frenchBlocks.length)}`\n );\n applicationLogger(\n `${filePrefix}Actions: reuse=${colorizeNumber(plan.actions.filter((a) => a.kind === 'reuse').length)}, review=${colorizeNumber(plan.actions.filter((a) => a.kind === 'review').length)}, new=${colorizeNumber(plan.actions.filter((a) => a.kind === 'insert_new').length)}, delete=${colorizeNumber(plan.actions.filter((a) => a.kind === 'delete').length)}`\n );\n\n if (segmentsToReview.length === 0) {\n applicationLogger(\n `${filePrefix}No segments need review, reusing existing translation`\n );\n mkdirSync(dirname(outputFilePath), { recursive: true });\n writeFileSync(\n outputFilePath,\n mergeReviewedSegments(plan, frenchBlocks, new Map())\n );\n applicationLogger(\n `${colorize('✔', ANSIColors.GREEN)} File ${formatPath(outputFilePath)} updated successfully (no changes needed).`\n );\n return;\n }\n\n applicationLogger(\n `${filePrefix}Segments to review: ${colorizeNumber(segmentsToReview.length)}`\n );\n\n // Review segments that need AI translation\n const reviewedSegmentsMap = new Map<number, string>();\n\n for (const segment of segmentsToReview) {\n const segmentNumber = segmentsToReview.indexOf(segment) + 1;\n const englishBlock = segment.englishBlock;\n\n const getBaseChunkContextPrompt = () =>\n `**BLOCK ${segmentNumber} of ${segmentsToReview.length}** is the base block in ${formatLocale(baseLocale, false)} as reference.\\n` +\n `///chunksStart///\\n` +\n englishBlock.content +\n `///chunksEnd///`;\n\n const getFrenchChunkPrompt = () =>\n `**BLOCK ${segmentNumber} of ${segmentsToReview.length}** is the current block to review in ${formatLocale(locale, false)}.\\n` +\n `///chunksStart///\\n` +\n (segment.frenchBlockText ?? '') +\n `///chunksEnd///`;\n\n const reviewedChunkResult = await retryManager(async () => {\n const result = await chunkInference(\n [\n { role: 'system', content: basePrompt },\n { role: 'system', content: getBaseChunkContextPrompt() },\n { role: 'system', content: getFrenchChunkPrompt() },\n {\n role: 'system',\n content: `The next user message will be the **BLOCK ${colorizeNumber(segmentNumber)} of ${colorizeNumber(segmentsToReview.length)}** that should be translated in ${getLocaleName(locale, Locales.ENGLISH)} (${locale}).`,\n },\n { role: 'user', content: englishBlock.content },\n ],\n aiOptions,\n configuration,\n aiClient,\n aiConfig\n );\n\n applicationLogger(\n `${prefix}${colorizeNumber(result.tokenUsed)} tokens used - Block ${colorizeNumber(segmentNumber)} of ${colorizeNumber(segmentsToReview.length)}`\n );\n\n // Sanitize artifacts (e.g. Markdown code block wrappers)\n let processedChunk = sanitizeChunk(\n result?.fileContent,\n englishBlock.content\n );\n\n // Fix start/end characters\n processedChunk = fixChunkStartEndChars(\n processedChunk,\n englishBlock.content\n );\n\n // Validate Translation (YAML, Code fences, Length ratio)\n const isValid = validateTranslation(\n englishBlock.content,\n processedChunk,\n applicationLogger\n );\n\n if (!isValid) {\n throw new Error(\n 'Validation failed for chunk (structure or length mismatch). 
Retrying...'\n );\n }\n\n return processedChunk;\n })();\n\n reviewedSegmentsMap.set(segment.actionIndex, reviewedChunkResult);\n }\n\n // Merge reviewed segments back into final document\n const finalFrenchOutput = mergeReviewedSegments(\n plan,\n frenchBlocks,\n reviewedSegmentsMap\n );\n\n mkdirSync(dirname(outputFilePath), { recursive: true });\n writeFileSync(outputFilePath, finalFrenchOutput);\n\n applicationLogger(\n `${colorize('✔', ANSIColors.GREEN)} File ${formatPath(outputFilePath)} created/updated successfully.`\n );\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAqCA,MAAa,uBAAuB,OAClC,cACA,gBACA,QACA,YACA,WACA,eACA,oBACA,cACA,UACA,aACG;CACH,MAAM,uDAAiC,cAAc;CACrD,MAAM,uDAAiC,cAAc;CAErD,MAAM,cAAc,qCAAe,cAAc,QAAQ;CACzD,MAAM,aAAa,qCAAe,gBAAgB,QAAQ,CAAC,YAAY,GAAG;CAE1E,MAAM,aAAaA,+BAAU,8BAA8B,QAAQ,CAChE,WAAW,kBAAkB,wCAAgB,QAAQ,MAAM,GAAG,CAC9D,WAAW,sBAAsB,wCAAgB,YAAY,MAAM,GAAG,CACtE,QAAQ,0BAA0B,WAAW,sBAAsB,IAAI,CACvE,QAAQ,0BAA0B,sBAAsB,IAAI;CAG/D,MAAM,aAAa,6BADI,GAAGC,4BAAW,UAAU,sCAAc,aAAa,GAAGA,4BAAW,UAAU,KAE1E,EAAE,SAAS,IAAI,CAAC,EACtC,KAAKA,4BAAW,QACjB,CAAC,KAAK,GAAG;CAEV,MAAM,SAAS,6BADI,GAAGA,4BAAW,UAAU,sCAAc,aAAa,GAAGA,4BAAW,UAAU,yCAAiB,OAAO,GAAGA,4BAAW,UAAU,KAE1H,EAAE,SAAS,IAAI,CAAC,EAClC,KAAKA,4BAAW,QACjB,CAAC,KAAK,GAAG;CAGV,MAAM,EAAE,eAAe,cAAc,MAAM,qBACzCC,0DAAmB;EACjB;EACA;EACA;EACD,CAAC;AAEJ,mBACE,GAAG,WAAW,wFAAkE,cAAc,OAAO,CAAC,4CAAsB,aAAa,OAAO,GACjJ;AACD,mBACE,GAAG,WAAW,sDAAgC,KAAK,QAAQ,QAAQ,MAAM,EAAE,SAAS,QAAQ,CAAC,OAAO,CAAC,gDAA0B,KAAK,QAAQ,QAAQ,MAAM,EAAE,SAAS,SAAS,CAAC,OAAO,CAAC,6CAAuB,KAAK,QAAQ,QAAQ,MAAM,EAAE,SAAS,aAAa,CAAC,OAAO,CAAC,gDAA0B,KAAK,QAAQ,QAAQ,MAAM,EAAE,SAAS,SAAS,CAAC,OAAO,GAC5V;AAED,KAAI,iBAAiB,WAAW,GAAG;AACjC,oBACE,GAAG,WAAW,uDACf;AACD,gDAAkB,eAAe,EAAE,EAAE,WAAW,MAAM,CAAC;AACvD,6BACE,gBACAC,oEAAsB,MAAM,8BAAc,IAAI,KAAK,CAAC,CACrD;AACD,oBACE,kCAAY,KAAKF,4BAAW,MAAM,CAAC,2CAAmB,eAAe,CAAC,4CACvE;AACD;;AAGF,mBACE,GAAG,WAAW,2DAAqC,iBAAiB,OAAO,GAC5E;CAGD,MAAM,sCAAsB,IAAI,KAAqB;AAErD,MAAK,MAAM,WAAW,kBAAkB;EACtC,MAAM,gBAAgB,iBAAiB,QAAQ,QAAQ,GAAG;EAC1D,MAAM,eAAe,QAAQ;EAE7B,MAAM,kCACJ,WAAW,cAAc,MAAM,iBAAiB,OAAO,+DAAuC,YAAY,MAAM,CAAC,uCAEjH,aAAa,UACb;EAEF,MAAM,6BACJ,WAAW,cAAc,MAAM,iBAAiB,OAAO,4EAAoD,QAAQ,MAAM,CAAC,2BAEzH,QAAQ,mBAAmB,MAC5B;EAEF,MAAM,sBAAsB,yCAAmB,YAAY;GACzD,MAAM,SAAS,MAAMG,4CACnB;IACE;KAAE,MAAM;KAAU,SAAS;KAAY;IACvC;KAAE,MAAM;KAAU,SAAS,2BAA2B;KAAE;IACxD;KAAE,MAAM;KAAU,SAAS,sBAAsB;KAAE;IACnD;KACE,MAAM;KACN,SAAS,kFAA4D,cAAc,CAAC,2CAAqB,iBAAiB,OAAO,CAAC,oEAAgD,QAAQC,wBAAQ,QAAQ,CAAC,IAAI,OAAO;KACvN;IACD;KAAE,MAAM;KAAQ,SAAS,aAAa;KAAS;IAChD,EACD,WACA,eACA,UACA,SACD;AAED,qBACE,GAAG,8CAAwB,OAAO,UAAU,CAAC,4DAAsC,cAAc,CAAC,2CAAqB,iBAAiB,OAAO,GAChJ;GAGD,IAAI,iBAAiBC,8CACnB,QAAQ,aACR,aAAa,QACd;AAGD,oBAAiBC,0DACf,gBACA,aAAa,QACd;AASD,OAAI,CANYC,oDACd,aAAa,SACb,gBACA,kBACD,CAGC,OAAM,IAAI,MACR,0EACD;AAGH,UAAO;IACP,EAAE;AAEJ,sBAAoB,IAAI,QAAQ,aAAa,oBAAoB;;CAInE,MAAM,oBAAoBL,oEACxB,MACA,cACA,oBACD;AAED,+CAAkB,eAAe,EAAE,EAAE,WAAW,MAAM,CAAC;AACvD,4BAAc,gBAAgB,kBAAkB;AAEhD,mBACE,kCAAY,KAAKF,4BAAW,MAAM,CAAC,2CAAmB,eAAe,CAAC,gCACvE"}
@@ -0,0 +1,8 @@
+ const require_translateDoc_validation = require('./validation.cjs');
+ const require_translateDoc_translateFile = require('./translateFile.cjs');
+ const require_translateDoc_translateDoc = require('./translateDoc.cjs');
+
+ exports.sanitizeChunk = require_translateDoc_validation.sanitizeChunk;
+ exports.translateDoc = require_translateDoc_translateDoc.translateDoc;
+ exports.translateFile = require_translateDoc_translateFile.translateFile;
+ exports.validateTranslation = require_translateDoc_validation.validateTranslation;
@@ -0,0 +1,74 @@
+ const require_rolldown_runtime = require('../_virtual/rolldown_runtime.cjs');
+ const require_utils_setupAI = require('../utils/setupAI.cjs');
+ const require_utils_checkFileModifiedRange = require('../utils/checkFileModifiedRange.cjs');
+ const require_utils_getOutputFilePath = require('../utils/getOutputFilePath.cjs');
+ const require_translateDoc_translateFile = require('./translateFile.cjs');
+ let _intlayer_chokidar = require("@intlayer/chokidar");
+ let _intlayer_config = require("@intlayer/config");
+ let node_path = require("node:path");
+ let node_fs = require("node:fs");
+ let fast_glob = require("fast-glob");
+ fast_glob = require_rolldown_runtime.__toESM(fast_glob);
+ let node_perf_hooks = require("node:perf_hooks");
+
+ //#region src/translateDoc/translateDoc.ts
+ const translateDoc = async ({ docPattern, locales, excludedGlobPattern, baseLocale, aiOptions, nbSimultaneousFileProcessed = 20, configOptions, customInstructions, skipIfModifiedBefore, skipIfModifiedAfter, skipIfExists, gitOptions, flushStrategy = "incremental" }) => {
+ const configuration = (0, _intlayer_config.getConfiguration)(configOptions);
+ const appLogger = (0, _intlayer_config.getAppLogger)(configuration);
+ const maxConcurrentChunks = nbSimultaneousFileProcessed;
+ const globalChunkLimiter = (0, _intlayer_chokidar.pLimit)(maxConcurrentChunks);
+ let docList = await (0, fast_glob.default)(docPattern, { ignore: excludedGlobPattern });
+ const aiResult = await require_utils_setupAI.setupAI(configuration, aiOptions);
+ if (!aiResult?.hasAIAccess) return;
+ const { aiClient, aiConfig } = aiResult;
+ if (gitOptions) {
+ const gitChangedFiles = await (0, _intlayer_chokidar.listGitFiles)(gitOptions);
+ if (gitChangedFiles) docList = docList.filter((path) => gitChangedFiles.some((gitFile) => (0, node_path.join)(process.cwd(), path) === gitFile));
+ }
+ const batchStartTime = node_perf_hooks.performance.now();
+ appLogger(`Translating ${(0, _intlayer_config.colorizeNumber)(docList.length)} files to ${(0, _intlayer_config.colorizeNumber)(locales.length)} locales. \nGlobal Concurrency: ${(0, _intlayer_config.colorizeNumber)(maxConcurrentChunks)} chunks in parallel.`);
+ const errorState = {
+ count: 0,
+ maxErrors: 5,
+ shouldStop: false
+ };
+ await (0, _intlayer_chokidar.parallelize)(docList.flatMap((docPath) => locales.map((locale) => async () => {
+ if (errorState.shouldStop) return;
+ const absoluteBaseFilePath = (0, node_path.join)(configuration.content.baseDir, docPath);
+ const outputFilePath = require_utils_getOutputFilePath.getOutputFilePath(absoluteBaseFilePath, locale, baseLocale);
+ if (skipIfExists && (0, node_fs.existsSync)(outputFilePath)) return;
+ if (flushStrategy === "incremental" && !(0, node_fs.existsSync)(outputFilePath)) {
+ (0, node_fs.mkdirSync)((0, node_path.dirname)(outputFilePath), { recursive: true });
+ (0, node_fs.writeFileSync)(outputFilePath, "");
+ }
+ const fileModificationData = require_utils_checkFileModifiedRange.checkFileModifiedRange(outputFilePath, {
+ skipIfModifiedBefore,
+ skipIfModifiedAfter
+ });
+ if (fileModificationData.isSkipped) {
+ appLogger(fileModificationData.message);
+ return;
+ }
+ await require_translateDoc_translateFile.translateFile({
+ baseFilePath: absoluteBaseFilePath,
+ outputFilePath,
+ locale,
+ baseLocale,
+ configuration,
+ errorState,
+ aiOptions,
+ customInstructions,
+ aiClient,
+ aiConfig,
+ flushStrategy,
+ limit: globalChunkLimiter
+ });
+ })), (task) => task(), 50);
+ const batchDuration = ((node_perf_hooks.performance.now() - batchStartTime) / 1e3).toFixed(2);
+ if (errorState.count > 0) appLogger(`Finished with ${errorState.count} errors in ${batchDuration}s.`);
+ else appLogger(`${(0, _intlayer_config.colorize)("✔", _intlayer_config.ANSIColors.GREEN)} Batch completed successfully in ${(0, _intlayer_config.colorizeNumber)(batchDuration)}s.`);
+ };
+
+ //#endregion
+ exports.translateDoc = translateDoc;
+ //# sourceMappingURL=translateDoc.cjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"translateDoc.cjs","names":["docList: string[]","setupAI","performance","errorState: ErrorState","getOutputFilePath","checkFileModifiedRange","translateFile","ANSIColors"],"sources":["../../../src/translateDoc/translateDoc.ts"],"sourcesContent":["import { existsSync, mkdirSync, writeFileSync } from 'node:fs';\nimport { dirname, join } from 'node:path';\nimport { performance } from 'node:perf_hooks';\nimport { listGitFiles, parallelize, pLimit } from '@intlayer/chokidar';\nimport {\n ANSIColors,\n colorize,\n colorizeNumber,\n getAppLogger,\n getConfiguration,\n} from '@intlayer/config';\nimport type { Locale } from '@intlayer/types';\nimport fg from 'fast-glob';\nimport { checkFileModifiedRange } from '../utils/checkFileModifiedRange';\nimport { getOutputFilePath } from '../utils/getOutputFilePath';\nimport { setupAI } from '../utils/setupAI';\nimport { translateFile } from './translateFile';\nimport type { ErrorState, TranslateDocOptions } from './types';\n\nexport const translateDoc = async ({\n docPattern,\n locales,\n excludedGlobPattern,\n baseLocale,\n aiOptions,\n nbSimultaneousFileProcessed = 20, // Default to a higher concurrency for chunks\n configOptions,\n customInstructions,\n skipIfModifiedBefore,\n skipIfModifiedAfter,\n skipIfExists,\n gitOptions,\n flushStrategy = 'incremental',\n}: TranslateDocOptions) => {\n const configuration = getConfiguration(configOptions);\n const appLogger = getAppLogger(configuration);\n\n // 1. GLOBAL QUEUE SETUP\n // We use pLimit to create a single bottleneck for AI requests.\n // This queue is shared across all files and locales.\n const maxConcurrentChunks = nbSimultaneousFileProcessed;\n const globalChunkLimiter = pLimit(maxConcurrentChunks);\n\n let docList: string[] = await fg(docPattern, {\n ignore: excludedGlobPattern,\n });\n\n const aiResult = await setupAI(configuration, aiOptions);\n if (!aiResult?.hasAIAccess) return;\n const { aiClient, aiConfig } = aiResult;\n\n if (gitOptions) {\n const gitChangedFiles = await listGitFiles(gitOptions);\n if (gitChangedFiles) {\n docList = docList.filter((path) =>\n gitChangedFiles.some((gitFile) => join(process.cwd(), path) === gitFile)\n );\n }\n }\n\n const batchStartTime = performance.now();\n\n appLogger(\n `Translating ${colorizeNumber(docList.length)} files to ${colorizeNumber(locales.length)} locales. \\n` +\n `Global Concurrency: ${colorizeNumber(maxConcurrentChunks)} chunks in parallel.`\n );\n\n const errorState: ErrorState = {\n count: 0,\n maxErrors: 5,\n shouldStop: false,\n };\n\n // 2. 
FLATTENED TASK LIST\n // We create a task for every File x Locale combination.\n const allTasks = docList.flatMap((docPath) =>\n locales.map((locale) => async () => {\n if (errorState.shouldStop) return;\n\n const absoluteBaseFilePath = join(configuration.content.baseDir, docPath);\n const outputFilePath = getOutputFilePath(\n absoluteBaseFilePath,\n locale,\n baseLocale\n );\n\n // Skip logic\n if (skipIfExists && existsSync(outputFilePath)) return;\n\n if (flushStrategy === 'incremental' && !existsSync(outputFilePath)) {\n mkdirSync(dirname(outputFilePath), { recursive: true });\n writeFileSync(outputFilePath, '');\n }\n\n const fileModificationData = checkFileModifiedRange(outputFilePath, {\n skipIfModifiedBefore,\n skipIfModifiedAfter,\n });\n\n if (fileModificationData.isSkipped) {\n appLogger(fileModificationData.message);\n return;\n }\n\n // Execute translation using the SHARED limiter\n await translateFile({\n baseFilePath: absoluteBaseFilePath,\n outputFilePath,\n locale: locale as Locale,\n baseLocale,\n configuration,\n errorState,\n aiOptions,\n customInstructions,\n aiClient,\n aiConfig,\n flushStrategy,\n limit: globalChunkLimiter, // Pass the global queue\n });\n })\n );\n\n // 3. HIGH-THROUGHPUT FILE OPENER\n // We open many files simultaneously (e.g., 50) to ensure the global chunk queue\n // is always saturated with work.\n // If we open too few files, the chunk queue might drain faster than we can read new files.\n const FILE_OPEN_LIMIT = 50;\n\n await parallelize(allTasks, (task) => task(), FILE_OPEN_LIMIT);\n\n const batchEndTime = performance.now();\n const batchDuration = ((batchEndTime - batchStartTime) / 1000).toFixed(2);\n\n if (errorState.count > 0) {\n appLogger(`Finished with ${errorState.count} errors in ${batchDuration}s.`);\n } else {\n appLogger(\n `${colorize('✔', ANSIColors.GREEN)} Batch completed successfully in ${colorizeNumber(batchDuration)}s.`\n );\n }\n};\n"],"mappings":";;;;;;;;;;;;;;AAmBA,MAAa,eAAe,OAAO,EACjC,YACA,SACA,qBACA,YACA,WACA,8BAA8B,IAC9B,eACA,oBACA,sBACA,qBACA,cACA,YACA,gBAAgB,oBACS;CACzB,MAAM,uDAAiC,cAAc;CACrD,MAAM,+CAAyB,cAAc;CAK7C,MAAM,sBAAsB;CAC5B,MAAM,oDAA4B,oBAAoB;CAEtD,IAAIA,UAAoB,6BAAS,YAAY,EAC3C,QAAQ,qBACT,CAAC;CAEF,MAAM,WAAW,MAAMC,8BAAQ,eAAe,UAAU;AACxD,KAAI,CAAC,UAAU,YAAa;CAC5B,MAAM,EAAE,UAAU,aAAa;AAE/B,KAAI,YAAY;EACd,MAAM,kBAAkB,2CAAmB,WAAW;AACtD,MAAI,gBACF,WAAU,QAAQ,QAAQ,SACxB,gBAAgB,MAAM,gCAAiB,QAAQ,KAAK,EAAE,KAAK,KAAK,QAAQ,CACzE;;CAIL,MAAM,iBAAiBC,4BAAY,KAAK;AAExC,WACE,oDAA8B,QAAQ,OAAO,CAAC,iDAA2B,QAAQ,OAAO,CAAC,uEACjD,oBAAoB,CAAC,sBAC9D;CAED,MAAMC,aAAyB;EAC7B,OAAO;EACP,WAAW;EACX,YAAY;EACb;AAyDD,2CArDiB,QAAQ,SAAS,YAChC,QAAQ,KAAK,WAAW,YAAY;AAClC,MAAI,WAAW,WAAY;EAE3B,MAAM,2CAA4B,cAAc,QAAQ,SAAS,QAAQ;EACzE,MAAM,iBAAiBC,kDACrB,sBACA,QACA,WACD;AAGD,MAAI,wCAA2B,eAAe,CAAE;AAEhD,MAAI,kBAAkB,iBAAiB,yBAAY,eAAe,EAAE;AAClE,iDAAkB,eAAe,EAAE,EAAE,WAAW,MAAM,CAAC;AACvD,8BAAc,gBAAgB,GAAG;;EAGnC,MAAM,uBAAuBC,4DAAuB,gBAAgB;GAClE;GACA;GACD,CAAC;AAEF,MAAI,qBAAqB,WAAW;AAClC,aAAU,qBAAqB,QAAQ;AACvC;;AAIF,QAAMC,iDAAc;GAClB,cAAc;GACd;GACQ;GACR;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACA,OAAO;GACR,CAAC;GACF,CACH,GAQ4B,SAAS,MAAM,EAFpB,GAEsC;CAG9D,MAAM,kBADeJ,4BAAY,KAAK,GACC,kBAAkB,KAAM,QAAQ,EAAE;AAEzE,KAAI,WAAW,QAAQ,EACrB,WAAU,iBAAiB,WAAW,MAAM,aAAa,cAAc,IAAI;KAE3E,WACE,kCAAY,KAAKK,4BAAW,MAAM,CAAC,wEAAkD,cAAc,CAAC,IACrG"}
@@ -0,0 +1,103 @@
+ const require_rolldown_runtime = require('../_virtual/rolldown_runtime.cjs');
+ const require__utils_asset = require('../_virtual/_utils_asset.cjs');
+ const require_translateDoc_validation = require('./validation.cjs');
+ const require_utils_chunkInference = require('../utils/chunkInference.cjs');
+ const require_utils_fixChunkStartEndChars = require('../utils/fixChunkStartEndChars.cjs');
+ const require_utils_calculateChunks = require('../utils/calculateChunks.cjs');
+ let _intlayer_chokidar = require("@intlayer/chokidar");
+ let _intlayer_config = require("@intlayer/config");
+ let node_path = require("node:path");
+ let node_fs = require("node:fs");
+ let node_fs_promises = require("node:fs/promises");
+ let node_perf_hooks = require("node:perf_hooks");
+
+ //#region src/translateDoc/translateFile.ts
+ const translateFile = async ({ baseFilePath, outputFilePath, locale, baseLocale, configuration, errorState, aiOptions, customInstructions, aiClient, aiConfig, flushStrategy = "incremental", onChunkReceive, limit }) => {
+ if (errorState.shouldStop) return null;
+ const appLogger = (0, _intlayer_config.getAppLogger)(configuration, { config: { prefix: "" } });
+ const fileStartTime = node_perf_hooks.performance.now();
+ try {
+ const chunks = require_utils_calculateChunks.chunkText(await (0, node_fs_promises.readFile)(baseFilePath, "utf-8"));
+ const totalChunks = chunks.length;
+ const filePrefix = `${(0, _intlayer_config.colon)(`${_intlayer_config.ANSIColors.GREY_DARK}[${(0, _intlayer_chokidar.formatPath)(baseFilePath)}${_intlayer_config.ANSIColors.GREY_DARK}] `, { colSize: 40 })}${_intlayer_config.ANSIColors.RESET}`;
+ const prefix = `${(0, _intlayer_config.colon)(`${_intlayer_config.ANSIColors.GREY_DARK}[${(0, _intlayer_chokidar.formatPath)(baseFilePath)}${_intlayer_config.ANSIColors.GREY_DARK}][${(0, _intlayer_chokidar.formatLocale)(locale)}${_intlayer_config.ANSIColors.GREY_DARK}] `, { colSize: 40 })}${_intlayer_config.ANSIColors.RESET}`;
+ appLogger(`${filePrefix}Split into ${(0, _intlayer_config.colorizeNumber)(totalChunks)} chunks. Queuing...`);
+ const basePrompt = require__utils_asset.readAsset("./prompts/TRANSLATE_PROMPT.md", "utf-8").replaceAll("{{localeName}}", `${(0, _intlayer_chokidar.formatLocale)(locale, false)}`).replaceAll("{{baseLocaleName}}", `${(0, _intlayer_chokidar.formatLocale)(baseLocale, false)}`).replace("{{applicationContext}}", aiOptions?.applicationContext ?? "-").replace("{{customInstructions}}", customInstructions ?? "-");
+ const translatedParts = new Array(totalChunks).fill("");
+ const runTask = limit ?? ((fn) => fn());
+ const tasks = chunks.map((chunk, i) => runTask(async () => {
+ if (errorState.shouldStop) return null;
+ const chunkLogger = (0, _intlayer_config.getAppLogger)(configuration, { config: { prefix: `${prefix} ${_intlayer_config.ANSIColors.GREY_DARK}[${i + 1}/${totalChunks}] ${_intlayer_config.ANSIColors.RESET}` } });
+ const chunkStartTime = node_perf_hooks.performance.now();
+ const isFirstChunk = i === 0;
+ const fileToTranslateCurrentChunk = chunk.content;
+ const getPrevChunkPrompt = () => `>>> CONTEXT: PREVIOUS SOURCE CONTENT <<<\n\`\`\`\n` + (chunks[i - 1]?.content ?? "") + `\n\`\`\`\n>>> END PREVIOUS CONTEXT <<<`;
+ const getBaseChunkContextPrompt = () => `>>> CONTEXT: NEXT CONTENT <<<\n\`\`\`\n` + (chunks[i + 1]?.content ?? "") + `\n\`\`\`\n>>> END NEXT CONTEXT <<<`;
+ chunkLogger("Process started");
+ const { content: translatedChunk, tokens } = await (0, _intlayer_config.retryManager)(async () => {
+ const result = await require_utils_chunkInference.chunkInference([
+ {
+ role: "system",
+ content: basePrompt
+ },
+ ...chunks[i + 1] ? [{
+ role: "system",
+ content: getBaseChunkContextPrompt()
+ }] : [],
+ ...isFirstChunk ? [] : [{
+ role: "system",
+ content: getPrevChunkPrompt()
+ }],
+ {
+ role: "system",
+ content: [`You are translating TARGET CHUNK (${i + 1}/${totalChunks}).`, `Translate ONLY the target chunk. Preserve frontmatter/code exactly.`].join("\n")
+ },
+ {
+ role: "user",
+ content: `>>> TARGET CHUNK START <<<\n${fileToTranslateCurrentChunk}\n>>> TARGET CHUNK END <<<`
+ }
+ ], aiOptions, configuration, aiClient, aiConfig);
+ let processedChunk = require_translateDoc_validation.sanitizeChunk(result?.fileContent, fileToTranslateCurrentChunk);
+ processedChunk = require_utils_fixChunkStartEndChars.fixChunkStartEndChars(processedChunk, fileToTranslateCurrentChunk);
+ if (!require_translateDoc_validation.validateTranslation(fileToTranslateCurrentChunk, processedChunk, chunkLogger)) throw new Error(`Validation failed for chunk ${i + 1}/${totalChunks}`);
+ return {
+ content: processedChunk,
+ tokens: result.tokenUsed
+ };
+ })();
+ const chunkDuration = (node_perf_hooks.performance.now() - chunkStartTime).toFixed(0);
+ translatedParts[i] = translatedChunk;
+ if (onChunkReceive) onChunkReceive(translatedChunk, i, totalChunks);
+ if (flushStrategy === "incremental") {
+ if (translatedParts.slice(0, i + 1).every((p) => p && p !== "")) {
+ let endIdx = 0;
+ while (endIdx < totalChunks && translatedParts[endIdx] && translatedParts[endIdx] !== "") endIdx++;
+ const currentContent = translatedParts.slice(0, endIdx).join("");
+ (0, node_fs.mkdirSync)((0, node_path.dirname)(outputFilePath), { recursive: true });
+ (0, node_fs.writeFileSync)(outputFilePath, currentContent);
+ }
+ }
+ chunkLogger([`${(0, _intlayer_config.colorizeNumber)(tokens)} tokens used `, `${_intlayer_config.ANSIColors.GREY_DARK}in ${(0, _intlayer_config.colorizeNumber)(chunkDuration)}ms${_intlayer_config.ANSIColors.RESET}`].join(""));
+ }));
+ await Promise.all(tasks);
+ const fullContent = translatedParts.join("");
+ if (flushStrategy === "end" || flushStrategy === "incremental") {
+ (0, node_fs.mkdirSync)((0, node_path.dirname)(outputFilePath), { recursive: true });
+ (0, node_fs.writeFileSync)(outputFilePath, fullContent);
+ }
+ const totalDuration = ((node_perf_hooks.performance.now() - fileStartTime) / 1e3).toFixed(2);
+ const relativePath = (0, node_path.relative)(configuration.content.baseDir, outputFilePath);
+ appLogger(`${(0, _intlayer_config.colorize)("✔", _intlayer_config.ANSIColors.GREEN)} File ${(0, _intlayer_chokidar.formatPath)(relativePath)} completed in ${(0, _intlayer_config.colorizeNumber)(totalDuration)}s.`);
+ return fullContent;
+ } catch (error) {
+ errorState.count++;
+ const errorMessage = error?.message ?? JSON.stringify(error);
+ appLogger(`${(0, _intlayer_config.colorize)("✖", _intlayer_config.ANSIColors.RED)} Error: ${errorMessage}`);
+ if (errorState.count >= errorState.maxErrors) errorState.shouldStop = true;
+ return null;
+ }
+ };
+
+ //#endregion
+ exports.translateFile = translateFile;
+ //# sourceMappingURL=translateFile.cjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"translateFile.cjs","names":["performance","chunkText","ANSIColors","readAsset","translatedParts: string[]","chunkInference","sanitizeChunk","fixChunkStartEndChars","validateTranslation","error: any"],"sources":["../../../src/translateDoc/translateFile.ts"],"sourcesContent":["import { mkdirSync, writeFileSync } from 'node:fs';\nimport { readFile } from 'node:fs/promises';\nimport { dirname, relative } from 'node:path';\nimport { performance } from 'node:perf_hooks';\nimport { readAsset } from 'utils:asset';\nimport { formatLocale, formatPath } from '@intlayer/chokidar';\nimport {\n ANSIColors,\n colon,\n colorize,\n colorizeNumber,\n getAppLogger,\n retryManager,\n} from '@intlayer/config';\nimport { chunkText } from '../utils/calculateChunks';\nimport { chunkInference } from '../utils/chunkInference';\nimport { fixChunkStartEndChars } from '../utils/fixChunkStartEndChars';\nimport type { TranslateFileOptions } from './types';\nimport { sanitizeChunk, validateTranslation } from './validation';\n\nexport const translateFile = async ({\n baseFilePath,\n outputFilePath,\n locale,\n baseLocale,\n configuration,\n errorState,\n aiOptions,\n customInstructions,\n aiClient,\n aiConfig,\n flushStrategy = 'incremental',\n onChunkReceive,\n limit, // The Global Limiter\n}: TranslateFileOptions): Promise<string | null> => {\n if (errorState.shouldStop) return null;\n\n const appLogger = getAppLogger(configuration, { config: { prefix: '' } });\n const fileStartTime = performance.now();\n\n try {\n const fileContent = await readFile(baseFilePath, 'utf-8');\n const chunks = chunkText(fileContent);\n const totalChunks = chunks.length;\n\n const filePrefixText = `${ANSIColors.GREY_DARK}[${formatPath(baseFilePath)}${ANSIColors.GREY_DARK}] `;\n const filePrefix = `${colon(filePrefixText, { colSize: 40 })}${ANSIColors.RESET}`;\n const prefixText = `${ANSIColors.GREY_DARK}[${formatPath(baseFilePath)}${ANSIColors.GREY_DARK}][${formatLocale(locale)}${ANSIColors.GREY_DARK}] `;\n const prefix = `${colon(prefixText, { colSize: 40 })}${ANSIColors.RESET}`;\n\n appLogger(\n `${filePrefix}Split into ${colorizeNumber(totalChunks)} chunks. Queuing...`\n );\n\n const basePrompt = readAsset('./prompts/TRANSLATE_PROMPT.md', 'utf-8')\n .replaceAll('{{localeName}}', `${formatLocale(locale, false)}`)\n .replaceAll('{{baseLocaleName}}', `${formatLocale(baseLocale, false)}`)\n .replace('{{applicationContext}}', aiOptions?.applicationContext ?? '-')\n .replace('{{customInstructions}}', customInstructions ?? '-');\n\n const translatedParts: string[] = new Array(totalChunks).fill('');\n\n // Fallback if no limiter is provided (runs immediately)\n const runTask = limit ?? ((fn) => fn());\n\n // MAP CHUNKS TO GLOBAL TASKS\n // This pushes ALL chunks for this file into the Global Queue immediately.\n // They will execute whenever the global concurrency slots open up.\n const tasks = chunks.map((chunk, i) =>\n runTask(async () => {\n if (errorState.shouldStop) return null;\n\n const chunkLogger = getAppLogger(configuration, {\n config: {\n prefix: `${prefix} ${ANSIColors.GREY_DARK}[${i + 1}/${totalChunks}] ${ANSIColors.RESET}`,\n },\n });\n\n const chunkStartTime = performance.now();\n const isFirstChunk = i === 0;\n const fileToTranslateCurrentChunk = chunk.content;\n\n // Context Preparation\n const getPrevChunkPrompt = () =>\n `>>> CONTEXT: PREVIOUS SOURCE CONTENT <<<\\n\\`\\`\\`\\n` +\n (chunks[i - 1]?.content ?? 
'') +\n `\\n\\`\\`\\`\\n>>> END PREVIOUS CONTEXT <<<`;\n\n const getBaseChunkContextPrompt = () =>\n `>>> CONTEXT: NEXT CONTENT <<<\\n\\`\\`\\`\\n` +\n (chunks[i + 1]?.content ?? '') +\n `\\n\\`\\`\\`\\n>>> END NEXT CONTEXT <<<`;\n\n chunkLogger('Process started');\n\n const chunkTranslation = retryManager(async () => {\n const result = await chunkInference(\n [\n { role: 'system', content: basePrompt },\n ...(chunks[i + 1]\n ? [\n {\n role: 'system',\n content: getBaseChunkContextPrompt(),\n } as const,\n ]\n : []),\n ...(isFirstChunk\n ? []\n : [{ role: 'system', content: getPrevChunkPrompt() } as const]),\n {\n role: 'system',\n content: [\n `You are translating TARGET CHUNK (${i + 1}/${totalChunks}).`,\n `Translate ONLY the target chunk. Preserve frontmatter/code exactly.`,\n ].join('\\n'),\n },\n {\n role: 'user',\n content: `>>> TARGET CHUNK START <<<\\n${fileToTranslateCurrentChunk}\\n>>> TARGET CHUNK END <<<`,\n },\n ],\n aiOptions,\n configuration,\n aiClient,\n aiConfig\n );\n\n let processedChunk = sanitizeChunk(\n result?.fileContent,\n fileToTranslateCurrentChunk\n );\n processedChunk = fixChunkStartEndChars(\n processedChunk,\n fileToTranslateCurrentChunk\n );\n\n const isValid = validateTranslation(\n fileToTranslateCurrentChunk,\n processedChunk,\n chunkLogger\n );\n\n if (!isValid) {\n // Throwing an error here signals retryManager to try again\n throw new Error(\n `Validation failed for chunk ${i + 1}/${totalChunks}`\n );\n }\n\n return { content: processedChunk, tokens: result.tokenUsed };\n });\n\n const { content: translatedChunk, tokens } = await chunkTranslation();\n const chunkEndTime = performance.now();\n const chunkDuration = (chunkEndTime - chunkStartTime).toFixed(0);\n\n // Store Result\n translatedParts[i] = translatedChunk;\n\n if (onChunkReceive) {\n onChunkReceive(translatedChunk, i, totalChunks);\n }\n\n // Incremental Flush Strategy\n if (flushStrategy === 'incremental') {\n const isContiguous = translatedParts\n .slice(0, i + 1)\n .every((p) => p && p !== '');\n\n if (isContiguous) {\n let endIdx = 0;\n while (\n endIdx < totalChunks &&\n translatedParts[endIdx] &&\n translatedParts[endIdx] !== ''\n ) {\n endIdx++;\n }\n const currentContent = translatedParts.slice(0, endIdx).join('');\n // Write asynchronously/sync is fine here as node handles file locks reasonably well for single process\n mkdirSync(dirname(outputFilePath), { recursive: true });\n writeFileSync(outputFilePath, currentContent);\n }\n }\n\n chunkLogger(\n [\n `${colorizeNumber(tokens)} tokens used `,\n `${ANSIColors.GREY_DARK}in ${colorizeNumber(chunkDuration)}ms${ANSIColors.RESET}`,\n ].join('')\n );\n })\n );\n\n // Wait for all chunks for this specific file/locale to finish\n await Promise.all(tasks);\n\n // Final Flush\n const fullContent = translatedParts.join('');\n if (flushStrategy === 'end' || flushStrategy === 'incremental') {\n mkdirSync(dirname(outputFilePath), { recursive: true });\n writeFileSync(outputFilePath, fullContent);\n }\n\n const fileEndTime = performance.now();\n const totalDuration = ((fileEndTime - fileStartTime) / 1000).toFixed(2);\n const relativePath = relative(\n configuration.content.baseDir,\n outputFilePath\n );\n\n appLogger(\n `${colorize('✔', ANSIColors.GREEN)} File ${formatPath(relativePath)} completed in ${colorizeNumber(totalDuration)}s.`\n );\n\n return fullContent;\n } catch (error: any) {\n errorState.count++;\n const errorMessage = error?.message ?? 
JSON.stringify(error);\n appLogger(`${colorize('✖', ANSIColors.RED)} Error: ${errorMessage}`);\n if (errorState.count >= errorState.maxErrors) errorState.shouldStop = true;\n return null;\n }\n};\n"],"mappings":";;;;;;;;;;;;;;AAoBA,MAAa,gBAAgB,OAAO,EAClC,cACA,gBACA,QACA,YACA,eACA,YACA,WACA,oBACA,UACA,UACA,gBAAgB,eAChB,gBACA,YACkD;AAClD,KAAI,WAAW,WAAY,QAAO;CAElC,MAAM,+CAAyB,eAAe,EAAE,QAAQ,EAAE,QAAQ,IAAI,EAAE,CAAC;CACzE,MAAM,gBAAgBA,4BAAY,KAAK;AAEvC,KAAI;EAEF,MAAM,SAASC,wCADK,qCAAe,cAAc,QAAQ,CACpB;EACrC,MAAM,cAAc,OAAO;EAG3B,MAAM,aAAa,+BADI,GAAGC,4BAAW,UAAU,sCAAc,aAAa,GAAGA,4BAAW,UAAU,KACtD,EAAE,SAAS,IAAI,CAAC,GAAGA,4BAAW;EAE1E,MAAM,SAAS,+BADI,GAAGA,4BAAW,UAAU,sCAAc,aAAa,GAAGA,4BAAW,UAAU,yCAAiB,OAAO,GAAGA,4BAAW,UAAU,KAC1G,EAAE,SAAS,IAAI,CAAC,GAAGA,4BAAW;AAElE,YACE,GAAG,WAAW,kDAA4B,YAAY,CAAC,qBACxD;EAED,MAAM,aAAaC,+BAAU,iCAAiC,QAAQ,CACnE,WAAW,kBAAkB,wCAAgB,QAAQ,MAAM,GAAG,CAC9D,WAAW,sBAAsB,wCAAgB,YAAY,MAAM,GAAG,CACtE,QAAQ,0BAA0B,WAAW,sBAAsB,IAAI,CACvE,QAAQ,0BAA0B,sBAAsB,IAAI;EAE/D,MAAMC,kBAA4B,IAAI,MAAM,YAAY,CAAC,KAAK,GAAG;EAGjE,MAAM,UAAU,WAAW,OAAO,IAAI;EAKtC,MAAM,QAAQ,OAAO,KAAK,OAAO,MAC/B,QAAQ,YAAY;AAClB,OAAI,WAAW,WAAY,QAAO;GAElC,MAAM,iDAA2B,eAAe,EAC9C,QAAQ,EACN,QAAQ,GAAG,OAAO,IAAIF,4BAAW,UAAU,GAAG,IAAI,EAAE,GAAG,YAAY,IAAIA,4BAAW,SACnF,EACF,CAAC;GAEF,MAAM,iBAAiBF,4BAAY,KAAK;GACxC,MAAM,eAAe,MAAM;GAC3B,MAAM,8BAA8B,MAAM;GAG1C,MAAM,2BACJ,wDACC,OAAO,IAAI,IAAI,WAAW,MAC3B;GAEF,MAAM,kCACJ,6CACC,OAAO,IAAI,IAAI,WAAW,MAC3B;AAEF,eAAY,kBAAkB;GA4D9B,MAAM,EAAE,SAAS,iBAAiB,WAAW,yCA1DP,YAAY;IAChD,MAAM,SAAS,MAAMK,4CACnB;KACE;MAAE,MAAM;MAAU,SAAS;MAAY;KACvC,GAAI,OAAO,IAAI,KACX,CACE;MACE,MAAM;MACN,SAAS,2BAA2B;MACrC,CACF,GACD,EAAE;KACN,GAAI,eACA,EAAE,GACF,CAAC;MAAE,MAAM;MAAU,SAAS,oBAAoB;MAAE,CAAU;KAChE;MACE,MAAM;MACN,SAAS,CACP,qCAAqC,IAAI,EAAE,GAAG,YAAY,KAC1D,sEACD,CAAC,KAAK,KAAK;MACb;KACD;MACE,MAAM;MACN,SAAS,+BAA+B,4BAA4B;MACrE;KACF,EACD,WACA,eACA,UACA,SACD;IAED,IAAI,iBAAiBC,8CACnB,QAAQ,aACR,4BACD;AACD,qBAAiBC,0DACf,gBACA,4BACD;AAQD,QAAI,CANYC,oDACd,6BACA,gBACA,YACD,CAIC,OAAM,IAAI,MACR,+BAA+B,IAAI,EAAE,GAAG,cACzC;AAGH,WAAO;KAAE,SAAS;KAAgB,QAAQ,OAAO;KAAW;KAC5D,EAEmE;GAErE,MAAM,iBADeR,4BAAY,KAAK,GACA,gBAAgB,QAAQ,EAAE;AAGhE,mBAAgB,KAAK;AAErB,OAAI,eACF,gBAAe,iBAAiB,GAAG,YAAY;AAIjD,OAAI,kBAAkB,eAKpB;QAJqB,gBAClB,MAAM,GAAG,IAAI,EAAE,CACf,OAAO,MAAM,KAAK,MAAM,GAAG,EAEZ;KAChB,IAAI,SAAS;AACb,YACE,SAAS,eACT,gBAAgB,WAChB,gBAAgB,YAAY,GAE5B;KAEF,MAAM,iBAAiB,gBAAgB,MAAM,GAAG,OAAO,CAAC,KAAK,GAAG;AAEhE,mDAAkB,eAAe,EAAE,EAAE,WAAW,MAAM,CAAC;AACvD,gCAAc,gBAAgB,eAAe;;;AAIjD,eACE,CACE,wCAAkB,OAAO,CAAC,gBAC1B,GAAGE,4BAAW,UAAU,0CAAoB,cAAc,CAAC,IAAIA,4BAAW,QAC3E,CAAC,KAAK,GAAG,CACX;IACD,CACH;AAGD,QAAM,QAAQ,IAAI,MAAM;EAGxB,MAAM,cAAc,gBAAgB,KAAK,GAAG;AAC5C,MAAI,kBAAkB,SAAS,kBAAkB,eAAe;AAC9D,iDAAkB,eAAe,EAAE,EAAE,WAAW,MAAM,CAAC;AACvD,8BAAc,gBAAgB,YAAY;;EAI5C,MAAM,kBADcF,4BAAY,KAAK,GACC,iBAAiB,KAAM,QAAQ,EAAE;EACvE,MAAM,uCACJ,cAAc,QAAQ,SACtB,eACD;AAED,YACE,kCAAY,KAAKE,4BAAW,MAAM,CAAC,2CAAmB,aAAa,CAAC,qDAA+B,cAAc,CAAC,IACnH;AAED,SAAO;UACAO,OAAY;AACnB,aAAW;EACX,MAAM,eAAe,OAAO,WAAW,KAAK,UAAU,MAAM;AAC5D,YAAU,kCAAY,KAAKP,4BAAW,IAAI,CAAC,UAAU,eAAe;AACpE,MAAI,WAAW,SAAS,WAAW,UAAW,YAAW,aAAa;AACtE,SAAO"}
File without changes
@@ -0,0 +1,49 @@
+
+ //#region src/translateDoc/validation.ts
+ /**
+ * Validates that the translated content matches the structure of the original.
+ * Throws an error if a mismatch is found, triggering a retry.
+ */
+ const validateTranslation = (original, translated, logger) => {
+ const errors = [];
+ if (original.trimStart().startsWith("---")) {
+ if (!translated.trimStart().startsWith("---")) errors.push("YAML Frontmatter missing: Input starts with \"---\", output does not.");
+ const originalDashes = (original.match(/^---$/gm) || []).length;
+ const translatedDashes = (translated.match(/^---$/gm) || []).length;
+ if (originalDashes >= 2 && translatedDashes < 2) errors.push("YAML Frontmatter unclosed: Input has closing \"---\", output is missing it.");
+ }
+ const fenceRegex = /^\s*```/gm;
+ const originalFences = (original.match(fenceRegex) || []).length;
+ const translatedFences = (translated.match(fenceRegex) || []).length;
+ if (originalFences !== translatedFences) errors.push(`Code fence mismatch: Input has ${originalFences}, output has ${translatedFences}`);
+ const ratio = translated.length / (original.length || 1);
+ const isTooLong = ratio > 2.5;
+ const isSignificantLength = original.length > 50;
+ if (isTooLong && isSignificantLength) errors.push(`Length deviation: Output is ${translated.length} chars vs Input ${original.length} (${ratio.toFixed(1)}x). Likely included context.`);
+ const originalLines = original.split("\n").length;
+ const translatedLines = translated.split("\n").length;
+ if (originalLines > 5) {
+ if (translatedLines < originalLines * .4) errors.push(`Line count deviation: Output has ${translatedLines} lines, Input has ${originalLines}. Likely content deletion.`);
+ }
+ if (errors.length > 0) {
+ logger(`Validation Failed: ${errors.join(", ")}`);
+ return false;
+ }
+ return true;
+ };
+ /**
+ * Clean common AI artifacts
+ */
+ const sanitizeChunk = (translated, original) => {
+ let cleaned = translated;
+ const match = cleaned.match(/^```(?:markdown|md|txt)?\n([\s\S]*?)\n```$/i);
+ if (match) cleaned = match[1];
+ if (!original.startsWith("\n") && cleaned.startsWith("\n")) cleaned = cleaned.replace(/^\n+/, "");
+ if (!original.startsWith(" ") && cleaned.startsWith(" ")) cleaned = cleaned.trimStart();
+ return cleaned;
+ };
+
+ //#endregion
+ exports.sanitizeChunk = sanitizeChunk;
+ exports.validateTranslation = validateTranslation;
+ //# sourceMappingURL=validation.cjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"validation.cjs","names":["errors: string[]"],"sources":["../../../src/translateDoc/validation.ts"],"sourcesContent":["import type { Logger } from '@intlayer/config';\n\n/**\n * Validates that the translated content matches the structure of the original.\n * Throws an error if a mismatch is found, triggering a retry.\n */\nexport const validateTranslation = (\n original: string,\n translated: string,\n logger: Logger\n): boolean => {\n const errors: string[] = [];\n\n // YAML Frontmatter Integrity (CRITICAL)\n if (original.trimStart().startsWith('---')) {\n if (!translated.trimStart().startsWith('---')) {\n errors.push(\n 'YAML Frontmatter missing: Input starts with \"---\", output does not.'\n );\n }\n const originalDashes = (original.match(/^---$/gm) || []).length;\n const translatedDashes = (translated.match(/^---$/gm) || []).length;\n if (originalDashes >= 2 && translatedDashes < 2) {\n errors.push(\n 'YAML Frontmatter unclosed: Input has closing \"---\", output is missing it.'\n );\n }\n }\n\n // Code Fence Check\n const fenceRegex = /^\\s*```/gm;\n const originalFences = (original.match(fenceRegex) || []).length;\n const translatedFences = (translated.match(fenceRegex) || []).length;\n\n if (originalFences !== translatedFences) {\n errors.push(\n `Code fence mismatch: Input has ${originalFences}, output has ${translatedFences}`\n );\n }\n\n // Length/Duplication Check\n const ratio = translated.length / (original.length || 1);\n const isTooLong = ratio > 2.5;\n const isSignificantLength = original.length > 50;\n\n if (isTooLong && isSignificantLength) {\n errors.push(\n `Length deviation: Output is ${translated.length} chars vs Input ${original.length} (${ratio.toFixed(1)}x). Likely included context.`\n );\n }\n\n // Line Count Heuristic\n const originalLines = original.split('\\n').length;\n const translatedLines = translated.split('\\n').length;\n\n if (originalLines > 5) {\n if (translatedLines < originalLines * 0.4) {\n errors.push(\n `Line count deviation: Output has ${translatedLines} lines, Input has ${originalLines}. 
Likely content deletion.`\n );\n }\n }\n\n if (errors.length > 0) {\n logger(`Validation Failed: ${errors.join(', ')}`);\n return false;\n }\n\n return true;\n};\n\n/**\n * Clean common AI artifacts\n */\nexport const sanitizeChunk = (translated: string, original: string): string => {\n let cleaned = translated;\n const wrapRegex = /^```(?:markdown|md|txt)?\\n([\\s\\S]*?)\\n```$/i;\n const match = cleaned.match(wrapRegex);\n if (match) cleaned = match[1];\n\n if (!original.startsWith('\\n') && cleaned.startsWith('\\n')) {\n cleaned = cleaned.replace(/^\\n+/, '');\n }\n if (!original.startsWith(' ') && cleaned.startsWith(' ')) {\n cleaned = cleaned.trimStart();\n }\n return cleaned;\n};\n"],"mappings":";;;;;;AAMA,MAAa,uBACX,UACA,YACA,WACY;CACZ,MAAMA,SAAmB,EAAE;AAG3B,KAAI,SAAS,WAAW,CAAC,WAAW,MAAM,EAAE;AAC1C,MAAI,CAAC,WAAW,WAAW,CAAC,WAAW,MAAM,CAC3C,QAAO,KACL,wEACD;EAEH,MAAM,kBAAkB,SAAS,MAAM,UAAU,IAAI,EAAE,EAAE;EACzD,MAAM,oBAAoB,WAAW,MAAM,UAAU,IAAI,EAAE,EAAE;AAC7D,MAAI,kBAAkB,KAAK,mBAAmB,EAC5C,QAAO,KACL,8EACD;;CAKL,MAAM,aAAa;CACnB,MAAM,kBAAkB,SAAS,MAAM,WAAW,IAAI,EAAE,EAAE;CAC1D,MAAM,oBAAoB,WAAW,MAAM,WAAW,IAAI,EAAE,EAAE;AAE9D,KAAI,mBAAmB,iBACrB,QAAO,KACL,kCAAkC,eAAe,eAAe,mBACjE;CAIH,MAAM,QAAQ,WAAW,UAAU,SAAS,UAAU;CACtD,MAAM,YAAY,QAAQ;CAC1B,MAAM,sBAAsB,SAAS,SAAS;AAE9C,KAAI,aAAa,oBACf,QAAO,KACL,+BAA+B,WAAW,OAAO,kBAAkB,SAAS,OAAO,IAAI,MAAM,QAAQ,EAAE,CAAC,8BACzG;CAIH,MAAM,gBAAgB,SAAS,MAAM,KAAK,CAAC;CAC3C,MAAM,kBAAkB,WAAW,MAAM,KAAK,CAAC;AAE/C,KAAI,gBAAgB,GAClB;MAAI,kBAAkB,gBAAgB,GACpC,QAAO,KACL,oCAAoC,gBAAgB,oBAAoB,cAAc,4BACvF;;AAIL,KAAI,OAAO,SAAS,GAAG;AACrB,SAAO,sBAAsB,OAAO,KAAK,KAAK,GAAG;AACjD,SAAO;;AAGT,QAAO;;;;;AAMT,MAAa,iBAAiB,YAAoB,aAA6B;CAC7E,IAAI,UAAU;CAEd,MAAM,QAAQ,QAAQ,MADJ,8CACoB;AACtC,KAAI,MAAO,WAAU,MAAM;AAE3B,KAAI,CAAC,SAAS,WAAW,KAAK,IAAI,QAAQ,WAAW,KAAK,CACxD,WAAU,QAAQ,QAAQ,QAAQ,GAAG;AAEvC,KAAI,CAAC,SAAS,WAAW,IAAI,IAAI,QAAQ,WAAW,IAAI,CACtD,WAAU,QAAQ,WAAW;AAE/B,QAAO"}
@@ -1,6 +1,6 @@
 
  //#region src/translation-alignment/planActions.ts
- const planAlignmentActions = (alignment, changedEnglishBlockIndexes, similarityOptions) => {
+ const planAlignmentActions = (alignment, changedEnglishBlockIndexes) => {
  const actions = [];
  const seenFrench = /* @__PURE__ */ new Set();
  alignment.forEach((pair) => {
@@ -24,9 +24,7 @@ const planAlignmentActions = (alignment, changedEnglishBlockIndexes, similarityO
  return;
  }
  if (englishIndex >= 0 && frenchIndex !== null) {
- const isChanged = changedEnglishBlockIndexes.has(englishIndex);
- const isHighSimilarity = pair.similarityScore >= similarityOptions.minimumMatchForReuse;
- if (!isChanged && isHighSimilarity) actions.push({
+ if (!changedEnglishBlockIndexes.has(englishIndex)) actions.push({
  kind: "reuse",
  englishIndex,
  frenchIndex
@@ -1 +1 @@
- {"version":3,"file":"planActions.cjs","names":["actions: PlannedAction[]"],"sources":["../../../src/translation-alignment/planActions.ts"],"sourcesContent":["import type {\n AlignmentPair,\n AlignmentPlan,\n PlannedAction,\n SimilarityOptions,\n} from './types';\n\nexport const planAlignmentActions = (\n alignment: AlignmentPair[],\n changedEnglishBlockIndexes: Set<number>,\n similarityOptions: SimilarityOptions\n): AlignmentPlan => {\n const actions: PlannedAction[] = [];\n const seenFrench = new Set<number>();\n\n alignment.forEach((pair) => {\n const englishIndex = pair.englishIndex;\n const frenchIndex = pair.frenchIndex;\n\n if (englishIndex === -1 && frenchIndex !== null) {\n // french only -> delete\n if (!seenFrench.has(frenchIndex)) {\n actions.push({ kind: 'delete', frenchIndex });\n seenFrench.add(frenchIndex);\n }\n return;\n }\n\n if (englishIndex >= 0 && frenchIndex === null) {\n // new english block\n actions.push({ kind: 'insert_new', englishIndex });\n return;\n }\n\n if (englishIndex >= 0 && frenchIndex !== null) {\n // matched pair\n const isChanged = changedEnglishBlockIndexes.has(englishIndex);\n const isHighSimilarity =\n pair.similarityScore >= similarityOptions.minimumMatchForReuse;\n\n if (!isChanged && isHighSimilarity) {\n actions.push({ kind: 'reuse', englishIndex, frenchIndex });\n } else {\n actions.push({ kind: 'review', englishIndex, frenchIndex });\n }\n seenFrench.add(frenchIndex);\n return;\n }\n });\n\n return { actions };\n};\n"],"mappings":";;AAOA,MAAa,wBACX,WACA,4BACA,sBACkB;CAClB,MAAMA,UAA2B,EAAE;CACnC,MAAM,6BAAa,IAAI,KAAa;AAEpC,WAAU,SAAS,SAAS;EAC1B,MAAM,eAAe,KAAK;EAC1B,MAAM,cAAc,KAAK;AAEzB,MAAI,iBAAiB,MAAM,gBAAgB,MAAM;AAE/C,OAAI,CAAC,WAAW,IAAI,YAAY,EAAE;AAChC,YAAQ,KAAK;KAAE,MAAM;KAAU;KAAa,CAAC;AAC7C,eAAW,IAAI,YAAY;;AAE7B;;AAGF,MAAI,gBAAgB,KAAK,gBAAgB,MAAM;AAE7C,WAAQ,KAAK;IAAE,MAAM;IAAc;IAAc,CAAC;AAClD;;AAGF,MAAI,gBAAgB,KAAK,gBAAgB,MAAM;GAE7C,MAAM,YAAY,2BAA2B,IAAI,aAAa;GAC9D,MAAM,mBACJ,KAAK,mBAAmB,kBAAkB;AAE5C,OAAI,CAAC,aAAa,iBAChB,SAAQ,KAAK;IAAE,MAAM;IAAS;IAAc;IAAa,CAAC;OAE1D,SAAQ,KAAK;IAAE,MAAM;IAAU;IAAc;IAAa,CAAC;AAE7D,cAAW,IAAI,YAAY;AAC3B;;GAEF;AAEF,QAAO,EAAE,SAAS"}
+ {"version":3,"file":"planActions.cjs","names":["actions: PlannedAction[]"],"sources":["../../../src/translation-alignment/planActions.ts"],"sourcesContent":["import type { AlignmentPair, AlignmentPlan, PlannedAction } from './types';\n\nexport const planAlignmentActions = (\n alignment: AlignmentPair[],\n changedEnglishBlockIndexes: Set<number>\n): AlignmentPlan => {\n const actions: PlannedAction[] = [];\n const seenFrench = new Set<number>();\n\n alignment.forEach((pair) => {\n const englishIndex = pair.englishIndex;\n const frenchIndex = pair.frenchIndex;\n\n // Case 1: Deletion (Exists in FR, not in EN)\n if (englishIndex === -1 && frenchIndex !== null) {\n if (!seenFrench.has(frenchIndex)) {\n actions.push({ kind: 'delete', frenchIndex });\n seenFrench.add(frenchIndex);\n }\n return;\n }\n\n // Case 2: New Insertion (Exists in EN, not in FR)\n if (englishIndex >= 0 && frenchIndex === null) {\n actions.push({ kind: 'insert_new', englishIndex });\n return;\n }\n\n // Case 3: Alignment (Exists in both)\n if (englishIndex >= 0 && frenchIndex !== null) {\n const isChanged = changedEnglishBlockIndexes.has(englishIndex);\n\n // If the block is NOT marked as changed by Git, we REUSE it.\n // We assume the existing translation is correct because the source hasn't been touched.\n // We ignore 'similarityScore' here because EN vs UK text will always have low similarity.\n if (!isChanged) {\n actions.push({ kind: 'reuse', englishIndex, frenchIndex });\n } else {\n // If the block IS changed, we normally Review.\n // OPTIONAL: You could add a check here for 'similarityScore > 0.99'\n // to detect whitespace-only changes, but generally, if Git says changed, we Review.\n actions.push({ kind: 'review', englishIndex, frenchIndex });\n }\n\n seenFrench.add(frenchIndex);\n return;\n }\n });\n\n return { actions };\n};\n"],"mappings":";;AAEA,MAAa,wBACX,WACA,+BACkB;CAClB,MAAMA,UAA2B,EAAE;CACnC,MAAM,6BAAa,IAAI,KAAa;AAEpC,WAAU,SAAS,SAAS;EAC1B,MAAM,eAAe,KAAK;EAC1B,MAAM,cAAc,KAAK;AAGzB,MAAI,iBAAiB,MAAM,gBAAgB,MAAM;AAC/C,OAAI,CAAC,WAAW,IAAI,YAAY,EAAE;AAChC,YAAQ,KAAK;KAAE,MAAM;KAAU;KAAa,CAAC;AAC7C,eAAW,IAAI,YAAY;;AAE7B;;AAIF,MAAI,gBAAgB,KAAK,gBAAgB,MAAM;AAC7C,WAAQ,KAAK;IAAE,MAAM;IAAc;IAAc,CAAC;AAClD;;AAIF,MAAI,gBAAgB,KAAK,gBAAgB,MAAM;AAM7C,OAAI,CALc,2BAA2B,IAAI,aAAa,CAM5D,SAAQ,KAAK;IAAE,MAAM;IAAS;IAAc;IAAa,CAAC;OAK1D,SAAQ,KAAK;IAAE,MAAM;IAAU;IAAc;IAAa,CAAC;AAG7D,cAAW,IAAI,YAAY;AAC3B;;GAEF;AAEF,QAAO,EAAE,SAAS"}
@@ -3,127 +3,61 @@
  const isBlankLine = (line) => line.trim().length === 0;
  const isFencedCodeDelimiter = (line) => /^\s*```/.test(line);
  const isHeading = (line) => /^\s*#{1,6}\s+/.test(line);
- const isHorizontalRule = (line) => /^(\s*[-*_]){3,}\s*$/.test(line);
- const isListItem = (line) => /^\s*([-*+]\s+|\d+\.[\t\s]+)/.test(line);
- const isBlockquote = (line) => /^\s*>\s?/.test(line);
- const isTableLike = (line) => /\|/.test(line) && !isCodeFenceStart(line);
- const isCodeFenceStart = (line) => /^\s*```/.test(line);
+ const isFrontmatterDelimiter = (line) => /^\s*---\s*$/.test(line);
  const trimTrailingNewlines = (text) => text.replace(/\n+$/g, "\n");
  const segmentDocument = (text) => {
  const lines = text.split("\n");
  const blocks = [];
  let index = 0;
+ let insideCodeBlock = false;
+ let currentSectionLines = [];
+ let currentSectionStartLine = 1;
+ const flushCurrentSection = (endIndex) => {
+ if (currentSectionLines.length > 0) {
+ const rawContent = currentSectionLines.join("\n");
+ if (rawContent.trim().length > 0) blocks.push({
+ type: "paragraph",
+ content: `${trimTrailingNewlines(rawContent)}\n`,
+ lineStart: currentSectionStartLine,
+ lineEnd: endIndex
+ });
+ currentSectionLines = [];
+ }
+ };
  while (index < lines.length) {
- const startIndex = index;
  const currentLine = lines[index];
- if (isFencedCodeDelimiter(currentLine)) {
+ if (blocks.length === 0 && isFrontmatterDelimiter(currentLine)) {
+ const startLine = index + 1;
  const contentLines = [currentLine];
- index += 1;
- while (index < lines.length && !isFencedCodeDelimiter(lines[index])) {
- contentLines.push(lines[index]);
- index += 1;
- }
- if (index < lines.length) {
- contentLines.push(lines[index]);
- index += 1;
- }
- blocks.push({
- type: "code_block",
- content: `${trimTrailingNewlines(contentLines.join("\n"))}\n`,
- lineStart: startIndex + 1,
- lineEnd: index
- });
- continue;
- }
- if (isHorizontalRule(currentLine)) {
- blocks.push({
- type: "horizontal_rule",
- content: `${currentLine}\n`,
- lineStart: startIndex + 1,
- lineEnd: startIndex + 1
- });
- index += 1;
- continue;
- }
- if (isHeading(currentLine)) {
- blocks.push({
- type: "heading",
- content: `${currentLine}\n`,
- lineStart: startIndex + 1,
- lineEnd: startIndex + 1
- });
- index += 1;
- continue;
- }
- if (isListItem(currentLine)) {
- const contentLines = [];
- while (index < lines.length && (isListItem(lines[index]) || !isBlankLine(lines[index]) && /^\s{2,}/.test(lines[index]))) {
- contentLines.push(lines[index]);
- index += 1;
- }
- blocks.push({
- type: "list_item",
- content: `${trimTrailingNewlines(contentLines.join("\n"))}\n`,
- lineStart: startIndex + 1,
- lineEnd: index
- });
- continue;
- }
- if (isBlockquote(currentLine)) {
- const contentLines = [];
- while (index < lines.length && (isBlockquote(lines[index]) || !isBlankLine(lines[index]))) {
+ index++;
+ while (index < lines.length && !isFrontmatterDelimiter(lines[index])) {
  contentLines.push(lines[index]);
- index += 1;
+ index++;
  }
- blocks.push({
- type: "blockquote",
- content: `${trimTrailingNewlines(contentLines.join("\n"))}\n`,
- lineStart: startIndex + 1,
- lineEnd: index
- });
- continue;
- }
- if (isTableLike(currentLine)) {
- const contentLines = [];
- while (index < lines.length && /\|/.test(lines[index]) && !isBlankLine(lines[index])) {
+ if (index < lines.length && isFrontmatterDelimiter(lines[index])) {
  contentLines.push(lines[index]);
- index += 1;
- }
- blocks.push({
- type: "table",
- content: `${trimTrailingNewlines(contentLines.join("\n"))}\n`,
- lineStart: startIndex + 1,
- lineEnd: index
- });
- continue;
- }
- if (!isBlankLine(currentLine)) {
- const contentLines = [];
- while (index < lines.length && !isBlankLine(lines[index])) {
- if (isHeading(lines[index]) || isFencedCodeDelimiter(lines[index]) || isHorizontalRule(lines[index]) || isListItem(lines[index]) || isBlockquote(lines[index]) || isTableLike(lines[index])) break;
- contentLines.push(lines[index]);
- index += 1;
- }
- if (index < lines.length && isBlankLine(lines[index])) {
- contentLines.push(lines[index]);
- index += 1;
+ index++;
  }
  blocks.push({
  type: "paragraph",
  content: `${trimTrailingNewlines(contentLines.join("\n"))}\n`,
- lineStart: startIndex + 1,
+ lineStart: startLine,
  lineEnd: index
  });
  continue;
  }
- blocks.push({
- type: "unknown",
- content: `${currentLine}\n`,
- lineStart: startIndex + 1,
- lineEnd: startIndex + 1
- });
- index += 1;
+ if (isFencedCodeDelimiter(currentLine)) insideCodeBlock = !insideCodeBlock;
+ if (!insideCodeBlock && isHeading(currentLine)) {
+ if (currentSectionLines.length > 0) flushCurrentSection(index);
+ currentSectionStartLine = index + 1;
+ currentSectionLines = [currentLine];
+ } else {
+ if (currentSectionLines.length === 0 && !isBlankLine(currentLine)) currentSectionStartLine = index + 1;
+ currentSectionLines.push(currentLine);
+ }
+ index++;
  }
+ flushCurrentSection(index);
  return blocks;
  };
 
@@ -1 +1 @@
- {"version":3,"file":"segmentDocument.cjs","names":["blocks: Block[]","contentLines: string[]"],"sources":["../../../src/translation-alignment/segmentDocument.ts"],"sourcesContent":["import type { Block } from './types';\n\nconst isBlankLine = (line: string): boolean => line.trim().length === 0;\n\nconst isFencedCodeDelimiter = (line: string): boolean => /^\\s*```/.test(line);\n\nconst isHeading = (line: string): boolean => /^\\s*#{1,6}\\s+/.test(line);\n\nconst isHorizontalRule = (line: string): boolean =>\n /^(\\s*[-*_]){3,}\\s*$/.test(line);\n\nconst isListItem = (line: string): boolean =>\n /^\\s*([-*+]\\s+|\\d+\\.[\\t\\s]+)/.test(line);\n\nconst isBlockquote = (line: string): boolean => /^\\s*>\\s?/.test(line);\n\nconst isTableLike = (line: string): boolean =>\n /\\|/.test(line) && !isCodeFenceStart(line);\n\nconst isCodeFenceStart = (line: string): boolean => /^\\s*```/.test(line);\n\nconst trimTrailingNewlines = (text: string): string =>\n text.replace(/\\n+$/g, '\\n');\n\nexport const segmentDocument = (text: string): Block[] => {\n const lines = text.split('\\n');\n const blocks: Block[] = [];\n\n let index = 0;\n while (index < lines.length) {\n const startIndex = index;\n const currentLine = lines[index];\n\n // Code block (fenced)\n if (isFencedCodeDelimiter(currentLine)) {\n const contentLines: string[] = [currentLine];\n index += 1;\n while (index < lines.length && !isFencedCodeDelimiter(lines[index])) {\n contentLines.push(lines[index]);\n index += 1;\n }\n if (index < lines.length) {\n contentLines.push(lines[index]);\n index += 1;\n }\n blocks.push({\n type: 'code_block',\n content: `${trimTrailingNewlines(contentLines.join('\\n'))}\\n`,\n lineStart: startIndex + 1,\n lineEnd: index,\n });\n continue;\n }\n\n // Horizontal rule\n if (isHorizontalRule(currentLine)) {\n blocks.push({\n type: 'horizontal_rule',\n content: `${currentLine}\\n`,\n lineStart: startIndex + 1,\n lineEnd: startIndex + 1,\n });\n index += 1;\n continue;\n }\n\n // Heading\n if (isHeading(currentLine)) {\n blocks.push({\n type: 'heading',\n content: `${currentLine}\\n`,\n lineStart: startIndex + 1,\n lineEnd: startIndex + 1,\n });\n index += 1;\n continue;\n }\n\n // List block (one or more consecutive list items)\n if (isListItem(currentLine)) {\n const contentLines: string[] = [];\n while (\n index < lines.length &&\n (isListItem(lines[index]) ||\n (!isBlankLine(lines[index]) && /^\\s{2,}/.test(lines[index])))\n ) {\n contentLines.push(lines[index]);\n index += 1;\n }\n blocks.push({\n type: 'list_item',\n content: `${trimTrailingNewlines(contentLines.join('\\n'))}\\n`,\n lineStart: startIndex + 1,\n lineEnd: index,\n });\n continue;\n }\n\n // Blockquote (may span multiple lines)\n if (isBlockquote(currentLine)) {\n const contentLines: string[] = [];\n while (\n index < lines.length &&\n (isBlockquote(lines[index]) || !isBlankLine(lines[index]))\n ) {\n contentLines.push(lines[index]);\n index += 1;\n }\n blocks.push({\n type: 'blockquote',\n content: `${trimTrailingNewlines(contentLines.join('\\n'))}\\n`,\n lineStart: startIndex + 1,\n lineEnd: index,\n });\n continue;\n }\n\n // Table-like (simple heuristic)\n if (isTableLike(currentLine)) {\n const contentLines: string[] = [];\n while (\n index < lines.length &&\n /\\|/.test(lines[index]) &&\n !isBlankLine(lines[index])\n ) {\n contentLines.push(lines[index]);\n index += 1;\n }\n blocks.push({\n type: 'table',\n content: `${trimTrailingNewlines(contentLines.join('\\n'))}\\n`,\n lineStart: startIndex + 1,\n lineEnd: index,\n });\n continue;\n 
}\n\n // Paragraph (gathers until blank line)\n if (!isBlankLine(currentLine)) {\n const contentLines: string[] = [];\n while (index < lines.length && !isBlankLine(lines[index])) {\n // stop if we detect a new structural block start\n if (\n isHeading(lines[index]) ||\n isFencedCodeDelimiter(lines[index]) ||\n isHorizontalRule(lines[index]) ||\n isListItem(lines[index]) ||\n isBlockquote(lines[index]) ||\n isTableLike(lines[index])\n ) {\n break;\n }\n contentLines.push(lines[index]);\n index += 1;\n }\n // consume a single trailing blank line if present\n if (index < lines.length && isBlankLine(lines[index])) {\n contentLines.push(lines[index]);\n index += 1;\n }\n blocks.push({\n type: 'paragraph',\n content: `${trimTrailingNewlines(contentLines.join('\\n'))}\\n`,\n lineStart: startIndex + 1,\n lineEnd: index,\n });\n continue;\n }\n\n // Blank line outside of a paragraph: keep to preserve spacing minimally\n blocks.push({\n type: 'unknown',\n content: `${currentLine}\\n`,\n lineStart: startIndex + 1,\n lineEnd: startIndex + 1,\n });\n index += 1;\n }\n\n return blocks;\n};\n"],"mappings":";;AAEA,MAAM,eAAe,SAA0B,KAAK,MAAM,CAAC,WAAW;AAEtE,MAAM,yBAAyB,SAA0B,UAAU,KAAK,KAAK;AAE7E,MAAM,aAAa,SAA0B,gBAAgB,KAAK,KAAK;AAEvE,MAAM,oBAAoB,SACxB,sBAAsB,KAAK,KAAK;AAElC,MAAM,cAAc,SAClB,8BAA8B,KAAK,KAAK;AAE1C,MAAM,gBAAgB,SAA0B,WAAW,KAAK,KAAK;AAErE,MAAM,eAAe,SACnB,KAAK,KAAK,KAAK,IAAI,CAAC,iBAAiB,KAAK;AAE5C,MAAM,oBAAoB,SAA0B,UAAU,KAAK,KAAK;AAExE,MAAM,wBAAwB,SAC5B,KAAK,QAAQ,SAAS,KAAK;AAE7B,MAAa,mBAAmB,SAA0B;CACxD,MAAM,QAAQ,KAAK,MAAM,KAAK;CAC9B,MAAMA,SAAkB,EAAE;CAE1B,IAAI,QAAQ;AACZ,QAAO,QAAQ,MAAM,QAAQ;EAC3B,MAAM,aAAa;EACnB,MAAM,cAAc,MAAM;AAG1B,MAAI,sBAAsB,YAAY,EAAE;GACtC,MAAMC,eAAyB,CAAC,YAAY;AAC5C,YAAS;AACT,UAAO,QAAQ,MAAM,UAAU,CAAC,sBAAsB,MAAM,OAAO,EAAE;AACnE,iBAAa,KAAK,MAAM,OAAO;AAC/B,aAAS;;AAEX,OAAI,QAAQ,MAAM,QAAQ;AACxB,iBAAa,KAAK,MAAM,OAAO;AAC/B,aAAS;;AAEX,UAAO,KAAK;IACV,MAAM;IACN,SAAS,GAAG,qBAAqB,aAAa,KAAK,KAAK,CAAC,CAAC;IAC1D,WAAW,aAAa;IACxB,SAAS;IACV,CAAC;AACF;;AAIF,MAAI,iBAAiB,YAAY,EAAE;AACjC,UAAO,KAAK;IACV,MAAM;IACN,SAAS,GAAG,YAAY;IACxB,WAAW,aAAa;IACxB,SAAS,aAAa;IACvB,CAAC;AACF,YAAS;AACT;;AAIF,MAAI,UAAU,YAAY,EAAE;AAC1B,UAAO,KAAK;IACV,MAAM;IACN,SAAS,GAAG,YAAY;IACxB,WAAW,aAAa;IACxB,SAAS,aAAa;IACvB,CAAC;AACF,YAAS;AACT;;AAIF,MAAI,WAAW,YAAY,EAAE;GAC3B,MAAMA,eAAyB,EAAE;AACjC,UACE,QAAQ,MAAM,WACb,WAAW,MAAM,OAAO,IACtB,CAAC,YAAY,MAAM,OAAO,IAAI,UAAU,KAAK,MAAM,OAAO,GAC7D;AACA,iBAAa,KAAK,MAAM,OAAO;AAC/B,aAAS;;AAEX,UAAO,KAAK;IACV,MAAM;IACN,SAAS,GAAG,qBAAqB,aAAa,KAAK,KAAK,CAAC,CAAC;IAC1D,WAAW,aAAa;IACxB,SAAS;IACV,CAAC;AACF;;AAIF,MAAI,aAAa,YAAY,EAAE;GAC7B,MAAMA,eAAyB,EAAE;AACjC,UACE,QAAQ,MAAM,WACb,aAAa,MAAM,OAAO,IAAI,CAAC,YAAY,MAAM,OAAO,GACzD;AACA,iBAAa,KAAK,MAAM,OAAO;AAC/B,aAAS;;AAEX,UAAO,KAAK;IACV,MAAM;IACN,SAAS,GAAG,qBAAqB,aAAa,KAAK,KAAK,CAAC,CAAC;IAC1D,WAAW,aAAa;IACxB,SAAS;IACV,CAAC;AACF;;AAIF,MAAI,YAAY,YAAY,EAAE;GAC5B,MAAMA,eAAyB,EAAE;AACjC,UACE,QAAQ,MAAM,UACd,KAAK,KAAK,MAAM,OAAO,IACvB,CAAC,YAAY,MAAM,OAAO,EAC1B;AACA,iBAAa,KAAK,MAAM,OAAO;AAC/B,aAAS;;AAEX,UAAO,KAAK;IACV,MAAM;IACN,SAAS,GAAG,qBAAqB,aAAa,KAAK,KAAK,CAAC,CAAC;IAC1D,WAAW,aAAa;IACxB,SAAS;IACV,CAAC;AACF;;AAIF,MAAI,CAAC,YAAY,YAAY,EAAE;GAC7B,MAAMA,eAAyB,EAAE;AACjC,UAAO,QAAQ,MAAM,UAAU,CAAC,YAAY,MAAM,OAAO,EAAE;AAEzD,QACE,UAAU,MAAM,OAAO,IACvB,sBAAsB,MAAM,OAAO,IACnC,iBAAiB,MAAM,OAAO,IAC9B,WAAW,MAAM,OAAO,IACxB,aAAa,MAAM,OAAO,IAC1B,YAAY,MAAM,OAAO,CAEzB;AAEF,iBAAa,KAAK,MAAM,OAAO;AAC/B,aAAS;;AAGX,OAAI,QAAQ,MAAM,UAAU,YAAY,MAAM,OAAO,EAAE;AACrD,iBAAa,KAAK,MAAM,OAAO;AAC/B,aAAS;;AAEX,UAAO,KAAK;IACV,MAAM;IACN,SAAS,GAAG,qBAAqB,aAAa,KAAK,KAAK,CAAC,CAAC;IAC1D,WAAW,aAAa;IAC
xB,SAAS;IACV,CAAC;AACF;;AAIF,SAAO,KAAK;GACV,MAAM;GACN,SAAS,GAAG,YAAY;GACxB,WAAW,aAAa;GACxB,SAAS,aAAa;GACvB,CAAC;AACF,WAAS;;AAGX,QAAO"}
+ {"version":3,"file":"segmentDocument.cjs","names":["blocks: Block[]","currentSectionLines: string[]","contentLines: string[]"],"sources":["../../../src/translation-alignment/segmentDocument.ts"],"sourcesContent":["import type { Block } from './types';\n\nconst isBlankLine = (line: string): boolean => line.trim().length === 0;\nconst isFencedCodeDelimiter = (line: string): boolean => /^\\s*```/.test(line);\nconst isHeading = (line: string): boolean => /^\\s*#{1,6}\\s+/.test(line);\nconst isFrontmatterDelimiter = (line: string): boolean =>\n /^\\s*---\\s*$/.test(line);\nconst trimTrailingNewlines = (text: string): string =>\n text.replace(/\\n+$/g, '\\n');\n\nexport const segmentDocument = (text: string): Block[] => {\n const lines = text.split('\\n');\n const blocks: Block[] = [];\n\n let index = 0;\n let insideCodeBlock = false;\n\n // Buffers\n let currentSectionLines: string[] = [];\n let currentSectionStartLine = 1;\n\n const flushCurrentSection = (endIndex: number) => {\n if (currentSectionLines.length > 0) {\n // Filter out leading blank lines from the block content to keep it clean,\n // but strictly speaking, we just want to ensure non-empty content.\n const rawContent = currentSectionLines.join('\\n');\n\n if (rawContent.trim().length > 0) {\n blocks.push({\n type: 'paragraph', // Generic type\n content: `${trimTrailingNewlines(rawContent)}\\n`,\n lineStart: currentSectionStartLine,\n lineEnd: endIndex,\n });\n }\n currentSectionLines = [];\n }\n };\n\n while (index < lines.length) {\n const currentLine = lines[index];\n\n // 1. Handle Frontmatter (Must be at start of file)\n if (blocks.length === 0 && isFrontmatterDelimiter(currentLine)) {\n const startLine = index + 1;\n const contentLines: string[] = [currentLine];\n index++;\n\n while (index < lines.length && !isFrontmatterDelimiter(lines[index])) {\n contentLines.push(lines[index]);\n index++;\n }\n\n if (index < lines.length && isFrontmatterDelimiter(lines[index])) {\n contentLines.push(lines[index]);\n index++;\n }\n\n blocks.push({\n type: 'paragraph',\n content: `${trimTrailingNewlines(contentLines.join('\\n'))}\\n`,\n lineStart: startLine,\n lineEnd: index,\n });\n continue;\n }\n\n // 2. Track Code Blocks (Headers inside code blocks are ignored)\n if (isFencedCodeDelimiter(currentLine)) {\n insideCodeBlock = !insideCodeBlock;\n }\n\n const isHeader = !insideCodeBlock && isHeading(currentLine);\n\n // 3. 
Split on Headers\n if (isHeader) {\n // If we have accumulated content, flush it as the previous block\n if (currentSectionLines.length > 0) {\n flushCurrentSection(index);\n }\n // Start a new section with this header\n currentSectionStartLine = index + 1;\n currentSectionLines = [currentLine];\n } else {\n // Accumulate content\n if (currentSectionLines.length === 0 && !isBlankLine(currentLine)) {\n currentSectionStartLine = index + 1;\n }\n currentSectionLines.push(currentLine);\n }\n\n index++;\n }\n\n // Flush remaining content\n flushCurrentSection(index);\n\n return blocks;\n};\n"],"mappings":";;AAEA,MAAM,eAAe,SAA0B,KAAK,MAAM,CAAC,WAAW;AACtE,MAAM,yBAAyB,SAA0B,UAAU,KAAK,KAAK;AAC7E,MAAM,aAAa,SAA0B,gBAAgB,KAAK,KAAK;AACvE,MAAM,0BAA0B,SAC9B,cAAc,KAAK,KAAK;AAC1B,MAAM,wBAAwB,SAC5B,KAAK,QAAQ,SAAS,KAAK;AAE7B,MAAa,mBAAmB,SAA0B;CACxD,MAAM,QAAQ,KAAK,MAAM,KAAK;CAC9B,MAAMA,SAAkB,EAAE;CAE1B,IAAI,QAAQ;CACZ,IAAI,kBAAkB;CAGtB,IAAIC,sBAAgC,EAAE;CACtC,IAAI,0BAA0B;CAE9B,MAAM,uBAAuB,aAAqB;AAChD,MAAI,oBAAoB,SAAS,GAAG;GAGlC,MAAM,aAAa,oBAAoB,KAAK,KAAK;AAEjD,OAAI,WAAW,MAAM,CAAC,SAAS,EAC7B,QAAO,KAAK;IACV,MAAM;IACN,SAAS,GAAG,qBAAqB,WAAW,CAAC;IAC7C,WAAW;IACX,SAAS;IACV,CAAC;AAEJ,yBAAsB,EAAE;;;AAI5B,QAAO,QAAQ,MAAM,QAAQ;EAC3B,MAAM,cAAc,MAAM;AAG1B,MAAI,OAAO,WAAW,KAAK,uBAAuB,YAAY,EAAE;GAC9D,MAAM,YAAY,QAAQ;GAC1B,MAAMC,eAAyB,CAAC,YAAY;AAC5C;AAEA,UAAO,QAAQ,MAAM,UAAU,CAAC,uBAAuB,MAAM,OAAO,EAAE;AACpE,iBAAa,KAAK,MAAM,OAAO;AAC/B;;AAGF,OAAI,QAAQ,MAAM,UAAU,uBAAuB,MAAM,OAAO,EAAE;AAChE,iBAAa,KAAK,MAAM,OAAO;AAC/B;;AAGF,UAAO,KAAK;IACV,MAAM;IACN,SAAS,GAAG,qBAAqB,aAAa,KAAK,KAAK,CAAC,CAAC;IAC1D,WAAW;IACX,SAAS;IACV,CAAC;AACF;;AAIF,MAAI,sBAAsB,YAAY,CACpC,mBAAkB,CAAC;AAMrB,MAHiB,CAAC,mBAAmB,UAAU,YAAY,EAG7C;AAEZ,OAAI,oBAAoB,SAAS,EAC/B,qBAAoB,MAAM;AAG5B,6BAA0B,QAAQ;AAClC,yBAAsB,CAAC,YAAY;SAC9B;AAEL,OAAI,oBAAoB,WAAW,KAAK,CAAC,YAAY,YAAY,CAC/D,2BAA0B,QAAQ;AAEpC,uBAAoB,KAAK,YAAY;;AAGvC;;AAIF,qBAAoB,MAAM;AAE1B,QAAO"}
@@ -46,7 +46,8 @@ const checkCMSAuth = async (configuration, shouldCheckConfigConsistency = true)
  const checkAIAccess = async (configuration, aiOptions, shouldCheckConfigConsistency = true) => {
  const appLogger = (0, _intlayer_config.getAppLogger)(configuration);
  const hasCMSAuth = Boolean(configuration.editor.clientId && configuration.editor.clientSecret);
- if (Boolean(configuration.ai?.apiKey || aiOptions?.apiKey)) return true;
+ const isOllama = configuration.ai?.provider === "ollama" || aiOptions?.provider === "ollama";
+ if (Boolean(configuration.ai?.apiKey || aiOptions?.apiKey) || isOllama) return true;
  if (!hasCMSAuth) {
  appLogger([
  "AI options or API key not provided. You can either retreive the CMS access key on",
@@ -1 +1 @@
- {"version":3,"file":"checkAccess.cjs","names":["ANSIColors"],"sources":["../../../src/utils/checkAccess.ts"],"sourcesContent":["import type { AIOptions } from '@intlayer/api';\nimport { getIntlayerAPIProxy } from '@intlayer/api';\nimport {\n ANSIColors,\n colorize,\n extractErrorMessage,\n getAppLogger,\n} from '@intlayer/config';\nimport type { IntlayerConfig } from '@intlayer/types';\nimport { checkConfigConsistency } from './checkConfigConsistency';\n\nexport const checkCMSAuth = async (\n configuration: IntlayerConfig,\n shouldCheckConfigConsistency: boolean = true\n): Promise<boolean> => {\n const appLogger = getAppLogger(configuration, {\n config: {\n prefix: '',\n },\n });\n\n const hasCMSAuth =\n configuration.editor.clientId && configuration.editor.clientSecret;\n if (!hasCMSAuth) {\n appLogger(\n [\n 'CMS auth not provided. You can either retreive the CMS access key on',\n colorize('https://intlayer.org/dahboard', ANSIColors.GREY),\n colorize('(see doc:', ANSIColors.GREY_DARK),\n colorize('https://intlayer.org/doc/concept/cms', ANSIColors.GREY),\n colorize(')', ANSIColors.GREY_DARK),\n '.',\n ],\n {\n level: 'error',\n }\n );\n\n return false;\n }\n const intlayerAPI = getIntlayerAPIProxy(undefined, configuration);\n\n try {\n const result = await intlayerAPI.oAuth.getOAuth2AccessToken();\n\n const project = result.data?.project;\n\n if (!project) {\n appLogger('Project not found');\n\n return true;\n }\n\n if (project.configuration && shouldCheckConfigConsistency) {\n try {\n // Recursively check if project.configuration (subset) matches configuration (superset)\n checkConfigConsistency(project.configuration, configuration);\n } catch {\n appLogger(\n [\n 'Remote configuration is not up to date. The project configuration does not match the local configuration.',\n 'You can push the configuration by running',\n colorize('npx intlayer push', ANSIColors.CYAN),\n colorize('(see doc:', ANSIColors.GREY_DARK),\n colorize(\n 'https://intlayer.org/doc/concept/cli/push',\n ANSIColors.GREY\n ),\n colorize(')', ANSIColors.GREY_DARK),\n '.',\n ],\n {\n level: 'warn',\n }\n );\n }\n }\n } catch (error) {\n const message = extractErrorMessage(error);\n\n appLogger(message, {\n level: 'error',\n });\n return false;\n }\n\n return true;\n};\n\nexport const checkAIAccess = async (\n configuration: IntlayerConfig,\n aiOptions?: AIOptions,\n shouldCheckConfigConsistency: boolean = true\n): Promise<boolean> => {\n const appLogger = getAppLogger(configuration);\n\n const hasCMSAuth = Boolean(\n configuration.editor.clientId && configuration.editor.clientSecret\n );\n const hasHisOwnAIAPIKey = Boolean(\n configuration.ai?.apiKey || aiOptions?.apiKey\n );\n\n if (hasHisOwnAIAPIKey) {\n return true;\n }\n\n // User need to provide either his own AI API key or the CMS auth\n if (!hasCMSAuth) {\n appLogger(\n [\n 'AI options or API key not provided. You can either retreive the CMS access key on',\n colorize('https://intlayer.org/dahboard', ANSIColors.GREY),\n colorize('(see doc:', ANSIColors.GREY_DARK),\n colorize('https://intlayer.org/doc/concept/cms', ANSIColors.GREY),\n colorize(')', ANSIColors.GREY_DARK),\n '. 
Alternatively, you can add your own OpenAI API key in the settings',\n colorize('(see doc:', ANSIColors.GREY_DARK),\n colorize(\n 'https://intlayer.org/doc/concept/configuration',\n ANSIColors.GREY\n ),\n colorize(')', ANSIColors.GREY_DARK),\n '.',\n ],\n {\n level: 'error',\n }\n );\n\n return false;\n }\n\n // If the user do not have his own AI API key, we need to check the CMS auth\n return await checkCMSAuth(configuration, shouldCheckConfigConsistency);\n};\n"],"mappings":";;;;;;AAWA,MAAa,eAAe,OAC1B,eACA,+BAAwC,SACnB;CACrB,MAAM,+CAAyB,eAAe,EAC5C,QAAQ,EACN,QAAQ,IACT,EACF,CAAC;AAIF,KAAI,EADF,cAAc,OAAO,YAAY,cAAc,OAAO,eACvC;AACf,YACE;GACE;kCACS,iCAAiCA,4BAAW,KAAK;kCACjD,aAAaA,4BAAW,UAAU;kCAClC,wCAAwCA,4BAAW,KAAK;kCACxD,KAAKA,4BAAW,UAAU;GACnC;GACD,EACD,EACE,OAAO,SACR,CACF;AAED,SAAO;;CAET,MAAM,qDAAkC,QAAW,cAAc;AAEjE,KAAI;EAGF,MAAM,WAFS,MAAM,YAAY,MAAM,sBAAsB,EAEtC,MAAM;AAE7B,MAAI,CAAC,SAAS;AACZ,aAAU,oBAAoB;AAE9B,UAAO;;AAGT,MAAI,QAAQ,iBAAiB,6BAC3B,KAAI;AAEF,+DAAuB,QAAQ,eAAe,cAAc;UACtD;AACN,aACE;IACE;IACA;mCACS,qBAAqBA,4BAAW,KAAK;mCACrC,aAAaA,4BAAW,UAAU;mCAEzC,6CACAA,4BAAW,KACZ;mCACQ,KAAKA,4BAAW,UAAU;IACnC;IACD,EACD,EACE,OAAO,QACR,CACF;;UAGE,OAAO;AAGd,sDAFoC,MAAM,EAEvB,EACjB,OAAO,SACR,CAAC;AACF,SAAO;;AAGT,QAAO;;AAGT,MAAa,gBAAgB,OAC3B,eACA,WACA,+BAAwC,SACnB;CACrB,MAAM,+CAAyB,cAAc;CAE7C,MAAM,aAAa,QACjB,cAAc,OAAO,YAAY,cAAc,OAAO,aACvD;AAKD,KAJ0B,QACxB,cAAc,IAAI,UAAU,WAAW,OACxC,CAGC,QAAO;AAIT,KAAI,CAAC,YAAY;AACf,YACE;GACE;kCACS,iCAAiCA,4BAAW,KAAK;kCACjD,aAAaA,4BAAW,UAAU;kCAClC,wCAAwCA,4BAAW,KAAK;kCACxD,KAAKA,4BAAW,UAAU;GACnC;kCACS,aAAaA,4BAAW,UAAU;kCAEzC,kDACAA,4BAAW,KACZ;kCACQ,KAAKA,4BAAW,UAAU;GACnC;GACD,EACD,EACE,OAAO,SACR,CACF;AAED,SAAO;;AAIT,QAAO,MAAM,aAAa,eAAe,6BAA6B"}
+ {"version":3,"file":"checkAccess.cjs","names":["ANSIColors"],"sources":["../../../src/utils/checkAccess.ts"],"sourcesContent":["import type { AIOptions } from '@intlayer/api';\nimport { getIntlayerAPIProxy } from '@intlayer/api';\nimport {\n ANSIColors,\n colorize,\n extractErrorMessage,\n getAppLogger,\n} from '@intlayer/config';\nimport type { IntlayerConfig } from '@intlayer/types';\nimport { checkConfigConsistency } from './checkConfigConsistency';\n\nexport const checkCMSAuth = async (\n configuration: IntlayerConfig,\n shouldCheckConfigConsistency: boolean = true\n): Promise<boolean> => {\n const appLogger = getAppLogger(configuration, {\n config: {\n prefix: '',\n },\n });\n\n const hasCMSAuth =\n configuration.editor.clientId && configuration.editor.clientSecret;\n if (!hasCMSAuth) {\n appLogger(\n [\n 'CMS auth not provided. You can either retreive the CMS access key on',\n colorize('https://intlayer.org/dahboard', ANSIColors.GREY),\n colorize('(see doc:', ANSIColors.GREY_DARK),\n colorize('https://intlayer.org/doc/concept/cms', ANSIColors.GREY),\n colorize(')', ANSIColors.GREY_DARK),\n '.',\n ],\n {\n level: 'error',\n }\n );\n\n return false;\n }\n const intlayerAPI = getIntlayerAPIProxy(undefined, configuration);\n\n try {\n const result = await intlayerAPI.oAuth.getOAuth2AccessToken();\n\n const project = result.data?.project;\n\n if (!project) {\n appLogger('Project not found');\n\n return true;\n }\n\n if (project.configuration && shouldCheckConfigConsistency) {\n try {\n // Recursively check if project.configuration (subset) matches configuration (superset)\n checkConfigConsistency(project.configuration, configuration);\n } catch {\n appLogger(\n [\n 'Remote configuration is not up to date. The project configuration does not match the local configuration.',\n 'You can push the configuration by running',\n colorize('npx intlayer push', ANSIColors.CYAN),\n colorize('(see doc:', ANSIColors.GREY_DARK),\n colorize(\n 'https://intlayer.org/doc/concept/cli/push',\n ANSIColors.GREY\n ),\n colorize(')', ANSIColors.GREY_DARK),\n '.',\n ],\n {\n level: 'warn',\n }\n );\n }\n }\n } catch (error) {\n const message = extractErrorMessage(error);\n\n appLogger(message, {\n level: 'error',\n });\n return false;\n }\n\n return true;\n};\n\nexport const checkAIAccess = async (\n configuration: IntlayerConfig,\n aiOptions?: AIOptions,\n shouldCheckConfigConsistency: boolean = true\n): Promise<boolean> => {\n const appLogger = getAppLogger(configuration);\n\n const hasCMSAuth = Boolean(\n configuration.editor.clientId && configuration.editor.clientSecret\n );\n const isOllama =\n configuration.ai?.provider === 'ollama' || aiOptions?.provider === 'ollama';\n const hasHisOwnAIAPIKey = Boolean(\n configuration.ai?.apiKey || aiOptions?.apiKey\n );\n\n if (hasHisOwnAIAPIKey || isOllama) {\n return true;\n }\n\n // User need to provide either his own AI API key or the CMS auth\n if (!hasCMSAuth) {\n appLogger(\n [\n 'AI options or API key not provided. You can either retreive the CMS access key on',\n colorize('https://intlayer.org/dahboard', ANSIColors.GREY),\n colorize('(see doc:', ANSIColors.GREY_DARK),\n colorize('https://intlayer.org/doc/concept/cms', ANSIColors.GREY),\n colorize(')', ANSIColors.GREY_DARK),\n '. 
Alternatively, you can add your own OpenAI API key in the settings',\n colorize('(see doc:', ANSIColors.GREY_DARK),\n colorize(\n 'https://intlayer.org/doc/concept/configuration',\n ANSIColors.GREY\n ),\n colorize(')', ANSIColors.GREY_DARK),\n '.',\n ],\n {\n level: 'error',\n }\n );\n\n return false;\n }\n\n // If the user do not have his own AI API key, we need to check the CMS auth\n return await checkCMSAuth(configuration, shouldCheckConfigConsistency);\n};\n"],"mappings":";;;;;;AAWA,MAAa,eAAe,OAC1B,eACA,+BAAwC,SACnB;CACrB,MAAM,+CAAyB,eAAe,EAC5C,QAAQ,EACN,QAAQ,IACT,EACF,CAAC;AAIF,KAAI,EADF,cAAc,OAAO,YAAY,cAAc,OAAO,eACvC;AACf,YACE;GACE;kCACS,iCAAiCA,4BAAW,KAAK;kCACjD,aAAaA,4BAAW,UAAU;kCAClC,wCAAwCA,4BAAW,KAAK;kCACxD,KAAKA,4BAAW,UAAU;GACnC;GACD,EACD,EACE,OAAO,SACR,CACF;AAED,SAAO;;CAET,MAAM,qDAAkC,QAAW,cAAc;AAEjE,KAAI;EAGF,MAAM,WAFS,MAAM,YAAY,MAAM,sBAAsB,EAEtC,MAAM;AAE7B,MAAI,CAAC,SAAS;AACZ,aAAU,oBAAoB;AAE9B,UAAO;;AAGT,MAAI,QAAQ,iBAAiB,6BAC3B,KAAI;AAEF,+DAAuB,QAAQ,eAAe,cAAc;UACtD;AACN,aACE;IACE;IACA;mCACS,qBAAqBA,4BAAW,KAAK;mCACrC,aAAaA,4BAAW,UAAU;mCAEzC,6CACAA,4BAAW,KACZ;mCACQ,KAAKA,4BAAW,UAAU;IACnC;IACD,EACD,EACE,OAAO,QACR,CACF;;UAGE,OAAO;AAGd,sDAFoC,MAAM,EAEvB,EACjB,OAAO,SACR,CAAC;AACF,SAAO;;AAGT,QAAO;;AAGT,MAAa,gBAAgB,OAC3B,eACA,WACA,+BAAwC,SACnB;CACrB,MAAM,+CAAyB,cAAc;CAE7C,MAAM,aAAa,QACjB,cAAc,OAAO,YAAY,cAAc,OAAO,aACvD;CACD,MAAM,WACJ,cAAc,IAAI,aAAa,YAAY,WAAW,aAAa;AAKrE,KAJ0B,QACxB,cAAc,IAAI,UAAU,WAAW,OACxC,IAEwB,SACvB,QAAO;AAIT,KAAI,CAAC,YAAY;AACf,YACE;GACE;kCACS,iCAAiCA,4BAAW,KAAK;kCACjD,aAAaA,4BAAW,UAAU;kCAClC,wCAAwCA,4BAAW,KAAK;kCACxD,KAAKA,4BAAW,UAAU;GACnC;kCACS,aAAaA,4BAAW,UAAU;kCAEzC,kDACAA,4BAAW,KACZ;kCACQ,KAAKA,4BAAW,UAAU;GACnC;GACD,EACD,EACE,OAAO,SACR,CACF;AAED,SAAO;;AAIT,QAAO,MAAM,aAAa,eAAe,6BAA6B"}
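The checkAccess.cjs change above (and its updated map) relaxes the AI access gate: a configured Ollama provider now passes without an API key or CMS credentials. The sketch below restates that gate in isolation for readability; it mirrors only the fields the compiled function reads and is not the exported API.

```ts
// Sketch of the access gate implemented by checkAIAccess in 7.5.12.
// Simplified restatement for readability, not the real function.
type AiLike = { provider?: string; apiKey?: string };

const passesAIAccessGate = (
  configurationAi: AiLike | undefined,
  aiOptions: AiLike | undefined,
  hasCMSAuth: boolean
): boolean => {
  const isOllama =
    configurationAi?.provider === 'ollama' || aiOptions?.provider === 'ollama';
  const hasOwnApiKey = Boolean(configurationAi?.apiKey || aiOptions?.apiKey);

  // New in 7.5.12: a local Ollama provider is accepted without any API key.
  if (hasOwnApiKey || isOllama) return true;

  // Otherwise CMS credentials (clientId/clientSecret) are still required;
  // the real function then goes on to validate them via checkCMSAuth.
  return hasCMSAuth;
};

// Example: Ollama configured locally, no API key, no CMS credentials.
console.log(passesAIAccessGate({ provider: 'ollama' }, undefined, false)); // true
```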