@intlayer/cli 7.5.13 → 7.5.14

This diff shows the contents of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Files changed (79)
  1. package/dist/cjs/IntlayerEventListener.cjs.map +1 -1
  2. package/dist/cjs/build.cjs.map +1 -1
  3. package/dist/cjs/ci.cjs.map +1 -1
  4. package/dist/cjs/cli.cjs.map +1 -1
  5. package/dist/cjs/editor.cjs.map +1 -1
  6. package/dist/cjs/fill/deepMergeContent.cjs.map +1 -1
  7. package/dist/cjs/fill/fill.cjs.map +1 -1
  8. package/dist/cjs/fill/formatAutoFilledFilePath.cjs.map +1 -1
  9. package/dist/cjs/fill/formatFillData.cjs.map +1 -1
  10. package/dist/cjs/fill/getFilterMissingContentPerLocale.cjs.map +1 -1
  11. package/dist/cjs/fill/listTranslationsTasks.cjs.map +1 -1
  12. package/dist/cjs/fill/mergeChunks.cjs.map +1 -1
  13. package/dist/cjs/fill/translateDictionary.cjs.map +1 -1
  14. package/dist/cjs/fill/writeFill.cjs.map +1 -1
  15. package/dist/cjs/liveSync.cjs.map +1 -1
  16. package/dist/cjs/pull.cjs.map +1 -1
  17. package/dist/cjs/push/pullLog.cjs.map +1 -1
  18. package/dist/cjs/push/push.cjs.map +1 -1
  19. package/dist/cjs/pushLog.cjs.map +1 -1
  20. package/dist/cjs/reviewDoc/reviewDoc.cjs.map +1 -1
  21. package/dist/cjs/test/listMissingTranslations.cjs.map +1 -1
  22. package/dist/cjs/transform.cjs.map +1 -1
  23. package/dist/cjs/translateDoc/translateDoc.cjs.map +1 -1
  24. package/dist/cjs/translateDoc/translateFile.cjs.map +1 -1
  25. package/dist/cjs/translateDoc/validation.cjs.map +1 -1
  26. package/dist/cjs/translation-alignment/alignBlocks.cjs.map +1 -1
  27. package/dist/cjs/translation-alignment/pipeline.cjs.map +1 -1
  28. package/dist/cjs/translation-alignment/planActions.cjs.map +1 -1
  29. package/dist/cjs/translation-alignment/rebuildDocument.cjs.map +1 -1
  30. package/dist/cjs/translation-alignment/segmentDocument.cjs.map +1 -1
  31. package/dist/cjs/utils/calculateChunks.cjs.map +1 -1
  32. package/dist/cjs/utils/chunkInference.cjs.map +1 -1
  33. package/dist/cjs/utils/getIsFileUpdatedRecently.cjs.map +1 -1
  34. package/dist/cjs/utils/listSpecialChars.cjs.map +1 -1
  35. package/dist/cjs/utils/mapChunksBetweenFiles.cjs.map +1 -1
  36. package/dist/cjs/utils/reorderParagraphs.cjs.map +1 -1
  37. package/dist/cjs/utils/setupAI.cjs.map +1 -1
  38. package/dist/cjs/watch.cjs.map +1 -1
  39. package/dist/esm/IntlayerEventListener.mjs.map +1 -1
  40. package/dist/esm/build.mjs.map +1 -1
  41. package/dist/esm/ci.mjs.map +1 -1
  42. package/dist/esm/cli.mjs.map +1 -1
  43. package/dist/esm/editor.mjs.map +1 -1
  44. package/dist/esm/fill/deepMergeContent.mjs.map +1 -1
  45. package/dist/esm/fill/fill.mjs.map +1 -1
  46. package/dist/esm/fill/formatAutoFilledFilePath.mjs.map +1 -1
  47. package/dist/esm/fill/formatFillData.mjs.map +1 -1
  48. package/dist/esm/fill/getFilterMissingContentPerLocale.mjs.map +1 -1
  49. package/dist/esm/fill/listTranslationsTasks.mjs.map +1 -1
  50. package/dist/esm/fill/mergeChunks.mjs.map +1 -1
  51. package/dist/esm/fill/translateDictionary.mjs.map +1 -1
  52. package/dist/esm/fill/writeFill.mjs.map +1 -1
  53. package/dist/esm/liveSync.mjs.map +1 -1
  54. package/dist/esm/pull.mjs.map +1 -1
  55. package/dist/esm/push/pullLog.mjs.map +1 -1
  56. package/dist/esm/push/push.mjs.map +1 -1
  57. package/dist/esm/pushLog.mjs.map +1 -1
  58. package/dist/esm/reviewDoc/reviewDoc.mjs.map +1 -1
  59. package/dist/esm/test/listMissingTranslations.mjs.map +1 -1
  60. package/dist/esm/transform.mjs.map +1 -1
  61. package/dist/esm/translateDoc/translateDoc.mjs.map +1 -1
  62. package/dist/esm/translateDoc/translateFile.mjs.map +1 -1
  63. package/dist/esm/translateDoc/validation.mjs.map +1 -1
  64. package/dist/esm/translation-alignment/alignBlocks.mjs.map +1 -1
  65. package/dist/esm/translation-alignment/pipeline.mjs.map +1 -1
  66. package/dist/esm/translation-alignment/planActions.mjs.map +1 -1
  67. package/dist/esm/translation-alignment/rebuildDocument.mjs.map +1 -1
  68. package/dist/esm/translation-alignment/segmentDocument.mjs.map +1 -1
  69. package/dist/esm/utils/calculateChunks.mjs.map +1 -1
  70. package/dist/esm/utils/chunkInference.mjs.map +1 -1
  71. package/dist/esm/utils/getIsFileUpdatedRecently.mjs.map +1 -1
  72. package/dist/esm/utils/listSpecialChars.mjs.map +1 -1
  73. package/dist/esm/utils/mapChunksBetweenFiles.mjs.map +1 -1
  74. package/dist/esm/utils/reorderParagraphs.mjs.map +1 -1
  75. package/dist/esm/utils/setupAI.mjs.map +1 -1
  76. package/dist/esm/watch.mjs.map +1 -1
  77. package/dist/types/pull.d.ts.map +1 -1
  78. package/dist/types/translation-alignment/rebuildDocument.d.ts.map +1 -1
  79. package/package.json +14 -14
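Every source-map hunk shown below follows the same pattern: the `names` array drops its type-annotated declaration entries (for example `"translatedParts: string[]"` or `"error: any"`), the `mappings` string shifts accordingly, and the embedded `sourcesContent` is unchanged. A small Node script can confirm this without trusting the viewer; the paths are placeholders for wherever the two tarballs (from `npm pack @intlayer/cli@7.5.13` and `npm pack @intlayer/cli@7.5.14`) were extracted:

```ts
// Sketch: diff the `names` array of one source map between two unpacked
// versions. Paths are placeholders; adjust them to your extraction dirs.
import { readFileSync } from 'node:fs';

type SourceMapV3 = { version: 3; file: string; names: string[] };

const loadMap = (path: string): SourceMapV3 =>
  JSON.parse(readFileSync(path, 'utf-8'));

const before = loadMap('./v7.5.13/dist/cjs/translateDoc/translateFile.cjs.map');
const after = loadMap('./v7.5.14/dist/cjs/translateDoc/translateFile.cjs.map');

console.log({
  removed: before.names.filter((name) => !after.names.includes(name)),
  added: after.names.filter((name) => !before.names.includes(name)),
});
// For this file the expected output is:
// { removed: ['translatedParts: string[]', 'error: any'], added: [] }
```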
package/dist/cjs/translateDoc/translateFile.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"translateFile.cjs","names":["performance","chunkText","ANSIColors","readAsset","translatedParts: string[]","chunkInference","sanitizeChunk","fixChunkStartEndChars","validateTranslation","error: any"],"sources":["../../../src/translateDoc/translateFile.ts"],"sourcesContent":["import { mkdirSync, writeFileSync } from 'node:fs';\nimport { readFile } from 'node:fs/promises';\nimport { dirname, relative } from 'node:path';\nimport { performance } from 'node:perf_hooks';\nimport { readAsset } from 'utils:asset';\nimport { formatLocale, formatPath } from '@intlayer/chokidar';\nimport {\n ANSIColors,\n colon,\n colorize,\n colorizeNumber,\n getAppLogger,\n retryManager,\n} from '@intlayer/config';\nimport { chunkText } from '../utils/calculateChunks';\nimport { chunkInference } from '../utils/chunkInference';\nimport { fixChunkStartEndChars } from '../utils/fixChunkStartEndChars';\nimport type { TranslateFileOptions } from './types';\nimport { sanitizeChunk, validateTranslation } from './validation';\n\nexport const translateFile = async ({\n baseFilePath,\n outputFilePath,\n locale,\n baseLocale,\n configuration,\n errorState,\n aiOptions,\n customInstructions,\n aiClient,\n aiConfig,\n flushStrategy = 'incremental',\n onChunkReceive,\n limit, // The Global Limiter\n}: TranslateFileOptions): Promise<string | null> => {\n if (errorState.shouldStop) return null;\n\n const appLogger = getAppLogger(configuration, { config: { prefix: '' } });\n const fileStartTime = performance.now();\n\n try {\n const fileContent = await readFile(baseFilePath, 'utf-8');\n const chunks = chunkText(fileContent);\n const totalChunks = chunks.length;\n\n const filePrefixText = `${ANSIColors.GREY_DARK}[${formatPath(baseFilePath)}${ANSIColors.GREY_DARK}] `;\n const filePrefix = `${colon(filePrefixText, { colSize: 40 })}${ANSIColors.RESET}`;\n const prefixText = `${ANSIColors.GREY_DARK}[${formatPath(baseFilePath)}${ANSIColors.GREY_DARK}][${formatLocale(locale)}${ANSIColors.GREY_DARK}] `;\n const prefix = `${colon(prefixText, { colSize: 40 })}${ANSIColors.RESET}`;\n\n appLogger(\n `${filePrefix}Split into ${colorizeNumber(totalChunks)} chunks. Queuing...`\n );\n\n const basePrompt = readAsset('./prompts/TRANSLATE_PROMPT.md', 'utf-8')\n .replaceAll('{{localeName}}', `${formatLocale(locale, false)}`)\n .replaceAll('{{baseLocaleName}}', `${formatLocale(baseLocale, false)}`)\n .replace('{{applicationContext}}', aiOptions?.applicationContext ?? '-')\n .replace('{{customInstructions}}', customInstructions ?? '-');\n\n const translatedParts: string[] = new Array(totalChunks).fill('');\n\n // Fallback if no limiter is provided (runs immediately)\n const runTask = limit ?? ((fn) => fn());\n\n // MAP CHUNKS TO GLOBAL TASKS\n // This pushes ALL chunks for this file into the Global Queue immediately.\n // They will execute whenever the global concurrency slots open up.\n const tasks = chunks.map((chunk, i) =>\n runTask(async () => {\n if (errorState.shouldStop) return null;\n\n const chunkLogger = getAppLogger(configuration, {\n config: {\n prefix: `${prefix} ${ANSIColors.GREY_DARK}[${i + 1}/${totalChunks}] ${ANSIColors.RESET}`,\n },\n });\n\n const chunkStartTime = performance.now();\n const isFirstChunk = i === 0;\n const fileToTranslateCurrentChunk = chunk.content;\n\n // Context Preparation\n const getPrevChunkPrompt = () =>\n `>>> CONTEXT: PREVIOUS SOURCE CONTENT <<<\\n\\`\\`\\`\\n` +\n (chunks[i - 1]?.content ?? 
'') +\n `\\n\\`\\`\\`\\n>>> END PREVIOUS CONTEXT <<<`;\n\n const getBaseChunkContextPrompt = () =>\n `>>> CONTEXT: NEXT CONTENT <<<\\n\\`\\`\\`\\n` +\n (chunks[i + 1]?.content ?? '') +\n `\\n\\`\\`\\`\\n>>> END NEXT CONTEXT <<<`;\n\n chunkLogger('Process started');\n\n const chunkTranslation = retryManager(async () => {\n const result = await chunkInference(\n [\n { role: 'system', content: basePrompt },\n ...(chunks[i + 1]\n ? [\n {\n role: 'system',\n content: getBaseChunkContextPrompt(),\n } as const,\n ]\n : []),\n ...(isFirstChunk\n ? []\n : [{ role: 'system', content: getPrevChunkPrompt() } as const]),\n {\n role: 'system',\n content: [\n `You are translating TARGET CHUNK (${i + 1}/${totalChunks}).`,\n `Translate ONLY the target chunk. Preserve frontmatter/code exactly.`,\n ].join('\\n'),\n },\n {\n role: 'user',\n content: `>>> TARGET CHUNK START <<<\\n${fileToTranslateCurrentChunk}\\n>>> TARGET CHUNK END <<<`,\n },\n ],\n aiOptions,\n configuration,\n aiClient,\n aiConfig\n );\n\n let processedChunk = sanitizeChunk(\n result?.fileContent,\n fileToTranslateCurrentChunk\n );\n processedChunk = fixChunkStartEndChars(\n processedChunk,\n fileToTranslateCurrentChunk\n );\n\n const isValid = validateTranslation(\n fileToTranslateCurrentChunk,\n processedChunk,\n chunkLogger\n );\n\n if (!isValid) {\n // Throwing an error here signals retryManager to try again\n throw new Error(\n `Validation failed for chunk ${i + 1}/${totalChunks}`\n );\n }\n\n return { content: processedChunk, tokens: result.tokenUsed };\n });\n\n const { content: translatedChunk, tokens } = await chunkTranslation();\n const chunkEndTime = performance.now();\n const chunkDuration = (chunkEndTime - chunkStartTime).toFixed(0);\n\n // Store Result\n translatedParts[i] = translatedChunk;\n\n if (onChunkReceive) {\n onChunkReceive(translatedChunk, i, totalChunks);\n }\n\n // Incremental Flush Strategy\n if (flushStrategy === 'incremental') {\n const isContiguous = translatedParts\n .slice(0, i + 1)\n .every((p) => p && p !== '');\n\n if (isContiguous) {\n let endIdx = 0;\n while (\n endIdx < totalChunks &&\n translatedParts[endIdx] &&\n translatedParts[endIdx] !== ''\n ) {\n endIdx++;\n }\n const currentContent = translatedParts.slice(0, endIdx).join('');\n // Write asynchronously/sync is fine here as node handles file locks reasonably well for single process\n mkdirSync(dirname(outputFilePath), { recursive: true });\n writeFileSync(outputFilePath, currentContent);\n }\n }\n\n chunkLogger(\n [\n `${colorizeNumber(tokens)} tokens used `,\n `${ANSIColors.GREY_DARK}in ${colorizeNumber(chunkDuration)}ms${ANSIColors.RESET}`,\n ].join('')\n );\n })\n );\n\n // Wait for all chunks for this specific file/locale to finish\n await Promise.all(tasks);\n\n // Final Flush\n const fullContent = translatedParts.join('');\n if (flushStrategy === 'end' || flushStrategy === 'incremental') {\n mkdirSync(dirname(outputFilePath), { recursive: true });\n writeFileSync(outputFilePath, fullContent);\n }\n\n const fileEndTime = performance.now();\n const totalDuration = ((fileEndTime - fileStartTime) / 1000).toFixed(2);\n const relativePath = relative(\n configuration.content.baseDir,\n outputFilePath\n );\n\n appLogger(\n `${colorize('✔', ANSIColors.GREEN)} File ${formatPath(relativePath)} completed in ${colorizeNumber(totalDuration)}s.`\n );\n\n return fullContent;\n } catch (error: any) {\n errorState.count++;\n const errorMessage = error?.message ?? 
JSON.stringify(error);\n appLogger(`${colorize('✖', ANSIColors.RED)} Error: ${errorMessage}`);\n if (errorState.count >= errorState.maxErrors) errorState.shouldStop = true;\n return null;\n }\n};\n"],"mappings":";;;;;;;;;;;;;;AAoBA,MAAa,gBAAgB,OAAO,EAClC,cACA,gBACA,QACA,YACA,eACA,YACA,WACA,oBACA,UACA,UACA,gBAAgB,eAChB,gBACA,YACkD;AAClD,KAAI,WAAW,WAAY,QAAO;CAElC,MAAM,+CAAyB,eAAe,EAAE,QAAQ,EAAE,QAAQ,IAAI,EAAE,CAAC;CACzE,MAAM,gBAAgBA,4BAAY,KAAK;AAEvC,KAAI;EAEF,MAAM,SAASC,wCADK,qCAAe,cAAc,QAAQ,CACpB;EACrC,MAAM,cAAc,OAAO;EAG3B,MAAM,aAAa,+BADI,GAAGC,4BAAW,UAAU,sCAAc,aAAa,GAAGA,4BAAW,UAAU,KACtD,EAAE,SAAS,IAAI,CAAC,GAAGA,4BAAW;EAE1E,MAAM,SAAS,+BADI,GAAGA,4BAAW,UAAU,sCAAc,aAAa,GAAGA,4BAAW,UAAU,yCAAiB,OAAO,GAAGA,4BAAW,UAAU,KAC1G,EAAE,SAAS,IAAI,CAAC,GAAGA,4BAAW;AAElE,YACE,GAAG,WAAW,kDAA4B,YAAY,CAAC,qBACxD;EAED,MAAM,aAAaC,+BAAU,iCAAiC,QAAQ,CACnE,WAAW,kBAAkB,wCAAgB,QAAQ,MAAM,GAAG,CAC9D,WAAW,sBAAsB,wCAAgB,YAAY,MAAM,GAAG,CACtE,QAAQ,0BAA0B,WAAW,sBAAsB,IAAI,CACvE,QAAQ,0BAA0B,sBAAsB,IAAI;EAE/D,MAAMC,kBAA4B,IAAI,MAAM,YAAY,CAAC,KAAK,GAAG;EAGjE,MAAM,UAAU,WAAW,OAAO,IAAI;EAKtC,MAAM,QAAQ,OAAO,KAAK,OAAO,MAC/B,QAAQ,YAAY;AAClB,OAAI,WAAW,WAAY,QAAO;GAElC,MAAM,iDAA2B,eAAe,EAC9C,QAAQ,EACN,QAAQ,GAAG,OAAO,IAAIF,4BAAW,UAAU,GAAG,IAAI,EAAE,GAAG,YAAY,IAAIA,4BAAW,SACnF,EACF,CAAC;GAEF,MAAM,iBAAiBF,4BAAY,KAAK;GACxC,MAAM,eAAe,MAAM;GAC3B,MAAM,8BAA8B,MAAM;GAG1C,MAAM,2BACJ,wDACC,OAAO,IAAI,IAAI,WAAW,MAC3B;GAEF,MAAM,kCACJ,6CACC,OAAO,IAAI,IAAI,WAAW,MAC3B;AAEF,eAAY,kBAAkB;GA4D9B,MAAM,EAAE,SAAS,iBAAiB,WAAW,yCA1DP,YAAY;IAChD,MAAM,SAAS,MAAMK,4CACnB;KACE;MAAE,MAAM;MAAU,SAAS;MAAY;KACvC,GAAI,OAAO,IAAI,KACX,CACE;MACE,MAAM;MACN,SAAS,2BAA2B;MACrC,CACF,GACD,EAAE;KACN,GAAI,eACA,EAAE,GACF,CAAC;MAAE,MAAM;MAAU,SAAS,oBAAoB;MAAE,CAAU;KAChE;MACE,MAAM;MACN,SAAS,CACP,qCAAqC,IAAI,EAAE,GAAG,YAAY,KAC1D,sEACD,CAAC,KAAK,KAAK;MACb;KACD;MACE,MAAM;MACN,SAAS,+BAA+B,4BAA4B;MACrE;KACF,EACD,WACA,eACA,UACA,SACD;IAED,IAAI,iBAAiBC,8CACnB,QAAQ,aACR,4BACD;AACD,qBAAiBC,0DACf,gBACA,4BACD;AAQD,QAAI,CANYC,oDACd,6BACA,gBACA,YACD,CAIC,OAAM,IAAI,MACR,+BAA+B,IAAI,EAAE,GAAG,cACzC;AAGH,WAAO;KAAE,SAAS;KAAgB,QAAQ,OAAO;KAAW;KAC5D,EAEmE;GAErE,MAAM,iBADeR,4BAAY,KAAK,GACA,gBAAgB,QAAQ,EAAE;AAGhE,mBAAgB,KAAK;AAErB,OAAI,eACF,gBAAe,iBAAiB,GAAG,YAAY;AAIjD,OAAI,kBAAkB,eAKpB;QAJqB,gBAClB,MAAM,GAAG,IAAI,EAAE,CACf,OAAO,MAAM,KAAK,MAAM,GAAG,EAEZ;KAChB,IAAI,SAAS;AACb,YACE,SAAS,eACT,gBAAgB,WAChB,gBAAgB,YAAY,GAE5B;KAEF,MAAM,iBAAiB,gBAAgB,MAAM,GAAG,OAAO,CAAC,KAAK,GAAG;AAEhE,mDAAkB,eAAe,EAAE,EAAE,WAAW,MAAM,CAAC;AACvD,gCAAc,gBAAgB,eAAe;;;AAIjD,eACE,CACE,wCAAkB,OAAO,CAAC,gBAC1B,GAAGE,4BAAW,UAAU,0CAAoB,cAAc,CAAC,IAAIA,4BAAW,QAC3E,CAAC,KAAK,GAAG,CACX;IACD,CACH;AAGD,QAAM,QAAQ,IAAI,MAAM;EAGxB,MAAM,cAAc,gBAAgB,KAAK,GAAG;AAC5C,MAAI,kBAAkB,SAAS,kBAAkB,eAAe;AAC9D,iDAAkB,eAAe,EAAE,EAAE,WAAW,MAAM,CAAC;AACvD,8BAAc,gBAAgB,YAAY;;EAI5C,MAAM,kBADcF,4BAAY,KAAK,GACC,iBAAiB,KAAM,QAAQ,EAAE;EACvE,MAAM,uCACJ,cAAc,QAAQ,SACtB,eACD;AAED,YACE,kCAAY,KAAKE,4BAAW,MAAM,CAAC,2CAAmB,aAAa,CAAC,qDAA+B,cAAc,CAAC,IACnH;AAED,SAAO;UACAO,OAAY;AACnB,aAAW;EACX,MAAM,eAAe,OAAO,WAAW,KAAK,UAAU,MAAM;AAC5D,YAAU,kCAAY,KAAKP,4BAAW,IAAI,CAAC,UAAU,eAAe;AACpE,MAAI,WAAW,SAAS,WAAW,UAAW,YAAW,aAAa;AACtE,SAAO"}
+ {"version":3,"file":"translateFile.cjs","names":["performance","chunkText","ANSIColors","readAsset","chunkInference","sanitizeChunk","fixChunkStartEndChars","validateTranslation"],"sources":["../../../src/translateDoc/translateFile.ts"],"sourcesContent":["import { mkdirSync, writeFileSync } from 'node:fs';\nimport { readFile } from 'node:fs/promises';\nimport { dirname, relative } from 'node:path';\nimport { performance } from 'node:perf_hooks';\nimport { readAsset } from 'utils:asset';\nimport { formatLocale, formatPath } from '@intlayer/chokidar';\nimport {\n ANSIColors,\n colon,\n colorize,\n colorizeNumber,\n getAppLogger,\n retryManager,\n} from '@intlayer/config';\nimport { chunkText } from '../utils/calculateChunks';\nimport { chunkInference } from '../utils/chunkInference';\nimport { fixChunkStartEndChars } from '../utils/fixChunkStartEndChars';\nimport type { TranslateFileOptions } from './types';\nimport { sanitizeChunk, validateTranslation } from './validation';\n\nexport const translateFile = async ({\n baseFilePath,\n outputFilePath,\n locale,\n baseLocale,\n configuration,\n errorState,\n aiOptions,\n customInstructions,\n aiClient,\n aiConfig,\n flushStrategy = 'incremental',\n onChunkReceive,\n limit, // The Global Limiter\n}: TranslateFileOptions): Promise<string | null> => {\n if (errorState.shouldStop) return null;\n\n const appLogger = getAppLogger(configuration, { config: { prefix: '' } });\n const fileStartTime = performance.now();\n\n try {\n const fileContent = await readFile(baseFilePath, 'utf-8');\n const chunks = chunkText(fileContent);\n const totalChunks = chunks.length;\n\n const filePrefixText = `${ANSIColors.GREY_DARK}[${formatPath(baseFilePath)}${ANSIColors.GREY_DARK}] `;\n const filePrefix = `${colon(filePrefixText, { colSize: 40 })}${ANSIColors.RESET}`;\n const prefixText = `${ANSIColors.GREY_DARK}[${formatPath(baseFilePath)}${ANSIColors.GREY_DARK}][${formatLocale(locale)}${ANSIColors.GREY_DARK}] `;\n const prefix = `${colon(prefixText, { colSize: 40 })}${ANSIColors.RESET}`;\n\n appLogger(\n `${filePrefix}Split into ${colorizeNumber(totalChunks)} chunks. Queuing...`\n );\n\n const basePrompt = readAsset('./prompts/TRANSLATE_PROMPT.md', 'utf-8')\n .replaceAll('{{localeName}}', `${formatLocale(locale, false)}`)\n .replaceAll('{{baseLocaleName}}', `${formatLocale(baseLocale, false)}`)\n .replace('{{applicationContext}}', aiOptions?.applicationContext ?? '-')\n .replace('{{customInstructions}}', customInstructions ?? '-');\n\n const translatedParts: string[] = new Array(totalChunks).fill('');\n\n // Fallback if no limiter is provided (runs immediately)\n const runTask = limit ?? ((fn) => fn());\n\n // MAP CHUNKS TO GLOBAL TASKS\n // This pushes ALL chunks for this file into the Global Queue immediately.\n // They will execute whenever the global concurrency slots open up.\n const tasks = chunks.map((chunk, i) =>\n runTask(async () => {\n if (errorState.shouldStop) return null;\n\n const chunkLogger = getAppLogger(configuration, {\n config: {\n prefix: `${prefix} ${ANSIColors.GREY_DARK}[${i + 1}/${totalChunks}] ${ANSIColors.RESET}`,\n },\n });\n\n const chunkStartTime = performance.now();\n const isFirstChunk = i === 0;\n const fileToTranslateCurrentChunk = chunk.content;\n\n // Context Preparation\n const getPrevChunkPrompt = () =>\n `>>> CONTEXT: PREVIOUS SOURCE CONTENT <<<\\n\\`\\`\\`\\n` +\n (chunks[i - 1]?.content ?? 
'') +\n `\\n\\`\\`\\`\\n>>> END PREVIOUS CONTEXT <<<`;\n\n const getBaseChunkContextPrompt = () =>\n `>>> CONTEXT: NEXT CONTENT <<<\\n\\`\\`\\`\\n` +\n (chunks[i + 1]?.content ?? '') +\n `\\n\\`\\`\\`\\n>>> END NEXT CONTEXT <<<`;\n\n chunkLogger('Process started');\n\n const chunkTranslation = retryManager(async () => {\n const result = await chunkInference(\n [\n { role: 'system', content: basePrompt },\n ...(chunks[i + 1]\n ? [\n {\n role: 'system',\n content: getBaseChunkContextPrompt(),\n } as const,\n ]\n : []),\n ...(isFirstChunk\n ? []\n : [{ role: 'system', content: getPrevChunkPrompt() } as const]),\n {\n role: 'system',\n content: [\n `You are translating TARGET CHUNK (${i + 1}/${totalChunks}).`,\n `Translate ONLY the target chunk. Preserve frontmatter/code exactly.`,\n ].join('\\n'),\n },\n {\n role: 'user',\n content: `>>> TARGET CHUNK START <<<\\n${fileToTranslateCurrentChunk}\\n>>> TARGET CHUNK END <<<`,\n },\n ],\n aiOptions,\n configuration,\n aiClient,\n aiConfig\n );\n\n let processedChunk = sanitizeChunk(\n result?.fileContent,\n fileToTranslateCurrentChunk\n );\n processedChunk = fixChunkStartEndChars(\n processedChunk,\n fileToTranslateCurrentChunk\n );\n\n const isValid = validateTranslation(\n fileToTranslateCurrentChunk,\n processedChunk,\n chunkLogger\n );\n\n if (!isValid) {\n // Throwing an error here signals retryManager to try again\n throw new Error(\n `Validation failed for chunk ${i + 1}/${totalChunks}`\n );\n }\n\n return { content: processedChunk, tokens: result.tokenUsed };\n });\n\n const { content: translatedChunk, tokens } = await chunkTranslation();\n const chunkEndTime = performance.now();\n const chunkDuration = (chunkEndTime - chunkStartTime).toFixed(0);\n\n // Store Result\n translatedParts[i] = translatedChunk;\n\n if (onChunkReceive) {\n onChunkReceive(translatedChunk, i, totalChunks);\n }\n\n // Incremental Flush Strategy\n if (flushStrategy === 'incremental') {\n const isContiguous = translatedParts\n .slice(0, i + 1)\n .every((p) => p && p !== '');\n\n if (isContiguous) {\n let endIdx = 0;\n while (\n endIdx < totalChunks &&\n translatedParts[endIdx] &&\n translatedParts[endIdx] !== ''\n ) {\n endIdx++;\n }\n const currentContent = translatedParts.slice(0, endIdx).join('');\n // Write asynchronously/sync is fine here as node handles file locks reasonably well for single process\n mkdirSync(dirname(outputFilePath), { recursive: true });\n writeFileSync(outputFilePath, currentContent);\n }\n }\n\n chunkLogger(\n [\n `${colorizeNumber(tokens)} tokens used `,\n `${ANSIColors.GREY_DARK}in ${colorizeNumber(chunkDuration)}ms${ANSIColors.RESET}`,\n ].join('')\n );\n })\n );\n\n // Wait for all chunks for this specific file/locale to finish\n await Promise.all(tasks);\n\n // Final Flush\n const fullContent = translatedParts.join('');\n if (flushStrategy === 'end' || flushStrategy === 'incremental') {\n mkdirSync(dirname(outputFilePath), { recursive: true });\n writeFileSync(outputFilePath, fullContent);\n }\n\n const fileEndTime = performance.now();\n const totalDuration = ((fileEndTime - fileStartTime) / 1000).toFixed(2);\n const relativePath = relative(\n configuration.content.baseDir,\n outputFilePath\n );\n\n appLogger(\n `${colorize('✔', ANSIColors.GREEN)} File ${formatPath(relativePath)} completed in ${colorizeNumber(totalDuration)}s.`\n );\n\n return fullContent;\n } catch (error: any) {\n errorState.count++;\n const errorMessage = error?.message ?? 
JSON.stringify(error);\n appLogger(`${colorize('✖', ANSIColors.RED)} Error: ${errorMessage}`);\n if (errorState.count >= errorState.maxErrors) errorState.shouldStop = true;\n return null;\n }\n};\n"],"mappings":";;;;;;;;;;;;;;AAoBA,MAAa,gBAAgB,OAAO,EAClC,cACA,gBACA,QACA,YACA,eACA,YACA,WACA,oBACA,UACA,UACA,gBAAgB,eAChB,gBACA,YACkD;AAClD,KAAI,WAAW,WAAY,QAAO;CAElC,MAAM,+CAAyB,eAAe,EAAE,QAAQ,EAAE,QAAQ,IAAI,EAAE,CAAC;CACzE,MAAM,gBAAgBA,4BAAY,KAAK;AAEvC,KAAI;EAEF,MAAM,SAASC,wCADK,qCAAe,cAAc,QAAQ,CACpB;EACrC,MAAM,cAAc,OAAO;EAG3B,MAAM,aAAa,+BADI,GAAGC,4BAAW,UAAU,sCAAc,aAAa,GAAGA,4BAAW,UAAU,KACtD,EAAE,SAAS,IAAI,CAAC,GAAGA,4BAAW;EAE1E,MAAM,SAAS,+BADI,GAAGA,4BAAW,UAAU,sCAAc,aAAa,GAAGA,4BAAW,UAAU,yCAAiB,OAAO,GAAGA,4BAAW,UAAU,KAC1G,EAAE,SAAS,IAAI,CAAC,GAAGA,4BAAW;AAElE,YACE,GAAG,WAAW,kDAA4B,YAAY,CAAC,qBACxD;EAED,MAAM,aAAaC,+BAAU,iCAAiC,QAAQ,CACnE,WAAW,kBAAkB,wCAAgB,QAAQ,MAAM,GAAG,CAC9D,WAAW,sBAAsB,wCAAgB,YAAY,MAAM,GAAG,CACtE,QAAQ,0BAA0B,WAAW,sBAAsB,IAAI,CACvE,QAAQ,0BAA0B,sBAAsB,IAAI;EAE/D,MAAM,kBAA4B,IAAI,MAAM,YAAY,CAAC,KAAK,GAAG;EAGjE,MAAM,UAAU,WAAW,OAAO,IAAI;EAKtC,MAAM,QAAQ,OAAO,KAAK,OAAO,MAC/B,QAAQ,YAAY;AAClB,OAAI,WAAW,WAAY,QAAO;GAElC,MAAM,iDAA2B,eAAe,EAC9C,QAAQ,EACN,QAAQ,GAAG,OAAO,IAAID,4BAAW,UAAU,GAAG,IAAI,EAAE,GAAG,YAAY,IAAIA,4BAAW,SACnF,EACF,CAAC;GAEF,MAAM,iBAAiBF,4BAAY,KAAK;GACxC,MAAM,eAAe,MAAM;GAC3B,MAAM,8BAA8B,MAAM;GAG1C,MAAM,2BACJ,wDACC,OAAO,IAAI,IAAI,WAAW,MAC3B;GAEF,MAAM,kCACJ,6CACC,OAAO,IAAI,IAAI,WAAW,MAC3B;AAEF,eAAY,kBAAkB;GA4D9B,MAAM,EAAE,SAAS,iBAAiB,WAAW,yCA1DP,YAAY;IAChD,MAAM,SAAS,MAAMI,4CACnB;KACE;MAAE,MAAM;MAAU,SAAS;MAAY;KACvC,GAAI,OAAO,IAAI,KACX,CACE;MACE,MAAM;MACN,SAAS,2BAA2B;MACrC,CACF,GACD,EAAE;KACN,GAAI,eACA,EAAE,GACF,CAAC;MAAE,MAAM;MAAU,SAAS,oBAAoB;MAAE,CAAU;KAChE;MACE,MAAM;MACN,SAAS,CACP,qCAAqC,IAAI,EAAE,GAAG,YAAY,KAC1D,sEACD,CAAC,KAAK,KAAK;MACb;KACD;MACE,MAAM;MACN,SAAS,+BAA+B,4BAA4B;MACrE;KACF,EACD,WACA,eACA,UACA,SACD;IAED,IAAI,iBAAiBC,8CACnB,QAAQ,aACR,4BACD;AACD,qBAAiBC,0DACf,gBACA,4BACD;AAQD,QAAI,CANYC,oDACd,6BACA,gBACA,YACD,CAIC,OAAM,IAAI,MACR,+BAA+B,IAAI,EAAE,GAAG,cACzC;AAGH,WAAO;KAAE,SAAS;KAAgB,QAAQ,OAAO;KAAW;KAC5D,EAEmE;GAErE,MAAM,iBADeP,4BAAY,KAAK,GACA,gBAAgB,QAAQ,EAAE;AAGhE,mBAAgB,KAAK;AAErB,OAAI,eACF,gBAAe,iBAAiB,GAAG,YAAY;AAIjD,OAAI,kBAAkB,eAKpB;QAJqB,gBAClB,MAAM,GAAG,IAAI,EAAE,CACf,OAAO,MAAM,KAAK,MAAM,GAAG,EAEZ;KAChB,IAAI,SAAS;AACb,YACE,SAAS,eACT,gBAAgB,WAChB,gBAAgB,YAAY,GAE5B;KAEF,MAAM,iBAAiB,gBAAgB,MAAM,GAAG,OAAO,CAAC,KAAK,GAAG;AAEhE,mDAAkB,eAAe,EAAE,EAAE,WAAW,MAAM,CAAC;AACvD,gCAAc,gBAAgB,eAAe;;;AAIjD,eACE,CACE,wCAAkB,OAAO,CAAC,gBAC1B,GAAGE,4BAAW,UAAU,0CAAoB,cAAc,CAAC,IAAIA,4BAAW,QAC3E,CAAC,KAAK,GAAG,CACX;IACD,CACH;AAGD,QAAM,QAAQ,IAAI,MAAM;EAGxB,MAAM,cAAc,gBAAgB,KAAK,GAAG;AAC5C,MAAI,kBAAkB,SAAS,kBAAkB,eAAe;AAC9D,iDAAkB,eAAe,EAAE,EAAE,WAAW,MAAM,CAAC;AACvD,8BAAc,gBAAgB,YAAY;;EAI5C,MAAM,kBADcF,4BAAY,KAAK,GACC,iBAAiB,KAAM,QAAQ,EAAE;EACvE,MAAM,uCACJ,cAAc,QAAQ,SACtB,eACD;AAED,YACE,kCAAY,KAAKE,4BAAW,MAAM,CAAC,2CAAmB,aAAa,CAAC,qDAA+B,cAAc,CAAC,IACnH;AAED,SAAO;UACA,OAAY;AACnB,aAAW;EACX,MAAM,eAAe,OAAO,WAAW,KAAK,UAAU,MAAM;AAC5D,YAAU,kCAAY,KAAKA,4BAAW,IAAI,CAAC,UAAU,eAAe;AACpE,MAAI,WAAW,SAAS,WAAW,UAAW,YAAW,aAAa;AACtE,SAAO"}
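In this hunk only the two typed `names` entries disappear; the TypeScript embedded in `sourcesContent` is identical on both sides. That source is worth a note: every chunk task is routed through an optional `limit` function (`const runTask = limit ?? ((fn) => fn());`), so chunks from all files share one global concurrency pool. A minimal sketch of a limiter with that call shape, assuming a p-limit-style contract (this is not the package's actual implementation):

```ts
type Task<T> = () => Promise<T>;

// Returns a `limit` function: pass it an async task and at most
// `concurrency` tasks run at once; the rest queue in FIFO order.
const createLimiter = (concurrency: number) => {
  let active = 0;
  const queue: (() => void)[] = [];

  const release = () => {
    active -= 1;
    queue.shift()?.(); // start the next queued task, if any
  };

  return <T>(task: Task<T>): Promise<T> =>
    new Promise<T>((resolve, reject) => {
      const run = () => {
        active += 1;
        task().then(resolve, reject).finally(release);
      };
      if (active < concurrency) run();
      else queue.push(run);
    });
};

// Usage mirroring translateFile: all chunks queue immediately, but at
// most five inference calls are in flight at any time.
const limit = createLimiter(5);
```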
package/dist/cjs/translateDoc/validation.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"validation.cjs","names":["errors: string[]"],"sources":["../../../src/translateDoc/validation.ts"],"sourcesContent":["import type { Logger } from '@intlayer/config';\n\n/**\n * Validates that the translated content matches the structure of the original.\n * Throws an error if a mismatch is found, triggering a retry.\n */\nexport const validateTranslation = (\n original: string,\n translated: string,\n logger: Logger\n): boolean => {\n const errors: string[] = [];\n\n // YAML Frontmatter Integrity (CRITICAL)\n if (original.trimStart().startsWith('---')) {\n if (!translated.trimStart().startsWith('---')) {\n errors.push(\n 'YAML Frontmatter missing: Input starts with \"---\", output does not.'\n );\n }\n const originalDashes = (original.match(/^---$/gm) || []).length;\n const translatedDashes = (translated.match(/^---$/gm) || []).length;\n if (originalDashes >= 2 && translatedDashes < 2) {\n errors.push(\n 'YAML Frontmatter unclosed: Input has closing \"---\", output is missing it.'\n );\n }\n }\n\n // Code Fence Check\n const fenceRegex = /^\\s*```/gm;\n const originalFences = (original.match(fenceRegex) || []).length;\n const translatedFences = (translated.match(fenceRegex) || []).length;\n\n if (originalFences !== translatedFences) {\n errors.push(\n `Code fence mismatch: Input has ${originalFences}, output has ${translatedFences}`\n );\n }\n\n // Length/Duplication Check\n const ratio = translated.length / (original.length || 1);\n const isTooLong = ratio > 2.5;\n const isSignificantLength = original.length > 50;\n\n if (isTooLong && isSignificantLength) {\n errors.push(\n `Length deviation: Output is ${translated.length} chars vs Input ${original.length} (${ratio.toFixed(1)}x). Likely included context.`\n );\n }\n\n // Line Count Heuristic\n const originalLines = original.split('\\n').length;\n const translatedLines = translated.split('\\n').length;\n\n if (originalLines > 5) {\n if (translatedLines < originalLines * 0.4) {\n errors.push(\n `Line count deviation: Output has ${translatedLines} lines, Input has ${originalLines}. 
Likely content deletion.`\n );\n }\n }\n\n if (errors.length > 0) {\n logger(`Validation Failed: ${errors.join(', ')}`);\n return false;\n }\n\n return true;\n};\n\n/**\n * Clean common AI artifacts\n */\nexport const sanitizeChunk = (translated: string, original: string): string => {\n let cleaned = translated;\n const wrapRegex = /^```(?:markdown|md|txt)?\\n([\\s\\S]*?)\\n```$/i;\n const match = cleaned.match(wrapRegex);\n if (match) cleaned = match[1];\n\n if (!original.startsWith('\\n') && cleaned.startsWith('\\n')) {\n cleaned = cleaned.replace(/^\\n+/, '');\n }\n if (!original.startsWith(' ') && cleaned.startsWith(' ')) {\n cleaned = cleaned.trimStart();\n }\n return cleaned;\n};\n"],"mappings":";;;;;;AAMA,MAAa,uBACX,UACA,YACA,WACY;CACZ,MAAMA,SAAmB,EAAE;AAG3B,KAAI,SAAS,WAAW,CAAC,WAAW,MAAM,EAAE;AAC1C,MAAI,CAAC,WAAW,WAAW,CAAC,WAAW,MAAM,CAC3C,QAAO,KACL,wEACD;EAEH,MAAM,kBAAkB,SAAS,MAAM,UAAU,IAAI,EAAE,EAAE;EACzD,MAAM,oBAAoB,WAAW,MAAM,UAAU,IAAI,EAAE,EAAE;AAC7D,MAAI,kBAAkB,KAAK,mBAAmB,EAC5C,QAAO,KACL,8EACD;;CAKL,MAAM,aAAa;CACnB,MAAM,kBAAkB,SAAS,MAAM,WAAW,IAAI,EAAE,EAAE;CAC1D,MAAM,oBAAoB,WAAW,MAAM,WAAW,IAAI,EAAE,EAAE;AAE9D,KAAI,mBAAmB,iBACrB,QAAO,KACL,kCAAkC,eAAe,eAAe,mBACjE;CAIH,MAAM,QAAQ,WAAW,UAAU,SAAS,UAAU;CACtD,MAAM,YAAY,QAAQ;CAC1B,MAAM,sBAAsB,SAAS,SAAS;AAE9C,KAAI,aAAa,oBACf,QAAO,KACL,+BAA+B,WAAW,OAAO,kBAAkB,SAAS,OAAO,IAAI,MAAM,QAAQ,EAAE,CAAC,8BACzG;CAIH,MAAM,gBAAgB,SAAS,MAAM,KAAK,CAAC;CAC3C,MAAM,kBAAkB,WAAW,MAAM,KAAK,CAAC;AAE/C,KAAI,gBAAgB,GAClB;MAAI,kBAAkB,gBAAgB,GACpC,QAAO,KACL,oCAAoC,gBAAgB,oBAAoB,cAAc,4BACvF;;AAIL,KAAI,OAAO,SAAS,GAAG;AACrB,SAAO,sBAAsB,OAAO,KAAK,KAAK,GAAG;AACjD,SAAO;;AAGT,QAAO;;;;;AAMT,MAAa,iBAAiB,YAAoB,aAA6B;CAC7E,IAAI,UAAU;CAEd,MAAM,QAAQ,QAAQ,MADJ,8CACoB;AACtC,KAAI,MAAO,WAAU,MAAM;AAE3B,KAAI,CAAC,SAAS,WAAW,KAAK,IAAI,QAAQ,WAAW,KAAK,CACxD,WAAU,QAAQ,QAAQ,QAAQ,GAAG;AAEvC,KAAI,CAAC,SAAS,WAAW,IAAI,IAAI,QAAQ,WAAW,IAAI,CACtD,WAAU,QAAQ,WAAW;AAE/B,QAAO"}
+ {"version":3,"file":"validation.cjs","names":[],"sources":["../../../src/translateDoc/validation.ts"],"sourcesContent":["import type { Logger } from '@intlayer/config';\n\n/**\n * Validates that the translated content matches the structure of the original.\n * Throws an error if a mismatch is found, triggering a retry.\n */\nexport const validateTranslation = (\n original: string,\n translated: string,\n logger: Logger\n): boolean => {\n const errors: string[] = [];\n\n // YAML Frontmatter Integrity (CRITICAL)\n if (original.trimStart().startsWith('---')) {\n if (!translated.trimStart().startsWith('---')) {\n errors.push(\n 'YAML Frontmatter missing: Input starts with \"---\", output does not.'\n );\n }\n const originalDashes = (original.match(/^---$/gm) || []).length;\n const translatedDashes = (translated.match(/^---$/gm) || []).length;\n if (originalDashes >= 2 && translatedDashes < 2) {\n errors.push(\n 'YAML Frontmatter unclosed: Input has closing \"---\", output is missing it.'\n );\n }\n }\n\n // Code Fence Check\n const fenceRegex = /^\\s*```/gm;\n const originalFences = (original.match(fenceRegex) || []).length;\n const translatedFences = (translated.match(fenceRegex) || []).length;\n\n if (originalFences !== translatedFences) {\n errors.push(\n `Code fence mismatch: Input has ${originalFences}, output has ${translatedFences}`\n );\n }\n\n // Length/Duplication Check\n const ratio = translated.length / (original.length || 1);\n const isTooLong = ratio > 2.5;\n const isSignificantLength = original.length > 50;\n\n if (isTooLong && isSignificantLength) {\n errors.push(\n `Length deviation: Output is ${translated.length} chars vs Input ${original.length} (${ratio.toFixed(1)}x). Likely included context.`\n );\n }\n\n // Line Count Heuristic\n const originalLines = original.split('\\n').length;\n const translatedLines = translated.split('\\n').length;\n\n if (originalLines > 5) {\n if (translatedLines < originalLines * 0.4) {\n errors.push(\n `Line count deviation: Output has ${translatedLines} lines, Input has ${originalLines}. 
Likely content deletion.`\n );\n }\n }\n\n if (errors.length > 0) {\n logger(`Validation Failed: ${errors.join(', ')}`);\n return false;\n }\n\n return true;\n};\n\n/**\n * Clean common AI artifacts\n */\nexport const sanitizeChunk = (translated: string, original: string): string => {\n let cleaned = translated;\n const wrapRegex = /^```(?:markdown|md|txt)?\\n([\\s\\S]*?)\\n```$/i;\n const match = cleaned.match(wrapRegex);\n if (match) cleaned = match[1];\n\n if (!original.startsWith('\\n') && cleaned.startsWith('\\n')) {\n cleaned = cleaned.replace(/^\\n+/, '');\n }\n if (!original.startsWith(' ') && cleaned.startsWith(' ')) {\n cleaned = cleaned.trimStart();\n }\n return cleaned;\n};\n"],"mappings":";;;;;;AAMA,MAAa,uBACX,UACA,YACA,WACY;CACZ,MAAM,SAAmB,EAAE;AAG3B,KAAI,SAAS,WAAW,CAAC,WAAW,MAAM,EAAE;AAC1C,MAAI,CAAC,WAAW,WAAW,CAAC,WAAW,MAAM,CAC3C,QAAO,KACL,wEACD;EAEH,MAAM,kBAAkB,SAAS,MAAM,UAAU,IAAI,EAAE,EAAE;EACzD,MAAM,oBAAoB,WAAW,MAAM,UAAU,IAAI,EAAE,EAAE;AAC7D,MAAI,kBAAkB,KAAK,mBAAmB,EAC5C,QAAO,KACL,8EACD;;CAKL,MAAM,aAAa;CACnB,MAAM,kBAAkB,SAAS,MAAM,WAAW,IAAI,EAAE,EAAE;CAC1D,MAAM,oBAAoB,WAAW,MAAM,WAAW,IAAI,EAAE,EAAE;AAE9D,KAAI,mBAAmB,iBACrB,QAAO,KACL,kCAAkC,eAAe,eAAe,mBACjE;CAIH,MAAM,QAAQ,WAAW,UAAU,SAAS,UAAU;CACtD,MAAM,YAAY,QAAQ;CAC1B,MAAM,sBAAsB,SAAS,SAAS;AAE9C,KAAI,aAAa,oBACf,QAAO,KACL,+BAA+B,WAAW,OAAO,kBAAkB,SAAS,OAAO,IAAI,MAAM,QAAQ,EAAE,CAAC,8BACzG;CAIH,MAAM,gBAAgB,SAAS,MAAM,KAAK,CAAC;CAC3C,MAAM,kBAAkB,WAAW,MAAM,KAAK,CAAC;AAE/C,KAAI,gBAAgB,GAClB;MAAI,kBAAkB,gBAAgB,GACpC,QAAO,KACL,oCAAoC,gBAAgB,oBAAoB,cAAc,4BACvF;;AAIL,KAAI,OAAO,SAAS,GAAG;AACrB,SAAO,sBAAsB,OAAO,KAAK,KAAK,GAAG;AACjD,SAAO;;AAGT,QAAO;;;;;AAMT,MAAa,iBAAiB,YAAoB,aAA6B;CAC7E,IAAI,UAAU;CAEd,MAAM,QAAQ,QAAQ,MADJ,8CACoB;AACtC,KAAI,MAAO,WAAU,MAAM;AAE3B,KAAI,CAAC,SAAS,WAAW,KAAK,IAAI,QAAQ,WAAW,KAAK,CACxD,WAAU,QAAQ,QAAQ,QAAQ,GAAG;AAEvC,KAAI,CAAC,SAAS,WAAW,IAAI,IAAI,QAAQ,WAAW,IAAI,CACtD,WAAU,QAAQ,WAAW;AAE/B,QAAO"}
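Here the only `names` entry, `"errors: string[]"`, is dropped. The embedded `validation.ts` is the interesting part: it rejects translations that lose their YAML frontmatter, change the number of code fences, balloon past 2.5x the source length, or fall below 40% of the source line count, and `sanitizeChunk` strips the markdown fence a model sometimes wraps its answer in. To illustrate that unwrap step, here is a re-implementation of the regex visible in the source (not an import of the package API):

```ts
// Re-implementation of sanitizeChunk's unwrap step, for illustration only.
const wrapRegex = /^```(?:markdown|md|txt)?\n([\s\S]*?)\n```$/i;

const unwrapFence = (translated: string): string => {
  const match = translated.match(wrapRegex);
  return match ? match[1] : translated;
};

const fence = '```';
console.log(unwrapFence(`${fence}markdown\nBonjour le monde\n${fence}`));
// -> 'Bonjour le monde'  (model-added fence stripped)
console.log(unwrapFence('Bonjour le monde'));
// -> 'Bonjour le monde'  (already clean, left untouched)
```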
package/dist/cjs/translation-alignment/alignBlocks.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"alignBlocks.cjs","names":["scoreMatrix: number[][]","traceMatrix: ('diagonal' | 'up' | 'left')[][]","computeJaccardSimilarity","i","j","result: AlignmentPair[]"],"sources":["../../../src/translation-alignment/alignBlocks.ts"],"sourcesContent":["import { computeJaccardSimilarity } from './computeSimilarity';\nimport type { AlignmentPair, FingerprintedBlock } from './types';\n\nexport const alignEnglishAndFrenchBlocks = (\n defaultBlocks: FingerprintedBlock[],\n secondaryBlocks: FingerprintedBlock[]\n): AlignmentPair[] => {\n // Needleman–Wunsch style global alignment using anchor similarity and type equality\n const defaultLength = defaultBlocks.length;\n const secondaryLength = secondaryBlocks.length;\n\n const scoreMatrix: number[][] = Array.from(\n { length: defaultLength + 1 },\n () => Array.from({ length: secondaryLength + 1 }, () => 0)\n );\n const traceMatrix: ('diagonal' | 'up' | 'left')[][] = Array.from(\n { length: defaultLength + 1 },\n () => Array.from({ length: secondaryLength + 1 }, () => 'diagonal')\n );\n\n const gapPenalty = -2;\n\n const computeMatchScore = (\n defaultIndex: number,\n secondaryIndex: number\n ): number => {\n const defaultBlock = defaultBlocks[defaultIndex];\n const secondaryBlock = secondaryBlocks[secondaryIndex];\n const typeBonus = defaultBlock.type === secondaryBlock.type ? 2 : 0;\n const anchorSimilarity = computeJaccardSimilarity(\n defaultBlock.anchorText,\n secondaryBlock.anchorText,\n 3\n );\n const lengthRatio =\n Math.min(defaultBlock.content.length, secondaryBlock.content.length) /\n Math.max(defaultBlock.content.length, secondaryBlock.content.length);\n const lengthBonus = lengthRatio > 0.75 ? 1 : 0;\n return typeBonus + lengthBonus + anchorSimilarity * 8; // weighted toward anchor similarity\n };\n\n // initialize first row and column\n for (let i = 1; i <= defaultLength; i += 1) {\n scoreMatrix[i][0] = scoreMatrix[i - 1][0] + gapPenalty;\n traceMatrix[i][0] = 'up';\n }\n for (let j = 1; j <= secondaryLength; j += 1) {\n scoreMatrix[0][j] = scoreMatrix[0][j - 1] + gapPenalty;\n traceMatrix[0][j] = 'left';\n }\n\n // fill\n for (let i = 1; i <= defaultLength; i += 1) {\n for (let j = 1; j <= secondaryLength; j += 1) {\n const match = scoreMatrix[i - 1][j - 1] + computeMatchScore(i - 1, j - 1);\n const deleteGap = scoreMatrix[i - 1][j] + gapPenalty;\n const insertGap = scoreMatrix[i][j - 1] + gapPenalty;\n\n const best = Math.max(match, deleteGap, insertGap);\n scoreMatrix[i][j] = best;\n traceMatrix[i][j] =\n best === match ? 'diagonal' : best === deleteGap ? 
'up' : 'left';\n }\n }\n\n // traceback\n const result: AlignmentPair[] = [];\n let i = defaultLength;\n let j = secondaryLength;\n while (i > 0 || j > 0) {\n if (i > 0 && j > 0 && traceMatrix[i][j] === 'diagonal') {\n const englishIndex = i - 1;\n const frenchIndex = j - 1;\n const similarityScore = computeJaccardSimilarity(\n defaultBlocks[englishIndex].anchorText,\n secondaryBlocks[frenchIndex].anchorText,\n 3\n );\n result.unshift({ englishIndex, frenchIndex, similarityScore });\n i -= 1;\n j -= 1;\n } else if (i > 0 && (j === 0 || traceMatrix[i][j] === 'up')) {\n result.unshift({\n englishIndex: i - 1,\n frenchIndex: null,\n similarityScore: 0,\n });\n i -= 1;\n } else if (j > 0 && (i === 0 || traceMatrix[i][j] === 'left')) {\n // french block has no corresponding english block (deleted)\n result.unshift({\n englishIndex: -1,\n frenchIndex: j - 1,\n similarityScore: 0,\n });\n j -= 1;\n }\n }\n return result;\n};\n"],"mappings":";;;AAGA,MAAa,+BACX,eACA,oBACoB;CAEpB,MAAM,gBAAgB,cAAc;CACpC,MAAM,kBAAkB,gBAAgB;CAExC,MAAMA,cAA0B,MAAM,KACpC,EAAE,QAAQ,gBAAgB,GAAG,QACvB,MAAM,KAAK,EAAE,QAAQ,kBAAkB,GAAG,QAAQ,EAAE,CAC3D;CACD,MAAMC,cAAgD,MAAM,KAC1D,EAAE,QAAQ,gBAAgB,GAAG,QACvB,MAAM,KAAK,EAAE,QAAQ,kBAAkB,GAAG,QAAQ,WAAW,CACpE;CAED,MAAM,aAAa;CAEnB,MAAM,qBACJ,cACA,mBACW;EACX,MAAM,eAAe,cAAc;EACnC,MAAM,iBAAiB,gBAAgB;EACvC,MAAM,YAAY,aAAa,SAAS,eAAe,OAAO,IAAI;EAClE,MAAM,mBAAmBC,yEACvB,aAAa,YACb,eAAe,YACf,EACD;AAKD,SAAO,aAHL,KAAK,IAAI,aAAa,QAAQ,QAAQ,eAAe,QAAQ,OAAO,GACpE,KAAK,IAAI,aAAa,QAAQ,QAAQ,eAAe,QAAQ,OAAO,GACpC,MAAO,IAAI,KACZ,mBAAmB;;AAItD,MAAK,IAAIC,MAAI,GAAGA,OAAK,eAAe,OAAK,GAAG;AAC1C,cAAYA,KAAG,KAAK,YAAYA,MAAI,GAAG,KAAK;AAC5C,cAAYA,KAAG,KAAK;;AAEtB,MAAK,IAAIC,MAAI,GAAGA,OAAK,iBAAiB,OAAK,GAAG;AAC5C,cAAY,GAAGA,OAAK,YAAY,GAAGA,MAAI,KAAK;AAC5C,cAAY,GAAGA,OAAK;;AAItB,MAAK,IAAID,MAAI,GAAGA,OAAK,eAAe,OAAK,EACvC,MAAK,IAAIC,MAAI,GAAGA,OAAK,iBAAiB,OAAK,GAAG;EAC5C,MAAM,QAAQ,YAAYD,MAAI,GAAGC,MAAI,KAAK,kBAAkBD,MAAI,GAAGC,MAAI,EAAE;EACzE,MAAM,YAAY,YAAYD,MAAI,GAAGC,OAAK;EAC1C,MAAM,YAAY,YAAYD,KAAGC,MAAI,KAAK;EAE1C,MAAM,OAAO,KAAK,IAAI,OAAO,WAAW,UAAU;AAClD,cAAYD,KAAGC,OAAK;AACpB,cAAYD,KAAGC,OACb,SAAS,QAAQ,aAAa,SAAS,YAAY,OAAO;;CAKhE,MAAMC,SAA0B,EAAE;CAClC,IAAI,IAAI;CACR,IAAI,IAAI;AACR,QAAO,IAAI,KAAK,IAAI,EAClB,KAAI,IAAI,KAAK,IAAI,KAAK,YAAY,GAAG,OAAO,YAAY;EACtD,MAAM,eAAe,IAAI;EACzB,MAAM,cAAc,IAAI;EACxB,MAAM,kBAAkBH,yEACtB,cAAc,cAAc,YAC5B,gBAAgB,aAAa,YAC7B,EACD;AACD,SAAO,QAAQ;GAAE;GAAc;GAAa;GAAiB,CAAC;AAC9D,OAAK;AACL,OAAK;YACI,IAAI,MAAM,MAAM,KAAK,YAAY,GAAG,OAAO,OAAO;AAC3D,SAAO,QAAQ;GACb,cAAc,IAAI;GAClB,aAAa;GACb,iBAAiB;GAClB,CAAC;AACF,OAAK;YACI,IAAI,MAAM,MAAM,KAAK,YAAY,GAAG,OAAO,SAAS;AAE7D,SAAO,QAAQ;GACb,cAAc;GACd,aAAa,IAAI;GACjB,iBAAiB;GAClB,CAAC;AACF,OAAK;;AAGT,QAAO"}
+ {"version":3,"file":"alignBlocks.cjs","names":["computeJaccardSimilarity","i","j"],"sources":["../../../src/translation-alignment/alignBlocks.ts"],"sourcesContent":["import { computeJaccardSimilarity } from './computeSimilarity';\nimport type { AlignmentPair, FingerprintedBlock } from './types';\n\nexport const alignEnglishAndFrenchBlocks = (\n defaultBlocks: FingerprintedBlock[],\n secondaryBlocks: FingerprintedBlock[]\n): AlignmentPair[] => {\n // Needleman–Wunsch style global alignment using anchor similarity and type equality\n const defaultLength = defaultBlocks.length;\n const secondaryLength = secondaryBlocks.length;\n\n const scoreMatrix: number[][] = Array.from(\n { length: defaultLength + 1 },\n () => Array.from({ length: secondaryLength + 1 }, () => 0)\n );\n const traceMatrix: ('diagonal' | 'up' | 'left')[][] = Array.from(\n { length: defaultLength + 1 },\n () => Array.from({ length: secondaryLength + 1 }, () => 'diagonal')\n );\n\n const gapPenalty = -2;\n\n const computeMatchScore = (\n defaultIndex: number,\n secondaryIndex: number\n ): number => {\n const defaultBlock = defaultBlocks[defaultIndex];\n const secondaryBlock = secondaryBlocks[secondaryIndex];\n const typeBonus = defaultBlock.type === secondaryBlock.type ? 2 : 0;\n const anchorSimilarity = computeJaccardSimilarity(\n defaultBlock.anchorText,\n secondaryBlock.anchorText,\n 3\n );\n const lengthRatio =\n Math.min(defaultBlock.content.length, secondaryBlock.content.length) /\n Math.max(defaultBlock.content.length, secondaryBlock.content.length);\n const lengthBonus = lengthRatio > 0.75 ? 1 : 0;\n return typeBonus + lengthBonus + anchorSimilarity * 8; // weighted toward anchor similarity\n };\n\n // initialize first row and column\n for (let i = 1; i <= defaultLength; i += 1) {\n scoreMatrix[i][0] = scoreMatrix[i - 1][0] + gapPenalty;\n traceMatrix[i][0] = 'up';\n }\n for (let j = 1; j <= secondaryLength; j += 1) {\n scoreMatrix[0][j] = scoreMatrix[0][j - 1] + gapPenalty;\n traceMatrix[0][j] = 'left';\n }\n\n // fill\n for (let i = 1; i <= defaultLength; i += 1) {\n for (let j = 1; j <= secondaryLength; j += 1) {\n const match = scoreMatrix[i - 1][j - 1] + computeMatchScore(i - 1, j - 1);\n const deleteGap = scoreMatrix[i - 1][j] + gapPenalty;\n const insertGap = scoreMatrix[i][j - 1] + gapPenalty;\n\n const best = Math.max(match, deleteGap, insertGap);\n scoreMatrix[i][j] = best;\n traceMatrix[i][j] =\n best === match ? 'diagonal' : best === deleteGap ? 
'up' : 'left';\n }\n }\n\n // traceback\n const result: AlignmentPair[] = [];\n let i = defaultLength;\n let j = secondaryLength;\n while (i > 0 || j > 0) {\n if (i > 0 && j > 0 && traceMatrix[i][j] === 'diagonal') {\n const englishIndex = i - 1;\n const frenchIndex = j - 1;\n const similarityScore = computeJaccardSimilarity(\n defaultBlocks[englishIndex].anchorText,\n secondaryBlocks[frenchIndex].anchorText,\n 3\n );\n result.unshift({ englishIndex, frenchIndex, similarityScore });\n i -= 1;\n j -= 1;\n } else if (i > 0 && (j === 0 || traceMatrix[i][j] === 'up')) {\n result.unshift({\n englishIndex: i - 1,\n frenchIndex: null,\n similarityScore: 0,\n });\n i -= 1;\n } else if (j > 0 && (i === 0 || traceMatrix[i][j] === 'left')) {\n // french block has no corresponding english block (deleted)\n result.unshift({\n englishIndex: -1,\n frenchIndex: j - 1,\n similarityScore: 0,\n });\n j -= 1;\n }\n }\n return result;\n};\n"],"mappings":";;;AAGA,MAAa,+BACX,eACA,oBACoB;CAEpB,MAAM,gBAAgB,cAAc;CACpC,MAAM,kBAAkB,gBAAgB;CAExC,MAAM,cAA0B,MAAM,KACpC,EAAE,QAAQ,gBAAgB,GAAG,QACvB,MAAM,KAAK,EAAE,QAAQ,kBAAkB,GAAG,QAAQ,EAAE,CAC3D;CACD,MAAM,cAAgD,MAAM,KAC1D,EAAE,QAAQ,gBAAgB,GAAG,QACvB,MAAM,KAAK,EAAE,QAAQ,kBAAkB,GAAG,QAAQ,WAAW,CACpE;CAED,MAAM,aAAa;CAEnB,MAAM,qBACJ,cACA,mBACW;EACX,MAAM,eAAe,cAAc;EACnC,MAAM,iBAAiB,gBAAgB;EACvC,MAAM,YAAY,aAAa,SAAS,eAAe,OAAO,IAAI;EAClE,MAAM,mBAAmBA,yEACvB,aAAa,YACb,eAAe,YACf,EACD;AAKD,SAAO,aAHL,KAAK,IAAI,aAAa,QAAQ,QAAQ,eAAe,QAAQ,OAAO,GACpE,KAAK,IAAI,aAAa,QAAQ,QAAQ,eAAe,QAAQ,OAAO,GACpC,MAAO,IAAI,KACZ,mBAAmB;;AAItD,MAAK,IAAIC,MAAI,GAAGA,OAAK,eAAe,OAAK,GAAG;AAC1C,cAAYA,KAAG,KAAK,YAAYA,MAAI,GAAG,KAAK;AAC5C,cAAYA,KAAG,KAAK;;AAEtB,MAAK,IAAIC,MAAI,GAAGA,OAAK,iBAAiB,OAAK,GAAG;AAC5C,cAAY,GAAGA,OAAK,YAAY,GAAGA,MAAI,KAAK;AAC5C,cAAY,GAAGA,OAAK;;AAItB,MAAK,IAAID,MAAI,GAAGA,OAAK,eAAe,OAAK,EACvC,MAAK,IAAIC,MAAI,GAAGA,OAAK,iBAAiB,OAAK,GAAG;EAC5C,MAAM,QAAQ,YAAYD,MAAI,GAAGC,MAAI,KAAK,kBAAkBD,MAAI,GAAGC,MAAI,EAAE;EACzE,MAAM,YAAY,YAAYD,MAAI,GAAGC,OAAK;EAC1C,MAAM,YAAY,YAAYD,KAAGC,MAAI,KAAK;EAE1C,MAAM,OAAO,KAAK,IAAI,OAAO,WAAW,UAAU;AAClD,cAAYD,KAAGC,OAAK;AACpB,cAAYD,KAAGC,OACb,SAAS,QAAQ,aAAa,SAAS,YAAY,OAAO;;CAKhE,MAAM,SAA0B,EAAE;CAClC,IAAI,IAAI;CACR,IAAI,IAAI;AACR,QAAO,IAAI,KAAK,IAAI,EAClB,KAAI,IAAI,KAAK,IAAI,KAAK,YAAY,GAAG,OAAO,YAAY;EACtD,MAAM,eAAe,IAAI;EACzB,MAAM,cAAc,IAAI;EACxB,MAAM,kBAAkBF,yEACtB,cAAc,cAAc,YAC5B,gBAAgB,aAAa,YAC7B,EACD;AACD,SAAO,QAAQ;GAAE;GAAc;GAAa;GAAiB,CAAC;AAC9D,OAAK;AACL,OAAK;YACI,IAAI,MAAM,MAAM,KAAK,YAAY,GAAG,OAAO,OAAO;AAC3D,SAAO,QAAQ;GACb,cAAc,IAAI;GAClB,aAAa;GACb,iBAAiB;GAClB,CAAC;AACF,OAAK;YACI,IAAI,MAAM,MAAM,KAAK,YAAY,GAAG,OAAO,SAAS;AAE7D,SAAO,QAAQ;GACb,cAAc;GACd,aAAa,IAAI;GACjB,iBAAiB;GAClB,CAAC;AACF,OAAK;;AAGT,QAAO"}
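Again only the typed `names` entries (`"scoreMatrix: number[][]"`, `"traceMatrix: ('diagonal' | 'up' | 'left')[][]"`, `"result: AlignmentPair[]"`) disappear. The embedded source is a Needleman–Wunsch global alignment over document blocks; reading the constants out of it, the dynamic program is:

```latex
S_{i,j} = \max \begin{cases}
  S_{i-1,\,j-1} + \mathrm{match}(i-1,\,j-1) & \text{align the two blocks} \\
  S_{i-1,\,j} + g                           & \text{English block unmatched} \\
  S_{i,\,j-1} + g                           & \text{French block unmatched}
\end{cases}
```

with gap penalty g = -2 and match = typeBonus + lengthBonus + 8 * Jaccard(anchor texts), where typeBonus is 2 when the block types are equal (else 0), lengthBonus is 1 when the shorter block is more than 75% the length of the longer (else 0), and the Jaccard similarity is computed over the anchor texts (the call passes 3, presumably an n-gram size). As the source comment notes, the score is weighted toward anchor similarity; the traceback then yields matched pairs plus one-sided gaps.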
package/dist/cjs/translation-alignment/pipeline.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"pipeline.cjs","names":["segmentDocument","normalizeBlock","englishBlocks: FingerprintedBlock[]","fingerprintBlock","frenchBlocks: FingerprintedBlock[]","planAlignmentActions","alignEnglishAndFrenchBlocks","mapChangedLinesToBlocks","identifySegmentsToReview"],"sources":["../../../src/translation-alignment/pipeline.ts"],"sourcesContent":["import { alignEnglishAndFrenchBlocks } from './alignBlocks';\nimport { fingerprintBlock } from './fingerprintBlock';\nimport { mapChangedLinesToBlocks } from './mapChangedLinesToBlocks';\nimport { normalizeBlock } from './normalizeBlock';\nimport { planAlignmentActions } from './planActions';\nimport {\n identifySegmentsToReview,\n mergeReviewedSegments,\n type SegmentToReview,\n} from './rebuildDocument';\nimport { segmentDocument } from './segmentDocument';\nimport type {\n AlignmentPlan,\n FingerprintedBlock,\n SimilarityOptions,\n} from './types';\n\nexport type BuildAlignmentPlanInput = {\n englishText: string;\n frenchText: string;\n changedLines: number[] | undefined;\n similarityOptions?: Partial<SimilarityOptions>;\n};\n\nexport type BuildAlignmentPlanOutput = {\n englishBlocks: FingerprintedBlock[];\n frenchBlocks: FingerprintedBlock[];\n plan: AlignmentPlan;\n segmentsToReview: SegmentToReview[];\n};\n\nexport const buildAlignmentPlan = ({\n englishText,\n frenchText,\n changedLines,\n similarityOptions,\n}: BuildAlignmentPlanInput): BuildAlignmentPlanOutput => {\n const englishBlocksRaw = segmentDocument(englishText);\n const frenchBlocksRaw = segmentDocument(frenchText);\n\n const englishNormalized = englishBlocksRaw.map(normalizeBlock);\n const frenchNormalized = frenchBlocksRaw.map(normalizeBlock);\n\n const englishBlocks: FingerprintedBlock[] = englishNormalized.map(\n (block, index, array) =>\n fingerprintBlock(\n block,\n array[index - 1] ?? null,\n array[index + 1] ?? null\n )\n );\n const frenchBlocks: FingerprintedBlock[] = frenchNormalized.map(\n (block, index, array) =>\n fingerprintBlock(\n block,\n array[index - 1] ?? null,\n array[index + 1] ?? null\n )\n );\n\n const alignment = alignEnglishAndFrenchBlocks(englishBlocks, frenchBlocks);\n\n const changedIndexes = mapChangedLinesToBlocks(\n englishBlocks,\n Array.isArray(changedLines) ? changedLines : []\n );\n\n const plan = planAlignmentActions(alignment, changedIndexes, {\n minimumMatchForReuse: similarityOptions?.minimumMatchForReuse ?? 0.9,\n minimumMatchForNearDuplicate:\n similarityOptions?.minimumMatchForNearDuplicate ?? 0.8,\n });\n\n const { segmentsToReview } = identifySegmentsToReview({\n englishBlocks,\n frenchBlocks,\n plan,\n });\n\n return { englishBlocks, frenchBlocks, plan, segmentsToReview };\n};\n\nexport { mergeReviewedSegments };\nexport type { SegmentToReview };\n"],"mappings":";;;;;;;;;AA+BA,MAAa,sBAAsB,EACjC,aACA,YACA,cACA,wBACuD;CACvD,MAAM,mBAAmBA,8DAAgB,YAAY;CACrD,MAAM,kBAAkBA,8DAAgB,WAAW;CAEnD,MAAM,oBAAoB,iBAAiB,IAAIC,4DAAe;CAC9D,MAAM,mBAAmB,gBAAgB,IAAIA,4DAAe;CAE5D,MAAMC,gBAAsC,kBAAkB,KAC3D,OAAO,OAAO,UACbC,gEACE,OACA,MAAM,QAAQ,MAAM,MACpB,MAAM,QAAQ,MAAM,KACrB,CACJ;CACD,MAAMC,eAAqC,iBAAiB,KACzD,OAAO,OAAO,UACbD,gEACE,OACA,MAAM,QAAQ,MAAM,MACpB,MAAM,QAAQ,MAAM,KACrB,CACJ;CASD,MAAM,OAAOE,+DAPKC,sEAA4B,eAAe,aAAa,EAEnDC,8EACrB,eACA,MAAM,QAAQ,aAAa,GAAG,eAAe,EAAE,CAChD,EAE4D;EAC3D,sBAAsB,mBAAmB,wBAAwB;EACjE,8BACE,mBAAmB,gCAAgC;EACtD,CAAC;CAEF,MAAM,EAAE,qBAAqBC,uEAAyB;EACpD;EACA;EACA;EACD,CAAC;AAEF,QAAO;EAAE;EAAe;EAAc;EAAM;EAAkB"}
+ {"version":3,"file":"pipeline.cjs","names":["segmentDocument","normalizeBlock","fingerprintBlock","planAlignmentActions","alignEnglishAndFrenchBlocks","mapChangedLinesToBlocks","identifySegmentsToReview"],"sources":["../../../src/translation-alignment/pipeline.ts"],"sourcesContent":["import { alignEnglishAndFrenchBlocks } from './alignBlocks';\nimport { fingerprintBlock } from './fingerprintBlock';\nimport { mapChangedLinesToBlocks } from './mapChangedLinesToBlocks';\nimport { normalizeBlock } from './normalizeBlock';\nimport { planAlignmentActions } from './planActions';\nimport {\n identifySegmentsToReview,\n mergeReviewedSegments,\n type SegmentToReview,\n} from './rebuildDocument';\nimport { segmentDocument } from './segmentDocument';\nimport type {\n AlignmentPlan,\n FingerprintedBlock,\n SimilarityOptions,\n} from './types';\n\nexport type BuildAlignmentPlanInput = {\n englishText: string;\n frenchText: string;\n changedLines: number[] | undefined;\n similarityOptions?: Partial<SimilarityOptions>;\n};\n\nexport type BuildAlignmentPlanOutput = {\n englishBlocks: FingerprintedBlock[];\n frenchBlocks: FingerprintedBlock[];\n plan: AlignmentPlan;\n segmentsToReview: SegmentToReview[];\n};\n\nexport const buildAlignmentPlan = ({\n englishText,\n frenchText,\n changedLines,\n similarityOptions,\n}: BuildAlignmentPlanInput): BuildAlignmentPlanOutput => {\n const englishBlocksRaw = segmentDocument(englishText);\n const frenchBlocksRaw = segmentDocument(frenchText);\n\n const englishNormalized = englishBlocksRaw.map(normalizeBlock);\n const frenchNormalized = frenchBlocksRaw.map(normalizeBlock);\n\n const englishBlocks: FingerprintedBlock[] = englishNormalized.map(\n (block, index, array) =>\n fingerprintBlock(\n block,\n array[index - 1] ?? null,\n array[index + 1] ?? null\n )\n );\n const frenchBlocks: FingerprintedBlock[] = frenchNormalized.map(\n (block, index, array) =>\n fingerprintBlock(\n block,\n array[index - 1] ?? null,\n array[index + 1] ?? null\n )\n );\n\n const alignment = alignEnglishAndFrenchBlocks(englishBlocks, frenchBlocks);\n\n const changedIndexes = mapChangedLinesToBlocks(\n englishBlocks,\n Array.isArray(changedLines) ? changedLines : []\n );\n\n const plan = planAlignmentActions(alignment, changedIndexes, {\n minimumMatchForReuse: similarityOptions?.minimumMatchForReuse ?? 0.9,\n minimumMatchForNearDuplicate:\n similarityOptions?.minimumMatchForNearDuplicate ?? 0.8,\n });\n\n const { segmentsToReview } = identifySegmentsToReview({\n englishBlocks,\n frenchBlocks,\n plan,\n });\n\n return { englishBlocks, frenchBlocks, plan, segmentsToReview };\n};\n\nexport { mergeReviewedSegments };\nexport type { SegmentToReview };\n"],"mappings":";;;;;;;;;AA+BA,MAAa,sBAAsB,EACjC,aACA,YACA,cACA,wBACuD;CACvD,MAAM,mBAAmBA,8DAAgB,YAAY;CACrD,MAAM,kBAAkBA,8DAAgB,WAAW;CAEnD,MAAM,oBAAoB,iBAAiB,IAAIC,4DAAe;CAC9D,MAAM,mBAAmB,gBAAgB,IAAIA,4DAAe;CAE5D,MAAM,gBAAsC,kBAAkB,KAC3D,OAAO,OAAO,UACbC,gEACE,OACA,MAAM,QAAQ,MAAM,MACpB,MAAM,QAAQ,MAAM,KACrB,CACJ;CACD,MAAM,eAAqC,iBAAiB,KACzD,OAAO,OAAO,UACbA,gEACE,OACA,MAAM,QAAQ,MAAM,MACpB,MAAM,QAAQ,MAAM,KACrB,CACJ;CASD,MAAM,OAAOC,+DAPKC,sEAA4B,eAAe,aAAa,EAEnDC,8EACrB,eACA,MAAM,QAAQ,aAAa,GAAG,eAAe,EAAE,CAChD,EAE4D;EAC3D,sBAAsB,mBAAmB,wBAAwB;EACjE,8BACE,mBAAmB,gCAAgC;EACtD,CAAC;CAEF,MAAM,EAAE,qBAAqBC,uEAAyB;EACpD;EACA;EACA;EACD,CAAC;AAEF,QAAO;EAAE;EAAe;EAAc;EAAM;EAAkB"}
package/dist/cjs/translation-alignment/planActions.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"planActions.cjs","names":["actions: PlannedAction[]"],"sources":["../../../src/translation-alignment/planActions.ts"],"sourcesContent":["import type { AlignmentPair, AlignmentPlan, PlannedAction } from './types';\n\nexport const planAlignmentActions = (\n alignment: AlignmentPair[],\n changedEnglishBlockIndexes: Set<number>\n): AlignmentPlan => {\n const actions: PlannedAction[] = [];\n const seenFrench = new Set<number>();\n\n alignment.forEach((pair) => {\n const englishIndex = pair.englishIndex;\n const frenchIndex = pair.frenchIndex;\n\n // Case 1: Deletion (Exists in FR, not in EN)\n if (englishIndex === -1 && frenchIndex !== null) {\n if (!seenFrench.has(frenchIndex)) {\n actions.push({ kind: 'delete', frenchIndex });\n seenFrench.add(frenchIndex);\n }\n return;\n }\n\n // Case 2: New Insertion (Exists in EN, not in FR)\n if (englishIndex >= 0 && frenchIndex === null) {\n actions.push({ kind: 'insert_new', englishIndex });\n return;\n }\n\n // Case 3: Alignment (Exists in both)\n if (englishIndex >= 0 && frenchIndex !== null) {\n const isChanged = changedEnglishBlockIndexes.has(englishIndex);\n\n // If the block is NOT marked as changed by Git, we REUSE it.\n // We assume the existing translation is correct because the source hasn't been touched.\n // We ignore 'similarityScore' here because EN vs UK text will always have low similarity.\n if (!isChanged) {\n actions.push({ kind: 'reuse', englishIndex, frenchIndex });\n } else {\n // If the block IS changed, we normally Review.\n // OPTIONAL: You could add a check here for 'similarityScore > 0.99'\n // to detect whitespace-only changes, but generally, if Git says changed, we Review.\n actions.push({ kind: 'review', englishIndex, frenchIndex });\n }\n\n seenFrench.add(frenchIndex);\n return;\n }\n });\n\n return { actions };\n};\n"],"mappings":";;AAEA,MAAa,wBACX,WACA,+BACkB;CAClB,MAAMA,UAA2B,EAAE;CACnC,MAAM,6BAAa,IAAI,KAAa;AAEpC,WAAU,SAAS,SAAS;EAC1B,MAAM,eAAe,KAAK;EAC1B,MAAM,cAAc,KAAK;AAGzB,MAAI,iBAAiB,MAAM,gBAAgB,MAAM;AAC/C,OAAI,CAAC,WAAW,IAAI,YAAY,EAAE;AAChC,YAAQ,KAAK;KAAE,MAAM;KAAU;KAAa,CAAC;AAC7C,eAAW,IAAI,YAAY;;AAE7B;;AAIF,MAAI,gBAAgB,KAAK,gBAAgB,MAAM;AAC7C,WAAQ,KAAK;IAAE,MAAM;IAAc;IAAc,CAAC;AAClD;;AAIF,MAAI,gBAAgB,KAAK,gBAAgB,MAAM;AAM7C,OAAI,CALc,2BAA2B,IAAI,aAAa,CAM5D,SAAQ,KAAK;IAAE,MAAM;IAAS;IAAc;IAAa,CAAC;OAK1D,SAAQ,KAAK;IAAE,MAAM;IAAU;IAAc;IAAa,CAAC;AAG7D,cAAW,IAAI,YAAY;AAC3B;;GAEF;AAEF,QAAO,EAAE,SAAS"}
+ {"version":3,"file":"planActions.cjs","names":[],"sources":["../../../src/translation-alignment/planActions.ts"],"sourcesContent":["import type { AlignmentPair, AlignmentPlan, PlannedAction } from './types';\n\nexport const planAlignmentActions = (\n alignment: AlignmentPair[],\n changedEnglishBlockIndexes: Set<number>\n): AlignmentPlan => {\n const actions: PlannedAction[] = [];\n const seenFrench = new Set<number>();\n\n alignment.forEach((pair) => {\n const englishIndex = pair.englishIndex;\n const frenchIndex = pair.frenchIndex;\n\n // Case 1: Deletion (Exists in FR, not in EN)\n if (englishIndex === -1 && frenchIndex !== null) {\n if (!seenFrench.has(frenchIndex)) {\n actions.push({ kind: 'delete', frenchIndex });\n seenFrench.add(frenchIndex);\n }\n return;\n }\n\n // Case 2: New Insertion (Exists in EN, not in FR)\n if (englishIndex >= 0 && frenchIndex === null) {\n actions.push({ kind: 'insert_new', englishIndex });\n return;\n }\n\n // Case 3: Alignment (Exists in both)\n if (englishIndex >= 0 && frenchIndex !== null) {\n const isChanged = changedEnglishBlockIndexes.has(englishIndex);\n\n // If the block is NOT marked as changed by Git, we REUSE it.\n // We assume the existing translation is correct because the source hasn't been touched.\n // We ignore 'similarityScore' here because EN vs UK text will always have low similarity.\n if (!isChanged) {\n actions.push({ kind: 'reuse', englishIndex, frenchIndex });\n } else {\n // If the block IS changed, we normally Review.\n // OPTIONAL: You could add a check here for 'similarityScore > 0.99'\n // to detect whitespace-only changes, but generally, if Git says changed, we Review.\n actions.push({ kind: 'review', englishIndex, frenchIndex });\n }\n\n seenFrench.add(frenchIndex);\n return;\n }\n });\n\n return { actions };\n};\n"],"mappings":";;AAEA,MAAa,wBACX,WACA,+BACkB;CAClB,MAAM,UAA2B,EAAE;CACnC,MAAM,6BAAa,IAAI,KAAa;AAEpC,WAAU,SAAS,SAAS;EAC1B,MAAM,eAAe,KAAK;EAC1B,MAAM,cAAc,KAAK;AAGzB,MAAI,iBAAiB,MAAM,gBAAgB,MAAM;AAC/C,OAAI,CAAC,WAAW,IAAI,YAAY,EAAE;AAChC,YAAQ,KAAK;KAAE,MAAM;KAAU;KAAa,CAAC;AAC7C,eAAW,IAAI,YAAY;;AAE7B;;AAIF,MAAI,gBAAgB,KAAK,gBAAgB,MAAM;AAC7C,WAAQ,KAAK;IAAE,MAAM;IAAc;IAAc,CAAC;AAClD;;AAIF,MAAI,gBAAgB,KAAK,gBAAgB,MAAM;AAM7C,OAAI,CALc,2BAA2B,IAAI,aAAa,CAM5D,SAAQ,KAAK;IAAE,MAAM;IAAS;IAAc;IAAa,CAAC;OAK1D,SAAQ,KAAK;IAAE,MAAM;IAAU;IAAc;IAAa,CAAC;AAG7D,cAAW,IAAI,YAAY;AAC3B;;GAEF;AAEF,QAAO,EAAE,SAAS"}
package/dist/cjs/translation-alignment/rebuildDocument.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"rebuildDocument.cjs","names":["segmentsToReview: SegmentToReview[]","outputParts: string[]"],"sources":["../../../src/translation-alignment/rebuildDocument.ts"],"sourcesContent":["import type { AlignmentPlan, FingerprintedBlock } from './types';\n\nexport type SegmentToReview = {\n englishBlock: FingerprintedBlock;\n frenchBlockText: string | null;\n actionIndex: number;\n};\n\nexport type RebuildInput = {\n englishBlocks: FingerprintedBlock[];\n frenchBlocks: FingerprintedBlock[];\n plan: AlignmentPlan;\n};\n\nexport type RebuildResult = {\n segmentsToReview: SegmentToReview[];\n};\n\n/**\n * Analyzes the alignment plan and returns only the segments that need review/translation.\n * Does not generate output text - that's done by mergeReviewedSegments after translation.\n */\nexport const identifySegmentsToReview = ({\n englishBlocks,\n frenchBlocks,\n plan,\n}: RebuildInput): RebuildResult => {\n const segmentsToReview: SegmentToReview[] = [];\n\n plan.actions.forEach((action, actionIndex) => {\n if (action.kind === 'review') {\n const englishBlock = englishBlocks[action.englishIndex];\n const frenchBlockText =\n action.frenchIndex !== null\n ? frenchBlocks[action.frenchIndex].content\n : null;\n\n segmentsToReview.push({ englishBlock, frenchBlockText, actionIndex });\n } else if (action.kind === 'insert_new') {\n const englishBlock = englishBlocks[action.englishIndex];\n\n segmentsToReview.push({\n englishBlock,\n frenchBlockText: null,\n actionIndex,\n });\n }\n });\n\n return { segmentsToReview };\n};\n\n/**\n * Merges reviewed translations back into the final document following the alignment plan.\n */\nexport const mergeReviewedSegments = (\n plan: AlignmentPlan,\n frenchBlocks: FingerprintedBlock[],\n reviewedSegments: Map<number, string>\n): string => {\n const outputParts: string[] = [];\n\n plan.actions.forEach((action, actionIndex) => {\n if (action.kind === 'reuse') {\n outputParts.push(frenchBlocks[action.frenchIndex].content);\n } else if (action.kind === 'review' || action.kind === 'insert_new') {\n const reviewedContent = reviewedSegments.get(actionIndex);\n\n if (reviewedContent !== undefined) {\n outputParts.push(reviewedContent);\n } else {\n // Fallback: if review failed, use existing or blank\n if (action.kind === 'review' && action.frenchIndex !== null) {\n outputParts.push(frenchBlocks[action.frenchIndex].content);\n } else {\n outputParts.push('\\n');\n }\n }\n }\n // \"delete\" actions are simply skipped - no output\n });\n\n return outputParts.join('');\n};\n"],"mappings":";;;;;;AAsBA,MAAa,4BAA4B,EACvC,eACA,cACA,WACiC;CACjC,MAAMA,mBAAsC,EAAE;AAE9C,MAAK,QAAQ,SAAS,QAAQ,gBAAgB;AAC5C,MAAI,OAAO,SAAS,UAAU;GAC5B,MAAM,eAAe,cAAc,OAAO;GAC1C,MAAM,kBACJ,OAAO,gBAAgB,OACnB,aAAa,OAAO,aAAa,UACjC;AAEN,oBAAiB,KAAK;IAAE;IAAc;IAAiB;IAAa,CAAC;aAC5D,OAAO,SAAS,cAAc;GACvC,MAAM,eAAe,cAAc,OAAO;AAE1C,oBAAiB,KAAK;IACpB;IACA,iBAAiB;IACjB;IACD,CAAC;;GAEJ;AAEF,QAAO,EAAE,kBAAkB;;;;;AAM7B,MAAa,yBACX,MACA,cACA,qBACW;CACX,MAAMC,cAAwB,EAAE;AAEhC,MAAK,QAAQ,SAAS,QAAQ,gBAAgB;AAC5C,MAAI,OAAO,SAAS,QAClB,aAAY,KAAK,aAAa,OAAO,aAAa,QAAQ;WACjD,OAAO,SAAS,YAAY,OAAO,SAAS,cAAc;GACnE,MAAM,kBAAkB,iBAAiB,IAAI,YAAY;AAEzD,OAAI,oBAAoB,OACtB,aAAY,KAAK,gBAAgB;YAG7B,OAAO,SAAS,YAAY,OAAO,gBAAgB,KACrD,aAAY,KAAK,aAAa,OAAO,aAAa,QAAQ;OAE1D,aAAY,KAAK,KAAK;;GAK5B;AAEF,QAAO,YAAY,KAAK,GAAG"}
+ {"version":3,"file":"rebuildDocument.cjs","names":[],"sources":["../../../src/translation-alignment/rebuildDocument.ts"],"sourcesContent":["import type { AlignmentPlan, FingerprintedBlock } from './types';\n\nexport type SegmentToReview = {\n englishBlock: FingerprintedBlock;\n frenchBlockText: string | null;\n actionIndex: number;\n};\n\nexport type RebuildInput = {\n englishBlocks: FingerprintedBlock[];\n frenchBlocks: FingerprintedBlock[];\n plan: AlignmentPlan;\n};\n\nexport type RebuildResult = {\n segmentsToReview: SegmentToReview[];\n};\n\n/**\n * Analyzes the alignment plan and returns only the segments that need review/translation.\n * Does not generate output text - that's done by mergeReviewedSegments after translation.\n */\nexport const identifySegmentsToReview = ({\n englishBlocks,\n frenchBlocks,\n plan,\n}: RebuildInput): RebuildResult => {\n const segmentsToReview: SegmentToReview[] = [];\n\n plan.actions.forEach((action, actionIndex) => {\n if (action.kind === 'review') {\n const englishBlock = englishBlocks[action.englishIndex];\n const frenchBlockText =\n action.frenchIndex !== null\n ? frenchBlocks[action.frenchIndex].content\n : null;\n\n segmentsToReview.push({ englishBlock, frenchBlockText, actionIndex });\n } else if (action.kind === 'insert_new') {\n const englishBlock = englishBlocks[action.englishIndex];\n\n segmentsToReview.push({\n englishBlock,\n frenchBlockText: null,\n actionIndex,\n });\n }\n });\n\n return { segmentsToReview };\n};\n\n/**\n * Merges reviewed translations back into the final document following the alignment plan.\n */\nexport const mergeReviewedSegments = (\n plan: AlignmentPlan,\n frenchBlocks: FingerprintedBlock[],\n reviewedSegments: Map<number, string>\n): string => {\n const outputParts: string[] = [];\n\n plan.actions.forEach((action, actionIndex) => {\n if (action.kind === 'reuse') {\n outputParts.push(frenchBlocks[action.frenchIndex].content);\n } else if (action.kind === 'review' || action.kind === 'insert_new') {\n const reviewedContent = reviewedSegments.get(actionIndex);\n\n if (reviewedContent !== undefined) {\n outputParts.push(reviewedContent);\n } else {\n // Fallback: if review failed, use existing or blank\n if (action.kind === 'review' && action.frenchIndex !== null) {\n outputParts.push(frenchBlocks[action.frenchIndex].content);\n } else {\n outputParts.push('\\n');\n }\n }\n }\n // \"delete\" actions are simply skipped - no output\n });\n\n return outputParts.join('');\n};\n"],"mappings":";;;;;;AAsBA,MAAa,4BAA4B,EACvC,eACA,cACA,WACiC;CACjC,MAAM,mBAAsC,EAAE;AAE9C,MAAK,QAAQ,SAAS,QAAQ,gBAAgB;AAC5C,MAAI,OAAO,SAAS,UAAU;GAC5B,MAAM,eAAe,cAAc,OAAO;GAC1C,MAAM,kBACJ,OAAO,gBAAgB,OACnB,aAAa,OAAO,aAAa,UACjC;AAEN,oBAAiB,KAAK;IAAE;IAAc;IAAiB;IAAa,CAAC;aAC5D,OAAO,SAAS,cAAc;GACvC,MAAM,eAAe,cAAc,OAAO;AAE1C,oBAAiB,KAAK;IACpB;IACA,iBAAiB;IACjB;IACD,CAAC;;GAEJ;AAEF,QAAO,EAAE,kBAAkB;;;;;AAM7B,MAAa,yBACX,MACA,cACA,qBACW;CACX,MAAM,cAAwB,EAAE;AAEhC,MAAK,QAAQ,SAAS,QAAQ,gBAAgB;AAC5C,MAAI,OAAO,SAAS,QAClB,aAAY,KAAK,aAAa,OAAO,aAAa,QAAQ;WACjD,OAAO,SAAS,YAAY,OAAO,SAAS,cAAc;GACnE,MAAM,kBAAkB,iBAAiB,IAAI,YAAY;AAEzD,OAAI,oBAAoB,OACtB,aAAY,KAAK,gBAAgB;YAG7B,OAAO,SAAS,YAAY,OAAO,gBAAgB,KACrD,aAAY,KAAK,aAAa,OAAO,aAAa,QAAQ;OAE1D,aAAY,KAAK,KAAK;;GAK5B;AAEF,QAAO,YAAY,KAAK,GAAG"}
@@ -1 +1 @@
- {"version":3,"file":"segmentDocument.cjs","names":["blocks: Block[]","currentSectionLines: string[]","contentLines: string[]"],"sources":["../../../src/translation-alignment/segmentDocument.ts"],"sourcesContent":["import type { Block } from './types';\n\nconst isBlankLine = (line: string): boolean => line.trim().length === 0;\nconst isFencedCodeDelimiter = (line: string): boolean => /^\\s*```/.test(line);\nconst isHeading = (line: string): boolean => /^\\s*#{1,6}\\s+/.test(line);\nconst isFrontmatterDelimiter = (line: string): boolean =>\n /^\\s*---\\s*$/.test(line);\nconst trimTrailingNewlines = (text: string): string =>\n text.replace(/\\n+$/g, '\\n');\n\nexport const segmentDocument = (text: string): Block[] => {\n const lines = text.split('\\n');\n const blocks: Block[] = [];\n\n let index = 0;\n let insideCodeBlock = false;\n\n // Buffers\n let currentSectionLines: string[] = [];\n let currentSectionStartLine = 1;\n\n const flushCurrentSection = (endIndex: number) => {\n if (currentSectionLines.length > 0) {\n // Filter out leading blank lines from the block content to keep it clean,\n // but strictly speaking, we just want to ensure non-empty content.\n const rawContent = currentSectionLines.join('\\n');\n\n if (rawContent.trim().length > 0) {\n blocks.push({\n type: 'paragraph', // Generic type\n content: `${trimTrailingNewlines(rawContent)}\\n`,\n lineStart: currentSectionStartLine,\n lineEnd: endIndex,\n });\n }\n currentSectionLines = [];\n }\n };\n\n while (index < lines.length) {\n const currentLine = lines[index];\n\n // 1. Handle Frontmatter (Must be at start of file)\n if (blocks.length === 0 && isFrontmatterDelimiter(currentLine)) {\n const startLine = index + 1;\n const contentLines: string[] = [currentLine];\n index++;\n\n while (index < lines.length && !isFrontmatterDelimiter(lines[index])) {\n contentLines.push(lines[index]);\n index++;\n }\n\n if (index < lines.length && isFrontmatterDelimiter(lines[index])) {\n contentLines.push(lines[index]);\n index++;\n }\n\n blocks.push({\n type: 'paragraph',\n content: `${trimTrailingNewlines(contentLines.join('\\n'))}\\n`,\n lineStart: startLine,\n lineEnd: index,\n });\n continue;\n }\n\n // 2. Track Code Blocks (Headers inside code blocks are ignored)\n if (isFencedCodeDelimiter(currentLine)) {\n insideCodeBlock = !insideCodeBlock;\n }\n\n const isHeader = !insideCodeBlock && isHeading(currentLine);\n\n // 3. 
Split on Headers\n if (isHeader) {\n // If we have accumulated content, flush it as the previous block\n if (currentSectionLines.length > 0) {\n flushCurrentSection(index);\n }\n // Start a new section with this header\n currentSectionStartLine = index + 1;\n currentSectionLines = [currentLine];\n } else {\n // Accumulate content\n if (currentSectionLines.length === 0 && !isBlankLine(currentLine)) {\n currentSectionStartLine = index + 1;\n }\n currentSectionLines.push(currentLine);\n }\n\n index++;\n }\n\n // Flush remaining content\n flushCurrentSection(index);\n\n return blocks;\n};\n"],"mappings":";;AAEA,MAAM,eAAe,SAA0B,KAAK,MAAM,CAAC,WAAW;AACtE,MAAM,yBAAyB,SAA0B,UAAU,KAAK,KAAK;AAC7E,MAAM,aAAa,SAA0B,gBAAgB,KAAK,KAAK;AACvE,MAAM,0BAA0B,SAC9B,cAAc,KAAK,KAAK;AAC1B,MAAM,wBAAwB,SAC5B,KAAK,QAAQ,SAAS,KAAK;AAE7B,MAAa,mBAAmB,SAA0B;CACxD,MAAM,QAAQ,KAAK,MAAM,KAAK;CAC9B,MAAMA,SAAkB,EAAE;CAE1B,IAAI,QAAQ;CACZ,IAAI,kBAAkB;CAGtB,IAAIC,sBAAgC,EAAE;CACtC,IAAI,0BAA0B;CAE9B,MAAM,uBAAuB,aAAqB;AAChD,MAAI,oBAAoB,SAAS,GAAG;GAGlC,MAAM,aAAa,oBAAoB,KAAK,KAAK;AAEjD,OAAI,WAAW,MAAM,CAAC,SAAS,EAC7B,QAAO,KAAK;IACV,MAAM;IACN,SAAS,GAAG,qBAAqB,WAAW,CAAC;IAC7C,WAAW;IACX,SAAS;IACV,CAAC;AAEJ,yBAAsB,EAAE;;;AAI5B,QAAO,QAAQ,MAAM,QAAQ;EAC3B,MAAM,cAAc,MAAM;AAG1B,MAAI,OAAO,WAAW,KAAK,uBAAuB,YAAY,EAAE;GAC9D,MAAM,YAAY,QAAQ;GAC1B,MAAMC,eAAyB,CAAC,YAAY;AAC5C;AAEA,UAAO,QAAQ,MAAM,UAAU,CAAC,uBAAuB,MAAM,OAAO,EAAE;AACpE,iBAAa,KAAK,MAAM,OAAO;AAC/B;;AAGF,OAAI,QAAQ,MAAM,UAAU,uBAAuB,MAAM,OAAO,EAAE;AAChE,iBAAa,KAAK,MAAM,OAAO;AAC/B;;AAGF,UAAO,KAAK;IACV,MAAM;IACN,SAAS,GAAG,qBAAqB,aAAa,KAAK,KAAK,CAAC,CAAC;IAC1D,WAAW;IACX,SAAS;IACV,CAAC;AACF;;AAIF,MAAI,sBAAsB,YAAY,CACpC,mBAAkB,CAAC;AAMrB,MAHiB,CAAC,mBAAmB,UAAU,YAAY,EAG7C;AAEZ,OAAI,oBAAoB,SAAS,EAC/B,qBAAoB,MAAM;AAG5B,6BAA0B,QAAQ;AAClC,yBAAsB,CAAC,YAAY;SAC9B;AAEL,OAAI,oBAAoB,WAAW,KAAK,CAAC,YAAY,YAAY,CAC/D,2BAA0B,QAAQ;AAEpC,uBAAoB,KAAK,YAAY;;AAGvC;;AAIF,qBAAoB,MAAM;AAE1B,QAAO"}
+ {"version":3,"file":"segmentDocument.cjs","names":[],"sources":["../../../src/translation-alignment/segmentDocument.ts"],"sourcesContent":["import type { Block } from './types';\n\nconst isBlankLine = (line: string): boolean => line.trim().length === 0;\nconst isFencedCodeDelimiter = (line: string): boolean => /^\\s*```/.test(line);\nconst isHeading = (line: string): boolean => /^\\s*#{1,6}\\s+/.test(line);\nconst isFrontmatterDelimiter = (line: string): boolean =>\n /^\\s*---\\s*$/.test(line);\nconst trimTrailingNewlines = (text: string): string =>\n text.replace(/\\n+$/g, '\\n');\n\nexport const segmentDocument = (text: string): Block[] => {\n const lines = text.split('\\n');\n const blocks: Block[] = [];\n\n let index = 0;\n let insideCodeBlock = false;\n\n // Buffers\n let currentSectionLines: string[] = [];\n let currentSectionStartLine = 1;\n\n const flushCurrentSection = (endIndex: number) => {\n if (currentSectionLines.length > 0) {\n // Filter out leading blank lines from the block content to keep it clean,\n // but strictly speaking, we just want to ensure non-empty content.\n const rawContent = currentSectionLines.join('\\n');\n\n if (rawContent.trim().length > 0) {\n blocks.push({\n type: 'paragraph', // Generic type\n content: `${trimTrailingNewlines(rawContent)}\\n`,\n lineStart: currentSectionStartLine,\n lineEnd: endIndex,\n });\n }\n currentSectionLines = [];\n }\n };\n\n while (index < lines.length) {\n const currentLine = lines[index];\n\n // 1. Handle Frontmatter (Must be at start of file)\n if (blocks.length === 0 && isFrontmatterDelimiter(currentLine)) {\n const startLine = index + 1;\n const contentLines: string[] = [currentLine];\n index++;\n\n while (index < lines.length && !isFrontmatterDelimiter(lines[index])) {\n contentLines.push(lines[index]);\n index++;\n }\n\n if (index < lines.length && isFrontmatterDelimiter(lines[index])) {\n contentLines.push(lines[index]);\n index++;\n }\n\n blocks.push({\n type: 'paragraph',\n content: `${trimTrailingNewlines(contentLines.join('\\n'))}\\n`,\n lineStart: startLine,\n lineEnd: index,\n });\n continue;\n }\n\n // 2. Track Code Blocks (Headers inside code blocks are ignored)\n if (isFencedCodeDelimiter(currentLine)) {\n insideCodeBlock = !insideCodeBlock;\n }\n\n const isHeader = !insideCodeBlock && isHeading(currentLine);\n\n // 3. 
Split on Headers\n if (isHeader) {\n // If we have accumulated content, flush it as the previous block\n if (currentSectionLines.length > 0) {\n flushCurrentSection(index);\n }\n // Start a new section with this header\n currentSectionStartLine = index + 1;\n currentSectionLines = [currentLine];\n } else {\n // Accumulate content\n if (currentSectionLines.length === 0 && !isBlankLine(currentLine)) {\n currentSectionStartLine = index + 1;\n }\n currentSectionLines.push(currentLine);\n }\n\n index++;\n }\n\n // Flush remaining content\n flushCurrentSection(index);\n\n return blocks;\n};\n"],"mappings":";;AAEA,MAAM,eAAe,SAA0B,KAAK,MAAM,CAAC,WAAW;AACtE,MAAM,yBAAyB,SAA0B,UAAU,KAAK,KAAK;AAC7E,MAAM,aAAa,SAA0B,gBAAgB,KAAK,KAAK;AACvE,MAAM,0BAA0B,SAC9B,cAAc,KAAK,KAAK;AAC1B,MAAM,wBAAwB,SAC5B,KAAK,QAAQ,SAAS,KAAK;AAE7B,MAAa,mBAAmB,SAA0B;CACxD,MAAM,QAAQ,KAAK,MAAM,KAAK;CAC9B,MAAM,SAAkB,EAAE;CAE1B,IAAI,QAAQ;CACZ,IAAI,kBAAkB;CAGtB,IAAI,sBAAgC,EAAE;CACtC,IAAI,0BAA0B;CAE9B,MAAM,uBAAuB,aAAqB;AAChD,MAAI,oBAAoB,SAAS,GAAG;GAGlC,MAAM,aAAa,oBAAoB,KAAK,KAAK;AAEjD,OAAI,WAAW,MAAM,CAAC,SAAS,EAC7B,QAAO,KAAK;IACV,MAAM;IACN,SAAS,GAAG,qBAAqB,WAAW,CAAC;IAC7C,WAAW;IACX,SAAS;IACV,CAAC;AAEJ,yBAAsB,EAAE;;;AAI5B,QAAO,QAAQ,MAAM,QAAQ;EAC3B,MAAM,cAAc,MAAM;AAG1B,MAAI,OAAO,WAAW,KAAK,uBAAuB,YAAY,EAAE;GAC9D,MAAM,YAAY,QAAQ;GAC1B,MAAM,eAAyB,CAAC,YAAY;AAC5C;AAEA,UAAO,QAAQ,MAAM,UAAU,CAAC,uBAAuB,MAAM,OAAO,EAAE;AACpE,iBAAa,KAAK,MAAM,OAAO;AAC/B;;AAGF,OAAI,QAAQ,MAAM,UAAU,uBAAuB,MAAM,OAAO,EAAE;AAChE,iBAAa,KAAK,MAAM,OAAO;AAC/B;;AAGF,UAAO,KAAK;IACV,MAAM;IACN,SAAS,GAAG,qBAAqB,aAAa,KAAK,KAAK,CAAC,CAAC;IAC1D,WAAW;IACX,SAAS;IACV,CAAC;AACF;;AAIF,MAAI,sBAAsB,YAAY,CACpC,mBAAkB,CAAC;AAMrB,MAHiB,CAAC,mBAAmB,UAAU,YAAY,EAG7C;AAEZ,OAAI,oBAAoB,SAAS,EAC/B,qBAAoB,MAAM;AAG5B,6BAA0B,QAAQ;AAClC,yBAAsB,CAAC,YAAY;SAC9B;AAEL,OAAI,oBAAoB,WAAW,KAAK,CAAC,YAAY,YAAY,CAC/D,2BAA0B,QAAQ;AAEpC,uBAAoB,KAAK,YAAY;;AAGvC;;AAIF,qBAAoB,MAAM;AAE1B,QAAO"}
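The embedded `segmentDocument` walks the file line by line, starting a new block at each Markdown heading while ignoring `#` lines inside fenced code. A toy version of just that splitting rule (a sketch, not the package's API):

```ts
// Toy header splitter: headings open a new block; '#' inside fences is ignored.
const segment = (text: string): string[] => {
  const blocks: string[] = [];
  let current: string[] = [];
  let inCode = false;
  for (const line of text.split('\n')) {
    if (/^\s*```/.test(line)) inCode = !inCode; // toggle fenced-code state
    if (!inCode && /^\s*#{1,6}\s+/.test(line) && current.length > 0) {
      blocks.push(current.join('\n')); // flush the previous section
      current = [];
    }
    current.push(line);
  }
  if (current.length > 0) blocks.push(current.join('\n'));
  return blocks;
};

console.log(segment('# A\ntext\n# B\nmore').length); // -> 2
```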
@@ -1 +1 @@
- {"version":3,"file":"calculateChunks.cjs","names":["lines: ChunkLineResult[]","splittedLines: ChunkLineResult[]","chunk: ChunkLineResult[]","overlapChunks: ChunkLineResult[]"],"sources":["../../../src/utils/calculateChunks.ts"],"sourcesContent":["import { splitTextByLines } from '@intlayer/chokidar';\n\nexport type ChunkLineResult = {\n lineStart: number;\n lineLength: number;\n charStart: number;\n charLength: number;\n content: string;\n};\n\nconst DEFAULT_MAX_CHARS_PER_CHUNK = 800;\nconst DEFAULT_OVERLAP_CHARS = 0;\n\nexport const chunkText = (\n text: string,\n maxCharsPerChunk: number = DEFAULT_MAX_CHARS_PER_CHUNK,\n overlapChars: number = DEFAULT_OVERLAP_CHARS\n): ChunkLineResult[] => {\n if (maxCharsPerChunk <= 0) {\n throw new Error('maxCharsPerChunk must be greater than 0');\n }\n\n const splittedText = splitTextByLines(text);\n\n // Split text into lines to facilitate the translation\n const lines: ChunkLineResult[] = [];\n let charStartAcc = 0;\n\n splittedText.forEach((line, index) => {\n lines.push({\n content: line,\n lineStart: index,\n lineLength: 1,\n charStart: charStartAcc,\n charLength: line.length,\n });\n charStartAcc += line.length;\n });\n\n // Group lines\n // as long as the chunk length is less than maxCharsPerChunk\n // if a line longer than maxCharsPerChunk, keep it alone\n // if a line is not longer than maxCharsPerChunk, it is grouped\n const groupedLines: ChunkLineResult[] = lines.reduce(\n (acc: ChunkLineResult[], line) => {\n // If this line alone exceeds maxCharsPerChunk, keep it separate\n if (line.content.length > maxCharsPerChunk) {\n acc.push(line);\n return acc;\n }\n\n // If we have no chunks yet, start with this line\n if (acc.length === 0) {\n acc.push(line);\n return acc;\n }\n\n // Get the last chunk\n const lastChunk = acc[acc.length - 1];\n\n // Calculate what the combined length would be (including newline character)\n const combinedLength = lastChunk.content.length + line.content.length;\n\n // If combining would exceed the limit, start a new chunk\n if (combinedLength > maxCharsPerChunk) {\n acc.push(line);\n return acc;\n }\n\n // Otherwise, combine with the last chunk\n const combinedContent = lastChunk.content + line.content;\n const updatedChunk = {\n content: combinedContent,\n lineStart: lastChunk.lineStart,\n lineLength: lastChunk.lineLength + line.lineLength,\n charStart: lastChunk.charStart,\n charLength: combinedContent.length,\n };\n\n acc[acc.length - 1] = updatedChunk;\n return acc;\n },\n []\n );\n\n // If one line is longer than maxCharsPerChunk, split it into multiple chunks\n const splittedLines: ChunkLineResult[] = groupedLines.flatMap((line) => {\n const chunk: ChunkLineResult[] = [];\n\n if (line.content.length <= maxCharsPerChunk) {\n chunk.push(line);\n return chunk;\n }\n\n for (let i = 0; i < line.content.length; i += maxCharsPerChunk) {\n const slicedContent = line.content.slice(i, i + maxCharsPerChunk);\n chunk.push({\n content: slicedContent,\n lineStart: line.lineStart,\n lineLength: 1,\n charStart: line.charStart + i,\n charLength: slicedContent.length,\n });\n }\n return chunk;\n });\n\n if (overlapChars === 0) return splittedLines;\n\n const overlapChunks: ChunkLineResult[] =\n splittedLines.length > 0 ? 
[splittedLines[0]] : [];\n\n for (let i = 1; i < splittedLines.length; i++) {\n const previousChunk = splittedLines[i - 1];\n const chunk = splittedLines[i];\n\n const overlapContent = previousChunk.content.slice(-overlapChars);\n const overlapLineNb = splitTextByLines(overlapContent).length;\n\n const overlapContentWithoutPartialLine = overlapContent.slice(\n overlapLineNb > 1 ? overlapContent.indexOf('\\n') + 1 : 0,\n overlapContent.length\n );\n\n const newContent = overlapContentWithoutPartialLine + chunk.content;\n const newLineLength = splitTextByLines(newContent).length;\n const lineDiff = chunk.lineLength - newLineLength;\n\n const overlappedChunk = {\n content: newContent,\n lineStart: chunk.lineStart + lineDiff,\n lineLength: chunk.lineLength - lineDiff,\n charStart: chunk.charStart - overlapContentWithoutPartialLine.length,\n charLength: newContent.length,\n };\n\n overlapChunks.push(overlappedChunk);\n }\n\n return overlapChunks;\n};\n"],"mappings":";;;;AAUA,MAAM,8BAA8B;AACpC,MAAM,wBAAwB;AAE9B,MAAa,aACX,MACA,mBAA2B,6BAC3B,eAAuB,0BACD;AACtB,KAAI,oBAAoB,EACtB,OAAM,IAAI,MAAM,0CAA0C;CAG5D,MAAM,wDAAgC,KAAK;CAG3C,MAAMA,QAA2B,EAAE;CACnC,IAAI,eAAe;AAEnB,cAAa,SAAS,MAAM,UAAU;AACpC,QAAM,KAAK;GACT,SAAS;GACT,WAAW;GACX,YAAY;GACZ,WAAW;GACX,YAAY,KAAK;GAClB,CAAC;AACF,kBAAgB,KAAK;GACrB;CAiDF,MAAMC,gBA3CkC,MAAM,QAC3C,KAAwB,SAAS;AAEhC,MAAI,KAAK,QAAQ,SAAS,kBAAkB;AAC1C,OAAI,KAAK,KAAK;AACd,UAAO;;AAIT,MAAI,IAAI,WAAW,GAAG;AACpB,OAAI,KAAK,KAAK;AACd,UAAO;;EAIT,MAAM,YAAY,IAAI,IAAI,SAAS;AAMnC,MAHuB,UAAU,QAAQ,SAAS,KAAK,QAAQ,SAG1C,kBAAkB;AACrC,OAAI,KAAK,KAAK;AACd,UAAO;;EAIT,MAAM,kBAAkB,UAAU,UAAU,KAAK;EACjD,MAAM,eAAe;GACnB,SAAS;GACT,WAAW,UAAU;GACrB,YAAY,UAAU,aAAa,KAAK;GACxC,WAAW,UAAU;GACrB,YAAY,gBAAgB;GAC7B;AAED,MAAI,IAAI,SAAS,KAAK;AACtB,SAAO;IAET,EAAE,CACH,CAGqD,SAAS,SAAS;EACtE,MAAMC,QAA2B,EAAE;AAEnC,MAAI,KAAK,QAAQ,UAAU,kBAAkB;AAC3C,SAAM,KAAK,KAAK;AAChB,UAAO;;AAGT,OAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,QAAQ,KAAK,kBAAkB;GAC9D,MAAM,gBAAgB,KAAK,QAAQ,MAAM,GAAG,IAAI,iBAAiB;AACjE,SAAM,KAAK;IACT,SAAS;IACT,WAAW,KAAK;IAChB,YAAY;IACZ,WAAW,KAAK,YAAY;IAC5B,YAAY,cAAc;IAC3B,CAAC;;AAEJ,SAAO;GACP;AAEF,KAAI,iBAAiB,EAAG,QAAO;CAE/B,MAAMC,gBACJ,cAAc,SAAS,IAAI,CAAC,cAAc,GAAG,GAAG,EAAE;AAEpD,MAAK,IAAI,IAAI,GAAG,IAAI,cAAc,QAAQ,KAAK;EAC7C,MAAM,gBAAgB,cAAc,IAAI;EACxC,MAAM,QAAQ,cAAc;EAE5B,MAAM,iBAAiB,cAAc,QAAQ,MAAM,CAAC,aAAa;EACjE,MAAM,yDAAiC,eAAe,CAAC;EAEvD,MAAM,mCAAmC,eAAe,MACtD,gBAAgB,IAAI,eAAe,QAAQ,KAAK,GAAG,IAAI,GACvD,eAAe,OAChB;EAED,MAAM,aAAa,mCAAmC,MAAM;EAC5D,MAAM,yDAAiC,WAAW,CAAC;EACnD,MAAM,WAAW,MAAM,aAAa;EAEpC,MAAM,kBAAkB;GACtB,SAAS;GACT,WAAW,MAAM,YAAY;GAC7B,YAAY,MAAM,aAAa;GAC/B,WAAW,MAAM,YAAY,iCAAiC;GAC9D,YAAY,WAAW;GACxB;AAED,gBAAc,KAAK,gBAAgB;;AAGrC,QAAO"}
+ {"version":3,"file":"calculateChunks.cjs","names":[],"sources":["../../../src/utils/calculateChunks.ts"],"sourcesContent":["import { splitTextByLines } from '@intlayer/chokidar';\n\nexport type ChunkLineResult = {\n lineStart: number;\n lineLength: number;\n charStart: number;\n charLength: number;\n content: string;\n};\n\nconst DEFAULT_MAX_CHARS_PER_CHUNK = 800;\nconst DEFAULT_OVERLAP_CHARS = 0;\n\nexport const chunkText = (\n text: string,\n maxCharsPerChunk: number = DEFAULT_MAX_CHARS_PER_CHUNK,\n overlapChars: number = DEFAULT_OVERLAP_CHARS\n): ChunkLineResult[] => {\n if (maxCharsPerChunk <= 0) {\n throw new Error('maxCharsPerChunk must be greater than 0');\n }\n\n const splittedText = splitTextByLines(text);\n\n // Split text into lines to facilitate the translation\n const lines: ChunkLineResult[] = [];\n let charStartAcc = 0;\n\n splittedText.forEach((line, index) => {\n lines.push({\n content: line,\n lineStart: index,\n lineLength: 1,\n charStart: charStartAcc,\n charLength: line.length,\n });\n charStartAcc += line.length;\n });\n\n // Group lines\n // as long as the chunk length is less than maxCharsPerChunk\n // if a line longer than maxCharsPerChunk, keep it alone\n // if a line is not longer than maxCharsPerChunk, it is grouped\n const groupedLines: ChunkLineResult[] = lines.reduce(\n (acc: ChunkLineResult[], line) => {\n // If this line alone exceeds maxCharsPerChunk, keep it separate\n if (line.content.length > maxCharsPerChunk) {\n acc.push(line);\n return acc;\n }\n\n // If we have no chunks yet, start with this line\n if (acc.length === 0) {\n acc.push(line);\n return acc;\n }\n\n // Get the last chunk\n const lastChunk = acc[acc.length - 1];\n\n // Calculate what the combined length would be (including newline character)\n const combinedLength = lastChunk.content.length + line.content.length;\n\n // If combining would exceed the limit, start a new chunk\n if (combinedLength > maxCharsPerChunk) {\n acc.push(line);\n return acc;\n }\n\n // Otherwise, combine with the last chunk\n const combinedContent = lastChunk.content + line.content;\n const updatedChunk = {\n content: combinedContent,\n lineStart: lastChunk.lineStart,\n lineLength: lastChunk.lineLength + line.lineLength,\n charStart: lastChunk.charStart,\n charLength: combinedContent.length,\n };\n\n acc[acc.length - 1] = updatedChunk;\n return acc;\n },\n []\n );\n\n // If one line is longer than maxCharsPerChunk, split it into multiple chunks\n const splittedLines: ChunkLineResult[] = groupedLines.flatMap((line) => {\n const chunk: ChunkLineResult[] = [];\n\n if (line.content.length <= maxCharsPerChunk) {\n chunk.push(line);\n return chunk;\n }\n\n for (let i = 0; i < line.content.length; i += maxCharsPerChunk) {\n const slicedContent = line.content.slice(i, i + maxCharsPerChunk);\n chunk.push({\n content: slicedContent,\n lineStart: line.lineStart,\n lineLength: 1,\n charStart: line.charStart + i,\n charLength: slicedContent.length,\n });\n }\n return chunk;\n });\n\n if (overlapChars === 0) return splittedLines;\n\n const overlapChunks: ChunkLineResult[] =\n splittedLines.length > 0 ? [splittedLines[0]] : [];\n\n for (let i = 1; i < splittedLines.length; i++) {\n const previousChunk = splittedLines[i - 1];\n const chunk = splittedLines[i];\n\n const overlapContent = previousChunk.content.slice(-overlapChars);\n const overlapLineNb = splitTextByLines(overlapContent).length;\n\n const overlapContentWithoutPartialLine = overlapContent.slice(\n overlapLineNb > 1 ? 
overlapContent.indexOf('\\n') + 1 : 0,\n overlapContent.length\n );\n\n const newContent = overlapContentWithoutPartialLine + chunk.content;\n const newLineLength = splitTextByLines(newContent).length;\n const lineDiff = chunk.lineLength - newLineLength;\n\n const overlappedChunk = {\n content: newContent,\n lineStart: chunk.lineStart + lineDiff,\n lineLength: chunk.lineLength - lineDiff,\n charStart: chunk.charStart - overlapContentWithoutPartialLine.length,\n charLength: newContent.length,\n };\n\n overlapChunks.push(overlappedChunk);\n }\n\n return overlapChunks;\n};\n"],"mappings":";;;;AAUA,MAAM,8BAA8B;AACpC,MAAM,wBAAwB;AAE9B,MAAa,aACX,MACA,mBAA2B,6BAC3B,eAAuB,0BACD;AACtB,KAAI,oBAAoB,EACtB,OAAM,IAAI,MAAM,0CAA0C;CAG5D,MAAM,wDAAgC,KAAK;CAG3C,MAAM,QAA2B,EAAE;CACnC,IAAI,eAAe;AAEnB,cAAa,SAAS,MAAM,UAAU;AACpC,QAAM,KAAK;GACT,SAAS;GACT,WAAW;GACX,YAAY;GACZ,WAAW;GACX,YAAY,KAAK;GAClB,CAAC;AACF,kBAAgB,KAAK;GACrB;CAiDF,MAAM,gBA3CkC,MAAM,QAC3C,KAAwB,SAAS;AAEhC,MAAI,KAAK,QAAQ,SAAS,kBAAkB;AAC1C,OAAI,KAAK,KAAK;AACd,UAAO;;AAIT,MAAI,IAAI,WAAW,GAAG;AACpB,OAAI,KAAK,KAAK;AACd,UAAO;;EAIT,MAAM,YAAY,IAAI,IAAI,SAAS;AAMnC,MAHuB,UAAU,QAAQ,SAAS,KAAK,QAAQ,SAG1C,kBAAkB;AACrC,OAAI,KAAK,KAAK;AACd,UAAO;;EAIT,MAAM,kBAAkB,UAAU,UAAU,KAAK;EACjD,MAAM,eAAe;GACnB,SAAS;GACT,WAAW,UAAU;GACrB,YAAY,UAAU,aAAa,KAAK;GACxC,WAAW,UAAU;GACrB,YAAY,gBAAgB;GAC7B;AAED,MAAI,IAAI,SAAS,KAAK;AACtB,SAAO;IAET,EAAE,CACH,CAGqD,SAAS,SAAS;EACtE,MAAM,QAA2B,EAAE;AAEnC,MAAI,KAAK,QAAQ,UAAU,kBAAkB;AAC3C,SAAM,KAAK,KAAK;AAChB,UAAO;;AAGT,OAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,QAAQ,KAAK,kBAAkB;GAC9D,MAAM,gBAAgB,KAAK,QAAQ,MAAM,GAAG,IAAI,iBAAiB;AACjE,SAAM,KAAK;IACT,SAAS;IACT,WAAW,KAAK;IAChB,YAAY;IACZ,WAAW,KAAK,YAAY;IAC5B,YAAY,cAAc;IAC3B,CAAC;;AAEJ,SAAO;GACP;AAEF,KAAI,iBAAiB,EAAG,QAAO;CAE/B,MAAM,gBACJ,cAAc,SAAS,IAAI,CAAC,cAAc,GAAG,GAAG,EAAE;AAEpD,MAAK,IAAI,IAAI,GAAG,IAAI,cAAc,QAAQ,KAAK;EAC7C,MAAM,gBAAgB,cAAc,IAAI;EACxC,MAAM,QAAQ,cAAc;EAE5B,MAAM,iBAAiB,cAAc,QAAQ,MAAM,CAAC,aAAa;EACjE,MAAM,yDAAiC,eAAe,CAAC;EAEvD,MAAM,mCAAmC,eAAe,MACtD,gBAAgB,IAAI,eAAe,QAAQ,KAAK,GAAG,IAAI,GACvD,eAAe,OAChB;EAED,MAAM,aAAa,mCAAmC,MAAM;EAC5D,MAAM,yDAAiC,WAAW,CAAC;EACnD,MAAM,WAAW,MAAM,aAAa;EAEpC,MAAM,kBAAkB;GACtB,SAAS;GACT,WAAW,MAAM,YAAY;GAC7B,YAAY,MAAM,aAAa;GAC/B,WAAW,MAAM,YAAY,iCAAiC;GAC9D,YAAY,WAAW;GACxB;AAED,gBAAc,KAAK,gBAAgB;;AAGrC,QAAO"}
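The grouping rule in the embedded `chunkText` is greedy: lines are appended to the current chunk until the next line would push it past `maxCharsPerChunk`, and a single line longer than the limit stands alone before being sliced. A minimal sketch of that rule:

```ts
// Greedy line grouping (sketch): combine while the budget allows, else start anew.
const groupLines = (lines: string[], maxChars: number): string[] =>
  lines.reduce<string[]>((acc, line) => {
    const last = acc[acc.length - 1];
    if (
      last === undefined || // first line starts the first chunk
      line.length > maxChars || // oversized line kept on its own
      last.length + line.length > maxChars // would overflow the budget
    ) {
      acc.push(line);
    } else {
      acc[acc.length - 1] = last + line; // combine with the last chunk
    }
    return acc;
  }, []);

console.log(groupLines(['ab', 'cd', 'efgh'], 4)); // -> [ 'abcd', 'efgh' ]
```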
@@ -1 +1 @@
- {"version":3,"file":"chunkInference.cjs","names":["lastResult: ChunkInferenceResult","response","fileContent"],"sources":["../../../src/utils/chunkInference.ts"],"sourcesContent":["import type { AIConfig, AIOptions } from '@intlayer/ai';\nimport { getIntlayerAPIProxy, type Messages } from '@intlayer/api';\nimport { retryManager } from '@intlayer/config';\nimport type { IntlayerConfig } from '@intlayer/types';\nimport type { AIClient } from './setupAI';\n\ntype ChunkInferenceResult = {\n fileContent: string;\n tokenUsed: number;\n};\n\n/**\n * Translates a single chunk via the OpenAI API.\n * Includes retry logic if the call fails.\n */\nexport const chunkInference = async (\n messages: Messages,\n aiOptions?: AIOptions,\n configuration?: IntlayerConfig,\n aiClient?: AIClient,\n aiConfig?: AIConfig\n): Promise<ChunkInferenceResult> => {\n let lastResult: ChunkInferenceResult;\n\n await retryManager(async () => {\n if (aiClient && aiConfig) {\n const response = await aiClient.customQuery({\n aiConfig,\n messages,\n });\n\n if (!response) {\n throw new Error('No response from AI API');\n }\n\n const { fileContent, tokenUsed } = response;\n\n lastResult = {\n fileContent: processContent(fileContent),\n tokenUsed,\n };\n\n return;\n }\n\n const api = getIntlayerAPIProxy(undefined, configuration);\n\n const response = await api.ai.customQuery({\n aiOptions,\n messages,\n });\n\n if (!response.data) {\n throw new Error('No response from AI API');\n }\n\n const { fileContent, tokenUsed } = response.data;\n\n lastResult = {\n fileContent: processContent(fileContent),\n tokenUsed,\n };\n })();\n\n return lastResult!;\n};\n\nconst processContent = (content: string) => {\n return content\n .replaceAll('///chunksStart///', '')\n .replaceAll('///chunkStart///', '')\n .replaceAll('///chunksEnd///', '')\n .replaceAll('///chunkEnd///', '')\n .replaceAll('///chunksStart///', '')\n .replaceAll('chunkStart///', '')\n .replaceAll('chunksEnd///', '')\n .replaceAll('chunkEnd///', '')\n .replaceAll('///chunksStart', '')\n .replaceAll('///chunkStart', '')\n .replaceAll('///chunksEnd', '')\n .replaceAll('///chunkEnd', '')\n .replaceAll('chunksStart', '')\n .replaceAll('chunkStart', '')\n .replaceAll('chunksEnd', '')\n .replaceAll('chunkEnd', '');\n};\n"],"mappings":";;;;;;;;;AAeA,MAAa,iBAAiB,OAC5B,UACA,WACA,eACA,UACA,aACkC;CAClC,IAAIA;AAEJ,0CAAmB,YAAY;AAC7B,MAAI,YAAY,UAAU;GACxB,MAAMC,aAAW,MAAM,SAAS,YAAY;IAC1C;IACA;IACD,CAAC;AAEF,OAAI,CAACA,WACH,OAAM,IAAI,MAAM,0BAA0B;GAG5C,MAAM,EAAE,4BAAa,2BAAcA;AAEnC,gBAAa;IACX,aAAa,eAAeC,cAAY;IACxC;IACD;AAED;;EAKF,MAAM,WAAW,6CAFe,QAAW,cAAc,CAE9B,GAAG,YAAY;GACxC;GACA;GACD,CAAC;AAEF,MAAI,CAAC,SAAS,KACZ,OAAM,IAAI,MAAM,0BAA0B;EAG5C,MAAM,EAAE,aAAa,cAAc,SAAS;AAE5C,eAAa;GACX,aAAa,eAAe,YAAY;GACxC;GACD;GACD,EAAE;AAEJ,QAAO;;AAGT,MAAM,kBAAkB,YAAoB;AAC1C,QAAO,QACJ,WAAW,qBAAqB,GAAG,CACnC,WAAW,oBAAoB,GAAG,CAClC,WAAW,mBAAmB,GAAG,CACjC,WAAW,kBAAkB,GAAG,CAChC,WAAW,qBAAqB,GAAG,CACnC,WAAW,iBAAiB,GAAG,CAC/B,WAAW,gBAAgB,GAAG,CAC9B,WAAW,eAAe,GAAG,CAC7B,WAAW,kBAAkB,GAAG,CAChC,WAAW,iBAAiB,GAAG,CAC/B,WAAW,gBAAgB,GAAG,CAC9B,WAAW,eAAe,GAAG,CAC7B,WAAW,eAAe,GAAG,CAC7B,WAAW,cAAc,GAAG,CAC5B,WAAW,aAAa,GAAG,CAC3B,WAAW,YAAY,GAAG"}
+ {"version":3,"file":"chunkInference.cjs","names":["response","fileContent"],"sources":["../../../src/utils/chunkInference.ts"],"sourcesContent":["import type { AIConfig, AIOptions } from '@intlayer/ai';\nimport { getIntlayerAPIProxy, type Messages } from '@intlayer/api';\nimport { retryManager } from '@intlayer/config';\nimport type { IntlayerConfig } from '@intlayer/types';\nimport type { AIClient } from './setupAI';\n\ntype ChunkInferenceResult = {\n fileContent: string;\n tokenUsed: number;\n};\n\n/**\n * Translates a single chunk via the OpenAI API.\n * Includes retry logic if the call fails.\n */\nexport const chunkInference = async (\n messages: Messages,\n aiOptions?: AIOptions,\n configuration?: IntlayerConfig,\n aiClient?: AIClient,\n aiConfig?: AIConfig\n): Promise<ChunkInferenceResult> => {\n let lastResult: ChunkInferenceResult;\n\n await retryManager(async () => {\n if (aiClient && aiConfig) {\n const response = await aiClient.customQuery({\n aiConfig,\n messages,\n });\n\n if (!response) {\n throw new Error('No response from AI API');\n }\n\n const { fileContent, tokenUsed } = response;\n\n lastResult = {\n fileContent: processContent(fileContent),\n tokenUsed,\n };\n\n return;\n }\n\n const api = getIntlayerAPIProxy(undefined, configuration);\n\n const response = await api.ai.customQuery({\n aiOptions,\n messages,\n });\n\n if (!response.data) {\n throw new Error('No response from AI API');\n }\n\n const { fileContent, tokenUsed } = response.data;\n\n lastResult = {\n fileContent: processContent(fileContent),\n tokenUsed,\n };\n })();\n\n return lastResult!;\n};\n\nconst processContent = (content: string) => {\n return content\n .replaceAll('///chunksStart///', '')\n .replaceAll('///chunkStart///', '')\n .replaceAll('///chunksEnd///', '')\n .replaceAll('///chunkEnd///', '')\n .replaceAll('///chunksStart///', '')\n .replaceAll('chunkStart///', '')\n .replaceAll('chunksEnd///', '')\n .replaceAll('chunkEnd///', '')\n .replaceAll('///chunksStart', '')\n .replaceAll('///chunkStart', '')\n .replaceAll('///chunksEnd', '')\n .replaceAll('///chunkEnd', '')\n .replaceAll('chunksStart', '')\n .replaceAll('chunkStart', '')\n .replaceAll('chunksEnd', '')\n .replaceAll('chunkEnd', '');\n};\n"],"mappings":";;;;;;;;;AAeA,MAAa,iBAAiB,OAC5B,UACA,WACA,eACA,UACA,aACkC;CAClC,IAAI;AAEJ,0CAAmB,YAAY;AAC7B,MAAI,YAAY,UAAU;GACxB,MAAMA,aAAW,MAAM,SAAS,YAAY;IAC1C;IACA;IACD,CAAC;AAEF,OAAI,CAACA,WACH,OAAM,IAAI,MAAM,0BAA0B;GAG5C,MAAM,EAAE,4BAAa,2BAAcA;AAEnC,gBAAa;IACX,aAAa,eAAeC,cAAY;IACxC;IACD;AAED;;EAKF,MAAM,WAAW,6CAFe,QAAW,cAAc,CAE9B,GAAG,YAAY;GACxC;GACA;GACD,CAAC;AAEF,MAAI,CAAC,SAAS,KACZ,OAAM,IAAI,MAAM,0BAA0B;EAG5C,MAAM,EAAE,aAAa,cAAc,SAAS;AAE5C,eAAa;GACX,aAAa,eAAe,YAAY;GACxC;GACD;GACD,EAAE;AAEJ,QAAO;;AAGT,MAAM,kBAAkB,YAAoB;AAC1C,QAAO,QACJ,WAAW,qBAAqB,GAAG,CACnC,WAAW,oBAAoB,GAAG,CAClC,WAAW,mBAAmB,GAAG,CACjC,WAAW,kBAAkB,GAAG,CAChC,WAAW,qBAAqB,GAAG,CACnC,WAAW,iBAAiB,GAAG,CAC/B,WAAW,gBAAgB,GAAG,CAC9B,WAAW,eAAe,GAAG,CAC7B,WAAW,kBAAkB,GAAG,CAChC,WAAW,iBAAiB,GAAG,CAC/B,WAAW,gBAAgB,GAAG,CAC9B,WAAW,eAAe,GAAG,CAC7B,WAAW,eAAe,GAAG,CAC7B,WAAW,cAAc,GAAG,CAC5B,WAAW,aAAa,GAAG,CAC3B,WAAW,YAAY,GAAG"}
@@ -1 +1 @@
- {"version":3,"file":"getIsFileUpdatedRecently.cjs","names":["SKIP_RANGE_OF_LAST_UPDATE_TIME: number"],"sources":["../../../src/utils/getIsFileUpdatedRecently.ts"],"sourcesContent":["import { statSync } from 'node:fs';\n\nconst SKIP_RANGE_OF_LAST_UPDATE_TIME: number = 0; //2 * 60 * 60 * 1000; // 2 hours\n\n/**\n * Check if file was updated recently, to skip re-translation\n */\nexport const getIsFileUpdatedRecently = (localeFilePath: string): boolean => {\n const stats = statSync(localeFilePath);\n const lastModified = new Date(stats.mtime);\n const threshold = new Date(Date.now() - SKIP_RANGE_OF_LAST_UPDATE_TIME);\n\n return lastModified > threshold;\n};\n"],"mappings":";;;;AAEA,MAAMA,iCAAyC;;;;AAK/C,MAAa,4BAA4B,mBAAoC;CAC3E,MAAM,8BAAiB,eAAe;AAItC,QAHqB,IAAI,KAAK,MAAM,MAAM,GACxB,IAAI,KAAK,KAAK,KAAK,GAAG,+BAA+B"}
+ {"version":3,"file":"getIsFileUpdatedRecently.cjs","names":[],"sources":["../../../src/utils/getIsFileUpdatedRecently.ts"],"sourcesContent":["import { statSync } from 'node:fs';\n\nconst SKIP_RANGE_OF_LAST_UPDATE_TIME: number = 0; //2 * 60 * 60 * 1000; // 2 hours\n\n/**\n * Check if file was updated recently, to skip re-translation\n */\nexport const getIsFileUpdatedRecently = (localeFilePath: string): boolean => {\n const stats = statSync(localeFilePath);\n const lastModified = new Date(stats.mtime);\n const threshold = new Date(Date.now() - SKIP_RANGE_OF_LAST_UPDATE_TIME);\n\n return lastModified > threshold;\n};\n"],"mappings":";;;;AAEA,MAAM,iCAAyC;;;;AAK/C,MAAa,4BAA4B,mBAAoC;CAC3E,MAAM,8BAAiB,eAAe;AAItC,QAHqB,IAAI,KAAK,MAAM,MAAM,GACxB,IAAI,KAAK,KAAK,KAAK,GAAG,+BAA+B"}
@@ -1 +1 @@
- {"version":3,"file":"listSpecialChars.cjs","names":["results: ListCharResult"],"sources":["../../../src/utils/listSpecialChars.ts"],"sourcesContent":["type ListCharResult = {\n char: string;\n /** First line index contained in this chunk (0-based) */\n lineStart: number;\n /** Start character index in the original text (0-based, inclusive)*/\n charStart: number;\n}[];\n\nconst SPECIAL_CHARS = [\n ' ',\n '\\\\',\n '|',\n '(',\n ')',\n '{',\n '}',\n '[',\n ']',\n '<',\n '>',\n '\"',\n '=',\n '+',\n '*',\n '&',\n '#',\n '%',\n '$',\n '!',\n '?',\n ':',\n ';',\n '~',\n];\n\nexport const listSpecialChars = (text: string): ListCharResult => {\n const results: ListCharResult = [];\n\n let lineIndex = 0;\n\n for (let i = 0; i < text.length; i++) {\n const currentChar = text[i];\n\n // Handle newline characters (\"\\n\"): treat them as a \"\\\\\" special char\n if (currentChar === '\\n') {\n results.push({\n char: '\\\\',\n lineStart: lineIndex,\n charStart: i,\n });\n\n // Move to the next line after recording the special char\n lineIndex++;\n continue;\n }\n\n // Check if the current character is one of the special characters\n if (SPECIAL_CHARS.includes(currentChar)) {\n results.push({\n char: currentChar,\n lineStart: lineIndex,\n charStart: i,\n });\n }\n }\n\n return results;\n};\n"],"mappings":";;AAQA,MAAM,gBAAgB;CACpB;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD;AAED,MAAa,oBAAoB,SAAiC;CAChE,MAAMA,UAA0B,EAAE;CAElC,IAAI,YAAY;AAEhB,MAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;EACpC,MAAM,cAAc,KAAK;AAGzB,MAAI,gBAAgB,MAAM;AACxB,WAAQ,KAAK;IACX,MAAM;IACN,WAAW;IACX,WAAW;IACZ,CAAC;AAGF;AACA;;AAIF,MAAI,cAAc,SAAS,YAAY,CACrC,SAAQ,KAAK;GACX,MAAM;GACN,WAAW;GACX,WAAW;GACZ,CAAC;;AAIN,QAAO"}
+ {"version":3,"file":"listSpecialChars.cjs","names":[],"sources":["../../../src/utils/listSpecialChars.ts"],"sourcesContent":["type ListCharResult = {\n char: string;\n /** First line index contained in this chunk (0-based) */\n lineStart: number;\n /** Start character index in the original text (0-based, inclusive)*/\n charStart: number;\n}[];\n\nconst SPECIAL_CHARS = [\n ' ',\n '\\\\',\n '|',\n '(',\n ')',\n '{',\n '}',\n '[',\n ']',\n '<',\n '>',\n '\"',\n '=',\n '+',\n '*',\n '&',\n '#',\n '%',\n '$',\n '!',\n '?',\n ':',\n ';',\n '~',\n];\n\nexport const listSpecialChars = (text: string): ListCharResult => {\n const results: ListCharResult = [];\n\n let lineIndex = 0;\n\n for (let i = 0; i < text.length; i++) {\n const currentChar = text[i];\n\n // Handle newline characters (\"\\n\"): treat them as a \"\\\\\" special char\n if (currentChar === '\\n') {\n results.push({\n char: '\\\\',\n lineStart: lineIndex,\n charStart: i,\n });\n\n // Move to the next line after recording the special char\n lineIndex++;\n continue;\n }\n\n // Check if the current character is one of the special characters\n if (SPECIAL_CHARS.includes(currentChar)) {\n results.push({\n char: currentChar,\n lineStart: lineIndex,\n charStart: i,\n });\n }\n }\n\n return results;\n};\n"],"mappings":";;AAQA,MAAM,gBAAgB;CACpB;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD;AAED,MAAa,oBAAoB,SAAiC;CAChE,MAAM,UAA0B,EAAE;CAElC,IAAI,YAAY;AAEhB,MAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;EACpC,MAAM,cAAc,KAAK;AAGzB,MAAI,gBAAgB,MAAM;AACxB,WAAQ,KAAK;IACX,MAAM;IACN,WAAW;IACX,WAAW;IACZ,CAAC;AAGF;AACA;;AAIF,MAAI,cAAc,SAAS,YAAY,CACrC,SAAQ,KAAK;GACX,MAAM;GACN,WAAW;GACX,WAAW;GACZ,CAAC;;AAIN,QAAO"}
@@ -1 +1 @@
- {"version":3,"file":"mapChunksBetweenFiles.cjs","names":["chunkText","updatedChunk: ChunkLineResult","dp: number[][]","i","j","mappedLines: number[]"],"sources":["../../../src/utils/mapChunksBetweenFiles.ts"],"sourcesContent":["import { splitTextByLines } from '@intlayer/chokidar';\nimport { type ChunkLineResult, chunkText } from './calculateChunks';\n\nexport interface ChunkMapping {\n baseChunk: ChunkLineResult;\n updatedChunk: ChunkLineResult | null; // null if the chunk was deleted\n hasChanges: boolean;\n}\n\n/**\n * Maps chunks from base file to corresponding chunks in updated file,\n * handling insertions, deletions, and modifications properly.\n */\nexport const mapChunksBetweenFiles = (\n baseFileContent: string,\n updatedFileContent: string,\n maxCharsPerChunk: number = 800,\n changedLines?: number[]\n): ChunkMapping[] => {\n const baseChunks = chunkText(baseFileContent, maxCharsPerChunk, 0);\n const baseLines = splitTextByLines(baseFileContent);\n const updatedLines = splitTextByLines(updatedFileContent);\n\n // Create a simple line mapping using LCS (Longest Common Subsequence) approach\n const lineMapping = createLineMapping(baseLines, updatedLines);\n\n return baseChunks.map((baseChunk): ChunkMapping => {\n // Map the base chunk's line range to the updated file\n const mappedRange = mapLineRange(\n baseChunk.lineStart,\n baseChunk.lineLength,\n lineMapping\n );\n\n if (!mappedRange) {\n // This chunk was completely deleted\n return {\n baseChunk,\n updatedChunk: null,\n hasChanges: true,\n };\n }\n\n // Create the corresponding chunk in the updated file\n const updatedChunk: ChunkLineResult = {\n lineStart: mappedRange.start,\n lineLength: mappedRange.length,\n charStart: 0, // Will be calculated when needed\n charLength: 0, // Will be calculated when needed\n content: extractLinesFromRange(\n updatedLines,\n mappedRange.start,\n mappedRange.length\n ),\n };\n\n // Calculate character positions\n updatedChunk.charStart = getCharStartForLine(\n updatedFileContent,\n updatedChunk.lineStart\n );\n updatedChunk.charLength = updatedChunk.content.length;\n\n // Determine if this chunk has changes\n const hasChanges = determineIfChunkHasChanges(\n baseChunk,\n updatedChunk,\n changedLines\n );\n\n return {\n baseChunk,\n updatedChunk,\n hasChanges,\n };\n });\n};\n\n/**\n * Creates a mapping between line numbers in base file and updated file\n * Returns a map where key = base line number, value = updated line number (or null if deleted)\n */\nconst createLineMapping = (\n baseLines: string[],\n updatedLines: string[]\n): Map<number, number | null> => {\n const mapping = new Map<number, number | null>();\n\n // Use a simple diff algorithm (similar to Myers algorithm but simplified)\n const dp: number[][] = Array(baseLines.length + 1)\n .fill(null)\n .map(() => Array(updatedLines.length + 1).fill(0));\n\n // Fill the DP table\n for (let i = 1; i <= baseLines.length; i++) {\n for (let j = 1; j <= updatedLines.length; j++) {\n if (baseLines[i - 1] === updatedLines[j - 1]) {\n dp[i][j] = dp[i - 1][j - 1] + 1;\n } else {\n dp[i][j] = Math.max(dp[i - 1][j], dp[i][j - 1]);\n }\n }\n }\n\n // Backtrack to create the mapping\n let i = baseLines.length;\n let j = updatedLines.length;\n\n while (i > 0 || j > 0) {\n if (i > 0 && j > 0 && baseLines[i - 1] === updatedLines[j - 1]) {\n // Lines match\n mapping.set(i - 1, j - 1);\n i--;\n j--;\n } else if (i > 0 && (j === 0 || dp[i - 1][j] >= dp[i][j - 1])) {\n // Line was deleted from base\n mapping.set(i - 1, null);\n i--;\n } else {\n // Line 
was added to updated (no mapping needed for base)\n j--;\n }\n }\n\n return mapping;\n};\n\n/**\n * Maps a line range from base file to updated file using the line mapping\n */\nconst mapLineRange = (\n baseStart: number,\n baseLength: number,\n lineMapping: Map<number, number | null>\n): { start: number; length: number } | null => {\n const mappedLines: number[] = [];\n\n for (let i = baseStart; i < baseStart + baseLength; i++) {\n const mappedLine = lineMapping.get(i);\n if (mappedLine !== null && mappedLine !== undefined) {\n mappedLines.push(mappedLine);\n }\n }\n\n if (mappedLines.length === 0) {\n return null; // All lines were deleted\n }\n\n // Find the continuous range in the mapped lines\n mappedLines.sort((a, b) => a - b);\n const start = mappedLines[0];\n const end = mappedLines[mappedLines.length - 1];\n\n return {\n start,\n length: end - start + 1,\n };\n};\n\n/**\n * Extracts lines from a range in the lines array\n */\nconst extractLinesFromRange = (\n lines: string[],\n start: number,\n length: number\n): string => {\n const endIndex = Math.min(start + length, lines.length);\n return lines.slice(start, endIndex).join('');\n};\n\n/**\n * Gets the character position where a line starts in the text\n */\nconst getCharStartForLine = (text: string, lineNumber: number): number => {\n const lines = splitTextByLines(text);\n let charStart = 0;\n\n for (let i = 0; i < Math.min(lineNumber, lines.length); i++) {\n charStart += lines[i].length;\n }\n\n return charStart;\n};\n\n/**\n * Determines if a chunk has changes based on git changed lines or content comparison\n */\nconst determineIfChunkHasChanges = (\n baseChunk: ChunkLineResult,\n updatedChunk: ChunkLineResult,\n changedLines?: number[]\n): boolean => {\n // If we have git changed lines, use them for precise detection\n if (changedLines && changedLines.length > 0) {\n return changedLines.some(\n (line) =>\n line >= updatedChunk.lineStart &&\n line < updatedChunk.lineStart + updatedChunk.lineLength\n );\n }\n\n // Fallback to content comparison\n return baseChunk.content !== 
updatedChunk.content;\n};\n"],"mappings":";;;;;;;;;AAaA,MAAa,yBACX,iBACA,oBACA,mBAA2B,KAC3B,iBACmB;CACnB,MAAM,aAAaA,wCAAU,iBAAiB,kBAAkB,EAAE;CAClE,MAAM,qDAA6B,gBAAgB;CACnD,MAAM,wDAAgC,mBAAmB;CAGzD,MAAM,cAAc,kBAAkB,WAAW,aAAa;AAE9D,QAAO,WAAW,KAAK,cAA4B;EAEjD,MAAM,cAAc,aAClB,UAAU,WACV,UAAU,YACV,YACD;AAED,MAAI,CAAC,YAEH,QAAO;GACL;GACA,cAAc;GACd,YAAY;GACb;EAIH,MAAMC,eAAgC;GACpC,WAAW,YAAY;GACvB,YAAY,YAAY;GACxB,WAAW;GACX,YAAY;GACZ,SAAS,sBACP,cACA,YAAY,OACZ,YAAY,OACb;GACF;AAGD,eAAa,YAAY,oBACvB,oBACA,aAAa,UACd;AACD,eAAa,aAAa,aAAa,QAAQ;AAS/C,SAAO;GACL;GACA;GACA,YATiB,2BACjB,WACA,cACA,aACD;GAMA;GACD;;;;;;AAOJ,MAAM,qBACJ,WACA,iBAC+B;CAC/B,MAAM,0BAAU,IAAI,KAA4B;CAGhD,MAAMC,KAAiB,MAAM,UAAU,SAAS,EAAE,CAC/C,KAAK,KAAK,CACV,UAAU,MAAM,aAAa,SAAS,EAAE,CAAC,KAAK,EAAE,CAAC;AAGpD,MAAK,IAAIC,MAAI,GAAGA,OAAK,UAAU,QAAQ,MACrC,MAAK,IAAIC,MAAI,GAAGA,OAAK,aAAa,QAAQ,MACxC,KAAI,UAAUD,MAAI,OAAO,aAAaC,MAAI,GACxC,IAAGD,KAAGC,OAAK,GAAGD,MAAI,GAAGC,MAAI,KAAK;KAE9B,IAAGD,KAAGC,OAAK,KAAK,IAAI,GAAGD,MAAI,GAAGC,MAAI,GAAGD,KAAGC,MAAI,GAAG;CAMrD,IAAI,IAAI,UAAU;CAClB,IAAI,IAAI,aAAa;AAErB,QAAO,IAAI,KAAK,IAAI,EAClB,KAAI,IAAI,KAAK,IAAI,KAAK,UAAU,IAAI,OAAO,aAAa,IAAI,IAAI;AAE9D,UAAQ,IAAI,IAAI,GAAG,IAAI,EAAE;AACzB;AACA;YACS,IAAI,MAAM,MAAM,KAAK,GAAG,IAAI,GAAG,MAAM,GAAG,GAAG,IAAI,KAAK;AAE7D,UAAQ,IAAI,IAAI,GAAG,KAAK;AACxB;OAGA;AAIJ,QAAO;;;;;AAMT,MAAM,gBACJ,WACA,YACA,gBAC6C;CAC7C,MAAMC,cAAwB,EAAE;AAEhC,MAAK,IAAI,IAAI,WAAW,IAAI,YAAY,YAAY,KAAK;EACvD,MAAM,aAAa,YAAY,IAAI,EAAE;AACrC,MAAI,eAAe,QAAQ,eAAe,OACxC,aAAY,KAAK,WAAW;;AAIhC,KAAI,YAAY,WAAW,EACzB,QAAO;AAIT,aAAY,MAAM,GAAG,MAAM,IAAI,EAAE;CACjC,MAAM,QAAQ,YAAY;AAG1B,QAAO;EACL;EACA,QAJU,YAAY,YAAY,SAAS,KAI7B,QAAQ;EACvB;;;;;AAMH,MAAM,yBACJ,OACA,OACA,WACW;CACX,MAAM,WAAW,KAAK,IAAI,QAAQ,QAAQ,MAAM,OAAO;AACvD,QAAO,MAAM,MAAM,OAAO,SAAS,CAAC,KAAK,GAAG;;;;;AAM9C,MAAM,uBAAuB,MAAc,eAA+B;CACxE,MAAM,iDAAyB,KAAK;CACpC,IAAI,YAAY;AAEhB,MAAK,IAAI,IAAI,GAAG,IAAI,KAAK,IAAI,YAAY,MAAM,OAAO,EAAE,IACtD,cAAa,MAAM,GAAG;AAGxB,QAAO;;;;;AAMT,MAAM,8BACJ,WACA,cACA,iBACY;AAEZ,KAAI,gBAAgB,aAAa,SAAS,EACxC,QAAO,aAAa,MACjB,SACC,QAAQ,aAAa,aACrB,OAAO,aAAa,YAAY,aAAa,WAChD;AAIH,QAAO,UAAU,YAAY,aAAa"}
+ {"version":3,"file":"mapChunksBetweenFiles.cjs","names":["chunkText","i","j"],"sources":["../../../src/utils/mapChunksBetweenFiles.ts"],"sourcesContent":["import { splitTextByLines } from '@intlayer/chokidar';\nimport { type ChunkLineResult, chunkText } from './calculateChunks';\n\nexport interface ChunkMapping {\n baseChunk: ChunkLineResult;\n updatedChunk: ChunkLineResult | null; // null if the chunk was deleted\n hasChanges: boolean;\n}\n\n/**\n * Maps chunks from base file to corresponding chunks in updated file,\n * handling insertions, deletions, and modifications properly.\n */\nexport const mapChunksBetweenFiles = (\n baseFileContent: string,\n updatedFileContent: string,\n maxCharsPerChunk: number = 800,\n changedLines?: number[]\n): ChunkMapping[] => {\n const baseChunks = chunkText(baseFileContent, maxCharsPerChunk, 0);\n const baseLines = splitTextByLines(baseFileContent);\n const updatedLines = splitTextByLines(updatedFileContent);\n\n // Create a simple line mapping using LCS (Longest Common Subsequence) approach\n const lineMapping = createLineMapping(baseLines, updatedLines);\n\n return baseChunks.map((baseChunk): ChunkMapping => {\n // Map the base chunk's line range to the updated file\n const mappedRange = mapLineRange(\n baseChunk.lineStart,\n baseChunk.lineLength,\n lineMapping\n );\n\n if (!mappedRange) {\n // This chunk was completely deleted\n return {\n baseChunk,\n updatedChunk: null,\n hasChanges: true,\n };\n }\n\n // Create the corresponding chunk in the updated file\n const updatedChunk: ChunkLineResult = {\n lineStart: mappedRange.start,\n lineLength: mappedRange.length,\n charStart: 0, // Will be calculated when needed\n charLength: 0, // Will be calculated when needed\n content: extractLinesFromRange(\n updatedLines,\n mappedRange.start,\n mappedRange.length\n ),\n };\n\n // Calculate character positions\n updatedChunk.charStart = getCharStartForLine(\n updatedFileContent,\n updatedChunk.lineStart\n );\n updatedChunk.charLength = updatedChunk.content.length;\n\n // Determine if this chunk has changes\n const hasChanges = determineIfChunkHasChanges(\n baseChunk,\n updatedChunk,\n changedLines\n );\n\n return {\n baseChunk,\n updatedChunk,\n hasChanges,\n };\n });\n};\n\n/**\n * Creates a mapping between line numbers in base file and updated file\n * Returns a map where key = base line number, value = updated line number (or null if deleted)\n */\nconst createLineMapping = (\n baseLines: string[],\n updatedLines: string[]\n): Map<number, number | null> => {\n const mapping = new Map<number, number | null>();\n\n // Use a simple diff algorithm (similar to Myers algorithm but simplified)\n const dp: number[][] = Array(baseLines.length + 1)\n .fill(null)\n .map(() => Array(updatedLines.length + 1).fill(0));\n\n // Fill the DP table\n for (let i = 1; i <= baseLines.length; i++) {\n for (let j = 1; j <= updatedLines.length; j++) {\n if (baseLines[i - 1] === updatedLines[j - 1]) {\n dp[i][j] = dp[i - 1][j - 1] + 1;\n } else {\n dp[i][j] = Math.max(dp[i - 1][j], dp[i][j - 1]);\n }\n }\n }\n\n // Backtrack to create the mapping\n let i = baseLines.length;\n let j = updatedLines.length;\n\n while (i > 0 || j > 0) {\n if (i > 0 && j > 0 && baseLines[i - 1] === updatedLines[j - 1]) {\n // Lines match\n mapping.set(i - 1, j - 1);\n i--;\n j--;\n } else if (i > 0 && (j === 0 || dp[i - 1][j] >= dp[i][j - 1])) {\n // Line was deleted from base\n mapping.set(i - 1, null);\n i--;\n } else {\n // Line was added to updated (no mapping needed for base)\n j--;\n }\n }\n\n 
return mapping;\n};\n\n/**\n * Maps a line range from base file to updated file using the line mapping\n */\nconst mapLineRange = (\n baseStart: number,\n baseLength: number,\n lineMapping: Map<number, number | null>\n): { start: number; length: number } | null => {\n const mappedLines: number[] = [];\n\n for (let i = baseStart; i < baseStart + baseLength; i++) {\n const mappedLine = lineMapping.get(i);\n if (mappedLine !== null && mappedLine !== undefined) {\n mappedLines.push(mappedLine);\n }\n }\n\n if (mappedLines.length === 0) {\n return null; // All lines were deleted\n }\n\n // Find the continuous range in the mapped lines\n mappedLines.sort((a, b) => a - b);\n const start = mappedLines[0];\n const end = mappedLines[mappedLines.length - 1];\n\n return {\n start,\n length: end - start + 1,\n };\n};\n\n/**\n * Extracts lines from a range in the lines array\n */\nconst extractLinesFromRange = (\n lines: string[],\n start: number,\n length: number\n): string => {\n const endIndex = Math.min(start + length, lines.length);\n return lines.slice(start, endIndex).join('');\n};\n\n/**\n * Gets the character position where a line starts in the text\n */\nconst getCharStartForLine = (text: string, lineNumber: number): number => {\n const lines = splitTextByLines(text);\n let charStart = 0;\n\n for (let i = 0; i < Math.min(lineNumber, lines.length); i++) {\n charStart += lines[i].length;\n }\n\n return charStart;\n};\n\n/**\n * Determines if a chunk has changes based on git changed lines or content comparison\n */\nconst determineIfChunkHasChanges = (\n baseChunk: ChunkLineResult,\n updatedChunk: ChunkLineResult,\n changedLines?: number[]\n): boolean => {\n // If we have git changed lines, use them for precise detection\n if (changedLines && changedLines.length > 0) {\n return changedLines.some(\n (line) =>\n line >= updatedChunk.lineStart &&\n line < updatedChunk.lineStart + updatedChunk.lineLength\n );\n }\n\n // Fallback to content comparison\n return baseChunk.content !== 
updatedChunk.content;\n};\n"],"mappings":";;;;;;;;;AAaA,MAAa,yBACX,iBACA,oBACA,mBAA2B,KAC3B,iBACmB;CACnB,MAAM,aAAaA,wCAAU,iBAAiB,kBAAkB,EAAE;CAClE,MAAM,qDAA6B,gBAAgB;CACnD,MAAM,wDAAgC,mBAAmB;CAGzD,MAAM,cAAc,kBAAkB,WAAW,aAAa;AAE9D,QAAO,WAAW,KAAK,cAA4B;EAEjD,MAAM,cAAc,aAClB,UAAU,WACV,UAAU,YACV,YACD;AAED,MAAI,CAAC,YAEH,QAAO;GACL;GACA,cAAc;GACd,YAAY;GACb;EAIH,MAAM,eAAgC;GACpC,WAAW,YAAY;GACvB,YAAY,YAAY;GACxB,WAAW;GACX,YAAY;GACZ,SAAS,sBACP,cACA,YAAY,OACZ,YAAY,OACb;GACF;AAGD,eAAa,YAAY,oBACvB,oBACA,aAAa,UACd;AACD,eAAa,aAAa,aAAa,QAAQ;AAS/C,SAAO;GACL;GACA;GACA,YATiB,2BACjB,WACA,cACA,aACD;GAMA;GACD;;;;;;AAOJ,MAAM,qBACJ,WACA,iBAC+B;CAC/B,MAAM,0BAAU,IAAI,KAA4B;CAGhD,MAAM,KAAiB,MAAM,UAAU,SAAS,EAAE,CAC/C,KAAK,KAAK,CACV,UAAU,MAAM,aAAa,SAAS,EAAE,CAAC,KAAK,EAAE,CAAC;AAGpD,MAAK,IAAIC,MAAI,GAAGA,OAAK,UAAU,QAAQ,MACrC,MAAK,IAAIC,MAAI,GAAGA,OAAK,aAAa,QAAQ,MACxC,KAAI,UAAUD,MAAI,OAAO,aAAaC,MAAI,GACxC,IAAGD,KAAGC,OAAK,GAAGD,MAAI,GAAGC,MAAI,KAAK;KAE9B,IAAGD,KAAGC,OAAK,KAAK,IAAI,GAAGD,MAAI,GAAGC,MAAI,GAAGD,KAAGC,MAAI,GAAG;CAMrD,IAAI,IAAI,UAAU;CAClB,IAAI,IAAI,aAAa;AAErB,QAAO,IAAI,KAAK,IAAI,EAClB,KAAI,IAAI,KAAK,IAAI,KAAK,UAAU,IAAI,OAAO,aAAa,IAAI,IAAI;AAE9D,UAAQ,IAAI,IAAI,GAAG,IAAI,EAAE;AACzB;AACA;YACS,IAAI,MAAM,MAAM,KAAK,GAAG,IAAI,GAAG,MAAM,GAAG,GAAG,IAAI,KAAK;AAE7D,UAAQ,IAAI,IAAI,GAAG,KAAK;AACxB;OAGA;AAIJ,QAAO;;;;;AAMT,MAAM,gBACJ,WACA,YACA,gBAC6C;CAC7C,MAAM,cAAwB,EAAE;AAEhC,MAAK,IAAI,IAAI,WAAW,IAAI,YAAY,YAAY,KAAK;EACvD,MAAM,aAAa,YAAY,IAAI,EAAE;AACrC,MAAI,eAAe,QAAQ,eAAe,OACxC,aAAY,KAAK,WAAW;;AAIhC,KAAI,YAAY,WAAW,EACzB,QAAO;AAIT,aAAY,MAAM,GAAG,MAAM,IAAI,EAAE;CACjC,MAAM,QAAQ,YAAY;AAG1B,QAAO;EACL;EACA,QAJU,YAAY,YAAY,SAAS,KAI7B,QAAQ;EACvB;;;;;AAMH,MAAM,yBACJ,OACA,OACA,WACW;CACX,MAAM,WAAW,KAAK,IAAI,QAAQ,QAAQ,MAAM,OAAO;AACvD,QAAO,MAAM,MAAM,OAAO,SAAS,CAAC,KAAK,GAAG;;;;;AAM9C,MAAM,uBAAuB,MAAc,eAA+B;CACxE,MAAM,iDAAyB,KAAK;CACpC,IAAI,YAAY;AAEhB,MAAK,IAAI,IAAI,GAAG,IAAI,KAAK,IAAI,YAAY,MAAM,OAAO,EAAE,IACtD,cAAa,MAAM,GAAG;AAGxB,QAAO;;;;;AAMT,MAAM,8BACJ,WACA,cACA,iBACY;AAEZ,KAAI,gBAAgB,aAAa,SAAS,EACxC,QAAO,aAAa,MACjB,SACC,QAAQ,aAAa,aACrB,OAAO,aAAa,YAAY,aAAa,WAChD;AAIH,QAAO,UAAU,YAAY,aAAa"}
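`createLineMapping` in the source above is a textbook LCS dynamic program over lines, backtracked into a base-to-updated index map (`null` marks a deleted line). A self-contained sketch of just that helper (the real module also maps chunk ranges and character offsets):

```ts
// LCS-based line mapping: base index -> updated index, or null if deleted.
const mapLines = (base: string[], upd: string[]): Map<number, number | null> => {
  // dp[i][j] = LCS length of base[0..i) and upd[0..j)
  const dp = Array.from({ length: base.length + 1 }, () =>
    new Array<number>(upd.length + 1).fill(0)
  );
  for (let i = 1; i <= base.length; i++)
    for (let j = 1; j <= upd.length; j++)
      dp[i][j] =
        base[i - 1] === upd[j - 1]
          ? dp[i - 1][j - 1] + 1
          : Math.max(dp[i - 1][j], dp[i][j - 1]);

  const mapping = new Map<number, number | null>();
  let i = base.length;
  let j = upd.length;
  while (i > 0 || j > 0) {
    if (i > 0 && j > 0 && base[i - 1] === upd[j - 1]) {
      mapping.set(--i, --j); // matched line
    } else if (i > 0 && (j === 0 || dp[i - 1][j] >= dp[i][j - 1])) {
      mapping.set(--i, null); // deleted from base
    } else {
      j--; // inserted into updated: no base entry
    }
  }
  return mapping;
};

console.log(mapLines(['a', 'b', 'c'], ['a', 'c'])); // 0 -> 0, 1 -> null, 2 -> 1
```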
@@ -1 +1 @@
- {"version":3,"file":"reorderParagraphs.cjs","names":["paragraphs: string[]","listSpecialChars","firstMatchIndexForBase: number[]","paragraphMatchedBaseIdx: (number | null)[]","insertAfterBaseIdx: number[]","result: string[]","extraParagraphsBuckets: Record<number, number[]>","leadingExtras: string[]"],"sources":["../../../src/utils/reorderParagraphs.ts"],"sourcesContent":["import { listSpecialChars } from './listSpecialChars';\n\n/**\n * Split a text into paragraphs.\n *\n * We consider a paragraph boundary to be a block of at least two consecutive\n * new-lines that is immediately followed by a non-white-space character. This\n * way, internal blank lines that are part of the same paragraph (e.g. a list\n * item that purposely contains a visual break) are preserved while true\n * paragraph breaks – the ones generated when calling `arr.join(\"\\n\\n\")` in\n * the tests – are still detected.\n */\nconst splitByParagraph = (text: string): string[] => {\n const paragraphs: string[] = [];\n\n // Capture the delimiter so that we can inspect how many new-lines it\n // contains. We know that the test strings only use LF, so we keep the\n // regex simple here.\n const tokens = text.split(/(\\n{2,})/);\n\n for (let i = 0; i < tokens.length; i++) {\n const token = tokens[i];\n\n // Even-indexed tokens are the actual paragraph contents.\n if (i % 2 === 0) {\n if (token) paragraphs.push(token);\n continue;\n }\n\n // Odd-indexed tokens are the delimiters (>= two consecutive new-lines).\n // The first and last pairs represent the natural separators that are\n // added when paragraphs are later joined with \"\\n\\n\". Any additional\n // pairs in between correspond to *explicit* blank paragraphs that were\n // present in the original text and must therefore be preserved.\n const pairsOfNewlines = Math.floor(token.length / 2);\n const blankParagraphs = Math.max(0, pairsOfNewlines - 2);\n\n for (let j = 0; j < blankParagraphs; j++) {\n paragraphs.push('\\n\\n');\n }\n }\n\n return paragraphs;\n};\n\n/**\n * Determine whether two paragraphs match – either exactly, or by sharing the\n * same \"special-character signature\".\n */\nconst paragraphMatches = (\n paragraph: string,\n baseParagraph: string,\n paragraphSignature: ReturnType<typeof listSpecialChars>,\n baseSignature: ReturnType<typeof listSpecialChars>\n): boolean => {\n if (paragraph === baseParagraph) return true;\n // fallback to special-character signature comparison\n if (paragraphSignature.length !== baseSignature.length) return false;\n\n for (let i = 0; i < paragraphSignature.length; i++) {\n if (paragraphSignature[i].char !== baseSignature[i].char) {\n return false;\n }\n }\n return true;\n};\n\n/**\n * Re-order `textToReorder` so that its paragraphs follow the ordering found in\n * `baseFileContent`, while preserving any extra paragraphs (those not present\n * in the base file) in a position that is intuitive for a human reader: right\n * after the closest preceding paragraph coming from the base file.\n */\nexport const reorderParagraphs = (\n textToReorder: string,\n baseFileContent: string\n): string => {\n // 1. Split both texts into paragraphs and pre-compute their signatures.\n const baseFileParagraphs = splitByParagraph(baseFileContent);\n const textToReorderParagraphs = splitByParagraph(textToReorder);\n\n const baseSignatures = baseFileParagraphs.map((p) => listSpecialChars(p));\n const textSignatures = textToReorderParagraphs.map((p) =>\n listSpecialChars(p)\n );\n\n // 2. 
For every paragraph in the text to reorder, find the *first* base\n // paragraph it matches. We only allow each base paragraph to be matched\n // once. Any further identical paragraphs will be treated as \"extra\" and\n // will be positioned later on, next to their closest neighbour.\n const firstMatchIndexForBase: number[] = Array(\n baseFileParagraphs.length\n ).fill(-1);\n const paragraphMatchedBaseIdx: (number | null)[] = Array(\n textToReorderParagraphs.length\n ).fill(null);\n\n for (let i = 0; i < textToReorderParagraphs.length; i++) {\n const paragraph = textToReorderParagraphs[i];\n const sig = textSignatures[i];\n\n // exact match pass first for performance\n let foundIdx = baseFileParagraphs.findIndex(\n (baseParagraph, idx) =>\n firstMatchIndexForBase[idx] === -1 && paragraph === baseParagraph\n );\n\n if (foundIdx === -1) {\n // fallback to the signature comparison\n foundIdx = baseFileParagraphs.findIndex(\n (baseParagraph, idx) =>\n firstMatchIndexForBase[idx] === -1 &&\n paragraphMatches(paragraph, baseParagraph, sig, baseSignatures[idx])\n );\n }\n\n if (foundIdx !== -1) {\n firstMatchIndexForBase[foundIdx] = i;\n paragraphMatchedBaseIdx[i] = foundIdx;\n }\n }\n\n // 3. For the paragraphs that *didn't* get matched to a base paragraph, we\n // record the highest-numbered base paragraph that was matched *before* it\n // in the original text. The extra paragraph will later be placed right\n // after that paragraph in the final ordering.\n const insertAfterBaseIdx: number[] = Array(\n textToReorderParagraphs.length\n ).fill(-1);\n let maxBaseIdxEncountered = -1;\n\n for (let i = 0; i < textToReorderParagraphs.length; i++) {\n const matchedBase = paragraphMatchedBaseIdx[i];\n\n if (matchedBase !== null) {\n if (matchedBase > maxBaseIdxEncountered) {\n maxBaseIdxEncountered = matchedBase;\n }\n } else {\n insertAfterBaseIdx[i] = maxBaseIdxEncountered;\n }\n }\n\n // 4. 
Build the final, reordered list of paragraphs.\n const result: string[] = [];\n\n // Helper: quickly retrieve all indices of paragraphs that should be inserted\n // after a given base index, while keeping their original order.\n const extraParagraphsBuckets: Record<number, number[]> = {};\n insertAfterBaseIdx.forEach((afterIdx, paragraphIdx) => {\n if (afterIdx === -1) return; // will be handled later (if any)\n extraParagraphsBuckets[afterIdx] = extraParagraphsBuckets[afterIdx] || [];\n extraParagraphsBuckets[afterIdx].push(paragraphIdx);\n });\n\n for (let bIdx = 0; bIdx < baseFileParagraphs.length; bIdx++) {\n const matchedParagraphIdx = firstMatchIndexForBase[bIdx];\n\n if (matchedParagraphIdx !== -1) {\n result.push(textToReorderParagraphs[matchedParagraphIdx]);\n }\n\n if (extraParagraphsBuckets[bIdx]) {\n extraParagraphsBuckets[bIdx].forEach((pIdx) => {\n result.push(textToReorderParagraphs[pIdx]);\n });\n }\n }\n\n // Finally, if there were extra paragraphs appearing *before* any matched\n // base paragraph (insertAfterBaseIdx === -1), we prepend them to the output\n // in their original order.\n const leadingExtras: string[] = [];\n insertAfterBaseIdx.forEach((afterIdx, pIdx) => {\n if (afterIdx === -1 && paragraphMatchedBaseIdx[pIdx] === null) {\n leadingExtras.push(textToReorderParagraphs[pIdx]);\n }\n });\n\n return [...leadingExtras, ...result].join('\\n\\n');\n};\n"],"mappings":";;;;;;;;;;;;;AAYA,MAAM,oBAAoB,SAA2B;CACnD,MAAMA,aAAuB,EAAE;CAK/B,MAAM,SAAS,KAAK,MAAM,WAAW;AAErC,MAAK,IAAI,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;EACtC,MAAM,QAAQ,OAAO;AAGrB,MAAI,IAAI,MAAM,GAAG;AACf,OAAI,MAAO,YAAW,KAAK,MAAM;AACjC;;EAQF,MAAM,kBAAkB,KAAK,MAAM,MAAM,SAAS,EAAE;EACpD,MAAM,kBAAkB,KAAK,IAAI,GAAG,kBAAkB,EAAE;AAExD,OAAK,IAAI,IAAI,GAAG,IAAI,iBAAiB,IACnC,YAAW,KAAK,OAAO;;AAI3B,QAAO;;;;;;AAOT,MAAM,oBACJ,WACA,eACA,oBACA,kBACY;AACZ,KAAI,cAAc,cAAe,QAAO;AAExC,KAAI,mBAAmB,WAAW,cAAc,OAAQ,QAAO;AAE/D,MAAK,IAAI,IAAI,GAAG,IAAI,mBAAmB,QAAQ,IAC7C,KAAI,mBAAmB,GAAG,SAAS,cAAc,GAAG,KAClD,QAAO;AAGX,QAAO;;;;;;;;AAST,MAAa,qBACX,eACA,oBACW;CAEX,MAAM,qBAAqB,iBAAiB,gBAAgB;CAC5D,MAAM,0BAA0B,iBAAiB,cAAc;CAE/D,MAAM,iBAAiB,mBAAmB,KAAK,MAAMC,gDAAiB,EAAE,CAAC;CACzE,MAAM,iBAAiB,wBAAwB,KAAK,MAClDA,gDAAiB,EAAE,CACpB;CAMD,MAAMC,yBAAmC,MACvC,mBAAmB,OACpB,CAAC,KAAK,GAAG;CACV,MAAMC,0BAA6C,MACjD,wBAAwB,OACzB,CAAC,KAAK,KAAK;AAEZ,MAAK,IAAI,IAAI,GAAG,IAAI,wBAAwB,QAAQ,KAAK;EACvD,MAAM,YAAY,wBAAwB;EAC1C,MAAM,MAAM,eAAe;EAG3B,IAAI,WAAW,mBAAmB,WAC/B,eAAe,QACd,uBAAuB,SAAS,MAAM,cAAc,cACvD;AAED,MAAI,aAAa,GAEf,YAAW,mBAAmB,WAC3B,eAAe,QACd,uBAAuB,SAAS,MAChC,iBAAiB,WAAW,eAAe,KAAK,eAAe,KAAK,CACvE;AAGH,MAAI,aAAa,IAAI;AACnB,0BAAuB,YAAY;AACnC,2BAAwB,KAAK;;;CAQjC,MAAMC,qBAA+B,MACnC,wBAAwB,OACzB,CAAC,KAAK,GAAG;CACV,IAAI,wBAAwB;AAE5B,MAAK,IAAI,IAAI,GAAG,IAAI,wBAAwB,QAAQ,KAAK;EACvD,MAAM,cAAc,wBAAwB;AAE5C,MAAI,gBAAgB,MAClB;OAAI,cAAc,sBAChB,yBAAwB;QAG1B,oBAAmB,KAAK;;CAK5B,MAAMC,SAAmB,EAAE;CAI3B,MAAMC,yBAAmD,EAAE;AAC3D,oBAAmB,SAAS,UAAU,iBAAiB;AACrD,MAAI,aAAa,GAAI;AACrB,yBAAuB,YAAY,uBAAuB,aAAa,EAAE;AACzE,yBAAuB,UAAU,KAAK,aAAa;GACnD;AAEF,MAAK,IAAI,OAAO,GAAG,OAAO,mBAAmB,QAAQ,QAAQ;EAC3D,MAAM,sBAAsB,uBAAuB;AAEnD,MAAI,wBAAwB,GAC1B,QAAO,KAAK,wBAAwB,qBAAqB;AAG3D,MAAI,uBAAuB,MACzB,wBAAuB,MAAM,SAAS,SAAS;AAC7C,UAAO,KAAK,wBAAwB,MAAM;IAC1C;;CAON,MAAMC,gBAA0B,EAAE;AAClC,oBAAmB,SAAS,UAAU,SAAS;AAC7C,MAAI,aAAa,MAAM,wBAAwB,UAAU,KACvD,eAAc,KAAK,wBAAwB,MAAM;GAEnD;AAEF,QAAO,CAAC,GAAG,eAAe,GAAG,OAAO,CAAC,KAAK,OAAO"}
+ {"version":3,"file":"reorderParagraphs.cjs","names":["listSpecialChars"],"sources":["../../../src/utils/reorderParagraphs.ts"],"sourcesContent":["import { listSpecialChars } from './listSpecialChars';\n\n/**\n * Split a text into paragraphs.\n *\n * We consider a paragraph boundary to be a block of at least two consecutive\n * new-lines that is immediately followed by a non-white-space character. This\n * way, internal blank lines that are part of the same paragraph (e.g. a list\n * item that purposely contains a visual break) are preserved while true\n * paragraph breaks – the ones generated when calling `arr.join(\"\\n\\n\")` in\n * the tests – are still detected.\n */\nconst splitByParagraph = (text: string): string[] => {\n const paragraphs: string[] = [];\n\n // Capture the delimiter so that we can inspect how many new-lines it\n // contains. We know that the test strings only use LF, so we keep the\n // regex simple here.\n const tokens = text.split(/(\\n{2,})/);\n\n for (let i = 0; i < tokens.length; i++) {\n const token = tokens[i];\n\n // Even-indexed tokens are the actual paragraph contents.\n if (i % 2 === 0) {\n if (token) paragraphs.push(token);\n continue;\n }\n\n // Odd-indexed tokens are the delimiters (>= two consecutive new-lines).\n // The first and last pairs represent the natural separators that are\n // added when paragraphs are later joined with \"\\n\\n\". Any additional\n // pairs in between correspond to *explicit* blank paragraphs that were\n // present in the original text and must therefore be preserved.\n const pairsOfNewlines = Math.floor(token.length / 2);\n const blankParagraphs = Math.max(0, pairsOfNewlines - 2);\n\n for (let j = 0; j < blankParagraphs; j++) {\n paragraphs.push('\\n\\n');\n }\n }\n\n return paragraphs;\n};\n\n/**\n * Determine whether two paragraphs match – either exactly, or by sharing the\n * same \"special-character signature\".\n */\nconst paragraphMatches = (\n paragraph: string,\n baseParagraph: string,\n paragraphSignature: ReturnType<typeof listSpecialChars>,\n baseSignature: ReturnType<typeof listSpecialChars>\n): boolean => {\n if (paragraph === baseParagraph) return true;\n // fallback to special-character signature comparison\n if (paragraphSignature.length !== baseSignature.length) return false;\n\n for (let i = 0; i < paragraphSignature.length; i++) {\n if (paragraphSignature[i].char !== baseSignature[i].char) {\n return false;\n }\n }\n return true;\n};\n\n/**\n * Re-order `textToReorder` so that its paragraphs follow the ordering found in\n * `baseFileContent`, while preserving any extra paragraphs (those not present\n * in the base file) in a position that is intuitive for a human reader: right\n * after the closest preceding paragraph coming from the base file.\n */\nexport const reorderParagraphs = (\n textToReorder: string,\n baseFileContent: string\n): string => {\n // 1. Split both texts into paragraphs and pre-compute their signatures.\n const baseFileParagraphs = splitByParagraph(baseFileContent);\n const textToReorderParagraphs = splitByParagraph(textToReorder);\n\n const baseSignatures = baseFileParagraphs.map((p) => listSpecialChars(p));\n const textSignatures = textToReorderParagraphs.map((p) =>\n listSpecialChars(p)\n );\n\n // 2. For every paragraph in the text to reorder, find the *first* base\n // paragraph it matches. We only allow each base paragraph to be matched\n // once. 
Any further identical paragraphs will be treated as \"extra\" and\n // will be positioned later on, next to their closest neighbour.\n const firstMatchIndexForBase: number[] = Array(\n baseFileParagraphs.length\n ).fill(-1);\n const paragraphMatchedBaseIdx: (number | null)[] = Array(\n textToReorderParagraphs.length\n ).fill(null);\n\n for (let i = 0; i < textToReorderParagraphs.length; i++) {\n const paragraph = textToReorderParagraphs[i];\n const sig = textSignatures[i];\n\n // exact match pass first for performance\n let foundIdx = baseFileParagraphs.findIndex(\n (baseParagraph, idx) =>\n firstMatchIndexForBase[idx] === -1 && paragraph === baseParagraph\n );\n\n if (foundIdx === -1) {\n // fallback to the signature comparison\n foundIdx = baseFileParagraphs.findIndex(\n (baseParagraph, idx) =>\n firstMatchIndexForBase[idx] === -1 &&\n paragraphMatches(paragraph, baseParagraph, sig, baseSignatures[idx])\n );\n }\n\n if (foundIdx !== -1) {\n firstMatchIndexForBase[foundIdx] = i;\n paragraphMatchedBaseIdx[i] = foundIdx;\n }\n }\n\n // 3. For the paragraphs that *didn't* get matched to a base paragraph, we\n // record the highest-numbered base paragraph that was matched *before* it\n // in the original text. The extra paragraph will later be placed right\n // after that paragraph in the final ordering.\n const insertAfterBaseIdx: number[] = Array(\n textToReorderParagraphs.length\n ).fill(-1);\n let maxBaseIdxEncountered = -1;\n\n for (let i = 0; i < textToReorderParagraphs.length; i++) {\n const matchedBase = paragraphMatchedBaseIdx[i];\n\n if (matchedBase !== null) {\n if (matchedBase > maxBaseIdxEncountered) {\n maxBaseIdxEncountered = matchedBase;\n }\n } else {\n insertAfterBaseIdx[i] = maxBaseIdxEncountered;\n }\n }\n\n // 4. Build the final, reordered list of paragraphs.\n const result: string[] = [];\n\n // Helper: quickly retrieve all indices of paragraphs that should be inserted\n // after a given base index, while keeping their original order.\n const extraParagraphsBuckets: Record<number, number[]> = {};\n insertAfterBaseIdx.forEach((afterIdx, paragraphIdx) => {\n if (afterIdx === -1) return; // will be handled later (if any)\n extraParagraphsBuckets[afterIdx] = extraParagraphsBuckets[afterIdx] || [];\n extraParagraphsBuckets[afterIdx].push(paragraphIdx);\n });\n\n for (let bIdx = 0; bIdx < baseFileParagraphs.length; bIdx++) {\n const matchedParagraphIdx = firstMatchIndexForBase[bIdx];\n\n if (matchedParagraphIdx !== -1) {\n result.push(textToReorderParagraphs[matchedParagraphIdx]);\n }\n\n if (extraParagraphsBuckets[bIdx]) {\n extraParagraphsBuckets[bIdx].forEach((pIdx) => {\n result.push(textToReorderParagraphs[pIdx]);\n });\n }\n }\n\n // Finally, if there were extra paragraphs appearing *before* any matched\n // base paragraph (insertAfterBaseIdx === -1), we prepend them to the output\n // in their original order.\n const leadingExtras: string[] = [];\n insertAfterBaseIdx.forEach((afterIdx, pIdx) => {\n if (afterIdx === -1 && paragraphMatchedBaseIdx[pIdx] === null) {\n leadingExtras.push(textToReorderParagraphs[pIdx]);\n }\n });\n\n return [...leadingExtras, 
...result].join('\\n\\n');\n};\n"],"mappings":";;;;;;;;;;;;;AAYA,MAAM,oBAAoB,SAA2B;CACnD,MAAM,aAAuB,EAAE;CAK/B,MAAM,SAAS,KAAK,MAAM,WAAW;AAErC,MAAK,IAAI,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;EACtC,MAAM,QAAQ,OAAO;AAGrB,MAAI,IAAI,MAAM,GAAG;AACf,OAAI,MAAO,YAAW,KAAK,MAAM;AACjC;;EAQF,MAAM,kBAAkB,KAAK,MAAM,MAAM,SAAS,EAAE;EACpD,MAAM,kBAAkB,KAAK,IAAI,GAAG,kBAAkB,EAAE;AAExD,OAAK,IAAI,IAAI,GAAG,IAAI,iBAAiB,IACnC,YAAW,KAAK,OAAO;;AAI3B,QAAO;;;;;;AAOT,MAAM,oBACJ,WACA,eACA,oBACA,kBACY;AACZ,KAAI,cAAc,cAAe,QAAO;AAExC,KAAI,mBAAmB,WAAW,cAAc,OAAQ,QAAO;AAE/D,MAAK,IAAI,IAAI,GAAG,IAAI,mBAAmB,QAAQ,IAC7C,KAAI,mBAAmB,GAAG,SAAS,cAAc,GAAG,KAClD,QAAO;AAGX,QAAO;;;;;;;;AAST,MAAa,qBACX,eACA,oBACW;CAEX,MAAM,qBAAqB,iBAAiB,gBAAgB;CAC5D,MAAM,0BAA0B,iBAAiB,cAAc;CAE/D,MAAM,iBAAiB,mBAAmB,KAAK,MAAMA,gDAAiB,EAAE,CAAC;CACzE,MAAM,iBAAiB,wBAAwB,KAAK,MAClDA,gDAAiB,EAAE,CACpB;CAMD,MAAM,yBAAmC,MACvC,mBAAmB,OACpB,CAAC,KAAK,GAAG;CACV,MAAM,0BAA6C,MACjD,wBAAwB,OACzB,CAAC,KAAK,KAAK;AAEZ,MAAK,IAAI,IAAI,GAAG,IAAI,wBAAwB,QAAQ,KAAK;EACvD,MAAM,YAAY,wBAAwB;EAC1C,MAAM,MAAM,eAAe;EAG3B,IAAI,WAAW,mBAAmB,WAC/B,eAAe,QACd,uBAAuB,SAAS,MAAM,cAAc,cACvD;AAED,MAAI,aAAa,GAEf,YAAW,mBAAmB,WAC3B,eAAe,QACd,uBAAuB,SAAS,MAChC,iBAAiB,WAAW,eAAe,KAAK,eAAe,KAAK,CACvE;AAGH,MAAI,aAAa,IAAI;AACnB,0BAAuB,YAAY;AACnC,2BAAwB,KAAK;;;CAQjC,MAAM,qBAA+B,MACnC,wBAAwB,OACzB,CAAC,KAAK,GAAG;CACV,IAAI,wBAAwB;AAE5B,MAAK,IAAI,IAAI,GAAG,IAAI,wBAAwB,QAAQ,KAAK;EACvD,MAAM,cAAc,wBAAwB;AAE5C,MAAI,gBAAgB,MAClB;OAAI,cAAc,sBAChB,yBAAwB;QAG1B,oBAAmB,KAAK;;CAK5B,MAAM,SAAmB,EAAE;CAI3B,MAAM,yBAAmD,EAAE;AAC3D,oBAAmB,SAAS,UAAU,iBAAiB;AACrD,MAAI,aAAa,GAAI;AACrB,yBAAuB,YAAY,uBAAuB,aAAa,EAAE;AACzE,yBAAuB,UAAU,KAAK,aAAa;GACnD;AAEF,MAAK,IAAI,OAAO,GAAG,OAAO,mBAAmB,QAAQ,QAAQ;EAC3D,MAAM,sBAAsB,uBAAuB;AAEnD,MAAI,wBAAwB,GAC1B,QAAO,KAAK,wBAAwB,qBAAqB;AAG3D,MAAI,uBAAuB,MACzB,wBAAuB,MAAM,SAAS,SAAS;AAC7C,UAAO,KAAK,wBAAwB,MAAM;IAC1C;;CAON,MAAM,gBAA0B,EAAE;AAClC,oBAAmB,SAAS,UAAU,SAAS;AAC7C,MAAI,aAAa,MAAM,wBAAwB,UAAU,KACvD,eAAc,KAAK,wBAAwB,MAAM;GAEnD;AAEF,QAAO,CAAC,GAAG,eAAe,GAAG,OAAO,CAAC,KAAK,OAAO"}
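
The reorderParagraphs.cjs.map hunk above embeds src/utils/reorderParagraphs.ts, which reorders a translated text so its paragraphs follow the base file's order while parking unmatched "extra" paragraphs after the highest-numbered base paragraph matched before them. A minimal sketch of that contract, assuming exact paragraph matches only (the shipped code also falls back to a special-character signature comparison and preserves explicit blank paragraphs); `reorderSketch` is an illustrative name, not a package export:

```ts
// Minimal sketch (illustrative, not the shipped implementation): reorder
// `textToReorder` to follow `base`'s paragraph order; exact matches only.
const reorderSketch = (textToReorder: string, base: string): string => {
  const paras = textToReorder.split('\n\n');
  const baseParas = base.split('\n\n');

  // Each base paragraph may be claimed once; later duplicates are "extras".
  const matchedParaForBase: number[] = baseParas.map(() => -1);
  const matchedBaseForPara: (number | null)[] = paras.map(() => null);

  paras.forEach((p, i) => {
    const b = baseParas.findIndex(
      (bp, j) => matchedParaForBase[j] === -1 && bp === p
    );
    if (b !== -1) {
      matchedParaForBase[b] = i;
      matchedBaseForPara[i] = b;
    }
  });

  // An extra paragraph is re-inserted after the highest base index matched
  // before it in the original text (-1 means "before everything").
  const extrasAfter = new Map<number, number[]>();
  let lastBase = -1;
  paras.forEach((_, i) => {
    const b = matchedBaseForPara[i];
    if (b !== null) lastBase = Math.max(lastBase, b);
    else extrasAfter.set(lastBase, [...(extrasAfter.get(lastBase) ?? []), i]);
  });

  const out = (extrasAfter.get(-1) ?? []).map((i) => paras[i]);
  baseParas.forEach((_, b) => {
    if (matchedParaForBase[b] !== -1) out.push(paras[matchedParaForBase[b]]);
    (extrasAfter.get(b) ?? []).forEach((i) => out.push(paras[i]));
  });
  return out.join('\n\n');
};

// reorderSketch('B\n\nA\n\nX', 'A\n\nB') === 'A\n\nB\n\nX'
// ('X' follows 'B' because 'B' was the highest base match seen before it.)
```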
@@ -1 +1 @@
- {"version":3,"file":"setupAI.cjs","names":["ANSIColors","aiClient: AIClient | undefined","hasAIAccess","checkAIAccess"],"sources":["../../../src/utils/setupAI.ts"],"sourcesContent":["import type { AIConfig, AIOptions } from '@intlayer/ai';\nimport {\n ANSIColors,\n colorize,\n getAppLogger,\n type logger,\n} from '@intlayer/config';\nimport type { IntlayerConfig } from '@intlayer/types';\nimport { checkAIAccess } from './checkAccess';\n\nexport type AIClient = typeof import('@intlayer/ai');\n\ntype SetupAIResult = {\n aiClient?: AIClient;\n aiConfig?: AIConfig;\n isCustomAI: boolean;\n hasAIAccess: boolean;\n};\n\n// Disable warnings from the AI SDK\nglobalThis.AI_SDK_LOG_WARNINGS = false;\n\nconst logAIConfig = (aiOptions: AIOptions, appLogger: typeof logger) => {\n appLogger([\n colorize('Provider:', ANSIColors.GREY_DARK),\n colorize(aiOptions?.provider ?? '(default)', ANSIColors.BLUE),\n colorize('- Model:', ANSIColors.GREY_DARK),\n colorize(aiOptions?.model ?? '(default)', ANSIColors.BLUE),\n colorize('- API Key:', ANSIColors.GREY_DARK),\n colorize(aiOptions?.apiKey ? '✓' : '(not set)', ANSIColors.BLUE),\n ]);\n};\n\n/**\n * Checks if the @intlayer/ai package is available and configured when an API key is provided.\n * If API key is present but package is missing, logs a warning.\n * Also checks if the user has access to AI (either via local key or CMS auth).\n */\nexport const setupAI = async (\n configuration: IntlayerConfig,\n aiOptions?: AIOptions\n): Promise<SetupAIResult | undefined> => {\n const appLogger = getAppLogger(configuration);\n\n const isLocalAI =\n aiOptions?.apiKey ||\n aiOptions?.provider === 'ollama' ||\n configuration.ai?.apiKey ||\n configuration.ai?.provider === 'ollama';\n\n if (isLocalAI) {\n // Try to import the AI package for local AI usage\n let aiClient: AIClient | undefined;\n\n try {\n aiClient = await import('@intlayer/ai');\n } catch {\n // Package not installed - log warning and fall back to backend\n appLogger(\n [\n colorize('Using your API key, you can install the', ANSIColors.GREY),\n colorize('@intlayer/ai', ANSIColors.GREY_LIGHT),\n colorize(\n 'package to run the process locally, with no dependency on the Intlayer server',\n ANSIColors.GREY\n ),\n ],\n {\n level: 'warn',\n }\n );\n\n // Fall back to backend API check\n const hasAIAccess = await checkAIAccess(configuration, aiOptions);\n logAIConfig(aiOptions ?? {}, appLogger);\n return {\n isCustomAI: false,\n hasAIAccess,\n };\n }\n\n // Package found - now configure it (errors here should propagate, not fall back)\n appLogger([\n colorize('@intlayer/ai', ANSIColors.GREY_LIGHT),\n colorize('found - Run process locally', ANSIColors.GREY_DARK),\n ]);\n\n const aiConfig = await aiClient.getAIConfig({\n userOptions: aiOptions,\n accessType: ['public'],\n });\n\n logAIConfig(aiOptions ?? {}, appLogger);\n\n return {\n aiClient,\n aiConfig,\n isCustomAI: true,\n hasAIAccess: true, // Local AI always has access\n };\n }\n\n // No local AI configured - use backend API\n const hasAIAccess = await checkAIAccess(configuration, aiOptions);\n logAIConfig(aiOptions ?? 
{}, appLogger);\n\n return {\n isCustomAI: false,\n hasAIAccess,\n };\n};\n"],"mappings":";;;;;AAoBA,WAAW,sBAAsB;AAEjC,MAAM,eAAe,WAAsB,cAA6B;AACtE,WAAU;iCACC,aAAaA,4BAAW,UAAU;iCAClC,WAAW,YAAY,aAAaA,4BAAW,KAAK;iCACpD,YAAYA,4BAAW,UAAU;iCACjC,WAAW,SAAS,aAAaA,4BAAW,KAAK;iCACjD,cAAcA,4BAAW,UAAU;iCACnC,WAAW,SAAS,MAAM,aAAaA,4BAAW,KAAK;EACjE,CAAC;;;;;;;AAQJ,MAAa,UAAU,OACrB,eACA,cACuC;CACvC,MAAM,+CAAyB,cAAc;AAQ7C,KALE,WAAW,UACX,WAAW,aAAa,YACxB,cAAc,IAAI,UAClB,cAAc,IAAI,aAAa,UAElB;EAEb,IAAIC;AAEJ,MAAI;AACF,cAAW,MAAM,OAAO;UAClB;AAEN,aACE;mCACW,2CAA2CD,4BAAW,KAAK;mCAC3D,gBAAgBA,4BAAW,WAAW;mCAE7C,iFACAA,4BAAW,KACZ;IACF,EACD,EACE,OAAO,QACR,CACF;GAGD,MAAME,gBAAc,MAAMC,wCAAc,eAAe,UAAU;AACjE,eAAY,aAAa,EAAE,EAAE,UAAU;AACvC,UAAO;IACL,YAAY;IACZ;IACD;;AAIH,YAAU,gCACC,gBAAgBH,4BAAW,WAAW,iCACtC,+BAA+BA,4BAAW,UAAU,CAC9D,CAAC;EAEF,MAAM,WAAW,MAAM,SAAS,YAAY;GAC1C,aAAa;GACb,YAAY,CAAC,SAAS;GACvB,CAAC;AAEF,cAAY,aAAa,EAAE,EAAE,UAAU;AAEvC,SAAO;GACL;GACA;GACA,YAAY;GACZ,aAAa;GACd;;CAIH,MAAM,cAAc,MAAMG,wCAAc,eAAe,UAAU;AACjE,aAAY,aAAa,EAAE,EAAE,UAAU;AAEvC,QAAO;EACL,YAAY;EACZ;EACD"}
+ {"version":3,"file":"setupAI.cjs","names":["ANSIColors","hasAIAccess","checkAIAccess"],"sources":["../../../src/utils/setupAI.ts"],"sourcesContent":["import type { AIConfig, AIOptions } from '@intlayer/ai';\nimport {\n ANSIColors,\n colorize,\n getAppLogger,\n type logger,\n} from '@intlayer/config';\nimport type { IntlayerConfig } from '@intlayer/types';\nimport { checkAIAccess } from './checkAccess';\n\nexport type AIClient = typeof import('@intlayer/ai');\n\ntype SetupAIResult = {\n aiClient?: AIClient;\n aiConfig?: AIConfig;\n isCustomAI: boolean;\n hasAIAccess: boolean;\n};\n\n// Disable warnings from the AI SDK\nglobalThis.AI_SDK_LOG_WARNINGS = false;\n\nconst logAIConfig = (aiOptions: AIOptions, appLogger: typeof logger) => {\n appLogger([\n colorize('Provider:', ANSIColors.GREY_DARK),\n colorize(aiOptions?.provider ?? '(default)', ANSIColors.BLUE),\n colorize('- Model:', ANSIColors.GREY_DARK),\n colorize(aiOptions?.model ?? '(default)', ANSIColors.BLUE),\n colorize('- API Key:', ANSIColors.GREY_DARK),\n colorize(aiOptions?.apiKey ? '✓' : '(not set)', ANSIColors.BLUE),\n ]);\n};\n\n/**\n * Checks if the @intlayer/ai package is available and configured when an API key is provided.\n * If API key is present but package is missing, logs a warning.\n * Also checks if the user has access to AI (either via local key or CMS auth).\n */\nexport const setupAI = async (\n configuration: IntlayerConfig,\n aiOptions?: AIOptions\n): Promise<SetupAIResult | undefined> => {\n const appLogger = getAppLogger(configuration);\n\n const isLocalAI =\n aiOptions?.apiKey ||\n aiOptions?.provider === 'ollama' ||\n configuration.ai?.apiKey ||\n configuration.ai?.provider === 'ollama';\n\n if (isLocalAI) {\n // Try to import the AI package for local AI usage\n let aiClient: AIClient | undefined;\n\n try {\n aiClient = await import('@intlayer/ai');\n } catch {\n // Package not installed - log warning and fall back to backend\n appLogger(\n [\n colorize('Using your API key, you can install the', ANSIColors.GREY),\n colorize('@intlayer/ai', ANSIColors.GREY_LIGHT),\n colorize(\n 'package to run the process locally, with no dependency on the Intlayer server',\n ANSIColors.GREY\n ),\n ],\n {\n level: 'warn',\n }\n );\n\n // Fall back to backend API check\n const hasAIAccess = await checkAIAccess(configuration, aiOptions);\n logAIConfig(aiOptions ?? {}, appLogger);\n return {\n isCustomAI: false,\n hasAIAccess,\n };\n }\n\n // Package found - now configure it (errors here should propagate, not fall back)\n appLogger([\n colorize('@intlayer/ai', ANSIColors.GREY_LIGHT),\n colorize('found - Run process locally', ANSIColors.GREY_DARK),\n ]);\n\n const aiConfig = await aiClient.getAIConfig({\n userOptions: aiOptions,\n accessType: ['public'],\n });\n\n logAIConfig(aiOptions ?? {}, appLogger);\n\n return {\n aiClient,\n aiConfig,\n isCustomAI: true,\n hasAIAccess: true, // Local AI always has access\n };\n }\n\n // No local AI configured - use backend API\n const hasAIAccess = await checkAIAccess(configuration, aiOptions);\n logAIConfig(aiOptions ?? 
{}, appLogger);\n\n return {\n isCustomAI: false,\n hasAIAccess,\n };\n};\n"],"mappings":";;;;;AAoBA,WAAW,sBAAsB;AAEjC,MAAM,eAAe,WAAsB,cAA6B;AACtE,WAAU;iCACC,aAAaA,4BAAW,UAAU;iCAClC,WAAW,YAAY,aAAaA,4BAAW,KAAK;iCACpD,YAAYA,4BAAW,UAAU;iCACjC,WAAW,SAAS,aAAaA,4BAAW,KAAK;iCACjD,cAAcA,4BAAW,UAAU;iCACnC,WAAW,SAAS,MAAM,aAAaA,4BAAW,KAAK;EACjE,CAAC;;;;;;;AAQJ,MAAa,UAAU,OACrB,eACA,cACuC;CACvC,MAAM,+CAAyB,cAAc;AAQ7C,KALE,WAAW,UACX,WAAW,aAAa,YACxB,cAAc,IAAI,UAClB,cAAc,IAAI,aAAa,UAElB;EAEb,IAAI;AAEJ,MAAI;AACF,cAAW,MAAM,OAAO;UAClB;AAEN,aACE;mCACW,2CAA2CA,4BAAW,KAAK;mCAC3D,gBAAgBA,4BAAW,WAAW;mCAE7C,iFACAA,4BAAW,KACZ;IACF,EACD,EACE,OAAO,QACR,CACF;GAGD,MAAMC,gBAAc,MAAMC,wCAAc,eAAe,UAAU;AACjE,eAAY,aAAa,EAAE,EAAE,UAAU;AACvC,UAAO;IACL,YAAY;IACZ;IACD;;AAIH,YAAU,gCACC,gBAAgBF,4BAAW,WAAW,iCACtC,+BAA+BA,4BAAW,UAAU,CAC9D,CAAC;EAEF,MAAM,WAAW,MAAM,SAAS,YAAY;GAC1C,aAAa;GACb,YAAY,CAAC,SAAS;GACvB,CAAC;AAEF,cAAY,aAAa,EAAE,EAAE,UAAU;AAEvC,SAAO;GACL;GACA;GACA,YAAY;GACZ,aAAa;GACd;;CAIH,MAAM,cAAc,MAAME,wCAAc,eAAe,UAAU;AACjE,aAAY,aAAa,EAAE,EAAE,UAAU;AAEvC,QAAO;EACL,YAAY;EACZ;EACD"}
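
The setupAI.cjs.map hunk embeds src/utils/setupAI.ts, whose core move is an optional dynamic import: when a local API key (or the ollama provider) is configured it tries to load @intlayer/ai, and if the package is not installed it warns and falls back to the backend access check. A minimal sketch of that pattern; `loadOptional` is an illustrative helper, not part of the package API:

```ts
// Sketch of the optional-dependency pattern in setupAI.ts: attempt a dynamic
// import of a peer package and degrade gracefully when it is absent.
const loadOptional = async <T>(specifier: string): Promise<T | undefined> => {
  try {
    return (await import(specifier)) as T;
  } catch {
    // Not installed: the caller falls back to the remote (backend) path.
    return undefined;
  }
};

// Usage shape mirroring the hunk above:
// const aiClient = await loadOptional<typeof import('@intlayer/ai')>('@intlayer/ai');
// if (!aiClient) { /* warn, then check backend AI access instead */ }
```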
@@ -1 +1 @@
- {"version":3,"file":"watch.cjs","names":["parallelProcess: ReturnType<typeof runParallel> | undefined"],"sources":["../../src/watch.ts"],"sourcesContent":["import { runParallel, watch } from '@intlayer/chokidar';\nimport {\n type GetConfigurationOptions,\n getAppLogger,\n getConfiguration,\n} from '@intlayer/config';\n\ntype WatchOptions = {\n skipPrepare?: boolean;\n with?: string | string[];\n configOptions?: GetConfigurationOptions;\n};\n\n/**\n * Get locales dictionaries .content.{json|ts|tsx|js|jsx|mjs|cjs} and build the JSON dictionaries in the .intlayer directory.\n * Watch mode available to get the change in the .content.{json|ts|tsx|js|jsx|mjs|cjs}\n */\nexport const watchContentDeclaration = async (options?: WatchOptions) => {\n const config = getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(config);\n\n // Store references to the child process\n let parallelProcess: ReturnType<typeof runParallel> | undefined;\n\n if (options?.with) {\n parallelProcess = runParallel(options.with);\n // Handle the promise to avoid unhandled rejection\n parallelProcess.result.catch(() => {\n // Parallel process failed or was terminated\n process.exit(1);\n });\n }\n\n appLogger('Watching Intlayer content declarations');\n\n // Capture the watcher instance\n const watcher = watch({\n persistent: true,\n skipPrepare: options?.skipPrepare ?? false,\n });\n\n // Define a Graceful Shutdown function\n const handleShutdown = async () => {\n // Prevent multiple calls\n process.off('SIGINT', handleShutdown);\n process.off('SIGTERM', handleShutdown);\n\n appLogger('Stopping Intlayer watcher...');\n\n try {\n // Close the file watcher immediately to stop \"esbuild service not running\" errors\n await watcher.close();\n\n // If runParallel exposes the child process, we can try to kill it explicitly.\n // Even if it doesn't, process.exit() below usually cleans up attached children.\n if (parallelProcess && 'child' in parallelProcess) {\n // @ts-ignore - Assuming child exists on the return type if runParallel is based on spawn/exec\n parallelProcess.child?.kill('SIGTERM');\n }\n } catch (error) {\n console.error('Error during shutdown:', error);\n } finally {\n process.exit(0);\n }\n };\n\n // Attach Signal Listeners\n process.on('SIGINT', handleShutdown);\n process.on('SIGTERM', handleShutdown);\n};\n"],"mappings":";;;;;;;;;AAiBA,MAAa,0BAA0B,OAAO,YAA2B;CAEvE,MAAM,sFAD0B,SAAS,cAAc,CACjB;CAGtC,IAAIA;AAEJ,KAAI,SAAS,MAAM;AACjB,wDAA8B,QAAQ,KAAK;AAE3C,kBAAgB,OAAO,YAAY;AAEjC,WAAQ,KAAK,EAAE;IACf;;AAGJ,WAAU,yCAAyC;CAGnD,MAAM,wCAAgB;EACpB,YAAY;EACZ,aAAa,SAAS,eAAe;EACtC,CAAC;CAGF,MAAM,iBAAiB,YAAY;AAEjC,UAAQ,IAAI,UAAU,eAAe;AACrC,UAAQ,IAAI,WAAW,eAAe;AAEtC,YAAU,+BAA+B;AAEzC,MAAI;AAEF,SAAM,QAAQ,OAAO;AAIrB,OAAI,mBAAmB,WAAW,gBAEhC,iBAAgB,OAAO,KAAK,UAAU;WAEjC,OAAO;AACd,WAAQ,MAAM,0BAA0B,MAAM;YACtC;AACR,WAAQ,KAAK,EAAE;;;AAKnB,SAAQ,GAAG,UAAU,eAAe;AACpC,SAAQ,GAAG,WAAW,eAAe"}
+ {"version":3,"file":"watch.cjs","names":[],"sources":["../../src/watch.ts"],"sourcesContent":["import { runParallel, watch } from '@intlayer/chokidar';\nimport {\n type GetConfigurationOptions,\n getAppLogger,\n getConfiguration,\n} from '@intlayer/config';\n\ntype WatchOptions = {\n skipPrepare?: boolean;\n with?: string | string[];\n configOptions?: GetConfigurationOptions;\n};\n\n/**\n * Get locales dictionaries .content.{json|ts|tsx|js|jsx|mjs|cjs} and build the JSON dictionaries in the .intlayer directory.\n * Watch mode available to get the change in the .content.{json|ts|tsx|js|jsx|mjs|cjs}\n */\nexport const watchContentDeclaration = async (options?: WatchOptions) => {\n const config = getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(config);\n\n // Store references to the child process\n let parallelProcess: ReturnType<typeof runParallel> | undefined;\n\n if (options?.with) {\n parallelProcess = runParallel(options.with);\n // Handle the promise to avoid unhandled rejection\n parallelProcess.result.catch(() => {\n // Parallel process failed or was terminated\n process.exit(1);\n });\n }\n\n appLogger('Watching Intlayer content declarations');\n\n // Capture the watcher instance\n const watcher = watch({\n persistent: true,\n skipPrepare: options?.skipPrepare ?? false,\n });\n\n // Define a Graceful Shutdown function\n const handleShutdown = async () => {\n // Prevent multiple calls\n process.off('SIGINT', handleShutdown);\n process.off('SIGTERM', handleShutdown);\n\n appLogger('Stopping Intlayer watcher...');\n\n try {\n // Close the file watcher immediately to stop \"esbuild service not running\" errors\n await watcher.close();\n\n // If runParallel exposes the child process, we can try to kill it explicitly.\n // Even if it doesn't, process.exit() below usually cleans up attached children.\n if (parallelProcess && 'child' in parallelProcess) {\n // @ts-ignore - Assuming child exists on the return type if runParallel is based on spawn/exec\n parallelProcess.child?.kill('SIGTERM');\n }\n } catch (error) {\n console.error('Error during shutdown:', error);\n } finally {\n process.exit(0);\n }\n };\n\n // Attach Signal Listeners\n process.on('SIGINT', handleShutdown);\n process.on('SIGTERM', handleShutdown);\n};\n"],"mappings":";;;;;;;;;AAiBA,MAAa,0BAA0B,OAAO,YAA2B;CAEvE,MAAM,sFAD0B,SAAS,cAAc,CACjB;CAGtC,IAAI;AAEJ,KAAI,SAAS,MAAM;AACjB,wDAA8B,QAAQ,KAAK;AAE3C,kBAAgB,OAAO,YAAY;AAEjC,WAAQ,KAAK,EAAE;IACf;;AAGJ,WAAU,yCAAyC;CAGnD,MAAM,wCAAgB;EACpB,YAAY;EACZ,aAAa,SAAS,eAAe;EACtC,CAAC;CAGF,MAAM,iBAAiB,YAAY;AAEjC,UAAQ,IAAI,UAAU,eAAe;AACrC,UAAQ,IAAI,WAAW,eAAe;AAEtC,YAAU,+BAA+B;AAEzC,MAAI;AAEF,SAAM,QAAQ,OAAO;AAIrB,OAAI,mBAAmB,WAAW,gBAEhC,iBAAgB,OAAO,KAAK,UAAU;WAEjC,OAAO;AACd,WAAQ,MAAM,0BAA0B,MAAM;YACtC;AACR,WAAQ,KAAK,EAAE;;;AAKnB,SAAQ,GAAG,UAAU,eAAe;AACpC,SAAQ,GAAG,WAAW,eAAe"}
@@ -1 +1 @@
- {"version":3,"file":"IntlayerEventListener.mjs","names":["intlayerConfig: IntlayerConfig","dataJSON: MessageEventData[]"],"sources":["../../src/IntlayerEventListener.ts"],"sourcesContent":["import { getIntlayerAPIProxy } from '@intlayer/api';\n// @ts-ignore: @intlayer/backend is not built yet\nimport type { DictionaryAPI, MessageEventData } from '@intlayer/backend';\nimport configuration from '@intlayer/config/built';\nimport { getAppLogger } from '@intlayer/config/client';\nimport type { IntlayerConfig } from '@intlayer/types';\nimport { EventSource } from 'eventsource';\n\nexport type IntlayerMessageEvent = MessageEvent;\n\n/**\n * IntlayerEventListener class to listen for dictionary changes via SSE (Server-Sent Events).\n *\n * Usage example:\n *\n * import { buildIntlayerDictionary } from './transpiler/declaration_file_to_dictionary/intlayer_dictionary';\n * import { IntlayerEventListener } from '@intlayer/api';\n *\n * export const checkDictionaryChanges = async () => {\n * // Instantiate the listener\n * const eventListener = new IntlayerEventListener();\n *\n * // Set up your callbacks\n * eventListener.onDictionaryChange = async (dictionary) => {\n * await buildIntlayerDictionary(dictionary);\n * };\n *\n * // Initialize the listener\n * await eventListener.initialize();\n *\n * // Optionally, clean up later when you’re done\n * // eventListener.cleanup();\n * };\n */\nexport class IntlayerEventListener {\n private appLogger = getAppLogger(configuration);\n\n private eventSource: EventSource | null = null;\n private reconnectAttempts = 0;\n private maxReconnectAttempts = 5;\n private reconnectDelay = 1000; // Start with 1 second\n private isManuallyDisconnected = false;\n private reconnectTimeout: NodeJS.Timeout | null = null;\n\n /**\n * Callback triggered when a Dictionary is ADDED.\n */\n public onDictionaryAdded?: (dictionary: DictionaryAPI) => any;\n\n /**\n * Callback triggered when a Dictionary is UPDATED.\n */\n public onDictionaryChange?: (dictionary: DictionaryAPI) => any;\n\n /**\n * Callback triggered when a Dictionary is DELETED.\n */\n public onDictionaryDeleted?: (dictionary: DictionaryAPI) => any;\n\n /**\n * Callback triggered when connection is established or re-established.\n */\n public onConnectionOpen?: () => any;\n\n /**\n * Callback triggered when connection encounters an error.\n */\n public onConnectionError?: (error: Event) => any;\n\n constructor(private intlayerConfig: IntlayerConfig = configuration) {\n this.appLogger = getAppLogger(this.intlayerConfig);\n }\n\n /**\n * Initializes the EventSource connection using the given intlayerConfig\n * (or the default config if none was provided).\n */\n public async initialize(): Promise<void> {\n this.isManuallyDisconnected = false;\n await this.connect();\n }\n\n /**\n * Establishes the EventSource connection with automatic reconnection support.\n */\n private async connect(): Promise<void> {\n try {\n const backendURL = this.intlayerConfig.editor.backendURL;\n\n // Retrieve the access token via proxy\n const accessToken = await getIntlayerAPIProxy(\n undefined,\n this.intlayerConfig\n ).oAuth.getOAuth2AccessToken();\n\n if (!accessToken) {\n throw new Error('Failed to retrieve access token');\n }\n\n const API_ROUTE = `${backendURL}/api/event-listener`;\n\n // Close existing connection if any\n if (this.eventSource) {\n this.eventSource.close();\n }\n\n this.eventSource = new EventSource(API_ROUTE, {\n fetch: (input, init) =>\n fetch(input, {\n ...init,\n headers: {\n ...(init?.headers ?? 
{}),\n Authorization: `Bearer ${accessToken.data?.accessToken}`,\n },\n }),\n });\n\n this.eventSource.onopen = () => {\n this.reconnectAttempts = 0;\n this.reconnectDelay = 1000; // Reset delay\n this.onConnectionOpen?.();\n };\n\n this.eventSource.onmessage = (event) => this.handleMessage(event);\n this.eventSource.onerror = (event) => this.handleError(event);\n } catch (_error) {\n this.appLogger('Failed to establish EventSource connection:', {\n level: 'error',\n });\n this.scheduleReconnect();\n }\n }\n\n /**\n * Cleans up (closes) the EventSource connection.\n */\n public cleanup(): void {\n this.isManuallyDisconnected = true;\n\n if (this.reconnectTimeout) {\n clearTimeout(this.reconnectTimeout);\n this.reconnectTimeout = null;\n }\n\n if (this.eventSource) {\n this.eventSource.close();\n this.eventSource = null;\n }\n }\n\n /**\n * Schedules a reconnection attempt with exponential backoff.\n */\n private scheduleReconnect(): void {\n if (\n this.isManuallyDisconnected ||\n this.reconnectAttempts >= this.maxReconnectAttempts\n ) {\n if (this.reconnectAttempts >= this.maxReconnectAttempts) {\n this.appLogger(\n [\n `Max reconnection attempts (${this.maxReconnectAttempts}) reached. Giving up.`,\n ],\n {\n level: 'error',\n }\n );\n }\n return;\n }\n\n this.reconnectAttempts++;\n const delay = this.reconnectDelay * 2 ** (this.reconnectAttempts - 1); // Exponential backoff\n\n this.appLogger(\n `Scheduling reconnection attempt ${this.reconnectAttempts}/${this.maxReconnectAttempts} in ${delay}ms`\n );\n\n this.reconnectTimeout = setTimeout(async () => {\n if (!this.isManuallyDisconnected) {\n await this.connect();\n }\n }, delay);\n }\n\n /**\n * Handles incoming SSE messages, parses the event data,\n * and invokes the appropriate callback.\n */\n private async handleMessage(event: IntlayerMessageEvent): Promise<void> {\n try {\n const { data } = event;\n\n const dataJSON: MessageEventData[] = JSON.parse(data);\n\n for (const dataEl of dataJSON) {\n switch (dataEl.object) {\n case 'DICTIONARY':\n switch (dataEl.status) {\n case 'ADDED':\n await this.onDictionaryAdded?.(dataEl.data);\n break;\n case 'UPDATED':\n await this.onDictionaryChange?.(dataEl.data);\n break;\n case 'DELETED':\n await this.onDictionaryDeleted?.(dataEl.data);\n break;\n default:\n this.appLogger(\n ['Unhandled dictionary status:', dataEl.status],\n {\n level: 'error',\n }\n );\n break;\n }\n break;\n default:\n this.appLogger(['Unknown object type:', dataEl.object], {\n level: 'error',\n });\n break;\n }\n }\n } catch (error) {\n this.appLogger(['Error processing dictionary update:', error], {\n level: 'error',\n });\n }\n }\n\n /**\n * Handles any SSE errors and attempts reconnection if appropriate.\n */\n private handleError(event: Event): void {\n const errorEvent = event as any;\n\n // Log detailed error information\n this.appLogger(\n [\n 'EventSource error:',\n {\n type: errorEvent.type,\n message: errorEvent.message,\n code: errorEvent.code,\n readyState: this.eventSource?.readyState,\n url: this.eventSource?.url,\n },\n ],\n {\n level: 'error',\n }\n );\n\n // Notify error callback\n this.onConnectionError?.(event);\n\n // Check if this is a connection close error\n const isConnectionClosed =\n errorEvent.type === 'error' &&\n (errorEvent.message?.includes('terminated') ||\n errorEvent.message?.includes('closed') ||\n this.eventSource?.readyState === EventSource.CLOSED);\n\n if (isConnectionClosed && !this.isManuallyDisconnected) {\n this.appLogger(\n 'Connection was terminated by server, attempting to 
reconnect...'\n );\n this.scheduleReconnect();\n } else {\n // For other types of errors, close the connection\n this.cleanup();\n }\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAkCA,IAAa,wBAAb,MAAmC;CACjC,AAAQ,YAAY,aAAa,cAAc;CAE/C,AAAQ,cAAkC;CAC1C,AAAQ,oBAAoB;CAC5B,AAAQ,uBAAuB;CAC/B,AAAQ,iBAAiB;CACzB,AAAQ,yBAAyB;CACjC,AAAQ,mBAA0C;;;;CAKlD,AAAO;;;;CAKP,AAAO;;;;CAKP,AAAO;;;;CAKP,AAAO;;;;CAKP,AAAO;CAEP,YAAY,AAAQA,iBAAiC,eAAe;EAAhD;AAClB,OAAK,YAAY,aAAa,KAAK,eAAe;;;;;;CAOpD,MAAa,aAA4B;AACvC,OAAK,yBAAyB;AAC9B,QAAM,KAAK,SAAS;;;;;CAMtB,MAAc,UAAyB;AACrC,MAAI;GACF,MAAM,aAAa,KAAK,eAAe,OAAO;GAG9C,MAAM,cAAc,MAAM,oBACxB,QACA,KAAK,eACN,CAAC,MAAM,sBAAsB;AAE9B,OAAI,CAAC,YACH,OAAM,IAAI,MAAM,kCAAkC;GAGpD,MAAM,YAAY,GAAG,WAAW;AAGhC,OAAI,KAAK,YACP,MAAK,YAAY,OAAO;AAG1B,QAAK,cAAc,IAAI,YAAY,WAAW,EAC5C,QAAQ,OAAO,SACb,MAAM,OAAO;IACX,GAAG;IACH,SAAS;KACP,GAAI,MAAM,WAAW,EAAE;KACvB,eAAe,UAAU,YAAY,MAAM;KAC5C;IACF,CAAC,EACL,CAAC;AAEF,QAAK,YAAY,eAAe;AAC9B,SAAK,oBAAoB;AACzB,SAAK,iBAAiB;AACtB,SAAK,oBAAoB;;AAG3B,QAAK,YAAY,aAAa,UAAU,KAAK,cAAc,MAAM;AACjE,QAAK,YAAY,WAAW,UAAU,KAAK,YAAY,MAAM;WACtD,QAAQ;AACf,QAAK,UAAU,+CAA+C,EAC5D,OAAO,SACR,CAAC;AACF,QAAK,mBAAmB;;;;;;CAO5B,AAAO,UAAgB;AACrB,OAAK,yBAAyB;AAE9B,MAAI,KAAK,kBAAkB;AACzB,gBAAa,KAAK,iBAAiB;AACnC,QAAK,mBAAmB;;AAG1B,MAAI,KAAK,aAAa;AACpB,QAAK,YAAY,OAAO;AACxB,QAAK,cAAc;;;;;;CAOvB,AAAQ,oBAA0B;AAChC,MACE,KAAK,0BACL,KAAK,qBAAqB,KAAK,sBAC/B;AACA,OAAI,KAAK,qBAAqB,KAAK,qBACjC,MAAK,UACH,CACE,8BAA8B,KAAK,qBAAqB,uBACzD,EACD,EACE,OAAO,SACR,CACF;AAEH;;AAGF,OAAK;EACL,MAAM,QAAQ,KAAK,iBAAiB,MAAM,KAAK,oBAAoB;AAEnE,OAAK,UACH,mCAAmC,KAAK,kBAAkB,GAAG,KAAK,qBAAqB,MAAM,MAAM,IACpG;AAED,OAAK,mBAAmB,WAAW,YAAY;AAC7C,OAAI,CAAC,KAAK,uBACR,OAAM,KAAK,SAAS;KAErB,MAAM;;;;;;CAOX,MAAc,cAAc,OAA4C;AACtE,MAAI;GACF,MAAM,EAAE,SAAS;GAEjB,MAAMC,WAA+B,KAAK,MAAM,KAAK;AAErD,QAAK,MAAM,UAAU,SACnB,SAAQ,OAAO,QAAf;IACE,KAAK;AACH,aAAQ,OAAO,QAAf;MACE,KAAK;AACH,aAAM,KAAK,oBAAoB,OAAO,KAAK;AAC3C;MACF,KAAK;AACH,aAAM,KAAK,qBAAqB,OAAO,KAAK;AAC5C;MACF,KAAK;AACH,aAAM,KAAK,sBAAsB,OAAO,KAAK;AAC7C;MACF;AACE,YAAK,UACH,CAAC,gCAAgC,OAAO,OAAO,EAC/C,EACE,OAAO,SACR,CACF;AACD;;AAEJ;IACF;AACE,UAAK,UAAU,CAAC,wBAAwB,OAAO,OAAO,EAAE,EACtD,OAAO,SACR,CAAC;AACF;;WAGC,OAAO;AACd,QAAK,UAAU,CAAC,uCAAuC,MAAM,EAAE,EAC7D,OAAO,SACR,CAAC;;;;;;CAON,AAAQ,YAAY,OAAoB;EACtC,MAAM,aAAa;AAGnB,OAAK,UACH,CACE,sBACA;GACE,MAAM,WAAW;GACjB,SAAS,WAAW;GACpB,MAAM,WAAW;GACjB,YAAY,KAAK,aAAa;GAC9B,KAAK,KAAK,aAAa;GACxB,CACF,EACD,EACE,OAAO,SACR,CACF;AAGD,OAAK,oBAAoB,MAAM;AAS/B,MALE,WAAW,SAAS,YACnB,WAAW,SAAS,SAAS,aAAa,IACzC,WAAW,SAAS,SAAS,SAAS,IACtC,KAAK,aAAa,eAAe,YAAY,WAEvB,CAAC,KAAK,wBAAwB;AACtD,QAAK,UACH,kEACD;AACD,QAAK,mBAAmB;QAGxB,MAAK,SAAS"}
+ {"version":3,"file":"IntlayerEventListener.mjs","names":[],"sources":["../../src/IntlayerEventListener.ts"],"sourcesContent":["import { getIntlayerAPIProxy } from '@intlayer/api';\n// @ts-ignore: @intlayer/backend is not built yet\nimport type { DictionaryAPI, MessageEventData } from '@intlayer/backend';\nimport configuration from '@intlayer/config/built';\nimport { getAppLogger } from '@intlayer/config/client';\nimport type { IntlayerConfig } from '@intlayer/types';\nimport { EventSource } from 'eventsource';\n\nexport type IntlayerMessageEvent = MessageEvent;\n\n/**\n * IntlayerEventListener class to listen for dictionary changes via SSE (Server-Sent Events).\n *\n * Usage example:\n *\n * import { buildIntlayerDictionary } from './transpiler/declaration_file_to_dictionary/intlayer_dictionary';\n * import { IntlayerEventListener } from '@intlayer/api';\n *\n * export const checkDictionaryChanges = async () => {\n * // Instantiate the listener\n * const eventListener = new IntlayerEventListener();\n *\n * // Set up your callbacks\n * eventListener.onDictionaryChange = async (dictionary) => {\n * await buildIntlayerDictionary(dictionary);\n * };\n *\n * // Initialize the listener\n * await eventListener.initialize();\n *\n * // Optionally, clean up later when you’re done\n * // eventListener.cleanup();\n * };\n */\nexport class IntlayerEventListener {\n private appLogger = getAppLogger(configuration);\n\n private eventSource: EventSource | null = null;\n private reconnectAttempts = 0;\n private maxReconnectAttempts = 5;\n private reconnectDelay = 1000; // Start with 1 second\n private isManuallyDisconnected = false;\n private reconnectTimeout: NodeJS.Timeout | null = null;\n\n /**\n * Callback triggered when a Dictionary is ADDED.\n */\n public onDictionaryAdded?: (dictionary: DictionaryAPI) => any;\n\n /**\n * Callback triggered when a Dictionary is UPDATED.\n */\n public onDictionaryChange?: (dictionary: DictionaryAPI) => any;\n\n /**\n * Callback triggered when a Dictionary is DELETED.\n */\n public onDictionaryDeleted?: (dictionary: DictionaryAPI) => any;\n\n /**\n * Callback triggered when connection is established or re-established.\n */\n public onConnectionOpen?: () => any;\n\n /**\n * Callback triggered when connection encounters an error.\n */\n public onConnectionError?: (error: Event) => any;\n\n constructor(private intlayerConfig: IntlayerConfig = configuration) {\n this.appLogger = getAppLogger(this.intlayerConfig);\n }\n\n /**\n * Initializes the EventSource connection using the given intlayerConfig\n * (or the default config if none was provided).\n */\n public async initialize(): Promise<void> {\n this.isManuallyDisconnected = false;\n await this.connect();\n }\n\n /**\n * Establishes the EventSource connection with automatic reconnection support.\n */\n private async connect(): Promise<void> {\n try {\n const backendURL = this.intlayerConfig.editor.backendURL;\n\n // Retrieve the access token via proxy\n const accessToken = await getIntlayerAPIProxy(\n undefined,\n this.intlayerConfig\n ).oAuth.getOAuth2AccessToken();\n\n if (!accessToken) {\n throw new Error('Failed to retrieve access token');\n }\n\n const API_ROUTE = `${backendURL}/api/event-listener`;\n\n // Close existing connection if any\n if (this.eventSource) {\n this.eventSource.close();\n }\n\n this.eventSource = new EventSource(API_ROUTE, {\n fetch: (input, init) =>\n fetch(input, {\n ...init,\n headers: {\n ...(init?.headers ?? 
{}),\n Authorization: `Bearer ${accessToken.data?.accessToken}`,\n },\n }),\n });\n\n this.eventSource.onopen = () => {\n this.reconnectAttempts = 0;\n this.reconnectDelay = 1000; // Reset delay\n this.onConnectionOpen?.();\n };\n\n this.eventSource.onmessage = (event) => this.handleMessage(event);\n this.eventSource.onerror = (event) => this.handleError(event);\n } catch (_error) {\n this.appLogger('Failed to establish EventSource connection:', {\n level: 'error',\n });\n this.scheduleReconnect();\n }\n }\n\n /**\n * Cleans up (closes) the EventSource connection.\n */\n public cleanup(): void {\n this.isManuallyDisconnected = true;\n\n if (this.reconnectTimeout) {\n clearTimeout(this.reconnectTimeout);\n this.reconnectTimeout = null;\n }\n\n if (this.eventSource) {\n this.eventSource.close();\n this.eventSource = null;\n }\n }\n\n /**\n * Schedules a reconnection attempt with exponential backoff.\n */\n private scheduleReconnect(): void {\n if (\n this.isManuallyDisconnected ||\n this.reconnectAttempts >= this.maxReconnectAttempts\n ) {\n if (this.reconnectAttempts >= this.maxReconnectAttempts) {\n this.appLogger(\n [\n `Max reconnection attempts (${this.maxReconnectAttempts}) reached. Giving up.`,\n ],\n {\n level: 'error',\n }\n );\n }\n return;\n }\n\n this.reconnectAttempts++;\n const delay = this.reconnectDelay * 2 ** (this.reconnectAttempts - 1); // Exponential backoff\n\n this.appLogger(\n `Scheduling reconnection attempt ${this.reconnectAttempts}/${this.maxReconnectAttempts} in ${delay}ms`\n );\n\n this.reconnectTimeout = setTimeout(async () => {\n if (!this.isManuallyDisconnected) {\n await this.connect();\n }\n }, delay);\n }\n\n /**\n * Handles incoming SSE messages, parses the event data,\n * and invokes the appropriate callback.\n */\n private async handleMessage(event: IntlayerMessageEvent): Promise<void> {\n try {\n const { data } = event;\n\n const dataJSON: MessageEventData[] = JSON.parse(data);\n\n for (const dataEl of dataJSON) {\n switch (dataEl.object) {\n case 'DICTIONARY':\n switch (dataEl.status) {\n case 'ADDED':\n await this.onDictionaryAdded?.(dataEl.data);\n break;\n case 'UPDATED':\n await this.onDictionaryChange?.(dataEl.data);\n break;\n case 'DELETED':\n await this.onDictionaryDeleted?.(dataEl.data);\n break;\n default:\n this.appLogger(\n ['Unhandled dictionary status:', dataEl.status],\n {\n level: 'error',\n }\n );\n break;\n }\n break;\n default:\n this.appLogger(['Unknown object type:', dataEl.object], {\n level: 'error',\n });\n break;\n }\n }\n } catch (error) {\n this.appLogger(['Error processing dictionary update:', error], {\n level: 'error',\n });\n }\n }\n\n /**\n * Handles any SSE errors and attempts reconnection if appropriate.\n */\n private handleError(event: Event): void {\n const errorEvent = event as any;\n\n // Log detailed error information\n this.appLogger(\n [\n 'EventSource error:',\n {\n type: errorEvent.type,\n message: errorEvent.message,\n code: errorEvent.code,\n readyState: this.eventSource?.readyState,\n url: this.eventSource?.url,\n },\n ],\n {\n level: 'error',\n }\n );\n\n // Notify error callback\n this.onConnectionError?.(event);\n\n // Check if this is a connection close error\n const isConnectionClosed =\n errorEvent.type === 'error' &&\n (errorEvent.message?.includes('terminated') ||\n errorEvent.message?.includes('closed') ||\n this.eventSource?.readyState === EventSource.CLOSED);\n\n if (isConnectionClosed && !this.isManuallyDisconnected) {\n this.appLogger(\n 'Connection was terminated by server, attempting to 
reconnect...'\n );\n this.scheduleReconnect();\n } else {\n // For other types of errors, close the connection\n this.cleanup();\n }\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAkCA,IAAa,wBAAb,MAAmC;CACjC,AAAQ,YAAY,aAAa,cAAc;CAE/C,AAAQ,cAAkC;CAC1C,AAAQ,oBAAoB;CAC5B,AAAQ,uBAAuB;CAC/B,AAAQ,iBAAiB;CACzB,AAAQ,yBAAyB;CACjC,AAAQ,mBAA0C;;;;CAKlD,AAAO;;;;CAKP,AAAO;;;;CAKP,AAAO;;;;CAKP,AAAO;;;;CAKP,AAAO;CAEP,YAAY,AAAQ,iBAAiC,eAAe;EAAhD;AAClB,OAAK,YAAY,aAAa,KAAK,eAAe;;;;;;CAOpD,MAAa,aAA4B;AACvC,OAAK,yBAAyB;AAC9B,QAAM,KAAK,SAAS;;;;;CAMtB,MAAc,UAAyB;AACrC,MAAI;GACF,MAAM,aAAa,KAAK,eAAe,OAAO;GAG9C,MAAM,cAAc,MAAM,oBACxB,QACA,KAAK,eACN,CAAC,MAAM,sBAAsB;AAE9B,OAAI,CAAC,YACH,OAAM,IAAI,MAAM,kCAAkC;GAGpD,MAAM,YAAY,GAAG,WAAW;AAGhC,OAAI,KAAK,YACP,MAAK,YAAY,OAAO;AAG1B,QAAK,cAAc,IAAI,YAAY,WAAW,EAC5C,QAAQ,OAAO,SACb,MAAM,OAAO;IACX,GAAG;IACH,SAAS;KACP,GAAI,MAAM,WAAW,EAAE;KACvB,eAAe,UAAU,YAAY,MAAM;KAC5C;IACF,CAAC,EACL,CAAC;AAEF,QAAK,YAAY,eAAe;AAC9B,SAAK,oBAAoB;AACzB,SAAK,iBAAiB;AACtB,SAAK,oBAAoB;;AAG3B,QAAK,YAAY,aAAa,UAAU,KAAK,cAAc,MAAM;AACjE,QAAK,YAAY,WAAW,UAAU,KAAK,YAAY,MAAM;WACtD,QAAQ;AACf,QAAK,UAAU,+CAA+C,EAC5D,OAAO,SACR,CAAC;AACF,QAAK,mBAAmB;;;;;;CAO5B,AAAO,UAAgB;AACrB,OAAK,yBAAyB;AAE9B,MAAI,KAAK,kBAAkB;AACzB,gBAAa,KAAK,iBAAiB;AACnC,QAAK,mBAAmB;;AAG1B,MAAI,KAAK,aAAa;AACpB,QAAK,YAAY,OAAO;AACxB,QAAK,cAAc;;;;;;CAOvB,AAAQ,oBAA0B;AAChC,MACE,KAAK,0BACL,KAAK,qBAAqB,KAAK,sBAC/B;AACA,OAAI,KAAK,qBAAqB,KAAK,qBACjC,MAAK,UACH,CACE,8BAA8B,KAAK,qBAAqB,uBACzD,EACD,EACE,OAAO,SACR,CACF;AAEH;;AAGF,OAAK;EACL,MAAM,QAAQ,KAAK,iBAAiB,MAAM,KAAK,oBAAoB;AAEnE,OAAK,UACH,mCAAmC,KAAK,kBAAkB,GAAG,KAAK,qBAAqB,MAAM,MAAM,IACpG;AAED,OAAK,mBAAmB,WAAW,YAAY;AAC7C,OAAI,CAAC,KAAK,uBACR,OAAM,KAAK,SAAS;KAErB,MAAM;;;;;;CAOX,MAAc,cAAc,OAA4C;AACtE,MAAI;GACF,MAAM,EAAE,SAAS;GAEjB,MAAM,WAA+B,KAAK,MAAM,KAAK;AAErD,QAAK,MAAM,UAAU,SACnB,SAAQ,OAAO,QAAf;IACE,KAAK;AACH,aAAQ,OAAO,QAAf;MACE,KAAK;AACH,aAAM,KAAK,oBAAoB,OAAO,KAAK;AAC3C;MACF,KAAK;AACH,aAAM,KAAK,qBAAqB,OAAO,KAAK;AAC5C;MACF,KAAK;AACH,aAAM,KAAK,sBAAsB,OAAO,KAAK;AAC7C;MACF;AACE,YAAK,UACH,CAAC,gCAAgC,OAAO,OAAO,EAC/C,EACE,OAAO,SACR,CACF;AACD;;AAEJ;IACF;AACE,UAAK,UAAU,CAAC,wBAAwB,OAAO,OAAO,EAAE,EACtD,OAAO,SACR,CAAC;AACF;;WAGC,OAAO;AACd,QAAK,UAAU,CAAC,uCAAuC,MAAM,EAAE,EAC7D,OAAO,SACR,CAAC;;;;;;CAON,AAAQ,YAAY,OAAoB;EACtC,MAAM,aAAa;AAGnB,OAAK,UACH,CACE,sBACA;GACE,MAAM,WAAW;GACjB,SAAS,WAAW;GACpB,MAAM,WAAW;GACjB,YAAY,KAAK,aAAa;GAC9B,KAAK,KAAK,aAAa;GACxB,CACF,EACD,EACE,OAAO,SACR,CACF;AAGD,OAAK,oBAAoB,MAAM;AAS/B,MALE,WAAW,SAAS,YACnB,WAAW,SAAS,SAAS,aAAa,IACzC,WAAW,SAAS,SAAS,SAAS,IACtC,KAAK,aAAa,eAAe,YAAY,WAEvB,CAAC,KAAK,wBAAwB;AACtD,QAAK,UACH,kEACD;AACD,QAAK,mBAAmB;QAGxB,MAAK,SAAS"}
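
The IntlayerEventListener.mjs.map hunk embeds an SSE client that reconnects with exponential backoff: the delay is `reconnectDelay * 2 ** (reconnectAttempts - 1)`, bounded by `maxReconnectAttempts`. A small sketch of that schedule with the defaults shown above (1 s base delay, 5 attempts):

```ts
// Backoff schedule used above: base delay doubled per attempt, so
// 1000, 2000, 4000, 8000, 16000 ms before giving up after 5 tries.
const backoffDelayMs = (baseMs: number, attempt: number): number =>
  baseMs * 2 ** (attempt - 1);

for (let attempt = 1; attempt <= 5; attempt++) {
  console.log(`attempt ${attempt}: ${backoffDelayMs(1000, attempt)} ms`);
}
```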
@@ -1 +1 @@
- {"version":3,"file":"build.mjs","names":["parallelProcess: ParallelHandle | null"],"sources":["../../src/build.ts"],"sourcesContent":["import {\n buildAndWatchIntlayer,\n type ParallelHandle,\n runParallel,\n} from '@intlayer/chokidar';\nimport {\n type GetConfigurationOptions,\n getConfiguration,\n} from '@intlayer/config';\n\ntype BuildOptions = {\n watch?: boolean;\n skipPrepare?: boolean;\n with?: string | string[];\n configOptions?: GetConfigurationOptions;\n};\n\n/**\n * Get locales dictionaries .content.{json|ts|tsx|js|jsx|mjs|cjs} and build the JSON dictionaries in the .intlayer directory.\n * Watch mode available to get the change in the .content.{json|ts|tsx|js|jsx|mjs|cjs}\n */\nexport const build = async (options?: BuildOptions) => {\n const config = getConfiguration(options?.configOptions);\n let parallelProcess: ParallelHandle | null = null;\n\n if (options?.with) {\n parallelProcess = runParallel(options.with);\n // Handle the promise to avoid unhandled rejection\n parallelProcess.result.catch(() => {\n // Parallel process failed or was terminated\n });\n }\n\n await buildAndWatchIntlayer({\n persistent: options?.watch ?? false,\n skipPrepare: options?.skipPrepare ?? false,\n configuration: config,\n });\n\n if (!options?.watch && parallelProcess) {\n parallelProcess.kill();\n }\n};\n"],"mappings":";;;;;;;;AAqBA,MAAa,QAAQ,OAAO,YAA2B;CACrD,MAAM,SAAS,iBAAiB,SAAS,cAAc;CACvD,IAAIA,kBAAyC;AAE7C,KAAI,SAAS,MAAM;AACjB,oBAAkB,YAAY,QAAQ,KAAK;AAE3C,kBAAgB,OAAO,YAAY,GAEjC;;AAGJ,OAAM,sBAAsB;EAC1B,YAAY,SAAS,SAAS;EAC9B,aAAa,SAAS,eAAe;EACrC,eAAe;EAChB,CAAC;AAEF,KAAI,CAAC,SAAS,SAAS,gBACrB,iBAAgB,MAAM"}
+ {"version":3,"file":"build.mjs","names":[],"sources":["../../src/build.ts"],"sourcesContent":["import {\n buildAndWatchIntlayer,\n type ParallelHandle,\n runParallel,\n} from '@intlayer/chokidar';\nimport {\n type GetConfigurationOptions,\n getConfiguration,\n} from '@intlayer/config';\n\ntype BuildOptions = {\n watch?: boolean;\n skipPrepare?: boolean;\n with?: string | string[];\n configOptions?: GetConfigurationOptions;\n};\n\n/**\n * Get locales dictionaries .content.{json|ts|tsx|js|jsx|mjs|cjs} and build the JSON dictionaries in the .intlayer directory.\n * Watch mode available to get the change in the .content.{json|ts|tsx|js|jsx|mjs|cjs}\n */\nexport const build = async (options?: BuildOptions) => {\n const config = getConfiguration(options?.configOptions);\n let parallelProcess: ParallelHandle | null = null;\n\n if (options?.with) {\n parallelProcess = runParallel(options.with);\n // Handle the promise to avoid unhandled rejection\n parallelProcess.result.catch(() => {\n // Parallel process failed or was terminated\n });\n }\n\n await buildAndWatchIntlayer({\n persistent: options?.watch ?? false,\n skipPrepare: options?.skipPrepare ?? false,\n configuration: config,\n });\n\n if (!options?.watch && parallelProcess) {\n parallelProcess.kill();\n }\n};\n"],"mappings":";;;;;;;;AAqBA,MAAa,QAAQ,OAAO,YAA2B;CACrD,MAAM,SAAS,iBAAiB,SAAS,cAAc;CACvD,IAAI,kBAAyC;AAE7C,KAAI,SAAS,MAAM;AACjB,oBAAkB,YAAY,QAAQ,KAAK;AAE3C,kBAAgB,OAAO,YAAY,GAEjC;;AAGJ,OAAM,sBAAsB;EAC1B,YAAY,SAAS,SAAS;EAC9B,aAAa,SAAS,eAAe;EACrC,eAAe;EAChB,CAAC;AAEF,KAAI,CAAC,SAAS,SAAS,gBACrB,iBAAgB,MAAM"}
@@ -1 +1 @@
- {"version":3,"file":"ci.mjs","names":["credentials: Record<string, { clientId: string; clientSecret: string }>"],"sources":["../../src/ci.ts"],"sourcesContent":["import { spawnSync } from 'node:child_process';\nimport { normalize, resolve } from 'node:path';\nimport { listProjects } from '@intlayer/chokidar';\nimport { logger } from '@intlayer/config';\n\n// Helper to detect the package manager used to run the command\nconst getPackageManagerCommand = () => {\n const userAgent = process.env.npm_config_user_agent;\n\n if (userAgent?.startsWith('bun')) {\n return { command: 'bun', args: ['intlayer'] }; // Bun runs local bins natively\n }\n if (userAgent?.startsWith('pnpm')) {\n return { command: 'pnpm', args: ['exec', 'intlayer'] }; // pnpm requires 'exec'\n }\n if (userAgent?.startsWith('yarn')) {\n return { command: 'yarn', args: ['run', 'intlayer'] }; // yarn requires 'run' or 'exec'\n }\n\n // Default fallback\n return { command: 'npx', args: ['intlayer'] };\n};\n\nexport const runCI = async (commands: string[]) => {\n const credentialsEnv = process.env.INTLAYER_PROJECT_CREDENTIALS;\n let credentials: Record<string, { clientId: string; clientSecret: string }> =\n {};\n\n // Parse Credentials (Optional now)\n if (credentialsEnv) {\n try {\n credentials = JSON.parse(credentialsEnv);\n } catch {\n logger(\n 'INTLAYER_PROJECT_CREDENTIALS is not valid JSON. Proceeding without credentials.',\n {\n level: 'warn',\n }\n );\n }\n }\n\n const cwd = process.cwd();\n\n // Discover Projects\n const { projectsPath } = await listProjects();\n\n if (projectsPath.length === 0) {\n logger('No Intlayer projects found.', { level: 'warn' });\n return;\n }\n\n // 3. Determine Context: Single Project vs All Projects\n // Check if the current directory matches one of the discovered project paths\n const currentProject = projectsPath.find((p) => cwd === p);\n const projectsToRun = currentProject ? [currentProject] : projectsPath;\n\n const { command, args: pmArgs } = getPackageManagerCommand();\n const finalArgs = [...pmArgs, ...commands];\n\n logger(`CI: Using package manager: ${command}`, { level: 'verbose' });\n\n if (currentProject) {\n logger(`CI: Detected project context: ${currentProject}`, {\n level: 'info',\n });\n } else {\n logger(\n `CI: No specific project context detected. 
Iterating over ${projectsToRun.length} discovered projects...`,\n {\n level: 'info',\n }\n );\n }\n\n let hasError = false;\n\n // Iterate and Execute\n for (const projectPath of projectsToRun) {\n // Attempt to match credentials to the project path\n // We check if the key (relative or absolute) matches the resolved project path\n const credsEntry = Object.entries(credentials).find(([key]) => {\n const absKey = resolve(key);\n return absKey === projectPath || projectPath.endsWith(normalize(key));\n });\n\n const creds = credsEntry?.[1];\n\n logger(`\\nCI: Executing for ${projectPath}...`, {\n level: 'info',\n });\n\n const envVars = { ...process.env };\n\n if (creds) {\n envVars.INTLAYER_CLIENT_ID = creds.clientId;\n envVars.INTLAYER_CLIENT_SECRET = creds.clientSecret;\n } else if (credentialsEnv) {\n logger(\n `CI: No matching credentials found for ${projectPath} in INTLAYER_PROJECT_CREDENTIALS.`,\n { level: 'verbose' }\n );\n }\n\n const result = spawnSync(command, finalArgs, {\n cwd: projectPath,\n stdio: 'inherit',\n env: envVars,\n });\n\n if (result.status !== 0) {\n logger(`CI: Failed for ${projectPath}`, {\n level: 'error',\n });\n hasError = true;\n }\n }\n\n if (hasError) process.exit(1);\n};\n"],"mappings":";;;;;;AAMA,MAAM,iCAAiC;CACrC,MAAM,YAAY,QAAQ,IAAI;AAE9B,KAAI,WAAW,WAAW,MAAM,CAC9B,QAAO;EAAE,SAAS;EAAO,MAAM,CAAC,WAAW;EAAE;AAE/C,KAAI,WAAW,WAAW,OAAO,CAC/B,QAAO;EAAE,SAAS;EAAQ,MAAM,CAAC,QAAQ,WAAW;EAAE;AAExD,KAAI,WAAW,WAAW,OAAO,CAC/B,QAAO;EAAE,SAAS;EAAQ,MAAM,CAAC,OAAO,WAAW;EAAE;AAIvD,QAAO;EAAE,SAAS;EAAO,MAAM,CAAC,WAAW;EAAE;;AAG/C,MAAa,QAAQ,OAAO,aAAuB;CACjD,MAAM,iBAAiB,QAAQ,IAAI;CACnC,IAAIA,cACF,EAAE;AAGJ,KAAI,eACF,KAAI;AACF,gBAAc,KAAK,MAAM,eAAe;SAClC;AACN,SACE,mFACA,EACE,OAAO,QACR,CACF;;CAIL,MAAM,MAAM,QAAQ,KAAK;CAGzB,MAAM,EAAE,iBAAiB,MAAM,cAAc;AAE7C,KAAI,aAAa,WAAW,GAAG;AAC7B,SAAO,+BAA+B,EAAE,OAAO,QAAQ,CAAC;AACxD;;CAKF,MAAM,iBAAiB,aAAa,MAAM,MAAM,QAAQ,EAAE;CAC1D,MAAM,gBAAgB,iBAAiB,CAAC,eAAe,GAAG;CAE1D,MAAM,EAAE,SAAS,MAAM,WAAW,0BAA0B;CAC5D,MAAM,YAAY,CAAC,GAAG,QAAQ,GAAG,SAAS;AAE1C,QAAO,8BAA8B,WAAW,EAAE,OAAO,WAAW,CAAC;AAErE,KAAI,eACF,QAAO,iCAAiC,kBAAkB,EACxD,OAAO,QACR,CAAC;KAEF,QACE,4DAA4D,cAAc,OAAO,0BACjF,EACE,OAAO,QACR,CACF;CAGH,IAAI,WAAW;AAGf,MAAK,MAAM,eAAe,eAAe;EAQvC,MAAM,QALa,OAAO,QAAQ,YAAY,CAAC,MAAM,CAAC,SAAS;AAE7D,UADe,QAAQ,IAAI,KACT,eAAe,YAAY,SAAS,UAAU,IAAI,CAAC;IACrE,GAEyB;AAE3B,SAAO,uBAAuB,YAAY,MAAM,EAC9C,OAAO,QACR,CAAC;EAEF,MAAM,UAAU,EAAE,GAAG,QAAQ,KAAK;AAElC,MAAI,OAAO;AACT,WAAQ,qBAAqB,MAAM;AACnC,WAAQ,yBAAyB,MAAM;aAC9B,eACT,QACE,yCAAyC,YAAY,oCACrD,EAAE,OAAO,WAAW,CACrB;AASH,MANe,UAAU,SAAS,WAAW;GAC3C,KAAK;GACL,OAAO;GACP,KAAK;GACN,CAAC,CAES,WAAW,GAAG;AACvB,UAAO,kBAAkB,eAAe,EACtC,OAAO,SACR,CAAC;AACF,cAAW;;;AAIf,KAAI,SAAU,SAAQ,KAAK,EAAE"}
+ {"version":3,"file":"ci.mjs","names":[],"sources":["../../src/ci.ts"],"sourcesContent":["import { spawnSync } from 'node:child_process';\nimport { normalize, resolve } from 'node:path';\nimport { listProjects } from '@intlayer/chokidar';\nimport { logger } from '@intlayer/config';\n\n// Helper to detect the package manager used to run the command\nconst getPackageManagerCommand = () => {\n const userAgent = process.env.npm_config_user_agent;\n\n if (userAgent?.startsWith('bun')) {\n return { command: 'bun', args: ['intlayer'] }; // Bun runs local bins natively\n }\n if (userAgent?.startsWith('pnpm')) {\n return { command: 'pnpm', args: ['exec', 'intlayer'] }; // pnpm requires 'exec'\n }\n if (userAgent?.startsWith('yarn')) {\n return { command: 'yarn', args: ['run', 'intlayer'] }; // yarn requires 'run' or 'exec'\n }\n\n // Default fallback\n return { command: 'npx', args: ['intlayer'] };\n};\n\nexport const runCI = async (commands: string[]) => {\n const credentialsEnv = process.env.INTLAYER_PROJECT_CREDENTIALS;\n let credentials: Record<string, { clientId: string; clientSecret: string }> =\n {};\n\n // Parse Credentials (Optional now)\n if (credentialsEnv) {\n try {\n credentials = JSON.parse(credentialsEnv);\n } catch {\n logger(\n 'INTLAYER_PROJECT_CREDENTIALS is not valid JSON. Proceeding without credentials.',\n {\n level: 'warn',\n }\n );\n }\n }\n\n const cwd = process.cwd();\n\n // Discover Projects\n const { projectsPath } = await listProjects();\n\n if (projectsPath.length === 0) {\n logger('No Intlayer projects found.', { level: 'warn' });\n return;\n }\n\n // 3. Determine Context: Single Project vs All Projects\n // Check if the current directory matches one of the discovered project paths\n const currentProject = projectsPath.find((p) => cwd === p);\n const projectsToRun = currentProject ? [currentProject] : projectsPath;\n\n const { command, args: pmArgs } = getPackageManagerCommand();\n const finalArgs = [...pmArgs, ...commands];\n\n logger(`CI: Using package manager: ${command}`, { level: 'verbose' });\n\n if (currentProject) {\n logger(`CI: Detected project context: ${currentProject}`, {\n level: 'info',\n });\n } else {\n logger(\n `CI: No specific project context detected. 
Iterating over ${projectsToRun.length} discovered projects...`,\n {\n level: 'info',\n }\n );\n }\n\n let hasError = false;\n\n // Iterate and Execute\n for (const projectPath of projectsToRun) {\n // Attempt to match credentials to the project path\n // We check if the key (relative or absolute) matches the resolved project path\n const credsEntry = Object.entries(credentials).find(([key]) => {\n const absKey = resolve(key);\n return absKey === projectPath || projectPath.endsWith(normalize(key));\n });\n\n const creds = credsEntry?.[1];\n\n logger(`\\nCI: Executing for ${projectPath}...`, {\n level: 'info',\n });\n\n const envVars = { ...process.env };\n\n if (creds) {\n envVars.INTLAYER_CLIENT_ID = creds.clientId;\n envVars.INTLAYER_CLIENT_SECRET = creds.clientSecret;\n } else if (credentialsEnv) {\n logger(\n `CI: No matching credentials found for ${projectPath} in INTLAYER_PROJECT_CREDENTIALS.`,\n { level: 'verbose' }\n );\n }\n\n const result = spawnSync(command, finalArgs, {\n cwd: projectPath,\n stdio: 'inherit',\n env: envVars,\n });\n\n if (result.status !== 0) {\n logger(`CI: Failed for ${projectPath}`, {\n level: 'error',\n });\n hasError = true;\n }\n }\n\n if (hasError) process.exit(1);\n};\n"],"mappings":";;;;;;AAMA,MAAM,iCAAiC;CACrC,MAAM,YAAY,QAAQ,IAAI;AAE9B,KAAI,WAAW,WAAW,MAAM,CAC9B,QAAO;EAAE,SAAS;EAAO,MAAM,CAAC,WAAW;EAAE;AAE/C,KAAI,WAAW,WAAW,OAAO,CAC/B,QAAO;EAAE,SAAS;EAAQ,MAAM,CAAC,QAAQ,WAAW;EAAE;AAExD,KAAI,WAAW,WAAW,OAAO,CAC/B,QAAO;EAAE,SAAS;EAAQ,MAAM,CAAC,OAAO,WAAW;EAAE;AAIvD,QAAO;EAAE,SAAS;EAAO,MAAM,CAAC,WAAW;EAAE;;AAG/C,MAAa,QAAQ,OAAO,aAAuB;CACjD,MAAM,iBAAiB,QAAQ,IAAI;CACnC,IAAI,cACF,EAAE;AAGJ,KAAI,eACF,KAAI;AACF,gBAAc,KAAK,MAAM,eAAe;SAClC;AACN,SACE,mFACA,EACE,OAAO,QACR,CACF;;CAIL,MAAM,MAAM,QAAQ,KAAK;CAGzB,MAAM,EAAE,iBAAiB,MAAM,cAAc;AAE7C,KAAI,aAAa,WAAW,GAAG;AAC7B,SAAO,+BAA+B,EAAE,OAAO,QAAQ,CAAC;AACxD;;CAKF,MAAM,iBAAiB,aAAa,MAAM,MAAM,QAAQ,EAAE;CAC1D,MAAM,gBAAgB,iBAAiB,CAAC,eAAe,GAAG;CAE1D,MAAM,EAAE,SAAS,MAAM,WAAW,0BAA0B;CAC5D,MAAM,YAAY,CAAC,GAAG,QAAQ,GAAG,SAAS;AAE1C,QAAO,8BAA8B,WAAW,EAAE,OAAO,WAAW,CAAC;AAErE,KAAI,eACF,QAAO,iCAAiC,kBAAkB,EACxD,OAAO,QACR,CAAC;KAEF,QACE,4DAA4D,cAAc,OAAO,0BACjF,EACE,OAAO,QACR,CACF;CAGH,IAAI,WAAW;AAGf,MAAK,MAAM,eAAe,eAAe;EAQvC,MAAM,QALa,OAAO,QAAQ,YAAY,CAAC,MAAM,CAAC,SAAS;AAE7D,UADe,QAAQ,IAAI,KACT,eAAe,YAAY,SAAS,UAAU,IAAI,CAAC;IACrE,GAEyB;AAE3B,SAAO,uBAAuB,YAAY,MAAM,EAC9C,OAAO,QACR,CAAC;EAEF,MAAM,UAAU,EAAE,GAAG,QAAQ,KAAK;AAElC,MAAI,OAAO;AACT,WAAQ,qBAAqB,MAAM;AACnC,WAAQ,yBAAyB,MAAM;aAC9B,eACT,QACE,yCAAyC,YAAY,oCACrD,EAAE,OAAO,WAAW,CACrB;AASH,MANe,UAAU,SAAS,WAAW;GAC3C,KAAK;GACL,OAAO;GACP,KAAK;GACN,CAAC,CAES,WAAW,GAAG;AACvB,UAAO,kBAAkB,eAAe,EACtC,OAAO,SACR,CAAC;AACF,cAAW;;;AAIf,KAAI,SAAU,SAAQ,KAAK,EAAE"}
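
The ci.mjs.map hunk embeds src/ci.ts, which picks the package-manager binary from `npm_config_user_agent`: npm-compatible clients prefix that variable with their own name and version (e.g. `pnpm/9.1.0 npm/? node/v20.11.0 ...`), so the leading token identifies the launcher. A minimal sketch of the detection, mirroring the fallbacks in the hunk above:

```ts
// Sketch of the detection in ci.ts: inspect the user-agent string set by
// the package manager that launched the script; default to npx otherwise.
const detectPackageManager = (): string => {
  const userAgent = process.env.npm_config_user_agent ?? '';
  if (userAgent.startsWith('bun')) return 'bun';
  if (userAgent.startsWith('pnpm')) return 'pnpm';
  if (userAgent.startsWith('yarn')) return 'yarn';
  return 'npx'; // default fallback, as in the hunk above
};
```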