@intlayer/cli 7.5.13 → 7.5.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/IntlayerEventListener.cjs.map +1 -1
- package/dist/cjs/build.cjs.map +1 -1
- package/dist/cjs/ci.cjs.map +1 -1
- package/dist/cjs/cli.cjs.map +1 -1
- package/dist/cjs/editor.cjs.map +1 -1
- package/dist/cjs/fill/deepMergeContent.cjs.map +1 -1
- package/dist/cjs/fill/fill.cjs.map +1 -1
- package/dist/cjs/fill/formatAutoFilledFilePath.cjs.map +1 -1
- package/dist/cjs/fill/formatFillData.cjs.map +1 -1
- package/dist/cjs/fill/getFilterMissingContentPerLocale.cjs.map +1 -1
- package/dist/cjs/fill/listTranslationsTasks.cjs.map +1 -1
- package/dist/cjs/fill/mergeChunks.cjs.map +1 -1
- package/dist/cjs/fill/translateDictionary.cjs.map +1 -1
- package/dist/cjs/fill/writeFill.cjs.map +1 -1
- package/dist/cjs/liveSync.cjs.map +1 -1
- package/dist/cjs/pull.cjs.map +1 -1
- package/dist/cjs/push/pullLog.cjs.map +1 -1
- package/dist/cjs/push/push.cjs.map +1 -1
- package/dist/cjs/pushLog.cjs.map +1 -1
- package/dist/cjs/reviewDoc/reviewDoc.cjs.map +1 -1
- package/dist/cjs/test/listMissingTranslations.cjs.map +1 -1
- package/dist/cjs/transform.cjs.map +1 -1
- package/dist/cjs/translateDoc/translateDoc.cjs.map +1 -1
- package/dist/cjs/translateDoc/translateFile.cjs.map +1 -1
- package/dist/cjs/translateDoc/validation.cjs.map +1 -1
- package/dist/cjs/translation-alignment/alignBlocks.cjs.map +1 -1
- package/dist/cjs/translation-alignment/pipeline.cjs.map +1 -1
- package/dist/cjs/translation-alignment/planActions.cjs.map +1 -1
- package/dist/cjs/translation-alignment/rebuildDocument.cjs.map +1 -1
- package/dist/cjs/translation-alignment/segmentDocument.cjs.map +1 -1
- package/dist/cjs/utils/calculateChunks.cjs.map +1 -1
- package/dist/cjs/utils/chunkInference.cjs.map +1 -1
- package/dist/cjs/utils/getIsFileUpdatedRecently.cjs.map +1 -1
- package/dist/cjs/utils/listSpecialChars.cjs.map +1 -1
- package/dist/cjs/utils/mapChunksBetweenFiles.cjs.map +1 -1
- package/dist/cjs/utils/reorderParagraphs.cjs.map +1 -1
- package/dist/cjs/utils/setupAI.cjs.map +1 -1
- package/dist/cjs/watch.cjs.map +1 -1
- package/dist/esm/IntlayerEventListener.mjs.map +1 -1
- package/dist/esm/build.mjs.map +1 -1
- package/dist/esm/ci.mjs.map +1 -1
- package/dist/esm/cli.mjs.map +1 -1
- package/dist/esm/editor.mjs.map +1 -1
- package/dist/esm/fill/deepMergeContent.mjs.map +1 -1
- package/dist/esm/fill/fill.mjs.map +1 -1
- package/dist/esm/fill/formatAutoFilledFilePath.mjs.map +1 -1
- package/dist/esm/fill/formatFillData.mjs.map +1 -1
- package/dist/esm/fill/getFilterMissingContentPerLocale.mjs.map +1 -1
- package/dist/esm/fill/listTranslationsTasks.mjs.map +1 -1
- package/dist/esm/fill/mergeChunks.mjs.map +1 -1
- package/dist/esm/fill/translateDictionary.mjs.map +1 -1
- package/dist/esm/fill/writeFill.mjs.map +1 -1
- package/dist/esm/liveSync.mjs.map +1 -1
- package/dist/esm/pull.mjs.map +1 -1
- package/dist/esm/push/pullLog.mjs.map +1 -1
- package/dist/esm/push/push.mjs.map +1 -1
- package/dist/esm/pushLog.mjs.map +1 -1
- package/dist/esm/reviewDoc/reviewDoc.mjs.map +1 -1
- package/dist/esm/test/listMissingTranslations.mjs.map +1 -1
- package/dist/esm/transform.mjs.map +1 -1
- package/dist/esm/translateDoc/translateDoc.mjs.map +1 -1
- package/dist/esm/translateDoc/translateFile.mjs.map +1 -1
- package/dist/esm/translateDoc/validation.mjs.map +1 -1
- package/dist/esm/translation-alignment/alignBlocks.mjs.map +1 -1
- package/dist/esm/translation-alignment/pipeline.mjs.map +1 -1
- package/dist/esm/translation-alignment/planActions.mjs.map +1 -1
- package/dist/esm/translation-alignment/rebuildDocument.mjs.map +1 -1
- package/dist/esm/translation-alignment/segmentDocument.mjs.map +1 -1
- package/dist/esm/utils/calculateChunks.mjs.map +1 -1
- package/dist/esm/utils/chunkInference.mjs.map +1 -1
- package/dist/esm/utils/getIsFileUpdatedRecently.mjs.map +1 -1
- package/dist/esm/utils/listSpecialChars.mjs.map +1 -1
- package/dist/esm/utils/mapChunksBetweenFiles.mjs.map +1 -1
- package/dist/esm/utils/reorderParagraphs.mjs.map +1 -1
- package/dist/esm/utils/setupAI.mjs.map +1 -1
- package/dist/esm/watch.mjs.map +1 -1
- package/dist/types/pull.d.ts.map +1 -1
- package/dist/types/translation-alignment/rebuildDocument.d.ts.map +1 -1
- package/package.json +14 -14
package/dist/esm/translation-alignment/segmentDocument.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"segmentDocument.mjs","names":[
+
{"version":3,"file":"segmentDocument.mjs","names":[],"sources":["../../../src/translation-alignment/segmentDocument.ts"],"sourcesContent":["import type { Block } from './types';\n\nconst isBlankLine = (line: string): boolean => line.trim().length === 0;\nconst isFencedCodeDelimiter = (line: string): boolean => /^\\s*```/.test(line);\nconst isHeading = (line: string): boolean => /^\\s*#{1,6}\\s+/.test(line);\nconst isFrontmatterDelimiter = (line: string): boolean =>\n /^\\s*---\\s*$/.test(line);\nconst trimTrailingNewlines = (text: string): string =>\n text.replace(/\\n+$/g, '\\n');\n\nexport const segmentDocument = (text: string): Block[] => {\n const lines = text.split('\\n');\n const blocks: Block[] = [];\n\n let index = 0;\n let insideCodeBlock = false;\n\n // Buffers\n let currentSectionLines: string[] = [];\n let currentSectionStartLine = 1;\n\n const flushCurrentSection = (endIndex: number) => {\n if (currentSectionLines.length > 0) {\n // Filter out leading blank lines from the block content to keep it clean,\n // but strictly speaking, we just want to ensure non-empty content.\n const rawContent = currentSectionLines.join('\\n');\n\n if (rawContent.trim().length > 0) {\n blocks.push({\n type: 'paragraph', // Generic type\n content: `${trimTrailingNewlines(rawContent)}\\n`,\n lineStart: currentSectionStartLine,\n lineEnd: endIndex,\n });\n }\n currentSectionLines = [];\n }\n };\n\n while (index < lines.length) {\n const currentLine = lines[index];\n\n // 1. Handle Frontmatter (Must be at start of file)\n if (blocks.length === 0 && isFrontmatterDelimiter(currentLine)) {\n const startLine = index + 1;\n const contentLines: string[] = [currentLine];\n index++;\n\n while (index < lines.length && !isFrontmatterDelimiter(lines[index])) {\n contentLines.push(lines[index]);\n index++;\n }\n\n if (index < lines.length && isFrontmatterDelimiter(lines[index])) {\n contentLines.push(lines[index]);\n index++;\n }\n\n blocks.push({\n type: 'paragraph',\n content: `${trimTrailingNewlines(contentLines.join('\\n'))}\\n`,\n lineStart: startLine,\n lineEnd: index,\n });\n continue;\n }\n\n // 2. Track Code Blocks (Headers inside code blocks are ignored)\n if (isFencedCodeDelimiter(currentLine)) {\n insideCodeBlock = !insideCodeBlock;\n }\n\n const isHeader = !insideCodeBlock && isHeading(currentLine);\n\n // 3. 
Split on Headers\n if (isHeader) {\n // If we have accumulated content, flush it as the previous block\n if (currentSectionLines.length > 0) {\n flushCurrentSection(index);\n }\n // Start a new section with this header\n currentSectionStartLine = index + 1;\n currentSectionLines = [currentLine];\n } else {\n // Accumulate content\n if (currentSectionLines.length === 0 && !isBlankLine(currentLine)) {\n currentSectionStartLine = index + 1;\n }\n currentSectionLines.push(currentLine);\n }\n\n index++;\n }\n\n // Flush remaining content\n flushCurrentSection(index);\n\n return blocks;\n};\n"],"mappings":";AAEA,MAAM,eAAe,SAA0B,KAAK,MAAM,CAAC,WAAW;AACtE,MAAM,yBAAyB,SAA0B,UAAU,KAAK,KAAK;AAC7E,MAAM,aAAa,SAA0B,gBAAgB,KAAK,KAAK;AACvE,MAAM,0BAA0B,SAC9B,cAAc,KAAK,KAAK;AAC1B,MAAM,wBAAwB,SAC5B,KAAK,QAAQ,SAAS,KAAK;AAE7B,MAAa,mBAAmB,SAA0B;CACxD,MAAM,QAAQ,KAAK,MAAM,KAAK;CAC9B,MAAM,SAAkB,EAAE;CAE1B,IAAI,QAAQ;CACZ,IAAI,kBAAkB;CAGtB,IAAI,sBAAgC,EAAE;CACtC,IAAI,0BAA0B;CAE9B,MAAM,uBAAuB,aAAqB;AAChD,MAAI,oBAAoB,SAAS,GAAG;GAGlC,MAAM,aAAa,oBAAoB,KAAK,KAAK;AAEjD,OAAI,WAAW,MAAM,CAAC,SAAS,EAC7B,QAAO,KAAK;IACV,MAAM;IACN,SAAS,GAAG,qBAAqB,WAAW,CAAC;IAC7C,WAAW;IACX,SAAS;IACV,CAAC;AAEJ,yBAAsB,EAAE;;;AAI5B,QAAO,QAAQ,MAAM,QAAQ;EAC3B,MAAM,cAAc,MAAM;AAG1B,MAAI,OAAO,WAAW,KAAK,uBAAuB,YAAY,EAAE;GAC9D,MAAM,YAAY,QAAQ;GAC1B,MAAM,eAAyB,CAAC,YAAY;AAC5C;AAEA,UAAO,QAAQ,MAAM,UAAU,CAAC,uBAAuB,MAAM,OAAO,EAAE;AACpE,iBAAa,KAAK,MAAM,OAAO;AAC/B;;AAGF,OAAI,QAAQ,MAAM,UAAU,uBAAuB,MAAM,OAAO,EAAE;AAChE,iBAAa,KAAK,MAAM,OAAO;AAC/B;;AAGF,UAAO,KAAK;IACV,MAAM;IACN,SAAS,GAAG,qBAAqB,aAAa,KAAK,KAAK,CAAC,CAAC;IAC1D,WAAW;IACX,SAAS;IACV,CAAC;AACF;;AAIF,MAAI,sBAAsB,YAAY,CACpC,mBAAkB,CAAC;AAMrB,MAHiB,CAAC,mBAAmB,UAAU,YAAY,EAG7C;AAEZ,OAAI,oBAAoB,SAAS,EAC/B,qBAAoB,MAAM;AAG5B,6BAA0B,QAAQ;AAClC,yBAAsB,CAAC,YAAY;SAC9B;AAEL,OAAI,oBAAoB,WAAW,KAAK,CAAC,YAAY,YAAY,CAC/D,2BAA0B,QAAQ;AAEpC,uBAAoB,KAAK,YAAY;;AAGvC;;AAIF,qBAAoB,MAAM;AAE1B,QAAO"}
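
For context, the `sourcesContent` embedded in this map is the full module source: `segmentDocument` splits a markdown document into a frontmatter block plus heading-delimited blocks, skipping headings inside fenced code. A minimal usage sketch — the deep import path is an assumption, since this module is internal to the package rather than a documented entry point:

```ts
// Hypothetical deep import into the package internals shown in this diff.
import { segmentDocument } from '@intlayer/cli/dist/esm/translation-alignment/segmentDocument.mjs';

const doc = ['---', 'title: Demo', '---', '# Intro', 'Some text.'].join('\n');

// Frontmatter forms its own block; each heading starts a new block.
// lineStart/lineEnd are 1-based line numbers into the original document.
for (const block of segmentDocument(doc)) {
  console.log(block.lineStart, block.lineEnd, JSON.stringify(block.content));
}
```
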
package/dist/esm/utils/calculateChunks.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"calculateChunks.mjs","names":[
+
{"version":3,"file":"calculateChunks.mjs","names":[],"sources":["../../../src/utils/calculateChunks.ts"],"sourcesContent":["import { splitTextByLines } from '@intlayer/chokidar';\n\nexport type ChunkLineResult = {\n lineStart: number;\n lineLength: number;\n charStart: number;\n charLength: number;\n content: string;\n};\n\nconst DEFAULT_MAX_CHARS_PER_CHUNK = 800;\nconst DEFAULT_OVERLAP_CHARS = 0;\n\nexport const chunkText = (\n text: string,\n maxCharsPerChunk: number = DEFAULT_MAX_CHARS_PER_CHUNK,\n overlapChars: number = DEFAULT_OVERLAP_CHARS\n): ChunkLineResult[] => {\n if (maxCharsPerChunk <= 0) {\n throw new Error('maxCharsPerChunk must be greater than 0');\n }\n\n const splittedText = splitTextByLines(text);\n\n // Split text into lines to facilitate the translation\n const lines: ChunkLineResult[] = [];\n let charStartAcc = 0;\n\n splittedText.forEach((line, index) => {\n lines.push({\n content: line,\n lineStart: index,\n lineLength: 1,\n charStart: charStartAcc,\n charLength: line.length,\n });\n charStartAcc += line.length;\n });\n\n // Group lines\n // as long as the chunk length is less than maxCharsPerChunk\n // if a line longer than maxCharsPerChunk, keep it alone\n // if a line is not longer than maxCharsPerChunk, it is grouped\n const groupedLines: ChunkLineResult[] = lines.reduce(\n (acc: ChunkLineResult[], line) => {\n // If this line alone exceeds maxCharsPerChunk, keep it separate\n if (line.content.length > maxCharsPerChunk) {\n acc.push(line);\n return acc;\n }\n\n // If we have no chunks yet, start with this line\n if (acc.length === 0) {\n acc.push(line);\n return acc;\n }\n\n // Get the last chunk\n const lastChunk = acc[acc.length - 1];\n\n // Calculate what the combined length would be (including newline character)\n const combinedLength = lastChunk.content.length + line.content.length;\n\n // If combining would exceed the limit, start a new chunk\n if (combinedLength > maxCharsPerChunk) {\n acc.push(line);\n return acc;\n }\n\n // Otherwise, combine with the last chunk\n const combinedContent = lastChunk.content + line.content;\n const updatedChunk = {\n content: combinedContent,\n lineStart: lastChunk.lineStart,\n lineLength: lastChunk.lineLength + line.lineLength,\n charStart: lastChunk.charStart,\n charLength: combinedContent.length,\n };\n\n acc[acc.length - 1] = updatedChunk;\n return acc;\n },\n []\n );\n\n // If one line is longer than maxCharsPerChunk, split it into multiple chunks\n const splittedLines: ChunkLineResult[] = groupedLines.flatMap((line) => {\n const chunk: ChunkLineResult[] = [];\n\n if (line.content.length <= maxCharsPerChunk) {\n chunk.push(line);\n return chunk;\n }\n\n for (let i = 0; i < line.content.length; i += maxCharsPerChunk) {\n const slicedContent = line.content.slice(i, i + maxCharsPerChunk);\n chunk.push({\n content: slicedContent,\n lineStart: line.lineStart,\n lineLength: 1,\n charStart: line.charStart + i,\n charLength: slicedContent.length,\n });\n }\n return chunk;\n });\n\n if (overlapChars === 0) return splittedLines;\n\n const overlapChunks: ChunkLineResult[] =\n splittedLines.length > 0 ? [splittedLines[0]] : [];\n\n for (let i = 1; i < splittedLines.length; i++) {\n const previousChunk = splittedLines[i - 1];\n const chunk = splittedLines[i];\n\n const overlapContent = previousChunk.content.slice(-overlapChars);\n const overlapLineNb = splitTextByLines(overlapContent).length;\n\n const overlapContentWithoutPartialLine = overlapContent.slice(\n overlapLineNb > 1 ? 
overlapContent.indexOf('\\n') + 1 : 0,\n overlapContent.length\n );\n\n const newContent = overlapContentWithoutPartialLine + chunk.content;\n const newLineLength = splitTextByLines(newContent).length;\n const lineDiff = chunk.lineLength - newLineLength;\n\n const overlappedChunk = {\n content: newContent,\n lineStart: chunk.lineStart + lineDiff,\n lineLength: chunk.lineLength - lineDiff,\n charStart: chunk.charStart - overlapContentWithoutPartialLine.length,\n charLength: newContent.length,\n };\n\n overlapChunks.push(overlappedChunk);\n }\n\n return overlapChunks;\n};\n"],"mappings":";;;AAUA,MAAM,8BAA8B;AACpC,MAAM,wBAAwB;AAE9B,MAAa,aACX,MACA,mBAA2B,6BAC3B,eAAuB,0BACD;AACtB,KAAI,oBAAoB,EACtB,OAAM,IAAI,MAAM,0CAA0C;CAG5D,MAAM,eAAe,iBAAiB,KAAK;CAG3C,MAAM,QAA2B,EAAE;CACnC,IAAI,eAAe;AAEnB,cAAa,SAAS,MAAM,UAAU;AACpC,QAAM,KAAK;GACT,SAAS;GACT,WAAW;GACX,YAAY;GACZ,WAAW;GACX,YAAY,KAAK;GAClB,CAAC;AACF,kBAAgB,KAAK;GACrB;CAiDF,MAAM,gBA3CkC,MAAM,QAC3C,KAAwB,SAAS;AAEhC,MAAI,KAAK,QAAQ,SAAS,kBAAkB;AAC1C,OAAI,KAAK,KAAK;AACd,UAAO;;AAIT,MAAI,IAAI,WAAW,GAAG;AACpB,OAAI,KAAK,KAAK;AACd,UAAO;;EAIT,MAAM,YAAY,IAAI,IAAI,SAAS;AAMnC,MAHuB,UAAU,QAAQ,SAAS,KAAK,QAAQ,SAG1C,kBAAkB;AACrC,OAAI,KAAK,KAAK;AACd,UAAO;;EAIT,MAAM,kBAAkB,UAAU,UAAU,KAAK;EACjD,MAAM,eAAe;GACnB,SAAS;GACT,WAAW,UAAU;GACrB,YAAY,UAAU,aAAa,KAAK;GACxC,WAAW,UAAU;GACrB,YAAY,gBAAgB;GAC7B;AAED,MAAI,IAAI,SAAS,KAAK;AACtB,SAAO;IAET,EAAE,CACH,CAGqD,SAAS,SAAS;EACtE,MAAM,QAA2B,EAAE;AAEnC,MAAI,KAAK,QAAQ,UAAU,kBAAkB;AAC3C,SAAM,KAAK,KAAK;AAChB,UAAO;;AAGT,OAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,QAAQ,KAAK,kBAAkB;GAC9D,MAAM,gBAAgB,KAAK,QAAQ,MAAM,GAAG,IAAI,iBAAiB;AACjE,SAAM,KAAK;IACT,SAAS;IACT,WAAW,KAAK;IAChB,YAAY;IACZ,WAAW,KAAK,YAAY;IAC5B,YAAY,cAAc;IAC3B,CAAC;;AAEJ,SAAO;GACP;AAEF,KAAI,iBAAiB,EAAG,QAAO;CAE/B,MAAM,gBACJ,cAAc,SAAS,IAAI,CAAC,cAAc,GAAG,GAAG,EAAE;AAEpD,MAAK,IAAI,IAAI,GAAG,IAAI,cAAc,QAAQ,KAAK;EAC7C,MAAM,gBAAgB,cAAc,IAAI;EACxC,MAAM,QAAQ,cAAc;EAE5B,MAAM,iBAAiB,cAAc,QAAQ,MAAM,CAAC,aAAa;EACjE,MAAM,gBAAgB,iBAAiB,eAAe,CAAC;EAEvD,MAAM,mCAAmC,eAAe,MACtD,gBAAgB,IAAI,eAAe,QAAQ,KAAK,GAAG,IAAI,GACvD,eAAe,OAChB;EAED,MAAM,aAAa,mCAAmC,MAAM;EAC5D,MAAM,gBAAgB,iBAAiB,WAAW,CAAC;EACnD,MAAM,WAAW,MAAM,aAAa;EAEpC,MAAM,kBAAkB;GACtB,SAAS;GACT,WAAW,MAAM,YAAY;GAC7B,YAAY,MAAM,aAAa;GAC/B,WAAW,MAAM,YAAY,iCAAiC;GAC9D,YAAY,WAAW;GACxB;AAED,gBAAc,KAAK,gBAAgB;;AAGrC,QAAO"}
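
The embedded `chunkText` groups consecutive lines greedily until a chunk would exceed `maxCharsPerChunk` (default 800), slices any single oversized line into `maxCharsPerChunk`-sized pieces, and can prepend an overlap from the previous chunk. A usage sketch, under the same assumption about the deep import path:

```ts
// Hypothetical deep import; chunkText is internal to @intlayer/cli.
import { chunkText } from '@intlayer/cli/dist/esm/utils/calculateChunks.mjs';

const text = 'short line\n' + 'y'.repeat(2000) + '\nanother short line\n';

// The 2000-char line cannot be grouped with anything, so it is sliced into
// 800-char pieces; the short lines around it become their own small chunks.
for (const chunk of chunkText(text, 800, 0)) {
  console.log(chunk.lineStart, chunk.charStart, chunk.charLength);
}

// A non-positive maxCharsPerChunk throws:
// chunkText(text, 0); // Error: maxCharsPerChunk must be greater than 0
```
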
package/dist/esm/utils/chunkInference.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"chunkInference.mjs","names":["
+
{"version":3,"file":"chunkInference.mjs","names":["response","fileContent"],"sources":["../../../src/utils/chunkInference.ts"],"sourcesContent":["import type { AIConfig, AIOptions } from '@intlayer/ai';\nimport { getIntlayerAPIProxy, type Messages } from '@intlayer/api';\nimport { retryManager } from '@intlayer/config';\nimport type { IntlayerConfig } from '@intlayer/types';\nimport type { AIClient } from './setupAI';\n\ntype ChunkInferenceResult = {\n fileContent: string;\n tokenUsed: number;\n};\n\n/**\n * Translates a single chunk via the OpenAI API.\n * Includes retry logic if the call fails.\n */\nexport const chunkInference = async (\n messages: Messages,\n aiOptions?: AIOptions,\n configuration?: IntlayerConfig,\n aiClient?: AIClient,\n aiConfig?: AIConfig\n): Promise<ChunkInferenceResult> => {\n let lastResult: ChunkInferenceResult;\n\n await retryManager(async () => {\n if (aiClient && aiConfig) {\n const response = await aiClient.customQuery({\n aiConfig,\n messages,\n });\n\n if (!response) {\n throw new Error('No response from AI API');\n }\n\n const { fileContent, tokenUsed } = response;\n\n lastResult = {\n fileContent: processContent(fileContent),\n tokenUsed,\n };\n\n return;\n }\n\n const api = getIntlayerAPIProxy(undefined, configuration);\n\n const response = await api.ai.customQuery({\n aiOptions,\n messages,\n });\n\n if (!response.data) {\n throw new Error('No response from AI API');\n }\n\n const { fileContent, tokenUsed } = response.data;\n\n lastResult = {\n fileContent: processContent(fileContent),\n tokenUsed,\n };\n })();\n\n return lastResult!;\n};\n\nconst processContent = (content: string) => {\n return content\n .replaceAll('///chunksStart///', '')\n .replaceAll('///chunkStart///', '')\n .replaceAll('///chunksEnd///', '')\n .replaceAll('///chunkEnd///', '')\n .replaceAll('///chunksStart///', '')\n .replaceAll('chunkStart///', '')\n .replaceAll('chunksEnd///', '')\n .replaceAll('chunkEnd///', '')\n .replaceAll('///chunksStart', '')\n .replaceAll('///chunkStart', '')\n .replaceAll('///chunksEnd', '')\n .replaceAll('///chunkEnd', '')\n .replaceAll('chunksStart', '')\n .replaceAll('chunkStart', '')\n .replaceAll('chunksEnd', '')\n .replaceAll('chunkEnd', '');\n};\n"],"mappings":";;;;;;;;AAeA,MAAa,iBAAiB,OAC5B,UACA,WACA,eACA,UACA,aACkC;CAClC,IAAI;AAEJ,OAAM,aAAa,YAAY;AAC7B,MAAI,YAAY,UAAU;GACxB,MAAMA,aAAW,MAAM,SAAS,YAAY;IAC1C;IACA;IACD,CAAC;AAEF,OAAI,CAACA,WACH,OAAM,IAAI,MAAM,0BAA0B;GAG5C,MAAM,EAAE,4BAAa,2BAAcA;AAEnC,gBAAa;IACX,aAAa,eAAeC,cAAY;IACxC;IACD;AAED;;EAKF,MAAM,WAAW,MAFL,oBAAoB,QAAW,cAAc,CAE9B,GAAG,YAAY;GACxC;GACA;GACD,CAAC;AAEF,MAAI,CAAC,SAAS,KACZ,OAAM,IAAI,MAAM,0BAA0B;EAG5C,MAAM,EAAE,aAAa,cAAc,SAAS;AAE5C,eAAa;GACX,aAAa,eAAe,YAAY;GACxC;GACD;GACD,EAAE;AAEJ,QAAO;;AAGT,MAAM,kBAAkB,YAAoB;AAC1C,QAAO,QACJ,WAAW,qBAAqB,GAAG,CACnC,WAAW,oBAAoB,GAAG,CAClC,WAAW,mBAAmB,GAAG,CACjC,WAAW,kBAAkB,GAAG,CAChC,WAAW,qBAAqB,GAAG,CACnC,WAAW,iBAAiB,GAAG,CAC/B,WAAW,gBAAgB,GAAG,CAC9B,WAAW,eAAe,GAAG,CAC7B,WAAW,kBAAkB,GAAG,CAChC,WAAW,iBAAiB,GAAG,CAC/B,WAAW,gBAAgB,GAAG,CAC9B,WAAW,eAAe,GAAG,CAC7B,WAAW,eAAe,GAAG,CAC7B,WAAW,cAAc,GAAG,CAC5B,WAAW,aAAa,GAAG,CAC3B,WAAW,YAAY,GAAG"}
package/dist/esm/utils/getIsFileUpdatedRecently.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"getIsFileUpdatedRecently.mjs","names":[
+
{"version":3,"file":"getIsFileUpdatedRecently.mjs","names":[],"sources":["../../../src/utils/getIsFileUpdatedRecently.ts"],"sourcesContent":["import { statSync } from 'node:fs';\n\nconst SKIP_RANGE_OF_LAST_UPDATE_TIME: number = 0; //2 * 60 * 60 * 1000; // 2 hours\n\n/**\n * Check if file was updated recently, to skip re-translation\n */\nexport const getIsFileUpdatedRecently = (localeFilePath: string): boolean => {\n const stats = statSync(localeFilePath);\n const lastModified = new Date(stats.mtime);\n const threshold = new Date(Date.now() - SKIP_RANGE_OF_LAST_UPDATE_TIME);\n\n return lastModified > threshold;\n};\n"],"mappings":";;;AAEA,MAAM,iCAAyC;;;;AAK/C,MAAa,4BAA4B,mBAAoC;CAC3E,MAAM,QAAQ,SAAS,eAAe;AAItC,QAHqB,IAAI,KAAK,MAAM,MAAM,GACxB,IAAI,KAAK,KAAK,KAAK,GAAG,+BAA+B"}
package/dist/esm/utils/listSpecialChars.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"listSpecialChars.mjs","names":[
+
{"version":3,"file":"listSpecialChars.mjs","names":[],"sources":["../../../src/utils/listSpecialChars.ts"],"sourcesContent":["type ListCharResult = {\n char: string;\n /** First line index contained in this chunk (0-based) */\n lineStart: number;\n /** Start character index in the original text (0-based, inclusive)*/\n charStart: number;\n}[];\n\nconst SPECIAL_CHARS = [\n ' ',\n '\\\\',\n '|',\n '(',\n ')',\n '{',\n '}',\n '[',\n ']',\n '<',\n '>',\n '\"',\n '=',\n '+',\n '*',\n '&',\n '#',\n '%',\n '$',\n '!',\n '?',\n ':',\n ';',\n '~',\n];\n\nexport const listSpecialChars = (text: string): ListCharResult => {\n const results: ListCharResult = [];\n\n let lineIndex = 0;\n\n for (let i = 0; i < text.length; i++) {\n const currentChar = text[i];\n\n // Handle newline characters (\"\\n\"): treat them as a \"\\\\\" special char\n if (currentChar === '\\n') {\n results.push({\n char: '\\\\',\n lineStart: lineIndex,\n charStart: i,\n });\n\n // Move to the next line after recording the special char\n lineIndex++;\n continue;\n }\n\n // Check if the current character is one of the special characters\n if (SPECIAL_CHARS.includes(currentChar)) {\n results.push({\n char: currentChar,\n lineStart: lineIndex,\n charStart: i,\n });\n }\n }\n\n return results;\n};\n"],"mappings":";AAQA,MAAM,gBAAgB;CACpB;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD;AAED,MAAa,oBAAoB,SAAiC;CAChE,MAAM,UAA0B,EAAE;CAElC,IAAI,YAAY;AAEhB,MAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;EACpC,MAAM,cAAc,KAAK;AAGzB,MAAI,gBAAgB,MAAM;AACxB,WAAQ,KAAK;IACX,MAAM;IACN,WAAW;IACX,WAAW;IACZ,CAAC;AAGF;AACA;;AAIF,MAAI,cAAc,SAAS,YAAY,CACrC,SAAQ,KAAK;GACX,MAAM;GACN,WAAW;GACX,WAAW;GACZ,CAAC;;AAIN,QAAO"}
package/dist/esm/utils/mapChunksBetweenFiles.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"mapChunksBetweenFiles.mjs","names":["
+
{"version":3,"file":"mapChunksBetweenFiles.mjs","names":["i","j"],"sources":["../../../src/utils/mapChunksBetweenFiles.ts"],"sourcesContent":["import { splitTextByLines } from '@intlayer/chokidar';\nimport { type ChunkLineResult, chunkText } from './calculateChunks';\n\nexport interface ChunkMapping {\n baseChunk: ChunkLineResult;\n updatedChunk: ChunkLineResult | null; // null if the chunk was deleted\n hasChanges: boolean;\n}\n\n/**\n * Maps chunks from base file to corresponding chunks in updated file,\n * handling insertions, deletions, and modifications properly.\n */\nexport const mapChunksBetweenFiles = (\n baseFileContent: string,\n updatedFileContent: string,\n maxCharsPerChunk: number = 800,\n changedLines?: number[]\n): ChunkMapping[] => {\n const baseChunks = chunkText(baseFileContent, maxCharsPerChunk, 0);\n const baseLines = splitTextByLines(baseFileContent);\n const updatedLines = splitTextByLines(updatedFileContent);\n\n // Create a simple line mapping using LCS (Longest Common Subsequence) approach\n const lineMapping = createLineMapping(baseLines, updatedLines);\n\n return baseChunks.map((baseChunk): ChunkMapping => {\n // Map the base chunk's line range to the updated file\n const mappedRange = mapLineRange(\n baseChunk.lineStart,\n baseChunk.lineLength,\n lineMapping\n );\n\n if (!mappedRange) {\n // This chunk was completely deleted\n return {\n baseChunk,\n updatedChunk: null,\n hasChanges: true,\n };\n }\n\n // Create the corresponding chunk in the updated file\n const updatedChunk: ChunkLineResult = {\n lineStart: mappedRange.start,\n lineLength: mappedRange.length,\n charStart: 0, // Will be calculated when needed\n charLength: 0, // Will be calculated when needed\n content: extractLinesFromRange(\n updatedLines,\n mappedRange.start,\n mappedRange.length\n ),\n };\n\n // Calculate character positions\n updatedChunk.charStart = getCharStartForLine(\n updatedFileContent,\n updatedChunk.lineStart\n );\n updatedChunk.charLength = updatedChunk.content.length;\n\n // Determine if this chunk has changes\n const hasChanges = determineIfChunkHasChanges(\n baseChunk,\n updatedChunk,\n changedLines\n );\n\n return {\n baseChunk,\n updatedChunk,\n hasChanges,\n };\n });\n};\n\n/**\n * Creates a mapping between line numbers in base file and updated file\n * Returns a map where key = base line number, value = updated line number (or null if deleted)\n */\nconst createLineMapping = (\n baseLines: string[],\n updatedLines: string[]\n): Map<number, number | null> => {\n const mapping = new Map<number, number | null>();\n\n // Use a simple diff algorithm (similar to Myers algorithm but simplified)\n const dp: number[][] = Array(baseLines.length + 1)\n .fill(null)\n .map(() => Array(updatedLines.length + 1).fill(0));\n\n // Fill the DP table\n for (let i = 1; i <= baseLines.length; i++) {\n for (let j = 1; j <= updatedLines.length; j++) {\n if (baseLines[i - 1] === updatedLines[j - 1]) {\n dp[i][j] = dp[i - 1][j - 1] + 1;\n } else {\n dp[i][j] = Math.max(dp[i - 1][j], dp[i][j - 1]);\n }\n }\n }\n\n // Backtrack to create the mapping\n let i = baseLines.length;\n let j = updatedLines.length;\n\n while (i > 0 || j > 0) {\n if (i > 0 && j > 0 && baseLines[i - 1] === updatedLines[j - 1]) {\n // Lines match\n mapping.set(i - 1, j - 1);\n i--;\n j--;\n } else if (i > 0 && (j === 0 || dp[i - 1][j] >= dp[i][j - 1])) {\n // Line was deleted from base\n mapping.set(i - 1, null);\n i--;\n } else {\n // Line was added to updated (no mapping needed for base)\n j--;\n }\n }\n\n return 
mapping;\n};\n\n/**\n * Maps a line range from base file to updated file using the line mapping\n */\nconst mapLineRange = (\n baseStart: number,\n baseLength: number,\n lineMapping: Map<number, number | null>\n): { start: number; length: number } | null => {\n const mappedLines: number[] = [];\n\n for (let i = baseStart; i < baseStart + baseLength; i++) {\n const mappedLine = lineMapping.get(i);\n if (mappedLine !== null && mappedLine !== undefined) {\n mappedLines.push(mappedLine);\n }\n }\n\n if (mappedLines.length === 0) {\n return null; // All lines were deleted\n }\n\n // Find the continuous range in the mapped lines\n mappedLines.sort((a, b) => a - b);\n const start = mappedLines[0];\n const end = mappedLines[mappedLines.length - 1];\n\n return {\n start,\n length: end - start + 1,\n };\n};\n\n/**\n * Extracts lines from a range in the lines array\n */\nconst extractLinesFromRange = (\n lines: string[],\n start: number,\n length: number\n): string => {\n const endIndex = Math.min(start + length, lines.length);\n return lines.slice(start, endIndex).join('');\n};\n\n/**\n * Gets the character position where a line starts in the text\n */\nconst getCharStartForLine = (text: string, lineNumber: number): number => {\n const lines = splitTextByLines(text);\n let charStart = 0;\n\n for (let i = 0; i < Math.min(lineNumber, lines.length); i++) {\n charStart += lines[i].length;\n }\n\n return charStart;\n};\n\n/**\n * Determines if a chunk has changes based on git changed lines or content comparison\n */\nconst determineIfChunkHasChanges = (\n baseChunk: ChunkLineResult,\n updatedChunk: ChunkLineResult,\n changedLines?: number[]\n): boolean => {\n // If we have git changed lines, use them for precise detection\n if (changedLines && changedLines.length > 0) {\n return changedLines.some(\n (line) =>\n line >= updatedChunk.lineStart &&\n line < updatedChunk.lineStart + updatedChunk.lineLength\n );\n }\n\n // Fallback to content comparison\n return baseChunk.content !== 
updatedChunk.content;\n};\n"],"mappings":";;;;;;;;AAaA,MAAa,yBACX,iBACA,oBACA,mBAA2B,KAC3B,iBACmB;CACnB,MAAM,aAAa,UAAU,iBAAiB,kBAAkB,EAAE;CAClE,MAAM,YAAY,iBAAiB,gBAAgB;CACnD,MAAM,eAAe,iBAAiB,mBAAmB;CAGzD,MAAM,cAAc,kBAAkB,WAAW,aAAa;AAE9D,QAAO,WAAW,KAAK,cAA4B;EAEjD,MAAM,cAAc,aAClB,UAAU,WACV,UAAU,YACV,YACD;AAED,MAAI,CAAC,YAEH,QAAO;GACL;GACA,cAAc;GACd,YAAY;GACb;EAIH,MAAM,eAAgC;GACpC,WAAW,YAAY;GACvB,YAAY,YAAY;GACxB,WAAW;GACX,YAAY;GACZ,SAAS,sBACP,cACA,YAAY,OACZ,YAAY,OACb;GACF;AAGD,eAAa,YAAY,oBACvB,oBACA,aAAa,UACd;AACD,eAAa,aAAa,aAAa,QAAQ;AAS/C,SAAO;GACL;GACA;GACA,YATiB,2BACjB,WACA,cACA,aACD;GAMA;GACD;;;;;;AAOJ,MAAM,qBACJ,WACA,iBAC+B;CAC/B,MAAM,0BAAU,IAAI,KAA4B;CAGhD,MAAM,KAAiB,MAAM,UAAU,SAAS,EAAE,CAC/C,KAAK,KAAK,CACV,UAAU,MAAM,aAAa,SAAS,EAAE,CAAC,KAAK,EAAE,CAAC;AAGpD,MAAK,IAAIA,MAAI,GAAGA,OAAK,UAAU,QAAQ,MACrC,MAAK,IAAIC,MAAI,GAAGA,OAAK,aAAa,QAAQ,MACxC,KAAI,UAAUD,MAAI,OAAO,aAAaC,MAAI,GACxC,IAAGD,KAAGC,OAAK,GAAGD,MAAI,GAAGC,MAAI,KAAK;KAE9B,IAAGD,KAAGC,OAAK,KAAK,IAAI,GAAGD,MAAI,GAAGC,MAAI,GAAGD,KAAGC,MAAI,GAAG;CAMrD,IAAI,IAAI,UAAU;CAClB,IAAI,IAAI,aAAa;AAErB,QAAO,IAAI,KAAK,IAAI,EAClB,KAAI,IAAI,KAAK,IAAI,KAAK,UAAU,IAAI,OAAO,aAAa,IAAI,IAAI;AAE9D,UAAQ,IAAI,IAAI,GAAG,IAAI,EAAE;AACzB;AACA;YACS,IAAI,MAAM,MAAM,KAAK,GAAG,IAAI,GAAG,MAAM,GAAG,GAAG,IAAI,KAAK;AAE7D,UAAQ,IAAI,IAAI,GAAG,KAAK;AACxB;OAGA;AAIJ,QAAO;;;;;AAMT,MAAM,gBACJ,WACA,YACA,gBAC6C;CAC7C,MAAM,cAAwB,EAAE;AAEhC,MAAK,IAAI,IAAI,WAAW,IAAI,YAAY,YAAY,KAAK;EACvD,MAAM,aAAa,YAAY,IAAI,EAAE;AACrC,MAAI,eAAe,QAAQ,eAAe,OACxC,aAAY,KAAK,WAAW;;AAIhC,KAAI,YAAY,WAAW,EACzB,QAAO;AAIT,aAAY,MAAM,GAAG,MAAM,IAAI,EAAE;CACjC,MAAM,QAAQ,YAAY;AAG1B,QAAO;EACL;EACA,QAJU,YAAY,YAAY,SAAS,KAI7B,QAAQ;EACvB;;;;;AAMH,MAAM,yBACJ,OACA,OACA,WACW;CACX,MAAM,WAAW,KAAK,IAAI,QAAQ,QAAQ,MAAM,OAAO;AACvD,QAAO,MAAM,MAAM,OAAO,SAAS,CAAC,KAAK,GAAG;;;;;AAM9C,MAAM,uBAAuB,MAAc,eAA+B;CACxE,MAAM,QAAQ,iBAAiB,KAAK;CACpC,IAAI,YAAY;AAEhB,MAAK,IAAI,IAAI,GAAG,IAAI,KAAK,IAAI,YAAY,MAAM,OAAO,EAAE,IACtD,cAAa,MAAM,GAAG;AAGxB,QAAO;;;;;AAMT,MAAM,8BACJ,WACA,cACA,iBACY;AAEZ,KAAI,gBAAgB,aAAa,SAAS,EACxC,QAAO,aAAa,MACjB,SACC,QAAQ,aAAa,aACrB,OAAO,aAAa,YAAY,aAAa,WAChD;AAIH,QAAO,UAAU,YAAY,aAAa"}
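
`mapChunksBetweenFiles` chunks the base file, builds an LCS table over the two files' lines, and backtracks it to map each base chunk to its range in the updated file (`updatedChunk` is `null` when every line of the chunk was deleted). Usage sketch, with the same caveat on the import path:

```ts
// Hypothetical deep import; internal utility of @intlayer/cli.
import { mapChunksBetweenFiles } from '@intlayer/cli/dist/esm/utils/mapChunksBetweenFiles.mjs';

const base = 'intro\nbody one\noutro\n';
const updated = 'intro\nbody two\noutro\n';

// Without a changedLines hint, hasChanges falls back to comparing the
// mapped chunk's content against the base chunk's content.
for (const { baseChunk, updatedChunk, hasChanges } of mapChunksBetweenFiles(
  base,
  updated,
  800
)) {
  console.log(hasChanges, baseChunk.lineStart, updatedChunk?.content.trim());
}
```
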
package/dist/esm/utils/reorderParagraphs.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"reorderParagraphs.mjs","names":[
+
{"version":3,"file":"reorderParagraphs.mjs","names":[],"sources":["../../../src/utils/reorderParagraphs.ts"],"sourcesContent":["import { listSpecialChars } from './listSpecialChars';\n\n/**\n * Split a text into paragraphs.\n *\n * We consider a paragraph boundary to be a block of at least two consecutive\n * new-lines that is immediately followed by a non-white-space character. This\n * way, internal blank lines that are part of the same paragraph (e.g. a list\n * item that purposely contains a visual break) are preserved while true\n * paragraph breaks – the ones generated when calling `arr.join(\"\\n\\n\")` in\n * the tests – are still detected.\n */\nconst splitByParagraph = (text: string): string[] => {\n const paragraphs: string[] = [];\n\n // Capture the delimiter so that we can inspect how many new-lines it\n // contains. We know that the test strings only use LF, so we keep the\n // regex simple here.\n const tokens = text.split(/(\\n{2,})/);\n\n for (let i = 0; i < tokens.length; i++) {\n const token = tokens[i];\n\n // Even-indexed tokens are the actual paragraph contents.\n if (i % 2 === 0) {\n if (token) paragraphs.push(token);\n continue;\n }\n\n // Odd-indexed tokens are the delimiters (>= two consecutive new-lines).\n // The first and last pairs represent the natural separators that are\n // added when paragraphs are later joined with \"\\n\\n\". Any additional\n // pairs in between correspond to *explicit* blank paragraphs that were\n // present in the original text and must therefore be preserved.\n const pairsOfNewlines = Math.floor(token.length / 2);\n const blankParagraphs = Math.max(0, pairsOfNewlines - 2);\n\n for (let j = 0; j < blankParagraphs; j++) {\n paragraphs.push('\\n\\n');\n }\n }\n\n return paragraphs;\n};\n\n/**\n * Determine whether two paragraphs match – either exactly, or by sharing the\n * same \"special-character signature\".\n */\nconst paragraphMatches = (\n paragraph: string,\n baseParagraph: string,\n paragraphSignature: ReturnType<typeof listSpecialChars>,\n baseSignature: ReturnType<typeof listSpecialChars>\n): boolean => {\n if (paragraph === baseParagraph) return true;\n // fallback to special-character signature comparison\n if (paragraphSignature.length !== baseSignature.length) return false;\n\n for (let i = 0; i < paragraphSignature.length; i++) {\n if (paragraphSignature[i].char !== baseSignature[i].char) {\n return false;\n }\n }\n return true;\n};\n\n/**\n * Re-order `textToReorder` so that its paragraphs follow the ordering found in\n * `baseFileContent`, while preserving any extra paragraphs (those not present\n * in the base file) in a position that is intuitive for a human reader: right\n * after the closest preceding paragraph coming from the base file.\n */\nexport const reorderParagraphs = (\n textToReorder: string,\n baseFileContent: string\n): string => {\n // 1. Split both texts into paragraphs and pre-compute their signatures.\n const baseFileParagraphs = splitByParagraph(baseFileContent);\n const textToReorderParagraphs = splitByParagraph(textToReorder);\n\n const baseSignatures = baseFileParagraphs.map((p) => listSpecialChars(p));\n const textSignatures = textToReorderParagraphs.map((p) =>\n listSpecialChars(p)\n );\n\n // 2. For every paragraph in the text to reorder, find the *first* base\n // paragraph it matches. We only allow each base paragraph to be matched\n // once. 
Any further identical paragraphs will be treated as \"extra\" and\n // will be positioned later on, next to their closest neighbour.\n const firstMatchIndexForBase: number[] = Array(\n baseFileParagraphs.length\n ).fill(-1);\n const paragraphMatchedBaseIdx: (number | null)[] = Array(\n textToReorderParagraphs.length\n ).fill(null);\n\n for (let i = 0; i < textToReorderParagraphs.length; i++) {\n const paragraph = textToReorderParagraphs[i];\n const sig = textSignatures[i];\n\n // exact match pass first for performance\n let foundIdx = baseFileParagraphs.findIndex(\n (baseParagraph, idx) =>\n firstMatchIndexForBase[idx] === -1 && paragraph === baseParagraph\n );\n\n if (foundIdx === -1) {\n // fallback to the signature comparison\n foundIdx = baseFileParagraphs.findIndex(\n (baseParagraph, idx) =>\n firstMatchIndexForBase[idx] === -1 &&\n paragraphMatches(paragraph, baseParagraph, sig, baseSignatures[idx])\n );\n }\n\n if (foundIdx !== -1) {\n firstMatchIndexForBase[foundIdx] = i;\n paragraphMatchedBaseIdx[i] = foundIdx;\n }\n }\n\n // 3. For the paragraphs that *didn't* get matched to a base paragraph, we\n // record the highest-numbered base paragraph that was matched *before* it\n // in the original text. The extra paragraph will later be placed right\n // after that paragraph in the final ordering.\n const insertAfterBaseIdx: number[] = Array(\n textToReorderParagraphs.length\n ).fill(-1);\n let maxBaseIdxEncountered = -1;\n\n for (let i = 0; i < textToReorderParagraphs.length; i++) {\n const matchedBase = paragraphMatchedBaseIdx[i];\n\n if (matchedBase !== null) {\n if (matchedBase > maxBaseIdxEncountered) {\n maxBaseIdxEncountered = matchedBase;\n }\n } else {\n insertAfterBaseIdx[i] = maxBaseIdxEncountered;\n }\n }\n\n // 4. Build the final, reordered list of paragraphs.\n const result: string[] = [];\n\n // Helper: quickly retrieve all indices of paragraphs that should be inserted\n // after a given base index, while keeping their original order.\n const extraParagraphsBuckets: Record<number, number[]> = {};\n insertAfterBaseIdx.forEach((afterIdx, paragraphIdx) => {\n if (afterIdx === -1) return; // will be handled later (if any)\n extraParagraphsBuckets[afterIdx] = extraParagraphsBuckets[afterIdx] || [];\n extraParagraphsBuckets[afterIdx].push(paragraphIdx);\n });\n\n for (let bIdx = 0; bIdx < baseFileParagraphs.length; bIdx++) {\n const matchedParagraphIdx = firstMatchIndexForBase[bIdx];\n\n if (matchedParagraphIdx !== -1) {\n result.push(textToReorderParagraphs[matchedParagraphIdx]);\n }\n\n if (extraParagraphsBuckets[bIdx]) {\n extraParagraphsBuckets[bIdx].forEach((pIdx) => {\n result.push(textToReorderParagraphs[pIdx]);\n });\n }\n }\n\n // Finally, if there were extra paragraphs appearing *before* any matched\n // base paragraph (insertAfterBaseIdx === -1), we prepend them to the output\n // in their original order.\n const leadingExtras: string[] = [];\n insertAfterBaseIdx.forEach((afterIdx, pIdx) => {\n if (afterIdx === -1 && paragraphMatchedBaseIdx[pIdx] === null) {\n leadingExtras.push(textToReorderParagraphs[pIdx]);\n }\n });\n\n return [...leadingExtras, 
...result].join('\\n\\n');\n};\n"],"mappings":";;;;;;;;;;;;;AAYA,MAAM,oBAAoB,SAA2B;CACnD,MAAM,aAAuB,EAAE;CAK/B,MAAM,SAAS,KAAK,MAAM,WAAW;AAErC,MAAK,IAAI,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;EACtC,MAAM,QAAQ,OAAO;AAGrB,MAAI,IAAI,MAAM,GAAG;AACf,OAAI,MAAO,YAAW,KAAK,MAAM;AACjC;;EAQF,MAAM,kBAAkB,KAAK,MAAM,MAAM,SAAS,EAAE;EACpD,MAAM,kBAAkB,KAAK,IAAI,GAAG,kBAAkB,EAAE;AAExD,OAAK,IAAI,IAAI,GAAG,IAAI,iBAAiB,IACnC,YAAW,KAAK,OAAO;;AAI3B,QAAO;;;;;;AAOT,MAAM,oBACJ,WACA,eACA,oBACA,kBACY;AACZ,KAAI,cAAc,cAAe,QAAO;AAExC,KAAI,mBAAmB,WAAW,cAAc,OAAQ,QAAO;AAE/D,MAAK,IAAI,IAAI,GAAG,IAAI,mBAAmB,QAAQ,IAC7C,KAAI,mBAAmB,GAAG,SAAS,cAAc,GAAG,KAClD,QAAO;AAGX,QAAO;;;;;;;;AAST,MAAa,qBACX,eACA,oBACW;CAEX,MAAM,qBAAqB,iBAAiB,gBAAgB;CAC5D,MAAM,0BAA0B,iBAAiB,cAAc;CAE/D,MAAM,iBAAiB,mBAAmB,KAAK,MAAM,iBAAiB,EAAE,CAAC;CACzE,MAAM,iBAAiB,wBAAwB,KAAK,MAClD,iBAAiB,EAAE,CACpB;CAMD,MAAM,yBAAmC,MACvC,mBAAmB,OACpB,CAAC,KAAK,GAAG;CACV,MAAM,0BAA6C,MACjD,wBAAwB,OACzB,CAAC,KAAK,KAAK;AAEZ,MAAK,IAAI,IAAI,GAAG,IAAI,wBAAwB,QAAQ,KAAK;EACvD,MAAM,YAAY,wBAAwB;EAC1C,MAAM,MAAM,eAAe;EAG3B,IAAI,WAAW,mBAAmB,WAC/B,eAAe,QACd,uBAAuB,SAAS,MAAM,cAAc,cACvD;AAED,MAAI,aAAa,GAEf,YAAW,mBAAmB,WAC3B,eAAe,QACd,uBAAuB,SAAS,MAChC,iBAAiB,WAAW,eAAe,KAAK,eAAe,KAAK,CACvE;AAGH,MAAI,aAAa,IAAI;AACnB,0BAAuB,YAAY;AACnC,2BAAwB,KAAK;;;CAQjC,MAAM,qBAA+B,MACnC,wBAAwB,OACzB,CAAC,KAAK,GAAG;CACV,IAAI,wBAAwB;AAE5B,MAAK,IAAI,IAAI,GAAG,IAAI,wBAAwB,QAAQ,KAAK;EACvD,MAAM,cAAc,wBAAwB;AAE5C,MAAI,gBAAgB,MAClB;OAAI,cAAc,sBAChB,yBAAwB;QAG1B,oBAAmB,KAAK;;CAK5B,MAAM,SAAmB,EAAE;CAI3B,MAAM,yBAAmD,EAAE;AAC3D,oBAAmB,SAAS,UAAU,iBAAiB;AACrD,MAAI,aAAa,GAAI;AACrB,yBAAuB,YAAY,uBAAuB,aAAa,EAAE;AACzE,yBAAuB,UAAU,KAAK,aAAa;GACnD;AAEF,MAAK,IAAI,OAAO,GAAG,OAAO,mBAAmB,QAAQ,QAAQ;EAC3D,MAAM,sBAAsB,uBAAuB;AAEnD,MAAI,wBAAwB,GAC1B,QAAO,KAAK,wBAAwB,qBAAqB;AAG3D,MAAI,uBAAuB,MACzB,wBAAuB,MAAM,SAAS,SAAS;AAC7C,UAAO,KAAK,wBAAwB,MAAM;IAC1C;;CAON,MAAM,gBAA0B,EAAE;AAClC,oBAAmB,SAAS,UAAU,SAAS;AAC7C,MAAI,aAAa,MAAM,wBAAwB,UAAU,KACvD,eAAc,KAAK,wBAAwB,MAAM;GAEnD;AAEF,QAAO,CAAC,GAAG,eAAe,GAAG,OAAO,CAAC,KAAK,OAAO"}
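
`reorderParagraphs` re-sequences a translated text so its paragraphs follow the base file's order, matching paragraphs either exactly or by their special-character signature, and keeps unmatched extras next to their closest matched neighbour. A sketch, same import-path assumption:

```ts
// Hypothetical deep import; internal utility of @intlayer/cli.
import { reorderParagraphs } from '@intlayer/cli/dist/esm/utils/reorderParagraphs.mjs';

// '# Titre' matches '# Title' and '- élément (a)' matches '- item (a)' by
// signature ('#', spaces, parentheses), so the output follows base order.
const base = '# Title\n\n- item (a)\n\ntext';
const translated = 'texte\n\n# Titre\n\n- élément (a)';

console.log(reorderParagraphs(translated, base));
// -> '# Titre\n\n- élément (a)\n\ntexte'
```
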
package/dist/esm/utils/setupAI.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"setupAI.mjs","names":["
+
{"version":3,"file":"setupAI.mjs","names":["hasAIAccess"],"sources":["../../../src/utils/setupAI.ts"],"sourcesContent":["import type { AIConfig, AIOptions } from '@intlayer/ai';\nimport {\n ANSIColors,\n colorize,\n getAppLogger,\n type logger,\n} from '@intlayer/config';\nimport type { IntlayerConfig } from '@intlayer/types';\nimport { checkAIAccess } from './checkAccess';\n\nexport type AIClient = typeof import('@intlayer/ai');\n\ntype SetupAIResult = {\n aiClient?: AIClient;\n aiConfig?: AIConfig;\n isCustomAI: boolean;\n hasAIAccess: boolean;\n};\n\n// Disable warnings from the AI SDK\nglobalThis.AI_SDK_LOG_WARNINGS = false;\n\nconst logAIConfig = (aiOptions: AIOptions, appLogger: typeof logger) => {\n appLogger([\n colorize('Provider:', ANSIColors.GREY_DARK),\n colorize(aiOptions?.provider ?? '(default)', ANSIColors.BLUE),\n colorize('- Model:', ANSIColors.GREY_DARK),\n colorize(aiOptions?.model ?? '(default)', ANSIColors.BLUE),\n colorize('- API Key:', ANSIColors.GREY_DARK),\n colorize(aiOptions?.apiKey ? '✓' : '(not set)', ANSIColors.BLUE),\n ]);\n};\n\n/**\n * Checks if the @intlayer/ai package is available and configured when an API key is provided.\n * If API key is present but package is missing, logs a warning.\n * Also checks if the user has access to AI (either via local key or CMS auth).\n */\nexport const setupAI = async (\n configuration: IntlayerConfig,\n aiOptions?: AIOptions\n): Promise<SetupAIResult | undefined> => {\n const appLogger = getAppLogger(configuration);\n\n const isLocalAI =\n aiOptions?.apiKey ||\n aiOptions?.provider === 'ollama' ||\n configuration.ai?.apiKey ||\n configuration.ai?.provider === 'ollama';\n\n if (isLocalAI) {\n // Try to import the AI package for local AI usage\n let aiClient: AIClient | undefined;\n\n try {\n aiClient = await import('@intlayer/ai');\n } catch {\n // Package not installed - log warning and fall back to backend\n appLogger(\n [\n colorize('Using your API key, you can install the', ANSIColors.GREY),\n colorize('@intlayer/ai', ANSIColors.GREY_LIGHT),\n colorize(\n 'package to run the process locally, with no dependency on the Intlayer server',\n ANSIColors.GREY\n ),\n ],\n {\n level: 'warn',\n }\n );\n\n // Fall back to backend API check\n const hasAIAccess = await checkAIAccess(configuration, aiOptions);\n logAIConfig(aiOptions ?? {}, appLogger);\n return {\n isCustomAI: false,\n hasAIAccess,\n };\n }\n\n // Package found - now configure it (errors here should propagate, not fall back)\n appLogger([\n colorize('@intlayer/ai', ANSIColors.GREY_LIGHT),\n colorize('found - Run process locally', ANSIColors.GREY_DARK),\n ]);\n\n const aiConfig = await aiClient.getAIConfig({\n userOptions: aiOptions,\n accessType: ['public'],\n });\n\n logAIConfig(aiOptions ?? {}, appLogger);\n\n return {\n aiClient,\n aiConfig,\n isCustomAI: true,\n hasAIAccess: true, // Local AI always has access\n };\n }\n\n // No local AI configured - use backend API\n const hasAIAccess = await checkAIAccess(configuration, aiOptions);\n logAIConfig(aiOptions ?? 
{}, appLogger);\n\n return {\n isCustomAI: false,\n hasAIAccess,\n };\n};\n"],"mappings":";;;;AAoBA,WAAW,sBAAsB;AAEjC,MAAM,eAAe,WAAsB,cAA6B;AACtE,WAAU;EACR,SAAS,aAAa,WAAW,UAAU;EAC3C,SAAS,WAAW,YAAY,aAAa,WAAW,KAAK;EAC7D,SAAS,YAAY,WAAW,UAAU;EAC1C,SAAS,WAAW,SAAS,aAAa,WAAW,KAAK;EAC1D,SAAS,cAAc,WAAW,UAAU;EAC5C,SAAS,WAAW,SAAS,MAAM,aAAa,WAAW,KAAK;EACjE,CAAC;;;;;;;AAQJ,MAAa,UAAU,OACrB,eACA,cACuC;CACvC,MAAM,YAAY,aAAa,cAAc;AAQ7C,KALE,WAAW,UACX,WAAW,aAAa,YACxB,cAAc,IAAI,UAClB,cAAc,IAAI,aAAa,UAElB;EAEb,IAAI;AAEJ,MAAI;AACF,cAAW,MAAM,OAAO;UAClB;AAEN,aACE;IACE,SAAS,2CAA2C,WAAW,KAAK;IACpE,SAAS,gBAAgB,WAAW,WAAW;IAC/C,SACE,iFACA,WAAW,KACZ;IACF,EACD,EACE,OAAO,QACR,CACF;GAGD,MAAMA,gBAAc,MAAM,cAAc,eAAe,UAAU;AACjE,eAAY,aAAa,EAAE,EAAE,UAAU;AACvC,UAAO;IACL,YAAY;IACZ;IACD;;AAIH,YAAU,CACR,SAAS,gBAAgB,WAAW,WAAW,EAC/C,SAAS,+BAA+B,WAAW,UAAU,CAC9D,CAAC;EAEF,MAAM,WAAW,MAAM,SAAS,YAAY;GAC1C,aAAa;GACb,YAAY,CAAC,SAAS;GACvB,CAAC;AAEF,cAAY,aAAa,EAAE,EAAE,UAAU;AAEvC,SAAO;GACL;GACA;GACA,YAAY;GACZ,aAAa;GACd;;CAIH,MAAM,cAAc,MAAM,cAAc,eAAe,UAAU;AACjE,aAAY,aAAa,EAAE,EAAE,UAAU;AAEvC,QAAO;EACL,YAAY;EACZ;EACD"}
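
`setupAI` decides between running AI locally (an API key or the `ollama` provider, plus an installed `@intlayer/ai` package) and falling back to the Intlayer backend, where only `hasAIAccess` is reported. A usage sketch; the deep import path is an assumption:

```ts
import { getConfiguration } from '@intlayer/config';
// Hypothetical deep import; setupAI is internal to @intlayer/cli.
import { setupAI } from '@intlayer/cli/dist/esm/utils/setupAI.mjs';

const configuration = getConfiguration();

// 'ollama' counts as local AI even without an API key; if @intlayer/ai is
// installed, the process runs locally and isCustomAI is true.
const ai = await setupAI(configuration, { provider: 'ollama' });

if (ai?.isCustomAI) {
  console.log('Running AI locally via @intlayer/ai');
} else {
  console.log('Falling back to the Intlayer backend, access:', ai?.hasAIAccess);
}
```
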
package/dist/esm/watch.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"watch.mjs","names":[
+
{"version":3,"file":"watch.mjs","names":[],"sources":["../../src/watch.ts"],"sourcesContent":["import { runParallel, watch } from '@intlayer/chokidar';\nimport {\n type GetConfigurationOptions,\n getAppLogger,\n getConfiguration,\n} from '@intlayer/config';\n\ntype WatchOptions = {\n skipPrepare?: boolean;\n with?: string | string[];\n configOptions?: GetConfigurationOptions;\n};\n\n/**\n * Get locales dictionaries .content.{json|ts|tsx|js|jsx|mjs|cjs} and build the JSON dictionaries in the .intlayer directory.\n * Watch mode available to get the change in the .content.{json|ts|tsx|js|jsx|mjs|cjs}\n */\nexport const watchContentDeclaration = async (options?: WatchOptions) => {\n const config = getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(config);\n\n // Store references to the child process\n let parallelProcess: ReturnType<typeof runParallel> | undefined;\n\n if (options?.with) {\n parallelProcess = runParallel(options.with);\n // Handle the promise to avoid unhandled rejection\n parallelProcess.result.catch(() => {\n // Parallel process failed or was terminated\n process.exit(1);\n });\n }\n\n appLogger('Watching Intlayer content declarations');\n\n // Capture the watcher instance\n const watcher = watch({\n persistent: true,\n skipPrepare: options?.skipPrepare ?? false,\n });\n\n // Define a Graceful Shutdown function\n const handleShutdown = async () => {\n // Prevent multiple calls\n process.off('SIGINT', handleShutdown);\n process.off('SIGTERM', handleShutdown);\n\n appLogger('Stopping Intlayer watcher...');\n\n try {\n // Close the file watcher immediately to stop \"esbuild service not running\" errors\n await watcher.close();\n\n // If runParallel exposes the child process, we can try to kill it explicitly.\n // Even if it doesn't, process.exit() below usually cleans up attached children.\n if (parallelProcess && 'child' in parallelProcess) {\n // @ts-ignore - Assuming child exists on the return type if runParallel is based on spawn/exec\n parallelProcess.child?.kill('SIGTERM');\n }\n } catch (error) {\n console.error('Error during shutdown:', error);\n } finally {\n process.exit(0);\n }\n };\n\n // Attach Signal Listeners\n process.on('SIGINT', handleShutdown);\n process.on('SIGTERM', handleShutdown);\n};\n"],"mappings":";;;;;;;;AAiBA,MAAa,0BAA0B,OAAO,YAA2B;CAEvE,MAAM,YAAY,aADH,iBAAiB,SAAS,cAAc,CACjB;CAGtC,IAAI;AAEJ,KAAI,SAAS,MAAM;AACjB,oBAAkB,YAAY,QAAQ,KAAK;AAE3C,kBAAgB,OAAO,YAAY;AAEjC,WAAQ,KAAK,EAAE;IACf;;AAGJ,WAAU,yCAAyC;CAGnD,MAAM,UAAU,MAAM;EACpB,YAAY;EACZ,aAAa,SAAS,eAAe;EACtC,CAAC;CAGF,MAAM,iBAAiB,YAAY;AAEjC,UAAQ,IAAI,UAAU,eAAe;AACrC,UAAQ,IAAI,WAAW,eAAe;AAEtC,YAAU,+BAA+B;AAEzC,MAAI;AAEF,SAAM,QAAQ,OAAO;AAIrB,OAAI,mBAAmB,WAAW,gBAEhC,iBAAgB,OAAO,KAAK,UAAU;WAEjC,OAAO;AACd,WAAQ,MAAM,0BAA0B,MAAM;YACtC;AACR,WAAQ,KAAK,EAAE;;;AAKnB,SAAQ,GAAG,UAAU,eAAe;AACpC,SAAQ,GAAG,WAAW,eAAe"}
package/dist/types/pull.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"pull.d.ts","names":[],"sources":["../../src/pull.ts"],"sourcesContent":[],"mappings":";;;KAmBK,WAAA;;EAAA,mBAAW,CAAA,EAAA,MAGE;EAcL,aA8OZ,CAAA,EA5PiB,
+
{"version":3,"file":"pull.d.ts","names":[],"sources":["../../src/pull.ts"],"sourcesContent":[],"mappings":";;;KAmBK,WAAA;;EAAA,mBAAW,CAAA,EAAA,MAGE;EAcL,aA8OZ,CAAA,EA5PiB,uBAciC;;;;;;cAAtC,iBAAwB,gBAAc"}
package/dist/types/translation-alignment/rebuildDocument.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"rebuildDocument.d.ts","names":[],"sources":["../../../src/translation-alignment/rebuildDocument.ts"],"sourcesContent":[],"mappings":";;;KAEY,eAAA;gBACI;EADJ,eAAA,EAAA,MAAe,GAAA,
+
{"version":3,"file":"rebuildDocument.d.ts","names":[],"sources":["../../../src/translation-alignment/rebuildDocument.ts"],"sourcesContent":[],"mappings":";;;KAEY,eAAA;gBACI;EADJ,eAAA,EAAA,MAAe,GAAA,IACX;EAKJ,WAAA,EAAA,MAAY;CACP;AACD,KAFJ,YAAA,GAEI;EACR,aAAA,EAFS,kBAET,EAAA;EAAa,YAAA,EADL,kBACK,EAAA;EAGT,IAAA,EAHJ,aAGiB;AAQzB,CAAA;AAAyC,KAR7B,aAAA,GAQ6B;EAAA,gBAAA,EAPrB,eAOqB,EAAA;CAAA;;;;AAiCzC;AACQ,cAlCK,wBAkCL,EAAA,CAAA;EAAA,aAAA;EAAA,YAAA;EAAA;AAAA,CAAA,EA9BL,YA8BK,EAAA,GA9BU,aA8BV;;;;cADK,8BACL,6BACQ,wCACI"}
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@intlayer/cli",
-  "version": "7.5.13",
+  "version": "7.5.14",
   "private": false,
   "description": "Provides uniform command-line interface scripts for Intlayer, used in packages like intlayer-cli and intlayer.",
   "keywords": [
@@ -68,31 +68,31 @@
   },
   "dependencies": {
     "@clack/prompts": "^0.11.0",
-    "@intlayer/ai": "7.5.
-    "@intlayer/api": "7.5.
-    "@intlayer/chokidar": "7.5.
-    "@intlayer/config": "7.5.
-    "@intlayer/core": "7.5.
-    "@intlayer/dictionaries-entry": "7.5.
-    "@intlayer/remote-dictionaries-entry": "7.5.
-    "@intlayer/types": "7.5.
-    "@intlayer/unmerged-dictionaries-entry": "7.5.
+    "@intlayer/ai": "7.5.14",
+    "@intlayer/api": "7.5.14",
+    "@intlayer/chokidar": "7.5.14",
+    "@intlayer/config": "7.5.14",
+    "@intlayer/core": "7.5.14",
+    "@intlayer/dictionaries-entry": "7.5.14",
+    "@intlayer/remote-dictionaries-entry": "7.5.14",
+    "@intlayer/types": "7.5.14",
+    "@intlayer/unmerged-dictionaries-entry": "7.5.14",
     "commander": "14.0.1",
     "eventsource": "3.0.7",
     "fast-glob": "3.3.3"
   },
   "devDependencies": {
-    "@types/node": "25.0.
+    "@types/node": "25.0.6",
     "@utils/ts-config": "1.0.4",
     "@utils/ts-config-types": "1.0.4",
     "@utils/tsdown-config": "1.0.4",
     "rimraf": "6.1.2",
-    "tsdown": "0.
+    "tsdown": "0.19.0",
     "typescript": "5.9.3",
-    "vitest": "4.0.
+    "vitest": "4.0.17"
   },
   "peerDependencies": {
-    "@intlayer/ai": "7.5.
+    "@intlayer/ai": "7.5.14"
   },
   "peerDependenciesMeta": {
     "@intlayer/ai": {