@intlayer/cli 5.5.10 → 5.5.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/cli.cjs +78 -6
- package/dist/cjs/cli.cjs.map +1 -1
- package/dist/cjs/cli.test.cjs +435 -0
- package/dist/cjs/cli.test.cjs.map +1 -0
- package/dist/cjs/fill.cjs +8 -12
- package/dist/cjs/fill.cjs.map +1 -1
- package/dist/cjs/index.cjs +5 -1
- package/dist/cjs/index.cjs.map +1 -1
- package/dist/cjs/prompts/REVIEW_PROMPT.md +37 -0
- package/dist/cjs/prompts/TRANSLATE_PROMPT.md +38 -0
- package/dist/cjs/pull.cjs +10 -2
- package/dist/cjs/pull.cjs.map +1 -1
- package/dist/cjs/pushConfig.cjs +5 -1
- package/dist/cjs/pushConfig.cjs.map +1 -1
- package/dist/cjs/reviewDoc.cjs +203 -0
- package/dist/cjs/reviewDoc.cjs.map +1 -0
- package/dist/cjs/translateDoc.cjs +201 -0
- package/dist/cjs/translateDoc.cjs.map +1 -0
- package/dist/cjs/utils/calculateChunks.cjs +120 -0
- package/dist/cjs/utils/calculateChunks.cjs.map +1 -0
- package/dist/cjs/utils/calculateChunks.test.cjs +104 -0
- package/dist/cjs/utils/calculateChunks.test.cjs.map +1 -0
- package/dist/cjs/utils/calculrateChunkTest.md +9 -0
- package/dist/cjs/utils/checkAIAccess.cjs +40 -0
- package/dist/cjs/utils/checkAIAccess.cjs.map +1 -0
- package/dist/cjs/utils/checkFileModifiedRange.cjs +97 -0
- package/dist/cjs/utils/checkFileModifiedRange.cjs.map +1 -0
- package/dist/cjs/utils/checkFileModifiedRange.test.cjs +175 -0
- package/dist/cjs/utils/checkFileModifiedRange.test.cjs.map +1 -0
- package/dist/cjs/utils/checkLastUpdateTime.cjs +33 -0
- package/dist/cjs/utils/checkLastUpdateTime.cjs.map +1 -0
- package/dist/cjs/utils/chunkInference.cjs +58 -0
- package/dist/cjs/utils/chunkInference.cjs.map +1 -0
- package/dist/cjs/utils/fixChunkStartEndChars.cjs +47 -0
- package/dist/cjs/utils/fixChunkStartEndChars.cjs.map +1 -0
- package/dist/cjs/utils/fixChunkStartEndChars.test.cjs +81 -0
- package/dist/cjs/utils/fixChunkStartEndChars.test.cjs.map +1 -0
- package/dist/cjs/utils/formatTimeDiff.cjs +46 -0
- package/dist/cjs/utils/formatTimeDiff.cjs.map +1 -0
- package/dist/cjs/utils/formatTimeDiff.test.cjs +32 -0
- package/dist/cjs/utils/formatTimeDiff.test.cjs.map +1 -0
- package/dist/cjs/utils/getChunk.cjs +77 -0
- package/dist/cjs/utils/getChunk.cjs.map +1 -0
- package/dist/cjs/utils/getChunk.test.cjs +46 -0
- package/dist/cjs/utils/getChunk.test.cjs.map +1 -0
- package/dist/cjs/utils/getIsFileUpdatedRecently.cjs +36 -0
- package/dist/cjs/utils/getIsFileUpdatedRecently.cjs.map +1 -0
- package/dist/cjs/utils/getOutputFilePath.cjs +89 -0
- package/dist/cjs/utils/getOutputFilePath.cjs.map +1 -0
- package/dist/cjs/utils/getOutputFilePath.test.cjs +73 -0
- package/dist/cjs/utils/getOutputFilePath.test.cjs.map +1 -0
- package/dist/cjs/utils/getParentPackageJSON.cjs +47 -0
- package/dist/cjs/utils/getParentPackageJSON.cjs.map +1 -0
- package/dist/cjs/utils/listSpecialChars.cjs +78 -0
- package/dist/cjs/utils/listSpecialChars.cjs.map +1 -0
- package/dist/cjs/utils/listSpecialChars.test.cjs +58 -0
- package/dist/cjs/utils/listSpecialChars.test.cjs.map +1 -0
- package/dist/cjs/utils/reorderParagraphs.cjs +125 -0
- package/dist/cjs/utils/reorderParagraphs.cjs.map +1 -0
- package/dist/cjs/utils/reorderParagraphs.test.cjs +71 -0
- package/dist/cjs/utils/reorderParagraphs.test.cjs.map +1 -0
- package/dist/cjs/utils/splitTextByLine.cjs +35 -0
- package/dist/cjs/utils/splitTextByLine.cjs.map +1 -0
- package/dist/cjs/utils/splitTextByLine.test.cjs +14 -0
- package/dist/cjs/utils/splitTextByLine.test.cjs.map +1 -0
- package/dist/esm/cli.mjs +79 -7
- package/dist/esm/cli.mjs.map +1 -1
- package/dist/esm/cli.test.mjs +412 -0
- package/dist/esm/cli.test.mjs.map +1 -0
- package/dist/esm/fill.mjs +8 -12
- package/dist/esm/fill.mjs.map +1 -1
- package/dist/esm/index.mjs +2 -0
- package/dist/esm/index.mjs.map +1 -1
- package/dist/esm/prompts/REVIEW_PROMPT.md +37 -0
- package/dist/esm/prompts/TRANSLATE_PROMPT.md +38 -0
- package/dist/esm/pull.mjs +10 -2
- package/dist/esm/pull.mjs.map +1 -1
- package/dist/esm/pushConfig.mjs +5 -1
- package/dist/esm/pushConfig.mjs.map +1 -1
- package/dist/esm/reviewDoc.mjs +172 -0
- package/dist/esm/reviewDoc.mjs.map +1 -0
- package/dist/esm/translateDoc.mjs +170 -0
- package/dist/esm/translateDoc.mjs.map +1 -0
- package/dist/esm/utils/calculateChunks.mjs +96 -0
- package/dist/esm/utils/calculateChunks.mjs.map +1 -0
- package/dist/esm/utils/calculateChunks.test.mjs +103 -0
- package/dist/esm/utils/calculateChunks.test.mjs.map +1 -0
- package/dist/esm/utils/calculrateChunkTest.md +9 -0
- package/dist/esm/utils/checkAIAccess.mjs +16 -0
- package/dist/esm/utils/checkAIAccess.mjs.map +1 -0
- package/dist/esm/utils/checkFileModifiedRange.mjs +73 -0
- package/dist/esm/utils/checkFileModifiedRange.mjs.map +1 -0
- package/dist/esm/utils/checkFileModifiedRange.test.mjs +181 -0
- package/dist/esm/utils/checkFileModifiedRange.test.mjs.map +1 -0
- package/dist/esm/utils/checkLastUpdateTime.mjs +9 -0
- package/dist/esm/utils/checkLastUpdateTime.mjs.map +1 -0
- package/dist/esm/utils/chunkInference.mjs +34 -0
- package/dist/esm/utils/chunkInference.mjs.map +1 -0
- package/dist/esm/utils/fixChunkStartEndChars.mjs +23 -0
- package/dist/esm/utils/fixChunkStartEndChars.mjs.map +1 -0
- package/dist/esm/utils/fixChunkStartEndChars.test.mjs +80 -0
- package/dist/esm/utils/fixChunkStartEndChars.test.mjs.map +1 -0
- package/dist/esm/utils/formatTimeDiff.mjs +22 -0
- package/dist/esm/utils/formatTimeDiff.mjs.map +1 -0
- package/dist/esm/utils/formatTimeDiff.test.mjs +31 -0
- package/dist/esm/utils/formatTimeDiff.test.mjs.map +1 -0
- package/dist/esm/utils/getChunk.mjs +53 -0
- package/dist/esm/utils/getChunk.mjs.map +1 -0
- package/dist/esm/utils/getChunk.test.mjs +45 -0
- package/dist/esm/utils/getChunk.test.mjs.map +1 -0
- package/dist/esm/utils/getIsFileUpdatedRecently.mjs +12 -0
- package/dist/esm/utils/getIsFileUpdatedRecently.mjs.map +1 -0
- package/dist/esm/utils/getOutputFilePath.mjs +65 -0
- package/dist/esm/utils/getOutputFilePath.mjs.map +1 -0
- package/dist/esm/utils/getOutputFilePath.test.mjs +72 -0
- package/dist/esm/utils/getOutputFilePath.test.mjs.map +1 -0
- package/dist/esm/utils/getParentPackageJSON.mjs +23 -0
- package/dist/esm/utils/getParentPackageJSON.mjs.map +1 -0
- package/dist/esm/utils/listSpecialChars.mjs +54 -0
- package/dist/esm/utils/listSpecialChars.mjs.map +1 -0
- package/dist/esm/utils/listSpecialChars.test.mjs +57 -0
- package/dist/esm/utils/listSpecialChars.test.mjs.map +1 -0
- package/dist/esm/utils/reorderParagraphs.mjs +101 -0
- package/dist/esm/utils/reorderParagraphs.mjs.map +1 -0
- package/dist/esm/utils/reorderParagraphs.test.mjs +70 -0
- package/dist/esm/utils/reorderParagraphs.test.mjs.map +1 -0
- package/dist/esm/utils/splitTextByLine.mjs +11 -0
- package/dist/esm/utils/splitTextByLine.mjs.map +1 -0
- package/dist/esm/utils/splitTextByLine.test.mjs +13 -0
- package/dist/esm/utils/splitTextByLine.test.mjs.map +1 -0
- package/dist/types/cli.d.ts.map +1 -1
- package/dist/types/cli.test.d.ts +2 -0
- package/dist/types/cli.test.d.ts.map +1 -0
- package/dist/types/fill.d.ts.map +1 -1
- package/dist/types/index.d.ts +2 -0
- package/dist/types/index.d.ts.map +1 -1
- package/dist/types/pull.d.ts.map +1 -1
- package/dist/types/pushConfig.d.ts.map +1 -1
- package/dist/types/reviewDoc.d.ts +25 -0
- package/dist/types/reviewDoc.d.ts.map +1 -0
- package/dist/types/translateDoc.d.ts +25 -0
- package/dist/types/translateDoc.d.ts.map +1 -0
- package/dist/types/utils/calculateChunks.d.ts +9 -0
- package/dist/types/utils/calculateChunks.d.ts.map +1 -0
- package/dist/types/utils/calculateChunks.test.d.ts +2 -0
- package/dist/types/utils/calculateChunks.test.d.ts.map +1 -0
- package/dist/types/utils/checkAIAccess.d.ts +4 -0
- package/dist/types/utils/checkAIAccess.d.ts.map +1 -0
- package/dist/types/utils/checkFileModifiedRange.d.ts +11 -0
- package/dist/types/utils/checkFileModifiedRange.d.ts.map +1 -0
- package/dist/types/utils/checkFileModifiedRange.test.d.ts +2 -0
- package/dist/types/utils/checkFileModifiedRange.test.d.ts.map +1 -0
- package/dist/types/utils/checkLastUpdateTime.d.ts +9 -0
- package/dist/types/utils/checkLastUpdateTime.d.ts.map +1 -0
- package/dist/types/utils/chunkInference.d.ts +12 -0
- package/dist/types/utils/chunkInference.d.ts.map +1 -0
- package/dist/types/utils/fixChunkStartEndChars.d.ts +2 -0
- package/dist/types/utils/fixChunkStartEndChars.d.ts.map +1 -0
- package/dist/types/utils/fixChunkStartEndChars.test.d.ts +2 -0
- package/dist/types/utils/fixChunkStartEndChars.test.d.ts.map +1 -0
- package/dist/types/utils/formatTimeDiff.d.ts +2 -0
- package/dist/types/utils/formatTimeDiff.d.ts.map +1 -0
- package/dist/types/utils/formatTimeDiff.test.d.ts +2 -0
- package/dist/types/utils/formatTimeDiff.test.d.ts.map +1 -0
- package/dist/types/utils/getChunk.d.ts +9 -0
- package/dist/types/utils/getChunk.d.ts.map +1 -0
- package/dist/types/utils/getChunk.test.d.ts +2 -0
- package/dist/types/utils/getChunk.test.d.ts.map +1 -0
- package/dist/types/utils/getIsFileUpdatedRecently.d.ts +5 -0
- package/dist/types/utils/getIsFileUpdatedRecently.d.ts.map +1 -0
- package/dist/types/utils/getOutputFilePath.d.ts +26 -0
- package/dist/types/utils/getOutputFilePath.d.ts.map +1 -0
- package/dist/types/utils/getOutputFilePath.test.d.ts +2 -0
- package/dist/types/utils/getOutputFilePath.test.d.ts.map +1 -0
- package/dist/types/utils/getParentPackageJSON.d.ts +32 -0
- package/dist/types/utils/getParentPackageJSON.d.ts.map +1 -0
- package/dist/types/utils/listSpecialChars.d.ts +10 -0
- package/dist/types/utils/listSpecialChars.d.ts.map +1 -0
- package/dist/types/utils/listSpecialChars.test.d.ts +2 -0
- package/dist/types/utils/listSpecialChars.test.d.ts.map +1 -0
- package/dist/types/utils/reorderParagraphs.d.ts +8 -0
- package/dist/types/utils/reorderParagraphs.d.ts.map +1 -0
- package/dist/types/utils/reorderParagraphs.test.d.ts +2 -0
- package/dist/types/utils/reorderParagraphs.test.d.ts.map +1 -0
- package/dist/types/utils/splitTextByLine.d.ts +2 -0
- package/dist/types/utils/splitTextByLine.d.ts.map +1 -0
- package/dist/types/utils/splitTextByLine.test.d.ts +2 -0
- package/dist/types/utils/splitTextByLine.test.d.ts.map +1 -0
- package/package.json +16 -14
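The bulk of this release is a set of new document translation and review modules (translateDoc, reviewDoc) plus the chunking utilities under dist/*/utils, whose compiled output and source maps appear below. As a rough orientation, and assuming the new modules are re-exported from the package index (index.d.ts gains two lines in this diff), a translateDoc call based on the TranslateDocOptions type embedded in the source maps might look like this; the glob patterns, locales, and concurrency value are purely illustrative:

```ts
// Sketch only: option values are illustrative, not package defaults.
import { Locales } from '@intlayer/config';
import { translateDoc } from '@intlayer/cli';

await translateDoc({
  docPattern: ['docs/en/**/*.md'],             // globs resolved with fast-glob
  excludedGlobPattern: ['**/node_modules/**'],
  baseLocale: Locales.ENGLISH,
  locales: [Locales.FRENCH, Locales.SPANISH],
  nbSimultaneousFileProcessed: 3,              // values above 10 are clamped to 10
});
```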
package/dist/cjs/reviewDoc.cjs.map
@@ -0,0 +1 @@
{"version":3,"sources":["../../src/reviewDoc.ts"],"sourcesContent":["import { AIOptions, getAuthAPI } from '@intlayer/api'; // Importing only getAiAPI for now\nimport {\n getAppLogger,\n getConfiguration,\n GetConfigurationOptions,\n Locales,\n retryManager,\n} from '@intlayer/config';\nimport { getLocaleName } from '@intlayer/core';\nimport fg from 'fast-glob';\nimport { mkdirSync, writeFileSync } from 'fs';\nimport { readFile } from 'fs/promises';\nimport pLimit from 'p-limit';\nimport { dirname, join } from 'path';\nimport { fileURLToPath } from 'url';\nimport { chunkText } from './utils/calculateChunks';\nimport { checkAIAccess } from './utils/checkAIAccess';\nimport { checkFileModifiedRange } from './utils/checkFileModifiedRange';\nimport { chunkInference } from './utils/chunkInference';\nimport { fixChunkStartEndChars } from './utils/fixChunkStartEndChars';\nimport { getChunk } from './utils/getChunk';\nimport { getOutputFilePath } from './utils/getOutputFilePath';\n\nconst isESModule = typeof import.meta.url === 'string';\n\nconst dir = isESModule ? dirname(fileURLToPath(import.meta.url)) : __dirname;\n\n/**\n * Translate a single file for a given locale\n */\nexport const reviewFile = async (\n baseFilePath: string,\n outputFilePath: string,\n locale: Locales,\n baseLocale: Locales,\n aiOptions?: AIOptions,\n configOptions?: GetConfigurationOptions,\n oAuth2AccessToken?: string,\n customInstructions?: string\n) => {\n try {\n const configuration = getConfiguration(configOptions);\n const appLogger = getAppLogger(configuration);\n\n const basedFileContent = await readFile(baseFilePath, 'utf-8');\n const fileToReviewContent = await readFile(outputFilePath, 'utf-8');\n\n let updatedFileContent = fileToReviewContent;\n let fileResultContent = '';\n\n // Prepare the base prompt for ChatGPT\n const basePrompt = (\n await readFile(join(dir, './prompts/REVIEW_PROMPT.md'), 'utf-8')\n )\n .replaceAll(\n '{{localeName}}',\n `${getLocaleName(locale, Locales.ENGLISH)} (${locale})`\n )\n .replaceAll(\n '{{baseLocaleName}}',\n `${getLocaleName(baseLocale, Locales.ENGLISH)} (${baseLocale})`\n )\n .replace('{{applicationContext}}', aiOptions?.applicationContext ?? '-')\n .replace('{{customInstructions}}', customInstructions ?? '-');\n\n const baseChunks = chunkText(basedFileContent, 800, 0);\n\n appLogger(` Base file splitted into ${baseChunks.length} chunks`);\n\n for (let i = 0; i < baseChunks.length; i++) {\n const baseChunkContext = baseChunks[i];\n\n const getBaseChunkContextPrompt = () =>\n `**CHUNK ${i + 1} to ${Math.min(i + 3, baseChunks.length)} of ${baseChunks.length}** is the base chunk in ${getLocaleName(baseLocale, Locales.ENGLISH)} (${baseLocale}) as reference.\\n` +\n `///chunksStart///` +\n (baseChunks[i - 1]?.content ?? '') +\n baseChunkContext.content +\n (baseChunks[i + 1]?.content ?? '') +\n `///chunksEnd///`;\n\n const getChunkToReviewPrompt = () =>\n `**CHUNK ${i + 1} to ${Math.min(i + 3, baseChunks.length)} of ${baseChunks.length}** is the current chunk to review in ${getLocaleName(locale, Locales.ENGLISH)} (${locale}) as reference.\\n` +\n `///chunksStart///` +\n getChunk(updatedFileContent, {\n lineStart: baseChunks[i - 1]?.lineStart ?? 0,\n lineLength:\n (baseChunks[i - 1]?.lineLength ?? 0) +\n baseChunkContext.lineLength +\n (baseChunks[i + 1]?.lineLength ?? 
0),\n }) +\n `///chunksEnd///`;\n\n // Make the actual translation call\n let reviewedChunkResult = await retryManager(async () => {\n const result = await chunkInference(\n [\n { role: 'system', content: basePrompt },\n { role: 'system', content: getBaseChunkContextPrompt() },\n { role: 'system', content: getChunkToReviewPrompt() },\n {\n role: 'system',\n content: `The next user message will be the **CHUNK ${i + 1} of ${baseChunks.length}** that should be translated in ${getLocaleName(locale, Locales.ENGLISH)} (${locale}).`,\n },\n { role: 'user', content: baseChunkContext.content },\n ],\n aiOptions,\n oAuth2AccessToken\n );\n\n appLogger(\n ` -> ${result.tokenUsed} tokens used - CHUNK ${i + 1} of ${baseChunks.length}`\n );\n\n const fixedReviewedChunkResult = fixChunkStartEndChars(\n result?.fileContent,\n baseChunkContext.content\n );\n\n return fixedReviewedChunkResult;\n })();\n\n updatedFileContent = updatedFileContent.replace(\n baseChunkContext.content,\n reviewedChunkResult\n );\n\n fileResultContent += reviewedChunkResult;\n }\n\n mkdirSync(dirname(outputFilePath), { recursive: true });\n writeFileSync(outputFilePath, fileResultContent);\n\n appLogger(` File ${outputFilePath} created/updated successfully.`);\n } catch (error) {\n console.error(error);\n }\n};\n\ntype ReviewDocOptions = {\n docPattern: string[];\n locales: Locales[];\n excludedGlobPattern: string[];\n baseLocale: Locales;\n aiOptions?: AIOptions;\n nbSimultaneousFileProcessed?: number;\n configOptions?: GetConfigurationOptions;\n customInstructions?: string;\n skipIfModifiedBefore?: number | string | Date;\n skipIfModifiedAfter?: number | string | Date;\n};\n\n/**\n * Main audit function: scans all .md files in \"en/\" (unless you specified DOC_LIST),\n * then audits them to each locale in LOCALE_LIST.\n */\nexport const reviewDoc = async ({\n docPattern,\n locales,\n excludedGlobPattern,\n baseLocale,\n aiOptions,\n nbSimultaneousFileProcessed,\n configOptions,\n customInstructions,\n skipIfModifiedBefore,\n skipIfModifiedAfter,\n}: ReviewDocOptions) => {\n const configuration = getConfiguration(configOptions);\n const appLogger = getAppLogger(configuration);\n\n if (nbSimultaneousFileProcessed && nbSimultaneousFileProcessed > 10) {\n appLogger(\n `Warning: nbSimultaneousFileProcessed is set to ${nbSimultaneousFileProcessed}, which is greater than 10. Setting it to 10.`\n );\n nbSimultaneousFileProcessed = 10; // Limit the number of simultaneous file processed to 10\n }\n\n const limit = pLimit(nbSimultaneousFileProcessed ?? 
3);\n\n const docList: string[] = fg.sync(docPattern, {\n ignore: excludedGlobPattern,\n });\n\n checkAIAccess(configuration, aiOptions);\n\n let oAuth2AccessToken: string | undefined;\n if (configuration.editor.clientId) {\n const intlayerAuthAPI = getAuthAPI(undefined, configuration);\n const oAuth2TokenResult = await intlayerAuthAPI.getOAuth2AccessToken();\n\n oAuth2AccessToken = oAuth2TokenResult.data?.accessToken;\n }\n\n appLogger(\n `Base locale is ${getLocaleName(baseLocale, Locales.ENGLISH)} (${baseLocale})`\n );\n appLogger(\n `Reviewing ${locales.length} locales: [ ${locales\n .map((locale) => `${getLocaleName(locale, Locales.ENGLISH)} (${locale})`)\n .join(', ')} ]`\n );\n\n appLogger(`Reviewing ${docList.length} files:`);\n appLogger(docList.map((path) => ` - ${path}\\n`));\n\n const tasks = docList.map((docPath) =>\n locales.flatMap((locale) =>\n limit(async () => {\n appLogger(\n `Reviewing file: ${docPath} to ${getLocaleName(\n locale,\n Locales.ENGLISH\n )} (${locale})`\n );\n\n const absoluteBaseFilePath = join(\n configuration.content.baseDir,\n docPath\n );\n const outputFilePath = getOutputFilePath(\n absoluteBaseFilePath,\n locale,\n baseLocale\n );\n\n const fileModificationData = checkFileModifiedRange(outputFilePath, {\n skipIfModifiedBefore,\n skipIfModifiedAfter,\n });\n\n if (fileModificationData.isSkipped) {\n appLogger(fileModificationData.message);\n return;\n }\n\n await reviewFile(\n absoluteBaseFilePath,\n outputFilePath,\n locale as Locales,\n baseLocale,\n aiOptions,\n configOptions,\n oAuth2AccessToken,\n customInstructions\n );\n })\n )\n );\n\n await Promise.all(tasks);\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iBAAsC;AACtC,oBAMO;AACP,kBAA8B;AAC9B,uBAAe;AACf,gBAAyC;AACzC,sBAAyB;AACzB,qBAAmB;AACnB,kBAA8B;AAC9B,iBAA8B;AAC9B,6BAA0B;AAC1B,2BAA8B;AAC9B,oCAAuC;AACvC,4BAA+B;AAC/B,mCAAsC;AACtC,sBAAyB;AACzB,+BAAkC;AArBlC;AAuBA,MAAM,aAAa,OAAO,YAAY,QAAQ;AAE9C,MAAM,MAAM,iBAAa,yBAAQ,0BAAc,YAAY,GAAG,CAAC,IAAI;AAK5D,MAAM,aAAa,OACxB,cACA,gBACA,QACA,YACA,WACA,eACA,mBACA,uBACG;AACH,MAAI;AACF,UAAM,oBAAgB,gCAAiB,aAAa;AACpD,UAAM,gBAAY,4BAAa,aAAa;AAE5C,UAAM,mBAAmB,UAAM,0BAAS,cAAc,OAAO;AAC7D,UAAM,sBAAsB,UAAM,0BAAS,gBAAgB,OAAO;AAElE,QAAI,qBAAqB;AACzB,QAAI,oBAAoB;AAGxB,UAAM,cACJ,UAAM,8BAAS,kBAAK,KAAK,4BAA4B,GAAG,OAAO,GAE9D;AAAA,MACC;AAAA,MACA,OAAG,2BAAc,QAAQ,sBAAQ,OAAO,CAAC,KAAK,MAAM;AAAA,IACtD,EACC;AAAA,MACC;AAAA,MACA,OAAG,2BAAc,YAAY,sBAAQ,OAAO,CAAC,KAAK,UAAU;AAAA,IAC9D,EACC,QAAQ,0BAA0B,WAAW,sBAAsB,GAAG,EACtE,QAAQ,0BAA0B,sBAAsB,GAAG;AAE9D,UAAM,iBAAa,kCAAU,kBAAkB,KAAK,CAAC;AAErD,cAAU,4BAA4B,WAAW,MAAM,SAAS;AAEhE,aAAS,IAAI,GAAG,IAAI,WAAW,QAAQ,KAAK;AAC1C,YAAM,mBAAmB,WAAW,CAAC;AAErC,YAAM,4BAA4B,MAChC,WAAW,IAAI,CAAC,OAAO,KAAK,IAAI,IAAI,GAAG,WAAW,MAAM,CAAC,OAAO,WAAW,MAAM,+BAA2B,2BAAc,YAAY,sBAAQ,OAAO,CAAC,KAAK,UAAU;AAAA,sBAEpK,WAAW,IAAI,CAAC,GAAG,WAAW,MAC/B,iBAAiB,WAChB,WAAW,IAAI,CAAC,GAAG,WAAW,MAC/B;AAEF,YAAM,yBAAyB,MAC7B,WAAW,IAAI,CAAC,OAAO,KAAK,IAAI,IAAI,GAAG,WAAW,MAAM,CAAC,OAAO,WAAW,MAAM,4CAAwC,2BAAc,QAAQ,sBAAQ,OAAO,CAAC,KAAK,MAAM;AAAA,yBAE1K,0BAAS,oBAAoB;AAAA,QAC3B,WAAW,WAAW,IAAI,CAAC,GAAG,aAAa;AAAA,QAC3C,aACG,WAAW,IAAI,CAAC,GAAG,cAAc,KAClC,iBAAiB,cAChB,WAAW,IAAI,CAAC,GAAG,cAAc;AAAA,MACtC,CAAC,IACD;AAGF,UAAI,sBAAsB,UAAM,4BAAa,YAAY;AACvD,cAAM,SAAS,UAAM;AAAA,UACnB;AAAA,YACE,EAAE,MAAM,UAAU,SAAS,WAAW;AAAA,YACtC,EAAE,MAAM,UAAU,SAAS,0BAA0B,EAAE;AAAA,YACvD,EAAE,MAAM,UAAU,SAAS,uBAAuB,EAAE;AAAA,YACpD;AAAA,cACE,MAAM;AAAA,cACN,SAAS,6CAA6C,IAAI,CAAC,OAAO,WAAW,MAAM,uCAAmC,2BAAc,QAAQ,sBAAQ,OAAO,CAAC,KAAK,MAAM;AAAA,YACzK;AAAA,YACA,EAAE,MAAM,QAAQ,SAAS,iB
AAiB,QAAQ;AAAA,UACpD;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAEA;AAAA,UACE,OAAO,OAAO,SAAS,wBAAwB,IAAI,CAAC,OAAO,WAAW,MAAM;AAAA,QAC9E;AAEA,cAAM,+BAA2B;AAAA,UAC/B,QAAQ;AAAA,UACR,iBAAiB;AAAA,QACnB;AAEA,eAAO;AAAA,MACT,CAAC,EAAE;AAEH,2BAAqB,mBAAmB;AAAA,QACtC,iBAAiB;AAAA,QACjB;AAAA,MACF;AAEA,2BAAqB;AAAA,IACvB;AAEA,iCAAU,qBAAQ,cAAc,GAAG,EAAE,WAAW,KAAK,CAAC;AACtD,iCAAc,gBAAgB,iBAAiB;AAE/C,cAAU,SAAS,cAAc,gCAAgC;AAAA,EACnE,SAAS,OAAO;AACd,YAAQ,MAAM,KAAK;AAAA,EACrB;AACF;AAmBO,MAAM,YAAY,OAAO;AAAA,EAC9B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAAwB;AACtB,QAAM,oBAAgB,gCAAiB,aAAa;AACpD,QAAM,gBAAY,4BAAa,aAAa;AAE5C,MAAI,+BAA+B,8BAA8B,IAAI;AACnE;AAAA,MACE,kDAAkD,2BAA2B;AAAA,IAC/E;AACA,kCAA8B;AAAA,EAChC;AAEA,QAAM,YAAQ,eAAAA,SAAO,+BAA+B,CAAC;AAErD,QAAM,UAAoB,iBAAAC,QAAG,KAAK,YAAY;AAAA,IAC5C,QAAQ;AAAA,EACV,CAAC;AAED,0CAAc,eAAe,SAAS;AAEtC,MAAI;AACJ,MAAI,cAAc,OAAO,UAAU;AACjC,UAAM,sBAAkB,uBAAW,QAAW,aAAa;AAC3D,UAAM,oBAAoB,MAAM,gBAAgB,qBAAqB;AAErE,wBAAoB,kBAAkB,MAAM;AAAA,EAC9C;AAEA;AAAA,IACE,sBAAkB,2BAAc,YAAY,sBAAQ,OAAO,CAAC,KAAK,UAAU;AAAA,EAC7E;AACA;AAAA,IACE,aAAa,QAAQ,MAAM,eAAe,QACvC,IAAI,CAAC,WAAW,OAAG,2BAAc,QAAQ,sBAAQ,OAAO,CAAC,KAAK,MAAM,GAAG,EACvE,KAAK,IAAI,CAAC;AAAA,EACf;AAEA,YAAU,aAAa,QAAQ,MAAM,SAAS;AAC9C,YAAU,QAAQ,IAAI,CAAC,SAAS,MAAM,IAAI;AAAA,CAAI,CAAC;AAE/C,QAAM,QAAQ,QAAQ;AAAA,IAAI,CAAC,YACzB,QAAQ;AAAA,MAAQ,CAAC,WACf,MAAM,YAAY;AAChB;AAAA,UACE,mBAAmB,OAAO,WAAO;AAAA,YAC/B;AAAA,YACA,sBAAQ;AAAA,UACV,CAAC,KAAK,MAAM;AAAA,QACd;AAEA,cAAM,2BAAuB;AAAA,UAC3B,cAAc,QAAQ;AAAA,UACtB;AAAA,QACF;AACA,cAAM,qBAAiB;AAAA,UACrB;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAEA,cAAM,2BAAuB,sDAAuB,gBAAgB;AAAA,UAClE;AAAA,UACA;AAAA,QACF,CAAC;AAED,YAAI,qBAAqB,WAAW;AAClC,oBAAU,qBAAqB,OAAO;AACtC;AAAA,QACF;AAEA,cAAM;AAAA,UACJ;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,QAAM,QAAQ,IAAI,KAAK;AACzB;","names":["pLimit","fg"]}
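The source map above embeds the reviewDoc source: it scans docPattern with fast-glob, resolves the per-locale output path, optionally skips files by modification date, and reviews each file chunk by chunk against the base locale. A minimal reviewDoc call, mirroring the ReviewDocOptions type in that source (patterns, locales, and the date filter are illustrative):

```ts
// Sketch only: patterns, locales, and the date filter are illustrative.
import { Locales } from '@intlayer/config';
import { reviewDoc } from '@intlayer/cli';

await reviewDoc({
  docPattern: ['docs/en/**/*.md'],
  excludedGlobPattern: [],
  baseLocale: Locales.ENGLISH,
  locales: [Locales.FRENCH],
  customInstructions: 'Keep code blocks and front matter untouched.',
  skipIfModifiedAfter: new Date('2025-01-01'), // skip outputs already modified after this date
});
```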
package/dist/cjs/translateDoc.cjs
@@ -0,0 +1,201 @@
"use strict";
|
|
2
|
+
var __create = Object.create;
|
|
3
|
+
var __defProp = Object.defineProperty;
|
|
4
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
5
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
6
|
+
var __getProtoOf = Object.getPrototypeOf;
|
|
7
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
8
|
+
var __export = (target, all) => {
|
|
9
|
+
for (var name in all)
|
|
10
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
11
|
+
};
|
|
12
|
+
var __copyProps = (to, from, except, desc) => {
|
|
13
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
14
|
+
for (let key of __getOwnPropNames(from))
|
|
15
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
16
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
17
|
+
}
|
|
18
|
+
return to;
|
|
19
|
+
};
|
|
20
|
+
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
|
21
|
+
// If the importer is in node compatibility mode or this is not an ESM
|
|
22
|
+
// file that has been converted to a CommonJS file using a Babel-
|
|
23
|
+
// compatible transform (i.e. "__esModule" has not been set), then set
|
|
24
|
+
// "default" to the CommonJS "module.exports" for node compatibility.
|
|
25
|
+
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
|
26
|
+
mod
|
|
27
|
+
));
|
|
28
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
29
|
+
var translateDoc_exports = {};
|
|
30
|
+
__export(translateDoc_exports, {
|
|
31
|
+
translateDoc: () => translateDoc,
|
|
32
|
+
translateFile: () => translateFile
|
|
33
|
+
});
|
|
34
|
+
module.exports = __toCommonJS(translateDoc_exports);
|
|
35
|
+
var import_api = require("@intlayer/api");
|
|
36
|
+
var import_config = require("@intlayer/config");
|
|
37
|
+
var import_core = require("@intlayer/core");
|
|
38
|
+
var import_fast_glob = __toESM(require("fast-glob"));
|
|
39
|
+
var import_fs = require("fs");
|
|
40
|
+
var import_promises = require("fs/promises");
|
|
41
|
+
var import_p_limit = __toESM(require("p-limit"));
|
|
42
|
+
var import_path = require("path");
|
|
43
|
+
var import_url = require("url");
|
|
44
|
+
var import_calculateChunks = require('./utils/calculateChunks.cjs');
|
|
45
|
+
var import_checkAIAccess = require('./utils/checkAIAccess.cjs');
|
|
46
|
+
var import_checkFileModifiedRange = require('./utils/checkFileModifiedRange.cjs');
|
|
47
|
+
var import_chunkInference = require('./utils/chunkInference.cjs');
|
|
48
|
+
var import_fixChunkStartEndChars = require('./utils/fixChunkStartEndChars.cjs');
|
|
49
|
+
var import_getChunk = require('./utils/getChunk.cjs');
|
|
50
|
+
var import_getOutputFilePath = require('./utils/getOutputFilePath.cjs');
|
|
51
|
+
const import_meta = {};
|
|
52
|
+
const isESModule = typeof import_meta.url === "string";
|
|
53
|
+
const dir = isESModule ? (0, import_path.dirname)((0, import_url.fileURLToPath)(import_meta.url)) : __dirname;
|
|
54
|
+
const translateFile = async (baseFilePath, outputFilePath, locale, baseLocale, aiOptions, configOptions, oAuth2AccessToken, customInstructions) => {
|
|
55
|
+
try {
|
|
56
|
+
const configuration = (0, import_config.getConfiguration)(configOptions);
|
|
57
|
+
const appLogger = (0, import_config.getAppLogger)(configuration);
|
|
58
|
+
const fileContent = await (0, import_promises.readFile)(baseFilePath, "utf-8");
|
|
59
|
+
let fileResultContent = fileContent;
|
|
60
|
+
const basePrompt = (await (0, import_promises.readFile)((0, import_path.join)(dir, "./prompts/TRANSLATE_PROMPT.md"), "utf-8")).replaceAll(
|
|
61
|
+
"{{localeName}}",
|
|
62
|
+
`${(0, import_core.getLocaleName)(locale, import_config.Locales.ENGLISH)} (${locale})`
|
|
63
|
+
).replaceAll(
|
|
64
|
+
"{{baseLocaleName}}",
|
|
65
|
+
`${(0, import_core.getLocaleName)(baseLocale, import_config.Locales.ENGLISH)} (${baseLocale})`
|
|
66
|
+
).replace("{{applicationContext}}", aiOptions?.applicationContext ?? "-").replace("{{customInstructions}}", customInstructions ?? "-");
|
|
67
|
+
const chunks = (0, import_calculateChunks.chunkText)(fileContent);
|
|
68
|
+
appLogger(`Base file splitted into ${chunks.length} chunks`);
|
|
69
|
+
for (let i = 0; i < chunks.length; i++) {
|
|
70
|
+
const isFirstChunk = i === 0;
|
|
71
|
+
const getPrevChunkPrompt = () => `**CHUNK ${i} of ${chunks.length}** that has been translated in ${(0, import_core.getLocaleName)(locale, import_config.Locales.ENGLISH)} (${locale}):
|
|
72
|
+
///chunkStart///` + (0, import_getChunk.getChunk)(fileResultContent, chunks[i - 1]) + `///chunkEnd///`;
|
|
73
|
+
const getBaseChunkContextPrompt = () => `**CHUNK ${i + 1} to ${Math.min(i + 3, chunks.length)} of ${chunks.length}** is the base chunk in ${(0, import_core.getLocaleName)(baseLocale, import_config.Locales.ENGLISH)} (${baseLocale}) as reference.
|
|
74
|
+
///chunksStart///` + (chunks[i - 1]?.content ?? "") + chunks[i].content + (chunks[i + 1]?.content ?? "") + `///chunksEnd///`;
|
|
75
|
+
const fileToTranslateCurrentChunk = chunks[i].content;
|
|
76
|
+
let chunkTranslation = await (0, import_config.retryManager)(async () => {
|
|
77
|
+
const result = await (0, import_chunkInference.chunkInference)(
|
|
78
|
+
[
|
|
79
|
+
{ role: "system", content: basePrompt },
|
|
80
|
+
{ role: "system", content: getBaseChunkContextPrompt() },
|
|
81
|
+
...isFirstChunk ? [] : [{ role: "system", content: getPrevChunkPrompt() }],
|
|
82
|
+
{
|
|
83
|
+
role: "system",
|
|
84
|
+
content: `The next user message will be the **CHUNK ${i + 1} of ${chunks.length}** in ${(0, import_core.getLocaleName)(baseLocale, import_config.Locales.ENGLISH)} (${baseLocale}) to translate in ${(0, import_core.getLocaleName)(locale, import_config.Locales.ENGLISH)} (${locale}):`
|
|
85
|
+
},
|
|
86
|
+
{ role: "user", content: fileToTranslateCurrentChunk }
|
|
87
|
+
],
|
|
88
|
+
aiOptions,
|
|
89
|
+
oAuth2AccessToken
|
|
90
|
+
);
|
|
91
|
+
appLogger(
|
|
92
|
+
` -> ${result.tokenUsed} tokens used - CHUNK ${i + 1} of ${chunks.length}`
|
|
93
|
+
);
|
|
94
|
+
const fixedTranslatedChunkResult = (0, import_fixChunkStartEndChars.fixChunkStartEndChars)(
|
|
95
|
+
result?.fileContent,
|
|
96
|
+
fileToTranslateCurrentChunk
|
|
97
|
+
);
|
|
98
|
+
return fixedTranslatedChunkResult;
|
|
99
|
+
})();
|
|
100
|
+
fileResultContent = fileResultContent.replace(
|
|
101
|
+
fileToTranslateCurrentChunk,
|
|
102
|
+
chunkTranslation
|
|
103
|
+
);
|
|
104
|
+
}
|
|
105
|
+
(0, import_fs.mkdirSync)((0, import_path.dirname)(outputFilePath), { recursive: true });
|
|
106
|
+
(0, import_fs.writeFileSync)(outputFilePath, fileResultContent);
|
|
107
|
+
appLogger(`File ${outputFilePath} created/updated successfully.`);
|
|
108
|
+
} catch (error) {
|
|
109
|
+
console.error(error);
|
|
110
|
+
}
|
|
111
|
+
};
|
|
112
|
+
const translateDoc = async ({
|
|
113
|
+
docPattern,
|
|
114
|
+
locales,
|
|
115
|
+
excludedGlobPattern,
|
|
116
|
+
baseLocale,
|
|
117
|
+
aiOptions,
|
|
118
|
+
nbSimultaneousFileProcessed,
|
|
119
|
+
configOptions,
|
|
120
|
+
customInstructions,
|
|
121
|
+
skipIfModifiedBefore,
|
|
122
|
+
skipIfModifiedAfter
|
|
123
|
+
}) => {
|
|
124
|
+
const configuration = (0, import_config.getConfiguration)(configOptions);
|
|
125
|
+
const appLogger = (0, import_config.getAppLogger)(configuration);
|
|
126
|
+
if (nbSimultaneousFileProcessed && nbSimultaneousFileProcessed > 10) {
|
|
127
|
+
appLogger(
|
|
128
|
+
`Warning: nbSimultaneousFileProcessed is set to ${nbSimultaneousFileProcessed}, which is greater than 10. Setting it to 10.`
|
|
129
|
+
);
|
|
130
|
+
nbSimultaneousFileProcessed = 10;
|
|
131
|
+
}
|
|
132
|
+
const limit = (0, import_p_limit.default)(nbSimultaneousFileProcessed ?? 3);
|
|
133
|
+
const docList = import_fast_glob.default.sync(docPattern, {
|
|
134
|
+
ignore: excludedGlobPattern
|
|
135
|
+
});
|
|
136
|
+
(0, import_checkAIAccess.checkAIAccess)(configuration, aiOptions);
|
|
137
|
+
let oAuth2AccessToken;
|
|
138
|
+
if (configuration.editor.clientId) {
|
|
139
|
+
const intlayerAuthAPI = (0, import_api.getAuthAPI)(void 0, configuration);
|
|
140
|
+
const oAuth2TokenResult = await intlayerAuthAPI.getOAuth2AccessToken();
|
|
141
|
+
oAuth2AccessToken = oAuth2TokenResult.data?.accessToken;
|
|
142
|
+
}
|
|
143
|
+
appLogger(`Base locale is ${(0, import_core.getLocaleName)(baseLocale)} (${baseLocale})`);
|
|
144
|
+
appLogger(
|
|
145
|
+
`Translating ${locales.length} locales: [ ${locales.map((locale) => `${(0, import_core.getLocaleName)(locale, baseLocale)} (${locale})`).join(", ")} ]`
|
|
146
|
+
);
|
|
147
|
+
appLogger(`Translating ${docList.length} files:`);
|
|
148
|
+
appLogger(docList.map((path) => ` - ${path}
|
|
149
|
+
`));
|
|
150
|
+
const tasks = docList.map(
|
|
151
|
+
(docPath) => locales.flatMap(
|
|
152
|
+
(locale) => limit(async () => {
|
|
153
|
+
appLogger(
|
|
154
|
+
`Translating file: ${docPath} to ${(0, import_core.getLocaleName)(
|
|
155
|
+
locale,
|
|
156
|
+
import_config.Locales.ENGLISH
|
|
157
|
+
)} (${locale})`
|
|
158
|
+
);
|
|
159
|
+
const absoluteBaseFilePath = (0, import_path.join)(
|
|
160
|
+
configuration.content.baseDir,
|
|
161
|
+
docPath
|
|
162
|
+
);
|
|
163
|
+
const outputFilePath = (0, import_getOutputFilePath.getOutputFilePath)(
|
|
164
|
+
absoluteBaseFilePath,
|
|
165
|
+
locale,
|
|
166
|
+
baseLocale
|
|
167
|
+
);
|
|
168
|
+
if (!(0, import_fs.existsSync)(outputFilePath)) {
|
|
169
|
+
appLogger(`File ${outputFilePath} does not exist, creating it...`);
|
|
170
|
+
(0, import_fs.mkdirSync)((0, import_path.dirname)(outputFilePath), { recursive: true });
|
|
171
|
+
(0, import_fs.writeFileSync)(outputFilePath, "");
|
|
172
|
+
}
|
|
173
|
+
const fileModificationData = (0, import_checkFileModifiedRange.checkFileModifiedRange)(outputFilePath, {
|
|
174
|
+
skipIfModifiedBefore,
|
|
175
|
+
skipIfModifiedAfter
|
|
176
|
+
});
|
|
177
|
+
if (fileModificationData.isSkipped) {
|
|
178
|
+
appLogger(fileModificationData.message);
|
|
179
|
+
return;
|
|
180
|
+
}
|
|
181
|
+
await translateFile(
|
|
182
|
+
absoluteBaseFilePath,
|
|
183
|
+
outputFilePath,
|
|
184
|
+
locale,
|
|
185
|
+
baseLocale,
|
|
186
|
+
aiOptions,
|
|
187
|
+
configOptions,
|
|
188
|
+
oAuth2AccessToken,
|
|
189
|
+
customInstructions
|
|
190
|
+
);
|
|
191
|
+
})
|
|
192
|
+
)
|
|
193
|
+
);
|
|
194
|
+
await Promise.all(tasks);
|
|
195
|
+
};
|
|
196
|
+
// Annotate the CommonJS export names for ESM import in node:
|
|
197
|
+
0 && (module.exports = {
|
|
198
|
+
translateDoc,
|
|
199
|
+
translateFile
|
|
200
|
+
});
|
|
201
|
+
//# sourceMappingURL=translateDoc.cjs.map
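translateDoc fans out over files and locales, but translateFile is exported as well for translating a single document. Assuming it is re-exported from the package index like translateDoc, a direct call would look roughly like this; the file paths are hypothetical and the optional trailing arguments (aiOptions, configOptions, oAuth2AccessToken, customInstructions) are omitted:

```ts
// Sketch only: the file paths are hypothetical.
import { Locales } from '@intlayer/config';
import { translateFile } from '@intlayer/cli';

await translateFile(
  '/project/docs/en/getting-started.md', // baseFilePath (source document)
  '/project/docs/fr/getting-started.md', // outputFilePath (translation target)
  Locales.FRENCH,                        // locale to translate into
  Locales.ENGLISH                        // baseLocale of the source document
);
```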
package/dist/cjs/translateDoc.cjs.map
@@ -0,0 +1 @@
{"version":3,"sources":["../../src/translateDoc.ts"],"sourcesContent":["import { AIOptions, getAuthAPI } from '@intlayer/api';\nimport {\n getAppLogger,\n getConfiguration,\n GetConfigurationOptions,\n Locales,\n retryManager,\n} from '@intlayer/config';\nimport { getLocaleName } from '@intlayer/core';\nimport fg from 'fast-glob';\nimport { existsSync, mkdirSync, writeFileSync } from 'fs';\nimport { readFile } from 'fs/promises';\nimport pLimit from 'p-limit';\nimport { dirname, join } from 'path';\nimport { fileURLToPath } from 'url';\nimport { chunkText } from './utils/calculateChunks';\nimport { checkAIAccess } from './utils/checkAIAccess';\nimport { checkFileModifiedRange } from './utils/checkFileModifiedRange';\nimport { chunkInference } from './utils/chunkInference';\nimport { fixChunkStartEndChars } from './utils/fixChunkStartEndChars';\nimport { getChunk } from './utils/getChunk';\nimport { getOutputFilePath } from './utils/getOutputFilePath';\n\nconst isESModule = typeof import.meta.url === 'string';\n\nconst dir = isESModule ? dirname(fileURLToPath(import.meta.url)) : __dirname;\n\n/**\n * Translate a single file for a given locale\n */\nexport const translateFile = async (\n baseFilePath: string,\n outputFilePath: string,\n locale: Locales,\n baseLocale: Locales,\n aiOptions?: AIOptions,\n configOptions?: GetConfigurationOptions,\n oAuth2AccessToken?: string,\n customInstructions?: string\n) => {\n try {\n const configuration = getConfiguration(configOptions);\n const appLogger = getAppLogger(configuration);\n\n // Determine the target locale file path\n const fileContent = await readFile(baseFilePath, 'utf-8');\n let fileResultContent = fileContent;\n\n // Prepare the base prompt for ChatGPT\n const basePrompt = (\n await readFile(join(dir, './prompts/TRANSLATE_PROMPT.md'), 'utf-8')\n )\n .replaceAll(\n '{{localeName}}',\n `${getLocaleName(locale, Locales.ENGLISH)} (${locale})`\n )\n .replaceAll(\n '{{baseLocaleName}}',\n `${getLocaleName(baseLocale, Locales.ENGLISH)} (${baseLocale})`\n )\n .replace('{{applicationContext}}', aiOptions?.applicationContext ?? '-')\n .replace('{{customInstructions}}', customInstructions ?? '-');\n\n // 1. Chunk the file by number of lines instead of characters\n const chunks = chunkText(fileContent);\n appLogger(`Base file splitted into ${chunks.length} chunks`);\n\n for (let i = 0; i < chunks.length; i++) {\n const isFirstChunk = i === 0;\n\n // Build the chunk-specific prompt\n const getPrevChunkPrompt = () =>\n `**CHUNK ${i} of ${chunks.length}** that has been translated in ${getLocaleName(locale, Locales.ENGLISH)} (${locale}):\\n` +\n `///chunkStart///` +\n getChunk(fileResultContent, chunks[i - 1]) +\n `///chunkEnd///`;\n\n const getBaseChunkContextPrompt = () =>\n `**CHUNK ${i + 1} to ${Math.min(i + 3, chunks.length)} of ${chunks.length}** is the base chunk in ${getLocaleName(baseLocale, Locales.ENGLISH)} (${baseLocale}) as reference.\\n` +\n `///chunksStart///` +\n (chunks[i - 1]?.content ?? '') +\n chunks[i].content +\n (chunks[i + 1]?.content ?? '') +\n `///chunksEnd///`;\n\n const fileToTranslateCurrentChunk = chunks[i].content;\n\n // Make the actual translation call\n let chunkTranslation = await retryManager(async () => {\n const result = await chunkInference(\n [\n { role: 'system', content: basePrompt },\n\n { role: 'system', content: getBaseChunkContextPrompt() },\n ...(isFirstChunk\n ? 
[]\n : [{ role: 'system', content: getPrevChunkPrompt() } as const]),\n {\n role: 'system',\n content: `The next user message will be the **CHUNK ${i + 1} of ${chunks.length}** in ${getLocaleName(baseLocale, Locales.ENGLISH)} (${baseLocale}) to translate in ${getLocaleName(locale, Locales.ENGLISH)} (${locale}):`,\n },\n { role: 'user', content: fileToTranslateCurrentChunk },\n ],\n aiOptions,\n oAuth2AccessToken\n );\n\n appLogger(\n ` -> ${result.tokenUsed} tokens used - CHUNK ${i + 1} of ${chunks.length}`\n );\n\n const fixedTranslatedChunkResult = fixChunkStartEndChars(\n result?.fileContent,\n fileToTranslateCurrentChunk\n );\n\n return fixedTranslatedChunkResult;\n })();\n\n // Replace the chunk in the file content\n fileResultContent = fileResultContent.replace(\n fileToTranslateCurrentChunk,\n chunkTranslation\n );\n }\n\n // 4. Write the final translation to the appropriate file path\n mkdirSync(dirname(outputFilePath), { recursive: true });\n writeFileSync(outputFilePath, fileResultContent);\n\n appLogger(`File ${outputFilePath} created/updated successfully.`);\n } catch (error) {\n console.error(error);\n }\n};\n\ntype TranslateDocOptions = {\n docPattern: string[];\n locales: Locales[];\n excludedGlobPattern: string[];\n baseLocale: Locales;\n aiOptions?: AIOptions;\n nbSimultaneousFileProcessed?: number;\n configOptions?: GetConfigurationOptions;\n customInstructions?: string;\n skipIfModifiedBefore?: number | string | Date;\n skipIfModifiedAfter?: number | string | Date;\n};\n\n/**\n * Main translate function: scans all .md files in \"en/\" (unless you specified DOC_LIST),\n * then translates them to each locale in LOCALE_LIST.\n */\nexport const translateDoc = async ({\n docPattern,\n locales,\n excludedGlobPattern,\n baseLocale,\n aiOptions,\n nbSimultaneousFileProcessed,\n configOptions,\n customInstructions,\n skipIfModifiedBefore,\n skipIfModifiedAfter,\n}: TranslateDocOptions) => {\n const configuration = getConfiguration(configOptions);\n const appLogger = getAppLogger(configuration);\n\n if (nbSimultaneousFileProcessed && nbSimultaneousFileProcessed > 10) {\n appLogger(\n `Warning: nbSimultaneousFileProcessed is set to ${nbSimultaneousFileProcessed}, which is greater than 10. Setting it to 10.`\n );\n nbSimultaneousFileProcessed = 10; // Limit the number of simultaneous file processed to 10\n }\n\n const limit = pLimit(nbSimultaneousFileProcessed ?? 
3);\n\n const docList: string[] = fg.sync(docPattern, {\n ignore: excludedGlobPattern,\n });\n\n checkAIAccess(configuration, aiOptions);\n\n let oAuth2AccessToken: string | undefined;\n if (configuration.editor.clientId) {\n const intlayerAuthAPI = getAuthAPI(undefined, configuration);\n const oAuth2TokenResult = await intlayerAuthAPI.getOAuth2AccessToken();\n\n oAuth2AccessToken = oAuth2TokenResult.data?.accessToken;\n }\n\n appLogger(`Base locale is ${getLocaleName(baseLocale)} (${baseLocale})`);\n appLogger(\n `Translating ${locales.length} locales: [ ${locales\n .map((locale) => `${getLocaleName(locale, baseLocale)} (${locale})`)\n .join(', ')} ]`\n );\n\n appLogger(`Translating ${docList.length} files:`);\n appLogger(docList.map((path) => ` - ${path}\\n`));\n\n const tasks = docList.map((docPath) =>\n locales.flatMap((locale) =>\n limit(async () => {\n appLogger(\n `Translating file: ${docPath} to ${getLocaleName(\n locale,\n Locales.ENGLISH\n )} (${locale})`\n );\n\n const absoluteBaseFilePath = join(\n configuration.content.baseDir,\n docPath\n );\n const outputFilePath = getOutputFilePath(\n absoluteBaseFilePath,\n locale,\n baseLocale\n );\n\n // check if the file exist, otherwise create it\n if (!existsSync(outputFilePath)) {\n appLogger(`File ${outputFilePath} does not exist, creating it...`);\n mkdirSync(dirname(outputFilePath), { recursive: true });\n writeFileSync(outputFilePath, '');\n }\n\n const fileModificationData = checkFileModifiedRange(outputFilePath, {\n skipIfModifiedBefore,\n skipIfModifiedAfter,\n });\n\n if (fileModificationData.isSkipped) {\n appLogger(fileModificationData.message);\n return;\n }\n\n await translateFile(\n absoluteBaseFilePath,\n outputFilePath,\n locale as Locales,\n baseLocale,\n aiOptions,\n configOptions,\n oAuth2AccessToken,\n customInstructions\n );\n })\n )\n );\n\n await 
Promise.all(tasks);\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iBAAsC;AACtC,oBAMO;AACP,kBAA8B;AAC9B,uBAAe;AACf,gBAAqD;AACrD,sBAAyB;AACzB,qBAAmB;AACnB,kBAA8B;AAC9B,iBAA8B;AAC9B,6BAA0B;AAC1B,2BAA8B;AAC9B,oCAAuC;AACvC,4BAA+B;AAC/B,mCAAsC;AACtC,sBAAyB;AACzB,+BAAkC;AArBlC;AAuBA,MAAM,aAAa,OAAO,YAAY,QAAQ;AAE9C,MAAM,MAAM,iBAAa,yBAAQ,0BAAc,YAAY,GAAG,CAAC,IAAI;AAK5D,MAAM,gBAAgB,OAC3B,cACA,gBACA,QACA,YACA,WACA,eACA,mBACA,uBACG;AACH,MAAI;AACF,UAAM,oBAAgB,gCAAiB,aAAa;AACpD,UAAM,gBAAY,4BAAa,aAAa;AAG5C,UAAM,cAAc,UAAM,0BAAS,cAAc,OAAO;AACxD,QAAI,oBAAoB;AAGxB,UAAM,cACJ,UAAM,8BAAS,kBAAK,KAAK,+BAA+B,GAAG,OAAO,GAEjE;AAAA,MACC;AAAA,MACA,OAAG,2BAAc,QAAQ,sBAAQ,OAAO,CAAC,KAAK,MAAM;AAAA,IACtD,EACC;AAAA,MACC;AAAA,MACA,OAAG,2BAAc,YAAY,sBAAQ,OAAO,CAAC,KAAK,UAAU;AAAA,IAC9D,EACC,QAAQ,0BAA0B,WAAW,sBAAsB,GAAG,EACtE,QAAQ,0BAA0B,sBAAsB,GAAG;AAG9D,UAAM,aAAS,kCAAU,WAAW;AACpC,cAAU,2BAA2B,OAAO,MAAM,SAAS;AAE3D,aAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,YAAM,eAAe,MAAM;AAG3B,YAAM,qBAAqB,MACzB,WAAW,CAAC,OAAO,OAAO,MAAM,sCAAkC,2BAAc,QAAQ,sBAAQ,OAAO,CAAC,KAAK,MAAM;AAAA,wBAEnH,0BAAS,mBAAmB,OAAO,IAAI,CAAC,CAAC,IACzC;AAEF,YAAM,4BAA4B,MAChC,WAAW,IAAI,CAAC,OAAO,KAAK,IAAI,IAAI,GAAG,OAAO,MAAM,CAAC,OAAO,OAAO,MAAM,+BAA2B,2BAAc,YAAY,sBAAQ,OAAO,CAAC,KAAK,UAAU;AAAA,sBAE5J,OAAO,IAAI,CAAC,GAAG,WAAW,MAC3B,OAAO,CAAC,EAAE,WACT,OAAO,IAAI,CAAC,GAAG,WAAW,MAC3B;AAEF,YAAM,8BAA8B,OAAO,CAAC,EAAE;AAG9C,UAAI,mBAAmB,UAAM,4BAAa,YAAY;AACpD,cAAM,SAAS,UAAM;AAAA,UACnB;AAAA,YACE,EAAE,MAAM,UAAU,SAAS,WAAW;AAAA,YAEtC,EAAE,MAAM,UAAU,SAAS,0BAA0B,EAAE;AAAA,YACvD,GAAI,eACA,CAAC,IACD,CAAC,EAAE,MAAM,UAAU,SAAS,mBAAmB,EAAE,CAAU;AAAA,YAC/D;AAAA,cACE,MAAM;AAAA,cACN,SAAS,6CAA6C,IAAI,CAAC,OAAO,OAAO,MAAM,aAAS,2BAAc,YAAY,sBAAQ,OAAO,CAAC,KAAK,UAAU,yBAAqB,2BAAc,QAAQ,sBAAQ,OAAO,CAAC,KAAK,MAAM;AAAA,YACzN;AAAA,YACA,EAAE,MAAM,QAAQ,SAAS,4BAA4B;AAAA,UACvD;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAEA;AAAA,UACE,OAAO,OAAO,SAAS,wBAAwB,IAAI,CAAC,OAAO,OAAO,MAAM;AAAA,QAC1E;AAEA,cAAM,iCAA6B;AAAA,UACjC,QAAQ;AAAA,UACR;AAAA,QACF;AAEA,eAAO;AAAA,MACT,CAAC,EAAE;AAGH,0BAAoB,kBAAkB;AAAA,QACpC;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAGA,iCAAU,qBAAQ,cAAc,GAAG,EAAE,WAAW,KAAK,CAAC;AACtD,iCAAc,gBAAgB,iBAAiB;AAE/C,cAAU,QAAQ,cAAc,gCAAgC;AAAA,EAClE,SAAS,OAAO;AACd,YAAQ,MAAM,KAAK;AAAA,EACrB;AACF;AAmBO,MAAM,eAAe,OAAO;AAAA,EACjC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAA2B;AACzB,QAAM,oBAAgB,gCAAiB,aAAa;AACpD,QAAM,gBAAY,4BAAa,aAAa;AAE5C,MAAI,+BAA+B,8BAA8B,IAAI;AACnE;AAAA,MACE,kDAAkD,2BAA2B;AAAA,IAC/E;AACA,kCAA8B;AAAA,EAChC;AAEA,QAAM,YAAQ,eAAAA,SAAO,+BAA+B,CAAC;AAErD,QAAM,UAAoB,iBAAAC,QAAG,KAAK,YAAY;AAAA,IAC5C,QAAQ;AAAA,EACV,CAAC;AAED,0CAAc,eAAe,SAAS;AAEtC,MAAI;AACJ,MAAI,cAAc,OAAO,UAAU;AACjC,UAAM,sBAAkB,uBAAW,QAAW,aAAa;AAC3D,UAAM,oBAAoB,MAAM,gBAAgB,qBAAqB;AAErE,wBAAoB,kBAAkB,MAAM;AAAA,EAC9C;AAEA,YAAU,sBAAkB,2BAAc,UAAU,CAAC,KAAK,UAAU,GAAG;AACvE;AAAA,IACE,eAAe,QAAQ,MAAM,eAAe,QACzC,IAAI,CAAC,WAAW,OAAG,2BAAc,QAAQ,UAAU,CAAC,KAAK,MAAM,GAAG,EAClE,KAAK,IAAI,CAAC;AAAA,EACf;AAEA,YAAU,eAAe,QAAQ,MAAM,SAAS;AAChD,YAAU,QAAQ,IAAI,CAAC,SAAS,MAAM,IAAI;AAAA,CAAI,CAAC;AAE/C,QAAM,QAAQ,QAAQ;AAAA,IAAI,CAAC,YACzB,QAAQ;AAAA,MAAQ,CAAC,WACf,MAAM,YAAY;AAChB;AAAA,UACE,qBAAqB,OAAO,WAAO;AAAA,YACjC;AAAA,YACA,sBAAQ;AAAA,UACV,CAAC,KAAK,MAAM;AAAA,QACd;AAEA,cAAM,2BAAuB;AAAA,UAC3B,cAAc,QAAQ;AAAA,UACtB;AAAA,QACF;AACA,cAAM,qBAAiB;AAAA,UACrB;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAGA,YAAI,KAAC,sBAAW,cAAc,GAAG;AAC/B,oBAAU,QAAQ,cAAc,iCAAiC;AACjE,uCAAU,qBAAQ,cAAc,GAAG,EAAE,WAAW,KAAK,CAAC;AACtD,uCAAc,gBAAgB,EAAE;AAAA,QAClC;AAEA,cAAM,2BAAuB,sDAAuB,gBAAgB;AAAA,UAClE;AAAA,UACA;AAAA,QACF,CAAC;AAED,YAAI,
qBAAqB,WAAW;AAClC,oBAAU,qBAAqB,OAAO;AACtC;AAAA,QACF;AAEA,cAAM;AAAA,UACJ;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,QAAM,QAAQ,IAAI,KAAK;AACzB;","names":["pLimit","fg"]}
package/dist/cjs/utils/calculateChunks.cjs
@@ -0,0 +1,120 @@
"use strict";
|
|
2
|
+
var __defProp = Object.defineProperty;
|
|
3
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
4
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
5
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
6
|
+
var __export = (target, all) => {
|
|
7
|
+
for (var name in all)
|
|
8
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
9
|
+
};
|
|
10
|
+
var __copyProps = (to, from, except, desc) => {
|
|
11
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
12
|
+
for (let key of __getOwnPropNames(from))
|
|
13
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
14
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
15
|
+
}
|
|
16
|
+
return to;
|
|
17
|
+
};
|
|
18
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
19
|
+
var calculateChunks_exports = {};
|
|
20
|
+
__export(calculateChunks_exports, {
|
|
21
|
+
chunkText: () => chunkText
|
|
22
|
+
});
|
|
23
|
+
module.exports = __toCommonJS(calculateChunks_exports);
|
|
24
|
+
var import_splitTextByLine = require('./splitTextByLine.cjs');
|
|
25
|
+
const DEFAULT_MAX_CHARS_PER_CHUNK = 800;
|
|
26
|
+
const DEFAULT_OVERLAP_CHARS = 0;
|
|
27
|
+
const chunkText = (text, maxCharsPerChunk = DEFAULT_MAX_CHARS_PER_CHUNK, overlapChars = DEFAULT_OVERLAP_CHARS) => {
|
|
28
|
+
if (maxCharsPerChunk <= 0) {
|
|
29
|
+
throw new Error("maxCharsPerChunk must be greater than 0");
|
|
30
|
+
}
|
|
31
|
+
const splittedText = (0, import_splitTextByLine.splitTextByLines)(text);
|
|
32
|
+
const lines = [];
|
|
33
|
+
let charStartAcc = 0;
|
|
34
|
+
splittedText.forEach((line, index) => {
|
|
35
|
+
lines.push({
|
|
36
|
+
content: line,
|
|
37
|
+
lineStart: index,
|
|
38
|
+
lineLength: 1,
|
|
39
|
+
charStart: charStartAcc,
|
|
40
|
+
charLength: line.length
|
|
41
|
+
});
|
|
42
|
+
charStartAcc += line.length;
|
|
43
|
+
});
|
|
44
|
+
const groupedLines = lines.reduce(
|
|
45
|
+
(acc, line) => {
|
|
46
|
+
if (line.content.length > maxCharsPerChunk) {
|
|
47
|
+
acc.push(line);
|
|
48
|
+
return acc;
|
|
49
|
+
}
|
|
50
|
+
if (acc.length === 0) {
|
|
51
|
+
acc.push(line);
|
|
52
|
+
return acc;
|
|
53
|
+
}
|
|
54
|
+
const lastChunk = acc[acc.length - 1];
|
|
55
|
+
const combinedLength = lastChunk.content.length + line.content.length;
|
|
56
|
+
if (combinedLength > maxCharsPerChunk) {
|
|
57
|
+
acc.push(line);
|
|
58
|
+
return acc;
|
|
59
|
+
}
|
|
60
|
+
const combinedContent = lastChunk.content + line.content;
|
|
61
|
+
const updatedChunk = {
|
|
62
|
+
content: combinedContent,
|
|
63
|
+
lineStart: lastChunk.lineStart,
|
|
64
|
+
lineLength: lastChunk.lineLength + line.lineLength,
|
|
65
|
+
charStart: lastChunk.charStart,
|
|
66
|
+
charLength: combinedContent.length
|
|
67
|
+
};
|
|
68
|
+
acc[acc.length - 1] = updatedChunk;
|
|
69
|
+
return acc;
|
|
70
|
+
},
|
|
71
|
+
[]
|
|
72
|
+
);
|
|
73
|
+
const splittedLines = groupedLines.flatMap((line) => {
|
|
74
|
+
const chunk = [];
|
|
75
|
+
if (line.content.length <= maxCharsPerChunk) {
|
|
76
|
+
chunk.push(line);
|
|
77
|
+
return chunk;
|
|
78
|
+
}
|
|
79
|
+
for (let i = 0; i < line.content.length; i += maxCharsPerChunk) {
|
|
80
|
+
const slicedContent = line.content.slice(i, i + maxCharsPerChunk);
|
|
81
|
+
chunk.push({
|
|
82
|
+
content: slicedContent,
|
|
83
|
+
lineStart: line.lineStart,
|
|
84
|
+
lineLength: 1,
|
|
85
|
+
charStart: line.charStart + i,
|
|
86
|
+
charLength: slicedContent.length
|
|
87
|
+
});
|
|
88
|
+
}
|
|
89
|
+
return chunk;
|
|
90
|
+
});
|
|
91
|
+
if (overlapChars === 0) return splittedLines;
|
|
92
|
+
const overlapChunks = splittedLines.length > 0 ? [splittedLines[0]] : [];
|
|
93
|
+
for (let i = 1; i < splittedLines.length; i++) {
|
|
94
|
+
const previousChunk = splittedLines[i - 1];
|
|
95
|
+
const chunk = splittedLines[i];
|
|
96
|
+
const overlapContent = previousChunk.content.slice(-overlapChars);
|
|
97
|
+
const overlapLineNb = (0, import_splitTextByLine.splitTextByLines)(overlapContent).length;
|
|
98
|
+
const overlapContentWithoutPartialLine = overlapContent.slice(
|
|
99
|
+
overlapLineNb > 1 ? overlapContent.indexOf("\n") + 1 : 0,
|
|
100
|
+
overlapContent.length
|
|
101
|
+
);
|
|
102
|
+
const newContent = overlapContentWithoutPartialLine + chunk.content;
|
|
103
|
+
const newLineLength = (0, import_splitTextByLine.splitTextByLines)(newContent).length;
|
|
104
|
+
const lineDiff = chunk.lineLength - newLineLength;
|
|
105
|
+
const overlappedChunk = {
|
|
106
|
+
content: newContent,
|
|
107
|
+
lineStart: chunk.lineStart + lineDiff,
|
|
108
|
+
lineLength: chunk.lineLength - lineDiff,
|
|
109
|
+
charStart: chunk.charStart - overlapContentWithoutPartialLine.length,
|
|
110
|
+
charLength: newContent.length
|
|
111
|
+
};
|
|
112
|
+
overlapChunks.push(overlappedChunk);
|
|
113
|
+
}
|
|
114
|
+
return overlapChunks;
|
|
115
|
+
};
|
|
116
|
+
// Annotate the CommonJS export names for ESM import in node:
|
|
117
|
+
0 && (module.exports = {
|
|
118
|
+
chunkText
|
|
119
|
+
});
|
|
120
|
+
//# sourceMappingURL=calculateChunks.cjs.map
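chunkText splits a document line by line into chunks of at most maxCharsPerChunk characters (800 by default) and, when overlapChars is non-zero, prepends up to overlapChars characters of trailing context from the previous chunk, trimmed to whole lines. Each chunk records where it sits in the original text. A small sketch of the returned shape, using the package-internal module and an illustrative input:

```ts
// Sketch only: chunkText is an internal util of @intlayer/cli; this import path
// refers to the source module inside the package, and the sample text is illustrative.
import { chunkText } from './utils/calculateChunks';

const chunks = chunkText('line one\nline two\nline three\n', 12, 0);
// Each entry is a ChunkLineResult:
// { content, lineStart, lineLength, charStart, charLength }
for (const chunk of chunks) {
  console.log(chunk.lineStart, chunk.charStart, JSON.stringify(chunk.content));
}
```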
package/dist/cjs/utils/calculateChunks.cjs.map
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/utils/calculateChunks.ts"],"sourcesContent":["import { splitTextByLines } from './splitTextByLine';\n\nexport type ChunkLineResult = {\n lineStart: number;\n lineLength: number;\n charStart: number;\n charLength: number;\n content: string;\n};\n\nconst DEFAULT_MAX_CHARS_PER_CHUNK = 800;\nconst DEFAULT_OVERLAP_CHARS = 0;\n\nexport const chunkText = (\n text: string,\n maxCharsPerChunk: number = DEFAULT_MAX_CHARS_PER_CHUNK,\n overlapChars: number = DEFAULT_OVERLAP_CHARS\n): ChunkLineResult[] => {\n if (maxCharsPerChunk <= 0) {\n throw new Error('maxCharsPerChunk must be greater than 0');\n }\n\n const splittedText = splitTextByLines(text);\n\n // Split text into lines to faciliate the translation\n const lines: ChunkLineResult[] = [];\n let charStartAcc = 0;\n\n splittedText.forEach((line, index) => {\n lines.push({\n content: line,\n lineStart: index,\n lineLength: 1,\n charStart: charStartAcc,\n charLength: line.length,\n });\n charStartAcc += line.length;\n });\n\n // Group lines\n // as long as the chunk length is less than maxCharsPerChunk\n // if a line longer than maxCharsPerChunk, keep it alone\n // if a line is not longer than maxCharsPerChunk, it is grouped\n const groupedLines: ChunkLineResult[] = lines.reduce(\n (acc: ChunkLineResult[], line) => {\n // If this line alone exceeds maxCharsPerChunk, keep it separate\n if (line.content.length > maxCharsPerChunk) {\n acc.push(line);\n return acc;\n }\n\n // If we have no chunks yet, start with this line\n if (acc.length === 0) {\n acc.push(line);\n return acc;\n }\n\n // Get the last chunk\n const lastChunk = acc[acc.length - 1];\n\n // Calculate what the combined length would be (including newline character)\n const combinedLength = lastChunk.content.length + line.content.length;\n\n // If combining would exceed the limit, start a new chunk\n if (combinedLength > maxCharsPerChunk) {\n acc.push(line);\n return acc;\n }\n\n // Otherwise, combine with the last chunk\n const combinedContent = lastChunk.content + line.content;\n const updatedChunk = {\n content: combinedContent,\n lineStart: lastChunk.lineStart,\n lineLength: lastChunk.lineLength + line.lineLength,\n charStart: lastChunk.charStart,\n charLength: combinedContent.length,\n };\n\n acc[acc.length - 1] = updatedChunk;\n return acc;\n },\n []\n );\n\n // If one line is longer than maxCharsPerChunk, split it into multiple chunks\n const splittedLines: ChunkLineResult[] = groupedLines.flatMap((line) => {\n const chunk: ChunkLineResult[] = [];\n\n if (line.content.length <= maxCharsPerChunk) {\n chunk.push(line);\n return chunk;\n }\n\n for (let i = 0; i < line.content.length; i += maxCharsPerChunk) {\n const slicedContent = line.content.slice(i, i + maxCharsPerChunk);\n chunk.push({\n content: slicedContent,\n lineStart: line.lineStart,\n lineLength: 1,\n charStart: line.charStart + i,\n charLength: slicedContent.length,\n });\n }\n return chunk;\n });\n\n if (overlapChars === 0) return splittedLines;\n\n const overlapChunks: ChunkLineResult[] =\n splittedLines.length > 0 ? [splittedLines[0]] : [];\n\n for (let i = 1; i < splittedLines.length; i++) {\n const previousChunk = splittedLines[i - 1];\n const chunk = splittedLines[i];\n\n const overlapContent = previousChunk.content.slice(-overlapChars);\n const overlapLineNb = splitTextByLines(overlapContent).length;\n\n const overlapContentWithoutPartialLine = overlapContent.slice(\n overlapLineNb > 1 ? 
overlapContent.indexOf('\\n') + 1 : 0,\n overlapContent.length\n );\n\n const newContent = overlapContentWithoutPartialLine + chunk.content;\n const newLineLength = splitTextByLines(newContent).length;\n const lineDiff = chunk.lineLength - newLineLength;\n\n const overlappedChunk = {\n content: newContent,\n lineStart: chunk.lineStart + lineDiff,\n lineLength: chunk.lineLength - lineDiff,\n charStart: chunk.charStart - overlapContentWithoutPartialLine.length,\n charLength: newContent.length,\n };\n\n overlapChunks.push(overlappedChunk);\n }\n\n return overlapChunks;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,6BAAiC;AAUjC,MAAM,8BAA8B;AACpC,MAAM,wBAAwB;AAEvB,MAAM,YAAY,CACvB,MACA,mBAA2B,6BAC3B,eAAuB,0BACD;AACtB,MAAI,oBAAoB,GAAG;AACzB,UAAM,IAAI,MAAM,yCAAyC;AAAA,EAC3D;AAEA,QAAM,mBAAe,yCAAiB,IAAI;AAG1C,QAAM,QAA2B,CAAC;AAClC,MAAI,eAAe;AAEnB,eAAa,QAAQ,CAAC,MAAM,UAAU;AACpC,UAAM,KAAK;AAAA,MACT,SAAS;AAAA,MACT,WAAW;AAAA,MACX,YAAY;AAAA,MACZ,WAAW;AAAA,MACX,YAAY,KAAK;AAAA,IACnB,CAAC;AACD,oBAAgB,KAAK;AAAA,EACvB,CAAC;AAMD,QAAM,eAAkC,MAAM;AAAA,IAC5C,CAAC,KAAwB,SAAS;AAEhC,UAAI,KAAK,QAAQ,SAAS,kBAAkB;AAC1C,YAAI,KAAK,IAAI;AACb,eAAO;AAAA,MACT;AAGA,UAAI,IAAI,WAAW,GAAG;AACpB,YAAI,KAAK,IAAI;AACb,eAAO;AAAA,MACT;AAGA,YAAM,YAAY,IAAI,IAAI,SAAS,CAAC;AAGpC,YAAM,iBAAiB,UAAU,QAAQ,SAAS,KAAK,QAAQ;AAG/D,UAAI,iBAAiB,kBAAkB;AACrC,YAAI,KAAK,IAAI;AACb,eAAO;AAAA,MACT;AAGA,YAAM,kBAAkB,UAAU,UAAU,KAAK;AACjD,YAAM,eAAe;AAAA,QACnB,SAAS;AAAA,QACT,WAAW,UAAU;AAAA,QACrB,YAAY,UAAU,aAAa,KAAK;AAAA,QACxC,WAAW,UAAU;AAAA,QACrB,YAAY,gBAAgB;AAAA,MAC9B;AAEA,UAAI,IAAI,SAAS,CAAC,IAAI;AACtB,aAAO;AAAA,IACT;AAAA,IACA,CAAC;AAAA,EACH;AAGA,QAAM,gBAAmC,aAAa,QAAQ,CAAC,SAAS;AACtE,UAAM,QAA2B,CAAC;AAElC,QAAI,KAAK,QAAQ,UAAU,kBAAkB;AAC3C,YAAM,KAAK,IAAI;AACf,aAAO;AAAA,IACT;AAEA,aAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,QAAQ,KAAK,kBAAkB;AAC9D,YAAM,gBAAgB,KAAK,QAAQ,MAAM,GAAG,IAAI,gBAAgB;AAChE,YAAM,KAAK;AAAA,QACT,SAAS;AAAA,QACT,WAAW,KAAK;AAAA,QAChB,YAAY;AAAA,QACZ,WAAW,KAAK,YAAY;AAAA,QAC5B,YAAY,cAAc;AAAA,MAC5B,CAAC;AAAA,IACH;AACA,WAAO;AAAA,EACT,CAAC;AAED,MAAI,iBAAiB,EAAG,QAAO;AAE/B,QAAM,gBACJ,cAAc,SAAS,IAAI,CAAC,cAAc,CAAC,CAAC,IAAI,CAAC;AAEnD,WAAS,IAAI,GAAG,IAAI,cAAc,QAAQ,KAAK;AAC7C,UAAM,gBAAgB,cAAc,IAAI,CAAC;AACzC,UAAM,QAAQ,cAAc,CAAC;AAE7B,UAAM,iBAAiB,cAAc,QAAQ,MAAM,CAAC,YAAY;AAChE,UAAM,oBAAgB,yCAAiB,cAAc,EAAE;AAEvD,UAAM,mCAAmC,eAAe;AAAA,MACtD,gBAAgB,IAAI,eAAe,QAAQ,IAAI,IAAI,IAAI;AAAA,MACvD,eAAe;AAAA,IACjB;AAEA,UAAM,aAAa,mCAAmC,MAAM;AAC5D,UAAM,oBAAgB,yCAAiB,UAAU,EAAE;AACnD,UAAM,WAAW,MAAM,aAAa;AAEpC,UAAM,kBAAkB;AAAA,MACtB,SAAS;AAAA,MACT,WAAW,MAAM,YAAY;AAAA,MAC7B,YAAY,MAAM,aAAa;AAAA,MAC/B,WAAW,MAAM,YAAY,iCAAiC;AAAA,MAC9D,YAAY,WAAW;AAAA,IACzB;AAEA,kBAAc,KAAK,eAAe;AAAA,EACpC;AAEA,SAAO;AACT;","names":[]}
package/dist/cjs/utils/calculateChunks.test.cjs
@@ -0,0 +1,104 @@
"use strict";
|
|
2
|
+
var import_fs = require("fs");
|
|
3
|
+
var import_path = require("path");
|
|
4
|
+
var import_vitest = require("vitest");
|
|
5
|
+
var import_calculateChunks = require('./calculateChunks.cjs');
|
|
6
|
+
var import_getChunk = require('./getChunk.cjs');
|
|
7
|
+
const sampleText = [
|
|
8
|
+
"Line 0: The quick brown fox jumps over the lazy dog.",
|
|
9
|
+
"Line 1: Pack my box with five dozen liquor jugs.",
|
|
10
|
+
"Line 2: How razorback-jumping frogs can level six piqued gymnasts!",
|
|
11
|
+
"Line 3: 1. 2. 3. 4. 5. 6. 7. 8. 9. 10. 11. 12. 13. 14. 15. 16. 17. 18. 19. 20. 21. 22. 23. 24. 25. 26. 27. 28. 29. 30. 31. 32. 33. 34. 35. 36. 37. 38. 39. 40. 41. 42. 43. 44. 45. 46. 47. 48. 49. 50. 51. 52. 53. 54. 55. 56. 57. 58. 59. 60. ",
|
|
12
|
+
"Line 4: A mad boxer shot a quick, gloved jab to the jaw of his dizzy opponent."
|
|
13
|
+
].join("\n");
|
|
14
|
+
(0, import_vitest.describe)("calculateChunks", () => {
|
|
15
|
+
import_vitest.it.skip("creates chunks with custom parameters", () => {
|
|
16
|
+
const chunks = (0, import_calculateChunks.chunkText)(sampleText, 200);
|
|
17
|
+
(0, import_vitest.expect)(chunks).toStrictEqual([
|
|
18
|
+
{
|
|
19
|
+
content: "Line 0: The quick brown fox jumps over the lazy dog.\nLine 1: Pack my box with five dozen liquor jugs.\nLine 2: How razorback-jumping frogs can level six piqued gymnasts!",
|
|
20
|
+
lineStart: 0,
|
|
21
|
+
lineEnd: 2,
|
|
22
|
+
charStart: 0,
|
|
23
|
+
charEnd: 167
|
|
24
|
+
},
|
|
25
|
+
{
|
|
26
|
+
content: "Line 3: 1. 2. 3. 4. 5. 6. 7. 8. 9. 10. 11. 12. 13. 14. 15. 16. 17. 18. 19. 20. 21. 22. 23. 24. 25. 26. 27. 28. 29. 30. 31. 32. 33. 34. 35. 36. 37. 38. 39. 40. 41. 42. 43. 44. 45. 46. 47. 48. 49. 50. 5",
|
|
27
|
+
lineStart: 3,
|
|
28
|
+
lineEnd: 3,
|
|
29
|
+
charStart: 0,
|
|
30
|
+
charEnd: 199
|
|
31
|
+
},
|
|
32
|
+
{
|
|
33
|
+
content: "1. 52. 53. 54. 55. 56. 57. 58. 59. 60. ",
|
|
34
|
+
lineStart: 3,
|
|
35
|
+
lineEnd: 3,
|
|
36
|
+
charStart: 200,
|
|
37
|
+
charEnd: 238
|
|
38
|
+
},
|
|
39
|
+
{
|
|
40
|
+
content: "Line 4: A mad boxer shot a quick, gloved jab to the jaw of his dizzy opponent.",
|
|
41
|
+
lineStart: 4,
|
|
42
|
+
lineEnd: 4,
|
|
43
|
+
charStart: 0,
|
|
44
|
+
charEnd: 77
|
|
45
|
+
}
|
|
46
|
+
]);
|
|
47
|
+
});
|
|
48
|
+
(0, import_vitest.it)("creates chunks with overlap", () => {
|
|
49
|
+
const chunks = (0, import_calculateChunks.chunkText)(sampleText, 200, 100);
|
|
50
|
+
(0, import_vitest.expect)(chunks).toStrictEqual([
|
|
51
|
+
{
|
|
52
|
+
content: "Line 0: The quick brown fox jumps over the lazy dog.\nLine 1: Pack my box with five dozen liquor jugs.\nLine 2: How razorback-jumping frogs can level six piqued gymnasts!\n",
|
|
53
|
+
lineStart: 0,
|
|
54
|
+
lineLength: 3,
|
|
55
|
+
charStart: 0,
|
|
56
|
+
charLength: 169
|
|
57
|
+
},
|
|
58
|
+
{
|
|
59
|
+
content: "Line 2: How razorback-jumping frogs can level six piqued gymnasts!\nLine 3: 1. 2. 3. 4. 5. 6. 7. 8. 9. 10. 11. 12. 13. 14. 15. 16. 17. 18. 19. 20. 21. 22. 23. 24. 25. 26. 27. 28. 29. 30. 31. 32. 33. 34. 35. 36. 37. 38. 39. 40. 41. 42. 43. 44. 45. 46. 47. 48. 49. 50. 5",
|
|
60
|
+
lineStart: 2,
|
|
61
|
+
lineLength: 2,
|
|
62
|
+
charStart: 102,
|
|
63
|
+
charLength: 267
|
|
64
|
+
},
|
|
65
|
+
{
|
|
66
|
+
content: "6. 27. 28. 29. 30. 31. 32. 33. 34. 35. 36. 37. 38. 39. 40. 41. 42. 43. 44. 45. 46. 47. 48. 49. 50. 51. 52. 53. 54. 55. 56. 57. 58. 59. 60. \n",
|
|
67
|
+
lineStart: 3,
|
|
68
|
+
lineLength: 1,
|
|
69
|
+
charStart: 269,
|
|
70
|
+
charLength: 140
|
|
71
|
+
},
|
|
72
|
+
{
|
|
73
|
+
content: "1. 52. 53. 54. 55. 56. 57. 58. 59. 60. \nLine 4: A mad boxer shot a quick, gloved jab to the jaw of his dizzy opponent.",
|
|
74
|
+
lineStart: 3,
|
|
75
|
+
lineLength: 2,
|
|
76
|
+
charStart: 369,
|
|
77
|
+
charLength: 118
|
|
78
|
+
}
|
|
79
|
+
]);
|
|
80
|
+
});
|
|
81
|
+
(0, import_vitest.it)("Line and char stats correspond", () => {
|
|
82
|
+
const chunks = (0, import_calculateChunks.chunkText)(sampleText, 200, 100);
|
|
83
|
+
const firstChunk = chunks[0];
|
|
84
|
+
const thirdChunk = chunks[2];
|
|
85
|
+
const retrievedFirstChunk = (0, import_getChunk.getChunk)(sampleText, firstChunk);
|
|
86
|
+
const retrievedThirdChunk = (0, import_getChunk.getChunk)(sampleText, thirdChunk);
|
|
87
|
+
(0, import_vitest.expect)(retrievedFirstChunk).toBe(firstChunk.content);
|
|
88
|
+
(0, import_vitest.expect)(retrievedThirdChunk).toBe(thirdChunk.content);
|
|
89
|
+
});
|
|
90
|
+
(0, import_vitest.it)("Validates chunking with real file content", () => {
|
|
91
|
+
const fileContent = (0, import_fs.readFileSync)(
|
|
92
|
+
(0, import_path.join)(__dirname, "./calculrateChunkTest.md"),
|
|
93
|
+
"utf-8"
|
|
94
|
+
);
|
|
95
|
+
const chunks = (0, import_calculateChunks.chunkText)(fileContent, 200, 100);
|
|
96
|
+
const firstChunk = chunks[8];
|
|
97
|
+
const thirdChunk = chunks[25];
|
|
98
|
+
const retrievedFirstChunk = (0, import_getChunk.getChunk)(fileContent, firstChunk);
|
|
99
|
+
const retrievedThirdChunk = (0, import_getChunk.getChunk)(fileContent, thirdChunk);
|
|
100
|
+
(0, import_vitest.expect)(retrievedFirstChunk).toBe(firstChunk.content);
|
|
101
|
+
(0, import_vitest.expect)(retrievedThirdChunk).toBe(thirdChunk.content);
|
|
102
|
+
});
|
|
103
|
+
});
|
|
104
|
+
//# sourceMappingURL=calculateChunks.test.cjs.map
|
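
Note: the test suite above pins down the shape of the chunking helpers added in this release. The following TypeScript sketch (not part of the published diff) shows how they appear to be used, based only on what the tests assert; the meaning of the second and third arguments as a maximum chunk size and an overlap size in characters is inferred from the test values, not from documented API.

import { chunkText } from './calculateChunks';
import { getChunk } from './getChunk';

// Two short lines stand in for a long document.
const text = [
  'First line of a long document.',
  'Second line of a long document.',
].join('\n');

// Inferred: split into chunks of at most ~200 characters that overlap the
// previous chunk by ~100 characters of trailing context.
const chunks = chunkText(text, 200, 100);

for (const chunk of chunks) {
  // Each chunk records where it sits in the source text.
  console.log(chunk.lineStart, chunk.lineLength, chunk.charStart, chunk.charLength);

  // getChunk re-reads the same slice from the source, so round-tripping
  // should return chunk.content exactly (this is what the tests assert).
  const slice = getChunk(text, chunk);
  console.assert(slice === chunk.content);
}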

@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/utils/calculateChunks.test.ts"],"sourcesContent":["import { readFileSync } from 'fs';\nimport { join } from 'path';\nimport { describe, expect, it } from 'vitest';\nimport { chunkText } from './calculateChunks';\nimport { getChunk } from './getChunk';\n\n// Sample multiline string reused across test cases\nconst sampleText = [\n 'Line 0: The quick brown fox jumps over the lazy dog.',\n 'Line 1: Pack my box with five dozen liquor jugs.',\n 'Line 2: How razorback-jumping frogs can level six piqued gymnasts!',\n 'Line 3: 1. 2. 3. 4. 5. 6. 7. 8. 9. 10. 11. 12. 13. 14. 15. 16. 17. 18. 19. 20. 21. 22. 23. 24. 25. 26. 27. 28. 29. 30. 31. 32. 33. 34. 35. 36. 37. 38. 39. 40. 41. 42. 43. 44. 45. 46. 47. 48. 49. 50. 51. 52. 53. 54. 55. 56. 57. 58. 59. 60. ',\n 'Line 4: A mad boxer shot a quick, gloved jab to the jaw of his dizzy opponent.',\n].join('\\n');\n\ndescribe('calculateChunks', () => {\n it.skip('creates chunks with custom parameters', () => {\n const chunks = chunkText(sampleText, 200);\n\n expect(chunks).toStrictEqual([\n {\n content:\n 'Line 0: The quick brown fox jumps over the lazy dog.\\n' +\n 'Line 1: Pack my box with five dozen liquor jugs.\\n' +\n 'Line 2: How razorback-jumping frogs can level six piqued gymnasts!',\n lineStart: 0,\n lineEnd: 2,\n charStart: 0,\n charEnd: 167,\n },\n {\n content:\n 'Line 3: 1. 2. 3. 4. 5. 6. 7. 8. 9. 10. 11. 12. 13. 14. 15. 16. 17. 18. 19. 20. 21. 22. 23. 24. 25. 26. 27. 28. 29. 30. 31. 32. 33. 34. 35. 36. 37. 38. 39. 40. 41. 42. 43. 44. 45. 46. 47. 48. 49. 50. 5',\n lineStart: 3,\n lineEnd: 3,\n charStart: 0,\n charEnd: 199,\n },\n {\n content: '1. 52. 53. 54. 55. 56. 57. 58. 59. 60. ',\n lineStart: 3,\n lineEnd: 3,\n charStart: 200,\n charEnd: 238,\n },\n {\n content:\n 'Line 4: A mad boxer shot a quick, gloved jab to the jaw of his dizzy opponent.',\n lineStart: 4,\n lineEnd: 4,\n charStart: 0,\n charEnd: 77,\n },\n ]);\n });\n\n it('creates chunks with overlap', () => {\n const chunks = chunkText(sampleText, 200, 100);\n\n expect(chunks).toStrictEqual([\n {\n content:\n 'Line 0: The quick brown fox jumps over the lazy dog.\\n' +\n 'Line 1: Pack my box with five dozen liquor jugs.\\n' +\n 'Line 2: How razorback-jumping frogs can level six piqued gymnasts!\\n',\n lineStart: 0,\n lineLength: 3,\n charStart: 0,\n charLength: 169,\n },\n {\n content:\n 'Line 2: How razorback-jumping frogs can level six piqued gymnasts!\\n' +\n 'Line 3: 1. 2. 3. 4. 5. 6. 7. 8. 9. 10. 11. 12. 13. 14. 15. 16. 17. 18. 19. 20. 21. 22. 23. 24. 25. 26. 27. 28. 29. 30. 31. 32. 33. 34. 35. 36. 37. 38. 39. 40. 41. 42. 43. 44. 45. 46. 47. 48. 49. 50. 5',\n lineStart: 2,\n lineLength: 2,\n charStart: 102,\n charLength: 267,\n },\n {\n content:\n '6. 27. 28. 29. 30. 31. 32. 33. 34. 35. 36. 37. 38. 39. 40. 41. 42. 43. 44. 45. 46. 47. 48. 49. 50. 51. 52. 53. 54. 55. 56. 57. 58. 59. 60. \\n',\n lineStart: 3,\n lineLength: 1,\n charStart: 269,\n charLength: 140,\n },\n {\n content:\n '1. 52. 53. 54. 55. 56. 57. 58. 59. 60. 
\\n' +\n 'Line 4: A mad boxer shot a quick, gloved jab to the jaw of his dizzy opponent.',\n lineStart: 3,\n lineLength: 2,\n charStart: 369,\n charLength: 118,\n },\n ]);\n });\n\n it('Line and char stats correspond', () => {\n const chunks = chunkText(sampleText, 200, 100);\n\n const firstChunk = chunks[0];\n const thirdChunk = chunks[2];\n\n const retrievedFirstChunk = getChunk(sampleText, firstChunk);\n const retrievedThirdChunk = getChunk(sampleText, thirdChunk);\n\n expect(retrievedFirstChunk).toBe(firstChunk.content);\n expect(retrievedThirdChunk).toBe(thirdChunk.content);\n });\n\n it('Validates chunking with real file content', () => {\n const fileContent = readFileSync(\n join(__dirname, './calculrateChunkTest.md'),\n 'utf-8'\n );\n\n const chunks = chunkText(fileContent, 200, 100);\n\n const firstChunk = chunks[8];\n const thirdChunk = chunks[25];\n\n const retrievedFirstChunk = getChunk(fileContent, firstChunk);\n const retrievedThirdChunk = getChunk(fileContent, thirdChunk);\n\n expect(retrievedFirstChunk).toBe(firstChunk.content);\n expect(retrievedThirdChunk).toBe(thirdChunk.content);\n });\n});\n"],"mappings":";AAAA,gBAA6B;AAC7B,kBAAqB;AACrB,oBAAqC;AACrC,6BAA0B;AAC1B,sBAAyB;AAGzB,MAAM,aAAa;AAAA,EACjB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,EAAE,KAAK,IAAI;AAAA,IAEX,wBAAS,mBAAmB,MAAM;AAChC,mBAAG,KAAK,yCAAyC,MAAM;AACrD,UAAM,aAAS,kCAAU,YAAY,GAAG;AAExC,8BAAO,MAAM,EAAE,cAAc;AAAA,MAC3B;AAAA,QACE,SACE;AAAA,QAGF,WAAW;AAAA,QACX,SAAS;AAAA,QACT,WAAW;AAAA,QACX,SAAS;AAAA,MACX;AAAA,MACA;AAAA,QACE,SACE;AAAA,QACF,WAAW;AAAA,QACX,SAAS;AAAA,QACT,WAAW;AAAA,QACX,SAAS;AAAA,MACX;AAAA,MACA;AAAA,QACE,SAAS;AAAA,QACT,WAAW;AAAA,QACX,SAAS;AAAA,QACT,WAAW;AAAA,QACX,SAAS;AAAA,MACX;AAAA,MACA;AAAA,QACE,SACE;AAAA,QACF,WAAW;AAAA,QACX,SAAS;AAAA,QACT,WAAW;AAAA,QACX,SAAS;AAAA,MACX;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AAED,wBAAG,+BAA+B,MAAM;AACtC,UAAM,aAAS,kCAAU,YAAY,KAAK,GAAG;AAE7C,8BAAO,MAAM,EAAE,cAAc;AAAA,MAC3B;AAAA,QACE,SACE;AAAA,QAGF,WAAW;AAAA,QACX,YAAY;AAAA,QACZ,WAAW;AAAA,QACX,YAAY;AAAA,MACd;AAAA,MACA;AAAA,QACE,SACE;AAAA,QAEF,WAAW;AAAA,QACX,YAAY;AAAA,QACZ,WAAW;AAAA,QACX,YAAY;AAAA,MACd;AAAA,MACA;AAAA,QACE,SACE;AAAA,QACF,WAAW;AAAA,QACX,YAAY;AAAA,QACZ,WAAW;AAAA,QACX,YAAY;AAAA,MACd;AAAA,MACA;AAAA,QACE,SACE;AAAA,QAEF,WAAW;AAAA,QACX,YAAY;AAAA,QACZ,WAAW;AAAA,QACX,YAAY;AAAA,MACd;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AAED,wBAAG,kCAAkC,MAAM;AACzC,UAAM,aAAS,kCAAU,YAAY,KAAK,GAAG;AAE7C,UAAM,aAAa,OAAO,CAAC;AAC3B,UAAM,aAAa,OAAO,CAAC;AAE3B,UAAM,0BAAsB,0BAAS,YAAY,UAAU;AAC3D,UAAM,0BAAsB,0BAAS,YAAY,UAAU;AAE3D,8BAAO,mBAAmB,EAAE,KAAK,WAAW,OAAO;AACnD,8BAAO,mBAAmB,EAAE,KAAK,WAAW,OAAO;AAAA,EACrD,CAAC;AAED,wBAAG,6CAA6C,MAAM;AACpD,UAAM,kBAAc;AAAA,UAClB,kBAAK,WAAW,0BAA0B;AAAA,MAC1C;AAAA,IACF;AAEA,UAAM,aAAS,kCAAU,aAAa,KAAK,GAAG;AAE9C,UAAM,aAAa,OAAO,CAAC;AAC3B,UAAM,aAAa,OAAO,EAAE;AAE5B,UAAM,0BAAsB,0BAAS,aAAa,UAAU;AAC5D,UAAM,0BAAsB,0BAAS,aAAa,UAAU;AAE5D,8BAAO,mBAAmB,EAAE,KAAK,WAAW,OAAO;AACnD,8BAAO,mBAAmB,EAAE,KAAK,WAAW,OAAO;AAAA,EACrD,CAAC;AACH,CAAC;","names":[]}

@@ -0,0 +1,9 @@
Lorem ipsum dolor sit amet consectetur adipiscing elit. Blandit quis suspendisse aliquet nisi sodales consequat magna. Sem placerat in id cursus mi pretium tellus. Finibus facilisis dapibus etiam interdum tortor ligula congue. Sed diam urna tempor pulvinar vivamus fringilla lacus. Porta elementum a enim euismod quam justo lectus. Nisl malesuada lacinia integer nunc posuere ut hendrerit. Imperdiet mollis nullam volutpat porttitor ullamcorper rutrum gravida. Ad litora torquent per conubia nostra inceptos himenaeos. Ornare sagittis vehicula praesent dui felis venenatis ultrices. Dis parturient montes nascetur ridiculus mus donec rhoncus. Potenti ultricies habitant morbi senectus netus suscipit auctor. Maximus eget fermentum odio phasellus non purus est. Platea dictumst lorem ipsum dolor sit amet consectetur. Dictum risus blandit quis suspendisse aliquet nisi sodales. Vitae pellentesque sem placerat in id cursus mi. Luctus nibh finibus facilisis dapibus etiam interdum tortor. Eu aenean sed diam urna tempor pulvinar vivamus. Tincidunt nam porta elementum a enim euismod quam. Iaculis massa nisl malesuada lacinia integer nunc posuere. Velit aliquam imperdiet mollis nullam volutpat porttitor ullamcorper. Taciti sociosqu ad litora torquent per conubia nostra.
Primis vulputate ornare sagittis vehicula praesent dui felis. Et magnis dis parturient montes nascetur ridiculus mus. Accumsan maecenas potenti ultricies habitant morbi senectus netus. Mattis scelerisque maximus eget fermentum odio phasellus non. Hac habitasse platea dictumst lorem ipsum dolor sit. Vestibulum fusce dictum risus blandit quis suspendisse aliquet. Ex sapien vitae pellentesque sem placerat in id. Neque at luctus nibh finibus facilisis dapibus etiam. Tempus leo eu aenean sed diam urna tempor. Viverra ac tincidunt nam porta elementum a enim. Bibendum egestas iaculis massa nisl malesuada lacinia integer. Arcu dignissim velit aliquam imperdiet mollis nullam volutpat. Class aptent taciti sociosqu ad litora torquent per. Turpis fames primis vulputate ornare sagittis vehicula praesent. Natoque penatibus et magnis dis parturient montes nascetur. Feugiat tristique accumsan maecenas potenti ultricies habitant morbi. Nulla molestie mattis scelerisque maximus eget fermentum odio. Cubilia curae hac habitasse platea dictumst lorem ipsum. Mauris pharetra vestibulum fusce dictum risus blandit quis. Quisque faucibus ex sapien vitae pellentesque sem placerat. Ante condimentum neque at luctus nibh finibus facilisis. Duis convallis tempus leo eu aenean sed diam. Sollicitudin erat viverra ac tincidunt nam porta elementum. Nec metus bibendum egestas iaculis massa nisl malesuada.
Commodo augue arcu dignissim velit aliquam imperdiet mollis. Semper vel class aptent taciti sociosqu ad litora. Cras eleifend turpis fames primis vulputate ornare sagittis. Orci varius natoque penatibus et magnis dis parturient. Proin libero feugiat tristique accumsan maecenas potenti ultricies. Eros lobortis nulla molestie mattis scelerisque maximus eget. Curabitur facilisi cubilia curae hac habitasse platea dictumst. Efficitur laoreet mauris pharetra vestibulum fusce dictum risus. Adipiscing elit quisque faucibus ex sapien vitae pellentesque. Consequat magna ante condimentum neque at luctus nibh. Pretium tellus duis convallis tempus leo eu aenean. Ligula congue sollicitudin erat viverra ac tincidunt nam. Fringilla lacus nec metus bibendum egestas iaculis massa. Justo lectus commodo augue arcu dignissim velit aliquam. Ut hendrerit semper vel class aptent taciti sociosqu. Rutrum gravida cras eleifend turpis fames primis vulputate. Inceptos himenaeos orci varius natoque penatibus et magnis. Venenatis ultrices proin libero feugiat tristique accumsan maecenas. Donec rhoncus eros lobortis nulla molestie mattis scelerisque. Suscipit auctor curabitur facilisi cubilia curae hac habitasse. Purus est efficitur laoreet mauris pharetra vestibulum fusce. Amet consectetur adipiscing elit quisque faucibus ex sapien. Nisi sodales consequat magna ante condimentum neque at. Cursus mi pretium tellus duis convallis tempus leo. Interdum tortor ligula congue sollicitudin erat viverra ac.
Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Euismod quam justo lectus commodo augue arcu dignissim. Nunc posuere ut hendrerit semper vel class aptent. Porttitor ullamcorper rutrum gravida cras eleifend turpis fames. Conubia nostra inceptos himenaeos orci varius natoque penatibus. Dui felis venenatis ultrices proin libero feugiat tristique. Ridiculus mus donec rhoncus eros lobortis nulla molestie. Senectus netus suscipit auctor curabitur facilisi cubilia curae. Phasellus non purus est efficitur laoreet mauris pharetra. Dolor sit amet consectetur adipiscing elit quisque faucibus. Suspendisse aliquet nisi sodales consequat magna ante condimentum. In id cursus mi pretium tellus duis convallis.
Dapibus etiam interdum tortor ligula congue sollicitudin erat. Urna tempor pulvinar vivamus fringilla lacus nec metus. Aenim euismod quam justo lectus commodo augue. Lacinia integer nunc posuere ut hendrerit semper vel. Nullam volutpat porttitor ullamcorper rutrum gravida cras eleifend. Torquent per conubia nostra inceptos himenaeos orci varius. Vehicula praesent dui felis venenatis ultrices proin libero. Montes nascetur ridiculus mus donec rhoncus eros lobortis. Habitant morbi senectus netus suscipit auctor curabitur facilisi. Fermentum odio phasellus non purus est efficitur laoreet. Lorem ipsum dolor sit amet consectetur adipiscing elit. Blandit quis suspendisse aliquet nisi sodales consequat magna. Sem placerat in id cursus mi pretium tellus. Finibus facilisis dapibus etiam interdum tortor ligula congue. Sed diam urna tempor pulvinar vivamus fringilla lacus. Porta elementum a enim euismod quam justo lectus. Nisl malesuada lacinia integer nunc posuere ut hendrerit.

@@ -0,0 +1,40 @@
"use strict";
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
var checkAIAccess_exports = {};
__export(checkAIAccess_exports, {
  checkAIAccess: () => checkAIAccess
});
module.exports = __toCommonJS(checkAIAccess_exports);
var import_config = require("@intlayer/config");
const checkAIAccess = (configuration, aiOptions) => {
  const appLogger = (0, import_config.getAppLogger)(configuration);
  if (!configuration.editor.clientId && !configuration.editor.clientSecret && !configuration.ai?.apiKey && !aiOptions?.apiKey) {
    appLogger("AI options or API key not provided. Skipping AI translation.", {
      level: "error"
    });
    throw new Error(
      "AI options or API key not provided. Skipping AI translation."
    );
  }
};
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
  checkAIAccess
});
//# sourceMappingURL=checkAIAccess.cjs.map
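
Note: checkAIAccess throws (after logging at "error" level) when neither editor credentials (clientId/clientSecret) nor an AI API key is available from configuration.ai or the passed options. The TypeScript sketch below (not part of the published diff) illustrates a plausible call site; getConfiguration from @intlayer/config, the OPENAI_API_KEY variable name, and the try/catch wrapper are assumptions, not the CLI's actual code.

import { getConfiguration } from '@intlayer/config'; // assumed helper for loading the project config
import { checkAIAccess } from './utils/checkAIAccess';

// Load the project configuration (editor credentials, ai settings, ...).
const configuration = getConfiguration();

try {
  // Throws when there is no editor clientId/clientSecret, no
  // configuration.ai?.apiKey and no apiKey in the options passed here.
  checkAIAccess(configuration, { apiKey: process.env.OPENAI_API_KEY }); // env var name is an assumption
} catch {
  // AI translation is skipped when access cannot be established.
}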

@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/utils/checkAIAccess.ts"],"sourcesContent":["import type { AIOptions } from '@intlayer/api';\nimport { getAppLogger, type IntlayerConfig } from '@intlayer/config';\n\nexport const checkAIAccess = (\n configuration: IntlayerConfig,\n aiOptions?: AIOptions\n) => {\n const appLogger = getAppLogger(configuration);\n\n if (\n !configuration.editor.clientId &&\n !configuration.editor.clientSecret &&\n !configuration.ai?.apiKey &&\n !aiOptions?.apiKey\n ) {\n appLogger('AI options or API key not provided. Skipping AI translation.', {\n level: 'error',\n });\n // Potentially handle this case differently, e.g., by using a different translation method or stopping.\n\n throw new Error(\n 'AI options or API key not provided. Skipping AI translation.'\n );\n }\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,oBAAkD;AAE3C,MAAM,gBAAgB,CAC3B,eACA,cACG;AACH,QAAM,gBAAY,4BAAa,aAAa;AAE5C,MACE,CAAC,cAAc,OAAO,YACtB,CAAC,cAAc,OAAO,gBACtB,CAAC,cAAc,IAAI,UACnB,CAAC,WAAW,QACZ;AACA,cAAU,gEAAgE;AAAA,MACxE,OAAO;AAAA,IACT,CAAC;AAGD,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACF;","names":[]}