@intlayer/backend 6.0.0 → 6.0.2-canary.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -68,7 +68,7 @@ const translateJSON = async ({
  const prompt = CHAT_GPT_PROMPT.replace(
  "{{entryLocale}}",
  formatLocaleWithName(entryLocale)
- ).replace("{{outputLocale}}", formatLocaleWithName(outputLocale)).replace("{{entryFileContent}}", JSON.stringify(entryFileContent)).replace("{{presetOutputContent}}", JSON.stringify(presetOutputContent)).replace("{{dictionaryDescription}}", dictionaryDescription).replace("{{applicationContext}}", applicationContext ?? "").replace("{{tagsInstructions}}", formatTagInstructions(tags)).replace("{{modeInstructions}}", getModeInstructions(mode));
+ ).replace("{{outputLocale}}", formatLocaleWithName(outputLocale)).replace("{{entryFileContent}}", JSON.stringify(entryFileContent)).replace("{{presetOutputContent}}", JSON.stringify(presetOutputContent)).replace("{{dictionaryDescription}}", dictionaryDescription ?? "").replace("{{applicationContext}}", applicationContext ?? "").replace("{{tagsInstructions}}", formatTagInstructions(tags)).replace("{{modeInstructions}}", getModeInstructions(mode));
  const { text: newContent, usage } = await (0, import_ai.generateText)({
  ...aiConfig,
  messages: [{ role: "system", content: prompt }]
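
For context, the only functional change in the hunk above is that the {{dictionaryDescription}} placeholder is now filled with dictionaryDescription ?? "" instead of dictionaryDescription, matching the existing fallback for applicationContext; the updated source map below also shows the field becoming optional (dictionaryDescription?: string) in TranslateJSONOptions. The following is a minimal TypeScript sketch of that pattern, not the package's actual code: the PromptInputs type, PROMPT_TEMPLATE constant, and buildPrompt helper are hypothetical stand-ins for the real prompt loaded from PROMPT.md.

type PromptInputs = {
  entryLocale: string;
  outputLocale: string;
  dictionaryDescription?: string; // optional, as in 6.0.2-canary.0
  applicationContext?: string;
};

// Illustrative template; the real prompt template lives in PROMPT.md.
const PROMPT_TEMPLATE = [
  'Translate from {{entryLocale}} to {{outputLocale}}.',
  'Dictionary description: {{dictionaryDescription}}',
  'Application context: {{applicationContext}}',
].join('\n');

const buildPrompt = (inputs: PromptInputs): string =>
  PROMPT_TEMPLATE.replace('{{entryLocale}}', inputs.entryLocale)
    .replace('{{outputLocale}}', inputs.outputLocale)
    // Without the ?? '' fallback, an undefined replacement value is coerced to
    // the literal string "undefined" by String.prototype.replace.
    .replace('{{dictionaryDescription}}', inputs.dictionaryDescription ?? '')
    .replace('{{applicationContext}}', inputs.applicationContext ?? '');

// Omitting the description now yields an empty slot rather than "undefined".
console.log(buildPrompt({ entryLocale: 'en: English', outputLocale: 'fr: French' }));
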
@@ -1 +1 @@
- {"version":3,"sources":["../../../../../src/utils/AI/translateJSON/index.ts"],"sourcesContent":["import type { Tag } from '@/types/tag.types';\nimport { getLocaleName } from '@intlayer/core';\nimport { logger } from '@logger';\nimport { extractJson } from '@utils/extractJSON';\nimport { generateText } from 'ai';\nimport { readFileSync } from 'fs';\nimport { Locales } from 'intlayer';\nimport { dirname, join } from 'path';\nimport { fileURLToPath } from 'url';\nimport { AIConfig, AIOptions, AIProvider } from '../aiSdk';\n\nconst __dirname = dirname(fileURLToPath(import.meta.url));\n\n// Get the content of a file at the specified path\nconst getFileContent = (filePath: string) =>\n readFileSync(join(__dirname, filePath), { encoding: 'utf-8' });\n\nexport type TranslateJSONOptions = {\n entryFileContent: JSON;\n presetOutputContent: JSON;\n dictionaryDescription: string;\n entryLocale: Locales;\n outputLocale: Locales;\n tags: Tag[];\n aiConfig: AIConfig;\n mode: 'complete' | 'review';\n applicationContext?: string;\n};\n\nexport type TranslateJSONResultData = {\n fileContent: string;\n tokenUsed: number;\n};\n\n// The prompt template to send to the AI model\nconst CHAT_GPT_PROMPT = getFileContent('./PROMPT.md');\n\nexport const aiDefaultOptions: AIOptions = {\n provider: AIProvider.OPENAI,\n model: 'gpt-5-nano',\n};\n\n/**\n * Format a locale with its name.\n *\n * @param locale - The locale to format.\n * @returns A string in the format \"locale: name\", e.g. \"en: English\".\n */\nconst formatLocaleWithName = (locale: Locales): string =>\n `${locale}: ${getLocaleName(locale, Locales.ENGLISH)}`;\n\n/**\n * Formats tag instructions for the AI prompt.\n * Creates a string with all available tags and their descriptions.\n *\n * @param tags - The list of tags to format.\n * @returns A formatted string with tag instructions.\n */\nconst formatTagInstructions = (tags: Tag[]): string => {\n if (!tags || tags.length === 0) {\n return '';\n }\n\n // Prepare the tag instructions.\n return `Based on the dictionary content, identify specific tags from the list below that would be relevant:\n \n${tags.map(({ key, description }) => `- ${key}: ${description}`).join('\\n\\n')}`;\n};\n\nconst getModeInstructions = (mode: 'complete' | 'review'): string => {\n if (mode === 'complete') {\n return 'Mode: \"Complete\" - Enrich the preset content with the missing keys and values in the output locale. Do not update existing keys. Everything should be returned in the output.';\n }\n\n return 'Mode: \"Review\" - Fill missing content and review existing keys from the preset content. If a key from the entry is missing in the output, it must be translated to the target language and added. If you detect misspelled content, or content that should be reformulated, correct it. 
If a translation is not coherent with the desired language, translate it.';\n};\n\n/**\n * TranslateJSONs a content declaration file by constructing a prompt for AI models.\n * The prompt includes details about the project's locales, file paths of content declarations,\n * and requests for identifying issues or inconsistencies.\n */\nexport const translateJSON = async ({\n entryFileContent,\n presetOutputContent,\n dictionaryDescription,\n aiConfig,\n entryLocale,\n outputLocale,\n tags,\n mode,\n applicationContext,\n}: TranslateJSONOptions): Promise<TranslateJSONResultData | undefined> => {\n // Prepare the prompt for AI by replacing placeholders with actual values.\n const prompt = CHAT_GPT_PROMPT.replace(\n '{{entryLocale}}',\n formatLocaleWithName(entryLocale)\n )\n .replace('{{outputLocale}}', formatLocaleWithName(outputLocale))\n .replace('{{entryFileContent}}', JSON.stringify(entryFileContent))\n .replace('{{presetOutputContent}}', JSON.stringify(presetOutputContent))\n .replace('{{dictionaryDescription}}', dictionaryDescription)\n .replace('{{applicationContext}}', applicationContext ?? '')\n .replace('{{tagsInstructions}}', formatTagInstructions(tags))\n .replace('{{modeInstructions}}', getModeInstructions(mode));\n\n // Use the AI SDK to generate the completion\n const { text: newContent, usage } = await generateText({\n ...aiConfig,\n messages: [{ role: 'system', content: prompt }],\n });\n\n logger.info(`${usage?.totalTokens ?? 0} tokens used in the request`);\n\n return {\n fileContent: extractJson(newContent),\n tokenUsed: usage?.totalTokens ?? 0,\n };\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,kBAA8B;AAC9B,oBAAuB;AACvB,yBAA4B;AAC5B,gBAA6B;AAC7B,gBAA6B;AAC7B,sBAAwB;AACxB,kBAA8B;AAC9B,iBAA8B;AAC9B,mBAAgD;AAThD;AAWA,MAAM,gBAAY,yBAAQ,0BAAc,YAAY,GAAG,CAAC;AAGxD,MAAM,iBAAiB,CAAC,iBACtB,4BAAa,kBAAK,WAAW,QAAQ,GAAG,EAAE,UAAU,QAAQ,CAAC;AAoB/D,MAAM,kBAAkB,eAAe,aAAa;AAE7C,MAAM,mBAA8B;AAAA,EACzC,UAAU,wBAAW;AAAA,EACrB,OAAO;AACT;AAQA,MAAM,uBAAuB,CAAC,WAC5B,GAAG,MAAM,SAAK,2BAAc,QAAQ,wBAAQ,OAAO,CAAC;AAStD,MAAM,wBAAwB,CAAC,SAAwB;AACrD,MAAI,CAAC,QAAQ,KAAK,WAAW,GAAG;AAC9B,WAAO;AAAA,EACT;AAGA,SAAO;AAAA;AAAA,EAEP,KAAK,IAAI,CAAC,EAAE,KAAK,YAAY,MAAM,KAAK,GAAG,KAAK,WAAW,EAAE,EAAE,KAAK,MAAM,CAAC;AAC7E;AAEA,MAAM,sBAAsB,CAAC,SAAwC;AACnE,MAAI,SAAS,YAAY;AACvB,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAOO,MAAM,gBAAgB,OAAO;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAA0E;AAExE,QAAM,SAAS,gBAAgB;AAAA,IAC7B;AAAA,IACA,qBAAqB,WAAW;AAAA,EAClC,EACG,QAAQ,oBAAoB,qBAAqB,YAAY,CAAC,EAC9D,QAAQ,wBAAwB,KAAK,UAAU,gBAAgB,CAAC,EAChE,QAAQ,2BAA2B,KAAK,UAAU,mBAAmB,CAAC,EACtE,QAAQ,6BAA6B,qBAAqB,EAC1D,QAAQ,0BAA0B,sBAAsB,EAAE,EAC1D,QAAQ,wBAAwB,sBAAsB,IAAI,CAAC,EAC3D,QAAQ,wBAAwB,oBAAoB,IAAI,CAAC;AAG5D,QAAM,EAAE,MAAM,YAAY,MAAM,IAAI,UAAM,wBAAa;AAAA,IACrD,GAAG;AAAA,IACH,UAAU,CAAC,EAAE,MAAM,UAAU,SAAS,OAAO,CAAC;AAAA,EAChD,CAAC;AAED,uBAAO,KAAK,GAAG,OAAO,eAAe,CAAC,6BAA6B;AAEnE,SAAO;AAAA,IACL,iBAAa,gCAAY,UAAU;AAAA,IACnC,WAAW,OAAO,eAAe;AAAA,EACnC;AACF;","names":[]}
+ {"version":3,"sources":["../../../../../src/utils/AI/translateJSON/index.ts"],"sourcesContent":["import type { Tag } from '@/types/tag.types';\nimport { getLocaleName } from '@intlayer/core';\nimport { logger } from '@logger';\nimport { extractJson } from '@utils/extractJSON';\nimport { generateText } from 'ai';\nimport { readFileSync } from 'fs';\nimport { Locales } from 'intlayer';\nimport { dirname, join } from 'path';\nimport { fileURLToPath } from 'url';\nimport { AIConfig, AIOptions, AIProvider } from '../aiSdk';\n\nconst __dirname = dirname(fileURLToPath(import.meta.url));\n\n// Get the content of a file at the specified path\nconst getFileContent = (filePath: string) =>\n readFileSync(join(__dirname, filePath), { encoding: 'utf-8' });\n\nexport type TranslateJSONOptions = {\n entryFileContent: JSON;\n presetOutputContent: JSON;\n dictionaryDescription?: string;\n entryLocale: Locales;\n outputLocale: Locales;\n tags: Tag[];\n aiConfig: AIConfig;\n mode: 'complete' | 'review';\n applicationContext?: string;\n};\n\nexport type TranslateJSONResultData = {\n fileContent: string;\n tokenUsed: number;\n};\n\n// The prompt template to send to the AI model\nconst CHAT_GPT_PROMPT = getFileContent('./PROMPT.md');\n\nexport const aiDefaultOptions: AIOptions = {\n provider: AIProvider.OPENAI,\n model: 'gpt-5-nano',\n};\n\n/**\n * Format a locale with its name.\n *\n * @param locale - The locale to format.\n * @returns A string in the format \"locale: name\", e.g. \"en: English\".\n */\nconst formatLocaleWithName = (locale: Locales): string =>\n `${locale}: ${getLocaleName(locale, Locales.ENGLISH)}`;\n\n/**\n * Formats tag instructions for the AI prompt.\n * Creates a string with all available tags and their descriptions.\n *\n * @param tags - The list of tags to format.\n * @returns A formatted string with tag instructions.\n */\nconst formatTagInstructions = (tags: Tag[]): string => {\n if (!tags || tags.length === 0) {\n return '';\n }\n\n // Prepare the tag instructions.\n return `Based on the dictionary content, identify specific tags from the list below that would be relevant:\n \n${tags.map(({ key, description }) => `- ${key}: ${description}`).join('\\n\\n')}`;\n};\n\nconst getModeInstructions = (mode: 'complete' | 'review'): string => {\n if (mode === 'complete') {\n return 'Mode: \"Complete\" - Enrich the preset content with the missing keys and values in the output locale. Do not update existing keys. Everything should be returned in the output.';\n }\n\n return 'Mode: \"Review\" - Fill missing content and review existing keys from the preset content. If a key from the entry is missing in the output, it must be translated to the target language and added. If you detect misspelled content, or content that should be reformulated, correct it. 
If a translation is not coherent with the desired language, translate it.';\n};\n\n/**\n * TranslateJSONs a content declaration file by constructing a prompt for AI models.\n * The prompt includes details about the project's locales, file paths of content declarations,\n * and requests for identifying issues or inconsistencies.\n */\nexport const translateJSON = async ({\n entryFileContent,\n presetOutputContent,\n dictionaryDescription,\n aiConfig,\n entryLocale,\n outputLocale,\n tags,\n mode,\n applicationContext,\n}: TranslateJSONOptions): Promise<TranslateJSONResultData | undefined> => {\n // Prepare the prompt for AI by replacing placeholders with actual values.\n const prompt = CHAT_GPT_PROMPT.replace(\n '{{entryLocale}}',\n formatLocaleWithName(entryLocale)\n )\n .replace('{{outputLocale}}', formatLocaleWithName(outputLocale))\n .replace('{{entryFileContent}}', JSON.stringify(entryFileContent))\n .replace('{{presetOutputContent}}', JSON.stringify(presetOutputContent))\n .replace('{{dictionaryDescription}}', dictionaryDescription ?? '')\n .replace('{{applicationContext}}', applicationContext ?? '')\n .replace('{{tagsInstructions}}', formatTagInstructions(tags))\n .replace('{{modeInstructions}}', getModeInstructions(mode));\n\n // Use the AI SDK to generate the completion\n const { text: newContent, usage } = await generateText({\n ...aiConfig,\n messages: [{ role: 'system', content: prompt }],\n });\n\n logger.info(`${usage?.totalTokens ?? 0} tokens used in the request`);\n\n return {\n fileContent: extractJson(newContent),\n tokenUsed: usage?.totalTokens ?? 0,\n };\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,kBAA8B;AAC9B,oBAAuB;AACvB,yBAA4B;AAC5B,gBAA6B;AAC7B,gBAA6B;AAC7B,sBAAwB;AACxB,kBAA8B;AAC9B,iBAA8B;AAC9B,mBAAgD;AAThD;AAWA,MAAM,gBAAY,yBAAQ,0BAAc,YAAY,GAAG,CAAC;AAGxD,MAAM,iBAAiB,CAAC,iBACtB,4BAAa,kBAAK,WAAW,QAAQ,GAAG,EAAE,UAAU,QAAQ,CAAC;AAoB/D,MAAM,kBAAkB,eAAe,aAAa;AAE7C,MAAM,mBAA8B;AAAA,EACzC,UAAU,wBAAW;AAAA,EACrB,OAAO;AACT;AAQA,MAAM,uBAAuB,CAAC,WAC5B,GAAG,MAAM,SAAK,2BAAc,QAAQ,wBAAQ,OAAO,CAAC;AAStD,MAAM,wBAAwB,CAAC,SAAwB;AACrD,MAAI,CAAC,QAAQ,KAAK,WAAW,GAAG;AAC9B,WAAO;AAAA,EACT;AAGA,SAAO;AAAA;AAAA,EAEP,KAAK,IAAI,CAAC,EAAE,KAAK,YAAY,MAAM,KAAK,GAAG,KAAK,WAAW,EAAE,EAAE,KAAK,MAAM,CAAC;AAC7E;AAEA,MAAM,sBAAsB,CAAC,SAAwC;AACnE,MAAI,SAAS,YAAY;AACvB,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAOO,MAAM,gBAAgB,OAAO;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAA0E;AAExE,QAAM,SAAS,gBAAgB;AAAA,IAC7B;AAAA,IACA,qBAAqB,WAAW;AAAA,EAClC,EACG,QAAQ,oBAAoB,qBAAqB,YAAY,CAAC,EAC9D,QAAQ,wBAAwB,KAAK,UAAU,gBAAgB,CAAC,EAChE,QAAQ,2BAA2B,KAAK,UAAU,mBAAmB,CAAC,EACtE,QAAQ,6BAA6B,yBAAyB,EAAE,EAChE,QAAQ,0BAA0B,sBAAsB,EAAE,EAC1D,QAAQ,wBAAwB,sBAAsB,IAAI,CAAC,EAC3D,QAAQ,wBAAwB,oBAAoB,IAAI,CAAC;AAG5D,QAAM,EAAE,MAAM,YAAY,MAAM,IAAI,UAAM,wBAAa;AAAA,IACrD,GAAG;AAAA,IACH,UAAU,CAAC,EAAE,MAAM,UAAU,SAAS,OAAO,CAAC;AAAA,EAChD,CAAC;AAED,uBAAO,KAAK,GAAG,OAAO,eAAe,CAAC,6BAA6B;AAEnE,SAAO;AAAA,IACL,iBAAa,gCAAY,UAAU;AAAA,IACnC,WAAW,OAAO,eAAe;AAAA,EACnC;AACF;","names":[]}