@intlayer/ai 7.3.0-canary.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +275 -0
- package/dist/assets/auditDictionaryMetadata/PROMPT.md +73 -0
- package/dist/assets/translateJSON/PROMPT.md +45 -0
- package/dist/cjs/_virtual/_utils_asset.cjs +97 -0
- package/dist/cjs/aiSdk.cjs +93 -0
- package/dist/cjs/aiSdk.cjs.map +1 -0
- package/dist/cjs/auditDictionaryMetadata/index.cjs +37 -0
- package/dist/cjs/auditDictionaryMetadata/index.cjs.map +1 -0
- package/dist/cjs/customQuery.cjs +24 -0
- package/dist/cjs/customQuery.cjs.map +1 -0
- package/dist/cjs/index.cjs +25 -0
- package/dist/cjs/translateJSON/index.cjs +67 -0
- package/dist/cjs/translateJSON/index.cjs.map +1 -0
- package/dist/cjs/utils/extractJSON.cjs +61 -0
- package/dist/cjs/utils/extractJSON.cjs.map +1 -0
- package/dist/esm/_virtual/_utils_asset.mjs +97 -0
- package/dist/esm/aiSdk.mjs +92 -0
- package/dist/esm/aiSdk.mjs.map +1 -0
- package/dist/esm/auditDictionaryMetadata/index.mjs +36 -0
- package/dist/esm/auditDictionaryMetadata/index.mjs.map +1 -0
- package/dist/esm/customQuery.mjs +23 -0
- package/dist/esm/customQuery.mjs.map +1 -0
- package/dist/esm/index.mjs +8 -0
- package/dist/esm/translateJSON/index.mjs +66 -0
- package/dist/esm/translateJSON/index.mjs.map +1 -0
- package/dist/esm/utils/extractJSON.mjs +60 -0
- package/dist/esm/utils/extractJSON.mjs.map +1 -0
- package/dist/types/aiSdk.d.ts +61 -0
- package/dist/types/aiSdk.d.ts.map +1 -0
- package/dist/types/auditDictionaryMetadata/index.d.ts +36 -0
- package/dist/types/auditDictionaryMetadata/index.d.ts.map +1 -0
- package/dist/types/customQuery.d.ts +24 -0
- package/dist/types/customQuery.d.ts.map +1 -0
- package/dist/types/index.d.ts +7 -0
- package/dist/types/translateJSON/index.d.ts +43 -0
- package/dist/types/translateJSON/index.d.ts.map +1 -0
- package/dist/types/utils/extractJSON.d.ts +36 -0
- package/dist/types/utils/extractJSON.d.ts.map +1 -0
- package/package.json +102 -0

package/dist/cjs/auditDictionaryMetadata/index.cjs
@@ -0,0 +1,37 @@
const require__utils_asset = require('../_virtual/_utils_asset.cjs');
const require_utils_extractJSON = require('../utils/extractJSON.cjs');
let ai = require("ai");

//#region src/auditDictionaryMetadata/index.ts
const aiDefaultOptions = {};
/**
 * Audits a content declaration file by constructing a prompt for AI models.
 * The prompt includes details about the project's locales, file paths of content declarations,
 * and requests for identifying issues or inconsistencies.
 */
const auditDictionaryMetadata = async ({ fileContent, tags, aiConfig, applicationContext }) => {
  const prompt = require__utils_asset.readAsset("./PROMPT.md").replace("{{applicationContext}}", applicationContext ?? "").replace("{{tags}}", tags ? JSON.stringify(tags.map(({ key, description }) => `- ${key}: ${description}`).join("\n\n"), null, 2) : "");
  const { text: newContent, usage } = await (0, ai.generateText)({
    ...aiConfig,
    messages: [{
      role: "system",
      content: prompt
    }, {
      role: "user",
      content: [
        "**Content declaration to describe:**",
        "This is the content declaration that you should consider to describe:",
        fileContent
      ].join("\n")
    }]
  });
  return {
    fileContent: require_utils_extractJSON.extractJson(newContent),
    tokenUsed: usage?.totalTokens ?? 0
  };
};

//#endregion
exports.aiDefaultOptions = aiDefaultOptions;
exports.auditDictionaryMetadata = auditDictionaryMetadata;
//# sourceMappingURL=index.cjs.map

package/dist/cjs/auditDictionaryMetadata/index.cjs.map
@@ -0,0 +1 @@
{"version":3,"file":"index.cjs","names":["aiDefaultOptions: AIOptions","readAsset","extractJson"],"sources":["../../../src/auditDictionaryMetadata/index.ts"],"sourcesContent":["import { readAsset } from 'utils:asset';\nimport { generateText } from 'ai';\nimport type { AIConfig, AIOptions } from '../aiSdk';\nimport { extractJson } from '../utils/extractJSON';\n\ntype Tag = {\n key: string;\n description?: string;\n};\n\nexport type AuditDictionaryMetadataOptions = {\n fileContent: string;\n tags?: Tag[];\n aiConfig: AIConfig;\n applicationContext?: string;\n};\n\nexport type AuditFileResultData = {\n fileContent: {\n title: string;\n description: string;\n tags: string[];\n };\n tokenUsed: number;\n};\n\nexport const aiDefaultOptions: AIOptions = {\n // Keep default options\n};\n\n/**\n * Audits a content declaration file by constructing a prompt for AI models.\n * The prompt includes details about the project's locales, file paths of content declarations,\n * and requests for identifying issues or inconsistencies.\n */\nexport const auditDictionaryMetadata = async ({\n fileContent,\n tags,\n aiConfig,\n applicationContext,\n}: AuditDictionaryMetadataOptions): Promise<\n AuditFileResultData | undefined\n> => {\n const CHAT_GPT_PROMPT = readAsset('./PROMPT.md');\n\n // Prepare the prompt for AI by replacing placeholders with actual values.\n const prompt = CHAT_GPT_PROMPT.replace(\n '{{applicationContext}}',\n applicationContext ?? ''\n ).replace(\n '{{tags}}',\n tags\n ? JSON.stringify(\n tags\n .map(({ key, description }) => `- ${key}: ${description}`)\n .join('\\n\\n'),\n null,\n 2\n )\n : ''\n );\n\n // Use the AI SDK to generate the completion\n const { text: newContent, usage } = await generateText({\n ...aiConfig,\n messages: [\n { role: 'system', content: prompt },\n {\n role: 'user',\n content: [\n '**Content declaration to describe:**',\n 'This is the content declaration that you should consider to describe:',\n fileContent,\n ].join('\\n'),\n },\n ],\n });\n\n return {\n fileContent: extractJson(newContent),\n tokenUsed: usage?.totalTokens ?? 0,\n };\n};\n"],"mappings":";;;;;AA0BA,MAAaA,mBAA8B,EAE1C;;;;;;AAOD,MAAa,0BAA0B,OAAO,EAC5C,aACA,MACA,UACA,yBAGG;CAIH,MAAM,SAHkBC,+BAAU,cAAc,CAGjB,QAC7B,0BACA,sBAAsB,GACvB,CAAC,QACA,YACA,OACI,KAAK,UACH,KACG,KAAK,EAAE,KAAK,kBAAkB,KAAK,IAAI,IAAI,cAAc,CACzD,KAAK,OAAO,EACf,MACA,EACD,GACD,GACL;CAGD,MAAM,EAAE,MAAM,YAAY,UAAU,2BAAmB;EACrD,GAAG;EACH,UAAU,CACR;GAAE,MAAM;GAAU,SAAS;GAAQ,EACnC;GACE,MAAM;GACN,SAAS;IACP;IACA;IACA;IACD,CAAC,KAAK,KAAK;GACb,CACF;EACF,CAAC;AAEF,QAAO;EACL,aAAaC,sCAAY,WAAW;EACpC,WAAW,OAAO,eAAe;EAClC"}

package/dist/cjs/customQuery.cjs
@@ -0,0 +1,24 @@
let ai = require("ai");

//#region src/customQuery.ts
const aiDefaultOptions = { model: "gpt-4o-mini" };
/**
 * CustomQuery a content declaration file by constructing a prompt for AI models.
 * The prompt includes details about the project's locales, file paths of content declarations,
 * and requests for identifying issues or inconsistencies.
 */
const customQuery = async ({ messages, aiConfig }) => {
  const { text: newContent, usage } = await (0, ai.generateText)({
    ...aiConfig,
    messages
  });
  return {
    fileContent: newContent,
    tokenUsed: usage?.totalTokens ?? 0
  };
};

//#endregion
exports.aiDefaultOptions = aiDefaultOptions;
exports.customQuery = customQuery;
//# sourceMappingURL=customQuery.cjs.map

package/dist/cjs/customQuery.cjs.map
@@ -0,0 +1 @@
{"version":3,"file":"customQuery.cjs","names":["aiDefaultOptions: AIOptions"],"sources":["../../src/customQuery.ts"],"sourcesContent":["import { generateText } from 'ai';\nimport type { AIConfig, AIOptions, Messages } from './aiSdk';\n\nexport type CustomQueryOptions = {\n messages: Messages;\n aiConfig: AIConfig;\n};\n\nexport type CustomQueryResultData = {\n fileContent: string;\n tokenUsed: number;\n};\n\nexport const aiDefaultOptions: AIOptions = {\n model: 'gpt-4o-mini',\n // Keep default options\n};\n\n/**\n * CustomQuery a content declaration file by constructing a prompt for AI models.\n * The prompt includes details about the project's locales, file paths of content declarations,\n * and requests for identifying issues or inconsistencies.\n */\nexport const customQuery = async ({\n messages,\n aiConfig,\n}: CustomQueryOptions): Promise<CustomQueryResultData | undefined> => {\n // Use the AI SDK to generate the completion\n const { text: newContent, usage } = await generateText({\n ...aiConfig,\n messages,\n });\n\n return {\n fileContent: newContent,\n tokenUsed: usage?.totalTokens ?? 0,\n };\n};\n"],"mappings":";;;AAaA,MAAaA,mBAA8B,EACzC,OAAO,eAER;;;;;;AAOD,MAAa,cAAc,OAAO,EAChC,UACA,eACoE;CAEpE,MAAM,EAAE,MAAM,YAAY,UAAU,2BAAmB;EACrD,GAAG;EACH;EACD,CAAC;AAEF,QAAO;EACL,aAAa;EACb,WAAW,OAAO,eAAe;EAClC"}

package/dist/cjs/index.cjs
@@ -0,0 +1,25 @@
const require_aiSdk = require('./aiSdk.cjs');
const require_customQuery = require('./customQuery.cjs');
const require_utils_extractJSON = require('./utils/extractJSON.cjs');
const require_auditDictionaryMetadata_index = require('./auditDictionaryMetadata/index.cjs');
const require_translateJSON_index = require('./translateJSON/index.cjs');
let ai = require("ai");

exports.AIProvider = require_aiSdk.AIProvider;
exports.auditDictionaryMetadata = require_auditDictionaryMetadata_index.auditDictionaryMetadata;
exports.customQuery = require_customQuery.customQuery;
exports.extractJson = require_utils_extractJSON.extractJson;
Object.defineProperty(exports, 'generateText', {
  enumerable: true,
  get: function () {
    return ai.generateText;
  }
});
exports.getAIConfig = require_aiSdk.getAIConfig;
Object.defineProperty(exports, 'streamText', {
  enumerable: true,
  get: function () {
    return ai.streamText;
  }
});
exports.translateJSON = require_translateJSON_index.translateJSON;

package/dist/cjs/translateJSON/index.cjs
@@ -0,0 +1,67 @@
const require_aiSdk = require('../aiSdk.cjs');
const require__utils_asset = require('../_virtual/_utils_asset.cjs');
const require_utils_extractJSON = require('../utils/extractJSON.cjs');
let ai = require("ai");
let __intlayer_core = require("@intlayer/core");
let __intlayer_types = require("@intlayer/types");

//#region src/translateJSON/index.ts
const aiDefaultOptions = {
  provider: require_aiSdk.AIProvider.OPENAI,
  model: "gpt-5-mini"
};
/**
 * Format a locale with its name.
 *
 * @param locale - The locale to format.
 * @returns A string in the format "locale: name", e.g. "en: English".
 */
const formatLocaleWithName = (locale) => `${locale}: ${(0, __intlayer_core.getLocaleName)(locale, __intlayer_types.Locales.ENGLISH)}`;
/**
 * Formats tag instructions for the AI prompt.
 * Creates a string with all available tags and their descriptions.
 *
 * @param tags - The list of tags to format.
 * @returns A formatted string with tag instructions.
 */
const formatTagInstructions = (tags) => {
  if (!tags || tags.length === 0) return "";
  return `Based on the dictionary content, identify specific tags from the list below that would be relevant:
 
${tags.map(({ key, description }) => `- ${key}: ${description}`).join("\n\n")}`;
};
const getModeInstructions = (mode) => {
  if (mode === "complete") return "Mode: \"Complete\" - Enrich the preset content with the missing keys and values in the output locale. Do not update existing keys. Everything should be returned in the output.";
  return "Mode: \"Review\" - Fill missing content and review existing keys from the preset content. If a key from the entry is missing in the output, it must be translated to the target language and added. If you detect misspelled content, or content that should be reformulated, correct it. If a translation is not coherent with the desired language, translate it.";
};
/**
 * TranslateJSONs a content declaration file by constructing a prompt for AI models.
 * The prompt includes details about the project's locales, file paths of content declarations,
 * and requests for identifying issues or inconsistencies.
 */
const translateJSON = async ({ entryFileContent, presetOutputContent, dictionaryDescription, aiConfig, entryLocale, outputLocale, tags, mode, applicationContext }) => {
  const prompt = require__utils_asset.readAsset("./PROMPT.md").replace("{{entryLocale}}", formatLocaleWithName(entryLocale)).replace("{{outputLocale}}", formatLocaleWithName(outputLocale)).replace("{{presetOutputContent}}", JSON.stringify(presetOutputContent)).replace("{{dictionaryDescription}}", dictionaryDescription ?? "").replace("{{applicationContext}}", applicationContext ?? "").replace("{{tagsInstructions}}", formatTagInstructions(tags ?? [])).replace("{{modeInstructions}}", getModeInstructions(mode));
  const { text: newContent, usage } = await (0, ai.generateText)({
    ...aiConfig,
    messages: [{
      role: "system",
      content: prompt
    }, {
      role: "user",
      content: [
        "**Entry Content to Translate:**",
        "- Given Language: {{entryLocale}}",
        JSON.stringify(entryFileContent)
      ].join("\n")
    }]
  });
  return {
    fileContent: require_utils_extractJSON.extractJson(newContent),
    tokenUsed: usage?.totalTokens ?? 0
  };
};

//#endregion
exports.aiDefaultOptions = aiDefaultOptions;
exports.translateJSON = translateJSON;
//# sourceMappingURL=index.cjs.map

package/dist/cjs/translateJSON/index.cjs.map
@@ -0,0 +1 @@
{"version":3,"file":"index.cjs","names":["aiDefaultOptions: AIOptions","AIProvider","Locales","readAsset","extractJson"],"sources":["../../../src/translateJSON/index.ts"],"sourcesContent":["import { readAsset } from 'utils:asset';\nimport { getLocaleName } from '@intlayer/core';\nimport { type Locale, Locales } from '@intlayer/types';\nimport { generateText } from 'ai';\nimport { type AIConfig, type AIOptions, AIProvider } from '../aiSdk';\nimport { extractJson } from '../utils/extractJSON';\n\ntype Tag = {\n key: string;\n description?: string;\n};\n\nexport type TranslateJSONOptions = {\n entryFileContent: JSON;\n presetOutputContent: JSON;\n dictionaryDescription?: string;\n entryLocale: Locale;\n outputLocale: Locale;\n tags?: Tag[];\n aiConfig: AIConfig;\n mode: 'complete' | 'review';\n applicationContext?: string;\n};\n\nexport type TranslateJSONResultData = {\n fileContent: string;\n tokenUsed: number;\n};\n\nexport const aiDefaultOptions: AIOptions = {\n provider: AIProvider.OPENAI,\n model: 'gpt-5-mini',\n};\n\n/**\n * Format a locale with its name.\n *\n * @param locale - The locale to format.\n * @returns A string in the format \"locale: name\", e.g. \"en: English\".\n */\nconst formatLocaleWithName = (locale: Locale): string =>\n `${locale}: ${getLocaleName(locale, Locales.ENGLISH)}`;\n\n/**\n * Formats tag instructions for the AI prompt.\n * Creates a string with all available tags and their descriptions.\n *\n * @param tags - The list of tags to format.\n * @returns A formatted string with tag instructions.\n */\nconst formatTagInstructions = (tags: Tag[]): string => {\n if (!tags || tags.length === 0) {\n return '';\n }\n\n // Prepare the tag instructions.\n return `Based on the dictionary content, identify specific tags from the list below that would be relevant:\n \n${tags.map(({ key, description }) => `- ${key}: ${description}`).join('\\n\\n')}`;\n};\n\nconst getModeInstructions = (mode: 'complete' | 'review'): string => {\n if (mode === 'complete') {\n return 'Mode: \"Complete\" - Enrich the preset content with the missing keys and values in the output locale. Do not update existing keys. Everything should be returned in the output.';\n }\n\n return 'Mode: \"Review\" - Fill missing content and review existing keys from the preset content. If a key from the entry is missing in the output, it must be translated to the target language and added. If you detect misspelled content, or content that should be reformulated, correct it. If a translation is not coherent with the desired language, translate it.';\n};\n\n/**\n * TranslateJSONs a content declaration file by constructing a prompt for AI models.\n * The prompt includes details about the project's locales, file paths of content declarations,\n * and requests for identifying issues or inconsistencies.\n */\nexport const translateJSON = async ({\n entryFileContent,\n presetOutputContent,\n dictionaryDescription,\n aiConfig,\n entryLocale,\n outputLocale,\n tags,\n mode,\n applicationContext,\n}: TranslateJSONOptions): Promise<TranslateJSONResultData | undefined> => {\n const promptFile = readAsset('./PROMPT.md');\n // Prepare the prompt for AI by replacing placeholders with actual values.\n const prompt = promptFile\n .replace('{{entryLocale}}', formatLocaleWithName(entryLocale))\n .replace('{{outputLocale}}', formatLocaleWithName(outputLocale))\n .replace('{{presetOutputContent}}', JSON.stringify(presetOutputContent))\n .replace('{{dictionaryDescription}}', dictionaryDescription ?? 
'')\n .replace('{{applicationContext}}', applicationContext ?? '')\n .replace('{{tagsInstructions}}', formatTagInstructions(tags ?? []))\n .replace('{{modeInstructions}}', getModeInstructions(mode));\n\n // Use the AI SDK to generate the completion\n const { text: newContent, usage } = await generateText({\n ...aiConfig,\n messages: [\n { role: 'system', content: prompt },\n {\n role: 'user',\n content: [\n '**Entry Content to Translate:**',\n '- Given Language: {{entryLocale}}',\n JSON.stringify(entryFileContent),\n ].join('\\n'),\n },\n ],\n });\n\n return {\n fileContent: extractJson(newContent),\n tokenUsed: usage?.totalTokens ?? 0,\n };\n};\n"],"mappings":";;;;;;;;AA6BA,MAAaA,mBAA8B;CACzC,UAAUC,yBAAW;CACrB,OAAO;CACR;;;;;;;AAQD,MAAM,wBAAwB,WAC5B,GAAG,OAAO,uCAAkB,QAAQC,yBAAQ,QAAQ;;;;;;;;AAStD,MAAM,yBAAyB,SAAwB;AACrD,KAAI,CAAC,QAAQ,KAAK,WAAW,EAC3B,QAAO;AAIT,QAAO;;EAEP,KAAK,KAAK,EAAE,KAAK,kBAAkB,KAAK,IAAI,IAAI,cAAc,CAAC,KAAK,OAAO;;AAG7E,MAAM,uBAAuB,SAAwC;AACnE,KAAI,SAAS,WACX,QAAO;AAGT,QAAO;;;;;;;AAQT,MAAa,gBAAgB,OAAO,EAClC,kBACA,qBACA,uBACA,UACA,aACA,cACA,MACA,MACA,yBACwE;CAGxE,MAAM,SAFaC,+BAAU,cAAc,CAGxC,QAAQ,mBAAmB,qBAAqB,YAAY,CAAC,CAC7D,QAAQ,oBAAoB,qBAAqB,aAAa,CAAC,CAC/D,QAAQ,2BAA2B,KAAK,UAAU,oBAAoB,CAAC,CACvE,QAAQ,6BAA6B,yBAAyB,GAAG,CACjE,QAAQ,0BAA0B,sBAAsB,GAAG,CAC3D,QAAQ,wBAAwB,sBAAsB,QAAQ,EAAE,CAAC,CAAC,CAClE,QAAQ,wBAAwB,oBAAoB,KAAK,CAAC;CAG7D,MAAM,EAAE,MAAM,YAAY,UAAU,2BAAmB;EACrD,GAAG;EACH,UAAU,CACR;GAAE,MAAM;GAAU,SAAS;GAAQ,EACnC;GACE,MAAM;GACN,SAAS;IACP;IACA;IACA,KAAK,UAAU,iBAAiB;IACjC,CAAC,KAAK,KAAK;GACb,CACF;EACF,CAAC;AAEF,QAAO;EACL,aAAaC,sCAAY,WAAW;EACpC,WAAW,OAAO,eAAe;EAClC"}

package/dist/cjs/utils/extractJSON.cjs
@@ -0,0 +1,61 @@

//#region src/utils/extractJSON.ts
/**
 * Extracts and parses the first valid JSON value (object or array) from a string containing arbitrary text.
 * This is used to safely extract JSON from LLM responses that may contain additional text or markdown.
 *
 * @example
 * // Extracts JSON object from markdown response:
 * ```json
 * {
 *   "title": "Test content declarations",
 *   "description": "A comprehensive test dictionary...",
 *   "tags": ["test tag"]
 * }
 * ```
 *
 * @example
 * // Extracts JSON array:
 * ```json
 * ["item1", "item2", "item3"]
 * ```
 *
 * @example
 * // Extracts JSON from markdown:
 * Here is the response:
 * ```json
 * {"key": "value"}
 * ```
 * End of response.
 *
 * @throws {Error} If no valid JSON object/array is found or if parsing fails
 * @returns {T} The parsed JSON value cast to type T
 */
const extractJson = (input) => {
  const opening = input.match(/[{[]/);
  if (!opening) throw new Error("No JSON start character ({ or [) found.");
  const startIdx = opening.index;
  const openChar = input[startIdx];
  const closeChar = openChar === "{" ? "}" : "]";
  let depth = 0;
  for (let i = startIdx; i < input.length; i++) {
    const char = input[i];
    if (char === openChar) depth++;
    else if (char === closeChar) {
      depth--;
      if (depth === 0) {
        const jsonSubstring = input.slice(startIdx, i + 1);
        try {
          return JSON.parse(jsonSubstring);
        } catch (err) {
          throw new Error(`Failed to parse JSON: ${err.message}`);
        }
      }
    }
  }
  throw new Error("Reached end of input without closing JSON bracket.");
};

//#endregion
exports.extractJson = extractJson;
//# sourceMappingURL=extractJSON.cjs.map

package/dist/cjs/utils/extractJSON.cjs.map
@@ -0,0 +1 @@
{"version":3,"file":"extractJSON.cjs","names":[],"sources":["../../../src/utils/extractJSON.ts"],"sourcesContent":["/**\n * Extracts and parses the first valid JSON value (object or array) from a string containing arbitrary text.\n * This is used to safely extract JSON from LLM responses that may contain additional text or markdown.\n *\n * @example\n * // Extracts JSON object from markdown response:\n * ```json\n * {\n * \"title\": \"Test content declarations\",\n * \"description\": \"A comprehensive test dictionary...\",\n * \"tags\": [\"test tag\"]\n * }\n * ```\n *\n * @example\n * // Extracts JSON array:\n * ```json\n * [\"item1\", \"item2\", \"item3\"]\n * ```\n *\n * @example\n * // Extracts JSON from markdown:\n * Here is the response:\n * ```json\n * {\"key\": \"value\"}\n * ```\n * End of response.\n *\n * @throws {Error} If no valid JSON object/array is found or if parsing fails\n * @returns {T} The parsed JSON value cast to type T\n */\nexport const extractJson = <T = any>(input: string): T => {\n const opening = input.match(/[{[]/);\n if (!opening) throw new Error('No JSON start character ({ or [) found.');\n\n const startIdx = opening.index!;\n const openChar = input[startIdx];\n const closeChar = openChar === '{' ? '}' : ']';\n let depth = 0;\n\n for (let i = startIdx; i < input.length; i++) {\n const char = input[i];\n if (char === openChar) depth++;\n else if (char === closeChar) {\n depth--;\n if (depth === 0) {\n const jsonSubstring = input.slice(startIdx, i + 1);\n try {\n return JSON.parse(jsonSubstring) as T;\n } catch (err) {\n throw new Error(`Failed to parse JSON: ${(err as Error).message}`);\n }\n }\n }\n }\n\n throw new Error('Reached end of input without closing JSON bracket.');\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+BA,MAAa,eAAwB,UAAqB;CACxD,MAAM,UAAU,MAAM,MAAM,OAAO;AACnC,KAAI,CAAC,QAAS,OAAM,IAAI,MAAM,0CAA0C;CAExE,MAAM,WAAW,QAAQ;CACzB,MAAM,WAAW,MAAM;CACvB,MAAM,YAAY,aAAa,MAAM,MAAM;CAC3C,IAAI,QAAQ;AAEZ,MAAK,IAAI,IAAI,UAAU,IAAI,MAAM,QAAQ,KAAK;EAC5C,MAAM,OAAO,MAAM;AACnB,MAAI,SAAS,SAAU;WACd,SAAS,WAAW;AAC3B;AACA,OAAI,UAAU,GAAG;IACf,MAAM,gBAAgB,MAAM,MAAM,UAAU,IAAI,EAAE;AAClD,QAAI;AACF,YAAO,KAAK,MAAM,cAAc;aACzB,KAAK;AACZ,WAAM,IAAI,MAAM,yBAA0B,IAAc,UAAU;;;;;AAM1E,OAAM,IAAI,MAAM,qDAAqD"}

package/dist/esm/_virtual/_utils_asset.mjs
@@ -0,0 +1,97 @@
import { existsSync, readFileSync } from "node:fs";
import { basename, dirname, join, relative, resolve, sep } from "node:path";
import { fileURLToPath } from "node:url";

//#region \0utils:asset
const hereDirname = () => {
  try {
    return dirname(fileURLToPath(import.meta.url));
  } catch {
    return typeof __dirname !== "undefined" ? __dirname : process.cwd();
  }
};
const findDistRoot = (startDir) => {
  let dir = startDir;
  for (let i = 0; i < 12; i++) {
    if (basename(dir) === "dist") return dir;
    const parent = resolve(dir, "..");
    if (parent === dir) break;
    dir = parent;
  }
  return null;
};
const normalizeFrameFile = (file) => {
  if (!file) return null;
  try {
    if (file.startsWith("file://")) return fileURLToPath(file);
  } catch {}
  return file;
};
/**
 * Returns the directory of the *caller* module that invoked readAsset.
 * Prefers non-virtual frames; falls back to the first real frame.
 */
const getCallerDir = () => {
  const prev = Error.prepareStackTrace;
  try {
    Error.prepareStackTrace = (_, structured) => structured;
    const err = /* @__PURE__ */ new Error();
    Error.captureStackTrace(err, getCallerDir);
    /** @type {import('node:vm').CallSite[]} */
    const frames = err.stack || [];
    const isVirtualPath = (p) => p.includes(`${sep}_virtual${sep}`) || p.includes("/_virtual/");
    for (const frame of frames) {
      const file = normalizeFrameFile(typeof frame.getFileName === "function" ? frame.getFileName() : null);
      if (!file) continue;
      if (file.includes("node:internal") || file.includes(`${sep}internal${sep}modules${sep}`)) continue;
      if (!isVirtualPath(file)) return dirname(file);
    }
    for (const frame of frames) {
      const file = normalizeFrameFile(typeof frame.getFileName === "function" ? frame.getFileName() : null);
      if (file) return dirname(file);
    }
  } catch {} finally {
    Error.prepareStackTrace = prev;
  }
  return hereDirname();
};
/**
 * Read an asset copied from src/** to dist/assets/**.
 * - './' or '../' is resolved relative to the *caller module's* emitted directory.
 * - otherwise, treat as src-relative.
 *
 * @param {string} relPath - e.g. './PROMPT.md' or 'utils/AI/askDocQuestion/embeddings/<fileKey>.json'
 * @param {BufferEncoding} [encoding='utf8']
 */
const readAsset = (relPath, encoding = "utf8") => {
  const here = hereDirname();
  const distRoot = findDistRoot(here) ?? resolve(here, "..", "..", "dist");
  const assetsRoot = join(distRoot, "assets");
  const tried = [];
  /**
   * Transform dist/(esm|cjs)/... and _virtual/ prefix to clean subpath (Windows-safe)
   */
  const callerSubpath = relative(distRoot, getCallerDir()).split("\\").join("/").replace(/^(?:dist\/)?(?:esm|cjs)\//, "").replace(/^_virtual\//, "");
  if (relPath.startsWith("./") || relPath.startsWith("../")) {
    const fromCallerAbs = resolve(assetsRoot, callerSubpath, relPath);
    tried.push(fromCallerAbs);
    if (existsSync(fromCallerAbs)) return readFileSync(fromCallerAbs, encoding);
  }
  const directPath = join(assetsRoot, relPath);
  tried.push(directPath);
  if (existsSync(directPath)) return readFileSync(directPath, encoding);
  if (callerSubpath) {
    const nested = join(assetsRoot, callerSubpath, relPath);
    tried.push(nested);
    if (existsSync(nested)) return readFileSync(nested, encoding);
  }
  const msg = [
    "readAsset: file not found.",
    "Searched:",
    ...tried.map((p) => `- ${p}`)
  ].join("\n");
  throw new Error(msg);
};

//#endregion
export { readAsset };

package/dist/esm/aiSdk.mjs
@@ -0,0 +1,92 @@
import { createAnthropic } from "@ai-sdk/anthropic";
import { createDeepSeek } from "@ai-sdk/deepseek";
import { createGoogleGenerativeAI } from "@ai-sdk/google";
import { createMistral } from "@ai-sdk/mistral";
import { createOpenAI } from "@ai-sdk/openai";

//#region src/aiSdk.ts
/**
 * Supported AI SDK providers
 */
let AIProvider = /* @__PURE__ */ function(AIProvider$1) {
  AIProvider$1["OPENAI"] = "openai";
  AIProvider$1["ANTHROPIC"] = "anthropic";
  AIProvider$1["MISTRAL"] = "mistral";
  AIProvider$1["DEEPSEEK"] = "deepseek";
  AIProvider$1["GEMINI"] = "gemini";
  return AIProvider$1;
}({});
const getAPIKey = (accessType, aiOptions, isAuthenticated = false) => {
  const defaultApiKey = process.env.OPENAI_API_KEY;
  if (accessType.includes("public")) return aiOptions?.apiKey ?? defaultApiKey;
  if (accessType.includes("apiKey") && aiOptions?.apiKey) return aiOptions?.apiKey;
  if (accessType.includes("registered_user") && isAuthenticated) return aiOptions?.apiKey ?? defaultApiKey;
  if (accessType.includes("premium_user") && isAuthenticated) return aiOptions?.apiKey ?? defaultApiKey;
};
const getModel = (provider, userApiKey, userModel, defaultModel = "gpt-5-mini") => {
  if (userApiKey) {
    if (provider && provider === AIProvider.OPENAI) return userModel ?? defaultModel;
    switch (provider) {
      case AIProvider.ANTHROPIC: return "claude-sonnet-4-5-20250929";
      case AIProvider.MISTRAL: return "mistral-large-latest";
      case AIProvider.DEEPSEEK: return "deepseek-coder";
      case AIProvider.GEMINI: return "gemini-2.5-flash";
      default: return defaultModel;
    }
  }
  if (userModel || provider) throw new Error("The user should use his own API key to use a custom model");
  return defaultModel;
};
const DEFAULT_PROVIDER = AIProvider.OPENAI;
const DEFAULT_TEMPERATURE = 1;
/**
 * Get AI model configuration based on the selected provider and options
 * This function handles the configuration for different AI providers
 *
 * @param options Configuration options including provider, API keys, models and temperature
 * @returns Configured AI model ready to use with generateText
 */
const getAIConfig = async (options, isAuthenticated = false) => {
  const { userOptions, defaultOptions, accessType = ["registered_user"] } = options;
  const aiOptions = {
    provider: DEFAULT_PROVIDER,
    temperature: DEFAULT_TEMPERATURE,
    ...defaultOptions,
    ...userOptions
  };
  const apiKey = getAPIKey(accessType, aiOptions, isAuthenticated);
  if (!apiKey) throw new Error(`API key for ${aiOptions.provider} is missing`);
  const selectedModel = getModel(aiOptions.provider, apiKey, aiOptions.model, defaultOptions?.model);
  const protectedOptions = {
    ...aiOptions,
    apiKey,
    model: selectedModel
  };
  let languageModel;
  switch (protectedOptions.provider) {
    case AIProvider.OPENAI:
      languageModel = createOpenAI({ apiKey })(selectedModel);
      break;
    case AIProvider.ANTHROPIC:
      languageModel = createAnthropic({ apiKey })(selectedModel);
      break;
    case AIProvider.MISTRAL:
      languageModel = createMistral({ apiKey })(selectedModel);
      break;
    case AIProvider.DEEPSEEK:
      languageModel = createDeepSeek({ apiKey })(selectedModel);
      break;
    case AIProvider.GEMINI:
      languageModel = createGoogleGenerativeAI({ apiKey })(selectedModel);
      break;
    default: throw new Error(`Provider ${protectedOptions.provider} not supported`);
  }
  return {
    model: languageModel,
    temperature: protectedOptions.temperature
  };
};

//#endregion
export { AIProvider, getAIConfig };
//# sourceMappingURL=aiSdk.mjs.map

package/dist/esm/aiSdk.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"aiSdk.mjs","names":["DEFAULT_PROVIDER: AIProvider","DEFAULT_TEMPERATURE: number","languageModel: AIConfig['model']"],"sources":["../../src/aiSdk.ts"],"sourcesContent":["import { type anthropic, createAnthropic } from '@ai-sdk/anthropic';\nimport { createDeepSeek, type deepseek } from '@ai-sdk/deepseek';\nimport { createGoogleGenerativeAI, type google } from '@ai-sdk/google';\nimport { createMistral, type mistral } from '@ai-sdk/mistral';\nimport { createOpenAI, type openai } from '@ai-sdk/openai';\nimport type {\n AssistantModelMessage,\n generateText,\n SystemModelMessage,\n ToolModelMessage,\n UserModelMessage,\n} from 'ai';\n\ntype AnthropicModel = Parameters<typeof anthropic>[0];\ntype DeepSeekModel = Parameters<typeof deepseek>[0];\ntype MistralModel = Parameters<typeof mistral>[0];\ntype OpenAIModel = Parameters<typeof openai>[0];\ntype GoogleModel = Parameters<typeof google>[0];\n\nexport type Messages = (\n | SystemModelMessage\n | UserModelMessage\n | AssistantModelMessage\n | ToolModelMessage\n)[];\n\n/**\n * Supported AI models\n */\nexport type Model =\n | AnthropicModel\n | DeepSeekModel\n | MistralModel\n | OpenAIModel\n | GoogleModel\n | (string & {});\n\n/**\n * Supported AI SDK providers\n */\nexport enum AIProvider {\n OPENAI = 'openai',\n ANTHROPIC = 'anthropic',\n MISTRAL = 'mistral',\n DEEPSEEK = 'deepseek',\n GEMINI = 'gemini',\n}\n\n/**\n * Common options for all AI providers\n */\nexport type AIOptions = {\n provider?: AIProvider;\n model?: Model;\n temperature?: number;\n apiKey?: string;\n applicationContext?: string;\n};\n\n// Define the structure of messages used in chat completions\nexport type ChatCompletionRequestMessage = {\n role: 'system' | 'user' | 'assistant'; // The role of the message sender\n content: string; // The text content of the message\n timestamp?: Date; // The timestamp of the message\n};\n\ntype AccessType = 'apiKey' | 'registered_user' | 'premium_user' | 'public';\n\nconst getAPIKey = (\n accessType: AccessType[],\n aiOptions?: AIOptions,\n isAuthenticated: boolean = false\n) => {\n const defaultApiKey = process.env.OPENAI_API_KEY;\n\n if (accessType.includes('public')) {\n return aiOptions?.apiKey ?? defaultApiKey;\n }\n\n if (accessType.includes('apiKey') && aiOptions?.apiKey) {\n return aiOptions?.apiKey;\n }\n\n if (accessType.includes('registered_user') && isAuthenticated) {\n return aiOptions?.apiKey ?? defaultApiKey;\n }\n\n // TODO: Implement premium user access\n if (accessType.includes('premium_user') && isAuthenticated) {\n return aiOptions?.apiKey ?? defaultApiKey;\n }\n\n return undefined;\n};\n\nconst getModel = (\n provider: AIProvider,\n userApiKey: string,\n userModel?: Model,\n defaultModel: Model = 'gpt-5-mini'\n): Model => {\n // If the user uses their own API key, allow custom model selection\n if (userApiKey) {\n if (provider && provider === AIProvider.OPENAI) {\n return userModel ?? 
defaultModel;\n }\n\n switch (provider) {\n case AIProvider.ANTHROPIC:\n return 'claude-sonnet-4-5-20250929';\n case AIProvider.MISTRAL:\n return 'mistral-large-latest';\n case AIProvider.DEEPSEEK:\n return 'deepseek-coder';\n case AIProvider.GEMINI:\n return 'gemini-2.5-flash';\n default:\n return defaultModel;\n }\n }\n\n // Guard: Prevent custom model usage without a user API key\n if (userModel || provider) {\n throw new Error(\n 'The user should use his own API key to use a custom model'\n );\n }\n\n return defaultModel;\n};\n\nexport type AIConfig = Omit<Parameters<typeof generateText>[0], 'prompt'>;\n\nconst DEFAULT_PROVIDER: AIProvider = AIProvider.OPENAI as AIProvider;\nconst DEFAULT_TEMPERATURE: number = 1; // ChatGPT 5 accept only temperature 1\n\nexport type AIConfigOptions = {\n userOptions?: AIOptions;\n defaultOptions?: AIOptions;\n accessType?: AccessType[];\n};\n\n/**\n * Get AI model configuration based on the selected provider and options\n * This function handles the configuration for different AI providers\n *\n * @param options Configuration options including provider, API keys, models and temperature\n * @returns Configured AI model ready to use with generateText\n */\nexport const getAIConfig = async (\n options: AIConfigOptions,\n isAuthenticated: boolean = false\n): Promise<AIConfig> => {\n const {\n userOptions,\n defaultOptions,\n accessType = ['registered_user'],\n } = options;\n\n const aiOptions = {\n provider: DEFAULT_PROVIDER,\n temperature: DEFAULT_TEMPERATURE,\n ...defaultOptions,\n ...userOptions,\n } satisfies AIOptions;\n\n const apiKey = getAPIKey(accessType, aiOptions, isAuthenticated);\n\n // Check if API key is provided\n if (!apiKey) {\n throw new Error(`API key for ${aiOptions.provider} is missing`);\n }\n\n const selectedModel = getModel(\n aiOptions.provider,\n apiKey,\n aiOptions.model,\n defaultOptions?.model\n );\n\n const protectedOptions = {\n ...aiOptions,\n apiKey,\n model: selectedModel,\n } satisfies AIOptions;\n\n let languageModel: AIConfig['model'];\n\n switch (protectedOptions.provider) {\n case AIProvider.OPENAI: {\n languageModel = createOpenAI({\n apiKey,\n })(selectedModel);\n break;\n }\n\n case AIProvider.ANTHROPIC: {\n languageModel = createAnthropic({\n apiKey,\n })(selectedModel);\n break;\n }\n\n case AIProvider.MISTRAL: {\n languageModel = createMistral({\n apiKey,\n })(selectedModel);\n break;\n }\n\n case AIProvider.DEEPSEEK: {\n languageModel = createDeepSeek({\n apiKey,\n })(selectedModel);\n break;\n }\n\n case AIProvider.GEMINI: {\n languageModel = createGoogleGenerativeAI({\n apiKey,\n })(selectedModel);\n break;\n }\n\n default: {\n throw new Error(`Provider ${protectedOptions.provider} not supported`);\n }\n }\n\n return {\n model: languageModel,\n temperature: protectedOptions.temperature,\n 
};\n};\n"],"mappings":";;;;;;;;;;AAwCA,IAAY,oDAAL;AACL;AACA;AACA;AACA;AACA;;;AAuBF,MAAM,aACJ,YACA,WACA,kBAA2B,UACxB;CACH,MAAM,gBAAgB,QAAQ,IAAI;AAElC,KAAI,WAAW,SAAS,SAAS,CAC/B,QAAO,WAAW,UAAU;AAG9B,KAAI,WAAW,SAAS,SAAS,IAAI,WAAW,OAC9C,QAAO,WAAW;AAGpB,KAAI,WAAW,SAAS,kBAAkB,IAAI,gBAC5C,QAAO,WAAW,UAAU;AAI9B,KAAI,WAAW,SAAS,eAAe,IAAI,gBACzC,QAAO,WAAW,UAAU;;AAMhC,MAAM,YACJ,UACA,YACA,WACA,eAAsB,iBACZ;AAEV,KAAI,YAAY;AACd,MAAI,YAAY,aAAa,WAAW,OACtC,QAAO,aAAa;AAGtB,UAAQ,UAAR;GACE,KAAK,WAAW,UACd,QAAO;GACT,KAAK,WAAW,QACd,QAAO;GACT,KAAK,WAAW,SACd,QAAO;GACT,KAAK,WAAW,OACd,QAAO;GACT,QACE,QAAO;;;AAKb,KAAI,aAAa,SACf,OAAM,IAAI,MACR,4DACD;AAGH,QAAO;;AAKT,MAAMA,mBAA+B,WAAW;AAChD,MAAMC,sBAA8B;;;;;;;;AAepC,MAAa,cAAc,OACzB,SACA,kBAA2B,UACL;CACtB,MAAM,EACJ,aACA,gBACA,aAAa,CAAC,kBAAkB,KAC9B;CAEJ,MAAM,YAAY;EAChB,UAAU;EACV,aAAa;EACb,GAAG;EACH,GAAG;EACJ;CAED,MAAM,SAAS,UAAU,YAAY,WAAW,gBAAgB;AAGhE,KAAI,CAAC,OACH,OAAM,IAAI,MAAM,eAAe,UAAU,SAAS,aAAa;CAGjE,MAAM,gBAAgB,SACpB,UAAU,UACV,QACA,UAAU,OACV,gBAAgB,MACjB;CAED,MAAM,mBAAmB;EACvB,GAAG;EACH;EACA,OAAO;EACR;CAED,IAAIC;AAEJ,SAAQ,iBAAiB,UAAzB;EACE,KAAK,WAAW;AACd,mBAAgB,aAAa,EAC3B,QACD,CAAC,CAAC,cAAc;AACjB;EAGF,KAAK,WAAW;AACd,mBAAgB,gBAAgB,EAC9B,QACD,CAAC,CAAC,cAAc;AACjB;EAGF,KAAK,WAAW;AACd,mBAAgB,cAAc,EAC5B,QACD,CAAC,CAAC,cAAc;AACjB;EAGF,KAAK,WAAW;AACd,mBAAgB,eAAe,EAC7B,QACD,CAAC,CAAC,cAAc;AACjB;EAGF,KAAK,WAAW;AACd,mBAAgB,yBAAyB,EACvC,QACD,CAAC,CAAC,cAAc;AACjB;EAGF,QACE,OAAM,IAAI,MAAM,YAAY,iBAAiB,SAAS,gBAAgB;;AAI1E,QAAO;EACL,OAAO;EACP,aAAa,iBAAiB;EAC/B"}

package/dist/esm/auditDictionaryMetadata/index.mjs
@@ -0,0 +1,36 @@
import { readAsset } from "../_virtual/_utils_asset.mjs";
import { extractJson } from "../utils/extractJSON.mjs";
import { generateText } from "ai";

//#region src/auditDictionaryMetadata/index.ts
const aiDefaultOptions = {};
/**
 * Audits a content declaration file by constructing a prompt for AI models.
 * The prompt includes details about the project's locales, file paths of content declarations,
 * and requests for identifying issues or inconsistencies.
 */
const auditDictionaryMetadata = async ({ fileContent, tags, aiConfig, applicationContext }) => {
  const prompt = readAsset("./PROMPT.md").replace("{{applicationContext}}", applicationContext ?? "").replace("{{tags}}", tags ? JSON.stringify(tags.map(({ key, description }) => `- ${key}: ${description}`).join("\n\n"), null, 2) : "");
  const { text: newContent, usage } = await generateText({
    ...aiConfig,
    messages: [{
      role: "system",
      content: prompt
    }, {
      role: "user",
      content: [
        "**Content declaration to describe:**",
        "This is the content declaration that you should consider to describe:",
        fileContent
      ].join("\n")
    }]
  });
  return {
    fileContent: extractJson(newContent),
    tokenUsed: usage?.totalTokens ?? 0
  };
};

//#endregion
export { aiDefaultOptions, auditDictionaryMetadata };
//# sourceMappingURL=index.mjs.map

package/dist/esm/auditDictionaryMetadata/index.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"index.mjs","names":["aiDefaultOptions: AIOptions"],"sources":["../../../src/auditDictionaryMetadata/index.ts"],"sourcesContent":["import { readAsset } from 'utils:asset';\nimport { generateText } from 'ai';\nimport type { AIConfig, AIOptions } from '../aiSdk';\nimport { extractJson } from '../utils/extractJSON';\n\ntype Tag = {\n key: string;\n description?: string;\n};\n\nexport type AuditDictionaryMetadataOptions = {\n fileContent: string;\n tags?: Tag[];\n aiConfig: AIConfig;\n applicationContext?: string;\n};\n\nexport type AuditFileResultData = {\n fileContent: {\n title: string;\n description: string;\n tags: string[];\n };\n tokenUsed: number;\n};\n\nexport const aiDefaultOptions: AIOptions = {\n // Keep default options\n};\n\n/**\n * Audits a content declaration file by constructing a prompt for AI models.\n * The prompt includes details about the project's locales, file paths of content declarations,\n * and requests for identifying issues or inconsistencies.\n */\nexport const auditDictionaryMetadata = async ({\n fileContent,\n tags,\n aiConfig,\n applicationContext,\n}: AuditDictionaryMetadataOptions): Promise<\n AuditFileResultData | undefined\n> => {\n const CHAT_GPT_PROMPT = readAsset('./PROMPT.md');\n\n // Prepare the prompt for AI by replacing placeholders with actual values.\n const prompt = CHAT_GPT_PROMPT.replace(\n '{{applicationContext}}',\n applicationContext ?? ''\n ).replace(\n '{{tags}}',\n tags\n ? JSON.stringify(\n tags\n .map(({ key, description }) => `- ${key}: ${description}`)\n .join('\\n\\n'),\n null,\n 2\n )\n : ''\n );\n\n // Use the AI SDK to generate the completion\n const { text: newContent, usage } = await generateText({\n ...aiConfig,\n messages: [\n { role: 'system', content: prompt },\n {\n role: 'user',\n content: [\n '**Content declaration to describe:**',\n 'This is the content declaration that you should consider to describe:',\n fileContent,\n ].join('\\n'),\n },\n ],\n });\n\n return {\n fileContent: extractJson(newContent),\n tokenUsed: usage?.totalTokens ?? 0,\n };\n};\n"],"mappings":";;;;;AA0BA,MAAaA,mBAA8B,EAE1C;;;;;;AAOD,MAAa,0BAA0B,OAAO,EAC5C,aACA,MACA,UACA,yBAGG;CAIH,MAAM,SAHkB,UAAU,cAAc,CAGjB,QAC7B,0BACA,sBAAsB,GACvB,CAAC,QACA,YACA,OACI,KAAK,UACH,KACG,KAAK,EAAE,KAAK,kBAAkB,KAAK,IAAI,IAAI,cAAc,CACzD,KAAK,OAAO,EACf,MACA,EACD,GACD,GACL;CAGD,MAAM,EAAE,MAAM,YAAY,UAAU,MAAM,aAAa;EACrD,GAAG;EACH,UAAU,CACR;GAAE,MAAM;GAAU,SAAS;GAAQ,EACnC;GACE,MAAM;GACN,SAAS;IACP;IACA;IACA;IACD,CAAC,KAAK,KAAK;GACb,CACF;EACF,CAAC;AAEF,QAAO;EACL,aAAa,YAAY,WAAW;EACpC,WAAW,OAAO,eAAe;EAClC"}

package/dist/esm/customQuery.mjs
@@ -0,0 +1,23 @@
import { generateText } from "ai";

//#region src/customQuery.ts
const aiDefaultOptions = { model: "gpt-4o-mini" };
/**
 * CustomQuery a content declaration file by constructing a prompt for AI models.
 * The prompt includes details about the project's locales, file paths of content declarations,
 * and requests for identifying issues or inconsistencies.
 */
const customQuery = async ({ messages, aiConfig }) => {
  const { text: newContent, usage } = await generateText({
    ...aiConfig,
    messages
  });
  return {
    fileContent: newContent,
    tokenUsed: usage?.totalTokens ?? 0
  };
};

//#endregion
export { aiDefaultOptions, customQuery };
//# sourceMappingURL=customQuery.mjs.map

package/dist/esm/customQuery.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"customQuery.mjs","names":["aiDefaultOptions: AIOptions"],"sources":["../../src/customQuery.ts"],"sourcesContent":["import { generateText } from 'ai';\nimport type { AIConfig, AIOptions, Messages } from './aiSdk';\n\nexport type CustomQueryOptions = {\n messages: Messages;\n aiConfig: AIConfig;\n};\n\nexport type CustomQueryResultData = {\n fileContent: string;\n tokenUsed: number;\n};\n\nexport const aiDefaultOptions: AIOptions = {\n model: 'gpt-4o-mini',\n // Keep default options\n};\n\n/**\n * CustomQuery a content declaration file by constructing a prompt for AI models.\n * The prompt includes details about the project's locales, file paths of content declarations,\n * and requests for identifying issues or inconsistencies.\n */\nexport const customQuery = async ({\n messages,\n aiConfig,\n}: CustomQueryOptions): Promise<CustomQueryResultData | undefined> => {\n // Use the AI SDK to generate the completion\n const { text: newContent, usage } = await generateText({\n ...aiConfig,\n messages,\n });\n\n return {\n fileContent: newContent,\n tokenUsed: usage?.totalTokens ?? 0,\n };\n};\n"],"mappings":";;;AAaA,MAAaA,mBAA8B,EACzC,OAAO,eAER;;;;;;AAOD,MAAa,cAAc,OAAO,EAChC,UACA,eACoE;CAEpE,MAAM,EAAE,MAAM,YAAY,UAAU,MAAM,aAAa;EACrD,GAAG;EACH;EACD,CAAC;AAEF,QAAO;EACL,aAAa;EACb,WAAW,OAAO,eAAe;EAClC"}

package/dist/esm/index.mjs
@@ -0,0 +1,8 @@
import { AIProvider, getAIConfig } from "./aiSdk.mjs";
import { customQuery } from "./customQuery.mjs";
import { extractJson } from "./utils/extractJSON.mjs";
import { auditDictionaryMetadata } from "./auditDictionaryMetadata/index.mjs";
import { translateJSON } from "./translateJSON/index.mjs";
import { generateText, streamText } from "ai";

export { AIProvider, auditDictionaryMetadata, customQuery, extractJson, generateText, getAIConfig, streamText, translateJSON };

package/dist/esm/translateJSON/index.mjs
@@ -0,0 +1,66 @@
import { AIProvider } from "../aiSdk.mjs";
import { readAsset } from "../_virtual/_utils_asset.mjs";
import { extractJson } from "../utils/extractJSON.mjs";
import { generateText } from "ai";
import { getLocaleName } from "@intlayer/core";
import { Locales } from "@intlayer/types";

//#region src/translateJSON/index.ts
const aiDefaultOptions = {
  provider: AIProvider.OPENAI,
  model: "gpt-5-mini"
};
/**
 * Format a locale with its name.
 *
 * @param locale - The locale to format.
 * @returns A string in the format "locale: name", e.g. "en: English".
 */
const formatLocaleWithName = (locale) => `${locale}: ${getLocaleName(locale, Locales.ENGLISH)}`;
/**
 * Formats tag instructions for the AI prompt.
 * Creates a string with all available tags and their descriptions.
 *
 * @param tags - The list of tags to format.
 * @returns A formatted string with tag instructions.
 */
const formatTagInstructions = (tags) => {
  if (!tags || tags.length === 0) return "";
  return `Based on the dictionary content, identify specific tags from the list below that would be relevant:
 
${tags.map(({ key, description }) => `- ${key}: ${description}`).join("\n\n")}`;
};
const getModeInstructions = (mode) => {
  if (mode === "complete") return "Mode: \"Complete\" - Enrich the preset content with the missing keys and values in the output locale. Do not update existing keys. Everything should be returned in the output.";
  return "Mode: \"Review\" - Fill missing content and review existing keys from the preset content. If a key from the entry is missing in the output, it must be translated to the target language and added. If you detect misspelled content, or content that should be reformulated, correct it. If a translation is not coherent with the desired language, translate it.";
};
/**
 * TranslateJSONs a content declaration file by constructing a prompt for AI models.
 * The prompt includes details about the project's locales, file paths of content declarations,
 * and requests for identifying issues or inconsistencies.
 */
const translateJSON = async ({ entryFileContent, presetOutputContent, dictionaryDescription, aiConfig, entryLocale, outputLocale, tags, mode, applicationContext }) => {
  const prompt = readAsset("./PROMPT.md").replace("{{entryLocale}}", formatLocaleWithName(entryLocale)).replace("{{outputLocale}}", formatLocaleWithName(outputLocale)).replace("{{presetOutputContent}}", JSON.stringify(presetOutputContent)).replace("{{dictionaryDescription}}", dictionaryDescription ?? "").replace("{{applicationContext}}", applicationContext ?? "").replace("{{tagsInstructions}}", formatTagInstructions(tags ?? [])).replace("{{modeInstructions}}", getModeInstructions(mode));
  const { text: newContent, usage } = await generateText({
    ...aiConfig,
    messages: [{
      role: "system",
      content: prompt
    }, {
      role: "user",
      content: [
        "**Entry Content to Translate:**",
        "- Given Language: {{entryLocale}}",
        JSON.stringify(entryFileContent)
      ].join("\n")
    }]
  });
  return {
    fileContent: extractJson(newContent),
    tokenUsed: usage?.totalTokens ?? 0
  };
};

//#endregion
export { aiDefaultOptions, translateJSON };
//# sourceMappingURL=index.mjs.map

package/dist/esm/translateJSON/index.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"index.mjs","names":["aiDefaultOptions: AIOptions"],"sources":["../../../src/translateJSON/index.ts"],"sourcesContent":["import { readAsset } from 'utils:asset';\nimport { getLocaleName } from '@intlayer/core';\nimport { type Locale, Locales } from '@intlayer/types';\nimport { generateText } from 'ai';\nimport { type AIConfig, type AIOptions, AIProvider } from '../aiSdk';\nimport { extractJson } from '../utils/extractJSON';\n\ntype Tag = {\n key: string;\n description?: string;\n};\n\nexport type TranslateJSONOptions = {\n entryFileContent: JSON;\n presetOutputContent: JSON;\n dictionaryDescription?: string;\n entryLocale: Locale;\n outputLocale: Locale;\n tags?: Tag[];\n aiConfig: AIConfig;\n mode: 'complete' | 'review';\n applicationContext?: string;\n};\n\nexport type TranslateJSONResultData = {\n fileContent: string;\n tokenUsed: number;\n};\n\nexport const aiDefaultOptions: AIOptions = {\n provider: AIProvider.OPENAI,\n model: 'gpt-5-mini',\n};\n\n/**\n * Format a locale with its name.\n *\n * @param locale - The locale to format.\n * @returns A string in the format \"locale: name\", e.g. \"en: English\".\n */\nconst formatLocaleWithName = (locale: Locale): string =>\n `${locale}: ${getLocaleName(locale, Locales.ENGLISH)}`;\n\n/**\n * Formats tag instructions for the AI prompt.\n * Creates a string with all available tags and their descriptions.\n *\n * @param tags - The list of tags to format.\n * @returns A formatted string with tag instructions.\n */\nconst formatTagInstructions = (tags: Tag[]): string => {\n if (!tags || tags.length === 0) {\n return '';\n }\n\n // Prepare the tag instructions.\n return `Based on the dictionary content, identify specific tags from the list below that would be relevant:\n \n${tags.map(({ key, description }) => `- ${key}: ${description}`).join('\\n\\n')}`;\n};\n\nconst getModeInstructions = (mode: 'complete' | 'review'): string => {\n if (mode === 'complete') {\n return 'Mode: \"Complete\" - Enrich the preset content with the missing keys and values in the output locale. Do not update existing keys. Everything should be returned in the output.';\n }\n\n return 'Mode: \"Review\" - Fill missing content and review existing keys from the preset content. If a key from the entry is missing in the output, it must be translated to the target language and added. If you detect misspelled content, or content that should be reformulated, correct it. If a translation is not coherent with the desired language, translate it.';\n};\n\n/**\n * TranslateJSONs a content declaration file by constructing a prompt for AI models.\n * The prompt includes details about the project's locales, file paths of content declarations,\n * and requests for identifying issues or inconsistencies.\n */\nexport const translateJSON = async ({\n entryFileContent,\n presetOutputContent,\n dictionaryDescription,\n aiConfig,\n entryLocale,\n outputLocale,\n tags,\n mode,\n applicationContext,\n}: TranslateJSONOptions): Promise<TranslateJSONResultData | undefined> => {\n const promptFile = readAsset('./PROMPT.md');\n // Prepare the prompt for AI by replacing placeholders with actual values.\n const prompt = promptFile\n .replace('{{entryLocale}}', formatLocaleWithName(entryLocale))\n .replace('{{outputLocale}}', formatLocaleWithName(outputLocale))\n .replace('{{presetOutputContent}}', JSON.stringify(presetOutputContent))\n .replace('{{dictionaryDescription}}', dictionaryDescription ?? '')\n .replace('{{applicationContext}}', applicationContext ?? 
'')\n .replace('{{tagsInstructions}}', formatTagInstructions(tags ?? []))\n .replace('{{modeInstructions}}', getModeInstructions(mode));\n\n // Use the AI SDK to generate the completion\n const { text: newContent, usage } = await generateText({\n ...aiConfig,\n messages: [\n { role: 'system', content: prompt },\n {\n role: 'user',\n content: [\n '**Entry Content to Translate:**',\n '- Given Language: {{entryLocale}}',\n JSON.stringify(entryFileContent),\n ].join('\\n'),\n },\n ],\n });\n\n return {\n fileContent: extractJson(newContent),\n tokenUsed: usage?.totalTokens ?? 0,\n };\n};\n"],"mappings":";;;;;;;;AA6BA,MAAaA,mBAA8B;CACzC,UAAU,WAAW;CACrB,OAAO;CACR;;;;;;;AAQD,MAAM,wBAAwB,WAC5B,GAAG,OAAO,IAAI,cAAc,QAAQ,QAAQ,QAAQ;;;;;;;;AAStD,MAAM,yBAAyB,SAAwB;AACrD,KAAI,CAAC,QAAQ,KAAK,WAAW,EAC3B,QAAO;AAIT,QAAO;;EAEP,KAAK,KAAK,EAAE,KAAK,kBAAkB,KAAK,IAAI,IAAI,cAAc,CAAC,KAAK,OAAO;;AAG7E,MAAM,uBAAuB,SAAwC;AACnE,KAAI,SAAS,WACX,QAAO;AAGT,QAAO;;;;;;;AAQT,MAAa,gBAAgB,OAAO,EAClC,kBACA,qBACA,uBACA,UACA,aACA,cACA,MACA,MACA,yBACwE;CAGxE,MAAM,SAFa,UAAU,cAAc,CAGxC,QAAQ,mBAAmB,qBAAqB,YAAY,CAAC,CAC7D,QAAQ,oBAAoB,qBAAqB,aAAa,CAAC,CAC/D,QAAQ,2BAA2B,KAAK,UAAU,oBAAoB,CAAC,CACvE,QAAQ,6BAA6B,yBAAyB,GAAG,CACjE,QAAQ,0BAA0B,sBAAsB,GAAG,CAC3D,QAAQ,wBAAwB,sBAAsB,QAAQ,EAAE,CAAC,CAAC,CAClE,QAAQ,wBAAwB,oBAAoB,KAAK,CAAC;CAG7D,MAAM,EAAE,MAAM,YAAY,UAAU,MAAM,aAAa;EACrD,GAAG;EACH,UAAU,CACR;GAAE,MAAM;GAAU,SAAS;GAAQ,EACnC;GACE,MAAM;GACN,SAAS;IACP;IACA;IACA,KAAK,UAAU,iBAAiB;IACjC,CAAC,KAAK,KAAK;GACb,CACF;EACF,CAAC;AAEF,QAAO;EACL,aAAa,YAAY,WAAW;EACpC,WAAW,OAAO,eAAe;EAClC"}
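
To close the loop, a sketch of how the `translateJSON` options fit together; the dictionary content, locales, and environment variable are made-up values, `Locales.FRENCH` is assumed to exist in `@intlayer/types`, and the casts only satisfy the loose `JSON` typing of the options.

```ts
import { getAIConfig, translateJSON } from '@intlayer/ai';
import { Locales } from '@intlayer/types';

const aiConfig = await getAIConfig({
  accessType: ['apiKey'],
  userOptions: { apiKey: process.env.OPENAI_API_KEY }, // assumed to be set
});

const result = await translateJSON({
  entryFileContent: { title: 'Welcome', cta: 'Get started' } as unknown as JSON,
  presetOutputContent: { title: 'Bienvenue' } as unknown as JSON, // keys already translated
  entryLocale: Locales.ENGLISH,
  outputLocale: Locales.FRENCH,
  mode: 'complete', // only fills missing keys; 'review' also revisits existing ones
  aiConfig,
});

console.log(result?.fileContent, result?.tokenUsed);
```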