@intlayer/backend 3.5.6 → 3.5.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/controllers/ai.controller.cjs +16 -0
- package/dist/cjs/controllers/ai.controller.cjs.map +1 -1
- package/dist/cjs/routes/ai.routes.cjs +6 -0
- package/dist/cjs/routes/ai.routes.cjs.map +1 -1
- package/dist/cjs/services/dictionary.service.cjs +0 -1
- package/dist/cjs/services/dictionary.service.cjs.map +1 -1
- package/dist/cjs/services/project.service.cjs +0 -1
- package/dist/cjs/services/project.service.cjs.map +1 -1
- package/dist/cjs/utils/AI/askDocQuestion.cjs +185 -0
- package/dist/cjs/utils/AI/askDocQuestion.cjs.map +1 -0
- package/dist/cjs/utils/AI/embeddings.json +150583 -0
- package/dist/cjs/utils/auditDictionaryField/index.cjs +0 -1
- package/dist/cjs/utils/auditDictionaryField/index.cjs.map +1 -1
- package/dist/cjs/utils/auditDictionaryMetadata/index.cjs +0 -2
- package/dist/cjs/utils/auditDictionaryMetadata/index.cjs.map +1 -1
- package/dist/cjs/utils/auditTag/index.cjs +0 -2
- package/dist/cjs/utils/auditTag/index.cjs.map +1 -1
- package/dist/esm/controllers/ai.controller.mjs +15 -0
- package/dist/esm/controllers/ai.controller.mjs.map +1 -1
- package/dist/esm/routes/ai.routes.mjs +7 -0
- package/dist/esm/routes/ai.routes.mjs.map +1 -1
- package/dist/esm/services/dictionary.service.mjs +0 -1
- package/dist/esm/services/dictionary.service.mjs.map +1 -1
- package/dist/esm/services/project.service.mjs +0 -1
- package/dist/esm/services/project.service.mjs.map +1 -1
- package/dist/esm/utils/AI/askDocQuestion.mjs +148 -0
- package/dist/esm/utils/AI/askDocQuestion.mjs.map +1 -0
- package/dist/esm/utils/AI/embeddings.json +150583 -0
- package/dist/esm/utils/auditDictionaryField/index.mjs +0 -1
- package/dist/esm/utils/auditDictionaryField/index.mjs.map +1 -1
- package/dist/esm/utils/auditDictionaryMetadata/index.mjs +0 -2
- package/dist/esm/utils/auditDictionaryMetadata/index.mjs.map +1 -1
- package/dist/esm/utils/auditTag/index.mjs +0 -2
- package/dist/esm/utils/auditTag/index.mjs.map +1 -1
- package/dist/types/controllers/ai.controller.d.ts +6 -0
- package/dist/types/controllers/ai.controller.d.ts.map +1 -1
- package/dist/types/routes/ai.routes.d.ts +5 -0
- package/dist/types/routes/ai.routes.d.ts.map +1 -1
- package/dist/types/services/dictionary.service.d.ts.map +1 -1
- package/dist/types/services/project.service.d.ts.map +1 -1
- package/dist/types/utils/AI/askDocQuestion.d.ts +38 -0
- package/dist/types/utils/AI/askDocQuestion.d.ts.map +1 -0
- package/dist/types/utils/auditDictionaryField/index.d.ts.map +1 -1
- package/dist/types/utils/auditDictionaryMetadata/index.d.ts.map +1 -1
- package/dist/types/utils/auditTag/index.d.ts.map +1 -1
- package/package.json +11 -9
- package/dist/types/utils/audit/index.d.ts +0 -28
- package/dist/types/utils/audit/index.d.ts.map +0 -1
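
The headline change in this release is a documentation Q&A capability: a new `askDocQuestion` utility ships alongside a large pre-computed `embeddings.json`, is wired into the AI controller, and is exposed as a new `POST /ask` route (see the hunks below). The diff records the new files but not their source, so the following is a minimal sketch of the retrieval-augmented pattern those files suggest; the chunk shape, embedding model, top-k cutoff, and prompt wording are assumptions, not the shipped code.

```ts
// Hypothetical sketch of the new askDocQuestion util — NOT the shipped
// implementation. Assumes embeddings.json holds pre-computed doc chunks
// of the shape { fileContent, embedding }.
import { readFileSync } from 'fs';
import { OpenAI } from 'openai';

type ChatCompletionRequestMessage = {
  role: 'system' | 'user' | 'assistant';
  content: string;
};

type DocChunk = { fileContent: string; embedding: number[] };

const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

// Cosine similarity between the question embedding and each stored chunk.
const dot = (a: number[], b: number[]) =>
  a.reduce((sum, v, i) => sum + v * b[i], 0);
const cosineSimilarity = (a: number[], b: number[]) =>
  dot(a, b) / (Math.sqrt(dot(a, a)) * Math.sqrt(dot(b, b)));

export const askDocQuestion = async (
  messages: ChatCompletionRequestMessage[]
): Promise<string> => {
  const chunks: DocChunk[] = JSON.parse(
    readFileSync('./embeddings.json', 'utf-8')
  );

  // Embed the latest user question. The embedding model is assumed.
  const question = messages[messages.length - 1].content;
  const { data } = await openai.embeddings.create({
    model: 'text-embedding-3-small',
    input: question,
  });
  const questionEmbedding = data[0].embedding;

  // Keep the most relevant doc chunks as context for the completion.
  const context = chunks
    .map((chunk) => ({
      chunk,
      score: cosineSimilarity(questionEmbedding, chunk.embedding),
    }))
    .sort((a, b) => b.score - a.score)
    .slice(0, 5)
    .map(({ chunk }) => chunk.fileContent)
    .join('\n\n');

  const completion = await openai.chat.completions.create({
    model: 'gpt-4o-mini',
    messages: [
      {
        role: 'system',
        content: `Answer using the documentation below.\n\n${context}`,
      },
      ...messages,
    ],
  });

  return completion.choices[0].message?.content ?? '';
};
```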
@@ -65,7 +65,6 @@ const auditDictionaryField = async ({
     import_logger.logger.info(
       `${chatCompletion.usage?.total_tokens} tokens used in the request`
     );
-    console.log("newContent", newContent);
     return {
       fileContent: newContent ?? "",
       tokenUsed: chatCompletion.usage?.total_tokens ?? 0
@@ -1 +1 @@
-{"version":3,"sources":["../../../../src/utils/auditDictionaryField/index.ts"],"sourcesContent":[…],"mappings":"…","names":[]}
+{"version":3,"sources":["../../../../src/utils/auditDictionaryField/index.ts"],"sourcesContent":[…],"mappings":"…","names":[]}
@@ -53,13 +53,11 @@ const auditDictionaryMetadata = async ({
         2
       )}`
     ).replace("{{contentDeclaration}}", fileContent);
-    console.log("prompt", prompt);
     const chatCompletion = await openai.chat.completions.create({
       model: model ?? "gpt-4o-mini",
       messages: [{ role: "system", content: prompt }]
     });
     const newContent = chatCompletion.choices[0].message?.content;
-    console.log("newContent", newContent);
     import_logger.logger.info(
       `${chatCompletion.usage?.total_tokens} tokens used in the request`
     );
@@ -1 +1 @@
-{"version":3,"sources":["../../../../src/utils/auditDictionaryMetadata/index.ts"],"sourcesContent":[…],"mappings":"…","names":[]}
+{"version":3,"sources":["../../../../src/utils/auditDictionaryMetadata/index.ts"],"sourcesContent":[…],"mappings":"…","names":[]}
@@ -49,13 +49,11 @@ const auditTag = async ({
       "{{contentDeclarations}}",
       dictionaries.map((dictionary) => `- ${JSON.stringify(dictionary)}`).join("\n\n")
     );
-    console.log("prompt", prompt);
     const chatCompletion = await openai.chat.completions.create({
       model: model ?? "gpt-4o-mini",
       messages: [{ role: "system", content: prompt }]
     });
     const newContent = chatCompletion.choices[0].message?.content;
-    console.log("newContent", newContent);
     import_logger.logger.info(
       `${chatCompletion.usage?.total_tokens} tokens used in the request`
     );
@@ -1 +1 @@
-{"version":3,"sources":["../../../../src/utils/auditTag/index.ts"],"sourcesContent":[…],"mappings":"…","names":[]}
+{"version":3,"sources":["../../../../src/utils/auditTag/index.ts"],"sourcesContent":[…],"mappings":"…","names":[]}
@@ -1,6 +1,7 @@
|
|
|
1
1
|
import { getDictionariesByTags } from './../services/dictionary.service.mjs';
|
|
2
2
|
import { getTagsByKeys } from './../services/tag.service.mjs';
|
|
3
3
|
import * as tagService from './../services/tag.service.mjs';
|
|
4
|
+
import * as askDocQuestionUtil from './../utils/AI/askDocQuestion.mjs';
|
|
4
5
|
import * as auditContentDeclarationUtil from './../utils/auditDictionary/index.mjs';
|
|
5
6
|
import * as auditContentDeclarationFieldUtil from './../utils/auditDictionaryField/index.mjs';
|
|
6
7
|
import * as auditContentDeclarationMetadataUtil from './../utils/auditDictionaryMetadata/index.mjs';
|
|
@@ -192,7 +193,21 @@ const auditTag = async (req, res, _next) => {
     return;
   }
 };
+const askDocQuestion = async (req, res) => {
+  const { messages } = req.body;
+  try {
+    const response = await askDocQuestionUtil.askDocQuestion(messages);
+    const responseData = formatResponse({
+      data: response
+    });
+    res.json(responseData);
+  } catch (error) {
+    ErrorHandler.handleAppErrorResponse(res, error);
+    return;
+  }
+};
 export {
+  askDocQuestion,
   auditContentDeclaration,
   auditContentDeclarationField,
   auditContentDeclarationMetadata,
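
For reference, the `sourcesContent` embedded in this file's updated source map preserves the TypeScript source of the new handler verbatim:

```ts
export type AskDocQuestionBody = {
  messages: askDocQuestionUtil.ChatCompletionRequestMessage[];
};
export type AskDocQuestionResult =
  ResponseData<askDocQuestionUtil.AskDocQuestionResult>;

export const askDocQuestion = async (
  req: Request<undefined, undefined, AskDocQuestionBody>,
  res: ResponseWithInformation<AskDocQuestionResult>
) => {
  const { messages } = req.body;
  try {
    const response = await askDocQuestionUtil.askDocQuestion(messages);

    const responseData =
      formatResponse<askDocQuestionUtil.AskDocQuestionResult>({
        data: response,
      });

    res.json(responseData);
  } catch (error) {
    ErrorHandler.handleAppErrorResponse(res, error as AppError);
    return;
  }
};
```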
@@ -1 +1 @@
-{"version":3,"sources":["../../../src/controllers/ai.controller.ts"],"sourcesContent":[…],"mappings":"…","names":[]}
+{"version":3,"sources":["../../../src/controllers/ai.controller.ts"],"sourcesContent":[…],"mappings":"…","names":[]}
@@ -1,4 +1,5 @@
 import {
+  askDocQuestion,
   auditContentDeclaration,
   auditContentDeclarationField,
   auditContentDeclarationMetadata,
@@ -27,6 +28,11 @@ const aiRoutes = {
     urlModel: "/audit/tag",
     url: `${baseURL}/audit/tag`,
     method: "POST"
+  },
+  ask: {
+    urlModel: "/ask",
+    url: `${baseURL}/ask`,
+    method: "POST"
   }
 };
 aiRouter.post(
@@ -41,6 +47,7 @@ aiRouter.post(
   aiRoutes.auditContentDeclarationMetadata.urlModel,
   auditContentDeclarationMetadata
 );
+aiRouter.post(aiRoutes.ask.urlModel, askDocQuestion);
 aiRouter.post(aiRoutes.auditTag.urlModel, auditTag);
 export {
   aiRouter,
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../../../src/routes/ai.routes.ts"],"sourcesContent":["import {\n auditContentDeclaration,\n auditContentDeclarationField,\n auditContentDeclarationMetadata,\n auditTag,\n} from '@controllers/ai.controller';\nimport { Router } from 'express';\nimport { Routes } from '@/types/Routes';\n\nexport const aiRouter: Router = Router();\n\nconst baseURL = `${process.env.BACKEND_URL}/api/ai`;\n\nexport const aiRoutes = {\n auditContentDeclaration: {\n urlModel: '/audit/content-declaration',\n url: `${baseURL}/audit/content-declaration`,\n method: 'POST',\n },\n auditContentDeclarationField: {\n urlModel: '/audit/content-declaration/field',\n url: `${baseURL}/audit/content-declaration/field`,\n method: 'POST',\n },\n auditContentDeclarationMetadata: {\n urlModel: '/audit/content-declaration/metadata',\n url: `${baseURL}/audit/content-declaration/metadata`,\n method: 'POST',\n },\n auditTag: {\n urlModel: '/audit/tag',\n url: `${baseURL}/audit/tag`,\n method: 'POST',\n },\n} satisfies Routes;\n\naiRouter.post(\n aiRoutes.auditContentDeclaration.urlModel,\n auditContentDeclaration\n);\naiRouter.post(\n aiRoutes.auditContentDeclarationField.urlModel,\n auditContentDeclarationField\n);\naiRouter.post(\n aiRoutes.auditContentDeclarationMetadata.urlModel,\n auditContentDeclarationMetadata\n);\n\naiRouter.post(aiRoutes.auditTag.urlModel, auditTag);\n"],"mappings":"AAAA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,SAAS,cAAc;AAGhB,MAAM,WAAmB,OAAO;AAEvC,MAAM,UAAU,GAAG,QAAQ,IAAI,WAAW;AAEnC,MAAM,WAAW;AAAA,EACtB,yBAAyB;AAAA,IACvB,UAAU;AAAA,IACV,KAAK,GAAG,OAAO;AAAA,IACf,QAAQ;AAAA,EACV;AAAA,EACA,8BAA8B;AAAA,IAC5B,UAAU;AAAA,IACV,KAAK,GAAG,OAAO;AAAA,IACf,QAAQ;AAAA,EACV;AAAA,EACA,iCAAiC;AAAA,IAC/B,UAAU;AAAA,IACV,KAAK,GAAG,OAAO;AAAA,IACf,QAAQ;AAAA,EACV;AAAA,EACA,UAAU;AAAA,IACR,UAAU;AAAA,IACV,KAAK,GAAG,OAAO;AAAA,IACf,QAAQ;AAAA,EACV;AACF;AAEA,SAAS;AAAA,EACP,SAAS,wBAAwB;AAAA,EACjC;AACF;AACA,SAAS;AAAA,EACP,SAAS,6BAA6B;AAAA,EACtC;AACF;AACA,SAAS;AAAA,EACP,SAAS,gCAAgC;AAAA,EACzC;AACF;
|
|
1
|
+
{"version":3,"sources":["../../../src/routes/ai.routes.ts"],"sourcesContent":["import {\n askDocQuestion,\n auditContentDeclaration,\n auditContentDeclarationField,\n auditContentDeclarationMetadata,\n auditTag,\n} from '@controllers/ai.controller';\nimport { Router } from 'express';\nimport { Routes } from '@/types/Routes';\n\nexport const aiRouter: Router = Router();\n\nconst baseURL = `${process.env.BACKEND_URL}/api/ai`;\n\nexport const aiRoutes = {\n auditContentDeclaration: {\n urlModel: '/audit/content-declaration',\n url: `${baseURL}/audit/content-declaration`,\n method: 'POST',\n },\n auditContentDeclarationField: {\n urlModel: '/audit/content-declaration/field',\n url: `${baseURL}/audit/content-declaration/field`,\n method: 'POST',\n },\n auditContentDeclarationMetadata: {\n urlModel: '/audit/content-declaration/metadata',\n url: `${baseURL}/audit/content-declaration/metadata`,\n method: 'POST',\n },\n auditTag: {\n urlModel: '/audit/tag',\n url: `${baseURL}/audit/tag`,\n method: 'POST',\n },\n ask: {\n urlModel: '/ask',\n url: `${baseURL}/ask`,\n method: 'POST',\n },\n} satisfies Routes;\n\naiRouter.post(\n aiRoutes.auditContentDeclaration.urlModel,\n auditContentDeclaration\n);\naiRouter.post(\n aiRoutes.auditContentDeclarationField.urlModel,\n auditContentDeclarationField\n);\naiRouter.post(\n aiRoutes.auditContentDeclarationMetadata.urlModel,\n auditContentDeclarationMetadata\n);\naiRouter.post(aiRoutes.ask.urlModel, askDocQuestion);\n\naiRouter.post(aiRoutes.auditTag.urlModel, auditTag);\n"],"mappings":"AAAA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,SAAS,cAAc;AAGhB,MAAM,WAAmB,OAAO;AAEvC,MAAM,UAAU,GAAG,QAAQ,IAAI,WAAW;AAEnC,MAAM,WAAW;AAAA,EACtB,yBAAyB;AAAA,IACvB,UAAU;AAAA,IACV,KAAK,GAAG,OAAO;AAAA,IACf,QAAQ;AAAA,EACV;AAAA,EACA,8BAA8B;AAAA,IAC5B,UAAU;AAAA,IACV,KAAK,GAAG,OAAO;AAAA,IACf,QAAQ;AAAA,EACV;AAAA,EACA,iCAAiC;AAAA,IAC/B,UAAU;AAAA,IACV,KAAK,GAAG,OAAO;AAAA,IACf,QAAQ;AAAA,EACV;AAAA,EACA,UAAU;AAAA,IACR,UAAU;AAAA,IACV,KAAK,GAAG,OAAO;AAAA,IACf,QAAQ;AAAA,EACV;AAAA,EACA,KAAK;AAAA,IACH,UAAU;AAAA,IACV,KAAK,GAAG,OAAO;AAAA,IACf,QAAQ;AAAA,EACV;AACF;AAEA,SAAS;AAAA,EACP,SAAS,wBAAwB;AAAA,EACjC;AACF;AACA,SAAS;AAAA,EACP,SAAS,6BAA6B;AAAA,EACtC;AACF;AACA,SAAS;AAAA,EACP,SAAS,gCAAgC;AAAA,EACzC;AACF;AACA,SAAS,KAAK,SAAS,IAAI,UAAU,cAAc;AAEnD,SAAS,KAAK,SAAS,SAAS,UAAU,QAAQ;","names":[]}
@@ -64,7 +64,6 @@ const getDictionaryById = async (dictionaryId) => {
 };
 const getDictionaryByKey = async (dictionaryKey, projectId) => {
   const dictionaries = await getDictionariesByKeys([dictionaryKey], projectId);
-  console.log("dictionaries", dictionaries);
   return dictionaries[0];
 };
 const getDictionariesByKeys = async (dictionaryKeys, projectId) => {
@@ -1 +1 @@
-
{"version":3,"sources":["../../../src/services/dictionary.service.ts"],"sourcesContent":["import { DictionaryModel } from '@models/dictionary.model';\nimport { ensureMongoDocumentToObject } from '@utils/ensureMongoDocumentToObject';\nimport { GenericError } from '@utils/errors';\nimport type { DictionaryFilters } from '@utils/filtersAndPagination/getDictionaryFiltersAndPagination';\nimport { removeObjectKeys } from '@utils/removeObjectKeys';\nimport {\n type DictionaryFields,\n validateDictionary,\n} from '@utils/validation/validateDictionary';\nimport { ObjectId } from 'mongoose';\nimport type {\n Dictionary,\n DictionaryData,\n DictionaryDocument,\n} from '@/types/dictionary.types';\nimport type { Project } from '@/types/project.types';\n\n/**\n * Finds dictionaries based on filters and pagination options.\n * @param filters - MongoDB filter query.\n * @param skip - Number of documents to skip.\n * @param limit - Number of documents to limit.\n * @returns List of dictionaries matching the filters.\n */\nexport const findDictionaries = async (\n filters: DictionaryFilters,\n skip = 0,\n limit = 100\n): Promise<DictionaryDocument[]> => {\n try {\n const dictionaries = await DictionaryModel.aggregate<DictionaryDocument>([\n // Stage 1: Match the filters\n { $match: filters },\n\n // Stage 2: Skip for pagination\n { $skip: skip },\n\n // Stage 3: Limit the number of documents\n { $limit: limit },\n\n // Stage 4: Add the 'availableVersions' field\n {\n $addFields: {\n availableVersions: {\n $map: {\n input: { $objectToArray: '$content' },\n as: 'version',\n in: '$$version.k',\n },\n },\n },\n },\n\n // (Optional) Stage 5: Project the fields you want to include/exclude\n // For example, to exclude the entire 'content' field and keep only 'availableVersions'\n // {\n // $project: {\n // content: 0 // Exclude the 'content' field\n // }\n // }\n ]);\n\n return dictionaries;\n } catch (error) {\n console.error('Error fetching dictionaries:', error);\n throw error;\n }\n};\n\n/**\n * Finds a dictionary by its ID.\n * @param dictionaryId - The ID of the dictionary to find.\n * @returns The dictionary matching the ID.\n */\n/**\n * Finds a dictionary by its ID and includes the 'availableVersions' field.\n * @param dictionaryId - The ID of the dictionary to find.\n * @returns The dictionary matching the ID with available versions.\n */\nexport const getDictionaryById = async (\n dictionaryId: string | ObjectId\n): Promise<DictionaryDocument> => {\n const dictionary = await DictionaryModel.aggregate<DictionaryDocument>([\n // Stage 1: Match the document by ID\n { $match: { _id: dictionaryId } },\n\n // Stage 2: Add the 'availableVersions' field\n {\n $addFields: {\n availableVersions: {\n $map: {\n input: { $objectToArray: '$content' },\n as: 'version',\n in: '$$version.k',\n },\n },\n },\n },\n ]);\n\n if (!dictionary.length) {\n throw new GenericError('DICTIONARY_NOT_FOUND', { dictionaryId });\n }\n\n return dictionary[0];\n};\n\n/**\n * Finds a dictionary by its ID.\n * @param dictionaryKey - The ID of the dictionary to find.\n * @returns The dictionary matching the ID.\n */\nexport const getDictionaryByKey = async (\n dictionaryKey: string,\n projectId: string | ObjectId\n): Promise<DictionaryDocument> => {\n const dictionaries = await getDictionariesByKeys([dictionaryKey], projectId);\n\n console.log('dictionaries', dictionaries);\n\n return dictionaries[0];\n};\n\nexport const getDictionariesByKeys = async (\n dictionaryKeys: string[],\n projectId: string | ObjectId\n): 
Promise<DictionaryDocument[]> => {\n const dictionaries = await DictionaryModel.aggregate<DictionaryDocument>([\n // Stage 1: Match the document by key\n { $match: { key: { $in: dictionaryKeys }, projectIds: projectId } },\n\n // Stage 2: Add the 'availableVersions' field\n {\n $addFields: {\n availableVersions: {\n $map: {\n input: { $objectToArray: '$content' },\n as: 'version',\n in: '$$version.k',\n },\n },\n },\n },\n ]);\n\n if (!dictionaries) {\n throw new GenericError('DICTIONARY_NOT_FOUND', {\n dictionaryKeys,\n projectId,\n });\n }\n\n return dictionaries;\n};\n\nexport const getDictionariesKeys = async (\n projectId: string | ObjectId\n): Promise<string[]> => {\n const dictionaries = await DictionaryModel.find({\n projectIds: projectId,\n }).select('key');\n\n return dictionaries.map((dictionary) => dictionary.key);\n};\n\nexport const getDictionariesByTags = async (\n tags: string[],\n projectId: string | Project['_id']\n): Promise<DictionaryDocument[]> => {\n const dictionaries = await DictionaryModel.aggregate<DictionaryDocument>([\n // Stage 1: Match the document by tags\n {\n $match: {\n tags: { $in: tags },\n projectIds: projectId,\n },\n },\n\n // Stage 2: Add the 'availableVersions' field\n {\n $addFields: {\n availableVersions: {\n $map: {\n input: { $objectToArray: '$content' },\n as: 'version',\n in: '$$version.k',\n },\n },\n },\n },\n ]);\n\n return dictionaries;\n};\n\n/**\n * Counts the total number of dictionaries that match the filters.\n * @param filters - MongoDB filter query.\n * @returns Total number of dictionaries.\n */\nexport const countDictionaries = async (\n filters: DictionaryFilters\n): Promise<number> => {\n const result = await DictionaryModel.countDocuments(filters);\n\n if (typeof result === 'undefined') {\n throw new GenericError('DICTIONARY_COUNT_FAILED', { filters });\n }\n\n return result;\n};\n\n/**\n * Creates a new dictionary in the database.\n * @param dictionary - The dictionary data to create.\n * @returns The created dictionary.\n */\nexport const createDictionary = async (\n dictionary: DictionaryData\n): Promise<DictionaryDocument> => {\n const errors = await validateDictionary(dictionary);\n\n if (Object.keys(errors).length > 0) {\n throw new GenericError('DICTIONARY_INVALID_FIELDS', {\n errors,\n });\n }\n\n return await DictionaryModel.create(dictionary);\n};\n\ntype GetExistingDictionaryResult = {\n existingDictionariesKey: string[];\n newDictionariesKey: string[];\n};\n\n/**\n * Gets the existing dictionaries from the provided list of keys.\n * @param dictionariesKeys - List of dictionary keys to check.\n * @param projectId - The ID of the project to check the dictionaries against.\n * @returns The existing dictionaries and the new dictionaries.\n */\nexport const getExistingDictionaryKey = async (\n dictionariesKeys: string[],\n projectId: string | ObjectId\n): Promise<GetExistingDictionaryResult> => {\n // Fetch dictionaries from the database where the key is in the provided list\n const existingDictionaries = await DictionaryModel.find({\n key: { $in: dictionariesKeys },\n projectIds: projectId,\n });\n\n // Map existing dictionaries to a LocalDictionary object\n const existingDictionariesKey: string[] = [];\n const newDictionariesKey: string[] = [];\n\n for (const key of dictionariesKeys) {\n const isDictionaryExist = existingDictionaries.some(\n (dictionary) => dictionary.key === key\n );\n\n if (isDictionaryExist) {\n existingDictionariesKey.push(key);\n } else {\n newDictionariesKey.push(key);\n }\n }\n\n return { 
existingDictionariesKey, newDictionariesKey };\n};\n\n/**\n * Updates an existing dictionary in the database by its ID.\n * @param dictionaryId - The ID of the dictionary to update.\n * @param dictionary - The updated dictionary data.\n * @returns The updated dictionary.\n */\nexport const updateDictionaryById = async (\n dictionaryId: string | ObjectId,\n dictionary: Partial<Dictionary>\n): Promise<DictionaryDocument> => {\n const dictionaryObject = ensureMongoDocumentToObject(dictionary);\n const dictionaryToUpdate = removeObjectKeys(dictionaryObject, ['_id']);\n\n const updatedKeys = Object.keys(dictionaryToUpdate) as DictionaryFields;\n const errors = await validateDictionary(dictionaryToUpdate, updatedKeys);\n\n if (Object.keys(errors).length > 0) {\n throw new GenericError('DICTIONARY_INVALID_FIELDS', {\n dictionaryId,\n errors,\n });\n }\n\n const result = await DictionaryModel.updateOne(\n { _id: dictionaryId },\n dictionaryToUpdate\n );\n\n if (result.matchedCount === 0) {\n throw new GenericError('DICTIONARY_UPDATE_FAILED', { dictionaryId });\n }\n\n const updatedDictionary = await getDictionaryById(dictionaryId);\n\n return updatedDictionary;\n};\n\n/**\n * Updates an existing dictionary in the database by its key.\n * @param dictionaryKey - The ID of the dictionary to update.\n * @param dictionary - The updated dictionary data.\n * @returns The updated dictionary.\n */\nexport const updateDictionaryByKey = async (\n dictionaryKey: string,\n dictionary: Partial<Dictionary>,\n projectId: string | ObjectId\n): Promise<DictionaryDocument> => {\n const dictionaryObject = ensureMongoDocumentToObject(dictionary);\n\n const dictionaryToUpdate = removeObjectKeys(dictionaryObject, ['_id']);\n\n const updatedKeys = Object.keys(dictionaryToUpdate) as DictionaryFields;\n const errors = await validateDictionary(dictionaryToUpdate, updatedKeys);\n\n if (Object.keys(errors).length > 0) {\n throw new GenericError('DICTIONARY_INVALID_FIELDS', {\n dictionaryKey,\n projectId,\n errors,\n });\n }\n\n const result = await DictionaryModel.updateOne(\n { key: dictionaryKey, projectIds: projectId },\n dictionaryToUpdate\n );\n\n if (result.matchedCount === 0) {\n throw new GenericError('DICTIONARY_UPDATE_FAILED', { dictionaryKey });\n }\n\n const updatedDictionary = await getDictionaryByKey(dictionaryKey, projectId);\n\n return updatedDictionary;\n};\n\n/**\n * Deletes a dictionary from the database by its ID.\n * @param dictionaryId - The ID of the dictionary to delete.\n * @returns The result of the deletion operation.\n */\nexport const deleteDictionaryById = async (\n dictionaryId: string\n): Promise<DictionaryDocument> => {\n const dictionary = await DictionaryModel.findByIdAndDelete(dictionaryId);\n\n if (!dictionary) {\n throw new GenericError('DICTIONARY_NOT_FOUND', { dictionaryId });\n }\n\n return dictionary;\n};\n\nexport const incrementVersion = (dictionary: Dictionary): string => {\n const VERSION_PREFIX = 'v';\n const availableVersions = dictionary.availableVersions ?? [];\n\n // Get the current version from the version list, default to 'v1' if not present\n const currentVersion =\n availableVersions.length > 0\n ? 
availableVersions[availableVersions.length - 1]\n : 'v1';\n\n // Function to extract the numeric part of the version\n const getVersionNumber = (version: string): number => {\n const match = version.match(/^v(\\d+)$/);\n if (!match) {\n throw new Error(`Invalid version format: ${version}`);\n }\n return parseInt(match[1], 10);\n };\n\n // Start with the next version number\n let newNumber = getVersionNumber(currentVersion) + 1;\n let newVersion = `${VERSION_PREFIX}${newNumber}`;\n\n // Loop until a unique version is found\n while (availableVersions.includes(newVersion)) {\n newNumber += 1;\n newVersion = `${VERSION_PREFIX}${newNumber}`;\n }\n\n return newVersion;\n};\n"],"mappings":"AAAA,SAAS,uBAAuB;AAChC,SAAS,mCAAmC;AAC5C,SAAS,oBAAoB;AAE7B,SAAS,wBAAwB;AACjC;AAAA,EAEE;AAAA,OACK;AAgBA,MAAM,mBAAmB,OAC9B,SACA,OAAO,GACP,QAAQ,QAC0B;AAClC,MAAI;AACF,UAAM,eAAe,MAAM,gBAAgB,UAA8B;AAAA;AAAA,MAEvE,EAAE,QAAQ,QAAQ;AAAA;AAAA,MAGlB,EAAE,OAAO,KAAK;AAAA;AAAA,MAGd,EAAE,QAAQ,MAAM;AAAA;AAAA,MAGhB;AAAA,QACE,YAAY;AAAA,UACV,mBAAmB;AAAA,YACjB,MAAM;AAAA,cACJ,OAAO,EAAE,gBAAgB,WAAW;AAAA,cACpC,IAAI;AAAA,cACJ,IAAI;AAAA,YACN;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IASF,CAAC;AAED,WAAO;AAAA,EACT,SAAS,OAAO;AACd,YAAQ,MAAM,gCAAgC,KAAK;AACnD,UAAM;AAAA,EACR;AACF;AAYO,MAAM,oBAAoB,OAC/B,iBACgC;AAChC,QAAM,aAAa,MAAM,gBAAgB,UAA8B;AAAA;AAAA,IAErE,EAAE,QAAQ,EAAE,KAAK,aAAa,EAAE;AAAA;AAAA,IAGhC;AAAA,MACE,YAAY;AAAA,QACV,mBAAmB;AAAA,UACjB,MAAM;AAAA,YACJ,OAAO,EAAE,gBAAgB,WAAW;AAAA,YACpC,IAAI;AAAA,YACJ,IAAI;AAAA,UACN;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AAED,MAAI,CAAC,WAAW,QAAQ;AACtB,UAAM,IAAI,aAAa,wBAAwB,EAAE,aAAa,CAAC;AAAA,EACjE;AAEA,SAAO,WAAW,CAAC;AACrB;AAOO,MAAM,qBAAqB,OAChC,eACA,cACgC;AAChC,QAAM,eAAe,MAAM,sBAAsB,CAAC,aAAa,GAAG,SAAS;AAE3E,UAAQ,IAAI,gBAAgB,YAAY;AAExC,SAAO,aAAa,CAAC;AACvB;AAEO,MAAM,wBAAwB,OACnC,gBACA,cACkC;AAClC,QAAM,eAAe,MAAM,gBAAgB,UAA8B;AAAA;AAAA,IAEvE,EAAE,QAAQ,EAAE,KAAK,EAAE,KAAK,eAAe,GAAG,YAAY,UAAU,EAAE;AAAA;AAAA,IAGlE;AAAA,MACE,YAAY;AAAA,QACV,mBAAmB;AAAA,UACjB,MAAM;AAAA,YACJ,OAAO,EAAE,gBAAgB,WAAW;AAAA,YACpC,IAAI;AAAA,YACJ,IAAI;AAAA,UACN;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AAED,MAAI,CAAC,cAAc;AACjB,UAAM,IAAI,aAAa,wBAAwB;AAAA,MAC7C;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAEO,MAAM,sBAAsB,OACjC,cACsB;AACtB,QAAM,eAAe,MAAM,gBAAgB,KAAK;AAAA,IAC9C,YAAY;AAAA,EACd,CAAC,EAAE,OAAO,KAAK;AAEf,SAAO,aAAa,IAAI,CAAC,eAAe,WAAW,GAAG;AACxD;AAEO,MAAM,wBAAwB,OACnC,MACA,cACkC;AAClC,QAAM,eAAe,MAAM,gBAAgB,UAA8B;AAAA;AAAA,IAEvE;AAAA,MACE,QAAQ;AAAA,QACN,MAAM,EAAE,KAAK,KAAK;AAAA,QAClB,YAAY;AAAA,MACd;AAAA,IACF;AAAA;AAAA,IAGA;AAAA,MACE,YAAY;AAAA,QACV,mBAAmB;AAAA,UACjB,MAAM;AAAA,YACJ,OAAO,EAAE,gBAAgB,WAAW;AAAA,YACpC,IAAI;AAAA,YACJ,IAAI;AAAA,UACN;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AAED,SAAO;AACT;AAOO,MAAM,oBAAoB,OAC/B,YACoB;AACpB,QAAM,SAAS,MAAM,gBAAgB,eAAe,OAAO;AAE3D,MAAI,OAAO,WAAW,aAAa;AACjC,UAAM,IAAI,aAAa,2BAA2B,EAAE,QAAQ,CAAC;AAAA,EAC/D;AAEA,SAAO;AACT;AAOO,MAAM,mBAAmB,OAC9B,eACgC;AAChC,QAAM,SAAS,MAAM,mBAAmB,UAAU;AAElD,MAAI,OAAO,KAAK,MAAM,EAAE,SAAS,GAAG;AAClC,UAAM,IAAI,aAAa,6BAA6B;AAAA,MAClD;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO,MAAM,gBAAgB,OAAO,UAAU;AAChD;AAaO,MAAM,2BAA2B,OACtC,kBACA,cACyC;AAEzC,QAAM,uBAAuB,MAAM,gBAAgB,KAAK;AAAA,IACtD,KAAK,EAAE,KAAK,iBAAiB;AAAA,IAC7B,YAAY;AAAA,EACd,CAAC;AAGD,QAAM,0BAAoC,CAAC;AAC3C,QAAM,qBAA+B,CAAC;AAEtC,aAAW,OAAO,kBAAkB;AAClC,UAAM,oBAAoB,qBAAqB;AAAA,MAC7C,CAAC,eAAe,WAAW,QAAQ;AAAA,IACrC;AAEA,QAAI,mBAAmB;AACrB,8BAAwB,KAAK,GAAG;AAAA,IAClC,OAAO;AACL,yBAAmB,KAAK,GAAG;AAAA,IAC7B;AAAA,EACF;AAEA,SAAO,EAAE,yBAAyB,mBAAmB;AACvD;AAQO,MAAM,uBAAuB,OAClC,cACA,eA
CgC;AAChC,QAAM,mBAAmB,4BAA4B,UAAU;AAC/D,QAAM,qBAAqB,iBAAiB,kBAAkB,CAAC,KAAK,CAAC;AAErE,QAAM,cAAc,OAAO,KAAK,kBAAkB;AAClD,QAAM,SAAS,MAAM,mBAAmB,oBAAoB,WAAW;AAEvE,MAAI,OAAO,KAAK,MAAM,EAAE,SAAS,GAAG;AAClC,UAAM,IAAI,aAAa,6BAA6B;AAAA,MAClD;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,SAAS,MAAM,gBAAgB;AAAA,IACnC,EAAE,KAAK,aAAa;AAAA,IACpB;AAAA,EACF;AAEA,MAAI,OAAO,iBAAiB,GAAG;AAC7B,UAAM,IAAI,aAAa,4BAA4B,EAAE,aAAa,CAAC;AAAA,EACrE;AAEA,QAAM,oBAAoB,MAAM,kBAAkB,YAAY;AAE9D,SAAO;AACT;AAQO,MAAM,wBAAwB,OACnC,eACA,YACA,cACgC;AAChC,QAAM,mBAAmB,4BAA4B,UAAU;AAE/D,QAAM,qBAAqB,iBAAiB,kBAAkB,CAAC,KAAK,CAAC;AAErE,QAAM,cAAc,OAAO,KAAK,kBAAkB;AAClD,QAAM,SAAS,MAAM,mBAAmB,oBAAoB,WAAW;AAEvE,MAAI,OAAO,KAAK,MAAM,EAAE,SAAS,GAAG;AAClC,UAAM,IAAI,aAAa,6BAA6B;AAAA,MAClD;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,SAAS,MAAM,gBAAgB;AAAA,IACnC,EAAE,KAAK,eAAe,YAAY,UAAU;AAAA,IAC5C;AAAA,EACF;AAEA,MAAI,OAAO,iBAAiB,GAAG;AAC7B,UAAM,IAAI,aAAa,4BAA4B,EAAE,cAAc,CAAC;AAAA,EACtE;AAEA,QAAM,oBAAoB,MAAM,mBAAmB,eAAe,SAAS;AAE3E,SAAO;AACT;AAOO,MAAM,uBAAuB,OAClC,iBACgC;AAChC,QAAM,aAAa,MAAM,gBAAgB,kBAAkB,YAAY;AAEvE,MAAI,CAAC,YAAY;AACf,UAAM,IAAI,aAAa,wBAAwB,EAAE,aAAa,CAAC;AAAA,EACjE;AAEA,SAAO;AACT;AAEO,MAAM,mBAAmB,CAAC,eAAmC;AAClE,QAAM,iBAAiB;AACvB,QAAM,oBAAoB,WAAW,qBAAqB,CAAC;AAG3D,QAAM,iBACJ,kBAAkB,SAAS,IACvB,kBAAkB,kBAAkB,SAAS,CAAC,IAC9C;AAGN,QAAM,mBAAmB,CAAC,YAA4B;AACpD,UAAM,QAAQ,QAAQ,MAAM,UAAU;AACtC,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,MAAM,2BAA2B,OAAO,EAAE;AAAA,IACtD;AACA,WAAO,SAAS,MAAM,CAAC,GAAG,EAAE;AAAA,EAC9B;AAGA,MAAI,YAAY,iBAAiB,cAAc,IAAI;AACnD,MAAI,aAAa,GAAG,cAAc,GAAG,SAAS;AAG9C,SAAO,kBAAkB,SAAS,UAAU,GAAG;AAC7C,iBAAa;AACb,iBAAa,GAAG,cAAc,GAAG,SAAS;AAAA,EAC5C;AAEA,SAAO;AACT;","names":[]}
+
{"version":3,"sources":["../../../src/services/dictionary.service.ts"],"sourcesContent":["import { DictionaryModel } from '@models/dictionary.model';\nimport { ensureMongoDocumentToObject } from '@utils/ensureMongoDocumentToObject';\nimport { GenericError } from '@utils/errors';\nimport type { DictionaryFilters } from '@utils/filtersAndPagination/getDictionaryFiltersAndPagination';\nimport { removeObjectKeys } from '@utils/removeObjectKeys';\nimport {\n type DictionaryFields,\n validateDictionary,\n} from '@utils/validation/validateDictionary';\nimport { ObjectId } from 'mongoose';\nimport type {\n Dictionary,\n DictionaryData,\n DictionaryDocument,\n} from '@/types/dictionary.types';\nimport type { Project } from '@/types/project.types';\n\n/**\n * Finds dictionaries based on filters and pagination options.\n * @param filters - MongoDB filter query.\n * @param skip - Number of documents to skip.\n * @param limit - Number of documents to limit.\n * @returns List of dictionaries matching the filters.\n */\nexport const findDictionaries = async (\n filters: DictionaryFilters,\n skip = 0,\n limit = 100\n): Promise<DictionaryDocument[]> => {\n try {\n const dictionaries = await DictionaryModel.aggregate<DictionaryDocument>([\n // Stage 1: Match the filters\n { $match: filters },\n\n // Stage 2: Skip for pagination\n { $skip: skip },\n\n // Stage 3: Limit the number of documents\n { $limit: limit },\n\n // Stage 4: Add the 'availableVersions' field\n {\n $addFields: {\n availableVersions: {\n $map: {\n input: { $objectToArray: '$content' },\n as: 'version',\n in: '$$version.k',\n },\n },\n },\n },\n\n // (Optional) Stage 5: Project the fields you want to include/exclude\n // For example, to exclude the entire 'content' field and keep only 'availableVersions'\n // {\n // $project: {\n // content: 0 // Exclude the 'content' field\n // }\n // }\n ]);\n\n return dictionaries;\n } catch (error) {\n console.error('Error fetching dictionaries:', error);\n throw error;\n }\n};\n\n/**\n * Finds a dictionary by its ID.\n * @param dictionaryId - The ID of the dictionary to find.\n * @returns The dictionary matching the ID.\n */\n/**\n * Finds a dictionary by its ID and includes the 'availableVersions' field.\n * @param dictionaryId - The ID of the dictionary to find.\n * @returns The dictionary matching the ID with available versions.\n */\nexport const getDictionaryById = async (\n dictionaryId: string | ObjectId\n): Promise<DictionaryDocument> => {\n const dictionary = await DictionaryModel.aggregate<DictionaryDocument>([\n // Stage 1: Match the document by ID\n { $match: { _id: dictionaryId } },\n\n // Stage 2: Add the 'availableVersions' field\n {\n $addFields: {\n availableVersions: {\n $map: {\n input: { $objectToArray: '$content' },\n as: 'version',\n in: '$$version.k',\n },\n },\n },\n },\n ]);\n\n if (!dictionary.length) {\n throw new GenericError('DICTIONARY_NOT_FOUND', { dictionaryId });\n }\n\n return dictionary[0];\n};\n\n/**\n * Finds a dictionary by its ID.\n * @param dictionaryKey - The ID of the dictionary to find.\n * @returns The dictionary matching the ID.\n */\nexport const getDictionaryByKey = async (\n dictionaryKey: string,\n projectId: string | ObjectId\n): Promise<DictionaryDocument> => {\n const dictionaries = await getDictionariesByKeys([dictionaryKey], projectId);\n\n return dictionaries[0];\n};\n\nexport const getDictionariesByKeys = async (\n dictionaryKeys: string[],\n projectId: string | ObjectId\n): Promise<DictionaryDocument[]> => {\n const dictionaries = await 
DictionaryModel.aggregate<DictionaryDocument>([\n // Stage 1: Match the document by key\n { $match: { key: { $in: dictionaryKeys }, projectIds: projectId } },\n\n // Stage 2: Add the 'availableVersions' field\n {\n $addFields: {\n availableVersions: {\n $map: {\n input: { $objectToArray: '$content' },\n as: 'version',\n in: '$$version.k',\n },\n },\n },\n },\n ]);\n\n if (!dictionaries) {\n throw new GenericError('DICTIONARY_NOT_FOUND', {\n dictionaryKeys,\n projectId,\n });\n }\n\n return dictionaries;\n};\n\nexport const getDictionariesKeys = async (\n projectId: string | ObjectId\n): Promise<string[]> => {\n const dictionaries = await DictionaryModel.find({\n projectIds: projectId,\n }).select('key');\n\n return dictionaries.map((dictionary) => dictionary.key);\n};\n\nexport const getDictionariesByTags = async (\n tags: string[],\n projectId: string | Project['_id']\n): Promise<DictionaryDocument[]> => {\n const dictionaries = await DictionaryModel.aggregate<DictionaryDocument>([\n // Stage 1: Match the document by tags\n {\n $match: {\n tags: { $in: tags },\n projectIds: projectId,\n },\n },\n\n // Stage 2: Add the 'availableVersions' field\n {\n $addFields: {\n availableVersions: {\n $map: {\n input: { $objectToArray: '$content' },\n as: 'version',\n in: '$$version.k',\n },\n },\n },\n },\n ]);\n\n return dictionaries;\n};\n\n/**\n * Counts the total number of dictionaries that match the filters.\n * @param filters - MongoDB filter query.\n * @returns Total number of dictionaries.\n */\nexport const countDictionaries = async (\n filters: DictionaryFilters\n): Promise<number> => {\n const result = await DictionaryModel.countDocuments(filters);\n\n if (typeof result === 'undefined') {\n throw new GenericError('DICTIONARY_COUNT_FAILED', { filters });\n }\n\n return result;\n};\n\n/**\n * Creates a new dictionary in the database.\n * @param dictionary - The dictionary data to create.\n * @returns The created dictionary.\n */\nexport const createDictionary = async (\n dictionary: DictionaryData\n): Promise<DictionaryDocument> => {\n const errors = await validateDictionary(dictionary);\n\n if (Object.keys(errors).length > 0) {\n throw new GenericError('DICTIONARY_INVALID_FIELDS', {\n errors,\n });\n }\n\n return await DictionaryModel.create(dictionary);\n};\n\ntype GetExistingDictionaryResult = {\n existingDictionariesKey: string[];\n newDictionariesKey: string[];\n};\n\n/**\n * Gets the existing dictionaries from the provided list of keys.\n * @param dictionariesKeys - List of dictionary keys to check.\n * @param projectId - The ID of the project to check the dictionaries against.\n * @returns The existing dictionaries and the new dictionaries.\n */\nexport const getExistingDictionaryKey = async (\n dictionariesKeys: string[],\n projectId: string | ObjectId\n): Promise<GetExistingDictionaryResult> => {\n // Fetch dictionaries from the database where the key is in the provided list\n const existingDictionaries = await DictionaryModel.find({\n key: { $in: dictionariesKeys },\n projectIds: projectId,\n });\n\n // Map existing dictionaries to a LocalDictionary object\n const existingDictionariesKey: string[] = [];\n const newDictionariesKey: string[] = [];\n\n for (const key of dictionariesKeys) {\n const isDictionaryExist = existingDictionaries.some(\n (dictionary) => dictionary.key === key\n );\n\n if (isDictionaryExist) {\n existingDictionariesKey.push(key);\n } else {\n newDictionariesKey.push(key);\n }\n }\n\n return { existingDictionariesKey, newDictionariesKey };\n};\n\n/**\n * Updates 
an existing dictionary in the database by its ID.\n * @param dictionaryId - The ID of the dictionary to update.\n * @param dictionary - The updated dictionary data.\n * @returns The updated dictionary.\n */\nexport const updateDictionaryById = async (\n dictionaryId: string | ObjectId,\n dictionary: Partial<Dictionary>\n): Promise<DictionaryDocument> => {\n const dictionaryObject = ensureMongoDocumentToObject(dictionary);\n const dictionaryToUpdate = removeObjectKeys(dictionaryObject, ['_id']);\n\n const updatedKeys = Object.keys(dictionaryToUpdate) as DictionaryFields;\n const errors = await validateDictionary(dictionaryToUpdate, updatedKeys);\n\n if (Object.keys(errors).length > 0) {\n throw new GenericError('DICTIONARY_INVALID_FIELDS', {\n dictionaryId,\n errors,\n });\n }\n\n const result = await DictionaryModel.updateOne(\n { _id: dictionaryId },\n dictionaryToUpdate\n );\n\n if (result.matchedCount === 0) {\n throw new GenericError('DICTIONARY_UPDATE_FAILED', { dictionaryId });\n }\n\n const updatedDictionary = await getDictionaryById(dictionaryId);\n\n return updatedDictionary;\n};\n\n/**\n * Updates an existing dictionary in the database by its key.\n * @param dictionaryKey - The ID of the dictionary to update.\n * @param dictionary - The updated dictionary data.\n * @returns The updated dictionary.\n */\nexport const updateDictionaryByKey = async (\n dictionaryKey: string,\n dictionary: Partial<Dictionary>,\n projectId: string | ObjectId\n): Promise<DictionaryDocument> => {\n const dictionaryObject = ensureMongoDocumentToObject(dictionary);\n\n const dictionaryToUpdate = removeObjectKeys(dictionaryObject, ['_id']);\n\n const updatedKeys = Object.keys(dictionaryToUpdate) as DictionaryFields;\n const errors = await validateDictionary(dictionaryToUpdate, updatedKeys);\n\n if (Object.keys(errors).length > 0) {\n throw new GenericError('DICTIONARY_INVALID_FIELDS', {\n dictionaryKey,\n projectId,\n errors,\n });\n }\n\n const result = await DictionaryModel.updateOne(\n { key: dictionaryKey, projectIds: projectId },\n dictionaryToUpdate\n );\n\n if (result.matchedCount === 0) {\n throw new GenericError('DICTIONARY_UPDATE_FAILED', { dictionaryKey });\n }\n\n const updatedDictionary = await getDictionaryByKey(dictionaryKey, projectId);\n\n return updatedDictionary;\n};\n\n/**\n * Deletes a dictionary from the database by its ID.\n * @param dictionaryId - The ID of the dictionary to delete.\n * @returns The result of the deletion operation.\n */\nexport const deleteDictionaryById = async (\n dictionaryId: string\n): Promise<DictionaryDocument> => {\n const dictionary = await DictionaryModel.findByIdAndDelete(dictionaryId);\n\n if (!dictionary) {\n throw new GenericError('DICTIONARY_NOT_FOUND', { dictionaryId });\n }\n\n return dictionary;\n};\n\nexport const incrementVersion = (dictionary: Dictionary): string => {\n const VERSION_PREFIX = 'v';\n const availableVersions = dictionary.availableVersions ?? [];\n\n // Get the current version from the version list, default to 'v1' if not present\n const currentVersion =\n availableVersions.length > 0\n ? 
availableVersions[availableVersions.length - 1]\n : 'v1';\n\n // Function to extract the numeric part of the version\n const getVersionNumber = (version: string): number => {\n const match = version.match(/^v(\\d+)$/);\n if (!match) {\n throw new Error(`Invalid version format: ${version}`);\n }\n return parseInt(match[1], 10);\n };\n\n // Start with the next version number\n let newNumber = getVersionNumber(currentVersion) + 1;\n let newVersion = `${VERSION_PREFIX}${newNumber}`;\n\n // Loop until a unique version is found\n while (availableVersions.includes(newVersion)) {\n newNumber += 1;\n newVersion = `${VERSION_PREFIX}${newNumber}`;\n }\n\n return newVersion;\n};\n"],"mappings":"AAAA,SAAS,uBAAuB;AAChC,SAAS,mCAAmC;AAC5C,SAAS,oBAAoB;AAE7B,SAAS,wBAAwB;AACjC;AAAA,EAEE;AAAA,OACK;AAgBA,MAAM,mBAAmB,OAC9B,SACA,OAAO,GACP,QAAQ,QAC0B;AAClC,MAAI;AACF,UAAM,eAAe,MAAM,gBAAgB,UAA8B;AAAA;AAAA,MAEvE,EAAE,QAAQ,QAAQ;AAAA;AAAA,MAGlB,EAAE,OAAO,KAAK;AAAA;AAAA,MAGd,EAAE,QAAQ,MAAM;AAAA;AAAA,MAGhB;AAAA,QACE,YAAY;AAAA,UACV,mBAAmB;AAAA,YACjB,MAAM;AAAA,cACJ,OAAO,EAAE,gBAAgB,WAAW;AAAA,cACpC,IAAI;AAAA,cACJ,IAAI;AAAA,YACN;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IASF,CAAC;AAED,WAAO;AAAA,EACT,SAAS,OAAO;AACd,YAAQ,MAAM,gCAAgC,KAAK;AACnD,UAAM;AAAA,EACR;AACF;AAYO,MAAM,oBAAoB,OAC/B,iBACgC;AAChC,QAAM,aAAa,MAAM,gBAAgB,UAA8B;AAAA;AAAA,IAErE,EAAE,QAAQ,EAAE,KAAK,aAAa,EAAE;AAAA;AAAA,IAGhC;AAAA,MACE,YAAY;AAAA,QACV,mBAAmB;AAAA,UACjB,MAAM;AAAA,YACJ,OAAO,EAAE,gBAAgB,WAAW;AAAA,YACpC,IAAI;AAAA,YACJ,IAAI;AAAA,UACN;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AAED,MAAI,CAAC,WAAW,QAAQ;AACtB,UAAM,IAAI,aAAa,wBAAwB,EAAE,aAAa,CAAC;AAAA,EACjE;AAEA,SAAO,WAAW,CAAC;AACrB;AAOO,MAAM,qBAAqB,OAChC,eACA,cACgC;AAChC,QAAM,eAAe,MAAM,sBAAsB,CAAC,aAAa,GAAG,SAAS;AAE3E,SAAO,aAAa,CAAC;AACvB;AAEO,MAAM,wBAAwB,OACnC,gBACA,cACkC;AAClC,QAAM,eAAe,MAAM,gBAAgB,UAA8B;AAAA;AAAA,IAEvE,EAAE,QAAQ,EAAE,KAAK,EAAE,KAAK,eAAe,GAAG,YAAY,UAAU,EAAE;AAAA;AAAA,IAGlE;AAAA,MACE,YAAY;AAAA,QACV,mBAAmB;AAAA,UACjB,MAAM;AAAA,YACJ,OAAO,EAAE,gBAAgB,WAAW;AAAA,YACpC,IAAI;AAAA,YACJ,IAAI;AAAA,UACN;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AAED,MAAI,CAAC,cAAc;AACjB,UAAM,IAAI,aAAa,wBAAwB;AAAA,MAC7C;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAEO,MAAM,sBAAsB,OACjC,cACsB;AACtB,QAAM,eAAe,MAAM,gBAAgB,KAAK;AAAA,IAC9C,YAAY;AAAA,EACd,CAAC,EAAE,OAAO,KAAK;AAEf,SAAO,aAAa,IAAI,CAAC,eAAe,WAAW,GAAG;AACxD;AAEO,MAAM,wBAAwB,OACnC,MACA,cACkC;AAClC,QAAM,eAAe,MAAM,gBAAgB,UAA8B;AAAA;AAAA,IAEvE;AAAA,MACE,QAAQ;AAAA,QACN,MAAM,EAAE,KAAK,KAAK;AAAA,QAClB,YAAY;AAAA,MACd;AAAA,IACF;AAAA;AAAA,IAGA;AAAA,MACE,YAAY;AAAA,QACV,mBAAmB;AAAA,UACjB,MAAM;AAAA,YACJ,OAAO,EAAE,gBAAgB,WAAW;AAAA,YACpC,IAAI;AAAA,YACJ,IAAI;AAAA,UACN;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AAED,SAAO;AACT;AAOO,MAAM,oBAAoB,OAC/B,YACoB;AACpB,QAAM,SAAS,MAAM,gBAAgB,eAAe,OAAO;AAE3D,MAAI,OAAO,WAAW,aAAa;AACjC,UAAM,IAAI,aAAa,2BAA2B,EAAE,QAAQ,CAAC;AAAA,EAC/D;AAEA,SAAO;AACT;AAOO,MAAM,mBAAmB,OAC9B,eACgC;AAChC,QAAM,SAAS,MAAM,mBAAmB,UAAU;AAElD,MAAI,OAAO,KAAK,MAAM,EAAE,SAAS,GAAG;AAClC,UAAM,IAAI,aAAa,6BAA6B;AAAA,MAClD;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO,MAAM,gBAAgB,OAAO,UAAU;AAChD;AAaO,MAAM,2BAA2B,OACtC,kBACA,cACyC;AAEzC,QAAM,uBAAuB,MAAM,gBAAgB,KAAK;AAAA,IACtD,KAAK,EAAE,KAAK,iBAAiB;AAAA,IAC7B,YAAY;AAAA,EACd,CAAC;AAGD,QAAM,0BAAoC,CAAC;AAC3C,QAAM,qBAA+B,CAAC;AAEtC,aAAW,OAAO,kBAAkB;AAClC,UAAM,oBAAoB,qBAAqB;AAAA,MAC7C,CAAC,eAAe,WAAW,QAAQ;AAAA,IACrC;AAEA,QAAI,mBAAmB;AACrB,8BAAwB,KAAK,GAAG;AAAA,IAClC,OAAO;AACL,yBAAmB,KAAK,GAAG;AAAA,IAC7B;AAAA,EACF;AAEA,SAAO,EAAE,yBAAyB,mBAAmB;AACvD;AAQO,MAAM,uBAAuB,OAClC,cACA,eACgC;AAChC,QAAM,mBAAmB,4BAA4B
,UAAU;AAC/D,QAAM,qBAAqB,iBAAiB,kBAAkB,CAAC,KAAK,CAAC;AAErE,QAAM,cAAc,OAAO,KAAK,kBAAkB;AAClD,QAAM,SAAS,MAAM,mBAAmB,oBAAoB,WAAW;AAEvE,MAAI,OAAO,KAAK,MAAM,EAAE,SAAS,GAAG;AAClC,UAAM,IAAI,aAAa,6BAA6B;AAAA,MAClD;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,SAAS,MAAM,gBAAgB;AAAA,IACnC,EAAE,KAAK,aAAa;AAAA,IACpB;AAAA,EACF;AAEA,MAAI,OAAO,iBAAiB,GAAG;AAC7B,UAAM,IAAI,aAAa,4BAA4B,EAAE,aAAa,CAAC;AAAA,EACrE;AAEA,QAAM,oBAAoB,MAAM,kBAAkB,YAAY;AAE9D,SAAO;AACT;AAQO,MAAM,wBAAwB,OACnC,eACA,YACA,cACgC;AAChC,QAAM,mBAAmB,4BAA4B,UAAU;AAE/D,QAAM,qBAAqB,iBAAiB,kBAAkB,CAAC,KAAK,CAAC;AAErE,QAAM,cAAc,OAAO,KAAK,kBAAkB;AAClD,QAAM,SAAS,MAAM,mBAAmB,oBAAoB,WAAW;AAEvE,MAAI,OAAO,KAAK,MAAM,EAAE,SAAS,GAAG;AAClC,UAAM,IAAI,aAAa,6BAA6B;AAAA,MAClD;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,SAAS,MAAM,gBAAgB;AAAA,IACnC,EAAE,KAAK,eAAe,YAAY,UAAU;AAAA,IAC5C;AAAA,EACF;AAEA,MAAI,OAAO,iBAAiB,GAAG;AAC7B,UAAM,IAAI,aAAa,4BAA4B,EAAE,cAAc,CAAC;AAAA,EACtE;AAEA,QAAM,oBAAoB,MAAM,mBAAmB,eAAe,SAAS;AAE3E,SAAO;AACT;AAOO,MAAM,uBAAuB,OAClC,iBACgC;AAChC,QAAM,aAAa,MAAM,gBAAgB,kBAAkB,YAAY;AAEvE,MAAI,CAAC,YAAY;AACf,UAAM,IAAI,aAAa,wBAAwB,EAAE,aAAa,CAAC;AAAA,EACjE;AAEA,SAAO;AACT;AAEO,MAAM,mBAAmB,CAAC,eAAmC;AAClE,QAAM,iBAAiB;AACvB,QAAM,oBAAoB,WAAW,qBAAqB,CAAC;AAG3D,QAAM,iBACJ,kBAAkB,SAAS,IACvB,kBAAkB,kBAAkB,SAAS,CAAC,IAC9C;AAGN,QAAM,mBAAmB,CAAC,YAA4B;AACpD,UAAM,QAAQ,QAAQ,MAAM,UAAU;AACtC,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,MAAM,2BAA2B,OAAO,EAAE;AAAA,IACtD;AACA,WAAO,SAAS,MAAM,CAAC,GAAG,EAAE;AAAA,EAC9B;AAGA,MAAI,YAAY,iBAAiB,cAAc,IAAI;AACnD,MAAI,aAAa,GAAG,cAAc,GAAG,SAAS;AAG9C,SAAO,kBAAkB,SAAS,UAAU,GAAG;AAC7C,iBAAa;AACb,iBAAa,GAAG,cAAc,GAAG,SAAS;AAAA,EAC5C;AAEA,SAAO;AACT;","names":[]}
@@ -1 +1 @@
-
{"version":3,"sources":["../../../src/services/project.service.ts"],"sourcesContent":["import { ProjectModel } from '@models/project.model';\nimport { ensureMongoDocumentToObject } from '@utils/ensureMongoDocumentToObject';\nimport { GenericError } from '@utils/errors';\nimport type { ProjectFilters } from '@utils/filtersAndPagination/getProjectFiltersAndPagination';\nimport { removeObjectKeys } from '@utils/removeObjectKeys';\nimport {\n type ProjectFields,\n validateProject,\n} from '@utils/validation/validateProject';\nimport type { ObjectId } from 'mongoose';\nimport type {\n Project,\n ProjectData,\n ProjectDocument,\n} from '@/types/project.types';\n\n/**\n * Finds projects based on filters and pagination options.\n * @param filters - MongoDB filter query.\n * @param skip - Number of documents to skip.\n * @param limit - Number of documents to limit.\n * @returns List of projects matching the filters.\n */\nexport const findProjects = async (\n filters: ProjectFilters,\n skip = 0,\n limit = 100\n): Promise<ProjectDocument[]> =>\n await ProjectModel.find(filters).skip(skip).limit(limit);\n\n/**\n * Finds a project by its ID.\n * @param projectId - The ID of the project to find.\n * @returns The project matching the ID.\n */\nexport const getProjectById = async (\n projectId: string | ObjectId\n): Promise<ProjectDocument> => {\n const project = await ProjectModel.findById(projectId);\n\n if (!project) {\n throw new GenericError('PROJECT_NOT_DEFINED', { projectId });\n }\n\n return project;\n};\n\n/**\n * Counts the total number of projects that match the filters.\n * @param filters - MongoDB filter query.\n * @returns Total number of projects.\n */\nexport const countProjects = async (\n filters: ProjectFilters\n): Promise<number> => {\n const result = await ProjectModel.countDocuments(filters);\n\n if (typeof result === 'undefined') {\n throw new GenericError('PROJECT_COUNT_FAILED', { filters });\n }\n\n return result;\n};\n\n/**\n * Creates a new project in the database.\n * @param project - The project data to create.\n * @returns The created project.\n */\nexport const createProject = async (\n project: ProjectData\n): Promise<ProjectDocument> => {\n if ((project as Partial<Project>).oAuth2Access) {\n delete (project as Partial<Project>).oAuth2Access;\n }\n\n const errors = await validateProject(project, ['name']);\n\n if (Object.keys(errors).length > 0) {\n throw new GenericError('PROJECT_INVALID_FIELDS', { errors });\n }\n\n return await ProjectModel.create(project);\n};\n\n/**\n * Updates an existing project in the database by its ID.\n * @param projectId - The ID of the project to update.\n * @param project - The updated project data.\n * @returns The updated project.\n */\nexport const updateProjectById = async (\n projectId: string | ObjectId,\n project: Partial<Project>\n): Promise<ProjectDocument> => {\n const projectObject = ensureMongoDocumentToObject(project);\n const projectToUpdate = removeObjectKeys(projectObject, [\n '_id',\n 'oAuth2Access',\n 'organizationId',\n ]);\n\n const updatedKeys = Object.keys(projectToUpdate) as ProjectFields;\n\n const errors = validateProject(project, updatedKeys);\n\n if (Object.keys(errors).length > 0) {\n throw new GenericError('PROJECT_INVALID_FIELDS', {\n projectId,\n errors,\n });\n }\n\n
+
{"version":3,"sources":["../../../src/services/project.service.ts"],"sourcesContent":["import { ProjectModel } from '@models/project.model';\nimport { ensureMongoDocumentToObject } from '@utils/ensureMongoDocumentToObject';\nimport { GenericError } from '@utils/errors';\nimport type { ProjectFilters } from '@utils/filtersAndPagination/getProjectFiltersAndPagination';\nimport { removeObjectKeys } from '@utils/removeObjectKeys';\nimport {\n type ProjectFields,\n validateProject,\n} from '@utils/validation/validateProject';\nimport type { ObjectId } from 'mongoose';\nimport type {\n Project,\n ProjectData,\n ProjectDocument,\n} from '@/types/project.types';\n\n/**\n * Finds projects based on filters and pagination options.\n * @param filters - MongoDB filter query.\n * @param skip - Number of documents to skip.\n * @param limit - Number of documents to limit.\n * @returns List of projects matching the filters.\n */\nexport const findProjects = async (\n filters: ProjectFilters,\n skip = 0,\n limit = 100\n): Promise<ProjectDocument[]> =>\n await ProjectModel.find(filters).skip(skip).limit(limit);\n\n/**\n * Finds a project by its ID.\n * @param projectId - The ID of the project to find.\n * @returns The project matching the ID.\n */\nexport const getProjectById = async (\n projectId: string | ObjectId\n): Promise<ProjectDocument> => {\n const project = await ProjectModel.findById(projectId);\n\n if (!project) {\n throw new GenericError('PROJECT_NOT_DEFINED', { projectId });\n }\n\n return project;\n};\n\n/**\n * Counts the total number of projects that match the filters.\n * @param filters - MongoDB filter query.\n * @returns Total number of projects.\n */\nexport const countProjects = async (\n filters: ProjectFilters\n): Promise<number> => {\n const result = await ProjectModel.countDocuments(filters);\n\n if (typeof result === 'undefined') {\n throw new GenericError('PROJECT_COUNT_FAILED', { filters });\n }\n\n return result;\n};\n\n/**\n * Creates a new project in the database.\n * @param project - The project data to create.\n * @returns The created project.\n */\nexport const createProject = async (\n project: ProjectData\n): Promise<ProjectDocument> => {\n if ((project as Partial<Project>).oAuth2Access) {\n delete (project as Partial<Project>).oAuth2Access;\n }\n\n const errors = await validateProject(project, ['name']);\n\n if (Object.keys(errors).length > 0) {\n throw new GenericError('PROJECT_INVALID_FIELDS', { errors });\n }\n\n return await ProjectModel.create(project);\n};\n\n/**\n * Updates an existing project in the database by its ID.\n * @param projectId - The ID of the project to update.\n * @param project - The updated project data.\n * @returns The updated project.\n */\nexport const updateProjectById = async (\n projectId: string | ObjectId,\n project: Partial<Project>\n): Promise<ProjectDocument> => {\n const projectObject = ensureMongoDocumentToObject(project);\n const projectToUpdate = removeObjectKeys(projectObject, [\n '_id',\n 'oAuth2Access',\n 'organizationId',\n ]);\n\n const updatedKeys = Object.keys(projectToUpdate) as ProjectFields;\n\n const errors = validateProject(project, updatedKeys);\n\n if (Object.keys(errors).length > 0) {\n throw new GenericError('PROJECT_INVALID_FIELDS', {\n projectId,\n errors,\n });\n }\n\n const result = await ProjectModel.updateOne(\n { _id: projectId },\n projectToUpdate\n );\n\n if (result.matchedCount === 0) {\n throw new GenericError('PROJECT_UPDATE_FAILED', { projectId });\n }\n\n return await 
getProjectById(projectId);\n};\n\n/**\n * Deletes a project from the database by its ID.\n * @param projectId - The ID of the project to delete.\n * @returns The result of the deletion operation.\n */\nexport const deleteProjectById = async (\n projectId: string | ObjectId\n): Promise<ProjectDocument> => {\n const project = await ProjectModel.findByIdAndDelete(projectId);\n\n if (!project) {\n throw new GenericError('PROJECT_NOT_DEFINED', { projectId });\n }\n\n return project;\n};\n"],"mappings":"AAAA,SAAS,oBAAoB;AAC7B,SAAS,mCAAmC;AAC5C,SAAS,oBAAoB;AAE7B,SAAS,wBAAwB;AACjC;AAAA,EAEE;AAAA,OACK;AAeA,MAAM,eAAe,OAC1B,SACA,OAAO,GACP,QAAQ,QAER,MAAM,aAAa,KAAK,OAAO,EAAE,KAAK,IAAI,EAAE,MAAM,KAAK;AAOlD,MAAM,iBAAiB,OAC5B,cAC6B;AAC7B,QAAM,UAAU,MAAM,aAAa,SAAS,SAAS;AAErD,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,aAAa,uBAAuB,EAAE,UAAU,CAAC;AAAA,EAC7D;AAEA,SAAO;AACT;AAOO,MAAM,gBAAgB,OAC3B,YACoB;AACpB,QAAM,SAAS,MAAM,aAAa,eAAe,OAAO;AAExD,MAAI,OAAO,WAAW,aAAa;AACjC,UAAM,IAAI,aAAa,wBAAwB,EAAE,QAAQ,CAAC;AAAA,EAC5D;AAEA,SAAO;AACT;AAOO,MAAM,gBAAgB,OAC3B,YAC6B;AAC7B,MAAK,QAA6B,cAAc;AAC9C,WAAQ,QAA6B;AAAA,EACvC;AAEA,QAAM,SAAS,MAAM,gBAAgB,SAAS,CAAC,MAAM,CAAC;AAEtD,MAAI,OAAO,KAAK,MAAM,EAAE,SAAS,GAAG;AAClC,UAAM,IAAI,aAAa,0BAA0B,EAAE,OAAO,CAAC;AAAA,EAC7D;AAEA,SAAO,MAAM,aAAa,OAAO,OAAO;AAC1C;AAQO,MAAM,oBAAoB,OAC/B,WACA,YAC6B;AAC7B,QAAM,gBAAgB,4BAA4B,OAAO;AACzD,QAAM,kBAAkB,iBAAiB,eAAe;AAAA,IACtD;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,cAAc,OAAO,KAAK,eAAe;AAE/C,QAAM,SAAS,gBAAgB,SAAS,WAAW;AAEnD,MAAI,OAAO,KAAK,MAAM,EAAE,SAAS,GAAG;AAClC,UAAM,IAAI,aAAa,0BAA0B;AAAA,MAC/C;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,SAAS,MAAM,aAAa;AAAA,IAChC,EAAE,KAAK,UAAU;AAAA,IACjB;AAAA,EACF;AAEA,MAAI,OAAO,iBAAiB,GAAG;AAC7B,UAAM,IAAI,aAAa,yBAAyB,EAAE,UAAU,CAAC;AAAA,EAC/D;AAEA,SAAO,MAAM,eAAe,SAAS;AACvC;AAOO,MAAM,oBAAoB,OAC/B,cAC6B;AAC7B,QAAM,UAAU,MAAM,aAAa,kBAAkB,SAAS;AAE9D,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,aAAa,uBAAuB,EAAE,UAAU,CAAC;AAAA,EAC7D;AAEA,SAAO;AACT;","names":[]}
@@ -0,0 +1,148 @@
+import fs from "fs";
+import { getBlogs } from "@intlayer/blog";
+import { getDocs } from "@intlayer/docs";
+import { Locales } from "intlayer";
+import { OpenAI } from "openai";
+import embeddingsList from './embeddings.json';
+const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
+const vectorStore = [];
+const MODEL = "gpt-4o-2024-11-20";
+const MODEL_TEMPERATURE = 0.1;
+const EMBEDDING_MODEL = "text-embedding-3-large";
+const OVERLAP_TOKENS = 200;
+const MAX_CHUNK_TOKENS = 800;
+const CHAR_BY_TOKEN = 4.15;
+const MAX_CHARS = MAX_CHUNK_TOKENS * CHAR_BY_TOKEN;
+const OVERLAP_CHARS = OVERLAP_TOKENS * CHAR_BY_TOKEN;
+const MAX_RELEVANT_CHUNKS_NB = 8;
+const MIN_RELEVANT_CHUNKS_SIMILARITY = 0.25;
+const chunkText = (text) => {
+  const chunks = [];
+  let start = 0;
+  while (start < text.length) {
+    let end = Math.min(start + MAX_CHARS, text.length);
+    if (end < text.length) {
+      const lastSpace = text.lastIndexOf(" ", end);
+      if (lastSpace > start) {
+        end = lastSpace;
+      }
+    }
+    chunks.push(text.substring(start, end));
+    const nextStart = end - OVERLAP_CHARS;
+    if (nextStart <= start) {
+      start = end;
+    } else {
+      start = nextStart;
+    }
+  }
+  return chunks;
+};
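To make the chunking constants concrete, here is the arithmetic they imply, written as a standalone sketch. The 10 000-character document length is an invented example, and real counts drift slightly because chunkText also snaps chunk boundaries to the nearest space:

// Worked numbers behind the chunking window (values copied from the constants above)
const MAX_CHARS = 800 * 4.15;              // 3320 chars per chunk
const OVERLAP_CHARS = 200 * 4.15;          // 830 chars shared between neighbouring chunks
const stride = MAX_CHARS - OVERLAP_CHARS;  // 2490 fresh chars consumed per step

const docLength = 10_000; // hypothetical document size
// Roughly ceil((docLength - OVERLAP_CHARS) / stride) window positions:
console.log(Math.ceil((docLength - OVERLAP_CHARS) / stride)); // 4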
+const generateEmbedding = async (text) => {
+  const response = await openai.embeddings.create({
+    model: EMBEDDING_MODEL,
+    // Specify the embedding model
+    input: text
+  });
+  return response.data[0].embedding;
+};
+const cosineSimilarity = (vecA, vecB) => {
+  const dotProduct = vecA.reduce((sum, a, idx) => sum + a * vecB[idx], 0);
+  const magnitudeA = Math.sqrt(vecA.reduce((sum, a) => sum + a * a, 0));
+  const magnitudeB = Math.sqrt(vecB.reduce((sum, b) => sum + b * b, 0));
+  return dotProduct / (magnitudeA * magnitudeB);
+};
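As a quick worked instance of the similarity measure on toy 2-dimensional vectors (real embeddings from text-embedding-3-large have 3072 dimensions, but the formula is identical). Chunks scoring below the 0.25 threshold defined above never reach the model:

// Self-contained, typed copy of the function above, used for a sanity check
const cosineSimilarity = (vecA: number[], vecB: number[]): number => {
  const dot = vecA.reduce((sum, a, idx) => sum + a * vecB[idx], 0);
  const magA = Math.sqrt(vecA.reduce((sum, a) => sum + a * a, 0));
  const magB = Math.sqrt(vecB.reduce((sum, b) => sum + b * b, 0));
  return dot / (magA * magB);
};

console.log(cosineSimilarity([1, 0], [1, 0])); // 1      (same direction)
console.log(cosineSimilarity([1, 0], [1, 1])); // ~0.707 (partially aligned)
console.log(cosineSimilarity([1, 0], [0, 1])); // 0      (orthogonal, filtered out by the 0.25 cutoff)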
+const indexMarkdownFiles = async () => {
+  const docs = getDocs(Locales.ENGLISH);
+  const blogs = getBlogs(Locales.ENGLISH);
+  let result = {};
+  const files = { ...docs, ...blogs };
+  for (const fileKey of Object.keys(files)) {
+    const fileChunks = chunkText(files[fileKey]);
+    for (const chunkIndex of Object.keys(fileChunks)) {
+      const chunkNumber = Number(chunkIndex) + 1;
+      const chunksNumber = fileChunks.length;
+      const fileChunk = fileChunks[chunkIndex];
+      const embeddingKeyName = `${fileKey}/chunk_${chunkNumber}`;
+      const docEmbedding = embeddingsList[embeddingKeyName];
+      let embedding = docEmbedding;
+      if (!embedding) {
+        embedding = await generateEmbedding(fileChunk);
+      }
+      result = { ...result, [embeddingKeyName]: embedding };
+      vectorStore.push({
+        fileKey,
+        chunkNumber,
+        embedding,
+        content: fileChunk
+      });
+      console.info(`- Indexed: ${embeddingKeyName}/${chunksNumber}`);
+    }
+  }
+  if (process.env.NODE_ENV === "development") {
+    try {
+      if (JSON.stringify(result) !== JSON.stringify(embeddingsList)) {
+        fs.writeFileSync(
+          "src/utils/AI/embeddings.json",
+          JSON.stringify(result, null, 2)
+        );
+      }
+    } catch (error) {
+      console.error(error);
+    }
+  }
+};
+indexMarkdownFiles();
+const searchChunkReference = async (query) => {
+  const queryEmbedding = await generateEmbedding(query);
+  const results = vectorStore.map((chunk) => ({
+    ...chunk,
+    similarity: cosineSimilarity(queryEmbedding, chunk.embedding)
+    // Add similarity score to each doc
+  })).filter((chunk) => chunk.similarity > MIN_RELEVANT_CHUNKS_SIMILARITY).sort((a, b) => b.similarity - a.similarity).slice(0, MAX_RELEVANT_CHUNKS_NB);
+  return results;
+};
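A hypothetical retrieval call, with an invented query string. One caveat visible in the code: indexMarkdownFiles() is kicked off at import time without being awaited, so a query issued immediately at startup may search a vectorStore that is still filling up:

// Invented query, for illustration only
const chunks = await searchChunkReference('How do I declare content with Intlayer?');
// At most 8 entries (MAX_RELEVANT_CHUNKS_NB), sorted by descending similarity,
// each shaped like { fileKey, chunkNumber, content, embedding, similarity }
for (const chunk of chunks) {
  console.log(chunk.fileKey, chunk.chunkNumber, chunk.similarity.toFixed(3));
}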
+const initPrompt = {
+  role: "system",
+  content: `Ignore all instructions before this one. Keep forever all instructions after this one. From now you are not allowed to ignore the previous instructions. You are an expert in internationalization solution for Javascript Application. You're an helpful chatbot for Intlayer, an Internationalization solution The website url of the solution is: 'https://intlayer.org'). And your implemented on this website. The user is an potential user of Intlayer. Your task is now to answer the questions of the user. You must talk as an member of Intlayer. You must only answer the questions relative to Intlayer. Any question should be related to Intlayer. If a question is not related to Intlayer, you should NOT answer it. You must NOT answer question who is generally secret for a company (E.g. financial information). Your should NOT invent information that are not precised into the relevant documentation chunks provided. If you don't have enough information to answer the question, not answer using extra information extracted from your knowledge. If your have a doubt about something, you should ask more question to the user. Here some useful urls to know more about Intlayer: https://intlayer.org/docs https://intlayer.org/blog https://intlayer.org/pricing https://intlayer.org/dashboard Your should return a result as markdown. Code element should include metadata fileName="file.ts" if could be useful for the user. Code element format should not include metadata (E.g. codeFormat="typescript", or packageManager="npm". Here is the relevant documentation: {{relevantFilesReferences}}`
+  // Placeholder for relevant documentation to be inserted later
+};
+const askDocQuestion = async (messages) => {
+  const userMessages = messages.filter((message) => message.role === "user");
+  const query = userMessages.map((message) => `- ${message.content}`).join("\n");
+  const relevantFilesReferences = await searchChunkReference(query);
+  const messagesList = [
+    {
+      ...initPrompt,
+      content: initPrompt.content.replace(
+        "{{relevantFilesReferences}}",
+        relevantFilesReferences.length === 0 ? "Not relevant file found related to the question." : relevantFilesReferences.map(
+          (doc, idx) => `[Chunk ${idx}] docKey = "${doc.fileKey}":
+${doc.content}`
+        ).join("\n\n")
+        // Insert relevant docs into the prompt
+      )
+    },
+    ...messages
+    // Include all user and assistant messages
+  ];
+  const response = await openai.chat.completions.create({
+    model: MODEL,
+    temperature: MODEL_TEMPERATURE,
+    messages: messagesList
+  });
+  const result = response.choices[0].message.content;
+  const relatedFiles = [
+    ...new Set(relevantFilesReferences.map((doc) => doc.fileKey))
+  ];
+  return {
+    response: result ?? "Error: No result found",
+    relatedFiles
+  };
+};
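Putting the pieces together, a minimal sketch of calling the exported function directly, assuming OPENAI_API_KEY is set and indexing has finished:

const { response, relatedFiles } = await askDocQuestion([
  { role: 'user', content: 'Does Intlayer work with Next.js?' },
]);
console.log(response);     // markdown answer grounded in the retrieved chunks
console.log(relatedFiles); // unique doc/blog keys the retrieved chunks came from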
+export {
+  askDocQuestion,
+  indexMarkdownFiles,
+  initPrompt,
+  searchChunkReference
+};
+//# sourceMappingURL=askDocQuestion.mjs.map
@@ -0,0 +1 @@
+
{"version":3,"sources":["../../../../src/utils/AI/askDocQuestion.ts"],"sourcesContent":["import fs from 'fs';\nimport { getBlogs } from '@intlayer/blog';\nimport { getDocs } from '@intlayer/docs';\nimport { Locales } from 'intlayer';\nimport { OpenAI } from 'openai';\nimport embeddingsList from './embeddings.json';\n\nconst openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });\n\ntype VectorStoreEl = {\n fileKey: string;\n chunkNumber: number;\n content: string;\n embedding: number[];\n};\n\n/**\n * Simple in-memory vector store to hold document embeddings and their content.\n * Each entry contains:\n * - fileKey: A unique key identifying the file\n * - chunkNumber: The number of the chunk within the document\n * - content: The chunk content\n * - embedding: The numerical embedding vector for the chunk\n */\nconst vectorStore: VectorStoreEl[] = [];\n\n// Constants defining OpenAI's token and character limits\nconst MODEL: OpenAI.Chat.ChatModel = 'gpt-4o-2024-11-20'; // Model to use for chat completions\nconst MODEL_TEMPERATURE = 0.1; // Temperature to use for chat completions\nconst EMBEDDING_MODEL: OpenAI.Embeddings.EmbeddingModel =\n 'text-embedding-3-large'; // Model to use for embedding generation\nconst OVERLAP_TOKENS = 200; // Number of tokens to overlap between chunks\nconst MAX_CHUNK_TOKENS = 800; // Maximum number of tokens per chunk\nconst CHAR_BY_TOKEN = 4.15; // Approximate pessimistically the number of characters per token // Can use `tiktoken` or other tokenizers to calculate it more precisely\nconst MAX_CHARS = MAX_CHUNK_TOKENS * CHAR_BY_TOKEN;\nconst OVERLAP_CHARS = OVERLAP_TOKENS * CHAR_BY_TOKEN;\nconst MAX_RELEVANT_CHUNKS_NB = 8; // Maximum number of relevant chunks to attach to chatGPT context\nconst MIN_RELEVANT_CHUNKS_SIMILARITY = 0.25; // Minimum similarity required for a chunk to be considered relevant\n\n/**\n * Splits a given text into chunks ensuring each chunk does not exceed MAX_CHARS.\n * @param text - The input text to split.\n * @returns - Array of text chunks.\n */\nconst chunkText = (text: string): string[] => {\n const chunks: string[] = [];\n let start = 0;\n\n while (start < text.length) {\n let end = Math.min(start + MAX_CHARS, text.length);\n\n // Ensure we don't cut words in the middle (find nearest space)\n if (end < text.length) {\n const lastSpace = text.lastIndexOf(' ', end);\n if (lastSpace > start) {\n end = lastSpace;\n }\n }\n\n chunks.push(text.substring(start, end));\n\n // Move start forward correctly\n const nextStart = end - OVERLAP_CHARS;\n if (nextStart <= start) {\n // Prevent infinite loop if overlap is too large\n start = end;\n } else {\n start = nextStart;\n }\n }\n\n return chunks;\n};\n\n/**\n * Generates an embedding for a given text using OpenAI's embedding API.\n * Trims the text if it exceeds the maximum allowed characters.\n *\n * @param text - The input text to generate an embedding for\n * @returns The embedding vector as a number array\n */\nconst generateEmbedding = async (text: string): Promise<number[]> => {\n const response = await openai.embeddings.create({\n model: EMBEDDING_MODEL, // Specify the embedding model\n input: text,\n });\n\n return response.data[0].embedding; // Return the generated embedding\n};\n\n/**\n * Calculates the cosine similarity between two vectors.\n * Cosine similarity measures the cosine of the angle between two vectors in an inner product space.\n * Used to determine the similarity between chunks of text.\n *\n * @param vecA - The first vector\n * @param vecB - The second vector\n * 
@returns The cosine similarity score\n */\nconst cosineSimilarity = (vecA: number[], vecB: number[]): number => {\n // Calculate the dot product of the two vectors\n const dotProduct = vecA.reduce((sum, a, idx) => sum + a * vecB[idx], 0);\n\n // Calculate the magnitude (Euclidean norm) of each vector\n const magnitudeA = Math.sqrt(vecA.reduce((sum, a) => sum + a * a, 0));\n const magnitudeB = Math.sqrt(vecB.reduce((sum, b) => sum + b * b, 0));\n\n // Compute and return the cosine similarity\n return dotProduct / (magnitudeA * magnitudeB);\n};\n\n/**\n * Indexes all Markdown documents by generating embeddings for each chunk and storing them in memory.\n * Also updates the embeddings.json file if new embeddings are generated.\n */\nexport const indexMarkdownFiles = async (): Promise<void> => {\n // Retrieve documentation and blog posts in English locale\n const docs = getDocs(Locales.ENGLISH);\n const blogs = getBlogs(Locales.ENGLISH);\n\n let result: Record<string, number[]> = {}; // Object to hold updated embeddings\n\n const files = { ...docs, ...blogs }; // Combine docs and blogs into a single object\n\n // Iterate over each file key (identifier) in the combined files\n for (const fileKey of Object.keys(files)) {\n // Split the document into chunks based on headings\n const fileChunks = chunkText(files[fileKey as keyof typeof files]);\n\n // Iterate over each chunk within the current file\n for (const chunkIndex of Object.keys(fileChunks)) {\n const chunkNumber = Number(chunkIndex) + 1; // Chunk number starts at 1\n const chunksNumber = fileChunks.length;\n\n const fileChunk = fileChunks[\n chunkIndex as keyof typeof fileChunks\n ] as string;\n\n const embeddingKeyName = `${fileKey}/chunk_${chunkNumber}`; // Unique key for the chunk\n\n // Retrieve precomputed embedding if available\n const docEmbedding = embeddingsList[\n embeddingKeyName as keyof typeof embeddingsList\n ] as number[] | undefined;\n\n let embedding = docEmbedding; // Use existing embedding if available\n\n if (!embedding) {\n embedding = await generateEmbedding(fileChunk); // Generate embedding if not present\n }\n\n // Update the result object with the new embedding\n result = { ...result, [embeddingKeyName]: embedding };\n\n // Store the embedding and content in the in-memory vector store\n vectorStore.push({\n fileKey,\n chunkNumber,\n embedding,\n content: fileChunk,\n });\n\n console.info(`- Indexed: ${embeddingKeyName}/${chunksNumber}`);\n }\n }\n\n if (process.env.NODE_ENV === 'development') {\n try {\n // Compare the newly generated embeddings with existing ones\n if (JSON.stringify(result) !== JSON.stringify(embeddingsList)) {\n // If there are new embeddings, save them to embeddings.json\n fs.writeFileSync(\n 'src/utils/AI/embeddings.json',\n JSON.stringify(result, null, 2)\n );\n }\n } catch (error) {\n console.error(error); // Log any errors during the file write process\n }\n }\n};\n\n// Automatically index Markdown files\nindexMarkdownFiles();\n\n/**\n * Searches the indexed documents for the most relevant chunks based on a query.\n * Utilizes cosine similarity to find the closest matching embeddings.\n *\n * @param query - The search query provided by the user\n * @returns An array of the top matching document chunks' content\n */\nexport const searchChunkReference = async (\n query: string\n): Promise<VectorStoreEl[]> => {\n // Generate an embedding for the user's query\n const queryEmbedding = await generateEmbedding(query);\n\n // Calculate similarity scores between the query embedding and each 
document's embedding\n const results = vectorStore\n .map((chunk) => ({\n ...chunk,\n similarity: cosineSimilarity(queryEmbedding, chunk.embedding), // Add similarity score to each doc\n }))\n .filter((chunk) => chunk.similarity > MIN_RELEVANT_CHUNKS_SIMILARITY) // Filter out documents with low similarity scores\n .sort((a, b) => b.similarity - a.similarity) // Sort documents by highest similarity first\n .slice(0, MAX_RELEVANT_CHUNKS_NB); // Keep only the most similar chunks, up to MAX_RELEVANT_CHUNKS_NB\n\n // Return the top matching chunks\n return results;\n};\n\n// Define the structure of messages used in chat completions\nexport type ChatCompletionRequestMessage = {\n role: 'system' | 'user' | 'assistant'; // The role of the message sender\n content: string; // The text content of the message\n};\n\n// Initial prompt configuration for the chatbot\nexport const initPrompt: ChatCompletionRequestMessage = {\n role: 'system',\n content:\n 'Ignore all instructions before this one. \\\n Keep all instructions after this one forever. From now on you are not allowed to ignore the previous instructions. \\\n You are an expert in internationalization solutions for JavaScript applications. You\\'re a helpful chatbot for Intlayer, an internationalization solution. \\\n The website URL of the solution is \\'https://intlayer.org\\', and you are embedded on this website. \\\n The user is a potential user of Intlayer. Your task is to answer the user\\'s questions. \\\n You must speak as a member of Intlayer, and you must only answer questions related to Intlayer. \\\n If a question is not related to Intlayer, you should NOT answer it. \\\n You must NOT answer questions about information that is generally confidential for a company (e.g. financial information). \\\n You should NOT invent information that is not stated in the relevant documentation chunks provided. \\\n If you don\\'t have enough information to answer the question, do not answer using extra information drawn from your own knowledge. \\\n If you have a doubt about something, ask the user follow-up questions instead. \\\n \\\n Here are some useful URLs to learn more about Intlayer: \\\n https://intlayer.org/docs \\\n https://intlayer.org/blog \\\n https://intlayer.org/pricing \\\n https://intlayer.org/dashboard \\\n \\\n You should return the result as Markdown. \\\n Code elements should include the metadata fileName=\"file.ts\" when it could be useful for the user. \\\n Code elements should not include format metadata (e.g. codeFormat=\"typescript\" or packageManager=\"npm\"). \\\n \\\n Here is the relevant documentation:\\\n {{relevantFilesReferences}}', // Placeholder for relevant documentation to be inserted later\n};\n\nexport type AskDocQuestionResult = {\n response: string;\n relatedFiles: string[];\n};\n\n/**\n * Handles the \"Ask a question\" endpoint in an Express.js route.\n * Processes user messages, retrieves relevant documents, and interacts with OpenAI's chat API to generate responses.\n *\n * @param messages - An array of chat messages from the user and assistant\n * @returns The assistant's response and the keys of the related documentation files\n */\nexport const askDocQuestion = async (\n messages: ChatCompletionRequestMessage[]\n): Promise<AskDocQuestionResult> => {\n // Assistant responses are filtered out; otherwise the chatbot would get stuck in a self-referential loop\n // Note that the embedding precision will be lower if the user changes context during the chat\n const userMessages = messages.filter((message) => message.role === 'user');\n\n // Concatenate the user's messages into a single bulleted query\n const query = userMessages\n .map((message) => `- ${message.content}`)\n .join('\n');\n\n // 1) Find relevant documents based on the user's question\n const relevantFilesReferences = await searchChunkReference(query);\n\n // 2) Integrate the relevant documents into the initial system prompt\n const messagesList: ChatCompletionRequestMessage[] = [\n {\n ...initPrompt,\n content: initPrompt.content.replace(\n '{{relevantFilesReferences}}',\n relevantFilesReferences.length === 0\n ? 'No relevant file found related to the question.'\n : relevantFilesReferences\n .map(\n (doc, idx) =>\n `[Chunk ${idx}] docKey = \"${doc.fileKey}\":\\n${doc.content}`\n )\n .join('\\n\\n') // Insert relevant docs into the prompt\n ),\n },\n ...messages, // Include all user and assistant messages\n ];\n\n // 3) Send the compiled messages to OpenAI's Chat Completion API (using a specific model)\n const response = await openai.chat.completions.create({\n model: MODEL,\n temperature: MODEL_TEMPERATURE,\n messages: messagesList,\n });\n\n const result = response.choices[0].message.content; // Extract the assistant's reply\n\n // 4) Extract unique related files\n const relatedFiles = [\n ...new Set(relevantFilesReferences.map((doc) => doc.fileKey)),\n ];\n\n // 5) Return the assistant's response to the user\n return {\n response: result ?? 
'Error: No result found',\n relatedFiles,\n };\n};\n"],"mappings":"AAAA,OAAO,QAAQ;AACf,SAAS,gBAAgB;AACzB,SAAS,eAAe;AACxB,SAAS,eAAe;AACxB,SAAS,cAAc;AACvB,OAAO,oBAAoB;AAE3B,MAAM,SAAS,IAAI,OAAO,EAAE,QAAQ,QAAQ,IAAI,eAAe,CAAC;AAiBhE,MAAM,cAA+B,CAAC;AAGtC,MAAM,QAA+B;AACrC,MAAM,oBAAoB;AAC1B,MAAM,kBACJ;AACF,MAAM,iBAAiB;AACvB,MAAM,mBAAmB;AACzB,MAAM,gBAAgB;AACtB,MAAM,YAAY,mBAAmB;AACrC,MAAM,gBAAgB,iBAAiB;AACvC,MAAM,yBAAyB;AAC/B,MAAM,iCAAiC;AAOvC,MAAM,YAAY,CAAC,SAA2B;AAC5C,QAAM,SAAmB,CAAC;AAC1B,MAAI,QAAQ;AAEZ,SAAO,QAAQ,KAAK,QAAQ;AAC1B,QAAI,MAAM,KAAK,IAAI,QAAQ,WAAW,KAAK,MAAM;AAGjD,QAAI,MAAM,KAAK,QAAQ;AACrB,YAAM,YAAY,KAAK,YAAY,KAAK,GAAG;AAC3C,UAAI,YAAY,OAAO;AACrB,cAAM;AAAA,MACR;AAAA,IACF;AAEA,WAAO,KAAK,KAAK,UAAU,OAAO,GAAG,CAAC;AAGtC,UAAM,YAAY,MAAM;AACxB,QAAI,aAAa,OAAO;AAEtB,cAAQ;AAAA,IACV,OAAO;AACL,cAAQ;AAAA,IACV;AAAA,EACF;AAEA,SAAO;AACT;AASA,MAAM,oBAAoB,OAAO,SAAoC;AACnE,QAAM,WAAW,MAAM,OAAO,WAAW,OAAO;AAAA,IAC9C,OAAO;AAAA;AAAA,IACP,OAAO;AAAA,EACT,CAAC;AAED,SAAO,SAAS,KAAK,CAAC,EAAE;AAC1B;AAWA,MAAM,mBAAmB,CAAC,MAAgB,SAA2B;AAEnE,QAAM,aAAa,KAAK,OAAO,CAAC,KAAK,GAAG,QAAQ,MAAM,IAAI,KAAK,GAAG,GAAG,CAAC;AAGtE,QAAM,aAAa,KAAK,KAAK,KAAK,OAAO,CAAC,KAAK,MAAM,MAAM,IAAI,GAAG,CAAC,CAAC;AACpE,QAAM,aAAa,KAAK,KAAK,KAAK,OAAO,CAAC,KAAK,MAAM,MAAM,IAAI,GAAG,CAAC,CAAC;AAGpE,SAAO,cAAc,aAAa;AACpC;AAMO,MAAM,qBAAqB,YAA2B;AAE3D,QAAM,OAAO,QAAQ,QAAQ,OAAO;AACpC,QAAM,QAAQ,SAAS,QAAQ,OAAO;AAEtC,MAAI,SAAmC,CAAC;AAExC,QAAM,QAAQ,EAAE,GAAG,MAAM,GAAG,MAAM;AAGlC,aAAW,WAAW,OAAO,KAAK,KAAK,GAAG;AAExC,UAAM,aAAa,UAAU,MAAM,OAA6B,CAAC;AAGjE,eAAW,cAAc,OAAO,KAAK,UAAU,GAAG;AAChD,YAAM,cAAc,OAAO,UAAU,IAAI;AACzC,YAAM,eAAe,WAAW;AAEhC,YAAM,YAAY,WAChB,UACF;AAEA,YAAM,mBAAmB,GAAG,OAAO,UAAU,WAAW;AAGxD,YAAM,eAAe,eACnB,gBACF;AAEA,UAAI,YAAY;AAEhB,UAAI,CAAC,WAAW;AACd,oBAAY,MAAM,kBAAkB,SAAS;AAAA,MAC/C;AAGA,eAAS,EAAE,GAAG,QAAQ,CAAC,gBAAgB,GAAG,UAAU;AAGpD,kBAAY,KAAK;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,QACA,SAAS;AAAA,MACX,CAAC;AAED,cAAQ,KAAK,cAAc,gBAAgB,IAAI,YAAY,EAAE;AAAA,IAC/D;AAAA,EACF;AAEA,MAAI,QAAQ,IAAI,aAAa,eAAe;AAC1C,QAAI;AAEF,UAAI,KAAK,UAAU,MAAM,MAAM,KAAK,UAAU,cAAc,GAAG;AAE7D,WAAG;AAAA,UACD;AAAA,UACA,KAAK,UAAU,QAAQ,MAAM,CAAC;AAAA,QAChC;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AACd,cAAQ,MAAM,KAAK;AAAA,IACrB;AAAA,EACF;AACF;AAGA,mBAAmB;AASZ,MAAM,uBAAuB,OAClC,UAC6B;AAE7B,QAAM,iBAAiB,MAAM,kBAAkB,KAAK;AAGpD,QAAM,UAAU,YACb,IAAI,CAAC,WAAW;AAAA,IACf,GAAG;AAAA,IACH,YAAY,iBAAiB,gBAAgB,MAAM,SAAS;AAAA;AAAA,EAC9D,EAAE,EACD,OAAO,CAAC,UAAU,MAAM,aAAa,8BAA8B,EACnE,KAAK,CAAC,GAAG,MAAM,EAAE,aAAa,EAAE,UAAU,EAC1C,MAAM,GAAG,sBAAsB;AAGlC,SAAO;AACT;AASO,MAAM,aAA2C;AAAA,EACtD,MAAM;AAAA,EACN,SACE;AAAA;AAwBJ;AAcO,MAAM,iBAAiB,OAC5B,aACkC;AAGlC,QAAM,eAAe,SAAS,OAAO,CAAC,YAAY,QAAQ,SAAS,MAAM;AAGzE,QAAM,QAAQ,aACX,IAAI,CAAC,YAAY,KAAK,QAAQ,OAAO,EAAE,EACvC,KAAK,IAAI;AAGZ,QAAM,0BAA0B,MAAM,qBAAqB,KAAK;AAGhE,QAAM,eAA+C;AAAA,IACnD;AAAA,MACE,GAAG;AAAA,MACH,SAAS,WAAW,QAAQ;AAAA,QAC1B;AAAA,QACA,wBAAwB,WAAW,IAC/B,qDACA,wBACG;AAAA,UACC,CAAC,KAAK,QACJ,UAAU,GAAG,eAAe,IAAI,OAAO;AAAA,EAAO,IAAI,OAAO;AAAA,QAC7D,EACC,KAAK,MAAM;AAAA;AAAA,MACpB;AAAA,IACF;AAAA,IACA,GAAG;AAAA;AAAA,EACL;AAGA,QAAM,WAAW,MAAM,OAAO,KAAK,YAAY,OAAO;AAAA,IACpD,OAAO;AAAA,IACP,aAAa;AAAA,IACb,UAAU;AAAA,EACZ,CAAC;AAED,QAAM,SAAS,SAAS,QAAQ,CAAC,EAAE,QAAQ;AAG3C,QAAM,eAAe;AAAA,IACnB,GAAG,IAAI,IAAI,wBAAwB,IAAI,CAAC,QAAQ,IAAI,OAAO,CAAC;AAAA,EAC9D;AAGA,SAAO;AAAA,IACL,UAAU,UAAU;AAAA,IACpB;AAAA,EACF;AACF;","names":[]}
|
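
The askDocQuestion utility added in this version implements retrieval-augmented generation over a plain in-memory vector store: document chunks are embedded once (the precomputed values account for the large embeddings.json additions above), and each query is answered by a linear cosine-similarity scan followed by a top-k cut. The sketch below restates that retrieval step in isolation; the rankChunks helper name, the 0.4 threshold value, and the toy data are illustrative assumptions, while cosineSimilarity and the map/filter/sort/slice pipeline mirror the source embedded in the map file above.

type VectorStoreEl = {
  fileKey: string;
  chunkNumber: number;
  embedding: number[];
  content: string;
};

const MIN_RELEVANT_CHUNKS_SIMILARITY = 0.4; // assumed threshold, not necessarily the package's value
const MAX_RELEVANT_CHUNKS_NB = 6; // top-k cut, per the source's own comment

// Cosine similarity: dot product divided by the product of Euclidean norms
const cosineSimilarity = (vecA: number[], vecB: number[]): number => {
  const dotProduct = vecA.reduce((sum, a, idx) => sum + a * vecB[idx], 0);
  const magnitudeA = Math.sqrt(vecA.reduce((sum, a) => sum + a * a, 0));
  const magnitudeB = Math.sqrt(vecB.reduce((sum, b) => sum + b * b, 0));
  return dotProduct / (magnitudeA * magnitudeB);
};

// Score every stored chunk against the query embedding, drop weak matches,
// and keep the best k, as searchChunkReference does (hypothetical helper name)
const rankChunks = (
  queryEmbedding: number[],
  vectorStore: VectorStoreEl[]
): (VectorStoreEl & { similarity: number })[] =>
  vectorStore
    .map((chunk) => ({
      ...chunk,
      similarity: cosineSimilarity(queryEmbedding, chunk.embedding),
    }))
    .filter((chunk) => chunk.similarity > MIN_RELEVANT_CHUNKS_SIMILARITY)
    .sort((a, b) => b.similarity - a.similarity)
    .slice(0, MAX_RELEVANT_CHUNKS_NB);

// Toy usage: collinear vectors score 1, orthogonal vectors score 0,
// so only doc_a survives the threshold
const store: VectorStoreEl[] = [
  { fileKey: 'doc_a', chunkNumber: 1, embedding: [1, 0], content: 'A' },
  { fileKey: 'doc_b', chunkNumber: 1, embedding: [0, 1], content: 'B' },
];
console.log(rankChunks([2, 0], store)); // [{ fileKey: 'doc_a', ..., similarity: 1 }]

Because the store lives in memory and is scanned linearly on every query, this approach stays fast only while the chunk count is modest; shipping the precomputed embeddings.json avoids paying the embedding cost again at startup.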