@intlayer/backend 7.2.3 → 7.3.0-canary.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -0
- package/dist/assets/utils/AI/askDocQuestion/embeddings/blog/en/compiler_vs_declarative_i18n.json +5132 -0
- package/dist/assets/utils/AI/askDocQuestion/embeddings/docs/en/cli/build.json +2054 -0
- package/dist/assets/utils/AI/askDocQuestion/embeddings/docs/en/cli/configuration.json +2054 -0
- package/dist/assets/utils/AI/askDocQuestion/embeddings/docs/en/cli/debug.json +2054 -0
- package/dist/assets/utils/AI/askDocQuestion/embeddings/docs/en/cli/doc-review.json +2054 -0
- package/dist/assets/utils/AI/askDocQuestion/embeddings/docs/en/cli/doc-translate.json +3080 -0
- package/dist/assets/utils/AI/askDocQuestion/embeddings/docs/en/cli/editor.json +1028 -0
- package/dist/assets/utils/AI/askDocQuestion/embeddings/docs/en/cli/fill.json +3080 -0
- package/dist/assets/utils/AI/askDocQuestion/embeddings/docs/en/cli/index.json +4106 -0
- package/dist/assets/utils/AI/askDocQuestion/embeddings/docs/en/cli/list.json +2054 -0
- package/dist/assets/utils/AI/askDocQuestion/embeddings/docs/en/cli/live.json +2054 -0
- package/dist/assets/utils/AI/askDocQuestion/embeddings/docs/en/cli/pull.json +2054 -0
- package/dist/assets/utils/AI/askDocQuestion/embeddings/docs/en/cli/push.json +3080 -0
- package/dist/assets/utils/AI/askDocQuestion/embeddings/docs/en/cli/sdk.json +2054 -0
- package/dist/assets/utils/AI/askDocQuestion/embeddings/docs/en/cli/test.json +2054 -0
- package/dist/assets/utils/AI/askDocQuestion/embeddings/docs/en/cli/transform.json +2054 -0
- package/dist/assets/utils/AI/askDocQuestion/embeddings/docs/en/cli/version.json +1028 -0
- package/dist/assets/utils/AI/askDocQuestion/embeddings/docs/en/cli/watch.json +1028 -0
- package/dist/esm/controllers/ai.controller.mjs +23 -23
- package/dist/esm/controllers/ai.controller.mjs.map +1 -1
- package/dist/esm/export.mjs +3 -2
- package/dist/esm/utils/AI/askDocQuestion/askDocQuestion.mjs +1 -1
- package/dist/esm/utils/AI/askDocQuestion/askDocQuestion.mjs.map +1 -1
- package/dist/esm/utils/AI/askDocQuestion/indexMarkdownFiles.mjs.map +1 -1
- package/dist/esm/utils/AI/auditDictionary/index.mjs +1 -1
- package/dist/esm/utils/AI/auditDictionary/index.mjs.map +1 -1
- package/dist/esm/utils/AI/auditDictionaryField/index.mjs +1 -1
- package/dist/esm/utils/AI/auditDictionaryField/index.mjs.map +1 -1
- package/dist/esm/utils/AI/auditDictionaryMetadata/index.mjs +5 -29
- package/dist/esm/utils/AI/auditDictionaryMetadata/index.mjs.map +1 -1
- package/dist/esm/utils/AI/auditTag/index.mjs +1 -1
- package/dist/esm/utils/AI/auditTag/index.mjs.map +1 -1
- package/dist/esm/utils/AI/autocomplete/index.mjs +1 -2
- package/dist/esm/utils/AI/autocomplete/index.mjs.map +1 -1
- package/dist/esm/utils/AI/customQuery/index.mjs +5 -11
- package/dist/esm/utils/AI/customQuery/index.mjs.map +1 -1
- package/dist/esm/utils/AI/translateJSON/index.mjs +6 -53
- package/dist/esm/utils/AI/translateJSON/index.mjs.map +1 -1
- package/dist/types/controllers/ai.controller.d.ts +1 -1
- package/dist/types/controllers/ai.controller.d.ts.map +1 -1
- package/dist/types/controllers/dictionary.controller.d.ts.map +1 -1
- package/dist/types/controllers/projectAccessKey.controller.d.ts.map +1 -1
- package/dist/types/emails/InviteUserEmail.d.ts +4 -4
- package/dist/types/emails/MagicLinkEmail.d.ts +4 -4
- package/dist/types/emails/OAuthTokenCreatedEmail.d.ts +4 -4
- package/dist/types/emails/PasswordChangeConfirmation.d.ts +4 -4
- package/dist/types/emails/PasswordChangeConfirmation.d.ts.map +1 -1
- package/dist/types/emails/ResetUserPassword.d.ts +4 -4
- package/dist/types/emails/ResetUserPassword.d.ts.map +1 -1
- package/dist/types/emails/SubscriptionPaymentCancellation.d.ts +4 -4
- package/dist/types/emails/SubscriptionPaymentError.d.ts +4 -4
- package/dist/types/emails/SubscriptionPaymentSuccess.d.ts +4 -4
- package/dist/types/emails/ValidateUserEmail.d.ts +4 -4
- package/dist/types/emails/Welcome.d.ts +4 -4
- package/dist/types/export.d.ts +2 -2
- package/dist/types/models/dictionary.model.d.ts +4 -4
- package/dist/types/models/dictionary.model.d.ts.map +1 -1
- package/dist/types/models/discussion.model.d.ts +2 -2
- package/dist/types/models/discussion.model.d.ts.map +1 -1
- package/dist/types/models/oAuth2.model.d.ts +3 -3
- package/dist/types/models/oAuth2.model.d.ts.map +1 -1
- package/dist/types/schemas/dictionary.schema.d.ts +6 -6
- package/dist/types/schemas/discussion.schema.d.ts +6 -6
- package/dist/types/schemas/oAuth2.schema.d.ts +5 -5
- package/dist/types/schemas/organization.schema.d.ts +6 -6
- package/dist/types/schemas/plans.schema.d.ts +6 -6
- package/dist/types/schemas/project.schema.d.ts +6 -6
- package/dist/types/schemas/project.schema.d.ts.map +1 -1
- package/dist/types/schemas/session.schema.d.ts +6 -6
- package/dist/types/schemas/tag.schema.d.ts +6 -6
- package/dist/types/schemas/tag.schema.d.ts.map +1 -1
- package/dist/types/schemas/user.schema.d.ts +6 -6
- package/dist/types/services/email.service.d.ts +11 -11
- package/dist/types/types/plan.types.d.ts.map +1 -1
- package/dist/types/utils/AI/askDocQuestion/askDocQuestion.d.ts +1 -1
- package/dist/types/utils/AI/askDocQuestion/indexMarkdownFiles.d.ts.map +1 -1
- package/dist/types/utils/AI/auditDictionary/index.d.ts +1 -1
- package/dist/types/utils/AI/auditDictionaryField/index.d.ts +1 -1
- package/dist/types/utils/AI/auditDictionaryMetadata/index.d.ts +3 -16
- package/dist/types/utils/AI/auditDictionaryMetadata/index.d.ts.map +1 -1
- package/dist/types/utils/AI/auditTag/index.d.ts +1 -1
- package/dist/types/utils/AI/autocomplete/index.d.ts +1 -1
- package/dist/types/utils/AI/autocomplete/index.d.ts.map +1 -1
- package/dist/types/utils/AI/customQuery/index.d.ts +3 -10
- package/dist/types/utils/AI/customQuery/index.d.ts.map +1 -1
- package/dist/types/utils/AI/translateJSON/index.d.ts +3 -17
- package/dist/types/utils/AI/translateJSON/index.d.ts.map +1 -1
- package/dist/types/utils/filtersAndPagination/getDiscussionFiltersAndPagination.d.ts +2 -2
- package/dist/types/utils/filtersAndPagination/getOrganizationFiltersAndPagination.d.ts +2 -2
- package/dist/types/utils/filtersAndPagination/getProjectFiltersAndPagination.d.ts +2 -2
- package/dist/types/utils/filtersAndPagination/getTagFiltersAndPagination.d.ts +2 -2
- package/package.json +9 -14
- package/dist/assets/utils/AI/auditDictionaryMetadata/PROMPT.md +0 -73
- package/dist/assets/utils/AI/translateJSON/PROMPT.md +0 -45
- package/dist/esm/utils/AI/aiSdk.mjs +0 -98
- package/dist/esm/utils/AI/aiSdk.mjs.map +0 -1
- package/dist/types/utils/AI/aiSdk.d.ts +0 -62
- package/dist/types/utils/AI/aiSdk.d.ts.map +0 -1
package/dist/esm/controllers/ai.controller.mjs
@@ -2,34 +2,34 @@ import { formatPaginatedResponse, formatResponse } from "../utils/responseData.mjs";
 import { ErrorHandler } from "../utils/errors/ErrorHandler.mjs";
 import { getDictionariesByTags } from "../services/dictionary.service.mjs";
 import { findTags, getTagsByKeys } from "../services/tag.service.mjs";
-import { getAIConfig } from "../utils/AI/aiSdk.mjs";
 import { askDocQuestion as askDocQuestion$1 } from "../utils/AI/askDocQuestion/askDocQuestion.mjs";
 import { aiDefaultOptions, auditDictionary } from "../utils/AI/auditDictionary/index.mjs";
 import { aiDefaultOptions as aiDefaultOptions$1, auditDictionaryField } from "../utils/AI/auditDictionaryField/index.mjs";
-import { aiDefaultOptions as aiDefaultOptions$2, auditDictionaryMetadata } from "../utils/AI/auditDictionaryMetadata/index.mjs";
+import { aiDefaultOptions as aiDefaultOptions$2, auditDictionaryMetadata as auditDictionaryMetadata$1 } from "../utils/AI/auditDictionaryMetadata/index.mjs";
 import { aiDefaultOptions as aiDefaultOptions$3, auditTag as auditTag$1 } from "../utils/AI/auditTag/index.mjs";
 import { aiDefaultOptions as aiDefaultOptions$4, autocomplete as autocomplete$1 } from "../utils/AI/autocomplete/index.mjs";
-import { aiDefaultOptions as aiDefaultOptions$5, customQuery as customQuery$
-import { aiDefaultOptions as aiDefaultOptions$6, translateJSON as translateJSON$
+import { aiDefaultOptions as aiDefaultOptions$5, customQuery as customQuery$2 } from "../utils/AI/customQuery/index.mjs";
+import { aiDefaultOptions as aiDefaultOptions$6, translateJSON as translateJSON$2 } from "../utils/AI/translateJSON/index.mjs";
 import { getDiscussionFiltersAndPagination } from "../utils/filtersAndPagination/getDiscussionFiltersAndPagination.mjs";
 import { DiscussionModel } from "../models/discussion.model.mjs";
+import { getAIConfig } from "@intlayer/ai";
 
 //#region src/controllers/ai.controller.ts
 const customQuery = async (req, res, _next) => {
 	const { aiOptions, tagsKeys, ...rest } = req.body;
 	let aiConfig;
 	try {
-		aiConfig = await getAIConfig(res, {
+		aiConfig = await getAIConfig({
 			userOptions: aiOptions,
 			defaultOptions: aiDefaultOptions$5,
 			accessType: ["registered_user", "apiKey"]
-		});
+		}, !!res.locals.user);
 	} catch (_error) {
 		ErrorHandler.handleGenericErrorResponse(res, "AI_ACCESS_DENIED");
 		return;
 	}
 	try {
-		const auditResponse = await customQuery$
+		const auditResponse = await customQuery$2({
 			...rest,
 			aiConfig,
 			applicationContext: aiOptions?.applicationContext
@@ -51,11 +51,11 @@ const translateJSON = async (req, res, _next) => {
 	const { aiOptions, tagsKeys, ...rest } = req.body;
 	let aiConfig;
 	try {
-		aiConfig = await getAIConfig(res, {
+		aiConfig = await getAIConfig({
 			userOptions: aiOptions,
 			defaultOptions: aiDefaultOptions$6,
 			accessType: ["registered_user", "apiKey"]
-		});
+		}, !!res.locals.user);
 	} catch (_error) {
 		ErrorHandler.handleGenericErrorResponse(res, "AI_ACCESS_DENIED");
 		return;
@@ -63,7 +63,7 @@ const translateJSON = async (req, res, _next) => {
 	try {
 		let tags = [];
 		if (project?.organizationId && tagsKeys) tags = await getTagsByKeys(tagsKeys, project.organizationId);
-		const auditResponse = await translateJSON$
+		const auditResponse = await translateJSON$2({
 			...rest,
 			aiConfig,
 			applicationContext: aiOptions?.applicationContext,
@@ -89,11 +89,11 @@ const auditContentDeclaration = async (req, res, _next) => {
 	const { fileContent, filePath, aiOptions, locales, defaultLocale, tagsKeys } = req.body;
 	let aiConfig;
 	try {
-		aiConfig = await getAIConfig(res, {
+		aiConfig = await getAIConfig({
 			userOptions: aiOptions,
 			defaultOptions: aiDefaultOptions,
 			accessType: ["registered_user", "apiKey"]
-		});
+		}, !!res.locals.user);
 	} catch (_error) {
 		ErrorHandler.handleGenericErrorResponse(res, "AI_ACCESS_DENIED");
 		return;
@@ -130,11 +130,11 @@ const auditContentDeclarationField = async (req, res, _next) => {
 	const { fileContent, aiOptions, locales, tagsKeys, keyPath } = req.body;
 	let aiConfig;
 	try {
-		aiConfig = await getAIConfig(res, {
+		aiConfig = await getAIConfig({
 			userOptions: aiOptions,
 			defaultOptions: aiDefaultOptions$1,
 			accessType: ["registered_user", "apiKey"]
-		});
+		}, !!res.locals.user);
 	} catch (_error) {
 		ErrorHandler.handleGenericErrorResponse(res, "AI_ACCESS_DENIED");
 		return;
@@ -170,18 +170,18 @@ const auditContentDeclarationMetadata = async (req, res, _next) => {
 	const { fileContent, aiOptions } = req.body;
 	let aiConfig;
 	try {
-		aiConfig = await getAIConfig(res, {
+		aiConfig = await getAIConfig({
 			userOptions: aiOptions,
 			defaultOptions: aiDefaultOptions$2,
 			accessType: ["registered_user", "apiKey"]
-		});
+		}, !!res.locals.user);
 	} catch (_error) {
 		ErrorHandler.handleGenericErrorResponse(res, "AI_ACCESS_DENIED");
 		return;
 	}
 	try {
 		const tags = await findTags({ organizationId: organization?.id }, 0, 1e3);
-		const auditResponse = await auditDictionaryMetadata({
+		const auditResponse = await auditDictionaryMetadata$1({
 			fileContent,
 			aiConfig,
 			applicationContext: aiOptions?.applicationContext,
@@ -207,11 +207,11 @@ const auditTag = async (req, res, _next) => {
 	const { aiOptions, tag } = req.body;
 	let aiConfig;
 	try {
-		aiConfig = await getAIConfig(res, {
+		aiConfig = await getAIConfig({
 			userOptions: aiOptions,
 			defaultOptions: aiDefaultOptions$3,
 			accessType: ["registered_user", "apiKey"]
-		});
+		}, !!res.locals.user);
 	} catch (_error) {
 		ErrorHandler.handleGenericErrorResponse(res, "AI_ACCESS_DENIED");
 		return;
@@ -242,10 +242,10 @@ const askDocQuestion = async (req, res, _next) => {
 	const { user, project, organization } = res.locals;
 	let aiConfig;
 	try {
-		aiConfig = await getAIConfig(res, {
+		aiConfig = await getAIConfig({
 			userOptions: {},
 			accessType: ["public"]
-		});
+		}, !!res.locals.user);
 	} catch (_error) {
 		ErrorHandler.handleGenericErrorResponse(res, "AI_ACCESS_DENIED");
 		return;
@@ -296,11 +296,11 @@ const autocomplete = async (req, res, _next) => {
 	const { text, aiOptions, contextBefore, currentLine, contextAfter } = req.body;
 	let aiConfig;
 	try {
-		aiConfig = await getAIConfig(res, {
+		aiConfig = await getAIConfig({
 			userOptions: aiOptions,
 			defaultOptions: aiDefaultOptions$4,
 			accessType: ["public"]
-		});
+		}, !!res.locals.user);
 	} catch (_error) {
 		ErrorHandler.handleGenericErrorResponse(res, "AI_ACCESS_DENIED");
 		return;
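Read together, the hunks describe a single mechanical migration: getAIConfig (along with the AIConfig, AIOptions, and ChatCompletionRequestMessage types) now comes from the @intlayer/ai package rather than the backend-internal ../utils/AI/aiSdk module, and instead of receiving the Express response and reading the session from it, it takes a plain boolean (the controllers pass !!res.locals.user). A minimal TypeScript sketch of the new call shape follows; the wrapper name resolveAIConfig and its parameter list are illustrative assumptions, while getAIConfig, the option fields (userOptions, defaultOptions, accessType), and the access-type values come straight from the diff:

// Sketch only, not the package's exact code.
import { type AIConfig, type AIOptions, getAIConfig } from "@intlayer/ai";

// 7.2.3 call shape (the helper read the session off the Express response):
//   aiConfig = await getAIConfig(res, { userOptions, defaultOptions, accessType });
// 7.3.0-canary.0 call shape (the session is reduced to a boolean):
const resolveAIConfig = async (
  aiOptions: AIOptions | undefined,
  defaultOptions: AIOptions, // assumed type; the diff only shows the field name
  isAuthenticated: boolean // controllers derive this as `!!res.locals.user`
): Promise<AIConfig> =>
  getAIConfig(
    {
      userOptions: aiOptions,
      defaultOptions,
      accessType: ["registered_user", "apiKey"],
    },
    isAuthenticated
  );

The deletions at the bottom of the file list are consistent with this move: package/dist/esm/utils/AI/aiSdk.mjs, its .d.ts, and the auditDictionaryMetadata/translateJSON PROMPT.md assets leave the backend package entirely.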
package/dist/esm/controllers/ai.controller.mjs.map
@@ -1 +1 @@
[single-line source map rewritten wholesale; the minified JSON is omitted here. Its embedded sourcesContent confirms the change above: the TypeScript source now imports getAIConfig, AIConfig, AIOptions, and ChatCompletionRequestMessage from '@intlayer/ai' instead of '@utils/AI/aiSdk', and every getAIConfig(res, { ... }) call site becomes getAIConfig({ ... }, !!res.locals.user); the mappings string is regenerated to match.]
BAAgBQ;GAChB,YAAY,CAAC,mBAAmB,SAAS;GAC1C,EACD,CAAC,CAAC,IAAI,OAAO,KACd;UACM,QAAQ;AACf,eAAa,2BAA2B,KAAK,mBAAmB;AAChE;;AAGF,KAAI;EACF,IAAIJ,OAAc,EAAE;AAEpB,MAAI,SAAS,eACX,QAAO,MAAM,cAAc,UAAU,QAAQ,eAAe;EAG9D,MAAM,gBACJ,MAAMK,qBAAsD;GAC1D;GACA;GACA,oBAAoB,WAAW;GAC/B;GACA;GACA;GACD,CAAC;AAEJ,MAAI,CAAC,eAAe;AAClB,gBAAa,2BAA2B,KAAK,eAAe;AAC5D;;EAGF,MAAM,eACJ,eACE,EACE,MAAM,eACP,CACF;AAEH,MAAI,KAAK,aAAa;AACtB;UACO,OAAO;AACd,eAAa,uBAAuB,KAAK,MAAkB;AAC3D;;;;;;AAeJ,MAAa,kCAAkC,OAC7C,KACA,KACA,UACkB;CAClB,MAAM,EAAE,iBAAiB,IAAI;CAC7B,MAAM,EAAE,aAAa,cAAc,IAAI;CAEvC,IAAIT;AACJ,KAAI;AACF,aAAW,MAAM,YACf;GACE,aAAa;GACb,gBAAgBU;GAChB,YAAY,CAAC,mBAAmB,SAAS;GAC1C,EACD,CAAC,CAAC,IAAI,OAAO,KACd;UACM,QAAQ;AACf,eAAa,2BAA2B,KAAK,mBAAmB;AAChE;;AAGF,KAAI;EACF,MAAMN,OAAc,MAAMO,SACxB,EACE,gBAAgB,cAAc,IAC/B,EACD,GACA,IACD;EAED,MAAM,gBACJ,MAAMC,0BAA4D;GAChE;GACA;GACA,oBAAoB,WAAW;GAC/B;GACD,CAAC;AAEJ,MAAI,CAAC,eAAe;AAClB,gBAAa,2BAA2B,KAAK,eAAe;AAC5D;;EAGF,MAAM,eACJ,eAAwE,EACtE,MAAM,eACP,CAAC;AAEJ,MAAI,KAAK,aAAa;AACtB;UACO,OAAO;AACd,eAAa,uBAAuB,KAAK,MAAkB;AAC3D;;;;;;AAaJ,MAAa,WAAW,OACtB,KACA,KACA,UACkB;CAClB,MAAM,EAAE,YAAY,IAAI;CACxB,MAAM,EAAE,WAAW,QAAQ,IAAI;CAE/B,IAAIZ;AACJ,KAAI;AACF,aAAW,MAAM,YACf;GACE,aAAa;GACb,gBAAgBa;GAChB,YAAY,CAAC,mBAAmB,SAAS;GAC1C,EACD,CAAC,CAAC,IAAI,OAAO,KACd;UACM,QAAQ;AACf,eAAa,2BAA2B,KAAK,mBAAmB;AAChE;;AAGF,KAAI;EACF,IAAIC,eAA6B,EAAE;AACnC,MAAI,SAAS,eACX,gBAAe,MAAM,sBAAsB,CAAC,IAAI,IAAI,EAAE,QAAQ,GAAG;EAGnE,MAAM,gBAAgB,MAAMC,WAAsB;GAChD;GACA;GACA;GACA,oBAAoB,WAAW;GAChC,CAAC;AAEF,MAAI,CAAC,eAAe;AAClB,gBAAa,2BAA2B,KAAK,eAAe;AAC5D;;EAGF,MAAM,eAAe,eAAqD,EACxE,MAAM,eACP,CAAC;AAEF,MAAI,KAAK,aAAa;AACtB;UACO,OAAO;AACd,eAAa,uBAAuB,KAAK,MAAkB;AAC3D;;;AAWJ,MAAa,iBAAiB,OAC5B,KACA,KACA,UACkB;CAClB,MAAM,EAAE,WAAW,EAAE,EAAE,iBAAiB,IAAI;CAC5C,MAAM,EAAE,MAAM,SAAS,iBAAiB,IAAI;CAE5C,IAAIf;AACJ,KAAI;AACF,aAAW,MAAM,YACf;GACE,aAAa,EAAE;GACf,YAAY,CAAC,SAAS;GACvB,EACD,CAAC,CAAC,IAAI,OAAO,KACd;UACM,QAAQ;AACf,eAAa,2BAA2B,KAAK,mBAAmB;AAChE;;AAIF,KAAI,UAAU,gBAAgB,mCAAmC;AACjE,KAAI,UAAU,iBAAiB,yBAAyB;AACxD,KAAI,UAAU,cAAc,aAAa;AACzC,KAAI,UAAU,qBAAqB,KAAK;AACxC,KAAI,gBAAgB;AACpB,KAAI,MAAM,kBAAkB;AAC5B,KAAI,SAAS;AAGb,kBACkB,UAAU,UAAU,EAClC,YAAY,UAAU;AACpB,MAAI,MAAM,SAAS,KAAK,UAAU,EAAE,OAAO,CAAC,CAAC,MAAM;AACnD,MAAI,SAAS;IAEhB,CAAC,CACD,KAAK,OAAO,iBAAiB;EAC5B,MAAM,yBAAyB,SAAS,UACrC,YAAY,QAAQ,SAAS,OAC/B,EAAE;AAIH,OAH+B,yBAC3B,uBAAuB,MAAM,IAAI,CAAC,SAClC,KACyB,EAK3B,OAAM,gBAAgB,iBACpB,EAAE,cAAc,EAChB,EACE,MAAM;GACJ;GACA,QAAQ,MAAM;GACd,WAAW,SAAS;GACpB,gBAAgB,cAAc;GAC9B,UAAU,CACR,GAAG,SAAS,KAAK,SAAS;IACxB,MAAM,IAAI;IACV,SAAS,IAAI;IACb,WAAW,IAAI;IAChB,EAAE,EACH;IACE,MAAM;IACN,SAAS,aAAa;IACtB,cAAc,aAAa;IAC3B,2BAAW,IAAI,MAAM;IACtB,CACF;GACF,EACF,EACD;GAAE,QAAQ;GAAM,KAAK;GAAM,CAC5B;AAIH,MAAI,MACF,SAAS,KAAK,UAAU;GAAE,MAAM;GAAM,UAAU;GAAc,CAAC,CAAC,MACjE;AACD,MAAI,KAAK;GACT,CACD,OAAO,QAAQ;AAEd,MAAI,MACF,uBAAuB,KAAK,UAAU,EAAE,SAAS,IAAI,SAAS,CAAC,CAAC,MACjE;AACD,MAAI,KAAK;GACT;;AAeN,MAAa,eAAe,OAC1B,KACA,KACA,UACkB;AAClB,KAAI;EACF,MAAM,EAAE,MAAM,WAAW,eAAe,aAAa,iBACnD,IAAI;EAEN,IAAIA;AACJ,MAAI;AACF,cAAW,MAAM,YACf;IACE,aAAa;IACb,gBAAgBgB;IAChB,YAAY,CAAC,SAAS;IACvB,EACD,CAAC,CAAC,IAAI,OAAO,KACd;WACM,QAAQ;AACf,gBAAa,2BAA2B,KAAK,mBAAmB;AAChE;;EAeF,MAAM,eACJ,eAA4D,EAC1D,MAdc,MAAMC,eAA8B;GACpD;GACA;GACA,oBAAoB,WAAW;GAC/B;GACA;GACA;GACD,CAAC,IAAK;GACL,gBAAgB;GAChB,WAAW;GACZ,EAKE,CAAC;AAEJ,MAAI,KAAK,aAAa;UACf,OAAO;AACd,eAAa,uBAAuB,KAAK,MAAkB;AAC3D;;;;;;;AAkBJ,MAAa,iBAAiB,OAC5B,KACA,KACA,UACkB;CAClB,MAAM,EAAE,MAAM,UAAU,IAAI;CAC5B,MAAM,EAAE,SAAS,aAAa,UAAU,MAAM,MAAM,qBAClD,kCAAkC,KAAK,IAAI;CAK7C,MAAM,kBAJwB,IAAI,OAAe,oBAIA;AAEjD,KAAI,CAAC,MAAM;AACT,eAAa,
2BAA2B,KAAK,mBAAmB;AAChE;;AAGF,KAAI;EACF,MAAM,aAAa,kBAAkB,EAAE,GAAG,EAAE,UAAU,GAAG;EACzD,MAAM,cAAc,MAAM,gBAAgB,KAAK,SAAS,WAAW,CAChE,KAAK,YAAY,CACjB,KAAK,KAAK,CACV,MAAM,SAAS,CACf,MAAM;EAGT,MAAMC,uBAA+C,EAAE;AACvD,MAAI,CAAC,mBAAmB,YAAY,SAAS,GAAG;GAC9C,MAAM,MAAM,YAAY,KAAK,MAAW,EAAE,IAAI;GAC9C,MAAM,SAAS,MAAM,gBAAgB,UAAU,CAC7C,EAAE,QAAQ,EAAE,KAAK,EAAE,KAAK,KAAK,EAAE,EAAE,EACjC,EACE,UAAU,EACR,kBAAkB,EAAE,OAAO,EAAE,SAAS,CAAC,aAAa,EAAE,CAAC,EAAE,EAAE,EAC5D,EACF,CACF,CAAC;AACF,QAAK,MAAM,KAAK,OACd,sBAAqB,OAAO,EAAE,IAAI,IAAI,EAAE,oBAAoB;;EAKhE,MAAM,iBAAiB,YAAY,OAChC,MAAM,OAAO,EAAE,OAAO,KAAK,OAAO,KAAK,GAAG,CAC5C;AAGD,MAAI,EAFc,MAAM,SAAS,QAAQ,IAAI,iBAE7B;AACd,gBAAa,2BAA2B,KAAK,oBAAoB;AACjE;;EAGF,MAAM,aAAa,MAAM,gBAAgB,eAAe,QAAQ;EAEhE,MAAM,eAAe,wBAAwB;GAC3C,MAAM,YAAY,KAAK,OAAY;IACjC,GAAG;IACH,IAAI,OAAO,EAAE,OAAO,EAAE,GAAG;IACzB,kBAAkB,kBACd,MAAM,QAAQ,EAAE,SAAS,GACvB,EAAE,SAAS,SACX,IACD,qBAAqB,OAAO,EAAE,OAAO,EAAE,GAAG,KAAK;IACrD,EAAE;GACH;GACA;GACA,YAAY,iBAAiB,WAAW;GACxC;GACD,CAAC;AAEF,MAAI,KAAK,aAAoB;AAC7B;UACO,OAAO;AACd,eAAa,uBAAuB,KAAK,MAAkB;AAC3D"}
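The map above embeds the updated ai.controller source. Its askDocQuestion handler streams the answer over Server-Sent Events: it flushes headers immediately, writes a `: connected` comment frame, emits `data: {"chunk": ...}` frames as tokens arrive, a final `data: {"done": true, ...}` frame, and an `event: error` frame on failure, persisting the discussion only when the last user message exceeds two words. A minimal client sketch for that wire format follows; the endpoint path is an assumption, while the frame shapes come from the embedded source:

// Hypothetical client for the SSE format emitted by askDocQuestion.
// The '/api/ai/ask' path is assumed; frame shapes match the controller source.
const res = await fetch('/api/ai/ask', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    discussionId: 'demo-discussion',
    messages: [{ role: 'user', content: 'How do I configure locales?' }],
  }),
});

const reader = res.body!.getReader();
const decoder = new TextDecoder();
let buffer = '';

while (true) {
  const { done, value } = await reader.read();
  if (done) break;
  buffer += decoder.decode(value, { stream: true });

  // SSE frames are separated by a blank line; comment frames carry no data.
  let idx: number;
  while ((idx = buffer.indexOf('\n\n')) !== -1) {
    const frame = buffer.slice(0, idx);
    buffer = buffer.slice(idx + 2);
    const dataLine = frame.split('\n').find((l) => l.startsWith('data: '));
    if (dataLine) {
      const payload = JSON.parse(dataLine.slice('data: '.length));
      if (payload.chunk) process.stdout.write(payload.chunk); // incremental token batch
      if (payload.done) console.log('\n[stream complete]');   // final frame
    }
  }
}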
package/dist/esm/export.mjs
CHANGED
@@ -1,6 +1,5 @@
 import { HttpStatusCodes } from "./utils/httpStatusCodes.mjs";
 import { formatPaginatedResponse, formatResponse } from "./utils/responseData.mjs";
-import { AIProvider, getAIConfig } from "./utils/AI/aiSdk.mjs";
 import { getAiRoutes } from "./routes/ai.routes.mjs";
 import { getDictionaryRoutes } from "./routes/dictionary.routes.mjs";
 import { getNewsletterRoutes } from "./routes/newsletter.routes.mjs";
@@ -9,4 +8,6 @@ import { getProjectRoutes } from "./routes/project.routes.mjs";
 import { getUserRoutes } from "./routes/user.routes.mjs";
 import { formatSession, getAuth } from "./utils/auth/getAuth.mjs";
 
-export
+export * from "@utils/AI/aiSdk"
+
+export { HttpStatusCodes, formatPaginatedResponse, formatResponse, formatSession, getAiRoutes, getAuth, getDictionaryRoutes, getNewsletterRoutes, getOrganizationRoutes, getProjectRoutes, getUserRoutes };
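Net effect of the export.mjs change: instead of importing and re-exporting two named AI SDK symbols, the entry point now re-exports the whole module via `export * from "@utils/AI/aiSdk"`. Assuming `getAIConfig` (used throughout the controllers above) is still exported by that module, which the wildcard implies but this diff does not show, consumer imports should be unaffected. A sketch of the expected consumer usage, with the call shape taken from the controller source:

// Assumes the package entry point re-exports getAIConfig via the new wildcard.
import { getAIConfig } from '@intlayer/backend';

const resolveConfig = async () => {
  // Same call shape the controllers use: options object plus a "has user" flag.
  const aiConfig = await getAIConfig(
    {
      userOptions: {},        // caller-supplied AIOptions
      accessType: ['public'], // access gate, as in askDocQuestion
    },
    false // corresponds to !!res.locals.user in the controllers
  );
  return aiConfig;
};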
package/dist/esm/utils/AI/askDocQuestion/askDocQuestion.mjs
CHANGED
@@ -1,7 +1,7 @@
 import { readAsset } from "../../../_virtual/_utils_asset.mjs";
+import { streamText } from "@intlayer/ai";
 import { getMarkdownMetadata } from "@intlayer/core";
 import { getBlogs, getDocs, getFrequentQuestions } from "@intlayer/docs";
-import { streamText } from "ai";
 import { OpenAI } from "openai";
 
 //#region src/utils/AI/askDocQuestion/askDocQuestion.ts
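The only change to askDocQuestion.mjs itself is where `streamText` comes from: the third-party `ai` package is replaced by `@intlayer/ai`. The sourcemaps below show the call site is untouched, so the new module presumably exposes a compatible interface: a function taking the spread AIConfig plus messages and returning an object whose `textStream` async-iterates string chunks. A sketch of that consumption pattern, lifted from the embedded source (the config value and its type are assumptions):

import { streamText } from '@intlayer/ai';

// aiConfig is assumed to be an AIConfig resolved via getAIConfig.
const streamAnswer = async (
  aiConfig: object,
  onMessage?: (chunk: string) => void
) => {
  const stream = streamText({
    ...aiConfig,
    messages: [{ role: 'system' as const, content: 'You are a docs assistant.' }],
  });

  let fullResponse = '';
  for await (const chunk of stream.textStream) {
    fullResponse += chunk; // accumulate the full answer
    onMessage?.(chunk);    // forward each chunk (e.g. over SSE)
  }
  return fullResponse;
};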
package/dist/esm/utils/AI/askDocQuestion/askDocQuestion.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"askDocQuestion.mjs","names":["vectorStore: VectorStoreEl[]","MAX_RELEVANT_CHUNKS_NB: number","MIN_RELEVANT_CHUNKS_SIMILARITY: number","EMBEDDING_MODEL: OpenAI.EmbeddingModel","OVERLAP_TOKENS: number","MAX_CHUNK_TOKENS: number","CHAR_BY_TOKEN: number","MAX_CHARS: number","OVERLAP_CHARS: number","chunks: string[]","resultForFile: Record<string, number[] | undefined>","initPrompt: ChatCompletionRequestMessage"],"sources":["../../../../../src/utils/AI/askDocQuestion/askDocQuestion.ts"],"sourcesContent":["import { readAsset } from 'utils:asset';\nimport { getMarkdownMetadata } from '@intlayer/core';\nimport { getBlogs, getDocs, getFrequentQuestions } from '@intlayer/docs';\nimport { streamText } from 'ai';\nimport { OpenAI } from 'openai';\nimport type { AIConfig, ChatCompletionRequestMessage } from '../aiSdk';\n\nconst readEmbeddingsForFile = (fileKey: string): Record<string, number[]> => {\n try {\n return JSON.parse(\n readAsset(`./embeddings/${fileKey.replace('.md', '.json')}`, 'utf-8')\n ) as Record<string, number[]>;\n } catch {\n return {};\n }\n};\n\ntype VectorStoreEl = {\n fileKey: string;\n chunkNumber: number;\n content: string;\n embedding?: number[];\n docUrl: string;\n docName: string;\n};\n\n/**\n * Simple in-memory vector store to hold document embeddings and their content.\n * Each entry contains:\n * - fileKey: A unique key identifying the file\n * - chunkNumber: The number of the chunk within the document\n * - content: The chunk content\n * - embedding: The numerical embedding vector for the chunk\n */\nconst vectorStore: VectorStoreEl[] = [];\n\n/*\n * Ask question AI configuration\n */\nconst MAX_RELEVANT_CHUNKS_NB: number = 20; // Maximum number of relevant chunks to attach to chatGPT context\nconst MIN_RELEVANT_CHUNKS_SIMILARITY: number = 0.42; // Minimum similarity required for a chunk to be considered relevant\n\n/*\n * Embedding model configuration\n */\nconst EMBEDDING_MODEL: OpenAI.EmbeddingModel = 'text-embedding-3-large'; // Model to use for embedding generation\nconst OVERLAP_TOKENS: number = 200; // Number of tokens to overlap between chunks\nconst MAX_CHUNK_TOKENS: number = 800; // Maximum number of tokens per chunk\nconst CHAR_BY_TOKEN: number = 4.15; // Approximate pessimistically the number of characters per token // Can use `tiktoken` or other tokenizers to calculate it more precisely\nconst MAX_CHARS: number = MAX_CHUNK_TOKENS * CHAR_BY_TOKEN;\nconst OVERLAP_CHARS: number = OVERLAP_TOKENS * CHAR_BY_TOKEN;\n\nconst skipDocEmbeddingsIndex = process.env.SKIP_DOC_EMBEDDINGS_INDEX === 'true';\n\n/**\n * Splits a given text into chunks ensuring each chunk does not exceed MAX_CHARS.\n * @param text - The input text to split.\n * @returns - Array of text chunks.\n */\nconst chunkText = (text: string): string[] => {\n const chunks: string[] = [];\n let start = 0;\n\n while (start < text.length) {\n let end = Math.min(start + MAX_CHARS, text.length);\n\n // Ensure we don't cut words in the middle (find nearest space)\n if (end < text.length) {\n const lastSpace = text.lastIndexOf(' ', end);\n if (lastSpace > start) {\n end = lastSpace;\n }\n }\n\n chunks.push(text.substring(start, end));\n\n // Move start forward correctly\n const nextStart = end - OVERLAP_CHARS;\n if (nextStart <= start) {\n // Prevent infinite loop if overlap is too large\n start = end;\n } else {\n start = nextStart;\n }\n }\n\n return chunks;\n};\n\n/**\n * Generates an embedding for a given text using OpenAI's embedding API.\n * Trims the text if it exceeds the maximum 
allowed characters.\n *\n * @param text - The input text to generate an embedding for\n * @returns The embedding vector as a number array\n */\nconst generateEmbedding = async (text: string): Promise<number[]> => {\n try {\n const openaiClient = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });\n\n const response = await openaiClient.embeddings.create({\n model: EMBEDDING_MODEL,\n input: text,\n });\n\n return response.data[0].embedding;\n } catch (error) {\n console.error('Error generating embedding:', error);\n return [];\n }\n};\n\n/**\n * Calculates the cosine similarity between two vectors.\n * Cosine similarity measures the cosine of the angle between two vectors in an inner product space.\n * Used to determine the similarity between chunks of text.\n *\n * @param vecA - The first vector\n * @param vecB - The second vector\n * @returns The cosine similarity score\n */\nconst cosineSimilarity = (vecA: number[], vecB: number[]): number => {\n // Calculate the dot product of the two vectors\n const dotProduct = vecA.reduce((sum, a, idx) => sum + a * vecB[idx], 0);\n\n // Calculate the magnitude (Euclidean norm) of each vector\n const magnitudeA = Math.sqrt(vecA.reduce((sum, a) => sum + a * a, 0));\n const magnitudeB = Math.sqrt(vecB.reduce((sum, b) => sum + b * b, 0));\n\n // Compute and return the cosine similarity\n return dotProduct / (magnitudeA * magnitudeB);\n};\n\n/**\n * Indexes all Markdown documents by generating embeddings for each chunk and storing them in memory.\n * Persists per-document embeddings under `embeddings/<fileKey>.json`.\n * Handles cases where files have been updated and chunk counts have changed.\n */\nexport const loadMarkdownFiles = async (): Promise<void> => {\n // Retrieve documentation and blog posts in English locale\n const frequentQuestions = await getFrequentQuestions();\n const docs = await getDocs();\n const blogs = await getBlogs();\n\n const files = { ...docs, ...blogs, ...frequentQuestions }; // Combine docs and blogs into a single object\n\n // Iterate over each file key (identifier) in the combined files\n for await (const fileKey of Object.keys(files)) {\n // Get the metadata of the file\n const fileMetadata = getMarkdownMetadata(\n files[fileKey as keyof typeof files] as string\n );\n\n // Split the document into chunks based on headings\n const fileChunks = chunkText(\n files[fileKey as keyof typeof files] as string\n );\n\n // Read existing embeddings for this file\n const existingEmbeddings = readEmbeddingsForFile(fileKey);\n\n // Check if the number of chunks has changed for this file\n const existingChunksForFile = Object.keys(existingEmbeddings);\n const currentChunkCount = fileChunks.length;\n const previousChunkCount = existingChunksForFile.length;\n\n let shouldRegenerateFileEmbeddings = false;\n\n // If chunk count differs, we need to regenerate embeddings for this file\n if (currentChunkCount !== previousChunkCount) {\n console.info(\n `File \"${fileKey}\" chunk count changed: ${previousChunkCount} -> ${currentChunkCount}. 
Regenerating embeddings.`\n );\n\n shouldRegenerateFileEmbeddings = !skipDocEmbeddingsIndex;\n }\n\n // Iterate over each chunk within the current file\n let resultForFile: Record<string, number[] | undefined> = {};\n for await (const chunkIndex of Object.keys(fileChunks)) {\n const chunkNumber = Number(chunkIndex) + 1; // Chunk number starts at 1\n const chunksNumber = fileChunks.length;\n\n const fileChunk = fileChunks[\n chunkIndex as keyof typeof fileChunks\n ] as string;\n\n const chunkKeyName = `chunk_${chunkNumber}`; // Unique key for the chunk within the file\n\n // Retrieve precomputed embedding if available and file hasn't changed\n const docEmbedding = !shouldRegenerateFileEmbeddings\n ? (existingEmbeddings[\n chunkKeyName as keyof typeof existingEmbeddings\n ] as number[] | undefined)\n : undefined;\n\n const embedding = docEmbedding; // Use existing embedding if available and valid\n\n // Update the file-scoped result object with the embedding\n resultForFile = { ...resultForFile, [chunkKeyName]: embedding };\n\n // Store the embedding and content in the in-memory vector store\n vectorStore.push({\n fileKey,\n chunkNumber,\n embedding,\n content: fileChunk,\n docUrl: fileMetadata.url,\n docName: fileMetadata.title,\n });\n\n console.info(`- Loaded: ${fileKey}/${chunkKeyName}/${chunksNumber}`);\n }\n }\n};\n\n// Automatically index Markdown files\nloadMarkdownFiles();\n\n/**\n * Searches the indexed documents for the most relevant chunks based on a query.\n * Utilizes cosine similarity to find the closest matching embeddings.\n *\n * @param query - The search query provided by the user\n * @returns An array of the top matching document chunks' content\n */\nexport const searchChunkReference = async (\n query: string,\n maxResults: number = MAX_RELEVANT_CHUNKS_NB,\n minSimilarity: number = MIN_RELEVANT_CHUNKS_SIMILARITY\n): Promise<VectorStoreEl[]> => {\n // Generate an embedding for the user's query\n const queryEmbedding = await generateEmbedding(query);\n\n // Calculate similarity scores between the query embedding and each document's embedding\n const selection = vectorStore\n .filter((chunk) => chunk.embedding)\n .map((chunk) => ({\n ...chunk,\n similarity: cosineSimilarity(queryEmbedding, chunk.embedding!), // Add similarity score to each doc\n }))\n .filter((chunk) => chunk.similarity > minSimilarity) // Filter out documents with low similarity scores\n .sort((a, b) => b.similarity - a.similarity) // Sort documents by highest similarity first\n .slice(0, maxResults); // Select the top 6 most similar documents\n\n const orderedDocKeys = new Set(selection.map((chunk) => chunk.fileKey));\n\n const orderedVectorStore = vectorStore.sort((a, _b) =>\n orderedDocKeys.has(a.fileKey) ? 
-1 : 1\n );\n\n const results = orderedVectorStore.filter((chunk) =>\n selection.some(\n (v) => v.fileKey === chunk.fileKey && v.chunkNumber === chunk.chunkNumber\n )\n );\n\n // Return the content of the top matching documents\n return results;\n};\n\nconst CHAT_GPT_PROMPT = readAsset('./PROMPT.md');\n\n// Initial prompt configuration for the chatbot\nexport const initPrompt: ChatCompletionRequestMessage = {\n role: 'system',\n content: CHAT_GPT_PROMPT,\n};\n\nexport type AskDocQuestionResult = {\n response: string;\n relatedFiles: string[];\n};\n\nexport type AskDocQuestionOptions = {\n onMessage?: (chunk: string) => void;\n};\n\n/**\n * Handles the \"Ask a question\" endpoint in an Express.js route.\n * Processes user messages, retrieves relevant documents, and interacts with AI models to generate responses.\n *\n * @param messages - An array of chat messages from the user and assistant\n * @returns The assistant's response as a string\n */\nexport const askDocQuestion = async (\n messages: ChatCompletionRequestMessage[],\n aiConfig: AIConfig,\n options?: AskDocQuestionOptions\n): Promise<AskDocQuestionResult> => {\n // Format the user's question to keep only the relevant keywords\n const query = messages\n .filter((message) => message.role === 'user')\n .map((message) => `- ${message.content}`)\n .join('\\n');\n\n // 1) Find relevant documents based on the user's question\n const relevantFilesReferences = await searchChunkReference(query);\n\n // 2) Integrate the relevant documents into the initial system prompt\n const systemPrompt = initPrompt.content.replace(\n '{{relevantFilesReferences}}',\n relevantFilesReferences.length === 0\n ? 'Not relevant file found related to the question.'\n : relevantFilesReferences\n .map((doc, idx) =>\n [\n '-----',\n '---',\n `chunkId: ${idx}`,\n `docChunk: \"${doc.chunkNumber}/${doc.fileKey.length}\"`,\n `docName: \"${doc.docName}\"`,\n `docUrl: \"${doc.docUrl}\"`,\n `---`,\n doc.content,\n `-----`,\n ].join('\\n')\n )\n .join('\\n\\n') // Insert relevant docs into the prompt\n );\n\n // Format messages for AI SDK\n const aiMessages = [\n {\n role: 'system' as const,\n content: systemPrompt,\n },\n ...messages.slice(-8),\n ];\n\n if (!aiConfig) {\n throw new Error('Failed to initialize AI configuration');\n }\n\n // 3) Use the AI SDK to stream the response\n let fullResponse = '';\n const stream = streamText({\n ...aiConfig,\n messages: aiMessages,\n });\n\n // Process the stream\n for await (const chunk of stream.textStream) {\n fullResponse += chunk;\n options?.onMessage?.(chunk);\n }\n\n // 4) Extract unique related files\n const relatedFiles = [\n ...new Set(relevantFilesReferences.map((doc) => doc.fileKey)),\n ];\n\n // 5) Return the assistant's response to the user\n return {\n response: fullResponse ?? 
'Error: No result found',\n relatedFiles,\n };\n};\n"],"mappings":";;;;;;;AAOA,MAAM,yBAAyB,YAA8C;AAC3E,KAAI;AACF,SAAO,KAAK,MACV,UAAU,gBAAgB,QAAQ,QAAQ,OAAO,QAAQ,IAAI,QAAQ,CACtE;SACK;AACN,SAAO,EAAE;;;;;;;;;;;AAqBb,MAAMA,cAA+B,EAAE;AAKvC,MAAMC,yBAAiC;AACvC,MAAMC,iCAAyC;AAK/C,MAAMC,kBAAyC;AAC/C,MAAMC,iBAAyB;AAC/B,MAAMC,mBAA2B;AACjC,MAAMC,gBAAwB;AAC9B,MAAMC,YAAoB,mBAAmB;AAC7C,MAAMC,gBAAwB,iBAAiB;AAE/C,MAAM,yBAAyB,QAAQ,IAAI,8BAA8B;;;;;;AAOzE,MAAM,aAAa,SAA2B;CAC5C,MAAMC,SAAmB,EAAE;CAC3B,IAAI,QAAQ;AAEZ,QAAO,QAAQ,KAAK,QAAQ;EAC1B,IAAI,MAAM,KAAK,IAAI,QAAQ,WAAW,KAAK,OAAO;AAGlD,MAAI,MAAM,KAAK,QAAQ;GACrB,MAAM,YAAY,KAAK,YAAY,KAAK,IAAI;AAC5C,OAAI,YAAY,MACd,OAAM;;AAIV,SAAO,KAAK,KAAK,UAAU,OAAO,IAAI,CAAC;EAGvC,MAAM,YAAY,MAAM;AACxB,MAAI,aAAa,MAEf,SAAQ;MAER,SAAQ;;AAIZ,QAAO;;;;;;;;;AAUT,MAAM,oBAAoB,OAAO,SAAoC;AACnE,KAAI;AAQF,UALiB,MAFI,IAAI,OAAO,EAAE,QAAQ,QAAQ,IAAI,gBAAgB,CAAC,CAEnC,WAAW,OAAO;GACpD,OAAO;GACP,OAAO;GACR,CAAC,EAEc,KAAK,GAAG;UACjB,OAAO;AACd,UAAQ,MAAM,+BAA+B,MAAM;AACnD,SAAO,EAAE;;;;;;;;;;;;AAab,MAAM,oBAAoB,MAAgB,SAA2B;AASnE,QAPmB,KAAK,QAAQ,KAAK,GAAG,QAAQ,MAAM,IAAI,KAAK,MAAM,EAAE,IAGpD,KAAK,KAAK,KAAK,QAAQ,KAAK,MAAM,MAAM,IAAI,GAAG,EAAE,CAAC,GAClD,KAAK,KAAK,KAAK,QAAQ,KAAK,MAAM,MAAM,IAAI,GAAG,EAAE,CAAC;;;;;;;AAWvE,MAAa,oBAAoB,YAA2B;CAE1D,MAAM,oBAAoB,MAAM,sBAAsB;CACtD,MAAM,OAAO,MAAM,SAAS;CAC5B,MAAM,QAAQ,MAAM,UAAU;CAE9B,MAAM,QAAQ;EAAE,GAAG;EAAM,GAAG;EAAO,GAAG;EAAmB;AAGzD,YAAW,MAAM,WAAW,OAAO,KAAK,MAAM,EAAE;EAE9C,MAAM,eAAe,oBACnB,MAAM,SACP;EAGD,MAAM,aAAa,UACjB,MAAM,SACP;EAGD,MAAM,qBAAqB,sBAAsB,QAAQ;EAGzD,MAAM,wBAAwB,OAAO,KAAK,mBAAmB;EAC7D,MAAM,oBAAoB,WAAW;EACrC,MAAM,qBAAqB,sBAAsB;EAEjD,IAAI,iCAAiC;AAGrC,MAAI,sBAAsB,oBAAoB;AAC5C,WAAQ,KACN,SAAS,QAAQ,yBAAyB,mBAAmB,MAAM,kBAAkB,4BACtF;AAED,oCAAiC,CAAC;;EAIpC,IAAIC,gBAAsD,EAAE;AAC5D,aAAW,MAAM,cAAc,OAAO,KAAK,WAAW,EAAE;GACtD,MAAM,cAAc,OAAO,WAAW,GAAG;GACzC,MAAM,eAAe,WAAW;GAEhC,MAAM,YAAY,WAChB;GAGF,MAAM,eAAe,SAAS;GAS9B,MAAM,YANe,CAAC,iCACjB,mBACC,gBAEF;AAKJ,mBAAgB;IAAE,GAAG;KAAgB,eAAe;IAAW;AAG/D,eAAY,KAAK;IACf;IACA;IACA;IACA,SAAS;IACT,QAAQ,aAAa;IACrB,SAAS,aAAa;IACvB,CAAC;AAEF,WAAQ,KAAK,aAAa,QAAQ,GAAG,aAAa,GAAG,eAAe;;;;AAM1E,mBAAmB;;;;;;;;AASnB,MAAa,uBAAuB,OAClC,OACA,aAAqB,wBACrB,gBAAwB,mCACK;CAE7B,MAAM,iBAAiB,MAAM,kBAAkB,MAAM;CAGrD,MAAM,YAAY,YACf,QAAQ,UAAU,MAAM,UAAU,CAClC,KAAK,WAAW;EACf,GAAG;EACH,YAAY,iBAAiB,gBAAgB,MAAM,UAAW;EAC/D,EAAE,CACF,QAAQ,UAAU,MAAM,aAAa,cAAc,CACnD,MAAM,GAAG,MAAM,EAAE,aAAa,EAAE,WAAW,CAC3C,MAAM,GAAG,WAAW;CAEvB,MAAM,iBAAiB,IAAI,IAAI,UAAU,KAAK,UAAU,MAAM,QAAQ,CAAC;AAavE,QAX2B,YAAY,MAAM,GAAG,OAC9C,eAAe,IAAI,EAAE,QAAQ,GAAG,KAAK,EACtC,CAEkC,QAAQ,UACzC,UAAU,MACP,MAAM,EAAE,YAAY,MAAM,WAAW,EAAE,gBAAgB,MAAM,YAC/D,CACF;;AAMH,MAAM,kBAAkB,UAAU,cAAc;AAGhD,MAAaC,aAA2C;CACtD,MAAM;CACN,SAAS;CACV;;;;;;;;AAkBD,MAAa,iBAAiB,OAC5B,UACA,UACA,YACkC;CAQlC,MAAM,0BAA0B,MAAM,qBANxB,SACX,QAAQ,YAAY,QAAQ,SAAS,OAAO,CAC5C,KAAK,YAAY,KAAK,QAAQ,UAAU,CACxC,KAAK,KAAK,CAGoD;CAyBjE,MAAM,aAAa,CACjB;EACE,MAAM;EACN,SAzBiB,WAAW,QAAQ,QACtC,+BACA,wBAAwB,WAAW,IAC/B,qDACA,wBACG,KAAK,KAAK,QACT;GACE;GACA;GACA,YAAY;GACZ,cAAc,IAAI,YAAY,GAAG,IAAI,QAAQ,OAAO;GACpD,aAAa,IAAI,QAAQ;GACzB,YAAY,IAAI,OAAO;GACvB;GACA,IAAI;GACJ;GACD,CAAC,KAAK,KAAK,CACb,CACA,KAAK,OAAO,CACpB;EAOE,EACD,GAAG,SAAS,MAAM,GAAG,CACtB;AAED,KAAI,CAAC,SACH,OAAM,IAAI,MAAM,wCAAwC;CAI1D,IAAI,eAAe;CACnB,MAAM,SAAS,WAAW;EACxB,GAAG;EACH,UAAU;EACX,CAAC;AAGF,YAAW,MAAM,SAAS,OAAO,YAAY;AAC3C,kBAAgB;AAChB,WAAS,YAAY,MAAM;;CAI7B,MAAM,eAAe,CACnB,GAAG,IAAI,IAAI,wBAAwB,KAAK,QAAQ,IAAI,QAAQ,CAAC,CAC9D;AAGD,QAAO;EACL,UAAU,gBAAgB;EAC1B;EACD"}
+
{"version":3,"file":"askDocQuestion.mjs","names":["vectorStore: VectorStoreEl[]","MAX_RELEVANT_CHUNKS_NB: number","MIN_RELEVANT_CHUNKS_SIMILARITY: number","EMBEDDING_MODEL: OpenAI.EmbeddingModel","OVERLAP_TOKENS: number","MAX_CHUNK_TOKENS: number","CHAR_BY_TOKEN: number","MAX_CHARS: number","OVERLAP_CHARS: number","chunks: string[]","resultForFile: Record<string, number[] | undefined>","initPrompt: ChatCompletionRequestMessage"],"sources":["../../../../../src/utils/AI/askDocQuestion/askDocQuestion.ts"],"sourcesContent":["import { readAsset } from 'utils:asset';\nimport type { AIConfig, ChatCompletionRequestMessage } from '@intlayer/ai';\nimport { streamText } from '@intlayer/ai';\nimport { getMarkdownMetadata } from '@intlayer/core';\nimport { getBlogs, getDocs, getFrequentQuestions } from '@intlayer/docs';\nimport { OpenAI } from 'openai';\n\nconst readEmbeddingsForFile = (fileKey: string): Record<string, number[]> => {\n try {\n return JSON.parse(\n readAsset(`./embeddings/${fileKey.replace('.md', '.json')}`, 'utf-8')\n ) as Record<string, number[]>;\n } catch {\n return {};\n }\n};\n\ntype VectorStoreEl = {\n fileKey: string;\n chunkNumber: number;\n content: string;\n embedding?: number[];\n docUrl: string;\n docName: string;\n};\n\n/**\n * Simple in-memory vector store to hold document embeddings and their content.\n * Each entry contains:\n * - fileKey: A unique key identifying the file\n * - chunkNumber: The number of the chunk within the document\n * - content: The chunk content\n * - embedding: The numerical embedding vector for the chunk\n */\nconst vectorStore: VectorStoreEl[] = [];\n\n/*\n * Ask question AI configuration\n */\nconst MAX_RELEVANT_CHUNKS_NB: number = 20; // Maximum number of relevant chunks to attach to chatGPT context\nconst MIN_RELEVANT_CHUNKS_SIMILARITY: number = 0.42; // Minimum similarity required for a chunk to be considered relevant\n\n/*\n * Embedding model configuration\n */\nconst EMBEDDING_MODEL: OpenAI.EmbeddingModel = 'text-embedding-3-large'; // Model to use for embedding generation\nconst OVERLAP_TOKENS: number = 200; // Number of tokens to overlap between chunks\nconst MAX_CHUNK_TOKENS: number = 800; // Maximum number of tokens per chunk\nconst CHAR_BY_TOKEN: number = 4.15; // Approximate pessimistically the number of characters per token // Can use `tiktoken` or other tokenizers to calculate it more precisely\nconst MAX_CHARS: number = MAX_CHUNK_TOKENS * CHAR_BY_TOKEN;\nconst OVERLAP_CHARS: number = OVERLAP_TOKENS * CHAR_BY_TOKEN;\n\nconst skipDocEmbeddingsIndex = process.env.SKIP_DOC_EMBEDDINGS_INDEX === 'true';\n\n/**\n * Splits a given text into chunks ensuring each chunk does not exceed MAX_CHARS.\n * @param text - The input text to split.\n * @returns - Array of text chunks.\n */\nconst chunkText = (text: string): string[] => {\n const chunks: string[] = [];\n let start = 0;\n\n while (start < text.length) {\n let end = Math.min(start + MAX_CHARS, text.length);\n\n // Ensure we don't cut words in the middle (find nearest space)\n if (end < text.length) {\n const lastSpace = text.lastIndexOf(' ', end);\n if (lastSpace > start) {\n end = lastSpace;\n }\n }\n\n chunks.push(text.substring(start, end));\n\n // Move start forward correctly\n const nextStart = end - OVERLAP_CHARS;\n if (nextStart <= start) {\n // Prevent infinite loop if overlap is too large\n start = end;\n } else {\n start = nextStart;\n }\n }\n\n return chunks;\n};\n\n/**\n * Generates an embedding for a given text using OpenAI's embedding API.\n * Trims the text if it exceeds 
the maximum allowed characters.\n *\n * @param text - The input text to generate an embedding for\n * @returns The embedding vector as a number array\n */\nconst generateEmbedding = async (text: string): Promise<number[]> => {\n try {\n const openaiClient = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });\n\n const response = await openaiClient.embeddings.create({\n model: EMBEDDING_MODEL,\n input: text,\n });\n\n return response.data[0].embedding;\n } catch (error) {\n console.error('Error generating embedding:', error);\n return [];\n }\n};\n\n/**\n * Calculates the cosine similarity between two vectors.\n * Cosine similarity measures the cosine of the angle between two vectors in an inner product space.\n * Used to determine the similarity between chunks of text.\n *\n * @param vecA - The first vector\n * @param vecB - The second vector\n * @returns The cosine similarity score\n */\nconst cosineSimilarity = (vecA: number[], vecB: number[]): number => {\n // Calculate the dot product of the two vectors\n const dotProduct = vecA.reduce((sum, a, idx) => sum + a * vecB[idx], 0);\n\n // Calculate the magnitude (Euclidean norm) of each vector\n const magnitudeA = Math.sqrt(vecA.reduce((sum, a) => sum + a * a, 0));\n const magnitudeB = Math.sqrt(vecB.reduce((sum, b) => sum + b * b, 0));\n\n // Compute and return the cosine similarity\n return dotProduct / (magnitudeA * magnitudeB);\n};\n\n/**\n * Indexes all Markdown documents by generating embeddings for each chunk and storing them in memory.\n * Persists per-document embeddings under `embeddings/<fileKey>.json`.\n * Handles cases where files have been updated and chunk counts have changed.\n */\nexport const loadMarkdownFiles = async (): Promise<void> => {\n // Retrieve documentation and blog posts in English locale\n const frequentQuestions = await getFrequentQuestions();\n const docs = await getDocs();\n const blogs = await getBlogs();\n\n const files = { ...docs, ...blogs, ...frequentQuestions }; // Combine docs and blogs into a single object\n\n // Iterate over each file key (identifier) in the combined files\n for await (const fileKey of Object.keys(files)) {\n // Get the metadata of the file\n const fileMetadata = getMarkdownMetadata(\n files[fileKey as keyof typeof files] as string\n );\n\n // Split the document into chunks based on headings\n const fileChunks = chunkText(\n files[fileKey as keyof typeof files] as string\n );\n\n // Read existing embeddings for this file\n const existingEmbeddings = readEmbeddingsForFile(fileKey);\n\n // Check if the number of chunks has changed for this file\n const existingChunksForFile = Object.keys(existingEmbeddings);\n const currentChunkCount = fileChunks.length;\n const previousChunkCount = existingChunksForFile.length;\n\n let shouldRegenerateFileEmbeddings = false;\n\n // If chunk count differs, we need to regenerate embeddings for this file\n if (currentChunkCount !== previousChunkCount) {\n console.info(\n `File \"${fileKey}\" chunk count changed: ${previousChunkCount} -> ${currentChunkCount}. 
Regenerating embeddings.`\n );\n\n shouldRegenerateFileEmbeddings = !skipDocEmbeddingsIndex;\n }\n\n // Iterate over each chunk within the current file\n let resultForFile: Record<string, number[] | undefined> = {};\n for await (const chunkIndex of Object.keys(fileChunks)) {\n const chunkNumber = Number(chunkIndex) + 1; // Chunk number starts at 1\n const chunksNumber = fileChunks.length;\n\n const fileChunk = fileChunks[\n chunkIndex as keyof typeof fileChunks\n ] as string;\n\n const chunkKeyName = `chunk_${chunkNumber}`; // Unique key for the chunk within the file\n\n // Retrieve precomputed embedding if available and file hasn't changed\n const docEmbedding = !shouldRegenerateFileEmbeddings\n ? (existingEmbeddings[\n chunkKeyName as keyof typeof existingEmbeddings\n ] as number[] | undefined)\n : undefined;\n\n const embedding = docEmbedding; // Use existing embedding if available and valid\n\n // Update the file-scoped result object with the embedding\n resultForFile = { ...resultForFile, [chunkKeyName]: embedding };\n\n // Store the embedding and content in the in-memory vector store\n vectorStore.push({\n fileKey,\n chunkNumber,\n embedding,\n content: fileChunk,\n docUrl: fileMetadata.url,\n docName: fileMetadata.title,\n });\n\n console.info(`- Loaded: ${fileKey}/${chunkKeyName}/${chunksNumber}`);\n }\n }\n};\n\n// Automatically index Markdown files\nloadMarkdownFiles();\n\n/**\n * Searches the indexed documents for the most relevant chunks based on a query.\n * Utilizes cosine similarity to find the closest matching embeddings.\n *\n * @param query - The search query provided by the user\n * @returns An array of the top matching document chunks' content\n */\nexport const searchChunkReference = async (\n query: string,\n maxResults: number = MAX_RELEVANT_CHUNKS_NB,\n minSimilarity: number = MIN_RELEVANT_CHUNKS_SIMILARITY\n): Promise<VectorStoreEl[]> => {\n // Generate an embedding for the user's query\n const queryEmbedding = await generateEmbedding(query);\n\n // Calculate similarity scores between the query embedding and each document's embedding\n const selection = vectorStore\n .filter((chunk) => chunk.embedding)\n .map((chunk) => ({\n ...chunk,\n similarity: cosineSimilarity(queryEmbedding, chunk.embedding!), // Add similarity score to each doc\n }))\n .filter((chunk) => chunk.similarity > minSimilarity) // Filter out documents with low similarity scores\n .sort((a, b) => b.similarity - a.similarity) // Sort documents by highest similarity first\n .slice(0, maxResults); // Select the top 6 most similar documents\n\n const orderedDocKeys = new Set(selection.map((chunk) => chunk.fileKey));\n\n const orderedVectorStore = vectorStore.sort((a, _b) =>\n orderedDocKeys.has(a.fileKey) ? 
-1 : 1\n );\n\n const results = orderedVectorStore.filter((chunk) =>\n selection.some(\n (v) => v.fileKey === chunk.fileKey && v.chunkNumber === chunk.chunkNumber\n )\n );\n\n // Return the content of the top matching documents\n return results;\n};\n\nconst CHAT_GPT_PROMPT = readAsset('./PROMPT.md');\n\n// Initial prompt configuration for the chatbot\nexport const initPrompt: ChatCompletionRequestMessage = {\n role: 'system',\n content: CHAT_GPT_PROMPT,\n};\n\nexport type AskDocQuestionResult = {\n response: string;\n relatedFiles: string[];\n};\n\nexport type AskDocQuestionOptions = {\n onMessage?: (chunk: string) => void;\n};\n\n/**\n * Handles the \"Ask a question\" endpoint in an Express.js route.\n * Processes user messages, retrieves relevant documents, and interacts with AI models to generate responses.\n *\n * @param messages - An array of chat messages from the user and assistant\n * @returns The assistant's response as a string\n */\nexport const askDocQuestion = async (\n messages: ChatCompletionRequestMessage[],\n aiConfig: AIConfig,\n options?: AskDocQuestionOptions\n): Promise<AskDocQuestionResult> => {\n // Format the user's question to keep only the relevant keywords\n const query = messages\n .filter((message) => message.role === 'user')\n .map((message) => `- ${message.content}`)\n .join('\\n');\n\n // 1) Find relevant documents based on the user's question\n const relevantFilesReferences = await searchChunkReference(query);\n\n // 2) Integrate the relevant documents into the initial system prompt\n const systemPrompt = initPrompt.content.replace(\n '{{relevantFilesReferences}}',\n relevantFilesReferences.length === 0\n ? 'Not relevant file found related to the question.'\n : relevantFilesReferences\n .map((doc, idx) =>\n [\n '-----',\n '---',\n `chunkId: ${idx}`,\n `docChunk: \"${doc.chunkNumber}/${doc.fileKey.length}\"`,\n `docName: \"${doc.docName}\"`,\n `docUrl: \"${doc.docUrl}\"`,\n `---`,\n doc.content,\n `-----`,\n ].join('\\n')\n )\n .join('\\n\\n') // Insert relevant docs into the prompt\n );\n\n // Format messages for AI SDK\n const aiMessages = [\n {\n role: 'system' as const,\n content: systemPrompt,\n },\n ...messages.slice(-8),\n ];\n\n if (!aiConfig) {\n throw new Error('Failed to initialize AI configuration');\n }\n\n // 3) Use the AI SDK to stream the response\n let fullResponse = '';\n const stream = streamText({\n ...aiConfig,\n messages: aiMessages,\n });\n\n // Process the stream\n for await (const chunk of stream.textStream) {\n fullResponse += chunk;\n options?.onMessage?.(chunk);\n }\n\n // 4) Extract unique related files\n const relatedFiles = [\n ...new Set(relevantFilesReferences.map((doc) => doc.fileKey)),\n ];\n\n // 5) Return the assistant's response to the user\n return {\n response: fullResponse ?? 
'Error: No result found',\n relatedFiles,\n };\n};\n"],"mappings":";;;;;;;AAOA,MAAM,yBAAyB,YAA8C;AAC3E,KAAI;AACF,SAAO,KAAK,MACV,UAAU,gBAAgB,QAAQ,QAAQ,OAAO,QAAQ,IAAI,QAAQ,CACtE;SACK;AACN,SAAO,EAAE;;;;;;;;;;;AAqBb,MAAMA,cAA+B,EAAE;AAKvC,MAAMC,yBAAiC;AACvC,MAAMC,iCAAyC;AAK/C,MAAMC,kBAAyC;AAC/C,MAAMC,iBAAyB;AAC/B,MAAMC,mBAA2B;AACjC,MAAMC,gBAAwB;AAC9B,MAAMC,YAAoB,mBAAmB;AAC7C,MAAMC,gBAAwB,iBAAiB;AAE/C,MAAM,yBAAyB,QAAQ,IAAI,8BAA8B;;;;;;AAOzE,MAAM,aAAa,SAA2B;CAC5C,MAAMC,SAAmB,EAAE;CAC3B,IAAI,QAAQ;AAEZ,QAAO,QAAQ,KAAK,QAAQ;EAC1B,IAAI,MAAM,KAAK,IAAI,QAAQ,WAAW,KAAK,OAAO;AAGlD,MAAI,MAAM,KAAK,QAAQ;GACrB,MAAM,YAAY,KAAK,YAAY,KAAK,IAAI;AAC5C,OAAI,YAAY,MACd,OAAM;;AAIV,SAAO,KAAK,KAAK,UAAU,OAAO,IAAI,CAAC;EAGvC,MAAM,YAAY,MAAM;AACxB,MAAI,aAAa,MAEf,SAAQ;MAER,SAAQ;;AAIZ,QAAO;;;;;;;;;AAUT,MAAM,oBAAoB,OAAO,SAAoC;AACnE,KAAI;AAQF,UALiB,MAFI,IAAI,OAAO,EAAE,QAAQ,QAAQ,IAAI,gBAAgB,CAAC,CAEnC,WAAW,OAAO;GACpD,OAAO;GACP,OAAO;GACR,CAAC,EAEc,KAAK,GAAG;UACjB,OAAO;AACd,UAAQ,MAAM,+BAA+B,MAAM;AACnD,SAAO,EAAE;;;;;;;;;;;;AAab,MAAM,oBAAoB,MAAgB,SAA2B;AASnE,QAPmB,KAAK,QAAQ,KAAK,GAAG,QAAQ,MAAM,IAAI,KAAK,MAAM,EAAE,IAGpD,KAAK,KAAK,KAAK,QAAQ,KAAK,MAAM,MAAM,IAAI,GAAG,EAAE,CAAC,GAClD,KAAK,KAAK,KAAK,QAAQ,KAAK,MAAM,MAAM,IAAI,GAAG,EAAE,CAAC;;;;;;;AAWvE,MAAa,oBAAoB,YAA2B;CAE1D,MAAM,oBAAoB,MAAM,sBAAsB;CACtD,MAAM,OAAO,MAAM,SAAS;CAC5B,MAAM,QAAQ,MAAM,UAAU;CAE9B,MAAM,QAAQ;EAAE,GAAG;EAAM,GAAG;EAAO,GAAG;EAAmB;AAGzD,YAAW,MAAM,WAAW,OAAO,KAAK,MAAM,EAAE;EAE9C,MAAM,eAAe,oBACnB,MAAM,SACP;EAGD,MAAM,aAAa,UACjB,MAAM,SACP;EAGD,MAAM,qBAAqB,sBAAsB,QAAQ;EAGzD,MAAM,wBAAwB,OAAO,KAAK,mBAAmB;EAC7D,MAAM,oBAAoB,WAAW;EACrC,MAAM,qBAAqB,sBAAsB;EAEjD,IAAI,iCAAiC;AAGrC,MAAI,sBAAsB,oBAAoB;AAC5C,WAAQ,KACN,SAAS,QAAQ,yBAAyB,mBAAmB,MAAM,kBAAkB,4BACtF;AAED,oCAAiC,CAAC;;EAIpC,IAAIC,gBAAsD,EAAE;AAC5D,aAAW,MAAM,cAAc,OAAO,KAAK,WAAW,EAAE;GACtD,MAAM,cAAc,OAAO,WAAW,GAAG;GACzC,MAAM,eAAe,WAAW;GAEhC,MAAM,YAAY,WAChB;GAGF,MAAM,eAAe,SAAS;GAS9B,MAAM,YANe,CAAC,iCACjB,mBACC,gBAEF;AAKJ,mBAAgB;IAAE,GAAG;KAAgB,eAAe;IAAW;AAG/D,eAAY,KAAK;IACf;IACA;IACA;IACA,SAAS;IACT,QAAQ,aAAa;IACrB,SAAS,aAAa;IACvB,CAAC;AAEF,WAAQ,KAAK,aAAa,QAAQ,GAAG,aAAa,GAAG,eAAe;;;;AAM1E,mBAAmB;;;;;;;;AASnB,MAAa,uBAAuB,OAClC,OACA,aAAqB,wBACrB,gBAAwB,mCACK;CAE7B,MAAM,iBAAiB,MAAM,kBAAkB,MAAM;CAGrD,MAAM,YAAY,YACf,QAAQ,UAAU,MAAM,UAAU,CAClC,KAAK,WAAW;EACf,GAAG;EACH,YAAY,iBAAiB,gBAAgB,MAAM,UAAW;EAC/D,EAAE,CACF,QAAQ,UAAU,MAAM,aAAa,cAAc,CACnD,MAAM,GAAG,MAAM,EAAE,aAAa,EAAE,WAAW,CAC3C,MAAM,GAAG,WAAW;CAEvB,MAAM,iBAAiB,IAAI,IAAI,UAAU,KAAK,UAAU,MAAM,QAAQ,CAAC;AAavE,QAX2B,YAAY,MAAM,GAAG,OAC9C,eAAe,IAAI,EAAE,QAAQ,GAAG,KAAK,EACtC,CAEkC,QAAQ,UACzC,UAAU,MACP,MAAM,EAAE,YAAY,MAAM,WAAW,EAAE,gBAAgB,MAAM,YAC/D,CACF;;AAMH,MAAM,kBAAkB,UAAU,cAAc;AAGhD,MAAaC,aAA2C;CACtD,MAAM;CACN,SAAS;CACV;;;;;;;;AAkBD,MAAa,iBAAiB,OAC5B,UACA,UACA,YACkC;CAQlC,MAAM,0BAA0B,MAAM,qBANxB,SACX,QAAQ,YAAY,QAAQ,SAAS,OAAO,CAC5C,KAAK,YAAY,KAAK,QAAQ,UAAU,CACxC,KAAK,KAAK,CAGoD;CAyBjE,MAAM,aAAa,CACjB;EACE,MAAM;EACN,SAzBiB,WAAW,QAAQ,QACtC,+BACA,wBAAwB,WAAW,IAC/B,qDACA,wBACG,KAAK,KAAK,QACT;GACE;GACA;GACA,YAAY;GACZ,cAAc,IAAI,YAAY,GAAG,IAAI,QAAQ,OAAO;GACpD,aAAa,IAAI,QAAQ;GACzB,YAAY,IAAI,OAAO;GACvB;GACA,IAAI;GACJ;GACD,CAAC,KAAK,KAAK,CACb,CACA,KAAK,OAAO,CACpB;EAOE,EACD,GAAG,SAAS,MAAM,GAAG,CACtB;AAED,KAAI,CAAC,SACH,OAAM,IAAI,MAAM,wCAAwC;CAI1D,IAAI,eAAe;CACnB,MAAM,SAAS,WAAW;EACxB,GAAG;EACH,UAAU;EACX,CAAC;AAGF,YAAW,MAAM,SAAS,OAAO,YAAY;AAC3C,kBAAgB;AAChB,WAAS,YAAY,MAAM;;CAI7B,MAAM,eAAe,CACnB,GAAG,IAAI,IAAI,wBAAwB,KAAK,QAAQ,IAAI,QAAQ,CAAC,CAC9D;AAGD,QAAO;EACL,UAAU,gBAAgB;EAC1B;EACD"}
package/dist/esm/utils/AI/askDocQuestion/indexMarkdownFiles.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"indexMarkdownFiles.mjs","names":["vectorStore: VectorStoreEl[]","EMBEDDING_MODEL: OpenAI.EmbeddingModel","OVERLAP_TOKENS: number","MAX_CHUNK_TOKENS: number","CHAR_BY_TOKEN: number","MAX_CHARS: number","OVERLAP_CHARS: number","chunks: string[]","resultForFile: Record<string, number[]>"],"sources":["../../../../../src/utils/AI/askDocQuestion/indexMarkdownFiles.ts"],"sourcesContent":["import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'node:fs';\nimport { dirname, join } from 'node:path';\nimport { fileURLToPath } from 'node:url';\nimport { getMarkdownMetadata } from '@intlayer/core';\nimport { getBlogs, getDocs, getFrequentQuestions } from '@intlayer/docs';\nimport dotenv from 'dotenv';\nimport { OpenAI } from 'openai';\nimport { type AIOptions, AIProvider } from '../aiSdk';\n\nconst OUTPUT_EMBEDDINGS_DIR = 'src/utils/AI/askDocQuestion/embeddings';\nconst __dirname = dirname(fileURLToPath(import.meta.url));\n\nconst readEmbeddingsForFile = (fileKey: string): Record<string, number[]> => {\n try {\n return JSON.parse(\n readFileSync(\n `${__dirname}/embeddings/${fileKey.replace('.md', '.json')}`,\n 'utf-8'\n )\n ) as Record<string, number[]>;\n } catch {\n return {};\n }\n};\n\nconst writeEmbeddingsForFile = (\n fileKey: string,\n data: Record<string, number[]>\n): void => {\n const filePath = join(\n OUTPUT_EMBEDDINGS_DIR,\n `${fileKey.replace('.md', '.json')}`\n );\n const dir = dirname(filePath);\n if (!existsSync(dir)) {\n mkdirSync(dir, { recursive: true });\n }\n writeFileSync(filePath, JSON.stringify(data));\n};\n\ntype VectorStoreEl = {\n fileKey: string;\n chunkNumber: number;\n content: string;\n embedding: number[];\n docUrl: string;\n docName: string;\n};\n\n/**\n * Simple in-memory vector store to hold document embeddings and their content.\n * Each entry contains:\n * - fileKey: A unique key identifying the file\n * - chunkNumber: The number of the chunk within the document\n * - content: The chunk content\n * - embedding: The numerical embedding vector for the chunk\n */\nconst vectorStore: VectorStoreEl[] = [];\n\n/*\n * Embedding model configuration\n */\nconst EMBEDDING_MODEL: OpenAI.EmbeddingModel = 'text-embedding-3-large'; // Model to use for embedding generation\nconst OVERLAP_TOKENS: number = 200; // Number of tokens to overlap between chunks\nconst MAX_CHUNK_TOKENS: number = 800; // Maximum number of tokens per chunk\nconst CHAR_BY_TOKEN: number = 4.15; // Approximate pessimistically the number of characters per token // Can use `tiktoken` or other tokenizers to calculate it more precisely\nconst MAX_CHARS: number = MAX_CHUNK_TOKENS * CHAR_BY_TOKEN;\nconst OVERLAP_CHARS: number = OVERLAP_TOKENS * CHAR_BY_TOKEN;\n\nconst skipDocEmbeddingsIndex = process.env.SKIP_DOC_EMBEDDINGS_INDEX === 'true';\n\n/**\n * Splits a given text into chunks ensuring each chunk does not exceed MAX_CHARS.\n * @param text - The input text to split.\n * @returns - Array of text chunks.\n */\nconst chunkText = (text: string): string[] => {\n const chunks: string[] = [];\n let start = 0;\n\n while (start < text.length) {\n let end = Math.min(start + MAX_CHARS, text.length);\n\n // Ensure we don't cut words in the middle (find nearest space)\n if (end < text.length) {\n const lastSpace = text.lastIndexOf(' ', end);\n if (lastSpace > start) {\n end = lastSpace;\n }\n }\n\n chunks.push(text.substring(start, end));\n\n // Move start forward correctly\n const nextStart = end - OVERLAP_CHARS;\n if (nextStart <= start) {\n // Prevent infinite loop if overlap is too 
large\n start = end;\n } else {\n start = nextStart;\n }\n }\n\n return chunks;\n};\n\n/**\n * Generates an embedding for a given text using OpenAI's embedding API.\n * Trims the text if it exceeds the maximum allowed characters.\n *\n * @param text - The input text to generate an embedding for\n * @returns The embedding vector as a number array\n */\nconst generateEmbedding = async (text: string): Promise<number[]> => {\n try {\n const openaiClient = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });\n\n const response = await openaiClient.embeddings.create({\n model: EMBEDDING_MODEL,\n input: text,\n });\n\n return response.data[0].embedding;\n } catch (error) {\n console.error('Error generating embedding:', error);\n return [];\n }\n};\n\n/**\n * Indexes all Markdown documents by generating embeddings for each chunk and storing them in memory.\n * Persists per-document embeddings under `embeddings/<fileKey>.json`.\n * Handles cases where files have been updated and chunk counts have changed.\n */\nexport const indexMarkdownFiles = async (): Promise<void> => {\n const env = process.env.NODE_ENV;\n dotenv.config({\n path: [`.env.${env}.local`, `.env.${env}`, '.env.local', '.env'],\n });\n\n // Retrieve documentation and blog posts in English locale\n const frequentQuestions = await getFrequentQuestions();\n const docs = await getDocs();\n const blogs = await getBlogs();\n\n const files = { ...docs, ...blogs, ...frequentQuestions }; // Combine docs and blogs into a single object\n\n // Iterate over each file key (identifier) in the combined files\n for await (const fileKey of Object.keys(files)) {\n // Get the metadata of the file\n const fileMetadata = getMarkdownMetadata(\n files[fileKey as keyof typeof files] as string\n );\n\n // Split the document into chunks based on headings\n const fileChunks = chunkText(\n files[fileKey as keyof typeof files] as string\n );\n\n // Read existing embeddings for this file\n const existingEmbeddings = readEmbeddingsForFile(fileKey);\n\n // Check if the number of chunks has changed for this file\n const existingChunksForFile = Object.keys(existingEmbeddings);\n const currentChunkCount = fileChunks.length;\n const previousChunkCount = existingChunksForFile.length;\n\n let shouldRegenerateFileEmbeddings = false;\n\n // If chunk count differs, we need to regenerate embeddings for this file\n if (currentChunkCount !== previousChunkCount) {\n console.info(\n `File \"${fileKey}\" chunk count changed: ${previousChunkCount} -> ${currentChunkCount}. Regenerating embeddings.`\n );\n\n shouldRegenerateFileEmbeddings = !skipDocEmbeddingsIndex;\n }\n\n // Iterate over each chunk within the current file\n let resultForFile: Record<string, number[]> = {};\n for await (const chunkIndex of Object.keys(fileChunks)) {\n const chunkNumber = Number(chunkIndex) + 1; // Chunk number starts at 1\n const chunksNumber = fileChunks.length;\n\n const fileChunk = fileChunks[\n chunkIndex as keyof typeof fileChunks\n ] as string;\n\n const chunkKeyName = `chunk_${chunkNumber}`; // Unique key for the chunk within the file\n\n // Retrieve precomputed embedding if available and file hasn't changed\n const docEmbedding = !shouldRegenerateFileEmbeddings\n ? 
(existingEmbeddings[\n chunkKeyName as keyof typeof existingEmbeddings\n ] as number[] | undefined)\n : undefined;\n\n let embedding = docEmbedding; // Use existing embedding if available and valid\n\n if (!embedding) {\n embedding = await generateEmbedding(fileChunk); // Generate embedding if not present or file changed\n console.info(`- Generated new embedding: ${fileKey}/${chunkKeyName}`);\n }\n\n // Update the file-scoped result object with the embedding\n resultForFile = { ...resultForFile, [chunkKeyName]: embedding };\n\n // Store the embedding and content in the in-memory vector store\n vectorStore.push({\n fileKey,\n chunkNumber,\n embedding,\n content: fileChunk,\n docUrl: fileMetadata.url,\n docName: fileMetadata.title,\n });\n\n console.info(`- Indexed: ${fileKey}/${chunkKeyName}/${chunksNumber}`);\n }\n\n // Persist per-file embeddings if changed\n try {\n if (\n JSON.stringify(resultForFile) !== JSON.stringify(existingEmbeddings)\n ) {\n writeEmbeddingsForFile(fileKey, resultForFile);\n }\n } catch (error) {\n console.error(error);\n }\n }\n};\n"],"mappings":";;;;;;;;;AASA,MAAM,wBAAwB;AAC9B,MAAM,YAAY,QAAQ,cAAc,OAAO,KAAK,IAAI,CAAC;AAEzD,MAAM,yBAAyB,YAA8C;AAC3E,KAAI;AACF,SAAO,KAAK,MACV,aACE,GAAG,UAAU,cAAc,QAAQ,QAAQ,OAAO,QAAQ,IAC1D,QACD,CACF;SACK;AACN,SAAO,EAAE;;;AAIb,MAAM,0BACJ,SACA,SACS;CACT,MAAM,WAAW,KACf,uBACA,GAAG,QAAQ,QAAQ,OAAO,QAAQ,GACnC;CACD,MAAM,MAAM,QAAQ,SAAS;AAC7B,KAAI,CAAC,WAAW,IAAI,CAClB,WAAU,KAAK,EAAE,WAAW,MAAM,CAAC;AAErC,eAAc,UAAU,KAAK,UAAU,KAAK,CAAC;;;;;;;;;;AAoB/C,MAAMA,cAA+B,EAAE;AAKvC,MAAMC,kBAAyC;AAC/C,MAAMC,iBAAyB;AAC/B,MAAMC,mBAA2B;AACjC,MAAMC,gBAAwB;AAC9B,MAAMC,YAAoB,mBAAmB;AAC7C,MAAMC,gBAAwB,iBAAiB;AAE/C,MAAM,yBAAyB,QAAQ,IAAI,8BAA8B;;;;;;AAOzE,MAAM,aAAa,SAA2B;CAC5C,MAAMC,SAAmB,EAAE;CAC3B,IAAI,QAAQ;AAEZ,QAAO,QAAQ,KAAK,QAAQ;EAC1B,IAAI,MAAM,KAAK,IAAI,QAAQ,WAAW,KAAK,OAAO;AAGlD,MAAI,MAAM,KAAK,QAAQ;GACrB,MAAM,YAAY,KAAK,YAAY,KAAK,IAAI;AAC5C,OAAI,YAAY,MACd,OAAM;;AAIV,SAAO,KAAK,KAAK,UAAU,OAAO,IAAI,CAAC;EAGvC,MAAM,YAAY,MAAM;AACxB,MAAI,aAAa,MAEf,SAAQ;MAER,SAAQ;;AAIZ,QAAO;;;;;;;;;AAUT,MAAM,oBAAoB,OAAO,SAAoC;AACnE,KAAI;AAQF,UALiB,MAFI,IAAI,OAAO,EAAE,QAAQ,QAAQ,IAAI,gBAAgB,CAAC,CAEnC,WAAW,OAAO;GACpD,OAAO;GACP,OAAO;GACR,CAAC,EAEc,KAAK,GAAG;UACjB,OAAO;AACd,UAAQ,MAAM,+BAA+B,MAAM;AACnD,SAAO,EAAE;;;;;;;;AASb,MAAa,qBAAqB,YAA2B;CAC3D,MAAM;AACN,QAAO,OAAO,EACZ,MAAM;EAAC,QAAQ,IAAI;EAAS,QAAQ;EAAO;EAAc;EAAO,EACjE,CAAC;CAGF,MAAM,oBAAoB,MAAM,sBAAsB;CACtD,MAAM,OAAO,MAAM,SAAS;CAC5B,MAAM,QAAQ,MAAM,UAAU;CAE9B,MAAM,QAAQ;EAAE,GAAG;EAAM,GAAG;EAAO,GAAG;EAAmB;AAGzD,YAAW,MAAM,WAAW,OAAO,KAAK,MAAM,EAAE;EAE9C,MAAM,eAAe,oBACnB,MAAM,SACP;EAGD,MAAM,aAAa,UACjB,MAAM,SACP;EAGD,MAAM,qBAAqB,sBAAsB,QAAQ;EAGzD,MAAM,wBAAwB,OAAO,KAAK,mBAAmB;EAC7D,MAAM,oBAAoB,WAAW;EACrC,MAAM,qBAAqB,sBAAsB;EAEjD,IAAI,iCAAiC;AAGrC,MAAI,sBAAsB,oBAAoB;AAC5C,WAAQ,KACN,SAAS,QAAQ,yBAAyB,mBAAmB,MAAM,kBAAkB,4BACtF;AAED,oCAAiC,CAAC;;EAIpC,IAAIC,gBAA0C,EAAE;AAChD,aAAW,MAAM,cAAc,OAAO,KAAK,WAAW,EAAE;GACtD,MAAM,cAAc,OAAO,WAAW,GAAG;GACzC,MAAM,eAAe,WAAW;GAEhC,MAAM,YAAY,WAChB;GAGF,MAAM,eAAe,SAAS;GAS9B,IAAI,YANiB,CAAC,iCACjB,mBACC,gBAEF;AAIJ,OAAI,CAAC,WAAW;AACd,gBAAY,MAAM,kBAAkB,UAAU;AAC9C,YAAQ,KAAK,8BAA8B,QAAQ,GAAG,eAAe;;AAIvE,mBAAgB;IAAE,GAAG;KAAgB,eAAe;IAAW;AAG/D,eAAY,KAAK;IACf;IACA;IACA;IACA,SAAS;IACT,QAAQ,aAAa;IACrB,SAAS,aAAa;IACvB,CAAC;AAEF,WAAQ,KAAK,cAAc,QAAQ,GAAG,aAAa,GAAG,eAAe;;AAIvE,MAAI;AACF,OACE,KAAK,UAAU,cAAc,KAAK,KAAK,UAAU,mBAAmB,CAEpE,wBAAuB,SAAS,cAAc;WAEzC,OAAO;AACd,WAAQ,MAAM,MAAM"}
+
{"version":3,"file":"indexMarkdownFiles.mjs","names":["vectorStore: VectorStoreEl[]","EMBEDDING_MODEL: OpenAI.EmbeddingModel","OVERLAP_TOKENS: number","MAX_CHUNK_TOKENS: number","CHAR_BY_TOKEN: number","MAX_CHARS: number","OVERLAP_CHARS: number","chunks: string[]","resultForFile: Record<string, number[]>"],"sources":["../../../../../src/utils/AI/askDocQuestion/indexMarkdownFiles.ts"],"sourcesContent":["import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'node:fs';\nimport { dirname, join } from 'node:path';\nimport { fileURLToPath } from 'node:url';\nimport { getMarkdownMetadata } from '@intlayer/core';\nimport { getBlogs, getDocs, getFrequentQuestions } from '@intlayer/docs';\nimport dotenv from 'dotenv';\nimport { OpenAI } from 'openai';\n\nconst OUTPUT_EMBEDDINGS_DIR = 'src/utils/AI/askDocQuestion/embeddings';\nconst __dirname = dirname(fileURLToPath(import.meta.url));\n\nconst readEmbeddingsForFile = (fileKey: string): Record<string, number[]> => {\n try {\n return JSON.parse(\n readFileSync(\n `${__dirname}/embeddings/${fileKey.replace('.md', '.json')}`,\n 'utf-8'\n )\n ) as Record<string, number[]>;\n } catch {\n return {};\n }\n};\n\nconst writeEmbeddingsForFile = (\n fileKey: string,\n data: Record<string, number[]>\n): void => {\n const filePath = join(\n OUTPUT_EMBEDDINGS_DIR,\n `${fileKey.replace('.md', '.json')}`\n );\n const dir = dirname(filePath);\n if (!existsSync(dir)) {\n mkdirSync(dir, { recursive: true });\n }\n writeFileSync(filePath, JSON.stringify(data));\n};\n\ntype VectorStoreEl = {\n fileKey: string;\n chunkNumber: number;\n content: string;\n embedding: number[];\n docUrl: string;\n docName: string;\n};\n\n/**\n * Simple in-memory vector store to hold document embeddings and their content.\n * Each entry contains:\n * - fileKey: A unique key identifying the file\n * - chunkNumber: The number of the chunk within the document\n * - content: The chunk content\n * - embedding: The numerical embedding vector for the chunk\n */\nconst vectorStore: VectorStoreEl[] = [];\n\n/*\n * Embedding model configuration\n */\nconst EMBEDDING_MODEL: OpenAI.EmbeddingModel = 'text-embedding-3-large'; // Model to use for embedding generation\nconst OVERLAP_TOKENS: number = 200; // Number of tokens to overlap between chunks\nconst MAX_CHUNK_TOKENS: number = 800; // Maximum number of tokens per chunk\nconst CHAR_BY_TOKEN: number = 4.15; // Approximate pessimistically the number of characters per token // Can use `tiktoken` or other tokenizers to calculate it more precisely\nconst MAX_CHARS: number = MAX_CHUNK_TOKENS * CHAR_BY_TOKEN;\nconst OVERLAP_CHARS: number = OVERLAP_TOKENS * CHAR_BY_TOKEN;\n\nconst skipDocEmbeddingsIndex = process.env.SKIP_DOC_EMBEDDINGS_INDEX === 'true';\n\n/**\n * Splits a given text into chunks ensuring each chunk does not exceed MAX_CHARS.\n * @param text - The input text to split.\n * @returns - Array of text chunks.\n */\nconst chunkText = (text: string): string[] => {\n const chunks: string[] = [];\n let start = 0;\n\n while (start < text.length) {\n let end = Math.min(start + MAX_CHARS, text.length);\n\n // Ensure we don't cut words in the middle (find nearest space)\n if (end < text.length) {\n const lastSpace = text.lastIndexOf(' ', end);\n if (lastSpace > start) {\n end = lastSpace;\n }\n }\n\n chunks.push(text.substring(start, end));\n\n // Move start forward correctly\n const nextStart = end - OVERLAP_CHARS;\n if (nextStart <= start) {\n // Prevent infinite loop if overlap is too large\n start = end;\n } else {\n start = nextStart;\n }\n 
}\n\n return chunks;\n};\n\n/**\n * Generates an embedding for a given text using OpenAI's embedding API.\n * Trims the text if it exceeds the maximum allowed characters.\n *\n * @param text - The input text to generate an embedding for\n * @returns The embedding vector as a number array\n */\nconst generateEmbedding = async (text: string): Promise<number[]> => {\n try {\n const openaiClient = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });\n\n const response = await openaiClient.embeddings.create({\n model: EMBEDDING_MODEL,\n input: text,\n });\n\n return response.data[0].embedding;\n } catch (error) {\n console.error('Error generating embedding:', error);\n return [];\n }\n};\n\n/**\n * Indexes all Markdown documents by generating embeddings for each chunk and storing them in memory.\n * Persists per-document embeddings under `embeddings/<fileKey>.json`.\n * Handles cases where files have been updated and chunk counts have changed.\n */\nexport const indexMarkdownFiles = async (): Promise<void> => {\n const env = process.env.NODE_ENV;\n dotenv.config({\n path: [`.env.${env}.local`, `.env.${env}`, '.env.local', '.env'],\n });\n\n // Retrieve documentation and blog posts in English locale\n const frequentQuestions = await getFrequentQuestions();\n const docs = await getDocs();\n const blogs = await getBlogs();\n\n const files = { ...docs, ...blogs, ...frequentQuestions }; // Combine docs and blogs into a single object\n\n // Iterate over each file key (identifier) in the combined files\n for await (const fileKey of Object.keys(files)) {\n // Get the metadata of the file\n const fileMetadata = getMarkdownMetadata(\n files[fileKey as keyof typeof files] as string\n );\n\n // Split the document into chunks based on headings\n const fileChunks = chunkText(\n files[fileKey as keyof typeof files] as string\n );\n\n // Read existing embeddings for this file\n const existingEmbeddings = readEmbeddingsForFile(fileKey);\n\n // Check if the number of chunks has changed for this file\n const existingChunksForFile = Object.keys(existingEmbeddings);\n const currentChunkCount = fileChunks.length;\n const previousChunkCount = existingChunksForFile.length;\n\n let shouldRegenerateFileEmbeddings = false;\n\n // If chunk count differs, we need to regenerate embeddings for this file\n if (currentChunkCount !== previousChunkCount) {\n console.info(\n `File \"${fileKey}\" chunk count changed: ${previousChunkCount} -> ${currentChunkCount}. Regenerating embeddings.`\n );\n\n shouldRegenerateFileEmbeddings = !skipDocEmbeddingsIndex;\n }\n\n // Iterate over each chunk within the current file\n let resultForFile: Record<string, number[]> = {};\n for await (const chunkIndex of Object.keys(fileChunks)) {\n const chunkNumber = Number(chunkIndex) + 1; // Chunk number starts at 1\n const chunksNumber = fileChunks.length;\n\n const fileChunk = fileChunks[\n chunkIndex as keyof typeof fileChunks\n ] as string;\n\n const chunkKeyName = `chunk_${chunkNumber}`; // Unique key for the chunk within the file\n\n // Retrieve precomputed embedding if available and file hasn't changed\n const docEmbedding = !shouldRegenerateFileEmbeddings\n ? 
(existingEmbeddings[\n chunkKeyName as keyof typeof existingEmbeddings\n ] as number[] | undefined)\n : undefined;\n\n let embedding = docEmbedding; // Use existing embedding if available and valid\n\n if (!embedding) {\n embedding = await generateEmbedding(fileChunk); // Generate embedding if not present or file changed\n console.info(`- Generated new embedding: ${fileKey}/${chunkKeyName}`);\n }\n\n // Update the file-scoped result object with the embedding\n resultForFile = { ...resultForFile, [chunkKeyName]: embedding };\n\n // Store the embedding and content in the in-memory vector store\n vectorStore.push({\n fileKey,\n chunkNumber,\n embedding,\n content: fileChunk,\n docUrl: fileMetadata.url,\n docName: fileMetadata.title,\n });\n\n console.info(`- Indexed: ${fileKey}/${chunkKeyName}/${chunksNumber}`);\n }\n\n // Persist per-file embeddings if changed\n try {\n if (\n JSON.stringify(resultForFile) !== JSON.stringify(existingEmbeddings)\n ) {\n writeEmbeddingsForFile(fileKey, resultForFile);\n }\n } catch (error) {\n console.error(error);\n }\n }\n};\n"],"mappings":";;;;;;;;;AAQA,MAAM,wBAAwB;AAC9B,MAAM,YAAY,QAAQ,cAAc,OAAO,KAAK,IAAI,CAAC;AAEzD,MAAM,yBAAyB,YAA8C;AAC3E,KAAI;AACF,SAAO,KAAK,MACV,aACE,GAAG,UAAU,cAAc,QAAQ,QAAQ,OAAO,QAAQ,IAC1D,QACD,CACF;SACK;AACN,SAAO,EAAE;;;AAIb,MAAM,0BACJ,SACA,SACS;CACT,MAAM,WAAW,KACf,uBACA,GAAG,QAAQ,QAAQ,OAAO,QAAQ,GACnC;CACD,MAAM,MAAM,QAAQ,SAAS;AAC7B,KAAI,CAAC,WAAW,IAAI,CAClB,WAAU,KAAK,EAAE,WAAW,MAAM,CAAC;AAErC,eAAc,UAAU,KAAK,UAAU,KAAK,CAAC;;;;;;;;;;AAoB/C,MAAMA,cAA+B,EAAE;AAKvC,MAAMC,kBAAyC;AAC/C,MAAMC,iBAAyB;AAC/B,MAAMC,mBAA2B;AACjC,MAAMC,gBAAwB;AAC9B,MAAMC,YAAoB,mBAAmB;AAC7C,MAAMC,gBAAwB,iBAAiB;AAE/C,MAAM,yBAAyB,QAAQ,IAAI,8BAA8B;;;;;;AAOzE,MAAM,aAAa,SAA2B;CAC5C,MAAMC,SAAmB,EAAE;CAC3B,IAAI,QAAQ;AAEZ,QAAO,QAAQ,KAAK,QAAQ;EAC1B,IAAI,MAAM,KAAK,IAAI,QAAQ,WAAW,KAAK,OAAO;AAGlD,MAAI,MAAM,KAAK,QAAQ;GACrB,MAAM,YAAY,KAAK,YAAY,KAAK,IAAI;AAC5C,OAAI,YAAY,MACd,OAAM;;AAIV,SAAO,KAAK,KAAK,UAAU,OAAO,IAAI,CAAC;EAGvC,MAAM,YAAY,MAAM;AACxB,MAAI,aAAa,MAEf,SAAQ;MAER,SAAQ;;AAIZ,QAAO;;;;;;;;;AAUT,MAAM,oBAAoB,OAAO,SAAoC;AACnE,KAAI;AAQF,UALiB,MAFI,IAAI,OAAO,EAAE,QAAQ,QAAQ,IAAI,gBAAgB,CAAC,CAEnC,WAAW,OAAO;GACpD,OAAO;GACP,OAAO;GACR,CAAC,EAEc,KAAK,GAAG;UACjB,OAAO;AACd,UAAQ,MAAM,+BAA+B,MAAM;AACnD,SAAO,EAAE;;;;;;;;AASb,MAAa,qBAAqB,YAA2B;CAC3D,MAAM;AACN,QAAO,OAAO,EACZ,MAAM;EAAC,QAAQ,IAAI;EAAS,QAAQ;EAAO;EAAc;EAAO,EACjE,CAAC;CAGF,MAAM,oBAAoB,MAAM,sBAAsB;CACtD,MAAM,OAAO,MAAM,SAAS;CAC5B,MAAM,QAAQ,MAAM,UAAU;CAE9B,MAAM,QAAQ;EAAE,GAAG;EAAM,GAAG;EAAO,GAAG;EAAmB;AAGzD,YAAW,MAAM,WAAW,OAAO,KAAK,MAAM,EAAE;EAE9C,MAAM,eAAe,oBACnB,MAAM,SACP;EAGD,MAAM,aAAa,UACjB,MAAM,SACP;EAGD,MAAM,qBAAqB,sBAAsB,QAAQ;EAGzD,MAAM,wBAAwB,OAAO,KAAK,mBAAmB;EAC7D,MAAM,oBAAoB,WAAW;EACrC,MAAM,qBAAqB,sBAAsB;EAEjD,IAAI,iCAAiC;AAGrC,MAAI,sBAAsB,oBAAoB;AAC5C,WAAQ,KACN,SAAS,QAAQ,yBAAyB,mBAAmB,MAAM,kBAAkB,4BACtF;AAED,oCAAiC,CAAC;;EAIpC,IAAIC,gBAA0C,EAAE;AAChD,aAAW,MAAM,cAAc,OAAO,KAAK,WAAW,EAAE;GACtD,MAAM,cAAc,OAAO,WAAW,GAAG;GACzC,MAAM,eAAe,WAAW;GAEhC,MAAM,YAAY,WAChB;GAGF,MAAM,eAAe,SAAS;GAS9B,IAAI,YANiB,CAAC,iCACjB,mBACC,gBAEF;AAIJ,OAAI,CAAC,WAAW;AACd,gBAAY,MAAM,kBAAkB,UAAU;AAC9C,YAAQ,KAAK,8BAA8B,QAAQ,GAAG,eAAe;;AAIvE,mBAAgB;IAAE,GAAG;KAAgB,eAAe;IAAW;AAG/D,eAAY,KAAK;IACf;IACA;IACA;IACA,SAAS;IACT,QAAQ,aAAa;IACrB,SAAS,aAAa;IACvB,CAAC;AAEF,WAAQ,KAAK,cAAc,QAAQ,GAAG,aAAa,GAAG,eAAe;;AAIvE,MAAI;AACF,OACE,KAAK,UAAU,cAAc,KAAK,KAAK,UAAU,mBAAmB,CAEpE,wBAAuB,SAAS,cAAc;WAEzC,OAAO;AACd,WAAQ,MAAM,MAAM"}
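The sourcemap above embeds the full `indexMarkdownFiles.ts` source inside its `sourcesContent` string. For readability, here is the chunking routine it contains, extracted as-is: chunks are capped at `MAX_CHUNK_TOKENS * CHAR_BY_TOKEN = 800 * 4.15 = 3320` characters, consecutive chunks overlap by `200 * 4.15 = 830` characters, and a window-advance guard keeps the loop from stalling when the overlap would swallow a whole chunk.

```ts
// Extracted from the sourcesContent above (indexMarkdownFiles.ts).
const OVERLAP_TOKENS: number = 200; // tokens shared between consecutive chunks
const MAX_CHUNK_TOKENS: number = 800; // cap per chunk
const CHAR_BY_TOKEN: number = 4.15; // pessimistic chars-per-token estimate
const MAX_CHARS: number = MAX_CHUNK_TOKENS * CHAR_BY_TOKEN; // 3320
const OVERLAP_CHARS: number = OVERLAP_TOKENS * CHAR_BY_TOKEN; // 830

const chunkText = (text: string): string[] => {
  const chunks: string[] = [];
  let start = 0;

  while (start < text.length) {
    let end = Math.min(start + MAX_CHARS, text.length);

    // Avoid cutting words in the middle: back up to the nearest space.
    if (end < text.length) {
      const lastSpace = text.lastIndexOf(' ', end);
      if (lastSpace > start) end = lastSpace;
    }

    chunks.push(text.substring(start, end));

    // Step forward by (chunk - overlap); if that would not advance,
    // jump to `end` to prevent an infinite loop.
    const nextStart = end - OVERLAP_CHARS;
    start = nextStart <= start ? end : nextStart;
  }

  return chunks;
};
```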
package/dist/esm/utils/AI/auditDictionary/index.mjs
@@ -1,9 +1,9 @@
 import { logger } from "../../../logger/index.mjs";
 import { readAsset } from "../../../_virtual/_utils_asset.mjs";
 import { extractJson } from "../../extractJSON.mjs";
+import { generateText } from "@intlayer/ai";
 import { Locales } from "@intlayer/types";
 import { getLocaleName } from "@intlayer/core";
-import { generateText } from "ai";
 
 //#region src/utils/AI/auditDictionary/index.ts
 const CHAT_GPT_PROMPT = readAsset("./PROMPT.md");
package/dist/esm/utils/AI/auditDictionary/index.mjs.map
@@ -1 +1 @@
-{"version":3,"file":"index.mjs","names":["aiDefaultOptions: AIOptions"],"sources":["../../../../../src/utils/AI/auditDictionary/index.ts"],"sourcesContent":["import { readAsset } from 'utils:asset';\nimport {
+
{"version":3,"file":"index.mjs","names":["aiDefaultOptions: AIOptions"],"sources":["../../../../../src/utils/AI/auditDictionary/index.ts"],"sourcesContent":["import { readAsset } from 'utils:asset';\nimport type { AIConfig, AIOptions } from '@intlayer/ai';\nimport { generateText } from '@intlayer/ai';\nimport { getLocaleName } from '@intlayer/core';\nimport { type Locale, Locales } from '@intlayer/types';\nimport { logger } from '@logger';\nimport { extractJson } from '@utils/extractJSON';\nimport type { Tag } from '@/types/tag.types';\n\nexport type AuditOptions = {\n fileContent: string;\n filePath?: string;\n locales: Locale[];\n defaultLocale: Locale;\n tags: Tag[];\n aiConfig: AIConfig;\n applicationContext?: string;\n};\n\nexport type AuditFileResultData = {\n fileContent: {\n title: string;\n description: string;\n tags: string[];\n };\n tokenUsed: number;\n};\n\n// The prompt template to send to the AI model\nconst CHAT_GPT_PROMPT = readAsset('./PROMPT.md');\n\nexport const aiDefaultOptions: AIOptions = {\n // Keep default options\n};\n\n/**\n * Format a locale with its name.\n *\n * @param locale - The locale to format.\n * @returns A string in the format \"locale: name\", e.g. \"en: English\".\n */\nconst formatLocaleWithName = (locale: Locale): string => {\n return `${locale}: ${getLocaleName(locale, Locales.ENGLISH)}`;\n};\n\n/**\n * Formats tag instructions for the AI prompt.\n * Creates a string with all available tags and their descriptions.\n *\n * @param tags - The list of tags to format.\n * @returns A formatted string with tag instructions.\n */\nconst formatTagInstructions = (tags: Tag[]): string => {\n if (!tags || tags.length === 0) return '';\n\n // Prepare the tag instructions.\n return [\n `Based on the dictionary content, identify specific tags from the list below that would be relevant:`,\n tags.map(({ key, description }) => `- ${key}: ${description}`).join('\\n\\n'),\n ].join('\\n\\n');\n};\n\n/**\n * Audits a content declaration file by constructing a prompt for AI models.\n * The prompt includes details about the project's locales, file paths of content declarations,\n * and requests for identifying issues or inconsistencies.\n */\nexport const auditDictionary = async ({\n fileContent,\n filePath,\n locales,\n defaultLocale,\n tags,\n aiConfig,\n applicationContext,\n}: AuditOptions): Promise<AuditFileResultData | undefined> => {\n const otherLocales = locales.filter((locale) => locale !== defaultLocale);\n\n // Prepare the prompt for AI by replacing placeholders with actual values.\n const prompt = CHAT_GPT_PROMPT.replace(\n '{{defaultLocale}}',\n formatLocaleWithName(defaultLocale)\n )\n .replace(\n '{{otherLocales}}',\n `{${otherLocales.map(formatLocaleWithName).join(', ')}}`\n )\n .replace('{{filePath}}', filePath ?? '')\n .replace('{{applicationContext}}', applicationContext ?? '')\n .replace('{{tagsInstructions}}', formatTagInstructions(tags));\n\n // Use the AI SDK to generate the completion\n const { text: newContent, usage } = await generateText({\n ...aiConfig,\n messages: [\n { role: 'system', content: prompt },\n {\n role: 'user',\n content: ['**File to Audit:**', fileContent].join('\\n'),\n },\n ],\n });\n\n logger.info(`${usage?.totalTokens ?? 0} tokens used in the request`);\n\n return {\n fileContent: extractJson(newContent),\n tokenUsed: usage?.totalTokens ?? 
0,\n };\n};\n"],"mappings":";;;;;;;;AA6BA,MAAM,kBAAkB,UAAU,cAAc;AAEhD,MAAaA,mBAA8B,EAE1C;;;;;;;AAQD,MAAM,wBAAwB,WAA2B;AACvD,QAAO,GAAG,OAAO,IAAI,cAAc,QAAQ,QAAQ,QAAQ;;;;;;;;;AAU7D,MAAM,yBAAyB,SAAwB;AACrD,KAAI,CAAC,QAAQ,KAAK,WAAW,EAAG,QAAO;AAGvC,QAAO,CACL,uGACA,KAAK,KAAK,EAAE,KAAK,kBAAkB,KAAK,IAAI,IAAI,cAAc,CAAC,KAAK,OAAO,CAC5E,CAAC,KAAK,OAAO;;;;;;;AAQhB,MAAa,kBAAkB,OAAO,EACpC,aACA,UACA,SACA,eACA,MACA,UACA,yBAC4D;CAC5D,MAAM,eAAe,QAAQ,QAAQ,WAAW,WAAW,cAAc;CAGzE,MAAM,SAAS,gBAAgB,QAC7B,qBACA,qBAAqB,cAAc,CACpC,CACE,QACC,oBACA,IAAI,aAAa,IAAI,qBAAqB,CAAC,KAAK,KAAK,CAAC,GACvD,CACA,QAAQ,gBAAgB,YAAY,GAAG,CACvC,QAAQ,0BAA0B,sBAAsB,GAAG,CAC3D,QAAQ,wBAAwB,sBAAsB,KAAK,CAAC;CAG/D,MAAM,EAAE,MAAM,YAAY,UAAU,MAAM,aAAa;EACrD,GAAG;EACH,UAAU,CACR;GAAE,MAAM;GAAU,SAAS;GAAQ,EACnC;GACE,MAAM;GACN,SAAS,CAAC,sBAAsB,YAAY,CAAC,KAAK,KAAK;GACxD,CACF;EACF,CAAC;AAEF,QAAO,KAAK,GAAG,OAAO,eAAe,EAAE,6BAA6B;AAEpE,QAAO;EACL,aAAa,YAAY,WAAW;EACpC,WAAW,OAAO,eAAe;EAClC"}
package/dist/esm/utils/AI/auditDictionaryField/index.mjs
@@ -1,8 +1,8 @@
 import { logger } from "../../../logger/index.mjs";
 import { readAsset } from "../../../_virtual/_utils_asset.mjs";
+import { generateText } from "@intlayer/ai";
 import { Locales } from "@intlayer/types";
 import { getLocaleName } from "@intlayer/core";
-import { generateText } from "ai";
 
 //#region src/utils/AI/auditDictionaryField/index.ts
 const CHAT_GPT_PROMPT = readAsset("./PROMPT.md");
package/dist/esm/utils/AI/auditDictionaryField/index.mjs.map
@@ -1 +1 @@
-{"version":3,"file":"index.mjs","names":["aiDefaultOptions: AIOptions"],"sources":["../../../../../src/utils/AI/auditDictionaryField/index.ts"],"sourcesContent":["import { readAsset } from 'utils:asset';\nimport {
+
{"version":3,"file":"index.mjs","names":["aiDefaultOptions: AIOptions"],"sources":["../../../../../src/utils/AI/auditDictionaryField/index.ts"],"sourcesContent":["import { readAsset } from 'utils:asset';\nimport type { AIConfig, AIOptions } from '@intlayer/ai';\nimport { generateText } from '@intlayer/ai';\nimport { getLocaleName } from '@intlayer/core';\nimport type { KeyPath } from '@intlayer/types';\nimport { type Locale, Locales } from '@intlayer/types';\nimport { logger } from '@logger';\nimport type { Tag } from '@/types/tag.types';\n\nexport type AuditDictionaryFieldOptions = {\n fileContent: string;\n locales: Locale[];\n keyPath: KeyPath[];\n tags: Tag[];\n aiConfig: AIConfig;\n applicationContext?: string;\n};\n\nexport type AuditDictionaryFieldResultData = {\n fileContent: string;\n tokenUsed: number;\n};\n\n// The prompt template to send to the AI model\nconst CHAT_GPT_PROMPT = readAsset('./PROMPT.md');\n\nexport const aiDefaultOptions: AIOptions = {\n // Keep default options\n};\n\n/**\n * Format a locale with its name.\n *\n * @param locale - The locale to format.\n * @returns A string in the format \"locale: name\", e.g. \"en: English\".\n */\nconst formatLocaleWithName = (locale: Locale): string => {\n return `${locale}: ${getLocaleName(locale, Locales.ENGLISH)}`;\n};\n\n/**\n * Formats tag instructions for the AI prompt.\n *\n * @param tags - Array of tags to format\n * @returns A formatted string with tag instructions\n */\nconst formatTagInstructions = (tags: Tag[]): string => {\n if (!tags || tags.length === 0) {\n return '';\n }\n\n return `Based on the dictionary content, identify specific tags from the list below that would be relevant:\n \n${tags.map(({ key, description }) => `- ${key}: ${description}`).join('\\n\\n')}`;\n};\n\n/**\n * Audits a content declaration file by constructing a prompt for AI models.\n * The prompt includes details about the project's locales, file paths of content declarations,\n * and requests for identifying issues or inconsistencies.\n */\nexport const auditDictionaryField = async ({\n fileContent,\n applicationContext,\n locales,\n keyPath,\n tags,\n aiConfig,\n}: AuditDictionaryFieldOptions): Promise<\n AuditDictionaryFieldResultData | undefined\n> => {\n // Prepare the prompt for AI by replacing placeholders with actual values.\n const prompt = CHAT_GPT_PROMPT.replace(\n '{{otherLocales}}',\n `{${locales.map(formatLocaleWithName).join(', ')}}`\n )\n .replace('{{keyPath}}', JSON.stringify(keyPath))\n .replace('{{applicationContext}}', applicationContext ?? '')\n .replace('{{tagsInstructions}}', formatTagInstructions(tags));\n\n if (!aiConfig) {\n logger.error('Failed to configure AI model');\n return undefined;\n }\n\n // Use the AI SDK to generate the completion\n const { text: newContent, usage } = await generateText({\n ...aiConfig,\n messages: [\n { role: 'system', content: prompt },\n {\n role: 'user',\n content: ['**File to Audit:**', fileContent].join('\\n'),\n },\n ],\n });\n\n logger.info(`${usage?.totalTokens ?? 0} tokens used in the request`);\n\n return {\n fileContent: newContent,\n tokenUsed: usage?.totalTokens ?? 
0,\n };\n};\n"],"mappings":";;;;;;;AAwBA,MAAM,kBAAkB,UAAU,cAAc;AAEhD,MAAaA,mBAA8B,EAE1C;;;;;;;AAQD,MAAM,wBAAwB,WAA2B;AACvD,QAAO,GAAG,OAAO,IAAI,cAAc,QAAQ,QAAQ,QAAQ;;;;;;;;AAS7D,MAAM,yBAAyB,SAAwB;AACrD,KAAI,CAAC,QAAQ,KAAK,WAAW,EAC3B,QAAO;AAGT,QAAO;;EAEP,KAAK,KAAK,EAAE,KAAK,kBAAkB,KAAK,IAAI,IAAI,cAAc,CAAC,KAAK,OAAO;;;;;;;AAQ7E,MAAa,uBAAuB,OAAO,EACzC,aACA,oBACA,SACA,SACA,MACA,eAGG;CAEH,MAAM,SAAS,gBAAgB,QAC7B,oBACA,IAAI,QAAQ,IAAI,qBAAqB,CAAC,KAAK,KAAK,CAAC,GAClD,CACE,QAAQ,eAAe,KAAK,UAAU,QAAQ,CAAC,CAC/C,QAAQ,0BAA0B,sBAAsB,GAAG,CAC3D,QAAQ,wBAAwB,sBAAsB,KAAK,CAAC;AAE/D,KAAI,CAAC,UAAU;AACb,SAAO,MAAM,+BAA+B;AAC5C;;CAIF,MAAM,EAAE,MAAM,YAAY,UAAU,MAAM,aAAa;EACrD,GAAG;EACH,UAAU,CACR;GAAE,MAAM;GAAU,SAAS;GAAQ,EACnC;GACE,MAAM;GACN,SAAS,CAAC,sBAAsB,YAAY,CAAC,KAAK,KAAK;GACxD,CACF;EACF,CAAC;AAEF,QAAO,KAAK,GAAG,OAAO,eAAe,EAAE,6BAA6B;AAEpE,QAAO;EACL,aAAa;EACb,WAAW,OAAO,eAAe;EAClC"}
package/dist/esm/utils/AI/auditDictionaryMetadata/index.mjs
@@ -1,41 +1,17 @@
 import { logger } from "../../../logger/index.mjs";
-import {
-import { extractJson } from "../../extractJSON.mjs";
-import { generateText } from "ai";
+import { auditDictionaryMetadata as auditDictionaryMetadata$1 } from "@intlayer/ai";
 
 //#region src/utils/AI/auditDictionaryMetadata/index.ts
-const CHAT_GPT_PROMPT = readAsset("./PROMPT.md");
 const aiDefaultOptions = {};
 /**
 * Audits a content declaration file by constructing a prompt for AI models.
 * The prompt includes details about the project's locales, file paths of content declarations,
 * and requests for identifying issues or inconsistencies.
 */
-const auditDictionaryMetadata = async (
-const
-if (
-
-return;
-}
-const { text: newContent, usage } = await generateText({
-...aiConfig,
-messages: [{
-role: "system",
-content: prompt
-}, {
-role: "user",
-content: [
-"**Content declaration to describe:**",
-"This is the content declaration that you should consider to describe:",
-fileContent
-].join("\n")
-}]
-});
-logger.info(`${usage?.totalTokens ?? 0} tokens used in the request`);
-return {
-fileContent: extractJson(newContent),
-tokenUsed: usage?.totalTokens ?? 0
-};
+const auditDictionaryMetadata = async (options) => {
+const result = await auditDictionaryMetadata$1(options);
+if (result) logger.info(`${result.tokenUsed} tokens used in the request`);
+return result;
 };
 
 //#endregion
package/dist/esm/utils/AI/auditDictionaryMetadata/index.mjs.map
@@ -1 +1 @@
-{"version":3,"file":"index.mjs","names":["aiDefaultOptions: AIOptions"],"sources":["../../../../../src/utils/AI/auditDictionaryMetadata/index.ts"],"sourcesContent":["import {
+
{"version":3,"file":"index.mjs","names":["aiDefaultOptions: AIOptions","auditDictionaryMetadataAI"],"sources":["../../../../../src/utils/AI/auditDictionaryMetadata/index.ts"],"sourcesContent":["import {\n type AIConfig,\n type AIOptions,\n type AuditFileResultData,\n auditDictionaryMetadata as auditDictionaryMetadataAI,\n} from '@intlayer/ai';\nimport { logger } from '@logger';\nimport type { Tag } from '@/types/tag.types';\n\nexport type AuditOptions = {\n fileContent: string;\n tags: Tag[];\n aiConfig: AIConfig;\n applicationContext?: string;\n};\n\nexport type { AuditFileResultData };\n\nexport const aiDefaultOptions: AIOptions = {\n // Keep default options\n};\n\n/**\n * Audits a content declaration file by constructing a prompt for AI models.\n * The prompt includes details about the project's locales, file paths of content declarations,\n * and requests for identifying issues or inconsistencies.\n */\nexport const auditDictionaryMetadata = async (\n options: AuditOptions\n): Promise<AuditFileResultData | undefined> => {\n const result = await auditDictionaryMetadataAI(options);\n\n if (result) {\n logger.info(`${result.tokenUsed} tokens used in the request`);\n }\n\n return result;\n};\n"],"mappings":";;;;AAkBA,MAAaA,mBAA8B,EAE1C;;;;;;AAOD,MAAa,0BAA0B,OACrC,YAC6C;CAC7C,MAAM,SAAS,MAAMC,0BAA0B,QAAQ;AAEvD,KAAI,OACF,QAAO,KAAK,GAAG,OAAO,UAAU,6BAA6B;AAG/D,QAAO"}
package/dist/esm/utils/AI/auditTag/index.mjs
@@ -1,7 +1,7 @@
 import { logger } from "../../../logger/index.mjs";
 import { readAsset } from "../../../_virtual/_utils_asset.mjs";
 import { extractJson } from "../../extractJSON.mjs";
-import { generateText } from "ai";
+import { generateText } from "@intlayer/ai";
 
 //#region src/utils/AI/auditTag/index.ts
 const CHAT_GPT_PROMPT = readAsset("./PROMPT.md");