@intlayer/backend 7.2.0 → 7.2.1-canary.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (90)
  1. package/dist/assets/utils/AI/auditDictionary/PROMPT.md +0 -12
  2. package/dist/assets/utils/AI/auditDictionaryField/PROMPT.md +0 -13
  3. package/dist/assets/utils/AI/auditDictionaryMetadata/PROMPT.md +0 -6
  4. package/dist/assets/utils/AI/auditTag/PROMPT.md +0 -10
  5. package/dist/assets/utils/AI/translateJSON/PROMPT.md +0 -8
  6. package/dist/cjs/controllers/ai.controller.cjs +0 -1
  7. package/dist/cjs/controllers/ai.controller.cjs.map +1 -1
  8. package/dist/cjs/utils/AI/aiSdk.cjs +12 -21
  9. package/dist/cjs/utils/AI/aiSdk.cjs.map +1 -1
  10. package/dist/cjs/utils/AI/askDocQuestion/askDocQuestion.cjs +0 -9
  11. package/dist/cjs/utils/AI/askDocQuestion/askDocQuestion.cjs.map +1 -1
  12. package/dist/cjs/utils/AI/askDocQuestion/indexMarkdownFiles.cjs +0 -9
  13. package/dist/cjs/utils/AI/askDocQuestion/indexMarkdownFiles.cjs.map +1 -1
  14. package/dist/cjs/utils/AI/auditDictionary/index.cjs +4 -1
  15. package/dist/cjs/utils/AI/auditDictionary/index.cjs.map +1 -1
  16. package/dist/cjs/utils/AI/auditDictionaryField/index.cjs +4 -1
  17. package/dist/cjs/utils/AI/auditDictionaryField/index.cjs.map +1 -1
  18. package/dist/cjs/utils/AI/auditDictionaryMetadata/index.cjs +8 -1
  19. package/dist/cjs/utils/AI/auditDictionaryMetadata/index.cjs.map +1 -1
  20. package/dist/cjs/utils/AI/auditTag/index.cjs +8 -1
  21. package/dist/cjs/utils/AI/auditTag/index.cjs.map +1 -1
  22. package/dist/cjs/utils/AI/translateJSON/index.cjs +8 -1
  23. package/dist/cjs/utils/AI/translateJSON/index.cjs.map +1 -1
  24. package/dist/esm/controllers/ai.controller.mjs +15 -16
  25. package/dist/esm/controllers/ai.controller.mjs.map +1 -1
  26. package/dist/esm/utils/AI/aiSdk.mjs +12 -21
  27. package/dist/esm/utils/AI/aiSdk.mjs.map +1 -1
  28. package/dist/esm/utils/AI/askDocQuestion/askDocQuestion.mjs +1 -9
  29. package/dist/esm/utils/AI/askDocQuestion/askDocQuestion.mjs.map +1 -1
  30. package/dist/esm/utils/AI/askDocQuestion/indexMarkdownFiles.mjs +1 -9
  31. package/dist/esm/utils/AI/askDocQuestion/indexMarkdownFiles.mjs.map +1 -1
  32. package/dist/esm/utils/AI/auditDictionary/index.mjs +4 -1
  33. package/dist/esm/utils/AI/auditDictionary/index.mjs.map +1 -1
  34. package/dist/esm/utils/AI/auditDictionaryField/index.mjs +4 -1
  35. package/dist/esm/utils/AI/auditDictionaryField/index.mjs.map +1 -1
  36. package/dist/esm/utils/AI/auditDictionaryMetadata/index.mjs +8 -1
  37. package/dist/esm/utils/AI/auditDictionaryMetadata/index.mjs.map +1 -1
  38. package/dist/esm/utils/AI/auditTag/index.mjs +8 -1
  39. package/dist/esm/utils/AI/auditTag/index.mjs.map +1 -1
  40. package/dist/esm/utils/AI/translateJSON/index.mjs +8 -1
  41. package/dist/esm/utils/AI/translateJSON/index.mjs.map +1 -1
  42. package/dist/types/controllers/ai.controller.d.ts.map +1 -1
  43. package/dist/types/controllers/projectAccessKey.controller.d.ts.map +1 -1
  44. package/dist/types/emails/InviteUserEmail.d.ts +4 -4
  45. package/dist/types/emails/InviteUserEmail.d.ts.map +1 -1
  46. package/dist/types/emails/MagicLinkEmail.d.ts +4 -4
  47. package/dist/types/emails/OAuthTokenCreatedEmail.d.ts +4 -4
  48. package/dist/types/emails/PasswordChangeConfirmation.d.ts +4 -4
  49. package/dist/types/emails/PasswordChangeConfirmation.d.ts.map +1 -1
  50. package/dist/types/emails/ResetUserPassword.d.ts +4 -4
  51. package/dist/types/emails/ResetUserPassword.d.ts.map +1 -1
  52. package/dist/types/emails/SubscriptionPaymentCancellation.d.ts +4 -4
  53. package/dist/types/emails/SubscriptionPaymentError.d.ts +4 -4
  54. package/dist/types/emails/SubscriptionPaymentSuccess.d.ts +4 -4
  55. package/dist/types/emails/SubscriptionPaymentSuccess.d.ts.map +1 -1
  56. package/dist/types/emails/ValidateUserEmail.d.ts +4 -4
  57. package/dist/types/emails/Welcome.d.ts +4 -4
  58. package/dist/types/models/dictionary.model.d.ts +4 -4
  59. package/dist/types/models/dictionary.model.d.ts.map +1 -1
  60. package/dist/types/models/discussion.model.d.ts +2 -2
  61. package/dist/types/models/discussion.model.d.ts.map +1 -1
  62. package/dist/types/models/oAuth2.model.d.ts +3 -3
  63. package/dist/types/models/oAuth2.model.d.ts.map +1 -1
  64. package/dist/types/routes/search.routes.d.ts.map +1 -1
  65. package/dist/types/routes/stripe.routes.d.ts.map +1 -1
  66. package/dist/types/schemas/dictionary.schema.d.ts +6 -6
  67. package/dist/types/schemas/dictionary.schema.d.ts.map +1 -1
  68. package/dist/types/schemas/discussion.schema.d.ts +6 -6
  69. package/dist/types/schemas/oAuth2.schema.d.ts +5 -5
  70. package/dist/types/schemas/oAuth2.schema.d.ts.map +1 -1
  71. package/dist/types/schemas/organization.schema.d.ts +6 -6
  72. package/dist/types/schemas/plans.schema.d.ts +6 -6
  73. package/dist/types/schemas/project.schema.d.ts +6 -6
  74. package/dist/types/schemas/session.schema.d.ts +6 -6
  75. package/dist/types/schemas/tag.schema.d.ts +6 -6
  76. package/dist/types/schemas/tag.schema.d.ts.map +1 -1
  77. package/dist/types/schemas/user.schema.d.ts +6 -6
  78. package/dist/types/services/email.service.d.ts +11 -11
  79. package/dist/types/utils/AI/aiSdk.d.ts.map +1 -1
  80. package/dist/types/utils/AI/askDocQuestion/askDocQuestion.d.ts +2 -3
  81. package/dist/types/utils/AI/askDocQuestion/askDocQuestion.d.ts.map +1 -1
  82. package/dist/types/utils/AI/askDocQuestion/indexMarkdownFiles.d.ts +1 -4
  83. package/dist/types/utils/AI/askDocQuestion/indexMarkdownFiles.d.ts.map +1 -1
  84. package/dist/types/utils/AI/translateJSON/index.d.ts.map +1 -1
  85. package/dist/types/utils/filtersAndPagination/getDictionaryFiltersAndPagination.d.ts +2 -2
  86. package/dist/types/utils/filtersAndPagination/getDiscussionFiltersAndPagination.d.ts +2 -2
  87. package/dist/types/utils/filtersAndPagination/getOrganizationFiltersAndPagination.d.ts +2 -2
  88. package/dist/types/utils/filtersAndPagination/getProjectFiltersAndPagination.d.ts +2 -2
  89. package/dist/types/utils/filtersAndPagination/getTagFiltersAndPagination.d.ts +2 -2
  90. package/package.json +1 -1
@@ -3,29 +3,24 @@ You are an expert in internationalization, copy writing and content management.
  **Instructions:**
 
  1. **File Location:**
-
  - The content declaration files are located in the `{{filePath}}` directory relative to the project root.
 
  2. **Locales:**
-
  - Default locale: {{defaultLocale}}
  - Required Locales: {{otherLocales}} (add the missing locales in `t({ ... })` function)
 
  3. **Dictionary Format:**
-
  - Example format:
 
  {{declarationsContentTemplate}}
 
  4. **Audit Requirements:**
-
  - **Consistency:** Ensure that all keys have translations for all specified locales.
  - **Missing Content:** Identify any missing translations and specify the expected content.
  - **Misplaced Content:** Detect if any translations are placed under incorrect keys.
  - **Type Compliance:** Verify that the content types match the declarations (e.g., strings, string arrays).
 
  5. **Modification Guidelines:**
-
  - **Do Not Alter Structure:** If the file structure is correct, do not modify it. Only add, update, or remove content declarations as necessary.
  - **Do Not Change a value type** If a key value is like `exampleKey: "exampleValue"`, avoid a maximum to not change the value type to `exampleKey: t({ en: "exampleValue" })`.
  - **Return Only Final File Content:** Provide the updated file content without any additional comments or explanations.
@@ -36,9 +31,7 @@ You are an expert in internationalization, copy writing and content management.
  - **Respect the tags instructions:** If the tags instructions are provided, ensure that the audited file adheres to them.
 
  6. **Example Scenario:**
-
  - **Example 1:**
-
  - **Input File:**
 
  ```typescript
@@ -99,7 +92,6 @@ You are an expert in internationalization, copy writing and content management.
  - **Clarification:** In this scenario, since the input file is already valid and complete, the expected output should be identical to the input without any additional fields or comments.
 
  - **Example 2:**
-
  - **Input File:**
 
  ```typescript
@@ -146,10 +138,6 @@ You are an expert in internationalization, copy writing and content management.
 
  {{tagsInstructions}}
 
- **File to Audit:**
-
- {{fileContent}}
-
  **Expected Response:**
 
  After auditing, provide only the final content of the file as plain text without any Markdown or code block formatting. If no changes are needed, return the file content exactly as it is.
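Across all five PROMPT.md files in this release the pattern is the same: stray blank lines after headings are removed, and the trailing payload block (`**File to Audit:**` / `{{fileContent}}` here) is dropped from the template, suggesting the audited content is now supplied to the model outside the prompt text. For context, a minimal sketch of how `{{placeholder}}` slots like those above might be filled — `fillPrompt` is a hypothetical helper, not the package's actual API:

```typescript
// Hypothetical placeholder substitution, assuming simple string replacement.
const fillPrompt = (template: string, values: Record<string, string>): string =>
  // Unknown placeholders are left untouched rather than dropped.
  template.replace(/\{\{(\w+)\}\}/g, (_match, key: string) => values[key] ?? `{{${key}}}`);

const prompt = fillPrompt(
  'Default locale: {{defaultLocale}}\nRequired Locales: {{otherLocales}}',
  { defaultLocale: 'en', otherLocales: 'fr, es' }
);
// → 'Default locale: en\nRequired Locales: fr, es'
```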
@@ -9,11 +9,9 @@ You are an expert in internationalization, copy writing and content management.
  - **KeyPath** The KeyPath correspond to to the path to retrieve the targeted element from the `content` key of the dictionary file.
 
  2. **Locales:**
-
  - Required Locales: {{otherLocales}}
 
  3. **Audit Requirements:**
-
  - **Consistency:** Ensure that all keys have translations for all specified locales.
  - **Incoherence:** Ensure that all content is coherent and not misspelled.
  - **Missing Content:** Identify any missing translations and specify the expected content.
@@ -21,7 +19,6 @@ You are an expert in internationalization, copy writing and content management.
  - **Type Compliance:** Verify that the content types match the declarations (e.g., strings, string arrays).
 
  4. **Modification Guidelines:**
-
  - **Return Only the Targeted Content:** Provide the updated targeted content as plain text without any markdown, additional comments or explanations.
  - **Consider the locale context:** If the targeted field correspond to a specific language, contains similar languages, as `zh` or `en-GB`, consider return the content in this specified language.
  - **Fix Incoherent Content:** If the content is inconsistent, misspelled, understandable, or contains errors, fix it by providing a more accurate content.
@@ -29,9 +26,7 @@ You are an expert in internationalization, copy writing and content management.
  - **Respect the tags instructions:** If the tags instructions are provided, ensure that the audited file adheres to them.
 
  5. **Example Scenario:**
-
  - **Example 1:**
-
  - **Input File:**
 
  ```typescript
@@ -58,7 +53,6 @@ You are an expert in internationalization, copy writing and content management.
  Desarrolladores, Gestores de Contenido
 
  - **Example 2:**
-
  - **Input File:**
 
  ```typescript
@@ -95,15 +89,12 @@ You are an expert in internationalization, copy writing and content management.
  - **Input target:**
 
  `[{type: "object", key: "audienceType"},{type: "enumeration", key: ">2"},{type: "translation", key: "es"}]`
-
  - **Expected Output:**
 
  Desarrolladores, Gestores de Contenido
-
  - **Input target:**
 
  `[{type: "object", key: "audienceType"},{type: "enumeration", key: "1"},{type: "translation", key: "fr"}]`
-
  - **Expected Output:**
 
  Développeurs, Responsables de contenu
@@ -116,10 +107,6 @@ You are an expert in internationalization, copy writing and content management.
 
  {{tagsInstructions}}
 
- **File to Audit:**
-
- {{fileContent}}
-
  **KeyPath:**
 
  {{keyPath}}
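The `Input target` examples above show the KeyPath shape this prompt works with. A sketch of how such a path could be resolved against a dictionary's `content` node, under the assumption that each step simply indexes into the current node — the content shape and `resolveKeyPath` are illustrative, not the package's implementation:

```typescript
type KeyPathStep = {
  type: 'object' | 'enumeration' | 'translation';
  key: string;
};

// Assumed content shape: variant nodes keep their branches under the variant key.
const content: Record<string, any> = {
  audienceType: {
    '>2': { es: 'Desarrolladores, Gestores de Contenido' },
    '1': { fr: 'Développeurs, Responsables de contenu' },
  },
};

// Walk the path step by step; each step's `key` indexes into the current node.
const resolveKeyPath = (node: any, keyPath: KeyPathStep[]): unknown =>
  keyPath.reduce((current, step) => current?.[step.key], node);

resolveKeyPath(content, [
  { type: 'object', key: 'audienceType' },
  { type: 'enumeration', key: '>2' },
  { type: 'translation', key: 'es' },
]);
// → 'Desarrolladores, Gestores de Contenido'
```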
@@ -71,9 +71,3 @@ export default metadataContent;
  Here the list of existing tags as a context to help you to pick related ones.
 
  {{tags}}
-
- **Content declaration to describe:**
-
- This is the content declaration that you should consider to describe:
-
- {{contentDeclaration}}
@@ -12,12 +12,10 @@ Your role is to review a tag. A tag is attached to a content declaration and is
  ````
 
  1. **Audit Requirements:**
-
  - **Misplaced Content:** Detect each `title`, `description` and `instructions` are defined correct. If not, provide the expected content.
  - **Ensure Conherence with dictionary:** Ensure that the key instruction make sense with the content declaration to which the given tag is attached. If the instructions doesn't looks appropriate, suggest a new one.
 
  2. **Modification Guidelines:**
-
  - **Do Not Alter Structure:** If the file structure is correct, do not modify it. Only add, update, or remove content declarations as necessary.
  - **Return Only Final File Content:** Provide the updated file content without any additional comments or explanations.
 
@@ -25,14 +23,6 @@ Your role is to review a tag. A tag is attached to a content declaration and is
 
  {{applicationContext}}
 
- **Tags to Audit:**
-
- {{tag}}
-
- **Dictionary that attach the tag into:**
-
- {{contentDeclarations}}
-
  **Expected Response:**
 
  After auditing, provide only the final content of the file as plain text without any Markdown or code block formatting. If no changes are needed, return the file content exactly as it is.
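For reference, the shapes this prompt operates on, as implied by the template and by the controller source further down in this diff (which calls `getDictionariesByTags([tag.key], project.id)`); any field not named in the prompt is an assumption:

```typescript
type Tag = {
  key: string;
  title?: string;
  description?: string;
  instructions?: string;
};

type Dictionary = {
  key: string;
  tags?: string[]; // keys of the tags this content declaration is attached to
  content: Record<string, unknown>;
};

// Local equivalent of fetching the dictionaries attached to the audited tag.
const dictionariesForTag = (tag: Tag, dictionaries: Dictionary[]): Dictionary[] =>
  dictionaries.filter((dictionary) => dictionary.tags?.includes(tag.key));
```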
@@ -3,14 +3,12 @@ You are an expert in internationalization, copy writing and content management.
  **Instructions:**
 
  2. **Audit Requirements:**
-
  - **Consistency:** The dictionary format should be the same as the one provided in entry. You should not rename or translate the entry keys.
  - **Missing Content:** Identify any missing translations and specify the expected content.
  - **Misplaced Content:** Detect if any translations are placed under incorrect keys.
  - **Type Compliance:** Verify that the content types match the declarations (e.g., strings, string arrays).
 
  3. **Modification Guidelines:**
-
  - **Do Not Alter Structure:** If the file structure is correct, do not modify it. Only add, update, or remove content declarations as necessary.
  - **Missing Content:** If one key is missing from the Preset Output Content, or if the Preset Output Content is empty, the output content should be completed by translating the Entry Content to Translate into the output locale.
  - **Return Only Final File Content:** Provide the updated file content without any additional comments or explanations.
@@ -36,12 +34,6 @@ You are an expert in internationalization, copy writing and content management.
 
  {{dictionaryDescription}}
 
- **Entry Content to Translate:**
-
- - Given Language: {{entryLocale}}
-
- {{entryFileContent}}
-
  **Preset Output Content:**
 
  - Target Language: {{outputLocale}}
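The **Missing Content** guideline above describes a merge rule: keep the preset output where it exists and translate only the missing entries. A sketch of that rule for a flat dictionary, where `translate` is a hypothetical stand-in for the model call:

```typescript
// Complete a preset output by translating only the keys it is missing.
const completePresetOutput = async (
  entry: Record<string, string>,
  preset: Record<string, string>,
  outputLocale: string,
  translate: (text: string, locale: string) => Promise<string>
): Promise<Record<string, string>> => {
  const output: Record<string, string> = { ...preset };
  for (const [key, value] of Object.entries(entry)) {
    // Entry keys are never renamed or translated; only missing values are
    // filled by translating the entry content into the output locale.
    if (!(key in output)) {
      output[key] = await translate(value, outputLocale);
    }
  }
  return output;
};
```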
@@ -244,7 +244,6 @@ const askDocQuestion = async (req, res, _next) => {
  try {
  aiConfig = await require_utils_AI_aiSdk.getAIConfig(res, {
  userOptions: {},
- defaultOptions: require_utils_AI_askDocQuestion_askDocQuestion.aiDefaultOptions,
  accessType: ["public"]
  });
  } catch (_error) {
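This hunk removes the `defaultOptions` argument from the `askDocQuestion` call to `getAIConfig` (the ESM build and the source maps below change accordingly), which implies `getAIConfig` now resolves default options itself. A sketch of that resolution order under that assumption; the option names are illustrative, not the package's actual internals:

```typescript
type AIOptions = {
  provider?: string;
  model?: string;
  temperature?: number;
};

// Hypothetical fallback order after this change: caller-supplied defaults
// (now optional) are layered under user options, with getAIConfig's own
// built-ins at the bottom.
const resolveAIOptions = (
  userOptions: AIOptions,
  defaultOptions?: AIOptions,
  builtins: AIOptions = { temperature: 0.1 } // assumed built-in default
): AIOptions => ({ ...builtins, ...defaultOptions, ...userOptions });
```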
@@ -1 +1 @@
- {"version":3,"file":"ai.controller.cjs","names":["aiConfig: AIConfig","getAIConfig","formatResponse","tags: Tag[]","getTagsByKeys","dictionaries: Dictionary[]","getDictionariesByTags","DiscussionModel","getDiscussionFiltersAndPagination","numberOfMessagesById: Record<string, number>","formatPaginatedResponse"],"sources":["../../../src/controllers/ai.controller.ts"],"sourcesContent":["import type { KeyPath, Locale } from '@intlayer/types';\nimport type { ResponseWithSession } from '@middlewares/sessionAuth.middleware';\nimport { getDictionariesByTags } from '@services/dictionary.service';\nimport * as tagService from '@services/tag.service';\nimport { getTagsByKeys } from '@services/tag.service';\nimport {\n type AIConfig,\n type AIOptions,\n type ChatCompletionRequestMessage,\n getAIConfig,\n} from '@utils/AI/aiSdk';\nimport * as askDocQuestionUtil from '@utils/AI/askDocQuestion/askDocQuestion';\nimport * as auditContentDeclarationUtil from '@utils/AI/auditDictionary';\nimport * as auditContentDeclarationFieldUtil from '@utils/AI/auditDictionaryField';\nimport * as auditContentDeclarationMetadataUtil from '@utils/AI/auditDictionaryMetadata';\nimport * as auditTagUtil from '@utils/AI/auditTag';\nimport * as autocompleteUtil from '@utils/AI/autocomplete';\nimport * as customQueryUtil from '@utils/AI/customQuery';\nimport * as translateJSONUtil from '@utils/AI/translateJSON';\nimport { type AppError, ErrorHandler } from '@utils/errors';\nimport {\n type DiscussionFiltersParams,\n getDiscussionFiltersAndPagination,\n} from '@utils/filtersAndPagination/getDiscussionFiltersAndPagination';\nimport {\n formatPaginatedResponse,\n formatResponse,\n type PaginatedResponse,\n type ResponseData,\n} from '@utils/responseData';\nimport type { NextFunction, Request } from 'express';\nimport { DiscussionModel } from '@/models/discussion.model';\nimport type { Dictionary } from '@/types/dictionary.types';\nimport type { DiscussionAPI } from '@/types/discussion.types';\nimport type { Tag, TagAPI } from '@/types/tag.types';\n\ntype ReplaceAIConfigByOptions<T> = Omit<T, 'aiConfig'> & {\n aiOptions?: AIOptions;\n};\n\nexport type CustomQueryBody =\n ReplaceAIConfigByOptions<customQueryUtil.CustomQueryOptions> & {\n tagsKeys?: string[];\n applicationContext?: string;\n };\nexport type CustomQueryResult =\n ResponseData<customQueryUtil.CustomQueryResultData>;\n\nexport const customQuery = async (\n req: Request<CustomQueryBody>,\n res: ResponseWithSession<CustomQueryResult>,\n _next: NextFunction\n): Promise<void> => {\n // biome-ignore lint/correctness/noUnusedVariables: Just filter out tagsKeys\n const { aiOptions, tagsKeys, ...rest } = req.body;\n\n let aiConfig: AIConfig;\n try {\n aiConfig = await getAIConfig(res, {\n userOptions: aiOptions,\n defaultOptions: customQueryUtil.aiDefaultOptions,\n accessType: ['registered_user', 'apiKey'],\n });\n } catch (_error) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n\n try {\n const auditResponse = await customQueryUtil.customQuery({\n ...rest,\n aiConfig,\n applicationContext: aiOptions?.applicationContext,\n });\n\n if (!auditResponse) {\n ErrorHandler.handleGenericErrorResponse(res, 'QUERY_FAILED');\n return;\n }\n\n const responseData = formatResponse<customQueryUtil.CustomQueryResultData>({\n data: auditResponse,\n });\n\n res.json(responseData);\n return;\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n\nexport type TranslateJSONBody = Omit<\n 
ReplaceAIConfigByOptions<translateJSONUtil.TranslateJSONOptions>,\n 'tags'\n> & {\n tagsKeys?: string[];\n};\nexport type TranslateJSONResult =\n ResponseData<translateJSONUtil.TranslateJSONResultData>;\n\nexport const translateJSON = async (\n req: Request<TranslateJSONBody>,\n res: ResponseWithSession<TranslateJSONResult>,\n _next: NextFunction\n): Promise<void> => {\n const { project } = res.locals;\n const { aiOptions, tagsKeys, ...rest } = req.body;\n\n let aiConfig: AIConfig;\n try {\n aiConfig = await getAIConfig(res, {\n userOptions: aiOptions,\n defaultOptions: translateJSONUtil.aiDefaultOptions,\n accessType: ['registered_user', 'apiKey'],\n });\n } catch (_error) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n\n try {\n let tags: Tag[] = [];\n\n if (project?.organizationId && tagsKeys) {\n tags = await getTagsByKeys(tagsKeys, project.organizationId);\n }\n\n const auditResponse = await translateJSONUtil.translateJSON({\n ...rest,\n aiConfig,\n applicationContext: aiOptions?.applicationContext,\n tags,\n });\n\n if (!auditResponse) {\n ErrorHandler.handleGenericErrorResponse(res, 'AUDIT_FAILED');\n return;\n }\n\n const responseData =\n formatResponse<translateJSONUtil.TranslateJSONResultData>({\n data: auditResponse,\n });\n\n res.json(responseData);\n return;\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n\nexport type AuditContentDeclarationBody = {\n aiOptions?: AIOptions;\n locales: Locale[];\n defaultLocale: Locale;\n fileContent: string;\n filePath?: string;\n tagsKeys?: string[];\n};\nexport type AuditContentDeclarationResult =\n ResponseData<auditContentDeclarationUtil.AuditFileResultData>;\n\n/**\n * Retrieves a list of dictionaries based on filters and pagination.\n */\nexport const auditContentDeclaration = async (\n req: Request<AuditContentDeclarationBody>,\n res: ResponseWithSession<AuditContentDeclarationResult>,\n _next: NextFunction\n): Promise<void> => {\n const { project } = res.locals;\n const { fileContent, filePath, aiOptions, locales, defaultLocale, tagsKeys } =\n req.body;\n\n let aiConfig: AIConfig;\n try {\n aiConfig = await getAIConfig(res, {\n userOptions: aiOptions,\n defaultOptions: auditContentDeclarationUtil.aiDefaultOptions,\n accessType: ['registered_user', 'apiKey'],\n });\n } catch (_error) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n\n try {\n let tags: Tag[] = [];\n\n if (project?.organizationId) {\n tags = await getTagsByKeys(tagsKeys, project.organizationId);\n }\n\n const auditResponse = await auditContentDeclarationUtil.auditDictionary({\n fileContent,\n filePath,\n aiConfig,\n applicationContext: aiOptions?.applicationContext,\n locales,\n defaultLocale,\n tags,\n });\n\n if (!auditResponse) {\n ErrorHandler.handleGenericErrorResponse(res, 'AUDIT_FAILED');\n return;\n }\n\n const responseData =\n formatResponse<auditContentDeclarationUtil.AuditFileResultData>({\n data: auditResponse,\n });\n\n res.json(responseData);\n return;\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n\nexport type AuditContentDeclarationFieldBody = {\n aiOptions?: AIOptions;\n locales: Locale[];\n fileContent: string;\n filePath?: string;\n tagsKeys?: string[];\n keyPath: KeyPath[];\n};\nexport type AuditContentDeclarationFieldResult =\n ResponseData<auditContentDeclarationFieldUtil.AuditDictionaryFieldResultData>;\n\n/**\n * Retrieves a list of dictionaries based on filters and 
pagination.\n */\nexport const auditContentDeclarationField = async (\n req: Request<AuditContentDeclarationFieldBody>,\n res: ResponseWithSession<AuditContentDeclarationFieldResult>,\n _next: NextFunction\n): Promise<void> => {\n const { project } = res.locals;\n const { fileContent, aiOptions, locales, tagsKeys, keyPath } = req.body;\n\n let aiConfig: AIConfig;\n try {\n aiConfig = await getAIConfig(res, {\n userOptions: aiOptions,\n defaultOptions: auditContentDeclarationFieldUtil.aiDefaultOptions,\n accessType: ['registered_user', 'apiKey'],\n });\n } catch (_error) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n\n try {\n let tags: Tag[] = [];\n\n if (project?.organizationId) {\n tags = await getTagsByKeys(tagsKeys, project.organizationId);\n }\n\n const auditResponse =\n await auditContentDeclarationFieldUtil.auditDictionaryField({\n fileContent,\n aiConfig,\n applicationContext: aiOptions?.applicationContext,\n locales,\n tags,\n keyPath,\n });\n\n if (!auditResponse) {\n ErrorHandler.handleGenericErrorResponse(res, 'AUDIT_FAILED');\n return;\n }\n\n const responseData =\n formatResponse<auditContentDeclarationFieldUtil.AuditDictionaryFieldResultData>(\n {\n data: auditResponse,\n }\n );\n\n res.json(responseData);\n return;\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n\nexport type AuditContentDeclarationMetadataBody = {\n aiOptions?: AIOptions;\n fileContent: string;\n};\n\nexport type AuditContentDeclarationMetadataResult =\n ResponseData<auditContentDeclarationMetadataUtil.AuditFileResultData>;\n\n/**\n * Retrieves a list of dictionaries based on filters and pagination.\n */\nexport const auditContentDeclarationMetadata = async (\n req: Request<AuditContentDeclarationMetadataBody>,\n res: ResponseWithSession<AuditContentDeclarationMetadataResult>,\n _next: NextFunction\n): Promise<void> => {\n const { organization } = res.locals;\n const { fileContent, aiOptions } = req.body;\n\n let aiConfig: AIConfig;\n try {\n aiConfig = await getAIConfig(res, {\n userOptions: aiOptions,\n defaultOptions: auditContentDeclarationMetadataUtil.aiDefaultOptions,\n accessType: ['registered_user', 'apiKey'],\n });\n } catch (_error) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n\n try {\n const tags: Tag[] = await tagService.findTags(\n {\n organizationId: organization?.id,\n },\n 0,\n 1000\n );\n\n const auditResponse =\n await auditContentDeclarationMetadataUtil.auditDictionaryMetadata({\n fileContent,\n aiConfig,\n applicationContext: aiOptions?.applicationContext,\n tags,\n });\n\n if (!auditResponse) {\n ErrorHandler.handleGenericErrorResponse(res, 'AUDIT_FAILED');\n return;\n }\n\n const responseData =\n formatResponse<auditContentDeclarationMetadataUtil.AuditFileResultData>({\n data: auditResponse,\n });\n\n res.json(responseData);\n return;\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n\nexport type AuditTagBody = {\n aiOptions?: AIOptions;\n tag: TagAPI;\n};\nexport type AuditTagResult = ResponseData<auditTagUtil.TranslateJSONResultData>;\n\n/**\n * Retrieves a list of dictionaries based on filters and pagination.\n */\nexport const auditTag = async (\n req: Request<undefined, undefined, AuditTagBody>,\n res: ResponseWithSession<AuditTagResult>,\n _next: NextFunction\n): Promise<void> => {\n const { project } = res.locals;\n const { aiOptions, tag } = req.body;\n\n let aiConfig: AIConfig;\n try {\n 
aiConfig = await getAIConfig(res, {\n userOptions: aiOptions,\n defaultOptions: auditTagUtil.aiDefaultOptions,\n accessType: ['registered_user', 'apiKey'],\n });\n } catch (_error) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n\n try {\n let dictionaries: Dictionary[] = [];\n if (project?.organizationId) {\n dictionaries = await getDictionariesByTags([tag.key], project.id);\n }\n\n const auditResponse = await auditTagUtil.auditTag({\n aiConfig,\n dictionaries,\n tag,\n applicationContext: aiOptions?.applicationContext,\n });\n\n if (!auditResponse) {\n ErrorHandler.handleGenericErrorResponse(res, 'AUDIT_FAILED');\n return;\n }\n\n const responseData = formatResponse<auditTagUtil.TranslateJSONResultData>({\n data: auditResponse,\n });\n\n res.json(responseData);\n return;\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n\nexport type AskDocQuestionBody = {\n messages: ChatCompletionRequestMessage[];\n discussionId: string;\n};\nexport type AskDocQuestionResult =\n ResponseData<askDocQuestionUtil.AskDocQuestionResult>;\n\nexport const askDocQuestion = async (\n req: Request<undefined, undefined, AskDocQuestionBody>,\n res: ResponseWithSession<AskDocQuestionResult>,\n _next: NextFunction\n): Promise<void> => {\n const { messages = [], discussionId } = req.body;\n const { user, project, organization } = res.locals;\n\n let aiConfig: AIConfig;\n try {\n aiConfig = await getAIConfig(res, {\n userOptions: {},\n defaultOptions: askDocQuestionUtil.aiDefaultOptions,\n accessType: ['public'],\n });\n } catch (_error) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n\n // 1. Prepare SSE headers and flush them NOW\n res.setHeader('Content-Type', 'text/event-stream; charset=utf-8');\n res.setHeader('Cache-Control', 'no-cache, no-transform');\n res.setHeader('Connection', 'keep-alive');\n res.setHeader('X-Accel-Buffering', 'no'); // disable nginx buffering\n res.flushHeaders?.();\n res.write(': connected\\n\\n'); // initial comment keeps some browsers happy\n res.flush?.();\n\n // 2. Kick off the upstream stream WITHOUT awaiting it\n askDocQuestionUtil\n .askDocQuestion(messages, aiConfig, {\n onMessage: (chunk) => {\n res.write(`data: ${JSON.stringify({ chunk })}\\n\\n`);\n res.flush?.();\n },\n })\n .then(async (fullResponse) => {\n const lastUserMessageContent = messages.findLast(\n (message) => message.role === 'user'\n )?.content;\n const lastUserMessageNbWords = lastUserMessageContent\n ? lastUserMessageContent.split(' ').length\n : 0;\n if (lastUserMessageNbWords > 2) {\n // If the last user message is less than 3 words, don't persist the discussion\n // Example: \"Hello\", \"Hi\", \"Hey\", \"test\", etc.\n\n // 3. Persist discussion while the client already has all chunks\n await DiscussionModel.findOneAndUpdate(\n { discussionId },\n {\n $set: {\n discussionId,\n userId: user?.id,\n projectId: project?.id,\n organizationId: organization?.id,\n messages: [\n ...messages.map((msg) => ({\n role: msg.role,\n content: msg.content,\n timestamp: msg.timestamp,\n })),\n {\n role: 'assistant',\n content: fullResponse.response,\n relatedFiles: fullResponse.relatedFiles,\n timestamp: new Date(),\n },\n ],\n },\n },\n { upsert: true, new: true }\n );\n }\n\n // 4. 
Tell the client we're done and close the stream\n res.write(\n `data: ${JSON.stringify({ done: true, response: fullResponse })}\\n\\n`\n );\n res.end();\n })\n .catch((err) => {\n // propagate error as an SSE event so the client knows why it closed\n res.write(\n `event: error\\ndata: ${JSON.stringify({ message: err.message })}\\n\\n`\n );\n res.end();\n });\n};\n\nexport type AutocompleteBody = {\n text: string;\n aiOptions?: AIOptions;\n contextBefore?: string;\n currentLine?: string;\n contextAfter?: string;\n};\n\nexport type AutocompleteResponse = ResponseData<{\n autocompletion: string;\n}>;\n\nexport const autocomplete = async (\n req: Request<AutocompleteBody>,\n res: ResponseWithSession<AutocompleteResponse>,\n _next: NextFunction\n): Promise<void> => {\n try {\n const { text, aiOptions, contextBefore, currentLine, contextAfter } =\n req.body;\n\n let aiConfig: AIConfig;\n try {\n aiConfig = await getAIConfig(res, {\n userOptions: aiOptions,\n defaultOptions: autocompleteUtil.aiDefaultOptions,\n accessType: ['public'],\n });\n } catch (_error) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n\n const response = (await autocompleteUtil.autocomplete({\n text,\n aiConfig,\n applicationContext: aiOptions?.applicationContext,\n contextBefore,\n currentLine,\n contextAfter,\n })) ?? {\n autocompletion: '',\n tokenUsed: 0,\n };\n\n const responseData =\n formatResponse<autocompleteUtil.AutocompleteFileResultData>({\n data: response,\n });\n\n res.json(responseData);\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n\nexport type GetDiscussionsParams =\n | ({\n page?: string | number;\n pageSize?: string | number;\n includeMessages?: 'true' | 'false';\n } & DiscussionFiltersParams)\n | undefined;\n\nexport type GetDiscussionsResult = PaginatedResponse<DiscussionAPI>;\n\n/**\n * Retrieves a list of discussions with filters and pagination.\n * Only the owner or admins can access. By default, users only see their own.\n */\nexport const getDiscussions = async (\n req: Request<GetDiscussionsParams>,\n res: ResponseWithSession<GetDiscussionsResult>,\n _next: NextFunction\n): Promise<void> => {\n const { user, roles } = res.locals;\n const { filters, sortOptions, pageSize, skip, page, getNumberOfPages } =\n getDiscussionFiltersAndPagination(req, res);\n const includeMessagesParam = (req.query as any)?.includeMessages as\n | 'true'\n | 'false'\n | undefined;\n const includeMessages = includeMessagesParam !== 'false';\n\n if (!user) {\n ErrorHandler.handleGenericErrorResponse(res, 'USER_NOT_DEFINED');\n return;\n }\n\n try {\n const projection = includeMessages ? {} : { messages: 0 };\n const discussions = await DiscussionModel.find(filters, projection)\n .sort(sortOptions)\n .skip(skip)\n .limit(pageSize)\n .lean();\n\n // Compute number of messages for each discussion\n const numberOfMessagesById: Record<string, number> = {};\n if (!includeMessages && discussions.length > 0) {\n const ids = discussions.map((d: any) => d._id);\n const counts = await DiscussionModel.aggregate([\n { $match: { _id: { $in: ids } } },\n {\n $project: {\n numberOfMessages: { $size: { $ifNull: ['$messages', []] } },\n },\n },\n ]);\n for (const c of counts as any[]) {\n numberOfMessagesById[String(c._id)] = c.numberOfMessages ?? 
0;\n }\n }\n\n // Permission: allow admin, or the owner for all returned entries\n const allOwnedByUser = discussions.every(\n (d) => String(d.userId) === String(user.id)\n );\n const isAllowed = roles.includes('admin') || allOwnedByUser;\n\n if (!isAllowed) {\n ErrorHandler.handleGenericErrorResponse(res, 'PERMISSION_DENIED');\n return;\n }\n\n const totalItems = await DiscussionModel.countDocuments(filters);\n\n const responseData = formatPaginatedResponse({\n data: discussions.map((d: any) => ({\n ...d,\n id: String(d._id ?? d.id),\n numberOfMessages: includeMessages\n ? Array.isArray(d.messages)\n ? d.messages.length\n : 0\n : (numberOfMessagesById[String(d._id ?? d.id)] ?? 0),\n })),\n page,\n pageSize,\n totalPages: getNumberOfPages(totalItems),\n totalItems,\n });\n\n res.json(responseData as any);\n return;\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n"],"mappings":";;;;;;;;;;;;;;;;;AAgDA,MAAa,cAAc,OACzB,KACA,KACA,UACkB;CAElB,MAAM,EAAE,WAAW,UAAU,GAAG,SAAS,IAAI;CAE7C,IAAIA;AACJ,KAAI;AACF,aAAW,MAAMC,mCAAY,KAAK;GAChC,aAAa;GACb;GACA,YAAY,CAAC,mBAAmB,SAAS;GAC1C,CAAC;UACK,QAAQ;AACf,iDAAa,2BAA2B,KAAK,mBAAmB;AAChE;;AAGF,KAAI;EACF,MAAM,gBAAgB,qDAAkC;GACtD,GAAG;GACH;GACA,oBAAoB,WAAW;GAChC,CAAC;AAEF,MAAI,CAAC,eAAe;AAClB,kDAAa,2BAA2B,KAAK,eAAe;AAC5D;;EAGF,MAAM,eAAeC,0CAAsD,EACzE,MAAM,eACP,CAAC;AAEF,MAAI,KAAK,aAAa;AACtB;UACO,OAAO;AACd,iDAAa,uBAAuB,KAAK,MAAkB;AAC3D;;;AAaJ,MAAa,gBAAgB,OAC3B,KACA,KACA,UACkB;CAClB,MAAM,EAAE,YAAY,IAAI;CACxB,MAAM,EAAE,WAAW,UAAU,GAAG,SAAS,IAAI;CAE7C,IAAIF;AACJ,KAAI;AACF,aAAW,MAAMC,mCAAY,KAAK;GAChC,aAAa;GACb;GACA,YAAY,CAAC,mBAAmB,SAAS;GAC1C,CAAC;UACK,QAAQ;AACf,iDAAa,2BAA2B,KAAK,mBAAmB;AAChE;;AAGF,KAAI;EACF,IAAIE,OAAc,EAAE;AAEpB,MAAI,SAAS,kBAAkB,SAC7B,QAAO,MAAMC,2CAAc,UAAU,QAAQ,eAAe;EAG9D,MAAM,gBAAgB,yDAAsC;GAC1D,GAAG;GACH;GACA,oBAAoB,WAAW;GAC/B;GACD,CAAC;AAEF,MAAI,CAAC,eAAe;AAClB,kDAAa,2BAA2B,KAAK,eAAe;AAC5D;;EAGF,MAAM,eACJF,0CAA0D,EACxD,MAAM,eACP,CAAC;AAEJ,MAAI,KAAK,aAAa;AACtB;UACO,OAAO;AACd,iDAAa,uBAAuB,KAAK,MAAkB;AAC3D;;;;;;AAkBJ,MAAa,0BAA0B,OACrC,KACA,KACA,UACkB;CAClB,MAAM,EAAE,YAAY,IAAI;CACxB,MAAM,EAAE,aAAa,UAAU,WAAW,SAAS,eAAe,aAChE,IAAI;CAEN,IAAIF;AACJ,KAAI;AACF,aAAW,MAAMC,mCAAY,KAAK;GAChC,aAAa;GACb;GACA,YAAY,CAAC,mBAAmB,SAAS;GAC1C,CAAC;UACK,QAAQ;AACf,iDAAa,2BAA2B,KAAK,mBAAmB;AAChE;;AAGF,KAAI;EACF,IAAIE,OAAc,EAAE;AAEpB,MAAI,SAAS,eACX,QAAO,MAAMC,2CAAc,UAAU,QAAQ,eAAe;EAG9D,MAAM,gBAAgB,6DAAkD;GACtE;GACA;GACA;GACA,oBAAoB,WAAW;GAC/B;GACA;GACA;GACD,CAAC;AAEF,MAAI,CAAC,eAAe;AAClB,kDAAa,2BAA2B,KAAK,eAAe;AAC5D;;EAGF,MAAM,eACJF,0CAAgE,EAC9D,MAAM,eACP,CAAC;AAEJ,MAAI,KAAK,aAAa;AACtB;UACO,OAAO;AACd,iDAAa,uBAAuB,KAAK,MAAkB;AAC3D;;;;;;AAkBJ,MAAa,+BAA+B,OAC1C,KACA,KACA,UACkB;CAClB,MAAM,EAAE,YAAY,IAAI;CACxB,MAAM,EAAE,aAAa,WAAW,SAAS,UAAU,YAAY,IAAI;CAEnE,IAAIF;AACJ,KAAI;AACF,aAAW,MAAMC,mCAAY,KAAK;GAChC,aAAa;GACb;GACA,YAAY,CAAC,mBAAmB,SAAS;GAC1C,CAAC;UACK,QAAQ;AACf,iDAAa,2BAA2B,KAAK,mBAAmB;AAChE;;AAGF,KAAI;EACF,IAAIE,OAAc,EAAE;AAEpB,MAAI,SAAS,eACX,QAAO,MAAMC,2CAAc,UAAU,QAAQ,eAAe;EAG9D,MAAM,gBACJ,uEAA4D;GAC1D;GACA;GACA,oBAAoB,WAAW;GAC/B;GACA;GACA;GACD,CAAC;AAEJ,MAAI,CAAC,eAAe;AAClB,kDAAa,2BAA2B,KAAK,eAAe;AAC5D;;EAGF,MAAM,eACJF,0CACE,EACE,MAAM,eACP,CACF;AAEH,MAAI,KAAK,aAAa;AACtB;UACO,OAAO;AACd,iDAAa,uBAAuB,KAAK,MAAkB;AAC3D;;;;;;AAeJ,MAAa,kCAAkC,OAC7C,KACA,KACA,UACkB;CAClB,MAAM,EAAE,iBAAiB,IAAI;CAC7B,MAAM,EAAE,aAAa,cAAc,IAAI;CAEvC,IAAIF;AACJ,KAAI;AACF,aAAW,MAAMC,mCAAY,KAAK;GAChC,aAAa;GACb;GACA,YAAY,CAAC,mBAAmB,SAAS;GAC1C,CAAC;UACK,QAAQ;AACf,iDAAa,2BAA2B,KAAK,mBAAmB;AAChE;;AAGF,KAAI;EACF,MAAME,OAAc,4CAClB,EACE,gBAAgB,cAAc,IAC/B,EAC
D,GACA,IACD;EAED,MAAM,gBACJ,6EAAkE;GAChE;GACA;GACA,oBAAoB,WAAW;GAC/B;GACD,CAAC;AAEJ,MAAI,CAAC,eAAe;AAClB,kDAAa,2BAA2B,KAAK,eAAe;AAC5D;;EAGF,MAAM,eACJD,0CAAwE,EACtE,MAAM,eACP,CAAC;AAEJ,MAAI,KAAK,aAAa;AACtB;UACO,OAAO;AACd,iDAAa,uBAAuB,KAAK,MAAkB;AAC3D;;;;;;AAaJ,MAAa,WAAW,OACtB,KACA,KACA,UACkB;CAClB,MAAM,EAAE,YAAY,IAAI;CACxB,MAAM,EAAE,WAAW,QAAQ,IAAI;CAE/B,IAAIF;AACJ,KAAI;AACF,aAAW,MAAMC,mCAAY,KAAK;GAChC,aAAa;GACb;GACA,YAAY,CAAC,mBAAmB,SAAS;GAC1C,CAAC;UACK,QAAQ;AACf,iDAAa,2BAA2B,KAAK,mBAAmB;AAChE;;AAGF,KAAI;EACF,IAAII,eAA6B,EAAE;AACnC,MAAI,SAAS,eACX,gBAAe,MAAMC,0DAAsB,CAAC,IAAI,IAAI,EAAE,QAAQ,GAAG;EAGnE,MAAM,gBAAgB,+CAA4B;GAChD;GACA;GACA;GACA,oBAAoB,WAAW;GAChC,CAAC;AAEF,MAAI,CAAC,eAAe;AAClB,kDAAa,2BAA2B,KAAK,eAAe;AAC5D;;EAGF,MAAM,eAAeJ,0CAAqD,EACxE,MAAM,eACP,CAAC;AAEF,MAAI,KAAK,aAAa;AACtB;UACO,OAAO;AACd,iDAAa,uBAAuB,KAAK,MAAkB;AAC3D;;;AAWJ,MAAa,iBAAiB,OAC5B,KACA,KACA,UACkB;CAClB,MAAM,EAAE,WAAW,EAAE,EAAE,iBAAiB,IAAI;CAC5C,MAAM,EAAE,MAAM,SAAS,iBAAiB,IAAI;CAE5C,IAAIF;AACJ,KAAI;AACF,aAAW,MAAMC,mCAAY,KAAK;GAChC,aAAa,EAAE;GACf;GACA,YAAY,CAAC,SAAS;GACvB,CAAC;UACK,QAAQ;AACf,iDAAa,2BAA2B,KAAK,mBAAmB;AAChE;;AAIF,KAAI,UAAU,gBAAgB,mCAAmC;AACjE,KAAI,UAAU,iBAAiB,yBAAyB;AACxD,KAAI,UAAU,cAAc,aAAa;AACzC,KAAI,UAAU,qBAAqB,KAAK;AACxC,KAAI,gBAAgB;AACpB,KAAI,MAAM,kBAAkB;AAC5B,KAAI,SAAS;AAGb,+DACkB,UAAU,UAAU,EAClC,YAAY,UAAU;AACpB,MAAI,MAAM,SAAS,KAAK,UAAU,EAAE,OAAO,CAAC,CAAC,MAAM;AACnD,MAAI,SAAS;IAEhB,CAAC,CACD,KAAK,OAAO,iBAAiB;EAC5B,MAAM,yBAAyB,SAAS,UACrC,YAAY,QAAQ,SAAS,OAC/B,EAAE;AAIH,OAH+B,yBAC3B,uBAAuB,MAAM,IAAI,CAAC,SAClC,KACyB,EAK3B,OAAMM,gDAAgB,iBACpB,EAAE,cAAc,EAChB,EACE,MAAM;GACJ;GACA,QAAQ,MAAM;GACd,WAAW,SAAS;GACpB,gBAAgB,cAAc;GAC9B,UAAU,CACR,GAAG,SAAS,KAAK,SAAS;IACxB,MAAM,IAAI;IACV,SAAS,IAAI;IACb,WAAW,IAAI;IAChB,EAAE,EACH;IACE,MAAM;IACN,SAAS,aAAa;IACtB,cAAc,aAAa;IAC3B,2BAAW,IAAI,MAAM;IACtB,CACF;GACF,EACF,EACD;GAAE,QAAQ;GAAM,KAAK;GAAM,CAC5B;AAIH,MAAI,MACF,SAAS,KAAK,UAAU;GAAE,MAAM;GAAM,UAAU;GAAc,CAAC,CAAC,MACjE;AACD,MAAI,KAAK;GACT,CACD,OAAO,QAAQ;AAEd,MAAI,MACF,uBAAuB,KAAK,UAAU,EAAE,SAAS,IAAI,SAAS,CAAC,CAAC,MACjE;AACD,MAAI,KAAK;GACT;;AAeN,MAAa,eAAe,OAC1B,KACA,KACA,UACkB;AAClB,KAAI;EACF,MAAM,EAAE,MAAM,WAAW,eAAe,aAAa,iBACnD,IAAI;EAEN,IAAIP;AACJ,MAAI;AACF,cAAW,MAAMC,mCAAY,KAAK;IAChC,aAAa;IACb;IACA,YAAY,CAAC,SAAS;IACvB,CAAC;WACK,QAAQ;AACf,kDAAa,2BAA2B,KAAK,mBAAmB;AAChE;;EAeF,MAAM,eACJC,0CAA4D,EAC1D,MAdc,uDAAoC;GACpD;GACA;GACA,oBAAoB,WAAW;GAC/B;GACA;GACA;GACD,CAAC,IAAK;GACL,gBAAgB;GAChB,WAAW;GACZ,EAKE,CAAC;AAEJ,MAAI,KAAK,aAAa;UACf,OAAO;AACd,iDAAa,uBAAuB,KAAK,MAAkB;AAC3D;;;;;;;AAkBJ,MAAa,iBAAiB,OAC5B,KACA,KACA,UACkB;CAClB,MAAM,EAAE,MAAM,UAAU,IAAI;CAC5B,MAAM,EAAE,SAAS,aAAa,UAAU,MAAM,MAAM,qBAClDM,uGAAkC,KAAK,IAAI;CAK7C,MAAM,kBAJwB,IAAI,OAAe,oBAIA;AAEjD,KAAI,CAAC,MAAM;AACT,iDAAa,2BAA2B,KAAK,mBAAmB;AAChE;;AAGF,KAAI;EACF,MAAM,aAAa,kBAAkB,EAAE,GAAG,EAAE,UAAU,GAAG;EACzD,MAAM,cAAc,MAAMD,gDAAgB,KAAK,SAAS,WAAW,CAChE,KAAK,YAAY,CACjB,KAAK,KAAK,CACV,MAAM,SAAS,CACf,MAAM;EAGT,MAAME,uBAA+C,EAAE;AACvD,MAAI,CAAC,mBAAmB,YAAY,SAAS,GAAG;GAC9C,MAAM,MAAM,YAAY,KAAK,MAAW,EAAE,IAAI;GAC9C,MAAM,SAAS,MAAMF,gDAAgB,UAAU,CAC7C,EAAE,QAAQ,EAAE,KAAK,EAAE,KAAK,KAAK,EAAE,EAAE,EACjC,EACE,UAAU,EACR,kBAAkB,EAAE,OAAO,EAAE,SAAS,CAAC,aAAa,EAAE,CAAC,EAAE,EAAE,EAC5D,EACF,CACF,CAAC;AACF,QAAK,MAAM,KAAK,OACd,sBAAqB,OAAO,EAAE,IAAI,IAAI,EAAE,oBAAoB;;EAKhE,MAAM,iBAAiB,YAAY,OAChC,MAAM,OAAO,EAAE,OAAO,KAAK,OAAO,KAAK,GAAG,CAC5C;AAGD,MAAI,EAFc,MAAM,SAAS,QAAQ,IAAI,iBAE7B;AACd,kDAAa,2BAA2B,KAAK,oBAAoB;AACjE;;EAGF,MAAM,aAAa,MAAMA,gDAAgB,eAAe,QAAQ;EAEhE,MAAM,eAAeG,mDAAwB;GAC3C,MAAM,YAAY,KAAK,OAAY;IACjC,GAAG;IACH,IAAI,OAAO,EAAE,OAAO,EAAE,GAAG;IACzB,kBAAkB,kBACd,MAAM,QAAQ,EAAE,
SAAS,GACvB,EAAE,SAAS,SACX,IACD,qBAAqB,OAAO,EAAE,OAAO,EAAE,GAAG,KAAK;IACrD,EAAE;GACH;GACA;GACA,YAAY,iBAAiB,WAAW;GACxC;GACD,CAAC;AAEF,MAAI,KAAK,aAAoB;AAC7B;UACO,OAAO;AACd,iDAAa,uBAAuB,KAAK,MAAkB;AAC3D"}
+ {"version":3,"file":"ai.controller.cjs","names":["aiConfig: AIConfig","getAIConfig","formatResponse","tags: Tag[]","getTagsByKeys","dictionaries: Dictionary[]","getDictionariesByTags","DiscussionModel","getDiscussionFiltersAndPagination","numberOfMessagesById: Record<string, number>","formatPaginatedResponse"],"sources":["../../../src/controllers/ai.controller.ts"],"sourcesContent":["import type { KeyPath, Locale } from '@intlayer/types';\nimport type { ResponseWithSession } from '@middlewares/sessionAuth.middleware';\nimport { getDictionariesByTags } from '@services/dictionary.service';\nimport * as tagService from '@services/tag.service';\nimport { getTagsByKeys } from '@services/tag.service';\nimport {\n type AIConfig,\n type AIOptions,\n type ChatCompletionRequestMessage,\n getAIConfig,\n} from '@utils/AI/aiSdk';\nimport * as askDocQuestionUtil from '@utils/AI/askDocQuestion/askDocQuestion';\nimport * as auditContentDeclarationUtil from '@utils/AI/auditDictionary';\nimport * as auditContentDeclarationFieldUtil from '@utils/AI/auditDictionaryField';\nimport * as auditContentDeclarationMetadataUtil from '@utils/AI/auditDictionaryMetadata';\nimport * as auditTagUtil from '@utils/AI/auditTag';\nimport * as autocompleteUtil from '@utils/AI/autocomplete';\nimport * as customQueryUtil from '@utils/AI/customQuery';\nimport * as translateJSONUtil from '@utils/AI/translateJSON';\nimport { type AppError, ErrorHandler } from '@utils/errors';\nimport {\n type DiscussionFiltersParams,\n getDiscussionFiltersAndPagination,\n} from '@utils/filtersAndPagination/getDiscussionFiltersAndPagination';\nimport {\n formatPaginatedResponse,\n formatResponse,\n type PaginatedResponse,\n type ResponseData,\n} from '@utils/responseData';\nimport type { NextFunction, Request } from 'express';\nimport { DiscussionModel } from '@/models/discussion.model';\nimport type { Dictionary } from '@/types/dictionary.types';\nimport type { DiscussionAPI } from '@/types/discussion.types';\nimport type { Tag, TagAPI } from '@/types/tag.types';\n\ntype ReplaceAIConfigByOptions<T> = Omit<T, 'aiConfig'> & {\n aiOptions?: AIOptions;\n};\n\nexport type CustomQueryBody =\n ReplaceAIConfigByOptions<customQueryUtil.CustomQueryOptions> & {\n tagsKeys?: string[];\n applicationContext?: string;\n };\nexport type CustomQueryResult =\n ResponseData<customQueryUtil.CustomQueryResultData>;\n\nexport const customQuery = async (\n req: Request<CustomQueryBody>,\n res: ResponseWithSession<CustomQueryResult>,\n _next: NextFunction\n): Promise<void> => {\n // biome-ignore lint/correctness/noUnusedVariables: Just filter out tagsKeys\n const { aiOptions, tagsKeys, ...rest } = req.body;\n\n let aiConfig: AIConfig;\n try {\n aiConfig = await getAIConfig(res, {\n userOptions: aiOptions,\n defaultOptions: customQueryUtil.aiDefaultOptions,\n accessType: ['registered_user', 'apiKey'],\n });\n } catch (_error) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n\n try {\n const auditResponse = await customQueryUtil.customQuery({\n ...rest,\n aiConfig,\n applicationContext: aiOptions?.applicationContext,\n });\n\n if (!auditResponse) {\n ErrorHandler.handleGenericErrorResponse(res, 'QUERY_FAILED');\n return;\n }\n\n const responseData = formatResponse<customQueryUtil.CustomQueryResultData>({\n data: auditResponse,\n });\n\n res.json(responseData);\n return;\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n\nexport type TranslateJSONBody = Omit<\n 
ReplaceAIConfigByOptions<translateJSONUtil.TranslateJSONOptions>,\n 'tags'\n> & {\n tagsKeys?: string[];\n};\nexport type TranslateJSONResult =\n ResponseData<translateJSONUtil.TranslateJSONResultData>;\n\nexport const translateJSON = async (\n req: Request<TranslateJSONBody>,\n res: ResponseWithSession<TranslateJSONResult>,\n _next: NextFunction\n): Promise<void> => {\n const { project } = res.locals;\n const { aiOptions, tagsKeys, ...rest } = req.body;\n\n let aiConfig: AIConfig;\n try {\n aiConfig = await getAIConfig(res, {\n userOptions: aiOptions,\n defaultOptions: translateJSONUtil.aiDefaultOptions,\n accessType: ['registered_user', 'apiKey'],\n });\n } catch (_error) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n\n try {\n let tags: Tag[] = [];\n\n if (project?.organizationId && tagsKeys) {\n tags = await getTagsByKeys(tagsKeys, project.organizationId);\n }\n\n const auditResponse = await translateJSONUtil.translateJSON({\n ...rest,\n aiConfig,\n applicationContext: aiOptions?.applicationContext,\n tags,\n });\n\n if (!auditResponse) {\n ErrorHandler.handleGenericErrorResponse(res, 'AUDIT_FAILED');\n return;\n }\n\n const responseData =\n formatResponse<translateJSONUtil.TranslateJSONResultData>({\n data: auditResponse,\n });\n\n res.json(responseData);\n return;\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n\nexport type AuditContentDeclarationBody = {\n aiOptions?: AIOptions;\n locales: Locale[];\n defaultLocale: Locale;\n fileContent: string;\n filePath?: string;\n tagsKeys?: string[];\n};\nexport type AuditContentDeclarationResult =\n ResponseData<auditContentDeclarationUtil.AuditFileResultData>;\n\n/**\n * Retrieves a list of dictionaries based on filters and pagination.\n */\nexport const auditContentDeclaration = async (\n req: Request<AuditContentDeclarationBody>,\n res: ResponseWithSession<AuditContentDeclarationResult>,\n _next: NextFunction\n): Promise<void> => {\n const { project } = res.locals;\n const { fileContent, filePath, aiOptions, locales, defaultLocale, tagsKeys } =\n req.body;\n\n let aiConfig: AIConfig;\n try {\n aiConfig = await getAIConfig(res, {\n userOptions: aiOptions,\n defaultOptions: auditContentDeclarationUtil.aiDefaultOptions,\n accessType: ['registered_user', 'apiKey'],\n });\n } catch (_error) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n\n try {\n let tags: Tag[] = [];\n\n if (project?.organizationId) {\n tags = await getTagsByKeys(tagsKeys, project.organizationId);\n }\n\n const auditResponse = await auditContentDeclarationUtil.auditDictionary({\n fileContent,\n filePath,\n aiConfig,\n applicationContext: aiOptions?.applicationContext,\n locales,\n defaultLocale,\n tags,\n });\n\n if (!auditResponse) {\n ErrorHandler.handleGenericErrorResponse(res, 'AUDIT_FAILED');\n return;\n }\n\n const responseData =\n formatResponse<auditContentDeclarationUtil.AuditFileResultData>({\n data: auditResponse,\n });\n\n res.json(responseData);\n return;\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n\nexport type AuditContentDeclarationFieldBody = {\n aiOptions?: AIOptions;\n locales: Locale[];\n fileContent: string;\n filePath?: string;\n tagsKeys?: string[];\n keyPath: KeyPath[];\n};\nexport type AuditContentDeclarationFieldResult =\n ResponseData<auditContentDeclarationFieldUtil.AuditDictionaryFieldResultData>;\n\n/**\n * Retrieves a list of dictionaries based on filters and 
pagination.\n */\nexport const auditContentDeclarationField = async (\n req: Request<AuditContentDeclarationFieldBody>,\n res: ResponseWithSession<AuditContentDeclarationFieldResult>,\n _next: NextFunction\n): Promise<void> => {\n const { project } = res.locals;\n const { fileContent, aiOptions, locales, tagsKeys, keyPath } = req.body;\n\n let aiConfig: AIConfig;\n try {\n aiConfig = await getAIConfig(res, {\n userOptions: aiOptions,\n defaultOptions: auditContentDeclarationFieldUtil.aiDefaultOptions,\n accessType: ['registered_user', 'apiKey'],\n });\n } catch (_error) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n\n try {\n let tags: Tag[] = [];\n\n if (project?.organizationId) {\n tags = await getTagsByKeys(tagsKeys, project.organizationId);\n }\n\n const auditResponse =\n await auditContentDeclarationFieldUtil.auditDictionaryField({\n fileContent,\n aiConfig,\n applicationContext: aiOptions?.applicationContext,\n locales,\n tags,\n keyPath,\n });\n\n if (!auditResponse) {\n ErrorHandler.handleGenericErrorResponse(res, 'AUDIT_FAILED');\n return;\n }\n\n const responseData =\n formatResponse<auditContentDeclarationFieldUtil.AuditDictionaryFieldResultData>(\n {\n data: auditResponse,\n }\n );\n\n res.json(responseData);\n return;\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n\nexport type AuditContentDeclarationMetadataBody = {\n aiOptions?: AIOptions;\n fileContent: string;\n};\n\nexport type AuditContentDeclarationMetadataResult =\n ResponseData<auditContentDeclarationMetadataUtil.AuditFileResultData>;\n\n/**\n * Retrieves a list of dictionaries based on filters and pagination.\n */\nexport const auditContentDeclarationMetadata = async (\n req: Request<AuditContentDeclarationMetadataBody>,\n res: ResponseWithSession<AuditContentDeclarationMetadataResult>,\n _next: NextFunction\n): Promise<void> => {\n const { organization } = res.locals;\n const { fileContent, aiOptions } = req.body;\n\n let aiConfig: AIConfig;\n try {\n aiConfig = await getAIConfig(res, {\n userOptions: aiOptions,\n defaultOptions: auditContentDeclarationMetadataUtil.aiDefaultOptions,\n accessType: ['registered_user', 'apiKey'],\n });\n } catch (_error) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n\n try {\n const tags: Tag[] = await tagService.findTags(\n {\n organizationId: organization?.id,\n },\n 0,\n 1000\n );\n\n const auditResponse =\n await auditContentDeclarationMetadataUtil.auditDictionaryMetadata({\n fileContent,\n aiConfig,\n applicationContext: aiOptions?.applicationContext,\n tags,\n });\n\n if (!auditResponse) {\n ErrorHandler.handleGenericErrorResponse(res, 'AUDIT_FAILED');\n return;\n }\n\n const responseData =\n formatResponse<auditContentDeclarationMetadataUtil.AuditFileResultData>({\n data: auditResponse,\n });\n\n res.json(responseData);\n return;\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n\nexport type AuditTagBody = {\n aiOptions?: AIOptions;\n tag: TagAPI;\n};\nexport type AuditTagResult = ResponseData<auditTagUtil.TranslateJSONResultData>;\n\n/**\n * Retrieves a list of dictionaries based on filters and pagination.\n */\nexport const auditTag = async (\n req: Request<undefined, undefined, AuditTagBody>,\n res: ResponseWithSession<AuditTagResult>,\n _next: NextFunction\n): Promise<void> => {\n const { project } = res.locals;\n const { aiOptions, tag } = req.body;\n\n let aiConfig: AIConfig;\n try {\n 
aiConfig = await getAIConfig(res, {\n userOptions: aiOptions,\n defaultOptions: auditTagUtil.aiDefaultOptions,\n accessType: ['registered_user', 'apiKey'],\n });\n } catch (_error) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n\n try {\n let dictionaries: Dictionary[] = [];\n if (project?.organizationId) {\n dictionaries = await getDictionariesByTags([tag.key], project.id);\n }\n\n const auditResponse = await auditTagUtil.auditTag({\n aiConfig,\n dictionaries,\n tag,\n applicationContext: aiOptions?.applicationContext,\n });\n\n if (!auditResponse) {\n ErrorHandler.handleGenericErrorResponse(res, 'AUDIT_FAILED');\n return;\n }\n\n const responseData = formatResponse<auditTagUtil.TranslateJSONResultData>({\n data: auditResponse,\n });\n\n res.json(responseData);\n return;\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n\nexport type AskDocQuestionBody = {\n messages: ChatCompletionRequestMessage[];\n discussionId: string;\n};\nexport type AskDocQuestionResult =\n ResponseData<askDocQuestionUtil.AskDocQuestionResult>;\n\nexport const askDocQuestion = async (\n req: Request<undefined, undefined, AskDocQuestionBody>,\n res: ResponseWithSession<AskDocQuestionResult>,\n _next: NextFunction\n): Promise<void> => {\n const { messages = [], discussionId } = req.body;\n const { user, project, organization } = res.locals;\n\n let aiConfig: AIConfig;\n try {\n aiConfig = await getAIConfig(res, {\n userOptions: {},\n accessType: ['public'],\n });\n } catch (_error) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n\n // 1. Prepare SSE headers and flush them NOW\n res.setHeader('Content-Type', 'text/event-stream; charset=utf-8');\n res.setHeader('Cache-Control', 'no-cache, no-transform');\n res.setHeader('Connection', 'keep-alive');\n res.setHeader('X-Accel-Buffering', 'no'); // disable nginx buffering\n res.flushHeaders?.();\n res.write(': connected\\n\\n'); // initial comment keeps some browsers happy\n res.flush?.();\n\n // 2. Kick off the upstream stream WITHOUT awaiting it\n askDocQuestionUtil\n .askDocQuestion(messages, aiConfig, {\n onMessage: (chunk) => {\n res.write(`data: ${JSON.stringify({ chunk })}\\n\\n`);\n res.flush?.();\n },\n })\n .then(async (fullResponse) => {\n const lastUserMessageContent = messages.findLast(\n (message) => message.role === 'user'\n )?.content;\n const lastUserMessageNbWords = lastUserMessageContent\n ? lastUserMessageContent.split(' ').length\n : 0;\n if (lastUserMessageNbWords > 2) {\n // If the last user message is less than 3 words, don't persist the discussion\n // Example: \"Hello\", \"Hi\", \"Hey\", \"test\", etc.\n\n // 3. Persist discussion while the client already has all chunks\n await DiscussionModel.findOneAndUpdate(\n { discussionId },\n {\n $set: {\n discussionId,\n userId: user?.id,\n projectId: project?.id,\n organizationId: organization?.id,\n messages: [\n ...messages.map((msg) => ({\n role: msg.role,\n content: msg.content,\n timestamp: msg.timestamp,\n })),\n {\n role: 'assistant',\n content: fullResponse.response,\n relatedFiles: fullResponse.relatedFiles,\n timestamp: new Date(),\n },\n ],\n },\n },\n { upsert: true, new: true }\n );\n }\n\n // 4. 
Tell the client we're done and close the stream\n res.write(\n `data: ${JSON.stringify({ done: true, response: fullResponse })}\\n\\n`\n );\n res.end();\n })\n .catch((err) => {\n // propagate error as an SSE event so the client knows why it closed\n res.write(\n `event: error\\ndata: ${JSON.stringify({ message: err.message })}\\n\\n`\n );\n res.end();\n });\n};\n\nexport type AutocompleteBody = {\n text: string;\n aiOptions?: AIOptions;\n contextBefore?: string;\n currentLine?: string;\n contextAfter?: string;\n};\n\nexport type AutocompleteResponse = ResponseData<{\n autocompletion: string;\n}>;\n\nexport const autocomplete = async (\n req: Request<AutocompleteBody>,\n res: ResponseWithSession<AutocompleteResponse>,\n _next: NextFunction\n): Promise<void> => {\n try {\n const { text, aiOptions, contextBefore, currentLine, contextAfter } =\n req.body;\n\n let aiConfig: AIConfig;\n try {\n aiConfig = await getAIConfig(res, {\n userOptions: aiOptions,\n defaultOptions: autocompleteUtil.aiDefaultOptions,\n accessType: ['public'],\n });\n } catch (_error) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n\n const response = (await autocompleteUtil.autocomplete({\n text,\n aiConfig,\n applicationContext: aiOptions?.applicationContext,\n contextBefore,\n currentLine,\n contextAfter,\n })) ?? {\n autocompletion: '',\n tokenUsed: 0,\n };\n\n const responseData =\n formatResponse<autocompleteUtil.AutocompleteFileResultData>({\n data: response,\n });\n\n res.json(responseData);\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n\nexport type GetDiscussionsParams =\n | ({\n page?: string | number;\n pageSize?: string | number;\n includeMessages?: 'true' | 'false';\n } & DiscussionFiltersParams)\n | undefined;\n\nexport type GetDiscussionsResult = PaginatedResponse<DiscussionAPI>;\n\n/**\n * Retrieves a list of discussions with filters and pagination.\n * Only the owner or admins can access. By default, users only see their own.\n */\nexport const getDiscussions = async (\n req: Request<GetDiscussionsParams>,\n res: ResponseWithSession<GetDiscussionsResult>,\n _next: NextFunction\n): Promise<void> => {\n const { user, roles } = res.locals;\n const { filters, sortOptions, pageSize, skip, page, getNumberOfPages } =\n getDiscussionFiltersAndPagination(req, res);\n const includeMessagesParam = (req.query as any)?.includeMessages as\n | 'true'\n | 'false'\n | undefined;\n const includeMessages = includeMessagesParam !== 'false';\n\n if (!user) {\n ErrorHandler.handleGenericErrorResponse(res, 'USER_NOT_DEFINED');\n return;\n }\n\n try {\n const projection = includeMessages ? {} : { messages: 0 };\n const discussions = await DiscussionModel.find(filters, projection)\n .sort(sortOptions)\n .skip(skip)\n .limit(pageSize)\n .lean();\n\n // Compute number of messages for each discussion\n const numberOfMessagesById: Record<string, number> = {};\n if (!includeMessages && discussions.length > 0) {\n const ids = discussions.map((d: any) => d._id);\n const counts = await DiscussionModel.aggregate([\n { $match: { _id: { $in: ids } } },\n {\n $project: {\n numberOfMessages: { $size: { $ifNull: ['$messages', []] } },\n },\n },\n ]);\n for (const c of counts as any[]) {\n numberOfMessagesById[String(c._id)] = c.numberOfMessages ?? 
0;\n }\n }\n\n // Permission: allow admin, or the owner for all returned entries\n const allOwnedByUser = discussions.every(\n (d) => String(d.userId) === String(user.id)\n );\n const isAllowed = roles.includes('admin') || allOwnedByUser;\n\n if (!isAllowed) {\n ErrorHandler.handleGenericErrorResponse(res, 'PERMISSION_DENIED');\n return;\n }\n\n const totalItems = await DiscussionModel.countDocuments(filters);\n\n const responseData = formatPaginatedResponse({\n data: discussions.map((d: any) => ({\n ...d,\n id: String(d._id ?? d.id),\n numberOfMessages: includeMessages\n ? Array.isArray(d.messages)\n ? d.messages.length\n : 0\n : (numberOfMessagesById[String(d._id ?? d.id)] ?? 0),\n })),\n page,\n pageSize,\n totalPages: getNumberOfPages(totalItems),\n totalItems,\n });\n\n res.json(responseData as any);\n return;\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n"],"mappings":";;;;;;;;;;;;;;;;;AAgDA,MAAa,cAAc,OACzB,KACA,KACA,UACkB;CAElB,MAAM,EAAE,WAAW,UAAU,GAAG,SAAS,IAAI;CAE7C,IAAIA;AACJ,KAAI;AACF,aAAW,MAAMC,mCAAY,KAAK;GAChC,aAAa;GACb;GACA,YAAY,CAAC,mBAAmB,SAAS;GAC1C,CAAC;UACK,QAAQ;AACf,iDAAa,2BAA2B,KAAK,mBAAmB;AAChE;;AAGF,KAAI;EACF,MAAM,gBAAgB,qDAAkC;GACtD,GAAG;GACH;GACA,oBAAoB,WAAW;GAChC,CAAC;AAEF,MAAI,CAAC,eAAe;AAClB,kDAAa,2BAA2B,KAAK,eAAe;AAC5D;;EAGF,MAAM,eAAeC,0CAAsD,EACzE,MAAM,eACP,CAAC;AAEF,MAAI,KAAK,aAAa;AACtB;UACO,OAAO;AACd,iDAAa,uBAAuB,KAAK,MAAkB;AAC3D;;;AAaJ,MAAa,gBAAgB,OAC3B,KACA,KACA,UACkB;CAClB,MAAM,EAAE,YAAY,IAAI;CACxB,MAAM,EAAE,WAAW,UAAU,GAAG,SAAS,IAAI;CAE7C,IAAIF;AACJ,KAAI;AACF,aAAW,MAAMC,mCAAY,KAAK;GAChC,aAAa;GACb;GACA,YAAY,CAAC,mBAAmB,SAAS;GAC1C,CAAC;UACK,QAAQ;AACf,iDAAa,2BAA2B,KAAK,mBAAmB;AAChE;;AAGF,KAAI;EACF,IAAIE,OAAc,EAAE;AAEpB,MAAI,SAAS,kBAAkB,SAC7B,QAAO,MAAMC,2CAAc,UAAU,QAAQ,eAAe;EAG9D,MAAM,gBAAgB,yDAAsC;GAC1D,GAAG;GACH;GACA,oBAAoB,WAAW;GAC/B;GACD,CAAC;AAEF,MAAI,CAAC,eAAe;AAClB,kDAAa,2BAA2B,KAAK,eAAe;AAC5D;;EAGF,MAAM,eACJF,0CAA0D,EACxD,MAAM,eACP,CAAC;AAEJ,MAAI,KAAK,aAAa;AACtB;UACO,OAAO;AACd,iDAAa,uBAAuB,KAAK,MAAkB;AAC3D;;;;;;AAkBJ,MAAa,0BAA0B,OACrC,KACA,KACA,UACkB;CAClB,MAAM,EAAE,YAAY,IAAI;CACxB,MAAM,EAAE,aAAa,UAAU,WAAW,SAAS,eAAe,aAChE,IAAI;CAEN,IAAIF;AACJ,KAAI;AACF,aAAW,MAAMC,mCAAY,KAAK;GAChC,aAAa;GACb;GACA,YAAY,CAAC,mBAAmB,SAAS;GAC1C,CAAC;UACK,QAAQ;AACf,iDAAa,2BAA2B,KAAK,mBAAmB;AAChE;;AAGF,KAAI;EACF,IAAIE,OAAc,EAAE;AAEpB,MAAI,SAAS,eACX,QAAO,MAAMC,2CAAc,UAAU,QAAQ,eAAe;EAG9D,MAAM,gBAAgB,6DAAkD;GACtE;GACA;GACA;GACA,oBAAoB,WAAW;GAC/B;GACA;GACA;GACD,CAAC;AAEF,MAAI,CAAC,eAAe;AAClB,kDAAa,2BAA2B,KAAK,eAAe;AAC5D;;EAGF,MAAM,eACJF,0CAAgE,EAC9D,MAAM,eACP,CAAC;AAEJ,MAAI,KAAK,aAAa;AACtB;UACO,OAAO;AACd,iDAAa,uBAAuB,KAAK,MAAkB;AAC3D;;;;;;AAkBJ,MAAa,+BAA+B,OAC1C,KACA,KACA,UACkB;CAClB,MAAM,EAAE,YAAY,IAAI;CACxB,MAAM,EAAE,aAAa,WAAW,SAAS,UAAU,YAAY,IAAI;CAEnE,IAAIF;AACJ,KAAI;AACF,aAAW,MAAMC,mCAAY,KAAK;GAChC,aAAa;GACb;GACA,YAAY,CAAC,mBAAmB,SAAS;GAC1C,CAAC;UACK,QAAQ;AACf,iDAAa,2BAA2B,KAAK,mBAAmB;AAChE;;AAGF,KAAI;EACF,IAAIE,OAAc,EAAE;AAEpB,MAAI,SAAS,eACX,QAAO,MAAMC,2CAAc,UAAU,QAAQ,eAAe;EAG9D,MAAM,gBACJ,uEAA4D;GAC1D;GACA;GACA,oBAAoB,WAAW;GAC/B;GACA;GACA;GACD,CAAC;AAEJ,MAAI,CAAC,eAAe;AAClB,kDAAa,2BAA2B,KAAK,eAAe;AAC5D;;EAGF,MAAM,eACJF,0CACE,EACE,MAAM,eACP,CACF;AAEH,MAAI,KAAK,aAAa;AACtB;UACO,OAAO;AACd,iDAAa,uBAAuB,KAAK,MAAkB;AAC3D;;;;;;AAeJ,MAAa,kCAAkC,OAC7C,KACA,KACA,UACkB;CAClB,MAAM,EAAE,iBAAiB,IAAI;CAC7B,MAAM,EAAE,aAAa,cAAc,IAAI;CAEvC,IAAIF;AACJ,KAAI;AACF,aAAW,MAAMC,mCAAY,KAAK;GAChC,aAAa;GACb;GACA,YAAY,CAAC,mBAAmB,SAAS;GAC1C,CAAC;UACK,QAAQ;AACf,iDAAa,2BAA2B,KAAK,mBAAmB;AAChE;;AAGF,KAAI;EACF,MAAME,OAAc,4CAClB,EACE,gBAAgB,cAAc,IAC/B,EAC
D,GACA,IACD;EAED,MAAM,gBACJ,6EAAkE;GAChE;GACA;GACA,oBAAoB,WAAW;GAC/B;GACD,CAAC;AAEJ,MAAI,CAAC,eAAe;AAClB,kDAAa,2BAA2B,KAAK,eAAe;AAC5D;;EAGF,MAAM,eACJD,0CAAwE,EACtE,MAAM,eACP,CAAC;AAEJ,MAAI,KAAK,aAAa;AACtB;UACO,OAAO;AACd,iDAAa,uBAAuB,KAAK,MAAkB;AAC3D;;;;;;AAaJ,MAAa,WAAW,OACtB,KACA,KACA,UACkB;CAClB,MAAM,EAAE,YAAY,IAAI;CACxB,MAAM,EAAE,WAAW,QAAQ,IAAI;CAE/B,IAAIF;AACJ,KAAI;AACF,aAAW,MAAMC,mCAAY,KAAK;GAChC,aAAa;GACb;GACA,YAAY,CAAC,mBAAmB,SAAS;GAC1C,CAAC;UACK,QAAQ;AACf,iDAAa,2BAA2B,KAAK,mBAAmB;AAChE;;AAGF,KAAI;EACF,IAAII,eAA6B,EAAE;AACnC,MAAI,SAAS,eACX,gBAAe,MAAMC,0DAAsB,CAAC,IAAI,IAAI,EAAE,QAAQ,GAAG;EAGnE,MAAM,gBAAgB,+CAA4B;GAChD;GACA;GACA;GACA,oBAAoB,WAAW;GAChC,CAAC;AAEF,MAAI,CAAC,eAAe;AAClB,kDAAa,2BAA2B,KAAK,eAAe;AAC5D;;EAGF,MAAM,eAAeJ,0CAAqD,EACxE,MAAM,eACP,CAAC;AAEF,MAAI,KAAK,aAAa;AACtB;UACO,OAAO;AACd,iDAAa,uBAAuB,KAAK,MAAkB;AAC3D;;;AAWJ,MAAa,iBAAiB,OAC5B,KACA,KACA,UACkB;CAClB,MAAM,EAAE,WAAW,EAAE,EAAE,iBAAiB,IAAI;CAC5C,MAAM,EAAE,MAAM,SAAS,iBAAiB,IAAI;CAE5C,IAAIF;AACJ,KAAI;AACF,aAAW,MAAMC,mCAAY,KAAK;GAChC,aAAa,EAAE;GACf,YAAY,CAAC,SAAS;GACvB,CAAC;UACK,QAAQ;AACf,iDAAa,2BAA2B,KAAK,mBAAmB;AAChE;;AAIF,KAAI,UAAU,gBAAgB,mCAAmC;AACjE,KAAI,UAAU,iBAAiB,yBAAyB;AACxD,KAAI,UAAU,cAAc,aAAa;AACzC,KAAI,UAAU,qBAAqB,KAAK;AACxC,KAAI,gBAAgB;AACpB,KAAI,MAAM,kBAAkB;AAC5B,KAAI,SAAS;AAGb,+DACkB,UAAU,UAAU,EAClC,YAAY,UAAU;AACpB,MAAI,MAAM,SAAS,KAAK,UAAU,EAAE,OAAO,CAAC,CAAC,MAAM;AACnD,MAAI,SAAS;IAEhB,CAAC,CACD,KAAK,OAAO,iBAAiB;EAC5B,MAAM,yBAAyB,SAAS,UACrC,YAAY,QAAQ,SAAS,OAC/B,EAAE;AAIH,OAH+B,yBAC3B,uBAAuB,MAAM,IAAI,CAAC,SAClC,KACyB,EAK3B,OAAMM,gDAAgB,iBACpB,EAAE,cAAc,EAChB,EACE,MAAM;GACJ;GACA,QAAQ,MAAM;GACd,WAAW,SAAS;GACpB,gBAAgB,cAAc;GAC9B,UAAU,CACR,GAAG,SAAS,KAAK,SAAS;IACxB,MAAM,IAAI;IACV,SAAS,IAAI;IACb,WAAW,IAAI;IAChB,EAAE,EACH;IACE,MAAM;IACN,SAAS,aAAa;IACtB,cAAc,aAAa;IAC3B,2BAAW,IAAI,MAAM;IACtB,CACF;GACF,EACF,EACD;GAAE,QAAQ;GAAM,KAAK;GAAM,CAC5B;AAIH,MAAI,MACF,SAAS,KAAK,UAAU;GAAE,MAAM;GAAM,UAAU;GAAc,CAAC,CAAC,MACjE;AACD,MAAI,KAAK;GACT,CACD,OAAO,QAAQ;AAEd,MAAI,MACF,uBAAuB,KAAK,UAAU,EAAE,SAAS,IAAI,SAAS,CAAC,CAAC,MACjE;AACD,MAAI,KAAK;GACT;;AAeN,MAAa,eAAe,OAC1B,KACA,KACA,UACkB;AAClB,KAAI;EACF,MAAM,EAAE,MAAM,WAAW,eAAe,aAAa,iBACnD,IAAI;EAEN,IAAIP;AACJ,MAAI;AACF,cAAW,MAAMC,mCAAY,KAAK;IAChC,aAAa;IACb;IACA,YAAY,CAAC,SAAS;IACvB,CAAC;WACK,QAAQ;AACf,kDAAa,2BAA2B,KAAK,mBAAmB;AAChE;;EAeF,MAAM,eACJC,0CAA4D,EAC1D,MAdc,uDAAoC;GACpD;GACA;GACA,oBAAoB,WAAW;GAC/B;GACA;GACA;GACD,CAAC,IAAK;GACL,gBAAgB;GAChB,WAAW;GACZ,EAKE,CAAC;AAEJ,MAAI,KAAK,aAAa;UACf,OAAO;AACd,iDAAa,uBAAuB,KAAK,MAAkB;AAC3D;;;;;;;AAkBJ,MAAa,iBAAiB,OAC5B,KACA,KACA,UACkB;CAClB,MAAM,EAAE,MAAM,UAAU,IAAI;CAC5B,MAAM,EAAE,SAAS,aAAa,UAAU,MAAM,MAAM,qBAClDM,uGAAkC,KAAK,IAAI;CAK7C,MAAM,kBAJwB,IAAI,OAAe,oBAIA;AAEjD,KAAI,CAAC,MAAM;AACT,iDAAa,2BAA2B,KAAK,mBAAmB;AAChE;;AAGF,KAAI;EACF,MAAM,aAAa,kBAAkB,EAAE,GAAG,EAAE,UAAU,GAAG;EACzD,MAAM,cAAc,MAAMD,gDAAgB,KAAK,SAAS,WAAW,CAChE,KAAK,YAAY,CACjB,KAAK,KAAK,CACV,MAAM,SAAS,CACf,MAAM;EAGT,MAAME,uBAA+C,EAAE;AACvD,MAAI,CAAC,mBAAmB,YAAY,SAAS,GAAG;GAC9C,MAAM,MAAM,YAAY,KAAK,MAAW,EAAE,IAAI;GAC9C,MAAM,SAAS,MAAMF,gDAAgB,UAAU,CAC7C,EAAE,QAAQ,EAAE,KAAK,EAAE,KAAK,KAAK,EAAE,EAAE,EACjC,EACE,UAAU,EACR,kBAAkB,EAAE,OAAO,EAAE,SAAS,CAAC,aAAa,EAAE,CAAC,EAAE,EAAE,EAC5D,EACF,CACF,CAAC;AACF,QAAK,MAAM,KAAK,OACd,sBAAqB,OAAO,EAAE,IAAI,IAAI,EAAE,oBAAoB;;EAKhE,MAAM,iBAAiB,YAAY,OAChC,MAAM,OAAO,EAAE,OAAO,KAAK,OAAO,KAAK,GAAG,CAC5C;AAGD,MAAI,EAFc,MAAM,SAAS,QAAQ,IAAI,iBAE7B;AACd,kDAAa,2BAA2B,KAAK,oBAAoB;AACjE;;EAGF,MAAM,aAAa,MAAMA,gDAAgB,eAAe,QAAQ;EAEhE,MAAM,eAAeG,mDAAwB;GAC3C,MAAM,YAAY,KAAK,OAAY;IACjC,GAAG;IACH,IAAI,OAAO,EAAE,OAAO,EAAE,GAAG;IACzB,kBAAkB,kBACd,MAAM,QAAQ,EAAE,SAAS,
GACvB,EAAE,SAAS,SACX,IACD,qBAAqB,OAAO,EAAE,OAAO,EAAE,GAAG,KAAK;IACrD,EAAE;GACH;GACA;GACA,YAAY,iBAAiB,WAAW;GACxC;GACD,CAAC;AAEF,MAAI,KAAK,aAAoB;AAC7B;UACO,OAAO;AACd,iDAAa,uBAAuB,KAAK,MAAkB;AAC3D"}
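The `askDocQuestion` controller embedded in the source map above streams its answer as Server-Sent Events: an initial `: connected` comment, `data: {"chunk": ...}` frames while the model streams, a final `data: {"done": true, "response": ...}` frame, and an `event: error` frame on failure. A minimal client sketch for that framing follows; the endpoint path and request shape are assumptions for illustration, not part of the package:

```ts
// Hypothetical consumer of the SSE protocol emitted by askDocQuestion.
// Only the event framing (`: connected`, `data: {chunk}`, `data: {done, response}`,
// `event: error`) comes from the controller source above; the URL is invented.
type SseHandlers = {
  onChunk: (chunk: string) => void;
  onDone: (fullResponse: unknown) => void;
  onError: (message: string) => void;
};

const consumeAskDocQuestion = async (
  messages: { role: string; content: string }[],
  discussionId: string,
  handlers: SseHandlers
): Promise<void> => {
  const response = await fetch('/api/ai/ask', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ messages, discussionId }),
  });

  const reader = response.body!.getReader();
  const decoder = new TextDecoder();
  let buffer = '';

  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });

    // SSE frames are separated by a blank line.
    const frames = buffer.split('\n\n');
    buffer = frames.pop() ?? '';

    for (const frame of frames) {
      if (frame.startsWith(':')) continue; // keep-alive comment
      const isError = frame.startsWith('event: error');
      const dataLine = frame.split('\n').find((l) => l.startsWith('data: '));
      if (!dataLine) continue;
      const payload = JSON.parse(dataLine.slice('data: '.length));
      if (isError) handlers.onError(payload.message);
      else if (payload.done) handlers.onDone(payload.response);
      else if (typeof payload.chunk === 'string') handlers.onChunk(payload.chunk);
    }
  }
};
```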
@@ -24,28 +24,19 @@ const getAPIKey = (res, accessType, aiOptions) => {
   if (accessType.includes("registered_user") && res.locals.user) return aiOptions?.apiKey ?? defaultApiKey;
   if (accessType.includes("premium_user") && res.locals.user) return aiOptions?.apiKey ?? defaultApiKey;
 };
-const getModel = (provider, userApiKey, userModel, defaultModel) => {
-  const fallBackModel = defaultModel ?? "chatgpt-4o-latest";
-  switch (provider) {
-    case AIProvider.OPENAI:
-      defaultModel = "gpt-5-mini";
-      break;
-    case AIProvider.ANTHROPIC:
-      defaultModel = "claude-sonnet-4-5-20250929";
-      break;
-    case AIProvider.MISTRAL:
-      defaultModel = "mistral-large-latest";
-      break;
-    case AIProvider.DEEPSEEK:
-      defaultModel = "deepseek-coder";
-      break;
-    case AIProvider.GEMINI:
-      defaultModel = "gemini-2.5-flash";
-      break;
+const getModel = (provider, userApiKey, userModel, defaultModel = "gpt-5-mini") => {
+  if (userApiKey) {
+    if (provider && provider === AIProvider.OPENAI) return userModel;
+    switch (provider) {
+      case AIProvider.ANTHROPIC: return "claude-sonnet-4-5-20250929";
+      case AIProvider.MISTRAL: return "mistral-large-latest";
+      case AIProvider.DEEPSEEK: return "deepseek-coder";
+      case AIProvider.GEMINI: return "gemini-2.5-flash";
+      default: return defaultModel;
+    }
   }
-  if (Boolean(userApiKey) && Boolean(userModel)) return userModel;
-  if (userModel) throw new Error("The user should use his own API key to use a custom model");
-  return fallBackModel;
+  if (userModel || provider) throw new Error("The user should use his own API key to use a custom model");
+  return defaultModel;
 };
 const DEFAULT_PROVIDER = AIProvider.OPENAI;
 const DEFAULT_TEMPERATURE = 1;
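The rewritten `getModel` inverts the old flow: rather than computing a provider-specific default and then validating the user model, it branches first on whether a user API key is present. A few hedged examples of the resulting behavior; the import is hypothetical and only the return values follow from the compiled code above:

```ts
import { AIProvider } from './aiSdk'; // hypothetical import path, for illustration only

// With a user-supplied API key and the OpenAI provider, the caller's model is
// returned as-is (note: as written, this early return also yields `undefined`
// when no userModel is passed).
getModel(AIProvider.OPENAI, 'sk-user-key', 'gpt-4.1'); // => 'gpt-4.1'

// With a user key and any other known provider, the pinned per-provider model
// wins and the userModel argument is ignored.
getModel(AIProvider.MISTRAL, 'sk-user-key', 'my-custom-model'); // => 'mistral-large-latest'

// Without a user key, naming either a model or a provider now throws.
getModel(AIProvider.GEMINI, '', 'gemini-2.5-pro'); // throws Error(...)

// Without a key, model, or provider, the new default parameter applies.
getModel(undefined, '', undefined); // => 'gpt-5-mini'
```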
@@ -1 +1 @@
- {"version":3,"file":"aiSdk.cjs","names":["fallBackModel: Model","DEFAULT_PROVIDER: AIProvider","DEFAULT_TEMPERATURE: number","languageModel: AIConfig['model']"],"sources":["../../../../src/utils/AI/aiSdk.ts"],"sourcesContent":["import { type anthropic, createAnthropic } from '@ai-sdk/anthropic';\nimport { createDeepSeek, type deepseek } from '@ai-sdk/deepseek';\nimport { createGoogleGenerativeAI, type google } from '@ai-sdk/google';\nimport { createMistral, type mistral } from '@ai-sdk/mistral';\nimport { createOpenAI, type openai } from '@ai-sdk/openai';\nimport type {\n AssistantModelMessage,\n generateText,\n SystemModelMessage,\n ToolModelMessage,\n UserModelMessage,\n} from 'ai';\nimport type { Response } from 'express';\n\ntype AnthropicModel = Parameters<typeof anthropic>[0];\ntype DeepSeekModel = Parameters<typeof deepseek>[0];\ntype MistralModel = Parameters<typeof mistral>[0];\ntype OpenAIModel = Parameters<typeof openai>[0];\ntype GoogleModel = Parameters<typeof google>[0];\n\nexport type Messages = (\n | SystemModelMessage\n | UserModelMessage\n | AssistantModelMessage\n | ToolModelMessage\n)[];\n\n/**\n * Supported AI models\n */\nexport type Model =\n | AnthropicModel\n | DeepSeekModel\n | MistralModel\n | OpenAIModel\n | GoogleModel\n | (string & {});\n\n/**\n * Supported AI SDK providers\n */\nexport enum AIProvider {\n OPENAI = 'openai',\n ANTHROPIC = 'anthropic',\n MISTRAL = 'mistral',\n DEEPSEEK = 'deepseek',\n GEMINI = 'gemini',\n}\n\n/**\n * Common options for all AI providers\n */\nexport type AIOptions = {\n provider?: AIProvider;\n model?: Model;\n temperature?: number;\n apiKey?: string;\n applicationContext?: string;\n};\n\n// Define the structure of messages used in chat completions\nexport type ChatCompletionRequestMessage = {\n role: 'system' | 'user' | 'assistant'; // The role of the message sender\n content: string; // The text content of the message\n timestamp?: Date; // The timestamp of the message\n};\n\ntype AccessType = 'apiKey' | 'registered_user' | 'premium_user' | 'public';\n\nconst getAPIKey = (\n res: Response,\n accessType: AccessType[],\n aiOptions?: AIOptions\n) => {\n const defaultApiKey = process.env.OPENAI_API_KEY;\n\n if (accessType.includes('public')) {\n return aiOptions?.apiKey ?? defaultApiKey;\n }\n\n if (accessType.includes('apiKey') && aiOptions?.apiKey) {\n return aiOptions?.apiKey;\n }\n\n if (accessType.includes('registered_user') && res.locals.user) {\n return aiOptions?.apiKey ?? defaultApiKey;\n }\n\n // TODO: Implement premium user access\n if (accessType.includes('premium_user') && res.locals.user) {\n return aiOptions?.apiKey ?? defaultApiKey;\n }\n\n return undefined;\n};\n\nconst getModel = (\n provider: AIProvider,\n userApiKey: string,\n userModel?: Model,\n defaultModel?: Model\n): Model => {\n // Set default models based on provider\n const fallBackModel: Model = defaultModel ?? 
'chatgpt-4o-latest';\n\n switch (provider) {\n case AIProvider.OPENAI:\n defaultModel = 'gpt-5-mini';\n break;\n case AIProvider.ANTHROPIC:\n defaultModel = 'claude-sonnet-4-5-20250929';\n break;\n case AIProvider.MISTRAL:\n defaultModel = 'mistral-large-latest';\n break;\n case AIProvider.DEEPSEEK:\n defaultModel = 'deepseek-coder';\n break;\n case AIProvider.GEMINI:\n defaultModel = 'gemini-2.5-flash';\n break;\n }\n\n // If the user use his own API, let him use the model he wants\n if (Boolean(userApiKey) && Boolean(userModel)) {\n return userModel!;\n }\n\n if (userModel) {\n throw new Error(\n 'The user should use his own API key to use a custom model'\n );\n }\n\n return fallBackModel;\n};\n\nexport type AIConfig = Omit<Parameters<typeof generateText>[0], 'prompt'>;\n\nconst DEFAULT_PROVIDER: AIProvider = AIProvider.OPENAI as AIProvider;\nconst DEFAULT_TEMPERATURE: number = 1; // ChatGPT 5 accept only temperature 1\n\nexport type AIConfigOptions = {\n userOptions?: AIOptions;\n defaultOptions?: AIOptions;\n accessType?: AccessType[];\n};\n\n/**\n * Get AI model configuration based on the selected provider and options\n * This function handles the configuration for different AI providers\n *\n * @param options Configuration options including provider, API keys, models and temperature\n * @returns Configured AI model ready to use with generateText\n */\nexport const getAIConfig = async (\n res: Response,\n options: AIConfigOptions\n): Promise<AIConfig> => {\n const {\n userOptions,\n defaultOptions,\n accessType = ['registered_user'],\n } = options;\n\n const aiOptions = {\n provider: DEFAULT_PROVIDER,\n temperature: DEFAULT_TEMPERATURE,\n ...defaultOptions,\n ...userOptions,\n } satisfies AIOptions;\n\n const apiKey = getAPIKey(res, accessType, aiOptions);\n\n // Check if API key is provided\n if (!apiKey) {\n throw new Error(`API key for ${aiOptions.provider} is missing`);\n }\n\n const selectedModel = getModel(\n aiOptions.provider,\n apiKey,\n aiOptions.model,\n defaultOptions?.model\n );\n\n const protectedOptions = {\n ...aiOptions,\n apiKey,\n model: selectedModel,\n } satisfies AIOptions;\n\n let languageModel: AIConfig['model'];\n\n switch (protectedOptions.provider) {\n case AIProvider.OPENAI: {\n languageModel = createOpenAI({\n apiKey,\n })(selectedModel);\n break;\n }\n\n case AIProvider.ANTHROPIC: {\n languageModel = createAnthropic({\n apiKey,\n })(selectedModel);\n break;\n }\n\n case AIProvider.MISTRAL: {\n languageModel = createMistral({\n apiKey,\n })(selectedModel);\n break;\n }\n\n case AIProvider.DEEPSEEK: {\n languageModel = createDeepSeek({\n apiKey,\n })(selectedModel);\n break;\n }\n\n case AIProvider.GEMINI: {\n languageModel = createGoogleGenerativeAI({\n apiKey,\n })(selectedModel);\n break;\n }\n\n default: {\n throw new Error(`Provider ${protectedOptions.provider} not supported`);\n }\n }\n\n return {\n model: languageModel,\n temperature: protectedOptions.temperature,\n 
};\n};\n"],"mappings":";;;;;;;;;;;AAyCA,IAAY,oDAAL;AACL;AACA;AACA;AACA;AACA;;;AAuBF,MAAM,aACJ,KACA,YACA,cACG;CACH,MAAM,gBAAgB,QAAQ,IAAI;AAElC,KAAI,WAAW,SAAS,SAAS,CAC/B,QAAO,WAAW,UAAU;AAG9B,KAAI,WAAW,SAAS,SAAS,IAAI,WAAW,OAC9C,QAAO,WAAW;AAGpB,KAAI,WAAW,SAAS,kBAAkB,IAAI,IAAI,OAAO,KACvD,QAAO,WAAW,UAAU;AAI9B,KAAI,WAAW,SAAS,eAAe,IAAI,IAAI,OAAO,KACpD,QAAO,WAAW,UAAU;;AAMhC,MAAM,YACJ,UACA,YACA,WACA,iBACU;CAEV,MAAMA,gBAAuB,gBAAgB;AAE7C,SAAQ,UAAR;EACE,KAAK,WAAW;AACd,kBAAe;AACf;EACF,KAAK,WAAW;AACd,kBAAe;AACf;EACF,KAAK,WAAW;AACd,kBAAe;AACf;EACF,KAAK,WAAW;AACd,kBAAe;AACf;EACF,KAAK,WAAW;AACd,kBAAe;AACf;;AAIJ,KAAI,QAAQ,WAAW,IAAI,QAAQ,UAAU,CAC3C,QAAO;AAGT,KAAI,UACF,OAAM,IAAI,MACR,4DACD;AAGH,QAAO;;AAKT,MAAMC,mBAA+B,WAAW;AAChD,MAAMC,sBAA8B;;;;;;;;AAepC,MAAa,cAAc,OACzB,KACA,YACsB;CACtB,MAAM,EACJ,aACA,gBACA,aAAa,CAAC,kBAAkB,KAC9B;CAEJ,MAAM,YAAY;EAChB,UAAU;EACV,aAAa;EACb,GAAG;EACH,GAAG;EACJ;CAED,MAAM,SAAS,UAAU,KAAK,YAAY,UAAU;AAGpD,KAAI,CAAC,OACH,OAAM,IAAI,MAAM,eAAe,UAAU,SAAS,aAAa;CAGjE,MAAM,gBAAgB,SACpB,UAAU,UACV,QACA,UAAU,OACV,gBAAgB,MACjB;CAED,MAAM,mBAAmB;EACvB,GAAG;EACH;EACA,OAAO;EACR;CAED,IAAIC;AAEJ,SAAQ,iBAAiB,UAAzB;EACE,KAAK,WAAW;AACd,qDAA6B,EAC3B,QACD,CAAC,CAAC,cAAc;AACjB;EAGF,KAAK,WAAW;AACd,2DAAgC,EAC9B,QACD,CAAC,CAAC,cAAc;AACjB;EAGF,KAAK,WAAW;AACd,uDAA8B,EAC5B,QACD,CAAC,CAAC,cAAc;AACjB;EAGF,KAAK,WAAW;AACd,yDAA+B,EAC7B,QACD,CAAC,CAAC,cAAc;AACjB;EAGF,KAAK,WAAW;AACd,iEAAyC,EACvC,QACD,CAAC,CAAC,cAAc;AACjB;EAGF,QACE,OAAM,IAAI,MAAM,YAAY,iBAAiB,SAAS,gBAAgB;;AAI1E,QAAO;EACL,OAAO;EACP,aAAa,iBAAiB;EAC/B"}
+ {"version":3,"file":"aiSdk.cjs","names":["DEFAULT_PROVIDER: AIProvider","DEFAULT_TEMPERATURE: number","languageModel: AIConfig['model']"],"sources":["../../../../src/utils/AI/aiSdk.ts"],"sourcesContent":["import { type anthropic, createAnthropic } from '@ai-sdk/anthropic';\nimport { createDeepSeek, type deepseek } from '@ai-sdk/deepseek';\nimport { createGoogleGenerativeAI, type google } from '@ai-sdk/google';\nimport { createMistral, type mistral } from '@ai-sdk/mistral';\nimport { createOpenAI, type openai } from '@ai-sdk/openai';\nimport type {\n AssistantModelMessage,\n generateText,\n SystemModelMessage,\n ToolModelMessage,\n UserModelMessage,\n} from 'ai';\nimport type { Response } from 'express';\n\ntype AnthropicModel = Parameters<typeof anthropic>[0];\ntype DeepSeekModel = Parameters<typeof deepseek>[0];\ntype MistralModel = Parameters<typeof mistral>[0];\ntype OpenAIModel = Parameters<typeof openai>[0];\ntype GoogleModel = Parameters<typeof google>[0];\n\nexport type Messages = (\n | SystemModelMessage\n | UserModelMessage\n | AssistantModelMessage\n | ToolModelMessage\n)[];\n\n/**\n * Supported AI models\n */\nexport type Model =\n | AnthropicModel\n | DeepSeekModel\n | MistralModel\n | OpenAIModel\n | GoogleModel\n | (string & {});\n\n/**\n * Supported AI SDK providers\n */\nexport enum AIProvider {\n OPENAI = 'openai',\n ANTHROPIC = 'anthropic',\n MISTRAL = 'mistral',\n DEEPSEEK = 'deepseek',\n GEMINI = 'gemini',\n}\n\n/**\n * Common options for all AI providers\n */\nexport type AIOptions = {\n provider?: AIProvider;\n model?: Model;\n temperature?: number;\n apiKey?: string;\n applicationContext?: string;\n};\n\n// Define the structure of messages used in chat completions\nexport type ChatCompletionRequestMessage = {\n role: 'system' | 'user' | 'assistant'; // The role of the message sender\n content: string; // The text content of the message\n timestamp?: Date; // The timestamp of the message\n};\n\ntype AccessType = 'apiKey' | 'registered_user' | 'premium_user' | 'public';\n\nconst getAPIKey = (\n res: Response,\n accessType: AccessType[],\n aiOptions?: AIOptions\n) => {\n const defaultApiKey = process.env.OPENAI_API_KEY;\n\n if (accessType.includes('public')) {\n return aiOptions?.apiKey ?? defaultApiKey;\n }\n\n if (accessType.includes('apiKey') && aiOptions?.apiKey) {\n return aiOptions?.apiKey;\n }\n\n if (accessType.includes('registered_user') && res.locals.user) {\n return aiOptions?.apiKey ?? defaultApiKey;\n }\n\n // TODO: Implement premium user access\n if (accessType.includes('premium_user') && res.locals.user) {\n return aiOptions?.apiKey ?? 
defaultApiKey;\n }\n\n return undefined;\n};\n\nconst getModel = (\n provider: AIProvider,\n userApiKey: string,\n userModel?: Model,\n defaultModel: Model = 'gpt-5-mini'\n): Model => {\n // If the user uses their own API key, allow custom model selection\n if (userApiKey) {\n if (provider && provider === AIProvider.OPENAI) {\n return userModel!;\n }\n\n switch (provider) {\n case AIProvider.ANTHROPIC:\n return 'claude-sonnet-4-5-20250929';\n case AIProvider.MISTRAL:\n return 'mistral-large-latest';\n case AIProvider.DEEPSEEK:\n return 'deepseek-coder';\n case AIProvider.GEMINI:\n return 'gemini-2.5-flash';\n default:\n return defaultModel;\n }\n }\n\n // Guard: Prevent custom model usage without a user API key\n if (userModel || provider) {\n throw new Error(\n 'The user should use his own API key to use a custom model'\n );\n }\n\n return defaultModel;\n};\n\nexport type AIConfig = Omit<Parameters<typeof generateText>[0], 'prompt'>;\n\nconst DEFAULT_PROVIDER: AIProvider = AIProvider.OPENAI as AIProvider;\nconst DEFAULT_TEMPERATURE: number = 1; // ChatGPT 5 accept only temperature 1\n\nexport type AIConfigOptions = {\n userOptions?: AIOptions;\n defaultOptions?: AIOptions;\n accessType?: AccessType[];\n};\n\n/**\n * Get AI model configuration based on the selected provider and options\n * This function handles the configuration for different AI providers\n *\n * @param options Configuration options including provider, API keys, models and temperature\n * @returns Configured AI model ready to use with generateText\n */\nexport const getAIConfig = async (\n res: Response,\n options: AIConfigOptions\n): Promise<AIConfig> => {\n const {\n userOptions,\n defaultOptions,\n accessType = ['registered_user'],\n } = options;\n\n const aiOptions = {\n provider: DEFAULT_PROVIDER,\n temperature: DEFAULT_TEMPERATURE,\n ...defaultOptions,\n ...userOptions,\n } satisfies AIOptions;\n\n const apiKey = getAPIKey(res, accessType, aiOptions);\n\n // Check if API key is provided\n if (!apiKey) {\n throw new Error(`API key for ${aiOptions.provider} is missing`);\n }\n\n const selectedModel = getModel(\n aiOptions.provider,\n apiKey,\n aiOptions.model,\n defaultOptions?.model\n );\n\n const protectedOptions = {\n ...aiOptions,\n apiKey,\n model: selectedModel,\n } satisfies AIOptions;\n\n let languageModel: AIConfig['model'];\n\n switch (protectedOptions.provider) {\n case AIProvider.OPENAI: {\n languageModel = createOpenAI({\n apiKey,\n })(selectedModel);\n break;\n }\n\n case AIProvider.ANTHROPIC: {\n languageModel = createAnthropic({\n apiKey,\n })(selectedModel);\n break;\n }\n\n case AIProvider.MISTRAL: {\n languageModel = createMistral({\n apiKey,\n })(selectedModel);\n break;\n }\n\n case AIProvider.DEEPSEEK: {\n languageModel = createDeepSeek({\n apiKey,\n })(selectedModel);\n break;\n }\n\n case AIProvider.GEMINI: {\n languageModel = createGoogleGenerativeAI({\n apiKey,\n })(selectedModel);\n break;\n }\n\n default: {\n throw new Error(`Provider ${protectedOptions.provider} not supported`);\n }\n }\n\n return {\n model: languageModel,\n temperature: protectedOptions.temperature,\n 
};\n};\n"],"mappings":";;;;;;;;;;;AAyCA,IAAY,oDAAL;AACL;AACA;AACA;AACA;AACA;;;AAuBF,MAAM,aACJ,KACA,YACA,cACG;CACH,MAAM,gBAAgB,QAAQ,IAAI;AAElC,KAAI,WAAW,SAAS,SAAS,CAC/B,QAAO,WAAW,UAAU;AAG9B,KAAI,WAAW,SAAS,SAAS,IAAI,WAAW,OAC9C,QAAO,WAAW;AAGpB,KAAI,WAAW,SAAS,kBAAkB,IAAI,IAAI,OAAO,KACvD,QAAO,WAAW,UAAU;AAI9B,KAAI,WAAW,SAAS,eAAe,IAAI,IAAI,OAAO,KACpD,QAAO,WAAW,UAAU;;AAMhC,MAAM,YACJ,UACA,YACA,WACA,eAAsB,iBACZ;AAEV,KAAI,YAAY;AACd,MAAI,YAAY,aAAa,WAAW,OACtC,QAAO;AAGT,UAAQ,UAAR;GACE,KAAK,WAAW,UACd,QAAO;GACT,KAAK,WAAW,QACd,QAAO;GACT,KAAK,WAAW,SACd,QAAO;GACT,KAAK,WAAW,OACd,QAAO;GACT,QACE,QAAO;;;AAKb,KAAI,aAAa,SACf,OAAM,IAAI,MACR,4DACD;AAGH,QAAO;;AAKT,MAAMA,mBAA+B,WAAW;AAChD,MAAMC,sBAA8B;;;;;;;;AAepC,MAAa,cAAc,OACzB,KACA,YACsB;CACtB,MAAM,EACJ,aACA,gBACA,aAAa,CAAC,kBAAkB,KAC9B;CAEJ,MAAM,YAAY;EAChB,UAAU;EACV,aAAa;EACb,GAAG;EACH,GAAG;EACJ;CAED,MAAM,SAAS,UAAU,KAAK,YAAY,UAAU;AAGpD,KAAI,CAAC,OACH,OAAM,IAAI,MAAM,eAAe,UAAU,SAAS,aAAa;CAGjE,MAAM,gBAAgB,SACpB,UAAU,UACV,QACA,UAAU,OACV,gBAAgB,MACjB;CAED,MAAM,mBAAmB;EACvB,GAAG;EACH;EACA,OAAO;EACR;CAED,IAAIC;AAEJ,SAAQ,iBAAiB,UAAzB;EACE,KAAK,WAAW;AACd,qDAA6B,EAC3B,QACD,CAAC,CAAC,cAAc;AACjB;EAGF,KAAK,WAAW;AACd,2DAAgC,EAC9B,QACD,CAAC,CAAC,cAAc;AACjB;EAGF,KAAK,WAAW;AACd,uDAA8B,EAC5B,QACD,CAAC,CAAC,cAAc;AACjB;EAGF,KAAK,WAAW;AACd,yDAA+B,EAC7B,QACD,CAAC,CAAC,cAAc;AACjB;EAGF,KAAK,WAAW;AACd,iEAAyC,EACvC,QACD,CAAC,CAAC,cAAc;AACjB;EAGF,QACE,OAAM,IAAI,MAAM,YAAY,iBAAiB,SAAS,gBAAgB;;AAI1E,QAAO;EACL,OAAO;EACP,aAAa,iBAAiB;EAC/B"}
@@ -1,5 +1,4 @@
 const require_rolldown_runtime = require('../../../_virtual/rolldown_runtime.cjs');
-const require_utils_AI_aiSdk = require('../aiSdk.cjs');
 const require__utils_asset = require('../../../_virtual/_utils_asset.cjs');
 let __intlayer_core = require("@intlayer/core");
 let __intlayer_docs = require("@intlayer/docs");
@@ -23,15 +22,8 @@ const readEmbeddingsForFile = (fileKey) => {
  * - embedding: The numerical embedding vector for the chunk
  */
 const vectorStore = [];
-const MODEL = "chatgpt-4o-latest";
-const MODEL_TEMPERATURE = .1;
 const MAX_RELEVANT_CHUNKS_NB = 20;
 const MIN_RELEVANT_CHUNKS_SIMILARITY = .42;
-const aiDefaultOptions = {
-  provider: require_utils_AI_aiSdk.AIProvider.OPENAI,
-  model: MODEL,
-  temperature: MODEL_TEMPERATURE
-};
 const EMBEDDING_MODEL = "text-embedding-3-large";
 const OVERLAP_TOKENS = 200;
 const MAX_CHUNK_TOKENS = 800;
@@ -202,7 +194,6 @@ const askDocQuestion = async (messages, aiConfig, options) => {
 };
 
 //#endregion
-exports.aiDefaultOptions = aiDefaultOptions;
 exports.askDocQuestion = askDocQuestion;
 exports.initPrompt = initPrompt;
 exports.loadMarkdownFiles = loadMarkdownFiles;
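With `aiDefaultOptions` removed from the exports above, the chat model configuration is now resolved centrally: per the updated source, callers build an `AIConfig` via `getAIConfig` and pass it into `askDocQuestion`. A sketch of that call shape, with hypothetical import paths:

```ts
import type { Response } from 'express';
import { getAIConfig } from './aiSdk'; // hypothetical relative paths
import { askDocQuestion } from './askDocQuestion/askDocQuestion';

// Resolve the provider, API key and model once, then hand the resulting
// config to askDocQuestion together with the chat history.
const answer = async (res: Response) => {
  const aiConfig = await getAIConfig(res, {
    userOptions: {},
    accessType: ['public'], // the controller shown earlier uses public access
  });

  return askDocQuestion(
    [{ role: 'user', content: 'How do I configure locales?' }],
    aiConfig,
    { onMessage: (chunk) => process.stdout.write(chunk) }
  );
};
```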
@@ -1 +1 @@
- {"version":3,"file":"askDocQuestion.cjs","names":["readAsset","vectorStore: VectorStoreEl[]","MODEL: AIOptions['model']","MODEL_TEMPERATURE: AIOptions['temperature']","MAX_RELEVANT_CHUNKS_NB: number","MIN_RELEVANT_CHUNKS_SIMILARITY: number","aiDefaultOptions: AIOptions","AIProvider","EMBEDDING_MODEL: OpenAI.EmbeddingModel","OVERLAP_TOKENS: number","MAX_CHUNK_TOKENS: number","CHAR_BY_TOKEN: number","MAX_CHARS: number","OVERLAP_CHARS: number","chunks: string[]","OpenAI","resultForFile: Record<string, number[] | undefined>","initPrompt: ChatCompletionRequestMessage"],"sources":["../../../../../src/utils/AI/askDocQuestion/askDocQuestion.ts"],"sourcesContent":["import { readAsset } from 'utils:asset';\nimport { getMarkdownMetadata } from '@intlayer/core';\nimport { getBlogs, getDocs, getFrequentQuestions } from '@intlayer/docs';\nimport { streamText } from 'ai';\nimport { OpenAI } from 'openai';\nimport {\n type AIConfig,\n type AIOptions,\n AIProvider,\n type ChatCompletionRequestMessage,\n} from '../aiSdk';\n\nconst readEmbeddingsForFile = (fileKey: string): Record<string, number[]> => {\n try {\n return JSON.parse(\n readAsset(`./embeddings/${fileKey.replace('.md', '.json')}`, 'utf-8')\n ) as Record<string, number[]>;\n } catch {\n return {};\n }\n};\n\ntype VectorStoreEl = {\n fileKey: string;\n chunkNumber: number;\n content: string;\n embedding?: number[];\n docUrl: string;\n docName: string;\n};\n\n/**\n * Simple in-memory vector store to hold document embeddings and their content.\n * Each entry contains:\n * - fileKey: A unique key identifying the file\n * - chunkNumber: The number of the chunk within the document\n * - content: The chunk content\n * - embedding: The numerical embedding vector for the chunk\n */\nconst vectorStore: VectorStoreEl[] = [];\n\n/*\n * Ask question AI configuration\n */\nconst MODEL: AIOptions['model'] = 'chatgpt-4o-latest'; // Model to use for chat completions\nconst MODEL_TEMPERATURE: AIOptions['temperature'] = 0.1; // Temperature to use for chat completions\nconst MAX_RELEVANT_CHUNKS_NB: number = 20; // Maximum number of relevant chunks to attach to chatGPT context\nconst MIN_RELEVANT_CHUNKS_SIMILARITY: number = 0.42; // Minimum similarity required for a chunk to be considered relevant\n\nexport const aiDefaultOptions: AIOptions = {\n provider: AIProvider.OPENAI,\n model: MODEL,\n temperature: MODEL_TEMPERATURE,\n};\n\n/*\n * Embedding model configuration\n */\nconst EMBEDDING_MODEL: OpenAI.EmbeddingModel = 'text-embedding-3-large'; // Model to use for embedding generation\nconst OVERLAP_TOKENS: number = 200; // Number of tokens to overlap between chunks\nconst MAX_CHUNK_TOKENS: number = 800; // Maximum number of tokens per chunk\nconst CHAR_BY_TOKEN: number = 4.15; // Approximate pessimistically the number of characters per token // Can use `tiktoken` or other tokenizers to calculate it more precisely\nconst MAX_CHARS: number = MAX_CHUNK_TOKENS * CHAR_BY_TOKEN;\nconst OVERLAP_CHARS: number = OVERLAP_TOKENS * CHAR_BY_TOKEN;\n\nconst skipDocEmbeddingsIndex = process.env.SKIP_DOC_EMBEDDINGS_INDEX === 'true';\n\n/**\n * Splits a given text into chunks ensuring each chunk does not exceed MAX_CHARS.\n * @param text - The input text to split.\n * @returns - Array of text chunks.\n */\nconst chunkText = (text: string): string[] => {\n const chunks: string[] = [];\n let start = 0;\n\n while (start < text.length) {\n let end = Math.min(start + MAX_CHARS, text.length);\n\n // Ensure we don't cut words in the middle (find nearest space)\n if (end < text.length) 
{\n const lastSpace = text.lastIndexOf(' ', end);\n if (lastSpace > start) {\n end = lastSpace;\n }\n }\n\n chunks.push(text.substring(start, end));\n\n // Move start forward correctly\n const nextStart = end - OVERLAP_CHARS;\n if (nextStart <= start) {\n // Prevent infinite loop if overlap is too large\n start = end;\n } else {\n start = nextStart;\n }\n }\n\n return chunks;\n};\n\n/**\n * Generates an embedding for a given text using OpenAI's embedding API.\n * Trims the text if it exceeds the maximum allowed characters.\n *\n * @param text - The input text to generate an embedding for\n * @returns The embedding vector as a number array\n */\nconst generateEmbedding = async (text: string): Promise<number[]> => {\n try {\n const openaiClient = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });\n\n const response = await openaiClient.embeddings.create({\n model: EMBEDDING_MODEL,\n input: text,\n });\n\n return response.data[0].embedding;\n } catch (error) {\n console.error('Error generating embedding:', error);\n return [];\n }\n};\n\n/**\n * Calculates the cosine similarity between two vectors.\n * Cosine similarity measures the cosine of the angle between two vectors in an inner product space.\n * Used to determine the similarity between chunks of text.\n *\n * @param vecA - The first vector\n * @param vecB - The second vector\n * @returns The cosine similarity score\n */\nconst cosineSimilarity = (vecA: number[], vecB: number[]): number => {\n // Calculate the dot product of the two vectors\n const dotProduct = vecA.reduce((sum, a, idx) => sum + a * vecB[idx], 0);\n\n // Calculate the magnitude (Euclidean norm) of each vector\n const magnitudeA = Math.sqrt(vecA.reduce((sum, a) => sum + a * a, 0));\n const magnitudeB = Math.sqrt(vecB.reduce((sum, b) => sum + b * b, 0));\n\n // Compute and return the cosine similarity\n return dotProduct / (magnitudeA * magnitudeB);\n};\n\n/**\n * Indexes all Markdown documents by generating embeddings for each chunk and storing them in memory.\n * Persists per-document embeddings under `embeddings/<fileKey>.json`.\n * Handles cases where files have been updated and chunk counts have changed.\n */\nexport const loadMarkdownFiles = async (): Promise<void> => {\n // Retrieve documentation and blog posts in English locale\n const frequentQuestions = await getFrequentQuestions();\n const docs = await getDocs();\n const blogs = await getBlogs();\n\n const files = { ...docs, ...blogs, ...frequentQuestions }; // Combine docs and blogs into a single object\n\n // Iterate over each file key (identifier) in the combined files\n for await (const fileKey of Object.keys(files)) {\n // Get the metadata of the file\n const fileMetadata = getMarkdownMetadata(\n files[fileKey as keyof typeof files] as string\n );\n\n // Split the document into chunks based on headings\n const fileChunks = chunkText(\n files[fileKey as keyof typeof files] as string\n );\n\n // Read existing embeddings for this file\n const existingEmbeddings = readEmbeddingsForFile(fileKey);\n\n // Check if the number of chunks has changed for this file\n const existingChunksForFile = Object.keys(existingEmbeddings);\n const currentChunkCount = fileChunks.length;\n const previousChunkCount = existingChunksForFile.length;\n\n let shouldRegenerateFileEmbeddings = false;\n\n // If chunk count differs, we need to regenerate embeddings for this file\n if (currentChunkCount !== previousChunkCount) {\n console.info(\n `File \"${fileKey}\" chunk count changed: ${previousChunkCount} -> ${currentChunkCount}. 
Regenerating embeddings.`\n );\n\n shouldRegenerateFileEmbeddings = !skipDocEmbeddingsIndex;\n }\n\n // Iterate over each chunk within the current file\n let resultForFile: Record<string, number[] | undefined> = {};\n for await (const chunkIndex of Object.keys(fileChunks)) {\n const chunkNumber = Number(chunkIndex) + 1; // Chunk number starts at 1\n const chunksNumber = fileChunks.length;\n\n const fileChunk = fileChunks[\n chunkIndex as keyof typeof fileChunks\n ] as string;\n\n const chunkKeyName = `chunk_${chunkNumber}`; // Unique key for the chunk within the file\n\n // Retrieve precomputed embedding if available and file hasn't changed\n const docEmbedding = !shouldRegenerateFileEmbeddings\n ? (existingEmbeddings[\n chunkKeyName as keyof typeof existingEmbeddings\n ] as number[] | undefined)\n : undefined;\n\n const embedding = docEmbedding; // Use existing embedding if available and valid\n\n // Update the file-scoped result object with the embedding\n resultForFile = { ...resultForFile, [chunkKeyName]: embedding };\n\n // Store the embedding and content in the in-memory vector store\n vectorStore.push({\n fileKey,\n chunkNumber,\n embedding,\n content: fileChunk,\n docUrl: fileMetadata.url,\n docName: fileMetadata.title,\n });\n\n console.info(`- Loaded: ${fileKey}/${chunkKeyName}/${chunksNumber}`);\n }\n }\n};\n\n// Automatically index Markdown files\nloadMarkdownFiles();\n\n/**\n * Searches the indexed documents for the most relevant chunks based on a query.\n * Utilizes cosine similarity to find the closest matching embeddings.\n *\n * @param query - The search query provided by the user\n * @returns An array of the top matching document chunks' content\n */\nexport const searchChunkReference = async (\n query: string,\n maxResults: number = MAX_RELEVANT_CHUNKS_NB,\n minSimilarity: number = MIN_RELEVANT_CHUNKS_SIMILARITY\n): Promise<VectorStoreEl[]> => {\n // Generate an embedding for the user's query\n const queryEmbedding = await generateEmbedding(query);\n\n // Calculate similarity scores between the query embedding and each document's embedding\n const selection = vectorStore\n .filter((chunk) => chunk.embedding)\n .map((chunk) => ({\n ...chunk,\n similarity: cosineSimilarity(queryEmbedding, chunk.embedding!), // Add similarity score to each doc\n }))\n .filter((chunk) => chunk.similarity > minSimilarity) // Filter out documents with low similarity scores\n .sort((a, b) => b.similarity - a.similarity) // Sort documents by highest similarity first\n .slice(0, maxResults); // Select the top 6 most similar documents\n\n const orderedDocKeys = new Set(selection.map((chunk) => chunk.fileKey));\n\n const orderedVectorStore = vectorStore.sort((a, _b) =>\n orderedDocKeys.has(a.fileKey) ? 
-1 : 1\n );\n\n const results = orderedVectorStore.filter((chunk) =>\n selection.some(\n (v) => v.fileKey === chunk.fileKey && v.chunkNumber === chunk.chunkNumber\n )\n );\n\n // Return the content of the top matching documents\n return results;\n};\n\nconst CHAT_GPT_PROMPT = readAsset('./PROMPT.md');\n\n// Initial prompt configuration for the chatbot\nexport const initPrompt: ChatCompletionRequestMessage = {\n role: 'system',\n content: CHAT_GPT_PROMPT,\n};\n\nexport type AskDocQuestionResult = {\n response: string;\n relatedFiles: string[];\n};\n\nexport type AskDocQuestionOptions = {\n onMessage?: (chunk: string) => void;\n};\n\n/**\n * Handles the \"Ask a question\" endpoint in an Express.js route.\n * Processes user messages, retrieves relevant documents, and interacts with AI models to generate responses.\n *\n * @param messages - An array of chat messages from the user and assistant\n * @returns The assistant's response as a string\n */\nexport const askDocQuestion = async (\n messages: ChatCompletionRequestMessage[],\n aiConfig: AIConfig,\n options?: AskDocQuestionOptions\n): Promise<AskDocQuestionResult> => {\n // Format the user's question to keep only the relevant keywords\n const query = messages\n .filter((message) => message.role === 'user')\n .map((message) => `- ${message.content}`)\n .join('\\n');\n\n // 1) Find relevant documents based on the user's question\n const relevantFilesReferences = await searchChunkReference(query);\n\n // 2) Integrate the relevant documents into the initial system prompt\n const systemPrompt = initPrompt.content.replace(\n '{{relevantFilesReferences}}',\n relevantFilesReferences.length === 0\n ? 'Not relevant file found related to the question.'\n : relevantFilesReferences\n .map((doc, idx) =>\n [\n '-----',\n '---',\n `chunkId: ${idx}`,\n `docChunk: \"${doc.chunkNumber}/${doc.fileKey.length}\"`,\n `docName: \"${doc.docName}\"`,\n `docUrl: \"${doc.docUrl}\"`,\n `---`,\n doc.content,\n `-----`,\n ].join('\\n')\n )\n .join('\\n\\n') // Insert relevant docs into the prompt\n );\n\n // Format messages for AI SDK\n const aiMessages = [\n {\n role: 'system' as const,\n content: systemPrompt,\n },\n ...messages.slice(-8),\n ];\n\n if (!aiConfig) {\n throw new Error('Failed to initialize AI configuration');\n }\n\n // 3) Use the AI SDK to stream the response\n let fullResponse = '';\n const stream = streamText({\n ...aiConfig,\n messages: aiMessages,\n });\n\n // Process the stream\n for await (const chunk of stream.textStream) {\n fullResponse += chunk;\n options?.onMessage?.(chunk);\n }\n\n // 4) Extract unique related files\n const relatedFiles = [\n ...new Set(relevantFilesReferences.map((doc) => doc.fileKey)),\n ];\n\n // 5) Return the assistant's response to the user\n return {\n response: fullResponse ?? 
'Error: No result found',\n relatedFiles,\n };\n};\n"],"mappings":";;;;;;;;;AAYA,MAAM,yBAAyB,YAA8C;AAC3E,KAAI;AACF,SAAO,KAAK,MACVA,+BAAU,gBAAgB,QAAQ,QAAQ,OAAO,QAAQ,IAAI,QAAQ,CACtE;SACK;AACN,SAAO,EAAE;;;;;;;;;;;AAqBb,MAAMC,cAA+B,EAAE;AAKvC,MAAMC,QAA4B;AAClC,MAAMC,oBAA8C;AACpD,MAAMC,yBAAiC;AACvC,MAAMC,iCAAyC;AAE/C,MAAaC,mBAA8B;CACzC,UAAUC,kCAAW;CACrB,OAAO;CACP,aAAa;CACd;AAKD,MAAMC,kBAAyC;AAC/C,MAAMC,iBAAyB;AAC/B,MAAMC,mBAA2B;AACjC,MAAMC,gBAAwB;AAC9B,MAAMC,YAAoB,mBAAmB;AAC7C,MAAMC,gBAAwB,iBAAiB;AAE/C,MAAM,yBAAyB,QAAQ,IAAI,8BAA8B;;;;;;AAOzE,MAAM,aAAa,SAA2B;CAC5C,MAAMC,SAAmB,EAAE;CAC3B,IAAI,QAAQ;AAEZ,QAAO,QAAQ,KAAK,QAAQ;EAC1B,IAAI,MAAM,KAAK,IAAI,QAAQ,WAAW,KAAK,OAAO;AAGlD,MAAI,MAAM,KAAK,QAAQ;GACrB,MAAM,YAAY,KAAK,YAAY,KAAK,IAAI;AAC5C,OAAI,YAAY,MACd,OAAM;;AAIV,SAAO,KAAK,KAAK,UAAU,OAAO,IAAI,CAAC;EAGvC,MAAM,YAAY,MAAM;AACxB,MAAI,aAAa,MAEf,SAAQ;MAER,SAAQ;;AAIZ,QAAO;;;;;;;;;AAUT,MAAM,oBAAoB,OAAO,SAAoC;AACnE,KAAI;AAQF,UALiB,MAFI,IAAIC,cAAO,EAAE,QAAQ,QAAQ,IAAI,gBAAgB,CAAC,CAEnC,WAAW,OAAO;GACpD,OAAO;GACP,OAAO;GACR,CAAC,EAEc,KAAK,GAAG;UACjB,OAAO;AACd,UAAQ,MAAM,+BAA+B,MAAM;AACnD,SAAO,EAAE;;;;;;;;;;;;AAab,MAAM,oBAAoB,MAAgB,SAA2B;AASnE,QAPmB,KAAK,QAAQ,KAAK,GAAG,QAAQ,MAAM,IAAI,KAAK,MAAM,EAAE,IAGpD,KAAK,KAAK,KAAK,QAAQ,KAAK,MAAM,MAAM,IAAI,GAAG,EAAE,CAAC,GAClD,KAAK,KAAK,KAAK,QAAQ,KAAK,MAAM,MAAM,IAAI,GAAG,EAAE,CAAC;;;;;;;AAWvE,MAAa,oBAAoB,YAA2B;CAE1D,MAAM,oBAAoB,iDAA4B;CACtD,MAAM,OAAO,oCAAe;CAC5B,MAAM,QAAQ,qCAAgB;CAE9B,MAAM,QAAQ;EAAE,GAAG;EAAM,GAAG;EAAO,GAAG;EAAmB;AAGzD,YAAW,MAAM,WAAW,OAAO,KAAK,MAAM,EAAE;EAE9C,MAAM,wDACJ,MAAM,SACP;EAGD,MAAM,aAAa,UACjB,MAAM,SACP;EAGD,MAAM,qBAAqB,sBAAsB,QAAQ;EAGzD,MAAM,wBAAwB,OAAO,KAAK,mBAAmB;EAC7D,MAAM,oBAAoB,WAAW;EACrC,MAAM,qBAAqB,sBAAsB;EAEjD,IAAI,iCAAiC;AAGrC,MAAI,sBAAsB,oBAAoB;AAC5C,WAAQ,KACN,SAAS,QAAQ,yBAAyB,mBAAmB,MAAM,kBAAkB,4BACtF;AAED,oCAAiC,CAAC;;EAIpC,IAAIC,gBAAsD,EAAE;AAC5D,aAAW,MAAM,cAAc,OAAO,KAAK,WAAW,EAAE;GACtD,MAAM,cAAc,OAAO,WAAW,GAAG;GACzC,MAAM,eAAe,WAAW;GAEhC,MAAM,YAAY,WAChB;GAGF,MAAM,eAAe,SAAS;GAS9B,MAAM,YANe,CAAC,iCACjB,mBACC,gBAEF;AAKJ,mBAAgB;IAAE,GAAG;KAAgB,eAAe;IAAW;AAG/D,eAAY,KAAK;IACf;IACA;IACA;IACA,SAAS;IACT,QAAQ,aAAa;IACrB,SAAS,aAAa;IACvB,CAAC;AAEF,WAAQ,KAAK,aAAa,QAAQ,GAAG,aAAa,GAAG,eAAe;;;;AAM1E,mBAAmB;;;;;;;;AASnB,MAAa,uBAAuB,OAClC,OACA,aAAqB,wBACrB,gBAAwB,mCACK;CAE7B,MAAM,iBAAiB,MAAM,kBAAkB,MAAM;CAGrD,MAAM,YAAY,YACf,QAAQ,UAAU,MAAM,UAAU,CAClC,KAAK,WAAW;EACf,GAAG;EACH,YAAY,iBAAiB,gBAAgB,MAAM,UAAW;EAC/D,EAAE,CACF,QAAQ,UAAU,MAAM,aAAa,cAAc,CACnD,MAAM,GAAG,MAAM,EAAE,aAAa,EAAE,WAAW,CAC3C,MAAM,GAAG,WAAW;CAEvB,MAAM,iBAAiB,IAAI,IAAI,UAAU,KAAK,UAAU,MAAM,QAAQ,CAAC;AAavE,QAX2B,YAAY,MAAM,GAAG,OAC9C,eAAe,IAAI,EAAE,QAAQ,GAAG,KAAK,EACtC,CAEkC,QAAQ,UACzC,UAAU,MACP,MAAM,EAAE,YAAY,MAAM,WAAW,EAAE,gBAAgB,MAAM,YAC/D,CACF;;AAMH,MAAM,kBAAkBhB,+BAAU,cAAc;AAGhD,MAAaiB,aAA2C;CACtD,MAAM;CACN,SAAS;CACV;;;;;;;;AAkBD,MAAa,iBAAiB,OAC5B,UACA,UACA,YACkC;CAQlC,MAAM,0BAA0B,MAAM,qBANxB,SACX,QAAQ,YAAY,QAAQ,SAAS,OAAO,CAC5C,KAAK,YAAY,KAAK,QAAQ,UAAU,CACxC,KAAK,KAAK,CAGoD;CAyBjE,MAAM,aAAa,CACjB;EACE,MAAM;EACN,SAzBiB,WAAW,QAAQ,QACtC,+BACA,wBAAwB,WAAW,IAC/B,qDACA,wBACG,KAAK,KAAK,QACT;GACE;GACA;GACA,YAAY;GACZ,cAAc,IAAI,YAAY,GAAG,IAAI,QAAQ,OAAO;GACpD,aAAa,IAAI,QAAQ;GACzB,YAAY,IAAI,OAAO;GACvB;GACA,IAAI;GACJ;GACD,CAAC,KAAK,KAAK,CACb,CACA,KAAK,OAAO,CACpB;EAOE,EACD,GAAG,SAAS,MAAM,GAAG,CACtB;AAED,KAAI,CAAC,SACH,OAAM,IAAI,MAAM,wCAAwC;CAI1D,IAAI,eAAe;CACnB,MAAM,4BAAoB;EACxB,GAAG;EACH,UAAU;EACX,CAAC;AAGF,YAAW,MAAM,SAAS,OAAO,YAAY;AAC3C,kBAAgB;AAChB,WAAS,YAAY,MAAM;;CAI7B,MAAM,eAAe,CACnB,GAAG,IAAI,IAAI,wBAAwB,KAAK,QAAQ,IAAI,QAAQ,CAAC,CAC9D;AAGD,QAAO;EACL,UAAU,gBAAgB;EAC1B;EACD"}
+ {"version":3,"file":"askDocQuestion.cjs","names":["readAsset","vectorStore: VectorStoreEl[]","MAX_RELEVANT_CHUNKS_NB: number","MIN_RELEVANT_CHUNKS_SIMILARITY: number","EMBEDDING_MODEL: OpenAI.EmbeddingModel","OVERLAP_TOKENS: number","MAX_CHUNK_TOKENS: number","CHAR_BY_TOKEN: number","MAX_CHARS: number","OVERLAP_CHARS: number","chunks: string[]","OpenAI","resultForFile: Record<string, number[] | undefined>","initPrompt: ChatCompletionRequestMessage"],"sources":["../../../../../src/utils/AI/askDocQuestion/askDocQuestion.ts"],"sourcesContent":["import { readAsset } from 'utils:asset';\nimport { getMarkdownMetadata } from '@intlayer/core';\nimport { getBlogs, getDocs, getFrequentQuestions } from '@intlayer/docs';\nimport { streamText } from 'ai';\nimport { OpenAI } from 'openai';\nimport type { AIConfig, ChatCompletionRequestMessage } from '../aiSdk';\n\nconst readEmbeddingsForFile = (fileKey: string): Record<string, number[]> => {\n try {\n return JSON.parse(\n readAsset(`./embeddings/${fileKey.replace('.md', '.json')}`, 'utf-8')\n ) as Record<string, number[]>;\n } catch {\n return {};\n }\n};\n\ntype VectorStoreEl = {\n fileKey: string;\n chunkNumber: number;\n content: string;\n embedding?: number[];\n docUrl: string;\n docName: string;\n};\n\n/**\n * Simple in-memory vector store to hold document embeddings and their content.\n * Each entry contains:\n * - fileKey: A unique key identifying the file\n * - chunkNumber: The number of the chunk within the document\n * - content: The chunk content\n * - embedding: The numerical embedding vector for the chunk\n */\nconst vectorStore: VectorStoreEl[] = [];\n\n/*\n * Ask question AI configuration\n */\nconst MAX_RELEVANT_CHUNKS_NB: number = 20; // Maximum number of relevant chunks to attach to chatGPT context\nconst MIN_RELEVANT_CHUNKS_SIMILARITY: number = 0.42; // Minimum similarity required for a chunk to be considered relevant\n\n/*\n * Embedding model configuration\n */\nconst EMBEDDING_MODEL: OpenAI.EmbeddingModel = 'text-embedding-3-large'; // Model to use for embedding generation\nconst OVERLAP_TOKENS: number = 200; // Number of tokens to overlap between chunks\nconst MAX_CHUNK_TOKENS: number = 800; // Maximum number of tokens per chunk\nconst CHAR_BY_TOKEN: number = 4.15; // Approximate pessimistically the number of characters per token // Can use `tiktoken` or other tokenizers to calculate it more precisely\nconst MAX_CHARS: number = MAX_CHUNK_TOKENS * CHAR_BY_TOKEN;\nconst OVERLAP_CHARS: number = OVERLAP_TOKENS * CHAR_BY_TOKEN;\n\nconst skipDocEmbeddingsIndex = process.env.SKIP_DOC_EMBEDDINGS_INDEX === 'true';\n\n/**\n * Splits a given text into chunks ensuring each chunk does not exceed MAX_CHARS.\n * @param text - The input text to split.\n * @returns - Array of text chunks.\n */\nconst chunkText = (text: string): string[] => {\n const chunks: string[] = [];\n let start = 0;\n\n while (start < text.length) {\n let end = Math.min(start + MAX_CHARS, text.length);\n\n // Ensure we don't cut words in the middle (find nearest space)\n if (end < text.length) {\n const lastSpace = text.lastIndexOf(' ', end);\n if (lastSpace > start) {\n end = lastSpace;\n }\n }\n\n chunks.push(text.substring(start, end));\n\n // Move start forward correctly\n const nextStart = end - OVERLAP_CHARS;\n if (nextStart <= start) {\n // Prevent infinite loop if overlap is too large\n start = end;\n } else {\n start = nextStart;\n }\n }\n\n return chunks;\n};\n\n/**\n * Generates an embedding for a given text using OpenAI's embedding API.\n * Trims the text if it 
exceeds the maximum allowed characters.\n *\n * @param text - The input text to generate an embedding for\n * @returns The embedding vector as a number array\n */\nconst generateEmbedding = async (text: string): Promise<number[]> => {\n try {\n const openaiClient = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });\n\n const response = await openaiClient.embeddings.create({\n model: EMBEDDING_MODEL,\n input: text,\n });\n\n return response.data[0].embedding;\n } catch (error) {\n console.error('Error generating embedding:', error);\n return [];\n }\n};\n\n/**\n * Calculates the cosine similarity between two vectors.\n * Cosine similarity measures the cosine of the angle between two vectors in an inner product space.\n * Used to determine the similarity between chunks of text.\n *\n * @param vecA - The first vector\n * @param vecB - The second vector\n * @returns The cosine similarity score\n */\nconst cosineSimilarity = (vecA: number[], vecB: number[]): number => {\n // Calculate the dot product of the two vectors\n const dotProduct = vecA.reduce((sum, a, idx) => sum + a * vecB[idx], 0);\n\n // Calculate the magnitude (Euclidean norm) of each vector\n const magnitudeA = Math.sqrt(vecA.reduce((sum, a) => sum + a * a, 0));\n const magnitudeB = Math.sqrt(vecB.reduce((sum, b) => sum + b * b, 0));\n\n // Compute and return the cosine similarity\n return dotProduct / (magnitudeA * magnitudeB);\n};\n\n/**\n * Indexes all Markdown documents by generating embeddings for each chunk and storing them in memory.\n * Persists per-document embeddings under `embeddings/<fileKey>.json`.\n * Handles cases where files have been updated and chunk counts have changed.\n */\nexport const loadMarkdownFiles = async (): Promise<void> => {\n // Retrieve documentation and blog posts in English locale\n const frequentQuestions = await getFrequentQuestions();\n const docs = await getDocs();\n const blogs = await getBlogs();\n\n const files = { ...docs, ...blogs, ...frequentQuestions }; // Combine docs and blogs into a single object\n\n // Iterate over each file key (identifier) in the combined files\n for await (const fileKey of Object.keys(files)) {\n // Get the metadata of the file\n const fileMetadata = getMarkdownMetadata(\n files[fileKey as keyof typeof files] as string\n );\n\n // Split the document into chunks based on headings\n const fileChunks = chunkText(\n files[fileKey as keyof typeof files] as string\n );\n\n // Read existing embeddings for this file\n const existingEmbeddings = readEmbeddingsForFile(fileKey);\n\n // Check if the number of chunks has changed for this file\n const existingChunksForFile = Object.keys(existingEmbeddings);\n const currentChunkCount = fileChunks.length;\n const previousChunkCount = existingChunksForFile.length;\n\n let shouldRegenerateFileEmbeddings = false;\n\n // If chunk count differs, we need to regenerate embeddings for this file\n if (currentChunkCount !== previousChunkCount) {\n console.info(\n `File \"${fileKey}\" chunk count changed: ${previousChunkCount} -> ${currentChunkCount}. 
Regenerating embeddings.`\n );\n\n shouldRegenerateFileEmbeddings = !skipDocEmbeddingsIndex;\n }\n\n // Iterate over each chunk within the current file\n let resultForFile: Record<string, number[] | undefined> = {};\n for await (const chunkIndex of Object.keys(fileChunks)) {\n const chunkNumber = Number(chunkIndex) + 1; // Chunk number starts at 1\n const chunksNumber = fileChunks.length;\n\n const fileChunk = fileChunks[\n chunkIndex as keyof typeof fileChunks\n ] as string;\n\n const chunkKeyName = `chunk_${chunkNumber}`; // Unique key for the chunk within the file\n\n // Retrieve precomputed embedding if available and file hasn't changed\n const docEmbedding = !shouldRegenerateFileEmbeddings\n ? (existingEmbeddings[\n chunkKeyName as keyof typeof existingEmbeddings\n ] as number[] | undefined)\n : undefined;\n\n const embedding = docEmbedding; // Use existing embedding if available and valid\n\n // Update the file-scoped result object with the embedding\n resultForFile = { ...resultForFile, [chunkKeyName]: embedding };\n\n // Store the embedding and content in the in-memory vector store\n vectorStore.push({\n fileKey,\n chunkNumber,\n embedding,\n content: fileChunk,\n docUrl: fileMetadata.url,\n docName: fileMetadata.title,\n });\n\n console.info(`- Loaded: ${fileKey}/${chunkKeyName}/${chunksNumber}`);\n }\n }\n};\n\n// Automatically index Markdown files\nloadMarkdownFiles();\n\n/**\n * Searches the indexed documents for the most relevant chunks based on a query.\n * Utilizes cosine similarity to find the closest matching embeddings.\n *\n * @param query - The search query provided by the user\n * @returns An array of the top matching document chunks' content\n */\nexport const searchChunkReference = async (\n query: string,\n maxResults: number = MAX_RELEVANT_CHUNKS_NB,\n minSimilarity: number = MIN_RELEVANT_CHUNKS_SIMILARITY\n): Promise<VectorStoreEl[]> => {\n // Generate an embedding for the user's query\n const queryEmbedding = await generateEmbedding(query);\n\n // Calculate similarity scores between the query embedding and each document's embedding\n const selection = vectorStore\n .filter((chunk) => chunk.embedding)\n .map((chunk) => ({\n ...chunk,\n similarity: cosineSimilarity(queryEmbedding, chunk.embedding!), // Add similarity score to each doc\n }))\n .filter((chunk) => chunk.similarity > minSimilarity) // Filter out documents with low similarity scores\n .sort((a, b) => b.similarity - a.similarity) // Sort documents by highest similarity first\n .slice(0, maxResults); // Select the top 6 most similar documents\n\n const orderedDocKeys = new Set(selection.map((chunk) => chunk.fileKey));\n\n const orderedVectorStore = vectorStore.sort((a, _b) =>\n orderedDocKeys.has(a.fileKey) ? 
-1 : 1\n );\n\n const results = orderedVectorStore.filter((chunk) =>\n selection.some(\n (v) => v.fileKey === chunk.fileKey && v.chunkNumber === chunk.chunkNumber\n )\n );\n\n // Return the content of the top matching documents\n return results;\n};\n\nconst CHAT_GPT_PROMPT = readAsset('./PROMPT.md');\n\n// Initial prompt configuration for the chatbot\nexport const initPrompt: ChatCompletionRequestMessage = {\n role: 'system',\n content: CHAT_GPT_PROMPT,\n};\n\nexport type AskDocQuestionResult = {\n response: string;\n relatedFiles: string[];\n};\n\nexport type AskDocQuestionOptions = {\n onMessage?: (chunk: string) => void;\n};\n\n/**\n * Handles the \"Ask a question\" endpoint in an Express.js route.\n * Processes user messages, retrieves relevant documents, and interacts with AI models to generate responses.\n *\n * @param messages - An array of chat messages from the user and assistant\n * @returns The assistant's response as a string\n */\nexport const askDocQuestion = async (\n messages: ChatCompletionRequestMessage[],\n aiConfig: AIConfig,\n options?: AskDocQuestionOptions\n): Promise<AskDocQuestionResult> => {\n // Format the user's question to keep only the relevant keywords\n const query = messages\n .filter((message) => message.role === 'user')\n .map((message) => `- ${message.content}`)\n .join('\\n');\n\n // 1) Find relevant documents based on the user's question\n const relevantFilesReferences = await searchChunkReference(query);\n\n // 2) Integrate the relevant documents into the initial system prompt\n const systemPrompt = initPrompt.content.replace(\n '{{relevantFilesReferences}}',\n relevantFilesReferences.length === 0\n ? 'Not relevant file found related to the question.'\n : relevantFilesReferences\n .map((doc, idx) =>\n [\n '-----',\n '---',\n `chunkId: ${idx}`,\n `docChunk: \"${doc.chunkNumber}/${doc.fileKey.length}\"`,\n `docName: \"${doc.docName}\"`,\n `docUrl: \"${doc.docUrl}\"`,\n `---`,\n doc.content,\n `-----`,\n ].join('\\n')\n )\n .join('\\n\\n') // Insert relevant docs into the prompt\n );\n\n // Format messages for AI SDK\n const aiMessages = [\n {\n role: 'system' as const,\n content: systemPrompt,\n },\n ...messages.slice(-8),\n ];\n\n if (!aiConfig) {\n throw new Error('Failed to initialize AI configuration');\n }\n\n // 3) Use the AI SDK to stream the response\n let fullResponse = '';\n const stream = streamText({\n ...aiConfig,\n messages: aiMessages,\n });\n\n // Process the stream\n for await (const chunk of stream.textStream) {\n fullResponse += chunk;\n options?.onMessage?.(chunk);\n }\n\n // 4) Extract unique related files\n const relatedFiles = [\n ...new Set(relevantFilesReferences.map((doc) => doc.fileKey)),\n ];\n\n // 5) Return the assistant's response to the user\n return {\n response: fullResponse ?? 
'Error: No result found',\n relatedFiles,\n };\n};\n"],"mappings":";;;;;;;;AAOA,MAAM,yBAAyB,YAA8C;AAC3E,KAAI;AACF,SAAO,KAAK,MACVA,+BAAU,gBAAgB,QAAQ,QAAQ,OAAO,QAAQ,IAAI,QAAQ,CACtE;SACK;AACN,SAAO,EAAE;;;;;;;;;;;AAqBb,MAAMC,cAA+B,EAAE;AAKvC,MAAMC,yBAAiC;AACvC,MAAMC,iCAAyC;AAK/C,MAAMC,kBAAyC;AAC/C,MAAMC,iBAAyB;AAC/B,MAAMC,mBAA2B;AACjC,MAAMC,gBAAwB;AAC9B,MAAMC,YAAoB,mBAAmB;AAC7C,MAAMC,gBAAwB,iBAAiB;AAE/C,MAAM,yBAAyB,QAAQ,IAAI,8BAA8B;;;;;;AAOzE,MAAM,aAAa,SAA2B;CAC5C,MAAMC,SAAmB,EAAE;CAC3B,IAAI,QAAQ;AAEZ,QAAO,QAAQ,KAAK,QAAQ;EAC1B,IAAI,MAAM,KAAK,IAAI,QAAQ,WAAW,KAAK,OAAO;AAGlD,MAAI,MAAM,KAAK,QAAQ;GACrB,MAAM,YAAY,KAAK,YAAY,KAAK,IAAI;AAC5C,OAAI,YAAY,MACd,OAAM;;AAIV,SAAO,KAAK,KAAK,UAAU,OAAO,IAAI,CAAC;EAGvC,MAAM,YAAY,MAAM;AACxB,MAAI,aAAa,MAEf,SAAQ;MAER,SAAQ;;AAIZ,QAAO;;;;;;;;;AAUT,MAAM,oBAAoB,OAAO,SAAoC;AACnE,KAAI;AAQF,UALiB,MAFI,IAAIC,cAAO,EAAE,QAAQ,QAAQ,IAAI,gBAAgB,CAAC,CAEnC,WAAW,OAAO;GACpD,OAAO;GACP,OAAO;GACR,CAAC,EAEc,KAAK,GAAG;UACjB,OAAO;AACd,UAAQ,MAAM,+BAA+B,MAAM;AACnD,SAAO,EAAE;;;;;;;;;;;;AAab,MAAM,oBAAoB,MAAgB,SAA2B;AASnE,QAPmB,KAAK,QAAQ,KAAK,GAAG,QAAQ,MAAM,IAAI,KAAK,MAAM,EAAE,IAGpD,KAAK,KAAK,KAAK,QAAQ,KAAK,MAAM,MAAM,IAAI,GAAG,EAAE,CAAC,GAClD,KAAK,KAAK,KAAK,QAAQ,KAAK,MAAM,MAAM,IAAI,GAAG,EAAE,CAAC;;;;;;;AAWvE,MAAa,oBAAoB,YAA2B;CAE1D,MAAM,oBAAoB,iDAA4B;CACtD,MAAM,OAAO,oCAAe;CAC5B,MAAM,QAAQ,qCAAgB;CAE9B,MAAM,QAAQ;EAAE,GAAG;EAAM,GAAG;EAAO,GAAG;EAAmB;AAGzD,YAAW,MAAM,WAAW,OAAO,KAAK,MAAM,EAAE;EAE9C,MAAM,wDACJ,MAAM,SACP;EAGD,MAAM,aAAa,UACjB,MAAM,SACP;EAGD,MAAM,qBAAqB,sBAAsB,QAAQ;EAGzD,MAAM,wBAAwB,OAAO,KAAK,mBAAmB;EAC7D,MAAM,oBAAoB,WAAW;EACrC,MAAM,qBAAqB,sBAAsB;EAEjD,IAAI,iCAAiC;AAGrC,MAAI,sBAAsB,oBAAoB;AAC5C,WAAQ,KACN,SAAS,QAAQ,yBAAyB,mBAAmB,MAAM,kBAAkB,4BACtF;AAED,oCAAiC,CAAC;;EAIpC,IAAIC,gBAAsD,EAAE;AAC5D,aAAW,MAAM,cAAc,OAAO,KAAK,WAAW,EAAE;GACtD,MAAM,cAAc,OAAO,WAAW,GAAG;GACzC,MAAM,eAAe,WAAW;GAEhC,MAAM,YAAY,WAChB;GAGF,MAAM,eAAe,SAAS;GAS9B,MAAM,YANe,CAAC,iCACjB,mBACC,gBAEF;AAKJ,mBAAgB;IAAE,GAAG;KAAgB,eAAe;IAAW;AAG/D,eAAY,KAAK;IACf;IACA;IACA;IACA,SAAS;IACT,QAAQ,aAAa;IACrB,SAAS,aAAa;IACvB,CAAC;AAEF,WAAQ,KAAK,aAAa,QAAQ,GAAG,aAAa,GAAG,eAAe;;;;AAM1E,mBAAmB;;;;;;;;AASnB,MAAa,uBAAuB,OAClC,OACA,aAAqB,wBACrB,gBAAwB,mCACK;CAE7B,MAAM,iBAAiB,MAAM,kBAAkB,MAAM;CAGrD,MAAM,YAAY,YACf,QAAQ,UAAU,MAAM,UAAU,CAClC,KAAK,WAAW;EACf,GAAG;EACH,YAAY,iBAAiB,gBAAgB,MAAM,UAAW;EAC/D,EAAE,CACF,QAAQ,UAAU,MAAM,aAAa,cAAc,CACnD,MAAM,GAAG,MAAM,EAAE,aAAa,EAAE,WAAW,CAC3C,MAAM,GAAG,WAAW;CAEvB,MAAM,iBAAiB,IAAI,IAAI,UAAU,KAAK,UAAU,MAAM,QAAQ,CAAC;AAavE,QAX2B,YAAY,MAAM,GAAG,OAC9C,eAAe,IAAI,EAAE,QAAQ,GAAG,KAAK,EACtC,CAEkC,QAAQ,UACzC,UAAU,MACP,MAAM,EAAE,YAAY,MAAM,WAAW,EAAE,gBAAgB,MAAM,YAC/D,CACF;;AAMH,MAAM,kBAAkBZ,+BAAU,cAAc;AAGhD,MAAaa,aAA2C;CACtD,MAAM;CACN,SAAS;CACV;;;;;;;;AAkBD,MAAa,iBAAiB,OAC5B,UACA,UACA,YACkC;CAQlC,MAAM,0BAA0B,MAAM,qBANxB,SACX,QAAQ,YAAY,QAAQ,SAAS,OAAO,CAC5C,KAAK,YAAY,KAAK,QAAQ,UAAU,CACxC,KAAK,KAAK,CAGoD;CAyBjE,MAAM,aAAa,CACjB;EACE,MAAM;EACN,SAzBiB,WAAW,QAAQ,QACtC,+BACA,wBAAwB,WAAW,IAC/B,qDACA,wBACG,KAAK,KAAK,QACT;GACE;GACA;GACA,YAAY;GACZ,cAAc,IAAI,YAAY,GAAG,IAAI,QAAQ,OAAO;GACpD,aAAa,IAAI,QAAQ;GACzB,YAAY,IAAI,OAAO;GACvB;GACA,IAAI;GACJ;GACD,CAAC,KAAK,KAAK,CACb,CACA,KAAK,OAAO,CACpB;EAOE,EACD,GAAG,SAAS,MAAM,GAAG,CACtB;AAED,KAAI,CAAC,SACH,OAAM,IAAI,MAAM,wCAAwC;CAI1D,IAAI,eAAe;CACnB,MAAM,4BAAoB;EACxB,GAAG;EACH,UAAU;EACX,CAAC;AAGF,YAAW,MAAM,SAAS,OAAO,YAAY;AAC3C,kBAAgB;AAChB,WAAS,YAAY,MAAM;;CAI7B,MAAM,eAAe,CACnB,GAAG,IAAI,IAAI,wBAAwB,KAAK,QAAQ,IAAI,QAAQ,CAAC,CAC9D;AAGD,QAAO;EACL,UAAU,gBAAgB;EAC1B;EACD"}
@@ -1,5 +1,4 @@
  const require_rolldown_runtime = require('../../../_virtual/rolldown_runtime.cjs');
- const require_utils_AI_aiSdk = require('../aiSdk.cjs');
  let node_fs = require("node:fs");
  let node_path = require("node:path");
  let node_url = require("node:url");
@@ -34,13 +33,6 @@ const writeEmbeddingsForFile = (fileKey, data) => {
  * - embedding: The numerical embedding vector for the chunk
  */
  const vectorStore = [];
- const MODEL = "chatgpt-4o-latest";
- const MODEL_TEMPERATURE = .1;
- const aiDefaultOptions = {
- provider: require_utils_AI_aiSdk.AIProvider.OPENAI,
- model: MODEL,
- temperature: MODEL_TEMPERATURE
- };
  const EMBEDDING_MODEL = "text-embedding-3-large";
  const OVERLAP_TOKENS = 200;
  const MAX_CHUNK_TOKENS = 800;
@@ -154,6 +146,5 @@ const indexMarkdownFiles = async () => {
  };
 
  //#endregion
- exports.aiDefaultOptions = aiDefaultOptions;
  exports.indexMarkdownFiles = indexMarkdownFiles;
  //# sourceMappingURL=indexMarkdownFiles.cjs.map
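While the hunk above drops the chat-model defaults (`aiDefaultOptions`) from this module, the embedding pipeline it feeds is unchanged. Its chunking routine appears in full in the sourcemap below; the following condensed sketch restates it under the same constants (MAX_CHUNK_TOKENS = 800, OVERLAP_TOKENS = 200, CHAR_BY_TOKEN ≈ 4.15), approximating token counts by character counts as the original comments note — a real tokenizer such as `tiktoken` would be more precise.

```typescript
// Sliding-window chunking with overlap, restated from the sources below.
const CHAR_BY_TOKEN = 4.15;
const MAX_CHARS = 800 * CHAR_BY_TOKEN; // ≈ max tokens per chunk, in chars
const OVERLAP_CHARS = 200 * CHAR_BY_TOKEN; // ≈ overlap between chunks, in chars

const chunkText = (text: string): string[] => {
  const chunks: string[] = [];
  let start = 0;

  while (start < text.length) {
    let end = Math.min(start + MAX_CHARS, text.length);

    // Avoid splitting mid-word: back up to the nearest space when possible.
    if (end < text.length) {
      const lastSpace = text.lastIndexOf(' ', end);
      if (lastSpace > start) end = lastSpace;
    }

    chunks.push(text.substring(start, end));

    // Step forward while keeping OVERLAP_CHARS of shared context; fall back
    // to a non-overlapping step if the overlap would not advance the cursor.
    const nextStart = end - OVERLAP_CHARS;
    start = nextStart <= start ? end : nextStart;
  }

  return chunks;
};
```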
@@ -1 +1 @@
- {"version":3,"file":"indexMarkdownFiles.cjs","names":["__dirname","vectorStore: VectorStoreEl[]","MODEL: AIOptions['model']","MODEL_TEMPERATURE: AIOptions['temperature']","aiDefaultOptions: AIOptions","AIProvider","EMBEDDING_MODEL: OpenAI.EmbeddingModel","OVERLAP_TOKENS: number","MAX_CHUNK_TOKENS: number","CHAR_BY_TOKEN: number","MAX_CHARS: number","OVERLAP_CHARS: number","chunks: string[]","OpenAI","resultForFile: Record<string, number[]>"],"sources":["../../../../../src/utils/AI/askDocQuestion/indexMarkdownFiles.ts"],"sourcesContent":["import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'node:fs';\nimport { dirname, join } from 'node:path';\nimport { fileURLToPath } from 'node:url';\nimport { getMarkdownMetadata } from '@intlayer/core';\nimport { getBlogs, getDocs, getFrequentQuestions } from '@intlayer/docs';\nimport dotenv from 'dotenv';\nimport { OpenAI } from 'openai';\nimport { type AIOptions, AIProvider } from '../aiSdk';\n\nconst OUTPUT_EMBEDDINGS_DIR = 'src/utils/AI/askDocQuestion/embeddings';\nconst __dirname = dirname(fileURLToPath(import.meta.url));\n\nconst readEmbeddingsForFile = (fileKey: string): Record<string, number[]> => {\n try {\n return JSON.parse(\n readFileSync(\n `${__dirname}/embeddings/${fileKey.replace('.md', '.json')}`,\n 'utf-8'\n )\n ) as Record<string, number[]>;\n } catch {\n return {};\n }\n};\n\nconst writeEmbeddingsForFile = (\n fileKey: string,\n data: Record<string, number[]>\n): void => {\n const filePath = join(\n OUTPUT_EMBEDDINGS_DIR,\n `${fileKey.replace('.md', '.json')}`\n );\n const dir = dirname(filePath);\n if (!existsSync(dir)) {\n mkdirSync(dir, { recursive: true });\n }\n writeFileSync(filePath, JSON.stringify(data));\n};\n\ntype VectorStoreEl = {\n fileKey: string;\n chunkNumber: number;\n content: string;\n embedding: number[];\n docUrl: string;\n docName: string;\n};\n\n/**\n * Simple in-memory vector store to hold document embeddings and their content.\n * Each entry contains:\n * - fileKey: A unique key identifying the file\n * - chunkNumber: The number of the chunk within the document\n * - content: The chunk content\n * - embedding: The numerical embedding vector for the chunk\n */\nconst vectorStore: VectorStoreEl[] = [];\n\n/*\n * Ask question AI configuration\n */\nconst MODEL: AIOptions['model'] = 'chatgpt-4o-latest'; // Model to use for chat completions\nconst MODEL_TEMPERATURE: AIOptions['temperature'] = 0.1; // Temperature to use for chat completions\n\nexport const aiDefaultOptions: AIOptions = {\n provider: AIProvider.OPENAI,\n model: MODEL,\n temperature: MODEL_TEMPERATURE,\n};\n\n/*\n * Embedding model configuration\n */\nconst EMBEDDING_MODEL: OpenAI.EmbeddingModel = 'text-embedding-3-large'; // Model to use for embedding generation\nconst OVERLAP_TOKENS: number = 200; // Number of tokens to overlap between chunks\nconst MAX_CHUNK_TOKENS: number = 800; // Maximum number of tokens per chunk\nconst CHAR_BY_TOKEN: number = 4.15; // Approximate pessimistically the number of characters per token // Can use `tiktoken` or other tokenizers to calculate it more precisely\nconst MAX_CHARS: number = MAX_CHUNK_TOKENS * CHAR_BY_TOKEN;\nconst OVERLAP_CHARS: number = OVERLAP_TOKENS * CHAR_BY_TOKEN;\n\nconst skipDocEmbeddingsIndex = process.env.SKIP_DOC_EMBEDDINGS_INDEX === 'true';\n\n/**\n * Splits a given text into chunks ensuring each chunk does not exceed MAX_CHARS.\n * @param text - The input text to split.\n * @returns - Array of text chunks.\n */\nconst chunkText = (text: string): string[] => {\n const chunks: 
string[] = [];\n let start = 0;\n\n while (start < text.length) {\n let end = Math.min(start + MAX_CHARS, text.length);\n\n // Ensure we don't cut words in the middle (find nearest space)\n if (end < text.length) {\n const lastSpace = text.lastIndexOf(' ', end);\n if (lastSpace > start) {\n end = lastSpace;\n }\n }\n\n chunks.push(text.substring(start, end));\n\n // Move start forward correctly\n const nextStart = end - OVERLAP_CHARS;\n if (nextStart <= start) {\n // Prevent infinite loop if overlap is too large\n start = end;\n } else {\n start = nextStart;\n }\n }\n\n return chunks;\n};\n\n/**\n * Generates an embedding for a given text using OpenAI's embedding API.\n * Trims the text if it exceeds the maximum allowed characters.\n *\n * @param text - The input text to generate an embedding for\n * @returns The embedding vector as a number array\n */\nconst generateEmbedding = async (text: string): Promise<number[]> => {\n try {\n const openaiClient = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });\n\n const response = await openaiClient.embeddings.create({\n model: EMBEDDING_MODEL,\n input: text,\n });\n\n return response.data[0].embedding;\n } catch (error) {\n console.error('Error generating embedding:', error);\n return [];\n }\n};\n\n/**\n * Indexes all Markdown documents by generating embeddings for each chunk and storing them in memory.\n * Persists per-document embeddings under `embeddings/<fileKey>.json`.\n * Handles cases where files have been updated and chunk counts have changed.\n */\nexport const indexMarkdownFiles = async (): Promise<void> => {\n const env = process.env.NODE_ENV;\n dotenv.config({\n path: [`.env.${env}.local`, `.env.${env}`, '.env.local', '.env'],\n });\n\n // Retrieve documentation and blog posts in English locale\n const frequentQuestions = await getFrequentQuestions();\n const docs = await getDocs();\n const blogs = await getBlogs();\n\n const files = { ...docs, ...blogs, ...frequentQuestions }; // Combine docs and blogs into a single object\n\n // Iterate over each file key (identifier) in the combined files\n for await (const fileKey of Object.keys(files)) {\n // Get the metadata of the file\n const fileMetadata = getMarkdownMetadata(\n files[fileKey as keyof typeof files] as string\n );\n\n // Split the document into chunks based on headings\n const fileChunks = chunkText(\n files[fileKey as keyof typeof files] as string\n );\n\n // Read existing embeddings for this file\n const existingEmbeddings = readEmbeddingsForFile(fileKey);\n\n // Check if the number of chunks has changed for this file\n const existingChunksForFile = Object.keys(existingEmbeddings);\n const currentChunkCount = fileChunks.length;\n const previousChunkCount = existingChunksForFile.length;\n\n let shouldRegenerateFileEmbeddings = false;\n\n // If chunk count differs, we need to regenerate embeddings for this file\n if (currentChunkCount !== previousChunkCount) {\n console.info(\n `File \"${fileKey}\" chunk count changed: ${previousChunkCount} -> ${currentChunkCount}. 
Regenerating embeddings.`\n );\n\n shouldRegenerateFileEmbeddings = !skipDocEmbeddingsIndex;\n }\n\n // Iterate over each chunk within the current file\n let resultForFile: Record<string, number[]> = {};\n for await (const chunkIndex of Object.keys(fileChunks)) {\n const chunkNumber = Number(chunkIndex) + 1; // Chunk number starts at 1\n const chunksNumber = fileChunks.length;\n\n const fileChunk = fileChunks[\n chunkIndex as keyof typeof fileChunks\n ] as string;\n\n const chunkKeyName = `chunk_${chunkNumber}`; // Unique key for the chunk within the file\n\n // Retrieve precomputed embedding if available and file hasn't changed\n const docEmbedding = !shouldRegenerateFileEmbeddings\n ? (existingEmbeddings[\n chunkKeyName as keyof typeof existingEmbeddings\n ] as number[] | undefined)\n : undefined;\n\n let embedding = docEmbedding; // Use existing embedding if available and valid\n\n if (!embedding) {\n embedding = await generateEmbedding(fileChunk); // Generate embedding if not present or file changed\n console.info(`- Generated new embedding: ${fileKey}/${chunkKeyName}`);\n }\n\n // Update the file-scoped result object with the embedding\n resultForFile = { ...resultForFile, [chunkKeyName]: embedding };\n\n // Store the embedding and content in the in-memory vector store\n vectorStore.push({\n fileKey,\n chunkNumber,\n embedding,\n content: fileChunk,\n docUrl: fileMetadata.url,\n docName: fileMetadata.title,\n });\n\n console.info(`- Indexed: ${fileKey}/${chunkKeyName}/${chunksNumber}`);\n }\n\n // Persist per-file embeddings if changed\n try {\n if (\n JSON.stringify(resultForFile) !== JSON.stringify(existingEmbeddings)\n ) {\n writeEmbeddingsForFile(fileKey, resultForFile);\n }\n } catch (error) {\n console.error(error);\n }\n }\n};\n"],"mappings":";;;;;;;;;;;;AASA,MAAM,wBAAwB;AAC9B,MAAMA,+GAAkD,CAAC;AAEzD,MAAM,yBAAyB,YAA8C;AAC3E,KAAI;AACF,SAAO,KAAK,gCAER,GAAGA,YAAU,cAAc,QAAQ,QAAQ,OAAO,QAAQ,IAC1D,QACD,CACF;SACK;AACN,SAAO,EAAE;;;AAIb,MAAM,0BACJ,SACA,SACS;CACT,MAAM,+BACJ,uBACA,GAAG,QAAQ,QAAQ,OAAO,QAAQ,GACnC;CACD,MAAM,6BAAc,SAAS;AAC7B,KAAI,yBAAY,IAAI,CAClB,wBAAU,KAAK,EAAE,WAAW,MAAM,CAAC;AAErC,4BAAc,UAAU,KAAK,UAAU,KAAK,CAAC;;;;;;;;;;AAoB/C,MAAMC,cAA+B,EAAE;AAKvC,MAAMC,QAA4B;AAClC,MAAMC,oBAA8C;AAEpD,MAAaC,mBAA8B;CACzC,UAAUC,kCAAW;CACrB,OAAO;CACP,aAAa;CACd;AAKD,MAAMC,kBAAyC;AAC/C,MAAMC,iBAAyB;AAC/B,MAAMC,mBAA2B;AACjC,MAAMC,gBAAwB;AAC9B,MAAMC,YAAoB,mBAAmB;AAC7C,MAAMC,gBAAwB,iBAAiB;AAE/C,MAAM,yBAAyB,QAAQ,IAAI,8BAA8B;;;;;;AAOzE,MAAM,aAAa,SAA2B;CAC5C,MAAMC,SAAmB,EAAE;CAC3B,IAAI,QAAQ;AAEZ,QAAO,QAAQ,KAAK,QAAQ;EAC1B,IAAI,MAAM,KAAK,IAAI,QAAQ,WAAW,KAAK,OAAO;AAGlD,MAAI,MAAM,KAAK,QAAQ;GACrB,MAAM,YAAY,KAAK,YAAY,KAAK,IAAI;AAC5C,OAAI,YAAY,MACd,OAAM;;AAIV,SAAO,KAAK,KAAK,UAAU,OAAO,IAAI,CAAC;EAGvC,MAAM,YAAY,MAAM;AACxB,MAAI,aAAa,MAEf,SAAQ;MAER,SAAQ;;AAIZ,QAAO;;;;;;;;;AAUT,MAAM,oBAAoB,OAAO,SAAoC;AACnE,KAAI;AAQF,UALiB,MAFI,IAAIC,cAAO,EAAE,QAAQ,QAAQ,IAAI,gBAAgB,CAAC,CAEnC,WAAW,OAAO;GACpD,OAAO;GACP,OAAO;GACR,CAAC,EAEc,KAAK,GAAG;UACjB,OAAO;AACd,UAAQ,MAAM,+BAA+B,MAAM;AACnD,SAAO,EAAE;;;;;;;;AASb,MAAa,qBAAqB,YAA2B;CAC3D,MAAM,MAAM,QAAQ,IAAI;AACxB,gBAAO,OAAO,EACZ,MAAM;EAAC,QAAQ,IAAI;EAAS,QAAQ;EAAO;EAAc;EAAO,EACjE,CAAC;CAGF,MAAM,oBAAoB,iDAA4B;CACtD,MAAM,OAAO,oCAAe;CAC5B,MAAM,QAAQ,qCAAgB;CAE9B,MAAM,QAAQ;EAAE,GAAG;EAAM,GAAG;EAAO,GAAG;EAAmB;AAGzD,YAAW,MAAM,WAAW,OAAO,KAAK,MAAM,EAAE;EAE9C,MAAM,wDACJ,MAAM,SACP;EAGD,MAAM,aAAa,UACjB,MAAM,SACP;EAGD,MAAM,qBAAqB,sBAAsB,QAAQ;EAGzD,MAAM,wBAAwB,OAAO,KAAK,mBAAmB;EAC7D,MAAM,oBAAoB,WAAW;EACrC,MAAM,qBAAqB,sBAAsB;EAEjD,IAAI,iCAAiC;AAGrC,MAAI,sBAAsB,oBAAoB;AAC5C,WAAQ,KACN,SAAS,QAAQ,yBAAyB
,mBAAmB,MAAM,kBAAkB,4BACtF;AAED,oCAAiC,CAAC;;EAIpC,IAAIC,gBAA0C,EAAE;AAChD,aAAW,MAAM,cAAc,OAAO,KAAK,WAAW,EAAE;GACtD,MAAM,cAAc,OAAO,WAAW,GAAG;GACzC,MAAM,eAAe,WAAW;GAEhC,MAAM,YAAY,WAChB;GAGF,MAAM,eAAe,SAAS;GAS9B,IAAI,YANiB,CAAC,iCACjB,mBACC,gBAEF;AAIJ,OAAI,CAAC,WAAW;AACd,gBAAY,MAAM,kBAAkB,UAAU;AAC9C,YAAQ,KAAK,8BAA8B,QAAQ,GAAG,eAAe;;AAIvE,mBAAgB;IAAE,GAAG;KAAgB,eAAe;IAAW;AAG/D,eAAY,KAAK;IACf;IACA;IACA;IACA,SAAS;IACT,QAAQ,aAAa;IACrB,SAAS,aAAa;IACvB,CAAC;AAEF,WAAQ,KAAK,cAAc,QAAQ,GAAG,aAAa,GAAG,eAAe;;AAIvE,MAAI;AACF,OACE,KAAK,UAAU,cAAc,KAAK,KAAK,UAAU,mBAAmB,CAEpE,wBAAuB,SAAS,cAAc;WAEzC,OAAO;AACd,WAAQ,MAAM,MAAM"}
+ {"version":3,"file":"indexMarkdownFiles.cjs","names":["__dirname","vectorStore: VectorStoreEl[]","EMBEDDING_MODEL: OpenAI.EmbeddingModel","OVERLAP_TOKENS: number","MAX_CHUNK_TOKENS: number","CHAR_BY_TOKEN: number","MAX_CHARS: number","OVERLAP_CHARS: number","chunks: string[]","OpenAI","resultForFile: Record<string, number[]>"],"sources":["../../../../../src/utils/AI/askDocQuestion/indexMarkdownFiles.ts"],"sourcesContent":["import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'node:fs';\nimport { dirname, join } from 'node:path';\nimport { fileURLToPath } from 'node:url';\nimport { getMarkdownMetadata } from '@intlayer/core';\nimport { getBlogs, getDocs, getFrequentQuestions } from '@intlayer/docs';\nimport dotenv from 'dotenv';\nimport { OpenAI } from 'openai';\nimport { type AIOptions, AIProvider } from '../aiSdk';\n\nconst OUTPUT_EMBEDDINGS_DIR = 'src/utils/AI/askDocQuestion/embeddings';\nconst __dirname = dirname(fileURLToPath(import.meta.url));\n\nconst readEmbeddingsForFile = (fileKey: string): Record<string, number[]> => {\n try {\n return JSON.parse(\n readFileSync(\n `${__dirname}/embeddings/${fileKey.replace('.md', '.json')}`,\n 'utf-8'\n )\n ) as Record<string, number[]>;\n } catch {\n return {};\n }\n};\n\nconst writeEmbeddingsForFile = (\n fileKey: string,\n data: Record<string, number[]>\n): void => {\n const filePath = join(\n OUTPUT_EMBEDDINGS_DIR,\n `${fileKey.replace('.md', '.json')}`\n );\n const dir = dirname(filePath);\n if (!existsSync(dir)) {\n mkdirSync(dir, { recursive: true });\n }\n writeFileSync(filePath, JSON.stringify(data));\n};\n\ntype VectorStoreEl = {\n fileKey: string;\n chunkNumber: number;\n content: string;\n embedding: number[];\n docUrl: string;\n docName: string;\n};\n\n/**\n * Simple in-memory vector store to hold document embeddings and their content.\n * Each entry contains:\n * - fileKey: A unique key identifying the file\n * - chunkNumber: The number of the chunk within the document\n * - content: The chunk content\n * - embedding: The numerical embedding vector for the chunk\n */\nconst vectorStore: VectorStoreEl[] = [];\n\n/*\n * Embedding model configuration\n */\nconst EMBEDDING_MODEL: OpenAI.EmbeddingModel = 'text-embedding-3-large'; // Model to use for embedding generation\nconst OVERLAP_TOKENS: number = 200; // Number of tokens to overlap between chunks\nconst MAX_CHUNK_TOKENS: number = 800; // Maximum number of tokens per chunk\nconst CHAR_BY_TOKEN: number = 4.15; // Approximate pessimistically the number of characters per token // Can use `tiktoken` or other tokenizers to calculate it more precisely\nconst MAX_CHARS: number = MAX_CHUNK_TOKENS * CHAR_BY_TOKEN;\nconst OVERLAP_CHARS: number = OVERLAP_TOKENS * CHAR_BY_TOKEN;\n\nconst skipDocEmbeddingsIndex = process.env.SKIP_DOC_EMBEDDINGS_INDEX === 'true';\n\n/**\n * Splits a given text into chunks ensuring each chunk does not exceed MAX_CHARS.\n * @param text - The input text to split.\n * @returns - Array of text chunks.\n */\nconst chunkText = (text: string): string[] => {\n const chunks: string[] = [];\n let start = 0;\n\n while (start < text.length) {\n let end = Math.min(start + MAX_CHARS, text.length);\n\n // Ensure we don't cut words in the middle (find nearest space)\n if (end < text.length) {\n const lastSpace = text.lastIndexOf(' ', end);\n if (lastSpace > start) {\n end = lastSpace;\n }\n }\n\n chunks.push(text.substring(start, end));\n\n // Move start forward correctly\n const nextStart = end - OVERLAP_CHARS;\n if (nextStart <= start) {\n // Prevent infinite 
loop if overlap is too large\n start = end;\n } else {\n start = nextStart;\n }\n }\n\n return chunks;\n};\n\n/**\n * Generates an embedding for a given text using OpenAI's embedding API.\n * Trims the text if it exceeds the maximum allowed characters.\n *\n * @param text - The input text to generate an embedding for\n * @returns The embedding vector as a number array\n */\nconst generateEmbedding = async (text: string): Promise<number[]> => {\n try {\n const openaiClient = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });\n\n const response = await openaiClient.embeddings.create({\n model: EMBEDDING_MODEL,\n input: text,\n });\n\n return response.data[0].embedding;\n } catch (error) {\n console.error('Error generating embedding:', error);\n return [];\n }\n};\n\n/**\n * Indexes all Markdown documents by generating embeddings for each chunk and storing them in memory.\n * Persists per-document embeddings under `embeddings/<fileKey>.json`.\n * Handles cases where files have been updated and chunk counts have changed.\n */\nexport const indexMarkdownFiles = async (): Promise<void> => {\n const env = process.env.NODE_ENV;\n dotenv.config({\n path: [`.env.${env}.local`, `.env.${env}`, '.env.local', '.env'],\n });\n\n // Retrieve documentation and blog posts in English locale\n const frequentQuestions = await getFrequentQuestions();\n const docs = await getDocs();\n const blogs = await getBlogs();\n\n const files = { ...docs, ...blogs, ...frequentQuestions }; // Combine docs and blogs into a single object\n\n // Iterate over each file key (identifier) in the combined files\n for await (const fileKey of Object.keys(files)) {\n // Get the metadata of the file\n const fileMetadata = getMarkdownMetadata(\n files[fileKey as keyof typeof files] as string\n );\n\n // Split the document into chunks based on headings\n const fileChunks = chunkText(\n files[fileKey as keyof typeof files] as string\n );\n\n // Read existing embeddings for this file\n const existingEmbeddings = readEmbeddingsForFile(fileKey);\n\n // Check if the number of chunks has changed for this file\n const existingChunksForFile = Object.keys(existingEmbeddings);\n const currentChunkCount = fileChunks.length;\n const previousChunkCount = existingChunksForFile.length;\n\n let shouldRegenerateFileEmbeddings = false;\n\n // If chunk count differs, we need to regenerate embeddings for this file\n if (currentChunkCount !== previousChunkCount) {\n console.info(\n `File \"${fileKey}\" chunk count changed: ${previousChunkCount} -> ${currentChunkCount}. Regenerating embeddings.`\n );\n\n shouldRegenerateFileEmbeddings = !skipDocEmbeddingsIndex;\n }\n\n // Iterate over each chunk within the current file\n let resultForFile: Record<string, number[]> = {};\n for await (const chunkIndex of Object.keys(fileChunks)) {\n const chunkNumber = Number(chunkIndex) + 1; // Chunk number starts at 1\n const chunksNumber = fileChunks.length;\n\n const fileChunk = fileChunks[\n chunkIndex as keyof typeof fileChunks\n ] as string;\n\n const chunkKeyName = `chunk_${chunkNumber}`; // Unique key for the chunk within the file\n\n // Retrieve precomputed embedding if available and file hasn't changed\n const docEmbedding = !shouldRegenerateFileEmbeddings\n ? 
(existingEmbeddings[\n chunkKeyName as keyof typeof existingEmbeddings\n ] as number[] | undefined)\n : undefined;\n\n let embedding = docEmbedding; // Use existing embedding if available and valid\n\n if (!embedding) {\n embedding = await generateEmbedding(fileChunk); // Generate embedding if not present or file changed\n console.info(`- Generated new embedding: ${fileKey}/${chunkKeyName}`);\n }\n\n // Update the file-scoped result object with the embedding\n resultForFile = { ...resultForFile, [chunkKeyName]: embedding };\n\n // Store the embedding and content in the in-memory vector store\n vectorStore.push({\n fileKey,\n chunkNumber,\n embedding,\n content: fileChunk,\n docUrl: fileMetadata.url,\n docName: fileMetadata.title,\n });\n\n console.info(`- Indexed: ${fileKey}/${chunkKeyName}/${chunksNumber}`);\n }\n\n // Persist per-file embeddings if changed\n try {\n if (\n JSON.stringify(resultForFile) !== JSON.stringify(existingEmbeddings)\n ) {\n writeEmbeddingsForFile(fileKey, resultForFile);\n }\n } catch (error) {\n console.error(error);\n }\n }\n};\n"],"mappings":";;;;;;;;;;;AASA,MAAM,wBAAwB;AAC9B,MAAMA,+GAAkD,CAAC;AAEzD,MAAM,yBAAyB,YAA8C;AAC3E,KAAI;AACF,SAAO,KAAK,gCAER,GAAGA,YAAU,cAAc,QAAQ,QAAQ,OAAO,QAAQ,IAC1D,QACD,CACF;SACK;AACN,SAAO,EAAE;;;AAIb,MAAM,0BACJ,SACA,SACS;CACT,MAAM,+BACJ,uBACA,GAAG,QAAQ,QAAQ,OAAO,QAAQ,GACnC;CACD,MAAM,6BAAc,SAAS;AAC7B,KAAI,yBAAY,IAAI,CAClB,wBAAU,KAAK,EAAE,WAAW,MAAM,CAAC;AAErC,4BAAc,UAAU,KAAK,UAAU,KAAK,CAAC;;;;;;;;;;AAoB/C,MAAMC,cAA+B,EAAE;AAKvC,MAAMC,kBAAyC;AAC/C,MAAMC,iBAAyB;AAC/B,MAAMC,mBAA2B;AACjC,MAAMC,gBAAwB;AAC9B,MAAMC,YAAoB,mBAAmB;AAC7C,MAAMC,gBAAwB,iBAAiB;AAE/C,MAAM,yBAAyB,QAAQ,IAAI,8BAA8B;;;;;;AAOzE,MAAM,aAAa,SAA2B;CAC5C,MAAMC,SAAmB,EAAE;CAC3B,IAAI,QAAQ;AAEZ,QAAO,QAAQ,KAAK,QAAQ;EAC1B,IAAI,MAAM,KAAK,IAAI,QAAQ,WAAW,KAAK,OAAO;AAGlD,MAAI,MAAM,KAAK,QAAQ;GACrB,MAAM,YAAY,KAAK,YAAY,KAAK,IAAI;AAC5C,OAAI,YAAY,MACd,OAAM;;AAIV,SAAO,KAAK,KAAK,UAAU,OAAO,IAAI,CAAC;EAGvC,MAAM,YAAY,MAAM;AACxB,MAAI,aAAa,MAEf,SAAQ;MAER,SAAQ;;AAIZ,QAAO;;;;;;;;;AAUT,MAAM,oBAAoB,OAAO,SAAoC;AACnE,KAAI;AAQF,UALiB,MAFI,IAAIC,cAAO,EAAE,QAAQ,QAAQ,IAAI,gBAAgB,CAAC,CAEnC,WAAW,OAAO;GACpD,OAAO;GACP,OAAO;GACR,CAAC,EAEc,KAAK,GAAG;UACjB,OAAO;AACd,UAAQ,MAAM,+BAA+B,MAAM;AACnD,SAAO,EAAE;;;;;;;;AASb,MAAa,qBAAqB,YAA2B;CAC3D,MAAM,MAAM,QAAQ,IAAI;AACxB,gBAAO,OAAO,EACZ,MAAM;EAAC,QAAQ,IAAI;EAAS,QAAQ;EAAO;EAAc;EAAO,EACjE,CAAC;CAGF,MAAM,oBAAoB,iDAA4B;CACtD,MAAM,OAAO,oCAAe;CAC5B,MAAM,QAAQ,qCAAgB;CAE9B,MAAM,QAAQ;EAAE,GAAG;EAAM,GAAG;EAAO,GAAG;EAAmB;AAGzD,YAAW,MAAM,WAAW,OAAO,KAAK,MAAM,EAAE;EAE9C,MAAM,wDACJ,MAAM,SACP;EAGD,MAAM,aAAa,UACjB,MAAM,SACP;EAGD,MAAM,qBAAqB,sBAAsB,QAAQ;EAGzD,MAAM,wBAAwB,OAAO,KAAK,mBAAmB;EAC7D,MAAM,oBAAoB,WAAW;EACrC,MAAM,qBAAqB,sBAAsB;EAEjD,IAAI,iCAAiC;AAGrC,MAAI,sBAAsB,oBAAoB;AAC5C,WAAQ,KACN,SAAS,QAAQ,yBAAyB,mBAAmB,MAAM,kBAAkB,4BACtF;AAED,oCAAiC,CAAC;;EAIpC,IAAIC,gBAA0C,EAAE;AAChD,aAAW,MAAM,cAAc,OAAO,KAAK,WAAW,EAAE;GACtD,MAAM,cAAc,OAAO,WAAW,GAAG;GACzC,MAAM,eAAe,WAAW;GAEhC,MAAM,YAAY,WAChB;GAGF,MAAM,eAAe,SAAS;GAS9B,IAAI,YANiB,CAAC,iCACjB,mBACC,gBAEF;AAIJ,OAAI,CAAC,WAAW;AACd,gBAAY,MAAM,kBAAkB,UAAU;AAC9C,YAAQ,KAAK,8BAA8B,QAAQ,GAAG,eAAe;;AAIvE,mBAAgB;IAAE,GAAG;KAAgB,eAAe;IAAW;AAG/D,eAAY,KAAK;IACf;IACA;IACA;IACA,SAAS;IACT,QAAQ,aAAa;IACrB,SAAS,aAAa;IACvB,CAAC;AAEF,WAAQ,KAAK,cAAc,QAAQ,GAAG,aAAa,GAAG,eAAe;;AAIvE,MAAI;AACF,OACE,KAAK,UAAU,cAAc,KAAK,KAAK,UAAU,mBAAmB,CAEpE,wBAAuB,SAAS,cAAc;WAEzC,OAAO;AACd,WAAQ,MAAM,MAAM"}
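The indexMarkdownFiles sources in the map above also cache embeddings per file (as `chunk_N`-keyed JSON) and regenerate them only when a file's chunk count changes. Here is a simplified sketch of that decision; `loadCached` and `embed` are hypothetical stand-ins for the JSON file I/O and the `text-embedding-3-large` API call in the sources, and the `SKIP_DOC_EMBEDDINGS_INDEX` escape hatch is omitted.

```typescript
// Per-file embedding cache check, simplified from the sources above.
type EmbeddingsByChunk = Record<string, number[]>;

const reindexFile = async (
  fileKey: string,
  chunks: string[],
  loadCached: (fileKey: string) => EmbeddingsByChunk, // stand-in for file I/O
  embed: (text: string) => Promise<number[]> // stand-in for the embeddings API
): Promise<EmbeddingsByChunk> => {
  const cached = loadCached(fileKey);

  // A changed chunk count means the file was edited, so every chunk's
  // embedding is regenerated; otherwise cached vectors are reused as-is.
  const regenerate = Object.keys(cached).length !== chunks.length;

  const result: EmbeddingsByChunk = {};
  for (const [index, chunk] of chunks.entries()) {
    const key = `chunk_${index + 1}`; // chunk numbering starts at 1
    result[key] = (!regenerate && cached[key]) || (await embed(chunk));
  }
  return result;
};
```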