@intlayer/backend 5.3.12 → 5.4.0-canary.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33) hide show
  1. package/dist/cjs/controllers/ai.controller.cjs +51 -12
  2. package/dist/cjs/controllers/ai.controller.cjs.map +1 -1
  3. package/dist/cjs/models/discussion.model.cjs +34 -0
  4. package/dist/cjs/models/discussion.model.cjs.map +1 -0
  5. package/dist/cjs/schemas/discussion.schema.cjs +81 -0
  6. package/dist/cjs/schemas/discussion.schema.cjs.map +1 -0
  7. package/dist/cjs/types/discussion.types.cjs +17 -0
  8. package/dist/cjs/types/discussion.types.cjs.map +1 -0
  9. package/dist/cjs/utils/AI/askDocQuestion/askDocQuestion.cjs +13 -5
  10. package/dist/cjs/utils/AI/askDocQuestion/askDocQuestion.cjs.map +1 -1
  11. package/dist/cjs/utils/AI/askDocQuestion/embeddings.json +19664 -1224
  12. package/dist/esm/controllers/ai.controller.mjs +51 -12
  13. package/dist/esm/controllers/ai.controller.mjs.map +1 -1
  14. package/dist/esm/models/discussion.model.mjs +10 -0
  15. package/dist/esm/models/discussion.model.mjs.map +1 -0
  16. package/dist/esm/schemas/discussion.schema.mjs +57 -0
  17. package/dist/esm/schemas/discussion.schema.mjs.map +1 -0
  18. package/dist/esm/types/discussion.types.mjs +1 -0
  19. package/dist/esm/types/discussion.types.mjs.map +1 -0
  20. package/dist/esm/utils/AI/askDocQuestion/askDocQuestion.mjs +13 -5
  21. package/dist/esm/utils/AI/askDocQuestion/askDocQuestion.mjs.map +1 -1
  22. package/dist/esm/utils/AI/askDocQuestion/embeddings.json +19664 -1224
  23. package/dist/types/controllers/ai.controller.d.ts +2 -1
  24. package/dist/types/controllers/ai.controller.d.ts.map +1 -1
  25. package/dist/types/models/discussion.model.d.ts +3 -0
  26. package/dist/types/models/discussion.model.d.ts.map +1 -0
  27. package/dist/types/schemas/discussion.schema.d.ts +12 -0
  28. package/dist/types/schemas/discussion.schema.d.ts.map +1 -0
  29. package/dist/types/types/discussion.types.d.ts +20 -0
  30. package/dist/types/types/discussion.types.d.ts.map +1 -0
  31. package/dist/types/utils/AI/askDocQuestion/askDocQuestion.d.ts +4 -1
  32. package/dist/types/utils/AI/askDocQuestion/askDocQuestion.d.ts.map +1 -1
  33. package/package.json +8 -8
@@ -36,9 +36,10 @@ __export(ai_controller_exports, {
36
36
  autocomplete: () => autocomplete
37
37
  });
38
38
  module.exports = __toCommonJS(ai_controller_exports);
39
+ var import_discussion = require('./../models/discussion.model.cjs');
39
40
  var import_dictionary = require('./../services/dictionary.service.cjs');
40
- var import_tag = require('./../services/tag.service.cjs');
41
41
  var tagService = __toESM(require('./../services/tag.service.cjs'), 1);
42
+ var import_tag = require('./../services/tag.service.cjs');
42
43
  var askDocQuestionUtil = __toESM(require('./../utils/AI/askDocQuestion/askDocQuestion.cjs'), 1);
43
44
  var auditContentDeclarationUtil = __toESM(require('./../utils/AI/auditDictionary/index.cjs'), 1);
44
45
  var auditContentDeclarationFieldUtil = __toESM(require('./../utils/AI/auditDictionaryField/index.cjs'), 1);
@@ -219,17 +220,55 @@ const auditTag = async (req, res, _next) => {
219
220
  }
220
221
  };
221
222
  const askDocQuestion = async (req, res) => {
222
- const { messages } = req.body;
223
- try {
224
- const response = await askDocQuestionUtil.askDocQuestion(messages);
225
- const responseData = (0, import_responseData.formatResponse)({
226
- data: response
227
- });
228
- res.json(responseData);
229
- } catch (error) {
230
- import_errors.ErrorHandler.handleAppErrorResponse(res, error);
231
- return;
232
- }
223
+ const { messages, discutionId } = req.body;
224
+ const { user, project, organization } = res.locals;
225
+ res.setHeader("Content-Type", "text/event-stream; charset=utf-8");
226
+ res.setHeader("Cache-Control", "no-cache, no-transform");
227
+ res.setHeader("Connection", "keep-alive");
228
+ res.setHeader("X-Accel-Buffering", "no");
229
+ res.flushHeaders?.();
230
+ res.write(": connected\n\n");
231
+ res.flush?.();
232
+ askDocQuestionUtil.askDocQuestion(messages, {
233
+ onMessage: (chunk) => {
234
+ res.write(`data: ${JSON.stringify({ chunk })}
235
+
236
+ `);
237
+ res.flush?.();
238
+ }
239
+ }).then(async (fullResponse) => {
240
+ await import_discussion.DiscussionModel.findOneAndUpdate(
241
+ { discutionId },
242
+ {
243
+ $set: {
244
+ discutionId,
245
+ userId: user?._id,
246
+ projectId: project?._id,
247
+ organizationId: organization?._id,
248
+ messages: messages.map((msg) => ({
249
+ role: msg.role,
250
+ content: msg.content,
251
+ timestamp: /* @__PURE__ */ new Date()
252
+ }))
253
+ }
254
+ },
255
+ { upsert: true, new: true }
256
+ );
257
+ res.write(
258
+ `data: ${JSON.stringify({ done: true, response: fullResponse })}
259
+
260
+ `
261
+ );
262
+ res.end();
263
+ }).catch((err) => {
264
+ res.write(
265
+ `event: error
266
+ data: ${JSON.stringify({ message: err.message })}
267
+
268
+ `
269
+ );
270
+ res.end();
271
+ });
233
272
  };
234
273
  const autocomplete = async (req, res) => {
235
274
  try {
@@ -1 +1 @@
1
- {"version":3,"sources":["../../../src/controllers/ai.controller.ts"],"sourcesContent":["import { type KeyPath } from '@intlayer/core';\nimport type { ResponseWithInformation } from '@middlewares/sessionAuth.middleware';\nimport { getDictionariesByTags } from '@services/dictionary.service';\nimport { getTagsByKeys } from '@services/tag.service';\nimport * as tagService from '@services/tag.service';\nimport * as askDocQuestionUtil from '@utils/AI/askDocQuestion/askDocQuestion';\nimport * as auditContentDeclarationUtil from '@utils/AI/auditDictionary';\nimport * as auditContentDeclarationFieldUtil from '@utils/AI/auditDictionaryField';\nimport * as auditContentDeclarationMetadataUtil from '@utils/AI/auditDictionaryMetadata';\nimport * as autocompleteUtil from '@utils/AI/autocomplete';\nimport * as auditTagUtil from '@utils/auditTag';\nimport { type AppError, ErrorHandler } from '@utils/errors';\nimport { formatResponse, type ResponseData } from '@utils/responseData';\nimport type { NextFunction, Request } from 'express';\nimport type { Locales } from 'intlayer';\nimport type { Dictionary } from '@/types/dictionary.types';\nimport type { Tag } from '@/types/tag.types';\n\nexport type AuditContentDeclarationBody = {\n openAiApiKey?: string;\n customPrompt?: string;\n locales: Locales[];\n defaultLocale: Locales;\n fileContent: string;\n filePath?: string;\n model?: string;\n temperature?: number;\n tagsKeys?: string[];\n};\nexport type AuditContentDeclarationResult =\n ResponseData<auditContentDeclarationUtil.AuditFileResultData>;\n\n/**\n * Retrieves a list of dictionaries based on filters and pagination.\n */\nexport const auditContentDeclaration = async (\n req: Request<AuditContentDeclarationBody>,\n res: ResponseWithInformation<AuditContentDeclarationResult>,\n _next: NextFunction\n): Promise<void> => {\n const { user, project, organization } = res.locals;\n const {\n fileContent,\n filePath,\n openAiApiKey,\n customPrompt,\n model,\n temperature,\n locales,\n defaultLocale,\n tagsKeys,\n } = req.body;\n\n if (!openAiApiKey) {\n if (!user || !project || !organization) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n }\n\n try {\n let tags: Tag[] = [];\n\n if (project?.organizationId) {\n tags = await getTagsByKeys(tagsKeys, project.organizationId);\n }\n\n const auditResponse = await auditContentDeclarationUtil.auditDictionary({\n fileContent,\n filePath,\n model,\n temperature,\n openAiApiKey,\n customPrompt,\n locales,\n defaultLocale,\n tags,\n });\n\n if (!auditResponse) {\n ErrorHandler.handleGenericErrorResponse(res, 'AUDIT_FAILED');\n return;\n }\n\n const responseData =\n formatResponse<auditContentDeclarationUtil.AuditFileResultData>({\n data: auditResponse,\n });\n\n res.json(responseData);\n return;\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n\nexport type AuditContentDeclarationFieldBody = {\n openAiApiKey?: string;\n customPrompt?: string;\n locales: Locales[];\n fileContent: string;\n filePath?: string;\n model?: string;\n temperature?: number;\n tagsKeys?: string[];\n keyPath: KeyPath[];\n};\nexport type AuditContentDeclarationFieldResult =\n ResponseData<auditContentDeclarationUtil.AuditFileResultData>;\n\n/**\n * Retrieves a list of dictionaries based on filters and pagination.\n */\nexport const auditContentDeclarationField = async (\n req: Request<AuditContentDeclarationFieldBody>,\n res: ResponseWithInformation<AuditContentDeclarationFieldResult>,\n _next: NextFunction\n): Promise<void> => {\n const { user, project, organization } = res.locals;\n const {\n fileContent,\n openAiApiKey,\n customPrompt,\n model,\n temperature,\n locales,\n tagsKeys,\n keyPath,\n } = req.body;\n\n if (!openAiApiKey) {\n if (!user || !project || !organization) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n }\n\n try {\n let tags: Tag[] = [];\n\n if (project?.organizationId) {\n tags = await getTagsByKeys(tagsKeys, project.organizationId);\n }\n\n const auditResponse =\n await auditContentDeclarationFieldUtil.auditDictionaryField({\n fileContent,\n model,\n temperature,\n openAiApiKey,\n customPrompt,\n locales,\n tags,\n keyPath,\n });\n\n if (!auditResponse) {\n ErrorHandler.handleGenericErrorResponse(res, 'AUDIT_FAILED');\n return;\n }\n\n const responseData =\n formatResponse<auditContentDeclarationUtil.AuditFileResultData>({\n data: auditResponse,\n });\n\n res.json(responseData);\n return;\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n\nexport type AuditContentDeclarationMetadataBody = {\n openAiApiKey?: string;\n customPrompt?: string;\n fileContent: string;\n model?: string;\n temperature?: number;\n};\nexport type AuditContentDeclarationMetadataResult =\n ResponseData<auditContentDeclarationUtil.AuditFileResultData>;\n\n/**\n * Retrieves a list of dictionaries based on filters and pagination.\n */\nexport const auditContentDeclarationMetadata = async (\n req: Request<AuditContentDeclarationMetadataBody>,\n res: ResponseWithInformation<AuditContentDeclarationMetadataResult>,\n _next: NextFunction\n): Promise<void> => {\n const { user, organization, project } = res.locals;\n const { fileContent, openAiApiKey, customPrompt, model, temperature } =\n req.body;\n\n if (!openAiApiKey) {\n if (!user || !project || !organization) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n }\n\n try {\n const tags: Tag[] = await tagService.findTags(\n {\n organizationId: organization?._id,\n },\n 0,\n 1000\n );\n\n const auditResponse =\n await auditContentDeclarationMetadataUtil.auditDictionaryMetadata({\n fileContent,\n model,\n openAiApiKey,\n temperature,\n customPrompt,\n tags,\n });\n\n if (!auditResponse) {\n ErrorHandler.handleGenericErrorResponse(res, 'AUDIT_FAILED');\n return;\n }\n\n const responseData =\n formatResponse<auditContentDeclarationUtil.AuditFileResultData>({\n data: auditResponse,\n });\n\n res.json(responseData);\n return;\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n\nexport type AuditTagBody = {\n openAiApiKey?: string;\n customPrompt?: string;\n filePath?: string;\n model?: string;\n temperature?: number;\n tag: Tag;\n};\nexport type AuditTagResult =\n ResponseData<auditContentDeclarationUtil.AuditFileResultData>;\n\n/**\n * Retrieves a list of dictionaries based on filters and pagination.\n */\nexport const auditTag = async (\n req: Request<undefined, undefined, AuditTagBody>,\n res: ResponseWithInformation<AuditTagResult>,\n _next: NextFunction\n): Promise<void> => {\n const { user, project, organization } = res.locals;\n const { openAiApiKey, customPrompt, model, temperature, tag } = req.body;\n\n if (!openAiApiKey) {\n if (!user || !project || !organization) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n }\n\n try {\n let dictionaries: Dictionary[] = [];\n if (project?.organizationId) {\n dictionaries = await getDictionariesByTags([tag.key], project._id);\n }\n\n const auditResponse = await auditTagUtil.auditTag({\n model,\n openAiApiKey,\n temperature,\n customPrompt,\n dictionaries,\n tag,\n });\n\n if (!auditResponse) {\n ErrorHandler.handleGenericErrorResponse(res, 'AUDIT_FAILED');\n return;\n }\n\n const responseData =\n formatResponse<auditContentDeclarationUtil.AuditFileResultData>({\n data: auditResponse,\n });\n\n res.json(responseData);\n return;\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n\nexport type AskDocQuestionBody = {\n messages: askDocQuestionUtil.ChatCompletionRequestMessage[];\n};\nexport type AskDocQuestionResult =\n ResponseData<askDocQuestionUtil.AskDocQuestionResult>;\n\nexport const askDocQuestion = async (\n req: Request<undefined, undefined, AskDocQuestionBody>,\n res: ResponseWithInformation<AskDocQuestionResult>\n) => {\n const { messages } = req.body;\n try {\n const response = await askDocQuestionUtil.askDocQuestion(messages);\n\n const responseData =\n formatResponse<askDocQuestionUtil.AskDocQuestionResult>({\n data: response,\n });\n\n res.json(responseData);\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n\nexport type AutocompleteBody = {\n text: string;\n model?: string;\n temperature?: number;\n openAiApiKey?: string;\n customPrompt?: string;\n};\n\nexport type AutocompleteResponse = ResponseData<{\n autocompletion: string;\n}>;\n\nexport const autocomplete = async (\n req: Request<undefined, undefined, AutocompleteBody>,\n res: ResponseWithInformation<AutocompleteResponse>\n) => {\n try {\n const { text, model, openAiApiKey, customPrompt, temperature } = req.body;\n const { user, project, organization } = res.locals;\n\n if (!openAiApiKey) {\n if (!user || !project || !organization) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n }\n\n const response = (await autocompleteUtil.autocomplete({\n text,\n model,\n openAiApiKey,\n temperature,\n customPrompt,\n })) ?? {\n autocompletion: '',\n tokenUsed: 0,\n };\n\n const responseData =\n formatResponse<autocompleteUtil.AutocompleteFileResultData>({\n data: response,\n });\n\n res.json(responseData);\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,wBAAsC;AACtC,iBAA8B;AAC9B,iBAA4B;AAC5B,yBAAoC;AACpC,kCAA6C;AAC7C,uCAAkD;AAClD,0CAAqD;AACrD,uBAAkC;AAClC,mBAA8B;AAC9B,oBAA4C;AAC5C,0BAAkD;AAuB3C,MAAM,0BAA0B,OACrC,KACA,KACA,UACkB;AAClB,QAAM,EAAE,MAAM,SAAS,aAAa,IAAI,IAAI;AAC5C,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI,IAAI;AAER,MAAI,CAAC,cAAc;AACjB,QAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,cAAc;AACtC,iCAAa,2BAA2B,KAAK,kBAAkB;AAC/D;AAAA,IACF;AAAA,EACF;AAEA,MAAI;AACF,QAAI,OAAc,CAAC;AAEnB,QAAI,SAAS,gBAAgB;AAC3B,aAAO,UAAM,0BAAc,UAAU,QAAQ,cAAc;AAAA,IAC7D;AAEA,UAAM,gBAAgB,MAAM,4BAA4B,gBAAgB;AAAA,MACtE;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAED,QAAI,CAAC,eAAe;AAClB,iCAAa,2BAA2B,KAAK,cAAc;AAC3D;AAAA,IACF;AAEA,UAAM,mBACJ,oCAAgE;AAAA,MAC9D,MAAM;AAAA,IACR,CAAC;AAEH,QAAI,KAAK,YAAY;AACrB;AAAA,EACF,SAAS,OAAO;AACd,+BAAa,uBAAuB,KAAK,KAAiB;AAC1D;AAAA,EACF;AACF;AAmBO,MAAM,+BAA+B,OAC1C,KACA,KACA,UACkB;AAClB,QAAM,EAAE,MAAM,SAAS,aAAa,IAAI,IAAI;AAC5C,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI,IAAI;AAER,MAAI,CAAC,cAAc;AACjB,QAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,cAAc;AACtC,iCAAa,2BAA2B,KAAK,kBAAkB;AAC/D;AAAA,IACF;AAAA,EACF;AAEA,MAAI;AACF,QAAI,OAAc,CAAC;AAEnB,QAAI,SAAS,gBAAgB;AAC3B,aAAO,UAAM,0BAAc,UAAU,QAAQ,cAAc;AAAA,IAC7D;AAEA,UAAM,gBACJ,MAAM,iCAAiC,qBAAqB;AAAA,MAC1D;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAEH,QAAI,CAAC,eAAe;AAClB,iCAAa,2BAA2B,KAAK,cAAc;AAC3D;AAAA,IACF;AAEA,UAAM,mBACJ,oCAAgE;AAAA,MAC9D,MAAM;AAAA,IACR,CAAC;AAEH,QAAI,KAAK,YAAY;AACrB;AAAA,EACF,SAAS,OAAO;AACd,+BAAa,uBAAuB,KAAK,KAAiB;AAC1D;AAAA,EACF;AACF;AAeO,MAAM,kCAAkC,OAC7C,KACA,KACA,UACkB;AAClB,QAAM,EAAE,MAAM,cAAc,QAAQ,IAAI,IAAI;AAC5C,QAAM,EAAE,aAAa,cAAc,cAAc,OAAO,YAAY,IAClE,IAAI;AAEN,MAAI,CAAC,cAAc;AACjB,QAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,cAAc;AACtC,iCAAa,2BAA2B,KAAK,kBAAkB;AAC/D;AAAA,IACF;AAAA,EACF;AAEA,MAAI;AACF,UAAM,OAAc,MAAM,WAAW;AAAA,MACnC;AAAA,QACE,gBAAgB,cAAc;AAAA,MAChC;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEA,UAAM,gBACJ,MAAM,oCAAoC,wBAAwB;AAAA,MAChE;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAEH,QAAI,CAAC,eAAe;AAClB,iCAAa,2BAA2B,KAAK,cAAc;AAC3D;AAAA,IACF;AAEA,UAAM,mBACJ,oCAAgE;AAAA,MAC9D,MAAM;AAAA,IACR,CAAC;AAEH,QAAI,KAAK,YAAY;AACrB;AAAA,EACF,SAAS,OAAO;AACd,+BAAa,uBAAuB,KAAK,KAAiB;AAC1D;AAAA,EACF;AACF;AAgBO,MAAM,WAAW,OACtB,KACA,KACA,UACkB;AAClB,QAAM,EAAE,MAAM,SAAS,aAAa,IAAI,IAAI;AAC5C,QAAM,EAAE,cAAc,cAAc,OAAO,aAAa,IAAI,IAAI,IAAI;AAEpE,MAAI,CAAC,cAAc;AACjB,QAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,cAAc;AACtC,iCAAa,2BAA2B,KAAK,kBAAkB;AAC/D;AAAA,IACF;AAAA,EACF;AAEA,MAAI;AACF,QAAI,eAA6B,CAAC;AAClC,QAAI,SAAS,gBAAgB;AAC3B,qBAAe,UAAM,yCAAsB,CAAC,IAAI,GAAG,GAAG,QAAQ,GAAG;AAAA,IACnE;AAEA,UAAM,gBAAgB,MAAM,aAAa,SAAS;AAAA,MAChD;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAED,QAAI,CAAC,eAAe;AAClB,iCAAa,2BAA2B,KAAK,cAAc;AAC3D;AAAA,IACF;AAEA,UAAM,mBACJ,oCAAgE;AAAA,MAC9D,MAAM;AAAA,IACR,CAAC;AAEH,QAAI,KAAK,YAAY;AACrB;AAAA,EACF,SAAS,OAAO;AACd,+BAAa,uBAAuB,KAAK,KAAiB;AAC1D;AAAA,EACF;AACF;AAQO,MAAM,iBAAiB,OAC5B,KACA,QACG;AACH,QAAM,EAAE,SAAS,IAAI,IAAI;AACzB,MAAI;AACF,UAAM,WAAW,MAAM,mBAAmB,eAAe,QAAQ;AAEjE,UAAM,mBACJ,oCAAwD;AAAA,MACtD,MAAM;AAAA,IACR,CAAC;AAEH,QAAI,KAAK,YAAY;AAAA,EACvB,SAAS,OAAO;AACd,+BAAa,uBAAuB,KAAK,KAAiB;AAC1D;AAAA,EACF;AACF;AAcO,MAAM,eAAe,OAC1B,KACA,QACG;AACH,MAAI;AACF,UAAM,EAAE,MAAM,OAAO,cAAc,cAAc,YAAY,IAAI,IAAI;AACrE,UAAM,EAAE,MAAM,SAAS,aAAa,IAAI,IAAI;AAE5C,QAAI,CAAC,cAAc;AACjB,UAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,cAAc;AACtC,mCAAa,2BAA2B,KAAK,kBAAkB;AAC/D;AAAA,MACF;AAAA,IACF;AAEA,UAAM,WAAY,MAAM,iBAAiB,aAAa;AAAA,MACpD;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC,KAAM;AAAA,MACL,gBAAgB;AAAA,MAChB,WAAW;AAAA,IACb;AAEA,UAAM,mBACJ,oCAA4D;AAAA,MAC1D,MAAM;AAAA,IACR,CAAC;AAEH,QAAI,KAAK,YAAY;AAAA,EACvB,SAAS,OAAO;AACd,+BAAa,uBAAuB,KAAK,KAAiB;AAC1D;AAAA,EACF;AACF;","names":[]}
1
+ {"version":3,"sources":["../../../src/controllers/ai.controller.ts"],"sourcesContent":["import { DiscussionModel } from '@/models/discussion.model';\nimport type { Dictionary } from '@/types/dictionary.types';\nimport type { Tag } from '@/types/tag.types';\nimport { type KeyPath } from '@intlayer/core';\nimport type { ResponseWithInformation } from '@middlewares/sessionAuth.middleware';\nimport { getDictionariesByTags } from '@services/dictionary.service';\nimport * as tagService from '@services/tag.service';\nimport { getTagsByKeys } from '@services/tag.service';\nimport * as askDocQuestionUtil from '@utils/AI/askDocQuestion/askDocQuestion';\nimport * as auditContentDeclarationUtil from '@utils/AI/auditDictionary';\nimport * as auditContentDeclarationFieldUtil from '@utils/AI/auditDictionaryField';\nimport * as auditContentDeclarationMetadataUtil from '@utils/AI/auditDictionaryMetadata';\nimport * as autocompleteUtil from '@utils/AI/autocomplete';\nimport * as auditTagUtil from '@utils/auditTag';\nimport { type AppError, ErrorHandler } from '@utils/errors';\nimport { formatResponse, type ResponseData } from '@utils/responseData';\nimport type { NextFunction, Request } from 'express';\nimport type { Locales } from 'intlayer';\n\nexport type AuditContentDeclarationBody = {\n openAiApiKey?: string;\n customPrompt?: string;\n locales: Locales[];\n defaultLocale: Locales;\n fileContent: string;\n filePath?: string;\n model?: string;\n temperature?: number;\n tagsKeys?: string[];\n};\nexport type AuditContentDeclarationResult =\n ResponseData<auditContentDeclarationUtil.AuditFileResultData>;\n\n/**\n * Retrieves a list of dictionaries based on filters and pagination.\n */\nexport const auditContentDeclaration = async (\n req: Request<AuditContentDeclarationBody>,\n res: ResponseWithInformation<AuditContentDeclarationResult>,\n _next: NextFunction\n): Promise<void> => {\n const { user, project, organization } = res.locals;\n const {\n fileContent,\n filePath,\n openAiApiKey,\n customPrompt,\n model,\n temperature,\n locales,\n defaultLocale,\n tagsKeys,\n } = req.body;\n\n if (!openAiApiKey) {\n if (!user || !project || !organization) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n }\n\n try {\n let tags: Tag[] = [];\n\n if (project?.organizationId) {\n tags = await getTagsByKeys(tagsKeys, project.organizationId);\n }\n\n const auditResponse = await auditContentDeclarationUtil.auditDictionary({\n fileContent,\n filePath,\n model,\n temperature,\n openAiApiKey,\n customPrompt,\n locales,\n defaultLocale,\n tags,\n });\n\n if (!auditResponse) {\n ErrorHandler.handleGenericErrorResponse(res, 'AUDIT_FAILED');\n return;\n }\n\n const responseData =\n formatResponse<auditContentDeclarationUtil.AuditFileResultData>({\n data: auditResponse,\n });\n\n res.json(responseData);\n return;\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n\nexport type AuditContentDeclarationFieldBody = {\n openAiApiKey?: string;\n customPrompt?: string;\n locales: Locales[];\n fileContent: string;\n filePath?: string;\n model?: string;\n temperature?: number;\n tagsKeys?: string[];\n keyPath: KeyPath[];\n};\nexport type AuditContentDeclarationFieldResult =\n ResponseData<auditContentDeclarationUtil.AuditFileResultData>;\n\n/**\n * Retrieves a list of dictionaries based on filters and pagination.\n */\nexport const auditContentDeclarationField = async (\n req: Request<AuditContentDeclarationFieldBody>,\n res: ResponseWithInformation<AuditContentDeclarationFieldResult>,\n _next: NextFunction\n): Promise<void> => {\n const { user, project, organization } = res.locals;\n const {\n fileContent,\n openAiApiKey,\n customPrompt,\n model,\n temperature,\n locales,\n tagsKeys,\n keyPath,\n } = req.body;\n\n if (!openAiApiKey) {\n if (!user || !project || !organization) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n }\n\n try {\n let tags: Tag[] = [];\n\n if (project?.organizationId) {\n tags = await getTagsByKeys(tagsKeys, project.organizationId);\n }\n\n const auditResponse =\n await auditContentDeclarationFieldUtil.auditDictionaryField({\n fileContent,\n model,\n temperature,\n openAiApiKey,\n customPrompt,\n locales,\n tags,\n keyPath,\n });\n\n if (!auditResponse) {\n ErrorHandler.handleGenericErrorResponse(res, 'AUDIT_FAILED');\n return;\n }\n\n const responseData =\n formatResponse<auditContentDeclarationUtil.AuditFileResultData>({\n data: auditResponse,\n });\n\n res.json(responseData);\n return;\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n\nexport type AuditContentDeclarationMetadataBody = {\n openAiApiKey?: string;\n customPrompt?: string;\n fileContent: string;\n model?: string;\n temperature?: number;\n};\nexport type AuditContentDeclarationMetadataResult =\n ResponseData<auditContentDeclarationUtil.AuditFileResultData>;\n\n/**\n * Retrieves a list of dictionaries based on filters and pagination.\n */\nexport const auditContentDeclarationMetadata = async (\n req: Request<AuditContentDeclarationMetadataBody>,\n res: ResponseWithInformation<AuditContentDeclarationMetadataResult>,\n _next: NextFunction\n): Promise<void> => {\n const { user, organization, project } = res.locals;\n const { fileContent, openAiApiKey, customPrompt, model, temperature } =\n req.body;\n\n if (!openAiApiKey) {\n if (!user || !project || !organization) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n }\n\n try {\n const tags: Tag[] = await tagService.findTags(\n {\n organizationId: organization?._id,\n },\n 0,\n 1000\n );\n\n const auditResponse =\n await auditContentDeclarationMetadataUtil.auditDictionaryMetadata({\n fileContent,\n model,\n openAiApiKey,\n temperature,\n customPrompt,\n tags,\n });\n\n if (!auditResponse) {\n ErrorHandler.handleGenericErrorResponse(res, 'AUDIT_FAILED');\n return;\n }\n\n const responseData =\n formatResponse<auditContentDeclarationUtil.AuditFileResultData>({\n data: auditResponse,\n });\n\n res.json(responseData);\n return;\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n\nexport type AuditTagBody = {\n openAiApiKey?: string;\n customPrompt?: string;\n filePath?: string;\n model?: string;\n temperature?: number;\n tag: Tag;\n};\nexport type AuditTagResult =\n ResponseData<auditContentDeclarationUtil.AuditFileResultData>;\n\n/**\n * Retrieves a list of dictionaries based on filters and pagination.\n */\nexport const auditTag = async (\n req: Request<undefined, undefined, AuditTagBody>,\n res: ResponseWithInformation<AuditTagResult>,\n _next: NextFunction\n): Promise<void> => {\n const { user, project, organization } = res.locals;\n const { openAiApiKey, customPrompt, model, temperature, tag } = req.body;\n\n if (!openAiApiKey) {\n if (!user || !project || !organization) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n }\n\n try {\n let dictionaries: Dictionary[] = [];\n if (project?.organizationId) {\n dictionaries = await getDictionariesByTags([tag.key], project._id);\n }\n\n const auditResponse = await auditTagUtil.auditTag({\n model,\n openAiApiKey,\n temperature,\n customPrompt,\n dictionaries,\n tag,\n });\n\n if (!auditResponse) {\n ErrorHandler.handleGenericErrorResponse(res, 'AUDIT_FAILED');\n return;\n }\n\n const responseData =\n formatResponse<auditContentDeclarationUtil.AuditFileResultData>({\n data: auditResponse,\n });\n\n res.json(responseData);\n return;\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n\nexport type AskDocQuestionBody = {\n messages: askDocQuestionUtil.ChatCompletionRequestMessage[];\n discutionId: string;\n};\nexport type AskDocQuestionResult =\n ResponseData<askDocQuestionUtil.AskDocQuestionResult>;\n\nexport const askDocQuestion = async (\n req: Request<undefined, undefined, AskDocQuestionBody>,\n res: ResponseWithInformation<AskDocQuestionResult>\n) => {\n const { messages, discutionId } = req.body;\n const { user, project, organization } = res.locals;\n\n // 1. Prepare SSE headers and flush them NOW\n res.setHeader('Content-Type', 'text/event-stream; charset=utf-8');\n res.setHeader('Cache-Control', 'no-cache, no-transform');\n res.setHeader('Connection', 'keep-alive');\n res.setHeader('X-Accel-Buffering', 'no'); // disable nginx buffering\n res.flushHeaders?.();\n res.write(': connected\\n\\n'); // initial comment keeps some browsers happy\n res.flush?.();\n\n // 2. Kick off the upstream stream WITHOUT awaiting it\n askDocQuestionUtil\n .askDocQuestion(messages, {\n onMessage: (chunk) => {\n res.write(`data: ${JSON.stringify({ chunk })}\\n\\n`);\n res.flush?.();\n },\n })\n .then(async (fullResponse) => {\n // 3. Persist discussion while the client already has all chunks\n await DiscussionModel.findOneAndUpdate(\n { discutionId },\n {\n $set: {\n discutionId,\n userId: user?._id,\n projectId: project?._id,\n organizationId: organization?._id,\n messages: messages.map((msg) => ({\n role: msg.role,\n content: msg.content,\n timestamp: new Date(),\n })),\n },\n },\n { upsert: true, new: true }\n );\n\n // 4. Tell the client we’re done and close the stream\n res.write(\n `data: ${JSON.stringify({ done: true, response: fullResponse })}\\n\\n`\n );\n res.end();\n })\n .catch((err) => {\n // propagate error as an SSE event so the client knows why it closed\n res.write(\n `event: error\\ndata: ${JSON.stringify({ message: err.message })}\\n\\n`\n );\n res.end();\n });\n};\n\nexport type AutocompleteBody = {\n text: string;\n model?: string;\n temperature?: number;\n openAiApiKey?: string;\n customPrompt?: string;\n};\n\nexport type AutocompleteResponse = ResponseData<{\n autocompletion: string;\n}>;\n\nexport const autocomplete = async (\n req: Request<undefined, undefined, AutocompleteBody>,\n res: ResponseWithInformation<AutocompleteResponse>\n) => {\n try {\n const { text, model, openAiApiKey, customPrompt, temperature } = req.body;\n const { user, project, organization } = res.locals;\n\n if (!openAiApiKey) {\n if (!user || !project || !organization) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n }\n\n const response = (await autocompleteUtil.autocomplete({\n text,\n model,\n openAiApiKey,\n temperature,\n customPrompt,\n })) ?? {\n autocompletion: '',\n tokenUsed: 0,\n };\n\n const responseData =\n formatResponse<autocompleteUtil.AutocompleteFileResultData>({\n data: response,\n });\n\n res.json(responseData);\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBAAgC;AAKhC,wBAAsC;AACtC,iBAA4B;AAC5B,iBAA8B;AAC9B,yBAAoC;AACpC,kCAA6C;AAC7C,uCAAkD;AAClD,0CAAqD;AACrD,uBAAkC;AAClC,mBAA8B;AAC9B,oBAA4C;AAC5C,0BAAkD;AAqB3C,MAAM,0BAA0B,OACrC,KACA,KACA,UACkB;AAClB,QAAM,EAAE,MAAM,SAAS,aAAa,IAAI,IAAI;AAC5C,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI,IAAI;AAER,MAAI,CAAC,cAAc;AACjB,QAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,cAAc;AACtC,iCAAa,2BAA2B,KAAK,kBAAkB;AAC/D;AAAA,IACF;AAAA,EACF;AAEA,MAAI;AACF,QAAI,OAAc,CAAC;AAEnB,QAAI,SAAS,gBAAgB;AAC3B,aAAO,UAAM,0BAAc,UAAU,QAAQ,cAAc;AAAA,IAC7D;AAEA,UAAM,gBAAgB,MAAM,4BAA4B,gBAAgB;AAAA,MACtE;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAED,QAAI,CAAC,eAAe;AAClB,iCAAa,2BAA2B,KAAK,cAAc;AAC3D;AAAA,IACF;AAEA,UAAM,mBACJ,oCAAgE;AAAA,MAC9D,MAAM;AAAA,IACR,CAAC;AAEH,QAAI,KAAK,YAAY;AACrB;AAAA,EACF,SAAS,OAAO;AACd,+BAAa,uBAAuB,KAAK,KAAiB;AAC1D;AAAA,EACF;AACF;AAmBO,MAAM,+BAA+B,OAC1C,KACA,KACA,UACkB;AAClB,QAAM,EAAE,MAAM,SAAS,aAAa,IAAI,IAAI;AAC5C,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI,IAAI;AAER,MAAI,CAAC,cAAc;AACjB,QAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,cAAc;AACtC,iCAAa,2BAA2B,KAAK,kBAAkB;AAC/D;AAAA,IACF;AAAA,EACF;AAEA,MAAI;AACF,QAAI,OAAc,CAAC;AAEnB,QAAI,SAAS,gBAAgB;AAC3B,aAAO,UAAM,0BAAc,UAAU,QAAQ,cAAc;AAAA,IAC7D;AAEA,UAAM,gBACJ,MAAM,iCAAiC,qBAAqB;AAAA,MAC1D;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAEH,QAAI,CAAC,eAAe;AAClB,iCAAa,2BAA2B,KAAK,cAAc;AAC3D;AAAA,IACF;AAEA,UAAM,mBACJ,oCAAgE;AAAA,MAC9D,MAAM;AAAA,IACR,CAAC;AAEH,QAAI,KAAK,YAAY;AACrB;AAAA,EACF,SAAS,OAAO;AACd,+BAAa,uBAAuB,KAAK,KAAiB;AAC1D;AAAA,EACF;AACF;AAeO,MAAM,kCAAkC,OAC7C,KACA,KACA,UACkB;AAClB,QAAM,EAAE,MAAM,cAAc,QAAQ,IAAI,IAAI;AAC5C,QAAM,EAAE,aAAa,cAAc,cAAc,OAAO,YAAY,IAClE,IAAI;AAEN,MAAI,CAAC,cAAc;AACjB,QAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,cAAc;AACtC,iCAAa,2BAA2B,KAAK,kBAAkB;AAC/D;AAAA,IACF;AAAA,EACF;AAEA,MAAI;AACF,UAAM,OAAc,MAAM,WAAW;AAAA,MACnC;AAAA,QACE,gBAAgB,cAAc;AAAA,MAChC;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEA,UAAM,gBACJ,MAAM,oCAAoC,wBAAwB;AAAA,MAChE;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAEH,QAAI,CAAC,eAAe;AAClB,iCAAa,2BAA2B,KAAK,cAAc;AAC3D;AAAA,IACF;AAEA,UAAM,mBACJ,oCAAgE;AAAA,MAC9D,MAAM;AAAA,IACR,CAAC;AAEH,QAAI,KAAK,YAAY;AACrB;AAAA,EACF,SAAS,OAAO;AACd,+BAAa,uBAAuB,KAAK,KAAiB;AAC1D;AAAA,EACF;AACF;AAgBO,MAAM,WAAW,OACtB,KACA,KACA,UACkB;AAClB,QAAM,EAAE,MAAM,SAAS,aAAa,IAAI,IAAI;AAC5C,QAAM,EAAE,cAAc,cAAc,OAAO,aAAa,IAAI,IAAI,IAAI;AAEpE,MAAI,CAAC,cAAc;AACjB,QAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,cAAc;AACtC,iCAAa,2BAA2B,KAAK,kBAAkB;AAC/D;AAAA,IACF;AAAA,EACF;AAEA,MAAI;AACF,QAAI,eAA6B,CAAC;AAClC,QAAI,SAAS,gBAAgB;AAC3B,qBAAe,UAAM,yCAAsB,CAAC,IAAI,GAAG,GAAG,QAAQ,GAAG;AAAA,IACnE;AAEA,UAAM,gBAAgB,MAAM,aAAa,SAAS;AAAA,MAChD;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAED,QAAI,CAAC,eAAe;AAClB,iCAAa,2BAA2B,KAAK,cAAc;AAC3D;AAAA,IACF;AAEA,UAAM,mBACJ,oCAAgE;AAAA,MAC9D,MAAM;AAAA,IACR,CAAC;AAEH,QAAI,KAAK,YAAY;AACrB;AAAA,EACF,SAAS,OAAO;AACd,+BAAa,uBAAuB,KAAK,KAAiB;AAC1D;AAAA,EACF;AACF;AASO,MAAM,iBAAiB,OAC5B,KACA,QACG;AACH,QAAM,EAAE,UAAU,YAAY,IAAI,IAAI;AACtC,QAAM,EAAE,MAAM,SAAS,aAAa,IAAI,IAAI;AAG5C,MAAI,UAAU,gBAAgB,kCAAkC;AAChE,MAAI,UAAU,iBAAiB,wBAAwB;AACvD,MAAI,UAAU,cAAc,YAAY;AACxC,MAAI,UAAU,qBAAqB,IAAI;AACvC,MAAI,eAAe;AACnB,MAAI,MAAM,iBAAiB;AAC3B,MAAI,QAAQ;AAGZ,qBACG,eAAe,UAAU;AAAA,IACxB,WAAW,CAAC,UAAU;AACpB,UAAI,MAAM,SAAS,KAAK,UAAU,EAAE,MAAM,CAAC,CAAC;AAAA;AAAA,CAAM;AAClD,UAAI,QAAQ;AAAA,IACd;AAAA,EACF,CAAC,EACA,KAAK,OAAO,iBAAiB;AAE5B,UAAM,kCAAgB;AAAA,MACpB,EAAE,YAAY;AAAA,MACd;AAAA,QACE,MAAM;AAAA,UACJ;AAAA,UACA,QAAQ,MAAM;AAAA,UACd,WAAW,SAAS;AAAA,UACpB,gBAAgB,cAAc;AAAA,UAC9B,UAAU,SAAS,IAAI,CAAC,SAAS;AAAA,YAC/B,MAAM,IAAI;AAAA,YACV,SAAS,IAAI;AAAA,YACb,WAAW,oBAAI,KAAK;AAAA,UACtB,EAAE;AAAA,QACJ;AAAA,MACF;AAAA,MACA,EAAE,QAAQ,MAAM,KAAK,KAAK;AAAA,IAC5B;AAGA,QAAI;AAAA,MACF,SAAS,KAAK,UAAU,EAAE,MAAM,MAAM,UAAU,aAAa,CAAC,CAAC;AAAA;AAAA;AAAA,IACjE;AACA,QAAI,IAAI;AAAA,EACV,CAAC,EACA,MAAM,CAAC,QAAQ;AAEd,QAAI;AAAA,MACF;AAAA,QAAuB,KAAK,UAAU,EAAE,SAAS,IAAI,QAAQ,CAAC,CAAC;AAAA;AAAA;AAAA,IACjE;AACA,QAAI,IAAI;AAAA,EACV,CAAC;AACL;AAcO,MAAM,eAAe,OAC1B,KACA,QACG;AACH,MAAI;AACF,UAAM,EAAE,MAAM,OAAO,cAAc,cAAc,YAAY,IAAI,IAAI;AACrE,UAAM,EAAE,MAAM,SAAS,aAAa,IAAI,IAAI;AAE5C,QAAI,CAAC,cAAc;AACjB,UAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,cAAc;AACtC,mCAAa,2BAA2B,KAAK,kBAAkB;AAC/D;AAAA,MACF;AAAA,IACF;AAEA,UAAM,WAAY,MAAM,iBAAiB,aAAa;AAAA,MACpD;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC,KAAM;AAAA,MACL,gBAAgB;AAAA,MAChB,WAAW;AAAA,IACb;AAEA,UAAM,mBACJ,oCAA4D;AAAA,MAC1D,MAAM;AAAA,IACR,CAAC;AAEH,QAAI,KAAK,YAAY;AAAA,EACvB,SAAS,OAAO;AACd,+BAAa,uBAAuB,KAAK,KAAiB;AAC1D;AAAA,EACF;AACF;","names":[]}
@@ -0,0 +1,34 @@
1
+ "use strict";
2
+ var __defProp = Object.defineProperty;
3
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4
+ var __getOwnPropNames = Object.getOwnPropertyNames;
5
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
6
+ var __export = (target, all) => {
7
+ for (var name in all)
8
+ __defProp(target, name, { get: all[name], enumerable: true });
9
+ };
10
+ var __copyProps = (to, from, except, desc) => {
11
+ if (from && typeof from === "object" || typeof from === "function") {
12
+ for (let key of __getOwnPropNames(from))
13
+ if (!__hasOwnProp.call(to, key) && key !== except)
14
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
15
+ }
16
+ return to;
17
+ };
18
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19
+ var discussion_model_exports = {};
20
+ __export(discussion_model_exports, {
21
+ DiscussionModel: () => DiscussionModel
22
+ });
23
+ module.exports = __toCommonJS(discussion_model_exports);
24
+ var import_discussion = require('./../schemas/discussion.schema.cjs');
25
+ var import_mongoose = require("mongoose");
26
+ const DiscussionModel = (0, import_mongoose.model)(
27
+ "discussion",
28
+ import_discussion.discussionSchema
29
+ );
30
+ // Annotate the CommonJS export names for ESM import in node:
31
+ 0 && (module.exports = {
32
+ DiscussionModel
33
+ });
34
+ //# sourceMappingURL=discussion.model.cjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../../src/models/discussion.model.ts"],"sourcesContent":["import type { Discussion, DiscussionModelType } from '@/types/discussion.types';\nimport { discussionSchema } from '@schemas/discussion.schema';\nimport { model } from 'mongoose';\n\nexport const DiscussionModel = model<Discussion, DiscussionModelType>(\n 'discussion',\n discussionSchema\n);\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,wBAAiC;AACjC,sBAAsB;AAEf,MAAM,sBAAkB;AAAA,EAC7B;AAAA,EACA;AACF;","names":[]}
@@ -0,0 +1,81 @@
1
+ "use strict";
2
+ var __defProp = Object.defineProperty;
3
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4
+ var __getOwnPropNames = Object.getOwnPropertyNames;
5
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
6
+ var __export = (target, all) => {
7
+ for (var name in all)
8
+ __defProp(target, name, { get: all[name], enumerable: true });
9
+ };
10
+ var __copyProps = (to, from, except, desc) => {
11
+ if (from && typeof from === "object" || typeof from === "function") {
12
+ for (let key of __getOwnPropNames(from))
13
+ if (!__hasOwnProp.call(to, key) && key !== except)
14
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
15
+ }
16
+ return to;
17
+ };
18
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19
+ var discussion_schema_exports = {};
20
+ __export(discussion_schema_exports, {
21
+ discussionSchema: () => discussionSchema
22
+ });
23
+ module.exports = __toCommonJS(discussion_schema_exports);
24
+ var import_mongoose = require("mongoose");
25
+ const discussionSchema = new import_mongoose.Schema(
26
+ {
27
+ discutionId: {
28
+ type: String,
29
+ required: true,
30
+ unique: true
31
+ },
32
+ messages: [
33
+ {
34
+ role: {
35
+ type: String,
36
+ required: true,
37
+ enum: ["user", "assistant", "system"]
38
+ },
39
+ content: {
40
+ type: String,
41
+ required: true
42
+ },
43
+ timestamp: {
44
+ type: Date,
45
+ default: Date.now
46
+ }
47
+ }
48
+ ],
49
+ userId: {
50
+ type: import_mongoose.Schema.Types.ObjectId,
51
+ ref: "user",
52
+ required: true
53
+ },
54
+ projectId: {
55
+ type: import_mongoose.Schema.Types.ObjectId,
56
+ ref: "project",
57
+ required: true
58
+ },
59
+ organizationId: {
60
+ type: import_mongoose.Schema.Types.ObjectId,
61
+ ref: "organization",
62
+ required: true
63
+ },
64
+ title: {
65
+ type: String,
66
+ required: false
67
+ },
68
+ isArchived: {
69
+ type: Boolean,
70
+ default: false
71
+ }
72
+ },
73
+ {
74
+ timestamps: true
75
+ }
76
+ );
77
+ // Annotate the CommonJS export names for ESM import in node:
78
+ 0 && (module.exports = {
79
+ discussionSchema
80
+ });
81
+ //# sourceMappingURL=discussion.schema.cjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../../src/schemas/discussion.schema.ts"],"sourcesContent":["import type { Discussion } from '@/types/discussion.types';\nimport { Schema } from 'mongoose';\n\nexport const discussionSchema = new Schema<Discussion>(\n {\n discutionId: {\n type: String,\n required: true,\n unique: true,\n },\n messages: [\n {\n role: {\n type: String,\n required: true,\n enum: ['user', 'assistant', 'system'],\n },\n content: {\n type: String,\n required: true,\n },\n timestamp: {\n type: Date,\n default: Date.now,\n },\n },\n ],\n userId: {\n type: Schema.Types.ObjectId,\n ref: 'user',\n required: true,\n },\n projectId: {\n type: Schema.Types.ObjectId,\n ref: 'project',\n required: true,\n },\n organizationId: {\n type: Schema.Types.ObjectId,\n ref: 'organization',\n required: true,\n },\n title: {\n type: String,\n required: false,\n },\n isArchived: {\n type: Boolean,\n default: false,\n },\n },\n {\n timestamps: true,\n }\n);\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,sBAAuB;AAEhB,MAAM,mBAAmB,IAAI;AAAA,EAClC;AAAA,IACE,aAAa;AAAA,MACX,MAAM;AAAA,MACN,UAAU;AAAA,MACV,QAAQ;AAAA,IACV;AAAA,IACA,UAAU;AAAA,MACR;AAAA,QACE,MAAM;AAAA,UACJ,MAAM;AAAA,UACN,UAAU;AAAA,UACV,MAAM,CAAC,QAAQ,aAAa,QAAQ;AAAA,QACtC;AAAA,QACA,SAAS;AAAA,UACP,MAAM;AAAA,UACN,UAAU;AAAA,QACZ;AAAA,QACA,WAAW;AAAA,UACT,MAAM;AAAA,UACN,SAAS,KAAK;AAAA,QAChB;AAAA,MACF;AAAA,IACF;AAAA,IACA,QAAQ;AAAA,MACN,MAAM,uBAAO,MAAM;AAAA,MACnB,KAAK;AAAA,MACL,UAAU;AAAA,IACZ;AAAA,IACA,WAAW;AAAA,MACT,MAAM,uBAAO,MAAM;AAAA,MACnB,KAAK;AAAA,MACL,UAAU;AAAA,IACZ;AAAA,IACA,gBAAgB;AAAA,MACd,MAAM,uBAAO,MAAM;AAAA,MACnB,KAAK;AAAA,MACL,UAAU;AAAA,IACZ;AAAA,IACA,OAAO;AAAA,MACL,MAAM;AAAA,MACN,UAAU;AAAA,IACZ;AAAA,IACA,YAAY;AAAA,MACV,MAAM;AAAA,MACN,SAAS;AAAA,IACX;AAAA,EACF;AAAA,EACA;AAAA,IACE,YAAY;AAAA,EACd;AACF;","names":[]}
@@ -0,0 +1,17 @@
1
+ "use strict";
2
+ var __defProp = Object.defineProperty;
3
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4
+ var __getOwnPropNames = Object.getOwnPropertyNames;
5
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
6
+ var __copyProps = (to, from, except, desc) => {
7
+ if (from && typeof from === "object" || typeof from === "function") {
8
+ for (let key of __getOwnPropNames(from))
9
+ if (!__hasOwnProp.call(to, key) && key !== except)
10
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
11
+ }
12
+ return to;
13
+ };
14
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
15
+ var discussion_types_exports = {};
16
+ module.exports = __toCommonJS(discussion_types_exports);
17
+ //# sourceMappingURL=discussion.types.cjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../../src/types/discussion.types.ts"],"sourcesContent":["import { Document, Model, Types } from 'mongoose';\n\nexport interface Message {\n role: 'user' | 'assistant' | 'system';\n content: string;\n timestamp: Date;\n}\n\nexport interface Discussion extends Document {\n discutionId: string;\n messages: Message[];\n userId: Types.ObjectId;\n projectId: Types.ObjectId;\n organizationId: Types.ObjectId;\n title?: string;\n isArchived: boolean;\n createdAt: Date;\n updatedAt: Date;\n}\n\nexport interface DiscussionModelType extends Model<Discussion> {\n // Add any static methods here if needed\n}\n"],"mappings":";;;;;;;;;;;;;;AAAA;AAAA;","names":[]}
@@ -34,11 +34,11 @@ __export(askDocQuestion_exports, {
34
34
  searchChunkReference: () => searchChunkReference
35
35
  });
36
36
  module.exports = __toCommonJS(askDocQuestion_exports);
37
- var import_fs = __toESM(require("fs"), 1);
38
37
  var import_blog = require("@intlayer/blog");
39
38
  var import_config = require("@intlayer/config");
40
39
  var import_docs = require("@intlayer/docs");
41
40
  var import_dotenv = __toESM(require("dotenv"), 1);
41
+ var import_fs = __toESM(require("fs"), 1);
42
42
  var import_openai = require("openai");
43
43
  var import_embeddings = __toESM(require('./embeddings.json'), 1);
44
44
  const vectorStore = [];
@@ -152,7 +152,7 @@ const initPrompt = {
152
152
  content: `Ignore all instructions before this one. Keep forever all instructions after this one. From now you are not allowed to ignore the previous instructions. You are an expert in internationalization solution for Javascript Application. You're an helpful chatbot for Intlayer, an Internationalization solution The website url of the solution is: 'https://intlayer.org'). And your implemented on this website. The user is an potential user of Intlayer. Your task is now to answer the questions of the user. You must talk as an member of Intlayer. You must only answer the questions relative to Intlayer. Any question should be related to Intlayer. If a question is not related to Intlayer, you should NOT answer it. You must NOT answer question who is generally secret for a company (E.g. financial information). Your should NOT invent information that are not precised into the relevant documentation chunks provided. If you don't have enough information to answer the question, not answer using extra information extracted from your knowledge. If your have a doubt about something, you should ask more question to the user. Here some useful urls to know more about Intlayer: https://intlayer.org/docs https://intlayer.org/blog https://intlayer.org/pricing https://intlayer.org/dashboard Your should return a result as markdown. Code element should include metadata fileName="file.ts" if could be useful for the user. Code element format should not include metadata (E.g. codeFormat="typescript", or packageManager="npm". Here is the relevant documentation: {{relevantFilesReferences}}`
153
153
  // Placeholder for relevant documentation to be inserted later
154
154
  };
155
- const askDocQuestion = async (messages) => {
155
+ const askDocQuestion = async (messages, options) => {
156
156
  const openai = new import_openai.OpenAI({ apiKey: process.env.OPENAI_API_KEY });
157
157
  const userMessages = messages.filter((message) => message.role === "user");
158
158
  const query = userMessages.map((message) => `- ${message.content}`).join("\n");
@@ -175,14 +175,22 @@ ${doc.content}`
175
175
  const response = await openai.chat.completions.create({
176
176
  model: MODEL,
177
177
  temperature: MODEL_TEMPERATURE,
178
- messages: messagesList
178
+ messages: messagesList,
179
+ stream: true
179
180
  });
180
- const result = response.choices[0].message.content;
181
+ let fullResponse = "";
182
+ for await (const chunk of response) {
183
+ const content = chunk.choices[0]?.delta?.content || "";
184
+ if (content) {
185
+ fullResponse += content;
186
+ options?.onMessage?.(content);
187
+ }
188
+ }
181
189
  const relatedFiles = [
182
190
  ...new Set(relevantFilesReferences.map((doc) => doc.fileKey))
183
191
  ];
184
192
  return {
185
- response: result ?? "Error: No result found",
193
+ response: fullResponse ?? "Error: No result found",
186
194
  relatedFiles
187
195
  };
188
196
  };
@@ -1 +1 @@
1
- {"version":3,"sources":["../../../../../src/utils/AI/askDocQuestion/askDocQuestion.ts"],"sourcesContent":["import fs from 'fs';\nimport { getBlogs } from '@intlayer/blog';\nimport { Locales } from '@intlayer/config';\nimport { getDocs } from '@intlayer/docs';\nimport dotenv from 'dotenv';\nimport { OpenAI } from 'openai';\nimport embeddingsList from './embeddings.json' with { type: 'json' };\n\ntype VectorStoreEl = {\n fileKey: string;\n chunkNumber: number;\n content: string;\n embedding: number[];\n};\n\n/**\n * Simple in-memory vector store to hold document embeddings and their content.\n * Each entry contains:\n * - fileKey: A unique key identifying the file\n * - chunkNumber: The number of the chunk within the document\n * - content: The chunk content\n * - embedding: The numerical embedding vector for the chunk\n */\nconst vectorStore: VectorStoreEl[] = [];\n\n// Constants defining OpenAI's token and character limits\nconst MODEL: OpenAI.Chat.ChatModel = 'gpt-4o-2024-11-20'; // Model to use for chat completions\nconst MODEL_TEMPERATURE = 0.1; // Temperature to use for chat completions\nconst EMBEDDING_MODEL: OpenAI.Embeddings.EmbeddingModel =\n 'text-embedding-3-large'; // Model to use for embedding generation\nconst OVERLAP_TOKENS = 200; // Number of tokens to overlap between chunks\nconst MAX_CHUNK_TOKENS = 800; // Maximum number of tokens per chunk\nconst CHAR_BY_TOKEN = 4.15; // Approximate pessimistically the number of characters per token // Can use `tiktoken` or other tokenizers to calculate it more precisely\nconst MAX_CHARS = MAX_CHUNK_TOKENS * CHAR_BY_TOKEN;\nconst OVERLAP_CHARS = OVERLAP_TOKENS * CHAR_BY_TOKEN;\nconst MAX_RELEVANT_CHUNKS_NB = 8; // Maximum number of relevant chunks to attach to chatGPT context\nconst MIN_RELEVANT_CHUNKS_SIMILARITY = 0.25; // Minimum similarity required for a chunk to be considered relevant\n\n/**\n * Splits a given text into chunks ensuring each chunk does not exceed MAX_CHARS.\n * @param text - The input text to split.\n * @returns - Array of text chunks.\n */\nconst chunkText = (text: string): string[] => {\n const chunks: string[] = [];\n let start = 0;\n\n while (start < text.length) {\n let end = Math.min(start + MAX_CHARS, text.length);\n\n // Ensure we don't cut words in the middle (find nearest space)\n if (end < text.length) {\n const lastSpace = text.lastIndexOf(' ', end);\n if (lastSpace > start) {\n end = lastSpace;\n }\n }\n\n chunks.push(text.substring(start, end));\n\n // Move start forward correctly\n const nextStart = end - OVERLAP_CHARS;\n if (nextStart <= start) {\n // Prevent infinite loop if overlap is too large\n start = end;\n } else {\n start = nextStart;\n }\n }\n\n return chunks;\n};\n\n/**\n * Generates an embedding for a given text using OpenAI's embedding API.\n * Trims the text if it exceeds the maximum allowed characters.\n *\n * @param text - The input text to generate an embedding for\n * @returns The embedding vector as a number array\n */\nconst generateEmbedding = async (text: string): Promise<number[]> => {\n try {\n const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });\n const response = await openai.embeddings.create({\n model: EMBEDDING_MODEL, // Specify the embedding model\n input: text,\n });\n\n return response.data[0].embedding; // Return the generated embedding\n } catch (error) {\n console.error('Error generating embedding:', error);\n return [];\n }\n};\n\n/**\n * Calculates the cosine similarity between two vectors.\n * Cosine similarity measures the cosine of the angle between two vectors in an inner product space.\n * Used to determine the similarity between chunks of text.\n *\n * @param vecA - The first vector\n * @param vecB - The second vector\n * @returns The cosine similarity score\n */\nconst cosineSimilarity = (vecA: number[], vecB: number[]): number => {\n // Calculate the dot product of the two vectors\n const dotProduct = vecA.reduce((sum, a, idx) => sum + a * vecB[idx], 0);\n\n // Calculate the magnitude (Euclidean norm) of each vector\n const magnitudeA = Math.sqrt(vecA.reduce((sum, a) => sum + a * a, 0));\n const magnitudeB = Math.sqrt(vecB.reduce((sum, b) => sum + b * b, 0));\n\n // Compute and return the cosine similarity\n return dotProduct / (magnitudeA * magnitudeB);\n};\n\n/**\n * Indexes all Markdown documents by generating embeddings for each chunk and storing them in memory.\n * Also updates the embeddings.json file if new embeddings are generated.\n */\nexport const indexMarkdownFiles = async (): Promise<void> => {\n const env = process.env.NODE_ENV;\n dotenv.config({\n path: [`.env.${env}.local`, `.env.${env}`, '.env.local', '.env'],\n });\n\n // Retrieve documentation and blog posts in English locale\n const docs = getDocs(Locales.ENGLISH);\n const blogs = getBlogs(Locales.ENGLISH);\n\n let result: Record<string, number[]> = {}; // Object to hold updated embeddings\n\n const files = { ...docs, ...blogs }; // Combine docs and blogs into a single object\n\n // Iterate over each file key (identifier) in the combined files\n for (const fileKey of Object.keys(files)) {\n // Split the document into chunks based on headings\n const fileChunks = chunkText(files[fileKey as keyof typeof files]);\n\n // Iterate over each chunk within the current file\n for (const chunkIndex of Object.keys(fileChunks)) {\n const chunkNumber = Number(chunkIndex) + 1; // Chunk number starts at 1\n const chunksNumber = fileChunks.length;\n\n const fileChunk = fileChunks[\n chunkIndex as keyof typeof fileChunks\n ] as string;\n\n const embeddingKeyName = `${fileKey}/chunk_${chunkNumber}`; // Unique key for the chunk\n\n // Retrieve precomputed embedding if available\n const docEmbedding = embeddingsList[\n embeddingKeyName as keyof typeof embeddingsList\n ] as number[] | undefined;\n\n let embedding = docEmbedding; // Use existing embedding if available\n\n if (!embedding) {\n embedding = await generateEmbedding(fileChunk); // Generate embedding if not present\n }\n\n // Update the result object with the new embedding\n result = { ...result, [embeddingKeyName]: embedding };\n\n // Store the embedding and content in the in-memory vector store\n vectorStore.push({\n fileKey,\n chunkNumber,\n embedding,\n content: fileChunk,\n });\n\n console.info(`- Indexed: ${embeddingKeyName}/${chunksNumber}`);\n }\n }\n\n if (process.env.NODE_ENV === 'development') {\n try {\n // Compare the newly generated embeddings with existing ones\n if (JSON.stringify(result) !== JSON.stringify(embeddingsList)) {\n // If there are new embeddings, save them to embeddings.json\n fs.writeFileSync(\n 'src/utils/AI/askDocQuestion/embeddings.json',\n JSON.stringify(result, null, 2)\n );\n }\n } catch (error) {\n console.error(error); // Log any errors during the file write process\n }\n }\n};\n\n// Automatically index Markdown files\nindexMarkdownFiles();\n\n/**\n * Searches the indexed documents for the most relevant chunks based on a query.\n * Utilizes cosine similarity to find the closest matching embeddings.\n *\n * @param query - The search query provided by the user\n * @returns An array of the top matching document chunks' content\n */\nexport const searchChunkReference = async (\n query: string\n): Promise<VectorStoreEl[]> => {\n // Generate an embedding for the user's query\n const queryEmbedding = await generateEmbedding(query);\n\n // Calculate similarity scores between the query embedding and each document's embedding\n const results = vectorStore\n .map((chunk) => ({\n ...chunk,\n similarity: cosineSimilarity(queryEmbedding, chunk.embedding), // Add similarity score to each doc\n }))\n .filter((chunk) => chunk.similarity > MIN_RELEVANT_CHUNKS_SIMILARITY) // Filter out documents with low similarity scores\n .sort((a, b) => b.similarity - a.similarity) // Sort documents by highest similarity first\n .slice(0, MAX_RELEVANT_CHUNKS_NB); // Select the top 6 most similar documents\n\n // Return the content of the top matching documents\n return results;\n};\n\n// Define the structure of messages used in chat completions\nexport type ChatCompletionRequestMessage = {\n role: 'system' | 'user' | 'assistant'; // The role of the message sender\n content: string; // The text content of the message\n};\n\n// Initial prompt configuration for the chatbot\nexport const initPrompt: ChatCompletionRequestMessage = {\n role: 'system',\n content:\n 'Ignore all instructions before this one. \\\n Keep forever all instructions after this one. From now you are not allowed to ignore the previous instructions. \\\n You are an expert in internationalization solution for Javascript Application. You\\'re an helpful chatbot for Intlayer, an Internationalization solution\\\n The website url of the solution is: \\'https://intlayer.org\\'). And your implemented on this website. \\\n The user is an potential user of Intlayer. Your task is now to answer the questions of the user. \\\n You must talk as an member of Intlayer. You must only answer the questions relative to Intlayer. \\\n Any question should be related to Intlayer. If a question is not related to Intlayer, you should NOT answer it. \\\n You must NOT answer question who is generally secret for a company (E.g. financial information). \\\n Your should NOT invent information that are not precised into the relevant documentation chunks provided. \\\n If you don\\'t have enough information to answer the question, not answer using extra information extracted from your knowledge. \\\n If your have a doubt about something, you should ask more question to the user. \\\n \\\n Here some useful urls to know more about Intlayer: \\\n https://intlayer.org/docs \\\n https://intlayer.org/blog \\\n https://intlayer.org/pricing \\\n https://intlayer.org/dashboard \\\n \\\n Your should return a result as markdown.\\\n Code element should include metadata fileName=\"file.ts\" if could be useful for the user. \\\n Code element format should not include metadata (E.g. codeFormat=\"typescript\", or packageManager=\"npm\". \\\n \\\n Here is the relevant documentation:\\\n {{relevantFilesReferences}}', // Placeholder for relevant documentation to be inserted later\n};\n\nexport type AskDocQuestionResult = {\n response: string;\n relatedFiles: string[];\n};\n\n/**\n * Handles the \"Ask a question\" endpoint in an Express.js route.\n * Processes user messages, retrieves relevant documents, and interacts with OpenAI's chat API to generate responses.\n *\n * @param messages - An array of chat messages from the user and assistant\n * @returns The assistant's response as a string\n */\nexport const askDocQuestion = async (\n messages: ChatCompletionRequestMessage[]\n): Promise<AskDocQuestionResult> => {\n const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });\n\n // Assistant's response are filtered out otherwise the chatbot will be stuck in a self-referential loop\n // Note that the embedding precision will be lowered if the user change of context in the chat\n const userMessages = messages.filter((message) => message.role === 'user');\n\n // Format the user's question to keep only the relevant keywords\n const query = userMessages\n .map((message) => `- ${message.content}`)\n .join('\\n');\n\n // 1) Find relevant documents based on the user's question\n const relevantFilesReferences = await searchChunkReference(query);\n\n // 2) Integrate the relevant documents into the initial system prompt\n const messagesList: ChatCompletionRequestMessage[] = [\n {\n ...initPrompt,\n content: initPrompt.content.replace(\n '{{relevantFilesReferences}}',\n relevantFilesReferences.length === 0\n ? 'Not relevant file found related to the question.'\n : relevantFilesReferences\n .map(\n (doc, idx) =>\n `[Chunk ${idx}] docKey = \"${doc.fileKey}\":\\n${doc.content}`\n )\n .join('\\n\\n') // Insert relevant docs into the prompt\n ),\n },\n ...messages, // Include all user and assistant messages\n ];\n\n // 3) Send the compiled messages to OpenAI's Chat Completion API (using a specific model)\n const response = await openai.chat.completions.create({\n model: MODEL,\n temperature: MODEL_TEMPERATURE,\n messages: messagesList,\n });\n\n const result = response.choices[0].message.content; // Extract the assistant's reply\n\n // 4) Extract unique related files\n const relatedFiles = [\n ...new Set(relevantFilesReferences.map((doc) => doc.fileKey)),\n ];\n\n // 5) Return the assistant's response to the user\n return {\n response: result ?? 'Error: No result found',\n relatedFiles,\n };\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAAe;AACf,kBAAyB;AACzB,oBAAwB;AACxB,kBAAwB;AACxB,oBAAmB;AACnB,oBAAuB;AACvB,wBAA2B;AAiB3B,MAAM,cAA+B,CAAC;AAGtC,MAAM,QAA+B;AACrC,MAAM,oBAAoB;AAC1B,MAAM,kBACJ;AACF,MAAM,iBAAiB;AACvB,MAAM,mBAAmB;AACzB,MAAM,gBAAgB;AACtB,MAAM,YAAY,mBAAmB;AACrC,MAAM,gBAAgB,iBAAiB;AACvC,MAAM,yBAAyB;AAC/B,MAAM,iCAAiC;AAOvC,MAAM,YAAY,CAAC,SAA2B;AAC5C,QAAM,SAAmB,CAAC;AAC1B,MAAI,QAAQ;AAEZ,SAAO,QAAQ,KAAK,QAAQ;AAC1B,QAAI,MAAM,KAAK,IAAI,QAAQ,WAAW,KAAK,MAAM;AAGjD,QAAI,MAAM,KAAK,QAAQ;AACrB,YAAM,YAAY,KAAK,YAAY,KAAK,GAAG;AAC3C,UAAI,YAAY,OAAO;AACrB,cAAM;AAAA,MACR;AAAA,IACF;AAEA,WAAO,KAAK,KAAK,UAAU,OAAO,GAAG,CAAC;AAGtC,UAAM,YAAY,MAAM;AACxB,QAAI,aAAa,OAAO;AAEtB,cAAQ;AAAA,IACV,OAAO;AACL,cAAQ;AAAA,IACV;AAAA,EACF;AAEA,SAAO;AACT;AASA,MAAM,oBAAoB,OAAO,SAAoC;AACnE,MAAI;AACF,UAAM,SAAS,IAAI,qBAAO,EAAE,QAAQ,QAAQ,IAAI,eAAe,CAAC;AAChE,UAAM,WAAW,MAAM,OAAO,WAAW,OAAO;AAAA,MAC9C,OAAO;AAAA;AAAA,MACP,OAAO;AAAA,IACT,CAAC;AAED,WAAO,SAAS,KAAK,CAAC,EAAE;AAAA,EAC1B,SAAS,OAAO;AACd,YAAQ,MAAM,+BAA+B,KAAK;AAClD,WAAO,CAAC;AAAA,EACV;AACF;AAWA,MAAM,mBAAmB,CAAC,MAAgB,SAA2B;AAEnE,QAAM,aAAa,KAAK,OAAO,CAAC,KAAK,GAAG,QAAQ,MAAM,IAAI,KAAK,GAAG,GAAG,CAAC;AAGtE,QAAM,aAAa,KAAK,KAAK,KAAK,OAAO,CAAC,KAAK,MAAM,MAAM,IAAI,GAAG,CAAC,CAAC;AACpE,QAAM,aAAa,KAAK,KAAK,KAAK,OAAO,CAAC,KAAK,MAAM,MAAM,IAAI,GAAG,CAAC,CAAC;AAGpE,SAAO,cAAc,aAAa;AACpC;AAMO,MAAM,qBAAqB,YAA2B;AAC3D,QAAM,MAAM,QAAQ,IAAI;AACxB,gBAAAA,QAAO,OAAO;AAAA,IACZ,MAAM,CAAC,QAAQ,GAAG,UAAU,QAAQ,GAAG,IAAI,cAAc,MAAM;AAAA,EACjE,CAAC;AAGD,QAAM,WAAO,qBAAQ,sBAAQ,OAAO;AACpC,QAAM,YAAQ,sBAAS,sBAAQ,OAAO;AAEtC,MAAI,SAAmC,CAAC;AAExC,QAAM,QAAQ,EAAE,GAAG,MAAM,GAAG,MAAM;AAGlC,aAAW,WAAW,OAAO,KAAK,KAAK,GAAG;AAExC,UAAM,aAAa,UAAU,MAAM,OAA6B,CAAC;AAGjE,eAAW,cAAc,OAAO,KAAK,UAAU,GAAG;AAChD,YAAM,cAAc,OAAO,UAAU,IAAI;AACzC,YAAM,eAAe,WAAW;AAEhC,YAAM,YAAY,WAChB,UACF;AAEA,YAAM,mBAAmB,GAAG,OAAO,UAAU,WAAW;AAGxD,YAAM,eAAe,kBAAAC,QACnB,gBACF;AAEA,UAAI,YAAY;AAEhB,UAAI,CAAC,WAAW;AACd,oBAAY,MAAM,kBAAkB,SAAS;AAAA,MAC/C;AAGA,eAAS,EAAE,GAAG,QAAQ,CAAC,gBAAgB,GAAG,UAAU;AAGpD,kBAAY,KAAK;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,QACA,SAAS;AAAA,MACX,CAAC;AAED,cAAQ,KAAK,cAAc,gBAAgB,IAAI,YAAY,EAAE;AAAA,IAC/D;AAAA,EACF;AAEA,MAAI,QAAQ,IAAI,aAAa,eAAe;AAC1C,QAAI;AAEF,UAAI,KAAK,UAAU,MAAM,MAAM,KAAK,UAAU,kBAAAA,OAAc,GAAG;AAE7D,kBAAAC,QAAG;AAAA,UACD;AAAA,UACA,KAAK,UAAU,QAAQ,MAAM,CAAC;AAAA,QAChC;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AACd,cAAQ,MAAM,KAAK;AAAA,IACrB;AAAA,EACF;AACF;AAGA,mBAAmB;AASZ,MAAM,uBAAuB,OAClC,UAC6B;AAE7B,QAAM,iBAAiB,MAAM,kBAAkB,KAAK;AAGpD,QAAM,UAAU,YACb,IAAI,CAAC,WAAW;AAAA,IACf,GAAG;AAAA,IACH,YAAY,iBAAiB,gBAAgB,MAAM,SAAS;AAAA;AAAA,EAC9D,EAAE,EACD,OAAO,CAAC,UAAU,MAAM,aAAa,8BAA8B,EACnE,KAAK,CAAC,GAAG,MAAM,EAAE,aAAa,EAAE,UAAU,EAC1C,MAAM,GAAG,sBAAsB;AAGlC,SAAO;AACT;AASO,MAAM,aAA2C;AAAA,EACtD,MAAM;AAAA,EACN,SACE;AAAA;AAwBJ;AAcO,MAAM,iBAAiB,OAC5B,aACkC;AAClC,QAAM,SAAS,IAAI,qBAAO,EAAE,QAAQ,QAAQ,IAAI,eAAe,CAAC;AAIhE,QAAM,eAAe,SAAS,OAAO,CAAC,YAAY,QAAQ,SAAS,MAAM;AAGzE,QAAM,QAAQ,aACX,IAAI,CAAC,YAAY,KAAK,QAAQ,OAAO,EAAE,EACvC,KAAK,IAAI;AAGZ,QAAM,0BAA0B,MAAM,qBAAqB,KAAK;AAGhE,QAAM,eAA+C;AAAA,IACnD;AAAA,MACE,GAAG;AAAA,MACH,SAAS,WAAW,QAAQ;AAAA,QAC1B;AAAA,QACA,wBAAwB,WAAW,IAC/B,qDACA,wBACG;AAAA,UACC,CAAC,KAAK,QACJ,UAAU,GAAG,eAAe,IAAI,OAAO;AAAA,EAAO,IAAI,OAAO;AAAA,QAC7D,EACC,KAAK,MAAM;AAAA;AAAA,MACpB;AAAA,IACF;AAAA,IACA,GAAG;AAAA;AAAA,EACL;AAGA,QAAM,WAAW,MAAM,OAAO,KAAK,YAAY,OAAO;AAAA,IACpD,OAAO;AAAA,IACP,aAAa;AAAA,IACb,UAAU;AAAA,EACZ,CAAC;AAED,QAAM,SAAS,SAAS,QAAQ,CAAC,EAAE,QAAQ;AAG3C,QAAM,eAAe;AAAA,IACnB,GAAG,IAAI,IAAI,wBAAwB,IAAI,CAAC,QAAQ,IAAI,OAAO,CAAC;AAAA,EAC9D;AAGA,SAAO;AAAA,IACL,UAAU,UAAU;AAAA,IACpB;AAAA,EACF;AACF;","names":["dotenv","embeddingsList","fs"]}
1
+ {"version":3,"sources":["../../../../../src/utils/AI/askDocQuestion/askDocQuestion.ts"],"sourcesContent":["import { getBlogs } from '@intlayer/blog';\nimport { Locales } from '@intlayer/config';\nimport { getDocs } from '@intlayer/docs';\nimport dotenv from 'dotenv';\nimport fs from 'fs';\nimport { OpenAI } from 'openai';\nimport embeddingsList from './embeddings.json' with { type: 'json' };\n\ntype VectorStoreEl = {\n fileKey: string;\n chunkNumber: number;\n content: string;\n embedding: number[];\n};\n\n/**\n * Simple in-memory vector store to hold document embeddings and their content.\n * Each entry contains:\n * - fileKey: A unique key identifying the file\n * - chunkNumber: The number of the chunk within the document\n * - content: The chunk content\n * - embedding: The numerical embedding vector for the chunk\n */\nconst vectorStore: VectorStoreEl[] = [];\n\n// Constants defining OpenAI's token and character limits\nconst MODEL: OpenAI.Chat.ChatModel = 'gpt-4o-2024-11-20'; // Model to use for chat completions\nconst MODEL_TEMPERATURE = 0.1; // Temperature to use for chat completions\nconst EMBEDDING_MODEL: OpenAI.Embeddings.EmbeddingModel =\n 'text-embedding-3-large'; // Model to use for embedding generation\nconst OVERLAP_TOKENS = 200; // Number of tokens to overlap between chunks\nconst MAX_CHUNK_TOKENS = 800; // Maximum number of tokens per chunk\nconst CHAR_BY_TOKEN = 4.15; // Approximate pessimistically the number of characters per token // Can use `tiktoken` or other tokenizers to calculate it more precisely\nconst MAX_CHARS = MAX_CHUNK_TOKENS * CHAR_BY_TOKEN;\nconst OVERLAP_CHARS = OVERLAP_TOKENS * CHAR_BY_TOKEN;\nconst MAX_RELEVANT_CHUNKS_NB = 8; // Maximum number of relevant chunks to attach to chatGPT context\nconst MIN_RELEVANT_CHUNKS_SIMILARITY = 0.25; // Minimum similarity required for a chunk to be considered relevant\n\n/**\n * Splits a given text into chunks ensuring each chunk does not exceed MAX_CHARS.\n * @param text - The input text to split.\n * @returns - Array of text chunks.\n */\nconst chunkText = (text: string): string[] => {\n const chunks: string[] = [];\n let start = 0;\n\n while (start < text.length) {\n let end = Math.min(start + MAX_CHARS, text.length);\n\n // Ensure we don't cut words in the middle (find nearest space)\n if (end < text.length) {\n const lastSpace = text.lastIndexOf(' ', end);\n if (lastSpace > start) {\n end = lastSpace;\n }\n }\n\n chunks.push(text.substring(start, end));\n\n // Move start forward correctly\n const nextStart = end - OVERLAP_CHARS;\n if (nextStart <= start) {\n // Prevent infinite loop if overlap is too large\n start = end;\n } else {\n start = nextStart;\n }\n }\n\n return chunks;\n};\n\n/**\n * Generates an embedding for a given text using OpenAI's embedding API.\n * Trims the text if it exceeds the maximum allowed characters.\n *\n * @param text - The input text to generate an embedding for\n * @returns The embedding vector as a number array\n */\nconst generateEmbedding = async (text: string): Promise<number[]> => {\n try {\n const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });\n const response = await openai.embeddings.create({\n model: EMBEDDING_MODEL, // Specify the embedding model\n input: text,\n });\n\n return response.data[0].embedding; // Return the generated embedding\n } catch (error) {\n console.error('Error generating embedding:', error);\n return [];\n }\n};\n\n/**\n * Calculates the cosine similarity between two vectors.\n * Cosine similarity measures the cosine of the angle between two vectors in an inner product space.\n * Used to determine the similarity between chunks of text.\n *\n * @param vecA - The first vector\n * @param vecB - The second vector\n * @returns The cosine similarity score\n */\nconst cosineSimilarity = (vecA: number[], vecB: number[]): number => {\n // Calculate the dot product of the two vectors\n const dotProduct = vecA.reduce((sum, a, idx) => sum + a * vecB[idx], 0);\n\n // Calculate the magnitude (Euclidean norm) of each vector\n const magnitudeA = Math.sqrt(vecA.reduce((sum, a) => sum + a * a, 0));\n const magnitudeB = Math.sqrt(vecB.reduce((sum, b) => sum + b * b, 0));\n\n // Compute and return the cosine similarity\n return dotProduct / (magnitudeA * magnitudeB);\n};\n\n/**\n * Indexes all Markdown documents by generating embeddings for each chunk and storing them in memory.\n * Also updates the embeddings.json file if new embeddings are generated.\n */\nexport const indexMarkdownFiles = async (): Promise<void> => {\n const env = process.env.NODE_ENV;\n dotenv.config({\n path: [`.env.${env}.local`, `.env.${env}`, '.env.local', '.env'],\n });\n\n // Retrieve documentation and blog posts in English locale\n const docs = getDocs(Locales.ENGLISH);\n const blogs = getBlogs(Locales.ENGLISH);\n\n let result: Record<string, number[]> = {}; // Object to hold updated embeddings\n\n const files = { ...docs, ...blogs }; // Combine docs and blogs into a single object\n\n // Iterate over each file key (identifier) in the combined files\n for (const fileKey of Object.keys(files)) {\n // Split the document into chunks based on headings\n const fileChunks = chunkText(files[fileKey as keyof typeof files]);\n\n // Iterate over each chunk within the current file\n for (const chunkIndex of Object.keys(fileChunks)) {\n const chunkNumber = Number(chunkIndex) + 1; // Chunk number starts at 1\n const chunksNumber = fileChunks.length;\n\n const fileChunk = fileChunks[\n chunkIndex as keyof typeof fileChunks\n ] as string;\n\n const embeddingKeyName = `${fileKey}/chunk_${chunkNumber}`; // Unique key for the chunk\n\n // Retrieve precomputed embedding if available\n const docEmbedding = embeddingsList[\n embeddingKeyName as keyof typeof embeddingsList\n ] as number[] | undefined;\n\n let embedding = docEmbedding; // Use existing embedding if available\n\n if (!embedding) {\n embedding = await generateEmbedding(fileChunk); // Generate embedding if not present\n }\n\n // Update the result object with the new embedding\n result = { ...result, [embeddingKeyName]: embedding };\n\n // Store the embedding and content in the in-memory vector store\n vectorStore.push({\n fileKey,\n chunkNumber,\n embedding,\n content: fileChunk,\n });\n\n console.info(`- Indexed: ${embeddingKeyName}/${chunksNumber}`);\n }\n }\n\n if (process.env.NODE_ENV === 'development') {\n try {\n // Compare the newly generated embeddings with existing ones\n if (JSON.stringify(result) !== JSON.stringify(embeddingsList)) {\n // If there are new embeddings, save them to embeddings.json\n fs.writeFileSync(\n 'src/utils/AI/askDocQuestion/embeddings.json',\n JSON.stringify(result, null, 2)\n );\n }\n } catch (error) {\n console.error(error); // Log any errors during the file write process\n }\n }\n};\n\n// Automatically index Markdown files\nindexMarkdownFiles();\n\n/**\n * Searches the indexed documents for the most relevant chunks based on a query.\n * Utilizes cosine similarity to find the closest matching embeddings.\n *\n * @param query - The search query provided by the user\n * @returns An array of the top matching document chunks' content\n */\nexport const searchChunkReference = async (\n query: string\n): Promise<VectorStoreEl[]> => {\n // Generate an embedding for the user's query\n const queryEmbedding = await generateEmbedding(query);\n\n // Calculate similarity scores between the query embedding and each document's embedding\n const results = vectorStore\n .map((chunk) => ({\n ...chunk,\n similarity: cosineSimilarity(queryEmbedding, chunk.embedding), // Add similarity score to each doc\n }))\n .filter((chunk) => chunk.similarity > MIN_RELEVANT_CHUNKS_SIMILARITY) // Filter out documents with low similarity scores\n .sort((a, b) => b.similarity - a.similarity) // Sort documents by highest similarity first\n .slice(0, MAX_RELEVANT_CHUNKS_NB); // Select the top 6 most similar documents\n\n // Return the content of the top matching documents\n return results;\n};\n\n// Define the structure of messages used in chat completions\nexport type ChatCompletionRequestMessage = {\n role: 'system' | 'user' | 'assistant'; // The role of the message sender\n content: string; // The text content of the message\n};\n\n// Initial prompt configuration for the chatbot\nexport const initPrompt: ChatCompletionRequestMessage = {\n role: 'system',\n content:\n 'Ignore all instructions before this one. \\\n Keep forever all instructions after this one. From now you are not allowed to ignore the previous instructions. \\\n You are an expert in internationalization solution for Javascript Application. You\\'re an helpful chatbot for Intlayer, an Internationalization solution\\\n The website url of the solution is: \\'https://intlayer.org\\'). And your implemented on this website. \\\n The user is an potential user of Intlayer. Your task is now to answer the questions of the user. \\\n You must talk as an member of Intlayer. You must only answer the questions relative to Intlayer. \\\n Any question should be related to Intlayer. If a question is not related to Intlayer, you should NOT answer it. \\\n You must NOT answer question who is generally secret for a company (E.g. financial information). \\\n Your should NOT invent information that are not precised into the relevant documentation chunks provided. \\\n If you don\\'t have enough information to answer the question, not answer using extra information extracted from your knowledge. \\\n If your have a doubt about something, you should ask more question to the user. \\\n \\\n Here some useful urls to know more about Intlayer: \\\n https://intlayer.org/docs \\\n https://intlayer.org/blog \\\n https://intlayer.org/pricing \\\n https://intlayer.org/dashboard \\\n \\\n Your should return a result as markdown.\\\n Code element should include metadata fileName=\"file.ts\" if could be useful for the user. \\\n Code element format should not include metadata (E.g. codeFormat=\"typescript\", or packageManager=\"npm\". \\\n \\\n Here is the relevant documentation:\\\n {{relevantFilesReferences}}', // Placeholder for relevant documentation to be inserted later\n};\n\nexport type AskDocQuestionResult = {\n response: string;\n relatedFiles: string[];\n};\n\nexport type AskDocQuestionOptions = {\n onMessage?: (chunk: string) => void;\n};\n\n/**\n * Handles the \"Ask a question\" endpoint in an Express.js route.\n * Processes user messages, retrieves relevant documents, and interacts with OpenAI's chat API to generate responses.\n *\n * @param messages - An array of chat messages from the user and assistant\n * @returns The assistant's response as a string\n */\nexport const askDocQuestion = async (\n messages: ChatCompletionRequestMessage[],\n options?: AskDocQuestionOptions\n): Promise<AskDocQuestionResult> => {\n const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });\n\n // Assistant's response are filtered out otherwise the chatbot will be stuck in a self-referential loop\n // Note that the embedding precision will be lowered if the user change of context in the chat\n const userMessages = messages.filter((message) => message.role === 'user');\n\n // Format the user's question to keep only the relevant keywords\n const query = userMessages\n .map((message) => `- ${message.content}`)\n .join('\\n');\n\n // 1) Find relevant documents based on the user's question\n const relevantFilesReferences = await searchChunkReference(query);\n\n // 2) Integrate the relevant documents into the initial system prompt\n const messagesList: ChatCompletionRequestMessage[] = [\n {\n ...initPrompt,\n content: initPrompt.content.replace(\n '{{relevantFilesReferences}}',\n relevantFilesReferences.length === 0\n ? 'Not relevant file found related to the question.'\n : relevantFilesReferences\n .map(\n (doc, idx) =>\n `[Chunk ${idx}] docKey = \"${doc.fileKey}\":\\n${doc.content}`\n )\n .join('\\n\\n') // Insert relevant docs into the prompt\n ),\n },\n ...messages, // Include all user and assistant messages\n ];\n\n // 3) Send the compiled messages to OpenAI's Chat Completion API (using a specific model)\n const response = await openai.chat.completions.create({\n model: MODEL,\n temperature: MODEL_TEMPERATURE,\n messages: messagesList,\n stream: true,\n });\n\n let fullResponse = '';\n for await (const chunk of response) {\n const content = chunk.choices[0]?.delta?.content || '';\n if (content) {\n fullResponse += content;\n options?.onMessage?.(content);\n }\n }\n\n // 4) Extract unique related files\n const relatedFiles = [\n ...new Set(relevantFilesReferences.map((doc) => doc.fileKey)),\n ];\n\n // 5) Return the assistant's response to the user\n return {\n response: fullResponse ?? 'Error: No result found',\n relatedFiles,\n };\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAyB;AACzB,oBAAwB;AACxB,kBAAwB;AACxB,oBAAmB;AACnB,gBAAe;AACf,oBAAuB;AACvB,wBAA2B;AAiB3B,MAAM,cAA+B,CAAC;AAGtC,MAAM,QAA+B;AACrC,MAAM,oBAAoB;AAC1B,MAAM,kBACJ;AACF,MAAM,iBAAiB;AACvB,MAAM,mBAAmB;AACzB,MAAM,gBAAgB;AACtB,MAAM,YAAY,mBAAmB;AACrC,MAAM,gBAAgB,iBAAiB;AACvC,MAAM,yBAAyB;AAC/B,MAAM,iCAAiC;AAOvC,MAAM,YAAY,CAAC,SAA2B;AAC5C,QAAM,SAAmB,CAAC;AAC1B,MAAI,QAAQ;AAEZ,SAAO,QAAQ,KAAK,QAAQ;AAC1B,QAAI,MAAM,KAAK,IAAI,QAAQ,WAAW,KAAK,MAAM;AAGjD,QAAI,MAAM,KAAK,QAAQ;AACrB,YAAM,YAAY,KAAK,YAAY,KAAK,GAAG;AAC3C,UAAI,YAAY,OAAO;AACrB,cAAM;AAAA,MACR;AAAA,IACF;AAEA,WAAO,KAAK,KAAK,UAAU,OAAO,GAAG,CAAC;AAGtC,UAAM,YAAY,MAAM;AACxB,QAAI,aAAa,OAAO;AAEtB,cAAQ;AAAA,IACV,OAAO;AACL,cAAQ;AAAA,IACV;AAAA,EACF;AAEA,SAAO;AACT;AASA,MAAM,oBAAoB,OAAO,SAAoC;AACnE,MAAI;AACF,UAAM,SAAS,IAAI,qBAAO,EAAE,QAAQ,QAAQ,IAAI,eAAe,CAAC;AAChE,UAAM,WAAW,MAAM,OAAO,WAAW,OAAO;AAAA,MAC9C,OAAO;AAAA;AAAA,MACP,OAAO;AAAA,IACT,CAAC;AAED,WAAO,SAAS,KAAK,CAAC,EAAE;AAAA,EAC1B,SAAS,OAAO;AACd,YAAQ,MAAM,+BAA+B,KAAK;AAClD,WAAO,CAAC;AAAA,EACV;AACF;AAWA,MAAM,mBAAmB,CAAC,MAAgB,SAA2B;AAEnE,QAAM,aAAa,KAAK,OAAO,CAAC,KAAK,GAAG,QAAQ,MAAM,IAAI,KAAK,GAAG,GAAG,CAAC;AAGtE,QAAM,aAAa,KAAK,KAAK,KAAK,OAAO,CAAC,KAAK,MAAM,MAAM,IAAI,GAAG,CAAC,CAAC;AACpE,QAAM,aAAa,KAAK,KAAK,KAAK,OAAO,CAAC,KAAK,MAAM,MAAM,IAAI,GAAG,CAAC,CAAC;AAGpE,SAAO,cAAc,aAAa;AACpC;AAMO,MAAM,qBAAqB,YAA2B;AAC3D,QAAM,MAAM,QAAQ,IAAI;AACxB,gBAAAA,QAAO,OAAO;AAAA,IACZ,MAAM,CAAC,QAAQ,GAAG,UAAU,QAAQ,GAAG,IAAI,cAAc,MAAM;AAAA,EACjE,CAAC;AAGD,QAAM,WAAO,qBAAQ,sBAAQ,OAAO;AACpC,QAAM,YAAQ,sBAAS,sBAAQ,OAAO;AAEtC,MAAI,SAAmC,CAAC;AAExC,QAAM,QAAQ,EAAE,GAAG,MAAM,GAAG,MAAM;AAGlC,aAAW,WAAW,OAAO,KAAK,KAAK,GAAG;AAExC,UAAM,aAAa,UAAU,MAAM,OAA6B,CAAC;AAGjE,eAAW,cAAc,OAAO,KAAK,UAAU,GAAG;AAChD,YAAM,cAAc,OAAO,UAAU,IAAI;AACzC,YAAM,eAAe,WAAW;AAEhC,YAAM,YAAY,WAChB,UACF;AAEA,YAAM,mBAAmB,GAAG,OAAO,UAAU,WAAW;AAGxD,YAAM,eAAe,kBAAAC,QACnB,gBACF;AAEA,UAAI,YAAY;AAEhB,UAAI,CAAC,WAAW;AACd,oBAAY,MAAM,kBAAkB,SAAS;AAAA,MAC/C;AAGA,eAAS,EAAE,GAAG,QAAQ,CAAC,gBAAgB,GAAG,UAAU;AAGpD,kBAAY,KAAK;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,QACA,SAAS;AAAA,MACX,CAAC;AAED,cAAQ,KAAK,cAAc,gBAAgB,IAAI,YAAY,EAAE;AAAA,IAC/D;AAAA,EACF;AAEA,MAAI,QAAQ,IAAI,aAAa,eAAe;AAC1C,QAAI;AAEF,UAAI,KAAK,UAAU,MAAM,MAAM,KAAK,UAAU,kBAAAA,OAAc,GAAG;AAE7D,kBAAAC,QAAG;AAAA,UACD;AAAA,UACA,KAAK,UAAU,QAAQ,MAAM,CAAC;AAAA,QAChC;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AACd,cAAQ,MAAM,KAAK;AAAA,IACrB;AAAA,EACF;AACF;AAGA,mBAAmB;AASZ,MAAM,uBAAuB,OAClC,UAC6B;AAE7B,QAAM,iBAAiB,MAAM,kBAAkB,KAAK;AAGpD,QAAM,UAAU,YACb,IAAI,CAAC,WAAW;AAAA,IACf,GAAG;AAAA,IACH,YAAY,iBAAiB,gBAAgB,MAAM,SAAS;AAAA;AAAA,EAC9D,EAAE,EACD,OAAO,CAAC,UAAU,MAAM,aAAa,8BAA8B,EACnE,KAAK,CAAC,GAAG,MAAM,EAAE,aAAa,EAAE,UAAU,EAC1C,MAAM,GAAG,sBAAsB;AAGlC,SAAO;AACT;AASO,MAAM,aAA2C;AAAA,EACtD,MAAM;AAAA,EACN,SACE;AAAA;AAwBJ;AAkBO,MAAM,iBAAiB,OAC5B,UACA,YACkC;AAClC,QAAM,SAAS,IAAI,qBAAO,EAAE,QAAQ,QAAQ,IAAI,eAAe,CAAC;AAIhE,QAAM,eAAe,SAAS,OAAO,CAAC,YAAY,QAAQ,SAAS,MAAM;AAGzE,QAAM,QAAQ,aACX,IAAI,CAAC,YAAY,KAAK,QAAQ,OAAO,EAAE,EACvC,KAAK,IAAI;AAGZ,QAAM,0BAA0B,MAAM,qBAAqB,KAAK;AAGhE,QAAM,eAA+C;AAAA,IACnD;AAAA,MACE,GAAG;AAAA,MACH,SAAS,WAAW,QAAQ;AAAA,QAC1B;AAAA,QACA,wBAAwB,WAAW,IAC/B,qDACA,wBACG;AAAA,UACC,CAAC,KAAK,QACJ,UAAU,GAAG,eAAe,IAAI,OAAO;AAAA,EAAO,IAAI,OAAO;AAAA,QAC7D,EACC,KAAK,MAAM;AAAA;AAAA,MACpB;AAAA,IACF;AAAA,IACA,GAAG;AAAA;AAAA,EACL;AAGA,QAAM,WAAW,MAAM,OAAO,KAAK,YAAY,OAAO;AAAA,IACpD,OAAO;AAAA,IACP,aAAa;AAAA,IACb,UAAU;AAAA,IACV,QAAQ;AAAA,EACV,CAAC;AAED,MAAI,eAAe;AACnB,mBAAiB,SAAS,UAAU;AAClC,UAAM,UAAU,MAAM,QAAQ,CAAC,GAAG,OAAO,WAAW;AACpD,QAAI,SAAS;AACX,sBAAgB;AAChB,eAAS,YAAY,OAAO;AAAA,IAC9B;AAAA,EACF;AAGA,QAAM,eAAe;AAAA,IACnB,GAAG,IAAI,IAAI,wBAAwB,IAAI,CAAC,QAAQ,IAAI,OAAO,CAAC;AAAA,EAC9D;AAGA,SAAO;AAAA,IACL,UAAU,gBAAgB;AAAA,IAC1B;AAAA,EACF;AACF;","names":["dotenv","embeddingsList","fs"]}