@intlayer/backend 5.8.1-canary.0 → 6.0.0-canary.0

This diff compares the published contents of the two package versions as they appear in their public registry. It is provided for informational purposes only.
Files changed (109)
  1. package/dist/cjs/controllers/dictionary.controller.cjs +47 -23
  2. package/dist/cjs/controllers/dictionary.controller.cjs.map +1 -1
  3. package/dist/cjs/controllers/eventListener.controller.cjs +2 -6
  4. package/dist/cjs/controllers/eventListener.controller.cjs.map +1 -1
  5. package/dist/cjs/controllers/project.controller.cjs.map +1 -1
  6. package/dist/cjs/controllers/user.controller.cjs +21 -4
  7. package/dist/cjs/controllers/user.controller.cjs.map +1 -1
  8. package/dist/cjs/index.cjs.map +1 -1
  9. package/dist/cjs/middlewares/oAuth2.middleware.cjs +1 -1
  10. package/dist/cjs/middlewares/oAuth2.middleware.cjs.map +1 -1
  11. package/dist/cjs/routes/dictionary.routes.cjs +9 -0
  12. package/dist/cjs/routes/dictionary.routes.cjs.map +1 -1
  13. package/dist/cjs/routes/eventListener.routes.cjs +2 -2
  14. package/dist/cjs/routes/eventListener.routes.cjs.map +1 -1
  15. package/dist/cjs/schemas/dictionary.schema.cjs +0 -5
  16. package/dist/cjs/schemas/dictionary.schema.cjs.map +1 -1
  17. package/dist/cjs/services/dictionary.service.cjs +9 -9
  18. package/dist/cjs/services/dictionary.service.cjs.map +1 -1
  19. package/dist/cjs/services/project.service.cjs.map +1 -1
  20. package/dist/cjs/services/user.service.cjs.map +1 -1
  21. package/dist/cjs/types/dictionary.types.cjs.map +1 -1
  22. package/dist/cjs/types/plan.types.cjs.map +1 -1
  23. package/dist/cjs/types/project.types.cjs.map +1 -1
  24. package/dist/cjs/utils/AI/aiSdk.cjs.map +1 -1
  25. package/dist/cjs/utils/AI/askDocQuestion/askDocQuestion.cjs +2 -1
  26. package/dist/cjs/utils/AI/askDocQuestion/askDocQuestion.cjs.map +1 -1
  27. package/dist/cjs/utils/AI/askDocQuestion/embeddings.json +91239 -47121
  28. package/dist/cjs/utils/AI/translateJSON/index.cjs +1 -1
  29. package/dist/cjs/utils/AI/translateJSON/index.cjs.map +1 -1
  30. package/dist/cjs/utils/mapper/dictionary.cjs +4 -3
  31. package/dist/cjs/utils/mapper/dictionary.cjs.map +1 -1
  32. package/dist/cjs/utils/rateLimiter.cjs +2 -2
  33. package/dist/cjs/utils/rateLimiter.cjs.map +1 -1
  34. package/dist/cjs/utils/validation/validateDictionary.cjs +2 -1
  35. package/dist/cjs/utils/validation/validateDictionary.cjs.map +1 -1
  36. package/dist/cjs/utils/validation/validateProject.cjs +2 -1
  37. package/dist/cjs/utils/validation/validateProject.cjs.map +1 -1
  38. package/dist/cjs/utils/validation/validateUser.cjs +3 -2
  39. package/dist/cjs/utils/validation/validateUser.cjs.map +1 -1
  40. package/dist/esm/controllers/dictionary.controller.mjs +46 -23
  41. package/dist/esm/controllers/dictionary.controller.mjs.map +1 -1
  42. package/dist/esm/controllers/eventListener.controller.mjs +2 -6
  43. package/dist/esm/controllers/eventListener.controller.mjs.map +1 -1
  44. package/dist/esm/controllers/project.controller.mjs.map +1 -1
  45. package/dist/esm/controllers/user.controller.mjs +21 -4
  46. package/dist/esm/controllers/user.controller.mjs.map +1 -1
  47. package/dist/esm/index.mjs.map +1 -1
  48. package/dist/esm/middlewares/oAuth2.middleware.mjs +1 -1
  49. package/dist/esm/middlewares/oAuth2.middleware.mjs.map +1 -1
  50. package/dist/esm/routes/dictionary.routes.mjs +10 -0
  51. package/dist/esm/routes/dictionary.routes.mjs.map +1 -1
  52. package/dist/esm/routes/eventListener.routes.mjs +2 -2
  53. package/dist/esm/routes/eventListener.routes.mjs.map +1 -1
  54. package/dist/esm/schemas/dictionary.schema.mjs +0 -5
  55. package/dist/esm/schemas/dictionary.schema.mjs.map +1 -1
  56. package/dist/esm/services/dictionary.service.mjs +9 -9
  57. package/dist/esm/services/dictionary.service.mjs.map +1 -1
  58. package/dist/esm/services/project.service.mjs.map +1 -1
  59. package/dist/esm/services/user.service.mjs.map +1 -1
  60. package/dist/esm/utils/AI/aiSdk.mjs.map +1 -1
  61. package/dist/esm/utils/AI/askDocQuestion/askDocQuestion.mjs +2 -1
  62. package/dist/esm/utils/AI/askDocQuestion/askDocQuestion.mjs.map +1 -1
  63. package/dist/esm/utils/AI/askDocQuestion/embeddings.json +91239 -47121
  64. package/dist/esm/utils/AI/translateJSON/index.mjs +1 -1
  65. package/dist/esm/utils/AI/translateJSON/index.mjs.map +1 -1
  66. package/dist/esm/utils/mapper/dictionary.mjs +4 -3
  67. package/dist/esm/utils/mapper/dictionary.mjs.map +1 -1
  68. package/dist/esm/utils/rateLimiter.mjs +3 -3
  69. package/dist/esm/utils/rateLimiter.mjs.map +1 -1
  70. package/dist/esm/utils/validation/validateDictionary.mjs +2 -1
  71. package/dist/esm/utils/validation/validateDictionary.mjs.map +1 -1
  72. package/dist/esm/utils/validation/validateProject.mjs +2 -1
  73. package/dist/esm/utils/validation/validateProject.mjs.map +1 -1
  74. package/dist/esm/utils/validation/validateUser.mjs +3 -2
  75. package/dist/esm/utils/validation/validateUser.mjs.map +1 -1
  76. package/dist/types/controllers/dictionary.controller.d.ts +5 -0
  77. package/dist/types/controllers/dictionary.controller.d.ts.map +1 -1
  78. package/dist/types/controllers/eventListener.controller.d.ts.map +1 -1
  79. package/dist/types/controllers/project.controller.d.ts +2 -2
  80. package/dist/types/controllers/project.controller.d.ts.map +1 -1
  81. package/dist/types/controllers/user.controller.d.ts +2 -2
  82. package/dist/types/controllers/user.controller.d.ts.map +1 -1
  83. package/dist/types/routes/dictionary.routes.d.ts +5 -0
  84. package/dist/types/routes/dictionary.routes.d.ts.map +1 -1
  85. package/dist/types/routes/eventListener.routes.d.ts +1 -3
  86. package/dist/types/routes/eventListener.routes.d.ts.map +1 -1
  87. package/dist/types/schemas/dictionary.schema.d.ts.map +1 -1
  88. package/dist/types/services/dictionary.service.d.ts +1 -1
  89. package/dist/types/services/project.service.d.ts +2 -2
  90. package/dist/types/services/project.service.d.ts.map +1 -1
  91. package/dist/types/services/user.service.d.ts +2 -2
  92. package/dist/types/services/user.service.d.ts.map +1 -1
  93. package/dist/types/types/dictionary.types.d.ts +4 -2
  94. package/dist/types/types/dictionary.types.d.ts.map +1 -1
  95. package/dist/types/types/plan.types.d.ts +2 -1
  96. package/dist/types/types/plan.types.d.ts.map +1 -1
  97. package/dist/types/types/project.types.d.ts +1 -0
  98. package/dist/types/types/project.types.d.ts.map +1 -1
  99. package/dist/types/utils/AI/aiSdk.d.ts +1 -1
  100. package/dist/types/utils/AI/aiSdk.d.ts.map +1 -1
  101. package/dist/types/utils/AI/askDocQuestion/askDocQuestion.d.ts.map +1 -1
  102. package/dist/types/utils/mapper/dictionary.d.ts +1 -2
  103. package/dist/types/utils/mapper/dictionary.d.ts.map +1 -1
  104. package/dist/types/utils/validation/validateDictionary.d.ts.map +1 -1
  105. package/dist/types/utils/validation/validateProject.d.ts +2 -2
  106. package/dist/types/utils/validation/validateProject.d.ts.map +1 -1
  107. package/dist/types/utils/validation/validateUser.d.ts +2 -2
  108. package/dist/types/utils/validation/validateUser.d.ts.map +1 -1
  109. package/package.json +30 -33
@@ -1 +1 @@
- {"version":3,"sources":["../../../src/services/project.service.ts"],"sourcesContent":["import type {\n Project,\n ProjectData,\n ProjectDocument,\n} from '@/types/project.types';\nimport { ProjectModel } from '@models/project.model';\nimport { ensureMongoDocumentToObject } from '@utils/ensureMongoDocumentToObject';\nimport { GenericError } from '@utils/errors';\nimport type { ProjectFilters } from '@utils/filtersAndPagination/getProjectFiltersAndPagination';\nimport { removeObjectKeys } from '@utils/removeObjectKeys';\nimport {\n type ProjectFields,\n validateProject,\n} from '@utils/validation/validateProject';\nimport type { Types } from 'mongoose';\n\n/**\n * Finds projects based on filters and pagination options.\n * @param filters - MongoDB filter query.\n * @param skip - Number of documents to skip.\n * @param limit - Number of documents to limit.\n * @returns List of projects matching the filters.\n */\nexport const findProjects = async (\n filters: ProjectFilters,\n skip = 0,\n limit = 100\n): Promise<ProjectDocument[]> =>\n await ProjectModel.find(filters).skip(skip).limit(limit);\n\n/**\n * Finds a project by its ID.\n * @param projectId - The ID of the project to find.\n * @returns The project matching the ID.\n */\nexport const getProjectById = async (\n projectId: string | Types.ObjectId\n): Promise<ProjectDocument> => {\n const project = await ProjectModel.findById(projectId);\n\n if (!project) {\n throw new GenericError('PROJECT_NOT_DEFINED', { projectId });\n }\n\n return project;\n};\n\n/**\n * Counts the total number of projects that match the filters.\n * @param filters - MongoDB filter query.\n * @returns Total number of projects.\n */\nexport const countProjects = async (\n filters: ProjectFilters\n): Promise<number> => {\n const result = await ProjectModel.countDocuments(filters);\n\n if (typeof result === 'undefined') {\n throw new GenericError('PROJECT_COUNT_FAILED', { filters });\n }\n\n return result;\n};\n\n/**\n * Creates a new project in the database.\n * @param project - The project data to create.\n * @returns The created project.\n */\nexport const createProject = async (\n project: ProjectData\n): Promise<ProjectDocument> => {\n if ((project as Partial<Project>).oAuth2Access) {\n delete (project as Partial<Project>).oAuth2Access;\n }\n\n const errors = await validateProject(project, ['name']);\n\n if (Object.keys(errors).length > 0) {\n throw new GenericError('PROJECT_INVALID_FIELDS', { errors });\n }\n\n return await ProjectModel.create(project);\n};\n\n/**\n * Updates an existing project in the database by its ID.\n * @param projectId - The ID of the project to update.\n * @param project - The updated project data.\n * @returns The updated project.\n */\nexport const updateProjectById = async (\n projectId: string | Types.ObjectId,\n project: Partial<Project>\n): Promise<ProjectDocument> => {\n const projectObject = ensureMongoDocumentToObject(project);\n const projectToUpdate = removeObjectKeys(projectObject, [\n 'id',\n 'oAuth2Access',\n 'organizationId',\n ]);\n\n const updatedKeys = Object.keys(projectToUpdate) as ProjectFields;\n\n const errors = validateProject(project, updatedKeys);\n\n if (Object.keys(errors).length > 0) {\n throw new GenericError('PROJECT_INVALID_FIELDS', {\n projectId,\n errors,\n });\n }\n\n const result = await ProjectModel.updateOne(\n { _id: projectId },\n projectToUpdate\n );\n\n if (result.matchedCount === 0) {\n throw new GenericError('PROJECT_UPDATE_FAILED', { projectId });\n }\n\n return await 
getProjectById(projectId);\n};\n\n/**\n * Deletes a project from the database by its ID.\n * @param projectId - The ID of the project to delete.\n * @returns The result of the deletion operation.\n */\nexport const deleteProjectById = async (\n projectId: string | Types.ObjectId\n): Promise<ProjectDocument> => {\n const project = await ProjectModel.findByIdAndDelete(projectId);\n\n if (!project) {\n throw new GenericError('PROJECT_NOT_DEFINED', { projectId });\n }\n\n return project;\n};\n"],"mappings":"AAKA,SAAS,oBAAoB;AAC7B,SAAS,mCAAmC;AAC5C,SAAS,oBAAoB;AAE7B,SAAS,wBAAwB;AACjC;AAAA,EAEE;AAAA,OACK;AAUA,MAAM,eAAe,OAC1B,SACA,OAAO,GACP,QAAQ,QAER,MAAM,aAAa,KAAK,OAAO,EAAE,KAAK,IAAI,EAAE,MAAM,KAAK;AAOlD,MAAM,iBAAiB,OAC5B,cAC6B;AAC7B,QAAM,UAAU,MAAM,aAAa,SAAS,SAAS;AAErD,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,aAAa,uBAAuB,EAAE,UAAU,CAAC;AAAA,EAC7D;AAEA,SAAO;AACT;AAOO,MAAM,gBAAgB,OAC3B,YACoB;AACpB,QAAM,SAAS,MAAM,aAAa,eAAe,OAAO;AAExD,MAAI,OAAO,WAAW,aAAa;AACjC,UAAM,IAAI,aAAa,wBAAwB,EAAE,QAAQ,CAAC;AAAA,EAC5D;AAEA,SAAO;AACT;AAOO,MAAM,gBAAgB,OAC3B,YAC6B;AAC7B,MAAK,QAA6B,cAAc;AAC9C,WAAQ,QAA6B;AAAA,EACvC;AAEA,QAAM,SAAS,MAAM,gBAAgB,SAAS,CAAC,MAAM,CAAC;AAEtD,MAAI,OAAO,KAAK,MAAM,EAAE,SAAS,GAAG;AAClC,UAAM,IAAI,aAAa,0BAA0B,EAAE,OAAO,CAAC;AAAA,EAC7D;AAEA,SAAO,MAAM,aAAa,OAAO,OAAO;AAC1C;AAQO,MAAM,oBAAoB,OAC/B,WACA,YAC6B;AAC7B,QAAM,gBAAgB,4BAA4B,OAAO;AACzD,QAAM,kBAAkB,iBAAiB,eAAe;AAAA,IACtD;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,cAAc,OAAO,KAAK,eAAe;AAE/C,QAAM,SAAS,gBAAgB,SAAS,WAAW;AAEnD,MAAI,OAAO,KAAK,MAAM,EAAE,SAAS,GAAG;AAClC,UAAM,IAAI,aAAa,0BAA0B;AAAA,MAC/C;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,SAAS,MAAM,aAAa;AAAA,IAChC,EAAE,KAAK,UAAU;AAAA,IACjB;AAAA,EACF;AAEA,MAAI,OAAO,iBAAiB,GAAG;AAC7B,UAAM,IAAI,aAAa,yBAAyB,EAAE,UAAU,CAAC;AAAA,EAC/D;AAEA,SAAO,MAAM,eAAe,SAAS;AACvC;AAOO,MAAM,oBAAoB,OAC/B,cAC6B;AAC7B,QAAM,UAAU,MAAM,aAAa,kBAAkB,SAAS;AAE9D,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,aAAa,uBAAuB,EAAE,UAAU,CAAC;AAAA,EAC7D;AAEA,SAAO;AACT;","names":[]}
+ {"version":3,"sources":["../../../src/services/project.service.ts"],"sourcesContent":["import type {\n Project,\n ProjectAPI,\n ProjectData,\n ProjectDocument,\n} from '@/types/project.types';\nimport { ProjectModel } from '@models/project.model';\nimport { ensureMongoDocumentToObject } from '@utils/ensureMongoDocumentToObject';\nimport { GenericError } from '@utils/errors';\nimport type { ProjectFilters } from '@utils/filtersAndPagination/getProjectFiltersAndPagination';\nimport { removeObjectKeys } from '@utils/removeObjectKeys';\nimport {\n type ProjectFields,\n validateProject,\n} from '@utils/validation/validateProject';\nimport type { Types } from 'mongoose';\n\n/**\n * Finds projects based on filters and pagination options.\n * @param filters - MongoDB filter query.\n * @param skip - Number of documents to skip.\n * @param limit - Number of documents to limit.\n * @returns List of projects matching the filters.\n */\nexport const findProjects = async (\n filters: ProjectFilters,\n skip = 0,\n limit = 100\n): Promise<ProjectDocument[]> =>\n await ProjectModel.find(filters).skip(skip).limit(limit);\n\n/**\n * Finds a project by its ID.\n * @param projectId - The ID of the project to find.\n * @returns The project matching the ID.\n */\nexport const getProjectById = async (\n projectId: string | Types.ObjectId\n): Promise<ProjectDocument> => {\n const project = await ProjectModel.findById(projectId);\n\n if (!project) {\n throw new GenericError('PROJECT_NOT_DEFINED', { projectId });\n }\n\n return project;\n};\n\n/**\n * Counts the total number of projects that match the filters.\n * @param filters - MongoDB filter query.\n * @returns Total number of projects.\n */\nexport const countProjects = async (\n filters: ProjectFilters\n): Promise<number> => {\n const result = await ProjectModel.countDocuments(filters);\n\n if (typeof result === 'undefined') {\n throw new GenericError('PROJECT_COUNT_FAILED', { filters });\n }\n\n return result;\n};\n\n/**\n * Creates a new project in the database.\n * @param project - The project data to create.\n * @returns The created project.\n */\nexport const createProject = async (\n project: ProjectData\n): Promise<ProjectDocument> => {\n if ((project as Partial<Project>).oAuth2Access) {\n delete (project as Partial<Project>).oAuth2Access;\n }\n\n const errors = await validateProject(project, ['name']);\n\n if (Object.keys(errors).length > 0) {\n throw new GenericError('PROJECT_INVALID_FIELDS', { errors });\n }\n\n return await ProjectModel.create(project);\n};\n\n/**\n * Updates an existing project in the database by its ID.\n * @param projectId - The ID of the project to update.\n * @param project - The updated project data.\n * @returns The updated project.\n */\nexport const updateProjectById = async (\n projectId: string | Types.ObjectId,\n project: Partial<Project | ProjectAPI>\n): Promise<ProjectDocument> => {\n const projectObject = ensureMongoDocumentToObject(project);\n const projectToUpdate = removeObjectKeys(projectObject, [\n 'id',\n 'oAuth2Access',\n 'organizationId',\n ]);\n\n const updatedKeys = Object.keys(projectToUpdate) as ProjectFields;\n\n const errors = validateProject(project, updatedKeys);\n\n if (Object.keys(errors).length > 0) {\n throw new GenericError('PROJECT_INVALID_FIELDS', {\n projectId,\n errors,\n });\n }\n\n const result = await ProjectModel.updateOne(\n { _id: projectId },\n projectToUpdate\n );\n\n if (result.matchedCount === 0) {\n throw new GenericError('PROJECT_UPDATE_FAILED', { projectId });\n }\n\n return 
await getProjectById(projectId);\n};\n\n/**\n * Deletes a project from the database by its ID.\n * @param projectId - The ID of the project to delete.\n * @returns The result of the deletion operation.\n */\nexport const deleteProjectById = async (\n projectId: string | Types.ObjectId\n): Promise<ProjectDocument> => {\n const project = await ProjectModel.findByIdAndDelete(projectId);\n\n if (!project) {\n throw new GenericError('PROJECT_NOT_DEFINED', { projectId });\n }\n\n return project;\n};\n"],"mappings":"AAMA,SAAS,oBAAoB;AAC7B,SAAS,mCAAmC;AAC5C,SAAS,oBAAoB;AAE7B,SAAS,wBAAwB;AACjC;AAAA,EAEE;AAAA,OACK;AAUA,MAAM,eAAe,OAC1B,SACA,OAAO,GACP,QAAQ,QAER,MAAM,aAAa,KAAK,OAAO,EAAE,KAAK,IAAI,EAAE,MAAM,KAAK;AAOlD,MAAM,iBAAiB,OAC5B,cAC6B;AAC7B,QAAM,UAAU,MAAM,aAAa,SAAS,SAAS;AAErD,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,aAAa,uBAAuB,EAAE,UAAU,CAAC;AAAA,EAC7D;AAEA,SAAO;AACT;AAOO,MAAM,gBAAgB,OAC3B,YACoB;AACpB,QAAM,SAAS,MAAM,aAAa,eAAe,OAAO;AAExD,MAAI,OAAO,WAAW,aAAa;AACjC,UAAM,IAAI,aAAa,wBAAwB,EAAE,QAAQ,CAAC;AAAA,EAC5D;AAEA,SAAO;AACT;AAOO,MAAM,gBAAgB,OAC3B,YAC6B;AAC7B,MAAK,QAA6B,cAAc;AAC9C,WAAQ,QAA6B;AAAA,EACvC;AAEA,QAAM,SAAS,MAAM,gBAAgB,SAAS,CAAC,MAAM,CAAC;AAEtD,MAAI,OAAO,KAAK,MAAM,EAAE,SAAS,GAAG;AAClC,UAAM,IAAI,aAAa,0BAA0B,EAAE,OAAO,CAAC;AAAA,EAC7D;AAEA,SAAO,MAAM,aAAa,OAAO,OAAO;AAC1C;AAQO,MAAM,oBAAoB,OAC/B,WACA,YAC6B;AAC7B,QAAM,gBAAgB,4BAA4B,OAAO;AACzD,QAAM,kBAAkB,iBAAiB,eAAe;AAAA,IACtD;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,cAAc,OAAO,KAAK,eAAe;AAE/C,QAAM,SAAS,gBAAgB,SAAS,WAAW;AAEnD,MAAI,OAAO,KAAK,MAAM,EAAE,SAAS,GAAG;AAClC,UAAM,IAAI,aAAa,0BAA0B;AAAA,MAC/C;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,SAAS,MAAM,aAAa;AAAA,IAChC,EAAE,KAAK,UAAU;AAAA,IACjB;AAAA,EACF;AAEA,MAAI,OAAO,iBAAiB,GAAG;AAC7B,UAAM,IAAI,aAAa,yBAAyB,EAAE,UAAU,CAAC;AAAA,EAC/D;AAEA,SAAO,MAAM,eAAe,SAAS;AACvC;AAOO,MAAM,oBAAoB,OAC/B,cAC6B;AAC7B,QAAM,UAAU,MAAM,aAAa,kBAAkB,SAAS;AAE9D,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,aAAa,uBAAuB,EAAE,UAAU,CAAC;AAAA,EAC7D;AAEA,SAAO;AACT;","names":[]}
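(Each side of these source-map hunks is one long JSON line, so the change is easy to miss.) The substantive edit in project.service.ts is the parameter type of `updateProjectById`, which widens from `Partial<Project>` to `Partial<Project | ProjectAPI>`, with `ProjectAPI` added to the type imports. A minimal before/after sketch, using stub types in place of the package's own definitions:

```ts
// Stub shapes for illustration only; the real types live in @/types/project.types.
type Project = { name?: string; organizationId?: unknown; oAuth2Access?: unknown };
type ProjectAPI = { id?: string; name?: string }; // assumed API-facing shape

// 5.8.1-canary.0
declare function updateProjectById(
  projectId: string,
  project: Partial<Project>
): Promise<unknown>;

// 6.0.0-canary.0: an object already mapped to the API shape (e.g. one a client
// sends back) can now be passed without casting.
declare function updateProjectById_v6(
  projectId: string,
  project: Partial<Project | ProjectAPI>
): Promise<unknown>;
```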
@@ -1 +1 @@
- {"version":3,"sources":["../../../src/services/user.service.ts"],"sourcesContent":["import type { User, UserDocument } from '@/types/user.types';\nimport { UserModel } from '@models/user.model';\nimport { GenericError } from '@utils/errors';\nimport type { UserFilters } from '@utils/filtersAndPagination/getUserFiltersAndPagination';\nimport {\n type FieldsToCheck,\n type UserFields,\n validateUser,\n} from '@utils/validation/validateUser';\nimport type { Types } from 'mongoose';\n\n/**\n * Creates a new user with password in the database and hashes the password.\n * @param user - User object with password not hashed.\n * @returns Created user object.\n */\nexport const createUser = async (\n user: Partial<User>\n): Promise<UserDocument> => {\n const fieldsToCheck: FieldsToCheck[] = ['email'];\n\n const errors = validateUser(user, fieldsToCheck);\n\n if (Object.keys(errors).length > 0) {\n throw new GenericError('USER_INVALID_FIELDS', {\n userEmail: user.email,\n errors,\n });\n }\n\n const newUser: UserDocument = await UserModel.create(user);\n\n if (!newUser) {\n throw new GenericError('USER_CREATION_FAILED', { userEmail: user.email });\n }\n\n return newUser;\n};\n\n/**\n * Retrieves a user by email.\n * @param email - User's email.\n * @returns User object or null if no user was found.\n */\nexport const getUserByEmail = async (\n email: string\n): Promise<UserDocument | null> => {\n return await UserModel.findOne({ email });\n};\n\n/**\n * Retrieves users list by email.\n * @param emails - Users email.\n * @returns User object or null if no user was found.\n */\nexport const getUsersByEmails = async (\n emails: string[]\n): Promise<UserDocument[] | null> => {\n return await UserModel.find({ email: { $in: emails } });\n};\n\n/**\n * Checks if a user exists by email.\n * @param email - User's email.\n * @returns True if the user exists, false otherwise.\n */\nexport const checkUserExists = async (email: string): Promise<boolean> => {\n const user = await UserModel.exists({ email });\n return user !== null;\n};\n\n/**\n * Retrieves a user by ID.\n * @param userId - User's ID.\n * @returns User object or null if no user was found.\n */\nexport const getUserById = async (\n userId: string | Types.ObjectId\n): Promise<UserDocument | null> => await UserModel.findById(userId);\n\n/**\n * Retrieves a user by ID.\n * @param userId - User's ID.\n * @returns User object or null if no user was found.\n */\nexport const getUsersByIds = async (\n userIds: (string | Types.ObjectId)[]\n): Promise<UserDocument[] | null> =>\n await UserModel.find({ _id: { $in: userIds } });\n\n/**\n * Finds users based on filters and pagination options.\n * @param filters - MongoDB filter query.\n * @param skip - Number of documents to skip.\n * @param limit - Number of documents to limit.\n * @returns List of users matching the filters.\n */\nexport const findUsers = async (\n filters: UserFilters,\n skip: number,\n limit: number\n): Promise<UserDocument[]> => {\n return await UserModel.find(filters).skip(skip).limit(limit);\n};\n\n/**\n * Counts the total number of users that match the filters.\n * @param filters - MongoDB filter query.\n * @returns Total number of users.\n */\nexport const countUsers = async (filters: UserFilters): Promise<number> => {\n const count = await UserModel.countDocuments(filters);\n\n if (typeof count === 'undefined') {\n throw new GenericError('USER_COUNT_FAILED');\n }\n\n return count;\n};\n\n/**\n * Updates a user's information.\n * @param user - The user object.\n * @param updates - 
The updates to apply to the user.\n * @returns The updated user.\n */\nexport const updateUserById = async (\n userId: string | Types.ObjectId,\n updates: Partial<User>\n): Promise<UserDocument> => {\n const keyToValidate = Object.keys(updates) as UserFields;\n const errors = validateUser(updates, keyToValidate);\n\n if (Object.keys(errors).length > 0) {\n throw new GenericError('USER_INVALID_FIELDS', {\n userId,\n errors,\n });\n }\n\n const result = await UserModel.updateOne({ _id: userId }, { $set: updates });\n\n if (result.matchedCount === 0) {\n throw new GenericError('USER_UPDATE_FAILED', { userId });\n }\n\n const updatedUser = await UserModel.findById(userId);\n\n if (!updatedUser) {\n throw new GenericError('USER_UPDATED_USER_NOT_FOUND', { userId });\n }\n\n return updatedUser;\n};\n\n/**\n * Deletes a user from the database.\n * @param userId - The user object.\n * @returns\n */\nexport const deleteUser = async (\n userId: string | Types.ObjectId\n): Promise<UserDocument> => {\n await getUserById(userId);\n\n const user = await UserModel.findByIdAndDelete(userId);\n\n if (!user) {\n throw new GenericError('USER_NOT_FOUND', { userId });\n }\n\n return user;\n};\n"],"mappings":"AACA,SAAS,iBAAiB;AAC1B,SAAS,oBAAoB;AAE7B;AAAA,EAGE;AAAA,OACK;AAQA,MAAM,aAAa,OACxB,SAC0B;AAC1B,QAAM,gBAAiC,CAAC,OAAO;AAE/C,QAAM,SAAS,aAAa,MAAM,aAAa;AAE/C,MAAI,OAAO,KAAK,MAAM,EAAE,SAAS,GAAG;AAClC,UAAM,IAAI,aAAa,uBAAuB;AAAA,MAC5C,WAAW,KAAK;AAAA,MAChB;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,UAAwB,MAAM,UAAU,OAAO,IAAI;AAEzD,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,aAAa,wBAAwB,EAAE,WAAW,KAAK,MAAM,CAAC;AAAA,EAC1E;AAEA,SAAO;AACT;AAOO,MAAM,iBAAiB,OAC5B,UACiC;AACjC,SAAO,MAAM,UAAU,QAAQ,EAAE,MAAM,CAAC;AAC1C;AAOO,MAAM,mBAAmB,OAC9B,WACmC;AACnC,SAAO,MAAM,UAAU,KAAK,EAAE,OAAO,EAAE,KAAK,OAAO,EAAE,CAAC;AACxD;AAOO,MAAM,kBAAkB,OAAO,UAAoC;AACxE,QAAM,OAAO,MAAM,UAAU,OAAO,EAAE,MAAM,CAAC;AAC7C,SAAO,SAAS;AAClB;AAOO,MAAM,cAAc,OACzB,WACiC,MAAM,UAAU,SAAS,MAAM;AAO3D,MAAM,gBAAgB,OAC3B,YAEA,MAAM,UAAU,KAAK,EAAE,KAAK,EAAE,KAAK,QAAQ,EAAE,CAAC;AASzC,MAAM,YAAY,OACvB,SACA,MACA,UAC4B;AAC5B,SAAO,MAAM,UAAU,KAAK,OAAO,EAAE,KAAK,IAAI,EAAE,MAAM,KAAK;AAC7D;AAOO,MAAM,aAAa,OAAO,YAA0C;AACzE,QAAM,QAAQ,MAAM,UAAU,eAAe,OAAO;AAEpD,MAAI,OAAO,UAAU,aAAa;AAChC,UAAM,IAAI,aAAa,mBAAmB;AAAA,EAC5C;AAEA,SAAO;AACT;AAQO,MAAM,iBAAiB,OAC5B,QACA,YAC0B;AAC1B,QAAM,gBAAgB,OAAO,KAAK,OAAO;AACzC,QAAM,SAAS,aAAa,SAAS,aAAa;AAElD,MAAI,OAAO,KAAK,MAAM,EAAE,SAAS,GAAG;AAClC,UAAM,IAAI,aAAa,uBAAuB;AAAA,MAC5C;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,SAAS,MAAM,UAAU,UAAU,EAAE,KAAK,OAAO,GAAG,EAAE,MAAM,QAAQ,CAAC;AAE3E,MAAI,OAAO,iBAAiB,GAAG;AAC7B,UAAM,IAAI,aAAa,sBAAsB,EAAE,OAAO,CAAC;AAAA,EACzD;AAEA,QAAM,cAAc,MAAM,UAAU,SAAS,MAAM;AAEnD,MAAI,CAAC,aAAa;AAChB,UAAM,IAAI,aAAa,+BAA+B,EAAE,OAAO,CAAC;AAAA,EAClE;AAEA,SAAO;AACT;AAOO,MAAM,aAAa,OACxB,WAC0B;AAC1B,QAAM,YAAY,MAAM;AAExB,QAAM,OAAO,MAAM,UAAU,kBAAkB,MAAM;AAErD,MAAI,CAAC,MAAM;AACT,UAAM,IAAI,aAAa,kBAAkB,EAAE,OAAO,CAAC;AAAA,EACrD;AAEA,SAAO;AACT;","names":[]}
+ {"version":3,"sources":["../../../src/services/user.service.ts"],"sourcesContent":["import type { User, UserAPI, UserDocument } from '@/types/user.types';\nimport { UserModel } from '@models/user.model';\nimport { GenericError } from '@utils/errors';\nimport type { UserFilters } from '@utils/filtersAndPagination/getUserFiltersAndPagination';\nimport {\n type FieldsToCheck,\n type UserFields,\n validateUser,\n} from '@utils/validation/validateUser';\nimport type { Types } from 'mongoose';\n\n/**\n * Creates a new user with password in the database and hashes the password.\n * @param user - User object with password not hashed.\n * @returns Created user object.\n */\nexport const createUser = async (\n user: Partial<User>\n): Promise<UserDocument> => {\n const fieldsToCheck: FieldsToCheck[] = ['email'];\n\n const errors = validateUser(user, fieldsToCheck);\n\n if (Object.keys(errors).length > 0) {\n throw new GenericError('USER_INVALID_FIELDS', {\n userEmail: user.email,\n errors,\n });\n }\n\n const newUser: UserDocument = await UserModel.create(user);\n\n if (!newUser) {\n throw new GenericError('USER_CREATION_FAILED', { userEmail: user.email });\n }\n\n return newUser;\n};\n\n/**\n * Retrieves a user by email.\n * @param email - User's email.\n * @returns User object or null if no user was found.\n */\nexport const getUserByEmail = async (\n email: string\n): Promise<UserDocument | null> => {\n return await UserModel.findOne({ email });\n};\n\n/**\n * Retrieves users list by email.\n * @param emails - Users email.\n * @returns User object or null if no user was found.\n */\nexport const getUsersByEmails = async (\n emails: string[]\n): Promise<UserDocument[] | null> => {\n return await UserModel.find({ email: { $in: emails } });\n};\n\n/**\n * Checks if a user exists by email.\n * @param email - User's email.\n * @returns True if the user exists, false otherwise.\n */\nexport const checkUserExists = async (email: string): Promise<boolean> => {\n const user = await UserModel.exists({ email });\n return user !== null;\n};\n\n/**\n * Retrieves a user by ID.\n * @param userId - User's ID.\n * @returns User object or null if no user was found.\n */\nexport const getUserById = async (\n userId: string | Types.ObjectId\n): Promise<UserDocument | null> => await UserModel.findById(userId);\n\n/**\n * Retrieves a user by ID.\n * @param userId - User's ID.\n * @returns User object or null if no user was found.\n */\nexport const getUsersByIds = async (\n userIds: (string | Types.ObjectId)[]\n): Promise<UserDocument[] | null> =>\n await UserModel.find({ _id: { $in: userIds } });\n\n/**\n * Finds users based on filters and pagination options.\n * @param filters - MongoDB filter query.\n * @param skip - Number of documents to skip.\n * @param limit - Number of documents to limit.\n * @returns List of users matching the filters.\n */\nexport const findUsers = async (\n filters: UserFilters,\n skip: number,\n limit: number\n): Promise<UserDocument[]> => {\n return await UserModel.find(filters).skip(skip).limit(limit);\n};\n\n/**\n * Counts the total number of users that match the filters.\n * @param filters - MongoDB filter query.\n * @returns Total number of users.\n */\nexport const countUsers = async (filters: UserFilters): Promise<number> => {\n const count = await UserModel.countDocuments(filters);\n\n if (typeof count === 'undefined') {\n throw new GenericError('USER_COUNT_FAILED');\n }\n\n return count;\n};\n\n/**\n * Updates a user's information.\n * @param user - The user object.\n * @param 
updates - The updates to apply to the user.\n * @returns The updated user.\n */\nexport const updateUserById = async (\n userId: string | Types.ObjectId,\n updates: Partial<UserAPI>\n): Promise<UserDocument> => {\n const keyToValidate = Object.keys(updates) as UserFields;\n const errors = validateUser(updates, keyToValidate);\n\n if (Object.keys(errors).length > 0) {\n throw new GenericError('USER_INVALID_FIELDS', {\n userId,\n errors,\n });\n }\n\n const result = await UserModel.updateOne({ _id: userId }, { $set: updates });\n\n if (result.matchedCount === 0) {\n throw new GenericError('USER_UPDATE_FAILED', { userId });\n }\n\n const updatedUser = await UserModel.findById(userId);\n\n if (!updatedUser) {\n throw new GenericError('USER_UPDATED_USER_NOT_FOUND', { userId });\n }\n\n return updatedUser;\n};\n\n/**\n * Deletes a user from the database.\n * @param userId - The user object.\n * @returns\n */\nexport const deleteUser = async (\n userId: string | Types.ObjectId\n): Promise<UserDocument> => {\n await getUserById(userId);\n\n const user = await UserModel.findByIdAndDelete(userId);\n\n if (!user) {\n throw new GenericError('USER_NOT_FOUND', { userId });\n }\n\n return user;\n};\n"],"mappings":"AACA,SAAS,iBAAiB;AAC1B,SAAS,oBAAoB;AAE7B;AAAA,EAGE;AAAA,OACK;AAQA,MAAM,aAAa,OACxB,SAC0B;AAC1B,QAAM,gBAAiC,CAAC,OAAO;AAE/C,QAAM,SAAS,aAAa,MAAM,aAAa;AAE/C,MAAI,OAAO,KAAK,MAAM,EAAE,SAAS,GAAG;AAClC,UAAM,IAAI,aAAa,uBAAuB;AAAA,MAC5C,WAAW,KAAK;AAAA,MAChB;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,UAAwB,MAAM,UAAU,OAAO,IAAI;AAEzD,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,aAAa,wBAAwB,EAAE,WAAW,KAAK,MAAM,CAAC;AAAA,EAC1E;AAEA,SAAO;AACT;AAOO,MAAM,iBAAiB,OAC5B,UACiC;AACjC,SAAO,MAAM,UAAU,QAAQ,EAAE,MAAM,CAAC;AAC1C;AAOO,MAAM,mBAAmB,OAC9B,WACmC;AACnC,SAAO,MAAM,UAAU,KAAK,EAAE,OAAO,EAAE,KAAK,OAAO,EAAE,CAAC;AACxD;AAOO,MAAM,kBAAkB,OAAO,UAAoC;AACxE,QAAM,OAAO,MAAM,UAAU,OAAO,EAAE,MAAM,CAAC;AAC7C,SAAO,SAAS;AAClB;AAOO,MAAM,cAAc,OACzB,WACiC,MAAM,UAAU,SAAS,MAAM;AAO3D,MAAM,gBAAgB,OAC3B,YAEA,MAAM,UAAU,KAAK,EAAE,KAAK,EAAE,KAAK,QAAQ,EAAE,CAAC;AASzC,MAAM,YAAY,OACvB,SACA,MACA,UAC4B;AAC5B,SAAO,MAAM,UAAU,KAAK,OAAO,EAAE,KAAK,IAAI,EAAE,MAAM,KAAK;AAC7D;AAOO,MAAM,aAAa,OAAO,YAA0C;AACzE,QAAM,QAAQ,MAAM,UAAU,eAAe,OAAO;AAEpD,MAAI,OAAO,UAAU,aAAa;AAChC,UAAM,IAAI,aAAa,mBAAmB;AAAA,EAC5C;AAEA,SAAO;AACT;AAQO,MAAM,iBAAiB,OAC5B,QACA,YAC0B;AAC1B,QAAM,gBAAgB,OAAO,KAAK,OAAO;AACzC,QAAM,SAAS,aAAa,SAAS,aAAa;AAElD,MAAI,OAAO,KAAK,MAAM,EAAE,SAAS,GAAG;AAClC,UAAM,IAAI,aAAa,uBAAuB;AAAA,MAC5C;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,SAAS,MAAM,UAAU,UAAU,EAAE,KAAK,OAAO,GAAG,EAAE,MAAM,QAAQ,CAAC;AAE3E,MAAI,OAAO,iBAAiB,GAAG;AAC7B,UAAM,IAAI,aAAa,sBAAsB,EAAE,OAAO,CAAC;AAAA,EACzD;AAEA,QAAM,cAAc,MAAM,UAAU,SAAS,MAAM;AAEnD,MAAI,CAAC,aAAa;AAChB,UAAM,IAAI,aAAa,+BAA+B,EAAE,OAAO,CAAC;AAAA,EAClE;AAEA,SAAO;AACT;AAOO,MAAM,aAAa,OACxB,WAC0B;AAC1B,QAAM,YAAY,MAAM;AAExB,QAAM,OAAO,MAAM,UAAU,kBAAkB,MAAM;AAErD,MAAI,CAAC,MAAM;AACT,UAAM,IAAI,aAAa,kBAAkB,EAAE,OAAO,CAAC;AAAA,EACrD;AAEA,SAAO;AACT;","names":[]}
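The user.service.ts map shows the matching change on the user side: `UserAPI` joins the type imports, and `updateUserById` now types its `updates` parameter as `Partial<UserAPI>` instead of `Partial<User>`. Sketched with placeholder types (assumptions, not the package's real definitions):

```ts
// Placeholder shapes for illustration only.
type User = { email: string };
type UserAPI = { id: string; email: string }; // assumed API-facing shape

// 5.8.1-canary.0
declare function updateUserById(
  userId: string,
  updates: Partial<User>
): Promise<unknown>;

// 6.0.0-canary.0: updates are validated against the API-facing user shape.
declare function updateUserById_v6(
  userId: string,
  updates: Partial<UserAPI>
): Promise<unknown>;
```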
@@ -1 +1 @@
- {"version":3,"sources":["../../../../src/utils/AI/aiSdk.ts"],"sourcesContent":["import { anthropic, createAnthropic } from '@ai-sdk/anthropic';\nimport { createDeepSeek, deepseek } from '@ai-sdk/deepseek';\nimport { createGoogleGenerativeAI, google } from '@ai-sdk/google';\nimport { createMistral, mistral } from '@ai-sdk/mistral';\nimport { createOpenAI, openai } from '@ai-sdk/openai';\nimport {\n AssistantModelMessage,\n generateText,\n SystemModelMessage,\n ToolModelMessage,\n UserModelMessage,\n} from 'ai';\nimport { Response } from 'express';\n\ntype AnthropicModel = Parameters<typeof anthropic>[0];\ntype DeepSeekModel = Parameters<typeof deepseek>[0];\ntype MistralModel = Parameters<typeof mistral>[0];\ntype OpenAIModel = Parameters<typeof openai>[0];\ntype GoogleModel = Parameters<typeof google>[0];\n\nexport type Messages = (\n | SystemModelMessage\n | UserModelMessage\n | AssistantModelMessage\n | ToolModelMessage\n)[];\n\n/**\n * Supported AI models\n */\nexport type Model =\n | AnthropicModel\n | DeepSeekModel\n | MistralModel\n | OpenAIModel\n | GoogleModel\n | (string & {});\n\n/**\n * Supported AI SDK providers\n */\nexport enum AIProvider {\n OPENAI = 'openai',\n ANTHROPIC = 'anthropic',\n MISTRAL = 'mistral',\n DEEPSEEK = 'deepseek',\n GEMINI = 'gemini',\n}\n\n/**\n * Common options for all AI providers\n */\nexport type AIOptions = {\n provider?: AIProvider;\n model?: Model;\n temperature?: number;\n apiKey?: string;\n applicationContext?: string;\n};\n\n// Define the structure of messages used in chat completions\nexport type ChatCompletionRequestMessage = {\n role: 'system' | 'user' | 'assistant'; // The role of the message sender\n content: string; // The text content of the message\n timestamp?: Date; // The timestamp of the message\n};\n\ntype AccessType = 'apiKey' | 'registered_user' | 'premium_user' | 'public';\n\nconst getAPIKey = (\n res: Response,\n accessType: AccessType[],\n aiOptions?: AIOptions\n) => {\n const defaultApiKey = process.env.OPENAI_API_KEY;\n\n if (accessType.includes('public')) {\n return aiOptions?.apiKey ?? defaultApiKey;\n }\n\n if (accessType.includes('apiKey') && aiOptions?.apiKey) {\n return aiOptions?.apiKey;\n }\n\n if (accessType.includes('registered_user') && res.locals.user) {\n return aiOptions?.apiKey ?? defaultApiKey;\n }\n\n // TODO: Implement premium user access\n if (accessType.includes('premium_user') && res.locals.user) {\n return aiOptions?.apiKey ?? defaultApiKey;\n }\n\n return undefined;\n};\n\nconst getModel = (\n provider: AIProvider,\n userApiKey: string,\n userModel?: Model,\n defaultModel?: Model\n): Model => {\n // Set default models based on provider\n let fallBackModel: Model = defaultModel ?? 
'chatgpt-4o-latest';\n\n switch (provider) {\n case AIProvider.OPENAI:\n defaultModel = 'chatgpt-4o-latest';\n break;\n case AIProvider.ANTHROPIC:\n defaultModel = 'claude-3-haiku-20240307';\n break;\n case AIProvider.MISTRAL:\n defaultModel = 'mistral-large-latest';\n break;\n case AIProvider.DEEPSEEK:\n defaultModel = 'deepseek-coder';\n break;\n case AIProvider.GEMINI:\n defaultModel = 'gemini-1.5-pro';\n break;\n }\n\n // If the user use his own API, let him use the model he wants\n if (Boolean(userApiKey) && Boolean(userModel)) {\n return userModel!;\n }\n\n if (Boolean(userModel)) {\n throw new Error(\n 'The user should use his own API key to use a custom model'\n );\n }\n\n return fallBackModel;\n};\n\nexport type AIConfig = Parameters<typeof generateText>[0];\n\nconst DEFAULT_PROVIDER: AIProvider = AIProvider.OPENAI as AIProvider;\nconst DEFAULT_TEMPERATURE: number = 1; // ChatGPT 5 accept only temperature 1\n\nexport type AIConfigOptions = {\n userOptions?: AIOptions;\n defaultOptions?: AIOptions;\n accessType?: AccessType[];\n};\n\n/**\n * Get AI model configuration based on the selected provider and options\n * This function handles the configuration for different AI providers\n *\n * @param options Configuration options including provider, API keys, models and temperature\n * @returns Configured AI model ready to use with generateText\n */\nexport const getAIConfig = async (\n res: Response,\n options: AIConfigOptions\n): Promise<AIConfig> => {\n const {\n userOptions,\n defaultOptions,\n accessType = ['registered_user'],\n } = options;\n\n const aiOptions = {\n provider: DEFAULT_PROVIDER,\n temperature: DEFAULT_TEMPERATURE,\n ...defaultOptions,\n ...userOptions,\n } satisfies AIOptions;\n\n const apiKey = getAPIKey(res, accessType, aiOptions);\n\n // Check if API key is provided\n if (!apiKey) {\n throw new Error(`API key for ${aiOptions.provider} is missing`);\n }\n\n const selectedModel = getModel(\n aiOptions.provider,\n apiKey,\n aiOptions.model,\n defaultOptions?.model\n );\n\n const protectedOptions = {\n ...aiOptions,\n apiKey,\n model: selectedModel,\n } satisfies AIOptions;\n\n let languageModel: AIConfig['model'];\n\n switch (protectedOptions.provider) {\n case AIProvider.OPENAI: {\n languageModel = createOpenAI({\n apiKey,\n })(selectedModel);\n break;\n }\n\n case AIProvider.ANTHROPIC: {\n languageModel = createAnthropic({\n apiKey,\n })(selectedModel);\n break;\n }\n\n case AIProvider.MISTRAL: {\n languageModel = createMistral({\n apiKey,\n })(selectedModel);\n break;\n }\n\n case AIProvider.DEEPSEEK: {\n languageModel = createDeepSeek({\n apiKey,\n })(selectedModel);\n break;\n }\n\n case AIProvider.GEMINI: {\n languageModel = createGoogleGenerativeAI({\n apiKey,\n })(selectedModel);\n break;\n }\n\n default: {\n throw new Error(`Provider ${protectedOptions.provider} not supported`);\n }\n }\n\n return {\n model: languageModel,\n temperature: protectedOptions.temperature,\n 
};\n};\n"],"mappings":"AAAA,SAAoB,uBAAuB;AAC3C,SAAS,sBAAgC;AACzC,SAAS,gCAAwC;AACjD,SAAS,qBAA8B;AACvC,SAAS,oBAA4B;AAqC9B,IAAK,aAAL,kBAAKA,gBAAL;AACL,EAAAA,YAAA,YAAS;AACT,EAAAA,YAAA,eAAY;AACZ,EAAAA,YAAA,aAAU;AACV,EAAAA,YAAA,cAAW;AACX,EAAAA,YAAA,YAAS;AALC,SAAAA;AAAA,GAAA;AA4BZ,MAAM,YAAY,CAChB,KACA,YACA,cACG;AACH,QAAM,gBAAgB,QAAQ,IAAI;AAElC,MAAI,WAAW,SAAS,QAAQ,GAAG;AACjC,WAAO,WAAW,UAAU;AAAA,EAC9B;AAEA,MAAI,WAAW,SAAS,QAAQ,KAAK,WAAW,QAAQ;AACtD,WAAO,WAAW;AAAA,EACpB;AAEA,MAAI,WAAW,SAAS,iBAAiB,KAAK,IAAI,OAAO,MAAM;AAC7D,WAAO,WAAW,UAAU;AAAA,EAC9B;AAGA,MAAI,WAAW,SAAS,cAAc,KAAK,IAAI,OAAO,MAAM;AAC1D,WAAO,WAAW,UAAU;AAAA,EAC9B;AAEA,SAAO;AACT;AAEA,MAAM,WAAW,CACf,UACA,YACA,WACA,iBACU;AAEV,MAAI,gBAAuB,gBAAgB;AAE3C,UAAQ,UAAU;AAAA,IAChB,KAAK;AACH,qBAAe;AACf;AAAA,IACF,KAAK;AACH,qBAAe;AACf;AAAA,IACF,KAAK;AACH,qBAAe;AACf;AAAA,IACF,KAAK;AACH,qBAAe;AACf;AAAA,IACF,KAAK;AACH,qBAAe;AACf;AAAA,EACJ;AAGA,MAAI,QAAQ,UAAU,KAAK,QAAQ,SAAS,GAAG;AAC7C,WAAO;AAAA,EACT;AAEA,MAAI,QAAQ,SAAS,GAAG;AACtB,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAIA,MAAM,mBAA+B;AACrC,MAAM,sBAA8B;AAe7B,MAAM,cAAc,OACzB,KACA,YACsB;AACtB,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA,aAAa,CAAC,iBAAiB;AAAA,EACjC,IAAI;AAEJ,QAAM,YAAY;AAAA,IAChB,UAAU;AAAA,IACV,aAAa;AAAA,IACb,GAAG;AAAA,IACH,GAAG;AAAA,EACL;AAEA,QAAM,SAAS,UAAU,KAAK,YAAY,SAAS;AAGnD,MAAI,CAAC,QAAQ;AACX,UAAM,IAAI,MAAM,eAAe,UAAU,QAAQ,aAAa;AAAA,EAChE;AAEA,QAAM,gBAAgB;AAAA,IACpB,UAAU;AAAA,IACV;AAAA,IACA,UAAU;AAAA,IACV,gBAAgB;AAAA,EAClB;AAEA,QAAM,mBAAmB;AAAA,IACvB,GAAG;AAAA,IACH;AAAA,IACA,OAAO;AAAA,EACT;AAEA,MAAI;AAEJ,UAAQ,iBAAiB,UAAU;AAAA,IACjC,KAAK,uBAAmB;AACtB,sBAAgB,aAAa;AAAA,QAC3B;AAAA,MACF,CAAC,EAAE,aAAa;AAChB;AAAA,IACF;AAAA,IAEA,KAAK,6BAAsB;AACzB,sBAAgB,gBAAgB;AAAA,QAC9B;AAAA,MACF,CAAC,EAAE,aAAa;AAChB;AAAA,IACF;AAAA,IAEA,KAAK,yBAAoB;AACvB,sBAAgB,cAAc;AAAA,QAC5B;AAAA,MACF,CAAC,EAAE,aAAa;AAChB;AAAA,IACF;AAAA,IAEA,KAAK,2BAAqB;AACxB,sBAAgB,eAAe;AAAA,QAC7B;AAAA,MACF,CAAC,EAAE,aAAa;AAChB;AAAA,IACF;AAAA,IAEA,KAAK,uBAAmB;AACtB,sBAAgB,yBAAyB;AAAA,QACvC;AAAA,MACF,CAAC,EAAE,aAAa;AAChB;AAAA,IACF;AAAA,IAEA,SAAS;AACP,YAAM,IAAI,MAAM,YAAY,iBAAiB,QAAQ,gBAAgB;AAAA,IACvE;AAAA,EACF;AAEA,SAAO;AAAA,IACL,OAAO;AAAA,IACP,aAAa,iBAAiB;AAAA,EAChC;AACF;","names":["AIProvider"]}
+ {"version":3,"sources":["../../../../src/utils/AI/aiSdk.ts"],"sourcesContent":["import { anthropic, createAnthropic } from '@ai-sdk/anthropic';\nimport { createDeepSeek, deepseek } from '@ai-sdk/deepseek';\nimport { createGoogleGenerativeAI, google } from '@ai-sdk/google';\nimport { createMistral, mistral } from '@ai-sdk/mistral';\nimport { createOpenAI, openai } from '@ai-sdk/openai';\nimport {\n AssistantModelMessage,\n generateText,\n SystemModelMessage,\n ToolModelMessage,\n UserModelMessage,\n} from 'ai';\nimport { Response } from 'express';\n\ntype AnthropicModel = Parameters<typeof anthropic>[0];\ntype DeepSeekModel = Parameters<typeof deepseek>[0];\ntype MistralModel = Parameters<typeof mistral>[0];\ntype OpenAIModel = Parameters<typeof openai>[0];\ntype GoogleModel = Parameters<typeof google>[0];\n\nexport type Messages = (\n | SystemModelMessage\n | UserModelMessage\n | AssistantModelMessage\n | ToolModelMessage\n)[];\n\n/**\n * Supported AI models\n */\nexport type Model =\n | AnthropicModel\n | DeepSeekModel\n | MistralModel\n | OpenAIModel\n | GoogleModel\n | (string & {});\n\n/**\n * Supported AI SDK providers\n */\nexport enum AIProvider {\n OPENAI = 'openai',\n ANTHROPIC = 'anthropic',\n MISTRAL = 'mistral',\n DEEPSEEK = 'deepseek',\n GEMINI = 'gemini',\n}\n\n/**\n * Common options for all AI providers\n */\nexport type AIOptions = {\n provider?: AIProvider;\n model?: Model;\n temperature?: number;\n apiKey?: string;\n applicationContext?: string;\n};\n\n// Define the structure of messages used in chat completions\nexport type ChatCompletionRequestMessage = {\n role: 'system' | 'user' | 'assistant'; // The role of the message sender\n content: string; // The text content of the message\n timestamp?: Date; // The timestamp of the message\n};\n\ntype AccessType = 'apiKey' | 'registered_user' | 'premium_user' | 'public';\n\nconst getAPIKey = (\n res: Response,\n accessType: AccessType[],\n aiOptions?: AIOptions\n) => {\n const defaultApiKey = process.env.OPENAI_API_KEY;\n\n if (accessType.includes('public')) {\n return aiOptions?.apiKey ?? defaultApiKey;\n }\n\n if (accessType.includes('apiKey') && aiOptions?.apiKey) {\n return aiOptions?.apiKey;\n }\n\n if (accessType.includes('registered_user') && res.locals.user) {\n return aiOptions?.apiKey ?? defaultApiKey;\n }\n\n // TODO: Implement premium user access\n if (accessType.includes('premium_user') && res.locals.user) {\n return aiOptions?.apiKey ?? defaultApiKey;\n }\n\n return undefined;\n};\n\nconst getModel = (\n provider: AIProvider,\n userApiKey: string,\n userModel?: Model,\n defaultModel?: Model\n): Model => {\n // Set default models based on provider\n let fallBackModel: Model = defaultModel ?? 
'chatgpt-4o-latest';\n\n switch (provider) {\n case AIProvider.OPENAI:\n defaultModel = 'chatgpt-4o-latest';\n break;\n case AIProvider.ANTHROPIC:\n defaultModel = 'claude-3-haiku-20240307';\n break;\n case AIProvider.MISTRAL:\n defaultModel = 'mistral-large-latest';\n break;\n case AIProvider.DEEPSEEK:\n defaultModel = 'deepseek-coder';\n break;\n case AIProvider.GEMINI:\n defaultModel = 'gemini-1.5-pro';\n break;\n }\n\n // If the user use his own API, let him use the model he wants\n if (Boolean(userApiKey) && Boolean(userModel)) {\n return userModel!;\n }\n\n if (Boolean(userModel)) {\n throw new Error(\n 'The user should use his own API key to use a custom model'\n );\n }\n\n return fallBackModel;\n};\n\nexport type AIConfig = Omit<Parameters<typeof generateText>[0], 'prompt'>;\n\nconst DEFAULT_PROVIDER: AIProvider = AIProvider.OPENAI as AIProvider;\nconst DEFAULT_TEMPERATURE: number = 1; // ChatGPT 5 accept only temperature 1\n\nexport type AIConfigOptions = {\n userOptions?: AIOptions;\n defaultOptions?: AIOptions;\n accessType?: AccessType[];\n};\n\n/**\n * Get AI model configuration based on the selected provider and options\n * This function handles the configuration for different AI providers\n *\n * @param options Configuration options including provider, API keys, models and temperature\n * @returns Configured AI model ready to use with generateText\n */\nexport const getAIConfig = async (\n res: Response,\n options: AIConfigOptions\n): Promise<AIConfig> => {\n const {\n userOptions,\n defaultOptions,\n accessType = ['registered_user'],\n } = options;\n\n const aiOptions = {\n provider: DEFAULT_PROVIDER,\n temperature: DEFAULT_TEMPERATURE,\n ...defaultOptions,\n ...userOptions,\n } satisfies AIOptions;\n\n const apiKey = getAPIKey(res, accessType, aiOptions);\n\n // Check if API key is provided\n if (!apiKey) {\n throw new Error(`API key for ${aiOptions.provider} is missing`);\n }\n\n const selectedModel = getModel(\n aiOptions.provider,\n apiKey,\n aiOptions.model,\n defaultOptions?.model\n );\n\n const protectedOptions = {\n ...aiOptions,\n apiKey,\n model: selectedModel,\n } satisfies AIOptions;\n\n let languageModel: AIConfig['model'];\n\n switch (protectedOptions.provider) {\n case AIProvider.OPENAI: {\n languageModel = createOpenAI({\n apiKey,\n })(selectedModel);\n break;\n }\n\n case AIProvider.ANTHROPIC: {\n languageModel = createAnthropic({\n apiKey,\n })(selectedModel);\n break;\n }\n\n case AIProvider.MISTRAL: {\n languageModel = createMistral({\n apiKey,\n })(selectedModel);\n break;\n }\n\n case AIProvider.DEEPSEEK: {\n languageModel = createDeepSeek({\n apiKey,\n })(selectedModel);\n break;\n }\n\n case AIProvider.GEMINI: {\n languageModel = createGoogleGenerativeAI({\n apiKey,\n })(selectedModel);\n break;\n }\n\n default: {\n throw new Error(`Provider ${protectedOptions.provider} not supported`);\n }\n }\n\n return {\n model: languageModel,\n temperature: protectedOptions.temperature,\n 
};\n};\n"],"mappings":"AAAA,SAAoB,uBAAuB;AAC3C,SAAS,sBAAgC;AACzC,SAAS,gCAAwC;AACjD,SAAS,qBAA8B;AACvC,SAAS,oBAA4B;AAqC9B,IAAK,aAAL,kBAAKA,gBAAL;AACL,EAAAA,YAAA,YAAS;AACT,EAAAA,YAAA,eAAY;AACZ,EAAAA,YAAA,aAAU;AACV,EAAAA,YAAA,cAAW;AACX,EAAAA,YAAA,YAAS;AALC,SAAAA;AAAA,GAAA;AA4BZ,MAAM,YAAY,CAChB,KACA,YACA,cACG;AACH,QAAM,gBAAgB,QAAQ,IAAI;AAElC,MAAI,WAAW,SAAS,QAAQ,GAAG;AACjC,WAAO,WAAW,UAAU;AAAA,EAC9B;AAEA,MAAI,WAAW,SAAS,QAAQ,KAAK,WAAW,QAAQ;AACtD,WAAO,WAAW;AAAA,EACpB;AAEA,MAAI,WAAW,SAAS,iBAAiB,KAAK,IAAI,OAAO,MAAM;AAC7D,WAAO,WAAW,UAAU;AAAA,EAC9B;AAGA,MAAI,WAAW,SAAS,cAAc,KAAK,IAAI,OAAO,MAAM;AAC1D,WAAO,WAAW,UAAU;AAAA,EAC9B;AAEA,SAAO;AACT;AAEA,MAAM,WAAW,CACf,UACA,YACA,WACA,iBACU;AAEV,MAAI,gBAAuB,gBAAgB;AAE3C,UAAQ,UAAU;AAAA,IAChB,KAAK;AACH,qBAAe;AACf;AAAA,IACF,KAAK;AACH,qBAAe;AACf;AAAA,IACF,KAAK;AACH,qBAAe;AACf;AAAA,IACF,KAAK;AACH,qBAAe;AACf;AAAA,IACF,KAAK;AACH,qBAAe;AACf;AAAA,EACJ;AAGA,MAAI,QAAQ,UAAU,KAAK,QAAQ,SAAS,GAAG;AAC7C,WAAO;AAAA,EACT;AAEA,MAAI,QAAQ,SAAS,GAAG;AACtB,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAIA,MAAM,mBAA+B;AACrC,MAAM,sBAA8B;AAe7B,MAAM,cAAc,OACzB,KACA,YACsB;AACtB,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA,aAAa,CAAC,iBAAiB;AAAA,EACjC,IAAI;AAEJ,QAAM,YAAY;AAAA,IAChB,UAAU;AAAA,IACV,aAAa;AAAA,IACb,GAAG;AAAA,IACH,GAAG;AAAA,EACL;AAEA,QAAM,SAAS,UAAU,KAAK,YAAY,SAAS;AAGnD,MAAI,CAAC,QAAQ;AACX,UAAM,IAAI,MAAM,eAAe,UAAU,QAAQ,aAAa;AAAA,EAChE;AAEA,QAAM,gBAAgB;AAAA,IACpB,UAAU;AAAA,IACV;AAAA,IACA,UAAU;AAAA,IACV,gBAAgB;AAAA,EAClB;AAEA,QAAM,mBAAmB;AAAA,IACvB,GAAG;AAAA,IACH;AAAA,IACA,OAAO;AAAA,EACT;AAEA,MAAI;AAEJ,UAAQ,iBAAiB,UAAU;AAAA,IACjC,KAAK,uBAAmB;AACtB,sBAAgB,aAAa;AAAA,QAC3B;AAAA,MACF,CAAC,EAAE,aAAa;AAChB;AAAA,IACF;AAAA,IAEA,KAAK,6BAAsB;AACzB,sBAAgB,gBAAgB;AAAA,QAC9B;AAAA,MACF,CAAC,EAAE,aAAa;AAChB;AAAA,IACF;AAAA,IAEA,KAAK,yBAAoB;AACvB,sBAAgB,cAAc;AAAA,QAC5B;AAAA,MACF,CAAC,EAAE,aAAa;AAChB;AAAA,IACF;AAAA,IAEA,KAAK,2BAAqB;AACxB,sBAAgB,eAAe;AAAA,QAC7B;AAAA,MACF,CAAC,EAAE,aAAa;AAChB;AAAA,IACF;AAAA,IAEA,KAAK,uBAAmB;AACtB,sBAAgB,yBAAyB;AAAA,QACvC;AAAA,MACF,CAAC,EAAE,aAAa;AAChB;AAAA,IACF;AAAA,IAEA,SAAS;AACP,YAAM,IAAI,MAAM,YAAY,iBAAiB,QAAQ,gBAAgB;AAAA,IACvE;AAAA,EACF;AAEA,SAAO;AAAA,IACL,OAAO;AAAA,IACP,aAAa,iBAAiB;AAAA,EAChC;AACF;","names":["AIProvider"]}
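In aiSdk.ts the one source change is the `AIConfig` alias, which now strips the `prompt` slot from `generateText`'s options: `Omit<Parameters<typeof generateText>[0], 'prompt'>`. A config built by `getAIConfig` therefore carries only the model and call settings, and each call site supplies its own input. A sketch of how such a config would be consumed (the `ask` helper is hypothetical):

```ts
import { generateText } from 'ai';

// 6.0.0-canary.0 definition; 5.8.x used Parameters<typeof generateText>[0]
// without the Omit.
type AIConfig = Omit<Parameters<typeof generateText>[0], 'prompt'>;

declare const aiConfig: AIConfig; // e.g. the result of the package's getAIConfig

// Hypothetical helper: the config never carries a prompt, so the caller
// always attaches its own messages when spreading it.
const ask = async (question: string): Promise<string> => {
  const { text } = await generateText({
    ...aiConfig,
    messages: [{ role: 'user', content: question }],
  });
  return text;
};
```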
@@ -26,6 +26,7 @@ const MAX_CHUNK_TOKENS = 800;
  const CHAR_BY_TOKEN = 4.15;
  const MAX_CHARS = MAX_CHUNK_TOKENS * CHAR_BY_TOKEN;
  const OVERLAP_CHARS = OVERLAP_TOKENS * CHAR_BY_TOKEN;
+ const skipDocEmbeddingsIndex = process.env.SKIP_DOC_EMBEDDINGS_INDEX === "true";
  const chunkText = (text) => {
  const chunks = [];
  let start = 0;
@@ -94,7 +95,7 @@ const indexMarkdownFiles = async () => {
  console.info(
  `File "${fileKey}" chunk count changed: ${previousChunkCount} -> ${currentChunkCount}. Regenerating embeddings.`
  );
- shouldRegenerateFileEmbeddings = true;
+ shouldRegenerateFileEmbeddings = !skipDocEmbeddingsIndex;
  }
  for await (const chunkIndex of Object.keys(fileChunks)) {
  const chunkNumber = Number(chunkIndex) + 1;
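Paired with the source map below, this hunk turns a previously commented-out guard into real behavior: the 5.8.x source carried `// if (process.env.SKIP_DOC_EMBEDDINGS_INDEX === 'true') return;` as dead code, while 6.0.0 reads the flag once at module load and uses it to veto embedding regeneration when a file's chunk count changes. A sketch of the resulting decision, reusing the names from the compiled output above:

```ts
// Evaluated once when the module loads (as in the hunk above).
const skipDocEmbeddingsIndex =
  process.env.SKIP_DOC_EMBEDDINGS_INDEX === 'true';

const needsRegeneration = (
  previousChunkCount: number,
  currentChunkCount: number
): boolean => {
  if (currentChunkCount !== previousChunkCount) {
    // 5.8.x returned true unconditionally here; 6.0.0 lets the env flag
    // skip the (paid) OpenAI embedding calls, e.g. in CI runs.
    return !skipDocEmbeddingsIndex;
  }
  return false;
};
```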
@@ -1 +1 @@
- {"version":3,"sources":["../../../../../src/utils/AI/askDocQuestion/askDocQuestion.ts"],"sourcesContent":["import { getBlogs, getDocs, getFrequentQuestions } from '@intlayer/docs';\nimport { streamText } from 'ai';\nimport dotenv from 'dotenv';\nimport { readFileSync, writeFileSync } from 'fs';\nimport { getMarkdownMetadata } from 'intlayer';\nimport { OpenAI } from 'openai';\nimport { dirname, join } from 'path';\nimport { fileURLToPath } from 'url';\nimport {\n AIConfig,\n AIOptions,\n AIProvider,\n ChatCompletionRequestMessage,\n} from '../aiSdk';\nimport embeddingsList from './embeddings.json' with { type: 'json' };\n\ntype VectorStoreEl = {\n fileKey: string;\n chunkNumber: number;\n content: string;\n embedding: number[];\n docUrl: string;\n docName: string;\n};\n\n/**\n * Simple in-memory vector store to hold document embeddings and their content.\n * Each entry contains:\n * - fileKey: A unique key identifying the file\n * - chunkNumber: The number of the chunk within the document\n * - content: The chunk content\n * - embedding: The numerical embedding vector for the chunk\n */\nconst vectorStore: VectorStoreEl[] = [];\n\n/*\n * Ask question AI configuration\n */\nconst MODEL: AIOptions['model'] = 'chatgpt-4o-latest'; // Model to use for chat completions\nconst MODEL_TEMPERATURE: AIOptions['temperature'] = 0.1; // Temperature to use for chat completions\nconst MAX_RELEVANT_CHUNKS_NB: number = 20; // Maximum number of relevant chunks to attach to chatGPT context\nconst MIN_RELEVANT_CHUNKS_SIMILARITY: number = 0.42; // Minimum similarity required for a chunk to be considered relevant\n\nexport const aiDefaultOptions: AIOptions = {\n provider: AIProvider.OPENAI,\n model: MODEL,\n temperature: MODEL_TEMPERATURE,\n};\n\n/*\n * Embedding model configuration\n */\nconst EMBEDDING_MODEL: OpenAI.EmbeddingModel = 'text-embedding-3-large'; // Model to use for embedding generation\nconst OVERLAP_TOKENS: number = 200; // Number of tokens to overlap between chunks\nconst MAX_CHUNK_TOKENS: number = 800; // Maximum number of tokens per chunk\nconst CHAR_BY_TOKEN: number = 4.15; // Approximate pessimistically the number of characters per token // Can use `tiktoken` or other tokenizers to calculate it more precisely\nconst MAX_CHARS: number = MAX_CHUNK_TOKENS * CHAR_BY_TOKEN;\nconst OVERLAP_CHARS: number = OVERLAP_TOKENS * CHAR_BY_TOKEN;\n\n/**\n * Splits a given text into chunks ensuring each chunk does not exceed MAX_CHARS.\n * @param text - The input text to split.\n * @returns - Array of text chunks.\n */\nconst chunkText = (text: string): string[] => {\n const chunks: string[] = [];\n let start = 0;\n\n while (start < text.length) {\n let end = Math.min(start + MAX_CHARS, text.length);\n\n // Ensure we don't cut words in the middle (find nearest space)\n if (end < text.length) {\n const lastSpace = text.lastIndexOf(' ', end);\n if (lastSpace > start) {\n end = lastSpace;\n }\n }\n\n chunks.push(text.substring(start, end));\n\n // Move start forward correctly\n const nextStart = end - OVERLAP_CHARS;\n if (nextStart <= start) {\n // Prevent infinite loop if overlap is too large\n start = end;\n } else {\n start = nextStart;\n }\n }\n\n return chunks;\n};\n\n/**\n * Generates an embedding for a given text using OpenAI's embedding API.\n * Trims the text if it exceeds the maximum allowed characters.\n *\n * @param text - The input text to generate an embedding for\n * @returns The embedding vector as a number array\n */\nconst generateEmbedding = async (text: string): Promise<number[]> => {\n 
try {\n const openaiClient = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });\n\n const response = await openaiClient.embeddings.create({\n model: EMBEDDING_MODEL,\n input: text,\n });\n\n return response.data[0].embedding;\n } catch (error) {\n console.error('Error generating embedding:', error);\n return [];\n }\n};\n\n/**\n * Calculates the cosine similarity between two vectors.\n * Cosine similarity measures the cosine of the angle between two vectors in an inner product space.\n * Used to determine the similarity between chunks of text.\n *\n * @param vecA - The first vector\n * @param vecB - The second vector\n * @returns The cosine similarity score\n */\nconst cosineSimilarity = (vecA: number[], vecB: number[]): number => {\n // Calculate the dot product of the two vectors\n const dotProduct = vecA.reduce((sum, a, idx) => sum + a * vecB[idx], 0);\n\n // Calculate the magnitude (Euclidean norm) of each vector\n const magnitudeA = Math.sqrt(vecA.reduce((sum, a) => sum + a * a, 0));\n const magnitudeB = Math.sqrt(vecB.reduce((sum, b) => sum + b * b, 0));\n\n // Compute and return the cosine similarity\n return dotProduct / (magnitudeA * magnitudeB);\n};\n\n/**\n * Indexes all Markdown documents by generating embeddings for each chunk and storing them in memory.\n * Also updates the embeddings.json file if new embeddings are generated.\n * Handles cases where files have been updated and chunk counts have changed.\n */\nexport const indexMarkdownFiles = async (): Promise<void> => {\n const env = process.env.NODE_ENV;\n dotenv.config({\n path: [`.env.${env}.local`, `.env.${env}`, '.env.local', '.env'],\n });\n\n // if (process.env.SKIP_DOC_EMBEDDINGS_INDEX === 'true') return;\n\n // Retrieve documentation and blog posts in English locale\n const frequentQuestions = await getFrequentQuestions();\n const docs = await getDocs();\n const blogs = await getBlogs();\n\n let result: Record<string, number[]> = {}; // Object to hold updated embeddings\n const currentChunkKeys = new Set<string>(); // Track which chunks should exist\n\n const files = { ...docs, ...blogs, ...frequentQuestions }; // Combine docs and blogs into a single object\n\n // Iterate over each file key (identifier) in the combined files\n for await (const fileKey of Object.keys(files)) {\n // Get the metadata of the file\n const fileMetadata = getMarkdownMetadata(\n files[fileKey as keyof typeof files] as string\n );\n\n // Split the document into chunks based on headings\n const fileChunks = chunkText(\n files[fileKey as keyof typeof files] as string\n );\n\n // Check if the number of chunks has changed for this file\n const existingChunksForFile = Object.keys(embeddingsList).filter((key) =>\n key.startsWith(`${fileKey}/chunk_`)\n );\n const currentChunkCount = fileChunks.length;\n const previousChunkCount = existingChunksForFile.length;\n\n let shouldRegenerateFileEmbeddings = false;\n\n // If chunk count differs, we need to regenerate embeddings for this file\n if (currentChunkCount !== previousChunkCount) {\n console.info(\n `File \"${fileKey}\" chunk count changed: ${previousChunkCount} -> ${currentChunkCount}. 
Regenerating embeddings.`\n );\n shouldRegenerateFileEmbeddings = true;\n }\n\n // Iterate over each chunk within the current file\n for await (const chunkIndex of Object.keys(fileChunks)) {\n const chunkNumber = Number(chunkIndex) + 1; // Chunk number starts at 1\n const chunksNumber = fileChunks.length;\n\n const fileChunk = fileChunks[\n chunkIndex as keyof typeof fileChunks\n ] as string;\n\n const embeddingKeyName = `${fileKey}/chunk_${chunkNumber}`; // Unique key for the chunk\n currentChunkKeys.add(embeddingKeyName); // Track this chunk as current\n\n // Retrieve precomputed embedding if available and file hasn't changed\n const docEmbedding = !shouldRegenerateFileEmbeddings\n ? (embeddingsList[embeddingKeyName as keyof typeof embeddingsList] as\n | number[]\n | undefined)\n : undefined;\n\n let embedding = docEmbedding; // Use existing embedding if available and valid\n\n if (!embedding) {\n embedding = await generateEmbedding(fileChunk); // Generate embedding if not present or file changed\n console.info(`- Generated new embedding: ${embeddingKeyName}`);\n }\n\n // Update the result object with the embedding\n result = { ...result, [embeddingKeyName]: embedding };\n\n // Store the embedding and content in the in-memory vector store\n vectorStore.push({\n fileKey,\n chunkNumber,\n embedding,\n content: fileChunk,\n docUrl: fileMetadata.url,\n docName: fileMetadata.title,\n });\n\n console.info(`- Indexed: ${embeddingKeyName}/${chunksNumber}`);\n }\n }\n\n // Remove outdated embeddings that no longer exist in current files\n const filteredEmbeddings: Record<string, number[]> = {};\n for (const [key, embedding] of Object.entries(embeddingsList)) {\n if (currentChunkKeys.has(key)) {\n // Only keep embeddings for chunks that still exist\n if (!result[key]) {\n filteredEmbeddings[key] = embedding as number[];\n }\n }\n }\n\n // Merge filtered existing embeddings with new ones\n result = { ...filteredEmbeddings, ...result };\n\n try {\n // Compare the newly generated embeddings with existing ones\n if (JSON.stringify(result) !== JSON.stringify(embeddingsList)) {\n // If there are new embeddings or changes, save them to embeddings.json\n writeFileSync(\n 'src/utils/AI/askDocQuestion/embeddings.json',\n JSON.stringify(result, null, 2)\n );\n }\n } catch (error) {\n console.error(error); // Log any errors during the file write process\n }\n};\n\n// Automatically index Markdown files\nindexMarkdownFiles();\n\n/**\n * Searches the indexed documents for the most relevant chunks based on a query.\n * Utilizes cosine similarity to find the closest matching embeddings.\n *\n * @param query - The search query provided by the user\n * @returns An array of the top matching document chunks' content\n */\nexport const searchChunkReference = async (\n query: string,\n maxResults: number = MAX_RELEVANT_CHUNKS_NB,\n minSimilarity: number = MIN_RELEVANT_CHUNKS_SIMILARITY\n): Promise<VectorStoreEl[]> => {\n // Generate an embedding for the user's query\n const queryEmbedding = await generateEmbedding(query);\n\n // Calculate similarity scores between the query embedding and each document's embedding\n const selection = vectorStore\n .map((chunk) => ({\n ...chunk,\n similarity: cosineSimilarity(queryEmbedding, chunk.embedding), // Add similarity score to each doc\n }))\n .filter((chunk) => chunk.similarity > minSimilarity) // Filter out documents with low similarity scores\n .sort((a, b) => b.similarity - a.similarity) // Sort documents by highest similarity first\n .slice(0, maxResults); // Select the top 
6 most similar documents\n\n const orderedDocKeys = new Set(selection.map((chunk) => chunk.fileKey));\n\n const orderedVectorStore = vectorStore.sort((a, b) =>\n orderedDocKeys.has(a.fileKey) ? -1 : 1\n );\n\n const results = orderedVectorStore.filter((chunk) =>\n selection.some(\n (v) => v.fileKey === chunk.fileKey && v.chunkNumber === chunk.chunkNumber\n )\n );\n\n // Return the content of the top matching documents\n return results;\n};\n\n/**\n * Reads the content of a file synchronously.\n *\n * @function\n * @param relativeFilePath - The relative or absolute path to the target file.\n * @returns The entire contents of the specified file as a UTF-8 encoded string.\n */\nconst getFileContent = (relativeFilePath: string): string => {\n const __dirname = dirname(fileURLToPath(import.meta.url));\n const absolutePath = join(__dirname, relativeFilePath);\n const fileContent = readFileSync(absolutePath, 'utf-8');\n return fileContent;\n};\n\nconst CHAT_GPT_PROMPT = getFileContent('./PROMPT.md');\n\n// Initial prompt configuration for the chatbot\nexport const initPrompt: ChatCompletionRequestMessage = {\n role: 'system',\n content: CHAT_GPT_PROMPT,\n};\n\nexport type AskDocQuestionResult = {\n response: string;\n relatedFiles: string[];\n};\n\nexport type AskDocQuestionOptions = {\n onMessage?: (chunk: string) => void;\n};\n\n/**\n * Handles the \"Ask a question\" endpoint in an Express.js route.\n * Processes user messages, retrieves relevant documents, and interacts with AI models to generate responses.\n *\n * @param messages - An array of chat messages from the user and assistant\n * @returns The assistant's response as a string\n */\nexport const askDocQuestion = async (\n messages: ChatCompletionRequestMessage[],\n aiConfig: AIConfig,\n options?: AskDocQuestionOptions\n): Promise<AskDocQuestionResult> => {\n // Format the user's question to keep only the relevant keywords\n const query = messages\n .filter((message) => message.role === 'user')\n .map((message) => `- ${message.content}`)\n .join('\\n');\n\n // 1) Find relevant documents based on the user's question\n const relevantFilesReferences = await searchChunkReference(query);\n\n // 2) Integrate the relevant documents into the initial system prompt\n const systemPrompt = initPrompt.content.replace(\n '{{relevantFilesReferences}}',\n relevantFilesReferences.length === 0\n ? 'Not relevant file found related to the question.'\n : relevantFilesReferences\n .map((doc, idx) =>\n [\n '-----',\n '---',\n `chunkId: ${idx}`,\n `docChunk: \"${doc.chunkNumber}/${doc.fileKey.length}\"`,\n `docName: \"${doc.docName}\"`,\n `docUrl: \"${doc.docUrl}\"`,\n `---`,\n doc.content,\n `-----`,\n ].join('\\n')\n )\n .join('\\n\\n') // Insert relevant docs into the prompt\n );\n\n // Format messages for AI SDK\n const aiMessages = [\n {\n role: 'system' as const,\n content: systemPrompt,\n },\n ...messages.slice(-8),\n ];\n\n if (!aiConfig) {\n throw new Error('Failed to initialize AI configuration');\n }\n\n // 3) Use the AI SDK to stream the response\n let fullResponse = '';\n const stream = streamText({\n ...aiConfig,\n messages: aiMessages,\n });\n\n // Process the stream\n for await (const chunk of stream.textStream) {\n fullResponse += chunk;\n options?.onMessage?.(chunk);\n }\n\n // 4) Extract unique related files\n const relatedFiles = [\n ...new Set(relevantFilesReferences.map((doc) => doc.fileKey)),\n ];\n\n // 5) Return the assistant's response to the user\n return {\n response: fullResponse ?? 
'Error: No result found',\n relatedFiles,\n };\n};\n"],"mappings":"AAAA,SAAS,UAAU,SAAS,4BAA4B;AACxD,SAAS,kBAAkB;AAC3B,OAAO,YAAY;AACnB,SAAS,cAAc,qBAAqB;AAC5C,SAAS,2BAA2B;AACpC,SAAS,cAAc;AACvB,SAAS,SAAS,YAAY;AAC9B,SAAS,qBAAqB;AAC9B;AAAA,EAGE;AAAA,OAEK;AACP,OAAO,oBAAoB,oBAAoB,KAAK,EAAE,MAAM,OAAO;AAmBnE,MAAM,cAA+B,CAAC;AAKtC,MAAM,QAA4B;AAClC,MAAM,oBAA8C;AACpD,MAAM,yBAAiC;AACvC,MAAM,iCAAyC;AAExC,MAAM,mBAA8B;AAAA,EACzC,UAAU,WAAW;AAAA,EACrB,OAAO;AAAA,EACP,aAAa;AACf;AAKA,MAAM,kBAAyC;AAC/C,MAAM,iBAAyB;AAC/B,MAAM,mBAA2B;AACjC,MAAM,gBAAwB;AAC9B,MAAM,YAAoB,mBAAmB;AAC7C,MAAM,gBAAwB,iBAAiB;AAO/C,MAAM,YAAY,CAAC,SAA2B;AAC5C,QAAM,SAAmB,CAAC;AAC1B,MAAI,QAAQ;AAEZ,SAAO,QAAQ,KAAK,QAAQ;AAC1B,QAAI,MAAM,KAAK,IAAI,QAAQ,WAAW,KAAK,MAAM;AAGjD,QAAI,MAAM,KAAK,QAAQ;AACrB,YAAM,YAAY,KAAK,YAAY,KAAK,GAAG;AAC3C,UAAI,YAAY,OAAO;AACrB,cAAM;AAAA,MACR;AAAA,IACF;AAEA,WAAO,KAAK,KAAK,UAAU,OAAO,GAAG,CAAC;AAGtC,UAAM,YAAY,MAAM;AACxB,QAAI,aAAa,OAAO;AAEtB,cAAQ;AAAA,IACV,OAAO;AACL,cAAQ;AAAA,IACV;AAAA,EACF;AAEA,SAAO;AACT;AASA,MAAM,oBAAoB,OAAO,SAAoC;AACnE,MAAI;AACF,UAAM,eAAe,IAAI,OAAO,EAAE,QAAQ,QAAQ,IAAI,eAAe,CAAC;AAEtE,UAAM,WAAW,MAAM,aAAa,WAAW,OAAO;AAAA,MACpD,OAAO;AAAA,MACP,OAAO;AAAA,IACT,CAAC;AAED,WAAO,SAAS,KAAK,CAAC,EAAE;AAAA,EAC1B,SAAS,OAAO;AACd,YAAQ,MAAM,+BAA+B,KAAK;AAClD,WAAO,CAAC;AAAA,EACV;AACF;AAWA,MAAM,mBAAmB,CAAC,MAAgB,SAA2B;AAEnE,QAAM,aAAa,KAAK,OAAO,CAAC,KAAK,GAAG,QAAQ,MAAM,IAAI,KAAK,GAAG,GAAG,CAAC;AAGtE,QAAM,aAAa,KAAK,KAAK,KAAK,OAAO,CAAC,KAAK,MAAM,MAAM,IAAI,GAAG,CAAC,CAAC;AACpE,QAAM,aAAa,KAAK,KAAK,KAAK,OAAO,CAAC,KAAK,MAAM,MAAM,IAAI,GAAG,CAAC,CAAC;AAGpE,SAAO,cAAc,aAAa;AACpC;AAOO,MAAM,qBAAqB,YAA2B;AAC3D,QAAM,MAAM,QAAQ,IAAI;AACxB,SAAO,OAAO;AAAA,IACZ,MAAM,CAAC,QAAQ,GAAG,UAAU,QAAQ,GAAG,IAAI,cAAc,MAAM;AAAA,EACjE,CAAC;AAKD,QAAM,oBAAoB,MAAM,qBAAqB;AACrD,QAAM,OAAO,MAAM,QAAQ;AAC3B,QAAM,QAAQ,MAAM,SAAS;AAE7B,MAAI,SAAmC,CAAC;AACxC,QAAM,mBAAmB,oBAAI,IAAY;AAEzC,QAAM,QAAQ,EAAE,GAAG,MAAM,GAAG,OAAO,GAAG,kBAAkB;AAGxD,mBAAiB,WAAW,OAAO,KAAK,KAAK,GAAG;AAE9C,UAAM,eAAe;AAAA,MACnB,MAAM,OAA6B;AAAA,IACrC;AAGA,UAAM,aAAa;AAAA,MACjB,MAAM,OAA6B;AAAA,IACrC;AAGA,UAAM,wBAAwB,OAAO,KAAK,cAAc,EAAE;AAAA,MAAO,CAAC,QAChE,IAAI,WAAW,GAAG,OAAO,SAAS;AAAA,IACpC;AACA,UAAM,oBAAoB,WAAW;AACrC,UAAM,qBAAqB,sBAAsB;AAEjD,QAAI,iCAAiC;AAGrC,QAAI,sBAAsB,oBAAoB;AAC5C,cAAQ;AAAA,QACN,SAAS,OAAO,0BAA0B,kBAAkB,OAAO,iBAAiB;AAAA,MACtF;AACA,uCAAiC;AAAA,IACnC;AAGA,qBAAiB,cAAc,OAAO,KAAK,UAAU,GAAG;AACtD,YAAM,cAAc,OAAO,UAAU,IAAI;AACzC,YAAM,eAAe,WAAW;AAEhC,YAAM,YAAY,WAChB,UACF;AAEA,YAAM,mBAAmB,GAAG,OAAO,UAAU,WAAW;AACxD,uBAAiB,IAAI,gBAAgB;AAGrC,YAAM,eAAe,CAAC,iCACjB,eAAe,gBAA+C,IAG/D;AAEJ,UAAI,YAAY;AAEhB,UAAI,CAAC,WAAW;AACd,oBAAY,MAAM,kBAAkB,SAAS;AAC7C,gBAAQ,KAAK,8BAA8B,gBAAgB,EAAE;AAAA,MAC/D;AAGA,eAAS,EAAE,GAAG,QAAQ,CAAC,gBAAgB,GAAG,UAAU;AAGpD,kBAAY,KAAK;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,QACA,SAAS;AAAA,QACT,QAAQ,aAAa;AAAA,QACrB,SAAS,aAAa;AAAA,MACxB,CAAC;AAED,cAAQ,KAAK,cAAc,gBAAgB,IAAI,YAAY,EAAE;AAAA,IAC/D;AAAA,EACF;AAGA,QAAM,qBAA+C,CAAC;AACtD,aAAW,CAAC,KAAK,SAAS,KAAK,OAAO,QAAQ,cAAc,GAAG;AAC7D,QAAI,iBAAiB,IAAI,GAAG,GAAG;AAE7B,UAAI,CAAC,OAAO,GAAG,GAAG;AAChB,2BAAmB,GAAG,IAAI;AAAA,MAC5B;AAAA,IACF;AAAA,EACF;AAGA,WAAS,EAAE,GAAG,oBAAoB,GAAG,OAAO;AAE5C,MAAI;AAEF,QAAI,KAAK,UAAU,MAAM,MAAM,KAAK,UAAU,cAAc,GAAG;AAE7D;AAAA,QACE;AAAA,QACA,KAAK,UAAU,QAAQ,MAAM,CAAC;AAAA,MAChC;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,YAAQ,MAAM,KAAK;AAAA,EACrB;AACF;AAGA,mBAAmB;AASZ,MAAM,uBAAuB,OAClC,OACA,aAAqB,wBACrB,gBAAwB,mCACK;AAE7B,QAAM,iBAAiB,MAAM,kBAAkB,KAAK;AAGpD,QAAM,YAAY,YACf,IAAI,CAAC,WAAW;AAAA,IACf,GAAG;AAAA,IACH,YAAY,iBAAiB,gBAAgB,MAAM,SAAS;AAAA;AAAA,EAC9D,EAAE,EACD,OAAO,CAAC,UAAU,MAAM,aAAa,aAAa,EAClD,KAAK,CAAC,GAAG,MAAM,EAAE,
aAAa,EAAE,UAAU,EAC1C,MAAM,GAAG,UAAU;AAEtB,QAAM,iBAAiB,IAAI,IAAI,UAAU,IAAI,CAAC,UAAU,MAAM,OAAO,CAAC;AAEtE,QAAM,qBAAqB,YAAY;AAAA,IAAK,CAAC,GAAG,MAC9C,eAAe,IAAI,EAAE,OAAO,IAAI,KAAK;AAAA,EACvC;AAEA,QAAM,UAAU,mBAAmB;AAAA,IAAO,CAAC,UACzC,UAAU;AAAA,MACR,CAAC,MAAM,EAAE,YAAY,MAAM,WAAW,EAAE,gBAAgB,MAAM;AAAA,IAChE;AAAA,EACF;AAGA,SAAO;AACT;AASA,MAAM,iBAAiB,CAAC,qBAAqC;AAC3D,QAAM,YAAY,QAAQ,cAAc,YAAY,GAAG,CAAC;AACxD,QAAM,eAAe,KAAK,WAAW,gBAAgB;AACrD,QAAM,cAAc,aAAa,cAAc,OAAO;AACtD,SAAO;AACT;AAEA,MAAM,kBAAkB,eAAe,aAAa;AAG7C,MAAM,aAA2C;AAAA,EACtD,MAAM;AAAA,EACN,SAAS;AACX;AAkBO,MAAM,iBAAiB,OAC5B,UACA,UACA,YACkC;AAElC,QAAM,QAAQ,SACX,OAAO,CAAC,YAAY,QAAQ,SAAS,MAAM,EAC3C,IAAI,CAAC,YAAY,KAAK,QAAQ,OAAO,EAAE,EACvC,KAAK,IAAI;AAGZ,QAAM,0BAA0B,MAAM,qBAAqB,KAAK;AAGhE,QAAM,eAAe,WAAW,QAAQ;AAAA,IACtC;AAAA,IACA,wBAAwB,WAAW,IAC/B,qDACA,wBACG;AAAA,MAAI,CAAC,KAAK,QACT;AAAA,QACE;AAAA,QACA;AAAA,QACA,YAAY,GAAG;AAAA,QACf,cAAc,IAAI,WAAW,IAAI,IAAI,QAAQ,MAAM;AAAA,QACnD,aAAa,IAAI,OAAO;AAAA,QACxB,YAAY,IAAI,MAAM;AAAA,QACtB;AAAA,QACA,IAAI;AAAA,QACJ;AAAA,MACF,EAAE,KAAK,IAAI;AAAA,IACb,EACC,KAAK,MAAM;AAAA;AAAA,EACpB;AAGA,QAAM,aAAa;AAAA,IACjB;AAAA,MACE,MAAM;AAAA,MACN,SAAS;AAAA,IACX;AAAA,IACA,GAAG,SAAS,MAAM,EAAE;AAAA,EACtB;AAEA,MAAI,CAAC,UAAU;AACb,UAAM,IAAI,MAAM,uCAAuC;AAAA,EACzD;AAGA,MAAI,eAAe;AACnB,QAAM,SAAS,WAAW;AAAA,IACxB,GAAG;AAAA,IACH,UAAU;AAAA,EACZ,CAAC;AAGD,mBAAiB,SAAS,OAAO,YAAY;AAC3C,oBAAgB;AAChB,aAAS,YAAY,KAAK;AAAA,EAC5B;AAGA,QAAM,eAAe;AAAA,IACnB,GAAG,IAAI,IAAI,wBAAwB,IAAI,CAAC,QAAQ,IAAI,OAAO,CAAC;AAAA,EAC9D;AAGA,SAAO;AAAA,IACL,UAAU,gBAAgB;AAAA,IAC1B;AAAA,EACF;AACF;","names":[]}
+ {"version":3,"sources":["../../../../../src/utils/AI/askDocQuestion/askDocQuestion.ts"],"sourcesContent":["import { getBlogs, getDocs, getFrequentQuestions } from '@intlayer/docs';\nimport { streamText } from 'ai';\nimport dotenv from 'dotenv';\nimport { readFileSync, writeFileSync } from 'fs';\nimport { getMarkdownMetadata } from 'intlayer';\nimport { OpenAI } from 'openai';\nimport { dirname, join } from 'path';\nimport { fileURLToPath } from 'url';\nimport {\n AIConfig,\n AIOptions,\n AIProvider,\n ChatCompletionRequestMessage,\n} from '../aiSdk';\nimport embeddingsList from './embeddings.json' with { type: 'json' };\n\ntype VectorStoreEl = {\n fileKey: string;\n chunkNumber: number;\n content: string;\n embedding: number[];\n docUrl: string;\n docName: string;\n};\n\n/**\n * Simple in-memory vector store to hold document embeddings and their content.\n * Each entry contains:\n * - fileKey: A unique key identifying the file\n * - chunkNumber: The number of the chunk within the document\n * - content: The chunk content\n * - embedding: The numerical embedding vector for the chunk\n */\nconst vectorStore: VectorStoreEl[] = [];\n\n/*\n * Ask question AI configuration\n */\nconst MODEL: AIOptions['model'] = 'chatgpt-4o-latest'; // Model to use for chat completions\nconst MODEL_TEMPERATURE: AIOptions['temperature'] = 0.1; // Temperature to use for chat completions\nconst MAX_RELEVANT_CHUNKS_NB: number = 20; // Maximum number of relevant chunks to attach to chatGPT context\nconst MIN_RELEVANT_CHUNKS_SIMILARITY: number = 0.42; // Minimum similarity required for a chunk to be considered relevant\n\nexport const aiDefaultOptions: AIOptions = {\n provider: AIProvider.OPENAI,\n model: MODEL,\n temperature: MODEL_TEMPERATURE,\n};\n\n/*\n * Embedding model configuration\n */\nconst EMBEDDING_MODEL: OpenAI.EmbeddingModel = 'text-embedding-3-large'; // Model to use for embedding generation\nconst OVERLAP_TOKENS: number = 200; // Number of tokens to overlap between chunks\nconst MAX_CHUNK_TOKENS: number = 800; // Maximum number of tokens per chunk\nconst CHAR_BY_TOKEN: number = 4.15; // Approximate pessimistically the number of characters per token // Can use `tiktoken` or other tokenizers to calculate it more precisely\nconst MAX_CHARS: number = MAX_CHUNK_TOKENS * CHAR_BY_TOKEN;\nconst OVERLAP_CHARS: number = OVERLAP_TOKENS * CHAR_BY_TOKEN;\n\nconst skipDocEmbeddingsIndex = process.env.SKIP_DOC_EMBEDDINGS_INDEX === 'true';\n\n/**\n * Splits a given text into chunks ensuring each chunk does not exceed MAX_CHARS.\n * @param text - The input text to split.\n * @returns - Array of text chunks.\n */\nconst chunkText = (text: string): string[] => {\n const chunks: string[] = [];\n let start = 0;\n\n while (start < text.length) {\n let end = Math.min(start + MAX_CHARS, text.length);\n\n // Ensure we don't cut words in the middle (find nearest space)\n if (end < text.length) {\n const lastSpace = text.lastIndexOf(' ', end);\n if (lastSpace > start) {\n end = lastSpace;\n }\n }\n\n chunks.push(text.substring(start, end));\n\n // Move start forward correctly\n const nextStart = end - OVERLAP_CHARS;\n if (nextStart <= start) {\n // Prevent infinite loop if overlap is too large\n start = end;\n } else {\n start = nextStart;\n }\n }\n\n return chunks;\n};\n\n/**\n * Generates an embedding for a given text using OpenAI's embedding API.\n * Trims the text if it exceeds the maximum allowed characters.\n *\n * @param text - The input text to generate an embedding for\n * @returns The embedding vector as a number 
array\n */\nconst generateEmbedding = async (text: string): Promise<number[]> => {\n try {\n const openaiClient = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });\n\n const response = await openaiClient.embeddings.create({\n model: EMBEDDING_MODEL,\n input: text,\n });\n\n return response.data[0].embedding;\n } catch (error) {\n console.error('Error generating embedding:', error);\n return [];\n }\n};\n\n/**\n * Calculates the cosine similarity between two vectors.\n * Cosine similarity measures the cosine of the angle between two vectors in an inner product space.\n * Used to determine the similarity between chunks of text.\n *\n * @param vecA - The first vector\n * @param vecB - The second vector\n * @returns The cosine similarity score\n */\nconst cosineSimilarity = (vecA: number[], vecB: number[]): number => {\n // Calculate the dot product of the two vectors\n const dotProduct = vecA.reduce((sum, a, idx) => sum + a * vecB[idx], 0);\n\n // Calculate the magnitude (Euclidean norm) of each vector\n const magnitudeA = Math.sqrt(vecA.reduce((sum, a) => sum + a * a, 0));\n const magnitudeB = Math.sqrt(vecB.reduce((sum, b) => sum + b * b, 0));\n\n // Compute and return the cosine similarity\n return dotProduct / (magnitudeA * magnitudeB);\n};\n\n/**\n * Indexes all Markdown documents by generating embeddings for each chunk and storing them in memory.\n * Also updates the embeddings.json file if new embeddings are generated.\n * Handles cases where files have been updated and chunk counts have changed.\n */\nexport const indexMarkdownFiles = async (): Promise<void> => {\n const env = process.env.NODE_ENV;\n dotenv.config({\n path: [`.env.${env}.local`, `.env.${env}`, '.env.local', '.env'],\n });\n\n // Retrieve documentation and blog posts in English locale\n const frequentQuestions = await getFrequentQuestions();\n const docs = await getDocs();\n const blogs = await getBlogs();\n\n let result: Record<string, number[]> = {}; // Object to hold updated embeddings\n const currentChunkKeys = new Set<string>(); // Track which chunks should exist\n\n const files = { ...docs, ...blogs, ...frequentQuestions }; // Combine docs and blogs into a single object\n\n // Iterate over each file key (identifier) in the combined files\n for await (const fileKey of Object.keys(files)) {\n // Get the metadata of the file\n const fileMetadata = getMarkdownMetadata(\n files[fileKey as keyof typeof files] as string\n );\n\n // Split the document into chunks based on headings\n const fileChunks = chunkText(\n files[fileKey as keyof typeof files] as string\n );\n\n // Check if the number of chunks has changed for this file\n const existingChunksForFile = Object.keys(embeddingsList).filter((key) =>\n key.startsWith(`${fileKey}/chunk_`)\n );\n const currentChunkCount = fileChunks.length;\n const previousChunkCount = existingChunksForFile.length;\n\n let shouldRegenerateFileEmbeddings = false;\n\n // If chunk count differs, we need to regenerate embeddings for this file\n if (currentChunkCount !== previousChunkCount) {\n console.info(\n `File \"${fileKey}\" chunk count changed: ${previousChunkCount} -> ${currentChunkCount}. 
Regenerating embeddings.`\n );\n\n shouldRegenerateFileEmbeddings = !skipDocEmbeddingsIndex;\n }\n\n // Iterate over each chunk within the current file\n for await (const chunkIndex of Object.keys(fileChunks)) {\n const chunkNumber = Number(chunkIndex) + 1; // Chunk number starts at 1\n const chunksNumber = fileChunks.length;\n\n const fileChunk = fileChunks[\n chunkIndex as keyof typeof fileChunks\n ] as string;\n\n const embeddingKeyName = `${fileKey}/chunk_${chunkNumber}`; // Unique key for the chunk\n currentChunkKeys.add(embeddingKeyName); // Track this chunk as current\n\n // Retrieve precomputed embedding if available and file hasn't changed\n const docEmbedding = !shouldRegenerateFileEmbeddings\n ? (embeddingsList[embeddingKeyName as keyof typeof embeddingsList] as\n | number[]\n | undefined)\n : undefined;\n\n let embedding = docEmbedding; // Use existing embedding if available and valid\n\n if (!embedding) {\n embedding = await generateEmbedding(fileChunk); // Generate embedding if not present or file changed\n console.info(`- Generated new embedding: ${embeddingKeyName}`);\n }\n\n // Update the result object with the embedding\n result = { ...result, [embeddingKeyName]: embedding };\n\n // Store the embedding and content in the in-memory vector store\n vectorStore.push({\n fileKey,\n chunkNumber,\n embedding,\n content: fileChunk,\n docUrl: fileMetadata.url,\n docName: fileMetadata.title,\n });\n\n console.info(`- Indexed: ${embeddingKeyName}/${chunksNumber}`);\n }\n }\n\n // Remove outdated embeddings that no longer exist in current files\n const filteredEmbeddings: Record<string, number[]> = {};\n for (const [key, embedding] of Object.entries(embeddingsList)) {\n if (currentChunkKeys.has(key)) {\n // Only keep embeddings for chunks that still exist\n if (!result[key]) {\n filteredEmbeddings[key] = embedding as number[];\n }\n }\n }\n\n // Merge filtered existing embeddings with new ones\n result = { ...filteredEmbeddings, ...result };\n\n try {\n // Compare the newly generated embeddings with existing ones\n if (JSON.stringify(result) !== JSON.stringify(embeddingsList)) {\n // If there are new embeddings or changes, save them to embeddings.json\n writeFileSync(\n 'src/utils/AI/askDocQuestion/embeddings.json',\n JSON.stringify(result, null, 2)\n );\n }\n } catch (error) {\n console.error(error); // Log any errors during the file write process\n }\n};\n\n// Automatically index Markdown files\nindexMarkdownFiles();\n\n/**\n * Searches the indexed documents for the most relevant chunks based on a query.\n * Utilizes cosine similarity to find the closest matching embeddings.\n *\n * @param query - The search query provided by the user\n * @returns An array of the top matching document chunks' content\n */\nexport const searchChunkReference = async (\n query: string,\n maxResults: number = MAX_RELEVANT_CHUNKS_NB,\n minSimilarity: number = MIN_RELEVANT_CHUNKS_SIMILARITY\n): Promise<VectorStoreEl[]> => {\n // Generate an embedding for the user's query\n const queryEmbedding = await generateEmbedding(query);\n\n // Calculate similarity scores between the query embedding and each document's embedding\n const selection = vectorStore\n .map((chunk) => ({\n ...chunk,\n similarity: cosineSimilarity(queryEmbedding, chunk.embedding), // Add similarity score to each doc\n }))\n .filter((chunk) => chunk.similarity > minSimilarity) // Filter out documents with low similarity scores\n .sort((a, b) => b.similarity - a.similarity) // Sort documents by highest similarity first\n .slice(0, 
maxResults); // Select the top 6 most similar documents\n\n const orderedDocKeys = new Set(selection.map((chunk) => chunk.fileKey));\n\n const orderedVectorStore = vectorStore.sort((a, b) =>\n orderedDocKeys.has(a.fileKey) ? -1 : 1\n );\n\n const results = orderedVectorStore.filter((chunk) =>\n selection.some(\n (v) => v.fileKey === chunk.fileKey && v.chunkNumber === chunk.chunkNumber\n )\n );\n\n // Return the content of the top matching documents\n return results;\n};\n\n/**\n * Reads the content of a file synchronously.\n *\n * @function\n * @param relativeFilePath - The relative or absolute path to the target file.\n * @returns The entire contents of the specified file as a UTF-8 encoded string.\n */\nconst getFileContent = (relativeFilePath: string): string => {\n const __dirname = dirname(fileURLToPath(import.meta.url));\n const absolutePath = join(__dirname, relativeFilePath);\n const fileContent = readFileSync(absolutePath, 'utf-8');\n return fileContent;\n};\n\nconst CHAT_GPT_PROMPT = getFileContent('./PROMPT.md');\n\n// Initial prompt configuration for the chatbot\nexport const initPrompt: ChatCompletionRequestMessage = {\n role: 'system',\n content: CHAT_GPT_PROMPT,\n};\n\nexport type AskDocQuestionResult = {\n response: string;\n relatedFiles: string[];\n};\n\nexport type AskDocQuestionOptions = {\n onMessage?: (chunk: string) => void;\n};\n\n/**\n * Handles the \"Ask a question\" endpoint in an Express.js route.\n * Processes user messages, retrieves relevant documents, and interacts with AI models to generate responses.\n *\n * @param messages - An array of chat messages from the user and assistant\n * @returns The assistant's response as a string\n */\nexport const askDocQuestion = async (\n messages: ChatCompletionRequestMessage[],\n aiConfig: AIConfig,\n options?: AskDocQuestionOptions\n): Promise<AskDocQuestionResult> => {\n // Format the user's question to keep only the relevant keywords\n const query = messages\n .filter((message) => message.role === 'user')\n .map((message) => `- ${message.content}`)\n .join('\\n');\n\n // 1) Find relevant documents based on the user's question\n const relevantFilesReferences = await searchChunkReference(query);\n\n // 2) Integrate the relevant documents into the initial system prompt\n const systemPrompt = initPrompt.content.replace(\n '{{relevantFilesReferences}}',\n relevantFilesReferences.length === 0\n ? 'Not relevant file found related to the question.'\n : relevantFilesReferences\n .map((doc, idx) =>\n [\n '-----',\n '---',\n `chunkId: ${idx}`,\n `docChunk: \"${doc.chunkNumber}/${doc.fileKey.length}\"`,\n `docName: \"${doc.docName}\"`,\n `docUrl: \"${doc.docUrl}\"`,\n `---`,\n doc.content,\n `-----`,\n ].join('\\n')\n )\n .join('\\n\\n') // Insert relevant docs into the prompt\n );\n\n // Format messages for AI SDK\n const aiMessages = [\n {\n role: 'system' as const,\n content: systemPrompt,\n },\n ...messages.slice(-8),\n ];\n\n if (!aiConfig) {\n throw new Error('Failed to initialize AI configuration');\n }\n\n // 3) Use the AI SDK to stream the response\n let fullResponse = '';\n const stream = streamText({\n ...aiConfig,\n messages: aiMessages,\n });\n\n // Process the stream\n for await (const chunk of stream.textStream) {\n fullResponse += chunk;\n options?.onMessage?.(chunk);\n }\n\n // 4) Extract unique related files\n const relatedFiles = [\n ...new Set(relevantFilesReferences.map((doc) => doc.fileKey)),\n ];\n\n // 5) Return the assistant's response to the user\n return {\n response: fullResponse ?? 
'Error: No result found',\n relatedFiles,\n };\n};\n"],"mappings":"AAAA,SAAS,UAAU,SAAS,4BAA4B;AACxD,SAAS,kBAAkB;AAC3B,OAAO,YAAY;AACnB,SAAS,cAAc,qBAAqB;AAC5C,SAAS,2BAA2B;AACpC,SAAS,cAAc;AACvB,SAAS,SAAS,YAAY;AAC9B,SAAS,qBAAqB;AAC9B;AAAA,EAGE;AAAA,OAEK;AACP,OAAO,oBAAoB,oBAAoB,KAAK,EAAE,MAAM,OAAO;AAmBnE,MAAM,cAA+B,CAAC;AAKtC,MAAM,QAA4B;AAClC,MAAM,oBAA8C;AACpD,MAAM,yBAAiC;AACvC,MAAM,iCAAyC;AAExC,MAAM,mBAA8B;AAAA,EACzC,UAAU,WAAW;AAAA,EACrB,OAAO;AAAA,EACP,aAAa;AACf;AAKA,MAAM,kBAAyC;AAC/C,MAAM,iBAAyB;AAC/B,MAAM,mBAA2B;AACjC,MAAM,gBAAwB;AAC9B,MAAM,YAAoB,mBAAmB;AAC7C,MAAM,gBAAwB,iBAAiB;AAE/C,MAAM,yBAAyB,QAAQ,IAAI,8BAA8B;AAOzE,MAAM,YAAY,CAAC,SAA2B;AAC5C,QAAM,SAAmB,CAAC;AAC1B,MAAI,QAAQ;AAEZ,SAAO,QAAQ,KAAK,QAAQ;AAC1B,QAAI,MAAM,KAAK,IAAI,QAAQ,WAAW,KAAK,MAAM;AAGjD,QAAI,MAAM,KAAK,QAAQ;AACrB,YAAM,YAAY,KAAK,YAAY,KAAK,GAAG;AAC3C,UAAI,YAAY,OAAO;AACrB,cAAM;AAAA,MACR;AAAA,IACF;AAEA,WAAO,KAAK,KAAK,UAAU,OAAO,GAAG,CAAC;AAGtC,UAAM,YAAY,MAAM;AACxB,QAAI,aAAa,OAAO;AAEtB,cAAQ;AAAA,IACV,OAAO;AACL,cAAQ;AAAA,IACV;AAAA,EACF;AAEA,SAAO;AACT;AASA,MAAM,oBAAoB,OAAO,SAAoC;AACnE,MAAI;AACF,UAAM,eAAe,IAAI,OAAO,EAAE,QAAQ,QAAQ,IAAI,eAAe,CAAC;AAEtE,UAAM,WAAW,MAAM,aAAa,WAAW,OAAO;AAAA,MACpD,OAAO;AAAA,MACP,OAAO;AAAA,IACT,CAAC;AAED,WAAO,SAAS,KAAK,CAAC,EAAE;AAAA,EAC1B,SAAS,OAAO;AACd,YAAQ,MAAM,+BAA+B,KAAK;AAClD,WAAO,CAAC;AAAA,EACV;AACF;AAWA,MAAM,mBAAmB,CAAC,MAAgB,SAA2B;AAEnE,QAAM,aAAa,KAAK,OAAO,CAAC,KAAK,GAAG,QAAQ,MAAM,IAAI,KAAK,GAAG,GAAG,CAAC;AAGtE,QAAM,aAAa,KAAK,KAAK,KAAK,OAAO,CAAC,KAAK,MAAM,MAAM,IAAI,GAAG,CAAC,CAAC;AACpE,QAAM,aAAa,KAAK,KAAK,KAAK,OAAO,CAAC,KAAK,MAAM,MAAM,IAAI,GAAG,CAAC,CAAC;AAGpE,SAAO,cAAc,aAAa;AACpC;AAOO,MAAM,qBAAqB,YAA2B;AAC3D,QAAM,MAAM,QAAQ,IAAI;AACxB,SAAO,OAAO;AAAA,IACZ,MAAM,CAAC,QAAQ,GAAG,UAAU,QAAQ,GAAG,IAAI,cAAc,MAAM;AAAA,EACjE,CAAC;AAGD,QAAM,oBAAoB,MAAM,qBAAqB;AACrD,QAAM,OAAO,MAAM,QAAQ;AAC3B,QAAM,QAAQ,MAAM,SAAS;AAE7B,MAAI,SAAmC,CAAC;AACxC,QAAM,mBAAmB,oBAAI,IAAY;AAEzC,QAAM,QAAQ,EAAE,GAAG,MAAM,GAAG,OAAO,GAAG,kBAAkB;AAGxD,mBAAiB,WAAW,OAAO,KAAK,KAAK,GAAG;AAE9C,UAAM,eAAe;AAAA,MACnB,MAAM,OAA6B;AAAA,IACrC;AAGA,UAAM,aAAa;AAAA,MACjB,MAAM,OAA6B;AAAA,IACrC;AAGA,UAAM,wBAAwB,OAAO,KAAK,cAAc,EAAE;AAAA,MAAO,CAAC,QAChE,IAAI,WAAW,GAAG,OAAO,SAAS;AAAA,IACpC;AACA,UAAM,oBAAoB,WAAW;AACrC,UAAM,qBAAqB,sBAAsB;AAEjD,QAAI,iCAAiC;AAGrC,QAAI,sBAAsB,oBAAoB;AAC5C,cAAQ;AAAA,QACN,SAAS,OAAO,0BAA0B,kBAAkB,OAAO,iBAAiB;AAAA,MACtF;AAEA,uCAAiC,CAAC;AAAA,IACpC;AAGA,qBAAiB,cAAc,OAAO,KAAK,UAAU,GAAG;AACtD,YAAM,cAAc,OAAO,UAAU,IAAI;AACzC,YAAM,eAAe,WAAW;AAEhC,YAAM,YAAY,WAChB,UACF;AAEA,YAAM,mBAAmB,GAAG,OAAO,UAAU,WAAW;AACxD,uBAAiB,IAAI,gBAAgB;AAGrC,YAAM,eAAe,CAAC,iCACjB,eAAe,gBAA+C,IAG/D;AAEJ,UAAI,YAAY;AAEhB,UAAI,CAAC,WAAW;AACd,oBAAY,MAAM,kBAAkB,SAAS;AAC7C,gBAAQ,KAAK,8BAA8B,gBAAgB,EAAE;AAAA,MAC/D;AAGA,eAAS,EAAE,GAAG,QAAQ,CAAC,gBAAgB,GAAG,UAAU;AAGpD,kBAAY,KAAK;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,QACA,SAAS;AAAA,QACT,QAAQ,aAAa;AAAA,QACrB,SAAS,aAAa;AAAA,MACxB,CAAC;AAED,cAAQ,KAAK,cAAc,gBAAgB,IAAI,YAAY,EAAE;AAAA,IAC/D;AAAA,EACF;AAGA,QAAM,qBAA+C,CAAC;AACtD,aAAW,CAAC,KAAK,SAAS,KAAK,OAAO,QAAQ,cAAc,GAAG;AAC7D,QAAI,iBAAiB,IAAI,GAAG,GAAG;AAE7B,UAAI,CAAC,OAAO,GAAG,GAAG;AAChB,2BAAmB,GAAG,IAAI;AAAA,MAC5B;AAAA,IACF;AAAA,EACF;AAGA,WAAS,EAAE,GAAG,oBAAoB,GAAG,OAAO;AAE5C,MAAI;AAEF,QAAI,KAAK,UAAU,MAAM,MAAM,KAAK,UAAU,cAAc,GAAG;AAE7D;AAAA,QACE;AAAA,QACA,KAAK,UAAU,QAAQ,MAAM,CAAC;AAAA,MAChC;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,YAAQ,MAAM,KAAK;AAAA,EACrB;AACF;AAGA,mBAAmB;AASZ,MAAM,uBAAuB,OAClC,OACA,aAAqB,wBACrB,gBAAwB,mCACK;AAE7B,QAAM,iBAAiB,MAAM,kBAAkB,KAAK;AAGpD,QAAM,YAAY,YACf,IAAI,CAAC,WAAW;AAAA,IACf,GAAG;AAAA,IACH,YAAY,iBAAiB,gBAAgB,MAAM,SAAS;AAAA;AAAA,EAC9D,EAAE,EACD,OAAO,CAAC,UAAU,MAAM,a
AAa,aAAa,EAClD,KAAK,CAAC,GAAG,MAAM,EAAE,aAAa,EAAE,UAAU,EAC1C,MAAM,GAAG,UAAU;AAEtB,QAAM,iBAAiB,IAAI,IAAI,UAAU,IAAI,CAAC,UAAU,MAAM,OAAO,CAAC;AAEtE,QAAM,qBAAqB,YAAY;AAAA,IAAK,CAAC,GAAG,MAC9C,eAAe,IAAI,EAAE,OAAO,IAAI,KAAK;AAAA,EACvC;AAEA,QAAM,UAAU,mBAAmB;AAAA,IAAO,CAAC,UACzC,UAAU;AAAA,MACR,CAAC,MAAM,EAAE,YAAY,MAAM,WAAW,EAAE,gBAAgB,MAAM;AAAA,IAChE;AAAA,EACF;AAGA,SAAO;AACT;AASA,MAAM,iBAAiB,CAAC,qBAAqC;AAC3D,QAAM,YAAY,QAAQ,cAAc,YAAY,GAAG,CAAC;AACxD,QAAM,eAAe,KAAK,WAAW,gBAAgB;AACrD,QAAM,cAAc,aAAa,cAAc,OAAO;AACtD,SAAO;AACT;AAEA,MAAM,kBAAkB,eAAe,aAAa;AAG7C,MAAM,aAA2C;AAAA,EACtD,MAAM;AAAA,EACN,SAAS;AACX;AAkBO,MAAM,iBAAiB,OAC5B,UACA,UACA,YACkC;AAElC,QAAM,QAAQ,SACX,OAAO,CAAC,YAAY,QAAQ,SAAS,MAAM,EAC3C,IAAI,CAAC,YAAY,KAAK,QAAQ,OAAO,EAAE,EACvC,KAAK,IAAI;AAGZ,QAAM,0BAA0B,MAAM,qBAAqB,KAAK;AAGhE,QAAM,eAAe,WAAW,QAAQ;AAAA,IACtC;AAAA,IACA,wBAAwB,WAAW,IAC/B,qDACA,wBACG;AAAA,MAAI,CAAC,KAAK,QACT;AAAA,QACE;AAAA,QACA;AAAA,QACA,YAAY,GAAG;AAAA,QACf,cAAc,IAAI,WAAW,IAAI,IAAI,QAAQ,MAAM;AAAA,QACnD,aAAa,IAAI,OAAO;AAAA,QACxB,YAAY,IAAI,MAAM;AAAA,QACtB;AAAA,QACA,IAAI;AAAA,QACJ;AAAA,MACF,EAAE,KAAK,IAAI;AAAA,IACb,EACC,KAAK,MAAM;AAAA;AAAA,EACpB;AAGA,QAAM,aAAa;AAAA,IACjB;AAAA,MACE,MAAM;AAAA,MACN,SAAS;AAAA,IACX;AAAA,IACA,GAAG,SAAS,MAAM,EAAE;AAAA,EACtB;AAEA,MAAI,CAAC,UAAU;AACb,UAAM,IAAI,MAAM,uCAAuC;AAAA,EACzD;AAGA,MAAI,eAAe;AACnB,QAAM,SAAS,WAAW;AAAA,IACxB,GAAG;AAAA,IACH,UAAU;AAAA,EACZ,CAAC;AAGD,mBAAiB,SAAS,OAAO,YAAY;AAC3C,oBAAgB;AAChB,aAAS,YAAY,KAAK;AAAA,EAC5B;AAGA,QAAM,eAAe;AAAA,IACnB,GAAG,IAAI,IAAI,wBAAwB,IAAI,CAAC,QAAQ,IAAI,OAAO,CAAC;AAAA,EAC9D;AAGA,SAAO;AAAA,IACL,UAAU,gBAAgB;AAAA,IAC1B;AAAA,EACF;AACF;","names":[]}