@intlayer/backend 5.4.1 → 5.5.0-canary.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/controllers/ai.controller.cjs +60 -52
- package/dist/cjs/controllers/ai.controller.cjs.map +1 -1
- package/dist/cjs/controllers/dictionary.controller.cjs +5 -0
- package/dist/cjs/controllers/dictionary.controller.cjs.map +1 -1
- package/dist/cjs/export.cjs +4 -2
- package/dist/cjs/export.cjs.map +1 -1
- package/dist/cjs/routes/ai.routes.cjs +6 -0
- package/dist/cjs/routes/ai.routes.cjs.map +1 -1
- package/dist/cjs/services/dictionary.service.cjs +6 -1
- package/dist/cjs/services/dictionary.service.cjs.map +1 -1
- package/dist/cjs/services/sessionAuth.service.cjs +7 -7
- package/dist/cjs/services/sessionAuth.service.cjs.map +1 -1
- package/dist/cjs/utils/AI/aiSdk.cjs +140 -0
- package/dist/cjs/utils/AI/aiSdk.cjs.map +1 -0
- package/dist/cjs/utils/AI/askDocQuestion/PROMPT.md +2 -1
- package/dist/cjs/utils/AI/askDocQuestion/askDocQuestion.cjs +32 -27
- package/dist/cjs/utils/AI/askDocQuestion/askDocQuestion.cjs.map +1 -1
- package/dist/cjs/utils/AI/askDocQuestion/embeddings.json +7374 -0
- package/dist/cjs/utils/AI/auditDictionary/PROMPT.md +4 -0
- package/dist/cjs/utils/AI/auditDictionary/index.cjs +36 -43
- package/dist/cjs/utils/AI/auditDictionary/index.cjs.map +1 -1
- package/dist/cjs/utils/AI/auditDictionaryField/PROMPT.md +4 -0
- package/dist/cjs/utils/AI/auditDictionaryField/index.cjs +34 -28
- package/dist/cjs/utils/AI/auditDictionaryField/index.cjs.map +1 -1
- package/dist/cjs/utils/AI/auditDictionaryMetadata/PROMPT.md +4 -0
- package/dist/cjs/utils/AI/auditDictionaryMetadata/index.cjs +23 -23
- package/dist/cjs/utils/AI/auditDictionaryMetadata/index.cjs.map +1 -1
- package/dist/cjs/utils/{auditTag → AI/auditTag}/PROMPT.md +4 -0
- package/dist/cjs/utils/{auditTag → AI/auditTag}/index.cjs +27 -27
- package/dist/cjs/utils/AI/auditTag/index.cjs.map +1 -0
- package/dist/cjs/utils/AI/autocomplete/PROMPT.md +4 -0
- package/dist/cjs/utils/AI/autocomplete/index.cjs +25 -22
- package/dist/cjs/utils/AI/autocomplete/index.cjs.map +1 -1
- package/dist/cjs/utils/AI/translateJSON/PROMPT.md +53 -0
- package/dist/cjs/utils/AI/translateJSON/index.cjs +106 -0
- package/dist/cjs/utils/AI/translateJSON/index.cjs.map +1 -0
- package/dist/cjs/utils/extractJSON.cjs +52 -0
- package/dist/cjs/utils/extractJSON.cjs.map +1 -0
- package/dist/esm/controllers/ai.controller.mjs +58 -51
- package/dist/esm/controllers/ai.controller.mjs.map +1 -1
- package/dist/esm/controllers/dictionary.controller.mjs +5 -0
- package/dist/esm/controllers/dictionary.controller.mjs.map +1 -1
- package/dist/esm/export.mjs +3 -2
- package/dist/esm/export.mjs.map +1 -1
- package/dist/esm/routes/ai.routes.mjs +8 -1
- package/dist/esm/routes/ai.routes.mjs.map +1 -1
- package/dist/esm/services/dictionary.service.mjs +6 -1
- package/dist/esm/services/dictionary.service.mjs.map +1 -1
- package/dist/esm/services/sessionAuth.service.mjs +2 -2
- package/dist/esm/services/sessionAuth.service.mjs.map +1 -1
- package/dist/esm/utils/AI/aiSdk.mjs +115 -0
- package/dist/esm/utils/AI/aiSdk.mjs.map +1 -0
- package/dist/esm/utils/AI/askDocQuestion/PROMPT.md +2 -1
- package/dist/esm/utils/AI/askDocQuestion/askDocQuestion.mjs +32 -27
- package/dist/esm/utils/AI/askDocQuestion/askDocQuestion.mjs.map +1 -1
- package/dist/esm/utils/AI/askDocQuestion/embeddings.json +7374 -0
- package/dist/esm/utils/AI/auditDictionary/PROMPT.md +4 -0
- package/dist/esm/utils/AI/auditDictionary/index.mjs +36 -43
- package/dist/esm/utils/AI/auditDictionary/index.mjs.map +1 -1
- package/dist/esm/utils/AI/auditDictionaryField/PROMPT.md +4 -0
- package/dist/esm/utils/AI/auditDictionaryField/index.mjs +34 -28
- package/dist/esm/utils/AI/auditDictionaryField/index.mjs.map +1 -1
- package/dist/esm/utils/AI/auditDictionaryMetadata/PROMPT.md +4 -0
- package/dist/esm/utils/AI/auditDictionaryMetadata/index.mjs +23 -23
- package/dist/esm/utils/AI/auditDictionaryMetadata/index.mjs.map +1 -1
- package/dist/esm/utils/{auditTag → AI/auditTag}/PROMPT.md +4 -0
- package/dist/esm/utils/AI/auditTag/index.mjs +49 -0
- package/dist/esm/utils/AI/auditTag/index.mjs.map +1 -0
- package/dist/esm/utils/AI/autocomplete/PROMPT.md +4 -0
- package/dist/esm/utils/AI/autocomplete/index.mjs +25 -22
- package/dist/esm/utils/AI/autocomplete/index.mjs.map +1 -1
- package/dist/esm/utils/AI/translateJSON/PROMPT.md +53 -0
- package/dist/esm/utils/AI/translateJSON/index.mjs +81 -0
- package/dist/esm/utils/AI/translateJSON/index.mjs.map +1 -0
- package/dist/esm/utils/extractJSON.mjs +28 -0
- package/dist/esm/utils/extractJSON.mjs.map +1 -0
- package/dist/types/controllers/ai.controller.d.ts +12 -21
- package/dist/types/controllers/ai.controller.d.ts.map +1 -1
- package/dist/types/controllers/dictionary.controller.d.ts.map +1 -1
- package/dist/types/export.d.ts +12 -11
- package/dist/types/export.d.ts.map +1 -1
- package/dist/types/routes/ai.routes.d.ts +5 -0
- package/dist/types/routes/ai.routes.d.ts.map +1 -1
- package/dist/types/services/dictionary.service.d.ts +2 -2
- package/dist/types/services/dictionary.service.d.ts.map +1 -1
- package/dist/types/services/sessionAuth.service.d.ts +2 -2
- package/dist/types/services/sessionAuth.service.d.ts.map +1 -1
- package/dist/types/utils/AI/aiSdk.d.ts +41 -0
- package/dist/types/utils/AI/aiSdk.d.ts.map +1 -0
- package/dist/types/utils/AI/askDocQuestion/askDocQuestion.d.ts +1 -1
- package/dist/types/utils/AI/askDocQuestion/askDocQuestion.d.ts.map +1 -1
- package/dist/types/utils/AI/auditDictionary/index.d.ts +10 -15
- package/dist/types/utils/AI/auditDictionary/index.d.ts.map +1 -1
- package/dist/types/utils/AI/auditDictionaryField/index.d.ts +9 -14
- package/dist/types/utils/AI/auditDictionaryField/index.d.ts.map +1 -1
- package/dist/types/utils/AI/auditDictionaryMetadata/index.d.ts +7 -13
- package/dist/types/utils/AI/auditDictionaryMetadata/index.d.ts.map +1 -1
- package/dist/types/utils/AI/auditTag/index.d.ts +18 -0
- package/dist/types/utils/AI/auditTag/index.d.ts.map +1 -0
- package/dist/types/utils/AI/autocomplete/index.d.ts +6 -12
- package/dist/types/utils/AI/autocomplete/index.d.ts.map +1 -1
- package/dist/types/utils/AI/translateJSON/index.d.ts +24 -0
- package/dist/types/utils/AI/translateJSON/index.d.ts.map +1 -0
- package/dist/types/utils/extractJSON.d.ts +6 -0
- package/dist/types/utils/extractJSON.d.ts.map +1 -0
- package/package.json +15 -11
- package/dist/cjs/utils/auditTag/index.cjs.map +0 -1
- package/dist/esm/utils/auditTag/index.mjs +0 -49
- package/dist/esm/utils/auditTag/index.mjs.map +0 -1
- package/dist/types/utils/auditTag/index.d.ts +0 -30
- package/dist/types/utils/auditTag/index.d.ts.map +0 -1
package/dist/cjs/services/dictionary.service.cjs.map
@@ -1 +1 @@
- [single-line minified source map for src/services/dictionary.service.ts; replaced in full]
+ [single-line minified source map for src/services/dictionary.service.ts; replaced in full]

The embedded sourcesContent shows the source-level changes behind this map: type imports are hoisted to the top of the file, the mongoose import now pulls in the `Types` runtime value (`import { Types, type ObjectId } from 'mongoose'`), `getDictionaryById` coerces string IDs to ObjectId before its `$match` stage, and `console.log` statements were added to `getDictionaryById` and `deleteDictionaryById`.
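The ID coercion is the functional change here: Mongoose aggregation pipelines do not cast string IDs the way `Model.findById` does, so a string `_id` would silently match nothing. A condensed sketch of the pattern as it appears in the embedded source (the `toObjectId` helper name is introduced here for illustration):

```ts
import { Types, type ObjectId } from 'mongoose';

// `$match` in an aggregation compares _id by value without casting,
// so a string ID must be converted to an ObjectId explicitly.
const toObjectId = (dictionaryId: string | ObjectId) =>
  Types.ObjectId.isValid(dictionaryId as string)
    ? new Types.ObjectId(dictionaryId as string)
    : dictionaryId;

// Used as: { $match: { _id: toObjectId(dictionaryId) } }
```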
package/dist/cjs/services/sessionAuth.service.cjs
@@ -50,13 +50,13 @@ __export(sessionAuth_service_exports, {
   updateUserProvider: () => updateUserProvider
 });
 module.exports = __toCommonJS(sessionAuth_service_exports);
-var import_crypto = __toESM(require("crypto"), 1);
 var import_logger = require('./../logger/index.cjs');
 var import_cookies = require('./../utils/cookies.cjs');
 var import_ensureMongoDocumentToObject = require('./../utils/ensureMongoDocumentToObject.cjs');
 var import_errors = require('./../utils/errors/index.cjs');
 var import_user = require('./../utils/mapper/user.cjs');
-var import_bcrypt = require("bcrypt");
+var import_bcryptjs = require("bcryptjs");
+var import_crypto = __toESM(require("crypto"), 1);
 var import_express_intlayer = require("express-intlayer");
 var import_jsonwebtoken = __toESM(require("jsonwebtoken"), 1);
 var import_uuid = require("uuid");
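All seven `-`/`+` pairs in this file come from the same dependency swap: the native `bcrypt` addon is replaced by the pure-JavaScript `bcryptjs`, which mirrors the `genSalt`/`hash`/`compare` API (promise-based when no callback is given), so only the import binding changes at each call site. A minimal sketch of the shared API, assuming bcryptjs's default of 10 salt rounds:

```ts
import { compare, genSalt, hash } from 'bcryptjs';

const demo = async () => {
  // Hash a password with a freshly generated salt (10 rounds by default).
  const passwordHash = await hash('hunter2', await genSalt());

  // Verify a login attempt against the stored hash.
  console.log(await compare('hunter2', passwordHash)); // true
};

demo();
```

Being pure JavaScript, bcryptjs avoids a native build step at install time, at the cost of slower hashing than the C++ `bcrypt` binding.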
@@ -193,7 +193,7 @@ const resetUserPassword = async (userId, secret, newPassword) => {
     throw new import_errors.GenericError("USER_PROVIDER_SECRET_NOT_VALID", { userId });
   }
   const updatedUser = await updateUserProvider(userId, "email", {
-    passwordHash: await (0, import_bcrypt.hash)(newPassword, await (0, import_bcrypt.genSalt)()),
+    passwordHash: await (0, import_bcryptjs.hash)(newPassword, await (0, import_bcryptjs.genSalt)()),
     secret
   });
   return updatedUser;
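Immediately above this hunk (visible in the map's embedded source), `resetUserPassword` validates the reset secret with `crypto.timingSafeEqual` on two Buffers rather than `===`, keeping the comparison constant-time. A sketch of that pattern; the length guard is an addition here for illustration, since `timingSafeEqual` throws when the buffers differ in length:

```ts
import { timingSafeEqual } from 'crypto';

// Constant-time secret comparison. timingSafeEqual throws on
// buffers of unequal length, so check lengths first.
const secretsMatch = (expected: string, received: string): boolean => {
  const a = Buffer.from(expected);
  const b = Buffer.from(received);
  return a.length === b.length && timingSafeEqual(a, b);
};
```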
@@ -311,7 +311,7 @@ const testUserPassword = async (email, password) => {
     };
     return { user: null, error: (0, import_express_intlayer.t)(errorMessages) };
   }
-  const isMatch = await (0, import_bcrypt.compare)(
+  const isMatch = await (0, import_bcryptjs.compare)(
     password,
     userEmailPasswordProvider.passwordHash
   );
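A related detail from `testUserPassword`'s embedded source: when the comparison fails, the function waits a random 1000–2000 ms before returning, which slows brute-force attempts and makes response timing less informative. The pattern, condensed (it runs inside the async function body):

```ts
// Await a random time to prevent brute force attacks
const randomNumber = Math.floor(Math.random() * 1000) + 1000;
await new Promise((resolve) => setTimeout(resolve, randomNumber));
```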
@@ -334,7 +334,7 @@ const hashUserPassword = async (userWithPasswordNotHashed) => {
     throw new import_errors.GenericError("USER_PASSWORD_NOT_DEFINED", { user });
   }
   const userProvider = formatUserProviderUpdate("email", user, {
-    passwordHash: await (0, import_bcrypt.hash)(password, await (0, import_bcrypt.genSalt)()),
+    passwordHash: await (0, import_bcryptjs.hash)(password, await (0, import_bcryptjs.genSalt)()),
     secret: generateSecret(35)
   });
   return { ...user, provider: userProvider };
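The `generateSecret(35)` call paired with the hash is defined in the same file (per the embedded source): it builds a secret by sampling an alphanumeric charset with `Math.random`. Reproduced as a standalone sketch; note this is not a CSPRNG, which is worth knowing when reading the reset-token flow:

```ts
// Generate a random alphanumeric secret of the given length.
// Math.random is not cryptographically secure; crypto.randomBytes
// would be the stricter choice for tokens.
const generateSecret = (length: number): string => {
  const characters =
    'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
  return Array.from({ length }, () =>
    characters.charAt(Math.floor(Math.random() * characters.length))
  ).join('');
};
```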
@@ -345,7 +345,7 @@ const changeUserPassword = async (userId, newPassword) => {
     throw new import_errors.GenericError("USER_NOT_FOUND", { userId });
   }
   const updatedUser = await updateUserProvider(userId, "email", {
-    passwordHash: await (0, import_bcrypt.hash)(newPassword, await (0, import_bcrypt.genSalt)())
+    passwordHash: await (0, import_bcryptjs.hash)(newPassword, await (0, import_bcryptjs.genSalt)())
   });
   return updatedUser;
 };
@@ -355,7 +355,7 @@ const resetPassword = async (userId, password) => {
     throw new import_errors.GenericError("USER_NOT_FOUND", { userId });
   }
   const updatedUser = await updateUserProvider(userId, "email", {
-    passwordHash: await (0, import_bcrypt.hash)(password, await (0, import_bcrypt.genSalt)())
+    passwordHash: await (0, import_bcryptjs.hash)(password, await (0, import_bcryptjs.genSalt)())
   });
   return updatedUser;
 };
package/dist/cjs/services/sessionAuth.service.cjs.map
@@ -1 +1 @@
- [single-line minified source map for src/services/sessionAuth.service.ts; the extract is truncated partway through this line, and the replacement map line is not included in this diff view]

The old map's embedded sourcesContent confirms the swap above: the previous source imported `{ hash, genSalt, compare }` from 'bcrypt', while the surrounding auth logic (session handling, cookie management, JWT signing, provider management) appears unchanged.
CT,UAAM,IAAI,2BAAa,kBAAkB,EAAE,OAAO,CAAC;AAAA,EACrD;AAEA,QAAM,eAAe,KAAK,UAAU;AAAA,IAClC,CAAC,eACE,WAAW,aAAa,YAAY,CAAC,qBACrC,qBACE,WAAqC,sBACpC;AAAA,EACR;AAEA,SAAQ,gBAAoC;AAC9C;AASO,MAAM,2BAA2B,CAGtC,UACA,MACA,mBACqB;AACrB,QAAM,mBAAmC,gEAEvC,KAAK,YAAY,CAAC,CAAC;AAErB,QAAM,uBAAuB,cAAc;AAAA,IACzC,CAAC,eAAe,WAAW,aAAa;AAAA,EAC1C;AAEA,MAAI;AAEJ,MAAI,sBAAsB;AACxB,UAAM,iBACJ,KAAK,UAAU,OAAO,CAAC,MAAM,EAAE,aAAa,QAAQ,KAAK,CAAC;AAE5D,sBAAkB;AAAA,MAChB,GAAG;AAAA,MACH,EAAE,GAAG,sBAAsB,GAAG,gBAAgB,SAAS;AAAA,IACzD;AAAA,EACF,OAAO;AACL,sBAAkB;AAAA,MAChB,GAAI,KAAK,YAAY,CAAC;AAAA,MACtB,EAAE,GAAG,gBAAgB,SAAS;AAAA,IAChC;AAAA,EACF;AAEA,SAAO;AACT;AASO,MAAM,qBAAqB,OAGhC,QACA,UACA,mBAC0B;AAC1B,QAAM,OAAO,UAAM,0BAAY,MAAM;AAErC,MAAI,CAAC,MAAM;AACT,UAAM,IAAI,2BAAa,kBAAkB,EAAE,OAAO,CAAC;AAAA,EACrD;AAEA,QAAM,4BAA4B;AAAA,IAChC;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAEA,QAAM,cAA4B,UAAM,6BAAe,QAAQ;AAAA,IAC7D,UAAU;AAAA,EACZ,CAAC;AAED,uBAAO;AAAA,IACL,uCAAuC,YAAY,IAAI,SAAS,OAAO,YAAY,GAAG,CAAC,gBAAgB,QAAQ;AAAA,EACjH;AAEA,SAAO;AACT;AAQO,MAAM,kBAAkB,OAC7B,QACA,aAC0B;AAC1B,QAAM,OAAO,UAAM,0BAAY,MAAM;AAErC,MAAI,CAAC,MAAM;AACT,UAAM,IAAI,2BAAa,kBAAkB,EAAE,OAAO,CAAC;AAAA,EACrD;AAEA,QAAM,mBAAmB,MAAM,gBAAgB,QAAQ,SAAS,QAAQ;AAExE,MAAI,kBAAkB;AACpB,UAAM,IAAI,2BAAa,gCAAgC;AAAA,MACrD;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,kBAAkB,CAAC,GAAI,KAAK,YAAY,CAAC,GAAI,QAAQ;AAE3D,QAAM,cAAc,UAAM,6BAAe,QAAQ;AAAA,IAC/C,UAAU;AAAA,EACZ,CAAC;AAED,uBAAO;AAAA,IACL,qCAAqC,YAAY,IAAI,SAAS,OAAO,YAAY,GAAG,CAAC,gBAAgB,SAAS,QAAQ;AAAA,EACxH;AAEA,SAAO;AACT;AAQO,MAAM,qBAAqB,OAChC,QACA,UACA,sBACG;AACH,QAAM,OAAO,UAAM,0BAAY,MAAM;AAErC,MAAI,CAAC,MAAM;AACT,UAAM,IAAI,2BAAa,kBAAkB,EAAE,OAAO,CAAC;AAAA,EACrD;AAEA,QAAM,mBAAmB,MAAM;AAAA,IAC7B;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAEA,MAAI,CAAC,kBAAkB;AACrB,UAAM,IAAI,2BAAa,2BAA2B;AAAA,MAChD;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,kBAAkB,KAAK,UAAU;AAAA,IACrC,CAAC,MACC,EAAE,aAAa,aACd,CAAC,qBACC,qBACE,EAA4B,sBAAsB;AAAA,EAC3D;AAEA,SAAO,UAAM,6BAAe,QAAQ;AAAA,IAClC,UAAU;AAAA,EACZ,CAAC;AACH;AAUO,MAAM,mBAAmB,OAC9B,OACA,aACoC;AACpC,QAAM,OAAO,UAAM,6BAAe,KAAK;AAEvC,MAAI,CAAC,MAAM;AACT,UAAM,gBAAgB;AAAA,MACpB,IAAI,oBAAoB,KAAK;AAAA,MAC7B,IAAI,+BAA4B,KAAK;AAAA,MACrC,IAAI,2BAA2B,KAAK;AAAA,IACtC;AAEA,WAAO,EAAE,MAAM,MAAM,WAAO,2BAAE,aAAa,EAAE;AAAA,EAC/C;AAEA,QAAM,4BAA4B,KAAK,UAAU;AAAA,IAC/C,CAAC,aAAa,SAAS,aAAa;AAAA,EACtC;AAEA,MAAI,CAAC,2BAA2B,cAAc;AAC5C,UAAM,gBAAgB;AAAA,MACpB,IAAI,kDAAkD,KAAK,KAAK;AAAA,MAChE,IAAI,2EAAwE,KAAK,KAAK;AAAA,MACtF,IAAI,oFAA8E,KAAK,KAAK;AAAA,IAC9F;AAEA,WAAO,EAAE,MAAM,MAAM,WAAO,2BAAE,aAAa,EAAE;AAAA,EAC/C;AAEA,QAAM,UAAU,UAAM;AAAA,IACpB;AAAA,IACA,0BAA0B;AAAA,EAC5B;AAEA,MAAI,CAAC,SAAS;AACZ,UAAM,gBAAgB;AAAA,MACpB,IAAI,gCAAgC,KAAK;AAAA,MACzC,IAAI,qCAAqC,KAAK;AAAA,MAC9C,IAAI,sDAAgD,KAAK;AAAA,IAC3D;AAEA,yBAAO,MAAM,cAAc,EAAE;AAG7B,UAAM,eAAe,KAAK,MAAM,KAAK,OAAO,IAAI,GAAI,IAAI;AACxD,UAAM,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,YAAY,CAAC;AAEhE,WAAO,EAAE,MAAM,MAAM,WAAO,2BAAE,aAAa,EAAE;AAAA,EAC/C;AAEA,SAAO,EAAE,KAAK;AAChB;AAOO,MAAM,mBAAmB,OAC9B,8BACmC;AACnC,QAAM,EAAE,UAAU,GAAG,KAAK,IAAI;AAE9B,MAAI,CAAC,UAAU;AACb,UAAM,IAAI,2BAAa,6BAA6B,EAAE,KAAK,CAAC;AAAA,EAC9D;AAEA,QAAM,eAAe,yBAAyB,SAAS,MAAM;AAAA,IAC3D,cAAc,UAAM,oBAAK,UAAU,UAAM,uBAAQ,CAAC;AAAA,IAClD,QAAQ,eAAe,EAAE;AAAA,EAC3B,CAAC;AAED,SAAO,EAAE,GAAG,MAAM,UAAU,aAAa;AAC3C;AAQO,MAAM,qBAAqB,OAChC,QACA,gBACG;AACH,QAAM,OAAO,UAAM,0BAAY,MAAM;AAErC,MAAI,CAAC,MAAM;AACT,UAAM,IAAI,2BAAa,kBAAkB,EAAE,OAAO,CAAC;AAAA,EACrD;AAEA,QAAM,cAAoB,MAAM,mBAAmB,QAAQ,SAAS;AAAA,IAClE,cAAc,UAAM,oBAAK,aAAa,UAAM,uBAAQ,CAAC;AAAA,EACvD,CAAC;AAED,SAAO;AACT;AASO,MAAM,gBAAgB,OAAO,QAAgB,aAAqB;AACvE,QAAM,OAAO,UAAM,0BAAY,MAAM;AAErC,MAAI,CAAC,M
AAM;AACT,UAAM,IAAI,2BAAa,kBAAkB,EAAE,OAAO,CAAC;AAAA,EACrD;AAEA,QAAM,cAA4B,MAAM,mBAAmB,QAAQ,SAAS;AAAA,IAC1E,cAAc,UAAM,oBAAK,UAAU,UAAM,uBAAQ,CAAC;AAAA,EACpD,CAAC;AAED,SAAO;AACT;","names":["import_user","uuidv4","jwt","crypto"]}
{"version":3,"sources":["../../../src/services/sessionAuth.service.ts"],"sourcesContent":["import { logger } from '@logger';\nimport type { ResponseWithInformation } from '@middlewares/sessionAuth.middleware';\nimport {\n Cookies,\n getClearCookieOptions,\n getCookieOptions,\n MAX_AGE,\n} from '@utils/cookies';\nimport { ensureMongoDocumentToObject } from '@utils/ensureMongoDocumentToObject';\nimport { GenericError } from '@utils/errors';\nimport { mapUserToAPI } from '@utils/mapper/user';\nimport { compare, genSalt, hash } from 'bcryptjs';\nimport crypto from 'crypto';\nimport type { Response } from 'express';\n// @ts-ignore express-intlayer not build yet\nimport type { Organization } from '@/types/organization.types';\nimport type { Project } from '@/types/project.types';\nimport type {\n EmailPasswordSessionProvider,\n GithubSessionProvider,\n GoogleSessionProvider,\n Session,\n SessionProviders,\n} from '@/types/session.types';\nimport type {\n User,\n UserDocument,\n UserWithPasswordNotHashed,\n} from '@/types/user.types';\nimport { t } from 'express-intlayer';\nimport jwt from 'jsonwebtoken';\nimport type { ObjectId } from 'mongoose';\nimport { v4 as uuidv4 } from 'uuid';\nimport { getUserByEmail, getUserById, updateUserById } from './user.service';\n\n/**\n * Adds a session to a user or updates the existing one.\n * @param user - User object.\n * @returns Updated user object.\n */\nexport const addSession = async (user: User): Promise<UserDocument> => {\n const userSessionToken = uuidv4();\n\n const session: Session = {\n sessionToken: userSessionToken,\n expires: new Date(Date.now() + MAX_AGE),\n };\n\n const updatedUser: UserDocument = await updateUserById(user._id, { session });\n\n return updatedUser;\n};\n\nexport const removeSession = async (user: User): Promise<UserDocument> => {\n const session = undefined;\n\n const updatedUser: UserDocument = await updateUserById(user._id, { session });\n\n return updatedUser;\n};\n\n/**\n * Set user auth locals object\n * @param res - Express response object.\n * @param user - User object.\n */\nexport const setUserAuth = async (res: Response, user: User) => {\n const formattedUser = mapUserToAPI(user);\n\n const userToken = jwt.sign(formattedUser, process.env.JWT_TOKEN_SECRET!, {\n expiresIn: MAX_AGE,\n });\n\n if (!userToken) {\n throw new GenericError('JWT_TOKEN_CREATION_FAILED_USER', { user });\n }\n\n const cookieOptions = getCookieOptions();\n\n res.cookie(Cookies.JWT_USER, userToken, cookieOptions);\n\n const userWithSession: UserDocument = await addSession(user);\n\n const userSessionToken = userWithSession.session?.sessionToken;\n\n res.cookie(Cookies.JWT_AUTH, userSessionToken, cookieOptions);\n\n res.locals.user = user;\n logger.info(\n `User logged in - User: Name: ${user.name}, id: ${String(user._id)}`\n );\n};\n\n/**\n * Clears the JWT auth cookies and user locals object.\n * @param res - Express response object.\n */\nexport const clearUserAuth = async (res: ResponseWithInformation) => {\n const { user } = res.locals;\n const cookiesOptions = getClearCookieOptions();\n\n if (user) {\n await removeSession(user);\n }\n\n res.cookie(Cookies.JWT_AUTH, '', cookiesOptions);\n res.cookie(Cookies.JWT_USER, '', cookiesOptions);\n\n res.locals.user = null;\n res.locals.authType = null;\n};\n\n/**\n *\n * @param res\n * @param organization\n * @returns\n */\nexport const setOrganizationAuth = (\n res: ResponseWithInformation,\n organization: Organization\n) => {\n const organizationData = {\n _id: organization._id,\n name: 
organization.name,\n };\n\n const organizationToken = jwt.sign(\n organizationData,\n process.env.JWT_TOKEN_SECRET!,\n {\n expiresIn: MAX_AGE,\n }\n );\n\n if (!organizationToken) {\n throw new GenericError('JWT_TOKEN_CREATION_FAILED_ORGANIZATION', {\n organization,\n });\n }\n\n res.cookie(Cookies.JWT_ORGANIZATION, organizationToken, getCookieOptions());\n\n res.locals.organization = organization;\n};\n\n/**\n * Clears the JWT organization cookies and organization locals object.\n * @param res - Express response object.\n */\nexport const clearOrganizationAuth = (res: ResponseWithInformation) => {\n res.locals.organization = null;\n\n res.cookie(Cookies.JWT_ORGANIZATION, '', getClearCookieOptions());\n};\n\n/**\n * Set project auth locals object\n * @param res - Express response object.\n * @param project - Project object.\n */\nexport const setProjectAuth = (\n res: ResponseWithInformation,\n project: Project\n) => {\n const { organization } = res.locals;\n const projectData = {\n _id: project._id,\n name: project.name,\n };\n\n const projectToken = jwt.sign(projectData, process.env.JWT_TOKEN_SECRET!, {\n expiresIn: MAX_AGE,\n });\n\n if (!projectToken) {\n throw new GenericError('JWT_TOKEN_CREATION_FAILED_PROJECT', {\n project,\n });\n }\n\n res.cookie(Cookies.JWT_PROJECT, projectToken, getCookieOptions());\n\n if (!organization) {\n throw new GenericError('ORGANIZATION_NOT_FOUND', {\n project,\n });\n }\n\n if (\n // if the project is not in the organization's projects\n String(organization._id) !== String(project.organizationId)\n ) {\n throw new GenericError('JWT_TOKEN_ORGANIZATION_MISMATCH_PROJECT', {\n project,\n });\n }\n\n res.locals.project = project;\n};\n\n/**\n * Clears the JWT project cookies and project locals object.\n * @param res - Express response object.\n */\nexport const clearProjectAuth = (res: Response) => {\n res.locals.project = null;\n\n res.cookie(Cookies.JWT_PROJECT, '', getClearCookieOptions());\n};\n\n/**\n * Generates a random secret string of a specified length.\n * @param length - The length of the secret.\n * @returns The generated secret string.\n */\nexport const generateSecret = (length: number): string => {\n const characters =\n 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';\n return Array.from({ length }, () =>\n characters.charAt(Math.floor(Math.random() * characters.length))\n ).join('');\n};\n\n/**\n * Handles a password reset request for a user.\n * @param email - The user's email.\n * @param organization - The organization associated with the user.\n * @returns The user object or null if no user was found.\n */\nexport const requestPasswordReset = async (\n email: string\n): Promise<User | null> => {\n const user = await getUserByEmail(email);\n\n if (!user) {\n throw new GenericError('USER_NOT_FOUND', { email });\n }\n\n return updateUserProvider(user._id as unknown as string, 'email', {\n secret: generateSecret(35),\n });\n};\n\n/**\n * Resets a user's password.\n * @param userId - The ID of the user.\n * @param secret - The secret token associated with the user.\n * @param newPassword - The new password to set.\n * @returns The updated user or null if the reset failed.\n */\nexport const resetUserPassword = async (\n userId: string | ObjectId,\n secret: string,\n newPassword: string\n): Promise<User> => {\n const emailAndPasswordProvider = await getUserProvider(userId, 'email');\n\n if (!emailAndPasswordProvider) {\n throw new GenericError('USER_PROVIDER_NOT_FOUND', { userId });\n }\n\n if 
(!emailAndPasswordProvider.secret) {\n throw new GenericError('USER_PROVIDER_SECRET_NOT_DEFINED', { userId });\n }\n\n if (\n !crypto.timingSafeEqual(\n Buffer.from(emailAndPasswordProvider.secret),\n Buffer.from(secret)\n )\n ) {\n throw new GenericError('USER_PROVIDER_SECRET_NOT_VALID', { userId });\n }\n\n const updatedUser: User = await updateUserProvider(userId, 'email', {\n passwordHash: await hash(newPassword, await genSalt()),\n secret,\n });\n\n return updatedUser;\n};\n\ntype UserProvider<T extends SessionProviders['provider']> = T extends 'email'\n ? EmailPasswordSessionProvider\n : T extends 'google'\n ? GoogleSessionProvider\n : T extends 'github'\n ? GithubSessionProvider\n : SessionProviders;\n\n/**\n * Gets a user's provider.\n * @param userId - The ID of the user.\n * @param provider - The provider to get.\n * @returns The user's provider.\n */\nexport const getUserProvider = async <T extends SessionProviders['provider']>(\n userId: string | ObjectId,\n provider: T,\n providerAccountId?: string\n): Promise<UserProvider<T> | null> => {\n const user = await getUserById(userId);\n\n if (!user) {\n throw new GenericError('USER_NOT_FOUND', { userId });\n }\n\n const userProvider = user.provider?.find(\n (providerEl) =>\n (providerEl.provider === provider && !providerAccountId) ||\n (providerAccountId &&\n (providerEl as GithubSessionProvider).providerAccountId ===\n providerAccountId)\n );\n\n return (userProvider as UserProvider<T>) ?? null;\n};\n\n/**\n * Formats the given fields of a user's provider.\n * @param provider - The provider to update.\n * @param user - The user object.\n * @param providerUpdate - The updates to apply to the provider.\n * @returns The updated user provider.\n */\nexport const formatUserProviderUpdate = <\n T extends SessionProviders['provider'],\n>(\n provider: T,\n user: Partial<User>,\n providerUpdate: Partial<UserProvider<T>>\n): User['provider'] => {\n const userProvider: SessionProviders[] = ensureMongoDocumentToObject<\n SessionProviders[]\n >(user.provider ?? []);\n\n const userProviderToUpdate = userProvider?.find(\n (providerEl) => providerEl.provider === provider\n );\n\n let updatedProvider: User['provider'];\n\n if (userProviderToUpdate) {\n const otherProviders =\n user.provider?.filter((p) => p.provider !== provider) ?? [];\n\n updatedProvider = [\n ...otherProviders,\n { ...userProviderToUpdate, ...providerUpdate, provider },\n ];\n } else {\n updatedProvider = [\n ...(user.provider ?? 
[]),\n { ...providerUpdate, provider } as SessionProviders,\n ];\n }\n\n return updatedProvider;\n};\n\n/**\n * Updates the given fields of a user's provider.\n * @param userId - The ID of the user.\n * @param provider - The provider to update.\n * @param providerUpdate - The updates to apply to the provider.\n * @returns The updated user.\n */\nexport const updateUserProvider = async <\n T extends SessionProviders['provider'],\n>(\n userId: string | ObjectId,\n provider: T,\n providerUpdate: Partial<UserProvider<T>>\n): Promise<UserDocument> => {\n const user = await getUserById(userId);\n\n if (!user) {\n throw new GenericError('USER_NOT_FOUND', { userId });\n }\n\n const formattedProviderToUpdate = formatUserProviderUpdate(\n provider,\n user,\n providerUpdate\n );\n\n const updatedUser: UserDocument = await updateUserById(userId, {\n provider: formattedProviderToUpdate,\n });\n\n logger.info(\n `User provider updated - User: Name: ${updatedUser.name}, id: ${String(updatedUser._id)} - Provider: ${provider}`\n );\n\n return updatedUser;\n};\n\n/**\n * Updates the given fields of a user's provider.\n * @param userId - The ID of the user.\n * @param provider - The updates to apply to the provider.\n * @returns The updated user.\n */\nexport const addUserProvider = async (\n userId: string | ObjectId,\n provider: SessionProviders\n): Promise<UserDocument> => {\n const user = await getUserById(userId);\n\n if (!user) {\n throw new GenericError('USER_NOT_FOUND', { userId });\n }\n\n const existingProvider = await getUserProvider(userId, provider.provider);\n\n if (existingProvider) {\n throw new GenericError('USER_PROVIDER_ALREADY_EXISTS', {\n userId,\n provider,\n });\n }\n\n const updatedProvider = [...(user.provider ?? []), provider];\n\n const updatedUser = await updateUserById(userId, {\n provider: updatedProvider,\n });\n\n logger.info(\n `User provider added - User: Name: ${updatedUser.name}, id: ${String(updatedUser._id)} - Provider: ${provider.provider}`\n );\n\n return updatedUser;\n};\n\n/**\n * Removes a user's provider.\n * @param userId - The ID of the user.\n * @param provider - The provider to remove.\n * @returns The updated user.\n */\nexport const removeUserProvider = async (\n userId: string | ObjectId,\n provider: SessionProviders['provider'],\n providerAccountId?: string\n) => {\n const user = await getUserById(userId);\n\n if (!user) {\n throw new GenericError('USER_NOT_FOUND', { userId });\n }\n\n const existingProvider = await getUserProvider(\n userId,\n provider,\n providerAccountId\n );\n\n if (!existingProvider) {\n throw new GenericError('USER_PROVIDER_NOT_FOUND', {\n userId,\n provider,\n });\n }\n\n const updatedProvider = user.provider?.filter(\n (p) =>\n p.provider !== provider &&\n (!providerAccountId ||\n (providerAccountId &&\n (p as GithubSessionProvider).providerAccountId !== providerAccountId))\n );\n\n return await updateUserById(userId, {\n provider: updatedProvider,\n });\n};\n\ntype TestUserPasswordResult = { user: User | null; error?: string };\n\n/**\n * Logs in a user.\n * @param email - The user's email.\n * @param password - The user's password.\n * @returns The user object.\n */\nexport const testUserPassword = async (\n email: string,\n password: string\n): Promise<TestUserPasswordResult> => {\n const user = await getUserByEmail(email);\n\n if (!user) {\n const errorMessages = {\n en: `User not found - ${email}`,\n fr: `Utilisateur non trouvé - ${email}`,\n es: `Usuario no encontrado - ${email}`,\n };\n\n return { user: null, error: 
t(errorMessages) };\n }\n\n const userEmailPasswordProvider = user.provider?.find(\n (provider) => provider.provider === 'email'\n );\n\n if (!userEmailPasswordProvider?.passwordHash) {\n const errorMessages = {\n en: `User request to login but no password defined: ${user.email}`,\n fr: `Demande de connexion d'utilisateur mais pas de mot de passe défini : ${user.email}`,\n es: `Solicitud de inicio de sesión de usuario pero no se define la contraseña : ${user.email}`,\n };\n\n return { user: null, error: t(errorMessages) };\n }\n\n const isMatch = await compare(\n password,\n userEmailPasswordProvider.passwordHash\n );\n\n if (!isMatch) {\n const errorMessages = {\n en: `Incorrect email or password: ${email}`,\n fr: `Email ou mot de passe incorrect : ${email}`,\n es: `Correo electrónico o contraseña incorrecta : ${email}`,\n };\n\n logger.error(errorMessages.en);\n\n // Await a random time to prevent brute force attacks\n const randomNumber = Math.floor(Math.random() * 1000) + 1000;\n await new Promise((resolve) => setTimeout(resolve, randomNumber));\n\n return { user: null, error: t(errorMessages) };\n }\n\n return { user };\n};\n\n/**\n * Hashes a user's password.\n * @param userWithPasswordNotHashed - The user object with password not hashed.\n * @returns The user object with hashed password.\n */\nexport const hashUserPassword = async (\n userWithPasswordNotHashed: UserWithPasswordNotHashed\n): Promise<Partial<UserDocument>> => {\n const { password, ...user } = userWithPasswordNotHashed;\n\n if (!password) {\n throw new GenericError('USER_PASSWORD_NOT_DEFINED', { user });\n }\n\n const userProvider = formatUserProviderUpdate('email', user, {\n passwordHash: await hash(password, await genSalt()),\n secret: generateSecret(35),\n });\n\n return { ...user, provider: userProvider };\n};\n\n/**\n * Changes a user's password.\n * @param userId - The ID of the user.\n * @param newPassword - The user's new password.\n * @returns The updated user or null if the password change failed.\n */\nexport const changeUserPassword = async (\n userId: string | ObjectId,\n newPassword: string\n) => {\n const user = await getUserById(userId);\n\n if (!user) {\n throw new GenericError('USER_NOT_FOUND', { userId });\n }\n\n const updatedUser: User = await updateUserProvider(userId, 'email', {\n passwordHash: await hash(newPassword, await genSalt()),\n });\n\n return updatedUser;\n};\n\n/**\n * Resets a user's password.\n * @param userId - The ID of the user.\n * @param secret - The secret token associated with the user.\n * @param newPassword - The new password to set.\n * @returns The updated user or null if the reset failed.\n */\nexport const resetPassword = async (userId: string, password: string) => {\n const user = await getUserById(userId);\n\n if (!user) {\n throw new GenericError('USER_NOT_FOUND', { userId });\n }\n\n const updatedUser: UserDocument = await updateUserProvider(userId, 'email', {\n passwordHash: await hash(password, await genSalt()),\n });\n\n return 
updatedUser;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAuB;AAEvB,qBAKO;AACP,yCAA4C;AAC5C,oBAA6B;AAC7B,kBAA6B;AAC7B,sBAAuC;AACvC,oBAAmB;AAiBnB,8BAAkB;AAClB,0BAAgB;AAEhB,kBAA6B;AAC7B,IAAAA,eAA4D;AAOrD,MAAM,aAAa,OAAO,SAAsC;AACrE,QAAM,uBAAmB,YAAAC,IAAO;AAEhC,QAAM,UAAmB;AAAA,IACvB,cAAc;AAAA,IACd,SAAS,IAAI,KAAK,KAAK,IAAI,IAAI,sBAAO;AAAA,EACxC;AAEA,QAAM,cAA4B,UAAM,6BAAe,KAAK,KAAK,EAAE,QAAQ,CAAC;AAE5E,SAAO;AACT;AAEO,MAAM,gBAAgB,OAAO,SAAsC;AACxE,QAAM,UAAU;AAEhB,QAAM,cAA4B,UAAM,6BAAe,KAAK,KAAK,EAAE,QAAQ,CAAC;AAE5E,SAAO;AACT;AAOO,MAAM,cAAc,OAAO,KAAe,SAAe;AAC9D,QAAM,oBAAgB,0BAAa,IAAI;AAEvC,QAAM,YAAY,oBAAAC,QAAI,KAAK,eAAe,QAAQ,IAAI,kBAAmB;AAAA,IACvE,WAAW;AAAA,EACb,CAAC;AAED,MAAI,CAAC,WAAW;AACd,UAAM,IAAI,2BAAa,kCAAkC,EAAE,KAAK,CAAC;AAAA,EACnE;AAEA,QAAM,oBAAgB,iCAAiB;AAEvC,MAAI,OAAO,uBAAQ,UAAU,WAAW,aAAa;AAErD,QAAM,kBAAgC,MAAM,WAAW,IAAI;AAE3D,QAAM,mBAAmB,gBAAgB,SAAS;AAElD,MAAI,OAAO,uBAAQ,UAAU,kBAAkB,aAAa;AAE5D,MAAI,OAAO,OAAO;AAClB,uBAAO;AAAA,IACL,gCAAgC,KAAK,IAAI,SAAS,OAAO,KAAK,GAAG,CAAC;AAAA,EACpE;AACF;AAMO,MAAM,gBAAgB,OAAO,QAAiC;AACnE,QAAM,EAAE,KAAK,IAAI,IAAI;AACrB,QAAM,qBAAiB,sCAAsB;AAE7C,MAAI,MAAM;AACR,UAAM,cAAc,IAAI;AAAA,EAC1B;AAEA,MAAI,OAAO,uBAAQ,UAAU,IAAI,cAAc;AAC/C,MAAI,OAAO,uBAAQ,UAAU,IAAI,cAAc;AAE/C,MAAI,OAAO,OAAO;AAClB,MAAI,OAAO,WAAW;AACxB;AAQO,MAAM,sBAAsB,CACjC,KACA,iBACG;AACH,QAAM,mBAAmB;AAAA,IACvB,KAAK,aAAa;AAAA,IAClB,MAAM,aAAa;AAAA,EACrB;AAEA,QAAM,oBAAoB,oBAAAA,QAAI;AAAA,IAC5B;AAAA,IACA,QAAQ,IAAI;AAAA,IACZ;AAAA,MACE,WAAW;AAAA,IACb;AAAA,EACF;AAEA,MAAI,CAAC,mBAAmB;AACtB,UAAM,IAAI,2BAAa,0CAA0C;AAAA,MAC/D;AAAA,IACF,CAAC;AAAA,EACH;AAEA,MAAI,OAAO,uBAAQ,kBAAkB,uBAAmB,iCAAiB,CAAC;AAE1E,MAAI,OAAO,eAAe;AAC5B;AAMO,MAAM,wBAAwB,CAAC,QAAiC;AACrE,MAAI,OAAO,eAAe;AAE1B,MAAI,OAAO,uBAAQ,kBAAkB,QAAI,sCAAsB,CAAC;AAClE;AAOO,MAAM,iBAAiB,CAC5B,KACA,YACG;AACH,QAAM,EAAE,aAAa,IAAI,IAAI;AAC7B,QAAM,cAAc;AAAA,IAClB,KAAK,QAAQ;AAAA,IACb,MAAM,QAAQ;AAAA,EAChB;AAEA,QAAM,eAAe,oBAAAA,QAAI,KAAK,aAAa,QAAQ,IAAI,kBAAmB;AAAA,IACxE,WAAW;AAAA,EACb,CAAC;AAED,MAAI,CAAC,cAAc;AACjB,UAAM,IAAI,2BAAa,qCAAqC;AAAA,MAC1D;AAAA,IACF,CAAC;AAAA,EACH;AAEA,MAAI,OAAO,uBAAQ,aAAa,kBAAc,iCAAiB,CAAC;AAEhE,MAAI,CAAC,cAAc;AACjB,UAAM,IAAI,2BAAa,0BAA0B;AAAA,MAC/C;AAAA,IACF,CAAC;AAAA,EACH;AAEA;AAAA;AAAA,IAEE,OAAO,aAAa,GAAG,MAAM,OAAO,QAAQ,cAAc;AAAA,IAC1D;AACA,UAAM,IAAI,2BAAa,2CAA2C;AAAA,MAChE;AAAA,IACF,CAAC;AAAA,EACH;AAEA,MAAI,OAAO,UAAU;AACvB;AAMO,MAAM,mBAAmB,CAAC,QAAkB;AACjD,MAAI,OAAO,UAAU;AAErB,MAAI,OAAO,uBAAQ,aAAa,QAAI,sCAAsB,CAAC;AAC7D;AAOO,MAAM,iBAAiB,CAAC,WAA2B;AACxD,QAAM,aACJ;AACF,SAAO,MAAM;AAAA,IAAK,EAAE,OAAO;AAAA,IAAG,MAC5B,WAAW,OAAO,KAAK,MAAM,KAAK,OAAO,IAAI,WAAW,MAAM,CAAC;AAAA,EACjE,EAAE,KAAK,EAAE;AACX;AAQO,MAAM,uBAAuB,OAClC,UACyB;AACzB,QAAM,OAAO,UAAM,6BAAe,KAAK;AAEvC,MAAI,CAAC,MAAM;AACT,UAAM,IAAI,2BAAa,kBAAkB,EAAE,MAAM,CAAC;AAAA,EACpD;AAEA,SAAO,mBAAmB,KAAK,KAA0B,SAAS;AAAA,IAChE,QAAQ,eAAe,EAAE;AAAA,EAC3B,CAAC;AACH;AASO,MAAM,oBAAoB,OAC/B,QACA,QACA,gBACkB;AAClB,QAAM,2BAA2B,MAAM,gBAAgB,QAAQ,OAAO;AAEtE,MAAI,CAAC,0BAA0B;AAC7B,UAAM,IAAI,2BAAa,2BAA2B,EAAE,OAAO,CAAC;AAAA,EAC9D;AAEA,MAAI,CAAC,yBAAyB,QAAQ;AACpC,UAAM,IAAI,2BAAa,oCAAoC,EAAE,OAAO,CAAC;AAAA,EACvE;AAEA,MACE,CAAC,cAAAC,QAAO;AAAA,IACN,OAAO,KAAK,yBAAyB,MAAM;AAAA,IAC3C,OAAO,KAAK,MAAM;AAAA,EACpB,GACA;AACA,UAAM,IAAI,2BAAa,kCAAkC,EAAE,OAAO,CAAC;AAAA,EACrE;AAEA,QAAM,cAAoB,MAAM,mBAAmB,QAAQ,SAAS;AAAA,IAClE,cAAc,UAAM,sBAAK,aAAa,UAAM,yBAAQ,CAAC;AAAA,IACrD;AAAA,EACF,CAAC;AAED,SAAO;AACT;AAgBO,MAAM,kBAAkB,OAC7B,QACA,UACA,sBACoC;AACpC,QAAM,OAAO,UAAM,0BAAY,MAAM;AAErC,MAAI,CAAC,MAAM;AA
CT,UAAM,IAAI,2BAAa,kBAAkB,EAAE,OAAO,CAAC;AAAA,EACrD;AAEA,QAAM,eAAe,KAAK,UAAU;AAAA,IAClC,CAAC,eACE,WAAW,aAAa,YAAY,CAAC,qBACrC,qBACE,WAAqC,sBACpC;AAAA,EACR;AAEA,SAAQ,gBAAoC;AAC9C;AASO,MAAM,2BAA2B,CAGtC,UACA,MACA,mBACqB;AACrB,QAAM,mBAAmC,gEAEvC,KAAK,YAAY,CAAC,CAAC;AAErB,QAAM,uBAAuB,cAAc;AAAA,IACzC,CAAC,eAAe,WAAW,aAAa;AAAA,EAC1C;AAEA,MAAI;AAEJ,MAAI,sBAAsB;AACxB,UAAM,iBACJ,KAAK,UAAU,OAAO,CAAC,MAAM,EAAE,aAAa,QAAQ,KAAK,CAAC;AAE5D,sBAAkB;AAAA,MAChB,GAAG;AAAA,MACH,EAAE,GAAG,sBAAsB,GAAG,gBAAgB,SAAS;AAAA,IACzD;AAAA,EACF,OAAO;AACL,sBAAkB;AAAA,MAChB,GAAI,KAAK,YAAY,CAAC;AAAA,MACtB,EAAE,GAAG,gBAAgB,SAAS;AAAA,IAChC;AAAA,EACF;AAEA,SAAO;AACT;AASO,MAAM,qBAAqB,OAGhC,QACA,UACA,mBAC0B;AAC1B,QAAM,OAAO,UAAM,0BAAY,MAAM;AAErC,MAAI,CAAC,MAAM;AACT,UAAM,IAAI,2BAAa,kBAAkB,EAAE,OAAO,CAAC;AAAA,EACrD;AAEA,QAAM,4BAA4B;AAAA,IAChC;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAEA,QAAM,cAA4B,UAAM,6BAAe,QAAQ;AAAA,IAC7D,UAAU;AAAA,EACZ,CAAC;AAED,uBAAO;AAAA,IACL,uCAAuC,YAAY,IAAI,SAAS,OAAO,YAAY,GAAG,CAAC,gBAAgB,QAAQ;AAAA,EACjH;AAEA,SAAO;AACT;AAQO,MAAM,kBAAkB,OAC7B,QACA,aAC0B;AAC1B,QAAM,OAAO,UAAM,0BAAY,MAAM;AAErC,MAAI,CAAC,MAAM;AACT,UAAM,IAAI,2BAAa,kBAAkB,EAAE,OAAO,CAAC;AAAA,EACrD;AAEA,QAAM,mBAAmB,MAAM,gBAAgB,QAAQ,SAAS,QAAQ;AAExE,MAAI,kBAAkB;AACpB,UAAM,IAAI,2BAAa,gCAAgC;AAAA,MACrD;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,kBAAkB,CAAC,GAAI,KAAK,YAAY,CAAC,GAAI,QAAQ;AAE3D,QAAM,cAAc,UAAM,6BAAe,QAAQ;AAAA,IAC/C,UAAU;AAAA,EACZ,CAAC;AAED,uBAAO;AAAA,IACL,qCAAqC,YAAY,IAAI,SAAS,OAAO,YAAY,GAAG,CAAC,gBAAgB,SAAS,QAAQ;AAAA,EACxH;AAEA,SAAO;AACT;AAQO,MAAM,qBAAqB,OAChC,QACA,UACA,sBACG;AACH,QAAM,OAAO,UAAM,0BAAY,MAAM;AAErC,MAAI,CAAC,MAAM;AACT,UAAM,IAAI,2BAAa,kBAAkB,EAAE,OAAO,CAAC;AAAA,EACrD;AAEA,QAAM,mBAAmB,MAAM;AAAA,IAC7B;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAEA,MAAI,CAAC,kBAAkB;AACrB,UAAM,IAAI,2BAAa,2BAA2B;AAAA,MAChD;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,kBAAkB,KAAK,UAAU;AAAA,IACrC,CAAC,MACC,EAAE,aAAa,aACd,CAAC,qBACC,qBACE,EAA4B,sBAAsB;AAAA,EAC3D;AAEA,SAAO,UAAM,6BAAe,QAAQ;AAAA,IAClC,UAAU;AAAA,EACZ,CAAC;AACH;AAUO,MAAM,mBAAmB,OAC9B,OACA,aACoC;AACpC,QAAM,OAAO,UAAM,6BAAe,KAAK;AAEvC,MAAI,CAAC,MAAM;AACT,UAAM,gBAAgB;AAAA,MACpB,IAAI,oBAAoB,KAAK;AAAA,MAC7B,IAAI,+BAA4B,KAAK;AAAA,MACrC,IAAI,2BAA2B,KAAK;AAAA,IACtC;AAEA,WAAO,EAAE,MAAM,MAAM,WAAO,2BAAE,aAAa,EAAE;AAAA,EAC/C;AAEA,QAAM,4BAA4B,KAAK,UAAU;AAAA,IAC/C,CAAC,aAAa,SAAS,aAAa;AAAA,EACtC;AAEA,MAAI,CAAC,2BAA2B,cAAc;AAC5C,UAAM,gBAAgB;AAAA,MACpB,IAAI,kDAAkD,KAAK,KAAK;AAAA,MAChE,IAAI,2EAAwE,KAAK,KAAK;AAAA,MACtF,IAAI,oFAA8E,KAAK,KAAK;AAAA,IAC9F;AAEA,WAAO,EAAE,MAAM,MAAM,WAAO,2BAAE,aAAa,EAAE;AAAA,EAC/C;AAEA,QAAM,UAAU,UAAM;AAAA,IACpB;AAAA,IACA,0BAA0B;AAAA,EAC5B;AAEA,MAAI,CAAC,SAAS;AACZ,UAAM,gBAAgB;AAAA,MACpB,IAAI,gCAAgC,KAAK;AAAA,MACzC,IAAI,qCAAqC,KAAK;AAAA,MAC9C,IAAI,sDAAgD,KAAK;AAAA,IAC3D;AAEA,yBAAO,MAAM,cAAc,EAAE;AAG7B,UAAM,eAAe,KAAK,MAAM,KAAK,OAAO,IAAI,GAAI,IAAI;AACxD,UAAM,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,YAAY,CAAC;AAEhE,WAAO,EAAE,MAAM,MAAM,WAAO,2BAAE,aAAa,EAAE;AAAA,EAC/C;AAEA,SAAO,EAAE,KAAK;AAChB;AAOO,MAAM,mBAAmB,OAC9B,8BACmC;AACnC,QAAM,EAAE,UAAU,GAAG,KAAK,IAAI;AAE9B,MAAI,CAAC,UAAU;AACb,UAAM,IAAI,2BAAa,6BAA6B,EAAE,KAAK,CAAC;AAAA,EAC9D;AAEA,QAAM,eAAe,yBAAyB,SAAS,MAAM;AAAA,IAC3D,cAAc,UAAM,sBAAK,UAAU,UAAM,yBAAQ,CAAC;AAAA,IAClD,QAAQ,eAAe,EAAE;AAAA,EAC3B,CAAC;AAED,SAAO,EAAE,GAAG,MAAM,UAAU,aAAa;AAC3C;AAQO,MAAM,qBAAqB,OAChC,QACA,gBACG;AACH,QAAM,OAAO,UAAM,0BAAY,MAAM;AAErC,MAAI,CAAC,MAAM;AACT,UAAM,IAAI,2BAAa,kBAAkB,EAAE,OAAO,CAAC;AAAA,EACrD;AAEA,QAAM,cAAoB,MAAM,mBAAmB,QAAQ,SAAS;AAAA,IAClE,cAAc,UAAM,sBAAK,aAAa,UAAM,yBAAQ,CAAC;AAAA,EACvD,CAAC;AAED,SAAO;AACT;AASO,MAAM,gBAAgB,OAAO,QAAgB,aAAqB;AACvE,QAAM,OAAO,UAAM,0BAAY,MAAM;AAErC,MAAI,CAAC,M
AAM;AACT,UAAM,IAAI,2BAAa,kBAAkB,EAAE,OAAO,CAAC;AAAA,EACrD;AAEA,QAAM,cAA4B,MAAM,mBAAmB,QAAQ,SAAS;AAAA,IAC1E,cAAc,UAAM,sBAAK,UAAU,UAAM,yBAAQ,CAAC;AAAA,EACpD,CAAC;AAED,SAAO;AACT;","names":["import_user","uuidv4","jwt","crypto"]}
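A detail worth noting in the sessionAuth source embedded above: resetUserPassword validates the stored reset secret with crypto.timingSafeEqual, which resists timing attacks but throws when the two buffers differ in length. A minimal sketch of that comparison pattern with an explicit length guard (the safeCompare helper is hypothetical, not part of this package):

import { timingSafeEqual } from 'crypto';

// Hypothetical helper: constant-time secret comparison.
// Raw timingSafeEqual throws on unequal buffer lengths, so a
// length mismatch is rejected up front instead of crashing.
const safeCompare = (a: string, b: string): boolean => {
  const bufA = Buffer.from(a);
  const bufB = Buffer.from(b);
  if (bufA.length !== bufB.length) return false;
  return timingSafeEqual(bufA, bufB);
};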
@@ -0,0 +1,140 @@
+"use strict";
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var aiSdk_exports = {};
+__export(aiSdk_exports, {
+  AIProvider: () => AIProvider,
+  getAIConfig: () => getAIConfig
+});
+module.exports = __toCommonJS(aiSdk_exports);
+var import_anthropic = require("@ai-sdk/anthropic");
+var import_deepseek = require("@ai-sdk/deepseek");
+var import_google = require("@ai-sdk/google");
+var import_mistral = require("@ai-sdk/mistral");
+var import_openai = require("@ai-sdk/openai");
+var import_logger = require('./../../logger/index.cjs');
+var AIProvider = /* @__PURE__ */ ((AIProvider2) => {
+  AIProvider2["OPENAI"] = "openai";
+  AIProvider2["ANTHROPIC"] = "anthropic";
+  AIProvider2["MISTRAL"] = "mistral";
+  AIProvider2["DEEPSEEK"] = "deepseek";
+  AIProvider2["GEMINI"] = "gemini";
+  return AIProvider2;
+})(AIProvider || {});
+const getAIConfig = async (options) => {
+  try {
+    const {
+      provider = "openai" /* OPENAI */,
+      model,
+      temperature = 0.1
+    } = options ?? {};
+    let defaultModel;
+    switch (provider) {
+      case "openai" /* OPENAI */:
+        defaultModel = "gpt-4o-mini";
+        break;
+      case "anthropic" /* ANTHROPIC */:
+        defaultModel = "claude-3-haiku-20240307";
+        break;
+      case "mistral" /* MISTRAL */:
+        defaultModel = "mistral-large-latest";
+        break;
+      case "deepseek" /* DEEPSEEK */:
+        defaultModel = "deepseek-coder";
+        break;
+      case "gemini" /* GEMINI */:
+        defaultModel = "gemini-1.5-pro";
+        break;
+      default:
+        defaultModel = "gpt-4o-mini";
+    }
+    if (!options?.apiKey) {
+      import_logger.logger.error(`API key for ${provider} is missing`);
+      return void 0;
+    }
+    if (provider === "openai" /* OPENAI */) {
+      return {
+        model: (0, import_openai.openai)(model ?? defaultModel),
+        temperature
+      };
+    } else {
+      try {
+        switch (provider) {
+          case "anthropic" /* ANTHROPIC */:
+            try {
+              return {
+                model: (0, import_anthropic.anthropic)(model ?? defaultModel),
+                temperature
+              };
+            } catch (err) {
+              throw new Error(
+                "Failed to load @ai-sdk/anthropic. Please install it with: npm install @ai-sdk/anthropic"
+              );
+            }
+          case "mistral" /* MISTRAL */:
+            try {
+              return {
+                model: (0, import_mistral.mistral)(model ?? defaultModel),
+                temperature
+              };
+            } catch (err) {
+              throw new Error(
+                "Failed to load @ai-sdk/mistral. Please install it with: npm install @ai-sdk/mistral"
+              );
+            }
+          case "deepseek" /* DEEPSEEK */:
+            try {
+              return {
+                model: (0, import_deepseek.deepseek)(model ?? defaultModel),
+                temperature
+              };
+            } catch (err) {
+              throw new Error(
+                "Failed to load @ai-sdk/deepseek. Please install it with: npm install @ai-sdk/deepseek"
+              );
+            }
+          case "gemini" /* GEMINI */:
+            try {
+              return {
+                model: (0, import_google.google)(model ?? defaultModel),
+                temperature
+              };
+            } catch (err) {
+              throw new Error(
+                "Failed to load @ai-sdk/google. Please install it with: npm install @ai-sdk/google"
+              );
+            }
+          default:
+            throw new Error(`Provider ${provider} not supported`);
+        }
+      } catch (error) {
+        import_logger.logger.error(`Error loading SDK for provider ${provider}:`, error);
+        return void 0;
+      }
+    }
+  } catch (error) {
+    import_logger.logger.error("Error configuring AI model:", error);
+    return void 0;
+  }
+};
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  AIProvider,
+  getAIConfig
+});
+//# sourceMappingURL=aiSdk.cjs.map
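The TypeScript source embedded in the sourcemap below documents getAIConfig as returning a configuration "ready to use with generateText". A minimal consumer sketch under that assumption (the prompt text and reliance on an OPENAI_API_KEY environment variable are illustrative):

import { generateText } from 'ai';
import { AIProvider, getAIConfig } from './aiSdk';

const run = async () => {
  // Resolves a provider-specific model and temperature; returns
  // undefined when the API key is missing or the SDK fails to load.
  const aiConfig = await getAIConfig({
    provider: AIProvider.OPENAI,
    apiKey: process.env.OPENAI_API_KEY,
  });
  if (!aiConfig) throw new Error('Failed to initialize AI configuration');

  const { text } = await generateText({
    model: aiConfig.model,
    temperature: aiConfig.temperature,
    prompt: 'Say hello in French', // illustrative prompt
  });
  console.log(text);
};

run();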
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/utils/AI/aiSdk.ts"],"sourcesContent":["import { anthropic } from '@ai-sdk/anthropic';\nimport { deepseek } from '@ai-sdk/deepseek';\nimport { google } from '@ai-sdk/google';\nimport { mistral } from '@ai-sdk/mistral';\nimport { openai } from '@ai-sdk/openai';\nimport { logger } from '@logger';\n\n/**\n * Supported AI models\n */\nexport type Model =\n // OpenAI Models\n | 'gpt-4o-mini'\n | 'gpt-4o'\n | 'gpt-4.1'\n | 'gpt-4.1-mini'\n | 'gpt-4.1-nano'\n | 'gpt-4.5'\n | 'gpt-3.5-turbo'\n | 'gpt-4-turbo-preview'\n | 'gpt-4-vision-preview'\n | 'gpt-4o-audio-preview'\n | 'gpt-4o-mini-audio-preview'\n | 'o1-mini'\n | 'o1'\n | 'o1-pro'\n | 'o3-mini'\n | 'o3-mini-high'\n | 'o3'\n | 'o4-mini'\n | 'o4-mini-high'\n // Anthropic Models\n | 'claude-3-haiku-20240307'\n | 'claude-3-sonnet-20240229'\n | 'claude-3-opus-20240229'\n // Mistral Models\n | 'mistral-tiny'\n | 'mistral-small'\n | 'mistral-small-3.1'\n | 'mistral-medium'\n | 'mistral-medium-3'\n | 'mistral-large'\n | 'mistral-large-2'\n | 'mistral-large-latest'\n | 'codestral'\n | 'codestral-mamba'\n | 'mixtral-8x7b'\n | 'mixtral-8x22b'\n | 'mathstral-7b'\n | 'pixtral-large'\n // DeepSeek Models\n | 'deepseek-coder'\n | 'deepseek-chat'\n | 'deepseek-v3'\n // Google Models\n | 'gemini-1.0-pro'\n | 'gemini-1.5-pro'\n | 'gemini-1.5-flash'\n | (string & {});\n\n/**\n * Supported AI SDK providers\n */\nexport enum AIProvider {\n OPENAI = 'openai',\n ANTHROPIC = 'anthropic',\n MISTRAL = 'mistral',\n DEEPSEEK = 'deepseek',\n GEMINI = 'gemini',\n}\n\n/**\n * Common options for all AI providers\n */\nexport type AIOptions = {\n provider?: AIProvider;\n model?: Model;\n temperature?: number;\n apiKey?: string;\n customPrompt?: string;\n applicationContext?: string;\n};\n\n/**\n * Configuration for AI model based on provider\n */\nexport type AIModelConfig = {\n model: any; // Using any to handle different provider model types\n temperature?: number;\n};\n\n/**\n * Get AI model configuration based on the selected provider and options\n * This function handles the configuration for different AI providers\n *\n * @param options Configuration options including provider, API keys, models and temperature\n * @returns Configured AI model ready to use with generateText\n */\nexport const getAIConfig = async (\n options?: AIOptions\n): Promise<AIModelConfig | undefined> => {\n try {\n const {\n provider = AIProvider.OPENAI,\n model,\n temperature = 0.1,\n } = options ?? {};\n\n // Set default models based on provider\n let defaultModel: string;\n switch (provider) {\n case AIProvider.OPENAI:\n defaultModel = 'gpt-4o-mini';\n break;\n case AIProvider.ANTHROPIC:\n defaultModel = 'claude-3-haiku-20240307';\n break;\n case AIProvider.MISTRAL:\n defaultModel = 'mistral-large-latest';\n break;\n case AIProvider.DEEPSEEK:\n defaultModel = 'deepseek-coder';\n break;\n case AIProvider.GEMINI:\n defaultModel = 'gemini-1.5-pro';\n break;\n default:\n defaultModel = 'gpt-4o-mini';\n }\n\n // Check if API key is provided\n if (!options?.apiKey) {\n logger.error(`API key for ${provider} is missing`);\n return undefined;\n }\n\n // Handle each provider with appropriate model loading\n if (provider === AIProvider.OPENAI) {\n // OpenAI is imported statically at the top\n return {\n model: openai(model ?? defaultModel),\n temperature,\n };\n } else {\n // For other providers, attempt to load using require\n try {\n switch (provider) {\n case AIProvider.ANTHROPIC:\n try {\n return {\n model: anthropic(model ?? 
defaultModel),\n temperature,\n };\n } catch (err) {\n throw new Error(\n 'Failed to load @ai-sdk/anthropic. Please install it with: npm install @ai-sdk/anthropic'\n );\n }\n\n case AIProvider.MISTRAL:\n try {\n return {\n model: mistral(model ?? defaultModel),\n temperature,\n };\n } catch (err) {\n throw new Error(\n 'Failed to load @ai-sdk/mistral. Please install it with: npm install @ai-sdk/mistral'\n );\n }\n\n case AIProvider.DEEPSEEK:\n try {\n return {\n model: deepseek(model ?? defaultModel),\n temperature,\n };\n } catch (err) {\n throw new Error(\n 'Failed to load @ai-sdk/deepseek. Please install it with: npm install @ai-sdk/deepseek'\n );\n }\n\n case AIProvider.GEMINI:\n try {\n return {\n model: google(model ?? defaultModel),\n temperature,\n };\n } catch (err) {\n throw new Error(\n 'Failed to load @ai-sdk/google. Please install it with: npm install @ai-sdk/google'\n );\n }\n\n default:\n throw new Error(`Provider ${provider} not supported`);\n }\n } catch (error) {\n logger.error(`Error loading SDK for provider ${provider}:`, error);\n return undefined;\n }\n }\n } catch (error) {\n logger.error('Error configuring AI model:', error);\n return undefined;\n }\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,uBAA0B;AAC1B,sBAAyB;AACzB,oBAAuB;AACvB,qBAAwB;AACxB,oBAAuB;AACvB,oBAAuB;AA0DhB,IAAK,aAAL,kBAAKA,gBAAL;AACL,EAAAA,YAAA,YAAS;AACT,EAAAA,YAAA,eAAY;AACZ,EAAAA,YAAA,aAAU;AACV,EAAAA,YAAA,cAAW;AACX,EAAAA,YAAA,YAAS;AALC,SAAAA;AAAA,GAAA;AAmCL,MAAM,cAAc,OACzB,YACuC;AACvC,MAAI;AACF,UAAM;AAAA,MACJ,WAAW;AAAA,MACX;AAAA,MACA,cAAc;AAAA,IAChB,IAAI,WAAW,CAAC;AAGhB,QAAI;AACJ,YAAQ,UAAU;AAAA,MAChB,KAAK;AACH,uBAAe;AACf;AAAA,MACF,KAAK;AACH,uBAAe;AACf;AAAA,MACF,KAAK;AACH,uBAAe;AACf;AAAA,MACF,KAAK;AACH,uBAAe;AACf;AAAA,MACF,KAAK;AACH,uBAAe;AACf;AAAA,MACF;AACE,uBAAe;AAAA,IACnB;AAGA,QAAI,CAAC,SAAS,QAAQ;AACpB,2BAAO,MAAM,eAAe,QAAQ,aAAa;AACjD,aAAO;AAAA,IACT;AAGA,QAAI,aAAa,uBAAmB;AAElC,aAAO;AAAA,QACL,WAAO,sBAAO,SAAS,YAAY;AAAA,QACnC;AAAA,MACF;AAAA,IACF,OAAO;AAEL,UAAI;AACF,gBAAQ,UAAU;AAAA,UAChB,KAAK;AACH,gBAAI;AACF,qBAAO;AAAA,gBACL,WAAO,4BAAU,SAAS,YAAY;AAAA,gBACtC;AAAA,cACF;AAAA,YACF,SAAS,KAAK;AACZ,oBAAM,IAAI;AAAA,gBACR;AAAA,cACF;AAAA,YACF;AAAA,UAEF,KAAK;AACH,gBAAI;AACF,qBAAO;AAAA,gBACL,WAAO,wBAAQ,SAAS,YAAY;AAAA,gBACpC;AAAA,cACF;AAAA,YACF,SAAS,KAAK;AACZ,oBAAM,IAAI;AAAA,gBACR;AAAA,cACF;AAAA,YACF;AAAA,UAEF,KAAK;AACH,gBAAI;AACF,qBAAO;AAAA,gBACL,WAAO,0BAAS,SAAS,YAAY;AAAA,gBACrC;AAAA,cACF;AAAA,YACF,SAAS,KAAK;AACZ,oBAAM,IAAI;AAAA,gBACR;AAAA,cACF;AAAA,YACF;AAAA,UAEF,KAAK;AACH,gBAAI;AACF,qBAAO;AAAA,gBACL,WAAO,sBAAO,SAAS,YAAY;AAAA,gBACnC;AAAA,cACF;AAAA,YACF,SAAS,KAAK;AACZ,oBAAM,IAAI;AAAA,gBACR;AAAA,cACF;AAAA,YACF;AAAA,UAEF;AACE,kBAAM,IAAI,MAAM,YAAY,QAAQ,gBAAgB;AAAA,QACxD;AAAA,MACF,SAAS,OAAO;AACd,6BAAO,MAAM,kCAAkC,QAAQ,KAAK,KAAK;AACjE,eAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,yBAAO,MAAM,+BAA+B,KAAK;AACjD,WAAO;AAAA,EACT;AACF;","names":["AIProvider"]}
@@ -13,10 +13,11 @@ If you don't have enough information to answer the question, not answer using ex
 If your have a doubt about something, you should ask more question to the user.
 If a user stuggles with a question, and you cant find an answer, you can :
 
+- Mention that some specific bugs may not be documented yet. Encourage the user to share their solution in the chat. This helps our team analyze common issues and improve the bot's responses for future users.
 - suggest to the user to submit an issue on the Intlayer repository: https://github.com/aymericzip/intlayer/issues. Our team generally responds to issues within a day. Notice that your issue will also probably help other users.
 - the multiple example of implementations of Intlayer in the repository: https://github.com/aymericzip/intlayer/tree/main/examples
 
-You can also suggest when it's
+You can also suggest (when it's appropriate), to star the Intlayer repository: https://github.com/aymericzip/intlayer to support the project. It really help us.
 
 Here some useful urls to know more about Intlayer:
 https://intlayer.org/docs
@@ -37,11 +37,13 @@ module.exports = __toCommonJS(askDocQuestion_exports);
 var import_blog = require("@intlayer/blog");
 var import_config = require("@intlayer/config");
 var import_docs = require("@intlayer/docs");
+var import_ai = require("ai");
 var import_dotenv = __toESM(require("dotenv"), 1);
 var import_fs = __toESM(require("fs"), 1);
 var import_openai = require("openai");
 var import_path = require("path");
 var import_url = require("url");
+var import_aiSdk = require('../aiSdk.cjs');
 var import_embeddings = __toESM(require('./embeddings.json'), 1);
 const import_meta = {};
 const vectorStore = [];
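Two requires are added here: the ai package, whose streamText is used further down in this file, and the shared aiSdk helper introduced earlier in this diff. For reference, a sketch of how the same additions would read as static ESM imports (assumed form, mirroring the CJS requires above):

// Assumed ESM counterpart of the two added requires.
import { streamText } from 'ai';
import { AIProvider, getAIConfig } from '../aiSdk.mjs';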
@@ -78,10 +80,13 @@ const chunkText = (text) => {
 };
 const generateEmbedding = async (text) => {
   try {
-    const openai = new import_openai.OpenAI({ apiKey: process.env.OPENAI_API_KEY });
-    const response = await openai.embeddings.create({
+    await (0, import_aiSdk.getAIConfig)({
+      provider: import_aiSdk.AIProvider.OPENAI,
+      apiKey: process.env.OPENAI_API_KEY
+    });
+    const openaiClient = new import_openai.OpenAI({ apiKey: process.env.OPENAI_API_KEY });
+    const response = await openaiClient.embeddings.create({
       model: EMBEDDING_MODEL,
-      // Specify the embedding model
       input: text
     });
     return response.data[0].embedding;
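The embeddings produced here feed searchChunkReference, which ranks stored chunks against the query by cosine similarity. That helper appears verbatim in the source embedded in the sourcemap at the end of this diff; reproduced as a standalone sketch:

// Cosine similarity between two embedding vectors, as used to
// rank document chunks against a user query.
const cosineSimilarity = (vecA: number[], vecB: number[]): number => {
  const dotProduct = vecA.reduce((sum, a, idx) => sum + a * vecB[idx], 0);
  const magnitudeA = Math.sqrt(vecA.reduce((sum, a) => sum + a * a, 0));
  const magnitudeB = Math.sqrt(vecB.reduce((sum, b) => sum + b * b, 0));
  return dotProduct / (magnitudeA * magnitudeB);
};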
@@ -163,38 +168,38 @@ const initPrompt = {
   content: CHAT_GPT_PROMPT
 };
 const askDocQuestion = async (messages, options) => {
-  const openai = new import_openai.OpenAI({ apiKey: process.env.OPENAI_API_KEY });
-  const userMessages = messages.filter((message) => message.role === "user");
-  const query = userMessages.map((message) => `- ${message.content}`).join("\n");
+  const query = messages.map((message) => `- ${message.content}`).join("\n");
   const relevantFilesReferences = await searchChunkReference(query);
-  const messagesList = [
-    {
-      ...initPrompt,
-      content: initPrompt.content.replace(
-        "{{relevantFilesReferences}}",
-        relevantFilesReferences.length === 0 ? "Not relevant file found related to the question." : relevantFilesReferences.map(
-          (doc, idx) => `[Chunk ${idx}] docKey = "${doc.fileKey}":
+  const systemPrompt = initPrompt.content.replace(
+    "{{relevantFilesReferences}}",
+    relevantFilesReferences.length === 0 ? "Not relevant file found related to the question." : relevantFilesReferences.map(
+      (doc, idx) => `[Chunk ${idx}] docKey = "${doc.fileKey}":
 ${doc.content}`
-        ).join("\n\n")
-        // Insert relevant docs into the prompt
-      ),
-    },
+    ).join("\n\n")
+    // Insert relevant docs into the prompt
+  );
+  const aiMessages = [
+    { role: "system", content: systemPrompt },
     ...messages
-    // Include all user and assistant messages
   ];
-  const response = await openai.chat.completions.create({
+  const aiConfig = await (0, import_aiSdk.getAIConfig)({
+    provider: import_aiSdk.AIProvider.OPENAI,
     model: MODEL,
     temperature: MODEL_TEMPERATURE,
-    messages: messagesList,
-    stream: true
+    apiKey: process.env.OPENAI_API_KEY
   });
+  if (!aiConfig) {
+    throw new Error("Failed to initialize AI configuration");
+  }
   let fullResponse = "";
-  for await (const chunk of response) {
-    const content = chunk.choices[0]?.delta?.content || "";
-    if (content) {
-      fullResponse += content;
-      options?.onMessage?.(content);
-    }
+  const stream = (0, import_ai.streamText)({
+    model: aiConfig.model,
+    temperature: aiConfig.temperature,
+    messages: aiMessages
+  });
+  for await (const chunk of stream.textStream) {
+    fullResponse += chunk;
+    options?.onMessage?.(chunk);
   }
   const relatedFiles = [
     ...new Set(relevantFilesReferences.map((doc) => doc.fileKey))
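The net effect of this hunk: the direct openai.chat.completions.create stream is replaced by streamText from the ai package, configured through getAIConfig, with each text chunk forwarded to the optional onMessage callback. A usage sketch based on the types in the embedded source (the question text is illustrative):

import { askDocQuestion } from './askDocQuestion';

const main = async () => {
  const result = await askDocQuestion(
    [{ role: 'user', content: 'How do I configure Intlayer locales?' }],
    // onMessage receives each streamed text chunk as it arrives
    { onMessage: (chunk) => process.stdout.write(chunk) }
  );
  console.log('\nRelated docs:', result.relatedFiles);
};

main();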
@@ -1 +1 @@
{"version":3,"sources":["../../../../../src/utils/AI/askDocQuestion/askDocQuestion.ts"],"sourcesContent":["import { getBlogs } from '@intlayer/blog';\nimport { Locales } from '@intlayer/config';\nimport { getDocs, getFequentQuestions } from '@intlayer/docs';\nimport dotenv from 'dotenv';\nimport fs, { readFileSync } from 'fs';\nimport { OpenAI } from 'openai';\nimport { dirname, join } from 'path';\nimport { fileURLToPath } from 'url';\nimport embeddingsList from './embeddings.json' with { type: 'json' };\n\ntype VectorStoreEl = {\n fileKey: string;\n chunkNumber: number;\n content: string;\n embedding: number[];\n};\n\n/**\n * Simple in-memory vector store to hold document embeddings and their content.\n * Each entry contains:\n * - fileKey: A unique key identifying the file\n * - chunkNumber: The number of the chunk within the document\n * - content: The chunk content\n * - embedding: The numerical embedding vector for the chunk\n */\nconst vectorStore: VectorStoreEl[] = [];\n\n// Constants defining OpenAI's token and character limits\nconst MODEL: OpenAI.Chat.ChatModel = 'gpt-4o-2024-11-20'; // Model to use for chat completions\nconst MODEL_TEMPERATURE = 0.1; // Temperature to use for chat completions\nconst EMBEDDING_MODEL: OpenAI.Embeddings.EmbeddingModel =\n 'text-embedding-3-large'; // Model to use for embedding generation\nconst OVERLAP_TOKENS = 200; // Number of tokens to overlap between chunks\nconst MAX_CHUNK_TOKENS = 800; // Maximum number of tokens per chunk\nconst CHAR_BY_TOKEN = 4.15; // Approximate pessimistically the number of characters per token // Can use `tiktoken` or other tokenizers to calculate it more precisely\nconst MAX_CHARS = MAX_CHUNK_TOKENS * CHAR_BY_TOKEN;\nconst OVERLAP_CHARS = OVERLAP_TOKENS * CHAR_BY_TOKEN;\nconst MAX_RELEVANT_CHUNKS_NB = 8; // Maximum number of relevant chunks to attach to chatGPT context\nconst MIN_RELEVANT_CHUNKS_SIMILARITY = 0.25; // Minimum similarity required for a chunk to be considered relevant\n\n/**\n * Splits a given text into chunks ensuring each chunk does not exceed MAX_CHARS.\n * @param text - The input text to split.\n * @returns - Array of text chunks.\n */\nconst chunkText = (text: string): string[] => {\n const chunks: string[] = [];\n let start = 0;\n\n while (start < text.length) {\n let end = Math.min(start + MAX_CHARS, text.length);\n\n // Ensure we don't cut words in the middle (find nearest space)\n if (end < text.length) {\n const lastSpace = text.lastIndexOf(' ', end);\n if (lastSpace > start) {\n end = lastSpace;\n }\n }\n\n chunks.push(text.substring(start, end));\n\n // Move start forward correctly\n const nextStart = end - OVERLAP_CHARS;\n if (nextStart <= start) {\n // Prevent infinite loop if overlap is too large\n start = end;\n } else {\n start = nextStart;\n }\n }\n\n return chunks;\n};\n\n/**\n * Generates an embedding for a given text using OpenAI's embedding API.\n * Trims the text if it exceeds the maximum allowed characters.\n *\n * @param text - The input text to generate an embedding for\n * @returns The embedding vector as a number array\n */\nconst generateEmbedding = async (text: string): Promise<number[]> => {\n try {\n const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });\n const response = await openai.embeddings.create({\n model: EMBEDDING_MODEL, // Specify the embedding model\n input: text,\n });\n\n return response.data[0].embedding; // Return the generated embedding\n } catch (error) {\n console.error('Error generating embedding:', error);\n return [];\n }\n};\n\n/**\n * 
Calculates the cosine similarity between two vectors.\n * Cosine similarity measures the cosine of the angle between two vectors in an inner product space.\n * Used to determine the similarity between chunks of text.\n *\n * @param vecA - The first vector\n * @param vecB - The second vector\n * @returns The cosine similarity score\n */\nconst cosineSimilarity = (vecA: number[], vecB: number[]): number => {\n // Calculate the dot product of the two vectors\n const dotProduct = vecA.reduce((sum, a, idx) => sum + a * vecB[idx], 0);\n\n // Calculate the magnitude (Euclidean norm) of each vector\n const magnitudeA = Math.sqrt(vecA.reduce((sum, a) => sum + a * a, 0));\n const magnitudeB = Math.sqrt(vecB.reduce((sum, b) => sum + b * b, 0));\n\n // Compute and return the cosine similarity\n return dotProduct / (magnitudeA * magnitudeB);\n};\n\n/**\n * Indexes all Markdown documents by generating embeddings for each chunk and storing them in memory.\n * Also updates the embeddings.json file if new embeddings are generated.\n */\nexport const indexMarkdownFiles = async (): Promise<void> => {\n const env = process.env.NODE_ENV;\n dotenv.config({\n path: [`.env.${env}.local`, `.env.${env}`, '.env.local', '.env'],\n });\n\n // Retrieve documentation and blog posts in English locale\n const frequentQuestions = getFequentQuestions();\n const docs = getDocs(Locales.ENGLISH);\n const blogs = getBlogs(Locales.ENGLISH);\n\n let result: Record<string, number[]> = {}; // Object to hold updated embeddings\n\n const files = { ...docs, ...blogs, ...frequentQuestions }; // Combine docs and blogs into a single object\n\n // Iterate over each file key (identifier) in the combined files\n for (const fileKey of Object.keys(files)) {\n // Split the document into chunks based on headings\n const fileChunks = chunkText(files[fileKey as keyof typeof files]);\n\n // Iterate over each chunk within the current file\n for (const chunkIndex of Object.keys(fileChunks)) {\n const chunkNumber = Number(chunkIndex) + 1; // Chunk number starts at 1\n const chunksNumber = fileChunks.length;\n\n const fileChunk = fileChunks[\n chunkIndex as keyof typeof fileChunks\n ] as string;\n\n const embeddingKeyName = `${fileKey}/chunk_${chunkNumber}`; // Unique key for the chunk\n\n // Retrieve precomputed embedding if available\n const docEmbedding = embeddingsList[\n embeddingKeyName as keyof typeof embeddingsList\n ] as number[] | undefined;\n\n let embedding = docEmbedding; // Use existing embedding if available\n\n if (!embedding) {\n embedding = await generateEmbedding(fileChunk); // Generate embedding if not present\n }\n\n // Update the result object with the new embedding\n result = { ...result, [embeddingKeyName]: embedding };\n\n // Store the embedding and content in the in-memory vector store\n vectorStore.push({\n fileKey,\n chunkNumber,\n embedding,\n content: fileChunk,\n });\n\n console.info(`- Indexed: ${embeddingKeyName}/${chunksNumber}`);\n }\n }\n\n if (process.env.NODE_ENV === 'development') {\n try {\n // Compare the newly generated embeddings with existing ones\n if (JSON.stringify(result) !== JSON.stringify(embeddingsList)) {\n // If there are new embeddings, save them to embeddings.json\n fs.writeFileSync(\n 'src/utils/AI/askDocQuestion/embeddings.json',\n JSON.stringify(result, null, 2)\n );\n }\n } catch (error) {\n console.error(error); // Log any errors during the file write process\n }\n }\n};\n\n// Automatically index Markdown files\nindexMarkdownFiles();\n\n/**\n * Searches the indexed documents for the most 
relevant chunks based on a query.\n * Utilizes cosine similarity to find the closest matching embeddings.\n *\n * @param query - The search query provided by the user\n * @returns An array of the top matching document chunks' content\n */\nexport const searchChunkReference = async (\n query: string\n): Promise<VectorStoreEl[]> => {\n // Generate an embedding for the user's query\n const queryEmbedding = await generateEmbedding(query);\n\n // Calculate similarity scores between the query embedding and each document's embedding\n const results = vectorStore\n .map((chunk) => ({\n ...chunk,\n similarity: cosineSimilarity(queryEmbedding, chunk.embedding), // Add similarity score to each doc\n }))\n .filter((chunk) => chunk.similarity > MIN_RELEVANT_CHUNKS_SIMILARITY) // Filter out documents with low similarity scores\n .sort((a, b) => b.similarity - a.similarity) // Sort documents by highest similarity first\n .slice(0, MAX_RELEVANT_CHUNKS_NB); // Select the top 6 most similar documents\n\n // Return the content of the top matching documents\n return results;\n};\n\n// Define the structure of messages used in chat completions\nexport type ChatCompletionRequestMessage = {\n role: 'system' | 'user' | 'assistant'; // The role of the message sender\n content: string; // The text content of the message\n};\n\n/**\n * Reads the content of a file synchronously.\n *\n * @function\n * @param relativeFilePath - The relative or absolute path to the target file.\n * @returns The entire contents of the specified file as a UTF-8 encoded string.\n */\nconst getFileContent = (relativeFilePath: string): string => {\n const __dirname = dirname(fileURLToPath(import.meta.url));\n const absolutePath = join(__dirname, relativeFilePath);\n const fileContent = readFileSync(absolutePath, 'utf-8');\n return fileContent;\n};\n\nconst CHAT_GPT_PROMPT = getFileContent('./PROMPT.md');\n\n// Initial prompt configuration for the chatbot\nexport const initPrompt: ChatCompletionRequestMessage = {\n role: 'system',\n content: CHAT_GPT_PROMPT,\n};\n\nexport type AskDocQuestionResult = {\n response: string;\n relatedFiles: string[];\n};\n\nexport type AskDocQuestionOptions = {\n onMessage?: (chunk: string) => void;\n};\n\n/**\n * Handles the \"Ask a question\" endpoint in an Express.js route.\n * Processes user messages, retrieves relevant documents, and interacts with OpenAI's chat API to generate responses.\n *\n * @param messages - An array of chat messages from the user and assistant\n * @returns The assistant's response as a string\n */\nexport const askDocQuestion = async (\n messages: ChatCompletionRequestMessage[],\n options?: AskDocQuestionOptions\n): Promise<AskDocQuestionResult> => {\n const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });\n\n // Assistant's response are filtered out otherwise the chatbot will be stuck in a self-referential loop\n // Note that the embedding precision will be lowered if the user change of context in the chat\n const userMessages = messages.filter((message) => message.role === 'user');\n\n // Format the user's question to keep only the relevant keywords\n const query = userMessages\n .map((message) => `- ${message.content}`)\n .join('\\n');\n\n // 1) Find relevant documents based on the user's question\n const relevantFilesReferences = await searchChunkReference(query);\n\n // 2) Integrate the relevant documents into the initial system prompt\n const messagesList: ChatCompletionRequestMessage[] = [\n {\n ...initPrompt,\n content: initPrompt.content.replace(\n 
'{{relevantFilesReferences}}',\n relevantFilesReferences.length === 0\n ? 'Not relevant file found related to the question.'\n : relevantFilesReferences\n .map(\n (doc, idx) =>\n `[Chunk ${idx}] docKey = \"${doc.fileKey}\":\\n${doc.content}`\n )\n .join('\\n\\n') // Insert relevant docs into the prompt\n ),\n },\n ...messages, // Include all user and assistant messages\n ];\n\n // 3) Send the compiled messages to OpenAI's Chat Completion API (using a specific model)\n const response = await openai.chat.completions.create({\n model: MODEL,\n temperature: MODEL_TEMPERATURE,\n messages: messagesList,\n stream: true,\n });\n\n let fullResponse = '';\n for await (const chunk of response) {\n const content = chunk.choices[0]?.delta?.content || '';\n if (content) {\n fullResponse += content;\n options?.onMessage?.(content);\n }\n }\n\n // 4) Extract unique related files\n const relatedFiles = [\n ...new Set(relevantFilesReferences.map((doc) => doc.fileKey)),\n ];\n\n // 5) Return the assistant's response to the user\n return {\n response: fullResponse ?? 'Error: No result found',\n relatedFiles,\n };\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAyB;AACzB,oBAAwB;AACxB,kBAA6C;AAC7C,oBAAmB;AACnB,gBAAiC;AACjC,oBAAuB;AACvB,kBAA8B;AAC9B,iBAA8B;AAC9B,wBAA2B;AAR3B;AAyBA,MAAM,cAA+B,CAAC;AAGtC,MAAM,QAA+B;AACrC,MAAM,oBAAoB;AAC1B,MAAM,kBACJ;AACF,MAAM,iBAAiB;AACvB,MAAM,mBAAmB;AACzB,MAAM,gBAAgB;AACtB,MAAM,YAAY,mBAAmB;AACrC,MAAM,gBAAgB,iBAAiB;AACvC,MAAM,yBAAyB;AAC/B,MAAM,iCAAiC;AAOvC,MAAM,YAAY,CAAC,SAA2B;AAC5C,QAAM,SAAmB,CAAC;AAC1B,MAAI,QAAQ;AAEZ,SAAO,QAAQ,KAAK,QAAQ;AAC1B,QAAI,MAAM,KAAK,IAAI,QAAQ,WAAW,KAAK,MAAM;AAGjD,QAAI,MAAM,KAAK,QAAQ;AACrB,YAAM,YAAY,KAAK,YAAY,KAAK,GAAG;AAC3C,UAAI,YAAY,OAAO;AACrB,cAAM;AAAA,MACR;AAAA,IACF;AAEA,WAAO,KAAK,KAAK,UAAU,OAAO,GAAG,CAAC;AAGtC,UAAM,YAAY,MAAM;AACxB,QAAI,aAAa,OAAO;AAEtB,cAAQ;AAAA,IACV,OAAO;AACL,cAAQ;AAAA,IACV;AAAA,EACF;AAEA,SAAO;AACT;AASA,MAAM,oBAAoB,OAAO,SAAoC;AACnE,MAAI;AACF,UAAM,SAAS,IAAI,qBAAO,EAAE,QAAQ,QAAQ,IAAI,eAAe,CAAC;AAChE,UAAM,WAAW,MAAM,OAAO,WAAW,OAAO;AAAA,MAC9C,OAAO;AAAA;AAAA,MACP,OAAO;AAAA,IACT,CAAC;AAED,WAAO,SAAS,KAAK,CAAC,EAAE;AAAA,EAC1B,SAAS,OAAO;AACd,YAAQ,MAAM,+BAA+B,KAAK;AAClD,WAAO,CAAC;AAAA,EACV;AACF;AAWA,MAAM,mBAAmB,CAAC,MAAgB,SAA2B;AAEnE,QAAM,aAAa,KAAK,OAAO,CAAC,KAAK,GAAG,QAAQ,MAAM,IAAI,KAAK,GAAG,GAAG,CAAC;AAGtE,QAAM,aAAa,KAAK,KAAK,KAAK,OAAO,CAAC,KAAK,MAAM,MAAM,IAAI,GAAG,CAAC,CAAC;AACpE,QAAM,aAAa,KAAK,KAAK,KAAK,OAAO,CAAC,KAAK,MAAM,MAAM,IAAI,GAAG,CAAC,CAAC;AAGpE,SAAO,cAAc,aAAa;AACpC;AAMO,MAAM,qBAAqB,YAA2B;AAC3D,QAAM,MAAM,QAAQ,IAAI;AACxB,gBAAAA,QAAO,OAAO;AAAA,IACZ,MAAM,CAAC,QAAQ,GAAG,UAAU,QAAQ,GAAG,IAAI,cAAc,MAAM;AAAA,EACjE,CAAC;AAGD,QAAM,wBAAoB,iCAAoB;AAC9C,QAAM,WAAO,qBAAQ,sBAAQ,OAAO;AACpC,QAAM,YAAQ,sBAAS,sBAAQ,OAAO;AAEtC,MAAI,SAAmC,CAAC;AAExC,QAAM,QAAQ,EAAE,GAAG,MAAM,GAAG,OAAO,GAAG,kBAAkB;AAGxD,aAAW,WAAW,OAAO,KAAK,KAAK,GAAG;AAExC,UAAM,aAAa,UAAU,MAAM,OAA6B,CAAC;AAGjE,eAAW,cAAc,OAAO,KAAK,UAAU,GAAG;AAChD,YAAM,cAAc,OAAO,UAAU,IAAI;AACzC,YAAM,eAAe,WAAW;AAEhC,YAAM,YAAY,WAChB,UACF;AAEA,YAAM,mBAAmB,GAAG,OAAO,UAAU,WAAW;AAGxD,YAAM,eAAe,kBAAAC,QACnB,gBACF;AAEA,UAAI,YAAY;AAEhB,UAAI,CAAC,WAAW;AACd,oBAAY,MAAM,kBAAkB,SAAS;AAAA,MAC/C;AAGA,eAAS,EAAE,GAAG,QAAQ,CAAC,gBAAgB,GAAG,UAAU;AAGpD,kBAAY,KAAK;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,QACA,SAAS;AAAA,MACX,CAAC;AAED,cAAQ,KAAK,cAAc,gBAAgB,IAAI,YAAY,EAAE;AAAA,IAC/D;AAAA,EACF;AAEA,MAAI,QAAQ,IAAI,aAAa,eAAe;AAC1C,QAAI;AAEF,UAAI,KAAK,UAAU,MAAM,MAAM,KAAK,UAAU,kBAAAA,OAAc,GAAG;AAE7D,kBAAAC,QAAG;AAAA,UACD;AAAA,UACA,KAAK,UAAU,QAAQ,MAAM,CAAC;AAAA,QAChC;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AAC
d,cAAQ,MAAM,KAAK;AAAA,IACrB;AAAA,EACF;AACF;AAGA,mBAAmB;AASZ,MAAM,uBAAuB,OAClC,UAC6B;AAE7B,QAAM,iBAAiB,MAAM,kBAAkB,KAAK;AAGpD,QAAM,UAAU,YACb,IAAI,CAAC,WAAW;AAAA,IACf,GAAG;AAAA,IACH,YAAY,iBAAiB,gBAAgB,MAAM,SAAS;AAAA;AAAA,EAC9D,EAAE,EACD,OAAO,CAAC,UAAU,MAAM,aAAa,8BAA8B,EACnE,KAAK,CAAC,GAAG,MAAM,EAAE,aAAa,EAAE,UAAU,EAC1C,MAAM,GAAG,sBAAsB;AAGlC,SAAO;AACT;AAeA,MAAM,iBAAiB,CAAC,qBAAqC;AAC3D,QAAM,gBAAY,yBAAQ,0BAAc,YAAY,GAAG,CAAC;AACxD,QAAM,mBAAe,kBAAK,WAAW,gBAAgB;AACrD,QAAM,kBAAc,wBAAa,cAAc,OAAO;AACtD,SAAO;AACT;AAEA,MAAM,kBAAkB,eAAe,aAAa;AAG7C,MAAM,aAA2C;AAAA,EACtD,MAAM;AAAA,EACN,SAAS;AACX;AAkBO,MAAM,iBAAiB,OAC5B,UACA,YACkC;AAClC,QAAM,SAAS,IAAI,qBAAO,EAAE,QAAQ,QAAQ,IAAI,eAAe,CAAC;AAIhE,QAAM,eAAe,SAAS,OAAO,CAAC,YAAY,QAAQ,SAAS,MAAM;AAGzE,QAAM,QAAQ,aACX,IAAI,CAAC,YAAY,KAAK,QAAQ,OAAO,EAAE,EACvC,KAAK,IAAI;AAGZ,QAAM,0BAA0B,MAAM,qBAAqB,KAAK;AAGhE,QAAM,eAA+C;AAAA,IACnD;AAAA,MACE,GAAG;AAAA,MACH,SAAS,WAAW,QAAQ;AAAA,QAC1B;AAAA,QACA,wBAAwB,WAAW,IAC/B,qDACA,wBACG;AAAA,UACC,CAAC,KAAK,QACJ,UAAU,GAAG,eAAe,IAAI,OAAO;AAAA,EAAO,IAAI,OAAO;AAAA,QAC7D,EACC,KAAK,MAAM;AAAA;AAAA,MACpB;AAAA,IACF;AAAA,IACA,GAAG;AAAA;AAAA,EACL;AAGA,QAAM,WAAW,MAAM,OAAO,KAAK,YAAY,OAAO;AAAA,IACpD,OAAO;AAAA,IACP,aAAa;AAAA,IACb,UAAU;AAAA,IACV,QAAQ;AAAA,EACV,CAAC;AAED,MAAI,eAAe;AACnB,mBAAiB,SAAS,UAAU;AAClC,UAAM,UAAU,MAAM,QAAQ,CAAC,GAAG,OAAO,WAAW;AACpD,QAAI,SAAS;AACX,sBAAgB;AAChB,eAAS,YAAY,OAAO;AAAA,IAC9B;AAAA,EACF;AAGA,QAAM,eAAe;AAAA,IACnB,GAAG,IAAI,IAAI,wBAAwB,IAAI,CAAC,QAAQ,IAAI,OAAO,CAAC;AAAA,EAC9D;AAGA,SAAO;AAAA,IACL,UAAU,gBAAgB;AAAA,IAC1B;AAAA,EACF;AACF;","names":["dotenv","embeddingsList","fs"]}
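For anyone reading the escaped sourcesContent above rather than the original file: the retrieval step of askDocQuestion ranks every stored chunk by cosine similarity against the query embedding, drops anything at or below the similarity floor, and keeps the top MAX_RELEVANT_CHUNKS_NB matches (8 in the 5.5.0-canary.0 source, so the inline comment reading "Select the top 6 most similar documents" looks stale). A minimal standalone sketch of that ranking follows; the VectorStoreEl shape and constants are copied from the embedded source, while rankChunks is a name introduced here for illustration (the source inlines this logic in searchChunkReference):

```ts
type VectorStoreEl = {
  fileKey: string;
  chunkNumber: number;
  content: string;
  embedding: number[];
};

const MAX_RELEVANT_CHUNKS_NB = 8; // top-k cap, as declared in the embedded source
const MIN_RELEVANT_CHUNKS_SIMILARITY = 0.25; // similarity floor

// Cosine similarity: dot(A, B) / (|A| * |B|); assumes equal-length vectors
const cosineSimilarity = (vecA: number[], vecB: number[]): number => {
  const dotProduct = vecA.reduce((sum, a, idx) => sum + a * vecB[idx], 0);
  const magnitudeA = Math.sqrt(vecA.reduce((sum, a) => sum + a * a, 0));
  const magnitudeB = Math.sqrt(vecB.reduce((sum, b) => sum + b * b, 0));
  return dotProduct / (magnitudeA * magnitudeB);
};

// Rank the store against a query embedding and keep the best matches:
// score every chunk, drop low-similarity hits, sort descending, cap at top-k
const rankChunks = (
  queryEmbedding: number[],
  vectorStore: VectorStoreEl[]
): (VectorStoreEl & { similarity: number })[] =>
  vectorStore
    .map((chunk) => ({
      ...chunk,
      similarity: cosineSimilarity(queryEmbedding, chunk.embedding),
    }))
    .filter((chunk) => chunk.similarity > MIN_RELEVANT_CHUNKS_SIMILARITY)
    .sort((a, b) => b.similarity - a.similarity)
    .slice(0, MAX_RELEVANT_CHUNKS_NB);
```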
+
{"version":3,"sources":["../../../../../src/utils/AI/askDocQuestion/askDocQuestion.ts"],"sourcesContent":["import { getBlogs } from '@intlayer/blog';\nimport { Locales } from '@intlayer/config';\nimport { getDocs, getFequentQuestions } from '@intlayer/docs';\nimport { streamText } from 'ai';\nimport dotenv from 'dotenv';\nimport fs, { readFileSync } from 'fs';\nimport { OpenAI } from 'openai';\nimport { dirname, join } from 'path';\nimport { fileURLToPath } from 'url';\nimport { AIProvider, getAIConfig } from '../aiSdk';\nimport embeddingsList from './embeddings.json' with { type: 'json' };\n\ntype VectorStoreEl = {\n fileKey: string;\n chunkNumber: number;\n content: string;\n embedding: number[];\n};\n\n/**\n * Simple in-memory vector store to hold document embeddings and their content.\n * Each entry contains:\n * - fileKey: A unique key identifying the file\n * - chunkNumber: The number of the chunk within the document\n * - content: The chunk content\n * - embedding: The numerical embedding vector for the chunk\n */\nconst vectorStore: VectorStoreEl[] = [];\n\n// Constants defining model and settings\nconst MODEL = 'gpt-4o-2024-11-20'; // Model to use for chat completions\nconst MODEL_TEMPERATURE = 0.1; // Temperature to use for chat completions\nconst EMBEDDING_MODEL = 'text-embedding-3-large'; // Model to use for embedding generation\nconst OVERLAP_TOKENS = 200; // Number of tokens to overlap between chunks\nconst MAX_CHUNK_TOKENS = 800; // Maximum number of tokens per chunk\nconst CHAR_BY_TOKEN = 4.15; // Approximate pessimistically the number of characters per token // Can use `tiktoken` or other tokenizers to calculate it more precisely\nconst MAX_CHARS = MAX_CHUNK_TOKENS * CHAR_BY_TOKEN;\nconst OVERLAP_CHARS = OVERLAP_TOKENS * CHAR_BY_TOKEN;\nconst MAX_RELEVANT_CHUNKS_NB = 8; // Maximum number of relevant chunks to attach to chatGPT context\nconst MIN_RELEVANT_CHUNKS_SIMILARITY = 0.25; // Minimum similarity required for a chunk to be considered relevant\n\n/**\n * Splits a given text into chunks ensuring each chunk does not exceed MAX_CHARS.\n * @param text - The input text to split.\n * @returns - Array of text chunks.\n */\nconst chunkText = (text: string): string[] => {\n const chunks: string[] = [];\n let start = 0;\n\n while (start < text.length) {\n let end = Math.min(start + MAX_CHARS, text.length);\n\n // Ensure we don't cut words in the middle (find nearest space)\n if (end < text.length) {\n const lastSpace = text.lastIndexOf(' ', end);\n if (lastSpace > start) {\n end = lastSpace;\n }\n }\n\n chunks.push(text.substring(start, end));\n\n // Move start forward correctly\n const nextStart = end - OVERLAP_CHARS;\n if (nextStart <= start) {\n // Prevent infinite loop if overlap is too large\n start = end;\n } else {\n start = nextStart;\n }\n }\n\n return chunks;\n};\n\n/**\n * Generates an embedding for a given text using OpenAI's embedding API.\n * Trims the text if it exceeds the maximum allowed characters.\n *\n * @param text - The input text to generate an embedding for\n * @returns The embedding vector as a number array\n */\nconst generateEmbedding = async (text: string): Promise<number[]> => {\n try {\n // Set API key through the SDK configuration\n await getAIConfig({\n provider: AIProvider.OPENAI,\n apiKey: process.env.OPENAI_API_KEY,\n });\n\n const openaiClient = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });\n\n const response = await openaiClient.embeddings.create({\n model: EMBEDDING_MODEL,\n input: text,\n });\n\n return response.data[0].embedding;\n } 
catch (error) {\n console.error('Error generating embedding:', error);\n return [];\n }\n};\n\n/**\n * Calculates the cosine similarity between two vectors.\n * Cosine similarity measures the cosine of the angle between two vectors in an inner product space.\n * Used to determine the similarity between chunks of text.\n *\n * @param vecA - The first vector\n * @param vecB - The second vector\n * @returns The cosine similarity score\n */\nconst cosineSimilarity = (vecA: number[], vecB: number[]): number => {\n // Calculate the dot product of the two vectors\n const dotProduct = vecA.reduce((sum, a, idx) => sum + a * vecB[idx], 0);\n\n // Calculate the magnitude (Euclidean norm) of each vector\n const magnitudeA = Math.sqrt(vecA.reduce((sum, a) => sum + a * a, 0));\n const magnitudeB = Math.sqrt(vecB.reduce((sum, b) => sum + b * b, 0));\n\n // Compute and return the cosine similarity\n return dotProduct / (magnitudeA * magnitudeB);\n};\n\n/**\n * Indexes all Markdown documents by generating embeddings for each chunk and storing them in memory.\n * Also updates the embeddings.json file if new embeddings are generated.\n */\nexport const indexMarkdownFiles = async (): Promise<void> => {\n const env = process.env.NODE_ENV;\n dotenv.config({\n path: [`.env.${env}.local`, `.env.${env}`, '.env.local', '.env'],\n });\n\n // Retrieve documentation and blog posts in English locale\n const frequentQuestions = getFequentQuestions();\n const docs = getDocs(Locales.ENGLISH);\n const blogs = getBlogs(Locales.ENGLISH);\n\n let result: Record<string, number[]> = {}; // Object to hold updated embeddings\n\n const files = { ...docs, ...blogs, ...frequentQuestions }; // Combine docs and blogs into a single object\n\n // Iterate over each file key (identifier) in the combined files\n for (const fileKey of Object.keys(files)) {\n // Split the document into chunks based on headings\n const fileChunks = chunkText(files[fileKey as keyof typeof files]);\n\n // Iterate over each chunk within the current file\n for (const chunkIndex of Object.keys(fileChunks)) {\n const chunkNumber = Number(chunkIndex) + 1; // Chunk number starts at 1\n const chunksNumber = fileChunks.length;\n\n const fileChunk = fileChunks[\n chunkIndex as keyof typeof fileChunks\n ] as string;\n\n const embeddingKeyName = `${fileKey}/chunk_${chunkNumber}`; // Unique key for the chunk\n\n // Retrieve precomputed embedding if available\n const docEmbedding = embeddingsList[\n embeddingKeyName as keyof typeof embeddingsList\n ] as number[] | undefined;\n\n let embedding = docEmbedding; // Use existing embedding if available\n\n if (!embedding) {\n embedding = await generateEmbedding(fileChunk); // Generate embedding if not present\n }\n\n // Update the result object with the new embedding\n result = { ...result, [embeddingKeyName]: embedding };\n\n // Store the embedding and content in the in-memory vector store\n vectorStore.push({\n fileKey,\n chunkNumber,\n embedding,\n content: fileChunk,\n });\n\n console.info(`- Indexed: ${embeddingKeyName}/${chunksNumber}`);\n }\n }\n\n if (process.env.NODE_ENV === 'development') {\n try {\n // Compare the newly generated embeddings with existing ones\n if (JSON.stringify(result) !== JSON.stringify(embeddingsList)) {\n // If there are new embeddings, save them to embeddings.json\n fs.writeFileSync(\n 'src/utils/AI/askDocQuestion/embeddings.json',\n JSON.stringify(result, null, 2)\n );\n }\n } catch (error) {\n console.error(error); // Log any errors during the file write process\n }\n }\n};\n\n// Automatically 
index Markdown files\nindexMarkdownFiles();\n\n/**\n * Searches the indexed documents for the most relevant chunks based on a query.\n * Utilizes cosine similarity to find the closest matching embeddings.\n *\n * @param query - The search query provided by the user\n * @returns An array of the top matching document chunks' content\n */\nexport const searchChunkReference = async (\n query: string\n): Promise<VectorStoreEl[]> => {\n // Generate an embedding for the user's query\n const queryEmbedding = await generateEmbedding(query);\n\n // Calculate similarity scores between the query embedding and each document's embedding\n const results = vectorStore\n .map((chunk) => ({\n ...chunk,\n similarity: cosineSimilarity(queryEmbedding, chunk.embedding), // Add similarity score to each doc\n }))\n .filter((chunk) => chunk.similarity > MIN_RELEVANT_CHUNKS_SIMILARITY) // Filter out documents with low similarity scores\n .sort((a, b) => b.similarity - a.similarity) // Sort documents by highest similarity first\n .slice(0, MAX_RELEVANT_CHUNKS_NB); // Select the top 6 most similar documents\n\n // Return the content of the top matching documents\n return results;\n};\n\n// Define the structure of messages used in chat completions\nexport type ChatCompletionRequestMessage = {\n role: 'system' | 'user' | 'assistant'; // The role of the message sender\n content: string; // The text content of the message\n};\n\n/**\n * Reads the content of a file synchronously.\n *\n * @function\n * @param relativeFilePath - The relative or absolute path to the target file.\n * @returns The entire contents of the specified file as a UTF-8 encoded string.\n */\nconst getFileContent = (relativeFilePath: string): string => {\n const __dirname = dirname(fileURLToPath(import.meta.url));\n const absolutePath = join(__dirname, relativeFilePath);\n const fileContent = readFileSync(absolutePath, 'utf-8');\n return fileContent;\n};\n\nconst CHAT_GPT_PROMPT = getFileContent('./PROMPT.md');\n\n// Initial prompt configuration for the chatbot\nexport const initPrompt: ChatCompletionRequestMessage = {\n role: 'system',\n content: CHAT_GPT_PROMPT,\n};\n\nexport type AskDocQuestionResult = {\n response: string;\n relatedFiles: string[];\n};\n\nexport type AskDocQuestionOptions = {\n onMessage?: (chunk: string) => void;\n};\n\n/**\n * Handles the \"Ask a question\" endpoint in an Express.js route.\n * Processes user messages, retrieves relevant documents, and interacts with AI models to generate responses.\n *\n * @param messages - An array of chat messages from the user and assistant\n * @returns The assistant's response as a string\n */\nexport const askDocQuestion = async (\n messages: ChatCompletionRequestMessage[],\n options?: AskDocQuestionOptions\n): Promise<AskDocQuestionResult> => {\n // Format the user's question to keep only the relevant keywords\n const query = messages.map((message) => `- ${message.content}`).join('\\n');\n\n // 1) Find relevant documents based on the user's question\n const relevantFilesReferences = await searchChunkReference(query);\n\n // 2) Integrate the relevant documents into the initial system prompt\n const systemPrompt = initPrompt.content.replace(\n '{{relevantFilesReferences}}',\n relevantFilesReferences.length === 0\n ? 
'Not relevant file found related to the question.'\n : relevantFilesReferences\n .map(\n (doc, idx) =>\n `[Chunk ${idx}] docKey = \"${doc.fileKey}\":\\n${doc.content}`\n )\n .join('\\n\\n') // Insert relevant docs into the prompt\n );\n\n // Format messages for AI SDK\n const aiMessages = [\n { role: 'system' as const, content: systemPrompt },\n ...messages,\n ];\n\n // Get AI configuration\n const aiConfig = await getAIConfig({\n provider: AIProvider.OPENAI,\n model: MODEL,\n temperature: MODEL_TEMPERATURE,\n apiKey: process.env.OPENAI_API_KEY!,\n });\n\n if (!aiConfig) {\n throw new Error('Failed to initialize AI configuration');\n }\n\n // 3) Use the AI SDK to stream the response\n let fullResponse = '';\n const stream = streamText({\n model: aiConfig.model,\n temperature: aiConfig.temperature,\n messages: aiMessages,\n });\n\n // Process the stream\n for await (const chunk of stream.textStream) {\n fullResponse += chunk;\n options?.onMessage?.(chunk);\n }\n\n // 4) Extract unique related files\n const relatedFiles = [\n ...new Set(relevantFilesReferences.map((doc) => doc.fileKey)),\n ];\n\n // 5) Return the assistant's response to the user\n return {\n response: fullResponse ?? 'Error: No result found',\n relatedFiles,\n };\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAyB;AACzB,oBAAwB;AACxB,kBAA6C;AAC7C,gBAA2B;AAC3B,oBAAmB;AACnB,gBAAiC;AACjC,oBAAuB;AACvB,kBAA8B;AAC9B,iBAA8B;AAC9B,mBAAwC;AACxC,wBAA2B;AAV3B;AA2BA,MAAM,cAA+B,CAAC;AAGtC,MAAM,QAAQ;AACd,MAAM,oBAAoB;AAC1B,MAAM,kBAAkB;AACxB,MAAM,iBAAiB;AACvB,MAAM,mBAAmB;AACzB,MAAM,gBAAgB;AACtB,MAAM,YAAY,mBAAmB;AACrC,MAAM,gBAAgB,iBAAiB;AACvC,MAAM,yBAAyB;AAC/B,MAAM,iCAAiC;AAOvC,MAAM,YAAY,CAAC,SAA2B;AAC5C,QAAM,SAAmB,CAAC;AAC1B,MAAI,QAAQ;AAEZ,SAAO,QAAQ,KAAK,QAAQ;AAC1B,QAAI,MAAM,KAAK,IAAI,QAAQ,WAAW,KAAK,MAAM;AAGjD,QAAI,MAAM,KAAK,QAAQ;AACrB,YAAM,YAAY,KAAK,YAAY,KAAK,GAAG;AAC3C,UAAI,YAAY,OAAO;AACrB,cAAM;AAAA,MACR;AAAA,IACF;AAEA,WAAO,KAAK,KAAK,UAAU,OAAO,GAAG,CAAC;AAGtC,UAAM,YAAY,MAAM;AACxB,QAAI,aAAa,OAAO;AAEtB,cAAQ;AAAA,IACV,OAAO;AACL,cAAQ;AAAA,IACV;AAAA,EACF;AAEA,SAAO;AACT;AASA,MAAM,oBAAoB,OAAO,SAAoC;AACnE,MAAI;AAEF,cAAM,0BAAY;AAAA,MAChB,UAAU,wBAAW;AAAA,MACrB,QAAQ,QAAQ,IAAI;AAAA,IACtB,CAAC;AAED,UAAM,eAAe,IAAI,qBAAO,EAAE,QAAQ,QAAQ,IAAI,eAAe,CAAC;AAEtE,UAAM,WAAW,MAAM,aAAa,WAAW,OAAO;AAAA,MACpD,OAAO;AAAA,MACP,OAAO;AAAA,IACT,CAAC;AAED,WAAO,SAAS,KAAK,CAAC,EAAE;AAAA,EAC1B,SAAS,OAAO;AACd,YAAQ,MAAM,+BAA+B,KAAK;AAClD,WAAO,CAAC;AAAA,EACV;AACF;AAWA,MAAM,mBAAmB,CAAC,MAAgB,SAA2B;AAEnE,QAAM,aAAa,KAAK,OAAO,CAAC,KAAK,GAAG,QAAQ,MAAM,IAAI,KAAK,GAAG,GAAG,CAAC;AAGtE,QAAM,aAAa,KAAK,KAAK,KAAK,OAAO,CAAC,KAAK,MAAM,MAAM,IAAI,GAAG,CAAC,CAAC;AACpE,QAAM,aAAa,KAAK,KAAK,KAAK,OAAO,CAAC,KAAK,MAAM,MAAM,IAAI,GAAG,CAAC,CAAC;AAGpE,SAAO,cAAc,aAAa;AACpC;AAMO,MAAM,qBAAqB,YAA2B;AAC3D,QAAM,MAAM,QAAQ,IAAI;AACxB,gBAAAA,QAAO,OAAO;AAAA,IACZ,MAAM,CAAC,QAAQ,GAAG,UAAU,QAAQ,GAAG,IAAI,cAAc,MAAM;AAAA,EACjE,CAAC;AAGD,QAAM,wBAAoB,iCAAoB;AAC9C,QAAM,WAAO,qBAAQ,sBAAQ,OAAO;AACpC,QAAM,YAAQ,sBAAS,sBAAQ,OAAO;AAEtC,MAAI,SAAmC,CAAC;AAExC,QAAM,QAAQ,EAAE,GAAG,MAAM,GAAG,OAAO,GAAG,kBAAkB;AAGxD,aAAW,WAAW,OAAO,KAAK,KAAK,GAAG;AAExC,UAAM,aAAa,UAAU,MAAM,OAA6B,CAAC;AAGjE,eAAW,cAAc,OAAO,KAAK,UAAU,GAAG;AAChD,YAAM,cAAc,OAAO,UAAU,IAAI;AACzC,YAAM,eAAe,WAAW;AAEhC,YAAM,YAAY,WAChB,UACF;AAEA,YAAM,mBAAmB,GAAG,OAAO,UAAU,WAAW;AAGxD,YAAM,eAAe,kBAAAC,QACnB,gBACF;AAEA,UAAI,YAAY;AAEhB,UAAI,CAAC,WAAW;AACd,oBAAY,MAAM,kBAAkB,SAAS;AAAA,MAC/C;AAGA,eAAS,EAAE,GAAG,QAAQ,CAAC,gBAAgB,GAAG,UAAU;AAGpD,kBAAY,KAAK;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,QACA,SAAS;AAAA,MACX,CAAC;AAED,cAAQ,KAAK,cAAc,gBAAgB,IAA
I,YAAY,EAAE;AAAA,IAC/D;AAAA,EACF;AAEA,MAAI,QAAQ,IAAI,aAAa,eAAe;AAC1C,QAAI;AAEF,UAAI,KAAK,UAAU,MAAM,MAAM,KAAK,UAAU,kBAAAA,OAAc,GAAG;AAE7D,kBAAAC,QAAG;AAAA,UACD;AAAA,UACA,KAAK,UAAU,QAAQ,MAAM,CAAC;AAAA,QAChC;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AACd,cAAQ,MAAM,KAAK;AAAA,IACrB;AAAA,EACF;AACF;AAGA,mBAAmB;AASZ,MAAM,uBAAuB,OAClC,UAC6B;AAE7B,QAAM,iBAAiB,MAAM,kBAAkB,KAAK;AAGpD,QAAM,UAAU,YACb,IAAI,CAAC,WAAW;AAAA,IACf,GAAG;AAAA,IACH,YAAY,iBAAiB,gBAAgB,MAAM,SAAS;AAAA;AAAA,EAC9D,EAAE,EACD,OAAO,CAAC,UAAU,MAAM,aAAa,8BAA8B,EACnE,KAAK,CAAC,GAAG,MAAM,EAAE,aAAa,EAAE,UAAU,EAC1C,MAAM,GAAG,sBAAsB;AAGlC,SAAO;AACT;AAeA,MAAM,iBAAiB,CAAC,qBAAqC;AAC3D,QAAM,gBAAY,yBAAQ,0BAAc,YAAY,GAAG,CAAC;AACxD,QAAM,mBAAe,kBAAK,WAAW,gBAAgB;AACrD,QAAM,kBAAc,wBAAa,cAAc,OAAO;AACtD,SAAO;AACT;AAEA,MAAM,kBAAkB,eAAe,aAAa;AAG7C,MAAM,aAA2C;AAAA,EACtD,MAAM;AAAA,EACN,SAAS;AACX;AAkBO,MAAM,iBAAiB,OAC5B,UACA,YACkC;AAElC,QAAM,QAAQ,SAAS,IAAI,CAAC,YAAY,KAAK,QAAQ,OAAO,EAAE,EAAE,KAAK,IAAI;AAGzE,QAAM,0BAA0B,MAAM,qBAAqB,KAAK;AAGhE,QAAM,eAAe,WAAW,QAAQ;AAAA,IACtC;AAAA,IACA,wBAAwB,WAAW,IAC/B,qDACA,wBACG;AAAA,MACC,CAAC,KAAK,QACJ,UAAU,GAAG,eAAe,IAAI,OAAO;AAAA,EAAO,IAAI,OAAO;AAAA,IAC7D,EACC,KAAK,MAAM;AAAA;AAAA,EACpB;AAGA,QAAM,aAAa;AAAA,IACjB,EAAE,MAAM,UAAmB,SAAS,aAAa;AAAA,IACjD,GAAG;AAAA,EACL;AAGA,QAAM,WAAW,UAAM,0BAAY;AAAA,IACjC,UAAU,wBAAW;AAAA,IACrB,OAAO;AAAA,IACP,aAAa;AAAA,IACb,QAAQ,QAAQ,IAAI;AAAA,EACtB,CAAC;AAED,MAAI,CAAC,UAAU;AACb,UAAM,IAAI,MAAM,uCAAuC;AAAA,EACzD;AAGA,MAAI,eAAe;AACnB,QAAM,aAAS,sBAAW;AAAA,IACxB,OAAO,SAAS;AAAA,IAChB,aAAa,SAAS;AAAA,IACtB,UAAU;AAAA,EACZ,CAAC;AAGD,mBAAiB,SAAS,OAAO,YAAY;AAC3C,oBAAgB;AAChB,aAAS,YAAY,KAAK;AAAA,EAC5B;AAGA,QAAM,eAAe;AAAA,IACnB,GAAG,IAAI,IAAI,wBAAwB,IAAI,CAAC,QAAQ,IAAI,OAAO,CAAC;AAAA,EAC9D;AAGA,SAAO;AAAA,IACL,UAAU,gBAAgB;AAAA,IAC1B;AAAA,EACF;AACF;","names":["dotenv","embeddingsList","fs"]}
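The substantive change between the two embedded sources is the streaming path: 5.4.1 instantiated OpenAI directly and read chunk.choices[0]?.delta?.content from openai.chat.completions.create({ stream: true }), while 5.5.0-canary.0 resolves model settings through the new ../aiSdk helpers (AIProvider, getAIConfig) and iterates the AI SDK's streamText(...).textStream. A rough sketch of the new pattern, substituting the public @ai-sdk/openai provider for the package's getAIConfig wrapper, whose internals are not part of this file:

```ts
import { openai } from '@ai-sdk/openai'; // stand-in assumption for the package's aiSdk wrapper
import { streamText } from 'ai';

type ChatMessage = { role: 'system' | 'user' | 'assistant'; content: string };

// Stream a completion and forward each text chunk to an optional callback,
// mirroring step 3 of askDocQuestion in the 5.5.0-canary.0 source
const streamAnswer = async (
  messages: ChatMessage[],
  onMessage?: (chunk: string) => void
): Promise<string> => {
  // streamText returns immediately; consumption happens via textStream
  const stream = streamText({
    model: openai('gpt-4o-2024-11-20'), // MODEL in the embedded source
    temperature: 0.1, // MODEL_TEMPERATURE
    messages,
  });

  let fullResponse = '';
  for await (const chunk of stream.textStream) {
    fullResponse += chunk;
    onMessage?.(chunk); // feeds the onMessage option used for incremental delivery
  }
  return fullResponse;
};
```

One behavioral difference visible in the sourcesContent is worth noting: the 5.4.1 version filtered the retrieval query down to user messages (with a comment warning about self-referential loops), while the 5.5.0-canary.0 version builds the query from every message's content, filter and comment both gone.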
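Unchanged between the two versions is the chunker that feeds the embedding index: a character budget derived from a rough chars-per-token estimate, a snap back to the nearest space so words are not split, and a guard so a large overlap cannot stall the loop. Condensed from the embedded source; the 4.15 chars-per-token figure is the source's own pessimistic approximation, and its comment notes a tokenizer such as tiktoken would be more precise:

```ts
const OVERLAP_TOKENS = 200; // tokens shared between consecutive chunks
const MAX_CHUNK_TOKENS = 800; // upper bound per chunk
const CHAR_BY_TOKEN = 4.15; // rough chars-per-token estimate from the source
const MAX_CHARS = MAX_CHUNK_TOKENS * CHAR_BY_TOKEN;
const OVERLAP_CHARS = OVERLAP_TOKENS * CHAR_BY_TOKEN;

const chunkText = (text: string): string[] => {
  const chunks: string[] = [];
  let start = 0;

  while (start < text.length) {
    let end = Math.min(start + MAX_CHARS, text.length);

    // Avoid splitting mid-word: back up to the last space inside the window
    if (end < text.length) {
      const lastSpace = text.lastIndexOf(' ', end);
      if (lastSpace > start) end = lastSpace;
    }

    chunks.push(text.substring(start, end));

    // Step forward with overlap; fall back to `end` if overlap would not advance
    const nextStart = end - OVERLAP_CHARS;
    start = nextStart <= start ? end : nextStart;
  }

  return chunks;
};
```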