@intlayer/backend 5.7.2 → 5.7.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/controllers/ai.controller.cjs.map +1 -1
- package/dist/cjs/controllers/dictionary.controller.cjs +39 -20
- package/dist/cjs/controllers/dictionary.controller.cjs.map +1 -1
- package/dist/cjs/controllers/newsletter.controller.cjs +12 -13
- package/dist/cjs/controllers/newsletter.controller.cjs.map +1 -1
- package/dist/cjs/controllers/organization.controller.cjs +72 -53
- package/dist/cjs/controllers/organization.controller.cjs.map +1 -1
- package/dist/cjs/controllers/project.controller.cjs +48 -12
- package/dist/cjs/controllers/project.controller.cjs.map +1 -1
- package/dist/cjs/controllers/projectAccessKey.controller.cjs +30 -6
- package/dist/cjs/controllers/projectAccessKey.controller.cjs.map +1 -1
- package/dist/cjs/controllers/search.controller.cjs.map +1 -1
- package/dist/cjs/controllers/stripe.controller.cjs +15 -3
- package/dist/cjs/controllers/stripe.controller.cjs.map +1 -1
- package/dist/cjs/controllers/tag.controller.cjs +46 -17
- package/dist/cjs/controllers/tag.controller.cjs.map +1 -1
- package/dist/cjs/controllers/user.controller.cjs +19 -8
- package/dist/cjs/controllers/user.controller.cjs.map +1 -1
- package/dist/cjs/emails/OAuthTokenCreatedEmail.cjs +5 -7
- package/dist/cjs/emails/OAuthTokenCreatedEmail.cjs.map +1 -1
- package/dist/cjs/middlewares/sessionAuth.middleware.cjs.map +1 -1
- package/dist/cjs/routes/ai.routes.cjs.map +1 -1
- package/dist/cjs/routes/dictionary.routes.cjs.map +1 -1
- package/dist/cjs/routes/eventListener.routes.cjs.map +1 -1
- package/dist/cjs/routes/newsletter.routes.cjs.map +1 -1
- package/dist/cjs/routes/organization.routes.cjs.map +1 -1
- package/dist/cjs/routes/project.routes.cjs +3 -12
- package/dist/cjs/routes/project.routes.cjs.map +1 -1
- package/dist/cjs/routes/search.routes.cjs.map +1 -1
- package/dist/cjs/routes/stripe.routes.cjs.map +1 -1
- package/dist/cjs/routes/tags.routes.cjs.map +1 -1
- package/dist/cjs/routes/user.routes.cjs.map +1 -1
- package/dist/cjs/schemas/dictionary.schema.cjs +1 -1
- package/dist/cjs/schemas/dictionary.schema.cjs.map +1 -1
- package/dist/cjs/schemas/discussion.schema.cjs +1 -1
- package/dist/cjs/schemas/discussion.schema.cjs.map +1 -1
- package/dist/cjs/schemas/oAuth2.schema.cjs +1 -1
- package/dist/cjs/schemas/oAuth2.schema.cjs.map +1 -1
- package/dist/cjs/schemas/organization.schema.cjs +1 -1
- package/dist/cjs/schemas/organization.schema.cjs.map +1 -1
- package/dist/cjs/schemas/plans.schema.cjs +1 -1
- package/dist/cjs/schemas/plans.schema.cjs.map +1 -1
- package/dist/cjs/schemas/project.schema.cjs +1 -1
- package/dist/cjs/schemas/project.schema.cjs.map +1 -1
- package/dist/cjs/schemas/session.schema.cjs +1 -1
- package/dist/cjs/schemas/session.schema.cjs.map +1 -1
- package/dist/cjs/schemas/tag.schema.cjs +6 -1
- package/dist/cjs/schemas/tag.schema.cjs.map +1 -1
- package/dist/cjs/schemas/user.schema.cjs +1 -1
- package/dist/cjs/schemas/user.schema.cjs.map +1 -1
- package/dist/cjs/services/dictionary.service.cjs +0 -8
- package/dist/cjs/services/dictionary.service.cjs.map +1 -1
- package/dist/cjs/services/organization.service.cjs +0 -9
- package/dist/cjs/services/organization.service.cjs.map +1 -1
- package/dist/cjs/types/organization.types.cjs.map +1 -1
- package/dist/cjs/types/project.types.cjs.map +1 -1
- package/dist/cjs/types/tag.types.cjs.map +1 -1
- package/dist/cjs/utils/AI/askDocQuestion/askDocQuestion.cjs +3 -0
- package/dist/cjs/utils/AI/askDocQuestion/askDocQuestion.cjs.map +1 -1
- package/dist/cjs/utils/auth/getAuth.cjs +9 -5
- package/dist/cjs/utils/auth/getAuth.cjs.map +1 -1
- package/dist/cjs/utils/errors/errorCodes.cjs +13 -0
- package/dist/cjs/utils/errors/errorCodes.cjs.map +1 -1
- package/dist/cjs/utils/mapper/project.cjs +1 -2
- package/dist/cjs/utils/mapper/project.cjs.map +1 -1
- package/dist/cjs/utils/mapper/session.cjs +37 -0
- package/dist/cjs/utils/mapper/session.cjs.map +1 -0
- package/dist/cjs/utils/permissions.cjs +133 -38
- package/dist/cjs/utils/permissions.cjs.map +1 -1
- package/dist/cjs/utils/validation/validateArray.cjs +5 -1
- package/dist/cjs/utils/validation/validateArray.cjs.map +1 -1
- package/dist/esm/controllers/ai.controller.mjs.map +1 -1
- package/dist/esm/controllers/dictionary.controller.mjs +39 -20
- package/dist/esm/controllers/dictionary.controller.mjs.map +1 -1
- package/dist/esm/controllers/newsletter.controller.mjs +12 -13
- package/dist/esm/controllers/newsletter.controller.mjs.map +1 -1
- package/dist/esm/controllers/organization.controller.mjs +72 -53
- package/dist/esm/controllers/organization.controller.mjs.map +1 -1
- package/dist/esm/controllers/project.controller.mjs +48 -12
- package/dist/esm/controllers/project.controller.mjs.map +1 -1
- package/dist/esm/controllers/projectAccessKey.controller.mjs +31 -7
- package/dist/esm/controllers/projectAccessKey.controller.mjs.map +1 -1
- package/dist/esm/controllers/search.controller.mjs.map +1 -1
- package/dist/esm/controllers/stripe.controller.mjs +15 -3
- package/dist/esm/controllers/stripe.controller.mjs.map +1 -1
- package/dist/esm/controllers/tag.controller.mjs +46 -17
- package/dist/esm/controllers/tag.controller.mjs.map +1 -1
- package/dist/esm/controllers/user.controller.mjs +19 -8
- package/dist/esm/controllers/user.controller.mjs.map +1 -1
- package/dist/esm/emails/OAuthTokenCreatedEmail.mjs +5 -7
- package/dist/esm/emails/OAuthTokenCreatedEmail.mjs.map +1 -1
- package/dist/esm/middlewares/sessionAuth.middleware.mjs.map +1 -1
- package/dist/esm/routes/ai.routes.mjs.map +1 -1
- package/dist/esm/routes/dictionary.routes.mjs.map +1 -1
- package/dist/esm/routes/eventListener.routes.mjs.map +1 -1
- package/dist/esm/routes/newsletter.routes.mjs.map +1 -1
- package/dist/esm/routes/organization.routes.mjs.map +1 -1
- package/dist/esm/routes/project.routes.mjs +3 -12
- package/dist/esm/routes/project.routes.mjs.map +1 -1
- package/dist/esm/routes/search.routes.mjs.map +1 -1
- package/dist/esm/routes/stripe.routes.mjs.map +1 -1
- package/dist/esm/routes/tags.routes.mjs.map +1 -1
- package/dist/esm/routes/user.routes.mjs.map +1 -1
- package/dist/esm/schemas/dictionary.schema.mjs +1 -1
- package/dist/esm/schemas/dictionary.schema.mjs.map +1 -1
- package/dist/esm/schemas/discussion.schema.mjs +1 -1
- package/dist/esm/schemas/discussion.schema.mjs.map +1 -1
- package/dist/esm/schemas/oAuth2.schema.mjs +1 -1
- package/dist/esm/schemas/oAuth2.schema.mjs.map +1 -1
- package/dist/esm/schemas/organization.schema.mjs +1 -1
- package/dist/esm/schemas/organization.schema.mjs.map +1 -1
- package/dist/esm/schemas/plans.schema.mjs +1 -1
- package/dist/esm/schemas/plans.schema.mjs.map +1 -1
- package/dist/esm/schemas/project.schema.mjs +1 -1
- package/dist/esm/schemas/project.schema.mjs.map +1 -1
- package/dist/esm/schemas/session.schema.mjs +1 -1
- package/dist/esm/schemas/session.schema.mjs.map +1 -1
- package/dist/esm/schemas/tag.schema.mjs +6 -1
- package/dist/esm/schemas/tag.schema.mjs.map +1 -1
- package/dist/esm/schemas/user.schema.mjs +1 -1
- package/dist/esm/schemas/user.schema.mjs.map +1 -1
- package/dist/esm/services/dictionary.service.mjs +0 -7
- package/dist/esm/services/dictionary.service.mjs.map +1 -1
- package/dist/esm/services/organization.service.mjs +0 -9
- package/dist/esm/services/organization.service.mjs.map +1 -1
- package/dist/esm/utils/AI/askDocQuestion/askDocQuestion.mjs +3 -0
- package/dist/esm/utils/AI/askDocQuestion/askDocQuestion.mjs.map +1 -1
- package/dist/esm/utils/auth/getAuth.mjs +9 -5
- package/dist/esm/utils/auth/getAuth.mjs.map +1 -1
- package/dist/esm/utils/errors/errorCodes.mjs +13 -0
- package/dist/esm/utils/errors/errorCodes.mjs.map +1 -1
- package/dist/esm/utils/mapper/project.mjs +1 -2
- package/dist/esm/utils/mapper/project.mjs.map +1 -1
- package/dist/esm/utils/mapper/session.mjs +13 -0
- package/dist/esm/utils/mapper/session.mjs.map +1 -0
- package/dist/esm/utils/permissions.mjs +133 -38
- package/dist/esm/utils/permissions.mjs.map +1 -1
- package/dist/esm/utils/validation/validateArray.mjs +5 -1
- package/dist/esm/utils/validation/validateArray.mjs.map +1 -1
- package/dist/types/controllers/ai.controller.d.ts +10 -9
- package/dist/types/controllers/ai.controller.d.ts.map +1 -1
- package/dist/types/controllers/dictionary.controller.d.ts +9 -8
- package/dist/types/controllers/dictionary.controller.d.ts.map +1 -1
- package/dist/types/controllers/newsletter.controller.d.ts +5 -4
- package/dist/types/controllers/newsletter.controller.d.ts.map +1 -1
- package/dist/types/controllers/organization.controller.d.ts +14 -15
- package/dist/types/controllers/organization.controller.d.ts.map +1 -1
- package/dist/types/controllers/project.controller.d.ts +10 -9
- package/dist/types/controllers/project.controller.d.ts.map +1 -1
- package/dist/types/controllers/projectAccessKey.controller.d.ts +5 -4
- package/dist/types/controllers/projectAccessKey.controller.d.ts.map +1 -1
- package/dist/types/controllers/search.controller.d.ts.map +1 -1
- package/dist/types/controllers/stripe.controller.d.ts +9 -5
- package/dist/types/controllers/stripe.controller.d.ts.map +1 -1
- package/dist/types/controllers/tag.controller.d.ts +6 -5
- package/dist/types/controllers/tag.controller.d.ts.map +1 -1
- package/dist/types/controllers/user.controller.d.ts +9 -8
- package/dist/types/controllers/user.controller.d.ts.map +1 -1
- package/dist/types/emails/OAuthTokenCreatedEmail.d.ts.map +1 -1
- package/dist/types/middlewares/sessionAuth.middleware.d.ts +2 -0
- package/dist/types/middlewares/sessionAuth.middleware.d.ts.map +1 -1
- package/dist/types/schemas/tag.schema.d.ts.map +1 -1
- package/dist/types/services/dictionary.service.d.ts +0 -1
- package/dist/types/services/dictionary.service.d.ts.map +1 -1
- package/dist/types/services/organization.service.d.ts.map +1 -1
- package/dist/types/types/organization.types.d.ts +1 -3
- package/dist/types/types/organization.types.d.ts.map +1 -1
- package/dist/types/types/project.types.d.ts +1 -3
- package/dist/types/types/project.types.d.ts.map +1 -1
- package/dist/types/types/tag.types.d.ts +2 -0
- package/dist/types/types/tag.types.d.ts.map +1 -1
- package/dist/types/utils/AI/askDocQuestion/askDocQuestion.d.ts.map +1 -1
- package/dist/types/utils/auth/getAuth.d.ts +2 -2
- package/dist/types/utils/auth/getAuth.d.ts.map +1 -1
- package/dist/types/utils/errors/errorCodes.d.ts +13 -0
- package/dist/types/utils/errors/errorCodes.d.ts.map +1 -1
- package/dist/types/utils/mapper/project.d.ts.map +1 -1
- package/dist/types/utils/mapper/session.d.ts +4 -0
- package/dist/types/utils/mapper/session.d.ts.map +1 -0
- package/dist/types/utils/permissions.d.ts +83 -38
- package/dist/types/utils/permissions.d.ts.map +1 -1
- package/dist/types/utils/validation/validateArray.d.ts.map +1 -1
- package/package.json +10 -10
@@ -1 +1 @@
- {"version":3,"sources":["../../../src/services/dictionary.service.ts"], …}
+ {"version":3,"sources":["../../../src/services/dictionary.service.ts"], …}

(Regenerated sourcemap for dictionary.service.ts; minified JSON elided. The embedded source differs only by the removal of the getDictionariesKeys export, shown below.)
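For reference, the removed export, copied verbatim from the old sourcemap's sourcesContent:

export const getDictionariesKeys = async (
  projectId: string | Types.ObjectId
): Promise<string[]> => {
  const dictionaries = await DictionaryModel.find({
    projectIds: projectId,
  }).select('key');

  return dictionaries.map((dictionary) => dictionary.key);
};

This matches the +0 -8 and +0 -7 deltas on dictionary.service.cjs and dictionary.service.mjs in the file list above.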
@@ -24,14 +24,6 @@ const createOrganization = async (organization, userId) => {
     throw new GenericError("ORGANIZATION_INVALID_FIELDS", { errors });
   }
   try {
-    console.log({
-      organization: {
-        creatorId: userId,
-        membersIds: [userId],
-        adminsIds: [userId],
-        ...organization
-      }
-    });
     const result = await OrganizationModel.create({
       creatorId: userId,
       membersIds: [userId],
@@ -40,7 +32,6 @@ const createOrganization = async (organization, userId) => {
     });
     return result;
   } catch (error) {
-    console.error("Organization creation failed:", error);
     throw new GenericError("ORGANIZATION_CREATION_FAILED", {
       error: error.message
     });
@@ -1 +1 @@
- {"version":3,"sources":["../../../src/services/organization.service.ts"], …}
+ {"version":3,"sources":["../../../src/services/organization.service.ts"], …}

(Regenerated sourcemap for organization.service.ts; minified JSON elided. The embedded source drops the same console.log and console.error calls deleted in the hunks above.)
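After the change, createOrganization in the embedded TypeScript source (copied verbatim from the new sourcemap) no longer logs the member payload or the raw error:

export const createOrganization = async (
  organization: OrganizationCreationData,
  userId: string | Types.ObjectId
): Promise<OrganizationDocument> => {
  const errors = validateOrganization(organization, ['name']);

  if (Object.keys(errors).length > 0) {
    throw new GenericError('ORGANIZATION_INVALID_FIELDS', { errors });
  }

  try {
    const result = await OrganizationModel.create({
      creatorId: userId,
      membersIds: [userId],
      adminsIds: [userId],
      ...organization,
    });

    return result;
  } catch (error) {
    throw new GenericError('ORGANIZATION_CREATION_FAILED', {
      error: (error as Error).message,
    });
  }
};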
@@ -71,6 +71,9 @@ const indexMarkdownFiles = async () => {
   dotenv.config({
     path: [`.env.${env}.local`, `.env.${env}`, ".env.local", ".env"]
   });
+  if (process.env.SKIP_DOC_EMBEDDINGS_INDEX === "true") {
+    return;
+  }
   const frequentQuestions = await getFrequentQuestions();
   const docs = await getDocs();
   const blogs = await getBlogs();
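The new guard sits right after dotenv.config, so SKIP_DOC_EMBEDDINGS_INDEX can come from the process environment or a .env file, and since the embedded source in the sourcemap below calls indexMarkdownFiles() automatically at module load, the flag acts as an opt-out for startup indexing. A minimal usage sketch, assuming the flag is set before the module is imported (the import path is illustrative, not taken from the diff):

// Opt out of automatic embedding indexing, e.g. in CI or local tests.
// Equivalent shell form: SKIP_DOC_EMBEDDINGS_INDEX=true node server.mjs
process.env.SKIP_DOC_EMBEDDINGS_INDEX = 'true'; // must be set before the import

// indexMarkdownFiles() runs as an import side effect; with the flag set it
// returns early and the in-memory vector store stays empty.
await import('./dist/esm/utils/AI/askDocQuestion/askDocQuestion.mjs'); // illustrative path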
@@ -1 +1 @@
- {"version":3,"sources":["../../../../../src/utils/AI/askDocQuestion/askDocQuestion.ts"], …

(Old sourcemap for askDocQuestion.ts; minified JSON elided. Its embedded source predates the SKIP_DOC_EMBEDDINGS_INDEX guard and calls indexMarkdownFiles() at module load.)
b) => b.similarity - a.similarity) // Sort documents by highest similarity first\n .slice(0, maxResults); // Select the top 6 most similar documents\n\n const orderedDocKeys = new Set(selection.map((chunk) => chunk.fileKey));\n\n const orderedVectorStore = vectorStore.sort((a, b) =>\n orderedDocKeys.has(a.fileKey) ? -1 : 1\n );\n\n const results = orderedVectorStore.filter((chunk) =>\n selection.some(\n (v) => v.fileKey === chunk.fileKey && v.chunkNumber === chunk.chunkNumber\n )\n );\n\n console.log({ orderedDocKeys });\n\n // Return the content of the top matching documents\n return results;\n};\n\n/**\n * Reads the content of a file synchronously.\n *\n * @function\n * @param relativeFilePath - The relative or absolute path to the target file.\n * @returns The entire contents of the specified file as a UTF-8 encoded string.\n */\nconst getFileContent = (relativeFilePath: string): string => {\n const __dirname = dirname(fileURLToPath(import.meta.url));\n const absolutePath = join(__dirname, relativeFilePath);\n const fileContent = readFileSync(absolutePath, 'utf-8');\n return fileContent;\n};\n\nconst CHAT_GPT_PROMPT = getFileContent('./PROMPT.md');\n\n// Initial prompt configuration for the chatbot\nexport const initPrompt: ChatCompletionRequestMessage = {\n role: 'system',\n content: CHAT_GPT_PROMPT,\n};\n\nexport type AskDocQuestionResult = {\n response: string;\n relatedFiles: string[];\n};\n\nexport type AskDocQuestionOptions = {\n onMessage?: (chunk: string) => void;\n};\n\n/**\n * Handles the \"Ask a question\" endpoint in an Express.js route.\n * Processes user messages, retrieves relevant documents, and interacts with AI models to generate responses.\n *\n * @param messages - An array of chat messages from the user and assistant\n * @returns The assistant's response as a string\n */\nexport const askDocQuestion = async (\n messages: ChatCompletionRequestMessage[],\n aiConfig: AIConfig,\n options?: AskDocQuestionOptions\n): Promise<AskDocQuestionResult> => {\n // Format the user's question to keep only the relevant keywords\n const query = messages\n .filter((message) => message.role === 'user')\n .map((message) => `- ${message.content}`)\n .join('\\n');\n\n // 1) Find relevant documents based on the user's question\n const relevantFilesReferences = await searchChunkReference(query);\n\n // 2) Integrate the relevant documents into the initial system prompt\n const systemPrompt = initPrompt.content.replace(\n '{{relevantFilesReferences}}',\n relevantFilesReferences.length === 0\n ? 
'Not relevant file found related to the question.'\n : relevantFilesReferences\n .map((doc, idx) =>\n [\n '-----',\n '---',\n `chunkId: ${idx}`,\n `docChunk: \"${doc.chunkNumber}/${doc.fileKey.length}\"`,\n `docName: \"${doc.docName}\"`,\n `docUrl: \"${doc.docUrl}\"`,\n `---`,\n doc.content,\n `-----`,\n ].join('\\n')\n )\n .join('\\n\\n') // Insert relevant docs into the prompt\n );\n\n // Format messages for AI SDK\n const aiMessages = [\n {\n role: 'system' as const,\n content: systemPrompt,\n },\n ...messages.slice(-8),\n ];\n\n if (!aiConfig) {\n throw new Error('Failed to initialize AI configuration');\n }\n\n // 3) Use the AI SDK to stream the response\n let fullResponse = '';\n const stream = streamText({\n ...aiConfig,\n messages: aiMessages,\n });\n\n // Process the stream\n for await (const chunk of stream.textStream) {\n fullResponse += chunk;\n options?.onMessage?.(chunk);\n }\n\n // 4) Extract unique related files\n const relatedFiles = [\n ...new Set(relevantFilesReferences.map((doc) => doc.fileKey)),\n ];\n\n // 5) Return the assistant's response to the user\n return {\n response: fullResponse ?? 'Error: No result found',\n relatedFiles,\n };\n};\n"],"mappings":"AAAA,SAAS,UAAU,SAAS,4BAA4B;AACxD,SAAS,kBAAkB;AAC3B,OAAO,YAAY;AACnB,SAAS,cAAc,qBAAqB;AAC5C,SAAS,2BAA2B;AACpC,SAAS,cAAc;AACvB,SAAS,SAAS,YAAY;AAC9B,SAAS,qBAAqB;AAC9B;AAAA,EAGE;AAAA,OAEK;AACP,OAAO,oBAAoB,oBAAoB,KAAK,EAAE,MAAM,OAAO;AAmBnE,MAAM,cAA+B,CAAC;AAKtC,MAAM,QAA4B;AAClC,MAAM,oBAA8C;AACpD,MAAM,yBAAiC;AACvC,MAAM,iCAAyC;AAExC,MAAM,mBAA8B;AAAA,EACzC,UAAU,WAAW;AAAA,EACrB,OAAO;AAAA,EACP,aAAa;AACf;AAKA,MAAM,kBAAyC;AAC/C,MAAM,iBAAyB;AAC/B,MAAM,mBAA2B;AACjC,MAAM,gBAAwB;AAC9B,MAAM,YAAoB,mBAAmB;AAC7C,MAAM,gBAAwB,iBAAiB;AAO/C,MAAM,YAAY,CAAC,SAA2B;AAC5C,QAAM,SAAmB,CAAC;AAC1B,MAAI,QAAQ;AAEZ,SAAO,QAAQ,KAAK,QAAQ;AAC1B,QAAI,MAAM,KAAK,IAAI,QAAQ,WAAW,KAAK,MAAM;AAGjD,QAAI,MAAM,KAAK,QAAQ;AACrB,YAAM,YAAY,KAAK,YAAY,KAAK,GAAG;AAC3C,UAAI,YAAY,OAAO;AACrB,cAAM;AAAA,MACR;AAAA,IACF;AAEA,WAAO,KAAK,KAAK,UAAU,OAAO,GAAG,CAAC;AAGtC,UAAM,YAAY,MAAM;AACxB,QAAI,aAAa,OAAO;AAEtB,cAAQ;AAAA,IACV,OAAO;AACL,cAAQ;AAAA,IACV;AAAA,EACF;AAEA,SAAO;AACT;AASA,MAAM,oBAAoB,OAAO,SAAoC;AACnE,MAAI;AACF,UAAM,eAAe,IAAI,OAAO,EAAE,QAAQ,QAAQ,IAAI,eAAe,CAAC;AAEtE,UAAM,WAAW,MAAM,aAAa,WAAW,OAAO;AAAA,MACpD,OAAO;AAAA,MACP,OAAO;AAAA,IACT,CAAC;AAED,WAAO,SAAS,KAAK,CAAC,EAAE;AAAA,EAC1B,SAAS,OAAO;AACd,YAAQ,MAAM,+BAA+B,KAAK;AAClD,WAAO,CAAC;AAAA,EACV;AACF;AAWA,MAAM,mBAAmB,CAAC,MAAgB,SAA2B;AAEnE,QAAM,aAAa,KAAK,OAAO,CAAC,KAAK,GAAG,QAAQ,MAAM,IAAI,KAAK,GAAG,GAAG,CAAC;AAGtE,QAAM,aAAa,KAAK,KAAK,KAAK,OAAO,CAAC,KAAK,MAAM,MAAM,IAAI,GAAG,CAAC,CAAC;AACpE,QAAM,aAAa,KAAK,KAAK,KAAK,OAAO,CAAC,KAAK,MAAM,MAAM,IAAI,GAAG,CAAC,CAAC;AAGpE,SAAO,cAAc,aAAa;AACpC;AAOO,MAAM,qBAAqB,YAA2B;AAC3D,QAAM,MAAM,QAAQ,IAAI;AACxB,SAAO,OAAO;AAAA,IACZ,MAAM,CAAC,QAAQ,GAAG,UAAU,QAAQ,GAAG,IAAI,cAAc,MAAM;AAAA,EACjE,CAAC;AAGD,QAAM,oBAAoB,MAAM,qBAAqB;AACrD,QAAM,OAAO,MAAM,QAAQ;AAC3B,QAAM,QAAQ,MAAM,SAAS;AAE7B,MAAI,SAAmC,CAAC;AACxC,QAAM,mBAAmB,oBAAI,IAAY;AAEzC,QAAM,QAAQ,EAAE,GAAG,MAAM,GAAG,OAAO,GAAG,kBAAkB;AAGxD,mBAAiB,WAAW,OAAO,KAAK,KAAK,GAAG;AAE9C,UAAM,eAAe;AAAA,MACnB,MAAM,OAA6B;AAAA,IACrC;AAGA,UAAM,aAAa;AAAA,MACjB,MAAM,OAA6B;AAAA,IACrC;AAGA,UAAM,wBAAwB,OAAO,KAAK,cAAc,EAAE;AAAA,MAAO,CAAC,QAChE,IAAI,WAAW,GAAG,OAAO,SAAS;AAAA,IACpC;AACA,UAAM,oBAAoB,WAAW;AACrC,UAAM,qBAAqB,sBAAsB;AAEjD,QAAI,iCAAiC;AAGrC,QAAI,sBAAsB,oBAAoB;AAC5C,cAAQ;AAAA,QACN,SAAS,OAAO,0BAA0B,kBAAkB,OAAO,iBAAiB;AAAA,MACtF;AACA,uCAAiC;AAAA,IACnC;AAGA,qBAAiB,cAAc,OAAO,KAAK,UAAU,GAAG;AACtD,YAAM,cAAc,OAAO,UAAU,IAAI;AACzC,YAAM,eAAe,WAAW;AAEhC,YAAM,YAAY,WAChB,UACF;AAEA,YAAM,mBA
AmB,GAAG,OAAO,UAAU,WAAW;AACxD,uBAAiB,IAAI,gBAAgB;AAGrC,YAAM,eAAe,CAAC,iCACjB,eAAe,gBAA+C,IAG/D;AAEJ,UAAI,YAAY;AAEhB,UAAI,CAAC,WAAW;AACd,oBAAY,MAAM,kBAAkB,SAAS;AAC7C,gBAAQ,KAAK,8BAA8B,gBAAgB,EAAE;AAAA,MAC/D;AAGA,eAAS,EAAE,GAAG,QAAQ,CAAC,gBAAgB,GAAG,UAAU;AAGpD,kBAAY,KAAK;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,QACA,SAAS;AAAA,QACT,QAAQ,aAAa;AAAA,QACrB,SAAS,aAAa;AAAA,MACxB,CAAC;AAED,cAAQ,KAAK,cAAc,gBAAgB,IAAI,YAAY,EAAE;AAAA,IAC/D;AAAA,EACF;AAGA,QAAM,qBAA+C,CAAC;AACtD,aAAW,CAAC,KAAK,SAAS,KAAK,OAAO,QAAQ,cAAc,GAAG;AAC7D,QAAI,iBAAiB,IAAI,GAAG,GAAG;AAE7B,UAAI,CAAC,OAAO,GAAG,GAAG;AAChB,2BAAmB,GAAG,IAAI;AAAA,MAC5B;AAAA,IACF;AAAA,EACF;AAGA,WAAS,EAAE,GAAG,oBAAoB,GAAG,OAAO;AAE5C,MAAI,QAAQ,IAAI,aAAa,eAAe;AAC1C,QAAI;AAEF,UAAI,KAAK,UAAU,MAAM,MAAM,KAAK,UAAU,cAAc,GAAG;AAE7D;AAAA,UACE;AAAA,UACA,KAAK,UAAU,QAAQ,MAAM,CAAC;AAAA,QAChC;AACA,gBAAQ,KAAK,sDAAsD;AAAA,MACrE;AAAA,IACF,SAAS,OAAO;AACd,cAAQ,MAAM,KAAK;AAAA,IACrB;AAAA,EACF;AACF;AAGA,mBAAmB;AASZ,MAAM,uBAAuB,OAClC,OACA,aAAqB,wBACrB,gBAAwB,mCACK;AAE7B,QAAM,iBAAiB,MAAM,kBAAkB,KAAK;AAGpD,QAAM,YAAY,YACf,IAAI,CAAC,WAAW;AAAA,IACf,GAAG;AAAA,IACH,YAAY,iBAAiB,gBAAgB,MAAM,SAAS;AAAA;AAAA,EAC9D,EAAE,EACD,OAAO,CAAC,UAAU,MAAM,aAAa,aAAa,EAClD,KAAK,CAAC,GAAG,MAAM,EAAE,aAAa,EAAE,UAAU,EAC1C,MAAM,GAAG,UAAU;AAEtB,QAAM,iBAAiB,IAAI,IAAI,UAAU,IAAI,CAAC,UAAU,MAAM,OAAO,CAAC;AAEtE,QAAM,qBAAqB,YAAY;AAAA,IAAK,CAAC,GAAG,MAC9C,eAAe,IAAI,EAAE,OAAO,IAAI,KAAK;AAAA,EACvC;AAEA,QAAM,UAAU,mBAAmB;AAAA,IAAO,CAAC,UACzC,UAAU;AAAA,MACR,CAAC,MAAM,EAAE,YAAY,MAAM,WAAW,EAAE,gBAAgB,MAAM;AAAA,IAChE;AAAA,EACF;AAEA,UAAQ,IAAI,EAAE,eAAe,CAAC;AAG9B,SAAO;AACT;AASA,MAAM,iBAAiB,CAAC,qBAAqC;AAC3D,QAAM,YAAY,QAAQ,cAAc,YAAY,GAAG,CAAC;AACxD,QAAM,eAAe,KAAK,WAAW,gBAAgB;AACrD,QAAM,cAAc,aAAa,cAAc,OAAO;AACtD,SAAO;AACT;AAEA,MAAM,kBAAkB,eAAe,aAAa;AAG7C,MAAM,aAA2C;AAAA,EACtD,MAAM;AAAA,EACN,SAAS;AACX;AAkBO,MAAM,iBAAiB,OAC5B,UACA,UACA,YACkC;AAElC,QAAM,QAAQ,SACX,OAAO,CAAC,YAAY,QAAQ,SAAS,MAAM,EAC3C,IAAI,CAAC,YAAY,KAAK,QAAQ,OAAO,EAAE,EACvC,KAAK,IAAI;AAGZ,QAAM,0BAA0B,MAAM,qBAAqB,KAAK;AAGhE,QAAM,eAAe,WAAW,QAAQ;AAAA,IACtC;AAAA,IACA,wBAAwB,WAAW,IAC/B,qDACA,wBACG;AAAA,MAAI,CAAC,KAAK,QACT;AAAA,QACE;AAAA,QACA;AAAA,QACA,YAAY,GAAG;AAAA,QACf,cAAc,IAAI,WAAW,IAAI,IAAI,QAAQ,MAAM;AAAA,QACnD,aAAa,IAAI,OAAO;AAAA,QACxB,YAAY,IAAI,MAAM;AAAA,QACtB;AAAA,QACA,IAAI;AAAA,QACJ;AAAA,MACF,EAAE,KAAK,IAAI;AAAA,IACb,EACC,KAAK,MAAM;AAAA;AAAA,EACpB;AAGA,QAAM,aAAa;AAAA,IACjB;AAAA,MACE,MAAM;AAAA,MACN,SAAS;AAAA,IACX;AAAA,IACA,GAAG,SAAS,MAAM,EAAE;AAAA,EACtB;AAEA,MAAI,CAAC,UAAU;AACb,UAAM,IAAI,MAAM,uCAAuC;AAAA,EACzD;AAGA,MAAI,eAAe;AACnB,QAAM,SAAS,WAAW;AAAA,IACxB,GAAG;AAAA,IACH,UAAU;AAAA,EACZ,CAAC;AAGD,mBAAiB,SAAS,OAAO,YAAY;AAC3C,oBAAgB;AAChB,aAAS,YAAY,KAAK;AAAA,EAC5B;AAGA,QAAM,eAAe;AAAA,IACnB,GAAG,IAAI,IAAI,wBAAwB,IAAI,CAAC,QAAQ,IAAI,OAAO,CAAC;AAAA,EAC9D;AAGA,SAAO;AAAA,IACL,UAAU,gBAAgB;AAAA,IAC1B;AAAA,EACF;AACF;","names":[]}
+
{"version":3,"sources":["../../../../../src/utils/AI/askDocQuestion/askDocQuestion.ts"],"sourcesContent":["import { getBlogs, getDocs, getFrequentQuestions } from '@intlayer/docs';\nimport { streamText } from 'ai';\nimport dotenv from 'dotenv';\nimport { readFileSync, writeFileSync } from 'fs';\nimport { getMarkdownMetadata } from 'intlayer';\nimport { OpenAI } from 'openai';\nimport { dirname, join } from 'path';\nimport { fileURLToPath } from 'url';\nimport {\n AIConfig,\n AIOptions,\n AIProvider,\n ChatCompletionRequestMessage,\n} from '../aiSdk';\nimport embeddingsList from './embeddings.json' with { type: 'json' };\n\ntype VectorStoreEl = {\n fileKey: string;\n chunkNumber: number;\n content: string;\n embedding: number[];\n docUrl: string;\n docName: string;\n};\n\n/**\n * Simple in-memory vector store to hold document embeddings and their content.\n * Each entry contains:\n * - fileKey: A unique key identifying the file\n * - chunkNumber: The number of the chunk within the document\n * - content: The chunk content\n * - embedding: The numerical embedding vector for the chunk\n */\nconst vectorStore: VectorStoreEl[] = [];\n\n/*\n * Ask question AI configuration\n */\nconst MODEL: AIOptions['model'] = 'chatgpt-4o-latest'; // Model to use for chat completions\nconst MODEL_TEMPERATURE: AIOptions['temperature'] = 0.1; // Temperature to use for chat completions\nconst MAX_RELEVANT_CHUNKS_NB: number = 20; // Maximum number of relevant chunks to attach to chatGPT context\nconst MIN_RELEVANT_CHUNKS_SIMILARITY: number = 0.42; // Minimum similarity required for a chunk to be considered relevant\n\nexport const aiDefaultOptions: AIOptions = {\n provider: AIProvider.OPENAI,\n model: MODEL,\n temperature: MODEL_TEMPERATURE,\n};\n\n/*\n * Embedding model configuration\n */\nconst EMBEDDING_MODEL: OpenAI.EmbeddingModel = 'text-embedding-3-large'; // Model to use for embedding generation\nconst OVERLAP_TOKENS: number = 200; // Number of tokens to overlap between chunks\nconst MAX_CHUNK_TOKENS: number = 800; // Maximum number of tokens per chunk\nconst CHAR_BY_TOKEN: number = 4.15; // Approximate pessimistically the number of characters per token // Can use `tiktoken` or other tokenizers to calculate it more precisely\nconst MAX_CHARS: number = MAX_CHUNK_TOKENS * CHAR_BY_TOKEN;\nconst OVERLAP_CHARS: number = OVERLAP_TOKENS * CHAR_BY_TOKEN;\n\n/**\n * Splits a given text into chunks ensuring each chunk does not exceed MAX_CHARS.\n * @param text - The input text to split.\n * @returns - Array of text chunks.\n */\nconst chunkText = (text: string): string[] => {\n const chunks: string[] = [];\n let start = 0;\n\n while (start < text.length) {\n let end = Math.min(start + MAX_CHARS, text.length);\n\n // Ensure we don't cut words in the middle (find nearest space)\n if (end < text.length) {\n const lastSpace = text.lastIndexOf(' ', end);\n if (lastSpace > start) {\n end = lastSpace;\n }\n }\n\n chunks.push(text.substring(start, end));\n\n // Move start forward correctly\n const nextStart = end - OVERLAP_CHARS;\n if (nextStart <= start) {\n // Prevent infinite loop if overlap is too large\n start = end;\n } else {\n start = nextStart;\n }\n }\n\n return chunks;\n};\n\n/**\n * Generates an embedding for a given text using OpenAI's embedding API.\n * Trims the text if it exceeds the maximum allowed characters.\n *\n * @param text - The input text to generate an embedding for\n * @returns The embedding vector as a number array\n */\nconst generateEmbedding = async (text: string): Promise<number[]> => {\n try 
{\n const openaiClient = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });\n\n const response = await openaiClient.embeddings.create({\n model: EMBEDDING_MODEL,\n input: text,\n });\n\n return response.data[0].embedding;\n } catch (error) {\n console.error('Error generating embedding:', error);\n return [];\n }\n};\n\n/**\n * Calculates the cosine similarity between two vectors.\n * Cosine similarity measures the cosine of the angle between two vectors in an inner product space.\n * Used to determine the similarity between chunks of text.\n *\n * @param vecA - The first vector\n * @param vecB - The second vector\n * @returns The cosine similarity score\n */\nconst cosineSimilarity = (vecA: number[], vecB: number[]): number => {\n // Calculate the dot product of the two vectors\n const dotProduct = vecA.reduce((sum, a, idx) => sum + a * vecB[idx], 0);\n\n // Calculate the magnitude (Euclidean norm) of each vector\n const magnitudeA = Math.sqrt(vecA.reduce((sum, a) => sum + a * a, 0));\n const magnitudeB = Math.sqrt(vecB.reduce((sum, b) => sum + b * b, 0));\n\n // Compute and return the cosine similarity\n return dotProduct / (magnitudeA * magnitudeB);\n};\n\n/**\n * Indexes all Markdown documents by generating embeddings for each chunk and storing them in memory.\n * Also updates the embeddings.json file if new embeddings are generated.\n * Handles cases where files have been updated and chunk counts have changed.\n */\nexport const indexMarkdownFiles = async (): Promise<void> => {\n const env = process.env.NODE_ENV;\n dotenv.config({\n path: [`.env.${env}.local`, `.env.${env}`, '.env.local', '.env'],\n });\n\n if (process.env.SKIP_DOC_EMBEDDINGS_INDEX === 'true') {\n return;\n }\n\n // Retrieve documentation and blog posts in English locale\n const frequentQuestions = await getFrequentQuestions();\n const docs = await getDocs();\n const blogs = await getBlogs();\n\n let result: Record<string, number[]> = {}; // Object to hold updated embeddings\n const currentChunkKeys = new Set<string>(); // Track which chunks should exist\n\n const files = { ...docs, ...blogs, ...frequentQuestions }; // Combine docs and blogs into a single object\n\n // Iterate over each file key (identifier) in the combined files\n for await (const fileKey of Object.keys(files)) {\n // Get the metadata of the file\n const fileMetadata = getMarkdownMetadata(\n files[fileKey as keyof typeof files] as string\n );\n\n // Split the document into chunks based on headings\n const fileChunks = chunkText(\n files[fileKey as keyof typeof files] as string\n );\n\n // Check if the number of chunks has changed for this file\n const existingChunksForFile = Object.keys(embeddingsList).filter((key) =>\n key.startsWith(`${fileKey}/chunk_`)\n );\n const currentChunkCount = fileChunks.length;\n const previousChunkCount = existingChunksForFile.length;\n\n let shouldRegenerateFileEmbeddings = false;\n\n // If chunk count differs, we need to regenerate embeddings for this file\n if (currentChunkCount !== previousChunkCount) {\n console.info(\n `File \"${fileKey}\" chunk count changed: ${previousChunkCount} -> ${currentChunkCount}. 
Regenerating embeddings.`\n );\n shouldRegenerateFileEmbeddings = true;\n }\n\n // Iterate over each chunk within the current file\n for await (const chunkIndex of Object.keys(fileChunks)) {\n const chunkNumber = Number(chunkIndex) + 1; // Chunk number starts at 1\n const chunksNumber = fileChunks.length;\n\n const fileChunk = fileChunks[\n chunkIndex as keyof typeof fileChunks\n ] as string;\n\n const embeddingKeyName = `${fileKey}/chunk_${chunkNumber}`; // Unique key for the chunk\n currentChunkKeys.add(embeddingKeyName); // Track this chunk as current\n\n // Retrieve precomputed embedding if available and file hasn't changed\n const docEmbedding = !shouldRegenerateFileEmbeddings\n ? (embeddingsList[embeddingKeyName as keyof typeof embeddingsList] as\n | number[]\n | undefined)\n : undefined;\n\n let embedding = docEmbedding; // Use existing embedding if available and valid\n\n if (!embedding) {\n embedding = await generateEmbedding(fileChunk); // Generate embedding if not present or file changed\n console.info(`- Generated new embedding: ${embeddingKeyName}`);\n }\n\n // Update the result object with the embedding\n result = { ...result, [embeddingKeyName]: embedding };\n\n // Store the embedding and content in the in-memory vector store\n vectorStore.push({\n fileKey,\n chunkNumber,\n embedding,\n content: fileChunk,\n docUrl: fileMetadata.url,\n docName: fileMetadata.title,\n });\n\n console.info(`- Indexed: ${embeddingKeyName}/${chunksNumber}`);\n }\n }\n\n // Remove outdated embeddings that no longer exist in current files\n const filteredEmbeddings: Record<string, number[]> = {};\n for (const [key, embedding] of Object.entries(embeddingsList)) {\n if (currentChunkKeys.has(key)) {\n // Only keep embeddings for chunks that still exist\n if (!result[key]) {\n filteredEmbeddings[key] = embedding as number[];\n }\n }\n }\n\n // Merge filtered existing embeddings with new ones\n result = { ...filteredEmbeddings, ...result };\n\n if (process.env.NODE_ENV === 'development') {\n try {\n // Compare the newly generated embeddings with existing ones\n if (JSON.stringify(result) !== JSON.stringify(embeddingsList)) {\n // If there are new embeddings or changes, save them to embeddings.json\n writeFileSync(\n 'src/utils/AI/askDocQuestion/embeddings.json',\n JSON.stringify(result, null, 2)\n );\n console.info('Updated embeddings.json with new/changed embeddings.');\n }\n } catch (error) {\n console.error(error); // Log any errors during the file write process\n }\n }\n};\n\n// Automatically index Markdown files\nindexMarkdownFiles();\n\n/**\n * Searches the indexed documents for the most relevant chunks based on a query.\n * Utilizes cosine similarity to find the closest matching embeddings.\n *\n * @param query - The search query provided by the user\n * @returns An array of the top matching document chunks' content\n */\nexport const searchChunkReference = async (\n query: string,\n maxResults: number = MAX_RELEVANT_CHUNKS_NB,\n minSimilarity: number = MIN_RELEVANT_CHUNKS_SIMILARITY\n): Promise<VectorStoreEl[]> => {\n // Generate an embedding for the user's query\n const queryEmbedding = await generateEmbedding(query);\n\n // Calculate similarity scores between the query embedding and each document's embedding\n const selection = vectorStore\n .map((chunk) => ({\n ...chunk,\n similarity: cosineSimilarity(queryEmbedding, chunk.embedding), // Add similarity score to each doc\n }))\n .filter((chunk) => chunk.similarity > minSimilarity) // Filter out documents with low similarity scores\n .sort((a, 
b) => b.similarity - a.similarity) // Sort documents by highest similarity first\n .slice(0, maxResults); // Select the top 6 most similar documents\n\n const orderedDocKeys = new Set(selection.map((chunk) => chunk.fileKey));\n\n const orderedVectorStore = vectorStore.sort((a, b) =>\n orderedDocKeys.has(a.fileKey) ? -1 : 1\n );\n\n const results = orderedVectorStore.filter((chunk) =>\n selection.some(\n (v) => v.fileKey === chunk.fileKey && v.chunkNumber === chunk.chunkNumber\n )\n );\n\n console.log({ orderedDocKeys });\n\n // Return the content of the top matching documents\n return results;\n};\n\n/**\n * Reads the content of a file synchronously.\n *\n * @function\n * @param relativeFilePath - The relative or absolute path to the target file.\n * @returns The entire contents of the specified file as a UTF-8 encoded string.\n */\nconst getFileContent = (relativeFilePath: string): string => {\n const __dirname = dirname(fileURLToPath(import.meta.url));\n const absolutePath = join(__dirname, relativeFilePath);\n const fileContent = readFileSync(absolutePath, 'utf-8');\n return fileContent;\n};\n\nconst CHAT_GPT_PROMPT = getFileContent('./PROMPT.md');\n\n// Initial prompt configuration for the chatbot\nexport const initPrompt: ChatCompletionRequestMessage = {\n role: 'system',\n content: CHAT_GPT_PROMPT,\n};\n\nexport type AskDocQuestionResult = {\n response: string;\n relatedFiles: string[];\n};\n\nexport type AskDocQuestionOptions = {\n onMessage?: (chunk: string) => void;\n};\n\n/**\n * Handles the \"Ask a question\" endpoint in an Express.js route.\n * Processes user messages, retrieves relevant documents, and interacts with AI models to generate responses.\n *\n * @param messages - An array of chat messages from the user and assistant\n * @returns The assistant's response as a string\n */\nexport const askDocQuestion = async (\n messages: ChatCompletionRequestMessage[],\n aiConfig: AIConfig,\n options?: AskDocQuestionOptions\n): Promise<AskDocQuestionResult> => {\n // Format the user's question to keep only the relevant keywords\n const query = messages\n .filter((message) => message.role === 'user')\n .map((message) => `- ${message.content}`)\n .join('\\n');\n\n // 1) Find relevant documents based on the user's question\n const relevantFilesReferences = await searchChunkReference(query);\n\n // 2) Integrate the relevant documents into the initial system prompt\n const systemPrompt = initPrompt.content.replace(\n '{{relevantFilesReferences}}',\n relevantFilesReferences.length === 0\n ? 
'Not relevant file found related to the question.'\n : relevantFilesReferences\n .map((doc, idx) =>\n [\n '-----',\n '---',\n `chunkId: ${idx}`,\n `docChunk: \"${doc.chunkNumber}/${doc.fileKey.length}\"`,\n `docName: \"${doc.docName}\"`,\n `docUrl: \"${doc.docUrl}\"`,\n `---`,\n doc.content,\n `-----`,\n ].join('\\n')\n )\n .join('\\n\\n') // Insert relevant docs into the prompt\n );\n\n // Format messages for AI SDK\n const aiMessages = [\n {\n role: 'system' as const,\n content: systemPrompt,\n },\n ...messages.slice(-8),\n ];\n\n if (!aiConfig) {\n throw new Error('Failed to initialize AI configuration');\n }\n\n // 3) Use the AI SDK to stream the response\n let fullResponse = '';\n const stream = streamText({\n ...aiConfig,\n messages: aiMessages,\n });\n\n // Process the stream\n for await (const chunk of stream.textStream) {\n fullResponse += chunk;\n options?.onMessage?.(chunk);\n }\n\n // 4) Extract unique related files\n const relatedFiles = [\n ...new Set(relevantFilesReferences.map((doc) => doc.fileKey)),\n ];\n\n // 5) Return the assistant's response to the user\n return {\n response: fullResponse ?? 'Error: No result found',\n relatedFiles,\n };\n};\n"],"mappings":"AAAA,SAAS,UAAU,SAAS,4BAA4B;AACxD,SAAS,kBAAkB;AAC3B,OAAO,YAAY;AACnB,SAAS,cAAc,qBAAqB;AAC5C,SAAS,2BAA2B;AACpC,SAAS,cAAc;AACvB,SAAS,SAAS,YAAY;AAC9B,SAAS,qBAAqB;AAC9B;AAAA,EAGE;AAAA,OAEK;AACP,OAAO,oBAAoB,oBAAoB,KAAK,EAAE,MAAM,OAAO;AAmBnE,MAAM,cAA+B,CAAC;AAKtC,MAAM,QAA4B;AAClC,MAAM,oBAA8C;AACpD,MAAM,yBAAiC;AACvC,MAAM,iCAAyC;AAExC,MAAM,mBAA8B;AAAA,EACzC,UAAU,WAAW;AAAA,EACrB,OAAO;AAAA,EACP,aAAa;AACf;AAKA,MAAM,kBAAyC;AAC/C,MAAM,iBAAyB;AAC/B,MAAM,mBAA2B;AACjC,MAAM,gBAAwB;AAC9B,MAAM,YAAoB,mBAAmB;AAC7C,MAAM,gBAAwB,iBAAiB;AAO/C,MAAM,YAAY,CAAC,SAA2B;AAC5C,QAAM,SAAmB,CAAC;AAC1B,MAAI,QAAQ;AAEZ,SAAO,QAAQ,KAAK,QAAQ;AAC1B,QAAI,MAAM,KAAK,IAAI,QAAQ,WAAW,KAAK,MAAM;AAGjD,QAAI,MAAM,KAAK,QAAQ;AACrB,YAAM,YAAY,KAAK,YAAY,KAAK,GAAG;AAC3C,UAAI,YAAY,OAAO;AACrB,cAAM;AAAA,MACR;AAAA,IACF;AAEA,WAAO,KAAK,KAAK,UAAU,OAAO,GAAG,CAAC;AAGtC,UAAM,YAAY,MAAM;AACxB,QAAI,aAAa,OAAO;AAEtB,cAAQ;AAAA,IACV,OAAO;AACL,cAAQ;AAAA,IACV;AAAA,EACF;AAEA,SAAO;AACT;AASA,MAAM,oBAAoB,OAAO,SAAoC;AACnE,MAAI;AACF,UAAM,eAAe,IAAI,OAAO,EAAE,QAAQ,QAAQ,IAAI,eAAe,CAAC;AAEtE,UAAM,WAAW,MAAM,aAAa,WAAW,OAAO;AAAA,MACpD,OAAO;AAAA,MACP,OAAO;AAAA,IACT,CAAC;AAED,WAAO,SAAS,KAAK,CAAC,EAAE;AAAA,EAC1B,SAAS,OAAO;AACd,YAAQ,MAAM,+BAA+B,KAAK;AAClD,WAAO,CAAC;AAAA,EACV;AACF;AAWA,MAAM,mBAAmB,CAAC,MAAgB,SAA2B;AAEnE,QAAM,aAAa,KAAK,OAAO,CAAC,KAAK,GAAG,QAAQ,MAAM,IAAI,KAAK,GAAG,GAAG,CAAC;AAGtE,QAAM,aAAa,KAAK,KAAK,KAAK,OAAO,CAAC,KAAK,MAAM,MAAM,IAAI,GAAG,CAAC,CAAC;AACpE,QAAM,aAAa,KAAK,KAAK,KAAK,OAAO,CAAC,KAAK,MAAM,MAAM,IAAI,GAAG,CAAC,CAAC;AAGpE,SAAO,cAAc,aAAa;AACpC;AAOO,MAAM,qBAAqB,YAA2B;AAC3D,QAAM,MAAM,QAAQ,IAAI;AACxB,SAAO,OAAO;AAAA,IACZ,MAAM,CAAC,QAAQ,GAAG,UAAU,QAAQ,GAAG,IAAI,cAAc,MAAM;AAAA,EACjE,CAAC;AAED,MAAI,QAAQ,IAAI,8BAA8B,QAAQ;AACpD;AAAA,EACF;AAGA,QAAM,oBAAoB,MAAM,qBAAqB;AACrD,QAAM,OAAO,MAAM,QAAQ;AAC3B,QAAM,QAAQ,MAAM,SAAS;AAE7B,MAAI,SAAmC,CAAC;AACxC,QAAM,mBAAmB,oBAAI,IAAY;AAEzC,QAAM,QAAQ,EAAE,GAAG,MAAM,GAAG,OAAO,GAAG,kBAAkB;AAGxD,mBAAiB,WAAW,OAAO,KAAK,KAAK,GAAG;AAE9C,UAAM,eAAe;AAAA,MACnB,MAAM,OAA6B;AAAA,IACrC;AAGA,UAAM,aAAa;AAAA,MACjB,MAAM,OAA6B;AAAA,IACrC;AAGA,UAAM,wBAAwB,OAAO,KAAK,cAAc,EAAE;AAAA,MAAO,CAAC,QAChE,IAAI,WAAW,GAAG,OAAO,SAAS;AAAA,IACpC;AACA,UAAM,oBAAoB,WAAW;AACrC,UAAM,qBAAqB,sBAAsB;AAEjD,QAAI,iCAAiC;AAGrC,QAAI,sBAAsB,oBAAoB;AAC5C,cAAQ;AAAA,QACN,SAAS,OAAO,0BAA0B,kBAAkB,OAAO,iBAAiB;AAAA,MACtF;AACA,uCAAiC;AAAA,IACnC;AAGA,qBAAiB,cAAc,OAAO,KAAK,UAAU,GAAG;AACtD,YAAM,cAAc,OAAO,UAAU,IAAI;AACzC,YAAM,eA
Ae,WAAW;AAEhC,YAAM,YAAY,WAChB,UACF;AAEA,YAAM,mBAAmB,GAAG,OAAO,UAAU,WAAW;AACxD,uBAAiB,IAAI,gBAAgB;AAGrC,YAAM,eAAe,CAAC,iCACjB,eAAe,gBAA+C,IAG/D;AAEJ,UAAI,YAAY;AAEhB,UAAI,CAAC,WAAW;AACd,oBAAY,MAAM,kBAAkB,SAAS;AAC7C,gBAAQ,KAAK,8BAA8B,gBAAgB,EAAE;AAAA,MAC/D;AAGA,eAAS,EAAE,GAAG,QAAQ,CAAC,gBAAgB,GAAG,UAAU;AAGpD,kBAAY,KAAK;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,QACA,SAAS;AAAA,QACT,QAAQ,aAAa;AAAA,QACrB,SAAS,aAAa;AAAA,MACxB,CAAC;AAED,cAAQ,KAAK,cAAc,gBAAgB,IAAI,YAAY,EAAE;AAAA,IAC/D;AAAA,EACF;AAGA,QAAM,qBAA+C,CAAC;AACtD,aAAW,CAAC,KAAK,SAAS,KAAK,OAAO,QAAQ,cAAc,GAAG;AAC7D,QAAI,iBAAiB,IAAI,GAAG,GAAG;AAE7B,UAAI,CAAC,OAAO,GAAG,GAAG;AAChB,2BAAmB,GAAG,IAAI;AAAA,MAC5B;AAAA,IACF;AAAA,EACF;AAGA,WAAS,EAAE,GAAG,oBAAoB,GAAG,OAAO;AAE5C,MAAI,QAAQ,IAAI,aAAa,eAAe;AAC1C,QAAI;AAEF,UAAI,KAAK,UAAU,MAAM,MAAM,KAAK,UAAU,cAAc,GAAG;AAE7D;AAAA,UACE;AAAA,UACA,KAAK,UAAU,QAAQ,MAAM,CAAC;AAAA,QAChC;AACA,gBAAQ,KAAK,sDAAsD;AAAA,MACrE;AAAA,IACF,SAAS,OAAO;AACd,cAAQ,MAAM,KAAK;AAAA,IACrB;AAAA,EACF;AACF;AAGA,mBAAmB;AASZ,MAAM,uBAAuB,OAClC,OACA,aAAqB,wBACrB,gBAAwB,mCACK;AAE7B,QAAM,iBAAiB,MAAM,kBAAkB,KAAK;AAGpD,QAAM,YAAY,YACf,IAAI,CAAC,WAAW;AAAA,IACf,GAAG;AAAA,IACH,YAAY,iBAAiB,gBAAgB,MAAM,SAAS;AAAA;AAAA,EAC9D,EAAE,EACD,OAAO,CAAC,UAAU,MAAM,aAAa,aAAa,EAClD,KAAK,CAAC,GAAG,MAAM,EAAE,aAAa,EAAE,UAAU,EAC1C,MAAM,GAAG,UAAU;AAEtB,QAAM,iBAAiB,IAAI,IAAI,UAAU,IAAI,CAAC,UAAU,MAAM,OAAO,CAAC;AAEtE,QAAM,qBAAqB,YAAY;AAAA,IAAK,CAAC,GAAG,MAC9C,eAAe,IAAI,EAAE,OAAO,IAAI,KAAK;AAAA,EACvC;AAEA,QAAM,UAAU,mBAAmB;AAAA,IAAO,CAAC,UACzC,UAAU;AAAA,MACR,CAAC,MAAM,EAAE,YAAY,MAAM,WAAW,EAAE,gBAAgB,MAAM;AAAA,IAChE;AAAA,EACF;AAEA,UAAQ,IAAI,EAAE,eAAe,CAAC;AAG9B,SAAO;AACT;AASA,MAAM,iBAAiB,CAAC,qBAAqC;AAC3D,QAAM,YAAY,QAAQ,cAAc,YAAY,GAAG,CAAC;AACxD,QAAM,eAAe,KAAK,WAAW,gBAAgB;AACrD,QAAM,cAAc,aAAa,cAAc,OAAO;AACtD,SAAO;AACT;AAEA,MAAM,kBAAkB,eAAe,aAAa;AAG7C,MAAM,aAA2C;AAAA,EACtD,MAAM;AAAA,EACN,SAAS;AACX;AAkBO,MAAM,iBAAiB,OAC5B,UACA,UACA,YACkC;AAElC,QAAM,QAAQ,SACX,OAAO,CAAC,YAAY,QAAQ,SAAS,MAAM,EAC3C,IAAI,CAAC,YAAY,KAAK,QAAQ,OAAO,EAAE,EACvC,KAAK,IAAI;AAGZ,QAAM,0BAA0B,MAAM,qBAAqB,KAAK;AAGhE,QAAM,eAAe,WAAW,QAAQ;AAAA,IACtC;AAAA,IACA,wBAAwB,WAAW,IAC/B,qDACA,wBACG;AAAA,MAAI,CAAC,KAAK,QACT;AAAA,QACE;AAAA,QACA;AAAA,QACA,YAAY,GAAG;AAAA,QACf,cAAc,IAAI,WAAW,IAAI,IAAI,QAAQ,MAAM;AAAA,QACnD,aAAa,IAAI,OAAO;AAAA,QACxB,YAAY,IAAI,MAAM;AAAA,QACtB;AAAA,QACA,IAAI;AAAA,QACJ;AAAA,MACF,EAAE,KAAK,IAAI;AAAA,IACb,EACC,KAAK,MAAM;AAAA;AAAA,EACpB;AAGA,QAAM,aAAa;AAAA,IACjB;AAAA,MACE,MAAM;AAAA,MACN,SAAS;AAAA,IACX;AAAA,IACA,GAAG,SAAS,MAAM,EAAE;AAAA,EACtB;AAEA,MAAI,CAAC,UAAU;AACb,UAAM,IAAI,MAAM,uCAAuC;AAAA,EACzD;AAGA,MAAI,eAAe;AACnB,QAAM,SAAS,WAAW;AAAA,IACxB,GAAG;AAAA,IACH,UAAU;AAAA,EACZ,CAAC;AAGD,mBAAiB,SAAS,OAAO,YAAY;AAC3C,oBAAgB;AAChB,aAAS,YAAY,KAAK;AAAA,EAC5B;AAGA,QAAM,eAAe;AAAA,IACnB,GAAG,IAAI,IAAI,wBAAwB,IAAI,CAAC,QAAQ,IAAI,OAAO,CAAC;AAAA,EAC9D;AAGA,SAAO;AAAA,IACL,UAAU,gBAAgB;AAAA,IAC1B;AAAA,EACF;AACF;","names":[]}
@@ -6,6 +6,7 @@ import { getProjectById } from "./../../services/project.service.mjs";
 import { getUserById } from "./../../services/user.service.mjs";
 import { mapOrganizationToAPI } from "./../../utils/mapper/organization.mjs";
 import { mapProjectToAPI } from "./../../utils/mapper/project.mjs";
+import { mapSessionToAPI } from "./../../utils/mapper/session.mjs";
 import { mapUserToAPI } from "./../../utils/mapper/user.mjs";
 import {
   computeEffectivePermission,
@@ -23,9 +24,10 @@ const formatSession = (session) => {
     permissions = intersectPermissions(permissions, session.permissions);
   }
   const resultSession = {
-    user: mapUserToAPI(session.user),
-    organization: mapOrganizationToAPI(session.organization),
-    project: mapProjectToAPI(session.project),
+    session: session.session,
+    user: session.user,
+    organization: session.organization,
+    project: session.project,
     authType: "session",
     permissions,
     roles
@@ -67,7 +69,7 @@ const getAuth = (dbClient) => {
     },
     hooks: {
       after: createAuthMiddleware(async (ctx) => {
-        const { path, context } = ctx;
+        const { path, context, error } = ctx;
         const newUser = context.newSession?.user;
         const existingUser = context.session?.user;
         const user = newUser ?? existingUser;
@@ -149,7 +151,8 @@ const getAuth = (dbClient) => {
           project: projectAPI ?? null,
           authType: "session"
         };
-        return formatSession(sessionWithNoPermission);
+        const formattedSession = formatSession(sessionWithNoPermission);
+        return mapSessionToAPI(formattedSession);
       })
     ],
     emailAndPassword: {
@@ -178,6 +181,7 @@ const getAuth = (dbClient) => {
     },
     emailVerification: {
       autoSignInAfterVerification: true,
+      sendOnSignIn: true,
       sendVerificationEmail: async ({ user, url }) => {
         logger.info("sending verification email", { email: user.email });
         await sendEmail({
@@ -1 +1 @@
-
{"version":3,"sources":["../../../../src/utils/auth/getAuth.ts"],"sourcesContent":["import type { OrganizationAPI } from '@/types/organization.types';\nimport type { ProjectAPI } from '@/types/project.types';\nimport type {\n SessionAPI,\n SessionContext,\n SessionDataApi,\n} from '@/types/session.types';\nimport type { User, UserAPI } from '@/types/user.types';\nimport { sendVerificationUpdate } from '@controllers/user.controller';\nimport { logger } from '@logger';\nimport { sendEmail } from '@services/email.service';\nimport { getOrganizationById } from '@services/organization.service';\nimport { getProjectById } from '@services/project.service';\nimport { getUserById } from '@services/user.service';\nimport { mapOrganizationToAPI } from '@utils/mapper/organization';\nimport { mapProjectToAPI } from '@utils/mapper/project';\nimport { mapUserToAPI } from '@utils/mapper/user';\nimport {\n computeEffectivePermission,\n getSessionRoles,\n intersectPermissions,\n} from '@utils/permissions';\nimport { betterAuth, OmitId } from 'better-auth';\nimport { mongodbAdapter } from 'better-auth/adapters/mongodb';\nimport { createAuthMiddleware } from 'better-auth/api';\nimport { customSession } from 'better-auth/plugins';\nimport type { MongoClient } from 'mongodb';\n\nexport type Auth = ReturnType<typeof betterAuth>;\n\nexport const formatSession = (session: SessionContext): OmitId<SessionAPI> => {\n const roles = getSessionRoles(session);\n let permissions = computeEffectivePermission(roles);\n\n // Intersect in the case a Access Token try to override the permissions\n if (session.permissions) {\n permissions = intersectPermissions(permissions, session.permissions);\n }\n\n const resultSession = {\n user: mapUserToAPI(session.user),\n organization: mapOrganizationToAPI(session.organization),\n project: mapProjectToAPI(session.project),\n authType: 'session',\n permissions,\n roles,\n } as OmitId<SessionAPI>;\n\n return resultSession;\n};\n\nexport const getAuth = (dbClient: MongoClient): Auth => {\n if (!dbClient) {\n throw new Error('MongoDB connection not established');\n }\n\n const auth = betterAuth({\n appName: 'Intlayer',\n\n database: mongodbAdapter(dbClient.db()),\n\n /**\n * User model\n */\n user: {\n modelName: 'users',\n },\n\n databaseHooks: {\n user: {\n create: {\n // Runs once, immediately after the INSERT\n after: async (user) => {\n if (!user?.emailVerified) return;\n\n await sendEmail({\n type: 'welcome',\n to: user.email,\n username: user.name ?? user.email.split('@')[0],\n loginLink: `${process.env.CLIENT_URL}/auth/login`,\n locale: (user as any).lang,\n });\n logger.info('Welcome e‑mail delivered', {\n email: user.email,\n });\n },\n },\n },\n },\n\n hooks: {\n after: createAuthMiddleware(async (ctx) => {\n const { path, context } = ctx;\n\n const newUser = context.newSession?.user;\n const existingUser = context.session?.user;\n const user = newUser ?? existingUser;\n\n if (!user) return;\n\n if (['/verify-email'].includes(path)) {\n sendVerificationUpdate(user as unknown as User);\n logger.info('SSE verification update sent', {\n email: user.email,\n userId: user.id,\n });\n\n await sendEmail({\n type: 'welcome',\n to: user.email,\n username: user.name ?? 
user.email.split('@')[0],\n loginLink: `${process.env.CLIENT_URL}/auth/login`,\n locale: (user as any).lang,\n });\n logger.info('Welcome e‑mail delivered', {\n email: user.email,\n });\n }\n }),\n },\n\n advanced: {\n // 1️⃣ Change or drop the global prefix\n // cookiePrefix: \"intlayer\", // => intlayer.session_token\n cookiePrefix: 'intlayer', // => session_token (no prefix)\n\n // 2️⃣ Override just the session‑token cookie\n cookies: {\n session_token: {\n // name: 'intlayer_session_token', // final name depends on the prefix above\n // attributes: { sameSite: \"lax\", maxAge: 60 * 60 * 24 } // optional\n },\n },\n\n // 3️⃣ (optional) turn off the automatic __Secure‑ prefix in non‑prod\n // useSecureCookies: false,\n },\n\n session: {\n modelName: 'sessions',\n id: 'id',\n\n additionalFields: {\n activeOrganizationId: { type: 'string', nullable: true, input: false },\n activeProjectId: { type: 'string', nullable: true, input: false },\n },\n },\n\n plugins: [\n customSession(async ({ session }) => {\n const typedSession = session as unknown as SessionDataApi;\n\n let userAPI: UserAPI | null = null;\n let organizationAPI: OrganizationAPI | null = null;\n let projectAPI: ProjectAPI | null = null;\n\n if (typedSession.userId) {\n const userData = await getUserById(typedSession.userId);\n\n if (userData) {\n userAPI = mapUserToAPI(userData);\n }\n }\n\n if (typedSession.activeOrganizationId) {\n const orgData = await getOrganizationById(\n typedSession.activeOrganizationId\n );\n\n if (orgData) {\n organizationAPI = mapOrganizationToAPI(orgData);\n }\n }\n if (typedSession.activeProjectId) {\n const projectData = await getProjectById(\n typedSession.activeProjectId\n );\n\n if (projectData) {\n projectAPI = mapProjectToAPI(projectData);\n }\n }\n\n const sessionWithNoPermission: SessionContext = {\n session: typedSession,\n user: userAPI!,\n organization: organizationAPI ?? null,\n project: projectAPI ?? null,\n authType: 'session',\n };\n\n return formatSession(sessionWithNoPermission);\n }),\n ],\n\n emailAndPassword: {\n enabled: true,\n disableSignUp: false,\n requireEmailVerification: true,\n minPasswordLength: 8,\n maxPasswordLength: 128,\n autoSignIn: true,\n sendResetPassword: async ({ user, url }) => {\n logger.info('sending reset password email', { email: user.email });\n await sendEmail({\n type: 'resetPassword',\n to: user.email,\n username: user.name ?? user.email.split('@')[0],\n resetLink: url,\n });\n },\n resetPasswordTokenExpiresIn: 3600,\n },\n accountLinking: {\n enabled: true, // allow linking in general\n trustedProviders: ['google', 'github'], // optional: auto‑link when Google verifies the e‑mail\n },\n emailVerification: {\n autoSignInAfterVerification: true,\n sendVerificationEmail: async ({ user, url }) => {\n logger.info('sending verification email', { email: user.email });\n await sendEmail({\n type: 'validate',\n to: user.email,\n username: user.name ?? 
user.email.split('@')[0],\n validationLink: url,\n });\n },\n },\n\n crossSubDomainCookies: {\n enabled: true,\n additionalCookies: ['session_token'],\n domain: process.env.CLIENT_URL as string,\n },\n cookiePrefix: 'intlayer',\n cookies: {\n session_token: {\n name: 'session_token',\n attributes: {\n httpOnly: true,\n secure: true,\n },\n },\n },\n\n trustedOrigins: [process.env.CLIENT_URL as string],\n\n socialProviders: {\n google: {\n clientId: process.env.GOOGLE_CLIENT_ID as string,\n clientSecret: process.env.GOOGLE_CLIENT_SECRET as string,\n },\n github: {\n clientId: process.env.GITHUB_CLIENT_ID as string,\n clientSecret: process.env.GITHUB_CLIENT_SECRET as string,\n },\n },\n\n logger: {\n log: (level, message, ...args) => logger[level](message, ...args),\n },\n });\n\n return auth;\n};\n"],"mappings":"AAQA,SAAS,8BAA8B;AACvC,SAAS,cAAc;AACvB,SAAS,iBAAiB;AAC1B,SAAS,2BAA2B;AACpC,SAAS,sBAAsB;AAC/B,SAAS,mBAAmB;AAC5B,SAAS,4BAA4B;AACrC,SAAS,uBAAuB;AAChC,SAAS,oBAAoB;AAC7B;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,SAAS,kBAA0B;AACnC,SAAS,sBAAsB;AAC/B,SAAS,4BAA4B;AACrC,SAAS,qBAAqB;AAKvB,MAAM,gBAAgB,CAAC,YAAgD;AAC5E,QAAM,QAAQ,gBAAgB,OAAO;AACrC,MAAI,cAAc,2BAA2B,KAAK;AAGlD,MAAI,QAAQ,aAAa;AACvB,kBAAc,qBAAqB,aAAa,QAAQ,WAAW;AAAA,EACrE;AAEA,QAAM,gBAAgB;AAAA,IACpB,MAAM,aAAa,QAAQ,IAAI;AAAA,IAC/B,cAAc,qBAAqB,QAAQ,YAAY;AAAA,IACvD,SAAS,gBAAgB,QAAQ,OAAO;AAAA,IACxC,UAAU;AAAA,IACV;AAAA,IACA;AAAA,EACF;AAEA,SAAO;AACT;AAEO,MAAM,UAAU,CAAC,aAAgC;AACtD,MAAI,CAAC,UAAU;AACb,UAAM,IAAI,MAAM,oCAAoC;AAAA,EACtD;AAEA,QAAM,OAAO,WAAW;AAAA,IACtB,SAAS;AAAA,IAET,UAAU,eAAe,SAAS,GAAG,CAAC;AAAA;AAAA;AAAA;AAAA,IAKtC,MAAM;AAAA,MACJ,WAAW;AAAA,IACb;AAAA,IAEA,eAAe;AAAA,MACb,MAAM;AAAA,QACJ,QAAQ;AAAA;AAAA,UAEN,OAAO,OAAO,SAAS;AACrB,gBAAI,CAAC,MAAM,cAAe;AAE1B,kBAAM,UAAU;AAAA,cACd,MAAM;AAAA,cACN,IAAI,KAAK;AAAA,cACT,UAAU,KAAK,QAAQ,KAAK,MAAM,MAAM,GAAG,EAAE,CAAC;AAAA,cAC9C,WAAW,GAAG,QAAQ,IAAI,UAAU;AAAA,cACpC,QAAS,KAAa;AAAA,YACxB,CAAC;AACD,mBAAO,KAAK,iCAA4B;AAAA,cACtC,OAAO,KAAK;AAAA,YACd,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,IAEA,OAAO;AAAA,MACL,OAAO,qBAAqB,OAAO,QAAQ;AACzC,cAAM,EAAE,MAAM,QAAQ,IAAI;AAE1B,cAAM,UAAU,QAAQ,YAAY;AACpC,cAAM,eAAe,QAAQ,SAAS;AACtC,cAAM,OAAO,WAAW;AAExB,YAAI,CAAC,KAAM;AAEX,YAAI,CAAC,eAAe,EAAE,SAAS,IAAI,GAAG;AACpC,iCAAuB,IAAuB;AAC9C,iBAAO,KAAK,gCAAgC;AAAA,YAC1C,OAAO,KAAK;AAAA,YACZ,QAAQ,KAAK;AAAA,UACf,CAAC;AAED,gBAAM,UAAU;AAAA,YACd,MAAM;AAAA,YACN,IAAI,KAAK;AAAA,YACT,UAAU,KAAK,QAAQ,KAAK,MAAM,MAAM,GAAG,EAAE,CAAC;AAAA,YAC9C,WAAW,GAAG,QAAQ,IAAI,UAAU;AAAA,YACpC,QAAS,KAAa;AAAA,UACxB,CAAC;AACD,iBAAO,KAAK,iCAA4B;AAAA,YACtC,OAAO,KAAK;AAAA,UACd,CAAC;AAAA,QACH;AAAA,MACF,CAAC;AAAA,IACH;AAAA,IAEA,UAAU;AAAA;AAAA;AAAA,MAGR,cAAc;AAAA;AAAA;AAAA,MAGd,SAAS;AAAA,QACP,eAAe;AAAA;AAAA;AAAA,QAGf;AAAA,MACF;AAAA;AAAA;AAAA,IAIF;AAAA,IAEA,SAAS;AAAA,MACP,WAAW;AAAA,MACX,IAAI;AAAA,MAEJ,kBAAkB;AAAA,QAChB,sBAAsB,EAAE,MAAM,UAAU,UAAU,MAAM,OAAO,MAAM;AAAA,QACrE,iBAAiB,EAAE,MAAM,UAAU,UAAU,MAAM,OAAO,MAAM;AAAA,MAClE;AAAA,IACF;AAAA,IAEA,SAAS;AAAA,MACP,cAAc,OAAO,EAAE,QAAQ,MAAM;AACnC,cAAM,eAAe;AAErB,YAAI,UAA0B;AAC9B,YAAI,kBAA0C;AAC9C,YAAI,aAAgC;AAEpC,YAAI,aAAa,QAAQ;AACvB,gBAAM,WAAW,MAAM,YAAY,aAAa,MAAM;AAEtD,cAAI,UAAU;AACZ,sBAAU,aAAa,QAAQ;AAAA,UACjC;AAAA,QACF;AAEA,YAAI,aAAa,sBAAsB;AACrC,gBAAM,UAAU,MAAM;AAAA,YACpB,aAAa;AAAA,UACf;AAEA,cAAI,SAAS;AACX,8BAAkB,qBAAqB,OAAO;AAAA,UAChD;AAAA,QACF;AACA,YAAI,aAAa,iBAAiB;AAChC,gBAAM,cAAc,MAAM;AAAA,YACxB,aAAa;AAAA,UACf;AAEA,cAAI,aAAa;AACf,yBAAa,gBAAgB,WAAW;AAAA,UAC1C;AAAA,QACF;AAEA,cAAM,0BAA0C;AAAA,UAC9C,SAAS;AAAA,UACT,MAAM;AAAA,UACN,cAAc,mBAAmB;AAAA,UACjC,SAAS,cAAc;AAAA,UACvB,UAAU;AAAA,QACZ;AAEA,eAAO,cAAc,uBAAuB;AA
AA,MAC9C,CAAC;AAAA,IACH;AAAA,IAEA,kBAAkB;AAAA,MAChB,SAAS;AAAA,MACT,eAAe;AAAA,MACf,0BAA0B;AAAA,MAC1B,mBAAmB;AAAA,MACnB,mBAAmB;AAAA,MACnB,YAAY;AAAA,MACZ,mBAAmB,OAAO,EAAE,MAAM,IAAI,MAAM;AAC1C,eAAO,KAAK,gCAAgC,EAAE,OAAO,KAAK,MAAM,CAAC;AACjE,cAAM,UAAU;AAAA,UACd,MAAM;AAAA,UACN,IAAI,KAAK;AAAA,UACT,UAAU,KAAK,QAAQ,KAAK,MAAM,MAAM,GAAG,EAAE,CAAC;AAAA,UAC9C,WAAW;AAAA,QACb,CAAC;AAAA,MACH;AAAA,MACA,6BAA6B;AAAA,IAC/B;AAAA,IACA,gBAAgB;AAAA,MACd,SAAS;AAAA;AAAA,MACT,kBAAkB,CAAC,UAAU,QAAQ;AAAA;AAAA,IACvC;AAAA,IACA,mBAAmB;AAAA,MACjB,6BAA6B;AAAA,MAC7B,uBAAuB,OAAO,EAAE,MAAM,IAAI,MAAM;AAC9C,eAAO,KAAK,8BAA8B,EAAE,OAAO,KAAK,MAAM,CAAC;AAC/D,cAAM,UAAU;AAAA,UACd,MAAM;AAAA,UACN,IAAI,KAAK;AAAA,UACT,UAAU,KAAK,QAAQ,KAAK,MAAM,MAAM,GAAG,EAAE,CAAC;AAAA,UAC9C,gBAAgB;AAAA,QAClB,CAAC;AAAA,MACH;AAAA,IACF;AAAA,IAEA,uBAAuB;AAAA,MACrB,SAAS;AAAA,MACT,mBAAmB,CAAC,eAAe;AAAA,MACnC,QAAQ,QAAQ,IAAI;AAAA,IACtB;AAAA,IACA,cAAc;AAAA,IACd,SAAS;AAAA,MACP,eAAe;AAAA,QACb,MAAM;AAAA,QACN,YAAY;AAAA,UACV,UAAU;AAAA,UACV,QAAQ;AAAA,QACV;AAAA,MACF;AAAA,IACF;AAAA,IAEA,gBAAgB,CAAC,QAAQ,IAAI,UAAoB;AAAA,IAEjD,iBAAiB;AAAA,MACf,QAAQ;AAAA,QACN,UAAU,QAAQ,IAAI;AAAA,QACtB,cAAc,QAAQ,IAAI;AAAA,MAC5B;AAAA,MACA,QAAQ;AAAA,QACN,UAAU,QAAQ,IAAI;AAAA,QACtB,cAAc,QAAQ,IAAI;AAAA,MAC5B;AAAA,IACF;AAAA,IAEA,QAAQ;AAAA,MACN,KAAK,CAAC,OAAO,YAAY,SAAS,OAAO,KAAK,EAAE,SAAS,GAAG,IAAI;AAAA,IAClE;AAAA,EACF,CAAC;AAED,SAAO;AACT;","names":[]}
+
{"version":3,"sources":["../../../../src/utils/auth/getAuth.ts"],"sourcesContent":["import type { OrganizationAPI } from '@/types/organization.types';\nimport type { ProjectAPI } from '@/types/project.types';\nimport type {\n Session,\n SessionContext,\n SessionDataApi,\n} from '@/types/session.types';\nimport type { User, UserAPI } from '@/types/user.types';\nimport { sendVerificationUpdate } from '@controllers/user.controller';\nimport { logger } from '@logger';\nimport { sendEmail } from '@services/email.service';\nimport { getOrganizationById } from '@services/organization.service';\nimport { getProjectById } from '@services/project.service';\nimport { getUserById } from '@services/user.service';\nimport { mapOrganizationToAPI } from '@utils/mapper/organization';\nimport { mapProjectToAPI } from '@utils/mapper/project';\nimport { mapSessionToAPI } from '@utils/mapper/session';\nimport { mapUserToAPI } from '@utils/mapper/user';\nimport {\n computeEffectivePermission,\n getSessionRoles,\n intersectPermissions,\n} from '@utils/permissions';\nimport { betterAuth, OmitId } from 'better-auth';\nimport { mongodbAdapter } from 'better-auth/adapters/mongodb';\nimport { createAuthMiddleware } from 'better-auth/api';\nimport { customSession } from 'better-auth/plugins';\nimport type { MongoClient } from 'mongodb';\n\nexport type Auth = ReturnType<typeof betterAuth>;\n\nexport const formatSession = (session: SessionContext): OmitId<Session> => {\n const roles = getSessionRoles(session);\n let permissions = computeEffectivePermission(roles);\n\n // Intersect in the case a Access Token try to override the permissions\n if (session.permissions) {\n permissions = intersectPermissions(permissions, session.permissions);\n }\n\n const resultSession = {\n session: session.session,\n user: session.user,\n organization: session.organization,\n project: session.project,\n authType: 'session',\n permissions,\n roles,\n } as OmitId<Session>;\n\n return resultSession;\n};\n\nexport const getAuth = (dbClient: MongoClient): Auth => {\n if (!dbClient) {\n throw new Error('MongoDB connection not established');\n }\n\n const auth = betterAuth({\n appName: 'Intlayer',\n\n database: mongodbAdapter(dbClient.db()),\n\n /**\n * User model\n */\n user: {\n modelName: 'users',\n },\n\n databaseHooks: {\n user: {\n create: {\n // Runs once, immediately after the INSERT\n after: async (user) => {\n if (!user?.emailVerified) return;\n\n await sendEmail({\n type: 'welcome',\n to: user.email,\n username: user.name ?? user.email.split('@')[0],\n loginLink: `${process.env.CLIENT_URL}/auth/login`,\n locale: (user as any).lang,\n });\n logger.info('Welcome e‑mail delivered', {\n email: user.email,\n });\n },\n },\n },\n },\n\n hooks: {\n after: createAuthMiddleware(async (ctx) => {\n const { path, context, error } = ctx;\n\n const newUser = context.newSession?.user;\n const existingUser = context.session?.user;\n const user = newUser ?? existingUser;\n\n if (!user) return;\n\n if (['/verify-email'].includes(path)) {\n sendVerificationUpdate(user as unknown as User);\n logger.info('SSE verification update sent', {\n email: user.email,\n userId: user.id,\n });\n\n await sendEmail({\n type: 'welcome',\n to: user.email,\n username: user.name ?? 
user.email.split('@')[0],\n loginLink: `${process.env.CLIENT_URL}/auth/login`,\n locale: (user as any).lang,\n });\n logger.info('Welcome e‑mail delivered', {\n email: user.email,\n });\n }\n }),\n },\n\n advanced: {\n // 1️⃣ Change or drop the global prefix\n // cookiePrefix: \"intlayer\", // => intlayer.session_token\n cookiePrefix: 'intlayer', // => session_token (no prefix)\n\n // 2️⃣ Override just the session‑token cookie\n cookies: {\n session_token: {\n // name: 'intlayer_session_token', // final name depends on the prefix above\n // attributes: { sameSite: \"lax\", maxAge: 60 * 60 * 24 } // optional\n },\n },\n\n // 3️⃣ (optional) turn off the automatic __Secure‑ prefix in non‑prod\n // useSecureCookies: false,\n },\n\n session: {\n modelName: 'sessions',\n id: 'id',\n\n additionalFields: {\n activeOrganizationId: { type: 'string', nullable: true, input: false },\n activeProjectId: { type: 'string', nullable: true, input: false },\n },\n },\n\n plugins: [\n customSession(async ({ session }) => {\n const typedSession = session as unknown as SessionDataApi;\n\n let userAPI: UserAPI | null = null;\n let organizationAPI: OrganizationAPI | null = null;\n let projectAPI: ProjectAPI | null = null;\n\n if (typedSession.userId) {\n const userData = await getUserById(typedSession.userId);\n\n if (userData) {\n userAPI = mapUserToAPI(userData);\n }\n }\n\n if (typedSession.activeOrganizationId) {\n const orgData = await getOrganizationById(\n typedSession.activeOrganizationId\n );\n\n if (orgData) {\n organizationAPI = mapOrganizationToAPI(orgData);\n }\n }\n if (typedSession.activeProjectId) {\n const projectData = await getProjectById(\n typedSession.activeProjectId\n );\n\n if (projectData) {\n projectAPI = mapProjectToAPI(projectData);\n }\n }\n\n const sessionWithNoPermission: SessionContext = {\n session: typedSession,\n user: userAPI!,\n organization: organizationAPI ?? null,\n project: projectAPI ?? null,\n authType: 'session',\n };\n\n const formattedSession = formatSession(sessionWithNoPermission);\n\n return mapSessionToAPI(formattedSession);\n }),\n ],\n\n emailAndPassword: {\n enabled: true,\n disableSignUp: false,\n requireEmailVerification: true,\n minPasswordLength: 8,\n maxPasswordLength: 128,\n autoSignIn: true,\n sendResetPassword: async ({ user, url }) => {\n logger.info('sending reset password email', { email: user.email });\n await sendEmail({\n type: 'resetPassword',\n to: user.email,\n username: user.name ?? user.email.split('@')[0],\n resetLink: url,\n });\n },\n resetPasswordTokenExpiresIn: 3600,\n },\n accountLinking: {\n enabled: true, // allow linking in general\n trustedProviders: ['google', 'github'], // optional: auto‑link when Google verifies the e‑mail\n },\n emailVerification: {\n autoSignInAfterVerification: true,\n sendOnSignIn: true,\n sendVerificationEmail: async ({ user, url }) => {\n logger.info('sending verification email', { email: user.email });\n await sendEmail({\n type: 'validate',\n to: user.email,\n username: user.name ?? 
user.email.split('@')[0],\n validationLink: url,\n });\n },\n },\n\n crossSubDomainCookies: {\n enabled: true,\n additionalCookies: ['session_token'],\n domain: process.env.CLIENT_URL as string,\n },\n cookiePrefix: 'intlayer',\n cookies: {\n session_token: {\n name: 'session_token',\n attributes: {\n httpOnly: true,\n secure: true,\n },\n },\n },\n\n trustedOrigins: [process.env.CLIENT_URL as string],\n\n socialProviders: {\n google: {\n clientId: process.env.GOOGLE_CLIENT_ID as string,\n clientSecret: process.env.GOOGLE_CLIENT_SECRET as string,\n },\n github: {\n clientId: process.env.GITHUB_CLIENT_ID as string,\n clientSecret: process.env.GITHUB_CLIENT_SECRET as string,\n },\n },\n\n logger: {\n log: (level, message, ...args) => logger[level](message, ...args),\n },\n });\n\n return auth;\n};\n"],"mappings":"AAQA,SAAS,8BAA8B;AACvC,SAAS,cAAc;AACvB,SAAS,iBAAiB;AAC1B,SAAS,2BAA2B;AACpC,SAAS,sBAAsB;AAC/B,SAAS,mBAAmB;AAC5B,SAAS,4BAA4B;AACrC,SAAS,uBAAuB;AAChC,SAAS,uBAAuB;AAChC,SAAS,oBAAoB;AAC7B;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,SAAS,kBAA0B;AACnC,SAAS,sBAAsB;AAC/B,SAAS,4BAA4B;AACrC,SAAS,qBAAqB;AAKvB,MAAM,gBAAgB,CAAC,YAA6C;AACzE,QAAM,QAAQ,gBAAgB,OAAO;AACrC,MAAI,cAAc,2BAA2B,KAAK;AAGlD,MAAI,QAAQ,aAAa;AACvB,kBAAc,qBAAqB,aAAa,QAAQ,WAAW;AAAA,EACrE;AAEA,QAAM,gBAAgB;AAAA,IACpB,SAAS,QAAQ;AAAA,IACjB,MAAM,QAAQ;AAAA,IACd,cAAc,QAAQ;AAAA,IACtB,SAAS,QAAQ;AAAA,IACjB,UAAU;AAAA,IACV;AAAA,IACA;AAAA,EACF;AAEA,SAAO;AACT;AAEO,MAAM,UAAU,CAAC,aAAgC;AACtD,MAAI,CAAC,UAAU;AACb,UAAM,IAAI,MAAM,oCAAoC;AAAA,EACtD;AAEA,QAAM,OAAO,WAAW;AAAA,IACtB,SAAS;AAAA,IAET,UAAU,eAAe,SAAS,GAAG,CAAC;AAAA;AAAA;AAAA;AAAA,IAKtC,MAAM;AAAA,MACJ,WAAW;AAAA,IACb;AAAA,IAEA,eAAe;AAAA,MACb,MAAM;AAAA,QACJ,QAAQ;AAAA;AAAA,UAEN,OAAO,OAAO,SAAS;AACrB,gBAAI,CAAC,MAAM,cAAe;AAE1B,kBAAM,UAAU;AAAA,cACd,MAAM;AAAA,cACN,IAAI,KAAK;AAAA,cACT,UAAU,KAAK,QAAQ,KAAK,MAAM,MAAM,GAAG,EAAE,CAAC;AAAA,cAC9C,WAAW,GAAG,QAAQ,IAAI,UAAU;AAAA,cACpC,QAAS,KAAa;AAAA,YACxB,CAAC;AACD,mBAAO,KAAK,iCAA4B;AAAA,cACtC,OAAO,KAAK;AAAA,YACd,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,IAEA,OAAO;AAAA,MACL,OAAO,qBAAqB,OAAO,QAAQ;AACzC,cAAM,EAAE,MAAM,SAAS,MAAM,IAAI;AAEjC,cAAM,UAAU,QAAQ,YAAY;AACpC,cAAM,eAAe,QAAQ,SAAS;AACtC,cAAM,OAAO,WAAW;AAExB,YAAI,CAAC,KAAM;AAEX,YAAI,CAAC,eAAe,EAAE,SAAS,IAAI,GAAG;AACpC,iCAAuB,IAAuB;AAC9C,iBAAO,KAAK,gCAAgC;AAAA,YAC1C,OAAO,KAAK;AAAA,YACZ,QAAQ,KAAK;AAAA,UACf,CAAC;AAED,gBAAM,UAAU;AAAA,YACd,MAAM;AAAA,YACN,IAAI,KAAK;AAAA,YACT,UAAU,KAAK,QAAQ,KAAK,MAAM,MAAM,GAAG,EAAE,CAAC;AAAA,YAC9C,WAAW,GAAG,QAAQ,IAAI,UAAU;AAAA,YACpC,QAAS,KAAa;AAAA,UACxB,CAAC;AACD,iBAAO,KAAK,iCAA4B;AAAA,YACtC,OAAO,KAAK;AAAA,UACd,CAAC;AAAA,QACH;AAAA,MACF,CAAC;AAAA,IACH;AAAA,IAEA,UAAU;AAAA;AAAA;AAAA,MAGR,cAAc;AAAA;AAAA;AAAA,MAGd,SAAS;AAAA,QACP,eAAe;AAAA;AAAA;AAAA,QAGf;AAAA,MACF;AAAA;AAAA;AAAA,IAIF;AAAA,IAEA,SAAS;AAAA,MACP,WAAW;AAAA,MACX,IAAI;AAAA,MAEJ,kBAAkB;AAAA,QAChB,sBAAsB,EAAE,MAAM,UAAU,UAAU,MAAM,OAAO,MAAM;AAAA,QACrE,iBAAiB,EAAE,MAAM,UAAU,UAAU,MAAM,OAAO,MAAM;AAAA,MAClE;AAAA,IACF;AAAA,IAEA,SAAS;AAAA,MACP,cAAc,OAAO,EAAE,QAAQ,MAAM;AACnC,cAAM,eAAe;AAErB,YAAI,UAA0B;AAC9B,YAAI,kBAA0C;AAC9C,YAAI,aAAgC;AAEpC,YAAI,aAAa,QAAQ;AACvB,gBAAM,WAAW,MAAM,YAAY,aAAa,MAAM;AAEtD,cAAI,UAAU;AACZ,sBAAU,aAAa,QAAQ;AAAA,UACjC;AAAA,QACF;AAEA,YAAI,aAAa,sBAAsB;AACrC,gBAAM,UAAU,MAAM;AAAA,YACpB,aAAa;AAAA,UACf;AAEA,cAAI,SAAS;AACX,8BAAkB,qBAAqB,OAAO;AAAA,UAChD;AAAA,QACF;AACA,YAAI,aAAa,iBAAiB;AAChC,gBAAM,cAAc,MAAM;AAAA,YACxB,aAAa;AAAA,UACf;AAEA,cAAI,aAAa;AACf,yBAAa,gBAAgB,WAAW;AAAA,UAC1C;AAAA,QACF;AAEA,cAAM,0BAA0C;AAAA,UAC9C,SAAS;AAAA,UACT,MAAM;AAAA,UACN,cAAc,mBAAmB;AAAA,UACjC,SAAS,cAAc;AAAA,UACvB,UAAU;AAAA,QACZ;AAEA,cAAM,mBAAm
B,cAAc,uBAAuB;AAE9D,eAAO,gBAAgB,gBAAgB;AAAA,MACzC,CAAC;AAAA,IACH;AAAA,IAEA,kBAAkB;AAAA,MAChB,SAAS;AAAA,MACT,eAAe;AAAA,MACf,0BAA0B;AAAA,MAC1B,mBAAmB;AAAA,MACnB,mBAAmB;AAAA,MACnB,YAAY;AAAA,MACZ,mBAAmB,OAAO,EAAE,MAAM,IAAI,MAAM;AAC1C,eAAO,KAAK,gCAAgC,EAAE,OAAO,KAAK,MAAM,CAAC;AACjE,cAAM,UAAU;AAAA,UACd,MAAM;AAAA,UACN,IAAI,KAAK;AAAA,UACT,UAAU,KAAK,QAAQ,KAAK,MAAM,MAAM,GAAG,EAAE,CAAC;AAAA,UAC9C,WAAW;AAAA,QACb,CAAC;AAAA,MACH;AAAA,MACA,6BAA6B;AAAA,IAC/B;AAAA,IACA,gBAAgB;AAAA,MACd,SAAS;AAAA;AAAA,MACT,kBAAkB,CAAC,UAAU,QAAQ;AAAA;AAAA,IACvC;AAAA,IACA,mBAAmB;AAAA,MACjB,6BAA6B;AAAA,MAC7B,cAAc;AAAA,MACd,uBAAuB,OAAO,EAAE,MAAM,IAAI,MAAM;AAC9C,eAAO,KAAK,8BAA8B,EAAE,OAAO,KAAK,MAAM,CAAC;AAC/D,cAAM,UAAU;AAAA,UACd,MAAM;AAAA,UACN,IAAI,KAAK;AAAA,UACT,UAAU,KAAK,QAAQ,KAAK,MAAM,MAAM,GAAG,EAAE,CAAC;AAAA,UAC9C,gBAAgB;AAAA,QAClB,CAAC;AAAA,MACH;AAAA,IACF;AAAA,IAEA,uBAAuB;AAAA,MACrB,SAAS;AAAA,MACT,mBAAmB,CAAC,eAAe;AAAA,MACnC,QAAQ,QAAQ,IAAI;AAAA,IACtB;AAAA,IACA,cAAc;AAAA,IACd,SAAS;AAAA,MACP,eAAe;AAAA,QACb,MAAM;AAAA,QACN,YAAY;AAAA,UACV,UAAU;AAAA,UACV,QAAQ;AAAA,QACV;AAAA,MACF;AAAA,IACF;AAAA,IAEA,gBAAgB,CAAC,QAAQ,IAAI,UAAoB;AAAA,IAEjD,iBAAiB;AAAA,MACf,QAAQ;AAAA,QACN,UAAU,QAAQ,IAAI;AAAA,QACtB,cAAc,QAAQ,IAAI;AAAA,MAC5B;AAAA,MACA,QAAQ;AAAA,QACN,UAAU,QAAQ,IAAI;AAAA,QACtB,cAAc,QAAQ,IAAI;AAAA,MAC5B;AAAA,IACF;AAAA,IAEA,QAAQ;AAAA,MACN,KAAK,CAAC,OAAO,YAAY,SAAS,OAAO,KAAK,EAAE,SAAS,GAAG,IAAI;AAAA,IAClE;AAAA,EACF,CAAC;AAED,SAAO;AACT;","names":[]}
@@ -1364,6 +1364,19 @@ const errorData = {
       es: "No tienes permisos para acceder a este recurso."
     },
     statusCode: HttpStatusCodes.FORBIDDEN_403
+  },
+  INVALID_REQUEST_BODY: {
+    title: {
+      en: "Invalid Request Body",
+      fr: "Corps de requ\xEAte invalide",
+      es: "Cuerpo de solicitud inv\xE1lido"
+    },
+    message: {
+      en: "The request body is invalid.",
+      fr: "Le corps de la requ\xEAte est invalide.",
+      es: "El cuerpo de la solicitud es inv\xE1lido."
+    },
+    statusCode: HttpStatusCodes.BAD_REQUEST_400
   }
 };
 export {