@intlayer/backend 5.5.1 → 5.5.3
This diff shows the contents of publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
- package/dist/cjs/controllers/ai.controller.cjs.map +1 -1
- package/dist/cjs/controllers/search.controller.cjs +54 -0
- package/dist/cjs/controllers/search.controller.cjs.map +1 -0
- package/dist/cjs/export.cjs.map +1 -1
- package/dist/cjs/index.cjs +6 -4
- package/dist/cjs/index.cjs.map +1 -1
- package/dist/cjs/routes/search.routes.cjs +42 -0
- package/dist/cjs/routes/search.routes.cjs.map +1 -0
- package/dist/cjs/utils/AI/aiSdk.cjs.map +1 -1
- package/dist/cjs/utils/AI/askDocQuestion/askDocQuestion.cjs +2 -2
- package/dist/cjs/utils/AI/askDocQuestion/askDocQuestion.cjs.map +1 -1
- package/dist/cjs/utils/AI/translateJSON/PROMPT.md +1 -1
- package/dist/cjs/utils/AI/translateJSON/index.cjs +2 -2
- package/dist/cjs/utils/AI/translateJSON/index.cjs.map +1 -1
- package/dist/esm/controllers/ai.controller.mjs.map +1 -1
- package/dist/esm/controllers/search.controller.mjs +20 -0
- package/dist/esm/controllers/search.controller.mjs.map +1 -0
- package/dist/esm/export.mjs.map +1 -1
- package/dist/esm/index.mjs +8 -6
- package/dist/esm/index.mjs.map +1 -1
- package/dist/esm/routes/search.routes.mjs +17 -0
- package/dist/esm/routes/search.routes.mjs.map +1 -0
- package/dist/esm/utils/AI/aiSdk.mjs.map +1 -1
- package/dist/esm/utils/AI/askDocQuestion/askDocQuestion.mjs +6 -3
- package/dist/esm/utils/AI/askDocQuestion/askDocQuestion.mjs.map +1 -1
- package/dist/esm/utils/AI/translateJSON/PROMPT.md +1 -1
- package/dist/esm/utils/AI/translateJSON/index.mjs +2 -2
- package/dist/esm/utils/AI/translateJSON/index.mjs.map +1 -1
- package/dist/types/controllers/ai.controller.d.ts +2 -2
- package/dist/types/controllers/ai.controller.d.ts.map +1 -1
- package/dist/types/controllers/search.controller.d.ts +9 -0
- package/dist/types/controllers/search.controller.d.ts.map +1 -0
- package/dist/types/export.d.ts +1 -0
- package/dist/types/export.d.ts.map +1 -1
- package/dist/types/index.d.ts.map +1 -1
- package/dist/types/routes/search.routes.d.ts +10 -0
- package/dist/types/routes/search.routes.d.ts.map +1 -0
- package/dist/types/utils/AI/aiSdk.d.ts +5 -0
- package/dist/types/utils/AI/aiSdk.d.ts.map +1 -1
- package/dist/types/utils/AI/askDocQuestion/askDocQuestion.d.ts +2 -6
- package/dist/types/utils/AI/askDocQuestion/askDocQuestion.d.ts.map +1 -1
- package/package.json +8 -8

package/dist/cjs/controllers/ai.controller.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../../../src/controllers/ai.controller.ts"],"sourcesContent":["import { DiscussionModel } from '@/models/discussion.model';\nimport type { Dictionary } from '@/types/dictionary.types';\nimport type { Tag } from '@/types/tag.types';\nimport { type KeyPath } from '@intlayer/core';\nimport type { ResponseWithInformation } from '@middlewares/sessionAuth.middleware';\nimport { getDictionariesByTags } from '@services/dictionary.service';\nimport * as tagService from '@services/tag.service';\nimport { getTagsByKeys } from '@services/tag.service';\nimport { AIOptions } from '@utils/AI/aiSdk';\nimport * as askDocQuestionUtil from '@utils/AI/askDocQuestion/askDocQuestion';\nimport * as auditContentDeclarationUtil from '@utils/AI/auditDictionary';\nimport * as auditContentDeclarationFieldUtil from '@utils/AI/auditDictionaryField';\nimport * as auditContentDeclarationMetadataUtil from '@utils/AI/auditDictionaryMetadata';\nimport * as auditTagUtil from '@utils/AI/auditTag';\nimport * as autocompleteUtil from '@utils/AI/autocomplete';\nimport * as translateJSONUtil from '@utils/AI/translateJSON';\nimport { type AppError, ErrorHandler } from '@utils/errors';\nimport { formatResponse, type ResponseData } from '@utils/responseData';\nimport type { NextFunction, Request } from 'express';\nimport type { Locales } from 'intlayer';\n\nexport type TranslateJSONBody = Omit<\n translateJSONUtil.TranslateJSONOptions,\n 'tags'\n> & {\n tagsKeys?: string[];\n};\nexport type TranslateJSONResult =\n ResponseData<translateJSONUtil.TranslateJSONResultData>;\n\nexport const translateJSON = async (\n req: Request<AuditContentDeclarationBody>,\n res: ResponseWithInformation<TranslateJSONResult>,\n _next: NextFunction\n): Promise<void> => {\n const { user, project, organization } = res.locals;\n const { aiOptions, tagsKeys, ...rest } = req.body;\n\n // Check if any API key is present\n const hasApiKey = Boolean(aiOptions?.apiKey);\n\n if (!hasApiKey) {\n if (!user || !project || !organization) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n }\n\n try {\n let tags: Tag[] = [];\n\n if (project?.organizationId) {\n tags = await getTagsByKeys(tagsKeys, project.organizationId);\n }\n\n const auditResponse = await translateJSONUtil.translateJSON({\n ...rest,\n aiOptions,\n tags,\n });\n\n if (!auditResponse) {\n ErrorHandler.handleGenericErrorResponse(res, 'AUDIT_FAILED');\n return;\n }\n\n const responseData =\n formatResponse<translateJSONUtil.TranslateJSONResultData>({\n data: auditResponse,\n });\n\n res.json(responseData);\n return;\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n\nexport type AuditContentDeclarationBody = {\n aiOptions?: AIOptions;\n locales: Locales[];\n defaultLocale: Locales;\n fileContent: string;\n filePath?: string;\n tagsKeys?: string[];\n};\nexport type AuditContentDeclarationResult =\n ResponseData<auditContentDeclarationUtil.AuditFileResultData>;\n\n/**\n * Retrieves a list of dictionaries based on filters and pagination.\n */\nexport const auditContentDeclaration = async (\n req: Request<AuditContentDeclarationBody>,\n res: ResponseWithInformation<AuditContentDeclarationResult>,\n _next: NextFunction\n): Promise<void> => {\n const { user, project, organization } = res.locals;\n const { fileContent, filePath, aiOptions, locales, defaultLocale, tagsKeys } =\n req.body;\n\n // Check if any API key is present\n const hasApiKey = Boolean(aiOptions?.apiKey);\n\n if (!hasApiKey) {\n if 
(!user || !project || !organization) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n }\n\n try {\n let tags: Tag[] = [];\n\n if (project?.organizationId) {\n tags = await getTagsByKeys(tagsKeys, project.organizationId);\n }\n\n const auditResponse = await auditContentDeclarationUtil.auditDictionary({\n fileContent,\n filePath,\n aiOptions,\n locales,\n defaultLocale,\n tags,\n });\n\n if (!auditResponse) {\n ErrorHandler.handleGenericErrorResponse(res, 'AUDIT_FAILED');\n return;\n }\n\n const responseData =\n formatResponse<auditContentDeclarationUtil.AuditFileResultData>({\n data: auditResponse,\n });\n\n res.json(responseData);\n return;\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n\nexport type AuditContentDeclarationFieldBody = {\n aiOptions?: AIOptions;\n locales: Locales[];\n fileContent: string;\n filePath?: string;\n tagsKeys?: string[];\n keyPath: KeyPath[];\n};\nexport type AuditContentDeclarationFieldResult =\n ResponseData<auditContentDeclarationUtil.AuditFileResultData>;\n\n/**\n * Retrieves a list of dictionaries based on filters and pagination.\n */\nexport const auditContentDeclarationField = async (\n req: Request<AuditContentDeclarationFieldBody>,\n res: ResponseWithInformation<AuditContentDeclarationFieldResult>,\n _next: NextFunction\n): Promise<void> => {\n const { user, project, organization } = res.locals;\n const { fileContent, aiOptions, locales, tagsKeys, keyPath } = req.body;\n\n // Check if any API key is present\n const hasApiKey = Boolean(aiOptions?.apiKey);\n\n if (!hasApiKey) {\n if (!user || !project || !organization) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n }\n\n try {\n let tags: Tag[] = [];\n\n if (project?.organizationId) {\n tags = await getTagsByKeys(tagsKeys, project.organizationId);\n }\n\n const auditResponse =\n await auditContentDeclarationFieldUtil.auditDictionaryField({\n fileContent,\n aiOptions,\n locales,\n tags,\n keyPath,\n });\n\n if (!auditResponse) {\n ErrorHandler.handleGenericErrorResponse(res, 'AUDIT_FAILED');\n return;\n }\n\n const responseData =\n formatResponse<auditContentDeclarationUtil.AuditFileResultData>({\n data: auditResponse,\n });\n\n res.json(responseData);\n return;\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n\nexport type AuditContentDeclarationMetadataBody = {\n aiOptions?: AIOptions;\n fileContent: string;\n};\nexport type AuditContentDeclarationMetadataResult =\n ResponseData<auditContentDeclarationUtil.AuditFileResultData>;\n\n/**\n * Retrieves a list of dictionaries based on filters and pagination.\n */\nexport const auditContentDeclarationMetadata = async (\n req: Request<AuditContentDeclarationMetadataBody>,\n res: ResponseWithInformation<AuditContentDeclarationMetadataResult>,\n _next: NextFunction\n): Promise<void> => {\n const { user, organization, project } = res.locals;\n const { fileContent, aiOptions } = req.body;\n\n // Check if any API key is present\n const hasApiKey = Boolean(aiOptions?.apiKey);\n\n if (!hasApiKey) {\n if (!user || !project || !organization) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n }\n\n try {\n const tags: Tag[] = await tagService.findTags(\n {\n organizationId: organization?._id,\n },\n 0,\n 1000\n );\n\n const auditResponse =\n await auditContentDeclarationMetadataUtil.auditDictionaryMetadata({\n fileContent,\n aiOptions,\n tags,\n });\n\n if 
(!auditResponse) {\n ErrorHandler.handleGenericErrorResponse(res, 'AUDIT_FAILED');\n return;\n }\n\n const responseData =\n formatResponse<auditContentDeclarationUtil.AuditFileResultData>({\n data: auditResponse,\n });\n\n res.json(responseData);\n return;\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n\nexport type AuditTagBody = {\n aiOptions?: AIOptions;\n tag: Tag;\n};\nexport type AuditTagResult =\n ResponseData<auditContentDeclarationUtil.AuditFileResultData>;\n\n/**\n * Retrieves a list of dictionaries based on filters and pagination.\n */\nexport const auditTag = async (\n req: Request<undefined, undefined, AuditTagBody>,\n res: ResponseWithInformation<AuditTagResult>,\n _next: NextFunction\n): Promise<void> => {\n const { user, project, organization } = res.locals;\n const { aiOptions, tag } = req.body;\n\n // Check if any API key is present\n const hasApiKey = Boolean(aiOptions?.apiKey);\n\n if (!hasApiKey) {\n if (!user || !project || !organization) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n }\n\n try {\n let dictionaries: Dictionary[] = [];\n if (project?.organizationId) {\n dictionaries = await getDictionariesByTags([tag.key], project._id);\n }\n\n const auditResponse = await auditTagUtil.auditTag({\n aiOptions,\n dictionaries,\n tag,\n });\n\n if (!auditResponse) {\n ErrorHandler.handleGenericErrorResponse(res, 'AUDIT_FAILED');\n return;\n }\n\n const responseData =\n formatResponse<auditContentDeclarationUtil.AuditFileResultData>({\n data: auditResponse,\n });\n\n res.json(responseData);\n return;\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n\nexport type AskDocQuestionBody = {\n messages: askDocQuestionUtil.ChatCompletionRequestMessage[];\n discutionId: string;\n};\nexport type AskDocQuestionResult =\n ResponseData<askDocQuestionUtil.AskDocQuestionResult>;\n\nexport const askDocQuestion = async (\n req: Request<undefined, undefined, AskDocQuestionBody>,\n res: ResponseWithInformation<AskDocQuestionResult>\n) => {\n const { messages, discutionId } = req.body;\n const { user, project, organization } = res.locals;\n\n // 1. Prepare SSE headers and flush them NOW\n res.setHeader('Content-Type', 'text/event-stream; charset=utf-8');\n res.setHeader('Cache-Control', 'no-cache, no-transform');\n res.setHeader('Connection', 'keep-alive');\n res.setHeader('X-Accel-Buffering', 'no'); // disable nginx buffering\n res.flushHeaders?.();\n res.write(': connected\\n\\n'); // initial comment keeps some browsers happy\n res.flush?.();\n\n // 2. Kick off the upstream stream WITHOUT awaiting it\n askDocQuestionUtil\n .askDocQuestion(messages, {\n onMessage: (chunk) => {\n res.write(`data: ${JSON.stringify({ chunk })}\\n\\n`);\n res.flush?.();\n },\n })\n .then(async (fullResponse) => {\n const lastUserMessageContent = messages.findLast(\n (msg) => msg.role === 'user'\n )?.content;\n const lastUserMessageNbWords = lastUserMessageContent\n ? lastUserMessageContent.split(' ').length\n : 0;\n if (lastUserMessageNbWords > 2) {\n // If the last user message is less than 3 words, don't persist the discussion\n // Example: \"Hello\", \"Hi\", \"Hey\", \"test\", etc.\n\n // 3. 
Persist discussion while the client already has all chunks\n await DiscussionModel.findOneAndUpdate(\n { discutionId },\n {\n $set: {\n discutionId,\n userId: user?._id,\n projectId: project?._id,\n organizationId: organization?._id,\n messages: [\n ...messages.map((msg) => ({\n role: msg.role,\n content: msg.content,\n timestamp: msg.timestamp,\n })),\n {\n role: 'assistant',\n content: fullResponse.response,\n relatedFiles: fullResponse.relatedFiles,\n timestamp: new Date(),\n },\n ],\n },\n },\n { upsert: true, new: true }\n );\n }\n\n // 4. Tell the client we're done and close the stream\n res.write(\n `data: ${JSON.stringify({ done: true, response: fullResponse })}\\n\\n`\n );\n res.end();\n })\n .catch((err) => {\n // propagate error as an SSE event so the client knows why it closed\n res.write(\n `event: error\\ndata: ${JSON.stringify({ message: err.message })}\\n\\n`\n );\n res.end();\n });\n};\n\nexport type AutocompleteBody = {\n text: string;\n aiOptions?: AIOptions;\n};\n\nexport type AutocompleteResponse = ResponseData<{\n autocompletion: string;\n}>;\n\nexport const autocomplete = async (\n req: Request<undefined, undefined, AutocompleteBody>,\n res: ResponseWithInformation<AutocompleteResponse>\n) => {\n try {\n const { text, aiOptions } = req.body;\n const { user, project, organization } = res.locals;\n\n // Check if any API key is present\n const hasApiKey = Boolean(aiOptions?.apiKey);\n\n if (!hasApiKey) {\n if (!user || !project || !organization) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n }\n\n const response = (await autocompleteUtil.autocomplete({\n text,\n aiOptions,\n })) ?? {\n autocompletion: '',\n tokenUsed: 0,\n };\n\n const responseData =\n formatResponse<autocompleteUtil.AutocompleteFileResultData>({\n data: response,\n });\n\n res.json(responseData);\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n 
}\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBAAgC;AAKhC,wBAAsC;AACtC,iBAA4B;AAC5B,iBAA8B;AAE9B,yBAAoC;AACpC,kCAA6C;AAC7C,uCAAkD;AAClD,0CAAqD;AACrD,mBAA8B;AAC9B,uBAAkC;AAClC,wBAAmC;AACnC,oBAA4C;AAC5C,0BAAkD;AAa3C,MAAM,gBAAgB,OAC3B,KACA,KACA,UACkB;AAClB,QAAM,EAAE,MAAM,SAAS,aAAa,IAAI,IAAI;AAC5C,QAAM,EAAE,WAAW,UAAU,GAAG,KAAK,IAAI,IAAI;AAG7C,QAAM,YAAY,QAAQ,WAAW,MAAM;AAE3C,MAAI,CAAC,WAAW;AACd,QAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,cAAc;AACtC,iCAAa,2BAA2B,KAAK,kBAAkB;AAC/D;AAAA,IACF;AAAA,EACF;AAEA,MAAI;AACF,QAAI,OAAc,CAAC;AAEnB,QAAI,SAAS,gBAAgB;AAC3B,aAAO,UAAM,0BAAc,UAAU,QAAQ,cAAc;AAAA,IAC7D;AAEA,UAAM,gBAAgB,MAAM,kBAAkB,cAAc;AAAA,MAC1D,GAAG;AAAA,MACH;AAAA,MACA;AAAA,IACF,CAAC;AAED,QAAI,CAAC,eAAe;AAClB,iCAAa,2BAA2B,KAAK,cAAc;AAC3D;AAAA,IACF;AAEA,UAAM,mBACJ,oCAA0D;AAAA,MACxD,MAAM;AAAA,IACR,CAAC;AAEH,QAAI,KAAK,YAAY;AACrB;AAAA,EACF,SAAS,OAAO;AACd,+BAAa,uBAAuB,KAAK,KAAiB;AAC1D;AAAA,EACF;AACF;AAgBO,MAAM,0BAA0B,OACrC,KACA,KACA,UACkB;AAClB,QAAM,EAAE,MAAM,SAAS,aAAa,IAAI,IAAI;AAC5C,QAAM,EAAE,aAAa,UAAU,WAAW,SAAS,eAAe,SAAS,IACzE,IAAI;AAGN,QAAM,YAAY,QAAQ,WAAW,MAAM;AAE3C,MAAI,CAAC,WAAW;AACd,QAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,cAAc;AACtC,iCAAa,2BAA2B,KAAK,kBAAkB;AAC/D;AAAA,IACF;AAAA,EACF;AAEA,MAAI;AACF,QAAI,OAAc,CAAC;AAEnB,QAAI,SAAS,gBAAgB;AAC3B,aAAO,UAAM,0BAAc,UAAU,QAAQ,cAAc;AAAA,IAC7D;AAEA,UAAM,gBAAgB,MAAM,4BAA4B,gBAAgB;AAAA,MACtE;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAED,QAAI,CAAC,eAAe;AAClB,iCAAa,2BAA2B,KAAK,cAAc;AAC3D;AAAA,IACF;AAEA,UAAM,mBACJ,oCAAgE;AAAA,MAC9D,MAAM;AAAA,IACR,CAAC;AAEH,QAAI,KAAK,YAAY;AACrB;AAAA,EACF,SAAS,OAAO;AACd,+BAAa,uBAAuB,KAAK,KAAiB;AAC1D;AAAA,EACF;AACF;AAgBO,MAAM,+BAA+B,OAC1C,KACA,KACA,UACkB;AAClB,QAAM,EAAE,MAAM,SAAS,aAAa,IAAI,IAAI;AAC5C,QAAM,EAAE,aAAa,WAAW,SAAS,UAAU,QAAQ,IAAI,IAAI;AAGnE,QAAM,YAAY,QAAQ,WAAW,MAAM;AAE3C,MAAI,CAAC,WAAW;AACd,QAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,cAAc;AACtC,iCAAa,2BAA2B,KAAK,kBAAkB;AAC/D;AAAA,IACF;AAAA,EACF;AAEA,MAAI;AACF,QAAI,OAAc,CAAC;AAEnB,QAAI,SAAS,gBAAgB;AAC3B,aAAO,UAAM,0BAAc,UAAU,QAAQ,cAAc;AAAA,IAC7D;AAEA,UAAM,gBACJ,MAAM,iCAAiC,qBAAqB;AAAA,MAC1D;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAEH,QAAI,CAAC,eAAe;AAClB,iCAAa,2BAA2B,KAAK,cAAc;AAC3D;AAAA,IACF;AAEA,UAAM,mBACJ,oCAAgE;AAAA,MAC9D,MAAM;AAAA,IACR,CAAC;AAEH,QAAI,KAAK,YAAY;AACrB;AAAA,EACF,SAAS,OAAO;AACd,+BAAa,uBAAuB,KAAK,KAAiB;AAC1D;AAAA,EACF;AACF;AAYO,MAAM,kCAAkC,OAC7C,KACA,KACA,UACkB;AAClB,QAAM,EAAE,MAAM,cAAc,QAAQ,IAAI,IAAI;AAC5C,QAAM,EAAE,aAAa,UAAU,IAAI,IAAI;AAGvC,QAAM,YAAY,QAAQ,WAAW,MAAM;AAE3C,MAAI,CAAC,WAAW;AACd,QAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,cAAc;AACtC,iCAAa,2BAA2B,KAAK,kBAAkB;AAC/D;AAAA,IACF;AAAA,EACF;AAEA,MAAI;AACF,UAAM,OAAc,MAAM,WAAW;AAAA,MACnC;AAAA,QACE,gBAAgB,cAAc;AAAA,MAChC;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEA,UAAM,gBACJ,MAAM,oCAAoC,wBAAwB;AAAA,MAChE;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAEH,QAAI,CAAC,eAAe;AAClB,iCAAa,2BAA2B,KAAK,cAAc;AAC3D;AAAA,IACF;AAEA,UAAM,mBACJ,oCAAgE;AAAA,MAC9D,MAAM;AAAA,IACR,CAAC;AAEH,QAAI,KAAK,YAAY;AACrB;AAAA,EACF,SAAS,OAAO;AACd,+BAAa,uBAAuB,KAAK,KAAiB;AAC1D;AAAA,EACF;AACF;AAYO,MAAM,WAAW,OACtB,KACA,KACA,UACkB;AAClB,QAAM,EAAE,MAAM,SAAS,aAAa,IAAI,IAAI;AAC5C,QAAM,EAAE,WAAW,IAAI,IAAI,IAAI;AAG/B,QAAM,YAAY,QAAQ,WAAW,MAAM;AAE3C,MAAI,CAAC,WAAW;AACd,QAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,cAAc;AACtC,iCAAa,2BAA2B,KAAK,kBAAkB;AAC/D;AAAA,IACF;AAAA,EACF;AAEA,MAAI;AACF,QAAI,eAA6B,CAAC;AAClC,QAAI,SAAS,gBAAgB;AAC3B,qBAAe,UAAM,yCAAsB,CAAC,IAAI,GAAG,GAAG,QAAQ,GAAG;AAAA,IACnE;AAEA,UAAM,gBAAgB,MAAM,aAAa,SAAS;AAAA,MAChD;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAED,QAAI,CAAC,eAAe;AAClB,iCAAa,2BAA2B,KAAK,cAAc;AAC3D;AAAA,IAC
F;AAEA,UAAM,mBACJ,oCAAgE;AAAA,MAC9D,MAAM;AAAA,IACR,CAAC;AAEH,QAAI,KAAK,YAAY;AACrB;AAAA,EACF,SAAS,OAAO;AACd,+BAAa,uBAAuB,KAAK,KAAiB;AAC1D;AAAA,EACF;AACF;AASO,MAAM,iBAAiB,OAC5B,KACA,QACG;AACH,QAAM,EAAE,UAAU,YAAY,IAAI,IAAI;AACtC,QAAM,EAAE,MAAM,SAAS,aAAa,IAAI,IAAI;AAG5C,MAAI,UAAU,gBAAgB,kCAAkC;AAChE,MAAI,UAAU,iBAAiB,wBAAwB;AACvD,MAAI,UAAU,cAAc,YAAY;AACxC,MAAI,UAAU,qBAAqB,IAAI;AACvC,MAAI,eAAe;AACnB,MAAI,MAAM,iBAAiB;AAC3B,MAAI,QAAQ;AAGZ,qBACG,eAAe,UAAU;AAAA,IACxB,WAAW,CAAC,UAAU;AACpB,UAAI,MAAM,SAAS,KAAK,UAAU,EAAE,MAAM,CAAC,CAAC;AAAA;AAAA,CAAM;AAClD,UAAI,QAAQ;AAAA,IACd;AAAA,EACF,CAAC,EACA,KAAK,OAAO,iBAAiB;AAC5B,UAAM,yBAAyB,SAAS;AAAA,MACtC,CAAC,QAAQ,IAAI,SAAS;AAAA,IACxB,GAAG;AACH,UAAM,yBAAyB,yBAC3B,uBAAuB,MAAM,GAAG,EAAE,SAClC;AACJ,QAAI,yBAAyB,GAAG;AAK9B,YAAM,kCAAgB;AAAA,QACpB,EAAE,YAAY;AAAA,QACd;AAAA,UACE,MAAM;AAAA,YACJ;AAAA,YACA,QAAQ,MAAM;AAAA,YACd,WAAW,SAAS;AAAA,YACpB,gBAAgB,cAAc;AAAA,YAC9B,UAAU;AAAA,cACR,GAAG,SAAS,IAAI,CAAC,SAAS;AAAA,gBACxB,MAAM,IAAI;AAAA,gBACV,SAAS,IAAI;AAAA,gBACb,WAAW,IAAI;AAAA,cACjB,EAAE;AAAA,cACF;AAAA,gBACE,MAAM;AAAA,gBACN,SAAS,aAAa;AAAA,gBACtB,cAAc,aAAa;AAAA,gBAC3B,WAAW,oBAAI,KAAK;AAAA,cACtB;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,QACA,EAAE,QAAQ,MAAM,KAAK,KAAK;AAAA,MAC5B;AAAA,IACF;AAGA,QAAI;AAAA,MACF,SAAS,KAAK,UAAU,EAAE,MAAM,MAAM,UAAU,aAAa,CAAC,CAAC;AAAA;AAAA;AAAA,IACjE;AACA,QAAI,IAAI;AAAA,EACV,CAAC,EACA,MAAM,CAAC,QAAQ;AAEd,QAAI;AAAA,MACF;AAAA,QAAuB,KAAK,UAAU,EAAE,SAAS,IAAI,QAAQ,CAAC,CAAC;AAAA;AAAA;AAAA,IACjE;AACA,QAAI,IAAI;AAAA,EACV,CAAC;AACL;AAWO,MAAM,eAAe,OAC1B,KACA,QACG;AACH,MAAI;AACF,UAAM,EAAE,MAAM,UAAU,IAAI,IAAI;AAChC,UAAM,EAAE,MAAM,SAAS,aAAa,IAAI,IAAI;AAG5C,UAAM,YAAY,QAAQ,WAAW,MAAM;AAE3C,QAAI,CAAC,WAAW;AACd,UAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,cAAc;AACtC,mCAAa,2BAA2B,KAAK,kBAAkB;AAC/D;AAAA,MACF;AAAA,IACF;AAEA,UAAM,WAAY,MAAM,iBAAiB,aAAa;AAAA,MACpD;AAAA,MACA;AAAA,IACF,CAAC,KAAM;AAAA,MACL,gBAAgB;AAAA,MAChB,WAAW;AAAA,IACb;AAEA,UAAM,mBACJ,oCAA4D;AAAA,MAC1D,MAAM;AAAA,IACR,CAAC;AAEH,QAAI,KAAK,YAAY;AAAA,EACvB,SAAS,OAAO;AACd,+BAAa,uBAAuB,KAAK,KAAiB;AAC1D;AAAA,EACF;AACF;","names":[]}
+
{"version":3,"sources":["../../../src/controllers/ai.controller.ts"],"sourcesContent":["import { DiscussionModel } from '@/models/discussion.model';\nimport type { Dictionary } from '@/types/dictionary.types';\nimport type { Tag } from '@/types/tag.types';\nimport { type KeyPath } from '@intlayer/core';\nimport type { ResponseWithInformation } from '@middlewares/sessionAuth.middleware';\nimport { getDictionariesByTags } from '@services/dictionary.service';\nimport * as tagService from '@services/tag.service';\nimport { getTagsByKeys } from '@services/tag.service';\nimport type { AIOptions, ChatCompletionRequestMessage } from '@utils/AI/aiSdk';\nimport * as askDocQuestionUtil from '@utils/AI/askDocQuestion/askDocQuestion';\nimport * as auditContentDeclarationUtil from '@utils/AI/auditDictionary';\nimport * as auditContentDeclarationFieldUtil from '@utils/AI/auditDictionaryField';\nimport * as auditContentDeclarationMetadataUtil from '@utils/AI/auditDictionaryMetadata';\nimport * as auditTagUtil from '@utils/AI/auditTag';\nimport * as autocompleteUtil from '@utils/AI/autocomplete';\nimport * as translateJSONUtil from '@utils/AI/translateJSON';\nimport { type AppError, ErrorHandler } from '@utils/errors';\nimport { formatResponse, type ResponseData } from '@utils/responseData';\nimport type { NextFunction, Request } from 'express';\nimport type { Locales } from 'intlayer';\n\nexport type TranslateJSONBody = Omit<\n translateJSONUtil.TranslateJSONOptions,\n 'tags'\n> & {\n tagsKeys?: string[];\n};\nexport type TranslateJSONResult =\n ResponseData<translateJSONUtil.TranslateJSONResultData>;\n\nexport const translateJSON = async (\n req: Request<AuditContentDeclarationBody>,\n res: ResponseWithInformation<TranslateJSONResult>,\n _next: NextFunction\n): Promise<void> => {\n const { user, project, organization } = res.locals;\n const { aiOptions, tagsKeys, ...rest } = req.body;\n\n // Check if any API key is present\n const hasApiKey = Boolean(aiOptions?.apiKey);\n\n if (!hasApiKey) {\n if (!user || !project || !organization) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n }\n\n try {\n let tags: Tag[] = [];\n\n if (project?.organizationId) {\n tags = await getTagsByKeys(tagsKeys, project.organizationId);\n }\n\n const auditResponse = await translateJSONUtil.translateJSON({\n ...rest,\n aiOptions,\n tags,\n });\n\n if (!auditResponse) {\n ErrorHandler.handleGenericErrorResponse(res, 'AUDIT_FAILED');\n return;\n }\n\n const responseData =\n formatResponse<translateJSONUtil.TranslateJSONResultData>({\n data: auditResponse,\n });\n\n res.json(responseData);\n return;\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n\nexport type AuditContentDeclarationBody = {\n aiOptions?: AIOptions;\n locales: Locales[];\n defaultLocale: Locales;\n fileContent: string;\n filePath?: string;\n tagsKeys?: string[];\n};\nexport type AuditContentDeclarationResult =\n ResponseData<auditContentDeclarationUtil.AuditFileResultData>;\n\n/**\n * Retrieves a list of dictionaries based on filters and pagination.\n */\nexport const auditContentDeclaration = async (\n req: Request<AuditContentDeclarationBody>,\n res: ResponseWithInformation<AuditContentDeclarationResult>,\n _next: NextFunction\n): Promise<void> => {\n const { user, project, organization } = res.locals;\n const { fileContent, filePath, aiOptions, locales, defaultLocale, tagsKeys } =\n req.body;\n\n // Check if any API key is present\n const hasApiKey = 
Boolean(aiOptions?.apiKey);\n\n if (!hasApiKey) {\n if (!user || !project || !organization) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n }\n\n try {\n let tags: Tag[] = [];\n\n if (project?.organizationId) {\n tags = await getTagsByKeys(tagsKeys, project.organizationId);\n }\n\n const auditResponse = await auditContentDeclarationUtil.auditDictionary({\n fileContent,\n filePath,\n aiOptions,\n locales,\n defaultLocale,\n tags,\n });\n\n if (!auditResponse) {\n ErrorHandler.handleGenericErrorResponse(res, 'AUDIT_FAILED');\n return;\n }\n\n const responseData =\n formatResponse<auditContentDeclarationUtil.AuditFileResultData>({\n data: auditResponse,\n });\n\n res.json(responseData);\n return;\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n\nexport type AuditContentDeclarationFieldBody = {\n aiOptions?: AIOptions;\n locales: Locales[];\n fileContent: string;\n filePath?: string;\n tagsKeys?: string[];\n keyPath: KeyPath[];\n};\nexport type AuditContentDeclarationFieldResult =\n ResponseData<auditContentDeclarationUtil.AuditFileResultData>;\n\n/**\n * Retrieves a list of dictionaries based on filters and pagination.\n */\nexport const auditContentDeclarationField = async (\n req: Request<AuditContentDeclarationFieldBody>,\n res: ResponseWithInformation<AuditContentDeclarationFieldResult>,\n _next: NextFunction\n): Promise<void> => {\n const { user, project, organization } = res.locals;\n const { fileContent, aiOptions, locales, tagsKeys, keyPath } = req.body;\n\n // Check if any API key is present\n const hasApiKey = Boolean(aiOptions?.apiKey);\n\n if (!hasApiKey) {\n if (!user || !project || !organization) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n }\n\n try {\n let tags: Tag[] = [];\n\n if (project?.organizationId) {\n tags = await getTagsByKeys(tagsKeys, project.organizationId);\n }\n\n const auditResponse =\n await auditContentDeclarationFieldUtil.auditDictionaryField({\n fileContent,\n aiOptions,\n locales,\n tags,\n keyPath,\n });\n\n if (!auditResponse) {\n ErrorHandler.handleGenericErrorResponse(res, 'AUDIT_FAILED');\n return;\n }\n\n const responseData =\n formatResponse<auditContentDeclarationUtil.AuditFileResultData>({\n data: auditResponse,\n });\n\n res.json(responseData);\n return;\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n\nexport type AuditContentDeclarationMetadataBody = {\n aiOptions?: AIOptions;\n fileContent: string;\n};\nexport type AuditContentDeclarationMetadataResult =\n ResponseData<auditContentDeclarationUtil.AuditFileResultData>;\n\n/**\n * Retrieves a list of dictionaries based on filters and pagination.\n */\nexport const auditContentDeclarationMetadata = async (\n req: Request<AuditContentDeclarationMetadataBody>,\n res: ResponseWithInformation<AuditContentDeclarationMetadataResult>,\n _next: NextFunction\n): Promise<void> => {\n const { user, organization, project } = res.locals;\n const { fileContent, aiOptions } = req.body;\n\n // Check if any API key is present\n const hasApiKey = Boolean(aiOptions?.apiKey);\n\n if (!hasApiKey) {\n if (!user || !project || !organization) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n }\n\n try {\n const tags: Tag[] = await tagService.findTags(\n {\n organizationId: organization?._id,\n },\n 0,\n 1000\n );\n\n const auditResponse =\n await 
auditContentDeclarationMetadataUtil.auditDictionaryMetadata({\n fileContent,\n aiOptions,\n tags,\n });\n\n if (!auditResponse) {\n ErrorHandler.handleGenericErrorResponse(res, 'AUDIT_FAILED');\n return;\n }\n\n const responseData =\n formatResponse<auditContentDeclarationUtil.AuditFileResultData>({\n data: auditResponse,\n });\n\n res.json(responseData);\n return;\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n\nexport type AuditTagBody = {\n aiOptions?: AIOptions;\n tag: Tag;\n};\nexport type AuditTagResult =\n ResponseData<auditContentDeclarationUtil.AuditFileResultData>;\n\n/**\n * Retrieves a list of dictionaries based on filters and pagination.\n */\nexport const auditTag = async (\n req: Request<undefined, undefined, AuditTagBody>,\n res: ResponseWithInformation<AuditTagResult>,\n _next: NextFunction\n): Promise<void> => {\n const { user, project, organization } = res.locals;\n const { aiOptions, tag } = req.body;\n\n // Check if any API key is present\n const hasApiKey = Boolean(aiOptions?.apiKey);\n\n if (!hasApiKey) {\n if (!user || !project || !organization) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n }\n\n try {\n let dictionaries: Dictionary[] = [];\n if (project?.organizationId) {\n dictionaries = await getDictionariesByTags([tag.key], project._id);\n }\n\n const auditResponse = await auditTagUtil.auditTag({\n aiOptions,\n dictionaries,\n tag,\n });\n\n if (!auditResponse) {\n ErrorHandler.handleGenericErrorResponse(res, 'AUDIT_FAILED');\n return;\n }\n\n const responseData =\n formatResponse<auditContentDeclarationUtil.AuditFileResultData>({\n data: auditResponse,\n });\n\n res.json(responseData);\n return;\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n }\n};\n\nexport type AskDocQuestionBody = {\n messages: ChatCompletionRequestMessage[];\n discutionId: string;\n};\nexport type AskDocQuestionResult =\n ResponseData<askDocQuestionUtil.AskDocQuestionResult>;\n\nexport const askDocQuestion = async (\n req: Request<undefined, undefined, AskDocQuestionBody>,\n res: ResponseWithInformation<AskDocQuestionResult>\n) => {\n const { messages, discutionId } = req.body;\n const { user, project, organization } = res.locals;\n\n // 1. Prepare SSE headers and flush them NOW\n res.setHeader('Content-Type', 'text/event-stream; charset=utf-8');\n res.setHeader('Cache-Control', 'no-cache, no-transform');\n res.setHeader('Connection', 'keep-alive');\n res.setHeader('X-Accel-Buffering', 'no'); // disable nginx buffering\n res.flushHeaders?.();\n res.write(': connected\\n\\n'); // initial comment keeps some browsers happy\n res.flush?.();\n\n // 2. Kick off the upstream stream WITHOUT awaiting it\n askDocQuestionUtil\n .askDocQuestion(messages, {\n onMessage: (chunk) => {\n res.write(`data: ${JSON.stringify({ chunk })}\\n\\n`);\n res.flush?.();\n },\n })\n .then(async (fullResponse) => {\n const lastUserMessageContent = messages.findLast(\n (msg) => msg.role === 'user'\n )?.content;\n const lastUserMessageNbWords = lastUserMessageContent\n ? lastUserMessageContent.split(' ').length\n : 0;\n if (lastUserMessageNbWords > 2) {\n // If the last user message is less than 3 words, don't persist the discussion\n // Example: \"Hello\", \"Hi\", \"Hey\", \"test\", etc.\n\n // 3. 
Persist discussion while the client already has all chunks\n await DiscussionModel.findOneAndUpdate(\n { discutionId },\n {\n $set: {\n discutionId,\n userId: user?._id,\n projectId: project?._id,\n organizationId: organization?._id,\n messages: [\n ...messages.map((msg) => ({\n role: msg.role,\n content: msg.content,\n timestamp: msg.timestamp,\n })),\n {\n role: 'assistant',\n content: fullResponse.response,\n relatedFiles: fullResponse.relatedFiles,\n timestamp: new Date(),\n },\n ],\n },\n },\n { upsert: true, new: true }\n );\n }\n\n // 4. Tell the client we're done and close the stream\n res.write(\n `data: ${JSON.stringify({ done: true, response: fullResponse })}\\n\\n`\n );\n res.end();\n })\n .catch((err) => {\n // propagate error as an SSE event so the client knows why it closed\n res.write(\n `event: error\\ndata: ${JSON.stringify({ message: err.message })}\\n\\n`\n );\n res.end();\n });\n};\n\nexport type AutocompleteBody = {\n text: string;\n aiOptions?: AIOptions;\n};\n\nexport type AutocompleteResponse = ResponseData<{\n autocompletion: string;\n}>;\n\nexport const autocomplete = async (\n req: Request<undefined, undefined, AutocompleteBody>,\n res: ResponseWithInformation<AutocompleteResponse>\n) => {\n try {\n const { text, aiOptions } = req.body;\n const { user, project, organization } = res.locals;\n\n // Check if any API key is present\n const hasApiKey = Boolean(aiOptions?.apiKey);\n\n if (!hasApiKey) {\n if (!user || !project || !organization) {\n ErrorHandler.handleGenericErrorResponse(res, 'AI_ACCESS_DENIED');\n return;\n }\n }\n\n const response = (await autocompleteUtil.autocomplete({\n text,\n aiOptions,\n })) ?? {\n autocompletion: '',\n tokenUsed: 0,\n };\n\n const responseData =\n formatResponse<autocompleteUtil.AutocompleteFileResultData>({\n data: response,\n });\n\n res.json(responseData);\n } catch (error) {\n ErrorHandler.handleAppErrorResponse(res, error as AppError);\n return;\n 
}\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBAAgC;AAKhC,wBAAsC;AACtC,iBAA4B;AAC5B,iBAA8B;AAE9B,yBAAoC;AACpC,kCAA6C;AAC7C,uCAAkD;AAClD,0CAAqD;AACrD,mBAA8B;AAC9B,uBAAkC;AAClC,wBAAmC;AACnC,oBAA4C;AAC5C,0BAAkD;AAa3C,MAAM,gBAAgB,OAC3B,KACA,KACA,UACkB;AAClB,QAAM,EAAE,MAAM,SAAS,aAAa,IAAI,IAAI;AAC5C,QAAM,EAAE,WAAW,UAAU,GAAG,KAAK,IAAI,IAAI;AAG7C,QAAM,YAAY,QAAQ,WAAW,MAAM;AAE3C,MAAI,CAAC,WAAW;AACd,QAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,cAAc;AACtC,iCAAa,2BAA2B,KAAK,kBAAkB;AAC/D;AAAA,IACF;AAAA,EACF;AAEA,MAAI;AACF,QAAI,OAAc,CAAC;AAEnB,QAAI,SAAS,gBAAgB;AAC3B,aAAO,UAAM,0BAAc,UAAU,QAAQ,cAAc;AAAA,IAC7D;AAEA,UAAM,gBAAgB,MAAM,kBAAkB,cAAc;AAAA,MAC1D,GAAG;AAAA,MACH;AAAA,MACA;AAAA,IACF,CAAC;AAED,QAAI,CAAC,eAAe;AAClB,iCAAa,2BAA2B,KAAK,cAAc;AAC3D;AAAA,IACF;AAEA,UAAM,mBACJ,oCAA0D;AAAA,MACxD,MAAM;AAAA,IACR,CAAC;AAEH,QAAI,KAAK,YAAY;AACrB;AAAA,EACF,SAAS,OAAO;AACd,+BAAa,uBAAuB,KAAK,KAAiB;AAC1D;AAAA,EACF;AACF;AAgBO,MAAM,0BAA0B,OACrC,KACA,KACA,UACkB;AAClB,QAAM,EAAE,MAAM,SAAS,aAAa,IAAI,IAAI;AAC5C,QAAM,EAAE,aAAa,UAAU,WAAW,SAAS,eAAe,SAAS,IACzE,IAAI;AAGN,QAAM,YAAY,QAAQ,WAAW,MAAM;AAE3C,MAAI,CAAC,WAAW;AACd,QAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,cAAc;AACtC,iCAAa,2BAA2B,KAAK,kBAAkB;AAC/D;AAAA,IACF;AAAA,EACF;AAEA,MAAI;AACF,QAAI,OAAc,CAAC;AAEnB,QAAI,SAAS,gBAAgB;AAC3B,aAAO,UAAM,0BAAc,UAAU,QAAQ,cAAc;AAAA,IAC7D;AAEA,UAAM,gBAAgB,MAAM,4BAA4B,gBAAgB;AAAA,MACtE;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAED,QAAI,CAAC,eAAe;AAClB,iCAAa,2BAA2B,KAAK,cAAc;AAC3D;AAAA,IACF;AAEA,UAAM,mBACJ,oCAAgE;AAAA,MAC9D,MAAM;AAAA,IACR,CAAC;AAEH,QAAI,KAAK,YAAY;AACrB;AAAA,EACF,SAAS,OAAO;AACd,+BAAa,uBAAuB,KAAK,KAAiB;AAC1D;AAAA,EACF;AACF;AAgBO,MAAM,+BAA+B,OAC1C,KACA,KACA,UACkB;AAClB,QAAM,EAAE,MAAM,SAAS,aAAa,IAAI,IAAI;AAC5C,QAAM,EAAE,aAAa,WAAW,SAAS,UAAU,QAAQ,IAAI,IAAI;AAGnE,QAAM,YAAY,QAAQ,WAAW,MAAM;AAE3C,MAAI,CAAC,WAAW;AACd,QAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,cAAc;AACtC,iCAAa,2BAA2B,KAAK,kBAAkB;AAC/D;AAAA,IACF;AAAA,EACF;AAEA,MAAI;AACF,QAAI,OAAc,CAAC;AAEnB,QAAI,SAAS,gBAAgB;AAC3B,aAAO,UAAM,0BAAc,UAAU,QAAQ,cAAc;AAAA,IAC7D;AAEA,UAAM,gBACJ,MAAM,iCAAiC,qBAAqB;AAAA,MAC1D;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAEH,QAAI,CAAC,eAAe;AAClB,iCAAa,2BAA2B,KAAK,cAAc;AAC3D;AAAA,IACF;AAEA,UAAM,mBACJ,oCAAgE;AAAA,MAC9D,MAAM;AAAA,IACR,CAAC;AAEH,QAAI,KAAK,YAAY;AACrB;AAAA,EACF,SAAS,OAAO;AACd,+BAAa,uBAAuB,KAAK,KAAiB;AAC1D;AAAA,EACF;AACF;AAYO,MAAM,kCAAkC,OAC7C,KACA,KACA,UACkB;AAClB,QAAM,EAAE,MAAM,cAAc,QAAQ,IAAI,IAAI;AAC5C,QAAM,EAAE,aAAa,UAAU,IAAI,IAAI;AAGvC,QAAM,YAAY,QAAQ,WAAW,MAAM;AAE3C,MAAI,CAAC,WAAW;AACd,QAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,cAAc;AACtC,iCAAa,2BAA2B,KAAK,kBAAkB;AAC/D;AAAA,IACF;AAAA,EACF;AAEA,MAAI;AACF,UAAM,OAAc,MAAM,WAAW;AAAA,MACnC;AAAA,QACE,gBAAgB,cAAc;AAAA,MAChC;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEA,UAAM,gBACJ,MAAM,oCAAoC,wBAAwB;AAAA,MAChE;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAEH,QAAI,CAAC,eAAe;AAClB,iCAAa,2BAA2B,KAAK,cAAc;AAC3D;AAAA,IACF;AAEA,UAAM,mBACJ,oCAAgE;AAAA,MAC9D,MAAM;AAAA,IACR,CAAC;AAEH,QAAI,KAAK,YAAY;AACrB;AAAA,EACF,SAAS,OAAO;AACd,+BAAa,uBAAuB,KAAK,KAAiB;AAC1D;AAAA,EACF;AACF;AAYO,MAAM,WAAW,OACtB,KACA,KACA,UACkB;AAClB,QAAM,EAAE,MAAM,SAAS,aAAa,IAAI,IAAI;AAC5C,QAAM,EAAE,WAAW,IAAI,IAAI,IAAI;AAG/B,QAAM,YAAY,QAAQ,WAAW,MAAM;AAE3C,MAAI,CAAC,WAAW;AACd,QAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,cAAc;AACtC,iCAAa,2BAA2B,KAAK,kBAAkB;AAC/D;AAAA,IACF;AAAA,EACF;AAEA,MAAI;AACF,QAAI,eAA6B,CAAC;AAClC,QAAI,SAAS,gBAAgB;AAC3B,qBAAe,UAAM,yCAAsB,CAAC,IAAI,GAAG,GAAG,QAAQ,GAAG;AAAA,IACnE;AAEA,UAAM,gBAAgB,MAAM,aAAa,SAAS;AAAA,MAChD;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAED,QAAI,CAAC,eAAe;AAClB,iCAAa,2BAA2B,KAAK,cAAc;AAC3D;AAAA,IAC
F;AAEA,UAAM,mBACJ,oCAAgE;AAAA,MAC9D,MAAM;AAAA,IACR,CAAC;AAEH,QAAI,KAAK,YAAY;AACrB;AAAA,EACF,SAAS,OAAO;AACd,+BAAa,uBAAuB,KAAK,KAAiB;AAC1D;AAAA,EACF;AACF;AASO,MAAM,iBAAiB,OAC5B,KACA,QACG;AACH,QAAM,EAAE,UAAU,YAAY,IAAI,IAAI;AACtC,QAAM,EAAE,MAAM,SAAS,aAAa,IAAI,IAAI;AAG5C,MAAI,UAAU,gBAAgB,kCAAkC;AAChE,MAAI,UAAU,iBAAiB,wBAAwB;AACvD,MAAI,UAAU,cAAc,YAAY;AACxC,MAAI,UAAU,qBAAqB,IAAI;AACvC,MAAI,eAAe;AACnB,MAAI,MAAM,iBAAiB;AAC3B,MAAI,QAAQ;AAGZ,qBACG,eAAe,UAAU;AAAA,IACxB,WAAW,CAAC,UAAU;AACpB,UAAI,MAAM,SAAS,KAAK,UAAU,EAAE,MAAM,CAAC,CAAC;AAAA;AAAA,CAAM;AAClD,UAAI,QAAQ;AAAA,IACd;AAAA,EACF,CAAC,EACA,KAAK,OAAO,iBAAiB;AAC5B,UAAM,yBAAyB,SAAS;AAAA,MACtC,CAAC,QAAQ,IAAI,SAAS;AAAA,IACxB,GAAG;AACH,UAAM,yBAAyB,yBAC3B,uBAAuB,MAAM,GAAG,EAAE,SAClC;AACJ,QAAI,yBAAyB,GAAG;AAK9B,YAAM,kCAAgB;AAAA,QACpB,EAAE,YAAY;AAAA,QACd;AAAA,UACE,MAAM;AAAA,YACJ;AAAA,YACA,QAAQ,MAAM;AAAA,YACd,WAAW,SAAS;AAAA,YACpB,gBAAgB,cAAc;AAAA,YAC9B,UAAU;AAAA,cACR,GAAG,SAAS,IAAI,CAAC,SAAS;AAAA,gBACxB,MAAM,IAAI;AAAA,gBACV,SAAS,IAAI;AAAA,gBACb,WAAW,IAAI;AAAA,cACjB,EAAE;AAAA,cACF;AAAA,gBACE,MAAM;AAAA,gBACN,SAAS,aAAa;AAAA,gBACtB,cAAc,aAAa;AAAA,gBAC3B,WAAW,oBAAI,KAAK;AAAA,cACtB;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,QACA,EAAE,QAAQ,MAAM,KAAK,KAAK;AAAA,MAC5B;AAAA,IACF;AAGA,QAAI;AAAA,MACF,SAAS,KAAK,UAAU,EAAE,MAAM,MAAM,UAAU,aAAa,CAAC,CAAC;AAAA;AAAA;AAAA,IACjE;AACA,QAAI,IAAI;AAAA,EACV,CAAC,EACA,MAAM,CAAC,QAAQ;AAEd,QAAI;AAAA,MACF;AAAA,QAAuB,KAAK,UAAU,EAAE,SAAS,IAAI,QAAQ,CAAC,CAAC;AAAA;AAAA;AAAA,IACjE;AACA,QAAI,IAAI;AAAA,EACV,CAAC;AACL;AAWO,MAAM,eAAe,OAC1B,KACA,QACG;AACH,MAAI;AACF,UAAM,EAAE,MAAM,UAAU,IAAI,IAAI;AAChC,UAAM,EAAE,MAAM,SAAS,aAAa,IAAI,IAAI;AAG5C,UAAM,YAAY,QAAQ,WAAW,MAAM;AAE3C,QAAI,CAAC,WAAW;AACd,UAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,cAAc;AACtC,mCAAa,2BAA2B,KAAK,kBAAkB;AAC/D;AAAA,MACF;AAAA,IACF;AAEA,UAAM,WAAY,MAAM,iBAAiB,aAAa;AAAA,MACpD;AAAA,MACA;AAAA,IACF,CAAC,KAAM;AAAA,MACL,gBAAgB;AAAA,MAChB,WAAW;AAAA,IACb;AAEA,UAAM,mBACJ,oCAA4D;AAAA,MAC1D,MAAM;AAAA,IACR,CAAC;AAEH,QAAI,KAAK,YAAY;AAAA,EACvB,SAAS,OAAO;AACd,+BAAa,uBAAuB,KAAK,KAAiB;AAC1D;AAAA,EACF;AACF;","names":[]}

package/dist/cjs/controllers/search.controller.cjs
ADDED
@@ -0,0 +1,54 @@
+"use strict";
+var __create = Object.create;
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+  // If the importer is in node compatibility mode or this is not an ESM
+  // file that has been converted to a CommonJS file using a Babel-
+  // compatible transform (i.e. "__esModule" has not been set), then set
+  // "default" to the CommonJS "module.exports" for node compatibility.
+  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+  mod
+));
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var search_controller_exports = {};
+__export(search_controller_exports, {
+  searchDocUtil: () => searchDocUtil
+});
+module.exports = __toCommonJS(search_controller_exports);
+var askDocQuestionUtil = __toESM(require('./../utils/AI/askDocQuestion/askDocQuestion.cjs'), 1);
+var import_responseData = require('./../utils/responseData.cjs');
+const searchDocUtil = async (req, res) => {
+  const { input } = req.query;
+  const response = await askDocQuestionUtil.searchChunkReference(
+    input,
+    30,
+    0.2
+  );
+  const docFileList = response.map((doc) => doc.fileKey);
+  const uniqueDocFileList = Array.from(new Set(docFileList));
+  const responseData = (0, import_responseData.formatResponse)({
+    data: uniqueDocFileList
+  });
+  res.json(responseData);
+};
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  searchDocUtil
+});
+//# sourceMappingURL=search.controller.cjs.map

package/dist/cjs/controllers/search.controller.cjs.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"sources":["../../../src/controllers/search.controller.ts"],"sourcesContent":["import type { ResponseWithInformation } from '@middlewares/sessionAuth.middleware';\nimport * as askDocQuestionUtil from '@utils/AI/askDocQuestion/askDocQuestion';\nimport { formatResponse, type ResponseData } from '@utils/responseData';\nimport type { Request } from 'express';\n\nexport type SearchDocUtilParams = {\n input: string;\n};\nexport type SearchDocUtilResult = ResponseData<string[]>;\n\nexport const searchDocUtil = async (\n req: Request<unknown, unknown, unknown, SearchDocUtilParams>,\n res: ResponseWithInformation<SearchDocUtilResult>\n) => {\n const { input } = req.query;\n\n const response = await askDocQuestionUtil.searchChunkReference(\n input,\n 30,\n 0.2\n );\n const docFileList = response.map((doc) => doc.fileKey);\n\n const uniqueDocFileList = Array.from(new Set(docFileList));\n\n const responseData = formatResponse<string[]>({\n data: uniqueDocFileList,\n });\n\n res.json(responseData);\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,yBAAoC;AACpC,0BAAkD;AAQ3C,MAAM,gBAAgB,OAC3B,KACA,QACG;AACH,QAAM,EAAE,MAAM,IAAI,IAAI;AAEtB,QAAM,WAAW,MAAM,mBAAmB;AAAA,IACxC;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACA,QAAM,cAAc,SAAS,IAAI,CAAC,QAAQ,IAAI,OAAO;AAErD,QAAM,oBAAoB,MAAM,KAAK,IAAI,IAAI,WAAW,CAAC;AAEzD,QAAM,mBAAe,oCAAyB;AAAA,IAC5C,MAAM;AAAA,EACR,CAAC;AAED,MAAI,KAAK,YAAY;AACvB;","names":[]}
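
For readability, the TypeScript source embedded in this map's sourcesContent is reproduced below. The new endpoint reuses the existing askDocQuestion chunk search (the literals 30 and 0.2 look like a result limit and a relevance threshold; the parameter names are not visible in this diff) and responds with the de-duplicated list of matching doc file keys:

import type { ResponseWithInformation } from '@middlewares/sessionAuth.middleware';
import * as askDocQuestionUtil from '@utils/AI/askDocQuestion/askDocQuestion';
import { formatResponse, type ResponseData } from '@utils/responseData';
import type { Request } from 'express';

export type SearchDocUtilParams = {
  input: string;
};
export type SearchDocUtilResult = ResponseData<string[]>;

export const searchDocUtil = async (
  req: Request<unknown, unknown, unknown, SearchDocUtilParams>,
  res: ResponseWithInformation<SearchDocUtilResult>
) => {
  const { input } = req.query;

  const response = await askDocQuestionUtil.searchChunkReference(
    input,
    30,
    0.2
  );
  const docFileList = response.map((doc) => doc.fileKey);

  const uniqueDocFileList = Array.from(new Set(docFileList));

  const responseData = formatResponse<string[]>({
    data: uniqueDocFileList,
  });

  res.json(responseData);
};
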
package/dist/cjs/export.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../../src/export.ts"],"sourcesContent":["// Routes\nexport { getDictionaryRoutes } from '@routes/dictionary.routes';\nexport { getOrganizationRoutes } from '@routes/organization.routes';\nexport { getProjectRoutes } from '@routes/project.routes';\nexport { getSessionAuthRoutes } from '@routes/sessionAuth.routes';\nexport { getUserRoutes } from '@routes/user.routes';\n\n// Controllers types\nexport type * from '@controllers/ai.controller';\nexport type * from '@controllers/dictionary.controller';\nexport type * from '@controllers/eventListener.controller';\nexport type * from '@controllers/oAuth2.controller';\nexport type * from '@controllers/organization.controller';\nexport type * from '@controllers/project.controller';\nexport type * from '@controllers/projectAccessKey.controller';\nexport type * from '@controllers/sessionAuth.controller';\nexport type * from '@controllers/stripe.controller';\nexport type * from '@controllers/tag.controller';\nexport type * from '@controllers/user.controller';\n\n// Objects types\nexport type * from '@/types/dictionary.types';\nexport type * from '@/types/oAuth2.types';\nexport type * from '@/types/organization.types';\nexport type * from '@/types/plan.types';\nexport type * from '@/types/project.types';\nexport type * from '@/types/tag.types';\nexport type * from '@/types/user.types';\n\n// Utils\nexport * from '@utils/AI/aiSdk';\nexport * from '@utils/cookies';\nexport * from '@utils/httpStatusCodes';\nexport * from '@utils/responseData';\n"],"mappings":";;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,wBAAoC;AACpC,0BAAsC;AACtC,qBAAiC;AACjC,yBAAqC;AACrC,kBAA8B;
+
{"version":3,"sources":["../../src/export.ts"],"sourcesContent":["// Routes\nexport { getDictionaryRoutes } from '@routes/dictionary.routes';\nexport { getOrganizationRoutes } from '@routes/organization.routes';\nexport { getProjectRoutes } from '@routes/project.routes';\nexport { getSessionAuthRoutes } from '@routes/sessionAuth.routes';\nexport { getUserRoutes } from '@routes/user.routes';\n\n// Controllers types\nexport type * from '@controllers/ai.controller';\nexport type * from '@controllers/dictionary.controller';\nexport type * from '@controllers/eventListener.controller';\nexport type * from '@controllers/oAuth2.controller';\nexport type * from '@controllers/organization.controller';\nexport type * from '@controllers/project.controller';\nexport type * from '@controllers/projectAccessKey.controller';\nexport type * from '@controllers/search.controller';\nexport type * from '@controllers/sessionAuth.controller';\nexport type * from '@controllers/stripe.controller';\nexport type * from '@controllers/tag.controller';\nexport type * from '@controllers/user.controller';\n\n// Objects types\nexport type * from '@/types/dictionary.types';\nexport type * from '@/types/oAuth2.types';\nexport type * from '@/types/organization.types';\nexport type * from '@/types/plan.types';\nexport type * from '@/types/project.types';\nexport type * from '@/types/tag.types';\nexport type * from '@/types/user.types';\n\n// Utils\nexport * from '@utils/AI/aiSdk';\nexport * from '@utils/cookies';\nexport * from '@utils/httpStatusCodes';\nexport * from '@utils/responseData';\n"],"mappings":";;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,wBAAoC;AACpC,0BAAsC;AACtC,qBAAiC;AACjC,yBAAqC;AACrC,kBAA8B;AA0B9B,2BAAc,4BA/Bd;AAgCA,2BAAc,2BAhCd;AAiCA,2BAAc,mCAjCd;AAkCA,2BAAc,gCAlCd;","names":[]}
package/dist/cjs/index.cjs
CHANGED
@@ -41,15 +41,16 @@ var import_helmet = __toESM(require("helmet"), 1);
 var import_oAuth2 = require('./middlewares/oAuth2.middleware.cjs');
 var import_request = require('./middlewares/request.middleware.cjs');
 var import_sessionAuth = require('./middlewares/sessionAuth.middleware.cjs');
+var import_ai = require('./routes/ai.routes.cjs');
 var import_dictionary = require('./routes/dictionary.routes.cjs');
+var import_eventListener = require('./routes/eventListener.routes.cjs');
 var import_organization = require('./routes/organization.routes.cjs');
 var import_project = require('./routes/project.routes.cjs');
-var
+var import_search = require('./routes/search.routes.cjs');
 var import_sessionAuth2 = require('./routes/sessionAuth.routes.cjs');
-var import_user = require('./routes/user.routes.cjs');
 var import_stripe = require('./routes/stripe.routes.cjs');
-var
-var
+var import_tags = require('./routes/tags.routes.cjs');
+var import_user = require('./routes/user.routes.cjs');
 var import_stripe2 = require('./webhooks/stripe.webhook.cjs');
 var import_oAuth22 = require('./controllers/oAuth2.controller.cjs');
 var import_sessionAuth3 = require('./controllers/sessionAuth.controller.cjs');
@@ -148,6 +149,7 @@ app.use("/api/dictionary", import_dictionary.dictionaryRouter);
 app.use("/api/stripe", import_stripe.stripeRouter);
 app.use("/api/ai", import_ai.aiRouter);
 app.use("/api/event-listener", import_eventListener.eventListenerRouter);
+app.use("/api/search", import_search.searchRouter);
 app.listen(process.env.PORT, () => {
   import_logger.logger.info(`Listening on port ${process.env.PORT}`);
 });
package/dist/cjs/index.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../../src/index.ts"],"sourcesContent":["// Libraries\nimport compression from 'compression';\nimport cookieParser from 'cookie-parser';\nimport cors, { type CorsOptions } from 'cors';\nimport dotenv from 'dotenv';\nimport express, { type Express } from 'express';\nimport { intlayer, t } from 'express-intlayer';\nimport helmet from 'helmet';\n\n// Middlewares\nimport {\n type RequestWithOAuth2Information,\n attachOAuthInstance,\n authenticateOAuth2,\n} from '@middlewares/oAuth2.middleware';\nimport { logAPIRequestURL } from '@middlewares/request.middleware';\nimport {\n type ResponseWithInformation,\n
+
{"version":3,"sources":["../../src/index.ts"],"sourcesContent":["// Libraries\nimport compression from 'compression';\nimport cookieParser from 'cookie-parser';\nimport cors, { type CorsOptions } from 'cors';\nimport dotenv from 'dotenv';\nimport express, { type Express } from 'express';\nimport { intlayer, t } from 'express-intlayer';\nimport helmet from 'helmet';\n\n// Middlewares\nimport {\n type RequestWithOAuth2Information,\n attachOAuthInstance,\n authenticateOAuth2,\n} from '@middlewares/oAuth2.middleware';\nimport { logAPIRequestURL } from '@middlewares/request.middleware';\nimport {\n type ResponseWithInformation,\n checkAdmin,\n checkOrganization,\n checkProject,\n checkUser,\n} from '@middlewares/sessionAuth.middleware';\n\n// Routes\nimport { aiRouter } from '@routes/ai.routes';\nimport { dictionaryRouter } from '@routes/dictionary.routes';\nimport { eventListenerRouter } from '@routes/eventListener.routes';\nimport { organizationRouter } from '@routes/organization.routes';\nimport { projectRouter } from '@routes/project.routes';\nimport { searchRouter } from '@routes/search.routes';\nimport { sessionAuthRouter } from '@routes/sessionAuth.routes';\nimport { stripeRouter } from '@routes/stripe.routes';\nimport { tagRouter } from '@routes/tags.routes';\nimport { userRouter } from '@routes/user.routes';\n\n// Webhooks\nimport { stripeWebhook } from '@webhooks/stripe.webhook';\n\n// Controllers\nimport { getOAuth2Token } from '@controllers/oAuth2.controller';\nimport {\n getSessionInformation,\n setCSRFToken,\n} from '@controllers/sessionAuth.controller';\n\n// Utils\nimport { doubleCsrfProtection } from '@utils/CSRF';\nimport { connectDB } from '@utils/mongoDB/connectDB';\n\n// Logger\nimport { logger } from './logger/index';\n\nconst app: Express = express();\n\napp.disable('x-powered-by'); // Disabled to prevent attackers from knowing that the app is running Express\napp.use(helmet());\n\n// Environment variables\nconst env = app.get('env');\n\nlogger.info(`run as ${env}`);\n\ndotenv.config({\n path: [`.env.${env}.local`, `.env.${env}`, '.env.local', '.env'],\n});\n\n// Parse incoming requests with cookies\napp.use(cookieParser());\n\n// Load internationalization request handler\napp.use(intlayer());\n\nconst isDev = env === 'development';\n\n// Connect to MongoDB\nconnectDB();\n\n// Stripe\napp.post(\n '/webhook/stripe',\n express.raw({ type: 'application/json' }),\n stripeWebhook\n);\n\n// Compress all HTTP responses\napp.use(compression());\n\n// Parse incoming requests with JSON payloads\napp.use(express.json({ limit: '50mb' }));\n\n// Parse incoming requests with urlencoded payloads\napp.use(express.urlencoded({ extended: true }));\n\nconst whitelist = [process.env.CLIENT_URL!];\n\n// CORS\nconst corsOptions: CorsOptions = {\n origin: (origin, callback) => {\n // Allow requests with no origin (like mobile apps or curl requests)\n if (!origin) return callback(null, true);\n\n if (whitelist.includes(origin)) {\n logger.info('whitelisted origin', origin);\n return callback(null, true);\n }\n\n logger.info('non whitelisted origin', origin);\n // Reflect the request's origin (echo back the origin header)\n callback(null, origin);\n },\n allowedHeaders: [\n 'authorization',\n 'Content-Type',\n 'credentials',\n 'cache-control',\n 'Access-Control-Allow-Origin',\n 'private-state-token-redemption',\n 'private-state-token-issuance',\n 'browsing-topics',\n ],\n exposedHeaders: [''],\n preflightContinue: false,\n methods: 'GET,HEAD,PUT,PATCH,POST,DELETE',\n credentials: 
true,\n};\napp.use(cors(corsOptions));\n\n// Liveness check\napp.get('/', (_req, res) => {\n res.send(\n t({\n en: 'Ok - locale: en',\n fr: 'Ok - locale: fr',\n es: 'Ok - locale: es',\n })\n );\n});\n\n// middleware - jwt & session auth\napp.use(/(.*)/, checkUser);\napp.use(/(.*)/, checkOrganization);\napp.use(/(.*)/, checkProject);\napp.use(/(.*)/, checkAdmin);\n\n// debug\nif (isDev) {\n app.use(logAPIRequestURL);\n}\n\n// Sessions\napp.get('/session', getSessionInformation);\napp.use('/api/auth', sessionAuthRouter);\n\n// CSRF\napp.get('/csrf-token', setCSRFToken);\n\n// oAuth2\napp.use(/(.*)/, attachOAuthInstance);\napp.post('/oauth2/token', getOAuth2Token); // Route to get the token\napp.use(/(.*)/, (req, res, next) => {\n // If the request is not already authenticated check the oAuth2 token\n if (!res.locals.authType) {\n return authenticateOAuth2(\n req as RequestWithOAuth2Information,\n res as ResponseWithInformation,\n next\n );\n }\n next();\n});\n\n// CSRF protection\napp.use(/(.*)/, (req, res, next) => {\n // If the request is authenticated using the session auth check the CSRF token\n if (res.locals.authType === 'session') {\n return doubleCsrfProtection(req, res, next);\n }\n next();\n});\n\n// Routes\napp.use('/api/user', userRouter);\napp.use('/api/organization', organizationRouter);\napp.use('/api/project', projectRouter);\napp.use('/api/tag', tagRouter);\napp.use('/api/dictionary', dictionaryRouter);\napp.use('/api/stripe', stripeRouter);\napp.use('/api/ai', aiRouter);\napp.use('/api/event-listener', eventListenerRouter);\napp.use('/api/search', searchRouter);\n\n// Server\napp.listen(process.env.PORT, () => {\n logger.info(`Listening on port ${process.env.PORT}`);\n});\n\n// Export tu use as serverless function\nexport default app;\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,yBAAwB;AACxB,2BAAyB;AACzB,kBAAuC;AACvC,oBAAmB;AACnB,qBAAsC;AACtC,8BAA4B;AAC5B,oBAAmB;AAGnB,oBAIO;AACP,qBAAiC;AACjC,yBAMO;AAGP,gBAAyB;AACzB,wBAAiC;AACjC,2BAAoC;AACpC,0BAAmC;AACnC,qBAA8B;AAC9B,oBAA6B;AAC7B,IAAAA,sBAAkC;AAClC,oBAA6B;AAC7B,kBAA0B;AAC1B,kBAA2B;AAG3B,IAAAC,iBAA8B;AAG9B,IAAAC,iBAA+B;AAC/B,IAAAF,sBAGO;AAGP,kBAAqC;AACrC,uBAA0B;AAG1B,oBAAuB;AAEvB,MAAM,UAAe,eAAAG,SAAQ;AAE7B,IAAI,QAAQ,cAAc;AAC1B,IAAI,QAAI,cAAAC,SAAO,CAAC;AAGhB,MAAM,MAAM,IAAI,IAAI,KAAK;AAEzB,qBAAO,KAAK,UAAU,GAAG,EAAE;AAE3B,cAAAC,QAAO,OAAO;AAAA,EACZ,MAAM,CAAC,QAAQ,GAAG,UAAU,QAAQ,GAAG,IAAI,cAAc,MAAM;AACjE,CAAC;AAGD,IAAI,QAAI,qBAAAC,SAAa,CAAC;AAGtB,IAAI,QAAI,kCAAS,CAAC;AAElB,MAAM,QAAQ,QAAQ;AAAA,IAGtB,4BAAU;AAGV,IAAI;AAAA,EACF;AAAA,EACA,eAAAH,QAAQ,IAAI,EAAE,MAAM,mBAAmB,CAAC;AAAA,EACxC;AACF;AAGA,IAAI,QAAI,mBAAAI,SAAY,CAAC;AAGrB,IAAI,IAAI,eAAAJ,QAAQ,KAAK,EAAE,OAAO,OAAO,CAAC,CAAC;AAGvC,IAAI,IAAI,eAAAA,QAAQ,WAAW,EAAE,UAAU,KAAK,CAAC,CAAC;AAE9C,MAAM,YAAY,CAAC,QAAQ,IAAI,UAAW;AAG1C,MAAM,cAA2B;AAAA,EAC/B,QAAQ,CAAC,QAAQ,aAAa;AAE5B,QAAI,CAAC,OAAQ,QAAO,SAAS,MAAM,IAAI;AAEvC,QAAI,UAAU,SAAS,MAAM,GAAG;AAC9B,2BAAO,KAAK,sBAAsB,MAAM;AACxC,aAAO,SAAS,MAAM,IAAI;AAAA,IAC5B;AAEA,yBAAO,KAAK,0BAA0B,MAAM;AAE5C,aAAS,MAAM,MAAM;AAAA,EACvB;AAAA,EACA,gBAAgB;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAAA,EACA,gBAAgB,CAAC,EAAE;AAAA,EACnB,mBAAmB;AAAA,EACnB,SAAS;AAAA,EACT,aAAa;AACf;AACA,IAAI,QAAI,YAAAK,SAAK,WAAW,CAAC;AAGzB,IAAI,IAAI,KAAK,CAAC,MAAM,QAAQ;AAC1B,MAAI;AAAA,QACF,2BAAE;AAAA,MACA,IAAI;AAAA,MACJ,IAAI;AAAA,MACJ,IAAI;AAAA,IACN,CAAC;AAAA,EACH;AACF,CAAC;AAGD,IAAI,IAAI,QAAQ,4BAAS;AACzB,IAAI,IAAI,QAAQ,oCAAiB;AACjC,IAAI,IAAI,QAAQ,+BAAY;AAC5B,IAAI,IAAI,QAAQ,6BAAU;AAG1B,IAAI,OAAO;AACT,MAAI,IAAI
,+BAAgB;AAC1B;AAGA,IAAI,IAAI,YAAY,yCAAqB;AACzC,IAAI,IAAI,aAAa,qCAAiB;AAGtC,IAAI,IAAI,eAAe,gCAAY;AAGnC,IAAI,IAAI,QAAQ,iCAAmB;AACnC,IAAI,KAAK,iBAAiB,6BAAc;AACxC,IAAI,IAAI,QAAQ,CAAC,KAAK,KAAK,SAAS;AAElC,MAAI,CAAC,IAAI,OAAO,UAAU;AACxB,eAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AACA,OAAK;AACP,CAAC;AAGD,IAAI,IAAI,QAAQ,CAAC,KAAK,KAAK,SAAS;AAElC,MAAI,IAAI,OAAO,aAAa,WAAW;AACrC,eAAO,kCAAqB,KAAK,KAAK,IAAI;AAAA,EAC5C;AACA,OAAK;AACP,CAAC;AAGD,IAAI,IAAI,aAAa,sBAAU;AAC/B,IAAI,IAAI,qBAAqB,sCAAkB;AAC/C,IAAI,IAAI,gBAAgB,4BAAa;AACrC,IAAI,IAAI,YAAY,qBAAS;AAC7B,IAAI,IAAI,mBAAmB,kCAAgB;AAC3C,IAAI,IAAI,eAAe,0BAAY;AACnC,IAAI,IAAI,WAAW,kBAAQ;AAC3B,IAAI,IAAI,uBAAuB,wCAAmB;AAClD,IAAI,IAAI,eAAe,0BAAY;AAGnC,IAAI,OAAO,QAAQ,IAAI,MAAM,MAAM;AACjC,uBAAO,KAAK,qBAAqB,QAAQ,IAAI,IAAI,EAAE;AACrD,CAAC;AAGD,IAAO,gBAAQ;","names":["import_sessionAuth","import_stripe","import_oAuth2","express","helmet","dotenv","cookieParser","compression","cors"]}
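
In source terms (from this map's embedded sourcesContent), src/index.ts now imports searchRouter and mounts it after the other routers; excerpt:

// Routes
import { aiRouter } from '@routes/ai.routes';
import { dictionaryRouter } from '@routes/dictionary.routes';
import { eventListenerRouter } from '@routes/eventListener.routes';
import { organizationRouter } from '@routes/organization.routes';
import { projectRouter } from '@routes/project.routes';
import { searchRouter } from '@routes/search.routes'; // added in 5.5.3
import { sessionAuthRouter } from '@routes/sessionAuth.routes';
import { stripeRouter } from '@routes/stripe.routes';
import { tagRouter } from '@routes/tags.routes';
import { userRouter } from '@routes/user.routes';

app.use('/api/search', searchRouter); // added in 5.5.3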
@@ -0,0 +1,42 @@
+"use strict";
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var search_routes_exports = {};
+__export(search_routes_exports, {
+  getSearchRoutes: () => getSearchRoutes,
+  searchRouter: () => searchRouter
+});
+module.exports = __toCommonJS(search_routes_exports);
+var import_search = require('./../controllers/search.controller.cjs');
+var import_express = require("express");
+const searchRouter = (0, import_express.Router)();
+const baseURL = () => `${process.env.BACKEND_URL}/api/search`;
+const getSearchRoutes = () => ({
+  doc: {
+    urlModel: "/doc",
+    url: `${baseURL()}/doc`,
+    method: "GET"
+  }
+});
+searchRouter.get(getSearchRoutes().doc.urlModel, import_search.searchDocUtil);
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  getSearchRoutes,
+  searchRouter
+});
+//# sourceMappingURL=search.routes.cjs.map
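The new router above registers a single public route, `GET ${BACKEND_URL}/api/search/doc`, handled by `searchDocUtil` from the new search controller. A minimal client sketch in TypeScript; the query-string parameter name `input` is an assumption, since the controller body is not part of this hunk:

```ts
// Sketch only: calling the new doc-search endpoint.
// ASSUMPTION: the search term travels as a query-string parameter
// named `input`; the controller implementation is not shown here.
const searchDoc = async (query: string): Promise<unknown> => {
  const url = new URL(`${process.env.BACKEND_URL}/api/search/doc`);
  url.searchParams.set('input', query); // hypothetical parameter name
  const response = await fetch(url); // GET, per getSearchRoutes().doc.method
  return response.json();
};
```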
@@ -0,0 +1 @@
+
{"version":3,"sources":["../../../src/routes/search.routes.ts"],"sourcesContent":["import type { Routes } from '@/types/Routes';\nimport { searchDocUtil } from '@controllers/search.controller';\nimport { Router } from 'express';\n\nexport const searchRouter: Router = Router();\n\nconst baseURL = () => `${process.env.BACKEND_URL}/api/search`;\n\nexport const getSearchRoutes = () =>\n ({\n doc: {\n urlModel: '/doc',\n url: `${baseURL()}/doc`,\n method: 'GET',\n },\n }) satisfies Routes;\n\nsearchRouter.get(getSearchRoutes().doc.urlModel, searchDocUtil);\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,oBAA8B;AAC9B,qBAAuB;AAEhB,MAAM,mBAAuB,uBAAO;AAE3C,MAAM,UAAU,MAAM,GAAG,QAAQ,IAAI,WAAW;AAEzC,MAAM,kBAAkB,OAC5B;AAAA,EACC,KAAK;AAAA,IACH,UAAU;AAAA,IACV,KAAK,GAAG,QAAQ,CAAC;AAAA,IACjB,QAAQ;AAAA,EACV;AACF;AAEF,aAAa,IAAI,gBAAgB,EAAE,IAAI,UAAU,2BAAa;","names":[]}
@@ -1 +1 @@
-
{"version":3,"sources":["../../../../src/utils/AI/aiSdk.ts"],"sourcesContent":["import { anthropic } from '@ai-sdk/anthropic';\nimport { deepseek } from '@ai-sdk/deepseek';\nimport { google } from '@ai-sdk/google';\nimport { mistral } from '@ai-sdk/mistral';\nimport { openai } from '@ai-sdk/openai';\nimport { logger } from '@logger';\n\n/**\n * Supported AI models\n */\nexport type Model =\n // OpenAI Models\n | 'gpt-4o-mini'\n | 'gpt-4o'\n | 'gpt-4.1'\n | 'gpt-4.1-mini'\n | 'gpt-4.1-nano'\n | 'gpt-4.5'\n | 'gpt-3.5-turbo'\n | 'gpt-4-turbo-preview'\n | 'gpt-4-vision-preview'\n | 'gpt-4o-audio-preview'\n | 'gpt-4o-mini-audio-preview'\n | 'o1-mini'\n | 'o1'\n | 'o1-pro'\n | 'o3-mini'\n | 'o3-mini-high'\n | 'o3'\n | 'o4-mini'\n | 'o4-mini-high'\n // Anthropic Models\n | 'claude-3-haiku-20240307'\n | 'claude-3-sonnet-20240229'\n | 'claude-3-opus-20240229'\n // Mistral Models\n | 'mistral-tiny'\n | 'mistral-small'\n | 'mistral-small-3.1'\n | 'mistral-medium'\n | 'mistral-medium-3'\n | 'mistral-large'\n | 'mistral-large-2'\n | 'mistral-large-latest'\n | 'codestral'\n | 'codestral-mamba'\n | 'mixtral-8x7b'\n | 'mixtral-8x22b'\n | 'mathstral-7b'\n | 'pixtral-large'\n // DeepSeek Models\n | 'deepseek-coder'\n | 'deepseek-chat'\n | 'deepseek-v3'\n // Google Models\n | 'gemini-1.0-pro'\n | 'gemini-1.5-pro'\n | 'gemini-1.5-flash'\n | (string & {});\n\n/**\n * Supported AI SDK providers\n */\nexport enum AIProvider {\n OPENAI = 'openai',\n ANTHROPIC = 'anthropic',\n MISTRAL = 'mistral',\n DEEPSEEK = 'deepseek',\n GEMINI = 'gemini',\n}\n\n/**\n * Common options for all AI providers\n */\nexport type AIOptions = {\n provider?: AIProvider;\n model?: Model;\n temperature?: number;\n apiKey?: string;\n customPrompt?: string;\n applicationContext?: string;\n};\n\n/**\n * Configuration for AI model based on provider\n */\nexport type AIModelConfig = {\n model: any; // Using any to handle different provider model types\n temperature?: number;\n};\n\n/**\n * Get AI model configuration based on the selected provider and options\n * This function handles the configuration for different AI providers\n *\n * @param options Configuration options including provider, API keys, models and temperature\n * @returns Configured AI model ready to use with generateText\n */\nexport const getAIConfig = async (\n options?: AIOptions\n): Promise<AIModelConfig | undefined> => {\n try {\n const {\n provider = AIProvider.OPENAI,\n model,\n temperature = 0.1,\n } = options ?? {};\n\n // Set default models based on provider\n let defaultModel: string;\n switch (provider) {\n case AIProvider.OPENAI:\n defaultModel = 'gpt-4o-mini';\n break;\n case AIProvider.ANTHROPIC:\n defaultModel = 'claude-3-haiku-20240307';\n break;\n case AIProvider.MISTRAL:\n defaultModel = 'mistral-large-latest';\n break;\n case AIProvider.DEEPSEEK:\n defaultModel = 'deepseek-coder';\n break;\n case AIProvider.GEMINI:\n defaultModel = 'gemini-1.5-pro';\n break;\n default:\n defaultModel = 'gpt-4o-mini';\n }\n\n // Check if API key is provided\n if (!options?.apiKey) {\n logger.error(`API key for ${provider} is missing`);\n return undefined;\n }\n\n // Handle each provider with appropriate model loading\n if (provider === AIProvider.OPENAI) {\n // OpenAI is imported statically at the top\n return {\n model: openai(model ?? defaultModel),\n temperature,\n };\n } else {\n // For other providers, attempt to load using require\n try {\n switch (provider) {\n case AIProvider.ANTHROPIC:\n try {\n return {\n model: anthropic(model ?? 
defaultModel),\n temperature,\n };\n } catch (err) {\n throw new Error(\n 'Failed to load @ai-sdk/anthropic. Please install it with: npm install @ai-sdk/anthropic'\n );\n }\n\n case AIProvider.MISTRAL:\n try {\n return {\n model: mistral(model ?? defaultModel),\n temperature,\n };\n } catch (err) {\n throw new Error(\n 'Failed to load @ai-sdk/mistral. Please install it with: npm install @ai-sdk/mistral'\n );\n }\n\n case AIProvider.DEEPSEEK:\n try {\n return {\n model: deepseek(model ?? defaultModel),\n temperature,\n };\n } catch (err) {\n throw new Error(\n 'Failed to load @ai-sdk/deepseek. Please install it with: npm install @ai-sdk/deepseek'\n );\n }\n\n case AIProvider.GEMINI:\n try {\n return {\n model: google(model ?? defaultModel),\n temperature,\n };\n } catch (err) {\n throw new Error(\n 'Failed to load @ai-sdk/google. Please install it with: npm install @ai-sdk/google'\n );\n }\n\n default:\n throw new Error(`Provider ${provider} not supported`);\n }\n } catch (error) {\n logger.error(`Error loading SDK for provider ${provider}:`, error);\n return undefined;\n }\n }\n } catch (error) {\n logger.error('Error configuring AI model:', error);\n return undefined;\n }\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,uBAA0B;AAC1B,sBAAyB;AACzB,oBAAuB;AACvB,qBAAwB;AACxB,oBAAuB;AACvB,oBAAuB;AA0DhB,IAAK,aAAL,kBAAKA,gBAAL;AACL,EAAAA,YAAA,YAAS;AACT,EAAAA,YAAA,eAAY;AACZ,EAAAA,YAAA,aAAU;AACV,EAAAA,YAAA,cAAW;AACX,EAAAA,YAAA,YAAS;AALC,SAAAA;AAAA,GAAA;
+
{"version":3,"sources":["../../../../src/utils/AI/aiSdk.ts"],"sourcesContent":["import { anthropic } from '@ai-sdk/anthropic';\nimport { deepseek } from '@ai-sdk/deepseek';\nimport { google } from '@ai-sdk/google';\nimport { mistral } from '@ai-sdk/mistral';\nimport { openai } from '@ai-sdk/openai';\nimport { logger } from '@logger';\n\n/**\n * Supported AI models\n */\nexport type Model =\n // OpenAI Models\n | 'gpt-4o-mini'\n | 'gpt-4o'\n | 'gpt-4.1'\n | 'gpt-4.1-mini'\n | 'gpt-4.1-nano'\n | 'gpt-4.5'\n | 'gpt-3.5-turbo'\n | 'gpt-4-turbo-preview'\n | 'gpt-4-vision-preview'\n | 'gpt-4o-audio-preview'\n | 'gpt-4o-mini-audio-preview'\n | 'o1-mini'\n | 'o1'\n | 'o1-pro'\n | 'o3-mini'\n | 'o3-mini-high'\n | 'o3'\n | 'o4-mini'\n | 'o4-mini-high'\n // Anthropic Models\n | 'claude-3-haiku-20240307'\n | 'claude-3-sonnet-20240229'\n | 'claude-3-opus-20240229'\n // Mistral Models\n | 'mistral-tiny'\n | 'mistral-small'\n | 'mistral-small-3.1'\n | 'mistral-medium'\n | 'mistral-medium-3'\n | 'mistral-large'\n | 'mistral-large-2'\n | 'mistral-large-latest'\n | 'codestral'\n | 'codestral-mamba'\n | 'mixtral-8x7b'\n | 'mixtral-8x22b'\n | 'mathstral-7b'\n | 'pixtral-large'\n // DeepSeek Models\n | 'deepseek-coder'\n | 'deepseek-chat'\n | 'deepseek-v3'\n // Google Models\n | 'gemini-1.0-pro'\n | 'gemini-1.5-pro'\n | 'gemini-1.5-flash'\n | (string & {});\n\n/**\n * Supported AI SDK providers\n */\nexport enum AIProvider {\n OPENAI = 'openai',\n ANTHROPIC = 'anthropic',\n MISTRAL = 'mistral',\n DEEPSEEK = 'deepseek',\n GEMINI = 'gemini',\n}\n\n/**\n * Common options for all AI providers\n */\nexport type AIOptions = {\n provider?: AIProvider;\n model?: Model;\n temperature?: number;\n apiKey?: string;\n customPrompt?: string;\n applicationContext?: string;\n};\n\n/**\n * Configuration for AI model based on provider\n */\nexport type AIModelConfig = {\n model: any; // Using any to handle different provider model types\n temperature?: number;\n};\n\n// Define the structure of messages used in chat completions\nexport type ChatCompletionRequestMessage = {\n role: 'system' | 'user' | 'assistant'; // The role of the message sender\n content: string; // The text content of the message\n timestamp?: Date; // The timestamp of the message\n};\n\n/**\n * Get AI model configuration based on the selected provider and options\n * This function handles the configuration for different AI providers\n *\n * @param options Configuration options including provider, API keys, models and temperature\n * @returns Configured AI model ready to use with generateText\n */\nexport const getAIConfig = async (\n options?: AIOptions\n): Promise<AIModelConfig | undefined> => {\n try {\n const {\n provider = AIProvider.OPENAI,\n model,\n temperature = 0.1,\n } = options ?? 
{};\n\n // Set default models based on provider\n let defaultModel: string;\n switch (provider) {\n case AIProvider.OPENAI:\n defaultModel = 'gpt-4o-mini';\n break;\n case AIProvider.ANTHROPIC:\n defaultModel = 'claude-3-haiku-20240307';\n break;\n case AIProvider.MISTRAL:\n defaultModel = 'mistral-large-latest';\n break;\n case AIProvider.DEEPSEEK:\n defaultModel = 'deepseek-coder';\n break;\n case AIProvider.GEMINI:\n defaultModel = 'gemini-1.5-pro';\n break;\n default:\n defaultModel = 'gpt-4o-mini';\n }\n\n // Check if API key is provided\n if (!options?.apiKey) {\n logger.error(`API key for ${provider} is missing`);\n return undefined;\n }\n\n // Handle each provider with appropriate model loading\n if (provider === AIProvider.OPENAI) {\n // OpenAI is imported statically at the top\n return {\n model: openai(model ?? defaultModel),\n temperature,\n };\n } else {\n // For other providers, attempt to load using require\n try {\n switch (provider) {\n case AIProvider.ANTHROPIC:\n try {\n return {\n model: anthropic(model ?? defaultModel),\n temperature,\n };\n } catch (err) {\n throw new Error(\n 'Failed to load @ai-sdk/anthropic. Please install it with: npm install @ai-sdk/anthropic'\n );\n }\n\n case AIProvider.MISTRAL:\n try {\n return {\n model: mistral(model ?? defaultModel),\n temperature,\n };\n } catch (err) {\n throw new Error(\n 'Failed to load @ai-sdk/mistral. Please install it with: npm install @ai-sdk/mistral'\n );\n }\n\n case AIProvider.DEEPSEEK:\n try {\n return {\n model: deepseek(model ?? defaultModel),\n temperature,\n };\n } catch (err) {\n throw new Error(\n 'Failed to load @ai-sdk/deepseek. Please install it with: npm install @ai-sdk/deepseek'\n );\n }\n\n case AIProvider.GEMINI:\n try {\n return {\n model: google(model ?? defaultModel),\n temperature,\n };\n } catch (err) {\n throw new Error(\n 'Failed to load @ai-sdk/google. 
Please install it with: npm install @ai-sdk/google'\n );\n }\n\n default:\n throw new Error(`Provider ${provider} not supported`);\n }\n } catch (error) {\n logger.error(`Error loading SDK for provider ${provider}:`, error);\n return undefined;\n }\n }\n } catch (error) {\n logger.error('Error configuring AI model:', error);\n return undefined;\n }\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,uBAA0B;AAC1B,sBAAyB;AACzB,oBAAuB;AACvB,qBAAwB;AACxB,oBAAuB;AACvB,oBAAuB;AA0DhB,IAAK,aAAL,kBAAKA,gBAAL;AACL,EAAAA,YAAA,YAAS;AACT,EAAAA,YAAA,eAAY;AACZ,EAAAA,YAAA,aAAU;AACV,EAAAA,YAAA,cAAW;AACX,EAAAA,YAAA,YAAS;AALC,SAAAA;AAAA,GAAA;AA0CL,MAAM,cAAc,OACzB,YACuC;AACvC,MAAI;AACF,UAAM;AAAA,MACJ,WAAW;AAAA,MACX;AAAA,MACA,cAAc;AAAA,IAChB,IAAI,WAAW,CAAC;AAGhB,QAAI;AACJ,YAAQ,UAAU;AAAA,MAChB,KAAK;AACH,uBAAe;AACf;AAAA,MACF,KAAK;AACH,uBAAe;AACf;AAAA,MACF,KAAK;AACH,uBAAe;AACf;AAAA,MACF,KAAK;AACH,uBAAe;AACf;AAAA,MACF,KAAK;AACH,uBAAe;AACf;AAAA,MACF;AACE,uBAAe;AAAA,IACnB;AAGA,QAAI,CAAC,SAAS,QAAQ;AACpB,2BAAO,MAAM,eAAe,QAAQ,aAAa;AACjD,aAAO;AAAA,IACT;AAGA,QAAI,aAAa,uBAAmB;AAElC,aAAO;AAAA,QACL,WAAO,sBAAO,SAAS,YAAY;AAAA,QACnC;AAAA,MACF;AAAA,IACF,OAAO;AAEL,UAAI;AACF,gBAAQ,UAAU;AAAA,UAChB,KAAK;AACH,gBAAI;AACF,qBAAO;AAAA,gBACL,WAAO,4BAAU,SAAS,YAAY;AAAA,gBACtC;AAAA,cACF;AAAA,YACF,SAAS,KAAK;AACZ,oBAAM,IAAI;AAAA,gBACR;AAAA,cACF;AAAA,YACF;AAAA,UAEF,KAAK;AACH,gBAAI;AACF,qBAAO;AAAA,gBACL,WAAO,wBAAQ,SAAS,YAAY;AAAA,gBACpC;AAAA,cACF;AAAA,YACF,SAAS,KAAK;AACZ,oBAAM,IAAI;AAAA,gBACR;AAAA,cACF;AAAA,YACF;AAAA,UAEF,KAAK;AACH,gBAAI;AACF,qBAAO;AAAA,gBACL,WAAO,0BAAS,SAAS,YAAY;AAAA,gBACrC;AAAA,cACF;AAAA,YACF,SAAS,KAAK;AACZ,oBAAM,IAAI;AAAA,gBACR;AAAA,cACF;AAAA,YACF;AAAA,UAEF,KAAK;AACH,gBAAI;AACF,qBAAO;AAAA,gBACL,WAAO,sBAAO,SAAS,YAAY;AAAA,gBACnC;AAAA,cACF;AAAA,YACF,SAAS,KAAK;AACZ,oBAAM,IAAI;AAAA,gBACR;AAAA,cACF;AAAA,YACF;AAAA,UAEF;AACE,kBAAM,IAAI,MAAM,YAAY,QAAQ,gBAAgB;AAAA,QACxD;AAAA,MACF,SAAS,OAAO;AACd,6BAAO,MAAM,kCAAkC,QAAQ,KAAK,KAAK;AACjE,eAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,yBAAO,MAAM,+BAA+B,KAAK;AACjD,WAAO;AAAA,EACT;AACF;","names":["AIProvider"]}
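The substantive change in the regenerated `aiSdk` sourcemap is in its embedded source: the `ChatCompletionRequestMessage` type is now declared and exported from `aiSdk.ts` (it previously lived in `askDocQuestion.ts`; the import change shows up in the `askDocQuestion` hunks below). Verbatim from the new map's `sourcesContent`:

```ts
// Define the structure of messages used in chat completions
export type ChatCompletionRequestMessage = {
  role: 'system' | 'user' | 'assistant'; // The role of the message sender
  content: string; // The text content of the message
  timestamp?: Date; // The timestamp of the message
};
```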
@@ -147,13 +147,13 @@ const indexMarkdownFiles = async () => {
   }
 };
 indexMarkdownFiles();
-const searchChunkReference = async (query) => {
+const searchChunkReference = async (query, maxResults = MAX_RELEVANT_CHUNKS_NB, minSimilarity = MIN_RELEVANT_CHUNKS_SIMILARITY) => {
   const queryEmbedding = await generateEmbedding(query);
   const results = vectorStore.map((chunk) => ({
     ...chunk,
     similarity: cosineSimilarity(queryEmbedding, chunk.embedding)
     // Add similarity score to each doc
-  })).filter((chunk) => chunk.similarity > MIN_RELEVANT_CHUNKS_SIMILARITY).sort((a, b) => b.similarity - a.similarity).slice(0, MAX_RELEVANT_CHUNKS_NB);
+  })).filter((chunk) => chunk.similarity > minSimilarity).sort((a, b) => b.similarity - a.similarity).slice(0, maxResults);
   return results;
 };
 const getFileContent = (relativeFilePath) => {
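Per the TypeScript source embedded in the sourcemap hunk below, the new signature is `searchChunkReference(query, maxResults = MAX_RELEVANT_CHUNKS_NB, minSimilarity = MIN_RELEVANT_CHUNKS_SIMILARITY)`, so the default behavior (top 8 chunks with cosine similarity above 0.25) is unchanged for existing callers. A usage sketch with illustrative overrides:

```ts
import { searchChunkReference } from '@utils/AI/askDocQuestion/askDocQuestion';

// Hypothetical query; 5 and 0.3 override the defaults of 8 and 0.25.
const topChunks = await searchChunkReference(
  'How do I declare per-locale content files?', // illustrative user query
  5, // maxResults: return at most 5 chunks
  0.3 // minSimilarity: keep only chunks scoring above 0.3
);
```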
@@ -1 +1 @@
-
{"version":3,"sources":["../../../../../src/utils/AI/askDocQuestion/askDocQuestion.ts"],"sourcesContent":["import { getBlogs } from '@intlayer/blog';\nimport { Locales } from '@intlayer/config';\nimport { getDocs, getFequentQuestions } from '@intlayer/docs';\nimport { streamText } from 'ai';\nimport dotenv from 'dotenv';\nimport fs, { readFileSync } from 'fs';\nimport { OpenAI } from 'openai';\nimport { dirname, join } from 'path';\nimport { fileURLToPath } from 'url';\nimport { AIProvider, getAIConfig } from '../aiSdk';\nimport embeddingsList from './embeddings.json' with { type: 'json' };\n\ntype VectorStoreEl = {\n fileKey: string;\n chunkNumber: number;\n content: string;\n embedding: number[];\n};\n\n/**\n * Simple in-memory vector store to hold document embeddings and their content.\n * Each entry contains:\n * - fileKey: A unique key identifying the file\n * - chunkNumber: The number of the chunk within the document\n * - content: The chunk content\n * - embedding: The numerical embedding vector for the chunk\n */\nconst vectorStore: VectorStoreEl[] = [];\n\n// Constants defining model and settings\nconst MODEL = 'gpt-4o-2024-11-20'; // Model to use for chat completions\nconst MODEL_TEMPERATURE = 0.1; // Temperature to use for chat completions\nconst EMBEDDING_MODEL = 'text-embedding-3-large'; // Model to use for embedding generation\nconst OVERLAP_TOKENS = 200; // Number of tokens to overlap between chunks\nconst MAX_CHUNK_TOKENS = 800; // Maximum number of tokens per chunk\nconst CHAR_BY_TOKEN = 4.15; // Approximate pessimistically the number of characters per token // Can use `tiktoken` or other tokenizers to calculate it more precisely\nconst MAX_CHARS = MAX_CHUNK_TOKENS * CHAR_BY_TOKEN;\nconst OVERLAP_CHARS = OVERLAP_TOKENS * CHAR_BY_TOKEN;\nconst MAX_RELEVANT_CHUNKS_NB = 8; // Maximum number of relevant chunks to attach to chatGPT context\nconst MIN_RELEVANT_CHUNKS_SIMILARITY = 0.25; // Minimum similarity required for a chunk to be considered relevant\n\n/**\n * Splits a given text into chunks ensuring each chunk does not exceed MAX_CHARS.\n * @param text - The input text to split.\n * @returns - Array of text chunks.\n */\nconst chunkText = (text: string): string[] => {\n const chunks: string[] = [];\n let start = 0;\n\n while (start < text.length) {\n let end = Math.min(start + MAX_CHARS, text.length);\n\n // Ensure we don't cut words in the middle (find nearest space)\n if (end < text.length) {\n const lastSpace = text.lastIndexOf(' ', end);\n if (lastSpace > start) {\n end = lastSpace;\n }\n }\n\n chunks.push(text.substring(start, end));\n\n // Move start forward correctly\n const nextStart = end - OVERLAP_CHARS;\n if (nextStart <= start) {\n // Prevent infinite loop if overlap is too large\n start = end;\n } else {\n start = nextStart;\n }\n }\n\n return chunks;\n};\n\n/**\n * Generates an embedding for a given text using OpenAI's embedding API.\n * Trims the text if it exceeds the maximum allowed characters.\n *\n * @param text - The input text to generate an embedding for\n * @returns The embedding vector as a number array\n */\nconst generateEmbedding = async (text: string): Promise<number[]> => {\n try {\n // Set API key through the SDK configuration\n await getAIConfig({\n provider: AIProvider.OPENAI,\n apiKey: process.env.OPENAI_API_KEY,\n });\n\n const openaiClient = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });\n\n const response = await openaiClient.embeddings.create({\n model: EMBEDDING_MODEL,\n input: text,\n });\n\n return response.data[0].embedding;\n } 
catch (error) {\n console.error('Error generating embedding:', error);\n return [];\n }\n};\n\n/**\n * Calculates the cosine similarity between two vectors.\n * Cosine similarity measures the cosine of the angle between two vectors in an inner product space.\n * Used to determine the similarity between chunks of text.\n *\n * @param vecA - The first vector\n * @param vecB - The second vector\n * @returns The cosine similarity score\n */\nconst cosineSimilarity = (vecA: number[], vecB: number[]): number => {\n // Calculate the dot product of the two vectors\n const dotProduct = vecA.reduce((sum, a, idx) => sum + a * vecB[idx], 0);\n\n // Calculate the magnitude (Euclidean norm) of each vector\n const magnitudeA = Math.sqrt(vecA.reduce((sum, a) => sum + a * a, 0));\n const magnitudeB = Math.sqrt(vecB.reduce((sum, b) => sum + b * b, 0));\n\n // Compute and return the cosine similarity\n return dotProduct / (magnitudeA * magnitudeB);\n};\n\n/**\n * Indexes all Markdown documents by generating embeddings for each chunk and storing them in memory.\n * Also updates the embeddings.json file if new embeddings are generated.\n */\nexport const indexMarkdownFiles = async (): Promise<void> => {\n const env = process.env.NODE_ENV;\n dotenv.config({\n path: [`.env.${env}.local`, `.env.${env}`, '.env.local', '.env'],\n });\n\n // Retrieve documentation and blog posts in English locale\n const frequentQuestions = getFequentQuestions();\n const docs = getDocs(Locales.ENGLISH);\n const blogs = getBlogs(Locales.ENGLISH);\n\n let result: Record<string, number[]> = {}; // Object to hold updated embeddings\n\n const files = { ...docs, ...blogs, ...frequentQuestions }; // Combine docs and blogs into a single object\n\n // Iterate over each file key (identifier) in the combined files\n for (const fileKey of Object.keys(files)) {\n // Split the document into chunks based on headings\n const fileChunks = chunkText(files[fileKey as keyof typeof files]);\n\n // Iterate over each chunk within the current file\n for (const chunkIndex of Object.keys(fileChunks)) {\n const chunkNumber = Number(chunkIndex) + 1; // Chunk number starts at 1\n const chunksNumber = fileChunks.length;\n\n const fileChunk = fileChunks[\n chunkIndex as keyof typeof fileChunks\n ] as string;\n\n const embeddingKeyName = `${fileKey}/chunk_${chunkNumber}`; // Unique key for the chunk\n\n // Retrieve precomputed embedding if available\n const docEmbedding = embeddingsList[\n embeddingKeyName as keyof typeof embeddingsList\n ] as number[] | undefined;\n\n let embedding = docEmbedding; // Use existing embedding if available\n\n if (!embedding) {\n embedding = await generateEmbedding(fileChunk); // Generate embedding if not present\n }\n\n // Update the result object with the new embedding\n result = { ...result, [embeddingKeyName]: embedding };\n\n // Store the embedding and content in the in-memory vector store\n vectorStore.push({\n fileKey,\n chunkNumber,\n embedding,\n content: fileChunk,\n });\n\n console.info(`- Indexed: ${embeddingKeyName}/${chunksNumber}`);\n }\n }\n\n if (process.env.NODE_ENV === 'development') {\n try {\n // Compare the newly generated embeddings with existing ones\n if (JSON.stringify(result) !== JSON.stringify(embeddingsList)) {\n // If there are new embeddings, save them to embeddings.json\n fs.writeFileSync(\n 'src/utils/AI/askDocQuestion/embeddings.json',\n JSON.stringify(result, null, 2)\n );\n }\n } catch (error) {\n console.error(error); // Log any errors during the file write process\n }\n }\n};\n\n// Automatically 
index Markdown files\nindexMarkdownFiles();\n\n/**\n * Searches the indexed documents for the most relevant chunks based on a query.\n * Utilizes cosine similarity to find the closest matching embeddings.\n *\n * @param query - The search query provided by the user\n * @returns An array of the top matching document chunks' content\n */\nexport const searchChunkReference = async (\n query: string\n): Promise<VectorStoreEl[]> => {\n // Generate an embedding for the user's query\n const queryEmbedding = await generateEmbedding(query);\n\n // Calculate similarity scores between the query embedding and each document's embedding\n const results = vectorStore\n .map((chunk) => ({\n ...chunk,\n similarity: cosineSimilarity(queryEmbedding, chunk.embedding), // Add similarity score to each doc\n }))\n .filter((chunk) => chunk.similarity > MIN_RELEVANT_CHUNKS_SIMILARITY) // Filter out documents with low similarity scores\n .sort((a, b) => b.similarity - a.similarity) // Sort documents by highest similarity first\n .slice(0, MAX_RELEVANT_CHUNKS_NB); // Select the top 6 most similar documents\n\n // Return the content of the top matching documents\n return results;\n};\n\n// Define the structure of messages used in chat completions\nexport type ChatCompletionRequestMessage = {\n role: 'system' | 'user' | 'assistant'; // The role of the message sender\n content: string; // The text content of the message\n timestamp?: Date; // The timestamp of the message\n};\n\n/**\n * Reads the content of a file synchronously.\n *\n * @function\n * @param relativeFilePath - The relative or absolute path to the target file.\n * @returns The entire contents of the specified file as a UTF-8 encoded string.\n */\nconst getFileContent = (relativeFilePath: string): string => {\n const __dirname = dirname(fileURLToPath(import.meta.url));\n const absolutePath = join(__dirname, relativeFilePath);\n const fileContent = readFileSync(absolutePath, 'utf-8');\n return fileContent;\n};\n\nconst CHAT_GPT_PROMPT = getFileContent('./PROMPT.md');\n\n// Initial prompt configuration for the chatbot\nexport const initPrompt: ChatCompletionRequestMessage = {\n role: 'system',\n content: CHAT_GPT_PROMPT,\n};\n\nexport type AskDocQuestionResult = {\n response: string;\n relatedFiles: string[];\n};\n\nexport type AskDocQuestionOptions = {\n onMessage?: (chunk: string) => void;\n};\n\n/**\n * Handles the \"Ask a question\" endpoint in an Express.js route.\n * Processes user messages, retrieves relevant documents, and interacts with AI models to generate responses.\n *\n * @param messages - An array of chat messages from the user and assistant\n * @returns The assistant's response as a string\n */\nexport const askDocQuestion = async (\n messages: ChatCompletionRequestMessage[],\n options?: AskDocQuestionOptions\n): Promise<AskDocQuestionResult> => {\n // Format the user's question to keep only the relevant keywords\n const query = messages.map((message) => `- ${message.content}`).join('\\n');\n\n // 1) Find relevant documents based on the user's question\n const relevantFilesReferences = await searchChunkReference(query);\n\n // 2) Integrate the relevant documents into the initial system prompt\n const systemPrompt = initPrompt.content.replace(\n '{{relevantFilesReferences}}',\n relevantFilesReferences.length === 0\n ? 
'Not relevant file found related to the question.'\n : relevantFilesReferences\n .map(\n (doc, idx) =>\n `[Chunk ${idx}] docKey = \"${doc.fileKey}\":\\n${doc.content}`\n )\n .join('\\n\\n') // Insert relevant docs into the prompt\n );\n\n // Format messages for AI SDK\n const aiMessages = [\n { role: 'system' as const, content: systemPrompt },\n ...messages,\n ];\n\n // Get AI configuration\n const aiConfig = await getAIConfig({\n provider: AIProvider.OPENAI,\n model: MODEL,\n temperature: MODEL_TEMPERATURE,\n apiKey: process.env.OPENAI_API_KEY!,\n });\n\n if (!aiConfig) {\n throw new Error('Failed to initialize AI configuration');\n }\n\n // 3) Use the AI SDK to stream the response\n let fullResponse = '';\n const stream = streamText({\n model: aiConfig.model,\n temperature: aiConfig.temperature,\n messages: aiMessages,\n });\n\n // Process the stream\n for await (const chunk of stream.textStream) {\n fullResponse += chunk;\n options?.onMessage?.(chunk);\n }\n\n // 4) Extract unique related files\n const relatedFiles = [\n ...new Set(relevantFilesReferences.map((doc) => doc.fileKey)),\n ];\n\n // 5) Return the assistant's response to the user\n return {\n response: fullResponse ?? 'Error: No result found',\n relatedFiles,\n };\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAyB;AACzB,oBAAwB;AACxB,kBAA6C;AAC7C,gBAA2B;AAC3B,oBAAmB;AACnB,gBAAiC;AACjC,oBAAuB;AACvB,kBAA8B;AAC9B,iBAA8B;AAC9B,mBAAwC;AACxC,wBAA2B;AAV3B;AA2BA,MAAM,cAA+B,CAAC;AAGtC,MAAM,QAAQ;AACd,MAAM,oBAAoB;AAC1B,MAAM,kBAAkB;AACxB,MAAM,iBAAiB;AACvB,MAAM,mBAAmB;AACzB,MAAM,gBAAgB;AACtB,MAAM,YAAY,mBAAmB;AACrC,MAAM,gBAAgB,iBAAiB;AACvC,MAAM,yBAAyB;AAC/B,MAAM,iCAAiC;AAOvC,MAAM,YAAY,CAAC,SAA2B;AAC5C,QAAM,SAAmB,CAAC;AAC1B,MAAI,QAAQ;AAEZ,SAAO,QAAQ,KAAK,QAAQ;AAC1B,QAAI,MAAM,KAAK,IAAI,QAAQ,WAAW,KAAK,MAAM;AAGjD,QAAI,MAAM,KAAK,QAAQ;AACrB,YAAM,YAAY,KAAK,YAAY,KAAK,GAAG;AAC3C,UAAI,YAAY,OAAO;AACrB,cAAM;AAAA,MACR;AAAA,IACF;AAEA,WAAO,KAAK,KAAK,UAAU,OAAO,GAAG,CAAC;AAGtC,UAAM,YAAY,MAAM;AACxB,QAAI,aAAa,OAAO;AAEtB,cAAQ;AAAA,IACV,OAAO;AACL,cAAQ;AAAA,IACV;AAAA,EACF;AAEA,SAAO;AACT;AASA,MAAM,oBAAoB,OAAO,SAAoC;AACnE,MAAI;AAEF,cAAM,0BAAY;AAAA,MAChB,UAAU,wBAAW;AAAA,MACrB,QAAQ,QAAQ,IAAI;AAAA,IACtB,CAAC;AAED,UAAM,eAAe,IAAI,qBAAO,EAAE,QAAQ,QAAQ,IAAI,eAAe,CAAC;AAEtE,UAAM,WAAW,MAAM,aAAa,WAAW,OAAO;AAAA,MACpD,OAAO;AAAA,MACP,OAAO;AAAA,IACT,CAAC;AAED,WAAO,SAAS,KAAK,CAAC,EAAE;AAAA,EAC1B,SAAS,OAAO;AACd,YAAQ,MAAM,+BAA+B,KAAK;AAClD,WAAO,CAAC;AAAA,EACV;AACF;AAWA,MAAM,mBAAmB,CAAC,MAAgB,SAA2B;AAEnE,QAAM,aAAa,KAAK,OAAO,CAAC,KAAK,GAAG,QAAQ,MAAM,IAAI,KAAK,GAAG,GAAG,CAAC;AAGtE,QAAM,aAAa,KAAK,KAAK,KAAK,OAAO,CAAC,KAAK,MAAM,MAAM,IAAI,GAAG,CAAC,CAAC;AACpE,QAAM,aAAa,KAAK,KAAK,KAAK,OAAO,CAAC,KAAK,MAAM,MAAM,IAAI,GAAG,CAAC,CAAC;AAGpE,SAAO,cAAc,aAAa;AACpC;AAMO,MAAM,qBAAqB,YAA2B;AAC3D,QAAM,MAAM,QAAQ,IAAI;AACxB,gBAAAA,QAAO,OAAO;AAAA,IACZ,MAAM,CAAC,QAAQ,GAAG,UAAU,QAAQ,GAAG,IAAI,cAAc,MAAM;AAAA,EACjE,CAAC;AAGD,QAAM,wBAAoB,iCAAoB;AAC9C,QAAM,WAAO,qBAAQ,sBAAQ,OAAO;AACpC,QAAM,YAAQ,sBAAS,sBAAQ,OAAO;AAEtC,MAAI,SAAmC,CAAC;AAExC,QAAM,QAAQ,EAAE,GAAG,MAAM,GAAG,OAAO,GAAG,kBAAkB;AAGxD,aAAW,WAAW,OAAO,KAAK,KAAK,GAAG;AAExC,UAAM,aAAa,UAAU,MAAM,OAA6B,CAAC;AAGjE,eAAW,cAAc,OAAO,KAAK,UAAU,GAAG;AAChD,YAAM,cAAc,OAAO,UAAU,IAAI;AACzC,YAAM,eAAe,WAAW;AAEhC,YAAM,YAAY,WAChB,UACF;AAEA,YAAM,mBAAmB,GAAG,OAAO,UAAU,WAAW;AAGxD,YAAM,eAAe,kBAAAC,QACnB,gBACF;AAEA,UAAI,YAAY;AAEhB,UAAI,CAAC,WAAW;AACd,oBAAY,MAAM,kBAAkB,SAAS;AAAA,MAC/C;AAGA,eAAS,EAAE,GAAG,QAAQ,CAAC,gBAAgB,GAAG,UAAU;AAGpD,kBAAY,KAAK;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,QACA,SAAS;AAAA,MACX,CAAC;AAED,cAAQ,KAAK,cAAc,gBAAgB,IAA
I,YAAY,EAAE;AAAA,IAC/D;AAAA,EACF;AAEA,MAAI,QAAQ,IAAI,aAAa,eAAe;AAC1C,QAAI;AAEF,UAAI,KAAK,UAAU,MAAM,MAAM,KAAK,UAAU,kBAAAA,OAAc,GAAG;AAE7D,kBAAAC,QAAG;AAAA,UACD;AAAA,UACA,KAAK,UAAU,QAAQ,MAAM,CAAC;AAAA,QAChC;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AACd,cAAQ,MAAM,KAAK;AAAA,IACrB;AAAA,EACF;AACF;AAGA,mBAAmB;AASZ,MAAM,uBAAuB,OAClC,UAC6B;AAE7B,QAAM,iBAAiB,MAAM,kBAAkB,KAAK;AAGpD,QAAM,UAAU,YACb,IAAI,CAAC,WAAW;AAAA,IACf,GAAG;AAAA,IACH,YAAY,iBAAiB,gBAAgB,MAAM,SAAS;AAAA;AAAA,EAC9D,EAAE,EACD,OAAO,CAAC,UAAU,MAAM,aAAa,8BAA8B,EACnE,KAAK,CAAC,GAAG,MAAM,EAAE,aAAa,EAAE,UAAU,EAC1C,MAAM,GAAG,sBAAsB;AAGlC,SAAO;AACT;AAgBA,MAAM,iBAAiB,CAAC,qBAAqC;AAC3D,QAAM,gBAAY,yBAAQ,0BAAc,YAAY,GAAG,CAAC;AACxD,QAAM,mBAAe,kBAAK,WAAW,gBAAgB;AACrD,QAAM,kBAAc,wBAAa,cAAc,OAAO;AACtD,SAAO;AACT;AAEA,MAAM,kBAAkB,eAAe,aAAa;AAG7C,MAAM,aAA2C;AAAA,EACtD,MAAM;AAAA,EACN,SAAS;AACX;AAkBO,MAAM,iBAAiB,OAC5B,UACA,YACkC;AAElC,QAAM,QAAQ,SAAS,IAAI,CAAC,YAAY,KAAK,QAAQ,OAAO,EAAE,EAAE,KAAK,IAAI;AAGzE,QAAM,0BAA0B,MAAM,qBAAqB,KAAK;AAGhE,QAAM,eAAe,WAAW,QAAQ;AAAA,IACtC;AAAA,IACA,wBAAwB,WAAW,IAC/B,qDACA,wBACG;AAAA,MACC,CAAC,KAAK,QACJ,UAAU,GAAG,eAAe,IAAI,OAAO;AAAA,EAAO,IAAI,OAAO;AAAA,IAC7D,EACC,KAAK,MAAM;AAAA;AAAA,EACpB;AAGA,QAAM,aAAa;AAAA,IACjB,EAAE,MAAM,UAAmB,SAAS,aAAa;AAAA,IACjD,GAAG;AAAA,EACL;AAGA,QAAM,WAAW,UAAM,0BAAY;AAAA,IACjC,UAAU,wBAAW;AAAA,IACrB,OAAO;AAAA,IACP,aAAa;AAAA,IACb,QAAQ,QAAQ,IAAI;AAAA,EACtB,CAAC;AAED,MAAI,CAAC,UAAU;AACb,UAAM,IAAI,MAAM,uCAAuC;AAAA,EACzD;AAGA,MAAI,eAAe;AACnB,QAAM,aAAS,sBAAW;AAAA,IACxB,OAAO,SAAS;AAAA,IAChB,aAAa,SAAS;AAAA,IACtB,UAAU;AAAA,EACZ,CAAC;AAGD,mBAAiB,SAAS,OAAO,YAAY;AAC3C,oBAAgB;AAChB,aAAS,YAAY,KAAK;AAAA,EAC5B;AAGA,QAAM,eAAe;AAAA,IACnB,GAAG,IAAI,IAAI,wBAAwB,IAAI,CAAC,QAAQ,IAAI,OAAO,CAAC;AAAA,EAC9D;AAGA,SAAO;AAAA,IACL,UAAU,gBAAgB;AAAA,IAC1B;AAAA,EACF;AACF;","names":["dotenv","embeddingsList","fs"]}
+
{"version":3,"sources":["../../../../../src/utils/AI/askDocQuestion/askDocQuestion.ts"],"sourcesContent":["import { getBlogs } from '@intlayer/blog';\nimport { Locales } from '@intlayer/config';\nimport { getDocs, getFequentQuestions } from '@intlayer/docs';\nimport { streamText } from 'ai';\nimport dotenv from 'dotenv';\nimport fs, { readFileSync } from 'fs';\nimport { OpenAI } from 'openai';\nimport { dirname, join } from 'path';\nimport { fileURLToPath } from 'url';\nimport {\n AIProvider,\n ChatCompletionRequestMessage,\n getAIConfig,\n} from '../aiSdk';\nimport embeddingsList from './embeddings.json' with { type: 'json' };\n\ntype VectorStoreEl = {\n fileKey: string;\n chunkNumber: number;\n content: string;\n embedding: number[];\n};\n\n/**\n * Simple in-memory vector store to hold document embeddings and their content.\n * Each entry contains:\n * - fileKey: A unique key identifying the file\n * - chunkNumber: The number of the chunk within the document\n * - content: The chunk content\n * - embedding: The numerical embedding vector for the chunk\n */\nconst vectorStore: VectorStoreEl[] = [];\n\n// Constants defining model and settings\nconst MODEL = 'gpt-4o-2024-11-20'; // Model to use for chat completions\nconst MODEL_TEMPERATURE = 0.1; // Temperature to use for chat completions\nconst EMBEDDING_MODEL = 'text-embedding-3-large'; // Model to use for embedding generation\nconst OVERLAP_TOKENS = 200; // Number of tokens to overlap between chunks\nconst MAX_CHUNK_TOKENS = 800; // Maximum number of tokens per chunk\nconst CHAR_BY_TOKEN = 4.15; // Approximate pessimistically the number of characters per token // Can use `tiktoken` or other tokenizers to calculate it more precisely\nconst MAX_CHARS = MAX_CHUNK_TOKENS * CHAR_BY_TOKEN;\nconst OVERLAP_CHARS = OVERLAP_TOKENS * CHAR_BY_TOKEN;\nconst MAX_RELEVANT_CHUNKS_NB = 8; // Maximum number of relevant chunks to attach to chatGPT context\nconst MIN_RELEVANT_CHUNKS_SIMILARITY = 0.25; // Minimum similarity required for a chunk to be considered relevant\n\n/**\n * Splits a given text into chunks ensuring each chunk does not exceed MAX_CHARS.\n * @param text - The input text to split.\n * @returns - Array of text chunks.\n */\nconst chunkText = (text: string): string[] => {\n const chunks: string[] = [];\n let start = 0;\n\n while (start < text.length) {\n let end = Math.min(start + MAX_CHARS, text.length);\n\n // Ensure we don't cut words in the middle (find nearest space)\n if (end < text.length) {\n const lastSpace = text.lastIndexOf(' ', end);\n if (lastSpace > start) {\n end = lastSpace;\n }\n }\n\n chunks.push(text.substring(start, end));\n\n // Move start forward correctly\n const nextStart = end - OVERLAP_CHARS;\n if (nextStart <= start) {\n // Prevent infinite loop if overlap is too large\n start = end;\n } else {\n start = nextStart;\n }\n }\n\n return chunks;\n};\n\n/**\n * Generates an embedding for a given text using OpenAI's embedding API.\n * Trims the text if it exceeds the maximum allowed characters.\n *\n * @param text - The input text to generate an embedding for\n * @returns The embedding vector as a number array\n */\nconst generateEmbedding = async (text: string): Promise<number[]> => {\n try {\n // Set API key through the SDK configuration\n await getAIConfig({\n provider: AIProvider.OPENAI,\n apiKey: process.env.OPENAI_API_KEY,\n });\n\n const openaiClient = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });\n\n const response = await openaiClient.embeddings.create({\n model: EMBEDDING_MODEL,\n input: text,\n });\n\n 
return response.data[0].embedding;\n } catch (error) {\n console.error('Error generating embedding:', error);\n return [];\n }\n};\n\n/**\n * Calculates the cosine similarity between two vectors.\n * Cosine similarity measures the cosine of the angle between two vectors in an inner product space.\n * Used to determine the similarity between chunks of text.\n *\n * @param vecA - The first vector\n * @param vecB - The second vector\n * @returns The cosine similarity score\n */\nconst cosineSimilarity = (vecA: number[], vecB: number[]): number => {\n // Calculate the dot product of the two vectors\n const dotProduct = vecA.reduce((sum, a, idx) => sum + a * vecB[idx], 0);\n\n // Calculate the magnitude (Euclidean norm) of each vector\n const magnitudeA = Math.sqrt(vecA.reduce((sum, a) => sum + a * a, 0));\n const magnitudeB = Math.sqrt(vecB.reduce((sum, b) => sum + b * b, 0));\n\n // Compute and return the cosine similarity\n return dotProduct / (magnitudeA * magnitudeB);\n};\n\n/**\n * Indexes all Markdown documents by generating embeddings for each chunk and storing them in memory.\n * Also updates the embeddings.json file if new embeddings are generated.\n */\nexport const indexMarkdownFiles = async (): Promise<void> => {\n const env = process.env.NODE_ENV;\n dotenv.config({\n path: [`.env.${env}.local`, `.env.${env}`, '.env.local', '.env'],\n });\n\n // Retrieve documentation and blog posts in English locale\n const frequentQuestions = getFequentQuestions();\n const docs = getDocs(Locales.ENGLISH);\n const blogs = getBlogs(Locales.ENGLISH);\n\n let result: Record<string, number[]> = {}; // Object to hold updated embeddings\n\n const files = { ...docs, ...blogs, ...frequentQuestions }; // Combine docs and blogs into a single object\n\n // Iterate over each file key (identifier) in the combined files\n for (const fileKey of Object.keys(files)) {\n // Split the document into chunks based on headings\n const fileChunks = chunkText(files[fileKey as keyof typeof files]);\n\n // Iterate over each chunk within the current file\n for (const chunkIndex of Object.keys(fileChunks)) {\n const chunkNumber = Number(chunkIndex) + 1; // Chunk number starts at 1\n const chunksNumber = fileChunks.length;\n\n const fileChunk = fileChunks[\n chunkIndex as keyof typeof fileChunks\n ] as string;\n\n const embeddingKeyName = `${fileKey}/chunk_${chunkNumber}`; // Unique key for the chunk\n\n // Retrieve precomputed embedding if available\n const docEmbedding = embeddingsList[\n embeddingKeyName as keyof typeof embeddingsList\n ] as number[] | undefined;\n\n let embedding = docEmbedding; // Use existing embedding if available\n\n if (!embedding) {\n embedding = await generateEmbedding(fileChunk); // Generate embedding if not present\n }\n\n // Update the result object with the new embedding\n result = { ...result, [embeddingKeyName]: embedding };\n\n // Store the embedding and content in the in-memory vector store\n vectorStore.push({\n fileKey,\n chunkNumber,\n embedding,\n content: fileChunk,\n });\n\n console.info(`- Indexed: ${embeddingKeyName}/${chunksNumber}`);\n }\n }\n\n if (process.env.NODE_ENV === 'development') {\n try {\n // Compare the newly generated embeddings with existing ones\n if (JSON.stringify(result) !== JSON.stringify(embeddingsList)) {\n // If there are new embeddings, save them to embeddings.json\n fs.writeFileSync(\n 'src/utils/AI/askDocQuestion/embeddings.json',\n JSON.stringify(result, null, 2)\n );\n }\n } catch (error) {\n console.error(error); // Log any errors during the file write 
process\n }\n }\n};\n\n// Automatically index Markdown files\nindexMarkdownFiles();\n\n/**\n * Searches the indexed documents for the most relevant chunks based on a query.\n * Utilizes cosine similarity to find the closest matching embeddings.\n *\n * @param query - The search query provided by the user\n * @returns An array of the top matching document chunks' content\n */\nexport const searchChunkReference = async (\n query: string,\n maxResults: number = MAX_RELEVANT_CHUNKS_NB,\n minSimilarity: number = MIN_RELEVANT_CHUNKS_SIMILARITY\n): Promise<VectorStoreEl[]> => {\n // Generate an embedding for the user's query\n const queryEmbedding = await generateEmbedding(query);\n\n // Calculate similarity scores between the query embedding and each document's embedding\n const results = vectorStore\n .map((chunk) => ({\n ...chunk,\n similarity: cosineSimilarity(queryEmbedding, chunk.embedding), // Add similarity score to each doc\n }))\n .filter((chunk) => chunk.similarity > minSimilarity) // Filter out documents with low similarity scores\n .sort((a, b) => b.similarity - a.similarity) // Sort documents by highest similarity first\n .slice(0, maxResults); // Select the top 6 most similar documents\n\n // Return the content of the top matching documents\n return results;\n};\n\n/**\n * Reads the content of a file synchronously.\n *\n * @function\n * @param relativeFilePath - The relative or absolute path to the target file.\n * @returns The entire contents of the specified file as a UTF-8 encoded string.\n */\nconst getFileContent = (relativeFilePath: string): string => {\n const __dirname = dirname(fileURLToPath(import.meta.url));\n const absolutePath = join(__dirname, relativeFilePath);\n const fileContent = readFileSync(absolutePath, 'utf-8');\n return fileContent;\n};\n\nconst CHAT_GPT_PROMPT = getFileContent('./PROMPT.md');\n\n// Initial prompt configuration for the chatbot\nexport const initPrompt: ChatCompletionRequestMessage = {\n role: 'system',\n content: CHAT_GPT_PROMPT,\n};\n\nexport type AskDocQuestionResult = {\n response: string;\n relatedFiles: string[];\n};\n\nexport type AskDocQuestionOptions = {\n onMessage?: (chunk: string) => void;\n};\n\n/**\n * Handles the \"Ask a question\" endpoint in an Express.js route.\n * Processes user messages, retrieves relevant documents, and interacts with AI models to generate responses.\n *\n * @param messages - An array of chat messages from the user and assistant\n * @returns The assistant's response as a string\n */\nexport const askDocQuestion = async (\n messages: ChatCompletionRequestMessage[],\n options?: AskDocQuestionOptions\n): Promise<AskDocQuestionResult> => {\n // Format the user's question to keep only the relevant keywords\n const query = messages.map((message) => `- ${message.content}`).join('\\n');\n\n // 1) Find relevant documents based on the user's question\n const relevantFilesReferences = await searchChunkReference(query);\n\n // 2) Integrate the relevant documents into the initial system prompt\n const systemPrompt = initPrompt.content.replace(\n '{{relevantFilesReferences}}',\n relevantFilesReferences.length === 0\n ? 
'Not relevant file found related to the question.'\n : relevantFilesReferences\n .map(\n (doc, idx) =>\n `[Chunk ${idx}] docKey = \"${doc.fileKey}\":\\n${doc.content}`\n )\n .join('\\n\\n') // Insert relevant docs into the prompt\n );\n\n // Format messages for AI SDK\n const aiMessages = [\n { role: 'system' as const, content: systemPrompt },\n ...messages,\n ];\n\n // Get AI configuration\n const aiConfig = await getAIConfig({\n provider: AIProvider.OPENAI,\n model: MODEL,\n temperature: MODEL_TEMPERATURE,\n apiKey: process.env.OPENAI_API_KEY!,\n });\n\n if (!aiConfig) {\n throw new Error('Failed to initialize AI configuration');\n }\n\n // 3) Use the AI SDK to stream the response\n let fullResponse = '';\n const stream = streamText({\n model: aiConfig.model,\n temperature: aiConfig.temperature,\n messages: aiMessages,\n });\n\n // Process the stream\n for await (const chunk of stream.textStream) {\n fullResponse += chunk;\n options?.onMessage?.(chunk);\n }\n\n // 4) Extract unique related files\n const relatedFiles = [\n ...new Set(relevantFilesReferences.map((doc) => doc.fileKey)),\n ];\n\n // 5) Return the assistant's response to the user\n return {\n response: fullResponse ?? 'Error: No result found',\n relatedFiles,\n };\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAyB;AACzB,oBAAwB;AACxB,kBAA6C;AAC7C,gBAA2B;AAC3B,oBAAmB;AACnB,gBAAiC;AACjC,oBAAuB;AACvB,kBAA8B;AAC9B,iBAA8B;AAC9B,mBAIO;AACP,wBAA2B;AAd3B;AA+BA,MAAM,cAA+B,CAAC;AAGtC,MAAM,QAAQ;AACd,MAAM,oBAAoB;AAC1B,MAAM,kBAAkB;AACxB,MAAM,iBAAiB;AACvB,MAAM,mBAAmB;AACzB,MAAM,gBAAgB;AACtB,MAAM,YAAY,mBAAmB;AACrC,MAAM,gBAAgB,iBAAiB;AACvC,MAAM,yBAAyB;AAC/B,MAAM,iCAAiC;AAOvC,MAAM,YAAY,CAAC,SAA2B;AAC5C,QAAM,SAAmB,CAAC;AAC1B,MAAI,QAAQ;AAEZ,SAAO,QAAQ,KAAK,QAAQ;AAC1B,QAAI,MAAM,KAAK,IAAI,QAAQ,WAAW,KAAK,MAAM;AAGjD,QAAI,MAAM,KAAK,QAAQ;AACrB,YAAM,YAAY,KAAK,YAAY,KAAK,GAAG;AAC3C,UAAI,YAAY,OAAO;AACrB,cAAM;AAAA,MACR;AAAA,IACF;AAEA,WAAO,KAAK,KAAK,UAAU,OAAO,GAAG,CAAC;AAGtC,UAAM,YAAY,MAAM;AACxB,QAAI,aAAa,OAAO;AAEtB,cAAQ;AAAA,IACV,OAAO;AACL,cAAQ;AAAA,IACV;AAAA,EACF;AAEA,SAAO;AACT;AASA,MAAM,oBAAoB,OAAO,SAAoC;AACnE,MAAI;AAEF,cAAM,0BAAY;AAAA,MAChB,UAAU,wBAAW;AAAA,MACrB,QAAQ,QAAQ,IAAI;AAAA,IACtB,CAAC;AAED,UAAM,eAAe,IAAI,qBAAO,EAAE,QAAQ,QAAQ,IAAI,eAAe,CAAC;AAEtE,UAAM,WAAW,MAAM,aAAa,WAAW,OAAO;AAAA,MACpD,OAAO;AAAA,MACP,OAAO;AAAA,IACT,CAAC;AAED,WAAO,SAAS,KAAK,CAAC,EAAE;AAAA,EAC1B,SAAS,OAAO;AACd,YAAQ,MAAM,+BAA+B,KAAK;AAClD,WAAO,CAAC;AAAA,EACV;AACF;AAWA,MAAM,mBAAmB,CAAC,MAAgB,SAA2B;AAEnE,QAAM,aAAa,KAAK,OAAO,CAAC,KAAK,GAAG,QAAQ,MAAM,IAAI,KAAK,GAAG,GAAG,CAAC;AAGtE,QAAM,aAAa,KAAK,KAAK,KAAK,OAAO,CAAC,KAAK,MAAM,MAAM,IAAI,GAAG,CAAC,CAAC;AACpE,QAAM,aAAa,KAAK,KAAK,KAAK,OAAO,CAAC,KAAK,MAAM,MAAM,IAAI,GAAG,CAAC,CAAC;AAGpE,SAAO,cAAc,aAAa;AACpC;AAMO,MAAM,qBAAqB,YAA2B;AAC3D,QAAM,MAAM,QAAQ,IAAI;AACxB,gBAAAA,QAAO,OAAO;AAAA,IACZ,MAAM,CAAC,QAAQ,GAAG,UAAU,QAAQ,GAAG,IAAI,cAAc,MAAM;AAAA,EACjE,CAAC;AAGD,QAAM,wBAAoB,iCAAoB;AAC9C,QAAM,WAAO,qBAAQ,sBAAQ,OAAO;AACpC,QAAM,YAAQ,sBAAS,sBAAQ,OAAO;AAEtC,MAAI,SAAmC,CAAC;AAExC,QAAM,QAAQ,EAAE,GAAG,MAAM,GAAG,OAAO,GAAG,kBAAkB;AAGxD,aAAW,WAAW,OAAO,KAAK,KAAK,GAAG;AAExC,UAAM,aAAa,UAAU,MAAM,OAA6B,CAAC;AAGjE,eAAW,cAAc,OAAO,KAAK,UAAU,GAAG;AAChD,YAAM,cAAc,OAAO,UAAU,IAAI;AACzC,YAAM,eAAe,WAAW;AAEhC,YAAM,YAAY,WAChB,UACF;AAEA,YAAM,mBAAmB,GAAG,OAAO,UAAU,WAAW;AAGxD,YAAM,eAAe,kBAAAC,QACnB,gBACF;AAEA,UAAI,YAAY;AAEhB,UAAI,CAAC,WAAW;AACd,oBAAY,MAAM,kBAAkB,SAAS;AAAA,MAC/C;AAGA,eAAS,EAAE,GAAG,QAAQ,CAAC,gBAAgB,GAAG,UAAU;AAGpD,kBAAY,KAAK;AAAA,QACf;AAAA,QACA;AAAA,QACA;AAAA,QACA,SAAS;AAAA,MACX,CAAC;AAED,cAAQ,KAAK,cAAc,gBAAgB,IAAI,
YAAY,EAAE;AAAA,IAC/D;AAAA,EACF;AAEA,MAAI,QAAQ,IAAI,aAAa,eAAe;AAC1C,QAAI;AAEF,UAAI,KAAK,UAAU,MAAM,MAAM,KAAK,UAAU,kBAAAA,OAAc,GAAG;AAE7D,kBAAAC,QAAG;AAAA,UACD;AAAA,UACA,KAAK,UAAU,QAAQ,MAAM,CAAC;AAAA,QAChC;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AACd,cAAQ,MAAM,KAAK;AAAA,IACrB;AAAA,EACF;AACF;AAGA,mBAAmB;AASZ,MAAM,uBAAuB,OAClC,OACA,aAAqB,wBACrB,gBAAwB,mCACK;AAE7B,QAAM,iBAAiB,MAAM,kBAAkB,KAAK;AAGpD,QAAM,UAAU,YACb,IAAI,CAAC,WAAW;AAAA,IACf,GAAG;AAAA,IACH,YAAY,iBAAiB,gBAAgB,MAAM,SAAS;AAAA;AAAA,EAC9D,EAAE,EACD,OAAO,CAAC,UAAU,MAAM,aAAa,aAAa,EAClD,KAAK,CAAC,GAAG,MAAM,EAAE,aAAa,EAAE,UAAU,EAC1C,MAAM,GAAG,UAAU;AAGtB,SAAO;AACT;AASA,MAAM,iBAAiB,CAAC,qBAAqC;AAC3D,QAAM,gBAAY,yBAAQ,0BAAc,YAAY,GAAG,CAAC;AACxD,QAAM,mBAAe,kBAAK,WAAW,gBAAgB;AACrD,QAAM,kBAAc,wBAAa,cAAc,OAAO;AACtD,SAAO;AACT;AAEA,MAAM,kBAAkB,eAAe,aAAa;AAG7C,MAAM,aAA2C;AAAA,EACtD,MAAM;AAAA,EACN,SAAS;AACX;AAkBO,MAAM,iBAAiB,OAC5B,UACA,YACkC;AAElC,QAAM,QAAQ,SAAS,IAAI,CAAC,YAAY,KAAK,QAAQ,OAAO,EAAE,EAAE,KAAK,IAAI;AAGzE,QAAM,0BAA0B,MAAM,qBAAqB,KAAK;AAGhE,QAAM,eAAe,WAAW,QAAQ;AAAA,IACtC;AAAA,IACA,wBAAwB,WAAW,IAC/B,qDACA,wBACG;AAAA,MACC,CAAC,KAAK,QACJ,UAAU,GAAG,eAAe,IAAI,OAAO;AAAA,EAAO,IAAI,OAAO;AAAA,IAC7D,EACC,KAAK,MAAM;AAAA;AAAA,EACpB;AAGA,QAAM,aAAa;AAAA,IACjB,EAAE,MAAM,UAAmB,SAAS,aAAa;AAAA,IACjD,GAAG;AAAA,EACL;AAGA,QAAM,WAAW,UAAM,0BAAY;AAAA,IACjC,UAAU,wBAAW;AAAA,IACrB,OAAO;AAAA,IACP,aAAa;AAAA,IACb,QAAQ,QAAQ,IAAI;AAAA,EACtB,CAAC;AAED,MAAI,CAAC,UAAU;AACb,UAAM,IAAI,MAAM,uCAAuC;AAAA,EACzD;AAGA,MAAI,eAAe;AACnB,QAAM,aAAS,sBAAW;AAAA,IACxB,OAAO,SAAS;AAAA,IAChB,aAAa,SAAS;AAAA,IACtB,UAAU;AAAA,EACZ,CAAC;AAGD,mBAAiB,SAAS,OAAO,YAAY;AAC3C,oBAAgB;AAChB,aAAS,YAAY,KAAK;AAAA,EAC5B;AAGA,QAAM,eAAe;AAAA,IACnB,GAAG,IAAI,IAAI,wBAAwB,IAAI,CAAC,QAAQ,IAAI,OAAO,CAAC;AAAA,EAC9D;AAGA,SAAO;AAAA,IACL,UAAU,gBAAgB;AAAA,IAC1B;AAAA,EACF;AACF;","names":["dotenv","embeddingsList","fs"]}
@@ -12,9 +12,9 @@ You are an expert in internationalization, copy writing and content management.
 3. **Modification Guidelines:**
 
 - **Do Not Alter Structure:** If the file structure is correct, do not modify it. Only add, update, or remove content declarations as necessary.
+- **Missing Content:** If one key is missing from the Preset Output Content, or if the Preset Output Content is empty, the output content should be completed by translating the Entry Content to Translate into the output locale.
 - **Return Only Final File Content:** Provide the updated file content without any additional comments or explanations.
 - **Manage Localizations:** If the output languages targeted is a variant contains similar languages, as `en` and `en-GB`, consider `en` as English US, and adapt it into `en-GB` as English UK.
-- **Conflict between language and content:** If you detect a conflict between the language and content, translate it if the Mode Instruction is in 'review' mode
 - **Escape Special Characters:** If the translations contain special characters, escape them using the appropriate escape sequence.
 - **Respect the tags and description instructions:** If the tags and description instructions are provided, ensure that the audited file adheres to them.
 - **Consider the Preset Output Content** If Preset Output Content is provided, and coherent with the entry, you can consider reuse it to fill the output file content.
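A worked illustration of the new **Missing Content** rule (all values hypothetical):

```ts
// entryLocale 'en', outputLocale 'fr'
const entryContent = { title: 'Hello', subtitle: 'Welcome' };
const presetOutputContent = { title: 'Bonjour' }; // 'subtitle' key is missing
// The new rule requires the missing key to be filled by translating the
// entry into the output locale instead of being dropped:
// expected output => { title: 'Bonjour', subtitle: 'Bienvenue' }
```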
@@ -49,9 +49,9 @@ ${tags.map(({ key, description }) => `- ${key}: ${description}`).join("\n\n")}`;
 };
 const getModeInstructions = (mode) => {
   if (mode === "complete") {
-    return 'Mode: "Complete" - Enrich the preset content with the missing keys and values. Do not update existing keys. Everything should be returned in the output.';
+    return 'Mode: "Complete" - Enrich the preset content with the missing keys and values in the output locale. Do not update existing keys. Everything should be returned in the output.';
   }
-  return 'Mode: "Review" - Fill missing content and review existing keys from the preset content. If you detect misspelled content, or content should be reformulated, correct it.';
+  return 'Mode: "Review" - Fill missing content and review existing keys from the preset content. If a key from the entry is missing in the output, it must be translated to the target language and added. If you detect misspelled content, or content that should be reformulated, correct it. If a translation is not coherent with the desired language, translate it.';
 };
 const translateJSON = async ({
   entryFileContent,
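A sketch of how these mode strings are reached through `translateJSON`; option names come from the `TranslateJSONOptions` type embedded in the sourcemap below, and all values are illustrative:

```ts
import { Locales } from 'intlayer';
import { translateJSON } from '@utils/AI/translateJSON';

// 'review' selects the expanded instruction above: missing keys are
// translated and added, and incoherent translations are redone.
const result = await translateJSON({
  entryFileContent: { title: 'Hello' } as unknown as JSON,
  presetOutputContent: {} as unknown as JSON, // empty preset: expect a fully translated output
  dictionaryDescription: 'Landing page copy', // hypothetical
  entryLocale: Locales.ENGLISH,
  outputLocale: Locales.FRENCH,
  tags: [],
  mode: 'review',
});
```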
@@ -1 +1 @@
-
{"version":3,"sources":["../../../../../src/utils/AI/translateJSON/index.ts"],"sourcesContent":["import type { Tag } from '@/types/tag.types';\nimport { retryManager } from '@intlayer/config';\nimport { getLocaleName } from '@intlayer/core';\nimport { logger } from '@logger';\nimport { extractJson } from '@utils/extractJSON';\nimport { generateText } from 'ai';\nimport { readFileSync } from 'fs';\nimport type { Locales } from 'intlayer';\nimport { dirname, join } from 'path';\nimport { fileURLToPath } from 'url';\nimport { AIOptions, AIProvider, getAIConfig } from '../aiSdk';\n\nconst __dirname = dirname(fileURLToPath(import.meta.url));\n\n// Get the content of a file at the specified path\nconst getFileContent = (filePath: string) => {\n return readFileSync(join(__dirname, filePath), { encoding: 'utf-8' });\n};\n\nexport type TranslateJSONOptions = {\n entryFileContent: JSON;\n presetOutputContent: JSON;\n dictionaryDescription: string;\n entryLocale: Locales;\n outputLocale: Locales;\n tags: Tag[];\n aiOptions?: AIOptions;\n mode: 'complete' | 'review';\n};\n\nexport type TranslateJSONResultData = {\n fileContent: string;\n tokenUsed: number;\n};\n\n// The prompt template to send to the AI model\nconst CHAT_GPT_PROMPT = getFileContent('./PROMPT.md');\n\n/**\n * Format a locale with its name.\n *\n * @param locale - The locale to format.\n * @returns A string in the format \"locale: name\", e.g. \"en: English\".\n */\nconst formatLocaleWithName = (locale: Locales): string => {\n return `${locale}: ${getLocaleName(locale)}`;\n};\n\n/**\n * Formats tag instructions for the AI prompt.\n * Creates a string with all available tags and their descriptions.\n *\n * @param tags - The list of tags to format.\n * @returns A formatted string with tag instructions.\n */\nconst formatTagInstructions = (tags: Tag[]): string => {\n if (!tags || tags.length === 0) {\n return '';\n }\n\n // Prepare the tag instructions.\n return `Based on the dictionary content, identify specific tags from the list below that would be relevant:\n \n${tags.map(({ key, description }) => `- ${key}: ${description}`).join('\\n\\n')}`;\n};\n\nconst getModeInstructions = (mode: 'complete' | 'review'): string => {\n if (mode === 'complete') {\n return 'Mode: \"Complete\" - Enrich the preset content with the missing keys and values. Do not update existing keys. Everything should be returned in the output.';\n }\n\n return 'Mode: \"Review\" - Fill missing content and review existing keys from the preset content. 
If you detect misspelled content, or content should be reformulated, correct it.';\n};\n\n/**\n * TranslateJSONs a content declaration file by constructing a prompt for AI models.\n * The prompt includes details about the project's locales, file paths of content declarations,\n * and requests for identifying issues or inconsistencies.\n */\nexport const translateJSON = async ({\n entryFileContent,\n presetOutputContent,\n dictionaryDescription,\n aiOptions,\n entryLocale,\n outputLocale,\n tags,\n mode,\n}: TranslateJSONOptions): Promise<TranslateJSONResultData | undefined> => {\n try {\n // Get the appropriate AI model configuration\n const aiConfig = await getAIConfig({\n provider: AIProvider.OPENAI,\n model: 'gpt-4o-mini-2024-07-18',\n ...aiOptions,\n });\n\n // Prepare the prompt for AI by replacing placeholders with actual values.\n const prompt = CHAT_GPT_PROMPT.replace(\n '{{entryLocale}}',\n formatLocaleWithName(entryLocale)\n )\n .replace('{{outputLocale}}', formatLocaleWithName(outputLocale))\n .replace('{{entryFileContent}}', JSON.stringify(entryFileContent))\n .replace('{{presetOutputContent}}', JSON.stringify(presetOutputContent))\n .replace('{{dictionaryDescription}}', dictionaryDescription)\n .replace('{{applicationContext}}', aiOptions?.applicationContext ?? '')\n .replace('{{tagsInstructions}}', formatTagInstructions(tags))\n .replace('{{modeInstructions}}', getModeInstructions(mode));\n\n if (!aiConfig) {\n logger.error('Failed to configure AI model');\n return undefined;\n }\n\n const result = await retryManager(async () => {\n // Use the AI SDK to generate the completion\n const { text: newContent, usage } = await generateText({\n model: aiConfig.model,\n temperature: aiConfig.temperature,\n messages: [{ role: 'system', content: prompt }],\n });\n\n logger.info(`${usage?.totalTokens ?? 0} tokens used in the request`);\n\n return {\n fileContent: extractJson(newContent),\n tokenUsed: usage?.totalTokens ?? 
0,\n };\n })();\n\n return {\n fileContent: result.fileContent,\n tokenUsed: result.tokenUsed,\n };\n } catch (error) {\n logger.error('Error translating JSON:' + error, {\n level: 'error',\n });\n }\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,oBAA6B;AAC7B,kBAA8B;AAC9B,oBAAuB;AACvB,yBAA4B;AAC5B,gBAA6B;AAC7B,gBAA6B;AAE7B,kBAA8B;AAC9B,iBAA8B;AAC9B,mBAAmD;AAVnD;AAYA,MAAM,gBAAY,yBAAQ,0BAAc,YAAY,GAAG,CAAC;AAGxD,MAAM,iBAAiB,CAAC,aAAqB;AAC3C,aAAO,4BAAa,kBAAK,WAAW,QAAQ,GAAG,EAAE,UAAU,QAAQ,CAAC;AACtE;AAmBA,MAAM,kBAAkB,eAAe,aAAa;AAQpD,MAAM,uBAAuB,CAAC,WAA4B;AACxD,SAAO,GAAG,MAAM,SAAK,2BAAc,MAAM,CAAC;AAC5C;AASA,MAAM,wBAAwB,CAAC,SAAwB;AACrD,MAAI,CAAC,QAAQ,KAAK,WAAW,GAAG;AAC9B,WAAO;AAAA,EACT;AAGA,SAAO;AAAA;AAAA,EAEP,KAAK,IAAI,CAAC,EAAE,KAAK,YAAY,MAAM,KAAK,GAAG,KAAK,WAAW,EAAE,EAAE,KAAK,MAAM,CAAC;AAC7E;AAEA,MAAM,sBAAsB,CAAC,SAAwC;AACnE,MAAI,SAAS,YAAY;AACvB,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAOO,MAAM,gBAAgB,OAAO;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAA0E;AACxE,MAAI;AAEF,UAAM,WAAW,UAAM,0BAAY;AAAA,MACjC,UAAU,wBAAW;AAAA,MACrB,OAAO;AAAA,MACP,GAAG;AAAA,IACL,CAAC;AAGD,UAAM,SAAS,gBAAgB;AAAA,MAC7B;AAAA,MACA,qBAAqB,WAAW;AAAA,IAClC,EACG,QAAQ,oBAAoB,qBAAqB,YAAY,CAAC,EAC9D,QAAQ,wBAAwB,KAAK,UAAU,gBAAgB,CAAC,EAChE,QAAQ,2BAA2B,KAAK,UAAU,mBAAmB,CAAC,EACtE,QAAQ,6BAA6B,qBAAqB,EAC1D,QAAQ,0BAA0B,WAAW,sBAAsB,EAAE,EACrE,QAAQ,wBAAwB,sBAAsB,IAAI,CAAC,EAC3D,QAAQ,wBAAwB,oBAAoB,IAAI,CAAC;AAE5D,QAAI,CAAC,UAAU;AACb,2BAAO,MAAM,8BAA8B;AAC3C,aAAO;AAAA,IACT;AAEA,UAAM,SAAS,UAAM,4BAAa,YAAY;AAE5C,YAAM,EAAE,MAAM,YAAY,MAAM,IAAI,UAAM,wBAAa;AAAA,QACrD,OAAO,SAAS;AAAA,QAChB,aAAa,SAAS;AAAA,QACtB,UAAU,CAAC,EAAE,MAAM,UAAU,SAAS,OAAO,CAAC;AAAA,MAChD,CAAC;AAED,2BAAO,KAAK,GAAG,OAAO,eAAe,CAAC,6BAA6B;AAEnE,aAAO;AAAA,QACL,iBAAa,gCAAY,UAAU;AAAA,QACnC,WAAW,OAAO,eAAe;AAAA,MACnC;AAAA,IACF,CAAC,EAAE;AAEH,WAAO;AAAA,MACL,aAAa,OAAO;AAAA,MACpB,WAAW,OAAO;AAAA,IACpB;AAAA,EACF,SAAS,OAAO;AACd,yBAAO,MAAM,4BAA4B,OAAO;AAAA,MAC9C,OAAO;AAAA,IACT,CAAC;AAAA,EACH;AACF;","names":[]}
+
{"version":3,"sources":["../../../../../src/utils/AI/translateJSON/index.ts"],"sourcesContent":["import type { Tag } from '@/types/tag.types';\nimport { retryManager } from '@intlayer/config';\nimport { getLocaleName } from '@intlayer/core';\nimport { logger } from '@logger';\nimport { extractJson } from '@utils/extractJSON';\nimport { generateText } from 'ai';\nimport { readFileSync } from 'fs';\nimport type { Locales } from 'intlayer';\nimport { dirname, join } from 'path';\nimport { fileURLToPath } from 'url';\nimport { AIOptions, AIProvider, getAIConfig } from '../aiSdk';\n\nconst __dirname = dirname(fileURLToPath(import.meta.url));\n\n// Get the content of a file at the specified path\nconst getFileContent = (filePath: string) => {\n return readFileSync(join(__dirname, filePath), { encoding: 'utf-8' });\n};\n\nexport type TranslateJSONOptions = {\n entryFileContent: JSON;\n presetOutputContent: JSON;\n dictionaryDescription: string;\n entryLocale: Locales;\n outputLocale: Locales;\n tags: Tag[];\n aiOptions?: AIOptions;\n mode: 'complete' | 'review';\n};\n\nexport type TranslateJSONResultData = {\n fileContent: string;\n tokenUsed: number;\n};\n\n// The prompt template to send to the AI model\nconst CHAT_GPT_PROMPT = getFileContent('./PROMPT.md');\n\n/**\n * Format a locale with its name.\n *\n * @param locale - The locale to format.\n * @returns A string in the format \"locale: name\", e.g. \"en: English\".\n */\nconst formatLocaleWithName = (locale: Locales): string => {\n return `${locale}: ${getLocaleName(locale)}`;\n};\n\n/**\n * Formats tag instructions for the AI prompt.\n * Creates a string with all available tags and their descriptions.\n *\n * @param tags - The list of tags to format.\n * @returns A formatted string with tag instructions.\n */\nconst formatTagInstructions = (tags: Tag[]): string => {\n if (!tags || tags.length === 0) {\n return '';\n }\n\n // Prepare the tag instructions.\n return `Based on the dictionary content, identify specific tags from the list below that would be relevant:\n \n${tags.map(({ key, description }) => `- ${key}: ${description}`).join('\\n\\n')}`;\n};\n\nconst getModeInstructions = (mode: 'complete' | 'review'): string => {\n if (mode === 'complete') {\n return 'Mode: \"Complete\" - Enrich the preset content with the missing keys and values in the output locale. Do not update existing keys. Everything should be returned in the output.';\n }\n\n return 'Mode: \"Review\" - Fill missing content and review existing keys from the preset content. If a key from the entry is missing in the output, it must be translated to the target language and added. If you detect misspelled content, or content that should be reformulated, correct it. 
If a translation is not coherent with the desired language, translate it.';\n};\n\n/**\n * TranslateJSONs a content declaration file by constructing a prompt for AI models.\n * The prompt includes details about the project's locales, file paths of content declarations,\n * and requests for identifying issues or inconsistencies.\n */\nexport const translateJSON = async ({\n entryFileContent,\n presetOutputContent,\n dictionaryDescription,\n aiOptions,\n entryLocale,\n outputLocale,\n tags,\n mode,\n}: TranslateJSONOptions): Promise<TranslateJSONResultData | undefined> => {\n try {\n // Get the appropriate AI model configuration\n const aiConfig = await getAIConfig({\n provider: AIProvider.OPENAI,\n model: 'gpt-4o-mini-2024-07-18',\n ...aiOptions,\n });\n\n // Prepare the prompt for AI by replacing placeholders with actual values.\n const prompt = CHAT_GPT_PROMPT.replace(\n '{{entryLocale}}',\n formatLocaleWithName(entryLocale)\n )\n .replace('{{outputLocale}}', formatLocaleWithName(outputLocale))\n .replace('{{entryFileContent}}', JSON.stringify(entryFileContent))\n .replace('{{presetOutputContent}}', JSON.stringify(presetOutputContent))\n .replace('{{dictionaryDescription}}', dictionaryDescription)\n .replace('{{applicationContext}}', aiOptions?.applicationContext ?? '')\n .replace('{{tagsInstructions}}', formatTagInstructions(tags))\n .replace('{{modeInstructions}}', getModeInstructions(mode));\n\n if (!aiConfig) {\n logger.error('Failed to configure AI model');\n return undefined;\n }\n\n const result = await retryManager(async () => {\n // Use the AI SDK to generate the completion\n const { text: newContent, usage } = await generateText({\n model: aiConfig.model,\n temperature: aiConfig.temperature,\n messages: [{ role: 'system', content: prompt }],\n });\n\n logger.info(`${usage?.totalTokens ?? 0} tokens used in the request`);\n\n return {\n fileContent: extractJson(newContent),\n tokenUsed: usage?.totalTokens ?? 
0,\n };\n })();\n\n return {\n fileContent: result.fileContent,\n tokenUsed: result.tokenUsed,\n };\n } catch (error) {\n logger.error('Error translating JSON:' + error, {\n level: 'error',\n });\n }\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,oBAA6B;AAC7B,kBAA8B;AAC9B,oBAAuB;AACvB,yBAA4B;AAC5B,gBAA6B;AAC7B,gBAA6B;AAE7B,kBAA8B;AAC9B,iBAA8B;AAC9B,mBAAmD;AAVnD;AAYA,MAAM,gBAAY,yBAAQ,0BAAc,YAAY,GAAG,CAAC;AAGxD,MAAM,iBAAiB,CAAC,aAAqB;AAC3C,aAAO,4BAAa,kBAAK,WAAW,QAAQ,GAAG,EAAE,UAAU,QAAQ,CAAC;AACtE;AAmBA,MAAM,kBAAkB,eAAe,aAAa;AAQpD,MAAM,uBAAuB,CAAC,WAA4B;AACxD,SAAO,GAAG,MAAM,SAAK,2BAAc,MAAM,CAAC;AAC5C;AASA,MAAM,wBAAwB,CAAC,SAAwB;AACrD,MAAI,CAAC,QAAQ,KAAK,WAAW,GAAG;AAC9B,WAAO;AAAA,EACT;AAGA,SAAO;AAAA;AAAA,EAEP,KAAK,IAAI,CAAC,EAAE,KAAK,YAAY,MAAM,KAAK,GAAG,KAAK,WAAW,EAAE,EAAE,KAAK,MAAM,CAAC;AAC7E;AAEA,MAAM,sBAAsB,CAAC,SAAwC;AACnE,MAAI,SAAS,YAAY;AACvB,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAOO,MAAM,gBAAgB,OAAO;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAA0E;AACxE,MAAI;AAEF,UAAM,WAAW,UAAM,0BAAY;AAAA,MACjC,UAAU,wBAAW;AAAA,MACrB,OAAO;AAAA,MACP,GAAG;AAAA,IACL,CAAC;AAGD,UAAM,SAAS,gBAAgB;AAAA,MAC7B;AAAA,MACA,qBAAqB,WAAW;AAAA,IAClC,EACG,QAAQ,oBAAoB,qBAAqB,YAAY,CAAC,EAC9D,QAAQ,wBAAwB,KAAK,UAAU,gBAAgB,CAAC,EAChE,QAAQ,2BAA2B,KAAK,UAAU,mBAAmB,CAAC,EACtE,QAAQ,6BAA6B,qBAAqB,EAC1D,QAAQ,0BAA0B,WAAW,sBAAsB,EAAE,EACrE,QAAQ,wBAAwB,sBAAsB,IAAI,CAAC,EAC3D,QAAQ,wBAAwB,oBAAoB,IAAI,CAAC;AAE5D,QAAI,CAAC,UAAU;AACb,2BAAO,MAAM,8BAA8B;AAC3C,aAAO;AAAA,IACT;AAEA,UAAM,SAAS,UAAM,4BAAa,YAAY;AAE5C,YAAM,EAAE,MAAM,YAAY,MAAM,IAAI,UAAM,wBAAa;AAAA,QACrD,OAAO,SAAS;AAAA,QAChB,aAAa,SAAS;AAAA,QACtB,UAAU,CAAC,EAAE,MAAM,UAAU,SAAS,OAAO,CAAC;AAAA,MAChD,CAAC;AAED,2BAAO,KAAK,GAAG,OAAO,eAAe,CAAC,6BAA6B;AAEnE,aAAO;AAAA,QACL,iBAAa,gCAAY,UAAU;AAAA,QACnC,WAAW,OAAO,eAAe;AAAA,MACnC;AAAA,IACF,CAAC,EAAE;AAEH,WAAO;AAAA,MACL,aAAa,OAAO;AAAA,MACpB,WAAW,OAAO;AAAA,IACpB;AAAA,EACF,SAAS,OAAO;AACd,yBAAO,MAAM,4BAA4B,OAAO;AAAA,MAC9C,OAAO;AAAA,IACT,CAAC;AAAA,EACH;AACF;","names":[]}
|