@librechat/data-schemas 0.0.31 → 0.0.34
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +1159 -147
- package/dist/index.cjs.map +1 -1
- package/dist/index.es.js +1146 -147
- package/dist/index.es.js.map +1 -1
- package/dist/types/app/endpoints.d.ts +0 -1
- package/dist/types/app/index.d.ts +1 -0
- package/dist/types/app/vertex.d.ts +19 -0
- package/dist/types/crypto/index.d.ts +52 -0
- package/dist/types/index.d.ts +1 -1
- package/dist/types/methods/aclEntry.d.ts +4 -0
- package/dist/types/methods/file.d.ts +55 -0
- package/dist/types/methods/file.spec.d.ts +1 -0
- package/dist/types/methods/index.d.ts +9 -4
- package/dist/types/methods/key.d.ts +55 -0
- package/dist/types/methods/mcpServer.d.ts +57 -0
- package/dist/types/methods/mcpServer.spec.d.ts +1 -0
- package/dist/types/methods/session.d.ts +3 -1
- package/dist/types/methods/user.d.ts +4 -1
- package/dist/types/models/index.d.ts +1 -0
- package/dist/types/models/mcpServer.d.ts +30 -0
- package/dist/types/models/plugins/mongoMeili.d.ts +2 -13
- package/dist/types/models/plugins/mongoMeili.spec.d.ts +1 -0
- package/dist/types/schema/banner.d.ts +1 -0
- package/dist/types/schema/mcpServer.d.ts +37 -0
- package/dist/types/schema/preset.d.ts +0 -1
- package/dist/types/types/agent.d.ts +2 -0
- package/dist/types/types/app.d.ts +8 -5
- package/dist/types/types/banner.d.ts +1 -0
- package/dist/types/types/convo.d.ts +0 -1
- package/dist/types/types/index.d.ts +1 -0
- package/dist/types/types/mcp.d.ts +34 -0
- package/dist/types/types/message.d.ts +1 -0
- package/dist/types/types/session.d.ts +6 -0
- package/dist/types/types/user.d.ts +5 -0
- package/package.json +1 -2
package/dist/index.es.js
CHANGED
|
@@ -1,10 +1,11 @@
|
|
|
1
|
-
import { EModelEndpoint, agentsEndpointSchema, memorySchema, removeNullishValues, SafeSearchTypes, normalizeEndpointName, defaultAssistantsVersion, Capabilities, assistantEndpointSchema, validateAzureGroups, mapModelToAzureConfig, OCRStrategy, getConfigDefaults, PermissionBits, FileSources, Constants, PermissionTypes, Permissions, SystemRoles, ResourceType, PrincipalType, PrincipalModel, roleDefaults, AccessRoleIds } from 'librechat-data-provider';
|
|
1
|
+
import { EModelEndpoint, agentsEndpointSchema, memorySchema, removeNullishValues, SafeSearchTypes, normalizeEndpointName, defaultAssistantsVersion, Capabilities, assistantEndpointSchema, validateAzureGroups, mapModelToAzureConfig, extractEnvVariable, envVarRegex, OCRStrategy, getConfigDefaults, PermissionBits, FileSources, Constants, PermissionTypes, Permissions, SystemRoles, parseTextParts, ResourceType, PrincipalType, PrincipalModel, roleDefaults, ErrorTypes, EToolResources, FileContext, AccessRoleIds } from 'librechat-data-provider';
|
|
2
2
|
import winston from 'winston';
|
|
3
3
|
import 'winston-daily-rotate-file';
|
|
4
4
|
import { klona } from 'klona';
|
|
5
5
|
import path from 'path';
|
|
6
|
+
import 'dotenv/config';
|
|
6
7
|
import jwt from 'jsonwebtoken';
|
|
7
|
-
import
|
|
8
|
+
import crypto from 'node:crypto';
|
|
8
9
|
import mongoose, { Schema, Types } from 'mongoose';
|
|
9
10
|
import _ from 'lodash';
|
|
10
11
|
import { MeiliSearch } from 'meilisearch';
|
|
@@ -854,6 +855,153 @@ function azureConfigSetup(config) {
|
|
|
854
855
|
};
|
|
855
856
|
}
|
|
856
857
|
|
|
858
|
+
/**
|
|
859
|
+
* Default Vertex AI models available through Google Cloud
|
|
860
|
+
* These are the standard Anthropic model names as served by Vertex AI
|
|
861
|
+
*/
|
|
862
|
+
const defaultVertexModels = [
|
|
863
|
+
'claude-sonnet-4-20250514',
|
|
864
|
+
'claude-3-7-sonnet-20250219',
|
|
865
|
+
'claude-3-5-sonnet-v2@20241022',
|
|
866
|
+
'claude-3-5-sonnet@20240620',
|
|
867
|
+
'claude-3-5-haiku@20241022',
|
|
868
|
+
'claude-3-opus@20240229',
|
|
869
|
+
'claude-3-haiku@20240307',
|
|
870
|
+
];
|
|
871
|
+
/**
|
|
872
|
+
* Processes models configuration and creates deployment name mapping
|
|
873
|
+
* Similar to Azure's model mapping logic
|
|
874
|
+
* @param models - The models configuration (can be array or object)
|
|
875
|
+
* @param defaultDeploymentName - Optional default deployment name
|
|
876
|
+
* @returns Object containing modelNames array and modelDeploymentMap
|
|
877
|
+
*/
|
|
878
|
+
function processVertexModels(models, defaultDeploymentName) {
|
|
879
|
+
const modelNames = [];
|
|
880
|
+
const modelDeploymentMap = {};
|
|
881
|
+
if (!models) {
|
|
882
|
+
// No models specified, use defaults
|
|
883
|
+
for (const model of defaultVertexModels) {
|
|
884
|
+
modelNames.push(model);
|
|
885
|
+
modelDeploymentMap[model] = model; // Default: model name = deployment name
|
|
886
|
+
}
|
|
887
|
+
return { modelNames, modelDeploymentMap };
|
|
888
|
+
}
|
|
889
|
+
if (Array.isArray(models)) {
|
|
890
|
+
// Legacy format: simple array of model names
|
|
891
|
+
for (const modelName of models) {
|
|
892
|
+
modelNames.push(modelName);
|
|
893
|
+
// If a default deployment name is provided, use it for all models
|
|
894
|
+
// Otherwise, model name is the deployment name
|
|
895
|
+
modelDeploymentMap[modelName] = defaultDeploymentName || modelName;
|
|
896
|
+
}
|
|
897
|
+
}
|
|
898
|
+
else {
|
|
899
|
+
// New format: object with model names as keys and config as values
|
|
900
|
+
for (const [modelName, modelConfig] of Object.entries(models)) {
|
|
901
|
+
modelNames.push(modelName);
|
|
902
|
+
if (typeof modelConfig === 'boolean') {
|
|
903
|
+
// Model is set to true/false - use default deployment name or model name
|
|
904
|
+
modelDeploymentMap[modelName] = defaultDeploymentName || modelName;
|
|
905
|
+
}
|
|
906
|
+
else if (modelConfig === null || modelConfig === void 0 ? void 0 : modelConfig.deploymentName) {
|
|
907
|
+
// Model has its own deployment name specified
|
|
908
|
+
modelDeploymentMap[modelName] = modelConfig.deploymentName;
|
|
909
|
+
}
|
|
910
|
+
else {
|
|
911
|
+
// Model is an object but no deployment name - use default or model name
|
|
912
|
+
modelDeploymentMap[modelName] = defaultDeploymentName || modelName;
|
|
913
|
+
}
|
|
914
|
+
}
|
|
915
|
+
}
|
|
916
|
+
return { modelNames, modelDeploymentMap };
|
|
917
|
+
}
|
|
918
|
+
/**
|
|
919
|
+
* Validates and processes Vertex AI configuration
|
|
920
|
+
* @param vertexConfig - The Vertex AI configuration object
|
|
921
|
+
* @returns Validated configuration with errors if any
|
|
922
|
+
*/
|
|
923
|
+
function validateVertexConfig(vertexConfig) {
|
|
924
|
+
if (!vertexConfig) {
|
|
925
|
+
return null;
|
|
926
|
+
}
|
|
927
|
+
const errors = [];
|
|
928
|
+
// Extract and validate environment variables
|
|
929
|
+
// projectId is optional - will be auto-detected from service key if not provided
|
|
930
|
+
const projectId = vertexConfig.projectId ? extractEnvVariable(vertexConfig.projectId) : undefined;
|
|
931
|
+
const region = extractEnvVariable(vertexConfig.region || 'us-east5');
|
|
932
|
+
const serviceKeyFile = vertexConfig.serviceKeyFile
|
|
933
|
+
? extractEnvVariable(vertexConfig.serviceKeyFile)
|
|
934
|
+
: undefined;
|
|
935
|
+
const defaultDeploymentName = vertexConfig.deploymentName
|
|
936
|
+
? extractEnvVariable(vertexConfig.deploymentName)
|
|
937
|
+
: undefined;
|
|
938
|
+
// Check for unresolved environment variables
|
|
939
|
+
if (projectId && envVarRegex.test(projectId)) {
|
|
940
|
+
errors.push(`Vertex AI projectId environment variable "${vertexConfig.projectId}" was not found.`);
|
|
941
|
+
}
|
|
942
|
+
if (envVarRegex.test(region)) {
|
|
943
|
+
errors.push(`Vertex AI region environment variable "${vertexConfig.region}" was not found.`);
|
|
944
|
+
}
|
|
945
|
+
if (serviceKeyFile && envVarRegex.test(serviceKeyFile)) {
|
|
946
|
+
errors.push(`Vertex AI serviceKeyFile environment variable "${vertexConfig.serviceKeyFile}" was not found.`);
|
|
947
|
+
}
|
|
948
|
+
if (defaultDeploymentName && envVarRegex.test(defaultDeploymentName)) {
|
|
949
|
+
errors.push(`Vertex AI deploymentName environment variable "${vertexConfig.deploymentName}" was not found.`);
|
|
950
|
+
}
|
|
951
|
+
// Process models and create deployment mapping
|
|
952
|
+
const { modelNames, modelDeploymentMap } = processVertexModels(vertexConfig.models, defaultDeploymentName);
|
|
953
|
+
// Note: projectId is optional - if not provided, it will be auto-detected from the service key file
|
|
954
|
+
const isValid = errors.length === 0;
|
|
955
|
+
return {
|
|
956
|
+
enabled: vertexConfig.enabled !== false,
|
|
957
|
+
projectId,
|
|
958
|
+
region,
|
|
959
|
+
serviceKeyFile,
|
|
960
|
+
deploymentName: defaultDeploymentName,
|
|
961
|
+
models: vertexConfig.models,
|
|
962
|
+
modelNames,
|
|
963
|
+
modelDeploymentMap,
|
|
964
|
+
isValid,
|
|
965
|
+
errors,
|
|
966
|
+
};
|
|
967
|
+
}
|
|
968
|
+
/**
|
|
969
|
+
* Sets up the Vertex AI configuration from the config (`librechat.yaml`) file.
|
|
970
|
+
* Similar to azureConfigSetup, this processes and validates the Vertex AI configuration.
|
|
971
|
+
* @param config - The loaded custom configuration.
|
|
972
|
+
* @returns The validated Vertex AI configuration or null if not configured.
|
|
973
|
+
*/
|
|
974
|
+
function vertexConfigSetup(config) {
|
|
975
|
+
var _a, _b;
|
|
976
|
+
const anthropicConfig = (_a = config.endpoints) === null || _a === void 0 ? void 0 : _a[EModelEndpoint.anthropic];
|
|
977
|
+
if (!(anthropicConfig === null || anthropicConfig === void 0 ? void 0 : anthropicConfig.vertex)) {
|
|
978
|
+
return null;
|
|
979
|
+
}
|
|
980
|
+
const vertexConfig = anthropicConfig.vertex;
|
|
981
|
+
// Skip if explicitly disabled (enabled: false)
|
|
982
|
+
// When vertex config exists, it's enabled by default unless explicitly set to false
|
|
983
|
+
if (vertexConfig.enabled === false) {
|
|
984
|
+
return null;
|
|
985
|
+
}
|
|
986
|
+
const validatedConfig = validateVertexConfig(vertexConfig);
|
|
987
|
+
if (!validatedConfig) {
|
|
988
|
+
return null;
|
|
989
|
+
}
|
|
990
|
+
if (!validatedConfig.isValid) {
|
|
991
|
+
const errorString = validatedConfig.errors.join('\n');
|
|
992
|
+
const errorMessage = 'Invalid Vertex AI configuration:\n' + errorString;
|
|
993
|
+
logger$1.error(errorMessage);
|
|
994
|
+
throw new Error(errorMessage);
|
|
995
|
+
}
|
|
996
|
+
logger$1.info('Vertex AI configuration loaded successfully', {
|
|
997
|
+
projectId: validatedConfig.projectId,
|
|
998
|
+
region: validatedConfig.region,
|
|
999
|
+
modelCount: ((_b = validatedConfig.modelNames) === null || _b === void 0 ? void 0 : _b.length) || 0,
|
|
1000
|
+
models: validatedConfig.modelNames,
|
|
1001
|
+
});
|
|
1002
|
+
return validatedConfig;
|
|
1003
|
+
}
|
|
1004
|
+
|
|
857
1005
|
/**
|
|
858
1006
|
* Loads custom config endpoints
|
|
859
1007
|
* @param [config]
|
|
@@ -876,12 +1024,24 @@ const loadEndpoints = (config, agentsDefaults) => {
|
|
|
876
1024
|
loadedEndpoints[EModelEndpoint.assistants] = assistantsConfigSetup(config, EModelEndpoint.assistants, loadedEndpoints[EModelEndpoint.assistants]);
|
|
877
1025
|
}
|
|
878
1026
|
loadedEndpoints[EModelEndpoint.agents] = agentsConfigSetup(config, agentsDefaults);
|
|
1027
|
+
// Handle Anthropic endpoint with Vertex AI configuration
|
|
1028
|
+
if (endpoints === null || endpoints === void 0 ? void 0 : endpoints[EModelEndpoint.anthropic]) {
|
|
1029
|
+
const anthropicConfig = endpoints[EModelEndpoint.anthropic];
|
|
1030
|
+
const vertexConfig = vertexConfigSetup(config);
|
|
1031
|
+
loadedEndpoints[EModelEndpoint.anthropic] = {
|
|
1032
|
+
...anthropicConfig,
|
|
1033
|
+
// If Vertex AI is enabled, use the visible model names from vertex config
|
|
1034
|
+
// Otherwise, use the models array from anthropic config
|
|
1035
|
+
...((vertexConfig === null || vertexConfig === void 0 ? void 0 : vertexConfig.modelNames) && { models: vertexConfig.modelNames }),
|
|
1036
|
+
// Attach validated Vertex AI config if present
|
|
1037
|
+
...(vertexConfig && { vertexConfig }),
|
|
1038
|
+
};
|
|
1039
|
+
}
|
|
879
1040
|
const endpointKeys = [
|
|
880
1041
|
EModelEndpoint.openAI,
|
|
881
1042
|
EModelEndpoint.google,
|
|
882
1043
|
EModelEndpoint.custom,
|
|
883
1044
|
EModelEndpoint.bedrock,
|
|
884
|
-
EModelEndpoint.anthropic,
|
|
885
1045
|
];
|
|
886
1046
|
endpointKeys.forEach((key) => {
|
|
887
1047
|
const currentKey = key;
|
|
@@ -936,7 +1096,9 @@ const AppService = async (params) => {
|
|
|
936
1096
|
const imageOutputType = (_e = config === null || config === void 0 ? void 0 : config.imageOutputType) !== null && _e !== void 0 ? _e : configDefaults.imageOutputType;
|
|
937
1097
|
process.env.CDN_PROVIDER = fileStrategy;
|
|
938
1098
|
const availableTools = systemTools;
|
|
939
|
-
const
|
|
1099
|
+
const mcpServersConfig = config.mcpServers || null;
|
|
1100
|
+
const mcpSettings = config.mcpSettings || null;
|
|
1101
|
+
const actions = config.actions;
|
|
940
1102
|
const registration = (_f = config.registration) !== null && _f !== void 0 ? _f : configDefaults.registration;
|
|
941
1103
|
const interfaceConfig = await loadDefaultInterface({ config, configDefaults });
|
|
942
1104
|
const turnstileConfig = loadTurnstileConfig(config, configDefaults);
|
|
@@ -948,8 +1110,10 @@ const AppService = async (params) => {
|
|
|
948
1110
|
memory,
|
|
949
1111
|
speech,
|
|
950
1112
|
balance,
|
|
1113
|
+
actions,
|
|
951
1114
|
transactions,
|
|
952
|
-
mcpConfig,
|
|
1115
|
+
mcpConfig: mcpServersConfig,
|
|
1116
|
+
mcpSettings,
|
|
953
1117
|
webSearch,
|
|
954
1118
|
fileStrategy,
|
|
955
1119
|
registration,
|
|
@@ -997,6 +1161,12 @@ var RoleBits;
|
|
|
997
1161
|
RoleBits[RoleBits["OWNER"] = PermissionBits.VIEW | PermissionBits.EDIT | PermissionBits.DELETE | PermissionBits.SHARE] = "OWNER";
|
|
998
1162
|
})(RoleBits || (RoleBits = {}));
|
|
999
1163
|
|
|
1164
|
+
var _a, _b;
|
|
1165
|
+
const { webcrypto } = crypto;
|
|
1166
|
+
/** Use hex decoding for both key and IV for legacy methods */
|
|
1167
|
+
const key = Buffer.from((_a = process.env.CREDS_KEY) !== null && _a !== void 0 ? _a : '', 'hex');
|
|
1168
|
+
const iv = Buffer.from((_b = process.env.CREDS_IV) !== null && _b !== void 0 ? _b : '', 'hex');
|
|
1169
|
+
const algorithm = 'AES-CBC';
|
|
1000
1170
|
async function signPayload({ payload, secret, expirationTime, }) {
|
|
1001
1171
|
return jwt.sign(payload, secret, { expiresIn: expirationTime });
|
|
1002
1172
|
}
|
|
@@ -1005,6 +1175,130 @@ async function hashToken(str) {
|
|
|
1005
1175
|
const hashBuffer = await webcrypto.subtle.digest('SHA-256', data);
|
|
1006
1176
|
return Buffer.from(hashBuffer).toString('hex');
|
|
1007
1177
|
}
|
|
1178
|
+
/** --- Legacy v1/v2 Setup: AES-CBC with fixed key and IV --- */
|
|
1179
|
+
/**
|
|
1180
|
+
* Encrypts a value using AES-CBC
|
|
1181
|
+
* @param value - The plaintext to encrypt
|
|
1182
|
+
* @returns The encrypted string in hex format
|
|
1183
|
+
*/
|
|
1184
|
+
async function encrypt(value) {
|
|
1185
|
+
const cryptoKey = await webcrypto.subtle.importKey('raw', key, { name: algorithm }, false, [
|
|
1186
|
+
'encrypt',
|
|
1187
|
+
]);
|
|
1188
|
+
const encoder = new TextEncoder();
|
|
1189
|
+
const data = encoder.encode(value);
|
|
1190
|
+
const encryptedBuffer = await webcrypto.subtle.encrypt({ name: algorithm, iv: iv }, cryptoKey, data);
|
|
1191
|
+
return Buffer.from(encryptedBuffer).toString('hex');
|
|
1192
|
+
}
|
|
1193
|
+
/**
|
|
1194
|
+
* Decrypts an encrypted value using AES-CBC
|
|
1195
|
+
* @param encryptedValue - The encrypted string in hex format
|
|
1196
|
+
* @returns The decrypted plaintext
|
|
1197
|
+
*/
|
|
1198
|
+
async function decrypt(encryptedValue) {
|
|
1199
|
+
const cryptoKey = await webcrypto.subtle.importKey('raw', key, { name: algorithm }, false, [
|
|
1200
|
+
'decrypt',
|
|
1201
|
+
]);
|
|
1202
|
+
const encryptedBuffer = Buffer.from(encryptedValue, 'hex');
|
|
1203
|
+
const decryptedBuffer = await webcrypto.subtle.decrypt({ name: algorithm, iv: iv }, cryptoKey, encryptedBuffer);
|
|
1204
|
+
const decoder = new TextDecoder();
|
|
1205
|
+
return decoder.decode(decryptedBuffer);
|
|
1206
|
+
}
|
|
1207
|
+
/** --- v2: AES-CBC with a random IV per encryption --- */
|
|
1208
|
+
/**
|
|
1209
|
+
* Encrypts a value using AES-CBC with a random IV per encryption
|
|
1210
|
+
* @param value - The plaintext to encrypt
|
|
1211
|
+
* @returns The encrypted string with IV prepended (iv:ciphertext format)
|
|
1212
|
+
*/
|
|
1213
|
+
async function encryptV2(value) {
|
|
1214
|
+
const gen_iv = webcrypto.getRandomValues(new Uint8Array(16));
|
|
1215
|
+
const cryptoKey = await webcrypto.subtle.importKey('raw', key, { name: algorithm }, false, [
|
|
1216
|
+
'encrypt',
|
|
1217
|
+
]);
|
|
1218
|
+
const encoder = new TextEncoder();
|
|
1219
|
+
const data = encoder.encode(value);
|
|
1220
|
+
const encryptedBuffer = await webcrypto.subtle.encrypt({ name: algorithm, iv: gen_iv }, cryptoKey, data);
|
|
1221
|
+
return Buffer.from(gen_iv).toString('hex') + ':' + Buffer.from(encryptedBuffer).toString('hex');
|
|
1222
|
+
}
|
|
1223
|
+
/**
|
|
1224
|
+
* Decrypts an encrypted value using AES-CBC with random IV
|
|
1225
|
+
* @param encryptedValue - The encrypted string in iv:ciphertext format
|
|
1226
|
+
* @returns The decrypted plaintext
|
|
1227
|
+
*/
|
|
1228
|
+
async function decryptV2(encryptedValue) {
|
|
1229
|
+
var _a;
|
|
1230
|
+
const parts = encryptedValue.split(':');
|
|
1231
|
+
if (parts.length === 1) {
|
|
1232
|
+
return parts[0];
|
|
1233
|
+
}
|
|
1234
|
+
const gen_iv = Buffer.from((_a = parts.shift()) !== null && _a !== void 0 ? _a : '', 'hex');
|
|
1235
|
+
const encrypted = parts.join(':');
|
|
1236
|
+
const cryptoKey = await webcrypto.subtle.importKey('raw', key, { name: algorithm }, false, [
|
|
1237
|
+
'decrypt',
|
|
1238
|
+
]);
|
|
1239
|
+
const encryptedBuffer = Buffer.from(encrypted, 'hex');
|
|
1240
|
+
const decryptedBuffer = await webcrypto.subtle.decrypt({ name: algorithm, iv: gen_iv }, cryptoKey, encryptedBuffer);
|
|
1241
|
+
const decoder = new TextDecoder();
|
|
1242
|
+
return decoder.decode(decryptedBuffer);
|
|
1243
|
+
}
|
|
1244
|
+
/** --- v3: AES-256-CTR using Node's crypto functions --- */
|
|
1245
|
+
const algorithm_v3 = 'aes-256-ctr';
|
|
1246
|
+
/**
|
|
1247
|
+
* Encrypts a value using AES-256-CTR.
|
|
1248
|
+
* Note: AES-256 requires a 32-byte key. Ensure that process.env.CREDS_KEY is a 64-character hex string.
|
|
1249
|
+
* @param value - The plaintext to encrypt.
|
|
1250
|
+
* @returns The encrypted string with a "v3:" prefix.
|
|
1251
|
+
*/
|
|
1252
|
+
function encryptV3(value) {
|
|
1253
|
+
if (key.length !== 32) {
|
|
1254
|
+
throw new Error(`Invalid key length: expected 32 bytes, got ${key.length} bytes`);
|
|
1255
|
+
}
|
|
1256
|
+
const iv_v3 = crypto.randomBytes(16);
|
|
1257
|
+
const cipher = crypto.createCipheriv(algorithm_v3, key, iv_v3);
|
|
1258
|
+
const encrypted = Buffer.concat([cipher.update(value, 'utf8'), cipher.final()]);
|
|
1259
|
+
return `v3:${iv_v3.toString('hex')}:${encrypted.toString('hex')}`;
|
|
1260
|
+
}
|
|
1261
|
+
/**
|
|
1262
|
+
* Decrypts an encrypted value using AES-256-CTR.
|
|
1263
|
+
* @param encryptedValue - The encrypted string with "v3:" prefix.
|
|
1264
|
+
* @returns The decrypted plaintext.
|
|
1265
|
+
*/
|
|
1266
|
+
function decryptV3(encryptedValue) {
|
|
1267
|
+
const parts = encryptedValue.split(':');
|
|
1268
|
+
if (parts[0] !== 'v3') {
|
|
1269
|
+
throw new Error('Not a v3 encrypted value');
|
|
1270
|
+
}
|
|
1271
|
+
const iv_v3 = Buffer.from(parts[1], 'hex');
|
|
1272
|
+
const encryptedText = Buffer.from(parts.slice(2).join(':'), 'hex');
|
|
1273
|
+
const decipher = crypto.createDecipheriv(algorithm_v3, key, iv_v3);
|
|
1274
|
+
const decrypted = Buffer.concat([decipher.update(encryptedText), decipher.final()]);
|
|
1275
|
+
return decrypted.toString('utf8');
|
|
1276
|
+
}
|
|
1277
|
+
/**
|
|
1278
|
+
* Generates random values as a hex string
|
|
1279
|
+
* @param length - The number of random bytes to generate
|
|
1280
|
+
* @returns The random values as a hex string
|
|
1281
|
+
*/
|
|
1282
|
+
async function getRandomValues(length) {
|
|
1283
|
+
if (!Number.isInteger(length) || length <= 0) {
|
|
1284
|
+
throw new Error('Length must be a positive integer');
|
|
1285
|
+
}
|
|
1286
|
+
const randomValues = new Uint8Array(length);
|
|
1287
|
+
webcrypto.getRandomValues(randomValues);
|
|
1288
|
+
return Buffer.from(randomValues).toString('hex');
|
|
1289
|
+
}
|
|
1290
|
+
/**
|
|
1291
|
+
* Computes SHA-256 hash for the given input.
|
|
1292
|
+
* @param input - The input to hash.
|
|
1293
|
+
* @returns The SHA-256 hash of the input.
|
|
1294
|
+
*/
|
|
1295
|
+
async function hashBackupCode(input) {
|
|
1296
|
+
const encoder = new TextEncoder();
|
|
1297
|
+
const data = encoder.encode(input);
|
|
1298
|
+
const hashBuffer = await webcrypto.subtle.digest('SHA-256', data);
|
|
1299
|
+
const hashArray = Array.from(new Uint8Array(hashBuffer));
|
|
1300
|
+
return hashArray.map((b) => b.toString(16).padStart(2, '0')).join('');
|
|
1301
|
+
}
|
|
1008
1302
|
|
|
1009
1303
|
// Define the Auth sub-schema with type-safety.
|
|
1010
1304
|
const AuthSchema = new Schema({
|
|
@@ -1160,10 +1454,17 @@ const agentSchema = new Schema({
|
|
|
1160
1454
|
default: false,
|
|
1161
1455
|
index: true,
|
|
1162
1456
|
},
|
|
1457
|
+
/** MCP server names extracted from tools for efficient querying */
|
|
1458
|
+
mcpServerNames: {
|
|
1459
|
+
type: [String],
|
|
1460
|
+
default: [],
|
|
1461
|
+
index: true,
|
|
1462
|
+
},
|
|
1163
1463
|
}, {
|
|
1164
1464
|
timestamps: true,
|
|
1165
1465
|
});
|
|
1166
1466
|
agentSchema.index({ updatedAt: -1, _id: 1 });
|
|
1467
|
+
agentSchema.index({ 'edges.to': 1 });
|
|
1167
1468
|
|
|
1168
1469
|
const agentCategorySchema = new Schema({
|
|
1169
1470
|
value: {
|
|
@@ -1299,6 +1600,10 @@ const bannerSchema = new Schema({
|
|
|
1299
1600
|
type: Boolean,
|
|
1300
1601
|
default: false,
|
|
1301
1602
|
},
|
|
1603
|
+
persistable: {
|
|
1604
|
+
type: Boolean,
|
|
1605
|
+
default: false,
|
|
1606
|
+
},
|
|
1302
1607
|
}, { timestamps: true });
|
|
1303
1608
|
|
|
1304
1609
|
const categoriesSchema = new Schema({
|
|
@@ -1342,7 +1647,6 @@ conversationTag.index({ tag: 1, user: 1 }, { unique: true });
|
|
|
1342
1647
|
|
|
1343
1648
|
// @ts-ignore
|
|
1344
1649
|
const conversationPreset = {
|
|
1345
|
-
// endpoint: [azureOpenAI, openAI, anthropic, chatGPTBrowser]
|
|
1346
1650
|
endpoint: {
|
|
1347
1651
|
type: String,
|
|
1348
1652
|
default: null,
|
|
@@ -1351,7 +1655,7 @@ const conversationPreset = {
|
|
|
1351
1655
|
endpointType: {
|
|
1352
1656
|
type: String,
|
|
1353
1657
|
},
|
|
1354
|
-
// for azureOpenAI, openAI
|
|
1658
|
+
// for azureOpenAI, openAI only
|
|
1355
1659
|
model: {
|
|
1356
1660
|
type: String,
|
|
1357
1661
|
required: false,
|
|
@@ -1516,9 +1820,6 @@ const convoSchema = new Schema({
|
|
|
1516
1820
|
meiliIndex: true,
|
|
1517
1821
|
},
|
|
1518
1822
|
messages: [{ type: Schema.Types.ObjectId, ref: 'Message' }],
|
|
1519
|
-
agentOptions: {
|
|
1520
|
-
type: Schema.Types.Mixed,
|
|
1521
|
-
},
|
|
1522
1823
|
...conversationPreset,
|
|
1523
1824
|
agent_id: {
|
|
1524
1825
|
type: String,
|
|
@@ -1538,6 +1839,8 @@ const convoSchema = new Schema({
|
|
|
1538
1839
|
convoSchema.index({ expiredAt: 1 }, { expireAfterSeconds: 0 });
|
|
1539
1840
|
convoSchema.index({ createdAt: 1, updatedAt: 1 });
|
|
1540
1841
|
convoSchema.index({ conversationId: 1, user: 1 }, { unique: true });
|
|
1842
|
+
// index for MeiliSearch sync operations
|
|
1843
|
+
convoSchema.index({ _meiliIndex: 1, expiredAt: 1 });
|
|
1541
1844
|
|
|
1542
1845
|
const file = new Schema({
|
|
1543
1846
|
user: {
|
|
@@ -1734,25 +2037,6 @@ const messageSchema = new Schema({
|
|
|
1734
2037
|
default: false,
|
|
1735
2038
|
},
|
|
1736
2039
|
files: { type: [{ type: mongoose.Schema.Types.Mixed }], default: undefined },
|
|
1737
|
-
plugin: {
|
|
1738
|
-
type: {
|
|
1739
|
-
latest: {
|
|
1740
|
-
type: String,
|
|
1741
|
-
required: false,
|
|
1742
|
-
},
|
|
1743
|
-
inputs: {
|
|
1744
|
-
type: [mongoose.Schema.Types.Mixed],
|
|
1745
|
-
required: false,
|
|
1746
|
-
default: undefined,
|
|
1747
|
-
},
|
|
1748
|
-
outputs: {
|
|
1749
|
-
type: String,
|
|
1750
|
-
required: false,
|
|
1751
|
-
},
|
|
1752
|
-
},
|
|
1753
|
-
default: undefined,
|
|
1754
|
-
},
|
|
1755
|
-
plugins: { type: [{ type: mongoose.Schema.Types.Mixed }], default: undefined },
|
|
1756
2040
|
content: {
|
|
1757
2041
|
type: [{ type: mongoose.Schema.Types.Mixed }],
|
|
1758
2042
|
default: undefined,
|
|
@@ -1792,10 +2076,16 @@ const messageSchema = new Schema({
|
|
|
1792
2076
|
expiredAt: {
|
|
1793
2077
|
type: Date,
|
|
1794
2078
|
},
|
|
2079
|
+
addedConvo: {
|
|
2080
|
+
type: Boolean,
|
|
2081
|
+
default: undefined,
|
|
2082
|
+
},
|
|
1795
2083
|
}, { timestamps: true });
|
|
1796
2084
|
messageSchema.index({ expiredAt: 1 }, { expireAfterSeconds: 0 });
|
|
1797
2085
|
messageSchema.index({ createdAt: 1 });
|
|
1798
2086
|
messageSchema.index({ messageId: 1, user: 1 }, { unique: true });
|
|
2087
|
+
// index for MeiliSearch sync operations
|
|
2088
|
+
messageSchema.index({ _meiliIndex: 1, expiredAt: 1 });
|
|
1799
2089
|
|
|
1800
2090
|
const pluginAuthSchema = new Schema({
|
|
1801
2091
|
authField: {
|
|
@@ -1838,10 +2128,6 @@ const presetSchema = new Schema({
|
|
|
1838
2128
|
type: Number,
|
|
1839
2129
|
},
|
|
1840
2130
|
...conversationPreset,
|
|
1841
|
-
agentOptions: {
|
|
1842
|
-
type: mongoose.Schema.Types.Mixed,
|
|
1843
|
-
default: null,
|
|
1844
|
-
},
|
|
1845
2131
|
}, { timestamps: true });
|
|
1846
2132
|
|
|
1847
2133
|
const projectSchema = new Schema({
|
|
@@ -1958,9 +2244,10 @@ const rolePermissionsSchema = new Schema({
|
|
|
1958
2244
|
[Permissions.USE]: { type: Boolean },
|
|
1959
2245
|
},
|
|
1960
2246
|
[PermissionTypes.PROMPTS]: {
|
|
1961
|
-
[Permissions.SHARED_GLOBAL]: { type: Boolean },
|
|
1962
2247
|
[Permissions.USE]: { type: Boolean },
|
|
1963
2248
|
[Permissions.CREATE]: { type: Boolean },
|
|
2249
|
+
[Permissions.SHARE]: { type: Boolean },
|
|
2250
|
+
[Permissions.SHARE_PUBLIC]: { type: Boolean },
|
|
1964
2251
|
},
|
|
1965
2252
|
[PermissionTypes.MEMORIES]: {
|
|
1966
2253
|
[Permissions.USE]: { type: Boolean },
|
|
@@ -1970,9 +2257,10 @@ const rolePermissionsSchema = new Schema({
|
|
|
1970
2257
|
[Permissions.OPT_OUT]: { type: Boolean },
|
|
1971
2258
|
},
|
|
1972
2259
|
[PermissionTypes.AGENTS]: {
|
|
1973
|
-
[Permissions.SHARED_GLOBAL]: { type: Boolean },
|
|
1974
2260
|
[Permissions.USE]: { type: Boolean },
|
|
1975
2261
|
[Permissions.CREATE]: { type: Boolean },
|
|
2262
|
+
[Permissions.SHARE]: { type: Boolean },
|
|
2263
|
+
[Permissions.SHARE_PUBLIC]: { type: Boolean },
|
|
1976
2264
|
},
|
|
1977
2265
|
[PermissionTypes.MULTI_CONVO]: {
|
|
1978
2266
|
[Permissions.USE]: { type: Boolean },
|
|
@@ -2000,6 +2288,12 @@ const rolePermissionsSchema = new Schema({
|
|
|
2000
2288
|
[PermissionTypes.FILE_CITATIONS]: {
|
|
2001
2289
|
[Permissions.USE]: { type: Boolean },
|
|
2002
2290
|
},
|
|
2291
|
+
[PermissionTypes.MCP_SERVERS]: {
|
|
2292
|
+
[Permissions.USE]: { type: Boolean },
|
|
2293
|
+
[Permissions.CREATE]: { type: Boolean },
|
|
2294
|
+
[Permissions.SHARE]: { type: Boolean },
|
|
2295
|
+
[Permissions.SHARE_PUBLIC]: { type: Boolean },
|
|
2296
|
+
},
|
|
2003
2297
|
}, { _id: false });
|
|
2004
2298
|
const roleSchema = new Schema({
|
|
2005
2299
|
name: { type: String, required: true, unique: true, index: true },
|
|
@@ -2143,6 +2437,7 @@ const transactionSchema = new Schema({
|
|
|
2143
2437
|
},
|
|
2144
2438
|
model: {
|
|
2145
2439
|
type: String,
|
|
2440
|
+
index: true,
|
|
2146
2441
|
},
|
|
2147
2442
|
context: {
|
|
2148
2443
|
type: String,
|
|
@@ -2290,6 +2585,17 @@ const userSchema = new Schema({
|
|
|
2290
2585
|
},
|
|
2291
2586
|
default: {},
|
|
2292
2587
|
},
|
|
2588
|
+
favorites: {
|
|
2589
|
+
type: [
|
|
2590
|
+
{
|
|
2591
|
+
_id: false,
|
|
2592
|
+
agentId: String, // for agent
|
|
2593
|
+
model: String, // for model
|
|
2594
|
+
endpoint: String, // for model
|
|
2595
|
+
},
|
|
2596
|
+
],
|
|
2597
|
+
default: [],
|
|
2598
|
+
},
|
|
2293
2599
|
/** Field for external source identification (for consistency with TPrincipal schema) */
|
|
2294
2600
|
idOnTheSource: {
|
|
2295
2601
|
type: String,
|
|
@@ -2509,26 +2815,6 @@ const getSyncConfig = () => ({
|
|
|
2509
2815
|
batchSize: parseInt(process.env.MEILI_SYNC_BATCH_SIZE || '100', 10),
|
|
2510
2816
|
delayMs: parseInt(process.env.MEILI_SYNC_DELAY_MS || '100', 10),
|
|
2511
2817
|
});
|
|
2512
|
-
/**
|
|
2513
|
-
* Local implementation of parseTextParts to avoid dependency on librechat-data-provider
|
|
2514
|
-
* Extracts text content from an array of content items
|
|
2515
|
-
*/
|
|
2516
|
-
const parseTextParts = (content) => {
|
|
2517
|
-
if (!Array.isArray(content)) {
|
|
2518
|
-
return '';
|
|
2519
|
-
}
|
|
2520
|
-
return content
|
|
2521
|
-
.filter((item) => item.type === 'text' && typeof item.text === 'string')
|
|
2522
|
-
.map((item) => item.text)
|
|
2523
|
-
.join(' ')
|
|
2524
|
-
.trim();
|
|
2525
|
-
};
|
|
2526
|
-
/**
|
|
2527
|
-
* Local implementation to handle Bing convoId conversion
|
|
2528
|
-
*/
|
|
2529
|
-
const cleanUpPrimaryKeyValue = (value) => {
|
|
2530
|
-
return value.replace(/--/g, '|');
|
|
2531
|
-
};
|
|
2532
2818
|
/**
|
|
2533
2819
|
* Validates the required options for configuring the mongoMeili plugin.
|
|
2534
2820
|
*/
|
|
@@ -2572,8 +2858,8 @@ const createMeiliMongooseModel = ({ index, attributesToIndex, syncOptions, }) =>
|
|
|
2572
2858
|
* Get the current sync progress
|
|
2573
2859
|
*/
|
|
2574
2860
|
static async getSyncProgress() {
|
|
2575
|
-
const totalDocuments = await this.countDocuments();
|
|
2576
|
-
const indexedDocuments = await this.countDocuments({ _meiliIndex: true });
|
|
2861
|
+
const totalDocuments = await this.countDocuments({ expiredAt: null });
|
|
2862
|
+
const indexedDocuments = await this.countDocuments({ expiredAt: null, _meiliIndex: true });
|
|
2577
2863
|
return {
|
|
2578
2864
|
totalProcessed: indexedDocuments,
|
|
2579
2865
|
totalDocuments,
|
|
@@ -2581,92 +2867,84 @@ const createMeiliMongooseModel = ({ index, attributesToIndex, syncOptions, }) =>
|
|
|
2581
2867
|
};
|
|
2582
2868
|
}
|
|
2583
2869
|
/**
|
|
2584
|
-
* Synchronizes
|
|
2585
|
-
*
|
|
2586
|
-
|
|
2587
|
-
|
|
2870
|
+
* Synchronizes data between the MongoDB collection and the MeiliSearch index by
|
|
2871
|
+
* incrementally indexing only documents where `expiredAt` is `null` and `_meiliIndex` is `false`
|
|
2872
|
+
* (i.e., non-expired documents that have not yet been indexed).
|
|
2873
|
+
* */
|
|
2874
|
+
static async syncWithMeili() {
|
|
2875
|
+
const startTime = Date.now();
|
|
2876
|
+
const { batchSize, delayMs } = syncConfig;
|
|
2877
|
+
const collectionName = primaryKey === 'messageId' ? 'messages' : 'conversations';
|
|
2878
|
+
logger.info(`[syncWithMeili] Starting sync for ${collectionName} with batch size ${batchSize}`);
|
|
2879
|
+
// Get approximate total count for raw estimation, the sync should not overcome this number
|
|
2880
|
+
const approxTotalCount = await this.estimatedDocumentCount();
|
|
2881
|
+
logger.info(`[syncWithMeili] Approximate total number of all ${collectionName}: ${approxTotalCount}`);
|
|
2588
2882
|
try {
|
|
2589
|
-
const startTime = Date.now();
|
|
2590
|
-
const { batchSize, delayMs } = syncConfig;
|
|
2591
|
-
logger.info(`[syncWithMeili] Starting sync for ${primaryKey === 'messageId' ? 'messages' : 'conversations'} with batch size ${batchSize}`);
|
|
2592
|
-
// Build query with resume capability
|
|
2593
|
-
const query = {};
|
|
2594
|
-
if (options === null || options === void 0 ? void 0 : options.resumeFromId) {
|
|
2595
|
-
query._id = { $gt: options.resumeFromId };
|
|
2596
|
-
}
|
|
2597
|
-
// Get total count for progress tracking
|
|
2598
|
-
const totalCount = await this.countDocuments(query);
|
|
2599
|
-
let processedCount = 0;
|
|
2600
2883
|
// First, handle documents that need to be removed from Meili
|
|
2884
|
+
logger.info(`[syncWithMeili] Starting cleanup of Meili index ${index.uid} before sync`);
|
|
2601
2885
|
await this.cleanupMeiliIndex(index, primaryKey, batchSize, delayMs);
|
|
2602
|
-
|
|
2603
|
-
|
|
2604
|
-
|
|
2605
|
-
|
|
2606
|
-
|
|
2607
|
-
|
|
2608
|
-
|
|
2609
|
-
|
|
2610
|
-
|
|
2611
|
-
|
|
2612
|
-
|
|
2613
|
-
|
|
2614
|
-
|
|
2615
|
-
|
|
2616
|
-
|
|
2617
|
-
|
|
2618
|
-
|
|
2619
|
-
|
|
2620
|
-
|
|
2621
|
-
|
|
2622
|
-
|
|
2623
|
-
|
|
2886
|
+
logger.info(`[syncWithMeili] Completed cleanup of Meili index: ${index.uid}`);
|
|
2887
|
+
}
|
|
2888
|
+
catch (error) {
|
|
2889
|
+
logger.error('[syncWithMeili] Error during cleanup Meili before sync:', error);
|
|
2890
|
+
throw error;
|
|
2891
|
+
}
|
|
2892
|
+
let processedCount = 0;
|
|
2893
|
+
let hasMore = true;
|
|
2894
|
+
while (hasMore) {
|
|
2895
|
+
const query = {
|
|
2896
|
+
expiredAt: null,
|
|
2897
|
+
_meiliIndex: false,
|
|
2898
|
+
};
|
|
2899
|
+
try {
|
|
2900
|
+
const documents = await this.find(query)
|
|
2901
|
+
.select(attributesToIndex.join(' ') + ' _meiliIndex')
|
|
2902
|
+
.limit(batchSize)
|
|
2903
|
+
.lean();
|
|
2904
|
+
// Check if there are more documents to process
|
|
2905
|
+
if (documents.length === 0) {
|
|
2906
|
+
logger.info('[syncWithMeili] No more documents to process');
|
|
2907
|
+
break;
|
|
2624
2908
|
}
|
|
2625
|
-
|
|
2626
|
-
|
|
2627
|
-
|
|
2628
|
-
|
|
2629
|
-
|
|
2630
|
-
|
|
2631
|
-
|
|
2632
|
-
|
|
2633
|
-
|
|
2634
|
-
|
|
2635
|
-
if (delayMs > 0) {
|
|
2636
|
-
await new Promise((resolve) => setTimeout(resolve, delayMs));
|
|
2637
|
-
}
|
|
2909
|
+
// Process the batch
|
|
2910
|
+
await this.processSyncBatch(index, documents);
|
|
2911
|
+
processedCount += documents.length;
|
|
2912
|
+
logger.info(`[syncWithMeili] Processed: ${processedCount}`);
|
|
2913
|
+
if (documents.length < batchSize) {
|
|
2914
|
+
hasMore = false;
|
|
2915
|
+
}
|
|
2916
|
+
// Add delay to prevent overwhelming resources
|
|
2917
|
+
if (hasMore && delayMs > 0) {
|
|
2918
|
+
await new Promise((resolve) => setTimeout(resolve, delayMs));
|
|
2638
2919
|
}
|
|
2639
2920
|
}
|
|
2640
|
-
|
|
2641
|
-
|
|
2642
|
-
|
|
2921
|
+
catch (error) {
|
|
2922
|
+
logger.error('[syncWithMeili] Error processing documents batch:', error);
|
|
2923
|
+
throw error;
|
|
2643
2924
|
}
|
|
2644
|
-
const duration = Date.now() - startTime;
|
|
2645
|
-
logger.info(`[syncWithMeili] Completed sync for ${primaryKey === 'messageId' ? 'messages' : 'conversations'} in ${duration}ms`);
|
|
2646
|
-
}
|
|
2647
|
-
catch (error) {
|
|
2648
|
-
logger.error('[syncWithMeili] Error during sync:', error);
|
|
2649
|
-
throw error;
|
|
2650
2925
|
}
|
|
2926
|
+
const duration = Date.now() - startTime;
|
|
2927
|
+
logger.info(`[syncWithMeili] Completed sync for ${collectionName}. Processed ${processedCount} documents in ${duration}ms`);
|
|
2651
2928
|
}
|
|
2652
2929
|
/**
|
|
2653
2930
|
* Process a batch of documents for syncing
|
|
2654
2931
|
*/
|
|
2655
|
-
static async processSyncBatch(index, documents
|
|
2932
|
+
static async processSyncBatch(index, documents) {
|
|
2656
2933
|
if (documents.length === 0) {
|
|
2657
2934
|
return;
|
|
2658
2935
|
}
|
|
2936
|
+
// Format documents for MeiliSearch
|
|
2937
|
+
const formattedDocs = documents.map((doc) => _.omitBy(_.pick(doc, attributesToIndex), (_v, k) => k.startsWith('$')));
|
|
2659
2938
|
try {
|
|
2660
2939
|
// Add documents to MeiliSearch
|
|
2661
|
-
await index.
|
|
2940
|
+
await index.addDocumentsInBatches(formattedDocs);
|
|
2662
2941
|
// Update MongoDB to mark documents as indexed
|
|
2663
|
-
|
|
2664
|
-
|
|
2665
|
-
}
|
|
2942
|
+
const docsIds = documents.map((doc) => doc._id);
|
|
2943
|
+
await this.updateMany({ _id: { $in: docsIds } }, { $set: { _meiliIndex: true } });
|
|
2666
2944
|
}
|
|
2667
2945
|
catch (error) {
|
|
2668
2946
|
logger.error('[processSyncBatch] Error processing batch:', error);
|
|
2669
|
-
|
|
2947
|
+
throw error;
|
|
2670
2948
|
}
|
|
2671
2949
|
}
|
|
2672
2950
|
/**
|
|
@@ -2691,7 +2969,7 @@ const createMeiliMongooseModel = ({ index, attributesToIndex, syncOptions, }) =>
|
|
|
2691
2969
|
// Delete documents that don't exist in MongoDB
|
|
2692
2970
|
const toDelete = meiliIds.filter((id) => !existingIds.has(id));
|
|
2693
2971
|
if (toDelete.length > 0) {
|
|
2694
|
-
await
|
|
2972
|
+
await index.deleteDocuments(toDelete.map(String));
|
|
2695
2973
|
logger.debug(`[cleanupMeiliIndex] Deleted ${toDelete.length} orphaned documents`);
|
|
2696
2974
|
}
|
|
2697
2975
|
offset += batchSize;
|
|
@@ -2718,7 +2996,7 @@ const createMeiliMongooseModel = ({ index, attributesToIndex, syncOptions, }) =>
|
|
|
2718
2996
|
const data = await index.search(q, params);
|
|
2719
2997
|
if (populate) {
|
|
2720
2998
|
const query = {};
|
|
2721
|
-
query[primaryKey] = _.map(data.hits, (hit) =>
|
|
2999
|
+
query[primaryKey] = _.map(data.hits, (hit) => hit[primaryKey]);
|
|
2722
3000
|
const projection = Object.keys(this.schema.obj).reduce((results, key) => {
|
|
2723
3001
|
if (!key.startsWith('$')) {
|
|
2724
3002
|
results[key] = 1;
|
|
@@ -2761,6 +3039,10 @@ const createMeiliMongooseModel = ({ index, attributesToIndex, syncOptions, }) =>
|
|
|
2761
3039
|
* Adds the current document to the MeiliSearch index with retry logic
|
|
2762
3040
|
*/
|
|
2763
3041
|
async addObjectToMeili(next) {
|
|
3042
|
+
// If this conversation or message has a TTL, don't index it
|
|
3043
|
+
if (!_.isNil(this.expiredAt)) {
|
|
3044
|
+
return next();
|
|
3045
|
+
}
|
|
2764
3046
|
const object = this.preprocessObjectForIndex();
|
|
2765
3047
|
const maxRetries = 3;
|
|
2766
3048
|
let retryCount = 0;
|
|
@@ -3075,7 +3357,38 @@ function createAgentModel(mongoose) {
|
|
|
3075
3357
|
* Creates or returns the AgentCategory model using the provided mongoose instance and schema
|
|
3076
3358
|
*/
|
|
3077
3359
|
function createAgentCategoryModel(mongoose) {
|
|
3078
|
-
return mongoose.models.AgentCategory ||
|
|
3360
|
+
return (mongoose.models.AgentCategory ||
|
|
3361
|
+
mongoose.model('AgentCategory', agentCategorySchema));
|
|
3362
|
+
}
|
|
3363
|
+
|
|
3364
|
+
const mcpServerSchema = new Schema({
|
|
3365
|
+
serverName: {
|
|
3366
|
+
type: String,
|
|
3367
|
+
index: true,
|
|
3368
|
+
unique: true,
|
|
3369
|
+
required: true,
|
|
3370
|
+
},
|
|
3371
|
+
config: {
|
|
3372
|
+
type: Schema.Types.Mixed,
|
|
3373
|
+
required: true,
|
|
3374
|
+
// Config contains: title, description, url, oauth, etc.
|
|
3375
|
+
},
|
|
3376
|
+
author: {
|
|
3377
|
+
type: Schema.Types.ObjectId,
|
|
3378
|
+
ref: 'User',
|
|
3379
|
+
required: true,
|
|
3380
|
+
index: true,
|
|
3381
|
+
},
|
|
3382
|
+
}, {
|
|
3383
|
+
timestamps: true,
|
|
3384
|
+
});
|
|
3385
|
+
mcpServerSchema.index({ updatedAt: -1, _id: 1 });
|
|
3386
|
+
|
|
3387
|
+
/**
|
|
3388
|
+
* Creates or returns the MCPServer model using the provided mongoose instance and schema
|
|
3389
|
+
*/
|
|
3390
|
+
function createMCPServerModel(mongoose) {
|
|
3391
|
+
return (mongoose.models.MCPServer || mongoose.model('MCPServer', mcpServerSchema));
|
|
3079
3392
|
}
|
|
3080
3393
|
|
|
3081
3394
|
/**
|
|
@@ -3203,7 +3516,7 @@ const accessRoleSchema = new Schema({
|
|
|
3203
3516
|
description: String,
|
|
3204
3517
|
resourceType: {
|
|
3205
3518
|
type: String,
|
|
3206
|
-
enum: ['agent', 'project', 'file', 'promptGroup'],
|
|
3519
|
+
enum: ['agent', 'project', 'file', 'promptGroup', 'mcpServer'],
|
|
3207
3520
|
required: true,
|
|
3208
3521
|
default: 'agent',
|
|
3209
3522
|
},
|
|
@@ -3304,6 +3617,7 @@ function createModels(mongoose) {
|
|
|
3304
3617
|
Message: createMessageModel(mongoose),
|
|
3305
3618
|
Agent: createAgentModel(mongoose),
|
|
3306
3619
|
AgentCategory: createAgentCategoryModel(mongoose),
|
|
3620
|
+
MCPServer: createMCPServerModel(mongoose),
|
|
3307
3621
|
Role: createRoleModel(mongoose),
|
|
3308
3622
|
Action: createActionModel(mongoose),
|
|
3309
3623
|
Assistant: createAssistantModel(mongoose),
|
|
@@ -3326,7 +3640,6 @@ function createModels(mongoose) {
|
|
|
3326
3640
|
};
|
|
3327
3641
|
}
|
|
3328
3642
|
|
|
3329
|
-
var _a;
|
|
3330
3643
|
class SessionError extends Error {
|
|
3331
3644
|
constructor(message, code = 'SESSION_ERROR') {
|
|
3332
3645
|
super(message);
|
|
@@ -3334,22 +3647,24 @@ class SessionError extends Error {
|
|
|
3334
3647
|
this.code = code;
|
|
3335
3648
|
}
|
|
3336
3649
|
}
|
|
3337
|
-
|
|
3338
|
-
const
|
|
3650
|
+
/** Default refresh token expiry: 7 days in milliseconds */
|
|
3651
|
+
const DEFAULT_REFRESH_TOKEN_EXPIRY = 1000 * 60 * 60 * 24 * 7;
|
|
3339
3652
|
// Factory function that takes mongoose instance and returns the methods
|
|
3340
3653
|
function createSessionMethods(mongoose) {
|
|
3341
3654
|
/**
|
|
3342
3655
|
* Creates a new session for a user
|
|
3343
3656
|
*/
|
|
3344
3657
|
async function createSession(userId, options = {}) {
|
|
3658
|
+
var _a;
|
|
3345
3659
|
if (!userId) {
|
|
3346
3660
|
throw new SessionError('User ID is required', 'INVALID_USER_ID');
|
|
3347
3661
|
}
|
|
3662
|
+
const expiresIn = (_a = options.expiresIn) !== null && _a !== void 0 ? _a : DEFAULT_REFRESH_TOKEN_EXPIRY;
|
|
3348
3663
|
try {
|
|
3349
3664
|
const Session = mongoose.models.Session;
|
|
3350
3665
|
const currentSession = new Session({
|
|
3351
3666
|
user: userId,
|
|
3352
|
-
expiration: options.expiration || new Date(Date.now() +
|
|
3667
|
+
expiration: options.expiration || new Date(Date.now() + expiresIn),
|
|
3353
3668
|
});
|
|
3354
3669
|
const refreshToken = await generateRefreshToken(currentSession);
|
|
3355
3670
|
return { session: currentSession, refreshToken };
|
|
@@ -3403,14 +3718,16 @@ function createSessionMethods(mongoose) {
|
|
|
3403
3718
|
/**
|
|
3404
3719
|
* Updates session expiration
|
|
3405
3720
|
*/
|
|
3406
|
-
async function updateExpiration(session, newExpiration) {
|
|
3721
|
+
async function updateExpiration(session, newExpiration, options = {}) {
|
|
3722
|
+
var _a;
|
|
3723
|
+
const expiresIn = (_a = options.expiresIn) !== null && _a !== void 0 ? _a : DEFAULT_REFRESH_TOKEN_EXPIRY;
|
|
3407
3724
|
try {
|
|
3408
3725
|
const Session = mongoose.models.Session;
|
|
3409
3726
|
const sessionDoc = typeof session === 'string' ? await Session.findById(session) : session;
|
|
3410
3727
|
if (!sessionDoc) {
|
|
3411
3728
|
throw new SessionError('Session not found', 'SESSION_NOT_FOUND');
|
|
3412
3729
|
}
|
|
3413
|
-
sessionDoc.expiration = newExpiration || new Date(Date.now() +
|
|
3730
|
+
sessionDoc.expiration = newExpiration || new Date(Date.now() + expiresIn);
|
|
3414
3731
|
return await sessionDoc.save();
|
|
3415
3732
|
}
|
|
3416
3733
|
catch (error) {
|
|
@@ -3481,7 +3798,9 @@ function createSessionMethods(mongoose) {
|
|
|
3481
3798
|
throw new SessionError('Invalid session object', 'INVALID_SESSION');
|
|
3482
3799
|
}
|
|
3483
3800
|
try {
|
|
3484
|
-
const expiresIn = session.expiration
|
|
3801
|
+
const expiresIn = session.expiration
|
|
3802
|
+
? session.expiration.getTime()
|
|
3803
|
+
: Date.now() + DEFAULT_REFRESH_TOKEN_EXPIRY;
|
|
3485
3804
|
if (!session.expiration) {
|
|
3486
3805
|
session.expiration = new Date(expiresIn);
|
|
3487
3806
|
}
|
|
@@ -3687,6 +4006,8 @@ function createRoleMethods(mongoose) {
|
|
|
3687
4006
|
};
|
|
3688
4007
|
}
|
|
3689
4008
|
|
|
4009
|
+
/** Default JWT session expiry: 15 minutes in milliseconds */
|
|
4010
|
+
const DEFAULT_SESSION_EXPIRY = 1000 * 60 * 15;
|
|
3690
4011
|
/** Factory function that takes mongoose instance and returns the methods */
|
|
3691
4012
|
function createUserMethods(mongoose) {
|
|
3692
4013
|
/**
|
|
@@ -3812,23 +4133,14 @@ function createUserMethods(mongoose) {
|
|
|
3812
4133
|
}
|
|
3813
4134
|
/**
|
|
3814
4135
|
* Generates a JWT token for a given user.
|
|
4136
|
+
* @param user - The user object
|
|
4137
|
+
* @param expiresIn - Optional expiry time in milliseconds. Default: 15 minutes
|
|
3815
4138
|
*/
|
|
3816
|
-
async function generateToken(user) {
|
|
4139
|
+
async function generateToken(user, expiresIn) {
|
|
3817
4140
|
if (!user) {
|
|
3818
4141
|
throw new Error('No user provided');
|
|
3819
4142
|
}
|
|
3820
|
-
|
|
3821
|
-
if (process.env.SESSION_EXPIRY !== undefined && process.env.SESSION_EXPIRY !== '') {
|
|
3822
|
-
try {
|
|
3823
|
-
const evaluated = eval(process.env.SESSION_EXPIRY);
|
|
3824
|
-
if (evaluated) {
|
|
3825
|
-
expires = evaluated;
|
|
3826
|
-
}
|
|
3827
|
-
}
|
|
3828
|
-
catch (error) {
|
|
3829
|
-
console.warn('Invalid SESSION_EXPIRY expression, using default:', error);
|
|
3830
|
-
}
|
|
3831
|
-
}
|
|
4143
|
+
const expires = expiresIn !== null && expiresIn !== void 0 ? expiresIn : DEFAULT_SESSION_EXPIRY;
|
|
3832
4144
|
return await signPayload({
|
|
3833
4145
|
payload: {
|
|
3834
4146
|
id: user._id,
|
|
@@ -3922,6 +4234,26 @@ function createUserMethods(mongoose) {
|
|
|
3922
4234
|
return userWithoutScore;
|
|
3923
4235
|
});
|
|
3924
4236
|
};
|
|
4237
|
+
/**
|
|
4238
|
+
* Updates the plugins for a user based on the action specified (install/uninstall).
|
|
4239
|
+
* @param userId - The user ID whose plugins are to be updated
|
|
4240
|
+
* @param plugins - The current plugins array
|
|
4241
|
+
* @param pluginKey - The key of the plugin to install or uninstall
|
|
4242
|
+
* @param action - The action to perform, 'install' or 'uninstall'
|
|
4243
|
+
* @returns The result of the update operation or null if action is invalid
|
|
4244
|
+
*/
|
|
4245
|
+
async function updateUserPlugins(userId, plugins, pluginKey, action) {
|
|
4246
|
+
const userPlugins = plugins !== null && plugins !== void 0 ? plugins : [];
|
|
4247
|
+
if (action === 'install') {
|
|
4248
|
+
return updateUser(userId, { plugins: [...userPlugins, pluginKey] });
|
|
4249
|
+
}
|
|
4250
|
+
if (action === 'uninstall') {
|
|
4251
|
+
return updateUser(userId, {
|
|
4252
|
+
plugins: userPlugins.filter((plugin) => plugin !== pluginKey),
|
|
4253
|
+
});
|
|
4254
|
+
}
|
|
4255
|
+
return null;
|
|
4256
|
+
}
|
|
3925
4257
|
return {
|
|
3926
4258
|
findUser,
|
|
3927
4259
|
countUsers,
|
|
@@ -3931,10 +4263,356 @@ function createUserMethods(mongoose) {
|
|
|
3931
4263
|
getUserById,
|
|
3932
4264
|
generateToken,
|
|
3933
4265
|
deleteUserById,
|
|
4266
|
+
updateUserPlugins,
|
|
3934
4267
|
toggleUserMemories,
|
|
3935
4268
|
};
|
|
3936
4269
|
}
|
|
3937
4270
|
|
|
4271
|
+
/** Factory function that takes mongoose instance and returns the key methods */
|
|
4272
|
+
function createKeyMethods(mongoose) {
|
|
4273
|
+
/**
|
|
4274
|
+
* Retrieves and decrypts the key value for a given user identified by userId and identifier name.
|
|
4275
|
+
* @param params - The parameters object
|
|
4276
|
+
* @param params.userId - The unique identifier for the user
|
|
4277
|
+
* @param params.name - The name associated with the key
|
|
4278
|
+
* @returns The decrypted key value
|
|
4279
|
+
* @throws Error if the key is not found or if there is a problem during key retrieval
|
|
4280
|
+
* @description This function searches for a user's key in the database using their userId and name.
|
|
4281
|
+
* If found, it decrypts the value of the key and returns it. If no key is found, it throws
|
|
4282
|
+
* an error indicating that there is no user key available.
|
|
4283
|
+
*/
|
|
4284
|
+
async function getUserKey(params) {
|
|
4285
|
+
const { userId, name } = params;
|
|
4286
|
+
const Key = mongoose.models.Key;
|
|
4287
|
+
const keyValue = (await Key.findOne({ userId, name }).lean());
|
|
4288
|
+
if (!keyValue) {
|
|
4289
|
+
throw new Error(JSON.stringify({
|
|
4290
|
+
type: ErrorTypes.NO_USER_KEY,
|
|
4291
|
+
}));
|
|
4292
|
+
}
|
|
4293
|
+
return await decrypt(keyValue.value);
|
|
4294
|
+
}
|
|
4295
|
+
/**
|
|
4296
|
+
* Retrieves, decrypts, and parses the key values for a given user identified by userId and name.
|
|
4297
|
+
* @param params - The parameters object
|
|
4298
|
+
* @param params.userId - The unique identifier for the user
|
|
4299
|
+
* @param params.name - The name associated with the key
|
|
4300
|
+
* @returns The decrypted and parsed key values
|
|
4301
|
+
* @throws Error if the key is invalid or if there is a problem during key value parsing
|
|
4302
|
+
* @description This function retrieves a user's encrypted key using their userId and name, decrypts it,
|
|
4303
|
+
* and then attempts to parse the decrypted string into a JSON object. If the parsing fails,
|
|
4304
|
+
* it throws an error indicating that the user key is invalid.
|
|
4305
|
+
*/
|
|
4306
|
+
async function getUserKeyValues(params) {
|
|
4307
|
+
const { userId, name } = params;
|
|
4308
|
+
const userValues = await getUserKey({ userId, name });
|
|
4309
|
+
try {
|
|
4310
|
+
return JSON.parse(userValues);
|
|
4311
|
+
}
|
|
4312
|
+
catch (e) {
|
|
4313
|
+
logger$1.error('[getUserKeyValues]', e);
|
|
4314
|
+
throw new Error(JSON.stringify({
|
|
4315
|
+
type: ErrorTypes.INVALID_USER_KEY,
|
|
4316
|
+
}));
|
|
4317
|
+
}
|
|
4318
|
+
}
|
|
4319
|
+
/**
|
|
4320
|
+
* Retrieves the expiry information of a user's key identified by userId and name.
|
|
4321
|
+
* @param params - The parameters object
|
|
4322
|
+
* @param params.userId - The unique identifier for the user
|
|
4323
|
+
* @param params.name - The name associated with the key
|
|
4324
|
+
* @returns The expiry date of the key or null if the key doesn't exist
|
|
4325
|
+
* @description This function fetches a user's key from the database using their userId and name and
|
|
4326
|
+
* returns its expiry date. If the key is not found, it returns null for the expiry date.
|
|
4327
|
+
*/
|
|
4328
|
+
async function getUserKeyExpiry(params) {
|
|
4329
|
+
const { userId, name } = params;
|
|
4330
|
+
const Key = mongoose.models.Key;
|
|
4331
|
+
const keyValue = (await Key.findOne({ userId, name }).lean());
|
|
4332
|
+
if (!keyValue) {
|
|
4333
|
+
return { expiresAt: null };
|
|
4334
|
+
}
|
|
4335
|
+
return { expiresAt: keyValue.expiresAt || 'never' };
|
|
4336
|
+
}
|
|
4337
|
+
/**
|
|
4338
|
+
* Updates or inserts a new key for a given user identified by userId and name, with a specified value and expiry date.
|
|
4339
|
+
* @param params - The parameters object
|
|
4340
|
+
* @param params.userId - The unique identifier for the user
|
|
4341
|
+
* @param params.name - The name associated with the key
|
|
4342
|
+
* @param params.value - The value to be encrypted and stored as the key's value
|
|
4343
|
+
* @param params.expiresAt - The expiry date for the key [optional]
|
|
4344
|
+
* @returns The updated or newly inserted key document
|
|
4345
|
+
* @description This function either updates an existing user key or inserts a new one into the database,
|
|
4346
|
+
* after encrypting the provided value. It sets the provided expiry date for the key (or unsets for no expiry).
|
|
4347
|
+
*/
|
|
4348
|
+
async function updateUserKey(params) {
|
|
4349
|
+
const { userId, name, value, expiresAt = null } = params;
|
|
4350
|
+
const Key = mongoose.models.Key;
|
|
4351
|
+
const encryptedValue = await encrypt(value);
|
|
4352
|
+
const updateObject = {
|
|
4353
|
+
userId,
|
|
4354
|
+
name,
|
|
4355
|
+
value: encryptedValue,
|
|
4356
|
+
};
|
|
4357
|
+
const updateQuery = {
|
|
4358
|
+
$set: updateObject,
|
|
4359
|
+
};
|
|
4360
|
+
if (expiresAt) {
|
|
4361
|
+
updateObject.expiresAt = new Date(expiresAt);
|
|
4362
|
+
}
|
|
4363
|
+
else {
|
|
4364
|
+
updateQuery.$unset = { expiresAt: '' };
|
|
4365
|
+
}
|
|
4366
|
+
return await Key.findOneAndUpdate({ userId, name }, updateQuery, {
|
|
4367
|
+
upsert: true,
|
|
4368
|
+
new: true,
|
|
4369
|
+
}).lean();
|
|
4370
|
+
}
|
|
4371
|
+
/**
|
|
4372
|
+
* Deletes a key or all keys for a given user identified by userId, optionally based on a specified name.
|
|
4373
|
+
* @param params - The parameters object
|
|
4374
|
+
* @param params.userId - The unique identifier for the user
|
|
4375
|
+
* @param params.name - The name associated with the key to delete. If not provided and all is true, deletes all keys
|
|
4376
|
+
* @param params.all - Whether to delete all keys for the user
|
|
4377
|
+
* @returns The result of the deletion operation
|
|
4378
|
+
* @description This function deletes a specific key or all keys for a user from the database.
|
|
4379
|
+
* If a name is provided and all is false, it deletes only the key with that name.
|
|
4380
|
+
* If all is true, it ignores the name and deletes all keys for the user.
|
|
4381
|
+
*/
|
|
4382
|
+
async function deleteUserKey(params) {
|
|
4383
|
+
const { userId, name, all = false } = params;
|
|
4384
|
+
const Key = mongoose.models.Key;
|
|
4385
|
+
if (all) {
|
|
4386
|
+
return await Key.deleteMany({ userId });
|
|
4387
|
+
}
|
|
4388
|
+
return await Key.findOneAndDelete({ userId, name }).lean();
|
|
4389
|
+
}
|
|
4390
|
+
return {
|
|
4391
|
+
getUserKey,
|
|
4392
|
+
updateUserKey,
|
|
4393
|
+
deleteUserKey,
|
|
4394
|
+
getUserKeyValues,
|
|
4395
|
+
getUserKeyExpiry,
|
|
4396
|
+
};
|
|
4397
|
+
}
|
|
4398
|
+
|
|
4399
|
+
/** Factory function that takes mongoose instance and returns the file methods */
|
|
4400
|
+
function createFileMethods(mongoose) {
|
|
4401
|
+
/**
|
|
4402
|
+
* Finds a file by its file_id with additional query options.
|
|
4403
|
+
* @param file_id - The unique identifier of the file
|
|
4404
|
+
* @param options - Query options for filtering, projection, etc.
|
|
4405
|
+
* @returns A promise that resolves to the file document or null
|
|
4406
|
+
*/
|
|
4407
|
+
async function findFileById(file_id, options = {}) {
|
|
4408
|
+
const File = mongoose.models.File;
|
|
4409
|
+
return File.findOne({ file_id, ...options }).lean();
|
|
4410
|
+
}
|
|
4411
|
+
/**
|
|
4412
|
+
* Retrieves files matching a given filter, sorted by the most recently updated.
|
|
4413
|
+
* @param filter - The filter criteria to apply
|
|
4414
|
+
* @param _sortOptions - Optional sort parameters
|
|
4415
|
+
* @param selectFields - Fields to include/exclude in the query results. Default excludes the 'text' field
|
|
4416
|
+
* @param options - Additional query options (userId, agentId for ACL)
|
|
4417
|
+
* @returns A promise that resolves to an array of file documents
|
|
4418
|
+
*/
|
|
4419
|
+
async function getFiles(filter, _sortOptions, selectFields) {
|
|
4420
|
+
const File = mongoose.models.File;
|
|
4421
|
+
const sortOptions = { updatedAt: -1, ..._sortOptions };
|
|
4422
|
+
const query = File.find(filter);
|
|
4423
|
+
if (selectFields != null) {
|
|
4424
|
+
query.select(selectFields);
|
|
4425
|
+
}
|
|
4426
|
+
else {
|
|
4427
|
+
query.select({ text: 0 });
|
|
4428
|
+
}
|
|
4429
|
+
return await query.sort(sortOptions).lean();
|
|
4430
|
+
}
|
|
4431
|
+
/**
|
|
4432
|
+
* Retrieves tool files (files that are embedded or have a fileIdentifier) from an array of file IDs
|
|
4433
|
+
* @param fileIds - Array of file_id strings to search for
|
|
4434
|
+
* @param toolResourceSet - Optional filter for tool resources
|
|
4435
|
+
* @returns Files that match the criteria
|
|
4436
|
+
*/
|
|
4437
|
+
async function getToolFilesByIds(fileIds, toolResourceSet) {
|
|
4438
|
+
var _a, _b, _c;
|
|
4439
|
+
if (!fileIds || !fileIds.length || !(toolResourceSet === null || toolResourceSet === void 0 ? void 0 : toolResourceSet.size)) {
|
|
4440
|
+
return [];
|
|
4441
|
+
}
|
|
4442
|
+
try {
|
|
4443
|
+
const filter = {
|
|
4444
|
+
file_id: { $in: fileIds },
|
|
4445
|
+
$or: [],
|
|
4446
|
+
};
|
|
4447
|
+
if (toolResourceSet.has(EToolResources.context)) {
|
|
4448
|
+
(_a = filter.$or) === null || _a === void 0 ? void 0 : _a.push({ text: { $exists: true, $ne: null }, context: FileContext.agents });
|
|
4449
|
+
}
|
|
4450
|
+
if (toolResourceSet.has(EToolResources.file_search)) {
|
|
4451
|
+
(_b = filter.$or) === null || _b === void 0 ? void 0 : _b.push({ embedded: true });
|
|
4452
|
+
}
|
|
4453
|
+
if (toolResourceSet.has(EToolResources.execute_code)) {
|
|
4454
|
+
(_c = filter.$or) === null || _c === void 0 ? void 0 : _c.push({ 'metadata.fileIdentifier': { $exists: true } });
|
|
4455
|
+
}
|
|
4456
|
+
const selectFields = { text: 0 };
|
|
4457
|
+
const sortOptions = { updatedAt: -1 };
|
|
4458
|
+
const results = await getFiles(filter, sortOptions, selectFields);
|
|
4459
|
+
return results !== null && results !== void 0 ? results : [];
|
|
4460
|
+
}
|
|
4461
|
+
catch (error) {
|
|
4462
|
+
logger$1.error('[getToolFilesByIds] Error retrieving tool files:', error);
|
|
4463
|
+
throw new Error('Error retrieving tool files');
|
|
4464
|
+
}
|
|
4465
|
+
}
|
|
4466
|
+
/**
|
|
4467
|
+
* Creates a new file with a TTL of 1 hour.
|
|
4468
|
+
* @param data - The file data to be created, must contain file_id
|
|
4469
|
+
* @param disableTTL - Whether to disable the TTL
|
|
4470
|
+
* @returns A promise that resolves to the created file document
|
|
4471
|
+
*/
|
|
4472
|
+
async function createFile(data, disableTTL) {
|
|
4473
|
+
const File = mongoose.models.File;
|
|
4474
|
+
const fileData = {
|
|
4475
|
+
...data,
|
|
4476
|
+
expiresAt: new Date(Date.now() + 3600 * 1000),
|
|
4477
|
+
};
|
|
4478
|
+
if (disableTTL) {
|
|
4479
|
+
delete fileData.expiresAt;
|
|
4480
|
+
}
|
|
4481
|
+
return File.findOneAndUpdate({ file_id: data.file_id }, fileData, {
|
|
4482
|
+
new: true,
|
|
4483
|
+
upsert: true,
|
|
4484
|
+
}).lean();
|
|
4485
|
+
}
|
|
4486
|
+
/**
|
|
4487
|
+
* Updates a file identified by file_id with new data and removes the TTL.
|
|
4488
|
+
* @param data - The data to update, must contain file_id
|
|
4489
|
+
* @returns A promise that resolves to the updated file document
|
|
4490
|
+
*/
|
|
4491
|
+
async function updateFile(data) {
|
|
4492
|
+
const File = mongoose.models.File;
|
|
4493
|
+
const { file_id, ...update } = data;
|
|
4494
|
+
const updateOperation = {
|
|
4495
|
+
$set: update,
|
|
4496
|
+
$unset: { expiresAt: '' },
|
|
4497
|
+
};
|
|
4498
|
+
return File.findOneAndUpdate({ file_id }, updateOperation, {
|
|
4499
|
+
new: true,
|
|
4500
|
+
}).lean();
|
|
4501
|
+
}
|
|
4502
|
+
/**
|
|
4503
|
+
* Increments the usage of a file identified by file_id.
|
|
4504
|
+
* @param data - The data to update, must contain file_id and the increment value for usage
|
|
4505
|
+
* @returns A promise that resolves to the updated file document
|
|
4506
|
+
*/
|
|
4507
|
+
async function updateFileUsage(data) {
|
|
4508
|
+
const File = mongoose.models.File;
|
|
4509
|
+
const { file_id, inc = 1 } = data;
|
|
4510
|
+
const updateOperation = {
|
|
4511
|
+
$inc: { usage: inc },
|
|
4512
|
+
$unset: { expiresAt: '', temp_file_id: '' },
|
|
4513
|
+
};
|
|
4514
|
+
return File.findOneAndUpdate({ file_id }, updateOperation, {
|
|
4515
|
+
new: true,
|
|
4516
|
+
}).lean();
|
|
4517
|
+
}
|
|
4518
|
+
/**
|
|
4519
|
+
* Deletes a file identified by file_id.
|
|
4520
|
+
* @param file_id - The unique identifier of the file to delete
|
|
4521
|
+
* @returns A promise that resolves to the deleted file document or null
|
|
4522
|
+
*/
|
|
4523
|
+
async function deleteFile(file_id) {
|
|
4524
|
+
const File = mongoose.models.File;
|
|
4525
|
+
return File.findOneAndDelete({ file_id }).lean();
|
|
4526
|
+
}
|
|
4527
|
+
/**
|
|
4528
|
+
* Deletes a file identified by a filter.
|
|
4529
|
+
* @param filter - The filter criteria to apply
|
|
4530
|
+
* @returns A promise that resolves to the deleted file document or null
|
|
4531
|
+
*/
|
|
4532
|
+
async function deleteFileByFilter(filter) {
|
|
4533
|
+
const File = mongoose.models.File;
|
|
4534
|
+
return File.findOneAndDelete(filter).lean();
|
|
4535
|
+
}
|
|
4536
|
+
/**
|
|
4537
|
+
* Deletes multiple files identified by an array of file_ids.
|
|
4538
|
+
* @param file_ids - The unique identifiers of the files to delete
|
|
4539
|
+
* @param user - Optional user ID to filter by
|
|
4540
|
+
* @returns A promise that resolves to the result of the deletion operation
|
|
4541
|
+
*/
|
|
4542
|
+
async function deleteFiles(file_ids, user) {
|
|
4543
|
+
const File = mongoose.models.File;
|
|
4544
|
+
let deleteQuery = { file_id: { $in: file_ids } };
|
|
4545
|
+
if (user) {
|
|
4546
|
+
deleteQuery = { user: user };
|
|
4547
|
+
}
|
|
4548
|
+
return File.deleteMany(deleteQuery);
|
|
4549
|
+
}
|
|
4550
|
+
/**
|
|
4551
|
+
* Batch updates files with new signed URLs in MongoDB
|
|
4552
|
+
* @param updates - Array of updates in the format { file_id, filepath }
|
|
4553
|
+
*/
|
|
4554
|
+
async function batchUpdateFiles(updates) {
|
|
4555
|
+
if (!updates || updates.length === 0) {
|
|
4556
|
+
return;
|
|
4557
|
+
}
|
|
4558
|
+
const File = mongoose.models.File;
|
|
4559
|
+
const bulkOperations = updates.map((update) => ({
|
|
4560
|
+
updateOne: {
|
|
4561
|
+
filter: { file_id: update.file_id },
|
|
4562
|
+
update: { $set: { filepath: update.filepath } },
|
|
4563
|
+
},
|
|
4564
|
+
}));
|
|
4565
|
+
const result = await File.bulkWrite(bulkOperations);
|
|
4566
|
+
logger$1.info(`Updated ${result.modifiedCount} files with new S3 URLs`);
|
|
4567
|
+
}
|
|
4568
|
+
/**
|
|
4569
|
+
* Updates usage tracking for multiple files.
|
|
4570
|
+
* Processes files and optional fileIds, updating their usage count in the database.
|
|
4571
|
+
*
|
|
4572
|
+
* @param files - Array of file objects to process
|
|
4573
|
+
* @param fileIds - Optional array of file IDs to process
|
|
4574
|
+
* @returns Array of updated file documents (with null results filtered out)
|
|
4575
|
+
*/
|
|
4576
|
+
async function updateFilesUsage(files, fileIds) {
|
|
4577
|
+
const promises = [];
|
|
4578
|
+
const seen = new Set();
|
|
4579
|
+
for (const file of files) {
|
|
4580
|
+
const { file_id } = file;
|
|
4581
|
+
if (seen.has(file_id)) {
|
|
4582
|
+
continue;
|
|
4583
|
+
}
|
|
4584
|
+
seen.add(file_id);
|
|
4585
|
+
promises.push(updateFileUsage({ file_id }));
|
|
4586
|
+
}
|
|
4587
|
+
if (!fileIds) {
|
|
4588
|
+
const results = await Promise.all(promises);
|
|
4589
|
+
return results.filter((result) => result != null);
|
|
4590
|
+
}
|
|
4591
|
+
for (const file_id of fileIds) {
|
|
4592
|
+
if (seen.has(file_id)) {
|
|
4593
|
+
continue;
|
|
4594
|
+
}
|
|
4595
|
+
seen.add(file_id);
|
|
4596
|
+
promises.push(updateFileUsage({ file_id }));
|
|
4597
|
+
}
|
|
4598
|
+
const results = await Promise.all(promises);
|
|
4599
|
+
return results.filter((result) => result != null);
|
|
4600
|
+
}
|
|
4601
|
+
return {
|
|
4602
|
+
findFileById,
|
|
4603
|
+
getFiles,
|
|
4604
|
+
getToolFilesByIds,
|
|
4605
|
+
createFile,
|
|
4606
|
+
updateFile,
|
|
4607
|
+
updateFileUsage,
|
|
4608
|
+
deleteFile,
|
|
4609
|
+
deleteFiles,
|
|
4610
|
+
deleteFileByFilter,
|
|
4611
|
+
batchUpdateFiles,
|
|
4612
|
+
updateFilesUsage,
|
|
4613
|
+
};
|
|
4614
|
+
}
|
|
4615
|
+
|
|
3938
4616
|
/**
|
|
3939
4617
|
* Formats a date in YYYY-MM-DD format
|
|
3940
4618
|
*/
|
|
@@ -4282,6 +4960,258 @@ function createAgentCategoryMethods(mongoose) {
|
|
|
4282
4960
|
};
|
|
4283
4961
|
}
|
|
4284
4962
|
|
|
4963
|
+
const NORMALIZED_LIMIT_DEFAULT = 20;
|
|
4964
|
+
const MAX_CREATE_RETRIES = 5;
|
|
4965
|
+
const RETRY_BASE_DELAY_MS = 25;
|
|
4966
|
+
/**
 * Determines whether an error is a MongoDB duplicate key (E11000) error.
 * serverName is the only unique index on MCPServer, so any E11000 raised
 * during creation necessarily means a serverName collision.
 */
function isDuplicateKeyError(error) {
    const carriesCode = error !== null && typeof error === 'object' && 'code' in error;
    return carriesCode ? error.code === 11000 : false;
}
|
|
4978
|
+
/**
 * Escapes every regex metacharacter in a string so the result matches it literally.
 */
function escapeRegex(str) {
    // Prefix each special character with a backslash via a replacer callback.
    return str.replace(/[.*+?^${}()|[\]\\]/g, (ch) => `\\${ch}`);
}
|
|
4984
|
+
/**
 * Derives a URL-friendly server name from a human-readable title:
 * lowercases, strips special characters, and turns whitespace into hyphens.
 * Falls back to 'mcp-server' when nothing slug-worthy remains.
 */
function generateServerNameFromTitle(title) {
    let slug = title.toLowerCase().trim();
    slug = slug.replace(/[^a-z0-9\s-]/g, ''); // keep only alphanumerics, spaces, hyphens
    slug = slug.replace(/\s+/g, '-'); // whitespace runs become a single hyphen
    slug = slug.replace(/-+/g, '-'); // collapse consecutive hyphens
    slug = slug.replace(/^-|-$/g, ''); // strip a leading/trailing hyphen
    return slug.length > 0 ? slug : 'mcp-server';
}
|
|
4998
|
+
/**
 * Creates data-access methods for the MCPServer collection.
 * @param mongoose - Mongoose instance with the MCPServer model registered
 * @returns Object exposing create/find/list/update/delete methods for MCP servers
 */
function createMCPServerMethods(mongoose) {
    /**
     * Finds the next available server name by checking for duplicates.
     * If baseName exists, returns baseName-2, baseName-3, etc.
     */
    async function findNextAvailableServerName(baseName) {
        const MCPServer = mongoose.models.MCPServer;
        // Find all servers with matching base name pattern (baseName or baseName-N)
        const escapedBaseName = escapeRegex(baseName);
        const existing = await MCPServer.find({
            serverName: { $regex: `^${escapedBaseName}(-\\d+)?$` },
        })
            .select('serverName')
            .lean();
        if (existing.length === 0) {
            return baseName;
        }
        // Extract the numeric suffix of each existing name; a bare baseName counts as 1
        const numbers = existing.map((s) => {
            const match = s.serverName.match(/-(\d+)$/);
            return match ? parseInt(match[1], 10) : 1;
        });
        const maxNumber = Math.max(...numbers);
        return `${baseName}-${maxNumber + 1}`;
    }
    /**
     * Create a new MCP server with retry logic for handling race conditions.
     * When multiple requests try to create servers with the same title simultaneously,
     * they may get the same serverName from findNextAvailableServerName() before any
     * creates the record (TOCTOU race condition). This is handled by retrying with
     * exponential backoff when a duplicate key error occurs.
     * @param data - Object containing config (with title, description, url, etc.) and author
     * @returns The created MCP server document (plain object)
     */
    async function createMCPServer(data) {
        const MCPServer = mongoose.models.MCPServer;
        let lastError;
        for (let attempt = 0; attempt < MAX_CREATE_RETRIES; attempt++) {
            try {
                // Generate serverName from title, with fallback to nanoid if no title.
                // Important: regenerate on each attempt to get a fresh available name.
                let serverName;
                if (data.config.title) {
                    const baseSlug = generateServerNameFromTitle(data.config.title);
                    serverName = await findNextAvailableServerName(baseSlug);
                }
                else {
                    serverName = `mcp-${nanoid(16)}`;
                }
                const newServer = await MCPServer.create({
                    serverName,
                    config: data.config,
                    author: data.author,
                });
                return newServer.toObject();
            }
            catch (error) {
                lastError = error;
                // Only retry on duplicate key errors (serverName collision)
                if (isDuplicateKeyError(error) && attempt < MAX_CREATE_RETRIES - 1) {
                    // Exponential backoff: base * 2^attempt, i.e. 25ms, 50ms, 100ms, 200ms
                    const delay = RETRY_BASE_DELAY_MS * Math.pow(2, attempt);
                    logger$1.debug(`[createMCPServer] Duplicate serverName detected, retrying (attempt ${attempt + 2}/${MAX_CREATE_RETRIES}) after ${delay}ms`);
                    await new Promise((resolve) => setTimeout(resolve, delay));
                    continue;
                }
                // Not a duplicate key error or out of retries - throw immediately
                throw error;
            }
        }
        // Should not reach here, but TypeScript requires a return
        throw lastError;
    }
    /**
     * Find an MCP server by serverName
     * @param serverName - The unique server name identifier
     * @returns The MCP server document or null
     */
    async function findMCPServerByServerName(serverName) {
        const MCPServer = mongoose.models.MCPServer;
        return await MCPServer.findOne({ serverName }).lean();
    }
    /**
     * Find an MCP server by MongoDB ObjectId
     * @param _id - The MongoDB ObjectId
     * @returns The MCP server document or null
     */
    async function findMCPServerByObjectId(_id) {
        const MCPServer = mongoose.models.MCPServer;
        return await MCPServer.findById(_id).lean();
    }
    /**
     * Find MCP servers by author, most recently updated first
     * @param authorId - The author's ObjectId or string
     * @returns Array of MCP server documents
     */
    async function findMCPServersByAuthor(authorId) {
        const MCPServer = mongoose.models.MCPServer;
        return await MCPServer.find({ author: authorId }).sort({ updatedAt: -1 }).lean();
    }
    /**
     * Get a paginated list of MCP servers by IDs with filtering and search
     * @param ids - Array of ObjectIds to include
     * @param otherParams - Additional filter parameters (e.g., search)
     * @param limit - Page size limit (null for no pagination); clamped to [1, 100]
     * @param after - Opaque base64 cursor ({ updatedAt, _id }) for pagination
     * @returns Paginated list: { data, has_more, after }
     */
    async function getListMCPServersByIds({ ids = [], otherParams = {}, limit = null, after = null, }) {
        const MCPServer = mongoose.models.MCPServer;
        const isPaginated = limit !== null && limit !== undefined;
        const normalizedLimit = isPaginated
            ? Math.min(Math.max(1, parseInt(String(limit), 10) || NORMALIZED_LIMIT_DEFAULT), 100)
            : null;
        // Build base query combining accessible servers with other filters
        const baseQuery = { ...otherParams, _id: { $in: ids } };
        // Apply the cursor condition, if a valid cursor was provided
        if (after) {
            try {
                const cursor = JSON.parse(Buffer.from(after, 'base64').toString('utf8'));
                const { updatedAt, _id } = cursor;
                const cursorCondition = {
                    $or: [
                        { updatedAt: { $lt: new Date(updatedAt) } },
                        { updatedAt: new Date(updatedAt), _id: { $gt: new mongoose.Types.ObjectId(_id) } },
                    ],
                };
                // Merge cursor condition with base query: move the existing filters into
                // an $and clause alongside the cursor condition. (baseQuery always has
                // at least the _id filter, so no emptiness guard is needed.)
                baseQuery.$and = [{ ...baseQuery }, cursorCondition];
                for (const key of Object.keys(baseQuery)) {
                    if (key !== '$and') {
                        delete baseQuery[key];
                    }
                }
            }
            catch (error) {
                // Invalid cursor, ignore
                logger$1.warn('[getListMCPServersByIds] Invalid cursor provided', error);
            }
        }
        if (normalizedLimit === null) {
            // No pagination - return all matching servers
            const servers = await MCPServer.find(baseQuery).sort({ updatedAt: -1, _id: 1 }).lean();
            return {
                data: servers,
                has_more: false,
                after: null,
            };
        }
        // Paginated query: fetch one extra row to detect whether more pages exist
        const servers = await MCPServer.find(baseQuery)
            .sort({ updatedAt: -1, _id: 1 })
            .limit(normalizedLimit + 1)
            .lean();
        const hasMore = servers.length > normalizedLimit;
        const data = hasMore ? servers.slice(0, normalizedLimit) : servers;
        let nextCursor = null;
        if (hasMore && data.length > 0) {
            const lastItem = data[data.length - 1];
            nextCursor = Buffer.from(JSON.stringify({
                updatedAt: lastItem.updatedAt,
                _id: lastItem._id,
            })).toString('base64');
        }
        return {
            data,
            has_more: hasMore,
            after: nextCursor,
        };
    }
    /**
     * Update an MCP server's config
     * @param serverName - The unique server name identifier
     * @param updateData - Object containing fields to $set (e.g., config)
     * @returns The updated MCP server document or null
     */
    async function updateMCPServer(serverName, updateData) {
        const MCPServer = mongoose.models.MCPServer;
        return await MCPServer.findOneAndUpdate({ serverName }, { $set: updateData }, { new: true, runValidators: true }).lean();
    }
    /**
     * Delete an MCP server
     * @param serverName - The unique server name identifier
     * @returns The deleted MCP server document or null
     */
    async function deleteMCPServer(serverName) {
        const MCPServer = mongoose.models.MCPServer;
        return await MCPServer.findOneAndDelete({ serverName }).lean();
    }
    /**
     * Get MCP servers by their serverName strings
     * @param names - Array of serverName strings to fetch
     * @returns Object containing array of MCP server documents
     */
    async function getListMCPServersByNames({ names = [] }) {
        if (names.length === 0) {
            return { data: [] };
        }
        const MCPServer = mongoose.models.MCPServer;
        const servers = await MCPServer.find({ serverName: { $in: names } }).lean();
        return { data: servers };
    }
    return {
        createMCPServer,
        findMCPServerByServerName,
        findMCPServerByObjectId,
        findMCPServersByAuthor,
        getListMCPServersByIds,
        getListMCPServersByNames,
        updateMCPServer,
        deleteMCPServer,
    };
}
|
|
5214
|
+
|
|
4285
5215
|
// Factory function that takes mongoose instance and returns the methods
|
|
4286
5216
|
function createPluginAuthMethods(mongoose) {
|
|
4287
5217
|
/**
|
|
@@ -4509,6 +5439,27 @@ function createAccessRoleMethods(mongoose) {
|
|
|
4509
5439
|
resourceType: ResourceType.PROMPTGROUP,
|
|
4510
5440
|
permBits: RoleBits.OWNER,
|
|
4511
5441
|
},
|
|
5442
|
+
{
|
|
5443
|
+
accessRoleId: AccessRoleIds.MCPSERVER_VIEWER,
|
|
5444
|
+
name: 'com_ui_mcp_server_role_viewer',
|
|
5445
|
+
description: 'com_ui_mcp_server_role_viewer_desc',
|
|
5446
|
+
resourceType: ResourceType.MCPSERVER,
|
|
5447
|
+
permBits: RoleBits.VIEWER,
|
|
5448
|
+
},
|
|
5449
|
+
{
|
|
5450
|
+
accessRoleId: AccessRoleIds.MCPSERVER_EDITOR,
|
|
5451
|
+
name: 'com_ui_mcp_server_role_editor',
|
|
5452
|
+
description: 'com_ui_mcp_server_role_editor_desc',
|
|
5453
|
+
resourceType: ResourceType.MCPSERVER,
|
|
5454
|
+
permBits: RoleBits.EDITOR,
|
|
5455
|
+
},
|
|
5456
|
+
{
|
|
5457
|
+
accessRoleId: AccessRoleIds.MCPSERVER_OWNER,
|
|
5458
|
+
name: 'com_ui_mcp_server_role_owner',
|
|
5459
|
+
description: 'com_ui_mcp_server_role_owner_desc',
|
|
5460
|
+
resourceType: ResourceType.MCPSERVER,
|
|
5461
|
+
permBits: RoleBits.OWNER,
|
|
5462
|
+
},
|
|
4512
5463
|
];
|
|
4513
5464
|
const result = {};
|
|
4514
5465
|
for (const role of defaultRoles) {
|
|
@@ -5089,6 +6040,49 @@ function createAclEntryMethods(mongoose) {
|
|
|
5089
6040
|
}
|
|
5090
6041
|
return effectiveBits;
|
|
5091
6042
|
}
|
|
6043
|
+
/**
 * Batch variant of getEffectivePermissions: resolves the effective permission
 * bits for many resources with a single ACL query.
 *
 * @param principalsList - Principals to match (user + groups + public)
 * @param resourceType - Resource type ('MCPSERVER', 'AGENT', etc.)
 * @param resourceIds - Resource IDs to resolve permissions for
 * @returns {Promise<Map<string, number>>} resourceId (string) → OR-ed permission bits
 *
 * @example
 * const principals = await getUserPrincipals({ userId, role });
 * const permMap = await getEffectivePermissionsForResources(
 *   principals,
 *   ResourceType.MCPSERVER,
 *   [id1, id2, id3]
 * );
 * // permMap.get(id1.toString()) → 7 (VIEW|EDIT|DELETE)
 */
async function getEffectivePermissionsForResources(principalsList, resourceType, resourceIds) {
    if (!Array.isArray(resourceIds) || resourceIds.length === 0) {
        return new Map();
    }
    const AclEntry = mongoose.models.AclEntry;
    // One $or clause per principal; PUBLIC entries carry no principalId.
    const principalsQuery = [];
    for (const principal of principalsList) {
        const clause = { principalType: principal.principalType };
        if (principal.principalType !== PrincipalType.PUBLIC) {
            clause.principalId = principal.principalId;
        }
        principalsQuery.push(clause);
    }
    // Single batched query covering every requested resource
    const aclEntries = await AclEntry.find({
        $or: principalsQuery,
        resourceType,
        resourceId: { $in: resourceIds },
    }).lean();
    // Fold each matching entry's bits into its resource's accumulated mask
    const permissionsMap = new Map();
    for (const entry of aclEntries) {
        const key = entry.resourceId.toString();
        permissionsMap.set(key, (permissionsMap.get(key) || 0) | entry.permBits);
    }
    return permissionsMap;
}
|
|
5092
6086
|
/**
|
|
5093
6087
|
* Grant permission to a principal for a resource
|
|
5094
6088
|
* @param principalType - The type of principal ('user', 'group', 'public')
|
|
@@ -5229,6 +6223,7 @@ function createAclEntryMethods(mongoose) {
|
|
|
5229
6223
|
findEntriesByPrincipalsAndResource,
|
|
5230
6224
|
hasPermission,
|
|
5231
6225
|
getEffectivePermissions,
|
|
6226
|
+
getEffectivePermissionsForResources,
|
|
5232
6227
|
grantPermission,
|
|
5233
6228
|
revokePermission,
|
|
5234
6229
|
modifyPermissionBits,
|
|
@@ -5704,6 +6699,7 @@ function createShareMethods(mongoose) {
|
|
|
5704
6699
|
|
|
5705
6700
|
/**
|
|
5706
6701
|
* Creates all database methods for all collections
|
|
6702
|
+
* @param mongoose - Mongoose instance
|
|
5707
6703
|
*/
|
|
5708
6704
|
function createMethods(mongoose) {
|
|
5709
6705
|
return {
|
|
@@ -5711,8 +6707,11 @@ function createMethods(mongoose) {
|
|
|
5711
6707
|
...createSessionMethods(mongoose),
|
|
5712
6708
|
...createTokenMethods(mongoose),
|
|
5713
6709
|
...createRoleMethods(mongoose),
|
|
6710
|
+
...createKeyMethods(mongoose),
|
|
6711
|
+
...createFileMethods(mongoose),
|
|
5714
6712
|
...createMemoryMethods(mongoose),
|
|
5715
6713
|
...createAgentCategoryMethods(mongoose),
|
|
6714
|
+
...createMCPServerMethods(mongoose),
|
|
5716
6715
|
...createAccessRoleMethods(mongoose),
|
|
5717
6716
|
...createUserGroupMethods(mongoose),
|
|
5718
6717
|
...createAclEntryMethods(mongoose),
|
|
@@ -5721,5 +6720,5 @@ function createMethods(mongoose) {
|
|
|
5721
6720
|
};
|
|
5722
6721
|
}
|
|
5723
6722
|
|
|
5724
|
-
export { AppService, RoleBits, Action as actionSchema, agentCategorySchema, agentSchema, agentsConfigSetup, assistantSchema, balanceSchema, bannerSchema, categoriesSchema, conversationTag as conversationTagSchema, convoSchema, createMethods, createModels, file as fileSchema, getTransactionSupport, getWebSearchKeys, groupSchema, hashToken, keySchema, loadDefaultInterface, loadTurnstileConfig, loadWebSearchConfig, logger$1 as logger, logger as meiliLogger, MemoryEntrySchema as memorySchema, messageSchema, pluginAuthSchema, presetSchema, processModelSpecs, projectSchema, promptGroupSchema, promptSchema, roleSchema, sessionSchema, shareSchema, signPayload, supportsTransactions, tokenSchema, toolCallSchema, transactionSchema, userSchema, webSearchAuth, webSearchKeys };
|
|
6723
|
+
export { AppService, DEFAULT_REFRESH_TOKEN_EXPIRY, DEFAULT_SESSION_EXPIRY, RoleBits, Action as actionSchema, agentCategorySchema, agentSchema, agentsConfigSetup, assistantSchema, balanceSchema, bannerSchema, categoriesSchema, conversationTag as conversationTagSchema, convoSchema, createMethods, createModels, decrypt, decryptV2, decryptV3, defaultVertexModels, encrypt, encryptV2, encryptV3, file as fileSchema, getRandomValues, getTransactionSupport, getWebSearchKeys, groupSchema, hashBackupCode, hashToken, keySchema, loadDefaultInterface, loadTurnstileConfig, loadWebSearchConfig, logger$1 as logger, logger as meiliLogger, MemoryEntrySchema as memorySchema, messageSchema, pluginAuthSchema, presetSchema, processModelSpecs, projectSchema, promptGroupSchema, promptSchema, roleSchema, sessionSchema, shareSchema, signPayload, supportsTransactions, tokenSchema, toolCallSchema, transactionSchema, userSchema, validateVertexConfig, vertexConfigSetup, webSearchAuth, webSearchKeys };
|
|
5725
6724
|
//# sourceMappingURL=index.es.js.map
|