@librechat/data-schemas 0.0.31 → 0.0.34
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +1159 -147
- package/dist/index.cjs.map +1 -1
- package/dist/index.es.js +1146 -147
- package/dist/index.es.js.map +1 -1
- package/dist/types/app/endpoints.d.ts +0 -1
- package/dist/types/app/index.d.ts +1 -0
- package/dist/types/app/vertex.d.ts +19 -0
- package/dist/types/crypto/index.d.ts +52 -0
- package/dist/types/index.d.ts +1 -1
- package/dist/types/methods/aclEntry.d.ts +4 -0
- package/dist/types/methods/file.d.ts +55 -0
- package/dist/types/methods/file.spec.d.ts +1 -0
- package/dist/types/methods/index.d.ts +9 -4
- package/dist/types/methods/key.d.ts +55 -0
- package/dist/types/methods/mcpServer.d.ts +57 -0
- package/dist/types/methods/mcpServer.spec.d.ts +1 -0
- package/dist/types/methods/session.d.ts +3 -1
- package/dist/types/methods/user.d.ts +4 -1
- package/dist/types/models/index.d.ts +1 -0
- package/dist/types/models/mcpServer.d.ts +30 -0
- package/dist/types/models/plugins/mongoMeili.d.ts +2 -13
- package/dist/types/models/plugins/mongoMeili.spec.d.ts +1 -0
- package/dist/types/schema/banner.d.ts +1 -0
- package/dist/types/schema/mcpServer.d.ts +37 -0
- package/dist/types/schema/preset.d.ts +0 -1
- package/dist/types/types/agent.d.ts +2 -0
- package/dist/types/types/app.d.ts +8 -5
- package/dist/types/types/banner.d.ts +1 -0
- package/dist/types/types/convo.d.ts +0 -1
- package/dist/types/types/index.d.ts +1 -0
- package/dist/types/types/mcp.d.ts +34 -0
- package/dist/types/types/message.d.ts +1 -0
- package/dist/types/types/session.d.ts +6 -0
- package/dist/types/types/user.d.ts +5 -0
- package/package.json +1 -2
package/dist/index.cjs
CHANGED
|
@@ -5,8 +5,9 @@ var winston = require('winston');
|
|
|
5
5
|
require('winston-daily-rotate-file');
|
|
6
6
|
var klona = require('klona');
|
|
7
7
|
var path = require('path');
|
|
8
|
+
require('dotenv/config');
|
|
8
9
|
var jwt = require('jsonwebtoken');
|
|
9
|
-
var
|
|
10
|
+
var crypto = require('node:crypto');
|
|
10
11
|
var mongoose = require('mongoose');
|
|
11
12
|
var _ = require('lodash');
|
|
12
13
|
var meilisearch = require('meilisearch');
|
|
@@ -856,6 +857,153 @@ function azureConfigSetup(config) {
|
|
|
856
857
|
};
|
|
857
858
|
}
|
|
858
859
|
|
|
860
|
+
/**
|
|
861
|
+
* Default Vertex AI models available through Google Cloud
|
|
862
|
+
* These are the standard Anthropic model names as served by Vertex AI
|
|
863
|
+
*/
|
|
864
|
+
const defaultVertexModels = [
|
|
865
|
+
'claude-sonnet-4-20250514',
|
|
866
|
+
'claude-3-7-sonnet-20250219',
|
|
867
|
+
'claude-3-5-sonnet-v2@20241022',
|
|
868
|
+
'claude-3-5-sonnet@20240620',
|
|
869
|
+
'claude-3-5-haiku@20241022',
|
|
870
|
+
'claude-3-opus@20240229',
|
|
871
|
+
'claude-3-haiku@20240307',
|
|
872
|
+
];
|
|
873
|
+
/**
|
|
874
|
+
* Processes models configuration and creates deployment name mapping
|
|
875
|
+
* Similar to Azure's model mapping logic
|
|
876
|
+
* @param models - The models configuration (can be array or object)
|
|
877
|
+
* @param defaultDeploymentName - Optional default deployment name
|
|
878
|
+
* @returns Object containing modelNames array and modelDeploymentMap
|
|
879
|
+
*/
|
|
880
|
+
function processVertexModels(models, defaultDeploymentName) {
|
|
881
|
+
const modelNames = [];
|
|
882
|
+
const modelDeploymentMap = {};
|
|
883
|
+
if (!models) {
|
|
884
|
+
// No models specified, use defaults
|
|
885
|
+
for (const model of defaultVertexModels) {
|
|
886
|
+
modelNames.push(model);
|
|
887
|
+
modelDeploymentMap[model] = model; // Default: model name = deployment name
|
|
888
|
+
}
|
|
889
|
+
return { modelNames, modelDeploymentMap };
|
|
890
|
+
}
|
|
891
|
+
if (Array.isArray(models)) {
|
|
892
|
+
// Legacy format: simple array of model names
|
|
893
|
+
for (const modelName of models) {
|
|
894
|
+
modelNames.push(modelName);
|
|
895
|
+
// If a default deployment name is provided, use it for all models
|
|
896
|
+
// Otherwise, model name is the deployment name
|
|
897
|
+
modelDeploymentMap[modelName] = defaultDeploymentName || modelName;
|
|
898
|
+
}
|
|
899
|
+
}
|
|
900
|
+
else {
|
|
901
|
+
// New format: object with model names as keys and config as values
|
|
902
|
+
for (const [modelName, modelConfig] of Object.entries(models)) {
|
|
903
|
+
modelNames.push(modelName);
|
|
904
|
+
if (typeof modelConfig === 'boolean') {
|
|
905
|
+
// Model is set to true/false - use default deployment name or model name
|
|
906
|
+
modelDeploymentMap[modelName] = defaultDeploymentName || modelName;
|
|
907
|
+
}
|
|
908
|
+
else if (modelConfig === null || modelConfig === void 0 ? void 0 : modelConfig.deploymentName) {
|
|
909
|
+
// Model has its own deployment name specified
|
|
910
|
+
modelDeploymentMap[modelName] = modelConfig.deploymentName;
|
|
911
|
+
}
|
|
912
|
+
else {
|
|
913
|
+
// Model is an object but no deployment name - use default or model name
|
|
914
|
+
modelDeploymentMap[modelName] = defaultDeploymentName || modelName;
|
|
915
|
+
}
|
|
916
|
+
}
|
|
917
|
+
}
|
|
918
|
+
return { modelNames, modelDeploymentMap };
|
|
919
|
+
}
|
|
920
|
+
/**
|
|
921
|
+
* Validates and processes Vertex AI configuration
|
|
922
|
+
* @param vertexConfig - The Vertex AI configuration object
|
|
923
|
+
* @returns Validated configuration with errors if any
|
|
924
|
+
*/
|
|
925
|
+
function validateVertexConfig(vertexConfig) {
|
|
926
|
+
if (!vertexConfig) {
|
|
927
|
+
return null;
|
|
928
|
+
}
|
|
929
|
+
const errors = [];
|
|
930
|
+
// Extract and validate environment variables
|
|
931
|
+
// projectId is optional - will be auto-detected from service key if not provided
|
|
932
|
+
const projectId = vertexConfig.projectId ? librechatDataProvider.extractEnvVariable(vertexConfig.projectId) : undefined;
|
|
933
|
+
const region = librechatDataProvider.extractEnvVariable(vertexConfig.region || 'us-east5');
|
|
934
|
+
const serviceKeyFile = vertexConfig.serviceKeyFile
|
|
935
|
+
? librechatDataProvider.extractEnvVariable(vertexConfig.serviceKeyFile)
|
|
936
|
+
: undefined;
|
|
937
|
+
const defaultDeploymentName = vertexConfig.deploymentName
|
|
938
|
+
? librechatDataProvider.extractEnvVariable(vertexConfig.deploymentName)
|
|
939
|
+
: undefined;
|
|
940
|
+
// Check for unresolved environment variables
|
|
941
|
+
if (projectId && librechatDataProvider.envVarRegex.test(projectId)) {
|
|
942
|
+
errors.push(`Vertex AI projectId environment variable "${vertexConfig.projectId}" was not found.`);
|
|
943
|
+
}
|
|
944
|
+
if (librechatDataProvider.envVarRegex.test(region)) {
|
|
945
|
+
errors.push(`Vertex AI region environment variable "${vertexConfig.region}" was not found.`);
|
|
946
|
+
}
|
|
947
|
+
if (serviceKeyFile && librechatDataProvider.envVarRegex.test(serviceKeyFile)) {
|
|
948
|
+
errors.push(`Vertex AI serviceKeyFile environment variable "${vertexConfig.serviceKeyFile}" was not found.`);
|
|
949
|
+
}
|
|
950
|
+
if (defaultDeploymentName && librechatDataProvider.envVarRegex.test(defaultDeploymentName)) {
|
|
951
|
+
errors.push(`Vertex AI deploymentName environment variable "${vertexConfig.deploymentName}" was not found.`);
|
|
952
|
+
}
|
|
953
|
+
// Process models and create deployment mapping
|
|
954
|
+
const { modelNames, modelDeploymentMap } = processVertexModels(vertexConfig.models, defaultDeploymentName);
|
|
955
|
+
// Note: projectId is optional - if not provided, it will be auto-detected from the service key file
|
|
956
|
+
const isValid = errors.length === 0;
|
|
957
|
+
return {
|
|
958
|
+
enabled: vertexConfig.enabled !== false,
|
|
959
|
+
projectId,
|
|
960
|
+
region,
|
|
961
|
+
serviceKeyFile,
|
|
962
|
+
deploymentName: defaultDeploymentName,
|
|
963
|
+
models: vertexConfig.models,
|
|
964
|
+
modelNames,
|
|
965
|
+
modelDeploymentMap,
|
|
966
|
+
isValid,
|
|
967
|
+
errors,
|
|
968
|
+
};
|
|
969
|
+
}
|
|
970
|
+
/**
|
|
971
|
+
* Sets up the Vertex AI configuration from the config (`librechat.yaml`) file.
|
|
972
|
+
* Similar to azureConfigSetup, this processes and validates the Vertex AI configuration.
|
|
973
|
+
* @param config - The loaded custom configuration.
|
|
974
|
+
* @returns The validated Vertex AI configuration or null if not configured.
|
|
975
|
+
*/
|
|
976
|
+
function vertexConfigSetup(config) {
|
|
977
|
+
var _a, _b;
|
|
978
|
+
const anthropicConfig = (_a = config.endpoints) === null || _a === void 0 ? void 0 : _a[librechatDataProvider.EModelEndpoint.anthropic];
|
|
979
|
+
if (!(anthropicConfig === null || anthropicConfig === void 0 ? void 0 : anthropicConfig.vertex)) {
|
|
980
|
+
return null;
|
|
981
|
+
}
|
|
982
|
+
const vertexConfig = anthropicConfig.vertex;
|
|
983
|
+
// Skip if explicitly disabled (enabled: false)
|
|
984
|
+
// When vertex config exists, it's enabled by default unless explicitly set to false
|
|
985
|
+
if (vertexConfig.enabled === false) {
|
|
986
|
+
return null;
|
|
987
|
+
}
|
|
988
|
+
const validatedConfig = validateVertexConfig(vertexConfig);
|
|
989
|
+
if (!validatedConfig) {
|
|
990
|
+
return null;
|
|
991
|
+
}
|
|
992
|
+
if (!validatedConfig.isValid) {
|
|
993
|
+
const errorString = validatedConfig.errors.join('\n');
|
|
994
|
+
const errorMessage = 'Invalid Vertex AI configuration:\n' + errorString;
|
|
995
|
+
logger$1.error(errorMessage);
|
|
996
|
+
throw new Error(errorMessage);
|
|
997
|
+
}
|
|
998
|
+
logger$1.info('Vertex AI configuration loaded successfully', {
|
|
999
|
+
projectId: validatedConfig.projectId,
|
|
1000
|
+
region: validatedConfig.region,
|
|
1001
|
+
modelCount: ((_b = validatedConfig.modelNames) === null || _b === void 0 ? void 0 : _b.length) || 0,
|
|
1002
|
+
models: validatedConfig.modelNames,
|
|
1003
|
+
});
|
|
1004
|
+
return validatedConfig;
|
|
1005
|
+
}
|
|
1006
|
+
|
|
859
1007
|
/**
|
|
860
1008
|
* Loads custom config endpoints
|
|
861
1009
|
* @param [config]
|
|
@@ -878,12 +1026,24 @@ const loadEndpoints = (config, agentsDefaults) => {
|
|
|
878
1026
|
loadedEndpoints[librechatDataProvider.EModelEndpoint.assistants] = assistantsConfigSetup(config, librechatDataProvider.EModelEndpoint.assistants, loadedEndpoints[librechatDataProvider.EModelEndpoint.assistants]);
|
|
879
1027
|
}
|
|
880
1028
|
loadedEndpoints[librechatDataProvider.EModelEndpoint.agents] = agentsConfigSetup(config, agentsDefaults);
|
|
1029
|
+
// Handle Anthropic endpoint with Vertex AI configuration
|
|
1030
|
+
if (endpoints === null || endpoints === void 0 ? void 0 : endpoints[librechatDataProvider.EModelEndpoint.anthropic]) {
|
|
1031
|
+
const anthropicConfig = endpoints[librechatDataProvider.EModelEndpoint.anthropic];
|
|
1032
|
+
const vertexConfig = vertexConfigSetup(config);
|
|
1033
|
+
loadedEndpoints[librechatDataProvider.EModelEndpoint.anthropic] = {
|
|
1034
|
+
...anthropicConfig,
|
|
1035
|
+
// If Vertex AI is enabled, use the visible model names from vertex config
|
|
1036
|
+
// Otherwise, use the models array from anthropic config
|
|
1037
|
+
...((vertexConfig === null || vertexConfig === void 0 ? void 0 : vertexConfig.modelNames) && { models: vertexConfig.modelNames }),
|
|
1038
|
+
// Attach validated Vertex AI config if present
|
|
1039
|
+
...(vertexConfig && { vertexConfig }),
|
|
1040
|
+
};
|
|
1041
|
+
}
|
|
881
1042
|
const endpointKeys = [
|
|
882
1043
|
librechatDataProvider.EModelEndpoint.openAI,
|
|
883
1044
|
librechatDataProvider.EModelEndpoint.google,
|
|
884
1045
|
librechatDataProvider.EModelEndpoint.custom,
|
|
885
1046
|
librechatDataProvider.EModelEndpoint.bedrock,
|
|
886
|
-
librechatDataProvider.EModelEndpoint.anthropic,
|
|
887
1047
|
];
|
|
888
1048
|
endpointKeys.forEach((key) => {
|
|
889
1049
|
const currentKey = key;
|
|
@@ -938,7 +1098,9 @@ const AppService = async (params) => {
|
|
|
938
1098
|
const imageOutputType = (_e = config === null || config === void 0 ? void 0 : config.imageOutputType) !== null && _e !== void 0 ? _e : configDefaults.imageOutputType;
|
|
939
1099
|
process.env.CDN_PROVIDER = fileStrategy;
|
|
940
1100
|
const availableTools = systemTools;
|
|
941
|
-
const
|
|
1101
|
+
const mcpServersConfig = config.mcpServers || null;
|
|
1102
|
+
const mcpSettings = config.mcpSettings || null;
|
|
1103
|
+
const actions = config.actions;
|
|
942
1104
|
const registration = (_f = config.registration) !== null && _f !== void 0 ? _f : configDefaults.registration;
|
|
943
1105
|
const interfaceConfig = await loadDefaultInterface({ config, configDefaults });
|
|
944
1106
|
const turnstileConfig = loadTurnstileConfig(config, configDefaults);
|
|
@@ -950,8 +1112,10 @@ const AppService = async (params) => {
|
|
|
950
1112
|
memory,
|
|
951
1113
|
speech,
|
|
952
1114
|
balance,
|
|
1115
|
+
actions,
|
|
953
1116
|
transactions,
|
|
954
|
-
mcpConfig,
|
|
1117
|
+
mcpConfig: mcpServersConfig,
|
|
1118
|
+
mcpSettings,
|
|
955
1119
|
webSearch,
|
|
956
1120
|
fileStrategy,
|
|
957
1121
|
registration,
|
|
@@ -999,14 +1163,144 @@ exports.RoleBits = void 0;
|
|
|
999
1163
|
RoleBits[RoleBits["OWNER"] = librechatDataProvider.PermissionBits.VIEW | librechatDataProvider.PermissionBits.EDIT | librechatDataProvider.PermissionBits.DELETE | librechatDataProvider.PermissionBits.SHARE] = "OWNER";
|
|
1000
1164
|
})(exports.RoleBits || (exports.RoleBits = {}));
|
|
1001
1165
|
|
|
1166
|
+
var _a, _b;
|
|
1167
|
+
const { webcrypto } = crypto;
|
|
1168
|
+
/** Use hex decoding for both key and IV for legacy methods */
|
|
1169
|
+
const key = Buffer.from((_a = process.env.CREDS_KEY) !== null && _a !== void 0 ? _a : '', 'hex');
|
|
1170
|
+
const iv = Buffer.from((_b = process.env.CREDS_IV) !== null && _b !== void 0 ? _b : '', 'hex');
|
|
1171
|
+
const algorithm = 'AES-CBC';
|
|
1002
1172
|
async function signPayload({ payload, secret, expirationTime, }) {
|
|
1003
1173
|
return jwt.sign(payload, secret, { expiresIn: expirationTime });
|
|
1004
1174
|
}
|
|
1005
1175
|
async function hashToken(str) {
|
|
1006
1176
|
const data = new TextEncoder().encode(str);
|
|
1007
|
-
const hashBuffer = await
|
|
1177
|
+
const hashBuffer = await webcrypto.subtle.digest('SHA-256', data);
|
|
1008
1178
|
return Buffer.from(hashBuffer).toString('hex');
|
|
1009
1179
|
}
|
|
1180
|
+
/** --- Legacy v1/v2 Setup: AES-CBC with fixed key and IV --- */
|
|
1181
|
+
/**
|
|
1182
|
+
* Encrypts a value using AES-CBC
|
|
1183
|
+
* @param value - The plaintext to encrypt
|
|
1184
|
+
* @returns The encrypted string in hex format
|
|
1185
|
+
*/
|
|
1186
|
+
async function encrypt(value) {
|
|
1187
|
+
const cryptoKey = await webcrypto.subtle.importKey('raw', key, { name: algorithm }, false, [
|
|
1188
|
+
'encrypt',
|
|
1189
|
+
]);
|
|
1190
|
+
const encoder = new TextEncoder();
|
|
1191
|
+
const data = encoder.encode(value);
|
|
1192
|
+
const encryptedBuffer = await webcrypto.subtle.encrypt({ name: algorithm, iv: iv }, cryptoKey, data);
|
|
1193
|
+
return Buffer.from(encryptedBuffer).toString('hex');
|
|
1194
|
+
}
|
|
1195
|
+
/**
|
|
1196
|
+
* Decrypts an encrypted value using AES-CBC
|
|
1197
|
+
* @param encryptedValue - The encrypted string in hex format
|
|
1198
|
+
* @returns The decrypted plaintext
|
|
1199
|
+
*/
|
|
1200
|
+
async function decrypt(encryptedValue) {
|
|
1201
|
+
const cryptoKey = await webcrypto.subtle.importKey('raw', key, { name: algorithm }, false, [
|
|
1202
|
+
'decrypt',
|
|
1203
|
+
]);
|
|
1204
|
+
const encryptedBuffer = Buffer.from(encryptedValue, 'hex');
|
|
1205
|
+
const decryptedBuffer = await webcrypto.subtle.decrypt({ name: algorithm, iv: iv }, cryptoKey, encryptedBuffer);
|
|
1206
|
+
const decoder = new TextDecoder();
|
|
1207
|
+
return decoder.decode(decryptedBuffer);
|
|
1208
|
+
}
|
|
1209
|
+
/** --- v2: AES-CBC with a random IV per encryption --- */
|
|
1210
|
+
/**
|
|
1211
|
+
* Encrypts a value using AES-CBC with a random IV per encryption
|
|
1212
|
+
* @param value - The plaintext to encrypt
|
|
1213
|
+
* @returns The encrypted string with IV prepended (iv:ciphertext format)
|
|
1214
|
+
*/
|
|
1215
|
+
async function encryptV2(value) {
|
|
1216
|
+
const gen_iv = webcrypto.getRandomValues(new Uint8Array(16));
|
|
1217
|
+
const cryptoKey = await webcrypto.subtle.importKey('raw', key, { name: algorithm }, false, [
|
|
1218
|
+
'encrypt',
|
|
1219
|
+
]);
|
|
1220
|
+
const encoder = new TextEncoder();
|
|
1221
|
+
const data = encoder.encode(value);
|
|
1222
|
+
const encryptedBuffer = await webcrypto.subtle.encrypt({ name: algorithm, iv: gen_iv }, cryptoKey, data);
|
|
1223
|
+
return Buffer.from(gen_iv).toString('hex') + ':' + Buffer.from(encryptedBuffer).toString('hex');
|
|
1224
|
+
}
|
|
1225
|
+
/**
|
|
1226
|
+
* Decrypts an encrypted value using AES-CBC with random IV
|
|
1227
|
+
* @param encryptedValue - The encrypted string in iv:ciphertext format
|
|
1228
|
+
* @returns The decrypted plaintext
|
|
1229
|
+
*/
|
|
1230
|
+
async function decryptV2(encryptedValue) {
|
|
1231
|
+
var _a;
|
|
1232
|
+
const parts = encryptedValue.split(':');
|
|
1233
|
+
if (parts.length === 1) {
|
|
1234
|
+
return parts[0];
|
|
1235
|
+
}
|
|
1236
|
+
const gen_iv = Buffer.from((_a = parts.shift()) !== null && _a !== void 0 ? _a : '', 'hex');
|
|
1237
|
+
const encrypted = parts.join(':');
|
|
1238
|
+
const cryptoKey = await webcrypto.subtle.importKey('raw', key, { name: algorithm }, false, [
|
|
1239
|
+
'decrypt',
|
|
1240
|
+
]);
|
|
1241
|
+
const encryptedBuffer = Buffer.from(encrypted, 'hex');
|
|
1242
|
+
const decryptedBuffer = await webcrypto.subtle.decrypt({ name: algorithm, iv: gen_iv }, cryptoKey, encryptedBuffer);
|
|
1243
|
+
const decoder = new TextDecoder();
|
|
1244
|
+
return decoder.decode(decryptedBuffer);
|
|
1245
|
+
}
|
|
1246
|
+
/** --- v3: AES-256-CTR using Node's crypto functions --- */
|
|
1247
|
+
const algorithm_v3 = 'aes-256-ctr';
|
|
1248
|
+
/**
|
|
1249
|
+
* Encrypts a value using AES-256-CTR.
|
|
1250
|
+
* Note: AES-256 requires a 32-byte key. Ensure that process.env.CREDS_KEY is a 64-character hex string.
|
|
1251
|
+
* @param value - The plaintext to encrypt.
|
|
1252
|
+
* @returns The encrypted string with a "v3:" prefix.
|
|
1253
|
+
*/
|
|
1254
|
+
function encryptV3(value) {
|
|
1255
|
+
if (key.length !== 32) {
|
|
1256
|
+
throw new Error(`Invalid key length: expected 32 bytes, got ${key.length} bytes`);
|
|
1257
|
+
}
|
|
1258
|
+
const iv_v3 = crypto.randomBytes(16);
|
|
1259
|
+
const cipher = crypto.createCipheriv(algorithm_v3, key, iv_v3);
|
|
1260
|
+
const encrypted = Buffer.concat([cipher.update(value, 'utf8'), cipher.final()]);
|
|
1261
|
+
return `v3:${iv_v3.toString('hex')}:${encrypted.toString('hex')}`;
|
|
1262
|
+
}
|
|
1263
|
+
/**
|
|
1264
|
+
* Decrypts an encrypted value using AES-256-CTR.
|
|
1265
|
+
* @param encryptedValue - The encrypted string with "v3:" prefix.
|
|
1266
|
+
* @returns The decrypted plaintext.
|
|
1267
|
+
*/
|
|
1268
|
+
function decryptV3(encryptedValue) {
|
|
1269
|
+
const parts = encryptedValue.split(':');
|
|
1270
|
+
if (parts[0] !== 'v3') {
|
|
1271
|
+
throw new Error('Not a v3 encrypted value');
|
|
1272
|
+
}
|
|
1273
|
+
const iv_v3 = Buffer.from(parts[1], 'hex');
|
|
1274
|
+
const encryptedText = Buffer.from(parts.slice(2).join(':'), 'hex');
|
|
1275
|
+
const decipher = crypto.createDecipheriv(algorithm_v3, key, iv_v3);
|
|
1276
|
+
const decrypted = Buffer.concat([decipher.update(encryptedText), decipher.final()]);
|
|
1277
|
+
return decrypted.toString('utf8');
|
|
1278
|
+
}
|
|
1279
|
+
/**
|
|
1280
|
+
* Generates random values as a hex string
|
|
1281
|
+
* @param length - The number of random bytes to generate
|
|
1282
|
+
* @returns The random values as a hex string
|
|
1283
|
+
*/
|
|
1284
|
+
async function getRandomValues(length) {
|
|
1285
|
+
if (!Number.isInteger(length) || length <= 0) {
|
|
1286
|
+
throw new Error('Length must be a positive integer');
|
|
1287
|
+
}
|
|
1288
|
+
const randomValues = new Uint8Array(length);
|
|
1289
|
+
webcrypto.getRandomValues(randomValues);
|
|
1290
|
+
return Buffer.from(randomValues).toString('hex');
|
|
1291
|
+
}
|
|
1292
|
+
/**
|
|
1293
|
+
* Computes SHA-256 hash for the given input.
|
|
1294
|
+
* @param input - The input to hash.
|
|
1295
|
+
* @returns The SHA-256 hash of the input.
|
|
1296
|
+
*/
|
|
1297
|
+
async function hashBackupCode(input) {
|
|
1298
|
+
const encoder = new TextEncoder();
|
|
1299
|
+
const data = encoder.encode(input);
|
|
1300
|
+
const hashBuffer = await webcrypto.subtle.digest('SHA-256', data);
|
|
1301
|
+
const hashArray = Array.from(new Uint8Array(hashBuffer));
|
|
1302
|
+
return hashArray.map((b) => b.toString(16).padStart(2, '0')).join('');
|
|
1303
|
+
}
|
|
1010
1304
|
|
|
1011
1305
|
// Define the Auth sub-schema with type-safety.
|
|
1012
1306
|
const AuthSchema = new mongoose.Schema({
|
|
@@ -1162,10 +1456,17 @@ const agentSchema = new mongoose.Schema({
|
|
|
1162
1456
|
default: false,
|
|
1163
1457
|
index: true,
|
|
1164
1458
|
},
|
|
1459
|
+
/** MCP server names extracted from tools for efficient querying */
|
|
1460
|
+
mcpServerNames: {
|
|
1461
|
+
type: [String],
|
|
1462
|
+
default: [],
|
|
1463
|
+
index: true,
|
|
1464
|
+
},
|
|
1165
1465
|
}, {
|
|
1166
1466
|
timestamps: true,
|
|
1167
1467
|
});
|
|
1168
1468
|
agentSchema.index({ updatedAt: -1, _id: 1 });
|
|
1469
|
+
agentSchema.index({ 'edges.to': 1 });
|
|
1169
1470
|
|
|
1170
1471
|
const agentCategorySchema = new mongoose.Schema({
|
|
1171
1472
|
value: {
|
|
@@ -1301,6 +1602,10 @@ const bannerSchema = new mongoose.Schema({
|
|
|
1301
1602
|
type: Boolean,
|
|
1302
1603
|
default: false,
|
|
1303
1604
|
},
|
|
1605
|
+
persistable: {
|
|
1606
|
+
type: Boolean,
|
|
1607
|
+
default: false,
|
|
1608
|
+
},
|
|
1304
1609
|
}, { timestamps: true });
|
|
1305
1610
|
|
|
1306
1611
|
const categoriesSchema = new mongoose.Schema({
|
|
@@ -1344,7 +1649,6 @@ conversationTag.index({ tag: 1, user: 1 }, { unique: true });
|
|
|
1344
1649
|
|
|
1345
1650
|
// @ts-ignore
|
|
1346
1651
|
const conversationPreset = {
|
|
1347
|
-
// endpoint: [azureOpenAI, openAI, anthropic, chatGPTBrowser]
|
|
1348
1652
|
endpoint: {
|
|
1349
1653
|
type: String,
|
|
1350
1654
|
default: null,
|
|
@@ -1353,7 +1657,7 @@ const conversationPreset = {
|
|
|
1353
1657
|
endpointType: {
|
|
1354
1658
|
type: String,
|
|
1355
1659
|
},
|
|
1356
|
-
// for azureOpenAI, openAI
|
|
1660
|
+
// for azureOpenAI, openAI only
|
|
1357
1661
|
model: {
|
|
1358
1662
|
type: String,
|
|
1359
1663
|
required: false,
|
|
@@ -1518,9 +1822,6 @@ const convoSchema = new mongoose.Schema({
|
|
|
1518
1822
|
meiliIndex: true,
|
|
1519
1823
|
},
|
|
1520
1824
|
messages: [{ type: mongoose.Schema.Types.ObjectId, ref: 'Message' }],
|
|
1521
|
-
agentOptions: {
|
|
1522
|
-
type: mongoose.Schema.Types.Mixed,
|
|
1523
|
-
},
|
|
1524
1825
|
...conversationPreset,
|
|
1525
1826
|
agent_id: {
|
|
1526
1827
|
type: String,
|
|
@@ -1540,6 +1841,8 @@ const convoSchema = new mongoose.Schema({
|
|
|
1540
1841
|
convoSchema.index({ expiredAt: 1 }, { expireAfterSeconds: 0 });
|
|
1541
1842
|
convoSchema.index({ createdAt: 1, updatedAt: 1 });
|
|
1542
1843
|
convoSchema.index({ conversationId: 1, user: 1 }, { unique: true });
|
|
1844
|
+
// index for MeiliSearch sync operations
|
|
1845
|
+
convoSchema.index({ _meiliIndex: 1, expiredAt: 1 });
|
|
1543
1846
|
|
|
1544
1847
|
const file = new mongoose.Schema({
|
|
1545
1848
|
user: {
|
|
@@ -1736,25 +2039,6 @@ const messageSchema = new mongoose.Schema({
|
|
|
1736
2039
|
default: false,
|
|
1737
2040
|
},
|
|
1738
2041
|
files: { type: [{ type: mongoose.Schema.Types.Mixed }], default: undefined },
|
|
1739
|
-
plugin: {
|
|
1740
|
-
type: {
|
|
1741
|
-
latest: {
|
|
1742
|
-
type: String,
|
|
1743
|
-
required: false,
|
|
1744
|
-
},
|
|
1745
|
-
inputs: {
|
|
1746
|
-
type: [mongoose.Schema.Types.Mixed],
|
|
1747
|
-
required: false,
|
|
1748
|
-
default: undefined,
|
|
1749
|
-
},
|
|
1750
|
-
outputs: {
|
|
1751
|
-
type: String,
|
|
1752
|
-
required: false,
|
|
1753
|
-
},
|
|
1754
|
-
},
|
|
1755
|
-
default: undefined,
|
|
1756
|
-
},
|
|
1757
|
-
plugins: { type: [{ type: mongoose.Schema.Types.Mixed }], default: undefined },
|
|
1758
2042
|
content: {
|
|
1759
2043
|
type: [{ type: mongoose.Schema.Types.Mixed }],
|
|
1760
2044
|
default: undefined,
|
|
@@ -1794,10 +2078,16 @@ const messageSchema = new mongoose.Schema({
|
|
|
1794
2078
|
expiredAt: {
|
|
1795
2079
|
type: Date,
|
|
1796
2080
|
},
|
|
2081
|
+
addedConvo: {
|
|
2082
|
+
type: Boolean,
|
|
2083
|
+
default: undefined,
|
|
2084
|
+
},
|
|
1797
2085
|
}, { timestamps: true });
|
|
1798
2086
|
messageSchema.index({ expiredAt: 1 }, { expireAfterSeconds: 0 });
|
|
1799
2087
|
messageSchema.index({ createdAt: 1 });
|
|
1800
2088
|
messageSchema.index({ messageId: 1, user: 1 }, { unique: true });
|
|
2089
|
+
// index for MeiliSearch sync operations
|
|
2090
|
+
messageSchema.index({ _meiliIndex: 1, expiredAt: 1 });
|
|
1801
2091
|
|
|
1802
2092
|
const pluginAuthSchema = new mongoose.Schema({
|
|
1803
2093
|
authField: {
|
|
@@ -1840,10 +2130,6 @@ const presetSchema = new mongoose.Schema({
|
|
|
1840
2130
|
type: Number,
|
|
1841
2131
|
},
|
|
1842
2132
|
...conversationPreset,
|
|
1843
|
-
agentOptions: {
|
|
1844
|
-
type: mongoose.Schema.Types.Mixed,
|
|
1845
|
-
default: null,
|
|
1846
|
-
},
|
|
1847
2133
|
}, { timestamps: true });
|
|
1848
2134
|
|
|
1849
2135
|
const projectSchema = new mongoose.Schema({
|
|
@@ -1960,9 +2246,10 @@ const rolePermissionsSchema = new mongoose.Schema({
|
|
|
1960
2246
|
[librechatDataProvider.Permissions.USE]: { type: Boolean },
|
|
1961
2247
|
},
|
|
1962
2248
|
[librechatDataProvider.PermissionTypes.PROMPTS]: {
|
|
1963
|
-
[librechatDataProvider.Permissions.SHARED_GLOBAL]: { type: Boolean },
|
|
1964
2249
|
[librechatDataProvider.Permissions.USE]: { type: Boolean },
|
|
1965
2250
|
[librechatDataProvider.Permissions.CREATE]: { type: Boolean },
|
|
2251
|
+
[librechatDataProvider.Permissions.SHARE]: { type: Boolean },
|
|
2252
|
+
[librechatDataProvider.Permissions.SHARE_PUBLIC]: { type: Boolean },
|
|
1966
2253
|
},
|
|
1967
2254
|
[librechatDataProvider.PermissionTypes.MEMORIES]: {
|
|
1968
2255
|
[librechatDataProvider.Permissions.USE]: { type: Boolean },
|
|
@@ -1972,9 +2259,10 @@ const rolePermissionsSchema = new mongoose.Schema({
|
|
|
1972
2259
|
[librechatDataProvider.Permissions.OPT_OUT]: { type: Boolean },
|
|
1973
2260
|
},
|
|
1974
2261
|
[librechatDataProvider.PermissionTypes.AGENTS]: {
|
|
1975
|
-
[librechatDataProvider.Permissions.SHARED_GLOBAL]: { type: Boolean },
|
|
1976
2262
|
[librechatDataProvider.Permissions.USE]: { type: Boolean },
|
|
1977
2263
|
[librechatDataProvider.Permissions.CREATE]: { type: Boolean },
|
|
2264
|
+
[librechatDataProvider.Permissions.SHARE]: { type: Boolean },
|
|
2265
|
+
[librechatDataProvider.Permissions.SHARE_PUBLIC]: { type: Boolean },
|
|
1978
2266
|
},
|
|
1979
2267
|
[librechatDataProvider.PermissionTypes.MULTI_CONVO]: {
|
|
1980
2268
|
[librechatDataProvider.Permissions.USE]: { type: Boolean },
|
|
@@ -2002,6 +2290,12 @@ const rolePermissionsSchema = new mongoose.Schema({
|
|
|
2002
2290
|
[librechatDataProvider.PermissionTypes.FILE_CITATIONS]: {
|
|
2003
2291
|
[librechatDataProvider.Permissions.USE]: { type: Boolean },
|
|
2004
2292
|
},
|
|
2293
|
+
[librechatDataProvider.PermissionTypes.MCP_SERVERS]: {
|
|
2294
|
+
[librechatDataProvider.Permissions.USE]: { type: Boolean },
|
|
2295
|
+
[librechatDataProvider.Permissions.CREATE]: { type: Boolean },
|
|
2296
|
+
[librechatDataProvider.Permissions.SHARE]: { type: Boolean },
|
|
2297
|
+
[librechatDataProvider.Permissions.SHARE_PUBLIC]: { type: Boolean },
|
|
2298
|
+
},
|
|
2005
2299
|
}, { _id: false });
|
|
2006
2300
|
const roleSchema = new mongoose.Schema({
|
|
2007
2301
|
name: { type: String, required: true, unique: true, index: true },
|
|
@@ -2145,6 +2439,7 @@ const transactionSchema = new mongoose.Schema({
|
|
|
2145
2439
|
},
|
|
2146
2440
|
model: {
|
|
2147
2441
|
type: String,
|
|
2442
|
+
index: true,
|
|
2148
2443
|
},
|
|
2149
2444
|
context: {
|
|
2150
2445
|
type: String,
|
|
@@ -2292,6 +2587,17 @@ const userSchema = new mongoose.Schema({
|
|
|
2292
2587
|
},
|
|
2293
2588
|
default: {},
|
|
2294
2589
|
},
|
|
2590
|
+
favorites: {
|
|
2591
|
+
type: [
|
|
2592
|
+
{
|
|
2593
|
+
_id: false,
|
|
2594
|
+
agentId: String, // for agent
|
|
2595
|
+
model: String, // for model
|
|
2596
|
+
endpoint: String, // for model
|
|
2597
|
+
},
|
|
2598
|
+
],
|
|
2599
|
+
default: [],
|
|
2600
|
+
},
|
|
2295
2601
|
/** Field for external source identification (for consistency with TPrincipal schema) */
|
|
2296
2602
|
idOnTheSource: {
|
|
2297
2603
|
type: String,
|
|
@@ -2511,26 +2817,6 @@ const getSyncConfig = () => ({
|
|
|
2511
2817
|
batchSize: parseInt(process.env.MEILI_SYNC_BATCH_SIZE || '100', 10),
|
|
2512
2818
|
delayMs: parseInt(process.env.MEILI_SYNC_DELAY_MS || '100', 10),
|
|
2513
2819
|
});
|
|
2514
|
-
/**
|
|
2515
|
-
* Local implementation of parseTextParts to avoid dependency on librechat-data-provider
|
|
2516
|
-
* Extracts text content from an array of content items
|
|
2517
|
-
*/
|
|
2518
|
-
const parseTextParts = (content) => {
|
|
2519
|
-
if (!Array.isArray(content)) {
|
|
2520
|
-
return '';
|
|
2521
|
-
}
|
|
2522
|
-
return content
|
|
2523
|
-
.filter((item) => item.type === 'text' && typeof item.text === 'string')
|
|
2524
|
-
.map((item) => item.text)
|
|
2525
|
-
.join(' ')
|
|
2526
|
-
.trim();
|
|
2527
|
-
};
|
|
2528
|
-
/**
|
|
2529
|
-
* Local implementation to handle Bing convoId conversion
|
|
2530
|
-
*/
|
|
2531
|
-
const cleanUpPrimaryKeyValue = (value) => {
|
|
2532
|
-
return value.replace(/--/g, '|');
|
|
2533
|
-
};
|
|
2534
2820
|
/**
|
|
2535
2821
|
* Validates the required options for configuring the mongoMeili plugin.
|
|
2536
2822
|
*/
|
|
@@ -2574,8 +2860,8 @@ const createMeiliMongooseModel = ({ index, attributesToIndex, syncOptions, }) =>
|
|
|
2574
2860
|
* Get the current sync progress
|
|
2575
2861
|
*/
|
|
2576
2862
|
static async getSyncProgress() {
|
|
2577
|
-
const totalDocuments = await this.countDocuments();
|
|
2578
|
-
const indexedDocuments = await this.countDocuments({ _meiliIndex: true });
|
|
2863
|
+
const totalDocuments = await this.countDocuments({ expiredAt: null });
|
|
2864
|
+
const indexedDocuments = await this.countDocuments({ expiredAt: null, _meiliIndex: true });
|
|
2579
2865
|
return {
|
|
2580
2866
|
totalProcessed: indexedDocuments,
|
|
2581
2867
|
totalDocuments,
|
|
@@ -2583,92 +2869,84 @@ const createMeiliMongooseModel = ({ index, attributesToIndex, syncOptions, }) =>
|
|
|
2583
2869
|
};
|
|
2584
2870
|
}
|
|
2585
2871
|
/**
|
|
2586
|
-
* Synchronizes
|
|
2587
|
-
*
|
|
2588
|
-
|
|
2589
|
-
|
|
2872
|
+
* Synchronizes data between the MongoDB collection and the MeiliSearch index by
|
|
2873
|
+
* incrementally indexing only documents where `expiredAt` is `null` and `_meiliIndex` is `false`
|
|
2874
|
+
* (i.e., non-expired documents that have not yet been indexed).
|
|
2875
|
+
* */
|
|
2876
|
+
static async syncWithMeili() {
|
|
2877
|
+
const startTime = Date.now();
|
|
2878
|
+
const { batchSize, delayMs } = syncConfig;
|
|
2879
|
+
const collectionName = primaryKey === 'messageId' ? 'messages' : 'conversations';
|
|
2880
|
+
logger.info(`[syncWithMeili] Starting sync for ${collectionName} with batch size ${batchSize}`);
|
|
2881
|
+
// Get approximate total count for raw estimation, the sync should not overcome this number
|
|
2882
|
+
const approxTotalCount = await this.estimatedDocumentCount();
|
|
2883
|
+
logger.info(`[syncWithMeili] Approximate total number of all ${collectionName}: ${approxTotalCount}`);
|
|
2590
2884
|
try {
|
|
2591
|
-
const startTime = Date.now();
|
|
2592
|
-
const { batchSize, delayMs } = syncConfig;
|
|
2593
|
-
logger.info(`[syncWithMeili] Starting sync for ${primaryKey === 'messageId' ? 'messages' : 'conversations'} with batch size ${batchSize}`);
|
|
2594
|
-
// Build query with resume capability
|
|
2595
|
-
const query = {};
|
|
2596
|
-
if (options === null || options === void 0 ? void 0 : options.resumeFromId) {
|
|
2597
|
-
query._id = { $gt: options.resumeFromId };
|
|
2598
|
-
}
|
|
2599
|
-
// Get total count for progress tracking
|
|
2600
|
-
const totalCount = await this.countDocuments(query);
|
|
2601
|
-
let processedCount = 0;
|
|
2602
2885
|
// First, handle documents that need to be removed from Meili
|
|
2886
|
+
logger.info(`[syncWithMeili] Starting cleanup of Meili index ${index.uid} before sync`);
|
|
2603
2887
|
await this.cleanupMeiliIndex(index, primaryKey, batchSize, delayMs);
|
|
2604
|
-
|
|
2605
|
-
|
|
2606
|
-
|
|
2607
|
-
|
|
2608
|
-
|
|
2609
|
-
|
|
2610
|
-
|
|
2611
|
-
|
|
2612
|
-
|
|
2613
|
-
|
|
2614
|
-
|
|
2615
|
-
|
|
2616
|
-
|
|
2617
|
-
|
|
2618
|
-
|
|
2619
|
-
|
|
2620
|
-
|
|
2621
|
-
|
|
2622
|
-
|
|
2623
|
-
|
|
2624
|
-
|
|
2625
|
-
|
|
2888
|
+
logger.info(`[syncWithMeili] Completed cleanup of Meili index: ${index.uid}`);
|
|
2889
|
+
}
|
|
2890
|
+
catch (error) {
|
|
2891
|
+
logger.error('[syncWithMeili] Error during cleanup Meili before sync:', error);
|
|
2892
|
+
throw error;
|
|
2893
|
+
}
|
|
2894
|
+
let processedCount = 0;
|
|
2895
|
+
let hasMore = true;
|
|
2896
|
+
while (hasMore) {
|
|
2897
|
+
const query = {
|
|
2898
|
+
expiredAt: null,
|
|
2899
|
+
_meiliIndex: false,
|
|
2900
|
+
};
|
|
2901
|
+
try {
|
|
2902
|
+
const documents = await this.find(query)
|
|
2903
|
+
.select(attributesToIndex.join(' ') + ' _meiliIndex')
|
|
2904
|
+
.limit(batchSize)
|
|
2905
|
+
.lean();
|
|
2906
|
+
// Check if there are more documents to process
|
|
2907
|
+
if (documents.length === 0) {
|
|
2908
|
+
logger.info('[syncWithMeili] No more documents to process');
|
|
2909
|
+
break;
|
|
2626
2910
|
}
|
|
2627
|
-
|
|
2628
|
-
|
|
2629
|
-
|
|
2630
|
-
|
|
2631
|
-
|
|
2632
|
-
|
|
2633
|
-
|
|
2634
|
-
|
|
2635
|
-
|
|
2636
|
-
|
|
2637
|
-
if (delayMs > 0) {
|
|
2638
|
-
await new Promise((resolve) => setTimeout(resolve, delayMs));
|
|
2639
|
-
}
|
|
2911
|
+
// Process the batch
|
|
2912
|
+
await this.processSyncBatch(index, documents);
|
|
2913
|
+
processedCount += documents.length;
|
|
2914
|
+
logger.info(`[syncWithMeili] Processed: ${processedCount}`);
|
|
2915
|
+
if (documents.length < batchSize) {
|
|
2916
|
+
hasMore = false;
|
|
2917
|
+
}
|
|
2918
|
+
// Add delay to prevent overwhelming resources
|
|
2919
|
+
if (hasMore && delayMs > 0) {
|
|
2920
|
+
await new Promise((resolve) => setTimeout(resolve, delayMs));
|
|
2640
2921
|
}
|
|
2641
2922
|
}
|
|
2642
|
-
|
|
2643
|
-
|
|
2644
|
-
|
|
2923
|
+
catch (error) {
|
|
2924
|
+
logger.error('[syncWithMeili] Error processing documents batch:', error);
|
|
2925
|
+
throw error;
|
|
2645
2926
|
}
|
|
2646
|
-
const duration = Date.now() - startTime;
|
|
2647
|
-
logger.info(`[syncWithMeili] Completed sync for ${primaryKey === 'messageId' ? 'messages' : 'conversations'} in ${duration}ms`);
|
|
2648
|
-
}
|
|
2649
|
-
catch (error) {
|
|
2650
|
-
logger.error('[syncWithMeili] Error during sync:', error);
|
|
2651
|
-
throw error;
|
|
2652
2927
|
}
|
|
2928
|
+
const duration = Date.now() - startTime;
|
|
2929
|
+
logger.info(`[syncWithMeili] Completed sync for ${collectionName}. Processed ${processedCount} documents in ${duration}ms`);
|
|
2653
2930
|
}
|
|
2654
2931
|
/**
|
|
2655
2932
|
* Process a batch of documents for syncing
|
|
2656
2933
|
*/
|
|
2657
|
-
static async processSyncBatch(index, documents
|
|
2934
|
+
static async processSyncBatch(index, documents) {
|
|
2658
2935
|
if (documents.length === 0) {
|
|
2659
2936
|
return;
|
|
2660
2937
|
}
|
|
2938
|
+
// Format documents for MeiliSearch
|
|
2939
|
+
const formattedDocs = documents.map((doc) => _.omitBy(_.pick(doc, attributesToIndex), (_v, k) => k.startsWith('$')));
|
|
2661
2940
|
try {
|
|
2662
2941
|
// Add documents to MeiliSearch
|
|
2663
|
-
await index.
|
|
2942
|
+
await index.addDocumentsInBatches(formattedDocs);
|
|
2664
2943
|
// Update MongoDB to mark documents as indexed
|
|
2665
|
-
|
|
2666
|
-
|
|
2667
|
-
}
|
|
2944
|
+
const docsIds = documents.map((doc) => doc._id);
|
|
2945
|
+
await this.updateMany({ _id: { $in: docsIds } }, { $set: { _meiliIndex: true } });
|
|
2668
2946
|
}
|
|
2669
2947
|
catch (error) {
|
|
2670
2948
|
logger.error('[processSyncBatch] Error processing batch:', error);
|
|
2671
|
-
|
|
2949
|
+
throw error;
|
|
2672
2950
|
}
|
|
2673
2951
|
}
|
|
2674
2952
|
/**
|
|
@@ -2693,7 +2971,7 @@ const createMeiliMongooseModel = ({ index, attributesToIndex, syncOptions, }) =>
|
|
|
2693
2971
|
// Delete documents that don't exist in MongoDB
|
|
2694
2972
|
const toDelete = meiliIds.filter((id) => !existingIds.has(id));
|
|
2695
2973
|
if (toDelete.length > 0) {
|
|
2696
|
-
await
|
|
2974
|
+
await index.deleteDocuments(toDelete.map(String));
|
|
2697
2975
|
logger.debug(`[cleanupMeiliIndex] Deleted ${toDelete.length} orphaned documents`);
|
|
2698
2976
|
}
|
|
2699
2977
|
offset += batchSize;
|
|
@@ -2720,7 +2998,7 @@ const createMeiliMongooseModel = ({ index, attributesToIndex, syncOptions, }) =>
|
|
|
2720
2998
|
const data = await index.search(q, params);
|
|
2721
2999
|
if (populate) {
|
|
2722
3000
|
const query = {};
|
|
2723
|
-
query[primaryKey] = _.map(data.hits, (hit) =>
|
|
3001
|
+
query[primaryKey] = _.map(data.hits, (hit) => hit[primaryKey]);
|
|
2724
3002
|
const projection = Object.keys(this.schema.obj).reduce((results, key) => {
|
|
2725
3003
|
if (!key.startsWith('$')) {
|
|
2726
3004
|
results[key] = 1;
|
|
@@ -2754,7 +3032,7 @@ const createMeiliMongooseModel = ({ index, attributesToIndex, syncOptions, }) =>
|
|
|
2754
3032
|
object.conversationId = object.conversationId.replace(/\|/g, '--');
|
|
2755
3033
|
}
|
|
2756
3034
|
if (object.content && Array.isArray(object.content)) {
|
|
2757
|
-
object.text = parseTextParts(object.content);
|
|
3035
|
+
object.text = librechatDataProvider.parseTextParts(object.content);
|
|
2758
3036
|
delete object.content;
|
|
2759
3037
|
}
|
|
2760
3038
|
return object;
|
|
@@ -2763,6 +3041,10 @@ const createMeiliMongooseModel = ({ index, attributesToIndex, syncOptions, }) =>
|
|
|
2763
3041
|
* Adds the current document to the MeiliSearch index with retry logic
|
|
2764
3042
|
*/
|
|
2765
3043
|
async addObjectToMeili(next) {
|
|
3044
|
+
// If this conversation or message has a TTL, don't index it
|
|
3045
|
+
if (!_.isNil(this.expiredAt)) {
|
|
3046
|
+
return next();
|
|
3047
|
+
}
|
|
2766
3048
|
const object = this.preprocessObjectForIndex();
|
|
2767
3049
|
const maxRetries = 3;
|
|
2768
3050
|
let retryCount = 0;
|
|
@@ -3077,7 +3359,38 @@ function createAgentModel(mongoose) {
|
|
|
3077
3359
|
* Creates or returns the AgentCategory model using the provided mongoose instance and schema
|
|
3078
3360
|
*/
|
|
3079
3361
|
function createAgentCategoryModel(mongoose) {
|
|
3080
|
-
return mongoose.models.AgentCategory ||
|
|
3362
|
+
return (mongoose.models.AgentCategory ||
|
|
3363
|
+
mongoose.model('AgentCategory', agentCategorySchema));
|
|
3364
|
+
}
|
|
3365
|
+
|
|
3366
|
+
const mcpServerSchema = new mongoose.Schema({
|
|
3367
|
+
serverName: {
|
|
3368
|
+
type: String,
|
|
3369
|
+
index: true,
|
|
3370
|
+
unique: true,
|
|
3371
|
+
required: true,
|
|
3372
|
+
},
|
|
3373
|
+
config: {
|
|
3374
|
+
type: mongoose.Schema.Types.Mixed,
|
|
3375
|
+
required: true,
|
|
3376
|
+
// Config contains: title, description, url, oauth, etc.
|
|
3377
|
+
},
|
|
3378
|
+
author: {
|
|
3379
|
+
type: mongoose.Schema.Types.ObjectId,
|
|
3380
|
+
ref: 'User',
|
|
3381
|
+
required: true,
|
|
3382
|
+
index: true,
|
|
3383
|
+
},
|
|
3384
|
+
}, {
|
|
3385
|
+
timestamps: true,
|
|
3386
|
+
});
|
|
3387
|
+
mcpServerSchema.index({ updatedAt: -1, _id: 1 });
|
|
3388
|
+
|
|
3389
|
+
/**
|
|
3390
|
+
* Creates or returns the MCPServer model using the provided mongoose instance and schema
|
|
3391
|
+
*/
|
|
3392
|
+
function createMCPServerModel(mongoose) {
|
|
3393
|
+
return (mongoose.models.MCPServer || mongoose.model('MCPServer', mcpServerSchema));
|
|
3081
3394
|
}
|
|
3082
3395
|
|
|
3083
3396
|
/**
|
|
@@ -3205,7 +3518,7 @@ const accessRoleSchema = new mongoose.Schema({
|
|
|
3205
3518
|
description: String,
|
|
3206
3519
|
resourceType: {
|
|
3207
3520
|
type: String,
|
|
3208
|
-
enum: ['agent', 'project', 'file', 'promptGroup'],
|
|
3521
|
+
enum: ['agent', 'project', 'file', 'promptGroup', 'mcpServer'],
|
|
3209
3522
|
required: true,
|
|
3210
3523
|
default: 'agent',
|
|
3211
3524
|
},
|
|
@@ -3306,6 +3619,7 @@ function createModels(mongoose) {
|
|
|
3306
3619
|
Message: createMessageModel(mongoose),
|
|
3307
3620
|
Agent: createAgentModel(mongoose),
|
|
3308
3621
|
AgentCategory: createAgentCategoryModel(mongoose),
|
|
3622
|
+
MCPServer: createMCPServerModel(mongoose),
|
|
3309
3623
|
Role: createRoleModel(mongoose),
|
|
3310
3624
|
Action: createActionModel(mongoose),
|
|
3311
3625
|
Assistant: createAssistantModel(mongoose),
|
|
@@ -3328,7 +3642,6 @@ function createModels(mongoose) {
|
|
|
3328
3642
|
};
|
|
3329
3643
|
}
|
|
3330
3644
|
|
|
3331
|
-
var _a;
|
|
3332
3645
|
class SessionError extends Error {
|
|
3333
3646
|
constructor(message, code = 'SESSION_ERROR') {
|
|
3334
3647
|
super(message);
|
|
@@ -3336,22 +3649,24 @@ class SessionError extends Error {
|
|
|
3336
3649
|
this.code = code;
|
|
3337
3650
|
}
|
|
3338
3651
|
}
|
|
3339
|
-
|
|
3340
|
-
const
|
|
3652
|
+
/** Default refresh token expiry: 7 days in milliseconds */
|
|
3653
|
+
const DEFAULT_REFRESH_TOKEN_EXPIRY = 1000 * 60 * 60 * 24 * 7;
|
|
3341
3654
|
// Factory function that takes mongoose instance and returns the methods
|
|
3342
3655
|
function createSessionMethods(mongoose) {
|
|
3343
3656
|
/**
|
|
3344
3657
|
* Creates a new session for a user
|
|
3345
3658
|
*/
|
|
3346
3659
|
async function createSession(userId, options = {}) {
|
|
3660
|
+
var _a;
|
|
3347
3661
|
if (!userId) {
|
|
3348
3662
|
throw new SessionError('User ID is required', 'INVALID_USER_ID');
|
|
3349
3663
|
}
|
|
3664
|
+
const expiresIn = (_a = options.expiresIn) !== null && _a !== void 0 ? _a : DEFAULT_REFRESH_TOKEN_EXPIRY;
|
|
3350
3665
|
try {
|
|
3351
3666
|
const Session = mongoose.models.Session;
|
|
3352
3667
|
const currentSession = new Session({
|
|
3353
3668
|
user: userId,
|
|
3354
|
-
expiration: options.expiration || new Date(Date.now() +
|
|
3669
|
+
expiration: options.expiration || new Date(Date.now() + expiresIn),
|
|
3355
3670
|
});
|
|
3356
3671
|
const refreshToken = await generateRefreshToken(currentSession);
|
|
3357
3672
|
return { session: currentSession, refreshToken };
|
|
@@ -3405,14 +3720,16 @@ function createSessionMethods(mongoose) {
|
|
|
3405
3720
|
/**
|
|
3406
3721
|
* Updates session expiration
|
|
3407
3722
|
*/
|
|
3408
|
-
async function updateExpiration(session, newExpiration) {
|
|
3723
|
+
async function updateExpiration(session, newExpiration, options = {}) {
|
|
3724
|
+
var _a;
|
|
3725
|
+
const expiresIn = (_a = options.expiresIn) !== null && _a !== void 0 ? _a : DEFAULT_REFRESH_TOKEN_EXPIRY;
|
|
3409
3726
|
try {
|
|
3410
3727
|
const Session = mongoose.models.Session;
|
|
3411
3728
|
const sessionDoc = typeof session === 'string' ? await Session.findById(session) : session;
|
|
3412
3729
|
if (!sessionDoc) {
|
|
3413
3730
|
throw new SessionError('Session not found', 'SESSION_NOT_FOUND');
|
|
3414
3731
|
}
|
|
3415
|
-
sessionDoc.expiration = newExpiration || new Date(Date.now() +
|
|
3732
|
+
sessionDoc.expiration = newExpiration || new Date(Date.now() + expiresIn);
|
|
3416
3733
|
return await sessionDoc.save();
|
|
3417
3734
|
}
|
|
3418
3735
|
catch (error) {
|
|
@@ -3483,7 +3800,9 @@ function createSessionMethods(mongoose) {
|
|
|
3483
3800
|
throw new SessionError('Invalid session object', 'INVALID_SESSION');
|
|
3484
3801
|
}
|
|
3485
3802
|
try {
|
|
3486
|
-
const expiresIn = session.expiration
|
|
3803
|
+
const expiresIn = session.expiration
|
|
3804
|
+
? session.expiration.getTime()
|
|
3805
|
+
: Date.now() + DEFAULT_REFRESH_TOKEN_EXPIRY;
|
|
3487
3806
|
if (!session.expiration) {
|
|
3488
3807
|
session.expiration = new Date(expiresIn);
|
|
3489
3808
|
}
|
|
@@ -3689,6 +4008,8 @@ function createRoleMethods(mongoose) {
|
|
|
3689
4008
|
};
|
|
3690
4009
|
}
|
|
3691
4010
|
|
|
4011
|
+
/** Default JWT session expiry: 15 minutes in milliseconds */
|
|
4012
|
+
const DEFAULT_SESSION_EXPIRY = 1000 * 60 * 15;
|
|
3692
4013
|
/** Factory function that takes mongoose instance and returns the methods */
|
|
3693
4014
|
function createUserMethods(mongoose) {
|
|
3694
4015
|
/**
|
|
@@ -3814,23 +4135,14 @@ function createUserMethods(mongoose) {
|
|
|
3814
4135
|
}
|
|
3815
4136
|
/**
|
|
3816
4137
|
* Generates a JWT token for a given user.
|
|
4138
|
+
* @param user - The user object
|
|
4139
|
+
* @param expiresIn - Optional expiry time in milliseconds. Default: 15 minutes
|
|
3817
4140
|
*/
|
|
3818
|
-
async function generateToken(user) {
|
|
4141
|
+
async function generateToken(user, expiresIn) {
|
|
3819
4142
|
if (!user) {
|
|
3820
4143
|
throw new Error('No user provided');
|
|
3821
4144
|
}
|
|
3822
|
-
|
|
3823
|
-
if (process.env.SESSION_EXPIRY !== undefined && process.env.SESSION_EXPIRY !== '') {
|
|
3824
|
-
try {
|
|
3825
|
-
const evaluated = eval(process.env.SESSION_EXPIRY);
|
|
3826
|
-
if (evaluated) {
|
|
3827
|
-
expires = evaluated;
|
|
3828
|
-
}
|
|
3829
|
-
}
|
|
3830
|
-
catch (error) {
|
|
3831
|
-
console.warn('Invalid SESSION_EXPIRY expression, using default:', error);
|
|
3832
|
-
}
|
|
3833
|
-
}
|
|
4145
|
+
const expires = expiresIn !== null && expiresIn !== void 0 ? expiresIn : DEFAULT_SESSION_EXPIRY;
|
|
3834
4146
|
return await signPayload({
|
|
3835
4147
|
payload: {
|
|
3836
4148
|
id: user._id,
|
|
@@ -3924,6 +4236,26 @@ function createUserMethods(mongoose) {
|
|
|
3924
4236
|
return userWithoutScore;
|
|
3925
4237
|
});
|
|
3926
4238
|
};
|
|
4239
|
+
/**
|
|
4240
|
+
* Updates the plugins for a user based on the action specified (install/uninstall).
|
|
4241
|
+
* @param userId - The user ID whose plugins are to be updated
|
|
4242
|
+
* @param plugins - The current plugins array
|
|
4243
|
+
* @param pluginKey - The key of the plugin to install or uninstall
|
|
4244
|
+
* @param action - The action to perform, 'install' or 'uninstall'
|
|
4245
|
+
* @returns The result of the update operation or null if action is invalid
|
|
4246
|
+
*/
|
|
4247
|
+
async function updateUserPlugins(userId, plugins, pluginKey, action) {
|
|
4248
|
+
const userPlugins = plugins !== null && plugins !== void 0 ? plugins : [];
|
|
4249
|
+
if (action === 'install') {
|
|
4250
|
+
return updateUser(userId, { plugins: [...userPlugins, pluginKey] });
|
|
4251
|
+
}
|
|
4252
|
+
if (action === 'uninstall') {
|
|
4253
|
+
return updateUser(userId, {
|
|
4254
|
+
plugins: userPlugins.filter((plugin) => plugin !== pluginKey),
|
|
4255
|
+
});
|
|
4256
|
+
}
|
|
4257
|
+
return null;
|
|
4258
|
+
}
|
|
3927
4259
|
return {
|
|
3928
4260
|
findUser,
|
|
3929
4261
|
countUsers,
|
|
@@ -3933,10 +4265,356 @@ function createUserMethods(mongoose) {
|
|
|
3933
4265
|
getUserById,
|
|
3934
4266
|
generateToken,
|
|
3935
4267
|
deleteUserById,
|
|
4268
|
+
updateUserPlugins,
|
|
3936
4269
|
toggleUserMemories,
|
|
3937
4270
|
};
|
|
3938
4271
|
}
|
|
3939
4272
|
|
|
4273
|
+
/** Factory function that takes mongoose instance and returns the key methods */
|
|
4274
|
+
function createKeyMethods(mongoose) {
|
|
4275
|
+
/**
|
|
4276
|
+
* Retrieves and decrypts the key value for a given user identified by userId and identifier name.
|
|
4277
|
+
* @param params - The parameters object
|
|
4278
|
+
* @param params.userId - The unique identifier for the user
|
|
4279
|
+
* @param params.name - The name associated with the key
|
|
4280
|
+
* @returns The decrypted key value
|
|
4281
|
+
* @throws Error if the key is not found or if there is a problem during key retrieval
|
|
4282
|
+
* @description This function searches for a user's key in the database using their userId and name.
|
|
4283
|
+
* If found, it decrypts the value of the key and returns it. If no key is found, it throws
|
|
4284
|
+
* an error indicating that there is no user key available.
|
|
4285
|
+
*/
|
|
4286
|
+
async function getUserKey(params) {
|
|
4287
|
+
const { userId, name } = params;
|
|
4288
|
+
const Key = mongoose.models.Key;
|
|
4289
|
+
const keyValue = (await Key.findOne({ userId, name }).lean());
|
|
4290
|
+
if (!keyValue) {
|
|
4291
|
+
throw new Error(JSON.stringify({
|
|
4292
|
+
type: librechatDataProvider.ErrorTypes.NO_USER_KEY,
|
|
4293
|
+
}));
|
|
4294
|
+
}
|
|
4295
|
+
return await decrypt(keyValue.value);
|
|
4296
|
+
}
|
|
4297
|
+
/**
|
|
4298
|
+
* Retrieves, decrypts, and parses the key values for a given user identified by userId and name.
|
|
4299
|
+
* @param params - The parameters object
|
|
4300
|
+
* @param params.userId - The unique identifier for the user
|
|
4301
|
+
* @param params.name - The name associated with the key
|
|
4302
|
+
* @returns The decrypted and parsed key values
|
|
4303
|
+
* @throws Error if the key is invalid or if there is a problem during key value parsing
|
|
4304
|
+
* @description This function retrieves a user's encrypted key using their userId and name, decrypts it,
|
|
4305
|
+
* and then attempts to parse the decrypted string into a JSON object. If the parsing fails,
|
|
4306
|
+
* it throws an error indicating that the user key is invalid.
|
|
4307
|
+
*/
|
|
4308
|
+
async function getUserKeyValues(params) {
|
|
4309
|
+
const { userId, name } = params;
|
|
4310
|
+
const userValues = await getUserKey({ userId, name });
|
|
4311
|
+
try {
|
|
4312
|
+
return JSON.parse(userValues);
|
|
4313
|
+
}
|
|
4314
|
+
catch (e) {
|
|
4315
|
+
logger$1.error('[getUserKeyValues]', e);
|
|
4316
|
+
throw new Error(JSON.stringify({
|
|
4317
|
+
type: librechatDataProvider.ErrorTypes.INVALID_USER_KEY,
|
|
4318
|
+
}));
|
|
4319
|
+
}
|
|
4320
|
+
}
|
|
4321
|
+
/**
|
|
4322
|
+
* Retrieves the expiry information of a user's key identified by userId and name.
|
|
4323
|
+
* @param params - The parameters object
|
|
4324
|
+
* @param params.userId - The unique identifier for the user
|
|
4325
|
+
* @param params.name - The name associated with the key
|
|
4326
|
+
* @returns The expiry date of the key or null if the key doesn't exist
|
|
4327
|
+
* @description This function fetches a user's key from the database using their userId and name and
|
|
4328
|
+
* returns its expiry date. If the key is not found, it returns null for the expiry date.
|
|
4329
|
+
*/
|
|
4330
|
+
async function getUserKeyExpiry(params) {
|
|
4331
|
+
const { userId, name } = params;
|
|
4332
|
+
const Key = mongoose.models.Key;
|
|
4333
|
+
const keyValue = (await Key.findOne({ userId, name }).lean());
|
|
4334
|
+
if (!keyValue) {
|
|
4335
|
+
return { expiresAt: null };
|
|
4336
|
+
}
|
|
4337
|
+
return { expiresAt: keyValue.expiresAt || 'never' };
|
|
4338
|
+
}
|
|
4339
|
+
/**
|
|
4340
|
+
* Updates or inserts a new key for a given user identified by userId and name, with a specified value and expiry date.
|
|
4341
|
+
* @param params - The parameters object
|
|
4342
|
+
* @param params.userId - The unique identifier for the user
|
|
4343
|
+
* @param params.name - The name associated with the key
|
|
4344
|
+
* @param params.value - The value to be encrypted and stored as the key's value
|
|
4345
|
+
* @param params.expiresAt - The expiry date for the key [optional]
|
|
4346
|
+
* @returns The updated or newly inserted key document
|
|
4347
|
+
* @description This function either updates an existing user key or inserts a new one into the database,
|
|
4348
|
+
* after encrypting the provided value. It sets the provided expiry date for the key (or unsets for no expiry).
|
|
4349
|
+
*/
|
|
4350
|
+
async function updateUserKey(params) {
|
|
4351
|
+
const { userId, name, value, expiresAt = null } = params;
|
|
4352
|
+
const Key = mongoose.models.Key;
|
|
4353
|
+
const encryptedValue = await encrypt(value);
|
|
4354
|
+
const updateObject = {
|
|
4355
|
+
userId,
|
|
4356
|
+
name,
|
|
4357
|
+
value: encryptedValue,
|
|
4358
|
+
};
|
|
4359
|
+
const updateQuery = {
|
|
4360
|
+
$set: updateObject,
|
|
4361
|
+
};
|
|
4362
|
+
if (expiresAt) {
|
|
4363
|
+
updateObject.expiresAt = new Date(expiresAt);
|
|
4364
|
+
}
|
|
4365
|
+
else {
|
|
4366
|
+
updateQuery.$unset = { expiresAt: '' };
|
|
4367
|
+
}
|
|
4368
|
+
return await Key.findOneAndUpdate({ userId, name }, updateQuery, {
|
|
4369
|
+
upsert: true,
|
|
4370
|
+
new: true,
|
|
4371
|
+
}).lean();
|
|
4372
|
+
}
|
|
4373
|
+
/**
|
|
4374
|
+
* Deletes a key or all keys for a given user identified by userId, optionally based on a specified name.
|
|
4375
|
+
* @param params - The parameters object
|
|
4376
|
+
* @param params.userId - The unique identifier for the user
|
|
4377
|
+
* @param params.name - The name associated with the key to delete. If not provided and all is true, deletes all keys
|
|
4378
|
+
* @param params.all - Whether to delete all keys for the user
|
|
4379
|
+
* @returns The result of the deletion operation
|
|
4380
|
+
* @description This function deletes a specific key or all keys for a user from the database.
|
|
4381
|
+
* If a name is provided and all is false, it deletes only the key with that name.
|
|
4382
|
+
* If all is true, it ignores the name and deletes all keys for the user.
|
|
4383
|
+
*/
|
|
4384
|
+
async function deleteUserKey(params) {
|
|
4385
|
+
const { userId, name, all = false } = params;
|
|
4386
|
+
const Key = mongoose.models.Key;
|
|
4387
|
+
if (all) {
|
|
4388
|
+
return await Key.deleteMany({ userId });
|
|
4389
|
+
}
|
|
4390
|
+
return await Key.findOneAndDelete({ userId, name }).lean();
|
|
4391
|
+
}
|
|
4392
|
+
return {
|
|
4393
|
+
getUserKey,
|
|
4394
|
+
updateUserKey,
|
|
4395
|
+
deleteUserKey,
|
|
4396
|
+
getUserKeyValues,
|
|
4397
|
+
getUserKeyExpiry,
|
|
4398
|
+
};
|
|
4399
|
+
}
|
|
4400
|
+
|
|
4401
|
+
/** Factory function that takes mongoose instance and returns the file methods */
|
|
4402
|
+
function createFileMethods(mongoose) {
|
|
4403
|
+
/**
|
|
4404
|
+
* Finds a file by its file_id with additional query options.
|
|
4405
|
+
* @param file_id - The unique identifier of the file
|
|
4406
|
+
* @param options - Query options for filtering, projection, etc.
|
|
4407
|
+
* @returns A promise that resolves to the file document or null
|
|
4408
|
+
*/
|
|
4409
|
+
async function findFileById(file_id, options = {}) {
|
|
4410
|
+
const File = mongoose.models.File;
|
|
4411
|
+
return File.findOne({ file_id, ...options }).lean();
|
|
4412
|
+
}
|
|
4413
|
+
/**
|
|
4414
|
+
* Retrieves files matching a given filter, sorted by the most recently updated.
|
|
4415
|
+
* @param filter - The filter criteria to apply
|
|
4416
|
+
* @param _sortOptions - Optional sort parameters
|
|
4417
|
+
* @param selectFields - Fields to include/exclude in the query results. Default excludes the 'text' field
|
|
4418
|
+
* @param options - Additional query options (userId, agentId for ACL)
|
|
4419
|
+
* @returns A promise that resolves to an array of file documents
|
|
4420
|
+
*/
|
|
4421
|
+
async function getFiles(filter, _sortOptions, selectFields) {
|
|
4422
|
+
const File = mongoose.models.File;
|
|
4423
|
+
const sortOptions = { updatedAt: -1, ..._sortOptions };
|
|
4424
|
+
const query = File.find(filter);
|
|
4425
|
+
if (selectFields != null) {
|
|
4426
|
+
query.select(selectFields);
|
|
4427
|
+
}
|
|
4428
|
+
else {
|
|
4429
|
+
query.select({ text: 0 });
|
|
4430
|
+
}
|
|
4431
|
+
return await query.sort(sortOptions).lean();
|
|
4432
|
+
}
|
|
4433
|
+
/**
|
|
4434
|
+
* Retrieves tool files (files that are embedded or have a fileIdentifier) from an array of file IDs
|
|
4435
|
+
* @param fileIds - Array of file_id strings to search for
|
|
4436
|
+
* @param toolResourceSet - Optional filter for tool resources
|
|
4437
|
+
* @returns Files that match the criteria
|
|
4438
|
+
*/
|
|
4439
|
+
async function getToolFilesByIds(fileIds, toolResourceSet) {
|
|
4440
|
+
var _a, _b, _c;
|
|
4441
|
+
if (!fileIds || !fileIds.length || !(toolResourceSet === null || toolResourceSet === void 0 ? void 0 : toolResourceSet.size)) {
|
|
4442
|
+
return [];
|
|
4443
|
+
}
|
|
4444
|
+
try {
|
|
4445
|
+
const filter = {
|
|
4446
|
+
file_id: { $in: fileIds },
|
|
4447
|
+
$or: [],
|
|
4448
|
+
};
|
|
4449
|
+
if (toolResourceSet.has(librechatDataProvider.EToolResources.context)) {
|
|
4450
|
+
(_a = filter.$or) === null || _a === void 0 ? void 0 : _a.push({ text: { $exists: true, $ne: null }, context: librechatDataProvider.FileContext.agents });
|
|
4451
|
+
}
|
|
4452
|
+
if (toolResourceSet.has(librechatDataProvider.EToolResources.file_search)) {
|
|
4453
|
+
(_b = filter.$or) === null || _b === void 0 ? void 0 : _b.push({ embedded: true });
|
|
4454
|
+
}
|
|
4455
|
+
if (toolResourceSet.has(librechatDataProvider.EToolResources.execute_code)) {
|
|
4456
|
+
(_c = filter.$or) === null || _c === void 0 ? void 0 : _c.push({ 'metadata.fileIdentifier': { $exists: true } });
|
|
4457
|
+
}
|
|
4458
|
+
const selectFields = { text: 0 };
|
|
4459
|
+
const sortOptions = { updatedAt: -1 };
|
|
4460
|
+
const results = await getFiles(filter, sortOptions, selectFields);
|
|
4461
|
+
return results !== null && results !== void 0 ? results : [];
|
|
4462
|
+
}
|
|
4463
|
+
catch (error) {
|
|
4464
|
+
logger$1.error('[getToolFilesByIds] Error retrieving tool files:', error);
|
|
4465
|
+
throw new Error('Error retrieving tool files');
|
|
4466
|
+
}
|
|
4467
|
+
}
|
|
4468
|
+
/**
|
|
4469
|
+
* Creates a new file with a TTL of 1 hour.
|
|
4470
|
+
* @param data - The file data to be created, must contain file_id
|
|
4471
|
+
* @param disableTTL - Whether to disable the TTL
|
|
4472
|
+
* @returns A promise that resolves to the created file document
|
|
4473
|
+
*/
|
|
4474
|
+
async function createFile(data, disableTTL) {
|
|
4475
|
+
const File = mongoose.models.File;
|
|
4476
|
+
const fileData = {
|
|
4477
|
+
...data,
|
|
4478
|
+
expiresAt: new Date(Date.now() + 3600 * 1000),
|
|
4479
|
+
};
|
|
4480
|
+
if (disableTTL) {
|
|
4481
|
+
delete fileData.expiresAt;
|
|
4482
|
+
}
|
|
4483
|
+
return File.findOneAndUpdate({ file_id: data.file_id }, fileData, {
|
|
4484
|
+
new: true,
|
|
4485
|
+
upsert: true,
|
|
4486
|
+
}).lean();
|
|
4487
|
+
}
|
|
4488
|
+
/**
|
|
4489
|
+
* Updates a file identified by file_id with new data and removes the TTL.
|
|
4490
|
+
* @param data - The data to update, must contain file_id
|
|
4491
|
+
* @returns A promise that resolves to the updated file document
|
|
4492
|
+
*/
|
|
4493
|
+
async function updateFile(data) {
|
|
4494
|
+
const File = mongoose.models.File;
|
|
4495
|
+
const { file_id, ...update } = data;
|
|
4496
|
+
const updateOperation = {
|
|
4497
|
+
$set: update,
|
|
4498
|
+
$unset: { expiresAt: '' },
|
|
4499
|
+
};
|
|
4500
|
+
return File.findOneAndUpdate({ file_id }, updateOperation, {
|
|
4501
|
+
new: true,
|
|
4502
|
+
}).lean();
|
|
4503
|
+
}
|
|
4504
|
+
/**
|
|
4505
|
+
* Increments the usage of a file identified by file_id.
|
|
4506
|
+
* @param data - The data to update, must contain file_id and the increment value for usage
|
|
4507
|
+
* @returns A promise that resolves to the updated file document
|
|
4508
|
+
*/
|
|
4509
|
+
async function updateFileUsage(data) {
|
|
4510
|
+
const File = mongoose.models.File;
|
|
4511
|
+
const { file_id, inc = 1 } = data;
|
|
4512
|
+
const updateOperation = {
|
|
4513
|
+
$inc: { usage: inc },
|
|
4514
|
+
$unset: { expiresAt: '', temp_file_id: '' },
|
|
4515
|
+
};
|
|
4516
|
+
return File.findOneAndUpdate({ file_id }, updateOperation, {
|
|
4517
|
+
new: true,
|
|
4518
|
+
}).lean();
|
|
4519
|
+
}
|
|
4520
|
+
/**
|
|
4521
|
+
* Deletes a file identified by file_id.
|
|
4522
|
+
* @param file_id - The unique identifier of the file to delete
|
|
4523
|
+
* @returns A promise that resolves to the deleted file document or null
|
|
4524
|
+
*/
|
|
4525
|
+
async function deleteFile(file_id) {
|
|
4526
|
+
const File = mongoose.models.File;
|
|
4527
|
+
return File.findOneAndDelete({ file_id }).lean();
|
|
4528
|
+
}
|
|
4529
|
+
/**
|
|
4530
|
+
* Deletes a file identified by a filter.
|
|
4531
|
+
* @param filter - The filter criteria to apply
|
|
4532
|
+
* @returns A promise that resolves to the deleted file document or null
|
|
4533
|
+
*/
|
|
4534
|
+
async function deleteFileByFilter(filter) {
|
|
4535
|
+
const File = mongoose.models.File;
|
|
4536
|
+
return File.findOneAndDelete(filter).lean();
|
|
4537
|
+
}
|
|
4538
|
+
/**
|
|
4539
|
+
* Deletes multiple files identified by an array of file_ids.
|
|
4540
|
+
* @param file_ids - The unique identifiers of the files to delete
|
|
4541
|
+
* @param user - Optional user ID to filter by
|
|
4542
|
+
* @returns A promise that resolves to the result of the deletion operation
|
|
4543
|
+
*/
|
|
4544
|
+
async function deleteFiles(file_ids, user) {
|
|
4545
|
+
const File = mongoose.models.File;
|
|
4546
|
+
let deleteQuery = { file_id: { $in: file_ids } };
|
|
4547
|
+
if (user) {
|
|
4548
|
+
deleteQuery = { user: user };
|
|
4549
|
+
}
|
|
4550
|
+
return File.deleteMany(deleteQuery);
|
|
4551
|
+
}
|
|
4552
|
+
/**
|
|
4553
|
+
* Batch updates files with new signed URLs in MongoDB
|
|
4554
|
+
* @param updates - Array of updates in the format { file_id, filepath }
|
|
4555
|
+
*/
|
|
4556
|
+
async function batchUpdateFiles(updates) {
|
|
4557
|
+
if (!updates || updates.length === 0) {
|
|
4558
|
+
return;
|
|
4559
|
+
}
|
|
4560
|
+
const File = mongoose.models.File;
|
|
4561
|
+
const bulkOperations = updates.map((update) => ({
|
|
4562
|
+
updateOne: {
|
|
4563
|
+
filter: { file_id: update.file_id },
|
|
4564
|
+
update: { $set: { filepath: update.filepath } },
|
|
4565
|
+
},
|
|
4566
|
+
}));
|
|
4567
|
+
const result = await File.bulkWrite(bulkOperations);
|
|
4568
|
+
logger$1.info(`Updated ${result.modifiedCount} files with new S3 URLs`);
|
|
4569
|
+
}
|
|
4570
|
+
/**
|
|
4571
|
+
* Updates usage tracking for multiple files.
|
|
4572
|
+
* Processes files and optional fileIds, updating their usage count in the database.
|
|
4573
|
+
*
|
|
4574
|
+
* @param files - Array of file objects to process
|
|
4575
|
+
* @param fileIds - Optional array of file IDs to process
|
|
4576
|
+
* @returns Array of updated file documents (with null results filtered out)
|
|
4577
|
+
*/
|
|
4578
|
+
async function updateFilesUsage(files, fileIds) {
|
|
4579
|
+
const promises = [];
|
|
4580
|
+
const seen = new Set();
|
|
4581
|
+
for (const file of files) {
|
|
4582
|
+
const { file_id } = file;
|
|
4583
|
+
if (seen.has(file_id)) {
|
|
4584
|
+
continue;
|
|
4585
|
+
}
|
|
4586
|
+
seen.add(file_id);
|
|
4587
|
+
promises.push(updateFileUsage({ file_id }));
|
|
4588
|
+
}
|
|
4589
|
+
if (!fileIds) {
|
|
4590
|
+
const results = await Promise.all(promises);
|
|
4591
|
+
return results.filter((result) => result != null);
|
|
4592
|
+
}
|
|
4593
|
+
for (const file_id of fileIds) {
|
|
4594
|
+
if (seen.has(file_id)) {
|
|
4595
|
+
continue;
|
|
4596
|
+
}
|
|
4597
|
+
seen.add(file_id);
|
|
4598
|
+
promises.push(updateFileUsage({ file_id }));
|
|
4599
|
+
}
|
|
4600
|
+
const results = await Promise.all(promises);
|
|
4601
|
+
return results.filter((result) => result != null);
|
|
4602
|
+
}
|
|
4603
|
+
return {
|
|
4604
|
+
findFileById,
|
|
4605
|
+
getFiles,
|
|
4606
|
+
getToolFilesByIds,
|
|
4607
|
+
createFile,
|
|
4608
|
+
updateFile,
|
|
4609
|
+
updateFileUsage,
|
|
4610
|
+
deleteFile,
|
|
4611
|
+
deleteFiles,
|
|
4612
|
+
deleteFileByFilter,
|
|
4613
|
+
batchUpdateFiles,
|
|
4614
|
+
updateFilesUsage,
|
|
4615
|
+
};
|
|
4616
|
+
}
|
|
4617
|
+
|
|
3940
4618
|
/**
|
|
3941
4619
|
* Formats a date in YYYY-MM-DD format
|
|
3942
4620
|
*/
|
|
@@ -4284,6 +4962,258 @@ function createAgentCategoryMethods(mongoose) {
|
|
|
4284
4962
|
};
|
|
4285
4963
|
}
|
|
4286
4964
|
|
|
4965
|
+
// Default page size used when a requested limit cannot be parsed as a number.
const NORMALIZED_LIMIT_DEFAULT = 20;
// Maximum attempts when creating an MCP server whose generated name collides.
const MAX_CREATE_RETRIES = 5;
// Base delay for exponential backoff between create retries (25/50/100/200 ms).
const RETRY_BASE_DELAY_MS = 25;
|
|
4968
|
+
/**
 * Helper to check if an error is a MongoDB duplicate key error (E11000).
 * Since serverName is the only unique index on MCPServer, any E11000 error
 * during creation is necessarily a serverName collision.
 *
 * @param error - Value caught from a failed MongoDB operation
 * @returns true only for object-like errors whose `code` is 11000
 */
function isDuplicateKeyError(error) {
    // Guard against null/primitives before probing for a `code` property.
    const isErrorObject = error !== null && error !== undefined && typeof error === 'object';
    if (!isErrorObject || !('code' in error)) {
        return false;
    }
    return error.code === 11000;
}
|
|
4980
|
+
/**
 * Escapes regex metacharacters in a string so the result matches it literally
 * when embedded in a RegExp pattern.
 *
 * @param str - Raw string that may contain regex special characters
 * @returns The string with every metacharacter backslash-escaped
 */
function escapeRegex(str) {
    // Prefix each special character with a backslash via a replacer callback.
    return str.replace(/[.*+?^${}()|[\]\\]/g, (ch) => `\\${ch}`);
}
|
|
4986
|
+
/**
 * Generates a URL-friendly server name (slug) from a human-readable title.
 * Lowercases, strips characters outside [a-z0-9 space -], turns whitespace
 * runs into single hyphens, collapses repeated hyphens, and trims hyphens
 * from the ends.
 *
 * @param title - Human-readable server title
 * @returns The derived slug, or 'mcp-server' when nothing survives cleanup
 */
function generateServerNameFromTitle(title) {
    let slug = title.toLowerCase().trim();
    // Keep only lowercase letters, digits, whitespace, and hyphens.
    slug = slug.replace(/[^a-z0-9\s-]/g, '');
    // Whitespace runs become a single hyphen.
    slug = slug.replace(/\s+/g, '-');
    // Collapse consecutive hyphens into one.
    slug = slug.replace(/-+/g, '-');
    // Drop a leading and/or trailing hyphen.
    slug = slug.replace(/^-|-$/g, '');
    return slug.length > 0 ? slug : 'mcp-server';
}
|
|
5000
|
+
// Factory: binds MCP-server CRUD/query methods to the provided mongoose instance.
function createMCPServerMethods(mongoose) {
    /**
     * Finds the next available server name by checking for duplicates.
     * If baseName exists, returns baseName-2, baseName-3, etc.
     * NOTE: read-then-create is not atomic; createMCPServer handles the
     * resulting race via duplicate-key retries.
     */
    async function findNextAvailableServerName(baseName) {
        const MCPServer = mongoose.models.MCPServer;
        // Find all servers with matching base name pattern (baseName or baseName-N)
        const escapedBaseName = escapeRegex(baseName);
        const existing = await MCPServer.find({
            serverName: { $regex: `^${escapedBaseName}(-\\d+)?$` },
        })
            .select('serverName')
            .lean();
        if (existing.length === 0) {
            return baseName;
        }
        // Extract numbers from existing names; a bare baseName counts as 1
        const numbers = existing.map((s) => {
            const match = s.serverName.match(/-(\d+)$/);
            return match ? parseInt(match[1], 10) : 1;
        });
        const maxNumber = Math.max(...numbers);
        return `${baseName}-${maxNumber + 1}`;
    }
    /**
     * Create a new MCP server with retry logic for handling race conditions.
     * When multiple requests try to create servers with the same title simultaneously,
     * they may get the same serverName from findNextAvailableServerName() before any
     * creates the record (TOCTOU race condition). This is handled by retrying with
     * exponential backoff when a duplicate key error occurs.
     * @param data - Object containing config (with title, description, url, etc.) and author
     * @returns The created MCP server document (plain object via toObject())
     * @throws The last error when all retries are exhausted, or immediately for
     *         any error that is not a duplicate-key (E11000) collision
     */
    async function createMCPServer(data) {
        const MCPServer = mongoose.models.MCPServer;
        let lastError;
        for (let attempt = 0; attempt < MAX_CREATE_RETRIES; attempt++) {
            try {
                // Generate serverName from title, with fallback to nanoid if no title
                // Important: regenerate on each attempt to get fresh available name
                let serverName;
                if (data.config.title) {
                    const baseSlug = generateServerNameFromTitle(data.config.title);
                    serverName = await findNextAvailableServerName(baseSlug);
                }
                else {
                    serverName = `mcp-${nanoid.nanoid(16)}`;
                }
                const newServer = await MCPServer.create({
                    serverName,
                    config: data.config,
                    author: data.author,
                });
                return newServer.toObject();
            }
            catch (error) {
                lastError = error;
                // Only retry on duplicate key errors (serverName collision)
                if (isDuplicateKeyError(error) && attempt < MAX_CREATE_RETRIES - 1) {
                    // Exponential backoff: 25ms, 50ms, 100ms, 200ms (RETRY_BASE_DELAY_MS * 2^attempt)
                    const delay = RETRY_BASE_DELAY_MS * Math.pow(2, attempt);
                    logger$1.debug(`[createMCPServer] Duplicate serverName detected, retrying (attempt ${attempt + 2}/${MAX_CREATE_RETRIES}) after ${delay}ms`);
                    await new Promise((resolve) => setTimeout(resolve, delay));
                    continue;
                }
                // Not a duplicate key error or out of retries - throw immediately
                throw error;
            }
        }
        // Should not reach here, but TypeScript requires a return
        throw lastError;
    }
    /**
     * Find an MCP server by serverName
     * @param serverName - The unique server name identifier
     * @returns The MCP server document or null
     */
    async function findMCPServerByServerName(serverName) {
        const MCPServer = mongoose.models.MCPServer;
        return await MCPServer.findOne({ serverName }).lean();
    }
    /**
     * Find an MCP server by MongoDB ObjectId
     * @param _id - The MongoDB ObjectId
     * @returns The MCP server document or null
     */
    async function findMCPServerByObjectId(_id) {
        const MCPServer = mongoose.models.MCPServer;
        return await MCPServer.findById(_id).lean();
    }
    /**
     * Find MCP servers by author, most recently updated first
     * @param authorId - The author's ObjectId or string
     * @returns Array of MCP server documents
     */
    async function findMCPServersByAuthor(authorId) {
        const MCPServer = mongoose.models.MCPServer;
        return await MCPServer.find({ author: authorId }).sort({ updatedAt: -1 }).lean();
    }
    /**
     * Get a paginated list of MCP servers by IDs with filtering and search.
     * Uses keyset pagination on (updatedAt DESC, _id ASC); the cursor is a
     * base64-encoded JSON object { updatedAt, _id }.
     * @param ids - Array of ObjectIds to include
     * @param otherParams - Additional filter parameters (e.g., search)
     * @param limit - Page size limit (null for no pagination; clamped to [1, 100])
     * @param after - Cursor for pagination (invalid cursors are logged and ignored)
     * @returns { data, has_more, after } where `after` is the next cursor or null
     */
    async function getListMCPServersByIds({ ids = [], otherParams = {}, limit = null, after = null, }) {
        const MCPServer = mongoose.models.MCPServer;
        const isPaginated = limit !== null && limit !== undefined;
        // NOTE(review): parseInt here has no radix argument; inputs are expected
        // to be plain decimal strings/numbers — confirm upstream validation.
        const normalizedLimit = isPaginated
            ? Math.min(Math.max(1, parseInt(String(limit)) || NORMALIZED_LIMIT_DEFAULT), 100)
            : null;
        // Build base query combining accessible servers with other filters
        const baseQuery = { ...otherParams, _id: { $in: ids } };
        // Add cursor condition
        if (after) {
            try {
                const cursor = JSON.parse(Buffer.from(after, 'base64').toString('utf8'));
                const { updatedAt, _id } = cursor;
                // Keyset condition: strictly older, or same timestamp with a larger _id
                const cursorCondition = {
                    $or: [
                        { updatedAt: { $lt: new Date(updatedAt) } },
                        { updatedAt: new Date(updatedAt), _id: { $gt: new mongoose.Types.ObjectId(_id) } },
                    ],
                };
                // Merge cursor condition with base query: move the existing
                // conditions under $and so they don't clash with the $or.
                // (baseQuery always contains at least _id, so this branch is
                // always taken when a valid cursor is supplied.)
                if (Object.keys(baseQuery).length > 0) {
                    baseQuery.$and = [{ ...baseQuery }, cursorCondition];
                    // Remove the original conditions from baseQuery to avoid duplication
                    Object.keys(baseQuery).forEach((key) => {
                        if (key !== '$and') {
                            delete baseQuery[key];
                        }
                    });
                }
            }
            catch (error) {
                // Invalid cursor, ignore — the first page is returned instead
                logger$1.warn('[getListMCPServersByIds] Invalid cursor provided', error);
            }
        }
        if (normalizedLimit === null) {
            // No pagination - return all matching servers
            const servers = await MCPServer.find(baseQuery).sort({ updatedAt: -1, _id: 1 }).lean();
            return {
                data: servers,
                has_more: false,
                after: null,
            };
        }
        // Paginated query - fetch one extra row to detect whether more pages exist
        const servers = await MCPServer.find(baseQuery)
            .sort({ updatedAt: -1, _id: 1 })
            .limit(normalizedLimit + 1)
            .lean();
        const hasMore = servers.length > normalizedLimit;
        const data = hasMore ? servers.slice(0, normalizedLimit) : servers;
        let nextCursor = null;
        if (hasMore && data.length > 0) {
            // Cursor encodes the sort keys of the last item on this page
            const lastItem = data[data.length - 1];
            nextCursor = Buffer.from(JSON.stringify({
                updatedAt: lastItem.updatedAt,
                _id: lastItem._id,
            })).toString('base64');
        }
        return {
            data,
            has_more: hasMore,
            after: nextCursor,
        };
    }
    /**
     * Update an MCP server
     * @param serverName - The unique server name identifier
     * @param updateData - Object containing fields to $set (validators run)
     * @returns The updated MCP server document or null if not found
     */
    async function updateMCPServer(serverName, updateData) {
        const MCPServer = mongoose.models.MCPServer;
        return await MCPServer.findOneAndUpdate({ serverName }, { $set: updateData }, { new: true, runValidators: true }).lean();
    }
    /**
     * Delete an MCP server
     * @param serverName - The unique server name identifier
     * @returns The deleted MCP server document or null if not found
     */
    async function deleteMCPServer(serverName) {
        const MCPServer = mongoose.models.MCPServer;
        return await MCPServer.findOneAndDelete({ serverName }).lean();
    }
    /**
     * Get MCP servers by their serverName strings
     * @param names - Array of serverName strings to fetch
     * @returns Object containing array of MCP server documents
     */
    async function getListMCPServersByNames({ names = [] }) {
        if (names.length === 0) {
            return { data: [] };
        }
        const MCPServer = mongoose.models.MCPServer;
        const servers = await MCPServer.find({ serverName: { $in: names } }).lean();
        return { data: servers };
    }
    return {
        createMCPServer,
        findMCPServerByServerName,
        findMCPServerByObjectId,
        findMCPServersByAuthor,
        getListMCPServersByIds,
        getListMCPServersByNames,
        updateMCPServer,
        deleteMCPServer,
    };
}
|
|
5216
|
+
|
|
4287
5217
|
// Factory function that takes mongoose instance and returns the methods
|
|
4288
5218
|
function createPluginAuthMethods(mongoose) {
|
|
4289
5219
|
/**
|
|
@@ -4511,6 +5441,27 @@ function createAccessRoleMethods(mongoose) {
|
|
|
4511
5441
|
resourceType: librechatDataProvider.ResourceType.PROMPTGROUP,
|
|
4512
5442
|
permBits: exports.RoleBits.OWNER,
|
|
4513
5443
|
},
|
|
5444
|
+
{
|
|
5445
|
+
accessRoleId: librechatDataProvider.AccessRoleIds.MCPSERVER_VIEWER,
|
|
5446
|
+
name: 'com_ui_mcp_server_role_viewer',
|
|
5447
|
+
description: 'com_ui_mcp_server_role_viewer_desc',
|
|
5448
|
+
resourceType: librechatDataProvider.ResourceType.MCPSERVER,
|
|
5449
|
+
permBits: exports.RoleBits.VIEWER,
|
|
5450
|
+
},
|
|
5451
|
+
{
|
|
5452
|
+
accessRoleId: librechatDataProvider.AccessRoleIds.MCPSERVER_EDITOR,
|
|
5453
|
+
name: 'com_ui_mcp_server_role_editor',
|
|
5454
|
+
description: 'com_ui_mcp_server_role_editor_desc',
|
|
5455
|
+
resourceType: librechatDataProvider.ResourceType.MCPSERVER,
|
|
5456
|
+
permBits: exports.RoleBits.EDITOR,
|
|
5457
|
+
},
|
|
5458
|
+
{
|
|
5459
|
+
accessRoleId: librechatDataProvider.AccessRoleIds.MCPSERVER_OWNER,
|
|
5460
|
+
name: 'com_ui_mcp_server_role_owner',
|
|
5461
|
+
description: 'com_ui_mcp_server_role_owner_desc',
|
|
5462
|
+
resourceType: librechatDataProvider.ResourceType.MCPSERVER,
|
|
5463
|
+
permBits: exports.RoleBits.OWNER,
|
|
5464
|
+
},
|
|
4514
5465
|
];
|
|
4515
5466
|
const result = {};
|
|
4516
5467
|
for (const role of defaultRoles) {
|
|
@@ -5091,6 +6042,49 @@ function createAclEntryMethods(mongoose$1) {
|
|
|
5091
6042
|
}
|
|
5092
6043
|
return effectiveBits;
|
|
5093
6044
|
}
|
|
6045
|
+
/**
 * Get effective permissions for multiple resources in a single query (BATCH).
 * Returns a map of resourceId → effective permission bits, computed as the
 * bitwise OR of every matching ACL entry's permBits.
 *
 * @param principalsList - List of principals (user + groups + public)
 * @param resourceType - The type of resource ('MCPSERVER', 'AGENT', etc.)
 * @param resourceIds - Array of resource IDs to check
 * @returns {Promise<Map<string, number>>} Map of resourceId → permission bits
 *
 * @example
 * const principals = await getUserPrincipals({ userId, role });
 * const permMap = await getEffectivePermissionsForResources(
 *   principals,
 *   ResourceType.MCPSERVER,
 *   [id1, id2, id3],
 * );
 * // permMap.get(id1.toString()) → 7 (VIEW|EDIT|DELETE)
 */
async function getEffectivePermissionsForResources(principalsList, resourceType, resourceIds) {
    if (!Array.isArray(resourceIds) || resourceIds.length === 0) {
        return new Map();
    }
    const AclEntry = mongoose$1.models.AclEntry;
    // PUBLIC principals carry no principalId, so their clause omits it entirely.
    const principalsQuery = principalsList.map((p) => {
        const clause = { principalType: p.principalType };
        if (p.principalType !== librechatDataProvider.PrincipalType.PUBLIC) {
            clause.principalId = p.principalId;
        }
        return clause;
    });
    // Single round-trip covering every requested resource.
    const aclEntries = await AclEntry.find({
        $or: principalsQuery,
        resourceType,
        resourceId: { $in: resourceIds },
    }).lean();
    // OR together the bits of all entries targeting the same resource.
    const permissionsMap = new Map();
    for (const entry of aclEntries) {
        const key = entry.resourceId.toString();
        const accumulated = permissionsMap.get(key) || 0;
        permissionsMap.set(key, accumulated | entry.permBits);
    }
    return permissionsMap;
}
|
|
5094
6088
|
/**
|
|
5095
6089
|
* Grant permission to a principal for a resource
|
|
5096
6090
|
* @param principalType - The type of principal ('user', 'group', 'public')
|
|
@@ -5231,6 +6225,7 @@ function createAclEntryMethods(mongoose$1) {
|
|
|
5231
6225
|
findEntriesByPrincipalsAndResource,
|
|
5232
6226
|
hasPermission,
|
|
5233
6227
|
getEffectivePermissions,
|
|
6228
|
+
getEffectivePermissionsForResources,
|
|
5234
6229
|
grantPermission,
|
|
5235
6230
|
revokePermission,
|
|
5236
6231
|
modifyPermissionBits,
|
|
@@ -5706,6 +6701,7 @@ function createShareMethods(mongoose) {
|
|
|
5706
6701
|
|
|
5707
6702
|
/**
|
|
5708
6703
|
* Creates all database methods for all collections
|
|
6704
|
+
* @param mongoose - Mongoose instance
|
|
5709
6705
|
*/
|
|
5710
6706
|
function createMethods(mongoose) {
|
|
5711
6707
|
return {
|
|
@@ -5713,8 +6709,11 @@ function createMethods(mongoose) {
|
|
|
5713
6709
|
...createSessionMethods(mongoose),
|
|
5714
6710
|
...createTokenMethods(mongoose),
|
|
5715
6711
|
...createRoleMethods(mongoose),
|
|
6712
|
+
...createKeyMethods(mongoose),
|
|
6713
|
+
...createFileMethods(mongoose),
|
|
5716
6714
|
...createMemoryMethods(mongoose),
|
|
5717
6715
|
...createAgentCategoryMethods(mongoose),
|
|
6716
|
+
...createMCPServerMethods(mongoose),
|
|
5718
6717
|
...createAccessRoleMethods(mongoose),
|
|
5719
6718
|
...createUserGroupMethods(mongoose),
|
|
5720
6719
|
...createAclEntryMethods(mongoose),
|
|
@@ -5724,6 +6723,8 @@ function createMethods(mongoose) {
|
|
|
5724
6723
|
}
|
|
5725
6724
|
|
|
5726
6725
|
exports.AppService = AppService;
|
|
6726
|
+
exports.DEFAULT_REFRESH_TOKEN_EXPIRY = DEFAULT_REFRESH_TOKEN_EXPIRY;
|
|
6727
|
+
exports.DEFAULT_SESSION_EXPIRY = DEFAULT_SESSION_EXPIRY;
|
|
5727
6728
|
exports.actionSchema = Action;
|
|
5728
6729
|
exports.agentCategorySchema = agentCategorySchema;
|
|
5729
6730
|
exports.agentSchema = agentSchema;
|
|
@@ -5736,10 +6737,19 @@ exports.conversationTagSchema = conversationTag;
|
|
|
5736
6737
|
exports.convoSchema = convoSchema;
|
|
5737
6738
|
exports.createMethods = createMethods;
|
|
5738
6739
|
exports.createModels = createModels;
|
|
6740
|
+
exports.decrypt = decrypt;
|
|
6741
|
+
exports.decryptV2 = decryptV2;
|
|
6742
|
+
exports.decryptV3 = decryptV3;
|
|
6743
|
+
exports.defaultVertexModels = defaultVertexModels;
|
|
6744
|
+
exports.encrypt = encrypt;
|
|
6745
|
+
exports.encryptV2 = encryptV2;
|
|
6746
|
+
exports.encryptV3 = encryptV3;
|
|
5739
6747
|
exports.fileSchema = file;
|
|
6748
|
+
exports.getRandomValues = getRandomValues;
|
|
5740
6749
|
exports.getTransactionSupport = getTransactionSupport;
|
|
5741
6750
|
exports.getWebSearchKeys = getWebSearchKeys;
|
|
5742
6751
|
exports.groupSchema = groupSchema;
|
|
6752
|
+
exports.hashBackupCode = hashBackupCode;
|
|
5743
6753
|
exports.hashToken = hashToken;
|
|
5744
6754
|
exports.keySchema = keySchema;
|
|
5745
6755
|
exports.loadDefaultInterface = loadDefaultInterface;
|
|
@@ -5764,6 +6774,8 @@ exports.tokenSchema = tokenSchema;
|
|
|
5764
6774
|
exports.toolCallSchema = toolCallSchema;
|
|
5765
6775
|
exports.transactionSchema = transactionSchema;
|
|
5766
6776
|
exports.userSchema = userSchema;
|
|
6777
|
+
exports.validateVertexConfig = validateVertexConfig;
|
|
6778
|
+
exports.vertexConfigSetup = vertexConfigSetup;
|
|
5767
6779
|
exports.webSearchAuth = webSearchAuth;
|
|
5768
6780
|
exports.webSearchKeys = webSearchKeys;
|
|
5769
6781
|
//# sourceMappingURL=index.cjs.map
|