@librechat/data-schemas 0.0.30 → 0.0.33
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +1127 -82
- package/dist/index.cjs.map +1 -1
- package/dist/index.es.js +1114 -82
- package/dist/index.es.js.map +1 -1
- package/dist/types/app/endpoints.d.ts +0 -1
- package/dist/types/app/index.d.ts +1 -0
- package/dist/types/app/vertex.d.ts +19 -0
- package/dist/types/crypto/index.d.ts +52 -0
- package/dist/types/index.d.ts +1 -1
- package/dist/types/methods/aclEntry.d.ts +4 -0
- package/dist/types/methods/file.d.ts +55 -0
- package/dist/types/methods/file.spec.d.ts +1 -0
- package/dist/types/methods/index.d.ts +9 -4
- package/dist/types/methods/key.d.ts +55 -0
- package/dist/types/methods/mcpServer.d.ts +57 -0
- package/dist/types/methods/mcpServer.spec.d.ts +1 -0
- package/dist/types/methods/session.d.ts +3 -1
- package/dist/types/methods/user.d.ts +4 -1
- package/dist/types/methods/user.methods.spec.d.ts +1 -0
- package/dist/types/models/index.d.ts +1 -0
- package/dist/types/models/mcpServer.d.ts +30 -0
- package/dist/types/models/plugins/mongoMeili.spec.d.ts +1 -0
- package/dist/types/schema/banner.d.ts +1 -0
- package/dist/types/schema/mcpServer.d.ts +37 -0
- package/dist/types/schema/preset.d.ts +0 -1
- package/dist/types/types/agent.d.ts +2 -0
- package/dist/types/types/app.d.ts +8 -5
- package/dist/types/types/banner.d.ts +1 -0
- package/dist/types/types/convo.d.ts +0 -1
- package/dist/types/types/index.d.ts +1 -0
- package/dist/types/types/mcp.d.ts +34 -0
- package/dist/types/types/message.d.ts +1 -0
- package/dist/types/types/session.d.ts +6 -0
- package/dist/types/types/token.d.ts +1 -0
- package/dist/types/types/user.d.ts +5 -0
- package/package.json +2 -2
package/dist/index.es.js
CHANGED
|
@@ -1,10 +1,11 @@
|
|
|
1
|
-
import { EModelEndpoint, agentsEndpointSchema, memorySchema, removeNullishValues, SafeSearchTypes, normalizeEndpointName, defaultAssistantsVersion, Capabilities, assistantEndpointSchema, validateAzureGroups, mapModelToAzureConfig, OCRStrategy, getConfigDefaults, PermissionBits, FileSources, Constants, PermissionTypes, Permissions, SystemRoles, ResourceType, PrincipalType, PrincipalModel, roleDefaults, AccessRoleIds } from 'librechat-data-provider';
|
|
1
|
+
import { EModelEndpoint, agentsEndpointSchema, memorySchema, removeNullishValues, SafeSearchTypes, normalizeEndpointName, defaultAssistantsVersion, Capabilities, assistantEndpointSchema, validateAzureGroups, mapModelToAzureConfig, extractEnvVariable, envVarRegex, OCRStrategy, getConfigDefaults, PermissionBits, FileSources, Constants, PermissionTypes, Permissions, SystemRoles, parseTextParts, ResourceType, PrincipalType, PrincipalModel, roleDefaults, ErrorTypes, EToolResources, FileContext, AccessRoleIds } from 'librechat-data-provider';
|
|
2
2
|
import winston from 'winston';
|
|
3
3
|
import 'winston-daily-rotate-file';
|
|
4
4
|
import { klona } from 'klona';
|
|
5
5
|
import path from 'path';
|
|
6
|
+
import 'dotenv/config';
|
|
6
7
|
import jwt from 'jsonwebtoken';
|
|
7
|
-
import
|
|
8
|
+
import crypto from 'node:crypto';
|
|
8
9
|
import mongoose, { Schema, Types } from 'mongoose';
|
|
9
10
|
import _ from 'lodash';
|
|
10
11
|
import { MeiliSearch } from 'meilisearch';
|
|
@@ -854,6 +855,153 @@ function azureConfigSetup(config) {
|
|
|
854
855
|
};
|
|
855
856
|
}
|
|
856
857
|
|
|
858
|
+
/**
|
|
859
|
+
* Default Vertex AI models available through Google Cloud
|
|
860
|
+
* These are the standard Anthropic model names as served by Vertex AI
|
|
861
|
+
*/
|
|
862
|
+
const defaultVertexModels = [
|
|
863
|
+
'claude-sonnet-4-20250514',
|
|
864
|
+
'claude-3-7-sonnet-20250219',
|
|
865
|
+
'claude-3-5-sonnet-v2@20241022',
|
|
866
|
+
'claude-3-5-sonnet@20240620',
|
|
867
|
+
'claude-3-5-haiku@20241022',
|
|
868
|
+
'claude-3-opus@20240229',
|
|
869
|
+
'claude-3-haiku@20240307',
|
|
870
|
+
];
|
|
871
|
+
/**
|
|
872
|
+
* Processes models configuration and creates deployment name mapping
|
|
873
|
+
* Similar to Azure's model mapping logic
|
|
874
|
+
* @param models - The models configuration (can be array or object)
|
|
875
|
+
* @param defaultDeploymentName - Optional default deployment name
|
|
876
|
+
* @returns Object containing modelNames array and modelDeploymentMap
|
|
877
|
+
*/
|
|
878
|
+
function processVertexModels(models, defaultDeploymentName) {
|
|
879
|
+
const modelNames = [];
|
|
880
|
+
const modelDeploymentMap = {};
|
|
881
|
+
if (!models) {
|
|
882
|
+
// No models specified, use defaults
|
|
883
|
+
for (const model of defaultVertexModels) {
|
|
884
|
+
modelNames.push(model);
|
|
885
|
+
modelDeploymentMap[model] = model; // Default: model name = deployment name
|
|
886
|
+
}
|
|
887
|
+
return { modelNames, modelDeploymentMap };
|
|
888
|
+
}
|
|
889
|
+
if (Array.isArray(models)) {
|
|
890
|
+
// Legacy format: simple array of model names
|
|
891
|
+
for (const modelName of models) {
|
|
892
|
+
modelNames.push(modelName);
|
|
893
|
+
// If a default deployment name is provided, use it for all models
|
|
894
|
+
// Otherwise, model name is the deployment name
|
|
895
|
+
modelDeploymentMap[modelName] = defaultDeploymentName || modelName;
|
|
896
|
+
}
|
|
897
|
+
}
|
|
898
|
+
else {
|
|
899
|
+
// New format: object with model names as keys and config as values
|
|
900
|
+
for (const [modelName, modelConfig] of Object.entries(models)) {
|
|
901
|
+
modelNames.push(modelName);
|
|
902
|
+
if (typeof modelConfig === 'boolean') {
|
|
903
|
+
// Model is set to true/false - use default deployment name or model name
|
|
904
|
+
modelDeploymentMap[modelName] = defaultDeploymentName || modelName;
|
|
905
|
+
}
|
|
906
|
+
else if (modelConfig === null || modelConfig === void 0 ? void 0 : modelConfig.deploymentName) {
|
|
907
|
+
// Model has its own deployment name specified
|
|
908
|
+
modelDeploymentMap[modelName] = modelConfig.deploymentName;
|
|
909
|
+
}
|
|
910
|
+
else {
|
|
911
|
+
// Model is an object but no deployment name - use default or model name
|
|
912
|
+
modelDeploymentMap[modelName] = defaultDeploymentName || modelName;
|
|
913
|
+
}
|
|
914
|
+
}
|
|
915
|
+
}
|
|
916
|
+
return { modelNames, modelDeploymentMap };
|
|
917
|
+
}
|
|
918
|
+
/**
|
|
919
|
+
* Validates and processes Vertex AI configuration
|
|
920
|
+
* @param vertexConfig - The Vertex AI configuration object
|
|
921
|
+
* @returns Validated configuration with errors if any
|
|
922
|
+
*/
|
|
923
|
+
function validateVertexConfig(vertexConfig) {
|
|
924
|
+
if (!vertexConfig) {
|
|
925
|
+
return null;
|
|
926
|
+
}
|
|
927
|
+
const errors = [];
|
|
928
|
+
// Extract and validate environment variables
|
|
929
|
+
// projectId is optional - will be auto-detected from service key if not provided
|
|
930
|
+
const projectId = vertexConfig.projectId ? extractEnvVariable(vertexConfig.projectId) : undefined;
|
|
931
|
+
const region = extractEnvVariable(vertexConfig.region || 'us-east5');
|
|
932
|
+
const serviceKeyFile = vertexConfig.serviceKeyFile
|
|
933
|
+
? extractEnvVariable(vertexConfig.serviceKeyFile)
|
|
934
|
+
: undefined;
|
|
935
|
+
const defaultDeploymentName = vertexConfig.deploymentName
|
|
936
|
+
? extractEnvVariable(vertexConfig.deploymentName)
|
|
937
|
+
: undefined;
|
|
938
|
+
// Check for unresolved environment variables
|
|
939
|
+
if (projectId && envVarRegex.test(projectId)) {
|
|
940
|
+
errors.push(`Vertex AI projectId environment variable "${vertexConfig.projectId}" was not found.`);
|
|
941
|
+
}
|
|
942
|
+
if (envVarRegex.test(region)) {
|
|
943
|
+
errors.push(`Vertex AI region environment variable "${vertexConfig.region}" was not found.`);
|
|
944
|
+
}
|
|
945
|
+
if (serviceKeyFile && envVarRegex.test(serviceKeyFile)) {
|
|
946
|
+
errors.push(`Vertex AI serviceKeyFile environment variable "${vertexConfig.serviceKeyFile}" was not found.`);
|
|
947
|
+
}
|
|
948
|
+
if (defaultDeploymentName && envVarRegex.test(defaultDeploymentName)) {
|
|
949
|
+
errors.push(`Vertex AI deploymentName environment variable "${vertexConfig.deploymentName}" was not found.`);
|
|
950
|
+
}
|
|
951
|
+
// Process models and create deployment mapping
|
|
952
|
+
const { modelNames, modelDeploymentMap } = processVertexModels(vertexConfig.models, defaultDeploymentName);
|
|
953
|
+
// Note: projectId is optional - if not provided, it will be auto-detected from the service key file
|
|
954
|
+
const isValid = errors.length === 0;
|
|
955
|
+
return {
|
|
956
|
+
enabled: vertexConfig.enabled !== false,
|
|
957
|
+
projectId,
|
|
958
|
+
region,
|
|
959
|
+
serviceKeyFile,
|
|
960
|
+
deploymentName: defaultDeploymentName,
|
|
961
|
+
models: vertexConfig.models,
|
|
962
|
+
modelNames,
|
|
963
|
+
modelDeploymentMap,
|
|
964
|
+
isValid,
|
|
965
|
+
errors,
|
|
966
|
+
};
|
|
967
|
+
}
|
|
968
|
+
/**
|
|
969
|
+
* Sets up the Vertex AI configuration from the config (`librechat.yaml`) file.
|
|
970
|
+
* Similar to azureConfigSetup, this processes and validates the Vertex AI configuration.
|
|
971
|
+
* @param config - The loaded custom configuration.
|
|
972
|
+
* @returns The validated Vertex AI configuration or null if not configured.
|
|
973
|
+
*/
|
|
974
|
+
function vertexConfigSetup(config) {
|
|
975
|
+
var _a, _b;
|
|
976
|
+
const anthropicConfig = (_a = config.endpoints) === null || _a === void 0 ? void 0 : _a[EModelEndpoint.anthropic];
|
|
977
|
+
if (!(anthropicConfig === null || anthropicConfig === void 0 ? void 0 : anthropicConfig.vertex)) {
|
|
978
|
+
return null;
|
|
979
|
+
}
|
|
980
|
+
const vertexConfig = anthropicConfig.vertex;
|
|
981
|
+
// Skip if explicitly disabled (enabled: false)
|
|
982
|
+
// When vertex config exists, it's enabled by default unless explicitly set to false
|
|
983
|
+
if (vertexConfig.enabled === false) {
|
|
984
|
+
return null;
|
|
985
|
+
}
|
|
986
|
+
const validatedConfig = validateVertexConfig(vertexConfig);
|
|
987
|
+
if (!validatedConfig) {
|
|
988
|
+
return null;
|
|
989
|
+
}
|
|
990
|
+
if (!validatedConfig.isValid) {
|
|
991
|
+
const errorString = validatedConfig.errors.join('\n');
|
|
992
|
+
const errorMessage = 'Invalid Vertex AI configuration:\n' + errorString;
|
|
993
|
+
logger$1.error(errorMessage);
|
|
994
|
+
throw new Error(errorMessage);
|
|
995
|
+
}
|
|
996
|
+
logger$1.info('Vertex AI configuration loaded successfully', {
|
|
997
|
+
projectId: validatedConfig.projectId,
|
|
998
|
+
region: validatedConfig.region,
|
|
999
|
+
modelCount: ((_b = validatedConfig.modelNames) === null || _b === void 0 ? void 0 : _b.length) || 0,
|
|
1000
|
+
models: validatedConfig.modelNames,
|
|
1001
|
+
});
|
|
1002
|
+
return validatedConfig;
|
|
1003
|
+
}
|
|
1004
|
+
|
|
857
1005
|
/**
|
|
858
1006
|
* Loads custom config endpoints
|
|
859
1007
|
* @param [config]
|
|
@@ -876,12 +1024,24 @@ const loadEndpoints = (config, agentsDefaults) => {
|
|
|
876
1024
|
loadedEndpoints[EModelEndpoint.assistants] = assistantsConfigSetup(config, EModelEndpoint.assistants, loadedEndpoints[EModelEndpoint.assistants]);
|
|
877
1025
|
}
|
|
878
1026
|
loadedEndpoints[EModelEndpoint.agents] = agentsConfigSetup(config, agentsDefaults);
|
|
1027
|
+
// Handle Anthropic endpoint with Vertex AI configuration
|
|
1028
|
+
if (endpoints === null || endpoints === void 0 ? void 0 : endpoints[EModelEndpoint.anthropic]) {
|
|
1029
|
+
const anthropicConfig = endpoints[EModelEndpoint.anthropic];
|
|
1030
|
+
const vertexConfig = vertexConfigSetup(config);
|
|
1031
|
+
loadedEndpoints[EModelEndpoint.anthropic] = {
|
|
1032
|
+
...anthropicConfig,
|
|
1033
|
+
// If Vertex AI is enabled, use the visible model names from vertex config
|
|
1034
|
+
// Otherwise, use the models array from anthropic config
|
|
1035
|
+
...((vertexConfig === null || vertexConfig === void 0 ? void 0 : vertexConfig.modelNames) && { models: vertexConfig.modelNames }),
|
|
1036
|
+
// Attach validated Vertex AI config if present
|
|
1037
|
+
...(vertexConfig && { vertexConfig }),
|
|
1038
|
+
};
|
|
1039
|
+
}
|
|
879
1040
|
const endpointKeys = [
|
|
880
1041
|
EModelEndpoint.openAI,
|
|
881
1042
|
EModelEndpoint.google,
|
|
882
1043
|
EModelEndpoint.custom,
|
|
883
1044
|
EModelEndpoint.bedrock,
|
|
884
|
-
EModelEndpoint.anthropic,
|
|
885
1045
|
];
|
|
886
1046
|
endpointKeys.forEach((key) => {
|
|
887
1047
|
const currentKey = key;
|
|
@@ -936,7 +1096,9 @@ const AppService = async (params) => {
|
|
|
936
1096
|
const imageOutputType = (_e = config === null || config === void 0 ? void 0 : config.imageOutputType) !== null && _e !== void 0 ? _e : configDefaults.imageOutputType;
|
|
937
1097
|
process.env.CDN_PROVIDER = fileStrategy;
|
|
938
1098
|
const availableTools = systemTools;
|
|
939
|
-
const
|
|
1099
|
+
const mcpServersConfig = config.mcpServers || null;
|
|
1100
|
+
const mcpSettings = config.mcpSettings || null;
|
|
1101
|
+
const actions = config.actions;
|
|
940
1102
|
const registration = (_f = config.registration) !== null && _f !== void 0 ? _f : configDefaults.registration;
|
|
941
1103
|
const interfaceConfig = await loadDefaultInterface({ config, configDefaults });
|
|
942
1104
|
const turnstileConfig = loadTurnstileConfig(config, configDefaults);
|
|
@@ -948,8 +1110,10 @@ const AppService = async (params) => {
|
|
|
948
1110
|
memory,
|
|
949
1111
|
speech,
|
|
950
1112
|
balance,
|
|
1113
|
+
actions,
|
|
951
1114
|
transactions,
|
|
952
|
-
mcpConfig,
|
|
1115
|
+
mcpConfig: mcpServersConfig,
|
|
1116
|
+
mcpSettings,
|
|
953
1117
|
webSearch,
|
|
954
1118
|
fileStrategy,
|
|
955
1119
|
registration,
|
|
@@ -997,6 +1161,12 @@ var RoleBits;
|
|
|
997
1161
|
RoleBits[RoleBits["OWNER"] = PermissionBits.VIEW | PermissionBits.EDIT | PermissionBits.DELETE | PermissionBits.SHARE] = "OWNER";
|
|
998
1162
|
})(RoleBits || (RoleBits = {}));
|
|
999
1163
|
|
|
1164
|
+
var _a, _b;
|
|
1165
|
+
const { webcrypto } = crypto;
|
|
1166
|
+
/** Use hex decoding for both key and IV for legacy methods */
|
|
1167
|
+
const key = Buffer.from((_a = process.env.CREDS_KEY) !== null && _a !== void 0 ? _a : '', 'hex');
|
|
1168
|
+
const iv = Buffer.from((_b = process.env.CREDS_IV) !== null && _b !== void 0 ? _b : '', 'hex');
|
|
1169
|
+
const algorithm = 'AES-CBC';
|
|
1000
1170
|
async function signPayload({ payload, secret, expirationTime, }) {
|
|
1001
1171
|
return jwt.sign(payload, secret, { expiresIn: expirationTime });
|
|
1002
1172
|
}
|
|
@@ -1005,6 +1175,130 @@ async function hashToken(str) {
|
|
|
1005
1175
|
const hashBuffer = await webcrypto.subtle.digest('SHA-256', data);
|
|
1006
1176
|
return Buffer.from(hashBuffer).toString('hex');
|
|
1007
1177
|
}
|
|
1178
|
+
/** --- Legacy v1/v2 Setup: AES-CBC with fixed key and IV --- */
|
|
1179
|
+
/**
|
|
1180
|
+
* Encrypts a value using AES-CBC
|
|
1181
|
+
* @param value - The plaintext to encrypt
|
|
1182
|
+
* @returns The encrypted string in hex format
|
|
1183
|
+
*/
|
|
1184
|
+
async function encrypt(value) {
|
|
1185
|
+
const cryptoKey = await webcrypto.subtle.importKey('raw', key, { name: algorithm }, false, [
|
|
1186
|
+
'encrypt',
|
|
1187
|
+
]);
|
|
1188
|
+
const encoder = new TextEncoder();
|
|
1189
|
+
const data = encoder.encode(value);
|
|
1190
|
+
const encryptedBuffer = await webcrypto.subtle.encrypt({ name: algorithm, iv: iv }, cryptoKey, data);
|
|
1191
|
+
return Buffer.from(encryptedBuffer).toString('hex');
|
|
1192
|
+
}
|
|
1193
|
+
/**
|
|
1194
|
+
* Decrypts an encrypted value using AES-CBC
|
|
1195
|
+
* @param encryptedValue - The encrypted string in hex format
|
|
1196
|
+
* @returns The decrypted plaintext
|
|
1197
|
+
*/
|
|
1198
|
+
async function decrypt(encryptedValue) {
|
|
1199
|
+
const cryptoKey = await webcrypto.subtle.importKey('raw', key, { name: algorithm }, false, [
|
|
1200
|
+
'decrypt',
|
|
1201
|
+
]);
|
|
1202
|
+
const encryptedBuffer = Buffer.from(encryptedValue, 'hex');
|
|
1203
|
+
const decryptedBuffer = await webcrypto.subtle.decrypt({ name: algorithm, iv: iv }, cryptoKey, encryptedBuffer);
|
|
1204
|
+
const decoder = new TextDecoder();
|
|
1205
|
+
return decoder.decode(decryptedBuffer);
|
|
1206
|
+
}
|
|
1207
|
+
/** --- v2: AES-CBC with a random IV per encryption --- */
|
|
1208
|
+
/**
|
|
1209
|
+
* Encrypts a value using AES-CBC with a random IV per encryption
|
|
1210
|
+
* @param value - The plaintext to encrypt
|
|
1211
|
+
* @returns The encrypted string with IV prepended (iv:ciphertext format)
|
|
1212
|
+
*/
|
|
1213
|
+
async function encryptV2(value) {
|
|
1214
|
+
const gen_iv = webcrypto.getRandomValues(new Uint8Array(16));
|
|
1215
|
+
const cryptoKey = await webcrypto.subtle.importKey('raw', key, { name: algorithm }, false, [
|
|
1216
|
+
'encrypt',
|
|
1217
|
+
]);
|
|
1218
|
+
const encoder = new TextEncoder();
|
|
1219
|
+
const data = encoder.encode(value);
|
|
1220
|
+
const encryptedBuffer = await webcrypto.subtle.encrypt({ name: algorithm, iv: gen_iv }, cryptoKey, data);
|
|
1221
|
+
return Buffer.from(gen_iv).toString('hex') + ':' + Buffer.from(encryptedBuffer).toString('hex');
|
|
1222
|
+
}
|
|
1223
|
+
/**
|
|
1224
|
+
* Decrypts an encrypted value using AES-CBC with random IV
|
|
1225
|
+
* @param encryptedValue - The encrypted string in iv:ciphertext format
|
|
1226
|
+
* @returns The decrypted plaintext
|
|
1227
|
+
*/
|
|
1228
|
+
async function decryptV2(encryptedValue) {
|
|
1229
|
+
var _a;
|
|
1230
|
+
const parts = encryptedValue.split(':');
|
|
1231
|
+
if (parts.length === 1) {
|
|
1232
|
+
return parts[0];
|
|
1233
|
+
}
|
|
1234
|
+
const gen_iv = Buffer.from((_a = parts.shift()) !== null && _a !== void 0 ? _a : '', 'hex');
|
|
1235
|
+
const encrypted = parts.join(':');
|
|
1236
|
+
const cryptoKey = await webcrypto.subtle.importKey('raw', key, { name: algorithm }, false, [
|
|
1237
|
+
'decrypt',
|
|
1238
|
+
]);
|
|
1239
|
+
const encryptedBuffer = Buffer.from(encrypted, 'hex');
|
|
1240
|
+
const decryptedBuffer = await webcrypto.subtle.decrypt({ name: algorithm, iv: gen_iv }, cryptoKey, encryptedBuffer);
|
|
1241
|
+
const decoder = new TextDecoder();
|
|
1242
|
+
return decoder.decode(decryptedBuffer);
|
|
1243
|
+
}
|
|
1244
|
+
/** --- v3: AES-256-CTR using Node's crypto functions --- */
|
|
1245
|
+
const algorithm_v3 = 'aes-256-ctr';
|
|
1246
|
+
/**
|
|
1247
|
+
* Encrypts a value using AES-256-CTR.
|
|
1248
|
+
* Note: AES-256 requires a 32-byte key. Ensure that process.env.CREDS_KEY is a 64-character hex string.
|
|
1249
|
+
* @param value - The plaintext to encrypt.
|
|
1250
|
+
* @returns The encrypted string with a "v3:" prefix.
|
|
1251
|
+
*/
|
|
1252
|
+
function encryptV3(value) {
|
|
1253
|
+
if (key.length !== 32) {
|
|
1254
|
+
throw new Error(`Invalid key length: expected 32 bytes, got ${key.length} bytes`);
|
|
1255
|
+
}
|
|
1256
|
+
const iv_v3 = crypto.randomBytes(16);
|
|
1257
|
+
const cipher = crypto.createCipheriv(algorithm_v3, key, iv_v3);
|
|
1258
|
+
const encrypted = Buffer.concat([cipher.update(value, 'utf8'), cipher.final()]);
|
|
1259
|
+
return `v3:${iv_v3.toString('hex')}:${encrypted.toString('hex')}`;
|
|
1260
|
+
}
|
|
1261
|
+
/**
|
|
1262
|
+
* Decrypts an encrypted value using AES-256-CTR.
|
|
1263
|
+
* @param encryptedValue - The encrypted string with "v3:" prefix.
|
|
1264
|
+
* @returns The decrypted plaintext.
|
|
1265
|
+
*/
|
|
1266
|
+
function decryptV3(encryptedValue) {
|
|
1267
|
+
const parts = encryptedValue.split(':');
|
|
1268
|
+
if (parts[0] !== 'v3') {
|
|
1269
|
+
throw new Error('Not a v3 encrypted value');
|
|
1270
|
+
}
|
|
1271
|
+
const iv_v3 = Buffer.from(parts[1], 'hex');
|
|
1272
|
+
const encryptedText = Buffer.from(parts.slice(2).join(':'), 'hex');
|
|
1273
|
+
const decipher = crypto.createDecipheriv(algorithm_v3, key, iv_v3);
|
|
1274
|
+
const decrypted = Buffer.concat([decipher.update(encryptedText), decipher.final()]);
|
|
1275
|
+
return decrypted.toString('utf8');
|
|
1276
|
+
}
|
|
1277
|
+
/**
|
|
1278
|
+
* Generates random values as a hex string
|
|
1279
|
+
* @param length - The number of random bytes to generate
|
|
1280
|
+
* @returns The random values as a hex string
|
|
1281
|
+
*/
|
|
1282
|
+
async function getRandomValues(length) {
|
|
1283
|
+
if (!Number.isInteger(length) || length <= 0) {
|
|
1284
|
+
throw new Error('Length must be a positive integer');
|
|
1285
|
+
}
|
|
1286
|
+
const randomValues = new Uint8Array(length);
|
|
1287
|
+
webcrypto.getRandomValues(randomValues);
|
|
1288
|
+
return Buffer.from(randomValues).toString('hex');
|
|
1289
|
+
}
|
|
1290
|
+
/**
|
|
1291
|
+
* Computes SHA-256 hash for the given input.
|
|
1292
|
+
* @param input - The input to hash.
|
|
1293
|
+
* @returns The SHA-256 hash of the input.
|
|
1294
|
+
*/
|
|
1295
|
+
async function hashBackupCode(input) {
|
|
1296
|
+
const encoder = new TextEncoder();
|
|
1297
|
+
const data = encoder.encode(input);
|
|
1298
|
+
const hashBuffer = await webcrypto.subtle.digest('SHA-256', data);
|
|
1299
|
+
const hashArray = Array.from(new Uint8Array(hashBuffer));
|
|
1300
|
+
return hashArray.map((b) => b.toString(16).padStart(2, '0')).join('');
|
|
1301
|
+
}
|
|
1008
1302
|
|
|
1009
1303
|
// Define the Auth sub-schema with type-safety.
|
|
1010
1304
|
const AuthSchema = new Schema({
|
|
@@ -1160,10 +1454,17 @@ const agentSchema = new Schema({
|
|
|
1160
1454
|
default: false,
|
|
1161
1455
|
index: true,
|
|
1162
1456
|
},
|
|
1457
|
+
/** MCP server names extracted from tools for efficient querying */
|
|
1458
|
+
mcpServerNames: {
|
|
1459
|
+
type: [String],
|
|
1460
|
+
default: [],
|
|
1461
|
+
index: true,
|
|
1462
|
+
},
|
|
1163
1463
|
}, {
|
|
1164
1464
|
timestamps: true,
|
|
1165
1465
|
});
|
|
1166
1466
|
agentSchema.index({ updatedAt: -1, _id: 1 });
|
|
1467
|
+
agentSchema.index({ 'edges.to': 1 });
|
|
1167
1468
|
|
|
1168
1469
|
const agentCategorySchema = new Schema({
|
|
1169
1470
|
value: {
|
|
@@ -1299,6 +1600,10 @@ const bannerSchema = new Schema({
|
|
|
1299
1600
|
type: Boolean,
|
|
1300
1601
|
default: false,
|
|
1301
1602
|
},
|
|
1603
|
+
persistable: {
|
|
1604
|
+
type: Boolean,
|
|
1605
|
+
default: false,
|
|
1606
|
+
},
|
|
1302
1607
|
}, { timestamps: true });
|
|
1303
1608
|
|
|
1304
1609
|
const categoriesSchema = new Schema({
|
|
@@ -1342,7 +1647,6 @@ conversationTag.index({ tag: 1, user: 1 }, { unique: true });
|
|
|
1342
1647
|
|
|
1343
1648
|
// @ts-ignore
|
|
1344
1649
|
const conversationPreset = {
|
|
1345
|
-
// endpoint: [azureOpenAI, openAI, anthropic, chatGPTBrowser]
|
|
1346
1650
|
endpoint: {
|
|
1347
1651
|
type: String,
|
|
1348
1652
|
default: null,
|
|
@@ -1351,7 +1655,7 @@ const conversationPreset = {
|
|
|
1351
1655
|
endpointType: {
|
|
1352
1656
|
type: String,
|
|
1353
1657
|
},
|
|
1354
|
-
// for azureOpenAI, openAI
|
|
1658
|
+
// for azureOpenAI, openAI only
|
|
1355
1659
|
model: {
|
|
1356
1660
|
type: String,
|
|
1357
1661
|
required: false,
|
|
@@ -1516,9 +1820,6 @@ const convoSchema = new Schema({
|
|
|
1516
1820
|
meiliIndex: true,
|
|
1517
1821
|
},
|
|
1518
1822
|
messages: [{ type: Schema.Types.ObjectId, ref: 'Message' }],
|
|
1519
|
-
agentOptions: {
|
|
1520
|
-
type: Schema.Types.Mixed,
|
|
1521
|
-
},
|
|
1522
1823
|
...conversationPreset,
|
|
1523
1824
|
agent_id: {
|
|
1524
1825
|
type: String,
|
|
@@ -1538,6 +1839,8 @@ const convoSchema = new Schema({
|
|
|
1538
1839
|
convoSchema.index({ expiredAt: 1 }, { expireAfterSeconds: 0 });
|
|
1539
1840
|
convoSchema.index({ createdAt: 1, updatedAt: 1 });
|
|
1540
1841
|
convoSchema.index({ conversationId: 1, user: 1 }, { unique: true });
|
|
1842
|
+
// index for MeiliSearch sync operations
|
|
1843
|
+
convoSchema.index({ _meiliIndex: 1, expiredAt: 1 });
|
|
1541
1844
|
|
|
1542
1845
|
const file = new Schema({
|
|
1543
1846
|
user: {
|
|
@@ -1734,25 +2037,6 @@ const messageSchema = new Schema({
|
|
|
1734
2037
|
default: false,
|
|
1735
2038
|
},
|
|
1736
2039
|
files: { type: [{ type: mongoose.Schema.Types.Mixed }], default: undefined },
|
|
1737
|
-
plugin: {
|
|
1738
|
-
type: {
|
|
1739
|
-
latest: {
|
|
1740
|
-
type: String,
|
|
1741
|
-
required: false,
|
|
1742
|
-
},
|
|
1743
|
-
inputs: {
|
|
1744
|
-
type: [mongoose.Schema.Types.Mixed],
|
|
1745
|
-
required: false,
|
|
1746
|
-
default: undefined,
|
|
1747
|
-
},
|
|
1748
|
-
outputs: {
|
|
1749
|
-
type: String,
|
|
1750
|
-
required: false,
|
|
1751
|
-
},
|
|
1752
|
-
},
|
|
1753
|
-
default: undefined,
|
|
1754
|
-
},
|
|
1755
|
-
plugins: { type: [{ type: mongoose.Schema.Types.Mixed }], default: undefined },
|
|
1756
2040
|
content: {
|
|
1757
2041
|
type: [{ type: mongoose.Schema.Types.Mixed }],
|
|
1758
2042
|
default: undefined,
|
|
@@ -1792,10 +2076,16 @@ const messageSchema = new Schema({
|
|
|
1792
2076
|
expiredAt: {
|
|
1793
2077
|
type: Date,
|
|
1794
2078
|
},
|
|
2079
|
+
addedConvo: {
|
|
2080
|
+
type: Boolean,
|
|
2081
|
+
default: undefined,
|
|
2082
|
+
},
|
|
1795
2083
|
}, { timestamps: true });
|
|
1796
2084
|
messageSchema.index({ expiredAt: 1 }, { expireAfterSeconds: 0 });
|
|
1797
2085
|
messageSchema.index({ createdAt: 1 });
|
|
1798
2086
|
messageSchema.index({ messageId: 1, user: 1 }, { unique: true });
|
|
2087
|
+
// index for MeiliSearch sync operations
|
|
2088
|
+
messageSchema.index({ _meiliIndex: 1, expiredAt: 1 });
|
|
1799
2089
|
|
|
1800
2090
|
const pluginAuthSchema = new Schema({
|
|
1801
2091
|
authField: {
|
|
@@ -1838,10 +2128,6 @@ const presetSchema = new Schema({
|
|
|
1838
2128
|
type: Number,
|
|
1839
2129
|
},
|
|
1840
2130
|
...conversationPreset,
|
|
1841
|
-
agentOptions: {
|
|
1842
|
-
type: mongoose.Schema.Types.Mixed,
|
|
1843
|
-
default: null,
|
|
1844
|
-
},
|
|
1845
2131
|
}, { timestamps: true });
|
|
1846
2132
|
|
|
1847
2133
|
const projectSchema = new Schema({
|
|
@@ -2000,6 +2286,11 @@ const rolePermissionsSchema = new Schema({
|
|
|
2000
2286
|
[PermissionTypes.FILE_CITATIONS]: {
|
|
2001
2287
|
[Permissions.USE]: { type: Boolean },
|
|
2002
2288
|
},
|
|
2289
|
+
[PermissionTypes.MCP_SERVERS]: {
|
|
2290
|
+
[Permissions.USE]: { type: Boolean },
|
|
2291
|
+
[Permissions.CREATE]: { type: Boolean },
|
|
2292
|
+
[Permissions.SHARE]: { type: Boolean },
|
|
2293
|
+
},
|
|
2003
2294
|
}, { _id: false });
|
|
2004
2295
|
const roleSchema = new Schema({
|
|
2005
2296
|
name: { type: String, required: true, unique: true, index: true },
|
|
@@ -2143,6 +2434,7 @@ const transactionSchema = new Schema({
|
|
|
2143
2434
|
},
|
|
2144
2435
|
model: {
|
|
2145
2436
|
type: String,
|
|
2437
|
+
index: true,
|
|
2146
2438
|
},
|
|
2147
2439
|
context: {
|
|
2148
2440
|
type: String,
|
|
@@ -2290,6 +2582,17 @@ const userSchema = new Schema({
|
|
|
2290
2582
|
},
|
|
2291
2583
|
default: {},
|
|
2292
2584
|
},
|
|
2585
|
+
favorites: {
|
|
2586
|
+
type: [
|
|
2587
|
+
{
|
|
2588
|
+
_id: false,
|
|
2589
|
+
agentId: String, // for agent
|
|
2590
|
+
model: String, // for model
|
|
2591
|
+
endpoint: String, // for model
|
|
2592
|
+
},
|
|
2593
|
+
],
|
|
2594
|
+
default: [],
|
|
2595
|
+
},
|
|
2293
2596
|
/** Field for external source identification (for consistency with TPrincipal schema) */
|
|
2294
2597
|
idOnTheSource: {
|
|
2295
2598
|
type: String,
|
|
@@ -2509,26 +2812,6 @@ const getSyncConfig = () => ({
|
|
|
2509
2812
|
batchSize: parseInt(process.env.MEILI_SYNC_BATCH_SIZE || '100', 10),
|
|
2510
2813
|
delayMs: parseInt(process.env.MEILI_SYNC_DELAY_MS || '100', 10),
|
|
2511
2814
|
});
|
|
2512
|
-
/**
|
|
2513
|
-
* Local implementation of parseTextParts to avoid dependency on librechat-data-provider
|
|
2514
|
-
* Extracts text content from an array of content items
|
|
2515
|
-
*/
|
|
2516
|
-
const parseTextParts = (content) => {
|
|
2517
|
-
if (!Array.isArray(content)) {
|
|
2518
|
-
return '';
|
|
2519
|
-
}
|
|
2520
|
-
return content
|
|
2521
|
-
.filter((item) => item.type === 'text' && typeof item.text === 'string')
|
|
2522
|
-
.map((item) => item.text)
|
|
2523
|
-
.join(' ')
|
|
2524
|
-
.trim();
|
|
2525
|
-
};
|
|
2526
|
-
/**
|
|
2527
|
-
* Local implementation to handle Bing convoId conversion
|
|
2528
|
-
*/
|
|
2529
|
-
const cleanUpPrimaryKeyValue = (value) => {
|
|
2530
|
-
return value.replace(/--/g, '|');
|
|
2531
|
-
};
|
|
2532
2815
|
/**
|
|
2533
2816
|
* Validates the required options for configuring the mongoMeili plugin.
|
|
2534
2817
|
*/
|
|
@@ -2590,7 +2873,8 @@ const createMeiliMongooseModel = ({ index, attributesToIndex, syncOptions, }) =>
|
|
|
2590
2873
|
const { batchSize, delayMs } = syncConfig;
|
|
2591
2874
|
logger.info(`[syncWithMeili] Starting sync for ${primaryKey === 'messageId' ? 'messages' : 'conversations'} with batch size ${batchSize}`);
|
|
2592
2875
|
// Build query with resume capability
|
|
2593
|
-
|
|
2876
|
+
// Do not sync TTL documents
|
|
2877
|
+
const query = { expiredAt: null };
|
|
2594
2878
|
if (options === null || options === void 0 ? void 0 : options.resumeFromId) {
|
|
2595
2879
|
query._id = { $gt: options.resumeFromId };
|
|
2596
2880
|
}
|
|
@@ -2718,7 +3002,7 @@ const createMeiliMongooseModel = ({ index, attributesToIndex, syncOptions, }) =>
|
|
|
2718
3002
|
const data = await index.search(q, params);
|
|
2719
3003
|
if (populate) {
|
|
2720
3004
|
const query = {};
|
|
2721
|
-
query[primaryKey] = _.map(data.hits, (hit) =>
|
|
3005
|
+
query[primaryKey] = _.map(data.hits, (hit) => hit[primaryKey]);
|
|
2722
3006
|
const projection = Object.keys(this.schema.obj).reduce((results, key) => {
|
|
2723
3007
|
if (!key.startsWith('$')) {
|
|
2724
3008
|
results[key] = 1;
|
|
@@ -2761,6 +3045,10 @@ const createMeiliMongooseModel = ({ index, attributesToIndex, syncOptions, }) =>
|
|
|
2761
3045
|
* Adds the current document to the MeiliSearch index with retry logic
|
|
2762
3046
|
*/
|
|
2763
3047
|
async addObjectToMeili(next) {
|
|
3048
|
+
// If this conversation or message has a TTL, don't index it
|
|
3049
|
+
if (!_.isNil(this.expiredAt)) {
|
|
3050
|
+
return next();
|
|
3051
|
+
}
|
|
2764
3052
|
const object = this.preprocessObjectForIndex();
|
|
2765
3053
|
const maxRetries = 3;
|
|
2766
3054
|
let retryCount = 0;
|
|
@@ -3075,7 +3363,38 @@ function createAgentModel(mongoose) {
|
|
|
3075
3363
|
* Creates or returns the AgentCategory model using the provided mongoose instance and schema
|
|
3076
3364
|
*/
|
|
3077
3365
|
function createAgentCategoryModel(mongoose) {
|
|
3078
|
-
return mongoose.models.AgentCategory ||
|
|
3366
|
+
return (mongoose.models.AgentCategory ||
|
|
3367
|
+
mongoose.model('AgentCategory', agentCategorySchema));
|
|
3368
|
+
}
|
|
3369
|
+
|
|
3370
|
+
const mcpServerSchema = new Schema({
|
|
3371
|
+
serverName: {
|
|
3372
|
+
type: String,
|
|
3373
|
+
index: true,
|
|
3374
|
+
unique: true,
|
|
3375
|
+
required: true,
|
|
3376
|
+
},
|
|
3377
|
+
config: {
|
|
3378
|
+
type: Schema.Types.Mixed,
|
|
3379
|
+
required: true,
|
|
3380
|
+
// Config contains: title, description, url, oauth, etc.
|
|
3381
|
+
},
|
|
3382
|
+
author: {
|
|
3383
|
+
type: Schema.Types.ObjectId,
|
|
3384
|
+
ref: 'User',
|
|
3385
|
+
required: true,
|
|
3386
|
+
index: true,
|
|
3387
|
+
},
|
|
3388
|
+
}, {
|
|
3389
|
+
timestamps: true,
|
|
3390
|
+
});
|
|
3391
|
+
mcpServerSchema.index({ updatedAt: -1, _id: 1 });
|
|
3392
|
+
|
|
3393
|
+
/**
|
|
3394
|
+
* Creates or returns the MCPServer model using the provided mongoose instance and schema
|
|
3395
|
+
*/
|
|
3396
|
+
function createMCPServerModel(mongoose) {
|
|
3397
|
+
return (mongoose.models.MCPServer || mongoose.model('MCPServer', mcpServerSchema));
|
|
3079
3398
|
}
|
|
3080
3399
|
|
|
3081
3400
|
/**
|
|
@@ -3203,7 +3522,7 @@ const accessRoleSchema = new Schema({
|
|
|
3203
3522
|
description: String,
|
|
3204
3523
|
resourceType: {
|
|
3205
3524
|
type: String,
|
|
3206
|
-
enum: ['agent', 'project', 'file', 'promptGroup'],
|
|
3525
|
+
enum: ['agent', 'project', 'file', 'promptGroup', 'mcpServer'],
|
|
3207
3526
|
required: true,
|
|
3208
3527
|
default: 'agent',
|
|
3209
3528
|
},
|
|
@@ -3304,6 +3623,7 @@ function createModels(mongoose) {
|
|
|
3304
3623
|
Message: createMessageModel(mongoose),
|
|
3305
3624
|
Agent: createAgentModel(mongoose),
|
|
3306
3625
|
AgentCategory: createAgentCategoryModel(mongoose),
|
|
3626
|
+
MCPServer: createMCPServerModel(mongoose),
|
|
3307
3627
|
Role: createRoleModel(mongoose),
|
|
3308
3628
|
Action: createActionModel(mongoose),
|
|
3309
3629
|
Assistant: createAssistantModel(mongoose),
|
|
@@ -3326,7 +3646,6 @@ function createModels(mongoose) {
|
|
|
3326
3646
|
};
|
|
3327
3647
|
}
|
|
3328
3648
|
|
|
3329
|
-
var _a;
|
|
3330
3649
|
class SessionError extends Error {
|
|
3331
3650
|
constructor(message, code = 'SESSION_ERROR') {
|
|
3332
3651
|
super(message);
|
|
@@ -3334,22 +3653,24 @@ class SessionError extends Error {
|
|
|
3334
3653
|
this.code = code;
|
|
3335
3654
|
}
|
|
3336
3655
|
}
|
|
3337
|
-
|
|
3338
|
-
const
|
|
3656
|
+
/** Default refresh token expiry: 7 days in milliseconds */
|
|
3657
|
+
const DEFAULT_REFRESH_TOKEN_EXPIRY = 1000 * 60 * 60 * 24 * 7;
|
|
3339
3658
|
// Factory function that takes mongoose instance and returns the methods
|
|
3340
3659
|
function createSessionMethods(mongoose) {
|
|
3341
3660
|
/**
|
|
3342
3661
|
* Creates a new session for a user
|
|
3343
3662
|
*/
|
|
3344
3663
|
async function createSession(userId, options = {}) {
|
|
3664
|
+
var _a;
|
|
3345
3665
|
if (!userId) {
|
|
3346
3666
|
throw new SessionError('User ID is required', 'INVALID_USER_ID');
|
|
3347
3667
|
}
|
|
3668
|
+
const expiresIn = (_a = options.expiresIn) !== null && _a !== void 0 ? _a : DEFAULT_REFRESH_TOKEN_EXPIRY;
|
|
3348
3669
|
try {
|
|
3349
3670
|
const Session = mongoose.models.Session;
|
|
3350
3671
|
const currentSession = new Session({
|
|
3351
3672
|
user: userId,
|
|
3352
|
-
expiration: options.expiration || new Date(Date.now() +
|
|
3673
|
+
expiration: options.expiration || new Date(Date.now() + expiresIn),
|
|
3353
3674
|
});
|
|
3354
3675
|
const refreshToken = await generateRefreshToken(currentSession);
|
|
3355
3676
|
return { session: currentSession, refreshToken };
|
|
@@ -3403,14 +3724,16 @@ function createSessionMethods(mongoose) {
|
|
|
3403
3724
|
/**
|
|
3404
3725
|
* Updates session expiration
|
|
3405
3726
|
*/
|
|
3406
|
-
async function updateExpiration(session, newExpiration) {
|
|
3727
|
+
async function updateExpiration(session, newExpiration, options = {}) {
|
|
3728
|
+
var _a;
|
|
3729
|
+
const expiresIn = (_a = options.expiresIn) !== null && _a !== void 0 ? _a : DEFAULT_REFRESH_TOKEN_EXPIRY;
|
|
3407
3730
|
try {
|
|
3408
3731
|
const Session = mongoose.models.Session;
|
|
3409
3732
|
const sessionDoc = typeof session === 'string' ? await Session.findById(session) : session;
|
|
3410
3733
|
if (!sessionDoc) {
|
|
3411
3734
|
throw new SessionError('Session not found', 'SESSION_NOT_FOUND');
|
|
3412
3735
|
}
|
|
3413
|
-
sessionDoc.expiration = newExpiration || new Date(Date.now() +
|
|
3736
|
+
sessionDoc.expiration = newExpiration || new Date(Date.now() + expiresIn);
|
|
3414
3737
|
return await sessionDoc.save();
|
|
3415
3738
|
}
|
|
3416
3739
|
catch (error) {
|
|
@@ -3481,7 +3804,9 @@ function createSessionMethods(mongoose) {
|
|
|
3481
3804
|
throw new SessionError('Invalid session object', 'INVALID_SESSION');
|
|
3482
3805
|
}
|
|
3483
3806
|
try {
|
|
3484
|
-
const expiresIn = session.expiration
|
|
3807
|
+
const expiresIn = session.expiration
|
|
3808
|
+
? session.expiration.getTime()
|
|
3809
|
+
: Date.now() + DEFAULT_REFRESH_TOKEN_EXPIRY;
|
|
3485
3810
|
if (!session.expiration) {
|
|
3486
3811
|
session.expiration = new Date(expiresIn);
|
|
3487
3812
|
}
|
|
@@ -3561,7 +3886,11 @@ function createTokenMethods(mongoose) {
|
|
|
3561
3886
|
async function updateToken(query, updateData) {
|
|
3562
3887
|
try {
|
|
3563
3888
|
const Token = mongoose.models.Token;
|
|
3564
|
-
|
|
3889
|
+
const dataToUpdate = { ...updateData };
|
|
3890
|
+
if ((updateData === null || updateData === void 0 ? void 0 : updateData.expiresIn) !== undefined) {
|
|
3891
|
+
dataToUpdate.expiresAt = new Date(Date.now() + updateData.expiresIn * 1000);
|
|
3892
|
+
}
|
|
3893
|
+
return await Token.findOneAndUpdate(query, dataToUpdate, { new: true });
|
|
3565
3894
|
}
|
|
3566
3895
|
catch (error) {
|
|
3567
3896
|
logger$1.debug('An error occurred while updating token:', error);
|
|
@@ -3570,6 +3899,7 @@ function createTokenMethods(mongoose) {
|
|
|
3570
3899
|
}
|
|
3571
3900
|
/**
|
|
3572
3901
|
* Deletes all Token documents that match the provided token, user ID, or email.
|
|
3902
|
+
* Email is automatically normalized to lowercase for case-insensitive matching.
|
|
3573
3903
|
*/
|
|
3574
3904
|
async function deleteTokens(query) {
|
|
3575
3905
|
try {
|
|
@@ -3582,7 +3912,7 @@ function createTokenMethods(mongoose) {
|
|
|
3582
3912
|
conditions.push({ token: query.token });
|
|
3583
3913
|
}
|
|
3584
3914
|
if (query.email !== undefined) {
|
|
3585
|
-
conditions.push({ email: query.email });
|
|
3915
|
+
conditions.push({ email: query.email.trim().toLowerCase() });
|
|
3586
3916
|
}
|
|
3587
3917
|
if (query.identifier !== undefined) {
|
|
3588
3918
|
conditions.push({ identifier: query.identifier });
|
|
@@ -3604,6 +3934,7 @@ function createTokenMethods(mongoose) {
|
|
|
3604
3934
|
}
|
|
3605
3935
|
/**
|
|
3606
3936
|
* Finds a Token document that matches the provided query.
|
|
3937
|
+
* Email is automatically normalized to lowercase for case-insensitive matching.
|
|
3607
3938
|
*/
|
|
3608
3939
|
async function findToken(query, options) {
|
|
3609
3940
|
try {
|
|
@@ -3616,7 +3947,7 @@ function createTokenMethods(mongoose) {
|
|
|
3616
3947
|
conditions.push({ token: query.token });
|
|
3617
3948
|
}
|
|
3618
3949
|
if (query.email) {
|
|
3619
|
-
conditions.push({ email: query.email });
|
|
3950
|
+
conditions.push({ email: query.email.trim().toLowerCase() });
|
|
3620
3951
|
}
|
|
3621
3952
|
if (query.identifier) {
|
|
3622
3953
|
conditions.push({ identifier: query.identifier });
|
|
@@ -3681,14 +4012,37 @@ function createRoleMethods(mongoose) {
|
|
|
3681
4012
|
};
|
|
3682
4013
|
}
|
|
3683
4014
|
|
|
4015
|
+
/** Default JWT session expiry: 15 minutes in milliseconds */
|
|
4016
|
+
const DEFAULT_SESSION_EXPIRY = 1000 * 60 * 15;
|
|
3684
4017
|
/** Factory function that takes mongoose instance and returns the methods */
|
|
3685
4018
|
function createUserMethods(mongoose) {
|
|
4019
|
+
/**
|
|
4020
|
+
* Normalizes email fields in search criteria to lowercase and trimmed.
|
|
4021
|
+
* Handles both direct email fields and $or arrays containing email conditions.
|
|
4022
|
+
*/
|
|
4023
|
+
function normalizeEmailInCriteria(criteria) {
|
|
4024
|
+
const normalized = { ...criteria };
|
|
4025
|
+
if (typeof normalized.email === 'string') {
|
|
4026
|
+
normalized.email = normalized.email.trim().toLowerCase();
|
|
4027
|
+
}
|
|
4028
|
+
if (Array.isArray(normalized.$or)) {
|
|
4029
|
+
normalized.$or = normalized.$or.map((condition) => {
|
|
4030
|
+
if (typeof condition.email === 'string') {
|
|
4031
|
+
return { ...condition, email: condition.email.trim().toLowerCase() };
|
|
4032
|
+
}
|
|
4033
|
+
return condition;
|
|
4034
|
+
});
|
|
4035
|
+
}
|
|
4036
|
+
return normalized;
|
|
4037
|
+
}
|
|
3686
4038
|
/**
|
|
3687
4039
|
* Search for a single user based on partial data and return matching user document as plain object.
|
|
4040
|
+
* Email fields in searchCriteria are automatically normalized to lowercase for case-insensitive matching.
|
|
3688
4041
|
*/
|
|
3689
4042
|
async function findUser(searchCriteria, fieldsToSelect) {
|
|
3690
4043
|
const User = mongoose.models.User;
|
|
3691
|
-
const
|
|
4044
|
+
const normalizedCriteria = normalizeEmailInCriteria(searchCriteria);
|
|
4045
|
+
const query = User.findOne(normalizedCriteria);
|
|
3692
4046
|
if (fieldsToSelect) {
|
|
3693
4047
|
query.select(fieldsToSelect);
|
|
3694
4048
|
}
|
|
@@ -3785,23 +4139,14 @@ function createUserMethods(mongoose) {
|
|
|
3785
4139
|
}
|
|
3786
4140
|
/**
|
|
3787
4141
|
* Generates a JWT token for a given user.
|
|
4142
|
+
* @param user - The user object
|
|
4143
|
+
* @param expiresIn - Optional expiry time in milliseconds. Default: 15 minutes
|
|
3788
4144
|
*/
|
|
3789
|
-
async function generateToken(user) {
|
|
4145
|
+
async function generateToken(user, expiresIn) {
|
|
3790
4146
|
if (!user) {
|
|
3791
4147
|
throw new Error('No user provided');
|
|
3792
4148
|
}
|
|
3793
|
-
|
|
3794
|
-
if (process.env.SESSION_EXPIRY !== undefined && process.env.SESSION_EXPIRY !== '') {
|
|
3795
|
-
try {
|
|
3796
|
-
const evaluated = eval(process.env.SESSION_EXPIRY);
|
|
3797
|
-
if (evaluated) {
|
|
3798
|
-
expires = evaluated;
|
|
3799
|
-
}
|
|
3800
|
-
}
|
|
3801
|
-
catch (error) {
|
|
3802
|
-
console.warn('Invalid SESSION_EXPIRY expression, using default:', error);
|
|
3803
|
-
}
|
|
3804
|
-
}
|
|
4149
|
+
const expires = expiresIn !== null && expiresIn !== void 0 ? expiresIn : DEFAULT_SESSION_EXPIRY;
|
|
3805
4150
|
return await signPayload({
|
|
3806
4151
|
payload: {
|
|
3807
4152
|
id: user._id,
|
|
@@ -3895,6 +4240,26 @@ function createUserMethods(mongoose) {
|
|
|
3895
4240
|
return userWithoutScore;
|
|
3896
4241
|
});
|
|
3897
4242
|
};
|
|
4243
|
+
/**
|
|
4244
|
+
* Updates the plugins for a user based on the action specified (install/uninstall).
|
|
4245
|
+
* @param userId - The user ID whose plugins are to be updated
|
|
4246
|
+
* @param plugins - The current plugins array
|
|
4247
|
+
* @param pluginKey - The key of the plugin to install or uninstall
|
|
4248
|
+
* @param action - The action to perform, 'install' or 'uninstall'
|
|
4249
|
+
* @returns The result of the update operation or null if action is invalid
|
|
4250
|
+
*/
|
|
4251
|
+
async function updateUserPlugins(userId, plugins, pluginKey, action) {
|
|
4252
|
+
const userPlugins = plugins !== null && plugins !== void 0 ? plugins : [];
|
|
4253
|
+
if (action === 'install') {
|
|
4254
|
+
return updateUser(userId, { plugins: [...userPlugins, pluginKey] });
|
|
4255
|
+
}
|
|
4256
|
+
if (action === 'uninstall') {
|
|
4257
|
+
return updateUser(userId, {
|
|
4258
|
+
plugins: userPlugins.filter((plugin) => plugin !== pluginKey),
|
|
4259
|
+
});
|
|
4260
|
+
}
|
|
4261
|
+
return null;
|
|
4262
|
+
}
|
|
3898
4263
|
return {
|
|
3899
4264
|
findUser,
|
|
3900
4265
|
countUsers,
|
|
@@ -3904,10 +4269,356 @@ function createUserMethods(mongoose) {
|
|
|
3904
4269
|
getUserById,
|
|
3905
4270
|
generateToken,
|
|
3906
4271
|
deleteUserById,
|
|
4272
|
+
updateUserPlugins,
|
|
3907
4273
|
toggleUserMemories,
|
|
3908
4274
|
};
|
|
3909
4275
|
}
|
|
3910
4276
|
|
|
4277
|
+
/** Factory function that takes mongoose instance and returns the key methods */
|
|
4278
|
+
function createKeyMethods(mongoose) {
|
|
4279
|
+
/**
|
|
4280
|
+
* Retrieves and decrypts the key value for a given user identified by userId and identifier name.
|
|
4281
|
+
* @param params - The parameters object
|
|
4282
|
+
* @param params.userId - The unique identifier for the user
|
|
4283
|
+
* @param params.name - The name associated with the key
|
|
4284
|
+
* @returns The decrypted key value
|
|
4285
|
+
* @throws Error if the key is not found or if there is a problem during key retrieval
|
|
4286
|
+
* @description This function searches for a user's key in the database using their userId and name.
|
|
4287
|
+
* If found, it decrypts the value of the key and returns it. If no key is found, it throws
|
|
4288
|
+
* an error indicating that there is no user key available.
|
|
4289
|
+
*/
|
|
4290
|
+
async function getUserKey(params) {
|
|
4291
|
+
const { userId, name } = params;
|
|
4292
|
+
const Key = mongoose.models.Key;
|
|
4293
|
+
const keyValue = (await Key.findOne({ userId, name }).lean());
|
|
4294
|
+
if (!keyValue) {
|
|
4295
|
+
throw new Error(JSON.stringify({
|
|
4296
|
+
type: ErrorTypes.NO_USER_KEY,
|
|
4297
|
+
}));
|
|
4298
|
+
}
|
|
4299
|
+
return await decrypt(keyValue.value);
|
|
4300
|
+
}
|
|
4301
|
+
/**
|
|
4302
|
+
* Retrieves, decrypts, and parses the key values for a given user identified by userId and name.
|
|
4303
|
+
* @param params - The parameters object
|
|
4304
|
+
* @param params.userId - The unique identifier for the user
|
|
4305
|
+
* @param params.name - The name associated with the key
|
|
4306
|
+
* @returns The decrypted and parsed key values
|
|
4307
|
+
* @throws Error if the key is invalid or if there is a problem during key value parsing
|
|
4308
|
+
* @description This function retrieves a user's encrypted key using their userId and name, decrypts it,
|
|
4309
|
+
* and then attempts to parse the decrypted string into a JSON object. If the parsing fails,
|
|
4310
|
+
* it throws an error indicating that the user key is invalid.
|
|
4311
|
+
*/
|
|
4312
|
+
async function getUserKeyValues(params) {
|
|
4313
|
+
const { userId, name } = params;
|
|
4314
|
+
const userValues = await getUserKey({ userId, name });
|
|
4315
|
+
try {
|
|
4316
|
+
return JSON.parse(userValues);
|
|
4317
|
+
}
|
|
4318
|
+
catch (e) {
|
|
4319
|
+
logger$1.error('[getUserKeyValues]', e);
|
|
4320
|
+
throw new Error(JSON.stringify({
|
|
4321
|
+
type: ErrorTypes.INVALID_USER_KEY,
|
|
4322
|
+
}));
|
|
4323
|
+
}
|
|
4324
|
+
}
|
|
4325
|
+
/**
|
|
4326
|
+
* Retrieves the expiry information of a user's key identified by userId and name.
|
|
4327
|
+
* @param params - The parameters object
|
|
4328
|
+
* @param params.userId - The unique identifier for the user
|
|
4329
|
+
* @param params.name - The name associated with the key
|
|
4330
|
+
* @returns The expiry date of the key or null if the key doesn't exist
|
|
4331
|
+
* @description This function fetches a user's key from the database using their userId and name and
|
|
4332
|
+
* returns its expiry date. If the key is not found, it returns null for the expiry date.
|
|
4333
|
+
*/
|
|
4334
|
+
async function getUserKeyExpiry(params) {
|
|
4335
|
+
const { userId, name } = params;
|
|
4336
|
+
const Key = mongoose.models.Key;
|
|
4337
|
+
const keyValue = (await Key.findOne({ userId, name }).lean());
|
|
4338
|
+
if (!keyValue) {
|
|
4339
|
+
return { expiresAt: null };
|
|
4340
|
+
}
|
|
4341
|
+
return { expiresAt: keyValue.expiresAt || 'never' };
|
|
4342
|
+
}
|
|
4343
|
+
/**
|
|
4344
|
+
* Updates or inserts a new key for a given user identified by userId and name, with a specified value and expiry date.
|
|
4345
|
+
* @param params - The parameters object
|
|
4346
|
+
* @param params.userId - The unique identifier for the user
|
|
4347
|
+
* @param params.name - The name associated with the key
|
|
4348
|
+
* @param params.value - The value to be encrypted and stored as the key's value
|
|
4349
|
+
* @param params.expiresAt - The expiry date for the key [optional]
|
|
4350
|
+
* @returns The updated or newly inserted key document
|
|
4351
|
+
* @description This function either updates an existing user key or inserts a new one into the database,
|
|
4352
|
+
* after encrypting the provided value. It sets the provided expiry date for the key (or unsets for no expiry).
|
|
4353
|
+
*/
|
|
4354
|
+
async function updateUserKey(params) {
|
|
4355
|
+
const { userId, name, value, expiresAt = null } = params;
|
|
4356
|
+
const Key = mongoose.models.Key;
|
|
4357
|
+
const encryptedValue = await encrypt(value);
|
|
4358
|
+
const updateObject = {
|
|
4359
|
+
userId,
|
|
4360
|
+
name,
|
|
4361
|
+
value: encryptedValue,
|
|
4362
|
+
};
|
|
4363
|
+
const updateQuery = {
|
|
4364
|
+
$set: updateObject,
|
|
4365
|
+
};
|
|
4366
|
+
if (expiresAt) {
|
|
4367
|
+
updateObject.expiresAt = new Date(expiresAt);
|
|
4368
|
+
}
|
|
4369
|
+
else {
|
|
4370
|
+
updateQuery.$unset = { expiresAt: '' };
|
|
4371
|
+
}
|
|
4372
|
+
return await Key.findOneAndUpdate({ userId, name }, updateQuery, {
|
|
4373
|
+
upsert: true,
|
|
4374
|
+
new: true,
|
|
4375
|
+
}).lean();
|
|
4376
|
+
}
|
|
4377
|
+
/**
|
|
4378
|
+
* Deletes a key or all keys for a given user identified by userId, optionally based on a specified name.
|
|
4379
|
+
* @param params - The parameters object
|
|
4380
|
+
* @param params.userId - The unique identifier for the user
|
|
4381
|
+
* @param params.name - The name associated with the key to delete. If not provided and all is true, deletes all keys
|
|
4382
|
+
* @param params.all - Whether to delete all keys for the user
|
|
4383
|
+
* @returns The result of the deletion operation
|
|
4384
|
+
* @description This function deletes a specific key or all keys for a user from the database.
|
|
4385
|
+
* If a name is provided and all is false, it deletes only the key with that name.
|
|
4386
|
+
* If all is true, it ignores the name and deletes all keys for the user.
|
|
4387
|
+
*/
|
|
4388
|
+
async function deleteUserKey(params) {
|
|
4389
|
+
const { userId, name, all = false } = params;
|
|
4390
|
+
const Key = mongoose.models.Key;
|
|
4391
|
+
if (all) {
|
|
4392
|
+
return await Key.deleteMany({ userId });
|
|
4393
|
+
}
|
|
4394
|
+
return await Key.findOneAndDelete({ userId, name }).lean();
|
|
4395
|
+
}
|
|
4396
|
+
return {
|
|
4397
|
+
getUserKey,
|
|
4398
|
+
updateUserKey,
|
|
4399
|
+
deleteUserKey,
|
|
4400
|
+
getUserKeyValues,
|
|
4401
|
+
getUserKeyExpiry,
|
|
4402
|
+
};
|
|
4403
|
+
}
|
|
4404
|
+
|
|
4405
|
+
/** Factory function that takes mongoose instance and returns the file methods */
|
|
4406
|
+
function createFileMethods(mongoose) {
|
|
4407
|
+
/**
|
|
4408
|
+
* Finds a file by its file_id with additional query options.
|
|
4409
|
+
* @param file_id - The unique identifier of the file
|
|
4410
|
+
* @param options - Query options for filtering, projection, etc.
|
|
4411
|
+
* @returns A promise that resolves to the file document or null
|
|
4412
|
+
*/
|
|
4413
|
+
async function findFileById(file_id, options = {}) {
|
|
4414
|
+
const File = mongoose.models.File;
|
|
4415
|
+
return File.findOne({ file_id, ...options }).lean();
|
|
4416
|
+
}
|
|
4417
|
+
/**
|
|
4418
|
+
* Retrieves files matching a given filter, sorted by the most recently updated.
|
|
4419
|
+
* @param filter - The filter criteria to apply
|
|
4420
|
+
* @param _sortOptions - Optional sort parameters
|
|
4421
|
+
* @param selectFields - Fields to include/exclude in the query results. Default excludes the 'text' field
|
|
4422
|
+
* @param options - Additional query options (userId, agentId for ACL)
|
|
4423
|
+
* @returns A promise that resolves to an array of file documents
|
|
4424
|
+
*/
|
|
4425
|
+
async function getFiles(filter, _sortOptions, selectFields) {
|
|
4426
|
+
const File = mongoose.models.File;
|
|
4427
|
+
const sortOptions = { updatedAt: -1, ..._sortOptions };
|
|
4428
|
+
const query = File.find(filter);
|
|
4429
|
+
if (selectFields != null) {
|
|
4430
|
+
query.select(selectFields);
|
|
4431
|
+
}
|
|
4432
|
+
else {
|
|
4433
|
+
query.select({ text: 0 });
|
|
4434
|
+
}
|
|
4435
|
+
return await query.sort(sortOptions).lean();
|
|
4436
|
+
}
|
|
4437
|
+
/**
|
|
4438
|
+
* Retrieves tool files (files that are embedded or have a fileIdentifier) from an array of file IDs
|
|
4439
|
+
* @param fileIds - Array of file_id strings to search for
|
|
4440
|
+
* @param toolResourceSet - Optional filter for tool resources
|
|
4441
|
+
* @returns Files that match the criteria
|
|
4442
|
+
*/
|
|
4443
|
+
async function getToolFilesByIds(fileIds, toolResourceSet) {
|
|
4444
|
+
var _a, _b, _c;
|
|
4445
|
+
if (!fileIds || !fileIds.length || !(toolResourceSet === null || toolResourceSet === void 0 ? void 0 : toolResourceSet.size)) {
|
|
4446
|
+
return [];
|
|
4447
|
+
}
|
|
4448
|
+
try {
|
|
4449
|
+
const filter = {
|
|
4450
|
+
file_id: { $in: fileIds },
|
|
4451
|
+
$or: [],
|
|
4452
|
+
};
|
|
4453
|
+
if (toolResourceSet.has(EToolResources.context)) {
|
|
4454
|
+
(_a = filter.$or) === null || _a === void 0 ? void 0 : _a.push({ text: { $exists: true, $ne: null }, context: FileContext.agents });
|
|
4455
|
+
}
|
|
4456
|
+
if (toolResourceSet.has(EToolResources.file_search)) {
|
|
4457
|
+
(_b = filter.$or) === null || _b === void 0 ? void 0 : _b.push({ embedded: true });
|
|
4458
|
+
}
|
|
4459
|
+
if (toolResourceSet.has(EToolResources.execute_code)) {
|
|
4460
|
+
(_c = filter.$or) === null || _c === void 0 ? void 0 : _c.push({ 'metadata.fileIdentifier': { $exists: true } });
|
|
4461
|
+
}
|
|
4462
|
+
const selectFields = { text: 0 };
|
|
4463
|
+
const sortOptions = { updatedAt: -1 };
|
|
4464
|
+
const results = await getFiles(filter, sortOptions, selectFields);
|
|
4465
|
+
return results !== null && results !== void 0 ? results : [];
|
|
4466
|
+
}
|
|
4467
|
+
catch (error) {
|
|
4468
|
+
logger$1.error('[getToolFilesByIds] Error retrieving tool files:', error);
|
|
4469
|
+
throw new Error('Error retrieving tool files');
|
|
4470
|
+
}
|
|
4471
|
+
}
|
|
4472
|
+
/**
|
|
4473
|
+
* Creates a new file with a TTL of 1 hour.
|
|
4474
|
+
* @param data - The file data to be created, must contain file_id
|
|
4475
|
+
* @param disableTTL - Whether to disable the TTL
|
|
4476
|
+
* @returns A promise that resolves to the created file document
|
|
4477
|
+
*/
|
|
4478
|
+
async function createFile(data, disableTTL) {
|
|
4479
|
+
const File = mongoose.models.File;
|
|
4480
|
+
const fileData = {
|
|
4481
|
+
...data,
|
|
4482
|
+
expiresAt: new Date(Date.now() + 3600 * 1000),
|
|
4483
|
+
};
|
|
4484
|
+
if (disableTTL) {
|
|
4485
|
+
delete fileData.expiresAt;
|
|
4486
|
+
}
|
|
4487
|
+
return File.findOneAndUpdate({ file_id: data.file_id }, fileData, {
|
|
4488
|
+
new: true,
|
|
4489
|
+
upsert: true,
|
|
4490
|
+
}).lean();
|
|
4491
|
+
}
|
|
4492
|
+
/**
|
|
4493
|
+
* Updates a file identified by file_id with new data and removes the TTL.
|
|
4494
|
+
* @param data - The data to update, must contain file_id
|
|
4495
|
+
* @returns A promise that resolves to the updated file document
|
|
4496
|
+
*/
|
|
4497
|
+
async function updateFile(data) {
|
|
4498
|
+
const File = mongoose.models.File;
|
|
4499
|
+
const { file_id, ...update } = data;
|
|
4500
|
+
const updateOperation = {
|
|
4501
|
+
$set: update,
|
|
4502
|
+
$unset: { expiresAt: '' },
|
|
4503
|
+
};
|
|
4504
|
+
return File.findOneAndUpdate({ file_id }, updateOperation, {
|
|
4505
|
+
new: true,
|
|
4506
|
+
}).lean();
|
|
4507
|
+
}
|
|
4508
|
+
/**
|
|
4509
|
+
* Increments the usage of a file identified by file_id.
|
|
4510
|
+
* @param data - The data to update, must contain file_id and the increment value for usage
|
|
4511
|
+
* @returns A promise that resolves to the updated file document
|
|
4512
|
+
*/
|
|
4513
|
+
async function updateFileUsage(data) {
|
|
4514
|
+
const File = mongoose.models.File;
|
|
4515
|
+
const { file_id, inc = 1 } = data;
|
|
4516
|
+
const updateOperation = {
|
|
4517
|
+
$inc: { usage: inc },
|
|
4518
|
+
$unset: { expiresAt: '', temp_file_id: '' },
|
|
4519
|
+
};
|
|
4520
|
+
return File.findOneAndUpdate({ file_id }, updateOperation, {
|
|
4521
|
+
new: true,
|
|
4522
|
+
}).lean();
|
|
4523
|
+
}
|
|
4524
|
+
/**
|
|
4525
|
+
* Deletes a file identified by file_id.
|
|
4526
|
+
* @param file_id - The unique identifier of the file to delete
|
|
4527
|
+
* @returns A promise that resolves to the deleted file document or null
|
|
4528
|
+
*/
|
|
4529
|
+
async function deleteFile(file_id) {
|
|
4530
|
+
const File = mongoose.models.File;
|
|
4531
|
+
return File.findOneAndDelete({ file_id }).lean();
|
|
4532
|
+
}
|
|
4533
|
+
/**
|
|
4534
|
+
* Deletes a file identified by a filter.
|
|
4535
|
+
* @param filter - The filter criteria to apply
|
|
4536
|
+
* @returns A promise that resolves to the deleted file document or null
|
|
4537
|
+
*/
|
|
4538
|
+
async function deleteFileByFilter(filter) {
|
|
4539
|
+
const File = mongoose.models.File;
|
|
4540
|
+
return File.findOneAndDelete(filter).lean();
|
|
4541
|
+
}
|
|
4542
|
+
/**
|
|
4543
|
+
* Deletes multiple files identified by an array of file_ids.
|
|
4544
|
+
* @param file_ids - The unique identifiers of the files to delete
|
|
4545
|
+
* @param user - Optional user ID to filter by
|
|
4546
|
+
* @returns A promise that resolves to the result of the deletion operation
|
|
4547
|
+
*/
|
|
4548
|
+
async function deleteFiles(file_ids, user) {
|
|
4549
|
+
const File = mongoose.models.File;
|
|
4550
|
+
let deleteQuery = { file_id: { $in: file_ids } };
|
|
4551
|
+
if (user) {
|
|
4552
|
+
deleteQuery = { user: user };
|
|
4553
|
+
}
|
|
4554
|
+
return File.deleteMany(deleteQuery);
|
|
4555
|
+
}
|
|
4556
|
+
/**
|
|
4557
|
+
* Batch updates files with new signed URLs in MongoDB
|
|
4558
|
+
* @param updates - Array of updates in the format { file_id, filepath }
|
|
4559
|
+
*/
|
|
4560
|
+
async function batchUpdateFiles(updates) {
|
|
4561
|
+
if (!updates || updates.length === 0) {
|
|
4562
|
+
return;
|
|
4563
|
+
}
|
|
4564
|
+
const File = mongoose.models.File;
|
|
4565
|
+
const bulkOperations = updates.map((update) => ({
|
|
4566
|
+
updateOne: {
|
|
4567
|
+
filter: { file_id: update.file_id },
|
|
4568
|
+
update: { $set: { filepath: update.filepath } },
|
|
4569
|
+
},
|
|
4570
|
+
}));
|
|
4571
|
+
const result = await File.bulkWrite(bulkOperations);
|
|
4572
|
+
logger$1.info(`Updated ${result.modifiedCount} files with new S3 URLs`);
|
|
4573
|
+
}
|
|
4574
|
+
/**
|
|
4575
|
+
* Updates usage tracking for multiple files.
|
|
4576
|
+
* Processes files and optional fileIds, updating their usage count in the database.
|
|
4577
|
+
*
|
|
4578
|
+
* @param files - Array of file objects to process
|
|
4579
|
+
* @param fileIds - Optional array of file IDs to process
|
|
4580
|
+
* @returns Array of updated file documents (with null results filtered out)
|
|
4581
|
+
*/
|
|
4582
|
+
async function updateFilesUsage(files, fileIds) {
|
|
4583
|
+
const promises = [];
|
|
4584
|
+
const seen = new Set();
|
|
4585
|
+
for (const file of files) {
|
|
4586
|
+
const { file_id } = file;
|
|
4587
|
+
if (seen.has(file_id)) {
|
|
4588
|
+
continue;
|
|
4589
|
+
}
|
|
4590
|
+
seen.add(file_id);
|
|
4591
|
+
promises.push(updateFileUsage({ file_id }));
|
|
4592
|
+
}
|
|
4593
|
+
if (!fileIds) {
|
|
4594
|
+
const results = await Promise.all(promises);
|
|
4595
|
+
return results.filter((result) => result != null);
|
|
4596
|
+
}
|
|
4597
|
+
for (const file_id of fileIds) {
|
|
4598
|
+
if (seen.has(file_id)) {
|
|
4599
|
+
continue;
|
|
4600
|
+
}
|
|
4601
|
+
seen.add(file_id);
|
|
4602
|
+
promises.push(updateFileUsage({ file_id }));
|
|
4603
|
+
}
|
|
4604
|
+
const results = await Promise.all(promises);
|
|
4605
|
+
return results.filter((result) => result != null);
|
|
4606
|
+
}
|
|
4607
|
+
return {
|
|
4608
|
+
findFileById,
|
|
4609
|
+
getFiles,
|
|
4610
|
+
getToolFilesByIds,
|
|
4611
|
+
createFile,
|
|
4612
|
+
updateFile,
|
|
4613
|
+
updateFileUsage,
|
|
4614
|
+
deleteFile,
|
|
4615
|
+
deleteFiles,
|
|
4616
|
+
deleteFileByFilter,
|
|
4617
|
+
batchUpdateFiles,
|
|
4618
|
+
updateFilesUsage,
|
|
4619
|
+
};
|
|
4620
|
+
}
|
|
4621
|
+
|
|
3911
4622
|
/**
|
|
3912
4623
|
* Formats a date in YYYY-MM-DD format
|
|
3913
4624
|
*/
|
|
@@ -4255,6 +4966,258 @@ function createAgentCategoryMethods(mongoose) {
|
|
|
4255
4966
|
};
|
|
4256
4967
|
}
|
|
4257
4968
|
|
|
4969
|
+
const NORMALIZED_LIMIT_DEFAULT = 20;
|
|
4970
|
+
const MAX_CREATE_RETRIES = 5;
|
|
4971
|
+
const RETRY_BASE_DELAY_MS = 25;
|
|
4972
|
+
/**
|
|
4973
|
+
* Helper to check if an error is a MongoDB duplicate key error.
|
|
4974
|
+
* Since serverName is the only unique index on MCPServer, any E11000 error
|
|
4975
|
+
* during creation is necessarily a serverName collision.
|
|
4976
|
+
*/
|
|
4977
|
+
function isDuplicateKeyError(error) {
|
|
4978
|
+
if (error && typeof error === 'object' && 'code' in error) {
|
|
4979
|
+
const mongoError = error;
|
|
4980
|
+
return mongoError.code === 11000;
|
|
4981
|
+
}
|
|
4982
|
+
return false;
|
|
4983
|
+
}
|
|
4984
|
+
/**
|
|
4985
|
+
* Escapes special regex characters in a string so they are treated literally.
|
|
4986
|
+
*/
|
|
4987
|
+
function escapeRegex(str) {
|
|
4988
|
+
return str.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
|
4989
|
+
}
|
|
4990
|
+
/**
|
|
4991
|
+
* Generates a URL-friendly server name from a title.
|
|
4992
|
+
* Converts to lowercase, replaces spaces with hyphens, removes special characters.
|
|
4993
|
+
*/
|
|
4994
|
+
function generateServerNameFromTitle(title) {
|
|
4995
|
+
const slug = title
|
|
4996
|
+
.toLowerCase()
|
|
4997
|
+
.trim()
|
|
4998
|
+
.replace(/[^a-z0-9\s-]/g, '') // Remove special chars except spaces and hyphens
|
|
4999
|
+
.replace(/\s+/g, '-') // Replace spaces with hyphens
|
|
5000
|
+
.replace(/-+/g, '-') // Remove consecutive hyphens
|
|
5001
|
+
.replace(/^-|-$/g, ''); // Trim leading/trailing hyphens
|
|
5002
|
+
return slug || 'mcp-server'; // Fallback if empty
|
|
5003
|
+
}
|
|
5004
|
+
function createMCPServerMethods(mongoose) {
|
|
5005
|
+
/**
|
|
5006
|
+
* Finds the next available server name by checking for duplicates.
|
|
5007
|
+
* If baseName exists, returns baseName-2, baseName-3, etc.
|
|
5008
|
+
*/
|
|
5009
|
+
async function findNextAvailableServerName(baseName) {
|
|
5010
|
+
const MCPServer = mongoose.models.MCPServer;
|
|
5011
|
+
// Find all servers with matching base name pattern (baseName or baseName-N)
|
|
5012
|
+
const escapedBaseName = escapeRegex(baseName);
|
|
5013
|
+
const existing = await MCPServer.find({
|
|
5014
|
+
serverName: { $regex: `^${escapedBaseName}(-\\d+)?$` },
|
|
5015
|
+
})
|
|
5016
|
+
.select('serverName')
|
|
5017
|
+
.lean();
|
|
5018
|
+
if (existing.length === 0) {
|
|
5019
|
+
return baseName;
|
|
5020
|
+
}
|
|
5021
|
+
// Extract numbers from existing names
|
|
5022
|
+
const numbers = existing.map((s) => {
|
|
5023
|
+
const match = s.serverName.match(/-(\d+)$/);
|
|
5024
|
+
return match ? parseInt(match[1], 10) : 1;
|
|
5025
|
+
});
|
|
5026
|
+
const maxNumber = Math.max(...numbers);
|
|
5027
|
+
return `${baseName}-${maxNumber + 1}`;
|
|
5028
|
+
}
|
|
5029
|
+
/**
|
|
5030
|
+
* Create a new MCP server with retry logic for handling race conditions.
|
|
5031
|
+
* When multiple requests try to create servers with the same title simultaneously,
|
|
5032
|
+
* they may get the same serverName from findNextAvailableServerName() before any
|
|
5033
|
+
* creates the record (TOCTOU race condition). This is handled by retrying with
|
|
5034
|
+
* exponential backoff when a duplicate key error occurs.
|
|
5035
|
+
* @param data - Object containing config (with title, description, url, etc.) and author
|
|
5036
|
+
* @returns The created MCP server document
|
|
5037
|
+
*/
|
|
5038
|
+
async function createMCPServer(data) {
|
|
5039
|
+
const MCPServer = mongoose.models.MCPServer;
|
|
5040
|
+
let lastError;
|
|
5041
|
+
for (let attempt = 0; attempt < MAX_CREATE_RETRIES; attempt++) {
|
|
5042
|
+
try {
|
|
5043
|
+
// Generate serverName from title, with fallback to nanoid if no title
|
|
5044
|
+
// Important: regenerate on each attempt to get fresh available name
|
|
5045
|
+
let serverName;
|
|
5046
|
+
if (data.config.title) {
|
|
5047
|
+
const baseSlug = generateServerNameFromTitle(data.config.title);
|
|
5048
|
+
serverName = await findNextAvailableServerName(baseSlug);
|
|
5049
|
+
}
|
|
5050
|
+
else {
|
|
5051
|
+
serverName = `mcp-${nanoid(16)}`;
|
|
5052
|
+
}
|
|
5053
|
+
const newServer = await MCPServer.create({
|
|
5054
|
+
serverName,
|
|
5055
|
+
config: data.config,
|
|
5056
|
+
author: data.author,
|
|
5057
|
+
});
|
|
5058
|
+
return newServer.toObject();
|
|
5059
|
+
}
|
|
5060
|
+
catch (error) {
|
|
5061
|
+
lastError = error;
|
|
5062
|
+
// Only retry on duplicate key errors (serverName collision)
|
|
5063
|
+
if (isDuplicateKeyError(error) && attempt < MAX_CREATE_RETRIES - 1) {
|
|
5064
|
+
// Exponential backoff: 10ms, 20ms, 40ms
|
|
5065
|
+
const delay = RETRY_BASE_DELAY_MS * Math.pow(2, attempt);
|
|
5066
|
+
logger$1.debug(`[createMCPServer] Duplicate serverName detected, retrying (attempt ${attempt + 2}/${MAX_CREATE_RETRIES}) after ${delay}ms`);
|
|
5067
|
+
await new Promise((resolve) => setTimeout(resolve, delay));
|
|
5068
|
+
continue;
|
|
5069
|
+
}
|
|
5070
|
+
// Not a duplicate key error or out of retries - throw immediately
|
|
5071
|
+
throw error;
|
|
5072
|
+
}
|
|
5073
|
+
}
|
|
5074
|
+
// Should not reach here, but TypeScript requires a return
|
|
5075
|
+
throw lastError;
|
|
5076
|
+
}
|
|
5077
|
+
/**
 * Look up a single MCP server by its serverName.
 * @param serverName - The MCP server ID
 * @returns The MCP server document or null
 */
async function findMCPServerById(serverName) {
    return await mongoose.models.MCPServer.findOne({ serverName }).lean();
}
|
|
5086
|
+
/**
 * Look up a single MCP server by its MongoDB ObjectId.
 * @param _id - The MongoDB ObjectId
 * @returns The MCP server document or null
 */
async function findMCPServerByObjectId(_id) {
    return await mongoose.models.MCPServer.findById(_id).lean();
}
|
|
5095
|
+
/**
 * Fetch all MCP servers created by a given author, most recently updated first.
 * @param authorId - The author's ObjectId or string
 * @returns Array of MCP server documents
 */
async function findMCPServersByAuthor(authorId) {
    const query = mongoose.models.MCPServer.find({ author: authorId });
    return await query.sort({ updatedAt: -1 }).lean();
}
|
|
5104
|
+
/**
 * Get a paginated list of MCP servers by IDs with filtering and search.
 *
 * Pagination is cursor-based: the cursor is a base64-encoded JSON object of
 * `{ updatedAt, _id }` taken from the last item of the previous page, and the
 * result set is ordered by `updatedAt` descending with `_id` ascending as the
 * tie-breaker.
 * @param ids - Array of ObjectIds to include
 * @param otherParams - Additional filter parameters (e.g., search)
 * @param limit - Page size limit (null for no pagination)
 * @param after - Cursor for pagination
 * @returns Paginated list of MCP servers: `{ data, has_more, after }`
 */
async function getListMCPServersByIds({ ids = [], otherParams = {}, limit = null, after = null, }) {
    const MCPServer = mongoose.models.MCPServer;
    const isPaginated = limit !== null && limit !== undefined;
    // Clamp the page size to [1, 100]; non-numeric input falls back to the default.
    // Radix 10 is passed explicitly so leading-zero strings cannot be misparsed.
    const normalizedLimit = isPaginated
        ? Math.min(Math.max(1, Number.parseInt(String(limit), 10) || NORMALIZED_LIMIT_DEFAULT), 100)
        : null;
    // Base query: caller filters plus the accessible-id restriction.
    let query = { ...otherParams, _id: { $in: ids } };
    if (after) {
        try {
            const { updatedAt, _id } = JSON.parse(Buffer.from(after, 'base64').toString('utf8'));
            // Items strictly "after" the cursor in (updatedAt desc, _id asc) order.
            const cursorCondition = {
                $or: [
                    { updatedAt: { $lt: new Date(updatedAt) } },
                    { updatedAt: new Date(updatedAt), _id: { $gt: new mongoose.Types.ObjectId(_id) } },
                ],
            };
            // Combine immutably instead of mutating the base query in place.
            query = { $and: [query, cursorCondition] };
        }
        catch (error) {
            // Invalid cursor, ignore and return the first page.
            logger$1.warn('[getListMCPServersByIds] Invalid cursor provided', error);
        }
    }
    if (normalizedLimit === null) {
        // No pagination - return all matching servers
        const servers = await MCPServer.find(query).sort({ updatedAt: -1, _id: 1 }).lean();
        return {
            data: servers,
            has_more: false,
            after: null,
        };
    }
    // Fetch one extra row to detect whether another page exists.
    const servers = await MCPServer.find(query)
        .sort({ updatedAt: -1, _id: 1 })
        .limit(normalizedLimit + 1)
        .lean();
    const hasMore = servers.length > normalizedLimit;
    const data = hasMore ? servers.slice(0, normalizedLimit) : servers;
    let nextCursor = null;
    if (hasMore && data.length > 0) {
        const lastItem = data[data.length - 1];
        nextCursor = Buffer.from(JSON.stringify({
            updatedAt: lastItem.updatedAt,
            _id: lastItem._id,
        })).toString('base64');
    }
    return {
        data,
        has_more: hasMore,
        after: nextCursor,
    };
}
|
|
5177
|
+
/**
 * Update an MCP server's fields by serverName.
 * @param serverName - The MCP server ID
 * @param updateData - Object containing config to update
 * @returns The updated MCP server document or null
 */
async function updateMCPServer(serverName, updateData) {
    // `new: true` returns the post-update document; validators run on the update.
    const options = { new: true, runValidators: true };
    return await mongoose.models.MCPServer.findOneAndUpdate({ serverName }, { $set: updateData }, options).lean();
}
|
|
5187
|
+
/**
 * Remove an MCP server by serverName.
 * @param serverName - The MCP server ID
 * @returns The deleted MCP server document or null
 */
async function deleteMCPServer(serverName) {
    return await mongoose.models.MCPServer.findOneAndDelete({ serverName }).lean();
}
|
|
5196
|
+
/**
 * Fetch MCP servers whose serverName is in the given list.
 * @param names - Array of serverName strings to fetch
 * @returns Object containing array of MCP server documents
 */
async function getListMCPServersByNames({ names = [] }) {
    // Short-circuit: nothing requested, nothing to query.
    if (names.length === 0) {
        return { data: [] };
    }
    const matching = await mongoose.models.MCPServer.find({ serverName: { $in: names } }).lean();
    return { data: matching };
}
|
|
5209
|
+
return {
|
|
5210
|
+
createMCPServer,
|
|
5211
|
+
findMCPServerById,
|
|
5212
|
+
findMCPServerByObjectId,
|
|
5213
|
+
findMCPServersByAuthor,
|
|
5214
|
+
getListMCPServersByIds,
|
|
5215
|
+
getListMCPServersByNames,
|
|
5216
|
+
updateMCPServer,
|
|
5217
|
+
deleteMCPServer,
|
|
5218
|
+
};
|
|
5219
|
+
}
|
|
5220
|
+
|
|
4258
5221
|
// Factory function that takes mongoose instance and returns the methods
|
|
4259
5222
|
function createPluginAuthMethods(mongoose) {
|
|
4260
5223
|
/**
|
|
@@ -4482,6 +5445,27 @@ function createAccessRoleMethods(mongoose) {
|
|
|
4482
5445
|
resourceType: ResourceType.PROMPTGROUP,
|
|
4483
5446
|
permBits: RoleBits.OWNER,
|
|
4484
5447
|
},
|
|
5448
|
+
{
|
|
5449
|
+
accessRoleId: AccessRoleIds.MCPSERVER_VIEWER,
|
|
5450
|
+
name: 'com_ui_mcp_server_role_viewer',
|
|
5451
|
+
description: 'com_ui_mcp_server_role_viewer_desc',
|
|
5452
|
+
resourceType: ResourceType.MCPSERVER,
|
|
5453
|
+
permBits: RoleBits.VIEWER,
|
|
5454
|
+
},
|
|
5455
|
+
{
|
|
5456
|
+
accessRoleId: AccessRoleIds.MCPSERVER_EDITOR,
|
|
5457
|
+
name: 'com_ui_mcp_server_role_editor',
|
|
5458
|
+
description: 'com_ui_mcp_server_role_editor_desc',
|
|
5459
|
+
resourceType: ResourceType.MCPSERVER,
|
|
5460
|
+
permBits: RoleBits.EDITOR,
|
|
5461
|
+
},
|
|
5462
|
+
{
|
|
5463
|
+
accessRoleId: AccessRoleIds.MCPSERVER_OWNER,
|
|
5464
|
+
name: 'com_ui_mcp_server_role_owner',
|
|
5465
|
+
description: 'com_ui_mcp_server_role_owner_desc',
|
|
5466
|
+
resourceType: ResourceType.MCPSERVER,
|
|
5467
|
+
permBits: RoleBits.OWNER,
|
|
5468
|
+
},
|
|
4485
5469
|
];
|
|
4486
5470
|
const result = {};
|
|
4487
5471
|
for (const role of defaultRoles) {
|
|
@@ -5062,6 +6046,49 @@ function createAclEntryMethods(mongoose) {
|
|
|
5062
6046
|
}
|
|
5063
6047
|
return effectiveBits;
|
|
5064
6048
|
}
|
|
6049
|
+
/**
 * Get effective permissions for multiple resources in a single query (BATCH).
 * Returns a map of resourceId → effectivePermissionBits, where the effective
 * bits are the bitwise OR of every matching ACL entry's permBits.
 *
 * @param principalsList - List of principals (user + groups + public)
 * @param resourceType - The type of resource ('MCPSERVER', 'AGENT', etc.)
 * @param resourceIds - Array of resource IDs to check
 * @returns {Promise<Map<string, number>>} Map of resourceId → permission bits
 *
 * @example
 * const principals = await getUserPrincipals({ userId, role });
 * const serverIds = [id1, id2, id3];
 * const permMap = await getEffectivePermissionsForResources(
 *   principals,
 *   ResourceType.MCPSERVER,
 *   serverIds
 * );
 * // permMap.get(id1.toString()) → 7 (VIEW|EDIT|DELETE)
 */
async function getEffectivePermissionsForResources(principalsList, resourceType, resourceIds) {
    if (!Array.isArray(resourceIds) || resourceIds.length === 0) {
        return new Map();
    }
    const AclEntry = mongoose.models.AclEntry;
    // Public principals match on type alone; everything else also matches the id.
    const principalMatchers = principalsList.map((principal) => principal.principalType === PrincipalType.PUBLIC
        ? { principalType: principal.principalType }
        : { principalType: principal.principalType, principalId: principal.principalId });
    // Single batched query covering every requested resource.
    const aclEntries = await AclEntry.find({
        $or: principalMatchers,
        resourceType,
        resourceId: { $in: resourceIds },
    }).lean();
    // OR together the bits of every entry that targets the same resource.
    const permissionsMap = new Map();
    for (const entry of aclEntries) {
        const key = entry.resourceId.toString();
        permissionsMap.set(key, (permissionsMap.get(key) || 0) | entry.permBits);
    }
    return permissionsMap;
}
|
|
5065
6092
|
/**
|
|
5066
6093
|
* Grant permission to a principal for a resource
|
|
5067
6094
|
* @param principalType - The type of principal ('user', 'group', 'public')
|
|
@@ -5202,6 +6229,7 @@ function createAclEntryMethods(mongoose) {
|
|
|
5202
6229
|
findEntriesByPrincipalsAndResource,
|
|
5203
6230
|
hasPermission,
|
|
5204
6231
|
getEffectivePermissions,
|
|
6232
|
+
getEffectivePermissionsForResources,
|
|
5205
6233
|
grantPermission,
|
|
5206
6234
|
revokePermission,
|
|
5207
6235
|
modifyPermissionBits,
|
|
@@ -5677,6 +6705,7 @@ function createShareMethods(mongoose) {
|
|
|
5677
6705
|
|
|
5678
6706
|
/**
|
|
5679
6707
|
* Creates all database methods for all collections
|
|
6708
|
+
* @param mongoose - Mongoose instance
|
|
5680
6709
|
*/
|
|
5681
6710
|
function createMethods(mongoose) {
|
|
5682
6711
|
return {
|
|
@@ -5684,8 +6713,11 @@ function createMethods(mongoose) {
|
|
|
5684
6713
|
...createSessionMethods(mongoose),
|
|
5685
6714
|
...createTokenMethods(mongoose),
|
|
5686
6715
|
...createRoleMethods(mongoose),
|
|
6716
|
+
...createKeyMethods(mongoose),
|
|
6717
|
+
...createFileMethods(mongoose),
|
|
5687
6718
|
...createMemoryMethods(mongoose),
|
|
5688
6719
|
...createAgentCategoryMethods(mongoose),
|
|
6720
|
+
...createMCPServerMethods(mongoose),
|
|
5689
6721
|
...createAccessRoleMethods(mongoose),
|
|
5690
6722
|
...createUserGroupMethods(mongoose),
|
|
5691
6723
|
...createAclEntryMethods(mongoose),
|
|
@@ -5694,5 +6726,5 @@ function createMethods(mongoose) {
|
|
|
5694
6726
|
};
|
|
5695
6727
|
}
|
|
5696
6728
|
|
|
5697
|
-
export { AppService, RoleBits, Action as actionSchema, agentCategorySchema, agentSchema, agentsConfigSetup, assistantSchema, balanceSchema, bannerSchema, categoriesSchema, conversationTag as conversationTagSchema, convoSchema, createMethods, createModels, file as fileSchema, getTransactionSupport, getWebSearchKeys, groupSchema, hashToken, keySchema, loadDefaultInterface, loadTurnstileConfig, loadWebSearchConfig, logger$1 as logger, logger as meiliLogger, MemoryEntrySchema as memorySchema, messageSchema, pluginAuthSchema, presetSchema, processModelSpecs, projectSchema, promptGroupSchema, promptSchema, roleSchema, sessionSchema, shareSchema, signPayload, supportsTransactions, tokenSchema, toolCallSchema, transactionSchema, userSchema, webSearchAuth, webSearchKeys };
|
|
6729
|
+
export { AppService, DEFAULT_REFRESH_TOKEN_EXPIRY, DEFAULT_SESSION_EXPIRY, RoleBits, Action as actionSchema, agentCategorySchema, agentSchema, agentsConfigSetup, assistantSchema, balanceSchema, bannerSchema, categoriesSchema, conversationTag as conversationTagSchema, convoSchema, createMethods, createModels, decrypt, decryptV2, decryptV3, defaultVertexModels, encrypt, encryptV2, encryptV3, file as fileSchema, getRandomValues, getTransactionSupport, getWebSearchKeys, groupSchema, hashBackupCode, hashToken, keySchema, loadDefaultInterface, loadTurnstileConfig, loadWebSearchConfig, logger$1 as logger, logger as meiliLogger, MemoryEntrySchema as memorySchema, messageSchema, pluginAuthSchema, presetSchema, processModelSpecs, projectSchema, promptGroupSchema, promptSchema, roleSchema, sessionSchema, shareSchema, signPayload, supportsTransactions, tokenSchema, toolCallSchema, transactionSchema, userSchema, validateVertexConfig, vertexConfigSetup, webSearchAuth, webSearchKeys };
|
|
5698
6730
|
//# sourceMappingURL=index.es.js.map
|