@librechat/data-schemas 0.0.33 → 0.0.35

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -2246,9 +2246,10 @@ const rolePermissionsSchema = new mongoose.Schema({
2246
2246
  [librechatDataProvider.Permissions.USE]: { type: Boolean },
2247
2247
  },
2248
2248
  [librechatDataProvider.PermissionTypes.PROMPTS]: {
2249
- [librechatDataProvider.Permissions.SHARED_GLOBAL]: { type: Boolean },
2250
2249
  [librechatDataProvider.Permissions.USE]: { type: Boolean },
2251
2250
  [librechatDataProvider.Permissions.CREATE]: { type: Boolean },
2251
+ [librechatDataProvider.Permissions.SHARE]: { type: Boolean },
2252
+ [librechatDataProvider.Permissions.SHARE_PUBLIC]: { type: Boolean },
2252
2253
  },
2253
2254
  [librechatDataProvider.PermissionTypes.MEMORIES]: {
2254
2255
  [librechatDataProvider.Permissions.USE]: { type: Boolean },
@@ -2258,9 +2259,10 @@ const rolePermissionsSchema = new mongoose.Schema({
2258
2259
  [librechatDataProvider.Permissions.OPT_OUT]: { type: Boolean },
2259
2260
  },
2260
2261
  [librechatDataProvider.PermissionTypes.AGENTS]: {
2261
- [librechatDataProvider.Permissions.SHARED_GLOBAL]: { type: Boolean },
2262
2262
  [librechatDataProvider.Permissions.USE]: { type: Boolean },
2263
2263
  [librechatDataProvider.Permissions.CREATE]: { type: Boolean },
2264
+ [librechatDataProvider.Permissions.SHARE]: { type: Boolean },
2265
+ [librechatDataProvider.Permissions.SHARE_PUBLIC]: { type: Boolean },
2264
2266
  },
2265
2267
  [librechatDataProvider.PermissionTypes.MULTI_CONVO]: {
2266
2268
  [librechatDataProvider.Permissions.USE]: { type: Boolean },
@@ -2292,6 +2294,7 @@ const rolePermissionsSchema = new mongoose.Schema({
2292
2294
  [librechatDataProvider.Permissions.USE]: { type: Boolean },
2293
2295
  [librechatDataProvider.Permissions.CREATE]: { type: Boolean },
2294
2296
  [librechatDataProvider.Permissions.SHARE]: { type: Boolean },
2297
+ [librechatDataProvider.Permissions.SHARE_PUBLIC]: { type: Boolean },
2295
2298
  },
2296
2299
  }, { _id: false });
2297
2300
  const roleSchema = new mongoose.Schema({
@@ -2857,8 +2860,8 @@ const createMeiliMongooseModel = ({ index, attributesToIndex, syncOptions, }) =>
2857
2860
  * Get the current sync progress
2858
2861
  */
2859
2862
  static async getSyncProgress() {
2860
- const totalDocuments = await this.countDocuments();
2861
- const indexedDocuments = await this.countDocuments({ _meiliIndex: true });
2863
+ const totalDocuments = await this.countDocuments({ expiredAt: null });
2864
+ const indexedDocuments = await this.countDocuments({ expiredAt: null, _meiliIndex: true });
2862
2865
  return {
2863
2866
  totalProcessed: indexedDocuments,
2864
2867
  totalDocuments,
@@ -2866,93 +2869,84 @@ const createMeiliMongooseModel = ({ index, attributesToIndex, syncOptions, }) =>
2866
2869
  };
2867
2870
  }
2868
2871
  /**
2869
- * Synchronizes the data between the MongoDB collection and the MeiliSearch index.
2870
- * Now uses streaming and batching to reduce memory usage.
2871
- */
2872
- static async syncWithMeili(options) {
2872
+ * Synchronizes data between the MongoDB collection and the MeiliSearch index by
2873
+ * incrementally indexing only documents where `expiredAt` is `null` and `_meiliIndex` is `false`
2874
+ * (i.e., non-expired documents that have not yet been indexed).
2875
+ * */
2876
+ static async syncWithMeili() {
2877
+ const startTime = Date.now();
2878
+ const { batchSize, delayMs } = syncConfig;
2879
+ const collectionName = primaryKey === 'messageId' ? 'messages' : 'conversations';
2880
+ logger.info(`[syncWithMeili] Starting sync for ${collectionName} with batch size ${batchSize}`);
2881
+ // Get the approximate total document count as a rough estimate; the sync should not exceed this number
2882
+ const approxTotalCount = await this.estimatedDocumentCount();
2883
+ logger.info(`[syncWithMeili] Approximate total number of all ${collectionName}: ${approxTotalCount}`);
2873
2884
  try {
2874
- const startTime = Date.now();
2875
- const { batchSize, delayMs } = syncConfig;
2876
- logger.info(`[syncWithMeili] Starting sync for ${primaryKey === 'messageId' ? 'messages' : 'conversations'} with batch size ${batchSize}`);
2877
- // Build query with resume capability
2878
- // Do not sync TTL documents
2879
- const query = { expiredAt: null };
2880
- if (options === null || options === void 0 ? void 0 : options.resumeFromId) {
2881
- query._id = { $gt: options.resumeFromId };
2882
- }
2883
- // Get total count for progress tracking
2884
- const totalCount = await this.countDocuments(query);
2885
- let processedCount = 0;
2886
2885
  // First, handle documents that need to be removed from Meili
2886
+ logger.info(`[syncWithMeili] Starting cleanup of Meili index ${index.uid} before sync`);
2887
2887
  await this.cleanupMeiliIndex(index, primaryKey, batchSize, delayMs);
2888
- // Process MongoDB documents in batches using cursor
2889
- const cursor = this.find(query)
2890
- .select(attributesToIndex.join(' ') + ' _meiliIndex')
2891
- .sort({ _id: 1 })
2892
- .batchSize(batchSize)
2893
- .cursor();
2894
- const format = (doc) => _.omitBy(_.pick(doc, attributesToIndex), (v, k) => k.startsWith('$'));
2895
- let documentBatch = [];
2896
- let updateOps = [];
2897
- // Process documents in streaming fashion
2898
- for await (const doc of cursor) {
2899
- const typedDoc = doc.toObject();
2900
- const formatted = format(typedDoc);
2901
- // Check if document needs indexing
2902
- if (!typedDoc._meiliIndex) {
2903
- documentBatch.push(formatted);
2904
- updateOps.push({
2905
- updateOne: {
2906
- filter: { _id: typedDoc._id },
2907
- update: { $set: { _meiliIndex: true } },
2908
- },
2909
- });
2888
+ logger.info(`[syncWithMeili] Completed cleanup of Meili index: ${index.uid}`);
2889
+ }
2890
+ catch (error) {
2891
+ logger.error('[syncWithMeili] Error during cleanup Meili before sync:', error);
2892
+ throw error;
2893
+ }
2894
+ let processedCount = 0;
2895
+ let hasMore = true;
2896
+ while (hasMore) {
2897
+ const query = {
2898
+ expiredAt: null,
2899
+ _meiliIndex: false,
2900
+ };
2901
+ try {
2902
+ const documents = await this.find(query)
2903
+ .select(attributesToIndex.join(' ') + ' _meiliIndex')
2904
+ .limit(batchSize)
2905
+ .lean();
2906
+ // Check if there are more documents to process
2907
+ if (documents.length === 0) {
2908
+ logger.info('[syncWithMeili] No more documents to process');
2909
+ break;
2910
2910
  }
2911
- processedCount++;
2912
- // Process batch when it reaches the configured size
2913
- if (documentBatch.length >= batchSize) {
2914
- await this.processSyncBatch(index, documentBatch, updateOps);
2915
- documentBatch = [];
2916
- updateOps = [];
2917
- // Log progress
2918
- const progress = Math.round((processedCount / totalCount) * 100);
2919
- logger.info(`[syncWithMeili] Progress: ${progress}% (${processedCount}/${totalCount})`);
2920
- // Add delay to prevent overwhelming resources
2921
- if (delayMs > 0) {
2922
- await new Promise((resolve) => setTimeout(resolve, delayMs));
2923
- }
2911
+ // Process the batch
2912
+ await this.processSyncBatch(index, documents);
2913
+ processedCount += documents.length;
2914
+ logger.info(`[syncWithMeili] Processed: ${processedCount}`);
2915
+ if (documents.length < batchSize) {
2916
+ hasMore = false;
2917
+ }
2918
+ // Add delay to prevent overwhelming resources
2919
+ if (hasMore && delayMs > 0) {
2920
+ await new Promise((resolve) => setTimeout(resolve, delayMs));
2924
2921
  }
2925
2922
  }
2926
- // Process remaining documents
2927
- if (documentBatch.length > 0) {
2928
- await this.processSyncBatch(index, documentBatch, updateOps);
2923
+ catch (error) {
2924
+ logger.error('[syncWithMeili] Error processing documents batch:', error);
2925
+ throw error;
2929
2926
  }
2930
- const duration = Date.now() - startTime;
2931
- logger.info(`[syncWithMeili] Completed sync for ${primaryKey === 'messageId' ? 'messages' : 'conversations'} in ${duration}ms`);
2932
- }
2933
- catch (error) {
2934
- logger.error('[syncWithMeili] Error during sync:', error);
2935
- throw error;
2936
2927
  }
2928
+ const duration = Date.now() - startTime;
2929
+ logger.info(`[syncWithMeili] Completed sync for ${collectionName}. Processed ${processedCount} documents in ${duration}ms`);
2937
2930
  }
2938
2931
  /**
2939
2932
  * Process a batch of documents for syncing
2940
2933
  */
2941
- static async processSyncBatch(index, documents, updateOps) {
2934
+ static async processSyncBatch(index, documents) {
2942
2935
  if (documents.length === 0) {
2943
2936
  return;
2944
2937
  }
2938
+ // Format documents for MeiliSearch
2939
+ const formattedDocs = documents.map((doc) => _.omitBy(_.pick(doc, attributesToIndex), (_v, k) => k.startsWith('$')));
2945
2940
  try {
2946
2941
  // Add documents to MeiliSearch
2947
- await index.addDocuments(documents);
2942
+ await index.addDocumentsInBatches(formattedDocs);
2948
2943
  // Update MongoDB to mark documents as indexed
2949
- if (updateOps.length > 0) {
2950
- await this.collection.bulkWrite(updateOps);
2951
- }
2944
+ const docsIds = documents.map((doc) => doc._id);
2945
+ await this.updateMany({ _id: { $in: docsIds } }, { $set: { _meiliIndex: true } });
2952
2946
  }
2953
2947
  catch (error) {
2954
2948
  logger.error('[processSyncBatch] Error processing batch:', error);
2955
- // Don't throw - allow sync to continue with other documents
2949
+ throw error;
2956
2950
  }
2957
2951
  }
2958
2952
  /**
@@ -2977,10 +2971,14 @@ const createMeiliMongooseModel = ({ index, attributesToIndex, syncOptions, }) =>
2977
2971
  // Delete documents that don't exist in MongoDB
2978
2972
  const toDelete = meiliIds.filter((id) => !existingIds.has(id));
2979
2973
  if (toDelete.length > 0) {
2980
- await Promise.all(toDelete.map((id) => index.deleteDocument(id)));
2974
+ await index.deleteDocuments(toDelete.map(String));
2981
2975
  logger.debug(`[cleanupMeiliIndex] Deleted ${toDelete.length} orphaned documents`);
2982
2976
  }
2983
- offset += batchSize;
2977
+ // if the fetch request returns fewer documents than the limit, all documents have been processed
2978
+ if (batch.results.length < batchSize) {
2979
+ break;
2980
+ }
2981
+ offset += batchSize - toDelete.length;
2984
2982
  // Add delay between batches
2985
2983
  if (delayMs > 0) {
2986
2984
  await new Promise((resolve) => setTimeout(resolve, delayMs));
@@ -5078,10 +5076,10 @@ function createMCPServerMethods(mongoose) {
5078
5076
  }
5079
5077
  /**
5080
5078
  * Find an MCP server by serverName
5081
- * @param serverName - The MCP server ID
5079
+ * @param serverName - The unique server name identifier
5082
5080
  * @returns The MCP server document or null
5083
5081
  */
5084
- async function findMCPServerById(serverName) {
5082
+ async function findMCPServerByServerName(serverName) {
5085
5083
  const MCPServer = mongoose.models.MCPServer;
5086
5084
  return await MCPServer.findOne({ serverName }).lean();
5087
5085
  }
@@ -5210,7 +5208,7 @@ function createMCPServerMethods(mongoose) {
5210
5208
  }
5211
5209
  return {
5212
5210
  createMCPServer,
5213
- findMCPServerById,
5211
+ findMCPServerByServerName,
5214
5212
  findMCPServerByObjectId,
5215
5213
  findMCPServersByAuthor,
5216
5214
  getListMCPServersByIds,