@mastra/mongodb 0.0.0-working-memory-per-user-20250620163010 → 0.0.0-zod-v4-compat-part-2-20250820135355

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. package/CHANGELOG.md +251 -3
  2. package/LICENSE.md +11 -42
  3. package/dist/index.cjs +1846 -505
  4. package/dist/index.cjs.map +1 -0
  5. package/dist/index.d.ts +5 -7
  6. package/dist/index.d.ts.map +1 -0
  7. package/dist/index.js +1815 -474
  8. package/dist/index.js.map +1 -0
  9. package/dist/storage/MongoDBConnector.d.ts +23 -0
  10. package/dist/storage/MongoDBConnector.d.ts.map +1 -0
  11. package/dist/storage/connectors/MongoDBConnector.d.ts +23 -0
  12. package/dist/storage/connectors/MongoDBConnector.d.ts.map +1 -0
  13. package/dist/storage/connectors/base.d.ts +6 -0
  14. package/dist/storage/connectors/base.d.ts.map +1 -0
  15. package/dist/storage/domains/legacy-evals/index.d.ts +18 -0
  16. package/dist/storage/domains/legacy-evals/index.d.ts.map +1 -0
  17. package/dist/storage/domains/memory/index.d.ts +80 -0
  18. package/dist/storage/domains/memory/index.d.ts.map +1 -0
  19. package/dist/storage/domains/operations/index.d.ts +38 -0
  20. package/dist/storage/domains/operations/index.d.ts.map +1 -0
  21. package/dist/storage/domains/scores/index.d.ts +41 -0
  22. package/dist/storage/domains/scores/index.d.ts.map +1 -0
  23. package/dist/storage/domains/traces/index.d.ts +18 -0
  24. package/dist/storage/domains/traces/index.d.ts.map +1 -0
  25. package/dist/storage/domains/utils.d.ts +8 -0
  26. package/dist/storage/domains/utils.d.ts.map +1 -0
  27. package/dist/storage/domains/workflows/index.d.ts +33 -0
  28. package/dist/storage/domains/workflows/index.d.ts.map +1 -0
  29. package/dist/storage/index.d.ts +178 -0
  30. package/dist/storage/index.d.ts.map +1 -0
  31. package/dist/storage/types.d.ts +11 -0
  32. package/dist/storage/types.d.ts.map +1 -0
  33. package/dist/vector/filter.d.ts +21 -0
  34. package/dist/vector/filter.d.ts.map +1 -0
  35. package/dist/vector/index.d.ts +78 -0
  36. package/dist/vector/index.d.ts.map +1 -0
  37. package/dist/vector/prompt.d.ts +6 -0
  38. package/dist/vector/prompt.d.ts.map +1 -0
  39. package/docker-compose.yaml +1 -1
  40. package/package.json +10 -10
  41. package/src/index.ts +1 -0
  42. package/src/storage/MongoDBConnector.ts +93 -0
  43. package/src/storage/connectors/MongoDBConnector.ts +93 -0
  44. package/src/storage/connectors/base.ts +7 -0
  45. package/src/storage/domains/legacy-evals/index.ts +193 -0
  46. package/src/storage/domains/memory/index.ts +741 -0
  47. package/src/storage/domains/operations/index.ts +155 -0
  48. package/src/storage/domains/scores/index.ts +379 -0
  49. package/src/storage/domains/traces/index.ts +142 -0
  50. package/src/storage/domains/utils.ts +43 -0
  51. package/src/storage/domains/workflows/index.ts +196 -0
  52. package/src/storage/index.test.ts +27 -989
  53. package/src/storage/index.ts +241 -605
  54. package/src/storage/types.ts +14 -0
  55. package/src/vector/filter.test.ts +40 -30
  56. package/src/vector/filter.ts +25 -4
  57. package/src/vector/index.test.ts +48 -3
  58. package/src/vector/index.ts +301 -131
  59. package/tsconfig.build.json +9 -0
  60. package/tsconfig.json +1 -1
  61. package/tsup.config.ts +22 -0
  62. package/dist/_tsup-dts-rollup.d.cts +0 -274
  63. package/dist/_tsup-dts-rollup.d.ts +0 -274
  64. package/dist/index.d.cts +0 -7
package/dist/index.cjs CHANGED
@@ -1,11 +1,12 @@
  'use strict';

+ var error = require('@mastra/core/error');
  var vector = require('@mastra/core/vector');
  var mongodb = require('mongodb');
  var uuid = require('uuid');
  var filter = require('@mastra/core/vector/filter');
- var agent = require('@mastra/core/agent');
  var storage = require('@mastra/core/storage');
+ var agent = require('@mastra/core/agent');

  // src/vector/index.ts
  var MongoDBFilterTranslator = class extends filter.BaseFilterTranslator {
@@ -118,28 +119,68 @@ var MongoDBVector = class extends vector.MastraVector {
  }
  // Public methods
  async connect() {
- await this.client.connect();
+ try {
+ await this.client.connect();
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_MONGODB_VECTOR_CONNECT_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY
+ },
+ error$1
+ );
+ }
  }
  async disconnect() {
- await this.client.close();
+ try {
+ await this.client.close();
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_MONGODB_VECTOR_DISCONNECT_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY
+ },
+ error$1
+ );
+ }
  }
  async createIndex({ indexName, dimension, metric = "cosine" }) {
- if (!Number.isInteger(dimension) || dimension <= 0) {
- throw new Error("Dimension must be a positive integer");
- }
- const mongoMetric = this.mongoMetricMap[metric];
- if (!mongoMetric) {
- throw new Error(`Invalid metric: "${metric}". Must be one of: cosine, euclidean, dotproduct`);
- }
- const collectionExists = await this.db.listCollections({ name: indexName }).hasNext();
- if (!collectionExists) {
- await this.db.createCollection(indexName);
+ let mongoMetric;
+ try {
+ if (!Number.isInteger(dimension) || dimension <= 0) {
+ throw new Error("Dimension must be a positive integer");
+ }
+ mongoMetric = this.mongoMetricMap[metric];
+ if (!mongoMetric) {
+ throw new Error(`Invalid metric: "${metric}". Must be one of: cosine, euclidean, dotproduct`);
+ }
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_MONGODB_VECTOR_CREATE_INDEX_INVALID_ARGS",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.USER,
+ details: {
+ indexName,
+ dimension,
+ metric
+ }
+ },
+ error$1
+ );
  }
- const collection = await this.getCollection(indexName);
- const indexNameInternal = `${indexName}_vector_index`;
- const embeddingField = this.embeddingFieldName;
- const numDimensions = dimension;
+ let collection;
  try {
+ const collectionExists = await this.db.listCollections({ name: indexName }).hasNext();
+ if (!collectionExists) {
+ await this.db.createCollection(indexName);
+ }
+ collection = await this.getCollection(indexName);
+ const indexNameInternal = `${indexName}_vector_index`;
+ const embeddingField = this.embeddingFieldName;
+ const numDimensions = dimension;
  await collection.createSearchIndex({
  definition: {
  fields: [
@@ -148,18 +189,52 @@ var MongoDBVector = class extends vector.MastraVector {
  path: embeddingField,
  numDimensions,
  similarity: mongoMetric
+ },
+ {
+ type: "filter",
+ path: "_id"
  }
  ]
  },
  name: indexNameInternal,
  type: "vectorSearch"
  });
- } catch (error) {
- if (error.codeName !== "IndexAlreadyExists") {
- throw error;
+ await collection.createSearchIndex({
+ definition: {
+ mappings: {
+ dynamic: true
+ }
+ },
+ name: `${indexName}_search_index`,
+ type: "search"
+ });
+ } catch (error$1) {
+ if (error$1.codeName !== "IndexAlreadyExists") {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_MONGODB_VECTOR_CREATE_INDEX_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY
+ },
+ error$1
+ );
  }
  }
- await collection.updateOne({ _id: "__index_metadata__" }, { $set: { dimension, metric } }, { upsert: true });
+ try {
+ await collection?.updateOne({ _id: "__index_metadata__" }, { $set: { dimension, metric } }, { upsert: true });
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_MONGODB_VECTOR_CREATE_INDEX_FAILED_STORE_METADATA",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: {
+ indexName
+ }
+ },
+ error$1
+ );
+ }
  }
  /**
  * Waits for the index to be ready.
@@ -189,40 +264,54 @@ var MongoDBVector = class extends vector.MastraVector {
  throw new Error(`Index "${indexNameInternal}" did not become ready within timeout`);
  }
  async upsert({ indexName, vectors, metadata, ids, documents }) {
- const collection = await this.getCollection(indexName);
- this.collectionForValidation = collection;
- const stats = await this.describeIndex({ indexName });
- await this.validateVectorDimensions(vectors, stats.dimension);
- const generatedIds = ids || vectors.map(() => uuid.v4());
- const operations = vectors.map((vector, idx) => {
- const id = generatedIds[idx];
- const meta = metadata?.[idx] || {};
- const doc = documents?.[idx];
- const normalizedMeta = Object.keys(meta).reduce(
- (acc, key) => {
- acc[key] = meta[key] instanceof Date ? meta[key].toISOString() : meta[key];
- return acc;
+ try {
+ const collection = await this.getCollection(indexName);
+ this.collectionForValidation = collection;
+ const stats = await this.describeIndex({ indexName });
+ await this.validateVectorDimensions(vectors, stats.dimension);
+ const generatedIds = ids || vectors.map(() => uuid.v4());
+ const operations = vectors.map((vector, idx) => {
+ const id = generatedIds[idx];
+ const meta = metadata?.[idx] || {};
+ const doc = documents?.[idx];
+ const normalizedMeta = Object.keys(meta).reduce(
+ (acc, key) => {
+ acc[key] = meta[key] instanceof Date ? meta[key].toISOString() : meta[key];
+ return acc;
+ },
+ {}
+ );
+ const updateDoc = {
+ [this.embeddingFieldName]: vector,
+ [this.metadataFieldName]: normalizedMeta
+ };
+ if (doc !== void 0) {
+ updateDoc[this.documentFieldName] = doc;
+ }
+ return {
+ updateOne: {
+ filter: { _id: id },
+ // '_id' is a string as per MongoDBDocument interface
+ update: { $set: updateDoc },
+ upsert: true
+ }
+ };
+ });
+ await collection.bulkWrite(operations);
+ return generatedIds;
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_MONGODB_VECTOR_UPSERT_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: {
+ indexName
+ }
  },
- {}
+ error$1
  );
- const updateDoc = {
- [this.embeddingFieldName]: vector,
- [this.metadataFieldName]: normalizedMeta
- };
- if (doc !== void 0) {
- updateDoc[this.documentFieldName] = doc;
- }
- return {
- updateOne: {
- filter: { _id: id },
- // '_id' is a string as per MongoDBDocument interface
- update: { $set: updateDoc },
- upsert: true
- }
- };
- });
- await collection.bulkWrite(operations);
- return generatedIds;
+ }
  }
  async query({
  indexName,
@@ -232,44 +321,49 @@ var MongoDBVector = class extends vector.MastraVector {
  includeVector = false,
  documentFilter
  }) {
- const collection = await this.getCollection(indexName, true);
- const indexNameInternal = `${indexName}_vector_index`;
- const mongoFilter = this.transformFilter(filter);
- const documentMongoFilter = documentFilter ? { [this.documentFieldName]: documentFilter } : {};
- let combinedFilter = {};
- if (Object.keys(mongoFilter).length > 0 && Object.keys(documentMongoFilter).length > 0) {
- combinedFilter = { $and: [mongoFilter, documentMongoFilter] };
- } else if (Object.keys(mongoFilter).length > 0) {
- combinedFilter = mongoFilter;
- } else if (Object.keys(documentMongoFilter).length > 0) {
- combinedFilter = documentMongoFilter;
- }
- const pipeline = [
- {
- $vectorSearch: {
- index: indexNameInternal,
- queryVector,
- path: this.embeddingFieldName,
- numCandidates: 100,
- limit: topK
- }
- },
- // Apply the filter using $match stage
- ...Object.keys(combinedFilter).length > 0 ? [{ $match: combinedFilter }] : [],
- {
- $set: { score: { $meta: "vectorSearchScore" } }
- },
- {
- $project: {
- _id: 1,
- score: 1,
- metadata: `$${this.metadataFieldName}`,
- document: `$${this.documentFieldName}`,
- ...includeVector && { vector: `$${this.embeddingFieldName}` }
+ try {
+ const collection = await this.getCollection(indexName, true);
+ const indexNameInternal = `${indexName}_vector_index`;
+ const mongoFilter = this.transformFilter(filter);
+ const documentMongoFilter = documentFilter ? { [this.documentFieldName]: documentFilter } : {};
+ let combinedFilter = {};
+ if (Object.keys(mongoFilter).length > 0 && Object.keys(documentMongoFilter).length > 0) {
+ combinedFilter = { $and: [mongoFilter, documentMongoFilter] };
+ } else if (Object.keys(mongoFilter).length > 0) {
+ combinedFilter = mongoFilter;
+ } else if (Object.keys(documentMongoFilter).length > 0) {
+ combinedFilter = documentMongoFilter;
+ }
+ const vectorSearch = {
+ index: indexNameInternal,
+ queryVector,
+ path: this.embeddingFieldName,
+ numCandidates: 100,
+ limit: topK
+ };
+ if (Object.keys(combinedFilter).length > 0) {
+ const candidateIds = await collection.aggregate([{ $match: combinedFilter }, { $project: { _id: 1 } }]).map((doc) => doc._id).toArray();
+ if (candidateIds.length > 0) {
+ vectorSearch.filter = { _id: { $in: candidateIds } };
  }
  }
- ];
- try {
+ const pipeline = [
+ {
+ $vectorSearch: vectorSearch
+ },
+ {
+ $set: { score: { $meta: "vectorSearchScore" } }
+ },
+ {
+ $project: {
+ _id: 1,
+ score: 1,
+ metadata: `$${this.metadataFieldName}`,
+ document: `$${this.documentFieldName}`,
+ ...includeVector && { vector: `$${this.embeddingFieldName}` }
+ }
+ }
+ ];
  const results = await collection.aggregate(pipeline).toArray();
  return results.map((result) => ({
  id: result._id,
@@ -278,14 +372,34 @@ var MongoDBVector = class extends vector.MastraVector {
  vector: includeVector ? result.vector : void 0,
  document: result.document
  }));
- } catch (error) {
- console.error("Error during vector search:", error);
- throw error;
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_MONGODB_VECTOR_QUERY_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: {
+ indexName
+ }
+ },
+ error$1
+ );
  }
  }
  async listIndexes() {
- const collections = await this.db.listCollections().toArray();
- return collections.map((col) => col.name);
+ try {
+ const collections = await this.db.listCollections().toArray();
+ return collections.map((col) => col.name);
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_MONGODB_VECTOR_LIST_INDEXES_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY
+ },
+ error$1
+ );
+ }
  }
  /**
  * Retrieves statistics about a vector index.
@@ -294,24 +408,52 @@ var MongoDBVector = class extends vector.MastraVector {
  * @returns A promise that resolves to the index statistics including dimension, count and metric
  */
  async describeIndex({ indexName }) {
- const collection = await this.getCollection(indexName, true);
- const count = await collection.countDocuments({ _id: { $ne: "__index_metadata__" } });
- const metadataDoc = await collection.findOne({ _id: "__index_metadata__" });
- const dimension = metadataDoc?.dimension || 0;
- const metric = metadataDoc?.metric || "cosine";
- return {
- dimension,
- count,
- metric
- };
+ try {
+ const collection = await this.getCollection(indexName, true);
+ const count = await collection.countDocuments({ _id: { $ne: "__index_metadata__" } });
+ const metadataDoc = await collection.findOne({ _id: "__index_metadata__" });
+ const dimension = metadataDoc?.dimension || 0;
+ const metric = metadataDoc?.metric || "cosine";
+ return {
+ dimension,
+ count,
+ metric
+ };
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_MONGODB_VECTOR_DESCRIBE_INDEX_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: {
+ indexName
+ }
+ },
+ error$1
+ );
+ }
  }
  async deleteIndex({ indexName }) {
  const collection = await this.getCollection(indexName, false);
- if (collection) {
- await collection.drop();
- this.collections.delete(indexName);
- } else {
- throw new Error(`Index (Collection) "${indexName}" does not exist`);
+ try {
+ if (collection) {
+ await collection.drop();
+ this.collections.delete(indexName);
+ } else {
+ throw new Error(`Index (Collection) "${indexName}" does not exist`);
+ }
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_MONGODB_VECTOR_DELETE_INDEX_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: {
+ indexName
+ }
+ },
+ error$1
+ );
  }
  }
  /**
@@ -347,8 +489,19 @@ var MongoDBVector = class extends vector.MastraVector {
  updateDoc[this.metadataFieldName] = normalizedMeta;
  }
  await collection.findOneAndUpdate({ _id: id }, { $set: updateDoc });
- } catch (error) {
- throw new Error(`Failed to update vector by id: ${id} for index name: ${indexName}: ${error.message}`);
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_MONGODB_VECTOR_UPDATE_VECTOR_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: {
+ indexName,
+ id
+ }
+ },
+ error$1
+ );
  }
  }
  /**
@@ -362,8 +515,19 @@
  try {
  const collection = await this.getCollection(indexName, true);
  await collection.deleteOne({ _id: id });
- } catch (error) {
- throw new Error(`Failed to delete vector by id: ${id} for index name: ${indexName}: ${error.message}`);
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_MONGODB_VECTOR_DELETE_VECTOR_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: {
+ indexName,
+ id
+ }
+ },
+ error$1
+ );
  }
  }
  // Private methods
@@ -404,21 +568,19 @@
  return translator.translate(filter);
  }
  };
- function safelyParseJSON(jsonString) {
- try {
- return JSON.parse(jsonString);
- } catch {
- return {};
- }
- }
- var MongoDBStore = class extends storage.MastraStorage {
- #isConnected = false;
+ var MongoDBConnector = class _MongoDBConnector {
  #client;
- #db;
  #dbName;
- constructor(config) {
- super({ name: "MongoDBStore" });
+ #handler;
+ #isConnected;
+ #db;
+ constructor(options) {
+ this.#client = options.client;
+ this.#dbName = options.dbName;
+ this.#handler = options.handler;
  this.#isConnected = false;
+ }
+ static fromDatabaseConfig(config) {
  if (!config.url?.trim().length) {
  throw new Error(
  "MongoDBStore: url must be provided and cannot be empty. Passing an empty string may cause fallback to local MongoDB defaults."
@@ -429,321 +591,1248 @@ var MongoDBStore = class extends storage.MastraStorage {
429
591
  "MongoDBStore: dbName must be provided and cannot be empty. Passing an empty string may cause fallback to local MongoDB defaults."
430
592
  );
431
593
  }
432
- this.#dbName = config.dbName;
433
- this.#client = new mongodb.MongoClient(config.url, config.options);
594
+ return new _MongoDBConnector({
595
+ client: new mongodb.MongoClient(config.url, config.options),
596
+ dbName: config.dbName,
597
+ handler: void 0
598
+ });
599
+ }
600
+ static fromConnectionHandler(handler) {
601
+ return new _MongoDBConnector({
602
+ client: void 0,
603
+ dbName: void 0,
604
+ handler
605
+ });
434
606
  }
435
607
  async getConnection() {
436
- if (this.#isConnected) {
608
+ if (this.#client) {
609
+ if (this.#isConnected && this.#db) {
610
+ return this.#db;
611
+ }
612
+ await this.#client.connect();
613
+ this.#db = this.#client.db(this.#dbName);
614
+ this.#isConnected = true;
437
615
  return this.#db;
438
616
  }
439
- await this.#client.connect();
440
- this.#db = this.#client.db(this.#dbName);
441
- this.#isConnected = true;
442
- return this.#db;
617
+ throw new Error("MongoDBStore: client cannot be empty. Check your MongoDBConnector configuration.");
443
618
  }
444
619
  async getCollection(collectionName) {
620
+ if (this.#handler) {
621
+ return this.#handler.getCollection(collectionName);
622
+ }
445
623
  const db = await this.getConnection();
446
624
  return db.collection(collectionName);
447
625
  }
448
- async createTable() {
449
- }
450
- /**
451
- * No-op: This backend is schemaless and does not require schema changes.
452
- * @param tableName Name of the table
453
- * @param schema Schema of the table
454
- * @param ifNotExists Array of column names to add if they don't exist
455
- */
456
- async alterTable(_args) {
457
- }
458
- async clearTable({ tableName }) {
459
- try {
460
- const collection = await this.getCollection(tableName);
461
- await collection.deleteMany({});
462
- } catch (error) {
463
- if (error instanceof Error) {
464
- this.logger.error(error.message);
465
- }
466
- }
467
- }
468
- async insert({ tableName, record }) {
469
- try {
470
- const collection = await this.getCollection(tableName);
471
- await collection.insertOne(record);
472
- } catch (error) {
473
- this.logger.error(`Error upserting into table ${tableName}: ${error}`);
474
- throw error;
475
- }
476
- }
477
- async batchInsert({ tableName, records }) {
478
- if (!records.length) {
626
+ async close() {
627
+ if (this.#client) {
628
+ await this.#client.close();
629
+ this.#isConnected = false;
479
630
  return;
480
631
  }
481
- try {
482
- const collection = await this.getCollection(tableName);
483
- await collection.insertMany(records);
484
- } catch (error) {
485
- this.logger.error(`Error upserting into table ${tableName}: ${error}`);
486
- throw error;
632
+ if (this.#handler) {
633
+ await this.#handler.close();
487
634
  }
488
635
  }
489
- async load({ tableName, keys }) {
490
- this.logger.info(`Loading ${tableName} with keys ${JSON.stringify(keys)}`);
636
+ };
637
+ function transformEvalRow(row) {
638
+ let testInfoValue = null;
639
+ if (row.test_info) {
491
640
  try {
492
- const collection = await this.getCollection(tableName);
493
- return await collection.find(keys).toArray();
494
- } catch (error) {
495
- this.logger.error(`Error loading ${tableName} with keys ${JSON.stringify(keys)}: ${error}`);
496
- throw error;
641
+ testInfoValue = typeof row.test_info === "string" ? storage.safelyParseJSON(row.test_info) : row.test_info;
642
+ } catch (e) {
643
+ console.warn("Failed to parse test_info:", e);
497
644
  }
498
645
  }
499
- async getThreadById({ threadId }) {
500
- try {
501
- const collection = await this.getCollection(storage.TABLE_THREADS);
502
- const result = await collection.findOne({ id: threadId });
503
- if (!result) {
504
- return null;
505
- }
506
- return {
507
- ...result,
508
- metadata: typeof result.metadata === "string" ? JSON.parse(result.metadata) : result.metadata
509
- };
510
- } catch (error) {
511
- this.logger.error(`Error loading thread with ID ${threadId}: ${error}`);
512
- throw error;
513
- }
646
+ let resultValue;
647
+ try {
648
+ resultValue = typeof row.result === "string" ? storage.safelyParseJSON(row.result) : row.result;
649
+ } catch (e) {
650
+ console.warn("Failed to parse result:", e);
651
+ throw new Error("Invalid result format");
514
652
  }
515
- async getThreadsByResourceId({ resourceId }) {
653
+ return {
654
+ agentName: row.agent_name,
655
+ input: row.input,
656
+ output: row.output,
657
+ result: resultValue,
658
+ metricName: row.metric_name,
659
+ instructions: row.instructions,
660
+ testInfo: testInfoValue,
661
+ globalRunId: row.global_run_id,
662
+ runId: row.run_id,
663
+ createdAt: row.createdAt
664
+ };
665
+ }
666
+ var LegacyEvalsMongoDB = class extends storage.LegacyEvalsStorage {
667
+ operations;
668
+ constructor({ operations }) {
669
+ super();
670
+ this.operations = operations;
671
+ }
672
+ /** @deprecated use getEvals instead */
673
+ async getEvalsByAgentName(agentName, type) {
516
674
  try {
517
- const collection = await this.getCollection(storage.TABLE_THREADS);
518
- const results = await collection.find({ resourceId }).toArray();
519
- if (!results.length) {
675
+ const query = {
676
+ agent_name: agentName
677
+ };
678
+ if (type === "test") {
679
+ query["test_info"] = { $ne: null };
680
+ }
681
+ if (type === "live") {
682
+ query["test_info"] = null;
683
+ }
684
+ const collection = await this.operations.getCollection(storage.TABLE_EVALS);
685
+ const documents = await collection.find(query).sort({ created_at: "desc" }).toArray();
686
+ const result = documents.map((row) => transformEvalRow(row));
687
+ return result.filter((row) => {
688
+ if (type === "live") {
689
+ return !Boolean(row.testInfo?.testPath);
690
+ }
691
+ if (type === "test") {
692
+ return row.testInfo?.testPath !== null;
693
+ }
694
+ return true;
695
+ });
696
+ } catch (error$1) {
697
+ if (error$1 instanceof Error && error$1.message.includes("no such table")) {
520
698
  return [];
521
699
  }
522
- return results.map((result) => ({
523
- ...result,
524
- metadata: typeof result.metadata === "string" ? JSON.parse(result.metadata) : result.metadata
525
- }));
526
- } catch (error) {
527
- this.logger.error(`Error loading threads by resourceId ${resourceId}: ${error}`);
528
- throw error;
529
- }
530
- }
531
- async saveThread({ thread }) {
532
- try {
533
- const collection = await this.getCollection(storage.TABLE_THREADS);
534
- await collection.updateOne(
535
- { id: thread.id },
700
+ throw new error.MastraError(
536
701
  {
537
- $set: {
538
- ...thread,
539
- metadata: JSON.stringify(thread.metadata)
540
- }
702
+ id: "STORAGE_MONGODB_STORE_GET_EVALS_BY_AGENT_NAME_FAILED",
703
+ domain: error.ErrorDomain.STORAGE,
704
+ category: error.ErrorCategory.THIRD_PARTY,
705
+ details: { agentName }
541
706
  },
542
- { upsert: true }
707
+ error$1
543
708
  );
544
- return thread;
545
- } catch (error) {
546
- this.logger.error(`Error saving thread ${thread.id}: ${error}`);
547
- throw error;
548
709
  }
549
710
  }
550
- async updateThread({
551
- id,
552
- title,
553
- metadata
554
- }) {
555
- const thread = await this.getThreadById({ threadId: id });
556
- if (!thread) {
557
- throw new Error(`Thread ${id} not found`);
711
+ async getEvals(options = {}) {
712
+ const { agentName, type, page = 0, perPage = 100, dateRange } = options;
713
+ const fromDate = dateRange?.start;
714
+ const toDate = dateRange?.end;
715
+ const currentOffset = page * perPage;
716
+ const query = {};
717
+ if (agentName) {
718
+ query["agent_name"] = agentName;
558
719
  }
559
- const updatedThread = {
560
- ...thread,
561
- title,
562
- metadata: {
563
- ...thread.metadata,
564
- ...metadata
720
+ if (type === "test") {
721
+ query["test_info"] = { $ne: null };
722
+ } else if (type === "live") {
723
+ query["test_info"] = null;
724
+ }
725
+ if (fromDate || toDate) {
726
+ query["createdAt"] = {};
727
+ if (fromDate) {
728
+ query["createdAt"]["$gte"] = fromDate;
565
729
  }
566
- };
730
+ if (toDate) {
731
+ query["createdAt"]["$lte"] = toDate;
732
+ }
733
+ }
567
734
  try {
568
- const collection = await this.getCollection(storage.TABLE_THREADS);
569
- await collection.updateOne(
570
- { id },
735
+ const collection = await this.operations.getCollection(storage.TABLE_EVALS);
736
+ let total = 0;
737
+ if (page === 0 || perPage < 1e3) {
738
+ total = await collection.countDocuments(query);
739
+ }
740
+ if (total === 0) {
741
+ return {
742
+ evals: [],
743
+ total: 0,
744
+ page,
745
+ perPage,
746
+ hasMore: false
747
+ };
748
+ }
749
+ const documents = await collection.find(query).sort({ created_at: "desc" }).skip(currentOffset).limit(perPage).toArray();
750
+ const evals = documents.map((row) => transformEvalRow(row));
751
+ const filteredEvals = evals.filter((row) => {
752
+ if (type === "live") {
753
+ return !Boolean(row.testInfo?.testPath);
754
+ }
755
+ if (type === "test") {
756
+ return row.testInfo?.testPath !== null;
757
+ }
758
+ return true;
759
+ });
760
+ const hasMore = currentOffset + filteredEvals.length < total;
761
+ return {
762
+ evals: filteredEvals,
763
+ total,
764
+ page,
765
+ perPage,
766
+ hasMore
767
+ };
768
+ } catch (error$1) {
769
+ throw new error.MastraError(
571
770
  {
572
- $set: {
573
- title,
574
- metadata: JSON.stringify(updatedThread.metadata)
771
+ id: "STORAGE_MONGODB_STORE_GET_EVALS_FAILED",
772
+ domain: error.ErrorDomain.STORAGE,
773
+ category: error.ErrorCategory.THIRD_PARTY,
774
+ details: {
775
+ agentName: agentName || "all",
776
+ type: type || "all",
777
+ page,
778
+ perPage
575
779
  }
576
- }
780
+ },
781
+ error$1
577
782
  );
578
- } catch (error) {
579
- this.logger.error(`Error updating thread ${id}:) ${error}`);
580
- throw error;
581
783
  }
582
- return updatedThread;
583
784
  }
584
- async deleteThread({ threadId }) {
785
+ };
786
+
787
+ // src/storage/domains/utils.ts
788
+ function formatDateForMongoDB(date) {
789
+ return typeof date === "string" ? new Date(date) : date;
790
+ }
791
+
792
+ // src/storage/domains/memory/index.ts
793
+ var MemoryStorageMongoDB = class extends storage.MemoryStorage {
794
+ operations;
795
+ constructor({ operations }) {
796
+ super();
797
+ this.operations = operations;
798
+ }
799
+ parseRow(row) {
800
+ let content = row.content;
801
+ if (typeof content === "string") {
802
+ try {
803
+ content = JSON.parse(content);
804
+ } catch {
805
+ }
806
+ }
807
+ const result = {
808
+ id: row.id,
809
+ content,
810
+ role: row.role,
811
+ createdAt: formatDateForMongoDB(row.createdAt),
812
+ threadId: row.thread_id,
813
+ resourceId: row.resourceId
814
+ };
815
+ if (row.type && row.type !== "v2") result.type = row.type;
816
+ return result;
817
+ }
818
+ async _getIncludedMessages({
819
+ threadId,
820
+ selectBy
821
+ }) {
822
+ const include = selectBy?.include;
823
+ if (!include) return null;
824
+ const collection = await this.operations.getCollection(storage.TABLE_MESSAGES);
825
+ const includedMessages = [];
826
+ for (const inc of include) {
827
+ const { id, withPreviousMessages = 0, withNextMessages = 0 } = inc;
828
+ const searchThreadId = inc.threadId || threadId;
829
+ const allMessages = await collection.find({ thread_id: searchThreadId }).sort({ createdAt: 1 }).toArray();
830
+ const targetIndex = allMessages.findIndex((msg) => msg.id === id);
831
+ if (targetIndex === -1) continue;
832
+ const startIndex = Math.max(0, targetIndex - withPreviousMessages);
833
+ const endIndex = Math.min(allMessages.length - 1, targetIndex + withNextMessages);
834
+ for (let i = startIndex; i <= endIndex; i++) {
835
+ includedMessages.push(allMessages[i]);
836
+ }
837
+ }
838
+ const seen = /* @__PURE__ */ new Set();
839
+ const dedupedMessages = includedMessages.filter((msg) => {
840
+ if (seen.has(msg.id)) return false;
841
+ seen.add(msg.id);
842
+ return true;
843
+ });
844
+ return dedupedMessages.map((row) => this.parseRow(row));
845
+ }
846
+ async getMessages({
847
+ threadId,
848
+ selectBy,
849
+ format
850
+ }) {
851
+ try {
852
+ const messages = [];
853
+ const limit = storage.resolveMessageLimit({ last: selectBy?.last, defaultLimit: 40 });
854
+ if (selectBy?.include?.length) {
855
+ const includeMessages = await this._getIncludedMessages({ threadId, selectBy });
856
+ if (includeMessages) {
857
+ messages.push(...includeMessages);
858
+ }
859
+ }
860
+ const excludeIds = messages.map((m) => m.id);
861
+ const collection = await this.operations.getCollection(storage.TABLE_MESSAGES);
862
+ const query = { thread_id: threadId };
863
+ if (excludeIds.length > 0) {
864
+ query.id = { $nin: excludeIds };
865
+ }
866
+ if (limit > 0) {
867
+ const remainingMessages = await collection.find(query).sort({ createdAt: -1 }).limit(limit).toArray();
868
+ messages.push(...remainingMessages.map((row) => this.parseRow(row)));
869
+ }
870
+ messages.sort((a, b) => a.createdAt.getTime() - b.createdAt.getTime());
871
+ const list = new agent.MessageList().add(messages, "memory");
872
+ if (format === "v2") return list.get.all.v2();
873
+ return list.get.all.v1();
874
+ } catch (error$1) {
875
+ throw new error.MastraError(
876
+ {
877
+ id: "MONGODB_STORE_GET_MESSAGES_FAILED",
878
+ domain: error.ErrorDomain.STORAGE,
879
+ category: error.ErrorCategory.THIRD_PARTY,
880
+ details: { threadId }
881
+ },
882
+ error$1
883
+ );
884
+ }
885
+ }
886
+ async getMessagesPaginated(args) {
887
+ const { threadId, format, selectBy } = args;
888
+ const { page = 0, perPage: perPageInput, dateRange } = selectBy?.pagination || {};
889
+ const perPage = perPageInput !== void 0 ? perPageInput : storage.resolveMessageLimit({ last: selectBy?.last, defaultLimit: 40 });
890
+ const fromDate = dateRange?.start;
891
+ const toDate = dateRange?.end;
892
+ const messages = [];
893
+ if (selectBy?.include?.length) {
894
+ try {
895
+ const includeMessages = await this._getIncludedMessages({ threadId, selectBy });
896
+ if (includeMessages) {
897
+ messages.push(...includeMessages);
898
+ }
899
+ } catch (error$1) {
900
+ throw new error.MastraError(
901
+ {
902
+ id: "MONGODB_STORE_GET_MESSAGES_PAGINATED_GET_INCLUDE_MESSAGES_FAILED",
903
+ domain: error.ErrorDomain.STORAGE,
904
+ category: error.ErrorCategory.THIRD_PARTY,
905
+ details: { threadId }
906
+ },
907
+ error$1
908
+ );
909
+ }
910
+ }
911
+ try {
912
+ const currentOffset = page * perPage;
913
+ const collection = await this.operations.getCollection(storage.TABLE_MESSAGES);
914
+ const query = { thread_id: threadId };
915
+ if (fromDate) {
916
+ query.createdAt = { ...query.createdAt, $gte: fromDate };
917
+ }
918
+ if (toDate) {
919
+ query.createdAt = { ...query.createdAt, $lte: toDate };
920
+ }
921
+ const total = await collection.countDocuments(query);
922
+ if (total === 0 && messages.length === 0) {
923
+ return {
924
+ messages: [],
925
+ total: 0,
926
+ page,
927
+ perPage,
928
+ hasMore: false
929
+ };
930
+ }
931
+ const excludeIds = messages.map((m) => m.id);
932
+ if (excludeIds.length > 0) {
933
+ query.id = { $nin: excludeIds };
934
+ }
935
+ const dataResult = await collection.find(query).sort({ createdAt: -1 }).skip(currentOffset).limit(perPage).toArray();
936
+ messages.push(...dataResult.map((row) => this.parseRow(row)));
937
+ const messagesToReturn = format === "v1" ? new agent.MessageList().add(messages, "memory").get.all.v1() : new agent.MessageList().add(messages, "memory").get.all.v2();
938
+ return {
939
+ messages: messagesToReturn,
940
+ total,
941
+ page,
942
+ perPage,
943
+ hasMore: (page + 1) * perPage < total
944
+ };
945
+ } catch (error$1) {
946
+ const mastraError = new error.MastraError(
947
+ {
948
+ id: "MONGODB_STORE_GET_MESSAGES_PAGINATED_FAILED",
949
+ domain: error.ErrorDomain.STORAGE,
950
+ category: error.ErrorCategory.THIRD_PARTY,
951
+ details: { threadId }
952
+ },
953
+ error$1
954
+ );
955
+ this.logger?.trackException?.(mastraError);
956
+ this.logger?.error?.(mastraError.toString());
957
+ return { messages: [], total: 0, page, perPage, hasMore: false };
958
+ }
959
+ }
960
+ async saveMessages({
961
+ messages,
962
+ format
963
+ }) {
964
+ if (messages.length === 0) return messages;
965
+ try {
966
+ const threadId = messages[0]?.threadId;
967
+ if (!threadId) {
968
+ throw new Error("Thread ID is required");
969
+ }
970
+ const collection = await this.operations.getCollection(storage.TABLE_MESSAGES);
971
+ const threadsCollection = await this.operations.getCollection(storage.TABLE_THREADS);
972
+ const messagesToInsert = messages.map((message) => {
973
+ const time = message.createdAt || /* @__PURE__ */ new Date();
974
+ if (!message.threadId) {
975
+ throw new Error(
976
+ "Expected to find a threadId for message, but couldn't find one. An unexpected error has occurred."
977
+ );
978
+ }
979
+ if (!message.resourceId) {
980
+ throw new Error(
981
+ "Expected to find a resourceId for message, but couldn't find one. An unexpected error has occurred."
982
+ );
983
+ }
984
+ return {
985
+ updateOne: {
986
+ filter: { id: message.id },
987
+ update: {
988
+ $set: {
989
+ id: message.id,
990
+ thread_id: message.threadId,
991
+ content: typeof message.content === "object" ? JSON.stringify(message.content) : message.content,
992
+ role: message.role,
993
+ type: message.type || "v2",
994
+ createdAt: formatDateForMongoDB(time),
995
+ resourceId: message.resourceId
996
+ }
997
+ },
998
+ upsert: true
999
+ }
1000
+ };
1001
+ });
1002
+ await Promise.all([
1003
+ collection.bulkWrite(messagesToInsert),
1004
+ threadsCollection.updateOne({ id: threadId }, { $set: { updatedAt: /* @__PURE__ */ new Date() } })
1005
+ ]);
1006
+ const list = new agent.MessageList().add(messages, "memory");
1007
+ if (format === "v2") return list.get.all.v2();
1008
+ return list.get.all.v1();
1009
+ } catch (error$1) {
1010
+ throw new error.MastraError(
1011
+ {
1012
+ id: "MONGODB_STORE_SAVE_MESSAGES_FAILED",
1013
+ domain: error.ErrorDomain.STORAGE,
1014
+ category: error.ErrorCategory.THIRD_PARTY
1015
+ },
1016
+ error$1
1017
+ );
1018
+ }
1019
+ }
1020
+ async updateMessages({
1021
+ messages
1022
+ }) {
1023
+ if (messages.length === 0) {
1024
+ return [];
1025
+ }
1026
+ const messageIds = messages.map((m) => m.id);
1027
+ const collection = await this.operations.getCollection(storage.TABLE_MESSAGES);
1028
+ const existingMessages = await collection.find({ id: { $in: messageIds } }).toArray();
1029
+ const existingMessagesParsed = existingMessages.map((msg) => this.parseRow(msg));
1030
+ if (existingMessagesParsed.length === 0) {
1031
+ return [];
1032
+ }
1033
+ const threadIdsToUpdate = /* @__PURE__ */ new Set();
1034
+ const bulkOps = [];
1035
+ for (const existingMessage of existingMessagesParsed) {
1036
+ const updatePayload = messages.find((m) => m.id === existingMessage.id);
1037
+ if (!updatePayload) continue;
1038
+ const { id, ...fieldsToUpdate } = updatePayload;
1039
+ if (Object.keys(fieldsToUpdate).length === 0) continue;
1040
+ threadIdsToUpdate.add(existingMessage.threadId);
1041
+ if (updatePayload.threadId && updatePayload.threadId !== existingMessage.threadId) {
1042
+ threadIdsToUpdate.add(updatePayload.threadId);
1043
+ }
1044
+ const updateDoc = {};
1045
+ const updatableFields = { ...fieldsToUpdate };
1046
+ if (updatableFields.content) {
1047
+ const newContent = {
1048
+ ...existingMessage.content,
1049
+ ...updatableFields.content,
1050
+ // Deep merge metadata if it exists on both
1051
+ ...existingMessage.content?.metadata && updatableFields.content.metadata ? {
1052
+ metadata: {
1053
+ ...existingMessage.content.metadata,
1054
+ ...updatableFields.content.metadata
1055
+ }
1056
+ } : {}
1057
+ };
1058
+ updateDoc.content = JSON.stringify(newContent);
1059
+ delete updatableFields.content;
1060
+ }
1061
+ for (const key in updatableFields) {
1062
+ if (Object.prototype.hasOwnProperty.call(updatableFields, key)) {
1063
+ const dbKey = key === "threadId" ? "thread_id" : key;
1064
+ let value = updatableFields[key];
1065
+ if (typeof value === "object" && value !== null) {
1066
+ value = JSON.stringify(value);
1067
+ }
1068
+ updateDoc[dbKey] = value;
1069
+ }
1070
+ }
1071
+ if (Object.keys(updateDoc).length > 0) {
1072
+ bulkOps.push({
1073
+ updateOne: {
1074
+ filter: { id },
1075
+ update: { $set: updateDoc }
1076
+ }
1077
+ });
1078
+ }
1079
+ }
1080
+ if (bulkOps.length > 0) {
1081
+ await collection.bulkWrite(bulkOps);
1082
+ }
1083
+ if (threadIdsToUpdate.size > 0) {
1084
+ const threadsCollection = await this.operations.getCollection(storage.TABLE_THREADS);
1085
+ await threadsCollection.updateMany(
1086
+ { id: { $in: Array.from(threadIdsToUpdate) } },
1087
+ { $set: { updatedAt: /* @__PURE__ */ new Date() } }
1088
+ );
1089
+ }
1090
+ const updatedMessages = await collection.find({ id: { $in: messageIds } }).toArray();
1091
+ return updatedMessages.map((row) => this.parseRow(row));
1092
+ }
1093
+ async getResourceById({ resourceId }) {
1094
+ try {
1095
+ const collection = await this.operations.getCollection(storage.TABLE_RESOURCES);
1096
+ const result = await collection.findOne({ id: resourceId });
1097
+ if (!result) {
1098
+ return null;
1099
+ }
1100
+ return {
1101
+ id: result.id,
1102
+ workingMemory: result.workingMemory || "",
1103
+ metadata: typeof result.metadata === "string" ? storage.safelyParseJSON(result.metadata) : result.metadata,
1104
+ createdAt: formatDateForMongoDB(result.createdAt),
1105
+ updatedAt: formatDateForMongoDB(result.updatedAt)
1106
+ };
1107
+ } catch (error$1) {
1108
+ throw new error.MastraError(
1109
+ {
1110
+ id: "STORAGE_MONGODB_STORE_GET_RESOURCE_BY_ID_FAILED",
1111
+ domain: error.ErrorDomain.STORAGE,
1112
+ category: error.ErrorCategory.THIRD_PARTY,
1113
+ details: { resourceId }
1114
+ },
1115
+ error$1
1116
+ );
1117
+ }
1118
+ }
1119
+ async saveResource({ resource }) {
1120
+ try {
1121
+ const collection = await this.operations.getCollection(storage.TABLE_RESOURCES);
1122
+ await collection.updateOne(
1123
+ { id: resource.id },
1124
+ {
1125
+ $set: {
1126
+ ...resource,
1127
+ metadata: JSON.stringify(resource.metadata)
1128
+ }
1129
+ },
1130
+ { upsert: true }
1131
+ );
1132
+ return resource;
1133
+ } catch (error$1) {
1134
+ throw new error.MastraError(
1135
+ {
1136
+ id: "STORAGE_MONGODB_STORE_SAVE_RESOURCE_FAILED",
1137
+ domain: error.ErrorDomain.STORAGE,
1138
+ category: error.ErrorCategory.THIRD_PARTY,
1139
+ details: { resourceId: resource.id }
1140
+ },
1141
+ error$1
1142
+ );
1143
+ }
1144
+ }
1145
+ async updateResource({
1146
+ resourceId,
1147
+ workingMemory,
1148
+ metadata
1149
+ }) {
1150
+ try {
1151
+ const existingResource = await this.getResourceById({ resourceId });
1152
+ if (!existingResource) {
1153
+ const newResource = {
1154
+ id: resourceId,
1155
+ workingMemory: workingMemory || "",
1156
+ metadata: metadata || {},
1157
+ createdAt: /* @__PURE__ */ new Date(),
1158
+ updatedAt: /* @__PURE__ */ new Date()
1159
+ };
1160
+ return this.saveResource({ resource: newResource });
1161
+ }
1162
+ const updatedResource = {
1163
+ ...existingResource,
1164
+ workingMemory: workingMemory !== void 0 ? workingMemory : existingResource.workingMemory,
1165
+ metadata: metadata ? { ...existingResource.metadata, ...metadata } : existingResource.metadata,
1166
+ updatedAt: /* @__PURE__ */ new Date()
1167
+ };
1168
+ const collection = await this.operations.getCollection(storage.TABLE_RESOURCES);
1169
+ const updateDoc = { updatedAt: updatedResource.updatedAt };
1170
+ if (workingMemory !== void 0) {
1171
+ updateDoc.workingMemory = workingMemory;
1172
+ }
1173
+ if (metadata) {
1174
+ updateDoc.metadata = JSON.stringify(updatedResource.metadata);
1175
+ }
1176
+ await collection.updateOne({ id: resourceId }, { $set: updateDoc });
1177
+ return updatedResource;
1178
+ } catch (error$1) {
1179
+ throw new error.MastraError(
1180
+ {
1181
+ id: "STORAGE_MONGODB_STORE_UPDATE_RESOURCE_FAILED",
1182
+ domain: error.ErrorDomain.STORAGE,
1183
+ category: error.ErrorCategory.THIRD_PARTY,
1184
+ details: { resourceId }
1185
+ },
1186
+ error$1
1187
+ );
1188
+ }
1189
+ }
1190
+ async getThreadById({ threadId }) {
585
1191
  try {
586
- const collectionMessages = await this.getCollection(storage.TABLE_MESSAGES);
1192
+ const collection = await this.operations.getCollection(storage.TABLE_THREADS);
1193
+ const result = await collection.findOne({ id: threadId });
1194
+ if (!result) {
1195
+ return null;
1196
+ }
1197
+ return {
1198
+ ...result,
1199
+ metadata: typeof result.metadata === "string" ? storage.safelyParseJSON(result.metadata) : result.metadata
1200
+ };
1201
+ } catch (error$1) {
1202
+ throw new error.MastraError(
1203
+ {
1204
+ id: "STORAGE_MONGODB_STORE_GET_THREAD_BY_ID_FAILED",
1205
+ domain: error.ErrorDomain.STORAGE,
1206
+ category: error.ErrorCategory.THIRD_PARTY,
1207
+ details: { threadId }
1208
+ },
1209
+ error$1
1210
+ );
1211
+ }
1212
+ }
1213
+ async getThreadsByResourceId({ resourceId }) {
1214
+ try {
1215
+ const collection = await this.operations.getCollection(storage.TABLE_THREADS);
1216
+ const results = await collection.find({ resourceId }).sort({ updatedAt: -1 }).toArray();
1217
+ if (!results.length) {
1218
+ return [];
1219
+ }
1220
+ return results.map((result) => ({
1221
+ ...result,
1222
+ metadata: typeof result.metadata === "string" ? storage.safelyParseJSON(result.metadata) : result.metadata
1223
+ }));
1224
+ } catch (error$1) {
1225
+ throw new error.MastraError(
1226
+ {
1227
+ id: "STORAGE_MONGODB_STORE_GET_THREADS_BY_RESOURCE_ID_FAILED",
1228
+ domain: error.ErrorDomain.STORAGE,
1229
+ category: error.ErrorCategory.THIRD_PARTY,
1230
+ details: { resourceId }
1231
+ },
1232
+ error$1
1233
+ );
1234
+ }
1235
+ }
1236
+ async getThreadsByResourceIdPaginated(args) {
1237
+ try {
1238
+ const { resourceId, page, perPage } = args;
1239
+ const collection = await this.operations.getCollection(storage.TABLE_THREADS);
1240
+ const query = { resourceId };
1241
+ const total = await collection.countDocuments(query);
1242
+ const threads = await collection.find(query).sort({ updatedAt: -1 }).skip(page * perPage).limit(perPage).toArray();
1243
+ return {
1244
+ threads: threads.map((thread) => ({
1245
+ id: thread.id,
1246
+ title: thread.title,
1247
+ resourceId: thread.resourceId,
1248
+ createdAt: formatDateForMongoDB(thread.createdAt),
1249
+ updatedAt: formatDateForMongoDB(thread.updatedAt),
1250
+ metadata: thread.metadata || {}
1251
+ })),
1252
+ total,
1253
+ page,
1254
+ perPage,
1255
+ hasMore: (page + 1) * perPage < total
1256
+ };
1257
+ } catch (error$1) {
1258
+ throw new error.MastraError(
1259
+ {
1260
+ id: "MONGODB_STORE_GET_THREADS_BY_RESOURCE_ID_PAGINATED_FAILED",
1261
+ domain: error.ErrorDomain.STORAGE,
1262
+ category: error.ErrorCategory.THIRD_PARTY,
1263
+ details: { resourceId: args.resourceId }
1264
+ },
1265
+ error$1
1266
+ );
1267
+ }
1268
+ }
1269
+ async saveThread({ thread }) {
1270
+ try {
1271
+ const collection = await this.operations.getCollection(storage.TABLE_THREADS);
1272
+ await collection.updateOne(
1273
+ { id: thread.id },
1274
+ {
1275
+ $set: {
1276
+ ...thread,
1277
+ metadata: thread.metadata
1278
+ }
1279
+ },
1280
+ { upsert: true }
1281
+ );
1282
+ return thread;
1283
+ } catch (error$1) {
1284
+ throw new error.MastraError(
1285
+ {
1286
+ id: "STORAGE_MONGODB_STORE_SAVE_THREAD_FAILED",
1287
+ domain: error.ErrorDomain.STORAGE,
1288
+ category: error.ErrorCategory.THIRD_PARTY,
1289
+ details: { threadId: thread.id }
1290
+ },
1291
+ error$1
1292
+ );
1293
+ }
1294
+ }
1295
+ async updateThread({
1296
+ id,
1297
+ title,
1298
+ metadata
1299
+ }) {
1300
+ const thread = await this.getThreadById({ threadId: id });
1301
+ if (!thread) {
1302
+ throw new error.MastraError({
1303
+ id: "STORAGE_MONGODB_STORE_UPDATE_THREAD_NOT_FOUND",
1304
+ domain: error.ErrorDomain.STORAGE,
1305
+ category: error.ErrorCategory.THIRD_PARTY,
1306
+ details: { threadId: id, status: 404 },
1307
+ text: `Thread ${id} not found`
1308
+ });
1309
+ }
1310
+ const updatedThread = {
1311
+ ...thread,
1312
+ title,
1313
+ metadata: {
1314
+ ...thread.metadata,
1315
+ ...metadata
1316
+ }
1317
+ };
1318
+ try {
1319
+ const collection = await this.operations.getCollection(storage.TABLE_THREADS);
1320
+ await collection.updateOne(
1321
+ { id },
1322
+ {
1323
+ $set: {
1324
+ title,
1325
+ metadata: updatedThread.metadata
1326
+ }
1327
+ }
1328
+ );
1329
+ } catch (error$1) {
1330
+ throw new error.MastraError(
1331
+ {
1332
+ id: "STORAGE_MONGODB_STORE_UPDATE_THREAD_FAILED",
1333
+ domain: error.ErrorDomain.STORAGE,
1334
+ category: error.ErrorCategory.THIRD_PARTY,
1335
+ details: { threadId: id }
1336
+ },
1337
+ error$1
1338
+ );
1339
+ }
1340
+ return updatedThread;
1341
+ }
1342
+ async deleteThread({ threadId }) {
1343
+ try {
1344
+ const collectionMessages = await this.operations.getCollection(storage.TABLE_MESSAGES);
587
1345
  await collectionMessages.deleteMany({ thread_id: threadId });
588
- const collectionThreads = await this.getCollection(storage.TABLE_THREADS);
1346
+ const collectionThreads = await this.operations.getCollection(storage.TABLE_THREADS);
589
1347
  await collectionThreads.deleteOne({ id: threadId });
590
- } catch (error) {
591
- this.logger.error(`Error deleting thread ${threadId}: ${error}`);
592
- throw error;
1348
+ } catch (error$1) {
1349
+ throw new error.MastraError(
1350
+ {
1351
+ id: "STORAGE_MONGODB_STORE_DELETE_THREAD_FAILED",
1352
+ domain: error.ErrorDomain.STORAGE,
1353
+ category: error.ErrorCategory.THIRD_PARTY,
1354
+ details: { threadId }
1355
+ },
1356
+ error$1
1357
+ );
1358
+ }
1359
+ }
1360
+ };
1361
+ var StoreOperationsMongoDB = class extends storage.StoreOperations {
1362
+ #connector;
1363
+ constructor(config) {
1364
+ super();
1365
+ this.#connector = config.connector;
1366
+ }
1367
+ async getCollection(collectionName) {
1368
+ return this.#connector.getCollection(collectionName);
1369
+ }
1370
+ async hasColumn(_table, _column) {
1371
+ return true;
1372
+ }
1373
+ async createTable() {
1374
+ }
1375
+ async alterTable(_args) {
1376
+ }
1377
+ async clearTable({ tableName }) {
1378
+ try {
1379
+ const collection = await this.getCollection(tableName);
1380
+ await collection.deleteMany({});
1381
+ } catch (error$1) {
1382
+ if (error$1 instanceof Error) {
1383
+ const matstraError = new error.MastraError(
1384
+ {
1385
+ id: "STORAGE_MONGODB_STORE_CLEAR_TABLE_FAILED",
1386
+ domain: error.ErrorDomain.STORAGE,
1387
+ category: error.ErrorCategory.THIRD_PARTY,
1388
+ details: { tableName }
1389
+ },
1390
+ error$1
1391
+ );
1392
+ this.logger.error(matstraError.message);
1393
+ this.logger?.trackException(matstraError);
1394
+ }
1395
+ }
1396
+ }
1397
+ async dropTable({ tableName }) {
1398
+ try {
1399
+ const collection = await this.getCollection(tableName);
1400
+ await collection.drop();
1401
+ } catch (error$1) {
1402
+ if (error$1 instanceof Error && error$1.message.includes("ns not found")) {
1403
+ return;
1404
+ }
1405
+ throw new error.MastraError(
1406
+ {
1407
+ id: "MONGODB_STORE_DROP_TABLE_FAILED",
1408
+ domain: error.ErrorDomain.STORAGE,
1409
+ category: error.ErrorCategory.THIRD_PARTY,
1410
+ details: { tableName }
1411
+ },
1412
+ error$1
1413
+ );
1414
+ }
1415
+ }
1416
+ processJsonbFields(tableName, record) {
1417
+ const schema = storage.TABLE_SCHEMAS[tableName];
1418
+ return Object.fromEntries(
1419
+ Object.entries(schema).map(([key, value]) => {
1420
+ if (value.type === "jsonb" && record[key] && typeof record[key] === "string") {
1421
+ return [key, storage.safelyParseJSON(record[key])];
1422
+ }
1423
+ return [key, record[key]];
1424
+ })
1425
+ );
1426
+ }
1427
+ async insert({ tableName, record }) {
1428
+ try {
1429
+ const collection = await this.getCollection(tableName);
1430
+ const recordToInsert = this.processJsonbFields(tableName, record);
1431
+ await collection.insertOne(recordToInsert);
1432
+ } catch (error$1) {
1433
+ if (error$1 instanceof Error) {
1434
+ const matstraError = new error.MastraError(
1435
+ {
1436
+ id: "STORAGE_MONGODB_STORE_INSERT_FAILED",
1437
+ domain: error.ErrorDomain.STORAGE,
1438
+ category: error.ErrorCategory.THIRD_PARTY,
1439
+ details: { tableName }
1440
+ },
1441
+ error$1
1442
+ );
1443
+ this.logger.error(matstraError.message);
1444
+ this.logger?.trackException(matstraError);
1445
+ }
1446
+ }
1447
+ }
1448
+ async batchInsert({ tableName, records }) {
1449
+ if (!records.length) {
1450
+ return;
1451
+ }
1452
+ try {
1453
+ const collection = await this.getCollection(tableName);
+ const processedRecords = records.map((record) => this.processJsonbFields(tableName, record));
+ await collection.insertMany(processedRecords);
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_MONGODB_STORE_BATCH_INSERT_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: { tableName }
+ },
+ error$1
+ );
+ }
+ }
+ async load({ tableName, keys }) {
+ this.logger.info(`Loading ${tableName} with keys ${JSON.stringify(keys)}`);
+ try {
+ const collection = await this.getCollection(tableName);
+ return await collection.find(keys).toArray();
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_MONGODB_STORE_LOAD_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: { tableName }
+ },
+ error$1
+ );
+ }
+ }
+ };
+ function transformScoreRow(row) {
+ let scorerValue = null;
+ if (row.scorer) {
+ try {
+ scorerValue = typeof row.scorer === "string" ? storage.safelyParseJSON(row.scorer) : row.scorer;
+ } catch (e) {
+ console.warn("Failed to parse scorer:", e);
+ }
+ }
+ let extractStepResultValue = null;
+ if (row.extractStepResult) {
+ try {
+ extractStepResultValue = typeof row.extractStepResult === "string" ? storage.safelyParseJSON(row.extractStepResult) : row.extractStepResult;
+ } catch (e) {
+ console.warn("Failed to parse extractStepResult:", e);
+ }
+ }
+ let analyzeStepResultValue = null;
+ if (row.analyzeStepResult) {
+ try {
+ analyzeStepResultValue = typeof row.analyzeStepResult === "string" ? storage.safelyParseJSON(row.analyzeStepResult) : row.analyzeStepResult;
+ } catch (e) {
+ console.warn("Failed to parse analyzeStepResult:", e);
+ }
+ }
+ let inputValue = null;
+ if (row.input) {
+ try {
+ inputValue = typeof row.input === "string" ? storage.safelyParseJSON(row.input) : row.input;
+ } catch (e) {
+ console.warn("Failed to parse input:", e);
+ }
+ }
+ let outputValue = null;
+ if (row.output) {
+ try {
+ outputValue = typeof row.output === "string" ? storage.safelyParseJSON(row.output) : row.output;
+ } catch (e) {
+ console.warn("Failed to parse output:", e);
+ }
+ }
+ let entityValue = null;
+ if (row.entity) {
+ try {
+ entityValue = typeof row.entity === "string" ? storage.safelyParseJSON(row.entity) : row.entity;
+ } catch (e) {
+ console.warn("Failed to parse entity:", e);
+ }
+ }
+ let runtimeContextValue = null;
+ if (row.runtimeContext) {
+ try {
+ runtimeContextValue = typeof row.runtimeContext === "string" ? storage.safelyParseJSON(row.runtimeContext) : row.runtimeContext;
+ } catch (e) {
+ console.warn("Failed to parse runtimeContext:", e);
+ }
+ }
+ return {
+ id: row.id,
+ entityId: row.entityId,
+ entityType: row.entityType,
+ scorerId: row.scorerId,
+ traceId: row.traceId,
+ runId: row.runId,
+ scorer: scorerValue,
+ extractStepResult: extractStepResultValue,
+ analyzeStepResult: analyzeStepResultValue,
+ score: row.score,
+ reason: row.reason,
+ extractPrompt: row.extractPrompt,
+ analyzePrompt: row.analyzePrompt,
+ reasonPrompt: row.reasonPrompt,
+ input: inputValue,
+ output: outputValue,
+ additionalContext: row.additionalContext,
+ runtimeContext: runtimeContextValue,
+ entity: entityValue,
+ source: row.source,
+ resourceId: row.resourceId,
+ threadId: row.threadId,
+ createdAt: new Date(row.createdAt),
+ updatedAt: new Date(row.updatedAt)
+ };
+ }
+ var ScoresStorageMongoDB = class extends storage.ScoresStorage {
+ operations;
+ constructor({ operations }) {
+ super();
+ this.operations = operations;
+ }
+ async getScoreById({ id }) {
+ try {
+ const collection = await this.operations.getCollection(storage.TABLE_SCORERS);
+ const document = await collection.findOne({ id });
+ if (!document) {
+ return null;
+ }
+ return transformScoreRow(document);
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_MONGODB_STORE_GET_SCORE_BY_ID_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: { id }
+ },
+ error$1
+ );
+ }
+ }
+ async saveScore(score) {
+ try {
+ const now = /* @__PURE__ */ new Date();
+ const scoreId = `score-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
+ const scoreData = {
+ id: scoreId,
+ entityId: score.entityId,
+ entityType: score.entityType,
+ scorerId: score.scorerId,
+ traceId: score.traceId || "",
+ runId: score.runId,
+ scorer: typeof score.scorer === "string" ? storage.safelyParseJSON(score.scorer) : score.scorer,
+ extractStepResult: typeof score.extractStepResult === "string" ? storage.safelyParseJSON(score.extractStepResult) : score.extractStepResult,
+ analyzeStepResult: typeof score.analyzeStepResult === "string" ? storage.safelyParseJSON(score.analyzeStepResult) : score.analyzeStepResult,
+ score: score.score,
+ reason: score.reason,
+ extractPrompt: score.extractPrompt,
+ analyzePrompt: score.analyzePrompt,
+ reasonPrompt: score.reasonPrompt,
+ input: typeof score.input === "string" ? storage.safelyParseJSON(score.input) : score.input,
+ output: typeof score.output === "string" ? storage.safelyParseJSON(score.output) : score.output,
+ additionalContext: score.additionalContext,
+ runtimeContext: typeof score.runtimeContext === "string" ? storage.safelyParseJSON(score.runtimeContext) : score.runtimeContext,
+ entity: typeof score.entity === "string" ? storage.safelyParseJSON(score.entity) : score.entity,
+ source: score.source,
+ resourceId: score.resourceId || "",
+ threadId: score.threadId || "",
+ createdAt: now,
+ updatedAt: now
+ };
+ const collection = await this.operations.getCollection(storage.TABLE_SCORERS);
+ await collection.insertOne(scoreData);
+ const savedScore = {
+ ...score,
+ id: scoreId,
+ createdAt: now,
+ updatedAt: now
+ };
+ return { score: savedScore };
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_MONGODB_STORE_SAVE_SCORE_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: { scorerId: score.scorerId, runId: score.runId }
+ },
+ error$1
+ );
+ }
+ }
+ async getScoresByScorerId({
+ scorerId,
+ pagination,
+ entityId,
+ entityType
+ }) {
+ try {
+ const query = { scorerId };
+ if (entityId) {
+ query.entityId = entityId;
+ }
+ if (entityType) {
+ query.entityType = entityType;
+ }
+ const collection = await this.operations.getCollection(storage.TABLE_SCORERS);
+ const total = await collection.countDocuments(query);
+ const currentOffset = pagination.page * pagination.perPage;
+ if (total === 0) {
+ return {
+ scores: [],
+ pagination: {
+ total: 0,
+ page: pagination.page,
+ perPage: pagination.perPage,
+ hasMore: false
+ }
+ };
+ }
+ const documents = await collection.find(query).sort({ createdAt: "desc" }).skip(currentOffset).limit(pagination.perPage).toArray();
+ const scores = documents.map((row) => transformScoreRow(row));
+ const hasMore = currentOffset + scores.length < total;
+ return {
+ scores,
+ pagination: {
+ total,
+ page: pagination.page,
+ perPage: pagination.perPage,
+ hasMore
+ }
+ };
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_MONGODB_STORE_GET_SCORES_BY_SCORER_ID_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: { scorerId, page: pagination.page, perPage: pagination.perPage }
+ },
+ error$1
+ );
  }
  }
- async getMessages({
- threadId,
- selectBy,
- format
+ async getScoresByRunId({
+ runId,
+ pagination
  }) {
  try {
- const limit = this.resolveMessageLimit({ last: selectBy?.last, defaultLimit: 40 });
- const include = selectBy?.include || [];
- let messages = [];
- let allMessages = [];
- const collection = await this.getCollection(storage.TABLE_MESSAGES);
- allMessages = (await collection.find({ thread_id: threadId }).sort({ createdAt: -1 }).toArray()).map(
- (row) => this.parseRow(row)
- );
- if (include.length) {
- const idToIndex = /* @__PURE__ */ new Map();
- allMessages.forEach((msg, idx) => {
- idToIndex.set(msg.id, idx);
- });
- const selectedIndexes = /* @__PURE__ */ new Set();
- for (const inc of include) {
- const idx = idToIndex.get(inc.id);
- if (idx === void 0) continue;
- for (let i = 1; i <= (inc.withPreviousMessages || 0); i++) {
- if (idx + i < allMessages.length) selectedIndexes.add(idx + i);
- }
- selectedIndexes.add(idx);
- for (let i = 1; i <= (inc.withNextMessages || 0); i++) {
- if (idx - i >= 0) selectedIndexes.add(idx - i);
+ const collection = await this.operations.getCollection(storage.TABLE_SCORERS);
+ const total = await collection.countDocuments({ runId });
+ const currentOffset = pagination.page * pagination.perPage;
+ if (total === 0) {
+ return {
+ scores: [],
+ pagination: {
+ total: 0,
+ page: pagination.page,
+ perPage: pagination.perPage,
+ hasMore: false
  }
- }
- messages.push(
- ...Array.from(selectedIndexes).map((i) => allMessages[i]).filter((m) => !!m)
- );
+ };
  }
- const excludeIds = new Set(messages.map((m) => m.id));
- for (const msg of allMessages) {
- if (messages.length >= limit) break;
- if (!excludeIds.has(msg.id)) {
- messages.push(msg);
+ const documents = await collection.find({ runId }).sort({ createdAt: "desc" }).skip(currentOffset).limit(pagination.perPage).toArray();
+ const scores = documents.map((row) => transformScoreRow(row));
+ const hasMore = currentOffset + scores.length < total;
+ return {
+ scores,
+ pagination: {
+ total,
+ page: pagination.page,
+ perPage: pagination.perPage,
+ hasMore
  }
- }
- messages.sort((a, b) => a.createdAt.getTime() - b.createdAt.getTime());
- const list = new agent.MessageList().add(messages.slice(0, limit), "memory");
- if (format === `v2`) return list.get.all.v2();
- return list.get.all.v1();
- } catch (error) {
- this.logger.error("Error getting messages:", error);
- throw error;
+ };
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_MONGODB_STORE_GET_SCORES_BY_RUN_ID_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: { runId, page: pagination.page, perPage: pagination.perPage }
+ },
+ error$1
+ );
  }
  }
- async saveMessages({
- messages,
- format
+ async getScoresByEntityId({
+ entityId,
+ entityType,
+ pagination
  }) {
- if (!messages.length) {
- return messages;
- }
- const threadId = messages[0]?.threadId;
- if (!threadId) {
- this.logger.error("Thread ID is required to save messages");
- throw new Error("Thread ID is required");
- }
  try {
- const messagesToInsert = messages.map((message) => {
- const time = message.createdAt || /* @__PURE__ */ new Date();
+ const collection = await this.operations.getCollection(storage.TABLE_SCORERS);
+ const total = await collection.countDocuments({ entityId, entityType });
+ const currentOffset = pagination.page * pagination.perPage;
+ if (total === 0) {
  return {
- id: message.id,
- thread_id: threadId,
- content: typeof message.content === "string" ? message.content : JSON.stringify(message.content),
- role: message.role,
- type: message.type,
- resourceId: message.resourceId,
- createdAt: time instanceof Date ? time.toISOString() : time
+ scores: [],
+ pagination: {
+ total: 0,
+ page: pagination.page,
+ perPage: pagination.perPage,
+ hasMore: false
+ }
  };
- });
- const collection = await this.getCollection(storage.TABLE_MESSAGES);
- const threadsCollection = await this.getCollection(storage.TABLE_THREADS);
- await Promise.all([
- collection.insertMany(messagesToInsert),
- threadsCollection.updateOne({ id: threadId }, { $set: { updatedAt: /* @__PURE__ */ new Date() } })
- ]);
- const list = new agent.MessageList().add(messages, "memory");
- if (format === `v2`) return list.get.all.v2();
- return list.get.all.v1();
- } catch (error) {
- this.logger.error("Failed to save messages in database: " + error?.message);
- throw error;
- }
- }
- async getTraces({
- name,
- scope,
- page,
- perPage,
- attributes,
- filters
- } = {
- page: 0,
- perPage: 100
- }) {
- const limit = perPage;
- const offset = page * perPage;
+ }
+ const documents = await collection.find({ entityId, entityType }).sort({ createdAt: "desc" }).skip(currentOffset).limit(pagination.perPage).toArray();
+ const scores = documents.map((row) => transformScoreRow(row));
+ const hasMore = currentOffset + scores.length < total;
+ return {
+ scores,
+ pagination: {
+ total,
+ page: pagination.page,
+ perPage: pagination.perPage,
+ hasMore
+ }
+ };
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_MONGODB_STORE_GET_SCORES_BY_ENTITY_ID_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: { entityId, entityType, page: pagination.page, perPage: pagination.perPage }
+ },
+ error$1
+ );
+ }
+ }
+ };
+ var TracesStorageMongoDB = class extends storage.TracesStorage {
+ operations;
+ constructor({ operations }) {
+ super();
+ this.operations = operations;
+ }
+ async getTraces(args) {
+ if (args.fromDate || args.toDate) {
+ args.dateRange = {
+ start: args.fromDate,
+ end: args.toDate
+ };
+ }
+ try {
+ const result = await this.getTracesPaginated(args);
+ return result.traces;
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_MONGODB_STORE_GET_TRACES_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY
+ },
+ error$1
+ );
+ }
+ }
+ async getTracesPaginated(args) {
+ const { name, scope, page = 0, perPage = 100, attributes, filters, dateRange } = args;
+ const fromDate = dateRange?.start;
+ const toDate = dateRange?.end;
+ const currentOffset = page * perPage;
  const query = {};
  if (name) {
- query["name"] = `%${name}%`;
+ query["name"] = new RegExp(name);
  }
  if (scope) {
  query["scope"] = scope;
  }
  if (attributes) {
- Object.keys(attributes).forEach((key) => {
- query[`attributes.${key}`] = attributes[key];
- });
+ query["$and"] = Object.entries(attributes).map(([key, value]) => ({
+ [`attributes.${key}`]: value
+ }));
  }
  if (filters) {
  Object.entries(filters).forEach(([key, value]) => {
  query[key] = value;
  });
  }
- const collection = await this.getCollection(storage.TABLE_TRACES);
- const result = await collection.find(query, {
- sort: { startTime: -1 }
- }).limit(limit).skip(offset).toArray();
- return result.map((row) => ({
- id: row.id,
- parentSpanId: row.parentSpanId,
- traceId: row.traceId,
- name: row.name,
- scope: row.scope,
- kind: row.kind,
- status: safelyParseJSON(row.status),
- events: safelyParseJSON(row.events),
- links: safelyParseJSON(row.links),
- attributes: safelyParseJSON(row.attributes),
- startTime: row.startTime,
- endTime: row.endTime,
- other: safelyParseJSON(row.other),
- createdAt: row.createdAt
- }));
- }
- async getWorkflowRuns({
- workflowName,
- fromDate,
- toDate,
- limit,
- offset
- } = {}) {
- const query = {};
- if (workflowName) {
- query["workflow_name"] = workflowName;
- }
  if (fromDate || toDate) {
  query["createdAt"] = {};
  if (fromDate) {
@@ -753,68 +1842,68 @@ var MongoDBStore = class extends storage.MastraStorage {
  query["createdAt"]["$lte"] = toDate;
  }
  }
- const collection = await this.getCollection(storage.TABLE_WORKFLOW_SNAPSHOT);
- let total = 0;
- if (limit !== void 0 && offset !== void 0) {
- total = await collection.countDocuments(query);
- }
- const request = collection.find(query).sort({ createdAt: "desc" });
- if (limit) {
- request.limit(limit);
- }
- if (offset) {
- request.skip(offset);
- }
- const result = await request.toArray();
- const runs = result.map((row) => {
- let parsedSnapshot = row.snapshot;
- if (typeof parsedSnapshot === "string") {
- try {
- parsedSnapshot = JSON.parse(row.snapshot);
- } catch (e) {
- console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
- }
+ try {
+ const collection = await this.operations.getCollection(storage.TABLE_TRACES);
+ const total = await collection.countDocuments(query);
+ if (total === 0) {
+ return {
+ traces: [],
+ total: 0,
+ page,
+ perPage,
+ hasMore: false
+ };
  }
+ const result = await collection.find(query, {
+ sort: { startTime: -1 }
+ }).limit(perPage).skip(currentOffset).toArray();
+ const traces = result.map((row) => ({
+ id: row.id,
+ parentSpanId: row.parentSpanId,
+ traceId: row.traceId,
+ name: row.name,
+ scope: row.scope,
+ kind: row.kind,
+ status: storage.safelyParseJSON(row.status),
+ events: storage.safelyParseJSON(row.events),
+ links: storage.safelyParseJSON(row.links),
+ attributes: storage.safelyParseJSON(row.attributes),
+ startTime: row.startTime,
+ endTime: row.endTime,
+ other: storage.safelyParseJSON(row.other),
+ createdAt: row.createdAt
+ }));
  return {
- workflowName: row.workflow_name,
- runId: row.run_id,
- snapshot: parsedSnapshot,
- createdAt: new Date(row.createdAt),
- updatedAt: new Date(row.updatedAt)
+ traces,
+ total,
+ page,
+ perPage,
+ hasMore: currentOffset + traces.length < total
  };
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_MONGODB_STORE_GET_TRACES_PAGINATED_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY
+ },
+ error$1
+ );
+ }
+ }
+ async batchTraceInsert({ records }) {
+ this.logger.debug("Batch inserting traces", { count: records.length });
+ await this.operations.batchInsert({
+ tableName: storage.TABLE_TRACES,
+ records
  });
- return { runs, total: total || runs.length };
  }
- async getEvalsByAgentName(agentName, type) {
- try {
- const query = {
- agent_name: agentName
- };
- if (type === "test") {
- query["test_info"] = { $ne: null };
- }
- if (type === "live") {
- query["test_info"] = null;
- }
- const collection = await this.getCollection(storage.TABLE_EVALS);
- const documents = await collection.find(query).sort({ created_at: "desc" }).toArray();
- const result = documents.map((row) => this.transformEvalRow(row));
- return result.filter((row) => {
- if (type === "live") {
- return !Boolean(row.testInfo?.testPath);
- }
- if (type === "test") {
- return row.testInfo?.testPath !== null;
- }
- return true;
- });
- } catch (error) {
- if (error instanceof Error && error.message.includes("no such table")) {
- return [];
- }
- this.logger.error("Failed to get evals for the specified agent: " + error?.message);
- throw error;
- }
+ };
+ var WorkflowsStorageMongoDB = class extends storage.WorkflowsStorage {
+ operations;
+ constructor({ operations }) {
+ super();
+ this.operations = operations;
  }
  async persistWorkflowSnapshot({
  workflowName,
@@ -822,24 +1911,30 @@ var MongoDBStore = class extends storage.MastraStorage {
  snapshot
  }) {
  try {
- const now = (/* @__PURE__ */ new Date()).toISOString();
- const collection = await this.getCollection(storage.TABLE_WORKFLOW_SNAPSHOT);
+ const collection = await this.operations.getCollection(storage.TABLE_WORKFLOW_SNAPSHOT);
  await collection.updateOne(
  { workflow_name: workflowName, run_id: runId },
  {
  $set: {
- snapshot: JSON.stringify(snapshot),
- updatedAt: now
- },
- $setOnInsert: {
- createdAt: now
+ workflow_name: workflowName,
+ run_id: runId,
+ snapshot,
+ createdAt: /* @__PURE__ */ new Date(),
+ updatedAt: /* @__PURE__ */ new Date()
  }
  },
  { upsert: true }
  );
- } catch (error) {
- this.logger.error(`Error persisting workflow snapshot: ${error}`);
- throw error;
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_MONGODB_STORE_PERSIST_WORKFLOW_SNAPSHOT_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: { workflowName, runId }
+ },
+ error$1
+ );
  }
  }
  async loadWorkflowSnapshot({
@@ -847,7 +1942,7 @@ var MongoDBStore = class extends storage.MastraStorage {
  runId
  }) {
  try {
- const result = await this.load({
+ const result = await this.operations.load({
  tableName: storage.TABLE_WORKFLOW_SNAPSHOT,
  keys: {
  workflow_name: workflowName,
@@ -857,40 +1952,98 @@ var MongoDBStore = class extends storage.MastraStorage {
  if (!result?.length) {
  return null;
  }
- return JSON.parse(result[0].snapshot);
- } catch (error) {
- console.error("Error loading workflow snapshot:", error);
- throw error;
+ return typeof result[0].snapshot === "string" ? storage.safelyParseJSON(result[0].snapshot) : result[0].snapshot;
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_MONGODB_STORE_LOAD_WORKFLOW_SNAPSHOT_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: { workflowName, runId }
+ },
+ error$1
+ );
  }
  }
- async getWorkflowRunById({
- runId,
- workflowName
- }) {
+ async getWorkflowRuns(args) {
+ const options = args || {};
+ try {
+ const query = {};
+ if (options.workflowName) {
+ query["workflow_name"] = options.workflowName;
+ }
+ if (options.fromDate) {
+ query["createdAt"] = { $gte: options.fromDate };
+ }
+ if (options.toDate) {
+ if (query["createdAt"]) {
+ query["createdAt"].$lte = options.toDate;
+ } else {
+ query["createdAt"] = { $lte: options.toDate };
+ }
+ }
+ if (options.resourceId) {
+ query["resourceId"] = options.resourceId;
+ }
+ const collection = await this.operations.getCollection(storage.TABLE_WORKFLOW_SNAPSHOT);
+ const total = await collection.countDocuments(query);
+ let cursor = collection.find(query).sort({ createdAt: -1 });
+ if (options.offset) {
+ cursor = cursor.skip(options.offset);
+ }
+ if (options.limit) {
+ cursor = cursor.limit(options.limit);
+ }
+ const results = await cursor.toArray();
+ const runs = results.map((row) => this.parseWorkflowRun(row));
+ return {
+ runs,
+ total
+ };
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_MONGODB_STORE_GET_WORKFLOW_RUNS_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: { workflowName: options.workflowName || "unknown" }
+ },
+ error$1
+ );
+ }
+ }
+ async getWorkflowRunById(args) {
  try {
  const query = {};
- if (runId) {
- query["run_id"] = runId;
+ if (args.runId) {
+ query["run_id"] = args.runId;
  }
- if (workflowName) {
- query["workflow_name"] = workflowName;
+ if (args.workflowName) {
+ query["workflow_name"] = args.workflowName;
  }
- const collection = await this.getCollection(storage.TABLE_WORKFLOW_SNAPSHOT);
+ const collection = await this.operations.getCollection(storage.TABLE_WORKFLOW_SNAPSHOT);
  const result = await collection.findOne(query);
  if (!result) {
  return null;
  }
  return this.parseWorkflowRun(result);
- } catch (error) {
- console.error("Error getting workflow run by ID:", error);
- throw error;
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_MONGODB_STORE_GET_WORKFLOW_RUN_BY_ID_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: { runId: args.runId }
+ },
+ error$1
+ );
  }
  }
  parseWorkflowRun(row) {
  let parsedSnapshot = row.snapshot;
  if (typeof parsedSnapshot === "string") {
  try {
- parsedSnapshot = JSON.parse(row.snapshot);
+ parsedSnapshot = typeof row.snapshot === "string" ? storage.safelyParseJSON(row.snapshot) : row.snapshot;
  } catch (e) {
  console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
  }
@@ -899,64 +2052,250 @@ var MongoDBStore = class extends storage.MastraStorage {
  workflowName: row.workflow_name,
  runId: row.run_id,
  snapshot: parsedSnapshot,
- createdAt: row.createdAt,
- updatedAt: row.updatedAt,
+ createdAt: new Date(row.createdAt),
+ updatedAt: new Date(row.updatedAt),
  resourceId: row.resourceId
  };
  }
- parseRow(row) {
- let content = row.content;
- try {
- content = JSON.parse(row.content);
- } catch {
+ };
+
+ // src/storage/index.ts
+ var loadConnector = (config) => {
+ try {
+ if ("connectorHandler" in config) {
+ return MongoDBConnector.fromConnectionHandler(config.connectorHandler);
  }
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_MONGODB_STORE_CONSTRUCTOR_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.USER,
+ details: { connectionHandler: true }
+ },
+ error$1
+ );
+ }
+ try {
+ return MongoDBConnector.fromDatabaseConfig({
+ options: config.options,
+ url: config.url,
+ dbName: config.dbName
+ });
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_MONGODB_STORE_CONSTRUCTOR_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.USER,
+ details: { url: config?.url, dbName: config?.dbName }
+ },
+ error$1
+ );
+ }
+ };
+ var MongoDBStore = class extends storage.MastraStorage {
+ #connector;
+ stores;
+ get supports() {
  return {
- id: row.id,
- content,
- role: row.role,
- type: row.type,
- createdAt: new Date(row.createdAt),
- threadId: row.thread_id,
- resourceId: row.resourceId
+ selectByIncludeResourceScope: true,
+ resourceWorkingMemory: true,
+ hasColumn: false,
+ createTable: false,
+ deleteMessages: false
  };
  }
- transformEvalRow(row) {
- let testInfoValue = null;
- if (row.test_info) {
- try {
- testInfoValue = typeof row.test_info === "string" ? JSON.parse(row.test_info) : row.test_info;
- } catch (e) {
- console.warn("Failed to parse test_info:", e);
- }
- }
- return {
- input: row.input,
- output: row.output,
- result: row.result,
- agentName: row.agent_name,
- metricName: row.metric_name,
- instructions: row.instructions,
- testInfo: testInfoValue,
- globalRunId: row.global_run_id,
- runId: row.run_id,
- createdAt: row.created_at
+ constructor(config) {
+ super({ name: "MongoDBStore" });
+ this.stores = {};
+ this.#connector = loadConnector(config);
+ const operations = new StoreOperationsMongoDB({
+ connector: this.#connector
+ });
+ const memory = new MemoryStorageMongoDB({
+ operations
+ });
+ const traces = new TracesStorageMongoDB({
+ operations
+ });
+ const legacyEvals = new LegacyEvalsMongoDB({
+ operations
+ });
+ const scores = new ScoresStorageMongoDB({
+ operations
+ });
+ const workflows = new WorkflowsStorageMongoDB({
+ operations
+ });
+ this.stores = {
+ operations,
+ memory,
+ traces,
+ legacyEvals,
+ scores,
+ workflows
  };
  }
- async getTracesPaginated(_args) {
- throw new Error("Method not implemented.");
+ async createTable({
+ tableName,
+ schema
+ }) {
+ return this.stores.operations.createTable({ tableName, schema });
+ }
+ async alterTable(_args) {
+ return this.stores.operations.alterTable(_args);
+ }
+ async dropTable({ tableName }) {
+ return this.stores.operations.dropTable({ tableName });
+ }
+ async clearTable({ tableName }) {
+ return this.stores.operations.clearTable({ tableName });
+ }
+ async insert({ tableName, record }) {
+ return this.stores.operations.insert({ tableName, record });
+ }
+ async batchInsert({ tableName, records }) {
+ return this.stores.operations.batchInsert({ tableName, records });
+ }
+ async load({ tableName, keys }) {
+ return this.stores.operations.load({ tableName, keys });
+ }
+ async getThreadById({ threadId }) {
+ return this.stores.memory.getThreadById({ threadId });
+ }
+ async getThreadsByResourceId({ resourceId }) {
+ return this.stores.memory.getThreadsByResourceId({ resourceId });
+ }
+ async saveThread({ thread }) {
+ return this.stores.memory.saveThread({ thread });
+ }
+ async updateThread({
+ id,
+ title,
+ metadata
+ }) {
+ return this.stores.memory.updateThread({ id, title, metadata });
+ }
+ async deleteThread({ threadId }) {
+ return this.stores.memory.deleteThread({ threadId });
+ }
+ async getMessages({
+ threadId,
+ selectBy,
+ format
+ }) {
+ return this.stores.memory.getMessages({ threadId, selectBy, format });
+ }
+ async saveMessages(args) {
+ return this.stores.memory.saveMessages(args);
  }
  async getThreadsByResourceIdPaginated(_args) {
- throw new Error("Method not implemented.");
+ return this.stores.memory.getThreadsByResourceIdPaginated(_args);
  }
  async getMessagesPaginated(_args) {
- throw new Error("Method not implemented.");
+ return this.stores.memory.getMessagesPaginated(_args);
+ }
+ async updateMessages(_args) {
+ return this.stores.memory.updateMessages(_args);
+ }
+ async getTraces(args) {
+ return this.stores.traces.getTraces(args);
+ }
+ async getTracesPaginated(args) {
+ return this.stores.traces.getTracesPaginated(args);
+ }
+ async getWorkflowRuns(args) {
+ return this.stores.workflows.getWorkflowRuns(args);
+ }
+ async getEvals(options = {}) {
+ return this.stores.legacyEvals.getEvals(options);
+ }
+ async getEvalsByAgentName(agentName, type) {
+ return this.stores.legacyEvals.getEvalsByAgentName(agentName, type);
+ }
+ async persistWorkflowSnapshot({
+ workflowName,
+ runId,
+ snapshot
+ }) {
+ return this.stores.workflows.persistWorkflowSnapshot({ workflowName, runId, snapshot });
+ }
+ async loadWorkflowSnapshot({
+ workflowName,
+ runId
+ }) {
+ return this.stores.workflows.loadWorkflowSnapshot({ workflowName, runId });
+ }
+ async getWorkflowRunById({
+ runId,
+ workflowName
+ }) {
+ return this.stores.workflows.getWorkflowRunById({ runId, workflowName });
  }
  async close() {
- await this.#client.close();
+ try {
+ await this.#connector.close();
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_MONGODB_STORE_CLOSE_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.USER
+ },
+ error$1
+ );
+ }
  }
- async updateMessages(_args) {
- this.logger.error("updateMessages is not yet implemented in MongoDBStore");
- throw new Error("Method not implemented");
+ /**
+ * SCORERS
+ */
+ async getScoreById({ id }) {
+ return this.stores.scores.getScoreById({ id });
+ }
+ async saveScore(score) {
+ return this.stores.scores.saveScore(score);
+ }
+ async getScoresByRunId({
+ runId,
+ pagination
+ }) {
+ return this.stores.scores.getScoresByRunId({ runId, pagination });
+ }
+ async getScoresByEntityId({
+ entityId,
+ entityType,
+ pagination
+ }) {
+ return this.stores.scores.getScoresByEntityId({ entityId, entityType, pagination });
+ }
+ async getScoresByScorerId({
+ scorerId,
+ pagination,
+ entityId,
+ entityType
+ }) {
+ return this.stores.scores.getScoresByScorerId({ scorerId, pagination, entityId, entityType });
+ }
+ /**
+ * RESOURCES
+ */
+ async getResourceById({ resourceId }) {
+ return this.stores.memory.getResourceById({ resourceId });
+ }
+ async saveResource({ resource }) {
+ return this.stores.memory.saveResource({ resource });
+ }
+ async updateResource({
+ resourceId,
+ workingMemory,
+ metadata
+ }) {
+ return this.stores.memory.updateResource({
+ resourceId,
+ workingMemory,
+ metadata
+ });
  }
  };
 
@@ -1058,3 +2397,5 @@ Example Complex Query:
  exports.MONGODB_PROMPT = MONGODB_PROMPT;
  exports.MongoDBStore = MongoDBStore;
  exports.MongoDBVector = MongoDBVector;
+
+ //# sourceMappingURL=index.cjs.map