@mastra/mongodb 0.0.0-vnext-inngest-20250508131921 → 0.0.0-vnextAgentNetwork-20250527091247

This diff shows the contents of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
package/dist/index.d.ts CHANGED
@@ -1,7 +1,7 @@
  export { MONGODB_PROMPT } from './_tsup-dts-rollup.js';
- export { MongoDBUpsertArgs } from './_tsup-dts-rollup.js';
- export { MongoDBQueryArgs } from './_tsup-dts-rollup.js';
- export { MongoDBUpsertParams } from './_tsup-dts-rollup.js';
  export { MongoDBUpsertVectorParams } from './_tsup-dts-rollup.js';
  export { MongoDBQueryVectorParams } from './_tsup-dts-rollup.js';
+ export { MongoDBIndexReadyParams } from './_tsup-dts-rollup.js';
  export { MongoDBVector } from './_tsup-dts-rollup.js';
+ export { MongoDBConfig } from './_tsup-dts-rollup.js';
+ export { MongoDBStore } from './_tsup-dts-rollup.js';
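
A minimal consumer-side sketch (illustrative, not part of the published diff) of the type imports after this change, assuming the dist entry re-exports the names above: the removed MongoDBUpsertArgs, MongoDBQueryArgs and MongoDBUpsertParams types are gone, while the *VectorParams names remain and MongoDBIndexReadyParams, MongoDBConfig and MongoDBStore are new.

    import type {
      MongoDBUpsertVectorParams,
      MongoDBQueryVectorParams,
      MongoDBIndexReadyParams,
      MongoDBConfig,
    } from '@mastra/mongodb';
    import { MongoDBVector, MongoDBStore, MONGODB_PROMPT } from '@mastra/mongodb';
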
package/dist/index.js CHANGED
@@ -2,6 +2,7 @@ import { MastraVector } from '@mastra/core/vector';
  import { MongoClient } from 'mongodb';
  import { v4 } from 'uuid';
  import { BaseFilterTranslator } from '@mastra/core/vector/filter';
+ import { MastraStorage, TABLE_THREADS, TABLE_MESSAGES, TABLE_TRACES, TABLE_WORKFLOW_SNAPSHOT, TABLE_EVALS } from '@mastra/core/storage';

  // src/vector/index.ts
  var MongoDBFilterTranslator = class extends BaseFilterTranslator {
@@ -119,8 +120,7 @@ var MongoDBVector = class extends MastraVector {
  async disconnect() {
  await this.client.close();
  }
- async createIndex(params) {
- const { indexName, dimension, metric = "cosine" } = params;
+ async createIndex({ indexName, dimension, metric = "cosine" }) {
  if (!Number.isInteger(dimension) || dimension <= 0) {
  throw new Error("Dimension must be a positive integer");
  }
@@ -158,7 +158,19 @@ var MongoDBVector = class extends MastraVector {
  }
  await collection.updateOne({ _id: "__index_metadata__" }, { $set: { dimension, metric } }, { upsert: true });
  }
- async waitForIndexReady(indexName, timeoutMs = 6e4, checkIntervalMs = 2e3) {
+ /**
+ * Waits for the index to be ready.
+ *
+ * @param {string} indexName - The name of the index to wait for
+ * @param {number} timeoutMs - The maximum time in milliseconds to wait for the index to be ready (default: 60000)
+ * @param {number} checkIntervalMs - The interval in milliseconds at which to check if the index is ready (default: 2000)
+ * @returns A promise that resolves when the index is ready
+ */
+ async waitForIndexReady({
+ indexName,
+ timeoutMs = 6e4,
+ checkIntervalMs = 2e3
+ }) {
  const collection = await this.getCollection(indexName, true);
  const indexNameInternal = `${indexName}_vector_index`;
  const startTime = Date.now();
@@ -173,11 +185,10 @@ var MongoDBVector = class extends MastraVector {
  }
  throw new Error(`Index "${indexNameInternal}" did not become ready within timeout`);
  }
- async upsert(params) {
- const { indexName, vectors, metadata, ids, documents } = params;
+ async upsert({ indexName, vectors, metadata, ids, documents }) {
  const collection = await this.getCollection(indexName);
  this.collectionForValidation = collection;
- const stats = await this.describeIndex(indexName);
+ const stats = await this.describeIndex({ indexName });
  await this.validateVectorDimensions(vectors, stats.dimension);
  const generatedIds = ids || vectors.map(() => v4());
  const operations = vectors.map((vector, idx) => {
@@ -210,8 +221,14 @@ var MongoDBVector = class extends MastraVector {
  await collection.bulkWrite(operations);
  return generatedIds;
  }
- async query(params) {
- const { indexName, queryVector, topK = 10, filter, includeVector = false, documentFilter } = params;
+ async query({
+ indexName,
+ queryVector,
+ topK = 10,
+ filter,
+ includeVector = false,
+ documentFilter
+ }) {
  const collection = await this.getCollection(indexName, true);
  const indexNameInternal = `${indexName}_vector_index`;
  const mongoFilter = this.transformFilter(filter);
@@ -267,7 +284,13 @@ var MongoDBVector = class extends MastraVector {
  const collections = await this.db.listCollections().toArray();
  return collections.map((col) => col.name);
  }
- async describeIndex(indexName) {
+ /**
+ * Retrieves statistics about a vector index.
+ *
+ * @param {string} indexName - The name of the index to describe
+ * @returns A promise that resolves to the index statistics including dimension, count and metric
+ */
+ async describeIndex({ indexName }) {
  const collection = await this.getCollection(indexName, true);
  const count = await collection.countDocuments({ _id: { $ne: "__index_metadata__" } });
  const metadataDoc = await collection.findOne({ _id: "__index_metadata__" });
@@ -279,7 +302,7 @@ var MongoDBVector = class extends MastraVector {
  metric
  };
  }
- async deleteIndex(indexName) {
+ async deleteIndex({ indexName }) {
  const collection = await this.getCollection(indexName, false);
  if (collection) {
  await collection.drop();
@@ -288,33 +311,56 @@ var MongoDBVector = class extends MastraVector {
  throw new Error(`Index (Collection) "${indexName}" does not exist`);
  }
  }
- async updateIndexById(indexName, id, update) {
- if (!update.vector && !update.metadata) {
- throw new Error("No updates provided");
- }
- const collection = await this.getCollection(indexName, true);
- const updateDoc = {};
- if (update.vector) {
- updateDoc[this.embeddingFieldName] = update.vector;
- }
- if (update.metadata) {
- const normalizedMeta = Object.keys(update.metadata).reduce(
- (acc, key) => {
- acc[key] = update.metadata[key] instanceof Date ? update.metadata[key].toISOString() : update.metadata[key];
- return acc;
- },
- {}
- );
- updateDoc[this.metadataFieldName] = normalizedMeta;
+ /**
+ * Updates a vector by its ID with the provided vector and/or metadata.
+ * @param indexName - The name of the index containing the vector.
+ * @param id - The ID of the vector to update.
+ * @param update - An object containing the vector and/or metadata to update.
+ * @param update.vector - An optional array of numbers representing the new vector.
+ * @param update.metadata - An optional record containing the new metadata.
+ * @returns A promise that resolves when the update is complete.
+ * @throws Will throw an error if no updates are provided or if the update operation fails.
+ */
+ async updateVector({ indexName, id, update }) {
+ try {
+ if (!update.vector && !update.metadata) {
+ throw new Error("No updates provided");
+ }
+ const collection = await this.getCollection(indexName, true);
+ const updateDoc = {};
+ if (update.vector) {
+ const stats = await this.describeIndex({ indexName });
+ await this.validateVectorDimensions([update.vector], stats.dimension);
+ updateDoc[this.embeddingFieldName] = update.vector;
+ }
+ if (update.metadata) {
+ const normalizedMeta = Object.keys(update.metadata).reduce(
+ (acc, key) => {
+ acc[key] = update.metadata[key] instanceof Date ? update.metadata[key].toISOString() : update.metadata[key];
+ return acc;
+ },
+ {}
+ );
+ updateDoc[this.metadataFieldName] = normalizedMeta;
+ }
+ await collection.findOneAndUpdate({ _id: id }, { $set: updateDoc });
+ } catch (error) {
+ throw new Error(`Failed to update vector by id: ${id} for index name: ${indexName}: ${error.message}`);
  }
- await collection.findOneAndUpdate({ _id: id }, { $set: updateDoc });
  }
- async deleteIndexById(indexName, id) {
+ /**
+ * Deletes a vector by its ID.
+ * @param indexName - The name of the index containing the vector.
+ * @param id - The ID of the vector to delete.
+ * @returns A promise that resolves when the deletion is complete.
+ * @throws Will throw an error if the deletion operation fails.
+ */
+ async deleteVector({ indexName, id }) {
  try {
  const collection = await this.getCollection(indexName, true);
  await collection.deleteOne({ _id: id });
  } catch (error) {
- throw new Error(`Failed to delete index by id: ${id} for index name: ${indexName}: ${error.message}`);
+ throw new Error(`Failed to delete vector by id: ${id} for index name: ${indexName}: ${error.message}`);
  }
  }
  // Private methods
@@ -355,6 +401,524 @@ var MongoDBVector = class extends MastraVector {
  return translator.translate(filter);
  }
  };
+ function safelyParseJSON(jsonString) {
+ try {
+ return JSON.parse(jsonString);
+ } catch {
+ return {};
+ }
+ }
+ var MongoDBStore = class extends MastraStorage {
+ #isConnected = false;
+ #client;
+ #db;
+ #dbName;
+ constructor(config) {
+ super({ name: "MongoDBStore" });
+ this.#isConnected = false;
+ if (!config.url?.trim().length) {
+ throw new Error(
+ "MongoDBStore: url must be provided and cannot be empty. Passing an empty string may cause fallback to local MongoDB defaults."
+ );
+ }
+ if (!config.dbName?.trim().length) {
+ throw new Error(
+ "MongoDBStore: dbName must be provided and cannot be empty. Passing an empty string may cause fallback to local MongoDB defaults."
+ );
+ }
+ this.#dbName = config.dbName;
+ this.#client = new MongoClient(config.url, config.options);
+ }
+ async getConnection() {
+ if (this.#isConnected) {
+ return this.#db;
+ }
+ await this.#client.connect();
+ this.#db = this.#client.db(this.#dbName);
+ this.#isConnected = true;
+ return this.#db;
+ }
+ async getCollection(collectionName) {
+ const db = await this.getConnection();
+ return db.collection(collectionName);
+ }
+ async createTable() {
+ }
+ async clearTable({ tableName }) {
+ try {
+ const collection = await this.getCollection(tableName);
+ await collection.deleteMany({});
+ } catch (error) {
+ if (error instanceof Error) {
+ this.logger.error(error.message);
+ }
+ }
+ }
+ async insert({ tableName, record }) {
+ try {
+ const collection = await this.getCollection(tableName);
+ await collection.insertOne(record);
+ } catch (error) {
+ this.logger.error(`Error upserting into table ${tableName}: ${error}`);
+ throw error;
+ }
+ }
+ async batchInsert({ tableName, records }) {
+ if (!records.length) {
+ return;
+ }
+ try {
+ const collection = await this.getCollection(tableName);
+ await collection.insertMany(records);
+ } catch (error) {
+ this.logger.error(`Error upserting into table ${tableName}: ${error}`);
+ throw error;
+ }
+ }
+ async load({ tableName, keys }) {
+ this.logger.info(`Loading ${tableName} with keys ${JSON.stringify(keys)}`);
+ try {
+ const collection = await this.getCollection(tableName);
+ return await collection.find(keys).toArray();
+ } catch (error) {
+ this.logger.error(`Error loading ${tableName} with keys ${JSON.stringify(keys)}: ${error}`);
+ throw error;
+ }
+ }
+ async getThreadById({ threadId }) {
+ try {
+ const collection = await this.getCollection(TABLE_THREADS);
+ const result = await collection.findOne({ id: threadId });
+ if (!result) {
+ return null;
+ }
+ return {
+ ...result,
+ metadata: typeof result.metadata === "string" ? JSON.parse(result.metadata) : result.metadata
+ };
+ } catch (error) {
+ this.logger.error(`Error loading thread with ID ${threadId}: ${error}`);
+ throw error;
+ }
+ }
+ async getThreadsByResourceId({ resourceId }) {
+ try {
+ const collection = await this.getCollection(TABLE_THREADS);
+ const results = await collection.find({ resourceId }).toArray();
+ if (!results.length) {
+ return [];
+ }
+ return results.map((result) => ({
+ ...result,
+ metadata: typeof result.metadata === "string" ? JSON.parse(result.metadata) : result.metadata
+ }));
+ } catch (error) {
+ this.logger.error(`Error loading threads by resourceId ${resourceId}: ${error}`);
+ throw error;
+ }
+ }
+ async saveThread({ thread }) {
+ try {
+ const collection = await this.getCollection(TABLE_THREADS);
+ await collection.updateOne(
+ { id: thread.id },
+ {
+ $set: {
+ ...thread,
+ metadata: JSON.stringify(thread.metadata)
+ }
+ },
+ { upsert: true }
+ );
+ return thread;
+ } catch (error) {
+ this.logger.error(`Error saving thread ${thread.id}: ${error}`);
+ throw error;
+ }
+ }
+ async updateThread({
+ id,
+ title,
+ metadata
+ }) {
+ const thread = await this.getThreadById({ threadId: id });
+ if (!thread) {
+ throw new Error(`Thread ${id} not found`);
+ }
+ const updatedThread = {
+ ...thread,
+ title,
+ metadata: {
+ ...thread.metadata,
+ ...metadata
+ }
+ };
+ try {
+ const collection = await this.getCollection(TABLE_THREADS);
+ await collection.updateOne(
+ { id },
+ {
+ $set: {
+ title,
+ metadata: JSON.stringify(updatedThread.metadata)
+ }
+ }
+ );
+ } catch (error) {
+ this.logger.error(`Error updating thread ${id}:) ${error}`);
+ throw error;
+ }
+ return updatedThread;
+ }
+ async deleteThread({ threadId }) {
+ try {
+ const collectionMessages = await this.getCollection(TABLE_MESSAGES);
+ await collectionMessages.deleteMany({ thread_id: threadId });
+ const collectionThreads = await this.getCollection(TABLE_THREADS);
+ await collectionThreads.deleteOne({ id: threadId });
+ } catch (error) {
+ this.logger.error(`Error deleting thread ${threadId}: ${error}`);
+ throw error;
+ }
+ }
+ async getMessages({ threadId, selectBy }) {
+ try {
+ const limit = typeof selectBy?.last === "number" ? selectBy.last : 40;
+ const include = selectBy?.include || [];
+ let messages = [];
+ let allMessages = [];
+ const collection = await this.getCollection(TABLE_MESSAGES);
+ allMessages = (await collection.find({ thread_id: threadId }).sort({ createdAt: -1 }).toArray()).map(
+ (row) => this.parseRow(row)
+ );
+ if (include.length) {
+ const idToIndex = /* @__PURE__ */ new Map();
+ allMessages.forEach((msg, idx) => {
+ idToIndex.set(msg.id, idx);
+ });
+ const selectedIndexes = /* @__PURE__ */ new Set();
+ for (const inc of include) {
+ const idx = idToIndex.get(inc.id);
+ if (idx === void 0) continue;
+ for (let i = 1; i <= (inc.withPreviousMessages || 0); i++) {
+ if (idx + i < allMessages.length) selectedIndexes.add(idx + i);
+ }
+ selectedIndexes.add(idx);
+ for (let i = 1; i <= (inc.withNextMessages || 0); i++) {
+ if (idx - i >= 0) selectedIndexes.add(idx - i);
+ }
+ }
+ messages.push(
+ ...Array.from(selectedIndexes).map((i) => allMessages[i]).filter((m) => !!m)
+ );
+ }
+ const excludeIds = new Set(messages.map((m) => m.id));
+ for (const msg of allMessages) {
+ if (messages.length >= limit) break;
+ if (!excludeIds.has(msg.id)) {
+ messages.push(msg);
+ }
+ }
+ messages.sort((a, b) => a.createdAt.getTime() - b.createdAt.getTime());
+ return messages.slice(0, limit);
+ } catch (error) {
+ this.logger.error("Error getting messages:", error);
+ throw error;
+ }
+ }
+ async saveMessages({ messages }) {
+ if (!messages.length) {
+ return messages;
+ }
+ const threadId = messages[0]?.threadId;
+ if (!threadId) {
+ this.logger.error("Thread ID is required to save messages");
+ throw new Error("Thread ID is required");
+ }
+ try {
+ const messagesToInsert = messages.map((message) => {
+ const time = message.createdAt || /* @__PURE__ */ new Date();
+ return {
+ id: message.id,
+ thread_id: threadId,
+ content: typeof message.content === "string" ? message.content : JSON.stringify(message.content),
+ role: message.role,
+ type: message.type,
+ resourceId: message.resourceId,
+ createdAt: time instanceof Date ? time.toISOString() : time
+ };
+ });
+ const collection = await this.getCollection(TABLE_MESSAGES);
+ await collection.insertMany(messagesToInsert);
+ return messages;
+ } catch (error) {
+ this.logger.error("Failed to save messages in database: " + error?.message);
+ throw error;
+ }
+ }
+ async getTraces({
+ name,
+ scope,
+ page,
+ perPage,
+ attributes,
+ filters
+ } = {
+ page: 0,
+ perPage: 100
+ }) {
+ const limit = perPage;
+ const offset = page * perPage;
+ const query = {};
+ if (name) {
+ query["name"] = `%${name}%`;
+ }
+ if (scope) {
+ query["scope"] = scope;
+ }
+ if (attributes) {
+ Object.keys(attributes).forEach((key) => {
+ query[`attributes.${key}`] = attributes[key];
+ });
+ }
+ if (filters) {
+ Object.entries(filters).forEach(([key, value]) => {
+ query[key] = value;
+ });
+ }
+ const collection = await this.getCollection(TABLE_TRACES);
+ const result = await collection.find(query, {
+ sort: { startTime: -1 }
+ }).limit(limit).skip(offset).toArray();
+ return result.map((row) => ({
+ id: row.id,
+ parentSpanId: row.parentSpanId,
+ traceId: row.traceId,
+ name: row.name,
+ scope: row.scope,
+ kind: row.kind,
+ status: safelyParseJSON(row.status),
+ events: safelyParseJSON(row.events),
+ links: safelyParseJSON(row.links),
+ attributes: safelyParseJSON(row.attributes),
+ startTime: row.startTime,
+ endTime: row.endTime,
+ other: safelyParseJSON(row.other),
+ createdAt: row.createdAt
+ }));
+ }
+ async getWorkflowRuns({
+ workflowName,
+ fromDate,
+ toDate,
+ limit,
+ offset
+ } = {}) {
+ const query = {};
+ if (workflowName) {
+ query["workflow_name"] = workflowName;
+ }
+ if (fromDate || toDate) {
+ query["createdAt"] = {};
+ if (fromDate) {
+ query["createdAt"]["$gte"] = fromDate;
+ }
+ if (toDate) {
+ query["createdAt"]["$lte"] = toDate;
+ }
+ }
+ const collection = await this.getCollection(TABLE_WORKFLOW_SNAPSHOT);
+ let total = 0;
+ if (limit !== void 0 && offset !== void 0) {
+ total = await collection.countDocuments(query);
+ }
+ const request = collection.find(query).sort({ createdAt: "desc" });
+ if (limit) {
+ request.limit(limit);
+ }
+ if (offset) {
+ request.skip(offset);
+ }
+ const result = await request.toArray();
+ const runs = result.map((row) => {
+ let parsedSnapshot = row.snapshot;
+ if (typeof parsedSnapshot === "string") {
+ try {
+ parsedSnapshot = JSON.parse(row.snapshot);
+ } catch (e) {
+ console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
+ }
+ }
+ return {
+ workflowName: row.workflow_name,
+ runId: row.run_id,
+ snapshot: parsedSnapshot,
+ createdAt: new Date(row.createdAt),
+ updatedAt: new Date(row.updatedAt)
+ };
+ });
+ return { runs, total: total || runs.length };
+ }
+ async getEvalsByAgentName(agentName, type) {
+ try {
+ const query = {
+ agent_name: agentName
+ };
+ if (type === "test") {
+ query["test_info"] = { $ne: null };
+ }
+ if (type === "live") {
+ query["test_info"] = null;
+ }
+ const collection = await this.getCollection(TABLE_EVALS);
+ const documents = await collection.find(query).sort({ created_at: "desc" }).toArray();
+ const result = documents.map((row) => this.transformEvalRow(row));
+ return result.filter((row) => {
+ if (type === "live") {
+ return !Boolean(row.testInfo?.testPath);
+ }
+ if (type === "test") {
+ return row.testInfo?.testPath !== null;
+ }
+ return true;
+ });
+ } catch (error) {
+ if (error instanceof Error && error.message.includes("no such table")) {
+ return [];
+ }
+ this.logger.error("Failed to get evals for the specified agent: " + error?.message);
+ throw error;
+ }
+ }
+ async persistWorkflowSnapshot({
+ workflowName,
+ runId,
+ snapshot
+ }) {
+ try {
+ const now = (/* @__PURE__ */ new Date()).toISOString();
+ const collection = await this.getCollection(TABLE_WORKFLOW_SNAPSHOT);
+ await collection.updateOne(
+ { workflow_name: workflowName, run_id: runId },
+ {
+ $set: {
+ snapshot: JSON.stringify(snapshot),
+ updatedAt: now
+ },
+ $setOnInsert: {
+ createdAt: now
+ }
+ },
+ { upsert: true }
+ );
+ } catch (error) {
+ this.logger.error(`Error persisting workflow snapshot: ${error}`);
+ throw error;
+ }
+ }
+ async loadWorkflowSnapshot({
+ workflowName,
+ runId
+ }) {
+ try {
+ const result = await this.load({
+ tableName: TABLE_WORKFLOW_SNAPSHOT,
+ keys: {
+ workflow_name: workflowName,
+ run_id: runId
+ }
+ });
+ if (!result?.length) {
+ return null;
+ }
+ return JSON.parse(result[0].snapshot);
+ } catch (error) {
+ console.error("Error loading workflow snapshot:", error);
+ throw error;
+ }
+ }
+ async getWorkflowRunById({
+ runId,
+ workflowName
+ }) {
+ try {
+ const query = {};
+ if (runId) {
+ query["run_id"] = runId;
+ }
+ if (workflowName) {
+ query["workflow_name"] = workflowName;
+ }
+ const collection = await this.getCollection(TABLE_WORKFLOW_SNAPSHOT);
+ const result = await collection.findOne(query);
+ if (!result) {
+ return null;
+ }
+ return this.parseWorkflowRun(result);
+ } catch (error) {
+ console.error("Error getting workflow run by ID:", error);
+ throw error;
+ }
+ }
+ parseWorkflowRun(row) {
+ let parsedSnapshot = row.snapshot;
+ if (typeof parsedSnapshot === "string") {
+ try {
+ parsedSnapshot = JSON.parse(row.snapshot);
+ } catch (e) {
+ console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
+ }
+ }
+ return {
+ workflowName: row.workflow_name,
+ runId: row.run_id,
+ snapshot: parsedSnapshot,
+ createdAt: row.createdAt,
+ updatedAt: row.updatedAt,
+ resourceId: row.resourceId
+ };
+ }
+ parseRow(row) {
+ let content = row.content;
+ try {
+ content = JSON.parse(row.content);
+ } catch {
+ }
+ return {
+ id: row.id,
+ content,
+ role: row.role,
+ type: row.type,
+ createdAt: new Date(row.createdAt),
+ threadId: row.thread_id
+ };
+ }
+ transformEvalRow(row) {
+ let testInfoValue = null;
+ if (row.test_info) {
+ try {
+ testInfoValue = typeof row.test_info === "string" ? JSON.parse(row.test_info) : row.test_info;
+ } catch (e) {
+ console.warn("Failed to parse test_info:", e);
+ }
+ }
+ return {
+ input: row.input,
+ output: row.output,
+ result: row.result,
+ agentName: row.agent_name,
+ metricName: row.metric_name,
+ instructions: row.instructions,
+ testInfo: testInfoValue,
+ globalRunId: row.global_run_id,
+ runId: row.run_id,
+ createdAt: row.created_at
+ };
+ }
+ async close() {
+ await this.#client.close();
+ }
+ };

  // src/vector/prompt.ts
  var MONGODB_PROMPT = `When querying MongoDB Vector, you can ONLY use the operators listed below. Any other operators will be rejected.
@@ -451,4 +1015,4 @@ Example Complex Query:
  ]
  }`;

- export { MONGODB_PROMPT, MongoDBVector };
+ export { MONGODB_PROMPT, MongoDBStore, MongoDBVector };
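
For orientation, a hedged migration sketch (illustrative, not part of the published diff) of how call sites change with this release: the MongoDBVector methods now take a single params object, updateIndexById/deleteIndexById become updateVector/deleteVector, and a new MongoDBStore storage adapter is exported whose constructor requires non-empty url and dbName. The index name, ids and connection string below are placeholders, and the MongoDBVector instance is assumed to exist already since its constructor is not shown in this diff.

    import { MongoDBVector, MongoDBStore } from '@mastra/mongodb';

    async function migrationSketch(vectors: MongoDBVector) {
      // Vector methods now accept a single object argument.
      await vectors.createIndex({ indexName: 'docs', dimension: 1536, metric: 'cosine' });
      await vectors.waitForIndexReady({ indexName: 'docs', timeoutMs: 60000, checkIntervalMs: 2000 });
      const stats = await vectors.describeIndex({ indexName: 'docs' }); // was describeIndex('docs')
      console.log(stats);

      // Renamed: updateIndexById(indexName, id, update) -> updateVector({ indexName, id, update })
      await vectors.updateVector({ indexName: 'docs', id: 'doc-1', update: { metadata: { tag: 'v2' } } });
      // Renamed: deleteIndexById(indexName, id) -> deleteVector({ indexName, id })
      await vectors.deleteVector({ indexName: 'docs', id: 'doc-1' });
      await vectors.deleteIndex({ indexName: 'docs' });

      // New storage adapter; url and dbName must be non-empty strings or the constructor throws.
      const store = new MongoDBStore({ url: 'mongodb://localhost:27017', dbName: 'mastra' });
      const thread = await store.getThreadById({ threadId: 'thread-1' });
      console.log(thread);
      await store.close();
    }
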