@mastra/mongodb 0.10.0 → 0.10.1-alpha.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +7 -7
- package/CHANGELOG.md +9 -0
- package/README.md +50 -0
- package/dist/_tsup-dts-rollup.d.cts +103 -0
- package/dist/_tsup-dts-rollup.d.ts +103 -0
- package/dist/index.cjs +520 -0
- package/dist/index.d.cts +2 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +520 -1
- package/docker-compose.yaml +30 -0
- package/package.json +3 -3
- package/src/index.ts +1 -0
- package/src/storage/index.test.ts +779 -0
- package/src/storage/index.ts +674 -0
- package/docker-compose.yml +0 -8
package/dist/index.js
CHANGED
@@ -2,6 +2,7 @@ import { MastraVector } from '@mastra/core/vector';
 import { MongoClient } from 'mongodb';
 import { v4 } from 'uuid';
 import { BaseFilterTranslator } from '@mastra/core/vector/filter';
+import { MastraStorage, TABLE_THREADS, TABLE_MESSAGES, TABLE_TRACES, TABLE_WORKFLOW_SNAPSHOT, TABLE_EVALS } from '@mastra/core/storage';
 
 // src/vector/index.ts
 var MongoDBFilterTranslator = class extends BaseFilterTranslator {
@@ -400,6 +401,524 @@ var MongoDBVector = class extends MastraVector {
     return translator.translate(filter);
   }
 };
+function safelyParseJSON(jsonString) {
+  try {
+    return JSON.parse(jsonString);
+  } catch {
+    return {};
+  }
+}
+var MongoDBStore = class extends MastraStorage {
+  #isConnected = false;
+  #client;
+  #db;
+  #dbName;
+  constructor(config) {
+    super({ name: "MongoDBStore" });
+    this.#isConnected = false;
+    if (!config.url?.trim().length) {
+      throw new Error(
+        "MongoDBStore: url must be provided and cannot be empty. Passing an empty string may cause fallback to local MongoDB defaults."
+      );
+    }
+    if (!config.dbName?.trim().length) {
+      throw new Error(
+        "MongoDBStore: dbName must be provided and cannot be empty. Passing an empty string may cause fallback to local MongoDB defaults."
+      );
+    }
+    this.#dbName = config.dbName;
+    this.#client = new MongoClient(config.url);
+  }
+  async getConnection() {
+    if (this.#isConnected) {
+      return this.#db;
+    }
+    await this.#client.connect();
+    this.#db = this.#client.db(this.#dbName);
+    this.#isConnected = true;
+    return this.#db;
+  }
+  async getCollection(collectionName) {
+    const db = await this.getConnection();
+    return db.collection(collectionName);
+  }
+  async createTable() {
+  }
+  async clearTable({ tableName }) {
+    try {
+      const collection = await this.getCollection(tableName);
+      await collection.deleteMany({});
+    } catch (error) {
+      if (error instanceof Error) {
+        this.logger.error(error.message);
+      }
+    }
+  }
+  async insert({ tableName, record }) {
+    try {
+      const collection = await this.getCollection(tableName);
+      await collection.insertOne(record);
+    } catch (error) {
+      this.logger.error(`Error upserting into table ${tableName}: ${error}`);
+      throw error;
+    }
+  }
+  async batchInsert({ tableName, records }) {
+    if (!records.length) {
+      return;
+    }
+    try {
+      const collection = await this.getCollection(tableName);
+      await collection.insertMany(records);
+    } catch (error) {
+      this.logger.error(`Error upserting into table ${tableName}: ${error}`);
+      throw error;
+    }
+  }
+  async load({ tableName, keys }) {
+    this.logger.info(`Loading ${tableName} with keys ${JSON.stringify(keys)}`);
+    try {
+      const collection = await this.getCollection(tableName);
+      return await collection.find(keys).toArray();
+    } catch (error) {
+      this.logger.error(`Error loading ${tableName} with keys ${JSON.stringify(keys)}: ${error}`);
+      throw error;
+    }
+  }
+  async getThreadById({ threadId }) {
+    try {
+      const collection = await this.getCollection(TABLE_THREADS);
+      const result = await collection.findOne({ id: threadId });
+      if (!result) {
+        return null;
+      }
+      return {
+        ...result,
+        metadata: typeof result.metadata === "string" ? JSON.parse(result.metadata) : result.metadata
+      };
+    } catch (error) {
+      this.logger.error(`Error loading thread with ID ${threadId}: ${error}`);
+      throw error;
+    }
+  }
+  async getThreadsByResourceId({ resourceId }) {
+    try {
+      const collection = await this.getCollection(TABLE_THREADS);
+      const results = await collection.find({ resourceId }).toArray();
+      if (!results.length) {
+        return [];
+      }
+      return results.map((result) => ({
+        ...result,
+        metadata: typeof result.metadata === "string" ? JSON.parse(result.metadata) : result.metadata
+      }));
+    } catch (error) {
+      this.logger.error(`Error loading threads by resourceId ${resourceId}: ${error}`);
+      throw error;
+    }
+  }
+  async saveThread({ thread }) {
+    try {
+      const collection = await this.getCollection(TABLE_THREADS);
+      await collection.updateOne(
+        { id: thread.id },
+        {
+          $set: {
+            ...thread,
+            metadata: JSON.stringify(thread.metadata)
+          }
+        },
+        { upsert: true }
+      );
+      return thread;
+    } catch (error) {
+      this.logger.error(`Error saving thread ${thread.id}: ${error}`);
+      throw error;
+    }
+  }
+  async updateThread({
+    id,
+    title,
+    metadata
+  }) {
+    const thread = await this.getThreadById({ threadId: id });
+    if (!thread) {
+      throw new Error(`Thread ${id} not found`);
+    }
+    const updatedThread = {
+      ...thread,
+      title,
+      metadata: {
+        ...thread.metadata,
+        ...metadata
+      }
+    };
+    try {
+      const collection = await this.getCollection(TABLE_THREADS);
+      await collection.updateOne(
+        { id },
+        {
+          $set: {
+            title,
+            metadata: JSON.stringify(updatedThread.metadata)
+          }
+        }
+      );
+    } catch (error) {
+      this.logger.error(`Error updating thread ${id}:) ${error}`);
+      throw error;
+    }
+    return updatedThread;
+  }
+  async deleteThread({ threadId }) {
+    try {
+      const collectionMessages = await this.getCollection(TABLE_MESSAGES);
+      await collectionMessages.deleteMany({ thread_id: threadId });
+      const collectionThreads = await this.getCollection(TABLE_THREADS);
+      await collectionThreads.deleteOne({ id: threadId });
+    } catch (error) {
+      this.logger.error(`Error deleting thread ${threadId}: ${error}`);
+      throw error;
+    }
+  }
+  async getMessages({ threadId, selectBy }) {
+    try {
+      const limit = typeof selectBy?.last === "number" ? selectBy.last : 40;
+      const include = selectBy?.include || [];
+      let messages = [];
+      let allMessages = [];
+      const collection = await this.getCollection(TABLE_MESSAGES);
+      allMessages = (await collection.find({ thread_id: threadId }).sort({ createdAt: -1 }).toArray()).map(
+        (row) => this.parseRow(row)
+      );
+      if (include.length) {
+        const idToIndex = /* @__PURE__ */ new Map();
+        allMessages.forEach((msg, idx) => {
+          idToIndex.set(msg.id, idx);
+        });
+        const selectedIndexes = /* @__PURE__ */ new Set();
+        for (const inc of include) {
+          const idx = idToIndex.get(inc.id);
+          if (idx === void 0) continue;
+          for (let i = 1; i <= (inc.withPreviousMessages || 0); i++) {
+            if (idx + i < allMessages.length) selectedIndexes.add(idx + i);
+          }
+          selectedIndexes.add(idx);
+          for (let i = 1; i <= (inc.withNextMessages || 0); i++) {
+            if (idx - i >= 0) selectedIndexes.add(idx - i);
+          }
+        }
+        messages.push(
+          ...Array.from(selectedIndexes).map((i) => allMessages[i]).filter((m) => !!m)
+        );
+      }
+      const excludeIds = new Set(messages.map((m) => m.id));
+      for (const msg of allMessages) {
+        if (messages.length >= limit) break;
+        if (!excludeIds.has(msg.id)) {
+          messages.push(msg);
+        }
+      }
+      messages.sort((a, b) => a.createdAt.getTime() - b.createdAt.getTime());
+      return messages.slice(0, limit);
+    } catch (error) {
+      this.logger.error("Error getting messages:", error);
+      throw error;
+    }
+  }
+  async saveMessages({ messages }) {
+    if (!messages.length) {
+      return messages;
+    }
+    const threadId = messages[0]?.threadId;
+    if (!threadId) {
+      this.logger.error("Thread ID is required to save messages");
+      throw new Error("Thread ID is required");
+    }
+    try {
+      const messagesToInsert = messages.map((message) => {
+        const time = message.createdAt || /* @__PURE__ */ new Date();
+        return {
+          id: message.id,
+          thread_id: threadId,
+          content: typeof message.content === "string" ? message.content : JSON.stringify(message.content),
+          role: message.role,
+          type: message.type,
+          resourceId: message.resourceId,
+          createdAt: time instanceof Date ? time.toISOString() : time
+        };
+      });
+      const collection = await this.getCollection(TABLE_MESSAGES);
+      await collection.insertMany(messagesToInsert);
+      return messages;
+    } catch (error) {
+      this.logger.error("Failed to save messages in database: " + error?.message);
+      throw error;
+    }
+  }
+  async getTraces({
+    name,
+    scope,
+    page,
+    perPage,
+    attributes,
+    filters
+  } = {
+    page: 0,
+    perPage: 100
+  }) {
+    const limit = perPage;
+    const offset = page * perPage;
+    const query = {};
+    if (name) {
+      query["name"] = `%${name}%`;
+    }
+    if (scope) {
+      query["scope"] = scope;
+    }
+    if (attributes) {
+      Object.keys(attributes).forEach((key) => {
+        query[`attributes.${key}`] = attributes[key];
+      });
+    }
+    if (filters) {
+      Object.entries(filters).forEach(([key, value]) => {
+        query[key] = value;
+      });
+    }
+    const collection = await this.getCollection(TABLE_TRACES);
+    const result = await collection.find(query, {
+      sort: { startTime: -1 }
+    }).limit(limit).skip(offset).toArray();
+    return result.map((row) => ({
+      id: row.id,
+      parentSpanId: row.parentSpanId,
+      traceId: row.traceId,
+      name: row.name,
+      scope: row.scope,
+      kind: row.kind,
+      status: safelyParseJSON(row.status),
+      events: safelyParseJSON(row.events),
+      links: safelyParseJSON(row.links),
+      attributes: safelyParseJSON(row.attributes),
+      startTime: row.startTime,
+      endTime: row.endTime,
+      other: safelyParseJSON(row.other),
+      createdAt: row.createdAt
+    }));
+  }
+  async getWorkflowRuns({
+    workflowName,
+    fromDate,
+    toDate,
+    limit,
+    offset
+  } = {}) {
+    const query = {};
+    if (workflowName) {
+      query["workflow_name"] = workflowName;
+    }
+    if (fromDate || toDate) {
+      query["createdAt"] = {};
+      if (fromDate) {
+        query["createdAt"]["$gte"] = fromDate;
+      }
+      if (toDate) {
+        query["createdAt"]["$lte"] = toDate;
+      }
+    }
+    const collection = await this.getCollection(TABLE_WORKFLOW_SNAPSHOT);
+    let total = 0;
+    if (limit !== void 0 && offset !== void 0) {
+      total = await collection.countDocuments(query);
+    }
+    const request = collection.find(query).sort({ createdAt: "desc" });
+    if (limit) {
+      request.limit(limit);
+    }
+    if (offset) {
+      request.skip(offset);
+    }
+    const result = await request.toArray();
+    const runs = result.map((row) => {
+      let parsedSnapshot = row.snapshot;
+      if (typeof parsedSnapshot === "string") {
+        try {
+          parsedSnapshot = JSON.parse(row.snapshot);
+        } catch (e) {
+          console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
+        }
+      }
+      return {
+        workflowName: row.workflow_name,
+        runId: row.run_id,
+        snapshot: parsedSnapshot,
+        createdAt: new Date(row.createdAt),
+        updatedAt: new Date(row.updatedAt)
+      };
+    });
+    return { runs, total: total || runs.length };
+  }
+  async getEvalsByAgentName(agentName, type) {
+    try {
+      const query = {
+        agent_name: agentName
+      };
+      if (type === "test") {
+        query["test_info"] = { $ne: null };
+      }
+      if (type === "live") {
+        query["test_info"] = null;
+      }
+      const collection = await this.getCollection(TABLE_EVALS);
+      const documents = await collection.find(query).sort({ created_at: "desc" }).toArray();
+      const result = documents.map((row) => this.transformEvalRow(row));
+      return result.filter((row) => {
+        if (type === "live") {
+          return !Boolean(row.testInfo?.testPath);
+        }
+        if (type === "test") {
+          return row.testInfo?.testPath !== null;
+        }
+        return true;
+      });
+    } catch (error) {
+      if (error instanceof Error && error.message.includes("no such table")) {
+        return [];
+      }
+      this.logger.error("Failed to get evals for the specified agent: " + error?.message);
+      throw error;
+    }
+  }
+  async persistWorkflowSnapshot({
+    workflowName,
+    runId,
+    snapshot
+  }) {
+    try {
+      const now = (/* @__PURE__ */ new Date()).toISOString();
+      const collection = await this.getCollection(TABLE_WORKFLOW_SNAPSHOT);
+      await collection.updateOne(
+        { workflow_name: workflowName, run_id: runId },
+        {
+          $set: {
+            snapshot: JSON.stringify(snapshot),
+            updatedAt: now
+          },
+          $setOnInsert: {
+            createdAt: now
+          }
+        },
+        { upsert: true }
+      );
+    } catch (error) {
+      this.logger.error(`Error persisting workflow snapshot: ${error}`);
+      throw error;
+    }
+  }
+  async loadWorkflowSnapshot({
+    workflowName,
+    runId
+  }) {
+    try {
+      const result = await this.load({
+        tableName: TABLE_WORKFLOW_SNAPSHOT,
+        keys: {
+          workflow_name: workflowName,
+          run_id: runId
+        }
+      });
+      if (!result?.length) {
+        return null;
+      }
+      return JSON.parse(result[0].snapshot);
+    } catch (error) {
+      console.error("Error loading workflow snapshot:", error);
+      throw error;
+    }
+  }
+  async getWorkflowRunById({
+    runId,
+    workflowName
+  }) {
+    try {
+      const query = {};
+      if (runId) {
+        query["run_id"] = runId;
+      }
+      if (workflowName) {
+        query["workflow_name"] = workflowName;
+      }
+      const collection = await this.getCollection(TABLE_WORKFLOW_SNAPSHOT);
+      const result = await collection.findOne(query);
+      if (!result) {
+        return null;
+      }
+      return this.parseWorkflowRun(result);
+    } catch (error) {
+      console.error("Error getting workflow run by ID:", error);
+      throw error;
+    }
+  }
+  parseWorkflowRun(row) {
+    let parsedSnapshot = row.snapshot;
+    if (typeof parsedSnapshot === "string") {
+      try {
+        parsedSnapshot = JSON.parse(row.snapshot);
+      } catch (e) {
+        console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
+      }
+    }
+    return {
+      workflowName: row.workflow_name,
+      runId: row.run_id,
+      snapshot: parsedSnapshot,
+      createdAt: row.createdAt,
+      updatedAt: row.updatedAt,
+      resourceId: row.resourceId
+    };
+  }
+  parseRow(row) {
+    let content = row.content;
+    try {
+      content = JSON.parse(row.content);
+    } catch {
+    }
+    return {
+      id: row.id,
+      content,
+      role: row.role,
+      type: row.type,
+      createdAt: new Date(row.createdAt),
+      threadId: row.thread_id
+    };
+  }
+  transformEvalRow(row) {
+    let testInfoValue = null;
+    if (row.test_info) {
+      try {
+        testInfoValue = typeof row.test_info === "string" ? JSON.parse(row.test_info) : row.test_info;
+      } catch (e) {
+        console.warn("Failed to parse test_info:", e);
+      }
+    }
+    return {
+      input: row.input,
+      output: row.output,
+      result: row.result,
+      agentName: row.agent_name,
+      metricName: row.metric_name,
+      instructions: row.instructions,
+      testInfo: testInfoValue,
+      globalRunId: row.global_run_id,
+      runId: row.run_id,
+      createdAt: row.created_at
+    };
+  }
+  async close() {
+    await this.#client.close();
+  }
+};
 
 // src/vector/prompt.ts
 var MONGODB_PROMPT = `When querying MongoDB Vector, you can ONLY use the operators listed below. Any other operators will be rejected.
@@ -496,4 +1015,4 @@ Example Complex Query:
 ]
 }`;
 
-export { MONGODB_PROMPT, MongoDBVector };
+export { MONGODB_PROMPT, MongoDBStore, MongoDBVector };
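For orientation, here is a minimal sketch (not part of the diff) of driving the new MongoDBStore with the API added above. The connection URL, database name, and thread fields are illustrative placeholders; the authoritative thread and config types live in @mastra/core.

```ts
// Illustrative sketch only — not from the published diff.
import { MongoDBStore } from '@mastra/mongodb';

const store = new MongoDBStore({
  url: 'mongodb://localhost:27017', // the constructor rejects empty strings
  dbName: 'mastra', // likewise validated as non-empty
});

// saveThread upserts on the thread id and stringifies metadata before writing.
await store.saveThread({
  thread: {
    id: 'thread-1', // placeholder values; the real thread
    resourceId: 'resource-1', // shape is defined by @mastra/core
    title: 'Example thread',
    metadata: { source: 'docs' },
    createdAt: new Date(),
    updatedAt: new Date(),
  },
});

// getThreadById parses metadata back out of its stored JSON-string form.
const thread = await store.getThreadById({ threadId: 'thread-1' });
console.log(thread?.metadata);

await store.close(); // closes the underlying MongoClient
```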
package/docker-compose.yaml
ADDED

@@ -0,0 +1,30 @@
+services:
+  mongodb-storage:
+    image: mongo:6.0.16
+    container_name: 'mongodb-storage-test-db'
+    ports:
+      - '27017:27017'
+    volumes:
+      - mongodbdata:/data/db
+    healthcheck:
+      test: ['CMD', 'mongosh', '--eval', "db.adminCommand('ping')"]
+      interval: 2s
+      timeout: 2s
+      retries: 15
+      start_period: 3s
+  mongodb-vector:
+    image: mongodb/mongodb-atlas-local
+    container_name: 'mongodb-vector-test-db'
+    environment:
+      MONGODB_INITDB_ROOT_USERNAME: mongodb
+      MONGODB_INITDB_ROOT_PASSWORD: mongodb
+    ports:
+      - 27018:27017
+    healthcheck:
+      test: ['CMD', 'mongosh', '-u', 'mongodb', '-p', 'mongodb', '--eval', "db.adminCommand('ping')"]
+      interval: 2s
+      timeout: 2s
+      retries: 15
+      start_period: 3s
+volumes:
+  mongodbdata:
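The new compose file replaces the old docker-compose.yml (removed in this release, per the file list above) and provisions two databases for the test suite: a plain mongo:6.0.16 container for the storage tests, and a mongodb-atlas-local container, which supports Atlas-style vector search, for the vector tests. The port mappings imply connection strings roughly like the following; these are assumptions for illustration, and the test files define the real values.

```ts
// Assumed connection strings derived from the compose port mappings above.
const storageUrl = 'mongodb://localhost:27017'; // mongodb-storage: no auth configured
const vectorUrl = 'mongodb://mongodb:mongodb@localhost:27018'; // mongodb-vector: root user from the environment block
```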
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@mastra/mongodb",
-  "version": "0.10.0",
+  "version": "0.10.1-alpha.0",
   "description": "MongoDB provider for Mastra - includes vector store capabilities",
   "type": "module",
   "main": "dist/index.js",
@@ -32,7 +32,7 @@
     "typescript": "^5.8.2",
     "vitest": "^3.1.2",
     "@internal/lint": "0.0.6",
-    "@mastra/core": "0.10.0"
+    "@mastra/core": "0.10.1-alpha.0"
   },
   "peerDependencies": {
     "@mastra/core": "^0.10.0"
@@ -40,7 +40,7 @@
   "scripts": {
     "build": "tsup src/index.ts --format esm,cjs --experimental-dts --clean --treeshake=smallest --splitting",
     "build:watch": "pnpm build --watch",
-    "pretest": "docker compose up -d",
+    "pretest": "docker compose up -d --wait",
    "test": "vitest run",
    "posttest": "docker compose down -v",
    "pretest:watch": "docker compose up -d",
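Note the pretest change: the added --wait flag makes docker compose up -d block until the containers' healthchecks (defined in the compose file above) report healthy, so vitest no longer races the databases during startup.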
package/src/index.ts
CHANGED