@mastra/mongodb 0.0.0-vnext-inngest-20250508131921 → 0.0.0-vnextAgentNetwork-20250527091247
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +172 -2
- package/README.md +50 -0
- package/dist/_tsup-dts-rollup.d.cts +153 -24
- package/dist/_tsup-dts-rollup.d.ts +153 -24
- package/dist/index.cjs +596 -31
- package/dist/index.d.cts +3 -3
- package/dist/index.d.ts +3 -3
- package/dist/index.js +596 -32
- package/docker-compose.yaml +30 -0
- package/package.json +8 -5
- package/src/index.ts +1 -0
- package/src/storage/index.test.ts +778 -0
- package/src/storage/index.ts +675 -0
- package/src/vector/index.test.ts +26 -13
- package/src/vector/index.ts +89 -48
- package/docker-compose.yml +0 -8
package/dist/index.cjs
CHANGED
@@ -4,6 +4,7 @@ var vector = require('@mastra/core/vector');
 var mongodb = require('mongodb');
 var uuid = require('uuid');
 var filter = require('@mastra/core/vector/filter');
+var storage = require('@mastra/core/storage');

 // src/vector/index.ts
 var MongoDBFilterTranslator = class extends filter.BaseFilterTranslator {
@@ -121,8 +122,7 @@ var MongoDBVector = class extends vector.MastraVector {
   async disconnect() {
     await this.client.close();
   }
-  async createIndex(
-    const { indexName, dimension, metric = "cosine" } = params;
+  async createIndex({ indexName, dimension, metric = "cosine" }) {
     if (!Number.isInteger(dimension) || dimension <= 0) {
       throw new Error("Dimension must be a positive integer");
     }
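createIndex now takes a single object argument instead of a positional params object. For reference, a minimal TypeScript sketch of the new call shape (the index name and dimension are hypothetical, and the MongoDBVector constructor options are not part of this diff):

  import { MongoDBVector } from '@mastra/mongodb';

  // Hypothetical instance; construction details are outside this diff.
  declare const vectorStore: MongoDBVector;

  // "cosine" is the default metric per the destructuring above;
  // dimension must be a positive integer or the call throws.
  await vectorStore.createIndex({ indexName: 'embeddings', dimension: 1536 });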
@@ -160,7 +160,19 @@ var MongoDBVector = class extends vector.MastraVector {
     }
     await collection.updateOne({ _id: "__index_metadata__" }, { $set: { dimension, metric } }, { upsert: true });
   }
-
+  /**
+   * Waits for the index to be ready.
+   *
+   * @param {string} indexName - The name of the index to wait for
+   * @param {number} timeoutMs - The maximum time in milliseconds to wait for the index to be ready (default: 60000)
+   * @param {number} checkIntervalMs - The interval in milliseconds at which to check if the index is ready (default: 2000)
+   * @returns A promise that resolves when the index is ready
+   */
+  async waitForIndexReady({
+    indexName,
+    timeoutMs = 6e4,
+    checkIntervalMs = 2e3
+  }) {
     const collection = await this.getCollection(indexName, true);
     const indexNameInternal = `${indexName}_vector_index`;
     const startTime = Date.now();
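The newly documented waitForIndexReady follows the same object-parameter convention. Continuing the hypothetical vectorStore from the sketch above (the values shown simply restate the defaults):

  // timeoutMs defaults to 60000 and checkIntervalMs to 2000 (6e4 / 2e3 above).
  await vectorStore.waitForIndexReady({ indexName: 'embeddings', timeoutMs: 60_000, checkIntervalMs: 2_000 });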
@@ -175,11 +187,10 @@ var MongoDBVector = class extends vector.MastraVector {
     }
     throw new Error(`Index "${indexNameInternal}" did not become ready within timeout`);
   }
-  async upsert(
-    const { indexName, vectors, metadata, ids, documents } = params;
+  async upsert({ indexName, vectors, metadata, ids, documents }) {
     const collection = await this.getCollection(indexName);
     this.collectionForValidation = collection;
-    const stats = await this.describeIndex(indexName);
+    const stats = await this.describeIndex({ indexName });
     await this.validateVectorDimensions(vectors, stats.dimension);
     const generatedIds = ids || vectors.map(() => uuid.v4());
     const operations = vectors.map((vector, idx) => {
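upsert likewise moves to a destructured object and still returns the (possibly generated) ids. A sketch reusing the hypothetical vectorStore (vector values and metadata are made up; ids is optional, and UUIDs are generated when it is omitted, per uuid.v4 above):

  const ids = await vectorStore.upsert({
    indexName: 'embeddings',
    vectors: [[0.1, 0.2, 0.3 /* ...up to the index dimension */]],
    metadata: [{ source: 'doc-1' }],
  });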
@@ -212,8 +223,14 @@ var MongoDBVector = class extends vector.MastraVector {
     await collection.bulkWrite(operations);
     return generatedIds;
   }
-  async query(
-
+  async query({
+    indexName,
+    queryVector,
+    topK = 10,
+    filter,
+    includeVector = false,
+    documentFilter
+  }) {
     const collection = await this.getCollection(indexName, true);
     const indexNameInternal = `${indexName}_vector_index`;
     const mongoFilter = this.transformFilter(filter);
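query gains the same object signature, with topK defaulting to 10 and includeVector to false per the destructuring above. A hedged sketch with a made-up metadata filter:

  const results = await vectorStore.query({
    indexName: 'embeddings',
    queryVector: [0.1, 0.2, 0.3 /* ...up to the index dimension */],
    topK: 5,
    filter: { source: 'doc-1' },
  });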
@@ -269,7 +286,13 @@ var MongoDBVector = class extends vector.MastraVector {
     const collections = await this.db.listCollections().toArray();
     return collections.map((col) => col.name);
   }
-
+  /**
+   * Retrieves statistics about a vector index.
+   *
+   * @param {string} indexName - The name of the index to describe
+   * @returns A promise that resolves to the index statistics including dimension, count and metric
+   */
+  async describeIndex({ indexName }) {
     const collection = await this.getCollection(indexName, true);
     const count = await collection.countDocuments({ _id: { $ne: "__index_metadata__" } });
     const metadataDoc = await collection.findOne({ _id: "__index_metadata__" });
@@ -281,7 +304,7 @@ var MongoDBVector = class extends vector.MastraVector {
       metric
     };
   }
-  async deleteIndex(indexName) {
+  async deleteIndex({ indexName }) {
     const collection = await this.getCollection(indexName, false);
     if (collection) {
       await collection.drop();
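describeIndex and deleteIndex now take { indexName } objects as well. Continuing the sketch:

  // Resolves to the index statistics (dimension, count, metric) per the JSDoc above.
  const stats = await vectorStore.describeIndex({ indexName: 'embeddings' });
  await vectorStore.deleteIndex({ indexName: 'embeddings' });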
@@ -290,33 +313,56 @@ var MongoDBVector = class extends vector.MastraVector {
       throw new Error(`Index (Collection) "${indexName}" does not exist`);
     }
   }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  /**
+   * Updates a vector by its ID with the provided vector and/or metadata.
+   * @param indexName - The name of the index containing the vector.
+   * @param id - The ID of the vector to update.
+   * @param update - An object containing the vector and/or metadata to update.
+   * @param update.vector - An optional array of numbers representing the new vector.
+   * @param update.metadata - An optional record containing the new metadata.
+   * @returns A promise that resolves when the update is complete.
+   * @throws Will throw an error if no updates are provided or if the update operation fails.
+   */
+  async updateVector({ indexName, id, update }) {
+    try {
+      if (!update.vector && !update.metadata) {
+        throw new Error("No updates provided");
+      }
+      const collection = await this.getCollection(indexName, true);
+      const updateDoc = {};
+      if (update.vector) {
+        const stats = await this.describeIndex({ indexName });
+        await this.validateVectorDimensions([update.vector], stats.dimension);
+        updateDoc[this.embeddingFieldName] = update.vector;
+      }
+      if (update.metadata) {
+        const normalizedMeta = Object.keys(update.metadata).reduce(
+          (acc, key) => {
+            acc[key] = update.metadata[key] instanceof Date ? update.metadata[key].toISOString() : update.metadata[key];
+            return acc;
+          },
+          {}
+        );
+        updateDoc[this.metadataFieldName] = normalizedMeta;
+      }
+      await collection.findOneAndUpdate({ _id: id }, { $set: updateDoc });
+    } catch (error) {
+      throw new Error(`Failed to update vector by id: ${id} for index name: ${indexName}: ${error.message}`);
     }
-    await collection.findOneAndUpdate({ _id: id }, { $set: updateDoc });
   }
-
+  /**
+   * Deletes a vector by its ID.
+   * @param indexName - The name of the index containing the vector.
+   * @param id - The ID of the vector to delete.
+   * @returns A promise that resolves when the deletion is complete.
+   * @throws Will throw an error if the deletion operation fails.
+   */
+  async deleteVector({ indexName, id }) {
     try {
       const collection = await this.getCollection(indexName, true);
       await collection.deleteOne({ _id: id });
     } catch (error) {
-      throw new Error(`Failed to delete
+      throw new Error(`Failed to delete vector by id: ${id} for index name: ${indexName}: ${error.message}`);
     }
   }
   // Private methods
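The reworked updateVector and deleteVector follow suit. A sketch with a hypothetical vector id (an update carrying neither vector nor metadata throws "No updates provided"):

  await vectorStore.updateVector({
    indexName: 'embeddings',
    id: 'vec-1',
    update: { metadata: { reviewed: true } }, // vector and/or metadata
  });
  await vectorStore.deleteVector({ indexName: 'embeddings', id: 'vec-1' });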
@@ -357,6 +403,524 @@ var MongoDBVector = class extends vector.MastraVector {
     return translator.translate(filter);
   }
 };
+function safelyParseJSON(jsonString) {
+  try {
+    return JSON.parse(jsonString);
+  } catch {
+    return {};
+  }
+}
+var MongoDBStore = class extends storage.MastraStorage {
+  #isConnected = false;
+  #client;
+  #db;
+  #dbName;
+  constructor(config) {
+    super({ name: "MongoDBStore" });
+    this.#isConnected = false;
+    if (!config.url?.trim().length) {
+      throw new Error(
+        "MongoDBStore: url must be provided and cannot be empty. Passing an empty string may cause fallback to local MongoDB defaults."
+      );
+    }
+    if (!config.dbName?.trim().length) {
+      throw new Error(
+        "MongoDBStore: dbName must be provided and cannot be empty. Passing an empty string may cause fallback to local MongoDB defaults."
+      );
+    }
+    this.#dbName = config.dbName;
+    this.#client = new mongodb.MongoClient(config.url, config.options);
+  }
+  async getConnection() {
+    if (this.#isConnected) {
+      return this.#db;
+    }
+    await this.#client.connect();
+    this.#db = this.#client.db(this.#dbName);
+    this.#isConnected = true;
+    return this.#db;
+  }
+  async getCollection(collectionName) {
+    const db = await this.getConnection();
+    return db.collection(collectionName);
+  }
+  async createTable() {
+  }
+  async clearTable({ tableName }) {
+    try {
+      const collection = await this.getCollection(tableName);
+      await collection.deleteMany({});
+    } catch (error) {
+      if (error instanceof Error) {
+        this.logger.error(error.message);
+      }
+    }
+  }
+  async insert({ tableName, record }) {
+    try {
+      const collection = await this.getCollection(tableName);
+      await collection.insertOne(record);
+    } catch (error) {
+      this.logger.error(`Error upserting into table ${tableName}: ${error}`);
+      throw error;
+    }
+  }
+  async batchInsert({ tableName, records }) {
+    if (!records.length) {
+      return;
+    }
+    try {
+      const collection = await this.getCollection(tableName);
+      await collection.insertMany(records);
+    } catch (error) {
+      this.logger.error(`Error upserting into table ${tableName}: ${error}`);
+      throw error;
+    }
+  }
+  async load({ tableName, keys }) {
+    this.logger.info(`Loading ${tableName} with keys ${JSON.stringify(keys)}`);
+    try {
+      const collection = await this.getCollection(tableName);
+      return await collection.find(keys).toArray();
+    } catch (error) {
+      this.logger.error(`Error loading ${tableName} with keys ${JSON.stringify(keys)}: ${error}`);
+      throw error;
+    }
+  }
+  async getThreadById({ threadId }) {
+    try {
+      const collection = await this.getCollection(storage.TABLE_THREADS);
+      const result = await collection.findOne({ id: threadId });
+      if (!result) {
+        return null;
+      }
+      return {
+        ...result,
+        metadata: typeof result.metadata === "string" ? JSON.parse(result.metadata) : result.metadata
+      };
+    } catch (error) {
+      this.logger.error(`Error loading thread with ID ${threadId}: ${error}`);
+      throw error;
+    }
+  }
+  async getThreadsByResourceId({ resourceId }) {
+    try {
+      const collection = await this.getCollection(storage.TABLE_THREADS);
+      const results = await collection.find({ resourceId }).toArray();
+      if (!results.length) {
+        return [];
+      }
+      return results.map((result) => ({
+        ...result,
+        metadata: typeof result.metadata === "string" ? JSON.parse(result.metadata) : result.metadata
+      }));
+    } catch (error) {
+      this.logger.error(`Error loading threads by resourceId ${resourceId}: ${error}`);
+      throw error;
+    }
+  }
+  async saveThread({ thread }) {
+    try {
+      const collection = await this.getCollection(storage.TABLE_THREADS);
+      await collection.updateOne(
+        { id: thread.id },
+        {
+          $set: {
+            ...thread,
+            metadata: JSON.stringify(thread.metadata)
+          }
+        },
+        { upsert: true }
+      );
+      return thread;
+    } catch (error) {
+      this.logger.error(`Error saving thread ${thread.id}: ${error}`);
+      throw error;
+    }
+  }
+  async updateThread({
+    id,
+    title,
+    metadata
+  }) {
+    const thread = await this.getThreadById({ threadId: id });
+    if (!thread) {
+      throw new Error(`Thread ${id} not found`);
+    }
+    const updatedThread = {
+      ...thread,
+      title,
+      metadata: {
+        ...thread.metadata,
+        ...metadata
+      }
+    };
+    try {
+      const collection = await this.getCollection(storage.TABLE_THREADS);
+      await collection.updateOne(
+        { id },
+        {
+          $set: {
+            title,
+            metadata: JSON.stringify(updatedThread.metadata)
+          }
+        }
+      );
+    } catch (error) {
+      this.logger.error(`Error updating thread ${id}:) ${error}`);
+      throw error;
+    }
+    return updatedThread;
+  }
+  async deleteThread({ threadId }) {
+    try {
+      const collectionMessages = await this.getCollection(storage.TABLE_MESSAGES);
+      await collectionMessages.deleteMany({ thread_id: threadId });
+      const collectionThreads = await this.getCollection(storage.TABLE_THREADS);
+      await collectionThreads.deleteOne({ id: threadId });
+    } catch (error) {
+      this.logger.error(`Error deleting thread ${threadId}: ${error}`);
+      throw error;
+    }
+  }
+  async getMessages({ threadId, selectBy }) {
+    try {
+      const limit = typeof selectBy?.last === "number" ? selectBy.last : 40;
+      const include = selectBy?.include || [];
+      let messages = [];
+      let allMessages = [];
+      const collection = await this.getCollection(storage.TABLE_MESSAGES);
+      allMessages = (await collection.find({ thread_id: threadId }).sort({ createdAt: -1 }).toArray()).map(
+        (row) => this.parseRow(row)
+      );
+      if (include.length) {
+        const idToIndex = /* @__PURE__ */ new Map();
+        allMessages.forEach((msg, idx) => {
+          idToIndex.set(msg.id, idx);
+        });
+        const selectedIndexes = /* @__PURE__ */ new Set();
+        for (const inc of include) {
+          const idx = idToIndex.get(inc.id);
+          if (idx === void 0) continue;
+          for (let i = 1; i <= (inc.withPreviousMessages || 0); i++) {
+            if (idx + i < allMessages.length) selectedIndexes.add(idx + i);
+          }
+          selectedIndexes.add(idx);
+          for (let i = 1; i <= (inc.withNextMessages || 0); i++) {
+            if (idx - i >= 0) selectedIndexes.add(idx - i);
+          }
+        }
+        messages.push(
+          ...Array.from(selectedIndexes).map((i) => allMessages[i]).filter((m) => !!m)
+        );
+      }
+      const excludeIds = new Set(messages.map((m) => m.id));
+      for (const msg of allMessages) {
+        if (messages.length >= limit) break;
+        if (!excludeIds.has(msg.id)) {
+          messages.push(msg);
+        }
+      }
+      messages.sort((a, b) => a.createdAt.getTime() - b.createdAt.getTime());
+      return messages.slice(0, limit);
+    } catch (error) {
+      this.logger.error("Error getting messages:", error);
+      throw error;
+    }
+  }
+  async saveMessages({ messages }) {
+    if (!messages.length) {
+      return messages;
+    }
+    const threadId = messages[0]?.threadId;
+    if (!threadId) {
+      this.logger.error("Thread ID is required to save messages");
+      throw new Error("Thread ID is required");
+    }
+    try {
+      const messagesToInsert = messages.map((message) => {
+        const time = message.createdAt || /* @__PURE__ */ new Date();
+        return {
+          id: message.id,
+          thread_id: threadId,
+          content: typeof message.content === "string" ? message.content : JSON.stringify(message.content),
+          role: message.role,
+          type: message.type,
+          resourceId: message.resourceId,
+          createdAt: time instanceof Date ? time.toISOString() : time
+        };
+      });
+      const collection = await this.getCollection(storage.TABLE_MESSAGES);
+      await collection.insertMany(messagesToInsert);
+      return messages;
+    } catch (error) {
+      this.logger.error("Failed to save messages in database: " + error?.message);
+      throw error;
+    }
+  }
+  async getTraces({
+    name,
+    scope,
+    page,
+    perPage,
+    attributes,
+    filters
+  } = {
+    page: 0,
+    perPage: 100
+  }) {
+    const limit = perPage;
+    const offset = page * perPage;
+    const query = {};
+    if (name) {
+      query["name"] = `%${name}%`;
+    }
+    if (scope) {
+      query["scope"] = scope;
+    }
+    if (attributes) {
+      Object.keys(attributes).forEach((key) => {
+        query[`attributes.${key}`] = attributes[key];
+      });
+    }
+    if (filters) {
+      Object.entries(filters).forEach(([key, value]) => {
+        query[key] = value;
+      });
+    }
+    const collection = await this.getCollection(storage.TABLE_TRACES);
+    const result = await collection.find(query, {
+      sort: { startTime: -1 }
+    }).limit(limit).skip(offset).toArray();
+    return result.map((row) => ({
+      id: row.id,
+      parentSpanId: row.parentSpanId,
+      traceId: row.traceId,
+      name: row.name,
+      scope: row.scope,
+      kind: row.kind,
+      status: safelyParseJSON(row.status),
+      events: safelyParseJSON(row.events),
+      links: safelyParseJSON(row.links),
+      attributes: safelyParseJSON(row.attributes),
+      startTime: row.startTime,
+      endTime: row.endTime,
+      other: safelyParseJSON(row.other),
+      createdAt: row.createdAt
+    }));
+  }
+  async getWorkflowRuns({
+    workflowName,
+    fromDate,
+    toDate,
+    limit,
+    offset
+  } = {}) {
+    const query = {};
+    if (workflowName) {
+      query["workflow_name"] = workflowName;
+    }
+    if (fromDate || toDate) {
+      query["createdAt"] = {};
+      if (fromDate) {
+        query["createdAt"]["$gte"] = fromDate;
+      }
+      if (toDate) {
+        query["createdAt"]["$lte"] = toDate;
+      }
+    }
+    const collection = await this.getCollection(storage.TABLE_WORKFLOW_SNAPSHOT);
+    let total = 0;
+    if (limit !== void 0 && offset !== void 0) {
+      total = await collection.countDocuments(query);
+    }
+    const request = collection.find(query).sort({ createdAt: "desc" });
+    if (limit) {
+      request.limit(limit);
+    }
+    if (offset) {
+      request.skip(offset);
+    }
+    const result = await request.toArray();
+    const runs = result.map((row) => {
+      let parsedSnapshot = row.snapshot;
+      if (typeof parsedSnapshot === "string") {
+        try {
+          parsedSnapshot = JSON.parse(row.snapshot);
+        } catch (e) {
+          console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
+        }
+      }
+      return {
+        workflowName: row.workflow_name,
+        runId: row.run_id,
+        snapshot: parsedSnapshot,
+        createdAt: new Date(row.createdAt),
+        updatedAt: new Date(row.updatedAt)
+      };
+    });
+    return { runs, total: total || runs.length };
+  }
+  async getEvalsByAgentName(agentName, type) {
+    try {
+      const query = {
+        agent_name: agentName
+      };
+      if (type === "test") {
+        query["test_info"] = { $ne: null };
+      }
+      if (type === "live") {
+        query["test_info"] = null;
+      }
+      const collection = await this.getCollection(storage.TABLE_EVALS);
+      const documents = await collection.find(query).sort({ created_at: "desc" }).toArray();
+      const result = documents.map((row) => this.transformEvalRow(row));
+      return result.filter((row) => {
+        if (type === "live") {
+          return !Boolean(row.testInfo?.testPath);
+        }
+        if (type === "test") {
+          return row.testInfo?.testPath !== null;
+        }
+        return true;
+      });
+    } catch (error) {
+      if (error instanceof Error && error.message.includes("no such table")) {
+        return [];
+      }
+      this.logger.error("Failed to get evals for the specified agent: " + error?.message);
+      throw error;
+    }
+  }
+  async persistWorkflowSnapshot({
+    workflowName,
+    runId,
+    snapshot
+  }) {
+    try {
+      const now = (/* @__PURE__ */ new Date()).toISOString();
+      const collection = await this.getCollection(storage.TABLE_WORKFLOW_SNAPSHOT);
+      await collection.updateOne(
+        { workflow_name: workflowName, run_id: runId },
+        {
+          $set: {
+            snapshot: JSON.stringify(snapshot),
+            updatedAt: now
+          },
+          $setOnInsert: {
+            createdAt: now
+          }
+        },
+        { upsert: true }
+      );
+    } catch (error) {
+      this.logger.error(`Error persisting workflow snapshot: ${error}`);
+      throw error;
+    }
+  }
+  async loadWorkflowSnapshot({
+    workflowName,
+    runId
+  }) {
+    try {
+      const result = await this.load({
+        tableName: storage.TABLE_WORKFLOW_SNAPSHOT,
+        keys: {
+          workflow_name: workflowName,
+          run_id: runId
+        }
+      });
+      if (!result?.length) {
+        return null;
+      }
+      return JSON.parse(result[0].snapshot);
+    } catch (error) {
+      console.error("Error loading workflow snapshot:", error);
+      throw error;
+    }
+  }
+  async getWorkflowRunById({
+    runId,
+    workflowName
+  }) {
+    try {
+      const query = {};
+      if (runId) {
+        query["run_id"] = runId;
+      }
+      if (workflowName) {
+        query["workflow_name"] = workflowName;
+      }
+      const collection = await this.getCollection(storage.TABLE_WORKFLOW_SNAPSHOT);
+      const result = await collection.findOne(query);
+      if (!result) {
+        return null;
+      }
+      return this.parseWorkflowRun(result);
+    } catch (error) {
+      console.error("Error getting workflow run by ID:", error);
+      throw error;
+    }
+  }
+  parseWorkflowRun(row) {
+    let parsedSnapshot = row.snapshot;
+    if (typeof parsedSnapshot === "string") {
+      try {
+        parsedSnapshot = JSON.parse(row.snapshot);
+      } catch (e) {
+        console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
+      }
+    }
+    return {
+      workflowName: row.workflow_name,
+      runId: row.run_id,
+      snapshot: parsedSnapshot,
+      createdAt: row.createdAt,
+      updatedAt: row.updatedAt,
+      resourceId: row.resourceId
+    };
+  }
+  parseRow(row) {
+    let content = row.content;
+    try {
+      content = JSON.parse(row.content);
+    } catch {
+    }
+    return {
+      id: row.id,
+      content,
+      role: row.role,
+      type: row.type,
+      createdAt: new Date(row.createdAt),
+      threadId: row.thread_id
+    };
+  }
+  transformEvalRow(row) {
+    let testInfoValue = null;
+    if (row.test_info) {
+      try {
+        testInfoValue = typeof row.test_info === "string" ? JSON.parse(row.test_info) : row.test_info;
+      } catch (e) {
+        console.warn("Failed to parse test_info:", e);
+      }
+    }
+    return {
+      input: row.input,
+      output: row.output,
+      result: row.result,
+      agentName: row.agent_name,
+      metricName: row.metric_name,
+      instructions: row.instructions,
+      testInfo: testInfoValue,
+      globalRunId: row.global_run_id,
+      runId: row.run_id,
+      createdAt: row.created_at
+    };
+  }
+  async close() {
+    await this.#client.close();
+  }
+};

 // src/vector/prompt.ts
 var MONGODB_PROMPT = `When querying MongoDB Vector, you can ONLY use the operators listed below. Any other operators will be rejected.
@@ -454,4 +1018,5 @@ Example Complex Query:
 }`;

 exports.MONGODB_PROMPT = MONGODB_PROMPT;
+exports.MongoDBStore = MongoDBStore;
 exports.MongoDBVector = MongoDBVector;
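The headline addition is the MongoDBStore storage adapter exported above. A minimal TypeScript sketch of its use, grounded in the constructor and methods in this diff (the connection string, database name, and thread fields are placeholders; the exact thread type comes from @mastra/core):

  import { MongoDBStore } from '@mastra/mongodb';

  // url and dbName are required; the constructor rejects empty strings.
  const store = new MongoDBStore({ url: 'mongodb://localhost:27017', dbName: 'mastra' });

  // saveThread upserts by id and stringifies metadata before writing.
  await store.saveThread({
    thread: { id: 'thread-1', resourceId: 'user-1', title: 'Demo', metadata: {}, createdAt: new Date(), updatedAt: new Date() },
  });
  const thread = await store.getThreadById({ threadId: 'thread-1' });
  await store.close();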
package/dist/index.d.cts
CHANGED
@@ -1,7 +1,7 @@
 export { MONGODB_PROMPT } from './_tsup-dts-rollup.cjs';
-export { MongoDBUpsertArgs } from './_tsup-dts-rollup.cjs';
-export { MongoDBQueryArgs } from './_tsup-dts-rollup.cjs';
-export { MongoDBUpsertParams } from './_tsup-dts-rollup.cjs';
 export { MongoDBUpsertVectorParams } from './_tsup-dts-rollup.cjs';
 export { MongoDBQueryVectorParams } from './_tsup-dts-rollup.cjs';
+export { MongoDBIndexReadyParams } from './_tsup-dts-rollup.cjs';
 export { MongoDBVector } from './_tsup-dts-rollup.cjs';
+export { MongoDBConfig } from './_tsup-dts-rollup.cjs';
+export { MongoDBStore } from './_tsup-dts-rollup.cjs';
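Consumers of the removed MongoDBUpsertArgs, MongoDBQueryArgs, and MongoDBUpsertParams aliases must migrate to the remaining *VectorParams types; the new names can be imported as types from the package root, e.g. (a sketch):

  import type { MongoDBConfig, MongoDBIndexReadyParams } from '@mastra/mongodb';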