@mastra/dynamodb 0.0.0-pass-headers-for-create-mastra-client-20250529200245 → 0.0.0-share-agent-metadata-with-cloud-20250718110128
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.md +11 -42
- package/dist/_tsup-dts-rollup.d.cts +1160 -0
- package/dist/_tsup-dts-rollup.d.ts +1160 -0
- package/dist/index.cjs +380 -106
- package/dist/index.d.cts +2 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +356 -82
- package/package.json +31 -17
- package/src/storage/index.test.ts +431 -1
- package/src/storage/index.ts +423 -99
package/package.json
CHANGED
@@ -1,37 +1,51 @@
 {
   "name": "@mastra/dynamodb",
-  "version": "0.0.0-
+  "version": "0.0.0-share-agent-metadata-with-cloud-20250718110128",
   "description": "DynamoDB storage adapter for Mastra",
   "type": "module",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./dist/index.d.ts",
+        "default": "./dist/index.js"
+      },
+      "require": {
+        "types": "./dist/index.d.cts",
+        "default": "./dist/index.cjs"
+      }
+    },
+    "./package.json": "./package.json"
+  },
   "files": [
     "dist",
     "src"
   ],
   "dependencies": {
-    "@aws-sdk/client-dynamodb": "^3.
-    "@aws-sdk/lib-dynamodb": "^3.
-    "electrodb": "^3.4.
+    "@aws-sdk/client-dynamodb": "^3.840.0",
+    "@aws-sdk/lib-dynamodb": "^3.840.0",
+    "electrodb": "^3.4.3"
   },
   "peerDependencies": {
-    "@mastra/core": "
+    "@mastra/core": "0.0.0-share-agent-metadata-with-cloud-20250718110128"
   },
   "devDependencies": {
-    "@microsoft/api-extractor": "^7.52.
-    "@types/node": "^20.
-    "@vitest/coverage-v8": "3.
-    "@vitest/ui": "3.
-    "axios": "^1.
-    "eslint": "^9.
-    "tsup": "^8.
-    "typescript": "^5.8.
-    "vitest": "^3.
-    "@internal/lint": "0.0.0-
-    "@mastra/core": "0.0.0-
+    "@microsoft/api-extractor": "^7.52.8",
+    "@types/node": "^20.19.0",
+    "@vitest/coverage-v8": "3.2.3",
+    "@vitest/ui": "3.2.3",
+    "axios": "^1.10.0",
+    "eslint": "^9.30.1",
+    "tsup": "^8.5.0",
+    "typescript": "^5.8.3",
+    "vitest": "^3.2.4",
+    "@internal/lint": "0.0.0-share-agent-metadata-with-cloud-20250718110128",
+    "@mastra/core": "0.0.0-share-agent-metadata-with-cloud-20250718110128",
+    "@internal/storage-test-utils": "0.0.16"
   },
   "scripts": {
-    "build": "tsup src/index.ts --format esm,cjs --clean --treeshake=smallest --splitting",
+    "build": "tsup src/index.ts --format esm,cjs --experimental-dts --clean --treeshake=smallest --splitting",
     "dev": "tsup --watch",
     "clean": "rm -rf dist",
     "lint": "eslint .",
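
The new "exports" map makes the dual ESM/CJS build explicit: the "import" condition resolves to the ESM bundle and its .d.ts declarations, the "require" condition to the CommonJS bundle and its .d.cts declarations, and "./package.json" stays addressable for tooling. A minimal consumer sketch in TypeScript, assuming the package's primary export is the DynamoDBStore class that the test suite below exercises; only the resolved file paths come from the exports entries above, the export name itself is not verified here:

// ESM consumers: "." resolves via the "import" condition to ./dist/index.js,
// with declarations from ./dist/index.d.ts.
import { DynamoDBStore } from '@mastra/dynamodb';

// CJS consumers instead resolve "." via the "require" condition to ./dist/index.cjs,
// with declarations from ./dist/index.d.cts, e.g.:
//   const { DynamoDBStore } = require('@mastra/dynamodb');

console.log(typeof DynamoDBStore); // expected: 'function' for a class export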

package/src/storage/index.test.ts
CHANGED

@@ -11,6 +11,7 @@ import {
   waitUntilTableExists,
   waitUntilTableNotExists,
 } from '@aws-sdk/client-dynamodb';
+import { createSampleMessageV2, createSampleThread } from '@internal/storage-test-utils';
 import type { MastraMessageV1, StorageThreadType, WorkflowRun, WorkflowRunState } from '@mastra/core';
 import type { MastraMessageV2 } from '@mastra/core/agent';
 import { TABLE_EVALS, TABLE_THREADS, TABLE_WORKFLOW_SNAPSHOT } from '@mastra/core/storage';
@@ -320,10 +321,12 @@ describe('DynamoDBStore Integration Tests', () => {
           input: { source: 'test' },
           step1: { status: 'success', output: { data: 'test' } },
         } as unknown as WorkflowRunState['context'],
+        serializedStepGraph: [],
         activePaths: [{ stepPath: ['test'], stepId: 'step1', status: 'success' }],
         suspendedPaths: { test: [1] },
         runId: 'test-run-large', // Use unique runId
         timestamp: now,
+        status: 'success',
       };

       await expect(
@@ -386,6 +389,215 @@ describe('DynamoDBStore Integration Tests', () => {
       expect(retrieved?.title).toBe('Updated Thread 2');
       expect(retrieved?.metadata?.update).toBe(2);
     });
+
+    test('getMessages should return the N most recent messages [v2 storage]', async () => {
+      const threadId = 'last-selector-thread';
+      const start = Date.now();
+
+      // Insert 10 messages with increasing timestamps
+      const messages: MastraMessageV2[] = Array.from({ length: 10 }, (_, i) => ({
+        id: `m-${i}`,
+        threadId,
+        resourceId: 'r',
+        content: { format: 2, parts: [{ type: 'text', text: `msg-${i}` }] },
+        createdAt: new Date(start + i), // 0..9 ms apart
+        role: 'user',
+        type: 'text',
+      }));
+      await store.saveMessages({ messages, format: 'v2' });
+
+      const last3 = await store.getMessages({
+        format: 'v2',
+        threadId,
+        selectBy: { last: 3 },
+      });
+
+      expect(last3).toHaveLength(3);
+      expect(last3.map(m => (m.content.parts[0] as { type: string; text: string }).text)).toEqual([
+        'msg-7',
+        'msg-8',
+        'msg-9',
+      ]);
+    });
+
+    test('getMessages should return the N most recent messages [v1 storage]', async () => {
+      const threadId = 'last-selector-thread';
+      const start = Date.now();
+
+      // Insert 10 messages with increasing timestamps
+      const messages: MastraMessageV1[] = Array.from({ length: 10 }, (_, i) => ({
+        id: `m-${i}`,
+        threadId,
+        resourceId: 'r',
+        content: `msg-${i}`,
+        createdAt: new Date(start + i), // 0..9 ms apart
+        role: 'user',
+        type: 'text',
+      }));
+      await store.saveMessages({ messages });
+
+      const last3 = await store.getMessages({
+        threadId,
+        selectBy: { last: 3 },
+      });
+
+      expect(last3).toHaveLength(3);
+      expect(last3.map(m => m.content)).toEqual(['msg-7', 'msg-8', 'msg-9']);
+    });
+
+    test('should update thread updatedAt when a message is saved to it', async () => {
+      const thread: StorageThreadType = {
+        id: 'thread-update-test',
+        resourceId: 'resource-update',
+        title: 'Update Test Thread',
+        createdAt: new Date(),
+        updatedAt: new Date(),
+        metadata: { test: true },
+      };
+      await store.saveThread({ thread });
+
+      // Get the initial thread to capture the original updatedAt
+      const initialThread = await store.getThreadById({ threadId: thread.id });
+      expect(initialThread).toBeDefined();
+      const originalUpdatedAt = initialThread!.updatedAt;
+
+      // Wait a small amount to ensure different timestamp
+      await new Promise(resolve => setTimeout(resolve, 100));
+
+      // Create and save a message to the thread
+      const message: MastraMessageV1 = {
+        id: 'msg-update-test',
+        threadId: thread.id,
+        resourceId: 'resource-update',
+        content: 'Test message for update',
+        createdAt: new Date(),
+        role: 'user',
+        type: 'text',
+      };
+      await store.saveMessages({ messages: [message] });
+
+      // Retrieve the thread again and check that updatedAt was updated
+      const updatedThread = await store.getThreadById({ threadId: thread.id });
+      expect(updatedThread).toBeDefined();
+      expect(updatedThread!.updatedAt.getTime()).toBeGreaterThan(originalUpdatedAt.getTime());
+    });
+
+    test('saveThread upsert: should create new thread when thread does not exist', async () => {
+      const threadId = `upsert-new-${randomUUID()}`;
+      const now = new Date();
+      const thread: StorageThreadType = {
+        id: threadId,
+        resourceId: 'resource-upsert-new',
+        title: 'New Thread via Upsert',
+        createdAt: now,
+        updatedAt: now,
+        metadata: { operation: 'create', test: true },
+      };
+
+      // Save the thread (should create new)
+      await expect(store.saveThread({ thread })).resolves.not.toThrow();
+
+      // Verify the thread was created
+      const retrieved = await store.getThreadById({ threadId });
+      expect(retrieved).toBeDefined();
+      expect(retrieved?.id).toBe(threadId);
+      expect(retrieved?.title).toBe('New Thread via Upsert');
+      expect(retrieved?.resourceId).toBe('resource-upsert-new');
+      expect(retrieved?.metadata).toEqual({ operation: 'create', test: true });
+    });
+
+    test('saveThread upsert: should update existing thread when thread already exists', async () => {
+      const threadId = `upsert-update-${randomUUID()}`;
+      const initialCreatedAt = new Date();
+
+      // Create initial thread
+      const initialThread: StorageThreadType = {
+        id: threadId,
+        resourceId: 'resource-upsert-initial',
+        title: 'Initial Thread Title',
+        createdAt: initialCreatedAt,
+        updatedAt: initialCreatedAt,
+        metadata: { operation: 'initial', version: 1 },
+      };
+      await store.saveThread({ thread: initialThread });
+
+      // Wait a small amount to ensure different timestamp
+      await new Promise(resolve => setTimeout(resolve, 100));
+
+      // Update the thread with same ID but different data
+      const updatedThread: StorageThreadType = {
+        id: threadId,
+        resourceId: 'resource-upsert-updated',
+        title: 'Updated Thread Title',
+        createdAt: initialCreatedAt, // Keep original creation time
+        updatedAt: new Date(), // New update time
+        metadata: { operation: 'update', version: 2 },
+      };
+      await expect(store.saveThread({ thread: updatedThread })).resolves.not.toThrow();
+
+      // Verify the thread was updated
+      const retrieved = await store.getThreadById({ threadId });
+      expect(retrieved).toBeDefined();
+      expect(retrieved?.id).toBe(threadId);
+      expect(retrieved?.title).toBe('Updated Thread Title');
+      expect(retrieved?.resourceId).toBe('resource-upsert-updated');
+      expect(retrieved?.metadata).toEqual({ operation: 'update', version: 2 });
+
+      // updatedAt should be newer than the initial creation time
+      expect(retrieved?.updatedAt.getTime()).toBeGreaterThan(initialCreatedAt.getTime());
+      // createdAt should remain exactly equal to the initial creation time
+      expect(retrieved?.createdAt.getTime()).toBe(initialCreatedAt.getTime());
+    });
+
+    test('saveThread upsert: should handle complex metadata updates', async () => {
+      const threadId = `upsert-metadata-${randomUUID()}`;
+      const initialMetadata = {
+        user: 'initial-user',
+        tags: ['initial', 'test'],
+        count: 1,
+      };
+
+      // Create initial thread with complex metadata
+      const initialThread: StorageThreadType = {
+        id: threadId,
+        resourceId: 'resource-metadata-test',
+        title: 'Metadata Test Thread',
+        createdAt: new Date(),
+        updatedAt: new Date(),
+        metadata: initialMetadata,
+      };
+      await store.saveThread({ thread: initialThread });
+
+      // Wait a small amount to ensure different timestamp
+      await new Promise(resolve => setTimeout(resolve, 100));
+
+      // Update with completely different metadata structure
+      const updatedMetadata = {
+        user: 'updated-user',
+        settings: { theme: 'light', language: 'ja', notifications: true },
+        tags: ['updated', 'upsert', 'complex'],
+        count: 5,
+        newField: { nested: { deeply: 'value' } },
+      };
+
+      const updatedThread: StorageThreadType = {
+        id: threadId,
+        resourceId: 'resource-metadata-test',
+        title: 'Updated Metadata Thread',
+        createdAt: initialThread.createdAt,
+        updatedAt: new Date(),
+        metadata: updatedMetadata,
+      };
+      await expect(store.saveThread({ thread: updatedThread })).resolves.not.toThrow();
+
+      // Verify the metadata was completely replaced
+      const retrieved = await store.getThreadById({ threadId });
+      expect(retrieved).toBeDefined();
+      expect(retrieved?.metadata).toEqual(updatedMetadata);
+      expect(retrieved?.metadata?.user).toBe('updated-user');
+      expect(retrieved?.metadata?.tags).toEqual(['updated', 'upsert', 'complex']);
+      expect(retrieved?.title).toBe('Updated Metadata Thread');
+    });
   });

   describe('Batch Operations', () => {
@@ -461,6 +673,82 @@ describe('DynamoDBStore Integration Tests', () => {
       expect(retrieved[0]?.content).toBe('Large Message 0');
       expect(retrieved[29]?.content).toBe('Large Message 29');
     });
+
+    test('should upsert messages: duplicate id+threadId results in update, not duplicate row', async () => {
+      const thread = await createSampleThread();
+      await store.saveThread({ thread });
+      const baseMessage = createSampleMessageV2({
+        threadId: thread.id,
+        createdAt: new Date(),
+        content: { content: 'Original' },
+        resourceId: thread.resourceId,
+      });
+
+      // Insert the message for the first time
+      await store.saveMessages({ messages: [baseMessage], format: 'v2' });
+
+      // // Insert again with the same id and threadId but different content
+      const updatedMessage = {
+        ...createSampleMessageV2({
+          threadId: thread.id,
+          createdAt: new Date(),
+          content: { content: 'Updated' },
+          resourceId: thread.resourceId,
+        }),
+        id: baseMessage.id,
+      };
+
+      await store.saveMessages({ messages: [updatedMessage], format: 'v2' });
+
+      // Retrieve messages for the thread
+      const retrievedMessages = await store.getMessages({ threadId: thread.id, format: 'v2' });
+
+      // Only one message should exist for that id+threadId
+      expect(retrievedMessages.filter(m => m.id === baseMessage.id)).toHaveLength(1);
+
+      // The content should be the updated one
+      expect(retrievedMessages.find(m => m.id === baseMessage.id)?.content.content).toBe('Updated');
+    });
+
+    test('should upsert messages: duplicate id and different threadid', async () => {
+      const thread1 = await createSampleThread();
+      const thread2 = await createSampleThread();
+      await store.saveThread({ thread: thread1 });
+      await store.saveThread({ thread: thread2 });
+
+      const message = createSampleMessageV2({
+        threadId: thread1.id,
+        createdAt: new Date(),
+        content: { content: 'Thread1 Content' },
+        resourceId: thread1.resourceId,
+      });
+
+      // Insert message into thread1
+      await store.saveMessages({ messages: [message], format: 'v2' });
+
+      // Attempt to insert a message with the same id but different threadId
+      const conflictingMessage = {
+        ...createSampleMessageV2({
+          threadId: thread2.id, // different thread
+          content: { content: 'Thread2 Content' },
+          resourceId: thread2.resourceId,
+        }),
+        id: message.id,
+      };
+
+      // Save should save the message to the new thread
+      await store.saveMessages({ messages: [conflictingMessage], format: 'v2' });
+
+      // Retrieve messages for both threads
+      const thread1Messages = await store.getMessages({ threadId: thread1.id, format: 'v2' });
+      const thread2Messages = await store.getMessages({ threadId: thread2.id, format: 'v2' });
+
+      // Thread 1 should NOT have the message with that id
+      expect(thread1Messages.find(m => m.id === message.id)).toBeUndefined();
+
+      // Thread 2 should have the message with that id
+      expect(thread2Messages.find(m => m.id === message.id)?.content.content).toBe('Thread2 Content');
+    });
   });

   describe('Single-Table Design', () => {
@@ -486,10 +774,12 @@ describe('DynamoDBStore Integration Tests', () => {
           step1: { status: 'success', output: { data: 'test' } },
           input: { source: 'test' },
         } as unknown as WorkflowRunState['context'],
+        serializedStepGraph: [],
         activePaths: [{ stepPath: ['test'], stepId: 'step1', status: 'success' }],
         suspendedPaths: { test: [1] },
         runId: 'mixed-run',
         timestamp: Date.now(),
+        status: 'success',
       };
       await store.persistWorkflowSnapshot({ workflowName, runId: 'mixed-run', snapshot: workflowSnapshot });

@@ -587,6 +877,37 @@ describe('DynamoDBStore Integration Tests', () => {
       expect(allTraces.length).toBe(3);
     });

+    test('should handle Date objects for createdAt/updatedAt fields in batchTraceInsert', async () => {
+      // This test specifically verifies the bug from the issue where Date objects
+      // were passed instead of ISO strings and ElectroDB validation failed
+      const now = new Date();
+      const traceWithDateObjects = {
+        id: `trace-${randomUUID()}`,
+        parentSpanId: `span-${randomUUID()}`,
+        traceId: `traceid-${randomUUID()}`,
+        name: 'test-trace-with-dates',
+        scope: 'default-tracer',
+        kind: 1,
+        startTime: now.getTime(),
+        endTime: now.getTime() + 100,
+        status: JSON.stringify({ code: 0 }),
+        attributes: JSON.stringify({ key: 'value' }),
+        events: JSON.stringify([]),
+        links: JSON.stringify([]),
+        // These are Date objects, not ISO strings - this should be handled by ElectroDB attribute setters
+        createdAt: now,
+        updatedAt: now,
+      };
+
+      // This should not throw a validation error due to Date object type
+      await expect(store.batchTraceInsert({ records: [traceWithDateObjects] })).resolves.not.toThrow();
+
+      // Verify the trace was saved correctly
+      const allTraces = await store.getTraces({ name: 'test-trace-with-dates', page: 1, perPage: 10 });
+      expect(allTraces.length).toBe(1);
+      expect(allTraces[0].name).toBe('test-trace-with-dates');
+    });
+
     test('should retrieve traces filtered by name using GSI', async () => {
       const trace1 = sampleTrace('trace-filter-name', 'scope-X');
       const trace2 = sampleTrace('trace-filter-name', 'scope-Y', Date.now() + 10);
@@ -668,6 +989,40 @@ describe('DynamoDBStore Integration Tests', () => {
       };
     };

+    test('should handle Date objects for createdAt/updatedAt fields in eval batchInsert', async () => {
+      // Test that eval entity properly handles Date objects in createdAt/updatedAt fields
+      const now = new Date();
+      const evalWithDateObjects = {
+        entity: 'eval',
+        agent_name: 'test-agent-dates',
+        input: 'Test input',
+        output: 'Test output',
+        result: JSON.stringify({ score: 0.95 }),
+        metric_name: 'test-metric',
+        instructions: 'Test instructions',
+        global_run_id: `global-${randomUUID()}`,
+        run_id: `run-${randomUUID()}`,
+        created_at: now, // Date object instead of ISO string
+        // These are Date objects, not ISO strings - should be handled by ElectroDB attribute setters
+        createdAt: now,
+        updatedAt: now,
+        metadata: JSON.stringify({ test: 'meta' }),
+      };
+
+      // This should not throw a validation error due to Date object type
+      await expect(
+        store.batchInsert({
+          tableName: TABLE_EVALS,
+          records: [evalWithDateObjects],
+        }),
+      ).resolves.not.toThrow();
+
+      // Verify the eval was saved correctly
+      const evals = await store.getEvalsByAgentName('test-agent-dates');
+      expect(evals.length).toBe(1);
+      expect(evals[0].agentName).toBe('test-agent-dates');
+    });
+
     test('should retrieve evals by agent name using GSI and filter by type', async () => {
       const agent1 = 'eval-agent-1';
       const agent2 = 'eval-agent-2';
@@ -734,10 +1089,12 @@ describe('DynamoDBStore Integration Tests', () => {
           step1: { status: 'success', output: { data: 'test' } },
           input: { source: 'test' },
         } as unknown as WorkflowRunState['context'],
+        serializedStepGraph: [],
         activePaths: [],
         suspendedPaths: {},
         runId: runId,
         timestamp: createdAt.getTime(),
+        status: 'success',
         ...(resourceId && { resourceId: resourceId }), // Conditionally add resourceId to snapshot
       };
       return {
@@ -778,6 +1135,53 @@ describe('DynamoDBStore Integration Tests', () => {
       expect(loadedSnapshot?.context).toEqual(snapshot.context);
     });

+    test('should allow updating an existing workflow snapshot', async () => {
+      const wfName = 'update-test-wf';
+      const runId = `run-${randomUUID()}`;
+
+      // Create initial snapshot
+      const { snapshot: initialSnapshot } = sampleWorkflowSnapshot(wfName, runId);
+
+      await expect(
+        store.persistWorkflowSnapshot({
+          workflowName: wfName,
+          runId: runId,
+          snapshot: initialSnapshot,
+        }),
+      ).resolves.not.toThrow();
+
+      // Create updated snapshot with different data
+      const updatedSnapshot: WorkflowRunState = {
+        ...initialSnapshot,
+        value: { currentState: 'completed' },
+        context: {
+          step1: { status: 'success', output: { data: 'updated-test' } },
+          step2: { status: 'success', output: { data: 'new-step' } },
+          input: { source: 'updated-test' },
+        } as unknown as WorkflowRunState['context'],
+        timestamp: Date.now(),
+      };
+
+      // This should succeed (update existing snapshot)
+      await expect(
+        store.persistWorkflowSnapshot({
+          workflowName: wfName,
+          runId: runId,
+          snapshot: updatedSnapshot,
+        }),
+      ).resolves.not.toThrow();
+
+      // Verify the snapshot was updated
+      const loadedSnapshot = await store.loadWorkflowSnapshot({
+        workflowName: wfName,
+        runId: runId,
+      });
+
+      expect(loadedSnapshot?.runId).toEqual(updatedSnapshot.runId);
+      expect(loadedSnapshot?.value).toEqual(updatedSnapshot.value);
+      expect(loadedSnapshot?.context).toEqual(updatedSnapshot.context);
+    });
+
     test('getWorkflowRunById should retrieve correct run', async () => {
       const wfName = 'get-by-id-wf';
       const runId1 = `run-${randomUUID()}`;
@@ -1009,6 +1413,32 @@ describe('DynamoDBStore Integration Tests', () => {
       }
     });

+    test('insert() should handle Date objects for createdAt/updatedAt fields', async () => {
+      // Test that individual insert method properly handles Date objects in date fields
+      const now = new Date();
+      const recordWithDates = {
+        id: `thread-${randomUUID()}`,
+        resourceId: `resource-${randomUUID()}`,
+        title: 'Thread with Date Objects',
+        // These are Date objects, not ISO strings - should be handled by preprocessing
+        createdAt: now,
+        updatedAt: now,
+        metadata: JSON.stringify({ test: 'with-dates' }),
+      };
+
+      // This should not throw a validation error due to Date object type
+      await expect(genericStore.insert({ tableName: TABLE_THREADS, record: recordWithDates })).resolves.not.toThrow();
+
+      // Verify the record was saved correctly
+      const loaded = await genericStore.load<StorageThreadType>({
+        tableName: TABLE_THREADS,
+        keys: { id: recordWithDates.id },
+      });
+      expect(loaded).not.toBeNull();
+      expect(loaded?.id).toBe(recordWithDates.id);
+      expect(loaded?.title).toBe('Thread with Date Objects');
+    });
+
     test('load() should return null for non-existent record', async () => {
       // Use the genericStore instance
       const loaded = await genericStore.load({ tableName: TABLE_THREADS, keys: { id: 'non-existent-generic' } });
@@ -1050,4 +1480,4 @@
       ).toBeNull();
     });
   }); // End Generic Storage Methods describe
-});
+});
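
The src/storage/index.ts changes (+423 -99) are not expanded in this diff, but the new tests describe the intended behavior: Date values arriving in createdAt/updatedAt (and created_at) fields must be normalized to ISO strings before ElectroDB's string-typed attributes are validated, via attribute set callbacks on the trace and eval entities and via preprocessing in the generic insert() path. A minimal TypeScript sketch of that normalization under those assumptions; the helper name and the commented call site are illustrative, not the adapter's actual code:

// Hypothetical helper: coerce Date values in the usual timestamp fields to ISO strings
// so that ElectroDB's string-typed attributes validate, matching what the tests above expect.
function normalizeDateFields<T extends Record<string, unknown>>(record: T): T {
  const out: Record<string, unknown> = { ...record };
  for (const key of ['createdAt', 'updatedAt', 'created_at']) {
    const value = out[key];
    if (value instanceof Date) {
      out[key] = value.toISOString();
    }
  }
  return out as T;
}

// Illustrative call site (the entity name is hypothetical):
// await threadEntity.put(normalizeDateFields(record)).go();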