@mastra/dynamodb 0.13.0 → 0.13.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,21 +1,15 @@
  import { spawn } from 'child_process';
- import { randomUUID } from 'crypto';
  import {
-   BatchWriteItemCommand,
    CreateTableCommand,
    DeleteTableCommand,
    DescribeTableCommand,
    DynamoDBClient,
    ListTablesCommand,
-   ScanCommand,
    waitUntilTableExists,
    waitUntilTableNotExists,
  } from '@aws-sdk/client-dynamodb';
- import { createSampleMessageV2, createSampleThread } from '@internal/storage-test-utils';
- import type { MastraMessageV1, StorageThreadType, WorkflowRun, WorkflowRunState } from '@mastra/core';
- import type { MastraMessageV2 } from '@mastra/core/agent';
- import { TABLE_EVALS, TABLE_THREADS, TABLE_WORKFLOW_SNAPSHOT } from '@mastra/core/storage';
- import { afterAll, beforeAll, beforeEach, describe, expect, test } from 'vitest';
+ import { createTestSuite } from '@internal/storage-test-utils';
+ import { beforeAll, describe } from 'vitest';
  import { DynamoDBStore } from '..';

  const TEST_TABLE_NAME = 'mastra-single-table-test'; // Define the single table name
@@ -63,209 +57,246 @@ async function waitForDynamoDBLocal(client: DynamoDBClient, timeoutMs = 90000):
    throw new Error(`DynamoDB Local did not become ready within ${timeoutMs}ms.`);
  }

- // Function to clear all items from the single table
- async function clearSingleTable(client: DynamoDBClient, tableName: string) {
-   let ExclusiveStartKey: Record<string, any> | undefined;
-   let items: Record<string, any>[] = [];
-
-   // Scan all items (handling pagination)
-   do {
-     const scanOutput = await client.send(
-       new ScanCommand({
-         TableName: tableName,
-         ExclusiveStartKey,
-         ProjectionExpression: 'pk, sk', // Only need keys for deletion
-       }),
-     );
-     items = items.concat(scanOutput.Items || []);
-     ExclusiveStartKey = scanOutput.LastEvaluatedKey;
-   } while (ExclusiveStartKey);
-
-   if (items.length === 0) {
-     return; // Nothing to delete
-   }
-
-   // Batch delete items (handling DynamoDB 25 item limit per batch)
-   const deleteRequests = items.map(item => ({
-     DeleteRequest: {
-       Key: { pk: item.pk, sk: item.sk },
-     },
-   }));
-
-   for (let i = 0; i < deleteRequests.length; i += 25) {
-     const batch = deleteRequests.slice(i, i + 25);
-     const command = new BatchWriteItemCommand({
-       RequestItems: {
-         [tableName]: batch,
-       },
+ // // Function to clear all items from the single table
+ // async function clearSingleTable(client: DynamoDBClient, tableName: string) {
+ //   let ExclusiveStartKey: Record<string, any> | undefined;
+ //   let items: Record<string, any>[] = [];
+
+ //   // Scan all items (handling pagination)
+ //   do {
+ //     const scanOutput = await client.send(
+ //       new ScanCommand({
+ //         TableName: tableName,
+ //         ExclusiveStartKey,
+ //         ProjectionExpression: 'pk, sk', // Only need keys for deletion
+ //       }),
+ //     );
+ //     items = items.concat(scanOutput.Items || []);
+ //     ExclusiveStartKey = scanOutput.LastEvaluatedKey;
+ //   } while (ExclusiveStartKey);
+
+ //   if (items.length === 0) {
+ //     return; // Nothing to delete
+ //   }
+
+ //   // Batch delete items (handling DynamoDB 25 item limit per batch)
+ //   const deleteRequests = items.map(item => ({
+ //     DeleteRequest: {
+ //       Key: { pk: item.pk, sk: item.sk },
+ //     },
+ //   }));
+
+ //   for (let i = 0; i < deleteRequests.length; i += 25) {
+ //     const batch = deleteRequests.slice(i, i + 25);
+ //     const command = new BatchWriteItemCommand({
+ //       RequestItems: {
+ //         [tableName]: batch,
+ //       },
+ //     });
+ //     // Handle unprocessed items if necessary (though less likely with local)
+ //     let result = await client.send(command);
+ //     while (
+ //       result.UnprocessedItems &&
+ //       result.UnprocessedItems[tableName] &&
+ //       result.UnprocessedItems[tableName].length > 0
+ //     ) {
+ //       console.warn(`Retrying ${result.UnprocessedItems[tableName].length} unprocessed delete items...`);
+ //       await new Promise(res => setTimeout(res, 200)); // Simple backoff
+ //       const retryCommand = new BatchWriteItemCommand({ RequestItems: result.UnprocessedItems });
+ //       result = await client.send(retryCommand);
+ //     }
+ //   }
+ //   // console.log(`Cleared ${items.length} items from ${tableName}`);
+ // }
+
+ describe('DynamoDBStore', () => {
+   // Start DynamoDB Local container and create table
+   beforeAll(async () => {
+     // Initialize client for setup
+     setupClient = new DynamoDBClient({
+       endpoint: LOCAL_ENDPOINT,
+       region: LOCAL_REGION,
+       credentials: { accessKeyId: 'test', secretAccessKey: 'test' },
+       // Increase timeout for setup operations
+       requestHandler: { requestTimeout: 10000 },
+       // Add retries for setup commands
+       maxAttempts: 5,
      });
-     // Handle unprocessed items if necessary (though less likely with local)
-     let result = await client.send(command);
-     while (
-       result.UnprocessedItems &&
-       result.UnprocessedItems[tableName] &&
-       result.UnprocessedItems[tableName].length > 0
-     ) {
-       console.warn(`Retrying ${result.UnprocessedItems[tableName].length} unprocessed delete items...`);
-       await new Promise(res => setTimeout(res, 200)); // Simple backoff
-       const retryCommand = new BatchWriteItemCommand({ RequestItems: result.UnprocessedItems });
-       result = await client.send(retryCommand);
-     }
-   }
-   // console.log(`Cleared ${items.length} items from ${tableName}`);
- }

- // Start DynamoDB Local container and create table
- beforeAll(async () => {
-   // Initialize client for setup
-   setupClient = new DynamoDBClient({
-     endpoint: LOCAL_ENDPOINT,
-     region: LOCAL_REGION,
-     credentials: { accessKeyId: 'test', secretAccessKey: 'test' },
-     // Increase timeout for setup operations
-     requestHandler: { requestTimeout: 10000 },
-     // Add retries for setup commands
-     maxAttempts: 5,
-   });
-
-   // Start DynamoDB Local using docker-compose
-   console.log('Starting DynamoDB Local container...');
-   dynamodbProcess = spawn('docker-compose', ['up', '-d'], {
-     cwd: __dirname, // Ensure docker-compose runs from the test file directory if needed
-     stdio: 'pipe', // Use pipe to potentially capture output if needed
-   });
-   dynamodbProcess.stderr?.on('data', data => console.error(`docker-compose stderr: ${data}`));
-   dynamodbProcess.on('error', err => console.error('Failed to start docker-compose:', err));
-
-   // Add a short fixed delay to allow the container process to stabilize before polling
-   console.log('Waiting a few seconds for container process to stabilize...');
-   await new Promise(resolve => setTimeout(resolve, 3000)); // 3-second delay
-
-   // Wait for DynamoDB to be ready
-   try {
-     await waitForDynamoDBLocal(setupClient);
-   } catch (e) {
-     console.error('Failed to connect to DynamoDB Local after startup.', e);
-     // Attempt to stop container on failure
-     spawn('docker-compose', ['down'], { cwd: __dirname, stdio: 'pipe' });
-     throw e; // Re-throw error to fail the test suite
-   }
+     // Start DynamoDB Local using docker-compose
+     console.log('Starting DynamoDB Local container...');
+     dynamodbProcess = spawn('docker-compose', ['up', '-d'], {
+       cwd: __dirname, // Ensure docker-compose runs from the test file directory if needed
+       stdio: 'pipe', // Use pipe to potentially capture output if needed
+     });
+     dynamodbProcess.stderr?.on('data', data => console.error(`docker-compose stderr: ${data}`));
+     dynamodbProcess.on('error', err => console.error('Failed to start docker-compose:', err));

-   // Delete the table if it exists from a previous run
-   try {
-     console.log(`Checking if table ${TEST_TABLE_NAME} exists...`);
-     await setupClient.send(new DescribeTableCommand({ TableName: TEST_TABLE_NAME }));
-     console.log(`Table ${TEST_TABLE_NAME} exists, attempting deletion...`);
-     await setupClient.send(new DeleteTableCommand({ TableName: TEST_TABLE_NAME }));
-     console.log(`Waiting for table ${TEST_TABLE_NAME} to be deleted...`);
-     await waitUntilTableNotExists({ client: setupClient, maxWaitTime: 60 }, { TableName: TEST_TABLE_NAME });
-     console.log(`Table ${TEST_TABLE_NAME} deleted.`);
-   } catch (e: unknown) {
-     let errorName: string | undefined;
-
-     if (e instanceof Error) {
-       errorName = e.name;
-     } else if (
-       typeof e === 'object' &&
-       e !== null &&
-       'name' in e &&
-       typeof (e as { name: unknown }).name === 'string'
-     ) {
-       errorName = (e as { name: string }).name;
-     }
+     // Add a short fixed delay to allow the container process to stabilize before polling
+     console.log('Waiting a few seconds for container process to stabilize...');
+     await new Promise(resolve => setTimeout(resolve, 3000)); // 3-second delay

-     if (errorName === 'ResourceNotFoundException') {
-       console.log(`Table ${TEST_TABLE_NAME} does not exist, proceeding.`);
-     } else {
-       console.error(`Error deleting table ${TEST_TABLE_NAME}:`, e);
-       throw e; // Rethrow other errors
+     // Wait for DynamoDB to be ready
+     try {
+       await waitForDynamoDBLocal(setupClient);
+     } catch (e) {
+       console.error('Failed to connect to DynamoDB Local after startup.', e);
+       // Attempt to stop container on failure
+       spawn('docker-compose', ['down'], { cwd: __dirname, stdio: 'pipe' });
+       throw e; // Re-throw error to fail the test suite
      }
-   }

-   // Create the single table with the correct schema
-   console.log(`Creating table ${TEST_TABLE_NAME}...`);
-   try {
-     const createTableCommand = new CreateTableCommand({
-       TableName: TEST_TABLE_NAME,
-       AttributeDefinitions: [
-         { AttributeName: 'pk', AttributeType: 'S' },
-         { AttributeName: 'sk', AttributeType: 'S' },
-         { AttributeName: 'gsi1pk', AttributeType: 'S' },
-         { AttributeName: 'gsi1sk', AttributeType: 'S' },
-         { AttributeName: 'gsi2pk', AttributeType: 'S' },
-         { AttributeName: 'gsi2sk', AttributeType: 'S' },
-       ],
-       KeySchema: [
-         { AttributeName: 'pk', KeyType: 'HASH' },
-         { AttributeName: 'sk', KeyType: 'RANGE' },
-       ],
-       GlobalSecondaryIndexes: [
-         {
-           IndexName: 'gsi1',
-           KeySchema: [
-             { AttributeName: 'gsi1pk', KeyType: 'HASH' },
-             { AttributeName: 'gsi1sk', KeyType: 'RANGE' },
-           ],
-           Projection: { ProjectionType: 'ALL' },
-         },
-         {
-           IndexName: 'gsi2',
-           KeySchema: [
-             { AttributeName: 'gsi2pk', KeyType: 'HASH' },
-             { AttributeName: 'gsi2sk', KeyType: 'RANGE' },
-           ],
-           Projection: { ProjectionType: 'ALL' },
-         },
-       ],
-       BillingMode: 'PAY_PER_REQUEST', // Use PAY_PER_REQUEST for local testing ease
-     });
-     await setupClient.send(createTableCommand);
-     console.log(`Waiting for table ${TEST_TABLE_NAME} to become active...`);
-     await waitUntilTableExists({ client: setupClient, maxWaitTime: 60 }, { TableName: TEST_TABLE_NAME });
-     console.log(`Table ${TEST_TABLE_NAME} created successfully.`);
-   } catch (e) {
-     console.error(`Failed to create table ${TEST_TABLE_NAME}:`, e);
-     throw e;
-   }
- }, 60000); // Increase timeout for beforeAll to accommodate Docker startup and table creation
-
- // Stop DynamoDB Local container
- afterAll(async () => {
-   console.log('Stopping DynamoDB Local container...');
-   // Optionally delete the table
-   // try {
-   //   await setupClient.send(new DeleteTableCommand({ TableName: TEST_TABLE_NAME }));
-   //   await waitUntilTableNotExists({ client: setupClient, maxWaitTime: 60 }, { TableName: TEST_TABLE_NAME });
-   //   console.log(`Test table ${TEST_TABLE_NAME} deleted.`);
-   // } catch (error) {
-   //   console.error(`Error deleting test table ${TEST_TABLE_NAME}:`, error);
-   // }
-
-   if (setupClient) {
-     setupClient.destroy();
-   }
+     // Delete the table if it exists from a previous run
+     try {
+       console.log(`Checking if table ${TEST_TABLE_NAME} exists...`);
+       await setupClient.send(new DescribeTableCommand({ TableName: TEST_TABLE_NAME }));
+       console.log(`Table ${TEST_TABLE_NAME} exists, attempting deletion...`);
+       await setupClient.send(new DeleteTableCommand({ TableName: TEST_TABLE_NAME }));
+       console.log(`Waiting for table ${TEST_TABLE_NAME} to be deleted...`);
+       await waitUntilTableNotExists({ client: setupClient, maxWaitTime: 60 }, { TableName: TEST_TABLE_NAME });
+       console.log(`Table ${TEST_TABLE_NAME} deleted.`);
+     } catch (e: unknown) {
+       let errorName: string | undefined;

-   const stopProcess = spawn('docker-compose', ['down', '--volumes'], {
-     // Remove volumes too
-     cwd: __dirname,
-     stdio: 'pipe',
-   });
-   stopProcess.stderr?.on('data', data => console.error(`docker-compose down stderr: ${data}`));
-   stopProcess.on('error', err => console.error('Failed to stop docker-compose:', err));
-   await new Promise(resolve => stopProcess.on('close', resolve)); // Wait for compose down
-
-   if (dynamodbProcess && !dynamodbProcess.killed) {
-     dynamodbProcess.kill();
-   }
-   console.log('DynamoDB Local container stopped.');
- }, 30000); // Increase timeout for afterAll
+       if (e instanceof Error) {
+         errorName = e.name;
+       } else if (
+         typeof e === 'object' &&
+         e !== null &&
+         'name' in e &&
+         typeof (e as { name: unknown }).name === 'string'
+       ) {
+         errorName = (e as { name: string }).name;
+       }

- describe('DynamoDBStore Integration Tests', () => {
-   let store: DynamoDBStore;
+       if (errorName === 'ResourceNotFoundException') {
+         console.log(`Table ${TEST_TABLE_NAME} does not exist, proceeding.`);
+       } else {
+         console.error(`Error deleting table ${TEST_TABLE_NAME}:`, e);
+         throw e; // Rethrow other errors
+       }
+     }

-   beforeAll(async () => {
-     // Initialize main store instance used by most tests
-     store = new DynamoDBStore({
+     // Create the single table with the correct schema
+     console.log(`Creating table ${TEST_TABLE_NAME}...`);
+     try {
+       const createTableCommand = new CreateTableCommand({
+         TableName: TEST_TABLE_NAME,
+         AttributeDefinitions: [
+           { AttributeName: 'pk', AttributeType: 'S' },
+           { AttributeName: 'sk', AttributeType: 'S' },
+           { AttributeName: 'gsi1pk', AttributeType: 'S' },
+           { AttributeName: 'gsi1sk', AttributeType: 'S' },
+           { AttributeName: 'gsi2pk', AttributeType: 'S' },
+           { AttributeName: 'gsi2sk', AttributeType: 'S' },
+           { AttributeName: 'gsi3pk', AttributeType: 'S' },
+           { AttributeName: 'gsi3sk', AttributeType: 'S' },
+           { AttributeName: 'gsi4pk', AttributeType: 'S' },
+           { AttributeName: 'gsi4sk', AttributeType: 'S' },
+           { AttributeName: 'gsi5pk', AttributeType: 'S' },
+           { AttributeName: 'gsi5sk', AttributeType: 'S' },
+           { AttributeName: 'gsi6pk', AttributeType: 'S' },
+           { AttributeName: 'gsi6sk', AttributeType: 'S' },
+         ],
+         KeySchema: [
+           { AttributeName: 'pk', KeyType: 'HASH' },
+           { AttributeName: 'sk', KeyType: 'RANGE' },
+         ],
+         GlobalSecondaryIndexes: [
+           {
+             IndexName: 'gsi1',
+             KeySchema: [
+               { AttributeName: 'gsi1pk', KeyType: 'HASH' },
+               { AttributeName: 'gsi1sk', KeyType: 'RANGE' },
+             ],
+             Projection: { ProjectionType: 'ALL' },
+           },
+           {
+             IndexName: 'gsi2',
+             KeySchema: [
+               { AttributeName: 'gsi2pk', KeyType: 'HASH' },
+               { AttributeName: 'gsi2sk', KeyType: 'RANGE' },
+             ],
+             Projection: { ProjectionType: 'ALL' },
+           },
+           {
+             IndexName: 'gsi3',
+             KeySchema: [
+               { AttributeName: 'gsi3pk', KeyType: 'HASH' },
+               { AttributeName: 'gsi3sk', KeyType: 'RANGE' },
+             ],
+             Projection: { ProjectionType: 'ALL' },
+           },
+           {
+             IndexName: 'gsi4',
+             KeySchema: [
+               { AttributeName: 'gsi4pk', KeyType: 'HASH' },
+               { AttributeName: 'gsi4sk', KeyType: 'RANGE' },
+             ],
+             Projection: { ProjectionType: 'ALL' },
+           },
+           {
+             IndexName: 'gsi5',
+             KeySchema: [
+               { AttributeName: 'gsi5pk', KeyType: 'HASH' },
+               { AttributeName: 'gsi5sk', KeyType: 'RANGE' },
+             ],
+             Projection: { ProjectionType: 'ALL' },
+           },
+           {
+             IndexName: 'gsi6',
+             KeySchema: [
+               { AttributeName: 'gsi6pk', KeyType: 'HASH' },
+               { AttributeName: 'gsi6sk', KeyType: 'RANGE' },
+             ],
+             Projection: { ProjectionType: 'ALL' },
+           },
+         ],
+         BillingMode: 'PAY_PER_REQUEST', // Use PAY_PER_REQUEST for local testing ease
+       });
+       await setupClient.send(createTableCommand);
+       console.log(`Waiting for table ${TEST_TABLE_NAME} to become active...`);
+       await waitUntilTableExists({ client: setupClient, maxWaitTime: 60 }, { TableName: TEST_TABLE_NAME });
+       console.log(`Table ${TEST_TABLE_NAME} created successfully.`);
+     } catch (e) {
+       console.error(`Failed to create table ${TEST_TABLE_NAME}:`, e);
+       throw e;
+     }
+   }, 60000); // Increase timeout for beforeAll to accommodate Docker startup and table creation
+
+   // Stop DynamoDB Local container
+   // afterAll(async () => {
+   //   console.log('Stopping DynamoDB Local container...');
+   //   // Optionally delete the table
+   //   // try {
+   //   //   await setupClient.send(new DeleteTableCommand({ TableName: TEST_TABLE_NAME }));
+   //   //   await waitUntilTableNotExists({ client: setupClient, maxWaitTime: 60 }, { TableName: TEST_TABLE_NAME });
+   //   //   console.log(`Test table ${TEST_TABLE_NAME} deleted.`);
+   //   // } catch (error) {
+   //   //   console.error(`Error deleting test table ${TEST_TABLE_NAME}:`, error);
+   //   // }
+
+   //   if (setupClient) {
+   //     setupClient.destroy();
+   //   }
+
+   //   const stopProcess = spawn('docker-compose', ['down', '--volumes'], {
+   //     // Remove volumes too
+   //     cwd: __dirname,
+   //     stdio: 'pipe',
+   //   });
+   //   stopProcess.stderr?.on('data', data => console.error(`docker-compose down stderr: ${data}`));
+   //   stopProcess.on('error', err => console.error('Failed to stop docker-compose:', err));
+   //   await new Promise(resolve => stopProcess.on('close', resolve)); // Wait for compose down
+
+   //   if (dynamodbProcess && !dynamodbProcess.killed) {
+   //     dynamodbProcess.kill();
+   //   }
+   //   console.log('DynamoDB Local container stopped.');
+   // }, 30000); // Increase timeout for afterAll
+
+   createTestSuite(
+     new DynamoDBStore({
        name: 'DynamoDBStoreTest',
        config: {
          tableName: TEST_TABLE_NAME,
@@ -273,1211 +304,1117 @@ describe('DynamoDBStore Integration Tests', () => {
273
304
  region: LOCAL_REGION,
274
305
  credentials: { accessKeyId: 'test', secretAccessKey: 'test' },
275
306
  },
276
- });
277
- console.log('Main DynamoDBStore initialized for tests.');
278
- });
279
-
280
- beforeEach(async () => {
281
- // Clear table between tests using the setup client
282
- await clearSingleTable(setupClient, TEST_TABLE_NAME);
283
- });
284
-
285
- afterAll(async () => {
286
- // No client.destroy() needed here as the store manages its internal client
287
- // Or if the store exposes a close/destroy method, call that.
288
- if (store) {
289
- await store.close(); // Assuming store has a close method
290
- }
291
- });
292
-
293
- // DynamoDB-specific tests
294
- describe('DynamoDB-specific operations', () => {
295
- describe('Entity Operations', () => {
296
- test('should persist and retrieve thread metadata', async () => {
297
- const now = new Date();
298
- const threadId = 'metadata-thread';
299
- const metadata = { user: 'test-user', complex: { nested: true, arr: [1, 'a'] } };
300
- const thread: StorageThreadType = {
301
- id: threadId,
302
- resourceId: 'resource-meta',
303
- title: 'Metadata Test Thread',
304
- createdAt: now,
305
- updatedAt: now,
306
- metadata: metadata,
307
- };
308
- await store.saveThread({ thread });
309
- const retrieved = await store.getThreadById({ threadId });
310
- expect(retrieved).toBeDefined();
311
- expect(retrieved?.metadata).toEqual(metadata); // ElectroDB should handle JSON stringify/parse
312
- });
313
-
314
- test('should handle large workflow snapshots near DynamoDB item size limit', async () => {
315
- // Test remains largely the same, relies on clearSingleTable working
316
- const now = Date.now();
317
- const largeSnapshot: WorkflowRunState = {
318
- // ... (rest of the large snapshot definition) ...
319
- value: { state: 'test' },
320
- context: {
321
- input: { source: 'test' },
322
- step1: { status: 'success', output: { data: 'test' } },
323
- } as unknown as WorkflowRunState['context'],
324
- serializedStepGraph: [],
325
- activePaths: [{ stepPath: ['test'], stepId: 'step1', status: 'success' }],
326
- suspendedPaths: { test: [1] },
327
- runId: 'test-run-large', // Use unique runId
328
- timestamp: now,
329
- status: 'success',
330
- };
331
-
332
- await expect(
333
- store.persistWorkflowSnapshot({
334
- workflowName: 'test-workflow-large',
335
- runId: 'test-run-large',
336
- snapshot: largeSnapshot,
337
- }),
338
- ).resolves.not.toThrow();
339
-
340
- const retrieved = await store.loadWorkflowSnapshot({
341
- workflowName: 'test-workflow-large',
342
- runId: 'test-run-large',
343
- });
344
-
345
- expect(retrieved).toEqual(largeSnapshot);
346
- }, 10000); // Increase timeout for potentially large item handling
347
-
348
- test('should handle concurrent thread updates (last writer wins)', async () => {
349
- // Test remains largely the same, verifies final state
350
- const threadId = 'concurrent-thread';
351
- const resourceId = 'resource-123';
352
- const now = new Date();
353
- const thread: StorageThreadType = {
354
- id: threadId,
355
- resourceId,
356
- title: 'Initial Title',
357
- createdAt: now,
358
- updatedAt: now,
359
- metadata: { initial: true },
360
- };
361
- await store.saveThread({ thread });
362
-
363
- // Simulate potential delay between read and write for update 1
364
- const update1 = async () => {
365
- await new Promise(res => setTimeout(res, 50)); // Short delay
366
- await store.updateThread({
367
- id: threadId,
368
- title: 'Updated Thread 1',
369
- metadata: { update: 1, time: Date.now() },
370
- });
371
- };
372
- // Simulate potential delay between read and write for update 2
373
- const update2 = async () => {
374
- await new Promise(res => setTimeout(res, 100)); // Slightly longer delay
375
- await store.updateThread({
376
- id: threadId,
377
- title: 'Updated Thread 2',
378
- metadata: { update: 2, time: Date.now() },
379
- });
380
- };
381
-
382
- await Promise.all([update1(), update2()]);
383
-
384
- const retrieved = await store.getThreadById({ threadId });
385
- expect(retrieved).toBeDefined();
386
- expect(retrieved?.id).toBe(threadId);
387
- // In DynamoDB default (non-conditional) updates, the last writer wins.
388
- // We expect title 2 / metadata 2 because update2 started later.
389
- expect(retrieved?.title).toBe('Updated Thread 2');
390
- expect(retrieved?.metadata?.update).toBe(2);
391
- });
392
-
393
- test('getMessages should return the N most recent messages [v2 storage]', async () => {
394
- const threadId = 'last-selector-thread';
395
- const start = Date.now();
396
-
397
- // Insert 10 messages with increasing timestamps
398
- const messages: MastraMessageV2[] = Array.from({ length: 10 }, (_, i) => ({
399
- id: `m-${i}`,
400
- threadId,
401
- resourceId: 'r',
402
- content: { format: 2, parts: [{ type: 'text', text: `msg-${i}` }] },
403
- createdAt: new Date(start + i), // 0..9 ms apart
404
- role: 'user',
405
- type: 'text',
406
- }));
407
- await store.saveMessages({ messages, format: 'v2' });
408
-
409
- const last3 = await store.getMessages({
410
- format: 'v2',
411
- threadId,
412
- selectBy: { last: 3 },
413
- });
414
-
415
- expect(last3).toHaveLength(3);
416
- expect(last3.map(m => (m.content.parts[0] as { type: string; text: string }).text)).toEqual([
417
- 'msg-7',
418
- 'msg-8',
419
- 'msg-9',
420
- ]);
421
- });
422
-
423
- test('getMessages should return the N most recent messages [v1 storage]', async () => {
424
- const threadId = 'last-selector-thread';
425
- const start = Date.now();
426
-
427
- // Insert 10 messages with increasing timestamps
428
- const messages: MastraMessageV1[] = Array.from({ length: 10 }, (_, i) => ({
429
- id: `m-${i}`,
430
- threadId,
431
- resourceId: 'r',
432
- content: `msg-${i}`,
433
- createdAt: new Date(start + i), // 0..9 ms apart
434
- role: 'user',
435
- type: 'text',
436
- }));
437
- await store.saveMessages({ messages });
438
-
439
- const last3 = await store.getMessages({
440
- threadId,
441
- selectBy: { last: 3 },
442
- });
443
-
444
- expect(last3).toHaveLength(3);
445
- expect(last3.map(m => m.content)).toEqual(['msg-7', 'msg-8', 'msg-9']);
446
- });
447
-
448
- test('should update thread updatedAt when a message is saved to it', async () => {
449
- const thread: StorageThreadType = {
450
- id: 'thread-update-test',
451
- resourceId: 'resource-update',
452
- title: 'Update Test Thread',
453
- createdAt: new Date(),
454
- updatedAt: new Date(),
455
- metadata: { test: true },
456
- };
457
- await store.saveThread({ thread });
458
-
459
- // Get the initial thread to capture the original updatedAt
460
- const initialThread = await store.getThreadById({ threadId: thread.id });
461
- expect(initialThread).toBeDefined();
462
- const originalUpdatedAt = initialThread!.updatedAt;
463
-
464
- // Wait a small amount to ensure different timestamp
465
- await new Promise(resolve => setTimeout(resolve, 100));
466
-
467
- // Create and save a message to the thread
468
- const message: MastraMessageV1 = {
469
- id: 'msg-update-test',
470
- threadId: thread.id,
471
- resourceId: 'resource-update',
472
- content: 'Test message for update',
473
- createdAt: new Date(),
474
- role: 'user',
475
- type: 'text',
476
- };
477
- await store.saveMessages({ messages: [message] });
478
-
479
- // Retrieve the thread again and check that updatedAt was updated
480
- const updatedThread = await store.getThreadById({ threadId: thread.id });
481
- expect(updatedThread).toBeDefined();
482
- expect(updatedThread!.updatedAt.getTime()).toBeGreaterThan(originalUpdatedAt.getTime());
483
- });
484
-
485
- test('saveThread upsert: should create new thread when thread does not exist', async () => {
486
- const threadId = `upsert-new-${randomUUID()}`;
487
- const now = new Date();
488
- const thread: StorageThreadType = {
489
- id: threadId,
490
- resourceId: 'resource-upsert-new',
491
- title: 'New Thread via Upsert',
492
- createdAt: now,
493
- updatedAt: now,
494
- metadata: { operation: 'create', test: true },
495
- };
496
-
497
- // Save the thread (should create new)
498
- await expect(store.saveThread({ thread })).resolves.not.toThrow();
499
-
500
- // Verify the thread was created
501
- const retrieved = await store.getThreadById({ threadId });
502
- expect(retrieved).toBeDefined();
503
- expect(retrieved?.id).toBe(threadId);
504
- expect(retrieved?.title).toBe('New Thread via Upsert');
505
- expect(retrieved?.resourceId).toBe('resource-upsert-new');
506
- expect(retrieved?.metadata).toEqual({ operation: 'create', test: true });
507
- });
508
-
509
- test('saveThread upsert: should update existing thread when thread already exists', async () => {
510
- const threadId = `upsert-update-${randomUUID()}`;
511
- const initialCreatedAt = new Date();
512
-
513
- // Create initial thread
514
- const initialThread: StorageThreadType = {
515
- id: threadId,
516
- resourceId: 'resource-upsert-initial',
517
- title: 'Initial Thread Title',
518
- createdAt: initialCreatedAt,
519
- updatedAt: initialCreatedAt,
520
- metadata: { operation: 'initial', version: 1 },
521
- };
522
- await store.saveThread({ thread: initialThread });
523
-
524
- // Wait a small amount to ensure different timestamp
525
- await new Promise(resolve => setTimeout(resolve, 100));
526
-
527
- // Update the thread with same ID but different data
528
- const updatedThread: StorageThreadType = {
529
- id: threadId,
530
- resourceId: 'resource-upsert-updated',
531
- title: 'Updated Thread Title',
532
- createdAt: initialCreatedAt, // Keep original creation time
533
- updatedAt: new Date(), // New update time
534
- metadata: { operation: 'update', version: 2 },
535
- };
536
- await expect(store.saveThread({ thread: updatedThread })).resolves.not.toThrow();
537
-
538
- // Verify the thread was updated
539
- const retrieved = await store.getThreadById({ threadId });
540
- expect(retrieved).toBeDefined();
541
- expect(retrieved?.id).toBe(threadId);
542
- expect(retrieved?.title).toBe('Updated Thread Title');
543
- expect(retrieved?.resourceId).toBe('resource-upsert-updated');
544
- expect(retrieved?.metadata).toEqual({ operation: 'update', version: 2 });
545
-
546
- // updatedAt should be newer than the initial creation time
547
- expect(retrieved?.updatedAt.getTime()).toBeGreaterThan(initialCreatedAt.getTime());
548
- // createdAt should remain exactly equal to the initial creation time
549
- expect(retrieved?.createdAt.getTime()).toBe(initialCreatedAt.getTime());
550
- });
551
-
552
- test('saveThread upsert: should handle complex metadata updates', async () => {
553
- const threadId = `upsert-metadata-${randomUUID()}`;
554
- const initialMetadata = {
555
- user: 'initial-user',
556
- tags: ['initial', 'test'],
557
- count: 1,
558
- };
559
-
560
- // Create initial thread with complex metadata
561
- const initialThread: StorageThreadType = {
562
- id: threadId,
563
- resourceId: 'resource-metadata-test',
564
- title: 'Metadata Test Thread',
565
- createdAt: new Date(),
566
- updatedAt: new Date(),
567
- metadata: initialMetadata,
568
- };
569
- await store.saveThread({ thread: initialThread });
570
-
571
- // Wait a small amount to ensure different timestamp
572
- await new Promise(resolve => setTimeout(resolve, 100));
573
-
574
- // Update with completely different metadata structure
575
- const updatedMetadata = {
576
- user: 'updated-user',
577
- settings: { theme: 'light', language: 'ja', notifications: true },
578
- tags: ['updated', 'upsert', 'complex'],
579
- count: 5,
580
- newField: { nested: { deeply: 'value' } },
581
- };
582
-
583
- const updatedThread: StorageThreadType = {
584
- id: threadId,
585
- resourceId: 'resource-metadata-test',
586
- title: 'Updated Metadata Thread',
587
- createdAt: initialThread.createdAt,
588
- updatedAt: new Date(),
589
- metadata: updatedMetadata,
590
- };
591
- await expect(store.saveThread({ thread: updatedThread })).resolves.not.toThrow();
592
-
593
- // Verify the metadata was completely replaced
594
- const retrieved = await store.getThreadById({ threadId });
595
- expect(retrieved).toBeDefined();
596
- expect(retrieved?.metadata).toEqual(updatedMetadata);
597
- expect(retrieved?.metadata?.user).toBe('updated-user');
598
- expect(retrieved?.metadata?.tags).toEqual(['updated', 'upsert', 'complex']);
599
- expect(retrieved?.title).toBe('Updated Metadata Thread');
600
- });
601
- });
602
-
603
- describe('Batch Operations', () => {
604
- test('should handle batch message inserts efficiently (up to 25 items) [v1 storage]', async () => {
605
- const startTime = Date.now(); // Get a base time
606
- const threadId = 'batch-thread';
607
- const messages: MastraMessageV1[] = Array.from({ length: 25 }, (_, i) => ({
608
- id: `msg-${i}`,
609
- threadId,
610
- resourceId: 'test-resource',
611
- content: `Message ${i}`,
612
- // Increment timestamp slightly for each message to ensure order
613
- createdAt: new Date(startTime + i),
614
- role: i % 2 === 0 ? 'user' : 'assistant',
615
- type: 'text',
616
- }));
617
-
618
- // Assuming saveMessages uses BatchWriteItem internally
619
- await expect(store.saveMessages({ messages })).resolves.not.toThrow();
620
-
621
- const retrieved = await store.getMessages({ threadId });
622
- expect(retrieved).toHaveLength(25);
623
- // Now the order should be guaranteed by the ascending createdAt timestamp
624
- expect(retrieved[0]?.content).toBe('Message 0');
625
- expect(retrieved[24]?.content).toBe('Message 24');
626
- });
627
-
628
- test('should handle batch message inserts efficiently (up to 25 items) [v2 storage]', async () => {
629
- const startTime = Date.now(); // Get a base time
630
- const threadId = 'batch-thread';
631
- const messages: MastraMessageV2[] = Array.from({ length: 25 }, (_, i) => ({
632
- id: `msg-${i}`,
633
- threadId,
634
- resourceId: 'test-resource',
635
- content: { format: 2, parts: [{ type: 'text', text: `Message ${i}` }] },
636
- // Increment timestamp slightly for each message to ensure order
637
- createdAt: new Date(startTime + i),
638
- role: i % 2 === 0 ? 'user' : 'assistant',
639
- type: 'text',
640
- }));
641
-
642
- // Assuming saveMessages uses BatchWriteItem internally
643
- await expect(store.saveMessages({ messages, format: 'v2' })).resolves.not.toThrow();
644
-
645
- const retrieved = await store.getMessages({ threadId, format: 'v2' });
646
- expect(retrieved).toHaveLength(25);
647
- // Now the order should be guaranteed by the ascending createdAt timestamp
648
- if (retrieved[0]?.content?.parts[0]?.type !== `text`) throw new Error(`Expected text part`);
649
- expect(retrieved[0].content.parts[0].text).toBe('Message 0');
650
- if (retrieved[24]?.content?.parts?.[0]?.type !== `text`) throw new Error(`Expected text part`);
651
- expect(retrieved[24].content.parts[0].text).toBe('Message 24');
652
- });
653
-
654
- test('should handle batch inserts exceeding 25 items (if saveMessages chunks)', async () => {
655
- const startTime = Date.now(); // Get a base time
656
- const threadId = 'batch-thread-large';
657
- const messages: MastraMessageV1[] = Array.from({ length: 30 }, (_, i) => ({
658
- id: `msg-large-${i}`,
659
- threadId,
660
- resourceId: 'test-resource-large',
661
- content: `Large Message ${i}`,
662
- // Increment timestamp slightly for each message to ensure order
663
- createdAt: new Date(startTime + i),
664
- role: 'user',
665
- type: 'text',
666
- }));
667
-
668
- await expect(store.saveMessages({ messages })).resolves.not.toThrow();
669
-
670
- const retrieved = await store.getMessages({ threadId });
671
- expect(retrieved).toHaveLength(30); // Verify all were saved
672
- // Add order check for the > 25 test as well
673
- expect(retrieved[0]?.content).toBe('Large Message 0');
674
- expect(retrieved[29]?.content).toBe('Large Message 29');
675
- });
676
-
677
- test('should upsert messages: duplicate id+threadId results in update, not duplicate row', async () => {
678
- const thread = await createSampleThread();
679
- await store.saveThread({ thread });
680
- const baseMessage = createSampleMessageV2({
681
- threadId: thread.id,
682
- createdAt: new Date(),
683
- content: { content: 'Original' },
684
- resourceId: thread.resourceId,
685
- });
686
-
687
- // Insert the message for the first time
688
- await store.saveMessages({ messages: [baseMessage], format: 'v2' });
689
-
690
- // // Insert again with the same id and threadId but different content
691
- const updatedMessage = {
692
- ...createSampleMessageV2({
693
- threadId: thread.id,
694
- createdAt: new Date(),
695
- content: { content: 'Updated' },
696
- resourceId: thread.resourceId,
697
- }),
698
- id: baseMessage.id,
699
- };
700
-
701
- await store.saveMessages({ messages: [updatedMessage], format: 'v2' });
702
-
703
- // Retrieve messages for the thread
704
- const retrievedMessages = await store.getMessages({ threadId: thread.id, format: 'v2' });
705
-
706
- // Only one message should exist for that id+threadId
707
- expect(retrievedMessages.filter(m => m.id === baseMessage.id)).toHaveLength(1);
708
-
709
- // The content should be the updated one
710
- expect(retrievedMessages.find(m => m.id === baseMessage.id)?.content.content).toBe('Updated');
711
- });
712
-
713
- test('should upsert messages: duplicate id and different threadid', async () => {
714
- const thread1 = await createSampleThread();
715
- const thread2 = await createSampleThread();
716
- await store.saveThread({ thread: thread1 });
717
- await store.saveThread({ thread: thread2 });
718
-
719
- const message = createSampleMessageV2({
720
- threadId: thread1.id,
721
- createdAt: new Date(),
722
- content: { content: 'Thread1 Content' },
723
- resourceId: thread1.resourceId,
724
- });
725
-
726
- // Insert message into thread1
727
- await store.saveMessages({ messages: [message], format: 'v2' });
728
-
729
- // Attempt to insert a message with the same id but different threadId
730
- const conflictingMessage = {
731
- ...createSampleMessageV2({
732
- threadId: thread2.id, // different thread
733
- content: { content: 'Thread2 Content' },
734
- resourceId: thread2.resourceId,
735
- }),
736
- id: message.id,
737
- };
738
-
739
- // Save should save the message to the new thread
740
- await store.saveMessages({ messages: [conflictingMessage], format: 'v2' });
741
-
742
- // Retrieve messages for both threads
743
- const thread1Messages = await store.getMessages({ threadId: thread1.id, format: 'v2' });
744
- const thread2Messages = await store.getMessages({ threadId: thread2.id, format: 'v2' });
745
-
746
- // Thread 1 should NOT have the message with that id
747
- expect(thread1Messages.find(m => m.id === message.id)).toBeUndefined();
748
-
749
- // Thread 2 should have the message with that id
750
- expect(thread2Messages.find(m => m.id === message.id)?.content.content).toBe('Thread2 Content');
751
- });
752
- });
753
-
754
- describe('Single-Table Design', () => {
755
- test('should maintain entity separation in single table', async () => {
756
- // Test remains largely the same
757
- const threadId = 'mixed-thread';
758
- const workflowName = 'mixed-workflow';
759
- const now = new Date();
760
- const thread: StorageThreadType = {
761
- id: threadId,
762
- resourceId: 'mixed-resource',
763
- title: 'Mixed Thread',
764
- createdAt: now,
765
- updatedAt: now,
766
- metadata: { type: 'thread' },
767
- };
768
- await store.saveThread({ thread });
769
-
770
- const workflowSnapshot: WorkflowRunState = {
771
- // ...(snapshot definition)
772
- value: { state: 'test' },
773
- context: {
774
- step1: { status: 'success', output: { data: 'test' } },
775
- input: { source: 'test' },
776
- } as unknown as WorkflowRunState['context'],
777
- serializedStepGraph: [],
778
- activePaths: [{ stepPath: ['test'], stepId: 'step1', status: 'success' }],
779
- suspendedPaths: { test: [1] },
780
- runId: 'mixed-run',
781
- timestamp: Date.now(),
782
- status: 'success',
783
- };
784
- await store.persistWorkflowSnapshot({ workflowName, runId: 'mixed-run', snapshot: workflowSnapshot });
785
-
786
- const retrievedThread = await store.getThreadById({ threadId });
787
- const retrievedWorkflow = await store.loadWorkflowSnapshot({ workflowName, runId: 'mixed-run' });
788
-
789
- expect(retrievedThread?.metadata?.type).toBe('thread');
790
- expect(retrievedWorkflow).toEqual(workflowSnapshot);
791
- });
792
- });
793
-
794
- describe('Error Handling', () => {
795
- test('should handle non-existent IDs gracefully for getById methods', async () => {
796
- const nonExistentId = 'does-not-exist';
797
- // Test getThreadById (already partially covered but good to keep specific)
798
- const thread = await store.getThreadById({ threadId: nonExistentId });
799
- expect(thread).toBeNull();
800
-
801
- // Test loadWorkflowSnapshot (already covered in Workflow tests, technically)
802
- const snapshot = await store.loadWorkflowSnapshot({ workflowName: nonExistentId, runId: nonExistentId });
803
- expect(snapshot).toBeNull();
804
-
805
- // Test getWorkflowRunById (already covered in Workflow tests, technically)
806
- const workflowRun = await store.getWorkflowRunById({ runId: nonExistentId });
807
- expect(workflowRun).toBeNull();
808
- });
809
-
810
- test('getMessages should return empty array for non-existent thread', async () => {
811
- const messages = await store.getMessages({ threadId: 'non-existent-thread' });
812
- expect(messages).toEqual([]);
813
- });
814
-
815
- test('getThreadsByResourceId should return empty array for non-existent resourceId', async () => {
816
- const threads = await store.getThreadsByResourceId({ resourceId: 'non-existent-resource' });
817
- expect(threads).toEqual([]);
818
- });
819
-
820
- test('getTraces should return empty array when no traces match filter', async () => {
821
- const tracesByName = await store.getTraces({ name: 'non-existent-trace', page: 1, perPage: 10 });
822
- expect(tracesByName).toEqual([]);
823
- const tracesByScope = await store.getTraces({ scope: 'non-existent-scope', page: 1, perPage: 10 });
824
- expect(tracesByScope).toEqual([]);
825
- });
826
-
827
- test('getEvalsByAgentName should return empty array for non-existent agent', async () => {
828
- const evals = await store.getEvalsByAgentName('non-existent-agent');
829
- expect(evals).toEqual([]);
830
- });
831
-
832
- test('getWorkflowRuns should return empty result for non-existent filters', async () => {
833
- const { runs: runsByName, total: totalByName } = await store.getWorkflowRuns({
834
- workflowName: 'non-existent-workflow',
835
- });
836
- expect(runsByName).toEqual([]);
837
- expect(totalByName).toBe(0);
838
-
839
- const { runs: runsByResource, total: totalByResource } = await store.getWorkflowRuns({
840
- resourceId: 'non-existent-resource',
841
- });
842
- expect(runsByResource).toEqual([]);
843
- expect(totalByResource).toBe(0);
844
- });
845
- }); // End Error Handling describe
846
- });
847
-
848
- // --- Trace Operations Tests ---
849
- describe('Trace Operations', () => {
850
- const sampleTrace = (name: string, scope: string, startTime = Date.now()) => ({
851
- id: `trace-${randomUUID()}`,
852
- parentSpanId: `span-${randomUUID()}`,
853
- traceId: `traceid-${randomUUID()}`,
854
- name,
855
- scope,
856
- kind: 1, // Example kind
857
- startTime: startTime,
858
- endTime: startTime + 100, // Example duration
859
- status: JSON.stringify({ code: 0 }), // Example status
860
- attributes: JSON.stringify({ key: 'value', scopeAttr: scope }),
861
- events: JSON.stringify([{ name: 'event1', timestamp: startTime + 50 }]),
862
- links: JSON.stringify([]),
863
- createdAt: new Date(startTime).toISOString(),
864
- updatedAt: new Date(startTime).toISOString(),
865
- });
866
-
867
- test('should batch insert and retrieve traces', async () => {
868
- const trace1 = sampleTrace('trace-op-1', 'scope-A');
869
- const trace2 = sampleTrace('trace-op-2', 'scope-A', Date.now() + 10);
870
- const trace3 = sampleTrace('trace-op-3', 'scope-B', Date.now() + 20);
871
- const records = [trace1, trace2, trace3];
872
-
873
- await expect(store.batchTraceInsert({ records })).resolves.not.toThrow();
874
-
875
- // Retrieve all (via scan, assuming low test data volume)
876
- const allTraces = await store.getTraces({ page: 1, perPage: 10 });
877
- expect(allTraces.length).toBe(3);
878
- });
879
-
880
- test('should handle Date objects for createdAt/updatedAt fields in batchTraceInsert', async () => {
881
- // This test specifically verifies the bug from the issue where Date objects
882
- // were passed instead of ISO strings and ElectroDB validation failed
883
- const now = new Date();
884
- const traceWithDateObjects = {
885
- id: `trace-${randomUUID()}`,
886
- parentSpanId: `span-${randomUUID()}`,
887
- traceId: `traceid-${randomUUID()}`,
888
- name: 'test-trace-with-dates',
889
- scope: 'default-tracer',
890
- kind: 1,
891
- startTime: now.getTime(),
892
- endTime: now.getTime() + 100,
893
- status: JSON.stringify({ code: 0 }),
894
- attributes: JSON.stringify({ key: 'value' }),
895
- events: JSON.stringify([]),
896
- links: JSON.stringify([]),
897
- // These are Date objects, not ISO strings - this should be handled by ElectroDB attribute setters
898
- createdAt: now,
899
- updatedAt: now,
900
- };
901
-
902
- // This should not throw a validation error due to Date object type
903
- await expect(store.batchTraceInsert({ records: [traceWithDateObjects] })).resolves.not.toThrow();
904
-
905
- // Verify the trace was saved correctly
906
- const allTraces = await store.getTraces({ name: 'test-trace-with-dates', page: 1, perPage: 10 });
907
- expect(allTraces.length).toBe(1);
908
- expect(allTraces[0].name).toBe('test-trace-with-dates');
909
- });
910
-
911
- test('should retrieve traces filtered by name using GSI', async () => {
912
- const trace1 = sampleTrace('trace-filter-name', 'scope-X');
913
- const trace2 = sampleTrace('trace-filter-name', 'scope-Y', Date.now() + 10);
914
- const trace3 = sampleTrace('other-name', 'scope-X', Date.now() + 20);
915
- await store.batchTraceInsert({ records: [trace1, trace2, trace3] });
916
-
917
- const filteredTraces = await store.getTraces({ name: 'trace-filter-name', page: 1, perPage: 10 });
918
- expect(filteredTraces.length).toBe(2);
919
- expect(filteredTraces.every(t => t.name === 'trace-filter-name')).toBe(true);
920
- // Check if sorted by startTime (GSI SK) - ascending default
921
- expect(filteredTraces[0].scope).toBe('scope-X');
922
- expect(filteredTraces[1].scope).toBe('scope-Y');
923
- });
924
-
925
- test('should retrieve traces filtered by scope using GSI', async () => {
926
- const trace1 = sampleTrace('trace-filter-scope-A', 'scope-TARGET');
927
- const trace2 = sampleTrace('trace-filter-scope-B', 'scope-OTHER', Date.now() + 10);
928
- const trace3 = sampleTrace('trace-filter-scope-C', 'scope-TARGET', Date.now() + 20);
929
- await store.batchTraceInsert({ records: [trace1, trace2, trace3] });
930
-
931
- const filteredTraces = await store.getTraces({ scope: 'scope-TARGET', page: 1, perPage: 10 });
932
- expect(filteredTraces.length).toBe(2);
933
- expect(filteredTraces.every(t => t.scope === 'scope-TARGET')).toBe(true);
934
- // Check if sorted by startTime (GSI SK) - ascending default
935
- expect(filteredTraces[0].name).toBe('trace-filter-scope-A');
936
- expect(filteredTraces[1].name).toBe('trace-filter-scope-C');
937
- });
938
-
939
- test('should handle pagination for getTraces', async () => {
940
- const traceData = Array.from({ length: 5 }, (_, i) =>
941
- sampleTrace('trace-page', `scope-page`, Date.now() + i * 10),
942
- );
943
- await store.batchTraceInsert({ records: traceData });
944
-
945
- // Get page 1 (first 2 items)
946
- const page1 = await store.getTraces({ name: 'trace-page', page: 1, perPage: 2 });
947
- expect(page1.length).toBe(2);
948
- // Use non-null assertion (!) since lengths are verified
949
- expect(page1[0]!.startTime).toBe(traceData[0]!.startTime);
950
- expect(page1[1]!.startTime).toBe(traceData[1]!.startTime);
951
-
952
- // Get page 2 (next 2 items)
953
- const page2 = await store.getTraces({ name: 'trace-page', page: 2, perPage: 2 });
954
- expect(page2.length).toBe(2);
955
- expect(page2[0]!.startTime).toBe(traceData[2]!.startTime);
956
- expect(page2[1]!.startTime).toBe(traceData[3]!.startTime);
957
-
958
- // Get page 3 (last 1 item)
959
- const page3 = await store.getTraces({ name: 'trace-page', page: 3, perPage: 2 });
960
- expect(page3.length).toBe(1);
961
- expect(page3[0]!.startTime).toBe(traceData[4]!.startTime);
962
-
963
- // Get page beyond results
964
- const page4 = await store.getTraces({ name: 'trace-page', page: 4, perPage: 2 });
965
- expect(page4.length).toBe(0);
966
- });
967
- }); // End Trace Operations describe
968
-
969
- // --- Eval Operations Tests ---
970
- describe('Eval Operations', () => {
971
- const sampleEval = (agentName: string, isTest = false, createdAt = new Date()) => {
972
- const testInfo = isTest ? { testPath: 'test/path.ts', testName: 'Test Name' } : undefined;
973
- return {
974
- entity: 'eval', // Important for saving
975
- agent_name: agentName,
976
- input: 'Sample input',
977
- output: 'Sample output',
978
- result: JSON.stringify({ score: Math.random() }), // Random score
979
- metric_name: 'sample-metric',
980
- instructions: 'Sample instructions',
981
- test_info: testInfo ? JSON.stringify(testInfo) : undefined,
982
- global_run_id: `global-${randomUUID()}`,
983
- run_id: `run-${randomUUID()}`,
984
- created_at: createdAt.toISOString(),
985
- // Add core MastraStorage fields
986
- createdAt: createdAt.toISOString(),
987
- updatedAt: createdAt.toISOString(),
988
- metadata: JSON.stringify({ custom: 'eval_meta' }),
989
- };
990
- };
991
-
992
- test('should handle Date objects for createdAt/updatedAt fields in eval batchInsert', async () => {
993
- // Test that eval entity properly handles Date objects in createdAt/updatedAt fields
994
- const now = new Date();
995
- const evalWithDateObjects = {
996
- entity: 'eval',
997
- agent_name: 'test-agent-dates',
998
- input: 'Test input',
999
- output: 'Test output',
1000
- result: JSON.stringify({ score: 0.95 }),
1001
- metric_name: 'test-metric',
1002
- instructions: 'Test instructions',
1003
- global_run_id: `global-${randomUUID()}`,
1004
- run_id: `run-${randomUUID()}`,
1005
- created_at: now, // Date object instead of ISO string
1006
- // These are Date objects, not ISO strings - should be handled by ElectroDB attribute setters
1007
- createdAt: now,
1008
- updatedAt: now,
1009
- metadata: JSON.stringify({ test: 'meta' }),
1010
- };
1011
-
1012
- // This should not throw a validation error due to Date object type
1013
- await expect(
1014
- store.batchInsert({
1015
- tableName: TABLE_EVALS,
1016
- records: [evalWithDateObjects],
1017
- }),
1018
- ).resolves.not.toThrow();
1019
-
1020
- // Verify the eval was saved correctly
1021
- const evals = await store.getEvalsByAgentName('test-agent-dates');
1022
- expect(evals.length).toBe(1);
1023
- expect(evals[0].agentName).toBe('test-agent-dates');
1024
- });
1025
-
1026
- test('should retrieve evals by agent name using GSI and filter by type', async () => {
1027
- const agent1 = 'eval-agent-1';
1028
- const agent2 = 'eval-agent-2';
1029
- const time1 = new Date();
1030
- const time2 = new Date(Date.now() + 1000);
1031
- const time3 = new Date(Date.now() + 2000);
1032
- const time4 = new Date(Date.now() + 3000);
1033
-
1034
- const eval1_live = sampleEval(agent1, false, time1);
1035
- const eval1_test = sampleEval(agent1, true, time2);
1036
- const eval2_live = sampleEval(agent2, false, time3);
1037
- const eval1_live_later = sampleEval(agent1, false, time4);
1038
-
1039
- // Use generic batchInsert (which expects entity prop already set)
1040
- await store.batchInsert({
1041
- tableName: TABLE_EVALS,
1042
- records: [eval1_live, eval1_test, eval2_live, eval1_live_later],
1043
- });
1044
-
1045
- // Get all for agent1 (expecting DESCENDING order now)
1046
- const allAgent1 = await store.getEvalsByAgentName(agent1);
1047
- expect(allAgent1.length).toBe(3);
1048
- // Assert descending order (newest first)
1049
- expect(allAgent1[0]!.runId).toBe(eval1_live_later.run_id); // Newest (time4)
1050
- expect(allAgent1[1]!.runId).toBe(eval1_test.run_id); // Middle (time2)
1051
- expect(allAgent1[2]!.runId).toBe(eval1_live.run_id); // Oldest (time1)
1052
-
1053
- // Get only live for agent1 (should be 2, ordered descending)
1054
- const liveAgent1 = await store.getEvalsByAgentName(agent1, 'live');
1055
- expect(liveAgent1.length).toBe(2);
1056
- // Assert descending order
1057
- expect(liveAgent1[0]!.runId).toBe(eval1_live_later.run_id); // Newest live (time4)
1058
- expect(liveAgent1[1]!.runId).toBe(eval1_live.run_id); // Oldest live (time1)
1059
-
1060
- // Get only test for agent1 (should be 1)
1061
- const testAgent1 = await store.getEvalsByAgentName(agent1, 'test');
1062
- expect(testAgent1.length).toBe(1);
1063
- expect(testAgent1[0]!.runId).toBe(eval1_test.run_id);
1064
- expect(testAgent1[0]!.testInfo).toEqual(JSON.parse(eval1_test.test_info!));
1065
-
1066
- // Get for agent2 (should be 1)
1067
- const allAgent2 = await store.getEvalsByAgentName(agent2);
1068
- expect(allAgent2.length).toBe(1);
1069
- expect(allAgent2[0]!.runId).toBe(eval2_live.run_id);
1070
-
1071
- // Get for non-existent agent
1072
- const none = await store.getEvalsByAgentName('non-existent-agent');
1073
- expect(none.length).toBe(0);
1074
- });
1075
- }); // End Eval Operations describe
1076
-
1077
- // --- Workflow Operations Tests ---
1078
- describe('Workflow Operations', () => {
1079
- const sampleWorkflowSnapshot = (
1080
- workflowName: string,
1081
- runId: string,
1082
- resourceId?: string,
1083
- createdAt = new Date(),
1084
- status = 'running',
1085
- ): { recordData: Record<string, any>; snapshot: WorkflowRunState } => {
1086
- const snapshot: WorkflowRunState = {
1087
- value: { currentState: status },
1088
- context: {
1089
- step1: { status: 'success', output: { data: 'test' } },
1090
- input: { source: 'test' },
1091
- } as unknown as WorkflowRunState['context'],
1092
- serializedStepGraph: [],
1093
- activePaths: [],
1094
- suspendedPaths: {},
1095
- runId: runId,
1096
- timestamp: createdAt.getTime(),
1097
- status: 'success',
1098
- ...(resourceId && { resourceId: resourceId }), // Conditionally add resourceId to snapshot
1099
- };
1100
- return {
1101
- recordData: {
1102
- entity: 'workflow_snapshot',
1103
- workflow_name: workflowName,
1104
- run_id: runId,
1105
- snapshot: JSON.stringify(snapshot),
1106
- createdAt: createdAt.toISOString(),
1107
- updatedAt: createdAt.toISOString(),
1108
- resourceId: resourceId, // Store resourceId directly if available
1109
- metadata: JSON.stringify({ wf: 'meta' }),
1110
- },
1111
- snapshot: snapshot,
1112
- };
1113
- };
1114
-
1115
- test('should persist and load a workflow snapshot', async () => {
1116
- const wfName = 'persist-test-wf';
1117
- const runId = `run-${randomUUID()}`;
1118
- const { snapshot } = sampleWorkflowSnapshot(wfName, runId);
1119
-
1120
- await expect(
1121
- store.persistWorkflowSnapshot({
1122
- workflowName: wfName,
1123
- runId: runId,
1124
- snapshot: snapshot,
1125
- }),
1126
- ).resolves.not.toThrow();
1127
-
1128
- const loadedSnapshot = await store.loadWorkflowSnapshot({
1129
- workflowName: wfName,
1130
- runId: runId,
1131
- });
1132
- // Compare only relevant parts, as persist might add internal fields
1133
- expect(loadedSnapshot?.runId).toEqual(snapshot.runId);
1134
- expect(loadedSnapshot?.value).toEqual(snapshot.value);
1135
- expect(loadedSnapshot?.context).toEqual(snapshot.context);
1136
- });
1137
-
1138
- test('should allow updating an existing workflow snapshot', async () => {
1139
- const wfName = 'update-test-wf';
1140
- const runId = `run-${randomUUID()}`;
1141
-
1142
- // Create initial snapshot
1143
- const { snapshot: initialSnapshot } = sampleWorkflowSnapshot(wfName, runId);
1144
-
1145
- await expect(
1146
- store.persistWorkflowSnapshot({
1147
- workflowName: wfName,
1148
- runId: runId,
1149
- snapshot: initialSnapshot,
1150
- }),
1151
- ).resolves.not.toThrow();
1152
-
1153
- // Create updated snapshot with different data
1154
- const updatedSnapshot: WorkflowRunState = {
1155
- ...initialSnapshot,
1156
- value: { currentState: 'completed' },
1157
- context: {
1158
- step1: { status: 'success', output: { data: 'updated-test' } },
1159
- step2: { status: 'success', output: { data: 'new-step' } },
1160
- input: { source: 'updated-test' },
1161
- } as unknown as WorkflowRunState['context'],
1162
- timestamp: Date.now(),
1163
- };
1164
-
1165
- // This should succeed (update existing snapshot)
1166
- await expect(
1167
- store.persistWorkflowSnapshot({
1168
- workflowName: wfName,
1169
- runId: runId,
1170
- snapshot: updatedSnapshot,
1171
- }),
1172
- ).resolves.not.toThrow();
1173
-
1174
- // Verify the snapshot was updated
1175
- const loadedSnapshot = await store.loadWorkflowSnapshot({
1176
- workflowName: wfName,
1177
- runId: runId,
1178
- });
1179
-
1180
- expect(loadedSnapshot?.runId).toEqual(updatedSnapshot.runId);
1181
- expect(loadedSnapshot?.value).toEqual(updatedSnapshot.value);
1182
- expect(loadedSnapshot?.context).toEqual(updatedSnapshot.context);
1183
- });
1184
-
1185
- test('getWorkflowRunById should retrieve correct run', async () => {
1186
- const wfName = 'get-by-id-wf';
1187
- const runId1 = `run-${randomUUID()}`;
1188
- const runId2 = `run-${randomUUID()}`;
1189
- const wf1 = sampleWorkflowSnapshot(wfName, runId1);
1190
- const wf2 = sampleWorkflowSnapshot(wfName, runId2);
1191
-
1192
- await store.batchInsert({ tableName: TABLE_WORKFLOW_SNAPSHOT, records: [wf1.recordData, wf2.recordData] });
1193
-
1194
- const found = await store.getWorkflowRunById({ runId: runId1, workflowName: wfName });
1195
- expect(found).toBeDefined();
1196
- expect(found!.runId).toBe(runId1);
1197
- expect(found!.workflowName).toBe(wfName);
1198
-
1199
- const notFound = await store.getWorkflowRunById({ runId: 'non-existent', workflowName: wfName });
1200
- expect(notFound).toBeNull();
1201
- });
1202
-
1203
- test('getWorkflowRuns should return all runs when no filters applied', async () => {
1204
- const wfName = 'get-runs-all';
1205
- const runId1 = `run-${randomUUID()}`;
1206
- const runId2 = `run-${randomUUID()}`;
1207
- const wf1 = sampleWorkflowSnapshot(wfName, runId1, undefined, new Date(Date.now() - 1000));
1208
- const wf2 = sampleWorkflowSnapshot(wfName, runId2, undefined, new Date());
1209
-
1210
- await store.batchInsert({ tableName: TABLE_WORKFLOW_SNAPSHOT, records: [wf1.recordData, wf2.recordData] });
1211
-
1212
- const { runs, total } = await store.getWorkflowRuns(); // No filters
1213
- // Note: Scan order is not guaranteed, so check for presence and count instead of order
1214
- expect(total).toBe(2);
1215
- expect(runs.length).toBe(2);
1216
- expect(runs.map(r => r.runId)).toEqual(expect.arrayContaining([runId1, runId2]));
1217
- });
1218
-
1219
- test('getWorkflowRuns should filter by workflowName', async () => {
1220
- const wfName1 = 'get-runs-filter-name1';
1221
- const wfName2 = 'get-runs-filter-name2';
1222
- const runId1 = `run-${randomUUID()}`;
1223
- const runId2 = `run-${randomUUID()}`;
1224
- const wf1 = sampleWorkflowSnapshot(wfName1, runId1);
1225
- const wf2 = sampleWorkflowSnapshot(wfName2, runId2);
1226
-
1227
- await store.batchInsert({ tableName: TABLE_WORKFLOW_SNAPSHOT, records: [wf1.recordData, wf2.recordData] });
1228
-
1229
- const { runs, total } = await store.getWorkflowRuns({ workflowName: wfName1 });
1230
- expect(total).toBe(1);
1231
- expect(runs.length).toBe(1);
1232
- expect(runs[0]!.runId).toBe(runId1);
1233
- });
1234
-
1235
- test('getWorkflowRuns should filter by resourceId', async () => {
1236
- const wfName = 'get-runs-filter-resource';
1237
- const resource1 = 'resource-filter-1';
1238
- const resource2 = 'resource-filter-2';
1239
- const runId1 = `run-${randomUUID()}`;
1240
- const runId2 = `run-${randomUUID()}`;
1241
- const runId3 = `run-${randomUUID()}`;
1242
- const wf1 = sampleWorkflowSnapshot(wfName, runId1, resource1);
1243
- const wf2 = sampleWorkflowSnapshot(wfName, runId2, resource2);
1244
- const wf3 = sampleWorkflowSnapshot(wfName, runId3, resource1);
1245
-
1246
- await store.batchInsert({
1247
- tableName: TABLE_WORKFLOW_SNAPSHOT,
1248
- records: [wf1.recordData, wf2.recordData, wf3.recordData],
1249
- });
1250
-
1251
- const { runs, total } = await store.getWorkflowRuns({ resourceId: resource1 });
1252
- // Note: Scan order not guaranteed
1253
- expect(total).toBe(2);
1254
- expect(runs.length).toBe(2);
1255
- expect(runs.map(r => r.runId)).toEqual(expect.arrayContaining([runId1, runId3]));
1256
- expect(runs.every(r => r.resourceId === resource1)).toBe(true);
1257
- });
1258
-
1259
- test('getWorkflowRuns should filter by date range', async () => {
1260
- const wfName = 'get-runs-filter-date';
1261
- const time1 = new Date(2024, 0, 10); // Jan 10 2024
1262
- const time2 = new Date(2024, 0, 15); // Jan 15 2024
1263
- const time3 = new Date(2024, 0, 20); // Jan 20 2024
1264
- const runId1 = `run-${randomUUID()}`;
1265
- const runId2 = `run-${randomUUID()}`;
1266
- const runId3 = `run-${randomUUID()}`;
1267
- const wf1 = sampleWorkflowSnapshot(wfName, runId1, undefined, time1);
1268
- const wf2 = sampleWorkflowSnapshot(wfName, runId2, undefined, time2);
1269
- const wf3 = sampleWorkflowSnapshot(wfName, runId3, undefined, time3);
1270
-
1271
- await store.batchInsert({
1272
- tableName: TABLE_WORKFLOW_SNAPSHOT,
1273
- records: [wf1.recordData, wf2.recordData, wf3.recordData],
1274
- });
1275
-
1276
- const { runs, total } = await store.getWorkflowRuns({
1277
- fromDate: new Date(2024, 0, 12), // Jan 12
1278
- toDate: new Date(2024, 0, 18), // Jan 18
1279
- });
1280
- expect(total).toBe(1);
1281
- expect(runs.length).toBe(1);
1282
- expect(runs[0]!.runId).toBe(runId2); // Only wf2 falls within the range
1283
- });
1284
-
1285
- test('getWorkflowRuns should handle pagination (limit/offset)', async () => {
1286
- const wfName = 'get-runs-pagination';
1287
- const snapshots = Array.from({ length: 5 }, (_, i) =>
1288
- sampleWorkflowSnapshot(wfName, `run-page-${i}`, undefined, new Date(Date.now() + i * 1000)),
1289
- );
1290
- await store.batchInsert({ tableName: TABLE_WORKFLOW_SNAPSHOT, records: snapshots.map(s => s.recordData) });
1291
-
1292
- // Get page 1 (limit 2, offset 0)
1293
- const page1 = await store.getWorkflowRuns({ workflowName: wfName, limit: 2, offset: 0 });
1294
- expect(page1.total).toBe(5);
1295
- expect(page1.runs.length).toBe(2);
1296
- // Scan order is not guaranteed, so check for the presence of the two expected runs
1297
- const page1Ids = page1.runs.map(r => r.runId);
1298
- expect(snapshots.slice(0, 2).map(s => s!.recordData.run_id)).toEqual(expect.arrayContaining(page1Ids));
1299
-
1300
- // Get page 2 (limit 2, offset 2)
1301
- const page2 = await store.getWorkflowRuns({ workflowName: wfName, limit: 2, offset: 2 });
1302
- expect(page2.total).toBe(5);
1303
- expect(page2.runs.length).toBe(2);
1304
- const page2Ids = page2.runs.map(r => r.runId);
1305
- expect(snapshots.slice(2, 4).map(s => s!.recordData.run_id)).toEqual(expect.arrayContaining(page2Ids));
1306
-
1307
- // Get page 3 (limit 2, offset 4)
1308
- const page3 = await store.getWorkflowRuns({ workflowName: wfName, limit: 2, offset: 4 });
1309
- expect(page3.total).toBe(5);
1310
- expect(page3.runs.length).toBe(1);
1311
- // Use explicit type assertion for runs array access to fix linter error
1312
- expect((page3.runs as WorkflowRun[])[0]!.runId).toBe(snapshots[4]!.recordData.run_id);
1313
-
1314
- // Get page beyond results (offset 5)
1315
- const page4 = await store.getWorkflowRuns({ workflowName: wfName, limit: 2, offset: 5 });
1316
- expect(page4.total).toBe(5);
1317
- expect(page4.runs.length).toBe(0);
1318
- });
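The limit/offset assertions above (total stays 5 while the pages shrink to 2, 2, 1, then 0) imply that the store filters the full run list first and then slices it in memory. A minimal sketch of that contract, with illustrative names:

// Hypothetical helper mirroring the pagination contract the test asserts:
// `total` reflects all filtered runs, while `runs` is the requested window.
function paginateRuns<T>(allRuns: T[], limit?: number, offset = 0): { runs: T[]; total: number } {
  const total = allRuns.length;
  const runs = limit === undefined ? allRuns.slice(offset) : allRuns.slice(offset, offset + limit);
  return { runs, total };
}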
1319
- }); // End Workflow Operations describe
1320
-
1321
- // --- Initialization & Configuration Tests ---
1322
- describe('Initialization & Configuration', () => {
1323
- test('should throw error if tableName is missing in config', () => {
1324
- expect(() => {
1325
- new DynamoDBStore({
1326
- name: 'MissingTableStore',
1327
- config: {
1328
- endpoint: LOCAL_ENDPOINT,
1329
- region: LOCAL_REGION,
1330
- credentials: { accessKeyId: 'test', secretAccessKey: 'test' },
1331
- } as any, // Cast to any to bypass compile-time check for this specific test
1332
- });
1333
- }).toThrow(/tableName must be provided/); // Check for specific error message if possible
1334
- });
1335
-
1336
- test('should throw error during operations if table does not exist', async () => {
1337
- // Use a valid but random table name unlikely to exist
1338
- const nonExistentTableName = `non-existent-${randomUUID()}`;
1339
- const storeWithInvalidTable = new DynamoDBStore({
1340
- name: 'InvalidTableStore',
1341
- config: {
1342
- tableName: nonExistentTableName,
1343
- endpoint: LOCAL_ENDPOINT,
1344
- region: LOCAL_REGION,
1345
- credentials: { accessKeyId: 'test', secretAccessKey: 'test' },
1346
- },
1347
- });
1348
-
1349
- await expect(storeWithInvalidTable.getThreadById({ threadId: 'any-id' }))
1350
- .rejects // Update regex to match either DDB error or ElectroDB wrapper
1351
- .toThrow(/ResourceNotFoundException|Table.*does not exist|Cannot do operations on a non-existent table/);
1352
- });
1353
-
1354
- test('init() should throw error if table does not exist', async () => {
1355
- // Use a valid but random table name unlikely to exist
1356
- const nonExistentTableName = `non-existent-init-${randomUUID()}`;
1357
- const storeWithInvalidTable = new DynamoDBStore({
1358
- name: 'InvalidTableStoreInit',
1359
- config: {
1360
- tableName: nonExistentTableName,
1361
- endpoint: LOCAL_ENDPOINT,
1362
- region: LOCAL_REGION,
1363
- credentials: { accessKeyId: 'test', secretAccessKey: 'test' },
1364
- },
1365
- });
1366
-
1367
- await expect(storeWithInvalidTable.init())
1368
- .rejects // Update regex here too for consistency
1369
- .toThrow(/ResourceNotFoundException|Table.*does not exist|Cannot do operations on a non-existent table/);
1370
- });
1371
- }); // End Initialization & Configuration describe
1372
-
1373
- // --- Generic Storage Methods Tests ---
1374
- describe('Generic Storage Methods (`insert`, `load`, `batchInsert`, `clearTable`)', () => {
1375
- // Declare genericStore specific to this block
1376
- let genericStore: DynamoDBStore;
1377
-
1378
- beforeAll(() => {
1379
- // Initialize genericStore using the same config as the main store
1380
- genericStore = new DynamoDBStore({
1381
- name: 'DynamoDBGenericTest',
1382
- config: {
1383
- tableName: TEST_TABLE_NAME, // Ensure this uses the correct test table
1384
- endpoint: LOCAL_ENDPOINT,
1385
- region: LOCAL_REGION,
1386
- credentials: { accessKeyId: 'test', secretAccessKey: 'test' },
1387
- },
1388
- });
1389
- console.log('Generic test store initialized for generic tests.');
1390
- });
1391
-
1392
- const sampleThreadData = (id: string) => ({
1393
- entity: 'thread',
1394
- id: id,
1395
- resourceId: `resource-${randomUUID()}`,
1396
- title: 'Generic Test Thread',
1397
- createdAt: new Date().toISOString(),
1398
- updatedAt: new Date().toISOString(),
1399
- metadata: JSON.stringify({ generic: true }),
1400
- });
1401
-
1402
- test('insert() should save a record', async () => {
1403
- const threadId = `thread-${randomUUID()}`;
1404
- const record = sampleThreadData(threadId);
1405
- // Use the genericStore instance
1406
- await expect(genericStore.insert({ tableName: TABLE_THREADS, record })).resolves.not.toThrow();
1407
- const loaded = await genericStore.load<StorageThreadType>({ tableName: TABLE_THREADS, keys: { id: threadId } });
1408
- expect(loaded).not.toBeNull();
1409
- if (loaded) {
1410
- expect(loaded.id).toBe(threadId);
1411
- expect(loaded.title).toBe('Generic Test Thread');
1412
- expect(loaded.metadata).toEqual({ generic: true });
1413
- }
1414
- });
1415
-
1416
- test('insert() should handle Date objects for createdAt/updatedAt fields', async () => {
1417
- // Test that individual insert method properly handles Date objects in date fields
1418
- const now = new Date();
1419
- const recordWithDates = {
1420
- id: `thread-${randomUUID()}`,
1421
- resourceId: `resource-${randomUUID()}`,
1422
- title: 'Thread with Date Objects',
1423
- // These are Date objects, not ISO strings; should be handled by preprocessing
1424
- createdAt: now,
1425
- updatedAt: now,
1426
- metadata: JSON.stringify({ test: 'with-dates' }),
1427
- };
1428
-
1429
- // This should not throw a validation error due to Date object type
1430
- await expect(genericStore.insert({ tableName: TABLE_THREADS, record: recordWithDates })).resolves.not.toThrow();
1431
-
1432
- // Verify the record was saved correctly
1433
- const loaded = await genericStore.load<StorageThreadType>({
1434
- tableName: TABLE_THREADS,
1435
- keys: { id: recordWithDates.id },
1436
- });
1437
- expect(loaded).not.toBeNull();
1438
- expect(loaded?.id).toBe(recordWithDates.id);
1439
- expect(loaded?.title).toBe('Thread with Date Objects');
1440
- });
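The test above depends on the store coercing Date values to ISO-8601 strings before ElectroDB validates its string-typed date attributes. A minimal sketch of that preprocessing step; the helper name and its placement are assumptions, not the store's actual implementation:

// Illustrative normalization pass: convert any Date values to ISO strings so
// string-typed attributes (createdAt/updatedAt) pass schema validation.
function normalizeDates(record: Record<string, unknown>): Record<string, unknown> {
  return Object.fromEntries(
    Object.entries(record).map(([key, value]) => [
      key,
      value instanceof Date ? value.toISOString() : value,
    ]),
  );
}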
1441
-
1442
- test('load() should return null for non-existent record', async () => {
1443
- // Use the genericStore instance
1444
- const loaded = await genericStore.load({ tableName: TABLE_THREADS, keys: { id: 'non-existent-generic' } });
1445
- expect(loaded).toBeNull();
1446
- });
1447
-
1448
- test('batchInsert() should save multiple records', async () => {
1449
- const threadId1 = `thread-batch-${randomUUID()}`;
1450
- const threadId2 = `thread-batch-${randomUUID()}`;
1451
- const records = [sampleThreadData(threadId1), sampleThreadData(threadId2)];
1452
- // Use the genericStore instance
1453
- await expect(genericStore.batchInsert({ tableName: TABLE_THREADS, records })).resolves.not.toThrow();
1454
- const loaded1 = await genericStore.load<StorageThreadType>({ tableName: TABLE_THREADS, keys: { id: threadId1 } });
1455
- const loaded2 = await genericStore.load<StorageThreadType>({ tableName: TABLE_THREADS, keys: { id: threadId2 } });
1456
- expect(loaded1).toBeDefined();
1457
- expect(loaded2).toBeDefined();
1458
- expect(loaded1?.id).toBe(threadId1);
1459
- expect(loaded2?.id).toBe(threadId2);
1460
- });
1461
-
1462
- test('clearTable() should remove all records for the logical table', async () => {
1463
- const threadId1 = `thread-clear-${randomUUID()}`;
1464
- const threadId2 = `thread-clear-${randomUUID()}`;
1465
- const records = [sampleThreadData(threadId1), sampleThreadData(threadId2)];
1466
- // Use the genericStore instance
1467
- await genericStore.batchInsert({ tableName: TABLE_THREADS, records });
1468
- expect(
1469
- await genericStore.load<StorageThreadType>({ tableName: TABLE_THREADS, keys: { id: threadId1 } }),
1470
- ).toBeDefined();
1471
- expect(
1472
- await genericStore.load<StorageThreadType>({ tableName: TABLE_THREADS, keys: { id: threadId2 } }),
1473
- ).toBeDefined();
1474
- await expect(genericStore.clearTable({ tableName: TABLE_THREADS })).resolves.not.toThrow();
1475
- expect(
1476
- await genericStore.load<StorageThreadType>({ tableName: TABLE_THREADS, keys: { id: threadId1 } }),
1477
- ).toBeNull();
1478
- expect(
1479
- await genericStore.load<StorageThreadType>({ tableName: TABLE_THREADS, keys: { id: threadId2 } }),
1480
- ).toBeNull();
1481
- });
1482
- }); // End Generic Storage Methods describe
307
+ }),
308
+ );
1483
309
  });
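The `}),` / `);` lines added above are the tail of the call that now drives this file: the bespoke suites below were commented out in favor of the shared createTestSuite harness. A sketch of how that call is presumably wired, reconstructed from the constants visible in this file; the exact option shape of createTestSuite is not shown in this hunk and is an assumption:

// Assumed wiring: createTestSuite receives a configured store instance and
// registers the shared storage test suite against it.
createTestSuite(
  new DynamoDBStore({
    name: 'DynamoDBStoreTest',
    config: {
      tableName: TEST_TABLE_NAME,
      endpoint: LOCAL_ENDPOINT,
      region: LOCAL_REGION,
      credentials: { accessKeyId: 'test', secretAccessKey: 'test' },
    },
  }),
);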
310
+
311
+ // describe('DynamoDBStore Integration Tests', () => {
312
+ // let store: DynamoDBStore;
313
+
314
+ // beforeAll(async () => {
315
+ // // Initialize main store instance used by most tests
316
+ // store = new DynamoDBStore({
317
+ // name: 'DynamoDBStoreTest',
318
+ // config: {
319
+ // tableName: TEST_TABLE_NAME,
320
+ // endpoint: LOCAL_ENDPOINT,
321
+ // region: LOCAL_REGION,
322
+ // credentials: { accessKeyId: 'test', secretAccessKey: 'test' },
323
+ // },
324
+ // });
325
+ // console.log('Main DynamoDBStore initialized for tests.');
326
+ // });
327
+
328
+ // beforeEach(async () => {
329
+ // // Clear table between tests using the setup client
330
+ // await clearSingleTable(setupClient, TEST_TABLE_NAME);
331
+ // });
332
+
333
+ // afterAll(async () => {
334
+ // // No client.destroy() needed here as the store manages its internal client
335
+ // // Or if the store exposes a close/destroy method, call that.
336
+ // if (store) {
337
+ // await store.close(); // Assuming store has a close method
338
+ // }
339
+ // });
340
+
341
+ // // DynamoDB-specific tests
342
+ // describe('DynamoDB-specific operations', () => {
343
+ // describe('Entity Operations', () => {
344
+ // test('should persist and retrieve thread metadata', async () => {
345
+ // const now = new Date();
346
+ // const threadId = 'metadata-thread';
347
+ // const metadata = { user: 'test-user', complex: { nested: true, arr: [1, 'a'] } };
348
+ // const thread: StorageThreadType = {
349
+ // id: threadId,
350
+ // resourceId: 'resource-meta',
351
+ // title: 'Metadata Test Thread',
352
+ // createdAt: now,
353
+ // updatedAt: now,
354
+ // metadata: metadata,
355
+ // };
356
+ // await store.saveThread({ thread });
357
+ // const retrieved = await store.getThreadById({ threadId });
358
+ // expect(retrieved).toBeDefined();
359
+ // expect(retrieved?.metadata).toEqual(metadata); // ElectroDB should handle JSON stringify/parse
360
+ // });
361
+
362
+ // test('should handle large workflow snapshots near DynamoDB item size limit', async () => {
363
+ // // Test remains largely the same; it relies on clearSingleTable working
364
+ // const now = Date.now();
365
+ // const largeSnapshot: WorkflowRunState = {
366
+ // // ... (rest of the large snapshot definition) ...
367
+ // value: { state: 'test' },
368
+ // context: {
369
+ // input: { source: 'test' },
370
+ // step1: { status: 'success', output: { data: 'test' } },
371
+ // } as unknown as WorkflowRunState['context'],
372
+ // serializedStepGraph: [],
373
+ // activePaths: [{ stepPath: ['test'], stepId: 'step1', status: 'success' }],
374
+ // suspendedPaths: { test: [1] },
375
+ // runId: 'test-run-large', // Use unique runId
376
+ // timestamp: now,
377
+ // status: 'success',
378
+ // };
379
+
380
+ // await expect(
381
+ // store.persistWorkflowSnapshot({
382
+ // workflowName: 'test-workflow-large',
383
+ // runId: 'test-run-large',
384
+ // snapshot: largeSnapshot,
385
+ // }),
386
+ // ).resolves.not.toThrow();
387
+
388
+ // const retrieved = await store.loadWorkflowSnapshot({
389
+ // workflowName: 'test-workflow-large',
390
+ // runId: 'test-run-large',
391
+ // });
392
+
393
+ // expect(retrieved).toEqual(largeSnapshot);
394
+ // }, 10000); // Increase timeout for potentially large item handling
395
+
396
+ // test('should handle concurrent thread updates (last writer wins)', async () => {
397
+ // // Test remains largely the same; it verifies the final state
398
+ // const threadId = 'concurrent-thread';
399
+ // const resourceId = 'resource-123';
400
+ // const now = new Date();
401
+ // const thread: StorageThreadType = {
402
+ // id: threadId,
403
+ // resourceId,
404
+ // title: 'Initial Title',
405
+ // createdAt: now,
406
+ // updatedAt: now,
407
+ // metadata: { initial: true },
408
+ // };
409
+ // await store.saveThread({ thread });
410
+
411
+ // // Simulate potential delay between read and write for update 1
412
+ // const update1 = async () => {
413
+ // await new Promise(res => setTimeout(res, 50)); // Short delay
414
+ // await store.updateThread({
415
+ // id: threadId,
416
+ // title: 'Updated Thread 1',
417
+ // metadata: { update: 1, time: Date.now() },
418
+ // });
419
+ // };
420
+ // // Simulate potential delay between read and write for update 2
421
+ // const update2 = async () => {
422
+ // await new Promise(res => setTimeout(res, 100)); // Slightly longer delay
423
+ // await store.updateThread({
424
+ // id: threadId,
425
+ // title: 'Updated Thread 2',
426
+ // metadata: { update: 2, time: Date.now() },
427
+ // });
428
+ // };
429
+
430
+ // await Promise.all([update1(), update2()]);
431
+
432
+ // const retrieved = await store.getThreadById({ threadId });
433
+ // expect(retrieved).toBeDefined();
434
+ // expect(retrieved?.id).toBe(threadId);
435
+ // // In DynamoDB default (non-conditional) updates, the last writer wins.
436
+ // // We expect title 2 / metadata 2 because update2 started later.
437
+ // expect(retrieved?.title).toBe('Updated Thread 2');
438
+ // expect(retrieved?.metadata?.update).toBe(2);
439
+ // });
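The comment in the test above is accurate: without a ConditionExpression, DynamoDB updates are last-writer-wins. For contrast, a hedged sketch of optimistic locking with a version attribute, which would reject the stale writer instead; this is not what the store does, and the attribute names are illustrative:

import { DynamoDBClient, UpdateItemCommand } from '@aws-sdk/client-dynamodb';

// Optimistic locking sketch: the write succeeds only if `version` still holds
// the value this writer originally read, so a concurrent update makes the call
// throw ConditionalCheckFailedException instead of silently overwriting.
async function updateTitleIfUnchanged(
  client: DynamoDBClient,
  tableName: string,
  key: { pk: string; sk: string },
  title: string,
  expectedVersion: number,
) {
  await client.send(
    new UpdateItemCommand({
      TableName: tableName,
      Key: { pk: { S: key.pk }, sk: { S: key.sk } },
      UpdateExpression: 'SET #title = :title, #v = :next',
      ConditionExpression: '#v = :expected',
      ExpressionAttributeNames: { '#title': 'title', '#v': 'version' },
      ExpressionAttributeValues: {
        ':title': { S: title },
        ':next': { N: String(expectedVersion + 1) },
        ':expected': { N: String(expectedVersion) },
      },
    }),
  );
}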
440
+
441
+ // test('getMessages should return the N most recent messages [v2 storage]', async () => {
442
+ // const threadId = 'last-selector-thread';
443
+ // const start = Date.now();
444
+
445
+ // // Insert 10 messages with increasing timestamps
446
+ // const messages: MastraMessageV2[] = Array.from({ length: 10 }, (_, i) => ({
447
+ // id: `m-${i}`,
448
+ // threadId,
449
+ // resourceId: 'r',
450
+ // content: { format: 2, parts: [{ type: 'text', text: `msg-${i}` }] },
451
+ // createdAt: new Date(start + i), // 0..9 ms apart
452
+ // role: 'user',
453
+ // type: 'text',
454
+ // }));
455
+ // await store.saveMessages({ messages, format: 'v2' });
456
+
457
+ // const last3 = await store.getMessages({
458
+ // format: 'v2',
459
+ // threadId,
460
+ // selectBy: { last: 3 },
461
+ // });
462
+
463
+ // expect(last3).toHaveLength(3);
464
+ // expect(last3.map(m => (m.content.parts[0] as { type: string; text: string }).text)).toEqual([
465
+ // 'msg-7',
466
+ // 'msg-8',
467
+ // 'msg-9',
468
+ // ]);
469
+ // });
470
+
471
+ // test('getMessages should return the N most recent messages [v1 storage]', async () => {
472
+ // const threadId = 'last-selector-thread';
473
+ // const start = Date.now();
474
+
475
+ // // Insert 10 messages with increasing timestamps
476
+ // const messages: MastraMessageV1[] = Array.from({ length: 10 }, (_, i) => ({
477
+ // id: `m-${i}`,
478
+ // threadId,
479
+ // resourceId: 'r',
480
+ // content: `msg-${i}`,
481
+ // createdAt: new Date(start + i), // 0..9 ms apart
482
+ // role: 'user',
483
+ // type: 'text',
484
+ // }));
485
+ // await store.saveMessages({ messages });
486
+
487
+ // const last3 = await store.getMessages({
488
+ // threadId,
489
+ // selectBy: { last: 3 },
490
+ // });
491
+
492
+ // expect(last3).toHaveLength(3);
493
+ // expect(last3.map(m => m.content)).toEqual(['msg-7', 'msg-8', 'msg-9']);
494
+ // });
495
+
496
+ // test('should update thread updatedAt when a message is saved to it', async () => {
497
+ // const thread: StorageThreadType = {
498
+ // id: 'thread-update-test',
499
+ // resourceId: 'resource-update',
500
+ // title: 'Update Test Thread',
501
+ // createdAt: new Date(),
502
+ // updatedAt: new Date(),
503
+ // metadata: { test: true },
504
+ // };
505
+ // await store.saveThread({ thread });
506
+
507
+ // // Get the initial thread to capture the original updatedAt
508
+ // const initialThread = await store.getThreadById({ threadId: thread.id });
509
+ // expect(initialThread).toBeDefined();
510
+ // const originalUpdatedAt = initialThread!.updatedAt;
511
+
512
+ // // Wait a small amount to ensure different timestamp
513
+ // await new Promise(resolve => setTimeout(resolve, 100));
514
+
515
+ // // Create and save a message to the thread
516
+ // const message: MastraMessageV1 = {
517
+ // id: 'msg-update-test',
518
+ // threadId: thread.id,
519
+ // resourceId: 'resource-update',
520
+ // content: 'Test message for update',
521
+ // createdAt: new Date(),
522
+ // role: 'user',
523
+ // type: 'text',
524
+ // };
525
+ // await store.saveMessages({ messages: [message] });
526
+
527
+ // // Retrieve the thread again and check that updatedAt was updated
528
+ // const updatedThread = await store.getThreadById({ threadId: thread.id });
529
+ // expect(updatedThread).toBeDefined();
530
+ // expect(updatedThread!.updatedAt.getTime()).toBeGreaterThan(originalUpdatedAt.getTime());
531
+ // });
532
+
533
+ // test('saveThread upsert: should create new thread when thread does not exist', async () => {
534
+ // const threadId = `upsert-new-${randomUUID()}`;
535
+ // const now = new Date();
536
+ // const thread: StorageThreadType = {
537
+ // id: threadId,
538
+ // resourceId: 'resource-upsert-new',
539
+ // title: 'New Thread via Upsert',
540
+ // createdAt: now,
541
+ // updatedAt: now,
542
+ // metadata: { operation: 'create', test: true },
543
+ // };
544
+
545
+ // // Save the thread (should create new)
546
+ // await expect(store.saveThread({ thread })).resolves.not.toThrow();
547
+
548
+ // // Verify the thread was created
549
+ // const retrieved = await store.getThreadById({ threadId });
550
+ // expect(retrieved).toBeDefined();
551
+ // expect(retrieved?.id).toBe(threadId);
552
+ // expect(retrieved?.title).toBe('New Thread via Upsert');
553
+ // expect(retrieved?.resourceId).toBe('resource-upsert-new');
554
+ // expect(retrieved?.metadata).toEqual({ operation: 'create', test: true });
555
+ // });
556
+
557
+ // test('saveThread upsert: should update existing thread when thread already exists', async () => {
558
+ // const threadId = `upsert-update-${randomUUID()}`;
559
+ // const initialCreatedAt = new Date();
560
+
561
+ // // Create initial thread
562
+ // const initialThread: StorageThreadType = {
563
+ // id: threadId,
564
+ // resourceId: 'resource-upsert-initial',
565
+ // title: 'Initial Thread Title',
566
+ // createdAt: initialCreatedAt,
567
+ // updatedAt: initialCreatedAt,
568
+ // metadata: { operation: 'initial', version: 1 },
569
+ // };
570
+ // await store.saveThread({ thread: initialThread });
571
+
572
+ // // Wait a small amount to ensure different timestamp
573
+ // await new Promise(resolve => setTimeout(resolve, 100));
574
+
575
+ // // Update the thread with same ID but different data
576
+ // const updatedThread: StorageThreadType = {
577
+ // id: threadId,
578
+ // resourceId: 'resource-upsert-updated',
579
+ // title: 'Updated Thread Title',
580
+ // createdAt: initialCreatedAt, // Keep original creation time
581
+ // updatedAt: new Date(), // New update time
582
+ // metadata: { operation: 'update', version: 2 },
583
+ // };
584
+ // await expect(store.saveThread({ thread: updatedThread })).resolves.not.toThrow();
585
+
586
+ // // Verify the thread was updated
587
+ // const retrieved = await store.getThreadById({ threadId });
588
+ // expect(retrieved).toBeDefined();
589
+ // expect(retrieved?.id).toBe(threadId);
590
+ // expect(retrieved?.title).toBe('Updated Thread Title');
591
+ // expect(retrieved?.resourceId).toBe('resource-upsert-updated');
592
+ // expect(retrieved?.metadata).toEqual({ operation: 'update', version: 2 });
593
+
594
+ // // updatedAt should be newer than the initial creation time
595
+ // expect(retrieved?.updatedAt.getTime()).toBeGreaterThan(initialCreatedAt.getTime());
596
+ // // createdAt should remain exactly equal to the initial creation time
597
+ // expect(retrieved?.createdAt.getTime()).toBe(initialCreatedAt.getTime());
598
+ // });
599
+
600
+ // test('saveThread upsert: should handle complex metadata updates', async () => {
601
+ // const threadId = `upsert-metadata-${randomUUID()}`;
602
+ // const initialMetadata = {
603
+ // user: 'initial-user',
604
+ // tags: ['initial', 'test'],
605
+ // count: 1,
606
+ // };
607
+
608
+ // // Create initial thread with complex metadata
609
+ // const initialThread: StorageThreadType = {
610
+ // id: threadId,
611
+ // resourceId: 'resource-metadata-test',
612
+ // title: 'Metadata Test Thread',
613
+ // createdAt: new Date(),
614
+ // updatedAt: new Date(),
615
+ // metadata: initialMetadata,
616
+ // };
617
+ // await store.saveThread({ thread: initialThread });
618
+
619
+ // // Wait a small amount to ensure different timestamp
620
+ // await new Promise(resolve => setTimeout(resolve, 100));
621
+
622
+ // // Update with completely different metadata structure
623
+ // const updatedMetadata = {
624
+ // user: 'updated-user',
625
+ // settings: { theme: 'light', language: 'ja', notifications: true },
626
+ // tags: ['updated', 'upsert', 'complex'],
627
+ // count: 5,
628
+ // newField: { nested: { deeply: 'value' } },
629
+ // };
630
+
631
+ // const updatedThread: StorageThreadType = {
632
+ // id: threadId,
633
+ // resourceId: 'resource-metadata-test',
634
+ // title: 'Updated Metadata Thread',
635
+ // createdAt: initialThread.createdAt,
636
+ // updatedAt: new Date(),
637
+ // metadata: updatedMetadata,
638
+ // };
639
+ // await expect(store.saveThread({ thread: updatedThread })).resolves.not.toThrow();
640
+
641
+ // // Verify the metadata was completely replaced
642
+ // const retrieved = await store.getThreadById({ threadId });
643
+ // expect(retrieved).toBeDefined();
644
+ // expect(retrieved?.metadata).toEqual(updatedMetadata);
645
+ // expect(retrieved?.metadata?.user).toBe('updated-user');
646
+ // expect(retrieved?.metadata?.tags).toEqual(['updated', 'upsert', 'complex']);
647
+ // expect(retrieved?.title).toBe('Updated Metadata Thread');
648
+ // });
649
+ // });
650
+
651
+ // describe('Batch Operations', () => {
652
+ // test('should handle batch message inserts efficiently (up to 25 items) [v1 storage]', async () => {
653
+ // const startTime = Date.now(); // Get a base time
654
+ // const threadId = 'batch-thread';
655
+ // const messages: MastraMessageV1[] = Array.from({ length: 25 }, (_, i) => ({
656
+ // id: `msg-${i}`,
657
+ // threadId,
658
+ // resourceId: 'test-resource',
659
+ // content: `Message ${i}`,
660
+ // // Increment timestamp slightly for each message to ensure order
661
+ // createdAt: new Date(startTime + i),
662
+ // role: i % 2 === 0 ? 'user' : 'assistant',
663
+ // type: 'text',
664
+ // }));
665
+
666
+ // // Assuming saveMessages uses BatchWriteItem internally
667
+ // await expect(store.saveMessages({ messages })).resolves.not.toThrow();
668
+
669
+ // const retrieved = await store.getMessages({ threadId });
670
+ // expect(retrieved).toHaveLength(25);
671
+ // // Now the order should be guaranteed by the ascending createdAt timestamp
672
+ // expect(retrieved[0]?.content).toBe('Message 0');
673
+ // expect(retrieved[24]?.content).toBe('Message 24');
674
+ // });
675
+
676
+ // test('should handle batch message inserts efficiently (up to 25 items) [v2 storage]', async () => {
677
+ // const startTime = Date.now(); // Get a base time
678
+ // const threadId = 'batch-thread';
679
+ // const messages: MastraMessageV2[] = Array.from({ length: 25 }, (_, i) => ({
680
+ // id: `msg-${i}`,
681
+ // threadId,
682
+ // resourceId: 'test-resource',
683
+ // content: { format: 2, parts: [{ type: 'text', text: `Message ${i}` }] },
684
+ // // Increment timestamp slightly for each message to ensure order
685
+ // createdAt: new Date(startTime + i),
686
+ // role: i % 2 === 0 ? 'user' : 'assistant',
687
+ // type: 'text',
688
+ // }));
689
+
690
+ // // Assuming saveMessages uses BatchWriteItem internally
691
+ // await expect(store.saveMessages({ messages, format: 'v2' })).resolves.not.toThrow();
692
+
693
+ // const retrieved = await store.getMessages({ threadId, format: 'v2' });
694
+ // expect(retrieved).toHaveLength(25);
695
+ // // Now the order should be guaranteed by the ascending createdAt timestamp
696
+ // if (retrieved[0]?.content?.parts[0]?.type !== `text`) throw new Error(`Expected text part`);
697
+ // expect(retrieved[0].content.parts[0].text).toBe('Message 0');
698
+ // if (retrieved[24]?.content?.parts?.[0]?.type !== `text`) throw new Error(`Expected text part`);
699
+ // expect(retrieved[24].content.parts[0].text).toBe('Message 24');
700
+ // });
701
+
702
+ // test('should handle batch inserts exceeding 25 items (if saveMessages chunks)', async () => {
703
+ // const startTime = Date.now(); // Get a base time
704
+ // const threadId = 'batch-thread-large';
705
+ // const messages: MastraMessageV1[] = Array.from({ length: 30 }, (_, i) => ({
706
+ // id: `msg-large-${i}`,
707
+ // threadId,
708
+ // resourceId: 'test-resource-large',
709
+ // content: `Large Message ${i}`,
710
+ // // Increment timestamp slightly for each message to ensure order
711
+ // createdAt: new Date(startTime + i),
712
+ // role: 'user',
713
+ // type: 'text',
714
+ // }));
715
+
716
+ // await expect(store.saveMessages({ messages })).resolves.not.toThrow();
717
+
718
+ // const retrieved = await store.getMessages({ threadId });
719
+ // expect(retrieved).toHaveLength(30); // Verify all were saved
720
+ // // Add order check for the > 25 test as well
721
+ // expect(retrieved[0]?.content).toBe('Large Message 0');
722
+ // expect(retrieved[29]?.content).toBe('Large Message 29');
723
+ // });
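Both batch tests assume saveMessages stays within BatchWriteItem's hard cap of 25 requests per call and chunks larger inputs. A minimal sketch of that chunking, including the UnprocessedItems retry the API requires; production code would add exponential backoff between retries:

import { BatchWriteItemCommand, DynamoDBClient } from '@aws-sdk/client-dynamodb';
import type { WriteRequest } from '@aws-sdk/client-dynamodb';

// Chunk writes into groups of 25 (the BatchWriteItem limit) and retry any
// items DynamoDB reports back as unprocessed.
async function batchWriteAll(client: DynamoDBClient, tableName: string, requests: WriteRequest[]) {
  for (let i = 0; i < requests.length; i += 25) {
    let pending = requests.slice(i, i + 25);
    while (pending.length > 0) {
      const { UnprocessedItems } = await client.send(
        new BatchWriteItemCommand({ RequestItems: { [tableName]: pending } }),
      );
      pending = UnprocessedItems?.[tableName] ?? [];
    }
  }
}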
724
+
725
+ // test('should upsert messages: duplicate id+threadId results in update, not duplicate row', async () => {
726
+ // const thread = await createSampleThread();
727
+ // await store.saveThread({ thread });
728
+ // const baseMessage = createSampleMessageV2({
729
+ // threadId: thread.id,
730
+ // createdAt: new Date(),
731
+ // content: { content: 'Original' },
732
+ // resourceId: thread.resourceId,
733
+ // });
734
+
735
+ // // Insert the message for the first time
736
+ // await store.saveMessages({ messages: [baseMessage], format: 'v2' });
737
+
738
+ // // // Insert again with the same id and threadId but different content
739
+ // const updatedMessage = {
740
+ // ...createSampleMessageV2({
741
+ // threadId: thread.id,
742
+ // createdAt: new Date(),
743
+ // content: { content: 'Updated' },
744
+ // resourceId: thread.resourceId,
745
+ // }),
746
+ // id: baseMessage.id,
747
+ // };
748
+
749
+ // await store.saveMessages({ messages: [updatedMessage], format: 'v2' });
750
+
751
+ // // Retrieve messages for the thread
752
+ // const retrievedMessages = await store.getMessages({ threadId: thread.id, format: 'v2' });
753
+
754
+ // // Only one message should exist for that id+threadId
755
+ // expect(retrievedMessages.filter(m => m.id === baseMessage.id)).toHaveLength(1);
756
+
757
+ // // The content should be the updated one
758
+ // expect(retrievedMessages.find(m => m.id === baseMessage.id)?.content.content).toBe('Updated');
759
+ // });
760
+
761
+ // test('should upsert messages: duplicate id and different threadId', async () => {
762
+ // const thread1 = await createSampleThread();
763
+ // const thread2 = await createSampleThread();
764
+ // await store.saveThread({ thread: thread1 });
765
+ // await store.saveThread({ thread: thread2 });
766
+
767
+ // const message = createSampleMessageV2({
768
+ // threadId: thread1.id,
769
+ // createdAt: new Date(),
770
+ // content: { content: 'Thread1 Content' },
771
+ // resourceId: thread1.resourceId,
772
+ // });
773
+
774
+ // // Insert message into thread1
775
+ // await store.saveMessages({ messages: [message], format: 'v2' });
776
+
777
+ // // Attempt to insert a message with the same id but different threadId
778
+ // const conflictingMessage = {
779
+ // ...createSampleMessageV2({
780
+ // threadId: thread2.id, // different thread
781
+ // content: { content: 'Thread2 Content' },
782
+ // resourceId: thread2.resourceId,
783
+ // }),
784
+ // id: message.id,
785
+ // };
786
+
787
+ // // Saving again should move the message to the new thread
788
+ // await store.saveMessages({ messages: [conflictingMessage], format: 'v2' });
789
+
790
+ // // Retrieve messages for both threads
791
+ // const thread1Messages = await store.getMessages({ threadId: thread1.id, format: 'v2' });
792
+ // const thread2Messages = await store.getMessages({ threadId: thread2.id, format: 'v2' });
793
+
794
+ // // Thread 1 should NOT have the message with that id
795
+ // expect(thread1Messages.find(m => m.id === message.id)).toBeUndefined();
796
+
797
+ // // Thread 2 should have the message with that id
798
+ // expect(thread2Messages.find(m => m.id === message.id)?.content.content).toBe('Thread2 Content');
799
+ // });
800
+ // });
801
+
802
+ // describe('Single-Table Design', () => {
803
+ // test('should maintain entity separation in single table', async () => {
804
+ // // Test remains largely the same
805
+ // const threadId = 'mixed-thread';
806
+ // const workflowName = 'mixed-workflow';
807
+ // const now = new Date();
808
+ // const thread: StorageThreadType = {
809
+ // id: threadId,
810
+ // resourceId: 'mixed-resource',
811
+ // title: 'Mixed Thread',
812
+ // createdAt: now,
813
+ // updatedAt: now,
814
+ // metadata: { type: 'thread' },
815
+ // };
816
+ // await store.saveThread({ thread });
817
+
818
+ // const workflowSnapshot: WorkflowRunState = {
819
+ // // ...(snapshot definition)
820
+ // value: { state: 'test' },
821
+ // context: {
822
+ // step1: { status: 'success', output: { data: 'test' } },
823
+ // input: { source: 'test' },
824
+ // } as unknown as WorkflowRunState['context'],
825
+ // serializedStepGraph: [],
826
+ // activePaths: [{ stepPath: ['test'], stepId: 'step1', status: 'success' }],
827
+ // suspendedPaths: { test: [1] },
828
+ // runId: 'mixed-run',
829
+ // timestamp: Date.now(),
830
+ // status: 'success',
831
+ // };
832
+ // await store.persistWorkflowSnapshot({ workflowName, runId: 'mixed-run', snapshot: workflowSnapshot });
833
+
834
+ // const retrievedThread = await store.getThreadById({ threadId });
835
+ // const retrievedWorkflow = await store.loadWorkflowSnapshot({ workflowName, runId: 'mixed-run' });
836
+
837
+ // expect(retrievedThread?.metadata?.type).toBe('thread');
838
+ // expect(retrievedWorkflow).toEqual(workflowSnapshot);
839
+ // });
840
+ // });
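The entity-separation test works because a single-table design scopes both key halves by entity, so a thread and a workflow snapshot can never collide even though they share one physical table. The store's actual keys are generated by ElectroDB and are not visible in this diff; the scheme below is purely illustrative:

// Illustrative single-table key scheme (not the store's real layout):
const threadKey = (id: string) => ({ pk: `thread#${id}`, sk: `thread#${id}` });
const workflowKey = (workflowName: string, runId: string) => ({
  pk: `workflow_snapshot#${workflowName}`,
  sk: `run#${runId}`,
});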
841
+
842
+ // describe('Error Handling', () => {
843
+ // test('should handle non-existent IDs gracefully for getById methods', async () => {
844
+ // const nonExistentId = 'does-not-exist';
845
+ // // Test getThreadById (already partially covered but good to keep specific)
846
+ // const thread = await store.getThreadById({ threadId: nonExistentId });
847
+ // expect(thread).toBeNull();
848
+
849
+ // // Test loadWorkflowSnapshot (already covered in Workflow tests, technically)
850
+ // const snapshot = await store.loadWorkflowSnapshot({ workflowName: nonExistentId, runId: nonExistentId });
851
+ // expect(snapshot).toBeNull();
852
+
853
+ // // Test getWorkflowRunById (already covered in Workflow tests, technically)
854
+ // const workflowRun = await store.getWorkflowRunById({ runId: nonExistentId });
855
+ // expect(workflowRun).toBeNull();
856
+ // });
857
+
858
+ // test('getMessages should return empty array for non-existent thread', async () => {
859
+ // const messages = await store.getMessages({ threadId: 'non-existent-thread' });
860
+ // expect(messages).toEqual([]);
861
+ // });
862
+
863
+ // test('getThreadsByResourceId should return empty array for non-existent resourceId', async () => {
864
+ // const threads = await store.getThreadsByResourceId({ resourceId: 'non-existent-resource' });
865
+ // expect(threads).toEqual([]);
866
+ // });
867
+
868
+ // test('getTraces should return empty array when no traces match filter', async () => {
869
+ // const tracesByName = await store.getTraces({ name: 'non-existent-trace', page: 1, perPage: 10 });
870
+ // expect(tracesByName).toEqual([]);
871
+ // const tracesByScope = await store.getTraces({ scope: 'non-existent-scope', page: 1, perPage: 10 });
872
+ // expect(tracesByScope).toEqual([]);
873
+ // });
874
+
875
+ // test('getEvalsByAgentName should return empty array for non-existent agent', async () => {
876
+ // const evals = await store.getEvalsByAgentName('non-existent-agent');
877
+ // expect(evals).toEqual([]);
878
+ // });
879
+
880
+ // test('getWorkflowRuns should return empty result for non-existent filters', async () => {
881
+ // const { runs: runsByName, total: totalByName } = await store.getWorkflowRuns({
882
+ // workflowName: 'non-existent-workflow',
883
+ // });
884
+ // expect(runsByName).toEqual([]);
885
+ // expect(totalByName).toBe(0);
886
+
887
+ // const { runs: runsByResource, total: totalByResource } = await store.getWorkflowRuns({
888
+ // resourceId: 'non-existent-resource',
889
+ // });
890
+ // expect(runsByResource).toEqual([]);
891
+ // expect(totalByResource).toBe(0);
892
+ // });
893
+ // }); // End Error Handling describe
894
+ // });
895
+
896
+ // // --- Trace Operations Tests ---
897
+ // describe('Trace Operations', () => {
898
+ // const sampleTrace = (name: string, scope: string, startTime = Date.now()) => ({
899
+ // id: `trace-${randomUUID()}`,
900
+ // parentSpanId: `span-${randomUUID()}`,
901
+ // traceId: `traceid-${randomUUID()}`,
902
+ // name,
903
+ // scope,
904
+ // kind: 1, // Example kind
905
+ // startTime: startTime,
906
+ // endTime: startTime + 100, // Example duration
907
+ // status: JSON.stringify({ code: 0 }), // Example status
908
+ // attributes: JSON.stringify({ key: 'value', scopeAttr: scope }),
909
+ // events: JSON.stringify([{ name: 'event1', timestamp: startTime + 50 }]),
910
+ // links: JSON.stringify([]),
911
+ // createdAt: new Date(startTime).toISOString(),
912
+ // updatedAt: new Date(startTime).toISOString(),
913
+ // });
914
+
915
+ // test('should batch insert and retrieve traces', async () => {
916
+ // const trace1 = sampleTrace('trace-op-1', 'scope-A');
917
+ // const trace2 = sampleTrace('trace-op-2', 'scope-A', Date.now() + 10);
918
+ // const trace3 = sampleTrace('trace-op-3', 'scope-B', Date.now() + 20);
919
+ // const records = [trace1, trace2, trace3];
920
+
921
+ // await expect(store.batchTraceInsert({ records })).resolves.not.toThrow();
922
+
923
+ // // Retrieve all (via scan, assuming low test data volume)
924
+ // const allTraces = await store.getTraces({ page: 1, perPage: 10 });
925
+ // expect(allTraces.length).toBe(3);
926
+ // });
927
+
928
+ // test('should handle Date objects for createdAt/updatedAt fields in batchTraceInsert', async () => {
929
+ // // This test specifically verifies the bug from the issue where Date objects
930
+ // // were passed instead of ISO strings and ElectroDB validation failed
931
+ // const now = new Date();
932
+ // const traceWithDateObjects = {
933
+ // id: `trace-${randomUUID()}`,
934
+ // parentSpanId: `span-${randomUUID()}`,
935
+ // traceId: `traceid-${randomUUID()}`,
936
+ // name: 'test-trace-with-dates',
937
+ // scope: 'default-tracer',
938
+ // kind: 1,
939
+ // startTime: now.getTime(),
940
+ // endTime: now.getTime() + 100,
941
+ // status: JSON.stringify({ code: 0 }),
942
+ // attributes: JSON.stringify({ key: 'value' }),
943
+ // events: JSON.stringify([]),
944
+ // links: JSON.stringify([]),
945
+ // // These are Date objects, not ISO strings; this should be handled by ElectroDB attribute setters
946
+ // createdAt: now,
947
+ // updatedAt: now,
948
+ // };
949
+
950
+ // // This should not throw a validation error due to Date object type
951
+ // await expect(store.batchTraceInsert({ records: [traceWithDateObjects] })).resolves.not.toThrow();
952
+
953
+ // // Verify the trace was saved correctly
954
+ // const allTraces = await store.getTraces({ name: 'test-trace-with-dates', page: 1, perPage: 10 });
955
+ // expect(allTraces.length).toBe(1);
956
+ // expect(allTraces[0].name).toBe('test-trace-with-dates');
957
+ // });
958
+
959
+ // test('should retrieve traces filtered by name using GSI', async () => {
960
+ // const trace1 = sampleTrace('trace-filter-name', 'scope-X');
961
+ // const trace2 = sampleTrace('trace-filter-name', 'scope-Y', Date.now() + 10);
962
+ // const trace3 = sampleTrace('other-name', 'scope-X', Date.now() + 20);
963
+ // await store.batchTraceInsert({ records: [trace1, trace2, trace3] });
964
+
965
+ // const filteredTraces = await store.getTraces({ name: 'trace-filter-name', page: 1, perPage: 10 });
966
+ // expect(filteredTraces.length).toBe(2);
967
+ // expect(filteredTraces.every(t => t.name === 'trace-filter-name')).toBe(true);
968
+ // // Check if sorted by startTime (GSI SK) - ascending default
969
+ // expect(filteredTraces[0].scope).toBe('scope-X');
970
+ // expect(filteredTraces[1].scope).toBe('scope-Y');
971
+ // });
972
+
973
+ // test('should retrieve traces filtered by scope using GSI', async () => {
974
+ // const trace1 = sampleTrace('trace-filter-scope-A', 'scope-TARGET');
975
+ // const trace2 = sampleTrace('trace-filter-scope-B', 'scope-OTHER', Date.now() + 10);
976
+ // const trace3 = sampleTrace('trace-filter-scope-C', 'scope-TARGET', Date.now() + 20);
977
+ // await store.batchTraceInsert({ records: [trace1, trace2, trace3] });
978
+
979
+ // const filteredTraces = await store.getTraces({ scope: 'scope-TARGET', page: 1, perPage: 10 });
980
+ // expect(filteredTraces.length).toBe(2);
981
+ // expect(filteredTraces.every(t => t.scope === 'scope-TARGET')).toBe(true);
982
+ // // Check if sorted by startTime (GSI SK) - ascending default
983
+ // expect(filteredTraces[0].name).toBe('trace-filter-scope-A');
984
+ // expect(filteredTraces[1].name).toBe('trace-filter-scope-C');
985
+ // });
986
+
987
+ // test('should handle pagination for getTraces', async () => {
988
+ // const traceData = Array.from({ length: 5 }, (_, i) =>
989
+ // sampleTrace('trace-page', `scope-page`, Date.now() + i * 10),
990
+ // );
991
+ // await store.batchTraceInsert({ records: traceData });
992
+
993
+ // // Get page 1 (first 2 items)
994
+ // const page1 = await store.getTraces({ name: 'trace-page', page: 1, perPage: 2 });
995
+ // expect(page1.length).toBe(2);
996
+ // // Use non-null assertion (!) since lengths are verified
997
+ // expect(page1[0]!.startTime).toBe(traceData[0]!.startTime);
998
+ // expect(page1[1]!.startTime).toBe(traceData[1]!.startTime);
999
+
1000
+ // // Get page 2 (next 2 items)
1001
+ // const page2 = await store.getTraces({ name: 'trace-page', page: 2, perPage: 2 });
1002
+ // expect(page2.length).toBe(2);
1003
+ // expect(page2[0]!.startTime).toBe(traceData[2]!.startTime);
1004
+ // expect(page2[1]!.startTime).toBe(traceData[3]!.startTime);
1005
+
1006
+ // // Get page 3 (last 1 item)
1007
+ // const page3 = await store.getTraces({ name: 'trace-page', page: 3, perPage: 2 });
1008
+ // expect(page3.length).toBe(1);
1009
+ // expect(page3[0]!.startTime).toBe(traceData[4]!.startTime);
1010
+
1011
+ // // Get page beyond results
1012
+ // const page4 = await store.getTraces({ name: 'trace-page', page: 4, perPage: 2 });
1013
+ // expect(page4.length).toBe(0);
1014
+ // });
1015
+ // }); // End Trace Operations describe
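The name/scope filters above are described as GSI reads, which is what makes them Query operations sorted by the startTime sort key rather than full-table Scans. A hedged sketch with the raw SDK; the index and key attribute names are assumptions, since the real index layout lives in the ElectroDB schema. Flipping ScanIndexForward to false would give the newest-first ordering the eval tests below assert.

import { DynamoDBClient, QueryCommand } from '@aws-sdk/client-dynamodb';

// Assumed GSI: partition key = trace name, sort key = startTime.
async function queryTracesByName(client: DynamoDBClient, tableName: string, name: string) {
  const { Items } = await client.send(
    new QueryCommand({
      TableName: tableName,
      IndexName: 'gsi-traces-by-name', // illustrative index name
      KeyConditionExpression: '#name = :name',
      ExpressionAttributeNames: { '#name': 'name' }, // NAME is a reserved word
      ExpressionAttributeValues: { ':name': { S: name } },
      ScanIndexForward: true, // ascending startTime, as the trace tests expect
    }),
  );
  return Items ?? [];
}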
1016
+
1017
+ // // --- Eval Operations Tests ---
1018
+ // describe('Eval Operations', () => {
1019
+ // const sampleEval = (agentName: string, isTest = false, createdAt = new Date()) => {
1020
+ // const testInfo = isTest ? { testPath: 'test/path.ts', testName: 'Test Name' } : undefined;
1021
+ // return {
1022
+ // entity: 'eval', // Important for saving
1023
+ // agent_name: agentName,
1024
+ // input: 'Sample input',
1025
+ // output: 'Sample output',
1026
+ // result: JSON.stringify({ score: Math.random() }), // Random score
1027
+ // metric_name: 'sample-metric',
1028
+ // instructions: 'Sample instructions',
1029
+ // test_info: testInfo ? JSON.stringify(testInfo) : undefined,
1030
+ // global_run_id: `global-${randomUUID()}`,
1031
+ // run_id: `run-${randomUUID()}`,
1032
+ // created_at: createdAt.toISOString(),
1033
+ // // Add core MastraStorage fields
1034
+ // createdAt: createdAt.toISOString(),
1035
+ // updatedAt: createdAt.toISOString(),
1036
+ // metadata: JSON.stringify({ custom: 'eval_meta' }),
1037
+ // };
1038
+ // };
1039
+
1040
+ // test('should handle Date objects for createdAt/updatedAt fields in eval batchInsert', async () => {
1041
+ // // Test that eval entity properly handles Date objects in createdAt/updatedAt fields
1042
+ // const now = new Date();
1043
+ // const evalWithDateObjects = {
1044
+ // entity: 'eval',
1045
+ // agent_name: 'test-agent-dates',
1046
+ // input: 'Test input',
1047
+ // output: 'Test output',
1048
+ // result: JSON.stringify({ score: 0.95 }),
1049
+ // metric_name: 'test-metric',
1050
+ // instructions: 'Test instructions',
1051
+ // global_run_id: `global-${randomUUID()}`,
1052
+ // run_id: `run-${randomUUID()}`,
1053
+ // created_at: now, // Date object instead of ISO string
1054
+ // // These are Date objects, not ISO strings; should be handled by ElectroDB attribute setters
1055
+ // createdAt: now,
1056
+ // updatedAt: now,
1057
+ // metadata: JSON.stringify({ test: 'meta' }),
1058
+ // };
1059
+
1060
+ // // This should not throw a validation error due to Date object type
1061
+ // await expect(
1062
+ // store.batchInsert({
1063
+ // tableName: TABLE_EVALS,
1064
+ // records: [evalWithDateObjects],
1065
+ // }),
1066
+ // ).resolves.not.toThrow();
1067
+
1068
+ // // Verify the eval was saved correctly
1069
+ // const evals = await store.getEvalsByAgentName('test-agent-dates');
1070
+ // expect(evals.length).toBe(1);
1071
+ // expect(evals[0].agentName).toBe('test-agent-dates');
1072
+ // });
1073
+
1074
+ // test('should retrieve evals by agent name using GSI and filter by type', async () => {
1075
+ // const agent1 = 'eval-agent-1';
1076
+ // const agent2 = 'eval-agent-2';
1077
+ // const time1 = new Date();
1078
+ // const time2 = new Date(Date.now() + 1000);
1079
+ // const time3 = new Date(Date.now() + 2000);
1080
+ // const time4 = new Date(Date.now() + 3000);
1081
+
1082
+ // const eval1_live = sampleEval(agent1, false, time1);
1083
+ // const eval1_test = sampleEval(agent1, true, time2);
1084
+ // const eval2_live = sampleEval(agent2, false, time3);
1085
+ // const eval1_live_later = sampleEval(agent1, false, time4);
1086
+
1087
+ // // Use generic batchInsert (which expects entity prop already set)
1088
+ // await store.batchInsert({
1089
+ // tableName: TABLE_EVALS,
1090
+ // records: [eval1_live, eval1_test, eval2_live, eval1_live_later],
1091
+ // });
1092
+
1093
+ // // Get all for agent1 (expecting DESCENDING order now)
1094
+ // const allAgent1 = await store.getEvalsByAgentName(agent1);
1095
+ // expect(allAgent1.length).toBe(3);
1096
+ // // Assert descending order (newest first)
1097
+ // expect(allAgent1[0]!.runId).toBe(eval1_live_later.run_id); // Newest (time4)
1098
+ // expect(allAgent1[1]!.runId).toBe(eval1_test.run_id); // Middle (time2)
1099
+ // expect(allAgent1[2]!.runId).toBe(eval1_live.run_id); // Oldest (time1)
1100
+
1101
+ // // Get only live for agent1 (should be 2, ordered descending)
1102
+ // const liveAgent1 = await store.getEvalsByAgentName(agent1, 'live');
1103
+ // expect(liveAgent1.length).toBe(2);
1104
+ // // Assert descending order
1105
+ // expect(liveAgent1[0]!.runId).toBe(eval1_live_later.run_id); // Newest live (time4)
1106
+ // expect(liveAgent1[1]!.runId).toBe(eval1_live.run_id); // Oldest live (time1)
1107
+
1108
+ // // Get only test for agent1 (should be 1)
1109
+ // const testAgent1 = await store.getEvalsByAgentName(agent1, 'test');
1110
+ // expect(testAgent1.length).toBe(1);
1111
+ // expect(testAgent1[0]!.runId).toBe(eval1_test.run_id);
1112
+ // expect(testAgent1[0]!.testInfo).toEqual(JSON.parse(eval1_test.test_info!));
1113
+
1114
+ // // Get for agent2 (should be 1)
1115
+ // const allAgent2 = await store.getEvalsByAgentName(agent2);
1116
+ // expect(allAgent2.length).toBe(1);
1117
+ // expect(allAgent2[0]!.runId).toBe(eval2_live.run_id);
1118
+
1119
+ // // Get for non-existent agent
1120
+ // const none = await store.getEvalsByAgentName('non-existent-agent');
1121
+ // expect(none.length).toBe(0);
1122
+ // });
1123
+ // }); // End Eval Operations describe
1124
+
1125
+ // // --- Workflow Operations Tests ---
1126
+ // describe('Workflow Operations', () => {
1127
+ // const sampleWorkflowSnapshot = (
1128
+ // workflowName: string,
1129
+ // runId: string,
1130
+ // resourceId?: string,
1131
+ // createdAt = new Date(),
1132
+ // status = 'running',
1133
+ // ): { recordData: Record<string, any>; snapshot: WorkflowRunState } => {
1134
+ // const snapshot: WorkflowRunState = {
1135
+ // value: { currentState: status },
1136
+ // context: {
1137
+ // step1: { status: 'success', output: { data: 'test' } },
1138
+ // input: { source: 'test' },
1139
+ // } as unknown as WorkflowRunState['context'],
1140
+ // serializedStepGraph: [],
1141
+ // activePaths: [],
1142
+ // suspendedPaths: {},
1143
+ // runId: runId,
1144
+ // timestamp: createdAt.getTime(),
1145
+ // status: 'success',
1146
+ // ...(resourceId && { resourceId: resourceId }), // Conditionally add resourceId to snapshot
1147
+ // };
1148
+ // return {
1149
+ // recordData: {
1150
+ // entity: 'workflow_snapshot',
1151
+ // workflow_name: workflowName,
1152
+ // run_id: runId,
1153
+ // snapshot: JSON.stringify(snapshot),
1154
+ // createdAt: createdAt.toISOString(),
1155
+ // updatedAt: createdAt.toISOString(),
1156
+ // resourceId: resourceId, // Store resourceId directly if available
1157
+ // metadata: JSON.stringify({ wf: 'meta' }),
1158
+ // },
1159
+ // snapshot: snapshot,
1160
+ // };
1161
+ // };
1162
+
1163
+ // test('should persist and load a workflow snapshot', async () => {
1164
+ // const wfName = 'persist-test-wf';
1165
+ // const runId = `run-${randomUUID()}`;
1166
+ // const { snapshot } = sampleWorkflowSnapshot(wfName, runId);
1167
+
1168
+ // await expect(
1169
+ // store.persistWorkflowSnapshot({
1170
+ // workflowName: wfName,
1171
+ // runId: runId,
1172
+ // snapshot: snapshot,
1173
+ // }),
1174
+ // ).resolves.not.toThrow();
1175
+
1176
+ // const loadedSnapshot = await store.loadWorkflowSnapshot({
1177
+ // workflowName: wfName,
1178
+ // runId: runId,
1179
+ // });
1180
+ // // Compare only relevant parts, as persist might add internal fields
1181
+ // expect(loadedSnapshot?.runId).toEqual(snapshot.runId);
1182
+ // expect(loadedSnapshot?.value).toEqual(snapshot.value);
1183
+ // expect(loadedSnapshot?.context).toEqual(snapshot.context);
1184
+ // });
1185
+
1186
+ // test('should allow updating an existing workflow snapshot', async () => {
1187
+ // const wfName = 'update-test-wf';
1188
+ // const runId = `run-${randomUUID()}`;
1189
+
1190
+ // // Create initial snapshot
1191
+ // const { snapshot: initialSnapshot } = sampleWorkflowSnapshot(wfName, runId);
1192
+
1193
+ // await expect(
1194
+ // store.persistWorkflowSnapshot({
1195
+ // workflowName: wfName,
1196
+ // runId: runId,
1197
+ // snapshot: initialSnapshot,
1198
+ // }),
1199
+ // ).resolves.not.toThrow();
1200
+
1201
+ // // Create updated snapshot with different data
1202
+ // const updatedSnapshot: WorkflowRunState = {
1203
+ // ...initialSnapshot,
1204
+ // value: { currentState: 'completed' },
1205
+ // context: {
1206
+ // step1: { status: 'success', output: { data: 'updated-test' } },
1207
+ // step2: { status: 'success', output: { data: 'new-step' } },
1208
+ // input: { source: 'updated-test' },
1209
+ // } as unknown as WorkflowRunState['context'],
1210
+ // timestamp: Date.now(),
1211
+ // };
1212
+
1213
+ // // This should succeed (update existing snapshot)
1214
+ // await expect(
1215
+ // store.persistWorkflowSnapshot({
1216
+ // workflowName: wfName,
1217
+ // runId: runId,
1218
+ // snapshot: updatedSnapshot,
1219
+ // }),
1220
+ // ).resolves.not.toThrow();
1221
+
1222
+ // // Verify the snapshot was updated
1223
+ // const loadedSnapshot = await store.loadWorkflowSnapshot({
1224
+ // workflowName: wfName,
1225
+ // runId: runId,
1226
+ // });
1227
+
1228
+ // expect(loadedSnapshot?.runId).toEqual(updatedSnapshot.runId);
1229
+ // expect(loadedSnapshot?.value).toEqual(updatedSnapshot.value);
1230
+ // expect(loadedSnapshot?.context).toEqual(updatedSnapshot.context);
1231
+ // });
1232
+
1233
+ // test('getWorkflowRunById should retrieve correct run', async () => {
1234
+ // const wfName = 'get-by-id-wf';
1235
+ // const runId1 = `run-${randomUUID()}`;
1236
+ // const runId2 = `run-${randomUUID()}`;
1237
+ // const wf1 = sampleWorkflowSnapshot(wfName, runId1);
1238
+ // const wf2 = sampleWorkflowSnapshot(wfName, runId2);
1239
+
1240
+ // await store.batchInsert({ tableName: TABLE_WORKFLOW_SNAPSHOT, records: [wf1.recordData, wf2.recordData] });
1241
+
1242
+ // const found = await store.getWorkflowRunById({ runId: runId1, workflowName: wfName });
1243
+ // expect(found).toBeDefined();
1244
+ // expect(found!.runId).toBe(runId1);
1245
+ // expect(found!.workflowName).toBe(wfName);
1246
+
1247
+ // const notFound = await store.getWorkflowRunById({ runId: 'non-existent', workflowName: wfName });
1248
+ // expect(notFound).toBeNull();
1249
+ // });
1250
+
1251
+ // test('getWorkflowRuns should return all runs when no filters applied', async () => {
1252
+ // const wfName = 'get-runs-all';
1253
+ // const runId1 = `run-${randomUUID()}`;
1254
+ // const runId2 = `run-${randomUUID()}`;
1255
+ // const wf1 = sampleWorkflowSnapshot(wfName, runId1, undefined, new Date(Date.now() - 1000));
1256
+ // const wf2 = sampleWorkflowSnapshot(wfName, runId2, undefined, new Date());
1257
+
1258
+ // await store.batchInsert({ tableName: TABLE_WORKFLOW_SNAPSHOT, records: [wf1.recordData, wf2.recordData] });
1259
+
1260
+ // const { runs, total } = await store.getWorkflowRuns(); // No filters
1261
+ // // Note: Scan order is not guaranteed, so check for presence and count instead of order
1262
+ // expect(total).toBe(2);
1263
+ // expect(runs.length).toBe(2);
1264
+ // expect(runs.map(r => r.runId)).toEqual(expect.arrayContaining([runId1, runId2]));
1265
+ // });
1266
+
1267
+ // test('getWorkflowRuns should filter by workflowName', async () => {
1268
+ // const wfName1 = 'get-runs-filter-name1';
1269
+ // const wfName2 = 'get-runs-filter-name2';
1270
+ // const runId1 = `run-${randomUUID()}`;
1271
+ // const runId2 = `run-${randomUUID()}`;
1272
+ // const wf1 = sampleWorkflowSnapshot(wfName1, runId1);
1273
+ // const wf2 = sampleWorkflowSnapshot(wfName2, runId2);
1274
+
1275
+ // await store.batchInsert({ tableName: TABLE_WORKFLOW_SNAPSHOT, records: [wf1.recordData, wf2.recordData] });
1276
+
1277
+ // const { runs, total } = await store.getWorkflowRuns({ workflowName: wfName1 });
1278
+ // expect(total).toBe(1);
1279
+ // expect(runs.length).toBe(1);
1280
+ // expect(runs[0]!.runId).toBe(runId1);
1281
+ // });
1282
+
1283
+ // test('getWorkflowRuns should filter by resourceId', async () => {
1284
+ // const wfName = 'get-runs-filter-resource';
1285
+ // const resource1 = 'resource-filter-1';
1286
+ // const resource2 = 'resource-filter-2';
1287
+ // const runId1 = `run-${randomUUID()}`;
1288
+ // const runId2 = `run-${randomUUID()}`;
1289
+ // const runId3 = `run-${randomUUID()}`;
1290
+ // const wf1 = sampleWorkflowSnapshot(wfName, runId1, resource1);
1291
+ // const wf2 = sampleWorkflowSnapshot(wfName, runId2, resource2);
1292
+ // const wf3 = sampleWorkflowSnapshot(wfName, runId3, resource1);
1293
+
1294
+ // await store.batchInsert({
1295
+ // tableName: TABLE_WORKFLOW_SNAPSHOT,
1296
+ // records: [wf1.recordData, wf2.recordData, wf3.recordData],
1297
+ // });
1298
+
1299
+ // const { runs, total } = await store.getWorkflowRuns({ resourceId: resource1 });
1300
+ // // Note: Scan order not guaranteed
1301
+ // expect(total).toBe(2);
1302
+ // expect(runs.length).toBe(2);
1303
+ // expect(runs.map(r => r.runId)).toEqual(expect.arrayContaining([runId1, runId3]));
1304
+ // expect(runs.every(r => r.resourceId === resource1)).toBe(true);
1305
+ // });
1306
+
1307
+ // test('getWorkflowRuns should filter by date range', async () => {
1308
+ // const wfName = 'get-runs-filter-date';
1309
+ // const time1 = new Date(2024, 0, 10); // Jan 10 2024
1310
+ // const time2 = new Date(2024, 0, 15); // Jan 15 2024
1311
+ // const time3 = new Date(2024, 0, 20); // Jan 20 2024
1312
+ // const runId1 = `run-${randomUUID()}`;
1313
+ // const runId2 = `run-${randomUUID()}`;
1314
+ // const runId3 = `run-${randomUUID()}`;
1315
+ // const wf1 = sampleWorkflowSnapshot(wfName, runId1, undefined, time1);
1316
+ // const wf2 = sampleWorkflowSnapshot(wfName, runId2, undefined, time2);
1317
+ // const wf3 = sampleWorkflowSnapshot(wfName, runId3, undefined, time3);
1318
+
1319
+ // await store.batchInsert({
1320
+ // tableName: TABLE_WORKFLOW_SNAPSHOT,
1321
+ // records: [wf1.recordData, wf2.recordData, wf3.recordData],
1322
+ // });
1323
+
1324
+ // const { runs, total } = await store.getWorkflowRuns({
1325
+ // fromDate: new Date(2024, 0, 12), // Jan 12
1326
+ // toDate: new Date(2024, 0, 18), // Jan 18
1327
+ // });
1328
+ // expect(total).toBe(1);
1329
+ // expect(runs.length).toBe(1);
1330
+ // expect(runs[0]!.runId).toBe(runId2); // Only wf2 falls within the range
1331
+ // });
1332
+
1333
+ // test('getWorkflowRuns should handle pagination (limit/offset)', async () => {
1334
+ // const wfName = 'get-runs-pagination';
1335
+ // const snapshots = Array.from({ length: 5 }, (_, i) =>
1336
+ // sampleWorkflowSnapshot(wfName, `run-page-${i}`, undefined, new Date(Date.now() + i * 1000)),
1337
+ // );
1338
+ // await store.batchInsert({ tableName: TABLE_WORKFLOW_SNAPSHOT, records: snapshots.map(s => s.recordData) });
1339
+
1340
+ // // Get page 1 (limit 2, offset 0)
1341
+ // const page1 = await store.getWorkflowRuns({ workflowName: wfName, limit: 2, offset: 0 });
1342
+ // expect(page1.total).toBe(5);
1343
+ // expect(page1.runs.length).toBe(2);
1344
+ // // Scan order not guaranteed, check for presence of two expected runs
1345
+ // const page1Ids = page1.runs.map(r => r.runId);
1346
+ // expect(snapshots.slice(0, 2).map(s => s!.recordData.run_id)).toEqual(expect.arrayContaining(page1Ids));
1347
+
1348
+ // // Get page 2 (limit 2, offset 2)
1349
+ // const page2 = await store.getWorkflowRuns({ workflowName: wfName, limit: 2, offset: 2 });
1350
+ // expect(page2.total).toBe(5);
1351
+ // expect(page2.runs.length).toBe(2);
1352
+ // const page2Ids = page2.runs.map(r => r.runId);
1353
+ // expect(snapshots.slice(2, 4).map(s => s!.recordData.run_id)).toEqual(expect.arrayContaining(page2Ids));
1354
+
1355
+ // // Get page 3 (limit 2, offset 4)
1356
+ // const page3 = await store.getWorkflowRuns({ workflowName: wfName, limit: 2, offset: 4 });
1357
+ // expect(page3.total).toBe(5);
1358
+ // expect(page3.runs.length).toBe(1);
1359
+ // // Use explicit type assertion for runs array access to fix linter error
1360
+ // expect((page3.runs as WorkflowRun[])[0]!.runId).toBe(snapshots[4]!.recordData.run_id);
1361
+
1362
+ // // Get page beyond results (offset 5)
1363
+ // const page4 = await store.getWorkflowRuns({ workflowName: wfName, limit: 2, offset: 5 });
1364
+ // expect(page4.total).toBe(5);
1365
+ // expect(page4.runs.length).toBe(0);
1366
+ // });
1367
+ // }); // End Workflow Operations describe
1368
+
1369
+ // // --- Initialization & Configuration Tests ---
1370
+ // describe('Initialization & Configuration', () => {
1371
+ // test('should throw error if tableName is missing in config', () => {
1372
+ // expect(() => {
1373
+ // new DynamoDBStore({
1374
+ // name: 'MissingTableStore',
1375
+ // config: {
1376
+ // endpoint: LOCAL_ENDPOINT,
1377
+ // region: LOCAL_REGION,
1378
+ // credentials: { accessKeyId: 'test', secretAccessKey: 'test' },
1379
+ // } as any, // Cast to any to bypass compile-time check for this specific test
1380
+ // });
1381
+ // }).toThrow(/tableName must be provided/); // Check for specific error message if possible
1382
+ // });
1383
+
1384
+ // test('should throw error during operations if table does not exist', async () => {
1385
+ // // Use a valid but random table name unlikely to exist
1386
+ // const nonExistentTableName = `non-existent-${randomUUID()}`;
1387
+ // const storeWithInvalidTable = new DynamoDBStore({
1388
+ // name: 'InvalidTableStore',
1389
+ // config: {
1390
+ // tableName: nonExistentTableName,
1391
+ // endpoint: LOCAL_ENDPOINT,
1392
+ // region: LOCAL_REGION,
1393
+ // credentials: { accessKeyId: 'test', secretAccessKey: 'test' },
1394
+ // },
1395
+ // });
1396
+
1397
+ // await expect(storeWithInvalidTable.getThreadById({ threadId: 'any-id' }))
1398
+ // .rejects // Update regex to match either DDB error or ElectroDB wrapper
1399
+ // .toThrow(/ResourceNotFoundException|Table.*does not exist|Cannot do operations on a non-existent table/);
1400
+ // });
1401
+
1402
+ // test('init() should throw error if table does not exist', async () => {
1403
+ // // Use a valid but random table name unlikely to exist
1404
+ // const nonExistentTableName = `non-existent-init-${randomUUID()}`;
1405
+ // const storeWithInvalidTable = new DynamoDBStore({
1406
+ // name: 'InvalidTableStoreInit',
1407
+ // config: {
1408
+ // tableName: nonExistentTableName,
1409
+ // endpoint: LOCAL_ENDPOINT,
1410
+ // region: LOCAL_REGION,
1411
+ // credentials: { accessKeyId: 'test', secretAccessKey: 'test' },
1412
+ // },
1413
+ // });
1414
+
1415
+ // await expect(storeWithInvalidTable.init())
1416
+ // .rejects // Update regex here too for consistency
1417
+ // .toThrow(/ResourceNotFoundException|Table.*does not exist|Cannot do operations on a non-existent table/);
1418
+ // });
1419
+ // }); // End Initialization & Configuration describe
1420
+ // });
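
The net effect of this hunk is that the bespoke workflow and configuration suites are retired (commented out) in favor of the shared createTestSuite helper now imported at the top of the file. As a speculative sketch only of how that replacement is typically wired up: the internal signature of createTestSuite is not visible in this diff, so the call shape below is an assumption, while the DynamoDBStore construction mirrors the config used in the commented-out tests above.

// Speculative sketch, not part of the published diff: assumes createTestSuite
// accepts a configured store instance; its actual @internal signature is not
// shown here.
import { createTestSuite } from '@internal/storage-test-utils';
import { DynamoDBStore } from '..';

const store = new DynamoDBStore({
  name: 'dynamodb-test',
  config: {
    tableName: TEST_TABLE_NAME, // single-table name declared near the top of this file
    endpoint: LOCAL_ENDPOINT,   // DynamoDB Local endpoint used by these tests
    region: LOCAL_REGION,
    credentials: { accessKeyId: 'test', secretAccessKey: 'test' },
  },
});

// Assumed entry point: registers the shared storage tests (threads, messages,
// workflow snapshots) against this store inside vitest describe blocks.
createTestSuite(store);

Centralizing the assertions in one shared suite means every storage adapter exercises the same behavioral contract, rather than each package maintaining a drifting copy of tests like the ones commented out above.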