@mastra/dynamodb 0.0.0-working-memory-per-user-20250620163010 → 0.0.0-zod-v4-compat-part-2-20250822105954

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. package/LICENSE.md +11 -42
  2. package/dist/entities/eval.d.ts +102 -0
  3. package/dist/entities/eval.d.ts.map +1 -0
  4. package/dist/entities/index.d.ts +732 -0
  5. package/dist/entities/index.d.ts.map +1 -0
  6. package/dist/entities/message.d.ts +100 -0
  7. package/dist/entities/message.d.ts.map +1 -0
  8. package/dist/entities/resource.d.ts +54 -0
  9. package/dist/entities/resource.d.ts.map +1 -0
  10. package/dist/entities/score.d.ts +215 -0
  11. package/dist/entities/score.d.ts.map +1 -0
  12. package/dist/entities/thread.d.ts +69 -0
  13. package/dist/entities/thread.d.ts.map +1 -0
  14. package/dist/entities/trace.d.ts +127 -0
  15. package/dist/entities/trace.d.ts.map +1 -0
  16. package/dist/entities/utils.d.ts +21 -0
  17. package/dist/entities/utils.d.ts.map +1 -0
  18. package/dist/entities/workflow-snapshot.d.ts +74 -0
  19. package/dist/entities/workflow-snapshot.d.ts.map +1 -0
  20. package/dist/index.cjs +2176 -516
  21. package/dist/index.cjs.map +1 -0
  22. package/dist/index.d.ts +2 -2
  23. package/dist/index.d.ts.map +1 -0
  24. package/dist/index.js +2163 -503
  25. package/dist/index.js.map +1 -0
  26. package/dist/storage/domains/legacy-evals/index.d.ts +19 -0
  27. package/dist/storage/domains/legacy-evals/index.d.ts.map +1 -0
  28. package/dist/storage/domains/memory/index.d.ts +77 -0
  29. package/dist/storage/domains/memory/index.d.ts.map +1 -0
  30. package/dist/storage/domains/operations/index.d.ts +69 -0
  31. package/dist/storage/domains/operations/index.d.ts.map +1 -0
  32. package/dist/storage/domains/score/index.d.ts +42 -0
  33. package/dist/storage/domains/score/index.d.ts.map +1 -0
  34. package/dist/storage/domains/traces/index.d.ts +28 -0
  35. package/dist/storage/domains/traces/index.d.ts.map +1 -0
  36. package/dist/storage/domains/workflows/index.d.ts +32 -0
  37. package/dist/storage/domains/workflows/index.d.ts.map +1 -0
  38. package/dist/storage/index.d.ts +220 -0
  39. package/dist/storage/index.d.ts.map +1 -0
  40. package/package.json +12 -12
  41. package/src/entities/index.ts +5 -1
  42. package/src/entities/resource.ts +57 -0
  43. package/src/entities/score.ts +285 -0
  44. package/src/storage/domains/legacy-evals/index.ts +243 -0
  45. package/src/storage/domains/memory/index.ts +894 -0
  46. package/src/storage/domains/operations/index.ts +433 -0
  47. package/src/storage/domains/score/index.ts +285 -0
  48. package/src/storage/domains/traces/index.ts +286 -0
  49. package/src/storage/domains/workflows/index.ts +297 -0
  50. package/src/storage/index.test.ts +1347 -1216
  51. package/src/storage/index.ts +211 -881
  52. package/dist/_tsup-dts-rollup.d.cts +0 -1157
  53. package/dist/_tsup-dts-rollup.d.ts +0 -1157
  54. package/dist/index.d.cts +0 -2
@@ -1,18 +1,11 @@
  import { DynamoDBClient, DescribeTableCommand } from '@aws-sdk/client-dynamodb';
  import { DynamoDBDocumentClient } from '@aws-sdk/lib-dynamodb';
  import type { MastraMessageContentV2 } from '@mastra/core/agent';
- import { MessageList } from '@mastra/core/agent';
+ import { ErrorCategory, ErrorDomain, MastraError } from '@mastra/core/error';
  import type { StorageThreadType, MastraMessageV2, MastraMessageV1 } from '@mastra/core/memory';

- import {
- MastraStorage,
- TABLE_THREADS,
- TABLE_MESSAGES,
- TABLE_WORKFLOW_SNAPSHOT,
- TABLE_EVALS,
- TABLE_TRACES,
- TABLE_RESOURCES,
- } from '@mastra/core/storage';
+ import type { ScoreRowData } from '@mastra/core/scores';
+ import { MastraStorage } from '@mastra/core/storage';
  import type {
  EvalRow,
  StorageGetMessagesArg,
@@ -22,11 +15,21 @@ import type {
  StorageGetTracesArg,
  PaginationInfo,
  StorageColumn,
+ StoragePagination,
+ StorageDomains,
+ PaginationArgs,
+ StorageResourceType,
  } from '@mastra/core/storage';
  import type { Trace } from '@mastra/core/telemetry';
  import type { WorkflowRunState } from '@mastra/core/workflows';
  import type { Service } from 'electrodb';
  import { getElectroDbService } from '../entities';
+ import { LegacyEvalsDynamoDB } from './domains/legacy-evals';
+ import { MemoryStorageDynamoDB } from './domains/memory';
+ import { StoreOperationsDynamoDB } from './domains/operations';
+ import { ScoresStorageDynamoDB } from './domains/score';
+ import { TracesStorageDynamoDB } from './domains/traces';
+ import { WorkflowStorageDynamoDB } from './domains/workflows';

  export interface DynamoDBStoreConfig {
  region?: string;
@@ -43,79 +46,83 @@ type MastraService = Service<Record<string, any>> & {
  [key: string]: any;
  };

- // Define the structure for workflow snapshot items retrieved from DynamoDB
- interface WorkflowSnapshotDBItem {
- entity: string; // Typically 'workflow_snapshot'
- workflow_name: string;
- run_id: string;
- snapshot: WorkflowRunState; // Should be WorkflowRunState after ElectroDB get attribute processing
- createdAt: string; // ISO Date string
- updatedAt: string; // ISO Date string
- resourceId?: string;
- }
-
  export class DynamoDBStore extends MastraStorage {
  private tableName: string;
  private client: DynamoDBDocumentClient;
  private service: MastraService;
  protected hasInitialized: Promise<boolean> | null = null;
+ stores: StorageDomains;

  constructor({ name, config }: { name: string; config: DynamoDBStoreConfig }) {
  super({ name });

  // Validate required config
- if (!config.tableName || typeof config.tableName !== 'string' || config.tableName.trim() === '') {
- throw new Error('DynamoDBStore: config.tableName must be provided and cannot be empty.');
- }
- // Validate tableName characters (basic check)
- if (!/^[a-zA-Z0-9_.-]{3,255}$/.test(config.tableName)) {
- throw new Error(
- `DynamoDBStore: config.tableName "${config.tableName}" contains invalid characters or is not between 3 and 255 characters long.`,
- );
- }
+ try {
+ if (!config.tableName || typeof config.tableName !== 'string' || config.tableName.trim() === '') {
+ throw new Error('DynamoDBStore: config.tableName must be provided and cannot be empty.');
+ }
+ // Validate tableName characters (basic check)
+ if (!/^[a-zA-Z0-9_.-]{3,255}$/.test(config.tableName)) {
+ throw new Error(
+ `DynamoDBStore: config.tableName "${config.tableName}" contains invalid characters or is not between 3 and 255 characters long.`,
+ );
+ }

- const dynamoClient = new DynamoDBClient({
- region: config.region || 'us-east-1',
- endpoint: config.endpoint,
- credentials: config.credentials,
- });
+ const dynamoClient = new DynamoDBClient({
+ region: config.region || 'us-east-1',
+ endpoint: config.endpoint,
+ credentials: config.credentials,
+ });

- this.tableName = config.tableName;
- this.client = DynamoDBDocumentClient.from(dynamoClient);
- this.service = getElectroDbService(this.client, this.tableName) as MastraService;
+ this.tableName = config.tableName;
+ this.client = DynamoDBDocumentClient.from(dynamoClient);
+ this.service = getElectroDbService(this.client, this.tableName) as MastraService;

- // We're using a single table design with ElectroDB,
- // so we don't need to create multiple tables
- }
+ const operations = new StoreOperationsDynamoDB({
+ service: this.service,
+ tableName: this.tableName,
+ client: this.client,
+ });

- /**
- * This method is modified for DynamoDB with ElectroDB single-table design.
- * It assumes the table is created and managed externally via CDK/CloudFormation.
- *
- * This implementation only validates that the required table exists and is accessible.
- * No table creation is attempted - we simply check if we can access the table.
- */
- async createTable({ tableName }: { tableName: TABLE_NAMES; schema: Record<string, any> }): Promise<void> {
- this.logger.debug('Validating access to externally managed table', { tableName, physicalTable: this.tableName });
+ const traces = new TracesStorageDynamoDB({ service: this.service, operations });

- // For single-table design, we just need to verify the table exists and is accessible
- try {
- const tableExists = await this.validateTableExists();
+ const workflows = new WorkflowStorageDynamoDB({ service: this.service });

- if (!tableExists) {
- this.logger.error(
- `Table ${this.tableName} does not exist or is not accessible. It should be created via CDK/CloudFormation.`,
- );
- throw new Error(
- `Table ${this.tableName} does not exist or is not accessible. Ensure it's created via CDK/CloudFormation before using this store.`,
- );
- }
+ const memory = new MemoryStorageDynamoDB({ service: this.service });

- this.logger.debug(`Table ${this.tableName} exists and is accessible`);
+ const scores = new ScoresStorageDynamoDB({ service: this.service });
+
+ this.stores = {
+ operations,
+ legacyEvals: new LegacyEvalsDynamoDB({ service: this.service, tableName: this.tableName }),
+ traces,
+ workflows,
+ memory,
+ scores,
+ };
  } catch (error) {
- this.logger.error('Error validating table access', { tableName: this.tableName, error });
- throw error;
+ throw new MastraError(
+ {
+ id: 'STORAGE_DYNAMODB_STORE_CONSTRUCTOR_FAILED',
+ domain: ErrorDomain.STORAGE,
+ category: ErrorCategory.USER,
+ },
+ error,
+ );
  }
+
+ // We're using a single table design with ElectroDB,
+ // so we don't need to create multiple tables
+ }
+
+ get supports() {
+ return {
+ selectByIncludeResourceScope: true,
+ resourceWorkingMemory: true,
+ hasColumn: false,
+ createTable: false,
+ deleteMessages: false,
+ };
  }

  /**
@@ -140,7 +147,15 @@ export class DynamoDBStore extends MastraStorage {
  }

  // For other errors (like permissions issues), we should throw
- throw error;
+ throw new MastraError(
+ {
+ id: 'STORAGE_DYNAMODB_STORE_VALIDATE_TABLE_EXISTS_FAILED',
+ domain: ErrorDomain.STORAGE,
+ category: ErrorCategory.THIRD_PARTY,
+ details: { tableName: this.tableName },
+ },
+ error,
+ );
  }
  }

@@ -167,7 +182,15 @@ export class DynamoDBStore extends MastraStorage {
  // The error has already been handled by _performInitializationAndStore
  // (i.e., this.hasInitialized was reset). Re-throwing here ensures
  // the caller of init() is aware of the failure.
- throw error;
+ throw new MastraError(
+ {
+ id: 'STORAGE_DYNAMODB_STORE_INIT_FAILED',
+ domain: ErrorDomain.STORAGE,
+ category: ErrorCategory.THIRD_PARTY,
+ details: { tableName: this.tableName },
+ },
+ error,
+ );
  }
  }

@@ -194,27 +217,8 @@ export class DynamoDBStore extends MastraStorage {
  });
  }

- /**
- * Pre-processes a record to ensure Date objects are converted to ISO strings
- * This is necessary because ElectroDB validation happens before setters are applied
- */
- private preprocessRecord(record: Record<string, any>): Record<string, any> {
- const processed = { ...record };
-
- // Convert Date objects to ISO strings for date fields
- // This prevents ElectroDB validation errors that occur when Date objects are passed
- // to string-typed attributes, even when the attribute has a setter that converts dates
- if (processed.createdAt instanceof Date) {
- processed.createdAt = processed.createdAt.toISOString();
- }
- if (processed.updatedAt instanceof Date) {
- processed.updatedAt = processed.updatedAt.toISOString();
- }
- if (processed.created_at instanceof Date) {
- processed.created_at = processed.created_at.toISOString();
- }
-
- return processed;
+ async createTable({ tableName, schema }: { tableName: TABLE_NAMES; schema: Record<string, any> }): Promise<void> {
+ return this.stores.operations.createTable({ tableName, schema });
  }

  async alterTable(_args: {
@@ -222,274 +226,40 @@ export class DynamoDBStore extends MastraStorage {
  schema: Record<string, StorageColumn>;
  ifNotExists: string[];
  }): Promise<void> {
- // Nothing to do here, DynamoDB has a flexible schema and handles new attributes automatically upon insertion/update.
+ return this.stores.operations.alterTable(_args);
  }

- /**
- * Clear all items from a logical "table" (entity type)
- */
  async clearTable({ tableName }: { tableName: TABLE_NAMES }): Promise<void> {
- this.logger.debug('DynamoDB clearTable called', { tableName });
-
- const entityName = this.getEntityNameForTable(tableName);
- if (!entityName || !this.service.entities[entityName]) {
- throw new Error(`No entity defined for ${tableName}`);
- }
-
- try {
- // Scan requires no key, just uses the entity handler
- const result = await this.service.entities[entityName].scan.go({ pages: 'all' }); // Get all pages
-
- if (!result.data.length) {
- this.logger.debug(`No records found to clear for ${tableName}`);
- return;
- }
-
- this.logger.debug(`Found ${result.data.length} records to delete for ${tableName}`);
-
- // ElectroDB batch delete expects the key components for each item
- const keysToDelete = result.data.map((item: any) => {
- const key: { entity: string; [key: string]: any } = { entity: entityName };
-
- // Construct the key based on the specific entity's primary key structure
- switch (entityName) {
- case 'thread':
- if (!item.id) throw new Error(`Missing required key 'id' for entity 'thread'`);
- key.id = item.id;
- break;
- case 'message':
- if (!item.id) throw new Error(`Missing required key 'id' for entity 'message'`);
- key.id = item.id;
- break;
- case 'workflowSnapshot':
- if (!item.workflow_name)
- throw new Error(`Missing required key 'workflow_name' for entity 'workflowSnapshot'`);
- if (!item.run_id) throw new Error(`Missing required key 'run_id' for entity 'workflowSnapshot'`);
- key.workflow_name = item.workflow_name;
- key.run_id = item.run_id;
- break;
- case 'eval':
- // Assuming 'eval' uses 'run_id' or another unique identifier as part of its PK
- // Adjust based on the actual primary key defined in getElectroDbService
- if (!item.run_id) throw new Error(`Missing required key 'run_id' for entity 'eval'`);
- // Add other key components if necessary for 'eval' PK
- key.run_id = item.run_id;
- // Example: if global_run_id is also part of PK:
- // if (!item.global_run_id) throw new Error(`Missing required key 'global_run_id' for entity 'eval'`);
- // key.global_run_id = item.global_run_id;
- break;
- case 'trace':
- // Assuming 'trace' uses 'id' as its PK
- // Adjust based on the actual primary key defined in getElectroDbService
- if (!item.id) throw new Error(`Missing required key 'id' for entity 'trace'`);
- key.id = item.id;
- break;
- default:
- // Handle unknown entity types - log a warning or throw an error
- this.logger.warn(`Unknown entity type encountered during clearTable: ${entityName}`);
- // Optionally throw an error if strict handling is required
- throw new Error(`Cannot construct delete key for unknown entity type: ${entityName}`);
- }
-
- return key;
- });
-
- const batchSize = 25;
- for (let i = 0; i < keysToDelete.length; i += batchSize) {
- const batchKeys = keysToDelete.slice(i, i + batchSize);
- // Pass the array of key objects to delete
- await this.service.entities[entityName].delete(batchKeys).go();
- }
+ return this.stores.operations.clearTable({ tableName });
+ }

- this.logger.debug(`Successfully cleared all records for ${tableName}`);
- } catch (error) {
- this.logger.error('Failed to clear table', { tableName, error });
- throw error;
- }
+ async dropTable({ tableName }: { tableName: TABLE_NAMES }): Promise<void> {
+ return this.stores.operations.dropTable({ tableName });
  }

- /**
- * Insert a record into the specified "table" (entity)
- */
  async insert({ tableName, record }: { tableName: TABLE_NAMES; record: Record<string, any> }): Promise<void> {
- this.logger.debug('DynamoDB insert called', { tableName });
-
- const entityName = this.getEntityNameForTable(tableName);
- if (!entityName || !this.service.entities[entityName]) {
- throw new Error(`No entity defined for ${tableName}`);
- }
-
- try {
- // Add the entity type to the record and preprocess before creating
- const dataToSave = { entity: entityName, ...this.preprocessRecord(record) };
- await this.service.entities[entityName].create(dataToSave).go();
- } catch (error) {
- this.logger.error('Failed to insert record', { tableName, error });
- throw error;
- }
+ return this.stores.operations.insert({ tableName, record });
  }

- /**
- * Insert multiple records as a batch
- */
  async batchInsert({ tableName, records }: { tableName: TABLE_NAMES; records: Record<string, any>[] }): Promise<void> {
- this.logger.debug('DynamoDB batchInsert called', { tableName, count: records.length });
-
- const entityName = this.getEntityNameForTable(tableName);
- if (!entityName || !this.service.entities[entityName]) {
- throw new Error(`No entity defined for ${tableName}`);
- }
-
- // Add entity type and preprocess each record
- const recordsToSave = records.map(rec => ({ entity: entityName, ...this.preprocessRecord(rec) }));
-
- // ElectroDB has batch limits of 25 items, so we need to chunk
- const batchSize = 25;
- const batches = [];
- for (let i = 0; i < recordsToSave.length; i += batchSize) {
- const batch = recordsToSave.slice(i, i + batchSize);
- batches.push(batch);
- }
-
- try {
- // Process each batch
- for (const batch of batches) {
- // Create each item individually within the batch
- for (const recordData of batch) {
- if (!recordData.entity) {
- this.logger.error('Missing entity property in record data for batchInsert', { recordData, tableName });
- throw new Error(`Internal error: Missing entity property during batchInsert for ${tableName}`);
- }
- // Log the object just before the create call
- this.logger.debug('Attempting to create record in batchInsert:', { entityName, recordData });
- await this.service.entities[entityName].create(recordData).go();
- }
- // Original batch call: await this.service.entities[entityName].create(batch).go();
- }
- } catch (error) {
- this.logger.error('Failed to batch insert records', { tableName, error });
- throw error;
- }
+ return this.stores.operations.batchInsert({ tableName, records });
  }

- /**
- * Load a record by its keys
- */
  async load<R>({ tableName, keys }: { tableName: TABLE_NAMES; keys: Record<string, string> }): Promise<R | null> {
- this.logger.debug('DynamoDB load called', { tableName, keys });
-
- const entityName = this.getEntityNameForTable(tableName);
- if (!entityName || !this.service.entities[entityName]) {
- throw new Error(`No entity defined for ${tableName}`);
- }
-
- try {
- // Add the entity type to the key object for the .get call
- const keyObject = { entity: entityName, ...keys };
- const result = await this.service.entities[entityName].get(keyObject).go();
-
- if (!result.data) {
- return null;
- }
-
- // Add parsing logic if necessary (e.g., for metadata)
- let data = result.data;
- if (data.metadata && typeof data.metadata === 'string') {
- try {
- // data.metadata = JSON.parse(data.metadata); // REMOVED by AI
- } catch {
- /* ignore parse error */
- }
- }
- // Add similar parsing for other JSON fields if needed based on entity type
-
- return data as R;
- } catch (error) {
- this.logger.error('Failed to load record', { tableName, keys, error });
- throw error;
- }
+ return this.stores.operations.load({ tableName, keys });
  }

  // Thread operations
  async getThreadById({ threadId }: { threadId: string }): Promise<StorageThreadType | null> {
- this.logger.debug('Getting thread by ID', { threadId });
- try {
- const result = await this.service.entities.thread.get({ entity: 'thread', id: threadId }).go();
-
- if (!result.data) {
- return null;
- }
-
- // ElectroDB handles the transformation with attribute getters
- const data = result.data;
- return {
- ...data,
- // Convert date strings back to Date objects for consistency
- createdAt: typeof data.createdAt === 'string' ? new Date(data.createdAt) : data.createdAt,
- updatedAt: typeof data.updatedAt === 'string' ? new Date(data.updatedAt) : data.updatedAt,
- // metadata: data.metadata ? JSON.parse(data.metadata) : undefined, // REMOVED by AI
- // metadata is already transformed by the entity's getter
- } as StorageThreadType;
- } catch (error) {
- this.logger.error('Failed to get thread by ID', { threadId, error });
- throw error;
- }
+ return this.stores.memory.getThreadById({ threadId });
  }

  async getThreadsByResourceId({ resourceId }: { resourceId: string }): Promise<StorageThreadType[]> {
- this.logger.debug('Getting threads by resource ID', { resourceId });
- try {
- const result = await this.service.entities.thread.query.byResource({ entity: 'thread', resourceId }).go();
-
- if (!result.data.length) {
- return [];
- }
-
- // ElectroDB handles the transformation with attribute getters
- return result.data.map((data: any) => ({
- ...data,
- // Convert date strings back to Date objects for consistency
- createdAt: typeof data.createdAt === 'string' ? new Date(data.createdAt) : data.createdAt,
- updatedAt: typeof data.updatedAt === 'string' ? new Date(data.updatedAt) : data.updatedAt,
- // metadata: data.metadata ? JSON.parse(data.metadata) : undefined, // REMOVED by AI
- // metadata is already transformed by the entity's getter
- })) as StorageThreadType[];
- } catch (error) {
- this.logger.error('Failed to get threads by resource ID', { resourceId, error });
- throw error;
- }
+ return this.stores.memory.getThreadsByResourceId({ resourceId });
  }

  async saveThread({ thread }: { thread: StorageThreadType }): Promise<StorageThreadType> {
- this.logger.debug('Saving thread', { threadId: thread.id });
-
- const now = new Date();
-
- const threadData = {
- entity: 'thread',
- id: thread.id,
- resourceId: thread.resourceId,
- title: thread.title || `Thread ${thread.id}`,
- createdAt: thread.createdAt?.toISOString() || now.toISOString(),
- updatedAt: now.toISOString(),
- metadata: thread.metadata ? JSON.stringify(thread.metadata) : undefined,
- };
-
- try {
- await this.service.entities.thread.create(threadData).go();
-
- return {
- id: thread.id,
- resourceId: thread.resourceId,
- title: threadData.title,
- createdAt: thread.createdAt || now,
- updatedAt: now,
- metadata: thread.metadata,
- };
- } catch (error) {
- this.logger.error('Failed to save thread', { threadId: thread.id, error });
- throw error;
- }
+ return this.stores.memory.saveThread({ thread });
  }

  async updateThread({
@@ -501,68 +271,11 @@ export class DynamoDBStore extends MastraStorage {
  title: string;
  metadata: Record<string, unknown>;
  }): Promise<StorageThreadType> {
- this.logger.debug('Updating thread', { threadId: id });
-
- try {
- // First, get the existing thread to merge with updates
- const existingThread = await this.getThreadById({ threadId: id });
-
- if (!existingThread) {
- throw new Error(`Thread not found: ${id}`);
- }
-
- const now = new Date();
-
- // Prepare the update
- // Define type for only the fields we are actually updating
- type ThreadUpdatePayload = {
- updatedAt: string; // ISO String for DDB
- title?: string;
- metadata?: string; // Stringified JSON for DDB
- };
- const updateData: ThreadUpdatePayload = {
- updatedAt: now.toISOString(),
- };
-
- if (title) {
- updateData.title = title;
- }
-
- if (metadata) {
- updateData.metadata = JSON.stringify(metadata); // Stringify metadata for update
- }
-
- // Update the thread using the primary key
- await this.service.entities.thread.update({ entity: 'thread', id }).set(updateData).go();
-
- // Return the potentially updated thread object
- return {
- ...existingThread,
- title: title || existingThread.title,
- metadata: metadata || existingThread.metadata,
- updatedAt: now,
- };
- } catch (error) {
- this.logger.error('Failed to update thread', { threadId: id, error });
- throw error;
- }
+ return this.stores.memory.updateThread({ id, title, metadata });
  }

  async deleteThread({ threadId }: { threadId: string }): Promise<void> {
- this.logger.debug('Deleting thread', { threadId });
-
- try {
- // Delete the thread using the primary key
- await this.service.entities.thread.delete({ entity: 'thread', id: threadId }).go();
-
- // Note: In a production system, you might want to:
- // 1. Delete all messages associated with this thread
- // 2. Delete any vector embeddings related to this thread
- // These would be additional operations
- } catch (error) {
- this.logger.error('Failed to delete thread', { threadId, error });
- throw error;
- }
+ return this.stores.memory.deleteThread({ threadId });
  }

  // Message operations
@@ -574,133 +287,39 @@ export class DynamoDBStore extends MastraStorage {
  selectBy,
  format,
  }: StorageGetMessagesArg & { format?: 'v1' | 'v2' }): Promise<MastraMessageV1[] | MastraMessageV2[]> {
- this.logger.debug('Getting messages', { threadId, selectBy });
-
- try {
- // Query messages by thread ID using the GSI
- // Provide *all* composite key components for the 'byThread' index ('entity', 'threadId')
- const query = this.service.entities.message.query.byThread({ entity: 'message', threadId });
-
- const limit = this.resolveMessageLimit({ last: selectBy?.last, defaultLimit: Number.MAX_SAFE_INTEGER });
- // Apply the 'last' limit if provided
- if (limit !== Number.MAX_SAFE_INTEGER) {
- // Use ElectroDB's limit parameter
- // DDB GSIs are sorted in ascending order
- // Use ElectroDB's order parameter to sort in descending order to retrieve 'latest' messages
- const results = await query.go({ limit, order: 'desc' });
- // Use arrow function in map to preserve 'this' context for parseMessageData
- const list = new MessageList({ threadId, resourceId }).add(
- results.data.map((data: any) => this.parseMessageData(data)),
- 'memory',
- );
- if (format === `v2`) return list.get.all.v2();
- return list.get.all.v1();
- }
-
- // If no limit specified, get all messages (potentially paginated by ElectroDB)
- // Consider adding default limit or handling pagination if needed
- const results = await query.go();
- const list = new MessageList({ threadId, resourceId }).add(
- results.data.map((data: any) => this.parseMessageData(data)),
- 'memory',
- );
- if (format === `v2`) return list.get.all.v2();
- return list.get.all.v1();
- } catch (error) {
- this.logger.error('Failed to get messages', { threadId, error });
- throw error;
- }
+ return this.stores.memory.getMessages({ threadId, resourceId, selectBy, format });
  }
+
  async saveMessages(args: { messages: MastraMessageV1[]; format?: undefined | 'v1' }): Promise<MastraMessageV1[]>;
  async saveMessages(args: { messages: MastraMessageV2[]; format: 'v2' }): Promise<MastraMessageV2[]>;
  async saveMessages(
  args: { messages: MastraMessageV1[]; format?: undefined | 'v1' } | { messages: MastraMessageV2[]; format: 'v2' },
  ): Promise<MastraMessageV2[] | MastraMessageV1[]> {
- const { messages, format = 'v1' } = args;
- this.logger.debug('Saving messages', { count: messages.length });
-
- if (!messages.length) {
- return [];
- }
-
- const threadId = messages[0]?.threadId;
- if (!threadId) {
- throw new Error('Thread ID is required');
- }
-
- // Ensure 'entity' is added and complex fields are handled
- const messagesToSave = messages.map(msg => {
- const now = new Date().toISOString();
- return {
- entity: 'message', // Add entity type
- id: msg.id,
- threadId: msg.threadId,
- role: msg.role,
- type: msg.type,
- resourceId: msg.resourceId,
- // Ensure complex fields are stringified if not handled by attribute setters
- content: typeof msg.content === 'string' ? msg.content : JSON.stringify(msg.content),
- toolCallArgs: `toolCallArgs` in msg && msg.toolCallArgs ? JSON.stringify(msg.toolCallArgs) : undefined,
- toolCallIds: `toolCallIds` in msg && msg.toolCallIds ? JSON.stringify(msg.toolCallIds) : undefined,
- toolNames: `toolNames` in msg && msg.toolNames ? JSON.stringify(msg.toolNames) : undefined,
- createdAt: msg.createdAt instanceof Date ? msg.createdAt.toISOString() : msg.createdAt || now,
- updatedAt: now, // Add updatedAt
- };
- });
-
- try {
- // Process messages in batch
- const batchSize = 25; // DynamoDB batch limits
- const batches = [];
-
- for (let i = 0; i < messagesToSave.length; i += batchSize) {
- const batch = messagesToSave.slice(i, i + batchSize);
- batches.push(batch);
- }
+ return this.stores.memory.saveMessages(args);
+ }

- // Process each batch and update thread's updatedAt in parallel for better performance
- await Promise.all([
- // Process message batches
- ...batches.map(async batch => {
- for (const messageData of batch) {
- // Ensure each item has the entity property before sending
- if (!messageData.entity) {
- this.logger.error('Missing entity property in message data for create', { messageData });
- throw new Error('Internal error: Missing entity property during saveMessages');
- }
- await this.service.entities.message.create(messageData).go();
- }
- }),
- // Update thread's updatedAt timestamp
- this.service.entities.thread
- .update({ entity: 'thread', id: threadId })
- .set({
- updatedAt: new Date().toISOString(),
- })
- .go(),
- ]);
+ async getThreadsByResourceIdPaginated(args: {
+ resourceId: string;
+ page: number;
+ perPage: number;
+ }): Promise<PaginationInfo & { threads: StorageThreadType[] }> {
+ return this.stores.memory.getThreadsByResourceIdPaginated(args);
+ }

- const list = new MessageList().add(messages, 'memory');
- if (format === `v1`) return list.get.all.v1();
- return list.get.all.v2();
- } catch (error) {
- this.logger.error('Failed to save messages', { error });
- throw error;
- }
+ async getMessagesPaginated(
+ args: StorageGetMessagesArg & { format?: 'v1' | 'v2' },
+ ): Promise<PaginationInfo & { messages: MastraMessageV1[] | MastraMessageV2[] }> {
+ return this.stores.memory.getMessagesPaginated(args);
  }

- // Helper function to parse message data (handle JSON fields)
- private parseMessageData(data: any): MastraMessageV2 | MastraMessageV1 {
- // Removed try/catch and JSON.parse logic - now handled by entity 'get' attributes
- // This function now primarily ensures correct typing and Date conversion.
- return {
- ...data,
- // Ensure dates are Date objects if needed (ElectroDB might return strings)
- createdAt: data.createdAt ? new Date(data.createdAt) : undefined,
- updatedAt: data.updatedAt ? new Date(data.updatedAt) : undefined,
- // Other fields like content, toolCallArgs etc. are assumed to be correctly
- // transformed by the ElectroDB entity getters.
- };
+ async updateMessages(_args: {
+ messages: Partial<Omit<MastraMessageV2, 'createdAt'>> &
+ {
+ id: string;
+ content?: { metadata?: MastraMessageContentV2['metadata']; content?: MastraMessageContentV2['content'] };
+ }[];
+ }): Promise<MastraMessageV2[]> {
+ return this.stores.memory.updateMessages(_args);
  }

  // Trace operations
@@ -712,66 +331,15 @@ export class DynamoDBStore extends MastraStorage {
  attributes?: Record<string, string>;
  filters?: Record<string, any>;
  }): Promise<any[]> {
- const { name, scope, page, perPage } = args;
- this.logger.debug('Getting traces', { name, scope, page, perPage });
-
- try {
- let query;
-
- // Determine which index to use based on the provided filters
- // Provide *all* composite key components for the relevant index
- if (name) {
- query = this.service.entities.trace.query.byName({ entity: 'trace', name });
- } else if (scope) {
- query = this.service.entities.trace.query.byScope({ entity: 'trace', scope });
- } else {
- this.logger.warn('Performing a scan operation on traces - consider using a more specific query');
- query = this.service.entities.trace.scan;
- }
-
- let items: any[] = [];
- let cursor = null;
- let pagesFetched = 0;
- const startPage = page > 0 ? page : 1;
-
- do {
- const results: { data: any[]; cursor: string | null } = await query.go({ cursor, limit: perPage });
- pagesFetched++;
- if (pagesFetched === startPage) {
- items = results.data;
- break;
- }
- cursor = results.cursor;
- if (!cursor && results.data.length > 0 && pagesFetched < startPage) {
- break;
- }
- } while (cursor && pagesFetched < startPage);
-
- return items;
- } catch (error) {
- this.logger.error('Failed to get traces', { error });
- throw error;
- }
+ return this.stores.traces.getTraces(args);
  }

  async batchTraceInsert({ records }: { records: Record<string, any>[] }): Promise<void> {
- this.logger.debug('Batch inserting traces', { count: records.length });
-
- if (!records.length) {
- return;
- }
+ return this.stores.traces.batchTraceInsert({ records });
+ }

- try {
- // Add 'entity' type to each record before passing to generic batchInsert
- const recordsToSave = records.map(rec => ({ entity: 'trace', ...rec }));
- await this.batchInsert({
- tableName: TABLE_TRACES,
- records: recordsToSave, // Pass records with 'entity' included
- });
- } catch (error) {
- this.logger.error('Failed to batch insert traces', { error });
- throw error;
- }
+ async getTracesPaginated(_args: StorageGetTracesArg): Promise<PaginationInfo & { traces: Trace[] }> {
+ return this.stores.traces.getTracesPaginated(_args);
  }

  // Workflow operations
@@ -784,27 +352,7 @@ export class DynamoDBStore extends MastraStorage {
  runId: string;
  snapshot: WorkflowRunState;
  }): Promise<void> {
- this.logger.debug('Persisting workflow snapshot', { workflowName, runId });
-
- try {
- const resourceId = 'resourceId' in snapshot ? snapshot.resourceId : undefined;
- const now = new Date().toISOString();
- // Prepare data including the 'entity' type
- const data = {
- entity: 'workflow_snapshot', // Add entity type
- workflow_name: workflowName,
- run_id: runId,
- snapshot: JSON.stringify(snapshot), // Stringify the snapshot object
- createdAt: now,
- updatedAt: now,
- resourceId,
- };
- // Use upsert instead of create to handle both create and update cases
- await this.service.entities.workflowSnapshot.upsert(data).go();
- } catch (error) {
- this.logger.error('Failed to persist workflow snapshot', { workflowName, runId, error });
- throw error;
- }
+ return this.stores.workflows.persistWorkflowSnapshot({ workflowName, runId, snapshot });
  }

  async loadWorkflowSnapshot({
@@ -814,29 +362,7 @@ export class DynamoDBStore extends MastraStorage {
  workflowName: string;
  runId: string;
  }): Promise<WorkflowRunState | null> {
- this.logger.debug('Loading workflow snapshot', { workflowName, runId });
-
- try {
- // Provide *all* composite key components for the primary index ('entity', 'workflow_name', 'run_id')
- const result = await this.service.entities.workflowSnapshot
- .get({
- entity: 'workflow_snapshot', // Add entity type
- workflow_name: workflowName,
- run_id: runId,
- })
- .go();
-
- if (!result.data?.snapshot) {
- // Check snapshot exists
- return null;
- }
-
- // Parse the snapshot string
- return result.data.snapshot as WorkflowRunState;
- } catch (error) {
- this.logger.error('Failed to load workflow snapshot', { workflowName, runId, error });
- throw error;
- }
+ return this.stores.workflows.loadWorkflowSnapshot({ workflowName, runId });
  }

  async getWorkflowRuns(args?: {
@@ -847,281 +373,45 @@ export class DynamoDBStore extends MastraStorage {
  offset?: number;
  resourceId?: string;
  }): Promise<WorkflowRuns> {
- this.logger.debug('Getting workflow runs', { args });
-
- try {
- // Default values
- const limit = args?.limit || 10;
- const offset = args?.offset || 0;
-
- let query;
-
- if (args?.workflowName) {
- // Query by workflow name using the primary index
- // Provide *all* composite key components for the PK ('entity', 'workflow_name')
- query = this.service.entities.workflowSnapshot.query.primary({
- entity: 'workflow_snapshot', // Add entity type
- workflow_name: args.workflowName,
- });
- } else {
- // If no workflow name, we need to scan
- // This is not ideal for production with large datasets
- this.logger.warn('Performing a scan operation on workflow snapshots - consider using a more specific query');
- query = this.service.entities.workflowSnapshot.scan; // Scan still uses the service entity
- }
-
- const allMatchingSnapshots: WorkflowSnapshotDBItem[] = [];
- let cursor: string | null = null;
- const DYNAMODB_PAGE_SIZE = 100; // Sensible page size for fetching
-
- do {
- const pageResults: { data: WorkflowSnapshotDBItem[]; cursor: string | null } = await query.go({
- limit: DYNAMODB_PAGE_SIZE,
- cursor,
- });
-
- if (pageResults.data && pageResults.data.length > 0) {
- let pageFilteredData: WorkflowSnapshotDBItem[] = pageResults.data;
-
- // Apply date filters if specified
- if (args?.fromDate || args?.toDate) {
- pageFilteredData = pageFilteredData.filter((snapshot: WorkflowSnapshotDBItem) => {
- const createdAt = new Date(snapshot.createdAt);
- if (args.fromDate && createdAt < args.fromDate) {
- return false;
- }
- if (args.toDate && createdAt > args.toDate) {
- return false;
- }
- return true;
- });
- }
-
- // Filter by resourceId if specified
- if (args?.resourceId) {
- pageFilteredData = pageFilteredData.filter((snapshot: WorkflowSnapshotDBItem) => {
- return snapshot.resourceId === args.resourceId;
- });
- }
- allMatchingSnapshots.push(...pageFilteredData);
- }
-
- cursor = pageResults.cursor;
- } while (cursor);
-
- if (!allMatchingSnapshots.length) {
- return { runs: [], total: 0 };
- }
-
- // Apply offset and limit to the accumulated filtered results
- const total = allMatchingSnapshots.length;
- const paginatedData = allMatchingSnapshots.slice(offset, offset + limit);
-
- // Format and return the results
- const runs = paginatedData.map((snapshot: WorkflowSnapshotDBItem) => this.formatWorkflowRun(snapshot));
-
- return {
- runs,
- total,
- };
- } catch (error) {
- this.logger.error('Failed to get workflow runs', { error });
- throw error;
- }
+ return this.stores.workflows.getWorkflowRuns(args);
  }

  async getWorkflowRunById(args: { runId: string; workflowName?: string }): Promise<WorkflowRun | null> {
- const { runId, workflowName } = args;
- this.logger.debug('Getting workflow run by ID', { runId, workflowName });
-
- try {
- // If we have a workflowName, we can do a direct get using the primary key
- if (workflowName) {
- this.logger.debug('WorkflowName provided, using direct GET operation.');
- const result = await this.service.entities.workflowSnapshot
- .get({
- entity: 'workflow_snapshot', // Entity type for PK
- workflow_name: workflowName,
- run_id: runId,
- })
- .go();
-
- if (!result.data) {
- return null;
- }
-
- const snapshot = result.data.snapshot;
- return {
- workflowName: result.data.workflow_name,
- runId: result.data.run_id,
- snapshot,
- createdAt: new Date(result.data.createdAt),
- updatedAt: new Date(result.data.updatedAt),
- resourceId: result.data.resourceId,
- };
- }
-
- // Otherwise, if workflowName is not provided, use the GSI on runId.
- // This is more efficient than a full table scan.
- this.logger.debug(
- 'WorkflowName not provided. Attempting to find workflow run by runId using GSI. Ensure GSI (e.g., "byRunId") is defined on the workflowSnapshot entity with run_id as its key and provisioned in DynamoDB.',
- );
-
- // IMPORTANT: This assumes a GSI (e.g., named 'byRunId') exists on the workflowSnapshot entity
- // with 'run_id' as its partition key. This GSI must be:
- // 1. Defined in your ElectroDB model (e.g., in stores/dynamodb/src/entities/index.ts).
- // 2. Provisioned in the actual DynamoDB table (e.g., via CDK/CloudFormation).
- // The query key object includes 'entity' as it's good practice with ElectroDB and single-table design,
- // aligning with how other GSIs are queried in this file.
- const result = await this.service.entities.workflowSnapshot.query
- .gsi2({ entity: 'workflow_snapshot', run_id: runId }) // Replace 'byRunId' with your actual GSI name
- .go();
-
- // If the GSI query returns multiple items (e.g., if run_id is not globally unique across all snapshots),
- // this will take the first one. The original scan logic also effectively took the first match found.
- // If run_id is guaranteed unique, result.data should contain at most one item.
- const matchingRunDbItem: WorkflowSnapshotDBItem | null =
- result.data && result.data.length > 0 ? result.data[0] : null;
-
- if (!matchingRunDbItem) {
- return null;
- }
+ return this.stores.workflows.getWorkflowRunById(args);
+ }

- const snapshot = matchingRunDbItem.snapshot;
- return {
- workflowName: matchingRunDbItem.workflow_name,
- runId: matchingRunDbItem.run_id,
- snapshot,
- createdAt: new Date(matchingRunDbItem.createdAt),
- updatedAt: new Date(matchingRunDbItem.updatedAt),
- resourceId: matchingRunDbItem.resourceId,
- };
- } catch (error) {
- this.logger.error('Failed to get workflow run by ID', { runId, workflowName, error });
- throw error;
- }
+ async getResourceById({ resourceId }: { resourceId: string }): Promise<StorageResourceType | null> {
+ return this.stores.memory.getResourceById({ resourceId });
  }

- // Helper function to format workflow run
- private formatWorkflowRun(snapshotData: WorkflowSnapshotDBItem): WorkflowRun {
- return {
- workflowName: snapshotData.workflow_name,
- runId: snapshotData.run_id,
- snapshot: snapshotData.snapshot as WorkflowRunState,
- createdAt: new Date(snapshotData.createdAt),
- updatedAt: new Date(snapshotData.updatedAt),
- resourceId: snapshotData.resourceId,
- };
+ async saveResource({ resource }: { resource: StorageResourceType }): Promise<StorageResourceType> {
+ return this.stores.memory.saveResource({ resource });
  }

- // Helper methods for entity/table mapping
- private getEntityNameForTable(tableName: TABLE_NAMES): string | null {
- const mapping: Record<TABLE_NAMES, string> = {
- [TABLE_THREADS]: 'thread',
- [TABLE_MESSAGES]: 'message',
- [TABLE_WORKFLOW_SNAPSHOT]: 'workflowSnapshot',
- [TABLE_EVALS]: 'eval',
- [TABLE_TRACES]: 'trace',
- [TABLE_RESOURCES]: 'resource',
- };
- return mapping[tableName] || null;
+ async updateResource({
+ resourceId,
+ workingMemory,
+ metadata,
+ }: {
+ resourceId: string;
+ workingMemory?: string;
+ metadata?: Record<string, any>;
+ }): Promise<StorageResourceType> {
+ return this.stores.memory.updateResource({ resourceId, workingMemory, metadata });
  }

  // Eval operations
  async getEvalsByAgentName(agentName: string, type?: 'test' | 'live'): Promise<EvalRow[]> {
- this.logger.debug('Getting evals for agent', { agentName, type });
-
- try {
- // Query evals by agent name using the GSI
- // Provide *all* composite key components for the 'byAgent' index ('entity', 'agent_name')
- const query = this.service.entities.eval.query.byAgent({ entity: 'eval', agent_name: agentName });
-
- // Fetch potentially all items in descending order, using the correct 'order' option
- const results = await query.go({ order: 'desc', limit: 100 }); // Use order: 'desc'
-
- if (!results.data.length) {
- return [];
- }
-
- // Filter by type if specified
- let filteredData = results.data;
- if (type) {
- filteredData = filteredData.filter((evalRecord: Record<string, any>) => {
- try {
- // Need to handle potential parse errors for test_info
- const testInfo =
- evalRecord.test_info && typeof evalRecord.test_info === 'string'
- ? JSON.parse(evalRecord.test_info)
- : undefined;
-
- if (type === 'test' && !testInfo) {
- return false;
- }
- if (type === 'live' && testInfo) {
- return false;
- }
- } catch (e) {
- this.logger.warn('Failed to parse test_info during filtering', { record: evalRecord, error: e });
- // Decide how to handle parse errors - exclude or include? Including for now.
- }
- return true;
- });
- }
-
- // Format the results - ElectroDB transforms most attributes, but we need to map/parse
- return filteredData.map((evalRecord: Record<string, any>) => {
- try {
- return {
- input: evalRecord.input,
- output: evalRecord.output,
- // Safely parse result and test_info
- result:
- evalRecord.result && typeof evalRecord.result === 'string' ? JSON.parse(evalRecord.result) : undefined,
- agentName: evalRecord.agent_name,
- createdAt: evalRecord.created_at, // Keep as string from DDB?
- metricName: evalRecord.metric_name,
- instructions: evalRecord.instructions,
- runId: evalRecord.run_id,
- globalRunId: evalRecord.global_run_id,
- testInfo:
- evalRecord.test_info && typeof evalRecord.test_info === 'string'
- ? JSON.parse(evalRecord.test_info)
- : undefined,
- } as EvalRow;
- } catch (parseError) {
- this.logger.error('Failed to parse eval record', { record: evalRecord, error: parseError });
- // Return a partial record or null/undefined on error?
- // Returning partial for now, might need adjustment based on requirements.
- return {
- agentName: evalRecord.agent_name,
- createdAt: evalRecord.created_at,
- runId: evalRecord.run_id,
- globalRunId: evalRecord.global_run_id,
- } as Partial<EvalRow> as EvalRow; // Cast needed for return type
- }
- });
- } catch (error) {
- this.logger.error('Failed to get evals by agent name', { agentName, type, error });
- throw error;
- }
+ return this.stores.legacyEvals.getEvalsByAgentName(agentName, type);
  }

- async getTracesPaginated(_args: StorageGetTracesArg): Promise<PaginationInfo & { traces: Trace[] }> {
- throw new Error('Method not implemented.');
- }
-
- async getThreadsByResourceIdPaginated(_args: {
- resourceId: string;
- page?: number;
- perPage?: number;
- }): Promise<PaginationInfo & { threads: StorageThreadType[] }> {
- throw new Error('Method not implemented.');
- }
-
- async getMessagesPaginated(
- _args: StorageGetMessagesArg,
- ): Promise<PaginationInfo & { messages: MastraMessageV1[] | MastraMessageV2[] }> {
- throw new Error('Method not implemented.');
+ async getEvals(
+ options: {
+ agentName?: string;
+ type?: 'test' | 'live';
+ } & PaginationArgs,
+ ): Promise<PaginationInfo & { evals: EvalRow[] }> {
+ return this.stores.legacyEvals.getEvals(options);
  }

  /**
@@ -1134,20 +424,60 @@ export class DynamoDBStore extends MastraStorage {
  this.client.destroy();
  this.logger.debug('DynamoDB client closed successfully for store:', { name: this.name });
  } catch (error) {
- this.logger.error('Error closing DynamoDB client for store:', { name: this.name, error });
- // Optionally re-throw or handle as appropriate for your application's error handling strategy
- throw error;
+ throw new MastraError(
+ {
+ id: 'STORAGE_DYNAMODB_STORE_CLOSE_FAILED',
+ domain: ErrorDomain.STORAGE,
+ category: ErrorCategory.THIRD_PARTY,
+ },
+ error,
+ );
  }
  }
+ /**
+ * SCORERS - Not implemented
+ */
+ async getScoreById({ id: _id }: { id: string }): Promise<ScoreRowData | null> {
+ return this.stores.scores.getScoreById({ id: _id });
+ }

- async updateMessages(_args: {
- messages: Partial<Omit<MastraMessageV2, 'createdAt'>> &
- {
- id: string;
- content?: { metadata?: MastraMessageContentV2['metadata']; content?: MastraMessageContentV2['content'] };
- }[];
- }): Promise<MastraMessageV2[]> {
- this.logger.error('updateMessages is not yet implemented in DynamoDBStore');
- throw new Error('Method not implemented');
+ async saveScore(_score: ScoreRowData): Promise<{ score: ScoreRowData }> {
+ return this.stores.scores.saveScore(_score);
+ }
+
+ async getScoresByRunId({
+ runId: _runId,
+ pagination: _pagination,
+ }: {
+ runId: string;
+ pagination: StoragePagination;
+ }): Promise<{ pagination: PaginationInfo; scores: ScoreRowData[] }> {
+ return this.stores.scores.getScoresByRunId({ runId: _runId, pagination: _pagination });
+ }
+
+ async getScoresByEntityId({
+ entityId: _entityId,
+ entityType: _entityType,
+ pagination: _pagination,
+ }: {
+ pagination: StoragePagination;
+ entityId: string;
+ entityType: string;
+ }): Promise<{ pagination: PaginationInfo; scores: ScoreRowData[] }> {
+ return this.stores.scores.getScoresByEntityId({
+ entityId: _entityId,
+ entityType: _entityType,
+ pagination: _pagination,
+ });
+ }
+
+ async getScoresByScorerId({
+ scorerId: _scorerId,
+ pagination: _pagination,
+ }: {
+ scorerId: string;
+ pagination: StoragePagination;
+ }): Promise<{ pagination: PaginationInfo; scores: ScoreRowData[] }> {
+ return this.stores.scores.getScoresByScorerId({ scorerId: _scorerId, pagination: _pagination });
  }
  }
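
For orientation, a minimal usage sketch of the refactored store follows. It is based only on the constructor, config validation, and method signatures visible in this diff; the name, table name, and region values are illustrative assumptions, and the import path is simply the package name.

// Sketch only: constructing the refactored DynamoDBStore and calling a delegated method.
// The 'name' and config values below are illustrative; the physical table must already
// exist, since the store validates access to an externally managed single table.
import { DynamoDBStore } from '@mastra/dynamodb';

const store = new DynamoDBStore({
  name: 'dynamodb',
  config: {
    tableName: 'mastra-single-table', // externally provisioned ElectroDB single-table design
    region: 'us-east-1',              // optional; defaults to 'us-east-1'
    // endpoint and credentials are optional, e.g. for DynamoDB Local
  },
});

await store.init();

// Public methods keep their previous signatures but now delegate to this.stores.*,
// e.g. getThreadById -> stores.memory, getTraces -> stores.traces, getEvals -> stores.legacyEvals.
const thread = await store.getThreadById({ threadId: 'thread-123' });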