@mastra/dynamodb 0.0.0-pass-headers-for-create-mastra-client-20250529200245 → 0.0.0-scorers-api-v2-20250801171841

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. package/LICENSE.md +11 -42
  2. package/dist/entities/eval.d.ts +102 -0
  3. package/dist/entities/eval.d.ts.map +1 -0
  4. package/dist/entities/index.d.ts +746 -0
  5. package/dist/entities/index.d.ts.map +1 -0
  6. package/dist/entities/message.d.ts +100 -0
  7. package/dist/entities/message.d.ts.map +1 -0
  8. package/dist/entities/resource.d.ts +54 -0
  9. package/dist/entities/resource.d.ts.map +1 -0
  10. package/dist/entities/score.d.ts +229 -0
  11. package/dist/entities/score.d.ts.map +1 -0
  12. package/dist/entities/thread.d.ts +69 -0
  13. package/dist/entities/thread.d.ts.map +1 -0
  14. package/dist/entities/trace.d.ts +127 -0
  15. package/dist/entities/trace.d.ts.map +1 -0
  16. package/dist/entities/utils.d.ts +21 -0
  17. package/dist/entities/utils.d.ts.map +1 -0
  18. package/dist/entities/workflow-snapshot.d.ts +74 -0
  19. package/dist/entities/workflow-snapshot.d.ts.map +1 -0
  20. package/dist/index.cjs +2224 -478
  21. package/dist/index.cjs.map +1 -0
  22. package/dist/index.d.ts +2 -0
  23. package/dist/index.d.ts.map +1 -0
  24. package/dist/index.js +2213 -467
  25. package/dist/index.js.map +1 -0
  26. package/dist/storage/domains/legacy-evals/index.d.ts +19 -0
  27. package/dist/storage/domains/legacy-evals/index.d.ts.map +1 -0
  28. package/dist/storage/domains/memory/index.d.ts +77 -0
  29. package/dist/storage/domains/memory/index.d.ts.map +1 -0
  30. package/dist/storage/domains/operations/index.d.ts +69 -0
  31. package/dist/storage/domains/operations/index.d.ts.map +1 -0
  32. package/dist/storage/domains/score/index.d.ts +42 -0
  33. package/dist/storage/domains/score/index.d.ts.map +1 -0
  34. package/dist/storage/domains/traces/index.d.ts +28 -0
  35. package/dist/storage/domains/traces/index.d.ts.map +1 -0
  36. package/dist/storage/domains/workflows/index.d.ts +32 -0
  37. package/dist/storage/domains/workflows/index.d.ts.map +1 -0
  38. package/dist/storage/index.d.ts +220 -0
  39. package/dist/storage/index.d.ts.map +1 -0
  40. package/package.json +32 -18
  41. package/src/entities/index.ts +5 -1
  42. package/src/entities/resource.ts +57 -0
  43. package/src/entities/score.ts +317 -0
  44. package/src/storage/domains/legacy-evals/index.ts +243 -0
  45. package/src/storage/domains/memory/index.ts +894 -0
  46. package/src/storage/domains/operations/index.ts +433 -0
  47. package/src/storage/domains/score/index.ts +288 -0
  48. package/src/storage/domains/traces/index.ts +286 -0
  49. package/src/storage/domains/workflows/index.ts +297 -0
  50. package/src/storage/index.test.ts +1347 -980
  51. package/src/storage/index.ts +236 -812
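The bulk of this diff refactors DynamoDBStore from a monolithic class into a thin facade: the constructor now wires up per-domain stores (operations, memory, traces, workflows, scores, legacyEvals) and the public methods delegate to them, with failures wrapped in MastraError. A minimal usage sketch of the refactored store, assuming a package-root export and an externally provisioned single table (per the CDK/CloudFormation comments in the source); the table name, workflow name, and thread id below are illustrative:

  import { DynamoDBStore } from '@mastra/dynamodb'; // assumed entry-point export

  async function main() {
    // Single-table design: the physical DynamoDB table is created outside the store (e.g. CDK/CloudFormation).
    const store = new DynamoDBStore({
      name: 'dynamodb',
      config: { tableName: 'mastra-table', region: 'us-east-1' },
    });

    // Facade methods delegate to the domain stores created in the constructor,
    // e.g. stores.memory for threads/messages and stores.workflows for snapshots.
    const thread = await store.getThreadById({ threadId: 'thread-123' });
    const runs = await store.getWorkflowRuns({ workflowName: 'my-workflow', limit: 10 });
    console.log(thread, runs.total);
  }

  main().catch(console.error);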
@@ -1,19 +1,35 @@
  import { DynamoDBClient, DescribeTableCommand } from '@aws-sdk/client-dynamodb';
  import { DynamoDBDocumentClient } from '@aws-sdk/lib-dynamodb';
- import type { StorageThreadType, WorkflowRunState, MastraMessageV1 } from '@mastra/core';
- import type { MastraMessageV2 } from '@mastra/core/agent';
- import { MessageList } from '@mastra/core/agent';
- import {
- MastraStorage,
- TABLE_THREADS,
- TABLE_MESSAGES,
- TABLE_WORKFLOW_SNAPSHOT,
- TABLE_EVALS,
- TABLE_TRACES,
+ import type { MastraMessageContentV2 } from '@mastra/core/agent';
+ import { ErrorCategory, ErrorDomain, MastraError } from '@mastra/core/error';
+ import type { StorageThreadType, MastraMessageV2, MastraMessageV1 } from '@mastra/core/memory';
+
+ import type { ScoreRowData } from '@mastra/core/scores';
+ import { MastraStorage } from '@mastra/core/storage';
+ import type {
+ EvalRow,
+ StorageGetMessagesArg,
+ WorkflowRun,
+ WorkflowRuns,
+ TABLE_NAMES,
+ StorageGetTracesArg,
+ PaginationInfo,
+ StorageColumn,
+ StoragePagination,
+ StorageDomains,
+ PaginationArgs,
+ StorageResourceType,
  } from '@mastra/core/storage';
- import type { EvalRow, StorageGetMessagesArg, WorkflowRun, WorkflowRuns, TABLE_NAMES } from '@mastra/core/storage';
+ import type { Trace } from '@mastra/core/telemetry';
+ import type { WorkflowRunState } from '@mastra/core/workflows';
  import type { Service } from 'electrodb';
  import { getElectroDbService } from '../entities';
+ import { LegacyEvalsDynamoDB } from './domains/legacy-evals';
+ import { MemoryStorageDynamoDB } from './domains/memory';
+ import { StoreOperationsDynamoDB } from './domains/operations';
+ import { ScoresStorageDynamoDB } from './domains/score';
+ import { TracesStorageDynamoDB } from './domains/traces';
+ import { WorkflowStorageDynamoDB } from './domains/workflows';

  export interface DynamoDBStoreConfig {
  region?: string;
@@ -30,79 +46,83 @@ type MastraService = Service<Record<string, any>> & {
  [key: string]: any;
  };

- // Define the structure for workflow snapshot items retrieved from DynamoDB
- interface WorkflowSnapshotDBItem {
- entity: string; // Typically 'workflow_snapshot'
- workflow_name: string;
- run_id: string;
- snapshot: WorkflowRunState; // Should be WorkflowRunState after ElectroDB get attribute processing
- createdAt: string; // ISO Date string
- updatedAt: string; // ISO Date string
- resourceId?: string;
- }
-
  export class DynamoDBStore extends MastraStorage {
  private tableName: string;
  private client: DynamoDBDocumentClient;
  private service: MastraService;
  protected hasInitialized: Promise<boolean> | null = null;
+ stores: StorageDomains;

  constructor({ name, config }: { name: string; config: DynamoDBStoreConfig }) {
  super({ name });

  // Validate required config
- if (!config.tableName || typeof config.tableName !== 'string' || config.tableName.trim() === '') {
- throw new Error('DynamoDBStore: config.tableName must be provided and cannot be empty.');
- }
- // Validate tableName characters (basic check)
- if (!/^[a-zA-Z0-9_.-]{3,255}$/.test(config.tableName)) {
- throw new Error(
- `DynamoDBStore: config.tableName "${config.tableName}" contains invalid characters or is not between 3 and 255 characters long.`,
- );
- }
+ try {
+ if (!config.tableName || typeof config.tableName !== 'string' || config.tableName.trim() === '') {
+ throw new Error('DynamoDBStore: config.tableName must be provided and cannot be empty.');
+ }
+ // Validate tableName characters (basic check)
+ if (!/^[a-zA-Z0-9_.-]{3,255}$/.test(config.tableName)) {
+ throw new Error(
+ `DynamoDBStore: config.tableName "${config.tableName}" contains invalid characters or is not between 3 and 255 characters long.`,
+ );
+ }

- const dynamoClient = new DynamoDBClient({
- region: config.region || 'us-east-1',
- endpoint: config.endpoint,
- credentials: config.credentials,
- });
+ const dynamoClient = new DynamoDBClient({
+ region: config.region || 'us-east-1',
+ endpoint: config.endpoint,
+ credentials: config.credentials,
+ });

- this.tableName = config.tableName;
- this.client = DynamoDBDocumentClient.from(dynamoClient);
- this.service = getElectroDbService(this.client, this.tableName) as MastraService;
+ this.tableName = config.tableName;
+ this.client = DynamoDBDocumentClient.from(dynamoClient);
+ this.service = getElectroDbService(this.client, this.tableName) as MastraService;

- // We're using a single table design with ElectroDB,
- // so we don't need to create multiple tables
- }
+ const operations = new StoreOperationsDynamoDB({
+ service: this.service,
+ tableName: this.tableName,
+ client: this.client,
+ });

- /**
- * This method is modified for DynamoDB with ElectroDB single-table design.
- * It assumes the table is created and managed externally via CDK/CloudFormation.
- *
- * This implementation only validates that the required table exists and is accessible.
- * No table creation is attempted - we simply check if we can access the table.
- */
- async createTable({ tableName }: { tableName: TABLE_NAMES; schema: Record<string, any> }): Promise<void> {
- this.logger.debug('Validating access to externally managed table', { tableName, physicalTable: this.tableName });
+ const traces = new TracesStorageDynamoDB({ service: this.service, operations });

- // For single-table design, we just need to verify the table exists and is accessible
- try {
- const tableExists = await this.validateTableExists();
+ const workflows = new WorkflowStorageDynamoDB({ service: this.service });

- if (!tableExists) {
- this.logger.error(
- `Table ${this.tableName} does not exist or is not accessible. It should be created via CDK/CloudFormation.`,
- );
- throw new Error(
- `Table ${this.tableName} does not exist or is not accessible. Ensure it's created via CDK/CloudFormation before using this store.`,
- );
- }
+ const memory = new MemoryStorageDynamoDB({ service: this.service });
+
+ const scores = new ScoresStorageDynamoDB({ service: this.service });

- this.logger.debug(`Table ${this.tableName} exists and is accessible`);
+ this.stores = {
+ operations,
+ legacyEvals: new LegacyEvalsDynamoDB({ service: this.service, tableName: this.tableName }),
+ traces,
+ workflows,
+ memory,
+ scores,
+ };
  } catch (error) {
- this.logger.error('Error validating table access', { tableName: this.tableName, error });
- throw error;
+ throw new MastraError(
+ {
+ id: 'STORAGE_DYNAMODB_STORE_CONSTRUCTOR_FAILED',
+ domain: ErrorDomain.STORAGE,
+ category: ErrorCategory.USER,
+ },
+ error,
+ );
  }
+
+ // We're using a single table design with ElectroDB,
+ // so we don't need to create multiple tables
+ }
+
+ get supports() {
+ return {
+ selectByIncludeResourceScope: true,
+ resourceWorkingMemory: true,
+ hasColumn: false,
+ createTable: false,
+ deleteMessages: false,
+ };
  }

  /**
@@ -127,7 +147,15 @@ export class DynamoDBStore extends MastraStorage {
  }

  // For other errors (like permissions issues), we should throw
- throw error;
+ throw new MastraError(
+ {
+ id: 'STORAGE_DYNAMODB_STORE_VALIDATE_TABLE_EXISTS_FAILED',
+ domain: ErrorDomain.STORAGE,
+ category: ErrorCategory.THIRD_PARTY,
+ details: { tableName: this.tableName },
+ },
+ error,
+ );
  }
  }

@@ -154,7 +182,15 @@ export class DynamoDBStore extends MastraStorage {
  // The error has already been handled by _performInitializationAndStore
  // (i.e., this.hasInitialized was reset). Re-throwing here ensures
  // the caller of init() is aware of the failure.
- throw error;
+ throw new MastraError(
+ {
+ id: 'STORAGE_DYNAMODB_STORE_INIT_FAILED',
+ domain: ErrorDomain.STORAGE,
+ category: ErrorCategory.THIRD_PARTY,
+ details: { tableName: this.tableName },
+ },
+ error,
+ );
  }
  }

@@ -181,265 +217,49 @@ export class DynamoDBStore extends MastraStorage {
181
217
  });
182
218
  }
183
219
 
184
- /**
185
- * Clear all items from a logical "table" (entity type)
186
- */
187
- async clearTable({ tableName }: { tableName: TABLE_NAMES }): Promise<void> {
188
- this.logger.debug('DynamoDB clearTable called', { tableName });
189
-
190
- const entityName = this.getEntityNameForTable(tableName);
191
- if (!entityName || !this.service.entities[entityName]) {
192
- throw new Error(`No entity defined for ${tableName}`);
193
- }
194
-
195
- try {
196
- // Scan requires no key, just uses the entity handler
197
- const result = await this.service.entities[entityName].scan.go({ pages: 'all' }); // Get all pages
198
-
199
- if (!result.data.length) {
200
- this.logger.debug(`No records found to clear for ${tableName}`);
201
- return;
202
- }
203
-
204
- this.logger.debug(`Found ${result.data.length} records to delete for ${tableName}`);
205
-
206
- // ElectroDB batch delete expects the key components for each item
207
- const keysToDelete = result.data.map((item: any) => {
208
- const key: { entity: string; [key: string]: any } = { entity: entityName };
209
-
210
- // Construct the key based on the specific entity's primary key structure
211
- switch (entityName) {
212
- case 'thread':
213
- if (!item.id) throw new Error(`Missing required key 'id' for entity 'thread'`);
214
- key.id = item.id;
215
- break;
216
- case 'message':
217
- if (!item.id) throw new Error(`Missing required key 'id' for entity 'message'`);
218
- key.id = item.id;
219
- break;
220
- case 'workflowSnapshot':
221
- if (!item.workflow_name)
222
- throw new Error(`Missing required key 'workflow_name' for entity 'workflowSnapshot'`);
223
- if (!item.run_id) throw new Error(`Missing required key 'run_id' for entity 'workflowSnapshot'`);
224
- key.workflow_name = item.workflow_name;
225
- key.run_id = item.run_id;
226
- break;
227
- case 'eval':
228
- // Assuming 'eval' uses 'run_id' or another unique identifier as part of its PK
229
- // Adjust based on the actual primary key defined in getElectroDbService
230
- if (!item.run_id) throw new Error(`Missing required key 'run_id' for entity 'eval'`);
231
- // Add other key components if necessary for 'eval' PK
232
- key.run_id = item.run_id;
233
- // Example: if global_run_id is also part of PK:
234
- // if (!item.global_run_id) throw new Error(`Missing required key 'global_run_id' for entity 'eval'`);
235
- // key.global_run_id = item.global_run_id;
236
- break;
237
- case 'trace':
238
- // Assuming 'trace' uses 'id' as its PK
239
- // Adjust based on the actual primary key defined in getElectroDbService
240
- if (!item.id) throw new Error(`Missing required key 'id' for entity 'trace'`);
241
- key.id = item.id;
242
- break;
243
- default:
244
- // Handle unknown entity types - log a warning or throw an error
245
- this.logger.warn(`Unknown entity type encountered during clearTable: ${entityName}`);
246
- // Optionally throw an error if strict handling is required
247
- throw new Error(`Cannot construct delete key for unknown entity type: ${entityName}`);
248
- }
220
+ async createTable({ tableName, schema }: { tableName: TABLE_NAMES; schema: Record<string, any> }): Promise<void> {
221
+ return this.stores.operations.createTable({ tableName, schema });
222
+ }
249
223
 
250
- return key;
251
- });
224
+ async alterTable(_args: {
225
+ tableName: TABLE_NAMES;
226
+ schema: Record<string, StorageColumn>;
227
+ ifNotExists: string[];
228
+ }): Promise<void> {
229
+ return this.stores.operations.alterTable(_args);
230
+ }
252
231
 
253
- const batchSize = 25;
254
- for (let i = 0; i < keysToDelete.length; i += batchSize) {
255
- const batchKeys = keysToDelete.slice(i, i + batchSize);
256
- // Pass the array of key objects to delete
257
- await this.service.entities[entityName].delete(batchKeys).go();
258
- }
232
+ async clearTable({ tableName }: { tableName: TABLE_NAMES }): Promise<void> {
233
+ return this.stores.operations.clearTable({ tableName });
234
+ }
259
235
 
260
- this.logger.debug(`Successfully cleared all records for ${tableName}`);
261
- } catch (error) {
262
- this.logger.error('Failed to clear table', { tableName, error });
263
- throw error;
264
- }
236
+ async dropTable({ tableName }: { tableName: TABLE_NAMES }): Promise<void> {
237
+ return this.stores.operations.dropTable({ tableName });
265
238
  }
266
239
 
267
- /**
268
- * Insert a record into the specified "table" (entity)
269
- */
270
240
  async insert({ tableName, record }: { tableName: TABLE_NAMES; record: Record<string, any> }): Promise<void> {
271
- this.logger.debug('DynamoDB insert called', { tableName });
272
-
273
- const entityName = this.getEntityNameForTable(tableName);
274
- if (!entityName || !this.service.entities[entityName]) {
275
- throw new Error(`No entity defined for ${tableName}`);
276
- }
277
-
278
- try {
279
- // Add the entity type to the record before creating
280
- const dataToSave = { entity: entityName, ...record };
281
- await this.service.entities[entityName].create(dataToSave).go();
282
- } catch (error) {
283
- this.logger.error('Failed to insert record', { tableName, error });
284
- throw error;
285
- }
241
+ return this.stores.operations.insert({ tableName, record });
286
242
  }
287
243
 
288
- /**
289
- * Insert multiple records as a batch
290
- */
291
244
  async batchInsert({ tableName, records }: { tableName: TABLE_NAMES; records: Record<string, any>[] }): Promise<void> {
292
- this.logger.debug('DynamoDB batchInsert called', { tableName, count: records.length });
293
-
294
- const entityName = this.getEntityNameForTable(tableName);
295
- if (!entityName || !this.service.entities[entityName]) {
296
- throw new Error(`No entity defined for ${tableName}`);
297
- }
298
-
299
- // Add entity type to each record
300
- const recordsToSave = records.map(rec => ({ entity: entityName, ...rec }));
301
-
302
- // ElectroDB has batch limits of 25 items, so we need to chunk
303
- const batchSize = 25;
304
- const batches = [];
305
- for (let i = 0; i < recordsToSave.length; i += batchSize) {
306
- const batch = recordsToSave.slice(i, i + batchSize);
307
- batches.push(batch);
308
- }
309
-
310
- try {
311
- // Process each batch
312
- for (const batch of batches) {
313
- // Create each item individually within the batch
314
- for (const recordData of batch) {
315
- if (!recordData.entity) {
316
- this.logger.error('Missing entity property in record data for batchInsert', { recordData, tableName });
317
- throw new Error(`Internal error: Missing entity property during batchInsert for ${tableName}`);
318
- }
319
- // Log the object just before the create call
320
- this.logger.debug('Attempting to create record in batchInsert:', { entityName, recordData });
321
- await this.service.entities[entityName].create(recordData).go();
322
- }
323
- // Original batch call: await this.service.entities[entityName].create(batch).go();
324
- }
325
- } catch (error) {
326
- this.logger.error('Failed to batch insert records', { tableName, error });
327
- throw error;
328
- }
245
+ return this.stores.operations.batchInsert({ tableName, records });
329
246
  }
330
247
 
331
- /**
332
- * Load a record by its keys
333
- */
334
248
  async load<R>({ tableName, keys }: { tableName: TABLE_NAMES; keys: Record<string, string> }): Promise<R | null> {
335
- this.logger.debug('DynamoDB load called', { tableName, keys });
336
-
337
- const entityName = this.getEntityNameForTable(tableName);
338
- if (!entityName || !this.service.entities[entityName]) {
339
- throw new Error(`No entity defined for ${tableName}`);
340
- }
341
-
342
- try {
343
- // Add the entity type to the key object for the .get call
344
- const keyObject = { entity: entityName, ...keys };
345
- const result = await this.service.entities[entityName].get(keyObject).go();
346
-
347
- if (!result.data) {
348
- return null;
349
- }
350
-
351
- // Add parsing logic if necessary (e.g., for metadata)
352
- let data = result.data;
353
- if (data.metadata && typeof data.metadata === 'string') {
354
- try {
355
- // data.metadata = JSON.parse(data.metadata); // REMOVED by AI
356
- } catch {
357
- /* ignore parse error */
358
- }
359
- }
360
- // Add similar parsing for other JSON fields if needed based on entity type
361
-
362
- return data as R;
363
- } catch (error) {
364
- this.logger.error('Failed to load record', { tableName, keys, error });
365
- throw error;
366
- }
249
+ return this.stores.operations.load({ tableName, keys });
367
250
  }
368
251
 
369
252
  // Thread operations
370
253
  async getThreadById({ threadId }: { threadId: string }): Promise<StorageThreadType | null> {
371
- this.logger.debug('Getting thread by ID', { threadId });
372
- try {
373
- const result = await this.service.entities.thread.get({ entity: 'thread', id: threadId }).go();
374
-
375
- if (!result.data) {
376
- return null;
377
- }
378
-
379
- // ElectroDB handles the transformation with attribute getters
380
- const data = result.data;
381
- return {
382
- ...data,
383
- // metadata: data.metadata ? JSON.parse(data.metadata) : undefined, // REMOVED by AI
384
- // metadata is already transformed by the entity's getter
385
- } as StorageThreadType;
386
- } catch (error) {
387
- this.logger.error('Failed to get thread by ID', { threadId, error });
388
- throw error;
389
- }
254
+ return this.stores.memory.getThreadById({ threadId });
390
255
  }
391
256
 
392
257
  async getThreadsByResourceId({ resourceId }: { resourceId: string }): Promise<StorageThreadType[]> {
393
- this.logger.debug('Getting threads by resource ID', { resourceId });
394
- try {
395
- const result = await this.service.entities.thread.query.byResource({ entity: 'thread', resourceId }).go();
396
-
397
- if (!result.data.length) {
398
- return [];
399
- }
400
-
401
- // ElectroDB handles the transformation with attribute getters
402
- return result.data.map((data: any) => ({
403
- ...data,
404
- // metadata: data.metadata ? JSON.parse(data.metadata) : undefined, // REMOVED by AI
405
- // metadata is already transformed by the entity's getter
406
- })) as StorageThreadType[];
407
- } catch (error) {
408
- this.logger.error('Failed to get threads by resource ID', { resourceId, error });
409
- throw error;
410
- }
258
+ return this.stores.memory.getThreadsByResourceId({ resourceId });
411
259
  }
412
260
 
413
261
  async saveThread({ thread }: { thread: StorageThreadType }): Promise<StorageThreadType> {
414
- this.logger.debug('Saving thread', { threadId: thread.id });
415
-
416
- const now = new Date();
417
-
418
- const threadData = {
419
- entity: 'thread',
420
- id: thread.id,
421
- resourceId: thread.resourceId,
422
- title: thread.title || `Thread ${thread.id}`,
423
- createdAt: thread.createdAt?.toISOString() || now.toISOString(),
424
- updatedAt: now.toISOString(),
425
- metadata: thread.metadata ? JSON.stringify(thread.metadata) : undefined,
426
- };
427
-
428
- try {
429
- await this.service.entities.thread.create(threadData).go();
430
-
431
- return {
432
- id: thread.id,
433
- resourceId: thread.resourceId,
434
- title: threadData.title,
435
- createdAt: thread.createdAt || now,
436
- updatedAt: now,
437
- metadata: thread.metadata,
438
- };
439
- } catch (error) {
440
- this.logger.error('Failed to save thread', { threadId: thread.id, error });
441
- throw error;
442
- }
262
+ return this.stores.memory.saveThread({ thread });
443
263
  }
444
264
 
445
265
  async updateThread({
@@ -451,68 +271,11 @@ export class DynamoDBStore extends MastraStorage {
  title: string;
  metadata: Record<string, unknown>;
  }): Promise<StorageThreadType> {
- this.logger.debug('Updating thread', { threadId: id });
-
- try {
- // First, get the existing thread to merge with updates
- const existingThread = await this.getThreadById({ threadId: id });
-
- if (!existingThread) {
- throw new Error(`Thread not found: ${id}`);
- }
-
- const now = new Date();
-
- // Prepare the update
- // Define type for only the fields we are actually updating
- type ThreadUpdatePayload = {
- updatedAt: string; // ISO String for DDB
- title?: string;
- metadata?: string; // Stringified JSON for DDB
- };
- const updateData: ThreadUpdatePayload = {
- updatedAt: now.toISOString(),
- };
-
- if (title) {
- updateData.title = title;
- }
-
- if (metadata) {
- updateData.metadata = JSON.stringify(metadata); // Stringify metadata for update
- }
-
- // Update the thread using the primary key
- await this.service.entities.thread.update({ entity: 'thread', id }).set(updateData).go();
-
- // Return the potentially updated thread object
- return {
- ...existingThread,
- title: title || existingThread.title,
- metadata: metadata || existingThread.metadata,
- updatedAt: now,
- };
- } catch (error) {
- this.logger.error('Failed to update thread', { threadId: id, error });
- throw error;
- }
+ return this.stores.memory.updateThread({ id, title, metadata });
  }

  async deleteThread({ threadId }: { threadId: string }): Promise<void> {
- this.logger.debug('Deleting thread', { threadId });
-
- try {
- // Delete the thread using the primary key
- await this.service.entities.thread.delete({ entity: 'thread', id: threadId }).go();
-
- // Note: In a production system, you might want to:
- // 1. Delete all messages associated with this thread
- // 2. Delete any vector embeddings related to this thread
- // These would be additional operations
- } catch (error) {
- this.logger.error('Failed to delete thread', { threadId, error });
- throw error;
- }
+ return this.stores.memory.deleteThread({ threadId });
  }

  // Message operations
@@ -524,119 +287,39 @@ export class DynamoDBStore extends MastraStorage {
524
287
  selectBy,
525
288
  format,
526
289
  }: StorageGetMessagesArg & { format?: 'v1' | 'v2' }): Promise<MastraMessageV1[] | MastraMessageV2[]> {
527
- this.logger.debug('Getting messages', { threadId, selectBy });
528
-
529
- try {
530
- // Query messages by thread ID using the GSI
531
- // Provide *all* composite key components for the 'byThread' index ('entity', 'threadId')
532
- const query = this.service.entities.message.query.byThread({ entity: 'message', threadId });
533
-
534
- // Apply the 'last' limit if provided
535
- if (selectBy?.last && typeof selectBy.last === 'number') {
536
- // Use ElectroDB's limit parameter (descending sort assumed on GSI SK)
537
- // Ensure GSI sk (createdAt) is sorted descending for 'last' to work correctly
538
- // Assuming default sort is ascending on SK, use reverse: true for descending
539
- const results = await query.go({ limit: selectBy.last, reverse: true });
540
- // Use arrow function in map to preserve 'this' context for parseMessageData
541
- const list = new MessageList({ threadId, resourceId }).add(
542
- results.data.map((data: any) => this.parseMessageData(data)),
543
- 'memory',
544
- );
545
- if (format === `v2`) return list.get.all.mastra();
546
- return list.get.all.v1();
547
- }
548
-
549
- // If no limit specified, get all messages (potentially paginated by ElectroDB)
550
- // Consider adding default limit or handling pagination if needed
551
- const results = await query.go();
552
- const list = new MessageList({ threadId, resourceId }).add(
553
- results.data.map((data: any) => this.parseMessageData(data)),
554
- 'memory',
555
- );
556
- if (format === `v2`) return list.get.all.mastra();
557
- return list.get.all.v1();
558
- } catch (error) {
559
- this.logger.error('Failed to get messages', { threadId, error });
560
- throw error;
561
- }
290
+ return this.stores.memory.getMessages({ threadId, resourceId, selectBy, format });
562
291
  }
292
+
563
293
  async saveMessages(args: { messages: MastraMessageV1[]; format?: undefined | 'v1' }): Promise<MastraMessageV1[]>;
564
294
  async saveMessages(args: { messages: MastraMessageV2[]; format: 'v2' }): Promise<MastraMessageV2[]>;
565
295
  async saveMessages(
566
296
  args: { messages: MastraMessageV1[]; format?: undefined | 'v1' } | { messages: MastraMessageV2[]; format: 'v2' },
567
297
  ): Promise<MastraMessageV2[] | MastraMessageV1[]> {
568
- const { messages, format = 'v1' } = args;
569
- this.logger.debug('Saving messages', { count: messages.length });
570
-
571
- if (!messages.length) {
572
- return [];
573
- }
574
-
575
- // Ensure 'entity' is added and complex fields are handled
576
- const messagesToSave = messages.map(msg => {
577
- const now = new Date().toISOString();
578
- return {
579
- entity: 'message', // Add entity type
580
- id: msg.id,
581
- threadId: msg.threadId,
582
- role: msg.role,
583
- type: msg.type,
584
- resourceId: msg.resourceId,
585
- // Ensure complex fields are stringified if not handled by attribute setters
586
- content: typeof msg.content === 'string' ? msg.content : JSON.stringify(msg.content),
587
- toolCallArgs: `toolCallArgs` in msg && msg.toolCallArgs ? JSON.stringify(msg.toolCallArgs) : undefined,
588
- toolCallIds: `toolCallIds` in msg && msg.toolCallIds ? JSON.stringify(msg.toolCallIds) : undefined,
589
- toolNames: `toolNames` in msg && msg.toolNames ? JSON.stringify(msg.toolNames) : undefined,
590
- createdAt: msg.createdAt instanceof Date ? msg.createdAt.toISOString() : msg.createdAt || now,
591
- updatedAt: now, // Add updatedAt
592
- };
593
- });
594
-
595
- try {
596
- // Process messages in batch
597
- const batchSize = 25; // DynamoDB batch limits
598
- const batches = [];
599
-
600
- for (let i = 0; i < messagesToSave.length; i += batchSize) {
601
- const batch = messagesToSave.slice(i, i + batchSize);
602
- batches.push(batch);
603
- }
298
+ return this.stores.memory.saveMessages(args);
299
+ }
604
300
 
605
- // Process each batch
606
- for (const batch of batches) {
607
- // Try creating each item individually instead of passing the whole batch
608
- for (const messageData of batch) {
609
- // Ensure each item has the entity property before sending
610
- if (!messageData.entity) {
611
- this.logger.error('Missing entity property in message data for create', { messageData });
612
- throw new Error('Internal error: Missing entity property during saveMessages');
613
- }
614
- await this.service.entities.message.create(messageData).go();
615
- }
616
- // Original batch call: await this.service.entities.message.create(batch).go();
617
- }
301
+ async getThreadsByResourceIdPaginated(args: {
302
+ resourceId: string;
303
+ page: number;
304
+ perPage: number;
305
+ }): Promise<PaginationInfo & { threads: StorageThreadType[] }> {
306
+ return this.stores.memory.getThreadsByResourceIdPaginated(args);
307
+ }
618
308
 
619
- const list = new MessageList().add(messages, 'memory');
620
- if (format === `v1`) return list.get.all.v1();
621
- return list.get.all.mastra();
622
- } catch (error) {
623
- this.logger.error('Failed to save messages', { error });
624
- throw error;
625
- }
309
+ async getMessagesPaginated(
310
+ args: StorageGetMessagesArg & { format?: 'v1' | 'v2' },
311
+ ): Promise<PaginationInfo & { messages: MastraMessageV1[] | MastraMessageV2[] }> {
312
+ return this.stores.memory.getMessagesPaginated(args);
626
313
  }
627
314
 
628
- // Helper function to parse message data (handle JSON fields)
629
- private parseMessageData(data: any): MastraMessageV2 | MastraMessageV1 {
630
- // Removed try/catch and JSON.parse logic - now handled by entity 'get' attributes
631
- // This function now primarily ensures correct typing and Date conversion.
632
- return {
633
- ...data,
634
- // Ensure dates are Date objects if needed (ElectroDB might return strings)
635
- createdAt: data.createdAt ? new Date(data.createdAt) : undefined,
636
- updatedAt: data.updatedAt ? new Date(data.updatedAt) : undefined,
637
- // Other fields like content, toolCallArgs etc. are assumed to be correctly
638
- // transformed by the ElectroDB entity getters.
639
- };
315
+ async updateMessages(_args: {
316
+ messages: Partial<Omit<MastraMessageV2, 'createdAt'>> &
317
+ {
318
+ id: string;
319
+ content?: { metadata?: MastraMessageContentV2['metadata']; content?: MastraMessageContentV2['content'] };
320
+ }[];
321
+ }): Promise<MastraMessageV2[]> {
322
+ return this.stores.memory.updateMessages(_args);
640
323
  }
641
324
 
642
325
  // Trace operations
@@ -648,66 +331,15 @@ export class DynamoDBStore extends MastraStorage {
  attributes?: Record<string, string>;
  filters?: Record<string, any>;
  }): Promise<any[]> {
- const { name, scope, page, perPage } = args;
- this.logger.debug('Getting traces', { name, scope, page, perPage });
-
- try {
- let query;
-
- // Determine which index to use based on the provided filters
- // Provide *all* composite key components for the relevant index
- if (name) {
- query = this.service.entities.trace.query.byName({ entity: 'trace', name });
- } else if (scope) {
- query = this.service.entities.trace.query.byScope({ entity: 'trace', scope });
- } else {
- this.logger.warn('Performing a scan operation on traces - consider using a more specific query');
- query = this.service.entities.trace.scan;
- }
-
- let items: any[] = [];
- let cursor = null;
- let pagesFetched = 0;
- const startPage = page > 0 ? page : 1;
-
- do {
- const results: { data: any[]; cursor: string | null } = await query.go({ cursor, limit: perPage });
- pagesFetched++;
- if (pagesFetched === startPage) {
- items = results.data;
- break;
- }
- cursor = results.cursor;
- if (!cursor && results.data.length > 0 && pagesFetched < startPage) {
- break;
- }
- } while (cursor && pagesFetched < startPage);
-
- return items;
- } catch (error) {
- this.logger.error('Failed to get traces', { error });
- throw error;
- }
+ return this.stores.traces.getTraces(args);
  }

  async batchTraceInsert({ records }: { records: Record<string, any>[] }): Promise<void> {
- this.logger.debug('Batch inserting traces', { count: records.length });
-
- if (!records.length) {
- return;
- }
+ return this.stores.traces.batchTraceInsert({ records });
+ }

- try {
- // Add 'entity' type to each record before passing to generic batchInsert
- const recordsToSave = records.map(rec => ({ entity: 'trace', ...rec }));
- await this.batchInsert({
- tableName: TABLE_TRACES,
- records: recordsToSave, // Pass records with 'entity' included
- });
- } catch (error) {
- this.logger.error('Failed to batch insert traces', { error });
- throw error;
- }
+ async getTracesPaginated(_args: StorageGetTracesArg): Promise<PaginationInfo & { traces: Trace[] }> {
+ return this.stores.traces.getTracesPaginated(_args);
  }

  // Workflow operations
@@ -720,27 +352,7 @@
  runId: string;
  snapshot: WorkflowRunState;
  }): Promise<void> {
- this.logger.debug('Persisting workflow snapshot', { workflowName, runId });
-
- try {
- const resourceId = 'resourceId' in snapshot ? snapshot.resourceId : undefined;
- const now = new Date().toISOString();
- // Prepare data including the 'entity' type
- const data = {
- entity: 'workflow_snapshot', // Add entity type
- workflow_name: workflowName,
- run_id: runId,
- snapshot: JSON.stringify(snapshot), // Stringify the snapshot object
- createdAt: now,
- updatedAt: now,
- resourceId,
- };
- // Pass the data including 'entity'
- await this.service.entities.workflowSnapshot.create(data).go();
- } catch (error) {
- this.logger.error('Failed to persist workflow snapshot', { workflowName, runId, error });
- throw error;
- }
+ return this.stores.workflows.persistWorkflowSnapshot({ workflowName, runId, snapshot });
  }

  async loadWorkflowSnapshot({
@@ -750,29 +362,7 @@
  workflowName: string;
  runId: string;
  }): Promise<WorkflowRunState | null> {
- this.logger.debug('Loading workflow snapshot', { workflowName, runId });
-
- try {
- // Provide *all* composite key components for the primary index ('entity', 'workflow_name', 'run_id')
- const result = await this.service.entities.workflowSnapshot
- .get({
- entity: 'workflow_snapshot', // Add entity type
- workflow_name: workflowName,
- run_id: runId,
- })
- .go();
-
- if (!result.data?.snapshot) {
- // Check snapshot exists
- return null;
- }
-
- // Parse the snapshot string
- return result.data.snapshot as WorkflowRunState;
- } catch (error) {
- this.logger.error('Failed to load workflow snapshot', { workflowName, runId, error });
- throw error;
- }
+ return this.stores.workflows.loadWorkflowSnapshot({ workflowName, runId });
  }

  async getWorkflowRuns(args?: {
@@ -783,262 +373,45 @@ export class DynamoDBStore extends MastraStorage {
783
373
  offset?: number;
784
374
  resourceId?: string;
785
375
  }): Promise<WorkflowRuns> {
786
- this.logger.debug('Getting workflow runs', { args });
787
-
788
- try {
789
- // Default values
790
- const limit = args?.limit || 10;
791
- const offset = args?.offset || 0;
792
-
793
- let query;
794
-
795
- if (args?.workflowName) {
796
- // Query by workflow name using the primary index
797
- // Provide *all* composite key components for the PK ('entity', 'workflow_name')
798
- query = this.service.entities.workflowSnapshot.query.primary({
799
- entity: 'workflow_snapshot', // Add entity type
800
- workflow_name: args.workflowName,
801
- });
802
- } else {
803
- // If no workflow name, we need to scan
804
- // This is not ideal for production with large datasets
805
- this.logger.warn('Performing a scan operation on workflow snapshots - consider using a more specific query');
806
- query = this.service.entities.workflowSnapshot.scan; // Scan still uses the service entity
807
- }
808
-
809
- const allMatchingSnapshots: WorkflowSnapshotDBItem[] = [];
810
- let cursor: string | null = null;
811
- const DYNAMODB_PAGE_SIZE = 100; // Sensible page size for fetching
812
-
813
- do {
814
- const pageResults: { data: WorkflowSnapshotDBItem[]; cursor: string | null } = await query.go({
815
- limit: DYNAMODB_PAGE_SIZE,
816
- cursor,
817
- });
818
-
819
- if (pageResults.data && pageResults.data.length > 0) {
820
- let pageFilteredData: WorkflowSnapshotDBItem[] = pageResults.data;
821
-
822
- // Apply date filters if specified
823
- if (args?.fromDate || args?.toDate) {
824
- pageFilteredData = pageFilteredData.filter((snapshot: WorkflowSnapshotDBItem) => {
825
- const createdAt = new Date(snapshot.createdAt);
826
- if (args.fromDate && createdAt < args.fromDate) {
827
- return false;
828
- }
829
- if (args.toDate && createdAt > args.toDate) {
830
- return false;
831
- }
832
- return true;
833
- });
834
- }
835
-
836
- // Filter by resourceId if specified
837
- if (args?.resourceId) {
838
- pageFilteredData = pageFilteredData.filter((snapshot: WorkflowSnapshotDBItem) => {
839
- return snapshot.resourceId === args.resourceId;
840
- });
841
- }
842
- allMatchingSnapshots.push(...pageFilteredData);
843
- }
844
-
845
- cursor = pageResults.cursor;
846
- } while (cursor);
847
-
848
- if (!allMatchingSnapshots.length) {
849
- return { runs: [], total: 0 };
850
- }
851
-
852
- // Apply offset and limit to the accumulated filtered results
853
- const total = allMatchingSnapshots.length;
854
- const paginatedData = allMatchingSnapshots.slice(offset, offset + limit);
855
-
856
- // Format and return the results
857
- const runs = paginatedData.map((snapshot: WorkflowSnapshotDBItem) => this.formatWorkflowRun(snapshot));
858
-
859
- return {
860
- runs,
861
- total,
862
- };
863
- } catch (error) {
864
- this.logger.error('Failed to get workflow runs', { error });
865
- throw error;
866
- }
376
+ return this.stores.workflows.getWorkflowRuns(args);
867
377
  }
868
378
 
869
379
  async getWorkflowRunById(args: { runId: string; workflowName?: string }): Promise<WorkflowRun | null> {
870
- const { runId, workflowName } = args;
871
- this.logger.debug('Getting workflow run by ID', { runId, workflowName });
872
-
873
- try {
874
- // If we have a workflowName, we can do a direct get using the primary key
875
- if (workflowName) {
876
- this.logger.debug('WorkflowName provided, using direct GET operation.');
877
- const result = await this.service.entities.workflowSnapshot
878
- .get({
879
- entity: 'workflow_snapshot', // Entity type for PK
880
- workflow_name: workflowName,
881
- run_id: runId,
882
- })
883
- .go();
884
-
885
- if (!result.data) {
886
- return null;
887
- }
888
-
889
- const snapshot = result.data.snapshot;
890
- return {
891
- workflowName: result.data.workflow_name,
892
- runId: result.data.run_id,
893
- snapshot,
894
- createdAt: new Date(result.data.createdAt),
895
- updatedAt: new Date(result.data.updatedAt),
896
- resourceId: result.data.resourceId,
897
- };
898
- }
899
-
900
- // Otherwise, if workflowName is not provided, use the GSI on runId.
901
- // This is more efficient than a full table scan.
902
- this.logger.debug(
903
- 'WorkflowName not provided. Attempting to find workflow run by runId using GSI. Ensure GSI (e.g., "byRunId") is defined on the workflowSnapshot entity with run_id as its key and provisioned in DynamoDB.',
904
- );
905
-
906
- // IMPORTANT: This assumes a GSI (e.g., named 'byRunId') exists on the workflowSnapshot entity
907
- // with 'run_id' as its partition key. This GSI must be:
908
- // 1. Defined in your ElectroDB model (e.g., in stores/dynamodb/src/entities/index.ts).
909
- // 2. Provisioned in the actual DynamoDB table (e.g., via CDK/CloudFormation).
910
- // The query key object includes 'entity' as it's good practice with ElectroDB and single-table design,
911
- // aligning with how other GSIs are queried in this file.
912
- const result = await this.service.entities.workflowSnapshot.query
913
- .gsi2({ entity: 'workflow_snapshot', run_id: runId }) // Replace 'byRunId' with your actual GSI name
914
- .go();
915
-
916
- // If the GSI query returns multiple items (e.g., if run_id is not globally unique across all snapshots),
917
- // this will take the first one. The original scan logic also effectively took the first match found.
918
- // If run_id is guaranteed unique, result.data should contain at most one item.
919
- const matchingRunDbItem: WorkflowSnapshotDBItem | null =
920
- result.data && result.data.length > 0 ? result.data[0] : null;
921
-
922
- if (!matchingRunDbItem) {
923
- return null;
924
- }
380
+ return this.stores.workflows.getWorkflowRunById(args);
381
+ }
925
382
 
926
- const snapshot = matchingRunDbItem.snapshot;
927
- return {
928
- workflowName: matchingRunDbItem.workflow_name,
929
- runId: matchingRunDbItem.run_id,
930
- snapshot,
931
- createdAt: new Date(matchingRunDbItem.createdAt),
932
- updatedAt: new Date(matchingRunDbItem.updatedAt),
933
- resourceId: matchingRunDbItem.resourceId,
934
- };
935
- } catch (error) {
936
- this.logger.error('Failed to get workflow run by ID', { runId, workflowName, error });
937
- throw error;
938
- }
383
+ async getResourceById({ resourceId }: { resourceId: string }): Promise<StorageResourceType | null> {
384
+ return this.stores.memory.getResourceById({ resourceId });
939
385
  }
940
386
 
941
- // Helper function to format workflow run
942
- private formatWorkflowRun(snapshotData: WorkflowSnapshotDBItem): WorkflowRun {
943
- return {
944
- workflowName: snapshotData.workflow_name,
945
- runId: snapshotData.run_id,
946
- snapshot: snapshotData.snapshot as WorkflowRunState,
947
- createdAt: new Date(snapshotData.createdAt),
948
- updatedAt: new Date(snapshotData.updatedAt),
949
- resourceId: snapshotData.resourceId,
950
- };
387
+ async saveResource({ resource }: { resource: StorageResourceType }): Promise<StorageResourceType> {
388
+ return this.stores.memory.saveResource({ resource });
951
389
  }
952
390
 
953
- // Helper methods for entity/table mapping
954
- private getEntityNameForTable(tableName: TABLE_NAMES): string | null {
955
- const mapping: Record<TABLE_NAMES, string> = {
956
- [TABLE_THREADS]: 'thread',
957
- [TABLE_MESSAGES]: 'message',
958
- [TABLE_WORKFLOW_SNAPSHOT]: 'workflowSnapshot',
959
- [TABLE_EVALS]: 'eval',
960
- [TABLE_TRACES]: 'trace',
961
- };
962
- return mapping[tableName] || null;
391
+ async updateResource({
392
+ resourceId,
393
+ workingMemory,
394
+ metadata,
395
+ }: {
396
+ resourceId: string;
397
+ workingMemory?: string;
398
+ metadata?: Record<string, any>;
399
+ }): Promise<StorageResourceType> {
400
+ return this.stores.memory.updateResource({ resourceId, workingMemory, metadata });
963
401
  }
964
402
 
965
403
  // Eval operations
966
404
  async getEvalsByAgentName(agentName: string, type?: 'test' | 'live'): Promise<EvalRow[]> {
967
- this.logger.debug('Getting evals for agent', { agentName, type });
968
-
969
- try {
970
- // Query evals by agent name using the GSI
971
- // Provide *all* composite key components for the 'byAgent' index ('entity', 'agent_name')
972
- const query = this.service.entities.eval.query.byAgent({ entity: 'eval', agent_name: agentName });
973
-
974
- // Fetch potentially all items in descending order, using the correct 'order' option
975
- const results = await query.go({ order: 'desc', limit: 100 }); // Use order: 'desc'
976
-
977
- if (!results.data.length) {
978
- return [];
979
- }
980
-
981
- // Filter by type if specified
982
- let filteredData = results.data;
983
- if (type) {
984
- filteredData = filteredData.filter((evalRecord: Record<string, any>) => {
985
- try {
986
- // Need to handle potential parse errors for test_info
987
- const testInfo =
988
- evalRecord.test_info && typeof evalRecord.test_info === 'string'
989
- ? JSON.parse(evalRecord.test_info)
990
- : undefined;
991
-
992
- if (type === 'test' && !testInfo) {
993
- return false;
994
- }
995
- if (type === 'live' && testInfo) {
996
- return false;
997
- }
998
- } catch (e) {
999
- this.logger.warn('Failed to parse test_info during filtering', { record: evalRecord, error: e });
1000
- // Decide how to handle parse errors - exclude or include? Including for now.
1001
- }
1002
- return true;
1003
- });
1004
- }
405
+ return this.stores.legacyEvals.getEvalsByAgentName(agentName, type);
406
+ }
1005
407
 
1006
- // Format the results - ElectroDB transforms most attributes, but we need to map/parse
1007
- return filteredData.map((evalRecord: Record<string, any>) => {
1008
- try {
1009
- return {
1010
- input: evalRecord.input,
1011
- output: evalRecord.output,
1012
- // Safely parse result and test_info
1013
- result:
1014
- evalRecord.result && typeof evalRecord.result === 'string' ? JSON.parse(evalRecord.result) : undefined,
1015
- agentName: evalRecord.agent_name,
1016
- createdAt: evalRecord.created_at, // Keep as string from DDB?
1017
- metricName: evalRecord.metric_name,
1018
- instructions: evalRecord.instructions,
1019
- runId: evalRecord.run_id,
1020
- globalRunId: evalRecord.global_run_id,
1021
- testInfo:
1022
- evalRecord.test_info && typeof evalRecord.test_info === 'string'
1023
- ? JSON.parse(evalRecord.test_info)
1024
- : undefined,
1025
- } as EvalRow;
1026
- } catch (parseError) {
1027
- this.logger.error('Failed to parse eval record', { record: evalRecord, error: parseError });
1028
- // Return a partial record or null/undefined on error?
1029
- // Returning partial for now, might need adjustment based on requirements.
1030
- return {
1031
- agentName: evalRecord.agent_name,
1032
- createdAt: evalRecord.created_at,
1033
- runId: evalRecord.run_id,
1034
- globalRunId: evalRecord.global_run_id,
1035
- } as Partial<EvalRow> as EvalRow; // Cast needed for return type
1036
- }
1037
- });
1038
- } catch (error) {
1039
- this.logger.error('Failed to get evals by agent name', { agentName, type, error });
1040
- throw error;
1041
- }
408
+ async getEvals(
409
+ options: {
410
+ agentName?: string;
411
+ type?: 'test' | 'live';
412
+ } & PaginationArgs,
413
+ ): Promise<PaginationInfo & { evals: EvalRow[] }> {
414
+ return this.stores.legacyEvals.getEvals(options);
1042
415
  }
1043
416
 
1044
417
  /**
@@ -1051,9 +424,60 @@
  this.client.destroy();
  this.logger.debug('DynamoDB client closed successfully for store:', { name: this.name });
  } catch (error) {
- this.logger.error('Error closing DynamoDB client for store:', { name: this.name, error });
- // Optionally re-throw or handle as appropriate for your application's error handling strategy
- throw error;
+ throw new MastraError(
+ {
+ id: 'STORAGE_DYNAMODB_STORE_CLOSE_FAILED',
+ domain: ErrorDomain.STORAGE,
+ category: ErrorCategory.THIRD_PARTY,
+ },
+ error,
+ );
  }
  }
+ /**
+ * SCORERS - Not implemented
+ */
+ async getScoreById({ id: _id }: { id: string }): Promise<ScoreRowData | null> {
+ return this.stores.scores.getScoreById({ id: _id });
+ }
+
+ async saveScore(_score: ScoreRowData): Promise<{ score: ScoreRowData }> {
+ return this.stores.scores.saveScore(_score);
+ }
+
+ async getScoresByRunId({
+ runId: _runId,
+ pagination: _pagination,
+ }: {
+ runId: string;
+ pagination: StoragePagination;
+ }): Promise<{ pagination: PaginationInfo; scores: ScoreRowData[] }> {
+ return this.stores.scores.getScoresByRunId({ runId: _runId, pagination: _pagination });
+ }
+
+ async getScoresByEntityId({
+ entityId: _entityId,
+ entityType: _entityType,
+ pagination: _pagination,
+ }: {
+ pagination: StoragePagination;
+ entityId: string;
+ entityType: string;
+ }): Promise<{ pagination: PaginationInfo; scores: ScoreRowData[] }> {
+ return this.stores.scores.getScoresByEntityId({
+ entityId: _entityId,
+ entityType: _entityType,
+ pagination: _pagination,
+ });
+ }
+
+ async getScoresByScorerId({
+ scorerId: _scorerId,
+ pagination: _pagination,
+ }: {
+ scorerId: string;
+ pagination: StoragePagination;
+ }): Promise<{ pagination: PaginationInfo; scores: ScoreRowData[] }> {
+ return this.stores.scores.getScoresByScorerId({ scorerId: _scorerId, pagination: _pagination });
+ }
  }