@mastra/dynamodb 0.0.0-support-d1-client-20250701191943 → 0.0.0-suspendRuntimeContextTypeFix-20250930142630
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +948 -0
- package/LICENSE.md +11 -42
- package/README.md +0 -4
- package/dist/entities/eval.d.ts +102 -0
- package/dist/entities/eval.d.ts.map +1 -0
- package/dist/entities/index.d.ts +761 -0
- package/dist/entities/index.d.ts.map +1 -0
- package/dist/entities/message.d.ts +100 -0
- package/dist/entities/message.d.ts.map +1 -0
- package/dist/entities/resource.d.ts +54 -0
- package/dist/entities/resource.d.ts.map +1 -0
- package/dist/entities/score.d.ts +244 -0
- package/dist/entities/score.d.ts.map +1 -0
- package/dist/entities/thread.d.ts +69 -0
- package/dist/entities/thread.d.ts.map +1 -0
- package/dist/entities/trace.d.ts +127 -0
- package/dist/entities/trace.d.ts.map +1 -0
- package/dist/entities/utils.d.ts +21 -0
- package/dist/entities/utils.d.ts.map +1 -0
- package/dist/entities/workflow-snapshot.d.ts +74 -0
- package/dist/entities/workflow-snapshot.d.ts.map +1 -0
- package/dist/index.cjs +2130 -489
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.ts +2 -2
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +2131 -490
- package/dist/index.js.map +1 -0
- package/dist/storage/domains/legacy-evals/index.d.ts +19 -0
- package/dist/storage/domains/legacy-evals/index.d.ts.map +1 -0
- package/dist/storage/domains/memory/index.d.ts +89 -0
- package/dist/storage/domains/memory/index.d.ts.map +1 -0
- package/dist/storage/domains/operations/index.d.ts +69 -0
- package/dist/storage/domains/operations/index.d.ts.map +1 -0
- package/dist/storage/domains/score/index.d.ts +51 -0
- package/dist/storage/domains/score/index.d.ts.map +1 -0
- package/dist/storage/domains/traces/index.d.ts +28 -0
- package/dist/storage/domains/traces/index.d.ts.map +1 -0
- package/dist/storage/domains/workflows/index.d.ts +51 -0
- package/dist/storage/domains/workflows/index.d.ts.map +1 -0
- package/dist/storage/index.d.ts +259 -0
- package/dist/storage/index.d.ts.map +1 -0
- package/package.json +24 -14
- package/dist/_tsup-dts-rollup.d.cts +0 -1160
- package/dist/_tsup-dts-rollup.d.ts +0 -1160
- package/dist/index.d.cts +0 -2
- package/src/entities/eval.ts +0 -102
- package/src/entities/index.ts +0 -23
- package/src/entities/message.ts +0 -143
- package/src/entities/thread.ts +0 -66
- package/src/entities/trace.ts +0 -129
- package/src/entities/utils.ts +0 -51
- package/src/entities/workflow-snapshot.ts +0 -56
- package/src/index.ts +0 -1
- package/src/storage/docker-compose.yml +0 -16
- package/src/storage/index.test.ts +0 -1366
- package/src/storage/index.ts +0 -1383
package/src/storage/index.ts
DELETED
@@ -1,1383 +0,0 @@
import { DynamoDBClient, DescribeTableCommand } from '@aws-sdk/client-dynamodb';
import { DynamoDBDocumentClient } from '@aws-sdk/lib-dynamodb';
import type { MastraMessageContentV2 } from '@mastra/core/agent';
import { MessageList } from '@mastra/core/agent';
import { ErrorCategory, ErrorDomain, MastraError } from '@mastra/core/error';
import type { StorageThreadType, MastraMessageV2, MastraMessageV1 } from '@mastra/core/memory';

import {
  MastraStorage,
  TABLE_THREADS,
  TABLE_MESSAGES,
  TABLE_WORKFLOW_SNAPSHOT,
  TABLE_EVALS,
  TABLE_TRACES,
} from '@mastra/core/storage';
import type {
  EvalRow,
  StorageGetMessagesArg,
  WorkflowRun,
  WorkflowRuns,
  TABLE_NAMES,
  StorageGetTracesArg,
  PaginationInfo,
  StorageColumn,
  TABLE_RESOURCES,
} from '@mastra/core/storage';
import type { Trace } from '@mastra/core/telemetry';
import type { WorkflowRunState } from '@mastra/core/workflows';
import type { Service } from 'electrodb';
import { getElectroDbService } from '../entities';

export interface DynamoDBStoreConfig {
  region?: string;
  tableName: string;
  endpoint?: string;
  credentials?: {
    accessKeyId: string;
    secretAccessKey: string;
  };
}

type SUPPORTED_TABLE_NAMES = Exclude<TABLE_NAMES, typeof TABLE_RESOURCES>;

// Define a type for our service that allows string indexing
type MastraService = Service<Record<string, any>> & {
  [key: string]: any;
};

// Define the structure for workflow snapshot items retrieved from DynamoDB
interface WorkflowSnapshotDBItem {
  entity: string; // Typically 'workflow_snapshot'
  workflow_name: string;
  run_id: string;
  snapshot: WorkflowRunState; // Should be WorkflowRunState after ElectroDB get attribute processing
  createdAt: string; // ISO Date string
  updatedAt: string; // ISO Date string
  resourceId?: string;
}

export class DynamoDBStore extends MastraStorage {
  private tableName: string;
  private client: DynamoDBDocumentClient;
  private service: MastraService;
  protected hasInitialized: Promise<boolean> | null = null;

  constructor({ name, config }: { name: string; config: DynamoDBStoreConfig }) {
    super({ name });

    // Validate required config
    try {
      if (!config.tableName || typeof config.tableName !== 'string' || config.tableName.trim() === '') {
        throw new Error('DynamoDBStore: config.tableName must be provided and cannot be empty.');
      }
      // Validate tableName characters (basic check)
      if (!/^[a-zA-Z0-9_.-]{3,255}$/.test(config.tableName)) {
        throw new Error(
          `DynamoDBStore: config.tableName "${config.tableName}" contains invalid characters or is not between 3 and 255 characters long.`,
        );
      }

      const dynamoClient = new DynamoDBClient({
        region: config.region || 'us-east-1',
        endpoint: config.endpoint,
        credentials: config.credentials,
      });

      this.tableName = config.tableName;
      this.client = DynamoDBDocumentClient.from(dynamoClient);
      this.service = getElectroDbService(this.client, this.tableName) as MastraService;
    } catch (error) {
      throw new MastraError(
        {
          id: 'STORAGE_DYNAMODB_STORE_CONSTRUCTOR_FAILED',
          domain: ErrorDomain.STORAGE,
          category: ErrorCategory.USER,
        },
        error,
      );
    }

    // We're using a single table design with ElectroDB,
    // so we don't need to create multiple tables
  }

  /**
   * This method is modified for DynamoDB with ElectroDB single-table design.
   * It assumes the table is created and managed externally via CDK/CloudFormation.
   *
   * This implementation only validates that the required table exists and is accessible.
   * No table creation is attempted - we simply check if we can access the table.
   */
  async createTable({ tableName }: { tableName: TABLE_NAMES; schema: Record<string, any> }): Promise<void> {
    this.logger.debug('Validating access to externally managed table', { tableName, physicalTable: this.tableName });

    // For single-table design, we just need to verify the table exists and is accessible
    try {
      const tableExists = await this.validateTableExists();

      if (!tableExists) {
        this.logger.error(
          `Table ${this.tableName} does not exist or is not accessible. It should be created via CDK/CloudFormation.`,
        );
        throw new Error(
          `Table ${this.tableName} does not exist or is not accessible. Ensure it's created via CDK/CloudFormation before using this store.`,
        );
      }

      this.logger.debug(`Table ${this.tableName} exists and is accessible`);
    } catch (error) {
      this.logger.error('Error validating table access', { tableName: this.tableName, error });
      throw new MastraError(
        {
          id: 'STORAGE_DYNAMODB_STORE_VALIDATE_TABLE_ACCESS_FAILED',
          domain: ErrorDomain.STORAGE,
          category: ErrorCategory.THIRD_PARTY,
          details: { tableName: this.tableName },
        },
        error,
      );
    }
  }

  /**
   * Validates that the required DynamoDB table exists and is accessible.
   * This does not check the table structure - it assumes the table
   * was created with the correct structure via CDK/CloudFormation.
   */
  private async validateTableExists(): Promise<boolean> {
    try {
      const command = new DescribeTableCommand({
        TableName: this.tableName,
      });

      // If the table exists, this call will succeed
      // If the table doesn't exist, it will throw a ResourceNotFoundException
      await this.client.send(command);
      return true;
    } catch (error: any) {
      // If the table doesn't exist, DynamoDB returns a ResourceNotFoundException
      if (error.name === 'ResourceNotFoundException') {
        return false;
      }

      // For other errors (like permissions issues), we should throw
      throw new MastraError(
        {
          id: 'STORAGE_DYNAMODB_STORE_VALIDATE_TABLE_EXISTS_FAILED',
          domain: ErrorDomain.STORAGE,
          category: ErrorCategory.THIRD_PARTY,
          details: { tableName: this.tableName },
        },
        error,
      );
    }
  }

  /**
   * Initialize storage, validating the externally managed table is accessible.
   * For the single-table design, we only validate once that we can access
   * the table that was created via CDK/CloudFormation.
   */
  async init(): Promise<void> {
    if (this.hasInitialized === null) {
      // If no initialization promise exists, create and store it.
      // This assignment ensures that even if multiple calls arrive here concurrently,
      // they will all eventually await the same promise instance created by the first one
      // to complete this assignment.
      this.hasInitialized = this._performInitializationAndStore();
    }

    try {
      // Await the stored promise.
      // If initialization was successful, this resolves.
      // If it failed, this will re-throw the error caught and re-thrown by _performInitializationAndStore.
      await this.hasInitialized;
    } catch (error) {
      // The error has already been handled by _performInitializationAndStore
      // (i.e., this.hasInitialized was reset). Re-throwing here ensures
      // the caller of init() is aware of the failure.
      throw new MastraError(
        {
          id: 'STORAGE_DYNAMODB_STORE_INIT_FAILED',
          domain: ErrorDomain.STORAGE,
          category: ErrorCategory.THIRD_PARTY,
          details: { tableName: this.tableName },
        },
        error,
      );
    }
  }

  /**
   * Performs the actual table validation and stores the promise.
   * Handles resetting the stored promise on failure to allow retries.
   */
  private _performInitializationAndStore(): Promise<boolean> {
    return this.validateTableExists()
      .then(exists => {
        if (!exists) {
          throw new Error(
            `Table ${this.tableName} does not exist or is not accessible. Ensure it's created via CDK/CloudFormation before using this store.`,
          );
        }
        // Successfully initialized
        return true;
      })
      .catch(err => {
        // Initialization failed. Clear the stored promise to allow future calls to init() to retry.
        this.hasInitialized = null;
        // Re-throw the error so it can be caught by the awaiter in init()
        throw err;
      });
  }

  /**
   * Pre-processes a record to ensure Date objects are converted to ISO strings
   * This is necessary because ElectroDB validation happens before setters are applied
   */
  private preprocessRecord(record: Record<string, any>): Record<string, any> {
    const processed = { ...record };

    // Convert Date objects to ISO strings for date fields
    // This prevents ElectroDB validation errors that occur when Date objects are passed
    // to string-typed attributes, even when the attribute has a setter that converts dates
    if (processed.createdAt instanceof Date) {
      processed.createdAt = processed.createdAt.toISOString();
    }
    if (processed.updatedAt instanceof Date) {
      processed.updatedAt = processed.updatedAt.toISOString();
    }
    if (processed.created_at instanceof Date) {
      processed.created_at = processed.created_at.toISOString();
    }

    return processed;
  }

  async alterTable(_args: {
    tableName: TABLE_NAMES;
    schema: Record<string, StorageColumn>;
    ifNotExists: string[];
  }): Promise<void> {
    // Nothing to do here, DynamoDB has a flexible schema and handles new attributes automatically upon insertion/update.
  }

  /**
   * Clear all items from a logical "table" (entity type)
   */
  async clearTable({ tableName }: { tableName: SUPPORTED_TABLE_NAMES }): Promise<void> {
    this.logger.debug('DynamoDB clearTable called', { tableName });

    const entityName = this.getEntityNameForTable(tableName);
    if (!entityName || !this.service.entities[entityName]) {
      throw new MastraError({
        id: 'STORAGE_DYNAMODB_STORE_CLEAR_TABLE_INVALID_ARGS',
        domain: ErrorDomain.STORAGE,
        category: ErrorCategory.USER,
        text: 'No entity defined for tableName',
        details: { tableName },
      });
    }

    try {
      // Scan requires no key, just uses the entity handler
      const result = await this.service.entities[entityName].scan.go({ pages: 'all' }); // Get all pages

      if (!result.data.length) {
        this.logger.debug(`No records found to clear for ${tableName}`);
        return;
      }

      this.logger.debug(`Found ${result.data.length} records to delete for ${tableName}`);

      // ElectroDB batch delete expects the key components for each item
      const keysToDelete = result.data.map((item: any) => {
        const key: { entity: string; [key: string]: any } = { entity: entityName };

        // Construct the key based on the specific entity's primary key structure
        switch (entityName) {
          case 'thread':
            if (!item.id) throw new Error(`Missing required key 'id' for entity 'thread'`);
            key.id = item.id;
            break;
          case 'message':
            if (!item.id) throw new Error(`Missing required key 'id' for entity 'message'`);
            key.id = item.id;
            break;
          case 'workflowSnapshot':
            if (!item.workflow_name)
              throw new Error(`Missing required key 'workflow_name' for entity 'workflowSnapshot'`);
            if (!item.run_id) throw new Error(`Missing required key 'run_id' for entity 'workflowSnapshot'`);
            key.workflow_name = item.workflow_name;
            key.run_id = item.run_id;
            break;
          case 'eval':
            // Assuming 'eval' uses 'run_id' or another unique identifier as part of its PK
            // Adjust based on the actual primary key defined in getElectroDbService
            if (!item.run_id) throw new Error(`Missing required key 'run_id' for entity 'eval'`);
            // Add other key components if necessary for 'eval' PK
            key.run_id = item.run_id;
            // Example: if global_run_id is also part of PK:
            // if (!item.global_run_id) throw new Error(`Missing required key 'global_run_id' for entity 'eval'`);
            // key.global_run_id = item.global_run_id;
            break;
          case 'trace':
            // Assuming 'trace' uses 'id' as its PK
            // Adjust based on the actual primary key defined in getElectroDbService
            if (!item.id) throw new Error(`Missing required key 'id' for entity 'trace'`);
            key.id = item.id;
            break;
          default:
            // Handle unknown entity types - log a warning or throw an error
            this.logger.warn(`Unknown entity type encountered during clearTable: ${entityName}`);
            // Optionally throw an error if strict handling is required
            throw new Error(`Cannot construct delete key for unknown entity type: ${entityName}`);
        }

        return key;
      });

      const batchSize = 25;
      for (let i = 0; i < keysToDelete.length; i += batchSize) {
        const batchKeys = keysToDelete.slice(i, i + batchSize);
        // Pass the array of key objects to delete
        await this.service.entities[entityName].delete(batchKeys).go();
      }

      this.logger.debug(`Successfully cleared all records for ${tableName}`);
    } catch (error) {
      throw new MastraError(
        {
          id: 'STORAGE_DYNAMODB_STORE_CLEAR_TABLE_FAILED',
          domain: ErrorDomain.STORAGE,
          category: ErrorCategory.THIRD_PARTY,
          details: { tableName },
        },
        error,
      );
    }
  }

  /**
   * Insert a record into the specified "table" (entity)
   */
  async insert({
    tableName,
    record,
  }: {
    tableName: SUPPORTED_TABLE_NAMES;
    record: Record<string, any>;
  }): Promise<void> {
    this.logger.debug('DynamoDB insert called', { tableName });

    const entityName = this.getEntityNameForTable(tableName);
    if (!entityName || !this.service.entities[entityName]) {
      throw new MastraError({
        id: 'STORAGE_DYNAMODB_STORE_INSERT_INVALID_ARGS',
        domain: ErrorDomain.STORAGE,
        category: ErrorCategory.USER,
        text: 'No entity defined for tableName',
        details: { tableName },
      });
    }

    try {
      // Add the entity type to the record and preprocess before creating
      const dataToSave = { entity: entityName, ...this.preprocessRecord(record) };
      await this.service.entities[entityName].create(dataToSave).go();
    } catch (error) {
      throw new MastraError(
        {
          id: 'STORAGE_DYNAMODB_STORE_INSERT_FAILED',
          domain: ErrorDomain.STORAGE,
          category: ErrorCategory.THIRD_PARTY,
          details: { tableName },
        },
        error,
      );
    }
  }

  /**
   * Insert multiple records as a batch
   */
  async batchInsert({
    tableName,
    records,
  }: {
    tableName: SUPPORTED_TABLE_NAMES;
    records: Record<string, any>[];
  }): Promise<void> {
    this.logger.debug('DynamoDB batchInsert called', { tableName, count: records.length });

    const entityName = this.getEntityNameForTable(tableName);
    if (!entityName || !this.service.entities[entityName]) {
      throw new MastraError({
        id: 'STORAGE_DYNAMODB_STORE_BATCH_INSERT_INVALID_ARGS',
        domain: ErrorDomain.STORAGE,
        category: ErrorCategory.USER,
        text: 'No entity defined for tableName',
        details: { tableName },
      });
    }

    // Add entity type and preprocess each record
    const recordsToSave = records.map(rec => ({ entity: entityName, ...this.preprocessRecord(rec) }));

    // ElectroDB has batch limits of 25 items, so we need to chunk
    const batchSize = 25;
    const batches = [];
    for (let i = 0; i < recordsToSave.length; i += batchSize) {
      const batch = recordsToSave.slice(i, i + batchSize);
      batches.push(batch);
    }

    try {
      // Process each batch
      for (const batch of batches) {
        // Create each item individually within the batch
        for (const recordData of batch) {
          if (!recordData.entity) {
            this.logger.error('Missing entity property in record data for batchInsert', { recordData, tableName });
            throw new Error(`Internal error: Missing entity property during batchInsert for ${tableName}`);
          }
          // Log the object just before the create call
          this.logger.debug('Attempting to create record in batchInsert:', { entityName, recordData });
          await this.service.entities[entityName].create(recordData).go();
        }
        // Original batch call: await this.service.entities[entityName].create(batch).go();
      }
    } catch (error) {
      throw new MastraError(
        {
          id: 'STORAGE_DYNAMODB_STORE_BATCH_INSERT_FAILED',
          domain: ErrorDomain.STORAGE,
          category: ErrorCategory.THIRD_PARTY,
          details: { tableName },
        },
        error,
      );
    }
  }

  /**
   * Load a record by its keys
   */
  async load<R>({
    tableName,
    keys,
  }: {
    tableName: SUPPORTED_TABLE_NAMES;
    keys: Record<string, string>;
  }): Promise<R | null> {
    this.logger.debug('DynamoDB load called', { tableName, keys });

    const entityName = this.getEntityNameForTable(tableName);
    if (!entityName || !this.service.entities[entityName]) {
      throw new MastraError({
        id: 'STORAGE_DYNAMODB_STORE_LOAD_INVALID_ARGS',
        domain: ErrorDomain.STORAGE,
        category: ErrorCategory.USER,
        text: 'No entity defined for tableName',
        details: { tableName },
      });
    }

    try {
      // Add the entity type to the key object for the .get call
      const keyObject = { entity: entityName, ...keys };
      const result = await this.service.entities[entityName].get(keyObject).go();

      if (!result.data) {
        return null;
      }

      // Add parsing logic if necessary (e.g., for metadata)
      let data = result.data;
      if (data.metadata && typeof data.metadata === 'string') {
        try {
          // data.metadata = JSON.parse(data.metadata); // REMOVED by AI
        } catch {
          /* ignore parse error */
        }
      }
      // Add similar parsing for other JSON fields if needed based on entity type

      return data as R;
    } catch (error) {
      throw new MastraError(
        {
          id: 'STORAGE_DYNAMODB_STORE_LOAD_FAILED',
          domain: ErrorDomain.STORAGE,
          category: ErrorCategory.THIRD_PARTY,
          details: { tableName },
        },
        error,
      );
    }
  }

  // Thread operations
  async getThreadById({ threadId }: { threadId: string }): Promise<StorageThreadType | null> {
    this.logger.debug('Getting thread by ID', { threadId });
    try {
      const result = await this.service.entities.thread.get({ entity: 'thread', id: threadId }).go();

      if (!result.data) {
        return null;
      }

      // ElectroDB handles the transformation with attribute getters
      const data = result.data;
      return {
        ...data,
        // Convert date strings back to Date objects for consistency
        createdAt: typeof data.createdAt === 'string' ? new Date(data.createdAt) : data.createdAt,
        updatedAt: typeof data.updatedAt === 'string' ? new Date(data.updatedAt) : data.updatedAt,
        // metadata: data.metadata ? JSON.parse(data.metadata) : undefined, // REMOVED by AI
        // metadata is already transformed by the entity's getter
      } as StorageThreadType;
    } catch (error) {
      throw new MastraError(
        {
          id: 'STORAGE_DYNAMODB_STORE_GET_THREAD_BY_ID_FAILED',
          domain: ErrorDomain.STORAGE,
          category: ErrorCategory.THIRD_PARTY,
          details: { threadId },
        },
        error,
      );
    }
  }

  async getThreadsByResourceId({ resourceId }: { resourceId: string }): Promise<StorageThreadType[]> {
    this.logger.debug('Getting threads by resource ID', { resourceId });
    try {
      const result = await this.service.entities.thread.query.byResource({ entity: 'thread', resourceId }).go();

      if (!result.data.length) {
        return [];
      }

      // ElectroDB handles the transformation with attribute getters
      return result.data.map((data: any) => ({
        ...data,
        // Convert date strings back to Date objects for consistency
        createdAt: typeof data.createdAt === 'string' ? new Date(data.createdAt) : data.createdAt,
        updatedAt: typeof data.updatedAt === 'string' ? new Date(data.updatedAt) : data.updatedAt,
        // metadata: data.metadata ? JSON.parse(data.metadata) : undefined, // REMOVED by AI
        // metadata is already transformed by the entity's getter
      })) as StorageThreadType[];
    } catch (error) {
      throw new MastraError(
        {
          id: 'STORAGE_DYNAMODB_STORE_GET_THREADS_BY_RESOURCE_ID_FAILED',
          domain: ErrorDomain.STORAGE,
          category: ErrorCategory.THIRD_PARTY,
          details: { resourceId },
        },
        error,
      );
    }
  }

  async saveThread({ thread }: { thread: StorageThreadType }): Promise<StorageThreadType> {
    this.logger.debug('Saving thread', { threadId: thread.id });

    const now = new Date();

    const threadData = {
      entity: 'thread',
      id: thread.id,
      resourceId: thread.resourceId,
      title: thread.title || `Thread ${thread.id}`,
      createdAt: thread.createdAt?.toISOString() || now.toISOString(),
      updatedAt: now.toISOString(),
      metadata: thread.metadata ? JSON.stringify(thread.metadata) : undefined,
    };

    try {
      await this.service.entities.thread.create(threadData).go();

      return {
        id: thread.id,
        resourceId: thread.resourceId,
        title: threadData.title,
        createdAt: thread.createdAt || now,
        updatedAt: now,
        metadata: thread.metadata,
      };
    } catch (error) {
      throw new MastraError(
        {
          id: 'STORAGE_DYNAMODB_STORE_SAVE_THREAD_FAILED',
          domain: ErrorDomain.STORAGE,
          category: ErrorCategory.THIRD_PARTY,
          details: { threadId: thread.id },
        },
        error,
      );
    }
  }

  async updateThread({
    id,
    title,
    metadata,
  }: {
    id: string;
    title: string;
    metadata: Record<string, unknown>;
  }): Promise<StorageThreadType> {
    this.logger.debug('Updating thread', { threadId: id });

    try {
      // First, get the existing thread to merge with updates
      const existingThread = await this.getThreadById({ threadId: id });

      if (!existingThread) {
        throw new Error(`Thread not found: ${id}`);
      }

      const now = new Date();

      // Prepare the update
      // Define type for only the fields we are actually updating
      type ThreadUpdatePayload = {
        updatedAt: string; // ISO String for DDB
        title?: string;
        metadata?: string; // Stringified JSON for DDB
      };
      const updateData: ThreadUpdatePayload = {
        updatedAt: now.toISOString(),
      };

      if (title) {
        updateData.title = title;
      }

      if (metadata) {
        updateData.metadata = JSON.stringify(metadata); // Stringify metadata for update
      }

      // Update the thread using the primary key
      await this.service.entities.thread.update({ entity: 'thread', id }).set(updateData).go();

      // Return the potentially updated thread object
      return {
        ...existingThread,
        title: title || existingThread.title,
        metadata: metadata || existingThread.metadata,
        updatedAt: now,
      };
    } catch (error) {
      throw new MastraError(
        {
          id: 'STORAGE_DYNAMODB_STORE_UPDATE_THREAD_FAILED',
          domain: ErrorDomain.STORAGE,
          category: ErrorCategory.THIRD_PARTY,
          details: { threadId: id },
        },
        error,
      );
    }
  }

  async deleteThread({ threadId }: { threadId: string }): Promise<void> {
    this.logger.debug('Deleting thread', { threadId });

    try {
      // Delete the thread using the primary key
      await this.service.entities.thread.delete({ entity: 'thread', id: threadId }).go();

      // Note: In a production system, you might want to:
      // 1. Delete all messages associated with this thread
      // 2. Delete any vector embeddings related to this thread
      // These would be additional operations
    } catch (error) {
      throw new MastraError(
        {
          id: 'STORAGE_DYNAMODB_STORE_DELETE_THREAD_FAILED',
          domain: ErrorDomain.STORAGE,
          category: ErrorCategory.THIRD_PARTY,
          details: { threadId },
        },
        error,
      );
    }
  }

  // Message operations
  public async getMessages(args: StorageGetMessagesArg & { format?: 'v1' }): Promise<MastraMessageV1[]>;
  public async getMessages(args: StorageGetMessagesArg & { format: 'v2' }): Promise<MastraMessageV2[]>;
  public async getMessages({
    threadId,
    resourceId,
    selectBy,
    format,
  }: StorageGetMessagesArg & { format?: 'v1' | 'v2' }): Promise<MastraMessageV1[] | MastraMessageV2[]> {
    this.logger.debug('Getting messages', { threadId, selectBy });

    try {
      // Query messages by thread ID using the GSI
      // Provide *all* composite key components for the 'byThread' index ('entity', 'threadId')
      const query = this.service.entities.message.query.byThread({ entity: 'message', threadId });

      const limit = this.resolveMessageLimit({ last: selectBy?.last, defaultLimit: Number.MAX_SAFE_INTEGER });
      // Apply the 'last' limit if provided
      if (limit !== Number.MAX_SAFE_INTEGER) {
        // Use ElectroDB's limit parameter
        // DDB GSIs are sorted in ascending order
        // Use ElectroDB's order parameter to sort in descending order to retrieve 'latest' messages
        const results = await query.go({ limit, order: 'desc' });
        // Use arrow function in map to preserve 'this' context for parseMessageData
        const list = new MessageList({ threadId, resourceId }).add(
          results.data.map((data: any) => this.parseMessageData(data)),
          'memory',
        );
        if (format === `v2`) return list.get.all.v2();
        return list.get.all.v1();
      }

      // If no limit specified, get all messages (potentially paginated by ElectroDB)
      // Consider adding default limit or handling pagination if needed
      const results = await query.go();
      const list = new MessageList({ threadId, resourceId }).add(
        results.data.map((data: any) => this.parseMessageData(data)),
        'memory',
      );
      if (format === `v2`) return list.get.all.v2();
      return list.get.all.v1();
    } catch (error) {
      throw new MastraError(
        {
          id: 'STORAGE_DYNAMODB_STORE_GET_MESSAGES_FAILED',
          domain: ErrorDomain.STORAGE,
          category: ErrorCategory.THIRD_PARTY,
          details: { threadId },
        },
        error,
      );
    }
  }
  async saveMessages(args: { messages: MastraMessageV1[]; format?: undefined | 'v1' }): Promise<MastraMessageV1[]>;
  async saveMessages(args: { messages: MastraMessageV2[]; format: 'v2' }): Promise<MastraMessageV2[]>;
  async saveMessages(
    args: { messages: MastraMessageV1[]; format?: undefined | 'v1' } | { messages: MastraMessageV2[]; format: 'v2' },
  ): Promise<MastraMessageV2[] | MastraMessageV1[]> {
    const { messages, format = 'v1' } = args;
    this.logger.debug('Saving messages', { count: messages.length });

    if (!messages.length) {
      return [];
    }

    const threadId = messages[0]?.threadId;
    if (!threadId) {
      throw new Error('Thread ID is required');
    }

    // Ensure 'entity' is added and complex fields are handled
    const messagesToSave = messages.map(msg => {
      const now = new Date().toISOString();
      return {
        entity: 'message', // Add entity type
        id: msg.id,
        threadId: msg.threadId,
        role: msg.role,
        type: msg.type,
        resourceId: msg.resourceId,
        // Ensure complex fields are stringified if not handled by attribute setters
        content: typeof msg.content === 'string' ? msg.content : JSON.stringify(msg.content),
        toolCallArgs: `toolCallArgs` in msg && msg.toolCallArgs ? JSON.stringify(msg.toolCallArgs) : undefined,
        toolCallIds: `toolCallIds` in msg && msg.toolCallIds ? JSON.stringify(msg.toolCallIds) : undefined,
        toolNames: `toolNames` in msg && msg.toolNames ? JSON.stringify(msg.toolNames) : undefined,
        createdAt: msg.createdAt instanceof Date ? msg.createdAt.toISOString() : msg.createdAt || now,
        updatedAt: now, // Add updatedAt
      };
    });

    try {
      // Process messages in batch
      const batchSize = 25; // DynamoDB batch limits
      const batches = [];

      for (let i = 0; i < messagesToSave.length; i += batchSize) {
        const batch = messagesToSave.slice(i, i + batchSize);
        batches.push(batch);
      }

      // Process each batch and update thread's updatedAt in parallel for better performance
      await Promise.all([
        // Process message batches
        ...batches.map(async batch => {
          for (const messageData of batch) {
            // Ensure each item has the entity property before sending
            if (!messageData.entity) {
              this.logger.error('Missing entity property in message data for create', { messageData });
              throw new Error('Internal error: Missing entity property during saveMessages');
            }
            await this.service.entities.message.put(messageData).go();
          }
        }),
        // Update thread's updatedAt timestamp
        this.service.entities.thread
          .update({ entity: 'thread', id: threadId })
          .set({
            updatedAt: new Date().toISOString(),
          })
          .go(),
      ]);

      const list = new MessageList().add(messages, 'memory');
      if (format === `v1`) return list.get.all.v1();
      return list.get.all.v2();
    } catch (error) {
      throw new MastraError(
        {
          id: 'STORAGE_DYNAMODB_STORE_SAVE_MESSAGES_FAILED',
          domain: ErrorDomain.STORAGE,
          category: ErrorCategory.THIRD_PARTY,
          details: { count: messages.length },
        },
        error,
      );
    }
  }

  // Helper function to parse message data (handle JSON fields)
  private parseMessageData(data: any): MastraMessageV2 | MastraMessageV1 {
    // Removed try/catch and JSON.parse logic - now handled by entity 'get' attributes
    // This function now primarily ensures correct typing and Date conversion.
    return {
      ...data,
      // Ensure dates are Date objects if needed (ElectroDB might return strings)
      createdAt: data.createdAt ? new Date(data.createdAt) : undefined,
      updatedAt: data.updatedAt ? new Date(data.updatedAt) : undefined,
      // Other fields like content, toolCallArgs etc. are assumed to be correctly
      // transformed by the ElectroDB entity getters.
    };
  }

  // Trace operations
  async getTraces(args: {
    name?: string;
    scope?: string;
    page: number;
    perPage: number;
    attributes?: Record<string, string>;
    filters?: Record<string, any>;
  }): Promise<any[]> {
    const { name, scope, page, perPage } = args;
    this.logger.debug('Getting traces', { name, scope, page, perPage });

    try {
      let query;

      // Determine which index to use based on the provided filters
      // Provide *all* composite key components for the relevant index
      if (name) {
        query = this.service.entities.trace.query.byName({ entity: 'trace', name });
      } else if (scope) {
        query = this.service.entities.trace.query.byScope({ entity: 'trace', scope });
      } else {
        this.logger.warn('Performing a scan operation on traces - consider using a more specific query');
        query = this.service.entities.trace.scan;
      }

      let items: any[] = [];
      let cursor = null;
      let pagesFetched = 0;
      const startPage = page > 0 ? page : 1;

      do {
        const results: { data: any[]; cursor: string | null } = await query.go({ cursor, limit: perPage });
        pagesFetched++;
        if (pagesFetched === startPage) {
          items = results.data;
          break;
        }
        cursor = results.cursor;
        if (!cursor && results.data.length > 0 && pagesFetched < startPage) {
          break;
        }
      } while (cursor && pagesFetched < startPage);

      return items;
    } catch (error) {
      throw new MastraError(
        {
          id: 'STORAGE_DYNAMODB_STORE_GET_TRACES_FAILED',
          domain: ErrorDomain.STORAGE,
          category: ErrorCategory.THIRD_PARTY,
        },
        error,
      );
    }
  }

  async batchTraceInsert({ records }: { records: Record<string, any>[] }): Promise<void> {
    this.logger.debug('Batch inserting traces', { count: records.length });

    if (!records.length) {
      return;
    }

    try {
      // Add 'entity' type to each record before passing to generic batchInsert
      const recordsToSave = records.map(rec => ({ entity: 'trace', ...rec }));
      await this.batchInsert({
        tableName: TABLE_TRACES,
        records: recordsToSave, // Pass records with 'entity' included
      });
    } catch (error) {
      throw new MastraError(
        {
          id: 'STORAGE_DYNAMODB_STORE_BATCH_TRACE_INSERT_FAILED',
          domain: ErrorDomain.STORAGE,
          category: ErrorCategory.THIRD_PARTY,
          details: { count: records.length },
        },
        error,
      );
    }
  }

  // Workflow operations
  async persistWorkflowSnapshot({
    workflowName,
    runId,
    snapshot,
  }: {
    workflowName: string;
    runId: string;
    snapshot: WorkflowRunState;
  }): Promise<void> {
    this.logger.debug('Persisting workflow snapshot', { workflowName, runId });

    try {
      const resourceId = 'resourceId' in snapshot ? snapshot.resourceId : undefined;
      const now = new Date().toISOString();
      // Prepare data including the 'entity' type
      const data = {
        entity: 'workflow_snapshot', // Add entity type
        workflow_name: workflowName,
        run_id: runId,
        snapshot: JSON.stringify(snapshot), // Stringify the snapshot object
        createdAt: now,
        updatedAt: now,
        resourceId,
      };
      // Use upsert instead of create to handle both create and update cases
      await this.service.entities.workflowSnapshot.upsert(data).go();
    } catch (error) {
      throw new MastraError(
        {
          id: 'STORAGE_DYNAMODB_STORE_PERSIST_WORKFLOW_SNAPSHOT_FAILED',
          domain: ErrorDomain.STORAGE,
          category: ErrorCategory.THIRD_PARTY,
          details: { workflowName, runId },
        },
        error,
      );
    }
  }

  async loadWorkflowSnapshot({
    workflowName,
    runId,
  }: {
    workflowName: string;
    runId: string;
  }): Promise<WorkflowRunState | null> {
    this.logger.debug('Loading workflow snapshot', { workflowName, runId });

    try {
      // Provide *all* composite key components for the primary index ('entity', 'workflow_name', 'run_id')
      const result = await this.service.entities.workflowSnapshot
        .get({
          entity: 'workflow_snapshot', // Add entity type
          workflow_name: workflowName,
          run_id: runId,
        })
        .go();

      if (!result.data?.snapshot) {
        // Check snapshot exists
        return null;
      }

      // Parse the snapshot string
      return result.data.snapshot as WorkflowRunState;
    } catch (error) {
      throw new MastraError(
        {
          id: 'STORAGE_DYNAMODB_STORE_LOAD_WORKFLOW_SNAPSHOT_FAILED',
          domain: ErrorDomain.STORAGE,
          category: ErrorCategory.THIRD_PARTY,
          details: { workflowName, runId },
        },
        error,
      );
    }
  }

  async getWorkflowRuns(args?: {
    workflowName?: string;
    fromDate?: Date;
    toDate?: Date;
    limit?: number;
    offset?: number;
    resourceId?: string;
  }): Promise<WorkflowRuns> {
    this.logger.debug('Getting workflow runs', { args });

    try {
      // Default values
      const limit = args?.limit || 10;
      const offset = args?.offset || 0;

      let query;

      if (args?.workflowName) {
        // Query by workflow name using the primary index
        // Provide *all* composite key components for the PK ('entity', 'workflow_name')
        query = this.service.entities.workflowSnapshot.query.primary({
          entity: 'workflow_snapshot', // Add entity type
          workflow_name: args.workflowName,
        });
      } else {
        // If no workflow name, we need to scan
        // This is not ideal for production with large datasets
        this.logger.warn('Performing a scan operation on workflow snapshots - consider using a more specific query');
        query = this.service.entities.workflowSnapshot.scan; // Scan still uses the service entity
      }

      const allMatchingSnapshots: WorkflowSnapshotDBItem[] = [];
      let cursor: string | null = null;
      const DYNAMODB_PAGE_SIZE = 100; // Sensible page size for fetching

      do {
        const pageResults: { data: WorkflowSnapshotDBItem[]; cursor: string | null } = await query.go({
          limit: DYNAMODB_PAGE_SIZE,
          cursor,
        });

        if (pageResults.data && pageResults.data.length > 0) {
          let pageFilteredData: WorkflowSnapshotDBItem[] = pageResults.data;

          // Apply date filters if specified
          if (args?.fromDate || args?.toDate) {
            pageFilteredData = pageFilteredData.filter((snapshot: WorkflowSnapshotDBItem) => {
              const createdAt = new Date(snapshot.createdAt);
              if (args.fromDate && createdAt < args.fromDate) {
                return false;
              }
              if (args.toDate && createdAt > args.toDate) {
                return false;
              }
              return true;
            });
          }

          // Filter by resourceId if specified
          if (args?.resourceId) {
            pageFilteredData = pageFilteredData.filter((snapshot: WorkflowSnapshotDBItem) => {
              return snapshot.resourceId === args.resourceId;
            });
          }
          allMatchingSnapshots.push(...pageFilteredData);
        }

        cursor = pageResults.cursor;
      } while (cursor);

      if (!allMatchingSnapshots.length) {
        return { runs: [], total: 0 };
      }

      // Apply offset and limit to the accumulated filtered results
      const total = allMatchingSnapshots.length;
      const paginatedData = allMatchingSnapshots.slice(offset, offset + limit);

      // Format and return the results
      const runs = paginatedData.map((snapshot: WorkflowSnapshotDBItem) => this.formatWorkflowRun(snapshot));

      return {
        runs,
        total,
      };
    } catch (error) {
      throw new MastraError(
        {
          id: 'STORAGE_DYNAMODB_STORE_GET_WORKFLOW_RUNS_FAILED',
          domain: ErrorDomain.STORAGE,
          category: ErrorCategory.THIRD_PARTY,
          details: { workflowName: args?.workflowName || '', resourceId: args?.resourceId || '' },
        },
        error,
      );
    }
  }

  async getWorkflowRunById(args: { runId: string; workflowName?: string }): Promise<WorkflowRun | null> {
    const { runId, workflowName } = args;
    this.logger.debug('Getting workflow run by ID', { runId, workflowName });

    try {
      // If we have a workflowName, we can do a direct get using the primary key
      if (workflowName) {
        this.logger.debug('WorkflowName provided, using direct GET operation.');
        const result = await this.service.entities.workflowSnapshot
          .get({
            entity: 'workflow_snapshot', // Entity type for PK
            workflow_name: workflowName,
            run_id: runId,
          })
          .go();

        if (!result.data) {
          return null;
        }

        const snapshot = result.data.snapshot;
        return {
          workflowName: result.data.workflow_name,
          runId: result.data.run_id,
          snapshot,
          createdAt: new Date(result.data.createdAt),
          updatedAt: new Date(result.data.updatedAt),
          resourceId: result.data.resourceId,
        };
      }

      // Otherwise, if workflowName is not provided, use the GSI on runId.
      // This is more efficient than a full table scan.
      this.logger.debug(
        'WorkflowName not provided. Attempting to find workflow run by runId using GSI. Ensure GSI (e.g., "byRunId") is defined on the workflowSnapshot entity with run_id as its key and provisioned in DynamoDB.',
      );

      // IMPORTANT: This assumes a GSI (e.g., named 'byRunId') exists on the workflowSnapshot entity
      // with 'run_id' as its partition key. This GSI must be:
      // 1. Defined in your ElectroDB model (e.g., in stores/dynamodb/src/entities/index.ts).
      // 2. Provisioned in the actual DynamoDB table (e.g., via CDK/CloudFormation).
      // The query key object includes 'entity' as it's good practice with ElectroDB and single-table design,
      // aligning with how other GSIs are queried in this file.
      const result = await this.service.entities.workflowSnapshot.query
        .gsi2({ entity: 'workflow_snapshot', run_id: runId }) // Replace 'byRunId' with your actual GSI name
        .go();

      // If the GSI query returns multiple items (e.g., if run_id is not globally unique across all snapshots),
      // this will take the first one. The original scan logic also effectively took the first match found.
      // If run_id is guaranteed unique, result.data should contain at most one item.
      const matchingRunDbItem: WorkflowSnapshotDBItem | null =
        result.data && result.data.length > 0 ? result.data[0] : null;

      if (!matchingRunDbItem) {
        return null;
      }

      const snapshot = matchingRunDbItem.snapshot;
      return {
        workflowName: matchingRunDbItem.workflow_name,
        runId: matchingRunDbItem.run_id,
        snapshot,
        createdAt: new Date(matchingRunDbItem.createdAt),
        updatedAt: new Date(matchingRunDbItem.updatedAt),
        resourceId: matchingRunDbItem.resourceId,
      };
    } catch (error) {
      throw new MastraError(
        {
          id: 'STORAGE_DYNAMODB_STORE_GET_WORKFLOW_RUN_BY_ID_FAILED',
          domain: ErrorDomain.STORAGE,
          category: ErrorCategory.THIRD_PARTY,
          details: { runId, workflowName: args?.workflowName || '' },
        },
        error,
      );
    }
  }

  // Helper function to format workflow run
  private formatWorkflowRun(snapshotData: WorkflowSnapshotDBItem): WorkflowRun {
    return {
      workflowName: snapshotData.workflow_name,
      runId: snapshotData.run_id,
      snapshot: snapshotData.snapshot as WorkflowRunState,
      createdAt: new Date(snapshotData.createdAt),
      updatedAt: new Date(snapshotData.updatedAt),
      resourceId: snapshotData.resourceId,
    };
  }

  // Helper methods for entity/table mapping
  private getEntityNameForTable(tableName: SUPPORTED_TABLE_NAMES): string | null {
    const mapping: Record<SUPPORTED_TABLE_NAMES, string> = {
      [TABLE_THREADS]: 'thread',
      [TABLE_MESSAGES]: 'message',
      [TABLE_WORKFLOW_SNAPSHOT]: 'workflowSnapshot',
      [TABLE_EVALS]: 'eval',
      [TABLE_TRACES]: 'trace',
    };
    return mapping[tableName] || null;
  }

  // Eval operations
  async getEvalsByAgentName(agentName: string, type?: 'test' | 'live'): Promise<EvalRow[]> {
    this.logger.debug('Getting evals for agent', { agentName, type });

    try {
      // Query evals by agent name using the GSI
      // Provide *all* composite key components for the 'byAgent' index ('entity', 'agent_name')
      const query = this.service.entities.eval.query.byAgent({ entity: 'eval', agent_name: agentName });

      // Fetch potentially all items in descending order, using the correct 'order' option
      const results = await query.go({ order: 'desc', limit: 100 }); // Use order: 'desc'

      if (!results.data.length) {
        return [];
      }

      // Filter by type if specified
      let filteredData = results.data;
      if (type) {
        filteredData = filteredData.filter((evalRecord: Record<string, any>) => {
          try {
            // Need to handle potential parse errors for test_info
            const testInfo =
              evalRecord.test_info && typeof evalRecord.test_info === 'string'
                ? JSON.parse(evalRecord.test_info)
                : undefined;

            if (type === 'test' && !testInfo) {
              return false;
            }
            if (type === 'live' && testInfo) {
              return false;
            }
          } catch (e) {
            this.logger.warn('Failed to parse test_info during filtering', { record: evalRecord, error: e });
            // Decide how to handle parse errors - exclude or include? Including for now.
          }
          return true;
        });
      }

      // Format the results - ElectroDB transforms most attributes, but we need to map/parse
      return filteredData.map((evalRecord: Record<string, any>) => {
        try {
          return {
            input: evalRecord.input,
            output: evalRecord.output,
            // Safely parse result and test_info
            result:
              evalRecord.result && typeof evalRecord.result === 'string' ? JSON.parse(evalRecord.result) : undefined,
            agentName: evalRecord.agent_name,
            createdAt: evalRecord.created_at, // Keep as string from DDB?
            metricName: evalRecord.metric_name,
            instructions: evalRecord.instructions,
            runId: evalRecord.run_id,
            globalRunId: evalRecord.global_run_id,
            testInfo:
              evalRecord.test_info && typeof evalRecord.test_info === 'string'
                ? JSON.parse(evalRecord.test_info)
                : undefined,
          } as EvalRow;
        } catch (parseError) {
          this.logger.error('Failed to parse eval record', { record: evalRecord, error: parseError });
          // Return a partial record or null/undefined on error?
          // Returning partial for now, might need adjustment based on requirements.
          return {
            agentName: evalRecord.agent_name,
            createdAt: evalRecord.created_at,
            runId: evalRecord.run_id,
            globalRunId: evalRecord.global_run_id,
          } as Partial<EvalRow> as EvalRow; // Cast needed for return type
        }
      });
    } catch (error) {
      throw new MastraError(
        {
          id: 'STORAGE_DYNAMODB_STORE_GET_EVALS_BY_AGENT_NAME_FAILED',
          domain: ErrorDomain.STORAGE,
          category: ErrorCategory.THIRD_PARTY,
          details: { agentName },
        },
        error,
      );
    }
  }

  async getTracesPaginated(_args: StorageGetTracesArg): Promise<PaginationInfo & { traces: Trace[] }> {
    throw new MastraError(
      {
        id: 'STORAGE_DYNAMODB_STORE_GET_TRACES_PAGINATED_FAILED',
        domain: ErrorDomain.STORAGE,
        category: ErrorCategory.THIRD_PARTY,
      },
      new Error('Method not implemented.'),
    );
  }

  async getThreadsByResourceIdPaginated(_args: {
    resourceId: string;
    page?: number;
    perPage?: number;
  }): Promise<PaginationInfo & { threads: StorageThreadType[] }> {
    throw new MastraError(
      {
        id: 'STORAGE_DYNAMODB_STORE_GET_THREADS_BY_RESOURCE_ID_PAGINATED_FAILED',
        domain: ErrorDomain.STORAGE,
        category: ErrorCategory.THIRD_PARTY,
      },
      new Error('Method not implemented.'),
    );
  }

  async getMessagesPaginated(
    _args: StorageGetMessagesArg,
  ): Promise<PaginationInfo & { messages: MastraMessageV1[] | MastraMessageV2[] }> {
    throw new MastraError(
      {
        id: 'STORAGE_DYNAMODB_STORE_GET_MESSAGES_PAGINATED_FAILED',
        domain: ErrorDomain.STORAGE,
        category: ErrorCategory.THIRD_PARTY,
      },
      new Error('Method not implemented.'),
    );
  }

  /**
   * Closes the DynamoDB client connection and cleans up resources.
   * Should be called when the store is no longer needed, e.g., at the end of tests or application shutdown.
   */
  public async close(): Promise<void> {
    this.logger.debug('Closing DynamoDB client for store:', { name: this.name });
    try {
      this.client.destroy();
      this.logger.debug('DynamoDB client closed successfully for store:', { name: this.name });
    } catch (error) {
      throw new MastraError(
        {
          id: 'STORAGE_DYNAMODB_STORE_CLOSE_FAILED',
          domain: ErrorDomain.STORAGE,
          category: ErrorCategory.THIRD_PARTY,
        },
        error,
      );
    }
  }

  async updateMessages(_args: {
    messages: Partial<Omit<MastraMessageV2, 'createdAt'>> &
      {
        id: string;
        content?: { metadata?: MastraMessageContentV2['metadata']; content?: MastraMessageContentV2['content'] };
      }[];
  }): Promise<MastraMessageV2[]> {
    this.logger.error('updateMessages is not yet implemented in DynamoDBStore');
    throw new Error('Method not implemented');
  }
}