@mastra/dynamodb 0.0.2-alpha.0
This diff shows the contents of publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
- package/LICENSE.md +46 -0
- package/README.md +144 -0
- package/dist/index.cjs +1273 -0
- package/dist/index.js +1271 -0
- package/package.json +46 -0
- package/src/entities/eval.ts +102 -0
- package/src/entities/index.ts +23 -0
- package/src/entities/message.ts +143 -0
- package/src/entities/thread.ts +66 -0
- package/src/entities/trace.ts +129 -0
- package/src/entities/utils.ts +51 -0
- package/src/entities/workflow-snapshot.ts +56 -0
- package/src/index.ts +1 -0
- package/src/storage/docker-compose.yml +16 -0
- package/src/storage/index.test.ts +1026 -0
- package/src/storage/index.ts +1036 -0
package/dist/index.cjs
ADDED
@@ -0,0 +1,1273 @@
'use strict';

var clientDynamodb = require('@aws-sdk/client-dynamodb');
var libDynamodb = require('@aws-sdk/lib-dynamodb');
var storage = require('@mastra/core/storage');
var electrodb = require('electrodb');

// src/storage/index.ts

// src/entities/utils.ts
var baseAttributes = {
  createdAt: {
    type: "string",
    required: true,
    readOnly: true,
    // Convert Date to ISO string on set
    set: (value) => {
      if (value instanceof Date) {
        return value.toISOString();
      }
      return value || (/* @__PURE__ */ new Date()).toISOString();
    },
    // Initialize with current timestamp if not provided
    default: () => (/* @__PURE__ */ new Date()).toISOString()
  },
  updatedAt: {
    type: "string",
    required: true,
    // Convert Date to ISO string on set
    set: (value) => {
      if (value instanceof Date) {
        return value.toISOString();
      }
      return value || (/* @__PURE__ */ new Date()).toISOString();
    },
    // Always use current timestamp when creating/updating
    default: () => (/* @__PURE__ */ new Date()).toISOString()
  },
  metadata: {
    type: "string",
    // JSON stringified
    // Stringify objects on set
    set: (value) => {
      if (value && typeof value !== "string") {
        return JSON.stringify(value);
      }
      return value;
    },
    // Parse JSON string to object on get
    get: (value) => {
      if (value) {
        try {
          return JSON.parse(value);
        } catch {
          return value;
        }
      }
      return value;
    }
  }
};

// src/entities/eval.ts
var evalEntity = new electrodb.Entity({
  model: {
    entity: "eval",
    version: "1",
    service: "mastra"
  },
  attributes: {
    entity: {
      type: "string",
      required: true
    },
    ...baseAttributes,
    input: {
      type: "string",
      required: true
    },
    output: {
      type: "string",
      required: true
    },
    result: {
      type: "string",
      // JSON stringified
      required: true,
      // Stringify object on set
      set: (value) => {
        if (value && typeof value !== "string") {
          return JSON.stringify(value);
        }
        return value;
      },
      // Parse JSON string to object on get
      get: (value) => {
        if (value) {
          return JSON.parse(value);
        }
        return value;
      }
    },
    agent_name: {
      type: "string",
      required: true
    },
    metric_name: {
      type: "string",
      required: true
    },
    instructions: {
      type: "string",
      required: true
    },
    test_info: {
      type: "string",
      // JSON stringified
      required: false,
      // Stringify object on set
      set: (value) => {
        if (value && typeof value !== "string") {
          return JSON.stringify(value);
        }
        return value;
      },
      // Parse JSON string to object on get
      get: (value) => {
        return value;
      }
    },
    global_run_id: {
      type: "string",
      required: true
    },
    run_id: {
      type: "string",
      required: true
    },
    created_at: {
      type: "string",
      required: true,
      // Initialize with current timestamp if not provided
      default: () => (/* @__PURE__ */ new Date()).toISOString(),
      // Convert Date to ISO string on set
      set: (value) => {
        if (value instanceof Date) {
          return value.toISOString();
        }
        return value || (/* @__PURE__ */ new Date()).toISOString();
      }
    }
  },
  indexes: {
    primary: {
      pk: { field: "pk", composite: ["entity", "run_id"] },
      sk: { field: "sk", composite: [] }
    },
    byAgent: {
      index: "gsi1",
      pk: { field: "gsi1pk", composite: ["entity", "agent_name"] },
      sk: { field: "gsi1sk", composite: ["created_at"] }
    }
  }
});
var messageEntity = new electrodb.Entity({
  model: {
    entity: "message",
    version: "1",
    service: "mastra"
  },
  attributes: {
    entity: {
      type: "string",
      required: true
    },
    ...baseAttributes,
    id: {
      type: "string",
      required: true
    },
    threadId: {
      type: "string",
      required: true
    },
    content: {
      type: "string",
      required: true,
      // Stringify content object on set if it's not already a string
      set: (value) => {
        if (value && typeof value !== "string") {
          return JSON.stringify(value);
        }
        return value;
      },
      // Parse JSON string to object on get ONLY if it looks like JSON
      get: (value) => {
        if (value && typeof value === "string") {
          try {
            if (value.startsWith("{") || value.startsWith("[")) {
              return JSON.parse(value);
            }
          } catch {
            return value;
          }
        }
        return value;
      }
    },
    role: {
      type: "string",
      required: true
    },
    type: {
      type: "string",
      default: "text"
    },
    resourceId: {
      type: "string",
      required: false
    },
    toolCallIds: {
      type: "string",
      required: false,
      set: (value) => {
        if (Array.isArray(value)) {
          return JSON.stringify(value);
        }
        return value;
      },
      // Parse JSON string to array on get
      get: (value) => {
        if (value && typeof value === "string") {
          try {
            return JSON.parse(value);
          } catch {
            return value;
          }
        }
        return value;
      }
    },
    toolCallArgs: {
      type: "string",
      required: false,
      set: (value) => {
        if (value && typeof value !== "string") {
          return JSON.stringify(value);
        }
        return value;
      },
      // Parse JSON string to object on get
      get: (value) => {
        if (value && typeof value === "string") {
          try {
            return JSON.parse(value);
          } catch {
            return value;
          }
        }
        return value;
      }
    },
    toolNames: {
      type: "string",
      required: false,
      set: (value) => {
        if (Array.isArray(value)) {
          return JSON.stringify(value);
        }
        return value;
      },
      // Parse JSON string to array on get
      get: (value) => {
        if (value && typeof value === "string") {
          try {
            return JSON.parse(value);
          } catch {
            return value;
          }
        }
        return value;
      }
    }
  },
  indexes: {
    primary: {
      pk: { field: "pk", composite: ["entity", "id"] },
      sk: { field: "sk", composite: ["entity"] }
    },
    byThread: {
      index: "gsi1",
      pk: { field: "gsi1pk", composite: ["entity", "threadId"] },
      sk: { field: "gsi1sk", composite: ["createdAt"] }
    }
  }
});
var threadEntity = new electrodb.Entity({
  model: {
    entity: "thread",
    version: "1",
    service: "mastra"
  },
  attributes: {
    entity: {
      type: "string",
      required: true
    },
    ...baseAttributes,
    id: {
      type: "string",
      required: true
    },
    resourceId: {
      type: "string",
      required: true
    },
    title: {
      type: "string",
      required: true
    },
    metadata: {
      type: "string",
      required: false,
      // Stringify metadata object on set if it's not already a string
      set: (value) => {
        if (value && typeof value !== "string") {
          return JSON.stringify(value);
        }
        return value;
      },
      // Parse JSON string to object on get
      get: (value) => {
        if (value && typeof value === "string") {
          try {
            if (value.startsWith("{") || value.startsWith("[")) {
              return JSON.parse(value);
            }
          } catch {
            return value;
          }
        }
        return value;
      }
    }
  },
  indexes: {
    primary: {
      pk: { field: "pk", composite: ["entity", "id"] },
      sk: { field: "sk", composite: ["id"] }
    },
    byResource: {
      index: "gsi1",
      pk: { field: "gsi1pk", composite: ["entity", "resourceId"] },
      sk: { field: "gsi1sk", composite: ["createdAt"] }
    }
  }
});
var traceEntity = new electrodb.Entity({
  model: {
    entity: "trace",
    version: "1",
    service: "mastra"
  },
  attributes: {
    entity: {
      type: "string",
      required: true
    },
    ...baseAttributes,
    id: {
      type: "string",
      required: true
    },
    parentSpanId: {
      type: "string",
      required: false
    },
    name: {
      type: "string",
      required: true
    },
    traceId: {
      type: "string",
      required: true
    },
    scope: {
      type: "string",
      required: true
    },
    kind: {
      type: "number",
      required: true
    },
    attributes: {
      type: "string",
      // JSON stringified
      required: false,
      // Stringify object on set
      set: (value) => {
        if (value && typeof value !== "string") {
          return JSON.stringify(value);
        }
        return value;
      },
      // Parse JSON string to object on get
      get: (value) => {
        return value ? JSON.parse(value) : value;
      }
    },
    status: {
      type: "string",
      // JSON stringified
      required: false,
      // Stringify object on set
      set: (value) => {
        if (value && typeof value !== "string") {
          return JSON.stringify(value);
        }
        return value;
      },
      // Parse JSON string to object on get
      get: (value) => {
        return value;
      }
    },
    events: {
      type: "string",
      // JSON stringified
      required: false,
      // Stringify object on set
      set: (value) => {
        if (value && typeof value !== "string") {
          return JSON.stringify(value);
        }
        return value;
      },
      // Parse JSON string to object on get
      get: (value) => {
        return value;
      }
    },
    links: {
      type: "string",
      // JSON stringified
      required: false,
      // Stringify object on set
      set: (value) => {
        if (value && typeof value !== "string") {
          return JSON.stringify(value);
        }
        return value;
      },
      // Parse JSON string to object on get
      get: (value) => {
        return value;
      }
    },
    other: {
      type: "string",
      required: false
    },
    startTime: {
      type: "number",
      required: true
    },
    endTime: {
      type: "number",
      required: true
    }
  },
  indexes: {
    primary: {
      pk: { field: "pk", composite: ["entity", "id"] },
      sk: { field: "sk", composite: [] }
    },
    byName: {
      index: "gsi1",
      pk: { field: "gsi1pk", composite: ["entity", "name"] },
      sk: { field: "gsi1sk", composite: ["startTime"] }
    },
    byScope: {
      index: "gsi2",
      pk: { field: "gsi2pk", composite: ["entity", "scope"] },
      sk: { field: "gsi2sk", composite: ["startTime"] }
    }
  }
});
var workflowSnapshotEntity = new electrodb.Entity({
  model: {
    entity: "workflow_snapshot",
    version: "1",
    service: "mastra"
  },
  attributes: {
    entity: {
      type: "string",
      required: true
    },
    ...baseAttributes,
    workflow_name: {
      type: "string",
      required: true
    },
    run_id: {
      type: "string",
      required: true
    },
    snapshot: {
      type: "string",
      // JSON stringified
      required: true,
      // Stringify snapshot object on set
      set: (value) => {
        if (value && typeof value !== "string") {
          return JSON.stringify(value);
        }
        return value;
      },
      // Parse JSON string to object on get
      get: (value) => {
        return value ? JSON.parse(value) : value;
      }
    },
    resourceId: {
      type: "string",
      required: false
    }
  },
  indexes: {
    primary: {
      pk: { field: "pk", composite: ["entity", "workflow_name"] },
      sk: { field: "sk", composite: ["run_id"] }
    },
    // GSI to allow querying by run_id efficiently without knowing the workflow_name
    gsi2: {
      index: "gsi2",
      pk: { field: "gsi2pk", composite: ["entity", "run_id"] },
      sk: { field: "gsi2sk", composite: ["workflow_name"] }
    }
  }
});

// src/entities/index.ts
function getElectroDbService(client, tableName) {
  return new electrodb.Service(
    {
      thread: threadEntity,
      message: messageEntity,
      eval: evalEntity,
      trace: traceEntity,
      workflowSnapshot: workflowSnapshotEntity
    },
    {
      client,
      table: tableName
    }
  );
}

// src/storage/index.ts
var DynamoDBStore = class extends storage.MastraStorage {
  constructor({ name, config }) {
    super({ name });
    this.hasInitialized = null;
    if (!config.tableName || typeof config.tableName !== "string" || config.tableName.trim() === "") {
      throw new Error("DynamoDBStore: config.tableName must be provided and cannot be empty.");
    }
    if (!/^[a-zA-Z0-9_.-]{3,255}$/.test(config.tableName)) {
      throw new Error(
        `DynamoDBStore: config.tableName "${config.tableName}" contains invalid characters or is not between 3 and 255 characters long.`
      );
    }
    const dynamoClient = new clientDynamodb.DynamoDBClient({
      region: config.region || "us-east-1",
      endpoint: config.endpoint,
      credentials: config.credentials
    });
    this.tableName = config.tableName;
    this.client = libDynamodb.DynamoDBDocumentClient.from(dynamoClient);
    this.service = getElectroDbService(this.client, this.tableName);
  }
  /**
   * This method is modified for DynamoDB with ElectroDB single-table design.
   * It assumes the table is created and managed externally via CDK/CloudFormation.
   *
   * This implementation only validates that the required table exists and is accessible.
   * No table creation is attempted - we simply check if we can access the table.
   */
  async createTable({ tableName }) {
    this.logger.debug("Validating access to externally managed table", { tableName, physicalTable: this.tableName });
    try {
      const tableExists = await this.validateTableExists();
      if (!tableExists) {
        this.logger.error(
          `Table ${this.tableName} does not exist or is not accessible. It should be created via CDK/CloudFormation.`
        );
        throw new Error(
          `Table ${this.tableName} does not exist or is not accessible. Ensure it's created via CDK/CloudFormation before using this store.`
        );
      }
      this.logger.debug(`Table ${this.tableName} exists and is accessible`);
    } catch (error) {
      this.logger.error("Error validating table access", { tableName: this.tableName, error });
      throw error;
    }
  }
  /**
   * Validates that the required DynamoDB table exists and is accessible.
   * This does not check the table structure - it assumes the table
   * was created with the correct structure via CDK/CloudFormation.
   */
  async validateTableExists() {
    try {
      const command = new clientDynamodb.DescribeTableCommand({
        TableName: this.tableName
      });
      await this.client.send(command);
      return true;
    } catch (error) {
      if (error.name === "ResourceNotFoundException") {
        return false;
      }
      throw error;
    }
  }
  /**
   * Initialize storage, validating the externally managed table is accessible.
   * For the single-table design, we only validate once that we can access
   * the table that was created via CDK/CloudFormation.
   */
  async init() {
    if (this.hasInitialized === null) {
      this.hasInitialized = this._performInitializationAndStore();
    }
    try {
      await this.hasInitialized;
    } catch (error) {
      throw error;
    }
  }
  /**
   * Performs the actual table validation and stores the promise.
   * Handles resetting the stored promise on failure to allow retries.
   */
  _performInitializationAndStore() {
    return this.validateTableExists().then((exists) => {
      if (!exists) {
        throw new Error(
          `Table ${this.tableName} does not exist or is not accessible. Ensure it's created via CDK/CloudFormation before using this store.`
        );
      }
      return true;
    }).catch((err) => {
      this.hasInitialized = null;
      throw err;
    });
  }
  /**
   * Clear all items from a logical "table" (entity type)
   */
  async clearTable({ tableName }) {
    this.logger.debug("DynamoDB clearTable called", { tableName });
    const entityName = this.getEntityNameForTable(tableName);
    if (!entityName || !this.service.entities[entityName]) {
      throw new Error(`No entity defined for ${tableName}`);
    }
    try {
      const result = await this.service.entities[entityName].scan.go({ pages: "all" });
      if (!result.data.length) {
        this.logger.debug(`No records found to clear for ${tableName}`);
        return;
      }
      this.logger.debug(`Found ${result.data.length} records to delete for ${tableName}`);
      const keysToDelete = result.data.map((item) => {
        const key = { entity: entityName };
        switch (entityName) {
          case "thread":
            if (!item.id) throw new Error(`Missing required key 'id' for entity 'thread'`);
            key.id = item.id;
            break;
          case "message":
            if (!item.id) throw new Error(`Missing required key 'id' for entity 'message'`);
            key.id = item.id;
            break;
          case "workflowSnapshot":
            if (!item.workflow_name)
              throw new Error(`Missing required key 'workflow_name' for entity 'workflowSnapshot'`);
            if (!item.run_id) throw new Error(`Missing required key 'run_id' for entity 'workflowSnapshot'`);
            key.workflow_name = item.workflow_name;
            key.run_id = item.run_id;
            break;
          case "eval":
            if (!item.run_id) throw new Error(`Missing required key 'run_id' for entity 'eval'`);
            key.run_id = item.run_id;
            break;
          case "trace":
            if (!item.id) throw new Error(`Missing required key 'id' for entity 'trace'`);
            key.id = item.id;
            break;
          default:
            this.logger.warn(`Unknown entity type encountered during clearTable: ${entityName}`);
            throw new Error(`Cannot construct delete key for unknown entity type: ${entityName}`);
        }
        return key;
      });
      const batchSize = 25;
      for (let i = 0; i < keysToDelete.length; i += batchSize) {
        const batchKeys = keysToDelete.slice(i, i + batchSize);
        await this.service.entities[entityName].delete(batchKeys).go();
      }
      this.logger.debug(`Successfully cleared all records for ${tableName}`);
    } catch (error) {
      this.logger.error("Failed to clear table", { tableName, error });
      throw error;
    }
  }
  /**
   * Insert a record into the specified "table" (entity)
   */
  async insert({ tableName, record }) {
    this.logger.debug("DynamoDB insert called", { tableName });
    const entityName = this.getEntityNameForTable(tableName);
    if (!entityName || !this.service.entities[entityName]) {
      throw new Error(`No entity defined for ${tableName}`);
    }
    try {
      const dataToSave = { entity: entityName, ...record };
      await this.service.entities[entityName].create(dataToSave).go();
    } catch (error) {
      this.logger.error("Failed to insert record", { tableName, error });
      throw error;
    }
  }
  /**
   * Insert multiple records as a batch
   */
  async batchInsert({ tableName, records }) {
    this.logger.debug("DynamoDB batchInsert called", { tableName, count: records.length });
    const entityName = this.getEntityNameForTable(tableName);
    if (!entityName || !this.service.entities[entityName]) {
      throw new Error(`No entity defined for ${tableName}`);
    }
    const recordsToSave = records.map((rec) => ({ entity: entityName, ...rec }));
    const batchSize = 25;
    const batches = [];
    for (let i = 0; i < recordsToSave.length; i += batchSize) {
      const batch = recordsToSave.slice(i, i + batchSize);
      batches.push(batch);
    }
    try {
      for (const batch of batches) {
        for (const recordData of batch) {
          if (!recordData.entity) {
            this.logger.error("Missing entity property in record data for batchInsert", { recordData, tableName });
            throw new Error(`Internal error: Missing entity property during batchInsert for ${tableName}`);
          }
          this.logger.debug("Attempting to create record in batchInsert:", { entityName, recordData });
          await this.service.entities[entityName].create(recordData).go();
        }
      }
    } catch (error) {
      this.logger.error("Failed to batch insert records", { tableName, error });
      throw error;
    }
  }
  /**
   * Load a record by its keys
   */
  async load({ tableName, keys }) {
    this.logger.debug("DynamoDB load called", { tableName, keys });
    const entityName = this.getEntityNameForTable(tableName);
    if (!entityName || !this.service.entities[entityName]) {
      throw new Error(`No entity defined for ${tableName}`);
    }
    try {
      const keyObject = { entity: entityName, ...keys };
      const result = await this.service.entities[entityName].get(keyObject).go();
      if (!result.data) {
        return null;
      }
      let data = result.data;
      return data;
    } catch (error) {
      this.logger.error("Failed to load record", { tableName, keys, error });
      throw error;
    }
  }
  // Thread operations
  async getThreadById({ threadId }) {
    this.logger.debug("Getting thread by ID", { threadId });
    try {
      const result = await this.service.entities.thread.get({ entity: "thread", id: threadId }).go();
      if (!result.data) {
        return null;
      }
      const data = result.data;
      return {
        ...data
        // metadata: data.metadata ? JSON.parse(data.metadata) : undefined, // REMOVED by AI
        // metadata is already transformed by the entity's getter
      };
    } catch (error) {
      this.logger.error("Failed to get thread by ID", { threadId, error });
      throw error;
    }
  }
  async getThreadsByResourceId({ resourceId }) {
    this.logger.debug("Getting threads by resource ID", { resourceId });
    try {
      const result = await this.service.entities.thread.query.byResource({ entity: "thread", resourceId }).go();
      if (!result.data.length) {
        return [];
      }
      return result.data.map((data) => ({
        ...data
        // metadata: data.metadata ? JSON.parse(data.metadata) : undefined, // REMOVED by AI
        // metadata is already transformed by the entity's getter
      }));
    } catch (error) {
      this.logger.error("Failed to get threads by resource ID", { resourceId, error });
      throw error;
    }
  }
  async saveThread({ thread }) {
    this.logger.debug("Saving thread", { threadId: thread.id });
    const now = /* @__PURE__ */ new Date();
    const threadData = {
      entity: "thread",
      id: thread.id,
      resourceId: thread.resourceId,
      title: thread.title || `Thread ${thread.id}`,
      createdAt: thread.createdAt?.toISOString() || now.toISOString(),
      updatedAt: now.toISOString(),
      metadata: thread.metadata ? JSON.stringify(thread.metadata) : void 0
    };
    try {
      await this.service.entities.thread.create(threadData).go();
      return {
        id: thread.id,
        resourceId: thread.resourceId,
        title: threadData.title,
        createdAt: thread.createdAt || now,
        updatedAt: now,
        metadata: thread.metadata
      };
    } catch (error) {
      this.logger.error("Failed to save thread", { threadId: thread.id, error });
      throw error;
    }
  }
  async updateThread({
    id,
    title,
    metadata
  }) {
    this.logger.debug("Updating thread", { threadId: id });
    try {
      const existingThread = await this.getThreadById({ threadId: id });
      if (!existingThread) {
        throw new Error(`Thread not found: ${id}`);
      }
      const now = /* @__PURE__ */ new Date();
      const updateData = {
        updatedAt: now.toISOString()
      };
      if (title) {
        updateData.title = title;
      }
      if (metadata) {
        updateData.metadata = JSON.stringify(metadata);
      }
      await this.service.entities.thread.update({ entity: "thread", id }).set(updateData).go();
      return {
        ...existingThread,
        title: title || existingThread.title,
        metadata: metadata || existingThread.metadata,
        updatedAt: now
      };
    } catch (error) {
      this.logger.error("Failed to update thread", { threadId: id, error });
      throw error;
    }
  }
  async deleteThread({ threadId }) {
    this.logger.debug("Deleting thread", { threadId });
    try {
      await this.service.entities.thread.delete({ entity: "thread", id: threadId }).go();
    } catch (error) {
      this.logger.error("Failed to delete thread", { threadId, error });
      throw error;
    }
  }
  // Message operations
  async getMessages(args) {
    const { threadId, selectBy } = args;
    this.logger.debug("Getting messages", { threadId, selectBy });
    try {
      const query = this.service.entities.message.query.byThread({ entity: "message", threadId });
      if (selectBy?.last && typeof selectBy.last === "number") {
        const results2 = await query.go({ limit: selectBy.last, reverse: true });
        return results2.data.map((data) => this.parseMessageData(data));
      }
      const results = await query.go();
      return results.data.map((data) => this.parseMessageData(data));
    } catch (error) {
      this.logger.error("Failed to get messages", { threadId, error });
      throw error;
    }
  }
  async saveMessages({ messages }) {
    this.logger.debug("Saving messages", { count: messages.length });
    if (!messages.length) {
      return [];
    }
    const messagesToSave = messages.map((msg) => {
      const now = (/* @__PURE__ */ new Date()).toISOString();
      return {
        entity: "message",
        // Add entity type
        id: msg.id,
        threadId: msg.threadId,
        role: msg.role,
        type: msg.type,
        resourceId: msg.resourceId,
        // Ensure complex fields are stringified if not handled by attribute setters
        content: typeof msg.content === "string" ? msg.content : JSON.stringify(msg.content),
        toolCallArgs: msg.toolCallArgs ? JSON.stringify(msg.toolCallArgs) : void 0,
        toolCallIds: msg.toolCallIds ? JSON.stringify(msg.toolCallIds) : void 0,
        toolNames: msg.toolNames ? JSON.stringify(msg.toolNames) : void 0,
        createdAt: msg.createdAt?.toISOString() || now,
        updatedAt: now
        // Add updatedAt
      };
    });
    try {
      const batchSize = 25;
      const batches = [];
      for (let i = 0; i < messagesToSave.length; i += batchSize) {
        const batch = messagesToSave.slice(i, i + batchSize);
        batches.push(batch);
      }
      for (const batch of batches) {
        for (const messageData of batch) {
          if (!messageData.entity) {
            this.logger.error("Missing entity property in message data for create", { messageData });
            throw new Error("Internal error: Missing entity property during saveMessages");
          }
          await this.service.entities.message.create(messageData).go();
        }
      }
      return messages;
    } catch (error) {
      this.logger.error("Failed to save messages", { error });
      throw error;
    }
  }
  // Helper function to parse message data (handle JSON fields)
  parseMessageData(data) {
    return {
      ...data,
      // Ensure dates are Date objects if needed (ElectroDB might return strings)
      createdAt: data.createdAt ? new Date(data.createdAt) : void 0,
      updatedAt: data.updatedAt ? new Date(data.updatedAt) : void 0
      // Other fields like content, toolCallArgs etc. are assumed to be correctly
      // transformed by the ElectroDB entity getters.
    };
  }
  // Trace operations
  async getTraces(args) {
    const { name, scope, page, perPage } = args;
    this.logger.debug("Getting traces", { name, scope, page, perPage });
    try {
      let query;
      if (name) {
        query = this.service.entities.trace.query.byName({ entity: "trace", name });
      } else if (scope) {
        query = this.service.entities.trace.query.byScope({ entity: "trace", scope });
      } else {
        this.logger.warn("Performing a scan operation on traces - consider using a more specific query");
        query = this.service.entities.trace.scan;
      }
      let items = [];
      let cursor = null;
      let pagesFetched = 0;
      const startPage = page > 0 ? page : 1;
      do {
        const results = await query.go({ cursor, limit: perPage });
        pagesFetched++;
        if (pagesFetched === startPage) {
          items = results.data;
          break;
        }
        cursor = results.cursor;
        if (!cursor && results.data.length > 0 && pagesFetched < startPage) {
          break;
        }
      } while (cursor && pagesFetched < startPage);
      return items;
    } catch (error) {
      this.logger.error("Failed to get traces", { error });
      throw error;
    }
  }
  async batchTraceInsert({ records }) {
    this.logger.debug("Batch inserting traces", { count: records.length });
    if (!records.length) {
      return;
    }
    try {
      const recordsToSave = records.map((rec) => ({ entity: "trace", ...rec }));
      await this.batchInsert({
        tableName: storage.TABLE_TRACES,
        records: recordsToSave
        // Pass records with 'entity' included
      });
    } catch (error) {
      this.logger.error("Failed to batch insert traces", { error });
      throw error;
    }
  }
  // Workflow operations
  async persistWorkflowSnapshot({
    workflowName,
    runId,
    snapshot
  }) {
    this.logger.debug("Persisting workflow snapshot", { workflowName, runId });
    try {
      const resourceId = "resourceId" in snapshot ? snapshot.resourceId : void 0;
      const now = (/* @__PURE__ */ new Date()).toISOString();
      const data = {
        entity: "workflow_snapshot",
        // Add entity type
        workflow_name: workflowName,
        run_id: runId,
        snapshot: JSON.stringify(snapshot),
        // Stringify the snapshot object
        createdAt: now,
        updatedAt: now,
        resourceId
      };
      await this.service.entities.workflowSnapshot.create(data).go();
    } catch (error) {
      this.logger.error("Failed to persist workflow snapshot", { workflowName, runId, error });
      throw error;
    }
  }
  async loadWorkflowSnapshot({
    workflowName,
    runId
  }) {
    this.logger.debug("Loading workflow snapshot", { workflowName, runId });
    try {
      const result = await this.service.entities.workflowSnapshot.get({
        entity: "workflow_snapshot",
        // Add entity type
        workflow_name: workflowName,
        run_id: runId
      }).go();
      if (!result.data?.snapshot) {
        return null;
      }
      return result.data.snapshot;
    } catch (error) {
      this.logger.error("Failed to load workflow snapshot", { workflowName, runId, error });
      throw error;
    }
  }
  async getWorkflowRuns(args) {
    this.logger.debug("Getting workflow runs", { args });
    try {
      const limit = args?.limit || 10;
      const offset = args?.offset || 0;
      let query;
      if (args?.workflowName) {
        query = this.service.entities.workflowSnapshot.query.primary({
          entity: "workflow_snapshot",
          // Add entity type
          workflow_name: args.workflowName
        });
      } else {
        this.logger.warn("Performing a scan operation on workflow snapshots - consider using a more specific query");
        query = this.service.entities.workflowSnapshot.scan;
      }
      const allMatchingSnapshots = [];
      let cursor = null;
      const DYNAMODB_PAGE_SIZE = 100;
      do {
        const pageResults = await query.go({
          limit: DYNAMODB_PAGE_SIZE,
          cursor
        });
        if (pageResults.data && pageResults.data.length > 0) {
          let pageFilteredData = pageResults.data;
          if (args?.fromDate || args?.toDate) {
            pageFilteredData = pageFilteredData.filter((snapshot) => {
              const createdAt = new Date(snapshot.createdAt);
              if (args.fromDate && createdAt < args.fromDate) {
                return false;
              }
              if (args.toDate && createdAt > args.toDate) {
                return false;
              }
              return true;
            });
          }
          if (args?.resourceId) {
            pageFilteredData = pageFilteredData.filter((snapshot) => {
              return snapshot.resourceId === args.resourceId;
            });
          }
          allMatchingSnapshots.push(...pageFilteredData);
        }
        cursor = pageResults.cursor;
      } while (cursor);
      if (!allMatchingSnapshots.length) {
        return { runs: [], total: 0 };
      }
      const total = allMatchingSnapshots.length;
      const paginatedData = allMatchingSnapshots.slice(offset, offset + limit);
      const runs = paginatedData.map((snapshot) => this.formatWorkflowRun(snapshot));
      return {
        runs,
        total
      };
    } catch (error) {
      this.logger.error("Failed to get workflow runs", { error });
      throw error;
    }
  }
  async getWorkflowRunById(args) {
    const { runId, workflowName } = args;
    this.logger.debug("Getting workflow run by ID", { runId, workflowName });
    try {
      if (workflowName) {
        this.logger.debug("WorkflowName provided, using direct GET operation.");
        const result2 = await this.service.entities.workflowSnapshot.get({
          entity: "workflow_snapshot",
          // Entity type for PK
          workflow_name: workflowName,
          run_id: runId
        }).go();
        if (!result2.data) {
          return null;
        }
        const snapshot2 = result2.data.snapshot;
        return {
          workflowName: result2.data.workflow_name,
          runId: result2.data.run_id,
          snapshot: snapshot2,
          createdAt: new Date(result2.data.createdAt),
          updatedAt: new Date(result2.data.updatedAt),
          resourceId: result2.data.resourceId
        };
      }
      this.logger.debug(
        'WorkflowName not provided. Attempting to find workflow run by runId using GSI. Ensure GSI (e.g., "byRunId") is defined on the workflowSnapshot entity with run_id as its key and provisioned in DynamoDB.'
      );
      const result = await this.service.entities.workflowSnapshot.query.gsi2({ entity: "workflow_snapshot", run_id: runId }).go();
      const matchingRunDbItem = result.data && result.data.length > 0 ? result.data[0] : null;
      if (!matchingRunDbItem) {
        return null;
      }
      const snapshot = matchingRunDbItem.snapshot;
      return {
        workflowName: matchingRunDbItem.workflow_name,
        runId: matchingRunDbItem.run_id,
        snapshot,
        createdAt: new Date(matchingRunDbItem.createdAt),
        updatedAt: new Date(matchingRunDbItem.updatedAt),
        resourceId: matchingRunDbItem.resourceId
      };
    } catch (error) {
      this.logger.error("Failed to get workflow run by ID", { runId, workflowName, error });
      throw error;
    }
  }
  // Helper function to format workflow run
  formatWorkflowRun(snapshotData) {
    return {
      workflowName: snapshotData.workflow_name,
      runId: snapshotData.run_id,
      snapshot: snapshotData.snapshot,
      createdAt: new Date(snapshotData.createdAt),
      updatedAt: new Date(snapshotData.updatedAt),
      resourceId: snapshotData.resourceId
    };
  }
  // Helper methods for entity/table mapping
  getEntityNameForTable(tableName) {
    const mapping = {
      [storage.TABLE_THREADS]: "thread",
      [storage.TABLE_MESSAGES]: "message",
      [storage.TABLE_WORKFLOW_SNAPSHOT]: "workflowSnapshot",
      [storage.TABLE_EVALS]: "eval",
      [storage.TABLE_TRACES]: "trace"
    };
    return mapping[tableName] || null;
  }
  // Eval operations
  async getEvalsByAgentName(agentName, type) {
    this.logger.debug("Getting evals for agent", { agentName, type });
    try {
      const query = this.service.entities.eval.query.byAgent({ entity: "eval", agent_name: agentName });
      const results = await query.go({ order: "desc", limit: 100 });
      if (!results.data.length) {
        return [];
      }
      let filteredData = results.data;
      if (type) {
        filteredData = filteredData.filter((evalRecord) => {
          try {
            const testInfo = evalRecord.test_info && typeof evalRecord.test_info === "string" ? JSON.parse(evalRecord.test_info) : void 0;
            if (type === "test" && !testInfo) {
              return false;
            }
            if (type === "live" && testInfo) {
              return false;
            }
          } catch (e) {
            this.logger.warn("Failed to parse test_info during filtering", { record: evalRecord, error: e });
          }
          return true;
        });
      }
      return filteredData.map((evalRecord) => {
        try {
          return {
            input: evalRecord.input,
            output: evalRecord.output,
            // Safely parse result and test_info
            result: evalRecord.result && typeof evalRecord.result === "string" ? JSON.parse(evalRecord.result) : void 0,
            agentName: evalRecord.agent_name,
            createdAt: evalRecord.created_at,
            // Keep as string from DDB?
            metricName: evalRecord.metric_name,
            instructions: evalRecord.instructions,
            runId: evalRecord.run_id,
            globalRunId: evalRecord.global_run_id,
            testInfo: evalRecord.test_info && typeof evalRecord.test_info === "string" ? JSON.parse(evalRecord.test_info) : void 0
          };
        } catch (parseError) {
          this.logger.error("Failed to parse eval record", { record: evalRecord, error: parseError });
          return {
            agentName: evalRecord.agent_name,
            createdAt: evalRecord.created_at,
            runId: evalRecord.run_id,
            globalRunId: evalRecord.global_run_id
          };
        }
      });
    } catch (error) {
      this.logger.error("Failed to get evals by agent name", { agentName, type, error });
      throw error;
    }
  }
  /**
   * Closes the DynamoDB client connection and cleans up resources.
   * Should be called when the store is no longer needed, e.g., at the end of tests or application shutdown.
   */
  async close() {
    this.logger.debug("Closing DynamoDB client for store:", { name: this.name });
    try {
      this.client.destroy();
      this.logger.debug("DynamoDB client closed successfully for store:", { name: this.name });
    } catch (error) {
      this.logger.error("Error closing DynamoDB client for store:", { name: this.name, error });
      throw error;
    }
  }
};

exports.DynamoDBStore = DynamoDBStore;
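The store above expects an externally managed single-table layout: a string pk/sk primary key plus the gsi1 and gsi2 indexes that the ElectroDB entities reference. The following is a minimal sketch, not part of the package, showing how that table might be created locally and the store exercised. The table name "mastra-single-table", the localhost:8000 endpoint (e.g., DynamoDB Local as started from the bundled docker-compose.yml), and the dummy credentials are all assumptions for illustration.

'use strict';

const { DynamoDBClient, CreateTableCommand } = require('@aws-sdk/client-dynamodb');
const { DynamoDBStore } = require('@mastra/dynamodb');

// Hypothetical local setup; in production the table would come from CDK/CloudFormation.
const TABLE_NAME = 'mastra-single-table';
const ENDPOINT = 'http://localhost:8000';
const credentials = { accessKeyId: 'local', secretAccessKey: 'local' };

// Create a table whose key schema matches the fields the entities use
// (pk/sk for the primary index, gsi1pk/gsi1sk and gsi2pk/gsi2sk for the GSIs).
async function createLocalTable() {
  const client = new DynamoDBClient({ region: 'us-east-1', endpoint: ENDPOINT, credentials });
  await client.send(new CreateTableCommand({
    TableName: TABLE_NAME,
    BillingMode: 'PAY_PER_REQUEST',
    AttributeDefinitions: [
      { AttributeName: 'pk', AttributeType: 'S' },
      { AttributeName: 'sk', AttributeType: 'S' },
      { AttributeName: 'gsi1pk', AttributeType: 'S' },
      { AttributeName: 'gsi1sk', AttributeType: 'S' },
      { AttributeName: 'gsi2pk', AttributeType: 'S' },
      { AttributeName: 'gsi2sk', AttributeType: 'S' },
    ],
    KeySchema: [
      { AttributeName: 'pk', KeyType: 'HASH' },
      { AttributeName: 'sk', KeyType: 'RANGE' },
    ],
    GlobalSecondaryIndexes: [
      {
        IndexName: 'gsi1',
        KeySchema: [
          { AttributeName: 'gsi1pk', KeyType: 'HASH' },
          { AttributeName: 'gsi1sk', KeyType: 'RANGE' },
        ],
        Projection: { ProjectionType: 'ALL' },
      },
      {
        IndexName: 'gsi2',
        KeySchema: [
          { AttributeName: 'gsi2pk', KeyType: 'HASH' },
          { AttributeName: 'gsi2sk', KeyType: 'RANGE' },
        ],
        Projection: { ProjectionType: 'ALL' },
      },
    ],
  }));
}

async function main() {
  await createLocalTable();

  // Constructor takes { name, config }; init() only validates that the table is reachable.
  const store = new DynamoDBStore({
    name: 'dynamodb',
    config: { tableName: TABLE_NAME, endpoint: ENDPOINT, region: 'us-east-1', credentials },
  });
  await store.init();

  await store.saveThread({
    thread: { id: 'thread-1', resourceId: 'resource-1', title: 'Example thread', metadata: { topic: 'demo' } },
  });
  console.log(await store.getThreadById({ threadId: 'thread-1' }));
  await store.close();
}

main().catch(console.error);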