@mastra/upstash 0.2.2-alpha.1 → 0.2.2-alpha.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +7 -7
- package/CHANGELOG.md +27 -0
- package/dist/_tsup-dts-rollup.d.cts +20 -1
- package/dist/_tsup-dts-rollup.d.ts +20 -1
- package/dist/index.cjs +146 -15
- package/dist/index.js +147 -16
- package/package.json +2 -2
- package/src/storage/index.ts +234 -17
- package/src/storage/upstash.test.ts +369 -137
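The hunks below are for package/src/storage/upstash.test.ts. The UpstashStore instance the tests exercise is created in setup code that falls outside the displayed hunks; the following is a minimal sketch only, assuming the constructor accepts Upstash REST credentials (the option and environment-variable names here are an assumption, not taken from this diff):

  // Hypothetical test setup sketch; not part of the published diff.
  import { UpstashStore } from '@mastra/upstash';

  const store = new UpstashStore({
    url: process.env.UPSTASH_REDIS_REST_URL!,     // assumed env variable
    token: process.env.UPSTASH_REDIS_REST_TOKEN!, // assumed env variable
  });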
--- package/src/storage/upstash.test.ts
+++ package/src/storage/upstash.test.ts
@@ -1,4 +1,9 @@
-import {
+import { randomUUID } from 'crypto';
+import type { MetricResult, TestInfo } from '@mastra/core/eval';
+import type { MessageType } from '@mastra/core/memory';
+import type { TABLE_NAMES } from '@mastra/core/storage';
+import { TABLE_MESSAGES, TABLE_THREADS, TABLE_WORKFLOW_SNAPSHOT, TABLE_EVALS } from '@mastra/core/storage';
+import type { WorkflowRunState } from '@mastra/core/workflows';
 import { describe, it, expect, beforeAll, beforeEach, afterAll, vi } from 'vitest';
 
 import { UpstashStore } from './index';
@@ -6,6 +11,67 @@ import { UpstashStore } from './index';
 // Increase timeout for all tests in this file to 30 seconds
 vi.setConfig({ testTimeout: 60_000, hookTimeout: 60_000 });
 
+const createSampleThread = (date?: Date) => ({
+  id: `thread-${randomUUID()}`,
+  resourceId: `resource-${randomUUID()}`,
+  title: 'Test Thread',
+  createdAt: date || new Date(),
+  updatedAt: date || new Date(),
+  metadata: { key: 'value' },
+});
+
+const createSampleMessage = (threadId: string, content: string = 'Hello') =>
+  ({
+    id: `msg-${randomUUID()}`,
+    role: 'user',
+    type: 'text',
+    threadId,
+    content: [{ type: 'text', text: content }],
+    createdAt: new Date(),
+  }) as any;
+
+const createSampleWorkflowSnapshot = (status: string, createdAt?: Date) => {
+  const runId = `run-${randomUUID()}`;
+  const stepId = `step-${randomUUID()}`;
+  const timestamp = createdAt || new Date();
+  const snapshot = {
+    result: { success: true },
+    value: {},
+    context: {
+      steps: {
+        [stepId]: {
+          status,
+          payload: {},
+          error: undefined,
+        },
+      },
+      triggerData: {},
+      attempts: {},
+    },
+    activePaths: [],
+    runId,
+    timestamp: timestamp.getTime(),
+  } as WorkflowRunState;
+  return { snapshot, runId, stepId };
+};
+
+const createSampleEval = (agentName: string, isTest = false) => {
+  const testInfo = isTest ? { testPath: 'test/path.ts', testName: 'Test Name' } : undefined;
+
+  return {
+    agent_name: agentName,
+    input: 'Sample input',
+    output: 'Sample output',
+    result: JSON.stringify({ score: 0.8 }),
+    metric_name: 'sample-metric',
+    instructions: 'Sample instructions',
+    test_info: testInfo ? JSON.stringify(testInfo) : undefined,
+    global_run_id: `global-${randomUUID()}`,
+    run_id: `run-${randomUUID()}`,
+    created_at: new Date().toISOString(),
+  };
+};
+
 describe('UpstashStore', () => {
   let store: UpstashStore;
   const testTableName = 'test_table';
@@ -26,16 +92,18 @@ describe('UpstashStore', () => {
 
   afterAll(async () => {
     // Clean up test tables
-    await store.clearTable({ tableName: testTableName });
-    await store.clearTable({ tableName: testTableName2 });
-    await store.clearTable({ tableName:
-    await store.clearTable({ tableName:
+    await store.clearTable({ tableName: testTableName as TABLE_NAMES });
+    await store.clearTable({ tableName: testTableName2 as TABLE_NAMES });
+    await store.clearTable({ tableName: TABLE_THREADS });
+    await store.clearTable({ tableName: TABLE_MESSAGES });
+    await store.clearTable({ tableName: TABLE_WORKFLOW_SNAPSHOT });
+    await store.clearTable({ tableName: TABLE_EVALS });
   });
 
   describe('Table Operations', () => {
     it('should create a new table with schema', async () => {
       await store.createTable({
-        tableName: testTableName,
+        tableName: testTableName as TABLE_NAMES,
         schema: {
           id: { type: 'text', primaryKey: true },
           data: { type: 'text', nullable: true },
@@ -44,17 +112,17 @@ describe('UpstashStore', () => {
 
       // Verify table exists by inserting and retrieving data
       await store.insert({
-        tableName: testTableName,
+        tableName: testTableName as TABLE_NAMES,
         record: { id: 'test1', data: 'test-data' },
       });
 
-      const result = await store.load({ tableName: testTableName, keys: { id: 'test1' } });
+      const result = await store.load({ tableName: testTableName as TABLE_NAMES, keys: { id: 'test1' } });
       expect(result).toBeTruthy();
     });
 
     it('should handle multiple table creation', async () => {
       await store.createTable({
-        tableName: testTableName2,
+        tableName: testTableName2 as TABLE_NAMES,
        schema: {
           id: { type: 'text', primaryKey: true },
           data: { type: 'text', nullable: true },
@@ -63,35 +131,28 @@ describe('UpstashStore', () => {
 
       // Verify both tables work independently
       await store.insert({
-        tableName: testTableName2,
+        tableName: testTableName2 as TABLE_NAMES,
         record: { id: 'test2', data: 'test-data-2' },
       });
 
-      const result = await store.load({ tableName: testTableName2, keys: { id: 'test2' } });
+      const result = await store.load({ tableName: testTableName2 as TABLE_NAMES, keys: { id: 'test2' } });
       expect(result).toBeTruthy();
     });
   });
 
   describe('Thread Operations', () => {
     beforeEach(async () => {
-      await store.clearTable({ tableName:
+      await store.clearTable({ tableName: TABLE_THREADS });
    });
 
    it('should create and retrieve a thread', async () => {
      const now = new Date();
-      const thread =
-
-
-        title: 'Test Thread',
-        createdAt: now,
-        updatedAt: now,
-        metadata: { key: 'value' },
-      };
-
-      const savedThread = await store.saveThread({ thread });
+      const thread = createSampleThread(now);
+
+      const savedThread = await store.__saveThread({ thread });
      expect(savedThread).toEqual(thread);
 
-      const retrievedThread = await store.
+      const retrievedThread = await store.__getThreadById({ threadId: thread.id });
      expect(retrievedThread).toEqual({
        ...thread,
        createdAt: new Date(now.toISOString()),
@@ -105,46 +166,26 @@ describe('UpstashStore', () => {
    });
 
    it('should get threads by resource ID', async () => {
-      const
-      const
-
-
-
-
-          createdAt: new Date(),
-          updatedAt: new Date(),
-          metadata: {},
-        },
-        {
-          id: 'thread-2',
-          resourceId,
-          title: 'Thread 2',
-          createdAt: new Date(),
-          updatedAt: new Date(),
-          metadata: {},
-        },
-      ];
+      const thread1 = createSampleThread();
+      const thread2 = { ...createSampleThread(), resourceId: thread1.resourceId };
+      const threads = [thread1, thread2];
+
+      const resourceId = threads[0].resourceId;
+      const threadIds = threads.map(t => t.id);
 
      await Promise.all(threads.map(thread => store.saveThread({ thread })));
 
      const retrievedThreads = await store.getThreadsByResourceId({ resourceId });
      expect(retrievedThreads).toHaveLength(2);
-      expect(retrievedThreads.map(t => t.id)).toEqual(expect.arrayContaining(
+      expect(retrievedThreads.map(t => t.id)).toEqual(expect.arrayContaining(threadIds));
    });
 
    it('should update thread metadata', async () => {
-      const thread =
-        id: 'thread-1',
-        resourceId: 'resource-1',
-        title: 'Test Thread',
-        createdAt: new Date(),
-        updatedAt: new Date(),
-        metadata: { initial: 'value' },
-      };
+      const thread = createSampleThread();
 
      await store.saveThread({ thread });
 
-      const updatedThread = await store.
+      const updatedThread = await store.__updateThread({
        id: thread.id,
        title: 'Updated Title',
        metadata: { updated: 'value' },
@@ -152,7 +193,7 @@ describe('UpstashStore', () => {
 
      expect(updatedThread.title).toBe('Updated Title');
      expect(updatedThread.metadata).toEqual({
-
+        key: 'value',
        updated: 'value',
      });
    });
@@ -160,19 +201,12 @@ describe('UpstashStore', () => {
 
  describe('Date Handling', () => {
    beforeEach(async () => {
-      await store.clearTable({ tableName:
+      await store.clearTable({ tableName: TABLE_THREADS });
    });
 
    it('should handle Date objects in thread operations', async () => {
      const now = new Date();
-      const thread =
-        id: 'thread-1',
-        resourceId: 'resource-1',
-        title: 'Test Thread',
-        createdAt: now,
-        updatedAt: now,
-        metadata: {},
-      };
+      const thread = createSampleThread(now);
 
      await store.saveThread({ thread });
      const retrievedThread = await store.getThreadById({ threadId: thread.id });
@@ -184,16 +218,9 @@ describe('UpstashStore', () => {
 
    it('should handle ISO string dates in thread operations', async () => {
      const now = new Date();
-      const thread =
-
-
-        title: 'Test Thread',
-        createdAt: now.toISOString(),
-        updatedAt: now.toISOString(),
-        metadata: {},
-      };
-
-      await store.saveThread({ thread: thread as any });
+      const thread = createSampleThread(now);
+
+      await store.saveThread({ thread });
      const retrievedThread = await store.getThreadById({ threadId: thread.id });
      expect(retrievedThread?.createdAt).toBeInstanceOf(Date);
      expect(retrievedThread?.updatedAt).toBeInstanceOf(Date);
@@ -203,16 +230,9 @@ describe('UpstashStore', () => {
 
    it('should handle mixed date formats in thread operations', async () => {
      const now = new Date();
-      const thread =
-
-
-        title: 'Test Thread',
-        createdAt: now,
-        updatedAt: now.toISOString(),
-        metadata: {},
-      };
-
-      await store.saveThread({ thread: thread as any });
+      const thread = createSampleThread(now);
+
+      await store.saveThread({ thread });
      const retrievedThread = await store.getThreadById({ threadId: thread.id });
      expect(retrievedThread?.createdAt).toBeInstanceOf(Date);
      expect(retrievedThread?.updatedAt).toBeInstanceOf(Date);
@@ -222,28 +242,13 @@ describe('UpstashStore', () => {
 
    it('should handle date serialization in getThreadsByResourceId', async () => {
      const now = new Date();
-      const
-
-
-          resourceId: 'resource-1',
-          title: 'Thread 1',
-          createdAt: now,
-          updatedAt: now.toISOString(),
-          metadata: {},
-        },
-        {
-          id: 'thread-2',
-          resourceId: 'resource-1',
-          title: 'Thread 2',
-          createdAt: now.toISOString(),
-          updatedAt: now,
-          metadata: {},
-        },
-      ];
+      const thread1 = createSampleThread(now);
+      const thread2 = { ...createSampleThread(now), resourceId: thread1.resourceId };
+      const threads = [thread1, thread2];
 
-      await Promise.all(threads.map(thread => store.saveThread({ thread
+      await Promise.all(threads.map(thread => store.saveThread({ thread })));
 
-      const retrievedThreads = await store.getThreadsByResourceId({ resourceId:
+      const retrievedThreads = await store.getThreadsByResourceId({ resourceId: threads[0].resourceId });
      expect(retrievedThreads).toHaveLength(2);
      retrievedThreads.forEach(thread => {
        expect(thread.createdAt).toBeInstanceOf(Date);
@@ -258,11 +263,11 @@ describe('UpstashStore', () => {
    const threadId = 'test-thread';
 
    beforeEach(async () => {
-      await store.clearTable({ tableName:
-      await store.clearTable({ tableName:
+      await store.clearTable({ tableName: TABLE_MESSAGES });
+      await store.clearTable({ tableName: TABLE_THREADS });
 
      // Create a test thread
-      await store.
+      await store.__saveThread({
        thread: {
          id: threadId,
          resourceId: 'resource-1',
@@ -276,41 +281,20 @@ describe('UpstashStore', () => {
 
    it('should save and retrieve messages in order', async () => {
      const messages = [
-
-
-
-          role: 'user',
-          type: 'text',
-          content: [{ type: 'text', text: 'First' }],
-          createdAt: new Date().toISOString(),
-        },
-        {
-          id: 'msg-2',
-          threadId,
-          role: 'assistant',
-          type: 'text',
-          content: [{ type: 'text', text: 'Second' }],
-          createdAt: new Date().toISOString(),
-        },
-        {
-          id: 'msg-3',
-          threadId,
-          role: 'user',
-          type: 'text',
-          content: [{ type: 'text', text: 'Third' }],
-          createdAt: new Date().toISOString(),
-        },
+        createSampleMessage(threadId, 'First'),
+        createSampleMessage(threadId, 'Second'),
+        createSampleMessage(threadId, 'Third'),
      ];
 
-      await store.
+      await store.__saveMessages({ messages: messages as MessageType[] });
 
-      const retrievedMessages = await store.
+      const retrievedMessages = await store.__getMessages({ threadId });
      expect(retrievedMessages).toHaveLength(3);
      expect(retrievedMessages.map(m => m.content[0].text)).toEqual(['First', 'Second', 'Third']);
    });
 
    it('should handle empty message array', async () => {
-      const result = await store.
+      const result = await store.__saveMessages({ messages: [] });
      expect(result).toEqual([]);
    });
 
@@ -326,13 +310,13 @@ describe('UpstashStore', () => {
            { type: 'code', text: 'code block', language: 'typescript' },
            { type: 'text', text: 'and more text' },
          ],
-          createdAt: new Date()
+          createdAt: new Date(),
        },
      ];
 
-      await store.
+      await store.__saveMessages({ messages: messages as MessageType[] });
 
-      const retrievedMessages = await store.
+      const retrievedMessages = await store.__getMessages({ threadId });
      expect(retrievedMessages[0].content).toEqual(messages[0].content);
    });
  });
@@ -343,7 +327,7 @@ describe('UpstashStore', () => {
    const testRunId = 'test-run';
 
    beforeEach(async () => {
-      await store.clearTable({ tableName:
+      await store.clearTable({ tableName: TABLE_WORKFLOW_SNAPSHOT });
    });
 
    it('should persist and load workflow snapshots', async () => {
@@ -359,7 +343,7 @@ describe('UpstashStore', () => {
        runId: testRunId,
        activePaths: [],
        timestamp: Date.now(),
-      };
+      } as unknown as WorkflowRunState;
 
      await store.persistWorkflowSnapshot({
        namespace: testNamespace,
@@ -386,4 +370,252 @@ describe('UpstashStore', () => {
      expect(result).toBeNull();
    });
  });
+
+  describe('Eval Operations', () => {
+    beforeEach(async () => {
+      await store.clearTable({ tableName: TABLE_EVALS });
+    });
+
+    it('should retrieve evals by agent name', async () => {
+      const agentName = `test-agent-${randomUUID()}`;
+
+      // Create sample evals
+      const liveEval = createSampleEval(agentName, false);
+      const testEval = createSampleEval(agentName, true);
+      const otherAgentEval = createSampleEval(`other-agent-${randomUUID()}`, false);
+
+      // Insert evals
+      await store.insert({
+        tableName: TABLE_EVALS,
+        record: liveEval,
+      });
+
+      await store.insert({
+        tableName: TABLE_EVALS,
+        record: testEval,
+      });
+
+      await store.insert({
+        tableName: TABLE_EVALS,
+        record: otherAgentEval,
+      });
+
+      // Test getting all evals for the agent
+      const allEvals = await store.getEvalsByAgentName(agentName);
+      expect(allEvals).toHaveLength(2);
+      expect(allEvals.map(e => e.runId)).toEqual(expect.arrayContaining([liveEval.run_id, testEval.run_id]));
+
+      // Test getting only live evals
+      const liveEvals = await store.getEvalsByAgentName(agentName, 'live');
+      expect(liveEvals).toHaveLength(1);
+      expect(liveEvals[0].runId).toBe(liveEval.run_id);
+
+      // Test getting only test evals
+      const testEvals = await store.getEvalsByAgentName(agentName, 'test');
+      expect(testEvals).toHaveLength(1);
+      expect(testEvals[0].runId).toBe(testEval.run_id);
+
+      // Verify the test_info was properly parsed
+      if (testEval.test_info) {
+        const expectedTestInfo = JSON.parse(testEval.test_info);
+        expect(testEvals[0].testInfo).toEqual(expectedTestInfo);
+      }
+
+      // Test getting evals for non-existent agent
+      const nonExistentEvals = await store.getEvalsByAgentName('non-existent-agent');
+      expect(nonExistentEvals).toHaveLength(0);
+    });
+  });
+
+  describe('getWorkflowRuns', () => {
+    const testNamespace = 'test-namespace';
+    beforeEach(async () => {
+      await store.clearTable({ tableName: TABLE_WORKFLOW_SNAPSHOT });
+    });
+    it('returns empty array when no workflows exist', async () => {
+      const { runs, total } = await store.__getWorkflowRuns();
+      expect(runs).toEqual([]);
+      expect(total).toBe(0);
+    });
+
+    it('returns all workflows by default', async () => {
+      const workflowName1 = 'default_test_1';
+      const workflowName2 = 'default_test_2';
+
+      const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('completed');
+      const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('running');
+
+      await store.persistWorkflowSnapshot({
+        namespace: testNamespace,
+        workflowName: workflowName1,
+        runId: runId1,
+        snapshot: workflow1,
+      });
+      await new Promise(resolve => setTimeout(resolve, 10)); // Small delay to ensure different timestamps
+      await store.persistWorkflowSnapshot({
+        namespace: testNamespace,
+        workflowName: workflowName2,
+        runId: runId2,
+        snapshot: workflow2,
+      });
+
+      const { runs, total } = await store.__getWorkflowRuns({ namespace: testNamespace });
+      expect(runs).toHaveLength(2);
+      expect(total).toBe(2);
+      expect(runs[0]!.workflowName).toBe(workflowName2); // Most recent first
+      expect(runs[1]!.workflowName).toBe(workflowName1);
+      const firstSnapshot = runs[0]!.snapshot as WorkflowRunState;
+      const secondSnapshot = runs[1]!.snapshot as WorkflowRunState;
+      expect(firstSnapshot.context?.steps[stepId2]?.status).toBe('running');
+      expect(secondSnapshot.context?.steps[stepId1]?.status).toBe('completed');
+    });
+
+    it('filters by workflow name', async () => {
+      const workflowName1 = 'filter_test_1';
+      const workflowName2 = 'filter_test_2';
+
+      const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('completed');
+      const { snapshot: workflow2, runId: runId2 } = createSampleWorkflowSnapshot('failed');
+
+      await store.persistWorkflowSnapshot({
+        namespace: testNamespace,
+        workflowName: workflowName1,
+        runId: runId1,
+        snapshot: workflow1,
+      });
+      await new Promise(resolve => setTimeout(resolve, 10)); // Small delay to ensure different timestamps
+      await store.persistWorkflowSnapshot({
+        namespace: testNamespace,
+        workflowName: workflowName2,
+        runId: runId2,
+        snapshot: workflow2,
+      });
+
+      const { runs, total } = await store.__getWorkflowRuns({ namespace: testNamespace, workflowName: workflowName1 });
+      expect(runs).toHaveLength(1);
+      expect(total).toBe(1);
+      expect(runs[0]!.workflowName).toBe(workflowName1);
+      const snapshot = runs[0]!.snapshot as WorkflowRunState;
+      expect(snapshot.context?.steps[stepId1]?.status).toBe('completed');
+    });
+
+    it('filters by date range', async () => {
+      const now = new Date();
+      const yesterday = new Date(now.getTime() - 24 * 60 * 60 * 1000);
+      const twoDaysAgo = new Date(now.getTime() - 2 * 24 * 60 * 60 * 1000);
+      const workflowName1 = 'date_test_1';
+      const workflowName2 = 'date_test_2';
+      const workflowName3 = 'date_test_3';
+
+      const { snapshot: workflow1, runId: runId1 } = createSampleWorkflowSnapshot('completed');
+      const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('running');
+      const { snapshot: workflow3, runId: runId3, stepId: stepId3 } = createSampleWorkflowSnapshot('waiting');
+
+      await store.insert({
+        tableName: TABLE_WORKFLOW_SNAPSHOT,
+        record: {
+          namespace: testNamespace,
+          workflow_name: workflowName1,
+          run_id: runId1,
+          snapshot: workflow1,
+          createdAt: twoDaysAgo,
+          updatedAt: twoDaysAgo,
+        },
+      });
+      await store.insert({
+        tableName: TABLE_WORKFLOW_SNAPSHOT,
+        record: {
+          namespace: testNamespace,
+          workflow_name: workflowName2,
+          run_id: runId2,
+          snapshot: workflow2,
+          createdAt: yesterday,
+          updatedAt: yesterday,
+        },
+      });
+      await store.insert({
+        tableName: TABLE_WORKFLOW_SNAPSHOT,
+        record: {
+          namespace: testNamespace,
+          workflow_name: workflowName3,
+          run_id: runId3,
+          snapshot: workflow3,
+          createdAt: now,
+          updatedAt: now,
+        },
+      });
+
+      const { runs } = await store.__getWorkflowRuns({
+        namespace: testNamespace,
+        fromDate: yesterday,
+        toDate: now,
+      });
+
+      expect(runs).toHaveLength(2);
+      expect(runs[0]!.workflowName).toBe(workflowName3);
+      expect(runs[1]!.workflowName).toBe(workflowName2);
+      const firstSnapshot = runs[0]!.snapshot as WorkflowRunState;
+      const secondSnapshot = runs[1]!.snapshot as WorkflowRunState;
+      expect(firstSnapshot.context?.steps[stepId3]?.status).toBe('waiting');
+      expect(secondSnapshot.context?.steps[stepId2]?.status).toBe('running');
+    });
+
+    it('handles pagination', async () => {
+      const workflowName1 = 'page_test_1';
+      const workflowName2 = 'page_test_2';
+      const workflowName3 = 'page_test_3';
+
+      const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('completed');
+      const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('running');
+      const { snapshot: workflow3, runId: runId3, stepId: stepId3 } = createSampleWorkflowSnapshot('waiting');
+
+      await store.persistWorkflowSnapshot({
+        namespace: testNamespace,
+        workflowName: workflowName1,
+        runId: runId1,
+        snapshot: workflow1,
+      });
+      await new Promise(resolve => setTimeout(resolve, 10)); // Small delay to ensure different timestamps
+      await store.persistWorkflowSnapshot({
+        namespace: testNamespace,
+        workflowName: workflowName2,
+        runId: runId2,
+        snapshot: workflow2,
+      });
+      await new Promise(resolve => setTimeout(resolve, 10)); // Small delay to ensure different timestamps
+      await store.persistWorkflowSnapshot({
+        namespace: testNamespace,
+        workflowName: workflowName3,
+        runId: runId3,
+        snapshot: workflow3,
+      });
+
+      // Get first page
+      const page1 = await store.__getWorkflowRuns({
+        namespace: testNamespace,
+        limit: 2,
+        offset: 0,
+      });
+      expect(page1.runs).toHaveLength(2);
+      expect(page1.total).toBe(3); // Total count of all records
+      expect(page1.runs[0]!.workflowName).toBe(workflowName3);
+      expect(page1.runs[1]!.workflowName).toBe(workflowName2);
+      const firstSnapshot = page1.runs[0]!.snapshot as WorkflowRunState;
+      const secondSnapshot = page1.runs[1]!.snapshot as WorkflowRunState;
+      expect(firstSnapshot.context?.steps[stepId3]?.status).toBe('waiting');
+      expect(secondSnapshot.context?.steps[stepId2]?.status).toBe('running');
+
+      // Get second page
+      const page2 = await store.__getWorkflowRuns({
+        namespace: testNamespace,
+        limit: 2,
+        offset: 2,
+      });
+      expect(page2.runs).toHaveLength(1);
+      expect(page2.total).toBe(3);
+      expect(page2.runs[0]!.workflowName).toBe(workflowName1);
+      const snapshot = page2.runs[0]!.snapshot as WorkflowRunState;
+      expect(snapshot.context?.steps[stepId1]?.status).toBe('completed');
+    });
+  });
 });