@mastra/pg 0.12.3 → 0.12.4-alpha.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
@@ -1,2394 +1,10 @@
1
- import { randomUUID } from 'crypto';
2
- import {
3
- createSampleEval,
4
- createSampleTraceForDB,
5
- createSampleThread,
6
- createSampleMessageV1,
7
- createSampleMessageV2,
8
- createSampleWorkflowSnapshot,
9
- resetRole,
10
- checkWorkflowSnapshot,
11
- } from '@internal/storage-test-utils';
12
- import type { MastraMessageV2 } from '@mastra/core/agent';
13
- import type { MastraMessageV1, StorageThreadType } from '@mastra/core/memory';
14
- import type { StorageColumn, TABLE_NAMES } from '@mastra/core/storage';
15
- import {
16
- TABLE_WORKFLOW_SNAPSHOT,
17
- TABLE_MESSAGES,
18
- TABLE_THREADS,
19
- TABLE_EVALS,
20
- TABLE_TRACES,
21
- } from '@mastra/core/storage';
22
- import type { WorkflowRunState } from '@mastra/core/workflows';
23
- import pgPromise from 'pg-promise';
24
- import { describe, it, expect, beforeAll, beforeEach, afterAll, afterEach, vi } from 'vitest';
25
-
1
+ import { createTestSuite } from '@internal/storage-test-utils';
2
+ import { vi } from 'vitest';
3
+ import { pgTests, TEST_CONFIG } from './test-utils';
26
4
  import { PostgresStore } from '.';
27
- import type { PostgresConfig } from '.';
28
-
29
- const TEST_CONFIG: PostgresConfig = {
30
- host: process.env.POSTGRES_HOST || 'localhost',
31
- port: Number(process.env.POSTGRES_PORT) || 5434,
32
- database: process.env.POSTGRES_DB || 'postgres',
33
- user: process.env.POSTGRES_USER || 'postgres',
34
- password: process.env.POSTGRES_PASSWORD || 'postgres',
35
- };
36
-
37
- const connectionString = `postgresql://${TEST_CONFIG.user}:${TEST_CONFIG.password}@${TEST_CONFIG.host}:${TEST_CONFIG.port}/${TEST_CONFIG.database}`;
38
5
 
39
6
  vi.setConfig({ testTimeout: 60_000, hookTimeout: 60_000 });
40
7
 
41
- describe('PostgresStore', () => {
42
- let store: PostgresStore;
43
-
44
- beforeAll(async () => {
45
- store = new PostgresStore(TEST_CONFIG);
46
- await store.init();
47
- });
48
-
49
- describe('Public Fields Access', () => {
50
- let testDB: PostgresStore;
51
- beforeAll(async () => {
52
- testDB = new PostgresStore(TEST_CONFIG);
53
- });
54
- afterAll(async () => {
55
- try {
56
- await testDB.close();
57
- } catch {}
58
- store = new PostgresStore(TEST_CONFIG);
59
- await store.init();
60
- });
61
-
62
- it('should expose db field as public', () => {
63
- expect(testDB.db).toBeDefined();
64
- expect(typeof testDB.db).toBe('object');
65
- expect(testDB.db.query).toBeDefined();
66
- expect(typeof testDB.db.query).toBe('function');
67
- });
68
-
69
- it('should expose pgp field as public', () => {
70
- expect(testDB.pgp).toBeDefined();
71
- expect(typeof testDB.pgp).toBe('function');
72
- expect(testDB.pgp.end).toBeDefined();
73
- expect(typeof testDB.pgp.end).toBe('function');
74
- });
75
-
76
- it('should allow direct database queries via public db field', async () => {
77
- const result = await testDB.db.one('SELECT 1 as test');
78
- expect(result.test).toBe(1);
79
- });
80
-
81
- it('should allow access to pgp utilities via public pgp field', () => {
82
- const helpers = testDB.pgp.helpers;
83
- expect(helpers).toBeDefined();
84
- expect(helpers.insert).toBeDefined();
85
- expect(helpers.update).toBeDefined();
86
- });
87
-
88
- it('should maintain connection state through public db field', async () => {
89
- // Test multiple queries to ensure connection state
90
- const result1 = await testDB.db.one('SELECT NOW() as timestamp1');
91
- const result2 = await testDB.db.one('SELECT NOW() as timestamp2');
92
-
93
- expect(result1.timestamp1).toBeDefined();
94
- expect(result2.timestamp2).toBeDefined();
95
- expect(new Date(result2.timestamp2).getTime()).toBeGreaterThanOrEqual(new Date(result1.timestamp1).getTime());
96
- });
97
-
98
- it('should throw error when pool is used after disconnect', async () => {
99
- await testDB.close();
100
- await expect(testDB.db.connect()).rejects.toThrow();
101
- });
102
- });
103
-
104
- beforeEach(async () => {
105
- // Only clear tables if store is initialized
106
- try {
107
- // Clear tables before each test
108
- await store.clearTable({ tableName: TABLE_WORKFLOW_SNAPSHOT });
109
- await store.clearTable({ tableName: TABLE_MESSAGES });
110
- await store.clearTable({ tableName: TABLE_THREADS });
111
- await store.clearTable({ tableName: TABLE_EVALS });
112
- await store.clearTable({ tableName: TABLE_TRACES });
113
- } catch (error) {
114
- // Ignore errors during table clearing
115
- console.warn('Error clearing tables:', error);
116
- }
117
- });
118
-
119
- // --- Validation tests ---
120
- describe('Validation', () => {
121
- const validConfig = TEST_CONFIG;
122
- it('throws if connectionString is empty', () => {
123
- expect(() => new PostgresStore({ connectionString: '' })).toThrow(
124
- /connectionString must be provided and cannot be empty/,
125
- );
126
- });
127
- it('throws if host is missing or empty', () => {
128
- expect(() => new PostgresStore({ ...validConfig, host: '' })).toThrow(
129
- /host must be provided and cannot be empty/,
130
- );
131
- const { host, ...rest } = validConfig;
132
- expect(() => new PostgresStore(rest as any)).toThrow(/host must be provided and cannot be empty/);
133
- });
134
- it('throws if user is missing or empty', () => {
135
- expect(() => new PostgresStore({ ...validConfig, user: '' })).toThrow(
136
- /user must be provided and cannot be empty/,
137
- );
138
- const { user, ...rest } = validConfig;
139
- expect(() => new PostgresStore(rest as any)).toThrow(/user must be provided and cannot be empty/);
140
- });
141
- it('throws if database is missing or empty', () => {
142
- expect(() => new PostgresStore({ ...validConfig, database: '' })).toThrow(
143
- /database must be provided and cannot be empty/,
144
- );
145
- const { database, ...rest } = validConfig;
146
- expect(() => new PostgresStore(rest as any)).toThrow(/database must be provided and cannot be empty/);
147
- });
148
- it('throws if password is missing or empty', () => {
149
- expect(() => new PostgresStore({ ...validConfig, password: '' })).toThrow(
150
- /password must be provided and cannot be empty/,
151
- );
152
- const { password, ...rest } = validConfig;
153
- expect(() => new PostgresStore(rest as any)).toThrow(/password must be provided and cannot be empty/);
154
- });
155
- it('does not throw on valid config (host-based)', () => {
156
- expect(() => new PostgresStore(validConfig)).not.toThrow();
157
- });
158
- it('does not throw on non-empty connection string', () => {
159
- expect(() => new PostgresStore({ connectionString })).not.toThrow();
160
- });
161
- });
162
-
163
- describe('Thread Operations', () => {
164
- it('should create and retrieve a thread', async () => {
165
- const thread = createSampleThread();
166
-
167
- // Save thread
168
- const savedThread = await store.saveThread({ thread });
169
- expect(savedThread).toEqual(thread);
170
-
171
- // Retrieve thread
172
- const retrievedThread = await store.getThreadById({ threadId: thread.id });
173
- expect(retrievedThread?.title).toEqual(thread.title);
174
- });
175
-
176
- it('should return null for non-existent thread', async () => {
177
- const result = await store.getThreadById({ threadId: 'non-existent' });
178
- expect(result).toBeNull();
179
- });
180
-
181
- it('should get threads by resource ID', async () => {
182
- const thread1 = createSampleThread();
183
- const thread2 = { ...createSampleThread(), resourceId: thread1.resourceId };
184
-
185
- await store.saveThread({ thread: thread1 });
186
- await store.saveThread({ thread: thread2 });
187
-
188
- const threads = await store.getThreadsByResourceId({ resourceId: thread1.resourceId });
189
- expect(threads).toHaveLength(2);
190
- expect(threads.map(t => t.id)).toEqual(expect.arrayContaining([thread1.id, thread2.id]));
191
- });
192
-
193
- it('should update thread title and metadata', async () => {
194
- const thread = createSampleThread();
195
- await store.saveThread({ thread });
196
-
197
- const newMetadata = { newKey: 'newValue' };
198
- const updatedThread = await store.updateThread({
199
- id: thread.id,
200
- title: 'Updated Title',
201
- metadata: newMetadata,
202
- });
203
-
204
- expect(updatedThread.title).toBe('Updated Title');
205
- expect(updatedThread.metadata).toEqual({
206
- ...thread.metadata,
207
- ...newMetadata,
208
- });
209
-
210
- // Verify persistence
211
- const retrievedThread = await store.getThreadById({ threadId: thread.id });
212
- expect(retrievedThread).toEqual(updatedThread);
213
- });
214
-
215
- it('should delete thread and its messages', async () => {
216
- const thread = createSampleThread();
217
- await store.saveThread({ thread });
218
-
219
- // Add some messages
220
- const messages = [createSampleMessageV1({ threadId: thread.id }), createSampleMessageV1({ threadId: thread.id })];
221
- await store.saveMessages({ messages });
222
-
223
- await store.deleteThread({ threadId: thread.id });
224
-
225
- const retrievedThread = await store.getThreadById({ threadId: thread.id });
226
- expect(retrievedThread).toBeNull();
227
-
228
- // Verify messages were also deleted
229
- const retrievedMessages = await store.getMessages({ threadId: thread.id });
230
- expect(retrievedMessages).toHaveLength(0);
231
- });
232
-
233
- it('should update thread updatedAt when a message is saved to it', async () => {
234
- const thread = createSampleThread();
235
- await store.saveThread({ thread });
236
-
237
- // Get the initial thread to capture the original updatedAt
238
- const initialThread = await store.getThreadById({ threadId: thread.id });
239
- expect(initialThread).toBeDefined();
240
- const originalUpdatedAt = initialThread!.updatedAt;
241
-
242
- // Wait a small amount to ensure different timestamp
243
- await new Promise(resolve => setTimeout(resolve, 10));
244
-
245
- // Create and save a message to the thread
246
- const message = createSampleMessageV1({ threadId: thread.id });
247
- await store.saveMessages({ messages: [message] });
248
-
249
- // Retrieve the thread again and check that updatedAt was updated
250
- const updatedThread = await store.getThreadById({ threadId: thread.id });
251
- expect(updatedThread).toBeDefined();
252
- expect(updatedThread!.updatedAt.getTime()).toBeGreaterThan(originalUpdatedAt.getTime());
253
- });
254
- });
255
-
256
- describe('Message Operations', () => {
257
- it('should save and retrieve messages', async () => {
258
- const thread = createSampleThread();
259
- await store.saveThread({ thread });
260
-
261
- const messages = [
262
- createSampleMessageV1({ threadId: thread.id, resourceId: thread.resourceId }),
263
- createSampleMessageV1({ threadId: thread.id, resourceId: thread.resourceId }),
264
- ];
265
-
266
- // Save messages
267
- const savedMessages = await store.saveMessages({ messages });
268
- expect(savedMessages).toEqual(messages);
269
-
270
- // Retrieve messages
271
- const retrievedMessages = await store.getMessages({ threadId: thread.id, format: 'v1' });
272
- expect(retrievedMessages).toHaveLength(2);
273
- expect(retrievedMessages).toEqual(expect.arrayContaining(messages));
274
- });
275
-
276
- it('should handle empty message array', async () => {
277
- const result = await store.saveMessages({ messages: [] });
278
- expect(result).toEqual([]);
279
- });
280
-
281
- it('should maintain message order', async () => {
282
- const thread = createSampleThread();
283
- await store.saveThread({ thread });
284
-
285
- const messageContent = ['First', 'Second', 'Third'];
286
-
287
- const messages = messageContent.map(content =>
288
- createSampleMessageV2({ threadId: thread.id, content: { content, parts: [{ type: 'text', text: content }] } }),
289
- );
290
-
291
- await store.saveMessages({ messages, format: 'v2' });
292
-
293
- const retrievedMessages = await store.getMessages({ threadId: thread.id, format: 'v2' });
294
- expect(retrievedMessages).toHaveLength(3);
295
-
296
- // Verify order is maintained
297
- retrievedMessages.forEach((msg, idx) => {
298
- expect((msg.content.parts[0] as any).text).toEqual(messageContent[idx]);
299
- });
300
- });
301
-
302
- it('should rollback on error during message save', async () => {
303
- const thread = createSampleThread();
304
- await store.saveThread({ thread });
305
-
306
- const messages = [
307
- createSampleMessageV1({ threadId: thread.id }),
308
- { ...createSampleMessageV1({ threadId: thread.id }), id: null } as any, // This will cause an error
309
- ];
310
-
311
- await expect(store.saveMessages({ messages })).rejects.toThrow();
312
-
313
- // Verify no messages were saved
314
- const savedMessages = await store.getMessages({ threadId: thread.id });
315
- expect(savedMessages).toHaveLength(0);
316
- });
317
-
318
- it('should retrieve messages w/ next/prev messages by message id + resource id', async () => {
319
- const thread = createSampleThread({ id: 'thread-one' });
320
- await store.saveThread({ thread });
321
-
322
- const thread2 = createSampleThread({ id: 'thread-two' });
323
- await store.saveThread({ thread: thread2 });
324
-
325
- const thread3 = createSampleThread({ id: 'thread-three' });
326
- await store.saveThread({ thread: thread3 });
327
-
328
- const messages: MastraMessageV2[] = [
329
- createSampleMessageV2({
330
- threadId: 'thread-one',
331
- content: { content: 'First' },
332
- resourceId: 'cross-thread-resource',
333
- }),
334
- createSampleMessageV2({
335
- threadId: 'thread-one',
336
- content: { content: 'Second' },
337
- resourceId: 'cross-thread-resource',
338
- }),
339
- createSampleMessageV2({
340
- threadId: 'thread-one',
341
- content: { content: 'Third' },
342
- resourceId: 'cross-thread-resource',
343
- }),
344
-
345
- createSampleMessageV2({
346
- threadId: 'thread-two',
347
- content: { content: 'Fourth' },
348
- resourceId: 'cross-thread-resource',
349
- }),
350
- createSampleMessageV2({
351
- threadId: 'thread-two',
352
- content: { content: 'Fifth' },
353
- resourceId: 'cross-thread-resource',
354
- }),
355
- createSampleMessageV2({
356
- threadId: 'thread-two',
357
- content: { content: 'Sixth' },
358
- resourceId: 'cross-thread-resource',
359
- }),
360
-
361
- createSampleMessageV2({
362
- threadId: 'thread-three',
363
- content: { content: 'Seventh' },
364
- resourceId: 'other-resource',
365
- }),
366
- createSampleMessageV2({
367
- threadId: 'thread-three',
368
- content: { content: 'Eighth' },
369
- resourceId: 'other-resource',
370
- }),
371
- ];
372
-
373
- await store.saveMessages({ messages: messages, format: 'v2' });
374
-
375
- const retrievedMessages = await store.getMessages({ threadId: 'thread-one', format: 'v2' });
376
- expect(retrievedMessages).toHaveLength(3);
377
- expect(retrievedMessages.map((m: any) => m.content.parts[0].text)).toEqual(['First', 'Second', 'Third']);
378
-
379
- const retrievedMessages2 = await store.getMessages({ threadId: 'thread-two', format: 'v2' });
380
- expect(retrievedMessages2).toHaveLength(3);
381
- expect(retrievedMessages2.map((m: any) => m.content.parts[0].text)).toEqual(['Fourth', 'Fifth', 'Sixth']);
382
-
383
- const retrievedMessages3 = await store.getMessages({ threadId: 'thread-three', format: 'v2' });
384
- expect(retrievedMessages3).toHaveLength(2);
385
- expect(retrievedMessages3.map((m: any) => m.content.parts[0].text)).toEqual(['Seventh', 'Eighth']);
386
-
387
- const crossThreadMessages: MastraMessageV2[] = await store.getMessages({
388
- threadId: 'thread-doesnt-exist',
389
- format: 'v2',
390
- selectBy: {
391
- last: 0,
392
- include: [
393
- {
394
- id: messages[1].id,
395
- threadId: 'thread-one',
396
- withNextMessages: 2,
397
- withPreviousMessages: 2,
398
- },
399
- {
400
- id: messages[4].id,
401
- threadId: 'thread-two',
402
- withPreviousMessages: 2,
403
- withNextMessages: 2,
404
- },
405
- ],
406
- },
407
- });
408
-
409
- expect(crossThreadMessages).toHaveLength(6);
410
- expect(crossThreadMessages.filter(m => m.threadId === `thread-one`)).toHaveLength(3);
411
- expect(crossThreadMessages.filter(m => m.threadId === `thread-two`)).toHaveLength(3);
412
-
413
- const crossThreadMessages2: MastraMessageV2[] = await store.getMessages({
414
- threadId: 'thread-one',
415
- format: 'v2',
416
- selectBy: {
417
- last: 0,
418
- include: [
419
- {
420
- id: messages[4].id,
421
- threadId: 'thread-two',
422
- withPreviousMessages: 1,
423
- withNextMessages: 1,
424
- },
425
- ],
426
- },
427
- });
428
-
429
- expect(crossThreadMessages2).toHaveLength(3);
430
- expect(crossThreadMessages2.filter(m => m.threadId === `thread-one`)).toHaveLength(0);
431
- expect(crossThreadMessages2.filter(m => m.threadId === `thread-two`)).toHaveLength(3);
432
-
433
- const crossThreadMessages3: MastraMessageV2[] = await store.getMessages({
434
- threadId: 'thread-two',
435
- format: 'v2',
436
- selectBy: {
437
- last: 0,
438
- include: [
439
- {
440
- id: messages[1].id,
441
- threadId: 'thread-one',
442
- withNextMessages: 1,
443
- withPreviousMessages: 1,
444
- },
445
- ],
446
- },
447
- });
448
-
449
- expect(crossThreadMessages3).toHaveLength(3);
450
- expect(crossThreadMessages3.filter(m => m.threadId === `thread-one`)).toHaveLength(3);
451
- expect(crossThreadMessages3.filter(m => m.threadId === `thread-two`)).toHaveLength(0);
452
- });
453
-
454
- it('should return messages using both last and include (cross-thread, deduped)', async () => {
455
- const thread = createSampleThread({ id: 'thread-one' });
456
- await store.saveThread({ thread });
457
-
458
- const thread2 = createSampleThread({ id: 'thread-two' });
459
- await store.saveThread({ thread: thread2 });
460
-
461
- const now = new Date();
462
-
463
- // Setup: create messages in two threads
464
- const messages = [
465
- createSampleMessageV2({
466
- threadId: 'thread-one',
467
- content: { content: 'A' },
468
- createdAt: new Date(now.getTime()),
469
- }),
470
- createSampleMessageV2({
471
- threadId: 'thread-one',
472
- content: { content: 'B' },
473
- createdAt: new Date(now.getTime() + 1000),
474
- }),
475
- createSampleMessageV2({
476
- threadId: 'thread-one',
477
- content: { content: 'C' },
478
- createdAt: new Date(now.getTime() + 2000),
479
- }),
480
- createSampleMessageV2({
481
- threadId: 'thread-two',
482
- content: { content: 'D' },
483
- createdAt: new Date(now.getTime() + 3000),
484
- }),
485
- createSampleMessageV2({
486
- threadId: 'thread-two',
487
- content: { content: 'E' },
488
- createdAt: new Date(now.getTime() + 4000),
489
- }),
490
- createSampleMessageV2({
491
- threadId: 'thread-two',
492
- content: { content: 'F' },
493
- createdAt: new Date(now.getTime() + 5000),
494
- }),
495
- ];
496
- await store.saveMessages({ messages, format: 'v2' });
497
-
498
- // Use last: 2 and include a message from another thread with context
499
- const result = await store.getMessages({
500
- threadId: 'thread-one',
501
- format: 'v2',
502
- selectBy: {
503
- last: 2,
504
- include: [
505
- {
506
- id: messages[4].id, // 'E' from thread-two
507
- threadId: 'thread-two',
508
- withPreviousMessages: 1,
509
- withNextMessages: 1,
510
- },
511
- ],
512
- },
513
- });
514
-
515
- // Should include last 2 from thread-one and 3 from thread-two (D, E, F)
516
- expect(result.map(m => m.content.content).sort()).toEqual(['B', 'C', 'D', 'E', 'F']);
517
- // Should include 2 from thread-one
518
- expect(result.filter(m => m.threadId === 'thread-one').map(m => m.content.content)).toEqual(['B', 'C']);
519
- // Should include 3 from thread-two
520
- expect(result.filter(m => m.threadId === 'thread-two').map(m => m.content.content)).toEqual(['D', 'E', 'F']);
521
- });
522
- });
523
-
524
- describe('updateMessages', () => {
525
- let thread: StorageThreadType;
526
-
527
- beforeEach(async () => {
528
- const threadData = createSampleThread();
529
- thread = await store.saveThread({ thread: threadData as StorageThreadType });
530
- });
531
-
532
- it('should update a single field of a message (e.g., role)', async () => {
533
- const originalMessage = createSampleMessageV2({ threadId: thread.id, role: 'user', thread });
534
- await store.saveMessages({ messages: [originalMessage], format: 'v2' });
535
-
536
- const updatedMessages = await store.updateMessages({
537
- messages: [{ id: originalMessage.id, role: 'assistant' }],
538
- });
539
-
540
- expect(updatedMessages).toHaveLength(1);
541
- expect(updatedMessages[0].role).toBe('assistant');
542
- expect(updatedMessages[0].content).toEqual(originalMessage.content); // Ensure content is unchanged
543
- });
544
-
545
- it('should update only the metadata within the content field, preserving other content', async () => {
546
- const originalMessage = createSampleMessageV2({
547
- threadId: thread.id,
548
- content: { content: 'hello world', parts: [{ type: 'text', text: 'hello world' }] },
549
- thread,
550
- });
551
- await store.saveMessages({ messages: [originalMessage], format: 'v2' });
552
-
553
- const newMetadata = { someKey: 'someValue' };
554
- await store.updateMessages({
555
- messages: [{ id: originalMessage.id, content: { metadata: newMetadata } as any }],
556
- });
557
-
558
- const fromDb = await store.getMessages({ threadId: thread.id, format: 'v2' });
559
- expect(fromDb[0].content.metadata).toEqual(newMetadata);
560
- expect(fromDb[0].content.content).toBe('hello world');
561
- expect(fromDb[0].content.parts).toEqual([{ type: 'text', text: 'hello world' }]);
562
- });
563
-
564
- it('should deep merge metadata, not overwrite it', async () => {
565
- const originalMessage = createSampleMessageV2({
566
- threadId: thread.id,
567
- content: { metadata: { initial: true }, content: 'old content' },
568
- thread,
569
- });
570
- await store.saveMessages({ messages: [originalMessage], format: 'v2' });
571
-
572
- const newMetadata = { updated: true };
573
- await store.updateMessages({
574
- messages: [{ id: originalMessage.id, content: { metadata: newMetadata } as any }],
575
- });
576
-
577
- const fromDb = await store.getMessages({ threadId: thread.id, format: 'v2' });
578
- expect(fromDb[0].content.metadata).toEqual({ initial: true, updated: true });
579
- });
580
-
581
- it('should update multiple messages at once', async () => {
582
- const msg1 = createSampleMessageV2({ threadId: thread.id, role: 'user', thread });
583
- const msg2 = createSampleMessageV2({ threadId: thread.id, content: { content: 'original' }, thread });
584
- await store.saveMessages({ messages: [msg1, msg2], format: 'v2' });
585
-
586
- await store.updateMessages({
587
- messages: [
588
- { id: msg1.id, role: 'assistant' },
589
- { id: msg2.id, content: { content: 'updated' } as any },
590
- ],
591
- });
592
-
593
- const fromDb = await store.getMessages({ threadId: thread.id, format: 'v2' });
594
- const updatedMsg1 = fromDb.find(m => m.id === msg1.id)!;
595
- const updatedMsg2 = fromDb.find(m => m.id === msg2.id)!;
596
-
597
- expect(updatedMsg1.role).toBe('assistant');
598
- expect(updatedMsg2.content.content).toBe('updated');
599
- });
600
-
601
- it('should update the parent thread updatedAt timestamp', async () => {
602
- const originalMessage = createSampleMessageV2({ threadId: thread.id, thread });
603
- await store.saveMessages({ messages: [originalMessage], format: 'v2' });
604
- const initialThread = await store.getThreadById({ threadId: thread.id });
605
-
606
- await new Promise(r => setTimeout(r, 10));
607
-
608
- await store.updateMessages({ messages: [{ id: originalMessage.id, role: 'assistant' }] });
609
-
610
- const updatedThread = await store.getThreadById({ threadId: thread.id });
611
-
612
- expect(new Date(updatedThread!.updatedAt).getTime()).toBeGreaterThan(
613
- new Date(initialThread!.updatedAt).getTime(),
614
- );
615
- });
616
-
617
- it('should update timestamps on both threads when moving a message', async () => {
618
- const thread2 = await store.saveThread({ thread: createSampleThread() });
619
- const message = createSampleMessageV2({ threadId: thread.id, thread });
620
- await store.saveMessages({ messages: [message], format: 'v2' });
621
-
622
- const initialThread1 = await store.getThreadById({ threadId: thread.id });
623
- const initialThread2 = await store.getThreadById({ threadId: thread2.id });
624
-
625
- await new Promise(r => setTimeout(r, 10));
626
-
627
- await store.updateMessages({
628
- messages: [{ id: message.id, threadId: thread2.id }],
629
- });
630
-
631
- const updatedThread1 = await store.getThreadById({ threadId: thread.id });
632
- const updatedThread2 = await store.getThreadById({ threadId: thread2.id });
633
-
634
- expect(new Date(updatedThread1!.updatedAt).getTime()).toBeGreaterThan(
635
- new Date(initialThread1!.updatedAt).getTime(),
636
- );
637
- expect(new Date(updatedThread2!.updatedAt).getTime()).toBeGreaterThan(
638
- new Date(initialThread2!.updatedAt).getTime(),
639
- );
640
-
641
- // Verify the message was moved
642
- const thread1Messages = await store.getMessages({ threadId: thread.id, format: 'v2' });
643
- const thread2Messages = await store.getMessages({ threadId: thread2.id, format: 'v2' });
644
- expect(thread1Messages).toHaveLength(0);
645
- expect(thread2Messages).toHaveLength(1);
646
- expect(thread2Messages[0].id).toBe(message.id);
647
- });
648
- it('should upsert messages: duplicate id+threadId results in update, not duplicate row', async () => {
649
- const thread = await createSampleThread();
650
- await store.saveThread({ thread });
651
- const baseMessage = createSampleMessageV2({
652
- threadId: thread.id,
653
- createdAt: new Date(),
654
- content: { content: 'Original' },
655
- resourceId: thread.resourceId,
656
- });
657
-
658
- // Insert the message for the first time
659
- await store.saveMessages({ messages: [baseMessage], format: 'v2' });
660
-
661
- // Insert again with the same id and threadId but different content
662
- const updatedMessage = {
663
- ...createSampleMessageV2({
664
- threadId: thread.id,
665
- createdAt: new Date(),
666
- content: { content: 'Updated' },
667
- resourceId: thread.resourceId,
668
- }),
669
- id: baseMessage.id,
670
- };
671
-
672
- await store.saveMessages({ messages: [updatedMessage], format: 'v2' });
673
-
674
- // Retrieve messages for the thread
675
- const retrievedMessages = await store.getMessages({ threadId: thread.id, format: 'v2' });
676
-
677
- // Only one message should exist for that id+threadId
678
- expect(retrievedMessages.filter(m => m.id === baseMessage.id)).toHaveLength(1);
679
-
680
- // The content should be the updated one
681
- expect(retrievedMessages.find(m => m.id === baseMessage.id)?.content.content).toBe('Updated');
682
- });
683
-
684
- it('should upsert messages: duplicate id and different threadId', async () => {
685
- const thread1 = await createSampleThread();
686
- const thread2 = await createSampleThread();
687
- await store.saveThread({ thread: thread1 });
688
- await store.saveThread({ thread: thread2 });
689
-
690
- const message = createSampleMessageV2({
691
- threadId: thread1.id,
692
- createdAt: new Date(),
693
- content: { content: 'Thread1 Content' },
694
- resourceId: thread1.resourceId,
695
- });
696
-
697
- // Insert message into thread1
698
- await store.saveMessages({ messages: [message], format: 'v2' });
699
-
700
- // Attempt to insert a message with the same id but different threadId
701
- const conflictingMessage = {
702
- ...createSampleMessageV2({
703
- threadId: thread2.id, // different thread
704
- content: { content: 'Thread2 Content' },
705
- resourceId: thread2.resourceId,
706
- }),
707
- id: message.id,
708
- };
709
-
710
- // Save should move the message to the new thread
711
- await store.saveMessages({ messages: [conflictingMessage], format: 'v2' });
712
-
713
- // Retrieve messages for both threads
714
- const thread1Messages = await store.getMessages({ threadId: thread1.id, format: 'v2' });
715
- const thread2Messages = await store.getMessages({ threadId: thread2.id, format: 'v2' });
716
-
717
- // Thread 1 should NOT have the message with that id
718
- expect(thread1Messages.find(m => m.id === message.id)).toBeUndefined();
719
-
720
- // Thread 2 should have the message with that id
721
- expect(thread2Messages.find(m => m.id === message.id)?.content.content).toBe('Thread2 Content');
722
- });
723
- });
724
-
725
- describe('Edge Cases and Error Handling', () => {
726
- it('should handle large metadata objects', async () => {
727
- const thread = createSampleThread();
728
- const largeMetadata = {
729
- ...thread.metadata,
730
- largeArray: Array.from({ length: 1000 }, (_, i) => ({ index: i, data: 'test'.repeat(100) })),
731
- };
732
-
733
- const threadWithLargeMetadata = {
734
- ...thread,
735
- metadata: largeMetadata,
736
- };
737
-
738
- await store.saveThread({ thread: threadWithLargeMetadata });
739
- const retrieved = await store.getThreadById({ threadId: thread.id });
740
-
741
- expect(retrieved?.metadata).toEqual(largeMetadata);
742
- });
743
-
744
- it('should handle special characters in thread titles', async () => {
745
- const thread = {
746
- ...createSampleThread(),
747
- title: 'Special \'quotes\' and "double quotes" and emoji 🎉',
748
- };
749
-
750
- await store.saveThread({ thread });
751
- const retrieved = await store.getThreadById({ threadId: thread.id });
752
-
753
- expect(retrieved?.title).toBe(thread.title);
754
- });
755
-
756
- it('should handle concurrent thread updates', async () => {
757
- const thread = createSampleThread();
758
- await store.saveThread({ thread });
759
-
760
- // Perform multiple updates concurrently
761
- const updates = Array.from({ length: 5 }, (_, i) =>
762
- store.updateThread({
763
- id: thread.id,
764
- title: `Update ${i}`,
765
- metadata: { update: i },
766
- }),
767
- );
768
-
769
- await expect(Promise.all(updates)).resolves.toBeDefined();
770
-
771
- // Verify final state
772
- const finalThread = await store.getThreadById({ threadId: thread.id });
773
- expect(finalThread).toBeDefined();
774
- });
775
- });
776
-
777
- describe('Workflow Snapshots', () => {
778
- it('should persist and load workflow snapshots', async () => {
779
- const workflowName = 'test-workflow';
780
- const runId = `run-${randomUUID()}`;
781
- const snapshot = {
782
- status: 'running',
783
- context: {
784
- input: { type: 'manual' },
785
- step1: { status: 'success', output: { data: 'test' } },
786
- },
787
- value: {},
788
- activePaths: [],
789
- suspendedPaths: {},
790
- runId,
791
- timestamp: new Date().getTime(),
792
- serializedStepGraph: [],
793
- } as unknown as WorkflowRunState;
794
-
795
- await store.persistWorkflowSnapshot({
796
- workflowName,
797
- runId,
798
- snapshot,
799
- });
800
-
801
- const loadedSnapshot = await store.loadWorkflowSnapshot({
802
- workflowName,
803
- runId,
804
- });
805
-
806
- expect(loadedSnapshot).toEqual(snapshot);
807
- });
808
-
809
- it('should return null for non-existent workflow snapshot', async () => {
810
- const result = await store.loadWorkflowSnapshot({
811
- workflowName: 'non-existent',
812
- runId: 'non-existent',
813
- });
814
-
815
- expect(result).toBeNull();
816
- });
817
-
818
- it('should update existing workflow snapshot', async () => {
819
- const workflowName = 'test-workflow';
820
- const runId = `run-${randomUUID()}`;
821
- const initialSnapshot = {
822
- status: 'running',
823
- context: {
824
- input: { type: 'manual' },
825
- },
826
- value: {},
827
- activePaths: [],
828
- suspendedPaths: {},
829
- runId,
830
- timestamp: new Date().getTime(),
831
- serializedStepGraph: [],
832
- };
833
-
834
- await store.persistWorkflowSnapshot({
835
- workflowName,
836
- runId,
837
- snapshot: initialSnapshot as unknown as WorkflowRunState,
838
- });
839
-
840
- const updatedSnapshot = {
841
- status: 'success',
842
- context: {
843
- input: { type: 'manual' },
844
- 'step-1': { status: 'success', result: { data: 'test' } },
845
- },
846
- value: {},
847
- activePaths: [],
848
- suspendedPaths: {},
849
- runId,
850
- timestamp: new Date().getTime(),
851
- };
852
-
853
- await store.persistWorkflowSnapshot({
854
- workflowName,
855
- runId,
856
- snapshot: updatedSnapshot as unknown as WorkflowRunState,
857
- });
858
-
859
- const loadedSnapshot = await store.loadWorkflowSnapshot({
860
- workflowName,
861
- runId,
862
- });
863
-
864
- expect(loadedSnapshot).toEqual(updatedSnapshot);
865
- });
866
-
867
- it('should handle complex workflow state', async () => {
868
- const workflowName = 'complex-workflow';
869
- const runId = `run-${randomUUID()}`;
870
- const complexSnapshot = {
871
- value: { currentState: 'running' },
872
- context: {
873
- 'step-1': {
874
- status: 'success',
875
- output: {
876
- nestedData: {
877
- array: [1, 2, 3],
878
- object: { key: 'value' },
879
- date: new Date().toISOString(),
880
- },
881
- },
882
- },
883
- 'step-2': {
884
- status: 'waiting',
885
- dependencies: ['step-3', 'step-4'],
886
- },
887
- input: {
888
- type: 'scheduled',
889
- metadata: {
890
- schedule: '0 0 * * *',
891
- timezone: 'UTC',
892
- },
893
- },
894
- },
895
- activePaths: [
896
- {
897
- stepPath: ['step-1'],
898
- stepId: 'step-1',
899
- status: 'success',
900
- },
901
- {
902
- stepPath: ['step-2'],
903
- stepId: 'step-2',
904
- status: 'waiting',
905
- },
906
- ],
907
- suspendedPaths: {},
908
- runId: runId,
909
- timestamp: Date.now(),
910
- serializedStepGraph: [],
911
- status: 'running',
912
- };
913
-
914
- await store.persistWorkflowSnapshot({
915
- workflowName,
916
- runId,
917
- snapshot: complexSnapshot as unknown as WorkflowRunState,
918
- });
919
-
920
- const loadedSnapshot = await store.loadWorkflowSnapshot({
921
- workflowName,
922
- runId,
923
- });
924
-
925
- expect(loadedSnapshot).toEqual(complexSnapshot);
926
- });
927
- });
928
-
929
- describe('getWorkflowRuns', () => {
930
- beforeEach(async () => {
931
- await store.clearTable({ tableName: TABLE_WORKFLOW_SNAPSHOT });
932
- });
933
- it('returns empty array when no workflows exist', async () => {
934
- const { runs, total } = await store.getWorkflowRuns();
935
- expect(runs).toEqual([]);
936
- expect(total).toBe(0);
937
- });
938
-
939
- it('returns all workflows by default', async () => {
940
- const workflowName1 = 'default_test_1';
941
- const workflowName2 = 'default_test_2';
942
-
943
- const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('success');
944
- const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('failed');
945
-
946
- await store.persistWorkflowSnapshot({ workflowName: workflowName1, runId: runId1, snapshot: workflow1 });
947
- await new Promise(resolve => setTimeout(resolve, 10)); // Small delay to ensure different timestamps
948
- await store.persistWorkflowSnapshot({ workflowName: workflowName2, runId: runId2, snapshot: workflow2 });
949
-
950
- const { runs, total } = await store.getWorkflowRuns();
951
- expect(runs).toHaveLength(2);
952
- expect(total).toBe(2);
953
- expect(runs[0]!.workflowName).toBe(workflowName2); // Most recent first
954
- expect(runs[1]!.workflowName).toBe(workflowName1);
955
- const firstSnapshot = runs[0]!.snapshot;
956
- const secondSnapshot = runs[1]!.snapshot;
957
- checkWorkflowSnapshot(firstSnapshot, stepId2, 'failed');
958
- checkWorkflowSnapshot(secondSnapshot, stepId1, 'success');
959
- });
960
-
961
- it('filters by workflow name', async () => {
962
- const workflowName1 = 'filter_test_1';
963
- const workflowName2 = 'filter_test_2';
964
-
965
- const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('success');
966
- const { snapshot: workflow2, runId: runId2 } = createSampleWorkflowSnapshot('failed');
967
-
968
- await store.persistWorkflowSnapshot({ workflowName: workflowName1, runId: runId1, snapshot: workflow1 });
969
- await new Promise(resolve => setTimeout(resolve, 10)); // Small delay to ensure different timestamps
970
- await store.persistWorkflowSnapshot({ workflowName: workflowName2, runId: runId2, snapshot: workflow2 });
971
-
972
- const { runs, total } = await store.getWorkflowRuns({ workflowName: workflowName1 });
973
- expect(runs).toHaveLength(1);
974
- expect(total).toBe(1);
975
- expect(runs[0]!.workflowName).toBe(workflowName1);
976
- const snapshot = runs[0]!.snapshot;
977
- checkWorkflowSnapshot(snapshot, stepId1, 'success');
978
- });
979
-
980
- it('filters by date range', async () => {
981
- const now = new Date();
982
- const yesterday = new Date(now.getTime() - 24 * 60 * 60 * 1000);
983
- const twoDaysAgo = new Date(now.getTime() - 2 * 24 * 60 * 60 * 1000);
984
- const workflowName1 = 'date_test_1';
985
- const workflowName2 = 'date_test_2';
986
- const workflowName3 = 'date_test_3';
987
-
988
- const { snapshot: workflow1, runId: runId1 } = createSampleWorkflowSnapshot('success');
989
- const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('failed');
990
- const { snapshot: workflow3, runId: runId3, stepId: stepId3 } = createSampleWorkflowSnapshot('suspended');
991
-
992
- await store.insert({
993
- tableName: TABLE_WORKFLOW_SNAPSHOT,
994
- record: {
995
- workflow_name: workflowName1,
996
- run_id: runId1,
997
- snapshot: workflow1,
998
- createdAt: twoDaysAgo,
999
- updatedAt: twoDaysAgo,
1000
- },
1001
- });
1002
- await store.insert({
1003
- tableName: TABLE_WORKFLOW_SNAPSHOT,
1004
- record: {
1005
- workflow_name: workflowName2,
1006
- run_id: runId2,
1007
- snapshot: workflow2,
1008
- createdAt: yesterday,
1009
- updatedAt: yesterday,
1010
- },
1011
- });
1012
- await store.insert({
1013
- tableName: TABLE_WORKFLOW_SNAPSHOT,
1014
- record: {
1015
- workflow_name: workflowName3,
1016
- run_id: runId3,
1017
- snapshot: workflow3,
1018
- createdAt: now,
1019
- updatedAt: now,
1020
- },
1021
- });
1022
-
1023
- const { runs } = await store.getWorkflowRuns({
1024
- fromDate: yesterday,
1025
- toDate: now,
1026
- });
1027
-
1028
- expect(runs).toHaveLength(2);
1029
- expect(runs[0]!.workflowName).toBe(workflowName3);
1030
- expect(runs[1]!.workflowName).toBe(workflowName2);
1031
- const firstSnapshot = runs[0]!.snapshot;
1032
- const secondSnapshot = runs[1]!.snapshot;
1033
- checkWorkflowSnapshot(firstSnapshot, stepId3, 'suspended');
1034
- checkWorkflowSnapshot(secondSnapshot, stepId2, 'failed');
1035
- });
1036
-
1037
- it('handles pagination', async () => {
1038
- const workflowName1 = 'page_test_1';
1039
- const workflowName2 = 'page_test_2';
1040
- const workflowName3 = 'page_test_3';
1041
-
1042
- const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('success');
1043
- const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('failed');
1044
- const { snapshot: workflow3, runId: runId3, stepId: stepId3 } = createSampleWorkflowSnapshot('suspended');
1045
-
1046
- await store.persistWorkflowSnapshot({ workflowName: workflowName1, runId: runId1, snapshot: workflow1 });
1047
- await new Promise(resolve => setTimeout(resolve, 10)); // Small delay to ensure different timestamps
1048
- await store.persistWorkflowSnapshot({ workflowName: workflowName2, runId: runId2, snapshot: workflow2 });
1049
- await new Promise(resolve => setTimeout(resolve, 10)); // Small delay to ensure different timestamps
1050
- await store.persistWorkflowSnapshot({ workflowName: workflowName3, runId: runId3, snapshot: workflow3 });
1051
-
1052
- // Get first page
1053
- const page1 = await store.getWorkflowRuns({ limit: 2, offset: 0 });
1054
- expect(page1.runs).toHaveLength(2);
1055
- expect(page1.total).toBe(3); // Total count of all records
1056
- expect(page1.runs[0]!.workflowName).toBe(workflowName3);
1057
- expect(page1.runs[1]!.workflowName).toBe(workflowName2);
1058
- const firstSnapshot = page1.runs[0]!.snapshot;
1059
- const secondSnapshot = page1.runs[1]!.snapshot;
1060
- checkWorkflowSnapshot(firstSnapshot, stepId3, 'suspended');
1061
- checkWorkflowSnapshot(secondSnapshot, stepId2, 'failed');
1062
-
1063
- // Get second page
1064
- const page2 = await store.getWorkflowRuns({ limit: 2, offset: 2 });
1065
- expect(page2.runs).toHaveLength(1);
1066
- expect(page2.total).toBe(3);
1067
- expect(page2.runs[0]!.workflowName).toBe(workflowName1);
1068
- const snapshot = page2.runs[0]!.snapshot;
1069
- checkWorkflowSnapshot(snapshot, stepId1, 'success');
1070
- });
1071
- });
1072
-
1073
- describe('getWorkflowRunById', () => {
1074
- const workflowName = 'workflow-id-test';
1075
- let runId: string;
1076
- let stepId: string;
1077
-
1078
- beforeEach(async () => {
1079
- // Insert a workflow run for positive test
1080
- const sample = createSampleWorkflowSnapshot('success');
1081
- runId = sample.runId;
1082
- stepId = sample.stepId;
1083
- await store.insert({
1084
- tableName: TABLE_WORKFLOW_SNAPSHOT,
1085
- record: {
1086
- workflow_name: workflowName,
1087
- run_id: runId,
1088
- resourceId: 'resource-abc',
1089
- snapshot: sample.snapshot,
1090
- createdAt: new Date(),
1091
- updatedAt: new Date(),
1092
- },
1093
- });
1094
- });
1095
-
1096
- it('should retrieve a workflow run by ID', async () => {
1097
- const found = await store.getWorkflowRunById({
1098
- runId,
1099
- workflowName,
1100
- });
1101
- expect(found).not.toBeNull();
1102
- expect(found?.runId).toBe(runId);
1103
- checkWorkflowSnapshot(found?.snapshot!, stepId, 'success');
1104
- });
1105
-
1106
- it('should return null for non-existent workflow run ID', async () => {
1107
- const notFound = await store.getWorkflowRunById({
1108
- runId: 'non-existent-id',
1109
- workflowName,
1110
- });
1111
- expect(notFound).toBeNull();
1112
- });
1113
- });
1114
- describe('getWorkflowRuns with resourceId', () => {
1115
- const workflowName = 'workflow-id-test';
1116
- let resourceId: string;
1117
- let runIds: string[] = [];
1118
-
1119
- beforeEach(async () => {
1120
- // Insert multiple workflow runs for the same resourceId
1121
- resourceId = 'resource-shared';
1122
- for (const status of ['success', 'failed']) {
1123
- const sample = createSampleWorkflowSnapshot(status as WorkflowRunState['context'][string]['status']);
1124
- runIds.push(sample.runId);
1125
- await store.insert({
1126
- tableName: TABLE_WORKFLOW_SNAPSHOT,
1127
- record: {
1128
- workflow_name: workflowName,
1129
- run_id: sample.runId,
1130
- resourceId,
1131
- snapshot: sample.snapshot,
1132
- createdAt: new Date(),
1133
- updatedAt: new Date(),
1134
- },
1135
- });
1136
- }
1137
- // Insert a run with a different resourceId
1138
- const other = createSampleWorkflowSnapshot('suspended');
1139
- await store.insert({
1140
- tableName: TABLE_WORKFLOW_SNAPSHOT,
1141
- record: {
1142
- workflow_name: workflowName,
1143
- run_id: other.runId,
1144
- resourceId: 'resource-other',
1145
- snapshot: other.snapshot,
1146
- createdAt: new Date(),
1147
- updatedAt: new Date(),
1148
- },
1149
- });
1150
- });
1151
-
1152
- it('should retrieve all workflow runs by resourceId', async () => {
1153
- const { runs } = await store.getWorkflowRuns({
1154
- resourceId,
1155
- workflowName,
1156
- });
1157
- expect(Array.isArray(runs)).toBe(true);
1158
- expect(runs.length).toBeGreaterThanOrEqual(2);
1159
- for (const run of runs) {
1160
- expect(run.resourceId).toBe(resourceId);
1161
- }
1162
- });
1163
-
1164
- it('should return an empty array if no workflow runs match resourceId', async () => {
1165
- const { runs } = await store.getWorkflowRuns({
1166
- resourceId: 'non-existent-resource',
1167
- workflowName,
1168
- });
1169
- expect(Array.isArray(runs)).toBe(true);
1170
- expect(runs.length).toBe(0);
1171
- });
1172
- });
1173
-
1174
- describe('Eval Operations', () => {
1175
- it('should retrieve evals by agent name', async () => {
1176
- const agentName = `test-agent-${randomUUID()}`;
1177
-
1178
- // Create sample evals using the imported helper
1179
- const liveEval = createSampleEval(agentName, false); // createSampleEval returns snake_case
1180
- const testEval = createSampleEval(agentName, true);
1181
- const otherAgentEval = createSampleEval(`other-agent-${randomUUID()}`, false);
1182
-
1183
- // Insert evals - ensure DB columns are snake_case
1184
- await store.insert({
1185
- tableName: TABLE_EVALS,
1186
- record: {
1187
- agent_name: liveEval.agent_name, // Use snake_case
1188
- input: liveEval.input,
1189
- output: liveEval.output,
1190
- result: liveEval.result,
1191
- metric_name: liveEval.metric_name, // Use snake_case
1192
- instructions: liveEval.instructions,
1193
- test_info: liveEval.test_info, // test_info from helper can be undefined or object
1194
- global_run_id: liveEval.global_run_id, // Use snake_case
1195
- run_id: liveEval.run_id, // Use snake_case
1196
- created_at: new Date(liveEval.created_at as string), // created_at from helper is string or Date
1197
- },
1198
- });
1199
-
1200
- await store.insert({
1201
- tableName: TABLE_EVALS,
1202
- record: {
1203
- agent_name: testEval.agent_name,
1204
- input: testEval.input,
1205
- output: testEval.output,
1206
- result: testEval.result,
1207
- metric_name: testEval.metric_name,
1208
- instructions: testEval.instructions,
1209
- test_info: testEval.test_info ? JSON.stringify(testEval.test_info) : null,
1210
- global_run_id: testEval.global_run_id,
1211
- run_id: testEval.run_id,
1212
- created_at: new Date(testEval.created_at as string),
1213
- },
1214
- });
1215
-
1216
- await store.insert({
1217
- tableName: TABLE_EVALS,
1218
- record: {
1219
- agent_name: otherAgentEval.agent_name,
1220
- input: otherAgentEval.input,
1221
- output: otherAgentEval.output,
1222
- result: otherAgentEval.result,
1223
- metric_name: otherAgentEval.metric_name,
1224
- instructions: otherAgentEval.instructions,
1225
- test_info: otherAgentEval.test_info, // Can be null/undefined directly
1226
- global_run_id: otherAgentEval.global_run_id,
1227
- run_id: otherAgentEval.run_id,
1228
- created_at: new Date(otherAgentEval.created_at as string),
1229
- },
1230
- });
1231
-
1232
- // Test getting all evals for the agent
1233
- const allEvals = await store.getEvalsByAgentName(agentName);
1234
- expect(allEvals).toHaveLength(2);
1235
- // EvalRow type expects camelCase, but PostgresStore.transformEvalRow converts snake_case from DB to camelCase
1236
- expect(allEvals.map(e => e.runId)).toEqual(expect.arrayContaining([liveEval.run_id, testEval.run_id]));
1237
-
1238
- // Test getting only live evals
1239
- const liveEvals = await store.getEvalsByAgentName(agentName, 'live');
1240
- expect(liveEvals).toHaveLength(1);
1241
- expect(liveEvals[0].runId).toBe(liveEval.run_id); // Comparing with snake_case run_id from original data
1242
-
1243
- // Test getting only test evals
1244
- const testEvalsResult = await store.getEvalsByAgentName(agentName, 'test');
1245
- expect(testEvalsResult).toHaveLength(1);
1246
- expect(testEvalsResult[0].runId).toBe(testEval.run_id);
1247
- expect(testEvalsResult[0].testInfo).toEqual(testEval.test_info);
1248
-
1249
- // Test getting evals for non-existent agent
1250
- const nonExistentEvals = await store.getEvalsByAgentName('non-existent-agent');
1251
- expect(nonExistentEvals).toHaveLength(0);
1252
- });
1253
- });
1254
-
1255
- describe('hasColumn', () => {
1256
- const tempTable = 'temp_test_table';
1257
-
1258
- beforeEach(async () => {
1259
- // Always try to drop the table before each test, ignore errors if it doesn't exist
1260
- try {
1261
- await store['db'].query(`DROP TABLE IF EXISTS ${tempTable}`);
1262
- } catch {
1263
- /* ignore */
1264
- }
1265
- });
1266
-
1267
- it('returns true if the column exists', async () => {
1268
- await store['db'].query(`CREATE TABLE ${tempTable} (id SERIAL PRIMARY KEY, resourceId TEXT)`);
1269
- expect(await store['hasColumn'](tempTable, 'resourceId')).toBe(true);
1270
- });
1271
-
1272
- it('returns false if the column does not exist', async () => {
1273
- await store['db'].query(`CREATE TABLE ${tempTable} (id SERIAL PRIMARY KEY)`);
1274
- expect(await store['hasColumn'](tempTable, 'resourceId')).toBe(false);
1275
- });
1276
-
1277
- afterEach(async () => {
1278
- // Always try to drop the table after each test, ignore errors if it doesn't exist
1279
- try {
1280
- await store['db'].query(`DROP TABLE IF EXISTS ${tempTable}`);
1281
- } catch {
1282
- /* ignore */
1283
- }
1284
- });
1285
- });
1286
-
1287
- describe('alterTable', () => {
1288
- const TEST_TABLE = 'test_alter_table';
1289
- const BASE_SCHEMA = {
1290
- id: { type: 'integer', primaryKey: true, nullable: false },
1291
- name: { type: 'text', nullable: true },
1292
- } as Record<string, StorageColumn>;
1293
-
1294
- beforeEach(async () => {
1295
- await store.createTable({ tableName: TEST_TABLE as TABLE_NAMES, schema: BASE_SCHEMA });
1296
- });
1297
-
1298
- afterEach(async () => {
1299
- await store.clearTable({ tableName: TEST_TABLE as TABLE_NAMES });
1300
- });
1301
-
1302
- it('adds a new column to an existing table', async () => {
1303
- await store.alterTable({
1304
- tableName: TEST_TABLE as TABLE_NAMES,
1305
- schema: { ...BASE_SCHEMA, age: { type: 'integer', nullable: true } },
1306
- ifNotExists: ['age'],
1307
- });
1308
-
1309
- await store.insert({
1310
- tableName: TEST_TABLE as TABLE_NAMES,
1311
- record: { id: 1, name: 'Alice', age: 42 },
1312
- });
1313
-
1314
- const row = await store.load<{ id: string; name: string; age?: number }>({
1315
- tableName: TEST_TABLE as TABLE_NAMES,
1316
- keys: { id: '1' },
1317
- });
1318
- expect(row?.age).toBe(42);
1319
- });
1320
-
1321
- it('is idempotent when adding an existing column', async () => {
1322
- await store.alterTable({
1323
- tableName: TEST_TABLE as TABLE_NAMES,
1324
- schema: { ...BASE_SCHEMA, foo: { type: 'text', nullable: true } },
1325
- ifNotExists: ['foo'],
1326
- });
1327
- // Add the column again (should not throw)
1328
- await expect(
1329
- store.alterTable({
1330
- tableName: TEST_TABLE as TABLE_NAMES,
1331
- schema: { ...BASE_SCHEMA, foo: { type: 'text', nullable: true } },
1332
- ifNotExists: ['foo'],
1333
- }),
1334
- ).resolves.not.toThrow();
1335
- });
1336
-
1337
- it('should add a default value to a column when using not null', async () => {
1338
- await store.insert({
1339
- tableName: TEST_TABLE as TABLE_NAMES,
1340
- record: { id: 1, name: 'Bob' },
1341
- });
1342
-
1343
- await expect(
1344
- store.alterTable({
1345
- tableName: TEST_TABLE as TABLE_NAMES,
1346
- schema: { ...BASE_SCHEMA, text_column: { type: 'text', nullable: false } },
1347
- ifNotExists: ['text_column'],
1348
- }),
1349
- ).resolves.not.toThrow();
1350
-
1351
- await expect(
1352
- store.alterTable({
1353
- tableName: TEST_TABLE as TABLE_NAMES,
1354
- schema: { ...BASE_SCHEMA, timestamp_column: { type: 'timestamp', nullable: false } },
1355
- ifNotExists: ['timestamp_column'],
1356
- }),
1357
- ).resolves.not.toThrow();
1358
-
1359
- await expect(
1360
- store.alterTable({
1361
- tableName: TEST_TABLE as TABLE_NAMES,
1362
- schema: { ...BASE_SCHEMA, bigint_column: { type: 'bigint', nullable: false } },
1363
- ifNotExists: ['bigint_column'],
1364
- }),
1365
- ).resolves.not.toThrow();
1366
-
1367
- await expect(
1368
- store.alterTable({
1369
- tableName: TEST_TABLE as TABLE_NAMES,
1370
- schema: { ...BASE_SCHEMA, jsonb_column: { type: 'jsonb', nullable: false } },
1371
- ifNotExists: ['jsonb_column'],
1372
- }),
1373
- ).resolves.not.toThrow();
1374
- });
1375
- });
1376
-
1377
- describe('Schema Support', () => {
1378
- const customSchema = 'mastraTest';
1379
- let customSchemaStore: PostgresStore;
1380
-
1381
- beforeAll(async () => {
1382
- customSchemaStore = new PostgresStore({
1383
- ...TEST_CONFIG,
1384
- schemaName: customSchema,
1385
- });
1386
-
1387
- await customSchemaStore.init();
1388
- });
1389
-
1390
- afterAll(async () => {
1391
- await customSchemaStore.close();
1392
- // Re-initialize the main store for subsequent tests
1393
- store = new PostgresStore(TEST_CONFIG);
1394
- await store.init();
1395
- });
1396
-
1397
- describe('Constructor and Initialization', () => {
1398
- it('should accept connectionString directly', () => {
1399
- // Use existing store instead of creating new one
1400
- expect(store).toBeInstanceOf(PostgresStore);
1401
- });
1402
-
1403
- it('should accept config object with schema', () => {
1404
- // Use existing custom schema store
1405
- expect(customSchemaStore).toBeInstanceOf(PostgresStore);
1406
- });
1407
- });
1408
-
1409
- describe('Schema Operations', () => {
1410
- it('should create and query tables in custom schema', async () => {
1411
- // Create thread in custom schema
1412
- const thread = createSampleThread();
1413
- await customSchemaStore.saveThread({ thread });
1414
-
1415
- // Verify thread exists in custom schema
1416
- const retrieved = await customSchemaStore.getThreadById({ threadId: thread.id });
1417
- expect(retrieved?.title).toBe(thread.title);
1418
- });
1419
-
1420
- it('should allow same table names in different schemas', async () => {
1421
- // Create threads in both schemas
1422
- const defaultThread = createSampleThread();
1423
- const customThread = createSampleThread();
1424
-
1425
- await store.saveThread({ thread: defaultThread });
1426
- await customSchemaStore.saveThread({ thread: customThread });
1427
-
1428
- // Verify threads exist in respective schemas
1429
- const defaultResult = await store.getThreadById({ threadId: defaultThread.id });
1430
- const customResult = await customSchemaStore.getThreadById({ threadId: customThread.id });
1431
-
1432
- expect(defaultResult?.id).toBe(defaultThread.id);
1433
- expect(customResult?.id).toBe(customThread.id);
1434
-
1435
- // Verify cross-schema isolation
1436
- const defaultInCustom = await customSchemaStore.getThreadById({ threadId: defaultThread.id });
1437
- const customInDefault = await store.getThreadById({ threadId: customThread.id });
1438
-
1439
- expect(defaultInCustom).toBeNull();
1440
- expect(customInDefault).toBeNull();
1441
- });
1442
- });
1443
- });
1444
-
1445
- describe('Pagination Features', () => {
1446
- beforeEach(async () => {
1447
- await store.clearTable({ tableName: TABLE_EVALS });
1448
- await store.clearTable({ tableName: TABLE_TRACES });
1449
- await store.clearTable({ tableName: TABLE_MESSAGES });
1450
- await store.clearTable({ tableName: TABLE_THREADS });
1451
- });
1452
-
1453
- describe('getEvals with pagination', () => {
1454
- it('should return paginated evals with total count (page/perPage)', async () => {
1455
- const agentName = 'pagination-agent-evals';
1456
- const evalPromises = Array.from({ length: 25 }, (_, i) => {
1457
- const evalData = createSampleEval(agentName, i % 2 === 0);
1458
- return store.insert({
1459
- tableName: TABLE_EVALS,
1460
- record: {
1461
- run_id: evalData.run_id,
1462
- agent_name: evalData.agent_name,
1463
- input: evalData.input,
1464
- output: evalData.output,
1465
- result: evalData.result,
1466
- metric_name: evalData.metric_name,
1467
- instructions: evalData.instructions,
1468
- test_info: evalData.test_info,
1469
- global_run_id: evalData.global_run_id,
1470
- created_at: new Date(evalData.created_at as string),
1471
- },
1472
- });
1473
- });
1474
- await Promise.all(evalPromises);
1475
-
1476
- const page1 = await store.getEvals({ agentName, page: 0, perPage: 10 });
1477
- expect(page1.evals).toHaveLength(10);
1478
- expect(page1.total).toBe(25);
1479
- expect(page1.page).toBe(0);
1480
- expect(page1.perPage).toBe(10);
1481
- expect(page1.hasMore).toBe(true);
1482
-
1483
- const page3 = await store.getEvals({ agentName, page: 2, perPage: 10 });
1484
- expect(page3.evals).toHaveLength(5);
1485
- expect(page3.total).toBe(25);
1486
- expect(page3.page).toBe(2);
1487
- expect(page3.hasMore).toBe(false);
1488
- });
1489
-
1490
- it('should support limit/offset pagination for getEvals', async () => {
1491
- const agentName = 'pagination-agent-lo-evals';
1492
- const evalPromises = Array.from({ length: 15 }, () => {
1493
- const evalData = createSampleEval(agentName);
1494
- return store.insert({
1495
- tableName: TABLE_EVALS,
1496
- record: {
1497
- run_id: evalData.run_id,
1498
- agent_name: evalData.agent_name,
1499
- input: evalData.input,
1500
- output: evalData.output,
1501
- result: evalData.result,
1502
- metric_name: evalData.metric_name,
1503
- instructions: evalData.instructions,
1504
- test_info: evalData.test_info,
1505
- global_run_id: evalData.global_run_id,
1506
- created_at: new Date(evalData.created_at as string),
1507
- },
1508
- });
1509
- });
1510
- await Promise.all(evalPromises);
1511
-
1512
- const result = await store.getEvals({ agentName, perPage: 5, page: 2 });
1513
- expect(result.evals).toHaveLength(5);
1514
- expect(result.total).toBe(15);
1515
- expect(result.page).toBe(2);
1516
- expect(result.perPage).toBe(5);
1517
- expect(result.hasMore).toBe(false);
1518
- });
1519
-
1520
- it('should filter by type with pagination for getEvals', async () => {
1521
- const agentName = 'pagination-agent-type-evals';
1522
- const testEvalPromises = Array.from({ length: 10 }, () => {
1523
- const evalData = createSampleEval(agentName, true);
1524
- return store.insert({
1525
- tableName: TABLE_EVALS,
1526
- record: {
1527
- run_id: evalData.run_id,
1528
- agent_name: evalData.agent_name,
1529
- input: evalData.input,
1530
- output: evalData.output,
1531
- result: evalData.result,
1532
- metric_name: evalData.metric_name,
1533
- instructions: evalData.instructions,
1534
- test_info: evalData.test_info,
1535
- global_run_id: evalData.global_run_id,
1536
- created_at: new Date(evalData.created_at as string),
1537
- },
1538
- });
1539
- });
1540
- const liveEvalPromises = Array.from({ length: 8 }, () => {
1541
- const evalData = createSampleEval(agentName, false);
1542
- return store.insert({
1543
- tableName: TABLE_EVALS,
1544
- record: {
1545
- run_id: evalData.run_id,
1546
- agent_name: evalData.agent_name,
1547
- input: evalData.input,
1548
- output: evalData.output,
1549
- result: evalData.result,
1550
- metric_name: evalData.metric_name,
1551
- instructions: evalData.instructions,
1552
- test_info: evalData.test_info,
1553
- global_run_id: evalData.global_run_id,
1554
- created_at: new Date(evalData.created_at as string),
1555
- },
1556
- });
1557
- });
1558
- await Promise.all([...testEvalPromises, ...liveEvalPromises]);
1559
-
1560
- const testResults = await store.getEvals({ agentName, type: 'test', page: 0, perPage: 5 });
1561
- expect(testResults.evals).toHaveLength(5);
1562
- expect(testResults.total).toBe(10);
1563
-
1564
- const liveResults = await store.getEvals({ agentName, type: 'live', page: 1, perPage: 3 });
1565
- expect(liveResults.evals).toHaveLength(3);
1566
- expect(liveResults.total).toBe(8);
1567
- expect(liveResults.hasMore).toBe(true);
1568
- });
1569
-
1570
- it('should filter by date with pagination for getEvals', async () => {
1571
- const agentName = 'pagination-agent-date-evals';
1572
- const now = new Date();
1573
- const yesterday = new Date(now.getTime() - 24 * 60 * 60 * 1000);
1574
- const dayBeforeYesterday = new Date(now.getTime() - 48 * 60 * 60 * 1000);
1575
-
1576
- const createEvalAtDate = (date: Date) => {
1577
- const evalData = createSampleEval(agentName, false, date); // Pass date to helper
1578
- return store.insert({
1579
- tableName: TABLE_EVALS,
1580
- record: {
1581
- run_id: evalData.run_id, // Use snake_case from helper
1582
- agent_name: evalData.agent_name,
1583
- input: evalData.input,
1584
- output: evalData.output,
1585
- result: evalData.result,
1586
- metric_name: evalData.metric_name,
1587
- instructions: evalData.instructions,
1588
- test_info: evalData.test_info,
1589
- global_run_id: evalData.global_run_id,
1590
- created_at: evalData.created_at, // Use created_at from helper (already Date or ISO string)
1591
- },
1592
- });
1593
- };
1594
-
1595
- await Promise.all([
1596
- createEvalAtDate(dayBeforeYesterday),
1597
- createEvalAtDate(dayBeforeYesterday),
1598
- createEvalAtDate(yesterday),
1599
- createEvalAtDate(yesterday),
1600
- createEvalAtDate(yesterday),
1601
- createEvalAtDate(now),
1602
- createEvalAtDate(now),
1603
- createEvalAtDate(now),
1604
- createEvalAtDate(now),
1605
- ]);
1606
-
1607
- const fromYesterday = await store.getEvals({ agentName, dateRange: { start: yesterday }, page: 0, perPage: 3 });
1608
- expect(fromYesterday.total).toBe(7); // 3 yesterday + 4 now
1609
- expect(fromYesterday.evals).toHaveLength(3);
1610
- // Evals are sorted DESC, so first 3 are from 'now'
1611
- fromYesterday.evals.forEach(e =>
1612
- expect(new Date(e.createdAt).getTime()).toBeGreaterThanOrEqual(yesterday.getTime()),
1613
- );
1614
-
1615
- const onlyDayBefore = await store.getEvals({
1616
- agentName,
1617
- dateRange: {
1618
- end: new Date(yesterday.getTime() - 1),
1619
- },
1620
- page: 0,
1621
- perPage: 5,
1622
- });
1623
- expect(onlyDayBefore.total).toBe(2);
1624
- expect(onlyDayBefore.evals).toHaveLength(2);
1625
- });
1626
- });
1627
-
1628
- describe('getTraces with pagination', () => {
1629
- it('should return paginated traces with total count', async () => {
1630
- const tracePromises = Array.from({ length: 18 }, (_, i) =>
1631
- store.insert({ tableName: TABLE_TRACES, record: createSampleTraceForDB(`test-trace-${i}`, 'pg-test-scope') }),
1632
- );
1633
- await Promise.all(tracePromises);
1634
-
1635
- const page1 = await store.getTracesPaginated({
1636
- scope: 'pg-test-scope',
1637
- page: 0,
1638
- perPage: 8,
1639
- });
1640
- expect(page1.traces).toHaveLength(8);
1641
- expect(page1.total).toBe(18);
1642
- expect(page1.page).toBe(0);
1643
- expect(page1.perPage).toBe(8);
1644
- expect(page1.hasMore).toBe(true);
1645
-
1646
- const page3 = await store.getTracesPaginated({
1647
- scope: 'pg-test-scope',
1648
- page: 2,
1649
- perPage: 8,
1650
- });
1651
- expect(page3.traces).toHaveLength(2);
1652
- expect(page3.total).toBe(18);
1653
- expect(page3.hasMore).toBe(false);
1654
- });
1655
-
1656
- it('should filter by attributes with pagination for getTraces', async () => {
1657
- const tracesWithAttr = Array.from({ length: 8 }, (_, i) =>
1658
- store.insert({
1659
- tableName: TABLE_TRACES,
1660
- record: createSampleTraceForDB(`trace-${i}`, 'pg-attr-scope', { environment: 'prod' }),
1661
- }),
1662
- );
1663
- const tracesWithoutAttr = Array.from({ length: 5 }, (_, i) =>
1664
- store.insert({
1665
- tableName: TABLE_TRACES,
1666
- record: createSampleTraceForDB(`trace-other-${i}`, 'pg-attr-scope', { environment: 'dev' }),
1667
- }),
1668
- );
1669
- await Promise.all([...tracesWithAttr, ...tracesWithoutAttr]);
1670
-
1671
- const prodTraces = await store.getTracesPaginated({
1672
- scope: 'pg-attr-scope',
1673
- attributes: { environment: 'prod' },
1674
- page: 0,
1675
- perPage: 5,
1676
- });
1677
- expect(prodTraces.traces).toHaveLength(5);
1678
- expect(prodTraces.total).toBe(8);
1679
- expect(prodTraces.hasMore).toBe(true);
1680
- });
1681
-
1682
- it('should filter by date with pagination for getTraces', async () => {
1683
- const scope = 'pg-date-traces';
1684
- const now = new Date();
1685
- const yesterday = new Date(now.getTime() - 24 * 60 * 60 * 1000);
1686
- const dayBeforeYesterday = new Date(now.getTime() - 48 * 60 * 60 * 1000);
1687
-
1688
- await Promise.all([
1689
- store.insert({
1690
- tableName: TABLE_TRACES,
1691
- record: createSampleTraceForDB('t1', scope, undefined, dayBeforeYesterday),
1692
- }),
1693
- store.insert({ tableName: TABLE_TRACES, record: createSampleTraceForDB('t2', scope, undefined, yesterday) }),
1694
- store.insert({ tableName: TABLE_TRACES, record: createSampleTraceForDB('t3', scope, undefined, yesterday) }),
1695
- store.insert({ tableName: TABLE_TRACES, record: createSampleTraceForDB('t4', scope, undefined, now) }),
1696
- store.insert({ tableName: TABLE_TRACES, record: createSampleTraceForDB('t5', scope, undefined, now) }),
1697
- ]);
1698
-
1699
- const fromYesterday = await store.getTracesPaginated({
1700
- scope,
1701
- dateRange: {
1702
- start: yesterday,
1703
- },
1704
- page: 0,
1705
- perPage: 2,
1706
- });
1707
- expect(fromYesterday.total).toBe(4); // 2 yesterday + 2 now
1708
- expect(fromYesterday.traces).toHaveLength(2);
1709
- fromYesterday.traces.forEach(t =>
1710
- expect(new Date(t.createdAt).getTime()).toBeGreaterThanOrEqual(yesterday.getTime()),
1711
- );
1712
-
1713
- const onlyNow = await store.getTracesPaginated({
1714
- scope,
1715
- dateRange: {
1716
- start: now,
1717
- end: now,
1718
- },
1719
- page: 0,
1720
- perPage: 5,
1721
- });
1722
- expect(onlyNow.total).toBe(2);
1723
- expect(onlyNow.traces).toHaveLength(2);
1724
- });
1725
- });
1726
-
1727
- describe('getMessages with pagination', () => {
1728
- it('should return paginated messages with total count', async () => {
1729
- const thread = createSampleThread();
1730
- await store.saveThread({ thread });
1731
- // Reset role to 'assistant' before creating messages
1732
- resetRole();
1733
- // Create messages sequentially to ensure unique timestamps
1734
- for (let i = 0; i < 15; i++) {
1735
- const message = createSampleMessageV1({ threadId: thread.id, content: `Message ${i + 1}` });
1736
- await store.saveMessages({
1737
- messages: [message],
1738
- });
1739
- await new Promise(r => setTimeout(r, 5));
1740
- }
1741
-
1742
- const page1 = await store.getMessagesPaginated({
1743
- threadId: thread.id,
1744
- selectBy: { pagination: { page: 0, perPage: 5 } },
1745
- format: 'v2',
1746
- });
1747
- expect(page1.messages).toHaveLength(5);
1748
- expect(page1.total).toBe(15);
1749
- expect(page1.page).toBe(0);
1750
- expect(page1.perPage).toBe(5);
1751
- expect(page1.hasMore).toBe(true);
1752
-
1753
- const page3 = await store.getMessagesPaginated({
1754
- threadId: thread.id,
1755
- selectBy: { pagination: { page: 2, perPage: 5 } },
1756
- format: 'v2',
1757
- });
1758
- expect(page3.messages).toHaveLength(5);
1759
- expect(page3.total).toBe(15);
1760
- expect(page3.hasMore).toBe(false);
1761
- });
1762
-
1763
- it('should filter by date with pagination for getMessages', async () => {
1764
- resetRole();
1765
- const threadData = createSampleThread();
1766
- const thread = await store.saveThread({ thread: threadData as StorageThreadType });
1767
- const now = new Date();
1768
- const yesterday = new Date(
1769
- now.getFullYear(),
1770
- now.getMonth(),
1771
- now.getDate() - 1,
1772
- now.getHours(),
1773
- now.getMinutes(),
1774
- now.getSeconds(),
1775
- );
1776
- const dayBeforeYesterday = new Date(
1777
- now.getFullYear(),
1778
- now.getMonth(),
1779
- now.getDate() - 2,
1780
- now.getHours(),
1781
- now.getMinutes(),
1782
- now.getSeconds(),
1783
- );
1784
-
1785
- // Ensure timestamps are distinct for reliable sorting by inserting a short delay between message creations
1786
- const messagesToSave: MastraMessageV1[] = [];
1787
- messagesToSave.push(createSampleMessageV1({ threadId: thread.id, createdAt: dayBeforeYesterday }));
1788
- await new Promise(r => setTimeout(r, 5));
1789
- messagesToSave.push(createSampleMessageV1({ threadId: thread.id, createdAt: dayBeforeYesterday }));
1790
- await new Promise(r => setTimeout(r, 5));
1791
- messagesToSave.push(createSampleMessageV1({ threadId: thread.id, createdAt: yesterday }));
1792
- await new Promise(r => setTimeout(r, 5));
1793
- messagesToSave.push(createSampleMessageV1({ threadId: thread.id, createdAt: yesterday }));
1794
- await new Promise(r => setTimeout(r, 5));
1795
- messagesToSave.push(createSampleMessageV1({ threadId: thread.id, createdAt: now }));
1796
- await new Promise(r => setTimeout(r, 5));
1797
- messagesToSave.push(createSampleMessageV1({ threadId: thread.id, createdAt: now }));
1798
-
1799
- await store.saveMessages({ messages: messagesToSave, format: 'v1' });
1800
- // Total 6 messages, saved oldest to newest: 2 dayBeforeYesterday, 2 yesterday, 2 now
1801
-
1802
- const fromYesterday = await store.getMessagesPaginated({
1803
- threadId: thread.id,
1804
- selectBy: { pagination: { page: 0, perPage: 3, dateRange: { start: yesterday } } },
1805
- format: 'v2',
1806
- });
1807
- expect(fromYesterday.total).toBe(4);
1808
- expect(fromYesterday.messages).toHaveLength(3);
1809
- const firstMessageTime = new Date((fromYesterday.messages[0] as MastraMessageV1).createdAt).getTime();
1810
- expect(firstMessageTime).toBeGreaterThanOrEqual(new Date(yesterday.toISOString()).getTime());
1811
- if (fromYesterday.messages.length > 0) {
1812
- expect(new Date((fromYesterday.messages[0] as MastraMessageV1).createdAt).toISOString().slice(0, 10)).toEqual(
1813
- yesterday.toISOString().slice(0, 10),
1814
- );
1815
- }
1816
- });
1817
-
1818
- it('should save and retrieve messages', async () => {
1819
- const thread = createSampleThread();
1820
- await store.saveThread({ thread });
1821
-
1822
- const messages = [
1823
- createSampleMessageV1({ threadId: thread.id, resourceId: thread.resourceId }),
1824
- createSampleMessageV1({ threadId: thread.id, resourceId: thread.resourceId }),
1825
- ];
1826
-
1827
- // Save messages
1828
- const savedMessages = await store.saveMessages({ messages });
1829
- expect(savedMessages).toEqual(messages);
1830
-
1831
- // Retrieve messages
1832
- const retrievedMessages = await store.getMessagesPaginated({ threadId: thread.id, format: 'v1' });
1833
- expect(retrievedMessages.messages).toHaveLength(2);
1834
- expect(retrievedMessages.messages).toEqual(expect.arrayContaining(messages));
1835
- });
1836
-
1837
- it('should maintain message order', async () => {
1838
- const thread = createSampleThread();
1839
- await store.saveThread({ thread });
1840
-
1841
- const messageContent = ['First', 'Second', 'Third'];
1842
-
1843
- const messages = messageContent.map(content =>
1844
- createSampleMessageV2({
1845
- threadId: thread.id,
1846
- content: { content, parts: [{ type: 'text', text: content }] },
1847
- }),
1848
- );
1849
-
1850
- await store.saveMessages({ messages, format: 'v2' });
1851
-
1852
- const retrievedMessages = await store.getMessagesPaginated({ threadId: thread.id, format: 'v2' });
1853
- expect(retrievedMessages.messages).toHaveLength(3);
1854
-
1855
- // Verify order is maintained
1856
- retrievedMessages.messages.forEach((msg, idx) => {
1857
- expect((msg.content.parts[0] as any).text).toEqual(messageContent[idx]);
1858
- });
1859
- });
1860
-
1861
- it('should rollback on error during message save', async () => {
1862
- const thread = createSampleThread();
1863
- await store.saveThread({ thread });
1864
-
1865
- const messages = [
1866
- createSampleMessageV1({ threadId: thread.id }),
1867
- { ...createSampleMessageV1({ threadId: thread.id }), id: null } as any, // This will cause an error
1868
- ];
1869
-
1870
- await expect(store.saveMessages({ messages })).rejects.toThrow();
1871
-
1872
- // Verify no messages were saved
1873
- const savedMessages = await store.getMessagesPaginated({ threadId: thread.id, format: 'v2' });
1874
- expect(savedMessages.messages).toHaveLength(0);
1875
- });
1876
-
1877
- it('should retrieve messages w/ next/prev messages by message id + resource id', async () => {
1878
- const thread = createSampleThread({ id: 'thread-one' });
1879
- await store.saveThread({ thread });
1880
-
1881
- const thread2 = createSampleThread({ id: 'thread-two' });
1882
- await store.saveThread({ thread: thread2 });
1883
-
1884
- const thread3 = createSampleThread({ id: 'thread-three' });
1885
- await store.saveThread({ thread: thread3 });
1886
-
1887
- const messages: MastraMessageV2[] = [
1888
- createSampleMessageV2({
1889
- threadId: 'thread-one',
1890
- content: { content: 'First' },
1891
- resourceId: 'cross-thread-resource',
1892
- }),
1893
- createSampleMessageV2({
1894
- threadId: 'thread-one',
1895
- content: { content: 'Second' },
1896
- resourceId: 'cross-thread-resource',
1897
- }),
1898
- createSampleMessageV2({
1899
- threadId: 'thread-one',
1900
- content: { content: 'Third' },
1901
- resourceId: 'cross-thread-resource',
1902
- }),
1903
-
1904
- createSampleMessageV2({
1905
- threadId: 'thread-two',
1906
- content: { content: 'Fourth' },
1907
- resourceId: 'cross-thread-resource',
1908
- }),
1909
- createSampleMessageV2({
1910
- threadId: 'thread-two',
1911
- content: { content: 'Fifth' },
1912
- resourceId: 'cross-thread-resource',
1913
- }),
1914
- createSampleMessageV2({
1915
- threadId: 'thread-two',
1916
- content: { content: 'Sixth' },
1917
- resourceId: 'cross-thread-resource',
1918
- }),
1919
-
1920
- createSampleMessageV2({
1921
- threadId: 'thread-three',
1922
- content: { content: 'Seventh' },
1923
- resourceId: 'other-resource',
1924
- }),
1925
- createSampleMessageV2({
1926
- threadId: 'thread-three',
1927
- content: { content: 'Eighth' },
1928
- resourceId: 'other-resource',
1929
- }),
1930
- ];
1931
-
1932
- await store.saveMessages({ messages: messages, format: 'v2' });
1933
-
1934
- const retrievedMessages = await store.getMessagesPaginated({ threadId: 'thread-one', format: 'v2' });
1935
- expect(retrievedMessages.messages).toHaveLength(3);
1936
- expect(retrievedMessages.messages.map((m: any) => m.content.parts[0].text)).toEqual([
1937
- 'First',
1938
- 'Second',
1939
- 'Third',
1940
- ]);
1941
-
1942
- const retrievedMessages2 = await store.getMessagesPaginated({ threadId: 'thread-two', format: 'v2' });
1943
- expect(retrievedMessages2.messages).toHaveLength(3);
1944
- expect(retrievedMessages2.messages.map((m: any) => m.content.parts[0].text)).toEqual([
1945
- 'Fourth',
1946
- 'Fifth',
1947
- 'Sixth',
1948
- ]);
1949
-
1950
- const retrievedMessages3 = await store.getMessagesPaginated({ threadId: 'thread-three', format: 'v2' });
1951
- expect(retrievedMessages3.messages).toHaveLength(2);
1952
- expect(retrievedMessages3.messages.map((m: any) => m.content.parts[0].text)).toEqual(['Seventh', 'Eighth']);
1953
-
1954
- const { messages: crossThreadMessages } = await store.getMessagesPaginated({
1955
- threadId: 'thread-doesnt-exist',
1956
- format: 'v2',
1957
- selectBy: {
1958
- last: 0,
1959
- include: [
1960
- {
1961
- id: messages[1].id,
1962
- threadId: 'thread-one',
1963
- withNextMessages: 2,
1964
- withPreviousMessages: 2,
1965
- },
1966
- {
1967
- id: messages[4].id,
1968
- threadId: 'thread-two',
1969
- withPreviousMessages: 2,
1970
- withNextMessages: 2,
1971
- },
1972
- ],
1973
- },
1974
- });
1975
-
1976
- expect(crossThreadMessages).toHaveLength(6);
1977
- expect(crossThreadMessages.filter(m => m.threadId === `thread-one`)).toHaveLength(3);
1978
- expect(crossThreadMessages.filter(m => m.threadId === `thread-two`)).toHaveLength(3);
1979
-
1980
- const crossThreadMessages2 = await store.getMessagesPaginated({
1981
- threadId: 'thread-one',
1982
- format: 'v2',
1983
- selectBy: {
1984
- last: 0,
1985
- include: [
1986
- {
1987
- id: messages[4].id,
1988
- threadId: 'thread-two',
1989
- withPreviousMessages: 1,
1990
- withNextMessages: 1,
1991
- },
1992
- ],
1993
- },
1994
- });
1995
-
1996
- expect(crossThreadMessages2.messages).toHaveLength(3);
1997
- expect(crossThreadMessages2.messages.filter(m => m.threadId === `thread-one`)).toHaveLength(0);
1998
- expect(crossThreadMessages2.messages.filter(m => m.threadId === `thread-two`)).toHaveLength(3);
1999
-
2000
- const crossThreadMessages3 = await store.getMessagesPaginated({
2001
- threadId: 'thread-two',
2002
- format: 'v2',
2003
- selectBy: {
2004
- last: 0,
2005
- include: [
2006
- {
2007
- id: messages[1].id,
2008
- threadId: 'thread-one',
2009
- withNextMessages: 1,
2010
- withPreviousMessages: 1,
2011
- },
2012
- ],
2013
- },
2014
- });
2015
-
2016
- expect(crossThreadMessages3.messages).toHaveLength(3);
2017
- expect(crossThreadMessages3.messages.filter(m => m.threadId === `thread-one`)).toHaveLength(3);
2018
- expect(crossThreadMessages3.messages.filter(m => m.threadId === `thread-two`)).toHaveLength(0);
2019
- });
2020
-
2021
- it('should return messages using both last and include (cross-thread, deduped)', async () => {
2022
- const thread = createSampleThread({ id: 'thread-one' });
2023
- await store.saveThread({ thread });
2024
-
2025
- const thread2 = createSampleThread({ id: 'thread-two' });
2026
- await store.saveThread({ thread: thread2 });
2027
-
2028
- const now = new Date();
2029
-
2030
- // Setup: create messages in two threads
2031
- const messages = [
2032
- createSampleMessageV2({
2033
- threadId: 'thread-one',
2034
- content: { content: 'A' },
2035
- createdAt: new Date(now.getTime()),
2036
- }),
2037
- createSampleMessageV2({
2038
- threadId: 'thread-one',
2039
- content: { content: 'B' },
2040
- createdAt: new Date(now.getTime() + 1000),
2041
- }),
2042
- createSampleMessageV2({
2043
- threadId: 'thread-one',
2044
- content: { content: 'C' },
2045
- createdAt: new Date(now.getTime() + 2000),
2046
- }),
2047
- createSampleMessageV2({
2048
- threadId: 'thread-two',
2049
- content: { content: 'D' },
2050
- createdAt: new Date(now.getTime() + 3000),
2051
- }),
2052
- createSampleMessageV2({
2053
- threadId: 'thread-two',
2054
- content: { content: 'E' },
2055
- createdAt: new Date(now.getTime() + 4000),
2056
- }),
2057
- createSampleMessageV2({
2058
- threadId: 'thread-two',
2059
- content: { content: 'F' },
2060
- createdAt: new Date(now.getTime() + 5000),
2061
- }),
2062
- ];
2063
- await store.saveMessages({ messages, format: 'v2' });
2064
-
2065
- // Use last: 2 and include a message from another thread with context
2066
- const { messages: result } = await store.getMessagesPaginated({
2067
- threadId: 'thread-one',
2068
- format: 'v2',
2069
- selectBy: {
2070
- last: 2,
2071
- include: [
2072
- {
2073
- id: messages[4].id, // 'E' from thread-two
2074
- threadId: 'thread-two',
2075
- withPreviousMessages: 1,
2076
- withNextMessages: 1,
2077
- },
2078
- ],
2079
- },
2080
- });
2081
-
2082
- // Should include last 2 from thread-one and 3 from thread-two (D, E, F)
2083
- expect(result.map(m => m.content.content).sort()).toEqual(['B', 'C', 'D', 'E', 'F']);
2084
- // Should include 2 from thread-one
2085
- expect(result.filter(m => m.threadId === 'thread-one').map((m: any) => m.content.content)).toEqual(['B', 'C']);
2086
- // Should include 3 from thread-two
2087
- expect(result.filter(m => m.threadId === 'thread-two').map((m: any) => m.content.content)).toEqual([
2088
- 'D',
2089
- 'E',
2090
- 'F',
2091
- ]);
2092
- });
2093
- });
2094
-
2095
- describe('getThreadsByResourceId with pagination', () => {
2096
- it('should return paginated threads with total count', async () => {
2097
- const resourceId = `pg-paginated-resource-${randomUUID()}`;
2098
- const threadPromises = Array.from({ length: 17 }, () =>
2099
- store.saveThread({ thread: { ...createSampleThread(), resourceId } }),
2100
- );
2101
- await Promise.all(threadPromises);
2102
-
2103
- const page1 = await store.getThreadsByResourceIdPaginated({ resourceId, page: 0, perPage: 7 });
2104
- expect(page1.threads).toHaveLength(7);
2105
- expect(page1.total).toBe(17);
2106
- expect(page1.page).toBe(0);
2107
- expect(page1.perPage).toBe(7);
2108
- expect(page1.hasMore).toBe(true);
2109
-
2110
- const page3 = await store.getThreadsByResourceIdPaginated({ resourceId, page: 2, perPage: 7 });
2111
- expect(page3.threads).toHaveLength(3); // 17 total, 7 per page, 3rd page has 17 - 2*7 = 3
2112
- expect(page3.total).toBe(17);
2113
- expect(page3.hasMore).toBe(false);
2114
- });
2115
-
2116
- it('should return paginated results when no pagination params for getThreadsByResourceId', async () => {
2117
- const resourceId = `pg-non-paginated-resource-${randomUUID()}`;
2118
- await store.saveThread({ thread: { ...createSampleThread(), resourceId } });
2119
-
2120
- const results = await store.getThreadsByResourceIdPaginated({ resourceId });
2121
- expect(Array.isArray(results.threads)).toBe(true);
2122
- expect(results.threads.length).toBe(1);
2123
- expect(results.total).toBe(1);
2124
- expect(results.page).toBe(0);
2125
- expect(results.perPage).toBe(100);
2126
- expect(results.hasMore).toBe(false);
2127
- });
2128
- });
2129
- });
2130
-
2131
- describe('PgStorage Table Name Quoting', () => {
2132
- const camelCaseTable = 'TestCamelCaseTable';
2133
- const snakeCaseTable = 'test_snake_case_table';
2134
- const BASE_SCHEMA = {
2135
- id: { type: 'integer', primaryKey: true, nullable: false },
2136
- name: { type: 'text', nullable: true },
2137
- } as Record<string, StorageColumn>;
2138
-
2139
- beforeEach(async () => {
2140
- // Only clear tables if store is initialized
2141
- try {
2142
- // Clear tables before each test
2143
- await store.clearTable({ tableName: camelCaseTable as TABLE_NAMES });
2144
- await store.clearTable({ tableName: snakeCaseTable as TABLE_NAMES });
2145
- } catch (error) {
2146
- // Ignore errors during table clearing
2147
- console.warn('Error clearing tables:', error);
2148
- }
2149
- });
2150
-
2151
- afterEach(async () => {
2152
- // Only clear tables if store is initialized
2153
- try {
2154
- // Clear tables after each test
2155
- await store.clearTable({ tableName: camelCaseTable as TABLE_NAMES });
2156
- await store.clearTable({ tableName: snakeCaseTable as TABLE_NAMES });
2157
- } catch (error) {
2158
- // Ignore errors during table clearing
2159
- console.warn('Error clearing tables:', error);
2160
- }
2161
- });
2162
-
2163
- it('should create and upsert to a camelCase table without quoting errors', async () => {
2164
- await expect(
2165
- store.createTable({
2166
- tableName: camelCaseTable as TABLE_NAMES,
2167
- schema: BASE_SCHEMA,
2168
- }),
2169
- ).resolves.not.toThrow();
2170
-
2171
- await store.insert({
2172
- tableName: camelCaseTable as TABLE_NAMES,
2173
- record: { id: '1', name: 'Alice' },
2174
- });
2175
-
2176
- const row: any = await store.load({
2177
- tableName: camelCaseTable as TABLE_NAMES,
2178
- keys: { id: '1' },
2179
- });
2180
- expect(row?.name).toBe('Alice');
2181
- });
2182
-
2183
- it('should create and upsert to a snake_case table without quoting errors', async () => {
2184
- await expect(
2185
- store.createTable({
2186
- tableName: snakeCaseTable as TABLE_NAMES,
2187
- schema: BASE_SCHEMA,
2188
- }),
2189
- ).resolves.not.toThrow();
2190
-
2191
- await store.insert({
2192
- tableName: snakeCaseTable as TABLE_NAMES,
2193
- record: { id: '2', name: 'Bob' },
2194
- });
2195
-
2196
- const row: any = await store.load({
2197
- tableName: snakeCaseTable as TABLE_NAMES,
2198
- keys: { id: '2' },
2199
- });
2200
- expect(row?.name).toBe('Bob');
2201
- });
2202
- });
2203
-
2204
- describe('Permission Handling', () => {
2205
- const schemaRestrictedUser = 'mastra_schema_restricted_storage';
2206
- const restrictedPassword = 'test123';
2207
- const testSchema = 'testSchema';
2208
- let adminDb: pgPromise.IDatabase<{}>;
2209
- let pgpAdmin: pgPromise.IMain;
2210
-
2211
- beforeAll(async () => {
2212
- // Create a separate pg-promise instance for admin operations
2213
- pgpAdmin = pgPromise();
2214
- adminDb = pgpAdmin(connectionString);
2215
- try {
2216
- await adminDb.tx(async t => {
2217
- // Drop the test schema if it exists from previous runs
2218
- await t.none(`DROP SCHEMA IF EXISTS ${testSchema} CASCADE`);
2219
-
2220
- // Create a schema-restricted user with minimal permissions
2221
- await t.none(`
2222
- DO $$
2223
- BEGIN
2224
- IF NOT EXISTS (SELECT FROM pg_catalog.pg_roles WHERE rolname = '${schemaRestrictedUser}') THEN
2225
- CREATE USER ${schemaRestrictedUser} WITH PASSWORD '${restrictedPassword}' NOCREATEDB;
2226
- END IF;
2227
- END
2228
- $$;`);
2229
-
2230
- // Grant only CONNECT and USAGE to the schema-restricted user
2231
- await t.none(`
2232
- REVOKE ALL ON DATABASE ${TEST_CONFIG.database} FROM ${schemaRestrictedUser};
2233
- GRANT CONNECT ON DATABASE ${TEST_CONFIG.database} TO ${schemaRestrictedUser};
2234
- REVOKE ALL ON SCHEMA public FROM ${schemaRestrictedUser};
2235
- GRANT USAGE ON SCHEMA public TO ${schemaRestrictedUser};
2236
- `);
2237
- });
2238
- } catch (error) {
2239
- // Clean up the database connection on error
2240
- pgpAdmin.end();
2241
- throw error;
2242
- }
2243
- });
2244
-
2245
- afterAll(async () => {
2246
- try {
2247
- // First close any store connections
2248
- if (store) {
2249
- await store.close();
2250
- }
2251
-
2252
- // Then clean up test user in admin connection
2253
- await adminDb.tx(async t => {
2254
- await t.none(`
2255
- REASSIGN OWNED BY ${schemaRestrictedUser} TO postgres;
2256
- DROP OWNED BY ${schemaRestrictedUser};
2257
- DROP USER IF EXISTS ${schemaRestrictedUser};
2258
- `);
2259
- });
2260
-
2261
- // Finally clean up admin connection
2262
- if (pgpAdmin) {
2263
- pgpAdmin.end();
2264
- }
2265
- } catch (error) {
2266
- console.error('Error cleaning up test user:', error);
2267
- // Still try to clean up connections even if user cleanup fails
2268
- if (store) await store.close();
2269
- if (pgpAdmin) pgpAdmin.end();
2270
- }
2271
- });
2272
-
2273
- describe('Schema Creation', () => {
2274
- beforeEach(async () => {
2275
- // Create a fresh connection for each test
2276
- const tempPgp = pgPromise();
2277
- const tempDb = tempPgp(connectionString);
2278
-
2279
- try {
2280
- // Ensure schema doesn't exist before each test
2281
- await tempDb.none(`DROP SCHEMA IF EXISTS ${testSchema} CASCADE`);
2282
-
2283
- // Ensure no active connections from restricted user
2284
- await tempDb.none(`
2285
- SELECT pg_terminate_backend(pid)
2286
- FROM pg_stat_activity
2287
- WHERE usename = '${schemaRestrictedUser}'
2288
- `);
2289
- } finally {
2290
- tempPgp.end(); // Always clean up the connection
2291
- }
2292
- });
2293
-
2294
- afterEach(async () => {
2295
- // Create a fresh connection for cleanup
2296
- const tempPgp = pgPromise();
2297
- const tempDb = tempPgp(connectionString);
2298
-
2299
- try {
2300
- // Clean up any connections from the restricted user and drop schema
2301
- await tempDb.none(`
2302
- DO $$
2303
- BEGIN
2304
- -- Terminate connections
2305
- PERFORM pg_terminate_backend(pid)
2306
- FROM pg_stat_activity
2307
- WHERE usename = '${schemaRestrictedUser}';
2308
-
2309
- -- Drop schema
2310
- DROP SCHEMA IF EXISTS ${testSchema} CASCADE;
2311
- END $$;
2312
- `);
2313
- } catch (error) {
2314
- console.error('Error in afterEach cleanup:', error);
2315
- } finally {
2316
- tempPgp.end(); // Always clean up the connection
2317
- }
2318
- });
2319
-
2320
- it('should fail when user lacks CREATE privilege', async () => {
2321
- const restrictedDB = new PostgresStore({
2322
- ...TEST_CONFIG,
2323
- user: schemaRestrictedUser,
2324
- password: restrictedPassword,
2325
- schemaName: testSchema,
2326
- });
2327
-
2328
- // Create a fresh connection for verification
2329
- const tempPgp = pgPromise();
2330
- const tempDb = tempPgp(connectionString);
2331
-
2332
- try {
2333
- // Test schema creation by initializing the store
2334
- await expect(async () => {
2335
- await restrictedDB.init();
2336
- }).rejects.toThrow(
2337
- `Unable to create schema "${testSchema}". This requires CREATE privilege on the database.`,
2338
- );
2339
-
2340
- // Verify schema was not created
2341
- const exists = await tempDb.oneOrNone(
2342
- `SELECT EXISTS (SELECT 1 FROM information_schema.schemata WHERE schema_name = $1)`,
2343
- [testSchema],
2344
- );
2345
- expect(exists?.exists).toBe(false);
2346
- } finally {
2347
- await restrictedDB.close();
2348
- tempPgp.end(); // Clean up the verification connection
2349
- }
2350
- });
2351
-
2352
- it('should fail with schema creation error when saving thread', async () => {
2353
- const restrictedDB = new PostgresStore({
2354
- ...TEST_CONFIG,
2355
- user: schemaRestrictedUser,
2356
- password: restrictedPassword,
2357
- schemaName: testSchema,
2358
- });
2359
-
2360
- // Create a fresh connection for verification
2361
- const tempPgp = pgPromise();
2362
- const tempDb = tempPgp(connectionString);
2363
-
2364
- try {
2365
- await expect(async () => {
2366
- await restrictedDB.init();
2367
- const thread = createSampleThread();
2368
- await restrictedDB.saveThread({ thread });
2369
- }).rejects.toThrow(
2370
- `Unable to create schema "${testSchema}". This requires CREATE privilege on the database.`,
2371
- );
2372
-
2373
- // Verify schema was not created
2374
- const exists = await tempDb.oneOrNone(
2375
- `SELECT EXISTS (SELECT 1 FROM information_schema.schemata WHERE schema_name = $1)`,
2376
- [testSchema],
2377
- );
2378
- expect(exists?.exists).toBe(false);
2379
- } finally {
2380
- await restrictedDB.close();
2381
- tempPgp.end(); // Clean up the verification connection
2382
- }
2383
- });
2384
- });
2385
- });
8
+ createTestSuite(new PostgresStore(TEST_CONFIG));
2386
9
 
2387
- afterAll(async () => {
2388
- try {
2389
- await store.close();
2390
- } catch (error) {
2391
- console.warn('Error closing store:', error);
2392
- }
2393
- });
2394
- });
10
+ pgTests();