@mastra/mssql 0.2.1-alpha.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,2239 @@
1
+ import { randomUUID } from 'crypto';
2
+ import {
3
+ createSampleEval,
4
+ createSampleTraceForDB,
5
+ createSampleThread,
6
+ createSampleMessageV1,
7
+ createSampleMessageV2,
8
+ createSampleWorkflowSnapshot,
9
+ resetRole,
10
+ checkWorkflowSnapshot,
11
+ } from '@internal/storage-test-utils';
12
+ import type { MastraMessageV2 } from '@mastra/core/agent';
13
+ import type { MastraMessageV1, StorageThreadType } from '@mastra/core/memory';
14
+ import type { StorageColumn, TABLE_NAMES } from '@mastra/core/storage';
15
+ import {
16
+ TABLE_WORKFLOW_SNAPSHOT,
17
+ TABLE_MESSAGES,
18
+ TABLE_THREADS,
19
+ TABLE_EVALS,
20
+ TABLE_TRACES,
21
+ } from '@mastra/core/storage';
22
+ import type { WorkflowRunState } from '@mastra/core/workflows';
23
+ import sql from 'mssql';
24
+ import { describe, it, expect, beforeAll, beforeEach, afterAll, afterEach, vi } from 'vitest';
25
+
26
+ import { MSSQLStore } from '.';
27
+ import type { MSSQLConfig } from '.';
28
+
29
+ const TEST_CONFIG: MSSQLConfig = {
30
+ server: process.env.MSSQL_HOST || 'localhost',
31
+ port: Number(process.env.MSSQL_PORT) || 1433,
32
+ database: process.env.MSSQL_DB || 'master',
33
+ user: process.env.MSSQL_USER || 'sa',
34
+ password: process.env.MSSQL_PASSWORD || 'Your_password123',
35
+ };
36
+
37
+ const connectionString = `mssql://${TEST_CONFIG.user}:${TEST_CONFIG.password}@${TEST_CONFIG.server}:${TEST_CONFIG.port}/${TEST_CONFIG.database}`;
38
+
39
+ vi.setConfig({ testTimeout: 60_000, hookTimeout: 60_000 });
40
+
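For orientation, a minimal standalone usage sketch of the store these tests exercise. The config fields mirror TEST_CONFIG above and the init/close calls mirror the test hooks; the import path is assumed from the package name.

import { MSSQLStore } from '@mastra/mssql';

const store = new MSSQLStore({
  server: 'localhost',
  port: 1433,
  database: 'master',
  user: 'sa',
  password: 'Your_password123',
});
// Alternatively: new MSSQLStore({ connectionString: 'mssql://sa:Your_password123@localhost:1433/master' })

await store.init();  // set up storage before use, as the tests do in beforeAll
// ...saveThread / saveMessages / getMessages / persistWorkflowSnapshot, as exercised below...
await store.close(); // release the underlying mssql connection pool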
41
+ describe('MSSQLStore', () => {
42
+ let store: MSSQLStore;
43
+
44
+ beforeAll(async () => {
45
+ store = new MSSQLStore(TEST_CONFIG);
46
+ await store.init();
47
+ });
48
+
49
+ describe('Public Fields Access (MSSQL)', () => {
50
+ let testDB: MSSQLStore;
51
+
52
+ beforeAll(async () => {
53
+ testDB = new MSSQLStore(TEST_CONFIG);
54
+ await testDB.init();
55
+ });
56
+
57
+ afterAll(async () => {
58
+ try {
59
+ await testDB.close();
60
+ } catch {}
61
+ });
62
+
63
+ it('should expose pool field as public', () => {
64
+ expect(testDB.pool).toBeDefined();
65
+ // For mssql, the public field is a connection pool, so it should expose request()
66
+ expect(typeof testDB.pool).toBe('object');
67
+ expect(typeof testDB.pool.request).toBe('function');
68
+ });
69
+
70
+ it('should allow direct database queries via public pool field', async () => {
71
+ const result = await testDB.pool.request().query('SELECT 1 as test');
72
+ expect(result.recordset[0].test).toBe(1);
73
+ });
74
+
75
+ it('should maintain connection state through public pool field', async () => {
76
+ // MSSQL: Use SYSDATETIME() for current timestamp
77
+ const result1 = await testDB.pool.request().query('SELECT SYSDATETIME() as timestamp1');
78
+ const result2 = await testDB.pool.request().query('SELECT SYSDATETIME() as timestamp2');
79
+
80
+ expect(result1.recordset[0].timestamp1).toBeDefined();
81
+ expect(result2.recordset[0].timestamp2).toBeDefined();
82
+ // The second timestamp should be no earlier than the first (comparison works for Date or ISO-string values)
83
+ expect(result2.recordset[0].timestamp2 >= result1.recordset[0].timestamp1).toBe(true);
84
+ });
85
+
86
+ it('should throw error when pool is used after disconnect', async () => {
87
+ await testDB.close();
88
+ await expect(testDB.pool.request().query('SELECT 1')).rejects.toThrow();
89
+ });
90
+ });
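Because the pool is public, parameterized queries can also go through the standard mssql request API. A sketch only; the table and column names below are illustrative, not the store's actual schema.

import sql from 'mssql';

const result = await store.pool
  .request()
  .input('threadId', sql.NVarChar, 'thread-one') // bind a named parameter
  .query('SELECT COUNT(*) AS n FROM my_messages WHERE thread_id = @threadId'); // hypothetical table/column
console.log(result.recordset[0].n);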
91
+
92
+ beforeEach(async () => {
93
+ // Reset storage state between tests; errors are tolerated in case tables are not initialized yet
94
+ try {
95
+ // Clear tables before each test
96
+ await store.clearTable({ tableName: TABLE_WORKFLOW_SNAPSHOT });
97
+ await store.clearTable({ tableName: TABLE_MESSAGES });
98
+ await store.clearTable({ tableName: TABLE_THREADS });
99
+ await store.clearTable({ tableName: TABLE_EVALS });
100
+ await store.clearTable({ tableName: TABLE_TRACES });
101
+ } catch (error) {
102
+ // Ignore errors during table clearing
103
+ console.warn('Error clearing tables:', error);
104
+ }
105
+ });
106
+
107
+ // --- Validation tests ---
108
+ describe('Validation', () => {
109
+ const validConfig = TEST_CONFIG;
110
+ it('throws if connectionString is empty', () => {
111
+ expect(() => new MSSQLStore({ connectionString: '' })).toThrow(
112
+ /connectionString must be provided and cannot be empty/,
113
+ );
114
+ });
115
+ it('throws if server is missing or empty', () => {
116
+ expect(() => new MSSQLStore({ ...validConfig, server: '' })).toThrow(
117
+ /server must be provided and cannot be empty/,
118
+ );
119
+ const { server, ...rest } = validConfig;
120
+ expect(() => new MSSQLStore(rest as any)).toThrow(/server must be provided and cannot be empty/);
121
+ });
122
+ it('throws if user is missing or empty', () => {
123
+ expect(() => new MSSQLStore({ ...validConfig, user: '' })).toThrow(/user must be provided and cannot be empty/);
124
+ const { user, ...rest } = validConfig;
125
+ expect(() => new MSSQLStore(rest as any)).toThrow(/user must be provided and cannot be empty/);
126
+ });
127
+ it('throws if database is missing or empty', () => {
128
+ expect(() => new MSSQLStore({ ...validConfig, database: '' })).toThrow(
129
+ /database must be provided and cannot be empty/,
130
+ );
131
+ const { database, ...rest } = validConfig;
132
+ expect(() => new MSSQLStore(rest as any)).toThrow(/database must be provided and cannot be empty/);
133
+ });
134
+ it('throws if password is missing or empty', () => {
135
+ expect(() => new MSSQLStore({ ...validConfig, password: '' })).toThrow(
136
+ /password must be provided and cannot be empty/,
137
+ );
138
+ const { password, ...rest } = validConfig;
139
+ expect(() => new MSSQLStore(rest as any)).toThrow(/password must be provided and cannot be empty/);
140
+ });
141
+ it('does not throw on valid config (host-based)', () => {
142
+ expect(() => new MSSQLStore(validConfig)).not.toThrow();
143
+ });
144
+ it('does not throw on non-empty connection string', () => {
145
+ expect(() => new MSSQLStore({ connectionString })).not.toThrow();
146
+ });
147
+ });
148
+
149
+ describe('Thread Operations', () => {
150
+ it('should create and retrieve a thread', async () => {
151
+ const thread = createSampleThread();
152
+
153
+ // Save thread
154
+ const savedThread = await store.saveThread({ thread });
155
+ expect(savedThread).toEqual(thread);
156
+
157
+ // Retrieve thread
158
+ const retrievedThread = await store.getThreadById({ threadId: thread.id });
159
+ expect(retrievedThread?.title).toEqual(thread.title);
160
+ });
161
+
162
+ it('should return null for non-existent thread', async () => {
163
+ const result = await store.getThreadById({ threadId: 'non-existent' });
164
+ expect(result).toBeNull();
165
+ });
166
+
167
+ it('should get threads by resource ID', async () => {
168
+ const thread1 = createSampleThread();
169
+ const thread2 = { ...createSampleThread(), resourceId: thread1.resourceId };
170
+
171
+ await store.saveThread({ thread: thread1 });
172
+ await store.saveThread({ thread: thread2 });
173
+
174
+ const threads = await store.getThreadsByResourceId({ resourceId: thread1.resourceId });
175
+ expect(threads).toHaveLength(2);
176
+ expect(threads.map(t => t.id)).toEqual(expect.arrayContaining([thread1.id, thread2.id]));
177
+ });
178
+
179
+ it('should update thread title and metadata', async () => {
180
+ const thread = createSampleThread();
181
+ await store.saveThread({ thread });
182
+
183
+ const newMetadata = { newKey: 'newValue' };
184
+ const updatedThread = await store.updateThread({
185
+ id: thread.id,
186
+ title: 'Updated Title',
187
+ metadata: newMetadata,
188
+ });
189
+
190
+ expect(updatedThread.title).toBe('Updated Title');
191
+ expect(updatedThread.metadata).toEqual({
192
+ ...thread.metadata,
193
+ ...newMetadata,
194
+ });
195
+
196
+ // Verify persistence
197
+ const retrievedThread = await store.getThreadById({ threadId: thread.id });
198
+ expect(retrievedThread).toEqual(updatedThread);
199
+ });
200
+
201
+ it('should delete thread and its messages', async () => {
202
+ const thread = createSampleThread();
203
+ await store.saveThread({ thread });
204
+
205
+ // Add some messages
206
+ const messages = [createSampleMessageV1({ threadId: thread.id }), createSampleMessageV1({ threadId: thread.id })];
207
+ await store.saveMessages({ messages });
208
+
209
+ await store.deleteThread({ threadId: thread.id });
210
+
211
+ const retrievedThread = await store.getThreadById({ threadId: thread.id });
212
+ expect(retrievedThread).toBeNull();
213
+
214
+ // Verify messages were also deleted
215
+ const retrievedMessages = await store.getMessages({ threadId: thread.id });
216
+ expect(retrievedMessages).toHaveLength(0);
217
+ });
218
+
219
+ it('should update thread updatedAt when a message is saved to it', async () => {
220
+ const thread = createSampleThread();
221
+ await store.saveThread({ thread });
222
+
223
+ // Get the initial thread to capture the original updatedAt
224
+ const initialThread = await store.getThreadById({ threadId: thread.id });
225
+ expect(initialThread).toBeDefined();
226
+ const originalUpdatedAt = initialThread!.updatedAt;
227
+
228
+ // Wait a small amount to ensure different timestamp
229
+ await new Promise(resolve => setTimeout(resolve, 10));
230
+
231
+ // Create and save a message to the thread
232
+ const message = createSampleMessageV1({ threadId: thread.id });
233
+ await store.saveMessages({ messages: [message] });
234
+
235
+ // Retrieve the thread again and check that updatedAt was updated
236
+ const updatedThread = await store.getThreadById({ threadId: thread.id });
237
+ expect(updatedThread).toBeDefined();
238
+ expect(updatedThread!.updatedAt.getTime()).toBeGreaterThan(originalUpdatedAt.getTime());
239
+ });
240
+ });
241
+
242
+ describe('Message Operations', () => {
243
+ it('should save and retrieve messages', async () => {
244
+ const thread = createSampleThread();
245
+ await store.saveThread({ thread });
246
+
247
+ const messages = [createSampleMessageV1({ threadId: thread.id }), createSampleMessageV1({ threadId: thread.id })];
248
+
249
+ // Save messages
250
+ const savedMessages = await store.saveMessages({ messages });
251
+
252
+ // Retrieve messages
253
+ const retrievedMessages = await store.getMessages({ threadId: thread.id, format: 'v1' });
254
+
255
+ const checkMessages = messages.map(m => {
256
+ const { resourceId, ...rest } = m;
257
+ return rest;
258
+ });
259
+
260
+ expect(savedMessages).toEqual(messages);
261
+ expect(retrievedMessages).toHaveLength(2);
262
+ expect(retrievedMessages).toEqual(expect.arrayContaining(checkMessages));
263
+ });
264
+
265
+ it('should handle empty message array', async () => {
266
+ const result = await store.saveMessages({ messages: [] });
267
+ expect(result).toEqual([]);
268
+ });
269
+
270
+ it('should maintain message order', async () => {
271
+ const thread = createSampleThread();
272
+ await store.saveThread({ thread });
273
+
274
+ const messageContent = ['First', 'Second', 'Third'];
275
+
276
+ const messages = messageContent.map(content =>
277
+ createSampleMessageV2({ threadId: thread.id, content: { content, parts: [{ type: 'text', text: content }] } }),
278
+ );
279
+
280
+ await store.saveMessages({ messages, format: 'v2' });
281
+
282
+ const retrievedMessages = await store.getMessages({ threadId: thread.id, format: 'v2' });
283
+ expect(retrievedMessages).toHaveLength(3);
284
+
285
+ // Verify order is maintained
286
+ retrievedMessages.forEach((msg, idx) => {
287
+ expect((msg.content.parts[0] as any).text).toEqual(messageContent[idx]);
288
+ });
289
+ });
290
+
291
+ it('should rollback on error during message save', async () => {
292
+ const thread = createSampleThread();
293
+ await store.saveThread({ thread });
294
+
295
+ const messages = [
296
+ createSampleMessageV1({ threadId: thread.id }),
297
+ { ...createSampleMessageV1({ threadId: thread.id }), id: null } as any, // This will cause an error
298
+ ];
299
+
300
+ await expect(store.saveMessages({ messages })).rejects.toThrow();
301
+
302
+ // Verify no messages were saved
303
+ const savedMessages = await store.getMessages({ threadId: thread.id });
304
+ expect(savedMessages).toHaveLength(0);
305
+ });
306
+
307
+ it('should retrieve messages w/ next/prev messages by message id + resource id', async () => {
308
+ const thread = createSampleThread({ id: 'thread-one' });
309
+ await store.saveThread({ thread });
310
+
311
+ const thread2 = createSampleThread({ id: 'thread-two' });
312
+ await store.saveThread({ thread: thread2 });
313
+
314
+ const thread3 = createSampleThread({ id: 'thread-three' });
315
+ await store.saveThread({ thread: thread3 });
316
+
317
+ const messages: MastraMessageV2[] = [
318
+ createSampleMessageV2({
319
+ threadId: 'thread-one',
320
+ content: { content: 'First' },
321
+ resourceId: 'cross-thread-resource',
322
+ }),
323
+ createSampleMessageV2({
324
+ threadId: 'thread-one',
325
+ content: { content: 'Second' },
326
+ resourceId: 'cross-thread-resource',
327
+ }),
328
+ createSampleMessageV2({
329
+ threadId: 'thread-one',
330
+ content: { content: 'Third' },
331
+ resourceId: 'cross-thread-resource',
332
+ }),
333
+
334
+ createSampleMessageV2({
335
+ threadId: 'thread-two',
336
+ content: { content: 'Fourth' },
337
+ resourceId: 'cross-thread-resource',
338
+ }),
339
+ createSampleMessageV2({
340
+ threadId: 'thread-two',
341
+ content: { content: 'Fifth' },
342
+ resourceId: 'cross-thread-resource',
343
+ }),
344
+ createSampleMessageV2({
345
+ threadId: 'thread-two',
346
+ content: { content: 'Sixth' },
347
+ resourceId: 'cross-thread-resource',
348
+ }),
349
+
350
+ createSampleMessageV2({
351
+ threadId: 'thread-three',
352
+ content: { content: 'Seventh' },
353
+ resourceId: 'other-resource',
354
+ }),
355
+ createSampleMessageV2({
356
+ threadId: 'thread-three',
357
+ content: { content: 'Eighth' },
358
+ resourceId: 'other-resource',
359
+ }),
360
+ ];
361
+
362
+ await store.saveMessages({ messages: messages, format: 'v2' });
363
+
364
+ const retrievedMessages = await store.getMessages({ threadId: 'thread-one', format: 'v2' });
365
+ expect(retrievedMessages).toHaveLength(3);
366
+ expect(retrievedMessages.map((m: any) => m.content.parts[0].text)).toEqual(['First', 'Second', 'Third']);
367
+
368
+ const retrievedMessages2 = await store.getMessages({ threadId: 'thread-two', format: 'v2' });
369
+ expect(retrievedMessages2).toHaveLength(3);
370
+ expect(retrievedMessages2.map((m: any) => m.content.parts[0].text)).toEqual(['Fourth', 'Fifth', 'Sixth']);
371
+
372
+ const retrievedMessages3 = await store.getMessages({ threadId: 'thread-three', format: 'v2' });
373
+ expect(retrievedMessages3).toHaveLength(2);
374
+ expect(retrievedMessages3.map((m: any) => m.content.parts[0].text)).toEqual(['Seventh', 'Eighth']);
375
+
376
+ const { messages: crossThreadMessages } = await store.getMessagesPaginated({
377
+ threadId: 'thread-doesnt-exist',
378
+ format: 'v2',
379
+ selectBy: {
380
+ last: 0,
381
+ include: [
382
+ {
383
+ id: messages[1].id,
384
+ threadId: 'thread-one',
385
+ withNextMessages: 2,
386
+ withPreviousMessages: 2,
387
+ },
388
+ {
389
+ id: messages[4].id,
390
+ threadId: 'thread-two',
391
+ withPreviousMessages: 2,
392
+ withNextMessages: 2,
393
+ },
394
+ ],
395
+ },
396
+ });
397
+
398
+ expect(crossThreadMessages).toHaveLength(6);
399
+ expect(crossThreadMessages.filter(m => m.threadId === `thread-one`)).toHaveLength(3);
400
+ expect(crossThreadMessages.filter(m => m.threadId === `thread-two`)).toHaveLength(3);
401
+ });
402
+
403
+ it('should return messages using both last and include (cross-thread, deduped)', async () => {
404
+ const thread = createSampleThread({ id: 'thread-one' });
405
+ await store.saveThread({ thread });
406
+
407
+ const thread2 = createSampleThread({ id: 'thread-two' });
408
+ await store.saveThread({ thread: thread2 });
409
+
410
+ const now = new Date();
411
+
412
+ // Setup: create messages in two threads
413
+ const messages = [
414
+ createSampleMessageV2({
415
+ threadId: 'thread-one',
416
+ content: { content: 'A' },
417
+ createdAt: new Date(now.getTime()),
418
+ }),
419
+ createSampleMessageV2({
420
+ threadId: 'thread-one',
421
+ content: { content: 'B' },
422
+ createdAt: new Date(now.getTime() + 1000),
423
+ }),
424
+ createSampleMessageV2({
425
+ threadId: 'thread-one',
426
+ content: { content: 'C' },
427
+ createdAt: new Date(now.getTime() + 2000),
428
+ }),
429
+ createSampleMessageV2({
430
+ threadId: 'thread-two',
431
+ content: { content: 'D' },
432
+ createdAt: new Date(now.getTime() + 3000),
433
+ }),
434
+ createSampleMessageV2({
435
+ threadId: 'thread-two',
436
+ content: { content: 'E' },
437
+ createdAt: new Date(now.getTime() + 4000),
438
+ }),
439
+ createSampleMessageV2({
440
+ threadId: 'thread-two',
441
+ content: { content: 'F' },
442
+ createdAt: new Date(now.getTime() + 5000),
443
+ }),
444
+ ];
445
+ await store.saveMessages({ messages, format: 'v2' });
446
+
447
+ // Use last: 2 and include a message from another thread with context
448
+ const result = await store.getMessages({
449
+ threadId: 'thread-one',
450
+ format: 'v2',
451
+ selectBy: {
452
+ last: 2,
453
+ include: [
454
+ {
455
+ id: messages[4].id, // 'E' from thread-two
456
+ threadId: 'thread-two',
457
+ withPreviousMessages: 1,
458
+ withNextMessages: 1,
459
+ },
460
+ ],
461
+ },
462
+ });
463
+
464
+ // Should include last 2 from thread-one and 3 from thread-two (D, E, F)
465
+ expect(result.map(m => m.content.content).sort()).toEqual(['B', 'C', 'D', 'E', 'F']);
466
+ // Should include 2 from thread-one
467
+ expect(result.filter(m => m.threadId === 'thread-one').map(m => m.content.content)).toEqual(['B', 'C']);
468
+ // Should include 3 from thread-two
469
+ expect(result.filter(m => m.threadId === 'thread-two').map(m => m.content.content)).toEqual(['D', 'E', 'F']);
470
+ });
471
+ });
472
+
473
+ describe('updateMessages', () => {
474
+ let thread: StorageThreadType;
475
+
476
+ beforeEach(async () => {
477
+ const threadData = createSampleThread();
478
+ thread = await store.saveThread({ thread: threadData as StorageThreadType });
479
+ });
480
+
481
+ it('should update a single field of a message (e.g., role)', async () => {
482
+ const originalMessage = createSampleMessageV2({ threadId: thread.id, role: 'user', thread });
483
+ await store.saveMessages({ messages: [originalMessage], format: 'v2' });
484
+
485
+ const updatedMessages = await store.updateMessages({
486
+ messages: [{ id: originalMessage.id, role: 'assistant' }],
487
+ });
488
+
489
+ expect(updatedMessages).toHaveLength(1);
490
+ expect(updatedMessages[0].role).toBe('assistant');
491
+ expect(updatedMessages[0].content).toEqual(originalMessage.content); // Ensure content is unchanged
492
+ });
493
+
494
+ it('should update only the metadata within the content field, preserving other content', async () => {
495
+ const originalMessage = createSampleMessageV2({
496
+ threadId: thread.id,
497
+ content: { content: 'hello world', parts: [{ type: 'text', text: 'hello world' }] },
498
+ thread,
499
+ });
500
+ await store.saveMessages({ messages: [originalMessage], format: 'v2' });
501
+
502
+ const newMetadata = { someKey: 'someValue' };
503
+ await store.updateMessages({
504
+ messages: [{ id: originalMessage.id, content: { metadata: newMetadata } as any }],
505
+ });
506
+
507
+ const fromDb = await store.getMessages({ threadId: thread.id, format: 'v2' });
508
+ expect(fromDb[0].content.metadata).toEqual(newMetadata);
509
+ expect(fromDb[0].content.content).toBe('hello world');
510
+ expect(fromDb[0].content.parts).toEqual([{ type: 'text', text: 'hello world' }]);
511
+ });
512
+
513
+ it('should deep merge metadata, not overwrite it', async () => {
514
+ const originalMessage = createSampleMessageV2({
515
+ threadId: thread.id,
516
+ content: { metadata: { initial: true }, content: 'old content' },
517
+ thread,
518
+ });
519
+ await store.saveMessages({ messages: [originalMessage], format: 'v2' });
520
+
521
+ const newMetadata = { updated: true };
522
+ await store.updateMessages({
523
+ messages: [{ id: originalMessage.id, content: { metadata: newMetadata } as any }],
524
+ });
525
+
526
+ const fromDb = await store.getMessages({ threadId: thread.id, format: 'v2' });
527
+ expect(fromDb[0].content.metadata).toEqual({ initial: true, updated: true });
528
+ });
529
+
530
+ it('should update multiple messages at once', async () => {
531
+ const msg1 = createSampleMessageV2({ threadId: thread.id, role: 'user', thread });
532
+ const msg2 = createSampleMessageV2({ threadId: thread.id, content: { content: 'original' }, thread });
533
+ await store.saveMessages({ messages: [msg1, msg2], format: 'v2' });
534
+
535
+ await store.updateMessages({
536
+ messages: [
537
+ { id: msg1.id, role: 'assistant' },
538
+ { id: msg2.id, content: { content: 'updated' } as any },
539
+ ],
540
+ });
541
+
542
+ const fromDb = await store.getMessages({ threadId: thread.id, format: 'v2' });
543
+ const updatedMsg1 = fromDb.find(m => m.id === msg1.id)!;
544
+ const updatedMsg2 = fromDb.find(m => m.id === msg2.id)!;
545
+
546
+ expect(updatedMsg1.role).toBe('assistant');
547
+ expect(updatedMsg2.content.content).toBe('updated');
548
+ });
549
+
550
+ it('should update the parent thread updatedAt timestamp', async () => {
551
+ const originalMessage = createSampleMessageV2({ threadId: thread.id, thread });
552
+ await store.saveMessages({ messages: [originalMessage], format: 'v2' });
553
+ const initialThread = await store.getThreadById({ threadId: thread.id });
554
+
555
+ await new Promise(r => setTimeout(r, 10));
556
+
557
+ await store.updateMessages({ messages: [{ id: originalMessage.id, role: 'assistant' }] });
558
+
559
+ const updatedThread = await store.getThreadById({ threadId: thread.id });
560
+
561
+ expect(new Date(updatedThread!.updatedAt).getTime()).toBeGreaterThan(
562
+ new Date(initialThread!.updatedAt).getTime(),
563
+ );
564
+ });
565
+
566
+ it('should update timestamps on both threads when moving a message', async () => {
567
+ const thread2 = await store.saveThread({ thread: createSampleThread() });
568
+ const message = createSampleMessageV2({ threadId: thread.id, thread });
569
+ await store.saveMessages({ messages: [message], format: 'v2' });
570
+
571
+ const initialThread1 = await store.getThreadById({ threadId: thread.id });
572
+ const initialThread2 = await store.getThreadById({ threadId: thread2.id });
573
+
574
+ await new Promise(r => setTimeout(r, 10));
575
+
576
+ await store.updateMessages({
577
+ messages: [{ id: message.id, threadId: thread2.id }],
578
+ });
579
+
580
+ const updatedThread1 = await store.getThreadById({ threadId: thread.id });
581
+ const updatedThread2 = await store.getThreadById({ threadId: thread2.id });
582
+
583
+ expect(new Date(updatedThread1!.updatedAt).getTime()).toBeGreaterThan(
584
+ new Date(initialThread1!.updatedAt).getTime(),
585
+ );
586
+ expect(new Date(updatedThread2!.updatedAt).getTime()).toBeGreaterThan(
587
+ new Date(initialThread2!.updatedAt).getTime(),
588
+ );
589
+
590
+ // Verify the message was moved
591
+ const thread1Messages = await store.getMessages({ threadId: thread.id, format: 'v2' });
592
+ const thread2Messages = await store.getMessages({ threadId: thread2.id, format: 'v2' });
593
+ expect(thread1Messages).toHaveLength(0);
594
+ expect(thread2Messages).toHaveLength(1);
595
+ expect(thread2Messages[0].id).toBe(message.id);
596
+ });
597
+ it('should upsert messages: duplicate id+threadId results in update, not duplicate row', async () => {
598
+ const thread = await createSampleThread();
599
+ await store.saveThread({ thread });
600
+ const baseMessage = createSampleMessageV2({
601
+ threadId: thread.id,
602
+ createdAt: new Date(),
603
+ content: { content: 'Original' },
604
+ resourceId: thread.resourceId,
605
+ });
606
+
607
+ // Insert the message for the first time
608
+ await store.saveMessages({ messages: [baseMessage], format: 'v2' });
609
+
610
+ // Insert again with the same id and threadId but different content
611
+ const updatedMessage = {
612
+ ...createSampleMessageV2({
613
+ threadId: thread.id,
614
+ createdAt: new Date(),
615
+ content: { content: 'Updated' },
616
+ resourceId: thread.resourceId,
617
+ }),
618
+ id: baseMessage.id,
619
+ };
620
+
621
+ await store.saveMessages({ messages: [updatedMessage], format: 'v2' });
622
+
623
+ // Retrieve messages for the thread
624
+ const retrievedMessages = await store.getMessages({ threadId: thread.id, format: 'v2' });
625
+
626
+ // Only one message should exist for that id+threadId
627
+ expect(retrievedMessages.filter(m => m.id === baseMessage.id)).toHaveLength(1);
628
+
629
+ // The content should be the updated one
630
+ expect(retrievedMessages.find(m => m.id === baseMessage.id)?.content.content).toBe('Updated');
631
+ });
632
+
633
+ it('should upsert messages: duplicate id and different threadid', async () => {
634
+ const thread1 = await createSampleThread();
635
+ const thread2 = await createSampleThread();
636
+ await store.saveThread({ thread: thread1 });
637
+ await store.saveThread({ thread: thread2 });
638
+
639
+ const message = createSampleMessageV2({
640
+ threadId: thread1.id,
641
+ createdAt: new Date(),
642
+ content: { content: 'Thread1 Content' },
643
+ resourceId: thread1.resourceId,
644
+ });
645
+
646
+ // Insert message into thread1
647
+ await store.saveMessages({ messages: [message], format: 'v2' });
648
+
649
+ // Attempt to insert a message with the same id but different threadId
650
+ const conflictingMessage = {
651
+ ...createSampleMessageV2({
652
+ threadId: thread2.id, // different thread
653
+ content: { content: 'Thread2 Content' },
654
+ resourceId: thread2.resourceId,
655
+ }),
656
+ id: message.id,
657
+ };
658
+
659
+ // Save should move the message to the new thread
660
+ await store.saveMessages({ messages: [conflictingMessage], format: 'v2' });
661
+
662
+ // Retrieve messages for both threads
663
+ const thread1Messages = await store.getMessages({ threadId: thread1.id, format: 'v2' });
664
+ const thread2Messages = await store.getMessages({ threadId: thread2.id, format: 'v2' });
665
+
666
+ // Thread 1 should NOT have the message with that id
667
+ expect(thread1Messages.find(m => m.id === message.id)).toBeUndefined();
668
+
669
+ // Thread 2 should have the message with that id
670
+ expect(thread2Messages.find(m => m.id === message.id)?.content.content).toBe('Thread2 Content');
671
+ });
672
+ });
673
+
674
+ describe('Edge Cases and Error Handling', () => {
675
+ it('should handle large metadata objects', async () => {
676
+ const thread = createSampleThread();
677
+ const largeMetadata = {
678
+ ...thread.metadata,
679
+ largeArray: Array.from({ length: 1000 }, (_, i) => ({ index: i, data: 'test'.repeat(100) })),
680
+ };
681
+
682
+ const threadWithLargeMetadata = {
683
+ ...thread,
684
+ metadata: largeMetadata,
685
+ };
686
+
687
+ await store.saveThread({ thread: threadWithLargeMetadata });
688
+ const retrieved = await store.getThreadById({ threadId: thread.id });
689
+
690
+ expect(retrieved?.metadata).toEqual(largeMetadata);
691
+ });
692
+
693
+ it('should handle special characters in thread titles', async () => {
694
+ const thread = {
695
+ ...createSampleThread(),
696
+ title: 'Special \'quotes\' and "double quotes" and emoji 🎉',
697
+ };
698
+
699
+ await store.saveThread({ thread });
700
+ const retrieved = await store.getThreadById({ threadId: thread.id });
701
+
702
+ expect(retrieved?.title).toBe(thread.title);
703
+ });
704
+
705
+ it('should handle concurrent thread updates', async () => {
706
+ const thread = createSampleThread();
707
+ await store.saveThread({ thread });
708
+
709
+ // Perform multiple updates concurrently
710
+ const updates = Array.from({ length: 5 }, (_, i) =>
711
+ store.updateThread({
712
+ id: thread.id,
713
+ title: `Update ${i}`,
714
+ metadata: { update: i },
715
+ }),
716
+ );
717
+
718
+ await expect(Promise.all(updates)).resolves.toBeDefined();
719
+
720
+ // Verify final state
721
+ const finalThread = await store.getThreadById({ threadId: thread.id });
722
+ expect(finalThread).toBeDefined();
723
+ });
724
+ });
725
+
726
+ describe('Workflow Snapshots', () => {
727
+ it('should persist and load workflow snapshots', async () => {
728
+ const workflowName = 'test-workflow';
729
+ const runId = `run-${randomUUID()}`;
730
+ const snapshot = {
731
+ status: 'running',
732
+ context: {
733
+ input: { type: 'manual' },
734
+ step1: { status: 'success', output: { data: 'test' } },
735
+ },
736
+ value: {},
737
+ activePaths: [],
738
+ suspendedPaths: {},
739
+ runId,
740
+ timestamp: new Date().getTime(),
741
+ serializedStepGraph: [],
742
+ } as unknown as WorkflowRunState;
743
+
744
+ await store.persistWorkflowSnapshot({
745
+ workflowName,
746
+ runId,
747
+ snapshot,
748
+ });
749
+
750
+ const loadedSnapshot = await store.loadWorkflowSnapshot({
751
+ workflowName,
752
+ runId,
753
+ });
754
+
755
+ expect(loadedSnapshot).toEqual(snapshot);
756
+ });
757
+
758
+ it('should return null for non-existent workflow snapshot', async () => {
759
+ const result = await store.loadWorkflowSnapshot({
760
+ workflowName: 'non-existent',
761
+ runId: 'non-existent',
762
+ });
763
+
764
+ expect(result).toBeNull();
765
+ });
766
+
767
+ it('should update existing workflow snapshot', async () => {
768
+ const workflowName = 'test-workflow';
769
+ const runId = `run-${randomUUID()}`;
770
+ const initialSnapshot = {
771
+ status: 'running',
772
+ context: {
773
+ input: { type: 'manual' },
774
+ },
775
+ value: {},
776
+ activePaths: [],
777
+ suspendedPaths: {},
778
+ runId,
779
+ timestamp: new Date().getTime(),
780
+ serializedStepGraph: [],
781
+ };
782
+
783
+ await store.persistWorkflowSnapshot({
784
+ workflowName,
785
+ runId,
786
+ snapshot: initialSnapshot as unknown as WorkflowRunState,
787
+ });
788
+
789
+ const updatedSnapshot = {
790
+ status: 'success',
791
+ context: {
792
+ input: { type: 'manual' },
793
+ 'step-1': { status: 'success', result: { data: 'test' } },
794
+ },
795
+ value: {},
796
+ activePaths: [],
797
+ suspendedPaths: {},
798
+ runId,
799
+ timestamp: new Date().getTime(),
800
+ };
801
+
802
+ await store.persistWorkflowSnapshot({
803
+ workflowName,
804
+ runId,
805
+ snapshot: updatedSnapshot as unknown as WorkflowRunState,
806
+ });
807
+
808
+ const loadedSnapshot = await store.loadWorkflowSnapshot({
809
+ workflowName,
810
+ runId,
811
+ });
812
+
813
+ expect(loadedSnapshot).toEqual(updatedSnapshot);
814
+ });
815
+
816
+ it('should handle complex workflow state', async () => {
817
+ const workflowName = 'complex-workflow';
818
+ const runId = `run-${randomUUID()}`;
819
+ const complexSnapshot = {
820
+ value: { currentState: 'running' },
821
+ context: {
822
+ 'step-1': {
823
+ status: 'success',
824
+ output: {
825
+ nestedData: {
826
+ array: [1, 2, 3],
827
+ object: { key: 'value' },
828
+ date: new Date().toISOString(),
829
+ },
830
+ },
831
+ },
832
+ 'step-2': {
833
+ status: 'waiting',
834
+ dependencies: ['step-3', 'step-4'],
835
+ },
836
+ input: {
837
+ type: 'scheduled',
838
+ metadata: {
839
+ schedule: '0 0 * * *',
840
+ timezone: 'UTC',
841
+ },
842
+ },
843
+ },
844
+ activePaths: [
845
+ {
846
+ stepPath: ['step-1'],
847
+ stepId: 'step-1',
848
+ status: 'success',
849
+ },
850
+ {
851
+ stepPath: ['step-2'],
852
+ stepId: 'step-2',
853
+ status: 'waiting',
854
+ },
855
+ ],
856
+ suspendedPaths: {},
857
+ runId: runId,
858
+ timestamp: Date.now(),
859
+ serializedStepGraph: [],
860
+ status: 'running',
861
+ };
862
+
863
+ await store.persistWorkflowSnapshot({
864
+ workflowName,
865
+ runId,
866
+ snapshot: complexSnapshot as unknown as WorkflowRunState,
867
+ });
868
+
869
+ const loadedSnapshot = await store.loadWorkflowSnapshot({
870
+ workflowName,
871
+ runId,
872
+ });
873
+
874
+ expect(loadedSnapshot).toEqual(complexSnapshot);
875
+ });
876
+ });
877
+
878
+ describe('getWorkflowRuns', () => {
879
+ beforeEach(async () => {
880
+ await store.clearTable({ tableName: TABLE_WORKFLOW_SNAPSHOT });
881
+ });
882
+ it('returns empty array when no workflows exist', async () => {
883
+ const { runs, total } = await store.getWorkflowRuns();
884
+ expect(runs).toEqual([]);
885
+ expect(total).toBe(0);
886
+ });
887
+
888
+ it('returns all workflows by default', async () => {
889
+ const workflowName1 = 'default_test_1';
890
+ const workflowName2 = 'default_test_2';
891
+
892
+ const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('success');
893
+ const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('failed');
894
+
895
+ await store.persistWorkflowSnapshot({ workflowName: workflowName1, runId: runId1, snapshot: workflow1 });
896
+ await new Promise(resolve => setTimeout(resolve, 10)); // Small delay to ensure different timestamps
897
+ await store.persistWorkflowSnapshot({ workflowName: workflowName2, runId: runId2, snapshot: workflow2 });
898
+
899
+ const { runs, total } = await store.getWorkflowRuns();
900
+ expect(runs).toHaveLength(2);
901
+ expect(total).toBe(2);
902
+ expect(runs[0]!.workflowName).toBe(workflowName2); // Most recent first
903
+ expect(runs[1]!.workflowName).toBe(workflowName1);
904
+ const firstSnapshot = runs[0]!.snapshot;
905
+ const secondSnapshot = runs[1]!.snapshot;
906
+ checkWorkflowSnapshot(firstSnapshot, stepId2, 'failed');
907
+ checkWorkflowSnapshot(secondSnapshot, stepId1, 'success');
908
+ });
909
+
910
+ it('filters by workflow name', async () => {
911
+ const workflowName1 = 'filter_test_1';
912
+ const workflowName2 = 'filter_test_2';
913
+
914
+ const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('success');
915
+ const { snapshot: workflow2, runId: runId2 } = createSampleWorkflowSnapshot('failed');
916
+
917
+ await store.persistWorkflowSnapshot({ workflowName: workflowName1, runId: runId1, snapshot: workflow1 });
918
+ await new Promise(resolve => setTimeout(resolve, 10)); // Small delay to ensure different timestamps
919
+ await store.persistWorkflowSnapshot({ workflowName: workflowName2, runId: runId2, snapshot: workflow2 });
920
+
921
+ const { runs, total } = await store.getWorkflowRuns({ workflowName: workflowName1 });
922
+ expect(runs).toHaveLength(1);
923
+ expect(total).toBe(1);
924
+ expect(runs[0]!.workflowName).toBe(workflowName1);
925
+ const snapshot = runs[0]!.snapshot;
926
+ checkWorkflowSnapshot(snapshot, stepId1, 'success');
927
+ });
928
+
929
+ it('filters by date range', async () => {
930
+ const now = new Date();
931
+ const yesterday = new Date(now.getTime() - 24 * 60 * 60 * 1000);
932
+ const twoDaysAgo = new Date(now.getTime() - 2 * 24 * 60 * 60 * 1000);
933
+ const workflowName1 = 'date_test_1';
934
+ const workflowName2 = 'date_test_2';
935
+ const workflowName3 = 'date_test_3';
936
+
937
+ const { snapshot: workflow1, runId: runId1 } = createSampleWorkflowSnapshot('success');
938
+ const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('failed');
939
+ const { snapshot: workflow3, runId: runId3, stepId: stepId3 } = createSampleWorkflowSnapshot('suspended');
940
+
941
+ await store.insert({
942
+ tableName: TABLE_WORKFLOW_SNAPSHOT,
943
+ record: {
944
+ workflow_name: workflowName1,
945
+ run_id: runId1,
946
+ snapshot: workflow1,
947
+ createdAt: twoDaysAgo,
948
+ updatedAt: twoDaysAgo,
949
+ },
950
+ });
951
+
952
+ await store.insert({
953
+ tableName: TABLE_WORKFLOW_SNAPSHOT,
954
+ record: {
955
+ workflow_name: workflowName2,
956
+ run_id: runId2,
957
+ snapshot: workflow2,
958
+ createdAt: yesterday,
959
+ updatedAt: yesterday,
960
+ },
961
+ });
962
+ await store.insert({
963
+ tableName: TABLE_WORKFLOW_SNAPSHOT,
964
+ record: {
965
+ workflow_name: workflowName3,
966
+ run_id: runId3,
967
+ snapshot: workflow3,
968
+ createdAt: now,
969
+ updatedAt: now,
970
+ },
971
+ });
972
+
973
+ const { runs } = await store.getWorkflowRuns({
974
+ fromDate: yesterday,
975
+ toDate: now,
976
+ });
977
+
978
+ expect(runs).toHaveLength(2);
979
+ expect(runs[0]!.workflowName).toBe(workflowName3);
980
+ expect(runs[1]!.workflowName).toBe(workflowName2);
981
+ const firstSnapshot = runs[0]!.snapshot;
982
+ const secondSnapshot = runs[1]!.snapshot;
983
+ checkWorkflowSnapshot(firstSnapshot, stepId3, 'suspended');
984
+ checkWorkflowSnapshot(secondSnapshot, stepId2, 'failed');
985
+ });
986
+
987
+ it('handles pagination', async () => {
988
+ const workflowName1 = 'page_test_1';
989
+ const workflowName2 = 'page_test_2';
990
+ const workflowName3 = 'page_test_3';
991
+
992
+ const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('success');
993
+ const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('failed');
994
+ const { snapshot: workflow3, runId: runId3, stepId: stepId3 } = createSampleWorkflowSnapshot('suspended');
995
+
996
+ await store.persistWorkflowSnapshot({ workflowName: workflowName1, runId: runId1, snapshot: workflow1 });
997
+ await new Promise(resolve => setTimeout(resolve, 10)); // Small delay to ensure different timestamps
998
+ await store.persistWorkflowSnapshot({ workflowName: workflowName2, runId: runId2, snapshot: workflow2 });
999
+ await new Promise(resolve => setTimeout(resolve, 10)); // Small delay to ensure different timestamps
1000
+ await store.persistWorkflowSnapshot({ workflowName: workflowName3, runId: runId3, snapshot: workflow3 });
1001
+
1002
+ // Get first page
1003
+ const page1 = await store.getWorkflowRuns({ limit: 2, offset: 0 });
1004
+ expect(page1.runs).toHaveLength(2);
1005
+ expect(page1.total).toBe(3); // Total count of all records
1006
+ expect(page1.runs[0]!.workflowName).toBe(workflowName3);
1007
+ expect(page1.runs[1]!.workflowName).toBe(workflowName2);
1008
+ const firstSnapshot = page1.runs[0]!.snapshot;
1009
+ const secondSnapshot = page1.runs[1]!.snapshot;
1010
+ checkWorkflowSnapshot(firstSnapshot, stepId3, 'suspended');
1011
+ checkWorkflowSnapshot(secondSnapshot, stepId2, 'failed');
1012
+
1013
+ // Get second page
1014
+ const page2 = await store.getWorkflowRuns({ limit: 2, offset: 2 });
1015
+ expect(page2.runs).toHaveLength(1);
1016
+ expect(page2.total).toBe(3);
1017
+ expect(page2.runs[0]!.workflowName).toBe(workflowName1);
1018
+ const snapshot = page2.runs[0]!.snapshot;
1019
+ checkWorkflowSnapshot(snapshot, stepId1, 'success');
1020
+ });
1021
+ });
1022
+
1023
+ describe('getWorkflowRunById', () => {
1024
+ const workflowName = 'workflow-id-test';
1025
+ let runId: string;
1026
+ let stepId: string;
1027
+
1028
+ beforeEach(async () => {
1029
+ // Insert a workflow run for positive test
1030
+ const sample = createSampleWorkflowSnapshot('success');
1031
+ runId = sample.runId;
1032
+ stepId = sample.stepId;
1033
+ await store.insert({
1034
+ tableName: TABLE_WORKFLOW_SNAPSHOT,
1035
+ record: {
1036
+ workflow_name: workflowName,
1037
+ run_id: runId,
1038
+ resourceId: 'resource-abc',
1039
+ snapshot: sample.snapshot,
1040
+ createdAt: new Date(),
1041
+ updatedAt: new Date(),
1042
+ },
1043
+ });
1044
+ });
1045
+
1046
+ it('should retrieve a workflow run by ID', async () => {
1047
+ const found = await store.getWorkflowRunById({
1048
+ runId,
1049
+ workflowName,
1050
+ });
1051
+ expect(found).not.toBeNull();
1052
+ expect(found?.runId).toBe(runId);
1053
+ checkWorkflowSnapshot(found?.snapshot!, stepId, 'success');
1054
+ });
1055
+
1056
+ it('should return null for non-existent workflow run ID', async () => {
1057
+ const notFound = await store.getWorkflowRunById({
1058
+ runId: 'non-existent-id',
1059
+ workflowName,
1060
+ });
1061
+ expect(notFound).toBeNull();
1062
+ });
1063
+ });
1064
+ describe('getWorkflowRuns with resourceId', () => {
1065
+ const workflowName = 'workflow-id-test';
1066
+ let resourceId: string;
1067
+ let runIds: string[] = [];
1068
+
1069
+ beforeEach(async () => {
1070
+ // Insert multiple workflow runs for the same resourceId
1071
+ resourceId = 'resource-shared';
1072
+ for (const status of ['success', 'failed']) {
1073
+ const sample = createSampleWorkflowSnapshot(status as WorkflowRunState['context'][string]['status']);
1074
+ runIds.push(sample.runId);
1075
+ await store.insert({
1076
+ tableName: TABLE_WORKFLOW_SNAPSHOT,
1077
+ record: {
1078
+ workflow_name: workflowName,
1079
+ run_id: sample.runId,
1080
+ resourceId,
1081
+ snapshot: sample.snapshot,
1082
+ createdAt: new Date(),
1083
+ updatedAt: new Date(),
1084
+ },
1085
+ });
1086
+ }
1087
+ // Insert a run with a different resourceId
1088
+ const other = createSampleWorkflowSnapshot('suspended');
1089
+ await store.insert({
1090
+ tableName: TABLE_WORKFLOW_SNAPSHOT,
1091
+ record: {
1092
+ workflow_name: workflowName,
1093
+ run_id: other.runId,
1094
+ resourceId: 'resource-other',
1095
+ snapshot: other.snapshot,
1096
+ createdAt: new Date(),
1097
+ updatedAt: new Date(),
1098
+ },
1099
+ });
1100
+ });
1101
+
1102
+ it('should retrieve all workflow runs by resourceId', async () => {
1103
+ const { runs } = await store.getWorkflowRuns({
1104
+ resourceId,
1105
+ workflowName,
1106
+ });
1107
+ expect(Array.isArray(runs)).toBe(true);
1108
+ expect(runs.length).toBeGreaterThanOrEqual(2);
1109
+ for (const run of runs) {
1110
+ expect(run.resourceId).toBe(resourceId);
1111
+ }
1112
+ });
1113
+
1114
+ it('should return an empty array if no workflow runs match resourceId', async () => {
1115
+ const { runs } = await store.getWorkflowRuns({
1116
+ resourceId: 'non-existent-resource',
1117
+ workflowName,
1118
+ });
1119
+ expect(Array.isArray(runs)).toBe(true);
1120
+ expect(runs.length).toBe(0);
1121
+ });
1122
+ });
1123
+
1124
+ describe('Eval Operations', () => {
1125
+ it('should retrieve evals by agent name', async () => {
1126
+ const agentName = `test-agent-${randomUUID()}`;
1127
+
1128
+ // Create sample evals using the imported helper
1129
+ const liveEval = createSampleEval(agentName, false); // createSampleEval returns snake_case
1130
+ const testEval = createSampleEval(agentName, true);
1131
+ const otherAgentEval = createSampleEval(`other-agent-${randomUUID()}`, false);
1132
+
1133
+ // Insert evals - ensure DB columns are snake_case
1134
+ await store.insert({
1135
+ tableName: TABLE_EVALS,
1136
+ record: {
1137
+ agent_name: liveEval.agent_name, // Use snake_case
1138
+ input: liveEval.input,
1139
+ output: liveEval.output,
1140
+ result: liveEval.result,
1141
+ metric_name: liveEval.metric_name, // Use snake_case
1142
+ instructions: liveEval.instructions,
1143
+ test_info: liveEval.test_info, // test_info from helper can be undefined or object
1144
+ global_run_id: liveEval.global_run_id, // Use snake_case
1145
+ run_id: liveEval.run_id, // Use snake_case
1146
+ created_at: new Date(liveEval.created_at as string), // created_at from helper is string or Date
1147
+ },
1148
+ });
1149
+
1150
+ await store.insert({
1151
+ tableName: TABLE_EVALS,
1152
+ record: {
1153
+ agent_name: testEval.agent_name,
1154
+ input: testEval.input,
1155
+ output: testEval.output,
1156
+ result: testEval.result,
1157
+ metric_name: testEval.metric_name,
1158
+ instructions: testEval.instructions,
1159
+ test_info: testEval.test_info ? JSON.stringify(testEval.test_info) : null,
1160
+ global_run_id: testEval.global_run_id,
1161
+ run_id: testEval.run_id,
1162
+ created_at: new Date(testEval.created_at as string),
1163
+ },
1164
+ });
1165
+
1166
+ await store.insert({
1167
+ tableName: TABLE_EVALS,
1168
+ record: {
1169
+ agent_name: otherAgentEval.agent_name,
1170
+ input: otherAgentEval.input,
1171
+ output: otherAgentEval.output,
1172
+ result: otherAgentEval.result,
1173
+ metric_name: otherAgentEval.metric_name,
1174
+ instructions: otherAgentEval.instructions,
1175
+ test_info: otherAgentEval.test_info, // Can be null/undefined directly
1176
+ global_run_id: otherAgentEval.global_run_id,
1177
+ run_id: otherAgentEval.run_id,
1178
+ created_at: new Date(otherAgentEval.created_at as string),
1179
+ },
1180
+ });
1181
+
1182
+ // Test getting all evals for the agent
1183
+ const allEvals = await store.getEvalsByAgentName(agentName);
1184
+ expect(allEvals).toHaveLength(2);
1185
+ // EvalRow uses camelCase; MSSQLStore.transformEvalRow converts the snake_case DB columns accordingly
1186
+ expect(allEvals.map(e => e.runId)).toEqual(expect.arrayContaining([liveEval.run_id, testEval.run_id]));
1187
+
1188
+ // Test getting only live evals
1189
+ const liveEvals = await store.getEvalsByAgentName(agentName, 'live');
1190
+ expect(liveEvals).toHaveLength(1);
1191
+ expect(liveEvals[0].runId).toBe(liveEval.run_id); // Comparing with snake_case run_id from original data
1192
+
1193
+ // Test getting only test evals
1194
+ const testEvalsResult = await store.getEvalsByAgentName(agentName, 'test');
1195
+ expect(testEvalsResult).toHaveLength(1);
1196
+ expect(testEvalsResult[0].runId).toBe(testEval.run_id);
1197
+ expect(testEvalsResult[0].testInfo).toEqual(testEval.test_info);
1198
+
1199
+ // Test getting evals for non-existent agent
1200
+ const nonExistentEvals = await store.getEvalsByAgentName('non-existent-agent');
1201
+ expect(nonExistentEvals).toHaveLength(0);
1202
+ });
1203
+ });
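For reference, the snake_case-to-camelCase mapping these assertions rely on, roughly:

// DB record (inserted above)        EvalRow (returned by getEvalsByAgentName)
// run_id, global_run_id        ->   runId, globalRunId
// agent_name, metric_name      ->   agentName, metricName
// test_info, created_at        ->   testInfo, createdAt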
1204
+
1205
+ describe('hasColumn', () => {
1206
+ const tempTable = 'temp_test_table';
1207
+
1208
+ beforeEach(async () => {
1209
+ // Always try to drop the table before each test, ignore errors if it doesn't exist
1210
+ try {
1211
+ await store.pool.query(`DROP TABLE IF EXISTS ${tempTable}`);
1212
+ } catch {
1213
+ /* ignore */
1214
+ }
1215
+ });
1216
+
1217
+ it('returns true if the column exists', async () => {
1218
+ await store.pool.query(`CREATE TABLE ${tempTable} (id INT IDENTITY(1,1) PRIMARY KEY, resourceId NVARCHAR(MAX))`);
1219
+ expect(await store['hasColumn'](tempTable, 'resourceId')).toBe(true);
1220
+ });
1221
+
1222
+ it('returns false if the column does not exist', async () => {
1223
+ await store.pool.query(`CREATE TABLE ${tempTable} (id INT IDENTITY(1,1) PRIMARY KEY)`);
1224
+ expect(await store['hasColumn'](tempTable, 'resourceId')).toBe(false);
1225
+ });
1226
+
1227
+ afterEach(async () => {
1228
+ // Always try to drop the table after each test, ignore errors if it doesn't exist
1229
+ try {
1230
+ await store.pool.query(`DROP TABLE IF EXISTS ${tempTable}`);
1231
+ } catch {
1232
+ /* ignore */
1233
+ }
1234
+ });
1235
+ });
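One common way such a column check is implemented against SQL Server, shown as a sketch of the idea only, not necessarily the store's internal query:

const check = await store.pool
  .request()
  .input('table', sql.NVarChar, 'temp_test_table')
  .input('column', sql.NVarChar, 'resourceId')
  .query(
    'SELECT 1 AS found FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = @table AND COLUMN_NAME = @column',
  );
const columnExists = check.recordset.length > 0;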
1236
+
1237
+ describe('alterTable', () => {
1238
+ const TEST_TABLE = 'test_alter_table';
1239
+ const BASE_SCHEMA = {
1240
+ id: { type: 'integer', primaryKey: true, nullable: false },
1241
+ name: { type: 'text', nullable: true },
1242
+ } as Record<string, StorageColumn>;
1243
+
1244
+ beforeEach(async () => {
1245
+ await store.createTable({ tableName: TEST_TABLE as TABLE_NAMES, schema: BASE_SCHEMA });
1246
+ });
1247
+
1248
+ afterEach(async () => {
1249
+ await store.clearTable({ tableName: TEST_TABLE as TABLE_NAMES });
1250
+ });
1251
+
1252
+ it('adds a new column to an existing table', async () => {
1253
+ await store.alterTable({
1254
+ tableName: TEST_TABLE as TABLE_NAMES,
1255
+ schema: { ...BASE_SCHEMA, age: { type: 'integer', nullable: true } },
1256
+ ifNotExists: ['age'],
1257
+ });
1258
+
1259
+ await store.insert({
1260
+ tableName: TEST_TABLE as TABLE_NAMES,
1261
+ record: { id: 1, name: 'Alice', age: 42 },
1262
+ });
1263
+
1264
+ const row = await store.load<{ id: string; name: string; age?: number }>({
1265
+ tableName: TEST_TABLE as TABLE_NAMES,
1266
+ keys: { id: '1' },
1267
+ });
1268
+ expect(row?.age).toBe(42);
1269
+ });
1270
+
1271
+ it('is idempotent when adding an existing column', async () => {
1272
+ await store.alterTable({
1273
+ tableName: TEST_TABLE as TABLE_NAMES,
1274
+ schema: { ...BASE_SCHEMA, foo: { type: 'text', nullable: true } },
1275
+ ifNotExists: ['foo'],
1276
+ });
1277
+ // Add the column again (should not throw)
1278
+ await expect(
1279
+ store.alterTable({
1280
+ tableName: TEST_TABLE as TABLE_NAMES,
1281
+ schema: { ...BASE_SCHEMA, foo: { type: 'text', nullable: true } },
1282
+ ifNotExists: ['foo'],
1283
+ }),
1284
+ ).resolves.not.toThrow();
1285
+ });
1286
+
1287
+ it('should add a default value to a column when using not null', async () => {
1288
+ await store.insert({
1289
+ tableName: TEST_TABLE as TABLE_NAMES,
1290
+ record: { id: 1, name: 'Bob' },
1291
+ });
1292
+
1293
+ await expect(
1294
+ store.alterTable({
1295
+ tableName: TEST_TABLE as TABLE_NAMES,
1296
+ schema: { ...BASE_SCHEMA, text_column: { type: 'text', nullable: false } },
1297
+ ifNotExists: ['text_column'],
1298
+ }),
1299
+ ).resolves.not.toThrow();
1300
+
1301
+ await expect(
1302
+ store.alterTable({
1303
+ tableName: TEST_TABLE as TABLE_NAMES,
1304
+ schema: { ...BASE_SCHEMA, timestamp_column: { type: 'timestamp', nullable: false } },
1305
+ ifNotExists: ['timestamp_column'],
1306
+ }),
1307
+ ).resolves.not.toThrow();
1308
+
1309
+ await expect(
1310
+ store.alterTable({
1311
+ tableName: TEST_TABLE as TABLE_NAMES,
1312
+ schema: { ...BASE_SCHEMA, bigint_column: { type: 'bigint', nullable: false } },
1313
+ ifNotExists: ['bigint_column'],
1314
+ }),
1315
+ ).resolves.not.toThrow();
1316
+
1317
+ await expect(
1318
+ store.alterTable({
1319
+ tableName: TEST_TABLE as TABLE_NAMES,
1320
+ schema: { ...BASE_SCHEMA, jsonb_column: { type: 'jsonb', nullable: false } },
1321
+ ifNotExists: ['jsonb_column'],
1322
+ }),
1323
+ ).resolves.not.toThrow();
1324
+ });
1325
+ });
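For reference, an ifNotExists column addition like the age example above typically maps to T-SQL of this shape. This is a sketch; the store's actual DDL and type mapping may differ.

await store.pool.request().query(`
  IF NOT EXISTS (
    SELECT 1 FROM INFORMATION_SCHEMA.COLUMNS
    WHERE TABLE_NAME = 'test_alter_table' AND COLUMN_NAME = 'age'
  )
    ALTER TABLE test_alter_table ADD age INT NULL;
`);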
1326
+
1327
+ describe('Schema Support', () => {
1328
+ const customSchema = 'mastraTest';
1329
+ let customSchemaStore: MSSQLStore;
1330
+
1331
+ beforeAll(async () => {
1332
+ customSchemaStore = new MSSQLStore({
1333
+ ...TEST_CONFIG,
1334
+ schemaName: customSchema,
1335
+ });
1336
+
1337
+ await customSchemaStore.init();
1338
+ });
1339
+
1340
+ afterAll(async () => {
1341
+ await customSchemaStore.close();
1342
+ // Re-initialize the main store for subsequent tests
1343
+ store = new MSSQLStore(TEST_CONFIG);
1344
+ await store.init();
1345
+ });
1346
+
1347
+ describe('Constructor and Initialization', () => {
1348
+ it('should accept connectionString directly', () => {
1349
+ // Use existing store instead of creating new one
1350
+ expect(store).toBeInstanceOf(MSSQLStore);
1351
+ });
1352
+
1353
+ it('should accept config object with schema', () => {
1354
+ // Use existing custom schema store
1355
+ expect(customSchemaStore).toBeInstanceOf(MSSQLStore);
1356
+ });
1357
+ });
1358
+
1359
+ describe('Schema Operations', () => {
1360
+ it('should create and query tables in custom schema', async () => {
1361
+ // Create thread in custom schema
1362
+ const thread = createSampleThread();
1363
+ await customSchemaStore.saveThread({ thread });
1364
+
1365
+ // Verify thread exists in custom schema
1366
+ const retrieved = await customSchemaStore.getThreadById({ threadId: thread.id });
1367
+ expect(retrieved?.title).toBe(thread.title);
1368
+ });
1369
+
1370
+ it('should allow same table names in different schemas', async () => {
1371
+ // Create threads in both schemas
1372
+ const defaultThread = createSampleThread();
1373
+ const customThread = createSampleThread();
1374
+
1375
+ await store.saveThread({ thread: defaultThread });
1376
+ await customSchemaStore.saveThread({ thread: customThread });
1377
+
1378
+ // Verify threads exist in respective schemas
1379
+ const defaultResult = await store.getThreadById({ threadId: defaultThread.id });
1380
+ const customResult = await customSchemaStore.getThreadById({ threadId: customThread.id });
1381
+
1382
+ expect(defaultResult?.id).toBe(defaultThread.id);
1383
+ expect(customResult?.id).toBe(customThread.id);
1384
+
1385
+ // Verify cross-schema isolation
1386
+ const defaultInCustom = await customSchemaStore.getThreadById({ threadId: defaultThread.id });
1387
+ const customInDefault = await store.getThreadById({ threadId: customThread.id });
1388
+
1389
+ expect(defaultInCustom).toBeNull();
1390
+ expect(customInDefault).toBeNull();
1391
+ });
1392
+ });
1393
+ });
1394
+
1395
+ describe('Pagination Features', () => {
1396
+ beforeEach(async () => {
1397
+ await store.clearTable({ tableName: TABLE_EVALS });
1398
+ await store.clearTable({ tableName: TABLE_TRACES });
1399
+ await store.clearTable({ tableName: TABLE_MESSAGES });
1400
+ await store.clearTable({ tableName: TABLE_THREADS });
1401
+ });
1402
+
1403
+ describe('getEvals with pagination', () => {
1404
+ it('should return paginated evals with total count (page/perPage)', async () => {
1405
+ const agentName = 'pagination-agent-evals';
1406
+ const evalPromises = Array.from({ length: 25 }, (_, i) => {
1407
+ const evalData = createSampleEval(agentName, i % 2 === 0);
1408
+ return store.insert({
1409
+ tableName: TABLE_EVALS,
1410
+ record: {
1411
+ run_id: evalData.run_id,
1412
+ agent_name: evalData.agent_name,
1413
+ input: evalData.input,
1414
+ output: evalData.output,
1415
+ result: evalData.result,
1416
+ metric_name: evalData.metric_name,
1417
+ instructions: evalData.instructions,
1418
+ test_info: evalData.test_info,
1419
+ global_run_id: evalData.global_run_id,
1420
+ created_at: new Date(evalData.created_at as string),
1421
+ },
1422
+ });
1423
+ });
1424
+ await Promise.all(evalPromises);
1425
+
1426
+ const page1 = await store.getEvals({ agentName, page: 0, perPage: 10 });
1427
+ expect(page1.evals).toHaveLength(10);
1428
+ expect(page1.total).toBe(25);
1429
+ expect(page1.page).toBe(0);
1430
+ expect(page1.perPage).toBe(10);
1431
+ expect(page1.hasMore).toBe(true);
1432
+
1433
+ const page3 = await store.getEvals({ agentName, page: 2, perPage: 10 });
1434
+ expect(page3.evals).toHaveLength(5);
1435
+ expect(page3.total).toBe(25);
1436
+ expect(page3.page).toBe(2);
1437
+ expect(page3.hasMore).toBe(false);
1438
+ });
1439
+
1440
+ it('should support limit/offset pagination for getEvals', async () => {
1441
+ const agentName = 'pagination-agent-lo-evals';
1442
+ const evalPromises = Array.from({ length: 15 }, () => {
1443
+ const evalData = createSampleEval(agentName);
1444
+ return store.insert({
1445
+ tableName: TABLE_EVALS,
1446
+ record: {
1447
+ run_id: evalData.run_id,
1448
+ agent_name: evalData.agent_name,
1449
+ input: evalData.input,
1450
+ output: evalData.output,
1451
+ result: evalData.result,
1452
+ metric_name: evalData.metric_name,
1453
+ instructions: evalData.instructions,
1454
+ test_info: evalData.test_info,
1455
+ global_run_id: evalData.global_run_id,
1456
+ created_at: new Date(evalData.created_at as string),
1457
+ },
1458
+ });
1459
+ });
1460
+ await Promise.all(evalPromises);
1461
+
1462
+ const result = await store.getEvals({ agentName, perPage: 5, page: 2 });
1463
+ expect(result.evals).toHaveLength(5);
1464
+ expect(result.total).toBe(15);
1465
+ expect(result.page).toBe(2);
1466
+ expect(result.perPage).toBe(5);
1467
+ expect(result.hasMore).toBe(false);
1468
+ });
1469
+
1470
+ it('should filter by type with pagination for getEvals', async () => {
1471
+ const agentName = 'pagination-agent-type-evals';
1472
+ const testEvalPromises = Array.from({ length: 10 }, () => {
1473
+ const evalData = createSampleEval(agentName, true);
1474
+ return store.insert({
1475
+ tableName: TABLE_EVALS,
1476
+ record: {
1477
+ run_id: evalData.run_id,
1478
+ agent_name: evalData.agent_name,
1479
+ input: evalData.input,
1480
+ output: evalData.output,
1481
+ result: evalData.result,
1482
+ metric_name: evalData.metric_name,
1483
+ instructions: evalData.instructions,
1484
+ test_info: evalData.test_info,
1485
+ global_run_id: evalData.global_run_id,
1486
+ created_at: new Date(evalData.created_at as string),
1487
+ },
1488
+ });
1489
+ });
1490
+ const liveEvalPromises = Array.from({ length: 8 }, () => {
1491
+ const evalData = createSampleEval(agentName, false);
1492
+ return store.insert({
1493
+ tableName: TABLE_EVALS,
1494
+ record: {
1495
+ run_id: evalData.run_id,
1496
+ agent_name: evalData.agent_name,
1497
+ input: evalData.input,
1498
+ output: evalData.output,
1499
+ result: evalData.result,
1500
+ metric_name: evalData.metric_name,
1501
+ instructions: evalData.instructions,
1502
+ test_info: evalData.test_info,
1503
+ global_run_id: evalData.global_run_id,
1504
+ created_at: new Date(evalData.created_at as string),
1505
+ },
1506
+ });
1507
+ });
1508
+ await Promise.all([...testEvalPromises, ...liveEvalPromises]);
1509
+
1510
+ const testResults = await store.getEvals({ agentName, type: 'test', page: 0, perPage: 5 });
1511
+ expect(testResults.evals).toHaveLength(5);
1512
+ expect(testResults.total).toBe(10);
1513
+
1514
+ const liveResults = await store.getEvals({ agentName, type: 'live', page: 1, perPage: 3 });
1515
+ expect(liveResults.evals).toHaveLength(3);
1516
+ expect(liveResults.total).toBe(8);
1517
+ expect(liveResults.hasMore).toBe(true);
1518
+ });
1519
+
1520
+ it('should filter by date with pagination for getEvals', async () => {
1521
+ const agentName = 'pagination-agent-date-evals';
1522
+ const now = new Date();
1523
+ const yesterday = new Date(now.getTime() - 24 * 60 * 60 * 1000);
1524
+ const dayBeforeYesterday = new Date(now.getTime() - 48 * 60 * 60 * 1000);
1525
+
1526
+ const createEvalAtDate = (date: Date) => {
1527
+ const evalData = createSampleEval(agentName, false, date); // Pass date to helper
1528
+ return store.insert({
1529
+ tableName: TABLE_EVALS,
1530
+ record: {
1531
+ run_id: evalData.run_id, // Use snake_case from helper
1532
+ agent_name: evalData.agent_name,
1533
+ input: evalData.input,
1534
+ output: evalData.output,
1535
+ result: evalData.result,
1536
+ metric_name: evalData.metric_name,
1537
+ instructions: evalData.instructions,
1538
+ test_info: evalData.test_info,
1539
+ global_run_id: evalData.global_run_id,
1540
+ created_at: evalData.created_at, // Use created_at from helper (already Date or ISO string)
1541
+ },
1542
+ });
1543
+ };
1544
+
1545
+ await Promise.all([
1546
+ createEvalAtDate(dayBeforeYesterday),
1547
+ createEvalAtDate(dayBeforeYesterday),
1548
+ createEvalAtDate(yesterday),
1549
+ createEvalAtDate(yesterday),
1550
+ createEvalAtDate(yesterday),
1551
+ createEvalAtDate(now),
1552
+ createEvalAtDate(now),
1553
+ createEvalAtDate(now),
1554
+ createEvalAtDate(now),
1555
+ ]);
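+ // 9 evals total: 2 dayBeforeYesterday, 3 yesterday, 4 now. Filtering from yesterday should match the 7 newest.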
1556
+
1557
+ const fromYesterday = await store.getEvals({ agentName, dateRange: { start: yesterday }, page: 0, perPage: 3 });
1558
+ expect(fromYesterday.total).toBe(7); // 3 yesterday + 4 now
1559
+ expect(fromYesterday.evals).toHaveLength(3);
1560
+ // Evals are sorted DESC, so first 3 are from 'now'
1561
+ fromYesterday.evals.forEach(e =>
1562
+ expect(new Date(e.createdAt).getTime()).toBeGreaterThanOrEqual(yesterday.getTime()),
1563
+ );
1564
+
1565
+ const onlyDayBefore = await store.getEvals({
1566
+ agentName,
1567
+ dateRange: {
1568
+ end: new Date(yesterday.getTime() - 1),
1569
+ },
1570
+ page: 0,
1571
+ perPage: 5,
1572
+ });
1573
+ expect(onlyDayBefore.total).toBe(2);
1574
+ expect(onlyDayBefore.evals).toHaveLength(2);
1575
+ });
1576
+ });
1577
+
1578
+ describe('getTraces with pagination', () => {
1579
+ it('should return paginated traces with total count', async () => {
1580
+ const tracePromises = Array.from({ length: 18 }, (_, i) =>
1581
+ store.insert({ tableName: TABLE_TRACES, record: createSampleTraceForDB(`test-trace-${i}`, 'pg-test-scope') }),
1582
+ );
1583
+ await Promise.all(tracePromises);
1584
+
1585
+ const page1 = await store.getTracesPaginated({
1586
+ scope: 'pg-test-scope',
1587
+ page: 0,
1588
+ perPage: 8,
1589
+ });
1590
+ expect(page1.traces).toHaveLength(8);
1591
+ expect(page1.total).toBe(18);
1592
+ expect(page1.page).toBe(0);
1593
+ expect(page1.perPage).toBe(8);
1594
+ expect(page1.hasMore).toBe(true);
1595
+
1596
+ const page3 = await store.getTracesPaginated({
1597
+ scope: 'pg-test-scope',
1598
+ page: 2,
1599
+ perPage: 8,
1600
+ });
1601
+ expect(page3.traces).toHaveLength(2);
1602
+ expect(page3.total).toBe(18);
1603
+ expect(page3.hasMore).toBe(false);
1604
+ });
1605
+
1606
+ it('should filter by attributes with pagination for getTraces', async () => {
1607
+ const tracesWithAttr = Array.from({ length: 8 }, (_, i) =>
1608
+ store.insert({
1609
+ tableName: TABLE_TRACES,
1610
+ record: createSampleTraceForDB(`trace-${i}`, 'pg-attr-scope', { environment: 'prod' }),
1611
+ }),
1612
+ );
1613
+ const tracesWithoutAttr = Array.from({ length: 5 }, (_, i) =>
1614
+ store.insert({
1615
+ tableName: TABLE_TRACES,
1616
+ record: createSampleTraceForDB(`trace-other-${i}`, 'pg-attr-scope', { environment: 'dev' }),
1617
+ }),
1618
+ );
1619
+ await Promise.all([...tracesWithAttr, ...tracesWithoutAttr]);
1620
+
1621
+ const prodTraces = await store.getTracesPaginated({
1622
+ scope: 'pg-attr-scope',
1623
+ attributes: { environment: 'prod' },
1624
+ page: 0,
1625
+ perPage: 5,
1626
+ });
1627
+ expect(prodTraces.traces).toHaveLength(5);
1628
+ expect(prodTraces.total).toBe(8);
1629
+ expect(prodTraces.hasMore).toBe(true);
1630
+ });
1631
+
1632
+ it('should filter by date with pagination for getTraces', async () => {
1633
+ const scope = 'pg-date-traces';
1634
+ const now = new Date();
1635
+ const yesterday = new Date(now.getTime() - 24 * 60 * 60 * 1000);
1636
+ const dayBeforeYesterday = new Date(now.getTime() - 48 * 60 * 60 * 1000);
1637
+
1638
+ await Promise.all([
1639
+ store.insert({
1640
+ tableName: TABLE_TRACES,
1641
+ record: createSampleTraceForDB('t1', scope, undefined, dayBeforeYesterday),
1642
+ }),
1643
+ store.insert({ tableName: TABLE_TRACES, record: createSampleTraceForDB('t2', scope, undefined, yesterday) }),
1644
+ store.insert({ tableName: TABLE_TRACES, record: createSampleTraceForDB('t3', scope, undefined, yesterday) }),
1645
+ store.insert({ tableName: TABLE_TRACES, record: createSampleTraceForDB('t4', scope, undefined, now) }),
1646
+ store.insert({ tableName: TABLE_TRACES, record: createSampleTraceForDB('t5', scope, undefined, now) }),
1647
+ ]);
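+ // 5 traces total: 1 dayBeforeYesterday, 2 yesterday, 2 now.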
1648
+
1649
+ const fromYesterday = await store.getTracesPaginated({
1650
+ scope,
1651
+ dateRange: {
1652
+ start: yesterday,
1653
+ },
1654
+ page: 0,
1655
+ perPage: 2,
1656
+ });
1657
+ expect(fromYesterday.total).toBe(4); // 2 yesterday + 2 now
1658
+ expect(fromYesterday.traces).toHaveLength(2);
1659
+ fromYesterday.traces.forEach(t =>
1660
+ expect(new Date(t.createdAt).getTime()).toBeGreaterThanOrEqual(yesterday.getTime()),
1661
+ );
1662
+
1663
+ const onlyNow = await store.getTracesPaginated({
1664
+ scope,
1665
+ dateRange: {
1666
+ start: now,
1667
+ end: now,
1668
+ },
1669
+ page: 0,
1670
+ perPage: 5,
1671
+ });
1672
+ expect(onlyNow.total).toBe(2);
1673
+ expect(onlyNow.traces).toHaveLength(2);
1674
+ });
1675
+ });
1676
+
1677
+ describe('getMessages with pagination', () => {
1678
+ it('should return paginated messages with total count', async () => {
1679
+ const thread = createSampleThread();
1680
+ await store.saveThread({ thread });
1681
+ // Reset role to 'assistant' before creating messages
1682
+ resetRole();
1683
+ // Create messages sequentially to ensure unique timestamps
1684
+ for (let i = 0; i < 15; i++) {
1685
+ const message = createSampleMessageV1({ threadId: thread.id, content: `Message ${i + 1}` });
1686
+ await store.saveMessages({
1687
+ messages: [message],
1688
+ });
1689
+ await new Promise(r => setTimeout(r, 5));
1690
+ }
1691
+
1692
+ const page1 = await store.getMessagesPaginated({
1693
+ threadId: thread.id,
1694
+ selectBy: { pagination: { page: 0, perPage: 5 } },
1695
+ format: 'v2',
1696
+ });
1697
+ expect(page1.messages).toHaveLength(5);
1698
+ expect(page1.total).toBe(15);
1699
+ expect(page1.page).toBe(0);
1700
+ expect(page1.perPage).toBe(5);
1701
+ expect(page1.hasMore).toBe(true);
1702
+
1703
+ const page3 = await store.getMessagesPaginated({
1704
+ threadId: thread.id,
1705
+ selectBy: { pagination: { page: 2, perPage: 5 } },
1706
+ format: 'v2',
1707
+ });
1708
+ expect(page3.messages).toHaveLength(5);
1709
+ expect(page3.total).toBe(15);
1710
+ expect(page3.hasMore).toBe(false);
1711
+ });
1712
+
1713
+ it('should filter by date with pagination for getMessages', async () => {
1714
+ resetRole();
1715
+ const threadData = createSampleThread();
1716
+ const thread = await store.saveThread({ thread: threadData as StorageThreadType });
1717
+ const now = new Date();
1718
+ const yesterday = new Date(
1719
+ now.getFullYear(),
1720
+ now.getMonth(),
1721
+ now.getDate() - 1,
1722
+ now.getHours(),
1723
+ now.getMinutes(),
1724
+ now.getSeconds(),
1725
+ );
1726
+ const dayBeforeYesterday = new Date(
1727
+ now.getFullYear(),
1728
+ now.getMonth(),
1729
+ now.getDate() - 2,
1730
+ now.getHours(),
1731
+ now.getMinutes(),
1732
+ now.getSeconds(),
1733
+ );
1734
+
1735
+ // Insert a short delay between creations so timestamps are distinct and ordering is deterministic
1736
+ const messagesToSave: MastraMessageV1[] = [];
1737
+ messagesToSave.push(createSampleMessageV1({ threadId: thread.id, createdAt: dayBeforeYesterday }));
1738
+ await new Promise(r => setTimeout(r, 5));
1739
+ messagesToSave.push(createSampleMessageV1({ threadId: thread.id, createdAt: dayBeforeYesterday }));
1740
+ await new Promise(r => setTimeout(r, 5));
1741
+ messagesToSave.push(createSampleMessageV1({ threadId: thread.id, createdAt: yesterday }));
1742
+ await new Promise(r => setTimeout(r, 5));
1743
+ messagesToSave.push(createSampleMessageV1({ threadId: thread.id, createdAt: yesterday }));
1744
+ await new Promise(r => setTimeout(r, 5));
1745
+ messagesToSave.push(createSampleMessageV1({ threadId: thread.id, createdAt: now }));
1746
+ await new Promise(r => setTimeout(r, 5));
1747
+ messagesToSave.push(createSampleMessageV1({ threadId: thread.id, createdAt: now }));
1748
+
1749
+ await store.saveMessages({ messages: messagesToSave, format: 'v1' });
1750
+ // Total 6 messages: 2 dayBeforeYesterday, 2 yesterday, 2 now (oldest to newest)
1751
+
1752
+ const fromYesterday = await store.getMessagesPaginated({
1753
+ threadId: thread.id,
1754
+ selectBy: { pagination: { page: 0, perPage: 3, dateRange: { start: yesterday } } },
1755
+ format: 'v2',
1756
+ });
1757
+ expect(fromYesterday.total).toBe(4);
1758
+ expect(fromYesterday.messages).toHaveLength(3);
1759
+ const firstMessageTime = new Date((fromYesterday.messages[0] as MastraMessageV1).createdAt).getTime();
1760
+ expect(firstMessageTime).toBeGreaterThanOrEqual(new Date(yesterday.toISOString()).getTime());
1761
+ if (fromYesterday.messages.length > 0) {
1762
+ expect(new Date((fromYesterday.messages[0] as MastraMessageV1).createdAt).toISOString().slice(0, 10)).toEqual(
1763
+ yesterday.toISOString().slice(0, 10),
1764
+ );
1765
+ }
1766
+ });
1767
+
1768
+ it('should save and retrieve messages', async () => {
1769
+ const thread = createSampleThread();
1770
+ await store.saveThread({ thread });
1771
+
1772
+ const messages = [
1773
+ createSampleMessageV1({ threadId: thread.id }),
1774
+ createSampleMessageV1({ threadId: thread.id }),
1775
+ ];
1776
+
1777
+ // Save messages
1778
+ const savedMessages = await store.saveMessages({ messages });
1779
+ // Retrieve messages
1780
+ const retrievedMessages = await store.getMessagesPaginated({ threadId: thread.id, format: 'v1' });
1781
+
1782
+ const checkMessages = messages.map(m => {
1783
+ const { resourceId, ...rest } = m;
1784
+ return rest;
1785
+ });
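+ // resourceId is dropped above since retrieved V1 messages may not echo it back.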
1786
+
1787
+ try {
1788
+ expect(savedMessages).toEqual(messages);
1789
+ expect(retrievedMessages.messages).toHaveLength(2);
1790
+ expect(retrievedMessages.messages).toEqual(expect.arrayContaining(checkMessages));
1791
+ } catch (e) {
1792
+ console.error('Error in should save and retrieve messages:', e);
1793
+ throw e;
1794
+ }
1795
+ });
1796
+
1797
+ it('should maintain message order', async () => {
1798
+ const thread = createSampleThread();
1799
+ await store.saveThread({ thread });
1800
+
1801
+ const messageContent = ['First', 'Second', 'Third'];
1802
+
1803
+ const messages = messageContent.map(content =>
1804
+ createSampleMessageV2({
1805
+ threadId: thread.id,
1806
+ content: { content, parts: [{ type: 'text', text: content }] },
1807
+ }),
1808
+ );
1809
+
1810
+ await store.saveMessages({ messages, format: 'v2' });
1811
+
1812
+ const retrievedMessages = await store.getMessagesPaginated({ threadId: thread.id, format: 'v2' });
1813
+ expect(retrievedMessages.messages).toHaveLength(3);
1814
+
1815
+ // Verify order is maintained
1816
+ retrievedMessages.messages.forEach((msg, idx) => {
1817
+ if (typeof msg.content === 'object' && msg.content && 'parts' in msg.content) {
1818
+ expect((msg.content.parts[0] as any).text).toEqual(messageContent[idx]);
1819
+ } else {
1820
+ throw new Error('Message content is not in expected format');
1821
+ }
1822
+ });
1823
+ });
1824
+
1825
+ it('should rollback on error during message save', async () => {
1826
+ const thread = createSampleThread();
1827
+ await store.saveThread({ thread });
1828
+
1829
+ const messages = [
1830
+ createSampleMessageV1({ threadId: thread.id }),
1831
+ { ...createSampleMessageV1({ threadId: thread.id }), id: null } as any, // This will cause an error
1832
+ ];
1833
+
1834
+ await expect(store.saveMessages({ messages })).rejects.toThrow();
1835
+
1836
+ // Verify no messages were saved
1837
+ const savedMessages = await store.getMessagesPaginated({ threadId: thread.id, format: 'v2' });
1838
+ expect(savedMessages.messages).toHaveLength(0);
1839
+ });
1840
+
1841
+ it('should retrieve messages w/ next/prev messages by message id + resource id', async () => {
1842
+ const thread = createSampleThread({ id: 'thread-one' });
1843
+ await store.saveThread({ thread });
1844
+
1845
+ const thread2 = createSampleThread({ id: 'thread-two' });
1846
+ await store.saveThread({ thread: thread2 });
1847
+
1848
+ const thread3 = createSampleThread({ id: 'thread-three' });
1849
+ await store.saveThread({ thread: thread3 });
1850
+
1851
+ const messages: MastraMessageV2[] = [
1852
+ createSampleMessageV2({
1853
+ threadId: 'thread-one',
1854
+ content: { content: 'First' },
1855
+ resourceId: 'cross-thread-resource',
1856
+ }),
1857
+ createSampleMessageV2({
1858
+ threadId: 'thread-one',
1859
+ content: { content: 'Second' },
1860
+ resourceId: 'cross-thread-resource',
1861
+ }),
1862
+ createSampleMessageV2({
1863
+ threadId: 'thread-one',
1864
+ content: { content: 'Third' },
1865
+ resourceId: 'cross-thread-resource',
1866
+ }),
1867
+
1868
+ createSampleMessageV2({
1869
+ threadId: 'thread-two',
1870
+ content: { content: 'Fourth' },
1871
+ resourceId: 'cross-thread-resource',
1872
+ }),
1873
+ createSampleMessageV2({
1874
+ threadId: 'thread-two',
1875
+ content: { content: 'Fifth' },
1876
+ resourceId: 'cross-thread-resource',
1877
+ }),
1878
+ createSampleMessageV2({
1879
+ threadId: 'thread-two',
1880
+ content: { content: 'Sixth' },
1881
+ resourceId: 'cross-thread-resource',
1882
+ }),
1883
+
1884
+ createSampleMessageV2({
1885
+ threadId: 'thread-three',
1886
+ content: { content: 'Seventh' },
1887
+ resourceId: 'other-resource',
1888
+ }),
1889
+ createSampleMessageV2({
1890
+ threadId: 'thread-three',
1891
+ content: { content: 'Eighth' },
1892
+ resourceId: 'other-resource',
1893
+ }),
1894
+ ];
1895
+
1896
+ await store.saveMessages({ messages: messages, format: 'v2' });
1897
+
1898
+ const retrievedMessages = await store.getMessagesPaginated({ threadId: 'thread-one', format: 'v2' });
1899
+ expect(retrievedMessages.messages).toHaveLength(3);
1900
+ expect(retrievedMessages.messages.map((m: any) => m.content.parts[0].text)).toEqual([
1901
+ 'First',
1902
+ 'Second',
1903
+ 'Third',
1904
+ ]);
1905
+
1906
+ const retrievedMessages2 = await store.getMessagesPaginated({ threadId: 'thread-two', format: 'v2' });
1907
+ expect(retrievedMessages2.messages).toHaveLength(3);
1908
+ expect(retrievedMessages2.messages.map((m: any) => m.content.parts[0].text)).toEqual([
1909
+ 'Fourth',
1910
+ 'Fifth',
1911
+ 'Sixth',
1912
+ ]);
1913
+
1914
+ const retrievedMessages3 = await store.getMessagesPaginated({ threadId: 'thread-three', format: 'v2' });
1915
+ expect(retrievedMessages3.messages).toHaveLength(2);
1916
+ expect(retrievedMessages3.messages.map((m: any) => m.content.parts[0].text)).toEqual(['Seventh', 'Eighth']);
1917
+
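+ // With last: 0 and a non-existent threadId, only the explicitly included messages plus their
+ // two previous/next neighbours should come back: 3 from thread-one and 3 from thread-two.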
1918
+ const { messages: crossThreadMessages } = await store.getMessagesPaginated({
1919
+ threadId: 'thread-doesnt-exist',
1920
+ format: 'v2',
1921
+ selectBy: {
1922
+ last: 0,
1923
+ include: [
1924
+ {
1925
+ id: messages[1].id,
1926
+ threadId: 'thread-one',
1927
+ withNextMessages: 2,
1928
+ withPreviousMessages: 2,
1929
+ },
1930
+ {
1931
+ id: messages[4].id,
1932
+ threadId: 'thread-two',
1933
+ withPreviousMessages: 2,
1934
+ withNextMessages: 2,
1935
+ },
1936
+ ],
1937
+ },
1938
+ });
1939
+ expect(crossThreadMessages).toHaveLength(6);
1940
+ expect(crossThreadMessages.filter(m => m.threadId === `thread-one`)).toHaveLength(3);
1941
+ expect(crossThreadMessages.filter(m => m.threadId === `thread-two`)).toHaveLength(3);
1942
+ });
1943
+
1944
+ it('should return messages using both last and include (cross-thread, deduped)', async () => {
1945
+ const thread = createSampleThread({ id: 'thread-one' });
1946
+ await store.saveThread({ thread });
1947
+
1948
+ const thread2 = createSampleThread({ id: 'thread-two' });
1949
+ await store.saveThread({ thread: thread2 });
1950
+
1951
+ const now = new Date();
1952
+
1953
+ // Setup: create messages in two threads
1954
+ const messages = [
1955
+ createSampleMessageV2({
1956
+ threadId: 'thread-one',
1957
+ content: { content: 'A' },
1958
+ createdAt: new Date(now.getTime()),
1959
+ }),
1960
+ createSampleMessageV2({
1961
+ threadId: 'thread-one',
1962
+ content: { content: 'B' },
1963
+ createdAt: new Date(now.getTime() + 1000),
1964
+ }),
1965
+ createSampleMessageV2({
1966
+ threadId: 'thread-one',
1967
+ content: { content: 'C' },
1968
+ createdAt: new Date(now.getTime() + 2000),
1969
+ }),
1970
+ createSampleMessageV2({
1971
+ threadId: 'thread-two',
1972
+ content: { content: 'D' },
1973
+ createdAt: new Date(now.getTime() + 3000),
1974
+ }),
1975
+ createSampleMessageV2({
1976
+ threadId: 'thread-two',
1977
+ content: { content: 'E' },
1978
+ createdAt: new Date(now.getTime() + 4000),
1979
+ }),
1980
+ createSampleMessageV2({
1981
+ threadId: 'thread-two',
1982
+ content: { content: 'F' },
1983
+ createdAt: new Date(now.getTime() + 5000),
1984
+ }),
1985
+ ];
1986
+ await store.saveMessages({ messages, format: 'v2' });
1987
+
1988
+ // Use last: 2 and include a message from another thread with context
1989
+ const { messages: result } = await store.getMessagesPaginated({
1990
+ threadId: 'thread-one',
1991
+ format: 'v2',
1992
+ selectBy: {
1993
+ last: 2,
1994
+ include: [
1995
+ {
1996
+ id: messages[4].id, // 'E' from thread-two
1997
+ threadId: 'thread-two',
1998
+ withPreviousMessages: 1,
1999
+ withNextMessages: 1,
2000
+ },
2001
+ ],
2002
+ },
2003
+ });
2004
+
2005
+ // Should include last 2 from thread-one and 3 from thread-two (D, E, F)
2006
+ expect(result.map(m => (m.content as { content: string }).content).sort()).toEqual(['B', 'C', 'D', 'E', 'F']);
2007
+ // Should include 2 from thread-one
2008
+ expect(result.filter(m => m.threadId === 'thread-one').map((m: any) => m.content.content)).toEqual(['B', 'C']);
2009
+ // Should include 3 from thread-two
2010
+ expect(result.filter(m => m.threadId === 'thread-two').map((m: any) => m.content.content)).toEqual([
2011
+ 'D',
2012
+ 'E',
2013
+ 'F',
2014
+ ]);
2015
+ });
2016
+ });
2017
+
2018
+ describe('getThreadsByResourceId with pagination', () => {
2019
+ it('should return paginated threads with total count', async () => {
2020
+ const resourceId = `pg-paginated-resource-${randomUUID()}`;
2021
+ const threadPromises = Array.from({ length: 17 }, () =>
2022
+ store.saveThread({ thread: { ...createSampleThread(), resourceId } }),
2023
+ );
2024
+ await Promise.all(threadPromises);
2025
+
2026
+ const page1 = await store.getThreadsByResourceIdPaginated({ resourceId, page: 0, perPage: 7 });
2027
+ expect(page1.threads).toHaveLength(7);
2028
+ expect(page1.total).toBe(17);
2029
+ expect(page1.page).toBe(0);
2030
+ expect(page1.perPage).toBe(7);
2031
+ expect(page1.hasMore).toBe(true);
2032
+
2033
+ const page3 = await store.getThreadsByResourceIdPaginated({ resourceId, page: 2, perPage: 7 });
2034
+ expect(page3.threads).toHaveLength(3); // 17 total, 7 per page, 3rd page has 17 - 2*7 = 3
2035
+ expect(page3.total).toBe(17);
2036
+ expect(page3.hasMore).toBe(false);
2037
+ });
2038
+
2039
+ it('should return paginated results when no pagination params for getThreadsByResourceId', async () => {
2040
+ const resourceId = `pg-non-paginated-resource-${randomUUID()}`;
2041
+ await store.saveThread({ thread: { ...createSampleThread(), resourceId } });
2042
+
2043
+ const results = await store.getThreadsByResourceIdPaginated({ resourceId });
2044
+ expect(Array.isArray(results.threads)).toBe(true);
2045
+ expect(results.threads.length).toBe(1);
2046
+ expect(results.total).toBe(1);
2047
+ expect(results.page).toBe(0);
2048
+ expect(results.perPage).toBe(100);
2049
+ expect(results.hasMore).toBe(false);
2050
+ });
2051
+ });
2052
+ });
2053
+
2054
+ describe('MssqlStorage Table Name Quoting', () => {
2055
+ const camelCaseTable = 'TestCamelCaseTable';
2056
+ const snakeCaseTable = 'test_snake_case_table';
2057
+ const BASE_SCHEMA = {
2058
+ id: { type: 'integer', primaryKey: true, nullable: false },
2059
+ name: { type: 'text', nullable: true },
2060
+ } as Record<string, StorageColumn>;
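+ // MSSQL delimits identifiers with square brackets, so camelCase and snake_case names exercise the quoting path.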
2061
+
2062
+ beforeEach(async () => {
2063
+ // Only clear tables if store is initialized
2064
+ try {
2065
+ // Clear tables before each test
2066
+ await store.clearTable({ tableName: camelCaseTable as TABLE_NAMES });
2067
+ await store.clearTable({ tableName: snakeCaseTable as TABLE_NAMES });
2068
+ } catch (error) {
2069
+ // Ignore errors during table clearing
2070
+ console.warn('Error clearing tables:', error);
2071
+ }
2072
+ });
2073
+
2074
+ afterEach(async () => {
2075
+ // Only clear tables if store is initialized
2076
+ try {
2077
+ // Clear tables after each test
2078
+ await store.clearTable({ tableName: camelCaseTable as TABLE_NAMES });
2079
+ await store.clearTable({ tableName: snakeCaseTable as TABLE_NAMES });
2080
+ } catch (error) {
2081
+ // Ignore errors during table clearing
2082
+ console.warn('Error clearing tables:', error);
2083
+ }
2084
+ });
2085
+
2086
+ it('should create and upsert to a camelCase table without quoting errors', async () => {
2087
+ await expect(
2088
+ store.createTable({
2089
+ tableName: camelCaseTable as TABLE_NAMES,
2090
+ schema: BASE_SCHEMA,
2091
+ }),
2092
+ ).resolves.not.toThrow();
2093
+
2094
+ await store.insert({
2095
+ tableName: camelCaseTable as TABLE_NAMES,
2096
+ record: { id: '1', name: 'Alice' },
2097
+ });
2098
+
2099
+ const row: any = await store.load({
2100
+ tableName: camelCaseTable as TABLE_NAMES,
2101
+ keys: { id: '1' },
2102
+ });
2103
+ expect(row?.name).toBe('Alice');
2104
+ });
2105
+
2106
+ it('should create and upsert to a snake_case table without quoting errors', async () => {
2107
+ await expect(
2108
+ store.createTable({
2109
+ tableName: snakeCaseTable as TABLE_NAMES,
2110
+ schema: BASE_SCHEMA,
2111
+ }),
2112
+ ).resolves.not.toThrow();
2113
+
2114
+ await store.insert({
2115
+ tableName: snakeCaseTable as TABLE_NAMES,
2116
+ record: { id: '2', name: 'Bob' },
2117
+ });
2118
+
2119
+ const row: any = await store.load({
2120
+ tableName: snakeCaseTable as TABLE_NAMES,
2121
+ keys: { id: '2' },
2122
+ });
2123
+ expect(row?.name).toBe('Bob');
2124
+ });
2125
+ });
2126
+
2127
+ describe('Permission Handling (MSSQL)', () => {
2128
+ const schemaRestrictedUser = 'mastra_schema_restricted_storage';
2129
+ const restrictedPassword = 'Test123!@#'; // MSSQL requires a strong password
2130
+ const testSchema = 'testSchema';
2131
+ const adminConfig = {
2132
+ user: TEST_CONFIG.user,
2133
+ password: TEST_CONFIG.password,
2134
+ server: TEST_CONFIG.server,
2135
+ database: TEST_CONFIG.database,
2136
+ port: TEST_CONFIG.port,
2137
+ options: { encrypt: true, trustServerCertificate: true },
2138
+ };
2139
+
2140
+ let adminPool: sql.ConnectionPool;
2141
+
2142
+ beforeAll(async () => {
2143
+ adminPool = await sql.connect(adminConfig);
2144
+
2145
+ // Drop schema and user if they exist
2146
+ await adminPool.request().batch(`
2147
+ IF EXISTS (SELECT * FROM sys.schemas WHERE name = '${testSchema}')
2148
+ DROP SCHEMA [${testSchema}];
2149
+ IF EXISTS (SELECT * FROM sys.database_principals WHERE name = '${schemaRestrictedUser}')
2150
+ DROP USER [${schemaRestrictedUser}];
2151
+ IF EXISTS (SELECT * FROM sys.sql_logins WHERE name = '${schemaRestrictedUser}')
2152
+ DROP LOGIN [${schemaRestrictedUser}];
2153
+ `);
2154
+
2155
+ // Create restricted login and user
2156
+ await adminPool.request().batch(`
2157
+ CREATE LOGIN [${schemaRestrictedUser}] WITH PASSWORD = '${restrictedPassword}';
2158
+ CREATE USER [${schemaRestrictedUser}] FOR LOGIN [${schemaRestrictedUser}];
2159
+ -- Only grant CONNECT, do not grant CREATE SCHEMA
2160
+ GRANT CONNECT TO [${schemaRestrictedUser}];
2161
+ `);
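+ // The restricted login can connect but is never granted CREATE SCHEMA, so creating the custom schema below should fail.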
2162
+ });
2163
+
2164
+ afterAll(async () => {
2165
+ // Drop schema and user
2166
+ await adminPool.request().batch(`
2167
+ IF EXISTS (SELECT * FROM sys.schemas WHERE name = '${testSchema}')
2168
+ DROP SCHEMA [${testSchema}];
2169
+ IF EXISTS (SELECT * FROM sys.database_principals WHERE name = '${schemaRestrictedUser}')
2170
+ DROP USER [${schemaRestrictedUser}];
2171
+ IF EXISTS (SELECT * FROM sys.sql_logins WHERE name = '${schemaRestrictedUser}')
2172
+ DROP LOGIN [${schemaRestrictedUser}];
2173
+ `);
2174
+ await adminPool.close();
2175
+ });
2176
+
2177
+ describe('Schema Creation', () => {
2178
+ it('should fail when user lacks CREATE SCHEMA privilege', async () => {
2179
+ const restrictedConfig = {
2180
+ ...adminConfig,
2181
+ user: schemaRestrictedUser,
2182
+ password: restrictedPassword,
2183
+ };
2184
+ const store = new MSSQLStore({
2185
+ ...restrictedConfig,
2186
+ schemaName: testSchema,
2187
+ });
2188
+
2189
+ try {
2190
+ await expect(store.init()).rejects.toThrow(
2191
+ `Unable to create schema "testSchema". This requires CREATE privilege on the database. Either create the schema manually or grant CREATE privilege to the user.`,
2192
+ );
2193
+
2194
+ // Verify schema was not created
2195
+ const result = await adminPool.request().query(`SELECT * FROM sys.schemas WHERE name = '${testSchema}'`);
2196
+ expect(result.recordset.length).toBe(0);
2197
+ } finally {
2198
+ await store.close();
2199
+ }
2200
+ });
2201
+
2202
+ it('should fail with schema creation error when saving thread', async () => {
2203
+ const restrictedConfig = {
2204
+ ...adminConfig,
2205
+ user: schemaRestrictedUser,
2206
+ password: restrictedPassword,
2207
+ };
2208
+ const store = new MSSQLStore({
2209
+ ...restrictedConfig,
2210
+ schemaName: testSchema,
2211
+ });
2212
+
2213
+ try {
2214
+ await expect(async () => {
2215
+ await store.init();
2216
+ const thread = createSampleThread();
2217
+ await store.saveThread({ thread });
2218
+ }).rejects.toThrow(
2219
+ `Unable to create schema "testSchema". This requires CREATE privilege on the database. Either create the schema manually or grant CREATE privilege to the user.`,
2220
+ );
2221
+
2222
+ // Verify schema was not created
2223
+ const result = await adminPool.request().query(`SELECT * FROM sys.schemas WHERE name = '${testSchema}'`);
2224
+ expect(result.recordset.length).toBe(0);
2225
+ } finally {
2226
+ await store.close();
2227
+ }
2228
+ });
2229
+ });
2230
+ });
2231
+
2232
+ afterAll(async () => {
2233
+ try {
2234
+ await store.close();
2235
+ } catch (error) {
2236
+ console.warn('Error closing store:', error);
2237
+ }
2238
+ });
2239
+ });