@mastra/clickhouse 0.0.0-working-memory-per-user-20250620163010 → 0.0.0-zod-v4-compat-part-2-20250820135355

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. package/CHANGELOG.md +168 -4
  2. package/LICENSE.md +12 -4
  3. package/dist/index.cjs +2259 -566
  4. package/dist/index.cjs.map +1 -0
  5. package/dist/index.d.ts +2 -4
  6. package/dist/index.d.ts.map +1 -0
  7. package/dist/index.js +2245 -550
  8. package/dist/index.js.map +1 -0
  9. package/dist/storage/domains/legacy-evals/index.d.ts +21 -0
  10. package/dist/storage/domains/legacy-evals/index.d.ts.map +1 -0
  11. package/dist/storage/domains/memory/index.d.ts +79 -0
  12. package/dist/storage/domains/memory/index.d.ts.map +1 -0
  13. package/dist/storage/domains/operations/index.d.ts +42 -0
  14. package/dist/storage/domains/operations/index.d.ts.map +1 -0
  15. package/dist/storage/domains/scores/index.d.ts +43 -0
  16. package/dist/storage/domains/scores/index.d.ts.map +1 -0
  17. package/dist/storage/domains/traces/index.d.ts +21 -0
  18. package/dist/storage/domains/traces/index.d.ts.map +1 -0
  19. package/dist/storage/domains/utils.d.ts +28 -0
  20. package/dist/storage/domains/utils.d.ts.map +1 -0
  21. package/dist/storage/domains/workflows/index.d.ts +36 -0
  22. package/dist/storage/domains/workflows/index.d.ts.map +1 -0
  23. package/dist/{_tsup-dts-rollup.d.cts → storage/index.d.ts} +106 -87
  24. package/dist/storage/index.d.ts.map +1 -0
  25. package/package.json +9 -9
  26. package/src/storage/domains/legacy-evals/index.ts +246 -0
  27. package/src/storage/domains/memory/index.ts +1393 -0
  28. package/src/storage/domains/operations/index.ts +319 -0
  29. package/src/storage/domains/scores/index.ts +326 -0
  30. package/src/storage/domains/traces/index.ts +275 -0
  31. package/src/storage/domains/utils.ts +86 -0
  32. package/src/storage/domains/workflows/index.ts +285 -0
  33. package/src/storage/index.test.ts +15 -1013
  34. package/src/storage/index.ts +214 -1013
  35. package/tsconfig.build.json +9 -0
  36. package/tsconfig.json +1 -1
  37. package/tsup.config.ts +22 -0
  38. package/dist/_tsup-dts-rollup.d.ts +0 -187
  39. package/dist/index.d.cts +0 -4
package/src/storage/index.test.ts
@@ -1,16 +1,6 @@
-import { randomUUID } from 'crypto';
-import {
-  createSampleMessageV1,
-  createSampleThread,
-  createSampleWorkflowSnapshot,
-  checkWorkflowSnapshot,
-} from '@internal/storage-test-utils';
-import type { MastraMessageV1, StorageColumn, WorkflowRunState } from '@mastra/core';
-import type { TABLE_NAMES } from '@mastra/core/storage';
-import { TABLE_THREADS, TABLE_MESSAGES, TABLE_WORKFLOW_SNAPSHOT } from '@mastra/core/storage';
-import { describe, it, expect, beforeAll, beforeEach, afterAll, vi, afterEach } from 'vitest';
-
-import { ClickhouseStore, TABLE_ENGINES } from '.';
+import { createTestSuite } from '@internal/storage-test-utils';
+import { vi } from 'vitest';
+import { ClickhouseStore } from '.';
 import type { ClickhouseConfig } from '.';
 
 vi.setConfig({ testTimeout: 60_000, hookTimeout: 60_000 });
@@ -19,1006 +9,18 @@ const TEST_CONFIG: ClickhouseConfig = {
   url: process.env.CLICKHOUSE_URL || 'http://localhost:8123',
   username: process.env.CLICKHOUSE_USERNAME || 'default',
   password: process.env.CLICKHOUSE_PASSWORD || 'password',
-  ttl: {
-    mastra_traces: {
-      row: { interval: 10, unit: 'SECOND' },
-    },
-    mastra_evals: {
-      columns: {
-        result: { interval: 10, unit: 'SECOND' },
-      },
-    },
-  },
+  // ttl: {
+  //   mastra_traces: {
+  //     row: { interval: 600, unit: 'SECOND' },
+  //   },
+  //   mastra_evals: {
+  //     columns: {
+  //       result: { interval: 10, unit: 'SECOND' },
+  //     },
+  //   },
+  // },
 };
 
-const createSampleTrace = () => ({
-  id: `trace-${randomUUID()}`,
-  name: 'Test Trace',
-  createdAt: new Date(),
-  updatedAt: new Date(),
-  metadata: { key: 'value' },
-});
-
-const createSampleEval = () => ({
-  agent_name: 'test-agent',
-  run_id: 'test-run-1',
-  result: '{ "score": 1 }',
-  createdAt: new Date(),
-});
-
-describe('ClickhouseStore', () => {
-  let store: ClickhouseStore;
-
-  beforeAll(async () => {
-    store = new ClickhouseStore(TEST_CONFIG);
-    await store.init();
-  });
-
-  beforeEach(async () => {
-    // Clear tables before each test
-    await store.clearTable({ tableName: TABLE_THREADS });
-    await store.clearTable({ tableName: TABLE_MESSAGES });
-    await store.clearTable({ tableName: TABLE_WORKFLOW_SNAPSHOT });
-  });
-
-  describe('Thread Operations', () => {
-    it('should create and retrieve a thread', async () => {
-      const thread = createSampleThread();
-
-      // Save thread
-      const savedThread = await store.saveThread({ thread });
-      expect(savedThread).toEqual(thread);
-
-      // Retrieve thread
-      const retrievedThread = await store.getThreadById({ threadId: thread.id });
-      expect(retrievedThread?.title).toEqual(thread.title);
-    }, 10e3);
-
-    it('should return null for non-existent thread', async () => {
-      const result = await store.getThreadById({ threadId: 'non-existent' });
-      expect(result).toBeNull();
-    }, 10e3);
-
-    it('should get threads by resource ID', async () => {
-      const thread1 = createSampleThread();
-      const thread2 = { ...createSampleThread(), resourceId: thread1.resourceId };
-
-      await store.saveThread({ thread: thread1 });
-      await store.saveThread({ thread: thread2 });
-
-      const threads = await store.getThreadsByResourceId({ resourceId: thread1.resourceId });
-      expect(threads).toHaveLength(2);
-      expect(threads.map(t => t.id)).toEqual(expect.arrayContaining([thread1.id, thread2.id]));
-    }, 10e3);
-
-    it('should update thread title and metadata', async () => {
-      const thread = createSampleThread();
-      await store.saveThread({ thread });
-
-      const newMetadata = { newKey: 'newValue' };
-      const updatedThread = await store.updateThread({
-        id: thread.id,
-        title: 'Updated Title',
-        metadata: newMetadata,
-      });
-
-      expect(updatedThread.title).toBe('Updated Title');
-      expect(updatedThread.metadata).toEqual({
-        ...thread.metadata,
-        ...newMetadata,
-      });
-
-      // Verify persistence
-      const retrievedThread = await store.getThreadById({ threadId: thread.id });
-      expect(retrievedThread).toEqual(updatedThread);
-    }, 10e3);
-
-    it('should delete thread and its messages', async () => {
-      const thread = createSampleThread();
-      await store.saveThread({ thread });
-
-      // Add some messages
-      const messages = [
-        createSampleMessageV1({ threadId: thread.id, resourceId: 'clickhouse-test' }),
-        createSampleMessageV1({ threadId: thread.id, resourceId: 'clickhouse-test' }),
-      ];
-      await store.saveMessages({ messages });
-
-      await store.deleteThread({ threadId: thread.id });
-
-      const retrievedThread = await store.getThreadById({ threadId: thread.id });
-      expect(retrievedThread).toBeNull();
-
-      // Verify messages were also deleted
-      const retrievedMessages = await store.getMessages({ threadId: thread.id });
-      expect(retrievedMessages).toHaveLength(0);
-    }, 10e3);
-
-    it('should update thread updatedAt when a message is saved to it', async () => {
-      const thread = createSampleThread();
-      await store.saveThread({ thread });
-
-      // Get the initial thread to capture the original updatedAt
-      const initialThread = await store.getThreadById({ threadId: thread.id });
-      expect(initialThread).toBeDefined();
-      const originalUpdatedAt = initialThread!.updatedAt;
-
-      // Wait a small amount to ensure different timestamp
-      await new Promise(resolve => setTimeout(resolve, 10));
-
-      // Create and save a message to the thread
-      const message = createSampleMessageV1({ threadId: thread.id });
-      await store.saveMessages({ messages: [message] });
-
-      // Retrieve the thread again and check that updatedAt was updated
-      const updatedThread = await store.getThreadById({ threadId: thread.id });
-      expect(updatedThread).toBeDefined();
-      expect(updatedThread!.updatedAt.getTime()).toBeGreaterThan(originalUpdatedAt.getTime());
-    }, 10e3);
-  });
-
-  describe('Message Operations', () => {
-    it('should save and retrieve messages', async () => {
-      const thread = createSampleThread();
-      await store.saveThread({ thread });
-
-      const messages = [
-        createSampleMessageV1({
-          threadId: thread.id,
-          createdAt: new Date(Date.now() - 1000 * 60 * 60 * 24),
-          resourceId: 'clickhouse-test',
-        }),
-        createSampleMessageV1({ threadId: thread.id, resourceId: 'clickhouse-test' }),
-      ];
-
-      // Save messages
-      const savedMessages = await store.saveMessages({ messages });
-      expect(savedMessages).toEqual(messages);
-
-      // Retrieve messages
-      const retrievedMessages = await store.getMessages({ threadId: thread.id });
-      expect(retrievedMessages).toHaveLength(2);
-      const checkMessages = messages.map(m => {
-        const { resourceId, ...rest } = m;
-        return rest;
-      });
-      expect(retrievedMessages).toEqual(expect.arrayContaining(checkMessages));
-    }, 10e3);
-
-    it('should handle empty message array', async () => {
-      const result = await store.saveMessages({ messages: [] });
-      expect(result).toEqual([]);
-    }, 10e3);
-
-    it('should maintain message order', async () => {
-      const thread = createSampleThread();
-      await store.saveThread({ thread });
-
-      const messages = [
-        {
-          ...createSampleMessageV1({
-            threadId: thread.id,
-            createdAt: new Date(Date.now() - 1000 * 3),
-            content: 'First',
-            resourceId: 'clickhouse-test',
-          }),
-          role: 'user',
-        },
-        {
-          ...createSampleMessageV1({
-            threadId: thread.id,
-            createdAt: new Date(Date.now() - 1000 * 2),
-            content: 'Second',
-            resourceId: 'clickhouse-test',
-          }),
-          role: 'assistant',
-        },
-        {
-          ...createSampleMessageV1({
-            threadId: thread.id,
-            createdAt: new Date(Date.now() - 1000 * 1),
-            content: 'Third',
-            resourceId: 'clickhouse-test',
-          }),
-          role: 'user',
-        },
-      ] as MastraMessageV1[];
-
-      await store.saveMessages({ messages });
-
-      const retrievedMessages = await store.getMessages({ threadId: thread.id });
-      expect(retrievedMessages).toHaveLength(3);
-
-      // Verify order is maintained
-      retrievedMessages.forEach((msg, idx) => {
-        // @ts-expect-error
-        expect(msg.content[0].text).toBe(messages[idx].content[0].text);
-      });
-    }, 10e3);
-
-    // it('should retrieve messages w/ next/prev messages by message id + resource id', async () => {
-    //   const messages: MastraMessageV2[] = [
-    //     createSampleMessageV2({ threadId: 'thread-one', content: 'First', resourceId: 'cross-thread-resource' }),
-    //     createSampleMessageV2({ threadId: 'thread-one', content: 'Second', resourceId: 'cross-thread-resource' }),
-    //     createSampleMessageV2({ threadId: 'thread-one', content: 'Third', resourceId: 'cross-thread-resource' }),
-
-    //     createSampleMessageV2({ threadId: 'thread-two', content: 'Fourth', resourceId: 'cross-thread-resource' }),
-    //     createSampleMessageV2({ threadId: 'thread-two', content: 'Fifth', resourceId: 'cross-thread-resource' }),
-    //     createSampleMessageV2({ threadId: 'thread-two', content: 'Sixth', resourceId: 'cross-thread-resource' }),

-    //     createSampleMessageV2({ threadId: 'thread-three', content: 'Seventh', resourceId: 'other-resource' }),
-    //     createSampleMessageV2({ threadId: 'thread-three', content: 'Eighth', resourceId: 'other-resource' }),
-    //   ];
-
-    //   await store.saveMessages({ messages: messages, format: 'v2' });
-
-    //   const retrievedMessages = await store.getMessages({ threadId: 'thread-one', format: 'v2' });
-    //   expect(retrievedMessages).toHaveLength(3);
-    //   expect(retrievedMessages.map((m: any) => m.content.parts[0].text)).toEqual(['First', 'Second', 'Third']);
-
-    //   const retrievedMessages2 = await store.getMessages({ threadId: 'thread-two', format: 'v2' });
-    //   expect(retrievedMessages2).toHaveLength(3);
-    //   expect(retrievedMessages2.map((m: any) => m.content.parts[0].text)).toEqual(['Fourth', 'Fifth', 'Sixth']);
-
-    //   const retrievedMessages3 = await store.getMessages({ threadId: 'thread-three', format: 'v2' });
-    //   expect(retrievedMessages3).toHaveLength(2);
-    //   expect(retrievedMessages3.map((m: any) => m.content.parts[0].text)).toEqual(['Seventh', 'Eighth']);
-
-    //   const crossThreadMessages = await store.getMessages({
-    //     threadId: 'thread-doesnt-exist',
-    //     resourceId: 'cross-thread-resource',
-    //     format: 'v2',
-    //     selectBy: {
-    //       last: 0,
-    //       include: [
-    //         {
-    //           id: messages[1].id,
-    //           withNextMessages: 2,
-    //           withPreviousMessages: 2,
-    //         },
-    //         {
-    //           id: messages[4].id,
-    //           withPreviousMessages: 2,
-    //           withNextMessages: 2,
-    //         },
-    //       ],
-    //     },
-    //   });
-
-    //   expect(crossThreadMessages).toHaveLength(6);
-    //   expect(crossThreadMessages.filter(m => m.threadId === `thread-one`)).toHaveLength(3);
-    //   expect(crossThreadMessages.filter(m => m.threadId === `thread-two`)).toHaveLength(3);
-
-    //   const crossThreadMessages2 = await store.getMessages({
-    //     threadId: 'thread-one',
-    //     resourceId: 'cross-thread-resource',
-    //     format: 'v2',
-    //     selectBy: {
-    //       last: 0,
-    //       include: [
-    //         {
-    //           id: messages[4].id,
-    //           withPreviousMessages: 1,
-    //           withNextMessages: 30,
-    //         },
-    //       ],
-    //     },
-    //   });
-
-    //   expect(crossThreadMessages2).toHaveLength(3);
-    //   expect(crossThreadMessages2.filter(m => m.threadId === `thread-one`)).toHaveLength(0);
-    //   expect(crossThreadMessages2.filter(m => m.threadId === `thread-two`)).toHaveLength(3);
-
-    //   const crossThreadMessages3 = await store.getMessages({
-    //     threadId: 'thread-two',
-    //     resourceId: 'cross-thread-resource',
-    //     format: 'v2',
-    //     selectBy: {
-    //       last: 0,
-    //       include: [
-    //         {
-    //           id: messages[1].id,
-    //           withNextMessages: 1,
-    //           withPreviousMessages: 1,
-    //         },
-    //       ],
-    //     },
-    //   });
-
-    //   expect(crossThreadMessages3).toHaveLength(3);
-    //   expect(crossThreadMessages3.filter(m => m.threadId === `thread-one`)).toHaveLength(3);
-    //   expect(crossThreadMessages3.filter(m => m.threadId === `thread-two`)).toHaveLength(0);
-    // });
-
-    // it('should rollback on error during message save', async () => {
-    //   const thread = createSampleThread();
-    //   await store.saveThread({ thread });
-
-    //   const messages = [
-    //     createSampleMessageV1({ threadId: thread.id }),
-    //     { ...createSampleMessageV1({ threadId: thread.id }), id: null }, // This will cause an error
-    //   ];
-
-    //   await expect(store.saveMessages({ messages })).rejects.toThrow();
-
-    //   // Verify no messages were saved
-    //   const savedMessages = await store.getMessages({ threadId: thread.id });
-    //   expect(savedMessages).toHaveLength(0);
-    // });
-  });
-
-  describe('Traces and TTL', () => {
-    it('should create and retrieve a trace, but not when row level ttl expires', async () => {
-      const trace = createSampleTrace();
-      await store.batchInsert({
-        tableName: 'mastra_traces',
-        records: [trace],
-      });
-      let traces = await store.getTraces({
-        page: 0,
-        perPage: 10,
-      });
-
-      expect(traces).toHaveLength(1);
-      expect(traces[0]!.id).toBe(trace.id);
-
-      await new Promise(resolve => setTimeout(resolve, 10e3));
-      await store.optimizeTable({ tableName: 'mastra_traces' });
-
-      traces = await store.getTraces({
-        page: 0,
-        perPage: 10,
-      });
-
-      expect(traces).toHaveLength(0);
-    }, 60e3);
-
-    // NOTE: unable to clear column level TTLs for the test case nicely, but it does seem to get applied correctly
-    it.skip('should create and retrieve a trace, but not expired columns when column level ttl expires', async () => {
-      await store.clearTable({ tableName: 'mastra_evals' });
-      const ev = createSampleEval();
-      await store.batchInsert({
-        tableName: 'mastra_evals',
-        records: [ev],
-      });
-      let evals = await store.getEvalsByAgentName('test-agent');
-      console.log(evals);
-
-      expect(evals).toHaveLength(1);
-      expect(evals[0]!.agentName).toBe('test-agent');
-      expect(evals[0]!.runId).toBe('test-run-1');
-
-      await new Promise(resolve => setTimeout(resolve, 12e3));
-      await store.materializeTtl({ tableName: 'mastra_evals' });
-      await store.optimizeTable({ tableName: 'mastra_evals' });
-
-      evals = await store.getEvalsByAgentName('test-agent');
-
-      expect(evals).toHaveLength(1);
-      expect(evals[0]!.agentName).toBe('test-agent');
-      expect(evals[0]!.runId).toBeNull();
-    }, 60e3);
-  });
-
-  describe('Edge Cases and Error Handling', () => {
-    it('should handle large metadata objects', async () => {
-      const thread = createSampleThread();
-      const largeMetadata = {
-        ...thread.metadata,
-        largeArray: Array.from({ length: 1000 }, (_, i) => ({ index: i, data: 'test'.repeat(100) })),
-      };
-
-      const threadWithLargeMetadata = {
-        ...thread,
-        metadata: largeMetadata,
-      };
-
-      await store.saveThread({ thread: threadWithLargeMetadata });
-      const retrieved = await store.getThreadById({ threadId: thread.id });
-
-      expect(retrieved?.metadata).toEqual(largeMetadata);
-    }, 10e3);
-
-    it('should handle special characters in thread titles', async () => {
-      const thread = {
-        ...createSampleThread(),
-        title: 'Special \'quotes\' and "double quotes" and emoji 🎉',
-      };
-
-      await store.saveThread({ thread });
-      const retrieved = await store.getThreadById({ threadId: thread.id });
-
-      expect(retrieved?.title).toBe(thread.title);
-    }, 10e3);
-
-    it('should handle concurrent thread updates', async () => {
-      const thread = createSampleThread();
-      await store.saveThread({ thread });
-
-      // Perform multiple updates concurrently
-      const updates = Array.from({ length: 5 }, (_, i) =>
-        store.updateThread({
-          id: thread.id,
-          title: `Update ${i}`,
-          metadata: { update: i },
-        }),
-      );
-
-      await expect(Promise.all(updates)).resolves.toBeDefined();
-
-      // Verify final state
-      const finalThread = await store.getThreadById({ threadId: thread.id });
-      expect(finalThread).toBeDefined();
-    }, 10e3);
-  });
-
-  describe('Workflow Snapshots', () => {
-    it('should persist and load workflow snapshots', async () => {
-      const workflowName = 'test-workflow';
-      const runId = `run-${randomUUID()}`;
-      const snapshot = {
-        status: 'running',
-        context: {
-          input: { type: 'manual' },
-        },
-        value: {},
-        activePaths: [],
-        suspendedPaths: {},
-        runId,
-        timestamp: new Date().getTime(),
-      } as unknown as WorkflowRunState;
-
-      await store.persistWorkflowSnapshot({
-        workflowName,
-        runId,
-        snapshot,
-      });
-
-      const loadedSnapshot = await store.loadWorkflowSnapshot({
-        workflowName,
-        runId,
-      });
-
-      expect(loadedSnapshot).toEqual(snapshot);
-    }, 10e3);
-
-    it('should return null for non-existent workflow snapshot', async () => {
-      const result = await store.loadWorkflowSnapshot({
-        workflowName: 'non-existent',
-        runId: 'non-existent',
-      });
-
-      expect(result).toBeNull();
-    }, 10e3);
-
-    it('should update existing workflow snapshot', async () => {
-      const workflowName = 'test-workflow';
-      const runId = `run-${randomUUID()}`;
-      const initialSnapshot = {
-        status: 'running',
-        context: {
-          input: { type: 'manual' },
-        },
-        value: {},
-        activePaths: [],
-        suspendedPaths: {},
-        runId,
-        timestamp: new Date().getTime(),
-      } as unknown as WorkflowRunState;
-
-      await store.persistWorkflowSnapshot({
-        workflowName,
-        runId,
-        snapshot: initialSnapshot,
-      });
-
-      const updatedSnapshot = {
-        status: 'completed',
-        context: {
-          input: { type: 'manual' },
-          'step-1': { status: 'success', result: { data: 'test' } },
-        },
-        value: {},
-        activePaths: [],
-        suspendedPaths: {},
-        runId,
-        timestamp: new Date().getTime(),
-      } as unknown as WorkflowRunState;
-
-      await store.persistWorkflowSnapshot({
-        workflowName,
-        runId,
-        snapshot: updatedSnapshot,
-      });
-
-      const loadedSnapshot = await store.loadWorkflowSnapshot({
-        workflowName,
-        runId,
-      });
-
-      expect(loadedSnapshot).toEqual(updatedSnapshot);
-    }, 10e3);
-
-    it('should handle complex workflow state', async () => {
-      const workflowName = 'complex-workflow';
-      const runId = `run-${randomUUID()}`;
-      const complexSnapshot = {
-        value: { currentState: 'running' },
-        context: {
-          'step-1': {
-            status: 'success',
-            output: {
-              nestedData: {
-                array: [1, 2, 3],
-                object: { key: 'value' },
-                date: new Date().toISOString(),
-              },
-            },
-          },
-          'step-2': {
-            status: 'waiting',
-            dependencies: ['step-3', 'step-4'],
-          },
-          input: {
-            type: 'scheduled',
-            metadata: {
-              schedule: '0 0 * * *',
-              timezone: 'UTC',
-            },
-          },
-        },
-        activePaths: [
-          {
-            stepPath: ['step-1'],
-            stepId: 'step-1',
-            status: 'success',
-          },
-          {
-            stepPath: ['step-2'],
-            stepId: 'step-2',
-            status: 'waiting',
-          },
-        ],
-        suspendedPaths: {},
-        runId: runId,
-        timestamp: Date.now(),
-      } as unknown as WorkflowRunState;
-
-      await store.persistWorkflowSnapshot({
-        workflowName,
-        runId,
-        snapshot: complexSnapshot,
-      });
-
-      const loadedSnapshot = await store.loadWorkflowSnapshot({
-        workflowName,
-        runId,
-      });
-
-      expect(loadedSnapshot).toEqual(complexSnapshot);
-    }, 10e3);
-  });
-
-  describe('getWorkflowRuns', () => {
-    beforeEach(async () => {
-      await store.clearTable({ tableName: TABLE_WORKFLOW_SNAPSHOT });
-    });
-    it('returns empty array when no workflows exist', async () => {
-      const { runs, total } = await store.getWorkflowRuns();
-      expect(runs).toEqual([]);
-      expect(total).toBe(0);
-    });
-
-    it('returns all workflows by default', async () => {
-      const workflowName1 = 'default_test_1';
-      const workflowName2 = 'default_test_2';
-
-      const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('success');
-      const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('suspended');
-
-      await store.persistWorkflowSnapshot({
-        workflowName: workflowName1,
-        runId: runId1,
-        snapshot: workflow1,
-      });
-      await new Promise(resolve => setTimeout(resolve, 10)); // Small delay to ensure different timestamps
-      await store.persistWorkflowSnapshot({
-        workflowName: workflowName2,
-        runId: runId2,
-        snapshot: workflow2,
-      });
-
-      const { runs, total } = await store.getWorkflowRuns();
-      expect(runs).toHaveLength(2);
-      expect(total).toBe(2);
-      expect(runs[0]!.workflowName).toBe(workflowName2); // Most recent first
-      expect(runs[1]!.workflowName).toBe(workflowName1);
-      const firstSnapshot = runs[0]!.snapshot;
-      const secondSnapshot = runs[1]!.snapshot;
-      checkWorkflowSnapshot(firstSnapshot, stepId2, 'suspended');
-      checkWorkflowSnapshot(secondSnapshot, stepId1, 'success');
-    });
-
-    it('filters by workflow name', async () => {
-      const workflowName1 = 'filter_test_1';
-      const workflowName2 = 'filter_test_2';
-
-      const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('success');
-      const { snapshot: workflow2, runId: runId2 } = createSampleWorkflowSnapshot('failed');
-
-      await store.persistWorkflowSnapshot({
-        workflowName: workflowName1,
-        runId: runId1,
-        snapshot: workflow1,
-      });
-      await new Promise(resolve => setTimeout(resolve, 10)); // Small delay to ensure different timestamps
-      await store.persistWorkflowSnapshot({
-        workflowName: workflowName2,
-        runId: runId2,
-        snapshot: workflow2,
-      });
-
-      const { runs, total } = await store.getWorkflowRuns({
-        workflowName: workflowName1,
-      });
-      expect(runs).toHaveLength(1);
-      expect(total).toBe(1);
-      expect(runs[0]!.workflowName).toBe(workflowName1);
-      const snapshot = runs[0]!.snapshot;
-      checkWorkflowSnapshot(snapshot, stepId1, 'success');
-    });
-
-    it('filters by date range', async () => {
-      const now = new Date();
-      const yesterday = new Date(now.getTime() - 24 * 60 * 60 * 1000);
-      const twoDaysAgo = new Date(now.getTime() - 2 * 24 * 60 * 60 * 1000);
-      const workflowName1 = 'date_test_1';
-      const workflowName2 = 'date_test_2';
-      const workflowName3 = 'date_test_3';
-
-      const { snapshot: workflow1, runId: runId1 } = createSampleWorkflowSnapshot('success');
-      const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('suspended');
-      const { snapshot: workflow3, runId: runId3, stepId: stepId3 } = createSampleWorkflowSnapshot('failed');
-
-      await store.insert({
-        tableName: TABLE_WORKFLOW_SNAPSHOT,
-        record: {
-          workflow_name: workflowName1,
-          run_id: runId1,
-          snapshot: workflow1,
-          createdAt: twoDaysAgo,
-          updatedAt: twoDaysAgo,
-        },
-      });
-      await store.insert({
-        tableName: TABLE_WORKFLOW_SNAPSHOT,
-        record: {
-          workflow_name: workflowName2,
-          run_id: runId2,
-          snapshot: workflow2,
-          createdAt: yesterday,
-          updatedAt: yesterday,
-        },
-      });
-      await store.insert({
-        tableName: TABLE_WORKFLOW_SNAPSHOT,
-        record: {
-          workflow_name: workflowName3,
-          run_id: runId3,
-          snapshot: workflow3,
-          createdAt: now,
-          updatedAt: now,
-        },
-      });
-
-      const { runs } = await store.getWorkflowRuns({
-        fromDate: yesterday,
-        toDate: now,
-      });
-
-      expect(runs).toHaveLength(2);
-      expect(runs[0]!.workflowName).toBe(workflowName3);
-      expect(runs[1]!.workflowName).toBe(workflowName2);
-      const firstSnapshot = runs[0]!.snapshot;
-      const secondSnapshot = runs[1]!.snapshot;
-      checkWorkflowSnapshot(firstSnapshot, stepId3, 'failed');
-      checkWorkflowSnapshot(secondSnapshot, stepId2, 'suspended');
-    });
-
-    it('handles pagination', async () => {
-      const workflowName1 = 'page_test_1';
-      const workflowName2 = 'page_test_2';
-      const workflowName3 = 'page_test_3';
-
-      const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('success');
-      const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('suspended');
-      const { snapshot: workflow3, runId: runId3, stepId: stepId3 } = createSampleWorkflowSnapshot('failed');
-
-      await store.persistWorkflowSnapshot({
-        workflowName: workflowName1,
-        runId: runId1,
-        snapshot: workflow1,
-      });
-      await new Promise(resolve => setTimeout(resolve, 10)); // Small delay to ensure different timestamps
-      await store.persistWorkflowSnapshot({
-        workflowName: workflowName2,
-        runId: runId2,
-        snapshot: workflow2,
-      });
-      await new Promise(resolve => setTimeout(resolve, 10)); // Small delay to ensure different timestamps
-      await store.persistWorkflowSnapshot({
-        workflowName: workflowName3,
-        runId: runId3,
-        snapshot: workflow3,
-      });
-
-      // Get first page
-      const page1 = await store.getWorkflowRuns({
-        limit: 2,
-        offset: 0,
-      });
-      expect(page1.runs).toHaveLength(2);
-      expect(page1.total).toBe(3); // Total count of all records
-      expect(page1.runs[0]!.workflowName).toBe(workflowName3);
-      expect(page1.runs[1]!.workflowName).toBe(workflowName2);
-      const firstSnapshot = page1.runs[0]!.snapshot;
-      const secondSnapshot = page1.runs[1]!.snapshot;
-      checkWorkflowSnapshot(firstSnapshot, stepId3, 'failed');
-      checkWorkflowSnapshot(secondSnapshot, stepId2, 'suspended');
-
-      // Get second page
-      const page2 = await store.getWorkflowRuns({
-        limit: 2,
-        offset: 2,
-      });
-      expect(page2.runs).toHaveLength(1);
-      expect(page2.total).toBe(3);
-      expect(page2.runs[0]!.workflowName).toBe(workflowName1);
-      const snapshot = page2.runs[0]!.snapshot!;
-      checkWorkflowSnapshot(snapshot, stepId1, 'success');
-    }, 10e3);
-  });
-  describe('getWorkflowRunById', () => {
-    const workflowName = 'workflow-id-test';
-    let runId: string;
-    let stepId: string;
-
-    beforeEach(async () => {
-      // Insert a workflow run for positive test
-      const sample = createSampleWorkflowSnapshot('success');
-      runId = sample.runId;
-      stepId = sample.stepId;
-      await store.insert({
-        tableName: TABLE_WORKFLOW_SNAPSHOT,
-        record: {
-          workflow_name: workflowName,
-          run_id: runId,
-          resourceId: 'resource-abc',
-          snapshot: sample.snapshot,
-          createdAt: new Date(),
-          updatedAt: new Date(),
-        },
-      });
-    });
-
-    it('should retrieve a workflow run by ID', async () => {
-      const found = await store.getWorkflowRunById({
-        runId,
-        workflowName,
-      });
-      expect(found).not.toBeNull();
-      expect(found?.runId).toBe(runId);
-      checkWorkflowSnapshot(found?.snapshot!, stepId, 'success');
-    });
-
-    it('should return null for non-existent workflow run ID', async () => {
-      const notFound = await store.getWorkflowRunById({
-        runId: 'non-existent-id',
-        workflowName,
-      });
-      expect(notFound).toBeNull();
-    });
-  });
-  describe('getWorkflowRuns with resourceId', () => {
-    const workflowName = 'workflow-id-test';
-    let resourceId: string;
-    let runIds: string[] = [];
-
-    beforeEach(async () => {
-      // Insert multiple workflow runs for the same resourceId
-      resourceId = 'resource-shared';
-      for (const status of ['completed', 'running']) {
-        const sample = createSampleWorkflowSnapshot(status as WorkflowRunState['context']['steps']['status']);
-        runIds.push(sample.runId);
-        await store.insert({
-          tableName: TABLE_WORKFLOW_SNAPSHOT,
-          record: {
-            workflow_name: workflowName,
-            run_id: sample.runId,
-            resourceId,
-            snapshot: sample.snapshot,
-            createdAt: new Date(),
-            updatedAt: new Date(),
-          },
-        });
-      }
-      // Insert a run with a different resourceId
-      const other = createSampleWorkflowSnapshot('suspended');
-      await store.insert({
-        tableName: TABLE_WORKFLOW_SNAPSHOT,
-        record: {
-          workflow_name: workflowName,
-          run_id: other.runId,
-          resourceId: 'resource-other',
-          snapshot: other.snapshot,
-          createdAt: new Date(),
-          updatedAt: new Date(),
-        },
-      });
-    });
-
-    it('should retrieve all workflow runs by resourceId', async () => {
-      const { runs } = await store.getWorkflowRuns({
-        resourceId,
-        workflowName,
-      });
-      expect(Array.isArray(runs)).toBe(true);
-      expect(runs.length).toBeGreaterThanOrEqual(2);
-      for (const run of runs) {
-        expect(run.resourceId).toBe(resourceId);
-      }
-    });
-
-    it('should return an empty array if no workflow runs match resourceId', async () => {
-      const { runs } = await store.getWorkflowRuns({
-        resourceId: 'non-existent-resource',
-        workflowName,
-      });
-      expect(Array.isArray(runs)).toBe(true);
-      expect(runs.length).toBe(0);
-    });
-  });
-
-  describe('hasColumn', () => {
-    const tempTable = 'temp_test_table';
-
-    beforeEach(async () => {
-      // Always try to drop the table before each test, ignore errors if it doesn't exist
-      try {
-        await store['db'].query({ query: `DROP TABLE IF EXISTS ${tempTable}` });
-      } catch {
-        /* ignore */
-      }
-    });
-
-    it('returns true if the column exists', async () => {
-      await store['db'].query({
-        query: `CREATE TABLE temp_test_table (
-          id UInt64,
-          resourceId String
-        ) ENGINE = MergeTree()
-        ORDER BY id
-        `,
-      });
-      expect(await store['hasColumn'](tempTable, 'resourceId')).toBe(true);
-    });
-
-    it('returns false if the column does not exist', async () => {
-      await store['db'].query({
-        query: `CREATE TABLE temp_test_table (
-          id UInt64,
-        ) ENGINE = MergeTree()
-        ORDER BY id
-        `,
-      });
-      expect(await store['hasColumn'](tempTable, 'resourceId')).toBe(false);
-    });
-
-    afterEach(async () => {
-      // Clean up after each test
-      try {
-        await store['db'].query({ query: `DROP TABLE IF EXISTS ${tempTable}` });
-      } catch {
-        /* ignore */
-      }
-    });
-  });
-
-  describe('alterTable', () => {
-    const TEST_TABLE = 'test_alter_table';
-    const BASE_SCHEMA = {
-      id: { type: 'integer', primaryKey: true, nullable: false },
-      name: { type: 'text', nullable: true },
-      createdAt: { type: 'timestamp', nullable: false },
-      updatedAt: { type: 'timestamp', nullable: false },
-    } as Record<string, StorageColumn>;
-
-    TABLE_ENGINES[TEST_TABLE] = 'MergeTree()';
-
-    beforeEach(async () => {
-      await store.createTable({ tableName: TEST_TABLE as TABLE_NAMES, schema: BASE_SCHEMA });
-    });
-
-    afterEach(async () => {
-      await store.clearTable({ tableName: TEST_TABLE as TABLE_NAMES });
-    });
-
-    it('adds a new column to an existing table', async () => {
-      await store.alterTable({
-        tableName: TEST_TABLE as TABLE_NAMES,
-        schema: { ...BASE_SCHEMA, age: { type: 'integer', nullable: true } },
-        ifNotExists: ['age'],
-      });
-
-      await store.insert({
-        tableName: TEST_TABLE as TABLE_NAMES,
-        record: { id: 1, name: 'Alice', age: 42, createdAt: new Date(), updatedAt: new Date() },
-      });
-
-      const row = await store.load<{ id: string; name: string; age?: number }>({
-        tableName: TEST_TABLE as TABLE_NAMES,
-        keys: { id: '1' },
-      });
-      expect(row?.age).toBe(42);
-    });
-
-    it('is idempotent when adding an existing column', async () => {
-      await store.alterTable({
-        tableName: TEST_TABLE as TABLE_NAMES,
-        schema: { ...BASE_SCHEMA, foo: { type: 'text', nullable: true } },
-        ifNotExists: ['foo'],
-      });
-      // Add the column again (should not throw)
-      await expect(
-        store.alterTable({
-          tableName: TEST_TABLE as TABLE_NAMES,
-          schema: { ...BASE_SCHEMA, foo: { type: 'text', nullable: true } },
-          ifNotExists: ['foo'],
-        }),
-      ).resolves.not.toThrow();
-    });
-
-    it('should add a default value to a column when using not null', async () => {
-      await store.insert({
-        tableName: TEST_TABLE as TABLE_NAMES,
-        record: { id: 1, name: 'Bob', createdAt: new Date(), updatedAt: new Date() },
-      });
-
-      await expect(
-        store.alterTable({
-          tableName: TEST_TABLE as TABLE_NAMES,
-          schema: { ...BASE_SCHEMA, text_column: { type: 'text', nullable: false } },
-          ifNotExists: ['text_column'],
-        }),
-      ).resolves.not.toThrow();
-
-      await expect(
-        store.alterTable({
-          tableName: TEST_TABLE as TABLE_NAMES,
-          schema: { ...BASE_SCHEMA, timestamp_column: { type: 'timestamp', nullable: false } },
-          ifNotExists: ['timestamp_column'],
-        }),
-      ).resolves.not.toThrow();
-
-      await expect(
-        store.alterTable({
-          tableName: TEST_TABLE as TABLE_NAMES,
-          schema: { ...BASE_SCHEMA, bigint_column: { type: 'bigint', nullable: false } },
-          ifNotExists: ['bigint_column'],
-        }),
-      ).resolves.not.toThrow();
-
-      await expect(
-        store.alterTable({
-          tableName: TEST_TABLE as TABLE_NAMES,
-          schema: { ...BASE_SCHEMA, jsonb_column: { type: 'jsonb', nullable: false } },
-          ifNotExists: ['jsonb_column'],
-        }),
-      ).resolves.not.toThrow();
-    });
-  });
+const storage = new ClickhouseStore(TEST_CONFIG);
 
-  afterAll(async () => {
-    await store.close();
-  });
-});
+createTestSuite(storage);
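
Taken together, the hunks above reduce the Clickhouse test entry point to a thin wrapper: the hand-written describe/it blocks are removed and the file now only instantiates a store and hands it to the shared suite from @internal/storage-test-utils. Reassembled from the added and context lines of this diff (with the commented-out ttl block elided, and comments added here for orientation), the resulting package/src/storage/index.test.ts reads roughly as follows:

import { createTestSuite } from '@internal/storage-test-utils';
import { vi } from 'vitest';
import { ClickhouseStore } from '.';
import type { ClickhouseConfig } from '.';

// Generous timeouts, since the suite runs against a real ClickHouse instance.
vi.setConfig({ testTimeout: 60_000, hookTimeout: 60_000 });

// Connection settings come from the environment, with local-development fallbacks.
const TEST_CONFIG: ClickhouseConfig = {
  url: process.env.CLICKHOUSE_URL || 'http://localhost:8123',
  username: process.env.CLICKHOUSE_USERNAME || 'default',
  password: process.env.CLICKHOUSE_PASSWORD || 'password',
};

// The shared suite, rather than this file, now defines the actual tests;
// this file only supplies the store under test.
const storage = new ClickhouseStore(TEST_CONFIG);

createTestSuite(storage);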