@mastra/clickhouse 0.0.0-vnextWorkflows-20250422142014 → 0.0.0-workflow-deno-20250616130925

This diff shows the contents of publicly released versions of this package as they appear in their public registries. It is provided for informational purposes only and reflects the changes between the two versions.
@@ -1,11 +1,20 @@
  import { randomUUID } from 'crypto';
+ import {
+ createSampleMessageV1,
+ createSampleThread,
+ createSampleWorkflowSnapshot,
+ checkWorkflowSnapshot,
+ } from '@internal/storage-test-utils';
+ import type { MastraMessageV1, StorageColumn, WorkflowRunState } from '@mastra/core';
+ import type { TABLE_NAMES } from '@mastra/core/storage';
  import { TABLE_THREADS, TABLE_MESSAGES, TABLE_WORKFLOW_SNAPSHOT } from '@mastra/core/storage';
- import type { WorkflowRunState } from '@mastra/core/workflows';
- import { describe, it, expect, beforeAll, beforeEach, afterAll } from 'vitest';
+ import { describe, it, expect, beforeAll, beforeEach, afterAll, vi, afterEach } from 'vitest';

- import { ClickhouseStore } from '.';
+ import { ClickhouseStore, TABLE_ENGINES } from '.';
  import type { ClickhouseConfig } from '.';

+ vi.setConfig({ testTimeout: 60_000, hookTimeout: 60_000 });
+
  const TEST_CONFIG: ClickhouseConfig = {
  url: process.env.CLICKHOUSE_URL || 'http://localhost:8123',
  username: process.env.CLICKHOUSE_USERNAME || 'default',
@@ -22,26 +31,6 @@ const TEST_CONFIG: ClickhouseConfig = {
  },
  };

- // Sample test data factory functions
- const createSampleThread = () => ({
- id: `thread-${randomUUID()}`,
- resourceId: `resource-${randomUUID()}`,
- title: 'Test Thread',
- createdAt: new Date(),
- updatedAt: new Date(),
- metadata: { key: 'value' },
- });
-
- const createSampleMessage = (threadId: string, createdAt: Date = new Date()) =>
- ({
- id: `msg-${randomUUID()}`,
- role: 'user',
- type: 'text',
- threadId,
- content: [{ type: 'text', text: 'Hello' }],
- createdAt,
- }) as any;
-
  const createSampleTrace = () => ({
  id: `trace-${randomUUID()}`,
  name: 'Test Trace',
@@ -57,31 +46,6 @@ const createSampleEval = () => ({
  createdAt: new Date(),
  });

- const createSampleWorkflowSnapshot = (status: string, createdAt?: Date) => {
- const runId = `run-${randomUUID()}`;
- const stepId = `step-${randomUUID()}`;
- const timestamp = createdAt || new Date();
- const snapshot = {
- result: { success: true },
- value: {},
- context: {
- steps: {
- [stepId]: {
- status,
- payload: {},
- error: undefined,
- },
- },
- triggerData: {},
- attempts: {},
- },
- activePaths: [],
- runId,
- timestamp: timestamp.getTime(),
- } as WorkflowRunState;
- return { snapshot, runId, stepId };
- };
-
  describe('ClickhouseStore', () => {
  let store: ClickhouseStore;

@@ -154,7 +118,10 @@ describe('ClickhouseStore', () => {
  await store.saveThread({ thread });

  // Add some messages
- const messages = [createSampleMessage(thread.id), createSampleMessage(thread.id)];
+ const messages = [
+ createSampleMessageV1({ threadId: thread.id, resourceId: 'clickhouse-test' }),
+ createSampleMessageV1({ threadId: thread.id, resourceId: 'clickhouse-test' }),
+ ];
  await store.saveMessages({ messages });

  await store.deleteThread({ threadId: thread.id });
@@ -166,6 +133,28 @@ describe('ClickhouseStore', () => {
  const retrievedMessages = await store.getMessages({ threadId: thread.id });
  expect(retrievedMessages).toHaveLength(0);
  }, 10e3);
+
+ it('should update thread updatedAt when a message is saved to it', async () => {
+ const thread = createSampleThread();
+ await store.saveThread({ thread });
+
+ // Get the initial thread to capture the original updatedAt
+ const initialThread = await store.getThreadById({ threadId: thread.id });
+ expect(initialThread).toBeDefined();
+ const originalUpdatedAt = initialThread!.updatedAt;
+
+ // Wait a small amount to ensure different timestamp
+ await new Promise(resolve => setTimeout(resolve, 10));
+
+ // Create and save a message to the thread
+ const message = createSampleMessageV1({ threadId: thread.id });
+ await store.saveMessages({ messages: [message] });
+
+ // Retrieve the thread again and check that updatedAt was updated
+ const updatedThread = await store.getThreadById({ threadId: thread.id });
+ expect(updatedThread).toBeDefined();
+ expect(updatedThread!.updatedAt.getTime()).toBeGreaterThan(originalUpdatedAt.getTime());
+ }, 10e3);
  });

  describe('Message Operations', () => {
@@ -174,8 +163,12 @@ describe('ClickhouseStore', () => {
  await store.saveThread({ thread });

  const messages = [
- createSampleMessage(thread.id, new Date(Date.now() - 1000 * 60 * 60 * 24)),
- createSampleMessage(thread.id),
+ createSampleMessageV1({
+ threadId: thread.id,
+ createdAt: new Date(Date.now() - 1000 * 60 * 60 * 24),
+ resourceId: 'clickhouse-test',
+ }),
+ createSampleMessageV1({ threadId: thread.id, resourceId: 'clickhouse-test' }),
  ];

  // Save messages
@@ -185,7 +178,11 @@ describe('ClickhouseStore', () => {
  // Retrieve messages
  const retrievedMessages = await store.getMessages({ threadId: thread.id });
  expect(retrievedMessages).toHaveLength(2);
- expect(retrievedMessages).toEqual(expect.arrayContaining(messages));
+ const checkMessages = messages.map(m => {
+ const { resourceId, ...rest } = m;
+ return rest;
+ });
+ expect(retrievedMessages).toEqual(expect.arrayContaining(checkMessages));
  }, 10e3);

  it('should handle empty message array', async () => {
@@ -199,18 +196,33 @@ describe('ClickhouseStore', () => {

  const messages = [
  {
- ...createSampleMessage(thread.id, new Date(Date.now() - 1000 * 3)),
- content: [{ type: 'text', text: 'First' }],
+ ...createSampleMessageV1({
+ threadId: thread.id,
+ createdAt: new Date(Date.now() - 1000 * 3),
+ content: 'First',
+ resourceId: 'clickhouse-test',
+ }),
+ role: 'user',
  },
  {
- ...createSampleMessage(thread.id, new Date(Date.now() - 1000 * 2)),
- content: [{ type: 'text', text: 'Second' }],
+ ...createSampleMessageV1({
+ threadId: thread.id,
+ createdAt: new Date(Date.now() - 1000 * 2),
+ content: 'Second',
+ resourceId: 'clickhouse-test',
+ }),
+ role: 'assistant',
  },
  {
- ...createSampleMessage(thread.id, new Date(Date.now() - 1000 * 1)),
- content: [{ type: 'text', text: 'Third' }],
+ ...createSampleMessageV1({
+ threadId: thread.id,
+ createdAt: new Date(Date.now() - 1000 * 1),
+ content: 'Third',
+ resourceId: 'clickhouse-test',
+ }),
+ role: 'user',
  },
- ];
+ ] as MastraMessageV1[];

  await store.saveMessages({ messages });

@@ -219,17 +231,112 @@ describe('ClickhouseStore', () => {

  // Verify order is maintained
  retrievedMessages.forEach((msg, idx) => {
+ // @ts-expect-error
  expect(msg.content[0].text).toBe(messages[idx].content[0].text);
  });
  }, 10e3);

+ // it('should retrieve messages w/ next/prev messages by message id + resource id', async () => {
+ // const messages: MastraMessageV2[] = [
+ // createSampleMessageV2({ threadId: 'thread-one', content: 'First', resourceId: 'cross-thread-resource' }),
+ // createSampleMessageV2({ threadId: 'thread-one', content: 'Second', resourceId: 'cross-thread-resource' }),
+ // createSampleMessageV2({ threadId: 'thread-one', content: 'Third', resourceId: 'cross-thread-resource' }),
+
+ // createSampleMessageV2({ threadId: 'thread-two', content: 'Fourth', resourceId: 'cross-thread-resource' }),
+ // createSampleMessageV2({ threadId: 'thread-two', content: 'Fifth', resourceId: 'cross-thread-resource' }),
+ // createSampleMessageV2({ threadId: 'thread-two', content: 'Sixth', resourceId: 'cross-thread-resource' }),
+
+ // createSampleMessageV2({ threadId: 'thread-three', content: 'Seventh', resourceId: 'other-resource' }),
+ // createSampleMessageV2({ threadId: 'thread-three', content: 'Eighth', resourceId: 'other-resource' }),
+ // ];
+
+ // await store.saveMessages({ messages: messages, format: 'v2' });
+
+ // const retrievedMessages = await store.getMessages({ threadId: 'thread-one', format: 'v2' });
+ // expect(retrievedMessages).toHaveLength(3);
+ // expect(retrievedMessages.map((m: any) => m.content.parts[0].text)).toEqual(['First', 'Second', 'Third']);
+
+ // const retrievedMessages2 = await store.getMessages({ threadId: 'thread-two', format: 'v2' });
+ // expect(retrievedMessages2).toHaveLength(3);
+ // expect(retrievedMessages2.map((m: any) => m.content.parts[0].text)).toEqual(['Fourth', 'Fifth', 'Sixth']);
+
+ // const retrievedMessages3 = await store.getMessages({ threadId: 'thread-three', format: 'v2' });
+ // expect(retrievedMessages3).toHaveLength(2);
+ // expect(retrievedMessages3.map((m: any) => m.content.parts[0].text)).toEqual(['Seventh', 'Eighth']);
+
+ // const crossThreadMessages = await store.getMessages({
+ // threadId: 'thread-doesnt-exist',
+ // resourceId: 'cross-thread-resource',
+ // format: 'v2',
+ // selectBy: {
+ // last: 0,
+ // include: [
+ // {
+ // id: messages[1].id,
+ // withNextMessages: 2,
+ // withPreviousMessages: 2,
+ // },
+ // {
+ // id: messages[4].id,
+ // withPreviousMessages: 2,
+ // withNextMessages: 2,
+ // },
+ // ],
+ // },
+ // });
+
+ // expect(crossThreadMessages).toHaveLength(6);
+ // expect(crossThreadMessages.filter(m => m.threadId === `thread-one`)).toHaveLength(3);
+ // expect(crossThreadMessages.filter(m => m.threadId === `thread-two`)).toHaveLength(3);
+
+ // const crossThreadMessages2 = await store.getMessages({
+ // threadId: 'thread-one',
+ // resourceId: 'cross-thread-resource',
+ // format: 'v2',
+ // selectBy: {
+ // last: 0,
+ // include: [
+ // {
+ // id: messages[4].id,
+ // withPreviousMessages: 1,
+ // withNextMessages: 30,
+ // },
+ // ],
+ // },
+ // });
+
+ // expect(crossThreadMessages2).toHaveLength(3);
+ // expect(crossThreadMessages2.filter(m => m.threadId === `thread-one`)).toHaveLength(0);
+ // expect(crossThreadMessages2.filter(m => m.threadId === `thread-two`)).toHaveLength(3);
+
+ // const crossThreadMessages3 = await store.getMessages({
+ // threadId: 'thread-two',
+ // resourceId: 'cross-thread-resource',
+ // format: 'v2',
+ // selectBy: {
+ // last: 0,
+ // include: [
+ // {
+ // id: messages[1].id,
+ // withNextMessages: 1,
+ // withPreviousMessages: 1,
+ // },
+ // ],
+ // },
+ // });
+
+ // expect(crossThreadMessages3).toHaveLength(3);
+ // expect(crossThreadMessages3.filter(m => m.threadId === `thread-one`)).toHaveLength(3);
+ // expect(crossThreadMessages3.filter(m => m.threadId === `thread-two`)).toHaveLength(0);
+ // });
+
  // it('should rollback on error during message save', async () => {
  // const thread = createSampleThread();
  // await store.saveThread({ thread });

  // const messages = [
- // createSampleMessage(thread.id),
- // { ...createSampleMessage(thread.id), id: null }, // This will cause an error
+ // createSampleMessageV1({ threadId: thread.id }),
+ // { ...createSampleMessageV1({ threadId: thread.id }), id: null }, // This will cause an error
  // ];

  // await expect(store.saveMessages({ messages })).rejects.toThrow();
@@ -352,11 +459,14 @@ describe('ClickhouseStore', () => {
  const snapshot = {
  status: 'running',
  context: {
- stepResults: {},
- attempts: {},
- triggerData: { type: 'manual' },
+ input: { type: 'manual' },
  },
- } as any;
+ value: {},
+ activePaths: [],
+ suspendedPaths: {},
+ runId,
+ timestamp: new Date().getTime(),
+ } as unknown as WorkflowRunState;

  await store.persistWorkflowSnapshot({
  workflowName,
@@ -387,28 +497,33 @@ describe('ClickhouseStore', () => {
  const initialSnapshot = {
  status: 'running',
  context: {
- stepResults: {},
- attempts: {},
- triggerData: { type: 'manual' },
+ input: { type: 'manual' },
  },
- };
+ value: {},
+ activePaths: [],
+ suspendedPaths: {},
+ runId,
+ timestamp: new Date().getTime(),
+ } as unknown as WorkflowRunState;

  await store.persistWorkflowSnapshot({
  workflowName,
  runId,
- snapshot: initialSnapshot as any,
+ snapshot: initialSnapshot,
  });

  const updatedSnapshot = {
  status: 'completed',
  context: {
- stepResults: {
- 'step-1': { status: 'success', result: { data: 'test' } },
- },
- attempts: { 'step-1': 1 },
- triggerData: { type: 'manual' },
+ input: { type: 'manual' },
+ 'step-1': { status: 'success', result: { data: 'test' } },
  },
- } as any;
+ value: {},
+ activePaths: [],
+ suspendedPaths: {},
+ runId,
+ timestamp: new Date().getTime(),
+ } as unknown as WorkflowRunState;

  await store.persistWorkflowSnapshot({
  workflowName,
@@ -430,24 +545,21 @@ describe('ClickhouseStore', () => {
  const complexSnapshot = {
  value: { currentState: 'running' },
  context: {
- stepResults: {
- 'step-1': {
- status: 'success',
- result: {
- nestedData: {
- array: [1, 2, 3],
- object: { key: 'value' },
- date: new Date().toISOString(),
- },
+ 'step-1': {
+ status: 'success',
+ output: {
+ nestedData: {
+ array: [1, 2, 3],
+ object: { key: 'value' },
+ date: new Date().toISOString(),
  },
  },
- 'step-2': {
- status: 'waiting',
- dependencies: ['step-3', 'step-4'],
- },
  },
- attempts: { 'step-1': 1, 'step-2': 0 },
- triggerData: {
+ 'step-2': {
+ status: 'waiting',
+ dependencies: ['step-3', 'step-4'],
+ },
+ input: {
  type: 'scheduled',
  metadata: {
  schedule: '0 0 * * *',
  schedule: '0 0 * * *',
@@ -467,14 +579,15 @@ describe('ClickhouseStore', () => {
467
579
  status: 'waiting',
468
580
  },
469
581
  ],
582
+ suspendedPaths: {},
470
583
  runId: runId,
471
584
  timestamp: Date.now(),
472
- };
585
+ } as unknown as WorkflowRunState;
473
586
 
474
587
  await store.persistWorkflowSnapshot({
475
588
  workflowName,
476
589
  runId,
477
- snapshot: complexSnapshot as WorkflowRunState,
590
+ snapshot: complexSnapshot,
478
591
  });
479
592
 
480
593
  const loadedSnapshot = await store.loadWorkflowSnapshot({
@@ -500,8 +613,8 @@ describe('ClickhouseStore', () => {
500
613
  const workflowName1 = 'default_test_1';
501
614
  const workflowName2 = 'default_test_2';
502
615
 
503
- const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('completed');
504
- const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('running');
616
+ const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('success');
617
+ const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('suspended');
505
618
 
506
619
  await store.persistWorkflowSnapshot({
507
620
  workflowName: workflowName1,
@@ -520,17 +633,17 @@ describe('ClickhouseStore', () => {
520
633
  expect(total).toBe(2);
521
634
  expect(runs[0]!.workflowName).toBe(workflowName2); // Most recent first
522
635
  expect(runs[1]!.workflowName).toBe(workflowName1);
523
- const firstSnapshot = runs[0]!.snapshot as WorkflowRunState;
524
- const secondSnapshot = runs[1]!.snapshot as WorkflowRunState;
525
- expect(firstSnapshot.context?.steps[stepId2]?.status).toBe('running');
526
- expect(secondSnapshot.context?.steps[stepId1]?.status).toBe('completed');
636
+ const firstSnapshot = runs[0]!.snapshot;
637
+ const secondSnapshot = runs[1]!.snapshot;
638
+ checkWorkflowSnapshot(firstSnapshot, stepId2, 'suspended');
639
+ checkWorkflowSnapshot(secondSnapshot, stepId1, 'success');
527
640
  });
528
641
 
529
642
  it('filters by workflow name', async () => {
530
643
  const workflowName1 = 'filter_test_1';
531
644
  const workflowName2 = 'filter_test_2';
532
645
 
533
- const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('completed');
646
+ const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('success');
534
647
  const { snapshot: workflow2, runId: runId2 } = createSampleWorkflowSnapshot('failed');
535
648
 
536
649
  await store.persistWorkflowSnapshot({
@@ -551,8 +664,8 @@ describe('ClickhouseStore', () => {
551
664
  expect(runs).toHaveLength(1);
552
665
  expect(total).toBe(1);
553
666
  expect(runs[0]!.workflowName).toBe(workflowName1);
554
- const snapshot = runs[0]!.snapshot as WorkflowRunState;
555
- expect(snapshot.context?.steps[stepId1]?.status).toBe('completed');
667
+ const snapshot = runs[0]!.snapshot;
668
+ checkWorkflowSnapshot(snapshot, stepId1, 'success');
556
669
  });
557
670
 
558
671
  it('filters by date range', async () => {
@@ -563,9 +676,9 @@ describe('ClickhouseStore', () => {
563
676
  const workflowName2 = 'date_test_2';
564
677
  const workflowName3 = 'date_test_3';
565
678
 
566
- const { snapshot: workflow1, runId: runId1 } = createSampleWorkflowSnapshot('completed');
567
- const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('running');
568
- const { snapshot: workflow3, runId: runId3, stepId: stepId3 } = createSampleWorkflowSnapshot('waiting');
679
+ const { snapshot: workflow1, runId: runId1 } = createSampleWorkflowSnapshot('success');
680
+ const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('suspended');
681
+ const { snapshot: workflow3, runId: runId3, stepId: stepId3 } = createSampleWorkflowSnapshot('failed');
569
682
 
570
683
  await store.insert({
571
684
  tableName: TABLE_WORKFLOW_SNAPSHOT,
@@ -606,10 +719,10 @@ describe('ClickhouseStore', () => {
606
719
  expect(runs).toHaveLength(2);
607
720
  expect(runs[0]!.workflowName).toBe(workflowName3);
608
721
  expect(runs[1]!.workflowName).toBe(workflowName2);
609
- const firstSnapshot = runs[0]!.snapshot as WorkflowRunState;
610
- const secondSnapshot = runs[1]!.snapshot as WorkflowRunState;
611
- expect(firstSnapshot.context?.steps[stepId3]?.status).toBe('waiting');
612
- expect(secondSnapshot.context?.steps[stepId2]?.status).toBe('running');
722
+ const firstSnapshot = runs[0]!.snapshot;
723
+ const secondSnapshot = runs[1]!.snapshot;
724
+ checkWorkflowSnapshot(firstSnapshot, stepId3, 'failed');
725
+ checkWorkflowSnapshot(secondSnapshot, stepId2, 'suspended');
613
726
  });
614
727
 
615
728
  it('handles pagination', async () => {
@@ -617,9 +730,9 @@ describe('ClickhouseStore', () => {
617
730
  const workflowName2 = 'page_test_2';
618
731
  const workflowName3 = 'page_test_3';
619
732
 
620
- const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('completed');
621
- const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('running');
622
- const { snapshot: workflow3, runId: runId3, stepId: stepId3 } = createSampleWorkflowSnapshot('waiting');
733
+ const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('success');
734
+ const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('suspended');
735
+ const { snapshot: workflow3, runId: runId3, stepId: stepId3 } = createSampleWorkflowSnapshot('failed');
623
736
 
624
737
  await store.persistWorkflowSnapshot({
625
738
  workflowName: workflowName1,
@@ -648,10 +761,10 @@ describe('ClickhouseStore', () => {
648
761
  expect(page1.total).toBe(3); // Total count of all records
649
762
  expect(page1.runs[0]!.workflowName).toBe(workflowName3);
650
763
  expect(page1.runs[1]!.workflowName).toBe(workflowName2);
651
- const firstSnapshot = page1.runs[0]!.snapshot as WorkflowRunState;
652
- const secondSnapshot = page1.runs[1]!.snapshot as WorkflowRunState;
653
- expect(firstSnapshot.context?.steps[stepId3]?.status).toBe('waiting');
654
- expect(secondSnapshot.context?.steps[stepId2]?.status).toBe('running');
764
+ const firstSnapshot = page1.runs[0]!.snapshot;
765
+ const secondSnapshot = page1.runs[1]!.snapshot;
766
+ checkWorkflowSnapshot(firstSnapshot, stepId3, 'failed');
767
+ checkWorkflowSnapshot(secondSnapshot, stepId2, 'suspended');
655
768
 
656
769
  // Get second page
657
770
  const page2 = await store.getWorkflowRuns({
@@ -661,10 +774,249 @@ describe('ClickhouseStore', () => {
661
774
  expect(page2.runs).toHaveLength(1);
662
775
  expect(page2.total).toBe(3);
663
776
  expect(page2.runs[0]!.workflowName).toBe(workflowName1);
664
- const snapshot = page2.runs[0]!.snapshot as WorkflowRunState;
665
- expect(snapshot.context?.steps[stepId1]?.status).toBe('completed');
777
+ const snapshot = page2.runs[0]!.snapshot!;
778
+ checkWorkflowSnapshot(snapshot, stepId1, 'success');
666
779
  }, 10e3);
667
780
  });
781
+ describe('getWorkflowRunById', () => {
782
+ const workflowName = 'workflow-id-test';
783
+ let runId: string;
784
+ let stepId: string;
785
+
786
+ beforeEach(async () => {
787
+ // Insert a workflow run for positive test
788
+ const sample = createSampleWorkflowSnapshot('success');
789
+ runId = sample.runId;
790
+ stepId = sample.stepId;
791
+ await store.insert({
792
+ tableName: TABLE_WORKFLOW_SNAPSHOT,
793
+ record: {
794
+ workflow_name: workflowName,
795
+ run_id: runId,
796
+ resourceId: 'resource-abc',
797
+ snapshot: sample.snapshot,
798
+ createdAt: new Date(),
799
+ updatedAt: new Date(),
800
+ },
801
+ });
802
+ });
803
+
804
+ it('should retrieve a workflow run by ID', async () => {
805
+ const found = await store.getWorkflowRunById({
806
+ runId,
807
+ workflowName,
808
+ });
809
+ expect(found).not.toBeNull();
810
+ expect(found?.runId).toBe(runId);
811
+ checkWorkflowSnapshot(found?.snapshot!, stepId, 'success');
812
+ });
813
+
814
+ it('should return null for non-existent workflow run ID', async () => {
815
+ const notFound = await store.getWorkflowRunById({
816
+ runId: 'non-existent-id',
817
+ workflowName,
818
+ });
819
+ expect(notFound).toBeNull();
820
+ });
821
+ });
822
+ describe('getWorkflowRuns with resourceId', () => {
823
+ const workflowName = 'workflow-id-test';
824
+ let resourceId: string;
825
+ let runIds: string[] = [];
826
+
827
+ beforeEach(async () => {
828
+ // Insert multiple workflow runs for the same resourceId
829
+ resourceId = 'resource-shared';
830
+ for (const status of ['completed', 'running']) {
831
+ const sample = createSampleWorkflowSnapshot(status as WorkflowRunState['context']['steps']['status']);
832
+ runIds.push(sample.runId);
833
+ await store.insert({
834
+ tableName: TABLE_WORKFLOW_SNAPSHOT,
835
+ record: {
836
+ workflow_name: workflowName,
837
+ run_id: sample.runId,
838
+ resourceId,
839
+ snapshot: sample.snapshot,
840
+ createdAt: new Date(),
841
+ updatedAt: new Date(),
842
+ },
843
+ });
844
+ }
845
+ // Insert a run with a different resourceId
846
+ const other = createSampleWorkflowSnapshot('suspended');
847
+ await store.insert({
848
+ tableName: TABLE_WORKFLOW_SNAPSHOT,
849
+ record: {
850
+ workflow_name: workflowName,
851
+ run_id: other.runId,
852
+ resourceId: 'resource-other',
853
+ snapshot: other.snapshot,
854
+ createdAt: new Date(),
855
+ updatedAt: new Date(),
856
+ },
857
+ });
858
+ });
859
+
860
+ it('should retrieve all workflow runs by resourceId', async () => {
861
+ const { runs } = await store.getWorkflowRuns({
862
+ resourceId,
863
+ workflowName,
864
+ });
865
+ expect(Array.isArray(runs)).toBe(true);
866
+ expect(runs.length).toBeGreaterThanOrEqual(2);
867
+ for (const run of runs) {
868
+ expect(run.resourceId).toBe(resourceId);
869
+ }
870
+ });
871
+
872
+ it('should return an empty array if no workflow runs match resourceId', async () => {
873
+ const { runs } = await store.getWorkflowRuns({
874
+ resourceId: 'non-existent-resource',
875
+ workflowName,
876
+ });
877
+ expect(Array.isArray(runs)).toBe(true);
878
+ expect(runs.length).toBe(0);
879
+ });
880
+ });
881
+
882
+ describe('hasColumn', () => {
883
+ const tempTable = 'temp_test_table';
884
+
885
+ beforeEach(async () => {
886
+ // Always try to drop the table before each test, ignore errors if it doesn't exist
887
+ try {
888
+ await store['db'].query({ query: `DROP TABLE IF EXISTS ${tempTable}` });
889
+ } catch {
890
+ /* ignore */
891
+ }
892
+ });
893
+
894
+ it('returns true if the column exists', async () => {
895
+ await store['db'].query({
896
+ query: `CREATE TABLE temp_test_table (
897
+ id UInt64,
898
+ resourceId String
899
+ ) ENGINE = MergeTree()
900
+ ORDER BY id
901
+ `,
902
+ });
903
+ expect(await store['hasColumn'](tempTable, 'resourceId')).toBe(true);
904
+ });
905
+
906
+ it('returns false if the column does not exist', async () => {
907
+ await store['db'].query({
908
+ query: `CREATE TABLE temp_test_table (
909
+ id UInt64,
910
+ ) ENGINE = MergeTree()
911
+ ORDER BY id
912
+ `,
913
+ });
914
+ expect(await store['hasColumn'](tempTable, 'resourceId')).toBe(false);
915
+ });
916
+
917
+ afterEach(async () => {
918
+ // Clean up after each test
919
+ try {
920
+ await store['db'].query({ query: `DROP TABLE IF EXISTS ${tempTable}` });
921
+ } catch {
922
+ /* ignore */
923
+ }
924
+ });
925
+ });
926
+
927
+ describe('alterTable', () => {
928
+ const TEST_TABLE = 'test_alter_table';
929
+ const BASE_SCHEMA = {
930
+ id: { type: 'integer', primaryKey: true, nullable: false },
931
+ name: { type: 'text', nullable: true },
932
+ createdAt: { type: 'timestamp', nullable: false },
933
+ updatedAt: { type: 'timestamp', nullable: false },
934
+ } as Record<string, StorageColumn>;
935
+
936
+ TABLE_ENGINES[TEST_TABLE] = 'MergeTree()';
937
+
938
+ beforeEach(async () => {
939
+ await store.createTable({ tableName: TEST_TABLE as TABLE_NAMES, schema: BASE_SCHEMA });
940
+ });
941
+
942
+ afterEach(async () => {
943
+ await store.clearTable({ tableName: TEST_TABLE as TABLE_NAMES });
944
+ });
945
+
946
+ it('adds a new column to an existing table', async () => {
947
+ await store.alterTable({
948
+ tableName: TEST_TABLE as TABLE_NAMES,
949
+ schema: { ...BASE_SCHEMA, age: { type: 'integer', nullable: true } },
950
+ ifNotExists: ['age'],
951
+ });
952
+
953
+ await store.insert({
954
+ tableName: TEST_TABLE as TABLE_NAMES,
955
+ record: { id: 1, name: 'Alice', age: 42, createdAt: new Date(), updatedAt: new Date() },
956
+ });
957
+
958
+ const row = await store.load<{ id: string; name: string; age?: number }>({
959
+ tableName: TEST_TABLE as TABLE_NAMES,
960
+ keys: { id: '1' },
961
+ });
962
+ expect(row?.age).toBe(42);
963
+ });
964
+
965
+ it('is idempotent when adding an existing column', async () => {
966
+ await store.alterTable({
967
+ tableName: TEST_TABLE as TABLE_NAMES,
968
+ schema: { ...BASE_SCHEMA, foo: { type: 'text', nullable: true } },
969
+ ifNotExists: ['foo'],
970
+ });
971
+ // Add the column again (should not throw)
972
+ await expect(
973
+ store.alterTable({
974
+ tableName: TEST_TABLE as TABLE_NAMES,
975
+ schema: { ...BASE_SCHEMA, foo: { type: 'text', nullable: true } },
976
+ ifNotExists: ['foo'],
977
+ }),
978
+ ).resolves.not.toThrow();
979
+ });
980
+
981
+ it('should add a default value to a column when using not null', async () => {
982
+ await store.insert({
983
+ tableName: TEST_TABLE as TABLE_NAMES,
984
+ record: { id: 1, name: 'Bob', createdAt: new Date(), updatedAt: new Date() },
985
+ });
986
+
987
+ await expect(
988
+ store.alterTable({
989
+ tableName: TEST_TABLE as TABLE_NAMES,
990
+ schema: { ...BASE_SCHEMA, text_column: { type: 'text', nullable: false } },
991
+ ifNotExists: ['text_column'],
992
+ }),
993
+ ).resolves.not.toThrow();
994
+
995
+ await expect(
996
+ store.alterTable({
997
+ tableName: TEST_TABLE as TABLE_NAMES,
998
+ schema: { ...BASE_SCHEMA, timestamp_column: { type: 'timestamp', nullable: false } },
999
+ ifNotExists: ['timestamp_column'],
1000
+ }),
1001
+ ).resolves.not.toThrow();
1002
+
1003
+ await expect(
1004
+ store.alterTable({
1005
+ tableName: TEST_TABLE as TABLE_NAMES,
1006
+ schema: { ...BASE_SCHEMA, bigint_column: { type: 'bigint', nullable: false } },
1007
+ ifNotExists: ['bigint_column'],
1008
+ }),
1009
+ ).resolves.not.toThrow();
1010
+
1011
+ await expect(
1012
+ store.alterTable({
1013
+ tableName: TEST_TABLE as TABLE_NAMES,
1014
+ schema: { ...BASE_SCHEMA, jsonb_column: { type: 'jsonb', nullable: false } },
1015
+ ifNotExists: ['jsonb_column'],
1016
+ }),
1017
+ ).resolves.not.toThrow();
1018
+ });
1019
+ });
668
1020
 
669
1021
  afterAll(async () => {
670
1022
  await store.close();