@mastra/clickhouse 0.2.7-alpha.1 → 0.2.7-alpha.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,5 +1,5 @@
- import { MastraStorage, TABLE_SCHEMAS, TABLE_TRACES, TABLE_WORKFLOW_SNAPSHOT, TABLE_THREADS, TABLE_MESSAGES, TABLE_EVALS } from '@mastra/core/storage';
  import { createClient } from '@clickhouse/client';
+ import { MastraStorage, TABLE_EVALS, TABLE_SCHEMAS, TABLE_TRACES, TABLE_WORKFLOW_SNAPSHOT, TABLE_THREADS, TABLE_MESSAGES } from '@mastra/core/storage';

  // src/storage/index.ts
  function safelyParseJSON(jsonString) {
@@ -41,6 +41,7 @@ function transformRow(row) {
  }
  var ClickhouseStore = class extends MastraStorage {
  db;
+ ttl = {};
  constructor(config) {
  super({ name: "ClickhouseStore" });
  this.db = createClient({
@@ -55,9 +56,54 @@ var ClickhouseStore = class extends MastraStorage {
  output_format_json_quote_64bit_integers: 0
  }
  });
+ this.ttl = config.ttl;
  }
- getEvalsByAgentName(_agentName, _type) {
- throw new Error("Method not implemented.");
+ transformEvalRow(row) {
+ row = transformRow(row);
+ const resultValue = JSON.parse(row.result);
+ const testInfoValue = row.test_info ? JSON.parse(row.test_info) : void 0;
+ if (!resultValue || typeof resultValue !== "object" || !("score" in resultValue)) {
+ throw new Error(`Invalid MetricResult format: ${JSON.stringify(resultValue)}`);
+ }
+ return {
+ input: row.input,
+ output: row.output,
+ result: resultValue,
+ agentName: row.agent_name,
+ metricName: row.metric_name,
+ instructions: row.instructions,
+ testInfo: testInfoValue,
+ globalRunId: row.global_run_id,
+ runId: row.run_id,
+ createdAt: row.created_at
+ };
+ }
+ async getEvalsByAgentName(agentName, type) {
+ try {
+ const baseQuery = `SELECT *, toDateTime64(createdAt, 3) as createdAt FROM ${TABLE_EVALS} WHERE agent_name = {var_agent_name:String}`;
+ const typeCondition = type === "test" ? " AND test_info IS NOT NULL AND JSONExtractString(test_info, 'testPath') IS NOT NULL" : type === "live" ? " AND (test_info IS NULL OR JSONExtractString(test_info, 'testPath') IS NULL)" : "";
+ const result = await this.db.query({
+ query: `${baseQuery}${typeCondition} ORDER BY createdAt DESC`,
+ query_params: { var_agent_name: agentName },
+ clickhouse_settings: {
+ date_time_input_format: "best_effort",
+ date_time_output_format: "iso",
+ use_client_time_zone: 1,
+ output_format_json_quote_64bit_integers: 0
+ }
+ });
+ if (!result) {
+ return [];
+ }
+ const rows = await result.json();
+ return rows.data.map((row) => this.transformEvalRow(row));
+ } catch (error) {
+ if (error instanceof Error && error.message.includes("no such table")) {
+ return [];
+ }
+ this.logger.error("Failed to get evals for the specified agent: " + error?.message);
+ throw error;
+ }
  }
  async batchInsert({ tableName, records }) {
  try {
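
This hunk replaces the `getEvalsByAgentName` stub with a real query against `TABLE_EVALS`, with an optional `test`/`live` filter keyed off `test_info.testPath`. A minimal usage sketch (assuming `ClickhouseStore` is exported from the package and a local ClickHouse instance; names outside the diff are illustrative):

```ts
import { ClickhouseStore } from '@mastra/clickhouse';

const store = new ClickhouseStore({
  url: 'http://localhost:8123', // assumed local ClickHouse instance
  username: 'default',
  password: 'password',
});

// All evals recorded for an agent, newest first.
const allEvals = await store.getEvalsByAgentName('my-agent');

// Only evals that carry test_info.testPath, i.e. recorded from test runs.
const testEvals = await store.getEvalsByAgentName('my-agent', 'test');

// Rows are passed through transformEvalRow, so result is a parsed MetricResult.
console.log(allEvals.map(e => ({ runId: e.runId, score: e.result.score })));
```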
@@ -89,7 +135,8 @@ var ClickhouseStore = class extends MastraStorage {
  scope,
  page,
  perPage,
- attributes
+ attributes,
+ filters
  }) {
  const limit = perPage;
  const offset = page * perPage;
@@ -105,8 +152,16 @@ var ClickhouseStore = class extends MastraStorage {
  }
  if (attributes) {
  Object.entries(attributes).forEach(([key, value]) => {
- conditions.push(`JSONExtractString(attributes, '${key}') = {var_${key}:String}`);
- args[`var_${key}`] = value;
+ conditions.push(`JSONExtractString(attributes, '${key}') = {var_attr_${key}:String}`);
+ args[`var_attr_${key}`] = value;
+ });
+ }
+ if (filters) {
+ Object.entries(filters).forEach(([key, value]) => {
+ conditions.push(
+ `${key} = {var_col_${key}:${COLUMN_TYPES[TABLE_SCHEMAS.mastra_traces?.[key]?.type ?? "text"]}}`
+ );
+ args[`var_col_${key}`] = value;
  });
  }
  const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
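
The trace query now accepts a `filters` object alongside `attributes`: `attributes` entries are matched inside the JSON `attributes` column via `JSONExtractString`, while `filters` entries are compared directly against columns of `mastra_traces`, typed from `TABLE_SCHEMAS`. The attribute bind parameters were also renamed to `var_attr_*` so they cannot collide with the new `var_col_*` column parameters. A hedged sketch, assuming a configured `store` (the tests below call this method through `store.__getTraces`) and assuming `traceId` is a column in the traces schema:

```ts
const traces = await store.__getTraces({
  page: 0,
  perPage: 20,
  attributes: { userId: '123' },        // JSONExtractString(attributes, 'userId') = '123'
  filters: { traceId: 'abc-trace-id' }, // direct column equality, typed via TABLE_SCHEMAS
});
```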
@@ -143,6 +198,16 @@ var ClickhouseStore = class extends MastraStorage {
  createdAt: row.createdAt
  }));
  }
+ async optimizeTable({ tableName }) {
+ await this.db.command({
+ query: `OPTIMIZE TABLE ${tableName} FINAL`
+ });
+ }
+ async materializeTtl({ tableName }) {
+ await this.db.command({
+ query: `ALTER TABLE ${tableName} MATERIALIZE TTL;`
+ });
+ }
  async createTable({
  tableName,
  schema
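
Two maintenance helpers are added so TTL expiry can be forced rather than waiting for background merges: `materializeTtl` issues `ALTER TABLE ... MATERIALIZE TTL` and `optimizeTable` issues `OPTIMIZE TABLE ... FINAL` (the new TTL tests below rely on both). A minimal sketch, assuming a configured `store`:

```ts
// Re-evaluate TTL expressions for existing parts, then merge so expired data is dropped.
await store.materializeTtl({ tableName: 'mastra_evals' });
await store.optimizeTable({ tableName: 'mastra_evals' });
```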
@@ -151,8 +216,10 @@ var ClickhouseStore = class extends MastraStorage {
  const columns = Object.entries(schema).map(([name, def]) => {
  const constraints = [];
  if (!def.nullable) constraints.push("NOT NULL");
- return `"${name}" ${COLUMN_TYPES[def.type]} ${constraints.join(" ")}`;
+ const columnTtl = this.ttl?.[tableName]?.columns?.[name];
+ return `"${name}" ${COLUMN_TYPES[def.type]} ${constraints.join(" ")} ${columnTtl ? `TTL toDateTime(${columnTtl.ttlKey ?? "createdAt"}) + INTERVAL ${columnTtl.interval} ${columnTtl.unit}` : ""}`;
  }).join(",\n");
+ const rowTtl = this.ttl?.[tableName]?.row;
  const sql = tableName === TABLE_WORKFLOW_SNAPSHOT ? `
  CREATE TABLE IF NOT EXISTS ${tableName} (
  ${["id String"].concat(columns)}
@@ -161,16 +228,18 @@ var ClickhouseStore = class extends MastraStorage {
  PARTITION BY "createdAt"
  PRIMARY KEY (createdAt, run_id, workflow_name)
  ORDER BY (createdAt, run_id, workflow_name)
- SETTINGS index_granularity = 8192;
+ ${rowTtl ? `TTL toDateTime(${rowTtl.ttlKey ?? "createdAt"}) + INTERVAL ${rowTtl.interval} ${rowTtl.unit}` : ""}
+ SETTINGS index_granularity = 8192
  ` : `
  CREATE TABLE IF NOT EXISTS ${tableName} (
  ${columns}
  )
  ENGINE = ${TABLE_ENGINES[tableName]}
  PARTITION BY "createdAt"
- PRIMARY KEY (createdAt, id)
- ORDER BY (createdAt, id)
- SETTINGS index_granularity = 8192;
+ PRIMARY KEY (createdAt, ${tableName === TABLE_EVALS ? "run_id" : "id"})
+ ORDER BY (createdAt, ${tableName === TABLE_EVALS ? "run_id" : "id"})
+ ${this.ttl?.[tableName]?.row ? `TTL toDateTime(createdAt) + INTERVAL ${this.ttl[tableName].row.interval} ${this.ttl[tableName].row.unit}` : ""}
+ SETTINGS index_granularity = 8192
  `;
  await this.db.query({
  query: sql,
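
`createTable` now reads the new `ttl` option from the constructor config: a per-table `row` entry becomes a table-level `TTL toDateTime(<ttlKey, default createdAt>) + INTERVAL ...` clause, per-column entries append a column-level TTL to the column definition, and the evals table switches its primary key and order-by from `id` to `run_id`. A sketch of the config shape (interval values here are illustrative; the shape mirrors the test config further down):

```ts
import { ClickhouseStore } from '@mastra/clickhouse';

const store = new ClickhouseStore({
  url: 'http://localhost:8123',
  username: 'default',
  password: 'password',
  ttl: {
    // Row-level TTL: whole mastra_traces rows expire 30 days after createdAt.
    mastra_traces: { row: { interval: 30, unit: 'DAY' } },
    // Column-level TTL: only the result column of mastra_evals is cleared after 90 days.
    mastra_evals: { columns: { result: { interval: 90, unit: 'DAY' } } },
  },
});
```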
@@ -625,6 +694,83 @@ var ClickhouseStore = class extends MastraStorage {
  throw error;
  }
  }
+ async getWorkflowRuns({
+ workflowName,
+ fromDate,
+ toDate,
+ limit,
+ offset
+ } = {}) {
+ try {
+ const conditions = [];
+ const values = {};
+ if (workflowName) {
+ conditions.push(`workflow_name = {var_workflow_name:String}`);
+ values.var_workflow_name = workflowName;
+ }
+ if (fromDate) {
+ conditions.push(`createdAt >= {var_from_date:DateTime64(3)}`);
+ values.var_from_date = fromDate.getTime() / 1e3;
+ }
+ if (toDate) {
+ conditions.push(`createdAt <= {var_to_date:DateTime64(3)}`);
+ values.var_to_date = toDate.getTime() / 1e3;
+ }
+ const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
+ const limitClause = limit !== void 0 ? `LIMIT ${limit}` : "";
+ const offsetClause = offset !== void 0 ? `OFFSET ${offset}` : "";
+ let total = 0;
+ if (limit !== void 0 && offset !== void 0) {
+ const countResult = await this.db.query({
+ query: `SELECT COUNT(*) as count FROM ${TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[TABLE_WORKFLOW_SNAPSHOT].startsWith("ReplacingMergeTree") ? "FINAL" : ""} ${whereClause}`,
+ query_params: values,
+ format: "JSONEachRow"
+ });
+ const countRows = await countResult.json();
+ total = Number(countRows[0]?.count ?? 0);
+ }
+ const result = await this.db.query({
+ query: `
+ SELECT
+ workflow_name,
+ run_id,
+ snapshot,
+ toDateTime64(createdAt, 3) as createdAt,
+ toDateTime64(updatedAt, 3) as updatedAt
+ FROM ${TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[TABLE_WORKFLOW_SNAPSHOT].startsWith("ReplacingMergeTree") ? "FINAL" : ""}
+ ${whereClause}
+ ORDER BY createdAt DESC
+ ${limitClause}
+ ${offsetClause}
+ `,
+ query_params: values,
+ format: "JSONEachRow"
+ });
+ const resultJson = await result.json();
+ const rows = resultJson;
+ const runs = rows.map((row) => {
+ let parsedSnapshot = row.snapshot;
+ if (typeof parsedSnapshot === "string") {
+ try {
+ parsedSnapshot = JSON.parse(row.snapshot);
+ } catch (e) {
+ console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
+ }
+ }
+ return {
+ workflowName: row.workflow_name,
+ runId: row.run_id,
+ snapshot: parsedSnapshot,
+ createdAt: new Date(row.createdAt),
+ updatedAt: new Date(row.updatedAt)
+ };
+ });
+ return { runs, total: total || runs.length };
+ } catch (error) {
+ console.error("Error getting workflow runs:", error);
+ throw error;
+ }
+ }
  async close() {
  await this.db.close();
  }
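
The new `getWorkflowRuns` method accepts optional `workflowName`, `fromDate`/`toDate`, and `limit`/`offset` pagination; a separate `COUNT(*)` query populates `total` only when both `limit` and `offset` are given (otherwise `total` falls back to the number of returned runs), and string snapshots are `JSON.parse`d on a best-effort basis. A minimal usage sketch, assuming a configured `store`:

```ts
const { runs, total } = await store.getWorkflowRuns({
  workflowName: 'my-workflow',                          // optional name filter
  fromDate: new Date(Date.now() - 24 * 60 * 60 * 1000), // optional date range
  toDate: new Date(),
  limit: 20,  // total is only counted when both limit and offset are provided
  offset: 0,
});

for (const run of runs) {
  console.log(run.workflowName, run.runId, run.createdAt.toISOString());
}
console.log(`showing ${runs.length} of ${total}`);
```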
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@mastra/clickhouse",
- "version": "0.2.7-alpha.1",
+ "version": "0.2.7-alpha.2",
  "description": "Clickhouse provider for Mastra - includes db storage capabilities",
  "type": "module",
  "main": "dist/index.js",
@@ -20,7 +20,7 @@
  },
  "dependencies": {
  "@clickhouse/client": "^1.11.0",
- "@mastra/core": "^0.8.0-alpha.2"
+ "@mastra/core": "^0.8.0-alpha.3"
  },
  "devDependencies": {
  "@microsoft/api-extractor": "^7.52.1",
@@ -1,4 +1,5 @@
  import { randomUUID } from 'crypto';
+ import { TABLE_THREADS, TABLE_MESSAGES, TABLE_WORKFLOW_SNAPSHOT } from '@mastra/core/storage';
  import type { WorkflowRunState } from '@mastra/core/workflows';
  import { describe, it, expect, beforeAll, beforeEach, afterAll } from 'vitest';

@@ -9,6 +10,16 @@ const TEST_CONFIG: ClickhouseConfig = {
  url: process.env.CLICKHOUSE_URL || 'http://localhost:8123',
  username: process.env.CLICKHOUSE_USERNAME || 'default',
  password: process.env.CLICKHOUSE_PASSWORD || 'password',
+ ttl: {
+ mastra_traces: {
+ row: { interval: 10, unit: 'SECOND' },
+ },
+ mastra_evals: {
+ columns: {
+ result: { interval: 10, unit: 'SECOND' },
+ },
+ },
+ },
  };

  // Sample test data factory functions
@@ -31,6 +42,46 @@ const createSampleMessage = (threadId: string, createdAt: Date = new Date()) =>
  createdAt,
  }) as any;

+ const createSampleTrace = () => ({
+ id: `trace-${randomUUID()}`,
+ name: 'Test Trace',
+ createdAt: new Date(),
+ updatedAt: new Date(),
+ metadata: { key: 'value' },
+ });
+
+ const createSampleEval = () => ({
+ agent_name: 'test-agent',
+ run_id: 'test-run-1',
+ result: '{ "score": 1 }',
+ createdAt: new Date(),
+ });
+
+ const createSampleWorkflowSnapshot = (status: string, createdAt?: Date) => {
+ const runId = `run-${randomUUID()}`;
+ const stepId = `step-${randomUUID()}`;
+ const timestamp = createdAt || new Date();
+ const snapshot = {
+ result: { success: true },
+ value: {},
+ context: {
+ steps: {
+ [stepId]: {
+ status,
+ payload: {},
+ error: undefined,
+ },
+ },
+ triggerData: {},
+ attempts: {},
+ },
+ activePaths: [],
+ runId,
+ timestamp: timestamp.getTime(),
+ } as WorkflowRunState;
+ return { snapshot, runId, stepId };
+ };
+
  describe('ClickhouseStore', () => {
  let store: ClickhouseStore;

@@ -41,15 +92,14 @@ describe('ClickhouseStore', () => {

  beforeEach(async () => {
  // Clear tables before each test
- await store.clearTable({ tableName: 'mastra_workflow_snapshot' });
- await store.clearTable({ tableName: 'mastra_messages' });
- await store.clearTable({ tableName: 'mastra_threads' });
+ await store.clearTable({ tableName: TABLE_THREADS });
+ await store.clearTable({ tableName: TABLE_MESSAGES });
+ await store.clearTable({ tableName: TABLE_WORKFLOW_SNAPSHOT });
  });

  describe('Thread Operations', () => {
  it('should create and retrieve a thread', async () => {
  const thread = createSampleThread();
- console.log('Saving thread:', thread);

  // Save thread
  const savedThread = await store.__saveThread({ thread });
@@ -135,8 +185,6 @@ describe('ClickhouseStore', () => {
  // Retrieve messages
  const retrievedMessages = await store.__getMessages({ threadId: thread.id });
  expect(retrievedMessages).toHaveLength(2);
- console.log('Messages:', messages);
- console.log('Retrieved messages:', retrievedMessages);
  expect(retrievedMessages).toEqual(expect.arrayContaining(messages));
  }, 10e3);

@@ -192,6 +240,59 @@ describe('ClickhouseStore', () => {
  // });
  });

+ describe('Traces and TTL', () => {
+ it('should create and retrieve a trace, but not when row level ttl expires', async () => {
+ const trace = createSampleTrace();
+ await store.__batchInsert({
+ tableName: 'mastra_traces',
+ records: [trace],
+ });
+ let traces = await store.__getTraces({
+ page: 0,
+ perPage: 10,
+ });
+
+ expect(traces).toHaveLength(1);
+ expect(traces[0]!.id).toBe(trace.id);
+
+ await new Promise(resolve => setTimeout(resolve, 10e3));
+ await store.optimizeTable({ tableName: 'mastra_traces' });
+
+ traces = await store.__getTraces({
+ page: 0,
+ perPage: 10,
+ });
+
+ expect(traces).toHaveLength(0);
+ }, 60e3);
+
+ // NOTE: unable to clear column level TTLs for the test case nicely, but it does seem to get applied correctly
+ it.skip('should create and retrieve a trace, but not expired columns when column level ttl expires', async () => {
+ await store.clearTable({ tableName: 'mastra_evals' });
+ const ev = createSampleEval();
+ await store.__batchInsert({
+ tableName: 'mastra_evals',
+ records: [ev],
+ });
+ let evals = await store.__getEvalsByAgentName('test-agent');
+ console.log(evals);
+
+ expect(evals).toHaveLength(1);
+ expect(evals[0]!.agentName).toBe('test-agent');
+ expect(evals[0]!.runId).toBe('test-run-1');
+
+ await new Promise(resolve => setTimeout(resolve, 12e3));
+ await store.materializeTtl({ tableName: 'mastra_evals' });
+ await store.optimizeTable({ tableName: 'mastra_evals' });
+
+ evals = await store.__getEvalsByAgentName('test-agent');
+
+ expect(evals).toHaveLength(1);
+ expect(evals[0]!.agentName).toBe('test-agent');
+ expect(evals[0]!.runId).toBeNull();
+ }, 60e3);
+ });
+
  describe('Edge Cases and Error Handling', () => {
  it('should handle large metadata objects', async () => {
  const thread = createSampleThread();
@@ -385,6 +486,186 @@ describe('ClickhouseStore', () => {
  }, 10e3);
  });

+ describe('getWorkflowRuns', () => {
+ beforeEach(async () => {
+ await store.clearTable({ tableName: TABLE_WORKFLOW_SNAPSHOT });
+ });
+ it('returns empty array when no workflows exist', async () => {
+ const { runs, total } = await store.__getWorkflowRuns();
+ expect(runs).toEqual([]);
+ expect(total).toBe(0);
+ });
+
+ it('returns all workflows by default', async () => {
+ const workflowName1 = 'default_test_1';
+ const workflowName2 = 'default_test_2';
+
+ const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('completed');
+ const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('running');
+
+ await store.persistWorkflowSnapshot({
+ workflowName: workflowName1,
+ runId: runId1,
+ snapshot: workflow1,
+ });
+ await new Promise(resolve => setTimeout(resolve, 10)); // Small delay to ensure different timestamps
+ await store.persistWorkflowSnapshot({
+ workflowName: workflowName2,
+ runId: runId2,
+ snapshot: workflow2,
+ });
+
+ const { runs, total } = await store.__getWorkflowRuns();
+ expect(runs).toHaveLength(2);
+ expect(total).toBe(2);
+ expect(runs[0]!.workflowName).toBe(workflowName2); // Most recent first
+ expect(runs[1]!.workflowName).toBe(workflowName1);
+ const firstSnapshot = runs[0]!.snapshot as WorkflowRunState;
+ const secondSnapshot = runs[1]!.snapshot as WorkflowRunState;
+ expect(firstSnapshot.context?.steps[stepId2]?.status).toBe('running');
+ expect(secondSnapshot.context?.steps[stepId1]?.status).toBe('completed');
+ });
+
+ it('filters by workflow name', async () => {
+ const workflowName1 = 'filter_test_1';
+ const workflowName2 = 'filter_test_2';
+
+ const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('completed');
+ const { snapshot: workflow2, runId: runId2 } = createSampleWorkflowSnapshot('failed');
+
+ await store.persistWorkflowSnapshot({
+ workflowName: workflowName1,
+ runId: runId1,
+ snapshot: workflow1,
+ });
+ await new Promise(resolve => setTimeout(resolve, 10)); // Small delay to ensure different timestamps
+ await store.persistWorkflowSnapshot({
+ workflowName: workflowName2,
+ runId: runId2,
+ snapshot: workflow2,
+ });
+
+ const { runs, total } = await store.__getWorkflowRuns({
+ workflowName: workflowName1,
+ });
+ expect(runs).toHaveLength(1);
+ expect(total).toBe(1);
+ expect(runs[0]!.workflowName).toBe(workflowName1);
+ const snapshot = runs[0]!.snapshot as WorkflowRunState;
+ expect(snapshot.context?.steps[stepId1]?.status).toBe('completed');
+ });
+
+ it('filters by date range', async () => {
+ const now = new Date();
+ const yesterday = new Date(now.getTime() - 24 * 60 * 60 * 1000);
+ const twoDaysAgo = new Date(now.getTime() - 2 * 24 * 60 * 60 * 1000);
+ const workflowName1 = 'date_test_1';
+ const workflowName2 = 'date_test_2';
+ const workflowName3 = 'date_test_3';
+
+ const { snapshot: workflow1, runId: runId1 } = createSampleWorkflowSnapshot('completed');
+ const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('running');
+ const { snapshot: workflow3, runId: runId3, stepId: stepId3 } = createSampleWorkflowSnapshot('waiting');
+
+ await store.insert({
+ tableName: TABLE_WORKFLOW_SNAPSHOT,
+ record: {
+ workflow_name: workflowName1,
+ run_id: runId1,
+ snapshot: workflow1,
+ createdAt: twoDaysAgo,
+ updatedAt: twoDaysAgo,
+ },
+ });
+ await store.insert({
+ tableName: TABLE_WORKFLOW_SNAPSHOT,
+ record: {
+ workflow_name: workflowName2,
+ run_id: runId2,
+ snapshot: workflow2,
+ createdAt: yesterday,
+ updatedAt: yesterday,
+ },
+ });
+ await store.insert({
+ tableName: TABLE_WORKFLOW_SNAPSHOT,
+ record: {
+ workflow_name: workflowName3,
+ run_id: runId3,
+ snapshot: workflow3,
+ createdAt: now,
+ updatedAt: now,
+ },
+ });
+
+ const { runs } = await store.__getWorkflowRuns({
+ fromDate: yesterday,
+ toDate: now,
+ });
+
+ expect(runs).toHaveLength(2);
+ expect(runs[0]!.workflowName).toBe(workflowName3);
+ expect(runs[1]!.workflowName).toBe(workflowName2);
+ const firstSnapshot = runs[0]!.snapshot as WorkflowRunState;
+ const secondSnapshot = runs[1]!.snapshot as WorkflowRunState;
+ expect(firstSnapshot.context?.steps[stepId3]?.status).toBe('waiting');
+ expect(secondSnapshot.context?.steps[stepId2]?.status).toBe('running');
+ });
+
+ it('handles pagination', async () => {
+ const workflowName1 = 'page_test_1';
+ const workflowName2 = 'page_test_2';
+ const workflowName3 = 'page_test_3';
+
+ const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('completed');
+ const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('running');
+ const { snapshot: workflow3, runId: runId3, stepId: stepId3 } = createSampleWorkflowSnapshot('waiting');
+
+ await store.persistWorkflowSnapshot({
+ workflowName: workflowName1,
+ runId: runId1,
+ snapshot: workflow1,
+ });
+ await new Promise(resolve => setTimeout(resolve, 10)); // Small delay to ensure different timestamps
+ await store.persistWorkflowSnapshot({
+ workflowName: workflowName2,
+ runId: runId2,
+ snapshot: workflow2,
+ });
+ await new Promise(resolve => setTimeout(resolve, 10)); // Small delay to ensure different timestamps
+ await store.persistWorkflowSnapshot({
+ workflowName: workflowName3,
+ runId: runId3,
+ snapshot: workflow3,
+ });
+
+ // Get first page
+ const page1 = await store.__getWorkflowRuns({
+ limit: 2,
+ offset: 0,
+ });
+ expect(page1.runs).toHaveLength(2);
+ expect(page1.total).toBe(3); // Total count of all records
+ expect(page1.runs[0]!.workflowName).toBe(workflowName3);
+ expect(page1.runs[1]!.workflowName).toBe(workflowName2);
+ const firstSnapshot = page1.runs[0]!.snapshot as WorkflowRunState;
+ const secondSnapshot = page1.runs[1]!.snapshot as WorkflowRunState;
+ expect(firstSnapshot.context?.steps[stepId3]?.status).toBe('waiting');
+ expect(secondSnapshot.context?.steps[stepId2]?.status).toBe('running');
+
+ // Get second page
+ const page2 = await store.__getWorkflowRuns({
+ limit: 2,
+ offset: 2,
+ });
+ expect(page2.runs).toHaveLength(1);
+ expect(page2.total).toBe(3);
+ expect(page2.runs[0]!.workflowName).toBe(workflowName1);
+ const snapshot = page2.runs[0]!.snapshot as WorkflowRunState;
+ expect(snapshot.context?.steps[stepId1]?.status).toBe('completed');
+ }, 10e3);
+ });
+
  afterAll(async () => {
  await store.close();
  });