@mastra/clickhouse 0.3.1-alpha.2 → 0.3.1-alpha.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,23 +1,23 @@
 
- > @mastra/clickhouse@0.3.1-alpha.2 build /home/runner/work/mastra/mastra/stores/clickhouse
+ > @mastra/clickhouse@0.3.1-alpha.4 build /home/runner/work/mastra/mastra/stores/clickhouse
 > tsup src/index.ts --format esm,cjs --experimental-dts --clean --treeshake=smallest --splitting
 
 CLI Building entry: src/index.ts
 CLI Using tsconfig: tsconfig.json
 CLI tsup v8.4.0
 TSC Build start
- TSC ⚡️ Build success in 8640ms
+ TSC ⚡️ Build success in 10007ms
 DTS Build start
 CLI Target: es2022
- Analysis will use the bundled TypeScript version 5.8.2
+ Analysis will use the bundled TypeScript version 5.8.3
 Writing package typings: /home/runner/work/mastra/mastra/stores/clickhouse/dist/_tsup-dts-rollup.d.ts
- Analysis will use the bundled TypeScript version 5.8.2
+ Analysis will use the bundled TypeScript version 5.8.3
 Writing package typings: /home/runner/work/mastra/mastra/stores/clickhouse/dist/_tsup-dts-rollup.d.cts
- DTS ⚡️ Build success in 10115ms
+ DTS ⚡️ Build success in 12757ms
 CLI Cleaning output folder
 ESM Build start
 CJS Build start
- ESM dist/index.js 25.51 KB
- ESM ⚡️ Build success in 828ms
- CJS dist/index.cjs 25.72 KB
- CJS ⚡️ Build success in 829ms
+ CJS dist/index.cjs 27.76 KB
+ CJS ⚡️ Build success in 1093ms
+ ESM dist/index.js 27.51 KB
+ ESM ⚡️ Build success in 1093ms
package/CHANGELOG.md CHANGED
@@ -1,5 +1,21 @@
 # @mastra/clickhouse
 
+ ## 0.3.1-alpha.4
+
+ ### Patch Changes
+
+ - 479f490: [MASTRA-3131] Add getWorkflowRunByID and add resourceId as filter for getWorkflowRuns
+ - Updated dependencies [e4943b8]
+ - Updated dependencies [479f490]
+   - @mastra/core@0.9.1-alpha.4
+
+ ## 0.3.1-alpha.3
+
+ ### Patch Changes
+
+ - Updated dependencies [6262bd5]
+   - @mastra/core@0.9.1-alpha.3
+
 ## 0.3.1-alpha.2
 
 ### Patch Changes
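
For context on the headline change in 0.3.1-alpha.4, here is a minimal usage sketch of the new surface. It assumes a reachable ClickHouse instance, that the `ClickhouseStore` constructor accepts the same `ClickhouseConfig` shape used in the package's own tests (url/username/password), and the workflow name and resource id are placeholders; only `getWorkflowRunById` and the `resourceId` option on `getWorkflowRuns` come from this release.

```ts
import { ClickhouseStore } from '@mastra/clickhouse';

async function main() {
  // Connection values are placeholders, mirroring the TEST_CONFIG in the package's tests.
  const store = new ClickhouseStore({
    url: process.env.CLICKHOUSE_URL || 'http://localhost:8123',
    username: process.env.CLICKHOUSE_USERNAME || 'default',
    password: process.env.CLICKHOUSE_PASSWORD || 'password',
  });

  // New in 0.3.1-alpha.4: narrow getWorkflowRuns by resourceId.
  const { runs, total } = await store.getWorkflowRuns({
    workflowName: 'my-workflow', // hypothetical workflow name
    resourceId: 'resource-123', // hypothetical resource id
  });
  console.log(`found ${total} runs for resource-123`);

  // New in 0.3.1-alpha.4: fetch a single run by its id.
  const run = await store.getWorkflowRunById({
    runId: runs[0]?.runId ?? 'non-existent-id',
    workflowName: 'my-workflow',
  });
  console.log(run ? run.snapshot : 'no matching run');

  await store.close();
}

main().catch(console.error);
```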
@@ -7,6 +7,8 @@ import type { StorageGetMessagesArg } from '@mastra/core/storage';
 import type { StorageThreadType } from '@mastra/core/memory';
 import type { TABLE_NAMES } from '@mastra/core/storage';
 import { TABLE_SCHEMAS } from '@mastra/core/storage';
+ import type { WorkflowRun } from '@mastra/core/storage';
+ import type { WorkflowRuns } from '@mastra/core/storage';
 import type { WorkflowRunState } from '@mastra/core/workflows';
 
 declare type ClickhouseConfig = {
@@ -89,7 +91,7 @@ declare class ClickhouseStore extends MastraStorage {
 deleteThread({ threadId }: {
 threadId: string;
 }): Promise<void>;
- getMessages<T = unknown>({ threadId, selectBy }: StorageGetMessagesArg): Promise<T>;
+ getMessages<T = unknown>({ threadId, selectBy }: StorageGetMessagesArg): Promise<T[]>;
 saveMessages({ messages }: {
 messages: MessageType[];
 }): Promise<MessageType[]>;
@@ -102,22 +104,20 @@ declare class ClickhouseStore extends MastraStorage {
 workflowName: string;
 runId: string;
 }): Promise<WorkflowRunState | null>;
- getWorkflowRuns({ workflowName, fromDate, toDate, limit, offset, }?: {
+ private parseWorkflowRun;
+ getWorkflowRuns({ workflowName, fromDate, toDate, limit, offset, resourceId, }?: {
 workflowName?: string;
 fromDate?: Date;
 toDate?: Date;
 limit?: number;
 offset?: number;
- }): Promise<{
- runs: Array<{
- workflowName: string;
- runId: string;
- snapshot: WorkflowRunState | string;
- createdAt: Date;
- updatedAt: Date;
- }>;
- total: number;
- }>;
+ resourceId?: string;
+ }): Promise<WorkflowRuns>;
+ getWorkflowRunById({ runId, workflowName, }: {
+ runId: string;
+ workflowName?: string;
+ }): Promise<WorkflowRun | null>;
+ private hasColumn;
 close(): Promise<void>;
 }
 export { ClickhouseStore }
@@ -7,6 +7,8 @@ import type { StorageGetMessagesArg } from '@mastra/core/storage';
 import type { StorageThreadType } from '@mastra/core/memory';
 import type { TABLE_NAMES } from '@mastra/core/storage';
 import { TABLE_SCHEMAS } from '@mastra/core/storage';
+ import type { WorkflowRun } from '@mastra/core/storage';
+ import type { WorkflowRuns } from '@mastra/core/storage';
 import type { WorkflowRunState } from '@mastra/core/workflows';
 
 declare type ClickhouseConfig = {
@@ -89,7 +91,7 @@ declare class ClickhouseStore extends MastraStorage {
 deleteThread({ threadId }: {
 threadId: string;
 }): Promise<void>;
- getMessages<T = unknown>({ threadId, selectBy }: StorageGetMessagesArg): Promise<T>;
+ getMessages<T = unknown>({ threadId, selectBy }: StorageGetMessagesArg): Promise<T[]>;
 saveMessages({ messages }: {
 messages: MessageType[];
 }): Promise<MessageType[]>;
@@ -102,22 +104,20 @@ declare class ClickhouseStore extends MastraStorage {
 workflowName: string;
 runId: string;
 }): Promise<WorkflowRunState | null>;
- getWorkflowRuns({ workflowName, fromDate, toDate, limit, offset, }?: {
+ private parseWorkflowRun;
+ getWorkflowRuns({ workflowName, fromDate, toDate, limit, offset, resourceId, }?: {
 workflowName?: string;
 fromDate?: Date;
 toDate?: Date;
 limit?: number;
 offset?: number;
- }): Promise<{
- runs: Array<{
- workflowName: string;
- runId: string;
- snapshot: WorkflowRunState | string;
- createdAt: Date;
- updatedAt: Date;
- }>;
- total: number;
- }>;
+ resourceId?: string;
+ }): Promise<WorkflowRuns>;
+ getWorkflowRunById({ runId, workflowName, }: {
+ runId: string;
+ workflowName?: string;
+ }): Promise<WorkflowRun | null>;
+ private hasColumn;
 close(): Promise<void>;
 }
 export { ClickhouseStore }
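
A side note on the typings change above: `getMessages` is now declared to resolve to `Promise<T[]>` rather than `Promise<T>`, so callers type the element instead of casting the whole result. A minimal sketch under that assumption, using the `MessageType` import the package's own tests use; `printLastMessage` is a hypothetical helper, not part of the package.

```ts
import type { MessageType } from '@mastra/core/memory';
import type { ClickhouseStore } from '@mastra/clickhouse';

// With the alpha.4 typings the element type is supplied via the generic,
// and array methods on the result type-check without an extra cast.
async function printLastMessage(store: ClickhouseStore, threadId: string) {
  const messages = await store.getMessages<MessageType>({ threadId });
  const last = messages.at(-1);
  console.log(`thread ${threadId}: ${messages.length} messages, last role = ${last?.role}`);
}
```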
package/dist/index.cjs CHANGED
@@ -696,12 +696,31 @@ var ClickhouseStore = class extends storage.MastraStorage {
 throw error;
 }
 }
+ parseWorkflowRun(row) {
+ let parsedSnapshot = row.snapshot;
+ if (typeof parsedSnapshot === "string") {
+ try {
+ parsedSnapshot = JSON.parse(row.snapshot);
+ } catch (e) {
+ console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
+ }
+ }
+ return {
+ workflowName: row.workflow_name,
+ runId: row.run_id,
+ snapshot: parsedSnapshot,
+ createdAt: new Date(row.createdAt),
+ updatedAt: new Date(row.updatedAt),
+ resourceId: row.resourceId
+ };
+ }
 async getWorkflowRuns({
 workflowName,
 fromDate,
 toDate,
 limit,
- offset
+ offset,
+ resourceId
 } = {}) {
 try {
 const conditions = [];
@@ -710,6 +729,15 @@ var ClickhouseStore = class extends storage.MastraStorage {
 conditions.push(`workflow_name = {var_workflow_name:String}`);
 values.var_workflow_name = workflowName;
 }
+ if (resourceId) {
+ const hasResourceId = await this.hasColumn(storage.TABLE_WORKFLOW_SNAPSHOT, "resourceId");
+ if (hasResourceId) {
+ conditions.push(`resourceId = {var_resourceId:String}`);
+ values.var_resourceId = resourceId;
+ } else {
+ console.warn(`[${storage.TABLE_WORKFLOW_SNAPSHOT}] resourceId column not found. Skipping resourceId filter.`);
+ }
+ }
 if (fromDate) {
 conditions.push(`createdAt >= {var_from_date:DateTime64(3)}`);
 values.var_from_date = fromDate.getTime() / 1e3;
@@ -738,7 +766,8 @@ var ClickhouseStore = class extends storage.MastraStorage {
 run_id,
 snapshot,
 toDateTime64(createdAt, 3) as createdAt,
- toDateTime64(updatedAt, 3) as updatedAt
+ toDateTime64(updatedAt, 3) as updatedAt,
+ resourceId
 FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[storage.TABLE_WORKFLOW_SNAPSHOT].startsWith("ReplacingMergeTree") ? "FINAL" : ""}
 ${whereClause}
 ORDER BY createdAt DESC
@@ -751,21 +780,7 @@ var ClickhouseStore = class extends storage.MastraStorage {
 const resultJson = await result.json();
 const rows = resultJson;
 const runs = rows.map((row) => {
- let parsedSnapshot = row.snapshot;
- if (typeof parsedSnapshot === "string") {
- try {
- parsedSnapshot = JSON.parse(row.snapshot);
- } catch (e) {
- console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
- }
- }
- return {
- workflowName: row.workflow_name,
- runId: row.run_id,
- snapshot: parsedSnapshot,
- createdAt: new Date(row.createdAt),
- updatedAt: new Date(row.updatedAt)
- };
+ return this.parseWorkflowRun(row);
 });
 return { runs, total: total || runs.length };
 } catch (error) {
@@ -773,6 +788,55 @@ var ClickhouseStore = class extends storage.MastraStorage {
 throw error;
 }
 }
+ async getWorkflowRunById({
+ runId,
+ workflowName
+ }) {
+ try {
+ const conditions = [];
+ const values = {};
+ if (runId) {
+ conditions.push(`run_id = {var_runId:String}`);
+ values.var_runId = runId;
+ }
+ if (workflowName) {
+ conditions.push(`workflow_name = {var_workflow_name:String}`);
+ values.var_workflow_name = workflowName;
+ }
+ const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
+ const result = await this.db.query({
+ query: `
+ SELECT
+ workflow_name,
+ run_id,
+ snapshot,
+ toDateTime64(createdAt, 3) as createdAt,
+ toDateTime64(updatedAt, 3) as updatedAt,
+ resourceId
+ FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[storage.TABLE_WORKFLOW_SNAPSHOT].startsWith("ReplacingMergeTree") ? "FINAL" : ""}
+ ${whereClause}
+ `,
+ query_params: values,
+ format: "JSONEachRow"
+ });
+ const resultJson = await result.json();
+ if (!Array.isArray(resultJson) || resultJson.length === 0) {
+ return null;
+ }
+ return this.parseWorkflowRun(resultJson[0]);
+ } catch (error) {
+ console.error("Error getting workflow run by ID:", error);
+ throw error;
+ }
+ }
+ async hasColumn(table, column) {
+ const result = await this.db.query({
+ query: `DESCRIBE TABLE ${table}`,
+ format: "JSONEachRow"
+ });
+ const columns = await result.json();
+ return columns.some((c) => c.name === column);
+ }
 async close() {
 await this.db.close();
 }
package/dist/index.js CHANGED
@@ -694,12 +694,31 @@ var ClickhouseStore = class extends MastraStorage {
 throw error;
 }
 }
+ parseWorkflowRun(row) {
+ let parsedSnapshot = row.snapshot;
+ if (typeof parsedSnapshot === "string") {
+ try {
+ parsedSnapshot = JSON.parse(row.snapshot);
+ } catch (e) {
+ console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
+ }
+ }
+ return {
+ workflowName: row.workflow_name,
+ runId: row.run_id,
+ snapshot: parsedSnapshot,
+ createdAt: new Date(row.createdAt),
+ updatedAt: new Date(row.updatedAt),
+ resourceId: row.resourceId
+ };
+ }
 async getWorkflowRuns({
 workflowName,
 fromDate,
 toDate,
 limit,
- offset
+ offset,
+ resourceId
 } = {}) {
 try {
 const conditions = [];
@@ -708,6 +727,15 @@ var ClickhouseStore = class extends MastraStorage {
 conditions.push(`workflow_name = {var_workflow_name:String}`);
 values.var_workflow_name = workflowName;
 }
+ if (resourceId) {
+ const hasResourceId = await this.hasColumn(TABLE_WORKFLOW_SNAPSHOT, "resourceId");
+ if (hasResourceId) {
+ conditions.push(`resourceId = {var_resourceId:String}`);
+ values.var_resourceId = resourceId;
+ } else {
+ console.warn(`[${TABLE_WORKFLOW_SNAPSHOT}] resourceId column not found. Skipping resourceId filter.`);
+ }
+ }
 if (fromDate) {
 conditions.push(`createdAt >= {var_from_date:DateTime64(3)}`);
 values.var_from_date = fromDate.getTime() / 1e3;
@@ -736,7 +764,8 @@ var ClickhouseStore = class extends MastraStorage {
 run_id,
 snapshot,
 toDateTime64(createdAt, 3) as createdAt,
- toDateTime64(updatedAt, 3) as updatedAt
+ toDateTime64(updatedAt, 3) as updatedAt,
+ resourceId
 FROM ${TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[TABLE_WORKFLOW_SNAPSHOT].startsWith("ReplacingMergeTree") ? "FINAL" : ""}
 ${whereClause}
 ORDER BY createdAt DESC
@@ -749,21 +778,7 @@ var ClickhouseStore = class extends MastraStorage {
 const resultJson = await result.json();
 const rows = resultJson;
 const runs = rows.map((row) => {
- let parsedSnapshot = row.snapshot;
- if (typeof parsedSnapshot === "string") {
- try {
- parsedSnapshot = JSON.parse(row.snapshot);
- } catch (e) {
- console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
- }
- }
- return {
- workflowName: row.workflow_name,
- runId: row.run_id,
- snapshot: parsedSnapshot,
- createdAt: new Date(row.createdAt),
- updatedAt: new Date(row.updatedAt)
- };
+ return this.parseWorkflowRun(row);
 });
 return { runs, total: total || runs.length };
 } catch (error) {
@@ -771,6 +786,55 @@ var ClickhouseStore = class extends MastraStorage {
 throw error;
 }
 }
+ async getWorkflowRunById({
+ runId,
+ workflowName
+ }) {
+ try {
+ const conditions = [];
+ const values = {};
+ if (runId) {
+ conditions.push(`run_id = {var_runId:String}`);
+ values.var_runId = runId;
+ }
+ if (workflowName) {
+ conditions.push(`workflow_name = {var_workflow_name:String}`);
+ values.var_workflow_name = workflowName;
+ }
+ const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
+ const result = await this.db.query({
+ query: `
+ SELECT
+ workflow_name,
+ run_id,
+ snapshot,
+ toDateTime64(createdAt, 3) as createdAt,
+ toDateTime64(updatedAt, 3) as updatedAt,
+ resourceId
+ FROM ${TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[TABLE_WORKFLOW_SNAPSHOT].startsWith("ReplacingMergeTree") ? "FINAL" : ""}
+ ${whereClause}
+ `,
+ query_params: values,
+ format: "JSONEachRow"
+ });
+ const resultJson = await result.json();
+ if (!Array.isArray(resultJson) || resultJson.length === 0) {
+ return null;
+ }
+ return this.parseWorkflowRun(resultJson[0]);
+ } catch (error) {
+ console.error("Error getting workflow run by ID:", error);
+ throw error;
+ }
+ }
+ async hasColumn(table, column) {
+ const result = await this.db.query({
+ query: `DESCRIBE TABLE ${table}`,
+ format: "JSONEachRow"
+ });
+ const columns = await result.json();
+ return columns.some((c) => c.name === column);
+ }
 async close() {
 await this.db.close();
 }
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "@mastra/clickhouse",
- "version": "0.3.1-alpha.2",
+ "version": "0.3.1-alpha.4",
 "description": "Clickhouse provider for Mastra - includes db storage capabilities",
 "type": "module",
 "main": "dist/index.js",
@@ -21,15 +21,15 @@
 "license": "MIT",
 "dependencies": {
 "@clickhouse/client": "^1.11.0",
- "@mastra/core": "^0.9.1-alpha.2"
+ "@mastra/core": "^0.9.1-alpha.4"
 },
 "devDependencies": {
- "@microsoft/api-extractor": "^7.52.1",
+ "@microsoft/api-extractor": "^7.52.5",
 "@types/node": "^20.17.27",
 "eslint": "^9.23.0",
 "tsup": "^8.4.0",
 "typescript": "^5.8.2",
- "vitest": "^3.0.9",
+ "vitest": "^3.1.2",
 "@internal/lint": "0.0.2"
 },
 "scripts": {
@@ -1,11 +1,14 @@
 import { randomUUID } from 'crypto';
+ import type { WorkflowRunState } from '@mastra/core';
+ import type { MessageType } from '@mastra/core/memory';
 import { TABLE_THREADS, TABLE_MESSAGES, TABLE_WORKFLOW_SNAPSHOT } from '@mastra/core/storage';
- import type { WorkflowRunState } from '@mastra/core/workflows';
- import { describe, it, expect, beforeAll, beforeEach, afterAll } from 'vitest';
+ import { describe, it, expect, beforeAll, beforeEach, afterAll, vi, afterEach } from 'vitest';
 
 import { ClickhouseStore } from '.';
 import type { ClickhouseConfig } from '.';
 
+ vi.setConfig({ testTimeout: 60_000, hookTimeout: 60_000 });
+
 const TEST_CONFIG: ClickhouseConfig = {
 url: process.env.CLICKHOUSE_URL || 'http://localhost:8123',
 username: process.env.CLICKHOUSE_USERNAME || 'default',
@@ -32,15 +35,15 @@ const createSampleThread = () => ({
 metadata: { key: 'value' },
 });
 
- const createSampleMessage = (threadId: string, createdAt: Date = new Date()) =>
- ({
- id: `msg-${randomUUID()}`,
- role: 'user',
- type: 'text',
- threadId,
- content: [{ type: 'text', text: 'Hello' }],
- createdAt,
- }) as any;
+ const createSampleMessage = (threadId: string, createdAt: Date = new Date()): MessageType => ({
+ id: `msg-${randomUUID()}`,
+ resourceId: `resource-${randomUUID()}`,
+ role: 'user',
+ type: 'text',
+ threadId,
+ content: [{ type: 'text', text: 'Hello' }] as MessageType['content'],
+ createdAt,
+ });
 
 const createSampleTrace = () => ({
 id: `trace-${randomUUID()}`,
@@ -57,7 +60,10 @@ const createSampleEval = () => ({
 createdAt: new Date(),
 });
 
- const createSampleWorkflowSnapshot = (status: string, createdAt?: Date) => {
+ const createSampleWorkflowSnapshot = (
+ status: WorkflowRunState['context']['steps'][string]['status'],
+ createdAt?: Date,
+ ) => {
 const runId = `run-${randomUUID()}`;
 const stepId = `step-${randomUUID()}`;
 const timestamp = createdAt || new Date();
@@ -76,12 +82,20 @@ const createSampleWorkflowSnapshot = (status: string, createdAt?: Date) => {
 attempts: {},
 },
 activePaths: [],
+ suspendedPaths: {},
 runId,
 timestamp: timestamp.getTime(),
- } as WorkflowRunState;
+ };
 return { snapshot, runId, stepId };
 };
 
+ const checkWorkflowSnapshot = (snapshot: WorkflowRunState | string, stepId: string, status: string) => {
+ if (typeof snapshot === 'string') {
+ throw new Error('Expected WorkflowRunState, got string');
+ }
+ expect(snapshot.context?.steps[stepId]?.status).toBe(status);
+ };
+
 describe('ClickhouseStore', () => {
 let store: ClickhouseStore;
 
@@ -185,7 +199,11 @@ describe('ClickhouseStore', () => {
 // Retrieve messages
 const retrievedMessages = await store.getMessages({ threadId: thread.id });
 expect(retrievedMessages).toHaveLength(2);
- expect(retrievedMessages).toEqual(expect.arrayContaining(messages));
+ const checkMessages = messages.map(m => {
+ const { resourceId, ...rest } = m;
+ return rest;
+ });
+ expect(retrievedMessages).toEqual(expect.arrayContaining(checkMessages));
 }, 10e3);
 
 it('should handle empty message array', async () => {
@@ -197,7 +215,7 @@ describe('ClickhouseStore', () => {
 const thread = createSampleThread();
 await store.saveThread({ thread });
 
- const messages = [
+ const messages: MessageType[] = [
 {
 ...createSampleMessage(thread.id, new Date(Date.now() - 1000 * 3)),
 content: [{ type: 'text', text: 'First' }],
@@ -214,11 +232,12 @@ describe('ClickhouseStore', () => {
 
 await store.saveMessages({ messages });
 
- const retrievedMessages = await store.getMessages({ threadId: thread.id });
+ const retrievedMessages = await store.getMessages<MessageType>({ threadId: thread.id });
 expect(retrievedMessages).toHaveLength(3);
 
 // Verify order is maintained
 retrievedMessages.forEach((msg, idx) => {
+ // @ts-expect-error
 expect(msg.content[0].text).toBe(messages[idx].content[0].text);
 });
 }, 10e3);
@@ -352,11 +371,17 @@ describe('ClickhouseStore', () => {
 const snapshot = {
 status: 'running',
 context: {
+ steps: {},
 stepResults: {},
 attempts: {},
 triggerData: { type: 'manual' },
 },
- } as any;
+ value: {},
+ activePaths: [],
+ suspendedPaths: {},
+ runId,
+ timestamp: new Date().getTime(),
+ };
 
 await store.persistWorkflowSnapshot({
 workflowName,
@@ -387,28 +412,40 @@ describe('ClickhouseStore', () => {
 const initialSnapshot = {
 status: 'running',
 context: {
+ steps: {},
 stepResults: {},
 attempts: {},
 triggerData: { type: 'manual' },
 },
+ value: {},
+ activePaths: [],
+ suspendedPaths: {},
+ runId,
+ timestamp: new Date().getTime(),
 };
 
 await store.persistWorkflowSnapshot({
 workflowName,
 runId,
- snapshot: initialSnapshot as any,
+ snapshot: initialSnapshot,
 });
 
 const updatedSnapshot = {
 status: 'completed',
 context: {
+ steps: {},
 stepResults: {
 'step-1': { status: 'success', result: { data: 'test' } },
 },
 attempts: { 'step-1': 1 },
 triggerData: { type: 'manual' },
 },
- } as any;
+ value: {},
+ activePaths: [],
+ suspendedPaths: {},
+ runId,
+ timestamp: new Date().getTime(),
+ };
 
 await store.persistWorkflowSnapshot({
 workflowName,
@@ -446,6 +483,7 @@ describe('ClickhouseStore', () => {
 dependencies: ['step-3', 'step-4'],
 },
 },
+ steps: {},
 attempts: { 'step-1': 1, 'step-2': 0 },
 triggerData: {
 type: 'scheduled',
@@ -467,6 +505,7 @@ describe('ClickhouseStore', () => {
 status: 'waiting',
 },
 ],
+ suspendedPaths: {},
 runId: runId,
 timestamp: Date.now(),
 };
@@ -474,7 +513,7 @@ describe('ClickhouseStore', () => {
 await store.persistWorkflowSnapshot({
 workflowName,
 runId,
- snapshot: complexSnapshot as WorkflowRunState,
+ snapshot: complexSnapshot,
 });
 
 const loadedSnapshot = await store.loadWorkflowSnapshot({
@@ -500,8 +539,8 @@ describe('ClickhouseStore', () => {
 const workflowName1 = 'default_test_1';
 const workflowName2 = 'default_test_2';
 
- const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('completed');
- const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('running');
+ const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('success');
+ const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('waiting');
 
 await store.persistWorkflowSnapshot({
 workflowName: workflowName1,
@@ -520,17 +559,17 @@ describe('ClickhouseStore', () => {
 expect(total).toBe(2);
 expect(runs[0]!.workflowName).toBe(workflowName2); // Most recent first
 expect(runs[1]!.workflowName).toBe(workflowName1);
- const firstSnapshot = runs[0]!.snapshot as WorkflowRunState;
- const secondSnapshot = runs[1]!.snapshot as WorkflowRunState;
- expect(firstSnapshot.context?.steps[stepId2]?.status).toBe('running');
- expect(secondSnapshot.context?.steps[stepId1]?.status).toBe('completed');
+ const firstSnapshot = runs[0]!.snapshot;
+ const secondSnapshot = runs[1]!.snapshot;
+ checkWorkflowSnapshot(firstSnapshot, stepId2, 'waiting');
+ checkWorkflowSnapshot(secondSnapshot, stepId1, 'success');
 });
 
 it('filters by workflow name', async () => {
 const workflowName1 = 'filter_test_1';
 const workflowName2 = 'filter_test_2';
 
- const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('completed');
+ const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('success');
 const { snapshot: workflow2, runId: runId2 } = createSampleWorkflowSnapshot('failed');
 
 await store.persistWorkflowSnapshot({
@@ -551,8 +590,8 @@ describe('ClickhouseStore', () => {
 expect(runs).toHaveLength(1);
 expect(total).toBe(1);
 expect(runs[0]!.workflowName).toBe(workflowName1);
- const snapshot = runs[0]!.snapshot as WorkflowRunState;
- expect(snapshot.context?.steps[stepId1]?.status).toBe('completed');
+ const snapshot = runs[0]!.snapshot;
+ checkWorkflowSnapshot(snapshot, stepId1, 'success');
 });
 
 it('filters by date range', async () => {
@@ -563,9 +602,9 @@ describe('ClickhouseStore', () => {
 const workflowName2 = 'date_test_2';
 const workflowName3 = 'date_test_3';
 
- const { snapshot: workflow1, runId: runId1 } = createSampleWorkflowSnapshot('completed');
- const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('running');
- const { snapshot: workflow3, runId: runId3, stepId: stepId3 } = createSampleWorkflowSnapshot('waiting');
+ const { snapshot: workflow1, runId: runId1 } = createSampleWorkflowSnapshot('success');
+ const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('waiting');
+ const { snapshot: workflow3, runId: runId3, stepId: stepId3 } = createSampleWorkflowSnapshot('skipped');
 
 await store.insert({
 tableName: TABLE_WORKFLOW_SNAPSHOT,
@@ -606,10 +645,10 @@ describe('ClickhouseStore', () => {
 expect(runs).toHaveLength(2);
 expect(runs[0]!.workflowName).toBe(workflowName3);
 expect(runs[1]!.workflowName).toBe(workflowName2);
- const firstSnapshot = runs[0]!.snapshot as WorkflowRunState;
- const secondSnapshot = runs[1]!.snapshot as WorkflowRunState;
- expect(firstSnapshot.context?.steps[stepId3]?.status).toBe('waiting');
- expect(secondSnapshot.context?.steps[stepId2]?.status).toBe('running');
+ const firstSnapshot = runs[0]!.snapshot;
+ const secondSnapshot = runs[1]!.snapshot;
+ checkWorkflowSnapshot(firstSnapshot, stepId3, 'skipped');
+ checkWorkflowSnapshot(secondSnapshot, stepId2, 'waiting');
 });
 
 it('handles pagination', async () => {
@@ -617,9 +656,9 @@ describe('ClickhouseStore', () => {
 const workflowName2 = 'page_test_2';
 const workflowName3 = 'page_test_3';
 
- const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('completed');
- const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('running');
- const { snapshot: workflow3, runId: runId3, stepId: stepId3 } = createSampleWorkflowSnapshot('waiting');
+ const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('success');
+ const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('waiting');
+ const { snapshot: workflow3, runId: runId3, stepId: stepId3 } = createSampleWorkflowSnapshot('skipped');
 
 await store.persistWorkflowSnapshot({
 workflowName: workflowName1,
@@ -648,10 +687,10 @@ describe('ClickhouseStore', () => {
 expect(page1.total).toBe(3); // Total count of all records
 expect(page1.runs[0]!.workflowName).toBe(workflowName3);
 expect(page1.runs[1]!.workflowName).toBe(workflowName2);
- const firstSnapshot = page1.runs[0]!.snapshot as WorkflowRunState;
- const secondSnapshot = page1.runs[1]!.snapshot as WorkflowRunState;
- expect(firstSnapshot.context?.steps[stepId3]?.status).toBe('waiting');
- expect(secondSnapshot.context?.steps[stepId2]?.status).toBe('running');
+ const firstSnapshot = page1.runs[0]!.snapshot;
+ const secondSnapshot = page1.runs[1]!.snapshot;
+ checkWorkflowSnapshot(firstSnapshot, stepId3, 'skipped');
+ checkWorkflowSnapshot(secondSnapshot, stepId2, 'waiting');
 
 // Get second page
 const page2 = await store.getWorkflowRuns({
@@ -661,10 +700,155 @@ describe('ClickhouseStore', () => {
 expect(page2.runs).toHaveLength(1);
 expect(page2.total).toBe(3);
 expect(page2.runs[0]!.workflowName).toBe(workflowName1);
- const snapshot = page2.runs[0]!.snapshot as WorkflowRunState;
- expect(snapshot.context?.steps[stepId1]?.status).toBe('completed');
+ const snapshot = page2.runs[0]!.snapshot!;
+ checkWorkflowSnapshot(snapshot, stepId1, 'success');
 }, 10e3);
 });
+ describe('getWorkflowRunById', () => {
+ const workflowName = 'workflow-id-test';
+ let runId: string;
+ let stepId: string;
+
+ beforeEach(async () => {
+ // Insert a workflow run for positive test
+ const sample = createSampleWorkflowSnapshot('success');
+ runId = sample.runId;
+ stepId = sample.stepId;
+ await store.insert({
+ tableName: TABLE_WORKFLOW_SNAPSHOT,
+ record: {
+ workflow_name: workflowName,
+ run_id: runId,
+ resourceId: 'resource-abc',
+ snapshot: sample.snapshot,
+ createdAt: new Date(),
+ updatedAt: new Date(),
+ },
+ });
+ });
+
+ it('should retrieve a workflow run by ID', async () => {
+ const found = await store.getWorkflowRunById({
+ runId,
+ workflowName,
+ });
+ expect(found).not.toBeNull();
+ expect(found?.runId).toBe(runId);
+ checkWorkflowSnapshot(found?.snapshot!, stepId, 'success');
+ });
+
+ it('should return null for non-existent workflow run ID', async () => {
+ const notFound = await store.getWorkflowRunById({
+ runId: 'non-existent-id',
+ workflowName,
+ });
+ expect(notFound).toBeNull();
+ });
+ });
+ describe('getWorkflowRuns with resourceId', () => {
+ const workflowName = 'workflow-id-test';
+ let resourceId: string;
+ let runIds: string[] = [];
+
+ beforeEach(async () => {
+ // Insert multiple workflow runs for the same resourceId
+ resourceId = 'resource-shared';
+ for (const status of ['completed', 'running']) {
+ const sample = createSampleWorkflowSnapshot(status as WorkflowRunState['context']['steps'][string]['status']);
+ runIds.push(sample.runId);
+ await store.insert({
+ tableName: TABLE_WORKFLOW_SNAPSHOT,
+ record: {
+ workflow_name: workflowName,
+ run_id: sample.runId,
+ resourceId,
+ snapshot: sample.snapshot,
+ createdAt: new Date(),
+ updatedAt: new Date(),
+ },
+ });
+ }
+ // Insert a run with a different resourceId
+ const other = createSampleWorkflowSnapshot('waiting');
+ await store.insert({
+ tableName: TABLE_WORKFLOW_SNAPSHOT,
+ record: {
+ workflow_name: workflowName,
+ run_id: other.runId,
+ resourceId: 'resource-other',
+ snapshot: other.snapshot,
+ createdAt: new Date(),
+ updatedAt: new Date(),
+ },
+ });
+ });
+
+ it('should retrieve all workflow runs by resourceId', async () => {
+ const { runs } = await store.getWorkflowRuns({
+ resourceId,
+ workflowName,
+ });
+ expect(Array.isArray(runs)).toBe(true);
+ expect(runs.length).toBeGreaterThanOrEqual(2);
+ for (const run of runs) {
+ expect(run.resourceId).toBe(resourceId);
+ }
+ });
+
+ it('should return an empty array if no workflow runs match resourceId', async () => {
+ const { runs } = await store.getWorkflowRuns({
+ resourceId: 'non-existent-resource',
+ workflowName,
+ });
+ expect(Array.isArray(runs)).toBe(true);
+ expect(runs.length).toBe(0);
+ });
+ });
+
+ describe('hasColumn', () => {
+ const tempTable = 'temp_test_table';
+
+ beforeEach(async () => {
+ // Always try to drop the table before each test, ignore errors if it doesn't exist
+ try {
+ await store['db'].query({ query: `DROP TABLE IF EXISTS ${tempTable}` });
+ } catch {
+ /* ignore */
+ }
+ });
+
+ it('returns true if the column exists', async () => {
+ await store['db'].query({
+ query: `CREATE TABLE temp_test_table (
+ id UInt64,
+ resourceId String
+ ) ENGINE = MergeTree()
+ ORDER BY id
+ `,
+ });
+ expect(await store['hasColumn'](tempTable, 'resourceId')).toBe(true);
+ });
+
+ it('returns false if the column does not exist', async () => {
+ await store['db'].query({
+ query: `CREATE TABLE temp_test_table (
+ id UInt64,
+ ) ENGINE = MergeTree()
+ ORDER BY id
+ `,
+ });
+ expect(await store['hasColumn'](tempTable, 'resourceId')).toBe(false);
+ });
+
+ afterEach(async () => {
+ // Clean up after each test
+ try {
+ await store['db'].query({ query: `DROP TABLE IF EXISTS ${tempTable}` });
+ } catch {
+ /* ignore */
+ }
+ });
+ });
 
 afterAll(async () => {
 await store.close();
@@ -11,7 +11,14 @@ import {
 TABLE_TRACES,
 TABLE_WORKFLOW_SNAPSHOT,
 } from '@mastra/core/storage';
- import type { EvalRow, StorageColumn, StorageGetMessagesArg, TABLE_NAMES } from '@mastra/core/storage';
+ import type {
+ EvalRow,
+ StorageColumn,
+ StorageGetMessagesArg,
+ TABLE_NAMES,
+ WorkflowRun,
+ WorkflowRuns,
+ } from '@mastra/core/storage';
 import type { WorkflowRunState } from '@mastra/core/workflows';
 
 function safelyParseJSON(jsonString: string): any {
@@ -627,7 +634,7 @@ export class ClickhouseStore extends MastraStorage {
 }
 }
 
- async getMessages<T = unknown>({ threadId, selectBy }: StorageGetMessagesArg): Promise<T> {
+ async getMessages<T = unknown>({ threadId, selectBy }: StorageGetMessagesArg): Promise<T[]> {
 try {
 const messages: any[] = [];
 const limit = typeof selectBy?.last === `number` ? selectBy.last : 40;
@@ -734,7 +741,7 @@ export class ClickhouseStore extends MastraStorage {
 }
 });
 
- return messages as T;
+ return messages as T[];
 } catch (error) {
 console.error('Error getting messages:', error);
 throw error;
@@ -856,28 +863,42 @@ export class ClickhouseStore extends MastraStorage {
 }
 }
 
+ private parseWorkflowRun(row: any): WorkflowRun {
+ let parsedSnapshot: WorkflowRunState | string = row.snapshot as string;
+ if (typeof parsedSnapshot === 'string') {
+ try {
+ parsedSnapshot = JSON.parse(row.snapshot as string) as WorkflowRunState;
+ } catch (e) {
+ // If parsing fails, return the raw snapshot string
+ console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
+ }
+ }
+
+ return {
+ workflowName: row.workflow_name,
+ runId: row.run_id,
+ snapshot: parsedSnapshot,
+ createdAt: new Date(row.createdAt),
+ updatedAt: new Date(row.updatedAt),
+ resourceId: row.resourceId,
+ };
+ }
+
 async getWorkflowRuns({
 workflowName,
 fromDate,
 toDate,
 limit,
 offset,
+ resourceId,
 }: {
 workflowName?: string;
 fromDate?: Date;
 toDate?: Date;
 limit?: number;
 offset?: number;
- } = {}): Promise<{
- runs: Array<{
- workflowName: string;
- runId: string;
- snapshot: WorkflowRunState | string;
- createdAt: Date;
- updatedAt: Date;
- }>;
- total: number;
- }> {
+ resourceId?: string;
+ } = {}): Promise<WorkflowRuns> {
 try {
 const conditions: string[] = [];
 const values: Record<string, any> = {};
@@ -887,6 +908,16 @@ export class ClickhouseStore extends MastraStorage {
 values.var_workflow_name = workflowName;
 }
 
+ if (resourceId) {
+ const hasResourceId = await this.hasColumn(TABLE_WORKFLOW_SNAPSHOT, 'resourceId');
+ if (hasResourceId) {
+ conditions.push(`resourceId = {var_resourceId:String}`);
+ values.var_resourceId = resourceId;
+ } else {
+ console.warn(`[${TABLE_WORKFLOW_SNAPSHOT}] resourceId column not found. Skipping resourceId filter.`);
+ }
+ }
+
 if (fromDate) {
 conditions.push(`createdAt >= {var_from_date:DateTime64(3)}`);
 values.var_from_date = fromDate.getTime() / 1000; // Convert to Unix timestamp
@@ -921,7 +952,8 @@ export class ClickhouseStore extends MastraStorage {
 run_id,
 snapshot,
 toDateTime64(createdAt, 3) as createdAt,
- toDateTime64(updatedAt, 3) as updatedAt
+ toDateTime64(updatedAt, 3) as updatedAt,
+ resourceId
 FROM ${TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[TABLE_WORKFLOW_SNAPSHOT].startsWith('ReplacingMergeTree') ? 'FINAL' : ''}
 ${whereClause}
 ORDER BY createdAt DESC
@@ -935,23 +967,7 @@ export class ClickhouseStore extends MastraStorage {
 const resultJson = await result.json();
 const rows = resultJson as any[];
 const runs = rows.map(row => {
- let parsedSnapshot: WorkflowRunState | string = row.snapshot;
- if (typeof parsedSnapshot === 'string') {
- try {
- parsedSnapshot = JSON.parse(row.snapshot) as WorkflowRunState;
- } catch (e) {
- // If parsing fails, return the raw snapshot string
- console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
- }
- }
-
- return {
- workflowName: row.workflow_name,
- runId: row.run_id,
- snapshot: parsedSnapshot,
- createdAt: new Date(row.createdAt),
- updatedAt: new Date(row.updatedAt),
- };
+ return this.parseWorkflowRun(row);
 });
 
 // Use runs.length as total when not paginating
@@ -962,6 +978,66 @@ export class ClickhouseStore extends MastraStorage {
 }
 }
 
+ async getWorkflowRunById({
+ runId,
+ workflowName,
+ }: {
+ runId: string;
+ workflowName?: string;
+ }): Promise<WorkflowRun | null> {
+ try {
+ const conditions: string[] = [];
+ const values: Record<string, any> = {};
+
+ if (runId) {
+ conditions.push(`run_id = {var_runId:String}`);
+ values.var_runId = runId;
+ }
+
+ if (workflowName) {
+ conditions.push(`workflow_name = {var_workflow_name:String}`);
+ values.var_workflow_name = workflowName;
+ }
+
+ const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
+
+ // Get results
+ const result = await this.db.query({
+ query: `
+ SELECT
+ workflow_name,
+ run_id,
+ snapshot,
+ toDateTime64(createdAt, 3) as createdAt,
+ toDateTime64(updatedAt, 3) as updatedAt,
+ resourceId
+ FROM ${TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[TABLE_WORKFLOW_SNAPSHOT].startsWith('ReplacingMergeTree') ? 'FINAL' : ''}
+ ${whereClause}
+ `,
+ query_params: values,
+ format: 'JSONEachRow',
+ });
+
+ const resultJson = await result.json();
+ if (!Array.isArray(resultJson) || resultJson.length === 0) {
+ return null;
+ }
+ return this.parseWorkflowRun(resultJson[0]);
+ } catch (error) {
+ console.error('Error getting workflow run by ID:', error);
+ throw error;
+ }
+ }
+
+ private async hasColumn(table: string, column: string): Promise<boolean> {
+ const result = await this.db.query({
+ query: `DESCRIBE TABLE ${table}`,
+ format: 'JSONEachRow',
+ });
+ const columns = (await result.json()) as { name: string }[];
+ return columns.some(c => c.name === column);
+ }
+
 async close(): Promise<void> {
 await this.db.close();
 }