@mastra/clickhouse 0.0.0-switch-to-core-20250424015131 → 0.0.0-vnext-inngest-20250506123700
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +155 -2
- package/dist/_tsup-dts-rollup.d.cts +15 -13
- package/dist/_tsup-dts-rollup.d.ts +15 -13
- package/dist/index.cjs +92 -18
- package/dist/index.js +92 -18
- package/package.json +5 -5
- package/src/storage/index.test.ts +229 -45
- package/src/storage/index.ts +121 -31
package/CHANGELOG.md
CHANGED
@@ -1,15 +1,168 @@
 # @mastra/clickhouse
 
-## 0.0.0-switch-to-core-20250424015131
+## 0.0.0-vnext-inngest-20250506123700
 
 ### Patch Changes
 
+- 4155f47: Add parameters to filter workflow runs
+  Add fromDate and toDate to telemetry parameters
+- Updated dependencies [6052aa6]
+- Updated dependencies [967b41c]
+- Updated dependencies [3d2fb5c]
+- Updated dependencies [26738f4]
+- Updated dependencies [4155f47]
+- Updated dependencies [7eeb2bc]
+- Updated dependencies [b804723]
+- Updated dependencies [8607972]
+- Updated dependencies [ccef9f9]
+- Updated dependencies [0097d50]
+- Updated dependencies [7eeb2bc]
+- Updated dependencies [17826a9]
+- Updated dependencies [fba031f]
+- Updated dependencies [51e6923]
+  - @mastra/core@0.0.0-vnext-inngest-20250506123700
+
+## 0.3.2-alpha.5
+
+### Patch Changes
+
+- Updated dependencies [3d2fb5c]
+- Updated dependencies [7eeb2bc]
+- Updated dependencies [8607972]
+- Updated dependencies [7eeb2bc]
+- Updated dependencies [fba031f]
+  - @mastra/core@0.9.2-alpha.5
+
+## 0.3.2-alpha.4
+
+### Patch Changes
+
+- Updated dependencies [ccef9f9]
+- Updated dependencies [51e6923]
+  - @mastra/core@0.9.2-alpha.4
+
+## 0.3.2-alpha.3
+
+### Patch Changes
+
+- 4155f47: Add parameters to filter workflow runs
+  Add fromDate and toDate to telemetry parameters
+- Updated dependencies [967b41c]
+- Updated dependencies [4155f47]
+- Updated dependencies [17826a9]
+  - @mastra/core@0.9.2-alpha.3
+
+## 0.3.2-alpha.2
+
+### Patch Changes
+
+- Updated dependencies [26738f4]
+  - @mastra/core@0.9.2-alpha.2
+
+## 0.3.2-alpha.1
+
+### Patch Changes
+
+- Updated dependencies [b804723]
+  - @mastra/core@0.9.2-alpha.1
+
+## 0.3.2-alpha.0
+
+### Patch Changes
+
+- Updated dependencies [0097d50]
+  - @mastra/core@0.9.2-alpha.0
+
+## 0.3.1
+
+### Patch Changes
+
+- 479f490: [MASTRA-3131] Add getWorkflowRunByID and add resourceId as filter for getWorkflowRuns
+- Updated dependencies [405b63d]
 - Updated dependencies [81fb7f6]
+- Updated dependencies [20275d4]
+- Updated dependencies [7d1892c]
+- Updated dependencies [a90a082]
+- Updated dependencies [2d17c73]
+- Updated dependencies [61e92f5]
+- Updated dependencies [35955b0]
+- Updated dependencies [6262bd5]
+- Updated dependencies [c1409ef]
+- Updated dependencies [3e7b69d]
+- Updated dependencies [e4943b8]
+- Updated dependencies [11d4485]
+- Updated dependencies [479f490]
+- Updated dependencies [c23a81c]
+- Updated dependencies [2d4001d]
+- Updated dependencies [c71013a]
+- Updated dependencies [1d3b1cd]
+  - @mastra/core@0.9.1
+
+## 0.3.1-alpha.8
+
+### Patch Changes
+
+- Updated dependencies [2d17c73]
+  - @mastra/core@0.9.1-alpha.8
+
+## 0.3.1-alpha.7
+
+### Patch Changes
+
+- Updated dependencies [1d3b1cd]
+  - @mastra/core@0.9.1-alpha.7
+
+## 0.3.1-alpha.6
+
+### Patch Changes
+
+- Updated dependencies [c23a81c]
+  - @mastra/core@0.9.1-alpha.6
+
+## 0.3.1-alpha.5
+
+### Patch Changes
+
+- Updated dependencies [3e7b69d]
+  - @mastra/core@0.9.1-alpha.5
+
+## 0.3.1-alpha.4
+
+### Patch Changes
+
+- 479f490: [MASTRA-3131] Add getWorkflowRunByID and add resourceId as filter for getWorkflowRuns
+- Updated dependencies [e4943b8]
+- Updated dependencies [479f490]
+  - @mastra/core@0.9.1-alpha.4
+
+## 0.3.1-alpha.3
+
+### Patch Changes
+
+- Updated dependencies [6262bd5]
+  - @mastra/core@0.9.1-alpha.3
+
+## 0.3.1-alpha.2
+
+### Patch Changes
+
+- Updated dependencies [405b63d]
+- Updated dependencies [61e92f5]
+- Updated dependencies [c71013a]
+  - @mastra/core@0.9.1-alpha.2
+
+## 0.3.1-alpha.1
+
+### Patch Changes
+
+- Updated dependencies [20275d4]
 - Updated dependencies [7d1892c]
 - Updated dependencies [a90a082]
 - Updated dependencies [35955b0]
+- Updated dependencies [c1409ef]
+- Updated dependencies [11d4485]
 - Updated dependencies [2d4001d]
-  - @mastra/core@0.…
+  - @mastra/core@0.9.1-alpha.1
 
 ## 0.3.1-alpha.0
 
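The headline entry (4155f47) adds date-range filtering to telemetry queries. A minimal usage sketch follows; the constructor fields shown (url, username, password) are assumed from the test configuration later in this diff, not from documented API:

```ts
import { ClickhouseStore } from '@mastra/clickhouse';

async function recentTelemetry() {
  // Connection options are illustrative assumptions; point at your own ClickHouse.
  const store = new ClickhouseStore({
    url: 'http://localhost:8123',
    username: 'default',
    password: 'password',
  });

  // New in this release: fromDate/toDate restrict getTraces to a time window.
  const traces = await store.getTraces({
    page: 0,
    perPage: 50,
    fromDate: new Date(Date.now() - 24 * 60 * 60 * 1000), // last 24 hours
    toDate: new Date(),
  });

  await store.close();
  return traces;
}
```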
package/dist/_tsup-dts-rollup.d.cts
CHANGED

@@ -7,6 +7,8 @@ import type { StorageGetMessagesArg } from '@mastra/core/storage';
 import type { StorageThreadType } from '@mastra/core/memory';
 import type { TABLE_NAMES } from '@mastra/core/storage';
 import { TABLE_SCHEMAS } from '@mastra/core/storage';
+import type { WorkflowRun } from '@mastra/core/storage';
+import type { WorkflowRuns } from '@mastra/core/storage';
 import type { WorkflowRunState } from '@mastra/core/workflows';
 
 declare type ClickhouseConfig = {
@@ -43,13 +45,15 @@ declare class ClickhouseStore extends MastraStorage {
         tableName: TABLE_NAMES;
         records: Record<string, any>[];
     }): Promise<void>;
-    getTraces({ name, scope, page, perPage, attributes, filters, }: {
+    getTraces({ name, scope, page, perPage, attributes, filters, fromDate, toDate, }: {
         name?: string;
         scope?: string;
         page: number;
         perPage: number;
         attributes?: Record<string, string>;
         filters?: Record<string, any>;
+        fromDate?: Date;
+        toDate?: Date;
     }): Promise<any[]>;
     optimizeTable({ tableName }: {
         tableName: TABLE_NAMES;
@@ -89,7 +93,7 @@ declare class ClickhouseStore extends MastraStorage {
     deleteThread({ threadId }: {
         threadId: string;
     }): Promise<void>;
-    getMessages<T = unknown>({ threadId, selectBy }: StorageGetMessagesArg): Promise<T>;
+    getMessages<T = unknown>({ threadId, selectBy }: StorageGetMessagesArg): Promise<T[]>;
     saveMessages({ messages }: {
         messages: MessageType[];
     }): Promise<MessageType[]>;
@@ -102,22 +106,20 @@ declare class ClickhouseStore extends MastraStorage {
         workflowName: string;
         runId: string;
     }): Promise<WorkflowRunState | null>;
-    getWorkflowRuns({ workflowName, fromDate, toDate, limit, offset, }?: {
+    private parseWorkflowRun;
+    getWorkflowRuns({ workflowName, fromDate, toDate, limit, offset, resourceId, }?: {
         workflowName?: string;
         fromDate?: Date;
         toDate?: Date;
         limit?: number;
         offset?: number;
-    }): Promise<{
-        runs: Array<{
-            workflowName: string;
-            runId: string;
-            snapshot: WorkflowRunState | string;
-            createdAt: Date;
-            updatedAt: Date;
-        }>;
-        total: number;
-    }>;
+        resourceId?: string;
+    }): Promise<WorkflowRuns>;
+    getWorkflowRunById({ runId, workflowName, }: {
+        runId: string;
+        workflowName?: string;
+    }): Promise<WorkflowRun | null>;
+    private hasColumn;
    close(): Promise<void>;
}
export { ClickhouseStore }
package/dist/_tsup-dts-rollup.d.ts
CHANGED

@@ -7,6 +7,8 @@ import type { StorageGetMessagesArg } from '@mastra/core/storage';
 import type { StorageThreadType } from '@mastra/core/memory';
 import type { TABLE_NAMES } from '@mastra/core/storage';
 import { TABLE_SCHEMAS } from '@mastra/core/storage';
+import type { WorkflowRun } from '@mastra/core/storage';
+import type { WorkflowRuns } from '@mastra/core/storage';
 import type { WorkflowRunState } from '@mastra/core/workflows';
 
 declare type ClickhouseConfig = {
@@ -43,13 +45,15 @@ declare class ClickhouseStore extends MastraStorage {
         tableName: TABLE_NAMES;
         records: Record<string, any>[];
     }): Promise<void>;
-    getTraces({ name, scope, page, perPage, attributes, filters, }: {
+    getTraces({ name, scope, page, perPage, attributes, filters, fromDate, toDate, }: {
         name?: string;
         scope?: string;
         page: number;
         perPage: number;
         attributes?: Record<string, string>;
         filters?: Record<string, any>;
+        fromDate?: Date;
+        toDate?: Date;
     }): Promise<any[]>;
     optimizeTable({ tableName }: {
         tableName: TABLE_NAMES;
@@ -89,7 +93,7 @@ declare class ClickhouseStore extends MastraStorage {
     deleteThread({ threadId }: {
         threadId: string;
     }): Promise<void>;
-    getMessages<T = unknown>({ threadId, selectBy }: StorageGetMessagesArg): Promise<T>;
+    getMessages<T = unknown>({ threadId, selectBy }: StorageGetMessagesArg): Promise<T[]>;
     saveMessages({ messages }: {
         messages: MessageType[];
     }): Promise<MessageType[]>;
@@ -102,22 +106,20 @@ declare class ClickhouseStore extends MastraStorage {
         workflowName: string;
         runId: string;
     }): Promise<WorkflowRunState | null>;
-    getWorkflowRuns({ workflowName, fromDate, toDate, limit, offset, }?: {
+    private parseWorkflowRun;
+    getWorkflowRuns({ workflowName, fromDate, toDate, limit, offset, resourceId, }?: {
        workflowName?: string;
        fromDate?: Date;
        toDate?: Date;
        limit?: number;
        offset?: number;
-    }): Promise<{
-        runs: Array<{
-            workflowName: string;
-            runId: string;
-            snapshot: WorkflowRunState | string;
-            createdAt: Date;
-            updatedAt: Date;
-        }>;
-        total: number;
-    }>;
+        resourceId?: string;
+    }): Promise<WorkflowRuns>;
+    getWorkflowRunById({ runId, workflowName, }: {
+        runId: string;
+        workflowName?: string;
+    }): Promise<WorkflowRun | null>;
+    private hasColumn;
    close(): Promise<void>;
}
export { ClickhouseStore }
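Taken together, the updated declarations swap the old inline return type for the shared WorkflowRuns shape and add getWorkflowRunById. A minimal consumer sketch against the new surface; the helper function and the 'my-workflow' name are illustrative, not part of the package:

```ts
import type { WorkflowRun, WorkflowRuns } from '@mastra/core/storage';
import type { ClickhouseStore } from '@mastra/clickhouse';

// Hypothetical helper: find the newest run for one resource, then re-fetch it by ID.
async function latestRunForResource(store: ClickhouseStore, resourceId: string): Promise<WorkflowRun | null> {
  const { runs, total }: WorkflowRuns = await store.getWorkflowRuns({
    workflowName: 'my-workflow',
    resourceId, // new filter in this release
    limit: 1,
    offset: 0,
  });
  if (total === 0 || !runs[0]) return null;

  // Round-trip through the new lookup to show the WorkflowRun | null contract.
  return store.getWorkflowRunById({ runId: runs[0].runId, workflowName: 'my-workflow' });
}
```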
package/dist/index.cjs
CHANGED
@@ -138,7 +138,9 @@ var ClickhouseStore = class extends storage.MastraStorage {
     page,
     perPage,
     attributes,
-    filters
+    filters,
+    fromDate,
+    toDate
   }) {
     const limit = perPage;
     const offset = page * perPage;
@@ -166,6 +168,14 @@ var ClickhouseStore = class extends storage.MastraStorage {
         args[`var_col_${key}`] = value;
       });
     }
+    if (fromDate) {
+      conditions.push(`createdAt >= {var_from_date:DateTime64(3)}`);
+      args.var_from_date = fromDate.getTime() / 1e3;
+    }
+    if (toDate) {
+      conditions.push(`createdAt <= {var_to_date:DateTime64(3)}`);
+      args.var_to_date = toDate.getTime() / 1e3;
+    }
     const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
     const result = await this.db.query({
       query: `SELECT *, toDateTime64(createdAt, 3) as createdAt FROM ${storage.TABLE_TRACES} ${whereClause} ORDER BY "createdAt" DESC LIMIT ${limit} OFFSET ${offset}`,
@@ -696,12 +706,31 @@ var ClickhouseStore = class extends storage.MastraStorage {
       throw error;
     }
   }
+  parseWorkflowRun(row) {
+    let parsedSnapshot = row.snapshot;
+    if (typeof parsedSnapshot === "string") {
+      try {
+        parsedSnapshot = JSON.parse(row.snapshot);
+      } catch (e) {
+        console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
+      }
+    }
+    return {
+      workflowName: row.workflow_name,
+      runId: row.run_id,
+      snapshot: parsedSnapshot,
+      createdAt: new Date(row.createdAt),
+      updatedAt: new Date(row.updatedAt),
+      resourceId: row.resourceId
+    };
+  }
   async getWorkflowRuns({
     workflowName,
     fromDate,
     toDate,
     limit,
-    offset
+    offset,
+    resourceId
   } = {}) {
     try {
       const conditions = [];
@@ -710,6 +739,15 @@ var ClickhouseStore = class extends storage.MastraStorage {
         conditions.push(`workflow_name = {var_workflow_name:String}`);
         values.var_workflow_name = workflowName;
       }
+      if (resourceId) {
+        const hasResourceId = await this.hasColumn(storage.TABLE_WORKFLOW_SNAPSHOT, "resourceId");
+        if (hasResourceId) {
+          conditions.push(`resourceId = {var_resourceId:String}`);
+          values.var_resourceId = resourceId;
+        } else {
+          console.warn(`[${storage.TABLE_WORKFLOW_SNAPSHOT}] resourceId column not found. Skipping resourceId filter.`);
+        }
+      }
       if (fromDate) {
         conditions.push(`createdAt >= {var_from_date:DateTime64(3)}`);
         values.var_from_date = fromDate.getTime() / 1e3;
@@ -738,7 +776,8 @@ var ClickhouseStore = class extends storage.MastraStorage {
           run_id,
           snapshot,
           toDateTime64(createdAt, 3) as createdAt,
-          toDateTime64(updatedAt, 3) as updatedAt
+          toDateTime64(updatedAt, 3) as updatedAt,
+          resourceId
         FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[storage.TABLE_WORKFLOW_SNAPSHOT].startsWith("ReplacingMergeTree") ? "FINAL" : ""}
         ${whereClause}
         ORDER BY createdAt DESC
@@ -751,21 +790,7 @@ var ClickhouseStore = class extends storage.MastraStorage {
       const resultJson = await result.json();
       const rows = resultJson;
       const runs = rows.map((row) => {
-        let parsedSnapshot = row.snapshot;
-        if (typeof parsedSnapshot === "string") {
-          try {
-            parsedSnapshot = JSON.parse(row.snapshot);
-          } catch (e) {
-            console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
-          }
-        }
-        return {
-          workflowName: row.workflow_name,
-          runId: row.run_id,
-          snapshot: parsedSnapshot,
-          createdAt: new Date(row.createdAt),
-          updatedAt: new Date(row.updatedAt)
-        };
+        return this.parseWorkflowRun(row);
       });
       return { runs, total: total || runs.length };
     } catch (error) {
@@ -773,6 +798,55 @@ var ClickhouseStore = class extends storage.MastraStorage {
       throw error;
     }
   }
+  async getWorkflowRunById({
+    runId,
+    workflowName
+  }) {
+    try {
+      const conditions = [];
+      const values = {};
+      if (runId) {
+        conditions.push(`run_id = {var_runId:String}`);
+        values.var_runId = runId;
+      }
+      if (workflowName) {
+        conditions.push(`workflow_name = {var_workflow_name:String}`);
+        values.var_workflow_name = workflowName;
+      }
+      const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
+      const result = await this.db.query({
+        query: `
+          SELECT
+            workflow_name,
+            run_id,
+            snapshot,
+            toDateTime64(createdAt, 3) as createdAt,
+            toDateTime64(updatedAt, 3) as updatedAt,
+            resourceId
+          FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[storage.TABLE_WORKFLOW_SNAPSHOT].startsWith("ReplacingMergeTree") ? "FINAL" : ""}
+          ${whereClause}
+        `,
+        query_params: values,
+        format: "JSONEachRow"
+      });
+      const resultJson = await result.json();
+      if (!Array.isArray(resultJson) || resultJson.length === 0) {
+        return null;
+      }
+      return this.parseWorkflowRun(resultJson[0]);
+    } catch (error) {
+      console.error("Error getting workflow run by ID:", error);
+      throw error;
+    }
+  }
+  async hasColumn(table, column) {
+    const result = await this.db.query({
+      query: `DESCRIBE TABLE ${table}`,
+      format: "JSONEachRow"
+    });
+    const columns = await result.json();
+    return columns.some((c) => c.name === column);
+  }
   async close() {
     await this.db.close();
   }
package/dist/index.js
CHANGED
@@ -136,7 +136,9 @@ var ClickhouseStore = class extends MastraStorage {
     page,
     perPage,
     attributes,
-    filters
+    filters,
+    fromDate,
+    toDate
   }) {
     const limit = perPage;
     const offset = page * perPage;
@@ -164,6 +166,14 @@ var ClickhouseStore = class extends MastraStorage {
         args[`var_col_${key}`] = value;
       });
     }
+    if (fromDate) {
+      conditions.push(`createdAt >= {var_from_date:DateTime64(3)}`);
+      args.var_from_date = fromDate.getTime() / 1e3;
+    }
+    if (toDate) {
+      conditions.push(`createdAt <= {var_to_date:DateTime64(3)}`);
+      args.var_to_date = toDate.getTime() / 1e3;
+    }
     const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
     const result = await this.db.query({
       query: `SELECT *, toDateTime64(createdAt, 3) as createdAt FROM ${TABLE_TRACES} ${whereClause} ORDER BY "createdAt" DESC LIMIT ${limit} OFFSET ${offset}`,
@@ -694,12 +704,31 @@ var ClickhouseStore = class extends MastraStorage {
       throw error;
     }
   }
+  parseWorkflowRun(row) {
+    let parsedSnapshot = row.snapshot;
+    if (typeof parsedSnapshot === "string") {
+      try {
+        parsedSnapshot = JSON.parse(row.snapshot);
+      } catch (e) {
+        console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
+      }
+    }
+    return {
+      workflowName: row.workflow_name,
+      runId: row.run_id,
+      snapshot: parsedSnapshot,
+      createdAt: new Date(row.createdAt),
+      updatedAt: new Date(row.updatedAt),
+      resourceId: row.resourceId
+    };
+  }
   async getWorkflowRuns({
     workflowName,
     fromDate,
     toDate,
     limit,
-    offset
+    offset,
+    resourceId
   } = {}) {
     try {
       const conditions = [];
@@ -708,6 +737,15 @@ var ClickhouseStore = class extends MastraStorage {
         conditions.push(`workflow_name = {var_workflow_name:String}`);
         values.var_workflow_name = workflowName;
       }
+      if (resourceId) {
+        const hasResourceId = await this.hasColumn(TABLE_WORKFLOW_SNAPSHOT, "resourceId");
+        if (hasResourceId) {
+          conditions.push(`resourceId = {var_resourceId:String}`);
+          values.var_resourceId = resourceId;
+        } else {
+          console.warn(`[${TABLE_WORKFLOW_SNAPSHOT}] resourceId column not found. Skipping resourceId filter.`);
+        }
+      }
       if (fromDate) {
         conditions.push(`createdAt >= {var_from_date:DateTime64(3)}`);
         values.var_from_date = fromDate.getTime() / 1e3;
@@ -736,7 +774,8 @@ var ClickhouseStore = class extends MastraStorage {
           run_id,
           snapshot,
           toDateTime64(createdAt, 3) as createdAt,
-          toDateTime64(updatedAt, 3) as updatedAt
+          toDateTime64(updatedAt, 3) as updatedAt,
+          resourceId
         FROM ${TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[TABLE_WORKFLOW_SNAPSHOT].startsWith("ReplacingMergeTree") ? "FINAL" : ""}
        ${whereClause}
        ORDER BY createdAt DESC
@@ -749,21 +788,7 @@ var ClickhouseStore = class extends MastraStorage {
      const resultJson = await result.json();
      const rows = resultJson;
      const runs = rows.map((row) => {
-        let parsedSnapshot = row.snapshot;
-        if (typeof parsedSnapshot === "string") {
-          try {
-            parsedSnapshot = JSON.parse(row.snapshot);
-          } catch (e) {
-            console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
-          }
-        }
-        return {
-          workflowName: row.workflow_name,
-          runId: row.run_id,
-          snapshot: parsedSnapshot,
-          createdAt: new Date(row.createdAt),
-          updatedAt: new Date(row.updatedAt)
-        };
+        return this.parseWorkflowRun(row);
      });
      return { runs, total: total || runs.length };
    } catch (error) {
@@ -771,6 +796,55 @@ var ClickhouseStore = class extends MastraStorage {
      throw error;
    }
  }
+  async getWorkflowRunById({
+    runId,
+    workflowName
+  }) {
+    try {
+      const conditions = [];
+      const values = {};
+      if (runId) {
+        conditions.push(`run_id = {var_runId:String}`);
+        values.var_runId = runId;
+      }
+      if (workflowName) {
+        conditions.push(`workflow_name = {var_workflow_name:String}`);
+        values.var_workflow_name = workflowName;
+      }
+      const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
+      const result = await this.db.query({
+        query: `
+          SELECT
+            workflow_name,
+            run_id,
+            snapshot,
+            toDateTime64(createdAt, 3) as createdAt,
+            toDateTime64(updatedAt, 3) as updatedAt,
+            resourceId
+          FROM ${TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[TABLE_WORKFLOW_SNAPSHOT].startsWith("ReplacingMergeTree") ? "FINAL" : ""}
+          ${whereClause}
+        `,
+        query_params: values,
+        format: "JSONEachRow"
+      });
+      const resultJson = await result.json();
+      if (!Array.isArray(resultJson) || resultJson.length === 0) {
+        return null;
+      }
+      return this.parseWorkflowRun(resultJson[0]);
+    } catch (error) {
+      console.error("Error getting workflow run by ID:", error);
+      throw error;
+    }
+  }
+  async hasColumn(table, column) {
+    const result = await this.db.query({
+      query: `DESCRIBE TABLE ${table}`,
+      format: "JSONEachRow"
+    });
+    const columns = await result.json();
+    return columns.some((c) => c.name === column);
+  }
  async close() {
    await this.db.close();
  }
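Both bundles build their WHERE clauses from ClickHouse named parameters: the SQL carries `{name:Type}` placeholders while the values travel separately in `query_params`, so filter input is bound server-side rather than spliced into the query string. A stripped-down sketch of the same pattern with @clickhouse/client; the endpoint and the literal table name are assumptions for illustration:

```ts
import { createClient } from '@clickhouse/client';

// Assumed local endpoint for illustration only.
const client = createClient({ url: 'http://localhost:8123' });

async function runsInWindow(fromDate: Date, toDate: Date) {
  const conditions: string[] = [];
  const params: Record<string, unknown> = {};

  // Same binding style the store uses: placeholder tokens in the SQL,
  // values supplied out of band via query_params.
  conditions.push(`createdAt >= {var_from_date:DateTime64(3)}`);
  params.var_from_date = fromDate.getTime() / 1000; // seconds, as in the store
  conditions.push(`createdAt <= {var_to_date:DateTime64(3)}`);
  params.var_to_date = toDate.getTime() / 1000;

  const where = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
  const result = await client.query({
    // Table name is a stand-in for the TABLE_WORKFLOW_SNAPSHOT constant.
    query: `SELECT run_id FROM mastra_workflow_snapshot ${where} ORDER BY createdAt DESC`,
    query_params: params,
    format: 'JSONEachRow',
  });
  return result.json();
}
```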
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@mastra/clickhouse",
-  "version": "0.0.0-switch-to-core-20250424015131",
+  "version": "0.0.0-vnext-inngest-20250506123700",
   "description": "Clickhouse provider for Mastra - includes db storage capabilities",
   "type": "module",
   "main": "dist/index.js",
@@ -21,16 +21,16 @@
   "license": "MIT",
   "dependencies": {
     "@clickhouse/client": "^1.11.0",
-    "@mastra/core": "0.0.0-…
+    "@mastra/core": "0.0.0-vnext-inngest-20250506123700"
   },
   "devDependencies": {
-    "@microsoft/api-extractor": "^7.52.…
+    "@microsoft/api-extractor": "^7.52.5",
     "@types/node": "^20.17.27",
     "eslint": "^9.23.0",
     "tsup": "^8.4.0",
     "typescript": "^5.8.2",
-    "vitest": "^3.…
-    "@internal/lint": "0.0.…
+    "vitest": "^3.1.2",
+    "@internal/lint": "0.0.0-vnext-inngest-20250506123700"
   },
   "scripts": {
     "build": "tsup src/index.ts --format esm,cjs --experimental-dts --clean --treeshake=smallest --splitting",
package/src/storage/index.test.ts
CHANGED

@@ -1,11 +1,14 @@
 import { randomUUID } from 'crypto';
+import type { WorkflowRunState } from '@mastra/core';
+import type { MessageType } from '@mastra/core/memory';
 import { TABLE_THREADS, TABLE_MESSAGES, TABLE_WORKFLOW_SNAPSHOT } from '@mastra/core/storage';
-import …
-import { describe, it, expect, beforeAll, beforeEach, afterAll } from 'vitest';
+import { describe, it, expect, beforeAll, beforeEach, afterAll, vi, afterEach } from 'vitest';
 
 import { ClickhouseStore } from '.';
 import type { ClickhouseConfig } from '.';
 
+vi.setConfig({ testTimeout: 60_000, hookTimeout: 60_000 });
+
 const TEST_CONFIG: ClickhouseConfig = {
   url: process.env.CLICKHOUSE_URL || 'http://localhost:8123',
   username: process.env.CLICKHOUSE_USERNAME || 'default',
@@ -32,15 +35,15 @@ const createSampleThread = () => ({
   metadata: { key: 'value' },
 });
 
-const createSampleMessage = (threadId: string, createdAt: Date = new Date()) =>
-  (
-  …
+const createSampleMessage = (threadId: string, createdAt: Date = new Date()): MessageType => ({
+  id: `msg-${randomUUID()}`,
+  resourceId: `resource-${randomUUID()}`,
+  role: 'user',
+  type: 'text',
+  threadId,
+  content: [{ type: 'text', text: 'Hello' }] as MessageType['content'],
+  createdAt,
+});
 
 const createSampleTrace = () => ({
   id: `trace-${randomUUID()}`,
@@ -57,7 +60,10 @@ const createSampleEval = () => ({
   createdAt: new Date(),
 });
 
-const createSampleWorkflowSnapshot = (status: string, createdAt?: Date) => {
+const createSampleWorkflowSnapshot = (
+  status: WorkflowRunState['context']['steps'][string]['status'],
+  createdAt?: Date,
+) => {
   const runId = `run-${randomUUID()}`;
   const stepId = `step-${randomUUID()}`;
   const timestamp = createdAt || new Date();
@@ -76,12 +82,20 @@ const createSampleWorkflowSnapshot = (status: string, createdAt?: Date) => {
       attempts: {},
     },
     activePaths: [],
+    suspendedPaths: {},
     runId,
     timestamp: timestamp.getTime(),
-  }
+  };
   return { snapshot, runId, stepId };
 };
 
+const checkWorkflowSnapshot = (snapshot: WorkflowRunState | string, stepId: string, status: string) => {
+  if (typeof snapshot === 'string') {
+    throw new Error('Expected WorkflowRunState, got string');
+  }
+  expect(snapshot.context?.steps[stepId]?.status).toBe(status);
+};
+
 describe('ClickhouseStore', () => {
   let store: ClickhouseStore;
@@ -185,7 +199,11 @@ describe('ClickhouseStore', () => {
     // Retrieve messages
     const retrievedMessages = await store.getMessages({ threadId: thread.id });
     expect(retrievedMessages).toHaveLength(2);
-    …
+    const checkMessages = messages.map(m => {
+      const { resourceId, ...rest } = m;
+      return rest;
+    });
+    expect(retrievedMessages).toEqual(expect.arrayContaining(checkMessages));
   }, 10e3);
 
   it('should handle empty message array', async () => {
@@ -197,7 +215,7 @@ describe('ClickhouseStore', () => {
     const thread = createSampleThread();
     await store.saveThread({ thread });
 
-    const messages = [
+    const messages: MessageType[] = [
       {
         ...createSampleMessage(thread.id, new Date(Date.now() - 1000 * 3)),
         content: [{ type: 'text', text: 'First' }],
@@ -214,11 +232,12 @@ describe('ClickhouseStore', () => {
 
     await store.saveMessages({ messages });
 
-    const retrievedMessages = await store.getMessages({ threadId: thread.id });
+    const retrievedMessages = await store.getMessages<MessageType>({ threadId: thread.id });
     expect(retrievedMessages).toHaveLength(3);
 
     // Verify order is maintained
     retrievedMessages.forEach((msg, idx) => {
+      // @ts-expect-error
       expect(msg.content[0].text).toBe(messages[idx].content[0].text);
     });
   }, 10e3);
@@ -352,11 +371,17 @@ describe('ClickhouseStore', () => {
     const snapshot = {
       status: 'running',
       context: {
+        steps: {},
         stepResults: {},
         attempts: {},
         triggerData: { type: 'manual' },
       },
-      …
+      value: {},
+      activePaths: [],
+      suspendedPaths: {},
+      runId,
+      timestamp: new Date().getTime(),
+    };
 
     await store.persistWorkflowSnapshot({
       workflowName,
@@ -387,28 +412,40 @@ describe('ClickhouseStore', () => {
     const initialSnapshot = {
       status: 'running',
       context: {
+        steps: {},
         stepResults: {},
         attempts: {},
         triggerData: { type: 'manual' },
       },
+      value: {},
+      activePaths: [],
+      suspendedPaths: {},
+      runId,
+      timestamp: new Date().getTime(),
     };
 
     await store.persistWorkflowSnapshot({
       workflowName,
       runId,
-      snapshot: initialSnapshot
+      snapshot: initialSnapshot,
     });
 
     const updatedSnapshot = {
       status: 'completed',
       context: {
+        steps: {},
         stepResults: {
           'step-1': { status: 'success', result: { data: 'test' } },
         },
         attempts: { 'step-1': 1 },
         triggerData: { type: 'manual' },
       },
-      …
+      value: {},
+      activePaths: [],
+      suspendedPaths: {},
+      runId,
+      timestamp: new Date().getTime(),
+    };
 
     await store.persistWorkflowSnapshot({
       workflowName,
@@ -446,6 +483,7 @@ describe('ClickhouseStore', () => {
           dependencies: ['step-3', 'step-4'],
         },
       },
+      steps: {},
       attempts: { 'step-1': 1, 'step-2': 0 },
       triggerData: {
         type: 'scheduled',
@@ -467,6 +505,7 @@ describe('ClickhouseStore', () => {
           status: 'waiting',
         },
       ],
+      suspendedPaths: {},
       runId: runId,
       timestamp: Date.now(),
     };
@@ -474,7 +513,7 @@ describe('ClickhouseStore', () => {
     await store.persistWorkflowSnapshot({
       workflowName,
       runId,
-      snapshot: complexSnapshot
+      snapshot: complexSnapshot,
     });
 
     const loadedSnapshot = await store.loadWorkflowSnapshot({
@@ -500,8 +539,8 @@ describe('ClickhouseStore', () => {
     const workflowName1 = 'default_test_1';
     const workflowName2 = 'default_test_2';
 
-    const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('…
-    const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('…
+    const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('success');
+    const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('waiting');
 
     await store.persistWorkflowSnapshot({
       workflowName: workflowName1,
@@ -520,17 +559,17 @@ describe('ClickhouseStore', () => {
     expect(total).toBe(2);
     expect(runs[0]!.workflowName).toBe(workflowName2); // Most recent first
     expect(runs[1]!.workflowName).toBe(workflowName1);
-    const firstSnapshot = runs[0]!.snapshot
-    const secondSnapshot = runs[1]!.snapshot
-    …
-    …
+    const firstSnapshot = runs[0]!.snapshot;
+    const secondSnapshot = runs[1]!.snapshot;
+    checkWorkflowSnapshot(firstSnapshot, stepId2, 'waiting');
+    checkWorkflowSnapshot(secondSnapshot, stepId1, 'success');
   });
 
   it('filters by workflow name', async () => {
     const workflowName1 = 'filter_test_1';
     const workflowName2 = 'filter_test_2';
 
-    const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('…
+    const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('success');
     const { snapshot: workflow2, runId: runId2 } = createSampleWorkflowSnapshot('failed');
 
     await store.persistWorkflowSnapshot({
@@ -551,8 +590,8 @@ describe('ClickhouseStore', () => {
     expect(runs).toHaveLength(1);
     expect(total).toBe(1);
     expect(runs[0]!.workflowName).toBe(workflowName1);
-    const snapshot = runs[0]!.snapshot
-    …
+    const snapshot = runs[0]!.snapshot;
+    checkWorkflowSnapshot(snapshot, stepId1, 'success');
   });
 
   it('filters by date range', async () => {
@@ -563,9 +602,9 @@ describe('ClickhouseStore', () => {
     const workflowName2 = 'date_test_2';
     const workflowName3 = 'date_test_3';
 
-    const { snapshot: workflow1, runId: runId1 } = createSampleWorkflowSnapshot('…
-    const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('…
-    const { snapshot: workflow3, runId: runId3, stepId: stepId3 } = createSampleWorkflowSnapshot('…
+    const { snapshot: workflow1, runId: runId1 } = createSampleWorkflowSnapshot('success');
+    const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('waiting');
+    const { snapshot: workflow3, runId: runId3, stepId: stepId3 } = createSampleWorkflowSnapshot('skipped');
 
     await store.insert({
       tableName: TABLE_WORKFLOW_SNAPSHOT,
@@ -606,10 +645,10 @@ describe('ClickhouseStore', () => {
     expect(runs).toHaveLength(2);
     expect(runs[0]!.workflowName).toBe(workflowName3);
     expect(runs[1]!.workflowName).toBe(workflowName2);
-    const firstSnapshot = runs[0]!.snapshot
-    const secondSnapshot = runs[1]!.snapshot
-    …
-    …
+    const firstSnapshot = runs[0]!.snapshot;
+    const secondSnapshot = runs[1]!.snapshot;
+    checkWorkflowSnapshot(firstSnapshot, stepId3, 'skipped');
+    checkWorkflowSnapshot(secondSnapshot, stepId2, 'waiting');
   });
 
   it('handles pagination', async () => {
@@ -617,9 +656,9 @@ describe('ClickhouseStore', () => {
     const workflowName2 = 'page_test_2';
     const workflowName3 = 'page_test_3';
 
-    const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('…
-    const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('…
-    const { snapshot: workflow3, runId: runId3, stepId: stepId3 } = createSampleWorkflowSnapshot('…
+    const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('success');
+    const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('waiting');
+    const { snapshot: workflow3, runId: runId3, stepId: stepId3 } = createSampleWorkflowSnapshot('skipped');
 
     await store.persistWorkflowSnapshot({
       workflowName: workflowName1,
@@ -648,10 +687,10 @@ describe('ClickhouseStore', () => {
     expect(page1.total).toBe(3); // Total count of all records
     expect(page1.runs[0]!.workflowName).toBe(workflowName3);
     expect(page1.runs[1]!.workflowName).toBe(workflowName2);
-    const firstSnapshot = page1.runs[0]!.snapshot
-    const secondSnapshot = page1.runs[1]!.snapshot
-    …
-    …
+    const firstSnapshot = page1.runs[0]!.snapshot;
+    const secondSnapshot = page1.runs[1]!.snapshot;
+    checkWorkflowSnapshot(firstSnapshot, stepId3, 'skipped');
+    checkWorkflowSnapshot(secondSnapshot, stepId2, 'waiting');
 
     // Get second page
     const page2 = await store.getWorkflowRuns({
@@ -661,10 +700,155 @@ describe('ClickhouseStore', () => {
     expect(page2.runs).toHaveLength(1);
     expect(page2.total).toBe(3);
     expect(page2.runs[0]!.workflowName).toBe(workflowName1);
-    const snapshot = page2.runs[0]!.snapshot
-    …
+    const snapshot = page2.runs[0]!.snapshot!;
+    checkWorkflowSnapshot(snapshot, stepId1, 'success');
   }, 10e3);
 });
+  describe('getWorkflowRunById', () => {
+    const workflowName = 'workflow-id-test';
+    let runId: string;
+    let stepId: string;
+
+    beforeEach(async () => {
+      // Insert a workflow run for positive test
+      const sample = createSampleWorkflowSnapshot('success');
+      runId = sample.runId;
+      stepId = sample.stepId;
+      await store.insert({
+        tableName: TABLE_WORKFLOW_SNAPSHOT,
+        record: {
+          workflow_name: workflowName,
+          run_id: runId,
+          resourceId: 'resource-abc',
+          snapshot: sample.snapshot,
+          createdAt: new Date(),
+          updatedAt: new Date(),
+        },
+      });
+    });
+
+    it('should retrieve a workflow run by ID', async () => {
+      const found = await store.getWorkflowRunById({
+        runId,
+        workflowName,
+      });
+      expect(found).not.toBeNull();
+      expect(found?.runId).toBe(runId);
+      checkWorkflowSnapshot(found?.snapshot!, stepId, 'success');
+    });
+
+    it('should return null for non-existent workflow run ID', async () => {
+      const notFound = await store.getWorkflowRunById({
+        runId: 'non-existent-id',
+        workflowName,
+      });
+      expect(notFound).toBeNull();
+    });
+  });
+  describe('getWorkflowRuns with resourceId', () => {
+    const workflowName = 'workflow-id-test';
+    let resourceId: string;
+    let runIds: string[] = [];
+
+    beforeEach(async () => {
+      // Insert multiple workflow runs for the same resourceId
+      resourceId = 'resource-shared';
+      for (const status of ['completed', 'running']) {
+        const sample = createSampleWorkflowSnapshot(status as WorkflowRunState['context']['steps'][string]['status']);
+        runIds.push(sample.runId);
+        await store.insert({
+          tableName: TABLE_WORKFLOW_SNAPSHOT,
+          record: {
+            workflow_name: workflowName,
+            run_id: sample.runId,
+            resourceId,
+            snapshot: sample.snapshot,
+            createdAt: new Date(),
+            updatedAt: new Date(),
+          },
+        });
+      }
+      // Insert a run with a different resourceId
+      const other = createSampleWorkflowSnapshot('waiting');
+      await store.insert({
+        tableName: TABLE_WORKFLOW_SNAPSHOT,
+        record: {
+          workflow_name: workflowName,
+          run_id: other.runId,
+          resourceId: 'resource-other',
+          snapshot: other.snapshot,
+          createdAt: new Date(),
+          updatedAt: new Date(),
+        },
+      });
+    });
+
+    it('should retrieve all workflow runs by resourceId', async () => {
+      const { runs } = await store.getWorkflowRuns({
+        resourceId,
+        workflowName,
+      });
+      expect(Array.isArray(runs)).toBe(true);
+      expect(runs.length).toBeGreaterThanOrEqual(2);
+      for (const run of runs) {
+        expect(run.resourceId).toBe(resourceId);
+      }
+    });
+
+    it('should return an empty array if no workflow runs match resourceId', async () => {
+      const { runs } = await store.getWorkflowRuns({
+        resourceId: 'non-existent-resource',
+        workflowName,
+      });
+      expect(Array.isArray(runs)).toBe(true);
+      expect(runs.length).toBe(0);
+    });
+  });
+
+  describe('hasColumn', () => {
+    const tempTable = 'temp_test_table';
+
+    beforeEach(async () => {
+      // Always try to drop the table before each test, ignore errors if it doesn't exist
+      try {
+        await store['db'].query({ query: `DROP TABLE IF EXISTS ${tempTable}` });
+      } catch {
+        /* ignore */
+      }
+    });
+
+    it('returns true if the column exists', async () => {
+      await store['db'].query({
+        query: `CREATE TABLE temp_test_table (
+          id UInt64,
+          resourceId String
+        ) ENGINE = MergeTree()
+        ORDER BY id
+        `,
+      });
+      expect(await store['hasColumn'](tempTable, 'resourceId')).toBe(true);
+    });
+
+    it('returns false if the column does not exist', async () => {
+      await store['db'].query({
+        query: `CREATE TABLE temp_test_table (
+          id UInt64,
+        ) ENGINE = MergeTree()
+        ORDER BY id
+        `,
+      });
+      expect(await store['hasColumn'](tempTable, 'resourceId')).toBe(false);
+    });
+
+    afterEach(async () => {
+      // Clean up after each test
+      try {
+        await store['db'].query({ query: `DROP TABLE IF EXISTS ${tempTable}` });
+      } catch {
+        /* ignore */
+      }
+    });
+  });
 
   afterAll(async () => {
     await store.close();
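The new hasColumn tests reach private members with bracket access (store['db'], store['hasColumn']). TypeScript only enforces visibility on dot access, so element access works as a deliberate test-only escape hatch. A tiny illustration with a hypothetical class:

```ts
class Example {
  private db = { query: async (q: { query: string }) => ({ ok: q.query.length > 0 }) };
  private async ping(): Promise<boolean> {
    return (await this.db.query({ query: 'SELECT 1' })).ok;
  }
}

const e = new Example();
// e.ping() fails to compile (TS2341: 'ping' is private),
// but element access bypasses the visibility check, as in the tests above.
void e['ping']().then(ok => console.log(ok));
```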
package/src/storage/index.ts
CHANGED
@@ -11,7 +11,14 @@ import {
   TABLE_TRACES,
   TABLE_WORKFLOW_SNAPSHOT,
 } from '@mastra/core/storage';
-import type { …
+import type {
+  EvalRow,
+  StorageColumn,
+  StorageGetMessagesArg,
+  TABLE_NAMES,
+  WorkflowRun,
+  WorkflowRuns,
+} from '@mastra/core/storage';
 import type { WorkflowRunState } from '@mastra/core/workflows';
 
 function safelyParseJSON(jsonString: string): any {
@@ -203,6 +210,8 @@ export class ClickhouseStore extends MastraStorage {
     perPage,
     attributes,
     filters,
+    fromDate,
+    toDate,
   }: {
     name?: string;
     scope?: string;
@@ -210,6 +219,8 @@ export class ClickhouseStore extends MastraStorage {
     perPage: number;
     attributes?: Record<string, string>;
     filters?: Record<string, any>;
+    fromDate?: Date;
+    toDate?: Date;
   }): Promise<any[]> {
     const limit = perPage;
     const offset = page * perPage;
@@ -241,6 +252,16 @@ export class ClickhouseStore extends MastraStorage {
       });
     }
 
+    if (fromDate) {
+      conditions.push(`createdAt >= {var_from_date:DateTime64(3)}`);
+      args.var_from_date = fromDate.getTime() / 1000; // Convert to Unix timestamp
+    }
+
+    if (toDate) {
+      conditions.push(`createdAt <= {var_to_date:DateTime64(3)}`);
+      args.var_to_date = toDate.getTime() / 1000; // Convert to Unix timestamp
+    }
+
     const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
 
     const result = await this.db.query({
@@ -627,7 +648,7 @@ export class ClickhouseStore extends MastraStorage {
     }
   }
 
-  async getMessages<T = unknown>({ threadId, selectBy }: StorageGetMessagesArg): Promise<T> {
+  async getMessages<T = unknown>({ threadId, selectBy }: StorageGetMessagesArg): Promise<T[]> {
     try {
       const messages: any[] = [];
       const limit = typeof selectBy?.last === `number` ? selectBy.last : 40;
@@ -734,7 +755,7 @@ export class ClickhouseStore extends MastraStorage {
         }
       });
 
-      return messages as T;
+      return messages as T[];
     } catch (error) {
       console.error('Error getting messages:', error);
       throw error;
@@ -856,28 +877,42 @@ export class ClickhouseStore extends MastraStorage {
     }
   }
 
+  private parseWorkflowRun(row: any): WorkflowRun {
+    let parsedSnapshot: WorkflowRunState | string = row.snapshot as string;
+    if (typeof parsedSnapshot === 'string') {
+      try {
+        parsedSnapshot = JSON.parse(row.snapshot as string) as WorkflowRunState;
+      } catch (e) {
+        // If parsing fails, return the raw snapshot string
+        console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
+      }
+    }
+
+    return {
+      workflowName: row.workflow_name,
+      runId: row.run_id,
+      snapshot: parsedSnapshot,
+      createdAt: new Date(row.createdAt),
+      updatedAt: new Date(row.updatedAt),
+      resourceId: row.resourceId,
+    };
+  }
+
   async getWorkflowRuns({
     workflowName,
     fromDate,
     toDate,
     limit,
     offset,
+    resourceId,
   }: {
     workflowName?: string;
     fromDate?: Date;
     toDate?: Date;
     limit?: number;
     offset?: number;
-  }): Promise<{
-    runs: Array<{
-      workflowName: string;
-      runId: string;
-      snapshot: WorkflowRunState | string;
-      createdAt: Date;
-      updatedAt: Date;
-    }>;
-    total: number;
-  }> {
+    resourceId?: string;
+  } = {}): Promise<WorkflowRuns> {
     try {
       const conditions: string[] = [];
       const values: Record<string, any> = {};
@@ -887,6 +922,16 @@ export class ClickhouseStore extends MastraStorage {
         values.var_workflow_name = workflowName;
       }
 
+      if (resourceId) {
+        const hasResourceId = await this.hasColumn(TABLE_WORKFLOW_SNAPSHOT, 'resourceId');
+        if (hasResourceId) {
+          conditions.push(`resourceId = {var_resourceId:String}`);
+          values.var_resourceId = resourceId;
+        } else {
+          console.warn(`[${TABLE_WORKFLOW_SNAPSHOT}] resourceId column not found. Skipping resourceId filter.`);
+        }
+      }
+
       if (fromDate) {
         conditions.push(`createdAt >= {var_from_date:DateTime64(3)}`);
         values.var_from_date = fromDate.getTime() / 1000; // Convert to Unix timestamp
@@ -921,7 +966,8 @@ export class ClickhouseStore extends MastraStorage {
           run_id,
           snapshot,
           toDateTime64(createdAt, 3) as createdAt,
-          toDateTime64(updatedAt, 3) as updatedAt
+          toDateTime64(updatedAt, 3) as updatedAt,
+          resourceId
         FROM ${TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[TABLE_WORKFLOW_SNAPSHOT].startsWith('ReplacingMergeTree') ? 'FINAL' : ''}
         ${whereClause}
         ORDER BY createdAt DESC
@@ -935,23 +981,7 @@ export class ClickhouseStore extends MastraStorage {
       const resultJson = await result.json();
       const rows = resultJson as any[];
       const runs = rows.map(row => {
-        let parsedSnapshot = row.snapshot;
-        if (typeof parsedSnapshot === 'string') {
-          try {
-            parsedSnapshot = JSON.parse(row.snapshot) as WorkflowRunState;
-          } catch (e) {
-            // If parsing fails, return the raw snapshot string
-            console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
-          }
-        }
-
-        return {
-          workflowName: row.workflow_name,
-          runId: row.run_id,
-          snapshot: parsedSnapshot,
-          createdAt: new Date(row.createdAt),
-          updatedAt: new Date(row.updatedAt),
-        };
+        return this.parseWorkflowRun(row);
       });
 
       // Use runs.length as total when not paginating
@@ -962,6 +992,66 @@ export class ClickhouseStore extends MastraStorage {
     }
   }
 
+  async getWorkflowRunById({
+    runId,
+    workflowName,
+  }: {
+    runId: string;
+    workflowName?: string;
+  }): Promise<WorkflowRun | null> {
+    try {
+      const conditions: string[] = [];
+      const values: Record<string, any> = {};
+
+      if (runId) {
+        conditions.push(`run_id = {var_runId:String}`);
+        values.var_runId = runId;
+      }
+
+      if (workflowName) {
+        conditions.push(`workflow_name = {var_workflow_name:String}`);
+        values.var_workflow_name = workflowName;
+      }
+
+      const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
+
+      // Get results
+      const result = await this.db.query({
+        query: `
+          SELECT
+            workflow_name,
+            run_id,
+            snapshot,
+            toDateTime64(createdAt, 3) as createdAt,
+            toDateTime64(updatedAt, 3) as updatedAt,
+            resourceId
+          FROM ${TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[TABLE_WORKFLOW_SNAPSHOT].startsWith('ReplacingMergeTree') ? 'FINAL' : ''}
+          ${whereClause}
+        `,
+        query_params: values,
+        format: 'JSONEachRow',
+      });
+
+      const resultJson = await result.json();
+      if (!Array.isArray(resultJson) || resultJson.length === 0) {
+        return null;
+      }
+      return this.parseWorkflowRun(resultJson[0]);
+    } catch (error) {
+      console.error('Error getting workflow run by ID:', error);
+      throw error;
+    }
+  }
+
+  private async hasColumn(table: string, column: string): Promise<boolean> {
+    const result = await this.db.query({
+      query: `DESCRIBE TABLE ${table}`,
+      format: 'JSONEachRow',
+    });
+    const columns = (await result.json()) as { name: string }[];
+    return columns.some(c => c.name === column);
+  }
+
   async close(): Promise<void> {
     await this.db.close();
   }