@mastra/clickhouse 0.2.7-alpha.1 → 0.2.7-alpha.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +7 -7
- package/CHANGELOG.md +24 -0
- package/dist/_tsup-dts-rollup.d.cts +46 -2
- package/dist/_tsup-dts-rollup.d.ts +46 -2
- package/dist/index.cjs +157 -11
- package/dist/index.js +157 -11
- package/package.json +2 -2
- package/src/storage/index.test.ts +287 -6
- package/src/storage/index.ts +228 -10
package/.turbo/turbo-build.log
CHANGED
@@ -1,23 +1,23 @@
 
-> @mastra/clickhouse@0.2.7-alpha.
+> @mastra/clickhouse@0.2.7-alpha.3 build /home/runner/work/mastra/mastra/stores/clickhouse
 > tsup src/index.ts --format esm,cjs --experimental-dts --clean --treeshake=smallest --splitting
 
 CLI Building entry: src/index.ts
 CLI Using tsconfig: tsconfig.json
 CLI tsup v8.4.0
 TSC Build start
-TSC ⚡️ Build success in
+TSC ⚡️ Build success in 7488ms
 DTS Build start
 CLI Target: es2022
 Analysis will use the bundled TypeScript version 5.8.2
 Writing package typings: /home/runner/work/mastra/mastra/stores/clickhouse/dist/_tsup-dts-rollup.d.ts
 Analysis will use the bundled TypeScript version 5.8.2
 Writing package typings: /home/runner/work/mastra/mastra/stores/clickhouse/dist/_tsup-dts-rollup.d.cts
-DTS ⚡️ Build success in
+DTS ⚡️ Build success in 11085ms
 CLI Cleaning output folder
 ESM Build start
 CJS Build start
-ESM dist/index.js
-ESM ⚡️ Build success in
-CJS dist/index.cjs
-CJS ⚡️ Build success in
+ESM dist/index.js 25.49 KB
+ESM ⚡️ Build success in 1089ms
+CJS dist/index.cjs 25.66 KB
+CJS ⚡️ Build success in 1091ms
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,29 @@
 # @mastra/clickhouse
 
+## 0.2.7-alpha.3
+
+### Patch Changes
+
+- Updated dependencies [d7e08e8]
+  - @mastra/core@0.8.0-alpha.4
+
+## 0.2.7-alpha.2
+
+### Patch Changes
+
+- 88fa727: Added getWorkflowRuns for libsql, pg, clickhouse and upstash as well as added route getWorkflowRunsHandler
+- 4d67826: Fix eval writes, remove id column
+- Updated dependencies [5ae0180]
+- Updated dependencies [9bfa12b]
+- Updated dependencies [515ebfb]
+- Updated dependencies [88fa727]
+- Updated dependencies [f37f535]
+- Updated dependencies [4d67826]
+- Updated dependencies [6330967]
+- Updated dependencies [8393832]
+- Updated dependencies [6330967]
+  - @mastra/core@0.8.0-alpha.3
+
 ## 0.2.7-alpha.1
 
 ### Patch Changes
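For context on the 88fa727 entry above, here is a minimal sketch of how the new `getWorkflowRuns` API might be called on the ClickHouse store (its exact signature appears in the typings diff below; the connection values and workflow name are placeholders, not taken from this package):

```ts
import { ClickhouseStore } from '@mastra/clickhouse';

// Placeholder connection details for illustration only.
const store = new ClickhouseStore({
  url: 'http://localhost:8123',
  username: 'default',
  password: '',
});

// List recent runs of one workflow, newest first.
const { runs, total } = await store.getWorkflowRuns({
  workflowName: 'my-workflow', // hypothetical workflow name
  limit: 10,
  offset: 0,
});
console.log(`showing ${runs.length} of ${total} runs`);
```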
package/dist/_tsup-dts-rollup.d.cts
CHANGED
@@ -5,31 +5,57 @@ import type { StorageColumn } from '@mastra/core/storage';
 import type { StorageGetMessagesArg } from '@mastra/core/storage';
 import type { StorageThreadType } from '@mastra/core/memory';
 import type { TABLE_NAMES } from '@mastra/core/storage';
+import { TABLE_SCHEMAS } from '@mastra/core/storage';
 import type { WorkflowRunState } from '@mastra/core/workflows';
 
 declare type ClickhouseConfig = {
     url: string;
     username: string;
     password: string;
+    ttl?: {
+        [TableKey in TABLE_NAMES]?: {
+            row?: {
+                interval: number;
+                unit: IntervalUnit;
+                ttlKey?: string;
+            };
+            columns?: Partial<{
+                [ColumnKey in keyof (typeof TABLE_SCHEMAS)[TableKey]]: {
+                    interval: number;
+                    unit: IntervalUnit;
+                    ttlKey?: string;
+                };
+            }>;
+        };
+    };
 };
 export { ClickhouseConfig }
 export { ClickhouseConfig as ClickhouseConfig_alias_1 }
 
 declare class ClickhouseStore extends MastraStorage {
     private db;
+    private ttl;
     constructor(config: ClickhouseConfig);
-
+    private transformEvalRow;
+    getEvalsByAgentName(agentName: string, type?: 'test' | 'live'): Promise<EvalRow[]>;
     batchInsert({ tableName, records }: {
         tableName: TABLE_NAMES;
         records: Record<string, any>[];
     }): Promise<void>;
-    getTraces({ name, scope, page, perPage, attributes, }: {
+    getTraces({ name, scope, page, perPage, attributes, filters, }: {
         name?: string;
         scope?: string;
         page: number;
         perPage: number;
         attributes?: Record<string, string>;
+        filters?: Record<string, any>;
     }): Promise<any[]>;
+    optimizeTable({ tableName }: {
+        tableName: TABLE_NAMES;
+    }): Promise<void>;
+    materializeTtl({ tableName }: {
+        tableName: TABLE_NAMES;
+    }): Promise<void>;
     createTable({ tableName, schema, }: {
         tableName: TABLE_NAMES;
         schema: Record<string, StorageColumn>;
@@ -75,9 +101,27 @@ declare class ClickhouseStore extends MastraStorage {
         workflowName: string;
         runId: string;
     }): Promise<WorkflowRunState | null>;
+    getWorkflowRuns({ workflowName, fromDate, toDate, limit, offset, }?: {
+        workflowName?: string;
+        fromDate?: Date;
+        toDate?: Date;
+        limit?: number;
+        offset?: number;
+    }): Promise<{
+        runs: Array<{
+            workflowName: string;
+            runId: string;
+            snapshot: WorkflowRunState | string;
+            createdAt: Date;
+            updatedAt: Date;
+        }>;
+        total: number;
+    }>;
     close(): Promise<void>;
 }
 export { ClickhouseStore }
 export { ClickhouseStore as ClickhouseStore_alias_1 }
 
+declare type IntervalUnit = 'NANOSECOND' | 'MICROSECOND' | 'MILLISECOND' | 'SECOND' | 'MINUTE' | 'HOUR' | 'DAY' | 'WEEK' | 'MONTH' | 'QUARTER' | 'YEAR';
+
 export { }
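The new `ttl` option in `ClickhouseConfig` is easiest to read from an example. Here is a sketch of one possible configuration, assuming the `mastra_traces` table and its `attributes` column referenced elsewhere in this diff; the intervals are illustrative, not defaults:

```ts
import { ClickhouseStore } from '@mastra/clickhouse';

const store = new ClickhouseStore({
  url: 'http://localhost:8123', // placeholder connection details
  username: 'default',
  password: '',
  ttl: {
    mastra_traces: {
      // Row-level TTL: drop whole rows 30 days after createdAt.
      row: { interval: 30, unit: 'DAY' },
      columns: {
        // Column-level TTL: expire only this column's values after 7 days.
        attributes: { interval: 7, unit: 'DAY', ttlKey: 'createdAt' },
      },
    },
  },
});
```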
package/dist/_tsup-dts-rollup.d.ts
CHANGED
@@ -5,31 +5,57 @@ import type { StorageColumn } from '@mastra/core/storage';
 import type { StorageGetMessagesArg } from '@mastra/core/storage';
 import type { StorageThreadType } from '@mastra/core/memory';
 import type { TABLE_NAMES } from '@mastra/core/storage';
+import { TABLE_SCHEMAS } from '@mastra/core/storage';
 import type { WorkflowRunState } from '@mastra/core/workflows';
 
 declare type ClickhouseConfig = {
     url: string;
     username: string;
     password: string;
+    ttl?: {
+        [TableKey in TABLE_NAMES]?: {
+            row?: {
+                interval: number;
+                unit: IntervalUnit;
+                ttlKey?: string;
+            };
+            columns?: Partial<{
+                [ColumnKey in keyof (typeof TABLE_SCHEMAS)[TableKey]]: {
+                    interval: number;
+                    unit: IntervalUnit;
+                    ttlKey?: string;
+                };
+            }>;
+        };
+    };
 };
 export { ClickhouseConfig }
 export { ClickhouseConfig as ClickhouseConfig_alias_1 }
 
 declare class ClickhouseStore extends MastraStorage {
     private db;
+    private ttl;
     constructor(config: ClickhouseConfig);
-
+    private transformEvalRow;
+    getEvalsByAgentName(agentName: string, type?: 'test' | 'live'): Promise<EvalRow[]>;
     batchInsert({ tableName, records }: {
         tableName: TABLE_NAMES;
         records: Record<string, any>[];
     }): Promise<void>;
-    getTraces({ name, scope, page, perPage, attributes, }: {
+    getTraces({ name, scope, page, perPage, attributes, filters, }: {
         name?: string;
         scope?: string;
         page: number;
         perPage: number;
         attributes?: Record<string, string>;
+        filters?: Record<string, any>;
     }): Promise<any[]>;
+    optimizeTable({ tableName }: {
+        tableName: TABLE_NAMES;
+    }): Promise<void>;
+    materializeTtl({ tableName }: {
+        tableName: TABLE_NAMES;
+    }): Promise<void>;
     createTable({ tableName, schema, }: {
         tableName: TABLE_NAMES;
         schema: Record<string, StorageColumn>;
@@ -75,9 +101,27 @@ declare class ClickhouseStore extends MastraStorage {
         workflowName: string;
         runId: string;
     }): Promise<WorkflowRunState | null>;
+    getWorkflowRuns({ workflowName, fromDate, toDate, limit, offset, }?: {
+        workflowName?: string;
+        fromDate?: Date;
+        toDate?: Date;
+        limit?: number;
+        offset?: number;
+    }): Promise<{
+        runs: Array<{
+            workflowName: string;
+            runId: string;
+            snapshot: WorkflowRunState | string;
+            createdAt: Date;
+            updatedAt: Date;
+        }>;
+        total: number;
+    }>;
     close(): Promise<void>;
 }
 export { ClickhouseStore }
 export { ClickhouseStore as ClickhouseStore_alias_1 }
 
+declare type IntervalUnit = 'NANOSECOND' | 'MICROSECOND' | 'MILLISECOND' | 'SECOND' | 'MINUTE' | 'HOUR' | 'DAY' | 'WEEK' | 'MONTH' | 'QUARTER' | 'YEAR';
+
 export { }
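The typings above also expose two maintenance helpers, `optimizeTable` and `materializeTtl`. A small sketch of how they might be used together after TTL settings change, assuming an existing `ClickhouseStore` instance (the pairing and timing shown here are an assumption, not documented guidance):

```ts
import { TABLE_WORKFLOW_SNAPSHOT } from '@mastra/core/storage';
import type { ClickhouseStore } from '@mastra/clickhouse';

// materializeTtl issues `ALTER TABLE ... MATERIALIZE TTL` and optimizeTable
// issues `OPTIMIZE TABLE ... FINAL` (see the index.cjs diff below), asking
// ClickHouse to apply the current TTL rules to already-written data.
async function applyTtlNow(store: ClickhouseStore) {
  await store.materializeTtl({ tableName: TABLE_WORKFLOW_SNAPSHOT });
  await store.optimizeTable({ tableName: TABLE_WORKFLOW_SNAPSHOT });
}
```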
package/dist/index.cjs
CHANGED
@@ -1,7 +1,7 @@
 'use strict';
 
-var storage = require('@mastra/core/storage');
 var client = require('@clickhouse/client');
+var storage = require('@mastra/core/storage');
 
 // src/storage/index.ts
 function safelyParseJSON(jsonString) {
@@ -43,6 +43,7 @@ function transformRow(row) {
 }
 var ClickhouseStore = class extends storage.MastraStorage {
   db;
+  ttl = {};
   constructor(config) {
     super({ name: "ClickhouseStore" });
     this.db = client.createClient({
@@ -57,9 +58,54 @@ var ClickhouseStore = class extends storage.MastraStorage {
         output_format_json_quote_64bit_integers: 0
       }
     });
+    this.ttl = config.ttl;
+  }
+  transformEvalRow(row) {
+    row = transformRow(row);
+    const resultValue = JSON.parse(row.result);
+    const testInfoValue = row.test_info ? JSON.parse(row.test_info) : void 0;
+    if (!resultValue || typeof resultValue !== "object" || !("score" in resultValue)) {
+      throw new Error(`Invalid MetricResult format: ${JSON.stringify(resultValue)}`);
+    }
+    return {
+      input: row.input,
+      output: row.output,
+      result: resultValue,
+      agentName: row.agent_name,
+      metricName: row.metric_name,
+      instructions: row.instructions,
+      testInfo: testInfoValue,
+      globalRunId: row.global_run_id,
+      runId: row.run_id,
+      createdAt: row.created_at
+    };
   }
-  getEvalsByAgentName(
-
+  async getEvalsByAgentName(agentName, type) {
+    try {
+      const baseQuery = `SELECT *, toDateTime64(createdAt, 3) as createdAt FROM ${storage.TABLE_EVALS} WHERE agent_name = {var_agent_name:String}`;
+      const typeCondition = type === "test" ? " AND test_info IS NOT NULL AND JSONExtractString(test_info, 'testPath') IS NOT NULL" : type === "live" ? " AND (test_info IS NULL OR JSONExtractString(test_info, 'testPath') IS NULL)" : "";
+      const result = await this.db.query({
+        query: `${baseQuery}${typeCondition} ORDER BY createdAt DESC`,
+        query_params: { var_agent_name: agentName },
+        clickhouse_settings: {
+          date_time_input_format: "best_effort",
+          date_time_output_format: "iso",
+          use_client_time_zone: 1,
+          output_format_json_quote_64bit_integers: 0
+        }
+      });
+      if (!result) {
+        return [];
+      }
+      const rows = await result.json();
+      return rows.data.map((row) => this.transformEvalRow(row));
+    } catch (error) {
+      if (error instanceof Error && error.message.includes("no such table")) {
+        return [];
+      }
+      this.logger.error("Failed to get evals for the specified agent: " + error?.message);
+      throw error;
+    }
   }
   async batchInsert({ tableName, records }) {
     try {
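As the query above shows, the optional `type` argument of `getEvalsByAgentName` splits evals on whether `test_info` contains a `testPath`. A one-line usage sketch, assuming `store` is a `ClickhouseStore` instance and the agent name is a placeholder:

```ts
// 'test' -> evals written from test runs (test_info.testPath present),
// 'live' -> everything else; omit the argument to get both.
const liveEvals = await store.getEvalsByAgentName('my-agent', 'live');
```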
@@ -91,7 +137,8 @@ var ClickhouseStore = class extends storage.MastraStorage {
     scope,
     page,
     perPage,
-    attributes
+    attributes,
+    filters
   }) {
     const limit = perPage;
     const offset = page * perPage;
@@ -107,8 +154,16 @@ var ClickhouseStore = class extends storage.MastraStorage {
     }
     if (attributes) {
       Object.entries(attributes).forEach(([key, value]) => {
-        conditions.push(`JSONExtractString(attributes, '${key}') = {
-        args[`
+        conditions.push(`JSONExtractString(attributes, '${key}') = {var_attr_${key}:String}`);
+        args[`var_attr_${key}`] = value;
+      });
+    }
+    if (filters) {
+      Object.entries(filters).forEach(([key, value]) => {
+        conditions.push(
+          `${key} = {var_col_${key}:${COLUMN_TYPES[storage.TABLE_SCHEMAS.mastra_traces?.[key]?.type ?? "text"]}}`
+        );
+        args[`var_col_${key}`] = value;
       });
     }
     const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
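The hunk above distinguishes the existing `attributes` filter (matched inside the JSON `attributes` column via `JSONExtractString`) from the new `filters` argument, which compares top-level trace columns and binds the value with the column's schema type. A usage sketch, assuming `store` is a `ClickhouseStore` instance; the values are illustrative:

```ts
const traces = await store.getTraces({
  page: 0,
  perPage: 50,
  // Matches a key inside the JSON `attributes` column.
  attributes: { userId: '123' },
  // Matches a top-level column of the traces table.
  filters: { scope: 'my-scope' },
});
```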
@@ -145,6 +200,16 @@ var ClickhouseStore = class extends storage.MastraStorage {
       createdAt: row.createdAt
     }));
   }
+  async optimizeTable({ tableName }) {
+    await this.db.command({
+      query: `OPTIMIZE TABLE ${tableName} FINAL`
+    });
+  }
+  async materializeTtl({ tableName }) {
+    await this.db.command({
+      query: `ALTER TABLE ${tableName} MATERIALIZE TTL;`
+    });
+  }
   async createTable({
     tableName,
     schema
@@ -153,8 +218,10 @@ var ClickhouseStore = class extends storage.MastraStorage {
     const columns = Object.entries(schema).map(([name, def]) => {
       const constraints = [];
       if (!def.nullable) constraints.push("NOT NULL");
-
+      const columnTtl = this.ttl?.[tableName]?.columns?.[name];
+      return `"${name}" ${COLUMN_TYPES[def.type]} ${constraints.join(" ")} ${columnTtl ? `TTL toDateTime(${columnTtl.ttlKey ?? "createdAt"}) + INTERVAL ${columnTtl.interval} ${columnTtl.unit}` : ""}`;
     }).join(",\n");
+    const rowTtl = this.ttl?.[tableName]?.row;
     const sql = tableName === storage.TABLE_WORKFLOW_SNAPSHOT ? `
       CREATE TABLE IF NOT EXISTS ${tableName} (
         ${["id String"].concat(columns)}
@@ -163,16 +230,18 @@ var ClickhouseStore = class extends storage.MastraStorage {
       PARTITION BY "createdAt"
       PRIMARY KEY (createdAt, run_id, workflow_name)
       ORDER BY (createdAt, run_id, workflow_name)
-
+      ${rowTtl ? `TTL toDateTime(${rowTtl.ttlKey ?? "createdAt"}) + INTERVAL ${rowTtl.interval} ${rowTtl.unit}` : ""}
+      SETTINGS index_granularity = 8192
     ` : `
       CREATE TABLE IF NOT EXISTS ${tableName} (
         ${columns}
       )
       ENGINE = ${TABLE_ENGINES[tableName]}
       PARTITION BY "createdAt"
-      PRIMARY KEY (createdAt, id)
-      ORDER BY (createdAt, id)
-
+      PRIMARY KEY (createdAt, ${tableName === storage.TABLE_EVALS ? "run_id" : "id"})
+      ORDER BY (createdAt, ${tableName === storage.TABLE_EVALS ? "run_id" : "id"})
+      ${this.ttl?.[tableName]?.row ? `TTL toDateTime(createdAt) + INTERVAL ${this.ttl[tableName].row.interval} ${this.ttl[tableName].row.unit}` : ""}
+      SETTINGS index_granularity = 8192
     `;
     await this.db.query({
       query: sql,
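To make the TTL interpolation above concrete, this is roughly the clause a column-level TTL entry contributes to the generated DDL (a standalone illustration of the template string, not output captured from the package):

```ts
// Mirrors the interpolation used in createTable above.
const columnTtl = { interval: 30, unit: 'DAY', ttlKey: 'createdAt' };
const clause = `TTL toDateTime(${columnTtl.ttlKey ?? 'createdAt'}) + INTERVAL ${columnTtl.interval} ${columnTtl.unit}`;
// -> 'TTL toDateTime(createdAt) + INTERVAL 30 DAY'
```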
@@ -627,6 +696,83 @@ var ClickhouseStore = class extends storage.MastraStorage {
       throw error;
     }
   }
+  async getWorkflowRuns({
+    workflowName,
+    fromDate,
+    toDate,
+    limit,
+    offset
+  } = {}) {
+    try {
+      const conditions = [];
+      const values = {};
+      if (workflowName) {
+        conditions.push(`workflow_name = {var_workflow_name:String}`);
+        values.var_workflow_name = workflowName;
+      }
+      if (fromDate) {
+        conditions.push(`createdAt >= {var_from_date:DateTime64(3)}`);
+        values.var_from_date = fromDate.getTime() / 1e3;
+      }
+      if (toDate) {
+        conditions.push(`createdAt <= {var_to_date:DateTime64(3)}`);
+        values.var_to_date = toDate.getTime() / 1e3;
+      }
+      const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
+      const limitClause = limit !== void 0 ? `LIMIT ${limit}` : "";
+      const offsetClause = offset !== void 0 ? `OFFSET ${offset}` : "";
+      let total = 0;
+      if (limit !== void 0 && offset !== void 0) {
+        const countResult = await this.db.query({
+          query: `SELECT COUNT(*) as count FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[storage.TABLE_WORKFLOW_SNAPSHOT].startsWith("ReplacingMergeTree") ? "FINAL" : ""} ${whereClause}`,
+          query_params: values,
+          format: "JSONEachRow"
+        });
+        const countRows = await countResult.json();
+        total = Number(countRows[0]?.count ?? 0);
+      }
+      const result = await this.db.query({
+        query: `
+          SELECT
+            workflow_name,
+            run_id,
+            snapshot,
+            toDateTime64(createdAt, 3) as createdAt,
+            toDateTime64(updatedAt, 3) as updatedAt
+          FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[storage.TABLE_WORKFLOW_SNAPSHOT].startsWith("ReplacingMergeTree") ? "FINAL" : ""}
+          ${whereClause}
+          ORDER BY createdAt DESC
+          ${limitClause}
+          ${offsetClause}
+        `,
+        query_params: values,
+        format: "JSONEachRow"
+      });
+      const resultJson = await result.json();
+      const rows = resultJson;
+      const runs = rows.map((row) => {
+        let parsedSnapshot = row.snapshot;
+        if (typeof parsedSnapshot === "string") {
+          try {
+            parsedSnapshot = JSON.parse(row.snapshot);
+          } catch (e) {
+            console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
+          }
+        }
+        return {
+          workflowName: row.workflow_name,
+          runId: row.run_id,
+          snapshot: parsedSnapshot,
+          createdAt: new Date(row.createdAt),
+          updatedAt: new Date(row.updatedAt)
+        };
+      });
+      return { runs, total: total || runs.length };
+    } catch (error) {
+      console.error("Error getting workflow runs:", error);
+      throw error;
+    }
+  }
   async close() {
     await this.db.close();
   }