@mastra/clickhouse 0.0.0-switch-to-core-20250424015131 → 0.0.0-vector-query-sources-20250516172905
- package/CHANGELOG.md +281 -2
- package/dist/_tsup-dts-rollup.d.cts +15 -13
- package/dist/_tsup-dts-rollup.d.ts +15 -13
- package/dist/index.cjs +93 -21
- package/dist/index.js +93 -21
- package/package.json +5 -5
- package/src/storage/index.test.ts +229 -45
- package/src/storage/index.ts +122 -34
package/dist/index.js
CHANGED
@@ -136,7 +136,9 @@ var ClickhouseStore = class extends MastraStorage {
     page,
     perPage,
     attributes,
-    filters
+    filters,
+    fromDate,
+    toDate
   }) {
     const limit = perPage;
     const offset = page * perPage;
@@ -164,6 +166,14 @@ var ClickhouseStore = class extends MastraStorage {
       args[`var_col_${key}`] = value;
     });
   }
+  if (fromDate) {
+    conditions.push(`createdAt >= {var_from_date:DateTime64(3)}`);
+    args.var_from_date = fromDate.getTime() / 1e3;
+  }
+  if (toDate) {
+    conditions.push(`createdAt <= {var_to_date:DateTime64(3)}`);
+    args.var_to_date = toDate.getTime() / 1e3;
+  }
   const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
   const result = await this.db.query({
     query: `SELECT *, toDateTime64(createdAt, 3) as createdAt FROM ${TABLE_TRACES} ${whereClause} ORDER BY "createdAt" DESC LIMIT ${limit} OFFSET ${offset}`,
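For readers skimming the hunk above: `{var_from_date:DateTime64(3)}` is a ClickHouse server-side query parameter, bound from the `query_params` object rather than interpolated into the SQL string, and `DateTime64(3)` accepts a Unix timestamp in seconds, hence `getTime() / 1e3`. A minimal standalone sketch of the same pattern (connection details and table name are hypothetical, not taken from this package):

import { createClient } from '@clickhouse/client';

// Hypothetical connection; ClickhouseStore builds its client from its own config.
const client = createClient({ url: 'http://localhost:8123' });

async function tracesSince(fromDate: Date) {
  const result = await client.query({
    // 'traces' is a placeholder table name; the package uses TABLE_TRACES.
    query: `SELECT * FROM traces WHERE createdAt >= {var_from_date:DateTime64(3)}`,
    query_params: { var_from_date: fromDate.getTime() / 1e3 }, // ms -> s, same conversion as above
    format: 'JSONEachRow',
  });
  return result.json();
}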
@@ -225,7 +235,6 @@ var ClickhouseStore = class extends MastraStorage {
         ${["id String"].concat(columns)}
       )
       ENGINE = ${TABLE_ENGINES[tableName]}
-      PARTITION BY "createdAt"
       PRIMARY KEY (createdAt, run_id, workflow_name)
       ORDER BY (createdAt, run_id, workflow_name)
       ${rowTtl ? `TTL toDateTime(${rowTtl.ttlKey ?? "createdAt"}) + INTERVAL ${rowTtl.interval} ${rowTtl.unit}` : ""}
@@ -235,7 +244,6 @@ var ClickhouseStore = class extends MastraStorage {
         ${columns}
       )
       ENGINE = ${TABLE_ENGINES[tableName]}
-      PARTITION BY "createdAt"
       PRIMARY KEY (createdAt, ${tableName === TABLE_EVALS ? "run_id" : "id"})
       ORDER BY (createdAt, ${tableName === TABLE_EVALS ? "run_id" : "id"})
       ${this.ttl?.[tableName]?.row ? `TTL toDateTime(createdAt) + INTERVAL ${this.ttl[tableName].row.interval} ${this.ttl[tableName].row.unit}` : ""}
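A likely reading of the two `PARTITION BY "createdAt"` removals above (inferred from the diff, not stated in it): partitioning on a raw DateTime value yields roughly one partition per distinct timestamp, and ClickHouse limits how many partitions a single insert may touch, so high-frequency writes would fail or badly fragment the table. The retained `PRIMARY KEY`/`ORDER BY (createdAt, …)` clauses still keep rows time-ordered for the `ORDER BY createdAt DESC` reads elsewhere in this file.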
@@ -480,7 +488,7 @@ var ClickhouseStore = class extends MastraStorage {
   async deleteThread({ threadId }) {
     try {
       await this.db.command({
-        query: `DELETE FROM "${TABLE_MESSAGES}" WHERE thread_id =
+        query: `DELETE FROM "${TABLE_MESSAGES}" WHERE thread_id = {var_thread_id:String};`,
         query_params: { var_thread_id: threadId },
         clickhouse_settings: {
           output_format_json_quote_64bit_integers: 0
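Context for the hunk above: in `@clickhouse/client`, `command` is the call for statements that return no result set, such as this DELETE, while `query` returns rows; both bind `{name:Type}` placeholders from `query_params` on the server, so `threadId` is never string-interpolated into the SQL. (The removed line is cut off in the registry's rendering of this diff.)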
@@ -694,12 +702,31 @@ var ClickhouseStore = class extends MastraStorage {
       throw error;
     }
   }
+  parseWorkflowRun(row) {
+    let parsedSnapshot = row.snapshot;
+    if (typeof parsedSnapshot === "string") {
+      try {
+        parsedSnapshot = JSON.parse(row.snapshot);
+      } catch (e) {
+        console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
+      }
+    }
+    return {
+      workflowName: row.workflow_name,
+      runId: row.run_id,
+      snapshot: parsedSnapshot,
+      createdAt: new Date(row.createdAt),
+      updatedAt: new Date(row.updatedAt),
+      resourceId: row.resourceId
+    };
+  }
   async getWorkflowRuns({
     workflowName,
     fromDate,
     toDate,
     limit,
-    offset
+    offset,
+    resourceId
   } = {}) {
     try {
       const conditions = [];
@@ -708,6 +735,15 @@ var ClickhouseStore = class extends MastraStorage {
         conditions.push(`workflow_name = {var_workflow_name:String}`);
         values.var_workflow_name = workflowName;
       }
+      if (resourceId) {
+        const hasResourceId = await this.hasColumn(TABLE_WORKFLOW_SNAPSHOT, "resourceId");
+        if (hasResourceId) {
+          conditions.push(`resourceId = {var_resourceId:String}`);
+          values.var_resourceId = resourceId;
+        } else {
+          console.warn(`[${TABLE_WORKFLOW_SNAPSHOT}] resourceId column not found. Skipping resourceId filter.`);
+        }
+      }
       if (fromDate) {
         conditions.push(`createdAt >= {var_from_date:DateTime64(3)}`);
         values.var_from_date = fromDate.getTime() / 1e3;
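Taken together, the two hunks above let callers scope runs to a resource. A hedged usage sketch, assuming an initialized `ClickhouseStore` named `store` (all argument values are hypothetical; option names match the destructured signature in the diff):

// resourceId is applied only when the column exists; otherwise the store
// logs a warning and returns unfiltered results, per the hunk above.
const { runs, total } = await store.getWorkflowRuns({
  workflowName: 'my-workflow',                           // hypothetical name
  resourceId: 'resource-123',                            // hypothetical id
  fromDate: new Date(Date.now() - 24 * 60 * 60 * 1000), // last 24 hours
});
console.log(total, runs[0]?.resourceId);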
@@ -736,7 +772,8 @@ var ClickhouseStore = class extends MastraStorage {
           run_id,
           snapshot,
           toDateTime64(createdAt, 3) as createdAt,
-          toDateTime64(updatedAt, 3) as updatedAt
+          toDateTime64(updatedAt, 3) as updatedAt,
+          resourceId
         FROM ${TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[TABLE_WORKFLOW_SNAPSHOT].startsWith("ReplacingMergeTree") ? "FINAL" : ""}
         ${whereClause}
         ORDER BY createdAt DESC
@@ -749,21 +786,7 @@ var ClickhouseStore = class extends MastraStorage {
       const resultJson = await result.json();
       const rows = resultJson;
       const runs = rows.map((row) => {
-        let parsedSnapshot = row.snapshot;
-        if (typeof parsedSnapshot === "string") {
-          try {
-            parsedSnapshot = JSON.parse(row.snapshot);
-          } catch (e) {
-            console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
-          }
-        }
-        return {
-          workflowName: row.workflow_name,
-          runId: row.run_id,
-          snapshot: parsedSnapshot,
-          createdAt: new Date(row.createdAt),
-          updatedAt: new Date(row.updatedAt)
-        };
+        return this.parseWorkflowRun(row);
       });
       return { runs, total: total || runs.length };
     } catch (error) {
@@ -771,6 +794,55 @@ var ClickhouseStore = class extends MastraStorage {
       throw error;
     }
   }
+  async getWorkflowRunById({
+    runId,
+    workflowName
+  }) {
+    try {
+      const conditions = [];
+      const values = {};
+      if (runId) {
+        conditions.push(`run_id = {var_runId:String}`);
+        values.var_runId = runId;
+      }
+      if (workflowName) {
+        conditions.push(`workflow_name = {var_workflow_name:String}`);
+        values.var_workflow_name = workflowName;
+      }
+      const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
+      const result = await this.db.query({
+        query: `
+          SELECT
+            workflow_name,
+            run_id,
+            snapshot,
+            toDateTime64(createdAt, 3) as createdAt,
+            toDateTime64(updatedAt, 3) as updatedAt,
+            resourceId
+          FROM ${TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[TABLE_WORKFLOW_SNAPSHOT].startsWith("ReplacingMergeTree") ? "FINAL" : ""}
+          ${whereClause}
+        `,
+        query_params: values,
+        format: "JSONEachRow"
+      });
+      const resultJson = await result.json();
+      if (!Array.isArray(resultJson) || resultJson.length === 0) {
+        return null;
+      }
+      return this.parseWorkflowRun(resultJson[0]);
+    } catch (error) {
+      console.error("Error getting workflow run by ID:", error);
+      throw error;
+    }
+  }
+  async hasColumn(table, column) {
+    const result = await this.db.query({
+      query: `DESCRIBE TABLE ${table}`,
+      format: "JSONEachRow"
+    });
+    const columns = await result.json();
+    return columns.some((c) => c.name === column);
+  }
   async close() {
     await this.db.close();
   }
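The two new methods close out this file's changes: `getWorkflowRunById` is a point lookup that reuses `parseWorkflowRun`, and `hasColumn` inspects `DESCRIBE TABLE` output (rows carrying a `name` field) to guard the `resourceId` filter on older schemas. A short usage sketch with hypothetical identifiers:

// Returns the parsed run, or null when no row matches.
const run = await store.getWorkflowRunById({
  runId: 'run-123',            // hypothetical
  workflowName: 'my-workflow', // hypothetical
});
if (run) {
  console.log(run.workflowName, run.resourceId, run.snapshot);
}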
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@mastra/clickhouse",
-  "version": "0.0.0-switch-to-core-20250424015131",
+  "version": "0.0.0-vector-query-sources-20250516172905",
   "description": "Clickhouse provider for Mastra - includes db storage capabilities",
   "type": "module",
   "main": "dist/index.js",
@@ -21,16 +21,16 @@
   "license": "MIT",
   "dependencies": {
     "@clickhouse/client": "^1.11.0",
-    "@mastra/core": "0.0.0-
+    "@mastra/core": "0.0.0-vector-query-sources-20250516172905"
   },
   "devDependencies": {
-    "@microsoft/api-extractor": "^7.52.
+    "@microsoft/api-extractor": "^7.52.5",
     "@types/node": "^20.17.27",
     "eslint": "^9.23.0",
     "tsup": "^8.4.0",
     "typescript": "^5.8.2",
-    "vitest": "^3.
-    "@internal/lint": "0.0.
+    "vitest": "^3.1.2",
+    "@internal/lint": "0.0.0-vector-query-sources-20250516172905"
   },
   "scripts": {
     "build": "tsup src/index.ts --format esm,cjs --experimental-dts --clean --treeshake=smallest --splitting",
package/src/storage/index.test.ts
CHANGED

@@ -1,11 +1,14 @@
 import { randomUUID } from 'crypto';
+import type { WorkflowRunState } from '@mastra/core';
+import type { MessageType } from '@mastra/core/memory';
 import { TABLE_THREADS, TABLE_MESSAGES, TABLE_WORKFLOW_SNAPSHOT } from '@mastra/core/storage';
-import
-import { describe, it, expect, beforeAll, beforeEach, afterAll } from 'vitest';
+import { describe, it, expect, beforeAll, beforeEach, afterAll, vi, afterEach } from 'vitest';

 import { ClickhouseStore } from '.';
 import type { ClickhouseConfig } from '.';

+vi.setConfig({ testTimeout: 60_000, hookTimeout: 60_000 });
+
 const TEST_CONFIG: ClickhouseConfig = {
   url: process.env.CLICKHOUSE_URL || 'http://localhost:8123',
   username: process.env.CLICKHOUSE_USERNAME || 'default',
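The `vi.setConfig` call raises Vitest's default per-test and per-hook timeouts to 60 seconds for the whole file, presumably because every test here round-trips to the live ClickHouse instance described by `TEST_CONFIG`.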
@@ -32,15 +35,15 @@ const createSampleThread = () => ({
   metadata: { key: 'value' },
 });

-const createSampleMessage = (threadId: string, createdAt: Date = new Date()) =>
-  (
-
-
-
-
-
-
-
+const createSampleMessage = (threadId: string, createdAt: Date = new Date()): MessageType => ({
+  id: `msg-${randomUUID()}`,
+  resourceId: `resource-${randomUUID()}`,
+  role: 'user',
+  type: 'text',
+  threadId,
+  content: [{ type: 'text', text: 'Hello' }] as MessageType['content'],
+  createdAt,
+});

 const createSampleTrace = () => ({
   id: `trace-${randomUUID()}`,
@@ -57,7 +60,10 @@ const createSampleEval = () => ({
   createdAt: new Date(),
 });

-const createSampleWorkflowSnapshot = (status: string, createdAt?: Date) => {
+const createSampleWorkflowSnapshot = (
+  status: WorkflowRunState['context']['steps'][string]['status'],
+  createdAt?: Date,
+) => {
   const runId = `run-${randomUUID()}`;
   const stepId = `step-${randomUUID()}`;
   const timestamp = createdAt || new Date();
@@ -76,12 +82,20 @@ const createSampleWorkflowSnapshot = (status: string, createdAt?: Date) => {
       attempts: {},
     },
     activePaths: [],
+    suspendedPaths: {},
     runId,
     timestamp: timestamp.getTime(),
-  }
+  };
   return { snapshot, runId, stepId };
 };

+const checkWorkflowSnapshot = (snapshot: WorkflowRunState | string, stepId: string, status: string) => {
+  if (typeof snapshot === 'string') {
+    throw new Error('Expected WorkflowRunState, got string');
+  }
+  expect(snapshot.context?.steps[stepId]?.status).toBe(status);
+};
+
 describe('ClickhouseStore', () => {
   let store: ClickhouseStore;
@@ -185,7 +199,11 @@ describe('ClickhouseStore', () => {
       // Retrieve messages
       const retrievedMessages = await store.getMessages({ threadId: thread.id });
       expect(retrievedMessages).toHaveLength(2);
-
+      const checkMessages = messages.map(m => {
+        const { resourceId, ...rest } = m;
+        return rest;
+      });
+      expect(retrievedMessages).toEqual(expect.arrayContaining(checkMessages));
     }, 10e3);

     it('should handle empty message array', async () => {
@@ -197,7 +215,7 @@ describe('ClickhouseStore', () => {
       const thread = createSampleThread();
       await store.saveThread({ thread });

-      const messages = [
+      const messages: MessageType[] = [
         {
           ...createSampleMessage(thread.id, new Date(Date.now() - 1000 * 3)),
           content: [{ type: 'text', text: 'First' }],
@@ -214,11 +232,12 @@ describe('ClickhouseStore', () => {

       await store.saveMessages({ messages });

-      const retrievedMessages = await store.getMessages({ threadId: thread.id });
+      const retrievedMessages = await store.getMessages<MessageType>({ threadId: thread.id });
       expect(retrievedMessages).toHaveLength(3);

       // Verify order is maintained
       retrievedMessages.forEach((msg, idx) => {
+        // @ts-expect-error
         expect(msg.content[0].text).toBe(messages[idx].content[0].text);
       });
     }, 10e3);
@@ -352,11 +371,17 @@ describe('ClickhouseStore', () => {
       const snapshot = {
         status: 'running',
         context: {
+          steps: {},
           stepResults: {},
           attempts: {},
           triggerData: { type: 'manual' },
         },
-
+        value: {},
+        activePaths: [],
+        suspendedPaths: {},
+        runId,
+        timestamp: new Date().getTime(),
+      };

       await store.persistWorkflowSnapshot({
         workflowName,
@@ -387,28 +412,40 @@ describe('ClickhouseStore', () => {
       const initialSnapshot = {
         status: 'running',
         context: {
+          steps: {},
           stepResults: {},
           attempts: {},
           triggerData: { type: 'manual' },
         },
+        value: {},
+        activePaths: [],
+        suspendedPaths: {},
+        runId,
+        timestamp: new Date().getTime(),
       };

       await store.persistWorkflowSnapshot({
         workflowName,
         runId,
-        snapshot: initialSnapshot
+        snapshot: initialSnapshot,
       });

       const updatedSnapshot = {
         status: 'completed',
         context: {
+          steps: {},
           stepResults: {
             'step-1': { status: 'success', result: { data: 'test' } },
           },
           attempts: { 'step-1': 1 },
           triggerData: { type: 'manual' },
         },
-
+        value: {},
+        activePaths: [],
+        suspendedPaths: {},
+        runId,
+        timestamp: new Date().getTime(),
+      };

       await store.persistWorkflowSnapshot({
         workflowName,
@@ -446,6 +483,7 @@ describe('ClickhouseStore', () => {
             dependencies: ['step-3', 'step-4'],
           },
         },
+        steps: {},
         attempts: { 'step-1': 1, 'step-2': 0 },
         triggerData: {
           type: 'scheduled',
@@ -467,6 +505,7 @@ describe('ClickhouseStore', () => {
             status: 'waiting',
           },
         ],
+        suspendedPaths: {},
         runId: runId,
         timestamp: Date.now(),
       };
@@ -474,7 +513,7 @@ describe('ClickhouseStore', () => {
       await store.persistWorkflowSnapshot({
         workflowName,
         runId,
-        snapshot: complexSnapshot
+        snapshot: complexSnapshot,
       });

       const loadedSnapshot = await store.loadWorkflowSnapshot({
@@ -500,8 +539,8 @@ describe('ClickhouseStore', () => {
       const workflowName1 = 'default_test_1';
       const workflowName2 = 'default_test_2';

-      const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('
-      const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('
+      const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('success');
+      const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('waiting');

       await store.persistWorkflowSnapshot({
         workflowName: workflowName1,
@@ -520,17 +559,17 @@ describe('ClickhouseStore', () => {
       expect(total).toBe(2);
       expect(runs[0]!.workflowName).toBe(workflowName2); // Most recent first
       expect(runs[1]!.workflowName).toBe(workflowName1);
-      const firstSnapshot = runs[0]!.snapshot
-      const secondSnapshot = runs[1]!.snapshot
-
-
+      const firstSnapshot = runs[0]!.snapshot;
+      const secondSnapshot = runs[1]!.snapshot;
+      checkWorkflowSnapshot(firstSnapshot, stepId2, 'waiting');
+      checkWorkflowSnapshot(secondSnapshot, stepId1, 'success');
     });

     it('filters by workflow name', async () => {
       const workflowName1 = 'filter_test_1';
       const workflowName2 = 'filter_test_2';

-      const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('
+      const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('success');
       const { snapshot: workflow2, runId: runId2 } = createSampleWorkflowSnapshot('failed');

       await store.persistWorkflowSnapshot({
@@ -551,8 +590,8 @@ describe('ClickhouseStore', () => {
       expect(runs).toHaveLength(1);
       expect(total).toBe(1);
       expect(runs[0]!.workflowName).toBe(workflowName1);
-      const snapshot = runs[0]!.snapshot
-
+      const snapshot = runs[0]!.snapshot;
+      checkWorkflowSnapshot(snapshot, stepId1, 'success');
     });

     it('filters by date range', async () => {
@@ -563,9 +602,9 @@ describe('ClickhouseStore', () => {
       const workflowName2 = 'date_test_2';
       const workflowName3 = 'date_test_3';

-      const { snapshot: workflow1, runId: runId1 } = createSampleWorkflowSnapshot('
-      const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('
-      const { snapshot: workflow3, runId: runId3, stepId: stepId3 } = createSampleWorkflowSnapshot('
+      const { snapshot: workflow1, runId: runId1 } = createSampleWorkflowSnapshot('success');
+      const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('waiting');
+      const { snapshot: workflow3, runId: runId3, stepId: stepId3 } = createSampleWorkflowSnapshot('skipped');

       await store.insert({
         tableName: TABLE_WORKFLOW_SNAPSHOT,
@@ -606,10 +645,10 @@ describe('ClickhouseStore', () => {
       expect(runs).toHaveLength(2);
       expect(runs[0]!.workflowName).toBe(workflowName3);
       expect(runs[1]!.workflowName).toBe(workflowName2);
-      const firstSnapshot = runs[0]!.snapshot
-      const secondSnapshot = runs[1]!.snapshot
-
-
+      const firstSnapshot = runs[0]!.snapshot;
+      const secondSnapshot = runs[1]!.snapshot;
+      checkWorkflowSnapshot(firstSnapshot, stepId3, 'skipped');
+      checkWorkflowSnapshot(secondSnapshot, stepId2, 'waiting');
     });

     it('handles pagination', async () => {
@@ -617,9 +656,9 @@ describe('ClickhouseStore', () => {
       const workflowName2 = 'page_test_2';
       const workflowName3 = 'page_test_3';

-      const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('
-      const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('
-      const { snapshot: workflow3, runId: runId3, stepId: stepId3 } = createSampleWorkflowSnapshot('
+      const { snapshot: workflow1, runId: runId1, stepId: stepId1 } = createSampleWorkflowSnapshot('success');
+      const { snapshot: workflow2, runId: runId2, stepId: stepId2 } = createSampleWorkflowSnapshot('waiting');
+      const { snapshot: workflow3, runId: runId3, stepId: stepId3 } = createSampleWorkflowSnapshot('skipped');

       await store.persistWorkflowSnapshot({
         workflowName: workflowName1,
@@ -648,10 +687,10 @@ describe('ClickhouseStore', () => {
       expect(page1.total).toBe(3); // Total count of all records
       expect(page1.runs[0]!.workflowName).toBe(workflowName3);
       expect(page1.runs[1]!.workflowName).toBe(workflowName2);
-      const firstSnapshot = page1.runs[0]!.snapshot
-      const secondSnapshot = page1.runs[1]!.snapshot
-
-
+      const firstSnapshot = page1.runs[0]!.snapshot;
+      const secondSnapshot = page1.runs[1]!.snapshot;
+      checkWorkflowSnapshot(firstSnapshot, stepId3, 'skipped');
+      checkWorkflowSnapshot(secondSnapshot, stepId2, 'waiting');

       // Get second page
       const page2 = await store.getWorkflowRuns({
@@ -661,10 +700,155 @@ describe('ClickhouseStore', () => {
       expect(page2.runs).toHaveLength(1);
       expect(page2.total).toBe(3);
       expect(page2.runs[0]!.workflowName).toBe(workflowName1);
-      const snapshot = page2.runs[0]!.snapshot
-
+      const snapshot = page2.runs[0]!.snapshot!;
+      checkWorkflowSnapshot(snapshot, stepId1, 'success');
     }, 10e3);
   });
+  describe('getWorkflowRunById', () => {
+    const workflowName = 'workflow-id-test';
+    let runId: string;
+    let stepId: string;
+
+    beforeEach(async () => {
+      // Insert a workflow run for positive test
+      const sample = createSampleWorkflowSnapshot('success');
+      runId = sample.runId;
+      stepId = sample.stepId;
+      await store.insert({
+        tableName: TABLE_WORKFLOW_SNAPSHOT,
+        record: {
+          workflow_name: workflowName,
+          run_id: runId,
+          resourceId: 'resource-abc',
+          snapshot: sample.snapshot,
+          createdAt: new Date(),
+          updatedAt: new Date(),
+        },
+      });
+    });
+
+    it('should retrieve a workflow run by ID', async () => {
+      const found = await store.getWorkflowRunById({
+        runId,
+        workflowName,
+      });
+      expect(found).not.toBeNull();
+      expect(found?.runId).toBe(runId);
+      checkWorkflowSnapshot(found?.snapshot!, stepId, 'success');
+    });
+
+    it('should return null for non-existent workflow run ID', async () => {
+      const notFound = await store.getWorkflowRunById({
+        runId: 'non-existent-id',
+        workflowName,
+      });
+      expect(notFound).toBeNull();
+    });
+  });
+  describe('getWorkflowRuns with resourceId', () => {
+    const workflowName = 'workflow-id-test';
+    let resourceId: string;
+    let runIds: string[] = [];
+
+    beforeEach(async () => {
+      // Insert multiple workflow runs for the same resourceId
+      resourceId = 'resource-shared';
+      for (const status of ['completed', 'running']) {
+        const sample = createSampleWorkflowSnapshot(status as WorkflowRunState['context']['steps'][string]['status']);
+        runIds.push(sample.runId);
+        await store.insert({
+          tableName: TABLE_WORKFLOW_SNAPSHOT,
+          record: {
+            workflow_name: workflowName,
+            run_id: sample.runId,
+            resourceId,
+            snapshot: sample.snapshot,
+            createdAt: new Date(),
+            updatedAt: new Date(),
+          },
+        });
+      }
+      // Insert a run with a different resourceId
+      const other = createSampleWorkflowSnapshot('waiting');
+      await store.insert({
+        tableName: TABLE_WORKFLOW_SNAPSHOT,
+        record: {
+          workflow_name: workflowName,
+          run_id: other.runId,
+          resourceId: 'resource-other',
+          snapshot: other.snapshot,
+          createdAt: new Date(),
+          updatedAt: new Date(),
+        },
+      });
+    });
+
+    it('should retrieve all workflow runs by resourceId', async () => {
+      const { runs } = await store.getWorkflowRuns({
+        resourceId,
+        workflowName,
+      });
+      expect(Array.isArray(runs)).toBe(true);
+      expect(runs.length).toBeGreaterThanOrEqual(2);
+      for (const run of runs) {
+        expect(run.resourceId).toBe(resourceId);
+      }
+    });
+
+    it('should return an empty array if no workflow runs match resourceId', async () => {
+      const { runs } = await store.getWorkflowRuns({
+        resourceId: 'non-existent-resource',
+        workflowName,
+      });
+      expect(Array.isArray(runs)).toBe(true);
+      expect(runs.length).toBe(0);
+    });
+  });
+
+  describe('hasColumn', () => {
+    const tempTable = 'temp_test_table';
+
+    beforeEach(async () => {
+      // Always try to drop the table before each test, ignore errors if it doesn't exist
+      try {
+        await store['db'].query({ query: `DROP TABLE IF EXISTS ${tempTable}` });
+      } catch {
+        /* ignore */
+      }
+    });
+
+    it('returns true if the column exists', async () => {
+      await store['db'].query({
+        query: `CREATE TABLE temp_test_table (
+          id UInt64,
+          resourceId String
+        ) ENGINE = MergeTree()
+        ORDER BY id
+        `,
+      });
+      expect(await store['hasColumn'](tempTable, 'resourceId')).toBe(true);
+    });
+
+    it('returns false if the column does not exist', async () => {
+      await store['db'].query({
+        query: `CREATE TABLE temp_test_table (
+          id UInt64,
+        ) ENGINE = MergeTree()
+        ORDER BY id
+        `,
+      });
+      expect(await store['hasColumn'](tempTable, 'resourceId')).toBe(false);
+    });
+
+    afterEach(async () => {
+      // Clean up after each test
+      try {
+        await store['db'].query({ query: `DROP TABLE IF EXISTS ${tempTable}` });
+      } catch {
+        /* ignore */
+      }
+    });
+  });

   afterAll(async () => {
     await store.close();