@mastra/clickhouse 0.14.2 → 0.14.3-alpha.0

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
@@ -1,275 +0,0 @@
- import type { ClickHouseClient } from '@clickhouse/client';
- import { ErrorCategory, ErrorDomain, MastraError } from '@mastra/core/error';
- import { safelyParseJSON, TABLE_SCHEMAS, TABLE_TRACES, TracesStorage } from '@mastra/core/storage';
- import type { PaginationInfo, StorageGetTracesPaginatedArg, StorageGetTracesArg } from '@mastra/core/storage';
- import type { Trace } from '@mastra/core/telemetry';
- import type { StoreOperationsClickhouse } from '../operations';
-
- export class TracesStorageClickhouse extends TracesStorage {
-   protected client: ClickHouseClient;
-   protected operations: StoreOperationsClickhouse;
-
-   constructor({ client, operations }: { client: ClickHouseClient; operations: StoreOperationsClickhouse }) {
-     super();
-     this.client = client;
-     this.operations = operations;
-   }
-
-   async getTracesPaginated(args: StorageGetTracesPaginatedArg): Promise<PaginationInfo & { traces: Trace[] }> {
-     const { name, scope, page = 0, perPage = 100, attributes, filters, dateRange } = args;
-     const fromDate = dateRange?.start;
-     const toDate = dateRange?.end;
-     const currentOffset = page * perPage;
-
-     const queryArgs: Record<string, any> = {};
-     const conditions: string[] = [];
-
-     if (name) {
-       conditions.push(`name LIKE CONCAT({var_name:String}, '%')`);
-       queryArgs.var_name = name;
-     }
-     if (scope) {
-       conditions.push(`scope = {var_scope:String}`);
-       queryArgs.var_scope = scope;
-     }
-     if (attributes) {
-       Object.entries(attributes).forEach(([key, value]) => {
-         conditions.push(`JSONExtractString(attributes, '${key}') = {var_attr_${key}:String}`);
-         queryArgs[`var_attr_${key}`] = value;
-       });
-     }
-     if (filters) {
-       Object.entries(filters).forEach(([key, value]) => {
-         conditions.push(`${key} = {var_col_${key}:${TABLE_SCHEMAS.mastra_traces?.[key]?.type ?? 'text'}}`);
-         queryArgs[`var_col_${key}`] = value;
-       });
-     }
-     if (fromDate) {
-       conditions.push(`createdAt >= parseDateTime64BestEffort({var_from_date:String})`);
-       queryArgs.var_from_date = fromDate.toISOString();
-     }
-     if (toDate) {
-       conditions.push(`createdAt <= parseDateTime64BestEffort({var_to_date:String})`);
-       queryArgs.var_to_date = toDate.toISOString();
-     }
-
-     const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
-
-     try {
-       // Get total count
-       const countResult = await this.client.query({
-         query: `SELECT COUNT(*) as count FROM ${TABLE_TRACES} ${whereClause}`,
-         query_params: queryArgs,
-         clickhouse_settings: {
-           date_time_input_format: 'best_effort',
-           date_time_output_format: 'iso',
-           use_client_time_zone: 1,
-           output_format_json_quote_64bit_integers: 0,
-         },
-       });
-
-       const countData = await countResult.json();
-       const total = Number((countData.data?.[0] as any)?.count ?? 0);
-
-       if (total === 0) {
-         return {
-           traces: [],
-           total: 0,
-           page,
-           perPage,
-           hasMore: false,
-         };
-       }
-
-       // Get traces with pagination
-       const result = await this.client.query({
-         query: `SELECT *, toDateTime64(createdAt, 3) as createdAt FROM ${TABLE_TRACES} ${whereClause} ORDER BY "createdAt" DESC LIMIT {var_limit:UInt32} OFFSET {var_offset:UInt32}`,
-         query_params: { ...queryArgs, var_limit: perPage, var_offset: currentOffset },
-         clickhouse_settings: {
-           date_time_input_format: 'best_effort',
-           date_time_output_format: 'iso',
-           use_client_time_zone: 1,
-           output_format_json_quote_64bit_integers: 0,
-         },
-       });
-
-       if (!result) {
-         return {
-           traces: [],
-           total,
-           page,
-           perPage,
-           hasMore: false,
-         };
-       }
-
-       const resp = await result.json();
-       const rows: any[] = resp.data;
-       const traces = rows.map(row => ({
-         id: row.id,
-         parentSpanId: row.parentSpanId,
-         traceId: row.traceId,
-         name: row.name,
-         scope: row.scope,
-         kind: row.kind,
-         status: safelyParseJSON(row.status),
-         events: safelyParseJSON(row.events),
-         links: safelyParseJSON(row.links),
-         attributes: safelyParseJSON(row.attributes),
-         startTime: row.startTime,
-         endTime: row.endTime,
-         other: safelyParseJSON(row.other),
-         createdAt: row.createdAt,
-       }));
-
-       return {
-         traces,
-         total,
-         page,
-         perPage,
-         hasMore: currentOffset + traces.length < total,
-       };
-     } catch (error: any) {
-       if (error?.message?.includes('no such table') || error?.message?.includes('does not exist')) {
-         return {
-           traces: [],
-           total: 0,
-           page,
-           perPage,
-           hasMore: false,
-         };
-       }
-       throw new MastraError(
-         {
-           id: 'CLICKHOUSE_STORAGE_GET_TRACES_PAGINATED_FAILED',
-           domain: ErrorDomain.STORAGE,
-           category: ErrorCategory.THIRD_PARTY,
-           details: {
-             name: name ?? null,
-             scope: scope ?? null,
-             page,
-             perPage,
-             attributes: attributes ? JSON.stringify(attributes) : null,
-             filters: filters ? JSON.stringify(filters) : null,
-             dateRange: dateRange ? JSON.stringify(dateRange) : null,
-           },
-         },
-         error,
-       );
-     }
-   }
-
-   async getTraces({
-     name,
-     scope,
-     page,
-     perPage,
-     attributes,
-     filters,
-     fromDate,
-     toDate,
-   }: StorageGetTracesArg): Promise<any[]> {
-     const limit = perPage;
-     const offset = page * perPage;
-
-     const args: Record<string, any> = {};
-
-     const conditions: string[] = [];
-     if (name) {
-       conditions.push(`name LIKE CONCAT({var_name:String}, '%')`);
-       args.var_name = name;
-     }
-     if (scope) {
-       conditions.push(`scope = {var_scope:String}`);
-       args.var_scope = scope;
-     }
-     if (attributes) {
-       Object.entries(attributes).forEach(([key, value]) => {
-         conditions.push(`JSONExtractString(attributes, '${key}') = {var_attr_${key}:String}`);
-         args[`var_attr_${key}`] = value;
-       });
-     }
-
-     if (filters) {
-       Object.entries(filters).forEach(([key, value]) => {
-         conditions.push(`${key} = {var_col_${key}:${TABLE_SCHEMAS.mastra_traces?.[key]?.type ?? 'text'}}`);
-         args[`var_col_${key}`] = value;
-       });
-     }
-
-     if (fromDate) {
-       conditions.push(`createdAt >= {var_from_date:DateTime64(3)}`);
-       args.var_from_date = fromDate.getTime() / 1000; // Convert to Unix timestamp
-     }
-
-     if (toDate) {
-       conditions.push(`createdAt <= {var_to_date:DateTime64(3)}`);
-       args.var_to_date = toDate.getTime() / 1000; // Convert to Unix timestamp
-     }
-
-     const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
-
-     try {
-       const result = await this.client.query({
-         query: `SELECT *, toDateTime64(createdAt, 3) as createdAt FROM ${TABLE_TRACES} ${whereClause} ORDER BY "createdAt" DESC LIMIT ${limit} OFFSET ${offset}`,
-         query_params: args,
-         clickhouse_settings: {
-           // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
-           date_time_input_format: 'best_effort',
-           date_time_output_format: 'iso',
-           use_client_time_zone: 1,
-           output_format_json_quote_64bit_integers: 0,
-         },
-       });
-
-       if (!result) {
-         return [];
-       }
-
-       const resp = await result.json();
-       const rows: any[] = resp.data;
-       return rows.map(row => ({
-         id: row.id,
-         parentSpanId: row.parentSpanId,
-         traceId: row.traceId,
-         name: row.name,
-         scope: row.scope,
-         kind: row.kind,
-         status: safelyParseJSON(row.status),
-         events: safelyParseJSON(row.events),
-         links: safelyParseJSON(row.links),
-         attributes: safelyParseJSON(row.attributes),
-         startTime: row.startTime,
-         endTime: row.endTime,
-         other: safelyParseJSON(row.other),
-         createdAt: row.createdAt,
-       }));
-     } catch (error: any) {
-       if (error?.message?.includes('no such table') || error?.message?.includes('does not exist')) {
-         return [];
-       }
-       throw new MastraError(
-         {
-           id: 'CLICKHOUSE_STORAGE_GET_TRACES_FAILED',
-           domain: ErrorDomain.STORAGE,
-           category: ErrorCategory.THIRD_PARTY,
-           details: {
-             name: name ?? null,
-             scope: scope ?? null,
-             page,
-             perPage,
-             attributes: attributes ? JSON.stringify(attributes) : null,
-             filters: filters ? JSON.stringify(filters) : null,
-             fromDate: fromDate?.toISOString() ?? null,
-             toDate: toDate?.toISOString() ?? null,
-           },
-         },
-         error,
-       );
-     }
-   }
-
-   async batchTraceInsert(args: { records: Trace[] }): Promise<void> {
-     await this.operations.batchInsert({ tableName: TABLE_TRACES, records: args.records });
-   }
- }
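
For orientation, a minimal usage sketch of the TracesStorageClickhouse class removed above. It assumes a recent @clickhouse/client whose createClient accepts url/username/password, a StoreOperationsClickhouse instance constructed elsewhere (its constructor is not part of this diff), and hypothetical relative import paths; the span-name prefix and date range are illustrative values only.

    // Sketch only: wiring for the traces store shown above.
    import { createClient } from '@clickhouse/client';
    import { TracesStorageClickhouse } from './traces'; // hypothetical path
    import type { StoreOperationsClickhouse } from './operations'; // hypothetical path

    declare const operations: StoreOperationsClickhouse; // assumed to be built elsewhere

    const client = createClient({
      url: process.env.CLICKHOUSE_URL ?? 'http://localhost:8123',
      username: process.env.CLICKHOUSE_USERNAME ?? 'default',
      password: process.env.CLICKHOUSE_PASSWORD ?? 'password',
    });

    const traces = new TracesStorageClickhouse({ client, operations });

    async function recentAgentTraces() {
      // Page 0, 50 rows per page, span names starting with 'agent.run',
      // restricted to the last 24 hours via dateRange.
      const { traces: rows, total, hasMore } = await traces.getTracesPaginated({
        name: 'agent.run',
        page: 0,
        perPage: 50,
        dateRange: { start: new Date(Date.now() - 24 * 60 * 60 * 1000), end: new Date() },
      });
      return { rows, total, hasMore };
    }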
@@ -1,90 +0,0 @@
- import type { TABLE_NAMES, TABLE_SCHEMAS, StorageColumn } from '@mastra/core/storage';
- import {
-   TABLE_MESSAGES,
-   TABLE_RESOURCES,
-   TABLE_EVALS,
-   TABLE_SCORERS,
-   TABLE_THREADS,
-   TABLE_TRACES,
-   TABLE_WORKFLOW_SNAPSHOT,
-   safelyParseJSON,
-   TABLE_AI_SPANS,
- } from '@mastra/core/storage';
-
- export const TABLE_ENGINES: Record<TABLE_NAMES, string> = {
-   [TABLE_MESSAGES]: `MergeTree()`,
-   [TABLE_WORKFLOW_SNAPSHOT]: `ReplacingMergeTree()`,
-   [TABLE_TRACES]: `MergeTree()`,
-   [TABLE_THREADS]: `ReplacingMergeTree()`,
-   [TABLE_EVALS]: `MergeTree()`,
-   [TABLE_SCORERS]: `MergeTree()`,
-   [TABLE_RESOURCES]: `ReplacingMergeTree()`,
-   // TODO: verify this is the correct engine for ai spans when implementing clickhouse storage
-   [TABLE_AI_SPANS]: `ReplacingMergeTree()`,
- };
-
- export const COLUMN_TYPES: Record<StorageColumn['type'], string> = {
-   text: 'String',
-   timestamp: 'DateTime64(3)',
-   uuid: 'String',
-   jsonb: 'String',
-   integer: 'Int64',
-   float: 'Float64',
-   bigint: 'Int64',
-   boolean: 'Bool',
- };
-
- export type IntervalUnit =
-   | 'NANOSECOND'
-   | 'MICROSECOND'
-   | 'MILLISECOND'
-   | 'SECOND'
-   | 'MINUTE'
-   | 'HOUR'
-   | 'DAY'
-   | 'WEEK'
-   | 'MONTH'
-   | 'QUARTER'
-   | 'YEAR';
-
- export type ClickhouseConfig = {
-   url: string;
-   username: string;
-   password: string;
-   ttl?: {
-     [TableKey in TABLE_NAMES]?: {
-       row?: { interval: number; unit: IntervalUnit; ttlKey?: string };
-       columns?: Partial<{
-         [ColumnKey in keyof (typeof TABLE_SCHEMAS)[TableKey]]: {
-           interval: number;
-           unit: IntervalUnit;
-           ttlKey?: string;
-         };
-       }>;
-     };
-   };
- };
-
- export function transformRow<R>(row: any): R {
-   if (!row) {
-     return row;
-   }
-
-   if (row.createdAt) {
-     row.createdAt = new Date(row.createdAt);
-   }
-   if (row.updatedAt) {
-     row.updatedAt = new Date(row.updatedAt);
-   }
-
-   // Parse content field if it's a JSON string
-   if (row.content && typeof row.content === 'string') {
-     row.content = safelyParseJSON(row.content);
-   }
-
-   return row;
- }
-
- export function transformRows<R>(rows: any[]): R[] {
-   return rows.map((row: any) => transformRow<R>(row));
- }
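
The ClickhouseConfig type above is what drives per-table row TTLs and per-column TTLs. A minimal sketch of a config exercising that shape; the table and column names mirror the commented-out example in the test file at the end of this diff, and the type is imported from the package entry point, as that test does.

    import type { ClickhouseConfig } from '@mastra/clickhouse';

    const config: ClickhouseConfig = {
      url: 'http://localhost:8123',
      username: 'default',
      password: 'password',
      ttl: {
        // Row-level TTL: expire whole trace rows 600 seconds after the TTL key.
        mastra_traces: {
          row: { interval: 600, unit: 'SECOND' },
        },
        // Column-level TTL: expire only the `result` column of evals after 10 seconds.
        mastra_evals: {
          columns: {
            result: { interval: 10, unit: 'SECOND' },
          },
        },
      },
    };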
@@ -1,323 +0,0 @@
- import type { ClickHouseClient } from '@clickhouse/client';
- import { ErrorCategory, ErrorDomain, MastraError } from '@mastra/core/error';
- import { TABLE_WORKFLOW_SNAPSHOT, WorkflowsStorage } from '@mastra/core/storage';
- import type { WorkflowRun, WorkflowRuns } from '@mastra/core/storage';
- import type { StepResult, WorkflowRunState } from '@mastra/core/workflows';
- import type { StoreOperationsClickhouse } from '../operations';
- import { TABLE_ENGINES } from '../utils';
-
- export class WorkflowsStorageClickhouse extends WorkflowsStorage {
-   protected client: ClickHouseClient;
-   protected operations: StoreOperationsClickhouse;
-   constructor({ client, operations }: { client: ClickHouseClient; operations: StoreOperationsClickhouse }) {
-     super();
-     this.operations = operations;
-     this.client = client;
-   }
-
-   updateWorkflowResults(
-     {
-       // workflowName,
-       // runId,
-       // stepId,
-       // result,
-       // runtimeContext,
-     }: {
-       workflowName: string;
-       runId: string;
-       stepId: string;
-       result: StepResult<any, any, any, any>;
-       runtimeContext: Record<string, any>;
-     },
-   ): Promise<Record<string, StepResult<any, any, any, any>>> {
-     throw new Error('Method not implemented.');
-   }
-   updateWorkflowState(
-     {
-       // workflowName,
-       // runId,
-       // opts,
-     }: {
-       workflowName: string;
-       runId: string;
-       opts: {
-         status: string;
-         result?: StepResult<any, any, any, any>;
-         error?: string;
-         suspendedPaths?: Record<string, number[]>;
-         waitingPaths?: Record<string, number[]>;
-       };
-     },
-   ): Promise<WorkflowRunState | undefined> {
-     throw new Error('Method not implemented.');
-   }
-
-   async persistWorkflowSnapshot({
-     workflowName,
-     runId,
-     snapshot,
-   }: {
-     workflowName: string;
-     runId: string;
-     snapshot: WorkflowRunState;
-   }): Promise<void> {
-     try {
-       const currentSnapshot = await this.operations.load({
-         tableName: TABLE_WORKFLOW_SNAPSHOT,
-         keys: { workflow_name: workflowName, run_id: runId },
-       });
-
-       const now = new Date();
-       const persisting = currentSnapshot
-         ? {
-             ...currentSnapshot,
-             snapshot: JSON.stringify(snapshot),
-             updatedAt: now.toISOString(),
-           }
-         : {
-             workflow_name: workflowName,
-             run_id: runId,
-             snapshot: JSON.stringify(snapshot),
-             createdAt: now.toISOString(),
-             updatedAt: now.toISOString(),
-           };
-
-       await this.client.insert({
-         table: TABLE_WORKFLOW_SNAPSHOT,
-         format: 'JSONEachRow',
-         values: [persisting],
-         clickhouse_settings: {
-           // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
-           date_time_input_format: 'best_effort',
-           use_client_time_zone: 1,
-           output_format_json_quote_64bit_integers: 0,
-         },
-       });
-     } catch (error: any) {
-       throw new MastraError(
-         {
-           id: 'CLICKHOUSE_STORAGE_PERSIST_WORKFLOW_SNAPSHOT_FAILED',
-           domain: ErrorDomain.STORAGE,
-           category: ErrorCategory.THIRD_PARTY,
-           details: { workflowName, runId },
-         },
-         error,
-       );
-     }
-   }
-
-   async loadWorkflowSnapshot({
-     workflowName,
-     runId,
-   }: {
-     workflowName: string;
-     runId: string;
-   }): Promise<WorkflowRunState | null> {
-     try {
-       const result = await this.operations.load({
-         tableName: TABLE_WORKFLOW_SNAPSHOT,
-         keys: {
-           workflow_name: workflowName,
-           run_id: runId,
-         },
-       });
-
-       if (!result) {
-         return null;
-       }
-
-       return (result as any).snapshot;
-     } catch (error: any) {
-       throw new MastraError(
-         {
-           id: 'CLICKHOUSE_STORAGE_LOAD_WORKFLOW_SNAPSHOT_FAILED',
-           domain: ErrorDomain.STORAGE,
-           category: ErrorCategory.THIRD_PARTY,
-           details: { workflowName, runId },
-         },
-         error,
-       );
-     }
-   }
-
-   private parseWorkflowRun(row: any): WorkflowRun {
-     let parsedSnapshot: WorkflowRunState | string = row.snapshot as string;
-     if (typeof parsedSnapshot === 'string') {
-       try {
-         parsedSnapshot = JSON.parse(row.snapshot as string) as WorkflowRunState;
-       } catch (e) {
-         // If parsing fails, return the raw snapshot string
-         console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
-       }
-     }
-
-     return {
-       workflowName: row.workflow_name,
-       runId: row.run_id,
-       snapshot: parsedSnapshot,
-       createdAt: new Date(row.createdAt),
-       updatedAt: new Date(row.updatedAt),
-       resourceId: row.resourceId,
-     };
-   }
-
-   async getWorkflowRuns({
-     workflowName,
-     fromDate,
-     toDate,
-     limit,
-     offset,
-     resourceId,
-   }: {
-     workflowName?: string;
-     fromDate?: Date;
-     toDate?: Date;
-     limit?: number;
-     offset?: number;
-     resourceId?: string;
-   } = {}): Promise<WorkflowRuns> {
-     try {
-       const conditions: string[] = [];
-       const values: Record<string, any> = {};
-
-       if (workflowName) {
-         conditions.push(`workflow_name = {var_workflow_name:String}`);
-         values.var_workflow_name = workflowName;
-       }
-
-       if (resourceId) {
-         const hasResourceId = await this.operations.hasColumn(TABLE_WORKFLOW_SNAPSHOT, 'resourceId');
-         if (hasResourceId) {
-           conditions.push(`resourceId = {var_resourceId:String}`);
-           values.var_resourceId = resourceId;
-         } else {
-           console.warn(`[${TABLE_WORKFLOW_SNAPSHOT}] resourceId column not found. Skipping resourceId filter.`);
-         }
-       }
-
-       if (fromDate) {
-         conditions.push(`createdAt >= {var_from_date:DateTime64(3)}`);
-         values.var_from_date = fromDate.getTime() / 1000; // Convert to Unix timestamp
-       }
-
-       if (toDate) {
-         conditions.push(`createdAt <= {var_to_date:DateTime64(3)}`);
-         values.var_to_date = toDate.getTime() / 1000; // Convert to Unix timestamp
-       }
-
-       const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
-       const limitClause = limit !== undefined ? `LIMIT ${limit}` : '';
-       const offsetClause = offset !== undefined ? `OFFSET ${offset}` : '';
-
-       let total = 0;
-       // Only get total count when using pagination
-       if (limit !== undefined && offset !== undefined) {
-         const countResult = await this.client.query({
-           query: `SELECT COUNT(*) as count FROM ${TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[TABLE_WORKFLOW_SNAPSHOT].startsWith('ReplacingMergeTree') ? 'FINAL' : ''} ${whereClause}`,
-           query_params: values,
-           format: 'JSONEachRow',
-         });
-         const countRows = await countResult.json();
-         total = Number((countRows as Array<{ count: string | number }>)[0]?.count ?? 0);
-       }
-
-       // Get results
-       const result = await this.client.query({
-         query: `
-           SELECT
-             workflow_name,
-             run_id,
-             snapshot,
-             toDateTime64(createdAt, 3) as createdAt,
-             toDateTime64(updatedAt, 3) as updatedAt,
-             resourceId
-           FROM ${TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[TABLE_WORKFLOW_SNAPSHOT].startsWith('ReplacingMergeTree') ? 'FINAL' : ''}
-           ${whereClause}
-           ORDER BY createdAt DESC
-           ${limitClause}
-           ${offsetClause}
-         `,
-         query_params: values,
-         format: 'JSONEachRow',
-       });
-
-       const resultJson = await result.json();
-       const rows = resultJson as any[];
-       const runs = rows.map(row => {
-         return this.parseWorkflowRun(row);
-       });
-
-       // Use runs.length as total when not paginating
-       return { runs, total: total || runs.length };
-     } catch (error: any) {
-       throw new MastraError(
-         {
-           id: 'CLICKHOUSE_STORAGE_GET_WORKFLOW_RUNS_FAILED',
-           domain: ErrorDomain.STORAGE,
-           category: ErrorCategory.THIRD_PARTY,
-           details: { workflowName: workflowName ?? '', resourceId: resourceId ?? '' },
-         },
-         error,
-       );
-     }
-   }
-
-   async getWorkflowRunById({
-     runId,
-     workflowName,
-   }: {
-     runId: string;
-     workflowName?: string;
-   }): Promise<WorkflowRun | null> {
-     try {
-       const conditions: string[] = [];
-       const values: Record<string, any> = {};
-
-       if (runId) {
-         conditions.push(`run_id = {var_runId:String}`);
-         values.var_runId = runId;
-       }
-
-       if (workflowName) {
-         conditions.push(`workflow_name = {var_workflow_name:String}`);
-         values.var_workflow_name = workflowName;
-       }
-
-       const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
-
-       // Get results
-       const result = await this.client.query({
-         query: `
-           SELECT
-             workflow_name,
-             run_id,
-             snapshot,
-             toDateTime64(createdAt, 3) as createdAt,
-             toDateTime64(updatedAt, 3) as updatedAt,
-             resourceId
-           FROM ${TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[TABLE_WORKFLOW_SNAPSHOT].startsWith('ReplacingMergeTree') ? 'FINAL' : ''}
-           ${whereClause}
-           ORDER BY createdAt DESC LIMIT 1
-         `,
-         query_params: values,
-         format: 'JSONEachRow',
-       });
-
-       const resultJson = await result.json();
-       if (!Array.isArray(resultJson) || resultJson.length === 0) {
-         return null;
-       }
-       return this.parseWorkflowRun(resultJson[0]);
-     } catch (error: any) {
-       throw new MastraError(
-         {
-           id: 'CLICKHOUSE_STORAGE_GET_WORKFLOW_RUN_BY_ID_FAILED',
-           domain: ErrorDomain.STORAGE,
-           category: ErrorCategory.THIRD_PARTY,
-           details: { runId: runId ?? '', workflowName: workflowName ?? '' },
-         },
-         error,
-       );
-     }
-   }
- }
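
A comparable sketch for the WorkflowsStorageClickhouse class above, reusing the client and operations assumptions from the traces sketch; the workflow name and import paths are hypothetical. Supplying both limit and offset is what enables the extra COUNT(*) query, so total then reflects all matching rows rather than just the returned page.

    import type { ClickHouseClient } from '@clickhouse/client';
    import { WorkflowsStorageClickhouse } from './workflows'; // hypothetical path
    import type { StoreOperationsClickhouse } from './operations'; // hypothetical path

    declare const client: ClickHouseClient; // assumed to be built elsewhere
    declare const operations: StoreOperationsClickhouse; // assumed to be built elsewhere

    const workflows = new WorkflowsStorageClickhouse({ client, operations });

    async function latestRuns() {
      // limit + offset together trigger the COUNT(*) pass in getWorkflowRuns.
      const { runs, total } = await workflows.getWorkflowRuns({
        workflowName: 'dailyReport', // hypothetical workflow name
        limit: 20,
        offset: 0,
      });
      return { runs, total };
    }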
@@ -1,26 +0,0 @@
- import { createTestSuite } from '@internal/storage-test-utils';
- import { vi } from 'vitest';
- import { ClickhouseStore } from '.';
- import type { ClickhouseConfig } from '.';
-
- vi.setConfig({ testTimeout: 60_000, hookTimeout: 60_000 });
-
- const TEST_CONFIG: ClickhouseConfig = {
-   url: process.env.CLICKHOUSE_URL || 'http://localhost:8123',
-   username: process.env.CLICKHOUSE_USERNAME || 'default',
-   password: process.env.CLICKHOUSE_PASSWORD || 'password',
-   // ttl: {
-   //   mastra_traces: {
-   //     row: { interval: 600, unit: 'SECOND' },
-   //   },
-   //   mastra_evals: {
-   //     columns: {
-   //       result: { interval: 10, unit: 'SECOND' },
-   //     },
-   //   },
-   // },
- };
-
- const storage = new ClickhouseStore(TEST_CONFIG);
-
- createTestSuite(storage);