@mastra/clickhouse 0.0.0-tsconfig-compile-20250703214351 → 0.0.0-unified-sidebar-20251010130811

This diff compares publicly available package versions that have been released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
Files changed (35)
  1. package/CHANGELOG.md +524 -4
  2. package/LICENSE.md +12 -4
  3. package/dist/index.cjs +2278 -598
  4. package/dist/index.cjs.map +1 -0
  5. package/dist/index.d.ts +3 -4
  6. package/dist/index.d.ts.map +1 -0
  7. package/dist/index.js +2278 -598
  8. package/dist/index.js.map +1 -0
  9. package/dist/storage/domains/legacy-evals/index.d.ts +21 -0
  10. package/dist/storage/domains/legacy-evals/index.d.ts.map +1 -0
  11. package/dist/storage/domains/memory/index.d.ts +87 -0
  12. package/dist/storage/domains/memory/index.d.ts.map +1 -0
  13. package/dist/storage/domains/operations/index.d.ts +42 -0
  14. package/dist/storage/domains/operations/index.d.ts.map +1 -0
  15. package/dist/storage/domains/scores/index.d.ts +54 -0
  16. package/dist/storage/domains/scores/index.d.ts.map +1 -0
  17. package/dist/storage/domains/traces/index.d.ts +21 -0
  18. package/dist/storage/domains/traces/index.d.ts.map +1 -0
  19. package/dist/storage/domains/utils.d.ts +28 -0
  20. package/dist/storage/domains/utils.d.ts.map +1 -0
  21. package/dist/storage/domains/workflows/index.d.ts +55 -0
  22. package/dist/storage/domains/workflows/index.d.ts.map +1 -0
  23. package/dist/storage/index.d.ts +245 -0
  24. package/dist/storage/index.d.ts.map +1 -0
  25. package/package.json +25 -11
  26. package/dist/_tsup-dts-rollup.d.cts +0 -191
  27. package/dist/_tsup-dts-rollup.d.ts +0 -191
  28. package/dist/index.d.cts +0 -4
  29. package/docker-compose.yaml +0 -15
  30. package/eslint.config.js +0 -6
  31. package/src/index.ts +0 -1
  32. package/src/storage/index.test.ts +0 -1154
  33. package/src/storage/index.ts +0 -1464
  34. package/tsconfig.json +0 -5
  35. package/vitest.config.ts +0 -12
@@ -1,1464 +0,0 @@
1
- import type { ClickHouseClient } from '@clickhouse/client';
2
- import { createClient } from '@clickhouse/client';
3
- import { MessageList } from '@mastra/core/agent';
4
- import type { MastraMessageContentV2 } from '@mastra/core/agent';
5
- import { MastraError, ErrorDomain, ErrorCategory } from '@mastra/core/error';
6
- import type { MetricResult, TestInfo } from '@mastra/core/eval';
7
- import type { MastraMessageV1, MastraMessageV2, StorageThreadType } from '@mastra/core/memory';
8
- import {
9
- MastraStorage,
10
- TABLE_EVALS,
11
- TABLE_MESSAGES,
12
- TABLE_SCHEMAS,
13
- TABLE_THREADS,
14
- TABLE_TRACES,
15
- TABLE_WORKFLOW_SNAPSHOT,
16
- } from '@mastra/core/storage';
17
- import type {
18
- EvalRow,
19
- PaginationInfo,
20
- StorageColumn,
21
- StorageGetMessagesArg,
22
- TABLE_NAMES,
23
- WorkflowRun,
24
- WorkflowRuns,
25
- StorageGetTracesArg,
26
- TABLE_RESOURCES,
27
- } from '@mastra/core/storage';
28
- import type { Trace } from '@mastra/core/telemetry';
29
- import type { WorkflowRunState } from '@mastra/core/workflows';
30
-
31
- type SUPPORTED_TABLE_NAMES = Exclude<TABLE_NAMES, typeof TABLE_RESOURCES>;
32
-
33
- function safelyParseJSON(jsonString: string): any {
34
- try {
35
- return JSON.parse(jsonString);
36
- } catch {
37
- return {};
38
- }
39
- }
40
-
41
- type IntervalUnit =
42
- | 'NANOSECOND'
43
- | 'MICROSECOND'
44
- | 'MILLISECOND'
45
- | 'SECOND'
46
- | 'MINUTE'
47
- | 'HOUR'
48
- | 'DAY'
49
- | 'WEEK'
50
- | 'MONTH'
51
- | 'QUARTER'
52
- | 'YEAR';
53
-
54
- export type ClickhouseConfig = {
55
- url: string;
56
- username: string;
57
- password: string;
58
- ttl?: {
59
- [TableKey in TABLE_NAMES]?: {
60
- row?: { interval: number; unit: IntervalUnit; ttlKey?: string };
61
- columns?: Partial<{
62
- [ColumnKey in keyof (typeof TABLE_SCHEMAS)[TableKey]]: {
63
- interval: number;
64
- unit: IntervalUnit;
65
- ttlKey?: string;
66
- };
67
- }>;
68
- };
69
- };
70
- };
71
-
72
- export const TABLE_ENGINES: Record<SUPPORTED_TABLE_NAMES, string> = {
73
- [TABLE_MESSAGES]: `MergeTree()`,
74
- [TABLE_WORKFLOW_SNAPSHOT]: `ReplacingMergeTree()`,
75
- [TABLE_TRACES]: `MergeTree()`,
76
- [TABLE_THREADS]: `ReplacingMergeTree()`,
77
- [TABLE_EVALS]: `MergeTree()`,
78
- };
79
-
80
- export const COLUMN_TYPES: Record<StorageColumn['type'], string> = {
81
- text: 'String',
82
- timestamp: 'DateTime64(3)',
83
- uuid: 'String',
84
- jsonb: 'String',
85
- integer: 'Int64',
86
- bigint: 'Int64',
87
- };
88
-
89
- function transformRows<R>(rows: any[]): R[] {
90
- return rows.map((row: any) => transformRow<R>(row));
91
- }
92
-
93
- function transformRow<R>(row: any): R {
94
- if (!row) {
95
- return row;
96
- }
97
-
98
- if (row.createdAt) {
99
- row.createdAt = new Date(row.createdAt);
100
- }
101
- if (row.updatedAt) {
102
- row.updatedAt = new Date(row.updatedAt);
103
- }
104
- return row;
105
- }
106
-
107
- export class ClickhouseStore extends MastraStorage {
108
- protected db: ClickHouseClient;
109
- protected ttl: ClickhouseConfig['ttl'] = {};
110
-
111
- constructor(config: ClickhouseConfig) {
112
- super({ name: 'ClickhouseStore' });
113
- this.db = createClient({
114
- url: config.url,
115
- username: config.username,
116
- password: config.password,
117
- clickhouse_settings: {
118
- date_time_input_format: 'best_effort',
119
- date_time_output_format: 'iso', // This is crucial
120
- use_client_time_zone: 1,
121
- output_format_json_quote_64bit_integers: 0,
122
- },
123
- });
124
- this.ttl = config.ttl;
125
- }
126
-
127
- private transformEvalRow(row: Record<string, any>): EvalRow {
128
- row = transformRow(row);
129
- const resultValue = JSON.parse(row.result as string);
130
- const testInfoValue = row.test_info ? JSON.parse(row.test_info as string) : undefined;
131
-
132
- if (!resultValue || typeof resultValue !== 'object' || !('score' in resultValue)) {
133
- throw new MastraError({
134
- id: 'CLICKHOUSE_STORAGE_INVALID_METRIC_FORMAT',
135
- text: `Invalid MetricResult format: ${JSON.stringify(resultValue)}`,
136
- domain: ErrorDomain.STORAGE,
137
- category: ErrorCategory.USER,
138
- });
139
- }
140
-
141
- return {
142
- input: row.input as string,
143
- output: row.output as string,
144
- result: resultValue as MetricResult,
145
- agentName: row.agent_name as string,
146
- metricName: row.metric_name as string,
147
- instructions: row.instructions as string,
148
- testInfo: testInfoValue as TestInfo,
149
- globalRunId: row.global_run_id as string,
150
- runId: row.run_id as string,
151
- createdAt: row.created_at as string,
152
- };
153
- }
154
-
155
- private escape(value: any): string {
156
- if (typeof value === 'string') {
157
- return `'${value.replace(/'/g, "''")}'`;
158
- }
159
- if (value instanceof Date) {
160
- return `'${value.toISOString()}'`;
161
- }
162
- if (value === null || value === undefined) {
163
- return 'NULL';
164
- }
165
- return value.toString();
166
- }
167
-
168
- async getEvalsByAgentName(agentName: string, type?: 'test' | 'live'): Promise<EvalRow[]> {
169
- try {
170
- const baseQuery = `SELECT *, toDateTime64(createdAt, 3) as createdAt FROM ${TABLE_EVALS} WHERE agent_name = {var_agent_name:String}`;
171
- const typeCondition =
172
- type === 'test'
173
- ? " AND test_info IS NOT NULL AND JSONExtractString(test_info, 'testPath') IS NOT NULL"
174
- : type === 'live'
175
- ? " AND (test_info IS NULL OR JSONExtractString(test_info, 'testPath') IS NULL)"
176
- : '';
177
-
178
- const result = await this.db.query({
179
- query: `${baseQuery}${typeCondition} ORDER BY createdAt DESC`,
180
- query_params: { var_agent_name: agentName },
181
- clickhouse_settings: {
182
- date_time_input_format: 'best_effort',
183
- date_time_output_format: 'iso',
184
- use_client_time_zone: 1,
185
- output_format_json_quote_64bit_integers: 0,
186
- },
187
- });
188
-
189
- if (!result) {
190
- return [];
191
- }
192
-
193
- const rows = await result.json();
194
- return rows.data.map((row: any) => this.transformEvalRow(row));
195
- } catch (error: any) {
196
- if (error?.message?.includes('no such table') || error?.message?.includes('does not exist')) {
197
- return [];
198
- }
199
- throw new MastraError(
200
- {
201
- id: 'CLICKHOUSE_STORAGE_GET_EVALS_BY_AGENT_FAILED',
202
- domain: ErrorDomain.STORAGE,
203
- category: ErrorCategory.THIRD_PARTY,
204
- details: { agentName, type: type ?? null },
205
- },
206
- error,
207
- );
208
- }
209
- }
210
-
211
- async batchInsert({ tableName, records }: { tableName: TABLE_NAMES; records: Record<string, any>[] }): Promise<void> {
212
- try {
213
- await this.db.insert({
214
- table: tableName,
215
- values: records.map(record => ({
216
- ...Object.fromEntries(
217
- Object.entries(record).map(([key, value]) => [
218
- key,
219
- TABLE_SCHEMAS[tableName as TABLE_NAMES]?.[key]?.type === 'timestamp'
220
- ? new Date(value).toISOString()
221
- : value,
222
- ]),
223
- ),
224
- })),
225
- format: 'JSONEachRow',
226
- clickhouse_settings: {
227
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
228
- date_time_input_format: 'best_effort',
229
- use_client_time_zone: 1,
230
- output_format_json_quote_64bit_integers: 0,
231
- },
232
- });
233
- } catch (error: any) {
234
- throw new MastraError(
235
- {
236
- id: 'CLICKHOUSE_STORAGE_BATCH_INSERT_FAILED',
237
- domain: ErrorDomain.STORAGE,
238
- category: ErrorCategory.THIRD_PARTY,
239
- details: { tableName },
240
- },
241
- error,
242
- );
243
- }
244
- }
245
-
246
- async getTraces({
247
- name,
248
- scope,
249
- page,
250
- perPage,
251
- attributes,
252
- filters,
253
- fromDate,
254
- toDate,
255
- }: {
256
- name?: string;
257
- scope?: string;
258
- page: number;
259
- perPage: number;
260
- attributes?: Record<string, string>;
261
- filters?: Record<string, any>;
262
- fromDate?: Date;
263
- toDate?: Date;
264
- }): Promise<any[]> {
265
- const limit = perPage;
266
- const offset = page * perPage;
267
-
268
- const args: Record<string, any> = {};
269
-
270
- const conditions: string[] = [];
271
- if (name) {
272
- conditions.push(`name LIKE CONCAT({var_name:String}, '%')`);
273
- args.var_name = name;
274
- }
275
- if (scope) {
276
- conditions.push(`scope = {var_scope:String}`);
277
- args.var_scope = scope;
278
- }
279
- if (attributes) {
280
- Object.entries(attributes).forEach(([key, value]) => {
281
- conditions.push(`JSONExtractString(attributes, '${key}') = {var_attr_${key}:String}`);
282
- args[`var_attr_${key}`] = value;
283
- });
284
- }
285
-
286
- if (filters) {
287
- Object.entries(filters).forEach(([key, value]) => {
288
- conditions.push(
289
- `${key} = {var_col_${key}:${COLUMN_TYPES[TABLE_SCHEMAS.mastra_traces?.[key]?.type ?? 'text']}}`,
290
- );
291
- args[`var_col_${key}`] = value;
292
- });
293
- }
294
-
295
- if (fromDate) {
296
- conditions.push(`createdAt >= {var_from_date:DateTime64(3)}`);
297
- args.var_from_date = fromDate.getTime() / 1000; // Convert to Unix timestamp
298
- }
299
-
300
- if (toDate) {
301
- conditions.push(`createdAt <= {var_to_date:DateTime64(3)}`);
302
- args.var_to_date = toDate.getTime() / 1000; // Convert to Unix timestamp
303
- }
304
-
305
- const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
306
-
307
- try {
308
- const result = await this.db.query({
309
- query: `SELECT *, toDateTime64(createdAt, 3) as createdAt FROM ${TABLE_TRACES} ${whereClause} ORDER BY "createdAt" DESC LIMIT ${limit} OFFSET ${offset}`,
310
- query_params: args,
311
- clickhouse_settings: {
312
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
313
- date_time_input_format: 'best_effort',
314
- date_time_output_format: 'iso',
315
- use_client_time_zone: 1,
316
- output_format_json_quote_64bit_integers: 0,
317
- },
318
- });
319
-
320
- if (!result) {
321
- return [];
322
- }
323
-
324
- const resp = await result.json();
325
- const rows: any[] = resp.data;
326
- return rows.map(row => ({
327
- id: row.id,
328
- parentSpanId: row.parentSpanId,
329
- traceId: row.traceId,
330
- name: row.name,
331
- scope: row.scope,
332
- kind: row.kind,
333
- status: safelyParseJSON(row.status as string),
334
- events: safelyParseJSON(row.events as string),
335
- links: safelyParseJSON(row.links as string),
336
- attributes: safelyParseJSON(row.attributes as string),
337
- startTime: row.startTime,
338
- endTime: row.endTime,
339
- other: safelyParseJSON(row.other as string),
340
- createdAt: row.createdAt,
341
- }));
342
- } catch (error: any) {
343
- if (error?.message?.includes('no such table') || error?.message?.includes('does not exist')) {
344
- return [];
345
- }
346
- throw new MastraError(
347
- {
348
- id: 'CLICKHOUSE_STORAGE_GET_TRACES_FAILED',
349
- domain: ErrorDomain.STORAGE,
350
- category: ErrorCategory.THIRD_PARTY,
351
- details: {
352
- name: name ?? null,
353
- scope: scope ?? null,
354
- page,
355
- perPage,
356
- attributes: attributes ? JSON.stringify(attributes) : null,
357
- filters: filters ? JSON.stringify(filters) : null,
358
- fromDate: fromDate?.toISOString() ?? null,
359
- toDate: toDate?.toISOString() ?? null,
360
- },
361
- },
362
- error,
363
- );
364
- }
365
- }
366
-
367
- async optimizeTable({ tableName }: { tableName: TABLE_NAMES }): Promise<void> {
368
- try {
369
- await this.db.command({
370
- query: `OPTIMIZE TABLE ${tableName} FINAL`,
371
- });
372
- } catch (error: any) {
373
- throw new MastraError(
374
- {
375
- id: 'CLICKHOUSE_STORAGE_OPTIMIZE_TABLE_FAILED',
376
- domain: ErrorDomain.STORAGE,
377
- category: ErrorCategory.THIRD_PARTY,
378
- details: { tableName },
379
- },
380
- error,
381
- );
382
- }
383
- }
384
-
385
- async materializeTtl({ tableName }: { tableName: TABLE_NAMES }): Promise<void> {
386
- try {
387
- await this.db.command({
388
- query: `ALTER TABLE ${tableName} MATERIALIZE TTL;`,
389
- });
390
- } catch (error: any) {
391
- throw new MastraError(
392
- {
393
- id: 'CLICKHOUSE_STORAGE_MATERIALIZE_TTL_FAILED',
394
- domain: ErrorDomain.STORAGE,
395
- category: ErrorCategory.THIRD_PARTY,
396
- details: { tableName },
397
- },
398
- error,
399
- );
400
- }
401
- }
402
-
403
- async createTable({
404
- tableName,
405
- schema,
406
- }: {
407
- tableName: SUPPORTED_TABLE_NAMES;
408
- schema: Record<string, StorageColumn>;
409
- }): Promise<void> {
410
- try {
411
- const columns = Object.entries(schema)
412
- .map(([name, def]) => {
413
- const constraints = [];
414
- if (!def.nullable) constraints.push('NOT NULL');
415
- const columnTtl = this.ttl?.[tableName]?.columns?.[name];
416
- return `"${name}" ${COLUMN_TYPES[def.type]} ${constraints.join(' ')} ${columnTtl ? `TTL toDateTime(${columnTtl.ttlKey ?? 'createdAt'}) + INTERVAL ${columnTtl.interval} ${columnTtl.unit}` : ''}`;
417
- })
418
- .join(',\n');
419
-
420
- const rowTtl = this.ttl?.[tableName]?.row;
421
- const sql =
422
- tableName === TABLE_WORKFLOW_SNAPSHOT
423
- ? `
424
- CREATE TABLE IF NOT EXISTS ${tableName} (
425
- ${['id String'].concat(columns)}
426
- )
427
- ENGINE = ${TABLE_ENGINES[tableName]}
428
- PRIMARY KEY (createdAt, run_id, workflow_name)
429
- ORDER BY (createdAt, run_id, workflow_name)
430
- ${rowTtl ? `TTL toDateTime(${rowTtl.ttlKey ?? 'createdAt'}) + INTERVAL ${rowTtl.interval} ${rowTtl.unit}` : ''}
431
- SETTINGS index_granularity = 8192
432
- `
433
- : `
434
- CREATE TABLE IF NOT EXISTS ${tableName} (
435
- ${columns}
436
- )
437
- ENGINE = ${TABLE_ENGINES[tableName]}
438
- PRIMARY KEY (createdAt, ${tableName === TABLE_EVALS ? 'run_id' : 'id'})
439
- ORDER BY (createdAt, ${tableName === TABLE_EVALS ? 'run_id' : 'id'})
440
- ${this.ttl?.[tableName]?.row ? `TTL toDateTime(createdAt) + INTERVAL ${this.ttl[tableName].row.interval} ${this.ttl[tableName].row.unit}` : ''}
441
- SETTINGS index_granularity = 8192
442
- `;
443
-
444
- await this.db.query({
445
- query: sql,
446
- clickhouse_settings: {
447
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
448
- date_time_input_format: 'best_effort',
449
- date_time_output_format: 'iso',
450
- use_client_time_zone: 1,
451
- output_format_json_quote_64bit_integers: 0,
452
- },
453
- });
454
- } catch (error: any) {
455
- throw new MastraError(
456
- {
457
- id: 'CLICKHOUSE_STORAGE_CREATE_TABLE_FAILED',
458
- domain: ErrorDomain.STORAGE,
459
- category: ErrorCategory.THIRD_PARTY,
460
- details: { tableName },
461
- },
462
- error,
463
- );
464
- }
465
- }
466
-
467
- protected getSqlType(type: StorageColumn['type']): string {
468
- switch (type) {
469
- case 'text':
470
- return 'String';
471
- case 'timestamp':
472
- return 'DateTime64(3)';
473
- case 'integer':
474
- case 'bigint':
475
- return 'Int64';
476
- case 'jsonb':
477
- return 'String';
478
- default:
479
- return super.getSqlType(type); // fallback to base implementation
480
- }
481
- }
482
-
483
- /**
484
- * Alters table schema to add columns if they don't exist
485
- * @param tableName Name of the table
486
- * @param schema Schema of the table
487
- * @param ifNotExists Array of column names to add if they don't exist
488
- */
489
- async alterTable({
490
- tableName,
491
- schema,
492
- ifNotExists,
493
- }: {
494
- tableName: TABLE_NAMES;
495
- schema: Record<string, StorageColumn>;
496
- ifNotExists: string[];
497
- }): Promise<void> {
498
- try {
499
- // 1. Get existing columns
500
- const describeSql = `DESCRIBE TABLE ${tableName}`;
501
- const result = await this.db.query({
502
- query: describeSql,
503
- });
504
- const rows = await result.json();
505
- const existingColumnNames = new Set(rows.data.map((row: any) => row.name.toLowerCase()));
506
-
507
- // 2. Add missing columns
508
- for (const columnName of ifNotExists) {
509
- if (!existingColumnNames.has(columnName.toLowerCase()) && schema[columnName]) {
510
- const columnDef = schema[columnName];
511
- let sqlType = this.getSqlType(columnDef.type);
512
- if (columnDef.nullable !== false) {
513
- sqlType = `Nullable(${sqlType})`;
514
- }
515
- const defaultValue = columnDef.nullable === false ? this.getDefaultValue(columnDef.type) : '';
516
- // Use backticks or double quotes as needed for identifiers
517
- const alterSql =
518
- `ALTER TABLE ${tableName} ADD COLUMN IF NOT EXISTS "${columnName}" ${sqlType} ${defaultValue}`.trim();
519
-
520
- await this.db.query({
521
- query: alterSql,
522
- });
523
- this.logger?.debug?.(`Added column ${columnName} to table ${tableName}`);
524
- }
525
- }
526
- } catch (error: any) {
527
- throw new MastraError(
528
- {
529
- id: 'CLICKHOUSE_STORAGE_ALTER_TABLE_FAILED',
530
- domain: ErrorDomain.STORAGE,
531
- category: ErrorCategory.THIRD_PARTY,
532
- details: { tableName },
533
- },
534
- error,
535
- );
536
- }
537
- }
538
-
539
- async clearTable({ tableName }: { tableName: TABLE_NAMES }): Promise<void> {
540
- try {
541
- await this.db.query({
542
- query: `TRUNCATE TABLE ${tableName}`,
543
- clickhouse_settings: {
544
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
545
- date_time_input_format: 'best_effort',
546
- date_time_output_format: 'iso',
547
- use_client_time_zone: 1,
548
- output_format_json_quote_64bit_integers: 0,
549
- },
550
- });
551
- } catch (error: any) {
552
- throw new MastraError(
553
- {
554
- id: 'CLICKHOUSE_STORAGE_CLEAR_TABLE_FAILED',
555
- domain: ErrorDomain.STORAGE,
556
- category: ErrorCategory.THIRD_PARTY,
557
- details: { tableName },
558
- },
559
- error,
560
- );
561
- }
562
- }
563
-
564
- async insert({ tableName, record }: { tableName: TABLE_NAMES; record: Record<string, any> }): Promise<void> {
565
- try {
566
- await this.db.insert({
567
- table: tableName,
568
- values: [
569
- {
570
- ...record,
571
- createdAt: record.createdAt.toISOString(),
572
- updatedAt: record.updatedAt.toISOString(),
573
- },
574
- ],
575
- format: 'JSONEachRow',
576
- clickhouse_settings: {
577
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
578
- output_format_json_quote_64bit_integers: 0,
579
- date_time_input_format: 'best_effort',
580
- use_client_time_zone: 1,
581
- },
582
- });
583
- } catch (error: any) {
584
- throw new MastraError(
585
- {
586
- id: 'CLICKHOUSE_STORAGE_INSERT_FAILED',
587
- domain: ErrorDomain.STORAGE,
588
- category: ErrorCategory.THIRD_PARTY,
589
- details: { tableName },
590
- },
591
- error,
592
- );
593
- }
594
- }
595
-
596
- async load<R>({
597
- tableName,
598
- keys,
599
- }: {
600
- tableName: SUPPORTED_TABLE_NAMES;
601
- keys: Record<string, string>;
602
- }): Promise<R | null> {
603
- try {
604
- const keyEntries = Object.entries(keys);
605
- const conditions = keyEntries
606
- .map(
607
- ([key]) =>
608
- `"${key}" = {var_${key}:${COLUMN_TYPES[TABLE_SCHEMAS[tableName as TABLE_NAMES]?.[key]?.type ?? 'text']}}`,
609
- )
610
- .join(' AND ');
611
- const values = keyEntries.reduce((acc, [key, value]) => {
612
- return { ...acc, [`var_${key}`]: value };
613
- }, {});
614
-
615
- const result = await this.db.query({
616
- query: `SELECT *, toDateTime64(createdAt, 3) as createdAt, toDateTime64(updatedAt, 3) as updatedAt FROM ${tableName} ${TABLE_ENGINES[tableName as SUPPORTED_TABLE_NAMES].startsWith('ReplacingMergeTree') ? 'FINAL' : ''} WHERE ${conditions}`,
617
- query_params: values,
618
- clickhouse_settings: {
619
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
620
- date_time_input_format: 'best_effort',
621
- date_time_output_format: 'iso',
622
- use_client_time_zone: 1,
623
- output_format_json_quote_64bit_integers: 0,
624
- },
625
- });
626
-
627
- if (!result) {
628
- return null;
629
- }
630
-
631
- const rows = await result.json();
632
- // If this is a workflow snapshot, parse the snapshot field
633
- if (tableName === TABLE_WORKFLOW_SNAPSHOT) {
634
- const snapshot = rows.data[0] as any;
635
- if (!snapshot) {
636
- return null;
637
- }
638
- if (typeof snapshot.snapshot === 'string') {
639
- snapshot.snapshot = JSON.parse(snapshot.snapshot);
640
- }
641
- return transformRow(snapshot);
642
- }
643
-
644
- const data: R = transformRow(rows.data[0]);
645
- return data;
646
- } catch (error) {
647
- throw new MastraError(
648
- {
649
- id: 'CLICKHOUSE_STORAGE_LOAD_FAILED',
650
- domain: ErrorDomain.STORAGE,
651
- category: ErrorCategory.THIRD_PARTY,
652
- details: { tableName },
653
- },
654
- error,
655
- );
656
- }
657
- }
658
-
659
- async getThreadById({ threadId }: { threadId: string }): Promise<StorageThreadType | null> {
660
- try {
661
- const result = await this.db.query({
662
- query: `SELECT
663
- id,
664
- "resourceId",
665
- title,
666
- metadata,
667
- toDateTime64(createdAt, 3) as createdAt,
668
- toDateTime64(updatedAt, 3) as updatedAt
669
- FROM "${TABLE_THREADS}"
670
- FINAL
671
- WHERE id = {var_id:String}`,
672
- query_params: { var_id: threadId },
673
- clickhouse_settings: {
674
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
675
- date_time_input_format: 'best_effort',
676
- date_time_output_format: 'iso',
677
- use_client_time_zone: 1,
678
- output_format_json_quote_64bit_integers: 0,
679
- },
680
- });
681
-
682
- const rows = await result.json();
683
- const thread = transformRow(rows.data[0]) as StorageThreadType;
684
-
685
- if (!thread) {
686
- return null;
687
- }
688
-
689
- return {
690
- ...thread,
691
- metadata: typeof thread.metadata === 'string' ? JSON.parse(thread.metadata) : thread.metadata,
692
- createdAt: thread.createdAt,
693
- updatedAt: thread.updatedAt,
694
- };
695
- } catch (error: any) {
696
- throw new MastraError(
697
- {
698
- id: 'CLICKHOUSE_STORAGE_GET_THREAD_BY_ID_FAILED',
699
- domain: ErrorDomain.STORAGE,
700
- category: ErrorCategory.THIRD_PARTY,
701
- details: { threadId },
702
- },
703
- error,
704
- );
705
- }
706
- }
707
-
708
- async getThreadsByResourceId({ resourceId }: { resourceId: string }): Promise<StorageThreadType[]> {
709
- try {
710
- const result = await this.db.query({
711
- query: `SELECT
712
- id,
713
- "resourceId",
714
- title,
715
- metadata,
716
- toDateTime64(createdAt, 3) as createdAt,
717
- toDateTime64(updatedAt, 3) as updatedAt
718
- FROM "${TABLE_THREADS}"
719
- WHERE "resourceId" = {var_resourceId:String}`,
720
- query_params: { var_resourceId: resourceId },
721
- clickhouse_settings: {
722
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
723
- date_time_input_format: 'best_effort',
724
- date_time_output_format: 'iso',
725
- use_client_time_zone: 1,
726
- output_format_json_quote_64bit_integers: 0,
727
- },
728
- });
729
-
730
- const rows = await result.json();
731
- const threads = transformRows(rows.data) as StorageThreadType[];
732
-
733
- return threads.map((thread: StorageThreadType) => ({
734
- ...thread,
735
- metadata: typeof thread.metadata === 'string' ? JSON.parse(thread.metadata) : thread.metadata,
736
- createdAt: thread.createdAt,
737
- updatedAt: thread.updatedAt,
738
- }));
739
- } catch (error) {
740
- throw new MastraError(
741
- {
742
- id: 'CLICKHOUSE_STORAGE_GET_THREADS_BY_RESOURCE_ID_FAILED',
743
- domain: ErrorDomain.STORAGE,
744
- category: ErrorCategory.THIRD_PARTY,
745
- details: { resourceId },
746
- },
747
- error,
748
- );
749
- }
750
- }
751
-
752
- async saveThread({ thread }: { thread: StorageThreadType }): Promise<StorageThreadType> {
753
- try {
754
- await this.db.insert({
755
- table: TABLE_THREADS,
756
- values: [
757
- {
758
- ...thread,
759
- createdAt: thread.createdAt.toISOString(),
760
- updatedAt: thread.updatedAt.toISOString(),
761
- },
762
- ],
763
- format: 'JSONEachRow',
764
- clickhouse_settings: {
765
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
766
- date_time_input_format: 'best_effort',
767
- use_client_time_zone: 1,
768
- output_format_json_quote_64bit_integers: 0,
769
- },
770
- });
771
-
772
- return thread;
773
- } catch (error) {
774
- throw new MastraError(
775
- {
776
- id: 'CLICKHOUSE_STORAGE_SAVE_THREAD_FAILED',
777
- domain: ErrorDomain.STORAGE,
778
- category: ErrorCategory.THIRD_PARTY,
779
- details: { threadId: thread.id },
780
- },
781
- error,
782
- );
783
- }
784
- }
785
-
786
- async updateThread({
787
- id,
788
- title,
789
- metadata,
790
- }: {
791
- id: string;
792
- title: string;
793
- metadata: Record<string, unknown>;
794
- }): Promise<StorageThreadType> {
795
- try {
796
- // First get the existing thread to merge metadata
797
- const existingThread = await this.getThreadById({ threadId: id });
798
- if (!existingThread) {
799
- throw new Error(`Thread ${id} not found`);
800
- }
801
-
802
- // Merge the existing metadata with the new metadata
803
- const mergedMetadata = {
804
- ...existingThread.metadata,
805
- ...metadata,
806
- };
807
-
808
- const updatedThread = {
809
- ...existingThread,
810
- title,
811
- metadata: mergedMetadata,
812
- updatedAt: new Date(),
813
- };
814
-
815
- await this.db.insert({
816
- table: TABLE_THREADS,
817
- format: 'JSONEachRow',
818
- values: [
819
- {
820
- id: updatedThread.id,
821
- resourceId: updatedThread.resourceId,
822
- title: updatedThread.title,
823
- metadata: updatedThread.metadata,
824
- createdAt: updatedThread.createdAt,
825
- updatedAt: updatedThread.updatedAt.toISOString(),
826
- },
827
- ],
828
- clickhouse_settings: {
829
- date_time_input_format: 'best_effort',
830
- use_client_time_zone: 1,
831
- output_format_json_quote_64bit_integers: 0,
832
- },
833
- });
834
-
835
- return updatedThread;
836
- } catch (error) {
837
- throw new MastraError(
838
- {
839
- id: 'CLICKHOUSE_STORAGE_UPDATE_THREAD_FAILED',
840
- domain: ErrorDomain.STORAGE,
841
- category: ErrorCategory.THIRD_PARTY,
842
- details: { threadId: id, title },
843
- },
844
- error,
845
- );
846
- }
847
- }
848
-
849
- async deleteThread({ threadId }: { threadId: string }): Promise<void> {
850
- try {
851
- // First delete all messages associated with this thread
852
- await this.db.command({
853
- query: `DELETE FROM "${TABLE_MESSAGES}" WHERE thread_id = {var_thread_id:String};`,
854
- query_params: { var_thread_id: threadId },
855
- clickhouse_settings: {
856
- output_format_json_quote_64bit_integers: 0,
857
- },
858
- });
859
-
860
- // Then delete the thread
861
- await this.db.command({
862
- query: `DELETE FROM "${TABLE_THREADS}" WHERE id = {var_id:String};`,
863
- query_params: { var_id: threadId },
864
- clickhouse_settings: {
865
- output_format_json_quote_64bit_integers: 0,
866
- },
867
- });
868
- } catch (error) {
869
- throw new MastraError(
870
- {
871
- id: 'CLICKHOUSE_STORAGE_DELETE_THREAD_FAILED',
872
- domain: ErrorDomain.STORAGE,
873
- category: ErrorCategory.THIRD_PARTY,
874
- details: { threadId },
875
- },
876
- error,
877
- );
878
- }
879
- }
880
-
881
- public async getMessages(args: StorageGetMessagesArg & { format?: 'v1' }): Promise<MastraMessageV1[]>;
882
- public async getMessages(args: StorageGetMessagesArg & { format: 'v2' }): Promise<MastraMessageV2[]>;
883
- public async getMessages({
884
- threadId,
885
- resourceId,
886
- selectBy,
887
- format,
888
- }: StorageGetMessagesArg & { format?: 'v1' | 'v2' }): Promise<MastraMessageV1[] | MastraMessageV2[]> {
889
- try {
890
- const messages: any[] = [];
891
- const limit = this.resolveMessageLimit({ last: selectBy?.last, defaultLimit: 40 });
892
- const include = selectBy?.include || [];
893
-
894
- if (include.length) {
895
- const includeResult = await this.db.query({
896
- query: `
897
- WITH ordered_messages AS (
898
- SELECT
899
- *,
900
- toDateTime64(createdAt, 3) as createdAt,
901
- toDateTime64(updatedAt, 3) as updatedAt,
902
- ROW_NUMBER() OVER (ORDER BY "createdAt" DESC) as row_num
903
- FROM "${TABLE_MESSAGES}"
904
- WHERE thread_id = {var_thread_id:String}
905
- )
906
- SELECT
907
- m.id AS id,
908
- m.content as content,
909
- m.role as role,
910
- m.type as type,
911
- m.createdAt as createdAt,
912
- m.updatedAt as updatedAt,
913
- m.thread_id AS "threadId"
914
- FROM ordered_messages m
915
- WHERE m.id = ANY({var_include:Array(String)})
916
- OR EXISTS (
917
- SELECT 1 FROM ordered_messages target
918
- WHERE target.id = ANY({var_include:Array(String)})
919
- AND (
920
- -- Get previous messages based on the max withPreviousMessages
921
- (m.row_num <= target.row_num + {var_withPreviousMessages:Int64} AND m.row_num > target.row_num)
922
- OR
923
- -- Get next messages based on the max withNextMessages
924
- (m.row_num >= target.row_num - {var_withNextMessages:Int64} AND m.row_num < target.row_num)
925
- )
926
- )
927
- ORDER BY m."createdAt" DESC
928
- `,
929
- query_params: {
930
- var_thread_id: threadId,
931
- var_include: include.map(i => i.id),
932
- var_withPreviousMessages: Math.max(...include.map(i => i.withPreviousMessages || 0)),
933
- var_withNextMessages: Math.max(...include.map(i => i.withNextMessages || 0)),
934
- },
935
- clickhouse_settings: {
936
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
937
- date_time_input_format: 'best_effort',
938
- date_time_output_format: 'iso',
939
- use_client_time_zone: 1,
940
- output_format_json_quote_64bit_integers: 0,
941
- },
942
- });
943
-
944
- const rows = await includeResult.json();
945
- messages.push(...transformRows(rows.data));
946
- }
947
-
948
- // Then get the remaining messages, excluding the ids we just fetched
949
- const result = await this.db.query({
950
- query: `
951
- SELECT
952
- id,
953
- content,
954
- role,
955
- type,
956
- toDateTime64(createdAt, 3) as createdAt,
957
- thread_id AS "threadId"
958
- FROM "${TABLE_MESSAGES}"
959
- WHERE thread_id = {threadId:String}
960
- AND id NOT IN ({exclude:Array(String)})
961
- ORDER BY "createdAt" DESC
962
- LIMIT {limit:Int64}
963
- `,
964
- query_params: {
965
- threadId,
966
- exclude: messages.map(m => m.id),
967
- limit,
968
- },
969
- clickhouse_settings: {
970
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
971
- date_time_input_format: 'best_effort',
972
- date_time_output_format: 'iso',
973
- use_client_time_zone: 1,
974
- output_format_json_quote_64bit_integers: 0,
975
- },
976
- });
977
-
978
- const rows = await result.json();
979
- messages.push(...transformRows(rows.data));
980
-
981
- // Sort all messages by creation date
982
- messages.sort((a, b) => new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime());
983
-
984
- // Parse message content
985
- messages.forEach(message => {
986
- if (typeof message.content === 'string') {
987
- try {
988
- message.content = JSON.parse(message.content);
989
- } catch {
990
- // If parsing fails, leave as string
991
- }
992
- }
993
- });
994
-
995
- const list = new MessageList({ threadId, resourceId }).add(messages, 'memory');
996
- if (format === `v2`) return list.get.all.v2();
997
- return list.get.all.v1();
998
- } catch (error) {
999
- throw new MastraError(
1000
- {
1001
- id: 'CLICKHOUSE_STORAGE_GET_MESSAGES_FAILED',
1002
- domain: ErrorDomain.STORAGE,
1003
- category: ErrorCategory.THIRD_PARTY,
1004
- details: { threadId, resourceId: resourceId ?? '' },
1005
- },
1006
- error,
1007
- );
1008
- }
1009
- }
1010
-
1011
- async saveMessages(args: { messages: MastraMessageV1[]; format?: undefined | 'v1' }): Promise<MastraMessageV1[]>;
1012
- async saveMessages(args: { messages: MastraMessageV2[]; format: 'v2' }): Promise<MastraMessageV2[]>;
1013
- async saveMessages(
1014
- args: { messages: MastraMessageV1[]; format?: undefined | 'v1' } | { messages: MastraMessageV2[]; format: 'v2' },
1015
- ): Promise<MastraMessageV2[] | MastraMessageV1[]> {
1016
- const { messages, format = 'v1' } = args;
1017
- if (messages.length === 0) return messages;
1018
-
1019
- try {
1020
- const threadId = messages[0]?.threadId;
1021
- const resourceId = messages[0]?.resourceId;
1022
- if (!threadId) {
1023
- throw new Error('Thread ID is required');
1024
- }
1025
-
1026
- // Check if thread exists
1027
- const thread = await this.getThreadById({ threadId });
1028
- if (!thread) {
1029
- throw new Error(`Thread ${threadId} not found`);
1030
- }
1031
-
1032
- // Clickhouse's MergeTree engine does not support native upserts or unique constraints on (id, thread_id).
1033
- // Note: We cannot switch to ReplacingMergeTree without a schema migration,
1034
- // as it would require altering the table engine.
1035
- // To ensure correct upsert behavior, we first fetch existing (id, thread_id) pairs for the incoming messages.
1036
- const existingResult = await this.db.query({
1037
- query: `SELECT id, thread_id FROM ${TABLE_MESSAGES} WHERE id IN ({ids:Array(String)})`,
1038
- query_params: {
1039
- ids: messages.map(m => m.id),
1040
- },
1041
- clickhouse_settings: {
1042
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
1043
- date_time_input_format: 'best_effort',
1044
- date_time_output_format: 'iso',
1045
- use_client_time_zone: 1,
1046
- output_format_json_quote_64bit_integers: 0,
1047
- },
1048
- format: 'JSONEachRow',
1049
- });
1050
- const existingRows: Array<{ id: string; thread_id: string }> = await existingResult.json();
1051
-
1052
- const existingSet = new Set(existingRows.map(row => `${row.id}::${row.thread_id}`));
1053
- // Partition the batch into new inserts and updates:
1054
- // New messages are inserted in bulk.
1055
- const toInsert = messages.filter(m => !existingSet.has(`${m.id}::${threadId}`));
1056
- // Existing messages are updated via ALTER TABLE ... UPDATE.
1057
- const toUpdate = messages.filter(m => existingSet.has(`${m.id}::${threadId}`));
1058
- const updatePromises = toUpdate.map(message =>
1059
- this.db.command({
1060
- query: `
1061
- ALTER TABLE ${TABLE_MESSAGES}
1062
- UPDATE content = {var_content:String}, role = {var_role:String}, type = {var_type:String}
1063
- WHERE id = {var_id:String} AND thread_id = {var_thread_id:String}
1064
- `,
1065
- query_params: {
1066
- var_content: typeof message.content === 'string' ? message.content : JSON.stringify(message.content),
1067
- var_role: message.role,
1068
- var_type: message.type || 'v2',
1069
- var_id: message.id,
1070
- var_thread_id: threadId,
1071
- },
1072
- clickhouse_settings: {
1073
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
1074
- date_time_input_format: 'best_effort',
1075
- use_client_time_zone: 1,
1076
- output_format_json_quote_64bit_integers: 0,
1077
- },
1078
- }),
1079
- );
1080
-
1081
- // Execute message inserts and thread update in parallel for better performance
1082
- await Promise.all([
1083
- // Insert messages
1084
- this.db.insert({
1085
- table: TABLE_MESSAGES,
1086
- format: 'JSONEachRow',
1087
- values: toInsert.map(message => ({
1088
- id: message.id,
1089
- thread_id: threadId,
1090
- content: typeof message.content === 'string' ? message.content : JSON.stringify(message.content),
1091
- createdAt: message.createdAt.toISOString(),
1092
- role: message.role,
1093
- type: message.type || 'v2',
1094
- })),
1095
- clickhouse_settings: {
1096
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
1097
- date_time_input_format: 'best_effort',
1098
- use_client_time_zone: 1,
1099
- output_format_json_quote_64bit_integers: 0,
1100
- },
1101
- }),
1102
- ...updatePromises,
1103
- // Update thread's updatedAt timestamp
1104
- this.db.insert({
1105
- table: TABLE_THREADS,
1106
- format: 'JSONEachRow',
1107
- values: [
1108
- {
1109
- id: thread.id,
1110
- resourceId: thread.resourceId,
1111
- title: thread.title,
1112
- metadata: thread.metadata,
1113
- createdAt: thread.createdAt,
1114
- updatedAt: new Date().toISOString(),
1115
- },
1116
- ],
1117
- clickhouse_settings: {
1118
- date_time_input_format: 'best_effort',
1119
- use_client_time_zone: 1,
1120
- output_format_json_quote_64bit_integers: 0,
1121
- },
1122
- }),
1123
- ]);
1124
-
1125
- const list = new MessageList({ threadId, resourceId }).add(messages, 'memory');
1126
- if (format === `v2`) return list.get.all.v2();
1127
- return list.get.all.v1();
1128
- } catch (error: any) {
1129
- throw new MastraError(
1130
- {
1131
- id: 'CLICKHOUSE_STORAGE_SAVE_MESSAGES_FAILED',
1132
- domain: ErrorDomain.STORAGE,
1133
- category: ErrorCategory.THIRD_PARTY,
1134
- },
1135
- error,
1136
- );
1137
- }
1138
- }
1139
-
1140
- async persistWorkflowSnapshot({
1141
- workflowName,
1142
- runId,
1143
- snapshot,
1144
- }: {
1145
- workflowName: string;
1146
- runId: string;
1147
- snapshot: WorkflowRunState;
1148
- }): Promise<void> {
1149
- try {
1150
- const currentSnapshot = await this.load({
1151
- tableName: TABLE_WORKFLOW_SNAPSHOT,
1152
- keys: { workflow_name: workflowName, run_id: runId },
1153
- });
1154
-
1155
- const now = new Date();
1156
- const persisting = currentSnapshot
1157
- ? {
1158
- ...currentSnapshot,
1159
- snapshot: JSON.stringify(snapshot),
1160
- updatedAt: now.toISOString(),
1161
- }
1162
- : {
1163
- workflow_name: workflowName,
1164
- run_id: runId,
1165
- snapshot: JSON.stringify(snapshot),
1166
- createdAt: now.toISOString(),
1167
- updatedAt: now.toISOString(),
1168
- };
1169
-
1170
- await this.db.insert({
1171
- table: TABLE_WORKFLOW_SNAPSHOT,
1172
- format: 'JSONEachRow',
1173
- values: [persisting],
1174
- clickhouse_settings: {
1175
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
1176
- date_time_input_format: 'best_effort',
1177
- use_client_time_zone: 1,
1178
- output_format_json_quote_64bit_integers: 0,
1179
- },
1180
- });
1181
- } catch (error: any) {
1182
- throw new MastraError(
1183
- {
1184
- id: 'CLICKHOUSE_STORAGE_PERSIST_WORKFLOW_SNAPSHOT_FAILED',
1185
- domain: ErrorDomain.STORAGE,
1186
- category: ErrorCategory.THIRD_PARTY,
1187
- details: { workflowName, runId },
1188
- },
1189
- error,
1190
- );
1191
- }
1192
- }
1193
-
1194
- async loadWorkflowSnapshot({
1195
- workflowName,
1196
- runId,
1197
- }: {
1198
- workflowName: string;
1199
- runId: string;
1200
- }): Promise<WorkflowRunState | null> {
1201
- try {
1202
- const result = await this.load({
1203
- tableName: TABLE_WORKFLOW_SNAPSHOT,
1204
- keys: {
1205
- workflow_name: workflowName,
1206
- run_id: runId,
1207
- },
1208
- });
1209
-
1210
- if (!result) {
1211
- return null;
1212
- }
1213
-
1214
- return (result as any).snapshot;
1215
- } catch (error: any) {
1216
- throw new MastraError(
1217
- {
1218
- id: 'CLICKHOUSE_STORAGE_LOAD_WORKFLOW_SNAPSHOT_FAILED',
1219
- domain: ErrorDomain.STORAGE,
1220
- category: ErrorCategory.THIRD_PARTY,
1221
- details: { workflowName, runId },
1222
- },
1223
- error,
1224
- );
1225
- }
1226
- }
1227
-
1228
- private parseWorkflowRun(row: any): WorkflowRun {
1229
- let parsedSnapshot: WorkflowRunState | string = row.snapshot as string;
1230
- if (typeof parsedSnapshot === 'string') {
1231
- try {
1232
- parsedSnapshot = JSON.parse(row.snapshot as string) as WorkflowRunState;
1233
- } catch (e) {
1234
- // If parsing fails, return the raw snapshot string
1235
- console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
1236
- }
1237
- }
1238
-
1239
- return {
1240
- workflowName: row.workflow_name,
1241
- runId: row.run_id,
1242
- snapshot: parsedSnapshot,
1243
- createdAt: new Date(row.createdAt),
1244
- updatedAt: new Date(row.updatedAt),
1245
- resourceId: row.resourceId,
1246
- };
1247
- }
1248
-
1249
- async getWorkflowRuns({
1250
- workflowName,
1251
- fromDate,
1252
- toDate,
1253
- limit,
1254
- offset,
1255
- resourceId,
1256
- }: {
1257
- workflowName?: string;
1258
- fromDate?: Date;
1259
- toDate?: Date;
1260
- limit?: number;
1261
- offset?: number;
1262
- resourceId?: string;
1263
- } = {}): Promise<WorkflowRuns> {
1264
- try {
1265
- const conditions: string[] = [];
1266
- const values: Record<string, any> = {};
1267
-
1268
- if (workflowName) {
1269
- conditions.push(`workflow_name = {var_workflow_name:String}`);
1270
- values.var_workflow_name = workflowName;
1271
- }
1272
-
1273
- if (resourceId) {
1274
- const hasResourceId = await this.hasColumn(TABLE_WORKFLOW_SNAPSHOT, 'resourceId');
1275
- if (hasResourceId) {
1276
- conditions.push(`resourceId = {var_resourceId:String}`);
1277
- values.var_resourceId = resourceId;
1278
- } else {
1279
- console.warn(`[${TABLE_WORKFLOW_SNAPSHOT}] resourceId column not found. Skipping resourceId filter.`);
1280
- }
1281
- }
1282
-
1283
- if (fromDate) {
1284
- conditions.push(`createdAt >= {var_from_date:DateTime64(3)}`);
1285
- values.var_from_date = fromDate.getTime() / 1000; // Convert to Unix timestamp
1286
- }
1287
-
1288
- if (toDate) {
1289
- conditions.push(`createdAt <= {var_to_date:DateTime64(3)}`);
1290
- values.var_to_date = toDate.getTime() / 1000; // Convert to Unix timestamp
1291
- }
1292
-
1293
- const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
1294
- const limitClause = limit !== undefined ? `LIMIT ${limit}` : '';
1295
- const offsetClause = offset !== undefined ? `OFFSET ${offset}` : '';
1296
-
1297
- let total = 0;
1298
- // Only get total count when using pagination
1299
- if (limit !== undefined && offset !== undefined) {
1300
- const countResult = await this.db.query({
1301
- query: `SELECT COUNT(*) as count FROM ${TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[TABLE_WORKFLOW_SNAPSHOT].startsWith('ReplacingMergeTree') ? 'FINAL' : ''} ${whereClause}`,
1302
- query_params: values,
1303
- format: 'JSONEachRow',
1304
- });
1305
- const countRows = await countResult.json();
1306
- total = Number((countRows as Array<{ count: string | number }>)[0]?.count ?? 0);
1307
- }
1308
-
1309
- // Get results
1310
- const result = await this.db.query({
1311
- query: `
1312
- SELECT
1313
- workflow_name,
1314
- run_id,
1315
- snapshot,
1316
- toDateTime64(createdAt, 3) as createdAt,
1317
- toDateTime64(updatedAt, 3) as updatedAt,
1318
- resourceId
1319
- FROM ${TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[TABLE_WORKFLOW_SNAPSHOT].startsWith('ReplacingMergeTree') ? 'FINAL' : ''}
1320
- ${whereClause}
1321
- ORDER BY createdAt DESC
1322
- ${limitClause}
1323
- ${offsetClause}
1324
- `,
1325
- query_params: values,
1326
- format: 'JSONEachRow',
1327
- });
1328
-
1329
- const resultJson = await result.json();
1330
- const rows = resultJson as any[];
1331
- const runs = rows.map(row => {
1332
- return this.parseWorkflowRun(row);
1333
- });
1334
-
1335
- // Use runs.length as total when not paginating
1336
- return { runs, total: total || runs.length };
1337
- } catch (error: any) {
1338
- throw new MastraError(
1339
- {
1340
- id: 'CLICKHOUSE_STORAGE_GET_WORKFLOW_RUNS_FAILED',
1341
- domain: ErrorDomain.STORAGE,
1342
- category: ErrorCategory.THIRD_PARTY,
1343
- details: { workflowName: workflowName ?? '', resourceId: resourceId ?? '' },
1344
- },
1345
- error,
1346
- );
1347
- }
1348
- }
1349
-
1350
- async getWorkflowRunById({
1351
- runId,
1352
- workflowName,
1353
- }: {
1354
- runId: string;
1355
- workflowName?: string;
1356
- }): Promise<WorkflowRun | null> {
1357
- try {
1358
- const conditions: string[] = [];
1359
- const values: Record<string, any> = {};
1360
-
1361
- if (runId) {
1362
- conditions.push(`run_id = {var_runId:String}`);
1363
- values.var_runId = runId;
1364
- }
1365
-
1366
- if (workflowName) {
1367
- conditions.push(`workflow_name = {var_workflow_name:String}`);
1368
- values.var_workflow_name = workflowName;
1369
- }
1370
-
1371
- const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
1372
-
1373
- // Get results
1374
- const result = await this.db.query({
1375
- query: `
1376
- SELECT
1377
- workflow_name,
1378
- run_id,
1379
- snapshot,
1380
- toDateTime64(createdAt, 3) as createdAt,
1381
- toDateTime64(updatedAt, 3) as updatedAt,
1382
- resourceId
1383
- FROM ${TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[TABLE_WORKFLOW_SNAPSHOT].startsWith('ReplacingMergeTree') ? 'FINAL' : ''}
1384
- ${whereClause}
1385
- `,
1386
- query_params: values,
1387
- format: 'JSONEachRow',
1388
- });
1389
-
1390
- const resultJson = await result.json();
1391
- if (!Array.isArray(resultJson) || resultJson.length === 0) {
1392
- return null;
1393
- }
1394
- return this.parseWorkflowRun(resultJson[0]);
1395
- } catch (error: any) {
1396
- throw new MastraError(
1397
- {
1398
- id: 'CLICKHOUSE_STORAGE_GET_WORKFLOW_RUN_BY_ID_FAILED',
1399
- domain: ErrorDomain.STORAGE,
1400
- category: ErrorCategory.THIRD_PARTY,
1401
- details: { runId: runId ?? '', workflowName: workflowName ?? '' },
1402
- },
1403
- error,
1404
- );
1405
- }
1406
- }
1407
-
1408
- private async hasColumn(table: string, column: string): Promise<boolean> {
1409
- const result = await this.db.query({
1410
- query: `DESCRIBE TABLE ${table}`,
1411
- format: 'JSONEachRow',
1412
- });
1413
- const columns = (await result.json()) as { name: string }[];
1414
- return columns.some(c => c.name === column);
1415
- }
1416
-
1417
- async getTracesPaginated(_args: StorageGetTracesArg): Promise<PaginationInfo & { traces: Trace[] }> {
1418
- throw new MastraError({
1419
- id: 'CLICKHOUSE_STORAGE_GET_TRACES_PAGINATED_FAILED',
1420
- domain: ErrorDomain.STORAGE,
1421
- category: ErrorCategory.USER,
1422
- text: 'Method not implemented.',
1423
- });
1424
- }
1425
-
1426
- async getThreadsByResourceIdPaginated(_args: {
1427
- resourceId: string;
1428
- page?: number;
1429
- perPage?: number;
1430
- }): Promise<PaginationInfo & { threads: StorageThreadType[] }> {
1431
- throw new MastraError({
1432
- id: 'CLICKHOUSE_STORAGE_GET_THREADS_BY_RESOURCE_ID_PAGINATED_FAILED',
1433
- domain: ErrorDomain.STORAGE,
1434
- category: ErrorCategory.USER,
1435
- text: 'Method not implemented.',
1436
- });
1437
- }
1438
-
1439
- async getMessagesPaginated(
1440
- _args: StorageGetMessagesArg,
1441
- ): Promise<PaginationInfo & { messages: MastraMessageV1[] | MastraMessageV2[] }> {
1442
- throw new MastraError({
1443
- id: 'CLICKHOUSE_STORAGE_GET_MESSAGES_PAGINATED_FAILED',
1444
- domain: ErrorDomain.STORAGE,
1445
- category: ErrorCategory.USER,
1446
- text: 'Method not implemented.',
1447
- });
1448
- }
1449
-
1450
- async close(): Promise<void> {
1451
- await this.db.close();
1452
- }
1453
-
1454
- async updateMessages(_args: {
1455
- messages: Partial<Omit<MastraMessageV2, 'createdAt'>> &
1456
- {
1457
- id: string;
1458
- content?: { metadata?: MastraMessageContentV2['metadata']; content?: MastraMessageContentV2['content'] };
1459
- }[];
1460
- }): Promise<MastraMessageV2[]> {
1461
- this.logger.error('updateMessages is not yet implemented in ClickhouseStore');
1462
- throw new Error('Method not implemented');
1463
- }
1464
- }
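
For reference, below is a minimal usage sketch of the store defined in the deleted src/storage/index.ts above. It is grounded only in the ClickhouseConfig type, the class constructor, and the method signatures shown in this diff; the import path, export names, and environment-variable names are assumptions, not confirmed by the diff (check the package's current dist/index.d.ts for the authoritative API).

// Hedged sketch: assumes ClickhouseStore is exported from '@mastra/clickhouse'
// and that the config/TTL shape matches the ClickhouseConfig type shown above.
import { ClickhouseStore } from '@mastra/clickhouse';
import { TABLE_TRACES } from '@mastra/core/storage';

async function main() {
  const store = new ClickhouseStore({
    url: process.env.CLICKHOUSE_URL!,       // e.g. 'http://localhost:8123' (assumed env var)
    username: process.env.CLICKHOUSE_USER!,
    password: process.env.CLICKHOUSE_PASSWORD!,
    // Optional per-table TTLs: rows (and, if configured, individual columns)
    // expire relative to a timestamp column, 'createdAt' by default.
    ttl: {
      [TABLE_TRACES]: {
        row: { interval: 30, unit: 'DAY' },
      },
    },
  });

  // Reads return rows with createdAt/updatedAt coerced back to Date objects
  // by transformRow(); thread metadata stored as JSON strings is parsed.
  const thread = await store.getThreadById({ threadId: 'thread-123' });
  console.log(thread?.title);

  // The 'v2' format returns MastraMessageV2[] per the getMessages overloads.
  const messages = await store.getMessages({ threadId: 'thread-123', format: 'v2' });
  console.log(messages.length);

  await store.close();
}

main().catch(console.error);

Note that in the newer version this file is replaced by the storage/domains/* modules listed in the file summary, so the public surface may have shifted even if the configuration shape is similar.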