@mastra/clickhouse 0.0.0-working-memory-per-user-20250620163010 → 0.0.0-zod-v4-compat-part-2-20250820135355

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. package/CHANGELOG.md +168 -4
  2. package/LICENSE.md +12 -4
  3. package/dist/index.cjs +2259 -566
  4. package/dist/index.cjs.map +1 -0
  5. package/dist/index.d.ts +2 -4
  6. package/dist/index.d.ts.map +1 -0
  7. package/dist/index.js +2245 -550
  8. package/dist/index.js.map +1 -0
  9. package/dist/storage/domains/legacy-evals/index.d.ts +21 -0
  10. package/dist/storage/domains/legacy-evals/index.d.ts.map +1 -0
  11. package/dist/storage/domains/memory/index.d.ts +79 -0
  12. package/dist/storage/domains/memory/index.d.ts.map +1 -0
  13. package/dist/storage/domains/operations/index.d.ts +42 -0
  14. package/dist/storage/domains/operations/index.d.ts.map +1 -0
  15. package/dist/storage/domains/scores/index.d.ts +43 -0
  16. package/dist/storage/domains/scores/index.d.ts.map +1 -0
  17. package/dist/storage/domains/traces/index.d.ts +21 -0
  18. package/dist/storage/domains/traces/index.d.ts.map +1 -0
  19. package/dist/storage/domains/utils.d.ts +28 -0
  20. package/dist/storage/domains/utils.d.ts.map +1 -0
  21. package/dist/storage/domains/workflows/index.d.ts +36 -0
  22. package/dist/storage/domains/workflows/index.d.ts.map +1 -0
  23. package/dist/{_tsup-dts-rollup.d.cts → storage/index.d.ts} +106 -87
  24. package/dist/storage/index.d.ts.map +1 -0
  25. package/package.json +9 -9
  26. package/src/storage/domains/legacy-evals/index.ts +246 -0
  27. package/src/storage/domains/memory/index.ts +1393 -0
  28. package/src/storage/domains/operations/index.ts +319 -0
  29. package/src/storage/domains/scores/index.ts +326 -0
  30. package/src/storage/domains/traces/index.ts +275 -0
  31. package/src/storage/domains/utils.ts +86 -0
  32. package/src/storage/domains/workflows/index.ts +285 -0
  33. package/src/storage/index.test.ts +15 -1013
  34. package/src/storage/index.ts +214 -1013
  35. package/tsconfig.build.json +9 -0
  36. package/tsconfig.json +1 -1
  37. package/tsup.config.ts +22 -0
  38. package/dist/_tsup-dts-rollup.d.ts +0 -187
  39. package/dist/index.d.cts +0 -4
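
The headline change in this release is that the monolithic ClickhouseStore is split into per-domain storage classes under src/storage/domains (legacy-evals, memory, operations, scores, traces, workflows), with the legacy-evals and memory classes shown below each receiving a shared ClickHouse client and a StoreOperationsClickhouse instance. As a rough orientation for reading the diff, the following TypeScript sketch shows how those constructors are wired together; it assumes the domain classes are reachable from the package's public exports and that createClient still takes the url/username/password options used by the previous ClickhouseStore constructor, neither of which this diff confirms.

    import { createClient } from '@clickhouse/client';
    // Assumed import path: the diff defines these classes but does not show the export surface.
    import {
      StoreOperationsClickhouse,
      LegacyEvalsStorageClickhouse,
      MemoryStorageClickhouse,
    } from '@mastra/clickhouse';

    // Shared ClickHouse connection, mirroring the options the old ClickhouseStore passed to createClient.
    const client = createClient({
      url: process.env.CLICKHOUSE_URL!,
      username: process.env.CLICKHOUSE_USERNAME!,
      password: process.env.CLICKHOUSE_PASSWORD!,
    });

    // Each domain store receives the client plus the shared operations helper,
    // matching the constructor({ client, operations }) signatures in the diff.
    const operations = new StoreOperationsClickhouse({ client, ttl: {} });
    const legacyEvals = new LegacyEvalsStorageClickhouse({ client, operations });
    const memory = new MemoryStorageClickhouse({ client, operations });
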
package/dist/index.cjs CHANGED
@@ -1,23 +1,18 @@
  'use strict';

  var client = require('@clickhouse/client');
- var agent = require('@mastra/core/agent');
+ var error = require('@mastra/core/error');
  var storage = require('@mastra/core/storage');
+ var agent = require('@mastra/core/agent');

  // src/storage/index.ts
- function safelyParseJSON(jsonString) {
- try {
- return JSON.parse(jsonString);
- } catch {
- return {};
- }
- }
  var TABLE_ENGINES = {
  [storage.TABLE_MESSAGES]: `MergeTree()`,
  [storage.TABLE_WORKFLOW_SNAPSHOT]: `ReplacingMergeTree()`,
  [storage.TABLE_TRACES]: `MergeTree()`,
  [storage.TABLE_THREADS]: `ReplacingMergeTree()`,
  [storage.TABLE_EVALS]: `MergeTree()`,
+ [storage.TABLE_SCORERS]: `MergeTree()`,
  [storage.TABLE_RESOURCES]: `ReplacingMergeTree()`
  };
  var COLUMN_TYPES = {
@@ -26,11 +21,9 @@ var COLUMN_TYPES = {
  uuid: "String",
  jsonb: "String",
  integer: "Int64",
+ float: "Float64",
  bigint: "Int64"
  };
- function transformRows(rows) {
- return rows.map((row) => transformRow(row));
- }
  function transformRow(row) {
  if (!row) {
  return row;
@@ -41,33 +34,63 @@ function transformRow(row) {
  if (row.updatedAt) {
  row.updatedAt = new Date(row.updatedAt);
  }
+ if (row.content && typeof row.content === "string") {
+ row.content = storage.safelyParseJSON(row.content);
+ }
  return row;
  }
- var ClickhouseStore = class extends storage.MastraStorage {
- db;
- ttl = {};
- constructor(config) {
- super({ name: "ClickhouseStore" });
- this.db = client.createClient({
- url: config.url,
- username: config.username,
- password: config.password,
- clickhouse_settings: {
- date_time_input_format: "best_effort",
- date_time_output_format: "iso",
- // This is crucial
- use_client_time_zone: 1,
- output_format_json_quote_64bit_integers: 0
- }
- });
- this.ttl = config.ttl;
+ function transformRows(rows) {
+ return rows.map((row) => transformRow(row));
+ }
+
+ // src/storage/domains/legacy-evals/index.ts
+ var LegacyEvalsStorageClickhouse = class extends storage.LegacyEvalsStorage {
+ client;
+ operations;
+ constructor({ client, operations }) {
+ super();
+ this.client = client;
+ this.operations = operations;
  }
  transformEvalRow(row) {
  row = transformRow(row);
- const resultValue = JSON.parse(row.result);
- const testInfoValue = row.test_info ? JSON.parse(row.test_info) : void 0;
+ let resultValue;
+ try {
+ if (row.result && typeof row.result === "string" && row.result.trim() !== "") {
+ resultValue = JSON.parse(row.result);
+ } else if (typeof row.result === "object" && row.result !== null) {
+ resultValue = row.result;
+ } else if (row.result === null || row.result === void 0 || row.result === "") {
+ resultValue = { score: 0 };
+ } else {
+ throw new Error(`Invalid or empty result field: ${JSON.stringify(row.result)}`);
+ }
+ } catch (error$1) {
+ console.error("Error parsing result field:", row.result, error$1);
+ throw new error.MastraError({
+ id: "CLICKHOUSE_STORAGE_INVALID_RESULT_FORMAT",
+ text: `Invalid result format: ${JSON.stringify(row.result)}`,
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.USER
+ });
+ }
+ let testInfoValue;
+ try {
+ if (row.test_info && typeof row.test_info === "string" && row.test_info.trim() !== "" && row.test_info !== "null") {
+ testInfoValue = JSON.parse(row.test_info);
+ } else if (typeof row.test_info === "object" && row.test_info !== null) {
+ testInfoValue = row.test_info;
+ }
+ } catch {
+ testInfoValue = void 0;
+ }
  if (!resultValue || typeof resultValue !== "object" || !("score" in resultValue)) {
- throw new Error(`Invalid MetricResult format: ${JSON.stringify(resultValue)}`);
+ throw new error.MastraError({
+ id: "CLICKHOUSE_STORAGE_INVALID_METRIC_FORMAT",
+ text: `Invalid MetricResult format: ${JSON.stringify(resultValue)}`,
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.USER
+ });
  }
  return {
  input: row.input,
@@ -84,9 +107,9 @@ var ClickhouseStore = class extends storage.MastraStorage {
  }
  async getEvalsByAgentName(agentName, type) {
  try {
- const baseQuery = `SELECT *, toDateTime64(createdAt, 3) as createdAt FROM ${storage.TABLE_EVALS} WHERE agent_name = {var_agent_name:String}`;
- const typeCondition = type === "test" ? " AND test_info IS NOT NULL AND JSONExtractString(test_info, 'testPath') IS NOT NULL" : type === "live" ? " AND (test_info IS NULL OR JSONExtractString(test_info, 'testPath') IS NULL)" : "";
- const result = await this.db.query({
+ const baseQuery = `SELECT *, toDateTime64(created_at, 3) as createdAt FROM ${storage.TABLE_EVALS} WHERE agent_name = {var_agent_name:String}`;
+ const typeCondition = type === "test" ? " AND test_info IS NOT NULL AND test_info != 'null' AND JSONExtractString(test_info, 'testPath') IS NOT NULL AND JSONExtractString(test_info, 'testPath') != ''" : type === "live" ? " AND (test_info IS NULL OR test_info = 'null' OR JSONExtractString(test_info, 'testPath') IS NULL OR JSONExtractString(test_info, 'testPath') = '')" : "";
+ const result = await this.client.query({
  query: `${baseQuery}${typeCondition} ORDER BY createdAt DESC`,
  query_params: { var_agent_name: agentName },
  clickhouse_settings: {
@@ -101,254 +124,1426 @@ var ClickhouseStore = class extends storage.MastraStorage {
101
124
  }
102
125
  const rows = await result.json();
103
126
  return rows.data.map((row) => this.transformEvalRow(row));
104
- } catch (error) {
105
- if (error instanceof Error && error.message.includes("no such table")) {
127
+ } catch (error$1) {
128
+ if (error$1?.message?.includes("no such table") || error$1?.message?.includes("does not exist")) {
106
129
  return [];
107
130
  }
108
- this.logger.error("Failed to get evals for the specified agent: " + error?.message);
109
- throw error;
110
- }
111
- }
112
- async batchInsert({ tableName, records }) {
113
- try {
114
- await this.db.insert({
115
- table: tableName,
116
- values: records.map((record) => ({
117
- ...Object.fromEntries(
118
- Object.entries(record).map(([key, value]) => [
119
- key,
120
- storage.TABLE_SCHEMAS[tableName]?.[key]?.type === "timestamp" ? new Date(value).toISOString() : value
121
- ])
122
- )
123
- })),
124
- format: "JSONEachRow",
125
- clickhouse_settings: {
126
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
127
- date_time_input_format: "best_effort",
128
- use_client_time_zone: 1,
129
- output_format_json_quote_64bit_integers: 0
130
- }
131
- });
132
- } catch (error) {
133
- console.error(`Error inserting into ${tableName}:`, error);
134
- throw error;
131
+ throw new error.MastraError(
132
+ {
133
+ id: "CLICKHOUSE_STORAGE_GET_EVALS_BY_AGENT_FAILED",
134
+ domain: error.ErrorDomain.STORAGE,
135
+ category: error.ErrorCategory.THIRD_PARTY,
136
+ details: { agentName, type: type ?? null }
137
+ },
138
+ error$1
139
+ );
135
140
  }
136
141
  }
137
- async getTraces({
138
- name,
139
- scope,
140
- page,
141
- perPage,
142
- attributes,
143
- filters,
144
- fromDate,
145
- toDate
146
- }) {
147
- const limit = perPage;
148
- const offset = page * perPage;
149
- const args = {};
142
+ async getEvals(options = {}) {
143
+ const { agentName, type, page = 0, perPage = 100, dateRange } = options;
144
+ const fromDate = dateRange?.start;
145
+ const toDate = dateRange?.end;
150
146
  const conditions = [];
151
- if (name) {
152
- conditions.push(`name LIKE CONCAT({var_name:String}, '%')`);
153
- args.var_name = name;
154
- }
155
- if (scope) {
156
- conditions.push(`scope = {var_scope:String}`);
157
- args.var_scope = scope;
158
- }
159
- if (attributes) {
160
- Object.entries(attributes).forEach(([key, value]) => {
161
- conditions.push(`JSONExtractString(attributes, '${key}') = {var_attr_${key}:String}`);
162
- args[`var_attr_${key}`] = value;
163
- });
147
+ if (agentName) {
148
+ conditions.push(`agent_name = {var_agent_name:String}`);
164
149
  }
165
- if (filters) {
166
- Object.entries(filters).forEach(([key, value]) => {
167
- conditions.push(
168
- `${key} = {var_col_${key}:${COLUMN_TYPES[storage.TABLE_SCHEMAS.mastra_traces?.[key]?.type ?? "text"]}}`
169
- );
170
- args[`var_col_${key}`] = value;
171
- });
150
+ if (type === "test") {
151
+ conditions.push(
152
+ `(test_info IS NOT NULL AND test_info != 'null' AND JSONExtractString(test_info, 'testPath') IS NOT NULL AND JSONExtractString(test_info, 'testPath') != '')`
153
+ );
154
+ } else if (type === "live") {
155
+ conditions.push(
156
+ `(test_info IS NULL OR test_info = 'null' OR JSONExtractString(test_info, 'testPath') IS NULL OR JSONExtractString(test_info, 'testPath') = '')`
157
+ );
172
158
  }
173
159
  if (fromDate) {
174
- conditions.push(`createdAt >= {var_from_date:DateTime64(3)}`);
175
- args.var_from_date = fromDate.getTime() / 1e3;
160
+ conditions.push(`created_at >= parseDateTime64BestEffort({var_from_date:String})`);
161
+ fromDate.toISOString();
176
162
  }
177
163
  if (toDate) {
178
- conditions.push(`createdAt <= {var_to_date:DateTime64(3)}`);
179
- args.var_to_date = toDate.getTime() / 1e3;
164
+ conditions.push(`created_at <= parseDateTime64BestEffort({var_to_date:String})`);
165
+ toDate.toISOString();
180
166
  }
181
167
  const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
182
- const result = await this.db.query({
183
- query: `SELECT *, toDateTime64(createdAt, 3) as createdAt FROM ${storage.TABLE_TRACES} ${whereClause} ORDER BY "createdAt" DESC LIMIT ${limit} OFFSET ${offset}`,
184
- query_params: args,
185
- clickhouse_settings: {
186
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
187
- date_time_input_format: "best_effort",
188
- date_time_output_format: "iso",
189
- use_client_time_zone: 1,
190
- output_format_json_quote_64bit_integers: 0
191
- }
192
- });
193
- if (!result) {
194
- return [];
195
- }
196
- const resp = await result.json();
197
- const rows = resp.data;
198
- return rows.map((row) => ({
199
- id: row.id,
200
- parentSpanId: row.parentSpanId,
201
- traceId: row.traceId,
202
- name: row.name,
203
- scope: row.scope,
204
- kind: row.kind,
205
- status: safelyParseJSON(row.status),
206
- events: safelyParseJSON(row.events),
207
- links: safelyParseJSON(row.links),
208
- attributes: safelyParseJSON(row.attributes),
209
- startTime: row.startTime,
210
- endTime: row.endTime,
211
- other: safelyParseJSON(row.other),
212
- createdAt: row.createdAt
213
- }));
214
- }
215
- async optimizeTable({ tableName }) {
216
- await this.db.command({
217
- query: `OPTIMIZE TABLE ${tableName} FINAL`
218
- });
219
- }
220
- async materializeTtl({ tableName }) {
221
- await this.db.command({
222
- query: `ALTER TABLE ${tableName} MATERIALIZE TTL;`
223
- });
224
- }
225
- async createTable({
226
- tableName,
227
- schema
228
- }) {
229
168
  try {
230
- const columns = Object.entries(schema).map(([name, def]) => {
231
- const constraints = [];
232
- if (!def.nullable) constraints.push("NOT NULL");
233
- const columnTtl = this.ttl?.[tableName]?.columns?.[name];
234
- return `"${name}" ${COLUMN_TYPES[def.type]} ${constraints.join(" ")} ${columnTtl ? `TTL toDateTime(${columnTtl.ttlKey ?? "createdAt"}) + INTERVAL ${columnTtl.interval} ${columnTtl.unit}` : ""}`;
235
- }).join(",\n");
236
- const rowTtl = this.ttl?.[tableName]?.row;
237
- const sql = tableName === storage.TABLE_WORKFLOW_SNAPSHOT ? `
238
- CREATE TABLE IF NOT EXISTS ${tableName} (
239
- ${["id String"].concat(columns)}
240
- )
241
- ENGINE = ${TABLE_ENGINES[tableName]}
242
- PRIMARY KEY (createdAt, run_id, workflow_name)
243
- ORDER BY (createdAt, run_id, workflow_name)
244
- ${rowTtl ? `TTL toDateTime(${rowTtl.ttlKey ?? "createdAt"}) + INTERVAL ${rowTtl.interval} ${rowTtl.unit}` : ""}
245
- SETTINGS index_granularity = 8192
246
- ` : `
247
- CREATE TABLE IF NOT EXISTS ${tableName} (
248
- ${columns}
249
- )
250
- ENGINE = ${TABLE_ENGINES[tableName]}
251
- PRIMARY KEY (createdAt, ${tableName === storage.TABLE_EVALS ? "run_id" : "id"})
252
- ORDER BY (createdAt, ${tableName === storage.TABLE_EVALS ? "run_id" : "id"})
253
- ${this.ttl?.[tableName]?.row ? `TTL toDateTime(createdAt) + INTERVAL ${this.ttl[tableName].row.interval} ${this.ttl[tableName].row.unit}` : ""}
254
- SETTINGS index_granularity = 8192
255
- `;
256
- await this.db.query({
257
- query: sql,
169
+ const countResult = await this.client.query({
170
+ query: `SELECT COUNT(*) as count FROM ${storage.TABLE_EVALS} ${whereClause}`,
171
+ query_params: {
172
+ ...agentName ? { var_agent_name: agentName } : {},
173
+ ...fromDate ? { var_from_date: fromDate.toISOString() } : {},
174
+ ...toDate ? { var_to_date: toDate.toISOString() } : {}
175
+ },
176
+ clickhouse_settings: {
177
+ date_time_input_format: "best_effort",
178
+ date_time_output_format: "iso",
179
+ use_client_time_zone: 1,
180
+ output_format_json_quote_64bit_integers: 0
181
+ }
182
+ });
183
+ const countData = await countResult.json();
184
+ const total = Number(countData.data?.[0]?.count ?? 0);
185
+ const currentOffset = page * perPage;
186
+ const hasMore = currentOffset + perPage < total;
187
+ if (total === 0) {
188
+ return {
189
+ evals: [],
190
+ total: 0,
191
+ page,
192
+ perPage,
193
+ hasMore: false
194
+ };
195
+ }
196
+ const dataResult = await this.client.query({
197
+ query: `SELECT *, toDateTime64(createdAt, 3) as createdAt FROM ${storage.TABLE_EVALS} ${whereClause} ORDER BY created_at DESC LIMIT {var_limit:UInt32} OFFSET {var_offset:UInt32}`,
198
+ query_params: {
199
+ ...agentName ? { var_agent_name: agentName } : {},
200
+ ...fromDate ? { var_from_date: fromDate.toISOString() } : {},
201
+ ...toDate ? { var_to_date: toDate.toISOString() } : {},
202
+ var_limit: perPage || 100,
203
+ var_offset: currentOffset || 0
204
+ },
258
205
  clickhouse_settings: {
259
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
260
206
  date_time_input_format: "best_effort",
261
207
  date_time_output_format: "iso",
262
208
  use_client_time_zone: 1,
263
209
  output_format_json_quote_64bit_integers: 0
264
210
  }
265
211
  });
266
- } catch (error) {
267
- console.error(`Error creating table ${tableName}:`, error);
268
- throw error;
212
+ const rows = await dataResult.json();
213
+ return {
214
+ evals: rows.data.map((row) => this.transformEvalRow(row)),
215
+ total,
216
+ page,
217
+ perPage,
218
+ hasMore
219
+ };
220
+ } catch (error$1) {
221
+ if (error$1?.message?.includes("no such table") || error$1?.message?.includes("does not exist")) {
222
+ return {
223
+ evals: [],
224
+ total: 0,
225
+ page,
226
+ perPage,
227
+ hasMore: false
228
+ };
229
+ }
230
+ throw new error.MastraError(
231
+ {
232
+ id: "CLICKHOUSE_STORAGE_GET_EVALS_FAILED",
233
+ domain: error.ErrorDomain.STORAGE,
234
+ category: error.ErrorCategory.THIRD_PARTY,
235
+ details: { agentName: agentName ?? "all", type: type ?? "all" }
236
+ },
237
+ error$1
238
+ );
269
239
  }
270
240
  }
271
- getSqlType(type) {
272
- switch (type) {
273
- case "text":
274
- return "String";
275
- case "timestamp":
276
- return "DateTime64(3)";
277
- case "integer":
278
- case "bigint":
279
- return "Int64";
280
- case "jsonb":
281
- return "String";
282
- default:
283
- return super.getSqlType(type);
284
- }
241
+ };
242
+ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
243
+ client;
244
+ operations;
245
+ constructor({ client, operations }) {
246
+ super();
247
+ this.client = client;
248
+ this.operations = operations;
285
249
  }
286
- /**
287
- * Alters table schema to add columns if they don't exist
288
- * @param tableName Name of the table
289
- * @param schema Schema of the table
290
- * @param ifNotExists Array of column names to add if they don't exist
291
- */
292
- async alterTable({
293
- tableName,
294
- schema,
295
- ifNotExists
250
+ async getMessages({
251
+ threadId,
252
+ resourceId,
253
+ selectBy,
254
+ format
296
255
  }) {
297
256
  try {
298
- const describeSql = `DESCRIBE TABLE ${tableName}`;
299
- const result = await this.db.query({
300
- query: describeSql
257
+ const messages = [];
258
+ const limit = storage.resolveMessageLimit({ last: selectBy?.last, defaultLimit: 40 });
259
+ const include = selectBy?.include || [];
260
+ if (include.length) {
261
+ const unionQueries = [];
262
+ const params = [];
263
+ let paramIdx = 1;
264
+ for (const inc of include) {
265
+ const { id, withPreviousMessages = 0, withNextMessages = 0 } = inc;
266
+ const searchId = inc.threadId || threadId;
267
+ unionQueries.push(`
268
+ SELECT * FROM (
269
+ WITH numbered_messages AS (
270
+ SELECT
271
+ id, content, role, type, "createdAt", thread_id, "resourceId",
272
+ ROW_NUMBER() OVER (ORDER BY "createdAt" ASC) as row_num
273
+ FROM "${storage.TABLE_MESSAGES}"
274
+ WHERE thread_id = {var_thread_id_${paramIdx}:String}
275
+ ),
276
+ target_positions AS (
277
+ SELECT row_num as target_pos
278
+ FROM numbered_messages
279
+ WHERE id = {var_include_id_${paramIdx}:String}
280
+ )
281
+ SELECT DISTINCT m.id, m.content, m.role, m.type, m."createdAt", m.thread_id AS "threadId"
282
+ FROM numbered_messages m
283
+ CROSS JOIN target_positions t
284
+ WHERE m.row_num BETWEEN (t.target_pos - {var_withPreviousMessages_${paramIdx}:Int64}) AND (t.target_pos + {var_withNextMessages_${paramIdx}:Int64})
285
+ ) AS query_${paramIdx}
286
+ `);
287
+ params.push(
288
+ { [`var_thread_id_${paramIdx}`]: searchId },
289
+ { [`var_include_id_${paramIdx}`]: id },
290
+ { [`var_withPreviousMessages_${paramIdx}`]: withPreviousMessages },
291
+ { [`var_withNextMessages_${paramIdx}`]: withNextMessages }
292
+ );
293
+ paramIdx++;
294
+ }
295
+ const finalQuery = unionQueries.join(" UNION ALL ") + ' ORDER BY "createdAt" DESC';
296
+ const mergedParams = params.reduce((acc, paramObj) => ({ ...acc, ...paramObj }), {});
297
+ const includeResult = await this.client.query({
298
+ query: finalQuery,
299
+ query_params: mergedParams,
300
+ clickhouse_settings: {
301
+ date_time_input_format: "best_effort",
302
+ date_time_output_format: "iso",
303
+ use_client_time_zone: 1,
304
+ output_format_json_quote_64bit_integers: 0
305
+ }
306
+ });
307
+ const rows2 = await includeResult.json();
308
+ const includedMessages = transformRows(rows2.data);
309
+ const seen = /* @__PURE__ */ new Set();
310
+ const dedupedMessages = includedMessages.filter((message) => {
311
+ if (seen.has(message.id)) return false;
312
+ seen.add(message.id);
313
+ return true;
314
+ });
315
+ messages.push(...dedupedMessages);
316
+ }
317
+ const result = await this.client.query({
318
+ query: `
319
+ SELECT
320
+ id,
321
+ content,
322
+ role,
323
+ type,
324
+ toDateTime64(createdAt, 3) as createdAt,
325
+ thread_id AS "threadId"
326
+ FROM "${storage.TABLE_MESSAGES}"
327
+ WHERE thread_id = {threadId:String}
328
+ AND id NOT IN ({exclude:Array(String)})
329
+ ORDER BY "createdAt" DESC
330
+ LIMIT {limit:Int64}
331
+ `,
332
+ query_params: {
333
+ threadId,
334
+ exclude: messages.map((m) => m.id),
335
+ limit
336
+ },
337
+ clickhouse_settings: {
338
+ // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
339
+ date_time_input_format: "best_effort",
340
+ date_time_output_format: "iso",
341
+ use_client_time_zone: 1,
342
+ output_format_json_quote_64bit_integers: 0
343
+ }
301
344
  });
302
345
  const rows = await result.json();
303
- const existingColumnNames = new Set(rows.data.map((row) => row.name.toLowerCase()));
304
- for (const columnName of ifNotExists) {
305
- if (!existingColumnNames.has(columnName.toLowerCase()) && schema[columnName]) {
306
- const columnDef = schema[columnName];
307
- let sqlType = this.getSqlType(columnDef.type);
308
- if (columnDef.nullable !== false) {
309
- sqlType = `Nullable(${sqlType})`;
346
+ messages.push(...transformRows(rows.data));
347
+ messages.sort((a, b) => new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime());
348
+ messages.forEach((message) => {
349
+ if (typeof message.content === "string") {
350
+ try {
351
+ message.content = JSON.parse(message.content);
352
+ } catch {
310
353
  }
311
- const defaultValue = columnDef.nullable === false ? this.getDefaultValue(columnDef.type) : "";
312
- const alterSql = `ALTER TABLE ${tableName} ADD COLUMN IF NOT EXISTS "${columnName}" ${sqlType} ${defaultValue}`.trim();
313
- await this.db.query({
314
- query: alterSql
315
- });
316
- this.logger?.debug?.(`Added column ${columnName} to table ${tableName}`);
317
354
  }
318
- }
319
- } catch (error) {
320
- this.logger?.error?.(
321
- `Error altering table ${tableName}: ${error instanceof Error ? error.message : String(error)}`
355
+ });
356
+ const list = new agent.MessageList({ threadId, resourceId }).add(messages, "memory");
357
+ if (format === `v2`) return list.get.all.v2();
358
+ return list.get.all.v1();
359
+ } catch (error$1) {
360
+ throw new error.MastraError(
361
+ {
362
+ id: "CLICKHOUSE_STORAGE_GET_MESSAGES_FAILED",
363
+ domain: error.ErrorDomain.STORAGE,
364
+ category: error.ErrorCategory.THIRD_PARTY,
365
+ details: { threadId, resourceId: resourceId ?? "" }
366
+ },
367
+ error$1
322
368
  );
323
- throw new Error(`Failed to alter table ${tableName}: ${error}`);
324
369
  }
325
370
  }
326
- async clearTable({ tableName }) {
371
+ async saveMessages(args) {
372
+ const { messages, format = "v1" } = args;
373
+ if (messages.length === 0) return messages;
374
+ for (const message of messages) {
375
+ const resourceId = message.resourceId;
376
+ if (!resourceId) {
377
+ throw new Error("Resource ID is required");
378
+ }
379
+ if (!message.threadId) {
380
+ throw new Error("Thread ID is required");
381
+ }
382
+ const thread = await this.getThreadById({ threadId: message.threadId });
383
+ if (!thread) {
384
+ throw new Error(`Thread ${message.threadId} not found`);
385
+ }
386
+ }
387
+ const threadIdSet = /* @__PURE__ */ new Map();
388
+ await Promise.all(
389
+ messages.map(async (m) => {
390
+ const resourceId = m.resourceId;
391
+ if (!resourceId) {
392
+ throw new Error("Resource ID is required");
393
+ }
394
+ if (!m.threadId) {
395
+ throw new Error("Thread ID is required");
396
+ }
397
+ const thread = await this.getThreadById({ threadId: m.threadId });
398
+ if (!thread) {
399
+ throw new Error(`Thread ${m.threadId} not found`);
400
+ }
401
+ threadIdSet.set(m.threadId, thread);
402
+ })
403
+ );
327
404
  try {
328
- await this.db.query({
329
- query: `TRUNCATE TABLE ${tableName}`,
405
+ const existingResult = await this.client.query({
406
+ query: `SELECT id, thread_id FROM ${storage.TABLE_MESSAGES} WHERE id IN ({ids:Array(String)})`,
407
+ query_params: {
408
+ ids: messages.map((m) => m.id)
409
+ },
330
410
  clickhouse_settings: {
331
411
  // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
332
412
  date_time_input_format: "best_effort",
333
413
  date_time_output_format: "iso",
334
414
  use_client_time_zone: 1,
335
415
  output_format_json_quote_64bit_integers: 0
336
- }
416
+ },
417
+ format: "JSONEachRow"
418
+ });
419
+ const existingRows = await existingResult.json();
420
+ const existingSet = new Set(existingRows.map((row) => `${row.id}::${row.thread_id}`));
421
+ const toInsert = messages.filter((m) => !existingSet.has(`${m.id}::${m.threadId}`));
422
+ const toUpdate = messages.filter((m) => existingSet.has(`${m.id}::${m.threadId}`));
423
+ const toMove = messages.filter((m) => {
424
+ const existingRow = existingRows.find((row) => row.id === m.id);
425
+ return existingRow && existingRow.thread_id !== m.threadId;
337
426
  });
338
- } catch (error) {
339
- console.error(`Error clearing table ${tableName}:`, error);
340
- throw error;
427
+ const deletePromises = toMove.map((message) => {
428
+ const existingRow = existingRows.find((row) => row.id === message.id);
429
+ if (!existingRow) return Promise.resolve();
430
+ return this.client.command({
431
+ query: `DELETE FROM ${storage.TABLE_MESSAGES} WHERE id = {var_id:String} AND thread_id = {var_old_thread_id:String}`,
432
+ query_params: {
433
+ var_id: message.id,
434
+ var_old_thread_id: existingRow.thread_id
435
+ },
436
+ clickhouse_settings: {
437
+ date_time_input_format: "best_effort",
438
+ use_client_time_zone: 1,
439
+ output_format_json_quote_64bit_integers: 0
440
+ }
441
+ });
442
+ });
443
+ const updatePromises = toUpdate.map(
444
+ (message) => this.client.command({
445
+ query: `
446
+ ALTER TABLE ${storage.TABLE_MESSAGES}
447
+ UPDATE content = {var_content:String}, role = {var_role:String}, type = {var_type:String}, resourceId = {var_resourceId:String}
448
+ WHERE id = {var_id:String} AND thread_id = {var_thread_id:String}
449
+ `,
450
+ query_params: {
451
+ var_content: typeof message.content === "string" ? message.content : JSON.stringify(message.content),
452
+ var_role: message.role,
453
+ var_type: message.type || "v2",
454
+ var_resourceId: message.resourceId,
455
+ var_id: message.id,
456
+ var_thread_id: message.threadId
457
+ },
458
+ clickhouse_settings: {
459
+ // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
460
+ date_time_input_format: "best_effort",
461
+ use_client_time_zone: 1,
462
+ output_format_json_quote_64bit_integers: 0
463
+ }
464
+ })
465
+ );
466
+ await Promise.all([
467
+ // Insert new messages (including moved messages)
468
+ this.client.insert({
469
+ table: storage.TABLE_MESSAGES,
470
+ format: "JSONEachRow",
471
+ values: toInsert.map((message) => ({
472
+ id: message.id,
473
+ thread_id: message.threadId,
474
+ resourceId: message.resourceId,
475
+ content: typeof message.content === "string" ? message.content : JSON.stringify(message.content),
476
+ createdAt: message.createdAt.toISOString(),
477
+ role: message.role,
478
+ type: message.type || "v2"
479
+ })),
480
+ clickhouse_settings: {
481
+ // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
482
+ date_time_input_format: "best_effort",
483
+ use_client_time_zone: 1,
484
+ output_format_json_quote_64bit_integers: 0
485
+ }
486
+ }),
487
+ ...updatePromises,
488
+ ...deletePromises,
489
+ // Update thread's updatedAt timestamp
490
+ this.client.insert({
491
+ table: storage.TABLE_THREADS,
492
+ format: "JSONEachRow",
493
+ values: Array.from(threadIdSet.values()).map((thread) => ({
494
+ id: thread.id,
495
+ resourceId: thread.resourceId,
496
+ title: thread.title,
497
+ metadata: thread.metadata,
498
+ createdAt: thread.createdAt,
499
+ updatedAt: (/* @__PURE__ */ new Date()).toISOString()
500
+ })),
501
+ clickhouse_settings: {
502
+ date_time_input_format: "best_effort",
503
+ use_client_time_zone: 1,
504
+ output_format_json_quote_64bit_integers: 0
505
+ }
506
+ })
507
+ ]);
508
+ const list = new agent.MessageList().add(messages, "memory");
509
+ if (format === `v2`) return list.get.all.v2();
510
+ return list.get.all.v1();
511
+ } catch (error$1) {
512
+ throw new error.MastraError(
513
+ {
514
+ id: "CLICKHOUSE_STORAGE_SAVE_MESSAGES_FAILED",
515
+ domain: error.ErrorDomain.STORAGE,
516
+ category: error.ErrorCategory.THIRD_PARTY
517
+ },
518
+ error$1
519
+ );
520
+ }
521
+ }
522
+ async getThreadById({ threadId }) {
523
+ try {
524
+ const result = await this.client.query({
525
+ query: `SELECT
526
+ id,
527
+ "resourceId",
528
+ title,
529
+ metadata,
530
+ toDateTime64(createdAt, 3) as createdAt,
531
+ toDateTime64(updatedAt, 3) as updatedAt
532
+ FROM "${storage.TABLE_THREADS}"
533
+ FINAL
534
+ WHERE id = {var_id:String}`,
535
+ query_params: { var_id: threadId },
536
+ clickhouse_settings: {
537
+ // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
538
+ date_time_input_format: "best_effort",
539
+ date_time_output_format: "iso",
540
+ use_client_time_zone: 1,
541
+ output_format_json_quote_64bit_integers: 0
542
+ }
543
+ });
544
+ const rows = await result.json();
545
+ const thread = transformRow(rows.data[0]);
546
+ if (!thread) {
547
+ return null;
548
+ }
549
+ return {
550
+ ...thread,
551
+ metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata,
552
+ createdAt: thread.createdAt,
553
+ updatedAt: thread.updatedAt
554
+ };
555
+ } catch (error$1) {
556
+ throw new error.MastraError(
557
+ {
558
+ id: "CLICKHOUSE_STORAGE_GET_THREAD_BY_ID_FAILED",
559
+ domain: error.ErrorDomain.STORAGE,
560
+ category: error.ErrorCategory.THIRD_PARTY,
561
+ details: { threadId }
562
+ },
563
+ error$1
564
+ );
565
+ }
566
+ }
567
+ async getThreadsByResourceId({ resourceId }) {
568
+ try {
569
+ const result = await this.client.query({
570
+ query: `SELECT
571
+ id,
572
+ "resourceId",
573
+ title,
574
+ metadata,
575
+ toDateTime64(createdAt, 3) as createdAt,
576
+ toDateTime64(updatedAt, 3) as updatedAt
577
+ FROM "${storage.TABLE_THREADS}"
578
+ WHERE "resourceId" = {var_resourceId:String}`,
579
+ query_params: { var_resourceId: resourceId },
580
+ clickhouse_settings: {
581
+ // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
582
+ date_time_input_format: "best_effort",
583
+ date_time_output_format: "iso",
584
+ use_client_time_zone: 1,
585
+ output_format_json_quote_64bit_integers: 0
586
+ }
587
+ });
588
+ const rows = await result.json();
589
+ const threads = transformRows(rows.data);
590
+ return threads.map((thread) => ({
591
+ ...thread,
592
+ metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata,
593
+ createdAt: thread.createdAt,
594
+ updatedAt: thread.updatedAt
595
+ }));
596
+ } catch (error$1) {
597
+ throw new error.MastraError(
598
+ {
599
+ id: "CLICKHOUSE_STORAGE_GET_THREADS_BY_RESOURCE_ID_FAILED",
600
+ domain: error.ErrorDomain.STORAGE,
601
+ category: error.ErrorCategory.THIRD_PARTY,
602
+ details: { resourceId }
603
+ },
604
+ error$1
605
+ );
606
+ }
607
+ }
608
+ async saveThread({ thread }) {
609
+ try {
610
+ await this.client.insert({
611
+ table: storage.TABLE_THREADS,
612
+ values: [
613
+ {
614
+ ...thread,
615
+ createdAt: thread.createdAt.toISOString(),
616
+ updatedAt: thread.updatedAt.toISOString()
617
+ }
618
+ ],
619
+ format: "JSONEachRow",
620
+ clickhouse_settings: {
621
+ // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
622
+ date_time_input_format: "best_effort",
623
+ use_client_time_zone: 1,
624
+ output_format_json_quote_64bit_integers: 0
625
+ }
626
+ });
627
+ return thread;
628
+ } catch (error$1) {
629
+ throw new error.MastraError(
630
+ {
631
+ id: "CLICKHOUSE_STORAGE_SAVE_THREAD_FAILED",
632
+ domain: error.ErrorDomain.STORAGE,
633
+ category: error.ErrorCategory.THIRD_PARTY,
634
+ details: { threadId: thread.id }
635
+ },
636
+ error$1
637
+ );
638
+ }
639
+ }
640
+ async updateThread({
641
+ id,
642
+ title,
643
+ metadata
644
+ }) {
645
+ try {
646
+ const existingThread = await this.getThreadById({ threadId: id });
647
+ if (!existingThread) {
648
+ throw new Error(`Thread ${id} not found`);
649
+ }
650
+ const mergedMetadata = {
651
+ ...existingThread.metadata,
652
+ ...metadata
653
+ };
654
+ const updatedThread = {
655
+ ...existingThread,
656
+ title,
657
+ metadata: mergedMetadata,
658
+ updatedAt: /* @__PURE__ */ new Date()
659
+ };
660
+ await this.client.insert({
661
+ table: storage.TABLE_THREADS,
662
+ format: "JSONEachRow",
663
+ values: [
664
+ {
665
+ id: updatedThread.id,
666
+ resourceId: updatedThread.resourceId,
667
+ title: updatedThread.title,
668
+ metadata: updatedThread.metadata,
669
+ createdAt: updatedThread.createdAt,
670
+ updatedAt: updatedThread.updatedAt.toISOString()
671
+ }
672
+ ],
673
+ clickhouse_settings: {
674
+ date_time_input_format: "best_effort",
675
+ use_client_time_zone: 1,
676
+ output_format_json_quote_64bit_integers: 0
677
+ }
678
+ });
679
+ return updatedThread;
680
+ } catch (error$1) {
681
+ throw new error.MastraError(
682
+ {
683
+ id: "CLICKHOUSE_STORAGE_UPDATE_THREAD_FAILED",
684
+ domain: error.ErrorDomain.STORAGE,
685
+ category: error.ErrorCategory.THIRD_PARTY,
686
+ details: { threadId: id, title }
687
+ },
688
+ error$1
689
+ );
690
+ }
691
+ }
692
+ async deleteThread({ threadId }) {
693
+ try {
694
+ await this.client.command({
695
+ query: `DELETE FROM "${storage.TABLE_MESSAGES}" WHERE thread_id = {var_thread_id:String};`,
696
+ query_params: { var_thread_id: threadId },
697
+ clickhouse_settings: {
698
+ output_format_json_quote_64bit_integers: 0
699
+ }
700
+ });
701
+ await this.client.command({
702
+ query: `DELETE FROM "${storage.TABLE_THREADS}" WHERE id = {var_id:String};`,
703
+ query_params: { var_id: threadId },
704
+ clickhouse_settings: {
705
+ output_format_json_quote_64bit_integers: 0
706
+ }
707
+ });
708
+ } catch (error$1) {
709
+ throw new error.MastraError(
710
+ {
711
+ id: "CLICKHOUSE_STORAGE_DELETE_THREAD_FAILED",
712
+ domain: error.ErrorDomain.STORAGE,
713
+ category: error.ErrorCategory.THIRD_PARTY,
714
+ details: { threadId }
715
+ },
716
+ error$1
717
+ );
718
+ }
719
+ }
720
+ async getThreadsByResourceIdPaginated(args) {
721
+ const { resourceId, page = 0, perPage = 100 } = args;
722
+ try {
723
+ const currentOffset = page * perPage;
724
+ const countResult = await this.client.query({
725
+ query: `SELECT count() as total FROM ${storage.TABLE_THREADS} WHERE resourceId = {resourceId:String}`,
726
+ query_params: { resourceId },
727
+ clickhouse_settings: {
728
+ date_time_input_format: "best_effort",
729
+ date_time_output_format: "iso",
730
+ use_client_time_zone: 1,
731
+ output_format_json_quote_64bit_integers: 0
732
+ }
733
+ });
734
+ const countData = await countResult.json();
735
+ const total = countData.data[0].total;
736
+ if (total === 0) {
737
+ return {
738
+ threads: [],
739
+ total: 0,
740
+ page,
741
+ perPage,
742
+ hasMore: false
743
+ };
744
+ }
745
+ const dataResult = await this.client.query({
746
+ query: `
747
+ SELECT
748
+ id,
749
+ resourceId,
750
+ title,
751
+ metadata,
752
+ toDateTime64(createdAt, 3) as createdAt,
753
+ toDateTime64(updatedAt, 3) as updatedAt
754
+ FROM ${storage.TABLE_THREADS}
755
+ WHERE resourceId = {resourceId:String}
756
+ ORDER BY createdAt DESC
757
+ LIMIT {limit:Int64} OFFSET {offset:Int64}
758
+ `,
759
+ query_params: {
760
+ resourceId,
761
+ limit: perPage,
762
+ offset: currentOffset
763
+ },
764
+ clickhouse_settings: {
765
+ date_time_input_format: "best_effort",
766
+ date_time_output_format: "iso",
767
+ use_client_time_zone: 1,
768
+ output_format_json_quote_64bit_integers: 0
769
+ }
770
+ });
771
+ const rows = await dataResult.json();
772
+ const threads = transformRows(rows.data);
773
+ return {
774
+ threads,
775
+ total,
776
+ page,
777
+ perPage,
778
+ hasMore: currentOffset + threads.length < total
779
+ };
780
+ } catch (error$1) {
781
+ throw new error.MastraError(
782
+ {
783
+ id: "CLICKHOUSE_STORAGE_GET_THREADS_BY_RESOURCE_ID_PAGINATED_FAILED",
784
+ domain: error.ErrorDomain.STORAGE,
785
+ category: error.ErrorCategory.THIRD_PARTY,
786
+ details: { resourceId, page }
787
+ },
788
+ error$1
789
+ );
790
+ }
791
+ }
792
+ async getMessagesPaginated(args) {
793
+ try {
794
+ const { threadId, selectBy, format = "v1" } = args;
795
+ const page = selectBy?.pagination?.page || 0;
796
+ const perPageInput = selectBy?.pagination?.perPage;
797
+ const perPage = perPageInput !== void 0 ? perPageInput : storage.resolveMessageLimit({ last: selectBy?.last, defaultLimit: 20 });
798
+ const offset = page * perPage;
799
+ const dateRange = selectBy?.pagination?.dateRange;
800
+ const fromDate = dateRange?.start;
801
+ const toDate = dateRange?.end;
802
+ const messages = [];
803
+ if (selectBy?.include?.length) {
804
+ const include = selectBy.include;
805
+ const unionQueries = [];
806
+ const params = [];
807
+ let paramIdx = 1;
808
+ for (const inc of include) {
809
+ const { id, withPreviousMessages = 0, withNextMessages = 0 } = inc;
810
+ const searchId = inc.threadId || threadId;
811
+ unionQueries.push(`
812
+ SELECT * FROM (
813
+ WITH numbered_messages AS (
814
+ SELECT
815
+ id, content, role, type, "createdAt", thread_id, "resourceId",
816
+ ROW_NUMBER() OVER (ORDER BY "createdAt" ASC) as row_num
817
+ FROM "${storage.TABLE_MESSAGES}"
818
+ WHERE thread_id = {var_thread_id_${paramIdx}:String}
819
+ ),
820
+ target_positions AS (
821
+ SELECT row_num as target_pos
822
+ FROM numbered_messages
823
+ WHERE id = {var_include_id_${paramIdx}:String}
824
+ )
825
+ SELECT DISTINCT m.id, m.content, m.role, m.type, m."createdAt", m.thread_id AS "threadId"
826
+ FROM numbered_messages m
827
+ CROSS JOIN target_positions t
828
+ WHERE m.row_num BETWEEN (t.target_pos - {var_withPreviousMessages_${paramIdx}:Int64}) AND (t.target_pos + {var_withNextMessages_${paramIdx}:Int64})
829
+ ) AS query_${paramIdx}
830
+ `);
831
+ params.push(
832
+ { [`var_thread_id_${paramIdx}`]: searchId },
833
+ { [`var_include_id_${paramIdx}`]: id },
834
+ { [`var_withPreviousMessages_${paramIdx}`]: withPreviousMessages },
835
+ { [`var_withNextMessages_${paramIdx}`]: withNextMessages }
836
+ );
837
+ paramIdx++;
838
+ }
839
+ const finalQuery = unionQueries.join(" UNION ALL ") + ' ORDER BY "createdAt" DESC';
840
+ const mergedParams = params.reduce((acc, paramObj) => ({ ...acc, ...paramObj }), {});
841
+ const includeResult = await this.client.query({
842
+ query: finalQuery,
843
+ query_params: mergedParams,
844
+ clickhouse_settings: {
845
+ date_time_input_format: "best_effort",
846
+ date_time_output_format: "iso",
847
+ use_client_time_zone: 1,
848
+ output_format_json_quote_64bit_integers: 0
849
+ }
850
+ });
851
+ const rows2 = await includeResult.json();
852
+ const includedMessages = transformRows(rows2.data);
853
+ const seen = /* @__PURE__ */ new Set();
854
+ const dedupedMessages = includedMessages.filter((message) => {
855
+ if (seen.has(message.id)) return false;
856
+ seen.add(message.id);
857
+ return true;
858
+ });
859
+ messages.push(...dedupedMessages);
860
+ }
861
+ let countQuery = `SELECT count() as total FROM ${storage.TABLE_MESSAGES} WHERE thread_id = {threadId:String}`;
862
+ const countParams = { threadId };
863
+ if (fromDate) {
864
+ countQuery += ` AND createdAt >= parseDateTime64BestEffort({fromDate:String}, 3)`;
865
+ countParams.fromDate = fromDate.toISOString();
866
+ }
867
+ if (toDate) {
868
+ countQuery += ` AND createdAt <= parseDateTime64BestEffort({toDate:String}, 3)`;
869
+ countParams.toDate = toDate.toISOString();
870
+ }
871
+ const countResult = await this.client.query({
872
+ query: countQuery,
873
+ query_params: countParams,
874
+ clickhouse_settings: {
875
+ date_time_input_format: "best_effort",
876
+ date_time_output_format: "iso",
877
+ use_client_time_zone: 1,
878
+ output_format_json_quote_64bit_integers: 0
879
+ }
880
+ });
881
+ const countData = await countResult.json();
882
+ const total = countData.data[0].total;
883
+ if (total === 0 && messages.length === 0) {
884
+ return {
885
+ messages: [],
886
+ total: 0,
887
+ page,
888
+ perPage,
889
+ hasMore: false
890
+ };
891
+ }
892
+ const excludeIds = messages.map((m) => m.id);
893
+ let dataQuery = `
894
+ SELECT
895
+ id,
896
+ content,
897
+ role,
898
+ type,
899
+ toDateTime64(createdAt, 3) as createdAt,
900
+ thread_id AS "threadId",
901
+ resourceId
902
+ FROM ${storage.TABLE_MESSAGES}
903
+ WHERE thread_id = {threadId:String}
904
+ `;
905
+ const dataParams = { threadId };
906
+ if (fromDate) {
907
+ dataQuery += ` AND createdAt >= parseDateTime64BestEffort({fromDate:String}, 3)`;
908
+ dataParams.fromDate = fromDate.toISOString();
909
+ }
910
+ if (toDate) {
911
+ dataQuery += ` AND createdAt <= parseDateTime64BestEffort({toDate:String}, 3)`;
912
+ dataParams.toDate = toDate.toISOString();
913
+ }
914
+ if (excludeIds.length > 0) {
915
+ dataQuery += ` AND id NOT IN ({excludeIds:Array(String)})`;
916
+ dataParams.excludeIds = excludeIds;
917
+ }
918
+ if (selectBy?.last) {
919
+ dataQuery += `
920
+ ORDER BY createdAt DESC
921
+ LIMIT {limit:Int64}
922
+ `;
923
+ dataParams.limit = perPage;
924
+ } else {
925
+ dataQuery += `
926
+ ORDER BY createdAt ASC
927
+ LIMIT {limit:Int64} OFFSET {offset:Int64}
928
+ `;
929
+ dataParams.limit = perPage;
930
+ dataParams.offset = offset;
931
+ }
932
+ const result = await this.client.query({
933
+ query: dataQuery,
934
+ query_params: dataParams,
935
+ clickhouse_settings: {
936
+ date_time_input_format: "best_effort",
937
+ date_time_output_format: "iso",
938
+ use_client_time_zone: 1,
939
+ output_format_json_quote_64bit_integers: 0
940
+ }
941
+ });
942
+ const rows = await result.json();
943
+ const paginatedMessages = transformRows(rows.data);
944
+ messages.push(...paginatedMessages);
945
+ if (selectBy?.last) {
946
+ messages.sort((a, b) => new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime());
947
+ }
948
+ return {
949
+ messages: format === "v2" ? messages : messages,
950
+ total,
951
+ page,
952
+ perPage,
953
+ hasMore: offset + perPage < total
954
+ };
955
+ } catch (error$1) {
956
+ throw new error.MastraError(
957
+ {
958
+ id: "CLICKHOUSE_STORAGE_GET_MESSAGES_PAGINATED_FAILED",
959
+ domain: error.ErrorDomain.STORAGE,
960
+ category: error.ErrorCategory.THIRD_PARTY
961
+ },
962
+ error$1
963
+ );
964
+ }
965
+ }
966
+ async updateMessages(args) {
967
+ const { messages } = args;
968
+ if (messages.length === 0) {
969
+ return [];
970
+ }
971
+ try {
972
+ const messageIds = messages.map((m) => m.id);
973
+ const existingResult = await this.client.query({
974
+ query: `SELECT id, content, role, type, "createdAt", thread_id AS "threadId", "resourceId" FROM ${storage.TABLE_MESSAGES} WHERE id IN (${messageIds.map((_, i) => `{id_${i}:String}`).join(",")})`,
975
+ query_params: messageIds.reduce((acc, m, i) => ({ ...acc, [`id_${i}`]: m }), {}),
976
+ clickhouse_settings: {
977
+ date_time_input_format: "best_effort",
978
+ date_time_output_format: "iso",
979
+ use_client_time_zone: 1,
980
+ output_format_json_quote_64bit_integers: 0
981
+ }
982
+ });
983
+ const existingRows = await existingResult.json();
984
+ const existingMessages = transformRows(existingRows.data);
985
+ if (existingMessages.length === 0) {
986
+ return [];
987
+ }
988
+ const parsedExistingMessages = existingMessages.map((msg) => {
989
+ if (typeof msg.content === "string") {
990
+ try {
991
+ msg.content = JSON.parse(msg.content);
992
+ } catch {
993
+ }
994
+ }
995
+ return msg;
996
+ });
997
+ const threadIdsToUpdate = /* @__PURE__ */ new Set();
998
+ const updatePromises = [];
999
+ for (const existingMessage of parsedExistingMessages) {
1000
+ const updatePayload = messages.find((m) => m.id === existingMessage.id);
1001
+ if (!updatePayload) continue;
1002
+ const { id, ...fieldsToUpdate } = updatePayload;
1003
+ if (Object.keys(fieldsToUpdate).length === 0) continue;
1004
+ threadIdsToUpdate.add(existingMessage.threadId);
1005
+ if (updatePayload.threadId && updatePayload.threadId !== existingMessage.threadId) {
1006
+ threadIdsToUpdate.add(updatePayload.threadId);
1007
+ }
1008
+ const setClauses = [];
1009
+ const values = {};
1010
+ let paramIdx = 1;
1011
+ let newContent = null;
1012
+ const updatableFields = { ...fieldsToUpdate };
1013
+ if (updatableFields.content) {
1014
+ const existingContent = existingMessage.content || {};
1015
+ const existingMetadata = existingContent.metadata || {};
1016
+ const updateMetadata = updatableFields.content.metadata || {};
1017
+ newContent = {
1018
+ ...existingContent,
1019
+ ...updatableFields.content,
1020
+ // Deep merge metadata
1021
+ metadata: {
1022
+ ...existingMetadata,
1023
+ ...updateMetadata
1024
+ }
1025
+ };
1026
+ setClauses.push(`content = {var_content_${paramIdx}:String}`);
1027
+ values[`var_content_${paramIdx}`] = JSON.stringify(newContent);
1028
+ paramIdx++;
1029
+ delete updatableFields.content;
1030
+ }
1031
+ for (const key in updatableFields) {
1032
+ if (Object.prototype.hasOwnProperty.call(updatableFields, key)) {
1033
+ const dbColumn = key === "threadId" ? "thread_id" : key;
1034
+ setClauses.push(`"${dbColumn}" = {var_${key}_${paramIdx}:String}`);
1035
+ values[`var_${key}_${paramIdx}`] = updatableFields[key];
1036
+ paramIdx++;
1037
+ }
1038
+ }
1039
+ if (setClauses.length > 0) {
1040
+ values[`var_id_${paramIdx}`] = id;
1041
+ const updateQuery = `
1042
+ ALTER TABLE ${storage.TABLE_MESSAGES}
1043
+ UPDATE ${setClauses.join(", ")}
1044
+ WHERE id = {var_id_${paramIdx}:String}
1045
+ `;
1046
+ console.log("Updating message:", id, "with query:", updateQuery, "values:", values);
1047
+ updatePromises.push(
1048
+ this.client.command({
1049
+ query: updateQuery,
1050
+ query_params: values,
1051
+ clickhouse_settings: {
1052
+ date_time_input_format: "best_effort",
1053
+ use_client_time_zone: 1,
1054
+ output_format_json_quote_64bit_integers: 0
1055
+ }
1056
+ })
1057
+ );
1058
+ }
1059
+ }
1060
+ if (updatePromises.length > 0) {
1061
+ await Promise.all(updatePromises);
1062
+ }
1063
+ await this.client.command({
1064
+ query: `OPTIMIZE TABLE ${storage.TABLE_MESSAGES} FINAL`,
1065
+ clickhouse_settings: {
1066
+ date_time_input_format: "best_effort",
1067
+ use_client_time_zone: 1,
1068
+ output_format_json_quote_64bit_integers: 0
1069
+ }
1070
+ });
1071
+ for (const existingMessage of parsedExistingMessages) {
1072
+ const updatePayload = messages.find((m) => m.id === existingMessage.id);
1073
+ if (!updatePayload) continue;
1074
+ const { id, ...fieldsToUpdate } = updatePayload;
1075
+ if (Object.keys(fieldsToUpdate).length === 0) continue;
1076
+ const verifyResult = await this.client.query({
1077
+ query: `SELECT id, content, role, type, "createdAt", thread_id AS "threadId", "resourceId" FROM ${storage.TABLE_MESSAGES} WHERE id = {messageId:String}`,
1078
+ query_params: { messageId: id },
1079
+ clickhouse_settings: {
1080
+ date_time_input_format: "best_effort",
1081
+ date_time_output_format: "iso",
1082
+ use_client_time_zone: 1,
1083
+ output_format_json_quote_64bit_integers: 0
1084
+ }
1085
+ });
1086
+ const verifyRows = await verifyResult.json();
1087
+ if (verifyRows.data.length > 0) {
1088
+ const updatedMessage = transformRows(verifyRows.data)[0];
1089
+ if (updatedMessage) {
1090
+ let needsRetry = false;
1091
+ for (const [key, value] of Object.entries(fieldsToUpdate)) {
1092
+ if (key === "content") {
1093
+ const expectedContent = typeof value === "string" ? value : JSON.stringify(value);
1094
+ const actualContent = typeof updatedMessage.content === "string" ? updatedMessage.content : JSON.stringify(updatedMessage.content);
1095
+ if (actualContent !== expectedContent) {
1096
+ needsRetry = true;
1097
+ break;
1098
+ }
1099
+ } else if (updatedMessage[key] !== value) {
1100
+ needsRetry = true;
1101
+ break;
1102
+ }
1103
+ }
1104
+ if (needsRetry) {
1105
+ console.log("Update not applied correctly, retrying with DELETE + INSERT for message:", id);
1106
+ await this.client.command({
1107
+ query: `DELETE FROM ${storage.TABLE_MESSAGES} WHERE id = {messageId:String}`,
1108
+ query_params: { messageId: id },
1109
+ clickhouse_settings: {
1110
+ date_time_input_format: "best_effort",
1111
+ use_client_time_zone: 1,
1112
+ output_format_json_quote_64bit_integers: 0
1113
+ }
1114
+ });
1115
+ let updatedContent = existingMessage.content || {};
1116
+ if (fieldsToUpdate.content) {
1117
+ const existingContent = existingMessage.content || {};
1118
+ const existingMetadata = existingContent.metadata || {};
1119
+ const updateMetadata = fieldsToUpdate.content.metadata || {};
1120
+ updatedContent = {
1121
+ ...existingContent,
1122
+ ...fieldsToUpdate.content,
1123
+ metadata: {
1124
+ ...existingMetadata,
1125
+ ...updateMetadata
1126
+ }
1127
+ };
1128
+ }
1129
+ const updatedMessageData = {
1130
+ ...existingMessage,
1131
+ ...fieldsToUpdate,
1132
+ content: updatedContent
1133
+ };
1134
+ await this.client.insert({
1135
+ table: storage.TABLE_MESSAGES,
1136
+ format: "JSONEachRow",
1137
+ values: [
1138
+ {
1139
+ id: updatedMessageData.id,
1140
+ thread_id: updatedMessageData.threadId,
1141
+ resourceId: updatedMessageData.resourceId,
1142
+ content: typeof updatedMessageData.content === "string" ? updatedMessageData.content : JSON.stringify(updatedMessageData.content),
1143
+ createdAt: updatedMessageData.createdAt.toISOString(),
1144
+ role: updatedMessageData.role,
1145
+ type: updatedMessageData.type || "v2"
1146
+ }
1147
+ ],
1148
+ clickhouse_settings: {
1149
+ date_time_input_format: "best_effort",
1150
+ use_client_time_zone: 1,
1151
+ output_format_json_quote_64bit_integers: 0
1152
+ }
1153
+ });
1154
+ }
1155
+ }
1156
+ }
1157
+ }
1158
+ if (threadIdsToUpdate.size > 0) {
1159
+ await new Promise((resolve) => setTimeout(resolve, 10));
1160
+ const now = (/* @__PURE__ */ new Date()).toISOString().replace("Z", "");
1161
+ const threadUpdatePromises = Array.from(threadIdsToUpdate).map(async (threadId) => {
1162
+ const threadResult = await this.client.query({
1163
+ query: `SELECT id, resourceId, title, metadata, createdAt FROM ${storage.TABLE_THREADS} WHERE id = {threadId:String}`,
1164
+ query_params: { threadId },
1165
+ clickhouse_settings: {
1166
+ date_time_input_format: "best_effort",
1167
+ date_time_output_format: "iso",
1168
+ use_client_time_zone: 1,
1169
+ output_format_json_quote_64bit_integers: 0
1170
+ }
1171
+ });
1172
+ const threadRows = await threadResult.json();
1173
+ if (threadRows.data.length > 0) {
1174
+ const existingThread = threadRows.data[0];
1175
+ await this.client.command({
1176
+ query: `DELETE FROM ${storage.TABLE_THREADS} WHERE id = {threadId:String}`,
1177
+ query_params: { threadId },
1178
+ clickhouse_settings: {
1179
+ date_time_input_format: "best_effort",
1180
+ use_client_time_zone: 1,
1181
+ output_format_json_quote_64bit_integers: 0
1182
+ }
1183
+ });
1184
+ await this.client.insert({
1185
+ table: storage.TABLE_THREADS,
1186
+ format: "JSONEachRow",
1187
+ values: [
1188
+ {
1189
+ id: existingThread.id,
1190
+ resourceId: existingThread.resourceId,
1191
+ title: existingThread.title,
1192
+ metadata: existingThread.metadata,
1193
+ createdAt: existingThread.createdAt,
1194
+ updatedAt: now
1195
+ }
1196
+ ],
1197
+ clickhouse_settings: {
1198
+ date_time_input_format: "best_effort",
1199
+ use_client_time_zone: 1,
1200
+ output_format_json_quote_64bit_integers: 0
1201
+ }
1202
+ });
1203
+ }
1204
+ });
1205
+ await Promise.all(threadUpdatePromises);
1206
+ }
1207
+ const updatedMessages = [];
1208
+ for (const messageId of messageIds) {
1209
+ const updatedResult = await this.client.query({
1210
+ query: `SELECT id, content, role, type, "createdAt", thread_id AS "threadId", "resourceId" FROM ${storage.TABLE_MESSAGES} WHERE id = {messageId:String}`,
1211
+ query_params: { messageId },
1212
+ clickhouse_settings: {
1213
+ date_time_input_format: "best_effort",
1214
+ date_time_output_format: "iso",
1215
+ use_client_time_zone: 1,
1216
+ output_format_json_quote_64bit_integers: 0
1217
+ }
1218
+ });
1219
+ const updatedRows = await updatedResult.json();
1220
+ if (updatedRows.data.length > 0) {
1221
+ const message = transformRows(updatedRows.data)[0];
1222
+ if (message) {
1223
+ updatedMessages.push(message);
1224
+ }
1225
+ }
1226
+ }
1227
+ return updatedMessages.map((message) => {
1228
+ if (typeof message.content === "string") {
1229
+ try {
1230
+ message.content = JSON.parse(message.content);
1231
+ } catch {
1232
+ }
1233
+ }
1234
+ return message;
1235
+ });
1236
+ } catch (error$1) {
1237
+ throw new error.MastraError(
1238
+ {
1239
+ id: "CLICKHOUSE_STORAGE_UPDATE_MESSAGES_FAILED",
1240
+ domain: error.ErrorDomain.STORAGE,
1241
+ category: error.ErrorCategory.THIRD_PARTY,
1242
+ details: { messageIds: messages.map((m) => m.id).join(",") }
1243
+ },
1244
+ error$1
1245
+ );
1246
+ }
1247
+ }
1248
+ async getResourceById({ resourceId }) {
1249
+ try {
1250
+ const result = await this.client.query({
1251
+ query: `SELECT id, workingMemory, metadata, createdAt, updatedAt FROM ${storage.TABLE_RESOURCES} WHERE id = {resourceId:String}`,
1252
+ query_params: { resourceId },
1253
+ clickhouse_settings: {
1254
+ date_time_input_format: "best_effort",
1255
+ date_time_output_format: "iso",
1256
+ use_client_time_zone: 1,
1257
+ output_format_json_quote_64bit_integers: 0
1258
+ }
1259
+ });
1260
+ const rows = await result.json();
1261
+ if (rows.data.length === 0) {
1262
+ return null;
1263
+ }
1264
+ const resource = rows.data[0];
1265
+ return {
1266
+ id: resource.id,
1267
+ workingMemory: resource.workingMemory && typeof resource.workingMemory === "object" ? JSON.stringify(resource.workingMemory) : resource.workingMemory,
1268
+ metadata: resource.metadata && typeof resource.metadata === "string" ? JSON.parse(resource.metadata) : resource.metadata,
1269
+ createdAt: new Date(resource.createdAt),
1270
+ updatedAt: new Date(resource.updatedAt)
1271
+ };
1272
+ } catch (error$1) {
1273
+ throw new error.MastraError(
1274
+ {
1275
+ id: "CLICKHOUSE_STORAGE_GET_RESOURCE_BY_ID_FAILED",
1276
+ domain: error.ErrorDomain.STORAGE,
1277
+ category: error.ErrorCategory.THIRD_PARTY,
1278
+ details: { resourceId }
1279
+ },
1280
+ error$1
1281
+ );
1282
+ }
1283
+ }
1284
+ async saveResource({ resource }) {
1285
+ try {
1286
+ await this.client.insert({
1287
+ table: storage.TABLE_RESOURCES,
1288
+ format: "JSONEachRow",
1289
+ values: [
1290
+ {
1291
+ id: resource.id,
1292
+ workingMemory: resource.workingMemory,
1293
+ metadata: JSON.stringify(resource.metadata),
1294
+ createdAt: resource.createdAt.toISOString(),
1295
+ updatedAt: resource.updatedAt.toISOString()
1296
+ }
1297
+ ],
1298
+ clickhouse_settings: {
1299
+ date_time_input_format: "best_effort",
1300
+ use_client_time_zone: 1,
1301
+ output_format_json_quote_64bit_integers: 0
1302
+ }
1303
+ });
1304
+ return resource;
1305
+ } catch (error$1) {
1306
+ throw new error.MastraError(
1307
+ {
1308
+ id: "CLICKHOUSE_STORAGE_SAVE_RESOURCE_FAILED",
1309
+ domain: error.ErrorDomain.STORAGE,
1310
+ category: error.ErrorCategory.THIRD_PARTY,
1311
+ details: { resourceId: resource.id }
1312
+ },
1313
+ error$1
1314
+ );
1315
+ }
1316
+ }
1317
+ async updateResource({
1318
+ resourceId,
1319
+ workingMemory,
1320
+ metadata
1321
+ }) {
1322
+ try {
1323
+ const existingResource = await this.getResourceById({ resourceId });
1324
+ if (!existingResource) {
1325
+ const newResource = {
1326
+ id: resourceId,
1327
+ workingMemory,
1328
+ metadata: metadata || {},
1329
+ createdAt: /* @__PURE__ */ new Date(),
1330
+ updatedAt: /* @__PURE__ */ new Date()
1331
+ };
1332
+ return this.saveResource({ resource: newResource });
1333
+ }
1334
+ const updatedResource = {
1335
+ ...existingResource,
1336
+ workingMemory: workingMemory !== void 0 ? workingMemory : existingResource.workingMemory,
1337
+ metadata: {
1338
+ ...existingResource.metadata,
1339
+ ...metadata
1340
+ },
1341
+ updatedAt: /* @__PURE__ */ new Date()
1342
+ };
1343
+ const updateQuery = `
1344
+ ALTER TABLE ${storage.TABLE_RESOURCES}
1345
+ UPDATE workingMemory = {workingMemory:String}, metadata = {metadata:String}, updatedAt = {updatedAt:String}
1346
+ WHERE id = {resourceId:String}
1347
+ `;
1348
+ await this.client.command({
1349
+ query: updateQuery,
1350
+ query_params: {
1351
+ workingMemory: updatedResource.workingMemory,
1352
+ metadata: JSON.stringify(updatedResource.metadata),
1353
+ updatedAt: updatedResource.updatedAt.toISOString().replace("Z", ""),
1354
+ resourceId
1355
+ },
1356
+ clickhouse_settings: {
1357
+ date_time_input_format: "best_effort",
1358
+ use_client_time_zone: 1,
1359
+ output_format_json_quote_64bit_integers: 0
1360
+ }
1361
+ });
1362
+ await this.client.command({
1363
+ query: `OPTIMIZE TABLE ${storage.TABLE_RESOURCES} FINAL`,
1364
+ clickhouse_settings: {
1365
+ date_time_input_format: "best_effort",
1366
+ use_client_time_zone: 1,
1367
+ output_format_json_quote_64bit_integers: 0
1368
+ }
1369
+ });
1370
+ return updatedResource;
1371
+ } catch (error$1) {
1372
+ throw new error.MastraError(
1373
+ {
1374
+ id: "CLICKHOUSE_STORAGE_UPDATE_RESOURCE_FAILED",
1375
+ domain: error.ErrorDomain.STORAGE,
1376
+ category: error.ErrorCategory.THIRD_PARTY,
1377
+ details: { resourceId }
1378
+ },
1379
+ error$1
1380
+ );
1381
+ }
1382
+ }
1383
+ };
1384
+ var StoreOperationsClickhouse = class extends storage.StoreOperations {
1385
+ ttl;
1386
+ client;
1387
+ constructor({ client, ttl }) {
1388
+ super();
1389
+ this.ttl = ttl;
1390
+ this.client = client;
1391
+ }
1392
+ async hasColumn(table, column) {
1393
+ const result = await this.client.query({
1394
+ query: `DESCRIBE TABLE ${table}`,
1395
+ format: "JSONEachRow"
1396
+ });
1397
+ const columns = await result.json();
1398
+ return columns.some((c) => c.name === column);
1399
+ }
1400
+ getSqlType(type) {
1401
+ switch (type) {
1402
+ case "text":
1403
+ return "String";
1404
+ case "timestamp":
1405
+ return "DateTime64(3)";
1406
+ case "integer":
1407
+ case "bigint":
1408
+ return "Int64";
1409
+ case "jsonb":
1410
+ return "String";
1411
+ default:
1412
+ return super.getSqlType(type);
1413
+ }
1414
+ }
1415
+ async createTable({
1416
+ tableName,
1417
+ schema
1418
+ }) {
1419
+ try {
1420
+ const columns = Object.entries(schema).map(([name, def]) => {
1421
+ const constraints = [];
1422
+ if (!def.nullable) constraints.push("NOT NULL");
1423
+ const columnTtl = this.ttl?.[tableName]?.columns?.[name];
1424
+ return `"${name}" ${COLUMN_TYPES[def.type]} ${constraints.join(" ")} ${columnTtl ? `TTL toDateTime(${columnTtl.ttlKey ?? "createdAt"}) + INTERVAL ${columnTtl.interval} ${columnTtl.unit}` : ""}`;
1425
+ }).join(",\n");
1426
+ const rowTtl = this.ttl?.[tableName]?.row;
1427
+ const sql = tableName === storage.TABLE_WORKFLOW_SNAPSHOT ? `
1428
+ CREATE TABLE IF NOT EXISTS ${tableName} (
1429
+ ${["id String"].concat(columns)}
1430
+ )
1431
+ ENGINE = ${TABLE_ENGINES[tableName] ?? "MergeTree()"}
1432
+ PRIMARY KEY (createdAt, run_id, workflow_name)
1433
+ ORDER BY (createdAt, run_id, workflow_name)
1434
+ ${rowTtl ? `TTL toDateTime(${rowTtl.ttlKey ?? "createdAt"}) + INTERVAL ${rowTtl.interval} ${rowTtl.unit}` : ""}
1435
+ SETTINGS index_granularity = 8192
1436
+ ` : `
1437
+ CREATE TABLE IF NOT EXISTS ${tableName} (
1438
+ ${columns}
1439
+ )
1440
+ ENGINE = ${TABLE_ENGINES[tableName] ?? "MergeTree()"}
1441
+ PRIMARY KEY (createdAt, ${tableName === storage.TABLE_EVALS ? "run_id" : "id"})
1442
+ ORDER BY (createdAt, ${tableName === storage.TABLE_EVALS ? "run_id" : "id"})
1443
+ ${this.ttl?.[tableName]?.row ? `TTL toDateTime(createdAt) + INTERVAL ${this.ttl[tableName].row.interval} ${this.ttl[tableName].row.unit}` : ""}
1444
+ SETTINGS index_granularity = 8192
1445
+ `;
1446
+ await this.client.query({
1447
+ query: sql,
1448
+ clickhouse_settings: {
1449
+ // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
1450
+ date_time_input_format: "best_effort",
1451
+ date_time_output_format: "iso",
1452
+ use_client_time_zone: 1,
1453
+ output_format_json_quote_64bit_integers: 0
1454
+ }
1455
+ });
1456
+ } catch (error$1) {
1457
+ throw new error.MastraError(
1458
+ {
1459
+ id: "CLICKHOUSE_STORAGE_CREATE_TABLE_FAILED",
1460
+ domain: error.ErrorDomain.STORAGE,
1461
+ category: error.ErrorCategory.THIRD_PARTY,
1462
+ details: { tableName }
1463
+ },
1464
+ error$1
1465
+ );
1466
+ }
1467
+ }
1468
+ async alterTable({
1469
+ tableName,
1470
+ schema,
1471
+ ifNotExists
1472
+ }) {
1473
+ try {
1474
+ const describeSql = `DESCRIBE TABLE ${tableName}`;
1475
+ const result = await this.client.query({
1476
+ query: describeSql
1477
+ });
1478
+ const rows = await result.json();
1479
+ const existingColumnNames = new Set(rows.data.map((row) => row.name.toLowerCase()));
1480
+ for (const columnName of ifNotExists) {
1481
+ if (!existingColumnNames.has(columnName.toLowerCase()) && schema[columnName]) {
1482
+ const columnDef = schema[columnName];
1483
+ let sqlType = this.getSqlType(columnDef.type);
1484
+ if (columnDef.nullable !== false) {
1485
+ sqlType = `Nullable(${sqlType})`;
1486
+ }
1487
+ const defaultValue = columnDef.nullable === false ? this.getDefaultValue(columnDef.type) : "";
1488
+ const alterSql = `ALTER TABLE ${tableName} ADD COLUMN IF NOT EXISTS "${columnName}" ${sqlType} ${defaultValue}`.trim();
1489
+ await this.client.query({
1490
+ query: alterSql
1491
+ });
1492
+ this.logger?.debug?.(`Added column ${columnName} to table ${tableName}`);
1493
+ }
1494
+ }
1495
+ } catch (error$1) {
1496
+ throw new error.MastraError(
1497
+ {
1498
+ id: "CLICKHOUSE_STORAGE_ALTER_TABLE_FAILED",
1499
+ domain: error.ErrorDomain.STORAGE,
1500
+ category: error.ErrorCategory.THIRD_PARTY,
1501
+ details: { tableName }
1502
+ },
1503
+ error$1
1504
+ );
1505
+ }
1506
+ }
1507
+ async clearTable({ tableName }) {
1508
+ try {
1509
+ await this.client.query({
1510
+ query: `TRUNCATE TABLE ${tableName}`,
1511
+ clickhouse_settings: {
1512
+ // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
1513
+ date_time_input_format: "best_effort",
1514
+ date_time_output_format: "iso",
1515
+ use_client_time_zone: 1,
1516
+ output_format_json_quote_64bit_integers: 0
1517
+ }
1518
+ });
1519
+ } catch (error$1) {
1520
+ throw new error.MastraError(
1521
+ {
1522
+ id: "CLICKHOUSE_STORAGE_CLEAR_TABLE_FAILED",
1523
+ domain: error.ErrorDomain.STORAGE,
1524
+ category: error.ErrorCategory.THIRD_PARTY,
1525
+ details: { tableName }
1526
+ },
1527
+ error$1
1528
+ );
341
1529
  }
342
1530
  }
1531
+ async dropTable({ tableName }) {
1532
+ await this.client.query({
1533
+ query: `DROP TABLE IF EXISTS ${tableName}`
1534
+ });
1535
+ }
343
1536
  async insert({ tableName, record }) {
1537
+ const createdAt = (record.createdAt || record.created_at || /* @__PURE__ */ new Date()).toISOString();
1538
+ const updatedAt = (record.updatedAt || /* @__PURE__ */ new Date()).toISOString();
344
1539
  try {
345
- await this.db.insert({
1540
+ const result = await this.client.insert({
346
1541
  table: tableName,
347
1542
  values: [
348
1543
  {
349
1544
  ...record,
350
- createdAt: record.createdAt.toISOString(),
351
- updatedAt: record.updatedAt.toISOString()
1545
+ createdAt,
1546
+ updatedAt
352
1547
  }
353
1548
  ],
354
1549
  format: "JSONEachRow",
@@ -359,13 +1554,55 @@ var ClickhouseStore = class extends storage.MastraStorage {
359
1554
  use_client_time_zone: 1
360
1555
  }
361
1556
  });
362
- } catch (error) {
363
- console.error(`Error inserting into ${tableName}:`, error);
364
- throw error;
1557
+ console.log("INSERT RESULT", result);
1558
+ } catch (error$1) {
1559
+ throw new error.MastraError(
1560
+ {
1561
+ id: "CLICKHOUSE_STORAGE_INSERT_FAILED",
1562
+ domain: error.ErrorDomain.STORAGE,
1563
+ category: error.ErrorCategory.THIRD_PARTY,
1564
+ details: { tableName }
1565
+ },
1566
+ error$1
1567
+ );
1568
+ }
1569
+ }
1570
+ async batchInsert({ tableName, records }) {
1571
+ const recordsToBeInserted = records.map((record) => ({
1572
+ ...Object.fromEntries(
1573
+ Object.entries(record).map(([key, value]) => [
1574
+ key,
1575
+ storage.TABLE_SCHEMAS[tableName]?.[key]?.type === "timestamp" ? new Date(value).toISOString() : value
1576
+ ])
1577
+ )
1578
+ }));
1579
+ try {
1580
+ await this.client.insert({
1581
+ table: tableName,
1582
+ values: recordsToBeInserted,
1583
+ format: "JSONEachRow",
1584
+ clickhouse_settings: {
1585
+ // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
1586
+ date_time_input_format: "best_effort",
1587
+ use_client_time_zone: 1,
1588
+ output_format_json_quote_64bit_integers: 0
1589
+ }
1590
+ });
1591
+ } catch (error$1) {
1592
+ throw new error.MastraError(
1593
+ {
1594
+ id: "CLICKHOUSE_STORAGE_BATCH_INSERT_FAILED",
1595
+ domain: error.ErrorDomain.STORAGE,
1596
+ category: error.ErrorCategory.THIRD_PARTY,
1597
+ details: { tableName }
1598
+ },
1599
+ error$1
1600
+ );
365
1601
  }
366
1602
  }
367
1603
  async load({ tableName, keys }) {
368
1604
  try {
1605
+ const engine = TABLE_ENGINES[tableName] ?? "MergeTree()";
369
1606
  const keyEntries = Object.entries(keys);
370
1607
  const conditions = keyEntries.map(
371
1608
  ([key]) => `"${key}" = {var_${key}:${COLUMN_TYPES[storage.TABLE_SCHEMAS[tableName]?.[key]?.type ?? "text"]}}`
@@ -373,8 +1610,10 @@ var ClickhouseStore = class extends storage.MastraStorage {
373
1610
  const values = keyEntries.reduce((acc, [key, value]) => {
374
1611
  return { ...acc, [`var_${key}`]: value };
375
1612
  }, {});
376
- const result = await this.db.query({
377
- query: `SELECT *, toDateTime64(createdAt, 3) as createdAt, toDateTime64(updatedAt, 3) as updatedAt FROM ${tableName} ${TABLE_ENGINES[tableName].startsWith("ReplacingMergeTree") ? "FINAL" : ""} WHERE ${conditions}`,
1613
+ const hasUpdatedAt = storage.TABLE_SCHEMAS[tableName]?.updatedAt;
1614
+ const selectClause = `SELECT *, toDateTime64(createdAt, 3) as createdAt${hasUpdatedAt ? ", toDateTime64(updatedAt, 3) as updatedAt" : ""}`;
1615
+ const result = await this.client.query({
1616
+ query: `${selectClause} FROM ${tableName} ${engine.startsWith("ReplacingMergeTree") ? "FINAL" : ""} WHERE ${conditions}`,
378
1617
  query_params: values,
379
1618
  clickhouse_settings: {
380
1619
  // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
@@ -400,25 +1639,58 @@ var ClickhouseStore = class extends storage.MastraStorage {
400
1639
  }
401
1640
  const data = transformRow(rows.data[0]);
402
1641
  return data;
403
- } catch (error) {
404
- console.error(`Error loading from ${tableName}:`, error);
405
- throw error;
1642
+ } catch (error$1) {
1643
+ throw new error.MastraError(
1644
+ {
1645
+ id: "CLICKHOUSE_STORAGE_LOAD_FAILED",
1646
+ domain: error.ErrorDomain.STORAGE,
1647
+ category: error.ErrorCategory.THIRD_PARTY,
1648
+ details: { tableName }
1649
+ },
1650
+ error$1
1651
+ );
406
1652
  }
407
1653
  }
408
- async getThreadById({ threadId }) {
1654
+ };
1655
+ var ScoresStorageClickhouse = class extends storage.ScoresStorage {
1656
+ client;
1657
+ operations;
1658
+ constructor({ client, operations }) {
1659
+ super();
1660
+ this.client = client;
1661
+ this.operations = operations;
1662
+ }
1663
+ transformScoreRow(row) {
1664
+ const scorer = storage.safelyParseJSON(row.scorer);
1665
+ const extractStepResult = storage.safelyParseJSON(row.extractStepResult);
1666
+ const analyzeStepResult = storage.safelyParseJSON(row.analyzeStepResult);
1667
+ const metadata = storage.safelyParseJSON(row.metadata);
1668
+ const input = storage.safelyParseJSON(row.input);
1669
+ const output = storage.safelyParseJSON(row.output);
1670
+ const additionalContext = storage.safelyParseJSON(row.additionalContext);
1671
+ const runtimeContext = storage.safelyParseJSON(row.runtimeContext);
1672
+ const entity = storage.safelyParseJSON(row.entity);
1673
+ return {
1674
+ ...row,
1675
+ scorer,
1676
+ extractStepResult,
1677
+ analyzeStepResult,
1678
+ metadata,
1679
+ input,
1680
+ output,
1681
+ additionalContext,
1682
+ runtimeContext,
1683
+ entity,
1684
+ createdAt: new Date(row.createdAt),
1685
+ updatedAt: new Date(row.updatedAt)
1686
+ };
1687
+ }
1688
+ async getScoreById({ id }) {
409
1689
  try {
410
- const result = await this.db.query({
411
- query: `SELECT
412
- id,
413
- "resourceId",
414
- title,
415
- metadata,
416
- toDateTime64(createdAt, 3) as createdAt,
417
- toDateTime64(updatedAt, 3) as updatedAt
418
- FROM "${storage.TABLE_THREADS}"
419
- FINAL
420
- WHERE id = {var_id:String}`,
421
- query_params: { var_id: threadId },
1690
+ const result = await this.client.query({
1691
+ query: `SELECT * FROM ${storage.TABLE_SCORERS} WHERE id = {var_id:String}`,
1692
+ query_params: { var_id: id },
1693
+ format: "JSONEachRow",
422
1694
  clickhouse_settings: {
423
1695
  // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
424
1696
  date_time_input_format: "best_effort",
@@ -427,37 +1699,88 @@ var ClickhouseStore = class extends storage.MastraStorage {
427
1699
  output_format_json_quote_64bit_integers: 0
428
1700
  }
429
1701
  });
430
- const rows = await result.json();
431
- const thread = transformRow(rows.data[0]);
432
- if (!thread) {
1702
+ const resultJson = await result.json();
1703
+ if (!Array.isArray(resultJson) || resultJson.length === 0) {
433
1704
  return null;
434
1705
  }
435
- return {
436
- ...thread,
437
- metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata,
438
- createdAt: thread.createdAt,
439
- updatedAt: thread.updatedAt
1706
+ return this.transformScoreRow(resultJson[0]);
1707
+ } catch (error$1) {
1708
+ throw new error.MastraError(
1709
+ {
1710
+ id: "CLICKHOUSE_STORAGE_GET_SCORE_BY_ID_FAILED",
1711
+ domain: error.ErrorDomain.STORAGE,
1712
+ category: error.ErrorCategory.THIRD_PARTY,
1713
+ details: { scoreId: id }
1714
+ },
1715
+ error$1
1716
+ );
1717
+ }
1718
+ }
1719
+ async saveScore(score) {
1720
+ try {
1721
+ const record = {
1722
+ ...score
440
1723
  };
441
- } catch (error) {
442
- console.error(`Error getting thread ${threadId}:`, error);
443
- throw error;
1724
+ await this.client.insert({
1725
+ table: storage.TABLE_SCORERS,
1726
+ values: [record],
1727
+ format: "JSONEachRow",
1728
+ clickhouse_settings: {
1729
+ date_time_input_format: "best_effort",
1730
+ use_client_time_zone: 1,
1731
+ output_format_json_quote_64bit_integers: 0
1732
+ }
1733
+ });
1734
+ return { score };
1735
+ } catch (error$1) {
1736
+ throw new error.MastraError(
1737
+ {
1738
+ id: "CLICKHOUSE_STORAGE_SAVE_SCORE_FAILED",
1739
+ domain: error.ErrorDomain.STORAGE,
1740
+ category: error.ErrorCategory.THIRD_PARTY,
1741
+ details: { scoreId: score.id }
1742
+ },
1743
+ error$1
1744
+ );
444
1745
  }
445
1746
  }
446
- async getThreadsByResourceId({ resourceId }) {
1747
+ async getScoresByRunId({
1748
+ runId,
1749
+ pagination
1750
+ }) {
447
1751
  try {
448
- const result = await this.db.query({
449
- query: `SELECT
450
- id,
451
- "resourceId",
452
- title,
453
- metadata,
454
- toDateTime64(createdAt, 3) as createdAt,
455
- toDateTime64(updatedAt, 3) as updatedAt
456
- FROM "${storage.TABLE_THREADS}"
457
- WHERE "resourceId" = {var_resourceId:String}`,
458
- query_params: { var_resourceId: resourceId },
1752
+ const countResult = await this.client.query({
1753
+ query: `SELECT COUNT(*) as count FROM ${storage.TABLE_SCORERS} WHERE runId = {var_runId:String}`,
1754
+ query_params: { var_runId: runId },
1755
+ format: "JSONEachRow"
1756
+ });
1757
+ const countRows = await countResult.json();
1758
+ let total = 0;
1759
+ if (Array.isArray(countRows) && countRows.length > 0 && countRows[0]) {
1760
+ const countObj = countRows[0];
1761
+ total = Number(countObj.count);
1762
+ }
1763
+ if (!total) {
1764
+ return {
1765
+ pagination: {
1766
+ total: 0,
1767
+ page: pagination.page,
1768
+ perPage: pagination.perPage,
1769
+ hasMore: false
1770
+ },
1771
+ scores: []
1772
+ };
1773
+ }
1774
+ const offset = pagination.page * pagination.perPage;
1775
+ const result = await this.client.query({
1776
+ query: `SELECT * FROM ${storage.TABLE_SCORERS} WHERE runId = {var_runId:String} ORDER BY createdAt DESC LIMIT {var_limit:Int64} OFFSET {var_offset:Int64}`,
1777
+ query_params: {
1778
+ var_runId: runId,
1779
+ var_limit: pagination.perPage,
1780
+ var_offset: offset
1781
+ },
1782
+ format: "JSONEachRow",
459
1783
  clickhouse_settings: {
460
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
461
1784
  date_time_input_format: "best_effort",
462
1785
  date_time_output_format: "iso",
463
1786
  use_client_time_zone: 1,
@@ -465,191 +1788,349 @@ var ClickhouseStore = class extends storage.MastraStorage {
465
1788
  }
466
1789
  });
467
1790
  const rows = await result.json();
468
- const threads = transformRows(rows.data);
469
- return threads.map((thread) => ({
470
- ...thread,
471
- metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata,
472
- createdAt: thread.createdAt,
473
- updatedAt: thread.updatedAt
474
- }));
475
- } catch (error) {
476
- console.error(`Error getting threads for resource ${resourceId}:`, error);
477
- throw error;
1791
+ const scores = Array.isArray(rows) ? rows.map((row) => this.transformScoreRow(row)) : [];
1792
+ return {
1793
+ pagination: {
1794
+ total,
1795
+ page: pagination.page,
1796
+ perPage: pagination.perPage,
1797
+ hasMore: total > (pagination.page + 1) * pagination.perPage
1798
+ },
1799
+ scores
1800
+ };
1801
+ } catch (error$1) {
1802
+ throw new error.MastraError(
1803
+ {
1804
+ id: "CLICKHOUSE_STORAGE_GET_SCORES_BY_RUN_ID_FAILED",
1805
+ domain: error.ErrorDomain.STORAGE,
1806
+ category: error.ErrorCategory.THIRD_PARTY,
1807
+ details: { runId }
1808
+ },
1809
+ error$1
1810
+ );
478
1811
  }
479
1812
  }
480
- async saveThread({ thread }) {
1813
+ async getScoresByScorerId({
1814
+ scorerId,
1815
+ pagination
1816
+ }) {
481
1817
  try {
482
- await this.db.insert({
483
- table: storage.TABLE_THREADS,
484
- values: [
485
- {
486
- ...thread,
487
- createdAt: thread.createdAt.toISOString(),
488
- updatedAt: thread.updatedAt.toISOString()
489
- }
490
- ],
1818
+ const countResult = await this.client.query({
1819
+ query: `SELECT COUNT(*) as count FROM ${storage.TABLE_SCORERS} WHERE scorerId = {var_scorerId:String}`,
1820
+ query_params: { var_scorerId: scorerId },
1821
+ format: "JSONEachRow"
1822
+ });
1823
+ const countRows = await countResult.json();
1824
+ let total = 0;
1825
+ if (Array.isArray(countRows) && countRows.length > 0 && countRows[0]) {
1826
+ const countObj = countRows[0];
1827
+ total = Number(countObj.count);
1828
+ }
1829
+ if (!total) {
1830
+ return {
1831
+ pagination: {
1832
+ total: 0,
1833
+ page: pagination.page,
1834
+ perPage: pagination.perPage,
1835
+ hasMore: false
1836
+ },
1837
+ scores: []
1838
+ };
1839
+ }
1840
+ const offset = pagination.page * pagination.perPage;
1841
+ const result = await this.client.query({
1842
+ query: `SELECT * FROM ${storage.TABLE_SCORERS} WHERE scorerId = {var_scorerId:String} ORDER BY createdAt DESC LIMIT {var_limit:Int64} OFFSET {var_offset:Int64}`,
1843
+ query_params: {
1844
+ var_scorerId: scorerId,
1845
+ var_limit: pagination.perPage,
1846
+ var_offset: offset
1847
+ },
491
1848
  format: "JSONEachRow",
492
1849
  clickhouse_settings: {
493
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
494
1850
  date_time_input_format: "best_effort",
1851
+ date_time_output_format: "iso",
495
1852
  use_client_time_zone: 1,
496
1853
  output_format_json_quote_64bit_integers: 0
497
1854
  }
498
1855
  });
499
- return thread;
500
- } catch (error) {
501
- console.error("Error saving thread:", error);
502
- throw error;
1856
+ const rows = await result.json();
1857
+ const scores = Array.isArray(rows) ? rows.map((row) => this.transformScoreRow(row)) : [];
1858
+ return {
1859
+ pagination: {
1860
+ total,
1861
+ page: pagination.page,
1862
+ perPage: pagination.perPage,
1863
+ hasMore: total > (pagination.page + 1) * pagination.perPage
1864
+ },
1865
+ scores
1866
+ };
1867
+ } catch (error$1) {
1868
+ throw new error.MastraError(
1869
+ {
1870
+ id: "CLICKHOUSE_STORAGE_GET_SCORES_BY_SCORER_ID_FAILED",
1871
+ domain: error.ErrorDomain.STORAGE,
1872
+ category: error.ErrorCategory.THIRD_PARTY,
1873
+ details: { scorerId }
1874
+ },
1875
+ error$1
1876
+ );
503
1877
  }
504
1878
  }
505
- async updateThread({
506
- id,
507
- title,
508
- metadata
1879
+ async getScoresByEntityId({
1880
+ entityId,
1881
+ entityType,
1882
+ pagination
509
1883
  }) {
510
1884
  try {
511
- const existingThread = await this.getThreadById({ threadId: id });
512
- if (!existingThread) {
513
- throw new Error(`Thread ${id} not found`);
1885
+ const countResult = await this.client.query({
1886
+ query: `SELECT COUNT(*) as count FROM ${storage.TABLE_SCORERS} WHERE entityId = {var_entityId:String} AND entityType = {var_entityType:String}`,
1887
+ query_params: { var_entityId: entityId, var_entityType: entityType },
1888
+ format: "JSONEachRow"
1889
+ });
1890
+ const countRows = await countResult.json();
1891
+ let total = 0;
1892
+ if (Array.isArray(countRows) && countRows.length > 0 && countRows[0]) {
1893
+ const countObj = countRows[0];
1894
+ total = Number(countObj.count);
514
1895
  }
515
- const mergedMetadata = {
516
- ...existingThread.metadata,
517
- ...metadata
518
- };
519
- const updatedThread = {
520
- ...existingThread,
521
- title,
522
- metadata: mergedMetadata,
523
- updatedAt: /* @__PURE__ */ new Date()
524
- };
525
- await this.db.insert({
526
- table: storage.TABLE_THREADS,
1896
+ if (!total) {
1897
+ return {
1898
+ pagination: {
1899
+ total: 0,
1900
+ page: pagination.page,
1901
+ perPage: pagination.perPage,
1902
+ hasMore: false
1903
+ },
1904
+ scores: []
1905
+ };
1906
+ }
1907
+ const offset = pagination.page * pagination.perPage;
1908
+ const result = await this.client.query({
1909
+ query: `SELECT * FROM ${storage.TABLE_SCORERS} WHERE entityId = {var_entityId:String} AND entityType = {var_entityType:String} ORDER BY createdAt DESC LIMIT {var_limit:Int64} OFFSET {var_offset:Int64}`,
1910
+ query_params: {
1911
+ var_entityId: entityId,
1912
+ var_entityType: entityType,
1913
+ var_limit: pagination.perPage,
1914
+ var_offset: offset
1915
+ },
527
1916
  format: "JSONEachRow",
528
- values: [
529
- {
530
- id: updatedThread.id,
531
- resourceId: updatedThread.resourceId,
532
- title: updatedThread.title,
533
- metadata: updatedThread.metadata,
534
- createdAt: updatedThread.createdAt,
535
- updatedAt: updatedThread.updatedAt.toISOString()
536
- }
537
- ],
538
1917
  clickhouse_settings: {
539
1918
  date_time_input_format: "best_effort",
1919
+ date_time_output_format: "iso",
540
1920
  use_client_time_zone: 1,
541
1921
  output_format_json_quote_64bit_integers: 0
542
1922
  }
543
1923
  });
544
- return updatedThread;
545
- } catch (error) {
546
- console.error("Error updating thread:", error);
547
- throw error;
1924
+ const rows = await result.json();
1925
+ const scores = Array.isArray(rows) ? rows.map((row) => this.transformScoreRow(row)) : [];
1926
+ return {
1927
+ pagination: {
1928
+ total,
1929
+ page: pagination.page,
1930
+ perPage: pagination.perPage,
1931
+ hasMore: total > (pagination.page + 1) * pagination.perPage
1932
+ },
1933
+ scores
1934
+ };
1935
+ } catch (error$1) {
1936
+ throw new error.MastraError(
1937
+ {
1938
+ id: "CLICKHOUSE_STORAGE_GET_SCORES_BY_ENTITY_ID_FAILED",
1939
+ domain: error.ErrorDomain.STORAGE,
1940
+ category: error.ErrorCategory.THIRD_PARTY,
1941
+ details: { entityId, entityType }
1942
+ },
1943
+ error$1
1944
+ );
548
1945
  }
549
1946
  }
550
- async deleteThread({ threadId }) {
1947
+ };
1948
+ var TracesStorageClickhouse = class extends storage.TracesStorage {
1949
+ client;
1950
+ operations;
1951
+ constructor({ client, operations }) {
1952
+ super();
1953
+ this.client = client;
1954
+ this.operations = operations;
1955
+ }
1956
+ async getTracesPaginated(args) {
1957
+ const { name, scope, page = 0, perPage = 100, attributes, filters, dateRange } = args;
1958
+ const fromDate = dateRange?.start;
1959
+ const toDate = dateRange?.end;
1960
+ const currentOffset = page * perPage;
1961
+ const queryArgs = {};
1962
+ const conditions = [];
1963
+ if (name) {
1964
+ conditions.push(`name LIKE CONCAT({var_name:String}, '%')`);
1965
+ queryArgs.var_name = name;
1966
+ }
1967
+ if (scope) {
1968
+ conditions.push(`scope = {var_scope:String}`);
1969
+ queryArgs.var_scope = scope;
1970
+ }
1971
+ if (attributes) {
1972
+ Object.entries(attributes).forEach(([key, value]) => {
1973
+ conditions.push(`JSONExtractString(attributes, '${key}') = {var_attr_${key}:String}`);
1974
+ queryArgs[`var_attr_${key}`] = value;
1975
+ });
1976
+ }
1977
+ if (filters) {
1978
+ Object.entries(filters).forEach(([key, value]) => {
1979
+ conditions.push(`${key} = {var_col_${key}:${storage.TABLE_SCHEMAS.mastra_traces?.[key]?.type ?? "text"}}`);
1980
+ queryArgs[`var_col_${key}`] = value;
1981
+ });
1982
+ }
1983
+ if (fromDate) {
1984
+ conditions.push(`createdAt >= parseDateTime64BestEffort({var_from_date:String})`);
1985
+ queryArgs.var_from_date = fromDate.toISOString();
1986
+ }
1987
+ if (toDate) {
1988
+ conditions.push(`createdAt <= parseDateTime64BestEffort({var_to_date:String})`);
1989
+ queryArgs.var_to_date = toDate.toISOString();
1990
+ }
1991
+ const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
551
1992
  try {
552
- await this.db.command({
553
- query: `DELETE FROM "${storage.TABLE_MESSAGES}" WHERE thread_id = {var_thread_id:String};`,
554
- query_params: { var_thread_id: threadId },
1993
+ const countResult = await this.client.query({
1994
+ query: `SELECT COUNT(*) as count FROM ${storage.TABLE_TRACES} ${whereClause}`,
1995
+ query_params: queryArgs,
555
1996
  clickhouse_settings: {
1997
+ date_time_input_format: "best_effort",
1998
+ date_time_output_format: "iso",
1999
+ use_client_time_zone: 1,
556
2000
  output_format_json_quote_64bit_integers: 0
557
2001
  }
558
2002
  });
559
- await this.db.command({
560
- query: `DELETE FROM "${storage.TABLE_THREADS}" WHERE id = {var_id:String};`,
561
- query_params: { var_id: threadId },
2003
+ const countData = await countResult.json();
2004
+ const total = Number(countData.data?.[0]?.count ?? 0);
2005
+ if (total === 0) {
2006
+ return {
2007
+ traces: [],
2008
+ total: 0,
2009
+ page,
2010
+ perPage,
2011
+ hasMore: false
2012
+ };
2013
+ }
2014
+ const result = await this.client.query({
2015
+ query: `SELECT *, toDateTime64(createdAt, 3) as createdAt FROM ${storage.TABLE_TRACES} ${whereClause} ORDER BY "createdAt" DESC LIMIT {var_limit:UInt32} OFFSET {var_offset:UInt32}`,
2016
+ query_params: { ...queryArgs, var_limit: perPage, var_offset: currentOffset },
562
2017
  clickhouse_settings: {
2018
+ date_time_input_format: "best_effort",
2019
+ date_time_output_format: "iso",
2020
+ use_client_time_zone: 1,
563
2021
  output_format_json_quote_64bit_integers: 0
564
2022
  }
565
2023
  });
566
- } catch (error) {
567
- console.error("Error deleting thread:", error);
568
- throw error;
2024
+ if (!result) {
2025
+ return {
2026
+ traces: [],
2027
+ total,
2028
+ page,
2029
+ perPage,
2030
+ hasMore: false
2031
+ };
2032
+ }
2033
+ const resp = await result.json();
2034
+ const rows = resp.data;
2035
+ const traces = rows.map((row) => ({
2036
+ id: row.id,
2037
+ parentSpanId: row.parentSpanId,
2038
+ traceId: row.traceId,
2039
+ name: row.name,
2040
+ scope: row.scope,
2041
+ kind: row.kind,
2042
+ status: storage.safelyParseJSON(row.status),
2043
+ events: storage.safelyParseJSON(row.events),
2044
+ links: storage.safelyParseJSON(row.links),
2045
+ attributes: storage.safelyParseJSON(row.attributes),
2046
+ startTime: row.startTime,
2047
+ endTime: row.endTime,
2048
+ other: storage.safelyParseJSON(row.other),
2049
+ createdAt: row.createdAt
2050
+ }));
2051
+ return {
2052
+ traces,
2053
+ total,
2054
+ page,
2055
+ perPage,
2056
+ hasMore: currentOffset + traces.length < total
2057
+ };
2058
+ } catch (error$1) {
2059
+ if (error$1?.message?.includes("no such table") || error$1?.message?.includes("does not exist")) {
2060
+ return {
2061
+ traces: [],
2062
+ total: 0,
2063
+ page,
2064
+ perPage,
2065
+ hasMore: false
2066
+ };
2067
+ }
2068
+ throw new error.MastraError(
2069
+ {
2070
+ id: "CLICKHOUSE_STORAGE_GET_TRACES_PAGINATED_FAILED",
2071
+ domain: error.ErrorDomain.STORAGE,
2072
+ category: error.ErrorCategory.THIRD_PARTY,
2073
+ details: {
2074
+ name: name ?? null,
2075
+ scope: scope ?? null,
2076
+ page,
2077
+ perPage,
2078
+ attributes: attributes ? JSON.stringify(attributes) : null,
2079
+ filters: filters ? JSON.stringify(filters) : null,
2080
+ dateRange: dateRange ? JSON.stringify(dateRange) : null
2081
+ }
2082
+ },
2083
+ error$1
2084
+ );
2085
+ }
2086
+ }
2087
+ async getTraces({
2088
+ name,
2089
+ scope,
2090
+ page,
2091
+ perPage,
2092
+ attributes,
2093
+ filters,
2094
+ fromDate,
2095
+ toDate
2096
+ }) {
2097
+ const limit = perPage;
2098
+ const offset = page * perPage;
2099
+ const args = {};
2100
+ const conditions = [];
2101
+ if (name) {
2102
+ conditions.push(`name LIKE CONCAT({var_name:String}, '%')`);
2103
+ args.var_name = name;
2104
+ }
2105
+ if (scope) {
2106
+ conditions.push(`scope = {var_scope:String}`);
2107
+ args.var_scope = scope;
2108
+ }
2109
+ if (attributes) {
2110
+ Object.entries(attributes).forEach(([key, value]) => {
2111
+ conditions.push(`JSONExtractString(attributes, '${key}') = {var_attr_${key}:String}`);
2112
+ args[`var_attr_${key}`] = value;
2113
+ });
2114
+ }
2115
+ if (filters) {
2116
+ Object.entries(filters).forEach(([key, value]) => {
2117
+ conditions.push(`${key} = {var_col_${key}:${storage.TABLE_SCHEMAS.mastra_traces?.[key]?.type ?? "text"}}`);
2118
+ args[`var_col_${key}`] = value;
2119
+ });
2120
+ }
2121
+ if (fromDate) {
2122
+ conditions.push(`createdAt >= {var_from_date:DateTime64(3)}`);
2123
+ args.var_from_date = fromDate.getTime() / 1e3;
2124
+ }
2125
+ if (toDate) {
2126
+ conditions.push(`createdAt <= {var_to_date:DateTime64(3)}`);
2127
+ args.var_to_date = toDate.getTime() / 1e3;
569
2128
  }
570
- }
571
- async getMessages({
572
- threadId,
573
- resourceId,
574
- selectBy,
575
- format
576
- }) {
2129
+ const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
577
2130
  try {
578
- const messages = [];
579
- const limit = this.resolveMessageLimit({ last: selectBy?.last, defaultLimit: 40 });
580
- const include = selectBy?.include || [];
581
- if (include.length) {
582
- const includeResult = await this.db.query({
583
- query: `
584
- WITH ordered_messages AS (
585
- SELECT
586
- *,
587
- toDateTime64(createdAt, 3) as createdAt,
588
- toDateTime64(updatedAt, 3) as updatedAt,
589
- ROW_NUMBER() OVER (ORDER BY "createdAt" DESC) as row_num
590
- FROM "${storage.TABLE_MESSAGES}"
591
- WHERE thread_id = {var_thread_id:String}
592
- )
593
- SELECT
594
- m.id AS id,
595
- m.content as content,
596
- m.role as role,
597
- m.type as type,
598
- m.createdAt as createdAt,
599
- m.updatedAt as updatedAt,
600
- m.thread_id AS "threadId"
601
- FROM ordered_messages m
602
- WHERE m.id = ANY({var_include:Array(String)})
603
- OR EXISTS (
604
- SELECT 1 FROM ordered_messages target
605
- WHERE target.id = ANY({var_include:Array(String)})
606
- AND (
607
- -- Get previous messages based on the max withPreviousMessages
608
- (m.row_num <= target.row_num + {var_withPreviousMessages:Int64} AND m.row_num > target.row_num)
609
- OR
610
- -- Get next messages based on the max withNextMessages
611
- (m.row_num >= target.row_num - {var_withNextMessages:Int64} AND m.row_num < target.row_num)
612
- )
613
- )
614
- ORDER BY m."createdAt" DESC
615
- `,
616
- query_params: {
617
- var_thread_id: threadId,
618
- var_include: include.map((i) => i.id),
619
- var_withPreviousMessages: Math.max(...include.map((i) => i.withPreviousMessages || 0)),
620
- var_withNextMessages: Math.max(...include.map((i) => i.withNextMessages || 0))
621
- },
622
- clickhouse_settings: {
623
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
624
- date_time_input_format: "best_effort",
625
- date_time_output_format: "iso",
626
- use_client_time_zone: 1,
627
- output_format_json_quote_64bit_integers: 0
628
- }
629
- });
630
- const rows2 = await includeResult.json();
631
- messages.push(...transformRows(rows2.data));
632
- }
633
- const result = await this.db.query({
634
- query: `
635
- SELECT
636
- id,
637
- content,
638
- role,
639
- type,
640
- toDateTime64(createdAt, 3) as createdAt,
641
- thread_id AS "threadId"
642
- FROM "${storage.TABLE_MESSAGES}"
643
- WHERE thread_id = {threadId:String}
644
- AND id NOT IN ({exclude:Array(String)})
645
- ORDER BY "createdAt" DESC
646
- LIMIT {limit:Int64}
647
- `,
648
- query_params: {
649
- threadId,
650
- exclude: messages.map((m) => m.id),
651
- limit
652
- },
2131
+ const result = await this.client.query({
2132
+ query: `SELECT *, toDateTime64(createdAt, 3) as createdAt FROM ${storage.TABLE_TRACES} ${whereClause} ORDER BY "createdAt" DESC LIMIT ${limit} OFFSET ${offset}`,
2133
+ query_params: args,
653
2134
  clickhouse_settings: {
654
2135
  // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
655
2136
  date_time_input_format: "best_effort",
@@ -658,94 +2139,70 @@ var ClickhouseStore = class extends storage.MastraStorage {
658
2139
  output_format_json_quote_64bit_integers: 0
659
2140
  }
660
2141
  });
661
- const rows = await result.json();
662
- messages.push(...transformRows(rows.data));
663
- messages.sort((a, b) => new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime());
664
- messages.forEach((message) => {
665
- if (typeof message.content === "string") {
666
- try {
667
- message.content = JSON.parse(message.content);
668
- } catch {
669
- }
670
- }
671
- });
672
- const list = new agent.MessageList({ threadId, resourceId }).add(messages, "memory");
673
- if (format === `v2`) return list.get.all.v2();
674
- return list.get.all.v1();
675
- } catch (error) {
676
- console.error("Error getting messages:", error);
677
- throw error;
678
- }
679
- }
680
- async saveMessages(args) {
681
- const { messages, format = "v1" } = args;
682
- if (messages.length === 0) return messages;
683
- try {
684
- const threadId = messages[0]?.threadId;
685
- const resourceId = messages[0]?.resourceId;
686
- if (!threadId) {
687
- throw new Error("Thread ID is required");
2142
+ if (!result) {
2143
+ return [];
688
2144
  }
689
- const thread = await this.getThreadById({ threadId });
690
- if (!thread) {
691
- throw new Error(`Thread ${threadId} not found`);
2145
+ const resp = await result.json();
2146
+ const rows = resp.data;
2147
+ return rows.map((row) => ({
2148
+ id: row.id,
2149
+ parentSpanId: row.parentSpanId,
2150
+ traceId: row.traceId,
2151
+ name: row.name,
2152
+ scope: row.scope,
2153
+ kind: row.kind,
2154
+ status: storage.safelyParseJSON(row.status),
2155
+ events: storage.safelyParseJSON(row.events),
2156
+ links: storage.safelyParseJSON(row.links),
2157
+ attributes: storage.safelyParseJSON(row.attributes),
2158
+ startTime: row.startTime,
2159
+ endTime: row.endTime,
2160
+ other: storage.safelyParseJSON(row.other),
2161
+ createdAt: row.createdAt
2162
+ }));
2163
+ } catch (error$1) {
2164
+ if (error$1?.message?.includes("no such table") || error$1?.message?.includes("does not exist")) {
2165
+ return [];
692
2166
  }
693
- await Promise.all([
694
- // Insert messages
695
- this.db.insert({
696
- table: storage.TABLE_MESSAGES,
697
- format: "JSONEachRow",
698
- values: messages.map((message) => ({
699
- id: message.id,
700
- thread_id: threadId,
701
- content: typeof message.content === "string" ? message.content : JSON.stringify(message.content),
702
- createdAt: message.createdAt.toISOString(),
703
- role: message.role,
704
- type: message.type || "v2"
705
- })),
706
- clickhouse_settings: {
707
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
708
- date_time_input_format: "best_effort",
709
- use_client_time_zone: 1,
710
- output_format_json_quote_64bit_integers: 0
711
- }
712
- }),
713
- // Update thread's updatedAt timestamp
714
- this.db.insert({
715
- table: storage.TABLE_THREADS,
716
- format: "JSONEachRow",
717
- values: [
718
- {
719
- id: thread.id,
720
- resourceId: thread.resourceId,
721
- title: thread.title,
722
- metadata: thread.metadata,
723
- createdAt: thread.createdAt,
724
- updatedAt: (/* @__PURE__ */ new Date()).toISOString()
725
- }
726
- ],
727
- clickhouse_settings: {
728
- date_time_input_format: "best_effort",
729
- use_client_time_zone: 1,
730
- output_format_json_quote_64bit_integers: 0
2167
+ throw new error.MastraError(
2168
+ {
2169
+ id: "CLICKHOUSE_STORAGE_GET_TRACES_FAILED",
2170
+ domain: error.ErrorDomain.STORAGE,
2171
+ category: error.ErrorCategory.THIRD_PARTY,
2172
+ details: {
2173
+ name: name ?? null,
2174
+ scope: scope ?? null,
2175
+ page,
2176
+ perPage,
2177
+ attributes: attributes ? JSON.stringify(attributes) : null,
2178
+ filters: filters ? JSON.stringify(filters) : null,
2179
+ fromDate: fromDate?.toISOString() ?? null,
2180
+ toDate: toDate?.toISOString() ?? null
731
2181
  }
732
- })
733
- ]);
734
- const list = new agent.MessageList({ threadId, resourceId }).add(messages, "memory");
735
- if (format === `v2`) return list.get.all.v2();
736
- return list.get.all.v1();
737
- } catch (error) {
738
- console.error("Error saving messages:", error);
739
- throw error;
2182
+ },
2183
+ error$1
2184
+ );
740
2185
  }
741
2186
  }
2187
+ async batchTraceInsert(args) {
2188
+ await this.operations.batchInsert({ tableName: storage.TABLE_TRACES, records: args.records });
2189
+ }
2190
+ };
2191
+ var WorkflowsStorageClickhouse = class extends storage.WorkflowsStorage {
2192
+ client;
2193
+ operations;
2194
+ constructor({ client, operations }) {
2195
+ super();
2196
+ this.operations = operations;
2197
+ this.client = client;
2198
+ }
742
2199
  async persistWorkflowSnapshot({
743
2200
  workflowName,
744
2201
  runId,
745
2202
  snapshot
746
2203
  }) {
747
2204
  try {
748
- const currentSnapshot = await this.load({
2205
+ const currentSnapshot = await this.operations.load({
749
2206
  tableName: storage.TABLE_WORKFLOW_SNAPSHOT,
750
2207
  keys: { workflow_name: workflowName, run_id: runId }
751
2208
  });
@@ -761,7 +2218,7 @@ var ClickhouseStore = class extends storage.MastraStorage {
761
2218
  createdAt: now.toISOString(),
762
2219
  updatedAt: now.toISOString()
763
2220
  };
764
- await this.db.insert({
2221
+ await this.client.insert({
765
2222
  table: storage.TABLE_WORKFLOW_SNAPSHOT,
766
2223
  format: "JSONEachRow",
767
2224
  values: [persisting],
@@ -772,9 +2229,16 @@ var ClickhouseStore = class extends storage.MastraStorage {
772
2229
  output_format_json_quote_64bit_integers: 0
773
2230
  }
774
2231
  });
775
- } catch (error) {
776
- console.error("Error persisting workflow snapshot:", error);
777
- throw error;
2232
+ } catch (error$1) {
2233
+ throw new error.MastraError(
2234
+ {
2235
+ id: "CLICKHOUSE_STORAGE_PERSIST_WORKFLOW_SNAPSHOT_FAILED",
2236
+ domain: error.ErrorDomain.STORAGE,
2237
+ category: error.ErrorCategory.THIRD_PARTY,
2238
+ details: { workflowName, runId }
2239
+ },
2240
+ error$1
2241
+ );
778
2242
  }
779
2243
  }
780
2244
  async loadWorkflowSnapshot({
@@ -782,7 +2246,7 @@ var ClickhouseStore = class extends storage.MastraStorage {
782
2246
  runId
783
2247
  }) {
784
2248
  try {
785
- const result = await this.load({
2249
+ const result = await this.operations.load({
786
2250
  tableName: storage.TABLE_WORKFLOW_SNAPSHOT,
787
2251
  keys: {
788
2252
  workflow_name: workflowName,
@@ -793,9 +2257,16 @@ var ClickhouseStore = class extends storage.MastraStorage {
793
2257
  return null;
794
2258
  }
795
2259
  return result.snapshot;
796
- } catch (error) {
797
- console.error("Error loading workflow snapshot:", error);
798
- throw error;
2260
+ } catch (error$1) {
2261
+ throw new error.MastraError(
2262
+ {
2263
+ id: "CLICKHOUSE_STORAGE_LOAD_WORKFLOW_SNAPSHOT_FAILED",
2264
+ domain: error.ErrorDomain.STORAGE,
2265
+ category: error.ErrorCategory.THIRD_PARTY,
2266
+ details: { workflowName, runId }
2267
+ },
2268
+ error$1
2269
+ );
799
2270
  }
800
2271
  }
801
2272
  parseWorkflowRun(row) {
@@ -832,7 +2303,7 @@ var ClickhouseStore = class extends storage.MastraStorage {
832
2303
  values.var_workflow_name = workflowName;
833
2304
  }
834
2305
  if (resourceId) {
835
- const hasResourceId = await this.hasColumn(storage.TABLE_WORKFLOW_SNAPSHOT, "resourceId");
2306
+ const hasResourceId = await this.operations.hasColumn(storage.TABLE_WORKFLOW_SNAPSHOT, "resourceId");
836
2307
  if (hasResourceId) {
837
2308
  conditions.push(`resourceId = {var_resourceId:String}`);
838
2309
  values.var_resourceId = resourceId;
@@ -853,7 +2324,7 @@ var ClickhouseStore = class extends storage.MastraStorage {
853
2324
  const offsetClause = offset !== void 0 ? `OFFSET ${offset}` : "";
854
2325
  let total = 0;
855
2326
  if (limit !== void 0 && offset !== void 0) {
856
- const countResult = await this.db.query({
2327
+ const countResult = await this.client.query({
857
2328
  query: `SELECT COUNT(*) as count FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[storage.TABLE_WORKFLOW_SNAPSHOT].startsWith("ReplacingMergeTree") ? "FINAL" : ""} ${whereClause}`,
858
2329
  query_params: values,
859
2330
  format: "JSONEachRow"
@@ -861,21 +2332,21 @@ var ClickhouseStore = class extends storage.MastraStorage {
861
2332
  const countRows = await countResult.json();
862
2333
  total = Number(countRows[0]?.count ?? 0);
863
2334
  }
864
- const result = await this.db.query({
2335
+ const result = await this.client.query({
865
2336
  query: `
866
- SELECT
867
- workflow_name,
868
- run_id,
869
- snapshot,
870
- toDateTime64(createdAt, 3) as createdAt,
871
- toDateTime64(updatedAt, 3) as updatedAt,
872
- resourceId
873
- FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[storage.TABLE_WORKFLOW_SNAPSHOT].startsWith("ReplacingMergeTree") ? "FINAL" : ""}
874
- ${whereClause}
875
- ORDER BY createdAt DESC
876
- ${limitClause}
877
- ${offsetClause}
878
- `,
2337
+ SELECT
2338
+ workflow_name,
2339
+ run_id,
2340
+ snapshot,
2341
+ toDateTime64(createdAt, 3) as createdAt,
2342
+ toDateTime64(updatedAt, 3) as updatedAt,
2343
+ resourceId
2344
+ FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[storage.TABLE_WORKFLOW_SNAPSHOT].startsWith("ReplacingMergeTree") ? "FINAL" : ""}
2345
+ ${whereClause}
2346
+ ORDER BY createdAt DESC
2347
+ ${limitClause}
2348
+ ${offsetClause}
2349
+ `,
879
2350
  query_params: values,
880
2351
  format: "JSONEachRow"
881
2352
  });
@@ -885,9 +2356,16 @@ var ClickhouseStore = class extends storage.MastraStorage {
885
2356
  return this.parseWorkflowRun(row);
886
2357
  });
887
2358
  return { runs, total: total || runs.length };
888
- } catch (error) {
889
- console.error("Error getting workflow runs:", error);
890
- throw error;
2359
+ } catch (error$1) {
2360
+ throw new error.MastraError(
2361
+ {
2362
+ id: "CLICKHOUSE_STORAGE_GET_WORKFLOW_RUNS_FAILED",
2363
+ domain: error.ErrorDomain.STORAGE,
2364
+ category: error.ErrorCategory.THIRD_PARTY,
2365
+ details: { workflowName: workflowName ?? "", resourceId: resourceId ?? "" }
2366
+ },
2367
+ error$1
2368
+ );
891
2369
  }
892
2370
  }
893
2371
  async getWorkflowRunById({
@@ -906,18 +2384,18 @@ var ClickhouseStore = class extends storage.MastraStorage {
906
2384
  values.var_workflow_name = workflowName;
907
2385
  }
908
2386
  const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
909
- const result = await this.db.query({
2387
+ const result = await this.client.query({
910
2388
  query: `
911
- SELECT
912
- workflow_name,
913
- run_id,
914
- snapshot,
915
- toDateTime64(createdAt, 3) as createdAt,
916
- toDateTime64(updatedAt, 3) as updatedAt,
917
- resourceId
918
- FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[storage.TABLE_WORKFLOW_SNAPSHOT].startsWith("ReplacingMergeTree") ? "FINAL" : ""}
919
- ${whereClause}
920
- `,
2389
+ SELECT
2390
+ workflow_name,
2391
+ run_id,
2392
+ snapshot,
2393
+ toDateTime64(createdAt, 3) as createdAt,
2394
+ toDateTime64(updatedAt, 3) as updatedAt,
2395
+ resourceId
2396
+ FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[storage.TABLE_WORKFLOW_SNAPSHOT].startsWith("ReplacingMergeTree") ? "FINAL" : ""}
2397
+ ${whereClause}
2398
+ `,
921
2399
  query_params: values,
922
2400
  format: "JSONEachRow"
923
2401
  });
@@ -926,37 +2404,252 @@ var ClickhouseStore = class extends storage.MastraStorage {
926
2404
  return null;
927
2405
  }
928
2406
  return this.parseWorkflowRun(resultJson[0]);
929
- } catch (error) {
930
- console.error("Error getting workflow run by ID:", error);
931
- throw error;
2407
+ } catch (error$1) {
2408
+ throw new error.MastraError(
2409
+ {
2410
+ id: "CLICKHOUSE_STORAGE_GET_WORKFLOW_RUN_BY_ID_FAILED",
2411
+ domain: error.ErrorDomain.STORAGE,
2412
+ category: error.ErrorCategory.THIRD_PARTY,
2413
+ details: { runId: runId ?? "", workflowName: workflowName ?? "" }
2414
+ },
2415
+ error$1
2416
+ );
932
2417
  }
933
2418
  }
934
- async hasColumn(table, column) {
935
- const result = await this.db.query({
936
- query: `DESCRIBE TABLE ${table}`,
937
- format: "JSONEachRow"
2419
+ };
2420
+
2421
+ // src/storage/index.ts
2422
+ var ClickhouseStore = class extends storage.MastraStorage {
2423
+ db;
2424
+ ttl = {};
2425
+ stores;
2426
+ constructor(config) {
2427
+ super({ name: "ClickhouseStore" });
2428
+ this.db = client.createClient({
2429
+ url: config.url,
2430
+ username: config.username,
2431
+ password: config.password,
2432
+ clickhouse_settings: {
2433
+ date_time_input_format: "best_effort",
2434
+ date_time_output_format: "iso",
2435
+ // This is crucial
2436
+ use_client_time_zone: 1,
2437
+ output_format_json_quote_64bit_integers: 0
2438
+ }
938
2439
  });
939
- const columns = await result.json();
940
- return columns.some((c) => c.name === column);
2440
+ this.ttl = config.ttl;
2441
+ const operations = new StoreOperationsClickhouse({ client: this.db, ttl: this.ttl });
2442
+ const workflows = new WorkflowsStorageClickhouse({ client: this.db, operations });
2443
+ const scores = new ScoresStorageClickhouse({ client: this.db, operations });
2444
+ const legacyEvals = new LegacyEvalsStorageClickhouse({ client: this.db, operations });
2445
+ const traces = new TracesStorageClickhouse({ client: this.db, operations });
2446
+ const memory = new MemoryStorageClickhouse({ client: this.db, operations });
2447
+ this.stores = {
2448
+ operations,
2449
+ workflows,
2450
+ scores,
2451
+ legacyEvals,
2452
+ traces,
2453
+ memory
2454
+ };
2455
+ }
2456
+ get supports() {
2457
+ return {
2458
+ selectByIncludeResourceScope: true,
2459
+ resourceWorkingMemory: true,
2460
+ hasColumn: true,
2461
+ createTable: true,
2462
+ deleteMessages: false
2463
+ };
2464
+ }
2465
+ async getEvalsByAgentName(agentName, type) {
2466
+ return this.stores.legacyEvals.getEvalsByAgentName(agentName, type);
2467
+ }
2468
+ async getEvals(options) {
2469
+ return this.stores.legacyEvals.getEvals(options);
2470
+ }
2471
+ async batchInsert({ tableName, records }) {
2472
+ await this.stores.operations.batchInsert({ tableName, records });
2473
+ }
2474
+ async optimizeTable({ tableName }) {
2475
+ try {
2476
+ await this.db.command({
2477
+ query: `OPTIMIZE TABLE ${tableName} FINAL`
2478
+ });
2479
+ } catch (error$1) {
2480
+ throw new error.MastraError(
2481
+ {
2482
+ id: "CLICKHOUSE_STORAGE_OPTIMIZE_TABLE_FAILED",
2483
+ domain: error.ErrorDomain.STORAGE,
2484
+ category: error.ErrorCategory.THIRD_PARTY,
2485
+ details: { tableName }
2486
+ },
2487
+ error$1
2488
+ );
2489
+ }
2490
+ }
2491
+ async materializeTtl({ tableName }) {
2492
+ try {
2493
+ await this.db.command({
2494
+ query: `ALTER TABLE ${tableName} MATERIALIZE TTL;`
2495
+ });
2496
+ } catch (error$1) {
2497
+ throw new error.MastraError(
2498
+ {
2499
+ id: "CLICKHOUSE_STORAGE_MATERIALIZE_TTL_FAILED",
2500
+ domain: error.ErrorDomain.STORAGE,
2501
+ category: error.ErrorCategory.THIRD_PARTY,
2502
+ details: { tableName }
2503
+ },
2504
+ error$1
2505
+ );
2506
+ }
2507
+ }
2508
+ async createTable({
2509
+ tableName,
2510
+ schema
2511
+ }) {
2512
+ return this.stores.operations.createTable({ tableName, schema });
2513
+ }
2514
+ async dropTable({ tableName }) {
2515
+ return this.stores.operations.dropTable({ tableName });
2516
+ }
2517
+ async alterTable({
2518
+ tableName,
2519
+ schema,
2520
+ ifNotExists
2521
+ }) {
2522
+ return this.stores.operations.alterTable({ tableName, schema, ifNotExists });
2523
+ }
2524
+ async clearTable({ tableName }) {
2525
+ return this.stores.operations.clearTable({ tableName });
2526
+ }
2527
+ async insert({ tableName, record }) {
2528
+ return this.stores.operations.insert({ tableName, record });
2529
+ }
2530
+ async load({ tableName, keys }) {
2531
+ return this.stores.operations.load({ tableName, keys });
2532
+ }
2533
+ async persistWorkflowSnapshot({
2534
+ workflowName,
2535
+ runId,
2536
+ snapshot
2537
+ }) {
2538
+ return this.stores.workflows.persistWorkflowSnapshot({ workflowName, runId, snapshot });
2539
+ }
2540
+ async loadWorkflowSnapshot({
2541
+ workflowName,
2542
+ runId
2543
+ }) {
2544
+ return this.stores.workflows.loadWorkflowSnapshot({ workflowName, runId });
2545
+ }
2546
+ async getWorkflowRuns({
2547
+ workflowName,
2548
+ fromDate,
2549
+ toDate,
2550
+ limit,
2551
+ offset,
2552
+ resourceId
2553
+ } = {}) {
2554
+ return this.stores.workflows.getWorkflowRuns({ workflowName, fromDate, toDate, limit, offset, resourceId });
2555
+ }
2556
+ async getWorkflowRunById({
2557
+ runId,
2558
+ workflowName
2559
+ }) {
2560
+ return this.stores.workflows.getWorkflowRunById({ runId, workflowName });
2561
+ }
2562
+ async getTraces(args) {
2563
+ return this.stores.traces.getTraces(args);
2564
+ }
2565
+ async getTracesPaginated(args) {
2566
+ return this.stores.traces.getTracesPaginated(args);
2567
+ }
2568
+ async batchTraceInsert(args) {
2569
+ return this.stores.traces.batchTraceInsert(args);
2570
+ }
2571
+ async getThreadById({ threadId }) {
2572
+ return this.stores.memory.getThreadById({ threadId });
2573
+ }
2574
+ async getThreadsByResourceId({ resourceId }) {
2575
+ return this.stores.memory.getThreadsByResourceId({ resourceId });
2576
+ }
2577
+ async saveThread({ thread }) {
2578
+ return this.stores.memory.saveThread({ thread });
2579
+ }
2580
+ async updateThread({
2581
+ id,
2582
+ title,
2583
+ metadata
2584
+ }) {
2585
+ return this.stores.memory.updateThread({ id, title, metadata });
2586
+ }
2587
+ async deleteThread({ threadId }) {
2588
+ return this.stores.memory.deleteThread({ threadId });
2589
+ }
2590
+ async getThreadsByResourceIdPaginated(args) {
2591
+ return this.stores.memory.getThreadsByResourceIdPaginated(args);
2592
+ }
2593
+ async getMessages({
2594
+ threadId,
2595
+ resourceId,
2596
+ selectBy,
2597
+ format
2598
+ }) {
2599
+ return this.stores.memory.getMessages({ threadId, resourceId, selectBy, format });
2600
+ }
2601
+ async saveMessages(args) {
2602
+ return this.stores.memory.saveMessages(args);
2603
+ }
2604
+ async getMessagesPaginated(args) {
2605
+ return this.stores.memory.getMessagesPaginated(args);
2606
+ }
2607
+ async updateMessages(args) {
2608
+ return this.stores.memory.updateMessages(args);
2609
+ }
2610
+ async getResourceById({ resourceId }) {
2611
+ return this.stores.memory.getResourceById({ resourceId });
2612
+ }
2613
+ async saveResource({ resource }) {
2614
+ return this.stores.memory.saveResource({ resource });
2615
+ }
2616
+ async updateResource({
2617
+ resourceId,
2618
+ workingMemory,
2619
+ metadata
2620
+ }) {
2621
+ return this.stores.memory.updateResource({ resourceId, workingMemory, metadata });
2622
+ }
2623
+ async getScoreById({ id }) {
2624
+ return this.stores.scores.getScoreById({ id });
941
2625
  }
942
- async getTracesPaginated(_args) {
943
- throw new Error("Method not implemented.");
2626
+ async saveScore(_score) {
2627
+ return this.stores.scores.saveScore(_score);
2628
+ }
2629
+ async getScoresByRunId({
2630
+ runId,
2631
+ pagination
2632
+ }) {
2633
+ return this.stores.scores.getScoresByRunId({ runId, pagination });
944
2634
  }
945
- async getThreadsByResourceIdPaginated(_args) {
946
- throw new Error("Method not implemented.");
2635
+ async getScoresByEntityId({
2636
+ entityId,
2637
+ entityType,
2638
+ pagination
2639
+ }) {
2640
+ return this.stores.scores.getScoresByEntityId({ entityId, entityType, pagination });
947
2641
  }
948
- async getMessagesPaginated(_args) {
949
- throw new Error("Method not implemented.");
2642
+ async getScoresByScorerId({
2643
+ scorerId,
2644
+ pagination
2645
+ }) {
2646
+ return this.stores.scores.getScoresByScorerId({ scorerId, pagination });
950
2647
  }
951
2648
  async close() {
952
2649
  await this.db.close();
953
2650
  }
954
- async updateMessages(_args) {
955
- this.logger.error("updateMessages is not yet implemented in ClickhouseStore");
956
- throw new Error("Method not implemented");
957
- }
958
2651
  };
959
2652
 
960
- exports.COLUMN_TYPES = COLUMN_TYPES;
961
2653
  exports.ClickhouseStore = ClickhouseStore;
962
- exports.TABLE_ENGINES = TABLE_ENGINES;
2654
+ //# sourceMappingURL=index.cjs.map
2655
+ //# sourceMappingURL=index.cjs.map