@mastra/clickhouse 0.0.0-toolOptionTypes-20250917085558 → 0.0.0-top-level-fix-20251211111608

This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -4,6 +4,7 @@ var client = require('@clickhouse/client');
4
4
  var error = require('@mastra/core/error');
5
5
  var storage = require('@mastra/core/storage');
6
6
  var agent = require('@mastra/core/agent');
7
+ var evals = require('@mastra/core/evals');
7
8
 
8
9
  // src/storage/index.ts
9
10
  var TABLE_ENGINES = {
@@ -11,11 +12,11 @@ var TABLE_ENGINES = {
11
12
  [storage.TABLE_WORKFLOW_SNAPSHOT]: `ReplacingMergeTree()`,
12
13
  [storage.TABLE_TRACES]: `MergeTree()`,
13
14
  [storage.TABLE_THREADS]: `ReplacingMergeTree()`,
14
- [storage.TABLE_EVALS]: `MergeTree()`,
15
15
  [storage.TABLE_SCORERS]: `MergeTree()`,
16
16
  [storage.TABLE_RESOURCES]: `ReplacingMergeTree()`,
17
- // TODO: verify this is the correct engine for ai spans when implementing clickhouse storage
18
- [storage.TABLE_AI_SPANS]: `ReplacingMergeTree()`
17
+ // TODO: verify this is the correct engine for Spans when implementing clickhouse storage
18
+ [storage.TABLE_SPANS]: `ReplacingMergeTree()`,
19
+ mastra_agents: `ReplacingMergeTree()`
19
20
  };
20
21
  var COLUMN_TYPES = {
21
22
  text: "String",
@@ -46,8 +47,26 @@ function transformRows(rows) {
46
47
  return rows.map((row) => transformRow(row));
47
48
  }
48
49
 
49
- // src/storage/domains/legacy-evals/index.ts
50
- var LegacyEvalsStorageClickhouse = class extends storage.LegacyEvalsStorage {
50
+ // src/storage/domains/memory/index.ts
51
+ function serializeMetadata(metadata) {
52
+ if (!metadata || Object.keys(metadata).length === 0) {
53
+ return "{}";
54
+ }
55
+ return JSON.stringify(metadata);
56
+ }
57
+ function parseMetadata(metadata) {
58
+ if (!metadata) return {};
59
+ if (typeof metadata === "object") return metadata;
60
+ if (typeof metadata !== "string") return {};
61
+ const trimmed = metadata.trim();
62
+ if (trimmed === "" || trimmed === "null") return {};
63
+ try {
64
+ return JSON.parse(trimmed);
65
+ } catch {
66
+ return {};
67
+ }
68
+ }
69
+ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
51
70
  client;
52
71
  operations;
53
72
  constructor({ client, operations }) {
@@ -55,127 +74,128 @@ var LegacyEvalsStorageClickhouse = class extends storage.LegacyEvalsStorage {
55
74
  this.client = client;
56
75
  this.operations = operations;
57
76
  }
58
- transformEvalRow(row) {
59
- row = transformRow(row);
60
- let resultValue;
61
- try {
62
- if (row.result && typeof row.result === "string" && row.result.trim() !== "") {
63
- resultValue = JSON.parse(row.result);
64
- } else if (typeof row.result === "object" && row.result !== null) {
65
- resultValue = row.result;
66
- } else if (row.result === null || row.result === void 0 || row.result === "") {
67
- resultValue = { score: 0 };
68
- } else {
69
- throw new Error(`Invalid or empty result field: ${JSON.stringify(row.result)}`);
70
- }
71
- } catch (error$1) {
72
- console.error("Error parsing result field:", row.result, error$1);
73
- throw new error.MastraError({
74
- id: "CLICKHOUSE_STORAGE_INVALID_RESULT_FORMAT",
75
- text: `Invalid result format: ${JSON.stringify(row.result)}`,
76
- domain: error.ErrorDomain.STORAGE,
77
- category: error.ErrorCategory.USER
78
- });
79
- }
80
- let testInfoValue;
81
- try {
82
- if (row.test_info && typeof row.test_info === "string" && row.test_info.trim() !== "" && row.test_info !== "null") {
83
- testInfoValue = JSON.parse(row.test_info);
84
- } else if (typeof row.test_info === "object" && row.test_info !== null) {
85
- testInfoValue = row.test_info;
86
- }
87
- } catch {
88
- testInfoValue = void 0;
89
- }
90
- if (!resultValue || typeof resultValue !== "object" || !("score" in resultValue)) {
91
- throw new error.MastraError({
92
- id: "CLICKHOUSE_STORAGE_INVALID_METRIC_FORMAT",
93
- text: `Invalid MetricResult format: ${JSON.stringify(resultValue)}`,
94
- domain: error.ErrorDomain.STORAGE,
95
- category: error.ErrorCategory.USER
96
- });
97
- }
98
- return {
99
- input: row.input,
100
- output: row.output,
101
- result: resultValue,
102
- agentName: row.agent_name,
103
- metricName: row.metric_name,
104
- instructions: row.instructions,
105
- testInfo: testInfoValue,
106
- globalRunId: row.global_run_id,
107
- runId: row.run_id,
108
- createdAt: row.created_at
109
- };
110
- }
111
- async getEvalsByAgentName(agentName, type) {
77
+ async listMessagesById({ messageIds }) {
78
+ if (messageIds.length === 0) return { messages: [] };
112
79
  try {
113
- const baseQuery = `SELECT *, toDateTime64(created_at, 3) as createdAt FROM ${storage.TABLE_EVALS} WHERE agent_name = {var_agent_name:String}`;
114
- const typeCondition = type === "test" ? " AND test_info IS NOT NULL AND test_info != 'null' AND JSONExtractString(test_info, 'testPath') IS NOT NULL AND JSONExtractString(test_info, 'testPath') != ''" : type === "live" ? " AND (test_info IS NULL OR test_info = 'null' OR JSONExtractString(test_info, 'testPath') IS NULL OR JSONExtractString(test_info, 'testPath') = '')" : "";
115
80
  const result = await this.client.query({
116
- query: `${baseQuery}${typeCondition} ORDER BY createdAt DESC`,
117
- query_params: { var_agent_name: agentName },
81
+ query: `
82
+ SELECT
83
+ id,
84
+ content,
85
+ role,
86
+ type,
87
+ toDateTime64(createdAt, 3) as createdAt,
88
+ thread_id AS "threadId",
89
+ "resourceId"
90
+ FROM "${storage.TABLE_MESSAGES}"
91
+ WHERE id IN {messageIds:Array(String)}
92
+ ORDER BY "createdAt" DESC
93
+ `,
94
+ query_params: {
95
+ messageIds
96
+ },
118
97
  clickhouse_settings: {
98
+ // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
119
99
  date_time_input_format: "best_effort",
120
100
  date_time_output_format: "iso",
121
101
  use_client_time_zone: 1,
122
102
  output_format_json_quote_64bit_integers: 0
123
103
  }
124
104
  });
125
- if (!result) {
126
- return [];
127
- }
128
105
  const rows = await result.json();
129
- return rows.data.map((row) => this.transformEvalRow(row));
106
+ const messages = transformRows(rows.data);
107
+ messages.forEach((message) => {
108
+ if (typeof message.content === "string") {
109
+ try {
110
+ message.content = JSON.parse(message.content);
111
+ } catch {
112
+ }
113
+ }
114
+ });
115
+ const list = new agent.MessageList().add(messages, "memory");
116
+ return { messages: list.get.all.db() };
130
117
  } catch (error$1) {
131
- if (error$1?.message?.includes("no such table") || error$1?.message?.includes("does not exist")) {
132
- return [];
133
- }
134
118
  throw new error.MastraError(
135
119
  {
136
- id: "CLICKHOUSE_STORAGE_GET_EVALS_BY_AGENT_FAILED",
120
+ id: storage.createStorageErrorId("CLICKHOUSE", "LIST_MESSAGES_BY_ID", "FAILED"),
137
121
  domain: error.ErrorDomain.STORAGE,
138
122
  category: error.ErrorCategory.THIRD_PARTY,
139
- details: { agentName, type: type ?? null }
123
+ details: { messageIds: JSON.stringify(messageIds) }
140
124
  },
141
125
  error$1
142
126
  );
143
127
  }
144
128
  }
145
- async getEvals(options = {}) {
146
- const { agentName, type, page = 0, perPage = 100, dateRange } = options;
147
- const fromDate = dateRange?.start;
148
- const toDate = dateRange?.end;
149
- const conditions = [];
150
- if (agentName) {
151
- conditions.push(`agent_name = {var_agent_name:String}`);
152
- }
153
- if (type === "test") {
154
- conditions.push(
155
- `(test_info IS NOT NULL AND test_info != 'null' AND JSONExtractString(test_info, 'testPath') IS NOT NULL AND JSONExtractString(test_info, 'testPath') != '')`
156
- );
157
- } else if (type === "live") {
158
- conditions.push(
159
- `(test_info IS NULL OR test_info = 'null' OR JSONExtractString(test_info, 'testPath') IS NULL OR JSONExtractString(test_info, 'testPath') = '')`
129
+ async listMessages(args) {
130
+ const { threadId, resourceId, include, filter, perPage: perPageInput, page = 0, orderBy } = args;
131
+ const rawThreadIds = Array.isArray(threadId) ? threadId : [threadId];
132
+ const threadIds = rawThreadIds.filter((id) => id !== void 0 && id !== null).map((id) => (typeof id === "string" ? id : String(id)).trim()).filter((id) => id.length > 0);
133
+ if (page < 0) {
134
+ throw new error.MastraError(
135
+ {
136
+ id: storage.createStorageErrorId("CLICKHOUSE", "LIST_MESSAGES", "INVALID_PAGE"),
137
+ domain: error.ErrorDomain.STORAGE,
138
+ category: error.ErrorCategory.USER,
139
+ details: { page }
140
+ },
141
+ new Error("page must be >= 0")
160
142
  );
161
143
  }
162
- if (fromDate) {
163
- conditions.push(`created_at >= parseDateTime64BestEffort({var_from_date:String})`);
164
- fromDate.toISOString();
165
- }
166
- if (toDate) {
167
- conditions.push(`created_at <= parseDateTime64BestEffort({var_to_date:String})`);
168
- toDate.toISOString();
144
+ if (threadIds.length === 0) {
145
+ throw new error.MastraError(
146
+ {
147
+ id: storage.createStorageErrorId("CLICKHOUSE", "LIST_MESSAGES", "INVALID_THREAD_ID"),
148
+ domain: error.ErrorDomain.STORAGE,
149
+ category: error.ErrorCategory.THIRD_PARTY,
150
+ details: { threadId: Array.isArray(threadId) ? JSON.stringify(threadId) : String(threadId) }
151
+ },
152
+ new Error("threadId must be a non-empty string or array of non-empty strings")
153
+ );
169
154
  }
170
- const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
155
+ const perPageForQuery = storage.normalizePerPage(perPageInput, 40);
156
+ const { offset, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPageForQuery);
171
157
  try {
172
- const countResult = await this.client.query({
173
- query: `SELECT COUNT(*) as count FROM ${storage.TABLE_EVALS} ${whereClause}`,
174
- query_params: {
175
- ...agentName ? { var_agent_name: agentName } : {},
176
- ...fromDate ? { var_from_date: fromDate.toISOString() } : {},
177
- ...toDate ? { var_to_date: toDate.toISOString() } : {}
178
- },
158
+ const threadCondition = threadIds.length === 1 ? `thread_id = {threadId0:String}` : `thread_id IN (${threadIds.map((_, i) => `{threadId${i}:String}`).join(", ")})`;
159
+ let dataQuery = `
160
+ SELECT
161
+ id,
162
+ content,
163
+ role,
164
+ type,
165
+ toDateTime64(createdAt, 3) as createdAt,
166
+ thread_id AS "threadId",
167
+ resourceId
168
+ FROM ${storage.TABLE_MESSAGES}
169
+ WHERE ${threadCondition}
170
+ `;
171
+ const dataParams = {};
172
+ threadIds.forEach((tid, i) => {
173
+ dataParams[`threadId${i}`] = tid;
174
+ });
175
+ if (resourceId) {
176
+ dataQuery += ` AND resourceId = {resourceId:String}`;
177
+ dataParams.resourceId = resourceId;
178
+ }
179
+ if (filter?.dateRange?.start) {
180
+ const startDate = filter.dateRange.start instanceof Date ? filter.dateRange.start.toISOString() : new Date(filter.dateRange.start).toISOString();
181
+ dataQuery += ` AND createdAt >= parseDateTime64BestEffort({fromDate:String}, 3)`;
182
+ dataParams.fromDate = startDate;
183
+ }
184
+ if (filter?.dateRange?.end) {
185
+ const endDate = filter.dateRange.end instanceof Date ? filter.dateRange.end.toISOString() : new Date(filter.dateRange.end).toISOString();
186
+ dataQuery += ` AND createdAt <= parseDateTime64BestEffort({toDate:String}, 3)`;
187
+ dataParams.toDate = endDate;
188
+ }
189
+ const { field, direction } = this.parseOrderBy(orderBy, "ASC");
190
+ dataQuery += ` ORDER BY "${field}" ${direction}`;
191
+ if (perPageForResponse === false) ; else {
192
+ dataQuery += ` LIMIT {limit:Int64} OFFSET {offset:Int64}`;
193
+ dataParams.limit = perPageForQuery;
194
+ dataParams.offset = offset;
195
+ }
196
+ const result = await this.client.query({
197
+ query: dataQuery,
198
+ query_params: dataParams,
179
199
  clickhouse_settings: {
180
200
  date_time_input_format: "best_effort",
181
201
  date_time_output_format: "iso",
@@ -183,28 +203,31 @@ var LegacyEvalsStorageClickhouse = class extends storage.LegacyEvalsStorage {
183
203
  output_format_json_quote_64bit_integers: 0
184
204
  }
185
205
  });
186
- const countData = await countResult.json();
187
- const total = Number(countData.data?.[0]?.count ?? 0);
188
- const currentOffset = page * perPage;
189
- const hasMore = currentOffset + perPage < total;
190
- if (total === 0) {
191
- return {
192
- evals: [],
193
- total: 0,
194
- page,
195
- perPage,
196
- hasMore: false
197
- };
206
+ const rows = await result.json();
207
+ const paginatedMessages = transformRows(rows.data);
208
+ const paginatedCount = paginatedMessages.length;
209
+ let countQuery = `SELECT count() as total FROM ${storage.TABLE_MESSAGES} WHERE ${threadCondition}`;
210
+ const countParams = {};
211
+ threadIds.forEach((tid, i) => {
212
+ countParams[`threadId${i}`] = tid;
213
+ });
214
+ if (resourceId) {
215
+ countQuery += ` AND resourceId = {resourceId:String}`;
216
+ countParams.resourceId = resourceId;
198
217
  }
199
- const dataResult = await this.client.query({
200
- query: `SELECT *, toDateTime64(createdAt, 3) as createdAt FROM ${storage.TABLE_EVALS} ${whereClause} ORDER BY created_at DESC LIMIT {var_limit:UInt32} OFFSET {var_offset:UInt32}`,
201
- query_params: {
202
- ...agentName ? { var_agent_name: agentName } : {},
203
- ...fromDate ? { var_from_date: fromDate.toISOString() } : {},
204
- ...toDate ? { var_to_date: toDate.toISOString() } : {},
205
- var_limit: perPage || 100,
206
- var_offset: currentOffset || 0
207
- },
218
+ if (filter?.dateRange?.start) {
219
+ const startDate = filter.dateRange.start instanceof Date ? filter.dateRange.start.toISOString() : new Date(filter.dateRange.start).toISOString();
220
+ countQuery += ` AND createdAt >= parseDateTime64BestEffort({fromDate:String}, 3)`;
221
+ countParams.fromDate = startDate;
222
+ }
223
+ if (filter?.dateRange?.end) {
224
+ const endDate = filter.dateRange.end instanceof Date ? filter.dateRange.end.toISOString() : new Date(filter.dateRange.end).toISOString();
225
+ countQuery += ` AND createdAt <= parseDateTime64BestEffort({toDate:String}, 3)`;
226
+ countParams.toDate = endDate;
227
+ }
228
+ const countResult = await this.client.query({
229
+ query: countQuery,
230
+ query_params: countParams,
208
231
  clickhouse_settings: {
209
232
  date_time_input_format: "best_effort",
210
233
  date_time_output_format: "iso",
@@ -212,62 +235,39 @@ var LegacyEvalsStorageClickhouse = class extends storage.LegacyEvalsStorage {
212
235
  output_format_json_quote_64bit_integers: 0
213
236
  }
214
237
  });
215
- const rows = await dataResult.json();
216
- return {
217
- evals: rows.data.map((row) => this.transformEvalRow(row)),
218
- total,
219
- page,
220
- perPage,
221
- hasMore
222
- };
223
- } catch (error$1) {
224
- if (error$1?.message?.includes("no such table") || error$1?.message?.includes("does not exist")) {
238
+ const countData = await countResult.json();
239
+ const total = countData.data[0].total;
240
+ if (total === 0 && paginatedCount === 0 && (!include || include.length === 0)) {
225
241
  return {
226
- evals: [],
242
+ messages: [],
227
243
  total: 0,
228
244
  page,
229
- perPage,
245
+ perPage: perPageForResponse,
230
246
  hasMore: false
231
247
  };
232
248
  }
233
- throw new error.MastraError(
234
- {
235
- id: "CLICKHOUSE_STORAGE_GET_EVALS_FAILED",
236
- domain: error.ErrorDomain.STORAGE,
237
- category: error.ErrorCategory.THIRD_PARTY,
238
- details: { agentName: agentName ?? "all", type: type ?? "all" }
239
- },
240
- error$1
241
- );
242
- }
243
- }
244
- };
245
- var MemoryStorageClickhouse = class extends storage.MemoryStorage {
246
- client;
247
- operations;
248
- constructor({ client, operations }) {
249
- super();
250
- this.client = client;
251
- this.operations = operations;
252
- }
253
- async getMessages({
254
- threadId,
255
- resourceId,
256
- selectBy,
257
- format
258
- }) {
259
- try {
260
- if (!threadId.trim()) throw new Error("threadId must be a non-empty string");
261
- const messages = [];
262
- const limit = storage.resolveMessageLimit({ last: selectBy?.last, defaultLimit: 40 });
263
- const include = selectBy?.include || [];
264
- if (include.length) {
249
+ const messageIds = new Set(paginatedMessages.map((m) => m.id));
250
+ let includeMessages = [];
251
+ if (include && include.length > 0) {
252
+ const includesNeedingThread = include.filter((inc) => !inc.threadId);
253
+ const threadByMessageId = /* @__PURE__ */ new Map();
254
+ if (includesNeedingThread.length > 0) {
255
+ const { messages: includeLookup } = await this.listMessagesById({
256
+ messageIds: includesNeedingThread.map((inc) => inc.id)
257
+ });
258
+ for (const msg of includeLookup) {
259
+ if (msg.threadId) {
260
+ threadByMessageId.set(msg.id, msg.threadId);
261
+ }
262
+ }
263
+ }
265
264
  const unionQueries = [];
266
265
  const params = [];
267
266
  let paramIdx = 1;
268
267
  for (const inc of include) {
269
268
  const { id, withPreviousMessages = 0, withNextMessages = 0 } = inc;
270
- const searchId = inc.threadId || threadId;
269
+ const searchThreadId = inc.threadId ?? threadByMessageId.get(id);
270
+ if (!searchThreadId) continue;
271
271
  unionQueries.push(`
272
272
  SELECT * FROM (
273
273
  WITH numbered_messages AS (
@@ -282,155 +282,97 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
282
282
  FROM numbered_messages
283
283
  WHERE id = {var_include_id_${paramIdx}:String}
284
284
  )
285
- SELECT DISTINCT m.id, m.content, m.role, m.type, m."createdAt", m.thread_id AS "threadId"
285
+ SELECT DISTINCT m.id, m.content, m.role, m.type, m."createdAt", m.thread_id AS "threadId", m."resourceId"
286
286
  FROM numbered_messages m
287
287
  CROSS JOIN target_positions t
288
288
  WHERE m.row_num BETWEEN (t.target_pos - {var_withPreviousMessages_${paramIdx}:Int64}) AND (t.target_pos + {var_withNextMessages_${paramIdx}:Int64})
289
289
  ) AS query_${paramIdx}
290
290
  `);
291
291
  params.push(
292
- { [`var_thread_id_${paramIdx}`]: searchId },
292
+ { [`var_thread_id_${paramIdx}`]: searchThreadId },
293
293
  { [`var_include_id_${paramIdx}`]: id },
294
294
  { [`var_withPreviousMessages_${paramIdx}`]: withPreviousMessages },
295
295
  { [`var_withNextMessages_${paramIdx}`]: withNextMessages }
296
296
  );
297
297
  paramIdx++;
298
298
  }
299
- const finalQuery = unionQueries.join(" UNION ALL ") + ' ORDER BY "createdAt" DESC';
300
- const mergedParams = params.reduce((acc, paramObj) => ({ ...acc, ...paramObj }), {});
301
- const includeResult = await this.client.query({
302
- query: finalQuery,
303
- query_params: mergedParams,
304
- clickhouse_settings: {
305
- date_time_input_format: "best_effort",
306
- date_time_output_format: "iso",
307
- use_client_time_zone: 1,
308
- output_format_json_quote_64bit_integers: 0
299
+ if (unionQueries.length > 0) {
300
+ const finalQuery = unionQueries.join(" UNION ALL ") + ' ORDER BY "createdAt" ASC';
301
+ const mergedParams = params.reduce((acc, paramObj) => ({ ...acc, ...paramObj }), {});
302
+ const includeResult = await this.client.query({
303
+ query: finalQuery,
304
+ query_params: mergedParams,
305
+ clickhouse_settings: {
306
+ date_time_input_format: "best_effort",
307
+ date_time_output_format: "iso",
308
+ use_client_time_zone: 1,
309
+ output_format_json_quote_64bit_integers: 0
310
+ }
311
+ });
312
+ const includeRows = await includeResult.json();
313
+ includeMessages = transformRows(includeRows.data);
314
+ for (const includeMsg of includeMessages) {
315
+ if (!messageIds.has(includeMsg.id)) {
316
+ paginatedMessages.push(includeMsg);
317
+ messageIds.add(includeMsg.id);
318
+ }
309
319
  }
310
- });
311
- const rows2 = await includeResult.json();
312
- const includedMessages = transformRows(rows2.data);
313
- const seen = /* @__PURE__ */ new Set();
314
- const dedupedMessages = includedMessages.filter((message) => {
315
- if (seen.has(message.id)) return false;
316
- seen.add(message.id);
317
- return true;
318
- });
319
- messages.push(...dedupedMessages);
320
+ }
320
321
  }
321
- const result = await this.client.query({
322
- query: `
323
- SELECT
324
- id,
325
- content,
326
- role,
327
- type,
328
- toDateTime64(createdAt, 3) as createdAt,
329
- thread_id AS "threadId"
330
- FROM "${storage.TABLE_MESSAGES}"
331
- WHERE thread_id = {threadId:String}
332
- AND id NOT IN ({exclude:Array(String)})
333
- ORDER BY "createdAt" DESC
334
- LIMIT {limit:Int64}
335
- `,
336
- query_params: {
337
- threadId,
338
- exclude: messages.map((m) => m.id),
339
- limit
340
- },
341
- clickhouse_settings: {
342
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
343
- date_time_input_format: "best_effort",
344
- date_time_output_format: "iso",
345
- use_client_time_zone: 1,
346
- output_format_json_quote_64bit_integers: 0
322
+ const list = new agent.MessageList().add(paginatedMessages, "memory");
323
+ let finalMessages = list.get.all.db();
324
+ finalMessages = finalMessages.sort((a, b) => {
325
+ const isDateField = field === "createdAt" || field === "updatedAt";
326
+ const aValue = isDateField ? new Date(a[field]).getTime() : a[field];
327
+ const bValue = isDateField ? new Date(b[field]).getTime() : b[field];
328
+ if (aValue === bValue) {
329
+ return a.id.localeCompare(b.id);
347
330
  }
348
- });
349
- const rows = await result.json();
350
- messages.push(...transformRows(rows.data));
351
- messages.sort((a, b) => new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime());
352
- messages.forEach((message) => {
353
- if (typeof message.content === "string") {
354
- try {
355
- message.content = JSON.parse(message.content);
356
- } catch {
357
- }
331
+ if (typeof aValue === "number" && typeof bValue === "number") {
332
+ return direction === "ASC" ? aValue - bValue : bValue - aValue;
358
333
  }
334
+ return direction === "ASC" ? String(aValue).localeCompare(String(bValue)) : String(bValue).localeCompare(String(aValue));
359
335
  });
360
- const list = new agent.MessageList({ threadId, resourceId }).add(messages, "memory");
361
- if (format === `v2`) return list.get.all.v2();
362
- return list.get.all.v1();
363
- } catch (error$1) {
364
- throw new error.MastraError(
365
- {
366
- id: "CLICKHOUSE_STORAGE_GET_MESSAGES_FAILED",
367
- domain: error.ErrorDomain.STORAGE,
368
- category: error.ErrorCategory.THIRD_PARTY,
369
- details: { threadId, resourceId: resourceId ?? "" }
370
- },
371
- error$1
336
+ const threadIdSet = new Set(threadIds);
337
+ const returnedThreadMessageIds = new Set(
338
+ finalMessages.filter((m) => m.threadId && threadIdSet.has(m.threadId)).map((m) => m.id)
372
339
  );
373
- }
374
- }
375
- async getMessagesById({
376
- messageIds,
377
- format
378
- }) {
379
- if (messageIds.length === 0) return [];
380
- try {
381
- const result = await this.client.query({
382
- query: `
383
- SELECT
384
- id,
385
- content,
386
- role,
387
- type,
388
- toDateTime64(createdAt, 3) as createdAt,
389
- thread_id AS "threadId",
390
- "resourceId"
391
- FROM "${storage.TABLE_MESSAGES}"
392
- WHERE id IN {messageIds:Array(String)}
393
- ORDER BY "createdAt" DESC
394
- `,
395
- query_params: {
396
- messageIds
397
- },
398
- clickhouse_settings: {
399
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
400
- date_time_input_format: "best_effort",
401
- date_time_output_format: "iso",
402
- use_client_time_zone: 1,
403
- output_format_json_quote_64bit_integers: 0
404
- }
405
- });
406
- const rows = await result.json();
407
- const messages = transformRows(rows.data);
408
- messages.forEach((message) => {
409
- if (typeof message.content === "string") {
410
- try {
411
- message.content = JSON.parse(message.content);
412
- } catch {
413
- }
414
- }
415
- });
416
- const list = new agent.MessageList().add(messages, "memory");
417
- if (format === `v1`) return list.get.all.v1();
418
- return list.get.all.v2();
340
+ const allThreadMessagesReturned = returnedThreadMessageIds.size >= total;
341
+ const hasMore = perPageForResponse === false ? false : allThreadMessagesReturned ? false : offset + paginatedCount < total;
342
+ return {
343
+ messages: finalMessages,
344
+ total,
345
+ page,
346
+ perPage: perPageForResponse,
347
+ hasMore
348
+ };
419
349
  } catch (error$1) {
420
- throw new error.MastraError(
350
+ const mastraError = new error.MastraError(
421
351
  {
422
- id: "CLICKHOUSE_STORAGE_GET_MESSAGES_BY_ID_FAILED",
352
+ id: storage.createStorageErrorId("CLICKHOUSE", "LIST_MESSAGES", "FAILED"),
423
353
  domain: error.ErrorDomain.STORAGE,
424
354
  category: error.ErrorCategory.THIRD_PARTY,
425
- details: { messageIds: JSON.stringify(messageIds) }
355
+ details: {
356
+ threadId: Array.isArray(threadId) ? threadId.join(",") : threadId,
357
+ resourceId: resourceId ?? ""
358
+ }
426
359
  },
427
360
  error$1
428
361
  );
362
+ this.logger?.error?.(mastraError.toString());
363
+ this.logger?.trackException?.(mastraError);
364
+ return {
365
+ messages: [],
366
+ total: 0,
367
+ page,
368
+ perPage: perPageForResponse,
369
+ hasMore: false
370
+ };
429
371
  }
430
372
  }
431
373
  async saveMessages(args) {
432
- const { messages, format = "v1" } = args;
433
- if (messages.length === 0) return messages;
374
+ const { messages } = args;
375
+ if (messages.length === 0) return { messages };
434
376
  for (const message of messages) {
435
377
  const resourceId = message.resourceId;
436
378
  if (!resourceId) {
@@ -554,7 +496,7 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
554
496
  id: thread.id,
555
497
  resourceId: thread.resourceId,
556
498
  title: thread.title,
557
- metadata: thread.metadata,
499
+ metadata: serializeMetadata(thread.metadata),
558
500
  createdAt: thread.createdAt,
559
501
  updatedAt: (/* @__PURE__ */ new Date()).toISOString()
560
502
  })),
@@ -566,12 +508,11 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
566
508
  })
567
509
  ]);
568
510
  const list = new agent.MessageList().add(messages, "memory");
569
- if (format === `v2`) return list.get.all.v2();
570
- return list.get.all.v1();
511
+ return { messages: list.get.all.db() };
571
512
  } catch (error$1) {
572
513
  throw new error.MastraError(
573
514
  {
574
- id: "CLICKHOUSE_STORAGE_SAVE_MESSAGES_FAILED",
515
+ id: storage.createStorageErrorId("CLICKHOUSE", "SAVE_MESSAGES", "FAILED"),
575
516
  domain: error.ErrorDomain.STORAGE,
576
517
  category: error.ErrorCategory.THIRD_PARTY
577
518
  },
@@ -590,8 +531,9 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
590
531
  toDateTime64(createdAt, 3) as createdAt,
591
532
  toDateTime64(updatedAt, 3) as updatedAt
592
533
  FROM "${storage.TABLE_THREADS}"
593
- FINAL
594
- WHERE id = {var_id:String}`,
534
+ WHERE id = {var_id:String}
535
+ ORDER BY updatedAt DESC
536
+ LIMIT 1`,
595
537
  query_params: { var_id: threadId },
596
538
  clickhouse_settings: {
597
539
  // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
@@ -608,14 +550,14 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
608
550
  }
609
551
  return {
610
552
  ...thread,
611
- metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata,
553
+ metadata: parseMetadata(thread.metadata),
612
554
  createdAt: thread.createdAt,
613
555
  updatedAt: thread.updatedAt
614
556
  };
615
557
  } catch (error$1) {
616
558
  throw new error.MastraError(
617
559
  {
618
- id: "CLICKHOUSE_STORAGE_GET_THREAD_BY_ID_FAILED",
560
+ id: storage.createStorageErrorId("CLICKHOUSE", "GET_THREAD_BY_ID", "FAILED"),
619
561
  domain: error.ErrorDomain.STORAGE,
620
562
  category: error.ErrorCategory.THIRD_PARTY,
621
563
  details: { threadId }
@@ -624,47 +566,6 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
624
566
  );
625
567
  }
626
568
  }
627
- async getThreadsByResourceId({ resourceId }) {
628
- try {
629
- const result = await this.client.query({
630
- query: `SELECT
631
- id,
632
- "resourceId",
633
- title,
634
- metadata,
635
- toDateTime64(createdAt, 3) as createdAt,
636
- toDateTime64(updatedAt, 3) as updatedAt
637
- FROM "${storage.TABLE_THREADS}"
638
- WHERE "resourceId" = {var_resourceId:String}`,
639
- query_params: { var_resourceId: resourceId },
640
- clickhouse_settings: {
641
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
642
- date_time_input_format: "best_effort",
643
- date_time_output_format: "iso",
644
- use_client_time_zone: 1,
645
- output_format_json_quote_64bit_integers: 0
646
- }
647
- });
648
- const rows = await result.json();
649
- const threads = transformRows(rows.data);
650
- return threads.map((thread) => ({
651
- ...thread,
652
- metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata,
653
- createdAt: thread.createdAt,
654
- updatedAt: thread.updatedAt
655
- }));
656
- } catch (error$1) {
657
- throw new error.MastraError(
658
- {
659
- id: "CLICKHOUSE_STORAGE_GET_THREADS_BY_RESOURCE_ID_FAILED",
660
- domain: error.ErrorDomain.STORAGE,
661
- category: error.ErrorCategory.THIRD_PARTY,
662
- details: { resourceId }
663
- },
664
- error$1
665
- );
666
- }
667
- }
668
569
  async saveThread({ thread }) {
669
570
  try {
670
571
  await this.client.insert({
@@ -672,6 +573,7 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
672
573
  values: [
673
574
  {
674
575
  ...thread,
576
+ metadata: serializeMetadata(thread.metadata),
675
577
  createdAt: thread.createdAt.toISOString(),
676
578
  updatedAt: thread.updatedAt.toISOString()
677
579
  }
@@ -688,7 +590,7 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
688
590
  } catch (error$1) {
689
591
  throw new error.MastraError(
690
592
  {
691
- id: "CLICKHOUSE_STORAGE_SAVE_THREAD_FAILED",
593
+ id: storage.createStorageErrorId("CLICKHOUSE", "SAVE_THREAD", "FAILED"),
692
594
  domain: error.ErrorDomain.STORAGE,
693
595
  category: error.ErrorCategory.THIRD_PARTY,
694
596
  details: { threadId: thread.id }
@@ -725,7 +627,7 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
725
627
  id: updatedThread.id,
726
628
  resourceId: updatedThread.resourceId,
727
629
  title: updatedThread.title,
728
- metadata: updatedThread.metadata,
630
+ metadata: serializeMetadata(updatedThread.metadata),
729
631
  createdAt: updatedThread.createdAt,
730
632
  updatedAt: updatedThread.updatedAt.toISOString()
731
633
  }
@@ -740,7 +642,7 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
740
642
  } catch (error$1) {
741
643
  throw new error.MastraError(
742
644
  {
743
- id: "CLICKHOUSE_STORAGE_UPDATE_THREAD_FAILED",
645
+ id: storage.createStorageErrorId("CLICKHOUSE", "UPDATE_THREAD", "FAILED"),
744
646
  domain: error.ErrorDomain.STORAGE,
745
647
  category: error.ErrorCategory.THIRD_PARTY,
746
648
  details: { threadId: id, title }
@@ -761,177 +663,42 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
761
663
  await this.client.command({
762
664
  query: `DELETE FROM "${storage.TABLE_THREADS}" WHERE id = {var_id:String};`,
763
665
  query_params: { var_id: threadId },
764
- clickhouse_settings: {
765
- output_format_json_quote_64bit_integers: 0
766
- }
767
- });
768
- } catch (error$1) {
769
- throw new error.MastraError(
770
- {
771
- id: "CLICKHOUSE_STORAGE_DELETE_THREAD_FAILED",
772
- domain: error.ErrorDomain.STORAGE,
773
- category: error.ErrorCategory.THIRD_PARTY,
774
- details: { threadId }
775
- },
776
- error$1
777
- );
778
- }
779
- }
780
- async getThreadsByResourceIdPaginated(args) {
781
- const { resourceId, page = 0, perPage = 100 } = args;
782
- try {
783
- const currentOffset = page * perPage;
784
- const countResult = await this.client.query({
785
- query: `SELECT count() as total FROM ${storage.TABLE_THREADS} WHERE resourceId = {resourceId:String}`,
786
- query_params: { resourceId },
787
- clickhouse_settings: {
788
- date_time_input_format: "best_effort",
789
- date_time_output_format: "iso",
790
- use_client_time_zone: 1,
791
- output_format_json_quote_64bit_integers: 0
792
- }
793
- });
794
- const countData = await countResult.json();
795
- const total = countData.data[0].total;
796
- if (total === 0) {
797
- return {
798
- threads: [],
799
- total: 0,
800
- page,
801
- perPage,
802
- hasMore: false
803
- };
804
- }
805
- const dataResult = await this.client.query({
806
- query: `
807
- SELECT
808
- id,
809
- resourceId,
810
- title,
811
- metadata,
812
- toDateTime64(createdAt, 3) as createdAt,
813
- toDateTime64(updatedAt, 3) as updatedAt
814
- FROM ${storage.TABLE_THREADS}
815
- WHERE resourceId = {resourceId:String}
816
- ORDER BY createdAt DESC
817
- LIMIT {limit:Int64} OFFSET {offset:Int64}
818
- `,
819
- query_params: {
820
- resourceId,
821
- limit: perPage,
822
- offset: currentOffset
823
- },
824
- clickhouse_settings: {
825
- date_time_input_format: "best_effort",
826
- date_time_output_format: "iso",
827
- use_client_time_zone: 1,
828
- output_format_json_quote_64bit_integers: 0
829
- }
830
- });
831
- const rows = await dataResult.json();
832
- const threads = transformRows(rows.data);
833
- return {
834
- threads,
835
- total,
836
- page,
837
- perPage,
838
- hasMore: currentOffset + threads.length < total
839
- };
840
- } catch (error$1) {
841
- throw new error.MastraError(
842
- {
843
- id: "CLICKHOUSE_STORAGE_GET_THREADS_BY_RESOURCE_ID_PAGINATED_FAILED",
844
- domain: error.ErrorDomain.STORAGE,
845
- category: error.ErrorCategory.THIRD_PARTY,
846
- details: { resourceId, page }
847
- },
848
- error$1
849
- );
850
- }
851
- }
852
- async getMessagesPaginated(args) {
853
- const { threadId, resourceId, selectBy, format = "v1" } = args;
854
- const page = selectBy?.pagination?.page || 0;
855
- const perPageInput = selectBy?.pagination?.perPage;
856
- const perPage = perPageInput !== void 0 ? perPageInput : storage.resolveMessageLimit({ last: selectBy?.last, defaultLimit: 20 });
857
- try {
858
- if (!threadId.trim()) throw new Error("threadId must be a non-empty string");
859
- const offset = page * perPage;
860
- const dateRange = selectBy?.pagination?.dateRange;
861
- const fromDate = dateRange?.start;
862
- const toDate = dateRange?.end;
863
- const messages = [];
864
- if (selectBy?.include?.length) {
865
- const include = selectBy.include;
866
- const unionQueries = [];
867
- const params = [];
868
- let paramIdx = 1;
869
- for (const inc of include) {
870
- const { id, withPreviousMessages = 0, withNextMessages = 0 } = inc;
871
- const searchId = inc.threadId || threadId;
872
- unionQueries.push(`
873
- SELECT * FROM (
874
- WITH numbered_messages AS (
875
- SELECT
876
- id, content, role, type, "createdAt", thread_id, "resourceId",
877
- ROW_NUMBER() OVER (ORDER BY "createdAt" ASC) as row_num
878
- FROM "${storage.TABLE_MESSAGES}"
879
- WHERE thread_id = {var_thread_id_${paramIdx}:String}
880
- ),
881
- target_positions AS (
882
- SELECT row_num as target_pos
883
- FROM numbered_messages
884
- WHERE id = {var_include_id_${paramIdx}:String}
885
- )
886
- SELECT DISTINCT m.id, m.content, m.role, m.type, m."createdAt", m.thread_id AS "threadId"
887
- FROM numbered_messages m
888
- CROSS JOIN target_positions t
889
- WHERE m.row_num BETWEEN (t.target_pos - {var_withPreviousMessages_${paramIdx}:Int64}) AND (t.target_pos + {var_withNextMessages_${paramIdx}:Int64})
890
- ) AS query_${paramIdx}
891
- `);
892
- params.push(
893
- { [`var_thread_id_${paramIdx}`]: searchId },
894
- { [`var_include_id_${paramIdx}`]: id },
895
- { [`var_withPreviousMessages_${paramIdx}`]: withPreviousMessages },
896
- { [`var_withNextMessages_${paramIdx}`]: withNextMessages }
897
- );
898
- paramIdx++;
899
- }
900
- const finalQuery = unionQueries.join(" UNION ALL ") + ' ORDER BY "createdAt" DESC';
901
- const mergedParams = params.reduce((acc, paramObj) => ({ ...acc, ...paramObj }), {});
902
- const includeResult = await this.client.query({
903
- query: finalQuery,
904
- query_params: mergedParams,
905
- clickhouse_settings: {
906
- date_time_input_format: "best_effort",
907
- date_time_output_format: "iso",
908
- use_client_time_zone: 1,
909
- output_format_json_quote_64bit_integers: 0
910
- }
911
- });
912
- const rows2 = await includeResult.json();
913
- const includedMessages = transformRows(rows2.data);
914
- const seen = /* @__PURE__ */ new Set();
915
- const dedupedMessages = includedMessages.filter((message) => {
916
- if (seen.has(message.id)) return false;
917
- seen.add(message.id);
918
- return true;
919
- });
920
- messages.push(...dedupedMessages);
921
- }
922
- let countQuery = `SELECT count() as total FROM ${storage.TABLE_MESSAGES} WHERE thread_id = {threadId:String}`;
923
- const countParams = { threadId };
924
- if (fromDate) {
925
- countQuery += ` AND createdAt >= parseDateTime64BestEffort({fromDate:String}, 3)`;
926
- countParams.fromDate = fromDate.toISOString();
927
- }
928
- if (toDate) {
929
- countQuery += ` AND createdAt <= parseDateTime64BestEffort({toDate:String}, 3)`;
930
- countParams.toDate = toDate.toISOString();
931
- }
666
+ clickhouse_settings: {
667
+ output_format_json_quote_64bit_integers: 0
668
+ }
669
+ });
670
+ } catch (error$1) {
671
+ throw new error.MastraError(
672
+ {
673
+ id: storage.createStorageErrorId("CLICKHOUSE", "DELETE_THREAD", "FAILED"),
674
+ domain: error.ErrorDomain.STORAGE,
675
+ category: error.ErrorCategory.THIRD_PARTY,
676
+ details: { threadId }
677
+ },
678
+ error$1
679
+ );
680
+ }
681
+ }
682
+ async listThreadsByResourceId(args) {
683
+ const { resourceId, page = 0, perPage: perPageInput, orderBy } = args;
684
+ const perPage = storage.normalizePerPage(perPageInput, 100);
685
+ if (page < 0) {
686
+ throw new error.MastraError(
687
+ {
688
+ id: storage.createStorageErrorId("CLICKHOUSE", "LIST_THREADS_BY_RESOURCE_ID", "INVALID_PAGE"),
689
+ domain: error.ErrorDomain.STORAGE,
690
+ category: error.ErrorCategory.USER,
691
+ details: { page }
692
+ },
693
+ new Error("page must be >= 0")
694
+ );
695
+ }
696
+ const { offset, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
697
+ const { field, direction } = this.parseOrderBy(orderBy);
698
+ try {
932
699
  const countResult = await this.client.query({
933
- query: countQuery,
934
- query_params: countParams,
700
+ query: `SELECT count(DISTINCT id) as total FROM ${storage.TABLE_THREADS} WHERE resourceId = {resourceId:String}`,
701
+ query_params: { resourceId },
935
702
  clickhouse_settings: {
936
703
  date_time_input_format: "best_effort",
937
704
  date_time_output_format: "iso",
@@ -941,58 +708,46 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
941
708
  });
942
709
  const countData = await countResult.json();
943
710
  const total = countData.data[0].total;
944
- if (total === 0 && messages.length === 0) {
711
+ if (total === 0) {
945
712
  return {
946
- messages: [],
713
+ threads: [],
947
714
  total: 0,
948
715
  page,
949
- perPage,
716
+ perPage: perPageForResponse,
950
717
  hasMore: false
951
718
  };
952
719
  }
953
- const excludeIds = messages.map((m) => m.id);
954
- let dataQuery = `
955
- SELECT
956
- id,
957
- content,
958
- role,
959
- type,
960
- toDateTime64(createdAt, 3) as createdAt,
961
- thread_id AS "threadId",
962
- resourceId
963
- FROM ${storage.TABLE_MESSAGES}
964
- WHERE thread_id = {threadId:String}
965
- `;
966
- const dataParams = { threadId };
967
- if (fromDate) {
968
- dataQuery += ` AND createdAt >= parseDateTime64BestEffort({fromDate:String}, 3)`;
969
- dataParams.fromDate = fromDate.toISOString();
970
- }
971
- if (toDate) {
972
- dataQuery += ` AND createdAt <= parseDateTime64BestEffort({toDate:String}, 3)`;
973
- dataParams.toDate = toDate.toISOString();
974
- }
975
- if (excludeIds.length > 0) {
976
- dataQuery += ` AND id NOT IN ({excludeIds:Array(String)})`;
977
- dataParams.excludeIds = excludeIds;
978
- }
979
- if (selectBy?.last) {
980
- dataQuery += `
981
- ORDER BY createdAt DESC
982
- LIMIT {limit:Int64}
983
- `;
984
- dataParams.limit = perPage;
985
- } else {
986
- dataQuery += `
987
- ORDER BY createdAt ASC
988
- LIMIT {limit:Int64} OFFSET {offset:Int64}
989
- `;
990
- dataParams.limit = perPage;
991
- dataParams.offset = offset;
992
- }
993
- const result = await this.client.query({
994
- query: dataQuery,
995
- query_params: dataParams,
720
+ const dataResult = await this.client.query({
721
+ query: `
722
+ WITH ranked_threads AS (
723
+ SELECT
724
+ id,
725
+ resourceId,
726
+ title,
727
+ metadata,
728
+ toDateTime64(createdAt, 3) as createdAt,
729
+ toDateTime64(updatedAt, 3) as updatedAt,
730
+ ROW_NUMBER() OVER (PARTITION BY id ORDER BY updatedAt DESC) as row_num
731
+ FROM ${storage.TABLE_THREADS}
732
+ WHERE resourceId = {resourceId:String}
733
+ )
734
+ SELECT
735
+ id,
736
+ resourceId,
737
+ title,
738
+ metadata,
739
+ createdAt,
740
+ updatedAt
741
+ FROM ranked_threads
742
+ WHERE row_num = 1
743
+ ORDER BY "${field}" ${direction === "DESC" ? "DESC" : "ASC"}
744
+ LIMIT {perPage:Int64} OFFSET {offset:Int64}
745
+ `,
746
+ query_params: {
747
+ resourceId,
748
+ perPage,
749
+ offset
750
+ },
996
751
  clickhouse_settings: {
997
752
  date_time_input_format: "best_effort",
998
753
  date_time_output_format: "iso",
@@ -1000,35 +755,28 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
1000
755
  output_format_json_quote_64bit_integers: 0
1001
756
  }
1002
757
  });
1003
- const rows = await result.json();
1004
- const paginatedMessages = transformRows(rows.data);
1005
- messages.push(...paginatedMessages);
1006
- if (selectBy?.last) {
1007
- messages.sort((a, b) => new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime());
1008
- }
758
+ const rows = await dataResult.json();
759
+ const threads = transformRows(rows.data).map((thread) => ({
760
+ ...thread,
761
+ metadata: parseMetadata(thread.metadata)
762
+ }));
1009
763
  return {
1010
- messages: format === "v2" ? messages : messages,
764
+ threads,
1011
765
  total,
1012
766
  page,
1013
- perPage,
767
+ perPage: perPageForResponse,
1014
768
  hasMore: offset + perPage < total
1015
769
  };
1016
770
  } catch (error$1) {
1017
- const mastraError = new error.MastraError(
771
+ throw new error.MastraError(
1018
772
  {
1019
- id: "CLICKHOUSE_STORAGE_GET_MESSAGES_PAGINATED_FAILED",
773
+ id: storage.createStorageErrorId("CLICKHOUSE", "LIST_THREADS_BY_RESOURCE_ID", "FAILED"),
1020
774
  domain: error.ErrorDomain.STORAGE,
1021
775
  category: error.ErrorCategory.THIRD_PARTY,
1022
- details: {
1023
- threadId,
1024
- resourceId: resourceId ?? ""
1025
- }
776
+ details: { resourceId, page }
1026
777
  },
1027
778
  error$1
1028
779
  );
1029
- this.logger?.trackException?.(mastraError);
1030
- this.logger?.error?.(mastraError.toString());
1031
- return { messages: [], total: 0, page, perPage: perPageInput || 40, hasMore: false };
1032
780
  }
1033
781
  }
1034
782
  async updateMessages(args) {
@@ -1111,7 +859,7 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
1111
859
  UPDATE ${setClauses.join(", ")}
1112
860
  WHERE id = {var_id_${paramIdx}:String}
1113
861
  `;
1114
- console.log("Updating message:", id, "with query:", updateQuery, "values:", values);
862
+ console.info("Updating message:", id, "with query:", updateQuery, "values:", values);
1115
863
  updatePromises.push(
1116
864
  this.client.command({
1117
865
  query: updateQuery,
@@ -1170,7 +918,7 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
1170
918
  }
1171
919
  }
1172
920
  if (needsRetry) {
1173
- console.log("Update not applied correctly, retrying with DELETE + INSERT for message:", id);
921
+ console.info("Update not applied correctly, retrying with DELETE + INSERT for message:", id);
1174
922
  await this.client.command({
1175
923
  query: `DELETE FROM ${storage.TABLE_MESSAGES} WHERE id = {messageId:String}`,
1176
924
  query_params: { messageId: id },
@@ -1228,7 +976,7 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
1228
976
  const now = (/* @__PURE__ */ new Date()).toISOString().replace("Z", "");
1229
977
  const threadUpdatePromises = Array.from(threadIdsToUpdate).map(async (threadId) => {
1230
978
  const threadResult = await this.client.query({
1231
- query: `SELECT id, resourceId, title, metadata, createdAt FROM ${storage.TABLE_THREADS} WHERE id = {threadId:String}`,
979
+ query: `SELECT id, resourceId, title, metadata, createdAt FROM ${storage.TABLE_THREADS} WHERE id = {threadId:String} ORDER BY updatedAt DESC LIMIT 1`,
1232
980
  query_params: { threadId },
1233
981
  clickhouse_settings: {
1234
982
  date_time_input_format: "best_effort",
@@ -1257,7 +1005,7 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
1257
1005
  id: existingThread.id,
1258
1006
  resourceId: existingThread.resourceId,
1259
1007
  title: existingThread.title,
1260
- metadata: existingThread.metadata,
1008
+ metadata: typeof existingThread.metadata === "string" ? existingThread.metadata : serializeMetadata(existingThread.metadata),
1261
1009
  createdAt: existingThread.createdAt,
1262
1010
  updatedAt: now
1263
1011
  }
@@ -1304,7 +1052,7 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
1304
1052
  } catch (error$1) {
1305
1053
  throw new error.MastraError(
1306
1054
  {
1307
- id: "CLICKHOUSE_STORAGE_UPDATE_MESSAGES_FAILED",
1055
+ id: storage.createStorageErrorId("CLICKHOUSE", "UPDATE_MESSAGES", "FAILED"),
1308
1056
  domain: error.ErrorDomain.STORAGE,
1309
1057
  category: error.ErrorCategory.THIRD_PARTY,
1310
1058
  details: { messageIds: messages.map((m) => m.id).join(",") }
@@ -1316,7 +1064,7 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
1316
1064
  async getResourceById({ resourceId }) {
1317
1065
  try {
1318
1066
  const result = await this.client.query({
1319
- query: `SELECT id, workingMemory, metadata, createdAt, updatedAt FROM ${storage.TABLE_RESOURCES} WHERE id = {resourceId:String}`,
1067
+ query: `SELECT id, workingMemory, metadata, createdAt, updatedAt FROM ${storage.TABLE_RESOURCES} WHERE id = {resourceId:String} ORDER BY updatedAt DESC LIMIT 1`,
1320
1068
  query_params: { resourceId },
1321
1069
  clickhouse_settings: {
1322
1070
  date_time_input_format: "best_effort",
@@ -1340,7 +1088,7 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
1340
1088
  } catch (error$1) {
1341
1089
  throw new error.MastraError(
1342
1090
  {
1343
- id: "CLICKHOUSE_STORAGE_GET_RESOURCE_BY_ID_FAILED",
1091
+ id: storage.createStorageErrorId("CLICKHOUSE", "GET_RESOURCE_BY_ID", "FAILED"),
1344
1092
  domain: error.ErrorDomain.STORAGE,
1345
1093
  category: error.ErrorCategory.THIRD_PARTY,
1346
1094
  details: { resourceId }
@@ -1373,7 +1121,7 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
1373
1121
  } catch (error$1) {
1374
1122
  throw new error.MastraError(
1375
1123
  {
1376
- id: "CLICKHOUSE_STORAGE_SAVE_RESOURCE_FAILED",
1124
+ id: storage.createStorageErrorId("CLICKHOUSE", "SAVE_RESOURCE", "FAILED"),
1377
1125
  domain: error.ErrorDomain.STORAGE,
1378
1126
  category: error.ErrorCategory.THIRD_PARTY,
1379
1127
  details: { resourceId: resource.id }
@@ -1439,7 +1187,7 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
1439
1187
  } catch (error$1) {
1440
1188
  throw new error.MastraError(
1441
1189
  {
1442
- id: "CLICKHOUSE_STORAGE_UPDATE_RESOURCE_FAILED",
1190
+ id: storage.createStorageErrorId("CLICKHOUSE", "UPDATE_RESOURCE", "FAILED"),
1443
1191
  domain: error.ErrorDomain.STORAGE,
1444
1192
  category: error.ErrorCategory.THIRD_PARTY,
1445
1193
  details: { resourceId }
@@ -1488,6 +1236,9 @@ var StoreOperationsClickhouse = class extends storage.StoreOperations {
1488
1236
  const columns = Object.entries(schema).map(([name, def]) => {
1489
1237
  const constraints = [];
1490
1238
  if (!def.nullable) constraints.push("NOT NULL");
1239
+ if (name === "metadata" && def.type === "text" && def.nullable) {
1240
+ constraints.push("DEFAULT '{}'");
1241
+ }
1491
1242
  const columnTtl = this.ttl?.[tableName]?.columns?.[name];
1492
1243
  return `"${name}" ${COLUMN_TYPES[def.type]} ${constraints.join(" ")} ${columnTtl ? `TTL toDateTime(${columnTtl.ttlKey ?? "createdAt"}) + INTERVAL ${columnTtl.interval} ${columnTtl.unit}` : ""}`;
1493
1244
  }).join(",\n");
@@ -1506,8 +1257,8 @@ var StoreOperationsClickhouse = class extends storage.StoreOperations {
1506
1257
  ${columns}
1507
1258
  )
1508
1259
  ENGINE = ${TABLE_ENGINES[tableName] ?? "MergeTree()"}
1509
- PRIMARY KEY (createdAt, ${tableName === storage.TABLE_EVALS ? "run_id" : "id"})
1510
- ORDER BY (createdAt, ${tableName === storage.TABLE_EVALS ? "run_id" : "id"})
1260
+ PRIMARY KEY (createdAt, ${"id"})
1261
+ ORDER BY (createdAt, ${"id"})
1511
1262
  ${this.ttl?.[tableName]?.row ? `TTL toDateTime(createdAt) + INTERVAL ${this.ttl[tableName].row.interval} ${this.ttl[tableName].row.unit}` : ""}
1512
1263
  SETTINGS index_granularity = 8192
1513
1264
  `;
@@ -1524,7 +1275,7 @@ var StoreOperationsClickhouse = class extends storage.StoreOperations {
1524
1275
  } catch (error$1) {
1525
1276
  throw new error.MastraError(
1526
1277
  {
1527
- id: "CLICKHOUSE_STORAGE_CREATE_TABLE_FAILED",
1278
+ id: storage.createStorageErrorId("CLICKHOUSE", "CREATE_TABLE", "FAILED"),
1528
1279
  domain: error.ErrorDomain.STORAGE,
1529
1280
  category: error.ErrorCategory.THIRD_PARTY,
1530
1281
  details: { tableName }
@@ -1563,7 +1314,7 @@ var StoreOperationsClickhouse = class extends storage.StoreOperations {
1563
1314
  } catch (error$1) {
1564
1315
  throw new error.MastraError(
1565
1316
  {
1566
- id: "CLICKHOUSE_STORAGE_ALTER_TABLE_FAILED",
1317
+ id: storage.createStorageErrorId("CLICKHOUSE", "ALTER_TABLE", "FAILED"),
1567
1318
  domain: error.ErrorDomain.STORAGE,
1568
1319
  category: error.ErrorCategory.THIRD_PARTY,
1569
1320
  details: { tableName }
@@ -1587,7 +1338,7 @@ var StoreOperationsClickhouse = class extends storage.StoreOperations {
1587
1338
  } catch (error$1) {
1588
1339
  throw new error.MastraError(
1589
1340
  {
1590
- id: "CLICKHOUSE_STORAGE_CLEAR_TABLE_FAILED",
1341
+ id: storage.createStorageErrorId("CLICKHOUSE", "CLEAR_TABLE", "FAILED"),
1591
1342
  domain: error.ErrorDomain.STORAGE,
1592
1343
  category: error.ErrorCategory.THIRD_PARTY,
1593
1344
  details: { tableName }
@@ -1622,11 +1373,11 @@ var StoreOperationsClickhouse = class extends storage.StoreOperations {
1622
1373
  use_client_time_zone: 1
1623
1374
  }
1624
1375
  });
1625
- console.log("INSERT RESULT", result);
1376
+ console.info("INSERT RESULT", result);
1626
1377
  } catch (error$1) {
1627
1378
  throw new error.MastraError(
1628
1379
  {
1629
- id: "CLICKHOUSE_STORAGE_INSERT_FAILED",
1380
+ id: storage.createStorageErrorId("CLICKHOUSE", "INSERT", "FAILED"),
1630
1381
  domain: error.ErrorDomain.STORAGE,
1631
1382
  category: error.ErrorCategory.THIRD_PARTY,
1632
1383
  details: { tableName }
@@ -1659,7 +1410,7 @@ var StoreOperationsClickhouse = class extends storage.StoreOperations {
1659
1410
  } catch (error$1) {
1660
1411
  throw new error.MastraError(
1661
1412
  {
1662
- id: "CLICKHOUSE_STORAGE_BATCH_INSERT_FAILED",
1413
+ id: storage.createStorageErrorId("CLICKHOUSE", "BATCH_INSERT", "FAILED"),
1663
1414
  domain: error.ErrorDomain.STORAGE,
1664
1415
  category: error.ErrorCategory.THIRD_PARTY,
1665
1416
  details: { tableName }
@@ -1710,7 +1461,7 @@ var StoreOperationsClickhouse = class extends storage.StoreOperations {
1710
1461
  } catch (error$1) {
1711
1462
  throw new error.MastraError(
1712
1463
  {
1713
- id: "CLICKHOUSE_STORAGE_LOAD_FAILED",
1464
+ id: storage.createStorageErrorId("CLICKHOUSE", "LOAD", "FAILED"),
1714
1465
  domain: error.ErrorDomain.STORAGE,
1715
1466
  category: error.ErrorCategory.THIRD_PARTY,
1716
1467
  details: { tableName }
@@ -1728,30 +1479,15 @@ var ScoresStorageClickhouse = class extends storage.ScoresStorage {
1728
1479
  this.client = client;
1729
1480
  this.operations = operations;
1730
1481
  }
1482
+ /**
1483
+ * ClickHouse-specific score row transformation.
1484
+ * Converts timestamps to Date objects and filters out '_null_' values.
1485
+ */
1731
1486
  transformScoreRow(row) {
1732
- const scorer = storage.safelyParseJSON(row.scorer);
1733
- const preprocessStepResult = storage.safelyParseJSON(row.preprocessStepResult);
1734
- const analyzeStepResult = storage.safelyParseJSON(row.analyzeStepResult);
1735
- const metadata = storage.safelyParseJSON(row.metadata);
1736
- const input = storage.safelyParseJSON(row.input);
1737
- const output = storage.safelyParseJSON(row.output);
1738
- const additionalContext = storage.safelyParseJSON(row.additionalContext);
1739
- const runtimeContext = storage.safelyParseJSON(row.runtimeContext);
1740
- const entity = storage.safelyParseJSON(row.entity);
1741
- return {
1742
- ...row,
1743
- scorer,
1744
- preprocessStepResult,
1745
- analyzeStepResult,
1746
- metadata,
1747
- input,
1748
- output,
1749
- additionalContext,
1750
- runtimeContext,
1751
- entity,
1752
- createdAt: new Date(row.createdAt),
1753
- updatedAt: new Date(row.updatedAt)
1754
- };
1487
+ return storage.transformScoreRow(row, {
1488
+ convertTimestamps: true,
1489
+ nullValuePattern: "_null_"
1490
+ });
1755
1491
  }
1756
1492
  async getScoreById({ id }) {
1757
1493
  try {
@@ -1775,7 +1511,7 @@ var ScoresStorageClickhouse = class extends storage.ScoresStorage {
1775
1511
  } catch (error$1) {
1776
1512
  throw new error.MastraError(
1777
1513
  {
1778
- id: "CLICKHOUSE_STORAGE_GET_SCORE_BY_ID_FAILED",
1514
+ id: storage.createStorageErrorId("CLICKHOUSE", "GET_SCORE_BY_ID", "FAILED"),
1779
1515
  domain: error.ErrorDomain.STORAGE,
1780
1516
  category: error.ErrorCategory.THIRD_PARTY,
1781
1517
  details: { scoreId: id }
@@ -1785,10 +1521,44 @@ var ScoresStorageClickhouse = class extends storage.ScoresStorage {
1785
1521
  }
1786
1522
  }
1787
1523
  async saveScore(score) {
1524
+ let parsedScore;
1788
1525
  try {
1789
- const record = {
1790
- ...score
1791
- };
1526
+ parsedScore = evals.saveScorePayloadSchema.parse(score);
1527
+ } catch (error$1) {
1528
+ throw new error.MastraError(
1529
+ {
1530
+ id: storage.createStorageErrorId("CLICKHOUSE", "SAVE_SCORE", "VALIDATION_FAILED"),
1531
+ domain: error.ErrorDomain.STORAGE,
1532
+ category: error.ErrorCategory.USER,
1533
+ details: {
1534
+ scorer: score.scorer?.id ?? "unknown",
1535
+ entityId: score.entityId ?? "unknown",
1536
+ entityType: score.entityType ?? "unknown",
1537
+ traceId: score.traceId ?? "",
1538
+ spanId: score.spanId ?? ""
1539
+ }
1540
+ },
1541
+ error$1
1542
+ );
1543
+ }
1544
+ const now = /* @__PURE__ */ new Date();
1545
+ const id = crypto.randomUUID();
1546
+ const createdAt = now;
1547
+ const updatedAt = now;
1548
+ try {
1549
+ const record = {};
1550
+ for (const key of Object.keys(storage.SCORERS_SCHEMA)) {
1551
+ if (key === "id") {
1552
+ record[key] = id;
1553
+ continue;
1554
+ }
1555
+ if (key === "createdAt" || key === "updatedAt") {
1556
+ record[key] = now.toISOString();
1557
+ continue;
1558
+ }
1559
+ const value = parsedScore[key];
1560
+ record[key] = value === void 0 || value === null ? "_null_" : value;
1561
+ }
1792
1562
  await this.client.insert({
1793
1563
  table: storage.TABLE_SCORERS,
1794
1564
  values: [record],
@@ -1799,20 +1569,20 @@ var ScoresStorageClickhouse = class extends storage.ScoresStorage {
1799
1569
  output_format_json_quote_64bit_integers: 0
1800
1570
  }
1801
1571
  });
1802
- return { score };
1572
+ return { score: { ...parsedScore, id, createdAt, updatedAt } };
1803
1573
  } catch (error$1) {
1804
1574
  throw new error.MastraError(
1805
1575
  {
1806
- id: "CLICKHOUSE_STORAGE_SAVE_SCORE_FAILED",
1576
+ id: storage.createStorageErrorId("CLICKHOUSE", "SAVE_SCORE", "FAILED"),
1807
1577
  domain: error.ErrorDomain.STORAGE,
1808
1578
  category: error.ErrorCategory.THIRD_PARTY,
1809
- details: { scoreId: score.id }
1579
+ details: { scoreId: id }
1810
1580
  },
1811
1581
  error$1
1812
1582
  );
1813
1583
  }
1814
1584
  }
1815
- async getScoresByRunId({
1585
+ async listScoresByRunId({
1816
1586
  runId,
1817
1587
  pagination
1818
1588
  }) {
@@ -1828,24 +1598,28 @@ var ScoresStorageClickhouse = class extends storage.ScoresStorage {
  const countObj = countRows[0];
  total = Number(countObj.count);
  }
+ const { page, perPage: perPageInput } = pagination;
  if (!total) {
  return {
  pagination: {
  total: 0,
- page: pagination.page,
- perPage: pagination.perPage,
+ page,
+ perPage: perPageInput,
  hasMore: false
  },
  scores: []
  };
  }
- const offset = pagination.page * pagination.perPage;
+ const perPage = storage.normalizePerPage(perPageInput, 100);
+ const { offset: start, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
+ const limitValue = perPageInput === false ? total : perPage;
+ const end = perPageInput === false ? total : start + perPage;
  const result = await this.client.query({
  query: `SELECT * FROM ${storage.TABLE_SCORERS} WHERE runId = {var_runId:String} ORDER BY createdAt DESC LIMIT {var_limit:Int64} OFFSET {var_offset:Int64}`,
  query_params: {
  var_runId: runId,
- var_limit: pagination.perPage,
- var_offset: offset
+ var_limit: limitValue,
+ var_offset: start
  },
  format: "JSONEachRow",
  clickhouse_settings: {
@@ -1860,16 +1634,16 @@ var ScoresStorageClickhouse = class extends storage.ScoresStorage {
  return {
  pagination: {
  total,
- page: pagination.page,
- perPage: pagination.perPage,
- hasMore: total > (pagination.page + 1) * pagination.perPage
+ page,
+ perPage: perPageForResponse,
+ hasMore: end < total
  },
  scores
  };
  } catch (error$1) {
  throw new error.MastraError(
  {
- id: "CLICKHOUSE_STORAGE_GET_SCORES_BY_RUN_ID_FAILED",
+ id: storage.createStorageErrorId("CLICKHOUSE", "LIST_SCORES_BY_RUN_ID", "FAILED"),
  domain: error.ErrorDomain.STORAGE,
  category: error.ErrorCategory.THIRD_PARTY,
  details: { runId }
@@ -1878,7 +1652,7 @@ var ScoresStorageClickhouse = class extends storage.ScoresStorage {
  );
  }
  }
- async getScoresByScorerId({
+ async listScoresByScorerId({
  scorerId,
  entityId,
  entityType,
@@ -1912,24 +1686,28 @@ var ScoresStorageClickhouse = class extends storage.ScoresStorage {
  const countObj = countRows[0];
  total = Number(countObj.count);
  }
+ const { page, perPage: perPageInput } = pagination;
  if (!total) {
  return {
  pagination: {
  total: 0,
- page: pagination.page,
- perPage: pagination.perPage,
+ page,
+ perPage: perPageInput,
  hasMore: false
  },
  scores: []
  };
  }
- const offset = pagination.page * pagination.perPage;
+ const perPage = storage.normalizePerPage(perPageInput, 100);
+ const { offset: start, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
+ const limitValue = perPageInput === false ? total : perPage;
+ const end = perPageInput === false ? total : start + perPage;
  const result = await this.client.query({
  query: `SELECT * FROM ${storage.TABLE_SCORERS} WHERE ${whereClause} ORDER BY createdAt DESC LIMIT {var_limit:Int64} OFFSET {var_offset:Int64}`,
  query_params: {
  var_scorerId: scorerId,
- var_limit: pagination.perPage,
- var_offset: offset,
+ var_limit: limitValue,
+ var_offset: start,
  var_entityId: entityId,
  var_entityType: entityType,
  var_source: source
@@ -1947,16 +1725,16 @@ var ScoresStorageClickhouse = class extends storage.ScoresStorage {
  return {
  pagination: {
  total,
- page: pagination.page,
- perPage: pagination.perPage,
- hasMore: total > (pagination.page + 1) * pagination.perPage
+ page,
+ perPage: perPageForResponse,
+ hasMore: end < total
  },
  scores
  };
  } catch (error$1) {
  throw new error.MastraError(
  {
- id: "CLICKHOUSE_STORAGE_GET_SCORES_BY_SCORER_ID_FAILED",
+ id: storage.createStorageErrorId("CLICKHOUSE", "LIST_SCORES_BY_SCORER_ID", "FAILED"),
  domain: error.ErrorDomain.STORAGE,
  category: error.ErrorCategory.THIRD_PARTY,
  details: { scorerId }
@@ -1965,7 +1743,7 @@ var ScoresStorageClickhouse = class extends storage.ScoresStorage {
  );
  }
  }
- async getScoresByEntityId({
+ async listScoresByEntityId({
  entityId,
  entityType,
  pagination
@@ -1982,25 +1760,29 @@ var ScoresStorageClickhouse = class extends storage.ScoresStorage {
  const countObj = countRows[0];
  total = Number(countObj.count);
  }
+ const { page, perPage: perPageInput } = pagination;
  if (!total) {
  return {
  pagination: {
  total: 0,
- page: pagination.page,
- perPage: pagination.perPage,
+ page,
+ perPage: perPageInput,
  hasMore: false
  },
  scores: []
  };
  }
- const offset = pagination.page * pagination.perPage;
+ const perPage = storage.normalizePerPage(perPageInput, 100);
+ const { offset: start, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
+ const limitValue = perPageInput === false ? total : perPage;
+ const end = perPageInput === false ? total : start + perPage;
  const result = await this.client.query({
  query: `SELECT * FROM ${storage.TABLE_SCORERS} WHERE entityId = {var_entityId:String} AND entityType = {var_entityType:String} ORDER BY createdAt DESC LIMIT {var_limit:Int64} OFFSET {var_offset:Int64}`,
  query_params: {
  var_entityId: entityId,
  var_entityType: entityType,
- var_limit: pagination.perPage,
- var_offset: offset
+ var_limit: limitValue,
+ var_offset: start
  },
  format: "JSONEachRow",
  clickhouse_settings: {
@@ -2015,16 +1797,16 @@ var ScoresStorageClickhouse = class extends storage.ScoresStorage {
  return {
  pagination: {
  total,
- page: pagination.page,
- perPage: pagination.perPage,
- hasMore: total > (pagination.page + 1) * pagination.perPage
+ page,
+ perPage: perPageForResponse,
+ hasMore: end < total
  },
  scores
  };
  } catch (error$1) {
  throw new error.MastraError(
  {
- id: "CLICKHOUSE_STORAGE_GET_SCORES_BY_ENTITY_ID_FAILED",
+ id: storage.createStorageErrorId("CLICKHOUSE", "LIST_SCORES_BY_ENTITY_ID", "FAILED"),
  domain: error.ErrorDomain.STORAGE,
  category: error.ErrorCategory.THIRD_PARTY,
  details: { entityId, entityType }
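
// Illustration only (not part of the package diff): the renamed list* score
// methods wrap results in a pagination envelope and share the
// normalizePerPage/calculatePagination helpers shown above. A minimal sketch,
// assuming `store` is a configured ClickhouseStore and the ids are hypothetical:
// const { scores, pagination } = await store.listScoresByRunId({
//   runId: 'run-123',
//   pagination: { page: 0, perPage: 20 },  // per this diff, perPage: false fetches every row
// });
// if (pagination.hasMore) {
//   // fetch the next page with page: pagination.page + 1
// }
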
@@ -2033,76 +1815,51 @@ var ScoresStorageClickhouse = class extends storage.ScoresStorage {
  );
  }
  }
- };
- var TracesStorageClickhouse = class extends storage.TracesStorage {
- client;
- operations;
- constructor({ client, operations }) {
- super();
- this.client = client;
- this.operations = operations;
- }
- async getTracesPaginated(args) {
- const { name, scope, page = 0, perPage = 100, attributes, filters, dateRange } = args;
- const fromDate = dateRange?.start;
- const toDate = dateRange?.end;
- const currentOffset = page * perPage;
- const queryArgs = {};
- const conditions = [];
- if (name) {
- conditions.push(`name LIKE CONCAT({var_name:String}, '%')`);
- queryArgs.var_name = name;
- }
- if (scope) {
- conditions.push(`scope = {var_scope:String}`);
- queryArgs.var_scope = scope;
- }
- if (attributes) {
- Object.entries(attributes).forEach(([key, value]) => {
- conditions.push(`JSONExtractString(attributes, '${key}') = {var_attr_${key}:String}`);
- queryArgs[`var_attr_${key}`] = value;
- });
- }
- if (filters) {
- Object.entries(filters).forEach(([key, value]) => {
- conditions.push(`${key} = {var_col_${key}:${storage.TABLE_SCHEMAS.mastra_traces?.[key]?.type ?? "text"}}`);
- queryArgs[`var_col_${key}`] = value;
- });
- }
- if (fromDate) {
- conditions.push(`createdAt >= parseDateTime64BestEffort({var_from_date:String})`);
- queryArgs.var_from_date = fromDate.toISOString();
- }
- if (toDate) {
- conditions.push(`createdAt <= parseDateTime64BestEffort({var_to_date:String})`);
- queryArgs.var_to_date = toDate.toISOString();
- }
- const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
+ async listScoresBySpan({
+ traceId,
+ spanId,
+ pagination
+ }) {
  try {
  const countResult = await this.client.query({
- query: `SELECT COUNT(*) as count FROM ${storage.TABLE_TRACES} ${whereClause}`,
- query_params: queryArgs,
- clickhouse_settings: {
- date_time_input_format: "best_effort",
- date_time_output_format: "iso",
- use_client_time_zone: 1,
- output_format_json_quote_64bit_integers: 0
- }
+ query: `SELECT COUNT(*) as count FROM ${storage.TABLE_SCORERS} WHERE traceId = {var_traceId:String} AND spanId = {var_spanId:String}`,
+ query_params: {
+ var_traceId: traceId,
+ var_spanId: spanId
+ },
+ format: "JSONEachRow"
  });
- const countData = await countResult.json();
- const total = Number(countData.data?.[0]?.count ?? 0);
- if (total === 0) {
+ const countRows = await countResult.json();
+ let total = 0;
+ if (Array.isArray(countRows) && countRows.length > 0 && countRows[0]) {
+ const countObj = countRows[0];
+ total = Number(countObj.count);
+ }
+ const { page, perPage: perPageInput } = pagination;
+ if (!total) {
  return {
- traces: [],
- total: 0,
- page,
- perPage,
- hasMore: false
+ pagination: {
+ total: 0,
+ page,
+ perPage: perPageInput,
+ hasMore: false
+ },
+ scores: []
  };
  }
+ const perPage = storage.normalizePerPage(perPageInput, 100);
+ const { offset: start, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
+ const limitValue = perPageInput === false ? total : perPage;
+ const end = perPageInput === false ? total : start + perPage;
  const result = await this.client.query({
- query: `SELECT *, toDateTime64(createdAt, 3) as createdAt FROM ${storage.TABLE_TRACES} ${whereClause} ORDER BY "createdAt" DESC LIMIT {var_limit:UInt32} OFFSET {var_offset:UInt32}`,
- query_params: { ...queryArgs, var_limit: perPage, var_offset: currentOffset },
+ query: `SELECT * FROM ${storage.TABLE_SCORERS} WHERE traceId = {var_traceId:String} AND spanId = {var_spanId:String} ORDER BY createdAt DESC LIMIT {var_limit:Int64} OFFSET {var_offset:Int64}`,
+ query_params: {
+ var_traceId: traceId,
+ var_spanId: spanId,
+ var_limit: limitValue,
+ var_offset: start
+ },
+ format: "JSONEachRow",
  clickhouse_settings: {
  date_time_input_format: "best_effort",
  date_time_output_format: "iso",
@@ -2110,172 +1867,29 @@ var TracesStorageClickhouse = class extends storage.TracesStorage {
  output_format_json_quote_64bit_integers: 0
  }
  });
- if (!result) {
- return {
- traces: [],
- total,
- page,
- perPage,
- hasMore: false
- };
- }
- const resp = await result.json();
- const rows = resp.data;
- const traces = rows.map((row) => ({
- id: row.id,
- parentSpanId: row.parentSpanId,
- traceId: row.traceId,
- name: row.name,
- scope: row.scope,
- kind: row.kind,
- status: storage.safelyParseJSON(row.status),
- events: storage.safelyParseJSON(row.events),
- links: storage.safelyParseJSON(row.links),
- attributes: storage.safelyParseJSON(row.attributes),
- startTime: row.startTime,
- endTime: row.endTime,
- other: storage.safelyParseJSON(row.other),
- createdAt: row.createdAt
- }));
+ const rows = await result.json();
+ const scores = Array.isArray(rows) ? rows.map((row) => this.transformScoreRow(row)) : [];
  return {
- traces,
- total,
- page,
- perPage,
- hasMore: currentOffset + traces.length < total
- };
- } catch (error$1) {
- if (error$1?.message?.includes("no such table") || error$1?.message?.includes("does not exist")) {
- return {
- traces: [],
- total: 0,
+ pagination: {
+ total,
  page,
- perPage,
- hasMore: false
- };
- }
- throw new error.MastraError(
- {
- id: "CLICKHOUSE_STORAGE_GET_TRACES_PAGINATED_FAILED",
- domain: error.ErrorDomain.STORAGE,
- category: error.ErrorCategory.THIRD_PARTY,
- details: {
- name: name ?? null,
- scope: scope ?? null,
- page,
- perPage,
- attributes: attributes ? JSON.stringify(attributes) : null,
- filters: filters ? JSON.stringify(filters) : null,
- dateRange: dateRange ? JSON.stringify(dateRange) : null
- }
+ perPage: perPageForResponse,
+ hasMore: end < total
  },
- error$1
- );
- }
- }
- async getTraces({
- name,
- scope,
- page,
- perPage,
- attributes,
- filters,
- fromDate,
- toDate
- }) {
- const limit = perPage;
- const offset = page * perPage;
- const args = {};
- const conditions = [];
- if (name) {
- conditions.push(`name LIKE CONCAT({var_name:String}, '%')`);
- args.var_name = name;
- }
- if (scope) {
- conditions.push(`scope = {var_scope:String}`);
- args.var_scope = scope;
- }
- if (attributes) {
- Object.entries(attributes).forEach(([key, value]) => {
- conditions.push(`JSONExtractString(attributes, '${key}') = {var_attr_${key}:String}`);
- args[`var_attr_${key}`] = value;
- });
- }
- if (filters) {
- Object.entries(filters).forEach(([key, value]) => {
- conditions.push(`${key} = {var_col_${key}:${storage.TABLE_SCHEMAS.mastra_traces?.[key]?.type ?? "text"}}`);
- args[`var_col_${key}`] = value;
- });
- }
- if (fromDate) {
- conditions.push(`createdAt >= {var_from_date:DateTime64(3)}`);
- args.var_from_date = fromDate.getTime() / 1e3;
- }
- if (toDate) {
- conditions.push(`createdAt <= {var_to_date:DateTime64(3)}`);
- args.var_to_date = toDate.getTime() / 1e3;
- }
- const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
- try {
- const result = await this.client.query({
- query: `SELECT *, toDateTime64(createdAt, 3) as createdAt FROM ${storage.TABLE_TRACES} ${whereClause} ORDER BY "createdAt" DESC LIMIT ${limit} OFFSET ${offset}`,
- query_params: args,
- clickhouse_settings: {
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
- date_time_input_format: "best_effort",
- date_time_output_format: "iso",
- use_client_time_zone: 1,
- output_format_json_quote_64bit_integers: 0
- }
- });
- if (!result) {
- return [];
- }
- const resp = await result.json();
- const rows = resp.data;
- return rows.map((row) => ({
- id: row.id,
- parentSpanId: row.parentSpanId,
- traceId: row.traceId,
- name: row.name,
- scope: row.scope,
- kind: row.kind,
- status: storage.safelyParseJSON(row.status),
- events: storage.safelyParseJSON(row.events),
- links: storage.safelyParseJSON(row.links),
- attributes: storage.safelyParseJSON(row.attributes),
- startTime: row.startTime,
- endTime: row.endTime,
- other: storage.safelyParseJSON(row.other),
- createdAt: row.createdAt
- }));
+ scores
+ };
  } catch (error$1) {
- if (error$1?.message?.includes("no such table") || error$1?.message?.includes("does not exist")) {
- return [];
- }
  throw new error.MastraError(
  {
- id: "CLICKHOUSE_STORAGE_GET_TRACES_FAILED",
+ id: storage.createStorageErrorId("CLICKHOUSE", "LIST_SCORES_BY_SPAN", "FAILED"),
  domain: error.ErrorDomain.STORAGE,
  category: error.ErrorCategory.THIRD_PARTY,
- details: {
- name: name ?? null,
- scope: scope ?? null,
- page,
- perPage,
- attributes: attributes ? JSON.stringify(attributes) : null,
- filters: filters ? JSON.stringify(filters) : null,
- fromDate: fromDate?.toISOString() ?? null,
- toDate: toDate?.toISOString() ?? null
- }
+ details: { traceId, spanId }
  },
  error$1
  );
  }
  }
- async batchTraceInsert(args) {
- await this.operations.batchInsert({ tableName: storage.TABLE_TRACES, records: args.records });
- }
  };
  var WorkflowsStorageClickhouse = class extends storage.WorkflowsStorage {
  client;
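
// Illustration only (not part of the package diff): listScoresBySpan is the new
// span-scoped lookup that replaces the removed TracesStorage methods for this
// use case. A minimal sketch, assuming `store` is a configured ClickhouseStore;
// the trace/span ids are hypothetical.
// const { scores, pagination } = await store.listScoresBySpan({
//   traceId: 'trace-abc',
//   spanId: 'span-def',
//   pagination: { page: 0, perPage: 50 },
// });
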
@@ -2290,16 +1904,26 @@ var WorkflowsStorageClickhouse = class extends storage.WorkflowsStorage {
  // runId,
  // stepId,
  // result,
- // runtimeContext,
+ // requestContext,
  }) {
- throw new Error("Method not implemented.");
+ throw new error.MastraError({
+ id: storage.createStorageErrorId("CLICKHOUSE", "UPDATE_WORKFLOW_RESULTS", "NOT_IMPLEMENTED"),
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.SYSTEM,
+ text: "Method not implemented."
+ });
  }
  updateWorkflowState({
  // workflowName,
  // runId,
  // opts,
  }) {
- throw new Error("Method not implemented.");
+ throw new error.MastraError({
+ id: storage.createStorageErrorId("CLICKHOUSE", "UPDATE_WORKFLOW_STATE", "NOT_IMPLEMENTED"),
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.SYSTEM,
+ text: "Method not implemented."
+ });
  }
  async persistWorkflowSnapshot({
  workflowName,
@@ -2340,7 +1964,7 @@ var WorkflowsStorageClickhouse = class extends storage.WorkflowsStorage {
  } catch (error$1) {
  throw new error.MastraError(
  {
- id: "CLICKHOUSE_STORAGE_PERSIST_WORKFLOW_SNAPSHOT_FAILED",
+ id: storage.createStorageErrorId("CLICKHOUSE", "PERSIST_WORKFLOW_SNAPSHOT", "FAILED"),
  domain: error.ErrorDomain.STORAGE,
  category: error.ErrorCategory.THIRD_PARTY,
  details: { workflowName, runId }
@@ -2368,7 +1992,7 @@ var WorkflowsStorageClickhouse = class extends storage.WorkflowsStorage {
  } catch (error$1) {
  throw new error.MastraError(
  {
- id: "CLICKHOUSE_STORAGE_LOAD_WORKFLOW_SNAPSHOT_FAILED",
+ id: storage.createStorageErrorId("CLICKHOUSE", "LOAD_WORKFLOW_SNAPSHOT", "FAILED"),
  domain: error.ErrorDomain.STORAGE,
  category: error.ErrorCategory.THIRD_PARTY,
  details: { workflowName, runId }
@@ -2395,13 +2019,14 @@ var WorkflowsStorageClickhouse = class extends storage.WorkflowsStorage {
  resourceId: row.resourceId
  };
  }
- async getWorkflowRuns({
+ async listWorkflowRuns({
  workflowName,
  fromDate,
  toDate,
- limit,
- offset,
- resourceId
+ page,
+ perPage,
+ resourceId,
+ status
  } = {}) {
  try {
  const conditions = [];
@@ -2410,6 +2035,10 @@ var WorkflowsStorageClickhouse = class extends storage.WorkflowsStorage {
  conditions.push(`workflow_name = {var_workflow_name:String}`);
  values.var_workflow_name = workflowName;
  }
+ if (status) {
+ conditions.push(`JSONExtractString(snapshot, 'status') = {var_status:String}`);
+ values.var_status = status;
+ }
  if (resourceId) {
  const hasResourceId = await this.operations.hasColumn(storage.TABLE_WORKFLOW_SNAPSHOT, "resourceId");
  if (hasResourceId) {
@@ -2428,10 +2057,13 @@ var WorkflowsStorageClickhouse = class extends storage.WorkflowsStorage {
  values.var_to_date = toDate.getTime() / 1e3;
  }
  const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
- const limitClause = limit !== void 0 ? `LIMIT ${limit}` : "";
- const offsetClause = offset !== void 0 ? `OFFSET ${offset}` : "";
+ const usePagination = perPage !== void 0 && page !== void 0;
+ const normalizedPerPage = usePagination ? storage.normalizePerPage(perPage, Number.MAX_SAFE_INTEGER) : 0;
+ const offset = usePagination ? page * normalizedPerPage : 0;
+ const limitClause = usePagination ? `LIMIT ${normalizedPerPage}` : "";
+ const offsetClause = usePagination ? `OFFSET ${offset}` : "";
  let total = 0;
- if (limit !== void 0 && offset !== void 0) {
+ if (usePagination) {
  const countResult = await this.client.query({
  query: `SELECT COUNT(*) as count FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[storage.TABLE_WORKFLOW_SNAPSHOT].startsWith("ReplacingMergeTree") ? "FINAL" : ""} ${whereClause}`,
  query_params: values,
@@ -2467,7 +2099,7 @@ var WorkflowsStorageClickhouse = class extends storage.WorkflowsStorage {
  } catch (error$1) {
  throw new error.MastraError(
  {
- id: "CLICKHOUSE_STORAGE_GET_WORKFLOW_RUNS_FAILED",
+ id: storage.createStorageErrorId("CLICKHOUSE", "LIST_WORKFLOW_RUNS", "FAILED"),
  domain: error.ErrorDomain.STORAGE,
  category: error.ErrorCategory.THIRD_PARTY,
  details: { workflowName: workflowName ?? "", resourceId: resourceId ?? "" }
@@ -2516,7 +2148,7 @@ var WorkflowsStorageClickhouse = class extends storage.WorkflowsStorage {
  } catch (error$1) {
  throw new error.MastraError(
  {
- id: "CLICKHOUSE_STORAGE_GET_WORKFLOW_RUN_BY_ID_FAILED",
+ id: storage.createStorageErrorId("CLICKHOUSE", "GET_WORKFLOW_RUN_BY_ID", "FAILED"),
  domain: error.ErrorDomain.STORAGE,
  category: error.ErrorCategory.THIRD_PARTY,
  details: { runId: runId ?? "", workflowName: workflowName ?? "" }
@@ -2525,6 +2157,28 @@ var WorkflowsStorageClickhouse = class extends storage.WorkflowsStorage {
  );
  }
  }
+ async deleteWorkflowRunById({ runId, workflowName }) {
+ try {
+ const values = {
+ var_runId: runId,
+ var_workflow_name: workflowName
+ };
+ await this.client.command({
+ query: `DELETE FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} WHERE run_id = {var_runId:String} AND workflow_name = {var_workflow_name:String}`,
+ query_params: values
+ });
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: storage.createStorageErrorId("CLICKHOUSE", "DELETE_WORKFLOW_RUN_BY_ID", "FAILED"),
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: { runId, workflowName }
+ },
+ error$1
+ );
+ }
+ }
  };

  // src/storage/index.ts
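
// Illustration only (not part of the package diff): workflow runs are now listed
// with page/perPage (plus an optional status filter matched against the snapshot
// JSON) instead of limit/offset, and runs can be deleted. A minimal sketch,
// assuming `store` is a configured ClickhouseStore; names and values are hypothetical.
// const runs = await store.listWorkflowRuns({
//   workflowName: 'orderWorkflow',
//   status: 'success',          // compared to JSONExtractString(snapshot, 'status') per this diff
//   page: 0,
//   perPage: 25,
// });
// await store.deleteWorkflowRunById({ runId: 'run-123', workflowName: 'orderWorkflow' });
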
@@ -2533,7 +2187,7 @@ var ClickhouseStore = class extends storage.MastraStorage {
  ttl = {};
  stores;
  constructor(config) {
- super({ name: "ClickhouseStore" });
+ super({ id: config.id, name: "ClickhouseStore", disableInit: config.disableInit });
  this.db = client.createClient({
  url: config.url,
  username: config.username,
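
// Illustration only (not part of the package diff): the constructor now forwards
// `id` and `disableInit` to MastraStorage. A minimal sketch; `url` and `username`
// are visible in this hunk, while the `password` field and all example values are
// assumptions.
// const store = new ClickhouseStore({
//   id: 'clickhouse-main',
//   url: 'http://localhost:8123',
//   username: 'default',
//   password: '',             // assumed field, not shown in this hunk
//   disableInit: false,
// });
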
@@ -2550,15 +2204,11 @@ var ClickhouseStore = class extends storage.MastraStorage {
  const operations = new StoreOperationsClickhouse({ client: this.db, ttl: this.ttl });
  const workflows = new WorkflowsStorageClickhouse({ client: this.db, operations });
  const scores = new ScoresStorageClickhouse({ client: this.db, operations });
- const legacyEvals = new LegacyEvalsStorageClickhouse({ client: this.db, operations });
- const traces = new TracesStorageClickhouse({ client: this.db, operations });
  const memory = new MemoryStorageClickhouse({ client: this.db, operations });
  this.stores = {
  operations,
  workflows,
  scores,
- legacyEvals,
- traces,
  memory
  };
  }
@@ -2568,15 +2218,10 @@ var ClickhouseStore = class extends storage.MastraStorage {
  resourceWorkingMemory: true,
  hasColumn: true,
  createTable: true,
- deleteMessages: false
+ deleteMessages: false,
+ listScoresBySpan: true
  };
  }
- async getEvalsByAgentName(agentName, type) {
- return this.stores.legacyEvals.getEvalsByAgentName(agentName, type);
- }
- async getEvals(options) {
- return this.stores.legacyEvals.getEvals(options);
- }
  async batchInsert({ tableName, records }) {
  await this.stores.operations.batchInsert({ tableName, records });
  }
@@ -2588,7 +2233,7 @@ var ClickhouseStore = class extends storage.MastraStorage {
  } catch (error$1) {
  throw new error.MastraError(
  {
- id: "CLICKHOUSE_STORAGE_OPTIMIZE_TABLE_FAILED",
+ id: storage.createStorageErrorId("CLICKHOUSE", "OPTIMIZE_TABLE", "FAILED"),
  domain: error.ErrorDomain.STORAGE,
  category: error.ErrorCategory.THIRD_PARTY,
  details: { tableName }
@@ -2605,7 +2250,7 @@ var ClickhouseStore = class extends storage.MastraStorage {
  } catch (error$1) {
  throw new error.MastraError(
  {
- id: "CLICKHOUSE_STORAGE_MATERIALIZE_TTL_FAILED",
+ id: storage.createStorageErrorId("CLICKHOUSE", "MATERIALIZE_TTL", "FAILED"),
  domain: error.ErrorDomain.STORAGE,
  category: error.ErrorCategory.THIRD_PARTY,
  details: { tableName }
@@ -2644,9 +2289,9 @@ var ClickhouseStore = class extends storage.MastraStorage {
  runId,
  stepId,
  result,
- runtimeContext
+ requestContext
  }) {
- return this.stores.workflows.updateWorkflowResults({ workflowName, runId, stepId, result, runtimeContext });
+ return this.stores.workflows.updateWorkflowResults({ workflowName, runId, stepId, result, requestContext });
  }
  async updateWorkflowState({
  workflowName,
@@ -2669,15 +2314,8 @@ var ClickhouseStore = class extends storage.MastraStorage {
  }) {
  return this.stores.workflows.loadWorkflowSnapshot({ workflowName, runId });
  }
- async getWorkflowRuns({
- workflowName,
- fromDate,
- toDate,
- limit,
- offset,
- resourceId
- } = {}) {
- return this.stores.workflows.getWorkflowRuns({ workflowName, fromDate, toDate, limit, offset, resourceId });
+ async listWorkflowRuns(args = {}) {
+ return this.stores.workflows.listWorkflowRuns(args);
  }
  async getWorkflowRunById({
  runId,
@@ -2685,21 +2323,12 @@ var ClickhouseStore = class extends storage.MastraStorage {
  }) {
  return this.stores.workflows.getWorkflowRunById({ runId, workflowName });
  }
- async getTraces(args) {
- return this.stores.traces.getTraces(args);
- }
- async getTracesPaginated(args) {
- return this.stores.traces.getTracesPaginated(args);
- }
- async batchTraceInsert(args) {
- return this.stores.traces.batchTraceInsert(args);
+ async deleteWorkflowRunById({ runId, workflowName }) {
+ return this.stores.workflows.deleteWorkflowRunById({ runId, workflowName });
  }
  async getThreadById({ threadId }) {
  return this.stores.memory.getThreadById({ threadId });
  }
- async getThreadsByResourceId({ resourceId }) {
- return this.stores.memory.getThreadsByResourceId({ resourceId });
- }
  async saveThread({ thread }) {
  return this.stores.memory.saveThread({ thread });
  }
@@ -2713,29 +2342,9 @@ var ClickhouseStore = class extends storage.MastraStorage {
  async deleteThread({ threadId }) {
  return this.stores.memory.deleteThread({ threadId });
  }
- async getThreadsByResourceIdPaginated(args) {
- return this.stores.memory.getThreadsByResourceIdPaginated(args);
- }
- async getMessages({
- threadId,
- resourceId,
- selectBy,
- format
- }) {
- return this.stores.memory.getMessages({ threadId, resourceId, selectBy, format });
- }
- async getMessagesById({
- messageIds,
- format
- }) {
- return this.stores.memory.getMessagesById({ messageIds, format });
- }
  async saveMessages(args) {
  return this.stores.memory.saveMessages(args);
  }
- async getMessagesPaginated(args) {
- return this.stores.memory.getMessagesPaginated(args);
- }
  async updateMessages(args) {
  return this.stores.memory.updateMessages(args);
  }
@@ -2755,30 +2364,37 @@ var ClickhouseStore = class extends storage.MastraStorage {
  async getScoreById({ id }) {
  return this.stores.scores.getScoreById({ id });
  }
- async saveScore(_score) {
- return this.stores.scores.saveScore(_score);
+ async saveScore(score) {
+ return this.stores.scores.saveScore(score);
  }
- async getScoresByRunId({
+ async listScoresByRunId({
  runId,
  pagination
  }) {
- return this.stores.scores.getScoresByRunId({ runId, pagination });
+ return this.stores.scores.listScoresByRunId({ runId, pagination });
  }
- async getScoresByEntityId({
+ async listScoresByEntityId({
  entityId,
  entityType,
  pagination
  }) {
- return this.stores.scores.getScoresByEntityId({ entityId, entityType, pagination });
+ return this.stores.scores.listScoresByEntityId({ entityId, entityType, pagination });
  }
- async getScoresByScorerId({
+ async listScoresByScorerId({
  scorerId,
  pagination,
  entityId,
  entityType,
  source
  }) {
- return this.stores.scores.getScoresByScorerId({ scorerId, pagination, entityId, entityType, source });
+ return this.stores.scores.listScoresByScorerId({ scorerId, pagination, entityId, entityType, source });
+ }
+ async listScoresBySpan({
+ traceId,
+ spanId,
+ pagination
+ }) {
+ return this.stores.scores.listScoresBySpan({ traceId, spanId, pagination });
  }
  async close() {
  await this.db.close();