@mastra/clickhouse 0.0.0-toolOptionTypes-20250917085558 → 0.0.0-trace-timeline-update-20251121114225

This diff shows the contents of publicly available package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between these versions as they appear in the public registry.
package/dist/index.cjs CHANGED
@@ -4,6 +4,7 @@ var client = require('@clickhouse/client');
4
4
  var error = require('@mastra/core/error');
5
5
  var storage = require('@mastra/core/storage');
6
6
  var agent = require('@mastra/core/agent');
7
+ var evals = require('@mastra/core/evals');
7
8
 
8
9
  // src/storage/index.ts
9
10
  var TABLE_ENGINES = {
@@ -11,11 +12,10 @@ var TABLE_ENGINES = {
11
12
  [storage.TABLE_WORKFLOW_SNAPSHOT]: `ReplacingMergeTree()`,
12
13
  [storage.TABLE_TRACES]: `MergeTree()`,
13
14
  [storage.TABLE_THREADS]: `ReplacingMergeTree()`,
14
- [storage.TABLE_EVALS]: `MergeTree()`,
15
15
  [storage.TABLE_SCORERS]: `MergeTree()`,
16
16
  [storage.TABLE_RESOURCES]: `ReplacingMergeTree()`,
17
- // TODO: verify this is the correct engine for ai spans when implementing clickhouse storage
18
- [storage.TABLE_AI_SPANS]: `ReplacingMergeTree()`
17
+ // TODO: verify this is the correct engine for Spans when implementing clickhouse storage
18
+ [storage.TABLE_SPANS]: `ReplacingMergeTree()`
19
19
  };
20
20
  var COLUMN_TYPES = {
21
21
  text: "String",
@@ -46,8 +46,26 @@ function transformRows(rows) {
46
46
  return rows.map((row) => transformRow(row));
47
47
  }
48
48
 
49
- // src/storage/domains/legacy-evals/index.ts
50
- var LegacyEvalsStorageClickhouse = class extends storage.LegacyEvalsStorage {
49
+ // src/storage/domains/memory/index.ts
50
+ function serializeMetadata(metadata) {
51
+ if (!metadata || Object.keys(metadata).length === 0) {
52
+ return "{}";
53
+ }
54
+ return JSON.stringify(metadata);
55
+ }
56
+ function parseMetadata(metadata) {
57
+ if (!metadata) return {};
58
+ if (typeof metadata === "object") return metadata;
59
+ if (typeof metadata !== "string") return {};
60
+ const trimmed = metadata.trim();
61
+ if (trimmed === "" || trimmed === "null") return {};
62
+ try {
63
+ return JSON.parse(trimmed);
64
+ } catch {
65
+ return {};
66
+ }
67
+ }
68
+ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
51
69
  client;
52
70
  operations;
53
71
  constructor({ client, operations }) {
@@ -55,127 +73,122 @@ var LegacyEvalsStorageClickhouse = class extends storage.LegacyEvalsStorage {
55
73
  this.client = client;
56
74
  this.operations = operations;
57
75
  }
58
- transformEvalRow(row) {
59
- row = transformRow(row);
60
- let resultValue;
76
+ async listMessagesById({ messageIds }) {
77
+ if (messageIds.length === 0) return { messages: [] };
61
78
  try {
62
- if (row.result && typeof row.result === "string" && row.result.trim() !== "") {
63
- resultValue = JSON.parse(row.result);
64
- } else if (typeof row.result === "object" && row.result !== null) {
65
- resultValue = row.result;
66
- } else if (row.result === null || row.result === void 0 || row.result === "") {
67
- resultValue = { score: 0 };
68
- } else {
69
- throw new Error(`Invalid or empty result field: ${JSON.stringify(row.result)}`);
70
- }
71
- } catch (error$1) {
72
- console.error("Error parsing result field:", row.result, error$1);
73
- throw new error.MastraError({
74
- id: "CLICKHOUSE_STORAGE_INVALID_RESULT_FORMAT",
75
- text: `Invalid result format: ${JSON.stringify(row.result)}`,
76
- domain: error.ErrorDomain.STORAGE,
77
- category: error.ErrorCategory.USER
78
- });
79
- }
80
- let testInfoValue;
81
- try {
82
- if (row.test_info && typeof row.test_info === "string" && row.test_info.trim() !== "" && row.test_info !== "null") {
83
- testInfoValue = JSON.parse(row.test_info);
84
- } else if (typeof row.test_info === "object" && row.test_info !== null) {
85
- testInfoValue = row.test_info;
86
- }
87
- } catch {
88
- testInfoValue = void 0;
89
- }
90
- if (!resultValue || typeof resultValue !== "object" || !("score" in resultValue)) {
91
- throw new error.MastraError({
92
- id: "CLICKHOUSE_STORAGE_INVALID_METRIC_FORMAT",
93
- text: `Invalid MetricResult format: ${JSON.stringify(resultValue)}`,
94
- domain: error.ErrorDomain.STORAGE,
95
- category: error.ErrorCategory.USER
96
- });
97
- }
98
- return {
99
- input: row.input,
100
- output: row.output,
101
- result: resultValue,
102
- agentName: row.agent_name,
103
- metricName: row.metric_name,
104
- instructions: row.instructions,
105
- testInfo: testInfoValue,
106
- globalRunId: row.global_run_id,
107
- runId: row.run_id,
108
- createdAt: row.created_at
109
- };
110
- }
111
- async getEvalsByAgentName(agentName, type) {
112
- try {
113
- const baseQuery = `SELECT *, toDateTime64(created_at, 3) as createdAt FROM ${storage.TABLE_EVALS} WHERE agent_name = {var_agent_name:String}`;
114
- const typeCondition = type === "test" ? " AND test_info IS NOT NULL AND test_info != 'null' AND JSONExtractString(test_info, 'testPath') IS NOT NULL AND JSONExtractString(test_info, 'testPath') != ''" : type === "live" ? " AND (test_info IS NULL OR test_info = 'null' OR JSONExtractString(test_info, 'testPath') IS NULL OR JSONExtractString(test_info, 'testPath') = '')" : "";
115
79
  const result = await this.client.query({
116
- query: `${baseQuery}${typeCondition} ORDER BY createdAt DESC`,
117
- query_params: { var_agent_name: agentName },
80
+ query: `
81
+ SELECT
82
+ id,
83
+ content,
84
+ role,
85
+ type,
86
+ toDateTime64(createdAt, 3) as createdAt,
87
+ thread_id AS "threadId",
88
+ "resourceId"
89
+ FROM "${storage.TABLE_MESSAGES}"
90
+ WHERE id IN {messageIds:Array(String)}
91
+ ORDER BY "createdAt" DESC
92
+ `,
93
+ query_params: {
94
+ messageIds
95
+ },
118
96
  clickhouse_settings: {
97
+ // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
119
98
  date_time_input_format: "best_effort",
120
99
  date_time_output_format: "iso",
121
100
  use_client_time_zone: 1,
122
101
  output_format_json_quote_64bit_integers: 0
123
102
  }
124
103
  });
125
- if (!result) {
126
- return [];
127
- }
128
104
  const rows = await result.json();
129
- return rows.data.map((row) => this.transformEvalRow(row));
105
+ const messages = transformRows(rows.data);
106
+ messages.forEach((message) => {
107
+ if (typeof message.content === "string") {
108
+ try {
109
+ message.content = JSON.parse(message.content);
110
+ } catch {
111
+ }
112
+ }
113
+ });
114
+ const list = new agent.MessageList().add(messages, "memory");
115
+ return { messages: list.get.all.db() };
130
116
  } catch (error$1) {
131
- if (error$1?.message?.includes("no such table") || error$1?.message?.includes("does not exist")) {
132
- return [];
133
- }
134
117
  throw new error.MastraError(
135
118
  {
136
- id: "CLICKHOUSE_STORAGE_GET_EVALS_BY_AGENT_FAILED",
119
+ id: "CLICKHOUSE_STORAGE_LIST_MESSAGES_BY_ID_FAILED",
137
120
  domain: error.ErrorDomain.STORAGE,
138
121
  category: error.ErrorCategory.THIRD_PARTY,
139
- details: { agentName, type: type ?? null }
122
+ details: { messageIds: JSON.stringify(messageIds) }
140
123
  },
141
124
  error$1
142
125
  );
143
126
  }
144
127
  }
145
- async getEvals(options = {}) {
146
- const { agentName, type, page = 0, perPage = 100, dateRange } = options;
147
- const fromDate = dateRange?.start;
148
- const toDate = dateRange?.end;
149
- const conditions = [];
150
- if (agentName) {
151
- conditions.push(`agent_name = {var_agent_name:String}`);
152
- }
153
- if (type === "test") {
154
- conditions.push(
155
- `(test_info IS NOT NULL AND test_info != 'null' AND JSONExtractString(test_info, 'testPath') IS NOT NULL AND JSONExtractString(test_info, 'testPath') != '')`
156
- );
157
- } else if (type === "live") {
158
- conditions.push(
159
- `(test_info IS NULL OR test_info = 'null' OR JSONExtractString(test_info, 'testPath') IS NULL OR JSONExtractString(test_info, 'testPath') = '')`
128
+ async listMessages(args) {
129
+ const { threadId, resourceId, include, filter, perPage: perPageInput, page = 0, orderBy } = args;
130
+ if (page < 0) {
131
+ throw new error.MastraError(
132
+ {
133
+ id: "STORAGE_CLICKHOUSE_LIST_MESSAGES_INVALID_PAGE",
134
+ domain: error.ErrorDomain.STORAGE,
135
+ category: error.ErrorCategory.USER,
136
+ details: { page }
137
+ },
138
+ new Error("page must be >= 0")
160
139
  );
161
140
  }
162
- if (fromDate) {
163
- conditions.push(`created_at >= parseDateTime64BestEffort({var_from_date:String})`);
164
- fromDate.toISOString();
165
- }
166
- if (toDate) {
167
- conditions.push(`created_at <= parseDateTime64BestEffort({var_to_date:String})`);
168
- toDate.toISOString();
141
+ if (!threadId.trim()) {
142
+ throw new error.MastraError(
143
+ {
144
+ id: "STORAGE_CLICKHOUSE_LIST_MESSAGES_INVALID_THREAD_ID",
145
+ domain: error.ErrorDomain.STORAGE,
146
+ category: error.ErrorCategory.THIRD_PARTY,
147
+ details: { threadId }
148
+ },
149
+ new Error("threadId must be a non-empty string")
150
+ );
169
151
  }
170
- const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
152
+ const perPageForQuery = storage.normalizePerPage(perPageInput, 40);
153
+ const { offset, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPageForQuery);
171
154
  try {
172
- const countResult = await this.client.query({
173
- query: `SELECT COUNT(*) as count FROM ${storage.TABLE_EVALS} ${whereClause}`,
174
- query_params: {
175
- ...agentName ? { var_agent_name: agentName } : {},
176
- ...fromDate ? { var_from_date: fromDate.toISOString() } : {},
177
- ...toDate ? { var_to_date: toDate.toISOString() } : {}
178
- },
155
+ let dataQuery = `
156
+ SELECT
157
+ id,
158
+ content,
159
+ role,
160
+ type,
161
+ toDateTime64(createdAt, 3) as createdAt,
162
+ thread_id AS "threadId",
163
+ resourceId
164
+ FROM ${storage.TABLE_MESSAGES}
165
+ WHERE thread_id = {threadId:String}
166
+ `;
167
+ const dataParams = { threadId };
168
+ if (resourceId) {
169
+ dataQuery += ` AND resourceId = {resourceId:String}`;
170
+ dataParams.resourceId = resourceId;
171
+ }
172
+ if (filter?.dateRange?.start) {
173
+ const startDate = filter.dateRange.start instanceof Date ? filter.dateRange.start.toISOString() : new Date(filter.dateRange.start).toISOString();
174
+ dataQuery += ` AND createdAt >= parseDateTime64BestEffort({fromDate:String}, 3)`;
175
+ dataParams.fromDate = startDate;
176
+ }
177
+ if (filter?.dateRange?.end) {
178
+ const endDate = filter.dateRange.end instanceof Date ? filter.dateRange.end.toISOString() : new Date(filter.dateRange.end).toISOString();
179
+ dataQuery += ` AND createdAt <= parseDateTime64BestEffort({toDate:String}, 3)`;
180
+ dataParams.toDate = endDate;
181
+ }
182
+ const { field, direction } = this.parseOrderBy(orderBy, "ASC");
183
+ dataQuery += ` ORDER BY "${field}" ${direction}`;
184
+ if (perPageForResponse === false) ; else {
185
+ dataQuery += ` LIMIT {limit:Int64} OFFSET {offset:Int64}`;
186
+ dataParams.limit = perPageForQuery;
187
+ dataParams.offset = offset;
188
+ }
189
+ const result = await this.client.query({
190
+ query: dataQuery,
191
+ query_params: dataParams,
179
192
  clickhouse_settings: {
180
193
  date_time_input_format: "best_effort",
181
194
  date_time_output_format: "iso",
@@ -183,28 +196,28 @@ var LegacyEvalsStorageClickhouse = class extends storage.LegacyEvalsStorage {
183
196
  output_format_json_quote_64bit_integers: 0
184
197
  }
185
198
  });
186
- const countData = await countResult.json();
187
- const total = Number(countData.data?.[0]?.count ?? 0);
188
- const currentOffset = page * perPage;
189
- const hasMore = currentOffset + perPage < total;
190
- if (total === 0) {
191
- return {
192
- evals: [],
193
- total: 0,
194
- page,
195
- perPage,
196
- hasMore: false
197
- };
199
+ const rows = await result.json();
200
+ const paginatedMessages = transformRows(rows.data);
201
+ const paginatedCount = paginatedMessages.length;
202
+ let countQuery = `SELECT count() as total FROM ${storage.TABLE_MESSAGES} WHERE thread_id = {threadId:String}`;
203
+ const countParams = { threadId };
204
+ if (resourceId) {
205
+ countQuery += ` AND resourceId = {resourceId:String}`;
206
+ countParams.resourceId = resourceId;
198
207
  }
199
- const dataResult = await this.client.query({
200
- query: `SELECT *, toDateTime64(createdAt, 3) as createdAt FROM ${storage.TABLE_EVALS} ${whereClause} ORDER BY created_at DESC LIMIT {var_limit:UInt32} OFFSET {var_offset:UInt32}`,
201
- query_params: {
202
- ...agentName ? { var_agent_name: agentName } : {},
203
- ...fromDate ? { var_from_date: fromDate.toISOString() } : {},
204
- ...toDate ? { var_to_date: toDate.toISOString() } : {},
205
- var_limit: perPage || 100,
206
- var_offset: currentOffset || 0
207
- },
208
+ if (filter?.dateRange?.start) {
209
+ const startDate = filter.dateRange.start instanceof Date ? filter.dateRange.start.toISOString() : new Date(filter.dateRange.start).toISOString();
210
+ countQuery += ` AND createdAt >= parseDateTime64BestEffort({fromDate:String}, 3)`;
211
+ countParams.fromDate = startDate;
212
+ }
213
+ if (filter?.dateRange?.end) {
214
+ const endDate = filter.dateRange.end instanceof Date ? filter.dateRange.end.toISOString() : new Date(filter.dateRange.end).toISOString();
215
+ countQuery += ` AND createdAt <= parseDateTime64BestEffort({toDate:String}, 3)`;
216
+ countParams.toDate = endDate;
217
+ }
218
+ const countResult = await this.client.query({
219
+ query: countQuery,
220
+ query_params: countParams,
208
221
  clickhouse_settings: {
209
222
  date_time_input_format: "best_effort",
210
223
  date_time_output_format: "iso",
@@ -212,56 +225,20 @@ var LegacyEvalsStorageClickhouse = class extends storage.LegacyEvalsStorage {
212
225
  output_format_json_quote_64bit_integers: 0
213
226
  }
214
227
  });
215
- const rows = await dataResult.json();
216
- return {
217
- evals: rows.data.map((row) => this.transformEvalRow(row)),
218
- total,
219
- page,
220
- perPage,
221
- hasMore
222
- };
223
- } catch (error$1) {
224
- if (error$1?.message?.includes("no such table") || error$1?.message?.includes("does not exist")) {
228
+ const countData = await countResult.json();
229
+ const total = countData.data[0].total;
230
+ if (total === 0 && paginatedCount === 0 && (!include || include.length === 0)) {
225
231
  return {
226
- evals: [],
232
+ messages: [],
227
233
  total: 0,
228
234
  page,
229
- perPage,
235
+ perPage: perPageForResponse,
230
236
  hasMore: false
231
237
  };
232
238
  }
233
- throw new error.MastraError(
234
- {
235
- id: "CLICKHOUSE_STORAGE_GET_EVALS_FAILED",
236
- domain: error.ErrorDomain.STORAGE,
237
- category: error.ErrorCategory.THIRD_PARTY,
238
- details: { agentName: agentName ?? "all", type: type ?? "all" }
239
- },
240
- error$1
241
- );
242
- }
243
- }
244
- };
245
- var MemoryStorageClickhouse = class extends storage.MemoryStorage {
246
- client;
247
- operations;
248
- constructor({ client, operations }) {
249
- super();
250
- this.client = client;
251
- this.operations = operations;
252
- }
253
- async getMessages({
254
- threadId,
255
- resourceId,
256
- selectBy,
257
- format
258
- }) {
259
- try {
260
- if (!threadId.trim()) throw new Error("threadId must be a non-empty string");
261
- const messages = [];
262
- const limit = storage.resolveMessageLimit({ last: selectBy?.last, defaultLimit: 40 });
263
- const include = selectBy?.include || [];
264
- if (include.length) {
239
+ const messageIds = new Set(paginatedMessages.map((m) => m.id));
240
+ let includeMessages = [];
241
+ if (include && include.length > 0) {
265
242
  const unionQueries = [];
266
243
  const params = [];
267
244
  let paramIdx = 1;
@@ -282,7 +259,7 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
282
259
  FROM numbered_messages
283
260
  WHERE id = {var_include_id_${paramIdx}:String}
284
261
  )
285
- SELECT DISTINCT m.id, m.content, m.role, m.type, m."createdAt", m.thread_id AS "threadId"
262
+ SELECT DISTINCT m.id, m.content, m.role, m.type, m."createdAt", m.thread_id AS "threadId", m."resourceId"
286
263
  FROM numbered_messages m
287
264
  CROSS JOIN target_positions t
288
265
  WHERE m.row_num BETWEEN (t.target_pos - {var_withPreviousMessages_${paramIdx}:Int64}) AND (t.target_pos + {var_withNextMessages_${paramIdx}:Int64})
@@ -296,7 +273,7 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
296
273
  );
297
274
  paramIdx++;
298
275
  }
299
- const finalQuery = unionQueries.join(" UNION ALL ") + ' ORDER BY "createdAt" DESC';
276
+ const finalQuery = unionQueries.join(" UNION ALL ") + ' ORDER BY "createdAt" ASC';
300
277
  const mergedParams = params.reduce((acc, paramObj) => ({ ...acc, ...paramObj }), {});
301
278
  const includeResult = await this.client.query({
302
279
  query: finalQuery,
@@ -308,129 +285,66 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
308
285
  output_format_json_quote_64bit_integers: 0
309
286
  }
310
287
  });
311
- const rows2 = await includeResult.json();
312
- const includedMessages = transformRows(rows2.data);
313
- const seen = /* @__PURE__ */ new Set();
314
- const dedupedMessages = includedMessages.filter((message) => {
315
- if (seen.has(message.id)) return false;
316
- seen.add(message.id);
317
- return true;
318
- });
319
- messages.push(...dedupedMessages);
320
- }
321
- const result = await this.client.query({
322
- query: `
323
- SELECT
324
- id,
325
- content,
326
- role,
327
- type,
328
- toDateTime64(createdAt, 3) as createdAt,
329
- thread_id AS "threadId"
330
- FROM "${storage.TABLE_MESSAGES}"
331
- WHERE thread_id = {threadId:String}
332
- AND id NOT IN ({exclude:Array(String)})
333
- ORDER BY "createdAt" DESC
334
- LIMIT {limit:Int64}
335
- `,
336
- query_params: {
337
- threadId,
338
- exclude: messages.map((m) => m.id),
339
- limit
340
- },
341
- clickhouse_settings: {
342
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
343
- date_time_input_format: "best_effort",
344
- date_time_output_format: "iso",
345
- use_client_time_zone: 1,
346
- output_format_json_quote_64bit_integers: 0
347
- }
348
- });
349
- const rows = await result.json();
350
- messages.push(...transformRows(rows.data));
351
- messages.sort((a, b) => new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime());
352
- messages.forEach((message) => {
353
- if (typeof message.content === "string") {
354
- try {
355
- message.content = JSON.parse(message.content);
356
- } catch {
288
+ const includeRows = await includeResult.json();
289
+ includeMessages = transformRows(includeRows.data);
290
+ for (const includeMsg of includeMessages) {
291
+ if (!messageIds.has(includeMsg.id)) {
292
+ paginatedMessages.push(includeMsg);
293
+ messageIds.add(includeMsg.id);
357
294
  }
358
295
  }
359
- });
360
- const list = new agent.MessageList({ threadId, resourceId }).add(messages, "memory");
361
- if (format === `v2`) return list.get.all.v2();
362
- return list.get.all.v1();
363
- } catch (error$1) {
364
- throw new error.MastraError(
365
- {
366
- id: "CLICKHOUSE_STORAGE_GET_MESSAGES_FAILED",
367
- domain: error.ErrorDomain.STORAGE,
368
- category: error.ErrorCategory.THIRD_PARTY,
369
- details: { threadId, resourceId: resourceId ?? "" }
370
- },
371
- error$1
372
- );
373
- }
374
- }
375
- async getMessagesById({
376
- messageIds,
377
- format
378
- }) {
379
- if (messageIds.length === 0) return [];
380
- try {
381
- const result = await this.client.query({
382
- query: `
383
- SELECT
384
- id,
385
- content,
386
- role,
387
- type,
388
- toDateTime64(createdAt, 3) as createdAt,
389
- thread_id AS "threadId",
390
- "resourceId"
391
- FROM "${storage.TABLE_MESSAGES}"
392
- WHERE id IN {messageIds:Array(String)}
393
- ORDER BY "createdAt" DESC
394
- `,
395
- query_params: {
396
- messageIds
397
- },
398
- clickhouse_settings: {
399
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
400
- date_time_input_format: "best_effort",
401
- date_time_output_format: "iso",
402
- use_client_time_zone: 1,
403
- output_format_json_quote_64bit_integers: 0
296
+ }
297
+ const list = new agent.MessageList().add(paginatedMessages, "memory");
298
+ let finalMessages = list.get.all.db();
299
+ finalMessages = finalMessages.sort((a, b) => {
300
+ const isDateField = field === "createdAt" || field === "updatedAt";
301
+ const aValue = isDateField ? new Date(a[field]).getTime() : a[field];
302
+ const bValue = isDateField ? new Date(b[field]).getTime() : b[field];
303
+ if (aValue === bValue) {
304
+ return a.id.localeCompare(b.id);
404
305
  }
405
- });
406
- const rows = await result.json();
407
- const messages = transformRows(rows.data);
408
- messages.forEach((message) => {
409
- if (typeof message.content === "string") {
410
- try {
411
- message.content = JSON.parse(message.content);
412
- } catch {
413
- }
306
+ if (typeof aValue === "number" && typeof bValue === "number") {
307
+ return direction === "ASC" ? aValue - bValue : bValue - aValue;
414
308
  }
309
+ return direction === "ASC" ? String(aValue).localeCompare(String(bValue)) : String(bValue).localeCompare(String(aValue));
415
310
  });
416
- const list = new agent.MessageList().add(messages, "memory");
417
- if (format === `v1`) return list.get.all.v1();
418
- return list.get.all.v2();
311
+ const returnedThreadMessageIds = new Set(finalMessages.filter((m) => m.threadId === threadId).map((m) => m.id));
312
+ const allThreadMessagesReturned = returnedThreadMessageIds.size >= total;
313
+ const hasMore = perPageForResponse === false ? false : allThreadMessagesReturned ? false : offset + paginatedCount < total;
314
+ return {
315
+ messages: finalMessages,
316
+ total,
317
+ page,
318
+ perPage: perPageForResponse,
319
+ hasMore
320
+ };
419
321
  } catch (error$1) {
420
- throw new error.MastraError(
322
+ const mastraError = new error.MastraError(
421
323
  {
422
- id: "CLICKHOUSE_STORAGE_GET_MESSAGES_BY_ID_FAILED",
324
+ id: "STORAGE_CLICKHOUSE_STORE_LIST_MESSAGES_FAILED",
423
325
  domain: error.ErrorDomain.STORAGE,
424
326
  category: error.ErrorCategory.THIRD_PARTY,
425
- details: { messageIds: JSON.stringify(messageIds) }
327
+ details: {
328
+ threadId,
329
+ resourceId: resourceId ?? ""
330
+ }
426
331
  },
427
332
  error$1
428
333
  );
334
+ this.logger?.error?.(mastraError.toString());
335
+ this.logger?.trackException?.(mastraError);
336
+ return {
337
+ messages: [],
338
+ total: 0,
339
+ page,
340
+ perPage: perPageForResponse,
341
+ hasMore: false
342
+ };
429
343
  }
430
344
  }
431
345
  async saveMessages(args) {
432
- const { messages, format = "v1" } = args;
433
- if (messages.length === 0) return messages;
346
+ const { messages } = args;
347
+ if (messages.length === 0) return { messages };
434
348
  for (const message of messages) {
435
349
  const resourceId = message.resourceId;
436
350
  if (!resourceId) {
@@ -554,7 +468,7 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
554
468
  id: thread.id,
555
469
  resourceId: thread.resourceId,
556
470
  title: thread.title,
557
- metadata: thread.metadata,
471
+ metadata: serializeMetadata(thread.metadata),
558
472
  createdAt: thread.createdAt,
559
473
  updatedAt: (/* @__PURE__ */ new Date()).toISOString()
560
474
  })),
@@ -566,8 +480,7 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
566
480
  })
567
481
  ]);
568
482
  const list = new agent.MessageList().add(messages, "memory");
569
- if (format === `v2`) return list.get.all.v2();
570
- return list.get.all.v1();
483
+ return { messages: list.get.all.db() };
571
484
  } catch (error$1) {
572
485
  throw new error.MastraError(
573
486
  {
@@ -590,8 +503,9 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
590
503
  toDateTime64(createdAt, 3) as createdAt,
591
504
  toDateTime64(updatedAt, 3) as updatedAt
592
505
  FROM "${storage.TABLE_THREADS}"
593
- FINAL
594
- WHERE id = {var_id:String}`,
506
+ WHERE id = {var_id:String}
507
+ ORDER BY updatedAt DESC
508
+ LIMIT 1`,
595
509
  query_params: { var_id: threadId },
596
510
  clickhouse_settings: {
597
511
  // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
@@ -608,7 +522,7 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
608
522
  }
609
523
  return {
610
524
  ...thread,
611
- metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata,
525
+ metadata: parseMetadata(thread.metadata),
612
526
  createdAt: thread.createdAt,
613
527
  updatedAt: thread.updatedAt
614
528
  };
@@ -624,47 +538,6 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
624
538
  );
625
539
  }
626
540
  }
627
- async getThreadsByResourceId({ resourceId }) {
628
- try {
629
- const result = await this.client.query({
630
- query: `SELECT
631
- id,
632
- "resourceId",
633
- title,
634
- metadata,
635
- toDateTime64(createdAt, 3) as createdAt,
636
- toDateTime64(updatedAt, 3) as updatedAt
637
- FROM "${storage.TABLE_THREADS}"
638
- WHERE "resourceId" = {var_resourceId:String}`,
639
- query_params: { var_resourceId: resourceId },
640
- clickhouse_settings: {
641
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
642
- date_time_input_format: "best_effort",
643
- date_time_output_format: "iso",
644
- use_client_time_zone: 1,
645
- output_format_json_quote_64bit_integers: 0
646
- }
647
- });
648
- const rows = await result.json();
649
- const threads = transformRows(rows.data);
650
- return threads.map((thread) => ({
651
- ...thread,
652
- metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata,
653
- createdAt: thread.createdAt,
654
- updatedAt: thread.updatedAt
655
- }));
656
- } catch (error$1) {
657
- throw new error.MastraError(
658
- {
659
- id: "CLICKHOUSE_STORAGE_GET_THREADS_BY_RESOURCE_ID_FAILED",
660
- domain: error.ErrorDomain.STORAGE,
661
- category: error.ErrorCategory.THIRD_PARTY,
662
- details: { resourceId }
663
- },
664
- error$1
665
- );
666
- }
667
- }
668
541
  async saveThread({ thread }) {
669
542
  try {
670
543
  await this.client.insert({
@@ -672,6 +545,7 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
672
545
  values: [
673
546
  {
674
547
  ...thread,
548
+ metadata: serializeMetadata(thread.metadata),
675
549
  createdAt: thread.createdAt.toISOString(),
676
550
  updatedAt: thread.updatedAt.toISOString()
677
551
  }
@@ -725,7 +599,7 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
725
599
  id: updatedThread.id,
726
600
  resourceId: updatedThread.resourceId,
727
601
  title: updatedThread.title,
728
- metadata: updatedThread.metadata,
602
+ metadata: serializeMetadata(updatedThread.metadata),
729
603
  createdAt: updatedThread.createdAt,
730
604
  updatedAt: updatedThread.updatedAt.toISOString()
731
605
  }
@@ -764,174 +638,39 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
764
638
  clickhouse_settings: {
765
639
  output_format_json_quote_64bit_integers: 0
766
640
  }
767
- });
768
- } catch (error$1) {
769
- throw new error.MastraError(
770
- {
771
- id: "CLICKHOUSE_STORAGE_DELETE_THREAD_FAILED",
772
- domain: error.ErrorDomain.STORAGE,
773
- category: error.ErrorCategory.THIRD_PARTY,
774
- details: { threadId }
775
- },
776
- error$1
777
- );
778
- }
779
- }
780
- async getThreadsByResourceIdPaginated(args) {
781
- const { resourceId, page = 0, perPage = 100 } = args;
782
- try {
783
- const currentOffset = page * perPage;
784
- const countResult = await this.client.query({
785
- query: `SELECT count() as total FROM ${storage.TABLE_THREADS} WHERE resourceId = {resourceId:String}`,
786
- query_params: { resourceId },
787
- clickhouse_settings: {
788
- date_time_input_format: "best_effort",
789
- date_time_output_format: "iso",
790
- use_client_time_zone: 1,
791
- output_format_json_quote_64bit_integers: 0
792
- }
793
- });
794
- const countData = await countResult.json();
795
- const total = countData.data[0].total;
796
- if (total === 0) {
797
- return {
798
- threads: [],
799
- total: 0,
800
- page,
801
- perPage,
802
- hasMore: false
803
- };
804
- }
805
- const dataResult = await this.client.query({
806
- query: `
807
- SELECT
808
- id,
809
- resourceId,
810
- title,
811
- metadata,
812
- toDateTime64(createdAt, 3) as createdAt,
813
- toDateTime64(updatedAt, 3) as updatedAt
814
- FROM ${storage.TABLE_THREADS}
815
- WHERE resourceId = {resourceId:String}
816
- ORDER BY createdAt DESC
817
- LIMIT {limit:Int64} OFFSET {offset:Int64}
818
- `,
819
- query_params: {
820
- resourceId,
821
- limit: perPage,
822
- offset: currentOffset
823
- },
824
- clickhouse_settings: {
825
- date_time_input_format: "best_effort",
826
- date_time_output_format: "iso",
827
- use_client_time_zone: 1,
828
- output_format_json_quote_64bit_integers: 0
829
- }
830
- });
831
- const rows = await dataResult.json();
832
- const threads = transformRows(rows.data);
833
- return {
834
- threads,
835
- total,
836
- page,
837
- perPage,
838
- hasMore: currentOffset + threads.length < total
839
- };
641
+ });
840
642
  } catch (error$1) {
841
643
  throw new error.MastraError(
842
644
  {
843
- id: "CLICKHOUSE_STORAGE_GET_THREADS_BY_RESOURCE_ID_PAGINATED_FAILED",
645
+ id: "CLICKHOUSE_STORAGE_DELETE_THREAD_FAILED",
844
646
  domain: error.ErrorDomain.STORAGE,
845
647
  category: error.ErrorCategory.THIRD_PARTY,
846
- details: { resourceId, page }
648
+ details: { threadId }
847
649
  },
848
650
  error$1
849
651
  );
850
652
  }
851
653
  }
852
- async getMessagesPaginated(args) {
853
- const { threadId, resourceId, selectBy, format = "v1" } = args;
854
- const page = selectBy?.pagination?.page || 0;
855
- const perPageInput = selectBy?.pagination?.perPage;
856
- const perPage = perPageInput !== void 0 ? perPageInput : storage.resolveMessageLimit({ last: selectBy?.last, defaultLimit: 20 });
654
+ async listThreadsByResourceId(args) {
655
+ const { resourceId, page = 0, perPage: perPageInput, orderBy } = args;
656
+ const perPage = storage.normalizePerPage(perPageInput, 100);
657
+ if (page < 0) {
658
+ throw new error.MastraError(
659
+ {
660
+ id: "STORAGE_CLICKHOUSE_LIST_THREADS_BY_RESOURCE_ID_INVALID_PAGE",
661
+ domain: error.ErrorDomain.STORAGE,
662
+ category: error.ErrorCategory.USER,
663
+ details: { page }
664
+ },
665
+ new Error("page must be >= 0")
666
+ );
667
+ }
668
+ const { offset, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
669
+ const { field, direction } = this.parseOrderBy(orderBy);
857
670
  try {
858
- if (!threadId.trim()) throw new Error("threadId must be a non-empty string");
859
- const offset = page * perPage;
860
- const dateRange = selectBy?.pagination?.dateRange;
861
- const fromDate = dateRange?.start;
862
- const toDate = dateRange?.end;
863
- const messages = [];
864
- if (selectBy?.include?.length) {
865
- const include = selectBy.include;
866
- const unionQueries = [];
867
- const params = [];
868
- let paramIdx = 1;
869
- for (const inc of include) {
870
- const { id, withPreviousMessages = 0, withNextMessages = 0 } = inc;
871
- const searchId = inc.threadId || threadId;
872
- unionQueries.push(`
873
- SELECT * FROM (
874
- WITH numbered_messages AS (
875
- SELECT
876
- id, content, role, type, "createdAt", thread_id, "resourceId",
877
- ROW_NUMBER() OVER (ORDER BY "createdAt" ASC) as row_num
878
- FROM "${storage.TABLE_MESSAGES}"
879
- WHERE thread_id = {var_thread_id_${paramIdx}:String}
880
- ),
881
- target_positions AS (
882
- SELECT row_num as target_pos
883
- FROM numbered_messages
884
- WHERE id = {var_include_id_${paramIdx}:String}
885
- )
886
- SELECT DISTINCT m.id, m.content, m.role, m.type, m."createdAt", m.thread_id AS "threadId"
887
- FROM numbered_messages m
888
- CROSS JOIN target_positions t
889
- WHERE m.row_num BETWEEN (t.target_pos - {var_withPreviousMessages_${paramIdx}:Int64}) AND (t.target_pos + {var_withNextMessages_${paramIdx}:Int64})
890
- ) AS query_${paramIdx}
891
- `);
892
- params.push(
893
- { [`var_thread_id_${paramIdx}`]: searchId },
894
- { [`var_include_id_${paramIdx}`]: id },
895
- { [`var_withPreviousMessages_${paramIdx}`]: withPreviousMessages },
896
- { [`var_withNextMessages_${paramIdx}`]: withNextMessages }
897
- );
898
- paramIdx++;
899
- }
900
- const finalQuery = unionQueries.join(" UNION ALL ") + ' ORDER BY "createdAt" DESC';
901
- const mergedParams = params.reduce((acc, paramObj) => ({ ...acc, ...paramObj }), {});
902
- const includeResult = await this.client.query({
903
- query: finalQuery,
904
- query_params: mergedParams,
905
- clickhouse_settings: {
906
- date_time_input_format: "best_effort",
907
- date_time_output_format: "iso",
908
- use_client_time_zone: 1,
909
- output_format_json_quote_64bit_integers: 0
910
- }
911
- });
912
- const rows2 = await includeResult.json();
913
- const includedMessages = transformRows(rows2.data);
914
- const seen = /* @__PURE__ */ new Set();
915
- const dedupedMessages = includedMessages.filter((message) => {
916
- if (seen.has(message.id)) return false;
917
- seen.add(message.id);
918
- return true;
919
- });
920
- messages.push(...dedupedMessages);
921
- }
922
- let countQuery = `SELECT count() as total FROM ${storage.TABLE_MESSAGES} WHERE thread_id = {threadId:String}`;
923
- const countParams = { threadId };
924
- if (fromDate) {
925
- countQuery += ` AND createdAt >= parseDateTime64BestEffort({fromDate:String}, 3)`;
926
- countParams.fromDate = fromDate.toISOString();
927
- }
928
- if (toDate) {
929
- countQuery += ` AND createdAt <= parseDateTime64BestEffort({toDate:String}, 3)`;
930
- countParams.toDate = toDate.toISOString();
931
- }
932
671
  const countResult = await this.client.query({
933
- query: countQuery,
934
- query_params: countParams,
672
+ query: `SELECT count(DISTINCT id) as total FROM ${storage.TABLE_THREADS} WHERE resourceId = {resourceId:String}`,
673
+ query_params: { resourceId },
935
674
  clickhouse_settings: {
936
675
  date_time_input_format: "best_effort",
937
676
  date_time_output_format: "iso",
@@ -941,58 +680,46 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
941
680
  });
942
681
  const countData = await countResult.json();
943
682
  const total = countData.data[0].total;
944
- if (total === 0 && messages.length === 0) {
683
+ if (total === 0) {
945
684
  return {
946
- messages: [],
685
+ threads: [],
947
686
  total: 0,
948
687
  page,
949
- perPage,
688
+ perPage: perPageForResponse,
950
689
  hasMore: false
951
690
  };
952
691
  }
953
- const excludeIds = messages.map((m) => m.id);
954
- let dataQuery = `
955
- SELECT
956
- id,
957
- content,
958
- role,
959
- type,
960
- toDateTime64(createdAt, 3) as createdAt,
961
- thread_id AS "threadId",
962
- resourceId
963
- FROM ${storage.TABLE_MESSAGES}
964
- WHERE thread_id = {threadId:String}
965
- `;
966
- const dataParams = { threadId };
967
- if (fromDate) {
968
- dataQuery += ` AND createdAt >= parseDateTime64BestEffort({fromDate:String}, 3)`;
969
- dataParams.fromDate = fromDate.toISOString();
970
- }
971
- if (toDate) {
972
- dataQuery += ` AND createdAt <= parseDateTime64BestEffort({toDate:String}, 3)`;
973
- dataParams.toDate = toDate.toISOString();
974
- }
975
- if (excludeIds.length > 0) {
976
- dataQuery += ` AND id NOT IN ({excludeIds:Array(String)})`;
977
- dataParams.excludeIds = excludeIds;
978
- }
979
- if (selectBy?.last) {
980
- dataQuery += `
981
- ORDER BY createdAt DESC
982
- LIMIT {limit:Int64}
983
- `;
984
- dataParams.limit = perPage;
985
- } else {
986
- dataQuery += `
987
- ORDER BY createdAt ASC
988
- LIMIT {limit:Int64} OFFSET {offset:Int64}
989
- `;
990
- dataParams.limit = perPage;
991
- dataParams.offset = offset;
992
- }
993
- const result = await this.client.query({
994
- query: dataQuery,
995
- query_params: dataParams,
692
+ const dataResult = await this.client.query({
693
+ query: `
694
+ WITH ranked_threads AS (
695
+ SELECT
696
+ id,
697
+ resourceId,
698
+ title,
699
+ metadata,
700
+ toDateTime64(createdAt, 3) as createdAt,
701
+ toDateTime64(updatedAt, 3) as updatedAt,
702
+ ROW_NUMBER() OVER (PARTITION BY id ORDER BY updatedAt DESC) as row_num
703
+ FROM ${storage.TABLE_THREADS}
704
+ WHERE resourceId = {resourceId:String}
705
+ )
706
+ SELECT
707
+ id,
708
+ resourceId,
709
+ title,
710
+ metadata,
711
+ createdAt,
712
+ updatedAt
713
+ FROM ranked_threads
714
+ WHERE row_num = 1
715
+ ORDER BY "${field}" ${direction === "DESC" ? "DESC" : "ASC"}
716
+ LIMIT {perPage:Int64} OFFSET {offset:Int64}
717
+ `,
718
+ query_params: {
719
+ resourceId,
720
+ perPage,
721
+ offset
722
+ },
996
723
  clickhouse_settings: {
997
724
  date_time_input_format: "best_effort",
998
725
  date_time_output_format: "iso",
@@ -1000,35 +727,28 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
1000
727
  output_format_json_quote_64bit_integers: 0
1001
728
  }
1002
729
  });
1003
- const rows = await result.json();
1004
- const paginatedMessages = transformRows(rows.data);
1005
- messages.push(...paginatedMessages);
1006
- if (selectBy?.last) {
1007
- messages.sort((a, b) => new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime());
1008
- }
730
+ const rows = await dataResult.json();
731
+ const threads = transformRows(rows.data).map((thread) => ({
732
+ ...thread,
733
+ metadata: parseMetadata(thread.metadata)
734
+ }));
1009
735
  return {
1010
- messages: format === "v2" ? messages : messages,
736
+ threads,
1011
737
  total,
1012
738
  page,
1013
- perPage,
739
+ perPage: perPageForResponse,
1014
740
  hasMore: offset + perPage < total
1015
741
  };
1016
742
  } catch (error$1) {
1017
- const mastraError = new error.MastraError(
743
+ throw new error.MastraError(
1018
744
  {
1019
- id: "CLICKHOUSE_STORAGE_GET_MESSAGES_PAGINATED_FAILED",
745
+ id: "CLICKHOUSE_STORAGE_LIST_THREADS_BY_RESOURCE_ID_FAILED",
1020
746
  domain: error.ErrorDomain.STORAGE,
1021
747
  category: error.ErrorCategory.THIRD_PARTY,
1022
- details: {
1023
- threadId,
1024
- resourceId: resourceId ?? ""
1025
- }
748
+ details: { resourceId, page }
1026
749
  },
1027
750
  error$1
1028
751
  );
1029
- this.logger?.trackException?.(mastraError);
1030
- this.logger?.error?.(mastraError.toString());
1031
- return { messages: [], total: 0, page, perPage: perPageInput || 40, hasMore: false };
1032
752
  }
1033
753
  }
1034
754
  async updateMessages(args) {
@@ -1111,7 +831,7 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
1111
831
  UPDATE ${setClauses.join(", ")}
1112
832
  WHERE id = {var_id_${paramIdx}:String}
1113
833
  `;
1114
- console.log("Updating message:", id, "with query:", updateQuery, "values:", values);
834
+ console.info("Updating message:", id, "with query:", updateQuery, "values:", values);
1115
835
  updatePromises.push(
1116
836
  this.client.command({
1117
837
  query: updateQuery,
@@ -1170,7 +890,7 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
1170
890
  }
1171
891
  }
1172
892
  if (needsRetry) {
1173
- console.log("Update not applied correctly, retrying with DELETE + INSERT for message:", id);
893
+ console.info("Update not applied correctly, retrying with DELETE + INSERT for message:", id);
1174
894
  await this.client.command({
1175
895
  query: `DELETE FROM ${storage.TABLE_MESSAGES} WHERE id = {messageId:String}`,
1176
896
  query_params: { messageId: id },
@@ -1228,7 +948,7 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
1228
948
  const now = (/* @__PURE__ */ new Date()).toISOString().replace("Z", "");
1229
949
  const threadUpdatePromises = Array.from(threadIdsToUpdate).map(async (threadId) => {
1230
950
  const threadResult = await this.client.query({
1231
- query: `SELECT id, resourceId, title, metadata, createdAt FROM ${storage.TABLE_THREADS} WHERE id = {threadId:String}`,
951
+ query: `SELECT id, resourceId, title, metadata, createdAt FROM ${storage.TABLE_THREADS} WHERE id = {threadId:String} ORDER BY updatedAt DESC LIMIT 1`,
1232
952
  query_params: { threadId },
1233
953
  clickhouse_settings: {
1234
954
  date_time_input_format: "best_effort",
@@ -1257,7 +977,7 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
1257
977
  id: existingThread.id,
1258
978
  resourceId: existingThread.resourceId,
1259
979
  title: existingThread.title,
1260
- metadata: existingThread.metadata,
980
+ metadata: typeof existingThread.metadata === "string" ? existingThread.metadata : serializeMetadata(existingThread.metadata),
1261
981
  createdAt: existingThread.createdAt,
1262
982
  updatedAt: now
1263
983
  }
@@ -1316,7 +1036,7 @@ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
1316
1036
  async getResourceById({ resourceId }) {
1317
1037
  try {
1318
1038
  const result = await this.client.query({
1319
- query: `SELECT id, workingMemory, metadata, createdAt, updatedAt FROM ${storage.TABLE_RESOURCES} WHERE id = {resourceId:String}`,
1039
+ query: `SELECT id, workingMemory, metadata, createdAt, updatedAt FROM ${storage.TABLE_RESOURCES} WHERE id = {resourceId:String} ORDER BY updatedAt DESC LIMIT 1`,
1320
1040
  query_params: { resourceId },
1321
1041
  clickhouse_settings: {
1322
1042
  date_time_input_format: "best_effort",
@@ -1488,6 +1208,9 @@ var StoreOperationsClickhouse = class extends storage.StoreOperations {
1488
1208
  const columns = Object.entries(schema).map(([name, def]) => {
1489
1209
  const constraints = [];
1490
1210
  if (!def.nullable) constraints.push("NOT NULL");
1211
+ if (name === "metadata" && def.type === "text" && def.nullable) {
1212
+ constraints.push("DEFAULT '{}'");
1213
+ }
1491
1214
  const columnTtl = this.ttl?.[tableName]?.columns?.[name];
1492
1215
  return `"${name}" ${COLUMN_TYPES[def.type]} ${constraints.join(" ")} ${columnTtl ? `TTL toDateTime(${columnTtl.ttlKey ?? "createdAt"}) + INTERVAL ${columnTtl.interval} ${columnTtl.unit}` : ""}`;
1493
1216
  }).join(",\n");
@@ -1506,8 +1229,8 @@ var StoreOperationsClickhouse = class extends storage.StoreOperations {
1506
1229
  ${columns}
1507
1230
  )
1508
1231
  ENGINE = ${TABLE_ENGINES[tableName] ?? "MergeTree()"}
1509
- PRIMARY KEY (createdAt, ${tableName === storage.TABLE_EVALS ? "run_id" : "id"})
1510
- ORDER BY (createdAt, ${tableName === storage.TABLE_EVALS ? "run_id" : "id"})
1232
+ PRIMARY KEY (createdAt, ${"id"})
1233
+ ORDER BY (createdAt, ${"id"})
1511
1234
  ${this.ttl?.[tableName]?.row ? `TTL toDateTime(createdAt) + INTERVAL ${this.ttl[tableName].row.interval} ${this.ttl[tableName].row.unit}` : ""}
1512
1235
  SETTINGS index_granularity = 8192
1513
1236
  `;
@@ -1622,7 +1345,7 @@ var StoreOperationsClickhouse = class extends storage.StoreOperations {
1622
1345
  use_client_time_zone: 1
1623
1346
  }
1624
1347
  });
1625
- console.log("INSERT RESULT", result);
1348
+ console.info("INSERT RESULT", result);
1626
1349
  } catch (error$1) {
1627
1350
  throw new error.MastraError(
1628
1351
  {
@@ -1736,7 +1459,7 @@ var ScoresStorageClickhouse = class extends storage.ScoresStorage {
1736
1459
  const input = storage.safelyParseJSON(row.input);
1737
1460
  const output = storage.safelyParseJSON(row.output);
1738
1461
  const additionalContext = storage.safelyParseJSON(row.additionalContext);
1739
- const runtimeContext = storage.safelyParseJSON(row.runtimeContext);
1462
+ const requestContext = storage.safelyParseJSON(row.requestContext);
1740
1463
  const entity = storage.safelyParseJSON(row.entity);
1741
1464
  return {
1742
1465
  ...row,
@@ -1747,7 +1470,7 @@ var ScoresStorageClickhouse = class extends storage.ScoresStorage {
1747
1470
  input,
1748
1471
  output,
1749
1472
  additionalContext,
1750
- runtimeContext,
1473
+ requestContext,
1751
1474
  entity,
1752
1475
  createdAt: new Date(row.createdAt),
1753
1476
  updatedAt: new Date(row.updatedAt)
@@ -1785,9 +1508,23 @@ var ScoresStorageClickhouse = class extends storage.ScoresStorage {
1785
1508
  }
1786
1509
  }
1787
1510
  async saveScore(score) {
1511
+ let parsedScore;
1512
+ try {
1513
+ parsedScore = evals.saveScorePayloadSchema.parse(score);
1514
+ } catch (error$1) {
1515
+ throw new error.MastraError(
1516
+ {
1517
+ id: "CLICKHOUSE_STORAGE_SAVE_SCORE_FAILED_INVALID_SCORE_PAYLOAD",
1518
+ domain: error.ErrorDomain.STORAGE,
1519
+ category: error.ErrorCategory.USER,
1520
+ details: { scoreId: score.id }
1521
+ },
1522
+ error$1
1523
+ );
1524
+ }
1788
1525
  try {
1789
1526
  const record = {
1790
- ...score
1527
+ ...parsedScore
1791
1528
  };
1792
1529
  await this.client.insert({
1793
1530
  table: storage.TABLE_SCORERS,
@@ -1812,7 +1549,7 @@ var ScoresStorageClickhouse = class extends storage.ScoresStorage {
1812
1549
  );
1813
1550
  }
1814
1551
  }
1815
- async getScoresByRunId({
1552
+ async listScoresByRunId({
1816
1553
  runId,
1817
1554
  pagination
1818
1555
  }) {
@@ -1828,24 +1565,28 @@ var ScoresStorageClickhouse = class extends storage.ScoresStorage {
1828
1565
  const countObj = countRows[0];
1829
1566
  total = Number(countObj.count);
1830
1567
  }
1568
+ const { page, perPage: perPageInput } = pagination;
1831
1569
  if (!total) {
1832
1570
  return {
1833
1571
  pagination: {
1834
1572
  total: 0,
1835
- page: pagination.page,
1836
- perPage: pagination.perPage,
1573
+ page,
1574
+ perPage: perPageInput,
1837
1575
  hasMore: false
1838
1576
  },
1839
1577
  scores: []
1840
1578
  };
1841
1579
  }
1842
- const offset = pagination.page * pagination.perPage;
1580
+ const perPage = storage.normalizePerPage(perPageInput, 100);
1581
+ const { offset: start, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
1582
+ const limitValue = perPageInput === false ? total : perPage;
1583
+ const end = perPageInput === false ? total : start + perPage;
1843
1584
  const result = await this.client.query({
1844
1585
  query: `SELECT * FROM ${storage.TABLE_SCORERS} WHERE runId = {var_runId:String} ORDER BY createdAt DESC LIMIT {var_limit:Int64} OFFSET {var_offset:Int64}`,
1845
1586
  query_params: {
1846
1587
  var_runId: runId,
1847
- var_limit: pagination.perPage,
1848
- var_offset: offset
1588
+ var_limit: limitValue,
1589
+ var_offset: start
1849
1590
  },
1850
1591
  format: "JSONEachRow",
1851
1592
  clickhouse_settings: {
@@ -1860,9 +1601,9 @@ var ScoresStorageClickhouse = class extends storage.ScoresStorage {
1860
1601
  return {
1861
1602
  pagination: {
1862
1603
  total,
1863
- page: pagination.page,
1864
- perPage: pagination.perPage,
1865
- hasMore: total > (pagination.page + 1) * pagination.perPage
1604
+ page,
1605
+ perPage: perPageForResponse,
1606
+ hasMore: end < total
1866
1607
  },
1867
1608
  scores
1868
1609
  };
@@ -1878,7 +1619,7 @@ var ScoresStorageClickhouse = class extends storage.ScoresStorage {
1878
1619
  );
1879
1620
  }
1880
1621
  }
1881
- async getScoresByScorerId({
1622
+ async listScoresByScorerId({
1882
1623
  scorerId,
1883
1624
  entityId,
1884
1625
  entityType,
@@ -1912,24 +1653,28 @@ var ScoresStorageClickhouse = class extends storage.ScoresStorage {
1912
1653
  const countObj = countRows[0];
1913
1654
  total = Number(countObj.count);
1914
1655
  }
1656
+ const { page, perPage: perPageInput } = pagination;
1915
1657
  if (!total) {
1916
1658
  return {
1917
1659
  pagination: {
1918
1660
  total: 0,
1919
- page: pagination.page,
1920
- perPage: pagination.perPage,
1661
+ page,
1662
+ perPage: perPageInput,
1921
1663
  hasMore: false
1922
1664
  },
1923
1665
  scores: []
1924
1666
  };
1925
1667
  }
1926
- const offset = pagination.page * pagination.perPage;
1668
+ const perPage = storage.normalizePerPage(perPageInput, 100);
1669
+ const { offset: start, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
1670
+ const limitValue = perPageInput === false ? total : perPage;
1671
+ const end = perPageInput === false ? total : start + perPage;
1927
1672
  const result = await this.client.query({
1928
1673
  query: `SELECT * FROM ${storage.TABLE_SCORERS} WHERE ${whereClause} ORDER BY createdAt DESC LIMIT {var_limit:Int64} OFFSET {var_offset:Int64}`,
1929
1674
  query_params: {
1930
1675
  var_scorerId: scorerId,
1931
- var_limit: pagination.perPage,
1932
- var_offset: offset,
1676
+ var_limit: limitValue,
1677
+ var_offset: start,
1933
1678
  var_entityId: entityId,
1934
1679
  var_entityType: entityType,
1935
1680
  var_source: source
@@ -1947,9 +1692,9 @@ var ScoresStorageClickhouse = class extends storage.ScoresStorage {
1947
1692
  return {
1948
1693
  pagination: {
1949
1694
  total,
1950
- page: pagination.page,
1951
- perPage: pagination.perPage,
1952
- hasMore: total > (pagination.page + 1) * pagination.perPage
1695
+ page,
1696
+ perPage: perPageForResponse,
1697
+ hasMore: end < total
1953
1698
  },
1954
1699
  scores
1955
1700
  };
@@ -1965,7 +1710,7 @@ var ScoresStorageClickhouse = class extends storage.ScoresStorage {
1965
1710
  );
1966
1711
  }
1967
1712
  }
1968
- async getScoresByEntityId({
1713
+ async listScoresByEntityId({
1969
1714
  entityId,
1970
1715
  entityType,
1971
1716
  pagination
@@ -1982,25 +1727,29 @@ var ScoresStorageClickhouse = class extends storage.ScoresStorage {
1982
1727
  const countObj = countRows[0];
1983
1728
  total = Number(countObj.count);
1984
1729
  }
1730
+ const { page, perPage: perPageInput } = pagination;
1985
1731
  if (!total) {
1986
1732
  return {
1987
1733
  pagination: {
1988
1734
  total: 0,
1989
- page: pagination.page,
1990
- perPage: pagination.perPage,
1735
+ page,
1736
+ perPage: perPageInput,
1991
1737
  hasMore: false
1992
1738
  },
1993
1739
  scores: []
1994
1740
  };
1995
1741
  }
1996
- const offset = pagination.page * pagination.perPage;
1742
+ const perPage = storage.normalizePerPage(perPageInput, 100);
1743
+ const { offset: start, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
1744
+ const limitValue = perPageInput === false ? total : perPage;
1745
+ const end = perPageInput === false ? total : start + perPage;
1997
1746
  const result = await this.client.query({
1998
1747
  query: `SELECT * FROM ${storage.TABLE_SCORERS} WHERE entityId = {var_entityId:String} AND entityType = {var_entityType:String} ORDER BY createdAt DESC LIMIT {var_limit:Int64} OFFSET {var_offset:Int64}`,
1999
1748
  query_params: {
2000
1749
  var_entityId: entityId,
2001
1750
  var_entityType: entityType,
2002
- var_limit: pagination.perPage,
2003
- var_offset: offset
1751
+ var_limit: limitValue,
1752
+ var_offset: start
2004
1753
  },
2005
1754
  format: "JSONEachRow",
2006
1755
  clickhouse_settings: {
@@ -2015,9 +1764,9 @@ var ScoresStorageClickhouse = class extends storage.ScoresStorage {
2015
1764
  return {
2016
1765
  pagination: {
2017
1766
  total,
2018
- page: pagination.page,
2019
- perPage: pagination.perPage,
2020
- hasMore: total > (pagination.page + 1) * pagination.perPage
1767
+ page,
1768
+ perPage: perPageForResponse,
1769
+ hasMore: end < total
2021
1770
  },
2022
1771
  scores
2023
1772
  };
@@ -2033,76 +1782,51 @@ var ScoresStorageClickhouse = class extends storage.ScoresStorage {
2033
1782
  );
2034
1783
  }
2035
1784
  }
2036
- };
2037
- var TracesStorageClickhouse = class extends storage.TracesStorage {
2038
- client;
2039
- operations;
2040
- constructor({ client, operations }) {
2041
- super();
2042
- this.client = client;
2043
- this.operations = operations;
2044
- }
2045
- async getTracesPaginated(args) {
2046
- const { name, scope, page = 0, perPage = 100, attributes, filters, dateRange } = args;
2047
- const fromDate = dateRange?.start;
2048
- const toDate = dateRange?.end;
2049
- const currentOffset = page * perPage;
2050
- const queryArgs = {};
2051
- const conditions = [];
2052
- if (name) {
2053
- conditions.push(`name LIKE CONCAT({var_name:String}, '%')`);
2054
- queryArgs.var_name = name;
2055
- }
2056
- if (scope) {
2057
- conditions.push(`scope = {var_scope:String}`);
2058
- queryArgs.var_scope = scope;
2059
- }
2060
- if (attributes) {
2061
- Object.entries(attributes).forEach(([key, value]) => {
2062
- conditions.push(`JSONExtractString(attributes, '${key}') = {var_attr_${key}:String}`);
2063
- queryArgs[`var_attr_${key}`] = value;
2064
- });
2065
- }
2066
- if (filters) {
2067
- Object.entries(filters).forEach(([key, value]) => {
2068
- conditions.push(`${key} = {var_col_${key}:${storage.TABLE_SCHEMAS.mastra_traces?.[key]?.type ?? "text"}}`);
2069
- queryArgs[`var_col_${key}`] = value;
2070
- });
2071
- }
2072
- if (fromDate) {
2073
- conditions.push(`createdAt >= parseDateTime64BestEffort({var_from_date:String})`);
2074
- queryArgs.var_from_date = fromDate.toISOString();
2075
- }
2076
- if (toDate) {
2077
- conditions.push(`createdAt <= parseDateTime64BestEffort({var_to_date:String})`);
2078
- queryArgs.var_to_date = toDate.toISOString();
2079
- }
2080
- const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
1785
+ async listScoresBySpan({
1786
+ traceId,
1787
+ spanId,
1788
+ pagination
1789
+ }) {
2081
1790
  try {
2082
1791
  const countResult = await this.client.query({
2083
- query: `SELECT COUNT(*) as count FROM ${storage.TABLE_TRACES} ${whereClause}`,
2084
- query_params: queryArgs,
2085
- clickhouse_settings: {
2086
- date_time_input_format: "best_effort",
2087
- date_time_output_format: "iso",
2088
- use_client_time_zone: 1,
2089
- output_format_json_quote_64bit_integers: 0
2090
- }
1792
+ query: `SELECT COUNT(*) as count FROM ${storage.TABLE_SCORERS} WHERE traceId = {var_traceId:String} AND spanId = {var_spanId:String}`,
1793
+ query_params: {
1794
+ var_traceId: traceId,
1795
+ var_spanId: spanId
1796
+ },
1797
+ format: "JSONEachRow"
2091
1798
  });
2092
- const countData = await countResult.json();
2093
- const total = Number(countData.data?.[0]?.count ?? 0);
2094
- if (total === 0) {
1799
+ const countRows = await countResult.json();
1800
+ let total = 0;
1801
+ if (Array.isArray(countRows) && countRows.length > 0 && countRows[0]) {
1802
+ const countObj = countRows[0];
1803
+ total = Number(countObj.count);
1804
+ }
1805
+ const { page, perPage: perPageInput } = pagination;
1806
+ if (!total) {
2095
1807
  return {
2096
- traces: [],
2097
- total: 0,
2098
- page,
2099
- perPage,
2100
- hasMore: false
1808
+ pagination: {
1809
+ total: 0,
1810
+ page,
1811
+ perPage: perPageInput,
1812
+ hasMore: false
1813
+ },
1814
+ scores: []
2101
1815
  };
2102
1816
  }
1817
+ const perPage = storage.normalizePerPage(perPageInput, 100);
1818
+ const { offset: start, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
1819
+ const limitValue = perPageInput === false ? total : perPage;
1820
+ const end = perPageInput === false ? total : start + perPage;
2103
1821
  const result = await this.client.query({
2104
- query: `SELECT *, toDateTime64(createdAt, 3) as createdAt FROM ${storage.TABLE_TRACES} ${whereClause} ORDER BY "createdAt" DESC LIMIT {var_limit:UInt32} OFFSET {var_offset:UInt32}`,
2105
- query_params: { ...queryArgs, var_limit: perPage, var_offset: currentOffset },
1822
+ query: `SELECT * FROM ${storage.TABLE_SCORERS} WHERE traceId = {var_traceId:String} AND spanId = {var_spanId:String} ORDER BY createdAt DESC LIMIT {var_limit:Int64} OFFSET {var_offset:Int64}`,
1823
+ query_params: {
1824
+ var_traceId: traceId,
1825
+ var_spanId: spanId,
1826
+ var_limit: limitValue,
1827
+ var_offset: start
1828
+ },
1829
+ format: "JSONEachRow",
2106
1830
  clickhouse_settings: {
2107
1831
  date_time_input_format: "best_effort",
2108
1832
  date_time_output_format: "iso",
@@ -2110,172 +1834,29 @@ var TracesStorageClickhouse = class extends storage.TracesStorage {
2110
1834
  output_format_json_quote_64bit_integers: 0
2111
1835
  }
2112
1836
  });
2113
- if (!result) {
2114
- return {
2115
- traces: [],
2116
- total,
2117
- page,
2118
- perPage,
2119
- hasMore: false
2120
- };
2121
- }
2122
- const resp = await result.json();
2123
- const rows = resp.data;
2124
- const traces = rows.map((row) => ({
2125
- id: row.id,
2126
- parentSpanId: row.parentSpanId,
2127
- traceId: row.traceId,
2128
- name: row.name,
2129
- scope: row.scope,
2130
- kind: row.kind,
2131
- status: storage.safelyParseJSON(row.status),
2132
- events: storage.safelyParseJSON(row.events),
2133
- links: storage.safelyParseJSON(row.links),
2134
- attributes: storage.safelyParseJSON(row.attributes),
2135
- startTime: row.startTime,
2136
- endTime: row.endTime,
2137
- other: storage.safelyParseJSON(row.other),
2138
- createdAt: row.createdAt
2139
- }));
1837
+ const rows = await result.json();
1838
+ const scores = Array.isArray(rows) ? rows.map((row) => this.transformScoreRow(row)) : [];
2140
1839
  return {
2141
- traces,
2142
- total,
2143
- page,
2144
- perPage,
2145
- hasMore: currentOffset + traces.length < total
2146
- };
2147
- } catch (error$1) {
2148
- if (error$1?.message?.includes("no such table") || error$1?.message?.includes("does not exist")) {
2149
- return {
2150
- traces: [],
2151
- total: 0,
1840
+ pagination: {
1841
+ total,
2152
1842
  page,
2153
- perPage,
2154
- hasMore: false
2155
- };
2156
- }
2157
- throw new error.MastraError(
2158
- {
2159
- id: "CLICKHOUSE_STORAGE_GET_TRACES_PAGINATED_FAILED",
2160
- domain: error.ErrorDomain.STORAGE,
2161
- category: error.ErrorCategory.THIRD_PARTY,
2162
- details: {
2163
- name: name ?? null,
2164
- scope: scope ?? null,
2165
- page,
2166
- perPage,
2167
- attributes: attributes ? JSON.stringify(attributes) : null,
2168
- filters: filters ? JSON.stringify(filters) : null,
2169
- dateRange: dateRange ? JSON.stringify(dateRange) : null
2170
- }
1843
+ perPage: perPageForResponse,
1844
+ hasMore: end < total
2171
1845
  },
2172
- error$1
2173
- );
2174
- }
2175
- }
2176
- async getTraces({
2177
- name,
2178
- scope,
2179
- page,
2180
- perPage,
2181
- attributes,
2182
- filters,
2183
- fromDate,
2184
- toDate
2185
- }) {
2186
- const limit = perPage;
2187
- const offset = page * perPage;
2188
- const args = {};
2189
- const conditions = [];
2190
- if (name) {
2191
- conditions.push(`name LIKE CONCAT({var_name:String}, '%')`);
2192
- args.var_name = name;
2193
- }
2194
- if (scope) {
2195
- conditions.push(`scope = {var_scope:String}`);
2196
- args.var_scope = scope;
2197
- }
2198
- if (attributes) {
2199
- Object.entries(attributes).forEach(([key, value]) => {
2200
- conditions.push(`JSONExtractString(attributes, '${key}') = {var_attr_${key}:String}`);
2201
- args[`var_attr_${key}`] = value;
2202
- });
2203
- }
2204
- if (filters) {
2205
- Object.entries(filters).forEach(([key, value]) => {
2206
- conditions.push(`${key} = {var_col_${key}:${storage.TABLE_SCHEMAS.mastra_traces?.[key]?.type ?? "text"}}`);
2207
- args[`var_col_${key}`] = value;
2208
- });
2209
- }
2210
- if (fromDate) {
2211
- conditions.push(`createdAt >= {var_from_date:DateTime64(3)}`);
2212
- args.var_from_date = fromDate.getTime() / 1e3;
2213
- }
2214
- if (toDate) {
2215
- conditions.push(`createdAt <= {var_to_date:DateTime64(3)}`);
2216
- args.var_to_date = toDate.getTime() / 1e3;
2217
- }
2218
- const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
2219
- try {
2220
- const result = await this.client.query({
2221
- query: `SELECT *, toDateTime64(createdAt, 3) as createdAt FROM ${storage.TABLE_TRACES} ${whereClause} ORDER BY "createdAt" DESC LIMIT ${limit} OFFSET ${offset}`,
2222
- query_params: args,
2223
- clickhouse_settings: {
2224
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
2225
- date_time_input_format: "best_effort",
2226
- date_time_output_format: "iso",
2227
- use_client_time_zone: 1,
2228
- output_format_json_quote_64bit_integers: 0
2229
- }
2230
- });
2231
- if (!result) {
2232
- return [];
2233
- }
2234
- const resp = await result.json();
2235
- const rows = resp.data;
2236
- return rows.map((row) => ({
2237
- id: row.id,
2238
- parentSpanId: row.parentSpanId,
2239
- traceId: row.traceId,
2240
- name: row.name,
2241
- scope: row.scope,
2242
- kind: row.kind,
2243
- status: storage.safelyParseJSON(row.status),
2244
- events: storage.safelyParseJSON(row.events),
2245
- links: storage.safelyParseJSON(row.links),
2246
- attributes: storage.safelyParseJSON(row.attributes),
2247
- startTime: row.startTime,
2248
- endTime: row.endTime,
2249
- other: storage.safelyParseJSON(row.other),
2250
- createdAt: row.createdAt
2251
- }));
1846
+ scores
1847
+ };
2252
1848
  } catch (error$1) {
2253
- if (error$1?.message?.includes("no such table") || error$1?.message?.includes("does not exist")) {
2254
- return [];
2255
- }
2256
1849
  throw new error.MastraError(
2257
1850
  {
2258
- id: "CLICKHOUSE_STORAGE_GET_TRACES_FAILED",
1851
+ id: "CLICKHOUSE_STORAGE_GET_SCORES_BY_SPAN_FAILED",
2259
1852
  domain: error.ErrorDomain.STORAGE,
2260
1853
  category: error.ErrorCategory.THIRD_PARTY,
2261
- details: {
2262
- name: name ?? null,
2263
- scope: scope ?? null,
2264
- page,
2265
- perPage,
2266
- attributes: attributes ? JSON.stringify(attributes) : null,
2267
- filters: filters ? JSON.stringify(filters) : null,
2268
- fromDate: fromDate?.toISOString() ?? null,
2269
- toDate: toDate?.toISOString() ?? null
2270
- }
1854
+ details: { traceId, spanId }
2271
1855
  },
2272
1856
  error$1
2273
1857
  );
2274
1858
  }
2275
1859
  }
2276
- async batchTraceInsert(args) {
2277
- await this.operations.batchInsert({ tableName: storage.TABLE_TRACES, records: args.records });
2278
- }
2279
1860
  };
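The hunk above replaces the trace-listing code with a paginated listScoresBySpan lookup on the scores domain, built on ClickHouse named placeholders ({var_traceId:String}, {var_spanId:String}) and a JSONEachRow result set. A minimal caller-side sketch of the corresponding store-level method follows; only the method name, the { traceId, spanId, pagination } argument shape, and the { pagination, scores } return shape come from the diff, while the connection settings, id, and identifiers below are placeholder assumptions.

// Sketch only: fetch scores recorded against one span, one page at a time.
const { ClickhouseStore } = require('@mastra/clickhouse');

async function printSpanScores() {
  // Placeholder connection details; the diff only shows that url/username (and an id
  // forwarded to super()) are read from the config object.
  const store = new ClickhouseStore({
    id: 'clickhouse',
    url: 'http://localhost:8123',
    username: 'default',
    password: '',
  });

  // traceId/spanId are made-up identifiers for illustration.
  const { scores, pagination } = await store.listScoresBySpan({
    traceId: 'trace-123',
    spanId: 'span-456',
    pagination: { page: 0, perPage: 25 },
  });

  console.log(`page ${pagination.page}: ${scores.length}/${pagination.total} scores, hasMore=${pagination.hasMore}`);
  await store.close();
}

printSpanScores().catch(console.error);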
2280
1861
  var WorkflowsStorageClickhouse = class extends storage.WorkflowsStorage {
2281
1862
  client;
@@ -2290,7 +1871,7 @@ var WorkflowsStorageClickhouse = class extends storage.WorkflowsStorage {
2290
1871
  // runId,
2291
1872
  // stepId,
2292
1873
  // result,
2293
- // runtimeContext,
1874
+ // requestContext,
2294
1875
  }) {
2295
1876
  throw new Error("Method not implemented.");
2296
1877
  }
@@ -2395,13 +1976,14 @@ var WorkflowsStorageClickhouse = class extends storage.WorkflowsStorage {
2395
1976
  resourceId: row.resourceId
2396
1977
  };
2397
1978
  }
2398
- async getWorkflowRuns({
1979
+ async listWorkflowRuns({
2399
1980
  workflowName,
2400
1981
  fromDate,
2401
1982
  toDate,
2402
- limit,
2403
- offset,
2404
- resourceId
1983
+ page,
1984
+ perPage,
1985
+ resourceId,
1986
+ status
2405
1987
  } = {}) {
2406
1988
  try {
2407
1989
  const conditions = [];
@@ -2410,6 +1992,10 @@ var WorkflowsStorageClickhouse = class extends storage.WorkflowsStorage {
2410
1992
  conditions.push(`workflow_name = {var_workflow_name:String}`);
2411
1993
  values.var_workflow_name = workflowName;
2412
1994
  }
1995
+ if (status) {
1996
+ conditions.push(`JSONExtractString(snapshot, 'status') = {var_status:String}`);
1997
+ values.var_status = status;
1998
+ }
2413
1999
  if (resourceId) {
2414
2000
  const hasResourceId = await this.operations.hasColumn(storage.TABLE_WORKFLOW_SNAPSHOT, "resourceId");
2415
2001
  if (hasResourceId) {
@@ -2428,10 +2014,13 @@ var WorkflowsStorageClickhouse = class extends storage.WorkflowsStorage {
2428
2014
  values.var_to_date = toDate.getTime() / 1e3;
2429
2015
  }
2430
2016
  const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
2431
- const limitClause = limit !== void 0 ? `LIMIT ${limit}` : "";
2432
- const offsetClause = offset !== void 0 ? `OFFSET ${offset}` : "";
2017
+ const usePagination = perPage !== void 0 && page !== void 0;
2018
+ const normalizedPerPage = usePagination ? storage.normalizePerPage(perPage, Number.MAX_SAFE_INTEGER) : 0;
2019
+ const offset = usePagination ? page * normalizedPerPage : 0;
2020
+ const limitClause = usePagination ? `LIMIT ${normalizedPerPage}` : "";
2021
+ const offsetClause = usePagination ? `OFFSET ${offset}` : "";
2433
2022
  let total = 0;
2434
- if (limit !== void 0 && offset !== void 0) {
2023
+ if (usePagination) {
2435
2024
  const countResult = await this.client.query({
2436
2025
  query: `SELECT COUNT(*) as count FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[storage.TABLE_WORKFLOW_SNAPSHOT].startsWith("ReplacingMergeTree") ? "FINAL" : ""} ${whereClause}`,
2437
2026
  query_params: values,
@@ -2467,7 +2056,7 @@ var WorkflowsStorageClickhouse = class extends storage.WorkflowsStorage {
2467
2056
  } catch (error$1) {
2468
2057
  throw new error.MastraError(
2469
2058
  {
2470
- id: "CLICKHOUSE_STORAGE_GET_WORKFLOW_RUNS_FAILED",
2059
+ id: "CLICKHOUSE_STORAGE_LIST_WORKFLOW_RUNS_FAILED",
2471
2060
  domain: error.ErrorDomain.STORAGE,
2472
2061
  category: error.ErrorCategory.THIRD_PARTY,
2473
2062
  details: { workflowName: workflowName ?? "", resourceId: resourceId ?? "" }
@@ -2533,7 +2122,7 @@ var ClickhouseStore = class extends storage.MastraStorage {
2533
2122
  ttl = {};
2534
2123
  stores;
2535
2124
  constructor(config) {
2536
- super({ name: "ClickhouseStore" });
2125
+ super({ id: config.id, name: "ClickhouseStore" });
2537
2126
  this.db = client.createClient({
2538
2127
  url: config.url,
2539
2128
  username: config.username,
@@ -2550,15 +2139,11 @@ var ClickhouseStore = class extends storage.MastraStorage {
2550
2139
  const operations = new StoreOperationsClickhouse({ client: this.db, ttl: this.ttl });
2551
2140
  const workflows = new WorkflowsStorageClickhouse({ client: this.db, operations });
2552
2141
  const scores = new ScoresStorageClickhouse({ client: this.db, operations });
2553
- const legacyEvals = new LegacyEvalsStorageClickhouse({ client: this.db, operations });
2554
- const traces = new TracesStorageClickhouse({ client: this.db, operations });
2555
2142
  const memory = new MemoryStorageClickhouse({ client: this.db, operations });
2556
2143
  this.stores = {
2557
2144
  operations,
2558
2145
  workflows,
2559
2146
  scores,
2560
- legacyEvals,
2561
- traces,
2562
2147
  memory
2563
2148
  };
2564
2149
  }
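The constructor hunks above show the underlying @clickhouse/client connection (createClient with url and username) that every query in this file goes through, always with named {var_*:Type} placeholders bound via query_params, format: "JSONEachRow", and the same date/time clickhouse_settings. A standalone sketch of that query pattern, assuming a local server and an illustrative table name that is not part of the package:

// Sketch of the parameterized-query pattern used throughout this file.
const { createClient } = require('@clickhouse/client');

async function countAndPage() {
  const client = createClient({
    url: 'http://localhost:8123', // placeholder endpoint and credentials
    username: 'default',
    password: '',
  });

  // Named placeholders ({var_x:Type}) are bound via query_params, never string-interpolated.
  const countResult = await client.query({
    query: `SELECT COUNT(*) AS count FROM example_table WHERE traceId = {var_traceId:String}`,
    query_params: { var_traceId: 'trace-123' },
    format: 'JSONEachRow',
  });
  const countRows = await countResult.json(); // JSONEachRow => array of row objects
  const total = Number(countRows[0]?.count ?? 0);

  // Paged select with the same settings the store uses for date handling.
  const result = await client.query({
    query: `SELECT * FROM example_table WHERE traceId = {var_traceId:String}
            ORDER BY createdAt DESC LIMIT {var_limit:Int64} OFFSET {var_offset:Int64}`,
    query_params: { var_traceId: 'trace-123', var_limit: 25, var_offset: 0 },
    format: 'JSONEachRow',
    clickhouse_settings: {
      date_time_input_format: 'best_effort',
      date_time_output_format: 'iso',
      use_client_time_zone: 1,
      output_format_json_quote_64bit_integers: 0,
    },
  });
  const rows = await result.json();
  console.log({ total, fetched: rows.length });

  await client.close();
}

countAndPage().catch(console.error);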
@@ -2568,15 +2153,10 @@ var ClickhouseStore = class extends storage.MastraStorage {
2568
2153
  resourceWorkingMemory: true,
2569
2154
  hasColumn: true,
2570
2155
  createTable: true,
2571
- deleteMessages: false
2156
+ deleteMessages: false,
2157
+ listScoresBySpan: true
2572
2158
  };
2573
2159
  }
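The hunk above adds a listScoresBySpan: true entry to the capability flags (and keeps deleteMessages: false). Assuming these flags are surfaced through the store's supports accessor, which is not visible in this hunk, a consumer could feature-detect the new lookup before calling it, as in this sketch:

// Sketch only: feature-detect the new flag before calling the span-score lookup.
// Assumes the flags above are exposed via the store's `supports` accessor (not shown
// in this hunk); `store` is a constructed ClickhouseStore instance.
async function maybeListSpanScores(store, traceId, spanId) {
  if (!store.supports?.listScoresBySpan) {
    return { pagination: { total: 0, page: 0, perPage: 0, hasMore: false }, scores: [] };
  }
  return store.listScoresBySpan({ traceId, spanId, pagination: { page: 0, perPage: 50 } });
}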
2574
- async getEvalsByAgentName(agentName, type) {
2575
- return this.stores.legacyEvals.getEvalsByAgentName(agentName, type);
2576
- }
2577
- async getEvals(options) {
2578
- return this.stores.legacyEvals.getEvals(options);
2579
- }
2580
2160
  async batchInsert({ tableName, records }) {
2581
2161
  await this.stores.operations.batchInsert({ tableName, records });
2582
2162
  }
@@ -2644,9 +2224,9 @@ var ClickhouseStore = class extends storage.MastraStorage {
2644
2224
  runId,
2645
2225
  stepId,
2646
2226
  result,
2647
- runtimeContext
2227
+ requestContext
2648
2228
  }) {
2649
- return this.stores.workflows.updateWorkflowResults({ workflowName, runId, stepId, result, runtimeContext });
2229
+ return this.stores.workflows.updateWorkflowResults({ workflowName, runId, stepId, result, requestContext });
2650
2230
  }
2651
2231
  async updateWorkflowState({
2652
2232
  workflowName,
@@ -2669,15 +2249,8 @@ var ClickhouseStore = class extends storage.MastraStorage {
2669
2249
  }) {
2670
2250
  return this.stores.workflows.loadWorkflowSnapshot({ workflowName, runId });
2671
2251
  }
2672
- async getWorkflowRuns({
2673
- workflowName,
2674
- fromDate,
2675
- toDate,
2676
- limit,
2677
- offset,
2678
- resourceId
2679
- } = {}) {
2680
- return this.stores.workflows.getWorkflowRuns({ workflowName, fromDate, toDate, limit, offset, resourceId });
2252
+ async listWorkflowRuns(args = {}) {
2253
+ return this.stores.workflows.listWorkflowRuns(args);
2681
2254
  }
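getWorkflowRuns is renamed to listWorkflowRuns here, and the raw limit/offset arguments give way to page/perPage plus an optional status filter; per the domain implementation above, the offset is computed as page * perPage, so page is zero-based. A hedged migration sketch for a caller follows; every argument value is a placeholder, and the result shape is not asserted because it is not visible in these hunks.

// Migration sketch for the workflow-run listing API (all argument values are placeholders).
// Previously: store.getWorkflowRuns({ workflowName, limit: 20, offset: 40, resourceId })
// The same window is now expressed as page/perPage, with offset = page * perPage.
async function listRecentRuns(store) {
  // The return value is intentionally passed through untouched, since its shape
  // is not shown in these hunks.
  return store.listWorkflowRuns({
    workflowName: 'my-workflow',        // placeholder name
    page: 2,                            // zero-based page index -> OFFSET 40 with perPage 20
    perPage: 20,
    resourceId: 'resource-1',           // placeholder
    status: 'success',                  // new optional filter matched against the snapshot's status field
    fromDate: new Date('2025-01-01'),   // placeholder date range
    toDate: new Date(),
  });
}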
2682
2255
  async getWorkflowRunById({
2683
2256
  runId,
@@ -2685,21 +2258,9 @@ var ClickhouseStore = class extends storage.MastraStorage {
2685
2258
  }) {
2686
2259
  return this.stores.workflows.getWorkflowRunById({ runId, workflowName });
2687
2260
  }
2688
- async getTraces(args) {
2689
- return this.stores.traces.getTraces(args);
2690
- }
2691
- async getTracesPaginated(args) {
2692
- return this.stores.traces.getTracesPaginated(args);
2693
- }
2694
- async batchTraceInsert(args) {
2695
- return this.stores.traces.batchTraceInsert(args);
2696
- }
2697
2261
  async getThreadById({ threadId }) {
2698
2262
  return this.stores.memory.getThreadById({ threadId });
2699
2263
  }
2700
- async getThreadsByResourceId({ resourceId }) {
2701
- return this.stores.memory.getThreadsByResourceId({ resourceId });
2702
- }
2703
2264
  async saveThread({ thread }) {
2704
2265
  return this.stores.memory.saveThread({ thread });
2705
2266
  }
@@ -2713,29 +2274,9 @@ var ClickhouseStore = class extends storage.MastraStorage {
2713
2274
  async deleteThread({ threadId }) {
2714
2275
  return this.stores.memory.deleteThread({ threadId });
2715
2276
  }
2716
- async getThreadsByResourceIdPaginated(args) {
2717
- return this.stores.memory.getThreadsByResourceIdPaginated(args);
2718
- }
2719
- async getMessages({
2720
- threadId,
2721
- resourceId,
2722
- selectBy,
2723
- format
2724
- }) {
2725
- return this.stores.memory.getMessages({ threadId, resourceId, selectBy, format });
2726
- }
2727
- async getMessagesById({
2728
- messageIds,
2729
- format
2730
- }) {
2731
- return this.stores.memory.getMessagesById({ messageIds, format });
2732
- }
2733
2277
  async saveMessages(args) {
2734
2278
  return this.stores.memory.saveMessages(args);
2735
2279
  }
2736
- async getMessagesPaginated(args) {
2737
- return this.stores.memory.getMessagesPaginated(args);
2738
- }
2739
2280
  async updateMessages(args) {
2740
2281
  return this.stores.memory.updateMessages(args);
2741
2282
  }
@@ -2758,27 +2299,34 @@ var ClickhouseStore = class extends storage.MastraStorage {
2758
2299
  async saveScore(_score) {
2759
2300
  return this.stores.scores.saveScore(_score);
2760
2301
  }
2761
- async getScoresByRunId({
2302
+ async listScoresByRunId({
2762
2303
  runId,
2763
2304
  pagination
2764
2305
  }) {
2765
- return this.stores.scores.getScoresByRunId({ runId, pagination });
2306
+ return this.stores.scores.listScoresByRunId({ runId, pagination });
2766
2307
  }
2767
- async getScoresByEntityId({
2308
+ async listScoresByEntityId({
2768
2309
  entityId,
2769
2310
  entityType,
2770
2311
  pagination
2771
2312
  }) {
2772
- return this.stores.scores.getScoresByEntityId({ entityId, entityType, pagination });
2313
+ return this.stores.scores.listScoresByEntityId({ entityId, entityType, pagination });
2773
2314
  }
2774
- async getScoresByScorerId({
2315
+ async listScoresByScorerId({
2775
2316
  scorerId,
2776
2317
  pagination,
2777
2318
  entityId,
2778
2319
  entityType,
2779
2320
  source
2780
2321
  }) {
2781
- return this.stores.scores.getScoresByScorerId({ scorerId, pagination, entityId, entityType, source });
2322
+ return this.stores.scores.listScoresByScorerId({ scorerId, pagination, entityId, entityType, source });
2323
+ }
2324
+ async listScoresBySpan({
2325
+ traceId,
2326
+ spanId,
2327
+ pagination
2328
+ }) {
2329
+ return this.stores.scores.listScoresBySpan({ traceId, spanId, pagination });
2782
2330
  }
2783
2331
  async close() {
2784
2332
  await this.db.close();