@mastra/clickhouse 0.0.0-vnext-inngest-20250508131921 → 0.0.0-vnext-20251119160359

This diff shows the contents of publicly available package versions that have been published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
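
The main change visible in this release is that the single ClickhouseStore class has been split into per-domain storage classes bundled in dist/index.cjs: StoreOperationsClickhouse (table DDL, generic inserts and loads), MemoryStorageClickhouse (threads, messages, resources), and ScoresStorageClickhouse (scorer results), alongside new table constants (TABLE_SCORERS, TABLE_RESOURCES, TABLE_SPANS) and MastraError-based error reporting. The TypeScript sketch below illustrates how these pieces fit together, based only on the constructors and call shapes visible in the bundled code; the named exports and the optional ttl argument are assumptions, since this diff covers dist/index.cjs and not the package's export map or type declarations.

import { createClient } from '@clickhouse/client';
// NOTE: these named exports are assumed for illustration; the diff shows the
// bundled class definitions but not how the package exposes them.
import {
  StoreOperationsClickhouse,
  MemoryStorageClickhouse,
  ScoresStorageClickhouse,
} from '@mastra/clickhouse';

async function main() {
  // Shared low-level client, mirroring the clickhouse_settings used throughout the bundle.
  const client = createClient({
    url: process.env.CLICKHOUSE_URL!,
    username: process.env.CLICKHOUSE_USERNAME!,
    password: process.env.CLICKHOUSE_PASSWORD!,
    clickhouse_settings: {
      date_time_input_format: 'best_effort',
      date_time_output_format: 'iso',
      use_client_time_zone: 1,
      output_format_json_quote_64bit_integers: 0,
    },
  });

  // Table-level helpers (createTable/alterTable/insert/load); per-table TTL config is assumed optional here.
  const operations = new StoreOperationsClickhouse({ client, ttl: {} });

  // Domain stores are constructed from the same client plus the operations helper.
  const memory = new MemoryStorageClickhouse({ client, operations });
  const scores = new ScoresStorageClickhouse({ client, operations });

  // Call shapes taken from the bundled code in this diff.
  const { messages } = await memory.listMessages({ threadId: 'thread-1', page: 0 });
  const score = await scores.getScoreById({ id: 'score-1' });
  console.log(messages.length, score?.id ?? null);
}

main().catch(console.error);
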
package/dist/index.cjs CHANGED
@@ -1,22 +1,21 @@
1
1
  'use strict';
2
2
 
3
3
  var client = require('@clickhouse/client');
4
+ var error = require('@mastra/core/error');
4
5
  var storage = require('@mastra/core/storage');
6
+ var agent = require('@mastra/core/agent');
7
+ var evals = require('@mastra/core/evals');
5
8
 
6
9
  // src/storage/index.ts
7
- function safelyParseJSON(jsonString) {
8
- try {
9
- return JSON.parse(jsonString);
10
- } catch {
11
- return {};
12
- }
13
- }
14
10
  var TABLE_ENGINES = {
15
11
  [storage.TABLE_MESSAGES]: `MergeTree()`,
16
12
  [storage.TABLE_WORKFLOW_SNAPSHOT]: `ReplacingMergeTree()`,
17
13
  [storage.TABLE_TRACES]: `MergeTree()`,
18
14
  [storage.TABLE_THREADS]: `ReplacingMergeTree()`,
19
- [storage.TABLE_EVALS]: `MergeTree()`
15
+ [storage.TABLE_SCORERS]: `MergeTree()`,
16
+ [storage.TABLE_RESOURCES]: `ReplacingMergeTree()`,
17
+ // TODO: verify this is the correct engine for Spans when implementing clickhouse storage
18
+ [storage.TABLE_SPANS]: `ReplacingMergeTree()`
20
19
  };
21
20
  var COLUMN_TYPES = {
22
21
  text: "String",
@@ -24,11 +23,10 @@ var COLUMN_TYPES = {
24
23
  uuid: "String",
25
24
  jsonb: "String",
26
25
  integer: "Int64",
27
- bigint: "Int64"
26
+ float: "Float64",
27
+ bigint: "Int64",
28
+ boolean: "Bool"
28
29
  };
29
- function transformRows(rows) {
30
- return rows.map((row) => transformRow(row));
31
- }
32
30
  function transformRow(row) {
33
31
  if (!row) {
34
32
  return row;
@@ -39,54 +37,730 @@ function transformRow(row) {
39
37
  if (row.updatedAt) {
40
38
  row.updatedAt = new Date(row.updatedAt);
41
39
  }
40
+ if (row.content && typeof row.content === "string") {
41
+ row.content = storage.safelyParseJSON(row.content);
42
+ }
42
43
  return row;
43
44
  }
44
- var ClickhouseStore = class extends storage.MastraStorage {
45
- db;
46
- ttl = {};
47
- constructor(config) {
48
- super({ name: "ClickhouseStore" });
49
- this.db = client.createClient({
50
- url: config.url,
51
- username: config.username,
52
- password: config.password,
53
- clickhouse_settings: {
54
- date_time_input_format: "best_effort",
55
- date_time_output_format: "iso",
56
- // This is crucial
57
- use_client_time_zone: 1,
58
- output_format_json_quote_64bit_integers: 0
45
+ function transformRows(rows) {
46
+ return rows.map((row) => transformRow(row));
47
+ }
48
+
49
+ // src/storage/domains/memory/index.ts
50
+ function serializeMetadata(metadata) {
51
+ if (!metadata || Object.keys(metadata).length === 0) {
52
+ return "{}";
53
+ }
54
+ return JSON.stringify(metadata);
55
+ }
56
+ function parseMetadata(metadata) {
57
+ if (!metadata) return {};
58
+ if (typeof metadata === "object") return metadata;
59
+ if (typeof metadata !== "string") return {};
60
+ const trimmed = metadata.trim();
61
+ if (trimmed === "" || trimmed === "null") return {};
62
+ try {
63
+ return JSON.parse(trimmed);
64
+ } catch {
65
+ return {};
66
+ }
67
+ }
68
+ var MemoryStorageClickhouse = class extends storage.MemoryStorage {
69
+ client;
70
+ operations;
71
+ constructor({ client, operations }) {
72
+ super();
73
+ this.client = client;
74
+ this.operations = operations;
75
+ }
76
+ async listMessagesById({ messageIds }) {
77
+ if (messageIds.length === 0) return { messages: [] };
78
+ try {
79
+ const result = await this.client.query({
80
+ query: `
81
+ SELECT
82
+ id,
83
+ content,
84
+ role,
85
+ type,
86
+ toDateTime64(createdAt, 3) as createdAt,
87
+ thread_id AS "threadId",
88
+ "resourceId"
89
+ FROM "${storage.TABLE_MESSAGES}"
90
+ WHERE id IN {messageIds:Array(String)}
91
+ ORDER BY "createdAt" DESC
92
+ `,
93
+ query_params: {
94
+ messageIds
95
+ },
96
+ clickhouse_settings: {
97
+ // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
98
+ date_time_input_format: "best_effort",
99
+ date_time_output_format: "iso",
100
+ use_client_time_zone: 1,
101
+ output_format_json_quote_64bit_integers: 0
102
+ }
103
+ });
104
+ const rows = await result.json();
105
+ const messages = transformRows(rows.data);
106
+ messages.forEach((message) => {
107
+ if (typeof message.content === "string") {
108
+ try {
109
+ message.content = JSON.parse(message.content);
110
+ } catch {
111
+ }
112
+ }
113
+ });
114
+ const list = new agent.MessageList().add(messages, "memory");
115
+ return { messages: list.get.all.db() };
116
+ } catch (error$1) {
117
+ throw new error.MastraError(
118
+ {
119
+ id: "CLICKHOUSE_STORAGE_LIST_MESSAGES_BY_ID_FAILED",
120
+ domain: error.ErrorDomain.STORAGE,
121
+ category: error.ErrorCategory.THIRD_PARTY,
122
+ details: { messageIds: JSON.stringify(messageIds) }
123
+ },
124
+ error$1
125
+ );
126
+ }
127
+ }
128
+ async listMessages(args) {
129
+ const { threadId, resourceId, include, filter, perPage: perPageInput, page = 0, orderBy } = args;
130
+ if (page < 0) {
131
+ throw new error.MastraError(
132
+ {
133
+ id: "STORAGE_CLICKHOUSE_LIST_MESSAGES_INVALID_PAGE",
134
+ domain: error.ErrorDomain.STORAGE,
135
+ category: error.ErrorCategory.USER,
136
+ details: { page }
137
+ },
138
+ new Error("page must be >= 0")
139
+ );
140
+ }
141
+ if (!threadId.trim()) {
142
+ throw new error.MastraError(
143
+ {
144
+ id: "STORAGE_CLICKHOUSE_LIST_MESSAGES_INVALID_THREAD_ID",
145
+ domain: error.ErrorDomain.STORAGE,
146
+ category: error.ErrorCategory.THIRD_PARTY,
147
+ details: { threadId }
148
+ },
149
+ new Error("threadId must be a non-empty string")
150
+ );
151
+ }
152
+ const perPageForQuery = storage.normalizePerPage(perPageInput, 40);
153
+ const { offset, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPageForQuery);
154
+ try {
155
+ let dataQuery = `
156
+ SELECT
157
+ id,
158
+ content,
159
+ role,
160
+ type,
161
+ toDateTime64(createdAt, 3) as createdAt,
162
+ thread_id AS "threadId",
163
+ resourceId
164
+ FROM ${storage.TABLE_MESSAGES}
165
+ WHERE thread_id = {threadId:String}
166
+ `;
167
+ const dataParams = { threadId };
168
+ if (resourceId) {
169
+ dataQuery += ` AND resourceId = {resourceId:String}`;
170
+ dataParams.resourceId = resourceId;
59
171
  }
60
- });
61
- this.ttl = config.ttl;
172
+ if (filter?.dateRange?.start) {
173
+ const startDate = filter.dateRange.start instanceof Date ? filter.dateRange.start.toISOString() : new Date(filter.dateRange.start).toISOString();
174
+ dataQuery += ` AND createdAt >= parseDateTime64BestEffort({fromDate:String}, 3)`;
175
+ dataParams.fromDate = startDate;
176
+ }
177
+ if (filter?.dateRange?.end) {
178
+ const endDate = filter.dateRange.end instanceof Date ? filter.dateRange.end.toISOString() : new Date(filter.dateRange.end).toISOString();
179
+ dataQuery += ` AND createdAt <= parseDateTime64BestEffort({toDate:String}, 3)`;
180
+ dataParams.toDate = endDate;
181
+ }
182
+ const { field, direction } = this.parseOrderBy(orderBy, "ASC");
183
+ dataQuery += ` ORDER BY "${field}" ${direction}`;
184
+ if (perPageForResponse === false) ; else {
185
+ dataQuery += ` LIMIT {limit:Int64} OFFSET {offset:Int64}`;
186
+ dataParams.limit = perPageForQuery;
187
+ dataParams.offset = offset;
188
+ }
189
+ const result = await this.client.query({
190
+ query: dataQuery,
191
+ query_params: dataParams,
192
+ clickhouse_settings: {
193
+ date_time_input_format: "best_effort",
194
+ date_time_output_format: "iso",
195
+ use_client_time_zone: 1,
196
+ output_format_json_quote_64bit_integers: 0
197
+ }
198
+ });
199
+ const rows = await result.json();
200
+ const paginatedMessages = transformRows(rows.data);
201
+ const paginatedCount = paginatedMessages.length;
202
+ let countQuery = `SELECT count() as total FROM ${storage.TABLE_MESSAGES} WHERE thread_id = {threadId:String}`;
203
+ const countParams = { threadId };
204
+ if (resourceId) {
205
+ countQuery += ` AND resourceId = {resourceId:String}`;
206
+ countParams.resourceId = resourceId;
207
+ }
208
+ if (filter?.dateRange?.start) {
209
+ const startDate = filter.dateRange.start instanceof Date ? filter.dateRange.start.toISOString() : new Date(filter.dateRange.start).toISOString();
210
+ countQuery += ` AND createdAt >= parseDateTime64BestEffort({fromDate:String}, 3)`;
211
+ countParams.fromDate = startDate;
212
+ }
213
+ if (filter?.dateRange?.end) {
214
+ const endDate = filter.dateRange.end instanceof Date ? filter.dateRange.end.toISOString() : new Date(filter.dateRange.end).toISOString();
215
+ countQuery += ` AND createdAt <= parseDateTime64BestEffort({toDate:String}, 3)`;
216
+ countParams.toDate = endDate;
217
+ }
218
+ const countResult = await this.client.query({
219
+ query: countQuery,
220
+ query_params: countParams,
221
+ clickhouse_settings: {
222
+ date_time_input_format: "best_effort",
223
+ date_time_output_format: "iso",
224
+ use_client_time_zone: 1,
225
+ output_format_json_quote_64bit_integers: 0
226
+ }
227
+ });
228
+ const countData = await countResult.json();
229
+ const total = countData.data[0].total;
230
+ if (total === 0 && paginatedCount === 0 && (!include || include.length === 0)) {
231
+ return {
232
+ messages: [],
233
+ total: 0,
234
+ page,
235
+ perPage: perPageForResponse,
236
+ hasMore: false
237
+ };
238
+ }
239
+ const messageIds = new Set(paginatedMessages.map((m) => m.id));
240
+ let includeMessages = [];
241
+ if (include && include.length > 0) {
242
+ const unionQueries = [];
243
+ const params = [];
244
+ let paramIdx = 1;
245
+ for (const inc of include) {
246
+ const { id, withPreviousMessages = 0, withNextMessages = 0 } = inc;
247
+ const searchId = inc.threadId || threadId;
248
+ unionQueries.push(`
249
+ SELECT * FROM (
250
+ WITH numbered_messages AS (
251
+ SELECT
252
+ id, content, role, type, "createdAt", thread_id, "resourceId",
253
+ ROW_NUMBER() OVER (ORDER BY "createdAt" ASC) as row_num
254
+ FROM "${storage.TABLE_MESSAGES}"
255
+ WHERE thread_id = {var_thread_id_${paramIdx}:String}
256
+ ),
257
+ target_positions AS (
258
+ SELECT row_num as target_pos
259
+ FROM numbered_messages
260
+ WHERE id = {var_include_id_${paramIdx}:String}
261
+ )
262
+ SELECT DISTINCT m.id, m.content, m.role, m.type, m."createdAt", m.thread_id AS "threadId", m."resourceId"
263
+ FROM numbered_messages m
264
+ CROSS JOIN target_positions t
265
+ WHERE m.row_num BETWEEN (t.target_pos - {var_withPreviousMessages_${paramIdx}:Int64}) AND (t.target_pos + {var_withNextMessages_${paramIdx}:Int64})
266
+ ) AS query_${paramIdx}
267
+ `);
268
+ params.push(
269
+ { [`var_thread_id_${paramIdx}`]: searchId },
270
+ { [`var_include_id_${paramIdx}`]: id },
271
+ { [`var_withPreviousMessages_${paramIdx}`]: withPreviousMessages },
272
+ { [`var_withNextMessages_${paramIdx}`]: withNextMessages }
273
+ );
274
+ paramIdx++;
275
+ }
276
+ const finalQuery = unionQueries.join(" UNION ALL ") + ' ORDER BY "createdAt" ASC';
277
+ const mergedParams = params.reduce((acc, paramObj) => ({ ...acc, ...paramObj }), {});
278
+ const includeResult = await this.client.query({
279
+ query: finalQuery,
280
+ query_params: mergedParams,
281
+ clickhouse_settings: {
282
+ date_time_input_format: "best_effort",
283
+ date_time_output_format: "iso",
284
+ use_client_time_zone: 1,
285
+ output_format_json_quote_64bit_integers: 0
286
+ }
287
+ });
288
+ const includeRows = await includeResult.json();
289
+ includeMessages = transformRows(includeRows.data);
290
+ for (const includeMsg of includeMessages) {
291
+ if (!messageIds.has(includeMsg.id)) {
292
+ paginatedMessages.push(includeMsg);
293
+ messageIds.add(includeMsg.id);
294
+ }
295
+ }
296
+ }
297
+ const list = new agent.MessageList().add(paginatedMessages, "memory");
298
+ let finalMessages = list.get.all.db();
299
+ finalMessages = finalMessages.sort((a, b) => {
300
+ const isDateField = field === "createdAt" || field === "updatedAt";
301
+ const aValue = isDateField ? new Date(a[field]).getTime() : a[field];
302
+ const bValue = isDateField ? new Date(b[field]).getTime() : b[field];
303
+ if (aValue === bValue) {
304
+ return a.id.localeCompare(b.id);
305
+ }
306
+ if (typeof aValue === "number" && typeof bValue === "number") {
307
+ return direction === "ASC" ? aValue - bValue : bValue - aValue;
308
+ }
309
+ return direction === "ASC" ? String(aValue).localeCompare(String(bValue)) : String(bValue).localeCompare(String(aValue));
310
+ });
311
+ const returnedThreadMessageIds = new Set(finalMessages.filter((m) => m.threadId === threadId).map((m) => m.id));
312
+ const allThreadMessagesReturned = returnedThreadMessageIds.size >= total;
313
+ const hasMore = perPageForResponse === false ? false : allThreadMessagesReturned ? false : offset + paginatedCount < total;
314
+ return {
315
+ messages: finalMessages,
316
+ total,
317
+ page,
318
+ perPage: perPageForResponse,
319
+ hasMore
320
+ };
321
+ } catch (error$1) {
322
+ const mastraError = new error.MastraError(
323
+ {
324
+ id: "STORAGE_CLICKHOUSE_STORE_LIST_MESSAGES_FAILED",
325
+ domain: error.ErrorDomain.STORAGE,
326
+ category: error.ErrorCategory.THIRD_PARTY,
327
+ details: {
328
+ threadId,
329
+ resourceId: resourceId ?? ""
330
+ }
331
+ },
332
+ error$1
333
+ );
334
+ this.logger?.error?.(mastraError.toString());
335
+ this.logger?.trackException?.(mastraError);
336
+ return {
337
+ messages: [],
338
+ total: 0,
339
+ page,
340
+ perPage: perPageForResponse,
341
+ hasMore: false
342
+ };
343
+ }
344
+ }
345
+ async saveMessages(args) {
346
+ const { messages } = args;
347
+ if (messages.length === 0) return { messages };
348
+ for (const message of messages) {
349
+ const resourceId = message.resourceId;
350
+ if (!resourceId) {
351
+ throw new Error("Resource ID is required");
352
+ }
353
+ if (!message.threadId) {
354
+ throw new Error("Thread ID is required");
355
+ }
356
+ const thread = await this.getThreadById({ threadId: message.threadId });
357
+ if (!thread) {
358
+ throw new Error(`Thread ${message.threadId} not found`);
359
+ }
360
+ }
361
+ const threadIdSet = /* @__PURE__ */ new Map();
362
+ await Promise.all(
363
+ messages.map(async (m) => {
364
+ const resourceId = m.resourceId;
365
+ if (!resourceId) {
366
+ throw new Error("Resource ID is required");
367
+ }
368
+ if (!m.threadId) {
369
+ throw new Error("Thread ID is required");
370
+ }
371
+ const thread = await this.getThreadById({ threadId: m.threadId });
372
+ if (!thread) {
373
+ throw new Error(`Thread ${m.threadId} not found`);
374
+ }
375
+ threadIdSet.set(m.threadId, thread);
376
+ })
377
+ );
378
+ try {
379
+ const existingResult = await this.client.query({
380
+ query: `SELECT id, thread_id FROM ${storage.TABLE_MESSAGES} WHERE id IN ({ids:Array(String)})`,
381
+ query_params: {
382
+ ids: messages.map((m) => m.id)
383
+ },
384
+ clickhouse_settings: {
385
+ // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
386
+ date_time_input_format: "best_effort",
387
+ date_time_output_format: "iso",
388
+ use_client_time_zone: 1,
389
+ output_format_json_quote_64bit_integers: 0
390
+ },
391
+ format: "JSONEachRow"
392
+ });
393
+ const existingRows = await existingResult.json();
394
+ const existingSet = new Set(existingRows.map((row) => `${row.id}::${row.thread_id}`));
395
+ const toInsert = messages.filter((m) => !existingSet.has(`${m.id}::${m.threadId}`));
396
+ const toUpdate = messages.filter((m) => existingSet.has(`${m.id}::${m.threadId}`));
397
+ const toMove = messages.filter((m) => {
398
+ const existingRow = existingRows.find((row) => row.id === m.id);
399
+ return existingRow && existingRow.thread_id !== m.threadId;
400
+ });
401
+ const deletePromises = toMove.map((message) => {
402
+ const existingRow = existingRows.find((row) => row.id === message.id);
403
+ if (!existingRow) return Promise.resolve();
404
+ return this.client.command({
405
+ query: `DELETE FROM ${storage.TABLE_MESSAGES} WHERE id = {var_id:String} AND thread_id = {var_old_thread_id:String}`,
406
+ query_params: {
407
+ var_id: message.id,
408
+ var_old_thread_id: existingRow.thread_id
409
+ },
410
+ clickhouse_settings: {
411
+ date_time_input_format: "best_effort",
412
+ use_client_time_zone: 1,
413
+ output_format_json_quote_64bit_integers: 0
414
+ }
415
+ });
416
+ });
417
+ const updatePromises = toUpdate.map(
418
+ (message) => this.client.command({
419
+ query: `
420
+ ALTER TABLE ${storage.TABLE_MESSAGES}
421
+ UPDATE content = {var_content:String}, role = {var_role:String}, type = {var_type:String}, resourceId = {var_resourceId:String}
422
+ WHERE id = {var_id:String} AND thread_id = {var_thread_id:String}
423
+ `,
424
+ query_params: {
425
+ var_content: typeof message.content === "string" ? message.content : JSON.stringify(message.content),
426
+ var_role: message.role,
427
+ var_type: message.type || "v2",
428
+ var_resourceId: message.resourceId,
429
+ var_id: message.id,
430
+ var_thread_id: message.threadId
431
+ },
432
+ clickhouse_settings: {
433
+ // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
434
+ date_time_input_format: "best_effort",
435
+ use_client_time_zone: 1,
436
+ output_format_json_quote_64bit_integers: 0
437
+ }
438
+ })
439
+ );
440
+ await Promise.all([
441
+ // Insert new messages (including moved messages)
442
+ this.client.insert({
443
+ table: storage.TABLE_MESSAGES,
444
+ format: "JSONEachRow",
445
+ values: toInsert.map((message) => ({
446
+ id: message.id,
447
+ thread_id: message.threadId,
448
+ resourceId: message.resourceId,
449
+ content: typeof message.content === "string" ? message.content : JSON.stringify(message.content),
450
+ createdAt: message.createdAt.toISOString(),
451
+ role: message.role,
452
+ type: message.type || "v2"
453
+ })),
454
+ clickhouse_settings: {
455
+ // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
456
+ date_time_input_format: "best_effort",
457
+ use_client_time_zone: 1,
458
+ output_format_json_quote_64bit_integers: 0
459
+ }
460
+ }),
461
+ ...updatePromises,
462
+ ...deletePromises,
463
+ // Update thread's updatedAt timestamp
464
+ this.client.insert({
465
+ table: storage.TABLE_THREADS,
466
+ format: "JSONEachRow",
467
+ values: Array.from(threadIdSet.values()).map((thread) => ({
468
+ id: thread.id,
469
+ resourceId: thread.resourceId,
470
+ title: thread.title,
471
+ metadata: serializeMetadata(thread.metadata),
472
+ createdAt: thread.createdAt,
473
+ updatedAt: (/* @__PURE__ */ new Date()).toISOString()
474
+ })),
475
+ clickhouse_settings: {
476
+ date_time_input_format: "best_effort",
477
+ use_client_time_zone: 1,
478
+ output_format_json_quote_64bit_integers: 0
479
+ }
480
+ })
481
+ ]);
482
+ const list = new agent.MessageList().add(messages, "memory");
483
+ return { messages: list.get.all.db() };
484
+ } catch (error$1) {
485
+ throw new error.MastraError(
486
+ {
487
+ id: "CLICKHOUSE_STORAGE_SAVE_MESSAGES_FAILED",
488
+ domain: error.ErrorDomain.STORAGE,
489
+ category: error.ErrorCategory.THIRD_PARTY
490
+ },
491
+ error$1
492
+ );
493
+ }
494
+ }
495
+ async getThreadById({ threadId }) {
496
+ try {
497
+ const result = await this.client.query({
498
+ query: `SELECT
499
+ id,
500
+ "resourceId",
501
+ title,
502
+ metadata,
503
+ toDateTime64(createdAt, 3) as createdAt,
504
+ toDateTime64(updatedAt, 3) as updatedAt
505
+ FROM "${storage.TABLE_THREADS}"
506
+ WHERE id = {var_id:String}
507
+ ORDER BY updatedAt DESC
508
+ LIMIT 1`,
509
+ query_params: { var_id: threadId },
510
+ clickhouse_settings: {
511
+ // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
512
+ date_time_input_format: "best_effort",
513
+ date_time_output_format: "iso",
514
+ use_client_time_zone: 1,
515
+ output_format_json_quote_64bit_integers: 0
516
+ }
517
+ });
518
+ const rows = await result.json();
519
+ const thread = transformRow(rows.data[0]);
520
+ if (!thread) {
521
+ return null;
522
+ }
523
+ return {
524
+ ...thread,
525
+ metadata: parseMetadata(thread.metadata),
526
+ createdAt: thread.createdAt,
527
+ updatedAt: thread.updatedAt
528
+ };
529
+ } catch (error$1) {
530
+ throw new error.MastraError(
531
+ {
532
+ id: "CLICKHOUSE_STORAGE_GET_THREAD_BY_ID_FAILED",
533
+ domain: error.ErrorDomain.STORAGE,
534
+ category: error.ErrorCategory.THIRD_PARTY,
535
+ details: { threadId }
536
+ },
537
+ error$1
538
+ );
539
+ }
540
+ }
541
+ async saveThread({ thread }) {
542
+ try {
543
+ await this.client.insert({
544
+ table: storage.TABLE_THREADS,
545
+ values: [
546
+ {
547
+ ...thread,
548
+ metadata: serializeMetadata(thread.metadata),
549
+ createdAt: thread.createdAt.toISOString(),
550
+ updatedAt: thread.updatedAt.toISOString()
551
+ }
552
+ ],
553
+ format: "JSONEachRow",
554
+ clickhouse_settings: {
555
+ // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
556
+ date_time_input_format: "best_effort",
557
+ use_client_time_zone: 1,
558
+ output_format_json_quote_64bit_integers: 0
559
+ }
560
+ });
561
+ return thread;
562
+ } catch (error$1) {
563
+ throw new error.MastraError(
564
+ {
565
+ id: "CLICKHOUSE_STORAGE_SAVE_THREAD_FAILED",
566
+ domain: error.ErrorDomain.STORAGE,
567
+ category: error.ErrorCategory.THIRD_PARTY,
568
+ details: { threadId: thread.id }
569
+ },
570
+ error$1
571
+ );
572
+ }
573
+ }
574
+ async updateThread({
575
+ id,
576
+ title,
577
+ metadata
578
+ }) {
579
+ try {
580
+ const existingThread = await this.getThreadById({ threadId: id });
581
+ if (!existingThread) {
582
+ throw new Error(`Thread ${id} not found`);
583
+ }
584
+ const mergedMetadata = {
585
+ ...existingThread.metadata,
586
+ ...metadata
587
+ };
588
+ const updatedThread = {
589
+ ...existingThread,
590
+ title,
591
+ metadata: mergedMetadata,
592
+ updatedAt: /* @__PURE__ */ new Date()
593
+ };
594
+ await this.client.insert({
595
+ table: storage.TABLE_THREADS,
596
+ format: "JSONEachRow",
597
+ values: [
598
+ {
599
+ id: updatedThread.id,
600
+ resourceId: updatedThread.resourceId,
601
+ title: updatedThread.title,
602
+ metadata: serializeMetadata(updatedThread.metadata),
603
+ createdAt: updatedThread.createdAt,
604
+ updatedAt: updatedThread.updatedAt.toISOString()
605
+ }
606
+ ],
607
+ clickhouse_settings: {
608
+ date_time_input_format: "best_effort",
609
+ use_client_time_zone: 1,
610
+ output_format_json_quote_64bit_integers: 0
611
+ }
612
+ });
613
+ return updatedThread;
614
+ } catch (error$1) {
615
+ throw new error.MastraError(
616
+ {
617
+ id: "CLICKHOUSE_STORAGE_UPDATE_THREAD_FAILED",
618
+ domain: error.ErrorDomain.STORAGE,
619
+ category: error.ErrorCategory.THIRD_PARTY,
620
+ details: { threadId: id, title }
621
+ },
622
+ error$1
623
+ );
624
+ }
625
+ }
626
+ async deleteThread({ threadId }) {
627
+ try {
628
+ await this.client.command({
629
+ query: `DELETE FROM "${storage.TABLE_MESSAGES}" WHERE thread_id = {var_thread_id:String};`,
630
+ query_params: { var_thread_id: threadId },
631
+ clickhouse_settings: {
632
+ output_format_json_quote_64bit_integers: 0
633
+ }
634
+ });
635
+ await this.client.command({
636
+ query: `DELETE FROM "${storage.TABLE_THREADS}" WHERE id = {var_id:String};`,
637
+ query_params: { var_id: threadId },
638
+ clickhouse_settings: {
639
+ output_format_json_quote_64bit_integers: 0
640
+ }
641
+ });
642
+ } catch (error$1) {
643
+ throw new error.MastraError(
644
+ {
645
+ id: "CLICKHOUSE_STORAGE_DELETE_THREAD_FAILED",
646
+ domain: error.ErrorDomain.STORAGE,
647
+ category: error.ErrorCategory.THIRD_PARTY,
648
+ details: { threadId }
649
+ },
650
+ error$1
651
+ );
652
+ }
62
653
  }
63
- transformEvalRow(row) {
64
- row = transformRow(row);
65
- const resultValue = JSON.parse(row.result);
66
- const testInfoValue = row.test_info ? JSON.parse(row.test_info) : void 0;
67
- if (!resultValue || typeof resultValue !== "object" || !("score" in resultValue)) {
68
- throw new Error(`Invalid MetricResult format: ${JSON.stringify(resultValue)}`);
654
+ async listThreadsByResourceId(args) {
655
+ const { resourceId, page = 0, perPage: perPageInput, orderBy } = args;
656
+ const perPage = storage.normalizePerPage(perPageInput, 100);
657
+ if (page < 0) {
658
+ throw new error.MastraError(
659
+ {
660
+ id: "STORAGE_CLICKHOUSE_LIST_THREADS_BY_RESOURCE_ID_INVALID_PAGE",
661
+ domain: error.ErrorDomain.STORAGE,
662
+ category: error.ErrorCategory.USER,
663
+ details: { page }
664
+ },
665
+ new Error("page must be >= 0")
666
+ );
667
+ }
668
+ const { offset, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
669
+ const { field, direction } = this.parseOrderBy(orderBy);
670
+ try {
671
+ const countResult = await this.client.query({
672
+ query: `SELECT count(DISTINCT id) as total FROM ${storage.TABLE_THREADS} WHERE resourceId = {resourceId:String}`,
673
+ query_params: { resourceId },
674
+ clickhouse_settings: {
675
+ date_time_input_format: "best_effort",
676
+ date_time_output_format: "iso",
677
+ use_client_time_zone: 1,
678
+ output_format_json_quote_64bit_integers: 0
679
+ }
680
+ });
681
+ const countData = await countResult.json();
682
+ const total = countData.data[0].total;
683
+ if (total === 0) {
684
+ return {
685
+ threads: [],
686
+ total: 0,
687
+ page,
688
+ perPage: perPageForResponse,
689
+ hasMore: false
690
+ };
691
+ }
692
+ const dataResult = await this.client.query({
693
+ query: `
694
+ WITH ranked_threads AS (
695
+ SELECT
696
+ id,
697
+ resourceId,
698
+ title,
699
+ metadata,
700
+ toDateTime64(createdAt, 3) as createdAt,
701
+ toDateTime64(updatedAt, 3) as updatedAt,
702
+ ROW_NUMBER() OVER (PARTITION BY id ORDER BY updatedAt DESC) as row_num
703
+ FROM ${storage.TABLE_THREADS}
704
+ WHERE resourceId = {resourceId:String}
705
+ )
706
+ SELECT
707
+ id,
708
+ resourceId,
709
+ title,
710
+ metadata,
711
+ createdAt,
712
+ updatedAt
713
+ FROM ranked_threads
714
+ WHERE row_num = 1
715
+ ORDER BY "${field}" ${direction === "DESC" ? "DESC" : "ASC"}
716
+ LIMIT {perPage:Int64} OFFSET {offset:Int64}
717
+ `,
718
+ query_params: {
719
+ resourceId,
720
+ perPage,
721
+ offset
722
+ },
723
+ clickhouse_settings: {
724
+ date_time_input_format: "best_effort",
725
+ date_time_output_format: "iso",
726
+ use_client_time_zone: 1,
727
+ output_format_json_quote_64bit_integers: 0
728
+ }
729
+ });
730
+ const rows = await dataResult.json();
731
+ const threads = transformRows(rows.data).map((thread) => ({
732
+ ...thread,
733
+ metadata: parseMetadata(thread.metadata)
734
+ }));
735
+ return {
736
+ threads,
737
+ total,
738
+ page,
739
+ perPage: perPageForResponse,
740
+ hasMore: offset + perPage < total
741
+ };
742
+ } catch (error$1) {
743
+ throw new error.MastraError(
744
+ {
745
+ id: "CLICKHOUSE_STORAGE_LIST_THREADS_BY_RESOURCE_ID_FAILED",
746
+ domain: error.ErrorDomain.STORAGE,
747
+ category: error.ErrorCategory.THIRD_PARTY,
748
+ details: { resourceId, page }
749
+ },
750
+ error$1
751
+ );
69
752
  }
70
- return {
71
- input: row.input,
72
- output: row.output,
73
- result: resultValue,
74
- agentName: row.agent_name,
75
- metricName: row.metric_name,
76
- instructions: row.instructions,
77
- testInfo: testInfoValue,
78
- globalRunId: row.global_run_id,
79
- runId: row.run_id,
80
- createdAt: row.created_at
81
- };
82
753
  }
83
- async getEvalsByAgentName(agentName, type) {
754
+ async updateMessages(args) {
755
+ const { messages } = args;
756
+ if (messages.length === 0) {
757
+ return [];
758
+ }
84
759
  try {
85
- const baseQuery = `SELECT *, toDateTime64(createdAt, 3) as createdAt FROM ${storage.TABLE_EVALS} WHERE agent_name = {var_agent_name:String}`;
86
- const typeCondition = type === "test" ? " AND test_info IS NOT NULL AND JSONExtractString(test_info, 'testPath') IS NOT NULL" : type === "live" ? " AND (test_info IS NULL OR JSONExtractString(test_info, 'testPath') IS NULL)" : "";
87
- const result = await this.db.query({
88
- query: `${baseQuery}${typeCondition} ORDER BY createdAt DESC`,
89
- query_params: { var_agent_name: agentName },
760
+ const messageIds = messages.map((m) => m.id);
761
+ const existingResult = await this.client.query({
762
+ query: `SELECT id, content, role, type, "createdAt", thread_id AS "threadId", "resourceId" FROM ${storage.TABLE_MESSAGES} WHERE id IN (${messageIds.map((_, i) => `{id_${i}:String}`).join(",")})`,
763
+ query_params: messageIds.reduce((acc, m, i) => ({ ...acc, [`id_${i}`]: m }), {}),
90
764
  clickhouse_settings: {
91
765
  date_time_input_format: "best_effort",
92
766
  date_time_output_format: "iso",
@@ -94,131 +768,437 @@ var ClickhouseStore = class extends storage.MastraStorage {
94
768
  output_format_json_quote_64bit_integers: 0
95
769
  }
96
770
  });
97
- if (!result) {
771
+ const existingRows = await existingResult.json();
772
+ const existingMessages = transformRows(existingRows.data);
773
+ if (existingMessages.length === 0) {
98
774
  return [];
99
775
  }
776
+ const parsedExistingMessages = existingMessages.map((msg) => {
777
+ if (typeof msg.content === "string") {
778
+ try {
779
+ msg.content = JSON.parse(msg.content);
780
+ } catch {
781
+ }
782
+ }
783
+ return msg;
784
+ });
785
+ const threadIdsToUpdate = /* @__PURE__ */ new Set();
786
+ const updatePromises = [];
787
+ for (const existingMessage of parsedExistingMessages) {
788
+ const updatePayload = messages.find((m) => m.id === existingMessage.id);
789
+ if (!updatePayload) continue;
790
+ const { id, ...fieldsToUpdate } = updatePayload;
791
+ if (Object.keys(fieldsToUpdate).length === 0) continue;
792
+ threadIdsToUpdate.add(existingMessage.threadId);
793
+ if (updatePayload.threadId && updatePayload.threadId !== existingMessage.threadId) {
794
+ threadIdsToUpdate.add(updatePayload.threadId);
795
+ }
796
+ const setClauses = [];
797
+ const values = {};
798
+ let paramIdx = 1;
799
+ let newContent = null;
800
+ const updatableFields = { ...fieldsToUpdate };
801
+ if (updatableFields.content) {
802
+ const existingContent = existingMessage.content || {};
803
+ const existingMetadata = existingContent.metadata || {};
804
+ const updateMetadata = updatableFields.content.metadata || {};
805
+ newContent = {
806
+ ...existingContent,
807
+ ...updatableFields.content,
808
+ // Deep merge metadata
809
+ metadata: {
810
+ ...existingMetadata,
811
+ ...updateMetadata
812
+ }
813
+ };
814
+ setClauses.push(`content = {var_content_${paramIdx}:String}`);
815
+ values[`var_content_${paramIdx}`] = JSON.stringify(newContent);
816
+ paramIdx++;
817
+ delete updatableFields.content;
818
+ }
819
+ for (const key in updatableFields) {
820
+ if (Object.prototype.hasOwnProperty.call(updatableFields, key)) {
821
+ const dbColumn = key === "threadId" ? "thread_id" : key;
822
+ setClauses.push(`"${dbColumn}" = {var_${key}_${paramIdx}:String}`);
823
+ values[`var_${key}_${paramIdx}`] = updatableFields[key];
824
+ paramIdx++;
825
+ }
826
+ }
827
+ if (setClauses.length > 0) {
828
+ values[`var_id_${paramIdx}`] = id;
829
+ const updateQuery = `
830
+ ALTER TABLE ${storage.TABLE_MESSAGES}
831
+ UPDATE ${setClauses.join(", ")}
832
+ WHERE id = {var_id_${paramIdx}:String}
833
+ `;
834
+ console.info("Updating message:", id, "with query:", updateQuery, "values:", values);
835
+ updatePromises.push(
836
+ this.client.command({
837
+ query: updateQuery,
838
+ query_params: values,
839
+ clickhouse_settings: {
840
+ date_time_input_format: "best_effort",
841
+ use_client_time_zone: 1,
842
+ output_format_json_quote_64bit_integers: 0
843
+ }
844
+ })
845
+ );
846
+ }
847
+ }
848
+ if (updatePromises.length > 0) {
849
+ await Promise.all(updatePromises);
850
+ }
851
+ await this.client.command({
852
+ query: `OPTIMIZE TABLE ${storage.TABLE_MESSAGES} FINAL`,
853
+ clickhouse_settings: {
854
+ date_time_input_format: "best_effort",
855
+ use_client_time_zone: 1,
856
+ output_format_json_quote_64bit_integers: 0
857
+ }
858
+ });
859
+ for (const existingMessage of parsedExistingMessages) {
860
+ const updatePayload = messages.find((m) => m.id === existingMessage.id);
861
+ if (!updatePayload) continue;
862
+ const { id, ...fieldsToUpdate } = updatePayload;
863
+ if (Object.keys(fieldsToUpdate).length === 0) continue;
864
+ const verifyResult = await this.client.query({
865
+ query: `SELECT id, content, role, type, "createdAt", thread_id AS "threadId", "resourceId" FROM ${storage.TABLE_MESSAGES} WHERE id = {messageId:String}`,
866
+ query_params: { messageId: id },
867
+ clickhouse_settings: {
868
+ date_time_input_format: "best_effort",
869
+ date_time_output_format: "iso",
870
+ use_client_time_zone: 1,
871
+ output_format_json_quote_64bit_integers: 0
872
+ }
873
+ });
874
+ const verifyRows = await verifyResult.json();
875
+ if (verifyRows.data.length > 0) {
876
+ const updatedMessage = transformRows(verifyRows.data)[0];
877
+ if (updatedMessage) {
878
+ let needsRetry = false;
879
+ for (const [key, value] of Object.entries(fieldsToUpdate)) {
880
+ if (key === "content") {
881
+ const expectedContent = typeof value === "string" ? value : JSON.stringify(value);
882
+ const actualContent = typeof updatedMessage.content === "string" ? updatedMessage.content : JSON.stringify(updatedMessage.content);
883
+ if (actualContent !== expectedContent) {
884
+ needsRetry = true;
885
+ break;
886
+ }
887
+ } else if (updatedMessage[key] !== value) {
888
+ needsRetry = true;
889
+ break;
890
+ }
891
+ }
892
+ if (needsRetry) {
893
+ console.info("Update not applied correctly, retrying with DELETE + INSERT for message:", id);
894
+ await this.client.command({
895
+ query: `DELETE FROM ${storage.TABLE_MESSAGES} WHERE id = {messageId:String}`,
896
+ query_params: { messageId: id },
897
+ clickhouse_settings: {
898
+ date_time_input_format: "best_effort",
899
+ use_client_time_zone: 1,
900
+ output_format_json_quote_64bit_integers: 0
901
+ }
902
+ });
903
+ let updatedContent = existingMessage.content || {};
904
+ if (fieldsToUpdate.content) {
905
+ const existingContent = existingMessage.content || {};
906
+ const existingMetadata = existingContent.metadata || {};
907
+ const updateMetadata = fieldsToUpdate.content.metadata || {};
908
+ updatedContent = {
909
+ ...existingContent,
910
+ ...fieldsToUpdate.content,
911
+ metadata: {
912
+ ...existingMetadata,
913
+ ...updateMetadata
914
+ }
915
+ };
916
+ }
917
+ const updatedMessageData = {
918
+ ...existingMessage,
919
+ ...fieldsToUpdate,
920
+ content: updatedContent
921
+ };
922
+ await this.client.insert({
923
+ table: storage.TABLE_MESSAGES,
924
+ format: "JSONEachRow",
925
+ values: [
926
+ {
927
+ id: updatedMessageData.id,
928
+ thread_id: updatedMessageData.threadId,
929
+ resourceId: updatedMessageData.resourceId,
930
+ content: typeof updatedMessageData.content === "string" ? updatedMessageData.content : JSON.stringify(updatedMessageData.content),
931
+ createdAt: updatedMessageData.createdAt.toISOString(),
932
+ role: updatedMessageData.role,
933
+ type: updatedMessageData.type || "v2"
934
+ }
935
+ ],
936
+ clickhouse_settings: {
937
+ date_time_input_format: "best_effort",
938
+ use_client_time_zone: 1,
939
+ output_format_json_quote_64bit_integers: 0
940
+ }
941
+ });
942
+ }
943
+ }
944
+ }
945
+ }
946
+ if (threadIdsToUpdate.size > 0) {
947
+ await new Promise((resolve) => setTimeout(resolve, 10));
948
+ const now = (/* @__PURE__ */ new Date()).toISOString().replace("Z", "");
949
+ const threadUpdatePromises = Array.from(threadIdsToUpdate).map(async (threadId) => {
950
+ const threadResult = await this.client.query({
951
+ query: `SELECT id, resourceId, title, metadata, createdAt FROM ${storage.TABLE_THREADS} WHERE id = {threadId:String} ORDER BY updatedAt DESC LIMIT 1`,
952
+ query_params: { threadId },
953
+ clickhouse_settings: {
954
+ date_time_input_format: "best_effort",
955
+ date_time_output_format: "iso",
956
+ use_client_time_zone: 1,
957
+ output_format_json_quote_64bit_integers: 0
958
+ }
959
+ });
960
+ const threadRows = await threadResult.json();
961
+ if (threadRows.data.length > 0) {
962
+ const existingThread = threadRows.data[0];
963
+ await this.client.command({
964
+ query: `DELETE FROM ${storage.TABLE_THREADS} WHERE id = {threadId:String}`,
965
+ query_params: { threadId },
966
+ clickhouse_settings: {
967
+ date_time_input_format: "best_effort",
968
+ use_client_time_zone: 1,
969
+ output_format_json_quote_64bit_integers: 0
970
+ }
971
+ });
972
+ await this.client.insert({
973
+ table: storage.TABLE_THREADS,
974
+ format: "JSONEachRow",
975
+ values: [
976
+ {
977
+ id: existingThread.id,
978
+ resourceId: existingThread.resourceId,
979
+ title: existingThread.title,
980
+ metadata: typeof existingThread.metadata === "string" ? existingThread.metadata : serializeMetadata(existingThread.metadata),
981
+ createdAt: existingThread.createdAt,
982
+ updatedAt: now
983
+ }
984
+ ],
985
+ clickhouse_settings: {
986
+ date_time_input_format: "best_effort",
987
+ use_client_time_zone: 1,
988
+ output_format_json_quote_64bit_integers: 0
989
+ }
990
+ });
991
+ }
992
+ });
993
+ await Promise.all(threadUpdatePromises);
994
+ }
995
+ const updatedMessages = [];
996
+ for (const messageId of messageIds) {
997
+ const updatedResult = await this.client.query({
998
+ query: `SELECT id, content, role, type, "createdAt", thread_id AS "threadId", "resourceId" FROM ${storage.TABLE_MESSAGES} WHERE id = {messageId:String}`,
999
+ query_params: { messageId },
1000
+ clickhouse_settings: {
1001
+ date_time_input_format: "best_effort",
1002
+ date_time_output_format: "iso",
1003
+ use_client_time_zone: 1,
1004
+ output_format_json_quote_64bit_integers: 0
1005
+ }
1006
+ });
1007
+ const updatedRows = await updatedResult.json();
1008
+ if (updatedRows.data.length > 0) {
1009
+ const message = transformRows(updatedRows.data)[0];
1010
+ if (message) {
1011
+ updatedMessages.push(message);
1012
+ }
1013
+ }
1014
+ }
1015
+ return updatedMessages.map((message) => {
1016
+ if (typeof message.content === "string") {
1017
+ try {
1018
+ message.content = JSON.parse(message.content);
1019
+ } catch {
1020
+ }
1021
+ }
1022
+ return message;
1023
+ });
1024
+ } catch (error$1) {
1025
+ throw new error.MastraError(
1026
+ {
1027
+ id: "CLICKHOUSE_STORAGE_UPDATE_MESSAGES_FAILED",
1028
+ domain: error.ErrorDomain.STORAGE,
1029
+ category: error.ErrorCategory.THIRD_PARTY,
1030
+ details: { messageIds: messages.map((m) => m.id).join(",") }
1031
+ },
1032
+ error$1
1033
+ );
1034
+ }
1035
+ }
1036
+ async getResourceById({ resourceId }) {
1037
+ try {
1038
+ const result = await this.client.query({
1039
+ query: `SELECT id, workingMemory, metadata, createdAt, updatedAt FROM ${storage.TABLE_RESOURCES} WHERE id = {resourceId:String} ORDER BY updatedAt DESC LIMIT 1`,
1040
+ query_params: { resourceId },
1041
+ clickhouse_settings: {
1042
+ date_time_input_format: "best_effort",
1043
+ date_time_output_format: "iso",
1044
+ use_client_time_zone: 1,
1045
+ output_format_json_quote_64bit_integers: 0
1046
+ }
1047
+ });
100
1048
  const rows = await result.json();
101
- return rows.data.map((row) => this.transformEvalRow(row));
102
- } catch (error) {
103
- if (error instanceof Error && error.message.includes("no such table")) {
104
- return [];
1049
+ if (rows.data.length === 0) {
1050
+ return null;
105
1051
  }
106
- this.logger.error("Failed to get evals for the specified agent: " + error?.message);
107
- throw error;
1052
+ const resource = rows.data[0];
1053
+ return {
1054
+ id: resource.id,
1055
+ workingMemory: resource.workingMemory && typeof resource.workingMemory === "object" ? JSON.stringify(resource.workingMemory) : resource.workingMemory,
1056
+ metadata: resource.metadata && typeof resource.metadata === "string" ? JSON.parse(resource.metadata) : resource.metadata,
1057
+ createdAt: new Date(resource.createdAt),
1058
+ updatedAt: new Date(resource.updatedAt)
1059
+ };
1060
+ } catch (error$1) {
1061
+ throw new error.MastraError(
1062
+ {
1063
+ id: "CLICKHOUSE_STORAGE_GET_RESOURCE_BY_ID_FAILED",
1064
+ domain: error.ErrorDomain.STORAGE,
1065
+ category: error.ErrorCategory.THIRD_PARTY,
1066
+ details: { resourceId }
1067
+ },
1068
+ error$1
1069
+ );
108
1070
  }
109
1071
  }
110
- async batchInsert({ tableName, records }) {
1072
+ async saveResource({ resource }) {
111
1073
  try {
112
- await this.db.insert({
113
- table: tableName,
114
- values: records.map((record) => ({
115
- ...Object.fromEntries(
116
- Object.entries(record).map(([key, value]) => [
117
- key,
118
- storage.TABLE_SCHEMAS[tableName]?.[key]?.type === "timestamp" ? new Date(value).toISOString() : value
119
- ])
120
- )
121
- })),
1074
+ await this.client.insert({
1075
+ table: storage.TABLE_RESOURCES,
122
1076
  format: "JSONEachRow",
1077
+ values: [
1078
+ {
1079
+ id: resource.id,
1080
+ workingMemory: resource.workingMemory,
1081
+ metadata: JSON.stringify(resource.metadata),
1082
+ createdAt: resource.createdAt.toISOString(),
1083
+ updatedAt: resource.updatedAt.toISOString()
1084
+ }
1085
+ ],
123
1086
  clickhouse_settings: {
124
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
125
1087
  date_time_input_format: "best_effort",
126
1088
  use_client_time_zone: 1,
127
1089
  output_format_json_quote_64bit_integers: 0
128
1090
  }
129
1091
  });
130
- } catch (error) {
131
- console.error(`Error inserting into ${tableName}:`, error);
132
- throw error;
1092
+ return resource;
1093
+ } catch (error$1) {
1094
+ throw new error.MastraError(
1095
+ {
1096
+ id: "CLICKHOUSE_STORAGE_SAVE_RESOURCE_FAILED",
1097
+ domain: error.ErrorDomain.STORAGE,
1098
+ category: error.ErrorCategory.THIRD_PARTY,
1099
+ details: { resourceId: resource.id }
1100
+ },
1101
+ error$1
1102
+ );
133
1103
  }
134
1104
  }
135
- async getTraces({
136
- name,
137
- scope,
138
- page,
139
- perPage,
140
- attributes,
141
- filters,
142
- fromDate,
143
- toDate
1105
+ async updateResource({
1106
+ resourceId,
1107
+ workingMemory,
1108
+ metadata
144
1109
  }) {
145
- const limit = perPage;
146
- const offset = page * perPage;
147
- const args = {};
148
- const conditions = [];
149
- if (name) {
150
- conditions.push(`name LIKE CONCAT({var_name:String}, '%')`);
151
- args.var_name = name;
152
- }
153
- if (scope) {
154
- conditions.push(`scope = {var_scope:String}`);
155
- args.var_scope = scope;
156
- }
157
- if (attributes) {
158
- Object.entries(attributes).forEach(([key, value]) => {
159
- conditions.push(`JSONExtractString(attributes, '${key}') = {var_attr_${key}:String}`);
160
- args[`var_attr_${key}`] = value;
161
- });
162
- }
163
- if (filters) {
164
- Object.entries(filters).forEach(([key, value]) => {
165
- conditions.push(
166
- `${key} = {var_col_${key}:${COLUMN_TYPES[storage.TABLE_SCHEMAS.mastra_traces?.[key]?.type ?? "text"]}}`
167
- );
168
- args[`var_col_${key}`] = value;
169
- });
170
- }
171
- if (fromDate) {
172
- conditions.push(`createdAt >= {var_from_date:DateTime64(3)}`);
173
- args.var_from_date = fromDate.getTime() / 1e3;
174
- }
175
- if (toDate) {
176
- conditions.push(`createdAt <= {var_to_date:DateTime64(3)}`);
177
- args.var_to_date = toDate.getTime() / 1e3;
178
- }
179
- const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
180
- const result = await this.db.query({
181
- query: `SELECT *, toDateTime64(createdAt, 3) as createdAt FROM ${storage.TABLE_TRACES} ${whereClause} ORDER BY "createdAt" DESC LIMIT ${limit} OFFSET ${offset}`,
182
- query_params: args,
183
- clickhouse_settings: {
184
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
185
- date_time_input_format: "best_effort",
186
- date_time_output_format: "iso",
187
- use_client_time_zone: 1,
188
- output_format_json_quote_64bit_integers: 0
1110
+ try {
1111
+ const existingResource = await this.getResourceById({ resourceId });
1112
+ if (!existingResource) {
1113
+ const newResource = {
1114
+ id: resourceId,
1115
+ workingMemory,
1116
+ metadata: metadata || {},
1117
+ createdAt: /* @__PURE__ */ new Date(),
1118
+ updatedAt: /* @__PURE__ */ new Date()
1119
+ };
1120
+ return this.saveResource({ resource: newResource });
189
1121
  }
190
- });
191
- if (!result) {
192
- return [];
1122
+ const updatedResource = {
1123
+ ...existingResource,
1124
+ workingMemory: workingMemory !== void 0 ? workingMemory : existingResource.workingMemory,
1125
+ metadata: {
1126
+ ...existingResource.metadata,
1127
+ ...metadata
1128
+ },
1129
+ updatedAt: /* @__PURE__ */ new Date()
1130
+ };
1131
+ const updateQuery = `
1132
+ ALTER TABLE ${storage.TABLE_RESOURCES}
1133
+ UPDATE workingMemory = {workingMemory:String}, metadata = {metadata:String}, updatedAt = {updatedAt:String}
1134
+ WHERE id = {resourceId:String}
1135
+ `;
1136
+ await this.client.command({
1137
+ query: updateQuery,
1138
+ query_params: {
1139
+ workingMemory: updatedResource.workingMemory,
1140
+ metadata: JSON.stringify(updatedResource.metadata),
1141
+ updatedAt: updatedResource.updatedAt.toISOString().replace("Z", ""),
1142
+ resourceId
1143
+ },
1144
+ clickhouse_settings: {
1145
+ date_time_input_format: "best_effort",
1146
+ use_client_time_zone: 1,
1147
+ output_format_json_quote_64bit_integers: 0
1148
+ }
1149
+ });
1150
+ await this.client.command({
1151
+ query: `OPTIMIZE TABLE ${storage.TABLE_RESOURCES} FINAL`,
1152
+ clickhouse_settings: {
1153
+ date_time_input_format: "best_effort",
1154
+ use_client_time_zone: 1,
1155
+ output_format_json_quote_64bit_integers: 0
1156
+ }
1157
+ });
1158
+ return updatedResource;
1159
+ } catch (error$1) {
1160
+ throw new error.MastraError(
1161
+ {
1162
+ id: "CLICKHOUSE_STORAGE_UPDATE_RESOURCE_FAILED",
1163
+ domain: error.ErrorDomain.STORAGE,
1164
+ category: error.ErrorCategory.THIRD_PARTY,
1165
+ details: { resourceId }
1166
+ },
1167
+ error$1
1168
+ );
193
1169
  }
194
- const resp = await result.json();
195
- const rows = resp.data;
196
- return rows.map((row) => ({
197
- id: row.id,
198
- parentSpanId: row.parentSpanId,
199
- traceId: row.traceId,
200
- name: row.name,
201
- scope: row.scope,
202
- kind: row.kind,
203
- status: safelyParseJSON(row.status),
204
- events: safelyParseJSON(row.events),
205
- links: safelyParseJSON(row.links),
206
- attributes: safelyParseJSON(row.attributes),
207
- startTime: row.startTime,
208
- endTime: row.endTime,
209
- other: safelyParseJSON(row.other),
210
- createdAt: row.createdAt
211
- }));
212
1170
  }
213
- async optimizeTable({ tableName }) {
214
- await this.db.command({
215
- query: `OPTIMIZE TABLE ${tableName} FINAL`
216
- });
1171
+ };
1172
+ var StoreOperationsClickhouse = class extends storage.StoreOperations {
1173
+ ttl;
1174
+ client;
1175
+ constructor({ client, ttl }) {
1176
+ super();
1177
+ this.ttl = ttl;
1178
+ this.client = client;
217
1179
  }
218
- async materializeTtl({ tableName }) {
219
- await this.db.command({
220
- query: `ALTER TABLE ${tableName} MATERIALIZE TTL;`
1180
+ async hasColumn(table, column) {
1181
+ const result = await this.client.query({
1182
+ query: `DESCRIBE TABLE ${table}`,
1183
+ format: "JSONEachRow"
221
1184
  });
1185
+ const columns = await result.json();
1186
+ return columns.some((c) => c.name === column);
1187
+ }
1188
+ getSqlType(type) {
1189
+ switch (type) {
1190
+ case "text":
1191
+ return "String";
1192
+ case "timestamp":
1193
+ return "DateTime64(3)";
1194
+ case "integer":
1195
+ case "bigint":
1196
+ return "Int64";
1197
+ case "jsonb":
1198
+ return "String";
1199
+ default:
1200
+ return super.getSqlType(type);
1201
+ }
222
1202
  }
223
1203
  async createTable({
224
1204
  tableName,
@@ -228,32 +1208,33 @@ var ClickhouseStore = class extends storage.MastraStorage {
228
1208
  const columns = Object.entries(schema).map(([name, def]) => {
229
1209
  const constraints = [];
230
1210
  if (!def.nullable) constraints.push("NOT NULL");
1211
+ if (name === "metadata" && def.type === "text" && def.nullable) {
1212
+ constraints.push("DEFAULT '{}'");
1213
+ }
231
1214
  const columnTtl = this.ttl?.[tableName]?.columns?.[name];
232
1215
  return `"${name}" ${COLUMN_TYPES[def.type]} ${constraints.join(" ")} ${columnTtl ? `TTL toDateTime(${columnTtl.ttlKey ?? "createdAt"}) + INTERVAL ${columnTtl.interval} ${columnTtl.unit}` : ""}`;
233
1216
  }).join(",\n");
234
1217
  const rowTtl = this.ttl?.[tableName]?.row;
235
1218
  const sql = tableName === storage.TABLE_WORKFLOW_SNAPSHOT ? `
236
- CREATE TABLE IF NOT EXISTS ${tableName} (
237
- ${["id String"].concat(columns)}
238
- )
239
- ENGINE = ${TABLE_ENGINES[tableName]}
240
- PARTITION BY "createdAt"
241
- PRIMARY KEY (createdAt, run_id, workflow_name)
242
- ORDER BY (createdAt, run_id, workflow_name)
243
- ${rowTtl ? `TTL toDateTime(${rowTtl.ttlKey ?? "createdAt"}) + INTERVAL ${rowTtl.interval} ${rowTtl.unit}` : ""}
244
- SETTINGS index_granularity = 8192
245
- ` : `
246
- CREATE TABLE IF NOT EXISTS ${tableName} (
247
- ${columns}
248
- )
249
- ENGINE = ${TABLE_ENGINES[tableName]}
250
- PARTITION BY "createdAt"
251
- PRIMARY KEY (createdAt, ${tableName === storage.TABLE_EVALS ? "run_id" : "id"})
252
- ORDER BY (createdAt, ${tableName === storage.TABLE_EVALS ? "run_id" : "id"})
253
- ${this.ttl?.[tableName]?.row ? `TTL toDateTime(createdAt) + INTERVAL ${this.ttl[tableName].row.interval} ${this.ttl[tableName].row.unit}` : ""}
254
- SETTINGS index_granularity = 8192
255
- `;
256
- await this.db.query({
1219
+ CREATE TABLE IF NOT EXISTS ${tableName} (
1220
+ ${["id String"].concat(columns)}
1221
+ )
1222
+ ENGINE = ${TABLE_ENGINES[tableName] ?? "MergeTree()"}
1223
+ PRIMARY KEY (createdAt, run_id, workflow_name)
1224
+ ORDER BY (createdAt, run_id, workflow_name)
1225
+ ${rowTtl ? `TTL toDateTime(${rowTtl.ttlKey ?? "createdAt"}) + INTERVAL ${rowTtl.interval} ${rowTtl.unit}` : ""}
1226
+ SETTINGS index_granularity = 8192
1227
+ ` : `
1228
+ CREATE TABLE IF NOT EXISTS ${tableName} (
1229
+ ${columns}
1230
+ )
1231
+ ENGINE = ${TABLE_ENGINES[tableName] ?? "MergeTree()"}
1232
+ PRIMARY KEY (createdAt, ${"id"})
1233
+ ORDER BY (createdAt, ${"id"})
1234
+ ${this.ttl?.[tableName]?.row ? `TTL toDateTime(createdAt) + INTERVAL ${this.ttl[tableName].row.interval} ${this.ttl[tableName].row.unit}` : ""}
1235
+ SETTINGS index_granularity = 8192
1236
+ `;
1237
+ await this.client.query({
257
1238
  query: sql,
258
1239
  clickhouse_settings: {
259
1240
  // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
@@ -263,14 +1244,60 @@ var ClickhouseStore = class extends storage.MastraStorage {
263
1244
  output_format_json_quote_64bit_integers: 0
264
1245
  }
265
1246
  });
266
- } catch (error) {
267
- console.error(`Error creating table ${tableName}:`, error);
268
- throw error;
1247
+ } catch (error$1) {
1248
+ throw new error.MastraError(
1249
+ {
1250
+ id: "CLICKHOUSE_STORAGE_CREATE_TABLE_FAILED",
1251
+ domain: error.ErrorDomain.STORAGE,
1252
+ category: error.ErrorCategory.THIRD_PARTY,
1253
+ details: { tableName }
1254
+ },
1255
+ error$1
1256
+ );
1257
+ }
1258
+ }
1259
+ async alterTable({
1260
+ tableName,
1261
+ schema,
1262
+ ifNotExists
1263
+ }) {
1264
+ try {
1265
+ const describeSql = `DESCRIBE TABLE ${tableName}`;
1266
+ const result = await this.client.query({
1267
+ query: describeSql
1268
+ });
1269
+ const rows = await result.json();
1270
+ const existingColumnNames = new Set(rows.data.map((row) => row.name.toLowerCase()));
1271
+ for (const columnName of ifNotExists) {
1272
+ if (!existingColumnNames.has(columnName.toLowerCase()) && schema[columnName]) {
1273
+ const columnDef = schema[columnName];
1274
+ let sqlType = this.getSqlType(columnDef.type);
1275
+ if (columnDef.nullable !== false) {
1276
+ sqlType = `Nullable(${sqlType})`;
1277
+ }
1278
+ const defaultValue = columnDef.nullable === false ? this.getDefaultValue(columnDef.type) : "";
1279
+ const alterSql = `ALTER TABLE ${tableName} ADD COLUMN IF NOT EXISTS "${columnName}" ${sqlType} ${defaultValue}`.trim();
1280
+ await this.client.query({
1281
+ query: alterSql
1282
+ });
1283
+ this.logger?.debug?.(`Added column ${columnName} to table ${tableName}`);
1284
+ }
1285
+ }
1286
+ } catch (error$1) {
1287
+ throw new error.MastraError(
1288
+ {
1289
+ id: "CLICKHOUSE_STORAGE_ALTER_TABLE_FAILED",
1290
+ domain: error.ErrorDomain.STORAGE,
1291
+ category: error.ErrorCategory.THIRD_PARTY,
1292
+ details: { tableName }
1293
+ },
1294
+ error$1
1295
+ );
269
1296
  }
270
1297
  }
271
1298
  async clearTable({ tableName }) {
272
1299
  try {
273
- await this.db.query({
1300
+ await this.client.query({
274
1301
  query: `TRUNCATE TABLE ${tableName}`,
275
1302
  clickhouse_settings: {
276
1303
  // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
@@ -280,20 +1307,34 @@ var ClickhouseStore = class extends storage.MastraStorage {
280
1307
  output_format_json_quote_64bit_integers: 0
281
1308
  }
282
1309
  });
283
- } catch (error) {
284
- console.error(`Error clearing table ${tableName}:`, error);
285
- throw error;
1310
+ } catch (error$1) {
1311
+ throw new error.MastraError(
1312
+ {
1313
+ id: "CLICKHOUSE_STORAGE_CLEAR_TABLE_FAILED",
1314
+ domain: error.ErrorDomain.STORAGE,
1315
+ category: error.ErrorCategory.THIRD_PARTY,
1316
+ details: { tableName }
1317
+ },
1318
+ error$1
1319
+ );
286
1320
  }
287
1321
  }
1322
+ async dropTable({ tableName }) {
1323
+ await this.client.query({
1324
+ query: `DROP TABLE IF EXISTS ${tableName}`
1325
+ });
1326
+ }
288
1327
  async insert({ tableName, record }) {
1328
+ const createdAt = (record.createdAt || record.created_at || /* @__PURE__ */ new Date()).toISOString();
1329
+ const updatedAt = (record.updatedAt || /* @__PURE__ */ new Date()).toISOString();
289
1330
  try {
290
- await this.db.insert({
1331
+ const result = await this.client.insert({
291
1332
  table: tableName,
292
1333
  values: [
293
1334
  {
294
1335
  ...record,
295
- createdAt: record.createdAt.toISOString(),
296
- updatedAt: record.updatedAt.toISOString()
1336
+ createdAt,
1337
+ updatedAt
297
1338
  }
298
1339
  ],
299
1340
  format: "JSONEachRow",
@@ -304,13 +1345,55 @@ var ClickhouseStore = class extends storage.MastraStorage {
304
1345
  use_client_time_zone: 1
305
1346
  }
306
1347
  });
307
- } catch (error) {
308
- console.error(`Error inserting into ${tableName}:`, error);
309
- throw error;
1348
+ console.info("INSERT RESULT", result);
1349
+ } catch (error$1) {
1350
+ throw new error.MastraError(
1351
+ {
1352
+ id: "CLICKHOUSE_STORAGE_INSERT_FAILED",
1353
+ domain: error.ErrorDomain.STORAGE,
1354
+ category: error.ErrorCategory.THIRD_PARTY,
1355
+ details: { tableName }
1356
+ },
1357
+ error$1
1358
+ );
1359
+ }
1360
+ }
1361
+ async batchInsert({ tableName, records }) {
1362
+ const recordsToBeInserted = records.map((record) => ({
1363
+ ...Object.fromEntries(
1364
+ Object.entries(record).map(([key, value]) => [
1365
+ key,
1366
+ storage.TABLE_SCHEMAS[tableName]?.[key]?.type === "timestamp" ? new Date(value).toISOString() : value
1367
+ ])
1368
+ )
1369
+ }));
1370
+ try {
1371
+ await this.client.insert({
1372
+ table: tableName,
1373
+ values: recordsToBeInserted,
1374
+ format: "JSONEachRow",
1375
+ clickhouse_settings: {
1376
+ // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
1377
+ date_time_input_format: "best_effort",
1378
+ use_client_time_zone: 1,
1379
+ output_format_json_quote_64bit_integers: 0
1380
+ }
1381
+ });
1382
+ } catch (error$1) {
1383
+ throw new error.MastraError(
1384
+ {
1385
+ id: "CLICKHOUSE_STORAGE_BATCH_INSERT_FAILED",
1386
+ domain: error.ErrorDomain.STORAGE,
1387
+ category: error.ErrorCategory.THIRD_PARTY,
1388
+ details: { tableName }
1389
+ },
1390
+ error$1
1391
+ );
310
1392
  }
311
1393
  }
312
1394
  async load({ tableName, keys }) {
313
1395
  try {
1396
+ const engine = TABLE_ENGINES[tableName] ?? "MergeTree()";
314
1397
  const keyEntries = Object.entries(keys);
315
1398
  const conditions = keyEntries.map(
316
1399
  ([key]) => `"${key}" = {var_${key}:${COLUMN_TYPES[storage.TABLE_SCHEMAS[tableName]?.[key]?.type ?? "text"]}}`
@@ -318,8 +1401,10 @@ var ClickhouseStore = class extends storage.MastraStorage {
318
1401
  const values = keyEntries.reduce((acc, [key, value]) => {
319
1402
  return { ...acc, [`var_${key}`]: value };
320
1403
  }, {});
321
- const result = await this.db.query({
322
- query: `SELECT *, toDateTime64(createdAt, 3) as createdAt, toDateTime64(updatedAt, 3) as updatedAt FROM ${tableName} ${TABLE_ENGINES[tableName].startsWith("ReplacingMergeTree") ? "FINAL" : ""} WHERE ${conditions}`,
1404
+ const hasUpdatedAt = storage.TABLE_SCHEMAS[tableName]?.updatedAt;
1405
+ const selectClause = `SELECT *, toDateTime64(createdAt, 3) as createdAt${hasUpdatedAt ? ", toDateTime64(updatedAt, 3) as updatedAt" : ""}`;
1406
+ const result = await this.client.query({
1407
+ query: `${selectClause} FROM ${tableName} ${engine.startsWith("ReplacingMergeTree") ? "FINAL" : ""} WHERE ${conditions} ORDER BY createdAt DESC LIMIT 1`,
323
1408
  query_params: values,
324
1409
  clickhouse_settings: {
325
1410
  // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
@@ -345,25 +1430,58 @@ var ClickhouseStore = class extends storage.MastraStorage {
345
1430
  }
346
1431
  const data = transformRow(rows.data[0]);
347
1432
  return data;
348
- } catch (error) {
349
- console.error(`Error loading from ${tableName}:`, error);
350
- throw error;
1433
+ } catch (error$1) {
1434
+ throw new error.MastraError(
1435
+ {
1436
+ id: "CLICKHOUSE_STORAGE_LOAD_FAILED",
1437
+ domain: error.ErrorDomain.STORAGE,
1438
+ category: error.ErrorCategory.THIRD_PARTY,
1439
+ details: { tableName }
1440
+ },
1441
+ error$1
1442
+ );
351
1443
  }
352
1444
  }
353
- async getThreadById({ threadId }) {
1445
+ };
1446
+ var ScoresStorageClickhouse = class extends storage.ScoresStorage {
1447
+ client;
1448
+ operations;
1449
+ constructor({ client, operations }) {
1450
+ super();
1451
+ this.client = client;
1452
+ this.operations = operations;
1453
+ }
1454
+ transformScoreRow(row) {
1455
+ const scorer = storage.safelyParseJSON(row.scorer);
1456
+ const preprocessStepResult = storage.safelyParseJSON(row.preprocessStepResult);
1457
+ const analyzeStepResult = storage.safelyParseJSON(row.analyzeStepResult);
1458
+ const metadata = storage.safelyParseJSON(row.metadata);
1459
+ const input = storage.safelyParseJSON(row.input);
1460
+ const output = storage.safelyParseJSON(row.output);
1461
+ const additionalContext = storage.safelyParseJSON(row.additionalContext);
1462
+ const requestContext = storage.safelyParseJSON(row.requestContext);
1463
+ const entity = storage.safelyParseJSON(row.entity);
1464
+ return {
1465
+ ...row,
1466
+ scorer,
1467
+ preprocessStepResult,
1468
+ analyzeStepResult,
1469
+ metadata,
1470
+ input,
1471
+ output,
1472
+ additionalContext,
1473
+ requestContext,
1474
+ entity,
1475
+ createdAt: new Date(row.createdAt),
1476
+ updatedAt: new Date(row.updatedAt)
1477
+ };
1478
+ }
1479
+ async getScoreById({ id }) {
354
1480
  try {
355
- const result = await this.db.query({
356
- query: `SELECT
357
- id,
358
- "resourceId",
359
- title,
360
- metadata,
361
- toDateTime64(createdAt, 3) as createdAt,
362
- toDateTime64(updatedAt, 3) as updatedAt
363
- FROM "${storage.TABLE_THREADS}"
364
- FINAL
365
- WHERE id = {var_id:String}`,
366
- query_params: { var_id: threadId },
1481
+ const result = await this.client.query({
1482
+ query: `SELECT * FROM ${storage.TABLE_SCORERS} WHERE id = {var_id:String}`,
1483
+ query_params: { var_id: id },
1484
+ format: "JSONEachRow",
367
1485
  clickhouse_settings: {
368
1486
  // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
369
1487
  date_time_input_format: "best_effort",
@@ -372,223 +1490,269 @@ var ClickhouseStore = class extends storage.MastraStorage {
372
1490
  output_format_json_quote_64bit_integers: 0
373
1491
  }
374
1492
  });
375
- const rows = await result.json();
376
- const thread = transformRow(rows.data[0]);
377
- if (!thread) {
1493
+ const resultJson = await result.json();
1494
+ if (!Array.isArray(resultJson) || resultJson.length === 0) {
378
1495
  return null;
379
1496
  }
380
- return {
381
- ...thread,
382
- metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata,
383
- createdAt: thread.createdAt,
384
- updatedAt: thread.updatedAt
385
- };
386
- } catch (error) {
387
- console.error(`Error getting thread ${threadId}:`, error);
388
- throw error;
1497
+ return this.transformScoreRow(resultJson[0]);
1498
+ } catch (error$1) {
1499
+ throw new error.MastraError(
1500
+ {
1501
+ id: "CLICKHOUSE_STORAGE_GET_SCORE_BY_ID_FAILED",
1502
+ domain: error.ErrorDomain.STORAGE,
1503
+ category: error.ErrorCategory.THIRD_PARTY,
1504
+ details: { scoreId: id }
1505
+ },
1506
+ error$1
1507
+ );
389
1508
  }
390
1509
  }
391
- async getThreadsByResourceId({ resourceId }) {
1510
+ async saveScore(score) {
1511
+ let parsedScore;
392
1512
  try {
393
- const result = await this.db.query({
394
- query: `SELECT
395
- id,
396
- "resourceId",
397
- title,
398
- metadata,
399
- toDateTime64(createdAt, 3) as createdAt,
400
- toDateTime64(updatedAt, 3) as updatedAt
401
- FROM "${storage.TABLE_THREADS}"
402
- WHERE "resourceId" = {var_resourceId:String}`,
403
- query_params: { var_resourceId: resourceId },
404
- clickhouse_settings: {
405
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
406
- date_time_input_format: "best_effort",
407
- date_time_output_format: "iso",
408
- use_client_time_zone: 1,
409
- output_format_json_quote_64bit_integers: 0
410
- }
411
- });
412
- const rows = await result.json();
413
- const threads = transformRows(rows.data);
414
- return threads.map((thread) => ({
415
- ...thread,
416
- metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata,
417
- createdAt: thread.createdAt,
418
- updatedAt: thread.updatedAt
419
- }));
420
- } catch (error) {
421
- console.error(`Error getting threads for resource ${resourceId}:`, error);
422
- throw error;
1513
+ parsedScore = evals.saveScorePayloadSchema.parse(score);
1514
+ } catch (error$1) {
1515
+ throw new error.MastraError(
1516
+ {
1517
+ id: "CLICKHOUSE_STORAGE_SAVE_SCORE_FAILED_INVALID_SCORE_PAYLOAD",
1518
+ domain: error.ErrorDomain.STORAGE,
1519
+ category: error.ErrorCategory.USER,
1520
+ details: { scoreId: score.id }
1521
+ },
1522
+ error$1
1523
+ );
423
1524
  }
424
- }
425
- async saveThread({ thread }) {
426
1525
  try {
427
- await this.db.insert({
428
- table: storage.TABLE_THREADS,
429
- values: [
430
- {
431
- ...thread,
432
- createdAt: thread.createdAt.toISOString(),
433
- updatedAt: thread.updatedAt.toISOString()
434
- }
435
- ],
1526
+ const record = {
1527
+ ...parsedScore
1528
+ };
1529
+ await this.client.insert({
1530
+ table: storage.TABLE_SCORERS,
1531
+ values: [record],
436
1532
  format: "JSONEachRow",
437
1533
  clickhouse_settings: {
438
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
439
1534
  date_time_input_format: "best_effort",
440
1535
  use_client_time_zone: 1,
441
1536
  output_format_json_quote_64bit_integers: 0
442
1537
  }
443
1538
  });
444
- return thread;
445
- } catch (error) {
446
- console.error("Error saving thread:", error);
447
- throw error;
1539
+ return { score };
1540
+ } catch (error$1) {
1541
+ throw new error.MastraError(
1542
+ {
1543
+ id: "CLICKHOUSE_STORAGE_SAVE_SCORE_FAILED",
1544
+ domain: error.ErrorDomain.STORAGE,
1545
+ category: error.ErrorCategory.THIRD_PARTY,
1546
+ details: { scoreId: score.id }
1547
+ },
1548
+ error$1
1549
+ );
448
1550
  }
449
1551
  }
450
- async updateThread({
451
- id,
452
- title,
453
- metadata
1552
+ async listScoresByRunId({
1553
+ runId,
1554
+ pagination
454
1555
  }) {
455
1556
  try {
456
- const existingThread = await this.getThreadById({ threadId: id });
457
- if (!existingThread) {
458
- throw new Error(`Thread ${id} not found`);
1557
+ const countResult = await this.client.query({
1558
+ query: `SELECT COUNT(*) as count FROM ${storage.TABLE_SCORERS} WHERE runId = {var_runId:String}`,
1559
+ query_params: { var_runId: runId },
1560
+ format: "JSONEachRow"
1561
+ });
1562
+ const countRows = await countResult.json();
1563
+ let total = 0;
1564
+ if (Array.isArray(countRows) && countRows.length > 0 && countRows[0]) {
1565
+ const countObj = countRows[0];
1566
+ total = Number(countObj.count);
459
1567
  }
460
- const mergedMetadata = {
461
- ...existingThread.metadata,
462
- ...metadata
463
- };
464
- const updatedThread = {
465
- ...existingThread,
466
- title,
467
- metadata: mergedMetadata,
468
- updatedAt: /* @__PURE__ */ new Date()
469
- };
470
- await this.db.insert({
471
- table: storage.TABLE_THREADS,
472
- values: [
473
- {
474
- ...updatedThread,
475
- updatedAt: updatedThread.updatedAt.toISOString()
476
- }
477
- ],
1568
+ const { page, perPage: perPageInput } = pagination;
1569
+ if (!total) {
1570
+ return {
1571
+ pagination: {
1572
+ total: 0,
1573
+ page,
1574
+ perPage: perPageInput,
1575
+ hasMore: false
1576
+ },
1577
+ scores: []
1578
+ };
1579
+ }
1580
+ const perPage = storage.normalizePerPage(perPageInput, 100);
1581
+ const { offset: start, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
1582
+ const limitValue = perPageInput === false ? total : perPage;
1583
+ const end = perPageInput === false ? total : start + perPage;
1584
+ const result = await this.client.query({
1585
+ query: `SELECT * FROM ${storage.TABLE_SCORERS} WHERE runId = {var_runId:String} ORDER BY createdAt DESC LIMIT {var_limit:Int64} OFFSET {var_offset:Int64}`,
1586
+ query_params: {
1587
+ var_runId: runId,
1588
+ var_limit: limitValue,
1589
+ var_offset: start
1590
+ },
478
1591
  format: "JSONEachRow",
479
1592
  clickhouse_settings: {
480
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
481
1593
  date_time_input_format: "best_effort",
1594
+ date_time_output_format: "iso",
482
1595
  use_client_time_zone: 1,
483
1596
  output_format_json_quote_64bit_integers: 0
484
1597
  }
485
1598
  });
486
- return updatedThread;
487
- } catch (error) {
488
- console.error("Error updating thread:", error);
489
- throw error;
1599
+ const rows = await result.json();
1600
+ const scores = Array.isArray(rows) ? rows.map((row) => this.transformScoreRow(row)) : [];
1601
+ return {
1602
+ pagination: {
1603
+ total,
1604
+ page,
1605
+ perPage: perPageForResponse,
1606
+ hasMore: end < total
1607
+ },
1608
+ scores
1609
+ };
1610
+ } catch (error$1) {
1611
+ throw new error.MastraError(
1612
+ {
1613
+ id: "CLICKHOUSE_STORAGE_GET_SCORES_BY_RUN_ID_FAILED",
1614
+ domain: error.ErrorDomain.STORAGE,
1615
+ category: error.ErrorCategory.THIRD_PARTY,
1616
+ details: { runId }
1617
+ },
1618
+ error$1
1619
+ );
490
1620
  }
491
1621
  }
492
- async deleteThread({ threadId }) {
1622
+ async listScoresByScorerId({
1623
+ scorerId,
1624
+ entityId,
1625
+ entityType,
1626
+ source,
1627
+ pagination
1628
+ }) {
1629
+ let whereClause = `scorerId = {var_scorerId:String}`;
1630
+ if (entityId) {
1631
+ whereClause += ` AND entityId = {var_entityId:String}`;
1632
+ }
1633
+ if (entityType) {
1634
+ whereClause += ` AND entityType = {var_entityType:String}`;
1635
+ }
1636
+ if (source) {
1637
+ whereClause += ` AND source = {var_source:String}`;
1638
+ }
493
1639
  try {
494
- await this.db.command({
495
- query: `DELETE FROM "${storage.TABLE_MESSAGES}" WHERE thread_id = '${threadId}';`,
496
- query_params: { var_thread_id: threadId },
497
- clickhouse_settings: {
498
- output_format_json_quote_64bit_integers: 0
499
- }
1640
+ const countResult = await this.client.query({
1641
+ query: `SELECT COUNT(*) as count FROM ${storage.TABLE_SCORERS} WHERE ${whereClause}`,
1642
+ query_params: {
1643
+ var_scorerId: scorerId,
1644
+ var_entityId: entityId,
1645
+ var_entityType: entityType,
1646
+ var_source: source
1647
+ },
1648
+ format: "JSONEachRow"
500
1649
  });
501
- await this.db.command({
502
- query: `DELETE FROM "${storage.TABLE_THREADS}" WHERE id = {var_id:String};`,
503
- query_params: { var_id: threadId },
1650
+ const countRows = await countResult.json();
1651
+ let total = 0;
1652
+ if (Array.isArray(countRows) && countRows.length > 0 && countRows[0]) {
1653
+ const countObj = countRows[0];
1654
+ total = Number(countObj.count);
1655
+ }
1656
+ const { page, perPage: perPageInput } = pagination;
1657
+ if (!total) {
1658
+ return {
1659
+ pagination: {
1660
+ total: 0,
1661
+ page,
1662
+ perPage: perPageInput,
1663
+ hasMore: false
1664
+ },
1665
+ scores: []
1666
+ };
1667
+ }
1668
+ const perPage = storage.normalizePerPage(perPageInput, 100);
1669
+ const { offset: start, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
1670
+ const limitValue = perPageInput === false ? total : perPage;
1671
+ const end = perPageInput === false ? total : start + perPage;
1672
+ const result = await this.client.query({
1673
+ query: `SELECT * FROM ${storage.TABLE_SCORERS} WHERE ${whereClause} ORDER BY createdAt DESC LIMIT {var_limit:Int64} OFFSET {var_offset:Int64}`,
1674
+ query_params: {
1675
+ var_scorerId: scorerId,
1676
+ var_limit: limitValue,
1677
+ var_offset: start,
1678
+ var_entityId: entityId,
1679
+ var_entityType: entityType,
1680
+ var_source: source
1681
+ },
1682
+ format: "JSONEachRow",
504
1683
  clickhouse_settings: {
1684
+ date_time_input_format: "best_effort",
1685
+ date_time_output_format: "iso",
1686
+ use_client_time_zone: 1,
505
1687
  output_format_json_quote_64bit_integers: 0
506
1688
  }
507
1689
  });
508
- } catch (error) {
509
- console.error("Error deleting thread:", error);
510
- throw error;
1690
+ const rows = await result.json();
1691
+ const scores = Array.isArray(rows) ? rows.map((row) => this.transformScoreRow(row)) : [];
1692
+ return {
1693
+ pagination: {
1694
+ total,
1695
+ page,
1696
+ perPage: perPageForResponse,
1697
+ hasMore: end < total
1698
+ },
1699
+ scores
1700
+ };
1701
+ } catch (error$1) {
1702
+ throw new error.MastraError(
1703
+ {
1704
+ id: "CLICKHOUSE_STORAGE_GET_SCORES_BY_SCORER_ID_FAILED",
1705
+ domain: error.ErrorDomain.STORAGE,
1706
+ category: error.ErrorCategory.THIRD_PARTY,
1707
+ details: { scorerId }
1708
+ },
1709
+ error$1
1710
+ );
511
1711
  }
512
1712
  }
513
- async getMessages({ threadId, selectBy }) {
1713
+ async listScoresByEntityId({
1714
+ entityId,
1715
+ entityType,
1716
+ pagination
1717
+ }) {
514
1718
  try {
515
- const messages = [];
516
- const limit = typeof selectBy?.last === `number` ? selectBy.last : 40;
517
- const include = selectBy?.include || [];
518
- if (include.length) {
519
- const includeResult = await this.db.query({
520
- query: `
521
- WITH ordered_messages AS (
522
- SELECT
523
- *,
524
- toDateTime64(createdAt, 3) as createdAt,
525
- toDateTime64(updatedAt, 3) as updatedAt,
526
- ROW_NUMBER() OVER (ORDER BY "createdAt" DESC) as row_num
527
- FROM "${storage.TABLE_MESSAGES}"
528
- WHERE thread_id = {var_thread_id:String}
529
- )
530
- SELECT
531
- m.id AS id,
532
- m.content as content,
533
- m.role as role,
534
- m.type as type,
535
- m.createdAt as createdAt,
536
- m.updatedAt as updatedAt,
537
- m.thread_id AS "threadId"
538
- FROM ordered_messages m
539
- WHERE m.id = ANY({var_include:Array(String)})
540
- OR EXISTS (
541
- SELECT 1 FROM ordered_messages target
542
- WHERE target.id = ANY({var_include:Array(String)})
543
- AND (
544
- -- Get previous messages based on the max withPreviousMessages
545
- (m.row_num <= target.row_num + {var_withPreviousMessages:Int64} AND m.row_num > target.row_num)
546
- OR
547
- -- Get next messages based on the max withNextMessages
548
- (m.row_num >= target.row_num - {var_withNextMessages:Int64} AND m.row_num < target.row_num)
549
- )
550
- )
551
- ORDER BY m."createdAt" DESC
552
- `,
553
- query_params: {
554
- var_thread_id: threadId,
555
- var_include: include.map((i) => i.id),
556
- var_withPreviousMessages: Math.max(...include.map((i) => i.withPreviousMessages || 0)),
557
- var_withNextMessages: Math.max(...include.map((i) => i.withNextMessages || 0))
1719
+ const countResult = await this.client.query({
1720
+ query: `SELECT COUNT(*) as count FROM ${storage.TABLE_SCORERS} WHERE entityId = {var_entityId:String} AND entityType = {var_entityType:String}`,
1721
+ query_params: { var_entityId: entityId, var_entityType: entityType },
1722
+ format: "JSONEachRow"
1723
+ });
1724
+ const countRows = await countResult.json();
1725
+ let total = 0;
1726
+ if (Array.isArray(countRows) && countRows.length > 0 && countRows[0]) {
1727
+ const countObj = countRows[0];
1728
+ total = Number(countObj.count);
1729
+ }
1730
+ const { page, perPage: perPageInput } = pagination;
1731
+ if (!total) {
1732
+ return {
1733
+ pagination: {
1734
+ total: 0,
1735
+ page,
1736
+ perPage: perPageInput,
1737
+ hasMore: false
558
1738
  },
559
- clickhouse_settings: {
560
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
561
- date_time_input_format: "best_effort",
562
- date_time_output_format: "iso",
563
- use_client_time_zone: 1,
564
- output_format_json_quote_64bit_integers: 0
565
- }
566
- });
567
- const rows2 = await includeResult.json();
568
- messages.push(...transformRows(rows2.data));
1739
+ scores: []
1740
+ };
569
1741
  }
570
- const result = await this.db.query({
571
- query: `
572
- SELECT
573
- id,
574
- content,
575
- role,
576
- type,
577
- toDateTime64(createdAt, 3) as createdAt,
578
- thread_id AS "threadId"
579
- FROM "${storage.TABLE_MESSAGES}"
580
- WHERE thread_id = {threadId:String}
581
- AND id NOT IN ({exclude:Array(String)})
582
- ORDER BY "createdAt" DESC
583
- LIMIT {limit:Int64}
584
- `,
1742
+ const perPage = storage.normalizePerPage(perPageInput, 100);
1743
+ const { offset: start, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
1744
+ const limitValue = perPageInput === false ? total : perPage;
1745
+ const end = perPageInput === false ? total : start + perPage;
1746
+ const result = await this.client.query({
1747
+ query: `SELECT * FROM ${storage.TABLE_SCORERS} WHERE entityId = {var_entityId:String} AND entityType = {var_entityType:String} ORDER BY createdAt DESC LIMIT {var_limit:Int64} OFFSET {var_offset:Int64}`,
585
1748
  query_params: {
586
- threadId,
587
- exclude: messages.map((m) => m.id),
588
- limit
1749
+ var_entityId: entityId,
1750
+ var_entityType: entityType,
1751
+ var_limit: limitValue,
1752
+ var_offset: start
589
1753
  },
1754
+ format: "JSONEachRow",
590
1755
  clickhouse_settings: {
591
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
592
1756
  date_time_input_format: "best_effort",
593
1757
  date_time_output_format: "iso",
594
1758
  use_client_time_zone: 1,
@@ -596,80 +1760,154 @@ var ClickhouseStore = class extends storage.MastraStorage {
596
1760
  }
597
1761
  });
598
1762
  const rows = await result.json();
599
- messages.push(...transformRows(rows.data));
600
- messages.sort((a, b) => new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime());
601
- messages.forEach((message) => {
602
- if (typeof message.content === "string") {
603
- try {
604
- message.content = JSON.parse(message.content);
605
- } catch {
606
- }
607
- }
608
- });
609
- return messages;
610
- } catch (error) {
611
- console.error("Error getting messages:", error);
612
- throw error;
1763
+ const scores = Array.isArray(rows) ? rows.map((row) => this.transformScoreRow(row)) : [];
1764
+ return {
1765
+ pagination: {
1766
+ total,
1767
+ page,
1768
+ perPage: perPageForResponse,
1769
+ hasMore: end < total
1770
+ },
1771
+ scores
1772
+ };
1773
+ } catch (error$1) {
1774
+ throw new error.MastraError(
1775
+ {
1776
+ id: "CLICKHOUSE_STORAGE_GET_SCORES_BY_ENTITY_ID_FAILED",
1777
+ domain: error.ErrorDomain.STORAGE,
1778
+ category: error.ErrorCategory.THIRD_PARTY,
1779
+ details: { entityId, entityType }
1780
+ },
1781
+ error$1
1782
+ );
613
1783
  }
614
1784
  }
615
- async saveMessages({ messages }) {
616
- if (messages.length === 0) return messages;
1785
+ async listScoresBySpan({
1786
+ traceId,
1787
+ spanId,
1788
+ pagination
1789
+ }) {
617
1790
  try {
618
- const threadId = messages[0]?.threadId;
619
- if (!threadId) {
620
- throw new Error("Thread ID is required");
1791
+ const countResult = await this.client.query({
1792
+ query: `SELECT COUNT(*) as count FROM ${storage.TABLE_SCORERS} WHERE traceId = {var_traceId:String} AND spanId = {var_spanId:String}`,
1793
+ query_params: {
1794
+ var_traceId: traceId,
1795
+ var_spanId: spanId
1796
+ },
1797
+ format: "JSONEachRow"
1798
+ });
1799
+ const countRows = await countResult.json();
1800
+ let total = 0;
1801
+ if (Array.isArray(countRows) && countRows.length > 0 && countRows[0]) {
1802
+ const countObj = countRows[0];
1803
+ total = Number(countObj.count);
621
1804
  }
622
- const thread = await this.getThreadById({ threadId });
623
- if (!thread) {
624
- throw new Error(`Thread ${threadId} not found`);
1805
+ const { page, perPage: perPageInput } = pagination;
1806
+ if (!total) {
1807
+ return {
1808
+ pagination: {
1809
+ total: 0,
1810
+ page,
1811
+ perPage: perPageInput,
1812
+ hasMore: false
1813
+ },
1814
+ scores: []
1815
+ };
625
1816
  }
626
- await this.db.insert({
627
- table: storage.TABLE_MESSAGES,
1817
+ const perPage = storage.normalizePerPage(perPageInput, 100);
1818
+ const { offset: start, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
1819
+ const limitValue = perPageInput === false ? total : perPage;
1820
+ const end = perPageInput === false ? total : start + perPage;
1821
+ const result = await this.client.query({
1822
+ query: `SELECT * FROM ${storage.TABLE_SCORERS} WHERE traceId = {var_traceId:String} AND spanId = {var_spanId:String} ORDER BY createdAt DESC LIMIT {var_limit:Int64} OFFSET {var_offset:Int64}`,
1823
+ query_params: {
1824
+ var_traceId: traceId,
1825
+ var_spanId: spanId,
1826
+ var_limit: limitValue,
1827
+ var_offset: start
1828
+ },
628
1829
  format: "JSONEachRow",
629
- values: messages.map((message) => ({
630
- id: message.id,
631
- thread_id: threadId,
632
- content: typeof message.content === "string" ? message.content : JSON.stringify(message.content),
633
- createdAt: message.createdAt.toISOString(),
634
- role: message.role,
635
- type: message.type
636
- })),
637
1830
  clickhouse_settings: {
638
- // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
639
1831
  date_time_input_format: "best_effort",
1832
+ date_time_output_format: "iso",
640
1833
  use_client_time_zone: 1,
641
1834
  output_format_json_quote_64bit_integers: 0
642
1835
  }
643
1836
  });
644
- return messages;
645
- } catch (error) {
646
- console.error("Error saving messages:", error);
647
- throw error;
1837
+ const rows = await result.json();
1838
+ const scores = Array.isArray(rows) ? rows.map((row) => this.transformScoreRow(row)) : [];
1839
+ return {
1840
+ pagination: {
1841
+ total,
1842
+ page,
1843
+ perPage: perPageForResponse,
1844
+ hasMore: end < total
1845
+ },
1846
+ scores
1847
+ };
1848
+ } catch (error$1) {
1849
+ throw new error.MastraError(
1850
+ {
1851
+ id: "CLICKHOUSE_STORAGE_GET_SCORES_BY_SPAN_FAILED",
1852
+ domain: error.ErrorDomain.STORAGE,
1853
+ category: error.ErrorCategory.THIRD_PARTY,
1854
+ details: { traceId, spanId }
1855
+ },
1856
+ error$1
1857
+ );
648
1858
  }
649
1859
  }
1860
+ };
1861
+ var WorkflowsStorageClickhouse = class extends storage.WorkflowsStorage {
1862
+ client;
1863
+ operations;
1864
+ constructor({ client, operations }) {
1865
+ super();
1866
+ this.operations = operations;
1867
+ this.client = client;
1868
+ }
1869
+ updateWorkflowResults({
1870
+ // workflowName,
1871
+ // runId,
1872
+ // stepId,
1873
+ // result,
1874
+ // requestContext,
1875
+ }) {
1876
+ throw new Error("Method not implemented.");
1877
+ }
1878
+ updateWorkflowState({
1879
+ // workflowName,
1880
+ // runId,
1881
+ // opts,
1882
+ }) {
1883
+ throw new Error("Method not implemented.");
1884
+ }
650
1885
  async persistWorkflowSnapshot({
651
1886
  workflowName,
652
1887
  runId,
1888
+ resourceId,
653
1889
  snapshot
654
1890
  }) {
655
1891
  try {
656
- const currentSnapshot = await this.load({
1892
+ const currentSnapshot = await this.operations.load({
657
1893
  tableName: storage.TABLE_WORKFLOW_SNAPSHOT,
658
1894
  keys: { workflow_name: workflowName, run_id: runId }
659
1895
  });
660
1896
  const now = /* @__PURE__ */ new Date();
661
1897
  const persisting = currentSnapshot ? {
662
1898
  ...currentSnapshot,
1899
+ resourceId,
663
1900
  snapshot: JSON.stringify(snapshot),
664
1901
  updatedAt: now.toISOString()
665
1902
  } : {
666
1903
  workflow_name: workflowName,
667
1904
  run_id: runId,
1905
+ resourceId,
668
1906
  snapshot: JSON.stringify(snapshot),
669
1907
  createdAt: now.toISOString(),
670
1908
  updatedAt: now.toISOString()
671
1909
  };
672
- await this.db.insert({
1910
+ await this.client.insert({
673
1911
  table: storage.TABLE_WORKFLOW_SNAPSHOT,
674
1912
  format: "JSONEachRow",
675
1913
  values: [persisting],
@@ -680,9 +1918,16 @@ var ClickhouseStore = class extends storage.MastraStorage {
680
1918
  output_format_json_quote_64bit_integers: 0
681
1919
  }
682
1920
  });
683
- } catch (error) {
684
- console.error("Error persisting workflow snapshot:", error);
685
- throw error;
1921
+ } catch (error$1) {
1922
+ throw new error.MastraError(
1923
+ {
1924
+ id: "CLICKHOUSE_STORAGE_PERSIST_WORKFLOW_SNAPSHOT_FAILED",
1925
+ domain: error.ErrorDomain.STORAGE,
1926
+ category: error.ErrorCategory.THIRD_PARTY,
1927
+ details: { workflowName, runId }
1928
+ },
1929
+ error$1
1930
+ );
686
1931
  }
687
1932
  }
688
1933
  async loadWorkflowSnapshot({
@@ -690,7 +1935,7 @@ var ClickhouseStore = class extends storage.MastraStorage {
690
1935
  runId
691
1936
  }) {
692
1937
  try {
693
- const result = await this.load({
1938
+ const result = await this.operations.load({
694
1939
  tableName: storage.TABLE_WORKFLOW_SNAPSHOT,
695
1940
  keys: {
696
1941
  workflow_name: workflowName,
@@ -701,9 +1946,16 @@ var ClickhouseStore = class extends storage.MastraStorage {
701
1946
  return null;
702
1947
  }
703
1948
  return result.snapshot;
704
- } catch (error) {
705
- console.error("Error loading workflow snapshot:", error);
706
- throw error;
1949
+ } catch (error$1) {
1950
+ throw new error.MastraError(
1951
+ {
1952
+ id: "CLICKHOUSE_STORAGE_LOAD_WORKFLOW_SNAPSHOT_FAILED",
1953
+ domain: error.ErrorDomain.STORAGE,
1954
+ category: error.ErrorCategory.THIRD_PARTY,
1955
+ details: { workflowName, runId }
1956
+ },
1957
+ error$1
1958
+ );
707
1959
  }
708
1960
  }
709
1961
  parseWorkflowRun(row) {
@@ -724,13 +1976,14 @@ var ClickhouseStore = class extends storage.MastraStorage {
724
1976
  resourceId: row.resourceId
725
1977
  };
726
1978
  }
727
- async getWorkflowRuns({
1979
+ async listWorkflowRuns({
728
1980
  workflowName,
729
1981
  fromDate,
730
1982
  toDate,
731
- limit,
732
- offset,
733
- resourceId
1983
+ page,
1984
+ perPage,
1985
+ resourceId,
1986
+ status
734
1987
  } = {}) {
735
1988
  try {
736
1989
  const conditions = [];
@@ -739,8 +1992,12 @@ var ClickhouseStore = class extends storage.MastraStorage {
739
1992
  conditions.push(`workflow_name = {var_workflow_name:String}`);
740
1993
  values.var_workflow_name = workflowName;
741
1994
  }
1995
+ if (status) {
1996
+ conditions.push(`JSONExtractString(snapshot, 'status') = {var_status:String}`);
1997
+ values.var_status = status;
1998
+ }
742
1999
  if (resourceId) {
743
- const hasResourceId = await this.hasColumn(storage.TABLE_WORKFLOW_SNAPSHOT, "resourceId");
2000
+ const hasResourceId = await this.operations.hasColumn(storage.TABLE_WORKFLOW_SNAPSHOT, "resourceId");
744
2001
  if (hasResourceId) {
745
2002
  conditions.push(`resourceId = {var_resourceId:String}`);
746
2003
  values.var_resourceId = resourceId;
@@ -757,11 +2014,14 @@ var ClickhouseStore = class extends storage.MastraStorage {
757
2014
  values.var_to_date = toDate.getTime() / 1e3;
758
2015
  }
759
2016
  const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
760
- const limitClause = limit !== void 0 ? `LIMIT ${limit}` : "";
761
- const offsetClause = offset !== void 0 ? `OFFSET ${offset}` : "";
2017
+ const usePagination = perPage !== void 0 && page !== void 0;
2018
+ const normalizedPerPage = usePagination ? storage.normalizePerPage(perPage, Number.MAX_SAFE_INTEGER) : 0;
2019
+ const offset = usePagination ? page * normalizedPerPage : 0;
2020
+ const limitClause = usePagination ? `LIMIT ${normalizedPerPage}` : "";
2021
+ const offsetClause = usePagination ? `OFFSET ${offset}` : "";
762
2022
  let total = 0;
763
- if (limit !== void 0 && offset !== void 0) {
764
- const countResult = await this.db.query({
2023
+ if (usePagination) {
2024
+ const countResult = await this.client.query({
765
2025
  query: `SELECT COUNT(*) as count FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[storage.TABLE_WORKFLOW_SNAPSHOT].startsWith("ReplacingMergeTree") ? "FINAL" : ""} ${whereClause}`,
766
2026
  query_params: values,
767
2027
  format: "JSONEachRow"
@@ -769,21 +2029,21 @@ var ClickhouseStore = class extends storage.MastraStorage {
769
2029
  const countRows = await countResult.json();
770
2030
  total = Number(countRows[0]?.count ?? 0);
771
2031
  }
772
- const result = await this.db.query({
2032
+ const result = await this.client.query({
773
2033
  query: `
774
- SELECT
775
- workflow_name,
776
- run_id,
777
- snapshot,
778
- toDateTime64(createdAt, 3) as createdAt,
779
- toDateTime64(updatedAt, 3) as updatedAt,
780
- resourceId
781
- FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[storage.TABLE_WORKFLOW_SNAPSHOT].startsWith("ReplacingMergeTree") ? "FINAL" : ""}
782
- ${whereClause}
783
- ORDER BY createdAt DESC
784
- ${limitClause}
785
- ${offsetClause}
786
- `,
2034
+ SELECT
2035
+ workflow_name,
2036
+ run_id,
2037
+ snapshot,
2038
+ toDateTime64(createdAt, 3) as createdAt,
2039
+ toDateTime64(updatedAt, 3) as updatedAt,
2040
+ resourceId
2041
+ FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[storage.TABLE_WORKFLOW_SNAPSHOT].startsWith("ReplacingMergeTree") ? "FINAL" : ""}
2042
+ ${whereClause}
2043
+ ORDER BY createdAt DESC
2044
+ ${limitClause}
2045
+ ${offsetClause}
2046
+ `,
787
2047
  query_params: values,
788
2048
  format: "JSONEachRow"
789
2049
  });
@@ -793,9 +2053,16 @@ var ClickhouseStore = class extends storage.MastraStorage {
793
2053
  return this.parseWorkflowRun(row);
794
2054
  });
795
2055
  return { runs, total: total || runs.length };
796
- } catch (error) {
797
- console.error("Error getting workflow runs:", error);
798
- throw error;
2056
+ } catch (error$1) {
2057
+ throw new error.MastraError(
2058
+ {
2059
+ id: "CLICKHOUSE_STORAGE_LIST_WORKFLOW_RUNS_FAILED",
2060
+ domain: error.ErrorDomain.STORAGE,
2061
+ category: error.ErrorCategory.THIRD_PARTY,
2062
+ details: { workflowName: workflowName ?? "", resourceId: resourceId ?? "" }
2063
+ },
2064
+ error$1
2065
+ );
799
2066
  }
800
2067
  }
801
2068
  async getWorkflowRunById({
@@ -814,18 +2081,19 @@ var ClickhouseStore = class extends storage.MastraStorage {
814
2081
  values.var_workflow_name = workflowName;
815
2082
  }
816
2083
  const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
817
- const result = await this.db.query({
2084
+ const result = await this.client.query({
818
2085
  query: `
819
- SELECT
820
- workflow_name,
821
- run_id,
822
- snapshot,
823
- toDateTime64(createdAt, 3) as createdAt,
824
- toDateTime64(updatedAt, 3) as updatedAt,
825
- resourceId
826
- FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[storage.TABLE_WORKFLOW_SNAPSHOT].startsWith("ReplacingMergeTree") ? "FINAL" : ""}
827
- ${whereClause}
828
- `,
2086
+ SELECT
2087
+ workflow_name,
2088
+ run_id,
2089
+ snapshot,
2090
+ toDateTime64(createdAt, 3) as createdAt,
2091
+ toDateTime64(updatedAt, 3) as updatedAt,
2092
+ resourceId
2093
+ FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} ${TABLE_ENGINES[storage.TABLE_WORKFLOW_SNAPSHOT].startsWith("ReplacingMergeTree") ? "FINAL" : ""}
2094
+ ${whereClause}
2095
+ ORDER BY createdAt DESC LIMIT 1
2096
+ `,
829
2097
  query_params: values,
830
2098
  format: "JSONEachRow"
831
2099
  });
@@ -834,18 +2102,231 @@ var ClickhouseStore = class extends storage.MastraStorage {
834
2102
  return null;
835
2103
  }
836
2104
  return this.parseWorkflowRun(resultJson[0]);
837
- } catch (error) {
838
- console.error("Error getting workflow run by ID:", error);
839
- throw error;
2105
+ } catch (error$1) {
2106
+ throw new error.MastraError(
2107
+ {
2108
+ id: "CLICKHOUSE_STORAGE_GET_WORKFLOW_RUN_BY_ID_FAILED",
2109
+ domain: error.ErrorDomain.STORAGE,
2110
+ category: error.ErrorCategory.THIRD_PARTY,
2111
+ details: { runId: runId ?? "", workflowName: workflowName ?? "" }
2112
+ },
2113
+ error$1
2114
+ );
840
2115
  }
841
2116
  }
842
- async hasColumn(table, column) {
843
- const result = await this.db.query({
844
- query: `DESCRIBE TABLE ${table}`,
845
- format: "JSONEachRow"
2117
+ };
2118
+
2119
+ // src/storage/index.ts
2120
+ var ClickhouseStore = class extends storage.MastraStorage {
2121
+ db;
2122
+ ttl = {};
2123
+ stores;
2124
+ constructor(config) {
2125
+ super({ id: config.id, name: "ClickhouseStore" });
2126
+ this.db = client.createClient({
2127
+ url: config.url,
2128
+ username: config.username,
2129
+ password: config.password,
2130
+ clickhouse_settings: {
2131
+ date_time_input_format: "best_effort",
2132
+ date_time_output_format: "iso",
2133
+ // This is crucial
2134
+ use_client_time_zone: 1,
2135
+ output_format_json_quote_64bit_integers: 0
2136
+ }
846
2137
  });
847
- const columns = await result.json();
848
- return columns.some((c) => c.name === column);
2138
+ this.ttl = config.ttl;
2139
+ const operations = new StoreOperationsClickhouse({ client: this.db, ttl: this.ttl });
2140
+ const workflows = new WorkflowsStorageClickhouse({ client: this.db, operations });
2141
+ const scores = new ScoresStorageClickhouse({ client: this.db, operations });
2142
+ const memory = new MemoryStorageClickhouse({ client: this.db, operations });
2143
+ this.stores = {
2144
+ operations,
2145
+ workflows,
2146
+ scores,
2147
+ memory
2148
+ };
2149
+ }
2150
+ get supports() {
2151
+ return {
2152
+ selectByIncludeResourceScope: true,
2153
+ resourceWorkingMemory: true,
2154
+ hasColumn: true,
2155
+ createTable: true,
2156
+ deleteMessages: false,
2157
+ listScoresBySpan: true
2158
+ };
2159
+ }
2160
+ async batchInsert({ tableName, records }) {
2161
+ await this.stores.operations.batchInsert({ tableName, records });
2162
+ }
2163
+ async optimizeTable({ tableName }) {
2164
+ try {
2165
+ await this.db.command({
2166
+ query: `OPTIMIZE TABLE ${tableName} FINAL`
2167
+ });
2168
+ } catch (error$1) {
2169
+ throw new error.MastraError(
2170
+ {
2171
+ id: "CLICKHOUSE_STORAGE_OPTIMIZE_TABLE_FAILED",
2172
+ domain: error.ErrorDomain.STORAGE,
2173
+ category: error.ErrorCategory.THIRD_PARTY,
2174
+ details: { tableName }
2175
+ },
2176
+ error$1
2177
+ );
2178
+ }
2179
+ }
2180
+ async materializeTtl({ tableName }) {
2181
+ try {
2182
+ await this.db.command({
2183
+ query: `ALTER TABLE ${tableName} MATERIALIZE TTL;`
2184
+ });
2185
+ } catch (error$1) {
2186
+ throw new error.MastraError(
2187
+ {
2188
+ id: "CLICKHOUSE_STORAGE_MATERIALIZE_TTL_FAILED",
2189
+ domain: error.ErrorDomain.STORAGE,
2190
+ category: error.ErrorCategory.THIRD_PARTY,
2191
+ details: { tableName }
2192
+ },
2193
+ error$1
2194
+ );
2195
+ }
2196
+ }
2197
+ async createTable({
2198
+ tableName,
2199
+ schema
2200
+ }) {
2201
+ return this.stores.operations.createTable({ tableName, schema });
2202
+ }
2203
+ async dropTable({ tableName }) {
2204
+ return this.stores.operations.dropTable({ tableName });
2205
+ }
2206
+ async alterTable({
2207
+ tableName,
2208
+ schema,
2209
+ ifNotExists
2210
+ }) {
2211
+ return this.stores.operations.alterTable({ tableName, schema, ifNotExists });
2212
+ }
2213
+ async clearTable({ tableName }) {
2214
+ return this.stores.operations.clearTable({ tableName });
2215
+ }
2216
+ async insert({ tableName, record }) {
2217
+ return this.stores.operations.insert({ tableName, record });
2218
+ }
2219
+ async load({ tableName, keys }) {
2220
+ return this.stores.operations.load({ tableName, keys });
2221
+ }
2222
+ async updateWorkflowResults({
2223
+ workflowName,
2224
+ runId,
2225
+ stepId,
2226
+ result,
2227
+ requestContext
2228
+ }) {
2229
+ return this.stores.workflows.updateWorkflowResults({ workflowName, runId, stepId, result, requestContext });
2230
+ }
2231
+ async updateWorkflowState({
2232
+ workflowName,
2233
+ runId,
2234
+ opts
2235
+ }) {
2236
+ return this.stores.workflows.updateWorkflowState({ workflowName, runId, opts });
2237
+ }
2238
+ async persistWorkflowSnapshot({
2239
+ workflowName,
2240
+ runId,
2241
+ resourceId,
2242
+ snapshot
2243
+ }) {
2244
+ return this.stores.workflows.persistWorkflowSnapshot({ workflowName, runId, resourceId, snapshot });
2245
+ }
2246
+ async loadWorkflowSnapshot({
2247
+ workflowName,
2248
+ runId
2249
+ }) {
2250
+ return this.stores.workflows.loadWorkflowSnapshot({ workflowName, runId });
2251
+ }
2252
+ async listWorkflowRuns(args = {}) {
2253
+ return this.stores.workflows.listWorkflowRuns(args);
2254
+ }
2255
+ async getWorkflowRunById({
2256
+ runId,
2257
+ workflowName
2258
+ }) {
2259
+ return this.stores.workflows.getWorkflowRunById({ runId, workflowName });
2260
+ }
2261
+ async getThreadById({ threadId }) {
2262
+ return this.stores.memory.getThreadById({ threadId });
2263
+ }
2264
+ async saveThread({ thread }) {
2265
+ return this.stores.memory.saveThread({ thread });
2266
+ }
2267
+ async updateThread({
2268
+ id,
2269
+ title,
2270
+ metadata
2271
+ }) {
2272
+ return this.stores.memory.updateThread({ id, title, metadata });
2273
+ }
2274
+ async deleteThread({ threadId }) {
2275
+ return this.stores.memory.deleteThread({ threadId });
2276
+ }
2277
+ async saveMessages(args) {
2278
+ return this.stores.memory.saveMessages(args);
2279
+ }
2280
+ async updateMessages(args) {
2281
+ return this.stores.memory.updateMessages(args);
2282
+ }
2283
+ async getResourceById({ resourceId }) {
2284
+ return this.stores.memory.getResourceById({ resourceId });
2285
+ }
2286
+ async saveResource({ resource }) {
2287
+ return this.stores.memory.saveResource({ resource });
2288
+ }
2289
+ async updateResource({
2290
+ resourceId,
2291
+ workingMemory,
2292
+ metadata
2293
+ }) {
2294
+ return this.stores.memory.updateResource({ resourceId, workingMemory, metadata });
2295
+ }
2296
+ async getScoreById({ id }) {
2297
+ return this.stores.scores.getScoreById({ id });
2298
+ }
2299
+ async saveScore(_score) {
2300
+ return this.stores.scores.saveScore(_score);
2301
+ }
2302
+ async listScoresByRunId({
2303
+ runId,
2304
+ pagination
2305
+ }) {
2306
+ return this.stores.scores.listScoresByRunId({ runId, pagination });
2307
+ }
2308
+ async listScoresByEntityId({
2309
+ entityId,
2310
+ entityType,
2311
+ pagination
2312
+ }) {
2313
+ return this.stores.scores.listScoresByEntityId({ entityId, entityType, pagination });
2314
+ }
2315
+ async listScoresByScorerId({
2316
+ scorerId,
2317
+ pagination,
2318
+ entityId,
2319
+ entityType,
2320
+ source
2321
+ }) {
2322
+ return this.stores.scores.listScoresByScorerId({ scorerId, pagination, entityId, entityType, source });
2323
+ }
2324
+ async listScoresBySpan({
2325
+ traceId,
2326
+ spanId,
2327
+ pagination
2328
+ }) {
2329
+ return this.stores.scores.listScoresBySpan({ traceId, spanId, pagination });
849
2330
  }
850
2331
  async close() {
851
2332
  await this.db.close();
@@ -855,3 +2336,5 @@ var ClickhouseStore = class extends storage.MastraStorage {
855
2336
  exports.COLUMN_TYPES = COLUMN_TYPES;
856
2337
  exports.ClickhouseStore = ClickhouseStore;
857
2338
  exports.TABLE_ENGINES = TABLE_ENGINES;
2339
+ //# sourceMappingURL=index.cjs.map
2340
+ //# sourceMappingURL=index.cjs.map
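
Illustrative usage sketch (not part of the published package contents above): the hunks in this diff split the old ClickhouseStore into domain stores (StoreOperationsClickhouse, MemoryStorageClickhouse, ScoresStorageClickhouse, WorkflowsStorageClickhouse) and add a scores API plus paginated listing methods. A minimal consumer of the new surface might look like the sketch below. The connection settings, the id values, and the assumption that the package entry point resolves to this dist/index.cjs are placeholders, not taken from the diff.

  const { ClickhouseStore } = require('@mastra/clickhouse');

  // Placeholder connection settings for a local ClickHouse instance.
  const store = new ClickhouseStore({
    id: 'clickhouse',             // forwarded to MastraStorage via super({ id, name }) in the new constructor
    url: 'http://localhost:8123',
    username: 'default',
    password: '',
    ttl: {},                      // passed through to StoreOperationsClickhouse
  });

  async function main() {
    // getScoreById delegates to ScoresStorageClickhouse and resolves to null when no row matches.
    const score = await store.getScoreById({ id: 'score-123' }); // hypothetical id

    // listScoresByRunId is paginated: perPage is normalized via storage.normalizePerPage(perPage, 100)
    // and hasMore is true while offset + perPage < total, as computed in the hunks above.
    const { scores, pagination } = await store.listScoresByRunId({
      runId: 'run-123', // hypothetical runId
      pagination: { page: 0, perPage: 20 },
    });
    console.log(score, scores.length, pagination.total, pagination.hasMore);

    await store.close();
  }

  main().catch(console.error);

Note that updateWorkflowResults and updateWorkflowState still throw "Method not implemented." in this version, so the sketch avoids them.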