@mastra/clickhouse 0.0.0-vnext-20251104230439 → 0.0.0-vnext-20251119160359

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,6 +1,6 @@
 import { createClient } from '@clickhouse/client';
 import { MastraError, ErrorCategory, ErrorDomain } from '@mastra/core/error';
-import { TABLE_AI_SPANS, TABLE_RESOURCES, TABLE_SCORERS, TABLE_THREADS, TABLE_TRACES, TABLE_WORKFLOW_SNAPSHOT, TABLE_MESSAGES, MastraStorage, StoreOperations, TABLE_SCHEMAS, WorkflowsStorage, normalizePerPage, ScoresStorage, safelyParseJSON, calculatePagination, MemoryStorage, resolveMessageLimit } from '@mastra/core/storage';
+import { TABLE_SPANS, TABLE_RESOURCES, TABLE_SCORERS, TABLE_THREADS, TABLE_TRACES, TABLE_WORKFLOW_SNAPSHOT, TABLE_MESSAGES, MastraStorage, StoreOperations, TABLE_SCHEMAS, WorkflowsStorage, normalizePerPage, ScoresStorage, safelyParseJSON, calculatePagination, MemoryStorage } from '@mastra/core/storage';
 import { MessageList } from '@mastra/core/agent';
 import { saveScorePayloadSchema } from '@mastra/core/evals';
 
@@ -12,8 +12,8 @@ var TABLE_ENGINES = {
   [TABLE_THREADS]: `ReplacingMergeTree()`,
   [TABLE_SCORERS]: `MergeTree()`,
   [TABLE_RESOURCES]: `ReplacingMergeTree()`,
-  // TODO: verify this is the correct engine for ai spans when implementing clickhouse storage
-  [TABLE_AI_SPANS]: `ReplacingMergeTree()`
+  // TODO: verify this is the correct engine for Spans when implementing clickhouse storage
+  [TABLE_SPANS]: `ReplacingMergeTree()`
 };
 var COLUMN_TYPES = {
   text: "String",
@@ -45,6 +45,24 @@ function transformRows(rows) {
 }
 
 // src/storage/domains/memory/index.ts
+function serializeMetadata(metadata) {
+  if (!metadata || Object.keys(metadata).length === 0) {
+    return "{}";
+  }
+  return JSON.stringify(metadata);
+}
+function parseMetadata(metadata) {
+  if (!metadata) return {};
+  if (typeof metadata === "object") return metadata;
+  if (typeof metadata !== "string") return {};
+  const trimmed = metadata.trim();
+  if (trimmed === "" || trimmed === "null") return {};
+  try {
+    return JSON.parse(trimmed);
+  } catch {
+    return {};
+  }
+}
 var MemoryStorageClickhouse = class extends MemoryStorage {
   client;
   operations;
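
Note: the new serializeMetadata/parseMetadata helpers normalize thread metadata on its way into and out of the String metadata column. A minimal illustration of their behavior (the example values here are ours, not part of the package):

  serializeMetadata(undefined);            // "{}"
  serializeMetadata({ topic: "billing" }); // '{"topic":"billing"}'
  parseMetadata('{"topic":"billing"}');    // { topic: "billing" }
  parseMetadata("null");                   // {}
  parseMetadata("not json");               // {} (parse errors fall back to an empty object)
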
@@ -53,135 +71,6 @@ var MemoryStorageClickhouse = class extends MemoryStorage {
     this.client = client;
     this.operations = operations;
   }
-  async getMessages({
-    threadId,
-    resourceId,
-    selectBy
-  }) {
-    try {
-      if (!threadId.trim()) throw new Error("threadId must be a non-empty string");
-      const messages = [];
-      const limit = resolveMessageLimit({ last: selectBy?.last, defaultLimit: 40 });
-      const include = selectBy?.include || [];
-      if (include.length) {
-        const unionQueries = [];
-        const params = [];
-        let paramIdx = 1;
-        for (const inc of include) {
-          const { id, withPreviousMessages = 0, withNextMessages = 0 } = inc;
-          const searchId = inc.threadId || threadId;
-          unionQueries.push(`
-            SELECT * FROM (
-              WITH numbered_messages AS (
-                SELECT
-                  id, content, role, type, "createdAt", thread_id, "resourceId",
-                  ROW_NUMBER() OVER (ORDER BY "createdAt" ASC) as row_num
-                FROM "${TABLE_MESSAGES}"
-                WHERE thread_id = {var_thread_id_${paramIdx}:String}
-              ),
-              target_positions AS (
-                SELECT row_num as target_pos
-                FROM numbered_messages
-                WHERE id = {var_include_id_${paramIdx}:String}
-              )
-              SELECT DISTINCT m.id, m.content, m.role, m.type, m."createdAt", m.thread_id AS "threadId"
-              FROM numbered_messages m
-              CROSS JOIN target_positions t
-              WHERE m.row_num BETWEEN (t.target_pos - {var_withPreviousMessages_${paramIdx}:Int64}) AND (t.target_pos + {var_withNextMessages_${paramIdx}:Int64})
-            ) AS query_${paramIdx}
-          `);
-          params.push(
-            { [`var_thread_id_${paramIdx}`]: searchId },
-            { [`var_include_id_${paramIdx}`]: id },
-            { [`var_withPreviousMessages_${paramIdx}`]: withPreviousMessages },
-            { [`var_withNextMessages_${paramIdx}`]: withNextMessages }
-          );
-          paramIdx++;
-        }
-        const finalQuery = unionQueries.join(" UNION ALL ") + ' ORDER BY "createdAt" DESC';
-        const mergedParams = params.reduce((acc, paramObj) => ({ ...acc, ...paramObj }), {});
-        const includeResult = await this.client.query({
-          query: finalQuery,
-          query_params: mergedParams,
-          clickhouse_settings: {
-            date_time_input_format: "best_effort",
-            date_time_output_format: "iso",
-            use_client_time_zone: 1,
-            output_format_json_quote_64bit_integers: 0
-          }
-        });
-        const rows2 = await includeResult.json();
-        const includedMessages = transformRows(rows2.data);
-        const seen = /* @__PURE__ */ new Set();
-        const dedupedMessages = includedMessages.filter((message) => {
-          if (seen.has(message.id)) return false;
-          seen.add(message.id);
-          return true;
-        });
-        messages.push(...dedupedMessages);
-      }
-      let whereClause = "WHERE thread_id = {threadId:String}";
-      const queryParams = {
-        threadId,
-        exclude: messages.map((m) => m.id),
-        limit
-      };
-      if (resourceId) {
-        whereClause += ' AND "resourceId" = {resourceId:String}';
-        queryParams.resourceId = resourceId;
-      }
-      const result = await this.client.query({
-        query: `
-          SELECT
-            id,
-            content,
-            role,
-            type,
-            toDateTime64(createdAt, 3) as createdAt,
-            thread_id AS "threadId"
-          FROM "${TABLE_MESSAGES}"
-          ${whereClause}
-          AND id NOT IN ({exclude:Array(String)})
-          ORDER BY "createdAt" DESC
-          LIMIT {limit:Int64}
-        `,
-        query_params: queryParams,
-        clickhouse_settings: {
-          // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
-          date_time_input_format: "best_effort",
-          date_time_output_format: "iso",
-          use_client_time_zone: 1,
-          output_format_json_quote_64bit_integers: 0
-        }
-      });
-      const rows = await result.json();
-      messages.push(...transformRows(rows.data));
-      messages.sort((a, b) => new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime());
-      messages.forEach((message) => {
-        if (typeof message.content === "string") {
-          try {
-            message.content = JSON.parse(message.content);
-          } catch {
-          }
-        }
-      });
-      const list = new MessageList({ threadId, resourceId }).add(
-        messages,
-        "memory"
-      );
-      return { messages: list.get.all.db() };
-    } catch (error) {
-      throw new MastraError(
-        {
-          id: "CLICKHOUSE_STORAGE_GET_MESSAGES_FAILED",
-          domain: ErrorDomain.STORAGE,
-          category: ErrorCategory.THIRD_PARTY,
-          details: { threadId, resourceId: resourceId ?? "" }
-        },
-        error
-      );
-    }
-  }
   async listMessagesById({ messageIds }) {
     if (messageIds.length === 0) return { messages: [] };
     try {
@@ -288,7 +177,7 @@ var MemoryStorageClickhouse = class extends MemoryStorage {
         dataQuery += ` AND createdAt <= parseDateTime64BestEffort({toDate:String}, 3)`;
         dataParams.toDate = endDate;
       }
-      const { field, direction } = this.parseOrderBy(orderBy);
+      const { field, direction } = this.parseOrderBy(orderBy, "ASC");
       dataQuery += ` ORDER BY "${field}" ${direction}`;
       if (perPageForResponse === false) ; else {
         dataQuery += ` LIMIT {limit:Int64} OFFSET {offset:Int64}`;
@@ -577,7 +466,7 @@ var MemoryStorageClickhouse = class extends MemoryStorage {
           id: thread.id,
           resourceId: thread.resourceId,
           title: thread.title,
-          metadata: thread.metadata,
+          metadata: serializeMetadata(thread.metadata),
           createdAt: thread.createdAt,
           updatedAt: (/* @__PURE__ */ new Date()).toISOString()
         })),
@@ -612,8 +501,9 @@ var MemoryStorageClickhouse = class extends MemoryStorage {
             toDateTime64(createdAt, 3) as createdAt,
             toDateTime64(updatedAt, 3) as updatedAt
           FROM "${TABLE_THREADS}"
-          FINAL
-          WHERE id = {var_id:String}`,
+          WHERE id = {var_id:String}
+          ORDER BY updatedAt DESC
+          LIMIT 1`,
         query_params: { var_id: threadId },
         clickhouse_settings: {
           // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
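
Note: the threads table uses a ReplacingMergeTree engine, so several versions of the same row can coexist until parts are merged. Rather than forcing deduplication with FINAL at read time, the query now selects the newest version explicitly by updatedAt. A minimal sketch of the same pattern against a hypothetical table (not part of this package), using the @clickhouse/client API already imported above:

  // assumes a ReplacingMergeTree table "items" with an updatedAt column
  const latest = await client.query({
    query: `SELECT * FROM items WHERE id = {id:String} ORDER BY updatedAt DESC LIMIT 1`,
    query_params: { id: "item-1" }
  });
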
@@ -630,7 +520,7 @@ var MemoryStorageClickhouse = class extends MemoryStorage {
       }
       return {
         ...thread,
-        metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata,
+        metadata: parseMetadata(thread.metadata),
         createdAt: thread.createdAt,
         updatedAt: thread.updatedAt
       };
@@ -653,6 +543,7 @@ var MemoryStorageClickhouse = class extends MemoryStorage {
         values: [
           {
             ...thread,
+            metadata: serializeMetadata(thread.metadata),
             createdAt: thread.createdAt.toISOString(),
             updatedAt: thread.updatedAt.toISOString()
           }
@@ -706,7 +597,7 @@ var MemoryStorageClickhouse = class extends MemoryStorage {
             id: updatedThread.id,
             resourceId: updatedThread.resourceId,
             title: updatedThread.title,
-            metadata: updatedThread.metadata,
+            metadata: serializeMetadata(updatedThread.metadata),
             createdAt: updatedThread.createdAt,
             updatedAt: updatedThread.updatedAt.toISOString()
           }
@@ -776,7 +667,7 @@ var MemoryStorageClickhouse = class extends MemoryStorage {
     const { field, direction } = this.parseOrderBy(orderBy);
     try {
       const countResult = await this.client.query({
-        query: `SELECT count() as total FROM ${TABLE_THREADS} WHERE resourceId = {resourceId:String}`,
+        query: `SELECT count(DISTINCT id) as total FROM ${TABLE_THREADS} WHERE resourceId = {resourceId:String}`,
         query_params: { resourceId },
         clickhouse_settings: {
           date_time_input_format: "best_effort",
@@ -798,15 +689,27 @@ var MemoryStorageClickhouse = class extends MemoryStorage {
       }
       const dataResult = await this.client.query({
         query: `
+          WITH ranked_threads AS (
+            SELECT
+              id,
+              resourceId,
+              title,
+              metadata,
+              toDateTime64(createdAt, 3) as createdAt,
+              toDateTime64(updatedAt, 3) as updatedAt,
+              ROW_NUMBER() OVER (PARTITION BY id ORDER BY updatedAt DESC) as row_num
+            FROM ${TABLE_THREADS}
+            WHERE resourceId = {resourceId:String}
+          )
           SELECT
             id,
             resourceId,
             title,
             metadata,
-            toDateTime64(createdAt, 3) as createdAt,
-            toDateTime64(updatedAt, 3) as updatedAt
-          FROM ${TABLE_THREADS}
-          WHERE resourceId = {resourceId:String}
+            createdAt,
+            updatedAt
+          FROM ranked_threads
+          WHERE row_num = 1
           ORDER BY "${field}" ${direction === "DESC" ? "DESC" : "ASC"}
           LIMIT {perPage:Int64} OFFSET {offset:Int64}
         `,
@@ -823,7 +726,10 @@ var MemoryStorageClickhouse = class extends MemoryStorage {
         }
       });
       const rows = await dataResult.json();
-      const threads = transformRows(rows.data);
+      const threads = transformRows(rows.data).map((thread) => ({
+        ...thread,
+        metadata: parseMetadata(thread.metadata)
+      }));
       return {
         threads,
         total,
@@ -1040,7 +946,7 @@ var MemoryStorageClickhouse = class extends MemoryStorage {
     const now = (/* @__PURE__ */ new Date()).toISOString().replace("Z", "");
     const threadUpdatePromises = Array.from(threadIdsToUpdate).map(async (threadId) => {
       const threadResult = await this.client.query({
-        query: `SELECT id, resourceId, title, metadata, createdAt FROM ${TABLE_THREADS} WHERE id = {threadId:String}`,
+        query: `SELECT id, resourceId, title, metadata, createdAt FROM ${TABLE_THREADS} WHERE id = {threadId:String} ORDER BY updatedAt DESC LIMIT 1`,
         query_params: { threadId },
         clickhouse_settings: {
           date_time_input_format: "best_effort",
@@ -1069,7 +975,7 @@ var MemoryStorageClickhouse = class extends MemoryStorage {
             id: existingThread.id,
             resourceId: existingThread.resourceId,
             title: existingThread.title,
-            metadata: existingThread.metadata,
+            metadata: typeof existingThread.metadata === "string" ? existingThread.metadata : serializeMetadata(existingThread.metadata),
             createdAt: existingThread.createdAt,
             updatedAt: now
           }
@@ -1128,7 +1034,7 @@ var MemoryStorageClickhouse = class extends MemoryStorage {
   async getResourceById({ resourceId }) {
     try {
       const result = await this.client.query({
-        query: `SELECT id, workingMemory, metadata, createdAt, updatedAt FROM ${TABLE_RESOURCES} WHERE id = {resourceId:String}`,
+        query: `SELECT id, workingMemory, metadata, createdAt, updatedAt FROM ${TABLE_RESOURCES} WHERE id = {resourceId:String} ORDER BY updatedAt DESC LIMIT 1`,
         query_params: { resourceId },
         clickhouse_settings: {
           date_time_input_format: "best_effort",
@@ -1300,6 +1206,9 @@ var StoreOperationsClickhouse = class extends StoreOperations {
     const columns = Object.entries(schema).map(([name, def]) => {
       const constraints = [];
       if (!def.nullable) constraints.push("NOT NULL");
+      if (name === "metadata" && def.type === "text" && def.nullable) {
+        constraints.push("DEFAULT '{}'");
+      }
       const columnTtl = this.ttl?.[tableName]?.columns?.[name];
       return `"${name}" ${COLUMN_TYPES[def.type]} ${constraints.join(" ")} ${columnTtl ? `TTL toDateTime(${columnTtl.ttlKey ?? "createdAt"}) + INTERVAL ${columnTtl.interval} ${columnTtl.unit}` : ""}`;
     }).join(",\n");
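
Note: with this change, a nullable text column named metadata is emitted by the column builder roughly as the following DDL fragment (the TTL clause is only appended when a TTL is configured for the column):

  "metadata" String DEFAULT '{}'

so rows inserted without metadata default to an empty JSON object rather than ClickHouse's usual empty string.
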
@@ -2071,7 +1980,8 @@ var WorkflowsStorageClickhouse = class extends WorkflowsStorage {
     toDate,
     page,
     perPage,
-    resourceId
+    resourceId,
+    status
   } = {}) {
     try {
       const conditions = [];
@@ -2080,6 +1990,10 @@ var WorkflowsStorageClickhouse = class extends WorkflowsStorage {
         conditions.push(`workflow_name = {var_workflow_name:String}`);
         values.var_workflow_name = workflowName;
       }
+      if (status) {
+        conditions.push(`JSONExtractString(snapshot, 'status') = {var_status:String}`);
+        values.var_status = status;
+      }
       if (resourceId) {
         const hasResourceId = await this.operations.hasColumn(TABLE_WORKFLOW_SNAPSHOT, "resourceId");
         if (hasResourceId) {
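
Note: listWorkflowRuns now accepts a status filter that is compared against the status field of the stored snapshot JSON. A minimal usage sketch (the store variable and the "success" value are illustrative, not taken from this diff; the other parameters appear in the signature above):

  const result = await store.listWorkflowRuns({
    workflowName: "my-workflow",
    status: "success",
    page: 0,
    perPage: 20
  });
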
@@ -2206,7 +2120,7 @@ var ClickhouseStore = class extends MastraStorage {
   ttl = {};
   stores;
   constructor(config) {
-    super({ name: "ClickhouseStore" });
+    super({ id: config.id, name: "ClickhouseStore" });
     this.db = createClient({
       url: config.url,
       username: config.username,
@@ -2333,15 +2247,8 @@ var ClickhouseStore = class extends MastraStorage {
   }) {
     return this.stores.workflows.loadWorkflowSnapshot({ workflowName, runId });
   }
-  async listWorkflowRuns({
-    workflowName,
-    fromDate,
-    toDate,
-    perPage,
-    page,
-    resourceId
-  } = {}) {
-    return this.stores.workflows.listWorkflowRuns({ workflowName, fromDate, toDate, perPage, page, resourceId });
+  async listWorkflowRuns(args = {}) {
+    return this.stores.workflows.listWorkflowRuns(args);
   }
   async getWorkflowRunById({
     runId,
@@ -2365,13 +2272,6 @@ var ClickhouseStore = class extends MastraStorage {
   async deleteThread({ threadId }) {
     return this.stores.memory.deleteThread({ threadId });
   }
-  async getMessages({
-    threadId,
-    resourceId,
-    selectBy
-  }) {
-    return this.stores.memory.getMessages({ threadId, resourceId, selectBy });
-  }
   async saveMessages(args) {
     return this.stores.memory.saveMessages(args);
   }