@mastra/clickhouse 0.2.7-alpha.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js ADDED
@@ -0,0 +1,633 @@
1
+ import { MastraStorage, TABLE_SCHEMAS, TABLE_TRACES, TABLE_WORKFLOW_SNAPSHOT, TABLE_THREADS, TABLE_MESSAGES, TABLE_EVALS } from '@mastra/core/storage';
2
+ import { createClient } from '@clickhouse/client';
3
+
4
// src/storage/index.ts
/**
 * Parse a JSON string, falling back to an empty object on malformed input.
 * ClickHouse returns JSON-typed columns as plain strings, so callers use this
 * to revive them without having to handle parse errors themselves.
 */
function safelyParseJSON(jsonString) {
  let parsed;
  try {
    parsed = JSON.parse(jsonString);
  } catch {
    parsed = {};
  }
  return parsed;
}
12
// ClickHouse table engine per Mastra table. Threads and workflow snapshots are
// updated in place (insert-as-upsert), so they use ReplacingMergeTree to
// deduplicate rows by primary key; append-only tables use plain MergeTree.
var TABLE_ENGINES = {
  [TABLE_MESSAGES]: `MergeTree()`,
  [TABLE_WORKFLOW_SNAPSHOT]: `ReplacingMergeTree()`,
  [TABLE_TRACES]: `MergeTree()`,
  [TABLE_THREADS]: `ReplacingMergeTree()`,
  [TABLE_EVALS]: `MergeTree()`
};
// Mapping from Mastra schema column types to ClickHouse column types.
// JSON payloads ("jsonb") are stored as String and (de)serialized in JS.
var COLUMN_TYPES = {
  text: "String",
  timestamp: "DateTime64(3)",
  uuid: "String",
  jsonb: "String",
  integer: "Int64",
  bigint: "Int64"
};
27
/** Apply {@link transformRow} to every row of a query result. */
function transformRows(rows) {
  return rows.map((r) => transformRow(r));
}
/**
 * Revive `createdAt`/`updatedAt` ISO strings into Date objects, mutating the
 * row in place. Falsy rows (e.g. `undefined` from an empty result set) are
 * returned unchanged.
 */
function transformRow(row) {
  if (!row) return row;
  if (row.createdAt) row.createdAt = new Date(row.createdAt);
  if (row.updatedAt) row.updatedAt = new Date(row.updatedAt);
  return row;
}
42
+ var ClickhouseStore = class extends MastraStorage {
43
+ db;
44
+ constructor(config) {
45
+ super({ name: "ClickhouseStore" });
46
+ this.db = createClient({
47
+ url: config.url,
48
+ username: config.username,
49
+ password: config.password,
50
+ clickhouse_settings: {
51
+ date_time_input_format: "best_effort",
52
+ date_time_output_format: "iso",
53
+ // This is crucial
54
+ use_client_time_zone: 1,
55
+ output_format_json_quote_64bit_integers: 0
56
+ }
57
+ });
58
+ }
59
+ getEvalsByAgentName(_agentName, _type) {
60
+ throw new Error("Method not implemented.");
61
+ }
62
+ async batchInsert({ tableName, records }) {
63
+ try {
64
+ await this.db.insert({
65
+ table: tableName,
66
+ values: records.map((record) => ({
67
+ ...Object.fromEntries(
68
+ Object.entries(record).map(([key, value]) => [
69
+ key,
70
+ TABLE_SCHEMAS[tableName]?.[key]?.type === "timestamp" ? new Date(value).toISOString() : value
71
+ ])
72
+ )
73
+ })),
74
+ format: "JSONEachRow",
75
+ clickhouse_settings: {
76
+ // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
77
+ date_time_input_format: "best_effort",
78
+ use_client_time_zone: 1,
79
+ output_format_json_quote_64bit_integers: 0
80
+ }
81
+ });
82
+ } catch (error) {
83
+ console.error(`Error inserting into ${tableName}:`, error);
84
+ throw error;
85
+ }
86
+ }
87
+ async getTraces({
88
+ name,
89
+ scope,
90
+ page,
91
+ perPage,
92
+ attributes
93
+ }) {
94
+ const limit = perPage;
95
+ const offset = page * perPage;
96
+ const args = {};
97
+ const conditions = [];
98
+ if (name) {
99
+ conditions.push(`name LIKE CONCAT({var_name:String}, '%')`);
100
+ args.var_name = name;
101
+ }
102
+ if (scope) {
103
+ conditions.push(`scope = {var_scope:String}`);
104
+ args.var_scope = scope;
105
+ }
106
+ if (attributes) {
107
+ Object.entries(attributes).forEach(([key, value]) => {
108
+ conditions.push(`JSONExtractString(attributes, '${key}') = {var_${key}:String}`);
109
+ args[`var_${key}`] = value;
110
+ });
111
+ }
112
+ const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
113
+ const result = await this.db.query({
114
+ query: `SELECT *, toDateTime64(createdAt, 3) as createdAt FROM ${TABLE_TRACES} ${whereClause} ORDER BY "createdAt" DESC LIMIT ${limit} OFFSET ${offset}`,
115
+ query_params: args,
116
+ clickhouse_settings: {
117
+ // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
118
+ date_time_input_format: "best_effort",
119
+ date_time_output_format: "iso",
120
+ use_client_time_zone: 1,
121
+ output_format_json_quote_64bit_integers: 0
122
+ }
123
+ });
124
+ if (!result) {
125
+ return [];
126
+ }
127
+ const resp = await result.json();
128
+ const rows = resp.data;
129
+ return rows.map((row) => ({
130
+ id: row.id,
131
+ parentSpanId: row.parentSpanId,
132
+ traceId: row.traceId,
133
+ name: row.name,
134
+ scope: row.scope,
135
+ kind: row.kind,
136
+ status: safelyParseJSON(row.status),
137
+ events: safelyParseJSON(row.events),
138
+ links: safelyParseJSON(row.links),
139
+ attributes: safelyParseJSON(row.attributes),
140
+ startTime: row.startTime,
141
+ endTime: row.endTime,
142
+ other: safelyParseJSON(row.other),
143
+ createdAt: row.createdAt
144
+ }));
145
+ }
146
+ async createTable({
147
+ tableName,
148
+ schema
149
+ }) {
150
+ try {
151
+ const columns = Object.entries(schema).map(([name, def]) => {
152
+ const constraints = [];
153
+ if (!def.nullable) constraints.push("NOT NULL");
154
+ return `"${name}" ${COLUMN_TYPES[def.type]} ${constraints.join(" ")}`;
155
+ }).join(",\n");
156
+ const sql = tableName === TABLE_WORKFLOW_SNAPSHOT ? `
157
+ CREATE TABLE IF NOT EXISTS ${tableName} (
158
+ ${["id String"].concat(columns)}
159
+ )
160
+ ENGINE = ${TABLE_ENGINES[tableName]}
161
+ PARTITION BY "createdAt"
162
+ PRIMARY KEY (createdAt, run_id, workflow_name)
163
+ ORDER BY (createdAt, run_id, workflow_name)
164
+ SETTINGS index_granularity = 8192;
165
+ ` : `
166
+ CREATE TABLE IF NOT EXISTS ${tableName} (
167
+ ${columns}
168
+ )
169
+ ENGINE = ${TABLE_ENGINES[tableName]}
170
+ PARTITION BY "createdAt"
171
+ PRIMARY KEY (createdAt, id)
172
+ ORDER BY (createdAt, id)
173
+ SETTINGS index_granularity = 8192;
174
+ `;
175
+ await this.db.query({
176
+ query: sql,
177
+ clickhouse_settings: {
178
+ // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
179
+ date_time_input_format: "best_effort",
180
+ date_time_output_format: "iso",
181
+ use_client_time_zone: 1,
182
+ output_format_json_quote_64bit_integers: 0
183
+ }
184
+ });
185
+ } catch (error) {
186
+ console.error(`Error creating table ${tableName}:`, error);
187
+ throw error;
188
+ }
189
+ }
190
+ async clearTable({ tableName }) {
191
+ try {
192
+ await this.db.query({
193
+ query: `TRUNCATE TABLE ${tableName}`,
194
+ clickhouse_settings: {
195
+ // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
196
+ date_time_input_format: "best_effort",
197
+ date_time_output_format: "iso",
198
+ use_client_time_zone: 1,
199
+ output_format_json_quote_64bit_integers: 0
200
+ }
201
+ });
202
+ } catch (error) {
203
+ console.error(`Error clearing table ${tableName}:`, error);
204
+ throw error;
205
+ }
206
+ }
207
+ async insert({ tableName, record }) {
208
+ try {
209
+ await this.db.insert({
210
+ table: tableName,
211
+ values: [
212
+ {
213
+ ...record,
214
+ createdAt: record.createdAt.toISOString(),
215
+ updatedAt: record.updatedAt.toISOString()
216
+ }
217
+ ],
218
+ format: "JSONEachRow",
219
+ clickhouse_settings: {
220
+ // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
221
+ output_format_json_quote_64bit_integers: 0,
222
+ date_time_input_format: "best_effort",
223
+ use_client_time_zone: 1
224
+ }
225
+ });
226
+ } catch (error) {
227
+ console.error(`Error inserting into ${tableName}:`, error);
228
+ throw error;
229
+ }
230
+ }
231
+ async load({ tableName, keys }) {
232
+ try {
233
+ const keyEntries = Object.entries(keys);
234
+ const conditions = keyEntries.map(
235
+ ([key], index) => `"${key}" = {var_${key}:${COLUMN_TYPES[TABLE_SCHEMAS[tableName]?.[key]?.type ?? "text"]}}`
236
+ ).join(" AND ");
237
+ const values = keyEntries.reduce((acc, [key, value]) => {
238
+ return { ...acc, [`var_${key}`]: value };
239
+ }, {});
240
+ const result = await this.db.query({
241
+ query: `SELECT *, toDateTime64(createdAt, 3) as createdAt, toDateTime64(updatedAt, 3) as updatedAt FROM ${tableName} ${TABLE_ENGINES[tableName].startsWith("ReplacingMergeTree") ? "FINAL" : ""} WHERE ${conditions}`,
242
+ query_params: values,
243
+ clickhouse_settings: {
244
+ // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
245
+ date_time_input_format: "best_effort",
246
+ date_time_output_format: "iso",
247
+ use_client_time_zone: 1,
248
+ output_format_json_quote_64bit_integers: 0
249
+ }
250
+ });
251
+ if (!result) {
252
+ return null;
253
+ }
254
+ const rows = await result.json();
255
+ if (tableName === TABLE_WORKFLOW_SNAPSHOT) {
256
+ const snapshot = rows.data[0];
257
+ if (!snapshot) {
258
+ return null;
259
+ }
260
+ if (typeof snapshot.snapshot === "string") {
261
+ snapshot.snapshot = JSON.parse(snapshot.snapshot);
262
+ }
263
+ return transformRow(snapshot);
264
+ }
265
+ const data = transformRow(rows.data[0]);
266
+ return data;
267
+ } catch (error) {
268
+ console.error(`Error loading from ${tableName}:`, error);
269
+ throw error;
270
+ }
271
+ }
272
+ async getThreadById({ threadId }) {
273
+ try {
274
+ const result = await this.db.query({
275
+ query: `SELECT
276
+ id,
277
+ "resourceId",
278
+ title,
279
+ metadata,
280
+ toDateTime64(createdAt, 3) as createdAt,
281
+ toDateTime64(updatedAt, 3) as updatedAt
282
+ FROM "${TABLE_THREADS}"
283
+ FINAL
284
+ WHERE id = {var_id:String}`,
285
+ query_params: { var_id: threadId },
286
+ clickhouse_settings: {
287
+ // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
288
+ date_time_input_format: "best_effort",
289
+ date_time_output_format: "iso",
290
+ use_client_time_zone: 1,
291
+ output_format_json_quote_64bit_integers: 0
292
+ }
293
+ });
294
+ const rows = await result.json();
295
+ const thread = transformRow(rows.data[0]);
296
+ if (!thread) {
297
+ return null;
298
+ }
299
+ return {
300
+ ...thread,
301
+ metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata,
302
+ createdAt: thread.createdAt,
303
+ updatedAt: thread.updatedAt
304
+ };
305
+ } catch (error) {
306
+ console.error(`Error getting thread ${threadId}:`, error);
307
+ throw error;
308
+ }
309
+ }
310
+ async getThreadsByResourceId({ resourceId }) {
311
+ try {
312
+ const result = await this.db.query({
313
+ query: `SELECT
314
+ id,
315
+ "resourceId",
316
+ title,
317
+ metadata,
318
+ toDateTime64(createdAt, 3) as createdAt,
319
+ toDateTime64(updatedAt, 3) as updatedAt
320
+ FROM "${TABLE_THREADS}"
321
+ WHERE "resourceId" = {var_resourceId:String}`,
322
+ query_params: { var_resourceId: resourceId },
323
+ clickhouse_settings: {
324
+ // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
325
+ date_time_input_format: "best_effort",
326
+ date_time_output_format: "iso",
327
+ use_client_time_zone: 1,
328
+ output_format_json_quote_64bit_integers: 0
329
+ }
330
+ });
331
+ const rows = await result.json();
332
+ const threads = transformRows(rows.data);
333
+ return threads.map((thread) => ({
334
+ ...thread,
335
+ metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata,
336
+ createdAt: thread.createdAt,
337
+ updatedAt: thread.updatedAt
338
+ }));
339
+ } catch (error) {
340
+ console.error(`Error getting threads for resource ${resourceId}:`, error);
341
+ throw error;
342
+ }
343
+ }
344
+ async saveThread({ thread }) {
345
+ try {
346
+ await this.db.insert({
347
+ table: TABLE_THREADS,
348
+ values: [
349
+ {
350
+ ...thread,
351
+ createdAt: thread.createdAt.toISOString(),
352
+ updatedAt: thread.updatedAt.toISOString()
353
+ }
354
+ ],
355
+ format: "JSONEachRow",
356
+ clickhouse_settings: {
357
+ // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
358
+ date_time_input_format: "best_effort",
359
+ use_client_time_zone: 1,
360
+ output_format_json_quote_64bit_integers: 0
361
+ }
362
+ });
363
+ return thread;
364
+ } catch (error) {
365
+ console.error("Error saving thread:", error);
366
+ throw error;
367
+ }
368
+ }
369
+ async updateThread({
370
+ id,
371
+ title,
372
+ metadata
373
+ }) {
374
+ try {
375
+ const existingThread = await this.getThreadById({ threadId: id });
376
+ if (!existingThread) {
377
+ throw new Error(`Thread ${id} not found`);
378
+ }
379
+ const mergedMetadata = {
380
+ ...existingThread.metadata,
381
+ ...metadata
382
+ };
383
+ const updatedThread = {
384
+ ...existingThread,
385
+ title,
386
+ metadata: mergedMetadata,
387
+ updatedAt: /* @__PURE__ */ new Date()
388
+ };
389
+ await this.db.insert({
390
+ table: TABLE_THREADS,
391
+ values: [
392
+ {
393
+ ...updatedThread,
394
+ updatedAt: updatedThread.updatedAt.toISOString()
395
+ }
396
+ ],
397
+ format: "JSONEachRow",
398
+ clickhouse_settings: {
399
+ // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
400
+ date_time_input_format: "best_effort",
401
+ use_client_time_zone: 1,
402
+ output_format_json_quote_64bit_integers: 0
403
+ }
404
+ });
405
+ return updatedThread;
406
+ } catch (error) {
407
+ console.error("Error updating thread:", error);
408
+ throw error;
409
+ }
410
+ }
411
+ async deleteThread({ threadId }) {
412
+ try {
413
+ await this.db.command({
414
+ query: `DELETE FROM "${TABLE_MESSAGES}" WHERE thread_id = '${threadId}';`,
415
+ query_params: { var_thread_id: threadId },
416
+ clickhouse_settings: {
417
+ output_format_json_quote_64bit_integers: 0
418
+ }
419
+ });
420
+ await this.db.command({
421
+ query: `DELETE FROM "${TABLE_THREADS}" WHERE id = {var_id:String};`,
422
+ query_params: { var_id: threadId },
423
+ clickhouse_settings: {
424
+ output_format_json_quote_64bit_integers: 0
425
+ }
426
+ });
427
+ } catch (error) {
428
+ console.error("Error deleting thread:", error);
429
+ throw error;
430
+ }
431
+ }
432
+ async getMessages({ threadId, selectBy }) {
433
+ try {
434
+ const messages = [];
435
+ const limit = typeof selectBy?.last === `number` ? selectBy.last : 40;
436
+ const include = selectBy?.include || [];
437
+ if (include.length) {
438
+ const includeResult = await this.db.query({
439
+ query: `
440
+ WITH ordered_messages AS (
441
+ SELECT
442
+ *,
443
+ toDateTime64(createdAt, 3) as createdAt,
444
+ toDateTime64(updatedAt, 3) as updatedAt,
445
+ ROW_NUMBER() OVER (ORDER BY "createdAt" DESC) as row_num
446
+ FROM "${TABLE_MESSAGES}"
447
+ WHERE thread_id = {var_thread_id:String}
448
+ )
449
+ SELECT
450
+ m.id AS id,
451
+ m.content as content,
452
+ m.role as role,
453
+ m.type as type,
454
+ m.createdAt as createdAt,
455
+ m.updatedAt as updatedAt,
456
+ m.thread_id AS "threadId"
457
+ FROM ordered_messages m
458
+ WHERE m.id = ANY({var_include:Array(String)})
459
+ OR EXISTS (
460
+ SELECT 1 FROM ordered_messages target
461
+ WHERE target.id = ANY({var_include:Array(String)})
462
+ AND (
463
+ -- Get previous messages based on the max withPreviousMessages
464
+ (m.row_num <= target.row_num + {var_withPreviousMessages:Int64} AND m.row_num > target.row_num)
465
+ OR
466
+ -- Get next messages based on the max withNextMessages
467
+ (m.row_num >= target.row_num - {var_withNextMessages:Int64} AND m.row_num < target.row_num)
468
+ )
469
+ )
470
+ ORDER BY m."createdAt" DESC
471
+ `,
472
+ query_params: {
473
+ var_thread_id: threadId,
474
+ var_include: include.map((i) => i.id),
475
+ var_withPreviousMessages: Math.max(...include.map((i) => i.withPreviousMessages || 0)),
476
+ var_withNextMessages: Math.max(...include.map((i) => i.withNextMessages || 0))
477
+ },
478
+ clickhouse_settings: {
479
+ // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
480
+ date_time_input_format: "best_effort",
481
+ date_time_output_format: "iso",
482
+ use_client_time_zone: 1,
483
+ output_format_json_quote_64bit_integers: 0
484
+ }
485
+ });
486
+ const rows2 = await includeResult.json();
487
+ messages.push(...transformRows(rows2.data));
488
+ }
489
+ const result = await this.db.query({
490
+ query: `
491
+ SELECT
492
+ id,
493
+ content,
494
+ role,
495
+ type,
496
+ toDateTime64(createdAt, 3) as createdAt,
497
+ thread_id AS "threadId"
498
+ FROM "${TABLE_MESSAGES}"
499
+ WHERE thread_id = {threadId:String}
500
+ AND id NOT IN ({exclude:Array(String)})
501
+ ORDER BY "createdAt" DESC
502
+ LIMIT {limit:Int64}
503
+ `,
504
+ query_params: {
505
+ threadId,
506
+ exclude: messages.map((m) => m.id),
507
+ limit
508
+ },
509
+ clickhouse_settings: {
510
+ // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
511
+ date_time_input_format: "best_effort",
512
+ date_time_output_format: "iso",
513
+ use_client_time_zone: 1,
514
+ output_format_json_quote_64bit_integers: 0
515
+ }
516
+ });
517
+ const rows = await result.json();
518
+ messages.push(...transformRows(rows.data));
519
+ messages.sort((a, b) => new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime());
520
+ messages.forEach((message) => {
521
+ if (typeof message.content === "string") {
522
+ try {
523
+ message.content = JSON.parse(message.content);
524
+ } catch {
525
+ }
526
+ }
527
+ });
528
+ return messages;
529
+ } catch (error) {
530
+ console.error("Error getting messages:", error);
531
+ throw error;
532
+ }
533
+ }
534
+ async saveMessages({ messages }) {
535
+ if (messages.length === 0) return messages;
536
+ try {
537
+ const threadId = messages[0]?.threadId;
538
+ if (!threadId) {
539
+ throw new Error("Thread ID is required");
540
+ }
541
+ const thread = await this.getThreadById({ threadId });
542
+ if (!thread) {
543
+ throw new Error(`Thread ${threadId} not found`);
544
+ }
545
+ await this.db.insert({
546
+ table: TABLE_MESSAGES,
547
+ format: "JSONEachRow",
548
+ values: messages.map((message) => ({
549
+ id: message.id,
550
+ thread_id: threadId,
551
+ content: typeof message.content === "string" ? message.content : JSON.stringify(message.content),
552
+ createdAt: message.createdAt.toISOString(),
553
+ role: message.role,
554
+ type: message.type
555
+ })),
556
+ clickhouse_settings: {
557
+ // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
558
+ date_time_input_format: "best_effort",
559
+ use_client_time_zone: 1,
560
+ output_format_json_quote_64bit_integers: 0
561
+ }
562
+ });
563
+ return messages;
564
+ } catch (error) {
565
+ console.error("Error saving messages:", error);
566
+ throw error;
567
+ }
568
+ }
569
+ async persistWorkflowSnapshot({
570
+ workflowName,
571
+ runId,
572
+ snapshot
573
+ }) {
574
+ try {
575
+ const currentSnapshot = await this.load({
576
+ tableName: TABLE_WORKFLOW_SNAPSHOT,
577
+ keys: { workflow_name: workflowName, run_id: runId }
578
+ });
579
+ const now = /* @__PURE__ */ new Date();
580
+ const persisting = currentSnapshot ? {
581
+ ...currentSnapshot,
582
+ snapshot: JSON.stringify(snapshot),
583
+ updatedAt: now.toISOString()
584
+ } : {
585
+ workflow_name: workflowName,
586
+ run_id: runId,
587
+ snapshot: JSON.stringify(snapshot),
588
+ createdAt: now.toISOString(),
589
+ updatedAt: now.toISOString()
590
+ };
591
+ await this.db.insert({
592
+ table: TABLE_WORKFLOW_SNAPSHOT,
593
+ format: "JSONEachRow",
594
+ values: [persisting],
595
+ clickhouse_settings: {
596
+ // Allows to insert serialized JS Dates (such as '2023-12-06T10:54:48.000Z')
597
+ date_time_input_format: "best_effort",
598
+ use_client_time_zone: 1,
599
+ output_format_json_quote_64bit_integers: 0
600
+ }
601
+ });
602
+ } catch (error) {
603
+ console.error("Error persisting workflow snapshot:", error);
604
+ throw error;
605
+ }
606
+ }
607
+ async loadWorkflowSnapshot({
608
+ workflowName,
609
+ runId
610
+ }) {
611
+ try {
612
+ const result = await this.load({
613
+ tableName: TABLE_WORKFLOW_SNAPSHOT,
614
+ keys: {
615
+ workflow_name: workflowName,
616
+ run_id: runId
617
+ }
618
+ });
619
+ if (!result) {
620
+ return null;
621
+ }
622
+ return result.snapshot;
623
+ } catch (error) {
624
+ console.error("Error loading workflow snapshot:", error);
625
+ throw error;
626
+ }
627
+ }
628
+ async close() {
629
+ await this.db.close();
630
+ }
631
+ };
632
+
633
+ export { ClickhouseStore };
@@ -0,0 +1,15 @@
1
+ services:
2
+ db:
3
+ image: clickhouse/clickhouse-server:latest
4
+ container_name: 'clickhouse-test-db'
5
+ ports:
6
+ - '8123:8123'
7
+ - '9000:9000'
8
+ environment:
9
+ CLICKHOUSE_DEFAULT_ACCESS_MANAGEMENT: 1
10
+ CLICKHOUSE_USER: ${CLICKHOUSE_USER:-default}
11
+ CLICKHOUSE_PASSWORD: ${CLICKHOUSE_PASSWORD:-password}
12
+ volumes:
13
+ - clickhouse_data:/var/lib/clickhouse
14
+ volumes:
15
+ clickhouse_data:
@@ -0,0 +1,6 @@
1
import { createConfig } from '@internal/lint/eslint';

const baseConfig = await createConfig();

/** @type {import("eslint").Linter.Config[]} */
// Extend every shared config entry to also ignore the perf-test config file.
export default baseConfig.map(conf => ({
  ...conf,
  ignores: [...(conf.ignores || []), '**/vitest.perf.config.ts'],
}));
package/package.json ADDED
@@ -0,0 +1,45 @@
1
+ {
2
+ "name": "@mastra/clickhouse",
3
+ "version": "0.2.7-alpha.1",
4
+ "description": "Clickhouse provider for Mastra - includes db storage capabilities",
5
+ "type": "module",
6
+ "main": "dist/index.js",
7
+ "types": "dist/index.d.ts",
8
+ "exports": {
9
+ ".": {
10
+ "import": {
11
+ "types": "./dist/index.d.ts",
12
+ "default": "./dist/index.js"
13
+ },
14
+ "require": {
15
+ "types": "./dist/index.d.cts",
16
+ "default": "./dist/index.cjs"
17
+ }
18
+ },
19
+ "./package.json": "./package.json"
20
+ },
21
+ "dependencies": {
22
+ "@clickhouse/client": "^1.11.0",
23
+ "@mastra/core": "^0.8.0-alpha.2"
24
+ },
25
+ "devDependencies": {
26
+ "@microsoft/api-extractor": "^7.52.1",
27
+ "@types/node": "^20.17.27",
28
+ "eslint": "^9.23.0",
29
+ "tsup": "^8.4.0",
30
+ "typescript": "^5.8.2",
31
+ "vitest": "^3.0.9",
32
+ "@internal/lint": "0.0.1"
33
+ },
34
+ "scripts": {
35
+ "build": "tsup src/index.ts --format esm,cjs --experimental-dts --clean --treeshake=smallest --splitting",
36
+ "build:watch": "pnpm build --watch",
37
+ "pretest": "docker compose up -d && (for i in $(seq 1 30); do docker compose exec -T db clickhouse-client --query 'SELECT 1' && break || (sleep 1; [ $i -eq 30 ] && exit 1); done)",
38
+ "test": "vitest run",
39
+ "posttest": "docker compose down -v",
40
+ "pretest:watch": "docker compose up -d",
41
+ "test:watch": "vitest watch",
42
+ "posttest:watch": "docker compose down -v",
43
+ "lint": "eslint ."
44
+ }
45
+ }
package/src/index.ts ADDED
@@ -0,0 +1 @@
1
+ export * from './storage';