@mastra/libsql 0.0.1-alpha.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs ADDED
@@ -0,0 +1,1143 @@
1
+ 'use strict';
2
+
3
+ var client = require('@libsql/client');
4
+ var vector = require('@mastra/core/vector');
5
+ var filter = require('@mastra/core/vector/filter');
6
+ var storage = require('@mastra/core/storage');
7
+
8
+ // src/vector/index.ts
9
// Translates Mongo-style filter objects into the normalized form consumed by
// buildFilterQuery below. Extends the shared BaseFilterTranslator from
// @mastra/core, which supplies the is*()/normalize*() helpers used here.
var LibSQLFilterTranslator = class extends filter.BaseFilterTranslator {
  // Advertises operator support: regex operators are explicitly unsupported
  // (empty list); $contains and $size are LibSQL-specific custom operators.
  getSupportedOperators() {
    return {
      ...filter.BaseFilterTranslator.DEFAULT_OPERATORS,
      regex: [],
      custom: ["$contains", "$size"]
    };
  }
  // Entry point: empty filters pass through untouched; everything else is
  // validated and then recursively rewritten. (The parameter shadows the
  // module-level `filter` import — harmless compiled-output artifact.)
  translate(filter) {
    if (this.isEmpty(filter)) {
      return filter;
    }
    this.validateFilter(filter);
    return this.translateNode(filter);
  }
  // Recursively rewrites one node. `currentPath` accumulates the dotted field
  // path so plain nested objects flatten to "a.b.c"-style keys.
  translateNode(node, currentPath = "") {
    if (this.isRegex(node)) {
      throw new Error("Direct regex pattern format is not supported in LibSQL");
    }
    // Wraps a result under the accumulated path, when there is one.
    const withPath = (result2) => currentPath ? { [currentPath]: result2 } : result2;
    // Bare primitives become equality checks; bare arrays become $in.
    if (this.isPrimitive(node)) {
      return withPath({ $eq: this.normalizeComparisonValue(node) });
    }
    if (Array.isArray(node)) {
      return withPath({ $in: this.normalizeArrayValues(node) });
    }
    const entries = Object.entries(node);
    const result = {};
    for (const [key, value] of entries) {
      const newPath = currentPath ? `${currentPath}.${key}` : key;
      if (this.isLogicalOperator(key)) {
        // $and/$or/...: translate each branch without a path prefix.
        result[key] = Array.isArray(value) ? value.map((filter) => this.translateNode(filter)) : this.translateNode(value);
      } else if (this.isOperator(key)) {
        if (this.isArrayOperator(key) && !Array.isArray(value) && key !== "$elemMatch") {
          // Array operators (except $elemMatch) accept scalars; wrap them.
          result[key] = [value];
        } else if (this.isBasicOperator(key) && Array.isArray(value)) {
          // Basic comparisons against arrays compare JSON-serialized text.
          result[key] = JSON.stringify(value);
        } else {
          result[key] = value;
        }
      } else if (typeof value === "object" && value !== null) {
        const hasOperators = Object.keys(value).some((k) => this.isOperator(k));
        if (hasOperators) {
          // Operator object under a field: keep it keyed by the full path.
          result[newPath] = this.translateNode(value);
        } else {
          // Plain nested object: flatten its leaves into this result.
          Object.assign(result, this.translateNode(value, newPath));
        }
      } else {
        // Scalar leaf: recursion turns it into { $eq: value } under newPath.
        result[newPath] = this.translateNode(value);
      }
    }
    return result;
  }
  // TODO: Look more into regex support for LibSQL
  // private translateRegexPattern(pattern: string, options: string = ''): any {
  //   if (!options) return { $regex: pattern };
  //   const flags = options
  //     .split('')
  //     .filter(f => 'imsux'.includes(f))
  //     .join('');
  //   return {
  //     $regex: pattern,
  //     $options: flags,
  //   };
  // }
};
75
+
76
+ // src/vector/sql-builder.ts
77
// Builds an equality/inequality operator handler. The CASE makes NULL
// comparisons explicit: `$eq null` becomes IS NULL and `$ne null` becomes
// IS NOT NULL, since SQL `= NULL` never matches. The value is bound twice
// (once for the NULL probe, once for the comparison), hence transformValue
// returning [value, value].
var createBasicOperator = (symbol) => {
  return (key) => ({
    sql: `CASE
      WHEN ? IS NULL THEN json_extract(metadata, '$."${handleKey(key)}"') IS ${symbol === "=" ? "" : "NOT"} NULL
      ELSE json_extract(metadata, '$."${handleKey(key)}"') ${symbol} ?
    END`,
    needsValue: true,
    transformValue: (value) => {
      return [value, value];
    }
  });
};
89
// Builds a numeric comparison handler: the extracted JSON value is cast to
// NUMERIC before being compared against a single bound parameter.
var createNumericOperator = (symbol) => {
  return (key) => {
    const castColumn = `CAST(json_extract(metadata, '$."${handleKey(key)}"') AS NUMERIC)`;
    return {
      sql: `${castColumn} ${symbol} ?`,
      needsValue: true
    };
  };
};
95
// SQL guard: true only when the metadata field holds valid JSON that is an
// array. Used by $all/$elemMatch/$contains before iterating with json_each.
var validateJsonArray = (key) => `json_valid(json_extract(metadata, '$."${handleKey(key)}"'))
  AND json_type(json_extract(metadata, '$."${handleKey(key)}"')) = 'array'`;
97
// Maps Mongo-style operator names to SQL fragment builders. Each builder
// returns { sql, needsValue, transformValue? }; processOperator() decides how
// the operator's value is bound. All metadata access goes through
// json_extract on the `metadata` JSON column.
var FILTER_OPERATORS = {
  $eq: createBasicOperator("="),
  $ne: createBasicOperator("!="),
  $gt: createNumericOperator(">"),
  $gte: createNumericOperator(">="),
  $lt: createNumericOperator("<"),
  $lte: createNumericOperator("<="),
  // Array Operators
  $in: (key, value) => ({
    sql: `json_extract(metadata, '$."${handleKey(key)}"') IN (${value.map(() => "?").join(",")})`,
    needsValue: true
  }),
  $nin: (key, value) => ({
    sql: `json_extract(metadata, '$."${handleKey(key)}"') NOT IN (${value.map(() => "?").join(",")})`,
    needsValue: true
  }),
  // $all: every element of the query array must appear in the stored array.
  // transformValue replaces the placeholder sql entirely with "no element of
  // the bound JSON array is missing from the stored array".
  $all: (key) => ({
    sql: `json_extract(metadata, '$."${handleKey(key)}"') = ?`,
    needsValue: true,
    transformValue: (value) => {
      const arrayValue = Array.isArray(value) ? value : [value];
      if (arrayValue.length === 0) {
        // $all with an empty array matches nothing.
        return {
          sql: "1 = 0",
          values: []
        };
      }
      return {
        sql: `(
          CASE
            WHEN ${validateJsonArray(key)} THEN
              NOT EXISTS (
                SELECT value
                FROM json_each(?)
                WHERE value NOT IN (
                  SELECT value
                  FROM json_each(json_extract(metadata, '$."${handleKey(key)}"'))
                )
              )
            ELSE FALSE
          END
        )`,
        values: [JSON.stringify(arrayValue)]
      };
    }
  }),
  // $elemMatch: at least one element of the stored array satisfies ALL the
  // given conditions. Conditions are built via buildCondition and then the
  // metadata json_extract references are textually rewritten to target
  // json_each's elem.value instead.
  $elemMatch: (key) => ({
    sql: `json_extract(metadata, '$."${handleKey(key)}"') = ?`,
    needsValue: true,
    transformValue: (value) => {
      if (typeof value !== "object" || Array.isArray(value)) {
        throw new Error("$elemMatch requires an object with conditions");
      }
      const conditions = Object.entries(value).map(([field, fieldValue]) => {
        if (field.startsWith("$")) {
          // Operator applied directly to the element, e.g. { $gt: 5 }.
          const { sql, values } = buildCondition("elem.value", { [field]: fieldValue });
          const pattern = /json_extract\(metadata, '\$\."[^"]*"(\."[^"]*")*'\)/g;
          const elemSql = sql.replace(pattern, "elem.value");
          return { sql: elemSql, values };
        } else if (typeof fieldValue === "object" && !Array.isArray(fieldValue)) {
          // Nested operator object on an element field.
          const { sql, values } = buildCondition(field, fieldValue);
          const pattern = /json_extract\(metadata, '\$\."[^"]*"(\."[^"]*")*'\)/g;
          const elemSql = sql.replace(pattern, `json_extract(elem.value, '$."${field}"')`);
          return { sql: elemSql, values };
        } else {
          // Plain field equality on the element.
          return {
            sql: `json_extract(elem.value, '$."${field}"') = ?`,
            values: [fieldValue]
          };
        }
      });
      return {
        sql: `(
          CASE
            WHEN ${validateJsonArray(key)} THEN
              EXISTS (
                SELECT 1
                FROM json_each(json_extract(metadata, '$."${handleKey(key)}"')) as elem
                WHERE ${conditions.map((c) => c.sql).join(" AND ")}
              )
            ELSE FALSE
          END
        )`,
        values: conditions.flatMap((c) => c.values)
      };
    }
  }),
  // Element Operators
  // NOTE(review): $exists ignores its boolean value — { $exists: false }
  // produces the same IS NOT NULL check as { $exists: true }; verify.
  $exists: (key) => ({
    sql: `json_extract(metadata, '$."${handleKey(key)}"') IS NOT NULL`,
    needsValue: false
  }),
  // Logical Operators — here `key` is the already-joined inner SQL produced
  // by handleLogicalOperator, not a field name.
  $and: (key) => ({
    sql: `(${key})`,
    needsValue: false
  }),
  $or: (key) => ({
    sql: `(${key})`,
    needsValue: false
  }),
  $not: (key) => ({ sql: `NOT (${key})`, needsValue: false }),
  $nor: (key) => ({
    sql: `NOT (${key})`,
    needsValue: false
  }),
  // NOTE(review): $size interpolates `$${paramIndex}` (a PostgreSQL-style
  // numbered placeholder) while every other operator binds with '?', and
  // processOperator passes the operator VALUE — not a parameter index — as
  // the second argument. Verify this produces a valid, correctly-bound query.
  $size: (key, paramIndex) => ({
    sql: `(
      CASE
        WHEN json_type(json_extract(metadata, '$."${handleKey(key)}"')) = 'array' THEN
          json_array_length(json_extract(metadata, '$."${handleKey(key)}"')) = $${paramIndex}
        ELSE FALSE
      END
    )`,
    needsValue: true
  }),
  // /**
  //  * Regex Operators
  //  * Supports case insensitive and multiline
  //  */
  // $regex: (key: string): FilterOperator => ({
  //   sql: `json_extract(metadata, '$."${handleKey(key)}"') = ?`,
  //   needsValue: true,
  //   transformValue: (value: any) => {
  //     const pattern = typeof value === 'object' ? value.$regex : value;
  //     const options = typeof value === 'object' ? value.$options || '' : '';
  //     let sql = `json_extract(metadata, '$."${handleKey(key)}"')`;
  //     // Handle multiline
  //     // if (options.includes('m')) {
  //     //   sql = `REPLACE(${sql}, CHAR(10), '\n')`;
  //     // }
  //     // let finalPattern = pattern;
  //     // if (options) {
  //     //   finalPattern = `(\\?${options})${pattern}`;
  //     // }
  //     // // Handle case insensitivity
  //     // if (options.includes('i')) {
  //     //   sql = `LOWER(${sql}) REGEXP LOWER(?)`;
  //     // } else {
  //     //   sql = `${sql} REGEXP ?`;
  //     // }
  //     if (options.includes('m')) {
  //       sql = `EXISTS (
  //         SELECT 1
  //         FROM json_each(
  //           json_array(
  //             ${sql},
  //             REPLACE(${sql}, CHAR(10), CHAR(13))
  //           )
  //         ) as lines
  //         WHERE lines.value REGEXP ?
  //       )`;
  //     } else {
  //       sql = `${sql} REGEXP ?`;
  //     }
  //     // Handle case insensitivity
  //     if (options.includes('i')) {
  //       sql = sql.replace('REGEXP ?', 'REGEXP LOWER(?)');
  //       sql = sql.replace('value REGEXP', 'LOWER(value) REGEXP');
  //     }
  //     // Handle extended - allows whitespace and comments in pattern
  //     if (options.includes('x')) {
  //       // Remove whitespace and comments from pattern
  //       const cleanPattern = pattern.replace(/\s+|#.*$/gm, '');
  //       return {
  //         sql,
  //         values: [cleanPattern],
  //       };
  //     }
  //     return {
  //       sql,
  //       values: [pattern],
  //     };
  //   },
  // }),
  // $contains: arrays → any overlap with the bound array; objects → all
  // flattened dotted leaves must equal their values; any other value is
  // returned unchanged so the default `= ?` sql applies.
  $contains: (key) => ({
    sql: `json_extract(metadata, '$."${handleKey(key)}"') = ?`,
    needsValue: true,
    transformValue: (value) => {
      if (Array.isArray(value)) {
        return {
          sql: `(
            SELECT ${validateJsonArray(key)}
            AND EXISTS (
              SELECT 1
              FROM json_each(json_extract(metadata, '$."${handleKey(key)}"')) as m
              WHERE m.value IN (SELECT value FROM json_each(?))
            )
          )`,
          values: [JSON.stringify(value)]
        };
      }
      if (value && typeof value === "object") {
        // Flatten the object into dotted leaf paths plus their values.
        let traverse2 = function(obj, path = []) {
          for (const [k, v] of Object.entries(obj)) {
            const currentPath = [...path, k];
            if (v && typeof v === "object" && !Array.isArray(v)) {
              traverse2(v, currentPath);
            } else {
              paths.push(currentPath.join("."));
              values.push(v);
            }
          }
        };
        const paths = [];
        const values = [];
        traverse2(value);
        return {
          sql: `(${paths.map((path) => `json_extract(metadata, '$."${handleKey(key)}"."${path}"') = ?`).join(" AND ")})`,
          values
        };
      }
      return value;
    }
  })
};
313
// Converts a dotted metadata path into the quoted form used inside
// json_extract paths: 'a.b' -> 'a"."b'.
var handleKey = (key) => key.split(".").join('"."');
316
// Builds a complete WHERE clause (plus bound values) from a translated
// filter object. A falsy filter yields an empty clause.
function buildFilterQuery(filter) {
  if (!filter) {
    return { sql: "", values: [] };
  }
  const values = [];
  const clauses = [];
  for (const [key, value] of Object.entries(filter)) {
    const condition = buildCondition(key, value);
    values.push(...condition.values);
    clauses.push(condition.sql);
  }
  const joined = clauses.join(" AND ");
  return {
    sql: joined ? `WHERE ${joined}` : "",
    values
  };
}
331
// Routes a single filter entry to the right SQL builder: logical operators
// to handleLogicalOperator, scalar/nullish values to a direct equality
// check, and operator objects to handleOperator. (parentPath is accepted
// for signature compatibility but unused.)
function buildCondition(key, value, parentPath) {
  const logicalOps = ["$and", "$or", "$not", "$nor"];
  if (logicalOps.includes(key)) {
    return handleLogicalOperator(key, value);
  }
  const isScalar = !value || typeof value !== "object";
  if (isScalar) {
    const jsonPath = key.replace(/\./g, '"."');
    return {
      sql: `json_extract(metadata, '$."${jsonPath}"') = ?`,
      values: [value]
    };
  }
  return handleOperator(key, value);
}
343
// Builds SQL for $and/$or/$not/$nor. `value` is normally an array of
// sub-filters; `parentPath` is accepted but unused.
function handleLogicalOperator(key, value, parentPath) {
  // Empty-operand semantics: $and([]) and $nor([]) are vacuously true,
  // $or([]) is vacuously false, and an empty $not is rejected.
  if (!value || value.length === 0) {
    switch (key) {
      case "$and":
      case "$nor":
        return { sql: "true", values: [] };
      case "$or":
        return { sql: "false", values: [] };
      case "$not":
        throw new Error("$not operator cannot be empty");
      default:
        return { sql: "true", values: [] };
    }
  }
  if (key === "$not") {
    // $not takes an object of conditions; negate their conjunction.
    const entries = Object.entries(value);
    const conditions2 = entries.map(([fieldKey, fieldValue]) => buildCondition(fieldKey, fieldValue));
    return {
      sql: `NOT (${conditions2.map((c) => c.sql).join(" AND ")})`,
      values: conditions2.flatMap((c) => c.values)
    };
  }
  const values = [];
  // $or and $nor join branches with OR; $and joins with AND.
  const joinOperator = key === "$or" || key === "$nor" ? "OR" : "AND";
  // Each array element may itself carry several key/value conditions, so the
  // nested map is flattened before joining. A non-array value degrades to a
  // single recursive condition.
  const conditions = Array.isArray(value) ? value.map((f) => {
    const entries = Object.entries(f);
    return entries.map(([k, v]) => buildCondition(k, v));
  }) : [buildCondition(key, value)];
  const joined = conditions.flat().map((c) => {
    values.push(...c.values);
    return c.sql;
  }).join(` ${joinOperator} `);
  return {
    // $nor is NOT(OR of branches); $and/$or are simply parenthesized.
    sql: key === "$nor" ? `NOT (${joined})` : `(${joined})`,
    values
  };
}
380
// Builds SQL for a field whose value is an operator object, e.g.
// { age: { $gt: 1, $lt: 9 } } — multiple operators on one field AND
// together. A nested $not negates the conjunction of its inner operators.
function handleOperator(key, value) {
  if (typeof value === "object" && !Array.isArray(value)) {
    const entries = Object.entries(value);
    const results = entries.map(
      ([operator2, operatorValue2]) => operator2 === "$not" ? {
        sql: `NOT (${Object.entries(operatorValue2).map(([op, val]) => processOperator(key, op, val).sql).join(" AND ")})`,
        values: Object.entries(operatorValue2).flatMap(
          ([op, val]) => processOperator(key, op, val).values
        )
      } : processOperator(key, operator2, operatorValue2)
    );
    return {
      sql: `(${results.map((r) => r.sql).join(" AND ")})`,
      values: results.flatMap((r) => r.values)
    };
  }
  // Array value: destructure the first [index, element] entry (Object.entries
  // on an array yields string indices), defaulting to [] when empty.
  const [[operator, operatorValue] = []] = Object.entries(value);
  return processOperator(key, operator, operatorValue);
}
399
// Resolves an operator name to its FILTER_OPERATORS builder and binds the
// operator's value. A transformValue hook may either rewrite the bound
// values or return a complete { sql, values } replacement fragment.
var processOperator = (key, operator, operatorValue) => {
  const isKnown = operator.startsWith("$") && Boolean(FILTER_OPERATORS[operator]);
  if (!isKnown) {
    throw new Error(`Invalid operator: ${operator}`);
  }
  const built = FILTER_OPERATORS[operator](key, operatorValue);
  if (!built.needsValue) {
    return { sql: built.sql, values: [] };
  }
  let bound = operatorValue;
  if (built.transformValue) {
    bound = built.transformValue(operatorValue);
  }
  if (bound && typeof bound === "object" && "sql" in bound) {
    // transformValue produced a full replacement fragment.
    return bound;
  }
  return {
    sql: built.sql,
    values: Array.isArray(bound) ? bound : [bound]
  };
};
417
+
418
+ // src/vector/index.ts
419
// Vector store backed by libSQL/Turso. Embeddings live in per-index tables
// created by createIndex(); similarity uses libSQL's native vector functions
// (vector32, vector_distance_cos, vector_extract, libsql_vector_idx).
var LibSQLVector = class extends vector.MastraVector {
  // libSQL/Turso client used for all SQL execution.
  turso;
  // connectionUrl: file:, :memory:, or remote URL; authToken/syncUrl/
  // syncInterval are passed straight through to @libsql/client.
  constructor({
    connectionUrl,
    authToken,
    syncUrl,
    syncInterval
  }) {
    super();
    this.turso = client.createClient({
      url: connectionUrl,
      syncUrl,
      authToken,
      syncInterval
    });
    // Enable WAL for local (file/in-memory) databases. Fire-and-forget: the
    // promise is intentionally not awaited (constructors cannot be async).
    if (connectionUrl.includes(`file:`) || connectionUrl.includes(`:memory:`)) {
      void this.turso.execute({
        sql: "PRAGMA journal_mode=WAL;",
        args: {}
      });
    }
  }
  // Normalizes a Mongo-style filter into the form buildFilterQuery expects.
  transformFilter(filter) {
    const translator = new LibSQLFilterTranslator();
    return translator.translate(filter);
  }
  /**
   * Similarity search: scores rows by cosine similarity
   * (1 - vector_distance_cos), applies the metadata filter and minScore,
   * and returns up to topK results ordered by descending score.
   * NOTE(review): indexName, topK and the query vector are interpolated into
   * the SQL (only filter values and minScore are bound) — createIndex
   * validates index names, but query does not re-validate here.
   */
  async query(...args) {
    const params = this.normalizeArgs("query", args, ["minScore"]);
    try {
      const { indexName, queryVector, topK = 10, filter, includeVector = false, minScore = 0 } = params;
      const vectorStr = `[${queryVector.join(",")}]`;
      const translatedFilter = this.transformFilter(filter);
      const { sql: filterQuery, values: filterValues } = buildFilterQuery(translatedFilter);
      // minScore is appended last to match the trailing `score > ?`.
      filterValues.push(minScore);
      const query = `
        WITH vector_scores AS (
          SELECT
            vector_id as id,
            (1-vector_distance_cos(embedding, '${vectorStr}')) as score,
            metadata
            ${includeVector ? ", vector_extract(embedding) as embedding" : ""}
          FROM ${indexName}
          ${filterQuery}
        )
        SELECT *
        FROM vector_scores
        WHERE score > ?
        ORDER BY score DESC
        LIMIT ${topK}`;
      const result = await this.turso.execute({
        sql: query,
        args: filterValues
      });
      return result.rows.map(({ id, score, metadata, embedding }) => ({
        id,
        score,
        metadata: JSON.parse(metadata ?? "{}"),
        ...includeVector && embedding && { vector: JSON.parse(embedding) }
      }));
    } finally {
      // (empty finally — compiled-output artifact)
    }
  }
  /**
   * Inserts or updates vectors (+ optional per-vector metadata) in a single
   * write transaction; UUIDs are generated when ids are not supplied. A
   * libSQL dimension-mismatch error is rewritten into an actionable message.
   * @returns the ids of the upserted rows.
   */
  async upsert(...args) {
    const params = this.normalizeArgs("upsert", args);
    const { indexName, vectors, metadata, ids } = params;
    const tx = await this.turso.transaction("write");
    try {
      const vectorIds = ids || vectors.map(() => crypto.randomUUID());
      for (let i = 0; i < vectors.length; i++) {
        // Values are bound twice: once for INSERT, once for the UPDATE arm.
        const query = `
          INSERT INTO ${indexName} (vector_id, embedding, metadata)
          VALUES (?, vector32(?), ?)
          ON CONFLICT(vector_id) DO UPDATE SET
            embedding = vector32(?),
            metadata = ?
        `;
        await tx.execute({
          sql: query,
          // @ts-ignore
          args: [
            vectorIds[i],
            JSON.stringify(vectors[i]),
            JSON.stringify(metadata?.[i] || {}),
            JSON.stringify(vectors[i]),
            JSON.stringify(metadata?.[i] || {})
          ]
        });
      }
      await tx.commit();
      return vectorIds;
    } catch (error) {
      await tx.rollback();
      if (error instanceof Error && error.message?.includes("dimensions are different")) {
        const match = error.message.match(/dimensions are different: (\d+) != (\d+)/);
        if (match) {
          const [, actual, expected] = match;
          throw new Error(
            `Vector dimension mismatch: Index "${indexName}" expects ${expected} dimensions but got ${actual} dimensions. Either use a matching embedding model or delete and recreate the index with the new dimension.`
          );
        }
      }
      throw error;
    }
  }
  /**
   * Creates the index table (vector_id + F32_BLOB embedding + metadata TEXT)
   * and a libsql_vector_idx index over the embedding column, after
   * validating the index name and dimension.
   * NOTE(review): `SERIAL` is not a SQLite type name — SQLite's permissive
   * typing accepts it, but confirm the intended column affinity.
   */
  async createIndex(...args) {
    const params = this.normalizeArgs("createIndex", args);
    const { indexName, dimension } = params;
    try {
      // Identifier whitelist: letters/digits/underscore, non-digit start —
      // this is what makes interpolating indexName into SQL safe.
      if (!indexName.match(/^[a-zA-Z_][a-zA-Z0-9_]*$/)) {
        throw new Error("Invalid index name format");
      }
      if (!Number.isInteger(dimension) || dimension <= 0) {
        throw new Error("Dimension must be a positive integer");
      }
      await this.turso.execute({
        sql: `
          CREATE TABLE IF NOT EXISTS ${indexName} (
            id SERIAL PRIMARY KEY,
            vector_id TEXT UNIQUE NOT NULL,
            embedding F32_BLOB(${dimension}),
            metadata TEXT DEFAULT '{}'
          );
        `,
        args: []
      });
      await this.turso.execute({
        sql: `
          CREATE INDEX IF NOT EXISTS ${indexName}_vector_idx
          ON ${indexName} (libsql_vector_idx(embedding))
        `,
        args: []
      });
    } catch (error) {
      console.error("Failed to create vector table:", error);
      throw error;
    } finally {
      // (empty finally — compiled-output artifact)
    }
  }
  // Drops the index table entirely (data and vector index with it).
  async deleteIndex(indexName) {
    try {
      await this.turso.execute({
        sql: `DROP TABLE IF EXISTS ${indexName}`,
        args: []
      });
    } catch (error) {
      console.error("Failed to delete vector table:", error);
      throw new Error(`Failed to delete vector table: ${error.message}`);
    } finally {
      // (empty finally — compiled-output artifact)
    }
  }
  // Lists index tables by scanning sqlite_master for F32_BLOB columns in
  // their CREATE TABLE sql.
  async listIndexes() {
    try {
      const vectorTablesQuery = `
        SELECT name FROM sqlite_master
        WHERE type='table'
        AND sql LIKE '%F32_BLOB%';
      `;
      const result = await this.turso.execute({
        sql: vectorTablesQuery,
        args: []
      });
      return result.rows.map((row) => row.name);
    } catch (error) {
      throw new Error(`Failed to list vector tables: ${error.message}`);
    }
  }
  // Reports the index's dimension (parsed out of its CREATE TABLE sql),
  // row count, and metric (always "cosine" — the only metric query uses).
  async describeIndex(indexName) {
    try {
      const tableInfoQuery = `
        SELECT sql
        FROM sqlite_master
        WHERE type='table'
        AND name = ?;
      `;
      const tableInfo = await this.turso.execute({
        sql: tableInfoQuery,
        args: [indexName]
      });
      if (!tableInfo.rows[0]?.sql) {
        throw new Error(`Table ${indexName} not found`);
      }
      const dimension = parseInt(tableInfo.rows[0].sql.match(/F32_BLOB\((\d+)\)/)?.[1] || "0");
      const countQuery = `
        SELECT COUNT(*) as count
        FROM ${indexName};
      `;
      const countResult = await this.turso.execute({
        sql: countQuery,
        args: []
      });
      const metric = "cosine";
      return {
        dimension,
        count: countResult?.rows?.[0]?.count ?? 0,
        metric
      };
    } catch (e) {
      throw new Error(`Failed to describe vector table: ${e.message}`);
    }
  }
  /**
   * Updates an index entry by its ID with the provided vector and/or metadata.
   *
   * @param indexName - The name of the index to update.
   * @param id - The ID of the index entry to update.
   * @param update - An object containing the vector and/or metadata to update.
   * @param update.vector - An optional array of numbers representing the new vector.
   * @param update.metadata - An optional record containing the new metadata.
   * @returns A promise that resolves when the update is complete.
   * @throws Will throw an error if no updates are provided or if the update operation fails.
   */
  async updateIndexById(indexName, id, update) {
    try {
      const updates = [];
      const args = [];
      if (update.vector) {
        updates.push("embedding = vector32(?)");
        args.push(JSON.stringify(update.vector));
      }
      if (update.metadata) {
        updates.push("metadata = ?");
        args.push(JSON.stringify(update.metadata));
      }
      if (updates.length === 0) {
        throw new Error("No updates provided");
      }
      // id is bound last, matching the WHERE placeholder.
      args.push(id);
      const query = `
        UPDATE ${indexName}
        SET ${updates.join(", ")}
        WHERE vector_id = ?;
      `;
      await this.turso.execute({
        sql: query,
        args
      });
    } catch (error) {
      throw new Error(`Failed to update index by id: ${id} for index: ${indexName}: ${error.message}`);
    }
  }
  // Deletes a single entry by its vector_id.
  async deleteIndexById(indexName, id) {
    try {
      await this.turso.execute({
        sql: `DELETE FROM ${indexName} WHERE vector_id = ?`,
        args: [id]
      });
    } catch (error) {
      throw new Error(`Failed to delete index by id: ${id} for index: ${indexName}: ${error.message}`);
    }
  }
  // Removes every row from the index table but keeps the table itself.
  async truncateIndex(indexName) {
    await this.turso.execute({
      sql: `DELETE FROM ${indexName}`,
      args: []
    });
  }
};
676
/** Parses a JSON string, falling back to an empty object on malformed input. */
function safelyParseJSON(jsonString) {
  let parsed;
  try {
    parsed = JSON.parse(jsonString);
  } catch {
    parsed = {};
  }
  return parsed;
}
683
+ var LibSQLStore = class extends storage.MastraStorage {
684
+ client;
685
// config is handed straight to @libsql/client's createClient ({ url, ... }).
// For in-memory databases, init caching is disabled since nothing persists.
// NOTE(review): `shouldCacheInit` is presumably a MastraStorage base-class
// flag — confirm its default and semantics against @mastra/core.
constructor(config) {
  super({ name: `LibSQLStore` });
  if (config.url.endsWith(":memory:")) {
    this.shouldCacheInit = false;
  }
  this.client = client.createClient(config);
}
692
+ getCreateTableSQL(tableName, schema) {
693
+ const columns = Object.entries(schema).map(([name, col]) => {
694
+ let type = col.type.toUpperCase();
695
+ if (type === "TEXT") type = "TEXT";
696
+ if (type === "TIMESTAMP") type = "TEXT";
697
+ const nullable = col.nullable ? "" : "NOT NULL";
698
+ const primaryKey = col.primaryKey ? "PRIMARY KEY" : "";
699
+ return `${name} ${type} ${nullable} ${primaryKey}`.trim();
700
+ });
701
+ if (tableName === storage.TABLE_WORKFLOW_SNAPSHOT) {
702
+ const stmnt = `CREATE TABLE IF NOT EXISTS ${tableName} (
703
+ ${columns.join(",\n")},
704
+ PRIMARY KEY (workflow_name, run_id)
705
+ )`;
706
+ return stmnt;
707
+ }
708
+ return `CREATE TABLE IF NOT EXISTS ${tableName} (${columns.join(", ")})`;
709
+ }
710
// Creates the table if missing; failures are logged and rethrown.
async createTable({
  tableName,
  schema
}) {
  try {
    this.logger.debug(`Creating database table`, { tableName, operation: "schema init" });
    const sql = this.getCreateTableSQL(tableName, schema);
    await this.client.execute(sql);
  } catch (error) {
    this.logger.error(`Error creating table ${tableName}: ${error}`);
    throw error;
  }
}
723
// Deletes all rows from the table. Errors are logged but deliberately
// swallowed (best-effort cleanup) — unlike createTable, which rethrows.
async clearTable({ tableName }) {
  try {
    await this.client.execute(`DELETE FROM ${tableName}`);
  } catch (e) {
    if (e instanceof Error) {
      this.logger.error(e.message);
    }
  }
}
732
+ prepareStatement({ tableName, record }) {
733
+ const columns = Object.keys(record);
734
+ const values = Object.values(record).map((v) => {
735
+ if (typeof v === `undefined`) {
736
+ return null;
737
+ }
738
+ if (v instanceof Date) {
739
+ return v.toISOString();
740
+ }
741
+ return typeof v === "object" ? JSON.stringify(v) : v;
742
+ });
743
+ const placeholders = values.map(() => "?").join(", ");
744
+ return {
745
+ sql: `INSERT OR REPLACE INTO ${tableName} (${columns.join(", ")}) VALUES (${placeholders})`,
746
+ args: values
747
+ };
748
+ }
749
// Upserts a single record via INSERT OR REPLACE; logs and rethrows failures.
async insert({ tableName, record }) {
  try {
    await this.client.execute(
      this.prepareStatement({
        tableName,
        record
      })
    );
  } catch (error) {
    this.logger.error(`Error upserting into table ${tableName}: ${error}`);
    throw error;
  }
}
762
// Upserts many records in one batched write transaction; no-op when empty.
async batchInsert({ tableName, records }) {
  if (records.length === 0) return;
  try {
    const batchStatements = records.map((r) => this.prepareStatement({ tableName, record: r }));
    await this.client.batch(batchStatements, "write");
  } catch (error) {
    this.logger.error(`Error upserting into table ${tableName}: ${error}`);
    throw error;
  }
}
772
// Loads the single most recent row (by createdAt) matching every key with
// exact equality. String columns that look like JSON ('{' or '[' prefix)
// are parsed; parse failures fall back to the raw string. Returns null when
// no row matches.
async load({ tableName, keys }) {
  const conditions = Object.entries(keys).map(([key]) => `${key} = ?`).join(" AND ");
  const values = Object.values(keys);
  const result = await this.client.execute({
    sql: `SELECT * FROM ${tableName} WHERE ${conditions} ORDER BY createdAt DESC LIMIT 1`,
    args: values
  });
  if (!result.rows || result.rows.length === 0) {
    return null;
  }
  const row = result.rows[0];
  const parsed = Object.fromEntries(
    Object.entries(row || {}).map(([k, v]) => {
      try {
        return [k, typeof v === "string" ? v.startsWith("{") || v.startsWith("[") ? JSON.parse(v) : v : v];
      } catch {
        // Looked like JSON but wasn't — keep the raw string.
        return [k, v];
      }
    })
  );
  return parsed;
}
794
// Fetches one thread by id, decoding its metadata JSON; null when absent.
async getThreadById({ threadId }) {
  const result = await this.load({
    tableName: storage.TABLE_THREADS,
    keys: { id: threadId }
  });
  if (!result) {
    return null;
  }
  return {
    ...result,
    // load() may already have parsed metadata; only parse if still a string.
    metadata: typeof result.metadata === "string" ? JSON.parse(result.metadata) : result.metadata
  };
}
807
// Lists every thread belonging to a resource, decoding metadata JSON per row.
async getThreadsByResourceId({ resourceId }) {
  const result = await this.client.execute({
    sql: `SELECT * FROM ${storage.TABLE_THREADS} WHERE resourceId = ?`,
    args: [resourceId]
  });
  if (!result.rows) {
    return [];
  }
  return result.rows.map((thread) => ({
    id: thread.id,
    resourceId: thread.resourceId,
    title: thread.title,
    createdAt: thread.createdAt,
    updatedAt: thread.updatedAt,
    metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata
  }));
}
824
// Upserts the thread (metadata stored as JSON text) and echoes it back.
async saveThread({ thread }) {
  await this.insert({
    tableName: storage.TABLE_THREADS,
    record: {
      ...thread,
      metadata: JSON.stringify(thread.metadata)
    }
  });
  return thread;
}
834
// Merges the given metadata over the stored thread's metadata, sets the new
// title, persists both, and returns the merged thread. Throws if the thread
// does not exist.
// NOTE(review): title is written unconditionally — an undefined title would
// overwrite the stored one; confirm callers always provide it.
async updateThread({
  id,
  title,
  metadata
}) {
  const thread = await this.getThreadById({ threadId: id });
  if (!thread) {
    throw new Error(`Thread ${id} not found`);
  }
  const updatedThread = {
    ...thread,
    title,
    metadata: {
      ...thread.metadata,
      ...metadata
    }
  };
  await this.client.execute({
    sql: `UPDATE ${storage.TABLE_THREADS} SET title = ?, metadata = ? WHERE id = ?`,
    args: [title, JSON.stringify(updatedThread.metadata), id]
  });
  return updatedThread;
}
857
// Deletes the thread row by id.
// NOTE(review): associated messages are not deleted here — confirm cascade
// or cleanup happens elsewhere.
async deleteThread({ threadId }) {
  await this.client.execute({
    sql: `DELETE FROM ${storage.TABLE_THREADS} WHERE id = ?`,
    args: [threadId]
  });
}
863
+ parseRow(row) {
864
+ let content = row.content;
865
+ try {
866
+ content = JSON.parse(row.content);
867
+ } catch {
868
+ }
869
+ return {
870
+ id: row.id,
871
+ content,
872
+ role: row.role,
873
+ type: row.type,
874
+ createdAt: new Date(row.createdAt),
875
+ threadId: row.thread_id
876
+ };
877
+ }
878
// Fetches messages for a thread in two phases. Phase 1 (only when
// selectBy.include is present): numbers the thread's messages with
// ROW_NUMBER, then pulls each included id plus a surrounding window of
// max(withPreviousMessages) before and max(withNextMessages) after every
// target. Phase 2: fills up to `last` (default 40) of the newest remaining
// messages, excluding phase-1 ids. The union is returned sorted ascending
// by createdAt.
async getMessages({ threadId, selectBy }) {
  try {
    const messages = [];
    const limit = typeof selectBy?.last === `number` ? selectBy.last : 40;
    if (selectBy?.include?.length) {
      const includeIds = selectBy.include.map((i) => i.id);
      // The window size is the max over all include entries, applied to each.
      const maxPrev = Math.max(...selectBy.include.map((i) => i.withPreviousMessages || 0));
      const maxNext = Math.max(...selectBy.include.map((i) => i.withNextMessages || 0));
      const includeResult = await this.client.execute({
        sql: `
          WITH numbered_messages AS (
            SELECT
              id,
              content,
              role,
              type,
              "createdAt",
              thread_id,
              ROW_NUMBER() OVER (ORDER BY "createdAt" ASC) as row_num
            FROM "${storage.TABLE_MESSAGES}"
            WHERE thread_id = ?
          ),
          target_positions AS (
            SELECT row_num as target_pos
            FROM numbered_messages
            WHERE id IN (${includeIds.map(() => "?").join(", ")})
          )
          SELECT DISTINCT m.*
          FROM numbered_messages m
          CROSS JOIN target_positions t
          WHERE m.row_num BETWEEN (t.target_pos - ?) AND (t.target_pos + ?)
          ORDER BY m."createdAt" ASC
        `,
        args: [threadId, ...includeIds, maxPrev, maxNext]
      });
      if (includeResult.rows) {
        messages.push(...includeResult.rows.map((row) => this.parseRow(row)));
      }
    }
    // Phase 2: newest `limit` messages not already fetched above.
    const excludeIds = messages.map((m) => m.id);
    const remainingSql = `
      SELECT
        id,
        content,
        role,
        type,
        "createdAt",
        thread_id
      FROM "${storage.TABLE_MESSAGES}"
      WHERE thread_id = ?
      ${excludeIds.length ? `AND id NOT IN (${excludeIds.map(() => "?").join(", ")})` : ""}
      ORDER BY "createdAt" DESC
      LIMIT ?
    `;
    const remainingArgs = [threadId, ...excludeIds.length ? excludeIds : [], limit];
    const remainingResult = await this.client.execute({
      sql: remainingSql,
      args: remainingArgs
    });
    if (remainingResult.rows) {
      messages.push(...remainingResult.rows.map((row) => this.parseRow(row)));
    }
    // Final chronological order regardless of which phase fetched a row.
    messages.sort((a, b) => a.createdAt.getTime() - b.createdAt.getTime());
    return messages;
  } catch (error) {
    this.logger.error("Error getting messages:", error);
    throw error;
  }
}
947
// Inserts all messages in one batched write. Every message is stored under
// the thread id of the FIRST message; createdAt defaults to now and is
// stored as an ISO string.
// NOTE(review): plain INSERT (not OR REPLACE) — re-saving an existing
// message id would fail the batch; confirm intended.
async saveMessages({ messages }) {
  if (messages.length === 0) return messages;
  try {
    const threadId = messages[0]?.threadId;
    if (!threadId) {
      throw new Error("Thread ID is required");
    }
    const batchStatements = messages.map((message) => {
      const time = message.createdAt || /* @__PURE__ */ new Date();
      return {
        sql: `INSERT INTO ${storage.TABLE_MESSAGES} (id, thread_id, content, role, type, createdAt)
              VALUES (?, ?, ?, ?, ?, ?)`,
        args: [
          message.id,
          threadId,
          typeof message.content === "object" ? JSON.stringify(message.content) : message.content,
          message.role,
          message.type,
          time instanceof Date ? time.toISOString() : time
        ]
      };
    });
    await this.client.batch(batchStatements, "write");
    return messages;
  } catch (error) {
    this.logger.error("Failed to save messages in database: " + error?.message);
    throw error;
  }
}
976
+ transformEvalRow(row) {
977
+ const resultValue = JSON.parse(row.result);
978
+ const testInfoValue = row.test_info ? JSON.parse(row.test_info) : void 0;
979
+ if (!resultValue || typeof resultValue !== "object" || !("score" in resultValue)) {
980
+ throw new Error(`Invalid MetricResult format: ${JSON.stringify(resultValue)}`);
981
+ }
982
+ return {
983
+ input: row.input,
984
+ output: row.output,
985
+ result: resultValue,
986
+ agentName: row.agent_name,
987
+ metricName: row.metric_name,
988
+ instructions: row.instructions,
989
+ testInfo: testInfoValue,
990
+ globalRunId: row.global_run_id,
991
+ runId: row.run_id,
992
+ createdAt: row.created_at
993
+ };
994
+ }
995
+ async getEvalsByAgentName(agentName, type) {
996
+ try {
997
+ const baseQuery = `SELECT * FROM ${storage.TABLE_EVALS} WHERE agent_name = ?`;
998
+ const typeCondition = type === "test" ? " AND test_info IS NOT NULL AND test_info->>'testPath' IS NOT NULL" : type === "live" ? " AND (test_info IS NULL OR test_info->>'testPath' IS NULL)" : "";
999
+ const result = await this.client.execute({
1000
+ sql: `${baseQuery}${typeCondition} ORDER BY created_at DESC`,
1001
+ args: [agentName]
1002
+ });
1003
+ return result.rows?.map((row) => this.transformEvalRow(row)) ?? [];
1004
+ } catch (error) {
1005
+ if (error instanceof Error && error.message.includes("no such table")) {
1006
+ return [];
1007
+ }
1008
+ this.logger.error("Failed to get evals for the specified agent: " + error?.message);
1009
+ throw error;
1010
+ }
1011
+ }
1012
+ // TODO: add types
1013
+ async getTraces({
1014
+ name,
1015
+ scope,
1016
+ page,
1017
+ perPage,
1018
+ attributes,
1019
+ filters
1020
+ } = {
1021
+ page: 0,
1022
+ perPage: 100
1023
+ }) {
1024
+ const limit = perPage;
1025
+ const offset = page * perPage;
1026
+ const args = [];
1027
+ const conditions = [];
1028
+ if (name) {
1029
+ conditions.push("name LIKE CONCAT(?, '%')");
1030
+ }
1031
+ if (scope) {
1032
+ conditions.push("scope = ?");
1033
+ }
1034
+ if (attributes) {
1035
+ Object.keys(attributes).forEach((key) => {
1036
+ conditions.push(`attributes->>'$.${key}' = ?`);
1037
+ });
1038
+ }
1039
+ if (filters) {
1040
+ Object.entries(filters).forEach(([key, _value]) => {
1041
+ conditions.push(`${key} = ?`);
1042
+ });
1043
+ }
1044
+ const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
1045
+ if (name) {
1046
+ args.push(name);
1047
+ }
1048
+ if (scope) {
1049
+ args.push(scope);
1050
+ }
1051
+ if (attributes) {
1052
+ for (const [, value] of Object.entries(attributes)) {
1053
+ args.push(value);
1054
+ }
1055
+ }
1056
+ if (filters) {
1057
+ for (const [, value] of Object.entries(filters)) {
1058
+ args.push(value);
1059
+ }
1060
+ }
1061
+ args.push(limit, offset);
1062
+ const result = await this.client.execute({
1063
+ sql: `SELECT * FROM ${storage.TABLE_TRACES} ${whereClause} ORDER BY "startTime" DESC LIMIT ? OFFSET ?`,
1064
+ args
1065
+ });
1066
+ if (!result.rows) {
1067
+ return [];
1068
+ }
1069
+ return result.rows.map((row) => ({
1070
+ id: row.id,
1071
+ parentSpanId: row.parentSpanId,
1072
+ traceId: row.traceId,
1073
+ name: row.name,
1074
+ scope: row.scope,
1075
+ kind: row.kind,
1076
+ status: safelyParseJSON(row.status),
1077
+ events: safelyParseJSON(row.events),
1078
+ links: safelyParseJSON(row.links),
1079
+ attributes: safelyParseJSON(row.attributes),
1080
+ startTime: row.startTime,
1081
+ endTime: row.endTime,
1082
+ other: safelyParseJSON(row.other),
1083
+ createdAt: row.createdAt
1084
+ }));
1085
+ }
1086
+ async getWorkflowRuns({
1087
+ workflowName,
1088
+ fromDate,
1089
+ toDate,
1090
+ limit,
1091
+ offset
1092
+ } = {}) {
1093
+ const conditions = [];
1094
+ const args = [];
1095
+ if (workflowName) {
1096
+ conditions.push("workflow_name = ?");
1097
+ args.push(workflowName);
1098
+ }
1099
+ if (fromDate) {
1100
+ conditions.push("createdAt >= ?");
1101
+ args.push(fromDate.toISOString());
1102
+ }
1103
+ if (toDate) {
1104
+ conditions.push("createdAt <= ?");
1105
+ args.push(toDate.toISOString());
1106
+ }
1107
+ const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
1108
+ let total = 0;
1109
+ if (limit !== void 0 && offset !== void 0) {
1110
+ const countResult = await this.client.execute({
1111
+ sql: `SELECT COUNT(*) as count FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} ${whereClause}`,
1112
+ args
1113
+ });
1114
+ total = Number(countResult.rows?.[0]?.count ?? 0);
1115
+ }
1116
+ const result = await this.client.execute({
1117
+ sql: `SELECT * FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} ${whereClause} ORDER BY createdAt DESC${limit !== void 0 && offset !== void 0 ? ` LIMIT ? OFFSET ?` : ""}`,
1118
+ args: limit !== void 0 && offset !== void 0 ? [...args, limit, offset] : args
1119
+ });
1120
+ const runs = (result.rows || []).map((row) => {
1121
+ let parsedSnapshot = row.snapshot;
1122
+ if (typeof parsedSnapshot === "string") {
1123
+ try {
1124
+ parsedSnapshot = JSON.parse(row.snapshot);
1125
+ } catch (e) {
1126
+ console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
1127
+ }
1128
+ }
1129
+ return {
1130
+ workflowName: row.workflow_name,
1131
+ runId: row.run_id,
1132
+ snapshot: parsedSnapshot,
1133
+ createdAt: new Date(row.createdAt),
1134
+ updatedAt: new Date(row.updatedAt)
1135
+ };
1136
+ });
1137
+ return { runs, total: total || runs.length };
1138
+ }
1139
+ };
1140
// Public package surface. `DefaultStorage` is exported as an alias of
// `LibSQLStore` — presumably a backwards-compatible name for earlier
// consumers; confirm against the package changelog before removing.
exports.DefaultStorage = LibSQLStore;
exports.LibSQLStore = LibSQLStore;
exports.LibSQLVector = LibSQLVector;