@mastra/libsql 0.0.1-alpha.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +23 -0
- package/CHANGELOG.md +15 -0
- package/LICENSE.md +7 -0
- package/README.md +144 -0
- package/dist/_tsup-dts-rollup.d.cts +192 -0
- package/dist/_tsup-dts-rollup.d.ts +192 -0
- package/dist/index.cjs +1143 -0
- package/dist/index.d.cts +4 -0
- package/dist/index.d.ts +4 -0
- package/dist/index.js +1139 -0
- package/eslint.config.js +6 -0
- package/package.json +42 -0
- package/src/index.ts +2 -0
- package/src/storage/index.test.ts +15 -0
- package/src/storage/index.ts +624 -0
- package/src/vector/filter.test.ts +968 -0
- package/src/vector/filter.ts +117 -0
- package/src/vector/index.test.ts +1702 -0
- package/src/vector/index.ts +344 -0
- package/src/vector/sql-builder.ts +462 -0
- package/tsconfig.json +5 -0
- package/vitest.config.ts +11 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,1139 @@
|
|
|
1
|
+
import { createClient } from '@libsql/client';
|
|
2
|
+
import { MastraVector } from '@mastra/core/vector';
|
|
3
|
+
import { BaseFilterTranslator } from '@mastra/core/vector/filter';
|
|
4
|
+
import { MastraStorage, TABLE_WORKFLOW_SNAPSHOT, TABLE_THREADS, TABLE_MESSAGES, TABLE_EVALS, TABLE_TRACES } from '@mastra/core/storage';
|
|
5
|
+
|
|
6
|
+
// src/vector/index.ts
|
|
7
|
+
// src/vector/index.ts
// Translates Mongo-style filter objects into the canonical form consumed by
// the LibSQL SQL builder below. The base class (from @mastra/core) supplies
// the operator classifiers (isOperator, isLogicalOperator, ...) and the
// value normalizers used here.
var LibSQLFilterTranslator = class extends BaseFilterTranslator {
  // Advertise backend capabilities: the shared default operator set, no
  // regex support, plus the LibSQL-specific $contains and $size.
  getSupportedOperators() {
    return {
      ...BaseFilterTranslator.DEFAULT_OPERATORS,
      regex: [],
      custom: ["$contains", "$size"]
    };
  }
  // Entry point: empty filters pass through untouched; everything else is
  // validated and then recursively rewritten.
  translate(filter) {
    if (this.isEmpty(filter)) {
      return filter;
    }
    this.validateFilter(filter);
    return this.translateNode(filter);
  }
  // Recursively normalizes one node. `currentPath` accumulates the dotted
  // metadata path while descending through nested plain objects, so
  // { a: { b: 1 } } flattens to { "a.b": { $eq: 1 } }.
  translateNode(node, currentPath = "") {
    if (this.isRegex(node)) {
      throw new Error("Direct regex pattern format is not supported in LibSQL");
    }
    // Re-attach the accumulated path when one exists.
    const withPath = (result2) => currentPath ? { [currentPath]: result2 } : result2;
    if (this.isPrimitive(node)) {
      // Bare primitive value => implicit equality.
      return withPath({ $eq: this.normalizeComparisonValue(node) });
    }
    if (Array.isArray(node)) {
      // Bare array value => implicit $in.
      return withPath({ $in: this.normalizeArrayValues(node) });
    }
    const entries = Object.entries(node);
    const result = {};
    for (const [key, value] of entries) {
      const newPath = currentPath ? `${currentPath}.${key}` : key;
      if (this.isLogicalOperator(key)) {
        // $and/$or/...: translate each sub-filter with a fresh path prefix.
        result[key] = Array.isArray(value) ? value.map((filter) => this.translateNode(filter)) : this.translateNode(value);
      } else if (this.isOperator(key)) {
        if (this.isArrayOperator(key) && !Array.isArray(value) && key !== "$elemMatch") {
          // Array operator ($in/$nin/$all) given a scalar: coerce to [value].
          result[key] = [value];
        } else if (this.isBasicOperator(key) && Array.isArray(value)) {
          // $eq/$ne against an array: compare on the JSON-serialized form,
          // matching how arrays are stored in the metadata column.
          result[key] = JSON.stringify(value);
        } else {
          result[key] = value;
        }
      } else if (typeof value === "object" && value !== null) {
        const hasOperators = Object.keys(value).some((k) => this.isOperator(k));
        if (hasOperators) {
          // Field mapped to an operator object: keep the flattened path key.
          result[newPath] = this.translateNode(value);
        } else {
          // Plain nested object: flatten its leaves into dotted paths.
          Object.assign(result, this.translateNode(value, newPath));
        }
      } else {
        // Leaf primitive under a field name: delegates to the primitive case
        // above, producing { $eq: ... }.
        result[newPath] = this.translateNode(value);
      }
    }
    return result;
  }
  // TODO: Look more into regex support for LibSQL
  // private translateRegexPattern(pattern: string, options: string = ''): any {
  //   if (!options) return { $regex: pattern };
  //   const flags = options
  //     .split('')
  //     .filter(f => 'imsux'.includes(f))
  //     .join('');
  //   return {
  //     $regex: pattern,
  //     $options: flags,
  //   };
  // }
};
|
|
73
|
+
|
|
74
|
+
// src/vector/sql-builder.ts
|
|
75
|
+
// src/vector/sql-builder.ts
/**
 * Builds a factory for the "=" / "!=" comparison operators ($eq / $ne).
 * SQL's `col = NULL` is never true, so a NULL operand is rewritten at
 * runtime (via the CASE on the first placeholder) into an IS [NOT] NULL
 * check instead of a direct comparison.
 */
var createBasicOperator = (symbol) => {
  return (key) => {
    const column = `json_extract(metadata, '$."${handleKey(key)}"')`;
    const nullNegation = symbol === "=" ? "" : "NOT";
    return {
      sql: `CASE
      WHEN ? IS NULL THEN ${column} IS ${nullNegation} NULL
      ELSE ${column} ${symbol} ?
    END`,
      needsValue: true,
      // The same value binds both placeholders: the NULL probe and the
      // actual comparison operand.
      transformValue: (value) => [value, value]
    };
  };
};
|
|
87
|
+
/**
 * Builds a factory for the numeric comparison operators ($gt/$gte/$lt/$lte).
 * The extracted JSON value is CAST to NUMERIC so string-encoded numbers
 * still compare numerically rather than lexicographically.
 */
var createNumericOperator = (symbol) => (key) => ({
  sql: `CAST(json_extract(metadata, '$."${handleKey(key)}"') AS NUMERIC) ${symbol} ?`,
  needsValue: true
});
|
|
93
|
+
// SQL fragment asserting that the metadata value at `key` is valid JSON and
// is typed as an array — used to guard json_each() over the value.
var validateJsonArray = (key) => {
  const extracted = `json_extract(metadata, '$."${handleKey(key)}"')`;
  return `json_valid(${extracted})
        AND json_type(${extracted}) = 'array'`;
};
|
|
95
|
+
// Table mapping each filter operator to a builder that produces
// { sql, needsValue, transformValue? }. `transformValue` may either rewrite
// the values to bind, or return a complete { sql, values } replacement
// fragment (consumed by processOperator below).
var FILTER_OPERATORS = {
  $eq: createBasicOperator("="),
  $ne: createBasicOperator("!="),
  $gt: createNumericOperator(">"),
  $gte: createNumericOperator(">="),
  $lt: createNumericOperator("<"),
  $lte: createNumericOperator("<="),
  // Array Operators
  // NOTE(review): an empty `value` array yields `IN ()`, which is not valid
  // SQLite — confirm empty arrays are rejected upstream (e.g. by the
  // translator's validation).
  $in: (key, value) => ({
    sql: `json_extract(metadata, '$."${handleKey(key)}"') IN (${value.map(() => "?").join(",")})`,
    needsValue: true
  }),
  $nin: (key, value) => ({
    sql: `json_extract(metadata, '$."${handleKey(key)}"') NOT IN (${value.map(() => "?").join(",")})`,
    needsValue: true
  }),
  // $all: the stored array must contain every element of the query array.
  // Implemented as: no element of json_each(query) is missing from the
  // stored array. Non-arrays in metadata fail via the validateJsonArray CASE.
  $all: (key) => ({
    sql: `json_extract(metadata, '$."${handleKey(key)}"') = ?`,
    needsValue: true,
    transformValue: (value) => {
      const arrayValue = Array.isArray(value) ? value : [value];
      if (arrayValue.length === 0) {
        // $all of an empty set matches nothing here.
        return {
          sql: "1 = 0",
          values: []
        };
      }
      return {
        sql: `(
          CASE
            WHEN ${validateJsonArray(key)} THEN
              NOT EXISTS (
                SELECT value
                FROM json_each(?)
                WHERE value NOT IN (
                  SELECT value
                  FROM json_each(json_extract(metadata, '$."${handleKey(key)}"'))
                )
              )
            ELSE FALSE
          END
        )`,
        values: [JSON.stringify(arrayValue)]
      };
    }
  }),
  // $elemMatch: at least one element of the stored array satisfies every
  // condition. Sub-conditions are built with buildCondition and their
  // json_extract(metadata, ...) references are rewritten to address the
  // json_each element (`elem.value`) instead.
  $elemMatch: (key) => ({
    sql: `json_extract(metadata, '$."${handleKey(key)}"') = ?`,
    needsValue: true,
    transformValue: (value) => {
      if (typeof value !== "object" || Array.isArray(value)) {
        throw new Error("$elemMatch requires an object with conditions");
      }
      const conditions = Object.entries(value).map(([field, fieldValue]) => {
        if (field.startsWith("$")) {
          // Operator applied directly to the element, e.g. { $gt: 5 }.
          const { sql, values } = buildCondition("elem.value", { [field]: fieldValue });
          const pattern = /json_extract\(metadata, '\$\."[^"]*"(\."[^"]*")*'\)/g;
          const elemSql = sql.replace(pattern, "elem.value");
          return { sql: elemSql, values };
        } else if (typeof fieldValue === "object" && !Array.isArray(fieldValue)) {
          // Nested operator object on a field of an object element.
          const { sql, values } = buildCondition(field, fieldValue);
          const pattern = /json_extract\(metadata, '\$\."[^"]*"(\."[^"]*")*'\)/g;
          const elemSql = sql.replace(pattern, `json_extract(elem.value, '$."${field}"')`);
          return { sql: elemSql, values };
        } else {
          // Plain field equality on an object element.
          return {
            sql: `json_extract(elem.value, '$."${field}"') = ?`,
            values: [fieldValue]
          };
        }
      });
      return {
        sql: `(
          CASE
            WHEN ${validateJsonArray(key)} THEN
              EXISTS (
                SELECT 1
                FROM json_each(json_extract(metadata, '$."${handleKey(key)}"')) as elem
                WHERE ${conditions.map((c) => c.sql).join(" AND ")}
              )
            ELSE FALSE
          END
        )`,
        values: conditions.flatMap((c) => c.values)
      };
    }
  }),
  // Element Operators
  // NOTE(review): the operand of $exists is ignored — `$exists: false`
  // produces the same IS NOT NULL check as `$exists: true`; confirm intended.
  $exists: (key) => ({
    sql: `json_extract(metadata, '$."${handleKey(key)}"') IS NOT NULL`,
    needsValue: false
  }),
  // Logical Operators — `key` here is the already-joined sub-clause SQL,
  // not a metadata path (see handleLogicalOperator).
  $and: (key) => ({
    sql: `(${key})`,
    needsValue: false
  }),
  $or: (key) => ({
    sql: `(${key})`,
    needsValue: false
  }),
  $not: (key) => ({ sql: `NOT (${key})`, needsValue: false }),
  $nor: (key) => ({
    sql: `NOT (${key})`,
    needsValue: false
  }),
  // $size: array length equality. NOTE(review): uses a `$${paramIndex}`
  // numbered placeholder while every other operator uses `?`, and callers do
  // not pass paramIndex — confirm this binds correctly at runtime.
  $size: (key, paramIndex) => ({
    sql: `(
      CASE
        WHEN json_type(json_extract(metadata, '$."${handleKey(key)}"')) = 'array' THEN
          json_array_length(json_extract(metadata, '$."${handleKey(key)}"')) = $${paramIndex}
        ELSE FALSE
      END
    )`,
    needsValue: true
  }),
  // /**
  //  * Regex Operators
  //  * Supports case insensitive and multiline
  //  */
  // $regex: (key: string): FilterOperator => ({
  //   sql: `json_extract(metadata, '$."${handleKey(key)}"') = ?`,
  //   needsValue: true,
  //   transformValue: (value: any) => {
  //     const pattern = typeof value === 'object' ? value.$regex : value;
  //     const options = typeof value === 'object' ? value.$options || '' : '';
  //     let sql = `json_extract(metadata, '$."${handleKey(key)}"')`;
  //     // Handle multiline
  //     // if (options.includes('m')) {
  //     //   sql = `REPLACE(${sql}, CHAR(10), '\n')`;
  //     // }
  //     // let finalPattern = pattern;
  //     // if (options) {
  //     //   finalPattern = `(\\?${options})${pattern}`;
  //     // }
  //     // // Handle case insensitivity
  //     // if (options.includes('i')) {
  //     //   sql = `LOWER(${sql}) REGEXP LOWER(?)`;
  //     // } else {
  //     //   sql = `${sql} REGEXP ?`;
  //     // }
  //     if (options.includes('m')) {
  //       sql = `EXISTS (
  //         SELECT 1
  //         FROM json_each(
  //           json_array(
  //             ${sql},
  //             REPLACE(${sql}, CHAR(10), CHAR(13))
  //           )
  //         ) as lines
  //         WHERE lines.value REGEXP ?
  //       )`;
  //     } else {
  //       sql = `${sql} REGEXP ?`;
  //     }
  //     // Handle case insensitivity
  //     if (options.includes('i')) {
  //       sql = sql.replace('REGEXP ?', 'REGEXP LOWER(?)');
  //       sql = sql.replace('value REGEXP', 'LOWER(value) REGEXP');
  //     }
  //     // Handle extended - allows whitespace and comments in pattern
  //     if (options.includes('x')) {
  //       // Remove whitespace and comments from pattern
  //       const cleanPattern = pattern.replace(/\s+|#.*$/gm, '');
  //       return {
  //         sql,
  //         values: [cleanPattern],
  //       };
  //     }
  //     return {
  //       sql,
  //       values: [pattern],
  //     };
  //   },
  // }),
  // $contains: array operand => set intersection with the stored array;
  // object operand => every flattened leaf path must match by equality;
  // scalar operand => plain equality via the default sql above.
  $contains: (key) => ({
    sql: `json_extract(metadata, '$."${handleKey(key)}"') = ?`,
    needsValue: true,
    transformValue: (value) => {
      if (Array.isArray(value)) {
        return {
          sql: `(
            SELECT ${validateJsonArray(key)}
            AND EXISTS (
              SELECT 1
              FROM json_each(json_extract(metadata, '$."${handleKey(key)}"')) as m
              WHERE m.value IN (SELECT value FROM json_each(?))
            )
          )`,
          values: [JSON.stringify(value)]
        };
      }
      if (value && typeof value === "object") {
        // Flatten the nested object into dotted leaf paths + leaf values.
        let traverse2 = function(obj, path = []) {
          for (const [k, v] of Object.entries(obj)) {
            const currentPath = [...path, k];
            if (v && typeof v === "object" && !Array.isArray(v)) {
              traverse2(v, currentPath);
            } else {
              paths.push(currentPath.join("."));
              values.push(v);
            }
          }
        };
        const paths = [];
        const values = [];
        traverse2(value);
        return {
          sql: `(${paths.map((path) => `json_extract(metadata, '$."${handleKey(key)}"."${path}"') = ?`).join(" AND ")})`,
          values
        };
      }
      // Scalar: bind as-is against the default equality SQL.
      return value;
    }
  })
};
|
|
311
|
+
// Converts a dotted metadata path ("a.b.c") into the quoted-segment form
// used inside json_extract path expressions: a"."b"."c (callers wrap the
// result in '$."..."').
var handleKey = (key) => key.split(".").join('"."');
|
|
314
|
+
/**
 * Converts a translated filter object into a WHERE clause plus its ordered
 * positional parameter values. A missing/empty filter yields an empty
 * clause so callers can always interpolate the result.
 */
function buildFilterQuery(filter) {
  if (!filter) {
    return { sql: "", values: [] };
  }
  const values = [];
  const clauses = [];
  for (const [key, value] of Object.entries(filter)) {
    const condition = buildCondition(key, value);
    values.push(...condition.values);
    clauses.push(condition.sql);
  }
  const conditions = clauses.join(" AND ");
  return {
    sql: conditions ? `WHERE ${conditions}` : "",
    values
  };
}
|
|
329
|
+
/**
 * Builds the { sql, values } fragment for a single filter entry.
 * Logical operators dispatch to handleLogicalOperator; bare primitives
 * become direct json_extract equality checks; operator objects go through
 * handleOperator. (`parentPath` is kept for signature compatibility but is
 * not used.)
 */
function buildCondition(key, value, parentPath) {
  const isLogical = key === "$and" || key === "$or" || key === "$not" || key === "$nor";
  if (isLogical) {
    return handleLogicalOperator(key, value);
  }
  if (!value || typeof value !== "object") {
    const jsonPath = key.replace(/\./g, '"."');
    return {
      sql: `json_extract(metadata, '$."${jsonPath}"') = ?`,
      values: [value]
    };
  }
  return handleOperator(key, value);
}
|
|
341
|
+
/**
 * Builds SQL for the logical operators $and / $or / $not / $nor.
 * `value` is normally an array of sub-filters ($and/$or/$nor) or an object
 * of conditions ($not). (`parentPath` is unused.)
 */
function handleLogicalOperator(key, value, parentPath) {
  // Empty operand: $and/$nor are vacuously satisfied, $or can never match,
  // and an empty $not is rejected outright.
  if (!value || value.length === 0) {
    switch (key) {
      case "$and":
      case "$nor":
        return { sql: "true", values: [] };
      case "$or":
        return { sql: "false", values: [] };
      case "$not":
        throw new Error("$not operator cannot be empty");
      default:
        return { sql: "true", values: [] };
    }
  }
  if (key === "$not") {
    // $not takes an object of conditions; negate their conjunction.
    const entries = Object.entries(value);
    const conditions2 = entries.map(([fieldKey, fieldValue]) => buildCondition(fieldKey, fieldValue));
    return {
      sql: `NOT (${conditions2.map((c) => c.sql).join(" AND ")})`,
      values: conditions2.flatMap((c) => c.values)
    };
  }
  const values = [];
  // $or / $nor join with OR ($nor is negated below); $and joins with AND.
  const joinOperator = key === "$or" || key === "$nor" ? "OR" : "AND";
  // Each array element may itself hold several key/value conditions; all of
  // them are flattened into one list before joining.
  const conditions = Array.isArray(value) ? value.map((f) => {
    const entries = Object.entries(f);
    return entries.map(([k, v]) => buildCondition(k, v));
  }) : [buildCondition(key, value)];
  const joined = conditions.flat().map((c) => {
    values.push(...c.values);
    return c.sql;
  }).join(` ${joinOperator} `);
  return {
    sql: key === "$nor" ? `NOT (${joined})` : `(${joined})`,
    values
  };
}
|
|
378
|
+
/**
 * Builds SQL for a field whose value is an operator object, e.g.
 * { age: { $gt: 1, $lt: 9 } }. Multiple operators on one field are ANDed.
 * A nested $not inverts the conjunction of its inner operators.
 */
function handleOperator(key, value) {
  if (typeof value === "object" && !Array.isArray(value)) {
    const entries = Object.entries(value);
    const results = entries.map(
      ([operator2, operatorValue2]) => operator2 === "$not" ? {
        // Negate every operator inside the $not object.
        // NOTE(review): processOperator is invoked twice per inner operator
        // (once for sql, once for values) — presumably harmless since the
        // builders are pure; confirm.
        sql: `NOT (${Object.entries(operatorValue2).map(([op, val]) => processOperator(key, op, val).sql).join(" AND ")})`,
        values: Object.entries(operatorValue2).flatMap(
          ([op, val]) => processOperator(key, op, val).values
        )
      } : processOperator(key, operator2, operatorValue2)
    );
    return {
      sql: `(${results.map((r) => r.sql).join(" AND ")})`,
      values: results.flatMap((r) => r.values)
    };
  }
  // Array (or other non-plain-object) value: take its first entry as the
  // single operator/operand pair.
  const [[operator, operatorValue] = []] = Object.entries(value);
  return processOperator(key, operator, operatorValue);
}
|
|
397
|
+
/**
 * Resolves a single `$operator: value` pair against FILTER_OPERATORS and
 * returns the final { sql, values } fragment.
 * @throws Error when the operator is unknown or not $-prefixed.
 */
var processOperator = (key, operator, operatorValue) => {
  const operatorFn = operator.startsWith("$") ? FILTER_OPERATORS[operator] : undefined;
  if (!operatorFn) {
    throw new Error(`Invalid operator: ${operator}`);
  }
  const operatorResult = operatorFn(key, operatorValue);
  if (!operatorResult.needsValue) {
    return { sql: operatorResult.sql, values: [] };
  }
  // transformValue may rewrite the bound values, or return a full
  // { sql, values } replacement fragment that supersedes operatorResult.sql.
  const transformed = operatorResult.transformValue ? operatorResult.transformValue(operatorValue) : operatorValue;
  if (transformed && typeof transformed === "object" && "sql" in transformed) {
    return transformed;
  }
  const boundValues = Array.isArray(transformed) ? transformed : [transformed];
  return {
    sql: operatorResult.sql,
    values: boundValues
  };
};
|
|
415
|
+
|
|
416
|
+
// src/vector/index.ts
|
|
417
|
+
// src/vector/index.ts
// Vector store backed by LibSQL/Turso native vector support: each "index"
// is a table with an F32_BLOB embedding column, queried with
// vector_distance_cos (so score = 1 - cosine distance) and indexed with
// libsql_vector_idx.
var LibSQLVector = class extends MastraVector {
  turso;
  constructor({
    connectionUrl,
    authToken,
    syncUrl,
    syncInterval
  }) {
    super();
    this.turso = createClient({
      url: connectionUrl,
      syncUrl,
      authToken,
      syncInterval
    });
    // Switch local file / in-memory databases to WAL journaling.
    // NOTE(review): fire-and-forget — the PRAGMA may still be in flight when
    // the first query runs; confirm this is acceptable.
    if (connectionUrl.includes(`file:`) || connectionUrl.includes(`:memory:`)) {
      void this.turso.execute({
        sql: "PRAGMA journal_mode=WAL;",
        args: {}
      });
    }
  }
  // Runs a filter through the LibSQL-specific translator before SQL building.
  transformFilter(filter) {
    const translator = new LibSQLFilterTranslator();
    return translator.translate(filter);
  }
  // Returns up to topK rows ranked by cosine similarity to queryVector,
  // optionally metadata-filtered and restricted to score > minScore.
  // NOTE(review): indexName, the vector string, and topK are interpolated
  // directly into the SQL (only filter values and minScore are
  // parameterized) — confirm indexName is validated upstream.
  async query(...args) {
    const params = this.normalizeArgs("query", args, ["minScore"]);
    try {
      const { indexName, queryVector, topK = 10, filter, includeVector = false, minScore = 0 } = params;
      const vectorStr = `[${queryVector.join(",")}]`;
      const translatedFilter = this.transformFilter(filter);
      const { sql: filterQuery, values: filterValues } = buildFilterQuery(translatedFilter);
      // minScore binds the trailing `score > ?` placeholder, after all
      // filter placeholders.
      filterValues.push(minScore);
      const query = `
        WITH vector_scores AS (
          SELECT
            vector_id as id,
            (1-vector_distance_cos(embedding, '${vectorStr}')) as score,
            metadata
            ${includeVector ? ", vector_extract(embedding) as embedding" : ""}
          FROM ${indexName}
          ${filterQuery}
        )
        SELECT *
        FROM vector_scores
        WHERE score > ?
        ORDER BY score DESC
        LIMIT ${topK}`;
      const result = await this.turso.execute({
        sql: query,
        args: filterValues
      });
      return result.rows.map(({ id, score, metadata, embedding }) => ({
        id,
        score,
        metadata: JSON.parse(metadata ?? "{}"),
        // Only attach the vector when requested and actually selected.
        ...includeVector && embedding && { vector: JSON.parse(embedding) }
      }));
    } finally {
      // Intentionally empty.
    }
  }
  // Inserts or updates vectors (and metadata) keyed by vector_id inside a
  // single write transaction. Missing ids are generated as UUIDs.
  async upsert(...args) {
    const params = this.normalizeArgs("upsert", args);
    const { indexName, vectors, metadata, ids } = params;
    const tx = await this.turso.transaction("write");
    try {
      const vectorIds = ids || vectors.map(() => crypto.randomUUID());
      for (let i = 0; i < vectors.length; i++) {
        const query = `
          INSERT INTO ${indexName} (vector_id, embedding, metadata)
          VALUES (?, vector32(?), ?)
          ON CONFLICT(vector_id) DO UPDATE SET
            embedding = vector32(?),
            metadata = ?
        `;
        await tx.execute({
          sql: query,
          // @ts-ignore
          args: [
            vectorIds[i],
            JSON.stringify(vectors[i]),
            JSON.stringify(metadata?.[i] || {}),
            JSON.stringify(vectors[i]),
            JSON.stringify(metadata?.[i] || {})
          ]
        });
      }
      await tx.commit();
      return vectorIds;
    } catch (error) {
      await tx.rollback();
      // Rewrite libsql's dimension-mismatch error into an actionable message.
      if (error instanceof Error && error.message?.includes("dimensions are different")) {
        const match = error.message.match(/dimensions are different: (\d+) != (\d+)/);
        if (match) {
          const [, actual, expected] = match;
          throw new Error(
            `Vector dimension mismatch: Index "${indexName}" expects ${expected} dimensions but got ${actual} dimensions. Either use a matching embedding model or delete and recreate the index with the new dimension.`
          );
        }
      }
      throw error;
    }
  }
  // Creates the backing table (id, vector_id, F32_BLOB embedding, metadata)
  // and a vector index over the embedding column, if not already present.
  async createIndex(...args) {
    const params = this.normalizeArgs("createIndex", args);
    const { indexName, dimension } = params;
    try {
      // Identifier whitelist — the only place indexName is validated before
      // being interpolated into SQL.
      if (!indexName.match(/^[a-zA-Z_][a-zA-Z0-9_]*$/)) {
        throw new Error("Invalid index name format");
      }
      if (!Number.isInteger(dimension) || dimension <= 0) {
        throw new Error("Dimension must be a positive integer");
      }
      await this.turso.execute({
        sql: `
        CREATE TABLE IF NOT EXISTS ${indexName} (
          id SERIAL PRIMARY KEY,
          vector_id TEXT UNIQUE NOT NULL,
          embedding F32_BLOB(${dimension}),
          metadata TEXT DEFAULT '{}'
        );
      `,
        args: []
      });
      await this.turso.execute({
        sql: `
        CREATE INDEX IF NOT EXISTS ${indexName}_vector_idx
        ON ${indexName} (libsql_vector_idx(embedding))
      `,
        args: []
      });
    } catch (error) {
      console.error("Failed to create vector table:", error);
      throw error;
    } finally {
      // Intentionally empty.
    }
  }
  // Drops the backing table for the index (idempotent).
  async deleteIndex(indexName) {
    try {
      await this.turso.execute({
        sql: `DROP TABLE IF EXISTS ${indexName}`,
        args: []
      });
    } catch (error) {
      console.error("Failed to delete vector table:", error);
      throw new Error(`Failed to delete vector table: ${error.message}`);
    } finally {
      // Intentionally empty.
    }
  }
  // Lists index names by finding tables whose DDL mentions F32_BLOB.
  async listIndexes() {
    try {
      const vectorTablesQuery = `
        SELECT name FROM sqlite_master
        WHERE type='table'
        AND sql LIKE '%F32_BLOB%';
      `;
      const result = await this.turso.execute({
        sql: vectorTablesQuery,
        args: []
      });
      return result.rows.map((row) => row.name);
    } catch (error) {
      throw new Error(`Failed to list vector tables: ${error.message}`);
    }
  }
  // Reports dimension (parsed from the table DDL), row count, and metric
  // (always cosine — the only metric this backend uses).
  async describeIndex(indexName) {
    try {
      const tableInfoQuery = `
        SELECT sql
        FROM sqlite_master
        WHERE type='table'
        AND name = ?;
      `;
      const tableInfo = await this.turso.execute({
        sql: tableInfoQuery,
        args: [indexName]
      });
      if (!tableInfo.rows[0]?.sql) {
        throw new Error(`Table ${indexName} not found`);
      }
      // Dimension is recovered from F32_BLOB(<n>) in the CREATE statement;
      // falls back to 0 if the pattern is absent.
      const dimension = parseInt(tableInfo.rows[0].sql.match(/F32_BLOB\((\d+)\)/)?.[1] || "0");
      const countQuery = `
        SELECT COUNT(*) as count
        FROM ${indexName};
      `;
      const countResult = await this.turso.execute({
        sql: countQuery,
        args: []
      });
      const metric = "cosine";
      return {
        dimension,
        count: countResult?.rows?.[0]?.count ?? 0,
        metric
      };
    } catch (e) {
      throw new Error(`Failed to describe vector table: ${e.message}`);
    }
  }
  /**
   * Updates an index entry by its ID with the provided vector and/or metadata.
   *
   * @param indexName - The name of the index to update.
   * @param id - The ID of the index entry to update.
   * @param update - An object containing the vector and/or metadata to update.
   * @param update.vector - An optional array of numbers representing the new vector.
   * @param update.metadata - An optional record containing the new metadata.
   * @returns A promise that resolves when the update is complete.
   * @throws Will throw an error if no updates are provided or if the update operation fails.
   */
  async updateIndexById(indexName, id, update) {
    try {
      const updates = [];
      const args = [];
      if (update.vector) {
        updates.push("embedding = vector32(?)");
        args.push(JSON.stringify(update.vector));
      }
      if (update.metadata) {
        updates.push("metadata = ?");
        args.push(JSON.stringify(update.metadata));
      }
      if (updates.length === 0) {
        throw new Error("No updates provided");
      }
      // id binds the trailing WHERE placeholder.
      args.push(id);
      const query = `
        UPDATE ${indexName}
        SET ${updates.join(", ")}
        WHERE vector_id = ?;
      `;
      await this.turso.execute({
        sql: query,
        args
      });
    } catch (error) {
      throw new Error(`Failed to update index by id: ${id} for index: ${indexName}: ${error.message}`);
    }
  }
  // Deletes a single entry by its vector_id.
  async deleteIndexById(indexName, id) {
    try {
      await this.turso.execute({
        sql: `DELETE FROM ${indexName} WHERE vector_id = ?`,
        args: [id]
      });
    } catch (error) {
      throw new Error(`Failed to delete index by id: ${id} for index: ${indexName}: ${error.message}`);
    }
  }
  // Removes every row from the index table without dropping it.
  async truncateIndex(indexName) {
    await this.turso.execute({
      sql: `DELETE FROM ${indexName}`,
      args: []
    });
  }
};
|
|
674
|
+
// Parses a JSON string, falling back to an empty object on any parse error.
function safelyParseJSON(jsonString) {
  let parsed;
  try {
    parsed = JSON.parse(jsonString);
  } catch {
    parsed = {};
  }
  return parsed;
}
|
|
681
|
+
var LibSQLStore = class extends MastraStorage {
|
|
682
|
+
client;
|
|
683
|
+
// Creates a LibSQL-backed storage adapter; `config` is passed straight
// through to @libsql/client's createClient (url, authToken, ...).
constructor(config) {
  super({ name: `LibSQLStore` });
  // In-memory databases do not persist between connections, so do not cache
  // table-initialization state for them.
  // NOTE(review): `shouldCacheInit` is presumably declared on MastraStorage —
  // confirm, as it is not declared in this class.
  if (config.url.endsWith(":memory:")) {
    this.shouldCacheInit = false;
  }
  this.client = createClient(config);
}
|
|
690
|
+
getCreateTableSQL(tableName, schema) {
|
|
691
|
+
const columns = Object.entries(schema).map(([name, col]) => {
|
|
692
|
+
let type = col.type.toUpperCase();
|
|
693
|
+
if (type === "TEXT") type = "TEXT";
|
|
694
|
+
if (type === "TIMESTAMP") type = "TEXT";
|
|
695
|
+
const nullable = col.nullable ? "" : "NOT NULL";
|
|
696
|
+
const primaryKey = col.primaryKey ? "PRIMARY KEY" : "";
|
|
697
|
+
return `${name} ${type} ${nullable} ${primaryKey}`.trim();
|
|
698
|
+
});
|
|
699
|
+
if (tableName === TABLE_WORKFLOW_SNAPSHOT) {
|
|
700
|
+
const stmnt = `CREATE TABLE IF NOT EXISTS ${tableName} (
|
|
701
|
+
${columns.join(",\n")},
|
|
702
|
+
PRIMARY KEY (workflow_name, run_id)
|
|
703
|
+
)`;
|
|
704
|
+
return stmnt;
|
|
705
|
+
}
|
|
706
|
+
return `CREATE TABLE IF NOT EXISTS ${tableName} (${columns.join(", ")})`;
|
|
707
|
+
}
|
|
708
|
+
async createTable({
|
|
709
|
+
tableName,
|
|
710
|
+
schema
|
|
711
|
+
}) {
|
|
712
|
+
try {
|
|
713
|
+
this.logger.debug(`Creating database table`, { tableName, operation: "schema init" });
|
|
714
|
+
const sql = this.getCreateTableSQL(tableName, schema);
|
|
715
|
+
await this.client.execute(sql);
|
|
716
|
+
} catch (error) {
|
|
717
|
+
this.logger.error(`Error creating table ${tableName}: ${error}`);
|
|
718
|
+
throw error;
|
|
719
|
+
}
|
|
720
|
+
}
|
|
721
|
+
// Deletes every row in `tableName`. Failures are logged but deliberately
// swallowed — callers treat clearing as best-effort.
async clearTable({ tableName }) {
  try {
    await this.client.execute(`DELETE FROM ${tableName}`);
  } catch (e) {
    if (e instanceof Error) {
      this.logger.error(e.message);
    }
  }
}
|
|
730
|
+
prepareStatement({ tableName, record }) {
|
|
731
|
+
const columns = Object.keys(record);
|
|
732
|
+
const values = Object.values(record).map((v) => {
|
|
733
|
+
if (typeof v === `undefined`) {
|
|
734
|
+
return null;
|
|
735
|
+
}
|
|
736
|
+
if (v instanceof Date) {
|
|
737
|
+
return v.toISOString();
|
|
738
|
+
}
|
|
739
|
+
return typeof v === "object" ? JSON.stringify(v) : v;
|
|
740
|
+
});
|
|
741
|
+
const placeholders = values.map(() => "?").join(", ");
|
|
742
|
+
return {
|
|
743
|
+
sql: `INSERT OR REPLACE INTO ${tableName} (${columns.join(", ")}) VALUES (${placeholders})`,
|
|
744
|
+
args: values
|
|
745
|
+
};
|
|
746
|
+
}
|
|
747
|
+
async insert({ tableName, record }) {
|
|
748
|
+
try {
|
|
749
|
+
await this.client.execute(
|
|
750
|
+
this.prepareStatement({
|
|
751
|
+
tableName,
|
|
752
|
+
record
|
|
753
|
+
})
|
|
754
|
+
);
|
|
755
|
+
} catch (error) {
|
|
756
|
+
this.logger.error(`Error upserting into table ${tableName}: ${error}`);
|
|
757
|
+
throw error;
|
|
758
|
+
}
|
|
759
|
+
}
|
|
760
|
+
async batchInsert({ tableName, records }) {
|
|
761
|
+
if (records.length === 0) return;
|
|
762
|
+
try {
|
|
763
|
+
const batchStatements = records.map((r) => this.prepareStatement({ tableName, record: r }));
|
|
764
|
+
await this.client.batch(batchStatements, "write");
|
|
765
|
+
} catch (error) {
|
|
766
|
+
this.logger.error(`Error upserting into table ${tableName}: ${error}`);
|
|
767
|
+
throw error;
|
|
768
|
+
}
|
|
769
|
+
}
|
|
770
|
+
// Loads the most recent row matching ALL `keys` from `tableName`, or null.
// String column values that look like JSON ('{' or '[' prefix) are parsed;
// parse failures fall back to the raw string.
// NOTE(review): ORDER BY createdAt assumes every storage table has a
// createdAt column — confirm against the table schemas.
async load({ tableName, keys }) {
  const conditions = Object.entries(keys).map(([key]) => `${key} = ?`).join(" AND ");
  const values = Object.values(keys);
  const result = await this.client.execute({
    sql: `SELECT * FROM ${tableName} WHERE ${conditions} ORDER BY createdAt DESC LIMIT 1`,
    args: values
  });
  if (!result.rows || result.rows.length === 0) {
    return null;
  }
  const row = result.rows[0];
  // Best-effort deserialization of JSON-looking string columns.
  const parsed = Object.fromEntries(
    Object.entries(row || {}).map(([k, v]) => {
      try {
        return [k, typeof v === "string" ? v.startsWith("{") || v.startsWith("[") ? JSON.parse(v) : v : v];
      } catch {
        // Not valid JSON after all — keep the raw string.
        return [k, v];
      }
    })
  );
  return parsed;
}
|
|
792
|
+
async getThreadById({ threadId }) {
|
|
793
|
+
const result = await this.load({
|
|
794
|
+
tableName: TABLE_THREADS,
|
|
795
|
+
keys: { id: threadId }
|
|
796
|
+
});
|
|
797
|
+
if (!result) {
|
|
798
|
+
return null;
|
|
799
|
+
}
|
|
800
|
+
return {
|
|
801
|
+
...result,
|
|
802
|
+
metadata: typeof result.metadata === "string" ? JSON.parse(result.metadata) : result.metadata
|
|
803
|
+
};
|
|
804
|
+
}
|
|
805
|
+
async getThreadsByResourceId({ resourceId }) {
|
|
806
|
+
const result = await this.client.execute({
|
|
807
|
+
sql: `SELECT * FROM ${TABLE_THREADS} WHERE resourceId = ?`,
|
|
808
|
+
args: [resourceId]
|
|
809
|
+
});
|
|
810
|
+
if (!result.rows) {
|
|
811
|
+
return [];
|
|
812
|
+
}
|
|
813
|
+
return result.rows.map((thread) => ({
|
|
814
|
+
id: thread.id,
|
|
815
|
+
resourceId: thread.resourceId,
|
|
816
|
+
title: thread.title,
|
|
817
|
+
createdAt: thread.createdAt,
|
|
818
|
+
updatedAt: thread.updatedAt,
|
|
819
|
+
metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata
|
|
820
|
+
}));
|
|
821
|
+
}
|
|
822
|
+
async saveThread({ thread }) {
|
|
823
|
+
await this.insert({
|
|
824
|
+
tableName: TABLE_THREADS,
|
|
825
|
+
record: {
|
|
826
|
+
...thread,
|
|
827
|
+
metadata: JSON.stringify(thread.metadata)
|
|
828
|
+
}
|
|
829
|
+
});
|
|
830
|
+
return thread;
|
|
831
|
+
}
|
|
832
|
+
async updateThread({
|
|
833
|
+
id,
|
|
834
|
+
title,
|
|
835
|
+
metadata
|
|
836
|
+
}) {
|
|
837
|
+
const thread = await this.getThreadById({ threadId: id });
|
|
838
|
+
if (!thread) {
|
|
839
|
+
throw new Error(`Thread ${id} not found`);
|
|
840
|
+
}
|
|
841
|
+
const updatedThread = {
|
|
842
|
+
...thread,
|
|
843
|
+
title,
|
|
844
|
+
metadata: {
|
|
845
|
+
...thread.metadata,
|
|
846
|
+
...metadata
|
|
847
|
+
}
|
|
848
|
+
};
|
|
849
|
+
await this.client.execute({
|
|
850
|
+
sql: `UPDATE ${TABLE_THREADS} SET title = ?, metadata = ? WHERE id = ?`,
|
|
851
|
+
args: [title, JSON.stringify(updatedThread.metadata), id]
|
|
852
|
+
});
|
|
853
|
+
return updatedThread;
|
|
854
|
+
}
|
|
855
|
+
async deleteThread({ threadId }) {
|
|
856
|
+
await this.client.execute({
|
|
857
|
+
sql: `DELETE FROM ${TABLE_THREADS} WHERE id = ?`,
|
|
858
|
+
args: [threadId]
|
|
859
|
+
});
|
|
860
|
+
}
|
|
861
|
+
parseRow(row) {
|
|
862
|
+
let content = row.content;
|
|
863
|
+
try {
|
|
864
|
+
content = JSON.parse(row.content);
|
|
865
|
+
} catch {
|
|
866
|
+
}
|
|
867
|
+
return {
|
|
868
|
+
id: row.id,
|
|
869
|
+
content,
|
|
870
|
+
role: row.role,
|
|
871
|
+
type: row.type,
|
|
872
|
+
createdAt: new Date(row.createdAt),
|
|
873
|
+
threadId: row.thread_id
|
|
874
|
+
};
|
|
875
|
+
}
|
|
876
|
+
async getMessages({ threadId, selectBy }) {
  // Fetch messages for a thread, returned oldest-first by createdAt.
  // Two selection mechanisms combine:
  //   1. selectBy.include — explicit message ids, each optionally expanded
  //      with surrounding context (withPreviousMessages / withNextMessages);
  //   2. the most recent `selectBy.last` messages (default 40) not already
  //      picked up by (1).
  try {
    const messages = [];
    const limit = typeof selectBy?.last === `number` ? selectBy.last : 40;
    if (selectBy?.include?.length) {
      const includeIds = selectBy.include.map((i) => i.id);
      // The widest context window requested by ANY include entry is applied
      // around EVERY target message (single query, uniform window).
      const maxPrev = Math.max(...selectBy.include.map((i) => i.withPreviousMessages || 0));
      const maxNext = Math.max(...selectBy.include.map((i) => i.withNextMessages || 0));
      // Number all thread messages by createdAt, locate the targets'
      // positions, then select every row within [pos - maxPrev, pos + maxNext].
      const includeResult = await this.client.execute({
        sql: `
          WITH numbered_messages AS (
            SELECT
              id,
              content,
              role,
              type,
              "createdAt",
              thread_id,
              ROW_NUMBER() OVER (ORDER BY "createdAt" ASC) as row_num
            FROM "${TABLE_MESSAGES}"
            WHERE thread_id = ?
          ),
          target_positions AS (
            SELECT row_num as target_pos
            FROM numbered_messages
            WHERE id IN (${includeIds.map(() => "?").join(", ")})
          )
          SELECT DISTINCT m.*
          FROM numbered_messages m
          CROSS JOIN target_positions t
          WHERE m.row_num BETWEEN (t.target_pos - ?) AND (t.target_pos + ?)
          ORDER BY m."createdAt" ASC
        `,
        args: [threadId, ...includeIds, maxPrev, maxNext]
      });
      if (includeResult.rows) {
        messages.push(...includeResult.rows.map((row) => this.parseRow(row)));
      }
    }
    // Fetch the newest `limit` messages, excluding ids already collected above.
    const excludeIds = messages.map((m) => m.id);
    const remainingSql = `
      SELECT
        id,
        content,
        role,
        type,
        "createdAt",
        thread_id
      FROM "${TABLE_MESSAGES}"
      WHERE thread_id = ?
      ${excludeIds.length ? `AND id NOT IN (${excludeIds.map(() => "?").join(", ")})` : ""}
      ORDER BY "createdAt" DESC
      LIMIT ?
    `;
    const remainingArgs = [threadId, ...excludeIds.length ? excludeIds : [], limit];
    const remainingResult = await this.client.execute({
      sql: remainingSql,
      args: remainingArgs
    });
    if (remainingResult.rows) {
      messages.push(...remainingResult.rows.map((row) => this.parseRow(row)));
    }
    // Present the combined result set in chronological order.
    messages.sort((a, b) => a.createdAt.getTime() - b.createdAt.getTime());
    return messages;
  } catch (error) {
    this.logger.error("Error getting messages:", error);
    throw error;
  }
}
|
|
945
|
+
async saveMessages({ messages }) {
|
|
946
|
+
if (messages.length === 0) return messages;
|
|
947
|
+
try {
|
|
948
|
+
const threadId = messages[0]?.threadId;
|
|
949
|
+
if (!threadId) {
|
|
950
|
+
throw new Error("Thread ID is required");
|
|
951
|
+
}
|
|
952
|
+
const batchStatements = messages.map((message) => {
|
|
953
|
+
const time = message.createdAt || /* @__PURE__ */ new Date();
|
|
954
|
+
return {
|
|
955
|
+
sql: `INSERT INTO ${TABLE_MESSAGES} (id, thread_id, content, role, type, createdAt)
|
|
956
|
+
VALUES (?, ?, ?, ?, ?, ?)`,
|
|
957
|
+
args: [
|
|
958
|
+
message.id,
|
|
959
|
+
threadId,
|
|
960
|
+
typeof message.content === "object" ? JSON.stringify(message.content) : message.content,
|
|
961
|
+
message.role,
|
|
962
|
+
message.type,
|
|
963
|
+
time instanceof Date ? time.toISOString() : time
|
|
964
|
+
]
|
|
965
|
+
};
|
|
966
|
+
});
|
|
967
|
+
await this.client.batch(batchStatements, "write");
|
|
968
|
+
return messages;
|
|
969
|
+
} catch (error) {
|
|
970
|
+
this.logger.error("Failed to save messages in database: " + error?.message);
|
|
971
|
+
throw error;
|
|
972
|
+
}
|
|
973
|
+
}
|
|
974
|
+
transformEvalRow(row) {
|
|
975
|
+
const resultValue = JSON.parse(row.result);
|
|
976
|
+
const testInfoValue = row.test_info ? JSON.parse(row.test_info) : void 0;
|
|
977
|
+
if (!resultValue || typeof resultValue !== "object" || !("score" in resultValue)) {
|
|
978
|
+
throw new Error(`Invalid MetricResult format: ${JSON.stringify(resultValue)}`);
|
|
979
|
+
}
|
|
980
|
+
return {
|
|
981
|
+
input: row.input,
|
|
982
|
+
output: row.output,
|
|
983
|
+
result: resultValue,
|
|
984
|
+
agentName: row.agent_name,
|
|
985
|
+
metricName: row.metric_name,
|
|
986
|
+
instructions: row.instructions,
|
|
987
|
+
testInfo: testInfoValue,
|
|
988
|
+
globalRunId: row.global_run_id,
|
|
989
|
+
runId: row.run_id,
|
|
990
|
+
createdAt: row.created_at
|
|
991
|
+
};
|
|
992
|
+
}
|
|
993
|
+
async getEvalsByAgentName(agentName, type) {
|
|
994
|
+
try {
|
|
995
|
+
const baseQuery = `SELECT * FROM ${TABLE_EVALS} WHERE agent_name = ?`;
|
|
996
|
+
const typeCondition = type === "test" ? " AND test_info IS NOT NULL AND test_info->>'testPath' IS NOT NULL" : type === "live" ? " AND (test_info IS NULL OR test_info->>'testPath' IS NULL)" : "";
|
|
997
|
+
const result = await this.client.execute({
|
|
998
|
+
sql: `${baseQuery}${typeCondition} ORDER BY created_at DESC`,
|
|
999
|
+
args: [agentName]
|
|
1000
|
+
});
|
|
1001
|
+
return result.rows?.map((row) => this.transformEvalRow(row)) ?? [];
|
|
1002
|
+
} catch (error) {
|
|
1003
|
+
if (error instanceof Error && error.message.includes("no such table")) {
|
|
1004
|
+
return [];
|
|
1005
|
+
}
|
|
1006
|
+
this.logger.error("Failed to get evals for the specified agent: " + error?.message);
|
|
1007
|
+
throw error;
|
|
1008
|
+
}
|
|
1009
|
+
}
|
|
1010
|
+
// TODO: add types
|
|
1011
|
+
async getTraces({
|
|
1012
|
+
name,
|
|
1013
|
+
scope,
|
|
1014
|
+
page,
|
|
1015
|
+
perPage,
|
|
1016
|
+
attributes,
|
|
1017
|
+
filters
|
|
1018
|
+
} = {
|
|
1019
|
+
page: 0,
|
|
1020
|
+
perPage: 100
|
|
1021
|
+
}) {
|
|
1022
|
+
const limit = perPage;
|
|
1023
|
+
const offset = page * perPage;
|
|
1024
|
+
const args = [];
|
|
1025
|
+
const conditions = [];
|
|
1026
|
+
if (name) {
|
|
1027
|
+
conditions.push("name LIKE CONCAT(?, '%')");
|
|
1028
|
+
}
|
|
1029
|
+
if (scope) {
|
|
1030
|
+
conditions.push("scope = ?");
|
|
1031
|
+
}
|
|
1032
|
+
if (attributes) {
|
|
1033
|
+
Object.keys(attributes).forEach((key) => {
|
|
1034
|
+
conditions.push(`attributes->>'$.${key}' = ?`);
|
|
1035
|
+
});
|
|
1036
|
+
}
|
|
1037
|
+
if (filters) {
|
|
1038
|
+
Object.entries(filters).forEach(([key, _value]) => {
|
|
1039
|
+
conditions.push(`${key} = ?`);
|
|
1040
|
+
});
|
|
1041
|
+
}
|
|
1042
|
+
const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
|
|
1043
|
+
if (name) {
|
|
1044
|
+
args.push(name);
|
|
1045
|
+
}
|
|
1046
|
+
if (scope) {
|
|
1047
|
+
args.push(scope);
|
|
1048
|
+
}
|
|
1049
|
+
if (attributes) {
|
|
1050
|
+
for (const [, value] of Object.entries(attributes)) {
|
|
1051
|
+
args.push(value);
|
|
1052
|
+
}
|
|
1053
|
+
}
|
|
1054
|
+
if (filters) {
|
|
1055
|
+
for (const [, value] of Object.entries(filters)) {
|
|
1056
|
+
args.push(value);
|
|
1057
|
+
}
|
|
1058
|
+
}
|
|
1059
|
+
args.push(limit, offset);
|
|
1060
|
+
const result = await this.client.execute({
|
|
1061
|
+
sql: `SELECT * FROM ${TABLE_TRACES} ${whereClause} ORDER BY "startTime" DESC LIMIT ? OFFSET ?`,
|
|
1062
|
+
args
|
|
1063
|
+
});
|
|
1064
|
+
if (!result.rows) {
|
|
1065
|
+
return [];
|
|
1066
|
+
}
|
|
1067
|
+
return result.rows.map((row) => ({
|
|
1068
|
+
id: row.id,
|
|
1069
|
+
parentSpanId: row.parentSpanId,
|
|
1070
|
+
traceId: row.traceId,
|
|
1071
|
+
name: row.name,
|
|
1072
|
+
scope: row.scope,
|
|
1073
|
+
kind: row.kind,
|
|
1074
|
+
status: safelyParseJSON(row.status),
|
|
1075
|
+
events: safelyParseJSON(row.events),
|
|
1076
|
+
links: safelyParseJSON(row.links),
|
|
1077
|
+
attributes: safelyParseJSON(row.attributes),
|
|
1078
|
+
startTime: row.startTime,
|
|
1079
|
+
endTime: row.endTime,
|
|
1080
|
+
other: safelyParseJSON(row.other),
|
|
1081
|
+
createdAt: row.createdAt
|
|
1082
|
+
}));
|
|
1083
|
+
}
|
|
1084
|
+
async getWorkflowRuns({
|
|
1085
|
+
workflowName,
|
|
1086
|
+
fromDate,
|
|
1087
|
+
toDate,
|
|
1088
|
+
limit,
|
|
1089
|
+
offset
|
|
1090
|
+
} = {}) {
|
|
1091
|
+
const conditions = [];
|
|
1092
|
+
const args = [];
|
|
1093
|
+
if (workflowName) {
|
|
1094
|
+
conditions.push("workflow_name = ?");
|
|
1095
|
+
args.push(workflowName);
|
|
1096
|
+
}
|
|
1097
|
+
if (fromDate) {
|
|
1098
|
+
conditions.push("createdAt >= ?");
|
|
1099
|
+
args.push(fromDate.toISOString());
|
|
1100
|
+
}
|
|
1101
|
+
if (toDate) {
|
|
1102
|
+
conditions.push("createdAt <= ?");
|
|
1103
|
+
args.push(toDate.toISOString());
|
|
1104
|
+
}
|
|
1105
|
+
const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
|
|
1106
|
+
let total = 0;
|
|
1107
|
+
if (limit !== void 0 && offset !== void 0) {
|
|
1108
|
+
const countResult = await this.client.execute({
|
|
1109
|
+
sql: `SELECT COUNT(*) as count FROM ${TABLE_WORKFLOW_SNAPSHOT} ${whereClause}`,
|
|
1110
|
+
args
|
|
1111
|
+
});
|
|
1112
|
+
total = Number(countResult.rows?.[0]?.count ?? 0);
|
|
1113
|
+
}
|
|
1114
|
+
const result = await this.client.execute({
|
|
1115
|
+
sql: `SELECT * FROM ${TABLE_WORKFLOW_SNAPSHOT} ${whereClause} ORDER BY createdAt DESC${limit !== void 0 && offset !== void 0 ? ` LIMIT ? OFFSET ?` : ""}`,
|
|
1116
|
+
args: limit !== void 0 && offset !== void 0 ? [...args, limit, offset] : args
|
|
1117
|
+
});
|
|
1118
|
+
const runs = (result.rows || []).map((row) => {
|
|
1119
|
+
let parsedSnapshot = row.snapshot;
|
|
1120
|
+
if (typeof parsedSnapshot === "string") {
|
|
1121
|
+
try {
|
|
1122
|
+
parsedSnapshot = JSON.parse(row.snapshot);
|
|
1123
|
+
} catch (e) {
|
|
1124
|
+
console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
|
|
1125
|
+
}
|
|
1126
|
+
}
|
|
1127
|
+
return {
|
|
1128
|
+
workflowName: row.workflow_name,
|
|
1129
|
+
runId: row.run_id,
|
|
1130
|
+
snapshot: parsedSnapshot,
|
|
1131
|
+
createdAt: new Date(row.createdAt),
|
|
1132
|
+
updatedAt: new Date(row.updatedAt)
|
|
1133
|
+
};
|
|
1134
|
+
});
|
|
1135
|
+
return { runs, total: total || runs.length };
|
|
1136
|
+
}
|
|
1137
|
+
};
|
|
1138
|
+
|
|
1139
|
+
export { LibSQLStore as DefaultStorage, LibSQLStore, LibSQLVector };
|