@mastra/libsql 0.0.0-1.x-tester-20251106055847
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +1565 -0
- package/LICENSE.md +15 -0
- package/README.md +151 -0
- package/dist/index.cjs +3394 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.ts +4 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +3389 -0
- package/dist/index.js.map +1 -0
- package/dist/storage/domains/memory/index.d.ts +64 -0
- package/dist/storage/domains/memory/index.d.ts.map +1 -0
- package/dist/storage/domains/observability/index.d.ts +34 -0
- package/dist/storage/domains/observability/index.d.ts.map +1 -0
- package/dist/storage/domains/operations/index.d.ts +110 -0
- package/dist/storage/domains/operations/index.d.ts.map +1 -0
- package/dist/storage/domains/scores/index.d.ts +54 -0
- package/dist/storage/domains/scores/index.d.ts.map +1 -0
- package/dist/storage/domains/utils.d.ts +60 -0
- package/dist/storage/domains/utils.d.ts.map +1 -0
- package/dist/storage/domains/workflows/index.d.ts +53 -0
- package/dist/storage/domains/workflows/index.d.ts.map +1 -0
- package/dist/storage/index.d.ts +220 -0
- package/dist/storage/index.d.ts.map +1 -0
- package/dist/vector/filter.d.ts +29 -0
- package/dist/vector/filter.d.ts.map +1 -0
- package/dist/vector/index.d.ts +74 -0
- package/dist/vector/index.d.ts.map +1 -0
- package/dist/vector/prompt.d.ts +6 -0
- package/dist/vector/prompt.d.ts.map +1 -0
- package/dist/vector/sql-builder.d.ts +9 -0
- package/dist/vector/sql-builder.d.ts.map +1 -0
- package/package.json +62 -0
package/dist/index.cjs
ADDED
|
@@ -0,0 +1,3394 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
var client = require('@libsql/client');
|
|
4
|
+
var error = require('@mastra/core/error');
|
|
5
|
+
var utils = require('@mastra/core/utils');
|
|
6
|
+
var vector = require('@mastra/core/vector');
|
|
7
|
+
var filter = require('@mastra/core/vector/filter');
|
|
8
|
+
var storage = require('@mastra/core/storage');
|
|
9
|
+
var agent = require('@mastra/core/agent');
|
|
10
|
+
var evals = require('@mastra/core/evals');
|
|
11
|
+
|
|
12
|
+
// src/vector/index.ts
|
|
13
|
+
/**
 * Translates Mongo-style filter objects into the canonical form consumed by
 * buildFilterQuery()/buildCondition() below. Nested plain objects are
 * flattened to dotted paths; primitives become { $eq } and arrays { $in }.
 */
var LibSQLFilterTranslator = class extends filter.BaseFilterTranslator {
  // Declares which operators this translator accepts: all defaults, no
  // $regex support, plus the LibSQL-specific $contains and $size.
  getSupportedOperators() {
    return {
      ...filter.BaseFilterTranslator.DEFAULT_OPERATORS,
      regex: [],
      custom: ["$contains", "$size"]
    };
  }
  // Entry point. NOTE: the parameter shadows the imported `filter` module
  // inside this method body (harmless here, but easy to misread).
  translate(filter) {
    if (this.isEmpty(filter)) {
      return filter;
    }
    this.validateFilter(filter);
    return this.translateNode(filter);
  }
  // Recursively rewrites one node. `currentPath` accumulates the dotted
  // field path while descending through nested plain objects.
  translateNode(node, currentPath = "") {
    if (this.isRegex(node)) {
      throw new Error("Direct regex pattern format is not supported in LibSQL");
    }
    // Wraps a leaf result under the accumulated path (no-op at the root).
    const withPath = (result2) => currentPath ? { [currentPath]: result2 } : result2;
    if (this.isPrimitive(node)) {
      // Bare primitive value => equality test.
      return withPath({ $eq: this.normalizeComparisonValue(node) });
    }
    if (Array.isArray(node)) {
      // Bare array value => membership test.
      return withPath({ $in: this.normalizeArrayValues(node) });
    }
    const entries = Object.entries(node);
    const result = {};
    for (const [key, value] of entries) {
      const newPath = currentPath ? `${currentPath}.${key}` : key;
      if (this.isLogicalOperator(key)) {
        // $and/$or/...: translate each branch with a fresh path.
        result[key] = Array.isArray(value) ? value.map((filter) => this.translateNode(filter)) : this.translateNode(value);
      } else if (this.isOperator(key)) {
        if (this.isArrayOperator(key) && !Array.isArray(value) && key !== "$elemMatch") {
          // Scalar given to an array operator (e.g. $in: 5) => wrap in array.
          result[key] = [value];
        } else if (this.isBasicOperator(key) && Array.isArray(value)) {
          // Array given to $eq/$ne => compare against its JSON encoding.
          result[key] = JSON.stringify(value);
        } else {
          result[key] = value;
        }
      } else if (typeof value === "object" && value !== null) {
        const hasOperators = Object.keys(value).some((k) => this.isOperator(k));
        if (hasOperators) {
          // { field: { $gt: 1 } } => keep the operator object under the path.
          result[newPath] = this.translateNode(value);
        } else {
          // Plain nested object => flatten its keys into dotted paths.
          Object.assign(result, this.translateNode(value, newPath));
        }
      } else {
        // Primitive leaf under a field key => becomes { newPath: { $eq: v } }.
        result[newPath] = this.translateNode(value);
      }
    }
    return result;
  }
  // TODO: Look more into regex support for LibSQL
  // private translateRegexPattern(pattern: string, options: string = ''): any {
  //   if (!options) return { $regex: pattern };
  //   const flags = options
  //     .split('')
  //     .filter(f => 'imsux'.includes(f))
  //     .join('');
  //   return {
  //     $regex: pattern,
  //     $options: flags,
  //   };
  // }
};
|
|
79
|
+
// Factory for $eq ("=") and $ne ("!=") builders. The value is bound twice:
// once for the NULL probe in the CASE head and once for the comparison, so
// `{ field: { $eq: null } }` becomes an IS NULL / IS NOT NULL test instead
// of the always-unknown `= NULL`.
var createBasicOperator = (symbol) => {
  return (key, value) => {
    const jsonPath = getJsonPath(key);
    return {
      sql: `CASE
        WHEN ? IS NULL THEN json_extract(metadata, ${jsonPath}) IS ${symbol === "=" ? "" : "NOT"} NULL
        ELSE json_extract(metadata, ${jsonPath}) ${symbol} ?
      END`,
      needsValue: true,
      // Same value feeds both placeholders above.
      transformValue: () => {
        return [value, value];
      }
    };
  };
};
|
|
94
|
+
/**
 * Factory for the ordered comparison builders ($gt/$gte/$lt/$lte).
 * The extracted JSON value is CAST to NUMERIC so values stored as strings
 * still compare numerically; the right-hand side is bound as a parameter.
 */
var createNumericOperator = (symbol) => (key) => ({
  sql: `CAST(json_extract(metadata, ${getJsonPath(key)}) AS NUMERIC) ${symbol} ?`,
  needsValue: true
});
|
|
103
|
+
// Returns a SQL predicate asserting that the metadata field at `key` holds a
// valid JSON array (used to guard array-specific operators like $in/$all).
var validateJsonArray = (key) => {
  const jsonPath = getJsonPath(key);
  return `json_valid(json_extract(metadata, ${jsonPath}))
  AND json_type(json_extract(metadata, ${jsonPath})) = 'array'`;
};
|
|
108
|
+
// Matches `json_extract(metadata, '$....')` expressions (quoted or plain
// path segments) so $elemMatch can rewrite field references to target the
// current json_each element instead of the row's metadata column.
var pattern = /json_extract\(metadata, '\$\.(?:"[^"]*"(?:\."[^"]*")*|[^']+)'\)/g;
|
|
109
|
+
// Builds the per-element conditions for $elemMatch. Each entry of `value` is
// compiled with buildCondition() and then the metadata-column references in
// the generated SQL are rewritten (via `pattern`) to point at the current
// json_each element (`elem.value`).
function buildElemMatchConditions(value) {
  const conditions = Object.entries(value).map(([field, fieldValue]) => {
    if (field.startsWith("$")) {
      // Direct operator on the element itself, e.g. { $gt: 5 }.
      const { sql, values } = buildCondition("elem.value", { [field]: fieldValue });
      const elemSql = sql.replace(pattern, "elem.value");
      return { sql: elemSql, values };
    } else if (typeof fieldValue === "object" && !Array.isArray(fieldValue)) {
      // Nested operator object on a sub-field of the element.
      const { sql, values } = buildCondition(field, fieldValue);
      const jsonPath = parseJsonPathKey(field);
      const elemSql = sql.replace(pattern, `json_extract(elem.value, '$.${jsonPath}')`);
      return { sql: elemSql, values };
    } else {
      // Plain equality against a sub-field of the element.
      const jsonPath = parseJsonPathKey(field);
      return {
        sql: `json_extract(elem.value, '$.${jsonPath}') = ?`,
        values: [fieldValue]
      };
    }
  });
  return conditions;
}
|
|
130
|
+
// Maps Mongo-style operators to SQL fragment builders. Each builder returns
// { sql, needsValue, transformValue? }; processOperator() later binds the
// (possibly transformed) value(s) as positional parameters.
var FILTER_OPERATORS = {
  $eq: createBasicOperator("="),
  $ne: createBasicOperator("!="),
  $gt: createNumericOperator(">"),
  $gte: createNumericOperator(">="),
  $lt: createNumericOperator("<"),
  $lte: createNumericOperator("<="),
  // Array Operators
  // $in: matches when the stored value is an array sharing any element with
  // `value`, or (for scalar storage) when the scalar is one of the values.
  $in: (key, value) => {
    const jsonPath = getJsonPath(key);
    const arr = Array.isArray(value) ? value : [value];
    if (arr.length === 0) {
      // Empty $in matches nothing.
      return { sql: "1 = 0", needsValue: true, transformValue: () => [] };
    }
    const paramPlaceholders = arr.map(() => "?").join(",");
    return {
      sql: `(
        CASE
          WHEN ${validateJsonArray(key)} THEN
            EXISTS (
              SELECT 1 FROM json_each(json_extract(metadata, ${jsonPath})) as elem
              WHERE elem.value IN (SELECT value FROM json_each(?))
            )
          ELSE json_extract(metadata, ${jsonPath}) IN (${paramPlaceholders})
        END
      )`,
      // One JSON-encoded copy for the array branch, then the raw values for
      // the scalar IN (...) branch.
      transformValue: () => [JSON.stringify(arr), ...arr],
      needsValue: true
    };
  },
  // $nin: logical negation of $in (empty $nin matches everything).
  $nin: (key, value) => {
    const jsonPath = getJsonPath(key);
    const arr = Array.isArray(value) ? value : [value];
    if (arr.length === 0) {
      return { sql: "1 = 1", needsValue: true, transformValue: () => [] };
    }
    const paramPlaceholders = arr.map(() => "?").join(",");
    return {
      sql: `(
        CASE
          WHEN ${validateJsonArray(key)} THEN
            NOT EXISTS (
              SELECT 1 FROM json_each(json_extract(metadata, ${jsonPath})) as elem
              WHERE elem.value IN (SELECT value FROM json_each(?))
            )
          ELSE json_extract(metadata, ${jsonPath}) NOT IN (${paramPlaceholders})
        END
      )`,
      needsValue: true,
      transformValue: () => [JSON.stringify(arr), ...arr]
    };
  },
  // $all: true when the stored array contains every element of `value`
  // (checked as "no requested element is missing"). Non-arrays never match.
  $all: (key, value) => {
    const jsonPath = getJsonPath(key);
    let sql;
    const arrayValue = Array.isArray(value) ? value : [value];
    if (arrayValue.length === 0) {
      sql = "1 = 0";
    } else {
      sql = `(
        CASE
          WHEN ${validateJsonArray(key)} THEN
            NOT EXISTS (
              SELECT value
              FROM json_each(?)
              WHERE value NOT IN (
                SELECT value
                FROM json_each(json_extract(metadata, ${jsonPath}))
              )
            )
          ELSE FALSE
        END
      )`;
    }
    return {
      sql,
      needsValue: true,
      transformValue: () => {
        if (arrayValue.length === 0) {
          return [];
        }
        return [JSON.stringify(arrayValue)];
      }
    };
  },
  // $elemMatch: true when at least one element of the stored array satisfies
  // all of the sub-conditions (see buildElemMatchConditions()).
  $elemMatch: (key, value) => {
    const jsonPath = getJsonPath(key);
    if (typeof value !== "object" || Array.isArray(value)) {
      throw new Error("$elemMatch requires an object with conditions");
    }
    const conditions = buildElemMatchConditions(value);
    return {
      sql: `(
        CASE
          WHEN ${validateJsonArray(key)} THEN
            EXISTS (
              SELECT 1
              FROM json_each(json_extract(metadata, ${jsonPath})) as elem
              WHERE ${conditions.map((c) => c.sql).join(" AND ")}
            )
          ELSE FALSE
        END
      )`,
      needsValue: true,
      transformValue: () => conditions.flatMap((c) => c.values)
    };
  },
  // Element Operators
  // NOTE(review): $exists ignores its value — `{ $exists: false }` behaves
  // like `{ $exists: true }` here; confirm against upstream intent.
  $exists: (key) => {
    const jsonPath = getJsonPath(key);
    return {
      sql: `json_extract(metadata, ${jsonPath}) IS NOT NULL`,
      needsValue: false
    };
  },
  // Logical Operators — `key` here is the already-joined child SQL, supplied
  // by the logical-operator handling path.
  $and: (key) => ({
    sql: `(${key})`,
    needsValue: false
  }),
  $or: (key) => ({
    sql: `(${key})`,
    needsValue: false
  }),
  $not: (key) => ({ sql: `NOT (${key})`, needsValue: false }),
  $nor: (key) => ({
    sql: `NOT (${key})`,
    needsValue: false
  }),
  // NOTE(review): $size's second argument is the operator VALUE (the expected
  // length), yet it is interpolated as `$${paramIndex}` — producing e.g. `$3`
  // as a named-style placeholder while the value is also bound positionally by
  // processOperator(). This looks like a latent binding bug; verify against
  // the libsql client's parameter-binding rules before relying on $size.
  $size: (key, paramIndex) => {
    const jsonPath = getJsonPath(key);
    return {
      sql: `(
        CASE
          WHEN json_type(json_extract(metadata, ${jsonPath})) = 'array' THEN
            json_array_length(json_extract(metadata, ${jsonPath})) = $${paramIndex}
          ELSE FALSE
        END
      )`,
      needsValue: true
    };
  },
  // /**
  //  * Regex Operators
  //  * Supports case insensitive and multiline
  //  */
  // $regex: (key: string): FilterOperator => ({
  //   sql: `json_extract(metadata, '$."${toJsonPathKey(key)}"') = ?`,
  //   needsValue: true,
  //   transformValue: (value: any) => {
  //     const pattern = typeof value === 'object' ? value.$regex : value;
  //     const options = typeof value === 'object' ? value.$options || '' : '';
  //     let sql = `json_extract(metadata, '$."${toJsonPathKey(key)}"')`;
  //     // Handle multiline
  //     // if (options.includes('m')) {
  //     //   sql = `REPLACE(${sql}, CHAR(10), '\n')`;
  //     // }
  //     // let finalPattern = pattern;
  //     // if (options) {
  //     //   finalPattern = `(\\?${options})${pattern}`;
  //     // }
  //     // // Handle case insensitivity
  //     // if (options.includes('i')) {
  //     //   sql = `LOWER(${sql}) REGEXP LOWER(?)`;
  //     // } else {
  //     //   sql = `${sql} REGEXP ?`;
  //     // }
  //     if (options.includes('m')) {
  //       sql = `EXISTS (
  //         SELECT 1
  //         FROM json_each(
  //           json_array(
  //             ${sql},
  //             REPLACE(${sql}, CHAR(10), CHAR(13))
  //           )
  //         ) as lines
  //         WHERE lines.value REGEXP ?
  //       )`;
  //     } else {
  //       sql = `${sql} REGEXP ?`;
  //     }
  //     // Handle case insensitivity
  //     if (options.includes('i')) {
  //       sql = sql.replace('REGEXP ?', 'REGEXP LOWER(?)');
  //       sql = sql.replace('value REGEXP', 'LOWER(value) REGEXP');
  //     }
  //     // Handle extended - allows whitespace and comments in pattern
  //     if (options.includes('x')) {
  //       // Remove whitespace and comments from pattern
  //       const cleanPattern = pattern.replace(/\s+|#.*$/gm, '');
  //       return {
  //         sql,
  //         values: [cleanPattern],
  //       };
  //     }
  //     return {
  //       sql,
  //       values: [pattern],
  //     };
  //   },
  // }),
  // $contains: array overlap for arrays, case-insensitive substring (LIKE,
  // backslash-escaped via escapeLikePattern) for strings, equality otherwise.
  // NOTE(review): the already-quoted jsonPathKey is passed back into
  // validateJsonArray(), which re-parses it through parseFieldKey — verify
  // that dotted keys survive this double parse.
  $contains: (key, value) => {
    const jsonPathKey = parseJsonPathKey(key);
    let sql;
    if (Array.isArray(value)) {
      sql = `(
        SELECT ${validateJsonArray(jsonPathKey)}
        AND EXISTS (
          SELECT 1
          FROM json_each(json_extract(metadata, '$."${jsonPathKey}"')) as m
          WHERE m.value IN (SELECT value FROM json_each(?))
        )
      )`;
    } else if (typeof value === "string") {
      sql = `lower(json_extract(metadata, '$."${jsonPathKey}"')) LIKE '%' || lower(?) || '%' ESCAPE '\\'`;
    } else {
      sql = `json_extract(metadata, '$."${jsonPathKey}"') = ?`;
    }
    return {
      sql,
      needsValue: true,
      transformValue: () => {
        if (Array.isArray(value)) {
          return [JSON.stringify(value)];
        }
        if (typeof value === "object" && value !== null) {
          return [JSON.stringify(value)];
        }
        if (typeof value === "string") {
          return [escapeLikePattern(value)];
        }
        return [value];
      }
    };
  }
  /**
   * $objectContains: True JSON containment for advanced use (deep sub-object match).
   * Usage: { field: { $objectContains: { ...subobject } } }
   */
  // $objectContains: (key: string) => ({
  //   sql: '', // Will be overridden by transformValue
  //   needsValue: true,
  //   transformValue: (value: any) => ({
  //     sql: `json_type(json_extract(metadata, '$."${toJsonPathKey(key)}"')) = 'object'
  //       AND json_patch(json_extract(metadata, '$."${toJsonPathKey(key)}"'), ?) = json_extract(metadata, '$."${toJsonPathKey(key)}"')`,
  //     values: [JSON.stringify(value)],
  //   }),
  // }),
};
|
|
379
|
+
/**
 * Type guard for the { sql, values } shape some transformValue() helpers
 * return in place of a plain values array.
 */
function isFilterResult(obj) {
  // Preserve the original short-circuit semantics: a falsy input is returned
  // as-is (falsy), a matching object yields true.
  const looksLikeResult =
    obj &&
    typeof obj === "object" &&
    typeof obj.sql === "string" &&
    Array.isArray(obj.values);
  return looksLikeResult;
}
|
|
382
|
+
/**
 * Validates a metadata field key and converts it to a JSON path segment.
 * Dotted keys are split and each segment double-quoted ("a"."b") so field
 * names are matched literally by json_extract; simple keys pass through.
 */
var parseJsonPathKey = (key) => {
  const parsedKey = utils.parseFieldKey(key);
  if (!parsedKey.includes(".")) {
    return parsedKey;
  }
  const quotedSegments = parsedKey.split(".").map((segment) => `"${segment}"`);
  return quotedSegments.join(".");
};
|
|
389
|
+
/**
 * Wraps a parsed field key into a quoted SQLite JSON path literal,
 * e.g. "user.name" -> '$."user"."name"'.
 */
var getJsonPath = (key) => `'$.${parseJsonPathKey(key)}'`;
|
|
393
|
+
/**
 * Escapes LIKE wildcards so user text matches literally: each of %, _ and \
 * is prefixed with a backslash (paired with ESCAPE '\' in the query).
 */
function escapeLikePattern(str) {
  let escaped = "";
  for (const ch of str) {
    escaped += ch === "%" || ch === "_" || ch === "\\" ? "\\" + ch : ch;
  }
  return escaped;
}
|
|
396
|
+
/**
 * Compiles a translated filter object into a WHERE clause plus its bound
 * values. Top-level entries are AND-ed together; a falsy filter produces an
 * empty clause so callers can interpolate the result unconditionally.
 */
function buildFilterQuery(filter) {
  if (!filter) {
    return { sql: "", values: [] };
  }
  const values = [];
  const parts = [];
  for (const [key, value] of Object.entries(filter)) {
    const condition = buildCondition(key, value);
    values.push(...condition.values);
    parts.push(condition.sql);
  }
  const conditions = parts.join(" AND ");
  return {
    sql: conditions ? `WHERE ${conditions}` : "",
    values
  };
}
|
|
411
|
+
// Compiles one filter entry into { sql, values }.
// NOTE(review): `parentPath` is accepted but never used (same in
// handleLogicalOperator) — presumably a leftover parameter; confirm before
// removing, since callers pass only two arguments anyway.
function buildCondition(key, value, parentPath) {
  // Logical operators fan out to their own handler.
  if (["$and", "$or", "$not", "$nor"].includes(key)) {
    return handleLogicalOperator(key, value);
  }
  // Primitives (and any falsy value, including null) compare directly.
  // NOTE(review): for null this emits `= ?` with a NULL bind, which never
  // matches in SQL — the translator normally rewrites nulls to $eq first.
  if (!value || typeof value !== "object") {
    const jsonPath = getJsonPath(key);
    return {
      sql: `json_extract(metadata, ${jsonPath}) = ?`,
      values: [value]
    };
  }
  // Objects are operator maps, e.g. { $gt: 5, $lt: 10 }.
  return handleOperator(key, value);
}
|
|
424
|
+
// Compiles $and/$or/$not/$nor into joined SQL. For array-valued operators
// each element's entries are compiled and flattened into one condition list;
// $nor is emitted as NOT(OR-join). `parentPath` is unused (see buildCondition).
function handleLogicalOperator(key, value, parentPath) {
  // Empty operand: $and/$nor are vacuously true, $or vacuously false,
  // and an empty $not is rejected outright.
  if (!value || Array.isArray(value) && value.length === 0) {
    switch (key) {
      case "$and":
      case "$nor":
        return { sql: "true", values: [] };
      case "$or":
        return { sql: "false", values: [] };
      case "$not":
        throw new Error("$not operator cannot be empty");
      default:
        return { sql: "true", values: [] };
    }
  }
  if (key === "$not") {
    // $not takes an object of field conditions; negate their conjunction.
    const entries = Object.entries(value);
    const conditions2 = entries.map(([fieldKey, fieldValue]) => buildCondition(fieldKey, fieldValue));
    return {
      sql: `NOT (${conditions2.map((c) => c.sql).join(" AND ")})`,
      values: conditions2.flatMap((c) => c.values)
    };
  }
  const values = [];
  // $or and $nor join with OR; $and joins with AND.
  const joinOperator = key === "$or" || key === "$nor" ? "OR" : "AND";
  // Array form: compile every entry of every branch object; falsy branches
  // contribute nothing. Non-array form: recurse once through buildCondition.
  const conditions = Array.isArray(value) ? value.map((f) => {
    const entries = !!f ? Object.entries(f) : [];
    return entries.map(([k, v]) => buildCondition(k, v));
  }) : [buildCondition(key, value)];
  const joined = conditions.flat().map((c) => {
    values.push(...c.values);
    return c.sql;
  }).join(` ${joinOperator} `);
  return {
    sql: key === "$nor" ? `NOT (${joined})` : `(${joined})`,
    values
  };
}
|
|
461
|
+
// Compiles a field's operator map. Multiple operators on one field
// ({ $gt: 1, $lt: 9 }) are AND-ed; a nested $not negates the conjunction of
// its own operators.
function handleOperator(key, value) {
  if (typeof value === "object" && !Array.isArray(value)) {
    const entries = Object.entries(value);
    const results = entries.map(
      ([operator2, operatorValue2]) => operator2 === "$not" ? {
        // Field-level $not: negate each inner operator applied to this key.
        sql: `NOT (${Object.entries(operatorValue2).map(([op, val]) => processOperator(key, op, val).sql).join(" AND ")})`,
        values: Object.entries(operatorValue2).flatMap(
          ([op, val]) => processOperator(key, op, val).values
        )
      } : processOperator(key, operator2, operatorValue2)
    );
    return {
      sql: `(${results.map((r) => r.sql).join(" AND ")})`,
      values: results.flatMap((r) => r.values)
    };
  }
  // Array value: take the first (and only expected) operator entry.
  // NOTE(review): an empty object here leaves `operator` undefined and
  // processOperator will throw on `startsWith` — presumably unreachable
  // after translation; confirm.
  const [[operator, operatorValue] = []] = Object.entries(value);
  return processOperator(key, operator, operatorValue);
}
|
|
480
|
+
// Looks up and invokes a FILTER_OPERATORS builder for one (field, operator,
// value) triple, then normalizes its output to { sql, values }.
var processOperator = (key, operator, operatorValue) => {
  if (!operator.startsWith("$") || !FILTER_OPERATORS[operator]) {
    throw new Error(`Invalid operator: ${operator}`);
  }
  const operatorFn = FILTER_OPERATORS[operator];
  const operatorResult = operatorFn(key, operatorValue);
  // Operators whose SQL embeds everything (e.g. $exists) bind no values.
  if (!operatorResult.needsValue) {
    return { sql: operatorResult.sql, values: [] };
  }
  // transformValue may reshape the bound values, or return a full
  // { sql, values } result that overrides the builder's SQL.
  const transformed = operatorResult.transformValue ? operatorResult.transformValue() : operatorValue;
  if (isFilterResult(transformed)) {
    return transformed;
  }
  return {
    sql: operatorResult.sql,
    values: Array.isArray(transformed) ? transformed : [transformed]
  };
};
|
|
498
|
+
|
|
499
|
+
// src/vector/index.ts
|
|
500
|
+
var LibSQLVector = class extends vector.MastraVector {
|
|
501
|
+
turso;
|
|
502
|
+
maxRetries;
|
|
503
|
+
initialBackoffMs;
|
|
504
|
+
/**
 * Creates a LibSQL-backed vector store.
 *
 * @param connectionUrl - libsql URL (file:, :memory:, or remote).
 * @param authToken - Optional auth token for remote Turso databases.
 * @param syncUrl / syncInterval - Optional embedded-replica sync settings.
 * @param maxRetries - Max attempts for write ops hitting SQLITE_BUSY (default 5).
 * @param initialBackoffMs - First retry delay; doubles per attempt (default 100).
 * @param id - Store id forwarded to the MastraVector base class.
 */
constructor({
  connectionUrl,
  authToken,
  syncUrl,
  syncInterval,
  maxRetries = 5,
  initialBackoffMs = 100,
  id
}) {
  super({ id });
  this.turso = client.createClient({
    url: connectionUrl,
    syncUrl,
    authToken,
    syncInterval
  });
  this.maxRetries = maxRetries;
  this.initialBackoffMs = initialBackoffMs;
  // For local databases, enable WAL and a busy timeout to reduce lock
  // contention. Fired without awaiting (best-effort); failures are logged.
  if (connectionUrl.includes(`file:`) || connectionUrl.includes(`:memory:`)) {
    this.turso.execute("PRAGMA journal_mode=WAL;").then(() => this.logger.debug("LibSQLStore: PRAGMA journal_mode=WAL set.")).catch((err) => this.logger.warn("LibSQLStore: Failed to set PRAGMA journal_mode=WAL.", err));
    this.turso.execute("PRAGMA busy_timeout = 5000;").then(() => this.logger.debug("LibSQLStore: PRAGMA busy_timeout=5000 set.")).catch((err) => this.logger.warn("LibSQLStore: Failed to set PRAGMA busy_timeout=5000.", err));
  }
}
|
|
527
|
+
/**
 * Runs a write operation, retrying with exponential backoff when SQLite
 * reports the database is locked (SQLITE_BUSY / "database is locked").
 * Any other error is rethrown immediately.
 *
 * @param operation - Async thunk performing the write.
 * @param isTransaction - Only affects the retry log message wording.
 * @returns The operation's result.
 */
async executeWriteOperationWithRetry(operation, isTransaction = false) {
  let attempts = 0;
  let backoff = this.initialBackoffMs;
  while (attempts < this.maxRetries) {
    try {
      return await operation();
    } catch (error) {
      if (error.code === "SQLITE_BUSY" || error.message && error.message.toLowerCase().includes("database is locked")) {
        attempts++;
        if (attempts >= this.maxRetries) {
          this.logger.error(
            `LibSQLVector: Operation failed after ${this.maxRetries} attempts due to: ${error.message}`,
            error
          );
          throw error;
        }
        this.logger.warn(
          `LibSQLVector: Attempt ${attempts} failed due to ${isTransaction ? "transaction " : ""}database lock. Retrying in ${backoff}ms...`
        );
        await new Promise((resolve) => setTimeout(resolve, backoff));
        // Exponential backoff: 100ms, 200ms, 400ms, ...
        backoff *= 2;
      } else {
        // Non-lock errors are not retryable.
        throw error;
      }
    }
  }
  // Defensive: the loop always returns or throws; this guards logic drift.
  throw new Error("LibSQLVector: Max retries reached, but no error was re-thrown from the loop.");
}
|
|
555
|
+
transformFilter(filter) {
|
|
556
|
+
const translator = new LibSQLFilterTranslator();
|
|
557
|
+
return translator.translate(filter);
|
|
558
|
+
}
|
|
559
|
+
/**
 * Runs a cosine-similarity search over an index table.
 *
 * @param indexName - Table to search (validated as a SQL identifier).
 * @param queryVector - Array of finite numbers; validated before use.
 * @param topK - Max results (positive integer, default 10).
 * @param filter - Optional Mongo-style metadata filter.
 * @param includeVector - When true, each result carries its embedding.
 * @param minScore - Exclusive lower bound on score (default -1).
 * @returns Array of { id, score, metadata, vector? } ordered by score desc.
 * @throws MastraError with USER category for bad arguments, THIRD_PARTY
 *         category for execution failures.
 */
async query({
  indexName,
  queryVector,
  topK = 10,
  filter,
  includeVector = false,
  minScore = -1
  // Default to -1 to include all results (cosine similarity ranges from -1 to 1)
}) {
  try {
    if (!Number.isInteger(topK) || topK <= 0) {
      throw new Error("topK must be a positive integer");
    }
    if (!Array.isArray(queryVector) || !queryVector.every((x) => typeof x === "number" && Number.isFinite(x))) {
      throw new Error("queryVector must be an array of finite numbers");
    }
  } catch (error$1) {
    throw new error.MastraError(
      {
        id: "LIBSQL_VECTOR_QUERY_INVALID_ARGS",
        domain: error.ErrorDomain.STORAGE,
        category: error.ErrorCategory.USER
      },
      error$1
    );
  }
  try {
    const parsedIndexName = utils.parseSqlIdentifier(indexName, "index name");
    // Interpolated directly into SQL — safe only because every element was
    // just validated to be a finite number.
    const vectorStr = `[${queryVector.join(",")}]`;
    const translatedFilter = this.transformFilter(filter);
    const { sql: filterQuery, values: filterValues } = buildFilterQuery(translatedFilter);
    // minScore and topK bind to the trailing `score > ?` / `LIMIT ?`.
    filterValues.push(minScore);
    filterValues.push(topK);
    const query = `
      WITH vector_scores AS (
        SELECT
          vector_id as id,
          (1-vector_distance_cos(embedding, '${vectorStr}')) as score,
          metadata
          ${includeVector ? ", vector_extract(embedding) as embedding" : ""}
        FROM ${parsedIndexName}
        ${filterQuery}
      )
      SELECT *
      FROM vector_scores
      WHERE score > ?
      ORDER BY score DESC
      LIMIT ?`;
    const result = await this.turso.execute({
      sql: query,
      args: filterValues
    });
    return result.rows.map(({ id, score, metadata, embedding }) => ({
      id,
      score,
      metadata: JSON.parse(metadata ?? "{}"),
      // Spread only adds `vector` when it was selected and is non-null.
      ...includeVector && embedding && { vector: JSON.parse(embedding) }
    }));
  } catch (error$1) {
    throw new error.MastraError(
      {
        id: "LIBSQL_VECTOR_QUERY_FAILED",
        domain: error.ErrorDomain.STORAGE,
        category: error.ErrorCategory.THIRD_PARTY
      },
      error$1
    );
  }
}
|
|
628
|
+
upsert(args) {
|
|
629
|
+
try {
|
|
630
|
+
return this.executeWriteOperationWithRetry(() => this.doUpsert(args), true);
|
|
631
|
+
} catch (error$1) {
|
|
632
|
+
throw new error.MastraError(
|
|
633
|
+
{
|
|
634
|
+
id: "LIBSQL_VECTOR_UPSERT_FAILED",
|
|
635
|
+
domain: error.ErrorDomain.STORAGE,
|
|
636
|
+
category: error.ErrorCategory.THIRD_PARTY
|
|
637
|
+
},
|
|
638
|
+
error$1
|
|
639
|
+
);
|
|
640
|
+
}
|
|
641
|
+
}
|
|
642
|
+
/**
 * Performs the actual upsert inside a write transaction: one INSERT ... ON
 * CONFLICT per vector, then commit. Rolls back on any failure.
 *
 * @param indexName - Target table (validated as a SQL identifier).
 * @param vectors - Embeddings to write.
 * @param metadata - Optional parallel array of metadata objects.
 * @param ids - Optional parallel ids; random UUIDs are generated otherwise.
 * @returns The ids written (provided or generated).
 */
async doUpsert({ indexName, vectors, metadata, ids }) {
  const tx = await this.turso.transaction("write");
  try {
    const parsedIndexName = utils.parseSqlIdentifier(indexName, "index name");
    const vectorIds = ids || vectors.map(() => crypto.randomUUID());
    for (let i = 0; i < vectors.length; i++) {
      // Embedding is passed as a JSON array string and converted to a
      // 32-bit float blob by vector32() on both insert and update paths.
      const query = `
        INSERT INTO ${parsedIndexName} (vector_id, embedding, metadata)
        VALUES (?, vector32(?), ?)
        ON CONFLICT(vector_id) DO UPDATE SET
          embedding = vector32(?),
          metadata = ?
      `;
      await tx.execute({
        sql: query,
        args: [
          vectorIds[i],
          JSON.stringify(vectors[i]),
          JSON.stringify(metadata?.[i] || {}),
          JSON.stringify(vectors[i]),
          JSON.stringify(metadata?.[i] || {})
        ]
      });
    }
    await tx.commit();
    return vectorIds;
  } catch (error) {
    // Roll back unless the transaction already closed (e.g. commit failed
    // after the server closed it).
    !tx.closed && await tx.rollback();
    // Rewrite libsql's dimension-mismatch error into an actionable message.
    if (error instanceof Error && error.message?.includes("dimensions are different")) {
      const match = error.message.match(/dimensions are different: (\d+) != (\d+)/);
      if (match) {
        const [, actual, expected] = match;
        throw new Error(
          `Vector dimension mismatch: Index "${indexName}" expects ${expected} dimensions but got ${actual} dimensions. Either use a matching embedding model or delete and recreate the index with the new dimension.`
        );
      }
    }
    throw error;
  }
}
|
|
682
|
+
createIndex(args) {
|
|
683
|
+
try {
|
|
684
|
+
return this.executeWriteOperationWithRetry(() => this.doCreateIndex(args));
|
|
685
|
+
} catch (error$1) {
|
|
686
|
+
throw new error.MastraError(
|
|
687
|
+
{
|
|
688
|
+
id: "LIBSQL_VECTOR_CREATE_INDEX_FAILED",
|
|
689
|
+
domain: error.ErrorDomain.STORAGE,
|
|
690
|
+
category: error.ErrorCategory.THIRD_PARTY,
|
|
691
|
+
details: { indexName: args.indexName, dimension: args.dimension }
|
|
692
|
+
},
|
|
693
|
+
error$1
|
|
694
|
+
);
|
|
695
|
+
}
|
|
696
|
+
}
|
|
697
|
+
/**
 * Creates the backing table (vector_id, F32_BLOB embedding, JSON metadata)
 * and a libsql vector index over the embedding column, both IF NOT EXISTS.
 *
 * @param indexName - Validated as a SQL identifier before interpolation.
 * @param dimension - Embedding width; must be a positive integer.
 */
async doCreateIndex({ indexName, dimension }) {
  if (!Number.isInteger(dimension) || dimension <= 0) {
    throw new Error("Dimension must be a positive integer");
  }
  const parsedIndexName = utils.parseSqlIdentifier(indexName, "index name");
  // NOTE(review): SERIAL is not a SQLite type keyword — SQLite will accept
  // it as a loose column type, but it does NOT autoincrement here; rows rely
  // on vector_id for identity. Confirm this matches upstream intent.
  await this.turso.execute({
    sql: `
      CREATE TABLE IF NOT EXISTS ${parsedIndexName} (
        id SERIAL PRIMARY KEY,
        vector_id TEXT UNIQUE NOT NULL,
        embedding F32_BLOB(${dimension}),
        metadata TEXT DEFAULT '{}'
      );
    `,
    args: []
  });
  await this.turso.execute({
    sql: `
      CREATE INDEX IF NOT EXISTS ${parsedIndexName}_vector_idx
      ON ${parsedIndexName} (libsql_vector_idx(embedding))
    `,
    args: []
  });
}
|
|
721
|
+
deleteIndex(args) {
|
|
722
|
+
try {
|
|
723
|
+
return this.executeWriteOperationWithRetry(() => this.doDeleteIndex(args));
|
|
724
|
+
} catch (error$1) {
|
|
725
|
+
throw new error.MastraError(
|
|
726
|
+
{
|
|
727
|
+
id: "LIBSQL_VECTOR_DELETE_INDEX_FAILED",
|
|
728
|
+
domain: error.ErrorDomain.STORAGE,
|
|
729
|
+
category: error.ErrorCategory.THIRD_PARTY,
|
|
730
|
+
details: { indexName: args.indexName }
|
|
731
|
+
},
|
|
732
|
+
error$1
|
|
733
|
+
);
|
|
734
|
+
}
|
|
735
|
+
}
|
|
736
|
+
async doDeleteIndex({ indexName }) {
|
|
737
|
+
const parsedIndexName = utils.parseSqlIdentifier(indexName, "index name");
|
|
738
|
+
await this.turso.execute({
|
|
739
|
+
sql: `DROP TABLE IF EXISTS ${parsedIndexName}`,
|
|
740
|
+
args: []
|
|
741
|
+
});
|
|
742
|
+
}
|
|
743
|
+
/**
 * Lists index names by scanning sqlite_master for tables whose DDL mentions
 * F32_BLOB (the embedding column type used by doCreateIndex).
 *
 * @returns Array of table names.
 * @throws MastraError (THIRD_PARTY) on query failure.
 */
async listIndexes() {
  try {
    const vectorTablesQuery = `
      SELECT name FROM sqlite_master
      WHERE type='table'
      AND sql LIKE '%F32_BLOB%';
    `;
    const result = await this.turso.execute({
      sql: vectorTablesQuery,
      args: []
    });
    return result.rows.map((row) => row.name);
  } catch (error$1) {
    throw new error.MastraError(
      {
        id: "LIBSQL_VECTOR_LIST_INDEXES_FAILED",
        domain: error.ErrorDomain.STORAGE,
        category: error.ErrorCategory.THIRD_PARTY
      },
      error$1
    );
  }
}
|
|
766
|
+
/**
 * Retrieves statistics about a vector index.
 *
 * The dimension is recovered by parsing `F32_BLOB(N)` out of the table's DDL
 * in sqlite_master; the metric is always reported as "cosine" (the only
 * metric this store computes).
 *
 * @param {string} indexName - The name of the index to describe
 * @returns A promise that resolves to the index statistics including dimension, count and metric
 */
async describeIndex({ indexName }) {
  try {
    const parsedIndexName = utils.parseSqlIdentifier(indexName, "index name");
    const tableInfoQuery = `
      SELECT sql
      FROM sqlite_master
      WHERE type='table'
      AND name = ?;
    `;
    const tableInfo = await this.turso.execute({
      sql: tableInfoQuery,
      args: [parsedIndexName]
    });
    if (!tableInfo.rows[0]?.sql) {
      throw new Error(`Table ${parsedIndexName} not found`);
    }
    // Falls back to 0 when the DDL does not contain F32_BLOB(N).
    const dimension = parseInt(tableInfo.rows[0].sql.match(/F32_BLOB\((\d+)\)/)?.[1] || "0");
    const countQuery = `
      SELECT COUNT(*) as count
      FROM ${parsedIndexName};
    `;
    const countResult = await this.turso.execute({
      sql: countQuery,
      args: []
    });
    const metric = "cosine";
    return {
      dimension,
      count: countResult?.rows?.[0]?.count ?? 0,
      metric
    };
  } catch (e) {
    throw new error.MastraError(
      {
        id: "LIBSQL_VECTOR_DESCRIBE_INDEX_FAILED",
        domain: error.ErrorDomain.STORAGE,
        category: error.ErrorCategory.THIRD_PARTY,
        details: { indexName }
      },
      e
    );
  }
}
|
|
815
|
+
/**
|
|
816
|
+
* Updates a vector by its ID with the provided vector and/or metadata.
|
|
817
|
+
*
|
|
818
|
+
* @param indexName - The name of the index containing the vector.
|
|
819
|
+
* @param id - The ID of the vector to update.
|
|
820
|
+
* @param update - An object containing the vector and/or metadata to update.
|
|
821
|
+
* @param update.vector - An optional array of numbers representing the new vector.
|
|
822
|
+
* @param update.metadata - An optional record containing the new metadata.
|
|
823
|
+
* @returns A promise that resolves when the update is complete.
|
|
824
|
+
* @throws Will throw an error if no updates are provided or if the update operation fails.
|
|
825
|
+
*/
|
|
826
|
+
updateVector(args) {
|
|
827
|
+
return this.executeWriteOperationWithRetry(() => this.doUpdateVector(args));
|
|
828
|
+
}
|
|
829
|
+
// Performs the UPDATE for updateVector(); invoked via the write-retry
// wrapper. Builds the SET clause dynamically from whichever of
// update.vector / update.metadata were supplied.
async doUpdateVector({ indexName, id, update }) {
  // Validate the identifier before interpolating it into SQL.
  const parsedIndexName = utils.parseSqlIdentifier(indexName, "index name");
  const updates = [];
  const args = [];
  if (update.vector) {
    // vector32(?) converts the JSON-encoded number array into the native
    // F32 vector representation.
    updates.push("embedding = vector32(?)");
    args.push(JSON.stringify(update.vector));
  }
  if (update.metadata) {
    updates.push("metadata = ?");
    args.push(JSON.stringify(update.metadata));
  }
  if (updates.length === 0) {
    // Reject a no-op call rather than issuing a malformed "SET WHERE" query.
    throw new error.MastraError({
      id: "LIBSQL_VECTOR_UPDATE_VECTOR_INVALID_ARGS",
      domain: error.ErrorDomain.STORAGE,
      category: error.ErrorCategory.USER,
      details: { indexName, id },
      text: "No updates provided"
    });
  }
  // The final positional arg binds the WHERE clause.
  args.push(id);
  const query = `
        UPDATE ${parsedIndexName}
        SET ${updates.join(", ")}
        WHERE vector_id = ?;
      `;
  try {
    await this.turso.execute({
      sql: query,
      args
    });
  } catch (error$1) {
    throw new error.MastraError(
      {
        id: "LIBSQL_VECTOR_UPDATE_VECTOR_FAILED",
        domain: error.ErrorDomain.STORAGE,
        category: error.ErrorCategory.THIRD_PARTY,
        details: { indexName, id }
      },
      error$1
    );
  }
}
|
|
873
|
+
/**
|
|
874
|
+
* Deletes a vector by its ID.
|
|
875
|
+
* @param indexName - The name of the index containing the vector.
|
|
876
|
+
* @param id - The ID of the vector to delete.
|
|
877
|
+
* @returns A promise that resolves when the deletion is complete.
|
|
878
|
+
* @throws Will throw an error if the deletion operation fails.
|
|
879
|
+
*/
|
|
880
|
+
deleteVector(args) {
|
|
881
|
+
try {
|
|
882
|
+
return this.executeWriteOperationWithRetry(() => this.doDeleteVector(args));
|
|
883
|
+
} catch (error$1) {
|
|
884
|
+
throw new error.MastraError(
|
|
885
|
+
{
|
|
886
|
+
id: "LIBSQL_VECTOR_DELETE_VECTOR_FAILED",
|
|
887
|
+
domain: error.ErrorDomain.STORAGE,
|
|
888
|
+
category: error.ErrorCategory.THIRD_PARTY,
|
|
889
|
+
details: { indexName: args.indexName, id: args.id }
|
|
890
|
+
},
|
|
891
|
+
error$1
|
|
892
|
+
);
|
|
893
|
+
}
|
|
894
|
+
}
|
|
895
|
+
async doDeleteVector({ indexName, id }) {
|
|
896
|
+
const parsedIndexName = utils.parseSqlIdentifier(indexName, "index name");
|
|
897
|
+
await this.turso.execute({
|
|
898
|
+
sql: `DELETE FROM ${parsedIndexName} WHERE vector_id = ?`,
|
|
899
|
+
args: [id]
|
|
900
|
+
});
|
|
901
|
+
}
|
|
902
|
+
truncateIndex(args) {
|
|
903
|
+
try {
|
|
904
|
+
return this.executeWriteOperationWithRetry(() => this._doTruncateIndex(args));
|
|
905
|
+
} catch (error$1) {
|
|
906
|
+
throw new error.MastraError(
|
|
907
|
+
{
|
|
908
|
+
id: "LIBSQL_VECTOR_TRUNCATE_INDEX_FAILED",
|
|
909
|
+
domain: error.ErrorDomain.STORAGE,
|
|
910
|
+
category: error.ErrorCategory.THIRD_PARTY,
|
|
911
|
+
details: { indexName: args.indexName }
|
|
912
|
+
},
|
|
913
|
+
error$1
|
|
914
|
+
);
|
|
915
|
+
}
|
|
916
|
+
}
|
|
917
|
+
async _doTruncateIndex({ indexName }) {
|
|
918
|
+
await this.turso.execute({
|
|
919
|
+
sql: `DELETE FROM ${utils.parseSqlIdentifier(indexName, "index name")}`,
|
|
920
|
+
args: []
|
|
921
|
+
});
|
|
922
|
+
}
|
|
923
|
+
};
|
|
924
|
+
// LibSQL-backed implementation of the memory storage domain: threads,
// messages, and resources. The class body continues below with the
// individual domain methods.
var MemoryLibSQL = class extends storage.MemoryStorage {
  // Raw @libsql/client instance used for direct SQL (execute/batch/transaction).
  client;
  // Shared low-level operations layer (insert/load) used for simple
  // keyed table access.
  operations;
  constructor({ client, operations }) {
    super();
    this.client = client;
    this.operations = operations;
  }
|
|
932
|
+
parseRow(row) {
|
|
933
|
+
let content = row.content;
|
|
934
|
+
try {
|
|
935
|
+
content = JSON.parse(row.content);
|
|
936
|
+
} catch {
|
|
937
|
+
}
|
|
938
|
+
const result = {
|
|
939
|
+
id: row.id,
|
|
940
|
+
content,
|
|
941
|
+
role: row.role,
|
|
942
|
+
createdAt: new Date(row.createdAt),
|
|
943
|
+
threadId: row.thread_id,
|
|
944
|
+
resourceId: row.resourceId
|
|
945
|
+
};
|
|
946
|
+
if (row.type && row.type !== `v2`) result.type = row.type;
|
|
947
|
+
return result;
|
|
948
|
+
}
|
|
949
|
+
// Fetches explicitly requested messages plus surrounding context. For each
// include entry, a window of `withPreviousMessages` before and
// `withNextMessages` after the target message (ordered by createdAt) is
// selected via a ROW_NUMBER CTE; all windows are UNION ALL'd and then
// deduplicated by id in JS. Returns null when no includes are requested.
async _getIncludedMessages({
  threadId,
  include
}) {
  if (!threadId.trim()) throw new Error("threadId must be a non-empty string");
  if (!include) return null;
  const unionQueries = [];
  const params = [];
  for (const inc of include) {
    const { id, withPreviousMessages = 0, withNextMessages = 0 } = inc;
    // An include entry may target a different thread than the main query.
    const searchId = inc.threadId || threadId;
    unionQueries.push(
      `
      SELECT * FROM (
        WITH numbered_messages AS (
          SELECT
            id, content, role, type, "createdAt", thread_id, "resourceId",
            ROW_NUMBER() OVER (ORDER BY "createdAt" ASC) as row_num
          FROM "${storage.TABLE_MESSAGES}"
          WHERE thread_id = ?
        ),
        target_positions AS (
          SELECT row_num as target_pos
          FROM numbered_messages
          WHERE id = ?
        )
        SELECT DISTINCT m.*
        FROM numbered_messages m
        CROSS JOIN target_positions t
        WHERE m.row_num BETWEEN (t.target_pos - ?) AND (t.target_pos + ?)
      )
      `
      // Keep ASC for final sorting after fetching context
    );
    params.push(searchId, id, withPreviousMessages, withNextMessages);
  }
  const finalQuery = unionQueries.join(" UNION ALL ") + ' ORDER BY "createdAt" ASC';
  const includedResult = await this.client.execute({ sql: finalQuery, args: params });
  const includedRows = includedResult.rows?.map((row) => this.parseRow(row));
  // Windows from different include entries may overlap; keep first occurrence.
  const seen = /* @__PURE__ */ new Set();
  const dedupedRows = includedRows.filter((row) => {
    if (seen.has(row.id)) return false;
    seen.add(row.id);
    return true;
  });
  return dedupedRows;
}
|
|
996
|
+
// Fetches messages by explicit id list, newest first, and runs them through
// MessageList for normalization to the DB message format.
async listMessagesById({ messageIds }) {
  if (messageIds.length === 0) return { messages: [] };
  try {
    const sql = `
    SELECT
      id,
      content,
      role,
      type,
      "createdAt",
      thread_id,
      "resourceId"
    FROM "${storage.TABLE_MESSAGES}"
    WHERE id IN (${messageIds.map(() => "?").join(", ")})
    ORDER BY "createdAt" DESC
    `;
    const result = await this.client.execute({ sql, args: messageIds });
    if (!result.rows) return { messages: [] };
    // parseRow does not use `this`, so passing it unbound here is safe.
    const list = new agent.MessageList().add(result.rows.map(this.parseRow), "memory");
    return { messages: list.get.all.db() };
  } catch (error$1) {
    throw new error.MastraError(
      {
        id: "LIBSQL_STORE_LIST_MESSAGES_BY_ID_FAILED",
        domain: error.ErrorDomain.STORAGE,
        category: error.ErrorCategory.THIRD_PARTY,
        details: { messageIds: JSON.stringify(messageIds) }
      },
      error$1
    );
  }
}
|
|
1028
|
+
// Paginated message listing for a thread, with optional resourceId and
// date-range filters, explicit "include" context windows, and in-memory
// re-sorting after merging included messages. On query failure it logs and
// returns an empty page rather than throwing.
async listMessages(args) {
  const { threadId, resourceId, include, filter, perPage: perPageInput, page = 0, orderBy } = args;
  if (!threadId.trim()) {
    throw new error.MastraError(
      {
        id: "STORAGE_LIBSQL_LIST_MESSAGES_INVALID_THREAD_ID",
        domain: error.ErrorDomain.STORAGE,
        category: error.ErrorCategory.THIRD_PARTY,
        details: { threadId }
      },
      new Error("threadId must be a non-empty string")
    );
  }
  if (page < 0) {
    throw new error.MastraError(
      {
        id: "LIBSQL_STORE_LIST_MESSAGES_INVALID_PAGE",
        domain: error.ErrorDomain.STORAGE,
        category: error.ErrorCategory.USER,
        details: { page }
      },
      new Error("page must be >= 0")
    );
  }
  // perPageInput === false means "no pagination: return everything".
  const perPage = storage.normalizePerPage(perPageInput, 40);
  const { offset, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
  try {
    const { field, direction } = this.parseOrderBy(orderBy, "ASC");
    const orderByStatement = `ORDER BY "${field}" ${direction}`;
    const conditions = [`thread_id = ?`];
    const queryParams = [threadId];
    if (resourceId) {
      conditions.push(`"resourceId" = ?`);
      queryParams.push(resourceId);
    }
    // Date bounds accept either Date objects or pre-formatted strings.
    if (filter?.dateRange?.start) {
      conditions.push(`"createdAt" >= ?`);
      queryParams.push(
        filter.dateRange.start instanceof Date ? filter.dateRange.start.toISOString() : filter.dateRange.start
      );
    }
    if (filter?.dateRange?.end) {
      conditions.push(`"createdAt" <= ?`);
      queryParams.push(
        filter.dateRange.end instanceof Date ? filter.dateRange.end.toISOString() : filter.dateRange.end
      );
    }
    const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
    const countResult = await this.client.execute({
      sql: `SELECT COUNT(*) as count FROM ${storage.TABLE_MESSAGES} ${whereClause}`,
      args: queryParams
    });
    const total = Number(countResult.rows?.[0]?.count ?? 0);
    const limitValue = perPageInput === false ? total : perPage;
    const dataResult = await this.client.execute({
      sql: `SELECT id, content, role, type, "createdAt", "resourceId", "thread_id" FROM ${storage.TABLE_MESSAGES} ${whereClause} ${orderByStatement} LIMIT ? OFFSET ?`,
      args: [...queryParams, limitValue, offset]
    });
    const messages = (dataResult.rows || []).map((row) => this.parseRow(row));
    if (total === 0 && messages.length === 0 && (!include || include.length === 0)) {
      return {
        messages: [],
        total: 0,
        page,
        perPage: perPageForResponse,
        hasMore: false
      };
    }
    // Merge explicitly included context messages, skipping ids already in
    // the current page.
    const messageIds = new Set(messages.map((m) => m.id));
    if (include && include.length > 0) {
      const includeMessages = await this._getIncludedMessages({ threadId, include });
      if (includeMessages) {
        for (const includeMsg of includeMessages) {
          if (!messageIds.has(includeMsg.id)) {
            messages.push(includeMsg);
            messageIds.add(includeMsg.id);
          }
        }
      }
    }
    const list = new agent.MessageList().add(messages, "memory");
    let finalMessages = list.get.all.db();
    // Re-sort in JS: included messages were appended after the SQL-ordered
    // page, so the combined set must be ordered again by the same field.
    finalMessages = finalMessages.sort((a, b) => {
      const isDateField = field === "createdAt" || field === "updatedAt";
      const aValue = isDateField ? new Date(a[field]).getTime() : a[field];
      const bValue = isDateField ? new Date(b[field]).getTime() : b[field];
      if (typeof aValue === "number" && typeof bValue === "number") {
        return direction === "ASC" ? aValue - bValue : bValue - aValue;
      }
      return direction === "ASC" ? String(aValue).localeCompare(String(bValue)) : String(bValue).localeCompare(String(aValue));
    });
    // hasMore considers only messages that belong to the requested thread
    // (included messages may come from other threads).
    const returnedThreadMessageIds = new Set(finalMessages.filter((m) => m.threadId === threadId).map((m) => m.id));
    const allThreadMessagesReturned = returnedThreadMessageIds.size >= total;
    const hasMore = perPageInput !== false && !allThreadMessagesReturned && offset + perPage < total;
    return {
      messages: finalMessages,
      total,
      page,
      perPage: perPageForResponse,
      hasMore
    };
  } catch (error$1) {
    // Best-effort contract: log the wrapped error and return an empty page.
    const mastraError = new error.MastraError(
      {
        id: "LIBSQL_STORE_LIST_MESSAGES_FAILED",
        domain: error.ErrorDomain.STORAGE,
        category: error.ErrorCategory.THIRD_PARTY,
        details: {
          threadId,
          resourceId: resourceId ?? ""
        }
      },
      error$1
    );
    this.logger?.error?.(mastraError.toString());
    this.logger?.trackException?.(mastraError);
    return {
      messages: [],
      total: 0,
      page,
      perPage: perPageForResponse,
      hasMore: false
    };
  }
}
|
|
1153
|
+
// Upserts a batch of messages (INSERT ... ON CONFLICT(id) DO UPDATE) in
// chunks of 50, then bumps the owning thread's updatedAt. All messages are
// assumed to belong to the thread of the first message — TODO confirm this
// holds for all callers, since only that one thread's updatedAt is bumped.
async saveMessages({ messages }) {
  if (messages.length === 0) return { messages };
  try {
    const threadId = messages[0]?.threadId;
    if (!threadId) {
      throw new Error("Thread ID is required");
    }
    const batchStatements = messages.map((message) => {
      const time = message.createdAt || /* @__PURE__ */ new Date();
      if (!message.threadId) {
        throw new Error(
          `Expected to find a threadId for message, but couldn't find one. An unexpected error has occurred.`
        );
      }
      if (!message.resourceId) {
        throw new Error(
          `Expected to find a resourceId for message, but couldn't find one. An unexpected error has occurred.`
        );
      }
      // Note: "createdAt" is intentionally NOT in the conflict-update list,
      // so re-saving a message preserves its original timestamp.
      return {
        sql: `INSERT INTO "${storage.TABLE_MESSAGES}" (id, thread_id, content, role, type, "createdAt", "resourceId")
                VALUES (?, ?, ?, ?, ?, ?, ?)
                ON CONFLICT(id) DO UPDATE SET
                  thread_id=excluded.thread_id,
                  content=excluded.content,
                  role=excluded.role,
                  type=excluded.type,
                  "resourceId"=excluded."resourceId"
              `,
        args: [
          message.id,
          message.threadId,
          typeof message.content === "object" ? JSON.stringify(message.content) : message.content,
          message.role,
          message.type || "v2",
          time instanceof Date ? time.toISOString() : time,
          message.resourceId
        ]
      };
    });
    const now = (/* @__PURE__ */ new Date()).toISOString();
    batchStatements.push({
      sql: `UPDATE "${storage.TABLE_THREADS}" SET "updatedAt" = ? WHERE id = ?`,
      args: [now, threadId]
    });
    // Execute message upserts in chunks; the thread-touch statement is
    // peeled off and executed last, after all chunks succeed.
    const BATCH_SIZE = 50;
    const messageStatements = batchStatements.slice(0, -1);
    const threadUpdateStatement = batchStatements[batchStatements.length - 1];
    for (let i = 0; i < messageStatements.length; i += BATCH_SIZE) {
      const batch = messageStatements.slice(i, i + BATCH_SIZE);
      if (batch.length > 0) {
        await this.client.batch(batch, "write");
      }
    }
    if (threadUpdateStatement) {
      await this.client.execute(threadUpdateStatement);
    }
    const list = new agent.MessageList().add(messages, "memory");
    return { messages: list.get.all.db() };
  } catch (error$1) {
    throw new error.MastraError(
      {
        id: "LIBSQL_STORE_SAVE_MESSAGES_FAILED",
        domain: error.ErrorDomain.STORAGE,
        category: error.ErrorCategory.THIRD_PARTY
      },
      error$1
    );
  }
}
|
|
1223
|
+
// Applies partial updates to existing messages by id. Content updates are
// shallow-merged with the stored content (metadata deep-merged one level);
// other fields become SET clauses directly. Touches updatedAt on every
// affected thread (including both source and destination when a message is
// moved between threads). Returns the re-fetched rows after the batch write.
async updateMessages({
  messages
}) {
  if (messages.length === 0) {
    return [];
  }
  const messageIds = messages.map((m) => m.id);
  const placeholders = messageIds.map(() => "?").join(",");
  const selectSql = `SELECT * FROM ${storage.TABLE_MESSAGES} WHERE id IN (${placeholders})`;
  const existingResult = await this.client.execute({ sql: selectSql, args: messageIds });
  const existingMessages = existingResult.rows.map((row) => this.parseRow(row));
  if (existingMessages.length === 0) {
    return [];
  }
  const batchStatements = [];
  const threadIdsToUpdate = /* @__PURE__ */ new Set();
  // JS field name -> DB column name, where they differ.
  const columnMapping = {
    threadId: "thread_id"
  };
  for (const existingMessage of existingMessages) {
    const updatePayload = messages.find((m) => m.id === existingMessage.id);
    if (!updatePayload) continue;
    const { id, ...fieldsToUpdate } = updatePayload;
    if (Object.keys(fieldsToUpdate).length === 0) continue;
    threadIdsToUpdate.add(existingMessage.threadId);
    if (updatePayload.threadId && updatePayload.threadId !== existingMessage.threadId) {
      threadIdsToUpdate.add(updatePayload.threadId);
    }
    const setClauses = [];
    const args = [];
    const updatableFields = { ...fieldsToUpdate };
    if (updatableFields.content) {
      const newContent = {
        ...existingMessage.content,
        ...updatableFields.content,
        // Deep merge metadata if it exists on both
        ...existingMessage.content?.metadata && updatableFields.content.metadata ? {
          metadata: {
            ...existingMessage.content.metadata,
            ...updatableFields.content.metadata
          }
        } : {}
      };
      setClauses.push(`${utils.parseSqlIdentifier("content", "column name")} = ?`);
      args.push(JSON.stringify(newContent));
      delete updatableFields.content;
    }
    for (const key in updatableFields) {
      if (Object.prototype.hasOwnProperty.call(updatableFields, key)) {
        const dbKey = columnMapping[key] || key;
        // Identifier is validated before interpolation into the SET clause.
        setClauses.push(`${utils.parseSqlIdentifier(dbKey, "column name")} = ?`);
        let value = updatableFields[key];
        if (typeof value === "object" && value !== null) {
          value = JSON.stringify(value);
        }
        args.push(value);
      }
    }
    if (setClauses.length === 0) continue;
    args.push(id);
    const sql = `UPDATE ${storage.TABLE_MESSAGES} SET ${setClauses.join(", ")} WHERE id = ?`;
    batchStatements.push({ sql, args });
  }
  if (batchStatements.length === 0) {
    return existingMessages;
  }
  const now = (/* @__PURE__ */ new Date()).toISOString();
  for (const threadId of threadIdsToUpdate) {
    if (threadId) {
      batchStatements.push({
        sql: `UPDATE ${storage.TABLE_THREADS} SET updatedAt = ? WHERE id = ?`,
        args: [now, threadId]
      });
    }
  }
  await this.client.batch(batchStatements, "write");
  // Re-read so the caller sees post-update state.
  const updatedResult = await this.client.execute({ sql: selectSql, args: messageIds });
  return updatedResult.rows.map((row) => this.parseRow(row));
}
|
|
1302
|
+
// Deletes messages by id inside a single write transaction, in chunks of
// 100. Affected thread ids are collected before each delete so their
// updatedAt can be bumped at the end; the transaction is rolled back if any
// step fails, then the failure is rethrown wrapped in a MastraError.
async deleteMessages(messageIds) {
  if (!messageIds || messageIds.length === 0) {
    return;
  }
  try {
    const BATCH_SIZE = 100;
    const threadIds = /* @__PURE__ */ new Set();
    const tx = await this.client.transaction("write");
    try {
      for (let i = 0; i < messageIds.length; i += BATCH_SIZE) {
        const batch = messageIds.slice(i, i + BATCH_SIZE);
        const placeholders = batch.map(() => "?").join(",");
        // Record owning threads before the rows disappear.
        const result = await tx.execute({
          sql: `SELECT DISTINCT thread_id FROM "${storage.TABLE_MESSAGES}" WHERE id IN (${placeholders})`,
          args: batch
        });
        result.rows?.forEach((row) => {
          if (row.thread_id) threadIds.add(row.thread_id);
        });
        await tx.execute({
          sql: `DELETE FROM "${storage.TABLE_MESSAGES}" WHERE id IN (${placeholders})`,
          args: batch
        });
      }
      if (threadIds.size > 0) {
        const now = (/* @__PURE__ */ new Date()).toISOString();
        for (const threadId of threadIds) {
          await tx.execute({
            sql: `UPDATE "${storage.TABLE_THREADS}" SET "updatedAt" = ? WHERE id = ?`,
            args: [now, threadId]
          });
        }
      }
      await tx.commit();
    } catch (error) {
      // NOTE: this inner `error` binding shadows the `error` module, but
      // only within this catch block; the outer catch still sees the module.
      await tx.rollback();
      throw error;
    }
  } catch (error$1) {
    throw new error.MastraError(
      {
        id: "LIBSQL_STORE_DELETE_MESSAGES_FAILED",
        domain: error.ErrorDomain.STORAGE,
        category: error.ErrorCategory.THIRD_PARTY,
        details: { messageIds: messageIds.join(", ") }
      },
      error$1
    );
  }
}
|
|
1352
|
+
async getResourceById({ resourceId }) {
|
|
1353
|
+
const result = await this.operations.load({
|
|
1354
|
+
tableName: storage.TABLE_RESOURCES,
|
|
1355
|
+
keys: { id: resourceId }
|
|
1356
|
+
});
|
|
1357
|
+
if (!result) {
|
|
1358
|
+
return null;
|
|
1359
|
+
}
|
|
1360
|
+
return {
|
|
1361
|
+
...result,
|
|
1362
|
+
// Ensure workingMemory is always returned as a string, even if auto-parsed as JSON
|
|
1363
|
+
workingMemory: result.workingMemory && typeof result.workingMemory === "object" ? JSON.stringify(result.workingMemory) : result.workingMemory,
|
|
1364
|
+
metadata: typeof result.metadata === "string" ? JSON.parse(result.metadata) : result.metadata,
|
|
1365
|
+
createdAt: new Date(result.createdAt),
|
|
1366
|
+
updatedAt: new Date(result.updatedAt)
|
|
1367
|
+
};
|
|
1368
|
+
}
|
|
1369
|
+
async saveResource({ resource }) {
|
|
1370
|
+
await this.operations.insert({
|
|
1371
|
+
tableName: storage.TABLE_RESOURCES,
|
|
1372
|
+
record: {
|
|
1373
|
+
...resource,
|
|
1374
|
+
metadata: JSON.stringify(resource.metadata)
|
|
1375
|
+
}
|
|
1376
|
+
});
|
|
1377
|
+
return resource;
|
|
1378
|
+
}
|
|
1379
|
+
// Upsert-style resource update: creates the resource when it does not exist,
// otherwise updates only the supplied fields. Metadata is shallow-merged
// with the existing metadata; workingMemory is replaced only when provided
// (void 0 check distinguishes "omitted" from falsy values like "").
async updateResource({
  resourceId,
  workingMemory,
  metadata
}) {
  const existingResource = await this.getResourceById({ resourceId });
  if (!existingResource) {
    const newResource = {
      id: resourceId,
      workingMemory,
      metadata: metadata || {},
      createdAt: /* @__PURE__ */ new Date(),
      updatedAt: /* @__PURE__ */ new Date()
    };
    return this.saveResource({ resource: newResource });
  }
  const updatedResource = {
    ...existingResource,
    workingMemory: workingMemory !== void 0 ? workingMemory : existingResource.workingMemory,
    metadata: {
      ...existingResource.metadata,
      ...metadata
    },
    updatedAt: /* @__PURE__ */ new Date()
  };
  // Build the SET clause from only the fields actually being changed.
  const updates = [];
  const values = [];
  if (workingMemory !== void 0) {
    updates.push("workingMemory = ?");
    values.push(workingMemory);
  }
  if (metadata) {
    updates.push("metadata = ?");
    values.push(JSON.stringify(updatedResource.metadata));
  }
  updates.push("updatedAt = ?");
  values.push(updatedResource.updatedAt.toISOString());
  values.push(resourceId);
  await this.client.execute({
    sql: `UPDATE ${storage.TABLE_RESOURCES} SET ${updates.join(", ")} WHERE id = ?`,
    args: values
  });
  return updatedResource;
}
|
|
1423
|
+
async getThreadById({ threadId }) {
|
|
1424
|
+
try {
|
|
1425
|
+
const result = await this.operations.load({
|
|
1426
|
+
tableName: storage.TABLE_THREADS,
|
|
1427
|
+
keys: { id: threadId }
|
|
1428
|
+
});
|
|
1429
|
+
if (!result) {
|
|
1430
|
+
return null;
|
|
1431
|
+
}
|
|
1432
|
+
return {
|
|
1433
|
+
...result,
|
|
1434
|
+
metadata: typeof result.metadata === "string" ? JSON.parse(result.metadata) : result.metadata,
|
|
1435
|
+
createdAt: new Date(result.createdAt),
|
|
1436
|
+
updatedAt: new Date(result.updatedAt)
|
|
1437
|
+
};
|
|
1438
|
+
} catch (error$1) {
|
|
1439
|
+
throw new error.MastraError(
|
|
1440
|
+
{
|
|
1441
|
+
id: "LIBSQL_STORE_GET_THREAD_BY_ID_FAILED",
|
|
1442
|
+
domain: error.ErrorDomain.STORAGE,
|
|
1443
|
+
category: error.ErrorCategory.THIRD_PARTY,
|
|
1444
|
+
details: { threadId }
|
|
1445
|
+
},
|
|
1446
|
+
error$1
|
|
1447
|
+
);
|
|
1448
|
+
}
|
|
1449
|
+
}
|
|
1450
|
+
// Paginated listing of threads owned by a resource. perPage === false
// disables pagination (everything in one page, hasMore always false). On
// query failure, logs the wrapped error and returns an empty page rather
// than throwing.
async listThreadsByResourceId(args) {
  const { resourceId, page = 0, perPage: perPageInput, orderBy } = args;
  if (page < 0) {
    throw new error.MastraError(
      {
        id: "LIBSQL_STORE_LIST_THREADS_BY_RESOURCE_ID_INVALID_PAGE",
        domain: error.ErrorDomain.STORAGE,
        category: error.ErrorCategory.USER,
        details: { page }
      },
      new Error("page must be >= 0")
    );
  }
  const perPage = storage.normalizePerPage(perPageInput, 100);
  const { offset, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
  const { field, direction } = this.parseOrderBy(orderBy);
  try {
    const baseQuery = `FROM ${storage.TABLE_THREADS} WHERE resourceId = ?`;
    const queryParams = [resourceId];
    const mapRowToStorageThreadType = (row) => ({
      id: row.id,
      resourceId: row.resourceId,
      title: row.title,
      createdAt: new Date(row.createdAt),
      // Convert string to Date
      updatedAt: new Date(row.updatedAt),
      // Convert string to Date
      metadata: typeof row.metadata === "string" ? JSON.parse(row.metadata) : row.metadata
    });
    const countResult = await this.client.execute({
      sql: `SELECT COUNT(*) as count ${baseQuery}`,
      args: queryParams
    });
    const total = Number(countResult.rows?.[0]?.count ?? 0);
    if (total === 0) {
      return {
        threads: [],
        total: 0,
        page,
        perPage: perPageForResponse,
        hasMore: false
      };
    }
    const limitValue = perPageInput === false ? total : perPage;
    const dataResult = await this.client.execute({
      sql: `SELECT * ${baseQuery} ORDER BY "${field}" ${direction} LIMIT ? OFFSET ?`,
      args: [...queryParams, limitValue, offset]
    });
    const threads = (dataResult.rows || []).map(mapRowToStorageThreadType);
    return {
      threads,
      total,
      page,
      perPage: perPageForResponse,
      hasMore: perPageInput === false ? false : offset + perPage < total
    };
  } catch (error$1) {
    const mastraError = new error.MastraError(
      {
        id: "LIBSQL_STORE_LIST_THREADS_BY_RESOURCE_ID_FAILED",
        domain: error.ErrorDomain.STORAGE,
        category: error.ErrorCategory.THIRD_PARTY,
        details: { resourceId }
      },
      error$1
    );
    this.logger?.trackException?.(mastraError);
    this.logger?.error?.(mastraError.toString());
    return {
      threads: [],
      total: 0,
      page,
      perPage: perPageForResponse,
      hasMore: false
    };
  }
}
|
|
1527
|
+
async saveThread({ thread }) {
|
|
1528
|
+
try {
|
|
1529
|
+
await this.operations.insert({
|
|
1530
|
+
tableName: storage.TABLE_THREADS,
|
|
1531
|
+
record: {
|
|
1532
|
+
...thread,
|
|
1533
|
+
metadata: JSON.stringify(thread.metadata)
|
|
1534
|
+
}
|
|
1535
|
+
});
|
|
1536
|
+
return thread;
|
|
1537
|
+
} catch (error$1) {
|
|
1538
|
+
const mastraError = new error.MastraError(
|
|
1539
|
+
{
|
|
1540
|
+
id: "LIBSQL_STORE_SAVE_THREAD_FAILED",
|
|
1541
|
+
domain: error.ErrorDomain.STORAGE,
|
|
1542
|
+
category: error.ErrorCategory.THIRD_PARTY,
|
|
1543
|
+
details: { threadId: thread.id }
|
|
1544
|
+
},
|
|
1545
|
+
error$1
|
|
1546
|
+
);
|
|
1547
|
+
this.logger?.trackException?.(mastraError);
|
|
1548
|
+
this.logger?.error?.(mastraError.toString());
|
|
1549
|
+
throw mastraError;
|
|
1550
|
+
}
|
|
1551
|
+
}
|
|
1552
|
+
// Replaces a thread's title and shallow-merges new metadata into its
// existing metadata. Throws a USER-category 404-style error when the thread
// does not exist; wraps write failures as THIRD_PARTY errors.
async updateThread({
  id,
  title,
  metadata
}) {
  const thread = await this.getThreadById({ threadId: id });
  if (!thread) {
    throw new error.MastraError({
      id: "LIBSQL_STORE_UPDATE_THREAD_FAILED_THREAD_NOT_FOUND",
      domain: error.ErrorDomain.STORAGE,
      category: error.ErrorCategory.USER,
      text: `Thread ${id} not found`,
      details: {
        status: 404,
        threadId: id
      }
    });
  }
  const updatedThread = {
    ...thread,
    title,
    metadata: {
      ...thread.metadata,
      ...metadata
    }
  };
  try {
    // Note: title is written unconditionally; passing undefined clears it —
    // TODO confirm callers always supply a title.
    await this.client.execute({
      sql: `UPDATE ${storage.TABLE_THREADS} SET title = ?, metadata = ? WHERE id = ?`,
      args: [title, JSON.stringify(updatedThread.metadata), id]
    });
    return updatedThread;
  } catch (error$1) {
    throw new error.MastraError(
      {
        id: "LIBSQL_STORE_UPDATE_THREAD_FAILED",
        domain: error.ErrorDomain.STORAGE,
        category: error.ErrorCategory.THIRD_PARTY,
        text: `Failed to update thread ${id}`,
        details: { threadId: id }
      },
      error$1
    );
  }
}
|
|
1597
|
+
async deleteThread({ threadId }) {
|
|
1598
|
+
try {
|
|
1599
|
+
await this.client.execute({
|
|
1600
|
+
sql: `DELETE FROM ${storage.TABLE_MESSAGES} WHERE thread_id = ?`,
|
|
1601
|
+
args: [threadId]
|
|
1602
|
+
});
|
|
1603
|
+
await this.client.execute({
|
|
1604
|
+
sql: `DELETE FROM ${storage.TABLE_THREADS} WHERE id = ?`,
|
|
1605
|
+
args: [threadId]
|
|
1606
|
+
});
|
|
1607
|
+
} catch (error$1) {
|
|
1608
|
+
throw new error.MastraError(
|
|
1609
|
+
{
|
|
1610
|
+
id: "LIBSQL_STORE_DELETE_THREAD_FAILED",
|
|
1611
|
+
domain: error.ErrorDomain.STORAGE,
|
|
1612
|
+
category: error.ErrorCategory.THIRD_PARTY,
|
|
1613
|
+
details: { threadId }
|
|
1614
|
+
},
|
|
1615
|
+
error$1
|
|
1616
|
+
);
|
|
1617
|
+
}
|
|
1618
|
+
}
|
|
1619
|
+
};
|
|
1620
|
+
/**
 * Builds an async executor that retries a write operation when SQLite
 * reports contention (SQLITE_BUSY / "database is locked"), using
 * exponential backoff starting at `initialBackoffMs` and doubling per
 * attempt, up to `maxRetries` retries. Non-busy errors, or exhaustion of
 * the retry budget, are logged and rethrown.
 */
function createExecuteWriteOperationWithRetry({
  logger,
  maxRetries,
  initialBackoffMs
}) {
  const isBusyError = (err) =>
    Boolean(err.message && (err.message.includes("SQLITE_BUSY") || err.message.includes("database is locked")));
  return async function executeWriteOperationWithRetry(operationFn, operationDescription) {
    let attempt = 0;
    for (;;) {
      try {
        return await operationFn();
      } catch (err) {
        if (!(isBusyError(err) && attempt < maxRetries)) {
          logger.error(`LibSQLStore: Error during ${operationDescription} after ${attempt} retries: ${err}`);
          throw err;
        }
        attempt += 1;
        // Exponential backoff: initialBackoffMs, 2x, 4x, ...
        const backoffTime = initialBackoffMs * 2 ** (attempt - 1);
        logger.warn(
          `LibSQLStore: Encountered SQLITE_BUSY during ${operationDescription}. Retrying (${attempt}/${maxRetries}) in ${backoffTime}ms...`
        );
        await new Promise((resolve) => setTimeout(resolve, backoffTime));
      }
    }
  };
}
|
|
1646
|
+
/**
 * Builds a parameterized `INSERT OR REPLACE` statement for one record.
 * Identifiers are validated via parseSqlIdentifier; values are normalized:
 * null/undefined -> NULL, Date -> ISO string, other objects -> JSON string.
 */
function prepareStatement({ tableName, record }) {
  const table = utils.parseSqlIdentifier(tableName, "table name");
  const columnNames = [];
  const args = [];
  for (const [col, raw] of Object.entries(record)) {
    columnNames.push(utils.parseSqlIdentifier(col, "column name"));
    if (raw === null || typeof raw === "undefined") {
      args.push(null);
    } else if (raw instanceof Date) {
      args.push(raw.toISOString());
    } else if (typeof raw === "object") {
      args.push(JSON.stringify(raw));
    } else {
      args.push(raw);
    }
  }
  const placeholders = columnNames.map(() => "?").join(", ");
  return {
    sql: `INSERT OR REPLACE INTO ${table} (${columnNames.join(", ")}) VALUES (${placeholders})`,
    args
  };
}
|
|
1664
|
+
/**
 * Builds a parameterized UPDATE statement: SET clause from `updates`,
 * WHERE clause from `keys` (validated against the table's schema).
 * Values go through transformToSqlValue for SQLite-safe representation.
 */
function prepareUpdateStatement({
  tableName,
  updates,
  keys
}) {
  const table = utils.parseSqlIdentifier(tableName, "table name");
  const tableSchema = storage.TABLE_SCHEMAS[tableName];
  const assignments = [];
  const values = [];
  for (const [col, raw] of Object.entries(updates)) {
    assignments.push(`${utils.parseSqlIdentifier(col, "column name")} = ?`);
    values.push(transformToSqlValue(raw));
  }
  const whereClause = prepareWhereClause(keys, tableSchema);
  return {
    sql: `UPDATE ${table} SET ${assignments.join(", ")}${whereClause.sql}`,
    args: [...values, ...whereClause.args]
  };
}
|
|
1680
|
+
/**
 * Normalizes a JS value for use as a SQLite bind parameter:
 * null/undefined -> null, Date -> ISO 8601 string, other objects -> JSON
 * string, primitives pass through unchanged.
 */
function transformToSqlValue(value) {
  if (value === null || typeof value === "undefined") {
    return null;
  }
  if (value instanceof Date) {
    return value.toISOString();
  }
  if (typeof value === "object") {
    return JSON.stringify(value);
  }
  return value;
}
|
|
1689
|
+
/**
 * Builds a parameterized DELETE statement whose WHERE clause is derived
 * from `keys`, validated against the table's schema.
 */
function prepareDeleteStatement({ tableName, keys }) {
  const table = utils.parseSqlIdentifier(tableName, "table name");
  const where = prepareWhereClause(keys, storage.TABLE_SCHEMAS[tableName]);
  return {
    sql: `DELETE FROM ${table}${where.sql}`,
    args: where.args
  };
}
|
|
1697
|
+
/**
 * Builds a WHERE clause (with a leading space, e.g. " WHERE a = ? AND b = ?")
 * and its bind args from a filters object. Each filter column must exist in
 * `schema`, otherwise an Error is thrown. Conditions are AND-combined; an
 * empty filters object yields an empty SQL string.
 */
function prepareWhereClause(filters, schema) {
  const clauses = [];
  const args = [];
  Object.entries(filters).forEach(([columnName, filterValue]) => {
    if (!schema[columnName]) {
      throw new Error(`Unknown column: ${columnName}`);
    }
    const safeColumn = utils.parseSqlIdentifier(columnName, "column name");
    const built = buildCondition2(safeColumn, filterValue);
    clauses.push(built.condition);
    args.push(...built.args);
  });
  return {
    sql: clauses.length > 0 ? ` WHERE ${clauses.join(" AND ")}` : "",
    args
  };
}
|
|
1715
|
+
/**
 * Builds a single WHERE condition for a column: `IS NULL` for null,
 * a range condition for {startAt,endAt} objects, and `= ?` otherwise.
 */
function buildCondition2(columnName, filterValue) {
  if (filterValue === null) {
    return { condition: `${columnName} IS NULL`, args: [] };
  }
  const isDateRange =
    typeof filterValue === "object" && ("startAt" in filterValue || "endAt" in filterValue);
  if (isDateRange) {
    return buildDateRangeCondition(columnName, filterValue);
  }
  return {
    condition: `${columnName} = ?`,
    args: [transformToSqlValue(filterValue)]
  };
}
|
|
1727
|
+
/**
 * Builds an inclusive range condition (`>= ?` and/or `<= ?`) for a column.
 * At least one of startAt/endAt must be present; otherwise throws.
 */
function buildDateRangeCondition(columnName, range) {
  const parts = [];
  const args = [];
  if (range.startAt !== void 0) {
    parts.push(`${columnName} >= ?`);
    args.push(transformToSqlValue(range.startAt));
  }
  if (range.endAt !== void 0) {
    parts.push(`${columnName} <= ?`);
    args.push(transformToSqlValue(range.endAt));
  }
  if (parts.length === 0) {
    throw new Error("Date range must specify at least startAt or endAt");
  }
  return {
    condition: parts.join(" AND "),
    args
  };
}
|
|
1746
|
+
/**
 * Converts a {start, end} date range into the {startAt, endAt} filter shape
 * consumed by prepareWhereClause, keyed by `columnName` (default "createdAt").
 * Bounds are normalized to ISO 8601 strings; returns {} when neither bound
 * is set.
 */
function buildDateRangeFilter(dateRange, columnName = "createdAt") {
  const hasStart = Boolean(dateRange?.start);
  const hasEnd = Boolean(dateRange?.end);
  if (!hasStart && !hasEnd) {
    return {};
  }
  const bounds = {};
  if (hasStart) {
    bounds.startAt = new Date(dateRange.start).toISOString();
  }
  if (hasEnd) {
    bounds.endAt = new Date(dateRange.end).toISOString();
  }
  return { [columnName]: bounds };
}
|
|
1759
|
+
/**
 * Rehydrates a raw SQL row into domain values using the table's schema:
 * string values in "timestamp" columns become Date objects, string values
 * in "jsonb" columns are parsed via safelyParseJSON, everything else
 * (including null/undefined) is passed through untouched.
 */
function transformFromSqlRow({
  tableName,
  sqlRow
}) {
  const schema = storage.TABLE_SCHEMAS[tableName];
  const jsonColumns = new Set(Object.keys(schema).filter((key) => schema[key].type === "jsonb"));
  const dateColumns = new Set(Object.keys(schema).filter((key) => schema[key].type === "timestamp"));
  const transformed = {};
  for (const [key, value] of Object.entries(sqlRow)) {
    if (value === null || value === void 0) {
      transformed[key] = value;
    } else if (typeof value === "string" && dateColumns.has(key)) {
      transformed[key] = new Date(value);
    } else if (typeof value === "string" && jsonColumns.has(key)) {
      transformed[key] = storage.safelyParseJSON(value);
    } else {
      transformed[key] = value;
    }
  }
  return transformed;
}
|
|
1787
|
+
|
|
1788
|
+
// src/storage/domains/observability/index.ts
|
|
1789
|
+
// src/storage/domains/observability/index.ts
/**
 * LibSQL-backed observability storage: persists and queries trace spans in
 * TABLE_SPANS via the shared StoreOperations layer.
 *
 * Fixes vs. previous revision:
 * - `createSpan`, `batchCreateSpans`, `batchUpdateSpans` and
 *   `batchDeleteTraces` now `await` the underlying operation inside their
 *   try blocks; previously the promise was returned un-awaited, so async
 *   rejections escaped the catch and were never wrapped in MastraError.
 * - `getTracesPaginated` now prepends a space before `WHERE` when the base
 *   filter clause is empty; the unspaced concatenation produced invalid SQL.
 */
var ObservabilityLibSQL = class extends storage.ObservabilityStorage {
  operations;
  constructor({ operations }) {
    super();
    this.operations = operations;
  }
  /**
   * Inserts a single span, stamping createdAt/updatedAt with the current
   * time. Failures are wrapped in a MastraError tagged with the span
   * identity.
   */
  async createSpan(span) {
    try {
      const now = (/* @__PURE__ */ new Date()).toISOString();
      const record = {
        ...span,
        createdAt: now,
        updatedAt: now
      };
      // Await so that insert rejections are caught and wrapped below.
      return await this.operations.insert({ tableName: storage.TABLE_SPANS, record });
    } catch (error$1) {
      throw new error.MastraError(
        {
          id: "LIBSQL_STORE_CREATE_SPAN_FAILED",
          domain: error.ErrorDomain.STORAGE,
          category: error.ErrorCategory.USER,
          details: {
            spanId: span.spanId,
            traceId: span.traceId,
            spanType: span.spanType,
            spanName: span.name
          }
        },
        error$1
      );
    }
  }
  /**
   * Loads all spans of a trace, newest-first by startedAt.
   * Returns null when the trace has no spans.
   */
  async getTrace(traceId) {
    try {
      const spans = await this.operations.loadMany({
        tableName: storage.TABLE_SPANS,
        whereClause: { sql: " WHERE traceId = ?", args: [traceId] },
        orderBy: "startedAt DESC"
      });
      if (!spans || spans.length === 0) {
        return null;
      }
      return {
        traceId,
        spans: spans.map((span) => transformFromSqlRow({ tableName: storage.TABLE_SPANS, sqlRow: span }))
      };
    } catch (error$1) {
      throw new error.MastraError(
        {
          id: "LIBSQL_STORE_GET_TRACE_FAILED",
          domain: error.ErrorDomain.STORAGE,
          category: error.ErrorCategory.USER,
          details: {
            traceId
          }
        },
        error$1
      );
    }
  }
  /**
   * Applies a partial update to one span (keyed by spanId + traceId) and
   * refreshes its updatedAt timestamp.
   */
  async updateSpan({
    spanId,
    traceId,
    updates
  }) {
    try {
      await this.operations.update({
        tableName: storage.TABLE_SPANS,
        keys: { spanId, traceId },
        data: { ...updates, updatedAt: (/* @__PURE__ */ new Date()).toISOString() }
      });
    } catch (error$1) {
      throw new error.MastraError(
        {
          id: "LIBSQL_STORE_UPDATE_SPAN_FAILED",
          domain: error.ErrorDomain.STORAGE,
          category: error.ErrorCategory.USER,
          details: {
            spanId,
            traceId
          }
        },
        error$1
      );
    }
  }
  /**
   * Pages through root spans (parentSpanId IS NULL), optionally narrowed by
   * field filters, a startedAt date range, and an entity (workflow/agent)
   * whose run name is matched exactly. Returns pagination info plus the
   * rehydrated spans; hasMore is approximated by "page was full".
   */
  async getTracesPaginated({
    filters,
    pagination
  }) {
    const page = pagination?.page ?? 0;
    const perPage = pagination?.perPage ?? 10;
    const { entityId, entityType, ...actualFilters } = filters || {};
    // Root spans only: parentSpanId null always contributes a condition.
    const filtersWithDateRange = {
      ...actualFilters,
      ...buildDateRangeFilter(pagination?.dateRange, "startedAt"),
      parentSpanId: null
    };
    const whereClause = prepareWhereClause(filtersWithDateRange, storage.SPAN_SCHEMA);
    let actualWhereClause = whereClause.sql || "";
    if (entityId && entityType) {
      const statement = `name = ?`;
      let name = "";
      if (entityType === "workflow") {
        name = `workflow run: '${entityId}'`;
      } else if (entityType === "agent") {
        name = `agent run: '${entityId}'`;
      } else {
        const error$1 = new error.MastraError({
          id: "LIBSQL_STORE_GET_TRACES_PAGINATED_FAILED",
          domain: error.ErrorDomain.STORAGE,
          category: error.ErrorCategory.USER,
          details: {
            entityType
          },
          text: `Cannot filter by entity type: ${entityType}`
        });
        this.logger?.trackException(error$1);
        throw error$1;
      }
      whereClause.args.push(name);
      if (actualWhereClause) {
        actualWhereClause += ` AND ${statement}`;
      } else {
        // Leading space: the clause is appended directly after the table
        // name in loadMany/loadTotalCount.
        actualWhereClause += ` WHERE ${statement}`;
      }
    }
    const orderBy = "startedAt DESC";
    let count = 0;
    try {
      count = await this.operations.loadTotalCount({
        tableName: storage.TABLE_SPANS,
        whereClause: { sql: actualWhereClause, args: whereClause.args }
      });
    } catch (error$1) {
      throw new error.MastraError(
        {
          id: "LIBSQL_STORE_GET_TRACES_PAGINATED_COUNT_FAILED",
          domain: error.ErrorDomain.STORAGE,
          category: error.ErrorCategory.USER
        },
        error$1
      );
    }
    if (count === 0) {
      return {
        pagination: {
          total: 0,
          page,
          perPage,
          hasMore: false
        },
        spans: []
      };
    }
    try {
      const spans = await this.operations.loadMany({
        tableName: storage.TABLE_SPANS,
        whereClause: {
          sql: actualWhereClause,
          args: whereClause.args
        },
        orderBy,
        offset: page * perPage,
        limit: perPage
      });
      return {
        pagination: {
          total: count,
          page,
          perPage,
          hasMore: spans.length === perPage
        },
        spans: spans.map((span) => transformFromSqlRow({ tableName: storage.TABLE_SPANS, sqlRow: span }))
      };
    } catch (error$1) {
      throw new error.MastraError(
        {
          id: "LIBSQL_STORE_GET_TRACES_PAGINATED_FAILED",
          domain: error.ErrorDomain.STORAGE,
          category: error.ErrorCategory.USER
        },
        error$1
      );
    }
  }
  /**
   * Inserts many spans in one batch, stamping a shared createdAt/updatedAt.
   */
  async batchCreateSpans(args) {
    try {
      const now = (/* @__PURE__ */ new Date()).toISOString();
      // Await so that batch rejections are caught and wrapped below.
      return await this.operations.batchInsert({
        tableName: storage.TABLE_SPANS,
        records: args.records.map((record) => ({
          ...record,
          createdAt: now,
          updatedAt: now
        }))
      });
    } catch (error$1) {
      throw new error.MastraError(
        {
          id: "LIBSQL_STORE_BATCH_CREATE_SPANS_FAILED",
          domain: error.ErrorDomain.STORAGE,
          category: error.ErrorCategory.USER
        },
        error$1
      );
    }
  }
  /**
   * Applies many partial span updates in one batch, refreshing updatedAt
   * on each.
   */
  async batchUpdateSpans(args) {
    try {
      // Await so that batch rejections are caught and wrapped below.
      return await this.operations.batchUpdate({
        tableName: storage.TABLE_SPANS,
        updates: args.records.map((record) => ({
          keys: { spanId: record.spanId, traceId: record.traceId },
          data: { ...record.updates, updatedAt: (/* @__PURE__ */ new Date()).toISOString() }
        }))
      });
    } catch (error$1) {
      throw new error.MastraError(
        {
          id: "LIBSQL_STORE_BATCH_UPDATE_SPANS_FAILED",
          domain: error.ErrorDomain.STORAGE,
          category: error.ErrorCategory.USER
        },
        error$1
      );
    }
  }
  /**
   * Deletes every span belonging to each of the given trace ids.
   */
  async batchDeleteTraces(args) {
    try {
      const keys = args.traceIds.map((traceId) => ({ traceId }));
      // Await so that batch rejections are caught and wrapped below.
      return await this.operations.batchDelete({
        tableName: storage.TABLE_SPANS,
        keys
      });
    } catch (error$1) {
      throw new error.MastraError(
        {
          id: "LIBSQL_STORE_BATCH_DELETE_TRACES_FAILED",
          domain: error.ErrorDomain.STORAGE,
          category: error.ErrorCategory.USER
        },
        error$1
      );
    }
  }
};
|
|
2036
|
+
/**
 * Low-level LibSQL table operations shared by the domain stores:
 * DDL (create/alter/clear/drop), single and batch CRUD, all with
 * SQLITE_BUSY retry wrapping on write paths.
 */
var StoreOperationsLibSQL = class extends storage.StoreOperations {
  // libsql client used for every statement and batch.
  client;
  /**
   * Maximum number of retries for write operations if an SQLITE_BUSY error occurs.
   * @default 5
   */
  maxRetries;
  /**
   * Initial backoff time in milliseconds for retrying write operations on SQLITE_BUSY.
   * The backoff time will double with each retry (exponential backoff).
   * @default 100
   */
  initialBackoffMs;
  constructor({
    client,
    maxRetries,
    initialBackoffMs
  }) {
    super();
    this.client = client;
    this.maxRetries = maxRetries ?? 5;
    this.initialBackoffMs = initialBackoffMs ?? 100;
  }
  // Returns true when `column` exists on `table`, via PRAGMA table_info.
  // NOTE(review): `await result.rows` is redundant — rows is not a promise
  // on an already-awaited result — but harmless.
  async hasColumn(table, column) {
    const result = await this.client.execute({
      sql: `PRAGMA table_info(${table})`
    });
    return (await result.rows)?.some((row) => row.name === column);
  }
  // Renders a CREATE TABLE IF NOT EXISTS statement from a column schema.
  // Workflow-snapshot and span tables get composite primary keys instead of
  // per-column PRIMARY KEY markers.
  getCreateTableSQL(tableName, schema) {
    const parsedTableName = utils.parseSqlIdentifier(tableName, "table name");
    const columns = Object.entries(schema).map(([name, col]) => {
      const parsedColumnName = utils.parseSqlIdentifier(name, "column name");
      let type = col.type.toUpperCase();
      if (type === "TEXT") type = "TEXT";
      if (type === "TIMESTAMP") type = "TEXT";
      const nullable = col.nullable ? "" : "NOT NULL";
      const primaryKey = col.primaryKey ? "PRIMARY KEY" : "";
      return `${parsedColumnName} ${type} ${nullable} ${primaryKey}`.trim();
    });
    if (tableName === storage.TABLE_WORKFLOW_SNAPSHOT) {
      const stmnt = `CREATE TABLE IF NOT EXISTS ${parsedTableName} (
        ${columns.join(",\n")},
        PRIMARY KEY (workflow_name, run_id)
      )`;
      return stmnt;
    }
    if (tableName === storage.TABLE_SPANS) {
      const stmnt = `CREATE TABLE IF NOT EXISTS ${parsedTableName} (
        ${columns.join(",\n")},
        PRIMARY KEY (traceId, spanId)
      )`;
      return stmnt;
    }
    return `CREATE TABLE IF NOT EXISTS ${parsedTableName} (${columns.join(", ")})`;
  }
  // Creates the table if missing; wraps failures in a MastraError.
  async createTable({
    tableName,
    schema
  }) {
    try {
      this.logger.debug(`Creating database table`, { tableName, operation: "schema init" });
      const sql = this.getCreateTableSQL(tableName, schema);
      await this.client.execute(sql);
    } catch (error$1) {
      throw new error.MastraError(
        {
          id: "LIBSQL_STORE_CREATE_TABLE_FAILED",
          domain: error.ErrorDomain.STORAGE,
          category: error.ErrorCategory.THIRD_PARTY,
          details: {
            tableName
          }
        },
        error$1
      );
    }
  }
  // Maps abstract column types onto SQLite storage classes; defers to the
  // base class for anything not special-cased here.
  getSqlType(type) {
    switch (type) {
      case "bigint":
        return "INTEGER";
      // SQLite uses INTEGER for all integer sizes
      case "jsonb":
        return "TEXT";
      // Store JSON as TEXT in SQLite
      default:
        return super.getSqlType(type);
    }
  }
  // Raw single-record upsert; callers should prefer insert() for retries.
  async doInsert({
    tableName,
    record
  }) {
    await this.client.execute(
      prepareStatement({
        tableName,
        record
      })
    );
  }
  // Upserts one record with SQLITE_BUSY retry wrapping.
  insert(args) {
    const executeWriteOperationWithRetry = createExecuteWriteOperationWithRetry({
      logger: this.logger,
      maxRetries: this.maxRetries,
      initialBackoffMs: this.initialBackoffMs
    });
    return executeWriteOperationWithRetry(() => this.doInsert(args), `insert into table ${args.tableName}`);
  }
  // Loads the newest row (by createdAt) matching all `keys` equality filters,
  // best-effort JSON-parsing any string value that looks like JSON.
  // Returns null when nothing matches.
  async load({ tableName, keys }) {
    const parsedTableName = utils.parseSqlIdentifier(tableName, "table name");
    const parsedKeys = Object.keys(keys).map((key) => utils.parseSqlIdentifier(key, "column name"));
    const conditions = parsedKeys.map((key) => `${key} = ?`).join(" AND ");
    const values = Object.values(keys);
    const result = await this.client.execute({
      sql: `SELECT * FROM ${parsedTableName} WHERE ${conditions} ORDER BY createdAt DESC LIMIT 1`,
      args: values
    });
    if (!result.rows || result.rows.length === 0) {
      return null;
    }
    const row = result.rows[0];
    const parsed = Object.fromEntries(
      Object.entries(row || {}).map(([k, v]) => {
        try {
          // Heuristic: strings starting with '{' or '[' are treated as JSON;
          // parse failures fall through to the raw string.
          return [k, typeof v === "string" ? v.startsWith("{") || v.startsWith("[") ? JSON.parse(v) : v : v];
        } catch {
          return [k, v];
        }
      })
    );
    return parsed;
  }
  // Loads many rows with optional WHERE clause (which must include its own
  // leading " WHERE "), ORDER BY, LIMIT and OFFSET fragments.
  // NOTE(review): `if (offset)` appends OFFSET without a LIMIT when limit is
  // absent, which SQLite rejects; callers currently always pass both or
  // neither — confirm before reusing.
  async loadMany({
    tableName,
    whereClause,
    orderBy,
    offset,
    limit,
    args
  }) {
    const parsedTableName = utils.parseSqlIdentifier(tableName, "table name");
    let statement = `SELECT * FROM ${parsedTableName}`;
    if (whereClause?.sql) {
      statement += `${whereClause.sql}`;
    }
    if (orderBy) {
      statement += ` ORDER BY ${orderBy}`;
    }
    if (limit) {
      statement += ` LIMIT ${limit}`;
    }
    if (offset) {
      statement += ` OFFSET ${offset}`;
    }
    const result = await this.client.execute({
      sql: statement,
      args: [...whereClause?.args ?? [], ...args ?? []]
    });
    return result.rows;
  }
  // Counts rows matching the optional WHERE clause; returns 0 for no rows.
  async loadTotalCount({
    tableName,
    whereClause
  }) {
    const parsedTableName = utils.parseSqlIdentifier(tableName, "table name");
    const statement = `SELECT COUNT(*) as count FROM ${parsedTableName} ${whereClause ? `${whereClause.sql}` : ""}`;
    const result = await this.client.execute({
      sql: statement,
      args: whereClause?.args ?? []
    });
    if (!result.rows || result.rows.length === 0) {
      return 0;
    }
    return result.rows[0]?.count ?? 0;
  }
  // Updates rows matching `keys` with SQLITE_BUSY retry wrapping.
  update(args) {
    const executeWriteOperationWithRetry = createExecuteWriteOperationWithRetry({
      logger: this.logger,
      maxRetries: this.maxRetries,
      initialBackoffMs: this.initialBackoffMs
    });
    return executeWriteOperationWithRetry(() => this.executeUpdate(args), `update table ${args.tableName}`);
  }
  // Raw UPDATE; callers should prefer update() for retries.
  async executeUpdate({
    tableName,
    keys,
    data
  }) {
    await this.client.execute(prepareUpdateStatement({ tableName, updates: data, keys }));
  }
  // Raw batch upsert executed as one libsql write batch; no-op on [].
  async doBatchInsert({
    tableName,
    records
  }) {
    if (records.length === 0) return;
    const batchStatements = records.map((r) => prepareStatement({ tableName, record: r }));
    await this.client.batch(batchStatements, "write");
  }
  // Batch upsert with retry wrapping; failures surface as MastraError.
  batchInsert(args) {
    const executeWriteOperationWithRetry = createExecuteWriteOperationWithRetry({
      logger: this.logger,
      maxRetries: this.maxRetries,
      initialBackoffMs: this.initialBackoffMs
    });
    return executeWriteOperationWithRetry(
      () => this.doBatchInsert(args),
      `batch insert into table ${args.tableName}`
    ).catch((error$1) => {
      throw new error.MastraError(
        {
          id: "LIBSQL_STORE_BATCH_INSERT_FAILED",
          domain: error.ErrorDomain.STORAGE,
          category: error.ErrorCategory.THIRD_PARTY,
          details: {
            tableName: args.tableName
          }
        },
        error$1
      );
    });
  }
  /**
   * Public batch update method with retry logic
   */
  batchUpdate(args) {
    const executeWriteOperationWithRetry = createExecuteWriteOperationWithRetry({
      logger: this.logger,
      maxRetries: this.maxRetries,
      initialBackoffMs: this.initialBackoffMs
    });
    return executeWriteOperationWithRetry(
      () => this.executeBatchUpdate(args),
      `batch update in table ${args.tableName}`
    ).catch((error$1) => {
      throw new error.MastraError(
        {
          id: "LIBSQL_STORE_BATCH_UPDATE_FAILED",
          domain: error.ErrorDomain.STORAGE,
          category: error.ErrorCategory.THIRD_PARTY,
          details: {
            tableName: args.tableName
          }
        },
        error$1
      );
    });
  }
  /**
   * Updates multiple records in batch. Each record can be updated based on single or composite keys.
   */
  async executeBatchUpdate({
    tableName,
    updates
  }) {
    if (updates.length === 0) return;
    const batchStatements = updates.map(
      ({ keys, data }) => prepareUpdateStatement({
        tableName,
        updates: data,
        keys
      })
    );
    await this.client.batch(batchStatements, "write");
  }
  /**
   * Public batch delete method with retry logic
   */
  batchDelete({ tableName, keys }) {
    const executeWriteOperationWithRetry = createExecuteWriteOperationWithRetry({
      logger: this.logger,
      maxRetries: this.maxRetries,
      initialBackoffMs: this.initialBackoffMs
    });
    return executeWriteOperationWithRetry(
      () => this.executeBatchDelete({ tableName, keys }),
      `batch delete from table ${tableName}`
    ).catch((error$1) => {
      throw new error.MastraError(
        {
          id: "LIBSQL_STORE_BATCH_DELETE_FAILED",
          domain: error.ErrorDomain.STORAGE,
          category: error.ErrorCategory.THIRD_PARTY,
          details: {
            tableName
          }
        },
        error$1
      );
    });
  }
  /**
   * Deletes multiple records in batch. Each record can be deleted based on single or composite keys.
   */
  async executeBatchDelete({
    tableName,
    keys
  }) {
    if (keys.length === 0) return;
    const batchStatements = keys.map(
      (keyObj) => prepareDeleteStatement({
        tableName,
        keys: keyObj
      })
    );
    await this.client.batch(batchStatements, "write");
  }
  /**
   * Alters table schema to add columns if they don't exist
   * @param tableName Name of the table
   * @param schema Schema of the table
   * @param ifNotExists Array of column names to add if they don't exist
   */
  async alterTable({
    tableName,
    schema,
    ifNotExists
  }) {
    const parsedTableName = utils.parseSqlIdentifier(tableName, "table name");
    try {
      const pragmaQuery = `PRAGMA table_info(${parsedTableName})`;
      const result = await this.client.execute(pragmaQuery);
      // Column-name comparison is case-insensitive, matching SQLite semantics.
      const existingColumnNames = new Set(result.rows.map((row) => row.name.toLowerCase()));
      for (const columnName of ifNotExists) {
        if (!existingColumnNames.has(columnName.toLowerCase()) && schema[columnName]) {
          const columnDef = schema[columnName];
          const sqlType = this.getSqlType(columnDef.type);
          const nullable = columnDef.nullable === false ? "NOT NULL" : "";
          // NOT NULL columns need a DEFAULT so ALTER TABLE succeeds on
          // tables that already contain rows.
          const defaultValue = columnDef.nullable === false ? this.getDefaultValue(columnDef.type) : "";
          const alterSql = `ALTER TABLE ${parsedTableName} ADD COLUMN "${columnName}" ${sqlType} ${nullable} ${defaultValue}`.trim();
          await this.client.execute(alterSql);
          this.logger?.debug?.(`Added column ${columnName} to table ${parsedTableName}`);
        }
      }
    } catch (error$1) {
      throw new error.MastraError(
        {
          id: "LIBSQL_STORE_ALTER_TABLE_FAILED",
          domain: error.ErrorDomain.STORAGE,
          category: error.ErrorCategory.THIRD_PARTY,
          details: {
            tableName
          }
        },
        error$1
      );
    }
  }
  // Deletes all rows from the table. Deliberately best-effort: failures are
  // logged and tracked but NOT rethrown.
  async clearTable({ tableName }) {
    const parsedTableName = utils.parseSqlIdentifier(tableName, "table name");
    try {
      await this.client.execute(`DELETE FROM ${parsedTableName}`);
    } catch (e) {
      const mastraError = new error.MastraError(
        {
          id: "LIBSQL_STORE_CLEAR_TABLE_FAILED",
          domain: error.ErrorDomain.STORAGE,
          category: error.ErrorCategory.THIRD_PARTY,
          details: {
            tableName
          }
        },
        e
      );
      this.logger?.trackException?.(mastraError);
      this.logger?.error?.(mastraError.toString());
    }
  }
  // Drops the table if it exists; failures are wrapped and rethrown.
  async dropTable({ tableName }) {
    const parsedTableName = utils.parseSqlIdentifier(tableName, "table name");
    try {
      await this.client.execute(`DROP TABLE IF EXISTS ${parsedTableName}`);
    } catch (e) {
      throw new error.MastraError(
        {
          id: "LIBSQL_STORE_DROP_TABLE_FAILED",
          domain: error.ErrorDomain.STORAGE,
          category: error.ErrorCategory.THIRD_PARTY,
          details: {
            tableName
          }
        },
        e
      );
    }
  }
};
|
|
2423
|
+
// LibSQL-backed implementation of the scores domain (rows live in TABLE_SCORERS).
// JSON-valued columns are stored as strings and parsed back in transformScoreRow.
var ScoresLibSQL = class extends storage.ScoresStorage {
  operations; // shared StoreOperations helper — used for generic inserts
  client;     // raw libsql client — used for direct SELECT/COUNT queries
  constructor({ client, operations }) {
    super();
    this.operations = operations;
    this.client = client;
  }
  /**
   * Lists scores recorded for a single run, newest first.
   * `pagination.perPage === false` disables the limit and returns every row.
   * @throws MastraError LIBSQL_STORE_GET_SCORES_BY_RUN_ID_FAILED
   */
  async listScoresByRunId({
    runId,
    pagination
  }) {
    try {
      const { page, perPage: perPageInput } = pagination;
      // Count first so an empty result short-circuits without a data query.
      const countResult = await this.client.execute({
        sql: `SELECT COUNT(*) as count FROM ${storage.TABLE_SCORERS} WHERE runId = ?`,
        args: [runId]
      });
      const total = Number(countResult.rows?.[0]?.count ?? 0);
      if (total === 0) {
        return {
          pagination: { total: 0, page, perPage: perPageInput, hasMore: false },
          scores: []
        };
      }
      const perPage = storage.normalizePerPage(perPageInput, 100);
      const { offset: start, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
      // perPage === false means "all rows": LIMIT total, and end = total so hasMore is false.
      const limitValue = perPageInput === false ? total : perPage;
      const end = perPageInput === false ? total : start + perPage;
      const result = await this.client.execute({
        sql: `SELECT * FROM ${storage.TABLE_SCORERS} WHERE runId = ? ORDER BY createdAt DESC LIMIT ? OFFSET ?`,
        args: [runId, limitValue, start]
      });
      const scores = result.rows?.map((row) => this.transformScoreRow(row)) ?? [];
      return {
        scores,
        pagination: { total, page, perPage: perPageForResponse, hasMore: end < total }
      };
    } catch (error$1) {
      throw new error.MastraError(
        {
          id: "LIBSQL_STORE_GET_SCORES_BY_RUN_ID_FAILED",
          domain: error.ErrorDomain.STORAGE,
          category: error.ErrorCategory.THIRD_PARTY
        },
        error$1
      );
    }
  }
  /**
   * Lists scores filtered by any combination of scorerId / entityId /
   * entityType / source (each filter applied only when truthy), newest first.
   * Same pagination contract as listScoresByRunId.
   * @throws MastraError LIBSQL_STORE_GET_SCORES_BY_SCORER_ID_FAILED
   */
  async listScoresByScorerId({
    scorerId,
    entityId,
    entityType,
    source,
    pagination
  }) {
    try {
      const { page, perPage: perPageInput } = pagination;
      // Build WHERE clause dynamically; values go through bound parameters.
      const conditions = [];
      const queryParams = [];
      if (scorerId) {
        conditions.push(`scorerId = ?`);
        queryParams.push(scorerId);
      }
      if (entityId) {
        conditions.push(`entityId = ?`);
        queryParams.push(entityId);
      }
      if (entityType) {
        conditions.push(`entityType = ?`);
        queryParams.push(entityType);
      }
      if (source) {
        conditions.push(`source = ?`);
        queryParams.push(source);
      }
      const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
      const countResult = await this.client.execute({
        sql: `SELECT COUNT(*) as count FROM ${storage.TABLE_SCORERS} ${whereClause}`,
        args: queryParams
      });
      const total = Number(countResult.rows?.[0]?.count ?? 0);
      if (total === 0) {
        return {
          pagination: { total: 0, page, perPage: perPageInput, hasMore: false },
          scores: []
        };
      }
      const perPage = storage.normalizePerPage(perPageInput, 100);
      const { offset: start, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
      const limitValue = perPageInput === false ? total : perPage;
      const end = perPageInput === false ? total : start + perPage;
      const result = await this.client.execute({
        sql: `SELECT * FROM ${storage.TABLE_SCORERS} ${whereClause} ORDER BY createdAt DESC LIMIT ? OFFSET ?`,
        args: [...queryParams, limitValue, start]
      });
      const scores = result.rows?.map((row) => this.transformScoreRow(row)) ?? [];
      return {
        scores,
        pagination: { total, page, perPage: perPageForResponse, hasMore: end < total }
      };
    } catch (error$1) {
      throw new error.MastraError(
        {
          id: "LIBSQL_STORE_GET_SCORES_BY_SCORER_ID_FAILED",
          domain: error.ErrorDomain.STORAGE,
          category: error.ErrorCategory.THIRD_PARTY
        },
        error$1
      );
    }
  }
  /**
   * Converts a raw TABLE_SCORERS row into a score object: JSON columns are
   * parsed (input/output default to "{}"; the optional columns parse to null
   * when absent), everything else is passed through unchanged.
   */
  transformScoreRow(row) {
    const scorerValue = storage.safelyParseJSON(row.scorer);
    const inputValue = storage.safelyParseJSON(row.input ?? "{}");
    const outputValue = storage.safelyParseJSON(row.output ?? "{}");
    const additionalLLMContextValue = row.additionalLLMContext ? storage.safelyParseJSON(row.additionalLLMContext) : null;
    const requestContextValue = row.requestContext ? storage.safelyParseJSON(row.requestContext) : null;
    const metadataValue = row.metadata ? storage.safelyParseJSON(row.metadata) : null;
    const entityValue = row.entity ? storage.safelyParseJSON(row.entity) : null;
    const preprocessStepResultValue = row.preprocessStepResult ? storage.safelyParseJSON(row.preprocessStepResult) : null;
    const analyzeStepResultValue = row.analyzeStepResult ? storage.safelyParseJSON(row.analyzeStepResult) : null;
    return {
      id: row.id,
      traceId: row.traceId,
      spanId: row.spanId,
      runId: row.runId,
      scorer: scorerValue,
      score: row.score,
      reason: row.reason,
      preprocessStepResult: preprocessStepResultValue,
      analyzeStepResult: analyzeStepResultValue,
      analyzePrompt: row.analyzePrompt,
      preprocessPrompt: row.preprocessPrompt,
      generateScorePrompt: row.generateScorePrompt,
      generateReasonPrompt: row.generateReasonPrompt,
      metadata: metadataValue,
      input: inputValue,
      output: outputValue,
      // NOTE: the DB column is named additionalLLMContext but is exposed as
      // additionalContext on the returned object.
      additionalContext: additionalLLMContextValue,
      requestContext: requestContextValue,
      entityType: row.entityType,
      entity: entityValue,
      entityId: row.entityId,
      scorerId: row.scorerId,
      source: row.source,
      resourceId: row.resourceId,
      threadId: row.threadId,
      createdAt: row.createdAt,
      updatedAt: row.updatedAt
    };
  }
  /** Fetches a single score by primary key; returns null when not found. */
  async getScoreById({ id }) {
    const result = await this.client.execute({
      sql: `SELECT * FROM ${storage.TABLE_SCORERS} WHERE id = ?`,
      args: [id]
    });
    return result.rows?.[0] ? this.transformScoreRow(result.rows[0]) : null;
  }
  /**
   * Validates the payload against saveScorePayloadSchema, inserts it with a
   * fresh UUID and ISO timestamps, then re-reads the stored row so the caller
   * gets the canonical (parsed) shape back.
   * @throws MastraError LIBSQL_STORE_SAVE_SCORE_FAILED_INVALID_SCORE_PAYLOAD (USER)
   *         when validation fails; LIBSQL_STORE_SAVE_SCORE_FAILED (THIRD_PARTY)
   *         when the insert fails.
   */
  async saveScore(score) {
    let parsedScore;
    try {
      parsedScore = evals.saveScorePayloadSchema.parse(score);
    } catch (error$1) {
      throw new error.MastraError(
        {
          id: "LIBSQL_STORE_SAVE_SCORE_FAILED_INVALID_SCORE_PAYLOAD",
          domain: error.ErrorDomain.STORAGE,
          category: error.ErrorCategory.USER,
          details: {
            scorer: score.scorer.id,
            entityId: score.entityId,
            entityType: score.entityType,
            traceId: score.traceId || "",
            spanId: score.spanId || ""
          }
        },
        error$1
      );
    }
    try {
      const id = crypto.randomUUID();
      await this.operations.insert({
        tableName: storage.TABLE_SCORERS,
        record: {
          id,
          createdAt: (/* @__PURE__ */ new Date()).toISOString(),
          updatedAt: (/* @__PURE__ */ new Date()).toISOString(),
          ...parsedScore
        }
      });
      const scoreFromDb = await this.getScoreById({ id });
      return { score: scoreFromDb };
    } catch (error$1) {
      throw new error.MastraError(
        {
          id: "LIBSQL_STORE_SAVE_SCORE_FAILED",
          domain: error.ErrorDomain.STORAGE,
          category: error.ErrorCategory.THIRD_PARTY
        },
        error$1
      );
    }
  }
  /**
   * Lists scores for one entity (entityId + entityType pair), newest first.
   * Same pagination contract as listScoresByRunId.
   * @throws MastraError LIBSQL_STORE_GET_SCORES_BY_ENTITY_ID_FAILED
   */
  async listScoresByEntityId({
    entityId,
    entityType,
    pagination
  }) {
    try {
      const { page, perPage: perPageInput } = pagination;
      const countResult = await this.client.execute({
        sql: `SELECT COUNT(*) as count FROM ${storage.TABLE_SCORERS} WHERE entityId = ? AND entityType = ?`,
        args: [entityId, entityType]
      });
      const total = Number(countResult.rows?.[0]?.count ?? 0);
      if (total === 0) {
        return {
          pagination: { total: 0, page, perPage: perPageInput, hasMore: false },
          scores: []
        };
      }
      const perPage = storage.normalizePerPage(perPageInput, 100);
      const { offset: start, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
      const limitValue = perPageInput === false ? total : perPage;
      const end = perPageInput === false ? total : start + perPage;
      const result = await this.client.execute({
        sql: `SELECT * FROM ${storage.TABLE_SCORERS} WHERE entityId = ? AND entityType = ? ORDER BY createdAt DESC LIMIT ? OFFSET ?`,
        args: [entityId, entityType, limitValue, start]
      });
      const scores = result.rows?.map((row) => this.transformScoreRow(row)) ?? [];
      return {
        scores,
        pagination: { total, page, perPage: perPageForResponse, hasMore: end < total }
      };
    } catch (error$1) {
      throw new error.MastraError(
        {
          id: "LIBSQL_STORE_GET_SCORES_BY_ENTITY_ID_FAILED",
          domain: error.ErrorDomain.STORAGE,
          category: error.ErrorCategory.THIRD_PARTY
        },
        error$1
      );
    }
  }
  /**
   * Lists scores attached to one observability span (traceId + spanId pair),
   * newest first. NOTE(review): unlike the other list* methods this one has no
   * total === 0 short-circuit, so it always runs the data query (LIMIT 0 when
   * empty and perPage === false) — behavior-equivalent, just one extra query.
   * @throws MastraError LIBSQL_STORE_GET_SCORES_BY_SPAN_FAILED
   */
  async listScoresBySpan({
    traceId,
    spanId,
    pagination
  }) {
    try {
      const { page, perPage: perPageInput } = pagination;
      const perPage = storage.normalizePerPage(perPageInput, 100);
      const { offset: start, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
      const countSQLResult = await this.client.execute({
        sql: `SELECT COUNT(*) as count FROM ${storage.TABLE_SCORERS} WHERE traceId = ? AND spanId = ?`,
        args: [traceId, spanId]
      });
      const total = Number(countSQLResult.rows?.[0]?.count ?? 0);
      const limitValue = perPageInput === false ? total : perPage;
      const end = perPageInput === false ? total : start + perPage;
      const result = await this.client.execute({
        sql: `SELECT * FROM ${storage.TABLE_SCORERS} WHERE traceId = ? AND spanId = ? ORDER BY createdAt DESC LIMIT ? OFFSET ?`,
        args: [traceId, spanId, limitValue, start]
      });
      const scores = result.rows?.map((row) => this.transformScoreRow(row)) ?? [];
      return {
        scores,
        pagination: { total, page, perPage: perPageForResponse, hasMore: end < total }
      };
    } catch (error$1) {
      throw new error.MastraError(
        {
          id: "LIBSQL_STORE_GET_SCORES_BY_SPAN_FAILED",
          domain: error.ErrorDomain.STORAGE,
          category: error.ErrorCategory.THIRD_PARTY
        },
        error$1
      );
    }
  }
};
|
|
2741
|
+
/**
 * Converts a raw workflow-snapshot table row into a workflow-run object.
 *
 * If `row.snapshot` is a JSON string it is parsed; on a parse failure a
 * warning is logged and the raw string is returned as the snapshot instead.
 * Non-string snapshots pass through untouched. Timestamp columns are wrapped
 * in `Date` objects.
 */
function parseWorkflowRun(row) {
  let snapshot = row.snapshot;
  const needsParsing = typeof snapshot === "string";
  if (needsParsing) {
    try {
      snapshot = JSON.parse(row.snapshot);
    } catch (e) {
      // Keep the unparsed string rather than failing the whole read.
      console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
    }
  }
  // Map snake_case DB columns onto the camelCase run shape.
  return {
    workflowName: row.workflow_name,
    runId: row.run_id,
    snapshot,
    resourceId: row.resourceId,
    createdAt: new Date(row.createdAt),
    updatedAt: new Date(row.updatedAt)
  };
}
|
|
2759
|
+
// LibSQL-backed implementation of the workflows domain. Run state is stored as
// a JSON snapshot in TABLE_WORKFLOW_SNAPSHOT; writes go through a retry loop
// that backs off on SQLite lock contention.
var WorkflowsLibSQL = class extends storage.WorkflowsStorage {
  operations;        // shared StoreOperations helper (insert/load)
  client;            // raw libsql client (direct SQL + transactions)
  maxRetries;        // max attempts for lock-contended operations
  initialBackoffMs;  // first retry delay; doubled after each failed attempt
  constructor({
    operations,
    client,
    maxRetries = 5,
    initialBackoffMs = 500
  }) {
    super();
    this.operations = operations;
    this.client = client;
    this.maxRetries = maxRetries;
    this.initialBackoffMs = initialBackoffMs;
    // Fire-and-forget: PRAGMA tuning is best-effort and must not delay construction.
    this.setupPragmaSettings().catch(
      (err) => this.logger.warn("LibSQL Workflows: Failed to setup PRAGMA settings.", err)
    );
  }
  /**
   * Applies connection PRAGMAs: busy_timeout (10s), then WAL journal mode and
   * synchronous=NORMAL, each individually optional — remote/embedded targets
   * that reject them just get a debug log.
   */
  async setupPragmaSettings() {
    try {
      await this.client.execute("PRAGMA busy_timeout = 10000;");
      this.logger.debug("LibSQL Workflows: PRAGMA busy_timeout=10000 set.");
      try {
        await this.client.execute("PRAGMA journal_mode = WAL;");
        this.logger.debug("LibSQL Workflows: PRAGMA journal_mode=WAL set.");
      } catch {
        this.logger.debug("LibSQL Workflows: WAL mode not supported, using default journal mode.");
      }
      try {
        await this.client.execute("PRAGMA synchronous = NORMAL;");
        this.logger.debug("LibSQL Workflows: PRAGMA synchronous=NORMAL set.");
      } catch {
        this.logger.debug("LibSQL Workflows: Failed to set synchronous mode.");
      }
    } catch (err) {
      this.logger.warn("LibSQL Workflows: Failed to set PRAGMA settings.", err);
    }
  }
  /**
   * Runs `operation`, retrying with exponential backoff (starting at
   * initialBackoffMs, doubling each attempt, at most maxRetries attempts) when
   * the failure looks like SQLite lock contention (SQLITE_BUSY/SQLITE_LOCKED
   * codes or "locked" messages). Any other error is rethrown immediately.
   */
  async executeWithRetry(operation) {
    let attempts = 0;
    let backoff = this.initialBackoffMs;
    while (attempts < this.maxRetries) {
      try {
        return await operation();
      } catch (error) {
        this.logger.debug("LibSQL Workflows: Error caught in retry loop", {
          errorType: error.constructor.name,
          errorCode: error.code,
          errorMessage: error.message,
          attempts,
          maxRetries: this.maxRetries
        });
        // Heuristic lock detection: driver error codes plus message sniffing,
        // since different libsql transports surface lock errors differently.
        const isLockError = error.code === "SQLITE_BUSY" || error.code === "SQLITE_LOCKED" || error.message?.toLowerCase().includes("database is locked") || error.message?.toLowerCase().includes("database table is locked") || error.message?.toLowerCase().includes("table is locked") || error.constructor.name === "SqliteError" && error.message?.toLowerCase().includes("locked");
        if (isLockError) {
          attempts++;
          if (attempts >= this.maxRetries) {
            this.logger.error(
              `LibSQL Workflows: Operation failed after ${this.maxRetries} attempts due to database lock: ${error.message}`,
              { error, attempts, maxRetries: this.maxRetries }
            );
            throw error;
          }
          this.logger.warn(
            `LibSQL Workflows: Attempt ${attempts} failed due to database lock. Retrying in ${backoff}ms...`,
            { errorMessage: error.message, attempts, backoff, maxRetries: this.maxRetries }
          );
          await new Promise((resolve) => setTimeout(resolve, backoff));
          backoff *= 2;
        } else {
          this.logger.error("LibSQL Workflows: Non-lock error occurred, not retrying", { error });
          throw error;
        }
      }
    }
    // Defensive: the loop should always return or throw before reaching here.
    throw new Error("LibSQL Workflows: Max retries reached, but no error was re-thrown from the loop.");
  }
  /**
   * Records one step's result into the run snapshot inside a write
   * transaction (read-modify-write on the JSON blob). Creates a fresh
   * "pending" snapshot when none exists yet. Merges `requestContext` into the
   * stored one. Returns the updated `snapshot.context`.
   */
  async updateWorkflowResults({
    workflowName,
    runId,
    stepId,
    result,
    requestContext
  }) {
    return this.executeWithRetry(async () => {
      const tx = await this.client.transaction("write");
      try {
        const existingSnapshotResult = await tx.execute({
          sql: `SELECT snapshot FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} WHERE workflow_name = ? AND run_id = ?`,
          args: [workflowName, runId]
        });
        let snapshot;
        if (!existingSnapshotResult.rows?.[0]) {
          // No snapshot yet — start from an empty pending run state.
          snapshot = {
            context: {},
            activePaths: [],
            timestamp: Date.now(),
            suspendedPaths: {},
            resumeLabels: {},
            serializedStepGraph: [],
            value: {},
            waitingPaths: {},
            status: "pending",
            runId,
            requestContext: {}
          };
        } else {
          const existingSnapshot = existingSnapshotResult.rows[0].snapshot;
          snapshot = typeof existingSnapshot === "string" ? JSON.parse(existingSnapshot) : existingSnapshot;
        }
        snapshot.context[stepId] = result;
        snapshot.requestContext = { ...snapshot.requestContext, ...requestContext };
        await tx.execute({
          sql: `UPDATE ${storage.TABLE_WORKFLOW_SNAPSHOT} SET snapshot = ? WHERE workflow_name = ? AND run_id = ?`,
          args: [JSON.stringify(snapshot), workflowName, runId]
        });
        await tx.commit();
        return snapshot.context;
      } catch (error) {
        if (!tx.closed) {
          await tx.rollback();
        }
        throw error;
      }
    });
  }
  /**
   * Shallow-merges `opts` over the run's stored snapshot inside a write
   * transaction. Returns undefined when no snapshot row exists; throws when a
   * row exists but its snapshot has no `context` (corrupt/incomplete state).
   */
  async updateWorkflowState({
    workflowName,
    runId,
    opts
  }) {
    return this.executeWithRetry(async () => {
      const tx = await this.client.transaction("write");
      try {
        const existingSnapshotResult = await tx.execute({
          sql: `SELECT snapshot FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} WHERE workflow_name = ? AND run_id = ?`,
          args: [workflowName, runId]
        });
        if (!existingSnapshotResult.rows?.[0]) {
          await tx.rollback();
          return void 0;
        }
        const existingSnapshot = existingSnapshotResult.rows[0].snapshot;
        const snapshot = typeof existingSnapshot === "string" ? JSON.parse(existingSnapshot) : existingSnapshot;
        if (!snapshot || !snapshot?.context) {
          await tx.rollback();
          throw new Error(`Snapshot not found for runId ${runId}`);
        }
        const updatedSnapshot = { ...snapshot, ...opts };
        await tx.execute({
          sql: `UPDATE ${storage.TABLE_WORKFLOW_SNAPSHOT} SET snapshot = ? WHERE workflow_name = ? AND run_id = ?`,
          args: [JSON.stringify(updatedSnapshot), workflowName, runId]
        });
        await tx.commit();
        return updatedSnapshot;
      } catch (error) {
        if (!tx.closed) {
          await tx.rollback();
        }
        throw error;
      }
    });
  }
  /**
   * Writes the full snapshot for a run via the generic insert path
   * (presumably an upsert — TODO confirm against StoreOperationsLibSQL.insert).
   */
  async persistWorkflowSnapshot({
    workflowName,
    runId,
    resourceId,
    snapshot
  }) {
    const data = {
      workflow_name: workflowName,
      run_id: runId,
      resourceId,
      snapshot,
      createdAt: /* @__PURE__ */ new Date(),
      updatedAt: /* @__PURE__ */ new Date()
    };
    this.logger.debug("Persisting workflow snapshot", { workflowName, runId, data });
    await this.operations.insert({
      tableName: storage.TABLE_WORKFLOW_SNAPSHOT,
      record: data
    });
  }
  /** Loads the snapshot for (workflowName, runId); null when none is stored. */
  async loadWorkflowSnapshot({
    workflowName,
    runId
  }) {
    this.logger.debug("Loading workflow snapshot", { workflowName, runId });
    const d = await this.operations.load({
      tableName: storage.TABLE_WORKFLOW_SNAPSHOT,
      keys: { workflow_name: workflowName, run_id: runId }
    });
    return d ? d.snapshot : null;
  }
  /**
   * Returns the most recent run matching the optional runId / workflowName
   * filters (both omitted = latest run overall), or null when nothing matches.
   * @throws MastraError LIBSQL_STORE_GET_WORKFLOW_RUN_BY_ID_FAILED
   */
  async getWorkflowRunById({
    runId,
    workflowName
  }) {
    const conditions = [];
    const args = [];
    if (runId) {
      conditions.push("run_id = ?");
      args.push(runId);
    }
    if (workflowName) {
      conditions.push("workflow_name = ?");
      args.push(workflowName);
    }
    const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
    try {
      const result = await this.client.execute({
        sql: `SELECT * FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} ${whereClause} ORDER BY createdAt DESC LIMIT 1`,
        args
      });
      if (!result.rows?.[0]) {
        return null;
      }
      return parseWorkflowRun(result.rows[0]);
    } catch (error$1) {
      throw new error.MastraError(
        {
          id: "LIBSQL_STORE_GET_WORKFLOW_RUN_BY_ID_FAILED",
          domain: error.ErrorDomain.STORAGE,
          category: error.ErrorCategory.THIRD_PARTY
        },
        error$1
      );
    }
  }
  /**
   * Lists runs newest-first, filtered by workflowName / createdAt range /
   * resourceId (the resourceId filter is skipped with a console warning when
   * the column doesn't exist yet — pre-migration databases). Pagination is
   * applied only when BOTH page and perPage are numbers; otherwise all
   * matching rows are returned and `total` falls back to the row count.
   * @throws MastraError LIBSQL_STORE_LIST_WORKFLOW_RUNS_FAILED
   */
  async listWorkflowRuns({
    workflowName,
    fromDate,
    toDate,
    page,
    perPage,
    resourceId
  } = {}) {
    try {
      const conditions = [];
      const args = [];
      if (workflowName) {
        conditions.push("workflow_name = ?");
        args.push(workflowName);
      }
      if (fromDate) {
        conditions.push("createdAt >= ?");
        args.push(fromDate.toISOString());
      }
      if (toDate) {
        conditions.push("createdAt <= ?");
        args.push(toDate.toISOString());
      }
      if (resourceId) {
        const hasResourceId = await this.operations.hasColumn(storage.TABLE_WORKFLOW_SNAPSHOT, "resourceId");
        if (hasResourceId) {
          conditions.push("resourceId = ?");
          args.push(resourceId);
        } else {
          console.warn(`[${storage.TABLE_WORKFLOW_SNAPSHOT}] resourceId column not found. Skipping resourceId filter.`);
        }
      }
      const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
      let total = 0;
      const usePagination = typeof perPage === "number" && typeof page === "number";
      if (usePagination) {
        const countResult = await this.client.execute({
          sql: `SELECT COUNT(*) as count FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} ${whereClause}`,
          args
        });
        total = Number(countResult.rows?.[0]?.count ?? 0);
      }
      const normalizedPerPage = usePagination ? storage.normalizePerPage(perPage, Number.MAX_SAFE_INTEGER) : 0;
      const offset = usePagination ? page * normalizedPerPage : 0;
      const result = await this.client.execute({
        sql: `SELECT * FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} ${whereClause} ORDER BY createdAt DESC${usePagination ? ` LIMIT ? OFFSET ?` : ""}`,
        args: usePagination ? [...args, normalizedPerPage, offset] : args
      });
      const runs = (result.rows || []).map((row) => parseWorkflowRun(row));
      return { runs, total: total || runs.length };
    } catch (error$1) {
      throw new error.MastraError(
        {
          id: "LIBSQL_STORE_LIST_WORKFLOW_RUNS_FAILED",
          domain: error.ErrorDomain.STORAGE,
          category: error.ErrorCategory.THIRD_PARTY
        },
        error$1
      );
    }
  }
};
|
|
3051
|
+
|
|
3052
|
+
// src/storage/index.ts
|
|
3053
|
+
var LibSQLStore = class extends storage.MastraStorage {
|
|
3054
|
+
client;
|
|
3055
|
+
maxRetries;
|
|
3056
|
+
initialBackoffMs;
|
|
3057
|
+
stores;
|
|
3058
|
+
/**
 * Builds the store from either a `url` (+ optional `authToken`) config, in
 * which case a libsql client is created, or a pre-built `client` config.
 * Wires up all domain stores (operations/scores/workflows/memory/observability)
 * over the shared client. Requires a non-empty string `id`.
 */
constructor(config) {
  if (!config.id || typeof config.id !== "string" || config.id.trim() === "") {
    throw new Error("LibSQLStore: id must be provided and cannot be empty.");
  }
  super({ id: config.id, name: `LibSQLStore` });
  this.maxRetries = config.maxRetries ?? 5;
  this.initialBackoffMs = config.initialBackoffMs ?? 100;
  if ("url" in config) {
    if (config.url.endsWith(":memory:")) {
      // In-memory DBs vanish between connections, so init results must not be cached.
      this.shouldCacheInit = false;
    }
    this.client = client.createClient({
      url: config.url,
      ...config.authToken ? { authToken: config.authToken } : {}
    });
    // Local file / in-memory targets: apply WAL + busy_timeout tuning.
    // Fire-and-forget; failures are only logged.
    if (config.url.startsWith("file:") || config.url.includes(":memory:")) {
      this.client.execute("PRAGMA journal_mode=WAL;").then(() => this.logger.debug("LibSQLStore: PRAGMA journal_mode=WAL set.")).catch((err) => this.logger.warn("LibSQLStore: Failed to set PRAGMA journal_mode=WAL.", err));
      this.client.execute("PRAGMA busy_timeout = 5000;").then(() => this.logger.debug("LibSQLStore: PRAGMA busy_timeout=5000 set.")).catch((err) => this.logger.warn("LibSQLStore: Failed to set PRAGMA busy_timeout.", err));
    }
  } else {
    // Caller supplied an existing libsql client; use it as-is.
    this.client = config.client;
  }
  const operations = new StoreOperationsLibSQL({
    client: this.client,
    maxRetries: this.maxRetries,
    initialBackoffMs: this.initialBackoffMs
  });
  const scores = new ScoresLibSQL({ client: this.client, operations });
  const workflows = new WorkflowsLibSQL({ client: this.client, operations });
  const memory = new MemoryLibSQL({ client: this.client, operations });
  const observability = new ObservabilityLibSQL({ operations });
  this.stores = {
    operations,
    scores,
    workflows,
    memory,
    observability
  };
}
|
|
3097
|
+
/** Capability flags advertised to @mastra/core so it can feature-detect this store. */
get supports() {
  return {
    selectByIncludeResourceScope: true,
    resourceWorkingMemory: true,
    hasColumn: true,
    createTable: true,
    deleteMessages: true,
    observabilityInstance: true,
    listScoresBySpan: true
  };
}
/** Delegates table creation to the operations store. */
async createTable({
  tableName,
  schema
}) {
  await this.stores.operations.createTable({ tableName, schema });
}
/**
 * Alters table schema to add columns if they don't exist
 * @param tableName Name of the table
 * @param schema Schema of the table
 * @param ifNotExists Array of column names to add if they don't exist
 */
async alterTable({
  tableName,
  schema,
  ifNotExists
}) {
  await this.stores.operations.alterTable({ tableName, schema, ifNotExists });
}
/** Delegates to operations.clearTable (deletes all rows, keeps the table). */
async clearTable({ tableName }) {
  await this.stores.operations.clearTable({ tableName });
}
/** Delegates to operations.dropTable (DROP TABLE IF EXISTS). */
async dropTable({ tableName }) {
  await this.stores.operations.dropTable({ tableName });
}
|
|
3133
|
+
/** Delegates a single-record insert to the operations store. */
insert(args) {
  return this.stores.operations.insert(args);
}
/** Delegates a multi-record insert to the operations store. */
batchInsert(args) {
  return this.stores.operations.batchInsert(args);
}
/** Delegates a keyed single-row load to the operations store. */
async load({ tableName, keys }) {
  return this.stores.operations.load({ tableName, keys });
}
/** Memory domain: fetches a conversation thread by id. */
async getThreadById({ threadId }) {
  return this.stores.memory.getThreadById({ threadId });
}
/** Memory domain: persists a conversation thread. */
async saveThread({ thread }) {
  return this.stores.memory.saveThread({ thread });
}
/** Memory domain: updates a thread's title and/or metadata. */
async updateThread({
  id,
  title,
  metadata
}) {
  return this.stores.memory.updateThread({ id, title, metadata });
}
/** Memory domain: deletes a thread. */
async deleteThread({ threadId }) {
  return this.stores.memory.deleteThread({ threadId });
}
/** Memory domain: fetches messages by their ids. */
async listMessagesById({ messageIds }) {
  return this.stores.memory.listMessagesById({ messageIds });
}
/** Memory domain: saves messages; only the messages field is forwarded/returned. */
async saveMessages(args) {
  const result = await this.stores.memory.saveMessages({ messages: args.messages });
  return { messages: result.messages };
}
/** Memory domain: updates existing messages. */
async updateMessages({
  messages
}) {
  return this.stores.memory.updateMessages({ messages });
}
/** Memory domain: deletes messages by id. */
async deleteMessages(messageIds) {
  return this.stores.memory.deleteMessages(messageIds);
}
|
|
3173
|
+
/** Scores domain: fetches a score by id (null when absent). */
async getScoreById({ id }) {
  return this.stores.scores.getScoreById({ id });
}
/** Scores domain: validates and persists a score payload. */
async saveScore(score) {
  return this.stores.scores.saveScore(score);
}
/** Scores domain: paginated listing filtered by scorer/entity/source. */
async listScoresByScorerId({
  scorerId,
  entityId,
  entityType,
  source,
  pagination
}) {
  return this.stores.scores.listScoresByScorerId({ scorerId, entityId, entityType, source, pagination });
}
/** Scores domain: paginated listing for one run. */
async listScoresByRunId({
  runId,
  pagination
}) {
  return this.stores.scores.listScoresByRunId({ runId, pagination });
}
/** Scores domain: paginated listing for one entity. */
async listScoresByEntityId({
  entityId,
  entityType,
  pagination
}) {
  return this.stores.scores.listScoresByEntityId({ entityId, entityType, pagination });
}
|
|
3201
|
+
/**
|
|
3202
|
+
* WORKFLOWS
|
|
3203
|
+
*/
|
|
3204
|
+
async updateWorkflowResults({
|
|
3205
|
+
workflowName,
|
|
3206
|
+
runId,
|
|
3207
|
+
stepId,
|
|
3208
|
+
result,
|
|
3209
|
+
requestContext
|
|
3210
|
+
}) {
|
|
3211
|
+
return this.stores.workflows.updateWorkflowResults({ workflowName, runId, stepId, result, requestContext });
|
|
3212
|
+
}
|
|
3213
|
+
async updateWorkflowState({
|
|
3214
|
+
workflowName,
|
|
3215
|
+
runId,
|
|
3216
|
+
opts
|
|
3217
|
+
}) {
|
|
3218
|
+
return this.stores.workflows.updateWorkflowState({ workflowName, runId, opts });
|
|
3219
|
+
}
|
|
3220
|
+
async persistWorkflowSnapshot({
|
|
3221
|
+
workflowName,
|
|
3222
|
+
runId,
|
|
3223
|
+
resourceId,
|
|
3224
|
+
snapshot
|
|
3225
|
+
}) {
|
|
3226
|
+
return this.stores.workflows.persistWorkflowSnapshot({ workflowName, runId, resourceId, snapshot });
|
|
3227
|
+
}
|
|
3228
|
+
async loadWorkflowSnapshot({
|
|
3229
|
+
workflowName,
|
|
3230
|
+
runId
|
|
3231
|
+
}) {
|
|
3232
|
+
return this.stores.workflows.loadWorkflowSnapshot({ workflowName, runId });
|
|
3233
|
+
}
|
|
3234
|
+
async listWorkflowRuns({
|
|
3235
|
+
workflowName,
|
|
3236
|
+
fromDate,
|
|
3237
|
+
toDate,
|
|
3238
|
+
perPage,
|
|
3239
|
+
page,
|
|
3240
|
+
resourceId
|
|
3241
|
+
} = {}) {
|
|
3242
|
+
return this.stores.workflows.listWorkflowRuns({ workflowName, fromDate, toDate, perPage, page, resourceId });
|
|
3243
|
+
}
|
|
3244
|
+
async getWorkflowRunById({
|
|
3245
|
+
runId,
|
|
3246
|
+
workflowName
|
|
3247
|
+
}) {
|
|
3248
|
+
return this.stores.workflows.getWorkflowRunById({ runId, workflowName });
|
|
3249
|
+
}
|
|
3250
|
+
async getResourceById({ resourceId }) {
|
|
3251
|
+
return this.stores.memory.getResourceById({ resourceId });
|
|
3252
|
+
}
|
|
3253
|
+
async saveResource({ resource }) {
|
|
3254
|
+
return this.stores.memory.saveResource({ resource });
|
|
3255
|
+
}
|
|
3256
|
+
async updateResource({
|
|
3257
|
+
resourceId,
|
|
3258
|
+
workingMemory,
|
|
3259
|
+
metadata
|
|
3260
|
+
}) {
|
|
3261
|
+
return this.stores.memory.updateResource({ resourceId, workingMemory, metadata });
|
|
3262
|
+
}
|
|
3263
|
+
async createSpan(span) {
|
|
3264
|
+
return this.stores.observability.createSpan(span);
|
|
3265
|
+
}
|
|
3266
|
+
async updateSpan(params) {
|
|
3267
|
+
return this.stores.observability.updateSpan(params);
|
|
3268
|
+
}
|
|
3269
|
+
async getTrace(traceId) {
|
|
3270
|
+
return this.stores.observability.getTrace(traceId);
|
|
3271
|
+
}
|
|
3272
|
+
async getTracesPaginated(args) {
|
|
3273
|
+
return this.stores.observability.getTracesPaginated(args);
|
|
3274
|
+
}
|
|
3275
|
+
async listScoresBySpan({
|
|
3276
|
+
traceId,
|
|
3277
|
+
spanId,
|
|
3278
|
+
pagination
|
|
3279
|
+
}) {
|
|
3280
|
+
return this.stores.scores.listScoresBySpan({ traceId, spanId, pagination });
|
|
3281
|
+
}
|
|
3282
|
+
async batchCreateSpans(args) {
|
|
3283
|
+
return this.stores.observability.batchCreateSpans(args);
|
|
3284
|
+
}
|
|
3285
|
+
async batchUpdateSpans(args) {
|
|
3286
|
+
return this.stores.observability.batchUpdateSpans(args);
|
|
3287
|
+
}
|
|
3288
|
+
};
|
|
3289
|
+
|
|
3290
|
+
// src/vector/prompt.ts
|
|
3291
|
+
var LIBSQL_PROMPT = `When querying LibSQL Vector, you can ONLY use the operators listed below. Any other operators will be rejected.
|
|
3292
|
+
Important: Don't explain how to construct the filter - use the specified operators and fields to search the content and return relevant results.
|
|
3293
|
+
If a user tries to give an explicit operator that is not supported, reject the filter entirely and let them know that the operator is not supported.
|
|
3294
|
+
|
|
3295
|
+
Basic Comparison Operators:
|
|
3296
|
+
- $eq: Exact match (default when using field: value)
|
|
3297
|
+
Example: { "category": "electronics" }
|
|
3298
|
+
- $ne: Not equal
|
|
3299
|
+
Example: { "category": { "$ne": "electronics" } }
|
|
3300
|
+
- $gt: Greater than
|
|
3301
|
+
Example: { "price": { "$gt": 100 } }
|
|
3302
|
+
- $gte: Greater than or equal
|
|
3303
|
+
Example: { "price": { "$gte": 100 } }
|
|
3304
|
+
- $lt: Less than
|
|
3305
|
+
Example: { "price": { "$lt": 100 } }
|
|
3306
|
+
- $lte: Less than or equal
|
|
3307
|
+
Example: { "price": { "$lte": 100 } }
|
|
3308
|
+
|
|
3309
|
+
Array Operators:
|
|
3310
|
+
- $in: Match any value in array
|
|
3311
|
+
Example: { "category": { "$in": ["electronics", "books"] } }
|
|
3312
|
+
- $nin: Does not match any value in array
|
|
3313
|
+
Example: { "category": { "$nin": ["electronics", "books"] } }
|
|
3314
|
+
- $all: Match all values in array
|
|
3315
|
+
Example: { "tags": { "$all": ["premium", "sale"] } }
|
|
3316
|
+
- $elemMatch: Match array elements that meet all specified conditions
|
|
3317
|
+
Example: { "items": { "$elemMatch": { "price": { "$gt": 100 } } } }
|
|
3318
|
+
- $contains: Check if array contains value
|
|
3319
|
+
Example: { "tags": { "$contains": "premium" } }
|
|
3320
|
+
|
|
3321
|
+
Logical Operators:
|
|
3322
|
+
- $and: Logical AND (implicit when using multiple conditions)
|
|
3323
|
+
Example: { "$and": [{ "price": { "$gt": 100 } }, { "category": "electronics" }] }
|
|
3324
|
+
- $or: Logical OR
|
|
3325
|
+
Example: { "$or": [{ "price": { "$lt": 50 } }, { "category": "books" }] }
|
|
3326
|
+
- $not: Logical NOT
|
|
3327
|
+
Example: { "$not": { "category": "electronics" } }
|
|
3328
|
+
- $nor: Logical NOR
|
|
3329
|
+
Example: { "$nor": [{ "price": { "$lt": 50 } }, { "category": "books" }] }
|
|
3330
|
+
|
|
3331
|
+
Element Operators:
|
|
3332
|
+
- $exists: Check if field exists
|
|
3333
|
+
Example: { "rating": { "$exists": true } }
|
|
3334
|
+
|
|
3335
|
+
Special Operators:
|
|
3336
|
+
- $size: Array length check
|
|
3337
|
+
Example: { "tags": { "$size": 2 } }
|
|
3338
|
+
|
|
3339
|
+
Restrictions:
|
|
3340
|
+
- Regex patterns are not supported
|
|
3341
|
+
- Direct RegExp patterns will throw an error
|
|
3342
|
+
- Nested fields are supported using dot notation
|
|
3343
|
+
- Multiple conditions on the same field are supported with both implicit and explicit $and
|
|
3344
|
+
- Array operations work on array fields only
|
|
3345
|
+
- Basic operators handle array values as JSON strings
|
|
3346
|
+
- Empty arrays in conditions are handled gracefully
|
|
3347
|
+
- Only logical operators ($and, $or, $not, $nor) can be used at the top level
|
|
3348
|
+
- All other operators must be used within a field condition
|
|
3349
|
+
Valid: { "field": { "$gt": 100 } }
|
|
3350
|
+
Valid: { "$and": [...] }
|
|
3351
|
+
Invalid: { "$gt": 100 }
|
|
3352
|
+
Invalid: { "$contains": "value" }
|
|
3353
|
+
- Logical operators must contain field conditions, not direct operators
|
|
3354
|
+
Valid: { "$and": [{ "field": { "$gt": 100 } }] }
|
|
3355
|
+
Invalid: { "$and": [{ "$gt": 100 }] }
|
|
3356
|
+
- $not operator:
|
|
3357
|
+
- Must be an object
|
|
3358
|
+
- Cannot be empty
|
|
3359
|
+
- Can be used at field level or top level
|
|
3360
|
+
- Valid: { "$not": { "field": "value" } }
|
|
3361
|
+
- Valid: { "field": { "$not": { "$eq": "value" } } }
|
|
3362
|
+
- Other logical operators ($and, $or, $nor):
|
|
3363
|
+
- Can only be used at top level or nested within other logical operators
|
|
3364
|
+
- Can not be used on a field level, or be nested inside a field
|
|
3365
|
+
- Can not be used inside an operator
|
|
3366
|
+
- Valid: { "$and": [{ "field": { "$gt": 100 } }] }
|
|
3367
|
+
- Valid: { "$or": [{ "$and": [{ "field": { "$gt": 100 } }] }] }
|
|
3368
|
+
- Invalid: { "field": { "$and": [{ "$gt": 100 }] } }
|
|
3369
|
+
- Invalid: { "field": { "$or": [{ "$gt": 100 }] } }
|
|
3370
|
+
- Invalid: { "field": { "$gt": { "$and": [{...}] } } }
|
|
3371
|
+
- $elemMatch requires an object with conditions
|
|
3372
|
+
Valid: { "array": { "$elemMatch": { "field": "value" } } }
|
|
3373
|
+
Invalid: { "array": { "$elemMatch": "value" } }
|
|
3374
|
+
|
|
3375
|
+
Example Complex Query:
|
|
3376
|
+
{
|
|
3377
|
+
"$and": [
|
|
3378
|
+
{ "category": { "$in": ["electronics", "computers"] } },
|
|
3379
|
+
{ "price": { "$gte": 100, "$lte": 1000 } },
|
|
3380
|
+
{ "tags": { "$all": ["premium", "sale"] } },
|
|
3381
|
+
{ "items": { "$elemMatch": { "price": { "$gt": 50 }, "inStock": true } } },
|
|
3382
|
+
{ "$or": [
|
|
3383
|
+
{ "stock": { "$gt": 0 } },
|
|
3384
|
+
{ "preorder": true }
|
|
3385
|
+
]}
|
|
3386
|
+
]
|
|
3387
|
+
}`;
|
|
3388
|
+
|
|
3389
|
+
exports.DefaultStorage = LibSQLStore;
|
|
3390
|
+
exports.LIBSQL_PROMPT = LIBSQL_PROMPT;
|
|
3391
|
+
exports.LibSQLStore = LibSQLStore;
|
|
3392
|
+
exports.LibSQLVector = LibSQLVector;
|
|
3393
|
+
//# sourceMappingURL=index.cjs.map
|
|
3394
|
+
//# sourceMappingURL=index.cjs.map
|