@mastra/core 0.9.5-alpha.0 → 0.10.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -2
- package/dist/agent/index.cjs +2 -2
- package/dist/agent/index.d.cts +12 -13
- package/dist/agent/index.d.ts +12 -13
- package/dist/agent/index.js +1 -1
- package/dist/{base-CI7Uz2GO.d.ts → base-DttB7sJD.d.ts} +1172 -1130
- package/dist/{base-BV8SNIdm.d.cts → base-aJ3etsh5.d.cts} +1172 -1130
- package/dist/{base-H7siSAfu.d.cts → base-aPYtPBT2.d.cts} +3 -3
- package/dist/{base-4Z34GQL8.d.ts → base-tc5kgDTD.d.ts} +3 -3
- package/dist/base.cjs +2 -2
- package/dist/base.d.cts +2 -3
- package/dist/base.d.ts +2 -3
- package/dist/base.js +1 -1
- package/dist/bundler/index.cjs +2 -2
- package/dist/bundler/index.d.cts +2 -3
- package/dist/bundler/index.d.ts +2 -3
- package/dist/bundler/index.js +1 -1
- package/dist/{chunk-W4BSLENO.js → chunk-2NTUAUGH.js} +1 -1
- package/dist/{chunk-MF5BUJO6.js → chunk-2PW6UJMW.js} +57 -9
- package/dist/{chunk-HKCSQI7G.cjs → chunk-2U7ZZUFO.cjs} +2 -2
- package/dist/{workflows/vNext/index.js → chunk-3SQ3G2EJ.js} +16 -15
- package/dist/chunk-5HULBQ2W.cjs +38 -0
- package/dist/{chunk-BATBI3D4.cjs → chunk-5JRD3NDP.cjs} +15 -15
- package/dist/{chunk-EYQLTWRJ.cjs → chunk-5W2G7S44.cjs} +33 -58
- package/dist/chunk-75Z4BZEE.js +36 -0
- package/dist/{chunk-S3Y7QBO7.cjs → chunk-AOGUTJUO.cjs} +57 -50
- package/dist/{chunk-BZUIFK6C.js → chunk-BPTSLJHA.js} +9 -9
- package/dist/{chunk-JBJ7KXN5.js → chunk-C3ICEF6E.js} +1 -1
- package/dist/{chunk-E4FAXBUV.cjs → chunk-CWSFP2HS.cjs} +2 -2
- package/dist/{chunk-RWWUZTAK.js → chunk-E7CLY6SK.js} +41 -34
- package/dist/{chunk-235X76GC.js → chunk-FI7R232B.js} +2 -2
- package/dist/chunk-GQ2XQ4UN.js +321 -0
- package/dist/{chunk-6EPEYXAE.js → chunk-J7GDGV6F.js} +1 -1
- package/dist/{chunk-NLBJOFNG.js → chunk-JW5TMK2L.js} +1 -1
- package/dist/{chunk-NOPY74PV.js → chunk-KJQFFEEQ.js} +1 -1
- package/dist/{chunk-V2Q2FEVO.cjs → chunk-KSPHQSZX.cjs} +2 -2
- package/dist/{chunk-SU4O2A72.js → chunk-LHRZV5VG.js} +1 -22
- package/dist/chunk-NMDM4IZN.cjs +78 -0
- package/dist/chunk-OBEALYTK.js +74 -0
- package/dist/{chunk-PU2TNRKO.js → chunk-OCT2762Q.js} +1 -1
- package/dist/{chunk-5DUFC7TV.cjs → chunk-P7BGXOQV.cjs} +2 -2
- package/dist/{chunk-NQE264UM.cjs → chunk-PIZM25KI.cjs} +2 -23
- package/dist/{chunk-S7J63JOG.cjs → chunk-RVS6OUJN.cjs} +2 -2
- package/dist/{chunk-LF5CE7LU.cjs → chunk-SKG2NIZW.cjs} +2 -2
- package/dist/chunk-SWW4EBUZ.cjs +96 -0
- package/dist/{chunk-RGE4UF6A.cjs → chunk-TMPFLJLG.cjs} +2 -2
- package/dist/{workflows/vNext/index.cjs → chunk-TTKQ37ZZ.cjs} +21 -20
- package/dist/{chunk-F7HNPG53.cjs → chunk-U3L3NEOM.cjs} +58 -10
- package/dist/chunk-VHLL4AZK.js +90 -0
- package/dist/{chunk-EG74VBSP.cjs → chunk-YEULQPUY.cjs} +6 -6
- package/dist/{chunk-RVKA5VUT.js → chunk-ZB3LEHL2.js} +31 -56
- package/dist/{chunk-TAFOICHR.js → chunk-ZKN6HYYQ.js} +1 -1
- package/dist/chunk-ZZDAC5KD.cjs +325 -0
- package/dist/deployer/index.cjs +2 -2
- package/dist/deployer/index.d.cts +2 -3
- package/dist/deployer/index.d.ts +2 -3
- package/dist/deployer/index.js +1 -1
- package/dist/eval/index.d.cts +12 -13
- package/dist/eval/index.d.ts +12 -13
- package/dist/hooks/index.d.cts +1 -1
- package/dist/hooks/index.d.ts +1 -1
- package/dist/index.cjs +71 -158
- package/dist/index.d.cts +16 -24
- package/dist/index.d.ts +16 -24
- package/dist/index.js +16 -23
- package/dist/integration/index.cjs +3 -3
- package/dist/integration/index.d.cts +11 -12
- package/dist/integration/index.d.ts +11 -12
- package/dist/integration/index.js +1 -1
- package/dist/llm/index.d.cts +11 -12
- package/dist/llm/index.d.ts +11 -12
- package/dist/logger/index.cjs +18 -17
- package/dist/logger/index.d.cts +42 -2
- package/dist/logger/index.d.ts +42 -2
- package/dist/logger/index.js +2 -1
- package/dist/{index-QV27jkxQ.d.ts → logger-EhZkzZOr.d.cts} +38 -52
- package/dist/{index-QV27jkxQ.d.cts → logger-EhZkzZOr.d.ts} +38 -52
- package/dist/mastra/index.cjs +2 -2
- package/dist/mastra/index.d.cts +10 -11
- package/dist/mastra/index.d.ts +10 -11
- package/dist/mastra/index.js +1 -1
- package/dist/mcp/index.cjs +4 -4
- package/dist/mcp/index.d.cts +13 -14
- package/dist/mcp/index.d.ts +13 -14
- package/dist/mcp/index.js +2 -2
- package/dist/memory/index.cjs +4 -4
- package/dist/memory/index.d.cts +11 -12
- package/dist/memory/index.d.ts +11 -12
- package/dist/memory/index.js +1 -1
- package/dist/network/index.cjs +8 -8
- package/dist/network/index.d.cts +11 -12
- package/dist/network/index.d.ts +11 -12
- package/dist/network/index.js +4 -4
- package/dist/relevance/index.cjs +4 -4
- package/dist/relevance/index.d.cts +14 -15
- package/dist/relevance/index.d.ts +14 -15
- package/dist/relevance/index.js +1 -1
- package/dist/server/index.cjs +4 -0
- package/dist/server/index.d.cts +14 -13
- package/dist/server/index.d.ts +14 -13
- package/dist/server/index.js +4 -1
- package/dist/storage/index.cjs +184 -3
- package/dist/storage/index.d.cts +82 -12
- package/dist/storage/index.d.ts +82 -12
- package/dist/storage/index.js +186 -1
- package/dist/telemetry/index.d.cts +13 -14
- package/dist/telemetry/index.d.ts +13 -14
- package/dist/telemetry/otel-vendor.cjs +2 -2
- package/dist/telemetry/otel-vendor.d.cts +1 -1
- package/dist/telemetry/otel-vendor.d.ts +1 -1
- package/dist/telemetry/otel-vendor.js +1 -1
- package/dist/tools/index.cjs +4 -4
- package/dist/tools/index.d.cts +11 -12
- package/dist/tools/index.d.ts +11 -12
- package/dist/tools/index.js +1 -1
- package/dist/tts/index.cjs +2 -2
- package/dist/tts/index.d.cts +2 -3
- package/dist/tts/index.d.ts +2 -3
- package/dist/tts/index.js +1 -1
- package/dist/{types-BtMyV38I.d.ts → types-Bo1uigWx.d.cts} +1 -1
- package/dist/{types-BtMyV38I.d.cts → types-Bo1uigWx.d.ts} +1 -1
- package/dist/utils.cjs +16 -16
- package/dist/utils.d.cts +14 -15
- package/dist/utils.d.ts +14 -15
- package/dist/utils.js +1 -1
- package/dist/vector/filter/index.cjs +189 -7
- package/dist/vector/filter/index.js +190 -1
- package/dist/vector/index.cjs +2 -2
- package/dist/vector/index.d.cts +10 -24
- package/dist/vector/index.d.ts +10 -24
- package/dist/vector/index.js +1 -1
- package/dist/voice/index.cjs +4 -4
- package/dist/voice/index.d.cts +11 -12
- package/dist/voice/index.d.ts +11 -12
- package/dist/voice/index.js +1 -1
- package/dist/workflows/index.cjs +17 -69
- package/dist/workflows/index.d.cts +195 -75
- package/dist/workflows/index.d.ts +195 -75
- package/dist/workflows/index.js +1 -1
- package/dist/workflows/legacy/index.cjs +90 -0
- package/dist/workflows/legacy/index.d.cts +87 -0
- package/dist/workflows/legacy/index.d.ts +87 -0
- package/dist/workflows/legacy/index.js +1 -0
- package/package.json +19 -41
- package/workflows/legacy.d.ts +1 -0
- package/dist/chunk-CMPPXW4U.js +0 -154
- package/dist/chunk-GH6EMQSO.js +0 -131
- package/dist/chunk-JMJXBE7L.js +0 -843
- package/dist/chunk-LGERQTJM.js +0 -190
- package/dist/chunk-O44MSFMZ.cjs +0 -168
- package/dist/chunk-T6DN4C76.cjs +0 -134
- package/dist/chunk-UCZWRJRW.cjs +0 -667
- package/dist/chunk-YK3XJ52U.cjs +0 -192
- package/dist/chunk-YNOTY72R.cjs +0 -845
- package/dist/chunk-ZY2DW2VQ.js +0 -658
- package/dist/storage/libsql/index.cjs +0 -588
- package/dist/storage/libsql/index.d.cts +0 -104
- package/dist/storage/libsql/index.d.ts +0 -104
- package/dist/storage/libsql/index.js +0 -585
- package/dist/vector/libsql/index.cjs +0 -14
- package/dist/vector/libsql/index.d.cts +0 -86
- package/dist/vector/libsql/index.d.ts +0 -86
- package/dist/vector/libsql/index.js +0 -1
- package/dist/workflows/vNext/index.d.cts +0 -209
- package/dist/workflows/vNext/index.d.ts +0 -209
- package/storage/libsql.d.ts +0 -1
- package/vector/libsql.d.ts +0 -1
- package/workflows/vNext.d.ts +0 -1
package/dist/chunk-JMJXBE7L.js
DELETED
|
@@ -1,843 +0,0 @@
|
|
|
1
|
-
import { MastraVector } from './chunk-SU4O2A72.js';
|
|
2
|
-
import { BaseFilterTranslator } from './chunk-LGERQTJM.js';
|
|
3
|
-
import { parseSqlIdentifier, parseFieldKey } from './chunk-MF5BUJO6.js';
|
|
4
|
-
import { isAbsolute, join, resolve } from 'path';
|
|
5
|
-
import { createClient } from '@libsql/client';
|
|
6
|
-
|
|
7
|
-
// src/vector/libsql/filter.ts
|
|
8
|
-
var LibSQLFilterTranslator = class extends BaseFilterTranslator {
|
|
9
|
-
getSupportedOperators() {
|
|
10
|
-
return {
|
|
11
|
-
...BaseFilterTranslator.DEFAULT_OPERATORS,
|
|
12
|
-
regex: [],
|
|
13
|
-
custom: ["$contains", "$size"]
|
|
14
|
-
};
|
|
15
|
-
}
|
|
16
|
-
translate(filter) {
|
|
17
|
-
if (this.isEmpty(filter)) {
|
|
18
|
-
return filter;
|
|
19
|
-
}
|
|
20
|
-
this.validateFilter(filter);
|
|
21
|
-
return this.translateNode(filter);
|
|
22
|
-
}
|
|
23
|
-
translateNode(node, currentPath = "") {
|
|
24
|
-
if (this.isRegex(node)) {
|
|
25
|
-
throw new Error("Direct regex pattern format is not supported in LibSQL");
|
|
26
|
-
}
|
|
27
|
-
const withPath = (result2) => currentPath ? { [currentPath]: result2 } : result2;
|
|
28
|
-
if (this.isPrimitive(node)) {
|
|
29
|
-
return withPath({ $eq: this.normalizeComparisonValue(node) });
|
|
30
|
-
}
|
|
31
|
-
if (Array.isArray(node)) {
|
|
32
|
-
return withPath({ $in: this.normalizeArrayValues(node) });
|
|
33
|
-
}
|
|
34
|
-
const entries = Object.entries(node);
|
|
35
|
-
const result = {};
|
|
36
|
-
for (const [key, value] of entries) {
|
|
37
|
-
const newPath = currentPath ? `${currentPath}.${key}` : key;
|
|
38
|
-
if (this.isLogicalOperator(key)) {
|
|
39
|
-
result[key] = Array.isArray(value) ? value.map((filter) => this.translateNode(filter)) : this.translateNode(value);
|
|
40
|
-
} else if (this.isOperator(key)) {
|
|
41
|
-
if (this.isArrayOperator(key) && !Array.isArray(value) && key !== "$elemMatch") {
|
|
42
|
-
result[key] = [value];
|
|
43
|
-
} else if (this.isBasicOperator(key) && Array.isArray(value)) {
|
|
44
|
-
result[key] = JSON.stringify(value);
|
|
45
|
-
} else {
|
|
46
|
-
result[key] = value;
|
|
47
|
-
}
|
|
48
|
-
} else if (typeof value === "object" && value !== null) {
|
|
49
|
-
const hasOperators = Object.keys(value).some((k) => this.isOperator(k));
|
|
50
|
-
if (hasOperators) {
|
|
51
|
-
result[newPath] = this.translateNode(value);
|
|
52
|
-
} else {
|
|
53
|
-
Object.assign(result, this.translateNode(value, newPath));
|
|
54
|
-
}
|
|
55
|
-
} else {
|
|
56
|
-
result[newPath] = this.translateNode(value);
|
|
57
|
-
}
|
|
58
|
-
}
|
|
59
|
-
return result;
|
|
60
|
-
}
|
|
61
|
-
// TODO: Look more into regex support for LibSQL
|
|
62
|
-
// private translateRegexPattern(pattern: string, options: string = ''): any {
|
|
63
|
-
// if (!options) return { $regex: pattern };
|
|
64
|
-
// const flags = options
|
|
65
|
-
// .split('')
|
|
66
|
-
// .filter(f => 'imsux'.includes(f))
|
|
67
|
-
// .join('');
|
|
68
|
-
// return {
|
|
69
|
-
// $regex: pattern,
|
|
70
|
-
// $options: flags,
|
|
71
|
-
// };
|
|
72
|
-
// }
|
|
73
|
-
};
|
|
74
|
-
|
|
75
|
-
// src/vector/libsql/sql-builder.ts
|
|
76
|
-
var createBasicOperator = (symbol) => {
|
|
77
|
-
return (key, value) => {
|
|
78
|
-
const jsonPathKey = parseJsonPathKey(key);
|
|
79
|
-
return {
|
|
80
|
-
sql: `CASE
|
|
81
|
-
WHEN ? IS NULL THEN json_extract(metadata, '$."${jsonPathKey}"') IS ${symbol === "=" ? "" : "NOT"} NULL
|
|
82
|
-
ELSE json_extract(metadata, '$."${jsonPathKey}"') ${symbol} ?
|
|
83
|
-
END`,
|
|
84
|
-
needsValue: true,
|
|
85
|
-
transformValue: () => {
|
|
86
|
-
return [value, value];
|
|
87
|
-
}
|
|
88
|
-
};
|
|
89
|
-
};
|
|
90
|
-
};
|
|
91
|
-
var createNumericOperator = (symbol) => {
|
|
92
|
-
return (key) => {
|
|
93
|
-
const jsonPathKey = parseJsonPathKey(key);
|
|
94
|
-
return {
|
|
95
|
-
sql: `CAST(json_extract(metadata, '$."${jsonPathKey}"') AS NUMERIC) ${symbol} ?`,
|
|
96
|
-
needsValue: true
|
|
97
|
-
};
|
|
98
|
-
};
|
|
99
|
-
};
|
|
100
|
-
var validateJsonArray = (key) => `json_valid(json_extract(metadata, '$."${key}"'))
|
|
101
|
-
AND json_type(json_extract(metadata, '$."${key}"')) = 'array'`;
|
|
102
|
-
var pattern = /json_extract\(metadata, '\$\."[^"]*"(\."[^"]*")*'\)/g;
|
|
103
|
-
function buildElemMatchConditions(value) {
|
|
104
|
-
const conditions = Object.entries(value).map(([field, fieldValue]) => {
|
|
105
|
-
if (field.startsWith("$")) {
|
|
106
|
-
const { sql, values } = buildCondition("elem.value", { [field]: fieldValue });
|
|
107
|
-
const elemSql = sql.replace(pattern, "elem.value");
|
|
108
|
-
return { sql: elemSql, values };
|
|
109
|
-
} else if (typeof fieldValue === "object" && !Array.isArray(fieldValue)) {
|
|
110
|
-
const { sql, values } = buildCondition(field, fieldValue);
|
|
111
|
-
const elemSql = sql.replace(pattern, `json_extract(elem.value, '$."${field}"')`);
|
|
112
|
-
return { sql: elemSql, values };
|
|
113
|
-
} else {
|
|
114
|
-
const parsedFieldKey = parseFieldKey(field);
|
|
115
|
-
return {
|
|
116
|
-
sql: `json_extract(elem.value, '$."${parsedFieldKey}"') = ?`,
|
|
117
|
-
values: [fieldValue]
|
|
118
|
-
};
|
|
119
|
-
}
|
|
120
|
-
});
|
|
121
|
-
return conditions;
|
|
122
|
-
}
|
|
123
|
-
var FILTER_OPERATORS = {
|
|
124
|
-
$eq: createBasicOperator("="),
|
|
125
|
-
$ne: createBasicOperator("!="),
|
|
126
|
-
$gt: createNumericOperator(">"),
|
|
127
|
-
$gte: createNumericOperator(">="),
|
|
128
|
-
$lt: createNumericOperator("<"),
|
|
129
|
-
$lte: createNumericOperator("<="),
|
|
130
|
-
// Array Operators
|
|
131
|
-
$in: (key, value) => {
|
|
132
|
-
const jsonPathKey = parseJsonPathKey(key);
|
|
133
|
-
const arr = Array.isArray(value) ? value : [value];
|
|
134
|
-
if (arr.length === 0) {
|
|
135
|
-
return { sql: "1 = 0", needsValue: true, transformValue: () => [] };
|
|
136
|
-
}
|
|
137
|
-
const paramPlaceholders = arr.map(() => "?").join(",");
|
|
138
|
-
return {
|
|
139
|
-
sql: `(
|
|
140
|
-
CASE
|
|
141
|
-
WHEN ${validateJsonArray(jsonPathKey)} THEN
|
|
142
|
-
EXISTS (
|
|
143
|
-
SELECT 1 FROM json_each(json_extract(metadata, '$."${jsonPathKey}"')) as elem
|
|
144
|
-
WHERE elem.value IN (SELECT value FROM json_each(?))
|
|
145
|
-
)
|
|
146
|
-
ELSE json_extract(metadata, '$."${jsonPathKey}"') IN (${paramPlaceholders})
|
|
147
|
-
END
|
|
148
|
-
)`,
|
|
149
|
-
needsValue: true,
|
|
150
|
-
transformValue: () => [JSON.stringify(arr), ...arr]
|
|
151
|
-
};
|
|
152
|
-
},
|
|
153
|
-
$nin: (key, value) => {
|
|
154
|
-
const jsonPathKey = parseJsonPathKey(key);
|
|
155
|
-
const arr = Array.isArray(value) ? value : [value];
|
|
156
|
-
if (arr.length === 0) {
|
|
157
|
-
return { sql: "1 = 1", needsValue: true, transformValue: () => [] };
|
|
158
|
-
}
|
|
159
|
-
const paramPlaceholders = arr.map(() => "?").join(",");
|
|
160
|
-
return {
|
|
161
|
-
sql: `(
|
|
162
|
-
CASE
|
|
163
|
-
WHEN ${validateJsonArray(jsonPathKey)} THEN
|
|
164
|
-
NOT EXISTS (
|
|
165
|
-
SELECT 1 FROM json_each(json_extract(metadata, '$."${jsonPathKey}"')) as elem
|
|
166
|
-
WHERE elem.value IN (SELECT value FROM json_each(?))
|
|
167
|
-
)
|
|
168
|
-
ELSE json_extract(metadata, '$."${jsonPathKey}"') NOT IN (${paramPlaceholders})
|
|
169
|
-
END
|
|
170
|
-
)`,
|
|
171
|
-
needsValue: true,
|
|
172
|
-
transformValue: () => [JSON.stringify(arr), ...arr]
|
|
173
|
-
};
|
|
174
|
-
},
|
|
175
|
-
$all: (key, value) => {
|
|
176
|
-
const jsonPathKey = parseJsonPathKey(key);
|
|
177
|
-
let sql;
|
|
178
|
-
const arrayValue = Array.isArray(value) ? value : [value];
|
|
179
|
-
if (arrayValue.length === 0) {
|
|
180
|
-
sql = "1 = 0";
|
|
181
|
-
} else {
|
|
182
|
-
sql = `(
|
|
183
|
-
CASE
|
|
184
|
-
WHEN ${validateJsonArray(jsonPathKey)} THEN
|
|
185
|
-
NOT EXISTS (
|
|
186
|
-
SELECT value
|
|
187
|
-
FROM json_each(?)
|
|
188
|
-
WHERE value NOT IN (
|
|
189
|
-
SELECT value
|
|
190
|
-
FROM json_each(json_extract(metadata, '$."${jsonPathKey}"'))
|
|
191
|
-
)
|
|
192
|
-
)
|
|
193
|
-
ELSE FALSE
|
|
194
|
-
END
|
|
195
|
-
)`;
|
|
196
|
-
}
|
|
197
|
-
return {
|
|
198
|
-
sql,
|
|
199
|
-
needsValue: true,
|
|
200
|
-
transformValue: () => {
|
|
201
|
-
if (arrayValue.length === 0) {
|
|
202
|
-
return [];
|
|
203
|
-
}
|
|
204
|
-
return [JSON.stringify(arrayValue)];
|
|
205
|
-
}
|
|
206
|
-
};
|
|
207
|
-
},
|
|
208
|
-
$elemMatch: (key, value) => {
|
|
209
|
-
const jsonPathKey = parseJsonPathKey(key);
|
|
210
|
-
if (typeof value !== "object" || Array.isArray(value)) {
|
|
211
|
-
throw new Error("$elemMatch requires an object with conditions");
|
|
212
|
-
}
|
|
213
|
-
const conditions = buildElemMatchConditions(value);
|
|
214
|
-
return {
|
|
215
|
-
sql: `(
|
|
216
|
-
CASE
|
|
217
|
-
WHEN ${validateJsonArray(jsonPathKey)} THEN
|
|
218
|
-
EXISTS (
|
|
219
|
-
SELECT 1
|
|
220
|
-
FROM json_each(json_extract(metadata, '$."${jsonPathKey}"')) as elem
|
|
221
|
-
WHERE ${conditions.map((c) => c.sql).join(" AND ")}
|
|
222
|
-
)
|
|
223
|
-
ELSE FALSE
|
|
224
|
-
END
|
|
225
|
-
)`,
|
|
226
|
-
needsValue: true,
|
|
227
|
-
transformValue: () => conditions.flatMap((c) => c.values)
|
|
228
|
-
};
|
|
229
|
-
},
|
|
230
|
-
// Element Operators
|
|
231
|
-
$exists: (key) => {
|
|
232
|
-
const jsonPathKey = parseJsonPathKey(key);
|
|
233
|
-
return {
|
|
234
|
-
sql: `json_extract(metadata, '$."${jsonPathKey}"') IS NOT NULL`,
|
|
235
|
-
needsValue: false
|
|
236
|
-
};
|
|
237
|
-
},
|
|
238
|
-
// Logical Operators
|
|
239
|
-
$and: (key) => ({
|
|
240
|
-
sql: `(${key})`,
|
|
241
|
-
needsValue: false
|
|
242
|
-
}),
|
|
243
|
-
$or: (key) => ({
|
|
244
|
-
sql: `(${key})`,
|
|
245
|
-
needsValue: false
|
|
246
|
-
}),
|
|
247
|
-
$not: (key) => ({ sql: `NOT (${key})`, needsValue: false }),
|
|
248
|
-
$nor: (key) => ({
|
|
249
|
-
sql: `NOT (${key})`,
|
|
250
|
-
needsValue: false
|
|
251
|
-
}),
|
|
252
|
-
$size: (key, paramIndex) => {
|
|
253
|
-
const jsonPathKey = parseJsonPathKey(key);
|
|
254
|
-
return {
|
|
255
|
-
sql: `(
|
|
256
|
-
CASE
|
|
257
|
-
WHEN json_type(json_extract(metadata, '$."${jsonPathKey}"')) = 'array' THEN
|
|
258
|
-
json_array_length(json_extract(metadata, '$."${jsonPathKey}"')) = $${paramIndex}
|
|
259
|
-
ELSE FALSE
|
|
260
|
-
END
|
|
261
|
-
)`,
|
|
262
|
-
needsValue: true
|
|
263
|
-
};
|
|
264
|
-
},
|
|
265
|
-
// /**
|
|
266
|
-
// * Regex Operators
|
|
267
|
-
// * Supports case insensitive and multiline
|
|
268
|
-
// */
|
|
269
|
-
// $regex: (key: string): FilterOperator => ({
|
|
270
|
-
// sql: `json_extract(metadata, '$."${toJsonPathKey(key)}"') = ?`,
|
|
271
|
-
// needsValue: true,
|
|
272
|
-
// transformValue: (value: any) => {
|
|
273
|
-
// const pattern = typeof value === 'object' ? value.$regex : value;
|
|
274
|
-
// const options = typeof value === 'object' ? value.$options || '' : '';
|
|
275
|
-
// let sql = `json_extract(metadata, '$."${toJsonPathKey(key)}"')`;
|
|
276
|
-
// // Handle multiline
|
|
277
|
-
// // if (options.includes('m')) {
|
|
278
|
-
// // sql = `REPLACE(${sql}, CHAR(10), '\n')`;
|
|
279
|
-
// // }
|
|
280
|
-
// // let finalPattern = pattern;
|
|
281
|
-
// // if (options) {
|
|
282
|
-
// // finalPattern = `(\\?${options})${pattern}`;
|
|
283
|
-
// // }
|
|
284
|
-
// // // Handle case insensitivity
|
|
285
|
-
// // if (options.includes('i')) {
|
|
286
|
-
// // sql = `LOWER(${sql}) REGEXP LOWER(?)`;
|
|
287
|
-
// // } else {
|
|
288
|
-
// // sql = `${sql} REGEXP ?`;
|
|
289
|
-
// // }
|
|
290
|
-
// if (options.includes('m')) {
|
|
291
|
-
// sql = `EXISTS (
|
|
292
|
-
// SELECT 1
|
|
293
|
-
// FROM json_each(
|
|
294
|
-
// json_array(
|
|
295
|
-
// ${sql},
|
|
296
|
-
// REPLACE(${sql}, CHAR(10), CHAR(13))
|
|
297
|
-
// )
|
|
298
|
-
// ) as lines
|
|
299
|
-
// WHERE lines.value REGEXP ?
|
|
300
|
-
// )`;
|
|
301
|
-
// } else {
|
|
302
|
-
// sql = `${sql} REGEXP ?`;
|
|
303
|
-
// }
|
|
304
|
-
// // Handle case insensitivity
|
|
305
|
-
// if (options.includes('i')) {
|
|
306
|
-
// sql = sql.replace('REGEXP ?', 'REGEXP LOWER(?)');
|
|
307
|
-
// sql = sql.replace('value REGEXP', 'LOWER(value) REGEXP');
|
|
308
|
-
// }
|
|
309
|
-
// // Handle extended - allows whitespace and comments in pattern
|
|
310
|
-
// if (options.includes('x')) {
|
|
311
|
-
// // Remove whitespace and comments from pattern
|
|
312
|
-
// const cleanPattern = pattern.replace(/\s+|#.*$/gm, '');
|
|
313
|
-
// return {
|
|
314
|
-
// sql,
|
|
315
|
-
// values: [cleanPattern],
|
|
316
|
-
// };
|
|
317
|
-
// }
|
|
318
|
-
// return {
|
|
319
|
-
// sql,
|
|
320
|
-
// values: [pattern],
|
|
321
|
-
// };
|
|
322
|
-
// },
|
|
323
|
-
// }),
|
|
324
|
-
$contains: (key, value) => {
|
|
325
|
-
const jsonPathKey = parseJsonPathKey(key);
|
|
326
|
-
let sql;
|
|
327
|
-
if (Array.isArray(value)) {
|
|
328
|
-
sql = `(
|
|
329
|
-
SELECT ${validateJsonArray(jsonPathKey)}
|
|
330
|
-
AND EXISTS (
|
|
331
|
-
SELECT 1
|
|
332
|
-
FROM json_each(json_extract(metadata, '$."${jsonPathKey}"')) as m
|
|
333
|
-
WHERE m.value IN (SELECT value FROM json_each(?))
|
|
334
|
-
)
|
|
335
|
-
)`;
|
|
336
|
-
} else if (typeof value === "string") {
|
|
337
|
-
sql = `lower(json_extract(metadata, '$."${jsonPathKey}"')) LIKE '%' || lower(?) || '%' ESCAPE '\\'`;
|
|
338
|
-
} else {
|
|
339
|
-
sql = `json_extract(metadata, '$."${jsonPathKey}"') = ?`;
|
|
340
|
-
}
|
|
341
|
-
return {
|
|
342
|
-
sql,
|
|
343
|
-
needsValue: true,
|
|
344
|
-
transformValue: () => {
|
|
345
|
-
if (Array.isArray(value)) {
|
|
346
|
-
return [JSON.stringify(value)];
|
|
347
|
-
}
|
|
348
|
-
if (typeof value === "object" && value !== null) {
|
|
349
|
-
return [JSON.stringify(value)];
|
|
350
|
-
}
|
|
351
|
-
if (typeof value === "string") {
|
|
352
|
-
return [escapeLikePattern(value)];
|
|
353
|
-
}
|
|
354
|
-
return [value];
|
|
355
|
-
}
|
|
356
|
-
};
|
|
357
|
-
}
|
|
358
|
-
/**
|
|
359
|
-
* $objectContains: True JSON containment for advanced use (deep sub-object match).
|
|
360
|
-
* Usage: { field: { $objectContains: { ...subobject } } }
|
|
361
|
-
*/
|
|
362
|
-
// $objectContains: (key: string) => ({
|
|
363
|
-
// sql: '', // Will be overridden by transformValue
|
|
364
|
-
// needsValue: true,
|
|
365
|
-
// transformValue: (value: any) => ({
|
|
366
|
-
// sql: `json_type(json_extract(metadata, '$."${toJsonPathKey(key)}"')) = 'object'
|
|
367
|
-
// AND json_patch(json_extract(metadata, '$."${toJsonPathKey(key)}"'), ?) = json_extract(metadata, '$."${toJsonPathKey(key)}"')`,
|
|
368
|
-
// values: [JSON.stringify(value)],
|
|
369
|
-
// }),
|
|
370
|
-
// }),
|
|
371
|
-
};
|
|
372
|
-
function isFilterResult(obj) {
|
|
373
|
-
return obj && typeof obj === "object" && typeof obj.sql === "string" && Array.isArray(obj.values);
|
|
374
|
-
}
|
|
375
|
-
var parseJsonPathKey = (key) => {
|
|
376
|
-
const parsedKey = parseFieldKey(key);
|
|
377
|
-
return parsedKey.replace(/\./g, '"."');
|
|
378
|
-
};
|
|
379
|
-
function escapeLikePattern(str) {
|
|
380
|
-
return str.replace(/([%_\\])/g, "\\$1");
|
|
381
|
-
}
|
|
382
|
-
function buildFilterQuery(filter) {
|
|
383
|
-
if (!filter) {
|
|
384
|
-
return { sql: "", values: [] };
|
|
385
|
-
}
|
|
386
|
-
const values = [];
|
|
387
|
-
const conditions = Object.entries(filter).map(([key, value]) => {
|
|
388
|
-
const condition = buildCondition(key, value);
|
|
389
|
-
values.push(...condition.values);
|
|
390
|
-
return condition.sql;
|
|
391
|
-
}).join(" AND ");
|
|
392
|
-
return {
|
|
393
|
-
sql: conditions ? `WHERE ${conditions}` : "",
|
|
394
|
-
values
|
|
395
|
-
};
|
|
396
|
-
}
|
|
397
|
-
function buildCondition(key, value, parentPath) {
|
|
398
|
-
if (["$and", "$or", "$not", "$nor"].includes(key)) {
|
|
399
|
-
return handleLogicalOperator(key, value);
|
|
400
|
-
}
|
|
401
|
-
if (!value || typeof value !== "object") {
|
|
402
|
-
return {
|
|
403
|
-
sql: `json_extract(metadata, '$."${key.replace(/\./g, '"."')}"') = ?`,
|
|
404
|
-
values: [value]
|
|
405
|
-
};
|
|
406
|
-
}
|
|
407
|
-
return handleOperator(key, value);
|
|
408
|
-
}
|
|
409
|
-
function handleLogicalOperator(key, value, parentPath) {
|
|
410
|
-
if (!value || value.length === 0) {
|
|
411
|
-
switch (key) {
|
|
412
|
-
case "$and":
|
|
413
|
-
case "$nor":
|
|
414
|
-
return { sql: "true", values: [] };
|
|
415
|
-
case "$or":
|
|
416
|
-
return { sql: "false", values: [] };
|
|
417
|
-
case "$not":
|
|
418
|
-
throw new Error("$not operator cannot be empty");
|
|
419
|
-
default:
|
|
420
|
-
return { sql: "true", values: [] };
|
|
421
|
-
}
|
|
422
|
-
}
|
|
423
|
-
if (key === "$not") {
|
|
424
|
-
const entries = Object.entries(value);
|
|
425
|
-
const conditions2 = entries.map(([fieldKey, fieldValue]) => buildCondition(fieldKey, fieldValue));
|
|
426
|
-
return {
|
|
427
|
-
sql: `NOT (${conditions2.map((c) => c.sql).join(" AND ")})`,
|
|
428
|
-
values: conditions2.flatMap((c) => c.values)
|
|
429
|
-
};
|
|
430
|
-
}
|
|
431
|
-
const values = [];
|
|
432
|
-
const joinOperator = key === "$or" || key === "$nor" ? "OR" : "AND";
|
|
433
|
-
const conditions = Array.isArray(value) ? value.map((f) => {
|
|
434
|
-
const entries = Object.entries(f);
|
|
435
|
-
return entries.map(([k, v]) => buildCondition(k, v));
|
|
436
|
-
}) : [buildCondition(key, value)];
|
|
437
|
-
const joined = conditions.flat().map((c) => {
|
|
438
|
-
values.push(...c.values);
|
|
439
|
-
return c.sql;
|
|
440
|
-
}).join(` ${joinOperator} `);
|
|
441
|
-
return {
|
|
442
|
-
sql: key === "$nor" ? `NOT (${joined})` : `(${joined})`,
|
|
443
|
-
values
|
|
444
|
-
};
|
|
445
|
-
}
|
|
446
|
-
function handleOperator(key, value) {
|
|
447
|
-
if (typeof value === "object" && !Array.isArray(value)) {
|
|
448
|
-
const entries = Object.entries(value);
|
|
449
|
-
const results = entries.map(
|
|
450
|
-
([operator2, operatorValue2]) => operator2 === "$not" ? {
|
|
451
|
-
sql: `NOT (${Object.entries(operatorValue2).map(([op, val]) => processOperator(key, op, val).sql).join(" AND ")})`,
|
|
452
|
-
values: Object.entries(operatorValue2).flatMap(
|
|
453
|
-
([op, val]) => processOperator(key, op, val).values
|
|
454
|
-
)
|
|
455
|
-
} : processOperator(key, operator2, operatorValue2)
|
|
456
|
-
);
|
|
457
|
-
return {
|
|
458
|
-
sql: `(${results.map((r) => r.sql).join(" AND ")})`,
|
|
459
|
-
values: results.flatMap((r) => r.values)
|
|
460
|
-
};
|
|
461
|
-
}
|
|
462
|
-
const [[operator, operatorValue] = []] = Object.entries(value);
|
|
463
|
-
return processOperator(key, operator, operatorValue);
|
|
464
|
-
}
|
|
465
|
-
var processOperator = (key, operator, operatorValue) => {
|
|
466
|
-
if (!operator.startsWith("$") || !FILTER_OPERATORS[operator]) {
|
|
467
|
-
throw new Error(`Invalid operator: ${operator}`);
|
|
468
|
-
}
|
|
469
|
-
const operatorFn = FILTER_OPERATORS[operator];
|
|
470
|
-
const operatorResult = operatorFn(key, operatorValue);
|
|
471
|
-
if (!operatorResult.needsValue) {
|
|
472
|
-
return { sql: operatorResult.sql, values: [] };
|
|
473
|
-
}
|
|
474
|
-
const transformed = operatorResult.transformValue ? operatorResult.transformValue() : operatorValue;
|
|
475
|
-
if (isFilterResult(transformed)) {
|
|
476
|
-
return transformed;
|
|
477
|
-
}
|
|
478
|
-
return {
|
|
479
|
-
sql: operatorResult.sql,
|
|
480
|
-
values: Array.isArray(transformed) ? transformed : [transformed]
|
|
481
|
-
};
|
|
482
|
-
};
|
|
483
|
-
|
|
484
|
-
// src/vector/libsql/index.ts
|
|
485
|
-
var LibSQLVector = class extends MastraVector {
|
|
486
|
-
turso;
|
|
487
|
-
constructor({
|
|
488
|
-
connectionUrl,
|
|
489
|
-
authToken,
|
|
490
|
-
syncUrl,
|
|
491
|
-
syncInterval
|
|
492
|
-
}) {
|
|
493
|
-
super();
|
|
494
|
-
this.turso = createClient({
|
|
495
|
-
url: this.rewriteDbUrl(connectionUrl),
|
|
496
|
-
syncUrl,
|
|
497
|
-
authToken,
|
|
498
|
-
syncInterval
|
|
499
|
-
});
|
|
500
|
-
if (connectionUrl.includes(`file:`) || connectionUrl.includes(`:memory:`)) {
|
|
501
|
-
void this.turso.execute({
|
|
502
|
-
sql: "PRAGMA journal_mode=WAL;",
|
|
503
|
-
args: {}
|
|
504
|
-
});
|
|
505
|
-
}
|
|
506
|
-
}
|
|
507
|
-
// If we're in the .mastra/output directory, use the dir outside .mastra dir
|
|
508
|
-
// reason we need to do this is libsql relative file paths are based on cwd, not current file path
|
|
509
|
-
// since mastra dev sets cwd to .mastra/output this means running an agent directly vs running with mastra dev
|
|
510
|
-
// will put db files in different locations, leading to an inconsistent experience between the two.
|
|
511
|
-
// Ex: with `file:ex.db`
|
|
512
|
-
// 1. `mastra dev`: ${cwd}/.mastra/output/ex.db
|
|
513
|
-
// 2. `tsx src/index.ts`: ${cwd}/ex.db
|
|
514
|
-
// so if we're in .mastra/output we need to rewrite the file url to be relative to the project root dir
|
|
515
|
-
// or the experience will be inconsistent
|
|
516
|
-
// this means `file:` urls are always relative to project root
|
|
517
|
-
// TODO: can we make this easier via bundling? https://github.com/mastra-ai/mastra/pull/2783#pullrequestreview-2662444241
|
|
518
|
-
rewriteDbUrl(url) {
|
|
519
|
-
if (url.startsWith("file:")) {
|
|
520
|
-
const pathPart = url.slice("file:".length);
|
|
521
|
-
if (isAbsolute(pathPart)) {
|
|
522
|
-
return url;
|
|
523
|
-
}
|
|
524
|
-
const cwd = process.cwd();
|
|
525
|
-
if (cwd.includes(".mastra") && (cwd.endsWith(`output`) || cwd.endsWith(`output/`) || cwd.endsWith(`output\\`))) {
|
|
526
|
-
const baseDir = join(cwd, `..`, `..`);
|
|
527
|
-
const fullPath = resolve(baseDir, pathPart);
|
|
528
|
-
this.logger.debug(
|
|
529
|
-
`Initializing LibSQL db with url ${url} with relative file path from inside .mastra/output directory. Rewriting relative file url to "file:${fullPath}". This ensures it's outside the .mastra/output directory.`
|
|
530
|
-
);
|
|
531
|
-
return `file:${fullPath}`;
|
|
532
|
-
}
|
|
533
|
-
}
|
|
534
|
-
return url;
|
|
535
|
-
}
|
|
536
|
-
transformFilter(filter) {
|
|
537
|
-
const translator = new LibSQLFilterTranslator();
|
|
538
|
-
return translator.translate(filter);
|
|
539
|
-
}
|
|
540
|
-
async query(...args) {
|
|
541
|
-
const params = this.normalizeArgs("query", args, ["minScore"]);
|
|
542
|
-
try {
|
|
543
|
-
const { indexName, queryVector, topK = 10, filter, includeVector = false, minScore = 0 } = params;
|
|
544
|
-
if (!Number.isInteger(topK) || topK <= 0) {
|
|
545
|
-
throw new Error("topK must be a positive integer");
|
|
546
|
-
}
|
|
547
|
-
if (!Array.isArray(queryVector) || !queryVector.every((x) => typeof x === "number" && Number.isFinite(x))) {
|
|
548
|
-
throw new Error("queryVector must be an array of finite numbers");
|
|
549
|
-
}
|
|
550
|
-
const parsedIndexName = parseSqlIdentifier(indexName, "index name");
|
|
551
|
-
const vectorStr = `[${queryVector.join(",")}]`;
|
|
552
|
-
const translatedFilter = this.transformFilter(filter);
|
|
553
|
-
const { sql: filterQuery, values: filterValues } = buildFilterQuery(translatedFilter);
|
|
554
|
-
filterValues.push(minScore);
|
|
555
|
-
filterValues.push(topK);
|
|
556
|
-
const query = `
|
|
557
|
-
WITH vector_scores AS (
|
|
558
|
-
SELECT
|
|
559
|
-
vector_id as id,
|
|
560
|
-
(1-vector_distance_cos(embedding, '${vectorStr}')) as score,
|
|
561
|
-
metadata
|
|
562
|
-
${includeVector ? ", vector_extract(embedding) as embedding" : ""}
|
|
563
|
-
FROM ${parsedIndexName}
|
|
564
|
-
${filterQuery}
|
|
565
|
-
)
|
|
566
|
-
SELECT *
|
|
567
|
-
FROM vector_scores
|
|
568
|
-
WHERE score > ?
|
|
569
|
-
ORDER BY score DESC
|
|
570
|
-
LIMIT ?`;
|
|
571
|
-
const result = await this.turso.execute({
|
|
572
|
-
sql: query,
|
|
573
|
-
args: filterValues
|
|
574
|
-
});
|
|
575
|
-
return result.rows.map(({ id, score, metadata, embedding }) => ({
|
|
576
|
-
id,
|
|
577
|
-
score,
|
|
578
|
-
metadata: JSON.parse(metadata ?? "{}"),
|
|
579
|
-
...includeVector && embedding && { vector: JSON.parse(embedding) }
|
|
580
|
-
}));
|
|
581
|
-
} finally {
|
|
582
|
-
}
|
|
583
|
-
}
|
|
584
|
-
async upsert(...args) {
|
|
585
|
-
const params = this.normalizeArgs("upsert", args);
|
|
586
|
-
const { indexName, vectors, metadata, ids } = params;
|
|
587
|
-
const tx = await this.turso.transaction("write");
|
|
588
|
-
try {
|
|
589
|
-
const parsedIndexName = parseSqlIdentifier(indexName, "index name");
|
|
590
|
-
const vectorIds = ids || vectors.map(() => crypto.randomUUID());
|
|
591
|
-
for (let i = 0; i < vectors.length; i++) {
|
|
592
|
-
const query = `
|
|
593
|
-
INSERT INTO ${parsedIndexName} (vector_id, embedding, metadata)
|
|
594
|
-
VALUES (?, vector32(?), ?)
|
|
595
|
-
ON CONFLICT(vector_id) DO UPDATE SET
|
|
596
|
-
embedding = vector32(?),
|
|
597
|
-
metadata = ?
|
|
598
|
-
`;
|
|
599
|
-
await tx.execute({
|
|
600
|
-
sql: query,
|
|
601
|
-
// @ts-ignore
|
|
602
|
-
args: [
|
|
603
|
-
vectorIds[i],
|
|
604
|
-
JSON.stringify(vectors[i]),
|
|
605
|
-
JSON.stringify(metadata?.[i] || {}),
|
|
606
|
-
JSON.stringify(vectors[i]),
|
|
607
|
-
JSON.stringify(metadata?.[i] || {})
|
|
608
|
-
]
|
|
609
|
-
});
|
|
610
|
-
}
|
|
611
|
-
await tx.commit();
|
|
612
|
-
return vectorIds;
|
|
613
|
-
} catch (error) {
|
|
614
|
-
await tx.rollback();
|
|
615
|
-
if (error instanceof Error && error.message?.includes("dimensions are different")) {
|
|
616
|
-
const match = error.message.match(/dimensions are different: (\d+) != (\d+)/);
|
|
617
|
-
if (match) {
|
|
618
|
-
const [, actual, expected] = match;
|
|
619
|
-
throw new Error(
|
|
620
|
-
`Vector dimension mismatch: Index "${indexName}" expects ${expected} dimensions but got ${actual} dimensions. Either use a matching embedding model or delete and recreate the index with the new dimension.`
|
|
621
|
-
);
|
|
622
|
-
}
|
|
623
|
-
}
|
|
624
|
-
throw error;
|
|
625
|
-
}
|
|
626
|
-
}
|
|
627
|
-
async createIndex(...args) {
|
|
628
|
-
const params = this.normalizeArgs("createIndex", args);
|
|
629
|
-
const { indexName, dimension } = params;
|
|
630
|
-
try {
|
|
631
|
-
if (!Number.isInteger(dimension) || dimension <= 0) {
|
|
632
|
-
throw new Error("Dimension must be a positive integer");
|
|
633
|
-
}
|
|
634
|
-
const parsedIndexName = parseSqlIdentifier(indexName, "index name");
|
|
635
|
-
await this.turso.execute({
|
|
636
|
-
sql: `
|
|
637
|
-
CREATE TABLE IF NOT EXISTS ${parsedIndexName} (
|
|
638
|
-
id SERIAL PRIMARY KEY,
|
|
639
|
-
vector_id TEXT UNIQUE NOT NULL,
|
|
640
|
-
embedding F32_BLOB(${dimension}),
|
|
641
|
-
metadata TEXT DEFAULT '{}'
|
|
642
|
-
);
|
|
643
|
-
`,
|
|
644
|
-
args: []
|
|
645
|
-
});
|
|
646
|
-
await this.turso.execute({
|
|
647
|
-
sql: `
|
|
648
|
-
CREATE INDEX IF NOT EXISTS ${parsedIndexName}_vector_idx
|
|
649
|
-
ON ${parsedIndexName} (libsql_vector_idx(embedding))
|
|
650
|
-
`,
|
|
651
|
-
args: []
|
|
652
|
-
});
|
|
653
|
-
} catch (error) {
|
|
654
|
-
console.error("Failed to create vector table:", error);
|
|
655
|
-
throw error;
|
|
656
|
-
} finally {
|
|
657
|
-
}
|
|
658
|
-
}
|
|
659
|
-
async deleteIndex(...args) {
|
|
660
|
-
const params = this.normalizeArgs("deleteIndex", args);
|
|
661
|
-
const { indexName } = params;
|
|
662
|
-
try {
|
|
663
|
-
const parsedIndexName = parseSqlIdentifier(indexName, "index name");
|
|
664
|
-
await this.turso.execute({
|
|
665
|
-
sql: `DROP TABLE IF EXISTS ${parsedIndexName}`,
|
|
666
|
-
args: []
|
|
667
|
-
});
|
|
668
|
-
} catch (error) {
|
|
669
|
-
console.error("Failed to delete vector table:", error);
|
|
670
|
-
throw new Error(`Failed to delete vector table: ${error.message}`);
|
|
671
|
-
} finally {
|
|
672
|
-
}
|
|
673
|
-
}
|
|
674
|
-
async listIndexes() {
|
|
675
|
-
try {
|
|
676
|
-
const vectorTablesQuery = `
|
|
677
|
-
SELECT name FROM sqlite_master
|
|
678
|
-
WHERE type='table'
|
|
679
|
-
AND sql LIKE '%F32_BLOB%';
|
|
680
|
-
`;
|
|
681
|
-
const result = await this.turso.execute({
|
|
682
|
-
sql: vectorTablesQuery,
|
|
683
|
-
args: []
|
|
684
|
-
});
|
|
685
|
-
return result.rows.map((row) => row.name);
|
|
686
|
-
} catch (error) {
|
|
687
|
-
throw new Error(`Failed to list vector tables: ${error.message}`);
|
|
688
|
-
}
|
|
689
|
-
}
|
|
690
|
-
/**
|
|
691
|
-
* Retrieves statistics about a vector index.
|
|
692
|
-
*
|
|
693
|
-
* @param params - The parameters for describing an index
|
|
694
|
-
* @param params.indexName - The name of the index to describe
|
|
695
|
-
* @returns A promise that resolves to the index statistics including dimension, count and metric
|
|
696
|
-
*/
|
|
697
|
-
async describeIndex(...args) {
|
|
698
|
-
try {
|
|
699
|
-
const { indexName } = this.normalizeArgs("describeIndex", args);
|
|
700
|
-
const parsedIndexName = parseSqlIdentifier(indexName, "index name");
|
|
701
|
-
const tableInfoQuery = `
|
|
702
|
-
SELECT sql
|
|
703
|
-
FROM sqlite_master
|
|
704
|
-
WHERE type='table'
|
|
705
|
-
AND name = ?;
|
|
706
|
-
`;
|
|
707
|
-
const tableInfo = await this.turso.execute({
|
|
708
|
-
sql: tableInfoQuery,
|
|
709
|
-
args: [parsedIndexName]
|
|
710
|
-
});
|
|
711
|
-
if (!tableInfo.rows[0]?.sql) {
|
|
712
|
-
throw new Error(`Table ${parsedIndexName} not found`);
|
|
713
|
-
}
|
|
714
|
-
const dimension = parseInt(tableInfo.rows[0].sql.match(/F32_BLOB\((\d+)\)/)?.[1] || "0");
|
|
715
|
-
const countQuery = `
|
|
716
|
-
SELECT COUNT(*) as count
|
|
717
|
-
FROM ${parsedIndexName};
|
|
718
|
-
`;
|
|
719
|
-
const countResult = await this.turso.execute({
|
|
720
|
-
sql: countQuery,
|
|
721
|
-
args: []
|
|
722
|
-
});
|
|
723
|
-
const metric = "cosine";
|
|
724
|
-
return {
|
|
725
|
-
dimension,
|
|
726
|
-
count: countResult?.rows?.[0]?.count ?? 0,
|
|
727
|
-
metric
|
|
728
|
-
};
|
|
729
|
-
} catch (e) {
|
|
730
|
-
throw new Error(`Failed to describe vector table: ${e.message}`);
|
|
731
|
-
}
|
|
732
|
-
}
|
|
733
|
-
/**
 * @deprecated Use {@link updateVector} instead. This method will be removed on May 20th, 2025.
 *
 * Updates a vector by its ID with the provided vector and/or metadata.
 * @param indexName - The name of the index containing the vector.
 * @param id - The ID of the vector to update.
 * @param update - An object containing the vector and/or metadata to update.
 * @param update.vector - An optional array of numbers representing the new vector.
 * @param update.metadata - An optional record containing the new metadata.
 * @returns A promise that resolves when the update is complete.
 * @throws Will throw an error if no updates are provided or if the update operation fails.
 */
async updateIndexById(indexName, id, update) {
  // Emit a runtime deprecation notice, then delegate to the replacement API.
  this.logger.warn(
    `Deprecation Warning: updateIndexById() is deprecated.
      Please use updateVector() instead.
      updateIndexById() will be removed on May 20th, 2025.`
  );
  await this.updateVector({ indexName, id, update });
}
|
|
753
|
-
/**
|
|
754
|
-
* Updates a vector by its ID with the provided vector and/or metadata.
|
|
755
|
-
*
|
|
756
|
-
* @param indexName - The name of the index containing the vector.
|
|
757
|
-
* @param id - The ID of the vector to update.
|
|
758
|
-
* @param update - An object containing the vector and/or metadata to update.
|
|
759
|
-
* @param update.vector - An optional array of numbers representing the new vector.
|
|
760
|
-
* @param update.metadata - An optional record containing the new metadata.
|
|
761
|
-
* @returns A promise that resolves when the update is complete.
|
|
762
|
-
* @throws Will throw an error if no updates are provided or if the update operation fails.
|
|
763
|
-
*/
|
|
764
|
-
async updateVector(...args) {
|
|
765
|
-
const params = this.normalizeArgs("updateVector", args);
|
|
766
|
-
const { indexName, id, update } = params;
|
|
767
|
-
try {
|
|
768
|
-
const parsedIndexName = parseSqlIdentifier(indexName, "index name");
|
|
769
|
-
const updates = [];
|
|
770
|
-
const args2 = [];
|
|
771
|
-
if (update.vector) {
|
|
772
|
-
updates.push("embedding = vector32(?)");
|
|
773
|
-
args2.push(JSON.stringify(update.vector));
|
|
774
|
-
}
|
|
775
|
-
if (update.metadata) {
|
|
776
|
-
updates.push("metadata = ?");
|
|
777
|
-
args2.push(JSON.stringify(update.metadata));
|
|
778
|
-
}
|
|
779
|
-
if (updates.length === 0) {
|
|
780
|
-
throw new Error("No updates provided");
|
|
781
|
-
}
|
|
782
|
-
args2.push(id);
|
|
783
|
-
const query = `
|
|
784
|
-
UPDATE ${parsedIndexName}
|
|
785
|
-
SET ${updates.join(", ")}
|
|
786
|
-
WHERE vector_id = ?;
|
|
787
|
-
`;
|
|
788
|
-
await this.turso.execute({
|
|
789
|
-
sql: query,
|
|
790
|
-
args: args2
|
|
791
|
-
});
|
|
792
|
-
} catch (error) {
|
|
793
|
-
throw new Error(`Failed to update vector by id: ${id} for index: ${indexName}: ${error.message}`);
|
|
794
|
-
}
|
|
795
|
-
}
|
|
796
|
-
/**
 * @deprecated Use {@link deleteVector} instead. This method will be removed on May 20th, 2025.
 *
 * Deletes a vector by its ID.
 * @param indexName - The name of the index containing the vector.
 * @param id - The ID of the vector to delete.
 * @returns A promise that resolves when the deletion is complete.
 * @throws Will throw an error if the deletion operation fails.
 */
async deleteIndexById(indexName, id) {
  // Emit a runtime deprecation notice, then delegate to the replacement API.
  this.logger.warn(
    `Deprecation Warning: deleteIndexById() is deprecated.
      Please use deleteVector() instead.
      deleteIndexById() will be removed on May 20th, 2025.`
  );
  await this.deleteVector({ indexName, id });
}
|
|
813
|
-
/**
|
|
814
|
-
* Deletes a vector by its ID.
|
|
815
|
-
* @param indexName - The name of the index containing the vector.
|
|
816
|
-
* @param id - The ID of the vector to delete.
|
|
817
|
-
* @returns A promise that resolves when the deletion is complete.
|
|
818
|
-
* @throws Will throw an error if the deletion operation fails.
|
|
819
|
-
*/
|
|
820
|
-
async deleteVector(...args) {
|
|
821
|
-
const params = this.normalizeArgs("deleteVector", args);
|
|
822
|
-
const { indexName, id } = params;
|
|
823
|
-
try {
|
|
824
|
-
const parsedIndexName = parseSqlIdentifier(indexName, "index name");
|
|
825
|
-
await this.turso.execute({
|
|
826
|
-
sql: `DELETE FROM ${parsedIndexName} WHERE vector_id = ?`,
|
|
827
|
-
args: [id]
|
|
828
|
-
});
|
|
829
|
-
} catch (error) {
|
|
830
|
-
throw new Error(`Failed to delete vector by id: ${id} for index: ${indexName}: ${error.message}`);
|
|
831
|
-
}
|
|
832
|
-
}
|
|
833
|
-
async truncateIndex(...args) {
|
|
834
|
-
const params = this.normalizeArgs("truncateIndex", args);
|
|
835
|
-
const { indexName } = params;
|
|
836
|
-
await this.turso.execute({
|
|
837
|
-
sql: `DELETE FROM ${parseSqlIdentifier(indexName, "index name")}`,
|
|
838
|
-
args: []
|
|
839
|
-
});
|
|
840
|
-
}
|
|
841
|
-
};
|
|
842
|
-
|
|
843
|
-
export { LibSQLVector };
|