@mastra/core 0.5.0-alpha.8 → 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/agent/index.cjs +3 -2100
- package/dist/agent/index.d.cts +1 -1
- package/dist/agent/index.d.ts +1 -1
- package/dist/agent/index.js +1 -1
- package/dist/{base-CTdONy0_.d.cts → base-CIPKleAU.d.cts} +103 -70
- package/dist/{base-DIn_km7X.d.ts → base-C_Oq53qk.d.ts} +103 -70
- package/dist/base.cjs +5 -140
- package/dist/bundler/index.cjs +5 -160
- package/dist/chunk-2W2GYEYQ.cjs +25 -0
- package/dist/chunk-3ASEZT7U.cjs +1586 -0
- package/dist/chunk-43Y7WG5W.cjs +335 -0
- package/dist/{chunk-HBHPTMAC.js → chunk-4Y74D74B.js} +46 -6
- package/dist/chunk-ENT7U27Y.cjs +37 -0
- package/dist/chunk-F5UYWPV4.cjs +14 -0
- package/dist/chunk-FL3GQXQ2.cjs +218 -0
- package/dist/chunk-FRQFWZDN.cjs +2 -0
- package/dist/chunk-GXQRMKSN.cjs +367 -0
- package/dist/chunk-HJPMYDWO.cjs +37 -0
- package/dist/chunk-IIWRJFLQ.cjs +51 -0
- package/dist/chunk-KFQ7Z3PO.cjs +347 -0
- package/dist/{chunk-SWDQYPJS.js → chunk-KP5UAFLN.js} +3 -2
- package/dist/chunk-KPKFLQFR.cjs +12 -0
- package/dist/{chunk-RRJB4TCC.js → chunk-MLFXOST6.js} +1 -1
- package/dist/{chunk-KBSR2LLT.js → chunk-OD7ZMKHY.js} +176 -63
- package/dist/chunk-OTFLHXHZ.cjs +65 -0
- package/dist/chunk-RWTSGWWL.cjs +81 -0
- package/dist/chunk-ST5RMVLG.cjs +87 -0
- package/dist/chunk-SYQ7NK2E.cjs +24 -0
- package/dist/chunk-UZNQG7QO.cjs +1868 -0
- package/dist/chunk-V5ORZPFW.cjs +38 -0
- package/dist/chunk-VA4P7QJT.cjs +443 -0
- package/dist/chunk-WB2HREXE.cjs +166 -0
- package/dist/chunk-WOMOGDGR.cjs +691 -0
- package/dist/chunk-XB2TJ7LX.cjs +408 -0
- package/dist/{chunk-QABMKXI3.js → chunk-XF2FMJYK.js} +1 -1
- package/dist/chunk-XLSROQ26.cjs +91 -0
- package/dist/chunk-YK3XJ52U.cjs +192 -0
- package/dist/{chunk-SF5GHHOQ.js → chunk-YPD6BQIM.js} +121 -93
- package/dist/deployer/index.cjs +5 -167
- package/dist/eval/index.cjs +9 -105
- package/dist/eval/index.d.cts +1 -1
- package/dist/eval/index.d.ts +1 -1
- package/dist/hooks/index.cjs +14 -83
- package/dist/index.cjs +253 -7470
- package/dist/index.d.cts +4 -4
- package/dist/index.d.ts +4 -4
- package/dist/index.js +7 -7
- package/dist/integration/index.cjs +9 -108
- package/dist/integration/index.d.cts +1 -1
- package/dist/integration/index.d.ts +1 -1
- package/dist/llm/index.d.cts +1 -1
- package/dist/llm/index.d.ts +1 -1
- package/dist/logger/index.cjs +33 -161
- package/dist/mastra/index.cjs +3 -1755
- package/dist/mastra/index.d.cts +1 -1
- package/dist/mastra/index.d.ts +1 -1
- package/dist/mastra/index.js +1 -1
- package/dist/memory/index.cjs +4 -2050
- package/dist/memory/index.d.cts +1 -1
- package/dist/memory/index.d.ts +1 -1
- package/dist/memory/index.js +1 -1
- package/dist/relevance/index.cjs +10 -2161
- package/dist/relevance/index.d.cts +19 -2
- package/dist/relevance/index.d.ts +19 -2
- package/dist/relevance/index.js +1 -1
- package/dist/storage/index.cjs +29 -367
- package/dist/storage/index.d.cts +1 -1
- package/dist/storage/index.d.ts +1 -1
- package/dist/storage/libsql/index.cjs +9 -798
- package/dist/storage/libsql/index.d.cts +1 -1
- package/dist/storage/libsql/index.d.ts +1 -1
- package/dist/telemetry/index.cjs +21 -408
- package/dist/telemetry/index.d.cts +1 -1
- package/dist/telemetry/index.d.ts +1 -1
- package/dist/tools/index.cjs +11 -22
- package/dist/tools/index.d.cts +3 -3
- package/dist/tools/index.d.ts +3 -3
- package/dist/tts/index.cjs +3 -328
- package/dist/utils.cjs +41 -309
- package/dist/utils.d.cts +10 -4
- package/dist/utils.d.ts +10 -4
- package/dist/utils.js +1 -1
- package/dist/vector/filter/index.cjs +7 -189
- package/dist/vector/index.cjs +5 -172
- package/dist/vector/libsql/index.cjs +9 -1047
- package/dist/voice/index.cjs +8 -306
- package/dist/workflows/index.cjs +65 -1925
- package/dist/workflows/index.d.cts +4 -3
- package/dist/workflows/index.d.ts +4 -3
- package/dist/workflows/index.js +1 -1
- package/package.json +27 -27
package/dist/memory/index.cjs
CHANGED
|
@@ -1,2056 +1,10 @@
|
|
|
1
1
|
'use strict';
|
|
2
2
|
|
|
3
|
-
var
|
|
4
|
-
var path = require('path');
|
|
5
|
-
var stream = require('stream');
|
|
6
|
-
var pino = require('pino');
|
|
7
|
-
var pretty = require('pino-pretty');
|
|
8
|
-
var client = require('@libsql/client');
|
|
9
|
-
var ai = require('ai');
|
|
10
|
-
var node_modulesPath = require('node_modules-path');
|
|
3
|
+
var chunkKFQ7Z3PO_cjs = require('../chunk-KFQ7Z3PO.cjs');
|
|
11
4
|
|
|
12
|
-
function _interopDefault (e) { return e && e.__esModule ? e : { default: e }; }
|
|
13
5
|
|
|
14
|
-
var path__default = /*#__PURE__*/_interopDefault(path);
|
|
15
|
-
var pino__default = /*#__PURE__*/_interopDefault(pino);
|
|
16
|
-
var pretty__default = /*#__PURE__*/_interopDefault(pretty);
|
|
17
|
-
var node_modulesPath__default = /*#__PURE__*/_interopDefault(node_modulesPath);
|
|
18
6
|
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
var LogLevel = {
|
|
23
|
-
INFO: "info"};
|
|
24
|
-
var Logger = class {
|
|
25
|
-
logger;
|
|
26
|
-
transports;
|
|
27
|
-
constructor(options = {}) {
|
|
28
|
-
this.transports = options.transports || {};
|
|
29
|
-
const transportsAry = Object.entries(this.transports);
|
|
30
|
-
this.logger = pino__default.default(
|
|
31
|
-
{
|
|
32
|
-
name: options.name || "app",
|
|
33
|
-
level: options.level || LogLevel.INFO,
|
|
34
|
-
formatters: {
|
|
35
|
-
level: (label) => {
|
|
36
|
-
return {
|
|
37
|
-
level: label
|
|
38
|
-
};
|
|
39
|
-
}
|
|
40
|
-
}
|
|
41
|
-
},
|
|
42
|
-
options.overrideDefaultTransports ? options?.transports?.default : transportsAry.length === 0 ? pretty__default.default({
|
|
43
|
-
colorize: true,
|
|
44
|
-
levelFirst: true,
|
|
45
|
-
ignore: "pid,hostname",
|
|
46
|
-
colorizeObjects: true,
|
|
47
|
-
translateTime: "SYS:standard",
|
|
48
|
-
singleLine: false
|
|
49
|
-
}) : pino__default.default.multistream([
|
|
50
|
-
...transportsAry.map(([_, transport]) => ({
|
|
51
|
-
stream: transport,
|
|
52
|
-
level: options.level || LogLevel.INFO
|
|
53
|
-
})),
|
|
54
|
-
{
|
|
55
|
-
stream: pretty__default.default({
|
|
56
|
-
colorize: true,
|
|
57
|
-
levelFirst: true,
|
|
58
|
-
ignore: "pid,hostname",
|
|
59
|
-
colorizeObjects: true,
|
|
60
|
-
translateTime: "SYS:standard",
|
|
61
|
-
singleLine: false
|
|
62
|
-
}),
|
|
63
|
-
level: options.level || LogLevel.INFO
|
|
64
|
-
}
|
|
65
|
-
])
|
|
66
|
-
);
|
|
67
|
-
}
|
|
68
|
-
debug(message, args = {}) {
|
|
69
|
-
this.logger.debug(args, message);
|
|
70
|
-
}
|
|
71
|
-
info(message, args = {}) {
|
|
72
|
-
this.logger.info(args, message);
|
|
73
|
-
}
|
|
74
|
-
warn(message, args = {}) {
|
|
75
|
-
this.logger.warn(args, message);
|
|
76
|
-
}
|
|
77
|
-
error(message, args = {}) {
|
|
78
|
-
this.logger.error(args, message);
|
|
79
|
-
}
|
|
80
|
-
// Stream creation for process output handling
|
|
81
|
-
createStream() {
|
|
82
|
-
return new stream.Transform({
|
|
83
|
-
transform: (chunk, _encoding, callback) => {
|
|
84
|
-
const line = chunk.toString().trim();
|
|
85
|
-
if (line) {
|
|
86
|
-
this.info(line);
|
|
87
|
-
}
|
|
88
|
-
callback(null, chunk);
|
|
89
|
-
}
|
|
90
|
-
});
|
|
91
|
-
}
|
|
92
|
-
async getLogs(transportId) {
|
|
93
|
-
if (!transportId || !this.transports[transportId]) {
|
|
94
|
-
return [];
|
|
95
|
-
}
|
|
96
|
-
return this.transports[transportId].getLogs();
|
|
97
|
-
}
|
|
98
|
-
async getLogsByRunId({ runId, transportId }) {
|
|
99
|
-
return this.transports[transportId]?.getLogsByRunId({ runId });
|
|
100
|
-
}
|
|
101
|
-
};
|
|
102
|
-
function createLogger(options) {
|
|
103
|
-
return new Logger(options);
|
|
104
|
-
}
|
|
105
|
-
|
|
106
|
-
// src/base.ts
|
|
107
|
-
var MastraBase = class {
|
|
108
|
-
component = RegisteredLogger.LLM;
|
|
109
|
-
logger;
|
|
110
|
-
name;
|
|
111
|
-
telemetry;
|
|
112
|
-
constructor({ component, name }) {
|
|
113
|
-
this.component = component || RegisteredLogger.LLM;
|
|
114
|
-
this.name = name;
|
|
115
|
-
this.logger = createLogger({ name: `${this.component} - ${this.name}` });
|
|
116
|
-
}
|
|
117
|
-
/**
|
|
118
|
-
* Set the logger for the agent
|
|
119
|
-
* @param logger
|
|
120
|
-
*/
|
|
121
|
-
__setLogger(logger) {
|
|
122
|
-
this.logger = logger;
|
|
123
|
-
this.logger.debug(`Logger updated [component=${this.component}] [name=${this.name}]`);
|
|
124
|
-
}
|
|
125
|
-
/**
|
|
126
|
-
* Set the telemetry for the
|
|
127
|
-
* @param telemetry
|
|
128
|
-
*/
|
|
129
|
-
__setTelemetry(telemetry) {
|
|
130
|
-
this.telemetry = telemetry;
|
|
131
|
-
this.logger.debug(`Telemetry updated [component=${this.component}] [tracer=${this.telemetry.tracer}]`);
|
|
132
|
-
}
|
|
133
|
-
/**
|
|
134
|
-
* Get the telemetry on the vector
|
|
135
|
-
* @returns telemetry
|
|
136
|
-
*/
|
|
137
|
-
__getTelemetry() {
|
|
138
|
-
return this.telemetry;
|
|
139
|
-
}
|
|
140
|
-
/*
|
|
141
|
-
get experimental_telemetry config
|
|
142
|
-
*/
|
|
143
|
-
get experimental_telemetry() {
|
|
144
|
-
return this.telemetry ? {
|
|
145
|
-
// tracer: this.telemetry.tracer,
|
|
146
|
-
tracer: this.telemetry.getBaggageTracer(),
|
|
147
|
-
isEnabled: !!this.telemetry.tracer
|
|
148
|
-
} : void 0;
|
|
149
|
-
}
|
|
150
|
-
};
|
|
151
|
-
|
|
152
|
-
// src/storage/constants.ts
|
|
153
|
-
var TABLE_WORKFLOW_SNAPSHOT = "mastra_workflow_snapshot";
|
|
154
|
-
var TABLE_EVALS = "mastra_evals";
|
|
155
|
-
var TABLE_MESSAGES = "mastra_messages";
|
|
156
|
-
var TABLE_THREADS = "mastra_threads";
|
|
157
|
-
var TABLE_TRACES = "mastra_traces";
|
|
158
|
-
|
|
159
|
-
// src/storage/base.ts
|
|
160
|
-
var MastraStorage = class extends MastraBase {
|
|
161
|
-
/** @deprecated import from { TABLE_WORKFLOW_SNAPSHOT } '@mastra/core/storage' instead */
|
|
162
|
-
static TABLE_WORKFLOW_SNAPSHOT = TABLE_WORKFLOW_SNAPSHOT;
|
|
163
|
-
/** @deprecated import from { TABLE_EVALS } '@mastra/core/storage' instead */
|
|
164
|
-
static TABLE_EVALS = TABLE_EVALS;
|
|
165
|
-
/** @deprecated import from { TABLE_MESSAGES } '@mastra/core/storage' instead */
|
|
166
|
-
static TABLE_MESSAGES = TABLE_MESSAGES;
|
|
167
|
-
/** @deprecated import from { TABLE_THREADS } '@mastra/core/storage' instead */
|
|
168
|
-
static TABLE_THREADS = TABLE_THREADS;
|
|
169
|
-
/** @deprecated import { TABLE_TRACES } from '@mastra/core/storage' instead */
|
|
170
|
-
static TABLE_TRACES = TABLE_TRACES;
|
|
171
|
-
hasInitialized = null;
|
|
172
|
-
shouldCacheInit = true;
|
|
173
|
-
constructor({ name }) {
|
|
174
|
-
super({
|
|
175
|
-
component: "STORAGE",
|
|
176
|
-
name
|
|
177
|
-
});
|
|
178
|
-
}
|
|
179
|
-
async __batchInsert({
|
|
180
|
-
tableName,
|
|
181
|
-
records
|
|
182
|
-
}) {
|
|
183
|
-
await this.init();
|
|
184
|
-
return this.batchInsert({ tableName, records });
|
|
185
|
-
}
|
|
186
|
-
async __getThreadById({ threadId }) {
|
|
187
|
-
await this.init();
|
|
188
|
-
return this.getThreadById({ threadId });
|
|
189
|
-
}
|
|
190
|
-
async __getThreadsByResourceId({ resourceId }) {
|
|
191
|
-
await this.init();
|
|
192
|
-
return this.getThreadsByResourceId({ resourceId });
|
|
193
|
-
}
|
|
194
|
-
async __saveThread({ thread }) {
|
|
195
|
-
await this.init();
|
|
196
|
-
return this.saveThread({ thread });
|
|
197
|
-
}
|
|
198
|
-
async __updateThread({
|
|
199
|
-
id,
|
|
200
|
-
title,
|
|
201
|
-
metadata
|
|
202
|
-
}) {
|
|
203
|
-
await this.init();
|
|
204
|
-
return this.updateThread({ id, title, metadata });
|
|
205
|
-
}
|
|
206
|
-
async __deleteThread({ threadId }) {
|
|
207
|
-
await this.init();
|
|
208
|
-
return this.deleteThread({ threadId });
|
|
209
|
-
}
|
|
210
|
-
async __getMessages({ threadId, selectBy, threadConfig }) {
|
|
211
|
-
await this.init();
|
|
212
|
-
return this.getMessages({ threadId, selectBy, threadConfig });
|
|
213
|
-
}
|
|
214
|
-
async __saveMessages({ messages }) {
|
|
215
|
-
await this.init();
|
|
216
|
-
return this.saveMessages({ messages });
|
|
217
|
-
}
|
|
218
|
-
async __getTraces({
|
|
219
|
-
scope,
|
|
220
|
-
page,
|
|
221
|
-
perPage,
|
|
222
|
-
attributes
|
|
223
|
-
}) {
|
|
224
|
-
await this.init();
|
|
225
|
-
return this.getTraces({ scope, page, perPage, attributes });
|
|
226
|
-
}
|
|
227
|
-
async init() {
|
|
228
|
-
if (this.shouldCacheInit && await this.hasInitialized) {
|
|
229
|
-
return;
|
|
230
|
-
}
|
|
231
|
-
this.hasInitialized = Promise.all([
|
|
232
|
-
this.createTable({
|
|
233
|
-
tableName: TABLE_WORKFLOW_SNAPSHOT,
|
|
234
|
-
schema: {
|
|
235
|
-
workflow_name: {
|
|
236
|
-
type: "text"
|
|
237
|
-
},
|
|
238
|
-
run_id: {
|
|
239
|
-
type: "text"
|
|
240
|
-
},
|
|
241
|
-
snapshot: {
|
|
242
|
-
type: "text"
|
|
243
|
-
},
|
|
244
|
-
createdAt: {
|
|
245
|
-
type: "timestamp"
|
|
246
|
-
},
|
|
247
|
-
updatedAt: {
|
|
248
|
-
type: "timestamp"
|
|
249
|
-
}
|
|
250
|
-
}
|
|
251
|
-
}),
|
|
252
|
-
this.createTable({
|
|
253
|
-
tableName: TABLE_EVALS,
|
|
254
|
-
schema: {
|
|
255
|
-
input: {
|
|
256
|
-
type: "text"
|
|
257
|
-
},
|
|
258
|
-
output: {
|
|
259
|
-
type: "text"
|
|
260
|
-
},
|
|
261
|
-
result: {
|
|
262
|
-
type: "jsonb"
|
|
263
|
-
},
|
|
264
|
-
agent_name: {
|
|
265
|
-
type: "text"
|
|
266
|
-
},
|
|
267
|
-
metric_name: {
|
|
268
|
-
type: "text"
|
|
269
|
-
},
|
|
270
|
-
instructions: {
|
|
271
|
-
type: "text"
|
|
272
|
-
},
|
|
273
|
-
test_info: {
|
|
274
|
-
type: "jsonb",
|
|
275
|
-
nullable: true
|
|
276
|
-
},
|
|
277
|
-
global_run_id: {
|
|
278
|
-
type: "text"
|
|
279
|
-
},
|
|
280
|
-
run_id: {
|
|
281
|
-
type: "text"
|
|
282
|
-
},
|
|
283
|
-
created_at: {
|
|
284
|
-
type: "timestamp"
|
|
285
|
-
}
|
|
286
|
-
}
|
|
287
|
-
}),
|
|
288
|
-
this.createTable({
|
|
289
|
-
tableName: TABLE_THREADS,
|
|
290
|
-
schema: {
|
|
291
|
-
id: { type: "text", nullable: false, primaryKey: true },
|
|
292
|
-
resourceId: { type: "text", nullable: false },
|
|
293
|
-
title: { type: "text", nullable: false },
|
|
294
|
-
metadata: { type: "text", nullable: true },
|
|
295
|
-
createdAt: { type: "timestamp", nullable: false },
|
|
296
|
-
updatedAt: { type: "timestamp", nullable: false }
|
|
297
|
-
}
|
|
298
|
-
}),
|
|
299
|
-
this.createTable({
|
|
300
|
-
tableName: TABLE_MESSAGES,
|
|
301
|
-
schema: {
|
|
302
|
-
id: { type: "text", nullable: false, primaryKey: true },
|
|
303
|
-
thread_id: { type: "text", nullable: false },
|
|
304
|
-
content: { type: "text", nullable: false },
|
|
305
|
-
role: { type: "text", nullable: false },
|
|
306
|
-
type: { type: "text", nullable: false },
|
|
307
|
-
createdAt: { type: "timestamp", nullable: false }
|
|
308
|
-
}
|
|
309
|
-
}),
|
|
310
|
-
this.createTable({
|
|
311
|
-
tableName: TABLE_TRACES,
|
|
312
|
-
schema: {
|
|
313
|
-
id: { type: "text", nullable: false, primaryKey: true },
|
|
314
|
-
parentSpanId: { type: "text", nullable: true },
|
|
315
|
-
name: { type: "text", nullable: false },
|
|
316
|
-
traceId: { type: "text", nullable: false },
|
|
317
|
-
scope: { type: "text", nullable: false },
|
|
318
|
-
kind: { type: "integer", nullable: false },
|
|
319
|
-
attributes: { type: "jsonb", nullable: true },
|
|
320
|
-
status: { type: "jsonb", nullable: true },
|
|
321
|
-
events: { type: "jsonb", nullable: true },
|
|
322
|
-
links: { type: "jsonb", nullable: true },
|
|
323
|
-
other: { type: "text", nullable: true },
|
|
324
|
-
startTime: { type: "bigint", nullable: false },
|
|
325
|
-
endTime: { type: "bigint", nullable: false },
|
|
326
|
-
createdAt: { type: "timestamp", nullable: false }
|
|
327
|
-
}
|
|
328
|
-
})
|
|
329
|
-
]).then(() => true);
|
|
330
|
-
await this.hasInitialized;
|
|
331
|
-
}
|
|
332
|
-
async persistWorkflowSnapshot({
|
|
333
|
-
workflowName,
|
|
334
|
-
runId,
|
|
335
|
-
snapshot
|
|
336
|
-
}) {
|
|
337
|
-
await this.init();
|
|
338
|
-
const data = {
|
|
339
|
-
workflow_name: workflowName,
|
|
340
|
-
run_id: runId,
|
|
341
|
-
snapshot,
|
|
342
|
-
createdAt: /* @__PURE__ */ new Date(),
|
|
343
|
-
updatedAt: /* @__PURE__ */ new Date()
|
|
344
|
-
};
|
|
345
|
-
this.logger.debug("Persisting workflow snapshot", { workflowName, runId, data });
|
|
346
|
-
await this.insert({
|
|
347
|
-
tableName: TABLE_WORKFLOW_SNAPSHOT,
|
|
348
|
-
record: data
|
|
349
|
-
});
|
|
350
|
-
}
|
|
351
|
-
async loadWorkflowSnapshot({
|
|
352
|
-
workflowName,
|
|
353
|
-
runId
|
|
354
|
-
}) {
|
|
355
|
-
if (!this.hasInitialized) {
|
|
356
|
-
await this.init();
|
|
357
|
-
}
|
|
358
|
-
this.logger.debug("Loading workflow snapshot", { workflowName, runId });
|
|
359
|
-
const d = await this.load({
|
|
360
|
-
tableName: TABLE_WORKFLOW_SNAPSHOT,
|
|
361
|
-
keys: { workflow_name: workflowName, run_id: runId }
|
|
362
|
-
});
|
|
363
|
-
return d ? d.snapshot : null;
|
|
364
|
-
}
|
|
365
|
-
async __getEvalsByAgentName(agentName, type) {
|
|
366
|
-
await this.init();
|
|
367
|
-
return this.getEvalsByAgentName(agentName, type);
|
|
368
|
-
}
|
|
369
|
-
};
|
|
370
|
-
|
|
371
|
-
// src/storage/libsql/index.ts
|
|
372
|
-
function safelyParseJSON(jsonString) {
|
|
373
|
-
try {
|
|
374
|
-
return JSON.parse(jsonString);
|
|
375
|
-
} catch {
|
|
376
|
-
return {};
|
|
377
|
-
}
|
|
378
|
-
}
|
|
379
|
-
var LibSQLStore = class extends MastraStorage {
|
|
380
|
-
client;
|
|
381
|
-
constructor({ config }) {
|
|
382
|
-
super({ name: `LibSQLStore` });
|
|
383
|
-
if (config.url === ":memory:") {
|
|
384
|
-
this.shouldCacheInit = false;
|
|
385
|
-
}
|
|
386
|
-
this.client = client.createClient({
|
|
387
|
-
url: this.rewriteDbUrl(config.url),
|
|
388
|
-
authToken: config.authToken
|
|
389
|
-
});
|
|
390
|
-
}
|
|
391
|
-
// If we're in the .mastra/output directory, use the dir outside .mastra dir
|
|
392
|
-
// reason we need to do this is libsql relative file paths are based on cwd, not current file path
|
|
393
|
-
// since mastra dev sets cwd to .mastra/output this means running an agent directly vs running with mastra dev
|
|
394
|
-
// will put db files in different locations, leading to an inconsistent experience between the two.
|
|
395
|
-
// Ex: with `file:ex.db`
|
|
396
|
-
// 1. `mastra dev`: ${cwd}/.mastra/output/ex.db
|
|
397
|
-
// 2. `tsx src/index.ts`: ${cwd}/ex.db
|
|
398
|
-
// so if we're in .mastra/output we need to rewrite the file url to be relative to the project root dir
|
|
399
|
-
// or the experience will be inconsistent
|
|
400
|
-
// this means `file:` urls are always relative to project root
|
|
401
|
-
// TODO: can we make this easier via bundling? https://github.com/mastra-ai/mastra/pull/2783#pullrequestreview-2662444241
|
|
402
|
-
rewriteDbUrl(url) {
|
|
403
|
-
if (url.startsWith("file:")) {
|
|
404
|
-
const pathPart = url.slice("file:".length);
|
|
405
|
-
if (path.isAbsolute(pathPart)) {
|
|
406
|
-
return url;
|
|
407
|
-
}
|
|
408
|
-
const cwd = process.cwd();
|
|
409
|
-
if (cwd.includes(".mastra") && (cwd.endsWith(`output`) || cwd.endsWith(`output/`) || cwd.endsWith(`output\\`))) {
|
|
410
|
-
const baseDir = path.join(cwd, `..`, `..`);
|
|
411
|
-
const fullPath = path.resolve(baseDir, pathPart);
|
|
412
|
-
this.logger.debug(
|
|
413
|
-
`Initializing LibSQL db with url ${url} with relative file path from inside .mastra/output directory. Rewriting relative file url to "file:${fullPath}". This ensures it's outside the .mastra/output directory.`
|
|
414
|
-
);
|
|
415
|
-
return `file:${fullPath}`;
|
|
416
|
-
}
|
|
417
|
-
}
|
|
418
|
-
return url;
|
|
419
|
-
}
|
|
420
|
-
getCreateTableSQL(tableName, schema) {
|
|
421
|
-
const columns = Object.entries(schema).map(([name, col]) => {
|
|
422
|
-
let type = col.type.toUpperCase();
|
|
423
|
-
if (type === "TEXT") type = "TEXT";
|
|
424
|
-
if (type === "TIMESTAMP") type = "TEXT";
|
|
425
|
-
const nullable = col.nullable ? "" : "NOT NULL";
|
|
426
|
-
const primaryKey = col.primaryKey ? "PRIMARY KEY" : "";
|
|
427
|
-
return `${name} ${type} ${nullable} ${primaryKey}`.trim();
|
|
428
|
-
});
|
|
429
|
-
if (tableName === TABLE_WORKFLOW_SNAPSHOT) {
|
|
430
|
-
const stmnt = `CREATE TABLE IF NOT EXISTS ${tableName} (
|
|
431
|
-
${columns.join(",\n")},
|
|
432
|
-
PRIMARY KEY (workflow_name, run_id)
|
|
433
|
-
)`;
|
|
434
|
-
return stmnt;
|
|
435
|
-
}
|
|
436
|
-
return `CREATE TABLE IF NOT EXISTS ${tableName} (${columns.join(", ")})`;
|
|
437
|
-
}
|
|
438
|
-
async createTable({
|
|
439
|
-
tableName,
|
|
440
|
-
schema
|
|
441
|
-
}) {
|
|
442
|
-
try {
|
|
443
|
-
this.logger.debug(`Creating database table`, { tableName, operation: "schema init" });
|
|
444
|
-
const sql = this.getCreateTableSQL(tableName, schema);
|
|
445
|
-
await this.client.execute(sql);
|
|
446
|
-
} catch (error) {
|
|
447
|
-
this.logger.error(`Error creating table ${tableName}: ${error}`);
|
|
448
|
-
throw error;
|
|
449
|
-
}
|
|
450
|
-
}
|
|
451
|
-
async clearTable({ tableName }) {
|
|
452
|
-
try {
|
|
453
|
-
await this.client.execute(`DELETE FROM ${tableName}`);
|
|
454
|
-
} catch (e) {
|
|
455
|
-
if (e instanceof Error) {
|
|
456
|
-
this.logger.error(e.message);
|
|
457
|
-
}
|
|
458
|
-
}
|
|
459
|
-
}
|
|
460
|
-
prepareStatement({ tableName, record }) {
|
|
461
|
-
const columns = Object.keys(record);
|
|
462
|
-
const values = Object.values(record).map((v) => {
|
|
463
|
-
if (typeof v === `undefined`) {
|
|
464
|
-
return null;
|
|
465
|
-
}
|
|
466
|
-
if (v instanceof Date) {
|
|
467
|
-
return v.toISOString();
|
|
468
|
-
}
|
|
469
|
-
return typeof v === "object" ? JSON.stringify(v) : v;
|
|
470
|
-
});
|
|
471
|
-
const placeholders = values.map(() => "?").join(", ");
|
|
472
|
-
return {
|
|
473
|
-
sql: `INSERT OR REPLACE INTO ${tableName} (${columns.join(", ")}) VALUES (${placeholders})`,
|
|
474
|
-
args: values
|
|
475
|
-
};
|
|
476
|
-
}
|
|
477
|
-
async insert({ tableName, record }) {
|
|
478
|
-
try {
|
|
479
|
-
await this.client.execute(
|
|
480
|
-
this.prepareStatement({
|
|
481
|
-
tableName,
|
|
482
|
-
record
|
|
483
|
-
})
|
|
484
|
-
);
|
|
485
|
-
} catch (error) {
|
|
486
|
-
this.logger.error(`Error upserting into table ${tableName}: ${error}`);
|
|
487
|
-
throw error;
|
|
488
|
-
}
|
|
489
|
-
}
|
|
490
|
-
async batchInsert({ tableName, records }) {
|
|
491
|
-
if (records.length === 0) return;
|
|
492
|
-
try {
|
|
493
|
-
const batchStatements = records.map((r) => this.prepareStatement({ tableName, record: r }));
|
|
494
|
-
await this.client.batch(batchStatements, "write");
|
|
495
|
-
} catch (error) {
|
|
496
|
-
this.logger.error(`Error upserting into table ${tableName}: ${error}`);
|
|
497
|
-
throw error;
|
|
498
|
-
}
|
|
499
|
-
}
|
|
500
|
-
async load({ tableName, keys }) {
|
|
501
|
-
const conditions = Object.entries(keys).map(([key]) => `${key} = ?`).join(" AND ");
|
|
502
|
-
const values = Object.values(keys);
|
|
503
|
-
const result = await this.client.execute({
|
|
504
|
-
sql: `SELECT * FROM ${tableName} WHERE ${conditions} ORDER BY createdAt DESC LIMIT 1`,
|
|
505
|
-
args: values
|
|
506
|
-
});
|
|
507
|
-
if (!result.rows || result.rows.length === 0) {
|
|
508
|
-
return null;
|
|
509
|
-
}
|
|
510
|
-
const row = result.rows[0];
|
|
511
|
-
const parsed = Object.fromEntries(
|
|
512
|
-
Object.entries(row || {}).map(([k, v]) => {
|
|
513
|
-
try {
|
|
514
|
-
return [k, typeof v === "string" ? v.startsWith("{") || v.startsWith("[") ? JSON.parse(v) : v : v];
|
|
515
|
-
} catch {
|
|
516
|
-
return [k, v];
|
|
517
|
-
}
|
|
518
|
-
})
|
|
519
|
-
);
|
|
520
|
-
return parsed;
|
|
521
|
-
}
|
|
522
|
-
async getThreadById({ threadId }) {
|
|
523
|
-
const result = await this.load({
|
|
524
|
-
tableName: TABLE_THREADS,
|
|
525
|
-
keys: { id: threadId }
|
|
526
|
-
});
|
|
527
|
-
if (!result) {
|
|
528
|
-
return null;
|
|
529
|
-
}
|
|
530
|
-
return {
|
|
531
|
-
...result,
|
|
532
|
-
metadata: typeof result.metadata === "string" ? JSON.parse(result.metadata) : result.metadata
|
|
533
|
-
};
|
|
534
|
-
}
|
|
535
|
-
async getThreadsByResourceId({ resourceId }) {
|
|
536
|
-
const result = await this.client.execute({
|
|
537
|
-
sql: `SELECT * FROM ${TABLE_THREADS} WHERE resourceId = ?`,
|
|
538
|
-
args: [resourceId]
|
|
539
|
-
});
|
|
540
|
-
if (!result.rows) {
|
|
541
|
-
return [];
|
|
542
|
-
}
|
|
543
|
-
return result.rows.map((thread) => ({
|
|
544
|
-
id: thread.id,
|
|
545
|
-
resourceId: thread.resourceId,
|
|
546
|
-
title: thread.title,
|
|
547
|
-
createdAt: thread.createdAt,
|
|
548
|
-
updatedAt: thread.updatedAt,
|
|
549
|
-
metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata
|
|
550
|
-
}));
|
|
551
|
-
}
|
|
552
|
-
async saveThread({ thread }) {
|
|
553
|
-
await this.insert({
|
|
554
|
-
tableName: TABLE_THREADS,
|
|
555
|
-
record: {
|
|
556
|
-
...thread,
|
|
557
|
-
metadata: JSON.stringify(thread.metadata)
|
|
558
|
-
}
|
|
559
|
-
});
|
|
560
|
-
return thread;
|
|
561
|
-
}
|
|
562
|
-
async updateThread({
|
|
563
|
-
id,
|
|
564
|
-
title,
|
|
565
|
-
metadata
|
|
566
|
-
}) {
|
|
567
|
-
const thread = await this.getThreadById({ threadId: id });
|
|
568
|
-
if (!thread) {
|
|
569
|
-
throw new Error(`Thread ${id} not found`);
|
|
570
|
-
}
|
|
571
|
-
const updatedThread = {
|
|
572
|
-
...thread,
|
|
573
|
-
title,
|
|
574
|
-
metadata: {
|
|
575
|
-
...thread.metadata,
|
|
576
|
-
...metadata
|
|
577
|
-
}
|
|
578
|
-
};
|
|
579
|
-
await this.client.execute({
|
|
580
|
-
sql: `UPDATE ${TABLE_THREADS} SET title = ?, metadata = ? WHERE id = ?`,
|
|
581
|
-
args: [title, JSON.stringify(updatedThread.metadata), id]
|
|
582
|
-
});
|
|
583
|
-
return updatedThread;
|
|
584
|
-
}
|
|
585
|
-
async deleteThread({ threadId }) {
|
|
586
|
-
await this.client.execute({
|
|
587
|
-
sql: `DELETE FROM ${TABLE_THREADS} WHERE id = ?`,
|
|
588
|
-
args: [threadId]
|
|
589
|
-
});
|
|
590
|
-
}
|
|
591
|
-
parseRow(row) {
|
|
592
|
-
let content = row.content;
|
|
593
|
-
try {
|
|
594
|
-
content = JSON.parse(row.content);
|
|
595
|
-
} catch {
|
|
596
|
-
}
|
|
597
|
-
return {
|
|
598
|
-
id: row.id,
|
|
599
|
-
content,
|
|
600
|
-
role: row.role,
|
|
601
|
-
type: row.type,
|
|
602
|
-
createdAt: new Date(row.createdAt),
|
|
603
|
-
threadId: row.thread_id
|
|
604
|
-
};
|
|
605
|
-
}
|
|
606
|
-
async getMessages({ threadId, selectBy }) {
|
|
607
|
-
try {
|
|
608
|
-
const messages = [];
|
|
609
|
-
const limit = typeof selectBy?.last === `number` ? selectBy.last : 40;
|
|
610
|
-
if (selectBy?.include?.length) {
|
|
611
|
-
const includeIds = selectBy.include.map((i) => i.id);
|
|
612
|
-
const maxPrev = Math.max(...selectBy.include.map((i) => i.withPreviousMessages || 0));
|
|
613
|
-
const maxNext = Math.max(...selectBy.include.map((i) => i.withNextMessages || 0));
|
|
614
|
-
const includeResult = await this.client.execute({
|
|
615
|
-
sql: `
|
|
616
|
-
WITH numbered_messages AS (
|
|
617
|
-
SELECT
|
|
618
|
-
id,
|
|
619
|
-
content,
|
|
620
|
-
role,
|
|
621
|
-
type,
|
|
622
|
-
"createdAt",
|
|
623
|
-
thread_id,
|
|
624
|
-
ROW_NUMBER() OVER (ORDER BY "createdAt" ASC) as row_num
|
|
625
|
-
FROM "${TABLE_MESSAGES}"
|
|
626
|
-
WHERE thread_id = ?
|
|
627
|
-
),
|
|
628
|
-
target_positions AS (
|
|
629
|
-
SELECT row_num as target_pos
|
|
630
|
-
FROM numbered_messages
|
|
631
|
-
WHERE id IN (${includeIds.map(() => "?").join(", ")})
|
|
632
|
-
)
|
|
633
|
-
SELECT DISTINCT m.*
|
|
634
|
-
FROM numbered_messages m
|
|
635
|
-
CROSS JOIN target_positions t
|
|
636
|
-
WHERE m.row_num BETWEEN (t.target_pos - ?) AND (t.target_pos + ?)
|
|
637
|
-
ORDER BY m."createdAt" ASC
|
|
638
|
-
`,
|
|
639
|
-
args: [threadId, ...includeIds, maxPrev, maxNext]
|
|
640
|
-
});
|
|
641
|
-
if (includeResult.rows) {
|
|
642
|
-
messages.push(...includeResult.rows.map((row) => this.parseRow(row)));
|
|
643
|
-
}
|
|
644
|
-
}
|
|
645
|
-
const excludeIds = messages.map((m) => m.id);
|
|
646
|
-
const remainingSql = `
|
|
647
|
-
SELECT
|
|
648
|
-
id,
|
|
649
|
-
content,
|
|
650
|
-
role,
|
|
651
|
-
type,
|
|
652
|
-
"createdAt",
|
|
653
|
-
thread_id
|
|
654
|
-
FROM "${TABLE_MESSAGES}"
|
|
655
|
-
WHERE thread_id = ?
|
|
656
|
-
${excludeIds.length ? `AND id NOT IN (${excludeIds.map(() => "?").join(", ")})` : ""}
|
|
657
|
-
ORDER BY "createdAt" DESC
|
|
658
|
-
LIMIT ?
|
|
659
|
-
`;
|
|
660
|
-
const remainingArgs = [threadId, ...excludeIds.length ? excludeIds : [], limit];
|
|
661
|
-
const remainingResult = await this.client.execute({
|
|
662
|
-
sql: remainingSql,
|
|
663
|
-
args: remainingArgs
|
|
664
|
-
});
|
|
665
|
-
if (remainingResult.rows) {
|
|
666
|
-
messages.push(...remainingResult.rows.map((row) => this.parseRow(row)));
|
|
667
|
-
}
|
|
668
|
-
messages.sort((a, b) => a.createdAt.getTime() - b.createdAt.getTime());
|
|
669
|
-
return messages;
|
|
670
|
-
} catch (error) {
|
|
671
|
-
this.logger.error("Error getting messages:", error);
|
|
672
|
-
throw error;
|
|
673
|
-
}
|
|
674
|
-
}
|
|
675
|
-
async saveMessages({ messages }) {
|
|
676
|
-
if (messages.length === 0) return messages;
|
|
677
|
-
const tx = await this.client.transaction("write");
|
|
678
|
-
try {
|
|
679
|
-
const threadId = messages[0]?.threadId;
|
|
680
|
-
if (!threadId) {
|
|
681
|
-
throw new Error("Thread ID is required");
|
|
682
|
-
}
|
|
683
|
-
for (const message of messages) {
|
|
684
|
-
const time = message.createdAt || /* @__PURE__ */ new Date();
|
|
685
|
-
await tx.execute({
|
|
686
|
-
sql: `INSERT INTO ${TABLE_MESSAGES} (id, thread_id, content, role, type, createdAt)
|
|
687
|
-
VALUES (?, ?, ?, ?, ?, ?)`,
|
|
688
|
-
args: [
|
|
689
|
-
message.id,
|
|
690
|
-
threadId,
|
|
691
|
-
typeof message.content === "object" ? JSON.stringify(message.content) : message.content,
|
|
692
|
-
message.role,
|
|
693
|
-
message.type,
|
|
694
|
-
time instanceof Date ? time.toISOString() : time
|
|
695
|
-
]
|
|
696
|
-
});
|
|
697
|
-
}
|
|
698
|
-
await tx.commit();
|
|
699
|
-
return messages;
|
|
700
|
-
} catch (error) {
|
|
701
|
-
this.logger.error("Failed to save messages in database: " + error?.message);
|
|
702
|
-
await tx.rollback();
|
|
703
|
-
throw error;
|
|
704
|
-
}
|
|
705
|
-
}
|
|
706
|
-
transformEvalRow(row) {
|
|
707
|
-
const resultValue = JSON.parse(row.result);
|
|
708
|
-
const testInfoValue = row.test_info ? JSON.parse(row.test_info) : void 0;
|
|
709
|
-
if (!resultValue || typeof resultValue !== "object" || !("score" in resultValue)) {
|
|
710
|
-
throw new Error(`Invalid MetricResult format: ${JSON.stringify(resultValue)}`);
|
|
711
|
-
}
|
|
712
|
-
return {
|
|
713
|
-
input: row.input,
|
|
714
|
-
output: row.output,
|
|
715
|
-
result: resultValue,
|
|
716
|
-
agentName: row.agent_name,
|
|
717
|
-
metricName: row.metric_name,
|
|
718
|
-
instructions: row.instructions,
|
|
719
|
-
testInfo: testInfoValue,
|
|
720
|
-
globalRunId: row.global_run_id,
|
|
721
|
-
runId: row.run_id,
|
|
722
|
-
createdAt: row.created_at
|
|
723
|
-
};
|
|
724
|
-
}
|
|
725
|
-
async getEvalsByAgentName(agentName, type) {
|
|
726
|
-
try {
|
|
727
|
-
const baseQuery = `SELECT * FROM ${TABLE_EVALS} WHERE agent_name = ?`;
|
|
728
|
-
const typeCondition = type === "test" ? " AND test_info IS NOT NULL AND test_info->>'testPath' IS NOT NULL" : type === "live" ? " AND (test_info IS NULL OR test_info->>'testPath' IS NULL)" : "";
|
|
729
|
-
const result = await this.client.execute({
|
|
730
|
-
sql: `${baseQuery}${typeCondition} ORDER BY created_at DESC`,
|
|
731
|
-
args: [agentName]
|
|
732
|
-
});
|
|
733
|
-
return result.rows?.map((row) => this.transformEvalRow(row)) ?? [];
|
|
734
|
-
} catch (error) {
|
|
735
|
-
if (error instanceof Error && error.message.includes("no such table")) {
|
|
736
|
-
return [];
|
|
737
|
-
}
|
|
738
|
-
this.logger.error("Failed to get evals for the specified agent: " + error?.message);
|
|
739
|
-
throw error;
|
|
740
|
-
}
|
|
741
|
-
}
|
|
742
|
-
// TODO: add types
|
|
743
|
-
async getTraces({
|
|
744
|
-
name,
|
|
745
|
-
scope,
|
|
746
|
-
page,
|
|
747
|
-
perPage,
|
|
748
|
-
attributes
|
|
749
|
-
} = {
|
|
750
|
-
page: 0,
|
|
751
|
-
perPage: 100
|
|
752
|
-
}) {
|
|
753
|
-
const limit = perPage;
|
|
754
|
-
const offset = page * perPage;
|
|
755
|
-
const args = [];
|
|
756
|
-
const conditions = [];
|
|
757
|
-
if (name) {
|
|
758
|
-
conditions.push("name LIKE CONCAT(?, '%')");
|
|
759
|
-
}
|
|
760
|
-
if (scope) {
|
|
761
|
-
conditions.push("scope = ?");
|
|
762
|
-
}
|
|
763
|
-
if (attributes) {
|
|
764
|
-
Object.keys(attributes).forEach((key) => {
|
|
765
|
-
conditions.push(`attributes->>'$.${key}' = ?`);
|
|
766
|
-
});
|
|
767
|
-
}
|
|
768
|
-
const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
|
|
769
|
-
if (name) {
|
|
770
|
-
args.push(name);
|
|
771
|
-
}
|
|
772
|
-
if (scope) {
|
|
773
|
-
args.push(scope);
|
|
774
|
-
}
|
|
775
|
-
if (attributes) {
|
|
776
|
-
for (const [_key, value] of Object.entries(attributes)) {
|
|
777
|
-
args.push(value);
|
|
778
|
-
}
|
|
779
|
-
}
|
|
780
|
-
args.push(limit, offset);
|
|
781
|
-
const result = await this.client.execute({
|
|
782
|
-
sql: `SELECT * FROM ${TABLE_TRACES} ${whereClause} ORDER BY "startTime" DESC LIMIT ? OFFSET ?`,
|
|
783
|
-
args
|
|
784
|
-
});
|
|
785
|
-
if (!result.rows) {
|
|
786
|
-
return [];
|
|
787
|
-
}
|
|
788
|
-
return result.rows.map((row) => ({
|
|
789
|
-
id: row.id,
|
|
790
|
-
parentSpanId: row.parentSpanId,
|
|
791
|
-
traceId: row.traceId,
|
|
792
|
-
name: row.name,
|
|
793
|
-
scope: row.scope,
|
|
794
|
-
kind: row.kind,
|
|
795
|
-
status: safelyParseJSON(row.status),
|
|
796
|
-
events: safelyParseJSON(row.events),
|
|
797
|
-
links: safelyParseJSON(row.links),
|
|
798
|
-
attributes: safelyParseJSON(row.attributes),
|
|
799
|
-
startTime: row.startTime,
|
|
800
|
-
endTime: row.endTime,
|
|
801
|
-
other: safelyParseJSON(row.other),
|
|
802
|
-
createdAt: row.createdAt
|
|
803
|
-
}));
|
|
804
|
-
}
|
|
805
|
-
};
|
|
806
|
-
function deepMerge(target, source) {
|
|
807
|
-
const output = { ...target };
|
|
808
|
-
if (!source) return output;
|
|
809
|
-
Object.keys(source).forEach((key) => {
|
|
810
|
-
const targetValue = output[key];
|
|
811
|
-
const sourceValue = source[key];
|
|
812
|
-
if (Array.isArray(targetValue) && Array.isArray(sourceValue)) {
|
|
813
|
-
output[key] = sourceValue;
|
|
814
|
-
} else if (sourceValue instanceof Object && targetValue instanceof Object && !Array.isArray(sourceValue) && !Array.isArray(targetValue)) {
|
|
815
|
-
output[key] = deepMerge(targetValue, sourceValue);
|
|
816
|
-
} else if (sourceValue !== void 0) {
|
|
817
|
-
output[key] = sourceValue;
|
|
818
|
-
}
|
|
819
|
-
});
|
|
820
|
-
return output;
|
|
821
|
-
}
|
|
822
|
-
var cachedPath = false;
|
|
823
|
-
function getModelCachePath() {
|
|
824
|
-
if (cachedPath) return cachedPath;
|
|
825
|
-
const firstNodeModules = node_modulesPath__default.default().split("node_modules")[0];
|
|
826
|
-
cachedPath = path__default.default.join(firstNodeModules, "node_modules", ".fastembed-model-cache");
|
|
827
|
-
return cachedPath;
|
|
828
|
-
}
|
|
829
|
-
function unbundleableImport(name) {
|
|
830
|
-
const nonStaticallyAnalyzableName = `${name}?d=${Date.now()}`;
|
|
831
|
-
return import(nonStaticallyAnalyzableName.split(`?`)[0]);
|
|
832
|
-
}
|
|
833
|
-
async function generateEmbeddings(values, modelType) {
|
|
834
|
-
try {
|
|
835
|
-
let mod;
|
|
836
|
-
const importErrors = [];
|
|
837
|
-
{
|
|
838
|
-
try {
|
|
839
|
-
mod = await unbundleableImport("fastembed");
|
|
840
|
-
} catch (e) {
|
|
841
|
-
if (e instanceof Error) {
|
|
842
|
-
importErrors.push(e);
|
|
843
|
-
} else {
|
|
844
|
-
throw e;
|
|
845
|
-
}
|
|
846
|
-
}
|
|
847
|
-
}
|
|
848
|
-
if (!mod) {
|
|
849
|
-
throw new Error(`${importErrors.map((e) => e.message).join(`
|
|
850
|
-
`)}
|
|
851
|
-
|
|
852
|
-
This runtime does not support fastembed-js, which is the default embedder in Mastra.
|
|
853
|
-
Scroll up to read import errors. These errors mean you can't use the default Mastra embedder on this hosting platform.
|
|
854
|
-
You can either use Mastra Cloud which supports the default embedder, or you can configure an alternate provider.
|
|
855
|
-
|
|
856
|
-
For example if you're using Memory:
|
|
857
|
-
|
|
858
|
-
import { openai } from "@ai-sdk/openai";
|
|
859
|
-
|
|
860
|
-
const memory = new Memory({
|
|
861
|
-
embedder: openai.embedding("text-embedding-3-small"), // <- doesn't have to be openai
|
|
862
|
-
})
|
|
863
|
-
|
|
864
|
-
Visit https://sdk.vercel.ai/docs/foundations/overview#embedding-models to find an alternate embedding provider
|
|
865
|
-
|
|
866
|
-
If you do not want to use the Memory semantic recall feature, you can disable it entirely and this error will go away.
|
|
867
|
-
|
|
868
|
-
const memory = new Memory({
|
|
869
|
-
options: {
|
|
870
|
-
semanticRecall: false // <- an embedder will not be required with this set to false
|
|
871
|
-
}
|
|
872
|
-
})
|
|
873
|
-
`);
|
|
874
|
-
}
|
|
875
|
-
const { FlagEmbedding, EmbeddingModel } = mod;
|
|
876
|
-
const model = await FlagEmbedding.init({
|
|
877
|
-
model: EmbeddingModel[modelType],
|
|
878
|
-
cacheDir: getModelCachePath()
|
|
879
|
-
});
|
|
880
|
-
const embeddings = await model.embed(values);
|
|
881
|
-
const allResults = [];
|
|
882
|
-
for await (const result of embeddings) {
|
|
883
|
-
allResults.push(...result.map((embedding) => Array.from(embedding)));
|
|
884
|
-
}
|
|
885
|
-
if (allResults.length === 0) throw new Error("No embeddings generated");
|
|
886
|
-
return {
|
|
887
|
-
embeddings: allResults
|
|
888
|
-
};
|
|
889
|
-
} catch (error) {
|
|
890
|
-
console.error("Error generating embeddings:", error);
|
|
891
|
-
throw error;
|
|
892
|
-
}
|
|
893
|
-
}
|
|
894
|
-
var fastEmbedProvider = ai.experimental_customProvider({
|
|
895
|
-
textEmbeddingModels: {
|
|
896
|
-
"bge-small-en-v1.5": {
|
|
897
|
-
specificationVersion: "v1",
|
|
898
|
-
provider: "fastembed",
|
|
899
|
-
modelId: "bge-small-en-v1.5",
|
|
900
|
-
maxEmbeddingsPerCall: 256,
|
|
901
|
-
supportsParallelCalls: true,
|
|
902
|
-
async doEmbed({ values }) {
|
|
903
|
-
return generateEmbeddings(values, "BGESmallENV15");
|
|
904
|
-
}
|
|
905
|
-
},
|
|
906
|
-
"bge-base-en-v1.5": {
|
|
907
|
-
specificationVersion: "v1",
|
|
908
|
-
provider: "fastembed",
|
|
909
|
-
modelId: "bge-base-en-v1.5",
|
|
910
|
-
maxEmbeddingsPerCall: 256,
|
|
911
|
-
supportsParallelCalls: true,
|
|
912
|
-
async doEmbed({ values }) {
|
|
913
|
-
return generateEmbeddings(values, "BGEBaseENV15");
|
|
914
|
-
}
|
|
915
|
-
}
|
|
916
|
-
}
|
|
7
|
+
Object.defineProperty(exports, "MastraMemory", {
|
|
8
|
+
enumerable: true,
|
|
9
|
+
get: function () { return chunkKFQ7Z3PO_cjs.MastraMemory; }
|
|
917
10
|
});
|
|
918
|
-
var defaultEmbedder = fastEmbedProvider.textEmbeddingModel;
|
|
919
|
-
|
|
920
|
-
// src/vector/vector.ts
|
|
921
|
-
var MastraVector = class extends MastraBase {
|
|
922
|
-
constructor() {
|
|
923
|
-
super({ name: "MastraVector", component: "VECTOR" });
|
|
924
|
-
}
|
|
925
|
-
baseKeys = {
|
|
926
|
-
query: ["queryVector", "topK", "filter", "includeVector"],
|
|
927
|
-
upsert: ["vectors", "metadata", "ids"],
|
|
928
|
-
createIndex: ["dimension", "metric"]
|
|
929
|
-
};
|
|
930
|
-
normalizeArgs(method, [first, ...rest], extendedKeys = []) {
|
|
931
|
-
if (typeof first === "object") {
|
|
932
|
-
return first;
|
|
933
|
-
}
|
|
934
|
-
this.logger.warn(
|
|
935
|
-
`Deprecation Warning: Passing individual arguments to ${method}() is deprecated. Please use an object parameter instead.`
|
|
936
|
-
);
|
|
937
|
-
const baseKeys = this.baseKeys[method] || [];
|
|
938
|
-
const paramKeys = [...baseKeys, ...extendedKeys].slice(0, rest.length);
|
|
939
|
-
return {
|
|
940
|
-
indexName: first,
|
|
941
|
-
...Object.fromEntries(paramKeys.map((key, i) => [key, rest[i]]))
|
|
942
|
-
};
|
|
943
|
-
}
|
|
944
|
-
async updateIndexById(_indexName, _id, _update) {
|
|
945
|
-
throw new Error("updateIndexById is not implemented yet");
|
|
946
|
-
}
|
|
947
|
-
async deleteIndexById(_indexName, _id) {
|
|
948
|
-
throw new Error("deleteById is not implemented yet");
|
|
949
|
-
}
|
|
950
|
-
};
|
|
951
|
-
|
|
952
|
-
// src/vector/filter/base.ts
|
|
953
|
-
var BaseFilterTranslator = class _BaseFilterTranslator {
|
|
954
|
-
/**
|
|
955
|
-
* Operator type checks
|
|
956
|
-
*/
|
|
957
|
-
isOperator(key) {
|
|
958
|
-
return key.startsWith("$");
|
|
959
|
-
}
|
|
960
|
-
static BASIC_OPERATORS = ["$eq", "$ne"];
|
|
961
|
-
static NUMERIC_OPERATORS = ["$gt", "$gte", "$lt", "$lte"];
|
|
962
|
-
static ARRAY_OPERATORS = ["$in", "$nin", "$all", "$elemMatch"];
|
|
963
|
-
static LOGICAL_OPERATORS = ["$and", "$or", "$not", "$nor"];
|
|
964
|
-
static ELEMENT_OPERATORS = ["$exists"];
|
|
965
|
-
static REGEX_OPERATORS = ["$regex", "$options"];
|
|
966
|
-
static DEFAULT_OPERATORS = {
|
|
967
|
-
logical: _BaseFilterTranslator.LOGICAL_OPERATORS,
|
|
968
|
-
basic: _BaseFilterTranslator.BASIC_OPERATORS,
|
|
969
|
-
numeric: _BaseFilterTranslator.NUMERIC_OPERATORS,
|
|
970
|
-
array: _BaseFilterTranslator.ARRAY_OPERATORS,
|
|
971
|
-
element: _BaseFilterTranslator.ELEMENT_OPERATORS,
|
|
972
|
-
regex: _BaseFilterTranslator.REGEX_OPERATORS
|
|
973
|
-
};
|
|
974
|
-
isLogicalOperator(key) {
|
|
975
|
-
return _BaseFilterTranslator.DEFAULT_OPERATORS.logical.includes(key);
|
|
976
|
-
}
|
|
977
|
-
isBasicOperator(key) {
|
|
978
|
-
return _BaseFilterTranslator.DEFAULT_OPERATORS.basic.includes(key);
|
|
979
|
-
}
|
|
980
|
-
isNumericOperator(key) {
|
|
981
|
-
return _BaseFilterTranslator.DEFAULT_OPERATORS.numeric.includes(key);
|
|
982
|
-
}
|
|
983
|
-
isArrayOperator(key) {
|
|
984
|
-
return _BaseFilterTranslator.DEFAULT_OPERATORS.array.includes(key);
|
|
985
|
-
}
|
|
986
|
-
isElementOperator(key) {
|
|
987
|
-
return _BaseFilterTranslator.DEFAULT_OPERATORS.element.includes(key);
|
|
988
|
-
}
|
|
989
|
-
isRegexOperator(key) {
|
|
990
|
-
return _BaseFilterTranslator.DEFAULT_OPERATORS.regex.includes(key);
|
|
991
|
-
}
|
|
992
|
-
isFieldOperator(key) {
|
|
993
|
-
return this.isOperator(key) && !this.isLogicalOperator(key);
|
|
994
|
-
}
|
|
995
|
-
isCustomOperator(key) {
|
|
996
|
-
const support = this.getSupportedOperators();
|
|
997
|
-
return support.custom?.includes(key) ?? false;
|
|
998
|
-
}
|
|
999
|
-
getSupportedOperators() {
|
|
1000
|
-
return _BaseFilterTranslator.DEFAULT_OPERATORS;
|
|
1001
|
-
}
|
|
1002
|
-
isValidOperator(key) {
|
|
1003
|
-
const support = this.getSupportedOperators();
|
|
1004
|
-
const allSupported = Object.values(support).flat();
|
|
1005
|
-
return allSupported.includes(key);
|
|
1006
|
-
}
|
|
1007
|
-
/**
|
|
1008
|
-
* Value normalization for comparison operators
|
|
1009
|
-
*/
|
|
1010
|
-
normalizeComparisonValue(value) {
|
|
1011
|
-
if (value instanceof Date) {
|
|
1012
|
-
return value.toISOString();
|
|
1013
|
-
}
|
|
1014
|
-
if (typeof value === "number" && Object.is(value, -0)) {
|
|
1015
|
-
return 0;
|
|
1016
|
-
}
|
|
1017
|
-
return value;
|
|
1018
|
-
}
|
|
1019
|
-
/**
|
|
1020
|
-
* Helper method to simulate $all operator using $and + $eq when needed.
|
|
1021
|
-
* Some vector stores don't support $all natively.
|
|
1022
|
-
*/
|
|
1023
|
-
simulateAllOperator(field, values) {
|
|
1024
|
-
return {
|
|
1025
|
-
$and: values.map((value) => ({
|
|
1026
|
-
[field]: { $in: [this.normalizeComparisonValue(value)] }
|
|
1027
|
-
}))
|
|
1028
|
-
};
|
|
1029
|
-
}
|
|
1030
|
-
/**
|
|
1031
|
-
* Utility functions for type checking
|
|
1032
|
-
*/
|
|
1033
|
-
isPrimitive(value) {
|
|
1034
|
-
return value === null || value === void 0 || typeof value === "string" || typeof value === "number" || typeof value === "boolean";
|
|
1035
|
-
}
|
|
1036
|
-
isRegex(value) {
|
|
1037
|
-
return value instanceof RegExp;
|
|
1038
|
-
}
|
|
1039
|
-
isEmpty(obj) {
|
|
1040
|
-
return obj === null || obj === void 0 || typeof obj === "object" && Object.keys(obj).length === 0;
|
|
1041
|
-
}
|
|
1042
|
-
static ErrorMessages = {
|
|
1043
|
-
UNSUPPORTED_OPERATOR: (op) => `Unsupported operator: ${op}`,
|
|
1044
|
-
INVALID_LOGICAL_OPERATOR_LOCATION: (op, path2) => `Logical operator ${op} cannot be used at field level: ${path2}`,
|
|
1045
|
-
NOT_REQUIRES_OBJECT: `$not operator requires an object`,
|
|
1046
|
-
NOT_CANNOT_BE_EMPTY: `$not operator cannot be empty`,
|
|
1047
|
-
INVALID_LOGICAL_OPERATOR_CONTENT: (path2) => `Logical operators must contain field conditions, not direct operators: ${path2}`,
|
|
1048
|
-
INVALID_TOP_LEVEL_OPERATOR: (op) => `Invalid top-level operator: ${op}`,
|
|
1049
|
-
ELEM_MATCH_REQUIRES_OBJECT: `$elemMatch requires an object with conditions`
|
|
1050
|
-
};
|
|
1051
|
-
/**
|
|
1052
|
-
* Helper to handle array value normalization consistently
|
|
1053
|
-
*/
|
|
1054
|
-
normalizeArrayValues(values) {
|
|
1055
|
-
return values.map((value) => this.normalizeComparisonValue(value));
|
|
1056
|
-
}
|
|
1057
|
-
validateFilter(filter) {
|
|
1058
|
-
const validation = this.validateFilterSupport(filter);
|
|
1059
|
-
if (!validation.supported) {
|
|
1060
|
-
throw new Error(validation.messages.join(", "));
|
|
1061
|
-
}
|
|
1062
|
-
}
|
|
1063
|
-
/**
|
|
1064
|
-
* Validates if a filter structure is supported by the specific vector DB
|
|
1065
|
-
* and returns detailed validation information.
|
|
1066
|
-
*/
|
|
1067
|
-
validateFilterSupport(node, path2 = "") {
|
|
1068
|
-
const messages = [];
|
|
1069
|
-
if (this.isPrimitive(node) || this.isEmpty(node)) {
|
|
1070
|
-
return { supported: true, messages: [] };
|
|
1071
|
-
}
|
|
1072
|
-
if (Array.isArray(node)) {
|
|
1073
|
-
const arrayResults = node.map((item) => this.validateFilterSupport(item, path2));
|
|
1074
|
-
const arrayMessages = arrayResults.flatMap((r) => r.messages);
|
|
1075
|
-
return {
|
|
1076
|
-
supported: arrayResults.every((r) => r.supported),
|
|
1077
|
-
messages: arrayMessages
|
|
1078
|
-
};
|
|
1079
|
-
}
|
|
1080
|
-
const nodeObj = node;
|
|
1081
|
-
let isSupported = true;
|
|
1082
|
-
for (const [key, value] of Object.entries(nodeObj)) {
|
|
1083
|
-
const newPath = path2 ? `${path2}.${key}` : key;
|
|
1084
|
-
if (this.isOperator(key)) {
|
|
1085
|
-
if (!this.isValidOperator(key)) {
|
|
1086
|
-
isSupported = false;
|
|
1087
|
-
messages.push(_BaseFilterTranslator.ErrorMessages.UNSUPPORTED_OPERATOR(key));
|
|
1088
|
-
continue;
|
|
1089
|
-
}
|
|
1090
|
-
if (!path2 && !this.isLogicalOperator(key)) {
|
|
1091
|
-
isSupported = false;
|
|
1092
|
-
messages.push(_BaseFilterTranslator.ErrorMessages.INVALID_TOP_LEVEL_OPERATOR(key));
|
|
1093
|
-
continue;
|
|
1094
|
-
}
|
|
1095
|
-
if (key === "$elemMatch" && (typeof value !== "object" || Array.isArray(value))) {
|
|
1096
|
-
isSupported = false;
|
|
1097
|
-
messages.push(_BaseFilterTranslator.ErrorMessages.ELEM_MATCH_REQUIRES_OBJECT);
|
|
1098
|
-
continue;
|
|
1099
|
-
}
|
|
1100
|
-
if (this.isLogicalOperator(key)) {
|
|
1101
|
-
if (key === "$not") {
|
|
1102
|
-
if (Array.isArray(value) || typeof value !== "object") {
|
|
1103
|
-
isSupported = false;
|
|
1104
|
-
messages.push(_BaseFilterTranslator.ErrorMessages.NOT_REQUIRES_OBJECT);
|
|
1105
|
-
continue;
|
|
1106
|
-
}
|
|
1107
|
-
if (this.isEmpty(value)) {
|
|
1108
|
-
isSupported = false;
|
|
1109
|
-
messages.push(_BaseFilterTranslator.ErrorMessages.NOT_CANNOT_BE_EMPTY);
|
|
1110
|
-
continue;
|
|
1111
|
-
}
|
|
1112
|
-
continue;
|
|
1113
|
-
}
|
|
1114
|
-
if (path2 && !this.isLogicalOperator(path2.split(".").pop())) {
|
|
1115
|
-
isSupported = false;
|
|
1116
|
-
messages.push(_BaseFilterTranslator.ErrorMessages.INVALID_LOGICAL_OPERATOR_LOCATION(key, newPath));
|
|
1117
|
-
continue;
|
|
1118
|
-
}
|
|
1119
|
-
if (Array.isArray(value)) {
|
|
1120
|
-
const hasDirectOperators = value.some(
|
|
1121
|
-
(item) => typeof item === "object" && Object.keys(item).length === 1 && this.isFieldOperator(Object.keys(item)[0])
|
|
1122
|
-
);
|
|
1123
|
-
if (hasDirectOperators) {
|
|
1124
|
-
isSupported = false;
|
|
1125
|
-
messages.push(_BaseFilterTranslator.ErrorMessages.INVALID_LOGICAL_OPERATOR_CONTENT(newPath));
|
|
1126
|
-
continue;
|
|
1127
|
-
}
|
|
1128
|
-
}
|
|
1129
|
-
}
|
|
1130
|
-
}
|
|
1131
|
-
const nestedValidation = this.validateFilterSupport(value, newPath);
|
|
1132
|
-
if (!nestedValidation.supported) {
|
|
1133
|
-
isSupported = false;
|
|
1134
|
-
messages.push(...nestedValidation.messages);
|
|
1135
|
-
}
|
|
1136
|
-
}
|
|
1137
|
-
return { supported: isSupported, messages };
|
|
1138
|
-
}
|
|
1139
|
-
};
|
|
1140
|
-
|
|
1141
|
-
// src/vector/libsql/filter.ts
|
|
1142
|
-
var LibSQLFilterTranslator = class extends BaseFilterTranslator {
|
|
1143
|
-
getSupportedOperators() {
|
|
1144
|
-
return {
|
|
1145
|
-
...BaseFilterTranslator.DEFAULT_OPERATORS,
|
|
1146
|
-
regex: [],
|
|
1147
|
-
custom: ["$contains", "$size"]
|
|
1148
|
-
};
|
|
1149
|
-
}
|
|
1150
|
-
translate(filter) {
|
|
1151
|
-
if (this.isEmpty(filter)) {
|
|
1152
|
-
return filter;
|
|
1153
|
-
}
|
|
1154
|
-
this.validateFilter(filter);
|
|
1155
|
-
return this.translateNode(filter);
|
|
1156
|
-
}
|
|
1157
|
-
translateNode(node, currentPath = "") {
|
|
1158
|
-
if (this.isRegex(node)) {
|
|
1159
|
-
throw new Error("Direct regex pattern format is not supported in LibSQL");
|
|
1160
|
-
}
|
|
1161
|
-
const withPath = (result2) => currentPath ? { [currentPath]: result2 } : result2;
|
|
1162
|
-
if (this.isPrimitive(node)) {
|
|
1163
|
-
return withPath({ $eq: this.normalizeComparisonValue(node) });
|
|
1164
|
-
}
|
|
1165
|
-
if (Array.isArray(node)) {
|
|
1166
|
-
return withPath({ $in: this.normalizeArrayValues(node) });
|
|
1167
|
-
}
|
|
1168
|
-
const entries = Object.entries(node);
|
|
1169
|
-
const result = {};
|
|
1170
|
-
for (const [key, value] of entries) {
|
|
1171
|
-
const newPath = currentPath ? `${currentPath}.${key}` : key;
|
|
1172
|
-
if (this.isLogicalOperator(key)) {
|
|
1173
|
-
result[key] = Array.isArray(value) ? value.map((filter) => this.translateNode(filter)) : this.translateNode(value);
|
|
1174
|
-
} else if (this.isOperator(key)) {
|
|
1175
|
-
if (this.isArrayOperator(key) && !Array.isArray(value) && key !== "$elemMatch") {
|
|
1176
|
-
result[key] = [value];
|
|
1177
|
-
} else if (this.isBasicOperator(key) && Array.isArray(value)) {
|
|
1178
|
-
result[key] = JSON.stringify(value);
|
|
1179
|
-
} else {
|
|
1180
|
-
result[key] = value;
|
|
1181
|
-
}
|
|
1182
|
-
} else if (typeof value === "object" && value !== null) {
|
|
1183
|
-
const hasOperators = Object.keys(value).some((k) => this.isOperator(k));
|
|
1184
|
-
if (hasOperators) {
|
|
1185
|
-
result[newPath] = this.translateNode(value);
|
|
1186
|
-
} else {
|
|
1187
|
-
Object.assign(result, this.translateNode(value, newPath));
|
|
1188
|
-
}
|
|
1189
|
-
} else {
|
|
1190
|
-
result[newPath] = this.translateNode(value);
|
|
1191
|
-
}
|
|
1192
|
-
}
|
|
1193
|
-
return result;
|
|
1194
|
-
}
|
|
1195
|
-
// TODO: Look more into regex support for LibSQL
|
|
1196
|
-
// private translateRegexPattern(pattern: string, options: string = ''): any {
|
|
1197
|
-
// if (!options) return { $regex: pattern };
|
|
1198
|
-
// const flags = options
|
|
1199
|
-
// .split('')
|
|
1200
|
-
// .filter(f => 'imsux'.includes(f))
|
|
1201
|
-
// .join('');
|
|
1202
|
-
// return {
|
|
1203
|
-
// $regex: pattern,
|
|
1204
|
-
// $options: flags,
|
|
1205
|
-
// };
|
|
1206
|
-
// }
|
|
1207
|
-
};
|
|
1208
|
-
|
|
1209
|
-
// src/vector/libsql/sql-builder.ts
|
|
1210
|
-
var createBasicOperator = (symbol) => {
|
|
1211
|
-
return (key) => ({
|
|
1212
|
-
sql: `CASE
|
|
1213
|
-
WHEN ? IS NULL THEN json_extract(metadata, '$."${handleKey(key)}"') IS ${symbol === "=" ? "" : "NOT"} NULL
|
|
1214
|
-
ELSE json_extract(metadata, '$."${handleKey(key)}"') ${symbol} ?
|
|
1215
|
-
END`,
|
|
1216
|
-
needsValue: true,
|
|
1217
|
-
transformValue: (value) => {
|
|
1218
|
-
return [value, value];
|
|
1219
|
-
}
|
|
1220
|
-
});
|
|
1221
|
-
};
|
|
1222
|
-
var createNumericOperator = (symbol) => {
|
|
1223
|
-
return (key) => ({
|
|
1224
|
-
sql: `CAST(json_extract(metadata, '$."${handleKey(key)}"') AS NUMERIC) ${symbol} ?`,
|
|
1225
|
-
needsValue: true
|
|
1226
|
-
});
|
|
1227
|
-
};
|
|
1228
|
-
var validateJsonArray = (key) => `json_valid(json_extract(metadata, '$."${handleKey(key)}"'))
|
|
1229
|
-
AND json_type(json_extract(metadata, '$."${handleKey(key)}"')) = 'array'`;
|
|
1230
|
-
var FILTER_OPERATORS = {
|
|
1231
|
-
$eq: createBasicOperator("="),
|
|
1232
|
-
$ne: createBasicOperator("!="),
|
|
1233
|
-
$gt: createNumericOperator(">"),
|
|
1234
|
-
$gte: createNumericOperator(">="),
|
|
1235
|
-
$lt: createNumericOperator("<"),
|
|
1236
|
-
$lte: createNumericOperator("<="),
|
|
1237
|
-
// Array Operators
|
|
1238
|
-
$in: (key, value) => ({
|
|
1239
|
-
sql: `json_extract(metadata, '$."${handleKey(key)}"') IN (${value.map(() => "?").join(",")})`,
|
|
1240
|
-
needsValue: true
|
|
1241
|
-
}),
|
|
1242
|
-
$nin: (key, value) => ({
|
|
1243
|
-
sql: `json_extract(metadata, '$."${handleKey(key)}"') NOT IN (${value.map(() => "?").join(",")})`,
|
|
1244
|
-
needsValue: true
|
|
1245
|
-
}),
|
|
1246
|
-
$all: (key) => ({
|
|
1247
|
-
sql: `json_extract(metadata, '$."${handleKey(key)}"') = ?`,
|
|
1248
|
-
needsValue: true,
|
|
1249
|
-
transformValue: (value) => {
|
|
1250
|
-
const arrayValue = Array.isArray(value) ? value : [value];
|
|
1251
|
-
if (arrayValue.length === 0) {
|
|
1252
|
-
return {
|
|
1253
|
-
sql: "1 = 0",
|
|
1254
|
-
values: []
|
|
1255
|
-
};
|
|
1256
|
-
}
|
|
1257
|
-
return {
|
|
1258
|
-
sql: `(
|
|
1259
|
-
CASE
|
|
1260
|
-
WHEN ${validateJsonArray(key)} THEN
|
|
1261
|
-
NOT EXISTS (
|
|
1262
|
-
SELECT value
|
|
1263
|
-
FROM json_each(?)
|
|
1264
|
-
WHERE value NOT IN (
|
|
1265
|
-
SELECT value
|
|
1266
|
-
FROM json_each(json_extract(metadata, '$."${handleKey(key)}"'))
|
|
1267
|
-
)
|
|
1268
|
-
)
|
|
1269
|
-
ELSE FALSE
|
|
1270
|
-
END
|
|
1271
|
-
)`,
|
|
1272
|
-
values: [JSON.stringify(arrayValue)]
|
|
1273
|
-
};
|
|
1274
|
-
}
|
|
1275
|
-
}),
|
|
1276
|
-
$elemMatch: (key) => ({
|
|
1277
|
-
sql: `json_extract(metadata, '$."${handleKey(key)}"') = ?`,
|
|
1278
|
-
needsValue: true,
|
|
1279
|
-
transformValue: (value) => {
|
|
1280
|
-
if (typeof value !== "object" || Array.isArray(value)) {
|
|
1281
|
-
throw new Error("$elemMatch requires an object with conditions");
|
|
1282
|
-
}
|
|
1283
|
-
const conditions = Object.entries(value).map(([field, fieldValue]) => {
|
|
1284
|
-
if (field.startsWith("$")) {
|
|
1285
|
-
const { sql, values } = buildCondition("elem.value", { [field]: fieldValue });
|
|
1286
|
-
const pattern = /json_extract\(metadata, '\$\."[^"]*"(\."[^"]*")*'\)/g;
|
|
1287
|
-
const elemSql = sql.replace(pattern, "elem.value");
|
|
1288
|
-
return { sql: elemSql, values };
|
|
1289
|
-
} else if (typeof fieldValue === "object" && !Array.isArray(fieldValue)) {
|
|
1290
|
-
const { sql, values } = buildCondition(field, fieldValue);
|
|
1291
|
-
const pattern = /json_extract\(metadata, '\$\."[^"]*"(\."[^"]*")*'\)/g;
|
|
1292
|
-
const elemSql = sql.replace(pattern, `json_extract(elem.value, '$."${field}"')`);
|
|
1293
|
-
return { sql: elemSql, values };
|
|
1294
|
-
} else {
|
|
1295
|
-
return {
|
|
1296
|
-
sql: `json_extract(elem.value, '$."${field}"') = ?`,
|
|
1297
|
-
values: [fieldValue]
|
|
1298
|
-
};
|
|
1299
|
-
}
|
|
1300
|
-
});
|
|
1301
|
-
return {
|
|
1302
|
-
sql: `(
|
|
1303
|
-
CASE
|
|
1304
|
-
WHEN ${validateJsonArray(key)} THEN
|
|
1305
|
-
EXISTS (
|
|
1306
|
-
SELECT 1
|
|
1307
|
-
FROM json_each(json_extract(metadata, '$."${handleKey(key)}"')) as elem
|
|
1308
|
-
WHERE ${conditions.map((c) => c.sql).join(" AND ")}
|
|
1309
|
-
)
|
|
1310
|
-
ELSE FALSE
|
|
1311
|
-
END
|
|
1312
|
-
)`,
|
|
1313
|
-
values: conditions.flatMap((c) => c.values)
|
|
1314
|
-
};
|
|
1315
|
-
}
|
|
1316
|
-
}),
|
|
1317
|
-
// Element Operators
|
|
1318
|
-
$exists: (key) => ({
|
|
1319
|
-
sql: `json_extract(metadata, '$."${handleKey(key)}"') IS NOT NULL`,
|
|
1320
|
-
needsValue: false
|
|
1321
|
-
}),
|
|
1322
|
-
// Logical Operators
|
|
1323
|
-
$and: (key) => ({
|
|
1324
|
-
sql: `(${key})`,
|
|
1325
|
-
needsValue: false
|
|
1326
|
-
}),
|
|
1327
|
-
$or: (key) => ({
|
|
1328
|
-
sql: `(${key})`,
|
|
1329
|
-
needsValue: false
|
|
1330
|
-
}),
|
|
1331
|
-
$not: (key) => ({ sql: `NOT (${key})`, needsValue: false }),
|
|
1332
|
-
$nor: (key) => ({
|
|
1333
|
-
sql: `NOT (${key})`,
|
|
1334
|
-
needsValue: false
|
|
1335
|
-
}),
|
|
1336
|
-
$size: (key, paramIndex) => ({
|
|
1337
|
-
sql: `(
|
|
1338
|
-
CASE
|
|
1339
|
-
WHEN json_type(json_extract(metadata, '$."${handleKey(key)}"')) = 'array' THEN
|
|
1340
|
-
json_array_length(json_extract(metadata, '$."${handleKey(key)}"')) = $${paramIndex}
|
|
1341
|
-
ELSE FALSE
|
|
1342
|
-
END
|
|
1343
|
-
)`,
|
|
1344
|
-
needsValue: true
|
|
1345
|
-
}),
|
|
1346
|
-
// /**
|
|
1347
|
-
// * Regex Operators
|
|
1348
|
-
// * Supports case insensitive and multiline
|
|
1349
|
-
// */
|
|
1350
|
-
// $regex: (key: string): FilterOperator => ({
|
|
1351
|
-
// sql: `json_extract(metadata, '$."${handleKey(key)}"') = ?`,
|
|
1352
|
-
// needsValue: true,
|
|
1353
|
-
// transformValue: (value: any) => {
|
|
1354
|
-
// const pattern = typeof value === 'object' ? value.$regex : value;
|
|
1355
|
-
// const options = typeof value === 'object' ? value.$options || '' : '';
|
|
1356
|
-
// let sql = `json_extract(metadata, '$."${handleKey(key)}"')`;
|
|
1357
|
-
// // Handle multiline
|
|
1358
|
-
// // if (options.includes('m')) {
|
|
1359
|
-
// // sql = `REPLACE(${sql}, CHAR(10), '\n')`;
|
|
1360
|
-
// // }
|
|
1361
|
-
// // let finalPattern = pattern;
|
|
1362
|
-
// // if (options) {
|
|
1363
|
-
// // finalPattern = `(\\?${options})${pattern}`;
|
|
1364
|
-
// // }
|
|
1365
|
-
// // // Handle case insensitivity
|
|
1366
|
-
// // if (options.includes('i')) {
|
|
1367
|
-
// // sql = `LOWER(${sql}) REGEXP LOWER(?)`;
|
|
1368
|
-
// // } else {
|
|
1369
|
-
// // sql = `${sql} REGEXP ?`;
|
|
1370
|
-
// // }
|
|
1371
|
-
// if (options.includes('m')) {
|
|
1372
|
-
// sql = `EXISTS (
|
|
1373
|
-
// SELECT 1
|
|
1374
|
-
// FROM json_each(
|
|
1375
|
-
// json_array(
|
|
1376
|
-
// ${sql},
|
|
1377
|
-
// REPLACE(${sql}, CHAR(10), CHAR(13))
|
|
1378
|
-
// )
|
|
1379
|
-
// ) as lines
|
|
1380
|
-
// WHERE lines.value REGEXP ?
|
|
1381
|
-
// )`;
|
|
1382
|
-
// } else {
|
|
1383
|
-
// sql = `${sql} REGEXP ?`;
|
|
1384
|
-
// }
|
|
1385
|
-
// // Handle case insensitivity
|
|
1386
|
-
// if (options.includes('i')) {
|
|
1387
|
-
// sql = sql.replace('REGEXP ?', 'REGEXP LOWER(?)');
|
|
1388
|
-
// sql = sql.replace('value REGEXP', 'LOWER(value) REGEXP');
|
|
1389
|
-
// }
|
|
1390
|
-
// // Handle extended - allows whitespace and comments in pattern
|
|
1391
|
-
// if (options.includes('x')) {
|
|
1392
|
-
// // Remove whitespace and comments from pattern
|
|
1393
|
-
// const cleanPattern = pattern.replace(/\s+|#.*$/gm, '');
|
|
1394
|
-
// return {
|
|
1395
|
-
// sql,
|
|
1396
|
-
// values: [cleanPattern],
|
|
1397
|
-
// };
|
|
1398
|
-
// }
|
|
1399
|
-
// return {
|
|
1400
|
-
// sql,
|
|
1401
|
-
// values: [pattern],
|
|
1402
|
-
// };
|
|
1403
|
-
// },
|
|
1404
|
-
// }),
|
|
1405
|
-
$contains: (key) => ({
|
|
1406
|
-
sql: `json_extract(metadata, '$."${handleKey(key)}"') = ?`,
|
|
1407
|
-
needsValue: true,
|
|
1408
|
-
transformValue: (value) => {
|
|
1409
|
-
if (Array.isArray(value)) {
|
|
1410
|
-
return {
|
|
1411
|
-
sql: `(
|
|
1412
|
-
SELECT ${validateJsonArray(key)}
|
|
1413
|
-
AND EXISTS (
|
|
1414
|
-
SELECT 1
|
|
1415
|
-
FROM json_each(json_extract(metadata, '$."${handleKey(key)}"')) as m
|
|
1416
|
-
WHERE m.value IN (SELECT value FROM json_each(?))
|
|
1417
|
-
)
|
|
1418
|
-
)`,
|
|
1419
|
-
values: [JSON.stringify(value)]
|
|
1420
|
-
};
|
|
1421
|
-
}
|
|
1422
|
-
if (value && typeof value === "object") {
|
|
1423
|
-
let traverse2 = function(obj, path2 = []) {
|
|
1424
|
-
for (const [k, v] of Object.entries(obj)) {
|
|
1425
|
-
const currentPath = [...path2, k];
|
|
1426
|
-
if (v && typeof v === "object" && !Array.isArray(v)) {
|
|
1427
|
-
traverse2(v, currentPath);
|
|
1428
|
-
} else {
|
|
1429
|
-
paths.push(currentPath.join("."));
|
|
1430
|
-
values.push(v);
|
|
1431
|
-
}
|
|
1432
|
-
}
|
|
1433
|
-
};
|
|
1434
|
-
const paths = [];
|
|
1435
|
-
const values = [];
|
|
1436
|
-
traverse2(value);
|
|
1437
|
-
return {
|
|
1438
|
-
sql: `(${paths.map((path2) => `json_extract(metadata, '$."${handleKey(key)}"."${path2}"') = ?`).join(" AND ")})`,
|
|
1439
|
-
values
|
|
1440
|
-
};
|
|
1441
|
-
}
|
|
1442
|
-
return value;
|
|
1443
|
-
}
|
|
1444
|
-
})
|
|
1445
|
-
};
|
|
1446
|
-
var handleKey = (key) => {
|
|
1447
|
-
return key.replace(/\./g, '"."');
|
|
1448
|
-
};
|
|
1449
|
-
function buildFilterQuery(filter) {
|
|
1450
|
-
if (!filter) {
|
|
1451
|
-
return { sql: "", values: [] };
|
|
1452
|
-
}
|
|
1453
|
-
const values = [];
|
|
1454
|
-
const conditions = Object.entries(filter).map(([key, value]) => {
|
|
1455
|
-
const condition = buildCondition(key, value);
|
|
1456
|
-
values.push(...condition.values);
|
|
1457
|
-
return condition.sql;
|
|
1458
|
-
}).join(" AND ");
|
|
1459
|
-
return {
|
|
1460
|
-
sql: conditions ? `WHERE ${conditions}` : "",
|
|
1461
|
-
values
|
|
1462
|
-
};
|
|
1463
|
-
}
|
|
1464
|
-
function buildCondition(key, value, parentPath) {
|
|
1465
|
-
if (["$and", "$or", "$not", "$nor"].includes(key)) {
|
|
1466
|
-
return handleLogicalOperator(key, value);
|
|
1467
|
-
}
|
|
1468
|
-
if (!value || typeof value !== "object") {
|
|
1469
|
-
return {
|
|
1470
|
-
sql: `json_extract(metadata, '$."${key.replace(/\./g, '"."')}"') = ?`,
|
|
1471
|
-
values: [value]
|
|
1472
|
-
};
|
|
1473
|
-
}
|
|
1474
|
-
return handleOperator(key, value);
|
|
1475
|
-
}
|
|
1476
|
-
function handleLogicalOperator(key, value, parentPath) {
|
|
1477
|
-
if (!value || value.length === 0) {
|
|
1478
|
-
switch (key) {
|
|
1479
|
-
case "$and":
|
|
1480
|
-
case "$nor":
|
|
1481
|
-
return { sql: "true", values: [] };
|
|
1482
|
-
case "$or":
|
|
1483
|
-
return { sql: "false", values: [] };
|
|
1484
|
-
case "$not":
|
|
1485
|
-
throw new Error("$not operator cannot be empty");
|
|
1486
|
-
default:
|
|
1487
|
-
return { sql: "true", values: [] };
|
|
1488
|
-
}
|
|
1489
|
-
}
|
|
1490
|
-
if (key === "$not") {
|
|
1491
|
-
const entries = Object.entries(value);
|
|
1492
|
-
const conditions2 = entries.map(([fieldKey, fieldValue]) => buildCondition(fieldKey, fieldValue));
|
|
1493
|
-
return {
|
|
1494
|
-
sql: `NOT (${conditions2.map((c) => c.sql).join(" AND ")})`,
|
|
1495
|
-
values: conditions2.flatMap((c) => c.values)
|
|
1496
|
-
};
|
|
1497
|
-
}
|
|
1498
|
-
const values = [];
|
|
1499
|
-
const joinOperator = key === "$or" || key === "$nor" ? "OR" : "AND";
|
|
1500
|
-
const conditions = Array.isArray(value) ? value.map((f) => {
|
|
1501
|
-
const entries = Object.entries(f);
|
|
1502
|
-
return entries.map(([k, v]) => buildCondition(k, v));
|
|
1503
|
-
}) : [buildCondition(key, value)];
|
|
1504
|
-
const joined = conditions.flat().map((c) => {
|
|
1505
|
-
values.push(...c.values);
|
|
1506
|
-
return c.sql;
|
|
1507
|
-
}).join(` ${joinOperator} `);
|
|
1508
|
-
return {
|
|
1509
|
-
sql: key === "$nor" ? `NOT (${joined})` : `(${joined})`,
|
|
1510
|
-
values
|
|
1511
|
-
};
|
|
1512
|
-
}
|
|
1513
|
-
// Builds the SQL fragment for a field whose value is an operator object,
// e.g. { $gt: 5, $lt: 10 } or { $not: { $gt: 5 } }. Returns { sql, values }.
function handleOperator(key, value) {
  // Arrays (and primitives) carry at most one implicit operator entry;
  // note: null deliberately falls through to Object.entries and throws,
  // matching the original behavior.
  if (typeof value !== "object" || Array.isArray(value)) {
    const [[operator, operatorValue] = []] = Object.entries(value);
    return processOperator(key, operator, operatorValue);
  }
  const results = Object.entries(value).map(([op, operand]) => {
    if (op !== "$not") {
      return processOperator(key, op, operand);
    }
    // $not negates the AND of every nested operator condition.
    const inner = Object.entries(operand).map(([nestedOp, nestedVal]) => processOperator(key, nestedOp, nestedVal));
    return {
      sql: `NOT (${inner.map((condition) => condition.sql).join(" AND ")})`,
      values: inner.flatMap((condition) => condition.values)
    };
  });
  // Multiple operators on one field are implicitly ANDed.
  return {
    sql: `(${results.map((r) => r.sql).join(" AND ")})`,
    values: results.flatMap((r) => r.values)
  };
}
|
|
1532
|
-
// Resolves a single `$operator` for `key` against the FILTER_OPERATORS table
// and returns { sql, values }. Operators that need no bind value return an
// empty values array; transformValue may rewrite the operand or return a
// complete { sql, values } fragment directly.
var processOperator = (key, operator, operatorValue) => {
  const handler = operator.startsWith("$") ? FILTER_OPERATORS[operator] : void 0;
  if (!handler) {
    throw new Error(`Invalid operator: ${operator}`);
  }
  const result = handler(key, operatorValue);
  if (!result.needsValue) {
    return { sql: result.sql, values: [] };
  }
  const payload = result.transformValue ? result.transformValue(operatorValue) : operatorValue;
  // A transform may hand back a ready-made fragment; pass it through untouched.
  if (payload && typeof payload === "object" && "sql" in payload) {
    return payload;
  }
  return {
    sql: result.sql,
    values: Array.isArray(payload) ? payload : [payload]
  };
};
|
|
1550
|
-
|
|
1551
|
-
// src/vector/libsql/index.ts
|
|
1552
|
-
// LibSQL/Turso-backed vector store. Each "index" is a table with columns
// (id, vector_id, embedding F32_BLOB(dim), metadata TEXT holding JSON);
// similarity search uses libsql's vector_distance_cos, so the reported
// score is cosine similarity (1 - cosine distance).
// NOTE(review): `indexName` is interpolated into SQL in most methods; only
// createIndex validates its format — callers are trusted elsewhere.
var LibSQLVector = class extends MastraVector {
  // @libsql/client handle created in the constructor.
  turso;
  constructor({
    connectionUrl,
    authToken,
    syncUrl,
    syncInterval
  }) {
    super();
    this.turso = client.createClient({
      url: this.rewriteDbUrl(connectionUrl),
      syncUrl,
      authToken,
      syncInterval
    });
  }
  // If we're in the .mastra/output directory, use the dir outside .mastra dir
  // reason we need to do this is libsql relative file paths are based on cwd, not current file path
  // since mastra dev sets cwd to .mastra/output this means running an agent directly vs running with mastra dev
  // will put db files in different locations, leading to an inconsistent experience between the two.
  // Ex: with `file:ex.db`
  // 1. `mastra dev`: ${cwd}/.mastra/output/ex.db
  // 2. `tsx src/index.ts`: ${cwd}/ex.db
  // so if we're in .mastra/output we need to rewrite the file url to be relative to the project root dir
  // or the experience will be inconsistent
  // this means `file:` urls are always relative to project root
  // TODO: can we make this easier via bundling? https://github.com/mastra-ai/mastra/pull/2783#pullrequestreview-2662444241
  rewriteDbUrl(url) {
    if (url.startsWith("file:")) {
      const pathPart = url.slice("file:".length);
      if (path.isAbsolute(pathPart)) {
        return url;
      }
      const cwd = process.cwd();
      if (cwd.includes(".mastra") && (cwd.endsWith(`output`) || cwd.endsWith(`output/`) || cwd.endsWith(`output\\`))) {
        const baseDir = path.join(cwd, `..`, `..`);
        const fullPath = path.resolve(baseDir, pathPart);
        this.logger.debug(
          `Initializing LibSQL db with url ${url} with relative file path from inside .mastra/output directory. Rewriting relative file url to "file:${fullPath}". This ensures it's outside the .mastra/output directory.`
        );
        return `file:${fullPath}`;
      }
    }
    return url;
  }
  // Translate a Mongo-style filter object into this store's SQL filter form.
  transformFilter(filter) {
    const translator = new LibSQLFilterTranslator();
    return translator.translate(filter);
  }
  // Cosine-similarity search. Returns rows as { id, score, metadata, vector? },
  // filtered to score > minScore, ordered by score descending, capped at topK.
  async query(...args) {
    const params = this.normalizeArgs("query", args, ["minScore"]);
    try {
      const { indexName, queryVector, topK = 10, filter, includeVector = false, minScore = 0 } = params;
      // Query vector is passed inline as a '[x,y,...]' literal, not a bind arg.
      const vectorStr = `[${queryVector.join(",")}]`;
      const translatedFilter = this.transformFilter(filter);
      const { sql: filterQuery, values: filterValues } = buildFilterQuery(translatedFilter);
      // minScore becomes the final bind value, matching the trailing `score > ?`.
      filterValues.push(minScore);
      const query = `
        WITH vector_scores AS (
          SELECT
            vector_id as id,
            (1-vector_distance_cos(embedding, '${vectorStr}')) as score,
            metadata
            ${includeVector ? ", vector_extract(embedding) as embedding" : ""}
          FROM ${indexName}
          ${filterQuery}
        )
        SELECT *
        FROM vector_scores
        WHERE score > ?
        ORDER BY score DESC
        LIMIT ${topK}`;
      const result = await this.turso.execute({
        sql: query,
        args: filterValues
      });
      return result.rows.map(({ id, score, metadata, embedding }) => ({
        id,
        score,
        // metadata is stored as a JSON string; missing metadata becomes {}.
        metadata: JSON.parse(metadata ?? "{}"),
        ...includeVector && embedding && { vector: JSON.parse(embedding) }
      }));
    } finally {
    }
  }
  // Inserts or replaces vectors (and their JSON metadata) in one write
  // transaction. Generates random UUIDs when ids are not supplied; returns
  // the ids used. Rolls back the whole batch on any failure.
  async upsert(...args) {
    const params = this.normalizeArgs("upsert", args);
    const { indexName, vectors, metadata, ids } = params;
    const tx = await this.turso.transaction("write");
    try {
      const vectorIds = ids || vectors.map(() => crypto.randomUUID());
      for (let i = 0; i < vectors.length; i++) {
        const query = `
          INSERT INTO ${indexName} (vector_id, embedding, metadata)
          VALUES (?, vector32(?), ?)
          ON CONFLICT(vector_id) DO UPDATE SET
            embedding = vector32(?),
            metadata = ?
        `;
        await tx.execute({
          sql: query,
          // @ts-ignore
          // Embedding/metadata appear twice: once for INSERT, once for the
          // ON CONFLICT UPDATE branch.
          args: [
            vectorIds[i],
            JSON.stringify(vectors[i]),
            JSON.stringify(metadata?.[i] || {}),
            JSON.stringify(vectors[i]),
            JSON.stringify(metadata?.[i] || {})
          ]
        });
      }
      await tx.commit();
      return vectorIds;
    } catch (error) {
      await tx.rollback();
      throw error;
    }
  }
  // Creates the backing table (idempotent) for a named index with the given
  // embedding dimension, plus a libsql vector index on the embedding column.
  // Validates the index name and dimension before touching the database.
  async createIndex(...args) {
    const params = this.normalizeArgs("createIndex", args);
    const { indexName, dimension } = params;
    try {
      if (!indexName.match(/^[a-zA-Z_][a-zA-Z0-9_]*$/)) {
        throw new Error("Invalid index name format");
      }
      if (!Number.isInteger(dimension) || dimension <= 0) {
        throw new Error("Dimension must be a positive integer");
      }
      await this.turso.execute({
        sql: `
        CREATE TABLE IF NOT EXISTS ${indexName} (
          id SERIAL PRIMARY KEY,
          vector_id TEXT UNIQUE NOT NULL,
          embedding F32_BLOB(${dimension}),
          metadata TEXT DEFAULT '{}'
        );
      `,
        args: []
      });
      await this.turso.execute({
        sql: `
        CREATE INDEX IF NOT EXISTS ${indexName}_vector_idx
        ON ${indexName} (libsql_vector_idx(embedding))
      `,
        args: []
      });
    } catch (error) {
      console.error("Failed to create vector table:", error);
      throw error;
    } finally {
    }
  }
  // Drops the index's backing table (no-op if it doesn't exist).
  async deleteIndex(indexName) {
    try {
      await this.turso.execute({
        sql: `DROP TABLE IF EXISTS ${indexName}`,
        args: []
      });
    } catch (error) {
      console.error("Failed to delete vector table:", error);
      throw new Error(`Failed to delete vector table: ${error.message}`);
    } finally {
    }
  }
  // Lists index tables by scanning sqlite_master for tables whose DDL
  // mentions F32_BLOB (i.e. tables created by createIndex).
  async listIndexes() {
    try {
      const vectorTablesQuery = `
        SELECT name FROM sqlite_master
        WHERE type='table'
        AND sql LIKE '%F32_BLOB%';
      `;
      const result = await this.turso.execute({
        sql: vectorTablesQuery,
        args: []
      });
      return result.rows.map((row) => row.name);
    } catch (error) {
      throw new Error(`Failed to list vector tables: ${error.message}`);
    }
  }
  // Returns { dimension, count, metric } for an index. The dimension is
  // parsed back out of the table's DDL (F32_BLOB(n)); metric is always
  // "cosine" since that's the only distance used by query().
  async describeIndex(indexName) {
    try {
      const tableInfoQuery = `
        SELECT sql
        FROM sqlite_master
        WHERE type='table'
        AND name = ?;
      `;
      const tableInfo = await this.turso.execute({
        sql: tableInfoQuery,
        args: [indexName]
      });
      if (!tableInfo.rows[0]?.sql) {
        throw new Error(`Table ${indexName} not found`);
      }
      // Falls back to 0 if the DDL doesn't contain an F32_BLOB(n) column.
      const dimension = parseInt(tableInfo.rows[0].sql.match(/F32_BLOB\((\d+)\)/)?.[1] || "0");
      const countQuery = `
        SELECT COUNT(*) as count
        FROM ${indexName};
      `;
      const countResult = await this.turso.execute({
        sql: countQuery,
        args: []
      });
      const metric = "cosine";
      return {
        dimension,
        count: countResult?.rows?.[0]?.count ?? 0,
        metric
      };
    } catch (e) {
      throw new Error(`Failed to describe vector table: ${e.message}`);
    }
  }
  /**
   * Updates an index entry by its ID with the provided vector and/or metadata.
   *
   * @param indexName - The name of the index to update.
   * @param id - The ID of the index entry to update.
   * @param update - An object containing the vector and/or metadata to update.
   * @param update.vector - An optional array of numbers representing the new vector.
   * @param update.metadata - An optional record containing the new metadata.
   * @returns A promise that resolves when the update is complete.
   * @throws Will throw an error if no updates are provided or if the update operation fails.
   */
  async updateIndexById(indexName, id, update) {
    try {
      const updates = [];
      const args = [];
      if (update.vector) {
        updates.push("embedding = vector32(?)");
        args.push(JSON.stringify(update.vector));
      }
      if (update.metadata) {
        updates.push("metadata = ?");
        args.push(JSON.stringify(update.metadata));
      }
      if (updates.length === 0) {
        throw new Error("No updates provided");
      }
      // The id is the final bind value, matching the WHERE clause below.
      args.push(id);
      const query = `
        UPDATE ${indexName}
        SET ${updates.join(", ")}
        WHERE vector_id = ?;
      `;
      await this.turso.execute({
        sql: query,
        args
      });
    } catch (error) {
      throw new Error(`Failed to update index by id: ${id} for index: ${indexName}: ${error.message}`);
    }
  }
  // Deletes a single entry by its vector_id.
  async deleteIndexById(indexName, id) {
    try {
      await this.turso.execute({
        sql: `DELETE FROM ${indexName} WHERE vector_id = ?`,
        args: [id]
      });
    } catch (error) {
      throw new Error(`Failed to delete index by id: ${id} for index: ${indexName}: ${error.message}`);
    }
  }
  // Removes all rows from the index's table without dropping it.
  async truncateIndex(indexName) {
    await this.turso.execute({
      sql: `DELETE FROM ${indexName}`,
      args: []
    });
  }
};
|
|
1823
|
-
|
|
1824
|
-
// src/memory/memory.ts
|
|
1825
|
-
// Base class for agent memory: persists threads/messages via a storage
// backend, supports semantic recall through a vector store + embedder, and
// converts stored messages into UI-shaped chat messages.
var MastraMemory = class extends MastraBase {
  // Token budget ceiling; not referenced in this visible chunk — presumably
  // used by subclasses or elsewhere in the file (TODO confirm).
  MAX_CONTEXT_TOKENS;
  // Message/thread persistence backend (defaults to LibSQLStore).
  storage;
  // Vector store used for semantic recall (defaults to LibSQLVector).
  vector;
  // Embedding model (defaults to fastembed bge-small-en-v1.5).
  embedder;
  // Default per-thread behavior; merged with user options in the constructor.
  threadConfig = {
    lastMessages: 40,
    semanticRecall: true,
    threads: {
      generateTitle: true
      // TODO: should we disable this by default to reduce latency?
    }
  };
  constructor(config) {
    super({ component: "MEMORY", name: config.name });
    // Fall back to a local file-backed store when none is provided.
    this.storage = config.storage || new LibSQLStore({
      config: {
        url: "file:memory.db"
      }
    });
    if (config.vector) {
      this.vector = config.vector;
    } else {
      // Back-compat: prefer the deprecated memory-vector.db if it already
      // exists on disk, warning the user to migrate to memory.db.
      const oldDb = "memory-vector.db";
      const hasOldDb = fs.existsSync(path.join(process.cwd(), oldDb)) || fs.existsSync(path.join(process.cwd(), ".mastra", oldDb));
      const newDb = "memory.db";
      if (hasOldDb) {
        this.logger.warn(
          `Found deprecated Memory vector db file ${oldDb} this db is now merged with the default ${newDb} file. Delete the old one to use the new one. You will need to migrate any data if that's important to you. For now the deprecated path will be used but in a future breaking change we will only use the new db file path.`
        );
      }
      this.vector = new LibSQLVector({
        connectionUrl: hasOldDb ? `file:${oldDb}` : `file:${newDb}`
      });
    }
    if (config.embedder) {
      this.embedder = config.embedder;
    } else {
      this.embedder = defaultEmbedder("bge-small-en-v1.5");
    }
    if (config.options) {
      this.threadConfig = this.getMergedThreadConfig(config.options);
    }
  }
  // Replaces the storage backend.
  setStorage(storage) {
    this.storage = storage;
  }
  // Replaces the vector store.
  setVector(vector) {
    this.vector = vector;
  }
  // Replaces the embedding model.
  setEmbedder(embedder) {
    this.embedder = embedder;
  }
  /**
   * Get a system message to inject into the conversation.
   * This will be called before each conversation turn.
   * Implementations can override this to inject custom system messages.
   */
  async getSystemMessage(_input) {
    return null;
  }
  /**
   * Get tools that should be available to the agent.
   * This will be called when converting tools for the agent.
   * Implementations can override this to provide additional tools.
   */
  getTools(_config) {
    return {};
  }
  // Ensures a vector index exists that matches the embedder's output size.
  // Known bge models get their true dimension; anything else assumes 1536.
  // Non-default dimensions get a suffixed index name so models can coexist.
  async createEmbeddingIndex() {
    const defaultDimensions = 1536;
    const dimensionsByModelId = {
      "bge-small-en-v1.5": 384,
      "bge-base-en-v1.5": 768
    };
    const dimensions = dimensionsByModelId[this.embedder.modelId] || defaultDimensions;
    const isDefault = dimensions === defaultDimensions;
    const indexName = isDefault ? "memory_messages" : `memory_messages_${dimensions}`;
    await this.vector.createIndex({ indexName, dimension: dimensions });
    return { indexName };
  }
  // Deep-merges per-call options over the instance-level thread defaults.
  getMergedThreadConfig(config) {
    return deepMerge(this.threadConfig, config || {});
  }
  // Rough token estimate: word count * 1.3, rounded up.
  estimateTokens(text) {
    return Math.ceil(text.split(" ").length * 1.3);
  }
  // Revives stored message content: strings that look like serialized JSON
  // ("["/"{" prefixed) are parsed back, numbers are stringified, everything
  // else passes through. NOTE(review): a plain-text message that happens to
  // start with "[" or "{" would hit JSON.parse here and throw.
  parseMessages(messages) {
    return messages.map((msg) => ({
      ...msg,
      content: typeof msg.content === "string" && (msg.content.startsWith("[") || msg.content.startsWith("{")) ? JSON.parse(msg.content) : typeof msg.content === "number" ? String(msg.content) : msg.content
    }));
  }
  // Converts core messages into UI chat messages: flattens text parts into a
  // single string, records tool-call parts as toolInvocations, and folds
  // later "tool" role messages back onto the matching invocations as results.
  convertToUIMessages(messages) {
    // Applies one tool-result message to previously collected chat messages,
    // marking matching invocations as state "result", and accumulates the
    // tool result contents for lookups by later messages.
    function addToolMessageToChat({
      toolMessage,
      messages: messages2,
      toolResultContents
    }) {
      const chatMessages2 = messages2.map((message) => {
        if (message.toolInvocations) {
          return {
            ...message,
            toolInvocations: message.toolInvocations.map((toolInvocation) => {
              const toolResult = toolMessage.content.find((tool) => tool.toolCallId === toolInvocation.toolCallId);
              if (toolResult) {
                return {
                  ...toolInvocation,
                  state: "result",
                  result: toolResult.result
                };
              }
              return toolInvocation;
            })
          };
        }
        return message;
      });
      const resultContents = [...toolResultContents, ...toolMessage.content];
      return { chatMessages: chatMessages2, toolResultContents: resultContents };
    }
    const { chatMessages } = messages.reduce(
      (obj, message) => {
        if (message.role === "tool") {
          return addToolMessageToChat({
            toolMessage: message,
            messages: obj.chatMessages,
            toolResultContents: obj.toolResultContents
          });
        }
        let textContent = "";
        let toolInvocations = [];
        if (typeof message.content === "string") {
          textContent = message.content;
        } else if (typeof message.content === "number") {
          textContent = String(message.content);
        } else if (Array.isArray(message.content)) {
          // Multi-part content: concatenate text parts; tool-call parts become
          // invocations, already "result" if a prior tool message matched.
          for (const content of message.content) {
            if (content.type === "text") {
              textContent += content.text;
            } else if (content.type === "tool-call") {
              const toolResult = obj.toolResultContents.find((tool) => tool.toolCallId === content.toolCallId);
              toolInvocations.push({
                state: toolResult ? "result" : "call",
                toolCallId: content.toolCallId,
                toolName: content.toolName,
                args: content.args,
                result: toolResult?.result
              });
            }
          }
        }
        obj.chatMessages.push({
          id: message.id,
          role: message.role,
          content: textContent,
          toolInvocations
        });
        return obj;
      },
      { chatMessages: [], toolResultContents: [] }
    );
    return chatMessages;
  }
  /**
   * Helper method to create a new thread
   * @param title - Optional title for the thread
   * @param metadata - Optional metadata for the thread
   * @returns Promise resolving to the created thread
   */
  async createThread({
    threadId,
    resourceId,
    title,
    metadata,
    memoryConfig
  }) {
    const thread = {
      // Caller-provided id wins; otherwise generate a fresh UUID.
      id: threadId || this.generateId(),
      title: title || `New Thread ${(/* @__PURE__ */ new Date()).toISOString()}`,
      resourceId,
      createdAt: /* @__PURE__ */ new Date(),
      updatedAt: /* @__PURE__ */ new Date(),
      metadata
    };
    return this.saveThread({ thread, memoryConfig });
  }
  /**
   * Helper method to add a single message to a thread
   * @param threadId - The thread to add the message to
   * @param content - The message content
   * @param role - The role of the message sender
   * @param type - The type of the message
   * @param toolNames - Optional array of tool names that were called
   * @param toolCallArgs - Optional array of tool call arguments
   * @param toolCallIds - Optional array of tool call ids
   * @returns Promise resolving to the saved message
   */
  async addMessage({
    threadId,
    config,
    content,
    role,
    type,
    toolNames,
    toolCallArgs,
    toolCallIds
  }) {
    const message = {
      id: this.generateId(),
      content,
      role,
      createdAt: /* @__PURE__ */ new Date(),
      threadId,
      type,
      toolNames,
      toolCallArgs,
      toolCallIds
    };
    const savedMessages = await this.saveMessages({ messages: [message], memoryConfig: config });
    return savedMessages[0];
  }
  /**
   * Generates a unique identifier
   * @returns A unique string ID
   */
  generateId() {
    return crypto.randomUUID();
  }
};
|
|
2055
|
-
|
|
2056
|
-
exports.MastraMemory = MastraMemory;
|