@mastra/core 0.9.0 → 0.9.1-alpha.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/agent/index.cjs +2 -2
- package/dist/agent/index.d.cts +4 -2
- package/dist/agent/index.d.ts +4 -2
- package/dist/agent/index.js +1 -1
- package/dist/{base-oP3DoUrm.d.cts → base-CZmT-p10.d.cts} +524 -53
- package/dist/{base-Bk5V1doj.d.ts → base-DDoWFpFc.d.ts} +524 -53
- package/dist/{chunk-NTHHPNOW.js → chunk-34622N67.js} +1 -1
- package/dist/{chunk-ATXF6TIO.cjs → chunk-422BXQRT.cjs} +24 -677
- package/dist/{chunk-HARYMLZH.js → chunk-4K4DHXRF.js} +6 -658
- package/dist/{chunk-N2ONCUUS.js → chunk-6AHC67YY.js} +132 -12
- package/dist/chunk-7AGAXO6B.cjs +588 -0
- package/dist/{chunk-GEIPVIW4.js → chunk-BFF2O6RO.js} +1 -1
- package/dist/{chunk-RASVJ3TR.js → chunk-BU45BMXY.js} +55 -1
- package/dist/{chunk-DIZZQ3E5.cjs → chunk-CEXM6WP5.cjs} +2 -2
- package/dist/chunk-HNEE7IF4.js +60 -0
- package/dist/{chunk-NZDXKMDP.cjs → chunk-LABUWBKX.cjs} +1 -1
- package/dist/chunk-MUNFCOMB.cjs +62 -0
- package/dist/{chunk-GJWCFDFN.js → chunk-PS2ZF6MA.js} +1 -1
- package/dist/{chunk-M472GIT6.js → chunk-SGGPJWRQ.js} +1 -1
- package/dist/{chunk-7CSNWYGJ.cjs → chunk-U5DGGGS2.cjs} +2 -2
- package/dist/{chunk-W5IA5OGL.cjs → chunk-V7IE36YV.cjs} +58 -4
- package/dist/chunk-VMVXIPGW.js +586 -0
- package/dist/{chunk-53P5ZBJE.cjs → chunk-YTTBFAQJ.cjs} +2 -2
- package/dist/{chunk-KZWBYRXW.cjs → chunk-YZFNMXY4.cjs} +135 -14
- package/dist/di/index.cjs +2 -2
- package/dist/di/index.d.cts +1 -52
- package/dist/di/index.d.ts +1 -52
- package/dist/di/index.js +1 -1
- package/dist/eval/index.d.cts +4 -2
- package/dist/eval/index.d.ts +4 -2
- package/dist/index.cjs +60 -55
- package/dist/index.d.cts +5 -3
- package/dist/index.d.ts +5 -3
- package/dist/index.js +10 -9
- package/dist/integration/index.cjs +3 -3
- package/dist/integration/index.d.cts +4 -2
- package/dist/integration/index.d.ts +4 -2
- package/dist/integration/index.js +1 -1
- package/dist/llm/index.cjs +8 -0
- package/dist/llm/index.d.cts +4 -2
- package/dist/llm/index.d.ts +4 -2
- package/dist/llm/index.js +1 -1
- package/dist/mastra/index.cjs +2 -2
- package/dist/mastra/index.d.cts +4 -2
- package/dist/mastra/index.d.ts +4 -2
- package/dist/mastra/index.js +1 -1
- package/dist/memory/index.cjs +7 -3
- package/dist/memory/index.d.cts +4 -2
- package/dist/memory/index.d.ts +4 -2
- package/dist/memory/index.js +1 -1
- package/dist/network/index.cjs +4 -4
- package/dist/network/index.d.cts +4 -2
- package/dist/network/index.d.ts +4 -2
- package/dist/network/index.js +2 -2
- package/dist/relevance/index.cjs +4 -4
- package/dist/relevance/index.d.cts +4 -2
- package/dist/relevance/index.d.ts +4 -2
- package/dist/relevance/index.js +1 -1
- package/dist/runtime-context/index.cjs +7 -68
- package/dist/runtime-context/index.js +1 -69
- package/dist/server/index.d.cts +4 -2
- package/dist/server/index.d.ts +4 -2
- package/dist/storage/index.d.cts +4 -2
- package/dist/storage/index.d.ts +4 -2
- package/dist/storage/libsql/index.cjs +19 -0
- package/dist/storage/libsql/index.d.cts +7 -8
- package/dist/storage/libsql/index.d.ts +7 -8
- package/dist/storage/libsql/index.js +19 -0
- package/dist/telemetry/index.d.cts +4 -2
- package/dist/telemetry/index.d.ts +4 -2
- package/dist/tools/index.cjs +4 -4
- package/dist/tools/index.d.cts +4 -2
- package/dist/tools/index.d.ts +4 -2
- package/dist/tools/index.js +1 -1
- package/dist/utils.cjs +14 -14
- package/dist/utils.d.cts +6 -4
- package/dist/utils.d.ts +6 -4
- package/dist/utils.js +1 -1
- package/dist/voice/index.d.cts +5 -3
- package/dist/voice/index.d.ts +5 -3
- package/dist/workflows/index.cjs +22 -26
- package/dist/workflows/index.d.cts +5 -3
- package/dist/workflows/index.d.ts +5 -3
- package/dist/workflows/index.js +1 -1
- package/dist/workflows/vNext/index.cjs +1058 -0
- package/dist/workflows/vNext/index.d.cts +180 -0
- package/dist/workflows/vNext/index.d.ts +180 -0
- package/dist/workflows/vNext/index.js +1046 -0
- package/package.json +11 -1
- package/workflows/vNext.d.ts +1 -0
- /package/dist/{chunk-WEYWYKLG.cjs → chunk-27PAET7X.cjs} +0 -0
- /package/dist/{chunk-FRQFWZDN.cjs → chunk-HSVOEWAM.cjs} +0 -0
- /package/dist/{chunk-ZDWFBE5L.js → chunk-NH5WJNNS.js} +0 -0
- /package/dist/{chunk-LANFNMEE.js → chunk-SGTFVHOZ.js} +0 -0
|
@@ -0,0 +1,586 @@
|
|
|
1
|
+
import { createMastraProxy, makeCoreTool, delay } from './chunk-34622N67.js';
|
|
2
|
+
import { MastraBase } from './chunk-CLJQYXNM.js';
|
|
3
|
+
import { RegisteredLogger } from './chunk-2BVZNKLX.js';
|
|
4
|
+
import { jsonSchema, generateText, Output, generateObject, streamText, streamObject } from 'ai';
|
|
5
|
+
import { z } from 'zod';
|
|
6
|
+
|
|
7
|
+
// src/llm/model/base.ts
|
|
8
|
+
/**
 * Base class for Mastra LLM wrappers.
 *
 * Holds a reference to the underlying AI-SDK language model, normalizes
 * incoming message input, and defines the generate/stream surface that
 * concrete subclasses override — the base implementations only log the
 * call and throw "Method not implemented.".
 */
var MastraLLMBase = class extends MastraBase {
  // @ts-ignore
  #mastra;
  #model;

  constructor({ name, model }) {
    super({ component: RegisteredLogger.LLM, name });
    this.#model = model;
  }

  /** Provider identifier of the wrapped model. */
  getProvider() {
    return this.#model.provider;
  }

  /** Model identifier of the wrapped model. */
  getModelId() {
    return this.#model.modelId;
  }

  /** The wrapped model instance itself. */
  getModel() {
    return this.#model;
  }

  /**
   * Normalizes message input into an array of { role, content } objects.
   * Bare strings (standalone or inside an array) become user messages;
   * message objects pass through untouched.
   */
  convertToMessages(messages) {
    if (!Array.isArray(messages)) {
      return [{ role: "user", content: messages }];
    }
    return messages.map((m) => (typeof m === "string" ? { role: "user", content: m } : m));
  }

  /** Wires telemetry and logger primitives into this instance when provided. */
  __registerPrimitives(p) {
    if (p.telemetry) {
      this.__setTelemetry(p.telemetry);
    }
    if (p.logger) {
      this.__setLogger(p.logger);
    }
  }

  /** Stores a back-reference to the owning Mastra instance. */
  __registerMastra(p) {
    this.#mastra = p;
  }

  // --- Abstract-in-spirit methods: subclasses are expected to override. ---

  async __text(input) {
    this.logger.debug(`[LLMs:${this.name}] Generating text.`, { input });
    throw new Error("Method not implemented.");
  }

  async __textObject(input) {
    this.logger.debug(`[LLMs:${this.name}] Generating object.`, { input });
    throw new Error("Method not implemented.");
  }

  async generate(messages, options) {
    this.logger.debug(`[LLMs:${this.name}] Generating text.`, { messages, options });
    throw new Error("Method not implemented.");
  }

  async __stream(input) {
    this.logger.debug(`[LLMs:${this.name}] Streaming text.`, { input });
    throw new Error("Method not implemented.");
  }

  async __streamObject(input) {
    this.logger.debug(`[LLMs:${this.name}] Streaming object.`, { input });
    throw new Error("Method not implemented.");
  }

  async stream(messages, options) {
    this.logger.debug(`[LLMs:${this.name}] Streaming text.`, { messages, options });
    throw new Error("Method not implemented.");
  }
};
|
|
83
|
+
|
|
84
|
+
// src/llm/model/model.ts
|
|
85
|
+
/**
 * Concrete AI-SDK backed LLM wrapper.
 *
 * Converts Mastra tool definitions into AI-SDK core tools, then delegates to
 * the `ai` package primitives: `generateText` / `generateObject` for one-shot
 * calls and `streamText` / `streamObject` for streaming. Shared concerns —
 * rate-limit backoff on each step and zod/JSON-schema resolution for
 * structured output — are factored into private helpers so each entry point
 * stays focused on assembling its call arguments.
 */
var MastraLLM = class extends MastraLLMBase {
  #model;
  #mastra;

  constructor({ model, mastra }) {
    super({ name: "aisdk", model });
    this.#model = model;
    if (mastra) {
      this.#mastra = mastra;
      if (mastra.getLogger()) {
        this.__setLogger(mastra.getLogger());
      }
    }
  }

  /** Wires telemetry and logger primitives into this instance when provided. */
  __registerPrimitives(p) {
    if (p.telemetry) {
      this.__setTelemetry(p.telemetry);
    }
    if (p.logger) {
      this.__setLogger(p.logger);
    }
  }

  /** Stores a back-reference to the owning Mastra instance. */
  __registerMastra(p) {
    this.#mastra = p;
  }

  /** Provider identifier of the wrapped model. */
  getProvider() {
    return this.#model.provider;
  }

  /** Model identifier of the wrapped model. */
  getModelId() {
    return this.#model.modelId;
  }

  /** The wrapped model instance itself. */
  getModel() {
    return this.#model;
  }

  /**
   * Sleeps 10 seconds when the provider response reports fewer than 2000
   * remaining rate-limit tokens. Shared by every onStepFinish callback.
   */
  async #throttleOnRateLimit(props, runId) {
    if (
      props?.response?.headers?.["x-ratelimit-remaining-tokens"] &&
      parseInt(props?.response?.headers?.["x-ratelimit-remaining-tokens"], 10) < 2e3
    ) {
      this.logger.warn("Rate limit approaching, waiting 10 seconds", { runId });
      await delay(10 * 1e3);
    }
  }

  /**
   * Resolves an `experimental_output` option into a schema for
   * `Output.object`. Zod schemas pass through (arrays are unwrapped to their
   * element type); plain JSON schemas are wrapped with `jsonSchema`. Returns
   * undefined when no experimental output was requested.
   */
  #resolveExperimentalSchema(experimental_output, runId) {
    if (!experimental_output) {
      return void 0;
    }
    this.logger.debug("[LLM] - Using experimental output", { runId });
    if (typeof experimental_output.parse === "function") {
      let schema = experimental_output;
      if (schema instanceof z.ZodArray) {
        schema = schema._def.type;
      }
      return schema;
    }
    return jsonSchema(experimental_output);
  }

  /**
   * Resolves a `structuredOutput` option into the `{ output, schema }` pair
   * expected by `generateObject` / `streamObject`. A zod array becomes
   * output mode "array" with its element schema; everything else is "object".
   */
  #resolveStructuredSchema(structuredOutput) {
    if (typeof structuredOutput.parse === "function") {
      if (structuredOutput instanceof z.ZodArray) {
        return { output: "array", schema: structuredOutput._def.type };
      }
      return { output: "object", schema: structuredOutput };
    }
    return { output: "object", schema: jsonSchema(structuredOutput) };
  }

  /**
   * Converts a map of Mastra tool definitions into AI-SDK core tools,
   * attaching run/thread/resource context and (when available) a proxy to the
   * owning Mastra instance. Skips falsy entries.
   */
  convertTools({ tools, runId, threadId, resourceId, memory, runtimeContext }) {
    this.logger.debug("Starting tool conversion for LLM");
    let mastraProxy = void 0;
    const logger = this.logger;
    if (this.#mastra) {
      mastraProxy = createMastraProxy({ mastra: this.#mastra, logger });
    }
    const converted = Object.entries(tools || {}).reduce((memo, [k, tool]) => {
      if (tool) {
        const options = {
          name: k,
          runId,
          threadId,
          resourceId,
          logger: this.logger,
          memory,
          mastra: mastraProxy,
          runtimeContext
        };
        memo[k] = makeCoreTool(tool, options);
      }
      return memo;
    }, {});
    this.logger.debug(`Converted tools for LLM`);
    return converted;
  }

  /**
   * One-shot text generation via `generateText`, with optional experimental
   * structured output. `convertedTools` short-circuits tool conversion.
   */
  async __text({
    runId,
    messages,
    maxSteps = 5,
    tools,
    convertedTools,
    temperature,
    toolChoice = "auto",
    onStepFinish,
    experimental_output,
    telemetry,
    threadId,
    resourceId,
    memory,
    runtimeContext,
    ...rest
  }) {
    const model = this.#model;
    this.logger.debug(`[LLM] - Generating text`, {
      runId,
      messages,
      maxSteps,
      threadId,
      resourceId,
      tools: Object.keys(tools || convertedTools || {})
    });
    const finalTools = convertedTools || this.convertTools({ tools, runId, threadId, resourceId, memory, runtimeContext });
    const argsForExecute = {
      model,
      temperature,
      tools: {
        ...finalTools
      },
      toolChoice,
      maxSteps,
      onStepFinish: async (props) => {
        // Caller callback runs first; its promise is deliberately not awaited.
        void onStepFinish?.(props);
        this.logger.debug("[LLM] - Step Change:", {
          text: props?.text,
          toolCalls: props?.toolCalls,
          toolResults: props?.toolResults,
          finishReason: props?.finishReason,
          usage: props?.usage,
          runId
        });
        await this.#throttleOnRateLimit(props, runId);
      },
      ...rest
    };
    const schema = this.#resolveExperimentalSchema(experimental_output, runId);
    return await generateText({
      messages,
      ...argsForExecute,
      experimental_telemetry: {
        ...this.experimental_telemetry,
        ...telemetry
      },
      experimental_output: schema ? Output.object({
        schema
      }) : void 0
    });
  }

  /**
   * One-shot structured-object generation via `generateObject`.
   * `structuredOutput` may be a zod schema or a plain JSON schema.
   */
  async __textObject({
    messages,
    onStepFinish,
    maxSteps = 5,
    tools,
    convertedTools,
    structuredOutput,
    runId,
    temperature,
    toolChoice = "auto",
    telemetry,
    threadId,
    resourceId,
    memory,
    runtimeContext,
    ...rest
  }) {
    const model = this.#model;
    this.logger.debug(`[LLM] - Generating a text object`, { runId });
    const finalTools = convertedTools || this.convertTools({ tools, runId, threadId, resourceId, memory, runtimeContext });
    const argsForExecute = {
      model,
      temperature,
      tools: {
        ...finalTools
      },
      maxSteps,
      toolChoice,
      onStepFinish: async (props) => {
        void onStepFinish?.(props);
        this.logger.debug("[LLM] - Step Change:", {
          text: props?.text,
          toolCalls: props?.toolCalls,
          toolResults: props?.toolResults,
          finishReason: props?.finishReason,
          usage: props?.usage,
          runId
        });
        await this.#throttleOnRateLimit(props, runId);
      },
      ...rest
    };
    const { output, schema } = this.#resolveStructuredSchema(structuredOutput);
    return await generateObject({
      messages,
      ...argsForExecute,
      output,
      schema,
      experimental_telemetry: {
        ...this.experimental_telemetry,
        ...telemetry
      }
    });
  }

  /**
   * Streaming text generation via `streamText`, with optional experimental
   * structured output and an onFinish hook for end-of-stream logging.
   */
  async __stream({
    messages,
    onStepFinish,
    onFinish,
    maxSteps = 5,
    tools,
    convertedTools,
    runId,
    temperature,
    toolChoice = "auto",
    experimental_output,
    telemetry,
    threadId,
    resourceId,
    memory,
    runtimeContext,
    ...rest
  }) {
    const model = this.#model;
    this.logger.debug(`[LLM] - Streaming text`, {
      runId,
      threadId,
      resourceId,
      messages,
      maxSteps,
      tools: Object.keys(tools || convertedTools || {})
    });
    const finalTools = convertedTools || this.convertTools({ tools, runId, threadId, resourceId, memory, runtimeContext });
    const argsForExecute = {
      model,
      temperature,
      tools: {
        ...finalTools
      },
      maxSteps,
      toolChoice,
      onStepFinish: async (props) => {
        void onStepFinish?.(props);
        this.logger.debug("[LLM] - Stream Step Change:", {
          text: props?.text,
          toolCalls: props?.toolCalls,
          toolResults: props?.toolResults,
          finishReason: props?.finishReason,
          usage: props?.usage,
          runId
        });
        await this.#throttleOnRateLimit(props, runId);
      },
      onFinish: async (props) => {
        void onFinish?.(props);
        this.logger.debug("[LLM] - Stream Finished:", {
          text: props?.text,
          toolCalls: props?.toolCalls,
          toolResults: props?.toolResults,
          finishReason: props?.finishReason,
          usage: props?.usage,
          runId,
          threadId,
          resourceId
        });
      },
      ...rest
    };
    const schema = this.#resolveExperimentalSchema(experimental_output, runId);
    return await streamText({
      messages,
      ...argsForExecute,
      experimental_telemetry: {
        ...this.experimental_telemetry,
        ...telemetry
      },
      experimental_output: schema ? Output.object({
        schema
      }) : void 0
    });
  }

  /**
   * Streaming structured-object generation via `streamObject`.
   * Note: the stream result is returned directly (not awaited), matching
   * `streamObject`'s synchronous return of a stream handle.
   */
  async __streamObject({
    messages,
    runId,
    tools,
    convertedTools,
    maxSteps = 5,
    toolChoice = "auto",
    runtimeContext,
    threadId,
    resourceId,
    memory,
    temperature,
    onStepFinish,
    onFinish,
    structuredOutput,
    telemetry,
    ...rest
  }) {
    const model = this.#model;
    this.logger.debug(`[LLM] - Streaming structured output`, {
      runId,
      messages,
      maxSteps,
      tools: Object.keys(tools || convertedTools || {})
    });
    const finalTools = convertedTools || this.convertTools({ tools, runId, threadId, resourceId, memory, runtimeContext });
    const argsForExecute = {
      model,
      temperature,
      tools: {
        ...finalTools
      },
      maxSteps,
      toolChoice,
      onStepFinish: async (props) => {
        void onStepFinish?.(props);
        this.logger.debug("[LLM] - Stream Step Change:", {
          text: props?.text,
          toolCalls: props?.toolCalls,
          toolResults: props?.toolResults,
          finishReason: props?.finishReason,
          usage: props?.usage,
          runId,
          threadId,
          resourceId
        });
        await this.#throttleOnRateLimit(props, runId);
      },
      onFinish: async (props) => {
        void onFinish?.(props);
        this.logger.debug("[LLM] - Stream Finished:", {
          text: props?.text,
          toolCalls: props?.toolCalls,
          toolResults: props?.toolResults,
          finishReason: props?.finishReason,
          usage: props?.usage,
          runId,
          threadId,
          resourceId
        });
      },
      ...rest
    };
    const { output, schema } = this.#resolveStructuredSchema(structuredOutput);
    return streamObject({
      messages,
      ...argsForExecute,
      output,
      schema,
      experimental_telemetry: {
        ...this.experimental_telemetry,
        ...telemetry
      }
    });
  }

  /**
   * Public entry point: text generation when no `output` schema is given,
   * structured-object generation otherwise.
   */
  async generate(messages, { maxSteps = 5, output, ...rest }) {
    const msgs = this.convertToMessages(messages);
    if (!output) {
      return await this.__text({
        messages: msgs,
        maxSteps,
        ...rest
      });
    }
    return await this.__textObject({
      messages: msgs,
      structuredOutput: output,
      maxSteps,
      ...rest
    });
  }

  /**
   * Public entry point: streaming text when no `output` schema is given,
   * streaming structured-object generation otherwise.
   */
  async stream(messages, { maxSteps = 5, output, ...rest }) {
    const msgs = this.convertToMessages(messages);
    if (!output) {
      return await this.__stream({
        messages: msgs,
        maxSteps,
        ...rest
      });
    }
    return await this.__streamObject({
      messages: msgs,
      structuredOutput: output,
      maxSteps,
      ...rest
    });
  }

  /**
   * Converts core messages into UI chat messages, folding `role: "tool"`
   * result messages back into the matching tool invocations (marking them
   * `state: "result"`) on earlier assistant messages.
   */
  convertToUIMessages(messages) {
    // Merges one tool-result message into the accumulated chat messages and
    // appends its result contents to the running toolResultContents list.
    function addToolMessageToChat({ toolMessage, messages: messages2, toolResultContents }) {
      const chatMessages2 = messages2.map((message) => {
        if (message.toolInvocations) {
          return {
            ...message,
            toolInvocations: message.toolInvocations.map((toolInvocation) => {
              const toolResult = toolMessage.content.find((tool) => tool.toolCallId === toolInvocation.toolCallId);
              if (toolResult) {
                return {
                  ...toolInvocation,
                  state: "result",
                  result: toolResult.result
                };
              }
              return toolInvocation;
            })
          };
        }
        return message;
      });
      const resultContents = [...toolResultContents, ...toolMessage.content];
      return { chatMessages: chatMessages2, toolResultContents: resultContents };
    }
    const { chatMessages } = messages.reduce(
      (obj, message) => {
        if (message.role === "tool") {
          return addToolMessageToChat({
            toolMessage: message,
            messages: obj.chatMessages,
            toolResultContents: obj.toolResultContents
          });
        }
        let textContent = "";
        const toolInvocations = [];
        if (typeof message.content === "string") {
          textContent = message.content;
        } else if (typeof message.content === "number") {
          textContent = String(message.content);
        } else if (Array.isArray(message.content)) {
          for (const content of message.content) {
            if (content.type === "text") {
              textContent += content.text;
            } else if (content.type === "tool-call") {
              const toolResult = obj.toolResultContents.find((tool) => tool.toolCallId === content.toolCallId);
              toolInvocations.push({
                state: toolResult ? "result" : "call",
                toolCallId: content.toolCallId,
                toolName: content.toolName,
                args: content.args,
                result: toolResult?.result
              });
            }
          }
        }
        obj.chatMessages.push({
          id: message.id,
          role: message.role,
          content: textContent,
          toolInvocations
        });
        return obj;
      },
      { chatMessages: [], toolResultContents: [] }
    );
    return chatMessages;
  }
};
|
|
585
|
+
|
|
586
|
+
export { MastraLLM };
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
'use strict';
|
|
2
2
|
|
|
3
|
-
var
|
|
3
|
+
var chunkLABUWBKX_cjs = require('./chunk-LABUWBKX.cjs');
|
|
4
4
|
var crypto = require('crypto');
|
|
5
5
|
var ai = require('ai');
|
|
6
6
|
var jsonSchemaToZod = require('json-schema-to-zod');
|
|
@@ -245,7 +245,7 @@ function createExecute(tool, options, logType) {
|
|
|
245
245
|
mastra: options.mastra,
|
|
246
246
|
memory: options.memory,
|
|
247
247
|
runId: options.runId,
|
|
248
|
-
runtimeContext: runtimeContext ?? new
|
|
248
|
+
runtimeContext: runtimeContext ?? new chunkLABUWBKX_cjs.RuntimeContext()
|
|
249
249
|
},
|
|
250
250
|
execOptions
|
|
251
251
|
) ?? void 0;
|