@mastra/core 0.4.3-alpha.1 → 0.4.3-alpha.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (90)
  1. package/dist/agent/index.cjs +1939 -0
  2. package/dist/agent/index.d.cts +16 -0
  3. package/dist/agent/index.d.ts +3 -2
  4. package/dist/agent/index.js +1 -1
  5. package/dist/base-CbCmDLUb.d.cts +944 -0
  6. package/dist/{base-JKlKFf5I.d.ts → base-DqRXkxCw.d.ts} +59 -45
  7. package/dist/base-hs9NDAZ2.d.cts +139 -0
  8. package/dist/base.cjs +138 -0
  9. package/dist/base.d.cts +6 -0
  10. package/dist/bundler/index.cjs +158 -0
  11. package/dist/bundler/index.d.cts +28 -0
  12. package/dist/{chunk-HQ55LN2U.js → chunk-5NRHVENG.js} +2 -2
  13. package/dist/{chunk-B3M27AMP.js → chunk-C2YWDMSM.js} +44 -21
  14. package/dist/{chunk-FZMBA5CV.js → chunk-E7K35ROR.js} +141 -67
  15. package/dist/chunk-I5KM37BN.js +29 -0
  16. package/dist/{chunk-R5DDQQJT.js → chunk-JXEH6PBQ.js} +1 -1
  17. package/dist/{chunk-33GSTUNK.js → chunk-L7SWFW3L.js} +13 -8
  18. package/dist/{chunk-SIFBBGY6.js → chunk-LGERQTJM.js} +1 -1
  19. package/dist/deployer/index.cjs +165 -0
  20. package/dist/deployer/index.d.cts +19 -0
  21. package/dist/eval/index.cjs +110 -0
  22. package/dist/eval/index.d.cts +29 -0
  23. package/dist/eval/index.d.ts +2 -1
  24. package/dist/hooks/index.cjs +87 -0
  25. package/dist/hooks/index.d.cts +33 -0
  26. package/dist/index-mKY1XrpK.d.cts +90 -0
  27. package/dist/index.cjs +6984 -0
  28. package/dist/index.d.cts +97 -0
  29. package/dist/index.d.ts +7 -7
  30. package/dist/index.js +6 -7
  31. package/dist/integration/index.cjs +113 -0
  32. package/dist/integration/index.d.cts +53 -0
  33. package/dist/integration/index.d.ts +3 -2
  34. package/dist/llm/index.cjs +2 -0
  35. package/dist/llm/index.d.cts +16 -0
  36. package/dist/llm/index.d.ts +2 -1
  37. package/dist/logger/index.cjs +159 -0
  38. package/dist/logger/index.d.cts +3 -0
  39. package/dist/mastra/index.cjs +1741 -0
  40. package/dist/mastra/index.d.cts +68 -0
  41. package/dist/mastra/index.d.ts +3 -2
  42. package/dist/memory/index.cjs +1931 -0
  43. package/dist/memory/index.d.cts +16 -0
  44. package/dist/memory/index.d.ts +2 -1
  45. package/dist/memory/index.js +1 -1
  46. package/dist/relevance/index.cjs +2001 -0
  47. package/dist/relevance/index.d.cts +21 -0
  48. package/dist/relevance/index.js +1 -1
  49. package/dist/storage/index.cjs +361 -0
  50. package/dist/storage/index.d.cts +16 -0
  51. package/dist/storage/index.d.ts +2 -1
  52. package/dist/storage/libsql/index.cjs +770 -0
  53. package/dist/storage/libsql/index.d.cts +82 -0
  54. package/dist/storage/libsql/index.d.ts +2 -1
  55. package/dist/telemetry/index.cjs +413 -0
  56. package/dist/telemetry/index.d.cts +52 -0
  57. package/dist/telemetry/index.d.ts +2 -1
  58. package/dist/telemetry/otel-vendor.cjs +52 -0
  59. package/dist/telemetry/otel-vendor.d.cts +7 -0
  60. package/dist/tools/index.cjs +25 -0
  61. package/dist/tools/index.d.cts +30 -0
  62. package/dist/tools/index.d.ts +6 -5
  63. package/dist/tts/index.cjs +328 -0
  64. package/dist/tts/index.d.cts +28 -0
  65. package/dist/types-m9RryK9a.d.cts +14 -0
  66. package/dist/utils.cjs +179 -0
  67. package/dist/utils.d.cts +26 -0
  68. package/dist/vector/filter/index.cjs +192 -0
  69. package/dist/vector/filter/index.d.cts +90 -0
  70. package/dist/{filter → vector/filter}/index.d.ts +7 -7
  71. package/dist/vector/filter/index.js +1 -0
  72. package/dist/vector/index.cjs +164 -0
  73. package/dist/vector/index.d.cts +58 -0
  74. package/dist/vector/index.d.ts +32 -4
  75. package/dist/vector/index.js +1 -1
  76. package/dist/vector/libsql/index.cjs +975 -0
  77. package/dist/vector/libsql/index.d.cts +32 -0
  78. package/dist/vector/libsql/index.d.ts +9 -6
  79. package/dist/vector/libsql/index.js +1 -1
  80. package/dist/voice/index.cjs +369 -0
  81. package/dist/voice/index.d.cts +67 -0
  82. package/dist/workflow-DlRFMI4Q.d.cts +96 -0
  83. package/dist/{workflow-VWNjiLwe.d.ts → workflow-fGgxPZk4.d.ts} +14 -2
  84. package/dist/workflows/index.cjs +1652 -0
  85. package/dist/workflows/index.d.cts +58 -0
  86. package/dist/workflows/index.d.ts +14 -4
  87. package/dist/workflows/index.js +1 -1
  88. package/package.json +37 -3
  89. package/dist/chunk-55NFNRKO.js +0 -10
  90. package/dist/filter/index.js +0 -1
@@ -0,0 +1,1939 @@
1
+ 'use strict';
2
+
3
+ var crypto$1 = require('crypto');
4
+ var zod = require('zod');
5
+ var stream = require('stream');
6
+ var pino = require('pino');
7
+ var pretty = require('pino-pretty');
8
+ var ai = require('ai');
9
+ var api = require('@opentelemetry/api');
10
+
11
// Normalize a CommonJS require() result to an ES-module-like namespace:
// modules flagged `__esModule` pass through unchanged, anything else is
// wrapped so the value is reachable as the `default` export.
function _interopDefault(e) {
  if (e && e.__esModule) {
    return e;
  }
  return { default: e };
}
12
+
13
+ var pino__default = /*#__PURE__*/_interopDefault(pino);
14
+ var pretty__default = /*#__PURE__*/_interopDefault(pretty);
15
+
16
// esbuild-generated runtime helpers implementing the TC39 decorators
// proposal. Generated code — do not edit by hand.
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
// Resolve a well-known symbol (e.g. Symbol.metadata), falling back to the
// global symbol registry when the engine does not define it natively.
var __knownSymbol = (name, symbol) => (symbol = Symbol[name]) ? symbol : Symbol.for("Symbol." + name);
var __typeError = msg => {
  throw TypeError(msg);
};
// Define `key` as an enumerable/configurable/writable data property when it
// already exists on `obj`; otherwise plain assignment suffices.
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, {
  enumerable: true,
  configurable: true,
  writable: true,
  value
}) : obj[key] = value;
// Force a function/class `name` (configurable so later decorators may rename).
var __name = (target, value) => __defProp(target, "name", {
  value,
  configurable: true
});
// Initialize the 4-slot decorator state array; slot 3 holds the metadata
// object, prototype-chained to the base class's metadata.
var __decoratorStart = base => [,,, __create(base?.[__knownSymbol("metadata")] ?? null)];
var __decoratorStrings = ["class", "method", "getter", "setter", "accessor", "field", "value", "get", "set"];
// Validate that a decorator's return value is a function (or undefined).
var __expectFn = fn => fn !== void 0 && typeof fn !== "function" ? __typeError("Function expected") : fn;
// Build the `context` object passed to each decorator; `addInitializer`
// throws once the decorator has already run (done._ set by the caller).
var __decoratorContext = (kind, name, done, metadata, fns) => ({
  kind: __decoratorStrings[kind],
  name,
  metadata,
  addInitializer: fn => done._ ? __typeError("Already initialized") : fns.push(__expectFn(fn || null))
});
// Attach the collected decorator metadata to the target via Symbol.metadata.
var __decoratorMetadata = (array, target) => __defNormalProp(target, __knownSymbol("metadata"), array[3]);
// Run the extra initializers registered via addInitializer, then return
// `value` unchanged.
var __runInitializers = (array, flags, self, value) => {
  for (var i = 0, fns = array[flags >> 1], n = fns && fns.length; i < n; i++) fns[i].call(self);
  return value;
};
// Apply `decorators` to a class element. `flags & 7` encodes the element
// kind (0 = class); decorators run in reverse order, each one's return value
// (if a function) replacing the target.
var __decorateElement = (array, flags, name, decorators, target, extra) => {
  var it,
    done,
    ctx,
    k = flags & 7,
    p = false;
  var j = 0;
  var extraInitializers = array[j] || (array[j] = []);
  var desc = k && ((target = target.prototype), k < 5 && (k > 3 || !p) && __getOwnPropDesc(target, name));
  __name(target, name);
  for (var i = decorators.length - 1; i >= 0; i--) {
    ctx = __decoratorContext(k, name, done = {}, array[3], extraInitializers);
    it = (0, decorators[i])(target, ctx), done._ = 1;
    __expectFn(it) && (target = it);
  }
  return __decoratorMetadata(array, target), desc && __defProp(target, name, desc), p ? k ^ 4 ? extra : desc : target;
};
64
// Component names that can register with the logging subsystem.
var RegisteredLogger = {
  AGENT: "AGENT",
  LLM: "LLM"
};
// Log severity names understood by Logger (pino level strings).
var LogLevel = {
  INFO: "info"
};
69
// Pino-backed application logger. Custom transports (keyed by id) are fanned
// out alongside a pretty-printed console stream, unless
// `overrideDefaultTransports` is set, in which case only
// `options.transports.default` receives output.
var Logger = class {
  logger; // underlying pino instance
  transports; // id -> transport stream; used for fan-out and log retrieval
  constructor(options = {}) {
    this.transports = options.transports || {};
    const transportsAry = Object.entries(this.transports);
    // Destination selection: explicit default transport when overridden;
    // pretty console when no transports registered; otherwise a multistream
    // fanning out to every transport plus the pretty console.
    this.logger = pino__default.default({
      name: options.name || "app",
      level: options.level || LogLevel.INFO
    }, options.overrideDefaultTransports ? options?.transports?.default : transportsAry.length === 0 ? pretty__default.default({
      colorize: true,
      levelFirst: true,
      ignore: "pid,hostname",
      colorizeObjects: true,
      translateTime: "SYS:standard",
      singleLine: false
    }) : pino__default.default.multistream([...transportsAry.map(([_, transport]) => ({
      stream: transport,
      level: options.level || LogLevel.INFO
    })), {
      stream: pretty__default.default({
        colorize: true,
        levelFirst: true,
        ignore: "pid,hostname",
        colorizeObjects: true,
        translateTime: "SYS:standard",
        singleLine: false
      }),
      level: options.level || LogLevel.INFO
    }]));
  }
  // The level methods forward (args, message) in pino's (mergeObject, msg)
  // argument order.
  debug(message, args = {}) {
    this.logger.debug(args, message);
  }
  info(message, args = {}) {
    this.logger.info(args, message);
  }
  warn(message, args = {}) {
    this.logger.warn(args, message);
  }
  error(message, args = {}) {
    this.logger.error(args, message);
  }
  // Stream creation for process output handling
  // Returns a pass-through Transform that logs each non-empty line at info
  // level while forwarding the chunk downstream unchanged.
  createStream() {
    return new stream.Transform({
      transform: (chunk, _encoding, callback) => {
        const line = chunk.toString().trim();
        if (line) {
          this.info(line);
        }
        callback(null, chunk);
      }
    });
  }
  // Fetch buffered logs from one transport; [] when the id is missing or
  // unknown. NOTE(review): assumes the transport implements getLogs() —
  // confirm against the transport interface.
  async getLogs(transportId) {
    if (!transportId || !this.transports[transportId]) {
      return [];
    }
    return this.transports[transportId].getLogs();
  }
  // Fetch logs for a single run from one transport; resolves to undefined
  // when the transport id is unknown.
  async getLogsByRunId({
    runId,
    transportId
  }) {
    return this.transports[transportId]?.getLogsByRunId({
      runId
    });
  }
};
139
/**
 * Convenience factory for Logger.
 * @param options - same options object accepted by the Logger constructor
 * @returns a new Logger instance
 */
function createLogger(options) {
  const instance = new Logger(options);
  return instance;
}
142
+
143
// src/base.ts
// Common base for Mastra components: owns a component tag, a named logger,
// and optional telemetry, plus the setters used during registration.
var MastraBase = class {
  component = RegisteredLogger.LLM;
  logger;
  name;
  telemetry;
  constructor({
    component,
    name
  }) {
    const resolvedComponent = component || RegisteredLogger.LLM;
    this.component = resolvedComponent;
    this.name = name;
    // Default logger is named "<component> - <name>"; usually replaced later
    // via __setLogger during registration.
    this.logger = createLogger({
      name: `${this.component} - ${this.name}`
    });
  }
  /**
   * Set the logger for the agent
   * @param logger
   */
  __setLogger(logger) {
    this.logger = logger;
    this.logger.debug(`Logger updated [component=${this.component}] [name=${this.name}]`);
  }
  /**
   * Set the telemetry for the
   * @param telemetry
   */
  __setTelemetry(telemetry) {
    this.telemetry = telemetry;
    this.logger.debug(`Telemetry updated [component=${this.component}] [tracer=${this.telemetry.tracer}]`);
  }
  /**
   * Get the telemetry on the vector
   * @returns telemetry
   */
  __getTelemetry() {
    return this.telemetry;
  }
  /*
    get experimental_telemetry config
    */
  get experimental_telemetry() {
    if (!this.telemetry) {
      return undefined;
    }
    return {
      // tracer: this.telemetry.tracer,
      tracer: this.telemetry.getBaggageTracer(),
      isEnabled: Boolean(this.telemetry.tracer)
    };
  }
};
193
+
194
// src/hooks/mitt.ts
// Tiny functional event emitter (vendored mitt). Handlers live in a Map of
// event name -> handler array; '*' registers a wildcard handler that fires
// after the type-matched handlers.
function mitt(all) {
  const registry = all || /* @__PURE__ */ new Map();
  return {
    /**
     * A Map of event names to registered handler functions.
     */
    all: registry,
    /**
     * Register an event handler for the given type.
     * @param {string|symbol} type Type of event to listen for, or `'*'` for all events
     * @param {Function} handler Function to call in response to given event
     * @memberOf mitt
     */
    on(type, handler) {
      const existing = registry.get(type);
      if (existing) {
        existing.push(handler);
      } else {
        registry.set(type, [handler]);
      }
    },
    /**
     * Remove an event handler for the given type.
     * If `handler` is omitted, all handlers of the given type are removed.
     * @param {string|symbol} type Type of event to unregister `handler` from (`'*'` to remove a wildcard handler)
     * @param {Function} [handler] Handler function to remove
     * @memberOf mitt
     */
    off(type, handler) {
      const existing = registry.get(type);
      if (!existing) {
        return;
      }
      if (handler) {
        // >>> 0 turns indexOf's -1 into a huge index, making splice a no-op
        // when the handler is not registered.
        existing.splice(existing.indexOf(handler) >>> 0, 1);
      } else {
        registry.set(type, []);
      }
    },
    /**
     * Invoke all handlers for the given type.
     * If present, `'*'` handlers are invoked after type-matched handlers.
     *
     * Note: Manually firing '*' handlers is not supported.
     *
     * @param {string|symbol} type The event type to invoke
     * @param {Any} [evt] Any value (object is recommended and powerful), passed to each handler
     * @memberOf mitt
     */
    emit(type, evt) {
      const typed = registry.get(type);
      if (typed) {
        // slice() snapshots the list so handlers may (un)subscribe safely
        // while the event is being dispatched.
        for (const handler of typed.slice()) {
          handler(evt);
        }
      }
      const wildcard = registry.get("*");
      if (wildcard) {
        for (const handler of wildcard.slice()) {
          handler(type, evt);
        }
      }
    }
  };
}
259
+
260
// src/hooks/index.ts
// Module-level event bus shared by all hook producers/consumers.
var hooks = mitt();
// Fire-and-forget hook dispatch: emission is deferred to the next event-loop
// turn so hook handlers never block (or throw into) the caller.
function executeHook(hook, data) {
  const deliver = () => {
    hooks.emit(hook, data);
  };
  setImmediate(deliver);
}
267
+
268
// src/llm/model/base.ts
// Abstract LLM surface shared by all model wrappers. Subclasses override the
// __text/__textObject/__stream/__streamObject primitives and generate/stream;
// this base only normalizes messages, wires Mastra primitives, and logs.
var MastraLLMBase = class extends MastraBase {
  // @ts-ignore
  #mastra;
  #model;
  constructor({
    name,
    model
  }) {
    super({
      component: RegisteredLogger.LLM,
      name
    });
    this.#model = model;
  }
  getProvider() {
    return this.#model.provider;
  }
  getModelId() {
    return this.#model.modelId;
  }
  getModel() {
    return this.#model;
  }
  // Normalize string | message | array input into an array of messages;
  // bare strings become user messages, message objects pass through.
  convertToMessages(messages) {
    if (!Array.isArray(messages)) {
      return [{
        role: "user",
        content: messages
      }];
    }
    return messages.map(entry => {
      if (typeof entry !== "string") {
        return entry;
      }
      return {
        role: "user",
        content: entry
      };
    });
  }
  // Adopt telemetry/logger from the registered Mastra primitives.
  __registerPrimitives(p) {
    if (p.telemetry) {
      this.__setTelemetry(p.telemetry);
    }
    if (p.logger) {
      this.__setLogger(p.logger);
    }
    this.#mastra = p;
  }
  // The following are abstract: they log the request and throw. Subclasses
  // must provide real implementations.
  async __text(input) {
    this.logger.debug(`[LLMs:${this.name}] Generating text.`, {
      input
    });
    throw new Error("Method not implemented.");
  }
  async __textObject(input) {
    this.logger.debug(`[LLMs:${this.name}] Generating object.`, {
      input
    });
    throw new Error("Method not implemented.");
  }
  async generate(messages, options = {}) {
    this.logger.debug(`[LLMs:${this.name}] Generating text.`, {
      messages,
      options
    });
    throw new Error("Method not implemented.");
  }
  async __stream(input) {
    this.logger.debug(`[LLMs:${this.name}] Streaming text.`, {
      input
    });
    throw new Error("Method not implemented.");
  }
  async __streamObject(input) {
    this.logger.debug(`[LLMs:${this.name}] Streaming object.`, {
      input
    });
    throw new Error("Method not implemented.");
  }
  async stream(messages, options = {}) {
    this.logger.debug(`[LLMs:${this.name}] Streaming text.`, {
      messages,
      options
    });
    throw new Error("Method not implemented.");
  }
};
357
// Promise-based sleep: resolves (with undefined) after `ms` milliseconds.
var delay = ms => new Promise(resolve => {
  setTimeout(() => resolve(), ms);
});
358
+
359
// src/llm/model/model.ts
// Concrete LLM wrapper around an AI-SDK language model. Bridges Mastra tool
// definitions to the AI-SDK tool shape and exposes text / structured-object
// generation and streaming.
var MastraLLM = class extends MastraLLMBase {
  #model;
  #mastra;
  constructor({
    model,
    mastra
  }) {
    super({
      name: "aisdk",
      model
    });
    this.#model = model;
    if (mastra) {
      this.#mastra = mastra;
      if (mastra.logger) {
        this.__setLogger(mastra.logger);
      }
    }
  }
  // Adopt telemetry/logger from the registered Mastra primitives.
  __registerPrimitives(p) {
    if (p.telemetry) {
      this.__setTelemetry(p.telemetry);
    }
    if (p.logger) {
      this.__setLogger(p.logger);
    }
    this.#mastra = p;
  }
  getProvider() {
    return this.#model.provider;
  }
  getModelId() {
    return this.#model.modelId;
  }
  getModel() {
    return this.#model;
  }
  // Shared onStepFinish tail: when the provider reports fewer than 2000
  // remaining tokens, warn and pause 10s to back off from the rate limit.
  async #throttleOnRateLimit(props, runId) {
    const remaining = props?.response?.headers?.["x-ratelimit-remaining-tokens"];
    if (remaining && parseInt(remaining, 10) < 2e3) {
      this.logger.warn("Rate limit approaching, waiting 10 seconds", {
        runId
      });
      await delay(10 * 1e3);
    }
  }
  // Convert Mastra tools ({ description, inputSchema, execute }) into the
  // AI-SDK tool shape, injecting threadId/resourceId/mastra/runId into each
  // execute call and logging execution errors before rethrowing.
  convertTools({
    tools,
    runId,
    threadId,
    resourceId
  } = {}) {
    this.logger.debug("Starting tool conversion for LLM");
    const converted = Object.entries(tools || {}).reduce((memo, value) => {
      const k = value[0];
      const tool = value[1];
      if (tool) {
        memo[k] = {
          description: tool.description,
          parameters: tool.inputSchema,
          execute: typeof tool?.execute === "function" ? async (props, options) => {
            try {
              this.logger.debug("Executing tool", {
                tool: k,
                props
              });
              return tool?.execute?.({
                context: props,
                threadId,
                resourceId,
                mastra: this.#mastra,
                runId
              }, options) ?? void 0;
            } catch (error) {
              this.logger.error("Error executing tool", {
                tool: k,
                props,
                error,
                runId,
                threadId,
                resourceId
              });
              throw error;
            }
          } : void 0
        };
      }
      return memo;
    }, {});
    this.logger.debug(`Converted tools for LLM`);
    return converted;
  }
  // One-shot text generation (ai.generateText). `experimental_output`
  // optionally constrains the output: a Zod schema (ZodArray unwrapped to
  // its element type) or a plain JSON schema.
  async __text({
    runId,
    messages,
    maxSteps,
    tools,
    convertedTools,
    temperature,
    toolChoice = "auto",
    onStepFinish,
    experimental_output,
    telemetry,
    threadId,
    resourceId,
    ...rest
  }) {
    const model = this.#model;
    this.logger.debug(`[LLM] - Generating text`, {
      runId,
      messages,
      maxSteps,
      threadId,
      resourceId,
      tools: Object.keys(tools || convertedTools || {})
    });
    const finalTools = convertedTools || this.convertTools({
      tools,
      runId,
      threadId,
      resourceId
    });
    const argsForExecute = {
      model,
      temperature,
      tools: {
        ...finalTools
      },
      toolChoice,
      maxSteps,
      onStepFinish: async props => {
        onStepFinish?.(JSON.stringify(props, null, 2));
        this.logger.debug("[LLM] - Step Change:", {
          text: props?.text,
          toolCalls: props?.toolCalls,
          toolResults: props?.toolResults,
          finishReason: props?.finishReason,
          usage: props?.usage,
          runId
        });
        await this.#throttleOnRateLimit(props, runId);
      },
      ...rest
    };
    let schema;
    if (experimental_output) {
      this.logger.debug("[LLM] - Using experimental output", {
        runId
      });
      if (typeof experimental_output.parse === "function") {
        schema = experimental_output;
        if (schema instanceof zod.z.ZodArray) {
          schema = schema._def.type;
        }
      } else {
        schema = ai.jsonSchema(experimental_output);
      }
    }
    return await ai.generateText({
      messages,
      ...argsForExecute,
      experimental_telemetry: {
        ...this.experimental_telemetry,
        ...telemetry
      },
      experimental_output: schema ? ai.Output.object({
        schema
      }) : void 0
    });
  }
  // One-shot structured-object generation (ai.generateObject).
  // `structuredOutput` is required: a Zod schema (ZodArray switches output
  // mode to "array") or a plain JSON schema.
  async __textObject({
    messages,
    onStepFinish,
    maxSteps = 5,
    tools,
    convertedTools,
    structuredOutput,
    runId,
    temperature,
    toolChoice = "auto",
    telemetry,
    threadId,
    resourceId,
    ...rest
  }) {
    const model = this.#model;
    this.logger.debug(`[LLM] - Generating a text object`, {
      runId
    });
    const finalTools = convertedTools || this.convertTools({
      tools,
      runId,
      threadId,
      resourceId
    });
    const argsForExecute = {
      model,
      temperature,
      tools: {
        ...finalTools
      },
      maxSteps,
      toolChoice,
      onStepFinish: async props => {
        onStepFinish?.(JSON.stringify(props, null, 2));
        this.logger.debug("[LLM] - Step Change:", {
          text: props?.text,
          toolCalls: props?.toolCalls,
          toolResults: props?.toolResults,
          finishReason: props?.finishReason,
          usage: props?.usage,
          runId
        });
        await this.#throttleOnRateLimit(props, runId);
      },
      ...rest
    };
    let schema;
    let output = "object";
    if (typeof structuredOutput.parse === "function") {
      schema = structuredOutput;
      if (schema instanceof zod.z.ZodArray) {
        output = "array";
        schema = schema._def.type;
      }
    } else {
      schema = ai.jsonSchema(structuredOutput);
    }
    return await ai.generateObject({
      messages,
      ...argsForExecute,
      output,
      schema,
      experimental_telemetry: {
        ...this.experimental_telemetry,
        ...telemetry
      }
    });
  }
  // Streaming text generation (ai.streamText); mirrors __text plus an
  // onFinish hook fired when the stream completes.
  async __stream({
    messages,
    onStepFinish,
    onFinish,
    maxSteps = 5,
    tools,
    convertedTools,
    runId,
    temperature,
    toolChoice = "auto",
    experimental_output,
    telemetry,
    threadId,
    resourceId,
    ...rest
  }) {
    const model = this.#model;
    this.logger.debug(`[LLM] - Streaming text`, {
      runId,
      threadId,
      resourceId,
      messages,
      maxSteps,
      tools: Object.keys(tools || convertedTools || {})
    });
    const finalTools = convertedTools || this.convertTools({
      tools,
      runId,
      threadId,
      resourceId
    });
    const argsForExecute = {
      model,
      temperature,
      tools: {
        ...finalTools
      },
      maxSteps,
      toolChoice,
      onStepFinish: async props => {
        onStepFinish?.(JSON.stringify(props, null, 2));
        this.logger.debug("[LLM] - Stream Step Change:", {
          text: props?.text,
          toolCalls: props?.toolCalls,
          toolResults: props?.toolResults,
          finishReason: props?.finishReason,
          usage: props?.usage,
          runId
        });
        await this.#throttleOnRateLimit(props, runId);
      },
      onFinish: async props => {
        void onFinish?.(JSON.stringify(props, null, 2));
        this.logger.debug("[LLM] - Stream Finished:", {
          text: props?.text,
          toolCalls: props?.toolCalls,
          toolResults: props?.toolResults,
          finishReason: props?.finishReason,
          usage: props?.usage,
          runId,
          threadId,
          resourceId
        });
      },
      ...rest
    };
    let schema;
    if (experimental_output) {
      this.logger.debug("[LLM] - Using experimental output", {
        runId
      });
      if (typeof experimental_output.parse === "function") {
        schema = experimental_output;
        if (schema instanceof zod.z.ZodArray) {
          schema = schema._def.type;
        }
      } else {
        schema = ai.jsonSchema(experimental_output);
      }
    }
    return await ai.streamText({
      messages,
      ...argsForExecute,
      experimental_telemetry: {
        ...this.experimental_telemetry,
        ...telemetry
      },
      experimental_output: schema ? ai.Output.object({
        schema
      }) : void 0
    });
  }
  // Streaming structured-object generation (ai.streamObject); mirrors
  // __textObject plus an onFinish hook.
  async __streamObject({
    messages,
    onStepFinish,
    onFinish,
    maxSteps = 5,
    tools,
    convertedTools,
    structuredOutput,
    runId,
    temperature,
    toolChoice = "auto",
    telemetry,
    threadId,
    resourceId,
    ...rest
  }) {
    const model = this.#model;
    this.logger.debug(`[LLM] - Streaming structured output`, {
      runId,
      messages,
      maxSteps,
      tools: Object.keys(tools || convertedTools || {})
    });
    const finalTools = convertedTools || this.convertTools({
      tools,
      runId,
      threadId,
      resourceId
    });
    const argsForExecute = {
      model,
      temperature,
      tools: {
        ...finalTools
      },
      maxSteps,
      toolChoice,
      onStepFinish: async props => {
        onStepFinish?.(JSON.stringify(props, null, 2));
        this.logger.debug("[LLM] - Stream Step Change:", {
          text: props?.text,
          toolCalls: props?.toolCalls,
          toolResults: props?.toolResults,
          finishReason: props?.finishReason,
          usage: props?.usage,
          runId,
          threadId,
          resourceId
        });
        await this.#throttleOnRateLimit(props, runId);
      },
      onFinish: async props => {
        void onFinish?.(JSON.stringify(props, null, 2));
        this.logger.debug("[LLM] - Stream Finished:", {
          text: props?.text,
          toolCalls: props?.toolCalls,
          toolResults: props?.toolResults,
          finishReason: props?.finishReason,
          usage: props?.usage,
          runId,
          threadId,
          resourceId
        });
      },
      ...rest
    };
    let schema;
    let output = "object";
    if (typeof structuredOutput.parse === "function") {
      schema = structuredOutput;
      if (schema instanceof zod.z.ZodArray) {
        output = "array";
        schema = schema._def.type;
      }
    } else {
      schema = ai.jsonSchema(structuredOutput);
    }
    return ai.streamObject({
      messages,
      ...argsForExecute,
      output,
      schema,
      experimental_telemetry: {
        ...this.experimental_telemetry,
        ...telemetry
      }
    });
  }
  // Public entry point: plain text when `output` is absent, structured
  // object otherwise.
  // FIX: previously `telemetry` was dropped on the text path and
  // `temperature` was dropped on the object path (both were destructured out
  // of `rest` but never forwarded); both are now forwarded on both paths,
  // consistent with stream().
  async generate(messages, {
    maxSteps = 5,
    onStepFinish,
    tools,
    convertedTools,
    runId,
    output,
    temperature,
    telemetry,
    ...rest
  } = {}) {
    const msgs = this.convertToMessages(messages);
    if (!output) {
      return await this.__text({
        messages: msgs,
        onStepFinish,
        maxSteps,
        tools,
        convertedTools,
        runId,
        temperature,
        telemetry,
        ...rest
      });
    }
    return await this.__textObject({
      messages: msgs,
      structuredOutput: output,
      onStepFinish,
      maxSteps,
      tools,
      convertedTools,
      runId,
      temperature,
      telemetry,
      ...rest
    });
  }
  // Public streaming entry point: plain text stream when `output` is absent,
  // structured-object stream otherwise.
  async stream(messages, {
    maxSteps = 5,
    onFinish,
    onStepFinish,
    tools,
    convertedTools,
    runId,
    output,
    temperature,
    telemetry,
    ...rest
  } = {}) {
    const msgs = this.convertToMessages(messages);
    if (!output) {
      return await this.__stream({
        messages: msgs,
        onStepFinish,
        onFinish,
        maxSteps,
        tools,
        convertedTools,
        runId,
        temperature,
        telemetry,
        ...rest
      });
    }
    return await this.__streamObject({
      messages: msgs,
      structuredOutput: output,
      onStepFinish,
      onFinish,
      maxSteps,
      tools,
      convertedTools,
      runId,
      temperature,
      telemetry,
      ...rest
    });
  }
};
868
// Returns true when an OpenTelemetry tracer can be obtained for the given
// name; any failure from the API is treated as "telemetry inactive".
function hasActiveTelemetry(tracerName = "default-tracer") {
  try {
    const tracer = api.trace.getTracer(tracerName);
    return Boolean(tracer);
  } catch {
    return false;
  }
}
875
+
876
// src/telemetry/telemetry.decorators.ts
// Method decorator (legacy/experimental descriptor signature) that wraps a
// method in an OpenTelemetry span: records JSON-serialized arguments and the
// result as span attributes, propagates componentName/runId via baggage, and
// marks the span errored on throw. `options` may be a string (the span name)
// or an options object ({ spanName, spanKind, tracerName, skipIfNoTelemetry }).
function withSpan(options) {
  return function (_target, propertyKey, descriptor) {
    // A numeric "descriptor" means a parameter-decorator slot — nothing to wrap.
    if (!descriptor || typeof descriptor === "number") return;
    const originalMethod = descriptor.value;
    const methodName = String(propertyKey);
    descriptor.value = function (...args) {
      // Cheap bail-out: call through untraced when telemetry is inactive.
      if (options?.skipIfNoTelemetry && !hasActiveTelemetry(options?.tracerName)) {
        return originalMethod.apply(this, args);
      }
      const tracer = api.trace.getTracer(options?.tracerName ?? "default-tracer");
      let spanName;
      let spanKind;
      if (typeof options === "string") {
        spanName = options;
      } else if (options) {
        spanName = options.spanName || methodName;
        spanKind = options.spanKind;
      } else {
        spanName = methodName;
      }
      const span = tracer.startSpan(spanName, {
        kind: spanKind
      });
      let ctx = api.trace.setSpan(api.context.active(), span);
      // Record each argument; values JSON cannot serialize are flagged
      // rather than dropped.
      args.forEach((arg, index) => {
        try {
          span.setAttribute(`${spanName}.argument.${index}`, JSON.stringify(arg));
        } catch {
          span.setAttribute(`${spanName}.argument.${index}`, "[Not Serializable]");
        }
      });
      // NOTE(review): standard OTel baggage is read via getEntry(); direct
      // property access here relies on a custom baggage object shape set in
      // the else-branch below — confirm against the telemetry module.
      const currentBaggage = api.propagation.getBaggage(ctx);
      if (currentBaggage?.componentName) {
        span.setAttribute("componentName", currentBaggage?.componentName);
        span.setAttribute("runId", currentBaggage?.runId);
      } else if (this && this.name) {
        // First traced call on a named component: stamp its identity on the
        // span and seed the baggage so nested spans inherit it.
        span.setAttribute("componentName", this.name);
        span.setAttribute("runId", this.runId);
        ctx = api.propagation.setBaggage(ctx, {
          componentName: this.name,
          runId: this.runId
        });
      }
      let result;
      try {
        result = api.context.with(ctx, () => originalMethod.apply(this, args));
        if (result instanceof Promise) {
          // Async path: attach the result attribute on resolve and end the
          // span when the promise settles (the finally below skips promises).
          return result.then(resolvedValue => {
            try {
              span.setAttribute(`${spanName}.result`, JSON.stringify(resolvedValue));
            } catch {
              span.setAttribute(`${spanName}.result`, "[Not Serializable]");
            }
            return resolvedValue;
          }).finally(() => span.end());
        }
        try {
          span.setAttribute(`${spanName}.result`, JSON.stringify(result));
        } catch {
          span.setAttribute(`${spanName}.result`, "[Not Serializable]");
        }
        return result;
      } catch (error) {
        span.setStatus({
          code: api.SpanStatusCode.ERROR,
          message: error instanceof Error ? error.message : "Unknown error"
        });
        if (error instanceof Error) {
          span.recordException(error);
        }
        throw error;
      } finally {
        // Synchronous completion or throw: end the span here. Promise
        // results were already handled by the .finally above.
        if (!(result instanceof Promise)) {
          span.end();
        }
      }
    };
    return descriptor;
  };
}
957
// Class decorator that wraps every prototype method (minus the constructor,
// any excluded names, and anything rejected by methodFilter) in a telemetry
// span named "<prefix>.<method>" via withSpan. Spans are skipped at call
// time when telemetry is inactive.
function InstrumentClass(options) {
  return function (target) {
    for (const method of Object.getOwnPropertyNames(target.prototype)) {
      if (options?.excludeMethods?.includes(method) || method === "constructor") {
        continue;
      }
      if (options?.methodFilter && !options.methodFilter(method)) {
        continue;
      }
      const descriptor = Object.getOwnPropertyDescriptor(target.prototype, method);
      if (!descriptor || typeof descriptor.value !== "function") {
        continue;
      }
      const wrapped = withSpan({
        spanName: options?.prefix ? `${options.prefix}.${method}` : method,
        skipIfNoTelemetry: true,
        spanKind: options?.spanKind || api.SpanKind.INTERNAL,
        tracerName: options?.tracerName
      })(target, method, descriptor);
      Object.defineProperty(target.prototype, method, wrapped);
    }
    return target;
  };
}
976
+
977
// src/agent/index.ts
// Compiler-emitted decorator bookkeeping for the Agent class: every Agent
// method except the listed lifecycle setters is wrapped in an
// "agent.<method>" telemetry span via InstrumentClass.
var _Agent_decorators, _init, _a;
_Agent_decorators = [InstrumentClass({
  prefix: "agent",
  excludeMethods: ["__setTools", "__setLogger", "__setTelemetry", "log"]
})];
983
+ exports.Agent = class Agent extends (_a = MastraBase) {
984
+ name;
985
+ llm;
986
+ instructions;
987
+ model;
988
+ #mastra;
989
+ #memory;
990
+ tools;
991
+ /** @deprecated This property is deprecated. Use evals instead. */
992
+ metrics;
993
+ evals;
994
+ voice;
995
+ constructor(config) {
996
+ super({
997
+ component: RegisteredLogger.AGENT
998
+ });
999
+ this.name = config.name;
1000
+ this.instructions = config.instructions;
1001
+ if (!config.model) {
1002
+ throw new Error(`LanguageModel is required to create an Agent. Please provide the 'model'.`);
1003
+ }
1004
+ this.llm = new MastraLLM({
1005
+ model: config.model
1006
+ });
1007
+ this.tools = {};
1008
+ this.metrics = {};
1009
+ this.evals = {};
1010
+ if (config.tools) {
1011
+ this.tools = config.tools;
1012
+ }
1013
+ if (config.mastra) {
1014
+ this.#mastra = config.mastra;
1015
+ }
1016
+ if (config.metrics) {
1017
+ this.logger.warn("The metrics property is deprecated. Please use evals instead to add evaluation metrics.");
1018
+ this.metrics = config.metrics;
1019
+ this.evals = config.metrics;
1020
+ }
1021
+ if (config.evals) {
1022
+ this.evals = config.evals;
1023
+ }
1024
+ if (config.memory) {
1025
+ this.#memory = config.memory;
1026
+ }
1027
+ if (config.voice) {
1028
+ this.voice = config.voice;
1029
+ }
1030
+ }
1031
+ hasOwnMemory() {
1032
+ return Boolean(this.#memory);
1033
+ }
1034
+ getMemory() {
1035
+ return this.#memory ?? this.#mastra?.memory;
1036
+ }
1037
+ __updateInstructions(newInstructions) {
1038
+ this.instructions = newInstructions;
1039
+ this.logger.debug(`[Agents:${this.name}] Instructions updated.`, {
1040
+ model: this.model,
1041
+ name: this.name
1042
+ });
1043
+ }
1044
+ __registerPrimitives(p) {
1045
+ if (p.telemetry) {
1046
+ this.__setTelemetry(p.telemetry);
1047
+ }
1048
+ if (p.logger) {
1049
+ this.__setLogger(p.logger);
1050
+ }
1051
+ this.llm.__registerPrimitives(p);
1052
+ this.#mastra = p;
1053
+ this.logger.debug(`[Agents:${this.name}] initialized.`, {
1054
+ model: this.model,
1055
+ name: this.name
1056
+ });
1057
+ }
1058
+ /**
1059
+ * Set the concrete tools for the agent
1060
+ * @param tools
1061
+ */
1062
+ __setTools(tools) {
1063
+ this.tools = tools;
1064
+ this.logger.debug(`[Agents:${this.name}] Tools set for agent ${this.name}`, {
1065
+ model: this.model,
1066
+ name: this.name
1067
+ });
1068
+ }
1069
+ async generateTitleFromUserMessage({
1070
+ message
1071
+ }) {
1072
+ const {
1073
+ object
1074
+ } = await this.llm.__textObject({
1075
+ messages: [{
1076
+ role: "system",
1077
+ content: `
1078
+
1079
+ - you will generate a short title based on the first message a user begins a conversation with
1080
+ - ensure it is not more than 80 characters long
1081
+ - the title should be a summary of the user's message
1082
+ - do not use quotes or colons`
1083
+ }, {
1084
+ role: "user",
1085
+ content: JSON.stringify(message)
1086
+ }],
1087
+ structuredOutput: zod.z.object({
1088
+ title: zod.z.string()
1089
+ })
1090
+ });
1091
+ return object.title;
1092
+ }
1093
+ getMostRecentUserMessage(messages) {
1094
+ const userMessages = messages.filter(message => message.role === "user");
1095
+ return userMessages.at(-1);
1096
+ }
1097
+ async genTitle(userMessage) {
1098
+ let title = "New Thread";
1099
+ try {
1100
+ if (userMessage) {
1101
+ title = await this.generateTitleFromUserMessage({
1102
+ message: userMessage
1103
+ });
1104
+ }
1105
+ } catch (e) {
1106
+ console.error("Error generating title:", e);
1107
+ }
1108
+ return title;
1109
+ }
1110
+ async saveMemory({
1111
+ threadId,
1112
+ memoryConfig,
1113
+ resourceId,
1114
+ userMessages,
1115
+ runId
1116
+ }) {
1117
+ const userMessage = this.getMostRecentUserMessage(userMessages);
1118
+ const memory = this.getMemory();
1119
+ if (memory) {
1120
+ let thread;
1121
+ if (!threadId) {
1122
+ this.logger.debug(`No threadId, creating new thread for agent ${this.name}`, {
1123
+ runId: runId || this.name
1124
+ });
1125
+ const title = await this.genTitle(userMessage);
1126
+ thread = await memory.createThread({
1127
+ threadId,
1128
+ resourceId,
1129
+ title,
1130
+ memoryConfig
1131
+ });
1132
+ } else {
1133
+ thread = await memory.getThreadById({
1134
+ threadId
1135
+ });
1136
+ if (!thread) {
1137
+ this.logger.debug(`Thread with id ${threadId} not found, creating new thread for agent ${this.name}`, {
1138
+ runId: runId || this.name
1139
+ });
1140
+ const title = await this.genTitle(userMessage);
1141
+ thread = await memory.createThread({
1142
+ threadId,
1143
+ resourceId,
1144
+ title,
1145
+ memoryConfig
1146
+ });
1147
+ }
1148
+ }
1149
+ const newMessages = userMessage ? [userMessage] : userMessages;
1150
+ if (thread) {
1151
+ const messages = newMessages.map(u => {
1152
+ return {
1153
+ id: this.getMemory()?.generateId(),
1154
+ createdAt: /* @__PURE__ */new Date(),
1155
+ threadId: thread.id,
1156
+ ...u,
1157
+ content: u.content,
1158
+ role: u.role,
1159
+ type: "text"
1160
+ };
1161
+ });
1162
+ const memoryMessages = threadId && memory ? (await memory.rememberMessages({
1163
+ threadId,
1164
+ config: memoryConfig,
1165
+ vectorMessageSearch: messages.slice(-1).map(m => {
1166
+ if (typeof m === `string`) {
1167
+ return m;
1168
+ }
1169
+ return m?.content || ``;
1170
+ }).join(`
1171
+ `)
1172
+ })).messages : [];
1173
+ if (memory) {
1174
+ await memory.saveMessages({
1175
+ messages,
1176
+ memoryConfig
1177
+ });
1178
+ }
1179
+ this.logger.debug("Saved messages to memory", {
1180
+ threadId: thread.id,
1181
+ runId
1182
+ });
1183
+ const memorySystemMessage = memory && threadId ? await memory.getSystemMessage({
1184
+ threadId,
1185
+ memoryConfig
1186
+ }) : null;
1187
+ return {
1188
+ threadId: thread.id,
1189
+ messages: [memorySystemMessage ? {
1190
+ role: "system",
1191
+ content: memorySystemMessage
1192
+ } : null, ...this.sanitizeResponseMessages(memoryMessages), ...newMessages].filter(message => Boolean(message))
1193
+ };
1194
+ }
1195
+ return {
1196
+ threadId: thread?.id || threadId || "",
1197
+ messages: userMessages
1198
+ };
1199
+ }
1200
+ return {
1201
+ threadId: threadId || "",
1202
+ messages: userMessages
1203
+ };
1204
+ }
1205
+ async saveResponse({
1206
+ result,
1207
+ threadId,
1208
+ resourceId,
1209
+ runId,
1210
+ memoryConfig
1211
+ }) {
1212
+ const {
1213
+ response
1214
+ } = result;
1215
+ try {
1216
+ if (response.messages) {
1217
+ const ms = Array.isArray(response.messages) ? response.messages : [response.messages];
1218
+ const responseMessagesWithoutIncompleteToolCalls = this.sanitizeResponseMessages(ms);
1219
+ const memory = this.getMemory();
1220
+ if (memory) {
1221
+ this.logger.debug(`[Agent:${this.name}] - Memory persistence: store=${this.getMemory()?.constructor.name} threadId=${threadId}`, {
1222
+ runId,
1223
+ resourceId,
1224
+ threadId,
1225
+ memoryStore: this.getMemory()?.constructor.name
1226
+ });
1227
+ await memory.saveMessages({
1228
+ memoryConfig,
1229
+ messages: responseMessagesWithoutIncompleteToolCalls.map((message, index) => {
1230
+ const messageId = crypto$1.randomUUID();
1231
+ let toolCallIds;
1232
+ let toolCallArgs;
1233
+ let toolNames;
1234
+ let type = "text";
1235
+ if (message.role === "tool") {
1236
+ toolCallIds = message.content.map(content => content.toolCallId);
1237
+ type = "tool-result";
1238
+ }
1239
+ if (message.role === "assistant") {
1240
+ const assistantContent = message.content;
1241
+ const assistantToolCalls = assistantContent.map(content => {
1242
+ if (content.type === "tool-call") {
1243
+ return {
1244
+ toolCallId: content.toolCallId,
1245
+ toolArgs: content.args,
1246
+ toolName: content.toolName
1247
+ };
1248
+ }
1249
+ return void 0;
1250
+ })?.filter(Boolean);
1251
+ toolCallIds = assistantToolCalls?.map(toolCall => toolCall.toolCallId);
1252
+ toolCallArgs = assistantToolCalls?.map(toolCall => toolCall.toolArgs);
1253
+ toolNames = assistantToolCalls?.map(toolCall => toolCall.toolName);
1254
+ type = assistantContent?.[0]?.type;
1255
+ }
1256
+ return {
1257
+ id: messageId,
1258
+ threadId,
1259
+ role: message.role,
1260
+ content: message.content,
1261
+ createdAt: new Date(Date.now() + index),
1262
+ // use Date.now() + index to make sure every message is atleast one millisecond apart
1263
+ toolCallIds: toolCallIds?.length ? toolCallIds : void 0,
1264
+ toolCallArgs: toolCallArgs?.length ? toolCallArgs : void 0,
1265
+ toolNames: toolNames?.length ? toolNames : void 0,
1266
+ type
1267
+ };
1268
+ })
1269
+ });
1270
+ }
1271
+ }
1272
+ } catch (err) {
1273
+ this.logger.error(`[Agent:${this.name}] - Failed to save assistant response`, {
1274
+ error: err,
1275
+ runId
1276
+ });
1277
+ }
1278
+ }
1279
+ sanitizeResponseMessages(messages) {
1280
+ let toolResultIds = [];
1281
+ let toolCallIds = [];
1282
+ for (const message of messages) {
1283
+ if (message.role === "tool") {
1284
+ for (const content of message.content) {
1285
+ if (content.type === "tool-result") {
1286
+ toolResultIds.push(content.toolCallId);
1287
+ }
1288
+ }
1289
+ } else if (message.role === "assistant" || message.role === "user") {
1290
+ for (const content of message.content) {
1291
+ if (typeof content !== `string`) {
1292
+ if (content.type === `tool-call`) {
1293
+ toolCallIds.push(content.toolCallId);
1294
+ }
1295
+ }
1296
+ }
1297
+ }
1298
+ }
1299
+ const messagesBySanitizedContent = messages.map(message => {
1300
+ if (message.role !== "assistant" && message.role !== `tool` && message.role !== `user`) return message;
1301
+ if (typeof message.content === "string") return message;
1302
+ const sanitizedContent = message.content.filter(content => {
1303
+ if (content.type === `tool-call`) {
1304
+ return toolResultIds.includes(content.toolCallId);
1305
+ }
1306
+ if (content.type === `text`) {
1307
+ return content.text.trim() !== ``;
1308
+ }
1309
+ if (content.type === `tool-result`) {
1310
+ return toolCallIds.includes(content.toolCallId);
1311
+ }
1312
+ return true;
1313
+ });
1314
+ return {
1315
+ ...message,
1316
+ content: sanitizedContent
1317
+ };
1318
+ });
1319
+ return messagesBySanitizedContent.filter(message => {
1320
+ if (typeof message.content === `string`) {
1321
+ return message.content !== "";
1322
+ }
1323
+ if (Array.isArray(message.content)) {
1324
+ return message.content.length && message.content.every(c => {
1325
+ if (c.type === `text`) {
1326
+ return c.text && c.text !== "";
1327
+ }
1328
+ return true;
1329
+ });
1330
+ }
1331
+ return true;
1332
+ });
1333
+ }
1334
+ convertTools({
1335
+ toolsets,
1336
+ threadId,
1337
+ resourceId,
1338
+ runId
1339
+ }) {
1340
+ this.logger.debug(`[Agents:${this.name}] - Assigning tools`, {
1341
+ runId,
1342
+ threadId,
1343
+ resourceId
1344
+ });
1345
+ const converted = Object.entries(this.tools || {}).reduce((memo, value) => {
1346
+ const k = value[0];
1347
+ const tool = this.tools[k];
1348
+ if (tool) {
1349
+ memo[k] = {
1350
+ description: tool.description,
1351
+ parameters: tool.inputSchema,
1352
+ execute: typeof tool?.execute === "function" ? async (args, options) => {
1353
+ try {
1354
+ this.logger.debug(`[Agent:${this.name}] - Executing tool ${k}`, {
1355
+ name: k,
1356
+ description: tool.description,
1357
+ args,
1358
+ runId,
1359
+ threadId,
1360
+ resourceId
1361
+ });
1362
+ return tool?.execute?.({
1363
+ context: args,
1364
+ mastra: this.#mastra,
1365
+ runId,
1366
+ threadId,
1367
+ resourceId
1368
+ }, options) ?? void 0;
1369
+ } catch (err) {
1370
+ this.logger.error(`[Agent:${this.name}] - Failed execution`, {
1371
+ error: err,
1372
+ runId,
1373
+ threadId,
1374
+ resourceId
1375
+ });
1376
+ throw err;
1377
+ }
1378
+ } : void 0
1379
+ };
1380
+ }
1381
+ return memo;
1382
+ }, {});
1383
+ const toolsFromToolsetsConverted = {
1384
+ ...converted
1385
+ };
1386
+ const toolsFromToolsets = Object.values(toolsets || {});
1387
+ if (toolsFromToolsets.length > 0) {
1388
+ this.logger.debug(`[Agent:${this.name}] - Adding tools from toolsets ${Object.keys(toolsets || {}).join(", ")}`, {
1389
+ runId
1390
+ });
1391
+ toolsFromToolsets.forEach(toolset => {
1392
+ Object.entries(toolset).forEach(([toolName, tool]) => {
1393
+ const toolObj = tool;
1394
+ toolsFromToolsetsConverted[toolName] = {
1395
+ description: toolObj.description || "",
1396
+ parameters: toolObj.inputSchema,
1397
+ execute: typeof toolObj?.execute === "function" ? async (args, options) => {
1398
+ try {
1399
+ this.logger.debug(`[Agent:${this.name}] - Executing tool ${toolName}`, {
1400
+ name: toolName,
1401
+ description: toolObj.description,
1402
+ args,
1403
+ runId,
1404
+ threadId,
1405
+ resourceId
1406
+ });
1407
+ return toolObj?.execute?.({
1408
+ context: args,
1409
+ runId,
1410
+ threadId,
1411
+ resourceId
1412
+ }, options) ?? void 0;
1413
+ } catch (error) {
1414
+ this.logger.error(`[Agent:${this.name}] - Failed toolset execution`, {
1415
+ error,
1416
+ runId,
1417
+ threadId,
1418
+ resourceId
1419
+ });
1420
+ throw error;
1421
+ }
1422
+ } : void 0
1423
+ };
1424
+ });
1425
+ });
1426
+ }
1427
+ return toolsFromToolsetsConverted;
1428
+ }
1429
+ async preExecute({
1430
+ resourceId,
1431
+ runId,
1432
+ threadId,
1433
+ memoryConfig,
1434
+ messages
1435
+ }) {
1436
+ let coreMessages = [];
1437
+ let threadIdToUse = threadId;
1438
+ this.logger.debug(`Saving user messages in memory for agent ${this.name}`, {
1439
+ runId
1440
+ });
1441
+ const saveMessageResponse = await this.saveMemory({
1442
+ threadId,
1443
+ resourceId,
1444
+ userMessages: messages,
1445
+ memoryConfig
1446
+ });
1447
+ coreMessages = saveMessageResponse.messages;
1448
+ threadIdToUse = saveMessageResponse.threadId;
1449
+ return {
1450
+ coreMessages,
1451
+ threadIdToUse
1452
+ };
1453
+ }
1454
+ __primitive({
1455
+ messages,
1456
+ context: context2,
1457
+ threadId,
1458
+ memoryConfig,
1459
+ resourceId,
1460
+ runId,
1461
+ toolsets
1462
+ }) {
1463
+ return {
1464
+ before: async () => {
1465
+ if (process.env.NODE_ENV !== "test") {
1466
+ this.logger.debug(`[Agents:${this.name}] - Starting generation`, {
1467
+ runId
1468
+ });
1469
+ }
1470
+ const systemMessage = {
1471
+ role: "system",
1472
+ content: `${this.instructions}.`
1473
+ };
1474
+ let coreMessages = messages;
1475
+ let threadIdToUse = threadId;
1476
+ const memory = this.getMemory();
1477
+ if (threadId && memory && !resourceId) {
1478
+ throw new Error(`A resourceId must be provided when passing a threadId and using Memory. Saw threadId ${threadId} but resourceId is ${resourceId}`);
1479
+ }
1480
+ if (memory && resourceId) {
1481
+ this.logger.debug(`[Agent:${this.name}] - Memory persistence enabled: store=${this.getMemory()?.constructor.name}, resourceId=${resourceId}`, {
1482
+ runId,
1483
+ resourceId,
1484
+ threadId: threadIdToUse,
1485
+ memoryStore: this.getMemory()?.constructor.name
1486
+ });
1487
+ const preExecuteResult = await this.preExecute({
1488
+ resourceId,
1489
+ runId,
1490
+ threadId: threadIdToUse,
1491
+ memoryConfig,
1492
+ messages
1493
+ });
1494
+ coreMessages = preExecuteResult.coreMessages;
1495
+ threadIdToUse = preExecuteResult.threadIdToUse;
1496
+ }
1497
+ let convertedTools;
1498
+ if (toolsets && Object.keys(toolsets || {}).length > 0 || this.getMemory() && resourceId) {
1499
+ const reasons = [];
1500
+ if (toolsets && Object.keys(toolsets || {}).length > 0) {
1501
+ reasons.push(`toolsets present (${Object.keys(toolsets || {}).length} tools)`);
1502
+ }
1503
+ if (this.getMemory() && resourceId) {
1504
+ reasons.push("memory and resourceId available");
1505
+ }
1506
+ this.logger.debug(`[Agent:${this.name}] - Enhancing tools: ${reasons.join(", ")}`, {
1507
+ runId,
1508
+ toolsets: toolsets ? Object.keys(toolsets) : void 0,
1509
+ hasMemory: !!this.getMemory(),
1510
+ hasResourceId: !!resourceId
1511
+ });
1512
+ convertedTools = this.convertTools({
1513
+ toolsets,
1514
+ threadId: threadIdToUse,
1515
+ resourceId,
1516
+ runId
1517
+ });
1518
+ }
1519
+ const messageObjects = [systemMessage, ...(context2 || []), ...coreMessages];
1520
+ return {
1521
+ messageObjects,
1522
+ convertedTools,
1523
+ threadId: threadIdToUse
1524
+ };
1525
+ },
1526
+ after: async ({
1527
+ result,
1528
+ threadId: threadId2,
1529
+ memoryConfig: memoryConfig2,
1530
+ outputText,
1531
+ runId: runId2
1532
+ }) => {
1533
+ const resToLog = {
1534
+ text: result?.text,
1535
+ object: result?.object,
1536
+ toolResults: result?.toolResults,
1537
+ toolCalls: result?.toolCalls,
1538
+ usage: result?.usage,
1539
+ steps: result?.steps?.map(s => {
1540
+ return {
1541
+ stepType: s?.stepType,
1542
+ text: result?.text,
1543
+ object: result?.object,
1544
+ toolResults: result?.toolResults,
1545
+ toolCalls: result?.toolCalls,
1546
+ usage: result?.usage
1547
+ };
1548
+ })
1549
+ };
1550
+ this.logger.debug(`[Agent:${this.name}] - Post processing LLM response`, {
1551
+ runId: runId2,
1552
+ result: resToLog,
1553
+ threadId: threadId2
1554
+ });
1555
+ if (this.getMemory() && resourceId) {
1556
+ try {
1557
+ await this.saveResponse({
1558
+ result,
1559
+ threadId: threadId2,
1560
+ resourceId,
1561
+ memoryConfig: memoryConfig2,
1562
+ runId: runId2
1563
+ });
1564
+ } catch (e) {
1565
+ this.logger.error("Error saving response", {
1566
+ error: e,
1567
+ runId: runId2,
1568
+ result: resToLog,
1569
+ threadId: threadId2
1570
+ });
1571
+ }
1572
+ }
1573
+ if (Object.keys(this.evals || {}).length > 0) {
1574
+ const input = messages.map(message => message.content).join("\n");
1575
+ const runIdToUse = runId2 || crypto.randomUUID();
1576
+ for (const metric of Object.values(this.evals || {})) {
1577
+ executeHook("onGeneration" /* ON_GENERATION */, {
1578
+ input,
1579
+ output: outputText,
1580
+ runId: runIdToUse,
1581
+ metric,
1582
+ agentName: this.name,
1583
+ instructions: this.instructions
1584
+ });
1585
+ }
1586
+ }
1587
+ }
1588
+ };
1589
+ }
1590
+ async generate(messages, {
1591
+ context: context2,
1592
+ threadId: threadIdInFn,
1593
+ memoryOptions,
1594
+ resourceId,
1595
+ maxSteps = 5,
1596
+ onStepFinish,
1597
+ runId,
1598
+ output,
1599
+ toolsets,
1600
+ temperature,
1601
+ toolChoice = "auto",
1602
+ experimental_output,
1603
+ telemetry,
1604
+ ...rest
1605
+ } = {}) {
1606
+ let messagesToUse = [];
1607
+ if (typeof messages === `string`) {
1608
+ messagesToUse = [{
1609
+ role: "user",
1610
+ content: messages
1611
+ }];
1612
+ } else {
1613
+ messagesToUse = messages.map(message => {
1614
+ if (typeof message === `string`) {
1615
+ return {
1616
+ role: "user",
1617
+ content: message
1618
+ };
1619
+ }
1620
+ return message;
1621
+ });
1622
+ }
1623
+ const runIdToUse = runId || crypto$1.randomUUID();
1624
+ const {
1625
+ before,
1626
+ after
1627
+ } = this.__primitive({
1628
+ messages: messagesToUse,
1629
+ context: context2,
1630
+ threadId: threadIdInFn,
1631
+ memoryConfig: memoryOptions,
1632
+ resourceId,
1633
+ runId: runIdToUse,
1634
+ toolsets
1635
+ });
1636
+ const {
1637
+ threadId,
1638
+ messageObjects,
1639
+ convertedTools
1640
+ } = await before();
1641
+ if (!output && experimental_output) {
1642
+ const result2 = await this.llm.__text({
1643
+ messages: messageObjects,
1644
+ tools: this.tools,
1645
+ convertedTools,
1646
+ onStepFinish,
1647
+ maxSteps: maxSteps || 5,
1648
+ runId: runIdToUse,
1649
+ temperature,
1650
+ toolChoice: toolChoice || "auto",
1651
+ experimental_output,
1652
+ threadId,
1653
+ resourceId,
1654
+ ...rest
1655
+ });
1656
+ const outputText2 = result2.text;
1657
+ await after({
1658
+ result: result2,
1659
+ threadId,
1660
+ memoryConfig: memoryOptions,
1661
+ outputText: outputText2,
1662
+ runId: runIdToUse
1663
+ });
1664
+ const newResult = result2;
1665
+ newResult.object = result2.experimental_output;
1666
+ return newResult;
1667
+ }
1668
+ if (!output) {
1669
+ const result2 = await this.llm.__text({
1670
+ messages: messageObjects,
1671
+ tools: this.tools,
1672
+ convertedTools,
1673
+ onStepFinish,
1674
+ maxSteps,
1675
+ runId: runIdToUse,
1676
+ temperature,
1677
+ toolChoice,
1678
+ telemetry,
1679
+ threadId,
1680
+ resourceId,
1681
+ ...rest
1682
+ });
1683
+ const outputText2 = result2.text;
1684
+ await after({
1685
+ result: result2,
1686
+ threadId,
1687
+ memoryConfig: memoryOptions,
1688
+ outputText: outputText2,
1689
+ runId: runIdToUse
1690
+ });
1691
+ return result2;
1692
+ }
1693
+ const result = await this.llm.__textObject({
1694
+ messages: messageObjects,
1695
+ tools: this.tools,
1696
+ structuredOutput: output,
1697
+ convertedTools,
1698
+ onStepFinish,
1699
+ maxSteps,
1700
+ runId: runIdToUse,
1701
+ temperature,
1702
+ toolChoice,
1703
+ telemetry,
1704
+ ...rest
1705
+ });
1706
+ const outputText = JSON.stringify(result.object);
1707
+ await after({
1708
+ result,
1709
+ threadId,
1710
+ memoryConfig: memoryOptions,
1711
+ outputText,
1712
+ runId: runIdToUse
1713
+ });
1714
+ return result;
1715
+ }
1716
+ async stream(messages, {
1717
+ context: context2,
1718
+ threadId: threadIdInFn,
1719
+ memoryOptions,
1720
+ resourceId,
1721
+ maxSteps = 5,
1722
+ onFinish,
1723
+ onStepFinish,
1724
+ runId,
1725
+ toolsets,
1726
+ output,
1727
+ temperature,
1728
+ toolChoice = "auto",
1729
+ experimental_output,
1730
+ telemetry,
1731
+ ...rest
1732
+ } = {}) {
1733
+ const runIdToUse = runId || crypto$1.randomUUID();
1734
+ let messagesToUse = [];
1735
+ if (typeof messages === `string`) {
1736
+ messagesToUse = [{
1737
+ role: "user",
1738
+ content: messages
1739
+ }];
1740
+ } else {
1741
+ messagesToUse = messages.map(message => {
1742
+ if (typeof message === `string`) {
1743
+ return {
1744
+ role: "user",
1745
+ content: message
1746
+ };
1747
+ }
1748
+ return message;
1749
+ });
1750
+ }
1751
+ const {
1752
+ before,
1753
+ after
1754
+ } = this.__primitive({
1755
+ messages: messagesToUse,
1756
+ context: context2,
1757
+ threadId: threadIdInFn,
1758
+ memoryConfig: memoryOptions,
1759
+ resourceId,
1760
+ runId: runIdToUse,
1761
+ toolsets
1762
+ });
1763
+ const {
1764
+ threadId,
1765
+ messageObjects,
1766
+ convertedTools
1767
+ } = await before();
1768
+ if (!output && experimental_output) {
1769
+ this.logger.debug(`Starting agent ${this.name} llm stream call`, {
1770
+ runId
1771
+ });
1772
+ const streamResult = await this.llm.__stream({
1773
+ messages: messageObjects,
1774
+ temperature,
1775
+ tools: this.tools,
1776
+ convertedTools,
1777
+ onStepFinish,
1778
+ onFinish: async result => {
1779
+ try {
1780
+ const res = JSON.parse(result) || {};
1781
+ const outputText = res.text;
1782
+ await after({
1783
+ result: res,
1784
+ threadId,
1785
+ memoryConfig: memoryOptions,
1786
+ outputText,
1787
+ runId: runIdToUse
1788
+ });
1789
+ } catch (e) {
1790
+ this.logger.error("Error saving memory on finish", {
1791
+ error: e,
1792
+ runId
1793
+ });
1794
+ }
1795
+ onFinish?.(result);
1796
+ },
1797
+ maxSteps,
1798
+ runId: runIdToUse,
1799
+ toolChoice,
1800
+ experimental_output,
1801
+ ...rest
1802
+ });
1803
+ const newStreamResult = streamResult;
1804
+ newStreamResult.partialObjectStream = streamResult.experimental_partialOutputStream;
1805
+ return newStreamResult;
1806
+ } else if (!output) {
1807
+ this.logger.debug(`Starting agent ${this.name} llm stream call`, {
1808
+ runId
1809
+ });
1810
+ return this.llm.__stream({
1811
+ messages: messageObjects,
1812
+ temperature,
1813
+ tools: this.tools,
1814
+ convertedTools,
1815
+ onStepFinish,
1816
+ onFinish: async result => {
1817
+ try {
1818
+ const res = JSON.parse(result) || {};
1819
+ const outputText = res.text;
1820
+ await after({
1821
+ result: res,
1822
+ threadId,
1823
+ memoryConfig: memoryOptions,
1824
+ outputText,
1825
+ runId: runIdToUse
1826
+ });
1827
+ } catch (e) {
1828
+ this.logger.error("Error saving memory on finish", {
1829
+ error: e,
1830
+ runId
1831
+ });
1832
+ }
1833
+ onFinish?.(result);
1834
+ },
1835
+ maxSteps,
1836
+ runId: runIdToUse,
1837
+ toolChoice,
1838
+ telemetry,
1839
+ ...rest
1840
+ });
1841
+ }
1842
+ this.logger.debug(`Starting agent ${this.name} llm streamObject call`, {
1843
+ runId
1844
+ });
1845
+ return this.llm.__streamObject({
1846
+ messages: messageObjects,
1847
+ tools: this.tools,
1848
+ temperature,
1849
+ structuredOutput: output,
1850
+ convertedTools,
1851
+ onStepFinish,
1852
+ onFinish: async result => {
1853
+ try {
1854
+ const res = JSON.parse(result) || {};
1855
+ const outputText = JSON.stringify(res.object);
1856
+ await after({
1857
+ result: res,
1858
+ threadId,
1859
+ memoryConfig: memoryOptions,
1860
+ outputText,
1861
+ runId: runIdToUse
1862
+ });
1863
+ } catch (e) {
1864
+ this.logger.error("Error saving memory on finish", {
1865
+ error: e,
1866
+ runId
1867
+ });
1868
+ }
1869
+ onFinish?.(result);
1870
+ },
1871
+ runId: runIdToUse,
1872
+ toolChoice,
1873
+ telemetry,
1874
+ ...rest
1875
+ });
1876
+ }
1877
+ /**
1878
+ * Convert text to speech using the configured voice provider
1879
+ * @param input Text or text stream to convert to speech
1880
+ * @param options Speech options including speaker and provider-specific options
1881
+ * @returns Audio stream
1882
+ */
1883
+ async speak(input, options) {
1884
+ if (!this.voice) {
1885
+ throw new Error("No voice provider configured");
1886
+ }
1887
+ try {
1888
+ return this.voice.speak(input, options);
1889
+ } catch (e) {
1890
+ this.logger.error("Error during agent speak", {
1891
+ error: e
1892
+ });
1893
+ throw e;
1894
+ }
1895
+ }
1896
+ /**
1897
+ * Convert speech to text using the configured voice provider
1898
+ * @param audioStream Audio stream to transcribe
1899
+ * @param options Provider-specific transcription options
1900
+ * @returns Text or text stream
1901
+ */
1902
+ async listen(audioStream, options) {
1903
+ if (!this.voice) {
1904
+ throw new Error("No voice provider configured");
1905
+ }
1906
+ try {
1907
+ return this.voice.listen(audioStream, options);
1908
+ } catch (e) {
1909
+ this.logger.error("Error during agent listen", {
1910
+ error: e
1911
+ });
1912
+ throw e;
1913
+ }
1914
+ }
1915
+ /**
1916
+ * Get a list of available speakers from the configured voice provider
1917
+ * @throws {Error} If no voice provider is configured
1918
+ * @returns {Promise<Array<{voiceId: string}>>} List of available speakers
1919
+ */
1920
+ async getSpeakers() {
1921
+ if (!this.voice) {
1922
+ throw new Error("No voice provider configured");
1923
+ }
1924
+ try {
1925
+ return await this.voice.getSpeakers();
1926
+ } catch (e) {
1927
+ this.logger.error("Error during agent getSpeakers", {
1928
+ error: e
1929
+ });
1930
+ throw e;
1931
+ }
1932
+ }
1933
+ };
1934
// Bundler-generated decorator application: runs the `_Agent_decorators`
// (the InstrumentClass wrapper) over the class defined above and rebinds
// `exports.Agent` to the instrumented result. `__decoratorStart`,
// `__decorateElement` and `__runInitializers` are decorator-runtime helpers
// emitted earlier in this bundle.
exports.Agent = /*@__PURE__*/(_ => {
  _init = __decoratorStart(_a);
  exports.Agent = __decorateElement(_init, 0, "Agent", _Agent_decorators, exports.Agent);
  __runInitializers(_init, 1, exports.Agent);
  return exports.Agent;
})();