@mastra/core 0.5.0-alpha.9 → 0.5.0

This diff compares the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registry.
Files changed (91)
  1. package/dist/agent/index.cjs +3 -2140
  2. package/dist/agent/index.d.cts +1 -1
  3. package/dist/agent/index.d.ts +1 -1
  4. package/dist/agent/index.js +1 -1
  5. package/dist/{base-CMEKtEnE.d.cts → base-CIPKleAU.d.cts} +97 -60
  6. package/dist/{base-B78F6w8S.d.ts → base-C_Oq53qk.d.ts} +97 -60
  7. package/dist/base.cjs +5 -140
  8. package/dist/bundler/index.cjs +5 -160
  9. package/dist/chunk-2W2GYEYQ.cjs +25 -0
  10. package/dist/chunk-3ASEZT7U.cjs +1586 -0
  11. package/dist/chunk-43Y7WG5W.cjs +335 -0
  12. package/dist/{chunk-IM7CM3DU.js → chunk-4Y74D74B.js} +1 -1
  13. package/dist/chunk-ENT7U27Y.cjs +37 -0
  14. package/dist/chunk-F5UYWPV4.cjs +14 -0
  15. package/dist/chunk-FL3GQXQ2.cjs +218 -0
  16. package/dist/chunk-FRQFWZDN.cjs +2 -0
  17. package/dist/chunk-GXQRMKSN.cjs +367 -0
  18. package/dist/chunk-HJPMYDWO.cjs +37 -0
  19. package/dist/chunk-IIWRJFLQ.cjs +51 -0
  20. package/dist/chunk-KFQ7Z3PO.cjs +347 -0
  21. package/dist/{chunk-TWAIC2XA.js → chunk-KP5UAFLN.js} +1 -1
  22. package/dist/chunk-KPKFLQFR.cjs +12 -0
  23. package/dist/{chunk-NR5T72G7.js → chunk-MLFXOST6.js} +1 -1
  24. package/dist/{chunk-6V737PR2.js → chunk-OD7ZMKHY.js} +166 -58
  25. package/dist/chunk-OTFLHXHZ.cjs +65 -0
  26. package/dist/chunk-RWTSGWWL.cjs +81 -0
  27. package/dist/chunk-ST5RMVLG.cjs +87 -0
  28. package/dist/chunk-SYQ7NK2E.cjs +24 -0
  29. package/dist/chunk-UZNQG7QO.cjs +1868 -0
  30. package/dist/chunk-V5ORZPFW.cjs +38 -0
  31. package/dist/chunk-VA4P7QJT.cjs +443 -0
  32. package/dist/chunk-WB2HREXE.cjs +166 -0
  33. package/dist/chunk-WOMOGDGR.cjs +691 -0
  34. package/dist/chunk-XB2TJ7LX.cjs +408 -0
  35. package/dist/{chunk-4AQBRUR2.js → chunk-XF2FMJYK.js} +1 -1
  36. package/dist/chunk-XLSROQ26.cjs +91 -0
  37. package/dist/chunk-YK3XJ52U.cjs +192 -0
  38. package/dist/{chunk-EWB556GS.js → chunk-YPD6BQIM.js} +29 -1
  39. package/dist/deployer/index.cjs +5 -167
  40. package/dist/eval/index.cjs +9 -105
  41. package/dist/eval/index.d.cts +1 -1
  42. package/dist/eval/index.d.ts +1 -1
  43. package/dist/hooks/index.cjs +14 -83
  44. package/dist/index.cjs +253 -7516
  45. package/dist/index.d.cts +3 -3
  46. package/dist/index.d.ts +3 -3
  47. package/dist/index.js +7 -7
  48. package/dist/integration/index.cjs +9 -108
  49. package/dist/integration/index.d.cts +1 -1
  50. package/dist/integration/index.d.ts +1 -1
  51. package/dist/llm/index.d.cts +1 -1
  52. package/dist/llm/index.d.ts +1 -1
  53. package/dist/logger/index.cjs +33 -161
  54. package/dist/mastra/index.cjs +3 -1755
  55. package/dist/mastra/index.d.cts +1 -1
  56. package/dist/mastra/index.d.ts +1 -1
  57. package/dist/mastra/index.js +1 -1
  58. package/dist/memory/index.cjs +4 -2050
  59. package/dist/memory/index.d.cts +1 -1
  60. package/dist/memory/index.d.ts +1 -1
  61. package/dist/memory/index.js +1 -1
  62. package/dist/relevance/index.cjs +10 -2201
  63. package/dist/relevance/index.d.cts +19 -2
  64. package/dist/relevance/index.d.ts +19 -2
  65. package/dist/relevance/index.js +1 -1
  66. package/dist/storage/index.cjs +29 -367
  67. package/dist/storage/index.d.cts +1 -1
  68. package/dist/storage/index.d.ts +1 -1
  69. package/dist/storage/libsql/index.cjs +9 -798
  70. package/dist/storage/libsql/index.d.cts +1 -1
  71. package/dist/storage/libsql/index.d.ts +1 -1
  72. package/dist/telemetry/index.cjs +21 -408
  73. package/dist/telemetry/index.d.cts +1 -1
  74. package/dist/telemetry/index.d.ts +1 -1
  75. package/dist/tools/index.cjs +11 -22
  76. package/dist/tools/index.d.cts +3 -3
  77. package/dist/tools/index.d.ts +3 -3
  78. package/dist/tts/index.cjs +3 -328
  79. package/dist/utils.cjs +41 -350
  80. package/dist/utils.d.cts +3 -3
  81. package/dist/utils.d.ts +3 -3
  82. package/dist/utils.js +1 -1
  83. package/dist/vector/filter/index.cjs +7 -189
  84. package/dist/vector/index.cjs +5 -172
  85. package/dist/vector/libsql/index.cjs +9 -1047
  86. package/dist/voice/index.cjs +8 -306
  87. package/dist/workflows/index.cjs +65 -1936
  88. package/dist/workflows/index.d.cts +4 -3
  89. package/dist/workflows/index.d.ts +4 -3
  90. package/dist/workflows/index.js +1 -1
  91. package/package.json +27 -27
@@ -1,2147 +1,10 @@
  'use strict';
 
- var crypto$1 = require('crypto');
- var stream = require('stream');
- var pino = require('pino');
- var pretty = require('pino-pretty');
- var ai = require('ai');
- var zod = require('zod');
- var jsonSchemaToZod = require('json-schema-to-zod');
- var api = require('@opentelemetry/api');
+ var chunk3ASEZT7U_cjs = require('../chunk-3ASEZT7U.cjs');
 
- function _interopDefault (e) { return e && e.__esModule ? e : { default: e }; }
 
- var pino__default = /*#__PURE__*/_interopDefault(pino);
- var pretty__default = /*#__PURE__*/_interopDefault(pretty);
- var jsonSchemaToZod__default = /*#__PURE__*/_interopDefault(jsonSchemaToZod);
 
- var __create = Object.create;
- var __defProp = Object.defineProperty;
- var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
- var __knownSymbol = (name, symbol) => (symbol = Symbol[name]) ? symbol : Symbol.for("Symbol." + name);
- var __typeError = msg => {
- throw TypeError(msg);
- };
- var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, {
+ Object.defineProperty(exports, "Agent", {
  enumerable: true,
- configurable: true,
- writable: true,
- value
- }) : obj[key] = value;
- var __name = (target, value) => __defProp(target, "name", {
- value,
- configurable: true
+ get: function () { return chunk3ASEZT7U_cjs.Agent; }
  });
35
- var __decoratorStart = base => [,,, __create(base?.[__knownSymbol("metadata")] ?? null)];
36
- var __decoratorStrings = ["class", "method", "getter", "setter", "accessor", "field", "value", "get", "set"];
37
- var __expectFn = fn => fn !== void 0 && typeof fn !== "function" ? __typeError("Function expected") : fn;
38
- var __decoratorContext = (kind, name, done, metadata, fns) => ({
39
- kind: __decoratorStrings[kind],
40
- name,
41
- metadata,
42
- addInitializer: fn => done._ ? __typeError("Already initialized") : fns.push(__expectFn(fn || null))
43
- });
44
- var __decoratorMetadata = (array, target) => __defNormalProp(target, __knownSymbol("metadata"), array[3]);
45
- var __runInitializers = (array, flags, self, value) => {
46
- for (var i = 0, fns = array[flags >> 1], n = fns && fns.length; i < n; i++) fns[i].call(self) ;
47
- return value;
48
- };
49
- var __decorateElement = (array, flags, name, decorators, target, extra) => {
50
- var it,
51
- done,
52
- ctx,
53
- k = flags & 7,
54
- p = false;
55
- var j = 0;
56
- var extraInitializers = array[j] || (array[j] = []);
57
- var desc = k && ((target = target.prototype), k < 5 && (k > 3 || !p) && __getOwnPropDesc(target , name));
58
- __name(target, name);
59
- for (var i = decorators.length - 1; i >= 0; i--) {
60
- ctx = __decoratorContext(k, name, done = {}, array[3], extraInitializers);
61
- it = (0, decorators[i])(target, ctx), done._ = 1;
62
- __expectFn(it) && (target = it);
63
- }
64
- return __decoratorMetadata(array, target), desc && __defProp(target, name, desc), p ? k ^ 4 ? extra : desc : target;
65
- };
66
- var RegisteredLogger = {
67
- AGENT: "AGENT",
68
- LLM: "LLM"};
69
- var LogLevel = {
70
- INFO: "info"};
71
- var Logger = class {
72
- logger;
73
- transports;
74
- constructor(options = {}) {
75
- this.transports = options.transports || {};
76
- const transportsAry = Object.entries(this.transports);
77
- this.logger = pino__default.default({
78
- name: options.name || "app",
79
- level: options.level || LogLevel.INFO,
80
- formatters: {
81
- level: label => {
82
- return {
83
- level: label
84
- };
85
- }
86
- }
87
- }, options.overrideDefaultTransports ? options?.transports?.default : transportsAry.length === 0 ? pretty__default.default({
88
- colorize: true,
89
- levelFirst: true,
90
- ignore: "pid,hostname",
91
- colorizeObjects: true,
92
- translateTime: "SYS:standard",
93
- singleLine: false
94
- }) : pino__default.default.multistream([...transportsAry.map(([_, transport]) => ({
95
- stream: transport,
96
- level: options.level || LogLevel.INFO
97
- })), {
98
- stream: pretty__default.default({
99
- colorize: true,
100
- levelFirst: true,
101
- ignore: "pid,hostname",
102
- colorizeObjects: true,
103
- translateTime: "SYS:standard",
104
- singleLine: false
105
- }),
106
- level: options.level || LogLevel.INFO
107
- }]));
108
- }
109
- debug(message, args = {}) {
110
- this.logger.debug(args, message);
111
- }
112
- info(message, args = {}) {
113
- this.logger.info(args, message);
114
- }
115
- warn(message, args = {}) {
116
- this.logger.warn(args, message);
117
- }
118
- error(message, args = {}) {
119
- this.logger.error(args, message);
120
- }
121
- // Stream creation for process output handling
122
- createStream() {
123
- return new stream.Transform({
124
- transform: (chunk, _encoding, callback) => {
125
- const line = chunk.toString().trim();
126
- if (line) {
127
- this.info(line);
128
- }
129
- callback(null, chunk);
130
- }
131
- });
132
- }
133
- async getLogs(transportId) {
134
- if (!transportId || !this.transports[transportId]) {
135
- return [];
136
- }
137
- return this.transports[transportId].getLogs();
138
- }
139
- async getLogsByRunId({
140
- runId,
141
- transportId
142
- }) {
143
- return this.transports[transportId]?.getLogsByRunId({
144
- runId
145
- });
146
- }
147
- };
148
- function createLogger(options) {
149
- return new Logger(options);
150
- }
151
-
152
- // src/base.ts
153
- var MastraBase = class {
154
- component = RegisteredLogger.LLM;
155
- logger;
156
- name;
157
- telemetry;
158
- constructor({
159
- component,
160
- name
161
- }) {
162
- this.component = component || RegisteredLogger.LLM;
163
- this.name = name;
164
- this.logger = createLogger({
165
- name: `${this.component} - ${this.name}`
166
- });
167
- }
168
- /**
169
- * Set the logger for the agent
170
- * @param logger
171
- */
172
- __setLogger(logger) {
173
- this.logger = logger;
174
- this.logger.debug(`Logger updated [component=${this.component}] [name=${this.name}]`);
175
- }
176
- /**
177
- * Set the telemetry for the
178
- * @param telemetry
179
- */
180
- __setTelemetry(telemetry) {
181
- this.telemetry = telemetry;
182
- this.logger.debug(`Telemetry updated [component=${this.component}] [tracer=${this.telemetry.tracer}]`);
183
- }
184
- /**
185
- * Get the telemetry on the vector
186
- * @returns telemetry
187
- */
188
- __getTelemetry() {
189
- return this.telemetry;
190
- }
191
- /*
192
- get experimental_telemetry config
193
- */
194
- get experimental_telemetry() {
195
- return this.telemetry ? {
196
- // tracer: this.telemetry.tracer,
197
- tracer: this.telemetry.getBaggageTracer(),
198
- isEnabled: !!this.telemetry.tracer
199
- } : void 0;
200
- }
201
- };
202
-
203
- // src/hooks/mitt.ts
204
- function mitt(all) {
205
- all = all || /* @__PURE__ */new Map();
206
- return {
207
- /**
208
- * A Map of event names to registered handler functions.
209
- */
210
- all,
211
- /**
212
- * Register an event handler for the given type.
213
- * @param {string|symbol} type Type of event to listen for, or `'*'` for all events
214
- * @param {Function} handler Function to call in response to given event
215
- * @memberOf mitt
216
- */
217
- on(type, handler) {
218
- const handlers = all.get(type);
219
- if (handlers) {
220
- handlers.push(handler);
221
- } else {
222
- all.set(type, [handler]);
223
- }
224
- },
225
- /**
226
- * Remove an event handler for the given type.
227
- * If `handler` is omitted, all handlers of the given type are removed.
228
- * @param {string|symbol} type Type of event to unregister `handler` from (`'*'` to remove a wildcard handler)
229
- * @param {Function} [handler] Handler function to remove
230
- * @memberOf mitt
231
- */
232
- off(type, handler) {
233
- const handlers = all.get(type);
234
- if (handlers) {
235
- if (handler) {
236
- handlers.splice(handlers.indexOf(handler) >>> 0, 1);
237
- } else {
238
- all.set(type, []);
239
- }
240
- }
241
- },
242
- /**
243
- * Invoke all handlers for the given type.
244
- * If present, `'*'` handlers are invoked after type-matched handlers.
245
- *
246
- * Note: Manually firing '*' handlers is not supported.
247
- *
248
- * @param {string|symbol} type The event type to invoke
249
- * @param {Any} [evt] Any value (object is recommended and powerful), passed to each handler
250
- * @memberOf mitt
251
- */
252
- emit(type, evt) {
253
- let handlers = all.get(type);
254
- if (handlers) {
255
- handlers.slice().map(handler => {
256
- handler(evt);
257
- });
258
- }
259
- handlers = all.get("*");
260
- if (handlers) {
261
- handlers.slice().map(handler => {
262
- handler(type, evt);
263
- });
264
- }
265
- }
266
- };
267
- }
268
-
269
- // src/hooks/index.ts
270
- var hooks = mitt();
271
- function executeHook(hook, data) {
272
- setImmediate(() => {
273
- hooks.emit(hook, data);
274
- });
275
- }
276
-
277
- // src/llm/model/base.ts
278
- var MastraLLMBase = class extends MastraBase {
279
- // @ts-ignore
280
- #mastra;
281
- #model;
282
- constructor({
283
- name,
284
- model
285
- }) {
286
- super({
287
- component: RegisteredLogger.LLM,
288
- name
289
- });
290
- this.#model = model;
291
- }
292
- getProvider() {
293
- return this.#model.provider;
294
- }
295
- getModelId() {
296
- return this.#model.modelId;
297
- }
298
- getModel() {
299
- return this.#model;
300
- }
301
- convertToMessages(messages) {
302
- if (Array.isArray(messages)) {
303
- return messages.map(m => {
304
- if (typeof m === "string") {
305
- return {
306
- role: "user",
307
- content: m
308
- };
309
- }
310
- return m;
311
- });
312
- }
313
- return [{
314
- role: "user",
315
- content: messages
316
- }];
317
- }
318
- __registerPrimitives(p) {
319
- if (p.telemetry) {
320
- this.__setTelemetry(p.telemetry);
321
- }
322
- if (p.logger) {
323
- this.__setLogger(p.logger);
324
- }
325
- this.#mastra = p;
326
- }
327
- async __text(input) {
328
- this.logger.debug(`[LLMs:${this.name}] Generating text.`, {
329
- input
330
- });
331
- throw new Error("Method not implemented.");
332
- }
333
- async __textObject(input) {
334
- this.logger.debug(`[LLMs:${this.name}] Generating object.`, {
335
- input
336
- });
337
- throw new Error("Method not implemented.");
338
- }
339
- async generate(messages, options = {}) {
340
- this.logger.debug(`[LLMs:${this.name}] Generating text.`, {
341
- messages,
342
- options
343
- });
344
- throw new Error("Method not implemented.");
345
- }
346
- async __stream(input) {
347
- this.logger.debug(`[LLMs:${this.name}] Streaming text.`, {
348
- input
349
- });
350
- throw new Error("Method not implemented.");
351
- }
352
- async __streamObject(input) {
353
- this.logger.debug(`[LLMs:${this.name}] Streaming object.`, {
354
- input
355
- });
356
- throw new Error("Method not implemented.");
357
- }
358
- async stream(messages, options = {}) {
359
- this.logger.debug(`[LLMs:${this.name}] Streaming text.`, {
360
- messages,
361
- options
362
- });
363
- throw new Error("Method not implemented.");
364
- }
365
- };
366
-
367
- // src/tools/tool.ts
368
- var Tool = class {
369
- id;
370
- description;
371
- inputSchema;
372
- outputSchema;
373
- execute;
374
- mastra;
375
- constructor(opts) {
376
- this.id = opts.id;
377
- this.description = opts.description;
378
- this.inputSchema = opts.inputSchema;
379
- this.outputSchema = opts.outputSchema;
380
- this.execute = opts.execute;
381
- this.mastra = opts.mastra;
382
- }
383
- };
384
-
385
- // src/utils.ts
386
- var delay = ms => new Promise(resolve => setTimeout(resolve, ms));
387
- function resolveSerializedZodOutput(schema) {
388
- return Function("z", `"use strict";return (${schema});`)(zod.z);
389
- }
390
- function isVercelTool(tool) {
391
- return !(tool instanceof Tool);
392
- }
393
- function createLogMessageOptions({
394
- agentName,
395
- toolName,
396
- tool,
397
- type
398
- }) {
399
- if (!agentName) {
400
- return {
401
- start: `Executing tool ${toolName}`,
402
- error: `Failed tool execution`
403
- };
404
- }
405
- const prefix = `[Agent:${agentName}]`;
406
- const vercelPrefix = isVercelTool(tool) ? "Vercel " : "";
407
- const toolType = type === "toolset" ? "toolset" : "tool";
408
- return {
409
- start: `${prefix} - Executing ${vercelPrefix}${toolType} ${toolName}`,
410
- error: `${prefix} - Failed ${vercelPrefix}${toolType} execution`
411
- };
412
- }
413
- function createExecute(tool, options, logType) {
414
- const {
415
- logger,
416
- ...rest
417
- } = options;
418
- const {
419
- start,
420
- error
421
- } = createLogMessageOptions({
422
- agentName: options.agentName,
423
- toolName: options.name,
424
- tool,
425
- type: logType
426
- });
427
- const execFunction = async (args, execOptions) => {
428
- if (isVercelTool(tool)) {
429
- return tool?.execute?.(args, execOptions) ?? void 0;
430
- }
431
- return tool?.execute?.({
432
- context: args,
433
- threadId: options.threadId,
434
- resourceId: options.resourceId,
435
- mastra: options.mastra,
436
- memory: options.memory,
437
- runId: options.runId
438
- }, execOptions) ?? void 0;
439
- };
440
- return async (args, execOptions) => {
441
- try {
442
- logger.debug(start, {
443
- ...rest,
444
- args
445
- });
446
- return await execFunction(args, execOptions);
447
- } catch (err) {
448
- logger.error(error, {
449
- ...rest,
450
- error: err,
451
- args
452
- });
453
- throw err;
454
- }
455
- };
456
- }
457
- function isZodType(value) {
458
- return typeof value === "object" && value !== null && "_def" in value && "parse" in value && typeof value.parse === "function" && "safeParse" in value && typeof value.safeParse === "function";
459
- }
460
- function createDeterministicId(input) {
461
- return crypto$1.createHash("sha256").update(input).digest("hex").slice(0, 8);
462
- }
463
- function setVercelToolProperties(tool) {
464
- const inputSchema = convertVercelToolParameters(tool);
465
- const toolId = !("id" in tool) ? tool.description ? `tool-${createDeterministicId(tool.description)}` : `tool-${Math.random().toString(36).substring(2, 9)}` : tool.id;
466
- return {
467
- ...tool,
468
- id: toolId,
469
- inputSchema
470
- };
471
- }
472
- function ensureToolProperties(tools) {
473
- const toolsWithProperties = Object.keys(tools).reduce((acc, key) => {
474
- const tool = tools?.[key];
475
- if (tool) {
476
- if (isVercelTool(tool)) {
477
- acc[key] = setVercelToolProperties(tool);
478
- } else {
479
- acc[key] = tool;
480
- }
481
- }
482
- return acc;
483
- }, {});
484
- return toolsWithProperties;
485
- }
486
- function convertVercelToolParameters(tool) {
487
- const schema = tool.parameters ?? zod.z.object({});
488
- return isZodType(schema) ? schema : resolveSerializedZodOutput(jsonSchemaToZod__default.default(schema));
489
- }
490
- function makeCoreTool(tool, options, logType) {
491
- const getParameters = () => {
492
- if (isVercelTool(tool)) {
493
- return convertVercelToolParameters(tool);
494
- }
495
- return tool.inputSchema ?? zod.z.object({});
496
- };
497
- return {
498
- description: tool.description,
499
- parameters: getParameters(),
500
- execute: tool.execute ? createExecute(tool, {
501
- ...options,
502
- description: tool.description
503
- }, logType) : void 0
504
- };
505
- }
506
- function createMastraProxy({
507
- mastra,
508
- logger
509
- }) {
510
- return new Proxy(mastra, {
511
- get(target, prop) {
512
- const hasProp = Reflect.has(target, prop);
513
- if (hasProp) {
514
- const value = Reflect.get(target, prop);
515
- const isFunction = typeof value === "function";
516
- if (isFunction) {
517
- return value.bind(target);
518
- }
519
- return value;
520
- }
521
- if (prop === "logger") {
522
- logger.warn(`Please use 'getLogger' instead, logger is deprecated`);
523
- return Reflect.apply(target.getLogger, target, []);
524
- }
525
- if (prop === "telemetry") {
526
- logger.warn(`Please use 'getTelemetry' instead, telemetry is deprecated`);
527
- return Reflect.apply(target.getTelemetry, target, []);
528
- }
529
- if (prop === "storage") {
530
- logger.warn(`Please use 'getStorage' instead, storage is deprecated`);
531
- return Reflect.get(target, "storage");
532
- }
533
- if (prop === "agents") {
534
- logger.warn(`Please use 'getAgents' instead, agents is deprecated`);
535
- return Reflect.apply(target.getAgents, target, []);
536
- }
537
- if (prop === "tts") {
538
- logger.warn(`Please use 'getTTS' instead, tts is deprecated`);
539
- return Reflect.apply(target.getTTS, target, []);
540
- }
541
- if (prop === "vectors") {
542
- logger.warn(`Please use 'getVectors' instead, vectors is deprecated`);
543
- return Reflect.apply(target.getVectors, target, []);
544
- }
545
- if (prop === "memory") {
546
- logger.warn(`Please use 'getMemory' instead, memory is deprecated`);
547
- return Reflect.get(target, "memory");
548
- }
549
- return Reflect.get(target, prop);
550
- }
551
- });
552
- }
553
-
554
- // src/llm/model/model.ts
555
- var MastraLLM = class extends MastraLLMBase {
556
- #model;
557
- #mastra;
558
- constructor({
559
- model,
560
- mastra
561
- }) {
562
- super({
563
- name: "aisdk",
564
- model
565
- });
566
- this.#model = model;
567
- if (mastra) {
568
- this.#mastra = mastra;
569
- if (mastra.logger) {
570
- this.__setLogger(mastra.logger);
571
- }
572
- }
573
- }
574
- __registerPrimitives(p) {
575
- if (p.telemetry) {
576
- this.__setTelemetry(p.telemetry);
577
- }
578
- if (p.logger) {
579
- this.__setLogger(p.logger);
580
- }
581
- this.#mastra = p;
582
- }
583
- getProvider() {
584
- return this.#model.provider;
585
- }
586
- getModelId() {
587
- return this.#model.modelId;
588
- }
589
- getModel() {
590
- return this.#model;
591
- }
592
- convertTools({
593
- tools,
594
- runId,
595
- threadId,
596
- resourceId,
597
- memory
598
- } = {}) {
599
- this.logger.debug("Starting tool conversion for LLM");
600
- const converted = Object.entries(tools || {}).reduce((memo, value) => {
601
- const k = value[0];
602
- const tool = value[1];
603
- if (tool) {
604
- const options = {
605
- name: k,
606
- runId,
607
- threadId,
608
- resourceId,
609
- logger: this.logger,
610
- memory,
611
- mastra: this.#mastra
612
- };
613
- memo[k] = makeCoreTool(tool, options);
614
- }
615
- return memo;
616
- }, {});
617
- this.logger.debug(`Converted tools for LLM`);
618
- return converted;
619
- }
620
- async __text({
621
- runId,
622
- messages,
623
- maxSteps,
624
- tools,
625
- convertedTools,
626
- temperature,
627
- toolChoice = "auto",
628
- onStepFinish,
629
- experimental_output,
630
- telemetry,
631
- threadId,
632
- resourceId,
633
- memory,
634
- ...rest
635
- }) {
636
- const model = this.#model;
637
- this.logger.debug(`[LLM] - Generating text`, {
638
- runId,
639
- messages,
640
- maxSteps,
641
- threadId,
642
- resourceId,
643
- tools: Object.keys(tools || convertedTools || {})
644
- });
645
- const finalTools = convertedTools || this.convertTools({
646
- tools,
647
- runId,
648
- threadId,
649
- resourceId,
650
- memory
651
- });
652
- const argsForExecute = {
653
- model,
654
- temperature,
655
- tools: {
656
- ...finalTools
657
- },
658
- toolChoice,
659
- maxSteps,
660
- onStepFinish: async props => {
661
- onStepFinish?.(JSON.stringify(props, null, 2));
662
- this.logger.debug("[LLM] - Step Change:", {
663
- text: props?.text,
664
- toolCalls: props?.toolCalls,
665
- toolResults: props?.toolResults,
666
- finishReason: props?.finishReason,
667
- usage: props?.usage,
668
- runId
669
- });
670
- if (props?.response?.headers?.["x-ratelimit-remaining-tokens"] && parseInt(props?.response?.headers?.["x-ratelimit-remaining-tokens"], 10) < 2e3) {
671
- this.logger.warn("Rate limit approaching, waiting 10 seconds", {
672
- runId
673
- });
674
- await delay(10 * 1e3);
675
- }
676
- },
677
- ...rest
678
- };
679
- let schema;
680
- if (experimental_output) {
681
- this.logger.debug("[LLM] - Using experimental output", {
682
- runId
683
- });
684
- if (typeof experimental_output.parse === "function") {
685
- schema = experimental_output;
686
- if (schema instanceof zod.z.ZodArray) {
687
- schema = schema._def.type;
688
- }
689
- } else {
690
- schema = ai.jsonSchema(experimental_output);
691
- }
692
- }
693
- return await ai.generateText({
694
- messages,
695
- ...argsForExecute,
696
- experimental_telemetry: {
697
- ...this.experimental_telemetry,
698
- ...telemetry
699
- },
700
- experimental_output: schema ? ai.Output.object({
701
- schema
702
- }) : void 0
703
- });
704
- }
705
- async __textObject({
706
- messages,
707
- onStepFinish,
708
- maxSteps = 5,
709
- tools,
710
- convertedTools,
711
- structuredOutput,
712
- runId,
713
- temperature,
714
- toolChoice = "auto",
715
- telemetry,
716
- threadId,
717
- resourceId,
718
- memory,
719
- ...rest
720
- }) {
721
- const model = this.#model;
722
- this.logger.debug(`[LLM] - Generating a text object`, {
723
- runId
724
- });
725
- const finalTools = convertedTools || this.convertTools({
726
- tools,
727
- runId,
728
- threadId,
729
- resourceId,
730
- memory
731
- });
732
- const argsForExecute = {
733
- model,
734
- temperature,
735
- tools: {
736
- ...finalTools
737
- },
738
- maxSteps,
739
- toolChoice,
740
- onStepFinish: async props => {
741
- onStepFinish?.(JSON.stringify(props, null, 2));
742
- this.logger.debug("[LLM] - Step Change:", {
743
- text: props?.text,
744
- toolCalls: props?.toolCalls,
745
- toolResults: props?.toolResults,
746
- finishReason: props?.finishReason,
747
- usage: props?.usage,
748
- runId
749
- });
750
- if (props?.response?.headers?.["x-ratelimit-remaining-tokens"] && parseInt(props?.response?.headers?.["x-ratelimit-remaining-tokens"], 10) < 2e3) {
751
- this.logger.warn("Rate limit approaching, waiting 10 seconds", {
752
- runId
753
- });
754
- await delay(10 * 1e3);
755
- }
756
- },
757
- ...rest
758
- };
759
- let schema;
760
- let output = "object";
761
- if (typeof structuredOutput.parse === "function") {
762
- schema = structuredOutput;
763
- if (schema instanceof zod.z.ZodArray) {
764
- output = "array";
765
- schema = schema._def.type;
766
- }
767
- } else {
768
- schema = ai.jsonSchema(structuredOutput);
769
- }
770
- return await ai.generateObject({
771
- messages,
772
- ...argsForExecute,
773
- output,
774
- schema,
775
- experimental_telemetry: {
776
- ...this.experimental_telemetry,
777
- ...telemetry
778
- }
779
- });
780
- }
781
- async __stream({
782
- messages,
783
- onStepFinish,
784
- onFinish,
785
- maxSteps = 5,
786
- tools,
787
- convertedTools,
788
- runId,
789
- temperature,
790
- toolChoice = "auto",
791
- experimental_output,
792
- telemetry,
793
- threadId,
794
- resourceId,
795
- memory,
796
- ...rest
797
- }) {
798
- const model = this.#model;
799
- this.logger.debug(`[LLM] - Streaming text`, {
800
- runId,
801
- threadId,
802
- resourceId,
803
- messages,
804
- maxSteps,
805
- tools: Object.keys(tools || convertedTools || {})
806
- });
807
- const finalTools = convertedTools || this.convertTools({
808
- tools,
809
- runId,
810
- threadId,
811
- resourceId,
812
- memory
813
- });
814
- const argsForExecute = {
815
- model,
816
- temperature,
817
- tools: {
818
- ...finalTools
819
- },
820
- maxSteps,
821
- toolChoice,
822
- onStepFinish: async props => {
823
- onStepFinish?.(JSON.stringify(props, null, 2));
824
- this.logger.debug("[LLM] - Stream Step Change:", {
825
- text: props?.text,
826
- toolCalls: props?.toolCalls,
827
- toolResults: props?.toolResults,
828
- finishReason: props?.finishReason,
829
- usage: props?.usage,
830
- runId
831
- });
832
- if (props?.response?.headers?.["x-ratelimit-remaining-tokens"] && parseInt(props?.response?.headers?.["x-ratelimit-remaining-tokens"], 10) < 2e3) {
833
- this.logger.warn("Rate limit approaching, waiting 10 seconds", {
834
- runId
835
- });
836
- await delay(10 * 1e3);
837
- }
838
- },
839
- onFinish: async props => {
840
- void onFinish?.(JSON.stringify(props, null, 2));
841
- this.logger.debug("[LLM] - Stream Finished:", {
842
- text: props?.text,
843
- toolCalls: props?.toolCalls,
844
- toolResults: props?.toolResults,
845
- finishReason: props?.finishReason,
846
- usage: props?.usage,
847
- runId,
848
- threadId,
849
- resourceId
850
- });
851
- },
852
- ...rest
853
- };
854
- let schema;
855
- if (experimental_output) {
856
- this.logger.debug("[LLM] - Using experimental output", {
857
- runId
858
- });
859
- if (typeof experimental_output.parse === "function") {
860
- schema = experimental_output;
861
- if (schema instanceof zod.z.ZodArray) {
862
- schema = schema._def.type;
863
- }
864
- } else {
865
- schema = ai.jsonSchema(experimental_output);
866
- }
867
- }
868
- return await ai.streamText({
869
- messages,
870
- ...argsForExecute,
871
- experimental_telemetry: {
872
- ...this.experimental_telemetry,
873
- ...telemetry
874
- },
875
- experimental_output: schema ? ai.Output.object({
876
- schema
877
- }) : void 0
878
- });
879
- }
880
- async __streamObject({
881
- messages,
882
- onStepFinish,
883
- onFinish,
884
- maxSteps = 5,
885
- tools,
886
- convertedTools,
887
- structuredOutput,
888
- runId,
889
- temperature,
890
- toolChoice = "auto",
891
- telemetry,
892
- threadId,
893
- resourceId,
894
- memory,
895
- ...rest
896
- }) {
897
- const model = this.#model;
898
- this.logger.debug(`[LLM] - Streaming structured output`, {
899
- runId,
900
- messages,
901
- maxSteps,
902
- tools: Object.keys(tools || convertedTools || {})
903
- });
904
- const finalTools = convertedTools || this.convertTools({
905
- tools,
906
- runId,
907
- threadId,
908
- resourceId,
909
- memory
910
- });
911
- const argsForExecute = {
912
- model,
913
- temperature,
914
- tools: {
915
- ...finalTools
916
- },
917
- maxSteps,
918
- toolChoice,
919
- onStepFinish: async props => {
920
- onStepFinish?.(JSON.stringify(props, null, 2));
921
- this.logger.debug("[LLM] - Stream Step Change:", {
922
- text: props?.text,
923
- toolCalls: props?.toolCalls,
924
- toolResults: props?.toolResults,
925
- finishReason: props?.finishReason,
926
- usage: props?.usage,
927
- runId,
928
- threadId,
929
- resourceId
930
- });
931
- if (props?.response?.headers?.["x-ratelimit-remaining-tokens"] && parseInt(props?.response?.headers?.["x-ratelimit-remaining-tokens"], 10) < 2e3) {
932
- this.logger.warn("Rate limit approaching, waiting 10 seconds", {
933
- runId
934
- });
935
- await delay(10 * 1e3);
936
- }
937
- },
938
- onFinish: async props => {
939
- void onFinish?.(JSON.stringify(props, null, 2));
940
- this.logger.debug("[LLM] - Stream Finished:", {
941
- text: props?.text,
942
- toolCalls: props?.toolCalls,
943
- toolResults: props?.toolResults,
944
- finishReason: props?.finishReason,
945
- usage: props?.usage,
946
- runId,
947
- threadId,
948
- resourceId
949
- });
950
- },
951
- ...rest
952
- };
953
- let schema;
954
- let output = "object";
955
- if (typeof structuredOutput.parse === "function") {
956
- schema = structuredOutput;
957
- if (schema instanceof zod.z.ZodArray) {
958
- output = "array";
959
- schema = schema._def.type;
960
- }
961
- } else {
962
- schema = ai.jsonSchema(structuredOutput);
963
- }
964
- return ai.streamObject({
965
- messages,
966
- ...argsForExecute,
967
- output,
968
- schema,
969
- experimental_telemetry: {
970
- ...this.experimental_telemetry,
971
- ...telemetry
972
- }
973
- });
974
- }
975
- async generate(messages, {
976
- maxSteps = 5,
977
- onStepFinish,
978
- tools,
979
- convertedTools,
980
- runId,
981
- output,
982
- temperature,
983
- telemetry,
984
- memory,
985
- ...rest
986
- } = {}) {
987
- const msgs = this.convertToMessages(messages);
988
- if (!output) {
989
- return await this.__text({
990
- messages: msgs,
991
- onStepFinish,
992
- maxSteps,
993
- tools,
994
- convertedTools,
995
- runId,
996
- temperature,
997
- memory,
998
- ...rest
999
- });
1000
- }
1001
- return await this.__textObject({
1002
- messages: msgs,
1003
- structuredOutput: output,
1004
- onStepFinish,
1005
- maxSteps,
1006
- tools,
1007
- convertedTools,
1008
- runId,
1009
- telemetry,
1010
- memory,
1011
- ...rest
1012
- });
1013
- }
1014
- async stream(messages, {
1015
- maxSteps = 5,
1016
- onFinish,
1017
- onStepFinish,
1018
- tools,
1019
- convertedTools,
1020
- runId,
1021
- output,
1022
- temperature,
1023
- telemetry,
1024
- ...rest
1025
- } = {}) {
1026
- const msgs = this.convertToMessages(messages);
1027
- if (!output) {
1028
- return await this.__stream({
1029
- messages: msgs,
1030
- onStepFinish,
1031
- onFinish,
1032
- maxSteps,
1033
- tools,
1034
- convertedTools,
1035
- runId,
1036
- temperature,
1037
- telemetry,
1038
- ...rest
1039
- });
1040
- }
1041
- return await this.__streamObject({
1042
- messages: msgs,
1043
- structuredOutput: output,
1044
- onStepFinish,
1045
- onFinish,
1046
- maxSteps,
1047
- tools,
1048
- convertedTools,
1049
- runId,
1050
- temperature,
1051
- telemetry,
1052
- ...rest
1053
- });
1054
- }
1055
- };
1056
- function hasActiveTelemetry(tracerName = "default-tracer") {
1057
- try {
1058
- return !!api.trace.getTracer(tracerName);
1059
- } catch {
1060
- return false;
1061
- }
1062
- }
1063
-
1064
- // src/telemetry/telemetry.decorators.ts
1065
- function withSpan(options) {
1066
- return function (_target, propertyKey, descriptor) {
1067
- if (!descriptor || typeof descriptor === "number") return;
1068
- const originalMethod = descriptor.value;
1069
- const methodName = String(propertyKey);
1070
- descriptor.value = function (...args) {
1071
- if (options?.skipIfNoTelemetry && !hasActiveTelemetry(options?.tracerName)) {
1072
- return originalMethod.apply(this, args);
1073
- }
1074
- const tracer = api.trace.getTracer(options?.tracerName ?? "default-tracer");
1075
- let spanName;
1076
- let spanKind;
1077
- if (typeof options === "string") {
1078
- spanName = options;
1079
- } else if (options) {
1080
- spanName = options.spanName || methodName;
1081
- spanKind = options.spanKind;
1082
- } else {
1083
- spanName = methodName;
1084
- }
1085
- const span = tracer.startSpan(spanName, {
1086
- kind: spanKind
1087
- });
1088
- let ctx = api.trace.setSpan(api.context.active(), span);
1089
- args.forEach((arg, index) => {
1090
- try {
1091
- span.setAttribute(`${spanName}.argument.${index}`, JSON.stringify(arg));
1092
- } catch {
1093
- span.setAttribute(`${spanName}.argument.${index}`, "[Not Serializable]");
1094
- }
1095
- });
1096
- const currentBaggage = api.propagation.getBaggage(ctx);
1097
- if (currentBaggage?.componentName) {
1098
- span.setAttribute("componentName", currentBaggage?.componentName);
1099
- span.setAttribute("runId", currentBaggage?.runId);
1100
- } else if (this && this.name) {
1101
- span.setAttribute("componentName", this.name);
1102
- span.setAttribute("runId", this.runId);
1103
- ctx = api.propagation.setBaggage(ctx, {
1104
- componentName: this.name,
1105
- runId: this.runId
1106
- });
1107
- }
1108
- let result;
1109
- try {
1110
- result = api.context.with(ctx, () => originalMethod.apply(this, args));
1111
- if (result instanceof Promise) {
1112
- return result.then(resolvedValue => {
1113
- try {
1114
- span.setAttribute(`${spanName}.result`, JSON.stringify(resolvedValue));
1115
- } catch {
1116
- span.setAttribute(`${spanName}.result`, "[Not Serializable]");
1117
- }
1118
- return resolvedValue;
1119
- }).finally(() => span.end());
1120
- }
1121
- try {
1122
- span.setAttribute(`${spanName}.result`, JSON.stringify(result));
1123
- } catch {
1124
- span.setAttribute(`${spanName}.result`, "[Not Serializable]");
1125
- }
1126
- return result;
1127
- } catch (error) {
1128
- span.setStatus({
1129
- code: api.SpanStatusCode.ERROR,
1130
- message: error instanceof Error ? error.message : "Unknown error"
1131
- });
1132
- if (error instanceof Error) {
1133
- span.recordException(error);
1134
- }
1135
- throw error;
1136
- } finally {
1137
- if (!(result instanceof Promise)) {
1138
- span.end();
1139
- }
1140
- }
1141
- };
1142
- return descriptor;
1143
- };
1144
- }
1145
- function InstrumentClass(options) {
1146
- return function (target) {
1147
- const methods = Object.getOwnPropertyNames(target.prototype);
1148
- methods.forEach(method => {
1149
- if (options?.excludeMethods?.includes(method) || method === "constructor") return;
1150
- if (options?.methodFilter && !options.methodFilter(method)) return;
1151
- const descriptor = Object.getOwnPropertyDescriptor(target.prototype, method);
1152
- if (descriptor && typeof descriptor.value === "function") {
1153
- Object.defineProperty(target.prototype, method, withSpan({
1154
- spanName: options?.prefix ? `${options.prefix}.${method}` : method,
1155
- skipIfNoTelemetry: true,
1156
- spanKind: options?.spanKind || api.SpanKind.INTERNAL,
1157
- tracerName: options?.tracerName
1158
- })(target, method, descriptor));
1159
- }
1160
- });
1161
- return target;
1162
- };
1163
- }
1164
-
1165
- // src/agent/index.ts
1166
- var _Agent_decorators, _init, _a;
1167
- _Agent_decorators = [InstrumentClass({
1168
- prefix: "agent",
1169
- excludeMethods: ["__setTools", "__setLogger", "__setTelemetry", "log"]
1170
- })];
1171
- exports.Agent = class Agent extends (_a = MastraBase) {
1172
- name;
1173
- llm;
1174
- instructions;
1175
- model;
1176
- #mastra;
1177
- #memory;
1178
- tools;
1179
- /** @deprecated This property is deprecated. Use evals instead. */
1180
- metrics;
1181
- evals;
1182
- voice;
1183
- constructor(config) {
1184
- super({
1185
- component: RegisteredLogger.AGENT
1186
- });
1187
- this.name = config.name;
1188
- this.instructions = config.instructions;
1189
- if (!config.model) {
1190
- throw new Error(`LanguageModel is required to create an Agent. Please provide the 'model'.`);
1191
- }
1192
- this.llm = new MastraLLM({
1193
- model: config.model
1194
- });
1195
- this.tools = {};
1196
- this.metrics = {};
1197
- this.evals = {};
1198
- if (config.tools) {
1199
- this.tools = ensureToolProperties(config.tools);
1200
- }
1201
- if (config.mastra) {
1202
- this.__registerPrimitives({
1203
- telemetry: config.mastra.getTelemetry(),
1204
- logger: config.mastra.getLogger()
1205
- });
1206
- }
1207
- if (config.metrics) {
1208
- this.logger.warn("The metrics property is deprecated. Please use evals instead to add evaluation metrics.");
1209
- this.metrics = config.metrics;
1210
- this.evals = config.metrics;
1211
- }
1212
- if (config.evals) {
1213
- this.evals = config.evals;
1214
- }
1215
- if (config.memory) {
1216
- this.#memory = config.memory;
1217
- }
1218
- if (config.voice) {
1219
- this.voice = config.voice;
1220
- }
1221
- }
1222
- hasOwnMemory() {
1223
- return Boolean(this.#memory);
1224
- }
1225
- getMemory() {
1226
- return this.#memory ?? this.#mastra?.memory;
1227
- }
1228
- __updateInstructions(newInstructions) {
1229
- this.instructions = newInstructions;
1230
- this.logger.debug(`[Agents:${this.name}] Instructions updated.`, {
1231
- model: this.model,
1232
- name: this.name
1233
- });
1234
- }
1235
- __registerPrimitives(p) {
1236
- if (p.telemetry) {
1237
- this.__setTelemetry(p.telemetry);
1238
- }
1239
- if (p.logger) {
1240
- this.__setLogger(p.logger);
1241
- }
1242
- this.llm.__registerPrimitives(p);
1243
- this.logger.debug(`[Agents:${this.name}] initialized.`, {
1244
- model: this.model,
1245
- name: this.name
1246
- });
1247
- }
1248
- __registerMastra(mastra) {
1249
- this.#mastra = mastra;
1250
- }
1251
- /**
1252
- * Set the concrete tools for the agent
1253
- * @param tools
1254
- */
1255
- __setTools(tools) {
1256
- this.tools = tools;
1257
- this.logger.debug(`[Agents:${this.name}] Tools set for agent ${this.name}`, {
1258
- model: this.model,
1259
- name: this.name
1260
- });
1261
- }
1262
- async generateTitleFromUserMessage({
1263
- message
1264
- }) {
1265
- const {
1266
- text
1267
- } = await this.llm.__text({
1268
- messages: [{
1269
- role: "system",
1270
- content: `
1271
-
1272
- - you will generate a short title based on the first message a user begins a conversation with
1273
- - ensure it is not more than 80 characters long
1274
- - the title should be a summary of the user's message
1275
- - do not use quotes or colons
1276
- - the entire text you return will be used as the title`
1277
- }, {
1278
- role: "user",
1279
- content: JSON.stringify(message)
1280
- }]
1281
- });
1282
- const cleanedText = text.replace(/<think>[\s\S]*?<\/think>/g, "").trim();
1283
- return cleanedText;
1284
- }
1285
- getMostRecentUserMessage(messages) {
1286
- const userMessages = messages.filter(message => message.role === "user");
1287
- return userMessages.at(-1);
1288
- }
1289
- async genTitle(userMessage) {
1290
- let title = `New Thread ${(/* @__PURE__ */new Date()).toISOString()}`;
1291
- try {
1292
- if (userMessage) {
1293
- title = await this.generateTitleFromUserMessage({
1294
- message: userMessage
1295
- });
1296
- }
1297
- } catch (e) {
1298
- console.error("Error generating title:", e);
1299
- }
1300
- return title;
1301
- }
1302
- async saveMemory({
1303
- threadId,
1304
- memoryConfig,
1305
- resourceId,
1306
- userMessages,
1307
- runId
1308
- }) {
1309
- const userMessage = this.getMostRecentUserMessage(userMessages);
1310
- const memory = this.getMemory();
1311
- if (memory) {
1312
- const config = memory.getMergedThreadConfig(memoryConfig);
1313
- let thread;
1314
- if (!threadId) {
1315
- this.logger.debug(`No threadId, creating new thread for agent ${this.name}`, {
1316
- runId: runId || this.name
1317
- });
1318
- const title = config?.threads?.generateTitle ? await this.genTitle(userMessage) : void 0;
1319
- thread = await memory.createThread({
1320
- threadId,
1321
- resourceId,
1322
- memoryConfig,
1323
- title
1324
- });
1325
- } else {
1326
- thread = await memory.getThreadById({
1327
- threadId
1328
- });
1329
- if (!thread) {
1330
- this.logger.debug(`Thread with id ${threadId} not found, creating new thread for agent ${this.name}`, {
1331
- runId: runId || this.name
1332
- });
1333
- const title = config?.threads?.generateTitle ? await this.genTitle(userMessage) : void 0;
1334
- thread = await memory.createThread({
1335
- threadId,
1336
- resourceId,
1337
- title,
1338
- memoryConfig
1339
- });
1340
- }
1341
- }
1342
- const newMessages = userMessage ? [userMessage] : userMessages;
1343
- if (thread) {
1344
- const messages = newMessages.map(u => {
1345
- return {
1346
- id: this.getMemory()?.generateId(),
1347
- createdAt: /* @__PURE__ */new Date(),
1348
- threadId: thread.id,
1349
- ...u,
1350
- content: u.content,
1351
- role: u.role,
1352
- type: "text"
1353
- };
1354
- });
1355
- const memoryMessages = threadId && memory ? (await memory.rememberMessages({
1356
- threadId,
1357
- resourceId,
1358
- config: memoryConfig,
1359
- vectorMessageSearch: messages.slice(-1).map(m => {
1360
- if (typeof m === `string`) {
1361
- return m;
1362
- }
1363
- return m?.content || ``;
1364
- }).join(`
1365
- `)
1366
- })).messages : [];
1367
- if (memory) {
1368
- await memory.saveMessages({
1369
- messages,
1370
- memoryConfig
1371
- });
1372
- }
1373
- this.logger.debug("Saved messages to memory", {
1374
- threadId: thread.id,
1375
- runId
1376
- });
1377
- const memorySystemMessage = memory && threadId ? await memory.getSystemMessage({
1378
- threadId,
1379
- memoryConfig
1380
- }) : null;
1381
- return {
1382
- threadId: thread.id,
1383
- messages: [memorySystemMessage ? {
1384
- role: "system",
1385
- content: memorySystemMessage
1386
- } : null, ...this.sanitizeResponseMessages(memoryMessages), ...newMessages].filter(message => Boolean(message))
1387
- };
1388
- }
1389
- return {
1390
- threadId: thread?.id || threadId || "",
1391
- messages: userMessages
1392
- };
1393
- }
1394
- return {
1395
- threadId: threadId || "",
1396
- messages: userMessages
1397
- };
1398
- }
1399
- async saveResponse({
1400
- result,
1401
- threadId,
1402
- resourceId,
1403
- runId,
1404
- memoryConfig
1405
- }) {
1406
- const {
1407
- response
1408
- } = result;
1409
- try {
1410
- if (response.messages) {
1411
- const ms = Array.isArray(response.messages) ? response.messages : [response.messages];
1412
- const responseMessagesWithoutIncompleteToolCalls = this.sanitizeResponseMessages(ms);
1413
- const memory = this.getMemory();
1414
- if (memory) {
1415
- this.logger.debug(`[Agent:${this.name}] - Memory persistence: store=${this.getMemory()?.constructor.name} threadId=${threadId}`, {
1416
- runId,
1417
- resourceId,
1418
- threadId,
1419
- memoryStore: this.getMemory()?.constructor.name
1420
- });
1421
- await memory.saveMessages({
1422
- memoryConfig,
1423
- messages: responseMessagesWithoutIncompleteToolCalls.map((message, index) => {
1424
- const messageId = crypto$1.randomUUID();
1425
- let toolCallIds;
1426
- let toolCallArgs;
1427
- let toolNames;
1428
- let type = "text";
1429
- if (message.role === "tool") {
1430
- toolCallIds = message.content.map(content => content.toolCallId);
1431
- type = "tool-result";
1432
- }
1433
- if (message.role === "assistant") {
1434
- const assistantContent = message.content;
1435
- const assistantToolCalls = assistantContent.map(content => {
1436
- if (content.type === "tool-call") {
1437
- return {
1438
- toolCallId: content.toolCallId,
1439
- toolArgs: content.args,
1440
- toolName: content.toolName
1441
- };
1442
- }
1443
- return void 0;
1444
- })?.filter(Boolean);
1445
- toolCallIds = assistantToolCalls?.map(toolCall => toolCall.toolCallId);
1446
- toolCallArgs = assistantToolCalls?.map(toolCall => toolCall.toolArgs);
1447
- toolNames = assistantToolCalls?.map(toolCall => toolCall.toolName);
1448
- type = assistantContent?.[0]?.type;
1449
- }
1450
- return {
1451
- id: messageId,
1452
- threadId,
1453
- role: message.role,
1454
- content: message.content,
1455
- createdAt: new Date(Date.now() + index),
1456
- // use Date.now() + index to make sure every message is atleast one millisecond apart
1457
- toolCallIds: toolCallIds?.length ? toolCallIds : void 0,
1458
- toolCallArgs: toolCallArgs?.length ? toolCallArgs : void 0,
1459
- toolNames: toolNames?.length ? toolNames : void 0,
1460
- type
1461
- };
1462
- })
1463
- });
1464
- }
1465
- }
1466
- } catch (err) {
1467
- this.logger.error(`[Agent:${this.name}] - Failed to save assistant response`, {
1468
- error: err,
1469
- runId
1470
- });
1471
- }
1472
- }
1473
- sanitizeResponseMessages(messages) {
1474
- let toolResultIds = [];
1475
- let toolCallIds = [];
1476
- for (const message of messages) {
1477
- if (!Array.isArray(message.content)) continue;
1478
- if (message.role === "tool") {
1479
- for (const content of message.content) {
1480
- if (content.type === "tool-result") {
1481
- toolResultIds.push(content.toolCallId);
1482
- }
1483
- }
1484
- } else if (message.role === "assistant" || message.role === "user") {
1485
- for (const content of message.content) {
1486
- if (typeof content !== `string`) {
1487
- if (content.type === `tool-call`) {
1488
- toolCallIds.push(content.toolCallId);
1489
- }
1490
- }
1491
- }
1492
- }
1493
- }
1494
- const messagesBySanitizedContent = messages.map(message => {
1495
- if (message.role !== "assistant" && message.role !== `tool` && message.role !== `user`) return message;
1496
- if (!message.content || typeof message.content === "string" || typeof message.content === "number") {
1497
- return message;
1498
- }
1499
- const sanitizedContent = message.content.filter(content => {
1500
- if (content.type === `tool-call`) {
1501
- return toolResultIds.includes(content.toolCallId);
1502
- }
1503
- if (content.type === `text`) {
1504
- return content.text.trim() !== ``;
1505
- }
1506
- if (content.type === `tool-result`) {
1507
- return toolCallIds.includes(content.toolCallId);
1508
- }
1509
- return true;
1510
- });
1511
- return {
1512
- ...message,
1513
- content: sanitizedContent
1514
- };
1515
- });
1516
- return messagesBySanitizedContent.filter(message => {
1517
- if (typeof message.content === `string`) {
1518
- return message.content !== "";
1519
- }
1520
- if (Array.isArray(message.content)) {
1521
- return message.content.length && message.content.every(c => {
1522
- if (c.type === `text`) {
1523
- return c.text && c.text !== "";
1524
- }
1525
- return true;
1526
- });
1527
- }
1528
- return true;
1529
- });
1530
- }
1531
- convertTools({
1532
- toolsets,
1533
- threadId,
1534
- resourceId,
1535
- runId
1536
- }) {
1537
- this.logger.debug(`[Agents:${this.name}] - Assigning tools`, {
1538
- runId,
1539
- threadId,
1540
- resourceId
1541
- });
1542
- const memory = this.getMemory();
1543
- const memoryTools = memory?.getTools?.();
1544
- let mastraProxy = void 0;
1545
- const logger = this.logger;
1546
- if (this.#mastra) {
1547
- mastraProxy = createMastraProxy({
1548
- mastra: this.#mastra,
1549
- logger
1550
- });
1551
- }
1552
- const converted = Object.entries(this.tools || {}).reduce((memo, value) => {
1553
- const k = value[0];
1554
- const tool = this.tools[k];
1555
- if (tool) {
1556
- const options = {
1557
- name: k,
1558
- runId,
1559
- threadId,
1560
- resourceId,
1561
- logger: this.logger,
1562
- mastra: mastraProxy,
1563
- memory,
1564
- agentName: this.name
1565
- };
1566
- memo[k] = makeCoreTool(tool, options);
1567
- }
1568
- return memo;
1569
- }, {});
1570
- const convertedMemoryTools = memoryTools ? Object.entries(memoryTools).reduce((memo, [k, tool]) => {
1571
- memo[k] = {
1572
- description: tool.description,
1573
- parameters: tool.parameters,
1574
- execute: typeof tool?.execute === "function" ? async (args, options) => {
1575
- try {
1576
- this.logger.debug(`[Agent:${this.name}] - Executing memory tool ${k}`, {
1577
- name: k,
1578
- description: tool.description,
1579
- args,
1580
- runId,
1581
- threadId,
1582
- resourceId
1583
- });
1584
- return tool?.execute?.({
1585
- context: args,
1586
- mastra: mastraProxy,
1587
- memory,
1588
- runId,
1589
- threadId,
1590
- resourceId
1591
- }, options) ?? void 0;
1592
- } catch (err) {
1593
- this.logger.error(`[Agent:${this.name}] - Failed memory tool execution`, {
1594
- error: err,
1595
- runId,
1596
- threadId,
1597
- resourceId
1598
- });
1599
- throw err;
1600
- }
1601
- } : void 0
1602
- };
1603
- return memo;
1604
- }, {}) : {};
1605
- const toolsFromToolsetsConverted = {
1606
- ...converted,
1607
- ...convertedMemoryTools
1608
- };
1609
- const toolsFromToolsets = Object.values(toolsets || {});
1610
- if (toolsFromToolsets.length > 0) {
1611
- this.logger.debug(`[Agent:${this.name}] - Adding tools from toolsets ${Object.keys(toolsets || {}).join(", ")}`, {
1612
- runId
1613
- });
1614
- toolsFromToolsets.forEach(toolset => {
1615
- Object.entries(toolset).forEach(([toolName, tool]) => {
1616
- const toolObj = tool;
1617
- const options = {
1618
- name: toolName,
1619
- runId,
1620
- threadId,
1621
- resourceId,
1622
- logger: this.logger,
1623
- agentName: this.name
1624
- };
1625
- toolsFromToolsetsConverted[toolName] = makeCoreTool(toolObj, options, "toolset");
1626
- });
1627
- });
1628
- }
1629
- return toolsFromToolsetsConverted;
1630
- }
1631
- async preExecute({
1632
- resourceId,
1633
- runId,
1634
- threadId,
1635
- memoryConfig,
1636
- messages
1637
- }) {
1638
- let coreMessages = [];
1639
- let threadIdToUse = threadId;
1640
- this.logger.debug(`Saving user messages in memory for agent ${this.name}`, {
1641
- runId
1642
- });
1643
- const saveMessageResponse = await this.saveMemory({
1644
- threadId,
1645
- resourceId,
1646
- userMessages: messages,
1647
- memoryConfig
1648
- });
1649
- coreMessages = saveMessageResponse.messages;
1650
- threadIdToUse = saveMessageResponse.threadId;
1651
- return {
1652
- coreMessages,
1653
- threadIdToUse
1654
- };
1655
- }
1656
- __primitive({
1657
- messages,
1658
- context: context2,
1659
- threadId,
1660
- memoryConfig,
1661
- resourceId,
1662
- runId,
1663
- toolsets
1664
- }) {
1665
- return {
1666
- before: async () => {
1667
- if (process.env.NODE_ENV !== "test") {
1668
- this.logger.debug(`[Agents:${this.name}] - Starting generation`, {
1669
- runId
1670
- });
1671
- }
1672
- const systemMessage = {
1673
- role: "system",
1674
- content: `${this.instructions}.`
1675
- };
1676
- let coreMessages = messages;
1677
- let threadIdToUse = threadId;
1678
- const memory = this.getMemory();
1679
- if (threadId && memory && !resourceId) {
1680
- throw new Error(`A resourceId must be provided when passing a threadId and using Memory. Saw threadId ${threadId} but resourceId is ${resourceId}`);
1681
- }
1682
- if (memory && resourceId) {
1683
- this.logger.debug(`[Agent:${this.name}] - Memory persistence enabled: store=${this.getMemory()?.constructor.name}, resourceId=${resourceId}`, {
1684
- runId,
1685
- resourceId,
1686
- threadId: threadIdToUse,
1687
- memoryStore: this.getMemory()?.constructor.name
1688
- });
1689
- const preExecuteResult = await this.preExecute({
1690
- resourceId,
1691
- runId,
1692
- threadId: threadIdToUse,
1693
- memoryConfig,
1694
- messages
1695
- });
1696
- coreMessages = preExecuteResult.coreMessages;
1697
- threadIdToUse = preExecuteResult.threadIdToUse;
- }
- let convertedTools;
- if (toolsets && Object.keys(toolsets || {}).length > 0 || this.getMemory() && resourceId) {
- const reasons = [];
- if (toolsets && Object.keys(toolsets || {}).length > 0) {
- reasons.push(`toolsets present (${Object.keys(toolsets || {}).length} tools)`);
- }
- if (this.getMemory() && resourceId) {
- reasons.push("memory and resourceId available");
- }
- this.logger.debug(`[Agent:${this.name}] - Enhancing tools: ${reasons.join(", ")}`, {
- runId,
- toolsets: toolsets ? Object.keys(toolsets) : void 0,
- hasMemory: !!this.getMemory(),
- hasResourceId: !!resourceId
- });
- convertedTools = this.convertTools({
- toolsets,
- threadId: threadIdToUse,
- resourceId,
- runId
- });
- }
- const messageObjects = [systemMessage, ...(context2 || []), ...coreMessages];
- return {
- messageObjects,
- convertedTools,
- threadId: threadIdToUse
- };
- },
- after: async ({
- result,
- threadId: threadId2,
- memoryConfig: memoryConfig2,
- outputText,
- runId: runId2
- }) => {
- const resToLog = {
- text: result?.text,
- object: result?.object,
- toolResults: result?.toolResults,
- toolCalls: result?.toolCalls,
- usage: result?.usage,
- steps: result?.steps?.map(s => {
- return {
- stepType: s?.stepType,
- text: result?.text,
- object: result?.object,
- toolResults: result?.toolResults,
- toolCalls: result?.toolCalls,
- usage: result?.usage
- };
- })
- };
- this.logger.debug(`[Agent:${this.name}] - Post processing LLM response`, {
- runId: runId2,
- result: resToLog,
- threadId: threadId2
- });
- if (this.getMemory() && resourceId) {
- try {
- await this.saveResponse({
- result,
- threadId: threadId2,
- resourceId,
- memoryConfig: memoryConfig2,
- runId: runId2
- });
- } catch (e) {
- this.logger.error("Error saving response", {
- error: e,
- runId: runId2,
- result: resToLog,
- threadId: threadId2
- });
- }
- }
- if (Object.keys(this.evals || {}).length > 0) {
- const input = messages.map(message => message.content).join("\n");
- const runIdToUse = runId2 || crypto.randomUUID();
- for (const metric of Object.values(this.evals || {})) {
- executeHook("onGeneration" /* ON_GENERATION */, {
- input,
- output: outputText,
- runId: runIdToUse,
- metric,
- agentName: this.name,
- instructions: this.instructions
- });
- }
- }
- }
- };
- }
- async generate(messages, {
- context: context2,
- threadId: threadIdInFn,
- memoryOptions,
- resourceId,
- maxSteps = 5,
- onStepFinish,
- runId,
- output,
- toolsets,
- temperature,
- toolChoice = "auto",
- experimental_output,
- telemetry,
- ...rest
- } = {}) {
- let messagesToUse = [];
- if (typeof messages === `string`) {
- messagesToUse = [{
- role: "user",
- content: messages
- }];
- } else {
- messagesToUse = messages.map(message => {
- if (typeof message === `string`) {
- return {
- role: "user",
- content: message
- };
- }
- return message;
- });
- }
- const runIdToUse = runId || crypto$1.randomUUID();
- const {
- before,
- after
- } = this.__primitive({
- messages: messagesToUse,
- context: context2,
- threadId: threadIdInFn,
- memoryConfig: memoryOptions,
- resourceId,
- runId: runIdToUse,
- toolsets
- });
- const {
- threadId,
- messageObjects,
- convertedTools
- } = await before();
- if (!output && experimental_output) {
- const result2 = await this.llm.__text({
- messages: messageObjects,
- tools: this.tools,
- convertedTools,
- onStepFinish,
- maxSteps: maxSteps || 5,
- runId: runIdToUse,
- temperature,
- toolChoice: toolChoice || "auto",
- experimental_output,
- threadId,
- resourceId,
- memory: this.getMemory(),
- ...rest
- });
- const outputText2 = result2.text;
- await after({
- result: result2,
- threadId,
- memoryConfig: memoryOptions,
- outputText: outputText2,
- runId: runIdToUse
- });
- const newResult = result2;
- newResult.object = result2.experimental_output;
- return newResult;
- }
- if (!output) {
- const result2 = await this.llm.__text({
- messages: messageObjects,
- tools: this.tools,
- convertedTools,
- onStepFinish,
- maxSteps,
- runId: runIdToUse,
- temperature,
- toolChoice,
- telemetry,
- threadId,
- resourceId,
- memory: this.getMemory(),
- ...rest
- });
- const outputText2 = result2.text;
- await after({
- result: result2,
- threadId,
- memoryConfig: memoryOptions,
- outputText: outputText2,
- runId: runIdToUse
- });
- return result2;
- }
- const result = await this.llm.__textObject({
- messages: messageObjects,
- tools: this.tools,
- structuredOutput: output,
- convertedTools,
- onStepFinish,
- maxSteps,
- runId: runIdToUse,
- temperature,
- toolChoice,
- telemetry,
- memory: this.getMemory(),
- ...rest
- });
- const outputText = JSON.stringify(result.object);
- await after({
- result,
- threadId,
- memoryConfig: memoryOptions,
- outputText,
- runId: runIdToUse
- });
- return result;
- }
- async stream(messages, {
- context: context2,
- threadId: threadIdInFn,
- memoryOptions,
- resourceId,
- maxSteps = 5,
- onFinish,
- onStepFinish,
- runId,
- toolsets,
- output,
- temperature,
- toolChoice = "auto",
- experimental_output,
- telemetry,
- ...rest
- } = {}) {
- const runIdToUse = runId || crypto$1.randomUUID();
- let messagesToUse = [];
- if (typeof messages === `string`) {
- messagesToUse = [{
- role: "user",
- content: messages
- }];
- } else {
- messagesToUse = messages.map(message => {
- if (typeof message === `string`) {
- return {
- role: "user",
- content: message
- };
- }
- return message;
- });
- }
- const {
- before,
- after
- } = this.__primitive({
- messages: messagesToUse,
- context: context2,
- threadId: threadIdInFn,
- memoryConfig: memoryOptions,
- resourceId,
- runId: runIdToUse,
- toolsets
- });
- const {
- threadId,
- messageObjects,
- convertedTools
- } = await before();
- if (!output && experimental_output) {
- this.logger.debug(`Starting agent ${this.name} llm stream call`, {
- runId
- });
- const streamResult = await this.llm.__stream({
- messages: messageObjects,
- temperature,
- tools: this.tools,
- convertedTools,
- onStepFinish,
- onFinish: async result => {
- try {
- const res = JSON.parse(result) || {};
- const outputText = res.text;
- await after({
- result: res,
- threadId,
- memoryConfig: memoryOptions,
- outputText,
- runId: runIdToUse
- });
- } catch (e) {
- this.logger.error("Error saving memory on finish", {
- error: e,
- runId
- });
- }
- onFinish?.(result);
- },
- maxSteps,
- runId: runIdToUse,
- toolChoice,
- experimental_output,
- memory: this.getMemory(),
- ...rest
- });
- const newStreamResult = streamResult;
- newStreamResult.partialObjectStream = streamResult.experimental_partialOutputStream;
- return newStreamResult;
- } else if (!output) {
- this.logger.debug(`Starting agent ${this.name} llm stream call`, {
- runId
- });
- return this.llm.__stream({
- messages: messageObjects,
- temperature,
- tools: this.tools,
- convertedTools,
- onStepFinish,
- onFinish: async result => {
- try {
- const res = JSON.parse(result) || {};
- const outputText = res.text;
- await after({
- result: res,
- threadId,
- memoryConfig: memoryOptions,
- outputText,
- runId: runIdToUse
- });
- } catch (e) {
- this.logger.error("Error saving memory on finish", {
- error: e,
- runId
- });
- }
- onFinish?.(result);
- },
- maxSteps,
- runId: runIdToUse,
- toolChoice,
- telemetry,
- memory: this.getMemory(),
- ...rest
- });
- }
- this.logger.debug(`Starting agent ${this.name} llm streamObject call`, {
- runId
- });
- return this.llm.__streamObject({
- messages: messageObjects,
- tools: this.tools,
- temperature,
- structuredOutput: output,
- convertedTools,
- onStepFinish,
- onFinish: async result => {
- try {
- const res = JSON.parse(result) || {};
- const outputText = JSON.stringify(res.object);
- await after({
- result: res,
- threadId,
- memoryConfig: memoryOptions,
- outputText,
- runId: runIdToUse
- });
- } catch (e) {
- this.logger.error("Error saving memory on finish", {
- error: e,
- runId
- });
- }
- onFinish?.(result);
- },
- runId: runIdToUse,
- toolChoice,
- telemetry,
- memory: this.getMemory(),
- ...rest
- });
- }
- /**
- * Convert text to speech using the configured voice provider
- * @param input Text or text stream to convert to speech
- * @param options Speech options including speaker and provider-specific options
- * @returns Audio stream
- */
- async speak(input, options) {
- if (!this.voice) {
- throw new Error("No voice provider configured");
- }
- try {
- return this.voice.speak(input, options);
- } catch (e) {
- this.logger.error("Error during agent speak", {
- error: e
- });
- throw e;
- }
- }
- /**
- * Convert speech to text using the configured voice provider
- * @param audioStream Audio stream to transcribe
- * @param options Provider-specific transcription options
- * @returns Text or text stream
- */
- async listen(audioStream, options) {
- if (!this.voice) {
- throw new Error("No voice provider configured");
- }
- try {
- return this.voice.listen(audioStream, options);
- } catch (e) {
- this.logger.error("Error during agent listen", {
- error: e
- });
- throw e;
- }
- }
- /**
- * Get a list of available speakers from the configured voice provider
- * @throws {Error} If no voice provider is configured
- * @returns {Promise<Array<{voiceId: string}>>} List of available speakers
- */
- async getSpeakers() {
- if (!this.voice) {
- throw new Error("No voice provider configured");
- }
- try {
- return await this.voice.getSpeakers();
- } catch (e) {
- this.logger.error("Error during agent getSpeakers", {
- error: e
- });
- throw e;
- }
- }
- };
- exports.Agent = /*@__PURE__*/(_ => {
- _init = __decoratorStart(_a);
- exports.Agent = __decorateElement(_init, 0, "Agent", _Agent_decorators, exports.Agent);
- __runInitializers(_init, 1, exports.Agent);
- return exports.Agent;
- })();
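
For reference, the block removed above is the Agent class's generate / stream / speak / listen / getSpeakers surface as it appeared in this compiled bundle. The TypeScript sketch below shows how that surface is typically called, using only the method names and option keys visible in the removed code (threadId, resourceId, maxSteps, output, onFinish, and the voice helpers). It is a minimal, illustrative sketch: the agent instance, the schema, the identifier strings, and the `speaker` value are assumed placeholders and are not defined anywhere in this diff.

    // Illustrative sketch only: `agent` stands for an already-constructed Agent
    // instance (model and voice wiring are outside this diff), and the schema
    // argument is a placeholder for whatever structured-output schema you use.
    async function demo(agent: any, actionItemsSchema: unknown) {
      const threadId = "thread-123";   // assumed identifiers, for illustration only
      const resourceId = "user-456";

      // Plain text generation; a string input is wrapped as a user message internally.
      const { text } = await agent.generate("Summarize the last meeting", {
        threadId,
        resourceId,
        maxSteps: 5,
      });

      // Structured output: passing `output` routes to the structured-object path,
      // and the serialized object is what gets persisted as outputText.
      const { object } = await agent.generate("Extract action items", {
        output: actionItemsSchema,
        threadId,
        resourceId,
      });

      // Streaming; onFinish receives the serialized result once the stream completes.
      const stream = await agent.stream("Draft a reply", {
        threadId,
        resourceId,
        onFinish: (result: string) => console.log("stream finished", result),
      });

      // Voice helpers throw "No voice provider configured" when no voice is set.
      const speakers = await agent.getSpeakers();
      const audio = await agent.speak(text, { speaker: speakers[0]?.voiceId });

      return { text, object, stream, audio };
    }

This mirrors the control flow in the removed code: `output` selects the structured-object path, `experimental_output` piggybacks on the text path, and the internal after() step persists the response to memory and runs any configured evals.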