@mastra/core 0.15.3-alpha.3 → 0.15.3-alpha.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (166)
  1. package/dist/agent/agent.types.d.ts +9 -4
  2. package/dist/agent/agent.types.d.ts.map +1 -1
  3. package/dist/agent/index.cjs +11 -11
  4. package/dist/agent/index.d.ts +33 -5
  5. package/dist/agent/index.d.ts.map +1 -1
  6. package/dist/agent/index.js +2 -2
  7. package/dist/agent/input-processor/index.cjs +6 -6
  8. package/dist/agent/input-processor/index.js +1 -1
  9. package/dist/agent/types.d.ts +11 -6
  10. package/dist/agent/types.d.ts.map +1 -1
  11. package/dist/ai-tracing/context.d.ts.map +1 -1
  12. package/dist/ai-tracing/default.d.ts.map +1 -1
  13. package/dist/ai-tracing/index.cjs +43 -31
  14. package/dist/ai-tracing/index.js +1 -1
  15. package/dist/ai-tracing/no-op.d.ts +0 -1
  16. package/dist/ai-tracing/no-op.d.ts.map +1 -1
  17. package/dist/ai-tracing/types.d.ts +0 -2
  18. package/dist/ai-tracing/types.d.ts.map +1 -1
  19. package/dist/ai-tracing/utils.d.ts +44 -8
  20. package/dist/ai-tracing/utils.d.ts.map +1 -1
  21. package/dist/{chunk-TWNFR6MQ.cjs → chunk-5CJDO3UO.cjs} +8 -8
  22. package/dist/{chunk-TWNFR6MQ.cjs.map → chunk-5CJDO3UO.cjs.map} +1 -1
  23. package/dist/{chunk-ZAJTIZZF.cjs → chunk-ABRPHTOG.cjs} +2 -2
  24. package/dist/{chunk-ZAJTIZZF.cjs.map → chunk-ABRPHTOG.cjs.map} +1 -1
  25. package/dist/{chunk-GOG77M6R.js → chunk-BJGHUKKM.js} +2 -2
  26. package/dist/{chunk-GOG77M6R.js.map → chunk-BJGHUKKM.js.map} +1 -1
  27. package/dist/{chunk-5TFCIXWE.js → chunk-CKM2ESZF.js} +44 -21
  28. package/dist/chunk-CKM2ESZF.js.map +1 -0
  29. package/dist/{chunk-AYFN43FB.cjs → chunk-DZADAEAF.cjs} +68 -22
  30. package/dist/chunk-DZADAEAF.cjs.map +1 -0
  31. package/dist/{chunk-QHEB6ZLO.cjs → chunk-F2CAC2R2.cjs} +47 -24
  32. package/dist/chunk-F2CAC2R2.cjs.map +1 -0
  33. package/dist/{chunk-O7IQL4DX.js → chunk-F4SQXAXR.js} +7 -7
  34. package/dist/{chunk-O7IQL4DX.js.map → chunk-F4SQXAXR.js.map} +1 -1
  35. package/dist/{chunk-HXEHQLBD.cjs → chunk-F6XWBVVG.cjs} +4 -4
  36. package/dist/{chunk-HXEHQLBD.cjs.map → chunk-F6XWBVVG.cjs.map} +1 -1
  37. package/dist/{chunk-UGN2UU3K.cjs → chunk-FQRDHVZC.cjs} +4 -4
  38. package/dist/{chunk-UGN2UU3K.cjs.map → chunk-FQRDHVZC.cjs.map} +1 -1
  39. package/dist/{chunk-4DKPMUAC.cjs → chunk-I7OAONIW.cjs} +259 -177
  40. package/dist/chunk-I7OAONIW.cjs.map +1 -0
  41. package/dist/{chunk-EMAAAVRA.js → chunk-IODUKRQP.js} +3 -3
  42. package/dist/{chunk-EMAAAVRA.js.map → chunk-IODUKRQP.js.map} +1 -1
  43. package/dist/{chunk-DJKIK6ZB.cjs → chunk-KUNWELBC.cjs} +422 -250
  44. package/dist/chunk-KUNWELBC.cjs.map +1 -0
  45. package/dist/{chunk-WOTBMZCN.js → chunk-LOYT3WUA.js} +255 -176
  46. package/dist/chunk-LOYT3WUA.js.map +1 -0
  47. package/dist/{chunk-24TFSB6Z.cjs → chunk-LVGGMWSE.cjs} +8 -8
  48. package/dist/chunk-LVGGMWSE.cjs.map +1 -0
  49. package/dist/{chunk-TQRLZH64.js → chunk-OFPVAPUH.js} +4 -4
  50. package/dist/{chunk-TQRLZH64.js.map → chunk-OFPVAPUH.js.map} +1 -1
  51. package/dist/{chunk-BGOXFBFK.js → chunk-P2IJ74UW.js} +391 -219
  52. package/dist/chunk-P2IJ74UW.js.map +1 -0
  53. package/dist/{chunk-YTVX52NU.cjs → chunk-VVTB47UG.cjs} +6 -6
  54. package/dist/{chunk-YTVX52NU.cjs.map → chunk-VVTB47UG.cjs.map} +1 -1
  55. package/dist/{chunk-D2GH2HAK.cjs → chunk-W5CF7DLB.cjs} +9 -9
  56. package/dist/{chunk-D2GH2HAK.cjs.map → chunk-W5CF7DLB.cjs.map} +1 -1
  57. package/dist/{chunk-ZC64CG7J.js → chunk-WWQ3QRPF.js} +4 -4
  58. package/dist/chunk-WWQ3QRPF.js.map +1 -0
  59. package/dist/{chunk-WUQSFK7W.js → chunk-XPFWOBV4.js} +4 -4
  60. package/dist/{chunk-WUQSFK7W.js.map → chunk-XPFWOBV4.js.map} +1 -1
  61. package/dist/{chunk-MEROMP3Z.js → chunk-YAWYQH3N.js} +3 -3
  62. package/dist/{chunk-MEROMP3Z.js.map → chunk-YAWYQH3N.js.map} +1 -1
  63. package/dist/{chunk-APVV75XG.js → chunk-YVIYEC6R.js} +62 -16
  64. package/dist/chunk-YVIYEC6R.js.map +1 -0
  65. package/dist/index.cjs +44 -44
  66. package/dist/index.js +10 -10
  67. package/dist/integration/index.cjs +3 -3
  68. package/dist/integration/index.js +1 -1
  69. package/dist/llm/index.d.ts +2 -2
  70. package/dist/llm/index.d.ts.map +1 -1
  71. package/dist/llm/model/base.types.d.ts +2 -2
  72. package/dist/llm/model/base.types.d.ts.map +1 -1
  73. package/dist/llm/model/model.d.ts +4 -4
  74. package/dist/llm/model/model.d.ts.map +1 -1
  75. package/dist/llm/model/model.loop.d.ts +1 -1
  76. package/dist/llm/model/model.loop.d.ts.map +1 -1
  77. package/dist/llm/model/model.loop.types.d.ts +2 -0
  78. package/dist/llm/model/model.loop.types.d.ts.map +1 -1
  79. package/dist/loop/index.cjs +2 -2
  80. package/dist/loop/index.js +1 -1
  81. package/dist/loop/loop.d.ts +1 -1
  82. package/dist/loop/loop.d.ts.map +1 -1
  83. package/dist/loop/types.d.ts +2 -0
  84. package/dist/loop/types.d.ts.map +1 -1
  85. package/dist/loop/workflow/stream.d.ts +1 -1
  86. package/dist/loop/workflow/stream.d.ts.map +1 -1
  87. package/dist/mastra/hooks.d.ts.map +1 -1
  88. package/dist/mastra/index.cjs +2 -2
  89. package/dist/mastra/index.d.ts +8 -3
  90. package/dist/mastra/index.d.ts.map +1 -1
  91. package/dist/mastra/index.js +1 -1
  92. package/dist/memory/index.cjs +4 -4
  93. package/dist/memory/index.js +1 -1
  94. package/dist/network/index.cjs +4 -4
  95. package/dist/network/index.js +2 -2
  96. package/dist/network/vNext/index.cjs +14 -14
  97. package/dist/network/vNext/index.js +2 -2
  98. package/dist/processors/index.cjs +13 -11
  99. package/dist/processors/index.cjs.map +1 -1
  100. package/dist/processors/index.d.ts +4 -0
  101. package/dist/processors/index.d.ts.map +1 -1
  102. package/dist/processors/index.js +7 -5
  103. package/dist/processors/index.js.map +1 -1
  104. package/dist/processors/processors/moderation.d.ts +4 -0
  105. package/dist/processors/processors/moderation.d.ts.map +1 -1
  106. package/dist/processors/processors/pii-detector.d.ts +3 -0
  107. package/dist/processors/processors/pii-detector.d.ts.map +1 -1
  108. package/dist/processors/processors/prompt-injection-detector.d.ts +2 -0
  109. package/dist/processors/processors/prompt-injection-detector.d.ts.map +1 -1
  110. package/dist/processors/processors/system-prompt-scrubber.d.ts +2 -0
  111. package/dist/processors/processors/system-prompt-scrubber.d.ts.map +1 -1
  112. package/dist/processors/runner.d.ts +5 -4
  113. package/dist/processors/runner.d.ts.map +1 -1
  114. package/dist/relevance/index.cjs +4 -4
  115. package/dist/relevance/index.js +1 -1
  116. package/dist/scores/base.d.ts +2 -0
  117. package/dist/scores/base.d.ts.map +1 -1
  118. package/dist/scores/hooks.d.ts +3 -1
  119. package/dist/scores/hooks.d.ts.map +1 -1
  120. package/dist/scores/index.cjs +27 -18
  121. package/dist/scores/index.cjs.map +1 -1
  122. package/dist/scores/index.js +23 -14
  123. package/dist/scores/index.js.map +1 -1
  124. package/dist/scores/run-experiment/index.d.ts +2 -0
  125. package/dist/scores/run-experiment/index.d.ts.map +1 -1
  126. package/dist/scores/types.d.ts +3 -0
  127. package/dist/scores/types.d.ts.map +1 -1
  128. package/dist/storage/index.cjs +3 -3
  129. package/dist/storage/index.js +1 -1
  130. package/dist/stream/index.cjs +3 -3
  131. package/dist/stream/index.js +1 -1
  132. package/dist/test-utils/llm-mock.cjs +2 -2
  133. package/dist/test-utils/llm-mock.js +1 -1
  134. package/dist/tools/index.cjs +4 -4
  135. package/dist/tools/index.js +1 -1
  136. package/dist/tools/is-vercel-tool.cjs +2 -2
  137. package/dist/tools/is-vercel-tool.js +1 -1
  138. package/dist/tools/tool.d.ts +3 -1
  139. package/dist/tools/tool.d.ts.map +1 -1
  140. package/dist/utils.cjs +17 -17
  141. package/dist/utils.d.ts +3 -3
  142. package/dist/utils.d.ts.map +1 -1
  143. package/dist/utils.js +1 -1
  144. package/dist/workflows/default.d.ts +13 -3
  145. package/dist/workflows/default.d.ts.map +1 -1
  146. package/dist/workflows/evented/index.cjs +10 -10
  147. package/dist/workflows/evented/index.js +1 -1
  148. package/dist/workflows/execution-engine.d.ts +2 -2
  149. package/dist/workflows/execution-engine.d.ts.map +1 -1
  150. package/dist/workflows/index.cjs +10 -10
  151. package/dist/workflows/index.js +1 -1
  152. package/dist/workflows/legacy/index.cjs +22 -22
  153. package/dist/workflows/legacy/index.js +1 -1
  154. package/dist/workflows/workflow.d.ts +8 -8
  155. package/dist/workflows/workflow.d.ts.map +1 -1
  156. package/package.json +6 -6
  157. package/dist/chunk-24TFSB6Z.cjs.map +0 -1
  158. package/dist/chunk-4DKPMUAC.cjs.map +0 -1
  159. package/dist/chunk-5TFCIXWE.js.map +0 -1
  160. package/dist/chunk-APVV75XG.js.map +0 -1
  161. package/dist/chunk-AYFN43FB.cjs.map +0 -1
  162. package/dist/chunk-BGOXFBFK.js.map +0 -1
  163. package/dist/chunk-DJKIK6ZB.cjs.map +0 -1
  164. package/dist/chunk-QHEB6ZLO.cjs.map +0 -1
  165. package/dist/chunk-WOTBMZCN.js.map +0 -1
  166. package/dist/chunk-ZC64CG7J.js.map +0 -1
@@ -1,14 +1,14 @@
  import { DefaultVoice } from './chunk-XM2ASGWH.js';
  import { EMITTER_SYMBOL } from './chunk-GK5V7YTQ.js';
  import { InstrumentClass, Telemetry } from './chunk-76MWMAR7.js';
- import { MastraLLMV1 } from './chunk-5TFCIXWE.js';
- import { MessageList, DefaultGeneratedFile, DefaultGeneratedFileWithType } from './chunk-O7IQL4DX.js';
+ import { MastraLLMV1 } from './chunk-CKM2ESZF.js';
+ import { MessageList, DefaultGeneratedFile, DefaultGeneratedFileWithType } from './chunk-F4SQXAXR.js';
  import { executeHook } from './chunk-TTELJD4F.js';
- import { ensureToolProperties, makeCoreTool, createMastraProxy, delay } from './chunk-ZC64CG7J.js';
+ import { ensureToolProperties, makeCoreTool, createMastraProxy, delay } from './chunk-WWQ3QRPF.js';
  import { RuntimeContext } from './chunk-HLRWYUFN.js';
  import { ToolStream } from './chunk-YW7UILPE.js';
- import { Tool } from './chunk-GOG77M6R.js';
- import { getSelectedAITracing, wrapMastra } from './chunk-WOTBMZCN.js';
+ import { Tool } from './chunk-BJGHUKKM.js';
+ import { getOrCreateSpan, wrapMastra, selectFields } from './chunk-LOYT3WUA.js';
  import { MastraError } from './chunk-MCOVMKIS.js';
  import { MastraBase } from './chunk-6GF5M4GX.js';
  import { ConsoleLogger, RegisteredLogger } from './chunk-X3GXU6TZ.js';
@@ -510,7 +510,7 @@ var ProcessorRunner = class {
  this.logger = logger;
  this.agentName = agentName;
  }
- async runOutputProcessors(messageList, telemetry) {
+ async runOutputProcessors(messageList, tracingContext, telemetry) {
  const responseMessages = messageList.clear.response.v2();
  let processableMessages = [...responseMessages];
  const ctx = {
@@ -530,13 +530,15 @@ var ProcessorRunner = class {
  if (!telemetry) {
  processableMessages = await processMethod({
  messages: processableMessages,
- abort: ctx.abort
+ abort: ctx.abort,
+ tracingContext
  });
  } else {
  await telemetry.traceMethod(async () => {
  processableMessages = await processMethod({
  messages: processableMessages,
- abort: ctx.abort
+ abort: ctx.abort,
+ tracingContext
  });
  return processableMessages;
  }, {
@@ -557,7 +559,7 @@ var ProcessorRunner = class {
  /**
  * Process a stream part through all output processors with state management
  */
- async processPart(part, processorStates) {
+ async processPart(part, processorStates, tracingContext) {
  if (!this.outputProcessors.length) {
  return {
  part,
@@ -581,7 +583,8 @@ var ProcessorRunner = class {
  state: state.customState,
  abort: reason => {
  throw new TripWire(reason || `Stream part blocked by ${processor.name}`);
- }
+ },
+ tracingContext
  });
  processedPart = result;
  }
@@ -608,7 +611,7 @@ var ProcessorRunner = class {
  };
  }
  }
- async runOutputProcessorsForStream(streamResult) {
+ async runOutputProcessorsForStream(streamResult, tracingContext) {
  return new ReadableStream({
  start: async controller => {
  const reader = streamResult.fullStream.getReader();
@@ -627,7 +630,7 @@ var ProcessorRunner = class {
  part: processedPart,
  blocked,
  reason
- } = await this.processPart(value, processorStates);
+ } = await this.processPart(value, processorStates, tracingContext);
  if (blocked) {
  void this.logger.debug(`[Agent:${this.agentName}] - Stream part blocked by output processor`, {
  reason,
@@ -649,7 +652,7 @@ var ProcessorRunner = class {
  }
  });
  }
- async runInputProcessors(messageList, telemetry) {
+ async runInputProcessors(messageList, tracingContext, telemetry) {
  const userMessages = messageList.clear.input.v2();
  let processableMessages = [...userMessages];
  const ctx = {
@@ -669,13 +672,15 @@ var ProcessorRunner = class {
  if (!telemetry) {
  processableMessages = await processMethod({
  messages: processableMessages,
- abort: ctx.abort
+ abort: ctx.abort,
+ tracingContext
  });
  } else {
  await telemetry.traceMethod(async () => {
  processableMessages = await processMethod({
  messages: processableMessages,
- abort: ctx.abort
+ abort: ctx.abort,
+ tracingContext
  });
  return processableMessages;
  }, {
@@ -3738,12 +3743,23 @@ function workflowLoopStream({
  modelSettings,
  _internal,
  modelStreamSpan,
+ llmAISpan,
  ...rest
  }) {
  return new ReadableStream$1({
  start: async controller => {
  const writer = new WritableStream({
  write: chunk => {
+ if (llmAISpan && chunk.type === "text-delta") {
+ llmAISpan.createEventSpan({
+ type: "llm_chunk" /* LLM_CHUNK */,
+ name: `llm chunk: ${chunk.type}`,
+ output: chunk.payload.text,
+ attributes: {
+ chunkType: chunk.type
+ }
+ });
+ }
  controller.enqueue(chunk);
  }
  });
@@ -3844,6 +3860,9 @@ function workflowLoopStream({
  user: rest.messageList.get.input.aiV5.model(),
  nonUser: []
  }
+ },
+ tracingContext: {
+ currentSpan: llmAISpan
  }
  });
  if (executionResult.status !== "success") {
@@ -3886,6 +3905,7 @@ function loop({
  _internal,
  mode = "stream",
  outputProcessors,
+ llmAISpan,
  ...rest
  }) {
  let loggerToUse = logger || new ConsoleLogger({
@@ -3943,6 +3963,7 @@ function loop({
  telemetry_settings,
  modelSettings,
  outputProcessors,
+ llmAISpan,
  ...rest
  };
  const streamFn = workflowLoopStream(workflowLoopProps);
@@ -4055,7 +4076,8 @@ var MastraLLMVNext = class extends MastraBase {
  output,
  options,
  outputProcessors,
- providerOptions
+ providerOptions,
+ tracingContext
  // ...rest
  }) {
  let stopWhenToUse;
@@ -4075,6 +4097,20 @@ var MastraLLMVNext = class extends MastraBase {
  if (output) {
  output = this._applySchemaCompat(output);
  }
+ const llmAISpan = tracingContext?.currentSpan?.createChildSpan({
+ name: `llm stream: '${model.modelId}'`,
+ type: "llm_generation" /* LLM_GENERATION */,
+ input: messages,
+ attributes: {
+ model: model.modelId,
+ provider: model.provider,
+ streaming: true
+ },
+ metadata: {
+ threadId,
+ resourceId
+ }
+ });
  try {
  const messageList = new MessageList({
  threadId,
@@ -4095,6 +4131,7 @@ var MastraLLMVNext = class extends MastraBase {
  },
  output,
  outputProcessors,
+ llmAISpan,
  options: {
  ...options,
  onStepFinish: async props => {
@@ -4177,7 +4214,11 @@ var MastraLLMVNext = class extends MastraBase {
  }
  }
  };
- return loop(loopOptions);
+ const result = loop(loopOptions);
+ llmAISpan?.end({
+ output: result
+ });
+ return result;
  } catch (e) {
  const mastraError = new MastraError({
  id: "LLM_STREAM_TEXT_AI_SDK_EXECUTION_FAILED",
@@ -4191,6 +4232,9 @@ var MastraLLMVNext = class extends MastraBase {
  resourceId: resourceId ?? "unknown"
  }
  }, e);
+ llmAISpan?.error({
+ error: mastraError
+ });
  throw mastraError;
  }
  }
@@ -4354,6 +4398,7 @@ function runScorer({
  input,
  output,
  runtimeContext,
+ tracingContext,
  entity,
  structuredOutput,
  source,
@@ -4386,6 +4431,7 @@ function runScorer({
  input,
  output,
  runtimeContext: Object.fromEntries(runtimeContext.entries()),
+ tracingContext,
  runId,
  source,
  entity,
@@ -7599,7 +7645,8 @@ var ModerationProcessor = class _ModerationProcessor {
  try {
  const {
  messages,
- abort
+ abort,
+ tracingContext
  } = args;
  if (messages.length === 0) {
  return messages;
@@ -7611,7 +7658,7 @@ var ModerationProcessor = class _ModerationProcessor {
  passedMessages.push(message);
  continue;
  }
- const moderationResult = await this.moderateContent(textContent);
+ const moderationResult = await this.moderateContent(textContent, false, tracingContext);
  if (this.isModerationFlagged(moderationResult)) {
  this.handleFlaggedContent(moderationResult, this.strategy, abort);
  if (this.strategy === "filter") {
@@ -7636,13 +7683,14 @@ var ModerationProcessor = class _ModerationProcessor {
  const {
  part,
  streamParts,
- abort
+ abort,
+ tracingContext
  } = args;
  if (part.type !== "text-delta") {
  return part;
  }
  const contentToModerate = this.buildContextFromChunks(streamParts);
- const moderationResult = await this.moderateContent(contentToModerate, true);
+ const moderationResult = await this.moderateContent(contentToModerate, true, tracingContext);
  if (this.isModerationFlagged(moderationResult)) {
  this.handleFlaggedContent(moderationResult, this.strategy, abort);
  if (this.strategy === "filter") {
@@ -7661,7 +7709,7 @@ var ModerationProcessor = class _ModerationProcessor {
  /**
  * Moderate content using the internal agent
  */
- async moderateContent(content, isStream = false) {
+ async moderateContent(content, isStream = false, tracingContext) {
  const prompt = this.createModerationPrompt(content, isStream);
  try {
  const model = await this.moderationAgent.getModel();
@@ -7678,12 +7726,14 @@ var ModerationProcessor = class _ModerationProcessor {
  output: schema,
  modelSettings: {
  temperature: 0
- }
+ },
+ tracingContext
  });
  } else {
  response = await this.moderationAgent.generate(prompt, {
  output: schema,
- temperature: 0
+ temperature: 0,
+ tracingContext
  });
  }
  const result = response.object;
@@ -7836,7 +7886,8 @@ var PromptInjectionDetector = class _PromptInjectionDetector {
  try {
  const {
  messages,
- abort
+ abort,
+ tracingContext
  } = args;
  if (messages.length === 0) {
  return messages;
@@ -7848,7 +7899,7 @@ var PromptInjectionDetector = class _PromptInjectionDetector {
  processedMessages.push(message);
  continue;
  }
- const detectionResult = await this.detectPromptInjection(textContent);
+ const detectionResult = await this.detectPromptInjection(textContent, tracingContext);
  if (this.isInjectionFlagged(detectionResult)) {
  const processedMessage = this.handleDetectedInjection(message, detectionResult, this.strategy, abort);
  if (this.strategy === "filter") {
@@ -7873,7 +7924,7 @@ var PromptInjectionDetector = class _PromptInjectionDetector {
  /**
  * Detect prompt injection using the internal agent
  */
- async detectPromptInjection(content) {
+ async detectPromptInjection(content, tracingContext) {
  const prompt = this.createDetectionPrompt(content);
  try {
  const model = await this.detectionAgent.getModel();
@@ -7891,12 +7942,14 @@ var PromptInjectionDetector = class _PromptInjectionDetector {
  output: schema,
  modelSettings: {
  temperature: 0
- }
+ },
+ tracingContext
  });
  } else {
  response = await this.detectionAgent.generate(prompt, {
  output: schema,
- temperature: 0
+ temperature: 0,
+ tracingContext
  });
  }
  const result = response.object;
@@ -8067,7 +8120,8 @@ var PIIDetector = class _PIIDetector {
  try {
  const {
  messages,
- abort
+ abort,
+ tracingContext
  } = args;
  if (messages.length === 0) {
  return messages;
@@ -8079,7 +8133,7 @@ var PIIDetector = class _PIIDetector {
  processedMessages.push(message);
  continue;
  }
- const detectionResult = await this.detectPII(textContent);
+ const detectionResult = await this.detectPII(textContent, tracingContext);
  if (this.isPIIFlagged(detectionResult)) {
  const processedMessage = this.handleDetectedPII(message, detectionResult, this.strategy, abort);
  if (this.strategy === "filter") {
@@ -8106,7 +8160,7 @@ var PIIDetector = class _PIIDetector {
  /**
  * Detect PII using the internal agent
  */
- async detectPII(content) {
+ async detectPII(content, tracingContext) {
  const prompt = this.createDetectionPrompt(content);
  const schema = z.object({
  categories: z.object(this.detectionTypes.reduce((props, type) => {
@@ -8131,12 +8185,14 @@ var PIIDetector = class _PIIDetector {
  output: schema,
  modelSettings: {
  temperature: 0
- }
+ },
+ tracingContext
  });
  } else {
  response = await this.detectionAgent.generate(prompt, {
  output: schema,
- temperature: 0
+ temperature: 0,
+ tracingContext
  });
  }
  const result = response.object;
@@ -8330,7 +8386,8 @@ IMPORTANT: IF NO PII IS DETECTED, RETURN AN EMPTY OBJECT, DO NOT INCLUDE ANYTHIN
  async processOutputStream(args) {
  const {
  part,
- abort
+ abort,
+ tracingContext
  } = args;
  try {
  if (part.type !== "text-delta") {
@@ -8340,7 +8397,7 @@ IMPORTANT: IF NO PII IS DETECTED, RETURN AN EMPTY OBJECT, DO NOT INCLUDE ANYTHIN
  if (!textContent.trim()) {
  return part;
  }
- const detectionResult = await this.detectPII(textContent);
+ const detectionResult = await this.detectPII(textContent, tracingContext);
  if (this.isPIIFlagged(detectionResult)) {
  switch (this.strategy) {
  case "block":
@@ -9358,6 +9415,7 @@ var Agent = class extends (_a = MastraBase) {
  async generateTitleFromUserMessage({
  message,
  runtimeContext = new RuntimeContext(),
+ tracingContext,
  model,
  instructions
  }) {
@@ -9390,6 +9448,7 @@ var Agent = class extends (_a = MastraBase) {
  if (llm.getModel().specificationVersion === "v2") {
  const result = llm.stream({
  runtimeContext,
+ tracingContext,
  messages: [{
  role: "system",
  content: systemInstructions
@@ -9402,6 +9461,7 @@ var Agent = class extends (_a = MastraBase) {
  } else {
  const result = await llm.__text({
  runtimeContext,
+ tracingContext,
  messages: [{
  role: "system",
  content: systemInstructions
@@ -9419,7 +9479,7 @@ var Agent = class extends (_a = MastraBase) {
  const userMessages = messages.filter(message => message.role === "user");
  return userMessages.at(-1);
  }
- async genTitle(userMessage, runtimeContext, model, instructions) {
+ async genTitle(userMessage, runtimeContext, tracingContext, model, instructions) {
  try {
  if (userMessage) {
  const normMessage = new MessageList().add(userMessage, "user").get.all.ui().at(-1);
@@ -9427,6 +9487,7 @@ var Agent = class extends (_a = MastraBase) {
  return await this.generateTitleFromUserMessage({
  message: normMessage,
  runtimeContext,
+ tracingContext,
  model,
  instructions
  });
@@ -9518,8 +9579,8 @@ var Agent = class extends (_a = MastraBase) {
  resourceId,
  threadId,
  runtimeContext,
- mastraProxy,
- agentAISpan
+ tracingContext,
+ mastraProxy
  }) {
  let convertedMemoryTools = {};
  const memory = await this.getMemory({
@@ -9542,10 +9603,10 @@ var Agent = class extends (_a = MastraBase) {
  memory,
  agentName: this.name,
  runtimeContext,
+ tracingContext,
  model: typeof this.model === "function" ? await this.getModel({
  runtimeContext
- }) : this.model,
- agentAISpan
+ }) : this.model
  };
  const convertedToCoreTool = makeCoreTool(toolObj, options);
  convertedMemoryTools[toolName] = convertedToCoreTool;
@@ -9555,6 +9616,7 @@ var Agent = class extends (_a = MastraBase) {
  }
  async __runInputProcessors({
  runtimeContext,
+ tracingContext,
  messageList,
  inputProcessorOverrides
  }) {
@@ -9565,13 +9627,13 @@ var Agent = class extends (_a = MastraBase) {
  runtimeContext,
  inputProcessorOverrides
  });
- const tracedRunInputProcessors = messageList2 => {
+ const tracedRunInputProcessors = (messageList2, tracingContext2) => {
  const telemetry = this.#mastra?.getTelemetry();
  if (!telemetry) {
- return runner.runInputProcessors(messageList2, void 0);
+ return runner.runInputProcessors(messageList2, tracingContext2, void 0);
  }
  return telemetry.traceMethod(async data => {
- return runner.runInputProcessors(data.messageList, telemetry);
+ return runner.runInputProcessors(data.messageList, tracingContext2, telemetry);
  }, {
  spanName: `agent.${this.name}.inputProcessors`,
  attributes: {
@@ -9584,7 +9646,7 @@ var Agent = class extends (_a = MastraBase) {
  });
  };
  try {
- messageList = await tracedRunInputProcessors(messageList);
+ messageList = await tracedRunInputProcessors(messageList, tracingContext);
  } catch (error) {
  if (error instanceof TripWire) {
  tripwireTriggered = true;
@@ -9607,6 +9669,7 @@ var Agent = class extends (_a = MastraBase) {
  }
  async __runOutputProcessors({
  runtimeContext,
+ tracingContext,
  messageList,
  outputProcessorOverrides
  }) {
@@ -9617,13 +9680,13 @@ var Agent = class extends (_a = MastraBase) {
  runtimeContext,
  outputProcessorOverrides
  });
- const tracedRunOutputProcessors = messageList2 => {
+ const tracedRunOutputProcessors = (messageList2, tracingContext2) => {
  const telemetry = this.#mastra?.getTelemetry();
  if (!telemetry) {
- return runner.runOutputProcessors(messageList2, void 0);
+ return runner.runOutputProcessors(messageList2, tracingContext2, void 0);
  }
  return telemetry.traceMethod(async data => {
- return runner.runOutputProcessors(data.messageList, telemetry);
+ return runner.runOutputProcessors(data.messageList, tracingContext2, telemetry);
  }, {
  spanName: `agent.${this.name}.outputProcessors`,
  attributes: {
@@ -9636,7 +9699,7 @@ var Agent = class extends (_a = MastraBase) {
  });
  };
  try {
- messageList = await tracedRunOutputProcessors(messageList);
+ messageList = await tracedRunOutputProcessors(messageList, tracingContext);
  } catch (e) {
  if (e instanceof TripWire) {
  tripwireTriggered = true;
@@ -9675,13 +9738,13 @@ var Agent = class extends (_a = MastraBase) {
  }).then(r => r.messagesV2);
  }
  async getAssignedTools({
- runtimeContext,
  runId,
  resourceId,
  threadId,
+ runtimeContext,
+ tracingContext,
  mastraProxy,
- writableStream,
- agentAISpan
+ writableStream
  }) {
  let toolsForRequest = {};
  this.logger.debug(`[Agents:${this.name}] - Assembling assigned tools`, {
@@ -9710,11 +9773,11 @@ var Agent = class extends (_a = MastraBase) {
  memory,
  agentName: this.name,
  runtimeContext,
+ tracingContext,
  model: typeof this.model === "function" ? await this.getModel({
  runtimeContext
  }) : this.model,
- writableStream,
- agentAISpan
+ writableStream
  };
  return [k, makeCoreTool(tool, options)];
  }));
@@ -9730,8 +9793,8 @@ var Agent = class extends (_a = MastraBase) {
  resourceId,
  toolsets,
  runtimeContext,
- mastraProxy,
- agentAISpan
+ tracingContext,
+ mastraProxy
  }) {
  let toolsForRequest = {};
  const memory = await this.getMemory({
@@ -9755,10 +9818,10 @@ var Agent = class extends (_a = MastraBase) {
  memory,
  agentName: this.name,
  runtimeContext,
+ tracingContext,
  model: typeof this.model === "function" ? await this.getModel({
  runtimeContext
- }) : this.model,
- agentAISpan
+ }) : this.model
  };
  const convertedToCoreTool = makeCoreTool(toolObj, options, "toolset");
  toolsForRequest[toolName] = convertedToCoreTool;
@@ -9772,9 +9835,9 @@ var Agent = class extends (_a = MastraBase) {
  threadId,
  resourceId,
  runtimeContext,
+ tracingContext,
  mastraProxy,
- clientTools,
- agentAISpan
+ clientTools
  }) {
  let toolsForRequest = {};
  const memory = await this.getMemory({
@@ -9800,10 +9863,10 @@ var Agent = class extends (_a = MastraBase) {
  memory,
  agentName: this.name,
  runtimeContext,
+ tracingContext,
  model: typeof this.model === "function" ? await this.getModel({
  runtimeContext
- }) : this.model,
- agentAISpan
+ }) : this.model
  };
  const convertedToCoreTool = makeCoreTool(rest, options, "client-tool");
  toolsForRequest[toolName] = convertedToCoreTool;
@@ -9816,7 +9879,7 @@ var Agent = class extends (_a = MastraBase) {
  threadId,
  resourceId,
  runtimeContext,
- agentAISpan
+ tracingContext
  }) {
  let convertedWorkflowTools = {};
  const workflows = await this.getWorkflows({
@@ -9833,7 +9896,7 @@ var Agent = class extends (_a = MastraBase) {
  // manually wrap workflow tools with ai tracing, so that we can pass the
  // current tool span onto the workflow to maintain continuity of the trace
  execute: async args => {
- const toolAISpan = agentAISpan?.createChildSpan({
+ const toolAISpan = tracingContext.currentSpan?.createChildSpan({
  type: "tool_call" /* TOOL_CALL */,
  name: `tool: '${workflowName}'`,
  input: args,
@@ -9855,7 +9918,9 @@ var Agent = class extends (_a = MastraBase) {
  const result = await run.start({
  inputData: args,
  runtimeContext,
- currentSpan: toolAISpan
+ tracingContext: {
+ currentSpan: toolAISpan
+ }
  });
  toolAISpan?.end({
  output: result
@@ -9895,8 +9960,8 @@ var Agent = class extends (_a = MastraBase) {
  resourceId,
  runId,
  runtimeContext,
- writableStream,
- agentAISpan
+ tracingContext,
+ writableStream
  }) {
  let mastraProxy = void 0;
  const logger = this.logger;
@@ -9911,42 +9976,42 @@ var Agent = class extends (_a = MastraBase) {
  resourceId,
  threadId,
  runtimeContext,
+ tracingContext,
  mastraProxy,
- writableStream,
- agentAISpan
+ writableStream
  });
  const memoryTools = await this.getMemoryTools({
  runId,
  resourceId,
  threadId,
  runtimeContext,
- mastraProxy,
- agentAISpan
+ tracingContext,
+ mastraProxy
  });
  const toolsetTools = await this.getToolsets({
  runId,
  resourceId,
  threadId,
  runtimeContext,
+ tracingContext,
  mastraProxy,
- toolsets,
- agentAISpan
+ toolsets
  });
  const clientSideTools = await this.getClientTools({
  runId,
  resourceId,
  threadId,
  runtimeContext,
+ tracingContext,
  mastraProxy,
- clientTools,
- agentAISpan
+ clientTools
  });
  const workflowTools = await this.getWorkflowTools({
  runId,
  resourceId,
  threadId,
  runtimeContext,
- agentAISpan
+ tracingContext
  });
  return this.formatTools({
  ...assignedTools,
@@ -10031,7 +10096,7 @@ var Agent = class extends (_a = MastraBase) {
  runtimeContext,
  saveQueueManager,
  writableStream,
- currentSpan
+ tracingContext
  }) {
  return {
  before: async () => {
@@ -10040,8 +10105,10 @@ var Agent = class extends (_a = MastraBase) {
  runId
  });
  }
- const spanArgs = {
+ const agentAISpan = getOrCreateSpan({
+ type: "agent_run" /* AGENT_RUN */,
  name: `agent run: '${this.id}'`,
+ input: messages,
  attributes: {
  agentId: this.id,
  instructions,
@@ -10051,28 +10118,13 @@ var Agent = class extends (_a = MastraBase) {
  runId,
  resourceId,
  threadId: thread ? thread.id : void 0
- }
+ },
+ tracingContext,
+ runtimeContext
+ });
+ const innerTracingContext = {
+ currentSpan: agentAISpan
  };
- let agentAISpan;
- if (currentSpan) {
- agentAISpan = currentSpan.createChildSpan({
- type: "agent_run" /* AGENT_RUN */,
- ...spanArgs
- });
- } else {
- const aiTracing = getSelectedAITracing({
- runtimeContext
- });
- if (aiTracing) {
- agentAISpan = aiTracing.startSpan({
- type: "agent_run" /* AGENT_RUN */,
- ...spanArgs,
- startOptions: {
- runtimeContext
- }
- });
- }
- }
  const memory = await this.getMemory({
  runtimeContext
  });
@@ -10096,8 +10148,8 @@ var Agent = class extends (_a = MastraBase) {
  resourceId,
  runId,
  runtimeContext,
- writableStream,
- agentAISpan
+ tracingContext: innerTracingContext,
+ writableStream
  });
  const messageList = new MessageList({
  threadId,
@@ -10116,6 +10168,7 @@ var Agent = class extends (_a = MastraBase) {
  tripwireReason: tripwireReason2
  } = await this.__runInputProcessors({
  runtimeContext,
+ tracingContext: innerTracingContext,
  messageList
  });
  return {
@@ -10248,6 +10301,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  tripwireReason
  } = await this.__runInputProcessors({
  runtimeContext,
+ tracingContext: innerTracingContext,
  messageList
  });
  const systemMessage = [...messageList.getSystemMessages(), ...messageList.getSystemMessages("memory")]?.map(m => m.content)?.join(`
@@ -10312,17 +10366,6 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  };
  })
  };
- agentAISpan?.end({
- output: {
- text: result?.text,
- object: result?.object
- },
- metadata: {
- usage: result?.usage,
- toolResults: result?.toolResults,
- toolCalls: result?.toolCalls
- }
- });
  this.logger.debug(`[Agent:${this.name}] - Post processing LLM response`, {
  runId: runId2,
  result: resToLog,
@@ -10384,7 +10427,9 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  instructions: titleInstructions
  } = this.resolveTitleGenerationConfig(config?.threads?.generateTitle);
  if (shouldGenerate && userMessage) {
- promises.push(this.genTitle(userMessage, runtimeContext, titleModel, titleInstructions).then(title => {
+ promises.push(this.genTitle(userMessage, runtimeContext, {
+ currentSpan: agentAISpan
+ }, titleModel, titleInstructions).then(title => {
  if (title) {
  return memory.createThread({
  threadId: thread2.id,
@@ -10401,6 +10446,9 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  } catch (e) {
  await saveQueueManager.flushMessages(messageList, threadId, memoryConfig2);
  if (e instanceof MastraError) {
+ agentAISpan?.error({
+ error: e
+ });
  throw e;
  }
  const mastraError = new MastraError({
@@ -10416,6 +10464,9 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  }, e);
  this.logger.trackException(mastraError);
  this.logger.error(mastraError.toString());
+ agentAISpan?.error({
+ error: mastraError
+ });
  throw mastraError;
  }
  } else {
@@ -10440,6 +10491,9 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  outputText,
  instructions,
  runtimeContext,
+ tracingContext: {
+ currentSpan: agentAISpan
+ },
  structuredOutput,
  overrideScorers,
  threadId,
@@ -10454,6 +10508,17 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  },
  output: messageList.getPersisted.response.ui()
  };
+ agentAISpan?.end({
+ output: {
+ text: result?.text,
+ object: result?.object
+ },
+ metadata: {
+ usage: result?.usage,
+ toolResults: result?.toolResults,
+ toolCalls: result?.toolCalls
+ }
+ });
  return {
  scoringData
  };
@@ -10466,6 +10531,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  outputText,
  instructions,
  runtimeContext,
+ tracingContext,
  structuredOutput,
  overrideScorers,
  threadId,
@@ -10487,9 +10553,15 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  });
  }
  }
- const scorers = overrideScorers ?? (await this.getScorers({
- runtimeContext
- }));
+ let scorers = {};
+ try {
+ scorers = overrideScorers ? this.resolveOverrideScorerReferences(overrideScorers) : await this.getScorers({
+ runtimeContext
+ });
+ } catch (e) {
+ this.logger.warn(`[Agent:${this.name}] - Failed to get scorers: ${e}`);
+ return;
+ }
  const scorerInput = {
  inputMessages: messageList.getPersisted.input.ui(),
  rememberedMessages: messageList.getPersisted.remembered.ui(),
@@ -10500,12 +10572,13 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  if (Object.keys(scorers || {}).length > 0) {
  for (const [id, scorerObject] of Object.entries(scorers)) {
  runScorer({
- scorerId: id,
+ scorerId: overrideScorers ? scorerObject.scorer.name : id,
  scorerObject,
  runId,
  input: scorerInput,
  output: scorerOutput,
  runtimeContext,
+ tracingContext,
  entity: {
  id: this.id,
  name: this.name
@@ -10519,6 +10592,41 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  }
  }
  }
+ resolveOverrideScorerReferences(overrideScorers) {
+ const result = {};
+ for (const [id, scorerObject] of Object.entries(overrideScorers)) {
+ if (typeof scorerObject.scorer === "string") {
+ try {
+ if (!this.#mastra) {
+ throw new MastraError({
+ id: "AGENT_GENEREATE_SCORER_NOT_FOUND",
+ domain: "AGENT" /* AGENT */,
+ category: "USER" /* USER */,
+ text: `Mastra not found when fetching scorer. Make sure to fetch agent from mastra.getAgent()`
+ });
+ }
+ const scorer = this.#mastra.getScorerByName(scorerObject.scorer);
+ result[id] = {
+ scorer,
+ sampling: scorerObject.sampling
+ };
+ } catch (error) {
+ this.logger.warn(`[Agent:${this.name}] - Failed to get scorer ${scorerObject.scorer}: ${error}`);
+ }
+ } else {
+ result[id] = scorerObject;
+ }
+ }
+ if (Object.keys(result).length === 0) {
+ throw new MastraError({
+ id: "AGENT_GENEREATE_SCORER_NOT_FOUND",
+ domain: "AGENT" /* AGENT */,
+ category: "USER" /* USER */,
+ text: `No scorers found in overrideScorers`
+ });
+ }
+ return result;
+ }
  async prepareLLMOptions(messages, options) {
  const {
  context,
@@ -10531,6 +10639,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  temperature,
  toolChoice = "auto",
  runtimeContext = new RuntimeContext(),
+ tracingContext,
  savePerStep,
  writableStream,
  ...args
@@ -10595,7 +10704,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  runtimeContext,
  saveQueueManager,
  writableStream,
- currentSpan: args.tracingContext?.currentSpan
+ tracingContext
  });
  let messageList;
  let thread;
@@ -10662,7 +10771,8 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  result,
  outputText,
  structuredOutput = false,
- agentAISpan
+ agentAISpan,
+ overrideScorers
  }) => {
  const afterResult = await after({
  result,
@@ -10674,7 +10784,8 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  messageList,
  structuredOutput,
  threadExists,
- agentAISpan
+ agentAISpan,
+ overrideScorers
  });
  return afterResult;
  }
@@ -10698,6 +10809,22 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  const instructions = options.instructions || (await this.getInstructions({
  runtimeContext
  }));
+ const agentAISpan = getOrCreateSpan({
+ type: "agent_run" /* AGENT_RUN */,
+ name: `agent run: '${this.id}'`,
+ input: options.messages,
+ attributes: {
+ agentId: this.id,
+ instructions
+ },
+ metadata: {
+ runId,
+ resourceId,
+ threadId: threadFromArgs ? threadFromArgs.id : void 0
+ },
+ tracingContext: options.tracingContext,
+ runtimeContext
+ });
  const activeSpan = Telemetry.getActiveSpan();
  const baggageEntries = {};
  if (threadFromArgs?.id) {
@@ -10758,7 +10885,10 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  resourceId,
  runId,
  runtimeContext,
- writableStream: options.writableStream
+ writableStream: options.writableStream,
+ tracingContext: {
+ currentSpan: agentAISpan
+ }
  });
  return {
  convertedTools
@@ -10776,7 +10906,9 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  tripwire: z$1.boolean().optional(),
  tripwireReason: z$1.string().optional()
  }),
- execute: async () => {
+ execute: async ({
+ tracingContext
+ }) => {
  const thread = threadFromArgs;
  const messageList = new MessageList({
  threadId: thread?.id,
@@ -10795,6 +10927,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  tripwireReason: tripwireReason2
  } = await this.__runInputProcessors({
  runtimeContext,
+ tracingContext,
  messageList
  });
  return {
@@ -10882,7 +11015,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  The following messages were remembered from a different conversation:
  <remembered_from_other_conversation>
  ${(() => {
- let result = ``;
+ let result2 = ``;
  const messages = new MessageList().add(resultsFromOtherThreads, "memory").get.all.v1();
  let lastYmd = null;
  for (const msg of messages) {
@@ -10899,15 +11032,14 @@ ${(() => {
  const ampm = utcHour < 12 ? "AM" : "PM";
  const timeofday = `${hour12}:${utcMinute < 10 ? "0" : ""}${utcMinute} ${ampm}`;
  if (!lastYmd || lastYmd !== ymd) {
- result += `
+ result2 += `
  the following messages are from ${ymd}
  `;
  }
- result += `
- Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conversation" : ""} at ${timeofday}: ${JSON.stringify(msg)}`;
+ result2 += `Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conversation" : ""} at ${timeofday}: ${JSON.stringify(msg)}`;
  lastYmd = ymd;
  }
- return result;
+ return result2;
  })()}
  <end_remembered_from_other_conversation>`;
  }
@@ -10922,6 +11054,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  tripwireReason
  } = await this.__runInputProcessors({
  runtimeContext,
+ tracingContext,
  messageList
  });
  const systemMessage = [...messageList.getSystemMessages(), ...messageList.getSystemMessages("memory")]?.map(m => m.content)?.join(`
@@ -10960,7 +11093,8 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  inputSchema: z$1.any(),
  outputSchema: z$1.any(),
  execute: async ({
- inputData
+ inputData,
+ tracingContext
  }) => {
  this.logger.debug(`Starting agent ${this.name} llm stream call`, {
  runId
@@ -10970,7 +11104,8 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  }) : this.#outputProcessors : []);
  const streamResult = llm.stream({
  ...inputData,
- outputProcessors
+ outputProcessors,
+ tracingContext
  });
  if (options.format === "aisdk") {
  return streamResult.aisdk.v5;
@@ -10985,9 +11120,10 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  steps: [prepareToolsStep, prepareMemory]
  }).parallel([prepareToolsStep, prepareMemory]).map(async ({
  inputData,
- bail
+ bail,
+ tracingContext
  }) => {
- const result = {
+ const result2 = {
  ...options,
  messages: inputData["prepare-memory-step"].messageObjects,
  tools: inputData["prepare-tools-step"].convertedTools,
@@ -11029,7 +11165,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  tripwireReason: inputData["prepare-memory-step"].tripwireReason
  })
  };
- if (result.tripwire) {
+ if (result2.tripwire) {
  const emptyResult = {
  textStream: async function* () {}(),
  fullStream: new globalThis.ReadableStream({
@@ -11050,7 +11186,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  }),
  finishReason: Promise.resolve("other"),
  tripwire: true,
- tripwireReason: result.tripwireReason,
+ tripwireReason: result2.tripwireReason,
  response: {
  id: randomUUID(),
  timestamp: /* @__PURE__ */new Date(),
@@ -11073,23 +11209,27 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  return bail(emptyResult);
  }
  let effectiveOutputProcessors = options.outputProcessors || (this.#outputProcessors ? typeof this.#outputProcessors === "function" ? await this.#outputProcessors({
- runtimeContext: result.runtimeContext
+ runtimeContext: result2.runtimeContext
  }) : this.#outputProcessors : []);
  if (options.structuredOutput) {
  const structuredProcessor = new StructuredOutputProcessor(options.structuredOutput);
  effectiveOutputProcessors = effectiveOutputProcessors ? [...effectiveOutputProcessors, structuredProcessor] : [structuredProcessor];
  }
  const loopOptions = {
- messages: result.messages,
- runtimeContext: result.runtimeContext,
+ messages: result2.messages,
+ runtimeContext: result2.runtimeContext,
+ tracingContext: {
+ currentSpan: agentAISpan
+ },
  runId,
- toolChoice: result.toolChoice,
- tools: result.tools,
- resourceId: result.resourceId,
- threadId: result.threadId,
- structuredOutput: result.structuredOutput,
- stopWhen: result.stopWhen,
- maxSteps: result.maxSteps,
+ toolChoice: result2.toolChoice,
+ tools: result2.tools,
+ resourceId: result2.resourceId,
+ threadId: result2.threadId,
+ structuredOutput: result2.structuredOutput,
+ stopWhen: result2.stopWhen,
+ maxSteps: result2.maxSteps,
+ providerOptions: result2.providerOptions,
  options: {
  onFinish: async payload => {
  if (payload.finishReason === "error") {
@@ -11107,16 +11247,18 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  result: payload,
  outputText,
  instructions,
- thread: result.thread,
- threadId: result.threadId,
+ thread: result2.thread,
+ threadId: result2.threadId,
  resourceId,
  memoryConfig,
  runtimeContext,
+ tracingContext,
  runId,
  messageList,
  threadExists: inputData["prepare-memory-step"].threadExists,
  structuredOutput: !!options.output,
- saveQueueManager
+ saveQueueManager,
+ overrideScorers: options.scorers
  });
  } catch (e) {
  this.logger.error("Error saving memory on finish", {
@@ -11125,11 +11267,12 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  });
  }
  await options?.onFinish?.({
- ...result,
- runId
+ ...result2,
+ runId,
+ messages: messageList.get.response.aiV5.model()
  });
  },
- onStepFinish: result.onStepFinish
+ onStepFinish: result2.onStepFinish
  },
  output: options.output,
  outputProcessors: effectiveOutputProcessors,
@@ -11141,7 +11284,15 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  return loopOptions;
  }).then(streamStep).commit();
  const run = await executionWorkflow.createRunAsync();
- return await run.start({});
+ const result = await run.start({
+ tracingContext: {
+ currentSpan: agentAISpan
+ }
+ });
+ agentAISpan?.end({
+ output: result
+ });
+ return result;
  }
  async #executeOnFinish({
  result,
@@ -11152,11 +11303,13 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  memoryConfig,
  outputText,
  runtimeContext,
+ tracingContext,
  runId,
  messageList,
  threadExists,
  structuredOutput = false,
- saveQueueManager
+ saveQueueManager,
+ overrideScorers
  }) {
  const resToLog = {
  text: result?.text,
@@ -11231,7 +11384,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  instructions: titleInstructions
  } = this.resolveTitleGenerationConfig(config?.threads?.generateTitle);
  if (shouldGenerate && userMessage) {
- promises.push(this.genTitle(userMessage, runtimeContext, titleModel, titleInstructions).then(title => {
+ promises.push(this.genTitle(userMessage, runtimeContext, tracingContext, titleModel, titleInstructions).then(title => {
  if (title) {
  return memory.createThread({
  threadId: thread.id,
@@ -11287,7 +11440,9 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  outputText,
  instructions,
  runtimeContext,
- structuredOutput
+ tracingContext,
+ structuredOutput,
+ overrideScorers
  });
  }
  async generateVNext(messages, options) {
@@ -11347,6 +11502,10 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
11347
11502
  return result.result;
11348
11503
  }
11349
11504
  async generate(messages, generateOptions = {}) {
11505
+ this.logger.warn("Deprecation NOTICE:\nGenerate method will switch to use generateVNext implementation September 16th. Please use generateLegacy if you don't want to upgrade just yet.");
11506
+ return this.generateLegacy(messages, generateOptions);
11507
+ }
11508
+ async generateLegacy(messages, generateOptions = {}) {
11350
11509
  const defaultGenerateOptions = await this.getDefaultGenerateOptions({
11351
11510
  runtimeContext: generateOptions.runtimeContext
11352
11511
  });
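Annotation: the warning above announces the default switch rather than changing behavior today, since generate() still delegates to the legacy path via the new generateLegacy() method. A hedged usage sketch, assuming an already configured agent instance and a hypothetical prompt string:

// Still the legacy behavior, but now logs the deprecation warning.
const result = await agent.generate('Summarize the latest deployment.');

// Identical behavior without the warning, for callers not ready to migrate.
const legacyResult = await agent.generateLegacy('Summarize the latest deployment.');

// The implementation that generate() is scheduled to delegate to after September 16th.
const vnextResult = await agent.generateVNext('Summarize the latest deployment.');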
@@ -11414,6 +11573,9 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  agentAISpan,
  ...llmOptions
  } = beforeResult;
+ const tracingContext = {
+ currentSpan: agentAISpan
+ };
  let finalOutputProcessors = mergedGenerateOptions.outputProcessors;
  if (mergedGenerateOptions.structuredOutput) {
  const structuredProcessor = new StructuredOutputProcessor(mergedGenerateOptions.structuredOutput);
@@ -11422,11 +11584,12 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  if (!output || experimental_output) {
  const result2 = await llmToUse.__text({
  ...llmOptions,
- agentAISpan,
+ tracingContext,
  experimental_output
  });
  const outputProcessorResult2 = await this.__runOutputProcessors({
  runtimeContext: mergedGenerateOptions.runtimeContext || new RuntimeContext(),
+ tracingContext,
  outputProcessorOverrides: finalOutputProcessors,
  messageList: new MessageList({
  threadId: llmOptions.threadId || "",
@@ -11498,12 +11661,13 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  }
  }
  }
+ const overrideScorers = mergedGenerateOptions.scorers;
  const afterResult2 = await after({
  result: result2,
  outputText: newText2,
  agentAISpan,
- ...(generateOptions.scorers ? {
- overrideScorers: generateOptions.scorers
+ ...(overrideScorers ? {
+ overrideScorers
  } : {})
  });
  if (generateOptions.returnScorerData) {
@@ -11513,12 +11677,13 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  }
  const result = await llmToUse.__textObject({
  ...llmOptions,
- agentAISpan,
+ tracingContext,
  structuredOutput: output
  });
  const outputText = JSON.stringify(result.object);
  const outputProcessorResult = await this.__runOutputProcessors({
  runtimeContext: mergedGenerateOptions.runtimeContext || new RuntimeContext(),
+ tracingContext,
  messageList: new MessageList({
  threadId: llmOptions.threadId || "",
  resourceId: llmOptions.resourceId || ""
@@ -11587,6 +11752,10 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  return result;
  }
  async stream(messages, streamOptions = {}) {
+ this.logger.warn("Deprecation NOTICE:\nStream method will switch to use streamVNext implementation September 16th. Please use streamLegacy if you don't want to upgrade just yet.");
+ return this.streamLegacy(messages, streamOptions);
+ }
+ async streamLegacy(messages, streamOptions = {}) {
  const defaultStreamOptions = await this.getDefaultStreamOptions({
  runtimeContext: streamOptions.runtimeContext
  });
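Annotation: stream() receives the same treatment as generate(): a deprecation warning plus a new streamLegacy() that preserves today's behavior, with streamVNext named in the warning as the future default. A sketch under the same assumptions as the generate() example above:

// Warns, then behaves like streamLegacy() until the September 16th switch.
const stream = await agent.stream('Draft a changelog entry for this release.');

// Explicit opt-outs on either side of the migration.
const legacyStream = await agent.streamLegacy('Draft a changelog entry for this release.');
const vnextStream = await agent.streamVNext('Draft a changelog entry for this release.');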
@@ -11687,6 +11856,10 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  agentAISpan,
  ...llmOptions
  } = beforeResult;
+ const overrideScorers = mergedStreamOptions.scorers;
+ const tracingContext = {
+ currentSpan: agentAISpan
+ };
  if (!output || experimental_output) {
  this.logger.debug(`Starting agent ${this.name} llm stream call`, {
  runId
@@ -11694,14 +11867,17 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  const streamResult = llm.__stream({
  ...llmOptions,
  experimental_output,
- agentAISpan,
+ tracingContext,
  onFinish: async result => {
  try {
  const outputText = result.text;
  await after({
  result,
  outputText,
- agentAISpan
+ agentAISpan,
+ ...(overrideScorers ? {
+ overrideScorers
+ } : {})
  });
  } catch (e) {
  this.logger.error("Error saving memory on finish", {
@@ -11723,7 +11899,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  });
  return llm.__streamObject({
  ...llmOptions,
- agentAISpan,
+ tracingContext,
  onFinish: async result => {
  try {
  const outputText = JSON.stringify(result.object);
@@ -11731,7 +11907,10 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  result,
  outputText,
  structuredOutput: true,
- agentAISpan
+ agentAISpan,
+ ...(overrideScorers ? {
+ overrideScorers
+ } : {})
  });
  } catch (e) {
  this.logger.error("Error saving memory on finish", {
@@ -12049,6 +12228,21 @@ var ExecutionEngine = class extends MastraBase {

  // src/workflows/default.ts
  var DefaultExecutionEngine = class extends ExecutionEngine {
+ /**
+ * Preprocesses an error caught during workflow execution.
+ *
+ * - Wraps a non-MastraError exception
+ * - Logs error details
+ */
+ preprocessExecutionError(e, errorDefinition, logPrefix) {
+ const error = e instanceof MastraError ? e : new MastraError(errorDefinition, e);
+ if (!(e instanceof MastraError) && e instanceof Error && e.stack) {
+ error.stack = e.stack;
+ }
+ this.logger?.trackException(error);
+ this.logger?.error(logPrefix + error?.stack);
+ return error;
+ }
  /**
  * The runCounts map is used to keep track of the run count for each step.
  * The step id is used as the key and the run count is the value.
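Annotation: preprocessExecutionError() centralizes the wrap, track, and log sequence that the later hunks in this chunk previously inlined at every catch site. A self-contained approximation for readers skimming the diff; MastraError and the logger are reduced to minimal stubs here and the real class carries domain, category, and details as well:

// Minimal stand-ins, not the package's actual classes.
class MastraError extends Error {
  constructor(definition: { id: string }, public cause?: unknown) {
    super(definition.id);
  }
}
interface ExecLogger {
  trackException(err: Error): void;
  error(msg: string): void;
}

function preprocessExecutionError(
  e: unknown,
  errorDefinition: { id: string },
  logPrefix: string,
  logger?: ExecLogger,
): MastraError {
  // Reuse an existing MastraError, otherwise wrap the raw exception.
  const error = e instanceof MastraError ? e : new MastraError(errorDefinition, e);
  // Preserve the original stack when wrapping a plain Error.
  if (!(e instanceof MastraError) && e instanceof Error && e.stack) {
    error.stack = e.stack;
  }
  logger?.trackException(error);
  logger?.error(logPrefix + error?.stack);
  return error;
}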
@@ -12145,7 +12339,7 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
  resume,
  retryConfig,
  runtimeContext,
- currentSpan,
+ tracingContext,
  disableScorers
  } = params;
  const {
@@ -12154,33 +12348,16 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
  } = retryConfig ?? {};
  const steps = graph.steps;
  this.runCounts.clear();
- const spanArgs = {
+ const workflowAISpan = getOrCreateSpan({
+ type: "workflow_run" /* WORKFLOW_RUN */,
  name: `workflow run: '${workflowId}'`,
  input,
  attributes: {
  workflowId
- }
- };
- let workflowAISpan;
- if (currentSpan) {
- workflowAISpan = currentSpan.createChildSpan({
- type: "workflow_run" /* WORKFLOW_RUN */,
- ...spanArgs
- });
- } else {
- const aiTracing = getSelectedAITracing({
- runtimeContext
- });
- if (aiTracing) {
- workflowAISpan = aiTracing.startSpan({
- type: "workflow_run" /* WORKFLOW_RUN */,
- ...spanArgs,
- startOptions: {
- runtimeContext
- }
- });
- }
- }
+ },
+ tracingContext,
+ runtimeContext
+ });
  if (steps.length === 0) {
  const empty_graph_error = new MastraError({
  id: "WORKFLOW_EXECUTE_EMPTY_GRAPH",
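Annotation: the refactor above folds the "nest under an existing span or start a root span" branch into a single getOrCreateSpan() call. The sketch below is a hedged reconstruction based only on the branch it replaces; the shipped helper lives in the ai-tracing utilities and may differ in details:

// Ambient declarations so the sketch type-checks; these identifiers already exist in this chunk.
type SpanArgs = { type: string; name: string; input?: unknown; attributes?: Record<string, unknown> };
interface AISpan { createChildSpan(args: SpanArgs): AISpan }
declare function getSelectedAITracing(opts: { runtimeContext: unknown }): {
  startSpan(args: SpanArgs & { startOptions: { runtimeContext: unknown } }): AISpan;
} | undefined;

function getOrCreateSpan(options: SpanArgs & {
  tracingContext?: { currentSpan?: AISpan };
  runtimeContext: unknown;
}): AISpan | undefined {
  const { tracingContext, runtimeContext, ...spanArgs } = options;
  // Nest under the caller's span when one exists...
  if (tracingContext?.currentSpan) {
    return tracingContext.currentSpan.createChildSpan(spanArgs);
  }
  // ...otherwise start a root span on the selected tracing backend, if any.
  const aiTracing = getSelectedAITracing({ runtimeContext });
  return aiTracing?.startSpan({ ...spanArgs, startOptions: { runtimeContext } });
}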
@@ -12273,7 +12450,7 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
  return result2;
  }
  } catch (e) {
- const error = e instanceof MastraError ? e : new MastraError({
+ const error = this.preprocessExecutionError(e, {
  id: "WORKFLOW_ENGINE_STEP_EXECUTION_FAILED",
  domain: "MASTRA_WORKFLOW" /* MASTRA_WORKFLOW */,
  category: "USER" /* USER */,
@@ -12281,9 +12458,7 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
  workflowId,
  runId
  }
- }, e);
- this.logger?.trackException(error);
- this.logger?.error(`Error executing step: ${error?.stack}`);
+ }, "Error executing step: ");
  const result2 = await this.fmtReturnValue(executionSpan, params.emitter, stepResults, lastOutput.result, e);
  await this.persistStepUpdate({
  workflowId,
@@ -12597,11 +12772,15 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
  const stepAISpan = tracingContext.currentSpan?.createChildSpan({
  name: `workflow step: '${step.id}'`,
  type: "workflow_step" /* WORKFLOW_STEP */,
- input: prevOutput,
+
+ //input: prevOutput,
  attributes: {
  stepId: step.id
  }
  });
+ const innerTracingContext = {
+ currentSpan: stepAISpan
+ };
  if (!skipEmits) {
  await emitter.emit("watch", {
  type: "watch",
@@ -12677,16 +12856,12 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
  const result = await runStep({
  runId,
  workflowId,
- mastra: this.mastra ? wrapMastra(this.mastra, {
- currentSpan: stepAISpan
- }) : void 0,
+ mastra: this.mastra ? wrapMastra(this.mastra, innerTracingContext) : void 0,
  runtimeContext,
  inputData: prevOutput,
  runCount: this.getOrGenerateRunCount(step.id),
  resumeData: resume?.steps[0] === step.id ? resume?.resumePayload : void 0,
- tracingContext: {
- currentSpan: stepAISpan
- },
+ tracingContext: innerTracingContext,
  getInitData: () => stepResults?.input,
  getStepResult: step2 => {
  if (!step2?.id) {
@@ -12741,6 +12916,7 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
  workflowId,
  stepId: step.id,
  runtimeContext,
+ tracingContext: innerTracingContext,
  disableScorers
  });
  }
@@ -12765,7 +12941,7 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
  }
  break;
  } catch (e) {
- const error = e instanceof MastraError ? e : new MastraError({
+ const error = this.preprocessExecutionError(e, {
  id: "WORKFLOW_STEP_INVOKE_FAILED",
  domain: "MASTRA_WORKFLOW" /* MASTRA_WORKFLOW */,
  category: "USER" /* USER */,
@@ -12774,9 +12950,7 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
  workflowId,
  runId,
  stepId: step.id
  }
- }, e);
- this.logger.trackException(error);
- this.logger.error(`Error executing step ${step.id}: ` + error?.stack);
+ }, `Error executing step ${step.id}: `);
  stepAISpan?.error({
  error,
@@ -12863,6 +13037,7 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
  workflowId,
  stepId,
  runtimeContext,
+ tracingContext,
  disableScorers
  }) {
  let scorersToUse = scorers;
@@ -12872,18 +13047,16 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
  runtimeContext
  });
  } catch (error) {
- const mastraError = new MastraError({
+ this.preprocessExecutionError(error, {
  id: "WORKFLOW_FAILED_TO_FETCH_SCORERS",
- domain: "MASTRA_WORKFLOW",
+ domain: "MASTRA_WORKFLOW" /* MASTRA_WORKFLOW */,
  category: "USER" /* USER */,
  details: {
  runId,
  workflowId,
  stepId
  }
- }, error);
- this.logger.trackException(mastraError);
- this.logger.error(mastraError.toString(), error);
+ }, "Error fetching scorers: ");
  }
  }
  if (!disableScorers && scorersToUse && Object.keys(scorersToUse || {}).length > 0) {
@@ -12895,6 +13068,7 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
  input: [input],
  output,
  runtimeContext,
+ tracingContext,
  entity: {
  id: workflowId,
  stepId
@@ -13076,7 +13250,7 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
  });
  return result ? index : null;
  } catch (e) {
- const error = e instanceof MastraError ? e : new MastraError({
+ const error = this.preprocessExecutionError(e, {
  id: "WORKFLOW_CONDITION_EVALUATION_FAILED",
  domain: "MASTRA_WORKFLOW" /* MASTRA_WORKFLOW */,
  category: "USER" /* USER */,
@@ -13084,9 +13258,7 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
  workflowId,
  runId
  }
- }, e);
- this.logger.trackException(error);
- this.logger.error("Error evaluating condition: " + error?.stack);
+ }, "Error evaluating condition: ");
  evalSpan?.error({
  error,
  attributes: {
@@ -13266,7 +13438,7 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
  const evalSpan = loopSpan?.createChildSpan({
  type: "workflow_conditional_eval" /* WORKFLOW_CONDITIONAL_EVAL */,
  name: `condition: ${entry.loopType}`,
- input: result.output,
+ input: selectFields(result.output, ["stepResult", "output.text", "output.object", "messages"]),
  attributes: {
  conditionIndex: iteration
  }
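Annotation: the conditional-eval span now records only a handful of fields from the step result instead of the whole output, which keeps trace payloads small. selectFields itself is not shown in this chunk; the sketch below is a plausible dotted-path picker matching the call shape above, not the package's actual implementation:

// Illustration only: pick dotted paths out of an object, preserving nesting in the result.
function selectFields(source: Record<string, any>, paths: string[]): Record<string, any> {
  const out: Record<string, any> = {};
  for (const path of paths) {
    const keys = path.split('.');
    const value = keys.reduce<any>((acc, key) => (acc == null ? undefined : acc[key]), source);
    if (value === undefined) continue;
    // Rebuild the nested shape so "output.text" lands at out.output.text.
    let cursor = out;
    keys.forEach((key, i) => {
      if (i === keys.length - 1) cursor[key] = value;
      else cursor = cursor[key] ??= {};
    });
  }
  return out;
}

// e.g. selectFields(result.output, ["stepResult", "output.text"]) keeps only those branches.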
@@ -14764,7 +14936,7 @@ var Workflow = class extends MastraBase {
  cleanup: () => this.#runs.delete(runIdToUse)
  });
  this.#runs.set(runIdToUse, run);
- this.mastra?.getLogger().warn("createRun() is deprecated. Use createRunAsync() instead.");
+ this.mastra?.getLogger().warn("createRun() will be removed on September 16th. Use createRunAsync() instead.");
  return run;
  }
  /**
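Annotation: the createRun() warning is tightened from a generic deprecation notice to a concrete removal date. Migration is a one-line change; a sketch assuming an existing workflow instance and a hypothetical inputData payload:

// Before: synchronous factory, slated for removal on September 16th.
const run = workflow.createRun();

// After: the async factory resolves to an equivalent Run.
const nextRun = await workflow.createRunAsync();
const result = await nextRun.start({ inputData: { topic: 'release notes' } });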
@@ -14849,7 +15021,7 @@ var Workflow = class extends MastraBase {
  abort,
  abortSignal,
  runCount,
- currentSpan
+ tracingContext
  }) {
  this.__registerMastra(mastra);
  const isResume = !!(resume?.steps && resume.steps.length > 0);
@@ -14885,11 +15057,11 @@ var Workflow = class extends MastraBase {
  resumeData,
  step: resume.steps,
  runtimeContext,
- currentSpan
+ tracingContext
  }) : await run.start({
  inputData,
  runtimeContext,
- currentSpan
+ tracingContext
  });
  unwatch();
  unwatchV2();
@@ -15055,7 +15227,7 @@ var Run = class {
  inputData,
  runtimeContext,
  writableStream,
- currentSpan
+ tracingContext
  }) {
  const result = await this.executionEngine.execute({
  workflowId: this.workflowId,
@@ -15082,7 +15254,7 @@ var Run = class {
  runtimeContext: runtimeContext ?? new RuntimeContext(),
  abortController: this.abortController,
  writableStream,
- currentSpan
+ tracingContext
  });
  if (result.status !== "suspended") {
  this.cleanup?.();
@@ -15419,7 +15591,7 @@ var Run = class {
  },
  runtimeContext: runtimeContextToUse,
  abortController: this.abortController,
- currentSpan: params.currentSpan
+ tracingContext: params.tracingContext
  }).then(result => {
  if (result.status !== "suspended") {
  this.closeStreamAction?.().catch(() => {});
@@ -15480,5 +15652,5 @@ function deepMergeWorkflowState(a, b) {
  }

  export { AISDKV5OutputStream, Agent, DefaultExecutionEngine, ExecutionEngine, LanguageDetector, LanguageDetectorInputProcessor, LegacyStep, LegacyWorkflow, MastraModelOutput, ModerationInputProcessor, ModerationProcessor, PIIDetector, PIIDetectorInputProcessor, PromptInjectionDetector, PromptInjectionDetectorInputProcessor, Run, StructuredOutputProcessor, TripWire, UnicodeNormalizer, UnicodeNormalizerInputProcessor, WhenConditionReturnValue, Workflow, agentToStep, cloneStep, cloneWorkflow, createStep, createWorkflow, getActivePathsAndStatus, getResultActivePaths, getStepResult, getSuspendedPaths, isAgent, isConditionalKey, isErrorEvent, isFinalState, isLimboState, isTransitionEvent, isVariableReference, isWorkflow, loop, mapVariable, mergeChildValue, recursivelyCheckForFinalState, resolveVariables, updateStepInHierarchy, workflowToStep };
- //# sourceMappingURL=chunk-BGOXFBFK.js.map
- //# sourceMappingURL=chunk-BGOXFBFK.js.map
+ //# sourceMappingURL=chunk-P2IJ74UW.js.map
+ //# sourceMappingURL=chunk-P2IJ74UW.js.map