@mastra/core 0.15.3-alpha.3 → 0.15.3-alpha.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/agent/agent.types.d.ts +9 -4
- package/dist/agent/agent.types.d.ts.map +1 -1
- package/dist/agent/index.cjs +11 -11
- package/dist/agent/index.d.ts +33 -5
- package/dist/agent/index.d.ts.map +1 -1
- package/dist/agent/index.js +2 -2
- package/dist/agent/input-processor/index.cjs +6 -6
- package/dist/agent/input-processor/index.js +1 -1
- package/dist/agent/types.d.ts +11 -6
- package/dist/agent/types.d.ts.map +1 -1
- package/dist/ai-tracing/context.d.ts.map +1 -1
- package/dist/ai-tracing/default.d.ts.map +1 -1
- package/dist/ai-tracing/index.cjs +43 -31
- package/dist/ai-tracing/index.js +1 -1
- package/dist/ai-tracing/no-op.d.ts +0 -1
- package/dist/ai-tracing/no-op.d.ts.map +1 -1
- package/dist/ai-tracing/types.d.ts +0 -2
- package/dist/ai-tracing/types.d.ts.map +1 -1
- package/dist/ai-tracing/utils.d.ts +44 -8
- package/dist/ai-tracing/utils.d.ts.map +1 -1
- package/dist/{chunk-TWNFR6MQ.cjs → chunk-5CJDO3UO.cjs} +8 -8
- package/dist/{chunk-TWNFR6MQ.cjs.map → chunk-5CJDO3UO.cjs.map} +1 -1
- package/dist/{chunk-ZAJTIZZF.cjs → chunk-ABRPHTOG.cjs} +2 -2
- package/dist/{chunk-ZAJTIZZF.cjs.map → chunk-ABRPHTOG.cjs.map} +1 -1
- package/dist/{chunk-GOG77M6R.js → chunk-BJGHUKKM.js} +2 -2
- package/dist/{chunk-GOG77M6R.js.map → chunk-BJGHUKKM.js.map} +1 -1
- package/dist/{chunk-5TFCIXWE.js → chunk-CKM2ESZF.js} +44 -21
- package/dist/chunk-CKM2ESZF.js.map +1 -0
- package/dist/{chunk-AYFN43FB.cjs → chunk-DZADAEAF.cjs} +68 -22
- package/dist/chunk-DZADAEAF.cjs.map +1 -0
- package/dist/{chunk-QHEB6ZLO.cjs → chunk-F2CAC2R2.cjs} +47 -24
- package/dist/chunk-F2CAC2R2.cjs.map +1 -0
- package/dist/{chunk-O7IQL4DX.js → chunk-F4SQXAXR.js} +7 -7
- package/dist/{chunk-O7IQL4DX.js.map → chunk-F4SQXAXR.js.map} +1 -1
- package/dist/{chunk-HXEHQLBD.cjs → chunk-F6XWBVVG.cjs} +4 -4
- package/dist/{chunk-HXEHQLBD.cjs.map → chunk-F6XWBVVG.cjs.map} +1 -1
- package/dist/{chunk-UGN2UU3K.cjs → chunk-FQRDHVZC.cjs} +4 -4
- package/dist/{chunk-UGN2UU3K.cjs.map → chunk-FQRDHVZC.cjs.map} +1 -1
- package/dist/{chunk-4DKPMUAC.cjs → chunk-I7OAONIW.cjs} +259 -177
- package/dist/chunk-I7OAONIW.cjs.map +1 -0
- package/dist/{chunk-EMAAAVRA.js → chunk-IODUKRQP.js} +3 -3
- package/dist/{chunk-EMAAAVRA.js.map → chunk-IODUKRQP.js.map} +1 -1
- package/dist/{chunk-DJKIK6ZB.cjs → chunk-KUNWELBC.cjs} +422 -250
- package/dist/chunk-KUNWELBC.cjs.map +1 -0
- package/dist/{chunk-WOTBMZCN.js → chunk-LOYT3WUA.js} +255 -176
- package/dist/chunk-LOYT3WUA.js.map +1 -0
- package/dist/{chunk-24TFSB6Z.cjs → chunk-LVGGMWSE.cjs} +8 -8
- package/dist/chunk-LVGGMWSE.cjs.map +1 -0
- package/dist/{chunk-TQRLZH64.js → chunk-OFPVAPUH.js} +4 -4
- package/dist/{chunk-TQRLZH64.js.map → chunk-OFPVAPUH.js.map} +1 -1
- package/dist/{chunk-BGOXFBFK.js → chunk-P2IJ74UW.js} +391 -219
- package/dist/chunk-P2IJ74UW.js.map +1 -0
- package/dist/{chunk-YTVX52NU.cjs → chunk-VVTB47UG.cjs} +6 -6
- package/dist/{chunk-YTVX52NU.cjs.map → chunk-VVTB47UG.cjs.map} +1 -1
- package/dist/{chunk-D2GH2HAK.cjs → chunk-W5CF7DLB.cjs} +9 -9
- package/dist/{chunk-D2GH2HAK.cjs.map → chunk-W5CF7DLB.cjs.map} +1 -1
- package/dist/{chunk-ZC64CG7J.js → chunk-WWQ3QRPF.js} +4 -4
- package/dist/chunk-WWQ3QRPF.js.map +1 -0
- package/dist/{chunk-WUQSFK7W.js → chunk-XPFWOBV4.js} +4 -4
- package/dist/{chunk-WUQSFK7W.js.map → chunk-XPFWOBV4.js.map} +1 -1
- package/dist/{chunk-MEROMP3Z.js → chunk-YAWYQH3N.js} +3 -3
- package/dist/{chunk-MEROMP3Z.js.map → chunk-YAWYQH3N.js.map} +1 -1
- package/dist/{chunk-APVV75XG.js → chunk-YVIYEC6R.js} +62 -16
- package/dist/chunk-YVIYEC6R.js.map +1 -0
- package/dist/index.cjs +44 -44
- package/dist/index.js +10 -10
- package/dist/integration/index.cjs +3 -3
- package/dist/integration/index.js +1 -1
- package/dist/llm/index.d.ts +2 -2
- package/dist/llm/index.d.ts.map +1 -1
- package/dist/llm/model/base.types.d.ts +2 -2
- package/dist/llm/model/base.types.d.ts.map +1 -1
- package/dist/llm/model/model.d.ts +4 -4
- package/dist/llm/model/model.d.ts.map +1 -1
- package/dist/llm/model/model.loop.d.ts +1 -1
- package/dist/llm/model/model.loop.d.ts.map +1 -1
- package/dist/llm/model/model.loop.types.d.ts +2 -0
- package/dist/llm/model/model.loop.types.d.ts.map +1 -1
- package/dist/loop/index.cjs +2 -2
- package/dist/loop/index.js +1 -1
- package/dist/loop/loop.d.ts +1 -1
- package/dist/loop/loop.d.ts.map +1 -1
- package/dist/loop/types.d.ts +2 -0
- package/dist/loop/types.d.ts.map +1 -1
- package/dist/loop/workflow/stream.d.ts +1 -1
- package/dist/loop/workflow/stream.d.ts.map +1 -1
- package/dist/mastra/hooks.d.ts.map +1 -1
- package/dist/mastra/index.cjs +2 -2
- package/dist/mastra/index.d.ts +8 -3
- package/dist/mastra/index.d.ts.map +1 -1
- package/dist/mastra/index.js +1 -1
- package/dist/memory/index.cjs +4 -4
- package/dist/memory/index.js +1 -1
- package/dist/network/index.cjs +4 -4
- package/dist/network/index.js +2 -2
- package/dist/network/vNext/index.cjs +14 -14
- package/dist/network/vNext/index.js +2 -2
- package/dist/processors/index.cjs +13 -11
- package/dist/processors/index.cjs.map +1 -1
- package/dist/processors/index.d.ts +4 -0
- package/dist/processors/index.d.ts.map +1 -1
- package/dist/processors/index.js +7 -5
- package/dist/processors/index.js.map +1 -1
- package/dist/processors/processors/moderation.d.ts +4 -0
- package/dist/processors/processors/moderation.d.ts.map +1 -1
- package/dist/processors/processors/pii-detector.d.ts +3 -0
- package/dist/processors/processors/pii-detector.d.ts.map +1 -1
- package/dist/processors/processors/prompt-injection-detector.d.ts +2 -0
- package/dist/processors/processors/prompt-injection-detector.d.ts.map +1 -1
- package/dist/processors/processors/system-prompt-scrubber.d.ts +2 -0
- package/dist/processors/processors/system-prompt-scrubber.d.ts.map +1 -1
- package/dist/processors/runner.d.ts +5 -4
- package/dist/processors/runner.d.ts.map +1 -1
- package/dist/relevance/index.cjs +4 -4
- package/dist/relevance/index.js +1 -1
- package/dist/scores/base.d.ts +2 -0
- package/dist/scores/base.d.ts.map +1 -1
- package/dist/scores/hooks.d.ts +3 -1
- package/dist/scores/hooks.d.ts.map +1 -1
- package/dist/scores/index.cjs +27 -18
- package/dist/scores/index.cjs.map +1 -1
- package/dist/scores/index.js +23 -14
- package/dist/scores/index.js.map +1 -1
- package/dist/scores/run-experiment/index.d.ts +2 -0
- package/dist/scores/run-experiment/index.d.ts.map +1 -1
- package/dist/scores/types.d.ts +3 -0
- package/dist/scores/types.d.ts.map +1 -1
- package/dist/storage/index.cjs +3 -3
- package/dist/storage/index.js +1 -1
- package/dist/stream/index.cjs +3 -3
- package/dist/stream/index.js +1 -1
- package/dist/test-utils/llm-mock.cjs +2 -2
- package/dist/test-utils/llm-mock.js +1 -1
- package/dist/tools/index.cjs +4 -4
- package/dist/tools/index.js +1 -1
- package/dist/tools/is-vercel-tool.cjs +2 -2
- package/dist/tools/is-vercel-tool.js +1 -1
- package/dist/tools/tool.d.ts +3 -1
- package/dist/tools/tool.d.ts.map +1 -1
- package/dist/utils.cjs +17 -17
- package/dist/utils.d.ts +3 -3
- package/dist/utils.d.ts.map +1 -1
- package/dist/utils.js +1 -1
- package/dist/workflows/default.d.ts +13 -3
- package/dist/workflows/default.d.ts.map +1 -1
- package/dist/workflows/evented/index.cjs +10 -10
- package/dist/workflows/evented/index.js +1 -1
- package/dist/workflows/execution-engine.d.ts +2 -2
- package/dist/workflows/execution-engine.d.ts.map +1 -1
- package/dist/workflows/index.cjs +10 -10
- package/dist/workflows/index.js +1 -1
- package/dist/workflows/legacy/index.cjs +22 -22
- package/dist/workflows/legacy/index.js +1 -1
- package/dist/workflows/workflow.d.ts +8 -8
- package/dist/workflows/workflow.d.ts.map +1 -1
- package/package.json +6 -6
- package/dist/chunk-24TFSB6Z.cjs.map +0 -1
- package/dist/chunk-4DKPMUAC.cjs.map +0 -1
- package/dist/chunk-5TFCIXWE.js.map +0 -1
- package/dist/chunk-APVV75XG.js.map +0 -1
- package/dist/chunk-AYFN43FB.cjs.map +0 -1
- package/dist/chunk-BGOXFBFK.js.map +0 -1
- package/dist/chunk-DJKIK6ZB.cjs.map +0 -1
- package/dist/chunk-QHEB6ZLO.cjs.map +0 -1
- package/dist/chunk-WOTBMZCN.js.map +0 -1
- package/dist/chunk-ZC64CG7J.js.map +0 -1
@@ -3,14 +3,14 @@
 var chunkFBBP67MQ_cjs = require('./chunk-FBBP67MQ.cjs');
 var chunkNFXTYMWZ_cjs = require('./chunk-NFXTYMWZ.cjs');
 var chunkZOU4K5MI_cjs = require('./chunk-ZOU4K5MI.cjs');
-var
-var
+var chunkF2CAC2R2_cjs = require('./chunk-F2CAC2R2.cjs');
+var chunk5CJDO3UO_cjs = require('./chunk-5CJDO3UO.cjs');
 var chunkTSNDVBUU_cjs = require('./chunk-TSNDVBUU.cjs');
-var
+var chunkLVGGMWSE_cjs = require('./chunk-LVGGMWSE.cjs');
 var chunkGPWMM745_cjs = require('./chunk-GPWMM745.cjs');
 var chunkS6MAHT7F_cjs = require('./chunk-S6MAHT7F.cjs');
-var
-var
+var chunkABRPHTOG_cjs = require('./chunk-ABRPHTOG.cjs');
+var chunkI7OAONIW_cjs = require('./chunk-I7OAONIW.cjs');
 var chunkC73WLCY3_cjs = require('./chunk-C73WLCY3.cjs');
 var chunkKXCUCBEI_cjs = require('./chunk-KXCUCBEI.cjs');
 var chunkV5WKCX3G_cjs = require('./chunk-V5WKCX3G.cjs');
@@ -536,7 +536,7 @@ var ProcessorRunner = class {
 this.logger = logger;
 this.agentName = agentName;
 }
-async runOutputProcessors(messageList, telemetry) {
+async runOutputProcessors(messageList, tracingContext, telemetry) {
 const responseMessages = messageList.clear.response.v2();
 let processableMessages = [...responseMessages];
 const ctx = {
@@ -556,13 +556,15 @@ var ProcessorRunner = class {
 if (!telemetry) {
 processableMessages = await processMethod({
 messages: processableMessages,
-abort: ctx.abort
+abort: ctx.abort,
+tracingContext
 });
 } else {
 await telemetry.traceMethod(async () => {
 processableMessages = await processMethod({
 messages: processableMessages,
-abort: ctx.abort
+abort: ctx.abort,
+tracingContext
 });
 return processableMessages;
 }, {
@@ -583,7 +585,7 @@ var ProcessorRunner = class {
 /**
 * Process a stream part through all output processors with state management
 */
-async processPart(part, processorStates) {
+async processPart(part, processorStates, tracingContext) {
 if (!this.outputProcessors.length) {
 return {
 part,
@@ -607,7 +609,8 @@ var ProcessorRunner = class {
 state: state.customState,
 abort: reason => {
 throw new TripWire(reason || `Stream part blocked by ${processor.name}`);
-}
+},
+tracingContext
 });
 processedPart = result;
 }
@@ -634,7 +637,7 @@ var ProcessorRunner = class {
 };
 }
 }
-async runOutputProcessorsForStream(streamResult) {
+async runOutputProcessorsForStream(streamResult, tracingContext) {
 return new ReadableStream({
 start: async controller => {
 const reader = streamResult.fullStream.getReader();
@@ -653,7 +656,7 @@ var ProcessorRunner = class {
 part: processedPart,
 blocked,
 reason
-} = await this.processPart(value, processorStates);
+} = await this.processPart(value, processorStates, tracingContext);
 if (blocked) {
 void this.logger.debug(`[Agent:${this.agentName}] - Stream part blocked by output processor`, {
 reason,
@@ -675,7 +678,7 @@ var ProcessorRunner = class {
 }
 });
 }
-async runInputProcessors(messageList, telemetry) {
+async runInputProcessors(messageList, tracingContext, telemetry) {
 const userMessages = messageList.clear.input.v2();
 let processableMessages = [...userMessages];
 const ctx = {
@@ -695,13 +698,15 @@ var ProcessorRunner = class {
 if (!telemetry) {
 processableMessages = await processMethod({
 messages: processableMessages,
-abort: ctx.abort
+abort: ctx.abort,
+tracingContext
 });
 } else {
 await telemetry.traceMethod(async () => {
 processableMessages = await processMethod({
 messages: processableMessages,
-abort: ctx.abort
+abort: ctx.abort,
+tracingContext
 });
 return processableMessages;
 }, {
@@ -1168,7 +1173,7 @@ function convertMastraChunkToAISDKv5({
 if (mode === "generate") {
 return {
 type: "file",
-file: new
+file: new chunk5CJDO3UO_cjs.DefaultGeneratedFile({
 data: chunk.payload.data,
 mediaType: chunk.payload.mimeType
 })
@@ -1176,7 +1181,7 @@ function convertMastraChunkToAISDKv5({
 }
 return {
 type: "file",
-file: new
+file: new chunk5CJDO3UO_cjs.DefaultGeneratedFileWithType({
 data: chunk.payload.data,
 mediaType: chunk.payload.mimeType
 })
@@ -2220,7 +2225,7 @@ var MastraModelOutput = class extends chunkKXCUCBEI_cjs.MastraBase {
 try {
 if (self.processorRunner) {
 await self.processorRunner.runOutputProcessors(self.messageList);
-const outputText = self.messageList.get.response.aiV4.core().map(m =>
+const outputText = self.messageList.get.response.aiV4.core().map(m => chunk5CJDO3UO_cjs.MessageList.coreContentToString(m.content)).join("\n");
 const messages = self.messageList.get.response.v2();
 const messagesWithStructuredData = messages.filter(msg => msg.content.metadata && msg.content.metadata.structuredOutput);
 if (messagesWithStructuredData[0] && messagesWithStructuredData[0].content.metadata?.structuredOutput) {
@@ -3764,12 +3769,23 @@ function workflowLoopStream({
 modelSettings,
 _internal,
 modelStreamSpan,
+llmAISpan,
 ...rest
 }) {
 return new web.ReadableStream({
 start: async controller => {
 const writer = new WritableStream({
 write: chunk => {
+if (llmAISpan && chunk.type === "text-delta") {
+llmAISpan.createEventSpan({
+type: "llm_chunk" /* LLM_CHUNK */,
+name: `llm chunk: ${chunk.type}`,
+output: chunk.payload.text,
+attributes: {
+chunkType: chunk.type
+}
+});
+}
 controller.enqueue(chunk);
 }
 });
@@ -3870,6 +3886,9 @@ function workflowLoopStream({
 user: rest.messageList.get.input.aiV5.model(),
 nonUser: []
 }
+},
+tracingContext: {
+currentSpan: llmAISpan
 }
 });
 if (executionResult.status !== "success") {
@@ -3912,6 +3931,7 @@ function loop({
 _internal,
 mode = "stream",
 outputProcessors,
+llmAISpan,
 ...rest
 }) {
 let loggerToUse = logger || new chunkV5WKCX3G_cjs.ConsoleLogger({
@@ -3969,6 +3989,7 @@ function loop({
 telemetry_settings,
 modelSettings,
 outputProcessors,
+llmAISpan,
 ...rest
 };
 const streamFn = workflowLoopStream(workflowLoopProps);
@@ -4081,7 +4102,8 @@ var MastraLLMVNext = class extends chunkKXCUCBEI_cjs.MastraBase {
 output,
 options,
 outputProcessors,
-providerOptions
+providerOptions,
+tracingContext
 // ...rest
 }) {
 let stopWhenToUse;
@@ -4101,8 +4123,22 @@ var MastraLLMVNext = class extends chunkKXCUCBEI_cjs.MastraBase {
 if (output) {
 output = this._applySchemaCompat(output);
 }
+const llmAISpan = tracingContext?.currentSpan?.createChildSpan({
+name: `llm stream: '${model.modelId}'`,
+type: "llm_generation" /* LLM_GENERATION */,
+input: messages,
+attributes: {
+model: model.modelId,
+provider: model.provider,
+streaming: true
+},
+metadata: {
+threadId,
+resourceId
+}
+});
 try {
-const messageList = new
+const messageList = new chunk5CJDO3UO_cjs.MessageList({
 threadId,
 resourceId
 });
@@ -4121,6 +4157,7 @@ var MastraLLMVNext = class extends chunkKXCUCBEI_cjs.MastraBase {
 },
 output,
 outputProcessors,
+llmAISpan,
 options: {
 ...options,
 onStepFinish: async props => {
@@ -4161,7 +4198,7 @@ var MastraLLMVNext = class extends chunkKXCUCBEI_cjs.MastraBase {
 this.logger.warn("Rate limit approaching, waiting 10 seconds", {
 runId
 });
-await
+await chunkLVGGMWSE_cjs.delay(10 * 1e3);
 }
 },
 onFinish: async props => {
@@ -4203,7 +4240,11 @@ var MastraLLMVNext = class extends chunkKXCUCBEI_cjs.MastraBase {
 }
 }
 };
-
+const result = loop(loopOptions);
+llmAISpan?.end({
+output: result
+});
+return result;
 } catch (e) {
 const mastraError = new chunkC73WLCY3_cjs.MastraError({
 id: "LLM_STREAM_TEXT_AI_SDK_EXECUTION_FAILED",
@@ -4217,6 +4258,9 @@ var MastraLLMVNext = class extends chunkKXCUCBEI_cjs.MastraBase {
 resourceId: resourceId ?? "unknown"
 }
 }, e);
+llmAISpan?.error({
+error: mastraError
+});
 throw mastraError;
 }
 }
@@ -4380,6 +4424,7 @@ function runScorer({
 input,
 output,
 runtimeContext,
+tracingContext,
 entity,
 structuredOutput,
 source,
@@ -4412,6 +4457,7 @@ function runScorer({
 input,
 output,
 runtimeContext: Object.fromEntries(runtimeContext.entries()),
+tracingContext,
 runId,
 source,
 entity,
@@ -5047,7 +5093,7 @@ var Machine = class extends EventEmitter__default.default {
 const logger = this.logger;
 let mastraProxy = void 0;
 if (this.#mastra) {
-mastraProxy =
+mastraProxy = chunkLVGGMWSE_cjs.createMastraProxy({
 mastra: this.#mastra,
 logger
 });
@@ -7625,7 +7671,8 @@ var ModerationProcessor = class _ModerationProcessor {
 try {
 const {
 messages,
-abort
+abort,
+tracingContext
 } = args;
 if (messages.length === 0) {
 return messages;
@@ -7637,7 +7684,7 @@ var ModerationProcessor = class _ModerationProcessor {
 passedMessages.push(message);
 continue;
 }
-const moderationResult = await this.moderateContent(textContent);
+const moderationResult = await this.moderateContent(textContent, false, tracingContext);
 if (this.isModerationFlagged(moderationResult)) {
 this.handleFlaggedContent(moderationResult, this.strategy, abort);
 if (this.strategy === "filter") {
@@ -7662,13 +7709,14 @@ var ModerationProcessor = class _ModerationProcessor {
 const {
 part,
 streamParts,
-abort
+abort,
+tracingContext
 } = args;
 if (part.type !== "text-delta") {
 return part;
 }
 const contentToModerate = this.buildContextFromChunks(streamParts);
-const moderationResult = await this.moderateContent(contentToModerate, true);
+const moderationResult = await this.moderateContent(contentToModerate, true, tracingContext);
 if (this.isModerationFlagged(moderationResult)) {
 this.handleFlaggedContent(moderationResult, this.strategy, abort);
 if (this.strategy === "filter") {
@@ -7687,7 +7735,7 @@ var ModerationProcessor = class _ModerationProcessor {
 /**
 * Moderate content using the internal agent
 */
-async moderateContent(content, isStream = false) {
+async moderateContent(content, isStream = false, tracingContext) {
 const prompt = this.createModerationPrompt(content, isStream);
 try {
 const model = await this.moderationAgent.getModel();
@@ -7704,12 +7752,14 @@ var ModerationProcessor = class _ModerationProcessor {
 output: schema,
 modelSettings: {
 temperature: 0
-}
+},
+tracingContext
 });
 } else {
 response = await this.moderationAgent.generate(prompt, {
 output: schema,
-temperature: 0
+temperature: 0,
+tracingContext
 });
 }
 const result = response.object;
@@ -7862,7 +7912,8 @@ var PromptInjectionDetector = class _PromptInjectionDetector {
 try {
 const {
 messages,
-abort
+abort,
+tracingContext
 } = args;
 if (messages.length === 0) {
 return messages;
@@ -7874,7 +7925,7 @@ var PromptInjectionDetector = class _PromptInjectionDetector {
 processedMessages.push(message);
 continue;
 }
-const detectionResult = await this.detectPromptInjection(textContent);
+const detectionResult = await this.detectPromptInjection(textContent, tracingContext);
 if (this.isInjectionFlagged(detectionResult)) {
 const processedMessage = this.handleDetectedInjection(message, detectionResult, this.strategy, abort);
 if (this.strategy === "filter") {
@@ -7899,7 +7950,7 @@ var PromptInjectionDetector = class _PromptInjectionDetector {
 /**
 * Detect prompt injection using the internal agent
 */
-async detectPromptInjection(content) {
+async detectPromptInjection(content, tracingContext) {
 const prompt = this.createDetectionPrompt(content);
 try {
 const model = await this.detectionAgent.getModel();
@@ -7917,12 +7968,14 @@ var PromptInjectionDetector = class _PromptInjectionDetector {
 output: schema,
 modelSettings: {
 temperature: 0
-}
+},
+tracingContext
 });
 } else {
 response = await this.detectionAgent.generate(prompt, {
 output: schema,
-temperature: 0
+temperature: 0,
+tracingContext
 });
 }
 const result = response.object;
@@ -8093,7 +8146,8 @@ var PIIDetector = class _PIIDetector {
 try {
 const {
 messages,
-abort
+abort,
+tracingContext
 } = args;
 if (messages.length === 0) {
 return messages;
@@ -8105,7 +8159,7 @@ var PIIDetector = class _PIIDetector {
 processedMessages.push(message);
 continue;
 }
-const detectionResult = await this.detectPII(textContent);
+const detectionResult = await this.detectPII(textContent, tracingContext);
 if (this.isPIIFlagged(detectionResult)) {
 const processedMessage = this.handleDetectedPII(message, detectionResult, this.strategy, abort);
 if (this.strategy === "filter") {
@@ -8132,7 +8186,7 @@ var PIIDetector = class _PIIDetector {
 /**
 * Detect PII using the internal agent
 */
-async detectPII(content) {
+async detectPII(content, tracingContext) {
 const prompt = this.createDetectionPrompt(content);
 const schema = z__default.default.object({
 categories: z__default.default.object(this.detectionTypes.reduce((props, type) => {
@@ -8157,12 +8211,14 @@ var PIIDetector = class _PIIDetector {
 output: schema,
 modelSettings: {
 temperature: 0
-}
+},
+tracingContext
 });
 } else {
 response = await this.detectionAgent.generate(prompt, {
 output: schema,
-temperature: 0
+temperature: 0,
+tracingContext
 });
 }
 const result = response.object;
@@ -8356,7 +8412,8 @@ IMPORTANT: IF NO PII IS DETECTED, RETURN AN EMPTY OBJECT, DO NOT INCLUDE ANYTHIN
 async processOutputStream(args) {
 const {
 part,
-abort
+abort,
+tracingContext
 } = args;
 try {
 if (part.type !== "text-delta") {
@@ -8366,7 +8423,7 @@ IMPORTANT: IF NO PII IS DETECTED, RETURN AN EMPTY OBJECT, DO NOT INCLUDE ANYTHIN
 if (!textContent.trim()) {
 return part;
 }
-const detectionResult = await this.detectPII(textContent);
+const detectionResult = await this.detectPII(textContent, tracingContext);
 if (this.isPIIFlagged(detectionResult)) {
 switch (this.strategy) {
 case "block":
@@ -9205,13 +9262,13 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
9205
9262
|
this.logger.error(mastraError.toString());
|
|
9206
9263
|
throw mastraError;
|
|
9207
9264
|
}
|
|
9208
|
-
return
|
|
9265
|
+
return chunkLVGGMWSE_cjs.ensureToolProperties(this.#tools);
|
|
9209
9266
|
}
|
|
9210
9267
|
getTools({
|
|
9211
9268
|
runtimeContext = new chunkGPWMM745_cjs.RuntimeContext()
|
|
9212
9269
|
} = {}) {
|
|
9213
9270
|
if (typeof this.#tools !== "function") {
|
|
9214
|
-
return
|
|
9271
|
+
return chunkLVGGMWSE_cjs.ensureToolProperties(this.#tools);
|
|
9215
9272
|
}
|
|
9216
9273
|
const result = this.#tools({
|
|
9217
9274
|
runtimeContext,
|
|
@@ -9232,7 +9289,7 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
9232
9289
|
this.logger.error(mastraError.toString());
|
|
9233
9290
|
throw mastraError;
|
|
9234
9291
|
}
|
|
9235
|
-
return
|
|
9292
|
+
return chunkLVGGMWSE_cjs.ensureToolProperties(tools);
|
|
9236
9293
|
});
|
|
9237
9294
|
}
|
|
9238
9295
|
get llm() {
|
|
@@ -9276,7 +9333,7 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
9276
9333
|
mastra: this.#mastra
|
|
9277
9334
|
});
|
|
9278
9335
|
} else {
|
|
9279
|
-
llm = new
|
|
9336
|
+
llm = new chunkF2CAC2R2_cjs.MastraLLMV1({
|
|
9280
9337
|
model: resolvedModel,
|
|
9281
9338
|
mastra: this.#mastra
|
|
9282
9339
|
});
|
|
@@ -9384,6 +9441,7 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
9384
9441
|
async generateTitleFromUserMessage({
|
|
9385
9442
|
message,
|
|
9386
9443
|
runtimeContext = new chunkGPWMM745_cjs.RuntimeContext(),
|
|
9444
|
+
tracingContext,
|
|
9387
9445
|
model,
|
|
9388
9446
|
instructions
|
|
9389
9447
|
}) {
|
|
@@ -9391,7 +9449,7 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
9391
9449
|
runtimeContext,
|
|
9392
9450
|
model
|
|
9393
9451
|
});
|
|
9394
|
-
const normMessage = new
|
|
9452
|
+
const normMessage = new chunk5CJDO3UO_cjs.MessageList().add(message, "user").get.all.ui().at(-1);
|
|
9395
9453
|
if (!normMessage) {
|
|
9396
9454
|
throw new Error(`Could not generate title from input ${JSON.stringify(message)}`);
|
|
9397
9455
|
}
|
|
@@ -9416,6 +9474,7 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
9416
9474
|
if (llm.getModel().specificationVersion === "v2") {
|
|
9417
9475
|
const result = llm.stream({
|
|
9418
9476
|
runtimeContext,
|
|
9477
|
+
tracingContext,
|
|
9419
9478
|
messages: [{
|
|
9420
9479
|
role: "system",
|
|
9421
9480
|
content: systemInstructions
|
|
@@ -9428,6 +9487,7 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
9428
9487
|
} else {
|
|
9429
9488
|
const result = await llm.__text({
|
|
9430
9489
|
runtimeContext,
|
|
9490
|
+
tracingContext,
|
|
9431
9491
|
messages: [{
|
|
9432
9492
|
role: "system",
|
|
9433
9493
|
content: systemInstructions
|
|
@@ -9445,14 +9505,15 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
9445
9505
|
const userMessages = messages.filter(message => message.role === "user");
|
|
9446
9506
|
return userMessages.at(-1);
|
|
9447
9507
|
}
|
|
9448
|
-
async genTitle(userMessage, runtimeContext, model, instructions) {
|
|
9508
|
+
async genTitle(userMessage, runtimeContext, tracingContext, model, instructions) {
|
|
9449
9509
|
try {
|
|
9450
9510
|
if (userMessage) {
|
|
9451
|
-
const normMessage = new
|
|
9511
|
+
const normMessage = new chunk5CJDO3UO_cjs.MessageList().add(userMessage, "user").get.all.ui().at(-1);
|
|
9452
9512
|
if (normMessage) {
|
|
9453
9513
|
return await this.generateTitleFromUserMessage({
|
|
9454
9514
|
message: normMessage,
|
|
9455
9515
|
runtimeContext,
|
|
9516
|
+
tracingContext,
|
|
9456
9517
|
model,
|
|
9457
9518
|
instructions
|
|
9458
9519
|
});
|
|
@@ -9473,7 +9534,7 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
9473
9534
|
runId,
|
|
9474
9535
|
userMessages,
|
|
9475
9536
|
systemMessage,
|
|
9476
|
-
messageList = new
|
|
9537
|
+
messageList = new chunk5CJDO3UO_cjs.MessageList({
|
|
9477
9538
|
threadId,
|
|
9478
9539
|
resourceId
|
|
9479
9540
|
}),
|
|
@@ -9528,7 +9589,7 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
9528
9589
|
systemMessage: systemMessages,
|
|
9529
9590
|
memorySystemMessage: memorySystemMessage || void 0
|
|
9530
9591
|
});
|
|
9531
|
-
const returnList = new
|
|
9592
|
+
const returnList = new chunk5CJDO3UO_cjs.MessageList().addSystem(systemMessages).add(processedMemoryMessages, "memory").add(newMessages, "user");
|
|
9532
9593
|
return {
|
|
9533
9594
|
threadId: thread.id,
|
|
9534
9595
|
messages: returnList.get.all.prompt()
|
|
@@ -9544,8 +9605,8 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
9544
9605
|
resourceId,
|
|
9545
9606
|
threadId,
|
|
9546
9607
|
runtimeContext,
|
|
9547
|
-
|
|
9548
|
-
|
|
9608
|
+
tracingContext,
|
|
9609
|
+
mastraProxy
|
|
9549
9610
|
}) {
|
|
9550
9611
|
let convertedMemoryTools = {};
|
|
9551
9612
|
const memory = await this.getMemory({
|
|
@@ -9568,12 +9629,12 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
9568
9629
|
memory,
|
|
9569
9630
|
agentName: this.name,
|
|
9570
9631
|
runtimeContext,
|
|
9632
|
+
tracingContext,
|
|
9571
9633
|
model: typeof this.model === "function" ? await this.getModel({
|
|
9572
9634
|
runtimeContext
|
|
9573
|
-
}) : this.model
|
|
9574
|
-
agentAISpan
|
|
9635
|
+
}) : this.model
|
|
9575
9636
|
};
|
|
9576
|
-
const convertedToCoreTool =
|
|
9637
|
+
const convertedToCoreTool = chunkLVGGMWSE_cjs.makeCoreTool(toolObj, options);
|
|
9577
9638
|
convertedMemoryTools[toolName] = convertedToCoreTool;
|
|
9578
9639
|
}
|
|
9579
9640
|
}
|
|
@@ -9581,6 +9642,7 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
9581
9642
|
}
|
|
9582
9643
|
async __runInputProcessors({
|
|
9583
9644
|
runtimeContext,
|
|
9645
|
+
tracingContext,
|
|
9584
9646
|
messageList,
|
|
9585
9647
|
inputProcessorOverrides
|
|
9586
9648
|
}) {
|
|
@@ -9591,13 +9653,13 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
9591
9653
|
runtimeContext,
|
|
9592
9654
|
inputProcessorOverrides
|
|
9593
9655
|
});
|
|
9594
|
-
const tracedRunInputProcessors = messageList2 => {
|
|
9656
|
+
const tracedRunInputProcessors = (messageList2, tracingContext2) => {
|
|
9595
9657
|
const telemetry = this.#mastra?.getTelemetry();
|
|
9596
9658
|
if (!telemetry) {
|
|
9597
|
-
return runner.runInputProcessors(messageList2, void 0);
|
|
9659
|
+
return runner.runInputProcessors(messageList2, tracingContext2, void 0);
|
|
9598
9660
|
}
|
|
9599
9661
|
return telemetry.traceMethod(async data => {
|
|
9600
|
-
return runner.runInputProcessors(data.messageList, telemetry);
|
|
9662
|
+
return runner.runInputProcessors(data.messageList, tracingContext2, telemetry);
|
|
9601
9663
|
}, {
|
|
9602
9664
|
spanName: `agent.${this.name}.inputProcessors`,
|
|
9603
9665
|
attributes: {
|
|
@@ -9610,7 +9672,7 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
9610
9672
|
});
|
|
9611
9673
|
};
|
|
9612
9674
|
try {
|
|
9613
|
-
messageList = await tracedRunInputProcessors(messageList);
|
|
9675
|
+
messageList = await tracedRunInputProcessors(messageList, tracingContext);
|
|
9614
9676
|
} catch (error) {
|
|
9615
9677
|
if (error instanceof TripWire) {
|
|
9616
9678
|
tripwireTriggered = true;
|
|
@@ -9633,6 +9695,7 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
9633
9695
|
}
|
|
9634
9696
|
async __runOutputProcessors({
|
|
9635
9697
|
runtimeContext,
|
|
9698
|
+
tracingContext,
|
|
9636
9699
|
messageList,
|
|
9637
9700
|
outputProcessorOverrides
|
|
9638
9701
|
}) {
|
|
@@ -9643,13 +9706,13 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
9643
9706
|
runtimeContext,
|
|
9644
9707
|
outputProcessorOverrides
|
|
9645
9708
|
});
|
|
9646
|
-
const tracedRunOutputProcessors = messageList2 => {
|
|
9709
|
+
const tracedRunOutputProcessors = (messageList2, tracingContext2) => {
|
|
9647
9710
|
const telemetry = this.#mastra?.getTelemetry();
|
|
9648
9711
|
if (!telemetry) {
|
|
9649
|
-
return runner.runOutputProcessors(messageList2, void 0);
|
|
9712
|
+
return runner.runOutputProcessors(messageList2, tracingContext2, void 0);
|
|
9650
9713
|
}
|
|
9651
9714
|
return telemetry.traceMethod(async data => {
|
|
9652
|
-
return runner.runOutputProcessors(data.messageList, telemetry);
|
|
9715
|
+
return runner.runOutputProcessors(data.messageList, tracingContext2, telemetry);
|
|
9653
9716
|
}, {
|
|
9654
9717
|
spanName: `agent.${this.name}.outputProcessors`,
|
|
9655
9718
|
attributes: {
|
|
@@ -9662,7 +9725,7 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
9662
9725
|
});
|
|
9663
9726
|
};
|
|
9664
9727
|
try {
|
|
9665
|
-
messageList = await tracedRunOutputProcessors(messageList);
|
|
9728
|
+
messageList = await tracedRunOutputProcessors(messageList, tracingContext);
|
|
9666
9729
|
} catch (e) {
|
|
9667
9730
|
if (e instanceof TripWire) {
|
|
9668
9731
|
tripwireTriggered = true;
|
|
@@ -9701,13 +9764,13 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
9701
9764
|
}).then(r => r.messagesV2);
|
|
9702
9765
|
}
|
|
9703
9766
|
async getAssignedTools({
|
|
9704
|
-
runtimeContext,
|
|
9705
9767
|
runId,
|
|
9706
9768
|
resourceId,
|
|
9707
9769
|
threadId,
|
|
9770
|
+
runtimeContext,
|
|
9771
|
+
tracingContext,
|
|
9708
9772
|
mastraProxy,
|
|
9709
|
-
writableStream
|
|
9710
|
-
agentAISpan
|
|
9773
|
+
writableStream
|
|
9711
9774
|
}) {
|
|
9712
9775
|
let toolsForRequest = {};
|
|
9713
9776
|
this.logger.debug(`[Agents:${this.name}] - Assembling assigned tools`, {
|
|
@@ -9736,13 +9799,13 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
9736
9799
|
memory,
|
|
9737
9800
|
agentName: this.name,
|
|
9738
9801
|
runtimeContext,
|
|
9802
|
+
tracingContext,
|
|
9739
9803
|
model: typeof this.model === "function" ? await this.getModel({
|
|
9740
9804
|
runtimeContext
|
|
9741
9805
|
}) : this.model,
|
|
9742
|
-
writableStream
|
|
9743
|
-
agentAISpan
|
|
9806
|
+
writableStream
|
|
9744
9807
|
};
|
|
9745
|
-
return [k,
|
|
9808
|
+
return [k, chunkLVGGMWSE_cjs.makeCoreTool(tool, options)];
|
|
9746
9809
|
}));
|
|
9747
9810
|
const assignedToolEntriesConverted = Object.fromEntries(assignedCoreToolEntries.filter(entry => Boolean(entry)));
|
|
9748
9811
|
toolsForRequest = {
|
|
@@ -9756,8 +9819,8 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
9756
9819
|
resourceId,
|
|
9757
9820
|
toolsets,
|
|
9758
9821
|
runtimeContext,
|
|
9759
|
-
|
|
9760
|
-
|
|
9822
|
+
tracingContext,
|
|
9823
|
+
mastraProxy
|
|
9761
9824
|
}) {
|
|
9762
9825
|
let toolsForRequest = {};
|
|
9763
9826
|
const memory = await this.getMemory({
|
|
@@ -9781,12 +9844,12 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
9781
9844
|
memory,
|
|
9782
9845
|
agentName: this.name,
|
|
9783
9846
|
runtimeContext,
|
|
9847
|
+
tracingContext,
|
|
9784
9848
|
model: typeof this.model === "function" ? await this.getModel({
|
|
9785
9849
|
runtimeContext
|
|
9786
|
-
}) : this.model
|
|
9787
|
-
agentAISpan
|
|
9850
|
+
}) : this.model
|
|
9788
9851
|
};
|
|
9789
|
-
const convertedToCoreTool =
|
|
9852
|
+
const convertedToCoreTool = chunkLVGGMWSE_cjs.makeCoreTool(toolObj, options, "toolset");
|
|
9790
9853
|
toolsForRequest[toolName] = convertedToCoreTool;
|
|
9791
9854
|
}
|
|
9792
9855
|
}
|
|
@@ -9798,9 +9861,9 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
9798
9861
|
threadId,
|
|
9799
9862
|
resourceId,
|
|
9800
9863
|
runtimeContext,
|
|
9864
|
+
tracingContext,
|
|
9801
9865
|
mastraProxy,
|
|
9802
|
-
clientTools
|
|
9803
|
-
agentAISpan
|
|
9866
|
+
clientTools
|
|
9804
9867
|
}) {
|
|
9805
9868
|
let toolsForRequest = {};
|
|
9806
9869
|
const memory = await this.getMemory({
|
|
@@ -9826,12 +9889,12 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
9826
9889
|
memory,
|
|
9827
9890
|
agentName: this.name,
|
|
9828
9891
|
runtimeContext,
|
|
9892
|
+
tracingContext,
|
|
9829
9893
|
model: typeof this.model === "function" ? await this.getModel({
|
|
9830
9894
|
runtimeContext
|
|
9831
|
-
}) : this.model
|
|
9832
|
-
agentAISpan
|
|
9895
|
+
}) : this.model
|
|
9833
9896
|
};
|
|
9834
|
-
const convertedToCoreTool =
|
|
9897
|
+
const convertedToCoreTool = chunkLVGGMWSE_cjs.makeCoreTool(rest, options, "client-tool");
|
|
9835
9898
|
toolsForRequest[toolName] = convertedToCoreTool;
|
|
9836
9899
|
}
|
|
9837
9900
|
}
|
|
@@ -9842,7 +9905,7 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
9842
9905
|
threadId,
|
|
9843
9906
|
resourceId,
|
|
9844
9907
|
runtimeContext,
|
|
9845
|
-
|
|
9908
|
+
tracingContext
|
|
9846
9909
|
}) {
|
|
9847
9910
|
let convertedWorkflowTools = {};
|
|
9848
9911
|
const workflows = await this.getWorkflows({
|
|
@@ -9859,7 +9922,7 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
9859
9922
|
// manually wrap workflow tools with ai tracing, so that we can pass the
|
|
9860
9923
|
// current tool span onto the workflow to maintain continuity of the trace
|
|
9861
9924
|
execute: async args => {
|
|
9862
|
-
const toolAISpan =
|
|
9925
|
+
const toolAISpan = tracingContext.currentSpan?.createChildSpan({
|
|
9863
9926
|
type: "tool_call" /* TOOL_CALL */,
|
|
9864
9927
|
name: `tool: '${workflowName}'`,
|
|
9865
9928
|
input: args,
|
|
@@ -9881,7 +9944,9 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
9881
9944
|
const result = await run.start({
|
|
9882
9945
|
inputData: args,
|
|
9883
9946
|
runtimeContext,
|
|
9884
|
-
|
|
9947
|
+
tracingContext: {
|
|
9948
|
+
currentSpan: toolAISpan
|
|
9949
|
+
}
|
|
9885
9950
|
});
|
|
9886
9951
|
toolAISpan?.end({
|
|
9887
9952
|
output: result
|
|
@@ -9921,13 +9986,13 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
9921
9986
|
resourceId,
|
|
9922
9987
|
runId,
|
|
9923
9988
|
runtimeContext,
|
|
9924
|
-
|
|
9925
|
-
|
|
9989
|
+
tracingContext,
|
|
9990
|
+
writableStream
|
|
9926
9991
|
}) {
|
|
9927
9992
|
let mastraProxy = void 0;
|
|
9928
9993
|
const logger = this.logger;
|
|
9929
9994
|
if (this.#mastra) {
|
|
9930
|
-
mastraProxy =
|
|
9995
|
+
mastraProxy = chunkLVGGMWSE_cjs.createMastraProxy({
|
|
9931
9996
|
mastra: this.#mastra,
|
|
9932
9997
|
logger
|
|
9933
9998
|
});
|
|
@@ -9937,42 +10002,42 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
9937
10002
|
resourceId,
|
|
9938
10003
|
threadId,
|
|
9939
10004
|
runtimeContext,
|
|
10005
|
+
tracingContext,
|
|
9940
10006
|
mastraProxy,
|
|
9941
|
-
writableStream
|
|
9942
|
-
agentAISpan
|
|
10007
|
+
writableStream
|
|
9943
10008
|
});
|
|
9944
10009
|
const memoryTools = await this.getMemoryTools({
|
|
9945
10010
|
runId,
|
|
9946
10011
|
resourceId,
|
|
9947
10012
|
threadId,
|
|
9948
10013
|
runtimeContext,
|
|
9949
|
-
|
|
9950
|
-
|
|
10014
|
+
tracingContext,
|
|
10015
|
+
mastraProxy
|
|
9951
10016
|
});
|
|
9952
10017
|
const toolsetTools = await this.getToolsets({
|
|
9953
10018
|
runId,
|
|
9954
10019
|
resourceId,
|
|
9955
10020
|
threadId,
|
|
9956
10021
|
runtimeContext,
|
|
10022
|
+
tracingContext,
|
|
9957
10023
|
mastraProxy,
|
|
9958
|
-
toolsets
|
|
9959
|
-
agentAISpan
|
|
10024
|
+
toolsets
|
|
9960
10025
|
});
|
|
9961
10026
|
const clientSideTools = await this.getClientTools({
|
|
9962
10027
|
runId,
|
|
9963
10028
|
resourceId,
|
|
9964
10029
|
threadId,
|
|
9965
10030
|
runtimeContext,
|
|
10031
|
+
tracingContext,
|
|
9966
10032
|
mastraProxy,
|
|
9967
|
-
clientTools
|
|
9968
|
-
agentAISpan
|
|
10033
|
+
clientTools
|
|
9969
10034
|
});
|
|
9970
10035
|
const workflowTools = await this.getWorkflowTools({
|
|
9971
10036
|
runId,
|
|
9972
10037
|
resourceId,
|
|
9973
10038
|
threadId,
|
|
9974
10039
|
runtimeContext,
|
|
9975
|
-
|
|
10040
|
+
tracingContext
|
|
9976
10041
|
});
|
|
9977
10042
|
return this.formatTools({
|
|
9978
10043
|
...assignedTools,
|
|
@@ -10057,7 +10122,7 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
10057
10122
|
runtimeContext,
|
|
10058
10123
|
saveQueueManager,
|
|
10059
10124
|
writableStream,
|
|
10060
|
-
|
|
10125
|
+
tracingContext
|
|
10061
10126
|
}) {
|
|
10062
10127
|
return {
|
|
10063
10128
|
before: async () => {
|
|
@@ -10066,8 +10131,10 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
10066
10131
|
runId
|
|
10067
10132
|
});
|
|
10068
10133
|
}
|
|
10069
|
-
const
|
|
10134
|
+
const agentAISpan = chunkI7OAONIW_cjs.getOrCreateSpan({
|
|
10135
|
+
type: "agent_run" /* AGENT_RUN */,
|
|
10070
10136
|
name: `agent run: '${this.id}'`,
|
|
10137
|
+
input: messages,
|
|
10071
10138
|
attributes: {
|
|
10072
10139
|
agentId: this.id,
|
|
10073
10140
|
instructions,
|
|
@@ -10077,28 +10144,13 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
10077
10144
|
runId,
|
|
10078
10145
|
resourceId,
|
|
10079
10146
|
threadId: thread ? thread.id : void 0
|
|
10080
|
-
}
|
|
10147
|
+
},
|
|
10148
|
+
tracingContext,
|
|
10149
|
+
runtimeContext
|
|
10150
|
+
});
|
|
10151
|
+
const innerTracingContext = {
|
|
10152
|
+
currentSpan: agentAISpan
|
|
10081
10153
|
};
|
|
10082
|
-
let agentAISpan;
|
|
10083
|
-
if (currentSpan) {
|
|
10084
|
-
agentAISpan = currentSpan.createChildSpan({
|
|
10085
|
-
type: "agent_run" /* AGENT_RUN */,
|
|
10086
|
-
...spanArgs
|
|
10087
|
-
});
|
|
10088
|
-
} else {
|
|
10089
|
-
const aiTracing = chunk4DKPMUAC_cjs.getSelectedAITracing({
|
|
10090
|
-
runtimeContext
|
|
10091
|
-
});
|
|
10092
|
-
if (aiTracing) {
|
|
10093
|
-
agentAISpan = aiTracing.startSpan({
|
|
10094
|
-
type: "agent_run" /* AGENT_RUN */,
|
|
10095
|
-
...spanArgs,
|
|
10096
|
-
startOptions: {
|
|
10097
|
-
runtimeContext
|
|
10098
|
-
}
|
|
10099
|
-
});
|
|
10100
|
-
}
|
|
10101
|
-
}
|
|
10102
10154
|
const memory = await this.getMemory({
|
|
10103
10155
|
runtimeContext
|
|
10104
10156
|
});
|
|
@@ -10122,10 +10174,10 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
10122
10174
|
resourceId,
|
|
10123
10175
|
runId,
|
|
10124
10176
|
runtimeContext,
|
|
10125
|
-
|
|
10126
|
-
|
|
10177
|
+
tracingContext: innerTracingContext,
|
|
10178
|
+
writableStream
|
|
10127
10179
|
});
|
|
10128
|
-
const messageList = new
|
|
10180
|
+
const messageList = new chunk5CJDO3UO_cjs.MessageList({
|
|
10129
10181
|
threadId,
|
|
10130
10182
|
resourceId,
|
|
10131
10183
|
generateMessageId: this.#mastra?.generateId?.bind(this.#mastra),
|
|
@@ -10142,6 +10194,7 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
10142
10194
|
tripwireReason: tripwireReason2
|
|
10143
10195
|
} = await this.__runInputProcessors({
|
|
10144
10196
|
runtimeContext,
|
|
10197
|
+
tracingContext: innerTracingContext,
|
|
10145
10198
|
messageList
|
|
10146
10199
|
});
|
|
10147
10200
|
return {
|
|
@@ -10212,7 +10265,7 @@ exports.Agent = class Agent extends (_a = chunkKXCUCBEI_cjs.MastraBase) {
|
|
|
10212
10265
|
let [memoryMessages, memorySystemMessage] = await Promise.all([existingThread ? this.getMemoryMessages({
|
|
10213
10266
|
resourceId,
|
|
10214
10267
|
threadId: threadObject.id,
|
|
10215
|
-
vectorMessageSearch: new
|
|
10268
|
+
vectorMessageSearch: new chunk5CJDO3UO_cjs.MessageList().add(messages, `user`).getLatestUserContent() || "",
|
|
10216
10269
|
memoryConfig,
|
|
10217
10270
|
runtimeContext
|
|
10218
10271
|
}) : [], memory.getSystemMessage({
|
|
@@ -10235,7 +10288,7 @@ The following messages were remembered from a different conversation:
|
|
|
10235
10288
|
<remembered_from_other_conversation>
|
|
10236
10289
|
${(() => {
|
|
10237
10290
|
let result = ``;
|
|
10238
|
-
const messages2 = new
|
|
10291
|
+
const messages2 = new chunk5CJDO3UO_cjs.MessageList().add(resultsFromOtherThreads, "memory").get.all.v1();
|
|
10239
10292
|
let lastYmd = null;
|
|
10240
10293
|
for (const msg of messages2) {
|
|
10241
10294
|
const date = msg.createdAt;
|
|
@@ -10274,6 +10327,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
|
|
|
10274
10327
|
tripwireReason
|
|
10275
10328
|
} = await this.__runInputProcessors({
|
|
10276
10329
|
runtimeContext,
|
|
10330
|
+
tracingContext: innerTracingContext,
|
|
10277
10331
|
messageList
|
|
10278
10332
|
});
|
|
10279
10333
|
const systemMessage = [...messageList.getSystemMessages(), ...messageList.getSystemMessages("memory")]?.map(m => m.content)?.join(`
|
|
@@ -10287,7 +10341,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
|
|
|
10287
10341
|
systemMessage,
|
|
10288
10342
|
memorySystemMessage: memorySystemMessage || void 0
|
|
10289
10343
|
});
|
|
10290
|
-
const processedList = new
|
|
10344
|
+
const processedList = new chunk5CJDO3UO_cjs.MessageList({
|
|
10291
10345
|
threadId: threadObject.id,
|
|
10292
10346
|
resourceId,
|
|
10293
10347
|
generateMessageId: this.#mastra?.generateId?.bind(this.#mastra),
|
|
@@ -10338,23 +10392,12 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
|
|
|
10338
10392
|
};
|
|
10339
10393
|
})
|
|
10340
10394
|
};
|
|
10341
|
-
agentAISpan?.end({
|
|
10342
|
-
output: {
|
|
10343
|
-
text: result?.text,
|
|
10344
|
-
object: result?.object
|
|
10345
|
-
},
|
|
10346
|
-
metadata: {
|
|
10347
|
-
usage: result?.usage,
|
|
10348
|
-
toolResults: result?.toolResults,
|
|
10349
|
-
toolCalls: result?.toolCalls
|
|
10350
|
-
}
|
|
10351
|
-
});
|
|
10352
10395
|
this.logger.debug(`[Agent:${this.name}] - Post processing LLM response`, {
|
|
10353
10396
|
runId: runId2,
|
|
10354
10397
|
result: resToLog,
|
|
10355
10398
|
threadId
|
|
10356
10399
|
});
|
|
10357
|
-
const messageListResponses = new
|
|
10400
|
+
const messageListResponses = new chunk5CJDO3UO_cjs.MessageList({
|
|
10358
10401
|
threadId,
|
|
10359
10402
|
resourceId,
|
|
10360
10403
|
generateMessageId: this.#mastra?.generateId?.bind(this.#mastra),
|
|
@@ -10410,7 +10453,9 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
|
|
|
10410
10453
|
instructions: titleInstructions
|
|
10411
10454
|
} = this.resolveTitleGenerationConfig(config?.threads?.generateTitle);
|
|
10412
10455
|
if (shouldGenerate && userMessage) {
|
|
10413
|
-
promises.push(this.genTitle(userMessage, runtimeContext,
|
|
10456
|
+
promises.push(this.genTitle(userMessage, runtimeContext, {
|
|
10457
|
+
currentSpan: agentAISpan
|
|
10458
|
+
}, titleModel, titleInstructions).then(title => {
|
|
10414
10459
|
if (title) {
|
|
10415
10460
|
return memory.createThread({
|
|
10416
10461
|
threadId: thread2.id,
|
|
@@ -10427,6 +10472,9 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
|
|
|
10427
10472
|
} catch (e) {
|
|
10428
10473
|
await saveQueueManager.flushMessages(messageList, threadId, memoryConfig2);
|
|
10429
10474
|
if (e instanceof chunkC73WLCY3_cjs.MastraError) {
|
|
10475
|
+
agentAISpan?.error({
|
|
10476
|
+
error: e
|
|
10477
|
+
});
|
|
10430
10478
|
throw e;
|
|
10431
10479
|
}
|
|
10432
10480
|
const mastraError = new chunkC73WLCY3_cjs.MastraError({
|
|
@@ -10442,6 +10490,9 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
|
|
|
10442
10490
|
}, e);
|
|
10443
10491
|
this.logger.trackException(mastraError);
|
|
10444
10492
|
this.logger.error(mastraError.toString());
|
|
10493
|
+
agentAISpan?.error({
|
|
10494
|
+
error: mastraError
|
|
10495
|
+
});
|
|
10445
10496
|
throw mastraError;
|
|
10446
10497
|
}
|
|
10447
10498
|
} else {
|
|
@@ -10466,6 +10517,9 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
|
|
|
10466
10517
|
outputText,
|
|
10467
10518
|
instructions,
|
|
10468
10519
|
runtimeContext,
|
|
10520
|
+
tracingContext: {
|
|
10521
|
+
currentSpan: agentAISpan
|
|
10522
|
+
},
|
|
10469
10523
|
structuredOutput,
|
|
10470
10524
|
overrideScorers,
|
|
10471
10525
|
threadId,
|
|
@@ -10480,6 +10534,17 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
|
|
|
10480
10534
|
},
|
|
10481
10535
|
output: messageList.getPersisted.response.ui()
|
|
10482
10536
|
};
|
|
10537
|
+
agentAISpan?.end({
|
|
10538
|
+
output: {
|
|
10539
|
+
text: result?.text,
|
|
10540
|
+
object: result?.object
|
|
10541
|
+
},
|
|
10542
|
+
metadata: {
|
|
10543
|
+
usage: result?.usage,
|
|
10544
|
+
toolResults: result?.toolResults,
|
|
10545
|
+
toolCalls: result?.toolCalls
|
|
10546
|
+
}
|
|
10547
|
+
});
|
|
10483
10548
|
return {
|
|
10484
10549
|
scoringData
|
|
10485
10550
|
};
|
|
@@ -10492,6 +10557,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
|
|
|
10492
10557
|
outputText,
|
|
10493
10558
|
instructions,
|
|
10494
10559
|
runtimeContext,
|
|
10560
|
+
tracingContext,
|
|
10495
10561
|
structuredOutput,
|
|
10496
10562
|
overrideScorers,
|
|
10497
10563
|
threadId,
|
|
@@ -10513,9 +10579,15 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
|
|
|
10513
10579
|
});
|
|
10514
10580
|
}
|
|
10515
10581
|
}
|
|
10516
|
-
|
|
10517
|
-
|
|
10518
|
-
|
|
10582
|
+
let scorers = {};
|
|
10583
|
+
try {
|
|
10584
|
+
scorers = overrideScorers ? this.resolveOverrideScorerReferences(overrideScorers) : await this.getScorers({
|
|
10585
|
+
runtimeContext
|
|
10586
|
+
});
|
|
10587
|
+
} catch (e) {
|
|
10588
|
+
this.logger.warn(`[Agent:${this.name}] - Failed to get scorers: ${e}`);
|
|
10589
|
+
return;
|
|
10590
|
+
}
|
|
10519
10591
|
const scorerInput = {
|
|
10520
10592
|
inputMessages: messageList.getPersisted.input.ui(),
|
|
10521
10593
|
rememberedMessages: messageList.getPersisted.remembered.ui(),
|
|
@@ -10526,12 +10598,13 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
|
|
|
10526
10598
|
if (Object.keys(scorers || {}).length > 0) {
|
|
10527
10599
|
for (const [id, scorerObject] of Object.entries(scorers)) {
|
|
10528
10600
|
runScorer({
|
|
10529
|
-
scorerId: id,
|
|
10601
|
+
scorerId: overrideScorers ? scorerObject.scorer.name : id,
|
|
10530
10602
|
scorerObject,
|
|
10531
10603
|
runId,
|
|
10532
10604
|
input: scorerInput,
|
|
10533
10605
|
output: scorerOutput,
|
|
10534
10606
|
runtimeContext,
|
|
10607
|
+
tracingContext,
|
|
10535
10608
|
entity: {
|
|
10536
10609
|
id: this.id,
|
|
10537
10610
|
name: this.name
|
|
@@ -10545,6 +10618,41 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
|
|
|
10545
10618
|
}
|
|
10546
10619
|
}
|
|
10547
10620
|
}
|
|
10621
|
+
resolveOverrideScorerReferences(overrideScorers) {
|
|
10622
|
+
const result = {};
|
|
10623
|
+
for (const [id, scorerObject] of Object.entries(overrideScorers)) {
|
|
10624
|
+
if (typeof scorerObject.scorer === "string") {
|
|
10625
|
+
try {
|
|
10626
|
+
if (!this.#mastra) {
|
|
10627
|
+
throw new chunkC73WLCY3_cjs.MastraError({
|
|
10628
|
+
id: "AGENT_GENEREATE_SCORER_NOT_FOUND",
|
|
10629
|
+
domain: "AGENT" /* AGENT */,
|
|
10630
|
+
category: "USER" /* USER */,
|
|
10631
|
+
text: `Mastra not found when fetching scorer. Make sure to fetch agent from mastra.getAgent()`
|
|
10632
|
+
});
|
|
10633
|
+
}
|
|
10634
|
+
const scorer = this.#mastra.getScorerByName(scorerObject.scorer);
|
|
10635
|
+
result[id] = {
|
|
10636
|
+
scorer,
|
|
10637
|
+
sampling: scorerObject.sampling
|
|
10638
|
+
};
|
|
10639
|
+
} catch (error) {
|
|
10640
|
+
this.logger.warn(`[Agent:${this.name}] - Failed to get scorer ${scorerObject.scorer}: ${error}`);
|
|
10641
|
+
}
|
|
10642
|
+
} else {
|
|
10643
|
+
result[id] = scorerObject;
|
|
10644
|
+
}
|
|
10645
|
+
}
|
|
10646
|
+
if (Object.keys(result).length === 0) {
|
|
10647
|
+
throw new chunkC73WLCY3_cjs.MastraError({
|
|
10648
|
+
id: "AGENT_GENEREATE_SCORER_NOT_FOUND",
|
|
10649
|
+
domain: "AGENT" /* AGENT */,
|
|
10650
|
+
category: "USER" /* USER */,
|
|
10651
|
+
text: `No scorers found in overrideScorers`
|
|
10652
|
+
});
|
|
10653
|
+
}
|
|
10654
|
+
return result;
|
|
10655
|
+
}
|
|
10548
10656
|
async prepareLLMOptions(messages, options) {
|
|
10549
10657
|
const {
|
|
10550
10658
|
context,
|
|
@@ -10557,6 +10665,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 temperature,
 toolChoice = "auto",
 runtimeContext = new chunkGPWMM745_cjs.RuntimeContext(),
+tracingContext,
 savePerStep,
 writableStream,
 ...args
@@ -10621,7 +10730,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 runtimeContext,
 saveQueueManager,
 writableStream,
-
+tracingContext
 });
 let messageList;
 let thread;
@@ -10688,7 +10797,8 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 result,
 outputText,
 structuredOutput = false,
-agentAISpan
+agentAISpan,
+overrideScorers
 }) => {
 const afterResult = await after({
 result,
@@ -10700,7 +10810,8 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 messageList,
 structuredOutput,
 threadExists,
-agentAISpan
+agentAISpan,
+overrideScorers
 });
 return afterResult;
 }
@@ -10724,6 +10835,22 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 const instructions = options.instructions || (await this.getInstructions({
 runtimeContext
 }));
+const agentAISpan = chunkI7OAONIW_cjs.getOrCreateSpan({
+type: "agent_run" /* AGENT_RUN */,
+name: `agent run: '${this.id}'`,
+input: options.messages,
+attributes: {
+agentId: this.id,
+instructions
+},
+metadata: {
+runId,
+resourceId,
+threadId: threadFromArgs ? threadFromArgs.id : void 0
+},
+tracingContext: options.tracingContext,
+runtimeContext
+});
 const activeSpan = chunkZOU4K5MI_cjs.Telemetry.getActiveSpan();
 const baggageEntries = {};
 if (threadFromArgs?.id) {
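Note: the `agent_run` span is now created with `getOrCreateSpan` and threaded through the rest of the call as `tracingContext: { currentSpan: agentAISpan }`. A minimal sketch of how a caller could nest an agent run under an existing span; the `AISpanLike` shape below is an illustrative stand-in, not the package's exported type:

```ts
// Illustrative only: the span interface is reduced to the two members used here.
interface AISpanLike {
  createChildSpan(options: Record<string, unknown>): AISpanLike;
  end(options?: { output?: unknown }): void;
}

async function runUnderParentSpan(agent: { generateVNext: Function }, parentSpan: AISpanLike) {
  // options.tracingContext.currentSpan becomes the parent of the `agent run: '<agentId>'` span,
  // which is then propagated to tools, memory, input processors, and the LLM call.
  return agent.generateVNext("summarize the ticket", {
    tracingContext: { currentSpan: parentSpan },
  });
}
```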
@@ -10784,7 +10911,10 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 resourceId,
 runId,
 runtimeContext,
-writableStream: options.writableStream
+writableStream: options.writableStream,
+tracingContext: {
+currentSpan: agentAISpan
+}
 });
 return {
 convertedTools
@@ -10802,9 +10932,11 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 tripwire: z.z.boolean().optional(),
 tripwireReason: z.z.string().optional()
 }),
-execute: async (
+execute: async ({
+tracingContext
+}) => {
 const thread = threadFromArgs;
-const messageList = new
+const messageList = new chunk5CJDO3UO_cjs.MessageList({
 threadId: thread?.id,
 resourceId,
 generateMessageId: this.#mastra?.generateId?.bind(this.#mastra),
@@ -10821,6 +10953,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 tripwireReason: tripwireReason2
 } = await this.__runInputProcessors({
 runtimeContext,
+tracingContext,
 messageList
 });
 return {
@@ -10886,7 +11019,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 let [memoryMessages, memorySystemMessage] = await Promise.all([existingThread ? this.getMemoryMessages({
 resourceId,
 threadId: threadObject.id,
-vectorMessageSearch: new
+vectorMessageSearch: new chunk5CJDO3UO_cjs.MessageList().add(options.messages, `user`).getLatestUserContent() || "",
 memoryConfig,
 runtimeContext
 }) : [], memory.getSystemMessage({
@@ -10908,8 +11041,8 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 The following messages were remembered from a different conversation:
 <remembered_from_other_conversation>
 ${(() => {
-let
-const messages = new
+let result2 = ``;
+const messages = new chunk5CJDO3UO_cjs.MessageList().add(resultsFromOtherThreads, "memory").get.all.v1();
 let lastYmd = null;
 for (const msg of messages) {
 const date = msg.createdAt;
@@ -10925,15 +11058,14 @@ ${(() => {
 const ampm = utcHour < 12 ? "AM" : "PM";
 const timeofday = `${hour12}:${utcMinute < 10 ? "0" : ""}${utcMinute} ${ampm}`;
 if (!lastYmd || lastYmd !== ymd) {
-
+result2 += `
 the following messages are from ${ymd}
 `;
 }
-
-Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conversation" : ""} at ${timeofday}: ${JSON.stringify(msg)}`;
+result2 += `Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conversation" : ""} at ${timeofday}: ${JSON.stringify(msg)}`;
 lastYmd = ymd;
 }
-return
+return result2;
 })()}
 <end_remembered_from_other_conversation>`;
 }
@@ -10948,6 +11080,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 tripwireReason
 } = await this.__runInputProcessors({
 runtimeContext,
+tracingContext,
 messageList
 });
 const systemMessage = [...messageList.getSystemMessages(), ...messageList.getSystemMessages("memory")]?.map(m => m.content)?.join(`
@@ -10961,7 +11094,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 systemMessage,
 memorySystemMessage: memorySystemMessage || void 0
 });
-const processedList = new
+const processedList = new chunk5CJDO3UO_cjs.MessageList({
 threadId: threadObject.id,
 resourceId,
 generateMessageId: this.#mastra?.generateId?.bind(this.#mastra),
@@ -10986,7 +11119,8 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 inputSchema: z.z.any(),
 outputSchema: z.z.any(),
 execute: async ({
-inputData
+inputData,
+tracingContext
 }) => {
 this.logger.debug(`Starting agent ${this.name} llm stream call`, {
 runId
@@ -10996,7 +11130,8 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 }) : this.#outputProcessors : []);
 const streamResult = llm.stream({
 ...inputData,
-outputProcessors
+outputProcessors,
+tracingContext
 });
 if (options.format === "aisdk") {
 return streamResult.aisdk.v5;
@@ -11011,9 +11146,10 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 steps: [prepareToolsStep, prepareMemory]
 }).parallel([prepareToolsStep, prepareMemory]).map(async ({
 inputData,
-bail
+bail,
+tracingContext
 }) => {
-const
+const result2 = {
 ...options,
 messages: inputData["prepare-memory-step"].messageObjects,
 tools: inputData["prepare-tools-step"].convertedTools,
@@ -11055,7 +11191,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 tripwireReason: inputData["prepare-memory-step"].tripwireReason
 })
 };
-if (
+if (result2.tripwire) {
 const emptyResult = {
 textStream: async function* () {}(),
 fullStream: new globalThis.ReadableStream({
@@ -11076,7 +11212,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 }),
 finishReason: Promise.resolve("other"),
 tripwire: true,
-tripwireReason:
+tripwireReason: result2.tripwireReason,
 response: {
 id: crypto2.randomUUID(),
 timestamp: /* @__PURE__ */new Date(),
@@ -11099,23 +11235,27 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 return bail(emptyResult);
 }
 let effectiveOutputProcessors = options.outputProcessors || (this.#outputProcessors ? typeof this.#outputProcessors === "function" ? await this.#outputProcessors({
-runtimeContext:
+runtimeContext: result2.runtimeContext
 }) : this.#outputProcessors : []);
 if (options.structuredOutput) {
 const structuredProcessor = new StructuredOutputProcessor(options.structuredOutput);
 effectiveOutputProcessors = effectiveOutputProcessors ? [...effectiveOutputProcessors, structuredProcessor] : [structuredProcessor];
 }
 const loopOptions = {
-messages:
-runtimeContext:
+messages: result2.messages,
+runtimeContext: result2.runtimeContext,
+tracingContext: {
+currentSpan: agentAISpan
+},
 runId,
-toolChoice:
-tools:
-resourceId:
-threadId:
-structuredOutput:
-stopWhen:
-maxSteps:
+toolChoice: result2.toolChoice,
+tools: result2.tools,
+resourceId: result2.resourceId,
+threadId: result2.threadId,
+structuredOutput: result2.structuredOutput,
+stopWhen: result2.stopWhen,
+maxSteps: result2.maxSteps,
+providerOptions: result2.providerOptions,
 options: {
 onFinish: async payload => {
 if (payload.finishReason === "error") {
@@ -11133,16 +11273,18 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 result: payload,
 outputText,
 instructions,
-thread:
-threadId:
+thread: result2.thread,
+threadId: result2.threadId,
 resourceId,
 memoryConfig,
 runtimeContext,
+tracingContext,
 runId,
 messageList,
 threadExists: inputData["prepare-memory-step"].threadExists,
 structuredOutput: !!options.output,
-saveQueueManager
+saveQueueManager,
+overrideScorers: options.scorers
 });
 } catch (e) {
 this.logger.error("Error saving memory on finish", {
@@ -11151,11 +11293,12 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 });
 }
 await options?.onFinish?.({
-...
-runId
+...result2,
+runId,
+messages: messageList.get.response.aiV5.model()
 });
 },
-onStepFinish:
+onStepFinish: result2.onStepFinish
 },
 output: options.output,
 outputProcessors: effectiveOutputProcessors,
@@ -11167,7 +11310,15 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 return loopOptions;
 }).then(streamStep).commit();
 const run = await executionWorkflow.createRunAsync();
-
+const result = await run.start({
+tracingContext: {
+currentSpan: agentAISpan
+}
+});
+agentAISpan?.end({
+output: result
+});
+return result;
 }
 async #executeOnFinish({
 result,
@@ -11178,11 +11329,13 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 memoryConfig,
 outputText,
 runtimeContext,
+tracingContext,
 runId,
 messageList,
 threadExists,
 structuredOutput = false,
-saveQueueManager
+saveQueueManager,
+overrideScorers
 }) {
 const resToLog = {
 text: result?.text,
@@ -11257,7 +11410,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 instructions: titleInstructions
 } = this.resolveTitleGenerationConfig(config?.threads?.generateTitle);
 if (shouldGenerate && userMessage) {
-promises.push(this.genTitle(userMessage, runtimeContext, titleModel, titleInstructions).then(title => {
+promises.push(this.genTitle(userMessage, runtimeContext, tracingContext, titleModel, titleInstructions).then(title => {
 if (title) {
 return memory.createThread({
 threadId: thread.id,
@@ -11313,7 +11466,9 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 outputText,
 instructions,
 runtimeContext,
-
+tracingContext,
+structuredOutput,
+overrideScorers
 });
 }
 async generateVNext(messages, options) {
@@ -11373,6 +11528,10 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 return result.result;
 }
 async generate(messages, generateOptions = {}) {
+this.logger.warn("Deprecation NOTICE:\nGenerate method will switch to use generateVNext implementation September 16th. Please use generateLegacy if you don't want to upgrade just yet.");
+return this.generateLegacy(messages, generateOptions);
+}
+async generateLegacy(messages, generateOptions = {}) {
 const defaultGenerateOptions = await this.getDefaultGenerateOptions({
 runtimeContext: generateOptions.runtimeContext
 });
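Note: with this change `generate()` logs a deprecation notice and delegates to the new `generateLegacy()`; per the warning text, the default is slated to switch to the `generateVNext` implementation on September 16th. An illustrative migration, assuming an agent obtained from `mastra.getAgent()`:

```ts
// Pin the current behavior explicitly (no surprise when the default switches over):
const legacy = await agent.generateLegacy("Draft a welcome email");

// Or opt in to the new implementation now:
const vnext = await agent.generateVNext("Draft a welcome email");
```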
@@ -11440,6 +11599,9 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 agentAISpan,
 ...llmOptions
 } = beforeResult;
+const tracingContext = {
+currentSpan: agentAISpan
+};
 let finalOutputProcessors = mergedGenerateOptions.outputProcessors;
 if (mergedGenerateOptions.structuredOutput) {
 const structuredProcessor = new StructuredOutputProcessor(mergedGenerateOptions.structuredOutput);
@@ -11448,13 +11610,14 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 if (!output || experimental_output) {
 const result2 = await llmToUse.__text({
 ...llmOptions,
-
+tracingContext,
 experimental_output
 });
 const outputProcessorResult2 = await this.__runOutputProcessors({
 runtimeContext: mergedGenerateOptions.runtimeContext || new chunkGPWMM745_cjs.RuntimeContext(),
+tracingContext,
 outputProcessorOverrides: finalOutputProcessors,
-messageList: new
+messageList: new chunk5CJDO3UO_cjs.MessageList({
 threadId: llmOptions.threadId || "",
 resourceId: llmOptions.resourceId || ""
 }).add({
@@ -11524,12 +11687,13 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 }
 }
 }
+const overrideScorers = mergedGenerateOptions.scorers;
 const afterResult2 = await after({
 result: result2,
 outputText: newText2,
 agentAISpan,
-...(
-overrideScorers
+...(overrideScorers ? {
+overrideScorers
 } : {})
 });
 if (generateOptions.returnScorerData) {
@@ -11539,13 +11703,14 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 }
 const result = await llmToUse.__textObject({
 ...llmOptions,
-
+tracingContext,
 structuredOutput: output
 });
 const outputText = JSON.stringify(result.object);
 const outputProcessorResult = await this.__runOutputProcessors({
 runtimeContext: mergedGenerateOptions.runtimeContext || new chunkGPWMM745_cjs.RuntimeContext(),
-
+tracingContext,
+messageList: new chunk5CJDO3UO_cjs.MessageList({
 threadId: llmOptions.threadId || "",
 resourceId: llmOptions.resourceId || ""
 }).add({
@@ -11613,6 +11778,10 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 return result;
 }
 async stream(messages, streamOptions = {}) {
+this.logger.warn("Deprecation NOTICE:\nStream method will switch to use streamVNext implementation September 16th. Please use streamLegacy if you don't want to upgrade just yet.");
+return this.streamLegacy(messages, streamOptions);
+}
+async streamLegacy(messages, streamOptions = {}) {
 const defaultStreamOptions = await this.getDefaultStreamOptions({
 runtimeContext: streamOptions.runtimeContext
 });
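Note: `stream()` gets the same treatment as `generate()`: a deprecation notice plus delegation to `streamLegacy()`, with `streamVNext` as the forward path. A short sketch, assuming the legacy result exposes a `textStream` async iterable:

```ts
const result = await agent.streamLegacy("Summarize this thread"); // explicit legacy streaming
for await (const chunk of result.textStream) {
  process.stdout.write(chunk);
}

// or move to the new implementation directly:
const vnext = await agent.streamVNext("Summarize this thread");
```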
@@ -11713,6 +11882,10 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 agentAISpan,
 ...llmOptions
 } = beforeResult;
+const overrideScorers = mergedStreamOptions.scorers;
+const tracingContext = {
+currentSpan: agentAISpan
+};
 if (!output || experimental_output) {
 this.logger.debug(`Starting agent ${this.name} llm stream call`, {
 runId
@@ -11720,14 +11893,17 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 const streamResult = llm.__stream({
 ...llmOptions,
 experimental_output,
-
+tracingContext,
 onFinish: async result => {
 try {
 const outputText = result.text;
 await after({
 result,
 outputText,
-agentAISpan
+agentAISpan,
+...(overrideScorers ? {
+overrideScorers
+} : {})
 });
 } catch (e) {
 this.logger.error("Error saving memory on finish", {
@@ -11749,7 +11925,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 });
 return llm.__streamObject({
 ...llmOptions,
-
+tracingContext,
 onFinish: async result => {
 try {
 const outputText = JSON.stringify(result.object);
@@ -11757,7 +11933,10 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
 result,
 outputText,
 structuredOutput: true,
-agentAISpan
+agentAISpan,
+...(overrideScorers ? {
+overrideScorers
+} : {})
 });
 } catch (e) {
 this.logger.error("Error saving memory on finish", {
@@ -12075,6 +12254,21 @@ var ExecutionEngine = class extends chunkKXCUCBEI_cjs.MastraBase {

 // src/workflows/default.ts
 var DefaultExecutionEngine = class extends ExecutionEngine {
+/**
+* Preprocesses an error caught during workflow execution.
+*
+* - Wraps a non-MastraError exception
+* - Logs error details
+*/
+preprocessExecutionError(e, errorDefinition, logPrefix) {
+const error = e instanceof chunkC73WLCY3_cjs.MastraError ? e : new chunkC73WLCY3_cjs.MastraError(errorDefinition, e);
+if (!(e instanceof chunkC73WLCY3_cjs.MastraError) && e instanceof Error && e.stack) {
+error.stack = e.stack;
+}
+this.logger?.trackException(error);
+this.logger?.error(logPrefix + error?.stack);
+return error;
+}
 /**
 * The runCounts map is used to keep track of the run count for each step.
 * The step id is used as the key and the run count is the value.
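Note: `preprocessExecutionError` centralizes the wrap-and-log pattern that the later hunks remove from individual `catch` blocks. A self-contained sketch of the same idea; `MastraishError` and `MiniEngine` are illustrative stand-ins, not the package's types:

```ts
class MastraishError extends Error {
  constructor(public definition: { id: string; domain: string; category: string }, public original?: unknown) {
    super(definition.id);
  }
}

class MiniEngine {
  constructor(private logger: Pick<Console, "error"> = console) {}

  preprocessExecutionError(e: unknown, def: { id: string; domain: string; category: string }, logPrefix: string) {
    const error = e instanceof MastraishError ? e : new MastraishError(def, e);
    if (!(e instanceof MastraishError) && e instanceof Error && e.stack) {
      error.stack = e.stack; // preserve the original stack when wrapping
    }
    this.logger.error(logPrefix + error?.stack);
    return error;
  }

  async runStep(step: () => Promise<void>) {
    try {
      await step();
    } catch (e) {
      // one call replaces the wrap + trackException + error-log boilerplate at each call site
      throw this.preprocessExecutionError(
        e,
        { id: "WORKFLOW_STEP_INVOKE_FAILED", domain: "MASTRA_WORKFLOW", category: "USER" },
        "Error executing step: ",
      );
    }
  }
}
```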
@@ -12171,7 +12365,7 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
 resume,
 retryConfig,
 runtimeContext,
-
+tracingContext,
 disableScorers
 } = params;
 const {
@@ -12180,33 +12374,16 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
 } = retryConfig ?? {};
 const steps = graph.steps;
 this.runCounts.clear();
-const
+const workflowAISpan = chunkI7OAONIW_cjs.getOrCreateSpan({
+type: "workflow_run" /* WORKFLOW_RUN */,
 name: `workflow run: '${workflowId}'`,
 input,
 attributes: {
 workflowId
-}
-
-
-
-workflowAISpan = currentSpan.createChildSpan({
-type: "workflow_run" /* WORKFLOW_RUN */,
-...spanArgs
-});
-} else {
-const aiTracing = chunk4DKPMUAC_cjs.getSelectedAITracing({
-runtimeContext
-});
-if (aiTracing) {
-workflowAISpan = aiTracing.startSpan({
-type: "workflow_run" /* WORKFLOW_RUN */,
-...spanArgs,
-startOptions: {
-runtimeContext
-}
-});
-}
-}
+},
+tracingContext,
+runtimeContext
+});
 if (steps.length === 0) {
 const empty_graph_error = new chunkC73WLCY3_cjs.MastraError({
 id: "WORKFLOW_EXECUTE_EMPTY_GRAPH",
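Note: here the hand-rolled branch (create a child span when a parent exists, otherwise start a root span on the selected AI tracing instance) is replaced by a single `getOrCreateSpan` call. A hedged sketch of what that helper appears to encapsulate, based only on the code it replaces; the real implementation lives in the tracing utils chunk and may differ:

```ts
// Stand-in types; `getSelectedAITracingSketch` is declared rather than implemented,
// because the real lookup is internal to the package.
type SpanOptions = { type: string; name: string; input?: unknown; attributes?: Record<string, unknown> };
declare function getSelectedAITracingSketch(runtimeContext: unknown): { startSpan(o: object): unknown } | undefined;

function getOrCreateSpanSketch(options: SpanOptions & {
  tracingContext?: { currentSpan?: { createChildSpan(o: SpanOptions): unknown } };
  runtimeContext?: unknown;
}) {
  const { tracingContext, runtimeContext, ...spanArgs } = options;
  if (tracingContext?.currentSpan) {
    // nested call: attach to the caller's span
    return tracingContext.currentSpan.createChildSpan(spanArgs);
  }
  // top-level call: start a root span on the selected AI tracing instance, if any
  return getSelectedAITracingSketch(runtimeContext)?.startSpan({ ...spanArgs, startOptions: { runtimeContext } });
}
```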
@@ -12299,7 +12476,7 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
 return result2;
 }
 } catch (e) {
-const error =
+const error = this.preprocessExecutionError(e, {
 id: "WORKFLOW_ENGINE_STEP_EXECUTION_FAILED",
 domain: "MASTRA_WORKFLOW" /* MASTRA_WORKFLOW */,
 category: "USER" /* USER */,
@@ -12307,9 +12484,7 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
 workflowId,
 runId
 }
-},
-this.logger?.trackException(error);
-this.logger?.error(`Error executing step: ${error?.stack}`);
+}, "Error executing step: ");
 const result2 = await this.fmtReturnValue(executionSpan, params.emitter, stepResults, lastOutput.result, e);
 await this.persistStepUpdate({
 workflowId,
@@ -12623,11 +12798,15 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
 const stepAISpan = tracingContext.currentSpan?.createChildSpan({
 name: `workflow step: '${step.id}'`,
 type: "workflow_step" /* WORKFLOW_STEP */,
-
+
+//input: prevOutput,
 attributes: {
 stepId: step.id
 }
 });
+const innerTracingContext = {
+currentSpan: stepAISpan
+};
 if (!skipEmits) {
 await emitter.emit("watch", {
 type: "watch",
@@ -12703,16 +12882,12 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
 const result = await runStep({
 runId,
 workflowId,
-mastra: this.mastra ?
-currentSpan: stepAISpan
-}) : void 0,
+mastra: this.mastra ? chunkI7OAONIW_cjs.wrapMastra(this.mastra, innerTracingContext) : void 0,
 runtimeContext,
 inputData: prevOutput,
 runCount: this.getOrGenerateRunCount(step.id),
 resumeData: resume?.steps[0] === step.id ? resume?.resumePayload : void 0,
-tracingContext:
-currentSpan: stepAISpan
-},
+tracingContext: innerTracingContext,
 getInitData: () => stepResults?.input,
 getStepResult: step2 => {
 if (!step2?.id) {
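Note: step execution now builds a single `innerTracingContext` from the step span and reuses it for the wrapped Mastra instance (`wrapMastra`), the step's `tracingContext` argument, and scorer runs. An illustrative step, assuming the usual `createStep` signature and a hypothetical `summarizer` agent; because the engine hands the step a wrapped `mastra`, the nested agent call should land under the step's span without extra wiring:

```ts
import { z } from "zod";
import { createStep } from "@mastra/core/workflows"; // assumed import path

const summarize = createStep({
  id: "summarize",
  inputSchema: z.object({ text: z.string() }),
  outputSchema: z.object({ summary: z.string() }),
  execute: async ({ inputData, mastra, tracingContext }) => {
    // `mastra` here is the engine's wrapMastra(this.mastra, { currentSpan: stepAISpan })
    const agent = mastra!.getAgent("summarizer"); // hypothetical agent id
    const result = await agent.generateVNext(inputData.text, { tracingContext });
    return { summary: result.text };
  },
});
```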
@@ -12767,6 +12942,7 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
 workflowId,
 stepId: step.id,
 runtimeContext,
+tracingContext: innerTracingContext,
 disableScorers
 });
 }
@@ -12791,7 +12967,7 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
 }
 break;
 } catch (e) {
-const error =
+const error = this.preprocessExecutionError(e, {
 id: "WORKFLOW_STEP_INVOKE_FAILED",
 domain: "MASTRA_WORKFLOW" /* MASTRA_WORKFLOW */,
 category: "USER" /* USER */,
@@ -12800,9 +12976,7 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
 runId,
 stepId: step.id
 }
-},
-this.logger.trackException(error);
-this.logger.error(`Error executing step ${step.id}: ` + error?.stack);
+}, `Error executing step ${step.id}: `);
 stepAISpan?.error({
 error,
 attributes: {
@@ -12889,6 +13063,7 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
 workflowId,
 stepId,
 runtimeContext,
+tracingContext,
 disableScorers
 }) {
 let scorersToUse = scorers;
@@ -12898,18 +13073,16 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
 runtimeContext
 });
 } catch (error) {
-
+this.preprocessExecutionError(error, {
 id: "WORKFLOW_FAILED_TO_FETCH_SCORERS",
-domain: "MASTRA_WORKFLOW"
+domain: "MASTRA_WORKFLOW" /* MASTRA_WORKFLOW */,
 category: "USER" /* USER */,
 details: {
 runId,
 workflowId,
 stepId
 }
-},
-this.logger.trackException(mastraError);
-this.logger.error(mastraError.toString(), error);
+}, "Error fetching scorers: ");
 }
 }
 if (!disableScorers && scorersToUse && Object.keys(scorersToUse || {}).length > 0) {
@@ -12921,6 +13094,7 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
 input: [input],
 output,
 runtimeContext,
+tracingContext,
 entity: {
 id: workflowId,
 stepId
@@ -13102,7 +13276,7 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
 });
 return result ? index : null;
 } catch (e) {
-const error =
+const error = this.preprocessExecutionError(e, {
 id: "WORKFLOW_CONDITION_EVALUATION_FAILED",
 domain: "MASTRA_WORKFLOW" /* MASTRA_WORKFLOW */,
 category: "USER" /* USER */,
@@ -13110,9 +13284,7 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
 workflowId,
 runId
 }
-},
-this.logger.trackException(error);
-this.logger.error("Error evaluating condition: " + error?.stack);
+}, "Error evaluating condition: ");
 evalSpan?.error({
 error,
 attributes: {
@@ -13292,7 +13464,7 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
 const evalSpan = loopSpan?.createChildSpan({
 type: "workflow_conditional_eval" /* WORKFLOW_CONDITIONAL_EVAL */,
 name: `condition: ${entry.loopType}`,
-input: result.output,
+input: chunkI7OAONIW_cjs.selectFields(result.output, ["stepResult", "output.text", "output.object", "messages"]),
 attributes: {
 conditionIndex: iteration
 }
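Note: loop-condition spans no longer attach the entire step output as span input; `selectFields` keeps only the listed paths (`stepResult`, `output.text`, `output.object`, `messages`). A hedged sketch of the field-selection idea inferred from this call site; the real helper's handling of arrays and missing paths may differ:

```ts
function selectFieldsSketch(source: Record<string, any>, paths: string[]) {
  const out: Record<string, any> = {};
  for (const path of paths) {
    const value = path.split(".").reduce<any>((acc, key) => acc?.[key], source);
    if (value !== undefined) out[path] = value;
  }
  return out;
}

// Only the fields worth tracing survive:
selectFieldsSketch(
  { stepResult: { status: "success" }, output: { text: "hi", tokens: 123 }, internal: "dropped" },
  ["stepResult", "output.text", "output.object", "messages"],
);
// => { stepResult: { status: "success" }, "output.text": "hi" }
```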
@@ -14249,7 +14421,7 @@ function createStep(params) {
 }
 };
 }
-if (params instanceof
+if (params instanceof chunkABRPHTOG_cjs.Tool) {
 if (!params.inputSchema || !params.outputSchema) {
 throw new Error("Tool must have input and output schemas defined");
 }
@@ -14790,7 +14962,7 @@ var Workflow = class extends chunkKXCUCBEI_cjs.MastraBase {
 cleanup: () => this.#runs.delete(runIdToUse)
 });
 this.#runs.set(runIdToUse, run);
-this.mastra?.getLogger().warn("createRun()
+this.mastra?.getLogger().warn("createRun() will be removed on September 16th. Use createRunAsync() instead.");
 return run;
 }
 /**
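Note: the synchronous `createRun()` now warns with a concrete removal date; `createRunAsync()` is the supported path. A short migration sketch, assuming a workflow registered on the application's Mastra instance:

```ts
const workflow = mastra.getWorkflow("my-workflow"); // hypothetical workflow id
const run = await workflow.createRunAsync();        // replaces the deprecated createRun()
const result = await run.start({ inputData: { text: "hello" } });
```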
@@ -14875,7 +15047,7 @@ var Workflow = class extends chunkKXCUCBEI_cjs.MastraBase {
 abort,
 abortSignal,
 runCount,
-
+tracingContext
 }) {
 this.__registerMastra(mastra);
 const isResume = !!(resume?.steps && resume.steps.length > 0);
@@ -14911,11 +15083,11 @@ var Workflow = class extends chunkKXCUCBEI_cjs.MastraBase {
 resumeData,
 step: resume.steps,
 runtimeContext,
-
+tracingContext
 }) : await run.start({
 inputData,
 runtimeContext,
-
+tracingContext
 });
 unwatch();
 unwatchV2();
@@ -15081,7 +15253,7 @@ var Run = class {
 inputData,
 runtimeContext,
 writableStream,
-
+tracingContext
 }) {
 const result = await this.executionEngine.execute({
 workflowId: this.workflowId,
@@ -15108,7 +15280,7 @@ var Run = class {
 runtimeContext: runtimeContext ?? new chunkGPWMM745_cjs.RuntimeContext(),
 abortController: this.abortController,
 writableStream,
-
+tracingContext
 });
 if (result.status !== "suspended") {
 this.cleanup?.();
@@ -15445,7 +15617,7 @@ var Run = class {
 },
 runtimeContext: runtimeContextToUse,
 abortController: this.abortController,
-
+tracingContext: params.tracingContext
 }).then(result => {
 if (result.status !== "suspended") {
 this.closeStreamAction?.().catch(() => {});
@@ -15550,5 +15722,5 @@ exports.recursivelyCheckForFinalState = recursivelyCheckForFinalState;
 exports.resolveVariables = resolveVariables;
 exports.updateStepInHierarchy = updateStepInHierarchy;
 exports.workflowToStep = workflowToStep;
-//# sourceMappingURL=chunk-
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-KUNWELBC.cjs.map
+//# sourceMappingURL=chunk-KUNWELBC.cjs.map