@mastra/core 0.15.3-alpha.2 → 0.15.3-alpha.4
This diff covers publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- package/dist/agent/agent.types.d.ts +9 -4
- package/dist/agent/agent.types.d.ts.map +1 -1
- package/dist/agent/index.cjs +11 -11
- package/dist/agent/index.d.ts +33 -5
- package/dist/agent/index.d.ts.map +1 -1
- package/dist/agent/index.js +2 -2
- package/dist/agent/input-processor/index.cjs +6 -6
- package/dist/agent/input-processor/index.js +1 -1
- package/dist/agent/message-list/index.d.ts +5 -1
- package/dist/agent/message-list/index.d.ts.map +1 -1
- package/dist/agent/message-list/prompt/convert-file.d.ts +7 -0
- package/dist/agent/message-list/prompt/convert-file.d.ts.map +1 -0
- package/dist/agent/message-list/prompt/download-assets.d.ts +15 -0
- package/dist/agent/message-list/prompt/download-assets.d.ts.map +1 -0
- package/dist/agent/types.d.ts +11 -6
- package/dist/agent/types.d.ts.map +1 -1
- package/dist/ai-tracing/context.d.ts.map +1 -1
- package/dist/ai-tracing/default.d.ts.map +1 -1
- package/dist/ai-tracing/index.cjs +43 -31
- package/dist/ai-tracing/index.js +1 -1
- package/dist/ai-tracing/no-op.d.ts +0 -1
- package/dist/ai-tracing/no-op.d.ts.map +1 -1
- package/dist/ai-tracing/types.d.ts +0 -2
- package/dist/ai-tracing/types.d.ts.map +1 -1
- package/dist/ai-tracing/utils.d.ts +44 -8
- package/dist/ai-tracing/utils.d.ts.map +1 -1
- package/dist/{chunk-QA2TWVUS.cjs → chunk-5CJDO3UO.cjs} +292 -9
- package/dist/chunk-5CJDO3UO.cjs.map +1 -0
- package/dist/chunk-ABRPHTOG.cjs +108 -0
- package/dist/chunk-ABRPHTOG.cjs.map +1 -0
- package/dist/chunk-BJGHUKKM.js +103 -0
- package/dist/chunk-BJGHUKKM.js.map +1 -0
- package/dist/{chunk-BRNBKCHE.js → chunk-CKM2ESZF.js} +51 -26
- package/dist/chunk-CKM2ESZF.js.map +1 -0
- package/dist/{chunk-HHLPYCQG.cjs → chunk-DZADAEAF.cjs} +68 -22
- package/dist/chunk-DZADAEAF.cjs.map +1 -0
- package/dist/{chunk-VBAWR62U.cjs → chunk-F2CAC2R2.cjs} +54 -29
- package/dist/chunk-F2CAC2R2.cjs.map +1 -0
- package/dist/{chunk-C5C4PN54.js → chunk-F4SQXAXR.js} +292 -9
- package/dist/chunk-F4SQXAXR.js.map +1 -0
- package/dist/{chunk-UG3KS3XV.cjs → chunk-F6XWBVVG.cjs} +4 -4
- package/dist/{chunk-UG3KS3XV.cjs.map → chunk-F6XWBVVG.cjs.map} +1 -1
- package/dist/{chunk-WHEH32QU.cjs → chunk-FQRDHVZC.cjs} +4 -4
- package/dist/{chunk-WHEH32QU.cjs.map → chunk-FQRDHVZC.cjs.map} +1 -1
- package/dist/{chunk-4DKPMUAC.cjs → chunk-I7OAONIW.cjs} +259 -177
- package/dist/chunk-I7OAONIW.cjs.map +1 -0
- package/dist/{chunk-XOZ737RO.js → chunk-IODUKRQP.js} +3 -3
- package/dist/{chunk-XOZ737RO.js.map → chunk-IODUKRQP.js.map} +1 -1
- package/dist/{chunk-7OSUKFNG.cjs → chunk-KUNWELBC.cjs} +692 -507
- package/dist/chunk-KUNWELBC.cjs.map +1 -0
- package/dist/{chunk-WOTBMZCN.js → chunk-LOYT3WUA.js} +255 -176
- package/dist/chunk-LOYT3WUA.js.map +1 -0
- package/dist/{chunk-6VROHRAR.cjs → chunk-LVGGMWSE.cjs} +31 -8
- package/dist/chunk-LVGGMWSE.cjs.map +1 -0
- package/dist/{chunk-T64BA34G.js → chunk-OFPVAPUH.js} +4 -4
- package/dist/{chunk-T64BA34G.js.map → chunk-OFPVAPUH.js.map} +1 -1
- package/dist/{chunk-5NGEKEU7.js → chunk-P2IJ74UW.js} +661 -476
- package/dist/chunk-P2IJ74UW.js.map +1 -0
- package/dist/{chunk-QBNRMJAN.cjs → chunk-VVTB47UG.cjs} +6 -6
- package/dist/{chunk-QBNRMJAN.cjs.map → chunk-VVTB47UG.cjs.map} +1 -1
- package/dist/{chunk-HXYE4EJA.cjs → chunk-W5CF7DLB.cjs} +9 -9
- package/dist/{chunk-HXYE4EJA.cjs.map → chunk-W5CF7DLB.cjs.map} +1 -1
- package/dist/{chunk-FLXWZUIG.js → chunk-WWQ3QRPF.js} +27 -5
- package/dist/chunk-WWQ3QRPF.js.map +1 -0
- package/dist/{chunk-E3LAPNKY.js → chunk-XPFWOBV4.js} +4 -4
- package/dist/{chunk-E3LAPNKY.js.map → chunk-XPFWOBV4.js.map} +1 -1
- package/dist/{chunk-IYCG5OVT.js → chunk-YAWYQH3N.js} +3 -3
- package/dist/{chunk-IYCG5OVT.js.map → chunk-YAWYQH3N.js.map} +1 -1
- package/dist/{chunk-UT6KEZRF.js → chunk-YVIYEC6R.js} +62 -16
- package/dist/chunk-YVIYEC6R.js.map +1 -0
- package/dist/index.cjs +47 -43
- package/dist/index.js +10 -10
- package/dist/integration/index.cjs +3 -3
- package/dist/integration/index.js +1 -1
- package/dist/llm/index.d.ts +2 -2
- package/dist/llm/index.d.ts.map +1 -1
- package/dist/llm/model/base.types.d.ts +2 -2
- package/dist/llm/model/base.types.d.ts.map +1 -1
- package/dist/llm/model/model.d.ts +4 -4
- package/dist/llm/model/model.d.ts.map +1 -1
- package/dist/llm/model/model.loop.d.ts +1 -1
- package/dist/llm/model/model.loop.d.ts.map +1 -1
- package/dist/llm/model/model.loop.types.d.ts +2 -0
- package/dist/llm/model/model.loop.types.d.ts.map +1 -1
- package/dist/loop/index.cjs +2 -2
- package/dist/loop/index.js +1 -1
- package/dist/loop/loop.d.ts +1 -1
- package/dist/loop/loop.d.ts.map +1 -1
- package/dist/loop/types.d.ts +4 -0
- package/dist/loop/types.d.ts.map +1 -1
- package/dist/loop/workflow/llm-execution.d.ts +1 -1
- package/dist/loop/workflow/llm-execution.d.ts.map +1 -1
- package/dist/loop/workflow/stream.d.ts +1 -1
- package/dist/loop/workflow/stream.d.ts.map +1 -1
- package/dist/mastra/hooks.d.ts.map +1 -1
- package/dist/mastra/index.cjs +2 -2
- package/dist/mastra/index.d.ts +8 -3
- package/dist/mastra/index.d.ts.map +1 -1
- package/dist/mastra/index.js +1 -1
- package/dist/memory/index.cjs +4 -4
- package/dist/memory/index.js +1 -1
- package/dist/network/index.cjs +4 -4
- package/dist/network/index.js +2 -2
- package/dist/network/vNext/index.cjs +14 -14
- package/dist/network/vNext/index.js +2 -2
- package/dist/processors/index.cjs +13 -11
- package/dist/processors/index.cjs.map +1 -1
- package/dist/processors/index.d.ts +4 -0
- package/dist/processors/index.d.ts.map +1 -1
- package/dist/processors/index.js +7 -5
- package/dist/processors/index.js.map +1 -1
- package/dist/processors/processors/moderation.d.ts +4 -0
- package/dist/processors/processors/moderation.d.ts.map +1 -1
- package/dist/processors/processors/pii-detector.d.ts +3 -0
- package/dist/processors/processors/pii-detector.d.ts.map +1 -1
- package/dist/processors/processors/prompt-injection-detector.d.ts +2 -0
- package/dist/processors/processors/prompt-injection-detector.d.ts.map +1 -1
- package/dist/processors/processors/system-prompt-scrubber.d.ts +2 -0
- package/dist/processors/processors/system-prompt-scrubber.d.ts.map +1 -1
- package/dist/processors/runner.d.ts +5 -4
- package/dist/processors/runner.d.ts.map +1 -1
- package/dist/relevance/index.cjs +4 -4
- package/dist/relevance/index.js +1 -1
- package/dist/scores/base.d.ts +2 -0
- package/dist/scores/base.d.ts.map +1 -1
- package/dist/scores/hooks.d.ts +3 -1
- package/dist/scores/hooks.d.ts.map +1 -1
- package/dist/scores/index.cjs +27 -18
- package/dist/scores/index.cjs.map +1 -1
- package/dist/scores/index.js +23 -14
- package/dist/scores/index.js.map +1 -1
- package/dist/scores/run-experiment/index.d.ts +2 -0
- package/dist/scores/run-experiment/index.d.ts.map +1 -1
- package/dist/scores/types.d.ts +3 -0
- package/dist/scores/types.d.ts.map +1 -1
- package/dist/storage/index.cjs +3 -3
- package/dist/storage/index.js +1 -1
- package/dist/stream/aisdk/v5/compat/consume-stream.d.ts +8 -0
- package/dist/stream/aisdk/v5/compat/consume-stream.d.ts.map +1 -0
- package/dist/stream/aisdk/v5/compat/content.d.ts +7 -0
- package/dist/stream/aisdk/v5/compat/content.d.ts.map +1 -0
- package/dist/stream/aisdk/v5/compat/delayed-promise.d.ts +23 -0
- package/dist/stream/aisdk/v5/compat/delayed-promise.d.ts.map +1 -0
- package/dist/stream/aisdk/v5/compat/index.d.ts +8 -0
- package/dist/stream/aisdk/v5/compat/index.d.ts.map +1 -0
- package/dist/stream/aisdk/v5/compat/media.d.ts +91 -0
- package/dist/stream/aisdk/v5/compat/media.d.ts.map +1 -0
- package/dist/stream/aisdk/v5/compat/prepare-tools.d.ts +11 -0
- package/dist/stream/aisdk/v5/compat/prepare-tools.d.ts.map +1 -0
- package/dist/stream/aisdk/v5/compat/ui-message.d.ts +20 -0
- package/dist/stream/aisdk/v5/compat/ui-message.d.ts.map +1 -0
- package/dist/stream/aisdk/v5/compat/validation.d.ts +17 -0
- package/dist/stream/aisdk/v5/compat/validation.d.ts.map +1 -0
- package/dist/stream/aisdk/v5/output.d.ts +1 -1
- package/dist/stream/base/output.d.ts +1 -1
- package/dist/stream/index.cjs +3 -3
- package/dist/stream/index.js +1 -1
- package/dist/test-utils/llm-mock.cjs +2 -2
- package/dist/test-utils/llm-mock.js +1 -1
- package/dist/tools/index.cjs +4 -4
- package/dist/tools/index.js +1 -1
- package/dist/tools/is-vercel-tool.cjs +2 -2
- package/dist/tools/is-vercel-tool.js +1 -1
- package/dist/tools/tool.d.ts +3 -1
- package/dist/tools/tool.d.ts.map +1 -1
- package/dist/tools/validation.d.ts.map +1 -1
- package/dist/utils.cjs +20 -16
- package/dist/utils.d.ts +12 -3
- package/dist/utils.d.ts.map +1 -1
- package/dist/utils.js +1 -1
- package/dist/workflows/default.d.ts +13 -3
- package/dist/workflows/default.d.ts.map +1 -1
- package/dist/workflows/evented/index.cjs +10 -10
- package/dist/workflows/evented/index.js +1 -1
- package/dist/workflows/execution-engine.d.ts +2 -2
- package/dist/workflows/execution-engine.d.ts.map +1 -1
- package/dist/workflows/index.cjs +10 -10
- package/dist/workflows/index.js +1 -1
- package/dist/workflows/legacy/index.cjs +22 -22
- package/dist/workflows/legacy/index.js +1 -1
- package/dist/workflows/workflow.d.ts +8 -8
- package/dist/workflows/workflow.d.ts.map +1 -1
- package/package.json +6 -6
- package/dist/chunk-4DKPMUAC.cjs.map +0 -1
- package/dist/chunk-5NGEKEU7.js.map +0 -1
- package/dist/chunk-6VROHRAR.cjs.map +0 -1
- package/dist/chunk-7OSUKFNG.cjs.map +0 -1
- package/dist/chunk-BRNBKCHE.js.map +0 -1
- package/dist/chunk-C5C4PN54.js.map +0 -1
- package/dist/chunk-E4XQMNEI.cjs +0 -77
- package/dist/chunk-E4XQMNEI.cjs.map +0 -1
- package/dist/chunk-FLXWZUIG.js.map +0 -1
- package/dist/chunk-HHLPYCQG.cjs.map +0 -1
- package/dist/chunk-IVGAHFAJ.js +0 -72
- package/dist/chunk-IVGAHFAJ.js.map +0 -1
- package/dist/chunk-QA2TWVUS.cjs.map +0 -1
- package/dist/chunk-UT6KEZRF.js.map +0 -1
- package/dist/chunk-VBAWR62U.cjs.map +0 -1
- package/dist/chunk-WOTBMZCN.js.map +0 -1
- package/dist/stream/aisdk/v5/compat.d.ts +0 -73
- package/dist/stream/aisdk/v5/compat.d.ts.map +0 -1
@@ -1,14 +1,14 @@
 import { DefaultVoice } from './chunk-XM2ASGWH.js';
 import { EMITTER_SYMBOL } from './chunk-GK5V7YTQ.js';
 import { InstrumentClass, Telemetry } from './chunk-76MWMAR7.js';
-import { MastraLLMV1 } from './chunk-
-import { MessageList, DefaultGeneratedFile, DefaultGeneratedFileWithType } from './chunk-
+import { MastraLLMV1 } from './chunk-CKM2ESZF.js';
+import { MessageList, DefaultGeneratedFile, DefaultGeneratedFileWithType } from './chunk-F4SQXAXR.js';
 import { executeHook } from './chunk-TTELJD4F.js';
-import { ensureToolProperties, makeCoreTool, createMastraProxy, delay } from './chunk-
+import { ensureToolProperties, makeCoreTool, createMastraProxy, delay } from './chunk-WWQ3QRPF.js';
 import { RuntimeContext } from './chunk-HLRWYUFN.js';
 import { ToolStream } from './chunk-YW7UILPE.js';
-import { Tool } from './chunk-
-import {
+import { Tool } from './chunk-BJGHUKKM.js';
+import { getOrCreateSpan, wrapMastra, selectFields } from './chunk-LOYT3WUA.js';
 import { MastraError } from './chunk-MCOVMKIS.js';
 import { MastraBase } from './chunk-6GF5M4GX.js';
 import { ConsoleLogger, RegisteredLogger } from './chunk-X3GXU6TZ.js';
@@ -64,230 +64,17 @@ var require_fast_deep_equal = __commonJS({
 // src/agent/index.ts
 var import_fast_deep_equal = __toESM(require_fast_deep_equal(), 1);
 
-// src/
-
-
-
-
-
-
-
-// src/processors/runner.ts
-var ProcessorState = class {
-  constructor(processorName) {
-    this.processorName = processorName;
-  }
-  accumulatedText = "";
-  customState = {};
-  streamParts = [];
-  // Internal methods for the runner
-  addPart(part) {
-    if (part.type === "text-delta") {
-      this.accumulatedText += part.payload.text;
-    }
-    this.streamParts.push(part);
-  }
-};
-var ProcessorRunner = class {
-  inputProcessors;
-  outputProcessors;
-  logger;
-  agentName;
-  constructor({
-    inputProcessors,
-    outputProcessors,
-    logger,
-    agentName
-  }) {
-    this.inputProcessors = inputProcessors ?? [];
-    this.outputProcessors = outputProcessors ?? [];
-    this.logger = logger;
-    this.agentName = agentName;
-  }
-  async runOutputProcessors(messageList, telemetry) {
-    const responseMessages = messageList.clear.response.v2();
-    let processableMessages = [...responseMessages];
-    const ctx = {
-      abort: () => {
-        throw new TripWire("Tripwire triggered");
-      }
-    };
-    for (const [index, processor] of this.outputProcessors.entries()) {
-      const abort = reason => {
-        throw new TripWire(reason || `Tripwire triggered by ${processor.name}`);
-      };
-      ctx.abort = abort;
-      const processMethod = processor.processOutputResult?.bind(processor);
-      if (!processMethod) {
-        continue;
-      }
-      if (!telemetry) {
-        processableMessages = await processMethod({
-          messages: processableMessages,
-          abort: ctx.abort
-        });
-      } else {
-        await telemetry.traceMethod(async () => {
-          processableMessages = await processMethod({
-            messages: processableMessages,
-            abort: ctx.abort
-          });
-          return processableMessages;
-        }, {
-          spanName: `agent.outputProcessor.${processor.name}`,
-          attributes: {
-            "processor.name": processor.name,
-            "processor.index": index.toString(),
-            "processor.total": this.outputProcessors.length.toString()
-          }
-        })();
-      }
-    }
-    if (processableMessages.length > 0) {
-      messageList.add(processableMessages, "response");
-    }
-    return messageList;
-  }
-  /**
-   * Process a stream part through all output processors with state management
-   */
-  async processPart(part, processorStates) {
-    if (!this.outputProcessors.length) {
-      return {
-        part,
-        blocked: false
-      };
-    }
-    try {
-      let processedPart = part;
-      for (const processor of this.outputProcessors) {
-        try {
-          if (processor.processOutputStream && processedPart) {
-            let state = processorStates.get(processor.name);
-            if (!state) {
-              state = new ProcessorState(processor.name);
-              processorStates.set(processor.name, state);
-            }
-            state.addPart(processedPart);
-            const result = await processor.processOutputStream({
-              part: processedPart,
-              streamParts: state.streamParts,
-              state: state.customState,
-              abort: reason => {
-                throw new TripWire(reason || `Stream part blocked by ${processor.name}`);
-              }
-            });
-            processedPart = result;
-          }
-        } catch (error) {
-          if (error instanceof TripWire) {
-            return {
-              part: null,
-              blocked: true,
-              reason: error.message
-            };
-          }
-          this.logger.error(`[Agent:${this.agentName}] - Output processor ${processor.name} failed:`, error);
-        }
-      }
-      return {
-        part: processedPart,
-        blocked: false
-      };
-    } catch (error) {
-      this.logger.error(`[Agent:${this.agentName}] - Stream part processing failed:`, error);
-      return {
-        part,
-        blocked: false
-      };
-    }
-  }
-  async runOutputProcessorsForStream(streamResult) {
-    return new ReadableStream({
-      start: async controller => {
-        const reader = streamResult.fullStream.getReader();
-        const processorStates = /* @__PURE__ */new Map();
-        try {
-          while (true) {
-            const {
-              done,
-              value
-            } = await reader.read();
-            if (done) {
-              controller.close();
-              break;
-            }
-            const {
-              part: processedPart,
-              blocked,
-              reason
-            } = await this.processPart(value, processorStates);
-            if (blocked) {
-              void this.logger.debug(`[Agent:${this.agentName}] - Stream part blocked by output processor`, {
-                reason,
-                originalPart: value
-              });
-              controller.enqueue({
-                type: "tripwire",
-                tripwireReason: reason || "Output processor blocked content"
-              });
-              controller.close();
-              break;
-            } else if (processedPart !== null) {
-              controller.enqueue(processedPart);
-            }
-          }
-        } catch (error) {
-          controller.error(error);
-        }
-      }
-    });
-  }
-  async runInputProcessors(messageList, telemetry) {
-    const userMessages = messageList.clear.input.v2();
-    let processableMessages = [...userMessages];
-    const ctx = {
-      abort: () => {
-        throw new TripWire("Tripwire triggered");
-      }
-    };
-    for (const [index, processor] of this.inputProcessors.entries()) {
-      const abort = reason => {
-        throw new TripWire(reason || `Tripwire triggered by ${processor.name}`);
-      };
-      ctx.abort = abort;
-      const processMethod = processor.processInput?.bind(processor);
-      if (!processMethod) {
-        continue;
-      }
-      if (!telemetry) {
-        processableMessages = await processMethod({
-          messages: processableMessages,
-          abort: ctx.abort
-        });
-      } else {
-        await telemetry.traceMethod(async () => {
-          processableMessages = await processMethod({
-            messages: processableMessages,
-            abort: ctx.abort
-          });
-          return processableMessages;
-        }, {
-          spanName: `agent.inputProcessor.${processor.name}`,
-          attributes: {
-            "processor.name": processor.name,
-            "processor.index": index.toString(),
-            "processor.total": this.inputProcessors.length.toString()
-          }
-        })();
-      }
-    }
-    if (processableMessages.length > 0) {
-      messageList.add(processableMessages, "user");
-    }
-    return messageList;
+// src/stream/aisdk/v5/compat/ui-message.ts
+function getResponseUIMessageId({
+  originalMessages,
+  responseMessageId
+}) {
+  if (originalMessages == null) {
+    return void 0;
   }
-
+  const lastMessage = originalMessages[originalMessages.length - 1];
+  return lastMessage?.role === "assistant" ? lastMessage.id : typeof responseMessageId === "function" ? responseMessageId() : responseMessageId;
+}
 function convertFullStreamChunkToUIMessageStream({
   part,
   messageMetadataValue,
@@ -540,16 +327,6 @@ function convertFullStreamChunkToUIMessageStream({
     }
   }
 }
-function getResponseUIMessageId({
-  originalMessages,
-  responseMessageId
-}) {
-  if (originalMessages == null) {
-    return void 0;
-  }
-  const lastMessage = originalMessages[originalMessages.length - 1];
-  return lastMessage?.role === "assistant" ? lastMessage.id : typeof responseMessageId === "function" ? responseMessageId() : responseMessageId;
-}
 async function safeValidateTypes({
   value,
   schema
@@ -582,6 +359,49 @@ async function safeValidateTypes({
     };
   }
 }
+
+// src/stream/aisdk/v5/compat/delayed-promise.ts
+var DelayedPromise = class {
+  status = {
+    type: "pending"
+  };
+  _promise;
+  _resolve = void 0;
+  _reject = void 0;
+  get promise() {
+    if (this._promise) {
+      return this._promise;
+    }
+    this._promise = new Promise((resolve, reject) => {
+      if (this.status.type === "resolved") {
+        resolve(this.status.value);
+      } else if (this.status.type === "rejected") {
+        reject(this.status.error);
+      }
+      this._resolve = resolve;
+      this._reject = reject;
+    });
+    return this._promise;
+  }
+  resolve(value) {
+    this.status = {
+      type: "resolved",
+      value
+    };
+    if (this._promise) {
+      this._resolve?.(value);
+    }
+  }
+  reject(error) {
+    this.status = {
+      type: "rejected",
+      error
+    };
+    if (this._promise) {
+      this._reject?.(error);
+    }
+  }
+};
 function prepareToolsAndToolChoice({
   tools,
   toolChoice,
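Reviewer note: the DelayedPromise class added above records its settled state, so .promise can be read before or after resolve()/reject() is called. A minimal usage sketch, not code from the package:

    const dp = new DelayedPromise();
    dp.resolve(42);                                // settles before anyone has subscribed
    dp.promise.then(value => console.log(value));  // still receives 42 afterwards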
@@ -649,45 +469,234 @@ function prepareToolsAndToolChoice({
     }
   };
 }
-
-
-
-
-
-
-
-
-
-
+
+// src/agent/trip-wire.ts
+var TripWire = class extends Error {
+  constructor(reason) {
+    super(reason);
+    Object.setPrototypeOf(this, new.target.prototype);
+  }
+};
+
+// src/processors/runner.ts
+var ProcessorState = class {
+  constructor(processorName) {
+    this.processorName = processorName;
+  }
+  accumulatedText = "";
+  customState = {};
+  streamParts = [];
+  // Internal methods for the runner
+  addPart(part) {
+    if (part.type === "text-delta") {
+      this.accumulatedText += part.payload.text;
     }
-    this.
-
-
-
-
+    this.streamParts.push(part);
+  }
+};
+var ProcessorRunner = class {
+  inputProcessors;
+  outputProcessors;
+  logger;
+  agentName;
+  constructor({
+    inputProcessors,
+    outputProcessors,
+    logger,
+    agentName
+  }) {
+    this.inputProcessors = inputProcessors ?? [];
+    this.outputProcessors = outputProcessors ?? [];
+    this.logger = logger;
+    this.agentName = agentName;
+  }
+  async runOutputProcessors(messageList, tracingContext, telemetry) {
+    const responseMessages = messageList.clear.response.v2();
+    let processableMessages = [...responseMessages];
+    const ctx = {
+      abort: () => {
+        throw new TripWire("Tripwire triggered");
+      }
+    };
+    for (const [index, processor] of this.outputProcessors.entries()) {
+      const abort = reason => {
+        throw new TripWire(reason || `Tripwire triggered by ${processor.name}`);
+      };
+      ctx.abort = abort;
+      const processMethod = processor.processOutputResult?.bind(processor);
+      if (!processMethod) {
+        continue;
+      }
+      if (!telemetry) {
+        processableMessages = await processMethod({
+          messages: processableMessages,
+          abort: ctx.abort,
+          tracingContext
+        });
+      } else {
+        await telemetry.traceMethod(async () => {
+          processableMessages = await processMethod({
+            messages: processableMessages,
+            abort: ctx.abort,
+            tracingContext
+          });
+          return processableMessages;
+        }, {
+          spanName: `agent.outputProcessor.${processor.name}`,
+          attributes: {
+            "processor.name": processor.name,
+            "processor.index": index.toString(),
+            "processor.total": this.outputProcessors.length.toString()
+          }
+        })();
+      }
+    }
+    if (processableMessages.length > 0) {
+      messageList.add(processableMessages, "response");
+    }
+    return messageList;
+  }
+  /**
+   * Process a stream part through all output processors with state management
+   */
+  async processPart(part, processorStates, tracingContext) {
+    if (!this.outputProcessors.length) {
+      return {
+        part,
+        blocked: false
+      };
+    }
+    try {
+      let processedPart = part;
+      for (const processor of this.outputProcessors) {
+        try {
+          if (processor.processOutputStream && processedPart) {
+            let state = processorStates.get(processor.name);
+            if (!state) {
+              state = new ProcessorState(processor.name);
+              processorStates.set(processor.name, state);
+            }
+            state.addPart(processedPart);
+            const result = await processor.processOutputStream({
+              part: processedPart,
+              streamParts: state.streamParts,
+              state: state.customState,
+              abort: reason => {
+                throw new TripWire(reason || `Stream part blocked by ${processor.name}`);
+              },
+              tracingContext
+            });
+            processedPart = result;
+          }
+        } catch (error) {
+          if (error instanceof TripWire) {
+            return {
+              part: null,
+              blocked: true,
+              reason: error.message
+            };
+          }
+          this.logger.error(`[Agent:${this.agentName}] - Output processor ${processor.name} failed:`, error);
+        }
+      }
+      return {
+        part: processedPart,
+        blocked: false
+      };
+    } catch (error) {
+      this.logger.error(`[Agent:${this.agentName}] - Stream part processing failed:`, error);
+      return {
+        part,
+        blocked: false
+      };
+    }
+  }
+  async runOutputProcessorsForStream(streamResult, tracingContext) {
+    return new ReadableStream({
+      start: async controller => {
+        const reader = streamResult.fullStream.getReader();
+        const processorStates = /* @__PURE__ */new Map();
+        try {
+          while (true) {
+            const {
+              done,
+              value
+            } = await reader.read();
+            if (done) {
+              controller.close();
+              break;
+            }
+            const {
+              part: processedPart,
+              blocked,
+              reason
+            } = await this.processPart(value, processorStates, tracingContext);
+            if (blocked) {
+              void this.logger.debug(`[Agent:${this.agentName}] - Stream part blocked by output processor`, {
+                reason,
+                originalPart: value
+              });
+              controller.enqueue({
+                type: "tripwire",
+                tripwireReason: reason || "Output processor blocked content"
+              });
+              controller.close();
+              break;
+            } else if (processedPart !== null) {
+              controller.enqueue(processedPart);
+            }
+          }
+        } catch (error) {
+          controller.error(error);
+        }
       }
-        this._resolve = resolve;
-        this._reject = reject;
     });
-    return this._promise;
   }
-
-
-
-
+  async runInputProcessors(messageList, tracingContext, telemetry) {
+    const userMessages = messageList.clear.input.v2();
+    let processableMessages = [...userMessages];
+    const ctx = {
+      abort: () => {
+        throw new TripWire("Tripwire triggered");
+      }
     };
-
-
+    for (const [index, processor] of this.inputProcessors.entries()) {
+      const abort = reason => {
+        throw new TripWire(reason || `Tripwire triggered by ${processor.name}`);
+      };
+      ctx.abort = abort;
+      const processMethod = processor.processInput?.bind(processor);
+      if (!processMethod) {
+        continue;
+      }
+      if (!telemetry) {
+        processableMessages = await processMethod({
+          messages: processableMessages,
+          abort: ctx.abort,
+          tracingContext
+        });
+      } else {
+        await telemetry.traceMethod(async () => {
+          processableMessages = await processMethod({
+            messages: processableMessages,
+            abort: ctx.abort,
+            tracingContext
+          });
+          return processableMessages;
+        }, {
+          spanName: `agent.inputProcessor.${processor.name}`,
+          attributes: {
+            "processor.name": processor.name,
+            "processor.index": index.toString(),
+            "processor.total": this.inputProcessors.length.toString()
+          }
+        })();
+      }
     }
-
-
-    this.status = {
-      type: "rejected",
-      error
-    };
-    if (this._promise) {
-      this._reject?.(error);
+    if (processableMessages.length > 0) {
+      messageList.add(processableMessages, "user");
     }
+    return messageList;
   }
 };
 function getTransformedSchema(schema) {
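Reviewer note: the ProcessorRunner methods (runInputProcessors, runOutputProcessors, processPart, runOutputProcessorsForStream) now take a tracingContext argument ahead of the optional telemetry argument and forward it to each processor. A calling sketch, assuming messageList, the processor arrays, logger, and agentAISpan already exist:

    const runner = new ProcessorRunner({ inputProcessors, outputProcessors, logger, agentName: "my-agent" });
    const tracingContext = { currentSpan: agentAISpan };
    await runner.runInputProcessors(messageList, tracingContext, undefined);  // telemetry may be omitted
    await runner.runOutputProcessors(messageList, tracingContext, undefined);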
@@ -3285,7 +3294,9 @@ function createLLMExecutionStep({
   toolCallStreaming,
   controller,
   output,
-  headers
+  headers,
+  downloadRetries,
+  downloadConcurrency
 }) {
   return createStep({
     id: "llm-execution",
@@ -3306,11 +3317,16 @@ function createLLMExecutionStep({
       switch (model.specificationVersion) {
         case "v2":
           {
+            const inputMessages = await messageList.get.all.aiV5.llmPrompt({
+              downloadRetries,
+              downloadConcurrency,
+              supportedUrls: model?.supportedUrls
+            });
             modelResult = execute({
               runId,
               model,
               providerOptions,
-              inputMessages
+              inputMessages,
               tools,
               toolChoice,
               options,
@@ -3727,12 +3743,23 @@ function workflowLoopStream({
   modelSettings,
   _internal,
   modelStreamSpan,
+  llmAISpan,
   ...rest
 }) {
   return new ReadableStream$1({
     start: async controller => {
       const writer = new WritableStream({
         write: chunk => {
+          if (llmAISpan && chunk.type === "text-delta") {
+            llmAISpan.createEventSpan({
+              type: "llm_chunk" /* LLM_CHUNK */,
+              name: `llm chunk: ${chunk.type}`,
+              output: chunk.payload.text,
+              attributes: {
+                chunkType: chunk.type
+              }
+            });
+          }
           controller.enqueue(chunk);
         }
       });
@@ -3833,6 +3860,9 @@ function workflowLoopStream({
             user: rest.messageList.get.input.aiV5.model(),
             nonUser: []
           }
+        },
+        tracingContext: {
+          currentSpan: llmAISpan
         }
       });
       if (executionResult.status !== "success") {
@@ -3875,6 +3905,7 @@ function loop({
   _internal,
   mode = "stream",
   outputProcessors,
+  llmAISpan,
   ...rest
 }) {
   let loggerToUse = logger || new ConsoleLogger({
@@ -3932,6 +3963,7 @@ function loop({
     telemetry_settings,
     modelSettings,
     outputProcessors,
+    llmAISpan,
     ...rest
   };
   const streamFn = workflowLoopStream(workflowLoopProps);
@@ -4043,7 +4075,9 @@ var MastraLLMVNext = class extends MastraBase {
     resourceId,
     output,
     options,
-    outputProcessors
+    outputProcessors,
+    providerOptions,
+    tracingContext
     // ...rest
   }) {
     let stopWhenToUse;
@@ -4063,6 +4097,20 @@ var MastraLLMVNext = class extends MastraBase {
     if (output) {
       output = this._applySchemaCompat(output);
     }
+    const llmAISpan = tracingContext?.currentSpan?.createChildSpan({
+      name: `llm stream: '${model.modelId}'`,
+      type: "llm_generation" /* LLM_GENERATION */,
+      input: messages,
+      attributes: {
+        model: model.modelId,
+        provider: model.provider,
+        streaming: true
+      },
+      metadata: {
+        threadId,
+        resourceId
+      }
+    });
     try {
       const messageList = new MessageList({
         threadId,
@@ -4076,12 +4124,14 @@ var MastraLLMVNext = class extends MastraBase {
         stopWhen: stopWhenToUse,
         toolChoice,
         modelSettings,
+        providerOptions,
         telemetry_settings: {
           ...this.experimental_telemetry,
           ...telemetry_settings
         },
         output,
         outputProcessors,
+        llmAISpan,
         options: {
           ...options,
           onStepFinish: async props => {
@@ -4164,7 +4214,11 @@ var MastraLLMVNext = class extends MastraBase {
           }
         }
       };
-
+      const result = loop(loopOptions);
+      llmAISpan?.end({
+        output: result
+      });
+      return result;
     } catch (e) {
       const mastraError = new MastraError({
         id: "LLM_STREAM_TEXT_AI_SDK_EXECUTION_FAILED",
@@ -4178,6 +4232,9 @@ var MastraLLMVNext = class extends MastraBase {
           resourceId: resourceId ?? "unknown"
         }
       }, e);
+      llmAISpan?.error({
+        error: mastraError
+      });
       throw mastraError;
     }
   }
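Reviewer note: MastraLLMVNext.stream() now opens an llm_generation child span from tracingContext.currentSpan, ends it with the loop result, and records the MastraError on it when execution fails. The wrap pattern, reduced to a sketch with placeholder option values:

    const llmAISpan = tracingContext?.currentSpan?.createChildSpan({
      type: "llm_generation",
      name: "llm stream: 'my-model'"   // placeholder name
    });
    try {
      const result = loop(loopOptions);
      llmAISpan?.end({ output: result });
      return result;
    } catch (e) {
      llmAISpan?.error({ error: e });
      throw e;
    }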
@@ -4341,6 +4398,7 @@ function runScorer({
   input,
   output,
   runtimeContext,
+  tracingContext,
   entity,
   structuredOutput,
   source,
@@ -4373,6 +4431,7 @@ function runScorer({
       input,
       output,
       runtimeContext: Object.fromEntries(runtimeContext.entries()),
+      tracingContext,
       runId,
       source,
       entity,
@@ -7586,7 +7645,8 @@ var ModerationProcessor = class _ModerationProcessor {
     try {
       const {
         messages,
-        abort
+        abort,
+        tracingContext
       } = args;
       if (messages.length === 0) {
         return messages;
@@ -7598,7 +7658,7 @@ var ModerationProcessor = class _ModerationProcessor {
           passedMessages.push(message);
           continue;
         }
-        const moderationResult = await this.moderateContent(textContent);
+        const moderationResult = await this.moderateContent(textContent, false, tracingContext);
         if (this.isModerationFlagged(moderationResult)) {
           this.handleFlaggedContent(moderationResult, this.strategy, abort);
           if (this.strategy === "filter") {
@@ -7623,13 +7683,14 @@ var ModerationProcessor = class _ModerationProcessor {
     const {
       part,
       streamParts,
-      abort
+      abort,
+      tracingContext
     } = args;
     if (part.type !== "text-delta") {
       return part;
     }
     const contentToModerate = this.buildContextFromChunks(streamParts);
-    const moderationResult = await this.moderateContent(contentToModerate, true);
+    const moderationResult = await this.moderateContent(contentToModerate, true, tracingContext);
     if (this.isModerationFlagged(moderationResult)) {
       this.handleFlaggedContent(moderationResult, this.strategy, abort);
       if (this.strategy === "filter") {
@@ -7648,7 +7709,7 @@ var ModerationProcessor = class _ModerationProcessor {
   /**
    * Moderate content using the internal agent
    */
-  async moderateContent(content, isStream = false) {
+  async moderateContent(content, isStream = false, tracingContext) {
     const prompt = this.createModerationPrompt(content, isStream);
     try {
       const model = await this.moderationAgent.getModel();
@@ -7665,12 +7726,14 @@ var ModerationProcessor = class _ModerationProcessor {
           output: schema,
           modelSettings: {
             temperature: 0
-          }
+          },
+          tracingContext
         });
       } else {
         response = await this.moderationAgent.generate(prompt, {
           output: schema,
-          temperature: 0
+          temperature: 0,
+          tracingContext
         });
       }
       const result = response.object;
@@ -7823,7 +7886,8 @@ var PromptInjectionDetector = class _PromptInjectionDetector {
     try {
       const {
         messages,
-        abort
+        abort,
+        tracingContext
       } = args;
       if (messages.length === 0) {
         return messages;
@@ -7835,7 +7899,7 @@ var PromptInjectionDetector = class _PromptInjectionDetector {
           processedMessages.push(message);
           continue;
         }
-        const detectionResult = await this.detectPromptInjection(textContent);
+        const detectionResult = await this.detectPromptInjection(textContent, tracingContext);
         if (this.isInjectionFlagged(detectionResult)) {
           const processedMessage = this.handleDetectedInjection(message, detectionResult, this.strategy, abort);
           if (this.strategy === "filter") {
@@ -7860,7 +7924,7 @@ var PromptInjectionDetector = class _PromptInjectionDetector {
   /**
    * Detect prompt injection using the internal agent
    */
-  async detectPromptInjection(content) {
+  async detectPromptInjection(content, tracingContext) {
     const prompt = this.createDetectionPrompt(content);
     try {
       const model = await this.detectionAgent.getModel();
@@ -7878,12 +7942,14 @@ var PromptInjectionDetector = class _PromptInjectionDetector {
           output: schema,
           modelSettings: {
             temperature: 0
-          }
+          },
+          tracingContext
         });
       } else {
         response = await this.detectionAgent.generate(prompt, {
           output: schema,
-          temperature: 0
+          temperature: 0,
+          tracingContext
         });
       }
       const result = response.object;
@@ -8054,7 +8120,8 @@ var PIIDetector = class _PIIDetector {
     try {
       const {
         messages,
-        abort
+        abort,
+        tracingContext
      } = args;
       if (messages.length === 0) {
         return messages;
@@ -8066,7 +8133,7 @@ var PIIDetector = class _PIIDetector {
           processedMessages.push(message);
           continue;
         }
-        const detectionResult = await this.detectPII(textContent);
+        const detectionResult = await this.detectPII(textContent, tracingContext);
         if (this.isPIIFlagged(detectionResult)) {
           const processedMessage = this.handleDetectedPII(message, detectionResult, this.strategy, abort);
           if (this.strategy === "filter") {
@@ -8093,7 +8160,7 @@ var PIIDetector = class _PIIDetector {
   /**
    * Detect PII using the internal agent
    */
-  async detectPII(content) {
+  async detectPII(content, tracingContext) {
     const prompt = this.createDetectionPrompt(content);
     const schema = z.object({
       categories: z.object(this.detectionTypes.reduce((props, type) => {
@@ -8118,12 +8185,14 @@ var PIIDetector = class _PIIDetector {
           output: schema,
           modelSettings: {
             temperature: 0
-          }
+          },
+          tracingContext
         });
       } else {
         response = await this.detectionAgent.generate(prompt, {
           output: schema,
-          temperature: 0
+          temperature: 0,
+          tracingContext
         });
       }
       const result = response.object;
@@ -8317,7 +8386,8 @@ IMPORTANT: IF NO PII IS DETECTED, RETURN AN EMPTY OBJECT, DO NOT INCLUDE ANYTHIN
   async processOutputStream(args) {
     const {
       part,
-      abort
+      abort,
+      tracingContext
     } = args;
     try {
       if (part.type !== "text-delta") {
@@ -8327,7 +8397,7 @@ IMPORTANT: IF NO PII IS DETECTED, RETURN AN EMPTY OBJECT, DO NOT INCLUDE ANYTHIN
       if (!textContent.trim()) {
         return part;
       }
-      const detectionResult = await this.detectPII(textContent);
+      const detectionResult = await this.detectPII(textContent, tracingContext);
       if (this.isPIIFlagged(detectionResult)) {
         switch (this.strategy) {
           case "block":
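Reviewer note: the built-in processors now receive tracingContext in their processInput/processOutputResult/processOutputStream arguments, so the same field should be available to user-defined processors. A minimal custom-processor sketch under that assumption (the processor itself is hypothetical):

    const auditProcessor = {
      name: "audit",
      async processInput({ messages, abort, tracingContext }) {
        // tracingContext?.currentSpan, when present, can be used to create child spans.
        console.log("audit: messages =", messages.length, "span present =", Boolean(tracingContext?.currentSpan));
        return messages;
      }
    };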
@@ -9345,6 +9415,7 @@ var Agent = class extends (_a = MastraBase) {
   async generateTitleFromUserMessage({
     message,
     runtimeContext = new RuntimeContext(),
+    tracingContext,
     model,
     instructions
   }) {
@@ -9377,6 +9448,7 @@ var Agent = class extends (_a = MastraBase) {
     if (llm.getModel().specificationVersion === "v2") {
       const result = llm.stream({
         runtimeContext,
+        tracingContext,
         messages: [{
           role: "system",
           content: systemInstructions
@@ -9389,6 +9461,7 @@ var Agent = class extends (_a = MastraBase) {
     } else {
       const result = await llm.__text({
         runtimeContext,
+        tracingContext,
         messages: [{
           role: "system",
           content: systemInstructions
@@ -9406,7 +9479,7 @@ var Agent = class extends (_a = MastraBase) {
     const userMessages = messages.filter(message => message.role === "user");
     return userMessages.at(-1);
   }
-  async genTitle(userMessage, runtimeContext, model, instructions) {
+  async genTitle(userMessage, runtimeContext, tracingContext, model, instructions) {
     try {
       if (userMessage) {
         const normMessage = new MessageList().add(userMessage, "user").get.all.ui().at(-1);
@@ -9414,6 +9487,7 @@ var Agent = class extends (_a = MastraBase) {
           return await this.generateTitleFromUserMessage({
             message: normMessage,
             runtimeContext,
+            tracingContext,
            model,
            instructions
          });
@@ -9505,8 +9579,8 @@ var Agent = class extends (_a = MastraBase) {
     resourceId,
     threadId,
     runtimeContext,
-
-
+    tracingContext,
+    mastraProxy
   }) {
     let convertedMemoryTools = {};
     const memory = await this.getMemory({
@@ -9529,10 +9603,10 @@ var Agent = class extends (_a = MastraBase) {
         memory,
         agentName: this.name,
         runtimeContext,
+        tracingContext,
         model: typeof this.model === "function" ? await this.getModel({
           runtimeContext
-        }) : this.model
-        agentAISpan
+        }) : this.model
       };
       const convertedToCoreTool = makeCoreTool(toolObj, options);
       convertedMemoryTools[toolName] = convertedToCoreTool;
@@ -9542,6 +9616,7 @@ var Agent = class extends (_a = MastraBase) {
   }
   async __runInputProcessors({
     runtimeContext,
+    tracingContext,
     messageList,
     inputProcessorOverrides
   }) {
@@ -9552,13 +9627,13 @@ var Agent = class extends (_a = MastraBase) {
       runtimeContext,
       inputProcessorOverrides
     });
-    const tracedRunInputProcessors = messageList2 => {
+    const tracedRunInputProcessors = (messageList2, tracingContext2) => {
       const telemetry = this.#mastra?.getTelemetry();
       if (!telemetry) {
-        return runner.runInputProcessors(messageList2, void 0);
+        return runner.runInputProcessors(messageList2, tracingContext2, void 0);
       }
       return telemetry.traceMethod(async data => {
-        return runner.runInputProcessors(data.messageList, telemetry);
+        return runner.runInputProcessors(data.messageList, tracingContext2, telemetry);
       }, {
         spanName: `agent.${this.name}.inputProcessors`,
         attributes: {
@@ -9571,7 +9646,7 @@ var Agent = class extends (_a = MastraBase) {
       });
     };
     try {
-      messageList = await tracedRunInputProcessors(messageList);
+      messageList = await tracedRunInputProcessors(messageList, tracingContext);
     } catch (error) {
       if (error instanceof TripWire) {
         tripwireTriggered = true;
@@ -9594,6 +9669,7 @@ var Agent = class extends (_a = MastraBase) {
   }
   async __runOutputProcessors({
     runtimeContext,
+    tracingContext,
     messageList,
     outputProcessorOverrides
   }) {
@@ -9604,13 +9680,13 @@ var Agent = class extends (_a = MastraBase) {
       runtimeContext,
      outputProcessorOverrides
     });
-    const tracedRunOutputProcessors = messageList2 => {
+    const tracedRunOutputProcessors = (messageList2, tracingContext2) => {
      const telemetry = this.#mastra?.getTelemetry();
      if (!telemetry) {
-        return runner.runOutputProcessors(messageList2, void 0);
+        return runner.runOutputProcessors(messageList2, tracingContext2, void 0);
      }
      return telemetry.traceMethod(async data => {
-        return runner.runOutputProcessors(data.messageList, telemetry);
+        return runner.runOutputProcessors(data.messageList, tracingContext2, telemetry);
      }, {
        spanName: `agent.${this.name}.outputProcessors`,
        attributes: {
@@ -9623,7 +9699,7 @@ var Agent = class extends (_a = MastraBase) {
      });
    };
    try {
-      messageList = await tracedRunOutputProcessors(messageList);
+      messageList = await tracedRunOutputProcessors(messageList, tracingContext);
    } catch (e) {
      if (e instanceof TripWire) {
        tripwireTriggered = true;
@@ -9662,13 +9738,13 @@ var Agent = class extends (_a = MastraBase) {
     }).then(r => r.messagesV2);
   }
   async getAssignedTools({
-    runtimeContext,
     runId,
     resourceId,
     threadId,
+    runtimeContext,
+    tracingContext,
     mastraProxy,
-    writableStream
-    agentAISpan
+    writableStream
   }) {
     let toolsForRequest = {};
     this.logger.debug(`[Agents:${this.name}] - Assembling assigned tools`, {
@@ -9697,11 +9773,11 @@ var Agent = class extends (_a = MastraBase) {
         memory,
         agentName: this.name,
         runtimeContext,
+        tracingContext,
         model: typeof this.model === "function" ? await this.getModel({
           runtimeContext
         }) : this.model,
-        writableStream
-        agentAISpan
+        writableStream
       };
       return [k, makeCoreTool(tool, options)];
     }));
@@ -9717,8 +9793,8 @@ var Agent = class extends (_a = MastraBase) {
     resourceId,
     toolsets,
     runtimeContext,
-
-
+    tracingContext,
+    mastraProxy
   }) {
     let toolsForRequest = {};
     const memory = await this.getMemory({
@@ -9742,10 +9818,10 @@ var Agent = class extends (_a = MastraBase) {
         memory,
         agentName: this.name,
         runtimeContext,
+        tracingContext,
         model: typeof this.model === "function" ? await this.getModel({
           runtimeContext
-        }) : this.model
-        agentAISpan
+        }) : this.model
       };
       const convertedToCoreTool = makeCoreTool(toolObj, options, "toolset");
       toolsForRequest[toolName] = convertedToCoreTool;
@@ -9759,9 +9835,9 @@ var Agent = class extends (_a = MastraBase) {
     threadId,
     resourceId,
     runtimeContext,
+    tracingContext,
     mastraProxy,
-    clientTools
-    agentAISpan
+    clientTools
   }) {
     let toolsForRequest = {};
     const memory = await this.getMemory({
@@ -9787,10 +9863,10 @@ var Agent = class extends (_a = MastraBase) {
         memory,
         agentName: this.name,
         runtimeContext,
+        tracingContext,
         model: typeof this.model === "function" ? await this.getModel({
           runtimeContext
-        }) : this.model
-        agentAISpan
+        }) : this.model
       };
       const convertedToCoreTool = makeCoreTool(rest, options, "client-tool");
       toolsForRequest[toolName] = convertedToCoreTool;
@@ -9803,7 +9879,7 @@ var Agent = class extends (_a = MastraBase) {
     threadId,
     resourceId,
     runtimeContext,
-
+    tracingContext
   }) {
     let convertedWorkflowTools = {};
     const workflows = await this.getWorkflows({
@@ -9820,7 +9896,7 @@ var Agent = class extends (_a = MastraBase) {
         // manually wrap workflow tools with ai tracing, so that we can pass the
         // current tool span onto the workflow to maintain continuity of the trace
         execute: async args => {
-          const toolAISpan =
+          const toolAISpan = tracingContext.currentSpan?.createChildSpan({
             type: "tool_call" /* TOOL_CALL */,
             name: `tool: '${workflowName}'`,
             input: args,
@@ -9842,7 +9918,9 @@ var Agent = class extends (_a = MastraBase) {
             const result = await run.start({
               inputData: args,
               runtimeContext,
-
+              tracingContext: {
+                currentSpan: toolAISpan
+              }
             });
             toolAISpan?.end({
               output: result
@@ -9882,8 +9960,8 @@ var Agent = class extends (_a = MastraBase) {
     resourceId,
     runId,
     runtimeContext,
-
-
+    tracingContext,
+    writableStream
   }) {
     let mastraProxy = void 0;
     const logger = this.logger;
@@ -9898,42 +9976,42 @@ var Agent = class extends (_a = MastraBase) {
       resourceId,
       threadId,
       runtimeContext,
+      tracingContext,
       mastraProxy,
-      writableStream
-      agentAISpan
+      writableStream
     });
     const memoryTools = await this.getMemoryTools({
       runId,
       resourceId,
       threadId,
       runtimeContext,
-
-
+      tracingContext,
+      mastraProxy
     });
     const toolsetTools = await this.getToolsets({
       runId,
       resourceId,
       threadId,
       runtimeContext,
+      tracingContext,
       mastraProxy,
-      toolsets
-      agentAISpan
+      toolsets
     });
     const clientSideTools = await this.getClientTools({
       runId,
       resourceId,
       threadId,
       runtimeContext,
+      tracingContext,
       mastraProxy,
-      clientTools
-      agentAISpan
+      clientTools
     });
     const workflowTools = await this.getWorkflowTools({
       runId,
       resourceId,
       threadId,
       runtimeContext,
-
+      tracingContext
     });
     return this.formatTools({
       ...assignedTools,
@@ -10018,7 +10096,7 @@ var Agent = class extends (_a = MastraBase) {
     runtimeContext,
     saveQueueManager,
     writableStream,
-
+    tracingContext
   }) {
     return {
       before: async () => {
@@ -10027,8 +10105,10 @@ var Agent = class extends (_a = MastraBase) {
             runId
           });
         }
-        const
+        const agentAISpan = getOrCreateSpan({
+          type: "agent_run" /* AGENT_RUN */,
           name: `agent run: '${this.id}'`,
+          input: messages,
           attributes: {
             agentId: this.id,
             instructions,
@@ -10038,28 +10118,13 @@ var Agent = class extends (_a = MastraBase) {
             runId,
             resourceId,
             threadId: thread ? thread.id : void 0
-          }
+          },
+          tracingContext,
+          runtimeContext
+        });
+        const innerTracingContext = {
+          currentSpan: agentAISpan
         };
-        let agentAISpan;
-        if (currentSpan) {
-          agentAISpan = currentSpan.createChildSpan({
-            type: "agent_run" /* AGENT_RUN */,
-            ...spanArgs
-          });
-        } else {
-          const aiTracing = getSelectedAITracing({
-            runtimeContext
-          });
-          if (aiTracing) {
-            agentAISpan = aiTracing.startSpan({
-              type: "agent_run" /* AGENT_RUN */,
-              ...spanArgs,
-              startOptions: {
-                runtimeContext
-              }
-            });
-          }
-        }
         const memory = await this.getMemory({
           runtimeContext
         });
@@ -10083,8 +10148,8 @@ var Agent = class extends (_a = MastraBase) {
  resourceId,
  runId,
  runtimeContext,
-
-
+ tracingContext: innerTracingContext,
+ writableStream
  });
  const messageList = new MessageList({
  threadId,
@@ -10103,6 +10168,7 @@ var Agent = class extends (_a = MastraBase) {
  tripwireReason: tripwireReason2
  } = await this.__runInputProcessors({
  runtimeContext,
+ tracingContext: innerTracingContext,
  messageList
  });
  return {
@@ -10235,6 +10301,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  tripwireReason
  } = await this.__runInputProcessors({
  runtimeContext,
+ tracingContext: innerTracingContext,
  messageList
  });
  const systemMessage = [...messageList.getSystemMessages(), ...messageList.getSystemMessages("memory")]?.map(m => m.content)?.join(`
@@ -10299,17 +10366,6 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  };
  })
  };
- agentAISpan?.end({
- output: {
- text: result?.text,
- object: result?.object
- },
- metadata: {
- usage: result?.usage,
- toolResults: result?.toolResults,
- toolCalls: result?.toolCalls
- }
- });
  this.logger.debug(`[Agent:${this.name}] - Post processing LLM response`, {
  runId: runId2,
  result: resToLog,
@@ -10371,7 +10427,9 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  instructions: titleInstructions
  } = this.resolveTitleGenerationConfig(config?.threads?.generateTitle);
  if (shouldGenerate && userMessage) {
- promises.push(this.genTitle(userMessage, runtimeContext,
+ promises.push(this.genTitle(userMessage, runtimeContext, {
+ currentSpan: agentAISpan
+ }, titleModel, titleInstructions).then(title => {
  if (title) {
  return memory.createThread({
  threadId: thread2.id,
@@ -10388,6 +10446,9 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  } catch (e) {
  await saveQueueManager.flushMessages(messageList, threadId, memoryConfig2);
  if (e instanceof MastraError) {
+ agentAISpan?.error({
+ error: e
+ });
  throw e;
  }
  const mastraError = new MastraError({
@@ -10403,6 +10464,9 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  }, e);
  this.logger.trackException(mastraError);
  this.logger.error(mastraError.toString());
+ agentAISpan?.error({
+ error: mastraError
+ });
  throw mastraError;
  }
  } else {
@@ -10427,6 +10491,9 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  outputText,
  instructions,
  runtimeContext,
+ tracingContext: {
+ currentSpan: agentAISpan
+ },
  structuredOutput,
  overrideScorers,
  threadId,
@@ -10441,6 +10508,17 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  },
  output: messageList.getPersisted.response.ui()
  };
+ agentAISpan?.end({
+ output: {
+ text: result?.text,
+ object: result?.object
+ },
+ metadata: {
+ usage: result?.usage,
+ toolResults: result?.toolResults,
+ toolCalls: result?.toolCalls
+ }
+ });
  return {
  scoringData
  };
@@ -10453,6 +10531,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  outputText,
  instructions,
  runtimeContext,
+ tracingContext,
  structuredOutput,
  overrideScorers,
  threadId,
@@ -10474,9 +10553,15 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  });
  }
  }
-
-
-
+ let scorers = {};
+ try {
+ scorers = overrideScorers ? this.resolveOverrideScorerReferences(overrideScorers) : await this.getScorers({
+ runtimeContext
+ });
+ } catch (e) {
+ this.logger.warn(`[Agent:${this.name}] - Failed to get scorers: ${e}`);
+ return;
+ }
  const scorerInput = {
  inputMessages: messageList.getPersisted.input.ui(),
  rememberedMessages: messageList.getPersisted.remembered.ui(),
@@ -10487,12 +10572,13 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  if (Object.keys(scorers || {}).length > 0) {
  for (const [id, scorerObject] of Object.entries(scorers)) {
  runScorer({
- scorerId: id,
+ scorerId: overrideScorers ? scorerObject.scorer.name : id,
  scorerObject,
  runId,
  input: scorerInput,
  output: scorerOutput,
  runtimeContext,
+ tracingContext,
  entity: {
  id: this.id,
  name: this.name
@@ -10506,6 +10592,41 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  }
  }
  }
+ resolveOverrideScorerReferences(overrideScorers) {
+ const result = {};
+ for (const [id, scorerObject] of Object.entries(overrideScorers)) {
+ if (typeof scorerObject.scorer === "string") {
+ try {
+ if (!this.#mastra) {
+ throw new MastraError({
+ id: "AGENT_GENEREATE_SCORER_NOT_FOUND",
+ domain: "AGENT" /* AGENT */,
+ category: "USER" /* USER */,
+ text: `Mastra not found when fetching scorer. Make sure to fetch agent from mastra.getAgent()`
+ });
+ }
+ const scorer = this.#mastra.getScorerByName(scorerObject.scorer);
+ result[id] = {
+ scorer,
+ sampling: scorerObject.sampling
+ };
+ } catch (error) {
+ this.logger.warn(`[Agent:${this.name}] - Failed to get scorer ${scorerObject.scorer}: ${error}`);
+ }
+ } else {
+ result[id] = scorerObject;
+ }
+ }
+ if (Object.keys(result).length === 0) {
+ throw new MastraError({
+ id: "AGENT_GENEREATE_SCORER_NOT_FOUND",
+ domain: "AGENT" /* AGENT */,
+ category: "USER" /* USER */,
+ text: `No scorers found in overrideScorers`
+ });
+ }
+ return result;
+ }
  async prepareLLMOptions(messages, options) {
  const {
  context,
@@ -10518,6 +10639,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  temperature,
  toolChoice = "auto",
  runtimeContext = new RuntimeContext(),
+ tracingContext,
  savePerStep,
  writableStream,
  ...args
@@ -10582,7 +10704,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  runtimeContext,
  saveQueueManager,
  writableStream,
-
+ tracingContext
  });
  let messageList;
  let thread;
@@ -10649,7 +10771,8 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  result,
  outputText,
  structuredOutput = false,
- agentAISpan
+ agentAISpan,
+ overrideScorers
  }) => {
  const afterResult = await after({
  result,
@@ -10661,7 +10784,8 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  messageList,
  structuredOutput,
  threadExists,
- agentAISpan
+ agentAISpan,
+ overrideScorers
  });
  return afterResult;
  }
@@ -10685,6 +10809,22 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  const instructions = options.instructions || (await this.getInstructions({
  runtimeContext
  }));
+ const agentAISpan = getOrCreateSpan({
+ type: "agent_run" /* AGENT_RUN */,
+ name: `agent run: '${this.id}'`,
+ input: options.messages,
+ attributes: {
+ agentId: this.id,
+ instructions
+ },
+ metadata: {
+ runId,
+ resourceId,
+ threadId: threadFromArgs ? threadFromArgs.id : void 0
+ },
+ tracingContext: options.tracingContext,
+ runtimeContext
+ });
  const activeSpan = Telemetry.getActiveSpan();
  const baggageEntries = {};
  if (threadFromArgs?.id) {
@@ -10745,7 +10885,10 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  resourceId,
  runId,
  runtimeContext,
- writableStream: options.writableStream
+ writableStream: options.writableStream,
+ tracingContext: {
+ currentSpan: agentAISpan
+ }
  });
  return {
  convertedTools
@@ -10763,7 +10906,9 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  tripwire: z$1.boolean().optional(),
  tripwireReason: z$1.string().optional()
  }),
- execute: async (
+ execute: async ({
+ tracingContext
+ }) => {
  const thread = threadFromArgs;
  const messageList = new MessageList({
  threadId: thread?.id,
@@ -10782,6 +10927,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  tripwireReason: tripwireReason2
  } = await this.__runInputProcessors({
  runtimeContext,
+ tracingContext,
  messageList
  });
  return {
@@ -10869,7 +11015,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  The following messages were remembered from a different conversation:
  <remembered_from_other_conversation>
  ${(() => {
- let
+ let result2 = ``;
  const messages = new MessageList().add(resultsFromOtherThreads, "memory").get.all.v1();
  let lastYmd = null;
  for (const msg of messages) {
@@ -10886,15 +11032,14 @@ ${(() => {
  const ampm = utcHour < 12 ? "AM" : "PM";
  const timeofday = `${hour12}:${utcMinute < 10 ? "0" : ""}${utcMinute} ${ampm}`;
  if (!lastYmd || lastYmd !== ymd) {
-
+ result2 += `
  the following messages are from ${ymd}
  `;
  }
-
- Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conversation" : ""} at ${timeofday}: ${JSON.stringify(msg)}`;
+ result2 += `Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conversation" : ""} at ${timeofday}: ${JSON.stringify(msg)}`;
  lastYmd = ymd;
  }
- return
+ return result2;
  })()}
  <end_remembered_from_other_conversation>`;
  }
@@ -10909,6 +11054,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  tripwireReason
  } = await this.__runInputProcessors({
  runtimeContext,
+ tracingContext,
  messageList
  });
  const systemMessage = [...messageList.getSystemMessages(), ...messageList.getSystemMessages("memory")]?.map(m => m.content)?.join(`
@@ -10947,7 +11093,8 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  inputSchema: z$1.any(),
  outputSchema: z$1.any(),
  execute: async ({
- inputData
+ inputData,
+ tracingContext
  }) => {
  this.logger.debug(`Starting agent ${this.name} llm stream call`, {
  runId
@@ -10957,7 +11104,8 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  }) : this.#outputProcessors : []);
  const streamResult = llm.stream({
  ...inputData,
- outputProcessors
+ outputProcessors,
+ tracingContext
  });
  if (options.format === "aisdk") {
  return streamResult.aisdk.v5;
@@ -10972,9 +11120,10 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  steps: [prepareToolsStep, prepareMemory]
  }).parallel([prepareToolsStep, prepareMemory]).map(async ({
  inputData,
- bail
+ bail,
+ tracingContext
  }) => {
- const
+ const result2 = {
  ...options,
  messages: inputData["prepare-memory-step"].messageObjects,
  tools: inputData["prepare-tools-step"].convertedTools,
@@ -11016,7 +11165,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  tripwireReason: inputData["prepare-memory-step"].tripwireReason
  })
  };
- if (
+ if (result2.tripwire) {
  const emptyResult = {
  textStream: async function* () {}(),
  fullStream: new globalThis.ReadableStream({
@@ -11037,7 +11186,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  }),
  finishReason: Promise.resolve("other"),
  tripwire: true,
- tripwireReason:
+ tripwireReason: result2.tripwireReason,
  response: {
  id: randomUUID(),
  timestamp: /* @__PURE__ */new Date(),
@@ -11060,23 +11209,27 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  return bail(emptyResult);
  }
  let effectiveOutputProcessors = options.outputProcessors || (this.#outputProcessors ? typeof this.#outputProcessors === "function" ? await this.#outputProcessors({
- runtimeContext:
+ runtimeContext: result2.runtimeContext
  }) : this.#outputProcessors : []);
  if (options.structuredOutput) {
  const structuredProcessor = new StructuredOutputProcessor(options.structuredOutput);
  effectiveOutputProcessors = effectiveOutputProcessors ? [...effectiveOutputProcessors, structuredProcessor] : [structuredProcessor];
  }
  const loopOptions = {
- messages:
- runtimeContext:
+ messages: result2.messages,
+ runtimeContext: result2.runtimeContext,
+ tracingContext: {
+ currentSpan: agentAISpan
+ },
  runId,
- toolChoice:
- tools:
- resourceId:
- threadId:
- structuredOutput:
- stopWhen:
- maxSteps:
+ toolChoice: result2.toolChoice,
+ tools: result2.tools,
+ resourceId: result2.resourceId,
+ threadId: result2.threadId,
+ structuredOutput: result2.structuredOutput,
+ stopWhen: result2.stopWhen,
+ maxSteps: result2.maxSteps,
+ providerOptions: result2.providerOptions,
  options: {
  onFinish: async payload => {
  if (payload.finishReason === "error") {
@@ -11094,16 +11247,18 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  result: payload,
  outputText,
  instructions,
- thread:
- threadId:
+ thread: result2.thread,
+ threadId: result2.threadId,
  resourceId,
  memoryConfig,
  runtimeContext,
+ tracingContext,
  runId,
  messageList,
  threadExists: inputData["prepare-memory-step"].threadExists,
  structuredOutput: !!options.output,
- saveQueueManager
+ saveQueueManager,
+ overrideScorers: options.scorers
  });
  } catch (e) {
  this.logger.error("Error saving memory on finish", {
@@ -11112,11 +11267,12 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  });
  }
  await options?.onFinish?.({
- ...
- runId
+ ...result2,
+ runId,
+ messages: messageList.get.response.aiV5.model()
  });
  },
- onStepFinish:
+ onStepFinish: result2.onStepFinish
  },
  output: options.output,
  outputProcessors: effectiveOutputProcessors,
@@ -11128,7 +11284,15 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  return loopOptions;
  }).then(streamStep).commit();
  const run = await executionWorkflow.createRunAsync();
-
+ const result = await run.start({
+ tracingContext: {
+ currentSpan: agentAISpan
+ }
+ });
+ agentAISpan?.end({
+ output: result
+ });
+ return result;
  }
  async #executeOnFinish({
  result,
@@ -11139,11 +11303,13 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  memoryConfig,
  outputText,
  runtimeContext,
+ tracingContext,
  runId,
  messageList,
  threadExists,
  structuredOutput = false,
- saveQueueManager
+ saveQueueManager,
+ overrideScorers
  }) {
  const resToLog = {
  text: result?.text,
@@ -11218,7 +11384,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  instructions: titleInstructions
  } = this.resolveTitleGenerationConfig(config?.threads?.generateTitle);
  if (shouldGenerate && userMessage) {
- promises.push(this.genTitle(userMessage, runtimeContext, titleModel, titleInstructions).then(title => {
+ promises.push(this.genTitle(userMessage, runtimeContext, tracingContext, titleModel, titleInstructions).then(title => {
  if (title) {
  return memory.createThread({
  threadId: thread.id,
@@ -11274,7 +11440,9 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  outputText,
  instructions,
  runtimeContext,
-
+ tracingContext,
+ structuredOutput,
+ overrideScorers
  });
  }
  async generateVNext(messages, options) {
@@ -11334,6 +11502,10 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  return result.result;
  }
  async generate(messages, generateOptions = {}) {
+ this.logger.warn("Deprecation NOTICE:\nGenerate method will switch to use generateVNext implementation September 16th. Please use generateLegacy if you don't want to upgrade just yet.");
+ return this.generateLegacy(messages, generateOptions);
+ }
+ async generateLegacy(messages, generateOptions = {}) {
  const defaultGenerateOptions = await this.getDefaultGenerateOptions({
  runtimeContext: generateOptions.runtimeContext
  });
@@ -11401,6 +11573,9 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  agentAISpan,
  ...llmOptions
  } = beforeResult;
+ const tracingContext = {
+ currentSpan: agentAISpan
+ };
  let finalOutputProcessors = mergedGenerateOptions.outputProcessors;
  if (mergedGenerateOptions.structuredOutput) {
  const structuredProcessor = new StructuredOutputProcessor(mergedGenerateOptions.structuredOutput);
@@ -11409,11 +11584,12 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  if (!output || experimental_output) {
  const result2 = await llmToUse.__text({
  ...llmOptions,
-
+ tracingContext,
  experimental_output
  });
  const outputProcessorResult2 = await this.__runOutputProcessors({
  runtimeContext: mergedGenerateOptions.runtimeContext || new RuntimeContext(),
+ tracingContext,
  outputProcessorOverrides: finalOutputProcessors,
  messageList: new MessageList({
  threadId: llmOptions.threadId || "",
@@ -11485,12 +11661,13 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  }
  }
  }
+ const overrideScorers = mergedGenerateOptions.scorers;
  const afterResult2 = await after({
  result: result2,
  outputText: newText2,
  agentAISpan,
- ...(
- overrideScorers
+ ...(overrideScorers ? {
+ overrideScorers
  } : {})
  });
  if (generateOptions.returnScorerData) {
@@ -11500,12 +11677,13 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  }
  const result = await llmToUse.__textObject({
  ...llmOptions,
-
+ tracingContext,
  structuredOutput: output
  });
  const outputText = JSON.stringify(result.object);
  const outputProcessorResult = await this.__runOutputProcessors({
  runtimeContext: mergedGenerateOptions.runtimeContext || new RuntimeContext(),
+ tracingContext,
  messageList: new MessageList({
  threadId: llmOptions.threadId || "",
  resourceId: llmOptions.resourceId || ""
@@ -11574,6 +11752,10 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  return result;
  }
  async stream(messages, streamOptions = {}) {
+ this.logger.warn("Deprecation NOTICE:\nStream method will switch to use streamVNext implementation September 16th. Please use streamLegacy if you don't want to upgrade just yet.");
+ return this.streamLegacy(messages, streamOptions);
+ }
+ async streamLegacy(messages, streamOptions = {}) {
  const defaultStreamOptions = await this.getDefaultStreamOptions({
  runtimeContext: streamOptions.runtimeContext
  });
@@ -11674,6 +11856,10 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  agentAISpan,
  ...llmOptions
  } = beforeResult;
+ const overrideScorers = mergedStreamOptions.scorers;
+ const tracingContext = {
+ currentSpan: agentAISpan
+ };
  if (!output || experimental_output) {
  this.logger.debug(`Starting agent ${this.name} llm stream call`, {
  runId
@@ -11681,14 +11867,17 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  const streamResult = llm.__stream({
  ...llmOptions,
  experimental_output,
-
+ tracingContext,
  onFinish: async result => {
  try {
  const outputText = result.text;
  await after({
  result,
  outputText,
- agentAISpan
+ agentAISpan,
+ ...(overrideScorers ? {
+ overrideScorers
+ } : {})
  });
  } catch (e) {
  this.logger.error("Error saving memory on finish", {
@@ -11710,7 +11899,7 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  });
  return llm.__streamObject({
  ...llmOptions,
-
+ tracingContext,
  onFinish: async result => {
  try {
  const outputText = JSON.stringify(result.object);
@@ -11718,7 +11907,10 @@ Message ${msg.threadId && msg.threadId !== threadObject.id ? "from previous conv
  result,
  outputText,
  structuredOutput: true,
- agentAISpan
+ agentAISpan,
+ ...(overrideScorers ? {
+ overrideScorers
+ } : {})
  });
  } catch (e) {
  this.logger.error("Error saving memory on finish", {
@@ -12036,6 +12228,21 @@ var ExecutionEngine = class extends MastraBase {

  // src/workflows/default.ts
  var DefaultExecutionEngine = class extends ExecutionEngine {
+ /**
+ * Preprocesses an error caught during workflow execution.
+ *
+ * - Wraps a non-MastraError exception
+ * - Logs error details
+ */
+ preprocessExecutionError(e, errorDefinition, logPrefix) {
+ const error = e instanceof MastraError ? e : new MastraError(errorDefinition, e);
+ if (!(e instanceof MastraError) && e instanceof Error && e.stack) {
+ error.stack = e.stack;
+ }
+ this.logger?.trackException(error);
+ this.logger?.error(logPrefix + error?.stack);
+ return error;
+ }
  /**
  * The runCounts map is used to keep track of the run count for each step.
  * The step id is used as the key and the run count is the value.
@@ -12132,7 +12339,7 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
  resume,
  retryConfig,
  runtimeContext,
-
+ tracingContext,
  disableScorers
  } = params;
  const {
@@ -12141,33 +12348,16 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
  } = retryConfig ?? {};
  const steps = graph.steps;
  this.runCounts.clear();
- const
+ const workflowAISpan = getOrCreateSpan({
+ type: "workflow_run" /* WORKFLOW_RUN */,
  name: `workflow run: '${workflowId}'`,
  input,
  attributes: {
  workflowId
- }
-
-
-
- workflowAISpan = currentSpan.createChildSpan({
- type: "workflow_run" /* WORKFLOW_RUN */,
- ...spanArgs
- });
- } else {
- const aiTracing = getSelectedAITracing({
- runtimeContext
- });
- if (aiTracing) {
- workflowAISpan = aiTracing.startSpan({
- type: "workflow_run" /* WORKFLOW_RUN */,
- ...spanArgs,
- startOptions: {
- runtimeContext
- }
- });
- }
- }
+ },
+ tracingContext,
+ runtimeContext
+ });
  if (steps.length === 0) {
  const empty_graph_error = new MastraError({
  id: "WORKFLOW_EXECUTE_EMPTY_GRAPH",
@@ -12260,7 +12450,7 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
  return result2;
  }
  } catch (e) {
- const error = e
+ const error = this.preprocessExecutionError(e, {
  id: "WORKFLOW_ENGINE_STEP_EXECUTION_FAILED",
  domain: "MASTRA_WORKFLOW" /* MASTRA_WORKFLOW */,
  category: "USER" /* USER */,
@@ -12268,9 +12458,7 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
  workflowId,
  runId
  }
- },
- this.logger?.trackException(error);
- this.logger?.error(`Error executing step: ${error?.stack}`);
+ }, "Error executing step: ");
  const result2 = await this.fmtReturnValue(executionSpan, params.emitter, stepResults, lastOutput.result, e);
  await this.persistStepUpdate({
  workflowId,
@@ -12584,11 +12772,15 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
  const stepAISpan = tracingContext.currentSpan?.createChildSpan({
  name: `workflow step: '${step.id}'`,
  type: "workflow_step" /* WORKFLOW_STEP */,
-
+
+ //input: prevOutput,
  attributes: {
  stepId: step.id
  }
  });
+ const innerTracingContext = {
+ currentSpan: stepAISpan
+ };
  if (!skipEmits) {
  await emitter.emit("watch", {
  type: "watch",
@@ -12664,16 +12856,12 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
  const result = await runStep({
  runId,
  workflowId,
- mastra: this.mastra ? wrapMastra(this.mastra,
- currentSpan: stepAISpan
- }) : void 0,
+ mastra: this.mastra ? wrapMastra(this.mastra, innerTracingContext) : void 0,
  runtimeContext,
  inputData: prevOutput,
  runCount: this.getOrGenerateRunCount(step.id),
  resumeData: resume?.steps[0] === step.id ? resume?.resumePayload : void 0,
- tracingContext:
- currentSpan: stepAISpan
- },
+ tracingContext: innerTracingContext,
  getInitData: () => stepResults?.input,
  getStepResult: step2 => {
  if (!step2?.id) {
@@ -12728,6 +12916,7 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
  workflowId,
  stepId: step.id,
  runtimeContext,
+ tracingContext: innerTracingContext,
  disableScorers
  });
  }
@@ -12752,7 +12941,7 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
  }
  break;
  } catch (e) {
- const error = e
+ const error = this.preprocessExecutionError(e, {
  id: "WORKFLOW_STEP_INVOKE_FAILED",
  domain: "MASTRA_WORKFLOW" /* MASTRA_WORKFLOW */,
  category: "USER" /* USER */,
@@ -12761,9 +12950,7 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
  runId,
  stepId: step.id
  }
- },
- this.logger.trackException(error);
- this.logger.error(`Error executing step ${step.id}: ` + error?.stack);
+ }, `Error executing step ${step.id}: `);
  stepAISpan?.error({
  error,
  attributes: {
@@ -12850,6 +13037,7 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
  workflowId,
  stepId,
  runtimeContext,
+ tracingContext,
  disableScorers
  }) {
  let scorersToUse = scorers;
@@ -12859,18 +13047,16 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
  runtimeContext
  });
  } catch (error) {
-
+ this.preprocessExecutionError(error, {
  id: "WORKFLOW_FAILED_TO_FETCH_SCORERS",
- domain: "MASTRA_WORKFLOW"
+ domain: "MASTRA_WORKFLOW" /* MASTRA_WORKFLOW */,
  category: "USER" /* USER */,
  details: {
  runId,
  workflowId,
  stepId
  }
- },
- this.logger.trackException(mastraError);
- this.logger.error(mastraError.toString(), error);
+ }, "Error fetching scorers: ");
  }
  }
  if (!disableScorers && scorersToUse && Object.keys(scorersToUse || {}).length > 0) {
@@ -12882,6 +13068,7 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
  input: [input],
  output,
  runtimeContext,
+ tracingContext,
  entity: {
  id: workflowId,
  stepId
@@ -13063,7 +13250,7 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
  });
  return result ? index : null;
  } catch (e) {
- const error = e
+ const error = this.preprocessExecutionError(e, {
  id: "WORKFLOW_CONDITION_EVALUATION_FAILED",
  domain: "MASTRA_WORKFLOW" /* MASTRA_WORKFLOW */,
  category: "USER" /* USER */,
@@ -13071,9 +13258,7 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
  workflowId,
  runId
  }
- },
- this.logger.trackException(error);
- this.logger.error("Error evaluating condition: " + error?.stack);
+ }, "Error evaluating condition: ");
  evalSpan?.error({
  error,
  attributes: {
@@ -13253,7 +13438,7 @@ var DefaultExecutionEngine = class extends ExecutionEngine {
  const evalSpan = loopSpan?.createChildSpan({
  type: "workflow_conditional_eval" /* WORKFLOW_CONDITIONAL_EVAL */,
  name: `condition: ${entry.loopType}`,
- input: result.output,
+ input: selectFields(result.output, ["stepResult", "output.text", "output.object", "messages"]),
  attributes: {
  conditionIndex: iteration
  }
@@ -14751,7 +14936,7 @@ var Workflow = class extends MastraBase {
  cleanup: () => this.#runs.delete(runIdToUse)
  });
  this.#runs.set(runIdToUse, run);
- this.mastra?.getLogger().warn("createRun()
+ this.mastra?.getLogger().warn("createRun() will be removed on September 16th. Use createRunAsync() instead.");
  return run;
  }
  /**
@@ -14836,7 +15021,7 @@ var Workflow = class extends MastraBase {
  abort,
  abortSignal,
  runCount,
-
+ tracingContext
  }) {
  this.__registerMastra(mastra);
  const isResume = !!(resume?.steps && resume.steps.length > 0);
@@ -14872,11 +15057,11 @@ var Workflow = class extends MastraBase {
  resumeData,
  step: resume.steps,
  runtimeContext,
-
+ tracingContext
  }) : await run.start({
  inputData,
  runtimeContext,
-
+ tracingContext
  });
  unwatch();
  unwatchV2();
@@ -15042,7 +15227,7 @@ var Run = class {
  inputData,
  runtimeContext,
  writableStream,
-
+ tracingContext
  }) {
  const result = await this.executionEngine.execute({
  workflowId: this.workflowId,
@@ -15069,7 +15254,7 @@ var Run = class {
  runtimeContext: runtimeContext ?? new RuntimeContext(),
  abortController: this.abortController,
  writableStream,
-
+ tracingContext
  });
  if (result.status !== "suspended") {
  this.cleanup?.();
@@ -15406,7 +15591,7 @@ var Run = class {
  },
  runtimeContext: runtimeContextToUse,
  abortController: this.abortController,
-
+ tracingContext: params.tracingContext
  }).then(result => {
  if (result.status !== "suspended") {
  this.closeStreamAction?.().catch(() => {});
@@ -15467,5 +15652,5 @@ function deepMergeWorkflowState(a, b) {
  }

  export { AISDKV5OutputStream, Agent, DefaultExecutionEngine, ExecutionEngine, LanguageDetector, LanguageDetectorInputProcessor, LegacyStep, LegacyWorkflow, MastraModelOutput, ModerationInputProcessor, ModerationProcessor, PIIDetector, PIIDetectorInputProcessor, PromptInjectionDetector, PromptInjectionDetectorInputProcessor, Run, StructuredOutputProcessor, TripWire, UnicodeNormalizer, UnicodeNormalizerInputProcessor, WhenConditionReturnValue, Workflow, agentToStep, cloneStep, cloneWorkflow, createStep, createWorkflow, getActivePathsAndStatus, getResultActivePaths, getStepResult, getSuspendedPaths, isAgent, isConditionalKey, isErrorEvent, isFinalState, isLimboState, isTransitionEvent, isVariableReference, isWorkflow, loop, mapVariable, mergeChildValue, recursivelyCheckForFinalState, resolveVariables, updateStepInHierarchy, workflowToStep };
- //# sourceMappingURL=chunk-
- //# sourceMappingURL=chunk-
+ //# sourceMappingURL=chunk-P2IJ74UW.js.map
+ //# sourceMappingURL=chunk-P2IJ74UW.js.map