@mastra/core 0.17.0-alpha.7 → 0.17.0
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as published in those registries.
- package/CHANGELOG.md +130 -0
- package/dist/agent/agent.d.ts.map +1 -1
- package/dist/agent/index.cjs +11 -11
- package/dist/agent/index.js +2 -2
- package/dist/agent/input-processor/index.cjs +6 -6
- package/dist/agent/input-processor/index.js +1 -1
- package/dist/ai-tracing/index.cjs +32 -32
- package/dist/ai-tracing/index.js +1 -1
- package/dist/ai-tracing/types.d.ts +5 -1
- package/dist/ai-tracing/types.d.ts.map +1 -1
- package/dist/{chunk-725O36VY.cjs → chunk-2TQHRYRK.cjs} +4 -4
- package/dist/{chunk-725O36VY.cjs.map → chunk-2TQHRYRK.cjs.map} +1 -1
- package/dist/{chunk-YISIO2V3.cjs → chunk-5OH5VHX5.cjs} +4 -4
- package/dist/{chunk-YISIO2V3.cjs.map → chunk-5OH5VHX5.cjs.map} +1 -1
- package/dist/{chunk-CWTKL7YK.cjs → chunk-6TQSVAUZ.cjs} +7 -7
- package/dist/{chunk-CWTKL7YK.cjs.map → chunk-6TQSVAUZ.cjs.map} +1 -1
- package/dist/{chunk-MVBGQHNV.cjs → chunk-B2ZNJ7EW.cjs} +6 -6
- package/dist/{chunk-MVBGQHNV.cjs.map → chunk-B2ZNJ7EW.cjs.map} +1 -1
- package/dist/{chunk-L3EC4ERG.js → chunk-B6WB5A6U.js} +3 -3
- package/dist/{chunk-L3EC4ERG.js.map → chunk-B6WB5A6U.js.map} +1 -1
- package/dist/{chunk-DH34KXOJ.js → chunk-BJZWMVNW.js} +3 -3
- package/dist/{chunk-DH34KXOJ.js.map → chunk-BJZWMVNW.js.map} +1 -1
- package/dist/{chunk-K72D7BXB.cjs → chunk-BL44ZRZT.cjs} +6 -6
- package/dist/{chunk-K72D7BXB.cjs.map → chunk-BL44ZRZT.cjs.map} +1 -1
- package/dist/{chunk-4LANQQSH.js → chunk-CD4WDS4V.js} +4 -4
- package/dist/{chunk-4LANQQSH.js.map → chunk-CD4WDS4V.js.map} +1 -1
- package/dist/{chunk-6WEZASFM.cjs → chunk-DOARUBVA.cjs} +41 -41
- package/dist/chunk-DOARUBVA.cjs.map +1 -0
- package/dist/{chunk-OCKXNBKT.js → chunk-EMPBDL5C.js} +3 -3
- package/dist/{chunk-OCKXNBKT.js.map → chunk-EMPBDL5C.js.map} +1 -1
- package/dist/{chunk-ZFD7BUUD.js → chunk-GS2TSJOZ.js} +3 -3
- package/dist/{chunk-ZFD7BUUD.js.map → chunk-GS2TSJOZ.js.map} +1 -1
- package/dist/{chunk-GKSWFKHA.cjs → chunk-HQFRMWRB.cjs} +2 -2
- package/dist/chunk-HQFRMWRB.cjs.map +1 -0
- package/dist/{chunk-FJWPEAJE.cjs → chunk-ICE3AMTD.cjs} +4 -4
- package/dist/{chunk-FJWPEAJE.cjs.map → chunk-ICE3AMTD.cjs.map} +1 -1
- package/dist/{chunk-QBRFMSMN.js → chunk-ISQ7LHV6.js} +2 -2
- package/dist/chunk-ISQ7LHV6.js.map +1 -0
- package/dist/{chunk-SW656AYX.js → chunk-K4ROJXAQ.js} +3 -3
- package/dist/{chunk-SW656AYX.js.map → chunk-K4ROJXAQ.js.map} +1 -1
- package/dist/{chunk-36QJI2HU.js → chunk-KE5KVIPQ.js} +6 -6
- package/dist/{chunk-36QJI2HU.js.map → chunk-KE5KVIPQ.js.map} +1 -1
- package/dist/{chunk-XNZBDAEX.cjs → chunk-LIVKDHYX.cjs} +7 -7
- package/dist/{chunk-XNZBDAEX.cjs.map → chunk-LIVKDHYX.cjs.map} +1 -1
- package/dist/{chunk-OWYPTFXD.js → chunk-M65NZ6EW.js} +13 -13
- package/dist/chunk-M65NZ6EW.js.map +1 -0
- package/dist/{chunk-2B5DS7ML.js → chunk-PUQCQUH7.js} +3 -3
- package/dist/{chunk-2B5DS7ML.js.map → chunk-PUQCQUH7.js.map} +1 -1
- package/dist/{chunk-NRJQMZLW.js → chunk-RPFJBB5X.js} +3 -3
- package/dist/{chunk-NRJQMZLW.js.map → chunk-RPFJBB5X.js.map} +1 -1
- package/dist/{chunk-64ONYYLY.cjs → chunk-RUZBKNXG.cjs} +16 -16
- package/dist/{chunk-64ONYYLY.cjs.map → chunk-RUZBKNXG.cjs.map} +1 -1
- package/dist/{chunk-5QXPHYHO.js → chunk-SEZBEL3U.js} +2 -2
- package/dist/chunk-SEZBEL3U.js.map +1 -0
- package/dist/{chunk-ZBINIUJL.cjs → chunk-TKFLL33F.cjs} +2 -2
- package/dist/chunk-TKFLL33F.cjs.map +1 -0
- package/dist/{chunk-OT25QTJ2.cjs → chunk-WECJNCGT.cjs} +4 -4
- package/dist/{chunk-OT25QTJ2.cjs.map → chunk-WECJNCGT.cjs.map} +1 -1
- package/dist/index.cjs +47 -47
- package/dist/index.js +9 -9
- package/dist/llm/model/model.loop.d.ts.map +1 -1
- package/dist/loop/index.cjs +2 -2
- package/dist/loop/index.js +1 -1
- package/dist/loop/test-utils/fullStream.d.ts.map +1 -1
- package/dist/loop/test-utils/options.d.ts.map +1 -1
- package/dist/loop/test-utils/resultObject.d.ts.map +1 -1
- package/dist/loop/test-utils/streamObject.d.ts.map +1 -1
- package/dist/loop/test-utils/toUIMessageStream.d.ts.map +1 -1
- package/dist/loop/test-utils/tools.d.ts.map +1 -1
- package/dist/loop/workflows/agentic-execution/index.d.ts +36 -36
- package/dist/loop/workflows/agentic-execution/llm-execution-step.d.ts +24 -24
- package/dist/loop/workflows/agentic-execution/llm-mapping-step.d.ts +12 -12
- package/dist/loop/workflows/agentic-loop/index.d.ts +36 -36
- package/dist/loop/workflows/schema.d.ts +16 -16
- package/dist/mastra/index.cjs +2 -2
- package/dist/mastra/index.js +1 -1
- package/dist/memory/index.cjs +4 -4
- package/dist/memory/index.js +1 -1
- package/dist/network/vNext/index.cjs +29 -29
- package/dist/network/vNext/index.js +2 -2
- package/dist/processors/index.cjs +8 -8
- package/dist/processors/index.js +2 -2
- package/dist/relevance/index.cjs +4 -4
- package/dist/relevance/index.js +1 -1
- package/dist/scores/index.cjs +8 -8
- package/dist/scores/index.js +1 -1
- package/dist/server/index.cjs +2 -2
- package/dist/server/index.js +1 -1
- package/dist/storage/index.cjs +3 -3
- package/dist/storage/index.js +1 -1
- package/dist/stream/MastraAgentNetworkStream.d.ts +2 -2
- package/dist/stream/MastraWorkflowStream.d.ts +2 -2
- package/dist/stream/MastraWorkflowStream.d.ts.map +1 -1
- package/dist/stream/aisdk/v5/output.d.ts +2 -2
- package/dist/stream/base/output.d.ts +14 -7
- package/dist/stream/base/output.d.ts.map +1 -1
- package/dist/stream/index.cjs +4 -4
- package/dist/stream/index.js +1 -1
- package/dist/telemetry/index.cjs +7 -7
- package/dist/telemetry/index.js +1 -1
- package/dist/telemetry/telemetry.decorators.d.ts.map +1 -1
- package/dist/test-utils/llm-mock.cjs +2 -2
- package/dist/test-utils/llm-mock.js +1 -1
- package/dist/tts/index.cjs +2 -2
- package/dist/tts/index.js +1 -1
- package/dist/utils.cjs +17 -17
- package/dist/utils.js +1 -1
- package/dist/voice/index.cjs +4 -4
- package/dist/voice/index.js +1 -1
- package/dist/workflows/evented/index.cjs +10 -10
- package/dist/workflows/evented/index.js +1 -1
- package/dist/workflows/index.cjs +10 -10
- package/dist/workflows/index.js +1 -1
- package/dist/workflows/legacy/index.cjs +22 -22
- package/dist/workflows/legacy/index.js +1 -1
- package/package.json +3 -3
- package/dist/chunk-5QXPHYHO.js.map +0 -1
- package/dist/chunk-6WEZASFM.cjs.map +0 -1
- package/dist/chunk-GKSWFKHA.cjs.map +0 -1
- package/dist/chunk-OWYPTFXD.js.map +0 -1
- package/dist/chunk-QBRFMSMN.js.map +0 -1
- package/dist/chunk-ZBINIUJL.cjs.map +0 -1
- package/dist/stream/MastraAgentStream.d.ts +0 -25
- package/dist/stream/MastraAgentStream.d.ts.map +0 -1
@@ -1,11 +1,11 @@
-import { DefaultVoice } from './chunk-NRJQMZLW.js';
+import { DefaultVoice } from './chunk-RPFJBB5X.js';
 import { STREAM_FORMAT_SYMBOL, EMITTER_SYMBOL } from './chunk-NLNKQD2T.js';
-import { InstrumentClass, Telemetry } from './chunk-5QXPHYHO.js';
-import { MastraLLMV1 } from './chunk-OCKXNBKT.js';
-import { MessageList, DefaultGeneratedFile, DefaultGeneratedFileWithType } from './chunk-2B5DS7ML.js';
+import { InstrumentClass, Telemetry } from './chunk-SEZBEL3U.js';
+import { MastraLLMV1 } from './chunk-EMPBDL5C.js';
+import { MessageList, DefaultGeneratedFile, DefaultGeneratedFileWithType } from './chunk-PUQCQUH7.js';
 import { executeHook } from './chunk-TTELJD4F.js';
 import { zodToJsonSchema } from './chunk-PJKCPRYF.js';
-import { getValidTraceId, wrapMastra, selectFields, getOrCreateSpan, ensureToolProperties, makeCoreTool, createMastraProxy, delay } from './chunk-QBRFMSMN.js';
+import { getValidTraceId, wrapMastra, selectFields, getOrCreateSpan, ensureToolProperties, makeCoreTool, createMastraProxy, delay } from './chunk-ISQ7LHV6.js';
 import { ToolStream } from './chunk-A3QHQYMC.js';
 import { Tool, createTool } from './chunk-RAQ4VAQ4.js';
 import { RuntimeContext } from './chunk-HLRWYUFN.js';
@@ -128,7 +128,7 @@ content:messageList.get.response.aiV5.stepContent(),request:await self.request,e
  * Resolves to the complete request sent to the model.
  */get request(){return this.#getDelayedPromise(this.#delayedPromises.request);}/**
  * Resolves to an error if an error occurred during streaming.
- */get error(){if(typeof this.#error==="object"){const error=new Error(this.#error.message);error.stack=this.#error.stack;return error;}return this.#error;}updateUsageCount(usage){if(!usage){return;}
+ */get error(){if(typeof this.#error==="object"){const error=new Error(this.#error.message);error.stack=this.#error.stack;return error;}return this.#error;}updateUsageCount(usage){if(!usage){return;}if(usage.inputTokens!==void 0){this.#usageCount.inputTokens=(this.#usageCount.inputTokens??0)+usage.inputTokens;}if(usage.outputTokens!==void 0){this.#usageCount.outputTokens=(this.#usageCount.outputTokens??0)+usage.outputTokens;}if(usage.totalTokens!==void 0){this.#usageCount.totalTokens=(this.#usageCount.totalTokens??0)+usage.totalTokens;}if(usage.reasoningTokens!==void 0){this.#usageCount.reasoningTokens=(this.#usageCount.reasoningTokens??0)+usage.reasoningTokens;}if(usage.cachedInputTokens!==void 0){this.#usageCount.cachedInputTokens=(this.#usageCount.cachedInputTokens??0)+usage.cachedInputTokens;}}populateUsageCount(usage){if(!usage){return;}if(usage.inputTokens!==void 0&&this.#usageCount.inputTokens===void 0){this.#usageCount.inputTokens=usage.inputTokens;}if(usage.outputTokens!==void 0&&this.#usageCount.outputTokens===void 0){this.#usageCount.outputTokens=usage.outputTokens;}if(usage.totalTokens!==void 0&&this.#usageCount.totalTokens===void 0){this.#usageCount.totalTokens=usage.totalTokens;}if(usage.reasoningTokens!==void 0&&this.#usageCount.reasoningTokens===void 0){this.#usageCount.reasoningTokens=usage.reasoningTokens;}if(usage.cachedInputTokens!==void 0&&this.#usageCount.cachedInputTokens===void 0){this.#usageCount.cachedInputTokens=usage.cachedInputTokens;}}async consumeStream(options){this.#streamConsumed=true;try{await consumeStream({stream:this.fullStream.pipeThrough(new TransformStream({transform(chunk,controller){controller.enqueue(chunk);}})),onError:options?.onError});}catch(error){options?.onError?.(error);}}/**
  * Returns complete output including text, usage, tool calls, and all metadata.
  */async getFullOutput(){await this.consumeStream({onError:error=>{console.error(error);throw error;}});let scoringData;if(this.#returnScorerData){scoringData={input:{inputMessages:this.messageList.getPersisted.input.ui(),rememberedMessages:this.messageList.getPersisted.remembered.ui(),systemMessages:this.messageList.getSystemMessages(),taggedSystemMessages:this.messageList.getPersisted.taggedSystemMessages},output:this.messageList.getPersisted.response.ui()};}const fullOutput={text:await this.text,usage:await this.usage,steps:await this.steps,finishReason:await this.finishReason,warnings:await this.warnings,providerMetadata:await this.providerMetadata,request:await this.request,reasoning:await this.reasoning,reasoningText:await this.reasoningText,toolCalls:await this.toolCalls,toolResults:await this.toolResults,sources:await this.sources,files:await this.files,response:await this.response,totalUsage:await this.totalUsage,object:await this.object,error:this.error,tripwire:this.#tripwire,tripwireReason:this.#tripwireReason,...(scoringData?{scoringData}:{}),traceId:this.traceId};return fullOutput;}/**
  * The tripwire flag is set when the stream is aborted due to an output processor blocking the content.
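The `+131` line shows `MastraModelOutput`'s usage accounting in full: `updateUsageCount` adds each token field to the running count only when the incoming usage object defines it, `populateUsageCount` fills in fields that are still undefined without overwriting accumulated values, and `consumeStream` drains `fullStream`, passing failures to an optional `onError` callback. A minimal TypeScript sketch of the two accounting paths (the `LanguageModelUsage` shape and the free-standing functions are illustrative assumptions; in the package these are methods on the class):

```ts
// Usage shape assumed for illustration (AI SDK v5 style field names, per the diff).
interface LanguageModelUsage {
  inputTokens?: number;
  outputTokens?: number;
  totalTokens?: number;
  reasoningTokens?: number;
  cachedInputTokens?: number;
}

const FIELDS = [
  'inputTokens',
  'outputTokens',
  'totalTokens',
  'reasoningTokens',
  'cachedInputTokens',
] as const;

// Additive path: every field the incoming usage object defines is added to the count.
function updateUsageCount(count: LanguageModelUsage, usage?: LanguageModelUsage): void {
  if (!usage) return;
  for (const field of FIELDS) {
    const value = usage[field];
    if (value !== undefined) count[field] = (count[field] ?? 0) + value;
  }
}

// Fill-if-missing path: only sets fields the count does not have yet,
// so values accumulated by updateUsageCount are never overwritten.
function populateUsageCount(count: LanguageModelUsage, usage?: LanguageModelUsage): void {
  if (!usage) return;
  for (const field of FIELDS) {
    if (usage[field] !== undefined && count[field] === undefined) {
      count[field] = usage[field];
    }
  }
}
```

The package spells each field out rather than looping, but the effect is the same: one additive path for incremental updates and one back-fill path that cannot double-count.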
@@ -170,13 +170,13 @@ content:messageList.get.response.aiV5.stepContent(),request:await self.request,e
  * ```
  */get object(){if(!this.processorRunner&&!this.#options.output){this.#delayedPromises.object.resolve(void 0);}return this.#getDelayedPromise(this.#delayedPromises.object);}// Internal methods for immediate values - used internally by Mastra (llm-execution.ts bailing on errors/abort signals with current state)
 // These are not part of the public API
-/** @internal */_getImmediateToolCalls(){return this.#toolCalls;}/** @internal */_getImmediateToolResults(){return this.#toolResults;}/** @internal */_getImmediateText(){return this.#bufferedText.join("");}/** @internal */_getImmediateUsage(){return this.#usageCount;}/** @internal */_getImmediateWarnings(){return this.#warnings;}/** @internal */_getImmediateFinishReason(){return this.#finishReason;}#getTotalUsage(){let total=
+/** @internal */_getImmediateToolCalls(){return this.#toolCalls;}/** @internal */_getImmediateToolResults(){return this.#toolResults;}/** @internal */_getImmediateText(){return this.#bufferedText.join("");}/** @internal */_getImmediateUsage(){return this.#usageCount;}/** @internal */_getImmediateWarnings(){return this.#warnings;}/** @internal */_getImmediateFinishReason(){return this.#finishReason;}#getTotalUsage(){let total=this.#usageCount.totalTokens;if(total===void 0){const input=this.#usageCount.inputTokens??0;const output=this.#usageCount.outputTokens??0;const reasoning=this.#usageCount.reasoningTokens??0;total=input+output+reasoning;}return {inputTokens:this.#usageCount.inputTokens,outputTokens:this.#usageCount.outputTokens,totalTokens:total,reasoningTokens:this.#usageCount.reasoningTokens,cachedInputTokens:this.#usageCount.cachedInputTokens};}};// src/loop/telemetry/index.ts
 var noopSpanContext={traceId:"",spanId:"",traceFlags:0};var noopSpan={spanContext(){return noopSpanContext;},setAttribute(){return this;},setAttributes(){return this;},addEvent(){return this;},addLink(){return this;},addLinks(){return this;},setStatus(){return this;},updateName(){return this;},end(){return this;},isRecording(){return false;},recordException(){return this;}};var noopTracer={startSpan(){return noopSpan;},startActiveSpan(name,arg1,arg2,arg3){if(typeof arg1==="function"){return arg1(noopSpan);}if(typeof arg2==="function"){return arg2(noopSpan);}if(typeof arg3==="function"){return arg3(noopSpan);}}};// src/loop/telemetry/index.ts
 function getTracer({isEnabled=false,tracer}={}){if(!isEnabled){return noopTracer;}if(tracer){return tracer;}return trace.getTracer("mastra");}function assembleOperationName({operationId,telemetry}){return {"mastra.operationId":operationId,"operation.name":`${operationId}${telemetry?.functionId!=null?` ${telemetry.functionId}`:""}`,...(telemetry?.functionId?{"resource.name":telemetry?.functionId}:{})};}function getTelemetryAttributes({model,settings,telemetry,headers}){return {"aisdk.model.provider":model.provider,"aisdk.model.id":model.modelId,// settings:
 ...Object.entries(settings).reduce((attributes,[key,value])=>{attributes[`stream.settings.${key}`]=value;return attributes;},{}),// add metadata as attributes:
 ...Object.entries(telemetry?.metadata??{}).reduce((attributes,[key,value])=>{attributes[`stream.telemetry.metadata.${key}`]=value;return attributes;},{}),// request headers
 ...Object.entries(headers??{}).reduce((attributes,[key,value])=>{if(value!==void 0){attributes[`stream.request.headers.${key}`]=value;}return attributes;},{})};}function getRootSpan({operationId,model,modelSettings,telemetry_settings,headers}){const tracer=getTracer({isEnabled:telemetry_settings?.isEnabled,tracer:telemetry_settings?.tracer});const baseTelemetryAttributes=getTelemetryAttributes({model:{modelId:model.modelId,provider:model.provider},settings:modelSettings??{maxRetries:2},telemetry:telemetry_settings,headers});const rootSpan=tracer.startSpan(operationId).setAttributes({...assembleOperationName({operationId,telemetry:telemetry_settings}),...baseTelemetryAttributes});return {rootSpan};}// src/loop/workflows/stream.ts
-var MastraWorkflowStream=class extends ReadableStream$1{#usageCount={
+var MastraWorkflowStream=class extends ReadableStream$1{#usageCount={inputTokens:0,outputTokens:0,totalTokens:0};#streamPromise;#run;constructor({createStream,run}){const deferredPromise={promise:null,resolve:null,reject:null};deferredPromise.promise=new Promise((resolve,reject)=>{deferredPromise.resolve=resolve;deferredPromise.reject=reject;});const updateUsageCount=usage=>{if("inputTokens"in usage){this.#usageCount.inputTokens+=parseInt(usage?.inputTokens?.toString()??"0",10);this.#usageCount.outputTokens+=parseInt(usage?.outputTokens?.toString()??"0",10);}else if("promptTokens"in usage){this.#usageCount.inputTokens+=parseInt(usage?.promptTokens?.toString()??"0",10);this.#usageCount.outputTokens+=parseInt(usage?.completionTokens?.toString()??"0",10);}this.#usageCount.totalTokens+=parseInt(usage?.totalTokens?.toString()??"0",10);};super({start:async controller=>{const writer=new WritableStream({write:chunk=>{if(chunk.type==="step-output"&&chunk.payload?.output?.from==="AGENT"&&chunk.payload?.output?.type==="finish"||chunk.type==="step-output"&&chunk.payload?.output?.from==="WORKFLOW"&&chunk.payload?.output?.type==="finish"){const finishPayload=chunk.payload?.output.payload;if(finishPayload){updateUsageCount(finishPayload.usage);}}controller.enqueue(chunk);}});controller.enqueue({type:"workflow-start",runId:run.runId,from:"WORKFLOW"/* WORKFLOW */,payload:{workflowId:run.workflowId}});const stream=await createStream(writer);let workflowStatus="success";for await(const chunk of stream){if(chunk.type==="step-finish"&&chunk.payload.usage){updateUsageCount(chunk.payload.usage);}else if(chunk.type==="workflow-canceled"){workflowStatus="canceled";}else if(chunk.type==="workflow-step-suspended"){workflowStatus="suspended";}else if(chunk.type==="workflow-step-result"&&chunk.payload.status==="failed"){workflowStatus="failed";}controller.enqueue(chunk);}controller.enqueue({type:"workflow-finish",runId:run.runId,from:"WORKFLOW"/* WORKFLOW */,payload:{workflowStatus,output:{usage:this.#usageCount},metadata:{}}});controller.close();deferredPromise.resolve();}});this.#run=run;this.#streamPromise=deferredPromise;}get status(){return this.#streamPromise.promise.then(()=>this.#run._getExecutionResults()).then(res=>res.status);}get result(){return this.#streamPromise.promise.then(()=>this.#run._getExecutionResults());}get usage(){return this.#streamPromise.promise.then(()=>this.#usageCount);}};// src/workflows/default.ts
 function runScorer({runId,scorerId,scorerObject,input,output,runtimeContext,entity,structuredOutput,source,entityType,threadId,resourceId,tracingContext}){let shouldExecute=false;if(!scorerObject?.sampling||scorerObject?.sampling?.type==="none"){shouldExecute=true;}if(scorerObject?.sampling?.type){switch(scorerObject?.sampling?.type){case "ratio":shouldExecute=Math.random()<scorerObject?.sampling?.rate;break;default:shouldExecute=true;}}if(!shouldExecute){return;}const payload={scorer:{id:scorerId,name:scorerObject.scorer.name,description:scorerObject.scorer.description},input,output,runtimeContext:Object.fromEntries(runtimeContext.entries()),runId,source,entity,structuredOutput,entityType,threadId,resourceId,tracingContext};executeHook("onScorerRun"/* ON_SCORER_RUN */,payload);}// src/workflows/execution-engine.ts
 var ExecutionEngine=class extends MastraBase{mastra;options;constructor({mastra,options}){super({name:"ExecutionEngine",component:RegisteredLogger.WORKFLOW});this.mastra=mastra;this.options=options;}__registerMastra(mastra){this.mastra=mastra;}};// src/workflows/default.ts
 var DefaultExecutionEngine=class extends ExecutionEngine{/**
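Two changes are visible in the `+173` and `+179` lines. `#getTotalUsage` now prefers a provider-reported `totalTokens` and only derives a total when the provider omitted one, and `MastraWorkflowStream` aggregates usage from both AI SDK v5-style payloads (`inputTokens`/`outputTokens`) and v4-style ones (`promptTokens`/`completionTokens`) while tracking the workflow status (`success`, `canceled`, `suspended`, or `failed`) that it reports in the final `workflow-finish` chunk. A sketch of the total-tokens fallback (the type name is assumed):

```ts
type Usage = {
  inputTokens?: number;
  outputTokens?: number;
  totalTokens?: number;
  reasoningTokens?: number;
  cachedInputTokens?: number;
};

// Prefer the provider-reported total; otherwise derive it from the parts.
function getTotalUsage(c: Usage): Usage {
  let total = c.totalTokens;
  if (total === undefined) {
    // e.g. { inputTokens: 12, outputTokens: 30, reasoningTokens: 8 } yields totalTokens: 50.
    total = (c.inputTokens ?? 0) + (c.outputTokens ?? 0) + (c.reasoningTokens ?? 0);
  }
  return { ...c, totalTokens: total };
}
```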
@@ -342,8 +342,8 @@ function workflowLoopStream({telemetry_settings,models,toolChoice,modelSettings,
 reason:executionResult.result.stepResult.reason}}});const msToFinish=(_internal?.now?.()??Date.now())-startTimestamp;modelStreamSpan.addEvent("ai.stream.finish");modelStreamSpan.setAttributes({"stream.response.msToFinish":msToFinish,"stream.response.avgOutputTokensPerSecond":1e3*(executionResult?.result?.output?.usage?.outputTokens??0)/msToFinish});controller.close();}});}// src/loop/loop.ts
 function loop({models,logger,runId,idGenerator,telemetry_settings,messageList,includeRawChunks,modelSettings,tools,_internal,mode="stream",outputProcessors,returnScorerData,llmAISpan,...rest}){let loggerToUse=logger||new ConsoleLogger({level:"debug"});if(models.length===0||!models[0]){const mastraError=new MastraError({id:"LOOP_MODELS_EMPTY",domain:"LLM"/* LLM */,category:"USER"/* USER */});loggerToUse.trackException(mastraError);loggerToUse.error(mastraError.toString());throw mastraError;}const firstModel=models[0];let runIdToUse=runId;if(!runIdToUse){runIdToUse=idGenerator?.()||crypto.randomUUID();}const internalToUse={now:_internal?.now||(()=>Date.now()),generateId:_internal?.generateId||(()=>generateId()),currentDate:_internal?.currentDate||(()=>/* @__PURE__ */new Date())};let startTimestamp=internalToUse.now?.();const{rootSpan}=getRootSpan({operationId:mode==="stream"?`mastra.stream`:`mastra.generate`,model:{modelId:firstModel.model.modelId,provider:firstModel.model.provider},modelSettings,headers:modelSettings?.headers??rest.headers,telemetry_settings});rootSpan.setAttributes({...(telemetry_settings?.recordOutputs!==false?{"stream.prompt.messages":JSON.stringify(messageList.get.input.aiV5.model())}:{})});const{rootSpan:modelStreamSpan}=getRootSpan({operationId:`mastra.${mode}.aisdk.doStream`,model:{modelId:firstModel.model.modelId,provider:firstModel.model.provider},modelSettings,headers:modelSettings?.headers??rest.headers,telemetry_settings});const messageId=rest.experimental_generateMessageId?.()||internalToUse.generateId?.();const workflowLoopProps={models,runId:runIdToUse,logger:loggerToUse,startTimestamp,messageList,includeRawChunks:!!includeRawChunks,_internal:internalToUse,tools,modelStreamSpan,telemetry_settings,modelSettings,outputProcessors,llmAISpan,messageId,...rest};const stream=workflowLoopStream(workflowLoopProps);return new MastraModelOutput({model:{modelId:firstModel.model.modelId,provider:firstModel.model.provider,version:firstModel.model.specificationVersion},stream,messageList,messageId,options:{runId:runIdToUse,telemetry_settings,rootSpan,toolCallStreaming:rest.toolCallStreaming,onFinish:rest.options?.onFinish,onStepFinish:rest.options?.onStepFinish,includeRawChunks:!!includeRawChunks,output:rest.output,outputProcessors,outputProcessorRunnerMode:"result",returnScorerData,tracingContext:{currentSpan:llmAISpan}}});}// src/llm/model/model.loop.ts
 var MastraLLMVNext=class extends MastraBase{#models;#mastra;#options;#firstModel;constructor({mastra,models,options}){super({name:"aisdk"});this.#options=options;if(mastra){this.#mastra=mastra;if(mastra.getLogger()){this.__setLogger(this.#mastra.getLogger());}}if(models.length===0||!models[0]){const mastraError=new MastraError({id:"LLM_LOOP_MODELS_EMPTY",domain:"LLM"/* LLM */,category:"USER"/* USER */});this.logger.trackException(mastraError);this.logger.error(mastraError.toString());throw mastraError;}else {this.#models=models;this.#firstModel=models[0];}}__registerPrimitives(p){if(p.telemetry){this.__setTelemetry(p.telemetry);}if(p.logger){this.__setLogger(p.logger);}}__registerMastra(p){this.#mastra=p;}getProvider(){return this.#firstModel.model.provider;}getModelId(){return this.#firstModel.model.modelId;}getModel(){return this.#firstModel.model;}_applySchemaCompat(schema){const model=this.#firstModel.model;const schemaCompatLayers=[];if(model){const modelInfo={modelId:model.modelId,supportsStructuredOutputs:true,provider:model.provider};schemaCompatLayers.push(new OpenAIReasoningSchemaCompatLayer(modelInfo),new OpenAISchemaCompatLayer(modelInfo),new GoogleSchemaCompatLayer(modelInfo),new AnthropicSchemaCompatLayer(modelInfo),new DeepSeekSchemaCompatLayer(modelInfo),new MetaSchemaCompatLayer(modelInfo));}return applyCompatLayer({schema,compatLayers:schemaCompatLayers,mode:"aiSdkSchema"});}convertToMessages(messages){if(Array.isArray(messages)){return messages.map(m=>{if(typeof m==="string"){return {role:"user",content:m};}return m;});}return [{role:"user",content:messages}];}stream({stopWhen=stepCountIs(5),maxSteps,tools={},runId,modelSettings,toolChoice="auto",telemetry_settings,threadId,resourceId,output,options,outputProcessors,returnScorerData,providerOptions,tracingContext,messageList,_internal// ...rest
-}){let stopWhenToUse;if(maxSteps&&typeof maxSteps==="number"){stopWhenToUse=stepCountIs(maxSteps);}else {stopWhenToUse=stopWhen;}const messages=messageList.get.all.aiV5.model();const firstModel=this.#firstModel.model;this.logger.debug(`[LLM] - Streaming text`,{runId,threadId,resourceId,messages,tools:Object.keys(tools||{})});const llmAISpan=tracingContext?.currentSpan?.createChildSpan({name:`llm: '${firstModel.modelId}'`,type:"llm_generation"/* LLM_GENERATION */,input:{messages:[...messageList.getSystemMessages(),...messages]},attributes:{model:firstModel.modelId,provider:firstModel.provider,streaming:true,parameters:modelSettings},metadata:{runId,threadId,resourceId},tracingPolicy:this.#options?.tracingPolicy});try{const loopOptions={messageList,models:this.#models,tools,stopWhen:stopWhenToUse,toolChoice,modelSettings,providerOptions,telemetry_settings:{...this.experimental_telemetry,...telemetry_settings},_internal,output,outputProcessors,returnScorerData,llmAISpan,options:{...options,onStepFinish:async props=>{try{await options?.onStepFinish?.({...props,runId});}catch(e){const mastraError=new MastraError({id:"LLM_STREAM_ON_STEP_FINISH_CALLBACK_EXECUTION_FAILED",domain:"LLM"/* LLM */,category:"USER"/* USER */,details:{modelId:props.model.modelId,modelProvider:props.model.provider,runId:runId??"unknown",threadId:threadId??"unknown",resourceId:resourceId??"unknown",finishReason:props?.finishReason,toolCalls:props?.toolCalls?JSON.stringify(props.toolCalls):"",toolResults:props?.toolResults?JSON.stringify(props.toolResults):"",usage:props?.usage?JSON.stringify(props.usage):""}},e);llmAISpan?.error({error:mastraError});this.logger.trackException(mastraError);throw mastraError;}this.logger.debug("[LLM] - Stream Step Change:",{text:props?.text,toolCalls:props?.toolCalls,toolResults:props?.toolResults,finishReason:props?.finishReason,usage:props?.usage,runId});if(props?.response?.headers?.["x-ratelimit-remaining-tokens"]&&parseInt(props?.response?.headers?.["x-ratelimit-remaining-tokens"],10)<2e3){this.logger.warn("Rate limit approaching, waiting 10 seconds",{runId});await delay(10*1e3);}},onFinish:async props=>{try{await options?.onFinish?.({...props,runId});}catch(e){const mastraError=new MastraError({id:"LLM_STREAM_ON_FINISH_CALLBACK_EXECUTION_FAILED",domain:"LLM"/* LLM */,category:"USER"/* USER */,details:{modelId:props.model.modelId,modelProvider:props.model.provider,runId:runId??"unknown",threadId:threadId??"unknown",resourceId:resourceId??"unknown",finishReason:props?.finishReason,toolCalls:props?.toolCalls?JSON.stringify(props.toolCalls):"",toolResults:props?.toolResults?JSON.stringify(props.toolResults):"",usage:props?.usage?JSON.stringify(props.usage):""}},e);llmAISpan?.error({error:mastraError});this.logger.trackException(mastraError);throw mastraError;}llmAISpan?.end({output:{text:props?.text,reasoning:props?.reasoning,reasoningText:props?.reasoningText,files:props?.files,sources:props?.sources,warnings:props?.warnings},attributes:{finishReason:props?.finishReason,usage:{
-var MastraAgentNetworkStream=class extends ReadableStream$1{#usageCount={
+}){let stopWhenToUse;if(maxSteps&&typeof maxSteps==="number"){stopWhenToUse=stepCountIs(maxSteps);}else {stopWhenToUse=stopWhen;}const messages=messageList.get.all.aiV5.model();const firstModel=this.#firstModel.model;this.logger.debug(`[LLM] - Streaming text`,{runId,threadId,resourceId,messages,tools:Object.keys(tools||{})});const llmAISpan=tracingContext?.currentSpan?.createChildSpan({name:`llm: '${firstModel.modelId}'`,type:"llm_generation"/* LLM_GENERATION */,input:{messages:[...messageList.getSystemMessages(),...messages]},attributes:{model:firstModel.modelId,provider:firstModel.provider,streaming:true,parameters:modelSettings},metadata:{runId,threadId,resourceId},tracingPolicy:this.#options?.tracingPolicy});try{const loopOptions={messageList,models:this.#models,tools,stopWhen:stopWhenToUse,toolChoice,modelSettings,providerOptions,telemetry_settings:{...this.experimental_telemetry,...telemetry_settings},_internal,output,outputProcessors,returnScorerData,llmAISpan,options:{...options,onStepFinish:async props=>{try{await options?.onStepFinish?.({...props,runId});}catch(e){const mastraError=new MastraError({id:"LLM_STREAM_ON_STEP_FINISH_CALLBACK_EXECUTION_FAILED",domain:"LLM"/* LLM */,category:"USER"/* USER */,details:{modelId:props.model.modelId,modelProvider:props.model.provider,runId:runId??"unknown",threadId:threadId??"unknown",resourceId:resourceId??"unknown",finishReason:props?.finishReason,toolCalls:props?.toolCalls?JSON.stringify(props.toolCalls):"",toolResults:props?.toolResults?JSON.stringify(props.toolResults):"",usage:props?.usage?JSON.stringify(props.usage):""}},e);llmAISpan?.error({error:mastraError});this.logger.trackException(mastraError);throw mastraError;}this.logger.debug("[LLM] - Stream Step Change:",{text:props?.text,toolCalls:props?.toolCalls,toolResults:props?.toolResults,finishReason:props?.finishReason,usage:props?.usage,runId});if(props?.response?.headers?.["x-ratelimit-remaining-tokens"]&&parseInt(props?.response?.headers?.["x-ratelimit-remaining-tokens"],10)<2e3){this.logger.warn("Rate limit approaching, waiting 10 seconds",{runId});await delay(10*1e3);}},onFinish:async props=>{try{await options?.onFinish?.({...props,runId});}catch(e){const mastraError=new MastraError({id:"LLM_STREAM_ON_FINISH_CALLBACK_EXECUTION_FAILED",domain:"LLM"/* LLM */,category:"USER"/* USER */,details:{modelId:props.model.modelId,modelProvider:props.model.provider,runId:runId??"unknown",threadId:threadId??"unknown",resourceId:resourceId??"unknown",finishReason:props?.finishReason,toolCalls:props?.toolCalls?JSON.stringify(props.toolCalls):"",toolResults:props?.toolResults?JSON.stringify(props.toolResults):"",usage:props?.usage?JSON.stringify(props.usage):""}},e);llmAISpan?.error({error:mastraError});this.logger.trackException(mastraError);throw mastraError;}llmAISpan?.end({output:{text:props?.text,reasoning:props?.reasoning,reasoningText:props?.reasoningText,files:props?.files,sources:props?.sources,warnings:props?.warnings},attributes:{finishReason:props?.finishReason,usage:{inputTokens:props?.totalUsage?.inputTokens,outputTokens:props?.totalUsage?.outputTokens,totalTokens:props?.totalUsage?.totalTokens,reasoningTokens:props?.totalUsage?.reasoningTokens,cachedInputTokens:props?.totalUsage?.cachedInputTokens}}});this.logger.debug("[LLM] - Stream Finished:",{text:props?.text,toolCalls:props?.toolCalls,toolResults:props?.toolResults,finishReason:props?.finishReason,usage:props?.usage,runId,threadId,resourceId});}}};return loop(loopOptions);}catch(e){const mastraError=new MastraError({id:"LLM_STREAM_TEXT_AI_SDK_EXECUTION_FAILED",domain:"LLM"/* LLM */,category:"THIRD_PARTY"/* THIRD_PARTY */,details:{modelId:firstModel.modelId,modelProvider:firstModel.provider,runId:runId??"unknown",threadId:threadId??"unknown",resourceId:resourceId??"unknown"}},e);llmAISpan?.error({error:mastraError});throw mastraError;}}};// src/loop/network/index.ts
+var MastraAgentNetworkStream=class extends ReadableStream$1{#usageCount={inputTokens:0,outputTokens:0,totalTokens:0};#streamPromise;#run;constructor({createStream,run}){const deferredPromise={promise:null,resolve:null,reject:null};deferredPromise.promise=new Promise((resolve,reject)=>{deferredPromise.resolve=resolve;deferredPromise.reject=reject;});const updateUsageCount=usage=>{this.#usageCount.inputTokens+=parseInt(usage?.inputTokens?.toString()??"0",10);this.#usageCount.outputTokens+=parseInt(usage?.outputTokens?.toString()??"0",10);this.#usageCount.totalTokens+=parseInt(usage?.totalTokens?.toString()??"0",10);};super({start:async controller=>{const writer=new WritableStream({write:chunk=>{if(chunk.type==="step-output"&&chunk.payload?.output?.from==="AGENT"&&chunk.payload?.output?.type==="finish"||chunk.type==="step-output"&&chunk.payload?.output?.from==="WORKFLOW"&&chunk.payload?.output?.type==="finish"){const finishPayload=chunk.payload?.output.payload;if(finishPayload){updateUsageCount(finishPayload.usage);}}controller.enqueue(chunk);}});const stream=await createStream(writer);for await(const chunk of stream){if(chunk.type==="workflow-step-output"){const innerChunk=chunk.payload.output;const innerChunkType=innerChunk.payload.output;controller.enqueue(innerChunkType);}}controller.close();deferredPromise.resolve();}});this.#run=run;this.#streamPromise=deferredPromise;}get status(){return this.#streamPromise.promise.then(()=>this.#run._getExecutionResults()).then(res=>res.status);}get result(){return this.#streamPromise.promise.then(()=>this.#run._getExecutionResults());}get usage(){return this.#streamPromise.promise.then(()=>this.#usageCount);}};// src/loop/types.ts
 var RESOURCE_TYPES=z5.enum(["agent","workflow","none","tool"]);// src/loop/network/index.ts
 async function getRoutingAgent({runtimeContext,agent}){const instructionsToUse=await agent.getInstructions({runtimeContext});const agentsToUse=await agent.listAgents({runtimeContext});const workflowsToUse=await agent.getWorkflows({runtimeContext});const toolsToUse=await agent.getTools({runtimeContext});const model=await agent.getModel({runtimeContext});const memoryToUse=await agent.getMemory({runtimeContext});const agentList=Object.entries(agentsToUse).map(([name,agent2])=>{return ` - **${name}**: ${agent2.getDescription()}`;}).join("\n");const workflowList=Object.entries(workflowsToUse).map(([name,workflow])=>{return ` - **${name}**: ${workflow.description}, input schema: ${JSON.stringify(zodToJsonSchema(workflow.inputSchema))}`;}).join("\n");const toolList=Object.entries(toolsToUse).map(([name,tool])=>{return ` - **${name}**: ${tool.description}, input schema: ${JSON.stringify(zodToJsonSchema(tool.inputSchema||z5.object({})))}`;}).join("\n");const instructions=`
 You are a router in a network of specialized AI agents.
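In the `+345` and `+346` lines, the `onFinish` callback of `MastraLLMVNext.stream()` now ends the LLM tracing span with usage attributes taken from `props.totalUsage` (input, output, total, reasoning, and cached input tokens), and `MastraAgentNetworkStream` initializes its `#usageCount` counters to zero and coerces incoming counts through `parseInt(x?.toString() ?? "0", 10)`, so an absent field contributes 0 to the totals. A sketch of that defensive accumulation (the helper name is an assumption):

```ts
// Coerce a possibly-undefined token count to an integer, mirroring the
// parseInt(x?.toString() ?? '0', 10) pattern in the diff above.
function tokensOf(value: number | string | undefined): number {
  return parseInt(value?.toString() ?? '0', 10);
}

const usageCount = { inputTokens: 0, outputTokens: 0, totalTokens: 0 };

// Accumulate a finish payload's usage into the running network-level count.
function updateUsageCount(usage?: {
  inputTokens?: number | string;
  outputTokens?: number | string;
  totalTokens?: number | string;
}): void {
  usageCount.inputTokens += tokensOf(usage?.inputTokens);
  usageCount.outputTokens += tokensOf(usage?.outputTokens);
  usageCount.totalTokens += tokensOf(usage?.totalTokens);
}
```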
@@ -588,7 +588,7 @@ _agentNetworkAppend:this._agentNetworkAppend}).addSystem(instructions||`${this.i
|
|
|
588
588
|
internal:1/* WORKFLOW */}}}).parallel([prepareToolsStep,prepareMemory]).map(async({inputData,bail})=>{const result2={...options,tools:inputData["prepare-tools-step"].convertedTools,runId,toolChoice:options.toolChoice,thread:inputData["prepare-memory-step"].thread,threadId:inputData["prepare-memory-step"].thread?.id,resourceId,runtimeContext,onStepFinish:async props=>{if(options.savePerStep){if(!inputData["prepare-memory-step"].threadExists&&memory&&inputData["prepare-memory-step"].thread){await memory.createThread({threadId:inputData["prepare-memory-step"].thread?.id,title:inputData["prepare-memory-step"].thread?.title,metadata:inputData["prepare-memory-step"].thread?.metadata,resourceId:inputData["prepare-memory-step"].thread?.resourceId,memoryConfig});inputData["prepare-memory-step"].threadExists=true;}await this.saveStepMessages({saveQueueManager,result:props,messageList:inputData["prepare-memory-step"].messageList,threadId:inputData["prepare-memory-step"].thread?.id,memoryConfig,runId});}return options.onStepFinish?.({...props,runId});},...(inputData["prepare-memory-step"].tripwire&&{tripwire:inputData["prepare-memory-step"].tripwire,tripwireReason:inputData["prepare-memory-step"].tripwireReason})};if(result2.tripwire){const emptyResult={textStream:async function*(){}(),fullStream:new globalThis.ReadableStream({start(controller){controller.enqueue({type:"tripwire",runId:result2.runId,from:"AGENT"/* AGENT */,payload:{tripwireReason:result2.tripwireReason}});controller.close();}}),objectStream:new globalThis.ReadableStream({start(controller){controller.close();}}),text:Promise.resolve(""),usage:Promise.resolve({inputTokens:0,outputTokens:0,totalTokens:0}),finishReason:Promise.resolve("other"),tripwire:true,tripwireReason:result2.tripwireReason,response:{id:randomUUID(),timestamp:/* @__PURE__ */new Date(),modelId:"tripwire",messages:[]},toolCalls:Promise.resolve([]),toolResults:Promise.resolve([]),warnings:Promise.resolve(void 0),request:{body:JSON.stringify({messages:[]})},object:void 0,experimental_output:void 0,steps:void 0,experimental_providerMetadata:void 0};return bail(emptyResult);}let effectiveOutputProcessors=options.outputProcessors||(this.#outputProcessors?typeof this.#outputProcessors==="function"?await this.#outputProcessors({runtimeContext:result2.runtimeContext}):this.#outputProcessors:[]);if(options.structuredOutput){const agentModel=await this.getModel({runtimeContext:result2.runtimeContext});const structuredProcessor=new StructuredOutputProcessor(options.structuredOutput,agentModel);effectiveOutputProcessors=effectiveOutputProcessors?[...effectiveOutputProcessors,structuredProcessor]:[structuredProcessor];}const loopOptions={runtimeContext:result2.runtimeContext,tracingContext:{currentSpan:agentAISpan},runId,toolChoice:result2.toolChoice,tools:result2.tools,resourceId:result2.resourceId,threadId:result2.threadId,structuredOutput:result2.structuredOutput,stopWhen:result2.stopWhen,maxSteps:result2.maxSteps,providerOptions:result2.providerOptions,options:{...(options.prepareStep&&{prepareStep:options.prepareStep}),onFinish:async payload=>{if(payload.finishReason==="error"){this.logger.error("Error in agent stream",{error:payload.error,runId});return;}const messageList=inputData["prepare-memory-step"].messageList;try{const outputText=messageList.get.all.core().map(m=>m.content).join("\n");await 
this.#executeOnFinish({result:payload,outputText,instructions,thread:result2.thread,threadId:result2.threadId,readOnlyMemory:options.memory?.readOnly,resourceId,memoryConfig,runtimeContext,agentAISpan,runId,messageList,threadExists:inputData["prepare-memory-step"].threadExists,structuredOutput:!!options.output,saveQueueManager,overrideScorers:options.scorers});}catch(e){this.logger.error("Error saving memory on finish",{error:e,runId});}await options?.onFinish?.({...payload,runId,messages:messageList.get.response.aiV5.model(),usage:payload.usage,totalUsage:payload.totalUsage});},onStepFinish:result2.onStepFinish,onChunk:options.onChunk,onError:options.onError,onAbort:options.onAbort,activeTools:options.activeTools,abortSignal:options.abortSignal},output:options.output,outputProcessors:effectiveOutputProcessors,modelSettings:{temperature:0,...(options.modelSettings||{})},messageList:inputData["prepare-memory-step"].messageList};return loopOptions;}).then(streamStep).commit();const run=await executionWorkflow.createRunAsync();const result=await run.start({tracingContext:{currentSpan:agentAISpan}});return result;}async#executeOnFinish({result,instructions,readOnlyMemory,thread:threadAfter,threadId,resourceId,memoryConfig,outputText,runtimeContext,agentAISpan,runId,messageList,threadExists,structuredOutput=false,saveQueueManager,overrideScorers}){const resToLog={text:result?.text,object:result?.object,toolResults:result?.toolResults,toolCalls:result?.toolCalls,usage:result?.usage,steps:result?.steps?.map(s=>{return {stepType:s?.stepType,text:result?.text,object:result?.object,toolResults:result?.toolResults,toolCalls:result?.toolCalls,usage:result?.usage};})};this.logger.debug(`[Agent:${this.name}] - Post processing LLM response`,{runId,result:resToLog,threadId,resourceId});const messageListResponses=messageList.get.response.aiV4.core();const usedWorkingMemory=messageListResponses?.some(m=>m.role==="tool"&&m?.content?.some(c=>c?.toolName==="updateWorkingMemory"));const memory=await this.getMemory({runtimeContext});const thread=usedWorkingMemory?threadId?await memory?.getThreadById({threadId}):void 0:threadAfter;if(memory&&resourceId&&thread&&!readOnlyMemory){try{let responseMessages=result.response.messages;if(!responseMessages&&result.object){responseMessages=[{role:"assistant",content:[{type:"text",text:outputText// outputText contains the stringified object
|
|
589
589
|
}]}];}if(responseMessages){const filteredMessages=responseMessages.filter(m=>m.role!=="user");messageList.add(filteredMessages,"response");}if(!threadExists){await memory.createThread({threadId:thread.id,metadata:thread.metadata,title:thread.title,memoryConfig,resourceId:thread.resourceId});}const promises=[saveQueueManager.flushMessages(messageList,threadId,memoryConfig)];if(thread.title?.startsWith("New Thread")){const config=memory.getMergedThreadConfig(memoryConfig);const userMessage=this.getMostRecentUserMessage(messageList.get.all.ui());const{shouldGenerate,model:titleModel,instructions:titleInstructions}=this.resolveTitleGenerationConfig(config?.threads?.generateTitle);if(shouldGenerate&&userMessage){promises.push(this.genTitle(userMessage,runtimeContext,{currentSpan:agentAISpan},titleModel,titleInstructions).then(title=>{if(title){return memory.createThread({threadId:thread.id,resourceId,memoryConfig,title,metadata:thread.metadata});}}));}}await Promise.all(promises);}catch(e){await saveQueueManager.flushMessages(messageList,threadId,memoryConfig);if(e instanceof MastraError){throw e;}const mastraError=new MastraError({id:"AGENT_MEMORY_PERSIST_RESPONSE_MESSAGES_FAILED",domain:"AGENT"/* AGENT */,category:"SYSTEM"/* SYSTEM */,details:{agentName:this.name,runId:runId||"",threadId:threadId||"",result:JSON.stringify(resToLog)}},e);this.logger.trackException(mastraError);this.logger.error(mastraError.toString());throw mastraError;}}else {let responseMessages=result.response.messages;if(!responseMessages&&result.object){responseMessages=[{role:"assistant",content:[{type:"text",text:outputText// outputText contains the stringified object
|
|
590
590
|
}]}];}if(responseMessages){messageList.add(responseMessages,"response");}}await this.#runScorers({messageList,runId,outputText,instructions,runtimeContext,structuredOutput,overrideScorers,tracingContext:{currentSpan:agentAISpan}});agentAISpan?.end({output:{text:result?.text,object:result?.object,files:result?.files}});}async network(messages,options){const runId=options?.runId||this.#mastra?.generateId()||randomUUID();const runtimeContextToUse=options?.runtimeContext||new RuntimeContext();return await networkLoop({networkName:this.name,runtimeContext:runtimeContextToUse,runId,routingAgent:this,routingAgentOptions:{telemetry:options?.telemetry,modelSettings:options?.modelSettings},generateId:()=>this.#mastra?.generateId()||randomUUID(),maxIterations:options?.maxSteps||1,messages,threadId:typeof options?.memory?.thread==="string"?options?.memory?.thread:options?.memory?.thread?.id,resourceId:options?.memory?.resource});}async generateVNext(messages,options){const result=await this.streamVNext(messages,options);if(result.tripwire){return result;}let fullOutput=await result.getFullOutput();const error=fullOutput.error;if(fullOutput.finishReason==="error"&&error){throw error;}return fullOutput;}async streamVNext(messages,streamOptions){const defaultStreamOptions=await this.getDefaultVNextStreamOptions({runtimeContext:streamOptions?.runtimeContext});let mergedStreamOptions={...defaultStreamOptions,...streamOptions,onFinish:this.#mergeOnFinishWithTelemetry(streamOptions,defaultStreamOptions)};let modelOverride;if(mergedStreamOptions.structuredOutput&&mergedStreamOptions.maxSteps===1){if(mergedStreamOptions.structuredOutput.model){modelOverride=mergedStreamOptions.structuredOutput.model;}mergedStreamOptions={...mergedStreamOptions,output:mergedStreamOptions.structuredOutput.schema,structuredOutput:void 0// Remove structuredOutput to avoid confusion downstream
|
|
591
|
-
};}const llm=await this.getLLM({runtimeContext:mergedStreamOptions.runtimeContext,model:modelOverride});if(llm.getModel().specificationVersion!=="v2"){throw new MastraError({id:"AGENT_STREAM_VNEXT_V1_MODEL_NOT_SUPPORTED",domain:"AGENT"/* AGENT */,category:"USER"/* USER */,text:"V1 models are not supported for streamVNext. Please use stream instead."});}const result=await this.#execute({...mergedStreamOptions,messages,methodType:"streamVNext",model:modelOverride});if(result.status!=="success"){if(result.status==="failed"){throw new MastraError({id:"AGENT_STREAM_VNEXT_FAILED",domain:"AGENT"/* AGENT */,category:"USER"/* USER */,text:result.error.message,details:{error:result.error.message}});}throw new MastraError({id:"AGENT_STREAM_VNEXT_UNKNOWN_ERROR",domain:"AGENT"/* AGENT */,category:"USER"/* USER */,text:"An unknown error occurred while streaming"});}return result.result;}async generate(messages,generateOptions={}){if(!generateDeprecationWarningShown){this.logger.warn("Deprecation NOTICE:\nGenerate method will switch to use generateVNext implementation September 23rd, 2025. Please use generateLegacy if you don't want to upgrade just yet.");generateDeprecationWarningShown=true;}return this.generateLegacy(messages,generateOptions);}async generateLegacy(messages,generateOptions={}){const defaultGenerateOptions=await this.getDefaultGenerateOptions({runtimeContext:generateOptions.runtimeContext});const mergedGenerateOptions={...defaultGenerateOptions,...generateOptions,experimental_generateMessageId:defaultGenerateOptions.experimental_generateMessageId||this.#mastra?.generateId?.bind(this.#mastra)};const{llm,before,after}=await this.prepareLLMOptions(messages,mergedGenerateOptions,"generate");if(llm.getModel().specificationVersion!=="v1"){this.logger.error("V2 models are not supported for the current version of generate. Please use generateVNext instead.",{modelId:llm.getModel().modelId});throw new MastraError({id:"AGENT_GENERATE_V2_MODEL_NOT_SUPPORTED",domain:"AGENT"/* AGENT */,category:"USER"/* USER */,details:{modelId:llm.getModel().modelId},text:"V2 models are not supported for the current version of generate. 
Please use generateVNext instead."});}let llmToUse=llm;const beforeResult=await before();const traceId=getValidTraceId(beforeResult.agentAISpan);if(beforeResult.tripwire){const tripwireResult={text:"",object:void 0,usage:{totalTokens:0,promptTokens:0,completionTokens:0},finishReason:"other",response:{id:randomUUID(),timestamp:/* @__PURE__ */new Date(),modelId:"tripwire",messages:[]},responseMessages:[],toolCalls:[],toolResults:[],warnings:void 0,request:{body:JSON.stringify({messages:[]})},experimental_output:void 0,steps:void 0,experimental_providerMetadata:void 0,tripwire:true,tripwireReason:beforeResult.tripwireReason,traceId};return tripwireResult;}const{experimental_output,output,agentAISpan,...llmOptions}=beforeResult;const tracingContext={currentSpan:agentAISpan};let finalOutputProcessors=mergedGenerateOptions.outputProcessors;if(mergedGenerateOptions.structuredOutput){const agentModel=await this.getModel({runtimeContext:mergedGenerateOptions.runtimeContext});const structuredProcessor=new StructuredOutputProcessor(mergedGenerateOptions.structuredOutput,agentModel);finalOutputProcessors=finalOutputProcessors?[...finalOutputProcessors,structuredProcessor]:[structuredProcessor];}if(!output||experimental_output){const result2=await llmToUse.__text({...llmOptions,tracingContext,experimental_output});const outputProcessorResult2=await this.__runOutputProcessors({runtimeContext:mergedGenerateOptions.runtimeContext||new RuntimeContext(),tracingContext,outputProcessorOverrides:finalOutputProcessors,messageList:new MessageList({threadId:llmOptions.threadId||"",resourceId:llmOptions.resourceId||""}).add({role:"assistant",content:[{type:"text",text:result2.text}]},"response")});if(outputProcessorResult2.tripwireTriggered){const tripwireResult={text:"",object:void 0,usage:{totalTokens:0,promptTokens:0,completionTokens:0},finishReason:"other",response:{id:randomUUID(),timestamp:/* @__PURE__ */new Date(),modelId:"tripwire",messages:[]},responseMessages:[],toolCalls:[],toolResults:[],warnings:void 0,request:{body:JSON.stringify({messages:[]})},experimental_output:void 0,steps:void 0,experimental_providerMetadata:void 0,tripwire:true,tripwireReason:outputProcessorResult2.tripwireReason,traceId};return tripwireResult;}const newText2=outputProcessorResult2.messageList.get.response.v2().map(msg=>msg.content.parts.map(part=>part.type==="text"?part.text:"").join("")).join("");result2.text=newText2;if(finalOutputProcessors&&finalOutputProcessors.length>0){const messages2=outputProcessorResult2.messageList.get.response.v2();this.logger.debug("Checking messages for experimentalOutput metadata:",messages2.map(m=>({role:m.role,hasContentMetadata:!!m.content.metadata,contentMetadata:m.content.metadata})));const messagesWithStructuredData=messages2.filter(msg=>msg.content.metadata&&msg.content.metadata.structuredOutput);this.logger.debug("Messages with structured data:",messagesWithStructuredData.length);if(messagesWithStructuredData[0]&&messagesWithStructuredData[0].content.metadata?.structuredOutput){result2.object=messagesWithStructuredData[0].content.metadata.structuredOutput;this.logger.debug("Using structured data from processor metadata for result.object");}else {try{const processedOutput=JSON.parse(newText2);result2.object=processedOutput;this.logger.debug("Using fallback JSON parsing for result.object");}catch(error){this.logger.warn("Failed to parse processed output as JSON, updating text only",{error});}}}const overrideScorers=mergedGenerateOptions.scorers;const afterResult2=await 
after({result:result2,outputText:newText2,agentAISpan,...(overrideScorers?{overrideScorers}:{})});if(generateOptions.returnScorerData){result2.scoringData=afterResult2.scoringData;}result2.traceId=traceId;return result2;}const result=await llmToUse.__textObject({...llmOptions,tracingContext,structuredOutput:output});const outputText=JSON.stringify(result.object);const outputProcessorResult=await this.__runOutputProcessors({runtimeContext:mergedGenerateOptions.runtimeContext||new RuntimeContext(),tracingContext,messageList:new MessageList({threadId:llmOptions.threadId||"",resourceId:llmOptions.resourceId||""}).add({role:"assistant",content:[{type:"text",text:outputText}]},"response")});if(outputProcessorResult.tripwireTriggered){const tripwireResult={text:"",object:void 0,usage:{totalTokens:0,promptTokens:0,completionTokens:0},finishReason:"other",response:{id:randomUUID(),timestamp:/* @__PURE__ */new Date(),modelId:"tripwire",messages:[]},responseMessages:[],toolCalls:[],toolResults:[],warnings:void 0,request:{body:JSON.stringify({messages:[]})},experimental_output:void 0,steps:void 0,experimental_providerMetadata:void 0,tripwire:true,tripwireReason:outputProcessorResult.tripwireReason,traceId};return tripwireResult;}const newText=outputProcessorResult.messageList.get.response.v2().map(msg=>msg.content.parts.map(part=>part.type==="text"?part.text:"").join("")).join("");try{const processedObject=JSON.parse(newText);result.object=processedObject;}catch(error){this.logger.warn("Failed to parse processed output as JSON, keeping original result",{error});}const afterResult=await after({result,outputText:newText,...(generateOptions.scorers?{overrideScorers:generateOptions.scorers}:{}),structuredOutput:true,agentAISpan});if(generateOptions.returnScorerData){result.scoringData=afterResult.scoringData;}result.traceId=traceId;return result;}async stream(messages,streamOptions={}){if(!streamDeprecationWarningShown){this.logger.warn("Deprecation NOTICE:\nStream method will switch to use streamVNext implementation September 23rd, 2025. Please use streamLegacy if you don't want to upgrade just yet.");streamDeprecationWarningShown=true;}return this.streamLegacy(messages,streamOptions);}async streamLegacy(messages,streamOptions={}){const defaultStreamOptions=await this.getDefaultStreamOptions({runtimeContext:streamOptions.runtimeContext});const mergedStreamOptions={...defaultStreamOptions,...streamOptions,onFinish:this.#mergeOnFinishWithTelemetry(streamOptions,defaultStreamOptions),experimental_generateMessageId:defaultStreamOptions.experimental_generateMessageId||this.#mastra?.generateId?.bind(this.#mastra)};const{llm,before,after}=await this.prepareLLMOptions(messages,mergedStreamOptions,"stream");if(llm.getModel().specificationVersion!=="v1"){this.logger.error("V2 models are not supported for stream. Please use streamVNext instead.",{modelId:llm.getModel().modelId});throw new MastraError({id:"AGENT_STREAM_V2_MODEL_NOT_SUPPORTED",domain:"AGENT"/* AGENT */,category:"USER"/* USER */,details:{modelId:llm.getModel().modelId},text:"V2 models are not supported for stream. 
Please use streamVNext instead."});}const beforeResult=await before();const traceId=getValidTraceId(beforeResult.agentAISpan);if(beforeResult.tripwire){const emptyResult={textStream:async function*(){}(),fullStream:Promise.resolve("").then(()=>{const emptyStream=new globalThis.ReadableStream({start(controller){controller.close();}});return emptyStream;}),text:Promise.resolve(""),usage:Promise.resolve({totalTokens:0,promptTokens:0,completionTokens:0}),finishReason:Promise.resolve("other"),tripwire:true,tripwireReason:beforeResult.tripwireReason,response:{id:randomUUID(),timestamp:/* @__PURE__ */new Date(),modelId:"tripwire",messages:[]},toolCalls:Promise.resolve([]),toolResults:Promise.resolve([]),warnings:Promise.resolve(void 0),request:{body:JSON.stringify({messages:[]})},experimental_output:void 0,steps:void 0,experimental_providerMetadata:void 0,traceId,toAIStream:()=>Promise.resolve("").then(()=>{const emptyStream=new globalThis.ReadableStream({start(controller){controller.close();}});return emptyStream;}),get experimental_partialOutputStream(){return async function*(){}();},pipeDataStreamToResponse:()=>Promise.resolve(),pipeTextStreamToResponse:()=>Promise.resolve(),toDataStreamResponse:()=>new Response("",{status:200,headers:{"Content-Type":"text/plain"}}),toTextStreamResponse:()=>new Response("",{status:200,headers:{"Content-Type":"text/plain"}})};return emptyResult;}const{onFinish,runId,output,experimental_output,agentAISpan,...llmOptions}=beforeResult;const overrideScorers=mergedStreamOptions.scorers;const tracingContext={currentSpan:agentAISpan};if(!output||experimental_output){this.logger.debug(`Starting agent ${this.name} llm stream call`,{runId});const streamResult=llm.__stream({...llmOptions,experimental_output,tracingContext,outputProcessors:await this.getResolvedOutputProcessors(mergedStreamOptions.runtimeContext),onFinish:async result=>{try{const outputText=result.text;await after({result,outputText,agentAISpan,...(overrideScorers?{overrideScorers}:{})});}catch(e){this.logger.error("Error saving memory on finish",{error:e,runId});}await onFinish?.({...result,runId});},runId});streamResult.traceId=traceId;return streamResult;}this.logger.debug(`Starting agent ${this.name} llm streamObject call`,{runId});const streamObjectResult=llm.__streamObject({...llmOptions,tracingContext,onFinish:async result=>{try{const outputText=JSON.stringify(result.object);await after({result,outputText,structuredOutput:true,agentAISpan,...(overrideScorers?{overrideScorers}:{})});}catch(e){this.logger.error("Error saving memory on finish",{error:e,runId});}await onFinish?.({...result,runId});},runId,structuredOutput:output});streamObjectResult.traceId=traceId;return streamObjectResult;}/**
+};}const llm=await this.getLLM({runtimeContext:mergedStreamOptions.runtimeContext,model:modelOverride});if(llm.getModel().specificationVersion!=="v2"){const modelInfo=llm.getModel();const modelId=modelInfo.modelId||"unknown";const provider=modelInfo.provider||"unknown";throw new MastraError({id:"AGENT_STREAM_VNEXT_V1_MODEL_NOT_SUPPORTED",domain:"AGENT"/* AGENT */,category:"USER"/* USER */,text:`Agent "${this.name}" is using AI SDK v4 model (${provider}:${modelId}) which is not compatible with streamVNext. Please use AI SDK v5 models or call the stream() method instead. See https://mastra.ai/en/docs/streaming/overview for more information.`,details:{agentName:this.name,modelId,provider,specificationVersion:modelInfo.specificationVersion}});}const result=await this.#execute({...mergedStreamOptions,messages,methodType:"streamVNext",model:modelOverride});if(result.status!=="success"){if(result.status==="failed"){throw new MastraError({id:"AGENT_STREAM_VNEXT_FAILED",domain:"AGENT"/* AGENT */,category:"USER"/* USER */,text:result.error.message,details:{error:result.error.message}});}throw new MastraError({id:"AGENT_STREAM_VNEXT_UNKNOWN_ERROR",domain:"AGENT"/* AGENT */,category:"USER"/* USER */,text:"An unknown error occurred while streaming"});}return result.result;}async generate(messages,generateOptions={}){if(!generateDeprecationWarningShown){this.logger.warn("Deprecation NOTICE:\nGenerate method will switch to use generateVNext implementation September 23rd, 2025. Please use generateLegacy if you don't want to upgrade just yet.");generateDeprecationWarningShown=true;}return this.generateLegacy(messages,generateOptions);}async generateLegacy(messages,generateOptions={}){const defaultGenerateOptions=await this.getDefaultGenerateOptions({runtimeContext:generateOptions.runtimeContext});const mergedGenerateOptions={...defaultGenerateOptions,...generateOptions,experimental_generateMessageId:defaultGenerateOptions.experimental_generateMessageId||this.#mastra?.generateId?.bind(this.#mastra)};const{llm,before,after}=await this.prepareLLMOptions(messages,mergedGenerateOptions,"generate");if(llm.getModel().specificationVersion!=="v1"){this.logger.error("V2 models are not supported for the current version of generate. Please use generateVNext instead.",{modelId:llm.getModel().modelId});throw new MastraError({id:"AGENT_GENERATE_V2_MODEL_NOT_SUPPORTED",domain:"AGENT"/* AGENT */,category:"USER"/* USER */,details:{modelId:llm.getModel().modelId},text:"V2 models are not supported for the current version of generate. Please use generateVNext instead."});}let llmToUse=llm;const beforeResult=await before();const traceId=getValidTraceId(beforeResult.agentAISpan);if(beforeResult.tripwire){const tripwireResult={text:"",object:void 0,usage:{totalTokens:0,promptTokens:0,completionTokens:0},finishReason:"other",response:{id:randomUUID(),timestamp:/* @__PURE__ */new Date(),modelId:"tripwire",messages:[]},responseMessages:[],toolCalls:[],toolResults:[],warnings:void 0,request:{body:JSON.stringify({messages:[]})},experimental_output:void 0,steps:void 0,experimental_providerMetadata:void 0,tripwire:true,tripwireReason:beforeResult.tripwireReason,traceId};return tripwireResult;}const{experimental_output,output,agentAISpan,...llmOptions}=beforeResult;const tracingContext={currentSpan:agentAISpan};let finalOutputProcessors=mergedGenerateOptions.outputProcessors;if(mergedGenerateOptions.structuredOutput){const agentModel=await this.getModel({runtimeContext:mergedGenerateOptions.runtimeContext});const structuredProcessor=new StructuredOutputProcessor(mergedGenerateOptions.structuredOutput,agentModel);finalOutputProcessors=finalOutputProcessors?[...finalOutputProcessors,structuredProcessor]:[structuredProcessor];}if(!output||experimental_output){const result2=await llmToUse.__text({...llmOptions,tracingContext,experimental_output});const outputProcessorResult2=await this.__runOutputProcessors({runtimeContext:mergedGenerateOptions.runtimeContext||new RuntimeContext(),tracingContext,outputProcessorOverrides:finalOutputProcessors,messageList:new MessageList({threadId:llmOptions.threadId||"",resourceId:llmOptions.resourceId||""}).add({role:"assistant",content:[{type:"text",text:result2.text}]},"response")});if(outputProcessorResult2.tripwireTriggered){const tripwireResult={text:"",object:void 0,usage:{totalTokens:0,promptTokens:0,completionTokens:0},finishReason:"other",response:{id:randomUUID(),timestamp:/* @__PURE__ */new Date(),modelId:"tripwire",messages:[]},responseMessages:[],toolCalls:[],toolResults:[],warnings:void 0,request:{body:JSON.stringify({messages:[]})},experimental_output:void 0,steps:void 0,experimental_providerMetadata:void 0,tripwire:true,tripwireReason:outputProcessorResult2.tripwireReason,traceId};return tripwireResult;}const newText2=outputProcessorResult2.messageList.get.response.v2().map(msg=>msg.content.parts.map(part=>part.type==="text"?part.text:"").join("")).join("");result2.text=newText2;if(finalOutputProcessors&&finalOutputProcessors.length>0){const messages2=outputProcessorResult2.messageList.get.response.v2();this.logger.debug("Checking messages for experimentalOutput metadata:",messages2.map(m=>({role:m.role,hasContentMetadata:!!m.content.metadata,contentMetadata:m.content.metadata})));const messagesWithStructuredData=messages2.filter(msg=>msg.content.metadata&&msg.content.metadata.structuredOutput);this.logger.debug("Messages with structured data:",messagesWithStructuredData.length);if(messagesWithStructuredData[0]&&messagesWithStructuredData[0].content.metadata?.structuredOutput){result2.object=messagesWithStructuredData[0].content.metadata.structuredOutput;this.logger.debug("Using structured data from processor metadata for result.object");}else {try{const processedOutput=JSON.parse(newText2);result2.object=processedOutput;this.logger.debug("Using fallback JSON parsing for result.object");}catch(error){this.logger.warn("Failed to parse processed output as JSON, updating text only",{error});}}}const overrideScorers=mergedGenerateOptions.scorers;const afterResult2=await after({result:result2,outputText:newText2,agentAISpan,...(overrideScorers?{overrideScorers}:{})});if(generateOptions.returnScorerData){result2.scoringData=afterResult2.scoringData;}result2.traceId=traceId;return result2;}const result=await llmToUse.__textObject({...llmOptions,tracingContext,structuredOutput:output});const outputText=JSON.stringify(result.object);const outputProcessorResult=await this.__runOutputProcessors({runtimeContext:mergedGenerateOptions.runtimeContext||new RuntimeContext(),tracingContext,messageList:new MessageList({threadId:llmOptions.threadId||"",resourceId:llmOptions.resourceId||""}).add({role:"assistant",content:[{type:"text",text:outputText}]},"response")});if(outputProcessorResult.tripwireTriggered){const tripwireResult={text:"",object:void 0,usage:{totalTokens:0,promptTokens:0,completionTokens:0},finishReason:"other",response:{id:randomUUID(),timestamp:/* @__PURE__ */new Date(),modelId:"tripwire",messages:[]},responseMessages:[],toolCalls:[],toolResults:[],warnings:void 0,request:{body:JSON.stringify({messages:[]})},experimental_output:void 0,steps:void 0,experimental_providerMetadata:void 0,tripwire:true,tripwireReason:outputProcessorResult.tripwireReason,traceId};return tripwireResult;}const newText=outputProcessorResult.messageList.get.response.v2().map(msg=>msg.content.parts.map(part=>part.type==="text"?part.text:"").join("")).join("");try{const processedObject=JSON.parse(newText);result.object=processedObject;}catch(error){this.logger.warn("Failed to parse processed output as JSON, keeping original result",{error});}const afterResult=await after({result,outputText:newText,...(generateOptions.scorers?{overrideScorers:generateOptions.scorers}:{}),structuredOutput:true,agentAISpan});if(generateOptions.returnScorerData){result.scoringData=afterResult.scoringData;}result.traceId=traceId;return result;}async stream(messages,streamOptions={}){if(!streamDeprecationWarningShown){this.logger.warn("Deprecation NOTICE:\nStream method will switch to use streamVNext implementation September 23rd, 2025. Please use streamLegacy if you don't want to upgrade just yet.");streamDeprecationWarningShown=true;}return this.streamLegacy(messages,streamOptions);}async streamLegacy(messages,streamOptions={}){const defaultStreamOptions=await this.getDefaultStreamOptions({runtimeContext:streamOptions.runtimeContext});const mergedStreamOptions={...defaultStreamOptions,...streamOptions,onFinish:this.#mergeOnFinishWithTelemetry(streamOptions,defaultStreamOptions),experimental_generateMessageId:defaultStreamOptions.experimental_generateMessageId||this.#mastra?.generateId?.bind(this.#mastra)};const{llm,before,after}=await this.prepareLLMOptions(messages,mergedStreamOptions,"stream");if(llm.getModel().specificationVersion!=="v1"){this.logger.error("V2 models are not supported for stream. Please use streamVNext instead.",{modelId:llm.getModel().modelId});throw new MastraError({id:"AGENT_STREAM_V2_MODEL_NOT_SUPPORTED",domain:"AGENT"/* AGENT */,category:"USER"/* USER */,details:{modelId:llm.getModel().modelId},text:"V2 models are not supported for stream. Please use streamVNext instead."});}const beforeResult=await before();const traceId=getValidTraceId(beforeResult.agentAISpan);if(beforeResult.tripwire){const emptyResult={textStream:async function*(){}(),fullStream:Promise.resolve("").then(()=>{const emptyStream=new globalThis.ReadableStream({start(controller){controller.close();}});return emptyStream;}),text:Promise.resolve(""),usage:Promise.resolve({totalTokens:0,promptTokens:0,completionTokens:0}),finishReason:Promise.resolve("other"),tripwire:true,tripwireReason:beforeResult.tripwireReason,response:{id:randomUUID(),timestamp:/* @__PURE__ */new Date(),modelId:"tripwire",messages:[]},toolCalls:Promise.resolve([]),toolResults:Promise.resolve([]),warnings:Promise.resolve(void 0),request:{body:JSON.stringify({messages:[]})},experimental_output:void 0,steps:void 0,experimental_providerMetadata:void 0,traceId,toAIStream:()=>Promise.resolve("").then(()=>{const emptyStream=new globalThis.ReadableStream({start(controller){controller.close();}});return emptyStream;}),get experimental_partialOutputStream(){return async function*(){}();},pipeDataStreamToResponse:()=>Promise.resolve(),pipeTextStreamToResponse:()=>Promise.resolve(),toDataStreamResponse:()=>new Response("",{status:200,headers:{"Content-Type":"text/plain"}}),toTextStreamResponse:()=>new Response("",{status:200,headers:{"Content-Type":"text/plain"}})};return emptyResult;}const{onFinish,runId,output,experimental_output,agentAISpan,...llmOptions}=beforeResult;const overrideScorers=mergedStreamOptions.scorers;const tracingContext={currentSpan:agentAISpan};if(!output||experimental_output){this.logger.debug(`Starting agent ${this.name} llm stream call`,{runId});const streamResult=llm.__stream({...llmOptions,experimental_output,tracingContext,outputProcessors:await this.getResolvedOutputProcessors(mergedStreamOptions.runtimeContext),onFinish:async result=>{try{const outputText=result.text;await after({result,outputText,agentAISpan,...(overrideScorers?{overrideScorers}:{})});}catch(e){this.logger.error("Error saving memory on finish",{error:e,runId});}await onFinish?.({...result,runId});},runId});streamResult.traceId=traceId;return streamResult;}this.logger.debug(`Starting agent ${this.name} llm streamObject call`,{runId});const streamObjectResult=llm.__streamObject({...llmOptions,tracingContext,onFinish:async result=>{try{const outputText=JSON.stringify(result.object);await after({result,outputText,structuredOutput:true,agentAISpan,...(overrideScorers?{overrideScorers}:{})});}catch(e){this.logger.error("Error saving memory on finish",{error:e,runId});}await onFinish?.({...result,runId});},runId,structuredOutput:output});streamObjectResult.traceId=traceId;return streamObjectResult;}/**
* Convert text to speech using the configured voice provider
* @param input Text or text stream to convert to speech
* @param options Speech options including speaker and provider-specific options
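The added line above carries the bulk of this release's agent-facing changes: `generate()` and `stream()` now log a one-time deprecation notice and delegate to `generateLegacy()`/`streamLegacy()`, and each path guards on the model's `specificationVersion` ("v1" for the legacy methods, "v2" for `streamVNext`), throwing a `MastraError` on a mismatch. Below is a minimal, hypothetical sketch of that dispatch pattern under assumed names (`SketchModel`, `SketchError`, `SketchAgent` are illustrative, not the package's actual exports); only the error IDs and the once-per-process warning flag are taken from the diff itself.

```ts
// Sketch of the deprecation + model-version dispatch visible in the hunk above.

interface SketchModel {
  modelId: string;
  provider: string;
  specificationVersion: "v1" | "v2";
}

class SketchError extends Error {
  constructor(readonly id: string, message: string) {
    super(message);
    this.name = "SketchError";
  }
}

// Module-level flag, as in the bundle: the warning fires once per process.
let generateWarningShown = false;

class SketchAgent {
  constructor(
    readonly name: string,
    private model: SketchModel,
  ) {}

  async generate(messages: string[]): Promise<string> {
    if (!generateWarningShown) {
      console.warn(
        "Deprecation NOTICE: generate() will switch to the generateVNext implementation; call generateLegacy() to stay on the old path.",
      );
      generateWarningShown = true;
    }
    // Behavior is unchanged for now: generate() simply delegates.
    return this.generateLegacy(messages);
  }

  async generateLegacy(messages: string[]): Promise<string> {
    // The legacy path only accepts v1-spec models.
    if (this.model.specificationVersion !== "v1") {
      throw new SketchError(
        "AGENT_GENERATE_V2_MODEL_NOT_SUPPORTED",
        `${this.model.provider}:${this.model.modelId} is a v2-spec model; use generateVNext instead.`,
      );
    }
    return `echo: ${messages.join(" ")}`; // stand-in for the real LLM call
  }

  async streamVNext(messages: string[]): Promise<AsyncIterable<string>> {
    // The VNext path inverts the guard: only v2-spec models are accepted.
    if (this.model.specificationVersion !== "v2") {
      throw new SketchError(
        "AGENT_STREAM_VNEXT_V1_MODEL_NOT_SUPPORTED",
        `${this.model.provider}:${this.model.modelId} is not compatible with streamVNext; use stream() instead.`,
      );
    }
    async function* chunks() {
      for (const m of messages) yield m;
    }
    return chunks(); // stand-in for the real streaming call
  }
}
```

The same inverted-guard idea explains the paired error IDs in the hunk: the legacy methods reject "v2" models (`AGENT_GENERATE_V2_MODEL_NOT_SUPPORTED`, `AGENT_STREAM_V2_MODEL_NOT_SUPPORTED`) while `streamVNext` rejects anything that is not "v2".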
@@ -770,5 +770,5 @@ Target: ${this.targetLanguages.join("/")}${translate}`;}};// src/agent/input-pro
var LanguageDetectorInputProcessor=class{name="language-detector";processor;constructor(options){this.processor=new LanguageDetector(options);}async process(args){return this.processor.processInput(args);}};
export { AISDKV5OutputStream, Agent, ChunkFrom, DefaultExecutionEngine, ExecutionEngine, LanguageDetector, LanguageDetectorInputProcessor, LegacyStep, LegacyWorkflow, MastraModelOutput, ModerationInputProcessor, ModerationProcessor, PIIDetector, PIIDetectorInputProcessor, PromptInjectionDetector, PromptInjectionDetectorInputProcessor, RESOURCE_TYPES, Run, StructuredOutputProcessor, TripWire, UnicodeNormalizer, UnicodeNormalizerInputProcessor, WhenConditionReturnValue, Workflow, agentToStep, cloneStep, cloneWorkflow, createStep, createWorkflow, getActivePathsAndStatus, getResultActivePaths, getStepResult, getSuspendedPaths, isAgent, isConditionalKey, isErrorEvent, isFinalState, isLimboState, isTransitionEvent, isVariableReference, isWorkflow, loop, mapVariable, mergeChildValue, recursivelyCheckForFinalState, resolveVariables, updateStepInHierarchy, workflowToStep };
-//# sourceMappingURL=chunk-
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-M65NZ6EW.js.map
+//# sourceMappingURL=chunk-M65NZ6EW.js.map
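For context on the `LanguageDetectorInputProcessor` line in the hunk above: it is a thin adapter that owns a `LanguageDetector` instance and forwards `process(args)` to `processor.processInput(args)`, exposing the stable processor `name` the input-processor pipeline keys on. A minimal sketch of that wrapper shape follows; the `Detector` class and its detection logic here are assumed stand-ins, not the package's real `LanguageDetector`.

```ts
// Hypothetical stand-ins; only the adapter shape mirrors the bundled class.

interface InputArgs {
  text: string;
}

class Detector {
  constructor(private options: { targetLanguages: string[] }) {}

  async processInput(args: InputArgs): Promise<{ language: string; text: string }> {
    // Assumed heuristic: the real detector presumably scores text against targetLanguages.
    return { language: this.options.targetLanguages[0] ?? "en", text: args.text };
  }
}

class DetectorInputProcessor {
  readonly name = "language-detector";
  private processor: Detector;

  constructor(options: { targetLanguages: string[] }) {
    this.processor = new Detector(options);
  }

  // The adapter adds no logic of its own; it gives the detector the
  // uniform name + process() surface that input processors share.
  async process(args: InputArgs) {
    return this.processor.processInput(args);
  }
}
```

This delegation style matches the other processor pairs in the export list above (e.g. `ModerationProcessor`/`ModerationInputProcessor`, `PIIDetector`/`PIIDetectorInputProcessor`), where a core implementation is kept separate from its pipeline-facing wrapper.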