@mastra/core 0.13.2 → 0.14.0-alpha.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (327)
  1. package/README.md +5 -44
  2. package/dist/agent/agent.types.d.ts +12 -2
  3. package/dist/agent/agent.types.d.ts.map +1 -1
  4. package/dist/agent/index.cjs +14 -14
  5. package/dist/agent/index.d.ts +17 -17
  6. package/dist/agent/index.d.ts.map +1 -1
  7. package/dist/agent/index.js +2 -2
  8. package/dist/agent/input-processor/index.cjs +28 -0
  9. package/dist/agent/input-processor/index.js +3 -0
  10. package/dist/agent/input-processor/processors/index.d.ts +4 -4
  11. package/dist/agent/input-processor/processors/index.d.ts.map +1 -1
  12. package/dist/agent/input-processor/processors/language-detector.d.ts +6 -139
  13. package/dist/agent/input-processor/processors/language-detector.d.ts.map +1 -1
  14. package/dist/agent/input-processor/processors/moderation.d.ts +5 -94
  15. package/dist/agent/input-processor/processors/moderation.d.ts.map +1 -1
  16. package/dist/agent/input-processor/processors/pii-detector.d.ts +6 -164
  17. package/dist/agent/input-processor/processors/pii-detector.d.ts.map +1 -1
  18. package/dist/agent/input-processor/processors/prompt-injection-detector.d.ts +6 -96
  19. package/dist/agent/input-processor/processors/prompt-injection-detector.d.ts.map +1 -1
  20. package/dist/agent/input-processor/processors/unicode-normalizer.d.ts +9 -25
  21. package/dist/agent/input-processor/processors/unicode-normalizer.d.ts.map +1 -1
  22. package/dist/agent/message-list/index.d.ts +138 -39
  23. package/dist/agent/message-list/index.d.ts.map +1 -1
  24. package/dist/agent/message-list/types.d.ts +3 -0
  25. package/dist/agent/message-list/types.d.ts.map +1 -0
  26. package/dist/agent/message-list/utils/ai-v4-v5/core-model-message.d.ts +3 -0
  27. package/dist/agent/message-list/utils/ai-v4-v5/core-model-message.d.ts.map +1 -0
  28. package/dist/agent/message-list/utils/ai-v4-v5/ui-message.d.ts +3 -0
  29. package/dist/agent/message-list/utils/ai-v4-v5/ui-message.d.ts.map +1 -0
  30. package/dist/agent/message-list/utils/ai-v5/tool.d.ts +16 -0
  31. package/dist/agent/message-list/utils/ai-v5/tool.d.ts.map +1 -0
  32. package/dist/agent/types.d.ts +34 -4
  33. package/dist/agent/types.d.ts.map +1 -1
  34. package/dist/ai-tracing/base.d.ts +22 -7
  35. package/dist/ai-tracing/base.d.ts.map +1 -1
  36. package/dist/ai-tracing/default.d.ts +3 -3
  37. package/dist/ai-tracing/default.d.ts.map +1 -1
  38. package/dist/ai-tracing/index.cjs +253 -72
  39. package/dist/ai-tracing/index.cjs.map +1 -1
  40. package/dist/ai-tracing/index.d.ts +1 -0
  41. package/dist/ai-tracing/index.d.ts.map +1 -1
  42. package/dist/ai-tracing/index.js +243 -71
  43. package/dist/ai-tracing/index.js.map +1 -1
  44. package/dist/ai-tracing/no-op.d.ts +39 -7
  45. package/dist/ai-tracing/no-op.d.ts.map +1 -1
  46. package/dist/ai-tracing/registry.d.ts +29 -4
  47. package/dist/ai-tracing/registry.d.ts.map +1 -1
  48. package/dist/ai-tracing/types.d.ts +112 -53
  49. package/dist/ai-tracing/types.d.ts.map +1 -1
  50. package/dist/ai-tracing/utils.d.ts +24 -0
  51. package/dist/ai-tracing/utils.d.ts.map +1 -0
  52. package/dist/base.cjs +2 -2
  53. package/dist/base.js +1 -1
  54. package/dist/bundler/index.cjs +2 -2
  55. package/dist/bundler/index.js +1 -1
  56. package/dist/{chunk-N3VGOJZV.cjs → chunk-4BMFOQDC.cjs} +27 -47
  57. package/dist/chunk-4BMFOQDC.cjs.map +1 -0
  58. package/dist/{chunk-MTRRRTB4.js → chunk-4XDSHUFK.js} +7 -6
  59. package/dist/chunk-4XDSHUFK.js.map +1 -0
  60. package/dist/{chunk-SSJVFUBZ.js → chunk-67L5DRLO.js} +3 -3
  61. package/dist/{chunk-SSJVFUBZ.js.map → chunk-67L5DRLO.js.map} +1 -1
  62. package/dist/{chunk-FUERFM46.js → chunk-6GF5M4GX.js} +3 -3
  63. package/dist/{chunk-FUERFM46.js.map → chunk-6GF5M4GX.js.map} +1 -1
  64. package/dist/{chunk-7E2SNI5D.cjs → chunk-7AXX55C5.cjs} +5 -5
  65. package/dist/chunk-7AXX55C5.cjs.map +1 -0
  66. package/dist/{chunk-6W6LYR7M.js → chunk-7XGDFDZ2.js} +3 -3
  67. package/dist/{chunk-6W6LYR7M.js.map → chunk-7XGDFDZ2.js.map} +1 -1
  68. package/dist/{chunk-R4HT5XUH.cjs → chunk-ASJVWHIK.cjs} +993 -124
  69. package/dist/chunk-ASJVWHIK.cjs.map +1 -0
  70. package/dist/{chunk-ZDZ57MIU.js → chunk-AWILTHBM.js} +3 -3
  71. package/dist/{chunk-ZDZ57MIU.js.map → chunk-AWILTHBM.js.map} +1 -1
  72. package/dist/{chunk-DDYSOZ25.js → chunk-AYXRNQH3.js} +3 -3
  73. package/dist/{chunk-DDYSOZ25.js.map → chunk-AYXRNQH3.js.map} +1 -1
  74. package/dist/{chunk-U2CK6AN5.cjs → chunk-BCCEYWAV.cjs} +4 -4
  75. package/dist/{chunk-U2CK6AN5.cjs.map → chunk-BCCEYWAV.cjs.map} +1 -1
  76. package/dist/{chunk-MH64VYGF.cjs → chunk-CSTWQQ3C.cjs} +14 -13
  77. package/dist/chunk-CSTWQQ3C.cjs.map +1 -0
  78. package/dist/{chunk-FTPL7ILZ.js → chunk-CWZDKGJ6.js} +3 -3
  79. package/dist/{chunk-FTPL7ILZ.js.map → chunk-CWZDKGJ6.js.map} +1 -1
  80. package/dist/{chunk-MVSUZXER.cjs → chunk-D7P76YH6.cjs} +4 -4
  81. package/dist/{chunk-MVSUZXER.cjs.map → chunk-D7P76YH6.cjs.map} +1 -1
  82. package/dist/{chunk-IL5SHDY4.js → chunk-DAMIFZPW.js} +949 -103
  83. package/dist/chunk-DAMIFZPW.js.map +1 -0
  84. package/dist/{chunk-KLXRYVVQ.cjs → chunk-DVI6XHUC.cjs} +4 -4
  85. package/dist/{chunk-KLXRYVVQ.cjs.map → chunk-DVI6XHUC.cjs.map} +1 -1
  86. package/dist/chunk-ELKY3FEM.cjs +14 -0
  87. package/dist/{chunk-IP5NGA2S.cjs.map → chunk-ELKY3FEM.cjs.map} +1 -1
  88. package/dist/{chunk-TOODGJKM.js → chunk-GZVSHXPP.js} +3 -3
  89. package/dist/chunk-GZVSHXPP.js.map +1 -0
  90. package/dist/{chunk-WA3SWCXD.cjs → chunk-I4XP6RAN.cjs} +9 -9
  91. package/dist/{chunk-WA3SWCXD.cjs.map → chunk-I4XP6RAN.cjs.map} +1 -1
  92. package/dist/{chunk-PLVSXEQP.cjs → chunk-IAP4IWKM.cjs} +4 -4
  93. package/dist/{chunk-PLVSXEQP.cjs.map → chunk-IAP4IWKM.cjs.map} +1 -1
  94. package/dist/{chunk-YT4RGZYO.cjs → chunk-KXCUCBEI.cjs} +8 -8
  95. package/dist/{chunk-YT4RGZYO.cjs.map → chunk-KXCUCBEI.cjs.map} +1 -1
  96. package/dist/{chunk-Z74LG5VH.cjs → chunk-LFAAEOQC.cjs} +21 -4
  97. package/dist/chunk-LFAAEOQC.cjs.map +1 -0
  98. package/dist/{chunk-DK7AVDMI.cjs → chunk-LVVAUDVQ.cjs} +4 -4
  99. package/dist/{chunk-DK7AVDMI.cjs.map → chunk-LVVAUDVQ.cjs.map} +1 -1
  100. package/dist/{chunk-HBMZEZTO.js → chunk-ON4S33NP.js} +3 -3
  101. package/dist/{chunk-HBMZEZTO.js.map → chunk-ON4S33NP.js.map} +1 -1
  102. package/dist/{chunk-L6YBPFYF.js → chunk-OPT2L5AM.js} +1089 -81
  103. package/dist/chunk-OPT2L5AM.js.map +1 -0
  104. package/dist/{chunk-N2KMAW6T.js → chunk-P7WF6NQU.js} +20 -4
  105. package/dist/chunk-P7WF6NQU.js.map +1 -0
  106. package/dist/{chunk-KEMW2BER.js → chunk-PY4MHJTE.js} +3 -3
  107. package/dist/{chunk-KEMW2BER.js.map → chunk-PY4MHJTE.js.map} +1 -1
  108. package/dist/{chunk-TZVJV7EV.js → chunk-QSJZLHWA.js} +5 -5
  109. package/dist/{chunk-TZVJV7EV.js.map → chunk-QSJZLHWA.js.map} +1 -1
  110. package/dist/{chunk-DCOKWJ5G.cjs → chunk-RJCNC57P.cjs} +1110 -79
  111. package/dist/chunk-RJCNC57P.cjs.map +1 -0
  112. package/dist/{chunk-LMW44O4V.cjs → chunk-RPV7GQAX.cjs} +4 -4
  113. package/dist/{chunk-LMW44O4V.cjs.map → chunk-RPV7GQAX.cjs.map} +1 -1
  114. package/dist/{chunk-TZOR5M7H.js → chunk-RYCRCJCY.js} +25 -45
  115. package/dist/chunk-RYCRCJCY.js.map +1 -0
  116. package/dist/{chunk-IDDUQR6P.cjs → chunk-V5WKCX3G.cjs} +3 -3
  117. package/dist/chunk-V5WKCX3G.cjs.map +1 -0
  118. package/dist/{chunk-6AR2Z5ZG.js → chunk-X3GXU6TZ.js} +3 -3
  119. package/dist/chunk-X3GXU6TZ.js.map +1 -0
  120. package/dist/deployer/index.cjs +2 -2
  121. package/dist/deployer/index.js +1 -1
  122. package/dist/index.cjs +74 -70
  123. package/dist/index.js +14 -14
  124. package/dist/llm/index.d.ts +2 -1
  125. package/dist/llm/index.d.ts.map +1 -1
  126. package/dist/llm/model/base.types.d.ts +2 -6
  127. package/dist/llm/model/base.types.d.ts.map +1 -1
  128. package/dist/llm/model/index.d.ts +0 -1
  129. package/dist/llm/model/index.d.ts.map +1 -1
  130. package/dist/llm/model/model.d.ts +3 -2
  131. package/dist/llm/model/model.d.ts.map +1 -1
  132. package/dist/llm/model/model.loop.d.ts +25 -0
  133. package/dist/llm/model/model.loop.d.ts.map +1 -0
  134. package/dist/llm/model/model.loop.types.d.ts +39 -0
  135. package/dist/llm/model/model.loop.types.d.ts.map +1 -0
  136. package/dist/llm/model/shared.types.d.ts +8 -0
  137. package/dist/llm/model/shared.types.d.ts.map +1 -0
  138. package/dist/logger/constants.d.ts +1 -1
  139. package/dist/logger/index.cjs +6 -6
  140. package/dist/logger/index.js +1 -1
  141. package/dist/loop/index.cjs +2848 -0
  142. package/dist/loop/index.cjs.map +1 -0
  143. package/dist/loop/index.d.ts +2 -0
  144. package/dist/loop/index.d.ts.map +1 -0
  145. package/dist/loop/index.js +2842 -0
  146. package/dist/loop/index.js.map +1 -0
  147. package/dist/loop/loop.d.ts +5 -0
  148. package/dist/loop/loop.d.ts.map +1 -0
  149. package/dist/loop/telemetry/index.d.ts +36 -0
  150. package/dist/loop/telemetry/index.d.ts.map +1 -0
  151. package/dist/loop/telemetry/noop.d.ts +3 -0
  152. package/dist/loop/telemetry/noop.d.ts.map +1 -0
  153. package/dist/loop/test-utils/fullStream.d.ts +6 -0
  154. package/dist/loop/test-utils/fullStream.d.ts.map +1 -0
  155. package/dist/loop/test-utils/generateText.d.ts +6 -0
  156. package/dist/loop/test-utils/generateText.d.ts.map +1 -0
  157. package/dist/loop/test-utils/mockTracer.d.ts +47 -0
  158. package/dist/loop/test-utils/mockTracer.d.ts.map +1 -0
  159. package/dist/loop/test-utils/options.d.ts +6 -0
  160. package/dist/loop/test-utils/options.d.ts.map +1 -0
  161. package/dist/loop/test-utils/resultObject.d.ts +6 -0
  162. package/dist/loop/test-utils/resultObject.d.ts.map +1 -0
  163. package/dist/loop/test-utils/streamObject.d.ts +6 -0
  164. package/dist/loop/test-utils/streamObject.d.ts.map +1 -0
  165. package/dist/loop/test-utils/telemetry.d.ts +6 -0
  166. package/dist/loop/test-utils/telemetry.d.ts.map +1 -0
  167. package/dist/loop/test-utils/textStream.d.ts +6 -0
  168. package/dist/loop/test-utils/textStream.d.ts.map +1 -0
  169. package/dist/loop/test-utils/toUIMessageStream.d.ts +6 -0
  170. package/dist/loop/test-utils/toUIMessageStream.d.ts.map +1 -0
  171. package/dist/loop/test-utils/tools.d.ts +6 -0
  172. package/dist/loop/test-utils/tools.d.ts.map +1 -0
  173. package/dist/loop/test-utils/utils.d.ts +40 -0
  174. package/dist/loop/test-utils/utils.d.ts.map +1 -0
  175. package/dist/loop/types.d.ts +68 -0
  176. package/dist/loop/types.d.ts.map +1 -0
  177. package/dist/loop/workflow/llm-execution.d.ts +80 -0
  178. package/dist/loop/workflow/llm-execution.d.ts.map +1 -0
  179. package/dist/loop/workflow/outer-llm-step.d.ts +43 -0
  180. package/dist/loop/workflow/outer-llm-step.d.ts.map +1 -0
  181. package/dist/loop/workflow/run-state.d.ts +24 -0
  182. package/dist/loop/workflow/run-state.d.ts.map +1 -0
  183. package/dist/loop/workflow/schema.d.ts +80 -0
  184. package/dist/loop/workflow/schema.d.ts.map +1 -0
  185. package/dist/loop/workflow/stream.d.ts +6 -0
  186. package/dist/loop/workflow/stream.d.ts.map +1 -0
  187. package/dist/loop/workflow/tool-call-step.d.ts +41 -0
  188. package/dist/loop/workflow/tool-call-step.d.ts.map +1 -0
  189. package/dist/mastra/index.cjs +2 -2
  190. package/dist/mastra/index.d.ts +4 -5
  191. package/dist/mastra/index.d.ts.map +1 -1
  192. package/dist/mastra/index.js +1 -1
  193. package/dist/mcp/index.cjs +4 -4
  194. package/dist/mcp/index.js +2 -2
  195. package/dist/memory/index.cjs +4 -4
  196. package/dist/memory/index.js +1 -1
  197. package/dist/network/index.cjs +6 -6
  198. package/dist/network/index.cjs.map +1 -1
  199. package/dist/network/index.js +3 -3
  200. package/dist/network/index.js.map +1 -1
  201. package/dist/network/network.d.ts +8 -7
  202. package/dist/network/network.d.ts.map +1 -1
  203. package/dist/network/vNext/index.cjs +21 -21
  204. package/dist/network/vNext/index.cjs.map +1 -1
  205. package/dist/network/vNext/index.d.ts +7 -7
  206. package/dist/network/vNext/index.d.ts.map +1 -1
  207. package/dist/network/vNext/index.js +7 -7
  208. package/dist/network/vNext/index.js.map +1 -1
  209. package/dist/processors/index.cjs +512 -0
  210. package/dist/processors/index.cjs.map +1 -0
  211. package/dist/processors/index.d.ts +38 -0
  212. package/dist/processors/index.d.ts.map +1 -0
  213. package/dist/processors/index.js +481 -0
  214. package/dist/processors/index.js.map +1 -0
  215. package/dist/processors/processors/batch-parts.d.ts +42 -0
  216. package/dist/processors/processors/batch-parts.d.ts.map +1 -0
  217. package/dist/processors/processors/index.d.ts +10 -0
  218. package/dist/processors/processors/index.d.ts.map +1 -0
  219. package/dist/processors/processors/language-detector.d.ts +151 -0
  220. package/dist/processors/processors/language-detector.d.ts.map +1 -0
  221. package/dist/processors/processors/moderation.d.ts +130 -0
  222. package/dist/processors/processors/moderation.d.ts.map +1 -0
  223. package/dist/processors/processors/pii-detector.d.ts +197 -0
  224. package/dist/processors/processors/pii-detector.d.ts.map +1 -0
  225. package/dist/processors/processors/prompt-injection-detector.d.ts +108 -0
  226. package/dist/processors/processors/prompt-injection-detector.d.ts.map +1 -0
  227. package/dist/processors/processors/structured-output.d.ts +42 -0
  228. package/dist/processors/processors/structured-output.d.ts.map +1 -0
  229. package/dist/processors/processors/system-prompt-scrubber.d.ts +92 -0
  230. package/dist/processors/processors/system-prompt-scrubber.d.ts.map +1 -0
  231. package/dist/processors/processors/token-limiter.d.ts +66 -0
  232. package/dist/processors/processors/token-limiter.d.ts.map +1 -0
  233. package/dist/processors/processors/unicode-normalizer.d.ts +34 -0
  234. package/dist/processors/processors/unicode-normalizer.d.ts.map +1 -0
  235. package/dist/processors/runner.d.ts +41 -0
  236. package/dist/processors/runner.d.ts.map +1 -0
  237. package/dist/relevance/index.cjs +4 -4
  238. package/dist/relevance/index.js +1 -1
  239. package/dist/scores/index.cjs +5 -5
  240. package/dist/scores/index.js +2 -2
  241. package/dist/server/index.cjs +2 -2
  242. package/dist/server/index.js +1 -1
  243. package/dist/storage/domains/operations/base.d.ts.map +1 -1
  244. package/dist/storage/index.cjs +15 -17
  245. package/dist/storage/index.cjs.map +1 -1
  246. package/dist/storage/index.js +4 -6
  247. package/dist/storage/index.js.map +1 -1
  248. package/dist/stream/aisdk/v4/transform.d.ts +26 -0
  249. package/dist/stream/aisdk/v4/transform.d.ts.map +1 -1
  250. package/dist/stream/aisdk/v5/compat.d.ts +159 -0
  251. package/dist/stream/aisdk/v5/compat.d.ts.map +1 -0
  252. package/dist/stream/aisdk/v5/execute.d.ts +29 -0
  253. package/dist/stream/aisdk/v5/execute.d.ts.map +1 -0
  254. package/dist/stream/aisdk/v5/file.d.ts +38 -0
  255. package/dist/stream/aisdk/v5/file.d.ts.map +1 -0
  256. package/dist/stream/aisdk/v5/input.d.ts +15 -0
  257. package/dist/stream/aisdk/v5/input.d.ts.map +1 -0
  258. package/dist/stream/aisdk/v5/object/schema.d.ts +4 -0
  259. package/dist/stream/aisdk/v5/object/schema.d.ts.map +1 -0
  260. package/dist/stream/aisdk/v5/object/stream-object.d.ts +34 -0
  261. package/dist/stream/aisdk/v5/object/stream-object.d.ts.map +1 -0
  262. package/dist/stream/aisdk/v5/output-helpers.d.ts +76 -0
  263. package/dist/stream/aisdk/v5/output-helpers.d.ts.map +1 -0
  264. package/dist/stream/aisdk/v5/output.d.ts +119 -0
  265. package/dist/stream/aisdk/v5/output.d.ts.map +1 -0
  266. package/dist/stream/aisdk/v5/test-utils.d.ts +19 -0
  267. package/dist/stream/aisdk/v5/test-utils.d.ts.map +1 -0
  268. package/dist/stream/aisdk/v5/transform.d.ts +31 -0
  269. package/dist/stream/aisdk/v5/transform.d.ts.map +1 -0
  270. package/dist/stream/base/input.d.ts +1 -1
  271. package/dist/stream/base/output.d.ts +89 -0
  272. package/dist/stream/base/output.d.ts.map +1 -0
  273. package/dist/stream/types.d.ts +30 -0
  274. package/dist/stream/types.d.ts.map +1 -1
  275. package/dist/test-utils/llm-mock.cjs +2 -2
  276. package/dist/test-utils/llm-mock.cjs.map +1 -1
  277. package/dist/test-utils/llm-mock.d.ts +2 -2
  278. package/dist/test-utils/llm-mock.d.ts.map +1 -1
  279. package/dist/test-utils/llm-mock.js +2 -2
  280. package/dist/test-utils/llm-mock.js.map +1 -1
  281. package/dist/tools/tool-builder/builder.d.ts +3 -2
  282. package/dist/tools/tool-builder/builder.d.ts.map +1 -1
  283. package/dist/tools/types.d.ts +9 -0
  284. package/dist/tools/types.d.ts.map +1 -1
  285. package/dist/tts/index.cjs +2 -2
  286. package/dist/tts/index.js +1 -1
  287. package/dist/types.d.ts +2 -0
  288. package/dist/types.d.ts.map +1 -1
  289. package/dist/utils.cjs +19 -15
  290. package/dist/utils.d.ts +3 -2
  291. package/dist/utils.d.ts.map +1 -1
  292. package/dist/utils.js +1 -1
  293. package/dist/vector/index.cjs +2 -2
  294. package/dist/vector/index.js +1 -1
  295. package/dist/voice/index.cjs +4 -4
  296. package/dist/voice/index.js +1 -1
  297. package/dist/workflows/default.d.ts.map +1 -1
  298. package/dist/workflows/index.cjs +10 -10
  299. package/dist/workflows/index.js +1 -1
  300. package/dist/workflows/legacy/index.cjs +22 -22
  301. package/dist/workflows/legacy/index.js +1 -1
  302. package/loop.d.ts +1 -0
  303. package/package.json +16 -1
  304. package/processors.d.ts +1 -0
  305. package/dist/agent/input-processor/processors/index.cjs +0 -28
  306. package/dist/agent/input-processor/processors/index.js +0 -3
  307. package/dist/agent/input-processor/runner.d.ts +0 -4
  308. package/dist/agent/input-processor/runner.d.ts.map +0 -1
  309. package/dist/chunk-6AR2Z5ZG.js.map +0 -1
  310. package/dist/chunk-7E2SNI5D.cjs.map +0 -1
  311. package/dist/chunk-DCOKWJ5G.cjs.map +0 -1
  312. package/dist/chunk-IDDUQR6P.cjs.map +0 -1
  313. package/dist/chunk-IL5SHDY4.js.map +0 -1
  314. package/dist/chunk-IP5NGA2S.cjs +0 -14
  315. package/dist/chunk-L6YBPFYF.js.map +0 -1
  316. package/dist/chunk-MH64VYGF.cjs.map +0 -1
  317. package/dist/chunk-MTRRRTB4.js.map +0 -1
  318. package/dist/chunk-N2KMAW6T.js.map +0 -1
  319. package/dist/chunk-N3VGOJZV.cjs.map +0 -1
  320. package/dist/chunk-R4HT5XUH.cjs.map +0 -1
  321. package/dist/chunk-TOODGJKM.js.map +0 -1
  322. package/dist/chunk-TZOR5M7H.js.map +0 -1
  323. package/dist/chunk-Z74LG5VH.cjs.map +0 -1
  324. package/dist/llm/model/base.d.ts +0 -26
  325. package/dist/llm/model/base.d.ts.map +0 -1
  326. /package/dist/agent/input-processor/{processors/index.cjs.map → index.cjs.map} +0 -0
  327. /package/dist/agent/input-processor/{processors/index.js.map → index.js.map} +0 -0
@@ -0,0 +1,2848 @@
1
+ 'use strict';
2
+
3
+ var chunkCSTWQQ3C_cjs = require('../chunk-CSTWQQ3C.cjs');
4
+ var chunkRJCNC57P_cjs = require('../chunk-RJCNC57P.cjs');
5
+ var chunkKXCUCBEI_cjs = require('../chunk-KXCUCBEI.cjs');
6
+ var chunkV5WKCX3G_cjs = require('../chunk-V5WKCX3G.cjs');
7
+ var aiV5 = require('ai-v5');
8
+ var web = require('stream/web');
9
+ var providerV5 = require('@ai-sdk/provider-v5');
10
+ var api = require('@opentelemetry/api');
11
+ var z = require('zod');
12
+ var providerUtilsV5 = require('@ai-sdk/provider-utils-v5');
13
+ var providerUtils = require('@ai-sdk/provider-utils');
14
+
15
/**
 * CommonJS/ESM interop shim: returns the module as-is when it is a
 * transpiled ES module (`__esModule` flag set), otherwise wraps the
 * CJS export under a `default` key so `mod.default` always works.
 */
function _interopDefault(e) {
  if (e && e.__esModule) {
    return e;
  }
  return { default: e };
}
16
+
17
+ var z__default = /*#__PURE__*/_interopDefault(z);
18
+
19
/**
 * Maps one full-stream chunk (`part`) to its UI-message-stream equivalent.
 *
 * Returns the converted chunk, `undefined` when the chunk should be dropped
 * (e.g. reasoning/sources when not requested, `tool-input-end`, `raw`), or
 * throws on an unknown chunk type. `onError` converts raw errors to text.
 */
function convertFullStreamChunkToUIMessageStream({
  part,
  messageMetadataValue,
  sendReasoning,
  sendSources,
  onError,
  sendStart,
  sendFinish,
  responseMessageId
}) {
  // Optional fields are spread only when present on the incoming part;
  // absent properties read as undefined, so these are safe for every type.
  const providerMeta = part.providerMetadata != null ? { providerMetadata: part.providerMetadata } : {};
  const providerExec = part.providerExecuted != null ? { providerExecuted: part.providerExecuted } : {};
  const dynamicFlag = part.dynamic != null ? { dynamic: part.dynamic } : {};

  switch (part.type) {
    case "text-start":
      return { type: "text-start", id: part.id, ...providerMeta };
    case "text-delta":
      return { type: "text-delta", id: part.id, delta: part.text, ...providerMeta };
    case "text-end":
      return { type: "text-end", id: part.id, ...providerMeta };
    case "reasoning-start":
      return { type: "reasoning-start", id: part.id, ...providerMeta };
    case "reasoning-delta":
      // Reasoning is only forwarded when the caller opted in.
      if (!sendReasoning) return undefined;
      return { type: "reasoning-delta", id: part.id, delta: part.text, ...providerMeta };
    case "reasoning-end":
      return { type: "reasoning-end", id: part.id, ...providerMeta };
    case "file":
      // Files are inlined as data URLs.
      return {
        type: "file",
        mediaType: part.file.mediaType,
        url: `data:${part.file.mediaType};base64,${part.file.base64}`
      };
    case "source":
      // Sources are only forwarded when the caller opted in.
      if (!sendSources) return undefined;
      if (part.sourceType === "url") {
        return {
          type: "source-url",
          sourceId: part.id,
          url: part.url,
          title: part.title,
          ...providerMeta
        };
      }
      if (part.sourceType === "document") {
        return {
          type: "source-document",
          sourceId: part.id,
          mediaType: part.mediaType,
          title: part.title,
          filename: part.filename,
          ...providerMeta
        };
      }
      return undefined;
    case "tool-input-start":
      return {
        type: "tool-input-start",
        toolCallId: part.id,
        toolName: part.toolName,
        ...providerExec,
        ...dynamicFlag
      };
    case "tool-input-delta":
      return { type: "tool-input-delta", toolCallId: part.id, inputTextDelta: part.delta };
    case "tool-call":
      return {
        type: "tool-input-available",
        toolCallId: part.toolCallId,
        toolName: part.toolName,
        input: part.input,
        ...providerExec,
        ...providerMeta,
        ...dynamicFlag
      };
    case "tool-result":
      return {
        type: "tool-output-available",
        toolCallId: part.toolCallId,
        output: part.output,
        ...providerExec,
        ...dynamicFlag
      };
    case "tool-error":
      return {
        type: "tool-output-error",
        toolCallId: part.toolCallId,
        errorText: onError(part.error),
        ...providerExec,
        ...dynamicFlag
      };
    case "error":
      return { type: "error", errorText: onError(part.error) };
    case "start-step":
      return { type: "start-step" };
    case "finish-step":
      return { type: "finish-step" };
    case "start":
      // Message start is gated; metadata/id attach only when provided.
      if (!sendStart) return undefined;
      return {
        type: "start",
        ...(messageMetadataValue != null ? { messageMetadata: messageMetadataValue } : {}),
        ...(responseMessageId != null ? { messageId: responseMessageId } : {})
      };
    case "finish":
      if (!sendFinish) return undefined;
      return {
        type: "finish",
        ...(messageMetadataValue != null ? { messageMetadata: messageMetadataValue } : {})
      };
    case "abort":
      // Abort chunks pass through unchanged.
      return part;
    case "tool-input-end":
    case "raw":
      // Intentionally dropped from the UI stream.
      return undefined;
    default:
      throw new Error(`Unknown chunk type: ${part.type}`);
  }
}
198
/**
 * Picks the UI message id for the response.
 *
 * Returns `undefined` when there are no original messages; reuses the id of
 * a trailing assistant message (continuation); otherwise uses the supplied
 * `responseMessageId` value or factory.
 */
function getResponseUIMessageId({
  originalMessages,
  responseMessageId
}) {
  if (originalMessages == null) {
    return undefined;
  }
  const lastMessage = originalMessages[originalMessages.length - 1];
  if (lastMessage?.role === "assistant") {
    return lastMessage.id;
  }
  if (typeof responseMessageId === "function") {
    return responseMessageId();
  }
  return responseMessageId;
}
208
/**
 * Converts a tool map into the provider wire format and normalizes toolChoice.
 *
 * - Returns `{ tools: undefined, toolChoice: undefined }` when no tools are
 *   configured.
 * - Filters by `activeTools` (allow-list of names) when provided.
 * - Accepts both AI SDK v5 (`inputSchema`) and v4 (`parameters`) tool shapes.
 * - Tools that fail to convert are logged and dropped (best-effort), not fatal.
 *
 * Fix: removed the unreachable `case void 0:` — `toolType` is computed with
 * `?? "function"`, so it can never be undefined.
 */
function prepareToolsAndToolChoice({
  tools,
  toolChoice,
  activeTools
}) {
  if (Object.keys(tools || {}).length === 0) {
    return {
      tools: void 0,
      toolChoice: void 0
    };
  }
  // Restrict to the caller's allow-list when one was supplied.
  const entries = Object.entries(tools || {});
  const filteredTools = activeTools != null ? entries.filter(([name]) => activeTools.includes(name)) : entries;
  return {
    tools: filteredTools.map(([name, tool]) => {
      try {
        // v5 tools carry `inputSchema`; legacy v4 tools carry `parameters`.
        let inputSchema;
        if ("inputSchema" in tool) {
          inputSchema = tool.inputSchema;
        } else if ("parameters" in tool) {
          inputSchema = tool.parameters;
        }
        const sdkTool = aiV5.tool({
          type: "function",
          ...tool,
          inputSchema
        });
        const toolType = sdkTool?.type ?? "function";
        switch (toolType) {
          case "dynamic":
          case "function":
            return {
              type: "function",
              name,
              description: sdkTool.description,
              inputSchema: aiV5.asSchema(sdkTool.inputSchema).jsonSchema,
              providerOptions: sdkTool.providerOptions
            };
          case "provider-defined":
            return {
              type: "provider-defined",
              name,
              // TODO: as any seems wrong here. are there cases where we don't have an id?
              id: sdkTool.id,
              args: sdkTool.args
            };
          default: {
            const exhaustiveCheck = toolType;
            throw new Error(`Unsupported tool type: ${exhaustiveCheck}`);
          }
        }
      } catch (e) {
        // Best-effort conversion: a broken tool is dropped, not fatal.
        console.error("Error preparing tool", e);
        return null;
      }
    }).filter((tool) => tool !== null),
    toolChoice: toolChoice == null ? { type: "auto" } : typeof toolChoice === "string" ? { type: toolChoice } : { type: "tool", toolName: toolChoice.toolName }
  };
}
267
/**
 * A promise whose settlement can happen before anyone asks for it.
 *
 * The underlying Promise is created lazily on first access of `promise`;
 * if `resolve`/`reject` was already called by then, the recorded `status`
 * is replayed into the new Promise. Settling after access goes through the
 * captured executor callbacks.
 */
var DelayedPromise = class {
  // Settlement state, recorded even when no Promise exists yet.
  status = {
    type: "pending"
  };
  _promise;
  _resolve = void 0;
  _reject = void 0;
  get promise() {
    // Lazily build the Promise, replaying any settlement that already happened.
    this._promise ??= new Promise((resolve, reject) => {
      if (this.status.type === "resolved") {
        resolve(this.status.value);
      } else if (this.status.type === "rejected") {
        reject(this.status.error);
      }
      this._resolve = resolve;
      this._reject = reject;
    });
    return this._promise;
  }
  resolve(value) {
    this.status = { type: "resolved", value };
    // Only forward when a Promise was already handed out.
    if (this._promise) {
      this._resolve?.(value);
    }
  }
  reject(error) {
    this.status = { type: "rejected", error };
    if (this._promise) {
      this._reject?.(error);
    }
  }
};
302
/**
 * Derives the JSON schema for structured output.
 *
 * Returns `undefined` for "no-schema" mode or when no schema was given.
 * For "array" output, wraps the item schema in an
 * `{ elements: [...] }` envelope object; otherwise returns the schema as-is.
 */
function getOutputSchema({ schema, output }) {
  // Freeform JSON requested: no schema at all.
  if (output === "no-schema") {
    return void 0;
  }
  const jsonSchema = schema ? aiV5.asSchema(schema).jsonSchema : void 0;
  if (!jsonSchema) {
    return void 0;
  }
  if (output !== "array") {
    return jsonSchema;
  }
  // Array mode: hoist $schema to the envelope, nest the item schema.
  const { $schema, ...itemSchema } = jsonSchema;
  return {
    $schema,
    type: "object",
    properties: {
      elements: { type: "array", items: itemSchema }
    },
    required: ["elements"],
    additionalProperties: false
  };
}
325
/**
 * Decides the model response format from the object options.
 *
 * JSON mode is selected when a schema is given without an explicit output
 * kind, or when output is "object", "array", or "no-schema"; everything
 * else falls back to plain text.
 */
function getResponseFormat(options) {
  const output = options?.output;
  const wantsJson =
    (!output && options?.schema) ||
    output === "object" ||
    output === "array" ||
    output === "no-schema";
  if (!wantsJson) {
    return {
      type: "text"
    };
  }
  return {
    type: "json",
    schema: getOutputSchema({ schema: options?.schema, output }),
    name: options?.schemaName,
    description: options?.schemaDescription
  };
}
338
/**
 * Builds a TransformStream that incrementally parses streamed JSON text
 * into "object" chunks while passing every original chunk through.
 *
 * - No `objectOptions`: pure pass-through.
 * - "array" output: emits the filtered `elements` array whenever it changes.
 * - "no-schema"/default output: emits the parsed object whenever it changes.
 * - On flush, resolves `onFinish` with the last value (or calls `onError`
 *   when nothing could ever be parsed).
 *
 * Fix: the original filtering loop branched on
 * `i === rawElements.length - 1 && parseState !== "successful-parse"` but
 * both branches were identical — collapsed the dead duplication and dropped
 * the now-unused `parseState`.
 */
function createObjectStreamTransformer({
  objectOptions,
  onFinish,
  onError
}) {
  // Raw JSON text accumulated across text-delta chunks.
  let accumulatedText = "";
  // Last parsed value, used to suppress duplicate "object" emissions.
  let previousObject = void 0;
  // Array mode: last emitted filtered element list.
  let previousFilteredArray;
  const responseFormat = getResponseFormat(objectOptions);
  return new web.TransformStream({
    async transform(chunk, controller) {
      // No structured output requested: pass everything through untouched.
      if (!objectOptions) {
        controller.enqueue(chunk);
        return;
      }
      if (responseFormat.type === "json" && chunk.type === "text-delta" && typeof chunk.payload.text === "string") {
        if (objectOptions?.output === "array") {
          accumulatedText += chunk.payload.text;
          const { value: currentObjectJson } = await aiV5.parsePartialJson(accumulatedText);
          if (currentObjectJson !== void 0 && !aiV5.isDeepEqualData(previousObject, currentObjectJson)) {
            const rawElements = currentObjectJson?.elements || [];
            // Keep only non-empty object elements (drops partial scaffolding).
            const filteredElements = [];
            for (let i = 0; i < rawElements.length; i++) {
              const element = rawElements[i];
              if (element && typeof element === "object" && Object.keys(element).length > 0) {
                filteredElements.push(element);
              }
            }
            if (!aiV5.isDeepEqualData(previousFilteredArray, filteredElements)) {
              previousFilteredArray = [...filteredElements];
              controller.enqueue({
                type: "object",
                object: filteredElements
              });
            }
            previousObject = currentObjectJson;
          }
        } else if (objectOptions?.output === "no-schema" || !objectOptions?.output) {
          accumulatedText += chunk.payload.text;
          const { value: currentObjectJson } = await aiV5.parsePartialJson(accumulatedText);
          if (currentObjectJson !== void 0 && typeof currentObjectJson === "object" && !aiV5.isDeepEqualData(previousObject, currentObjectJson)) {
            previousObject = currentObjectJson;
            controller.enqueue({
              type: "object",
              object: currentObjectJson
            });
          }
        }
      }
      controller.enqueue(chunk);
    },
    // TODO: validate against the provided schema,
    // TODO: then call onFinish(data) if valid or call onError(err) if invalid
    // TODO: so that the object promise can be resolved/rejected
    flush() {
      if (responseFormat.type === "json") {
        if (objectOptions?.output === "array") {
          onFinish(previousFilteredArray);
        } else {
          onFinish(previousObject);
        }
        if (!previousObject && !previousFilteredArray) {
          onError("No object generated: could not parse the response.");
        }
      }
    }
  });
}
412
/**
 * Builds a TransformStream turning "object" chunks into a JSON text stream.
 *
 * Non-"object" chunks are dropped. In "array" mode the stream emits "[",
 * then each newly appeared element (comma-prefixed after the first), and a
 * closing "]" on flush. Otherwise each object chunk is stringified whole.
 */
function createJsonTextStreamTransformer(objectOptions) {
  const isArrayMode = objectOptions?.output === "array";
  // Number of array elements already written out.
  let emittedCount = 0;
  let openedArray = false;
  return new web.TransformStream({
    transform(chunk, controller) {
      if (chunk.type !== "object") {
        return;
      }
      if (!isArrayMode) {
        controller.enqueue(JSON.stringify(chunk.object));
        return;
      }
      if (!openedArray) {
        controller.enqueue("[");
        openedArray = true;
      }
      // Emit only the elements that appeared since the previous chunk.
      while (emittedCount < chunk.object.length) {
        const elementJson = JSON.stringify(chunk.object[emittedCount]);
        controller.enqueue(emittedCount > 0 ? `,${elementJson}` : elementJson);
        emittedCount += 1;
      }
    },
    flush(controller) {
      if (openedArray && isArrayMode) {
        controller.enqueue("]");
      }
    }
  });
}
445
+
446
+ // src/stream/aisdk/v5/output-helpers.ts
447
+ var DefaultStepResult = class {
448
+ content;
449
+ finishReason;
450
+ usage;
451
+ warnings;
452
+ request;
453
+ response;
454
+ providerMetadata;
455
+ constructor({
456
+ content,
457
+ finishReason,
458
+ usage,
459
+ warnings,
460
+ request,
461
+ response,
462
+ providerMetadata
463
+ }) {
464
+ this.content = content;
465
+ this.finishReason = finishReason;
466
+ this.usage = usage;
467
+ this.warnings = warnings;
468
+ this.request = request;
469
+ this.response = response;
470
+ this.providerMetadata = providerMetadata;
471
+ }
472
+ get text() {
473
+ return this.content.filter((part) => part.type === "text").map((part) => part.text).join("");
474
+ }
475
+ get reasoning() {
476
+ return this.content.filter((part) => part.type === "reasoning");
477
+ }
478
+ get reasoningText() {
479
+ return this.reasoning.length === 0 ? void 0 : this.reasoning.map((part) => part.text).join("");
480
+ }
481
+ get files() {
482
+ return this.content.filter((part) => part.type === "file").map((part) => part.file);
483
+ }
484
+ get sources() {
485
+ return this.content.filter((part) => part.type === "source");
486
+ }
487
+ get toolCalls() {
488
+ return this.content.filter((part) => part.type === "tool-call");
489
+ }
490
+ get staticToolCalls() {
491
+ return this.toolCalls.filter((toolCall) => toolCall.dynamic === false);
492
+ }
493
+ get dynamicToolCalls() {
494
+ return this.toolCalls.filter((toolCall) => toolCall.dynamic === true);
495
+ }
496
+ get toolResults() {
497
+ return this.content.filter((part) => part.type === "tool-result");
498
+ }
499
+ get staticToolResults() {
500
+ return this.toolResults.filter((toolResult) => toolResult.dynamic === false);
501
+ }
502
+ get dynamicToolResults() {
503
+ return this.toolResults.filter((toolResult) => toolResult.dynamic === true);
504
+ }
505
+ };
506
+ function reasoningDetailsFromMessages(messages) {
507
+ return messages.flatMap((msg) => {
508
+ if (msg.content?.parts && Array.isArray(msg.content.parts)) {
509
+ return msg.content.parts;
510
+ }
511
+ return [];
512
+ }).filter((part) => part.type === `reasoning`).flatMap((part) => {
513
+ return {
514
+ type: "reasoning",
515
+ text: part.reasoning,
516
+ details: part.details
517
+ };
518
+ });
519
+ }
520
+ function transformSteps({ steps }) {
521
+ return steps.map((step) => {
522
+ return new DefaultStepResult({
523
+ content: step.content,
524
+ warnings: step.warnings ?? [],
525
+ providerMetadata: step.providerMetadata,
526
+ finishReason: step.finishReason,
527
+ response: {
528
+ ...step.response
529
+ },
530
+ request: step.request,
531
+ usage: step.usage
532
+ });
533
+ });
534
+ }
535
+
536
+ // src/stream/aisdk/v5/transform.ts
537
+ function convertFullStreamChunkToMastra(value, ctx) {
538
+ switch (value.type) {
539
+ case "response-metadata":
540
+ return {
541
+ type: "response-metadata",
542
+ runId: ctx.runId,
543
+ from: "AGENT",
544
+ payload: value
545
+ };
546
+ case "text-start":
547
+ return {
548
+ type: "text-start",
549
+ runId: ctx.runId,
550
+ from: "AGENT",
551
+ payload: {
552
+ id: value.id,
553
+ providerMetadata: value.providerMetadata
554
+ }
555
+ };
556
+ case "text-delta":
557
+ if (value.delta) {
558
+ return {
559
+ type: "text-delta",
560
+ runId: ctx.runId,
561
+ from: "AGENT",
562
+ payload: {
563
+ id: value.id,
564
+ providerMetadata: value.providerMetadata,
565
+ text: value.delta
566
+ }
567
+ };
568
+ }
569
+ return;
570
+ case "text-end":
571
+ return {
572
+ type: "text-end",
573
+ runId: ctx.runId,
574
+ from: "AGENT",
575
+ payload: value
576
+ };
577
+ case "reasoning-start":
578
+ return {
579
+ type: "reasoning-start",
580
+ runId: ctx.runId,
581
+ from: "AGENT",
582
+ payload: {
583
+ id: value.id,
584
+ providerMetadata: value.providerMetadata
585
+ }
586
+ };
587
+ case "reasoning-delta":
588
+ return {
589
+ type: "reasoning-delta",
590
+ runId: ctx.runId,
591
+ from: "AGENT",
592
+ payload: {
593
+ id: value.id,
594
+ providerMetadata: value.providerMetadata,
595
+ text: value.delta
596
+ }
597
+ };
598
+ case "reasoning-end":
599
+ return {
600
+ type: "reasoning-end",
601
+ runId: ctx.runId,
602
+ from: "AGENT",
603
+ payload: {
604
+ id: value.id,
605
+ providerMetadata: value.providerMetadata
606
+ }
607
+ };
608
+ case "source":
609
+ return {
610
+ type: "source",
611
+ runId: ctx.runId,
612
+ from: "AGENT",
613
+ payload: {
614
+ id: value.id,
615
+ sourceType: value.sourceType,
616
+ title: value.title,
617
+ mimeType: value.sourceType === "document" ? value.mediaType : void 0,
618
+ filename: value.sourceType === "document" ? value.filename : void 0,
619
+ url: value.sourceType === "url" ? value.url : void 0,
620
+ providerMetadata: value.providerMetadata
621
+ }
622
+ };
623
+ case "file":
624
+ return {
625
+ type: "file",
626
+ runId: ctx.runId,
627
+ from: "AGENT",
628
+ payload: {
629
+ data: value.data,
630
+ base64: typeof value.data === "string" ? value.data : void 0,
631
+ mimeType: value.mediaType
632
+ }
633
+ };
634
+ case "tool-call":
635
+ return {
636
+ type: "tool-call",
637
+ runId: ctx.runId,
638
+ from: "AGENT",
639
+ payload: {
640
+ toolCallId: value.toolCallId,
641
+ toolName: value.toolName,
642
+ args: value.input ? JSON.parse(value.input) : void 0,
643
+ providerExecuted: value.providerExecuted,
644
+ providerMetadata: value.providerMetadata
645
+ }
646
+ };
647
+ case "tool-result":
648
+ return {
649
+ type: "tool-result",
650
+ runId: ctx.runId,
651
+ from: "AGENT",
652
+ payload: {
653
+ toolCallId: value.toolCallId,
654
+ toolName: value.toolName,
655
+ result: value.result,
656
+ isError: value.isError,
657
+ providerExecuted: value.providerExecuted,
658
+ providerMetadata: value.providerMetadata
659
+ }
660
+ };
661
+ case "tool-input-start":
662
+ return {
663
+ type: "tool-call-input-streaming-start",
664
+ runId: ctx.runId,
665
+ from: "AGENT",
666
+ payload: {
667
+ toolCallId: value.id,
668
+ toolName: value.toolName,
669
+ providerExecuted: value.providerExecuted,
670
+ providerMetadata: value.providerMetadata
671
+ }
672
+ };
673
+ case "tool-input-delta":
674
+ if (value.delta) {
675
+ return {
676
+ type: "tool-call-delta",
677
+ runId: ctx.runId,
678
+ from: "AGENT",
679
+ payload: {
680
+ argsTextDelta: value.delta,
681
+ toolCallId: value.id,
682
+ providerMetadata: value.providerMetadata
683
+ }
684
+ };
685
+ }
686
+ return;
687
+ case "tool-input-end":
688
+ return {
689
+ type: "tool-call-input-streaming-end",
690
+ runId: ctx.runId,
691
+ from: "AGENT",
692
+ payload: {
693
+ toolCallId: value.id,
694
+ providerMetadata: value.providerMetadata
695
+ }
696
+ };
697
+ case "finish":
698
+ const { finishReason, usage, providerMetadata, messages, ...rest } = value;
699
+ return {
700
+ type: "finish",
701
+ runId: ctx.runId,
702
+ from: "AGENT",
703
+ payload: {
704
+ stepResult: {
705
+ reason: value.finishReason
706
+ },
707
+ output: {
708
+ usage: {
709
+ ...value.usage ?? {},
710
+ totalTokens: value?.usage?.totalTokens ?? (value.usage?.inputTokens ?? 0) + (value.usage?.outputTokens ?? 0)
711
+ }
712
+ },
713
+ metadata: {
714
+ providerMetadata: value.providerMetadata
715
+ },
716
+ messages,
717
+ ...rest
718
+ }
719
+ };
720
+ case "error":
721
+ return {
722
+ type: "error",
723
+ runId: ctx.runId,
724
+ from: "AGENT",
725
+ payload: value
726
+ };
727
+ case "raw":
728
+ return {
729
+ type: "raw",
730
+ runId: ctx.runId,
731
+ from: "AGENT",
732
+ payload: value.rawValue
733
+ };
734
+ }
735
+ return;
736
+ }
737
+ function convertMastraChunkToAISDKv5({
738
+ chunk,
739
+ mode = "stream"
740
+ }) {
741
+ switch (chunk.type) {
742
+ case "start":
743
+ return {
744
+ type: "start"
745
+ };
746
+ case "step-start":
747
+ const { messageId: _messageId, ...rest } = chunk.payload;
748
+ return {
749
+ type: "start-step",
750
+ request: rest.request,
751
+ warnings: rest.warnings
752
+ };
753
+ case "raw":
754
+ return {
755
+ type: "raw",
756
+ rawValue: chunk.payload
757
+ };
758
+ case "finish": {
759
+ return {
760
+ type: "finish",
761
+ finishReason: chunk.payload.stepResult.reason,
762
+ totalUsage: chunk.payload.output.usage
763
+ };
764
+ }
765
+ case "reasoning-start":
766
+ return {
767
+ type: "reasoning-start",
768
+ id: chunk.payload.id,
769
+ providerMetadata: chunk.payload.providerMetadata
770
+ };
771
+ case "reasoning-delta":
772
+ return {
773
+ type: "reasoning-delta",
774
+ id: chunk.payload.id,
775
+ text: chunk.payload.text,
776
+ providerMetadata: chunk.payload.providerMetadata
777
+ };
778
+ case "reasoning-signature":
779
+ throw new Error('AISDKv5 chunk type "reasoning-signature" not supported');
780
+ // return {
781
+ // type: 'reasoning-signature' as const,
782
+ // id: chunk.payload.id,
783
+ // signature: chunk.payload.signature,
784
+ // };
785
+ case "redacted-reasoning":
786
+ throw new Error('AISDKv5 chunk type "redacted-reasoning" not supported');
787
+ // return {
788
+ // type: 'redacted-reasoning',
789
+ // id: chunk.payload.id,
790
+ // data: chunk.payload.data,
791
+ // };
792
+ case "reasoning-end":
793
+ return {
794
+ type: "reasoning-end",
795
+ id: chunk.payload.id,
796
+ providerMetadata: chunk.payload.providerMetadata
797
+ };
798
+ case "source":
799
+ return {
800
+ type: "source",
801
+ id: chunk.payload.id,
802
+ sourceType: chunk.payload.sourceType,
803
+ filename: chunk.payload.filename,
804
+ mediaType: chunk.payload.mimeType,
805
+ title: chunk.payload.title,
806
+ url: chunk.payload.url,
807
+ providerMetadata: chunk.payload.providerMetadata
808
+ };
809
+ case "file":
810
+ if (mode === "generate") {
811
+ return {
812
+ type: "file",
813
+ file: new chunkRJCNC57P_cjs.DefaultGeneratedFile({
814
+ data: chunk.payload.data,
815
+ mediaType: chunk.payload.mimeType
816
+ })
817
+ };
818
+ }
819
+ return {
820
+ type: "file",
821
+ file: new chunkRJCNC57P_cjs.DefaultGeneratedFileWithType({
822
+ data: chunk.payload.data,
823
+ mediaType: chunk.payload.mimeType
824
+ })
825
+ };
826
+ case "tool-call":
827
+ return {
828
+ type: "tool-call",
829
+ toolCallId: chunk.payload.toolCallId,
830
+ providerMetadata: chunk.payload.providerMetadata,
831
+ providerExecuted: chunk.payload.providerExecuted,
832
+ toolName: chunk.payload.toolName,
833
+ input: chunk.payload.args
834
+ };
835
+ case "tool-call-input-streaming-start":
836
+ return {
837
+ type: "tool-input-start",
838
+ id: chunk.payload.toolCallId,
839
+ toolName: chunk.payload.toolName,
840
+ dynamic: !!chunk.payload.dynamic,
841
+ providerMetadata: chunk.payload.providerMetadata,
842
+ providerExecuted: chunk.payload.providerExecuted
843
+ };
844
+ case "tool-call-input-streaming-end":
845
+ return {
846
+ type: "tool-input-end",
847
+ id: chunk.payload.toolCallId,
848
+ providerMetadata: chunk.payload.providerMetadata
849
+ };
850
+ case "tool-call-delta":
851
+ return {
852
+ type: "tool-input-delta",
853
+ id: chunk.payload.toolCallId,
854
+ delta: chunk.payload.argsTextDelta,
855
+ providerMetadata: chunk.payload.providerMetadata
856
+ };
857
+ case "step-finish": {
858
+ const { request: _request, providerMetadata, ...rest2 } = chunk.payload.metadata;
859
+ return {
860
+ type: "finish-step",
861
+ response: rest2,
862
+ usage: chunk.payload.output.usage,
863
+ // ?
864
+ finishReason: chunk.payload.stepResult.reason,
865
+ providerMetadata
866
+ };
867
+ }
868
+ case "text-delta":
869
+ return {
870
+ type: "text-delta",
871
+ id: chunk.payload.id,
872
+ text: chunk.payload.text,
873
+ providerMetadata: chunk.payload.providerMetadata
874
+ };
875
+ case "text-end":
876
+ return {
877
+ type: "text-end",
878
+ id: chunk.payload.id,
879
+ providerMetadata: chunk.payload.providerMetadata
880
+ };
881
+ case "text-start":
882
+ return {
883
+ type: "text-start",
884
+ id: chunk.payload.id,
885
+ providerMetadata: chunk.payload.providerMetadata
886
+ };
887
+ case "tool-result":
888
+ return {
889
+ type: "tool-result",
890
+ input: chunk.payload.args,
891
+ toolCallId: chunk.payload.toolCallId,
892
+ providerExecuted: chunk.payload.providerExecuted,
893
+ toolName: chunk.payload.toolName,
894
+ output: chunk.payload.result
895
+ // providerMetadata: chunk.payload.providerMetadata, // AI v5 types don't show this?
896
+ };
897
+ case "tool-error":
898
+ return {
899
+ type: "tool-error",
900
+ error: chunk.payload.error,
901
+ input: chunk.payload.args,
902
+ toolCallId: chunk.payload.toolCallId,
903
+ providerExecuted: chunk.payload.providerExecuted,
904
+ toolName: chunk.payload.toolName
905
+ // providerMetadata: chunk.payload.providerMetadata, // AI v5 types don't show this?
906
+ };
907
+ case "abort":
908
+ return {
909
+ type: "abort"
910
+ };
911
+ case "error":
912
+ return {
913
+ type: "error",
914
+ error: chunk.payload.error
915
+ };
916
+ }
917
+ }
918
+
919
+ // src/stream/aisdk/v5/output.ts
920
+ var AISDKV5OutputStream = class {
921
+ #modelOutput;
922
+ #options;
923
+ #messageList;
924
+ constructor({
925
+ modelOutput,
926
+ options,
927
+ messageList
928
+ }) {
929
+ this.#modelOutput = modelOutput;
930
+ this.#options = options;
931
+ this.#messageList = messageList;
932
+ }
933
+ toTextStreamResponse(init) {
934
+ return aiV5.createTextStreamResponse({
935
+ textStream: this.#modelOutput.textStream,
936
+ ...init
937
+ });
938
+ }
939
+ toUIMessageStreamResponse({
940
+ // @ts-ignore
941
+ generateMessageId,
942
+ originalMessages,
943
+ sendFinish,
944
+ sendReasoning,
945
+ sendSources,
946
+ onError,
947
+ sendStart,
948
+ messageMetadata,
949
+ onFinish,
950
+ ...init
951
+ } = {}) {
952
+ return aiV5.createUIMessageStreamResponse({
953
+ stream: this.toUIMessageStream({
954
+ // @ts-ignore
955
+ generateMessageId,
956
+ originalMessages,
957
+ sendFinish,
958
+ sendReasoning,
959
+ sendSources,
960
+ onError,
961
+ sendStart,
962
+ messageMetadata,
963
+ onFinish
964
+ }),
965
+ ...init
966
+ });
967
+ }
968
+ toUIMessageStream({
969
+ // @ts-ignore
970
+ generateMessageId,
971
+ originalMessages,
972
+ sendFinish = true,
973
+ sendReasoning = true,
974
+ sendSources = false,
975
+ onError = providerV5.getErrorMessage,
976
+ sendStart = true,
977
+ messageMetadata,
978
+ onFinish
979
+ } = {}) {
980
+ const responseMessageId = generateMessageId != null ? getResponseUIMessageId({
981
+ originalMessages,
982
+ responseMessageId: generateMessageId
983
+ }) : void 0;
984
+ return aiV5.createUIMessageStream({
985
+ onError,
986
+ onFinish,
987
+ generateId: () => responseMessageId ?? generateMessageId?.(),
988
+ execute: async ({ writer }) => {
989
+ for await (const part of this.fullStream) {
990
+ const messageMetadataValue = messageMetadata?.({ part });
991
+ const partType = part.type;
992
+ const transformedChunk = convertFullStreamChunkToUIMessageStream({
993
+ part,
994
+ sendReasoning,
995
+ messageMetadataValue,
996
+ sendSources,
997
+ sendStart,
998
+ sendFinish,
999
+ responseMessageId,
1000
+ onError
1001
+ });
1002
+ if (transformedChunk) {
1003
+ writer.write(transformedChunk);
1004
+ }
1005
+ if (messageMetadataValue != null && partType !== "start" && partType !== "finish") {
1006
+ writer.write({
1007
+ type: "message-metadata",
1008
+ messageMetadata: messageMetadataValue
1009
+ });
1010
+ }
1011
+ }
1012
+ }
1013
+ });
1014
+ }
1015
+ async consumeStream(options) {
1016
+ try {
1017
+ await aiV5.consumeStream({
1018
+ stream: this.fullStream.pipeThrough(
1019
+ new web.TransformStream({
1020
+ transform(chunk, controller) {
1021
+ controller.enqueue(chunk);
1022
+ }
1023
+ })
1024
+ ),
1025
+ onError: options?.onError
1026
+ });
1027
+ } catch (error) {
1028
+ console.log("consumeStream error", error);
1029
+ options?.onError?.(error);
1030
+ }
1031
+ }
1032
+ get sources() {
1033
+ return this.#modelOutput.sources.map((source) => {
1034
+ return convertMastraChunkToAISDKv5({
1035
+ chunk: source
1036
+ });
1037
+ });
1038
+ }
1039
+ get files() {
1040
+ return this.#modelOutput.files.map((file) => {
1041
+ if (file.type === "file") {
1042
+ return convertMastraChunkToAISDKv5({
1043
+ chunk: file
1044
+ })?.file;
1045
+ }
1046
+ return;
1047
+ }).filter(Boolean);
1048
+ }
1049
+ get generateTextFiles() {
1050
+ return this.#modelOutput.files.map((file) => {
1051
+ if (file.type === "file") {
1052
+ return convertMastraChunkToAISDKv5({
1053
+ chunk: file,
1054
+ mode: "generate"
1055
+ })?.file;
1056
+ }
1057
+ return;
1058
+ }).filter(Boolean);
1059
+ }
1060
+ get toolCalls() {
1061
+ return this.#modelOutput.toolCalls.map((toolCall) => {
1062
+ return convertMastraChunkToAISDKv5({
1063
+ chunk: toolCall
1064
+ });
1065
+ });
1066
+ }
1067
+ get toolResults() {
1068
+ return this.#modelOutput.toolResults.map((toolResult) => {
1069
+ return convertMastraChunkToAISDKv5({
1070
+ chunk: toolResult
1071
+ });
1072
+ });
1073
+ }
1074
+ get reasoningText() {
1075
+ return this.#modelOutput.reasoningText;
1076
+ }
1077
+ get reasoning() {
1078
+ return this.#modelOutput.reasoningDetails;
1079
+ }
1080
+ get response() {
1081
+ return {
1082
+ ...this.#modelOutput.response
1083
+ };
1084
+ }
1085
+ get steps() {
1086
+ return transformSteps({ steps: this.#modelOutput.steps });
1087
+ }
1088
+ get generateTextSteps() {
1089
+ return transformSteps({ steps: this.#modelOutput.steps });
1090
+ }
1091
+ get content() {
1092
+ return this.#messageList.get.response.aiV5.modelContent();
1093
+ }
1094
+ get fullStream() {
1095
+ let startEvent;
1096
+ let hasStarted = false;
1097
+ const fullStream = this.#modelOutput.fullStream;
1098
+ return fullStream.pipeThrough(
1099
+ new web.TransformStream({
1100
+ transform(chunk, controller) {
1101
+ if (chunk.type === "step-start" && !startEvent) {
1102
+ startEvent = convertMastraChunkToAISDKv5({
1103
+ chunk
1104
+ });
1105
+ return;
1106
+ } else if (chunk.type !== "error") {
1107
+ hasStarted = true;
1108
+ }
1109
+ if (startEvent && hasStarted) {
1110
+ controller.enqueue(startEvent);
1111
+ startEvent = void 0;
1112
+ }
1113
+ const transformedChunk = convertMastraChunkToAISDKv5({
1114
+ chunk
1115
+ });
1116
+ if (transformedChunk) {
1117
+ controller.enqueue(transformedChunk);
1118
+ }
1119
+ }
1120
+ })
1121
+ );
1122
+ }
1123
+ async getFullOutput() {
1124
+ await this.consumeStream();
1125
+ let object;
1126
+ if (this.#options.objectOptions) {
1127
+ object = await this.object;
1128
+ }
1129
+ return {
1130
+ text: this.#modelOutput.text,
1131
+ usage: this.#modelOutput.usage,
1132
+ steps: this.generateTextSteps,
1133
+ finishReason: this.#modelOutput.finishReason,
1134
+ warnings: this.#modelOutput.warnings,
1135
+ providerMetadata: this.#modelOutput.providerMetadata,
1136
+ request: this.#modelOutput.request,
1137
+ reasoning: this.reasoning,
1138
+ reasoningText: this.reasoningText,
1139
+ toolCalls: this.toolCalls,
1140
+ toolResults: this.toolResults,
1141
+ sources: this.sources,
1142
+ files: this.generateTextFiles,
1143
+ response: this.response,
1144
+ content: this.content,
1145
+ totalUsage: this.#modelOutput.totalUsage,
1146
+ ...object ? { object } : {}
1147
+ // experimental_output: // TODO
1148
+ };
1149
+ }
1150
+ get object() {
1151
+ return this.#modelOutput.object;
1152
+ }
1153
+ };
1154
+
1155
+ // src/stream/base/output.ts
1156
+ var MastraModelOutput = class extends chunkKXCUCBEI_cjs.MastraBase {
1157
+ #aisdkv5;
1158
+ #baseStream;
1159
+ #bufferedSteps = [];
1160
+ #bufferedReasoningDetails = {};
1161
+ #bufferedByStep = {
1162
+ text: "",
1163
+ reasoning: "",
1164
+ sources: [],
1165
+ files: [],
1166
+ toolCalls: [],
1167
+ toolResults: [],
1168
+ msgCount: 0
1169
+ };
1170
+ #bufferedText = [];
1171
+ #bufferedTextChunks = {};
1172
+ #bufferedSources = [];
1173
+ #bufferedReasoning = [];
1174
+ #bufferedFiles = [];
1175
+ #toolCallArgsDeltas = {};
1176
+ #toolCallDeltaIdNameMap = {};
1177
+ #toolCalls = [];
1178
+ #toolResults = [];
1179
+ #warnings = [];
1180
+ #finishReason;
1181
+ #providerMetadata;
1182
+ #response;
1183
+ #request;
1184
+ #usageCount = {};
1185
+ #objectPromise = new DelayedPromise();
1186
+ runId;
1187
+ #options;
1188
+ constructor({
1189
+ stream,
1190
+ options,
1191
+ model,
1192
+ messageList
1193
+ }) {
1194
+ super({ component: "LLM", name: "MastraModelOutput" });
1195
+ this.#options = options;
1196
+ this.runId = options.runId;
1197
+ const self = this;
1198
+ this.#baseStream = stream.pipeThrough(
1199
+ new web.TransformStream({
1200
+ transform: async (chunk, controller) => {
1201
+ switch (chunk.type) {
1202
+ case "source":
1203
+ self.#bufferedSources.push(chunk);
1204
+ self.#bufferedByStep.sources.push(chunk);
1205
+ break;
1206
+ case "text-delta":
1207
+ self.#bufferedText.push(chunk.payload.text);
1208
+ self.#bufferedByStep.text += chunk.payload.text;
1209
+ if (chunk.payload.id) {
1210
+ const ary = self.#bufferedTextChunks[chunk.payload.id] ?? [];
1211
+ ary.push(chunk.payload.text);
1212
+ self.#bufferedTextChunks[chunk.payload.id] = ary;
1213
+ }
1214
+ break;
1215
+ case "tool-call-input-streaming-start":
1216
+ self.#toolCallDeltaIdNameMap[chunk.payload.toolCallId] = chunk.payload.toolName;
1217
+ break;
1218
+ case "tool-call-delta":
1219
+ if (!self.#toolCallArgsDeltas[chunk.payload.toolCallId]) {
1220
+ self.#toolCallArgsDeltas[chunk.payload.toolCallId] = [];
1221
+ }
1222
+ self.#toolCallArgsDeltas?.[chunk.payload.toolCallId]?.push(chunk.payload.argsTextDelta);
1223
+ chunk.payload.toolName ||= self.#toolCallDeltaIdNameMap[chunk.payload.toolCallId];
1224
+ break;
1225
+ case "file":
1226
+ self.#bufferedFiles.push(chunk);
1227
+ self.#bufferedByStep.files.push(chunk);
1228
+ break;
1229
+ case "reasoning-start":
1230
+ self.#bufferedReasoningDetails[chunk.payload.id] = {
1231
+ type: "reasoning",
1232
+ text: "",
1233
+ providerMetadata: chunk.payload.providerMetadata
1234
+ };
1235
+ break;
1236
+ case "reasoning-delta": {
1237
+ self.#bufferedReasoning.push(chunk.payload.text);
1238
+ self.#bufferedByStep.reasoning += chunk.payload.text;
1239
+ const bufferedReasoning = self.#bufferedReasoningDetails[chunk.payload.id];
1240
+ if (bufferedReasoning) {
1241
+ bufferedReasoning.text += chunk.payload.text;
1242
+ if (chunk.payload.providerMetadata) {
1243
+ bufferedReasoning.providerMetadata = chunk.payload.providerMetadata;
1244
+ }
1245
+ }
1246
+ break;
1247
+ }
1248
+ case "reasoning-end": {
1249
+ const bufferedReasoning = self.#bufferedReasoningDetails[chunk.payload.id];
1250
+ if (chunk.payload.providerMetadata && bufferedReasoning) {
1251
+ bufferedReasoning.providerMetadata = chunk.payload.providerMetadata;
1252
+ }
1253
+ break;
1254
+ }
1255
+ case "tool-call":
1256
+ self.#toolCalls.push(chunk);
1257
+ self.#bufferedByStep.toolCalls.push(chunk);
1258
+ if (chunk.payload?.output?.from === "AGENT" && chunk.payload?.output?.type === "finish") {
1259
+ const finishPayload = chunk.payload?.output.payload;
1260
+ self.updateUsageCount(finishPayload.usage);
1261
+ }
1262
+ break;
1263
+ case "tool-result":
1264
+ self.#toolResults.push(chunk);
1265
+ self.#bufferedByStep.toolResults.push(chunk);
1266
+ break;
1267
+ case "step-finish": {
1268
+ self.updateUsageCount(chunk.payload.output.usage);
1269
+ self.#warnings = chunk.payload.stepResult.warnings;
1270
+ if (chunk.payload.metadata.request) {
1271
+ self.#request = chunk.payload.metadata.request;
1272
+ }
1273
+ const reasoningDetails = reasoningDetailsFromMessages(
1274
+ chunk.payload.messages.all.slice(self.#bufferedByStep.msgCount)
1275
+ );
1276
+ const { providerMetadata, request, ...otherMetadata } = chunk.payload.metadata;
1277
+ const stepResult = {
1278
+ stepType: self.#bufferedSteps.length === 0 ? "initial" : "tool-result",
1279
+ text: self.#bufferedByStep.text,
1280
+ reasoning: self.#bufferedByStep.reasoning || void 0,
1281
+ sources: self.#bufferedByStep.sources,
1282
+ files: self.#bufferedByStep.files,
1283
+ toolCalls: self.#bufferedByStep.toolCalls,
1284
+ toolResults: self.#bufferedByStep.toolResults,
1285
+ warnings: self.warnings,
1286
+ reasoningDetails,
1287
+ providerMetadata,
1288
+ experimental_providerMetadata: providerMetadata,
1289
+ isContinued: chunk.payload.stepResult.isContinued,
1290
+ logprobs: chunk.payload.stepResult.logprobs,
1291
+ finishReason: chunk.payload.stepResult.reason,
1292
+ response: { ...otherMetadata, messages: chunk.payload.messages.nonUser },
1293
+ request,
1294
+ usage: chunk.payload.output.usage,
1295
+ // TODO: need to be able to pass a step id into this fn to get the content for a specific step id
1296
+ content: messageList.get.response.aiV5.stepContent()
1297
+ };
1298
+ await options?.onStepFinish?.(stepResult);
1299
+ self.#bufferedSteps.push(stepResult);
1300
+ self.#bufferedByStep = {
1301
+ text: "",
1302
+ reasoning: "",
1303
+ sources: [],
1304
+ files: [],
1305
+ toolCalls: [],
1306
+ toolResults: [],
1307
+ msgCount: chunk.payload.messages.all.length
1308
+ };
1309
+ break;
1310
+ }
1311
+ case "finish":
1312
+ if (chunk.payload.stepResult.reason) {
1313
+ self.#finishReason = chunk.payload.stepResult.reason;
1314
+ }
1315
+ if (chunk.payload.metadata) {
1316
+ const { providerMetadata, request, ...otherMetadata } = chunk.payload.metadata;
1317
+ self.#providerMetadata = chunk.payload.metadata.providerMetadata;
1318
+ self.#response = {
1319
+ ...otherMetadata,
1320
+ messages: chunk.payload.messages?.all ?? []
1321
+ };
1322
+ }
1323
+ this.populateUsageCount(chunk.payload.output.usage);
1324
+ chunk.payload.output.usage = self.totalUsage;
1325
+ const baseFinishStep = self.#bufferedSteps[self.#bufferedSteps.length - 1];
1326
+ if (baseFinishStep) {
1327
+ let onFinishPayload = {};
1328
+ messageList.add(chunk.payload.messages.all, "response");
1329
+ if (model.version === "v2") {
1330
+ onFinishPayload = {
1331
+ text: baseFinishStep.text,
1332
+ warnings: baseFinishStep.warnings ?? [],
1333
+ finishReason: chunk.payload.stepResult.reason,
1334
+ // TODO: we should add handling for step IDs in message list so you can retrieve step content by step id. And on finish should the content here be from all steps?
1335
+ content: messageList.get.response.aiV5.stepContent(),
1336
+ request: this.request,
1337
+ reasoning: this.aisdk.v5.reasoning,
1338
+ reasoningText: !this.aisdk.v5.reasoningText ? void 0 : this.aisdk.v5.reasoningText,
1339
+ sources: this.aisdk.v5.sources,
1340
+ files: this.aisdk.v5.files,
1341
+ steps: transformSteps({ steps: this.#bufferedSteps }),
1342
+ response: { ...this.response, messages: messageList.get.response.aiV5.model() },
1343
+ usage: chunk.payload.output.usage,
1344
+ totalUsage: self.totalUsage,
1345
+ toolCalls: this.aisdk.v5.toolCalls,
1346
+ toolResults: this.aisdk.v5.toolResults,
1347
+ staticToolCalls: this.aisdk.v5.toolCalls.filter((toolCall) => toolCall.dynamic === false),
1348
+ staticToolResults: this.aisdk.v5.toolResults.filter(
1349
+ (toolResult) => toolResult.dynamic === false
1350
+ ),
1351
+ dynamicToolCalls: this.aisdk.v5.toolCalls.filter((toolCall) => toolCall.dynamic === true),
1352
+ dynamicToolResults: this.aisdk.v5.toolResults.filter(
1353
+ (toolResult) => toolResult.dynamic === true
1354
+ )
1355
+ };
1356
+ }
1357
+ await options?.onFinish?.(onFinishPayload);
1358
+ }
1359
+ if (options?.rootSpan) {
1360
+ options.rootSpan.setAttributes({
1361
+ ...baseFinishStep?.usage.reasoningTokens ? {
1362
+ "stream.usage.reasoningTokens": baseFinishStep.usage.reasoningTokens
1363
+ } : {},
1364
+ ...baseFinishStep?.usage.totalTokens ? {
1365
+ "stream.usage.totalTokens": baseFinishStep.usage.totalTokens
1366
+ } : {},
1367
+ ...baseFinishStep?.usage.inputTokens ? {
1368
+ "stream.usage.inputTokens": baseFinishStep.usage.inputTokens
1369
+ } : {},
1370
+ ...baseFinishStep?.usage.outputTokens ? {
1371
+ "stream.usage.outputTokens": baseFinishStep.usage.outputTokens
1372
+ } : {},
1373
+ ...baseFinishStep?.usage.cachedInputTokens ? {
1374
+ "stream.usage.cachedInputTokens": baseFinishStep.usage.cachedInputTokens
1375
+ } : {},
1376
+ ...baseFinishStep?.providerMetadata ? { "stream.response.providerMetadata": JSON.stringify(baseFinishStep?.providerMetadata) } : {},
1377
+ ...baseFinishStep?.finishReason ? { "stream.response.finishReason": baseFinishStep?.finishReason } : {},
1378
+ ...options?.telemetry_settings?.recordOutputs !== false ? { "stream.response.text": baseFinishStep?.text } : {},
1379
+ ...baseFinishStep?.toolCalls && options?.telemetry_settings?.recordOutputs !== false ? {
1380
+ "stream.response.toolCalls": JSON.stringify(
1381
+ baseFinishStep?.toolCalls?.map((chunk2) => {
1382
+ return {
1383
+ type: "tool-call",
1384
+ toolCallId: chunk2.payload.toolCallId,
1385
+ args: chunk2.payload.args,
1386
+ toolName: chunk2.payload.toolName
1387
+ };
1388
+ })
1389
+ )
1390
+ } : {}
1391
+ });
1392
+ options.rootSpan.end();
1393
+ }
1394
+ break;
1395
+ }
1396
+ controller.enqueue(chunk);
1397
+ }
1398
+ })
1399
+ );
1400
+ this.#aisdkv5 = new AISDKV5OutputStream({
1401
+ modelOutput: this,
1402
+ messageList,
1403
+ options: {
1404
+ toolCallStreaming: options?.toolCallStreaming,
1405
+ objectOptions: options?.objectOptions
1406
+ }
1407
+ });
1408
+ }
1409
+ get text() {
1410
+ return this.#bufferedText.join("");
1411
+ }
1412
+ get reasoning() {
1413
+ return this.#bufferedReasoning.join("");
1414
+ }
1415
+ get reasoningText() {
1416
+ return this.reasoning;
1417
+ }
1418
+ get reasoningDetails() {
1419
+ return Object.values(this.#bufferedReasoningDetails || {});
1420
+ }
1421
+ get sources() {
1422
+ return this.#bufferedSources;
1423
+ }
1424
+ get files() {
1425
+ return this.#bufferedFiles;
1426
+ }
1427
+ get steps() {
1428
+ return this.#bufferedSteps;
1429
+ }
1430
+ teeStream() {
1431
+ const [stream1, stream2] = this.#baseStream.tee();
1432
+ this.#baseStream = stream2;
1433
+ return stream1;
1434
+ }
1435
+ get fullStream() {
1436
+ const self = this;
1437
+ let fullStream = this.teeStream();
1438
+ return fullStream.pipeThrough(
1439
+ createObjectStreamTransformer({
1440
+ objectOptions: self.#options.objectOptions,
1441
+ onFinish: (data) => self.#objectPromise.resolve(data),
1442
+ onError: (error) => self.#objectPromise.reject(error)
1443
+ })
1444
+ ).pipeThrough(
1445
+ new web.TransformStream({
1446
+ transform(chunk, controller) {
1447
+ if (chunk.type === "raw" && !self.#options.includeRawChunks) {
1448
+ return;
1449
+ }
1450
+ controller.enqueue(chunk);
1451
+ }
1452
+ })
1453
+ );
1454
+ }
1455
+ get finishReason() {
1456
+ return this.#finishReason;
1457
+ }
1458
+ get toolCalls() {
1459
+ return this.#toolCalls;
1460
+ }
1461
+ get toolResults() {
1462
+ return this.#toolResults;
1463
+ }
1464
+ get usage() {
1465
+ return this.#usageCount;
1466
+ }
1467
+ get warnings() {
1468
+ return this.#warnings;
1469
+ }
1470
+ get providerMetadata() {
1471
+ return this.#providerMetadata;
1472
+ }
1473
+ get response() {
1474
+ return this.#response;
1475
+ }
1476
+ get request() {
1477
+ return this.#request;
1478
+ }
1479
+ updateUsageCount(usage) {
1480
+ if (!usage) {
1481
+ return;
1482
+ }
1483
+ for (const [key, value] of Object.entries(usage)) {
1484
+ this.#usageCount[key] = (this.#usageCount[key] ?? 0) + (value ?? 0);
1485
+ }
1486
+ }
1487
+ populateUsageCount(usage) {
1488
+ if (!usage) {
1489
+ return;
1490
+ }
1491
+ for (const [key, value] of Object.entries(usage)) {
1492
+ if (!this.#usageCount[key]) {
1493
+ this.#usageCount[key] = value;
1494
+ }
1495
+ }
1496
+ }
1497
+ async consumeStream(options) {
1498
+ try {
1499
+ await aiV5.consumeStream({
1500
+ stream: this.fullStream.pipeThrough(
1501
+ new web.TransformStream({
1502
+ transform(chunk, controller) {
1503
+ controller.enqueue(chunk);
1504
+ }
1505
+ })
1506
+ ),
1507
+ onError: options?.onError
1508
+ });
1509
+ } catch (error) {
1510
+ console.log("consumeStream error", error);
1511
+ options?.onError?.(error);
1512
+ }
1513
+ }
1514
// Consume the entire stream, then assemble the aggregated result object.
// When objectOptions were supplied, also await the structured-object promise
// so the returned `object` field is resolved.
async getFullOutput() {
  await this.consumeStream();
  let object;
  if (this.#options.objectOptions) {
    object = await this.object;
  }
  return {
    text: this.text,
    usage: this.usage,
    steps: this.steps,
    finishReason: this.finishReason,
    warnings: this.warnings,
    providerMetadata: this.providerMetadata,
    request: this.request,
    reasoning: this.reasoning,
    reasoningText: this.reasoningText,
    toolCalls: this.toolCalls,
    toolResults: this.toolResults,
    sources: this.sources,
    files: this.files,
    response: this.response,
    totalUsage: this.totalUsage,
    object
    // experimental_output: // TODO
  };
}
1540
+ get totalUsage() {
1541
+ let total = 0;
1542
+ for (const [key, value] of Object.entries(this.#usageCount)) {
1543
+ if (key !== "totalTokens" && value && !key.startsWith("cached")) {
1544
+ total += value;
1545
+ }
1546
+ }
1547
+ return {
1548
+ ...this.#usageCount,
1549
+ totalTokens: total
1550
+ };
1551
+ }
1552
// Namespaced access to the AI SDK compatibility wrapper(s).
get aisdk() {
  return {
    v5: this.#aisdkv5
  };
}
1557
// Stream of partial structured objects extracted from "object" chunks.
// Only valid when the output was configured with objectOptions.
get objectStream() {
  const self = this;
  if (!self.#options.objectOptions) {
    throw new Error("objectStream requires objectOptions");
  }
  return this.fullStream.pipeThrough(
    new web.TransformStream({
      transform(chunk, controller) {
        if (chunk.type === "object") {
          controller.enqueue(chunk.object);
        }
      }
    })
  );
}
1572
// Stream of individual array elements from partial "object" chunks: each
// partial array is compared against a cursor so every element is emitted
// exactly once, as soon as it first appears.
get elementStream() {
  let publishedElements = 0;
  const self = this;
  if (!self.#options.objectOptions) {
    throw new Error("elementStream requires objectOptions");
  }
  return this.fullStream.pipeThrough(
    new web.TransformStream({
      transform(chunk, controller) {
        switch (chunk.type) {
          case "object": {
            const array = chunk.object;
            if (Array.isArray(array)) {
              // Emit only the elements not yet published from earlier partials.
              for (; publishedElements < array.length; publishedElements++) {
                controller.enqueue(array[publishedElements]);
              }
            }
            break;
          }
        }
      }
    })
  );
}
1596
// Stream of plain text. In JSON response-format mode the full stream is run
// through the JSON text transformer; otherwise text-delta payloads are
// extracted from a fresh tee of the base stream.
get textStream() {
  const self = this;
  if (self.#options.objectOptions) {
    const responseFormat = getResponseFormat(self.#options.objectOptions);
    if (responseFormat?.type === "json") {
      return this.fullStream.pipeThrough(createJsonTextStreamTransformer(self.#options.objectOptions));
    }
  }
  return this.teeStream().pipeThrough(
    new web.TransformStream({
      transform(chunk, controller) {
        if (chunk.type === "text-delta") {
          controller.enqueue(chunk.payload.text);
        }
      }
    })
  );
}
1614
// Promise for the final structured object; settled by the object-stream
// transformer installed in fullStream.
get object() {
  return this.#objectPromise.promise;
}
1617
+ };
1618
+
1619
// src/loop/telemetry/noop.ts
// Inert OpenTelemetry stand-ins used when tracing is disabled: nothing is
// recorded and every mutator is a chainable no-op.
var noopSpanContext = {
  traceId: "",
  spanId: "",
  traceFlags: 0
};
var noopSpan = {
  spanContext() {
    return noopSpanContext;
  },
  isRecording() {
    return false;
  }
};
// Every remaining Span method simply returns the span so calls can chain.
for (const method of [
  "setAttribute",
  "setAttributes",
  "addEvent",
  "addLink",
  "addLinks",
  "setStatus",
  "updateName",
  "end",
  "recordException"
]) {
  noopSpan[method] = function () {
    return this;
  };
}
var noopTracer = {
  startSpan() {
    return noopSpan;
  },
  // Mirrors the OTel overloads: the callback may arrive as the 2nd, 3rd or
  // 4th argument; invoke the first function found with the noop span.
  startActiveSpan(name, arg1, arg2, arg3) {
    for (const candidate of [arg1, arg2, arg3]) {
      if (typeof candidate === "function") {
        return candidate(noopSpan);
      }
    }
  }
};
1676
+
1677
// src/loop/telemetry/index.ts
// Resolve the tracer for this run: the noop tracer when telemetry is
// disabled, an explicitly supplied tracer, or the global "mastra" tracer.
function getTracer({ isEnabled = false, tracer } = {}) {
  if (!isEnabled) {
    return noopTracer;
  }
  return tracer || api.trace.getTracer("mastra");
}
1690
// Build the standard operation-name attributes for a telemetry span.
// "operation.name" appends the functionId when one is present (non-nullish);
// "resource.name" is included only for a truthy functionId.
function assembleOperationName({ operationId, telemetry }) {
  const functionId = telemetry?.functionId;
  const attributes = {
    "mastra.operationId": operationId,
    "operation.name": functionId != null ? `${operationId} ${functionId}` : `${operationId}`
  };
  if (functionId) {
    attributes["resource.name"] = functionId;
  }
  return attributes;
}
1700
// Flatten model info, call settings, telemetry metadata and request headers
// into a single span-attribute map. Undefined header values are skipped.
function getTelemetryAttributes({ model, settings, telemetry, headers }) {
  const attributes = {
    "aisdk.model.provider": model.provider,
    "aisdk.model.id": model.modelId
  };
  // settings:
  for (const [key, value] of Object.entries(settings)) {
    attributes[`stream.settings.${key}`] = value;
  }
  // add metadata as attributes:
  for (const [key, value] of Object.entries(telemetry?.metadata ?? {})) {
    attributes[`stream.telemetry.metadata.${key}`] = value;
  }
  // request headers
  for (const [key, value] of Object.entries(headers ?? {})) {
    if (value !== void 0) {
      attributes[`stream.request.headers.${key}`] = value;
    }
  }
  return attributes;
}
1728
// Create the root telemetry span for a stream invocation, tagged with the
// operation name plus model/settings/metadata/header attributes.
function getRootSpan({ operationId, model, modelSettings, telemetry_settings, headers }) {
  const tracer = getTracer({
    isEnabled: telemetry_settings?.isEnabled,
    tracer: telemetry_settings?.tracer
  });
  const attributes = {
    ...assembleOperationName({
      operationId,
      telemetry: telemetry_settings
    }),
    ...getTelemetryAttributes({
      model: {
        modelId: model.modelId,
        provider: model.provider
      },
      // Default mirrors the AI SDK's default retry count when no settings given.
      settings: modelSettings ?? { maxRetries: 2 },
      telemetry: telemetry_settings,
      headers
    })
  };
  const rootSpan = tracer.startSpan(operationId).setAttributes(attributes);
  return { rootSpan };
}
1761
+
1762
// src/stream/base/input.ts
// Base adapter that wraps a provider stream factory in a ReadableStream:
// once the provider stream is created, its warnings/request/response are
// reported via onResult, then subclass transform() maps chunks into the
// controller. Any failure is propagated as a stream error.
var MastraModelInput = class extends chunkKXCUCBEI_cjs.MastraBase {
  initialize({ runId, createStream, onResult }) {
    return new ReadableStream({
      start: async (controller) => {
        try {
          const modelStream = await createStream();
          onResult({
            warnings: modelStream.warnings,
            request: modelStream.request,
            rawResponse: modelStream.rawResponse || modelStream.response || {}
          });
          await this.transform({
            runId,
            stream: modelStream.stream,
            controller
          });
          controller.close();
        } catch (error) {
          controller.error(error);
        }
      }
    });
  }
};
1789
+
1790
// src/stream/aisdk/v5/input.ts
// Adapts an AI SDK v5 model stream into Mastra's internal chunk format.
var AISDKV5InputStream = class extends MastraModelInput {
  constructor({ component, name }) {
    super({ component, name });
  }
  // Convert each raw v5 chunk and forward it; chunks with no Mastra
  // equivalent (converter returns a falsy value) are dropped.
  async transform({ runId, stream, controller }) {
    for await (const chunk of stream) {
      const mastraChunk = convertFullStreamChunkToMastra(chunk, { runId });
      if (mastraChunk) {
        controller.enqueue(mastraChunk);
      }
    }
  }
};
1808
+
1809
// src/stream/aisdk/v5/execute.ts
// Kick off a doStream call against an AI SDK v5 model and expose it as a
// Mastra-format ReadableStream via AISDKV5InputStream. If stream creation
// fails, a single-chunk error stream is returned so downstream consumers
// observe the failure through the normal chunk pipeline.
function execute({
  runId,
  model,
  providerOptions,
  inputMessages,
  tools,
  toolChoice,
  options,
  onResult,
  modelStreamSpan,
  telemetry_settings,
  includeRawChunks,
  modelSettings,
  objectOptions
}) {
  const v5 = new AISDKV5InputStream({
    component: "LLM",
    name: model.modelId
  });
  const toolsAndToolChoice = prepareToolsAndToolChoice({
    tools,
    toolChoice,
    activeTools: options?.activeTools
  });
  // Record prepared tool definitions on the span unless output recording is
  // explicitly disabled.
  if (modelStreamSpan && toolsAndToolChoice?.tools?.length && telemetry_settings?.recordOutputs !== false) {
    modelStreamSpan.setAttributes({
      "stream.prompt.tools": toolsAndToolChoice?.tools?.map((tool) => JSON.stringify(tool))
    });
  }
  const stream = v5.initialize({
    runId,
    onResult,
    createStream: async () => {
      try {
        const stream2 = await model.doStream({
          ...toolsAndToolChoice,
          prompt: inputMessages,
          providerOptions,
          abortSignal: options?.abortSignal,
          includeRawChunks,
          // Only request a structured response format when objectOptions given.
          responseFormat: objectOptions ? getResponseFormat(objectOptions) : void 0,
          ...modelSettings
        });
        return stream2;
      } catch (error) {
        console.error("Error creating stream", error);
        // NOTE(review): abort errors are only logged here, then fall through
        // to the same error-stream fallback as any other failure — confirm
        // whether aborts should short-circuit differently.
        if (providerUtils.isAbortError(error) && options?.abortSignal?.aborted) {
          console.log("Abort error", error);
        }
        // Fallback: a stream that emits one "error" chunk and closes.
        return {
          stream: new ReadableStream({
            start: async (controller) => {
              controller.enqueue({
                type: "error",
                error
              });
              controller.close();
            }
          }),
          warnings: [],
          request: {},
          rawResponse: {}
        };
      }
    }
  });
  return stream;
}
1878
+
1879
// src/loop/workflow/run-state.ts
// Mutable per-run state for the agentic loop, held behind a private field
// and updated exclusively through shallow merges.
var AgenticRunState = class {
  #state;
  constructor({ _internal, model }) {
    const responseMetadata = {
      id: _internal?.generateId?.(),
      timestamp: _internal?.currentDate?.(),
      modelId: model.modelId,
      headers: void 0
    };
    this.#state = {
      responseMetadata,
      isReasoning: false,
      isStreaming: false,
      providerOptions: void 0,
      hasToolCallStreaming: false,
      hasErrored: false,
      reasoningDeltas: [],
      textDeltas: [],
      stepResult: void 0
    };
  }
  // Shallow-merge a partial update into the current state snapshot.
  setState(state) {
    this.#state = Object.assign({}, this.#state, state);
  }
  get state() {
    return this.#state;
  }
};
1910
// Schema for the data handed between iterations of the agentic loop
// (message ids, message snapshots, step output and metadata).
var llmIterationOutputSchema = z__default.default.object({
  messageId: z__default.default.string(),
  messages: z__default.default.object({
    all: z__default.default.array(z__default.default.any()),
    user: z__default.default.array(z__default.default.any()),
    nonUser: z__default.default.array(z__default.default.any())
  }),
  output: z__default.default.any(),
  metadata: z__default.default.any(),
  stepResult: z__default.default.any().optional()
});
// Input to the tool-call step: one tool invocation request.
var toolCallInputSchema = z__default.default.object({
  toolCallId: z__default.default.string(),
  toolName: z__default.default.string(),
  args: z__default.default.any(),
  providerMetadata: z__default.default.any()
});
// Output of the tool-call step: the request plus its result and/or error.
var toolCallOutputSchema = toolCallInputSchema.extend({
  result: z__default.default.any(),
  error: z__default.default.any().optional()
});
1931
+
1932
// src/loop/workflow/llm-execution.ts
/**
 * Drain the model's output stream chunk-by-chunk: maintain run state
 * (reasoning/text buffers, response metadata, step result), append finished
 * content to the message list, forward chunks to the workflow controller and
 * to the caller's onChunk callback.
 *
 * Fix: the raw-chunk guard near the bottom previously `return`ed, which
 * aborted the ENTIRE stream-processing loop on the first raw chunk instead
 * of just skipping the onChunk callback for it. (fullStream already filters
 * raw chunks when includeRawChunks is false, so the branch was effectively
 * dead — the restructure is behavior-safe and also avoids converting a chunk
 * that would be discarded.)
 */
async function processOutputStream({
  tools,
  messageId,
  messageList,
  outputStream,
  runState,
  options,
  controller,
  responseFromModel,
  includeRawChunks
}) {
  for await (const chunk of outputStream.fullStream) {
    if (!chunk) {
      continue;
    }
    // Structured-object chunks are handled by the object pipeline, not here.
    if (chunk.type == "object") {
      continue;
    }
    // Leaving a reasoning run: flush buffered reasoning deltas as one part.
    if (chunk.type !== "reasoning-delta" && chunk.type !== "reasoning-signature" && chunk.type !== "redacted-reasoning" && runState.state.isReasoning) {
      if (runState.state.reasoningDeltas.length) {
        messageList.add(
          {
            id: messageId,
            role: "assistant",
            content: [
              {
                type: "reasoning",
                text: runState.state.reasoningDeltas.join(""),
                signature: chunk.payload.signature,
                providerOptions: chunk.payload.providerMetadata ?? runState.state.providerOptions
              }
            ]
          },
          "response"
        );
      }
      runState.setState({
        isReasoning: false,
        reasoningDeltas: []
      });
    }
    // Leaving a text run: flush buffered text deltas as one part.
    if (chunk.type !== "text-delta" && chunk.type !== "tool-call" && runState.state.isStreaming) {
      if (runState.state.textDeltas.length) {
        messageList.add(
          {
            id: messageId,
            role: "assistant",
            content: [
              chunk.payload.providerMetadata ?? runState.state.providerOptions ? {
                type: "text",
                text: runState.state.textDeltas.join(""),
                providerOptions: chunk.payload.providerMetadata ?? runState.state.providerOptions
              } : {
                type: "text",
                text: runState.state.textDeltas.join("")
              }
            ]
          },
          "response"
        );
      }
      runState.setState({
        isStreaming: false,
        textDeltas: []
      });
    }
    switch (chunk.type) {
      case "response-metadata":
        runState.setState({
          responseMetadata: {
            id: chunk.payload.id,
            timestamp: chunk.payload.timestamp,
            modelId: chunk.payload.modelId,
            headers: chunk.payload.headers
          }
        });
        break;
      case "text-delta": {
        const textDeltasFromState = runState.state.textDeltas;
        textDeltasFromState.push(chunk.payload.text);
        runState.setState({
          textDeltas: textDeltasFromState,
          isStreaming: true
        });
        controller.enqueue(chunk);
        break;
      }
      case "tool-call-input-streaming-start": {
        // Resolve the tool by key, falling back to matching its `id` field.
        const tool = tools?.[chunk.payload.toolName] || Object.values(tools || {})?.find((tool2) => `id` in tool2 && tool2.id === chunk.payload.toolName);
        if (tool && "onInputStart" in tool) {
          try {
            await tool?.onInputStart?.({
              toolCallId: chunk.payload.toolCallId,
              messages: messageList.get.input.aiV5.model()?.map((message) => ({
                role: message.role,
                content: message.content
              })),
              abortSignal: options?.abortSignal
            });
          } catch (error) {
            console.error("Error calling onInputStart", error);
          }
        }
        controller.enqueue(chunk);
        break;
      }
      case "tool-call-delta": {
        const tool = tools?.[chunk.payload.toolName] || Object.values(tools || {})?.find((tool2) => `id` in tool2 && tool2.id === chunk.payload.toolName);
        if (tool && "onInputDelta" in tool) {
          try {
            await tool?.onInputDelta?.({
              inputTextDelta: chunk.payload.argsTextDelta,
              toolCallId: chunk.payload.toolCallId,
              messages: messageList.get.input.aiV5.model()?.map((message) => ({
                role: message.role,
                content: message.content
              })),
              abortSignal: options?.abortSignal
            });
          } catch (error) {
            console.error("Error calling onInputDelta", error);
          }
        }
        controller.enqueue(chunk);
        break;
      }
      case "reasoning-start": {
        runState.setState({
          providerOptions: chunk.payload.providerMetadata ?? runState.state.providerOptions
        });
        // Redacted reasoning carries no text: record an empty reasoning part.
        if (Object.values(chunk.payload.providerMetadata || {}).find((v) => v?.redactedData)) {
          messageList.add(
            {
              id: messageId,
              role: "assistant",
              content: [
                {
                  type: "reasoning",
                  text: "",
                  providerOptions: chunk.payload.providerMetadata ?? runState.state.providerOptions
                }
              ]
            },
            "response"
          );
          controller.enqueue(chunk);
          break;
        }
        controller.enqueue(chunk);
        break;
      }
      case "reasoning-delta": {
        const reasoningDeltasFromState = runState.state.reasoningDeltas;
        reasoningDeltasFromState.push(chunk.payload.text);
        runState.setState({
          isReasoning: true,
          reasoningDeltas: reasoningDeltasFromState,
          providerOptions: chunk.payload.providerMetadata ?? runState.state.providerOptions
        });
        controller.enqueue(chunk);
        break;
      }
      case "file":
        messageList.add(
          {
            id: messageId,
            role: "assistant",
            content: [
              {
                type: "file",
                data: chunk.payload.data,
                mimeType: chunk.payload.mimeType
              }
            ]
          },
          "response"
        );
        controller.enqueue(chunk);
        break;
      case "source":
        messageList.add(
          {
            id: messageId,
            role: "assistant",
            content: {
              format: 2,
              parts: [
                {
                  type: "source",
                  source: {
                    sourceType: "url",
                    id: chunk.payload.id,
                    url: chunk.payload.url,
                    title: chunk.payload.title,
                    providerMetadata: chunk.payload.providerMetadata
                  }
                }
              ]
            },
            createdAt: /* @__PURE__ */ new Date()
          },
          "response"
        );
        controller.enqueue(chunk);
        break;
      case "finish":
        // Capture the step result; the finish chunk itself is not forwarded
        // here (step-finish is emitted by the outer loop).
        runState.setState({
          providerOptions: chunk.payload.metadata.providerMetadata,
          stepResult: {
            reason: chunk.payload.reason,
            logprobs: chunk.payload.logprobs,
            warnings: responseFromModel.warnings,
            totalUsage: chunk.payload.totalUsage,
            headers: responseFromModel.rawResponse?.headers,
            messageId,
            isContinued: !["stop", "error"].includes(chunk.payload.reason),
            request: responseFromModel.request
          }
        });
        break;
      case "error":
        // Caller-initiated aborts are not treated as errors.
        if (providerUtilsV5.isAbortError(chunk.payload.error) && options?.abortSignal?.aborted) {
          break;
        }
        runState.setState({
          hasErrored: true
        });
        controller.enqueue(chunk);
        runState.setState({
          stepResult: {
            isContinued: false,
            reason: "error"
          }
        });
        await options?.onError?.({ error: chunk.payload.error });
        break;
      default:
        controller.enqueue(chunk);
    }
    // Forward selected chunk types to the caller's onChunk callback in AI
    // SDK v5 format. Raw chunks are only forwarded when opted in.
    if ([
      "text-delta",
      "reasoning-delta",
      "source",
      "tool-call",
      "tool-call-input-streaming-start",
      "tool-call-delta",
      "raw"
    ].includes(chunk.type)) {
      if (chunk.type !== "raw" || includeRawChunks) {
        const transformedChunk = convertMastraChunkToAISDKv5({
          chunk
        });
        await options?.onChunk?.({ chunk: transformedChunk });
      }
    }
    // Stop consuming the stream once an error has been recorded.
    if (runState.state.hasErrored) {
      break;
    }
  }
}
2194
/**
 * Build the workflow step that performs one LLM invocation: start the model
 * stream, process its output into run state and the message list, record
 * tool-call messages, and return the iteration result (including abort and
 * error handling).
 *
 * Fix: removed a stray debug `console.log("Starting LLM Execution Step")`
 * left in library code, and switched the catch-path log to console.error.
 */
function createLLMExecutionStep({
  model,
  _internal,
  messageId,
  runId,
  modelStreamSpan,
  telemetry_settings,
  tools,
  toolChoice,
  messageList,
  includeRawChunks,
  modelSettings,
  providerOptions,
  options,
  toolCallStreaming,
  controller,
  objectOptions
}) {
  return chunkCSTWQQ3C_cjs.createStep({
    id: "llm-execution",
    inputSchema: llmIterationOutputSchema,
    outputSchema: llmIterationOutputSchema,
    execute: async ({ inputData, bail }) => {
      const runState = new AgenticRunState({
        _internal,
        model
      });
      let modelResult;
      let warnings;
      let request;
      let rawResponse;
      switch (model.specificationVersion) {
        case "v2": {
          modelResult = execute({
            runId,
            model,
            providerOptions,
            inputMessages: messageList.get.all.aiV5.llmPrompt(),
            tools,
            toolChoice,
            options,
            modelSettings,
            telemetry_settings,
            includeRawChunks,
            objectOptions,
            // Capture warnings/request/response as soon as the provider
            // stream is created, and announce the step start downstream.
            onResult: ({
              warnings: warningsFromStream,
              request: requestFromStream,
              rawResponse: rawResponseFromStream
            }) => {
              warnings = warningsFromStream;
              request = requestFromStream || {};
              rawResponse = rawResponseFromStream;
              controller.enqueue({
                runId,
                from: "AGENT",
                type: "step-start",
                payload: {
                  request: request || {},
                  warnings: [],
                  messageId
                }
              });
            },
            modelStreamSpan
          });
          break;
        }
        default: {
          throw new Error(`Unsupported model version: ${model.specificationVersion}`);
        }
      }
      const outputStream = new MastraModelOutput({
        model: {
          modelId: model.modelId,
          provider: model.provider,
          version: model.specificationVersion
        },
        stream: modelResult,
        messageList,
        options: {
          runId,
          rootSpan: modelStreamSpan,
          toolCallStreaming,
          telemetry_settings,
          includeRawChunks,
          objectOptions
        }
      });
      try {
        await processOutputStream({
          outputStream,
          includeRawChunks,
          tools,
          messageId,
          messageList,
          runState,
          options,
          controller,
          responseFromModel: {
            warnings,
            request,
            rawResponse
          }
        });
      } catch (error) {
        console.error("Error in LLM Execution Step", error);
        // Caller-initiated aborts bail out of the loop with a partial result.
        if (providerUtilsV5.isAbortError(error) && options?.abortSignal?.aborted) {
          await options?.onAbort?.({
            steps: inputData?.output?.steps ?? []
          });
          controller.enqueue({ type: "abort", runId, from: "AGENT", payload: {} });
          const usage2 = outputStream.usage;
          const responseMetadata2 = runState.state.responseMetadata;
          const text2 = outputStream.text;
          return bail({
            messageId,
            stepResult: {
              reason: "abort",
              warnings,
              isContinued: false
            },
            metadata: {
              providerMetadata: providerOptions,
              ...responseMetadata2,
              headers: rawResponse?.headers,
              request
            },
            output: {
              text: text2,
              toolCalls: [],
              usage: usage2 ?? inputData.output?.usage,
              steps: []
            },
            messages: {
              all: messageList.get.all.v3(),
              user: messageList.get.input.v3(),
              nonUser: messageList.get.response.v3()
            }
          });
        }
        runState.setState({
          hasErrored: true,
          stepResult: {
            isContinued: false,
            reason: "error"
          }
        });
      }
      // Record any tool calls the model made as an assistant message.
      const toolCalls = outputStream.toolCalls?.map((chunk) => {
        return chunk.payload;
      });
      if (toolCalls.length > 0) {
        const assistantContent = [
          ...toolCalls.map((toolCall) => {
            return {
              type: "tool-call",
              toolCallId: toolCall.toolCallId,
              toolName: toolCall.toolName,
              args: toolCall.args
            };
          })
        ];
        messageList.add(
          {
            id: messageId,
            role: "assistant",
            content: assistantContent
          },
          "response"
        );
      }
      const finishReason = runState?.state?.stepResult?.reason ?? outputStream.finishReason;
      const hasErrored = runState.state.hasErrored;
      const usage = outputStream.usage;
      const responseMetadata = runState.state.responseMetadata;
      const text = outputStream.text;
      // Accumulate this iteration's step onto the running list of steps.
      const steps = inputData.output?.steps || [];
      steps.push(
        new DefaultStepResult({
          warnings: outputStream.warnings,
          providerMetadata: providerOptions,
          finishReason: runState.state.stepResult?.reason,
          content: messageList.get.response.aiV5.modelContent(),
          // @ts-ignore this is how it worked internally for transformResponse which was removed TODO: how should this actually work?
          response: { ...responseMetadata, messages: messageList.get.response.aiV5.model() },
          request,
          usage: outputStream.usage
        })
      );
      const messages = {
        all: messageList.get.all.aiV5.model(),
        user: messageList.get.input.aiV5.model(),
        nonUser: messageList.get.response.aiV5.model()
      };
      return {
        messageId,
        stepResult: {
          reason: hasErrored ? "error" : finishReason,
          warnings,
          isContinued: !["stop", "error"].includes(finishReason)
        },
        metadata: {
          providerMetadata: runState.state.providerOptions,
          ...responseMetadata,
          headers: rawResponse?.headers,
          request
        },
        output: {
          text,
          toolCalls,
          usage: usage ?? inputData.output?.usage,
          steps
        },
        messages
      };
    }
  });
}
2414
+
2415
// src/loop/workflow/tool-call-step.ts
/**
 * Build the workflow step that executes a single tool call, invoking the
 * tool's onInputAvailable hook and wrapping execution in a telemetry span.
 * Tool failures are returned as `{ error, ... }` rather than thrown so the
 * loop can surface them as tool-error chunks.
 *
 * Fix: span.end() was only called on the success path, leaking the span
 * whenever tool execution threw; it now runs in a finally block.
 */
function createToolCallStep({ tools, messageList, options, telemetry_settings }) {
  return chunkCSTWQQ3C_cjs.createStep({
    id: "toolCallStep",
    inputSchema: toolCallInputSchema,
    outputSchema: toolCallOutputSchema,
    execute: async ({ inputData }) => {
      // Resolve the tool by key, falling back to matching its `id` field.
      const tool = tools?.[inputData.toolName] || Object.values(tools || {})?.find((tool2) => `id` in tool2 && tool2.id === inputData.toolName);
      if (!tool) {
        throw new Error(`Tool ${inputData.toolName} not found`);
      }
      if (tool && "onInputAvailable" in tool) {
        try {
          await tool?.onInputAvailable?.({
            toolCallId: inputData.toolCallId,
            input: inputData.args,
            messages: messageList.get.input.aiV5.model(),
            abortSignal: options?.abortSignal
          });
        } catch (error) {
          console.error("Error calling onInputAvailable", error);
        }
      }
      // Tools without an execute function pass through unchanged.
      if (!tool.execute) {
        return inputData;
      }
      const tracer = getTracer({
        isEnabled: telemetry_settings?.isEnabled,
        tracer: telemetry_settings?.tracer
      });
      const span = tracer.startSpan("mastra.stream.toolCall").setAttributes({
        ...assembleOperationName({
          operationId: "mastra.stream.toolCall",
          telemetry: telemetry_settings
        }),
        "stream.toolCall.toolName": inputData.toolName,
        "stream.toolCall.toolCallId": inputData.toolCallId,
        "stream.toolCall.args": JSON.stringify(inputData.args)
      });
      try {
        const result = await tool.execute(inputData.args, {
          abortSignal: options?.abortSignal,
          toolCallId: inputData.toolCallId,
          messages: messageList.get.input.aiV5.model()
        });
        span.setAttributes({
          "stream.toolCall.result": JSON.stringify(result)
        });
        return { result, ...inputData };
      } catch (error) {
        span.setStatus({
          // 2 is OpenTelemetry SpanStatusCode.ERROR
          code: 2,
          message: error?.message ?? error
        });
        span.recordException(error);
        return {
          error,
          ...inputData
        };
      } finally {
        // End the span on both success and failure.
        span.end();
      }
    }
  });
}
2484
+
2485
// src/loop/workflow/outer-llm-step.ts
/**
 * Compose one iteration of the agentic loop as a sub-workflow:
 * llm-execution -> (map tool calls) -> toolCallStep per call -> mapping step
 * that folds tool results back into the messages/result for the next turn.
 */
function createOuterLLMWorkflow({
  model,
  telemetry_settings,
  _internal,
  modelStreamSpan,
  ...rest
}) {
  const llmExecutionStep = createLLMExecutionStep({
    model,
    _internal,
    modelStreamSpan,
    telemetry_settings,
    ...rest
  });
  const toolCallStep = createToolCallStep({
    telemetry_settings,
    ...rest
  });
  const messageList = rest.messageList;
  const llmMappingStep = chunkCSTWQQ3C_cjs.createStep({
    id: "llmExecutionMappingStep",
    inputSchema: z__default.default.array(toolCallOutputSchema),
    outputSchema: llmIterationOutputSchema,
    execute: async ({ inputData, getStepResult, bail }) => {
      const initialResult = getStepResult(llmExecutionStep);
      // No tool produced a result (also true for an empty tool-call list):
      // emit tool-error chunks / messages for any failures, then stop looping.
      if (inputData?.every((toolCall) => toolCall?.result === void 0)) {
        const errorResults = inputData.filter((toolCall) => toolCall?.error);
        const toolResultMessageId = rest.experimental_generateMessageId?.() || _internal?.generateId?.();
        if (errorResults?.length) {
          errorResults.forEach((toolCall) => {
            const chunk = {
              type: "tool-error",
              runId: rest.runId,
              from: "AGENT",
              payload: {
                error: toolCall.error,
                args: toolCall.args,
                toolCallId: toolCall.toolCallId,
                toolName: toolCall.toolName,
                result: toolCall.result,
                providerMetadata: toolCall.providerMetadata
              }
            };
            rest.controller.enqueue(chunk);
          });
          rest.messageList.add(
            {
              id: toolResultMessageId,
              role: "tool",
              content: errorResults.map((toolCall) => {
                return {
                  type: "tool-result",
                  args: toolCall.args,
                  toolCallId: toolCall.toolCallId,
                  toolName: toolCall.toolName,
                  result: {
                    tool_execution_error: toolCall.error?.message ?? toolCall.error
                  }
                };
              })
            },
            "response"
          );
        }
        initialResult.stepResult.isContinued = false;
        return bail(initialResult);
      }
      // At least one tool returned a result: stream each result chunk and
      // record the tool-result messages, then continue the loop.
      if (inputData?.length) {
        for (const toolCall of inputData) {
          const chunk = {
            type: "tool-result",
            runId: rest.runId,
            from: "AGENT",
            payload: {
              args: toolCall.args,
              toolCallId: toolCall.toolCallId,
              toolName: toolCall.toolName,
              result: toolCall.result,
              providerMetadata: toolCall.providerMetadata
            }
          };
          rest.controller.enqueue(chunk);
          if (model.specificationVersion === "v2") {
            await rest.options?.onChunk?.({
              chunk: convertMastraChunkToAISDKv5({
                chunk
              })
            });
          }
          const toolResultMessageId = rest.experimental_generateMessageId?.() || _internal?.generateId?.();
          // NOTE(review): this add() runs once per tool call but maps over the
          // whole inputData each time (under a fresh message id) — looks like
          // it duplicates tool-result content when there are multiple calls;
          // verify against MessageList's dedup behavior.
          messageList.add(
            {
              id: toolResultMessageId,
              role: "tool",
              content: inputData.map((toolCall2) => {
                return {
                  type: "tool-result",
                  args: toolCall2.args,
                  toolCallId: toolCall2.toolCallId,
                  toolName: toolCall2.toolName,
                  result: toolCall2.result
                };
              })
            },
            "response"
          );
        }
        return {
          ...initialResult,
          messages: {
            all: messageList.get.all.v3(),
            user: messageList.get.input.v3(),
            nonUser: messageList.get.response.v3()
          }
        };
      }
    }
  });
  return chunkCSTWQQ3C_cjs.createWorkflow({
    id: "executionWorkflow",
    inputSchema: llmIterationOutputSchema,
    outputSchema: z__default.default.any()
  }).then(llmExecutionStep).map(({ inputData }) => {
    // Record the iteration's tool calls on the span, then fan out to
    // toolCallStep for each call.
    if (modelStreamSpan && telemetry_settings?.recordOutputs !== false && inputData.output.toolCalls?.length) {
      modelStreamSpan.setAttribute(
        "stream.response.toolCalls",
        JSON.stringify(
          inputData.output.toolCalls?.map((toolCall) => {
            return {
              toolCallId: toolCall.toolCallId,
              args: toolCall.args,
              toolName: toolCall.toolName
            };
          })
        )
      );
    }
    return inputData.output.toolCalls || [];
  }).foreach(toolCallStep).then(llmMappingStep).commit();
}
2626
+
2627
// src/loop/workflow/stream.ts
/**
 * Drives the agentic loop as a web ReadableStream of chunk events.
 *
 * Each iteration runs the outer LLM workflow (model call + tool execution),
 * emits a `step-finish` chunk, and repeats while the model asks to continue
 * and no `stopWhen` condition has fired. A `start` chunk is emitted before
 * the workflow run and a `finish` chunk with the final result after it.
 * Timing and usage telemetry is recorded on `modelStreamSpan`.
 *
 * @param {object} props - destructured loop options; `rest` carries runId,
 *   messageList, stopWhen, startTimestamp and the remaining workflow props.
 * @returns {ReadableStream} stream of AGENT chunk events
 */
function workflowLoopStream({
  telemetry_settings,
  model,
  toolChoice,
  modelSettings,
  _internal,
  modelStreamSpan,
  ...rest
}) {
  return new web.ReadableStream({
    start: async (controller) => {
      const messageId = rest.experimental_generateMessageId?.() || _internal?.generateId?.();
      modelStreamSpan.setAttributes({
        ...(telemetry_settings?.recordInputs !== false
          ? {
              "stream.prompt.toolChoice": toolChoice ? JSON.stringify(toolChoice) : "auto"
            }
          : {})
      });
      const outerLLMWorkflow = createOuterLLMWorkflow({
        messageId,
        model,
        telemetry_settings,
        _internal,
        modelSettings,
        toolChoice,
        modelStreamSpan,
        controller,
        ...rest
      });
      const mainWorkflow = chunkCSTWQQ3C_cjs.createWorkflow({
        id: "agentic-loop",
        inputSchema: llmIterationOutputSchema,
        outputSchema: z__default.default.any()
      }).dowhile(outerLLMWorkflow, async ({ inputData }) => {
        // Evaluate user-supplied stop conditions against the steps so far;
        // any truthy condition forces the loop to stop continuing.
        let hasFinishedSteps = false;
        if (rest.stopWhen) {
          const conditions = await Promise.all(
            (Array.isArray(rest.stopWhen) ? rest.stopWhen : [rest.stopWhen]).map((condition) => {
              return condition({
                steps: inputData.output.steps
              });
            })
          );
          hasFinishedSteps = conditions.some((condition) => condition);
        }
        inputData.stepResult.isContinued = hasFinishedSteps ? false : inputData.stepResult.isContinued;
        if (inputData.stepResult.reason !== "abort") {
          controller.enqueue({
            type: "step-finish",
            runId: rest.runId,
            from: "AGENT",
            payload: inputData
          });
        }
        modelStreamSpan.setAttributes({
          "stream.response.id": inputData.metadata.id,
          "stream.response.model": model.modelId,
          ...(inputData.metadata.providerMetadata
            ? { "stream.response.providerMetadata": JSON.stringify(inputData.metadata.providerMetadata) }
            : {}),
          "stream.response.finishReason": inputData.stepResult.reason,
          "stream.usage.inputTokens": inputData.output.usage?.inputTokens,
          "stream.usage.outputTokens": inputData.output.usage?.outputTokens,
          "stream.usage.totalTokens": inputData.output.usage?.totalTokens,
          ...(telemetry_settings?.recordOutputs !== false
            ? {
                "stream.response.text": inputData.output.text,
                "stream.prompt.messages": JSON.stringify(rest.messageList.get.input.aiV5.model())
              }
            : {})
        });
        modelStreamSpan.end();
        // An undefined finish reason means no terminal signal was produced —
        // stop looping rather than continuing on missing data.
        const reason = inputData.stepResult.reason;
        if (reason === void 0) {
          return false;
        }
        return inputData.stepResult.isContinued;
      }).map(({ inputData }) => {
        // Surface tool-role response messages as the step's tool calls.
        const toolCalls = rest.messageList.get.response.v3().filter((message) => message.role === "tool");
        inputData.output.toolCalls = toolCalls;
        return inputData;
      }).commit();
      // FIX: fall back to Date.now() when _internal.now is absent so the
      // elapsed time is a number, not NaN (mirrors msToFinish below).
      const msToFirstChunk = (_internal?.now?.() ?? Date.now()) - rest.startTimestamp;
      modelStreamSpan.addEvent("ai.stream.firstChunk", {
        "ai.response.msToFirstChunk": msToFirstChunk
      });
      modelStreamSpan.setAttributes({
        "stream.response.timestamp": new Date(rest.startTimestamp).toISOString(),
        "stream.response.msToFirstChunk": msToFirstChunk
      });
      controller.enqueue({
        type: "start",
        runId: rest.runId,
        from: "AGENT",
        payload: {}
      });
      const run = await mainWorkflow.createRunAsync({
        runId: rest.runId
      });
      const executionResult = await run.start({
        inputData: {
          messageId,
          messages: {
            all: rest.messageList.get.input.aiV5.model(),
            user: rest.messageList.get.input.aiV5.model(),
            nonUser: []
          }
        }
      });
      if (executionResult.status !== "success") {
        controller.close();
        return;
      }
      // FIX: removed leftover debug console.log that dumped the full
      // aborted execution result to stdout.
      if (executionResult.result.stepResult.reason === "abort") {
        controller.close();
        return;
      }
      controller.enqueue({
        type: "finish",
        runId: rest.runId,
        from: "AGENT",
        payload: executionResult.result
      });
      const msToFinish = (_internal?.now?.() ?? Date.now()) - rest.startTimestamp;
      modelStreamSpan.addEvent("ai.stream.finish");
      modelStreamSpan.setAttributes({
        "stream.response.msToFinish": msToFinish,
        "stream.response.avgOutputTokensPerSecond": 1e3 * (executionResult?.result?.output?.usage?.outputTokens ?? 0) / msToFinish
      });
      controller.close();
    }
  });
}
2758
+
2759
// src/loop/loop.ts
/**
 * Entry point for the streaming agentic loop.
 *
 * Resolves defaults (logger, run id, clock/id/date helpers), opens the root
 * and model-stream telemetry spans, then wraps the workflow-backed stream in
 * a MastraModelOutput for the caller to consume.
 *
 * @param {object} props - model plus optional logger, runId, idGenerator,
 *   telemetry settings, message list, model settings, tools and `_internal`
 *   overrides; all remaining options are forwarded through `rest`.
 * @returns {MastraModelOutput} the wrapped model output stream
 */
function loop({
  model,
  logger,
  runId,
  idGenerator,
  telemetry_settings,
  messageList,
  includeRawChunks,
  modelSettings,
  tools,
  _internal,
  ...rest
}) {
  // Fall back to a debug-level console logger when none is supplied.
  const resolvedLogger = logger || new chunkV5WKCX3G_cjs.ConsoleLogger({
    level: "debug"
  });
  // Prefer the caller's runId, then a custom id generator, then a UUID.
  const resolvedRunId = runId || idGenerator?.() || crypto.randomUUID();
  // Internal clock/id/date helpers, each individually overridable.
  const resolvedInternal = {
    now: _internal?.now || (() => Date.now()),
    generateId: _internal?.generateId || (() => aiV5.generateId()),
    currentDate: _internal?.currentDate || (() => /* @__PURE__ */ new Date())
  };
  const startTimestamp = resolvedInternal.now();
  const { rootSpan } = getRootSpan({
    operationId: `mastra.stream`,
    model: {
      modelId: model.modelId,
      provider: model.provider
    },
    modelSettings,
    headers: modelSettings?.headers ?? rest.headers,
    telemetry_settings
  });
  // NOTE(review): the prompt messages are gated on recordOutputs here even
  // though they are inputs — confirm whether recordInputs was intended.
  rootSpan.setAttributes({
    ...(telemetry_settings?.recordOutputs !== false
      ? {
          "stream.prompt.messages": JSON.stringify(messageList.get.input.aiV5.model())
        }
      : {})
  });
  const { rootSpan: modelStreamSpan } = getRootSpan({
    operationId: `mastra.stream.aisdk.doStream`,
    model: {
      modelId: model.modelId,
      provider: model.provider
    },
    modelSettings,
    headers: modelSettings?.headers ?? rest.headers,
    telemetry_settings
  });
  const streamFn = workflowLoopStream({
    model,
    runId: resolvedRunId,
    logger: resolvedLogger,
    startTimestamp,
    messageList,
    includeRawChunks: !!includeRawChunks,
    _internal: resolvedInternal,
    tools,
    modelStreamSpan,
    telemetry_settings,
    ...rest
  });
  return new MastraModelOutput({
    model: {
      modelId: model.modelId,
      provider: model.provider,
      version: model.specificationVersion
    },
    stream: streamFn,
    messageList,
    options: {
      runId: resolvedRunId,
      telemetry_settings,
      rootSpan,
      toolCallStreaming: rest.toolCallStreaming,
      onFinish: rest.options?.onFinish,
      onStepFinish: rest.options?.onStepFinish,
      includeRawChunks: !!includeRawChunks,
      objectOptions: rest.objectOptions
    }
  });
}
2845
+
2846
// Public API of this bundle chunk.
exports.loop = loop;
// FIX: removed the duplicated sourceMappingURL directive — a file should
// carry exactly one source-map reference.
//# sourceMappingURL=index.cjs.map