@mastra/core 0.10.13 → 0.10.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (235)
  1. package/a2a.d.ts +1 -0
  2. package/agent.d.ts +1 -0
  3. package/base.d.ts +1 -0
  4. package/bundler.d.ts +1 -0
  5. package/deployer.d.ts +1 -0
  6. package/di.d.ts +1 -0
  7. package/dist/a2a/index.cjs +79 -0
  8. package/dist/a2a/index.d.cts +780 -0
  9. package/dist/a2a/index.d.ts +780 -0
  10. package/dist/a2a/index.js +68 -0
  11. package/dist/agent/index.cjs +15 -0
  12. package/dist/agent/index.d.cts +29 -0
  13. package/dist/agent/index.d.ts +29 -0
  14. package/dist/agent/index.js +2 -0
  15. package/dist/base-B_y9sMg0.d.cts +162 -0
  16. package/dist/base-CS5eSXbL.d.cts +4117 -0
  17. package/dist/base-ClrXcCRx.d.ts +162 -0
  18. package/dist/base-DBhKcre4.d.ts +4117 -0
  19. package/dist/base.cjs +10 -0
  20. package/dist/base.d.cts +6 -0
  21. package/dist/base.d.ts +6 -0
  22. package/dist/base.js +1 -0
  23. package/dist/bundler/index.cjs +10 -0
  24. package/dist/bundler/index.d.cts +30 -0
  25. package/dist/bundler/index.d.ts +30 -0
  26. package/dist/bundler/index.js +1 -0
  27. package/dist/chunk-2GRYVZ2O.cjs +244 -0
  28. package/dist/chunk-2HDFKWFU.js +2249 -0
  29. package/dist/chunk-32C7JDIZ.js +1 -0
  30. package/dist/chunk-4UBOJFSL.js +989 -0
  31. package/dist/chunk-4UWPFBC6.js +88 -0
  32. package/dist/chunk-4Z3OU5RY.cjs +31 -0
  33. package/dist/chunk-5HTMDAXP.js +359 -0
  34. package/dist/chunk-5IEKR756.js +53 -0
  35. package/dist/chunk-5YDTZN2X.js +114 -0
  36. package/dist/chunk-6UNGH46J.js +75 -0
  37. package/dist/chunk-6Y4UL5Z6.cjs +94 -0
  38. package/dist/chunk-7F6BQXE2.cjs +425 -0
  39. package/dist/chunk-7H2GET5Z.cjs +668 -0
  40. package/dist/chunk-7HZ6NIAF.cjs +2 -0
  41. package/dist/chunk-7MZNOW6W.cjs +2263 -0
  42. package/dist/chunk-7XQIPES3.js +668 -0
  43. package/dist/chunk-AKYTYALY.js +70 -0
  44. package/dist/chunk-ATXBSEFT.js +22 -0
  45. package/dist/chunk-B6TOBUS6.cjs +80 -0
  46. package/dist/chunk-B7SQOKEC.cjs +91 -0
  47. package/dist/chunk-BB4KXGBU.js +83 -0
  48. package/dist/chunk-BB6DPGIV.cjs +6 -0
  49. package/dist/chunk-C4LMN2IR.js +27 -0
  50. package/dist/chunk-DFFVEKIG.js +407 -0
  51. package/dist/chunk-E7AX3U6M.cjs +659 -0
  52. package/dist/chunk-EWCOOO3H.js +5007 -0
  53. package/dist/chunk-F2WMR75C.cjs +183 -0
  54. package/dist/chunk-FBKJ3652.cjs +5034 -0
  55. package/dist/chunk-FEYYOBBG.cjs +24 -0
  56. package/dist/chunk-FL5SZ2XU.js +181 -0
  57. package/dist/chunk-GH2KM66J.js +37 -0
  58. package/dist/chunk-GWFS5DAR.cjs +37 -0
  59. package/dist/chunk-HNEE7IF4.js +60 -0
  60. package/dist/chunk-HSVOEWAM.cjs +2 -0
  61. package/dist/chunk-J52TXHZV.cjs +73 -0
  62. package/dist/chunk-JNMQKJH4.js +10 -0
  63. package/dist/chunk-JQOMTERC.js +89 -0
  64. package/dist/chunk-LABUWBKX.cjs +71 -0
  65. package/dist/chunk-LXFZUKP3.cjs +34 -0
  66. package/dist/chunk-MP2QBLUJ.cjs +70 -0
  67. package/dist/chunk-MUNFCOMB.cjs +62 -0
  68. package/dist/chunk-NH5WJNNS.js +1 -0
  69. package/dist/chunk-P3Q73CAW.cjs +55 -0
  70. package/dist/chunk-PA2YIVIT.js +61 -0
  71. package/dist/chunk-QFTBW7ZZ.cjs +2 -0
  72. package/dist/chunk-QQ5K5TZE.cjs +619 -0
  73. package/dist/chunk-QUSEDVYI.cjs +991 -0
  74. package/dist/chunk-R4V75T7J.js +1 -0
  75. package/dist/chunk-SGGPJWRQ.js +69 -0
  76. package/dist/chunk-ST5RMVLG.cjs +87 -0
  77. package/dist/chunk-TC2SCOTE.js +605 -0
  78. package/dist/chunk-U64IJDC5.cjs +109 -0
  79. package/dist/chunk-UX3B6S2I.cjs +65 -0
  80. package/dist/chunk-V5D2LIF5.js +68 -0
  81. package/dist/chunk-VG4OPO2R.js +240 -0
  82. package/dist/chunk-WQNOATKB.js +103 -0
  83. package/dist/chunk-Y7D2JLKS.js +4 -0
  84. package/dist/chunk-YJEHXYK5.js +657 -0
  85. package/dist/chunk-YOQP5T77.js +32 -0
  86. package/dist/chunk-ZIZ3CVHN.cjs +120 -0
  87. package/dist/chunk-ZPOUMTTH.cjs +362 -0
  88. package/dist/chunk-ZZLBNB3U.cjs +12 -0
  89. package/dist/deployer/index.cjs +10 -0
  90. package/dist/deployer/index.d.cts +19 -0
  91. package/dist/deployer/index.d.ts +19 -0
  92. package/dist/deployer/index.js +1 -0
  93. package/dist/di/index.cjs +10 -0
  94. package/dist/di/index.d.cts +1 -0
  95. package/dist/di/index.d.ts +1 -0
  96. package/dist/di/index.js +1 -0
  97. package/dist/error/index.cjs +22 -0
  98. package/dist/error/index.d.cts +86 -0
  99. package/dist/error/index.d.ts +86 -0
  100. package/dist/error/index.js +1 -0
  101. package/dist/eval/index.cjs +14 -0
  102. package/dist/eval/index.d.cts +43 -0
  103. package/dist/eval/index.d.ts +43 -0
  104. package/dist/eval/index.js +1 -0
  105. package/dist/hooks/index.cjs +18 -0
  106. package/dist/hooks/index.d.cts +33 -0
  107. package/dist/hooks/index.d.ts +33 -0
  108. package/dist/hooks/index.js +1 -0
  109. package/dist/index.cjs +281 -0
  110. package/dist/index.d.cts +92 -0
  111. package/dist/index.d.ts +92 -0
  112. package/dist/index.js +112 -0
  113. package/dist/integration/index.cjs +14 -0
  114. package/dist/integration/index.d.cts +65 -0
  115. package/dist/integration/index.d.ts +65 -0
  116. package/dist/integration/index.js +1 -0
  117. package/dist/llm/index.cjs +10 -0
  118. package/dist/llm/index.d.cts +29 -0
  119. package/dist/llm/index.d.ts +29 -0
  120. package/dist/llm/index.js +1 -0
  121. package/dist/logger/index.cjs +43 -0
  122. package/dist/logger/index.d.cts +96 -0
  123. package/dist/logger/index.d.ts +96 -0
  124. package/dist/logger/index.js +2 -0
  125. package/dist/logger-B8XXh6ya.d.cts +159 -0
  126. package/dist/logger-Bpa2oLL4.d.ts +159 -0
  127. package/dist/mastra/index.cjs +10 -0
  128. package/dist/mastra/index.d.cts +29 -0
  129. package/dist/mastra/index.d.ts +29 -0
  130. package/dist/mastra/index.js +1 -0
  131. package/dist/mcp/index.cjs +106 -0
  132. package/dist/mcp/index.d.cts +29 -0
  133. package/dist/mcp/index.d.ts +29 -0
  134. package/dist/mcp/index.js +100 -0
  135. package/dist/memory/index.cjs +18 -0
  136. package/dist/memory/index.d.cts +29 -0
  137. package/dist/memory/index.d.ts +29 -0
  138. package/dist/memory/index.js +1 -0
  139. package/dist/network/index.cjs +311 -0
  140. package/dist/network/index.d.cts +29 -0
  141. package/dist/network/index.d.ts +29 -0
  142. package/dist/network/index.js +309 -0
  143. package/dist/network/vNext/index.cjs +873 -0
  144. package/dist/network/vNext/index.d.cts +29 -0
  145. package/dist/network/vNext/index.d.ts +29 -0
  146. package/dist/network/vNext/index.js +871 -0
  147. package/dist/relevance/index.cjs +18 -0
  148. package/dist/relevance/index.d.cts +49 -0
  149. package/dist/relevance/index.d.ts +49 -0
  150. package/dist/relevance/index.js +1 -0
  151. package/dist/runtime-context/index.cjs +10 -0
  152. package/dist/runtime-context/index.d.cts +52 -0
  153. package/dist/runtime-context/index.d.ts +52 -0
  154. package/dist/runtime-context/index.js +1 -0
  155. package/dist/server/index.cjs +62 -0
  156. package/dist/server/index.d.cts +52 -0
  157. package/dist/server/index.d.ts +52 -0
  158. package/dist/server/index.js +59 -0
  159. package/dist/storage/index.cjs +336 -0
  160. package/dist/storage/index.d.cts +149 -0
  161. package/dist/storage/index.d.ts +149 -0
  162. package/dist/storage/index.js +303 -0
  163. package/dist/telemetry/index.cjs +30 -0
  164. package/dist/telemetry/index.d.cts +75 -0
  165. package/dist/telemetry/index.d.ts +75 -0
  166. package/dist/telemetry/index.js +1 -0
  167. package/dist/telemetry/otel-vendor.cjs +103 -0
  168. package/dist/telemetry/otel-vendor.d.cts +20 -0
  169. package/dist/telemetry/otel-vendor.d.ts +20 -0
  170. package/dist/telemetry/otel-vendor.js +57 -0
  171. package/dist/tools/index.cjs +18 -0
  172. package/dist/tools/index.d.cts +41 -0
  173. package/dist/tools/index.d.ts +41 -0
  174. package/dist/tools/index.js +1 -0
  175. package/dist/tts/index.cjs +10 -0
  176. package/dist/tts/index.d.cts +28 -0
  177. package/dist/tts/index.d.ts +28 -0
  178. package/dist/tts/index.js +1 -0
  179. package/dist/types-Bo1uigWx.d.cts +17 -0
  180. package/dist/types-Bo1uigWx.d.ts +17 -0
  181. package/dist/utils.cjs +58 -0
  182. package/dist/utils.d.cts +149 -0
  183. package/dist/utils.d.ts +149 -0
  184. package/dist/utils.js +1 -0
  185. package/dist/vector/filter/index.cjs +192 -0
  186. package/dist/vector/filter/index.d.cts +128 -0
  187. package/dist/vector/filter/index.d.ts +128 -0
  188. package/dist/vector/filter/index.js +190 -0
  189. package/dist/vector/index.cjs +10 -0
  190. package/dist/vector/index.d.cts +77 -0
  191. package/dist/vector/index.d.ts +77 -0
  192. package/dist/vector/index.js +1 -0
  193. package/dist/voice/index.cjs +18 -0
  194. package/dist/voice/index.d.cts +29 -0
  195. package/dist/voice/index.d.ts +29 -0
  196. package/dist/voice/index.js +1 -0
  197. package/dist/workflows/constants.cjs +10 -0
  198. package/dist/workflows/constants.d.cts +3 -0
  199. package/dist/workflows/constants.d.ts +3 -0
  200. package/dist/workflows/constants.js +1 -0
  201. package/dist/workflows/index.cjs +42 -0
  202. package/dist/workflows/index.d.cts +282 -0
  203. package/dist/workflows/index.d.ts +282 -0
  204. package/dist/workflows/index.js +1 -0
  205. package/dist/workflows/legacy/index.cjs +90 -0
  206. package/dist/workflows/legacy/index.d.cts +91 -0
  207. package/dist/workflows/legacy/index.d.ts +91 -0
  208. package/dist/workflows/legacy/index.js +1 -0
  209. package/error.d.ts +1 -0
  210. package/eval.d.ts +1 -0
  211. package/hooks.d.ts +1 -0
  212. package/integration.d.ts +1 -0
  213. package/llm.d.ts +1 -0
  214. package/logger.d.ts +1 -0
  215. package/mastra.d.ts +1 -0
  216. package/mcp.d.ts +1 -0
  217. package/memory.d.ts +1 -0
  218. package/network/vNext.d.ts +1 -0
  219. package/network.d.ts +1 -0
  220. package/package.json +1 -1
  221. package/relevance.d.ts +1 -0
  222. package/runtime-context.d.ts +1 -0
  223. package/server.d.ts +1 -0
  224. package/storage.d.ts +1 -0
  225. package/telemetry/otel-vendor.d.ts +1 -0
  226. package/telemetry.d.ts +1 -0
  227. package/tools.d.ts +1 -0
  228. package/tts.d.ts +1 -0
  229. package/utils.d.ts +1 -0
  230. package/vector/filter.d.ts +1 -0
  231. package/vector.d.ts +1 -0
  232. package/voice.d.ts +1 -0
  233. package/workflows/_constants.d.ts +1 -0
  234. package/workflows/legacy.d.ts +1 -0
  235. package/workflows.d.ts +1 -0
@@ -0,0 +1,659 @@
1
+ 'use strict';
2
+
3
+ var chunk7F6BQXE2_cjs = require('./chunk-7F6BQXE2.cjs');
4
+ var chunkB6TOBUS6_cjs = require('./chunk-B6TOBUS6.cjs');
5
+ var chunkP3Q73CAW_cjs = require('./chunk-P3Q73CAW.cjs');
6
+ var chunkZIZ3CVHN_cjs = require('./chunk-ZIZ3CVHN.cjs');
7
+ var schemaCompat = require('@mastra/schema-compat');
8
+ var ai = require('ai');
9
+ var zod = require('zod');
10
+
11
// src/llm/model/base.ts
// Base class for Mastra LLM wrappers: registers with the LLM component
// logger and normalizes caller-supplied input into AI-SDK style messages.
var MastraLLMBase = class extends chunkP3Q73CAW_cjs.MastraBase {
  /**
   * @param {{ name: string }} options - display name registered under the
   *   LLM component with the framework logger.
   */
  constructor({ name }) {
    super({ component: chunkZIZ3CVHN_cjs.RegisteredLogger.LLM, name });
  }

  /**
   * Normalize `messages` into an array of `{ role, content }` objects.
   * Bare strings (either a single string or string entries in an array)
   * become user messages; any non-string entry is passed through as-is.
   */
  convertToMessages(messages) {
    if (Array.isArray(messages)) {
      return messages.map((entry) =>
        typeof entry === "string" ? { role: "user", content: entry } : entry
      );
    }
    return [{ role: "user", content: messages }];
  }

  /**
   * Attach optional host-supplied primitives: telemetry and/or logger.
   * Missing entries are simply skipped.
   */
  __registerPrimitives(p) {
    if (p.telemetry) {
      this.__setTelemetry(p.telemetry);
    }
    if (p.logger) {
      this.__setLogger(p.logger);
    }
  }
};
47
+
48
// src/llm/model/model.ts
// AI-SDK backed LLM implementation. Wraps a language model from the `ai`
// package and exposes text / structured-object generation plus the
// streaming variants, with MastraError wrapping around user callbacks and
// provider calls, and provider-specific schema-compat handling.
var MastraLLM = class extends MastraLLMBase {
  #model;
  #mastra;

  /**
   * @param {{ model: object, mastra?: object }} options - `model` is an
   *   AI-SDK language model; when `mastra` is supplied and exposes a
   *   logger, that logger is adopted.
   */
  constructor({ model, mastra }) {
    super({ name: "aisdk" });
    this.#model = model;
    if (mastra) {
      this.#mastra = mastra;
      if (mastra.getLogger()) {
        this.__setLogger(this.#mastra.getLogger());
      }
    }
  }

  // NOTE: the upstream bundle re-declared __registerPrimitives here
  // byte-for-byte identical to MastraLLMBase.__registerPrimitives; the
  // redundant override has been removed — the inherited method applies.

  /** Late-bind the Mastra instance after construction. */
  __registerMastra(p) {
    this.#mastra = p;
  }

  getProvider() {
    return this.#model.provider;
  }

  getModelId() {
    return this.#model.modelId;
  }

  getModel() {
    return this.#model;
  }

  /**
   * Run `schema` through the provider-specific compatibility layers and
   * return an AI-SDK compatible schema (`mode: "aiSdkSchema"`).
   */
  _applySchemaCompat(schema) {
    const model = this.#model;
    const schemaCompatLayers = [];
    if (model) {
      schemaCompatLayers.push(
        new schemaCompat.OpenAIReasoningSchemaCompatLayer(model),
        new schemaCompat.OpenAISchemaCompatLayer(model),
        new schemaCompat.GoogleSchemaCompatLayer(model),
        new schemaCompat.AnthropicSchemaCompatLayer(model),
        new schemaCompat.DeepSeekSchemaCompatLayer(model),
        new schemaCompat.MetaSchemaCompatLayer(model)
      );
    }
    return schemaCompat.applyCompatLayer({
      schema,
      compatLayers: schemaCompatLayers,
      mode: "aiSdkSchema"
    });
  }

  /**
   * Shared throttle used after every step: if the provider response reports
   * fewer than 2000 remaining rate-limit tokens, wait 10 seconds before the
   * next step. (Extracted from four identical inline copies.)
   */
  async #maybeWaitForRateLimit(props, runId) {
    const remaining = props?.response?.headers?.["x-ratelimit-remaining-tokens"];
    if (remaining && parseInt(remaining, 10) < 2e3) {
      this.logger.warn("Rate limit approaching, waiting 10 seconds", { runId });
      await chunk7F6BQXE2_cjs.delay(10 * 1e3);
    }
  }

  /**
   * Generate text via `ai.generateText`. `experimental_output` may be a Zod
   * schema (ZodArray element type is unwrapped) or a JSON schema object.
   * User `onStepFinish` errors are wrapped as USER MastraErrors; provider
   * failures as THIRD_PARTY.
   */
  async __text({
    runId,
    messages,
    maxSteps = 5,
    tools = {},
    temperature,
    toolChoice = "auto",
    onStepFinish,
    experimental_output,
    telemetry,
    threadId,
    resourceId,
    memory,
    runtimeContext,
    ...rest
  }) {
    const model = this.#model;
    this.logger.debug(`[LLM] - Generating text`, {
      runId,
      messages,
      maxSteps,
      threadId,
      resourceId,
      tools: Object.keys(tools)
    });
    const argsForExecute = {
      model,
      temperature,
      tools: { ...tools },
      toolChoice,
      maxSteps,
      onStepFinish: async (props) => {
        try {
          await onStepFinish?.(props);
        } catch (e) {
          throw new chunkB6TOBUS6_cjs.MastraError(
            {
              id: "LLM_TEXT_ON_STEP_FINISH_CALLBACK_EXECUTION_FAILED",
              domain: "LLM",
              category: "USER",
              details: {
                modelId: model.modelId,
                modelProvider: model.provider,
                runId: runId ?? "unknown",
                threadId: threadId ?? "unknown",
                resourceId: resourceId ?? "unknown",
                finishReason: props?.finishReason,
                toolCalls: props?.toolCalls ? JSON.stringify(props.toolCalls) : "",
                toolResults: props?.toolResults ? JSON.stringify(props.toolResults) : "",
                usage: props?.usage ? JSON.stringify(props.usage) : ""
              }
            },
            e
          );
        }
        this.logger.debug("[LLM] - Step Change:", {
          text: props?.text,
          toolCalls: props?.toolCalls,
          toolResults: props?.toolResults,
          finishReason: props?.finishReason,
          usage: props?.usage,
          runId
        });
        await this.#maybeWaitForRateLimit(props, runId);
      },
      ...rest
    };
    let schema;
    if (experimental_output) {
      this.logger.debug("[LLM] - Using experimental output", { runId });
      if (typeof experimental_output.parse === "function") {
        schema = experimental_output;
        if (schema instanceof zod.z.ZodArray) {
          schema = schema._def.type;
        }
      } else {
        schema = ai.jsonSchema(experimental_output);
      }
    }
    try {
      return await ai.generateText({
        messages,
        ...argsForExecute,
        experimental_telemetry: {
          // NOTE(review): this.experimental_telemetry is not set in this
          // module — presumably attached by the framework; verify.
          ...this.experimental_telemetry,
          ...telemetry
        },
        experimental_output: schema ? ai.Output.object({ schema }) : void 0
      });
    } catch (e) {
      throw new chunkB6TOBUS6_cjs.MastraError(
        {
          id: "LLM_GENERATE_TEXT_AI_SDK_EXECUTION_FAILED",
          domain: "LLM",
          category: "THIRD_PARTY",
          details: {
            modelId: model.modelId,
            modelProvider: model.provider,
            runId: runId ?? "unknown",
            threadId: threadId ?? "unknown",
            resourceId: resourceId ?? "unknown"
          }
        },
        e
      );
    }
  }

  /**
   * Generate a structured object via `ai.generateObject`. A ZodArray
   * `structuredOutput` switches output mode to "array" and unwraps the
   * element schema; the schema is then passed through `_applySchemaCompat`.
   */
  async __textObject({
    messages,
    onStepFinish,
    maxSteps = 5,
    tools = {},
    structuredOutput,
    runId,
    temperature,
    toolChoice = "auto",
    telemetry,
    threadId,
    resourceId,
    memory,
    runtimeContext,
    ...rest
  }) {
    const model = this.#model;
    this.logger.debug(`[LLM] - Generating a text object`, { runId });
    const argsForExecute = {
      model,
      temperature,
      tools: { ...tools },
      maxSteps,
      toolChoice,
      onStepFinish: async (props) => {
        try {
          await onStepFinish?.(props);
        } catch (e) {
          // Detail set intentionally omits model info here (matches upstream).
          throw new chunkB6TOBUS6_cjs.MastraError(
            {
              id: "LLM_TEXT_OBJECT_ON_STEP_FINISH_CALLBACK_EXECUTION_FAILED",
              domain: "LLM",
              category: "USER",
              details: {
                runId: runId ?? "unknown",
                threadId: threadId ?? "unknown",
                resourceId: resourceId ?? "unknown",
                finishReason: props?.finishReason,
                toolCalls: props?.toolCalls ? JSON.stringify(props.toolCalls) : "",
                toolResults: props?.toolResults ? JSON.stringify(props.toolResults) : "",
                usage: props?.usage ? JSON.stringify(props.usage) : ""
              }
            },
            e
          );
        }
        this.logger.debug("[LLM] - Step Change:", {
          text: props?.text,
          toolCalls: props?.toolCalls,
          toolResults: props?.toolResults,
          finishReason: props?.finishReason,
          usage: props?.usage,
          runId
        });
        await this.#maybeWaitForRateLimit(props, runId);
      },
      ...rest
    };
    let output = "object";
    if (structuredOutput instanceof zod.z.ZodArray) {
      output = "array";
      structuredOutput = structuredOutput._def.type;
    }
    try {
      const processedSchema = this._applySchemaCompat(structuredOutput);
      return await ai.generateObject({
        messages,
        ...argsForExecute,
        output,
        schema: processedSchema,
        experimental_telemetry: {
          ...this.experimental_telemetry,
          ...telemetry
        }
      });
    } catch (e) {
      throw new chunkB6TOBUS6_cjs.MastraError(
        {
          id: "LLM_GENERATE_OBJECT_AI_SDK_EXECUTION_FAILED",
          domain: "LLM",
          category: "THIRD_PARTY",
          details: {
            modelId: model.modelId,
            modelProvider: model.provider,
            runId: runId ?? "unknown",
            threadId: threadId ?? "unknown",
            resourceId: resourceId ?? "unknown"
          }
        },
        e
      );
    }
  }

  /**
   * Stream text via `ai.streamText` (returns the stream synchronously).
   * Callback errors are wrapped, tracked via `logger.trackException`, and
   * rethrown; provider failures are wrapped as THIRD_PARTY.
   */
  __stream({
    messages,
    onStepFinish,
    onFinish,
    maxSteps = 5,
    tools = {},
    runId,
    temperature,
    toolChoice = "auto",
    experimental_output,
    telemetry,
    threadId,
    resourceId,
    memory,
    runtimeContext,
    ...rest
  }) {
    const model = this.#model;
    this.logger.debug(`[LLM] - Streaming text`, {
      runId,
      threadId,
      resourceId,
      messages,
      maxSteps,
      tools: Object.keys(tools || {})
    });
    const argsForExecute = {
      model,
      temperature,
      tools: { ...tools },
      maxSteps,
      toolChoice,
      onStepFinish: async (props) => {
        try {
          await onStepFinish?.(props);
        } catch (e) {
          const mastraError = new chunkB6TOBUS6_cjs.MastraError(
            {
              id: "LLM_STREAM_ON_STEP_FINISH_CALLBACK_EXECUTION_FAILED",
              domain: "LLM",
              category: "USER",
              details: {
                modelId: model.modelId,
                modelProvider: model.provider,
                runId: runId ?? "unknown",
                threadId: threadId ?? "unknown",
                resourceId: resourceId ?? "unknown",
                finishReason: props?.finishReason,
                toolCalls: props?.toolCalls ? JSON.stringify(props.toolCalls) : "",
                toolResults: props?.toolResults ? JSON.stringify(props.toolResults) : "",
                usage: props?.usage ? JSON.stringify(props.usage) : ""
              }
            },
            e
          );
          this.logger.trackException(mastraError);
          throw mastraError;
        }
        this.logger.debug("[LLM] - Stream Step Change:", {
          text: props?.text,
          toolCalls: props?.toolCalls,
          toolResults: props?.toolResults,
          finishReason: props?.finishReason,
          usage: props?.usage,
          runId
        });
        await this.#maybeWaitForRateLimit(props, runId);
      },
      onFinish: async (props) => {
        try {
          await onFinish?.(props);
        } catch (e) {
          const mastraError = new chunkB6TOBUS6_cjs.MastraError(
            {
              id: "LLM_STREAM_ON_FINISH_CALLBACK_EXECUTION_FAILED",
              domain: "LLM",
              category: "USER",
              details: {
                modelId: model.modelId,
                modelProvider: model.provider,
                runId: runId ?? "unknown",
                threadId: threadId ?? "unknown",
                resourceId: resourceId ?? "unknown",
                finishReason: props?.finishReason,
                toolCalls: props?.toolCalls ? JSON.stringify(props.toolCalls) : "",
                toolResults: props?.toolResults ? JSON.stringify(props.toolResults) : "",
                usage: props?.usage ? JSON.stringify(props.usage) : ""
              }
            },
            e
          );
          this.logger.trackException(mastraError);
          throw mastraError;
        }
        this.logger.debug("[LLM] - Stream Finished:", {
          text: props?.text,
          toolCalls: props?.toolCalls,
          toolResults: props?.toolResults,
          finishReason: props?.finishReason,
          usage: props?.usage,
          runId,
          threadId,
          resourceId
        });
      },
      ...rest
    };
    let schema;
    if (experimental_output) {
      this.logger.debug("[LLM] - Using experimental output", { runId });
      if (typeof experimental_output.parse === "function") {
        schema = experimental_output;
        if (schema instanceof zod.z.ZodArray) {
          schema = schema._def.type;
        }
      } else {
        schema = ai.jsonSchema(experimental_output);
      }
    }
    try {
      return ai.streamText({
        messages,
        ...argsForExecute,
        experimental_telemetry: {
          ...this.experimental_telemetry,
          ...telemetry
        },
        experimental_output: schema ? ai.Output.object({ schema }) : void 0
      });
    } catch (e) {
      throw new chunkB6TOBUS6_cjs.MastraError(
        {
          id: "LLM_STREAM_TEXT_AI_SDK_EXECUTION_FAILED",
          domain: "LLM",
          category: "THIRD_PARTY",
          details: {
            modelId: model.modelId,
            modelProvider: model.provider,
            runId: runId ?? "unknown",
            threadId: threadId ?? "unknown",
            resourceId: resourceId ?? "unknown"
          }
        },
        e
      );
    }
  }

  /**
   * Stream a structured object via `ai.streamObject`. Mirrors
   * `__textObject` for schema handling and `__stream` for callback
   * error wrapping / exception tracking.
   */
  __streamObject({
    messages,
    runId,
    tools = {},
    maxSteps = 5,
    toolChoice = "auto",
    runtimeContext,
    threadId,
    resourceId,
    memory,
    temperature,
    onStepFinish,
    onFinish,
    structuredOutput,
    telemetry,
    ...rest
  }) {
    const model = this.#model;
    this.logger.debug(`[LLM] - Streaming structured output`, {
      runId,
      messages,
      maxSteps,
      tools: Object.keys(tools || {})
    });
    const finalTools = tools;
    const argsForExecute = {
      model,
      temperature,
      tools: { ...finalTools },
      maxSteps,
      toolChoice,
      onStepFinish: async (props) => {
        try {
          await onStepFinish?.(props);
        } catch (e) {
          const mastraError = new chunkB6TOBUS6_cjs.MastraError(
            {
              id: "LLM_STREAM_OBJECT_ON_STEP_FINISH_CALLBACK_EXECUTION_FAILED",
              domain: "LLM",
              category: "USER",
              details: {
                modelId: model.modelId,
                modelProvider: model.provider,
                runId: runId ?? "unknown",
                threadId: threadId ?? "unknown",
                resourceId: resourceId ?? "unknown",
                usage: props?.usage ? JSON.stringify(props.usage) : "",
                toolCalls: props?.toolCalls ? JSON.stringify(props.toolCalls) : "",
                toolResults: props?.toolResults ? JSON.stringify(props.toolResults) : "",
                finishReason: props?.finishReason
              }
            },
            e
          );
          this.logger.trackException(mastraError);
          throw mastraError;
        }
        this.logger.debug("[LLM] - Stream Step Change:", {
          text: props?.text,
          toolCalls: props?.toolCalls,
          toolResults: props?.toolResults,
          finishReason: props?.finishReason,
          usage: props?.usage,
          runId,
          threadId,
          resourceId
        });
        await this.#maybeWaitForRateLimit(props, runId);
      },
      onFinish: async (props) => {
        try {
          await onFinish?.(props);
        } catch (e) {
          const mastraError = new chunkB6TOBUS6_cjs.MastraError(
            {
              id: "LLM_STREAM_OBJECT_ON_FINISH_CALLBACK_EXECUTION_FAILED",
              domain: "LLM",
              category: "USER",
              details: {
                modelId: model.modelId,
                modelProvider: model.provider,
                runId: runId ?? "unknown",
                threadId: threadId ?? "unknown",
                resourceId: resourceId ?? "unknown",
                toolCalls: props?.toolCalls ? JSON.stringify(props.toolCalls) : "",
                toolResults: props?.toolResults ? JSON.stringify(props.toolResults) : "",
                finishReason: props?.finishReason,
                usage: props?.usage ? JSON.stringify(props.usage) : ""
              }
            },
            e
          );
          this.logger.trackException(mastraError);
          throw mastraError;
        }
        this.logger.debug("[LLM] - Stream Finished:", {
          text: props?.text,
          toolCalls: props?.toolCalls,
          toolResults: props?.toolResults,
          finishReason: props?.finishReason,
          usage: props?.usage,
          runId,
          threadId,
          resourceId
        });
      },
      ...rest
    };
    let output = "object";
    if (structuredOutput instanceof zod.z.ZodArray) {
      output = "array";
      structuredOutput = structuredOutput._def.type;
    }
    try {
      const processedSchema = this._applySchemaCompat(structuredOutput);
      return ai.streamObject({
        messages,
        ...argsForExecute,
        output,
        schema: processedSchema,
        experimental_telemetry: {
          ...this.experimental_telemetry,
          ...telemetry
        }
      });
    } catch (e) {
      throw new chunkB6TOBUS6_cjs.MastraError(
        {
          id: "LLM_STREAM_OBJECT_AI_SDK_EXECUTION_FAILED",
          domain: "LLM",
          category: "THIRD_PARTY",
          details: {
            modelId: model.modelId,
            modelProvider: model.provider,
            runId: runId ?? "unknown",
            threadId: threadId ?? "unknown",
            resourceId: resourceId ?? "unknown"
          }
        },
        e
      );
    }
  }

  /**
   * Public generation entry point: plain text when `output` is absent,
   * structured output otherwise.
   */
  async generate(messages, { maxSteps = 5, output, ...rest }) {
    const msgs = this.convertToMessages(messages);
    if (!output) {
      return await this.__text({ messages: msgs, maxSteps, ...rest });
    }
    return await this.__textObject({
      messages: msgs,
      structuredOutput: output,
      maxSteps,
      ...rest
    });
  }

  /**
   * Public streaming entry point: text stream when `output` is absent,
   * structured-object stream otherwise.
   */
  stream(messages, { maxSteps = 5, output, ...rest }) {
    const msgs = this.convertToMessages(messages);
    if (!output) {
      return this.__stream({ messages: msgs, maxSteps, ...rest });
    }
    return this.__streamObject({
      messages: msgs,
      structuredOutput: output,
      maxSteps,
      ...rest
    });
  }
};

exports.MastraLLM = MastraLLM;