@mastra/core 0.4.2-alpha.0 → 0.4.2-alpha.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. package/dist/agent/index.d.ts +4 -4
  2. package/dist/agent/index.js +1 -1
  3. package/dist/{base-eWkcLLSb.d.ts → base-D90KQ4XI.d.ts} +1 -1
  4. package/dist/{base-BdNHFjqH.d.ts → base-hk-xmLC1.d.ts} +30 -23
  5. package/dist/base.d.ts +3 -3
  6. package/dist/base.js +1 -1
  7. package/dist/bundler/index.d.ts +3 -3
  8. package/dist/bundler/index.js +1 -1
  9. package/dist/{chunk-LKOVXFLE.js → chunk-33GSTUNK.js} +4 -4
  10. package/dist/{chunk-Z735LDV7.js → chunk-4YRYBCOZ.js} +1 -1
  11. package/dist/chunk-55NFNRKO.js +10 -0
  12. package/dist/chunk-5XPCMNGW.js +215 -0
  13. package/dist/{chunk-ORM3OCHX.js → chunk-7TG2Y45H.js} +118 -17
  14. package/dist/{chunk-HPIB5X7E.js → chunk-KM7PRKJ2.js} +5 -5
  15. package/dist/{chunk-XD7K4XPP.js → chunk-M626YCHO.js} +106 -6
  16. package/dist/{chunk-6RDA4JYW.js → chunk-OZ4XVJ6F.js} +1 -1
  17. package/dist/{chunk-KP5SHTKA.js → chunk-PHMSPCTC.js} +2 -2
  18. package/dist/{chunk-N7KZVQDK.js → chunk-PL6URKH2.js} +1 -1
  19. package/dist/{chunk-NDQR6IPC.js → chunk-QAAJAHDB.js} +2 -2
  20. package/dist/{chunk-YUOADWSP.js → chunk-SVEAENO7.js} +2 -2
  21. package/dist/{chunk-4ZRHVG25.js → chunk-W5HVJX45.js} +5 -5
  22. package/dist/{chunk-TCMOYQ74.js → chunk-XVS6QFTX.js} +20 -4
  23. package/dist/{chunk-JIV6PDIN.js → chunk-YLCBP7MP.js} +7 -217
  24. package/dist/deployer/index.d.ts +3 -3
  25. package/dist/deployer/index.js +1 -1
  26. package/dist/eval/index.d.ts +4 -4
  27. package/dist/{index-C5uPdbs4.d.ts → index-mKY1XrpK.d.ts} +2 -2
  28. package/dist/index.d.ts +11 -19
  29. package/dist/index.js +15 -26
  30. package/dist/integration/index.d.ts +5 -5
  31. package/dist/llm/index.d.ts +4 -4
  32. package/dist/logger/index.d.ts +2 -2
  33. package/dist/logger/index.js +1 -1
  34. package/dist/mastra/index.d.ts +5 -5
  35. package/dist/mastra/index.js +1 -1
  36. package/dist/memory/index.d.ts +4 -4
  37. package/dist/memory/index.js +1 -1
  38. package/dist/relevance/index.js +1 -1
  39. package/dist/storage/index.d.ts +7 -76
  40. package/dist/storage/index.js +1 -2
  41. package/dist/storage/libsql/index.d.ts +81 -0
  42. package/dist/storage/libsql/index.js +1 -0
  43. package/dist/telemetry/index.d.ts +4 -4
  44. package/dist/telemetry/index.js +1 -1
  45. package/dist/tools/index.d.ts +6 -6
  46. package/dist/tts/index.d.ts +3 -3
  47. package/dist/tts/index.js +1 -1
  48. package/dist/vector/index.d.ts +4 -7
  49. package/dist/vector/index.js +1 -1
  50. package/dist/vector/libsql/index.d.ts +5 -6
  51. package/dist/vector/libsql/index.js +1 -1
  52. package/dist/voice/index.d.ts +3 -3
  53. package/dist/voice/index.js +2 -2
  54. package/dist/{workflow-CbKcJz7a.d.ts → workflow-BxyFP9Wv.d.ts} +3 -3
  55. package/dist/workflows/index.d.ts +6 -6
  56. package/dist/workflows/index.js +1 -1
  57. package/package.json +9 -12
  58. package/dist/chunk-7NPRQT5A.js +0 -111
@@ -1,12 +1,12 @@
1
- import { InstrumentClass } from './chunk-4ZRHVG25.js';
1
+ import { InstrumentClass } from './chunk-W5HVJX45.js';
2
2
  import { delay } from './chunk-WIBGG4X6.js';
3
- import { MastraBase } from './chunk-6RDA4JYW.js';
4
- import { RegisteredLogger } from './chunk-KP5SHTKA.js';
3
+ import { MastraBase } from './chunk-OZ4XVJ6F.js';
4
+ import { RegisteredLogger } from './chunk-PHMSPCTC.js';
5
5
  import { executeHook } from './chunk-BB4KXGBU.js';
6
6
  import { __decoratorStart, __decorateElement, __runInitializers } from './chunk-C6A6W6XS.js';
7
7
  import { randomUUID } from 'crypto';
8
8
  import { z } from 'zod';
9
- import { generateText, jsonSchema, generateObject, streamText, streamObject } from 'ai';
9
+ import { jsonSchema, generateText, Output, generateObject, streamText, streamObject } from 'ai';
10
10
 
11
11
  // src/llm/model/base.ts
12
12
  var MastraLLMBase = class extends MastraBase {
@@ -152,11 +152,11 @@ var MastraLLM = class extends MastraLLMBase {
152
152
  tool: k,
153
153
  props
154
154
  });
155
- return tool.execute({
155
+ return tool?.execute?.({
156
156
  context: props,
157
157
  mastra: this.#mastra,
158
158
  runId
159
- });
159
+ }) ?? void 0;
160
160
  } catch (error) {
161
161
  this.logger.error("Error executing tool", {
162
162
  tool: k,
@@ -181,7 +181,8 @@ var MastraLLM = class extends MastraLLMBase {
181
181
  convertedTools,
182
182
  temperature,
183
183
  toolChoice = "auto",
184
- onStepFinish
184
+ onStepFinish,
185
+ experimental_output
185
186
  }) {
186
187
  const model = this.#model;
187
188
  this.logger.debug(`[LLM] - Generating text`, {
@@ -213,15 +214,34 @@ var MastraLLM = class extends MastraLLMBase {
213
214
  runId
214
215
  });
215
216
  if (props?.response?.headers?.["x-ratelimit-remaining-tokens"] && parseInt(props?.response?.headers?.["x-ratelimit-remaining-tokens"], 10) < 2e3) {
216
- this.logger.warn("Rate limit approaching, waiting 10 seconds");
217
+ this.logger.warn("Rate limit approaching, waiting 10 seconds", {
218
+ runId
219
+ });
217
220
  await delay(10 * 1e3);
218
221
  }
219
222
  }
220
223
  };
224
+ let schema;
225
+ if (experimental_output) {
226
+ this.logger.debug("[LLM] - Using experimental output", {
227
+ runId
228
+ });
229
+ if (typeof experimental_output.parse === "function") {
230
+ schema = experimental_output;
231
+ if (schema instanceof z.ZodArray) {
232
+ schema = schema._def.type;
233
+ }
234
+ } else {
235
+ schema = jsonSchema(experimental_output);
236
+ }
237
+ }
221
238
  return await generateText({
222
239
  messages,
223
240
  ...argsForExecute,
224
- experimental_telemetry: this.experimental_telemetry
241
+ experimental_telemetry: this.experimental_telemetry,
242
+ experimental_output: schema ? Output.object({
243
+ schema
244
+ }) : void 0
225
245
  });
226
246
  }
227
247
  async __textObject({
@@ -297,7 +317,8 @@ var MastraLLM = class extends MastraLLMBase {
297
317
  convertedTools,
298
318
  runId,
299
319
  temperature,
300
- toolChoice = "auto"
320
+ toolChoice = "auto",
321
+ experimental_output
301
322
  }) {
302
323
  const model = this.#model;
303
324
  this.logger.debug(`[LLM] - Streaming text`, {
@@ -347,10 +368,27 @@ var MastraLLM = class extends MastraLLMBase {
347
368
  });
348
369
  }
349
370
  };
371
+ let schema;
372
+ if (experimental_output) {
373
+ this.logger.debug("[LLM] - Using experimental output", {
374
+ runId
375
+ });
376
+ if (typeof experimental_output.parse === "function") {
377
+ schema = experimental_output;
378
+ if (schema instanceof z.ZodArray) {
379
+ schema = schema._def.type;
380
+ }
381
+ } else {
382
+ schema = jsonSchema(experimental_output);
383
+ }
384
+ }
350
385
  return await streamText({
351
386
  messages,
352
387
  ...argsForExecute,
353
- experimental_telemetry: this.experimental_telemetry
388
+ experimental_telemetry: this.experimental_telemetry,
389
+ experimental_output: schema ? Output.object({
390
+ schema
391
+ }) : void 0
354
392
  });
355
393
  }
356
394
  async __streamObject({
@@ -525,7 +563,7 @@ var Agent = class extends (_a = MastraBase) {
525
563
  this.name = config.name;
526
564
  this.instructions = config.instructions;
527
565
  if (!config.model) {
528
- throw new Error(`LanugageModel is required to create an Agent. Please provider the 'model'.`);
566
+ throw new Error(`LanguageModel is required to create an Agent. Please provide the 'model'.`);
529
567
  }
530
568
  this.llm = new MastraLLM({
531
569
  model: config.model
@@ -880,11 +918,11 @@ var Agent = class extends (_a = MastraBase) {
880
918
  args,
881
919
  runId
882
920
  });
883
- return tool.execute({
921
+ return tool?.execute?.({
884
922
  context: args,
885
923
  mastra: this.#mastra,
886
924
  runId
887
- });
925
+ }) ?? void 0;
888
926
  } catch (err) {
889
927
  this.logger.error(`[Agent:${this.name}] - Failed execution`, {
890
928
  error: err,
@@ -1104,7 +1142,8 @@ var Agent = class extends (_a = MastraBase) {
1104
1142
  toolsets,
1105
1143
  output = "text",
1106
1144
  temperature,
1107
- toolChoice = "auto"
1145
+ toolChoice = "auto",
1146
+ experimental_output
1108
1147
  } = {}) {
1109
1148
  let messagesToUse = [];
1110
1149
  if (typeof messages === `string`) {
@@ -1141,6 +1180,30 @@ var Agent = class extends (_a = MastraBase) {
1141
1180
  messageObjects,
1142
1181
  convertedTools
1143
1182
  } = await before();
1183
+ if (output === "text" && experimental_output) {
1184
+ const result2 = await this.llm.__text({
1185
+ messages: messageObjects,
1186
+ tools: this.tools,
1187
+ convertedTools,
1188
+ onStepFinish,
1189
+ maxSteps,
1190
+ runId: runIdToUse,
1191
+ temperature,
1192
+ toolChoice,
1193
+ experimental_output
1194
+ });
1195
+ const outputText2 = result2.text;
1196
+ await after({
1197
+ result: result2,
1198
+ threadId,
1199
+ memoryConfig: memoryOptions,
1200
+ outputText: outputText2,
1201
+ runId: runIdToUse
1202
+ });
1203
+ const newResult = result2;
1204
+ newResult.object = result2.experimental_output;
1205
+ return newResult;
1206
+ }
1144
1207
  if (output === "text") {
1145
1208
  const result2 = await this.llm.__text({
1146
1209
  messages: messageObjects,
@@ -1195,7 +1258,8 @@ var Agent = class extends (_a = MastraBase) {
1195
1258
  toolsets,
1196
1259
  output = "text",
1197
1260
  temperature,
1198
- toolChoice = "auto"
1261
+ toolChoice = "auto",
1262
+ experimental_output
1199
1263
  } = {}) {
1200
1264
  const runIdToUse = runId || randomUUID();
1201
1265
  let messagesToUse = [];
@@ -1232,7 +1296,44 @@ var Agent = class extends (_a = MastraBase) {
1232
1296
  messageObjects,
1233
1297
  convertedTools
1234
1298
  } = await before();
1235
- if (output === "text") {
1299
+ if (output === "text" && experimental_output) {
1300
+ this.logger.debug(`Starting agent ${this.name} llm stream call`, {
1301
+ runId
1302
+ });
1303
+ const streamResult = await this.llm.__stream({
1304
+ messages: messageObjects,
1305
+ temperature,
1306
+ tools: this.tools,
1307
+ convertedTools,
1308
+ onStepFinish,
1309
+ onFinish: async result => {
1310
+ try {
1311
+ const res = JSON.parse(result) || {};
1312
+ const outputText = res.text;
1313
+ await after({
1314
+ result: res,
1315
+ threadId,
1316
+ memoryConfig: memoryOptions,
1317
+ outputText,
1318
+ runId: runIdToUse
1319
+ });
1320
+ } catch (e) {
1321
+ this.logger.error("Error saving memory on finish", {
1322
+ error: e,
1323
+ runId
1324
+ });
1325
+ }
1326
+ onFinish?.(result);
1327
+ },
1328
+ maxSteps,
1329
+ runId: runIdToUse,
1330
+ toolChoice,
1331
+ experimental_output
1332
+ });
1333
+ const newStreamResult = streamResult;
1334
+ newStreamResult.partialObjectStream = streamResult.experimental_partialOutputStream;
1335
+ return newStreamResult;
1336
+ } else if (output === "text") {
1236
1337
  this.logger.debug(`Starting agent ${this.name} llm stream call`, {
1237
1338
  runId
1238
1339
  });
@@ -1,6 +1,6 @@
1
- import { DefaultStorage } from './chunk-JIV6PDIN.js';
2
- import { InstrumentClass, OTLPTraceExporter, Telemetry } from './chunk-4ZRHVG25.js';
3
- import { createLogger, LogLevel, noopLogger } from './chunk-KP5SHTKA.js';
1
+ import { LibSQLStore } from './chunk-YLCBP7MP.js';
2
+ import { InstrumentClass, OTLPTraceExporter, Telemetry } from './chunk-W5HVJX45.js';
3
+ import { createLogger, LogLevel, noopLogger } from './chunk-PHMSPCTC.js';
4
4
  import { __decoratorStart, __decorateElement, __runInitializers } from './chunk-C6A6W6XS.js';
5
5
 
6
6
  // src/mastra/index.ts
@@ -37,13 +37,13 @@ var Mastra = class {
37
37
  this.logger = logger;
38
38
  let storage = config?.storage;
39
39
  if (!storage) {
40
- storage = new DefaultStorage({
40
+ storage = new LibSQLStore({
41
41
  config: {
42
42
  url: process.env.MASTRA_DEFAULT_STORAGE_URL || `:memory:`
43
43
  }
44
44
  });
45
45
  }
46
- if (storage instanceof DefaultStorage && config?.telemetry?.export?.type !== "custom") {
46
+ if (storage instanceof LibSQLStore && config?.telemetry?.export?.type !== "custom") {
47
47
  const newTelemetry = {
48
48
  ...(config?.telemetry || {}),
49
49
  export: {
@@ -1,8 +1,108 @@
1
- import { DefaultStorage } from './chunk-JIV6PDIN.js';
2
- import { DefaultVectorDB } from './chunk-LKOVXFLE.js';
3
- import { defaultEmbedder } from './chunk-7NPRQT5A.js';
1
+ import { LibSQLVector } from './chunk-33GSTUNK.js';
2
+ import { LibSQLStore } from './chunk-YLCBP7MP.js';
4
3
  import { deepMerge } from './chunk-WIBGG4X6.js';
5
- import { MastraBase } from './chunk-6RDA4JYW.js';
4
+ import { MastraBase } from './chunk-OZ4XVJ6F.js';
5
+ import path from 'path';
6
+ import { experimental_customProvider } from 'ai';
7
+ import node_modulesPath from 'node_modules-path';
8
+
9
+ var cachedPath = false;
10
+ function getModelCachePath() {
11
+ if (cachedPath) return cachedPath;
12
+ const firstNodeModules = node_modulesPath().split("node_modules")[0];
13
+ cachedPath = path.join(firstNodeModules, "node_modules", ".fastembed-model-cache");
14
+ return cachedPath;
15
+ }
16
+ function unbundleableImport(name) {
17
+ const nonStaticallyAnalyzableName = `${name}?d=${Date.now()}`;
18
+ return import(nonStaticallyAnalyzableName.split(`?`)[0]);
19
+ }
20
+ async function generateEmbeddings(values, modelType) {
21
+ try {
22
+ let mod;
23
+ const importErrors = [];
24
+ {
25
+ try {
26
+ mod = await unbundleableImport("fastembed");
27
+ } catch (e) {
28
+ if (e instanceof Error) {
29
+ importErrors.push(e);
30
+ } else {
31
+ throw e;
32
+ }
33
+ }
34
+ }
35
+ if (!mod) {
36
+ throw new Error(`${importErrors.map((e) => e.message).join(`
37
+ `)}
38
+
39
+ This runtime does not support fastembed-js, which is the default embedder in Mastra.
40
+ Scroll up to read import errors. These errors mean you can't use the default Mastra embedder on this hosting platform.
41
+ You can either use Mastra Cloud which supports the default embedder, or you can configure an alternate provider.
42
+
43
+ For example if you're using Memory:
44
+
45
+ import { openai } from "@ai-sdk/openai";
46
+
47
+ const memory = new Memory({
48
+ embedder: openai.embedding("text-embedding-3-small"), // <- doesn't have to be openai
49
+ })
50
+
51
+ Visit https://sdk.vercel.ai/docs/foundations/overview#embedding-models to find an alternate embedding provider
52
+
53
+ If you do not want to use the Memory semantic recall feature, you can disable it entirely and this error will go away.
54
+
55
+ const memory = new Memory({
56
+ options: {
57
+ semanticRecall: false // <- an embedder will not be required with this set to false
58
+ }
59
+ })
60
+ `);
61
+ }
62
+ const { FlagEmbedding, EmbeddingModel } = mod;
63
+ const model = await FlagEmbedding.init({
64
+ model: EmbeddingModel[modelType],
65
+ cacheDir: getModelCachePath()
66
+ });
67
+ const embeddings = await model.embed(values);
68
+ const allResults = [];
69
+ for await (const result of embeddings) {
70
+ allResults.push(...result.map((embedding) => Array.from(embedding)));
71
+ }
72
+ if (allResults.length === 0) throw new Error("No embeddings generated");
73
+ return {
74
+ embeddings: allResults
75
+ };
76
+ } catch (error) {
77
+ console.error("Error generating embeddings:", error);
78
+ throw error;
79
+ }
80
+ }
81
+ var fastEmbedProvider = experimental_customProvider({
82
+ textEmbeddingModels: {
83
+ "bge-small-en-v1.5": {
84
+ specificationVersion: "v1",
85
+ provider: "fastembed",
86
+ modelId: "bge-small-en-v1.5",
87
+ maxEmbeddingsPerCall: 256,
88
+ supportsParallelCalls: true,
89
+ async doEmbed({ values }) {
90
+ return generateEmbeddings(values, "BGESmallENV15");
91
+ }
92
+ },
93
+ "bge-base-en-v1.5": {
94
+ specificationVersion: "v1",
95
+ provider: "fastembed",
96
+ modelId: "bge-base-en-v1.5",
97
+ maxEmbeddingsPerCall: 256,
98
+ supportsParallelCalls: true,
99
+ async doEmbed({ values }) {
100
+ return generateEmbeddings(values, "BGEBaseENV15");
101
+ }
102
+ }
103
+ }
104
+ });
105
+ var defaultEmbedder = fastEmbedProvider.textEmbeddingModel;
6
106
 
7
107
  // src/memory/memory.ts
8
108
  var MastraMemory = class extends MastraBase {
@@ -16,7 +116,7 @@ var MastraMemory = class extends MastraBase {
16
116
  };
17
117
  constructor(config) {
18
118
  super({ component: "MEMORY", name: config.name });
19
- this.storage = config.storage || new DefaultStorage({
119
+ this.storage = config.storage || new LibSQLStore({
20
120
  config: {
21
121
  url: "file:memory.db"
22
122
  }
@@ -24,7 +124,7 @@ var MastraMemory = class extends MastraBase {
24
124
  if (config.vector) {
25
125
  this.vector = config.vector;
26
126
  } else {
27
- this.vector = new DefaultVectorDB({
127
+ this.vector = new LibSQLVector({
28
128
  connectionUrl: "file:memory-vector.db"
29
129
  // file name needs to be different than default storage or it wont work properly
30
130
  });
@@ -1,4 +1,4 @@
1
- import { RegisteredLogger, createLogger } from './chunk-KP5SHTKA.js';
1
+ import { RegisteredLogger, createLogger } from './chunk-PHMSPCTC.js';
2
2
 
3
3
  // src/base.ts
4
4
  var MastraBase = class {
@@ -1,6 +1,6 @@
1
+ import { Transform } from 'stream';
1
2
  import pino from 'pino';
2
3
  import pretty from 'pino-pretty';
3
- import { Transform } from 'stream';
4
4
 
5
5
  // src/logger/index.ts
6
6
  var RegisteredLogger = {
@@ -27,7 +27,7 @@ var LoggerTransport = class extends Transform {
27
27
  constructor(opts = {}) {
28
28
  super({ ...opts, objectMode: true });
29
29
  }
30
- async getLogsByRunId({ runId }) {
30
+ async getLogsByRunId(_args) {
31
31
  return [];
32
32
  }
33
33
  async getLogs() {
@@ -1,4 +1,4 @@
1
- import { Agent } from './chunk-ORM3OCHX.js';
1
+ import { Agent } from './chunk-7TG2Y45H.js';
2
2
  import { CohereClient } from 'cohere-ai';
3
3
 
4
4
  var CohereRelevanceScorer = class {
@@ -1,5 +1,5 @@
1
- import { InstrumentClass } from './chunk-4ZRHVG25.js';
2
- import { MastraBase } from './chunk-6RDA4JYW.js';
1
+ import { InstrumentClass } from './chunk-W5HVJX45.js';
2
+ import { MastraBase } from './chunk-OZ4XVJ6F.js';
3
3
  import { __decoratorStart, __decorateElement, __runInitializers } from './chunk-C6A6W6XS.js';
4
4
 
5
5
  // src/tts/index.ts
@@ -1,6 +1,6 @@
1
- import { MastraBase } from './chunk-6RDA4JYW.js';
2
- import { parse } from 'dotenv';
1
+ import { MastraBase } from './chunk-OZ4XVJ6F.js';
3
2
  import { readFile } from 'fs/promises';
3
+ import { parse } from 'dotenv';
4
4
 
5
5
  var MastraBundler = class extends MastraBase {
6
6
  constructor({ name, component = "BUNDLER" }) {
@@ -37,7 +37,7 @@ function withSpan(options) {
37
37
  args.forEach((arg, index) => {
38
38
  try {
39
39
  span.setAttribute(`${spanName}.argument.${index}`, JSON.stringify(arg));
40
- } catch (e) {
40
+ } catch {
41
41
  span.setAttribute(`${spanName}.argument.${index}`, "[Not Serializable]");
42
42
  }
43
43
  });
@@ -57,7 +57,7 @@ function withSpan(options) {
57
57
  return result.then((resolvedValue) => {
58
58
  try {
59
59
  span.setAttribute(`${spanName}.result`, JSON.stringify(resolvedValue));
60
- } catch (e) {
60
+ } catch {
61
61
  span.setAttribute(`${spanName}.result`, "[Not Serializable]");
62
62
  }
63
63
  return resolvedValue;
@@ -65,7 +65,7 @@ function withSpan(options) {
65
65
  }
66
66
  try {
67
67
  span.setAttribute(`${spanName}.result`, JSON.stringify(result));
68
- } catch (e) {
68
+ } catch {
69
69
  span.setAttribute(`${spanName}.result`, "[Not Serializable]");
70
70
  }
71
71
  return result;
@@ -309,7 +309,7 @@ var Telemetry = class _Telemetry {
309
309
  let recordResult2 = function(res) {
310
310
  try {
311
311
  span.setAttribute(`${context3.spanName}.result`, JSON.stringify(res));
312
- } catch (e) {
312
+ } catch {
313
313
  span.setAttribute(`${context3.spanName}.result`, "[Not Serializable]");
314
314
  }
315
315
  span.end();
@@ -338,7 +338,7 @@ var Telemetry = class _Telemetry {
338
338
  args.forEach((arg, index) => {
339
339
  try {
340
340
  span.setAttribute(`${context3.spanName}.argument.${index}`, JSON.stringify(arg));
341
- } catch (e) {
341
+ } catch {
342
342
  span.setAttribute(`${context3.spanName}.argument.${index}`, "[Not Serializable]");
343
343
  }
344
344
  });
@@ -1,4 +1,4 @@
1
- import { MastraBase } from './chunk-6RDA4JYW.js';
1
+ import { MastraBase } from './chunk-OZ4XVJ6F.js';
2
2
  import { context, trace } from '@opentelemetry/api';
3
3
  import { get } from 'radash';
4
4
  import sift from 'sift';
@@ -747,7 +747,22 @@ var Workflow = class extends MastraBase {
747
747
  runId: this.#runId
748
748
  });
749
749
  if (typeof stepConfig?.when === "function") {
750
- const conditionMet = await stepConfig.when({ context });
750
+ const conditionMet = await stepConfig.when({
751
+ context: {
752
+ ...context,
753
+ getStepResult: (stepId) => {
754
+ if (stepId === "trigger") {
755
+ return context.triggerData;
756
+ }
757
+ const result = context.steps[stepId];
758
+ if (result && result.status === "success") {
759
+ return result.output;
760
+ }
761
+ return void 0;
762
+ }
763
+ },
764
+ ...this.#getInjectables()
765
+ });
751
766
  if (conditionMet) {
752
767
  this.logger.debug(`Condition met for step ${stepNode.step.id}`, {
753
768
  stepId: stepNode.step.id,
@@ -868,7 +883,7 @@ var Workflow = class extends MastraBase {
868
883
  });
869
884
  const resolvedData = {
870
885
  ...context,
871
- getStepPayload: (stepId2) => {
886
+ getStepResult: (stepId2) => {
872
887
  if (stepId2 === "trigger") {
873
888
  return context.triggerData;
874
889
  }
@@ -984,7 +999,8 @@ var Workflow = class extends MastraBase {
984
999
  const handler = async ({ context, ...rest }) => {
985
1000
  const targetStep = this.#steps[stepId];
986
1001
  if (!targetStep) throw new Error(`Step not found`);
987
- const { payload = {}, execute } = targetStep;
1002
+ const { payload = {}, execute = async () => {
1003
+ } } = targetStep;
988
1004
  const mergedData = {
989
1005
  ...payload,
990
1006
  ...context