@mastra/core 0.9.0 → 0.9.1-alpha.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/agent/index.cjs +2 -2
- package/dist/agent/index.d.cts +2 -1
- package/dist/agent/index.d.ts +2 -1
- package/dist/agent/index.js +1 -1
- package/dist/{base-Bk5V1doj.d.ts → base-5aS_vKtG.d.ts} +436 -10
- package/dist/{base-oP3DoUrm.d.cts → base-CDhO2lMb.d.cts} +436 -10
- package/dist/{chunk-ATXF6TIO.cjs → chunk-5RNAWF6S.cjs} +2 -4
- package/dist/{chunk-RASVJ3TR.js → chunk-6UQK6P76.js} +31 -0
- package/dist/{chunk-W5IA5OGL.cjs → chunk-HZZCDTUN.cjs} +31 -0
- package/dist/{chunk-DIZZQ3E5.cjs → chunk-MYAFJQRT.cjs} +2 -2
- package/dist/{chunk-HARYMLZH.js → chunk-XQMX3MB3.js} +3 -4
- package/dist/{chunk-GJWCFDFN.js → chunk-Y6LKS4NI.js} +1 -1
- package/dist/eval/index.d.cts +2 -1
- package/dist/eval/index.d.ts +2 -1
- package/dist/index.cjs +29 -33
- package/dist/index.d.cts +3 -2
- package/dist/index.d.ts +3 -2
- package/dist/index.js +4 -4
- package/dist/integration/index.d.cts +2 -1
- package/dist/integration/index.d.ts +2 -1
- package/dist/llm/index.d.cts +2 -1
- package/dist/llm/index.d.ts +2 -1
- package/dist/mastra/index.cjs +2 -2
- package/dist/mastra/index.d.cts +2 -1
- package/dist/mastra/index.d.ts +2 -1
- package/dist/mastra/index.js +1 -1
- package/dist/memory/index.d.cts +2 -1
- package/dist/memory/index.d.ts +2 -1
- package/dist/network/index.cjs +2 -2
- package/dist/network/index.d.cts +2 -1
- package/dist/network/index.d.ts +2 -1
- package/dist/network/index.js +1 -1
- package/dist/relevance/index.cjs +4 -4
- package/dist/relevance/index.d.cts +2 -1
- package/dist/relevance/index.d.ts +2 -1
- package/dist/relevance/index.js +1 -1
- package/dist/server/index.d.cts +2 -1
- package/dist/server/index.d.ts +2 -1
- package/dist/storage/index.d.cts +2 -1
- package/dist/storage/index.d.ts +2 -1
- package/dist/storage/libsql/index.d.cts +2 -1
- package/dist/storage/libsql/index.d.ts +2 -1
- package/dist/telemetry/index.d.cts +2 -1
- package/dist/telemetry/index.d.ts +2 -1
- package/dist/tools/index.d.cts +2 -1
- package/dist/tools/index.d.ts +2 -1
- package/dist/utils.d.cts +4 -3
- package/dist/utils.d.ts +4 -3
- package/dist/voice/index.d.cts +3 -2
- package/dist/voice/index.d.ts +3 -2
- package/dist/workflows/index.cjs +22 -26
- package/dist/workflows/index.d.cts +3 -2
- package/dist/workflows/index.d.ts +3 -2
- package/dist/workflows/index.js +1 -1
- package/dist/workflows/vNext/index.cjs +975 -0
- package/dist/workflows/vNext/index.d.cts +179 -0
- package/dist/workflows/vNext/index.d.ts +179 -0
- package/dist/workflows/vNext/index.js +963 -0
- package/package.json +11 -1
- package/workflows/vNext.d.ts +1 -0
|
@@ -0,0 +1,963 @@
|
|
|
1
|
+
import { Agent } from '../../chunk-XQMX3MB3.js';
|
|
2
|
+
import { Tool } from '../../chunk-NTHHPNOW.js';
|
|
3
|
+
import { MastraBase } from '../../chunk-CLJQYXNM.js';
|
|
4
|
+
import { RegisteredLogger } from '../../chunk-2BVZNKLX.js';
|
|
5
|
+
import { RuntimeContext } from '../../chunk-M472GIT6.js';
|
|
6
|
+
import { randomUUID } from 'crypto';
|
|
7
|
+
import EventEmitter from 'events';
|
|
8
|
+
import { z } from 'zod';
|
|
9
|
+
|
|
10
|
+
// src/workflows/vNext/execution-engine.ts
|
|
11
|
+
var ExecutionEngine = class extends MastraBase {
|
|
12
|
+
mastra;
|
|
13
|
+
constructor({ mastra }) {
|
|
14
|
+
super({ name: "ExecutionEngine", component: RegisteredLogger.WORKFLOW });
|
|
15
|
+
this.mastra = mastra;
|
|
16
|
+
}
|
|
17
|
+
__registerMastra(mastra) {
|
|
18
|
+
this.mastra = mastra;
|
|
19
|
+
}
|
|
20
|
+
};
|
|
21
|
+
|
|
22
|
+
// src/workflows/vNext/default.ts
|
|
23
|
+
function fmtReturnValue(stepResults, lastOutput, error) {
|
|
24
|
+
const base = {
|
|
25
|
+
status: lastOutput.status,
|
|
26
|
+
steps: stepResults
|
|
27
|
+
};
|
|
28
|
+
if (lastOutput.status === "success") {
|
|
29
|
+
base.result = lastOutput.output;
|
|
30
|
+
} else if (lastOutput.status === "failed") {
|
|
31
|
+
base.error = error instanceof Error ? error.message : error ?? lastOutput.error ?? "Unknown error";
|
|
32
|
+
} else if (lastOutput.status === "suspended") {
|
|
33
|
+
const suspendedStepIds = Object.entries(stepResults).flatMap(([stepId, stepResult]) => {
|
|
34
|
+
if (stepResult?.status === "suspended") {
|
|
35
|
+
const nestedPath = stepResult?.payload?.__workflow_meta?.path;
|
|
36
|
+
return nestedPath ? [[stepId, ...nestedPath]] : [[stepId]];
|
|
37
|
+
}
|
|
38
|
+
return [];
|
|
39
|
+
});
|
|
40
|
+
base.suspended = suspendedStepIds;
|
|
41
|
+
}
|
|
42
|
+
return base;
|
|
43
|
+
}
|
|
44
|
+
var DefaultExecutionEngine = class extends ExecutionEngine {
|
|
45
|
+
/**
|
|
46
|
+
* Executes a workflow run with the provided execution graph and input
|
|
47
|
+
* @param graph The execution graph to execute
|
|
48
|
+
* @param input The input data for the workflow
|
|
49
|
+
* @returns A promise that resolves to the workflow output
|
|
50
|
+
*/
|
|
51
|
+
async execute(params) {
|
|
52
|
+
const { workflowId, runId, graph, input, resume, retryConfig } = params;
|
|
53
|
+
const { attempts = 0, delay = 0 } = retryConfig ?? {};
|
|
54
|
+
const steps = graph.steps;
|
|
55
|
+
if (steps.length === 0) {
|
|
56
|
+
throw new Error("Workflow must have at least one step");
|
|
57
|
+
}
|
|
58
|
+
await this.mastra?.getStorage()?.init();
|
|
59
|
+
let startIdx = 0;
|
|
60
|
+
if (resume?.resumePath) {
|
|
61
|
+
startIdx = resume.resumePath[0];
|
|
62
|
+
resume.resumePath.shift();
|
|
63
|
+
}
|
|
64
|
+
const stepResults = resume?.stepResults || { input };
|
|
65
|
+
let lastOutput;
|
|
66
|
+
for (let i = startIdx; i < steps.length; i++) {
|
|
67
|
+
const entry = steps[i];
|
|
68
|
+
try {
|
|
69
|
+
lastOutput = await this.executeEntry({
|
|
70
|
+
workflowId,
|
|
71
|
+
runId,
|
|
72
|
+
entry,
|
|
73
|
+
prevStep: steps[i - 1],
|
|
74
|
+
stepResults,
|
|
75
|
+
resume,
|
|
76
|
+
executionContext: {
|
|
77
|
+
executionPath: [i],
|
|
78
|
+
suspendedPaths: {},
|
|
79
|
+
retryConfig: { attempts, delay }
|
|
80
|
+
},
|
|
81
|
+
emitter: params.emitter,
|
|
82
|
+
container: params.container
|
|
83
|
+
});
|
|
84
|
+
if (lastOutput.status !== "success") {
|
|
85
|
+
if (entry.type === "step") {
|
|
86
|
+
params.emitter.emit("watch", {
|
|
87
|
+
type: "watch",
|
|
88
|
+
payload: {
|
|
89
|
+
currentStep: {
|
|
90
|
+
id: entry.step.id,
|
|
91
|
+
...lastOutput
|
|
92
|
+
},
|
|
93
|
+
workflowState: {
|
|
94
|
+
status: lastOutput.status,
|
|
95
|
+
steps: stepResults,
|
|
96
|
+
result: null,
|
|
97
|
+
error: lastOutput.error
|
|
98
|
+
}
|
|
99
|
+
},
|
|
100
|
+
eventTimestamp: Date.now()
|
|
101
|
+
});
|
|
102
|
+
}
|
|
103
|
+
return fmtReturnValue(stepResults, lastOutput);
|
|
104
|
+
}
|
|
105
|
+
} catch (e) {
|
|
106
|
+
if (entry.type === "step") {
|
|
107
|
+
params.emitter.emit("watch", {
|
|
108
|
+
type: "watch",
|
|
109
|
+
payload: {
|
|
110
|
+
currentStep: {
|
|
111
|
+
id: entry.step.id,
|
|
112
|
+
...lastOutput
|
|
113
|
+
},
|
|
114
|
+
workflowState: {
|
|
115
|
+
status: lastOutput.status,
|
|
116
|
+
steps: stepResults,
|
|
117
|
+
result: null,
|
|
118
|
+
error: lastOutput.error
|
|
119
|
+
}
|
|
120
|
+
},
|
|
121
|
+
eventTimestamp: Date.now()
|
|
122
|
+
});
|
|
123
|
+
}
|
|
124
|
+
return fmtReturnValue(stepResults, lastOutput, e);
|
|
125
|
+
}
|
|
126
|
+
}
|
|
127
|
+
return fmtReturnValue(stepResults, lastOutput);
|
|
128
|
+
}
|
|
129
|
+
getStepOutput(stepResults, step) {
|
|
130
|
+
if (!step) {
|
|
131
|
+
return stepResults.input;
|
|
132
|
+
} else if (step.type === "step") {
|
|
133
|
+
return stepResults[step.step.id]?.output;
|
|
134
|
+
} else if (step.type === "parallel" || step.type === "conditional") {
|
|
135
|
+
return step.steps.reduce(
|
|
136
|
+
(acc, entry) => {
|
|
137
|
+
if (entry.type === "step") {
|
|
138
|
+
acc[entry.step.id] = stepResults[entry.step.id]?.output;
|
|
139
|
+
} else if (entry.type === "parallel" || entry.type === "conditional") {
|
|
140
|
+
const parallelResult = this.getStepOutput(stepResults, entry)?.output;
|
|
141
|
+
acc = { ...acc, ...parallelResult };
|
|
142
|
+
} else if (entry.type === "loop") {
|
|
143
|
+
acc[entry.step.id] = stepResults[entry.step.id]?.output;
|
|
144
|
+
} else if (entry.type === "foreach") {
|
|
145
|
+
acc[entry.step.id] = stepResults[entry.step.id]?.output;
|
|
146
|
+
}
|
|
147
|
+
return acc;
|
|
148
|
+
},
|
|
149
|
+
{}
|
|
150
|
+
);
|
|
151
|
+
} else if (step.type === "loop") {
|
|
152
|
+
return stepResults[step.step.id]?.output;
|
|
153
|
+
} else if (step.type === "foreach") {
|
|
154
|
+
return stepResults[step.step.id]?.output;
|
|
155
|
+
}
|
|
156
|
+
}
|
|
157
|
+
async executeStep({
|
|
158
|
+
step,
|
|
159
|
+
stepResults,
|
|
160
|
+
executionContext,
|
|
161
|
+
resume,
|
|
162
|
+
prevOutput,
|
|
163
|
+
emitter,
|
|
164
|
+
container
|
|
165
|
+
}) {
|
|
166
|
+
let execResults;
|
|
167
|
+
const retries = step.retries ?? executionContext.retryConfig.attempts ?? 0;
|
|
168
|
+
for (let i = 0; i < retries + 1; i++) {
|
|
169
|
+
try {
|
|
170
|
+
let suspended;
|
|
171
|
+
const result = await step.execute({
|
|
172
|
+
mastra: this.mastra,
|
|
173
|
+
container,
|
|
174
|
+
inputData: prevOutput,
|
|
175
|
+
resumeData: resume?.steps[0] === step.id ? resume?.resumePayload : void 0,
|
|
176
|
+
getInitData: () => stepResults?.input,
|
|
177
|
+
getStepResult: (step2) => {
|
|
178
|
+
const result2 = stepResults[step2.id];
|
|
179
|
+
if (result2?.status === "success") {
|
|
180
|
+
return result2.output;
|
|
181
|
+
}
|
|
182
|
+
return null;
|
|
183
|
+
},
|
|
184
|
+
suspend: async (suspendPayload) => {
|
|
185
|
+
executionContext.suspendedPaths[step.id] = executionContext.executionPath;
|
|
186
|
+
suspended = { payload: suspendPayload };
|
|
187
|
+
},
|
|
188
|
+
resume: {
|
|
189
|
+
steps: resume?.steps?.slice(1) || [],
|
|
190
|
+
resumePayload: resume?.resumePayload,
|
|
191
|
+
// @ts-ignore
|
|
192
|
+
runId: stepResults[step.id]?.payload?.__workflow_meta?.runId
|
|
193
|
+
},
|
|
194
|
+
emitter
|
|
195
|
+
});
|
|
196
|
+
if (suspended) {
|
|
197
|
+
execResults = { status: "suspended", payload: suspended.payload };
|
|
198
|
+
} else {
|
|
199
|
+
execResults = { status: "success", output: result };
|
|
200
|
+
}
|
|
201
|
+
break;
|
|
202
|
+
} catch (e) {
|
|
203
|
+
execResults = { status: "failed", error: e instanceof Error ? e.message : "Unknown error" };
|
|
204
|
+
}
|
|
205
|
+
}
|
|
206
|
+
return execResults;
|
|
207
|
+
}
|
|
208
|
+
async executeParallel({
|
|
209
|
+
workflowId,
|
|
210
|
+
runId,
|
|
211
|
+
entry,
|
|
212
|
+
prevStep,
|
|
213
|
+
stepResults,
|
|
214
|
+
resume,
|
|
215
|
+
executionContext,
|
|
216
|
+
emitter,
|
|
217
|
+
container
|
|
218
|
+
}) {
|
|
219
|
+
let execResults;
|
|
220
|
+
const results = await Promise.all(
|
|
221
|
+
entry.steps.map(
|
|
222
|
+
(step, i) => this.executeEntry({
|
|
223
|
+
workflowId,
|
|
224
|
+
runId,
|
|
225
|
+
entry: step,
|
|
226
|
+
prevStep,
|
|
227
|
+
stepResults,
|
|
228
|
+
resume,
|
|
229
|
+
executionContext: {
|
|
230
|
+
executionPath: [...executionContext.executionPath, i],
|
|
231
|
+
suspendedPaths: executionContext.suspendedPaths,
|
|
232
|
+
retryConfig: executionContext.retryConfig
|
|
233
|
+
},
|
|
234
|
+
emitter,
|
|
235
|
+
container
|
|
236
|
+
})
|
|
237
|
+
)
|
|
238
|
+
);
|
|
239
|
+
const hasFailed = results.find((result) => result.status === "failed");
|
|
240
|
+
const hasSuspended = results.find((result) => result.status === "suspended");
|
|
241
|
+
if (hasFailed) {
|
|
242
|
+
execResults = { status: "failed", error: hasFailed.error };
|
|
243
|
+
} else if (hasSuspended) {
|
|
244
|
+
execResults = { status: "suspended", payload: hasSuspended.payload };
|
|
245
|
+
} else {
|
|
246
|
+
execResults = {
|
|
247
|
+
status: "success",
|
|
248
|
+
output: results.reduce((acc, result, index) => {
|
|
249
|
+
if (result.status === "success") {
|
|
250
|
+
acc[entry.steps[index].step.id] = result.output;
|
|
251
|
+
}
|
|
252
|
+
return acc;
|
|
253
|
+
}, {})
|
|
254
|
+
};
|
|
255
|
+
}
|
|
256
|
+
return execResults;
|
|
257
|
+
}
|
|
258
|
+
async executeConditional({
|
|
259
|
+
workflowId,
|
|
260
|
+
runId,
|
|
261
|
+
entry,
|
|
262
|
+
prevOutput,
|
|
263
|
+
prevStep,
|
|
264
|
+
stepResults,
|
|
265
|
+
resume,
|
|
266
|
+
executionContext,
|
|
267
|
+
emitter,
|
|
268
|
+
container
|
|
269
|
+
}) {
|
|
270
|
+
let execResults;
|
|
271
|
+
const truthyIndexes = (await Promise.all(
|
|
272
|
+
entry.conditions.map(async (cond, index) => {
|
|
273
|
+
try {
|
|
274
|
+
const result = await cond({
|
|
275
|
+
mastra: this.mastra,
|
|
276
|
+
container,
|
|
277
|
+
inputData: prevOutput,
|
|
278
|
+
getInitData: () => stepResults?.input,
|
|
279
|
+
getStepResult: (step) => {
|
|
280
|
+
if (!step?.id) {
|
|
281
|
+
return null;
|
|
282
|
+
}
|
|
283
|
+
const result2 = stepResults[step.id];
|
|
284
|
+
if (result2?.status === "success") {
|
|
285
|
+
return result2.output;
|
|
286
|
+
}
|
|
287
|
+
return null;
|
|
288
|
+
},
|
|
289
|
+
// TODO: this function shouldn't have suspend probably?
|
|
290
|
+
suspend: async (_suspendPayload) => {
|
|
291
|
+
},
|
|
292
|
+
emitter
|
|
293
|
+
});
|
|
294
|
+
return result ? index : null;
|
|
295
|
+
} catch (e) {
|
|
296
|
+
return null;
|
|
297
|
+
}
|
|
298
|
+
})
|
|
299
|
+
)).filter((index) => index !== null);
|
|
300
|
+
const stepsToRun = entry.steps.filter((_, index) => truthyIndexes.includes(index));
|
|
301
|
+
const results = await Promise.all(
|
|
302
|
+
stepsToRun.map(
|
|
303
|
+
(step, index) => this.executeEntry({
|
|
304
|
+
workflowId,
|
|
305
|
+
runId,
|
|
306
|
+
entry: step,
|
|
307
|
+
prevStep,
|
|
308
|
+
stepResults,
|
|
309
|
+
resume,
|
|
310
|
+
executionContext: {
|
|
311
|
+
executionPath: [...executionContext.executionPath, index],
|
|
312
|
+
suspendedPaths: executionContext.suspendedPaths,
|
|
313
|
+
retryConfig: executionContext.retryConfig
|
|
314
|
+
},
|
|
315
|
+
emitter,
|
|
316
|
+
container
|
|
317
|
+
})
|
|
318
|
+
)
|
|
319
|
+
);
|
|
320
|
+
const hasFailed = results.find((result) => result.status === "failed");
|
|
321
|
+
const hasSuspended = results.find((result) => result.status === "suspended");
|
|
322
|
+
if (hasFailed) {
|
|
323
|
+
execResults = { status: "failed", error: hasFailed.error };
|
|
324
|
+
} else if (hasSuspended) {
|
|
325
|
+
execResults = { status: "suspended", payload: hasSuspended.payload };
|
|
326
|
+
} else {
|
|
327
|
+
execResults = {
|
|
328
|
+
status: "success",
|
|
329
|
+
output: results.reduce((acc, result, index) => {
|
|
330
|
+
if (result.status === "success") {
|
|
331
|
+
acc[stepsToRun[index].step.id] = result.output;
|
|
332
|
+
}
|
|
333
|
+
return acc;
|
|
334
|
+
}, {})
|
|
335
|
+
};
|
|
336
|
+
}
|
|
337
|
+
return execResults;
|
|
338
|
+
}
|
|
339
|
+
async executeLoop({
|
|
340
|
+
entry,
|
|
341
|
+
prevOutput,
|
|
342
|
+
stepResults,
|
|
343
|
+
resume,
|
|
344
|
+
executionContext,
|
|
345
|
+
emitter,
|
|
346
|
+
container
|
|
347
|
+
}) {
|
|
348
|
+
const { step, condition } = entry;
|
|
349
|
+
let isTrue = true;
|
|
350
|
+
let result = { status: "success", output: prevOutput };
|
|
351
|
+
do {
|
|
352
|
+
result = await this.executeStep({
|
|
353
|
+
step,
|
|
354
|
+
stepResults,
|
|
355
|
+
executionContext,
|
|
356
|
+
resume,
|
|
357
|
+
prevOutput: result.output,
|
|
358
|
+
emitter,
|
|
359
|
+
container
|
|
360
|
+
});
|
|
361
|
+
if (result.status !== "success") {
|
|
362
|
+
return result;
|
|
363
|
+
}
|
|
364
|
+
isTrue = await condition({
|
|
365
|
+
mastra: this.mastra,
|
|
366
|
+
container,
|
|
367
|
+
inputData: result.output,
|
|
368
|
+
getInitData: () => stepResults?.input,
|
|
369
|
+
getStepResult: (step2) => {
|
|
370
|
+
if (!step2?.id) {
|
|
371
|
+
return null;
|
|
372
|
+
}
|
|
373
|
+
const result2 = stepResults[step2.id];
|
|
374
|
+
return result2?.status === "success" ? result2.output : null;
|
|
375
|
+
},
|
|
376
|
+
suspend: async (_suspendPayload) => {
|
|
377
|
+
},
|
|
378
|
+
emitter
|
|
379
|
+
});
|
|
380
|
+
} while (entry.loopType === "dowhile" ? isTrue : !isTrue);
|
|
381
|
+
return result;
|
|
382
|
+
}
|
|
383
|
+
async executeForeach({
|
|
384
|
+
entry,
|
|
385
|
+
prevOutput,
|
|
386
|
+
stepResults,
|
|
387
|
+
resume,
|
|
388
|
+
executionContext,
|
|
389
|
+
emitter,
|
|
390
|
+
container
|
|
391
|
+
}) {
|
|
392
|
+
const { step, opts } = entry;
|
|
393
|
+
const results = [];
|
|
394
|
+
const concurrency = opts.concurrency;
|
|
395
|
+
for (let i = 0; i < prevOutput.length; i += concurrency) {
|
|
396
|
+
const items = prevOutput.slice(i, i + concurrency);
|
|
397
|
+
const itemsResults = await Promise.all(
|
|
398
|
+
items.map((item) => {
|
|
399
|
+
return this.executeStep({
|
|
400
|
+
step,
|
|
401
|
+
stepResults,
|
|
402
|
+
executionContext,
|
|
403
|
+
resume,
|
|
404
|
+
prevOutput: item,
|
|
405
|
+
emitter,
|
|
406
|
+
container
|
|
407
|
+
});
|
|
408
|
+
})
|
|
409
|
+
);
|
|
410
|
+
for (const result of itemsResults) {
|
|
411
|
+
if (result.status !== "success") {
|
|
412
|
+
return result;
|
|
413
|
+
}
|
|
414
|
+
results.push(result?.output);
|
|
415
|
+
}
|
|
416
|
+
}
|
|
417
|
+
return { status: "success", output: results };
|
|
418
|
+
}
|
|
419
|
+
async executeEntry({
|
|
420
|
+
workflowId,
|
|
421
|
+
runId,
|
|
422
|
+
entry,
|
|
423
|
+
prevStep,
|
|
424
|
+
stepResults,
|
|
425
|
+
resume,
|
|
426
|
+
executionContext,
|
|
427
|
+
emitter,
|
|
428
|
+
container
|
|
429
|
+
}) {
|
|
430
|
+
const prevOutput = this.getStepOutput(stepResults, prevStep);
|
|
431
|
+
let execResults;
|
|
432
|
+
if (entry.type === "step") {
|
|
433
|
+
const { step } = entry;
|
|
434
|
+
execResults = await this.executeStep({
|
|
435
|
+
step,
|
|
436
|
+
stepResults,
|
|
437
|
+
executionContext,
|
|
438
|
+
resume,
|
|
439
|
+
prevOutput,
|
|
440
|
+
emitter,
|
|
441
|
+
container
|
|
442
|
+
});
|
|
443
|
+
} else if (resume?.resumePath?.length && (entry.type === "parallel" || entry.type === "conditional")) {
|
|
444
|
+
const idx = resume.resumePath.shift();
|
|
445
|
+
return this.executeEntry({
|
|
446
|
+
workflowId,
|
|
447
|
+
runId,
|
|
448
|
+
entry: entry.steps[idx],
|
|
449
|
+
prevStep,
|
|
450
|
+
stepResults,
|
|
451
|
+
resume,
|
|
452
|
+
executionContext: {
|
|
453
|
+
executionPath: [...executionContext.executionPath, idx],
|
|
454
|
+
suspendedPaths: executionContext.suspendedPaths,
|
|
455
|
+
retryConfig: executionContext.retryConfig
|
|
456
|
+
},
|
|
457
|
+
emitter,
|
|
458
|
+
container
|
|
459
|
+
});
|
|
460
|
+
} else if (entry.type === "parallel") {
|
|
461
|
+
execResults = await this.executeParallel({
|
|
462
|
+
workflowId,
|
|
463
|
+
runId,
|
|
464
|
+
entry,
|
|
465
|
+
prevStep,
|
|
466
|
+
stepResults,
|
|
467
|
+
resume,
|
|
468
|
+
executionContext,
|
|
469
|
+
emitter,
|
|
470
|
+
container
|
|
471
|
+
});
|
|
472
|
+
} else if (entry.type === "conditional") {
|
|
473
|
+
execResults = await this.executeConditional({
|
|
474
|
+
workflowId,
|
|
475
|
+
runId,
|
|
476
|
+
entry,
|
|
477
|
+
prevStep,
|
|
478
|
+
prevOutput,
|
|
479
|
+
stepResults,
|
|
480
|
+
resume,
|
|
481
|
+
executionContext,
|
|
482
|
+
emitter,
|
|
483
|
+
container
|
|
484
|
+
});
|
|
485
|
+
} else if (entry.type === "loop") {
|
|
486
|
+
execResults = await this.executeLoop({
|
|
487
|
+
workflowId,
|
|
488
|
+
runId,
|
|
489
|
+
entry,
|
|
490
|
+
prevStep,
|
|
491
|
+
prevOutput,
|
|
492
|
+
stepResults,
|
|
493
|
+
resume,
|
|
494
|
+
executionContext,
|
|
495
|
+
emitter,
|
|
496
|
+
container
|
|
497
|
+
});
|
|
498
|
+
} else if (entry.type === "foreach") {
|
|
499
|
+
execResults = await this.executeForeach({
|
|
500
|
+
workflowId,
|
|
501
|
+
runId,
|
|
502
|
+
entry,
|
|
503
|
+
prevStep,
|
|
504
|
+
prevOutput,
|
|
505
|
+
stepResults,
|
|
506
|
+
resume,
|
|
507
|
+
executionContext,
|
|
508
|
+
emitter,
|
|
509
|
+
container
|
|
510
|
+
});
|
|
511
|
+
}
|
|
512
|
+
if (entry.type === "step" || entry.type === "loop" || entry.type === "foreach") {
|
|
513
|
+
stepResults[entry.step.id] = execResults;
|
|
514
|
+
}
|
|
515
|
+
await this.mastra?.getStorage()?.persistWorkflowSnapshot({
|
|
516
|
+
workflowName: workflowId,
|
|
517
|
+
runId,
|
|
518
|
+
snapshot: {
|
|
519
|
+
runId,
|
|
520
|
+
value: {},
|
|
521
|
+
context: stepResults,
|
|
522
|
+
activePaths: [],
|
|
523
|
+
suspendedPaths: executionContext.suspendedPaths,
|
|
524
|
+
// @ts-ignore
|
|
525
|
+
timestamp: Date.now()
|
|
526
|
+
}
|
|
527
|
+
});
|
|
528
|
+
if (entry.type === "step" || entry.type === "loop" || entry.type === "foreach") {
|
|
529
|
+
emitter.emit("watch", {
|
|
530
|
+
type: "watch",
|
|
531
|
+
payload: {
|
|
532
|
+
currentStep: {
|
|
533
|
+
id: entry.step.id,
|
|
534
|
+
status: execResults.status,
|
|
535
|
+
output: execResults.output
|
|
536
|
+
},
|
|
537
|
+
workflowState: {
|
|
538
|
+
status: "running"
|
|
539
|
+
}
|
|
540
|
+
},
|
|
541
|
+
eventTimestamp: Date.now()
|
|
542
|
+
});
|
|
543
|
+
}
|
|
544
|
+
return execResults;
|
|
545
|
+
}
|
|
546
|
+
};
|
|
547
|
+
|
|
548
|
+
// src/workflows/vNext/workflow.ts
|
|
549
|
+
function createStep(params) {
|
|
550
|
+
if (params instanceof Agent) {
|
|
551
|
+
return {
|
|
552
|
+
id: params.name,
|
|
553
|
+
// @ts-ignore
|
|
554
|
+
inputSchema: z.object({
|
|
555
|
+
prompt: z.string()
|
|
556
|
+
// resourceId: z.string().optional(),
|
|
557
|
+
// threadId: z.string().optional(),
|
|
558
|
+
}),
|
|
559
|
+
// @ts-ignore
|
|
560
|
+
outputSchema: z.object({
|
|
561
|
+
text: z.string()
|
|
562
|
+
}),
|
|
563
|
+
execute: async ({ inputData }) => {
|
|
564
|
+
const result = await params.generate(inputData.prompt, {
|
|
565
|
+
// resourceId: inputData.resourceId,
|
|
566
|
+
// threadId: inputData.threadId,
|
|
567
|
+
});
|
|
568
|
+
return {
|
|
569
|
+
text: result.text
|
|
570
|
+
};
|
|
571
|
+
}
|
|
572
|
+
};
|
|
573
|
+
}
|
|
574
|
+
if (params instanceof Tool) {
|
|
575
|
+
if (!params.inputSchema || !params.outputSchema) {
|
|
576
|
+
throw new Error("Tool must have input and output schemas defined");
|
|
577
|
+
}
|
|
578
|
+
return {
|
|
579
|
+
// TODO: tool probably should have strong id type
|
|
580
|
+
// @ts-ignore
|
|
581
|
+
id: params.id,
|
|
582
|
+
inputSchema: params.inputSchema,
|
|
583
|
+
outputSchema: params.outputSchema,
|
|
584
|
+
execute: async ({ inputData, mastra }) => {
|
|
585
|
+
return await params.execute({
|
|
586
|
+
context: inputData,
|
|
587
|
+
mastra
|
|
588
|
+
});
|
|
589
|
+
}
|
|
590
|
+
};
|
|
591
|
+
}
|
|
592
|
+
return {
|
|
593
|
+
id: params.id,
|
|
594
|
+
description: params.description,
|
|
595
|
+
inputSchema: params.inputSchema,
|
|
596
|
+
outputSchema: params.outputSchema,
|
|
597
|
+
resumeSchema: params.resumeSchema,
|
|
598
|
+
suspendSchema: params.suspendSchema,
|
|
599
|
+
execute: params.execute
|
|
600
|
+
};
|
|
601
|
+
}
|
|
602
|
+
function cloneStep(step, opts) {
|
|
603
|
+
return {
|
|
604
|
+
id: opts.id,
|
|
605
|
+
description: step.description,
|
|
606
|
+
inputSchema: step.inputSchema,
|
|
607
|
+
outputSchema: step.outputSchema,
|
|
608
|
+
execute: step.execute
|
|
609
|
+
};
|
|
610
|
+
}
|
|
611
|
+
function createWorkflow(params) {
|
|
612
|
+
return new NewWorkflow(params);
|
|
613
|
+
}
|
|
614
|
+
var NewWorkflow = class extends MastraBase {
|
|
615
|
+
id;
|
|
616
|
+
description;
|
|
617
|
+
inputSchema;
|
|
618
|
+
outputSchema;
|
|
619
|
+
stepFlow;
|
|
620
|
+
executionEngine;
|
|
621
|
+
executionGraph;
|
|
622
|
+
retryConfig;
|
|
623
|
+
#mastra;
|
|
624
|
+
constructor({
|
|
625
|
+
mastra,
|
|
626
|
+
id,
|
|
627
|
+
inputSchema,
|
|
628
|
+
outputSchema,
|
|
629
|
+
description,
|
|
630
|
+
executionEngine,
|
|
631
|
+
retryConfig
|
|
632
|
+
}) {
|
|
633
|
+
super({ name: id, component: RegisteredLogger.WORKFLOW });
|
|
634
|
+
this.id = id;
|
|
635
|
+
this.description = description;
|
|
636
|
+
this.inputSchema = inputSchema;
|
|
637
|
+
this.outputSchema = outputSchema;
|
|
638
|
+
this.retryConfig = retryConfig ?? { attempts: 0, delay: 0 };
|
|
639
|
+
this.executionGraph = this.buildExecutionGraph();
|
|
640
|
+
this.stepFlow = [];
|
|
641
|
+
this.#mastra = mastra;
|
|
642
|
+
if (!executionEngine) {
|
|
643
|
+
this.executionEngine = new DefaultExecutionEngine({ mastra: this.#mastra });
|
|
644
|
+
} else {
|
|
645
|
+
this.executionEngine = executionEngine;
|
|
646
|
+
}
|
|
647
|
+
}
|
|
648
|
+
__registerMastra(mastra) {
|
|
649
|
+
this.#mastra = mastra;
|
|
650
|
+
this.executionEngine.__registerMastra(mastra);
|
|
651
|
+
}
|
|
652
|
+
__registerPrimitives(p) {
|
|
653
|
+
if (p.telemetry) {
|
|
654
|
+
this.__setTelemetry(p.telemetry);
|
|
655
|
+
}
|
|
656
|
+
if (p.logger) {
|
|
657
|
+
this.__setLogger(p.logger);
|
|
658
|
+
}
|
|
659
|
+
}
|
|
660
|
+
/**
|
|
661
|
+
* Adds a step to the workflow
|
|
662
|
+
* @param step The step to add to the workflow
|
|
663
|
+
* @returns The workflow instance for chaining
|
|
664
|
+
*/
|
|
665
|
+
then(step) {
|
|
666
|
+
this.stepFlow.push({ type: "step", step });
|
|
667
|
+
return this;
|
|
668
|
+
}
|
|
669
|
+
map(mappingConfig) {
|
|
670
|
+
const mappingStep = createStep({
|
|
671
|
+
id: `mapping_${randomUUID()}`,
|
|
672
|
+
inputSchema: z.object({}),
|
|
673
|
+
outputSchema: z.object({}),
|
|
674
|
+
execute: async ({ getStepResult, getInitData, container }) => {
|
|
675
|
+
const result = {};
|
|
676
|
+
for (const [key, mapping] of Object.entries(mappingConfig)) {
|
|
677
|
+
const m = mapping;
|
|
678
|
+
if (m.value) {
|
|
679
|
+
result[key] = m.value;
|
|
680
|
+
continue;
|
|
681
|
+
}
|
|
682
|
+
if (m.containerPath) {
|
|
683
|
+
result[key] = container.get(m.containerPath);
|
|
684
|
+
continue;
|
|
685
|
+
}
|
|
686
|
+
const stepResult = m.initData ? getInitData() : getStepResult(m.step);
|
|
687
|
+
if (m.path === ".") {
|
|
688
|
+
result[key] = stepResult;
|
|
689
|
+
continue;
|
|
690
|
+
}
|
|
691
|
+
const pathParts = m.path.split(".");
|
|
692
|
+
let value = stepResult;
|
|
693
|
+
for (const part of pathParts) {
|
|
694
|
+
if (typeof value === "object" && value !== null) {
|
|
695
|
+
value = value[part];
|
|
696
|
+
} else {
|
|
697
|
+
throw new Error(`Invalid path ${m.path} in step ${m.step.id}`);
|
|
698
|
+
}
|
|
699
|
+
}
|
|
700
|
+
result[key] = value;
|
|
701
|
+
}
|
|
702
|
+
return result;
|
|
703
|
+
}
|
|
704
|
+
});
|
|
705
|
+
this.stepFlow.push({ type: "step", step: mappingStep });
|
|
706
|
+
return this;
|
|
707
|
+
}
|
|
708
|
+
// TODO: make typing better here
|
|
709
|
+
parallel(steps) {
|
|
710
|
+
this.stepFlow.push({ type: "parallel", steps: steps.map((step) => ({ type: "step", step })) });
|
|
711
|
+
return this;
|
|
712
|
+
}
|
|
713
|
+
// TODO: make typing better here
|
|
714
|
+
branch(steps) {
|
|
715
|
+
this.stepFlow.push({
|
|
716
|
+
type: "conditional",
|
|
717
|
+
steps: steps.map(([_cond, step]) => ({ type: "step", step })),
|
|
718
|
+
conditions: steps.map(([cond]) => cond)
|
|
719
|
+
});
|
|
720
|
+
return this;
|
|
721
|
+
}
|
|
722
|
+
dowhile(step, condition) {
|
|
723
|
+
this.stepFlow.push({ type: "loop", step, condition, loopType: "dowhile" });
|
|
724
|
+
return this;
|
|
725
|
+
}
|
|
726
|
+
dountil(step, condition) {
|
|
727
|
+
this.stepFlow.push({ type: "loop", step, condition, loopType: "dountil" });
|
|
728
|
+
return this;
|
|
729
|
+
}
|
|
730
|
+
foreach(step, opts) {
|
|
731
|
+
this.stepFlow.push({ type: "foreach", step, opts: opts ?? { concurrency: 1 } });
|
|
732
|
+
return this;
|
|
733
|
+
}
|
|
734
|
+
/**
|
|
735
|
+
* Builds the execution graph for this workflow
|
|
736
|
+
* @returns The execution graph that can be used to execute the workflow
|
|
737
|
+
*/
|
|
738
|
+
buildExecutionGraph() {
|
|
739
|
+
return {
|
|
740
|
+
id: randomUUID(),
|
|
741
|
+
steps: this.stepFlow
|
|
742
|
+
};
|
|
743
|
+
}
|
|
744
|
+
/**
|
|
745
|
+
* Finalizes the workflow definition and prepares it for execution
|
|
746
|
+
* This method should be called after all steps have been added to the workflow
|
|
747
|
+
* @returns A built workflow instance ready for execution
|
|
748
|
+
*/
|
|
749
|
+
commit() {
|
|
750
|
+
this.executionGraph = this.buildExecutionGraph();
|
|
751
|
+
return this;
|
|
752
|
+
}
|
|
753
|
+
/**
|
|
754
|
+
* Creates a new workflow run instance
|
|
755
|
+
* @param options Optional configuration for the run
|
|
756
|
+
* @returns A Run instance that can be used to execute the workflow
|
|
757
|
+
*/
|
|
758
|
+
createRun(options) {
|
|
759
|
+
const runIdToUse = options?.runId || randomUUID();
|
|
760
|
+
return new Run({
|
|
761
|
+
workflowId: this.id,
|
|
762
|
+
runId: runIdToUse,
|
|
763
|
+
executionEngine: this.executionEngine,
|
|
764
|
+
executionGraph: this.executionGraph,
|
|
765
|
+
mastra: this.#mastra,
|
|
766
|
+
retryConfig: this.retryConfig
|
|
767
|
+
});
|
|
768
|
+
}
|
|
769
|
+
async execute({
|
|
770
|
+
inputData,
|
|
771
|
+
resumeData,
|
|
772
|
+
suspend,
|
|
773
|
+
resume,
|
|
774
|
+
emitter,
|
|
775
|
+
mastra
|
|
776
|
+
}) {
|
|
777
|
+
this.__registerMastra(mastra);
|
|
778
|
+
const run = resume?.steps?.length ? this.createRun({ runId: resume.runId }) : this.createRun();
|
|
779
|
+
const unwatch = run.watch((event) => {
|
|
780
|
+
emitter.emit("nested-watch", { event, workflowId: this.id, runId: run.runId, isResume: !!resume?.steps?.length });
|
|
781
|
+
});
|
|
782
|
+
const res = resume?.steps?.length ? await run.resume({ resumeData, step: resume.steps }) : await run.start({ inputData });
|
|
783
|
+
unwatch();
|
|
784
|
+
const suspendedSteps = Object.entries(res.steps).filter(([_stepName, stepResult]) => {
|
|
785
|
+
const stepRes = stepResult;
|
|
786
|
+
return stepRes?.status === "suspended";
|
|
787
|
+
});
|
|
788
|
+
if (suspendedSteps?.length) {
|
|
789
|
+
for (const [stepName, stepResult] of suspendedSteps) {
|
|
790
|
+
const suspendPath = [stepName, ...stepResult?.payload?.__workflow_meta?.path ?? []];
|
|
791
|
+
await suspend({
|
|
792
|
+
...stepResult?.payload,
|
|
793
|
+
__workflow_meta: { runId: run.runId, path: suspendPath }
|
|
794
|
+
});
|
|
795
|
+
}
|
|
796
|
+
}
|
|
797
|
+
if (res.status === "failed") {
|
|
798
|
+
throw new Error(res.error);
|
|
799
|
+
}
|
|
800
|
+
return res.status === "success" ? res.result : void 0;
|
|
801
|
+
}
|
|
802
|
+
async getWorkflowRuns() {
|
|
803
|
+
const storage = this.#mastra?.getStorage();
|
|
804
|
+
if (!storage) {
|
|
805
|
+
this.logger.debug("Cannot get workflow runs. Mastra engine is not initialized");
|
|
806
|
+
return { runs: [], total: 0 };
|
|
807
|
+
}
|
|
808
|
+
return storage.getWorkflowRuns({ workflowName: this.id });
|
|
809
|
+
}
|
|
810
|
+
};
|
|
811
|
+
var Run = class {
  emitter;
  /**
   * Unique identifier for this workflow
   */
  workflowId;
  /**
   * Unique identifier for this run
   */
  runId;
  /**
   * Internal state of the workflow run
   */
  state = {};
  /**
   * The execution engine for this run
   */
  executionEngine;
  /**
   * The execution graph for this run
   */
  executionGraph;
  /**
   * The Mastra instance backing this run (used to reach storage in resume())
   */
  #mastra;
  retryConfig;
  constructor(params) {
    this.workflowId = params.workflowId;
    this.runId = params.runId;
    this.executionEngine = params.executionEngine;
    this.executionGraph = params.executionGraph;
    this.#mastra = params.mastra;
    this.emitter = new EventEmitter();
    this.retryConfig = params.retryConfig;
  }
  /**
   * Starts the workflow execution with the provided input
   * @param input The input data for the workflow
   * @returns A promise that resolves to the workflow output
   */
  async start({
    inputData,
    container
  }) {
    return this.executionEngine.execute({
      workflowId: this.workflowId,
      runId: this.runId,
      graph: this.executionGraph,
      input: inputData,
      emitter: this.emitter,
      retryConfig: this.retryConfig,
      container: container ?? new RuntimeContext()
    });
  }
  /**
   * Subscribes to watch events for this run. Events from nested workflows
   * are re-emitted with their step ids prefixed as "<workflowId>.<stepId>".
   * @param cb Callback invoked with { type, payload, eventTimestamp }
   * @returns An unsubscribe function that detaches both internal listeners
   */
  watch(cb) {
    const onWatch = ({ type, payload, eventTimestamp }) => {
      this.updateState(payload);
      cb({ type, payload: this.getState(), eventTimestamp });
    };
    const onNestedWatch = ({ event, workflowId }) => {
      try {
        const { type, payload, eventTimestamp } = event;
        // Re-key nested step results as "<workflowId>.<stepId>" so they
        // cannot collide with this run's own step ids.
        const prefixedSteps = Object.fromEntries(
          Object.entries(payload?.workflowState?.steps ?? {}).map(([stepId, step]) => [
            `${workflowId}.${stepId}`,
            step
          ])
        );
        const newPayload = {
          currentStep: {
            ...payload?.currentStep,
            id: `${workflowId}.${payload?.currentStep?.id}`
          },
          workflowState: {
            ...payload?.workflowState,
            steps: prefixedSteps
          }
        };
        this.updateState(newPayload);
        cb({ type, payload: this.getState(), eventTimestamp });
      } catch (e) {
        console.error(e);
      }
    };
    this.emitter.on("watch", onWatch);
    this.emitter.on("nested-watch", onNestedWatch);
    // BUG FIX: the previous implementation returned
    // `() => this.emitter.off("watch", cb)`, but `cb` was never registered
    // directly -- only the wrapper closures were -- so unsubscribing was a
    // no-op and both listeners leaked. Remove the actual handlers instead.
    return () => {
      this.emitter.off("watch", onWatch);
      this.emitter.off("nested-watch", onNestedWatch);
    };
  }
  async resume(params) {
    // Normalize `step` (single value or array; step object or id string)
    // into a flat array of step ids.
    const steps = (Array.isArray(params.step) ? params.step : [params.step]).map(
      (step) => typeof step === "string" ? step : step?.id
    );
    const snapshot = await this.#mastra?.storage?.loadWorkflowSnapshot({
      workflowName: this.workflowId,
      runId: this.runId
    });
    return this.executionEngine.execute({
      workflowId: this.workflowId,
      runId: this.runId,
      graph: this.executionGraph,
      input: params.resumeData,
      resume: {
        steps,
        stepResults: snapshot?.context,
        resumePayload: params.resumeData,
        // @ts-ignore
        resumePath: snapshot?.suspendedPaths?.[steps?.[0]]
      },
      emitter: this.emitter,
      container: params.container ?? new RuntimeContext()
    });
  }
  /**
   * Returns the current state of the workflow run
   * @returns The current state of the workflow run
   */
  getState() {
    return this.state;
  }
  updateState(state) {
    if (state.currentStep) {
      this.state.currentStep = state.currentStep;
    }
    if (state.workflowState) {
      this.state.workflowState = deepMerge(this.state.workflowState ?? {}, state.workflowState ?? {});
    }
  }
};
|
|
940
|
+
function deepMerge(a, b) {
|
|
941
|
+
if (!a || typeof a !== "object") return b;
|
|
942
|
+
if (!b || typeof b !== "object") return a;
|
|
943
|
+
const result = { ...a };
|
|
944
|
+
for (const key in b) {
|
|
945
|
+
if (b[key] === void 0) continue;
|
|
946
|
+
if (b[key] !== null && typeof b[key] === "object") {
|
|
947
|
+
const aVal = result[key];
|
|
948
|
+
const bVal = b[key];
|
|
949
|
+
if (Array.isArray(bVal)) {
|
|
950
|
+
result[key] = Array.isArray(aVal) ? [...aVal, ...bVal].filter((item) => item !== void 0) : bVal.filter((item) => item !== void 0);
|
|
951
|
+
} else if (typeof aVal === "object" && aVal !== null) {
|
|
952
|
+
result[key] = deepMerge(aVal, bVal);
|
|
953
|
+
} else {
|
|
954
|
+
result[key] = bVal;
|
|
955
|
+
}
|
|
956
|
+
} else {
|
|
957
|
+
result[key] = b[key];
|
|
958
|
+
}
|
|
959
|
+
}
|
|
960
|
+
return result;
|
|
961
|
+
}
|
|
962
|
+
|
|
963
|
+
export { DefaultExecutionEngine, ExecutionEngine, NewWorkflow, Run, cloneStep, createStep, createWorkflow };
|