@mastra/inngest 0.0.0-fix-multi-modal-for-cloud-20251028082043 → 0.0.0-fix-persist-session-cache-option-mcp-server-20251030161352
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +13 -3
- package/dist/index.cjs +162 -102
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +61 -50
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +163 -103
- package/dist/index.js.map +1 -1
- package/package.json +7 -7
package/CHANGELOG.md
CHANGED

@@ -1,11 +1,17 @@
 # @mastra/inngest
 
-## 0.0.0-fix-
+## 0.0.0-fix-persist-session-cache-option-mcp-server-20251030161352
 
 ### Major Changes
 
+- Rename RuntimeContext to RequestContext ([#9511](https://github.com/mastra-ai/mastra/pull/9511))
+
+- Changing getAgents -> listAgents, getTools -> listTools, getWorkflows -> listWorkflows ([#9495](https://github.com/mastra-ai/mastra/pull/9495))
+
 - Removed old tracing code based on OpenTelemetry ([#9237](https://github.com/mastra-ai/mastra/pull/9237))
 
+- Remove legacy evals from Mastra ([#9491](https://github.com/mastra-ai/mastra/pull/9491))
+
 ### Minor Changes
 
 - Update peer dependencies to match core package version bump (1.0.0) ([#9237](https://github.com/mastra-ai/mastra/pull/9237))

@@ -36,8 +42,12 @@
 
 - Add tool call approval ([#8649](https://github.com/mastra-ai/mastra/pull/8649))
 
--
-
+- Added support for .streamVNext and .stream that uses it in the inngest execution engine ([#9434](https://github.com/mastra-ai/mastra/pull/9434))
+
+- Prevent changing workflow status to suspended when some parallel steps are still running ([#9431](https://github.com/mastra-ai/mastra/pull/9431))
+
+- Updated dependencies [[`f743dbb`](https://github.com/mastra-ai/mastra/commit/f743dbb8b40d1627b5c10c0e6fc154f4ebb6e394), [`0e8ed46`](https://github.com/mastra-ai/mastra/commit/0e8ed467c54d6901a6a365f270ec15d6faadb36c), [`6c049d9`](https://github.com/mastra-ai/mastra/commit/6c049d94063fdcbd5b81c4912a2bf82a92c9cc0b), [`3443770`](https://github.com/mastra-ai/mastra/commit/3443770662df8eb24c9df3589b2792d78cfcb811), [`9e1911d`](https://github.com/mastra-ai/mastra/commit/9e1911db2b4db85e0e768c3f15e0d61e319869f6), [`ebac155`](https://github.com/mastra-ai/mastra/commit/ebac15564a590117db7078233f927a7e28a85106), [`5948e6a`](https://github.com/mastra-ai/mastra/commit/5948e6a5146c83666ba3f294b2be576c82a513fb), [`8940859`](https://github.com/mastra-ai/mastra/commit/89408593658199b4ad67f7b65e888f344e64a442), [`e629310`](https://github.com/mastra-ai/mastra/commit/e629310f1a73fa236d49ec7a1d1cceb6229dc7cc), [`4c6b492`](https://github.com/mastra-ai/mastra/commit/4c6b492c4dd591c6a592520c1f6855d6e936d71f), [`9d819d5`](https://github.com/mastra-ai/mastra/commit/9d819d54b61481639f4008e4694791bddf187edd), [`71c8d6c`](https://github.com/mastra-ai/mastra/commit/71c8d6c161253207b2b9588bdadb7eed604f7253), [`6179a9b`](https://github.com/mastra-ai/mastra/commit/6179a9ba36ffac326de3cc3c43cdc8028d37c251), [`7051bf3`](https://github.com/mastra-ai/mastra/commit/7051bf38b3b122a069008f861f7bfc004a6d9f6e), [`5df9cce`](https://github.com/mastra-ai/mastra/commit/5df9cce1a753438413f64c11eeef8f845745c2a8), [`c576fc0`](https://github.com/mastra-ai/mastra/commit/c576fc0b100b2085afded91a37c97a0ea0ec09c7), [`9f4a683`](https://github.com/mastra-ai/mastra/commit/9f4a6833e88b52574665c028fd5508ad5c2f6004), [`57d157f`](https://github.com/mastra-ai/mastra/commit/57d157f0b163a95c3e6c9eae31bdb11d1bfc64f9), [`2a90c55`](https://github.com/mastra-ai/mastra/commit/2a90c55a86a9210697d5adaab5ee94584b079adc), [`d78b38d`](https://github.com/mastra-ai/mastra/commit/d78b38d898fce285260d3bbb4befade54331617f), [`c710c16`](https://github.com/mastra-ai/mastra/commit/c710c1652dccfdc4111c8412bca7a6bb1d48b441), [`cfae733`](https://github.com/mastra-ai/mastra/commit/cfae73394f4920635e6c919c8e95ff9a0788e2e5), [`e3dfda7`](https://github.com/mastra-ai/mastra/commit/e3dfda7b11bf3b8c4bb55637028befb5f387fc74), [`844ea5d`](https://github.com/mastra-ai/mastra/commit/844ea5dc0c248961e7bf73629ae7dcff503e853c), [`f0f8f12`](https://github.com/mastra-ai/mastra/commit/f0f8f125c308f2d0fd36942ef652fd852df7522f), [`7b763e5`](https://github.com/mastra-ai/mastra/commit/7b763e52fc3eaf699c2a99f2adf418dd46e4e9a5), [`d36cfbb`](https://github.com/mastra-ai/mastra/commit/d36cfbbb6565ba5f827883cc9bb648eb14befdc1), [`3697853`](https://github.com/mastra-ai/mastra/commit/3697853deeb72017d90e0f38a93c1e29221aeca0), [`a534e95`](https://github.com/mastra-ai/mastra/commit/a534e9591f83b3cc1ebff99c67edf4cda7bf81d3), [`9d0e7fe`](https://github.com/mastra-ai/mastra/commit/9d0e7feca8ed98de959f53476ee1456073673348), [`53d927c`](https://github.com/mastra-ai/mastra/commit/53d927cc6f03bff33655b7e2b788da445a08731d), [`22f64bc`](https://github.com/mastra-ai/mastra/commit/22f64bc1d37149480b58bf2fefe35b79a1e3e7d5), [`bda6370`](https://github.com/mastra-ai/mastra/commit/bda637009360649aaf579919e7873e33553c273e), [`c7f1f7d`](https://github.com/mastra-ai/mastra/commit/c7f1f7d24f61f247f018cc2d1f33bf63212959a7), [`735d8c1`](https://github.com/mastra-ai/mastra/commit/735d8c1c0d19fbc09e6f8b66cf41bc7655993838), [`c218bd3`](https://github.com/mastra-ai/mastra/commit/c218bd3759e32423735b04843a09404572631014), [`2c4438b`](https://github.com/mastra-ai/mastra/commit/2c4438b87817ab7eed818c7990fef010475af1a3), [`2b8893c`](https://github.com/mastra-ai/mastra/commit/2b8893cb108ef9acb72ee7835cd625610d2c1a4a), [`8e5c75b`](https://github.com/mastra-ai/mastra/commit/8e5c75bdb1d08a42d45309a4c72def4b6890230f), [`fa8409b`](https://github.com/mastra-ai/mastra/commit/fa8409bc39cfd8ba6643b9db5269b90b22e2a2f7), [`173c535`](https://github.com/mastra-ai/mastra/commit/173c535c0645b0da404fe09f003778f0b0d4e019)]:
+  - @mastra/core@0.0.0-fix-persist-session-cache-option-mcp-server-20251030161352
 
 ## 0.17.0
 
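For context on the breaking changes listed above, here is a minimal usage sketch of the renamed registry accessors and the RuntimeContext -> RequestContext rename. The import paths and the set() call are assumptions carried over from the previous RuntimeContext API; this diff only confirms the renames themselves.

import { Mastra } from "@mastra/core";
import { RequestContext } from "@mastra/core/di"; // formerly RuntimeContext (assumed path)

const mastra = new Mastra({});

// getWorkflows()/getAgents()/getTools() are replaced by the list* variants:
const workflows = mastra.listWorkflows();
const agents = mastra.listAgents();
const tools = mastra.listTools();

// Values that previously travelled in a RuntimeContext now travel in a RequestContext.
const requestContext = new RequestContext();
requestContext.set("userId", "user-123"); // assumed to keep the old set/get API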
package/dist/index.cjs
CHANGED

@@ -20,7 +20,7 @@ function serve({
   functions: userFunctions = [],
   registerOptions
 }) {
-  const wfs = mastra.
+  const wfs = mastra.listWorkflows();
   const workflowFunctions = Array.from(
     new Set(
       Object.values(wfs).flatMap((wf) => {
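The hunk above shows serve() switching to mastra.listWorkflows() when it builds the Inngest function list. A rough wiring sketch follows; the `mastra` and `inngest` parameters are assumptions inferred from how they are used in the function body, and the Mastra instance import is hypothetical.

import { Inngest } from "inngest";
import { serve } from "@mastra/inngest";
import { mastra } from "./mastra"; // hypothetical Mastra instance with Inngest workflows

const inngest = new Inngest({ id: "my-app" });

// One Inngest function is generated per workflow returned by mastra.listWorkflows(),
// merged with any user-supplied functions before registration.
export const handler = serve({
  mastra,
  inngest,
  functions: [],       // extra user functions (userFunctions in the code above)
  registerOptions: {}, // forwarded to Inngest's own serve()
});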
@@ -110,9 +110,15 @@ var InngestRun = class extends workflows.Run {
       });
     }
   }
-  async start({
+  async start(params) {
+    return this._start(params);
+  }
+  async _start({
     inputData,
-    initialState
+    initialState,
+    outputOptions,
+    tracingOptions,
+    format
   }) {
     await this.#mastra.getStorage()?.persistWorkflowSnapshot({
       workflowName: this.workflowId,

@@ -139,7 +145,10 @@ var InngestRun = class extends workflows.Run {
         inputData: inputDataToUse,
         initialState: initialStateToUse,
         runId: this.runId,
-        resourceId: this.resourceId
+        resourceId: this.resourceId,
+        outputOptions,
+        tracingOptions,
+        format
       }
     });
     const eventId = eventOutput.ids[0];
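Together, the two hunks above thread outputOptions, tracingOptions and an internal format flag from run.start() into the Inngest event payload. A hedged calling sketch follows; createRunAsync() and the workflow id used here are assumptions not shown in this diff.

import { mastra } from "./mastra"; // hypothetical Mastra instance

const workflow = mastra.listWorkflows()["my-workflow"]; // hypothetical workflow id
const run = await workflow.createRunAsync();            // assumed run-creation API

const result = await run.start({
  inputData: { city: "Berlin" }, // hypothetical input matching the workflow's inputSchema
  initialState: {},
  // New pass-through options: forwarded as event.data.outputOptions / event.data.tracingOptions.
  // The `format` field is set internally ("legacy" by streamLegacy(), "vnext" by stream()).
});

console.log(result.status); // "success" | "failed" | "suspended"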
@@ -228,20 +237,35 @@ var InngestRun = class extends workflows.Run {
       });
     };
   }
-  streamLegacy({ inputData,
+  streamLegacy({ inputData, requestContext } = {}) {
     const { readable, writable } = new TransformStream();
     const writer = writable.getWriter();
     const unwatch = this.watch(async (event) => {
       try {
+        await writer.write({
+          // @ts-ignore
+          type: "start",
+          // @ts-ignore
+          payload: { runId: this.runId }
+        });
         const e = {
           ...event,
           type: event.type.replace("workflow-", "")
         };
+        if (e.type === "step-output") {
+          e.type = e.payload.output.type;
+          e.payload = e.payload.output.payload;
+        }
         await writer.write(e);
       } catch {
       }
     }, "watch-v2");
     this.closeStreamAction = async () => {
+      await writer.write({
+        type: "finish",
+        // @ts-ignore
+        payload: { runId: this.runId }
+      });
       unwatch();
       try {
         await writer.close();

@@ -251,7 +275,7 @@ var InngestRun = class extends workflows.Run {
         writer.releaseLock();
       }
     };
-    this.executionResults = this.
+    this.executionResults = this._start({ inputData, requestContext, format: "legacy" }).then((result) => {
      if (result.status !== "suspended") {
        this.closeStreamAction?.().catch(() => {
        });
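The streamLegacy() changes above add synthetic start/finish chunks and unwrap step-output events before writing them to the legacy stream. A self-contained sketch of that mapping, with simplified, assumed types:

type WatchEvent = { type: string; payload: any };

// Mirrors the transformation applied inside the watch callback above:
// strip the "workflow-" prefix, then surface nested step-output chunks directly.
function toLegacyChunk(event: WatchEvent): WatchEvent {
  const e = { ...event, type: event.type.replace("workflow-", "") };
  if (e.type === "step-output") {
    return { type: e.payload.output.type, payload: e.payload.output.payload };
  }
  return e;
}

// The run additionally brackets the stream with:
//   { type: "start",  payload: { runId } }  written before the first event
//   { type: "finish", payload: { runId } }  written when closeStreamAction() closes the stream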
@@ -265,11 +289,18 @@ var InngestRun = class extends workflows.Run {
   }
   stream({
     inputData,
-
-
+    requestContext,
+    tracingOptions,
+    closeOnSuspend = true,
+    initialState,
+    outputOptions
   } = {}) {
+    if (this.closeStreamAction && this.streamOutput) {
+      return this.streamOutput;
+    }
+    this.closeStreamAction = async () => {
+    };
     const self = this;
-    let streamOutput;
     const stream$1 = new web.ReadableStream({
       async start(controller) {
         const unwatch = self.watch(async ({ type, from = stream.ChunkFrom.WORKFLOW, payload }) => {

@@ -278,7 +309,7 @@ var InngestRun = class extends workflows.Run {
             runId: self.runId,
             from,
             payload: {
-              stepName: payload
+              stepName: payload?.id,
               ...payload
             }
           });

@@ -291,29 +322,46 @@ var InngestRun = class extends workflows.Run {
            console.error("Error closing stream:", err);
          }
        };
-        const executionResultsPromise = self.
+        const executionResultsPromise = self._start({
          inputData,
-
+          requestContext,
+          // tracingContext, // We are not able to pass a reference to a span here, what to do?
+          initialState,
+          tracingOptions,
+          outputOptions,
+          format: "vnext"
        });
-
-
-
-
-
+        let executionResults;
+        try {
+          executionResults = await executionResultsPromise;
+          if (closeOnSuspend) {
+            self.closeStreamAction?.().catch(() => {
+            });
+          } else if (executionResults.status !== "suspended") {
+            self.closeStreamAction?.().catch(() => {
+            });
+          }
+          if (self.streamOutput) {
+            self.streamOutput.updateResults(
+              executionResults
+            );
+          }
+        } catch (err) {
+          self.streamOutput?.rejectResults(err);
          self.closeStreamAction?.().catch(() => {
          });
        }
-        if (streamOutput) {
-          streamOutput.updateResults(executionResults);
-        }
      }
    });
-    streamOutput = new stream.WorkflowRunOutput({
+    this.streamOutput = new stream.WorkflowRunOutput({
      runId: this.runId,
      workflowId: this.workflowId,
      stream: stream$1
    });
-    return streamOutput;
+    return this.streamOutput;
+  }
+  streamVNext(args = {}) {
+    return this.stream(args);
  }
};
var InngestWorkflow = class _InngestWorkflow extends workflows.Workflow {
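The rewritten stream() above caches its WorkflowRunOutput on the run, and streamVNext() becomes a thin alias for it. A sketch of the call shape, based only on what the code above shows; how the returned WorkflowRunOutput is consumed is not part of this diff, and createRunAsync() is assumed as before.

import { mastra } from "./mastra"; // hypothetical Mastra instance

const workflow = mastra.listWorkflows()["my-workflow"]; // hypothetical workflow id
const run = await workflow.createRunAsync();

const output = run.stream({
  inputData: { city: "Berlin" },
  closeOnSuspend: false, // new: keep the stream open while the run is suspended
  // requestContext, tracingOptions, initialState and outputOptions are also accepted
});

// Subsequent calls return the same cached WorkflowRunOutput, and streamVNext()
// simply delegates to stream():
console.log(run.stream() === output);      // expected: true
console.log(run.streamVNext() === output); // expected: true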
@@ -437,7 +485,7 @@ var InngestWorkflow = class _InngestWorkflow extends workflows.Workflow {
       },
       { event: `workflow.${this.id}` },
       async ({ event, step, attempt, publish }) => {
-        let { inputData, initialState, runId, resourceId, resume, outputOptions } = event.data;
+        let { inputData, initialState, runId, resourceId, resume, outputOptions, format } = event.data;
         if (!runId) {
           runId = await step.run(`workflow.${this.id}.runIdGen`, async () => {
             return crypto.randomUUID();

@@ -476,13 +524,19 @@ var InngestWorkflow = class _InngestWorkflow extends workflows.Workflow {
           initialState,
           emitter,
           retryConfig: this.retryConfig,
-
+          requestContext: new di.RequestContext(),
           // TODO
           resume,
+          format,
           abortController: new AbortController(),
-          currentSpan:
-
-
+          // currentSpan: undefined, // TODO: Pass actual parent AI span from workflow execution context
+          outputOptions,
+          writableStream: new WritableStream({
+            write(chunk) {
+              void emitter.emit("watch-v2", chunk).catch(() => {
+              });
+            }
+          })
         });
         await step.run(`workflow.${this.id}.finalize`, async () => {
           if (result.status === "failed") {

@@ -520,7 +574,7 @@ function isAgent(params) {
 function isTool(params) {
   return params instanceof tools.Tool;
 }
-function createStep(params) {
+function createStep(params, agentOptions) {
   if (isAgent(params)) {
     return {
       id: params.name,

@@ -528,12 +582,23 @@ function createStep(params) {
       // @ts-ignore
       inputSchema: zod.z.object({
         prompt: zod.z.string()
+        // resourceId: z.string().optional(),
+        // threadId: z.string().optional(),
       }),
       // @ts-ignore
       outputSchema: zod.z.object({
         text: zod.z.string()
       }),
-      execute: async ({
+      execute: async ({
+        inputData,
+        [_constants.EMITTER_SYMBOL]: emitter,
+        [_constants.STREAM_FORMAT_SYMBOL]: streamFormat,
+        requestContext,
+        tracingContext,
+        abortSignal,
+        abort,
+        writer
+      }) => {
        let streamPromise = {};
        streamPromise.promise = new Promise((resolve, reject) => {
          streamPromise.resolve = resolve;

@@ -543,48 +608,40 @@ function createStep(params) {
          name: params.name,
          args: inputData
        };
-
-
-
+        let stream;
+        if ((await params.getModel()).specificationVersion === "v1") {
+          const { fullStream } = await params.streamLegacy(inputData.prompt, {
+            ...agentOptions ?? {},
+            // resourceId: inputData.resourceId,
+            // threadId: inputData.threadId,
+            requestContext,
            tracingContext,
            onFinish: (result) => {
              streamPromise.resolve(result.text);
+              void agentOptions?.onFinish?.(result);
            },
            abortSignal
          });
-
-          return abort();
-        }
-        await emitter.emit("watch-v2", {
-          type: "tool-call-streaming-start",
-          ...toolData ?? {}
-        });
-        for await (const chunk of fullStream) {
-          if (chunk.type === "text-delta") {
-            await emitter.emit("watch-v2", {
-              type: "tool-call-delta",
-              ...toolData ?? {},
-              argsTextDelta: chunk.payload.text
-            });
-          }
-        }
+          stream = fullStream;
        } else {
-          const
-
+          const modelOutput = await params.stream(inputData.prompt, {
+            ...agentOptions ?? {},
+            requestContext,
            tracingContext,
            onFinish: (result) => {
              streamPromise.resolve(result.text);
+              void agentOptions?.onFinish?.(result);
            },
            abortSignal
          });
-
-
-
+          stream = modelOutput.fullStream;
+        }
+        if (streamFormat === "legacy") {
          await emitter.emit("watch-v2", {
            type: "tool-call-streaming-start",
            ...toolData ?? {}
          });
-          for await (const chunk of
+          for await (const chunk of stream) {
            if (chunk.type === "text-delta") {
              await emitter.emit("watch-v2", {
                type: "tool-call-delta",

@@ -593,11 +650,18 @@ function createStep(params) {
              });
            }
          }
+          await emitter.emit("watch-v2", {
+            type: "tool-call-streaming-finish",
+            ...toolData ?? {}
+          });
+        } else {
+          for await (const chunk of stream) {
+            await writer.write(chunk);
+          }
+        }
+        if (abortSignal.aborted) {
+          return abort();
        }
-        await emitter.emit("watch-v2", {
-          type: "tool-call-streaming-finish",
-          ...toolData ?? {}
-        });
        return {
          text: await streamPromise.promise
        };

@@ -616,11 +680,11 @@ function createStep(params) {
     description: params.description,
     inputSchema: params.inputSchema,
     outputSchema: params.outputSchema,
-    execute: async ({ inputData, mastra,
+    execute: async ({ inputData, mastra, requestContext, tracingContext, suspend, resumeData }) => {
       return params.execute({
         context: inputData,
         mastra: aiTracing.wrapMastra(mastra, tracingContext),
-
+        requestContext,
         tracingContext,
         suspend,
         resumeData
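createStep() above now accepts a second agentOptions argument when wrapping an agent: the options are spread into the agent's stream()/streamLegacy() call and any onFinish callback runs alongside the step's own result handling. A sketch, assuming createStep is obtained from this package's init() helper and that a pre-existing agent is imported; both are assumptions, not shown in this diff.

import { Inngest } from "inngest";
import { init } from "@mastra/inngest";
import { myAgent } from "./agents"; // hypothetical agent

const inngest = new Inngest({ id: "my-app" });
const { createStep } = init(inngest);

const summarizeStep = createStep(myAgent, {
  // Spread into params.stream() / params.streamLegacy() before the built-in options;
  // onFinish runs in addition to resolving the step's { text } output.
  onFinish: (result) => {
    console.log("agent step finished:", result.text);
  },
});

// The generated step keeps the fixed schemas shown above:
// inputSchema { prompt: string } and outputSchema { text: string }.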
@@ -683,18 +747,6 @@ var InngestExecutionEngine = class extends workflows.DefaultExecutionEngine {
     this.inngestStep = inngestStep;
     this.inngestAttempts = inngestAttempts;
   }
-  async execute(params) {
-    await params.emitter.emit("watch-v2", {
-      type: "workflow-start",
-      payload: { runId: params.runId }
-    });
-    const result = await super.execute(params);
-    await params.emitter.emit("watch-v2", {
-      type: "workflow-finish",
-      payload: { runId: params.runId }
-    });
-    return result;
-  }
   async fmtReturnValue(emitter, stepResults, lastOutput, error) {
     const base = {
       status: lastOutput.status,

@@ -762,7 +814,7 @@ var InngestExecutionEngine = class extends workflows.DefaultExecutionEngine {
     stepResults,
     emitter,
     abortController,
-
+    requestContext,
     executionContext,
     writableStream,
     tracingContext

@@ -786,7 +838,7 @@ var InngestExecutionEngine = class extends workflows.DefaultExecutionEngine {
      runId,
      workflowId,
      mastra: this.mastra,
-
+      requestContext,
      inputData: prevOutput,
      state: executionContext.state,
      setState: (state) => {

@@ -808,7 +860,6 @@ var InngestExecutionEngine = class extends workflows.DefaultExecutionEngine {
        abortController?.abort();
      },
      [_constants.EMITTER_SYMBOL]: emitter,
-      // TODO: add streamVNext support
      [_constants.STREAM_FORMAT_SYMBOL]: executionContext.format,
      engine: { step: this.inngestStep },
      abortSignal: abortController?.signal,

@@ -852,7 +903,7 @@ var InngestExecutionEngine = class extends workflows.DefaultExecutionEngine {
    stepResults,
    emitter,
    abortController,
-
+    requestContext,
    executionContext,
    writableStream,
    tracingContext

@@ -877,7 +928,7 @@ var InngestExecutionEngine = class extends workflows.DefaultExecutionEngine {
      runId,
      workflowId,
      mastra: this.mastra,
-
+      requestContext,
      inputData: prevOutput,
      state: executionContext.state,
      setState: (state) => {

@@ -900,7 +951,6 @@ var InngestExecutionEngine = class extends workflows.DefaultExecutionEngine {
      },
      [_constants.EMITTER_SYMBOL]: emitter,
      [_constants.STREAM_FORMAT_SYMBOL]: executionContext.format,
-      // TODO: add streamVNext support
      engine: { step: this.inngestStep },
      abortSignal: abortController?.signal,
      writer: new tools.ToolStream(

@@ -961,7 +1011,7 @@ var InngestExecutionEngine = class extends workflows.DefaultExecutionEngine {
    prevOutput,
    emitter,
    abortController,
-
+    requestContext,
    tracingContext,
    writableStream,
    disableScorers

@@ -1225,6 +1275,7 @@ var InngestExecutionEngine = class extends workflows.DefaultExecutionEngine {
        resumePayload: resume?.steps[0] === step.id ? resume?.resumePayload : void 0
      };
    }
+    const stepCallId = crypto.randomUUID();
    let stepRes;
    try {
      stepRes = await this.inngestStep.run(`workflow.${executionContext.workflowId}.step.${step.id}`, async () => {

@@ -1238,8 +1289,16 @@ var InngestExecutionEngine = class extends workflows.DefaultExecutionEngine {
        const result = await step.execute({
          runId: executionContext.runId,
          mastra: this.mastra,
-
-
+          requestContext,
+          writer: new tools.ToolStream(
+            {
+              prefix: "workflow-step",
+              callId: stepCallId,
+              name: step.id,
+              runId: executionContext.runId
+            },
+            writableStream
+          ),
          state: executionContext?.state ?? {},
          setState: (state) => {
            executionContext.state = state;

@@ -1274,6 +1333,7 @@ var InngestExecutionEngine = class extends workflows.DefaultExecutionEngine {
            runId: stepResults[step.id]?.suspendPayload?.__workflow_meta?.runId
          },
          [_constants.EMITTER_SYMBOL]: emitter,
+          [_constants.STREAM_FORMAT_SYMBOL]: executionContext.format,
          engine: {
            step: this.inngestStep
          },

@@ -1395,7 +1455,7 @@ var InngestExecutionEngine = class extends workflows.DefaultExecutionEngine {
        output: stepRes.result,
        workflowId: executionContext.workflowId,
        stepId: step.id,
-
+        requestContext,
        disableScorers,
        tracingContext: { currentSpan: stepAISpan }
      });

@@ -1453,14 +1513,12 @@ var InngestExecutionEngine = class extends workflows.DefaultExecutionEngine {
    runId,
    entry,
    prevOutput,
-    prevStep,
    stepResults,
-    serializedStepGraph,
    resume,
    executionContext,
    emitter,
    abortController,
-
+    requestContext,
    writableStream,
    disableScorers,
    tracingContext

@@ -1494,7 +1552,7 @@ var InngestExecutionEngine = class extends workflows.DefaultExecutionEngine {
      runId,
      workflowId,
      mastra: this.mastra,
-
+      requestContext,
      runCount: -1,
      retryCount: -1,
      inputData: prevOutput,

@@ -1517,7 +1575,6 @@ var InngestExecutionEngine = class extends workflows.DefaultExecutionEngine {
      },
      [_constants.EMITTER_SYMBOL]: emitter,
      [_constants.STREAM_FORMAT_SYMBOL]: executionContext.format,
-      // TODO: add streamVNext support
      engine: {
        step: this.inngestStep
      },

@@ -1566,13 +1623,14 @@ var InngestExecutionEngine = class extends workflows.DefaultExecutionEngine {
      }
    });
    const results = await Promise.all(
-      stepsToRun.map(
-
-
-
-
-
+      stepsToRun.map(async (step, index) => {
+        const currStepResult = stepResults[step.step.id];
+        if (currStepResult && currStepResult.status === "success") {
+          return currStepResult;
+        }
+        const result = await this.executeStep({
+          step: step.step,
+          prevOutput,
          stepResults,
          resume,
          executionContext: {

@@ -1586,26 +1644,28 @@ var InngestExecutionEngine = class extends workflows.DefaultExecutionEngine {
          },
          emitter,
          abortController,
-
+          requestContext,
          writableStream,
          disableScorers,
          tracingContext: {
            currentSpan: conditionalSpan
          }
-        })
-
+        });
+        stepResults[step.step.id] = result;
+        return result;
+      })
    );
-    const hasFailed = results.find((result) => result.
-    const hasSuspended = results.find((result) => result.
+    const hasFailed = results.find((result) => result.status === "failed");
+    const hasSuspended = results.find((result) => result.status === "suspended");
    if (hasFailed) {
-      execResults = { status: "failed", error: hasFailed.
+      execResults = { status: "failed", error: hasFailed.error };
    } else if (hasSuspended) {
-      execResults = { status: "suspended", suspendPayload: hasSuspended.
+      execResults = { status: "suspended", suspendPayload: hasSuspended.suspendPayload };
    } else {
      execResults = {
        status: "success",
        output: results.reduce((acc, result, index) => {
-          if (result.
+          if (result.status === "success") {
            acc[stepsToRun[index].step.id] = result.output;
          }
          return acc;