@mastra/ai-sdk 0.0.0-extract-tool-ui-inp-playground-ui-20251024041825 → 0.0.0-feat-add-query-option-to-playground-20251209160219

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -3,6 +3,9 @@
3
3
  var server = require('@mastra/core/server');
4
4
  var ai = require('ai');
5
5
  var stream = require('@mastra/core/stream');
6
+ var agent = require('@mastra/core/agent');
7
+ var di = require('@mastra/core/di');
8
+ var processors = require('@mastra/core/processors');
6
9
 
7
10
  // src/chat-route.ts
8
11
 
@@ -10,6 +13,55 @@ var stream = require('@mastra/core/stream');
10
13
  var isDataChunkType = (chunk) => {
11
14
  return chunk && typeof chunk === "object" && "type" in chunk && chunk.type?.startsWith("data-");
12
15
  };
16
+ var isMastraTextStreamChunk = (chunk) => {
17
+ return chunk && typeof chunk === "object" && "type" in chunk && typeof chunk.type === "string" && [
18
+ "text-start",
19
+ "text-delta",
20
+ "text-end",
21
+ "reasoning-start",
22
+ "reasoning-delta",
23
+ "reasoning-end",
24
+ "file",
25
+ "source",
26
+ "tool-input-start",
27
+ "tool-input-delta",
28
+ "tool-call-approval",
29
+ "tool-call-suspended",
30
+ "tool-call",
31
+ "tool-result",
32
+ "tool-error",
33
+ "error",
34
+ "start-step",
35
+ "finish-step",
36
+ "start",
37
+ "finish",
38
+ "abort",
39
+ "tool-input-end",
40
+ "object",
41
+ "tripwire",
42
+ "raw"
43
+ ].includes(chunk.type);
44
+ };
45
+ function safeParseErrorObject(obj) {
46
+ if (typeof obj !== "object" || obj === null) {
47
+ return String(obj);
48
+ }
49
+ try {
50
+ const stringified = JSON.stringify(obj);
51
+ if (stringified === "{}") {
52
+ return String(obj);
53
+ }
54
+ return stringified;
55
+ } catch {
56
+ return String(obj);
57
+ }
58
+ }
59
+ var isAgentExecutionDataChunkType = (chunk) => {
60
+ return chunk && typeof chunk === "object" && "type" in chunk && chunk.type?.startsWith("agent-execution-event-") && "payload" in chunk && typeof chunk.payload === "object" && "type" in chunk.payload && chunk.payload.type?.startsWith("data-");
61
+ };
62
+ var isWorkflowExecutionDataChunkType = (chunk) => {
63
+ return chunk && typeof chunk === "object" && "type" in chunk && chunk.type?.startsWith("workflow-execution-event-") && "payload" in chunk && typeof chunk.payload === "object" && "type" in chunk.payload && chunk.payload.type?.startsWith("data-");
64
+ };
13
65
 
14
66
  // src/helpers.ts
15
67
  function convertMastraChunkToAISDKv5({
@@ -120,6 +172,28 @@ function convertMastraChunkToAISDKv5({
120
172
  toolName: chunk.payload.toolName,
121
173
  input: chunk.payload.args
122
174
  };
175
+ case "tool-call-approval":
176
+ return {
177
+ type: "data-tool-call-approval",
178
+ id: chunk.payload.toolCallId,
179
+ data: {
180
+ runId: chunk.runId,
181
+ toolCallId: chunk.payload.toolCallId,
182
+ toolName: chunk.payload.toolName,
183
+ args: chunk.payload.args
184
+ }
185
+ };
186
+ case "tool-call-suspended":
187
+ return {
188
+ type: "data-tool-call-suspended",
189
+ id: chunk.payload.toolCallId,
190
+ data: {
191
+ runId: chunk.runId,
192
+ toolCallId: chunk.payload.toolCallId,
193
+ toolName: chunk.payload.toolName,
194
+ suspendPayload: chunk.payload.suspendPayload
195
+ }
196
+ };
123
197
  case "tool-call-input-streaming-start":
124
198
  return {
125
199
  type: "tool-input-start",
@@ -210,6 +284,13 @@ function convertMastraChunkToAISDKv5({
210
284
  type: "object",
211
285
  object: chunk.object
212
286
  };
287
+ case "tripwire":
288
+ return {
289
+ type: "data-tripwire",
290
+ data: {
291
+ tripwireReason: chunk.payload.tripwireReason
292
+ }
293
+ };
213
294
  default:
214
295
  if (chunk.type && "payload" in chunk && chunk.payload) {
215
296
  return {
@@ -265,6 +346,14 @@ function convertFullStreamChunkToUIMessageStream({
265
346
  };
266
347
  }
267
348
  case "reasoning-delta": {
349
+ if (sendReasoning) {
350
+ return {
351
+ type: "reasoning-delta",
352
+ id: part.id,
353
+ delta: part.text,
354
+ ...part.providerMetadata != null ? { providerMetadata: part.providerMetadata } : {}
355
+ };
356
+ }
268
357
  return;
269
358
  }
270
359
  case "reasoning-end": {
@@ -282,6 +371,25 @@ function convertFullStreamChunkToUIMessageStream({
282
371
  };
283
372
  }
284
373
  case "source": {
374
+ if (sendSources && part.sourceType === "url") {
375
+ return {
376
+ type: "source-url",
377
+ sourceId: part.id,
378
+ url: part.url,
379
+ title: part.title,
380
+ ...part.providerMetadata != null ? { providerMetadata: part.providerMetadata } : {}
381
+ };
382
+ }
383
+ if (sendSources && part.sourceType === "document") {
384
+ return {
385
+ type: "source-document",
386
+ sourceId: part.id,
387
+ mediaType: part.mediaType,
388
+ title: part.title,
389
+ filename: part.filename,
390
+ ...part.providerMetadata != null ? { providerMetadata: part.providerMetadata } : {}
391
+ };
392
+ }
285
393
  return;
286
394
  }
287
395
  case "tool-input-start": {
@@ -339,6 +447,14 @@ function convertFullStreamChunkToUIMessageStream({
339
447
  toolCallId: part.toolCallId,
340
448
  payload: part.output
341
449
  };
450
+ } else if (isDataChunkType(part.output)) {
451
+ if (!("data" in part.output)) {
452
+ throw new Error(
453
+ `UI Messages require a data property when using data- prefixed chunks
454
+ ${JSON.stringify(part)}`
455
+ );
456
+ }
457
+ return part.output;
342
458
  }
343
459
  return;
344
460
  }
@@ -364,21 +480,23 @@ function convertFullStreamChunkToUIMessageStream({
364
480
  return { type: "finish-step" };
365
481
  }
366
482
  case "start": {
367
- {
483
+ if (sendStart) {
368
484
  return {
369
485
  type: "start",
370
486
  ...messageMetadataValue != null ? { messageMetadata: messageMetadataValue } : {},
371
487
  ...responseMessageId != null ? { messageId: responseMessageId } : {}
372
488
  };
373
489
  }
490
+ return;
374
491
  }
375
492
  case "finish": {
376
- {
493
+ if (sendFinish) {
377
494
  return {
378
495
  type: "finish",
379
496
  ...messageMetadataValue != null ? { messageMetadata: messageMetadataValue } : {}
380
497
  };
381
498
  }
499
+ return;
382
500
  }
383
501
  case "abort": {
384
502
  return part;
@@ -405,7 +523,10 @@ function convertFullStreamChunkToUIMessageStream({
405
523
  }
406
524
 
407
525
  // src/transformers.ts
408
- function WorkflowStreamToAISDKTransformer() {
526
+ var PRIMITIVE_CACHE_SYMBOL = Symbol("primitive-cache");
527
+ function WorkflowStreamToAISDKTransformer({
528
+ includeTextStreamParts
529
+ } = {}) {
409
530
  const bufferedWorkflows = /* @__PURE__ */ new Map();
410
531
  return new TransformStream({
411
532
  start(controller) {
@@ -419,7 +540,7 @@ function WorkflowStreamToAISDKTransformer() {
419
540
  });
420
541
  },
421
542
  transform(chunk, controller) {
422
- const transformed = transformWorkflow(chunk, bufferedWorkflows);
543
+ const transformed = transformWorkflow(chunk, bufferedWorkflows, false, includeTextStreamParts);
423
544
  if (transformed) controller.enqueue(transformed);
424
545
  }
425
546
  });
@@ -443,20 +564,37 @@ function AgentNetworkToAISDKTransformer() {
443
564
  }
444
565
  });
445
566
  }
446
- function AgentStreamToAISDKTransformer() {
567
+ function AgentStreamToAISDKTransformer({
568
+ lastMessageId,
569
+ sendStart,
570
+ sendFinish,
571
+ sendReasoning,
572
+ sendSources,
573
+ messageMetadata,
574
+ onError
575
+ }) {
447
576
  let bufferedSteps = /* @__PURE__ */ new Map();
577
+ let tripwireOccurred = false;
578
+ let finishEventSent = false;
448
579
  return new TransformStream({
449
580
  transform(chunk, controller) {
581
+ if (chunk.type === "tripwire") {
582
+ tripwireOccurred = true;
583
+ }
584
+ if (chunk.type === "finish") {
585
+ finishEventSent = true;
586
+ }
450
587
  const part = convertMastraChunkToAISDKv5({ chunk, mode: "stream" });
451
588
  const transformedChunk = convertFullStreamChunkToUIMessageStream({
452
589
  part,
453
- sendReasoning: false,
454
- sendSources: false,
455
- sendStart: true,
456
- sendFinish: true,
457
- responseMessageId: chunk.runId,
458
- onError() {
459
- return "Error";
590
+ sendReasoning,
591
+ sendSources,
592
+ messageMetadataValue: messageMetadata?.({ part }),
593
+ sendStart,
594
+ sendFinish,
595
+ responseMessageId: lastMessageId,
596
+ onError(error) {
597
+ return onError ? onError(error) : safeParseErrorObject(error);
460
598
  }
461
599
  });
462
600
  if (transformedChunk) {
@@ -476,6 +614,14 @@ function AgentStreamToAISDKTransformer() {
476
614
  controller.enqueue(transformedChunk);
477
615
  }
478
616
  }
617
+ },
618
+ flush(controller) {
619
+ if (tripwireOccurred && !finishEventSent && sendFinish) {
620
+ controller.enqueue({
621
+ type: "finish",
622
+ finishReason: "other"
623
+ });
624
+ }
479
625
  }
480
626
  });
481
627
  }
@@ -615,7 +761,7 @@ function transformAgent(payload, bufferedSteps) {
615
761
  }
616
762
  return null;
617
763
  }
618
- function transformWorkflow(payload, bufferedWorkflows, isNested) {
764
+ function transformWorkflow(payload, bufferedWorkflows, isNested, includeTextStreamParts) {
619
765
  switch (payload.type) {
620
766
  case "workflow-start":
621
767
  bufferedWorkflows.set(payload.runId, {
@@ -638,7 +784,9 @@ function transformWorkflow(payload, bufferedWorkflows, isNested) {
638
784
  name: payload.payload.id,
639
785
  status: payload.payload.status,
640
786
  input: payload.payload.payload ?? null,
641
- output: null
787
+ output: null,
788
+ suspendPayload: null,
789
+ resumePayload: null
642
790
  };
643
791
  bufferedWorkflows.set(payload.runId, current);
644
792
  return {
@@ -671,6 +819,27 @@ function transformWorkflow(payload, bufferedWorkflows, isNested) {
671
819
  }
672
820
  };
673
821
  }
822
+ case "workflow-step-suspended": {
823
+ const current = bufferedWorkflows.get(payload.runId);
824
+ if (!current) return null;
825
+ current.steps[payload.payload.id] = {
826
+ ...current.steps[payload.payload.id],
827
+ status: payload.payload.status,
828
+ suspendPayload: payload.payload.suspendPayload ?? null,
829
+ resumePayload: payload.payload.resumePayload ?? null,
830
+ output: null
831
+ };
832
+ return {
833
+ type: isNested ? "data-tool-workflow" : "data-workflow",
834
+ id: payload.runId,
835
+ data: {
836
+ name: current.name,
837
+ status: "suspended",
838
+ steps: current.steps,
839
+ output: null
840
+ }
841
+ };
842
+ }
674
843
  case "workflow-finish": {
675
844
  const current = bufferedWorkflows.get(payload.runId);
676
845
  if (!current) return null;
@@ -685,6 +854,29 @@ function transformWorkflow(payload, bufferedWorkflows, isNested) {
685
854
  }
686
855
  };
687
856
  }
857
+ case "workflow-step-output": {
858
+ const output = payload.payload.output;
859
+ if (includeTextStreamParts && output && isMastraTextStreamChunk(output)) {
860
+ const part = convertMastraChunkToAISDKv5({ chunk: output, mode: "stream" });
861
+ const transformedChunk = convertFullStreamChunkToUIMessageStream({
862
+ part,
863
+ onError(error) {
864
+ return safeParseErrorObject(error);
865
+ }
866
+ });
867
+ return transformedChunk;
868
+ }
869
+ if (output && isDataChunkType(output)) {
870
+ if (!("data" in output)) {
871
+ throw new Error(
872
+ `UI Messages require a data property when using data- prefixed chunks
873
+ ${JSON.stringify(output)}`
874
+ );
875
+ }
876
+ return output;
877
+ }
878
+ return null;
879
+ }
688
880
  default: {
689
881
  if (isDataChunkType(payload)) {
690
882
  if (!("data" in payload)) {
@@ -702,19 +894,39 @@ function transformWorkflow(payload, bufferedWorkflows, isNested) {
702
894
  function transformNetwork(payload, bufferedNetworks, isNested) {
703
895
  switch (payload.type) {
704
896
  case "routing-agent-start": {
705
- if (!bufferedNetworks.has(payload.payload.runId)) {
706
- bufferedNetworks.set(payload.payload.runId, {
707
- name: payload.payload.agentId,
708
- steps: []
897
+ if (!bufferedNetworks.has(payload.runId)) {
898
+ bufferedNetworks.set(payload.runId, {
899
+ name: payload.payload.networkId,
900
+ steps: [],
901
+ usage: null,
902
+ output: null
709
903
  });
710
904
  }
905
+ const current = bufferedNetworks.get(payload.runId);
906
+ current.steps.push({
907
+ id: payload.payload.runId,
908
+ name: payload.payload.agentId,
909
+ status: "running",
910
+ iteration: payload.payload.inputData.iteration,
911
+ input: {
912
+ task: payload.payload.inputData.task,
913
+ threadId: payload.payload.inputData.threadId,
914
+ threadResourceId: payload.payload.inputData.threadResourceId
915
+ },
916
+ output: "",
917
+ task: null,
918
+ suspendPayload: null,
919
+ resumePayload: null,
920
+ [PRIMITIVE_CACHE_SYMBOL]: /* @__PURE__ */ new Map()
921
+ });
711
922
  return {
712
923
  type: isNested ? "data-tool-network" : "data-network",
713
- id: payload.payload.runId,
924
+ id: payload.runId,
714
925
  data: {
715
- name: bufferedNetworks.get(payload.payload.runId).name,
926
+ name: bufferedNetworks.get(payload.runId).name,
716
927
  status: "running",
717
- steps: bufferedNetworks.get(payload.payload.runId).steps,
928
+ usage: null,
929
+ steps: bufferedNetworks.get(payload.runId).steps,
718
930
  output: null
719
931
  }
720
932
  };
@@ -737,150 +949,180 @@ function transformNetwork(payload, bufferedNetworks, isNested) {
737
949
  };
738
950
  }
739
951
  case "agent-execution-start": {
740
- const current = bufferedNetworks.get(payload.payload.runId) || { name: "", steps: [] };
952
+ const current = bufferedNetworks.get(payload.runId);
953
+ if (!current) return null;
741
954
  current.steps.push({
955
+ id: payload.payload.runId,
742
956
  name: payload.payload.agentId,
743
957
  status: "running",
744
- input: payload.payload.args || null,
745
- output: null
958
+ iteration: payload.payload.args?.iteration ?? 0,
959
+ input: { prompt: payload.payload.args?.prompt ?? "" },
960
+ output: null,
961
+ task: null,
962
+ suspendPayload: null,
963
+ resumePayload: null,
964
+ [PRIMITIVE_CACHE_SYMBOL]: /* @__PURE__ */ new Map()
746
965
  });
747
- bufferedNetworks.set(payload.payload.runId, current);
966
+ bufferedNetworks.set(payload.runId, current);
748
967
  return {
749
968
  type: isNested ? "data-tool-network" : "data-network",
750
- id: payload.payload.runId,
969
+ id: payload.runId,
751
970
  data: {
752
- name: current.name,
753
- status: "running",
754
- steps: current.steps,
755
- output: null
971
+ ...current,
972
+ status: "running"
756
973
  }
757
974
  };
758
975
  }
759
976
  case "workflow-execution-start": {
760
- const current = bufferedNetworks.get(payload.payload.runId) || { name: "", steps: [] };
977
+ const current = bufferedNetworks.get(payload.runId);
978
+ if (!current) return null;
761
979
  current.steps.push({
762
- name: payload.payload.name,
980
+ id: payload.payload.runId,
981
+ name: payload.payload.workflowId,
763
982
  status: "running",
764
- input: payload.payload.args || null,
765
- output: null
983
+ iteration: payload.payload.args?.iteration ?? 0,
984
+ input: { prompt: payload.payload.args?.prompt ?? "" },
985
+ output: null,
986
+ task: null,
987
+ suspendPayload: null,
988
+ resumePayload: null,
989
+ [PRIMITIVE_CACHE_SYMBOL]: /* @__PURE__ */ new Map()
766
990
  });
767
- bufferedNetworks.set(payload.payload.runId, current);
991
+ bufferedNetworks.set(payload.runId, current);
768
992
  return {
769
993
  type: isNested ? "data-tool-network" : "data-network",
770
- id: payload.payload.runId,
994
+ id: payload.runId,
771
995
  data: {
772
- name: current.name,
773
- status: "running",
774
- steps: current.steps,
775
- output: null
996
+ ...current,
997
+ status: "running"
776
998
  }
777
999
  };
778
1000
  }
779
1001
  case "tool-execution-start": {
780
- const current = bufferedNetworks.get(payload.payload.runId) || { name: "", steps: [] };
1002
+ const current = bufferedNetworks.get(payload.runId);
1003
+ if (!current) return null;
781
1004
  current.steps.push({
1005
+ id: payload.payload.args.toolCallId,
782
1006
  name: payload.payload.args?.toolName,
783
1007
  status: "running",
1008
+ iteration: payload.payload.args?.iteration ? Number(payload.payload.args.iteration) : 0,
1009
+ task: {
1010
+ id: payload.payload.args?.toolName
1011
+ },
784
1012
  input: payload.payload.args?.args || null,
785
- output: null
1013
+ output: null,
1014
+ suspendPayload: null,
1015
+ resumePayload: null,
1016
+ [PRIMITIVE_CACHE_SYMBOL]: /* @__PURE__ */ new Map()
786
1017
  });
787
- bufferedNetworks.set(payload.payload.runId, current);
1018
+ bufferedNetworks.set(payload.runId, current);
788
1019
  return {
789
1020
  type: isNested ? "data-tool-network" : "data-network",
790
- id: payload.payload.runId,
1021
+ id: payload.runId,
791
1022
  data: {
792
- name: current.name,
793
- status: "running",
794
- steps: current.steps,
795
- output: null
1023
+ ...current,
1024
+ status: "running"
796
1025
  }
797
1026
  };
798
1027
  }
799
1028
  case "agent-execution-end": {
800
1029
  const current = bufferedNetworks.get(payload.runId);
801
1030
  if (!current) return null;
802
- current.steps.push({
803
- name: payload.payload.agentId,
804
- status: "success",
805
- input: null,
806
- output: payload.payload.result
807
- });
1031
+ const stepId = payload.payload.runId;
1032
+ const step = current.steps.find((step2) => step2.id === stepId);
1033
+ if (!step) {
1034
+ return null;
1035
+ }
1036
+ step.status = "success";
1037
+ step.output = payload.payload.result;
808
1038
  return {
809
1039
  type: isNested ? "data-tool-network" : "data-network",
810
1040
  id: payload.runId,
811
1041
  data: {
812
- name: current.name,
1042
+ ...current,
1043
+ usage: payload.payload?.usage ?? current.usage,
813
1044
  status: "running",
814
- steps: current.steps,
815
- output: payload.payload.result ?? null
1045
+ output: payload.payload.result ?? current.output
816
1046
  }
817
1047
  };
818
1048
  }
819
1049
  case "tool-execution-end": {
820
1050
  const current = bufferedNetworks.get(payload.runId);
821
1051
  if (!current) return null;
822
- current.steps.push({
823
- name: payload.payload.toolName,
824
- status: "success",
825
- input: null,
826
- output: payload.payload.result
827
- });
1052
+ const stepId = payload.payload.toolCallId;
1053
+ const step = current.steps.find((step2) => step2.id === stepId);
1054
+ if (!step) {
1055
+ return null;
1056
+ }
1057
+ step.status = "success";
1058
+ step.output = payload.payload.result;
828
1059
  return {
829
1060
  type: isNested ? "data-tool-network" : "data-network",
830
1061
  id: payload.runId,
831
1062
  data: {
832
- name: current.name,
1063
+ ...current,
833
1064
  status: "running",
834
- steps: current.steps,
835
- output: payload.payload.result ?? null
1065
+ output: payload.payload.result ?? current.output
836
1066
  }
837
1067
  };
838
1068
  }
839
1069
  case "workflow-execution-end": {
840
1070
  const current = bufferedNetworks.get(payload.runId);
841
1071
  if (!current) return null;
842
- current.steps.push({
843
- name: payload.payload.name,
844
- status: "success",
845
- input: null,
846
- output: payload.payload.result
847
- });
1072
+ const stepId = payload.payload.runId;
1073
+ const step = current.steps.find((step2) => step2.id === stepId);
1074
+ if (!step) {
1075
+ return null;
1076
+ }
1077
+ step.status = "success";
1078
+ step.output = payload.payload.result;
848
1079
  return {
849
1080
  type: isNested ? "data-tool-network" : "data-network",
850
1081
  id: payload.runId,
851
1082
  data: {
852
- name: current.name,
1083
+ ...current,
1084
+ usage: payload.payload?.usage ?? current.usage,
853
1085
  status: "running",
854
- steps: current.steps,
855
- output: payload.payload.result ?? null
1086
+ output: payload.payload.result ?? current.output
856
1087
  }
857
1088
  };
858
1089
  }
859
1090
  case "routing-agent-end": {
860
- const current = bufferedNetworks.get(payload.payload.runId);
1091
+ const current = bufferedNetworks.get(payload.runId);
861
1092
  if (!current) return null;
1093
+ const stepId = payload.payload.runId;
1094
+ const step = current.steps.find((step2) => step2.id === stepId);
1095
+ if (!step) {
1096
+ return null;
1097
+ }
1098
+ step.status = "success";
1099
+ step.task = {
1100
+ id: payload.payload.primitiveId,
1101
+ type: payload.payload.primitiveType,
1102
+ name: payload.payload.task,
1103
+ reason: payload.payload.selectionReason
1104
+ };
1105
+ step.output = payload.payload.result;
862
1106
  return {
863
1107
  type: isNested ? "data-tool-network" : "data-network",
864
- id: payload.payload.runId,
1108
+ id: payload.runId,
865
1109
  data: {
866
- name: current.name,
867
- status: "finished",
868
- steps: current.steps,
869
- output: payload.payload?.result ?? null
1110
+ ...current,
1111
+ usage: payload.payload?.usage ?? current.usage,
1112
+ output: payload.payload?.result ?? current.output
870
1113
  }
871
1114
  };
872
1115
  }
873
1116
  case "network-execution-event-step-finish": {
874
- const current = bufferedNetworks.get(payload.payload.runId);
1117
+ const current = bufferedNetworks.get(payload.runId);
875
1118
  if (!current) return null;
876
1119
  return {
877
1120
  type: isNested ? "data-tool-network" : "data-network",
878
- id: payload.payload.runId,
1121
+ id: payload.runId,
879
1122
  data: {
880
- name: current.name,
1123
+ ...current,
881
1124
  status: "finished",
882
- steps: current.steps,
883
- output: payload.payload?.result ?? null
1125
+ output: payload.payload?.result ?? current.output
884
1126
  }
885
1127
  };
886
1128
  }
@@ -891,14 +1133,85 @@ function transformNetwork(payload, bufferedNetworks, isNested) {
891
1133
  type: isNested ? "data-tool-network" : "data-network",
892
1134
  id: payload.runId,
893
1135
  data: {
894
- name: current.name,
1136
+ ...current,
1137
+ usage: payload.payload?.usage ?? current.usage,
895
1138
  status: "finished",
896
- steps: current.steps,
897
- output: payload.payload?.result ?? null
1139
+ output: payload.payload?.result ?? current.output
898
1140
  }
899
1141
  };
900
1142
  }
901
1143
  default: {
1144
+ if (isAgentExecutionDataChunkType(payload)) {
1145
+ if (!("data" in payload.payload)) {
1146
+ throw new Error(
1147
+ `UI Messages require a data property when using data- prefixed chunks
1148
+ ${JSON.stringify(payload)}`
1149
+ );
1150
+ }
1151
+ const { type, data } = payload.payload;
1152
+ return { type, data };
1153
+ }
1154
+ if (isWorkflowExecutionDataChunkType(payload)) {
1155
+ if (!("data" in payload.payload)) {
1156
+ throw new Error(
1157
+ `UI Messages require a data property when using data- prefixed chunks
1158
+ ${JSON.stringify(payload)}`
1159
+ );
1160
+ }
1161
+ const { type, data } = payload.payload;
1162
+ return { type, data };
1163
+ }
1164
+ if (payload.type.startsWith("agent-execution-event-")) {
1165
+ const stepId = payload.payload.runId;
1166
+ const current = bufferedNetworks.get(payload.runId);
1167
+ if (!current) return null;
1168
+ const step = current.steps.find((step2) => step2.id === stepId);
1169
+ if (!step) {
1170
+ return null;
1171
+ }
1172
+ step[PRIMITIVE_CACHE_SYMBOL] = step[PRIMITIVE_CACHE_SYMBOL] || /* @__PURE__ */ new Map();
1173
+ const result = transformAgent(payload.payload, step[PRIMITIVE_CACHE_SYMBOL]);
1174
+ if (result) {
1175
+ const { request, response, ...data } = result.data;
1176
+ step.task = data;
1177
+ }
1178
+ bufferedNetworks.set(payload.runId, current);
1179
+ return {
1180
+ type: isNested ? "data-tool-network" : "data-network",
1181
+ id: payload.runId,
1182
+ data: {
1183
+ ...current,
1184
+ status: "running"
1185
+ }
1186
+ };
1187
+ }
1188
+ if (payload.type.startsWith("workflow-execution-event-")) {
1189
+ const stepId = payload.payload.runId;
1190
+ const current = bufferedNetworks.get(payload.runId);
1191
+ if (!current) return null;
1192
+ const step = current.steps.find((step2) => step2.id === stepId);
1193
+ if (!step) {
1194
+ return null;
1195
+ }
1196
+ step[PRIMITIVE_CACHE_SYMBOL] = step[PRIMITIVE_CACHE_SYMBOL] || /* @__PURE__ */ new Map();
1197
+ const result = transformWorkflow(payload.payload, step[PRIMITIVE_CACHE_SYMBOL]);
1198
+ if (result && "data" in result) {
1199
+ const data = result.data;
1200
+ step.task = data;
1201
+ if (data.name && step.task) {
1202
+ step.task.id = data.name;
1203
+ }
1204
+ }
1205
+ bufferedNetworks.set(payload.runId, current);
1206
+ return {
1207
+ type: isNested ? "data-tool-network" : "data-network",
1208
+ id: payload.runId,
1209
+ data: {
1210
+ ...current,
1211
+ status: "running"
1212
+ }
1213
+ };
1214
+ }
902
1215
  if (isDataChunkType(payload)) {
903
1216
  if (!("data" in payload)) {
904
1217
  throw new Error(
@@ -906,31 +1219,104 @@ function transformNetwork(payload, bufferedNetworks, isNested) {
906
1219
  ${JSON.stringify(payload)}`
907
1220
  );
908
1221
  }
909
- return payload;
1222
+ const { type, data } = payload;
1223
+ return { type, data };
910
1224
  }
911
1225
  return null;
912
1226
  }
913
1227
  }
914
1228
  }
915
1229
 
916
- // src/to-ai-sdk-format.ts
917
- function toAISdkFormat(stream, options = { from: "agent" }) {
1230
+ // src/convert-streams.ts
1231
+ function toAISdkV5Stream(stream, options = {
1232
+ from: "agent",
1233
+ sendStart: true,
1234
+ sendFinish: true
1235
+ }) {
918
1236
  const from = options?.from;
919
1237
  if (from === "workflow") {
920
- return stream.pipeThrough(WorkflowStreamToAISDKTransformer());
1238
+ const includeTextStreamParts = options?.includeTextStreamParts ?? true;
1239
+ return stream.pipeThrough(
1240
+ WorkflowStreamToAISDKTransformer({ includeTextStreamParts })
1241
+ );
921
1242
  }
922
1243
  if (from === "network") {
923
1244
  return stream.pipeThrough(AgentNetworkToAISDKTransformer());
924
1245
  }
925
1246
  const agentReadable = "fullStream" in stream ? stream.fullStream : stream;
926
- return agentReadable.pipeThrough(AgentStreamToAISDKTransformer());
1247
+ return agentReadable.pipeThrough(
1248
+ AgentStreamToAISDKTransformer({
1249
+ lastMessageId: options?.lastMessageId,
1250
+ sendStart: options?.sendStart,
1251
+ sendFinish: options?.sendFinish,
1252
+ sendReasoning: options?.sendReasoning,
1253
+ sendSources: options?.sendSources,
1254
+ messageMetadata: options?.messageMetadata,
1255
+ onError: options?.onError
1256
+ })
1257
+ );
927
1258
  }
928
1259
 
929
1260
  // src/chat-route.ts
1261
+ async function handleChatStream({
1262
+ mastra,
1263
+ agentId,
1264
+ params,
1265
+ defaultOptions,
1266
+ sendStart = true,
1267
+ sendFinish = true,
1268
+ sendReasoning = false,
1269
+ sendSources = false
1270
+ }) {
1271
+ const { messages, resumeData, runId, requestContext, ...rest } = params;
1272
+ if (resumeData && !runId) {
1273
+ throw new Error("runId is required when resumeData is provided");
1274
+ }
1275
+ const agentObj = mastra.getAgentById(agentId);
1276
+ if (!agentObj) {
1277
+ throw new Error(`Agent ${agentId} not found`);
1278
+ }
1279
+ if (!Array.isArray(messages)) {
1280
+ throw new Error("Messages must be an array of UIMessage objects");
1281
+ }
1282
+ const mergedOptions = {
1283
+ ...defaultOptions,
1284
+ ...rest,
1285
+ ...runId && { runId },
1286
+ requestContext: requestContext || defaultOptions?.requestContext
1287
+ };
1288
+ const result = resumeData ? await agentObj.resumeStream(resumeData, mergedOptions) : await agentObj.stream(messages, mergedOptions);
1289
+ let lastMessageId;
1290
+ if (messages.length) {
1291
+ const lastMessage = messages[messages.length - 1];
1292
+ if (lastMessage?.role === "assistant") {
1293
+ lastMessageId = lastMessage.id;
1294
+ }
1295
+ }
1296
+ return ai.createUIMessageStream({
1297
+ originalMessages: messages,
1298
+ execute: async ({ writer }) => {
1299
+ for await (const part of toAISdkV5Stream(result, {
1300
+ from: "agent",
1301
+ lastMessageId,
1302
+ sendStart,
1303
+ sendFinish,
1304
+ sendReasoning,
1305
+ sendSources
1306
+ })) {
1307
+ writer.write(part);
1308
+ }
1309
+ }
1310
+ });
1311
+ }
930
1312
  function chatRoute({
931
1313
  path = "/chat/:agentId",
932
1314
  agent,
933
- defaultOptions
1315
+ defaultOptions,
1316
+ sendStart = true,
1317
+ sendFinish = true,
1318
+ sendReasoning = false,
1319
+ sendSources = false
934
1320
  }) {
935
1321
  if (!agent && !path.includes("/:agentId")) {
936
1322
  throw new Error("Path must include :agentId to route to the correct agent or pass the agent explicitly");
@@ -959,6 +1345,14 @@ function chatRoute({
959
1345
  schema: {
960
1346
  type: "object",
961
1347
  properties: {
1348
+ resumeData: {
1349
+ type: "object",
1350
+ description: "Resume data for the agent"
1351
+ },
1352
+ runId: {
1353
+ type: "string",
1354
+ description: "The run ID required when resuming an agent execution"
1355
+ },
962
1356
  messages: {
963
1357
  type: "array",
964
1358
  description: "Array of messages in the conversation",
@@ -1029,9 +1423,9 @@ function chatRoute({
1029
1423
  }
1030
1424
  },
1031
1425
  handler: async (c) => {
1032
- const { messages, ...rest } = await c.req.json();
1426
+ const params = await c.req.json();
1033
1427
  const mastra = c.get("mastra");
1034
- const runtimeContext = c.get("runtimeContext");
1428
+ const contextRequestContext = c.get("requestContext");
1035
1429
  let agentToUse = agent;
1036
1430
  if (!agent) {
1037
1431
  const agentId = c.req.param("agentId");
@@ -1042,28 +1436,24 @@ function chatRoute({
1042
1436
  `Fixed agent ID was set together with an agentId path parameter. This can lead to unexpected behavior.`
1043
1437
  );
1044
1438
  }
1045
- if (runtimeContext && defaultOptions?.runtimeContext) {
1046
- mastra.getLogger()?.warn(`"runtimeContext" set in the route options will be overridden by the request's "runtimeContext".`);
1439
+ if (contextRequestContext && defaultOptions?.requestContext) {
1440
+ mastra.getLogger()?.warn(`"requestContext" set in the route options will be overridden by the request's "requestContext".`);
1047
1441
  }
1048
1442
  if (!agentToUse) {
1049
1443
  throw new Error("Agent ID is required");
1050
1444
  }
1051
- const agentObj = mastra.getAgent(agentToUse);
1052
- if (!agentObj) {
1053
- throw new Error(`Agent ${agentToUse} not found`);
1054
- }
1055
- const result = await agentObj.stream(messages, {
1056
- ...defaultOptions,
1057
- ...rest,
1058
- runtimeContext: runtimeContext || defaultOptions?.runtimeContext
1059
- });
1060
- const uiMessageStream = ai.createUIMessageStream({
1061
- originalMessages: messages,
1062
- execute: async ({ writer }) => {
1063
- for await (const part of toAISdkFormat(result, { from: "agent" })) {
1064
- writer.write(part);
1065
- }
1066
- }
1445
+ const uiMessageStream = await handleChatStream({
1446
+ mastra,
1447
+ agentId: agentToUse,
1448
+ params: {
1449
+ ...params,
1450
+ requestContext: contextRequestContext || params.requestContext
1451
+ },
1452
+ defaultOptions,
1453
+ sendStart,
1454
+ sendFinish,
1455
+ sendReasoning,
1456
+ sendSources
1067
1457
  });
1068
1458
  return ai.createUIMessageStreamResponse({
1069
1459
  stream: uiMessageStream
@@ -1071,9 +1461,31 @@ function chatRoute({
1071
1461
  }
1072
1462
  });
1073
1463
  }
1464
+ async function handleWorkflowStream({
1465
+ mastra,
1466
+ workflowId,
1467
+ params,
1468
+ includeTextStreamParts = true
1469
+ }) {
1470
+ const { runId, resourceId, inputData, resumeData, requestContext, ...rest } = params;
1471
+ const workflowObj = mastra.getWorkflowById(workflowId);
1472
+ if (!workflowObj) {
1473
+ throw new Error(`Workflow ${workflowId} not found`);
1474
+ }
1475
+ const run = await workflowObj.createRun({ runId, resourceId, ...rest });
1476
+ const stream = resumeData ? run.resumeStream({ resumeData, ...rest, requestContext }) : run.stream({ inputData, ...rest, requestContext });
1477
+ return ai.createUIMessageStream({
1478
+ execute: async ({ writer }) => {
1479
+ for await (const part of toAISdkV5Stream(stream, { from: "workflow", includeTextStreamParts })) {
1480
+ writer.write(part);
1481
+ }
1482
+ }
1483
+ });
1484
+ }
1074
1485
  function workflowRoute({
1075
1486
  path = "/api/workflows/:workflowId/stream",
1076
- workflow
1487
+ workflow,
1488
+ includeTextStreamParts = true
1077
1489
  }) {
1078
1490
  if (!workflow && !path.includes("/:workflowId")) {
1079
1491
  throw new Error("Path must include :workflowId to route to the correct workflow or pass the workflow explicitly");
@@ -1100,9 +1512,13 @@ function workflowRoute({
1100
1512
  schema: {
1101
1513
  type: "object",
1102
1514
  properties: {
1515
+ runId: { type: "string" },
1516
+ resourceId: { type: "string" },
1103
1517
  inputData: { type: "object", additionalProperties: true },
1104
- runtimeContext: { type: "object", additionalProperties: true },
1105
- tracingOptions: { type: "object", additionalProperties: true }
1518
+ resumeData: { type: "object", additionalProperties: true },
1519
+ requestContext: { type: "object", additionalProperties: true },
1520
+ tracingOptions: { type: "object", additionalProperties: true },
1521
+ step: { type: "string" }
1106
1522
  }
1107
1523
  }
1108
1524
  }
@@ -1120,8 +1536,9 @@ function workflowRoute({
1120
1536
  }
1121
1537
  },
1122
1538
  handler: async (c) => {
1123
- const { inputData, ...rest } = await c.req.json();
1539
+ const params = await c.req.json();
1124
1540
  const mastra = c.get("mastra");
1541
+ const contextRequestContext = c.get("requestContext");
1125
1542
  let workflowToUse = workflow;
1126
1543
  if (!workflow) {
1127
1544
  const workflowId = c.req.param("workflowId");
@@ -1135,23 +1552,47 @@ function workflowRoute({
1135
1552
  if (!workflowToUse) {
1136
1553
  throw new Error("Workflow ID is required");
1137
1554
  }
1138
- const workflowObj = mastra.getWorkflow(workflowToUse);
1139
- if (!workflowObj) {
1140
- throw new Error(`Workflow ${workflowToUse} not found`);
1555
+ if (contextRequestContext && params.requestContext) {
1556
+ mastra.getLogger()?.warn(
1557
+ `"requestContext" from the request body will be ignored because "requestContext" is already set in the route options.`
1558
+ );
1141
1559
  }
1142
- const run = await workflowObj.createRunAsync();
1143
- const stream = run.streamVNext({ inputData, ...rest });
1144
- const uiMessageStream = ai.createUIMessageStream({
1145
- execute: async ({ writer }) => {
1146
- for await (const part of toAISdkFormat(stream, { from: "workflow" })) {
1147
- writer.write(part);
1148
- }
1149
- }
1560
+ const uiMessageStream = await handleWorkflowStream({
1561
+ mastra,
1562
+ workflowId: workflowToUse,
1563
+ params: {
1564
+ ...params,
1565
+ requestContext: contextRequestContext || params.requestContext
1566
+ },
1567
+ includeTextStreamParts
1150
1568
  });
1151
1569
  return ai.createUIMessageStreamResponse({ stream: uiMessageStream });
1152
1570
  }
1153
1571
  });
1154
1572
  }
1573
+ async function handleNetworkStream({
1574
+ mastra,
1575
+ agentId,
1576
+ params,
1577
+ defaultOptions
1578
+ }) {
1579
+ const { messages, ...rest } = params;
1580
+ const agentObj = mastra.getAgentById(agentId);
1581
+ if (!agentObj) {
1582
+ throw new Error(`Agent ${agentId} not found`);
1583
+ }
1584
+ const result = await agentObj.network(messages, {
1585
+ ...defaultOptions,
1586
+ ...rest
1587
+ });
1588
+ return ai.createUIMessageStream({
1589
+ execute: async ({ writer }) => {
1590
+ for await (const part of toAISdkV5Stream(result, { from: "network" })) {
1591
+ writer.write(part);
1592
+ }
1593
+ }
1594
+ });
1595
+ }
1155
1596
  function networkRoute({
1156
1597
  path = "/network/:agentId",
1157
1598
  agent,
@@ -1183,13 +1624,12 @@ function networkRoute({
1183
1624
  type: "object",
1184
1625
  properties: {
1185
1626
  messages: { type: "array", items: { type: "object" } },
1186
- runtimeContext: { type: "object", additionalProperties: true },
1627
+ requestContext: { type: "object", additionalProperties: true },
1187
1628
  runId: { type: "string" },
1188
1629
  maxSteps: { type: "number" },
1189
1630
  threadId: { type: "string" },
1190
1631
  resourceId: { type: "string" },
1191
1632
  modelSettings: { type: "object", additionalProperties: true },
1192
- telemetry: { type: "object", additionalProperties: true },
1193
1633
  tools: { type: "array", items: { type: "object" } }
1194
1634
  },
1195
1635
  required: ["messages"]
@@ -1213,7 +1653,7 @@ function networkRoute({
1213
1653
  }
1214
1654
  },
1215
1655
  handler: async (c) => {
1216
- const { messages, ...rest } = await c.req.json();
1656
+ const params = await c.req.json();
1217
1657
  const mastra = c.get("mastra");
1218
1658
  let agentToUse = agent;
1219
1659
  if (!agent) {
@@ -1228,29 +1668,481 @@ function networkRoute({
1228
1668
  if (!agentToUse) {
1229
1669
  throw new Error("Agent ID is required");
1230
1670
  }
1231
- const agentObj = mastra.getAgent(agentToUse);
1232
- if (!agentObj) {
1233
- throw new Error(`Agent ${agentToUse} not found`);
1671
+ const uiMessageStream = await handleNetworkStream({
1672
+ mastra,
1673
+ agentId: agentToUse,
1674
+ params,
1675
+ defaultOptions
1676
+ });
1677
+ return ai.createUIMessageStreamResponse({ stream: uiMessageStream });
1678
+ }
1679
+ });
1680
+ }
1681
+ function withMastra(model, options = {}) {
1682
+ const { memory, inputProcessors = [], outputProcessors = [] } = options;
1683
+ const allInputProcessors = [...inputProcessors];
1684
+ const allOutputProcessors = [...outputProcessors];
1685
+ if (memory) {
1686
+ const { storage, lastMessages, semanticRecall, workingMemory } = memory;
1687
+ const isWorkingMemoryEnabled = typeof workingMemory === "object" && workingMemory.enabled !== false;
1688
+ if (isWorkingMemoryEnabled && typeof workingMemory === "object") {
1689
+ let template;
1690
+ if (workingMemory.template) {
1691
+ template = {
1692
+ format: "markdown",
1693
+ content: workingMemory.template
1694
+ };
1234
1695
  }
1235
- const result = await agentObj.network(messages, {
1236
- ...defaultOptions,
1237
- ...rest
1696
+ const workingMemoryProcessor = new processors.WorkingMemory({
1697
+ storage,
1698
+ template,
1699
+ scope: workingMemory.scope,
1700
+ useVNext: "version" in workingMemory && workingMemory.version === "vnext"
1701
+ });
1702
+ allInputProcessors.push(workingMemoryProcessor);
1703
+ }
1704
+ if (lastMessages !== false && lastMessages !== void 0) {
1705
+ const messageHistory = new processors.MessageHistory({
1706
+ storage,
1707
+ lastMessages: typeof lastMessages === "number" ? lastMessages : void 0
1708
+ });
1709
+ allInputProcessors.push(messageHistory);
1710
+ allOutputProcessors.push(messageHistory);
1711
+ }
1712
+ if (semanticRecall) {
1713
+ const { vector, embedder, indexName, ...semanticConfig } = semanticRecall;
1714
+ const semanticRecallProcessor = new processors.SemanticRecall({
1715
+ storage,
1716
+ vector,
1717
+ embedder,
1718
+ indexName: indexName || "memory_messages",
1719
+ ...semanticConfig
1720
+ });
1721
+ allInputProcessors.push(semanticRecallProcessor);
1722
+ allOutputProcessors.push(semanticRecallProcessor);
1723
+ }
1724
+ }
1725
+ return ai.wrapLanguageModel({
1726
+ model,
1727
+ middleware: createProcessorMiddleware({
1728
+ inputProcessors: allInputProcessors,
1729
+ outputProcessors: allOutputProcessors,
1730
+ memory: memory ? {
1731
+ threadId: memory.threadId,
1732
+ resourceId: memory.resourceId
1733
+ } : void 0
1734
+ })
1735
+ });
1736
+ }
1737
+ function createProcessorMiddleware(options) {
1738
+ const { inputProcessors = [], outputProcessors = [], memory } = options;
1739
+ const requestContext = new di.RequestContext();
1740
+ if (memory) {
1741
+ requestContext.set("MastraMemory", {
1742
+ thread: memory.threadId ? { id: memory.threadId } : void 0,
1743
+ resourceId: memory.resourceId,
1744
+ memoryConfig: memory.config
1745
+ });
1746
+ }
1747
+ return {
1748
+ middlewareVersion: "v2",
1749
+ /**
1750
+ * Transform params runs input processors (processInput)
1751
+ */
1752
+ async transformParams({ params }) {
1753
+ const messageList = new agent.MessageList({
1754
+ threadId: memory?.threadId,
1755
+ resourceId: memory?.resourceId
1238
1756
  });
1239
- const uiMessageStream = ai.createUIMessageStream({
1240
- execute: async ({ writer }) => {
1241
- for await (const part of toAISdkFormat(result, { from: "network" })) {
1242
- writer.write(part);
1757
+ for (const msg of params.prompt) {
1758
+ if (msg.role === "system") {
1759
+ messageList.addSystem(msg.content);
1760
+ } else {
1761
+ messageList.add(msg, "input");
1762
+ }
1763
+ }
1764
+ for (const processor of inputProcessors) {
1765
+ if (processor.processInput) {
1766
+ try {
1767
+ await processor.processInput({
1768
+ messages: messageList.get.input.db(),
1769
+ systemMessages: messageList.getAllSystemMessages(),
1770
+ messageList,
1771
+ requestContext,
1772
+ abort: (reason) => {
1773
+ throw new agent.TripWire(reason || "Aborted by processor");
1774
+ }
1775
+ });
1776
+ } catch (error) {
1777
+ if (error instanceof agent.TripWire) {
1778
+ return {
1779
+ ...params,
1780
+ providerOptions: {
1781
+ ...params.providerOptions,
1782
+ mastraProcessors: {
1783
+ tripwire: true,
1784
+ reason: error.message
1785
+ }
1786
+ }
1787
+ };
1788
+ }
1789
+ throw error;
1243
1790
  }
1244
1791
  }
1792
+ }
1793
+ const newPrompt = messageList.get.all.aiV5.prompt().map(agent.MessageList.aiV5ModelMessageToV2PromptMessage);
1794
+ return {
1795
+ ...params,
1796
+ prompt: newPrompt
1797
+ };
1798
+ },
1799
+ /**
1800
+ * Wrap generate for non-streaming output processing
1801
+ */
1802
+ async wrapGenerate({ doGenerate, params }) {
1803
+ const processorState = params.providerOptions?.mastraProcessors;
1804
+ if (processorState?.tripwire) {
1805
+ const reason = processorState.reason || "Blocked by processor";
1806
+ return {
1807
+ content: [{ type: "text", text: reason }],
1808
+ finishReason: "stop",
1809
+ usage: { inputTokens: 0, outputTokens: 0, totalTokens: 0 },
1810
+ warnings: [{ type: "other", message: `Tripwire: ${reason}` }]
1811
+ };
1812
+ }
1813
+ const result = await doGenerate();
1814
+ if (!outputProcessors.length) return result;
1815
+ const messageList = new agent.MessageList({
1816
+ threadId: memory?.threadId,
1817
+ resourceId: memory?.resourceId
1245
1818
  });
1246
- return ai.createUIMessageStreamResponse({ stream: uiMessageStream });
1819
+ for (const msg of params.prompt) {
1820
+ if (msg.role === "system") {
1821
+ messageList.addSystem(msg.content);
1822
+ } else {
1823
+ messageList.add(msg, "input");
1824
+ }
1825
+ }
1826
+ const textContent = result.content.filter((c) => c.type === "text").map((c) => c.text).join("");
1827
+ const responseMessage = {
1828
+ id: crypto.randomUUID(),
1829
+ role: "assistant",
1830
+ content: {
1831
+ format: 2,
1832
+ parts: [{ type: "text", text: textContent }]
1833
+ },
1834
+ createdAt: /* @__PURE__ */ new Date(),
1835
+ ...memory?.threadId && { threadId: memory.threadId },
1836
+ ...memory?.resourceId && { resourceId: memory.resourceId }
1837
+ };
1838
+ messageList.add(responseMessage, "response");
1839
+ for (const processor of outputProcessors) {
1840
+ if (processor.processOutputResult) {
1841
+ try {
1842
+ await processor.processOutputResult({
1843
+ messages: messageList.get.all.db(),
1844
+ messageList,
1845
+ requestContext,
1846
+ abort: (reason) => {
1847
+ throw new agent.TripWire(reason || "Aborted by processor");
1848
+ }
1849
+ });
1850
+ } catch (error) {
1851
+ if (error instanceof agent.TripWire) {
1852
+ return {
1853
+ content: [{ type: "text", text: error.message }],
1854
+ finishReason: "stop",
1855
+ usage: result.usage,
1856
+ warnings: [{ type: "other", message: `Output blocked: ${error.message}` }]
1857
+ };
1858
+ }
1859
+ throw error;
1860
+ }
1861
+ }
1862
+ }
1863
+ const processedText = messageList.get.response.db().map((m) => extractTextFromMastraMessage(m)).join("");
1864
+ return {
1865
+ ...result,
1866
+ content: [{ type: "text", text: processedText }]
1867
+ };
1868
+ },
1869
+ /**
1870
+ * Wrap stream for streaming output processing
1871
+ */
1872
+ async wrapStream({ doStream, params }) {
1873
+ const processorState = params.providerOptions?.mastraProcessors;
1874
+ if (processorState?.tripwire) {
1875
+ const reason = processorState.reason || "Blocked by processor";
1876
+ return {
1877
+ stream: createBlockedStream(reason)
1878
+ };
1879
+ }
1880
+ const { stream: stream$1, ...rest } = await doStream();
1881
+ if (!outputProcessors.length) return { stream: stream$1, ...rest };
1882
+ const processorStates = /* @__PURE__ */ new Map();
1883
+ const runId = crypto.randomUUID();
1884
+ const transformedStream = stream$1.pipeThrough(
1885
+ new TransformStream({
1886
+ async transform(chunk, controller) {
1887
+ let mastraChunk = stream.convertFullStreamChunkToMastra(
1888
+ chunk,
1889
+ { runId }
1890
+ );
1891
+ if (!mastraChunk) {
1892
+ controller.enqueue(chunk);
1893
+ return;
1894
+ }
1895
+ for (const processor of outputProcessors) {
1896
+ if (processor.processOutputStream && mastraChunk) {
1897
+ let state = processorStates.get(processor.id);
1898
+ if (!state) {
1899
+ state = { streamParts: [], customState: {} };
1900
+ processorStates.set(processor.id, state);
1901
+ }
1902
+ state.streamParts.push(mastraChunk);
1903
+ try {
1904
+ const result = await processor.processOutputStream({
1905
+ part: mastraChunk,
1906
+ streamParts: state.streamParts,
1907
+ state: state.customState,
1908
+ requestContext,
1909
+ abort: (reason) => {
1910
+ throw new agent.TripWire(reason || "Aborted by processor");
1911
+ }
1912
+ });
1913
+ if (result === null || result === void 0) {
1914
+ mastraChunk = void 0;
1915
+ } else {
1916
+ mastraChunk = result;
1917
+ }
1918
+ } catch (error) {
1919
+ if (error instanceof agent.TripWire) {
1920
+ controller.enqueue({
1921
+ type: "error",
1922
+ error: new Error(error.message)
1923
+ });
1924
+ controller.terminate();
1925
+ return;
1926
+ }
1927
+ throw error;
1928
+ }
1929
+ }
1930
+ }
1931
+ if (mastraChunk) {
1932
+ const aiChunk = convertMastraChunkToAISDKStreamPart(mastraChunk);
1933
+ if (aiChunk) {
1934
+ controller.enqueue(aiChunk);
1935
+ }
1936
+ }
1937
+ }
1938
+ })
1939
+ );
1940
+ return { stream: transformedStream, ...rest };
1941
+ }
1942
+ };
1943
+ }
1944
+ function createBlockedStream(reason) {
1945
+ return new ReadableStream({
1946
+ start(controller) {
1947
+ const id = crypto.randomUUID();
1948
+ controller.enqueue({
1949
+ type: "text-start",
1950
+ id
1951
+ });
1952
+ controller.enqueue({
1953
+ type: "text-delta",
1954
+ id,
1955
+ delta: reason
1956
+ });
1957
+ controller.enqueue({
1958
+ type: "text-end",
1959
+ id
1960
+ });
1961
+ controller.enqueue({
1962
+ type: "finish",
1963
+ finishReason: "stop",
1964
+ usage: { inputTokens: 0, outputTokens: 0, totalTokens: 0 }
1965
+ });
1966
+ controller.close();
1247
1967
  }
1248
1968
  });
1249
1969
  }
1970
+ function extractTextFromMastraMessage(msg) {
1971
+ const content = msg.content;
1972
+ if (typeof content === "string") {
1973
+ return content;
1974
+ }
1975
+ if (content?.parts) {
1976
+ return content.parts.filter((p) => p.type === "text" && "text" in p).map((p) => p.text).join("");
1977
+ }
1978
+ return "";
1979
+ }
1980
+ function convertMastraChunkToAISDKStreamPart(chunk) {
1981
+ switch (chunk.type) {
1982
+ // Text streaming
1983
+ case "text-start":
1984
+ return {
1985
+ type: "text-start",
1986
+ id: chunk.payload.id || crypto.randomUUID(),
1987
+ providerMetadata: chunk.payload.providerMetadata
1988
+ };
1989
+ case "text-delta":
1990
+ return {
1991
+ type: "text-delta",
1992
+ id: chunk.payload.id || crypto.randomUUID(),
1993
+ delta: chunk.payload.text,
1994
+ providerMetadata: chunk.payload.providerMetadata
1995
+ };
1996
+ case "text-end":
1997
+ return {
1998
+ type: "text-end",
1999
+ id: chunk.payload.id || crypto.randomUUID(),
2000
+ providerMetadata: chunk.payload.providerMetadata
2001
+ };
2002
+ // Reasoning streaming
2003
+ case "reasoning-start":
2004
+ return {
2005
+ type: "reasoning-start",
2006
+ id: chunk.payload.id || crypto.randomUUID(),
2007
+ providerMetadata: chunk.payload.providerMetadata
2008
+ };
2009
+ case "reasoning-delta":
2010
+ return {
2011
+ type: "reasoning-delta",
2012
+ id: chunk.payload.id || crypto.randomUUID(),
2013
+ delta: chunk.payload.text,
2014
+ providerMetadata: chunk.payload.providerMetadata
2015
+ };
2016
+ case "reasoning-end":
2017
+ return {
2018
+ type: "reasoning-end",
2019
+ id: chunk.payload.id || crypto.randomUUID(),
2020
+ providerMetadata: chunk.payload.providerMetadata
2021
+ };
2022
+ // Tool call (complete)
2023
+ case "tool-call":
2024
+ return {
2025
+ type: "tool-call",
2026
+ toolCallId: chunk.payload.toolCallId,
2027
+ toolName: chunk.payload.toolName,
2028
+ input: JSON.stringify(chunk.payload.args),
2029
+ providerExecuted: chunk.payload.providerExecuted,
2030
+ providerMetadata: chunk.payload.providerMetadata
2031
+ };
2032
+ // Tool call input streaming
2033
+ case "tool-call-input-streaming-start":
2034
+ return {
2035
+ type: "tool-input-start",
2036
+ id: chunk.payload.toolCallId,
2037
+ toolName: chunk.payload.toolName,
2038
+ providerExecuted: chunk.payload.providerExecuted,
2039
+ providerMetadata: chunk.payload.providerMetadata
2040
+ };
2041
+ case "tool-call-delta":
2042
+ return {
2043
+ type: "tool-input-delta",
2044
+ id: chunk.payload.toolCallId,
2045
+ delta: chunk.payload.argsTextDelta,
2046
+ providerMetadata: chunk.payload.providerMetadata
2047
+ };
2048
+ case "tool-call-input-streaming-end":
2049
+ return {
2050
+ type: "tool-input-end",
2051
+ id: chunk.payload.toolCallId,
2052
+ providerMetadata: chunk.payload.providerMetadata
2053
+ };
2054
+ // Tool result
2055
+ case "tool-result":
2056
+ return {
2057
+ type: "tool-result",
2058
+ toolCallId: chunk.payload.toolCallId,
2059
+ toolName: chunk.payload.toolName,
2060
+ result: { type: "json", value: chunk.payload.result },
2061
+ isError: chunk.payload.isError,
2062
+ providerExecuted: chunk.payload.providerExecuted,
2063
+ providerMetadata: chunk.payload.providerMetadata
2064
+ };
2065
+ // Source (citations)
2066
+ case "source":
2067
+ if (chunk.payload.sourceType === "url") {
2068
+ return {
2069
+ type: "source",
2070
+ sourceType: "url",
2071
+ id: chunk.payload.id,
2072
+ url: chunk.payload.url,
2073
+ title: chunk.payload.title,
2074
+ providerMetadata: chunk.payload.providerMetadata
2075
+ };
2076
+ } else {
2077
+ return {
2078
+ type: "source",
2079
+ sourceType: "document",
2080
+ id: chunk.payload.id,
2081
+ mediaType: chunk.payload.mimeType,
2082
+ title: chunk.payload.title,
2083
+ filename: chunk.payload.filename,
2084
+ providerMetadata: chunk.payload.providerMetadata
2085
+ };
2086
+ }
2087
+ // File output
2088
+ case "file":
2089
+ return {
2090
+ type: "file",
2091
+ data: chunk.payload.data || chunk.payload.base64,
2092
+ mediaType: chunk.payload.mimeType
2093
+ };
2094
+ // Response metadata
2095
+ case "response-metadata":
2096
+ return {
2097
+ type: "response-metadata",
2098
+ ...chunk.payload
2099
+ };
2100
+ // Raw provider data
2101
+ case "raw":
2102
+ return {
2103
+ type: "raw",
2104
+ rawValue: chunk.payload
2105
+ };
2106
+ // Finish
2107
+ case "finish": {
2108
+ const usage = chunk.payload.output?.usage;
2109
+ return {
2110
+ type: "finish",
2111
+ finishReason: chunk.payload.stepResult?.reason || "stop",
2112
+ usage: usage ? {
2113
+ inputTokens: usage.inputTokens || 0,
2114
+ outputTokens: usage.outputTokens || 0,
2115
+ totalTokens: usage.totalTokens || 0
2116
+ } : { inputTokens: 0, outputTokens: 0, totalTokens: 0 },
2117
+ providerMetadata: chunk.payload.metadata?.providerMetadata
2118
+ };
2119
+ }
2120
+ // Error
2121
+ case "error":
2122
+ return {
2123
+ type: "error",
2124
+ error: chunk.payload.error || chunk.payload
2125
+ };
2126
+ default:
2127
+ return null;
2128
+ }
2129
+ }
2130
+
2131
+ // src/to-ai-sdk-format.ts
2132
+ function toAISdkFormat() {
2133
+ throw new Error(
2134
+ 'toAISdkFormat() has been deprecated. Please use toAISdkStream() instead.\n\nMigration:\n import { toAISdkFormat } from "@mastra/ai-sdk";\n // Change to:\n import { toAISdkStream } from "@mastra/ai-sdk";\n\nThe function signature remains the same.'
2135
+ );
2136
+ }
1250
2137
 
1251
2138
  exports.chatRoute = chatRoute;
2139
+ exports.handleChatStream = handleChatStream;
2140
+ exports.handleNetworkStream = handleNetworkStream;
2141
+ exports.handleWorkflowStream = handleWorkflowStream;
1252
2142
  exports.networkRoute = networkRoute;
1253
2143
  exports.toAISdkFormat = toAISdkFormat;
2144
+ exports.toAISdkStream = toAISdkV5Stream;
2145
+ exports.withMastra = withMastra;
1254
2146
  exports.workflowRoute = workflowRoute;
1255
2147
  //# sourceMappingURL=index.cjs.map
1256
2148
  //# sourceMappingURL=index.cjs.map