@mastra/ai-sdk 0.0.0-fix-backport-setserver-20251201144151 → 0.0.0-fix-request-context-as-query-key-20251209093005

This diff shows the changes between publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,6 +1,9 @@
  import { registerApiRoute } from '@mastra/core/server';
- import { createUIMessageStream, createUIMessageStreamResponse } from 'ai';
- import { DefaultGeneratedFile, DefaultGeneratedFileWithType } from '@mastra/core/stream';
+ import { createUIMessageStream, createUIMessageStreamResponse, wrapLanguageModel } from 'ai';
+ import { convertFullStreamChunkToMastra, DefaultGeneratedFile, DefaultGeneratedFileWithType } from '@mastra/core/stream';
+ import { TripWire, MessageList } from '@mastra/core/agent';
+ import { RequestContext } from '@mastra/core/di';
+ import { WorkingMemory, MessageHistory, SemanticRecall } from '@mastra/core/processors';

  // src/chat-route.ts

@@ -20,6 +23,8 @@ var isMastraTextStreamChunk = (chunk) => {
  "source",
  "tool-input-start",
  "tool-input-delta",
+ "tool-call-approval",
+ "tool-call-suspended",
  "tool-call",
  "tool-result",
  "tool-error",
@@ -30,6 +35,8 @@ var isMastraTextStreamChunk = (chunk) => {
  "finish",
  "abort",
  "tool-input-end",
+ "object",
+ "tripwire",
  "raw"
  ].includes(chunk.type);
  };
@@ -1132,6 +1139,26 @@ function transformNetwork(payload, bufferedNetworks, isNested) {
  };
  }
  default: {
+ if (isAgentExecutionDataChunkType(payload)) {
+ if (!("data" in payload.payload)) {
+ throw new Error(
+ `UI Messages require a data property when using data- prefixed chunks
+ ${JSON.stringify(payload)}`
+ );
+ }
+ const { type, data } = payload.payload;
+ return { type, data };
+ }
+ if (isWorkflowExecutionDataChunkType(payload)) {
+ if (!("data" in payload.payload)) {
+ throw new Error(
+ `UI Messages require a data property when using data- prefixed chunks
+ ${JSON.stringify(payload)}`
+ );
+ }
+ const { type, data } = payload.payload;
+ return { type, data };
+ }
  if (payload.type.startsWith("agent-execution-event-")) {
  const stepId = payload.payload.runId;
  const current = bufferedNetworks.get(payload.runId);
@@ -1146,6 +1173,15 @@ function transformNetwork(payload, bufferedNetworks, isNested) {
  const { request, response, ...data } = result.data;
  step.task = data;
  }
+ bufferedNetworks.set(payload.runId, current);
+ return {
+ type: isNested ? "data-tool-network" : "data-network",
+ id: payload.runId,
+ data: {
+ ...current,
+ status: "running"
+ }
+ };
  }
  if (payload.type.startsWith("workflow-execution-event-")) {
  const stepId = payload.payload.runId;
@@ -1164,6 +1200,15 @@ function transformNetwork(payload, bufferedNetworks, isNested) {
  step.task.id = data.name;
  }
  }
+ bufferedNetworks.set(payload.runId, current);
+ return {
+ type: isNested ? "data-tool-network" : "data-network",
+ id: payload.runId,
+ data: {
+ ...current,
+ status: "running"
+ }
+ };
  }
  if (isDataChunkType(payload)) {
  if (!("data" in payload)) {
@@ -1175,40 +1220,20 @@ function transformNetwork(payload, bufferedNetworks, isNested) {
  const { type, data } = payload;
  return { type, data };
  }
- if (isAgentExecutionDataChunkType(payload)) {
- if (!("data" in payload.payload)) {
- throw new Error(
- `UI Messages require a data property when using data- prefixed chunks
- ${JSON.stringify(payload)}`
- );
- }
- const { type, data } = payload.payload;
- return { type, data };
- }
- if (isWorkflowExecutionDataChunkType(payload)) {
- if (!("data" in payload.payload)) {
- throw new Error(
- `UI Messages require a data property when using data- prefixed chunks
- ${JSON.stringify(payload)}`
- );
- }
- const { type, data } = payload.payload;
- return { type, data };
- }
  return null;
  }
  }
  }

- // src/to-ai-sdk-format.ts
- function toAISdkFormat(stream, options = {
+ // src/convert-streams.ts
+ function toAISdkV5Stream(stream, options = {
  from: "agent",
  sendStart: true,
  sendFinish: true
  }) {
  const from = options?.from;
  if (from === "workflow") {
- const includeTextStreamParts = options?.includeTextStreamParts ?? false;
+ const includeTextStreamParts = options?.includeTextStreamParts ?? true;
  return stream.pipeThrough(
  WorkflowStreamToAISDKTransformer({ includeTextStreamParts })
  );
@@ -1231,6 +1256,57 @@ function toAISdkFormat(stream, options = {
  }

  // src/chat-route.ts
+ async function handleChatStream({
+ mastra,
+ agentId,
+ params,
+ defaultOptions,
+ sendStart = true,
+ sendFinish = true,
+ sendReasoning = false,
+ sendSources = false
+ }) {
+ const { messages, resumeData, runId, requestContext, ...rest } = params;
+ if (resumeData && !runId) {
+ throw new Error("runId is required when resumeData is provided");
+ }
+ const agentObj = mastra.getAgentById(agentId);
+ if (!agentObj) {
+ throw new Error(`Agent ${agentId} not found`);
+ }
+ if (!Array.isArray(messages)) {
+ throw new Error("Messages must be an array of UIMessage objects");
+ }
+ const mergedOptions = {
+ ...defaultOptions,
+ ...rest,
+ ...runId && { runId },
+ requestContext: requestContext || defaultOptions?.requestContext
+ };
+ const result = resumeData ? await agentObj.resumeStream(resumeData, mergedOptions) : await agentObj.stream(messages, mergedOptions);
+ let lastMessageId;
+ if (messages.length) {
+ const lastMessage = messages[messages.length - 1];
+ if (lastMessage?.role === "assistant") {
+ lastMessageId = lastMessage.id;
+ }
+ }
+ return createUIMessageStream({
+ originalMessages: messages,
+ execute: async ({ writer }) => {
+ for await (const part of toAISdkV5Stream(result, {
+ from: "agent",
+ lastMessageId,
+ sendStart,
+ sendFinish,
+ sendReasoning,
+ sendSources
+ })) {
+ writer.write(part);
+ }
+ }
+ });
+ }
  function chatRoute({
  path = "/chat/:agentId",
  agent,
@@ -1267,6 +1343,14 @@ function chatRoute({
  schema: {
  type: "object",
  properties: {
+ resumeData: {
+ type: "object",
+ description: "Resume data for the agent"
+ },
+ runId: {
+ type: "string",
+ description: "The run ID required when resuming an agent execution"
+ },
  messages: {
  type: "array",
  description: "Array of messages in the conversation",
@@ -1337,9 +1421,9 @@ function chatRoute({
  }
  },
  handler: async (c) => {
- const { messages, ...rest } = await c.req.json();
+ const params = await c.req.json();
  const mastra = c.get("mastra");
- const runtimeContext = c.get("runtimeContext");
+ const contextRequestContext = c.get("requestContext");
  let agentToUse = agent;
  if (!agent) {
  const agentId = c.req.param("agentId");
@@ -1350,39 +1434,24 @@ function chatRoute({
  `Fixed agent ID was set together with an agentId path parameter. This can lead to unexpected behavior.`
  );
  }
- if (runtimeContext && defaultOptions?.runtimeContext) {
- mastra.getLogger()?.warn(`"runtimeContext" set in the route options will be overridden by the request's "runtimeContext".`);
+ if (contextRequestContext && defaultOptions?.requestContext) {
+ mastra.getLogger()?.warn(`"requestContext" set in the route options will be overridden by the request's "requestContext".`);
  }
  if (!agentToUse) {
  throw new Error("Agent ID is required");
  }
- const agentObj = mastra.getAgentById(agentToUse);
- if (!agentObj) {
- throw new Error(`Agent ${agentToUse} not found`);
- }
- const result = await agentObj.stream(messages, {
- ...defaultOptions,
- ...rest,
- runtimeContext: runtimeContext || defaultOptions?.runtimeContext
- });
- let lastMessageId;
- if (messages.length > 0 && messages[messages.length - 1].role === "assistant") {
- lastMessageId = messages[messages.length - 1].id;
- }
- const uiMessageStream = createUIMessageStream({
- originalMessages: messages,
- execute: async ({ writer }) => {
- for await (const part of toAISdkFormat(result, {
- from: "agent",
- lastMessageId,
- sendStart,
- sendFinish,
- sendReasoning,
- sendSources
- })) {
- writer.write(part);
- }
- }
+ const uiMessageStream = await handleChatStream({
+ mastra,
+ agentId: agentToUse,
+ params: {
+ ...params,
+ requestContext: contextRequestContext || params.requestContext
+ },
+ defaultOptions,
+ sendStart,
+ sendFinish,
+ sendReasoning,
+ sendSources
  });
  return createUIMessageStreamResponse({
  stream: uiMessageStream
@@ -1390,10 +1459,31 @@ function chatRoute({
  }
  });
  }
+ async function handleWorkflowStream({
+ mastra,
+ workflowId,
+ params,
+ includeTextStreamParts = true
+ }) {
+ const { runId, resourceId, inputData, resumeData, requestContext, ...rest } = params;
+ const workflowObj = mastra.getWorkflowById(workflowId);
+ if (!workflowObj) {
+ throw new Error(`Workflow ${workflowId} not found`);
+ }
+ const run = await workflowObj.createRun({ runId, resourceId, ...rest });
+ const stream = resumeData ? run.resumeStream({ resumeData, ...rest, requestContext }) : run.stream({ inputData, ...rest, requestContext });
+ return createUIMessageStream({
+ execute: async ({ writer }) => {
+ for await (const part of toAISdkV5Stream(stream, { from: "workflow", includeTextStreamParts })) {
+ writer.write(part);
+ }
+ }
+ });
+ }
  function workflowRoute({
  path = "/api/workflows/:workflowId/stream",
  workflow,
- includeTextStreamParts = false
+ includeTextStreamParts = true
  }) {
  if (!workflow && !path.includes("/:workflowId")) {
  throw new Error("Path must include :workflowId to route to the correct workflow or pass the workflow explicitly");
@@ -1424,7 +1514,7 @@ function workflowRoute({
  resourceId: { type: "string" },
  inputData: { type: "object", additionalProperties: true },
  resumeData: { type: "object", additionalProperties: true },
- runtimeContext: { type: "object", additionalProperties: true },
+ requestContext: { type: "object", additionalProperties: true },
  tracingOptions: { type: "object", additionalProperties: true },
  step: { type: "string" }
  }
@@ -1444,9 +1534,9 @@ function workflowRoute({
  }
  },
  handler: async (c) => {
- const { runId, resourceId, inputData, resumeData, ...rest } = await c.req.json();
+ const params = await c.req.json();
  const mastra = c.get("mastra");
- const runtimeContext = c.get("runtimeContext");
+ const contextRequestContext = c.get("requestContext");
  let workflowToUse = workflow;
  if (!workflow) {
  const workflowId = c.req.param("workflowId");
@@ -1460,28 +1550,47 @@ function workflowRoute({
  if (!workflowToUse) {
  throw new Error("Workflow ID is required");
  }
- const workflowObj = mastra.getWorkflowById(workflowToUse);
- if (!workflowObj) {
- throw new Error(`Workflow ${workflowToUse} not found`);
- }
- if (runtimeContext && rest.runtimeContext) {
+ if (contextRequestContext && params.requestContext) {
  mastra.getLogger()?.warn(
- `"runtimeContext" from the request body will be ignored because "runtimeContext" is already set in the route options.`
+ `"requestContext" from the request body will be ignored because "requestContext" is already set in the route options.`
  );
  }
- const run = await workflowObj.createRunAsync({ runId, resourceId, ...rest });
- const stream = resumeData ? run.resumeStream({ resumeData, ...rest, runtimeContext: runtimeContext || rest.runtimeContext }) : run.stream({ inputData, ...rest, runtimeContext: runtimeContext || rest.runtimeContext });
- const uiMessageStream = createUIMessageStream({
- execute: async ({ writer }) => {
- for await (const part of toAISdkFormat(stream, { from: "workflow", includeTextStreamParts })) {
- writer.write(part);
- }
- }
+ const uiMessageStream = await handleWorkflowStream({
+ mastra,
+ workflowId: workflowToUse,
+ params: {
+ ...params,
+ requestContext: contextRequestContext || params.requestContext
+ },
+ includeTextStreamParts
  });
  return createUIMessageStreamResponse({ stream: uiMessageStream });
  }
  });
  }
+ async function handleNetworkStream({
+ mastra,
+ agentId,
+ params,
+ defaultOptions
+ }) {
+ const { messages, ...rest } = params;
+ const agentObj = mastra.getAgentById(agentId);
+ if (!agentObj) {
+ throw new Error(`Agent ${agentId} not found`);
+ }
+ const result = await agentObj.network(messages, {
+ ...defaultOptions,
+ ...rest
+ });
+ return createUIMessageStream({
+ execute: async ({ writer }) => {
+ for await (const part of toAISdkV5Stream(result, { from: "network" })) {
+ writer.write(part);
+ }
+ }
+ });
+ }
  function networkRoute({
  path = "/network/:agentId",
  agent,
@@ -1513,13 +1622,12 @@ function networkRoute({
  type: "object",
  properties: {
  messages: { type: "array", items: { type: "object" } },
- runtimeContext: { type: "object", additionalProperties: true },
+ requestContext: { type: "object", additionalProperties: true },
  runId: { type: "string" },
  maxSteps: { type: "number" },
  threadId: { type: "string" },
  resourceId: { type: "string" },
  modelSettings: { type: "object", additionalProperties: true },
- telemetry: { type: "object", additionalProperties: true },
  tools: { type: "array", items: { type: "object" } }
  },
  required: ["messages"]
@@ -1543,7 +1651,7 @@ function networkRoute({
  }
  },
  handler: async (c) => {
- const { messages, ...rest } = await c.req.json();
+ const params = await c.req.json();
  const mastra = c.get("mastra");
  let agentToUse = agent;
  if (!agent) {
@@ -1558,26 +1666,473 @@ function networkRoute({
  if (!agentToUse) {
  throw new Error("Agent ID is required");
  }
- const agentObj = mastra.getAgentById(agentToUse);
- if (!agentObj) {
- throw new Error(`Agent ${agentToUse} not found`);
+ const uiMessageStream = await handleNetworkStream({
+ mastra,
+ agentId: agentToUse,
+ params,
+ defaultOptions
+ });
+ return createUIMessageStreamResponse({ stream: uiMessageStream });
+ }
+ });
+ }
+ function withMastra(model, options = {}) {
+ const { memory, inputProcessors = [], outputProcessors = [] } = options;
+ const allInputProcessors = [...inputProcessors];
+ const allOutputProcessors = [...outputProcessors];
+ if (memory) {
+ const { storage, lastMessages, semanticRecall, workingMemory } = memory;
+ const isWorkingMemoryEnabled = typeof workingMemory === "object" && workingMemory.enabled !== false;
+ if (isWorkingMemoryEnabled && typeof workingMemory === "object") {
+ let template;
+ if (workingMemory.template) {
+ template = {
+ format: "markdown",
+ content: workingMemory.template
+ };
  }
- const result = await agentObj.network(messages, {
- ...defaultOptions,
- ...rest
+ const workingMemoryProcessor = new WorkingMemory({
+ storage,
+ template,
+ scope: workingMemory.scope,
+ useVNext: "version" in workingMemory && workingMemory.version === "vnext"
+ });
+ allInputProcessors.push(workingMemoryProcessor);
+ }
+ if (lastMessages !== false && lastMessages !== void 0) {
+ const messageHistory = new MessageHistory({
+ storage,
+ lastMessages: typeof lastMessages === "number" ? lastMessages : void 0
+ });
+ allInputProcessors.push(messageHistory);
+ allOutputProcessors.push(messageHistory);
+ }
+ if (semanticRecall) {
+ const { vector, embedder, indexName, ...semanticConfig } = semanticRecall;
+ const semanticRecallProcessor = new SemanticRecall({
+ storage,
+ vector,
+ embedder,
+ indexName: indexName || "memory_messages",
+ ...semanticConfig
+ });
+ allInputProcessors.push(semanticRecallProcessor);
+ allOutputProcessors.push(semanticRecallProcessor);
+ }
+ }
+ return wrapLanguageModel({
+ model,
+ middleware: createProcessorMiddleware({
+ inputProcessors: allInputProcessors,
+ outputProcessors: allOutputProcessors,
+ memory: memory ? {
+ threadId: memory.threadId,
+ resourceId: memory.resourceId
+ } : void 0
+ })
+ });
+ }
+ function createProcessorMiddleware(options) {
+ const { inputProcessors = [], outputProcessors = [], memory } = options;
+ const requestContext = new RequestContext();
+ if (memory) {
+ requestContext.set("MastraMemory", {
+ thread: memory.threadId ? { id: memory.threadId } : void 0,
+ resourceId: memory.resourceId,
+ memoryConfig: memory.config
+ });
+ }
+ return {
+ middlewareVersion: "v2",
+ /**
+ * Transform params runs input processors (processInput)
+ */
+ async transformParams({ params }) {
+ const messageList = new MessageList({
+ threadId: memory?.threadId,
+ resourceId: memory?.resourceId
  });
- const uiMessageStream = createUIMessageStream({
- execute: async ({ writer }) => {
- for await (const part of toAISdkFormat(result, { from: "network" })) {
- writer.write(part);
+ for (const msg of params.prompt) {
+ if (msg.role === "system") {
+ messageList.addSystem(msg.content);
+ } else {
+ messageList.add(msg, "input");
+ }
+ }
+ for (const processor of inputProcessors) {
+ if (processor.processInput) {
+ try {
+ await processor.processInput({
+ messages: messageList.get.input.db(),
+ systemMessages: messageList.getAllSystemMessages(),
+ messageList,
+ requestContext,
+ abort: (reason) => {
+ throw new TripWire(reason || "Aborted by processor");
+ }
+ });
+ } catch (error) {
+ if (error instanceof TripWire) {
+ return {
+ ...params,
+ providerOptions: {
+ ...params.providerOptions,
+ mastraProcessors: {
+ tripwire: true,
+ reason: error.message
+ }
+ }
+ };
+ }
+ throw error;
  }
  }
+ }
+ const newPrompt = messageList.get.all.aiV5.prompt().map(MessageList.aiV5ModelMessageToV2PromptMessage);
+ return {
+ ...params,
+ prompt: newPrompt
+ };
+ },
+ /**
+ * Wrap generate for non-streaming output processing
+ */
+ async wrapGenerate({ doGenerate, params }) {
+ const processorState = params.providerOptions?.mastraProcessors;
+ if (processorState?.tripwire) {
+ const reason = processorState.reason || "Blocked by processor";
+ return {
+ content: [{ type: "text", text: reason }],
+ finishReason: "stop",
+ usage: { inputTokens: 0, outputTokens: 0, totalTokens: 0 },
+ warnings: [{ type: "other", message: `Tripwire: ${reason}` }]
+ };
+ }
+ const result = await doGenerate();
+ if (!outputProcessors.length) return result;
+ const messageList = new MessageList({
+ threadId: memory?.threadId,
+ resourceId: memory?.resourceId
  });
- return createUIMessageStreamResponse({ stream: uiMessageStream });
+ for (const msg of params.prompt) {
+ if (msg.role === "system") {
+ messageList.addSystem(msg.content);
+ } else {
+ messageList.add(msg, "input");
+ }
+ }
+ const textContent = result.content.filter((c) => c.type === "text").map((c) => c.text).join("");
+ const responseMessage = {
+ id: crypto.randomUUID(),
+ role: "assistant",
+ content: {
+ format: 2,
+ parts: [{ type: "text", text: textContent }]
+ },
+ createdAt: /* @__PURE__ */ new Date(),
+ ...memory?.threadId && { threadId: memory.threadId },
+ ...memory?.resourceId && { resourceId: memory.resourceId }
+ };
+ messageList.add(responseMessage, "response");
+ for (const processor of outputProcessors) {
+ if (processor.processOutputResult) {
+ try {
+ await processor.processOutputResult({
+ messages: messageList.get.all.db(),
+ messageList,
+ requestContext,
+ abort: (reason) => {
+ throw new TripWire(reason || "Aborted by processor");
+ }
+ });
+ } catch (error) {
+ if (error instanceof TripWire) {
+ return {
+ content: [{ type: "text", text: error.message }],
+ finishReason: "stop",
+ usage: result.usage,
+ warnings: [{ type: "other", message: `Output blocked: ${error.message}` }]
+ };
+ }
+ throw error;
+ }
+ }
+ }
+ const processedText = messageList.get.response.db().map((m) => extractTextFromMastraMessage(m)).join("");
+ return {
+ ...result,
+ content: [{ type: "text", text: processedText }]
+ };
+ },
+ /**
+ * Wrap stream for streaming output processing
+ */
+ async wrapStream({ doStream, params }) {
+ const processorState = params.providerOptions?.mastraProcessors;
+ if (processorState?.tripwire) {
+ const reason = processorState.reason || "Blocked by processor";
+ return {
+ stream: createBlockedStream(reason)
+ };
+ }
+ const { stream, ...rest } = await doStream();
+ if (!outputProcessors.length) return { stream, ...rest };
+ const processorStates = /* @__PURE__ */ new Map();
+ const runId = crypto.randomUUID();
+ const transformedStream = stream.pipeThrough(
+ new TransformStream({
+ async transform(chunk, controller) {
+ let mastraChunk = convertFullStreamChunkToMastra(
+ chunk,
+ { runId }
+ );
+ if (!mastraChunk) {
+ controller.enqueue(chunk);
+ return;
+ }
+ for (const processor of outputProcessors) {
+ if (processor.processOutputStream && mastraChunk) {
+ let state = processorStates.get(processor.id);
+ if (!state) {
+ state = { streamParts: [], customState: {} };
+ processorStates.set(processor.id, state);
+ }
+ state.streamParts.push(mastraChunk);
+ try {
+ const result = await processor.processOutputStream({
+ part: mastraChunk,
+ streamParts: state.streamParts,
+ state: state.customState,
+ requestContext,
+ abort: (reason) => {
+ throw new TripWire(reason || "Aborted by processor");
+ }
+ });
+ if (result === null || result === void 0) {
+ mastraChunk = void 0;
+ } else {
+ mastraChunk = result;
+ }
+ } catch (error) {
+ if (error instanceof TripWire) {
+ controller.enqueue({
+ type: "error",
+ error: new Error(error.message)
+ });
+ controller.terminate();
+ return;
+ }
+ throw error;
+ }
+ }
+ }
+ if (mastraChunk) {
+ const aiChunk = convertMastraChunkToAISDKStreamPart(mastraChunk);
+ if (aiChunk) {
+ controller.enqueue(aiChunk);
+ }
+ }
+ }
+ })
+ );
+ return { stream: transformedStream, ...rest };
+ }
+ };
+ }
+ function createBlockedStream(reason) {
+ return new ReadableStream({
+ start(controller) {
+ const id = crypto.randomUUID();
+ controller.enqueue({
+ type: "text-start",
+ id
+ });
+ controller.enqueue({
+ type: "text-delta",
+ id,
+ delta: reason
+ });
+ controller.enqueue({
+ type: "text-end",
+ id
+ });
+ controller.enqueue({
+ type: "finish",
+ finishReason: "stop",
+ usage: { inputTokens: 0, outputTokens: 0, totalTokens: 0 }
+ });
+ controller.close();
  }
  });
  }
+ function extractTextFromMastraMessage(msg) {
+ const content = msg.content;
+ if (typeof content === "string") {
+ return content;
+ }
+ if (content?.parts) {
+ return content.parts.filter((p) => p.type === "text" && "text" in p).map((p) => p.text).join("");
+ }
+ return "";
+ }
+ function convertMastraChunkToAISDKStreamPart(chunk) {
+ switch (chunk.type) {
+ // Text streaming
+ case "text-start":
+ return {
+ type: "text-start",
+ id: chunk.payload.id || crypto.randomUUID(),
+ providerMetadata: chunk.payload.providerMetadata
+ };
+ case "text-delta":
+ return {
+ type: "text-delta",
+ id: chunk.payload.id || crypto.randomUUID(),
+ delta: chunk.payload.text,
+ providerMetadata: chunk.payload.providerMetadata
+ };
+ case "text-end":
+ return {
+ type: "text-end",
+ id: chunk.payload.id || crypto.randomUUID(),
+ providerMetadata: chunk.payload.providerMetadata
+ };
+ // Reasoning streaming
+ case "reasoning-start":
+ return {
+ type: "reasoning-start",
+ id: chunk.payload.id || crypto.randomUUID(),
+ providerMetadata: chunk.payload.providerMetadata
+ };
+ case "reasoning-delta":
+ return {
+ type: "reasoning-delta",
+ id: chunk.payload.id || crypto.randomUUID(),
+ delta: chunk.payload.text,
+ providerMetadata: chunk.payload.providerMetadata
+ };
+ case "reasoning-end":
+ return {
+ type: "reasoning-end",
+ id: chunk.payload.id || crypto.randomUUID(),
+ providerMetadata: chunk.payload.providerMetadata
+ };
+ // Tool call (complete)
+ case "tool-call":
+ return {
+ type: "tool-call",
+ toolCallId: chunk.payload.toolCallId,
+ toolName: chunk.payload.toolName,
+ input: JSON.stringify(chunk.payload.args),
+ providerExecuted: chunk.payload.providerExecuted,
+ providerMetadata: chunk.payload.providerMetadata
+ };
+ // Tool call input streaming
+ case "tool-call-input-streaming-start":
+ return {
+ type: "tool-input-start",
+ id: chunk.payload.toolCallId,
+ toolName: chunk.payload.toolName,
+ providerExecuted: chunk.payload.providerExecuted,
+ providerMetadata: chunk.payload.providerMetadata
+ };
+ case "tool-call-delta":
+ return {
+ type: "tool-input-delta",
+ id: chunk.payload.toolCallId,
+ delta: chunk.payload.argsTextDelta,
+ providerMetadata: chunk.payload.providerMetadata
+ };
+ case "tool-call-input-streaming-end":
+ return {
+ type: "tool-input-end",
+ id: chunk.payload.toolCallId,
+ providerMetadata: chunk.payload.providerMetadata
+ };
+ // Tool result
+ case "tool-result":
+ return {
+ type: "tool-result",
+ toolCallId: chunk.payload.toolCallId,
+ toolName: chunk.payload.toolName,
+ result: { type: "json", value: chunk.payload.result },
+ isError: chunk.payload.isError,
+ providerExecuted: chunk.payload.providerExecuted,
+ providerMetadata: chunk.payload.providerMetadata
+ };
+ // Source (citations)
+ case "source":
+ if (chunk.payload.sourceType === "url") {
+ return {
+ type: "source",
+ sourceType: "url",
+ id: chunk.payload.id,
+ url: chunk.payload.url,
+ title: chunk.payload.title,
+ providerMetadata: chunk.payload.providerMetadata
+ };
+ } else {
+ return {
+ type: "source",
+ sourceType: "document",
+ id: chunk.payload.id,
+ mediaType: chunk.payload.mimeType,
+ title: chunk.payload.title,
+ filename: chunk.payload.filename,
+ providerMetadata: chunk.payload.providerMetadata
+ };
+ }
+ // File output
+ case "file":
+ return {
+ type: "file",
+ data: chunk.payload.data || chunk.payload.base64,
+ mediaType: chunk.payload.mimeType
+ };
+ // Response metadata
+ case "response-metadata":
+ return {
+ type: "response-metadata",
+ ...chunk.payload
+ };
+ // Raw provider data
+ case "raw":
+ return {
+ type: "raw",
+ rawValue: chunk.payload
+ };
+ // Finish
+ case "finish": {
+ const usage = chunk.payload.output?.usage;
+ return {
+ type: "finish",
+ finishReason: chunk.payload.stepResult?.reason || "stop",
+ usage: usage ? {
+ inputTokens: usage.inputTokens || 0,
+ outputTokens: usage.outputTokens || 0,
+ totalTokens: usage.totalTokens || 0
+ } : { inputTokens: 0, outputTokens: 0, totalTokens: 0 },
+ providerMetadata: chunk.payload.metadata?.providerMetadata
+ };
+ }
+ // Error
+ case "error":
+ return {
+ type: "error",
+ error: chunk.payload.error || chunk.payload
+ };
+ default:
+ return null;
+ }
+ }
+
+ // src/to-ai-sdk-format.ts
+ function toAISdkFormat() {
+ throw new Error(
+ 'toAISdkFormat() has been deprecated. Please use toAISdkStream() instead.\n\nMigration:\n import { toAISdkFormat } from "@mastra/ai-sdk";\n // Change to:\n import { toAISdkStream } from "@mastra/ai-sdk";\n\nThe function signature remains the same.'
+ );
+ }

- export { chatRoute, networkRoute, toAISdkFormat, workflowRoute };
+ export { chatRoute, handleChatStream, handleNetworkStream, handleWorkflowStream, networkRoute, toAISdkFormat, toAISdkV5Stream as toAISdkStream, withMastra, workflowRoute };
  //# sourceMappingURL=index.js.map
  //# sourceMappingURL=index.js.map
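
Usage note: this release moves the inline route handlers into exported helpers (handleChatStream, handleWorkflowStream, handleNetworkStream), renames the runtimeContext option to requestContext, renames toAISdkFormat to toAISdkStream, and adds resumeData/runId support to the chat route. A minimal registration sketch, assuming the usual Mastra server.apiRoutes wiring; the paths and option values shown are illustrative, not taken from this package:

// Sketch only: wiring the updated routes into a Mastra server.
// The agents and workflows referenced by :agentId and :workflowId are
// assumed to be registered on this same Mastra instance.
import { Mastra } from '@mastra/core';
import { chatRoute, workflowRoute, networkRoute } from '@mastra/ai-sdk';

export const mastra = new Mastra({
  server: {
    apiRoutes: [
      // POST /chat/:agentId now also accepts { resumeData, runId, requestContext }
      // in the request body; runId is required whenever resumeData is sent.
      chatRoute({ path: '/chat/:agentId' }),
      // includeTextStreamParts now defaults to true for workflow streams.
      workflowRoute({ path: '/api/workflows/:workflowId/stream' }),
      networkRoute({ path: '/network/:agentId' }),
    ],
  },
});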
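withMastra() is also new in this version: it wraps an AI SDK v5 language model with wrapLanguageModel and a processor middleware that runs working memory, message history, and semantic recall around ordinary generateText/streamText calls. A minimal sketch based on the signature shown above; the openai model, the LibSQLStore storage adapter, and the thread/resource ids are assumptions for illustration, not part of this package:

// Sketch only: any AI SDK v5 model and any Mastra storage implementation
// with the shape withMastra() expects should work the same way.
import { streamText } from 'ai';
import { openai } from '@ai-sdk/openai';
import { LibSQLStore } from '@mastra/libsql';
import { withMastra } from '@mastra/ai-sdk';

const storage = new LibSQLStore({ url: 'file:./memory.db' });

const model = withMastra(openai('gpt-4o-mini'), {
  memory: {
    storage,
    threadId: 'thread-123',   // illustrative ids
    resourceId: 'user-456',
    lastMessages: 10,         // enables the MessageHistory processor
    workingMemory: { enabled: true, template: '# User profile\n- name:' },
    // semanticRecall additionally needs { vector, embedder } (see SemanticRecall above)
  },
});

const result = streamText({ model, prompt: 'What did we talk about last time?' });
for await (const chunk of result.textStream) {
  process.stdout.write(chunk);
}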