@mastra/react 0.0.10 → 0.0.11-alpha.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,14 @@
  # @mastra/react-hooks
 
+ ## 0.0.11-alpha.0
+
+ ### Patch Changes
+
+ - Add tool call approval ([#8649](https://github.com/mastra-ai/mastra/pull/8649))
+
+ - Updated dependencies [[`5df9cce`](https://github.com/mastra-ai/mastra/commit/5df9cce1a753438413f64c11eeef8f845745c2a8), [`2060766`](https://github.com/mastra-ai/mastra/commit/20607667bf78ea104cca3e15dfb93ae0b62c9d18)]:
+ - @mastra/client-js@0.16.5-alpha.0
+
  ## 0.0.10
 
  ### Patch Changes
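The headline change is tool call approval: `useChat` now returns `approveToolCall`, `declineToolCall`, and `toolCallApprovals` alongside the existing API, and a new `requireToolApproval` model setting is forwarded to `agent.stream` (see the dist/index.cjs hunks below). A minimal, hypothetical consumer sketch follows; the names come from the hook's return object in this diff, while the `@mastra/react` import path, the placeholder agent id, and the client provider setup are assumptions not shown here.

```tsx
import { useChat } from '@mastra/react';

export function ChatPanel() {
  // 'my-agent' is a placeholder agent id; a MastraClient provider is assumed to be
  // configured higher in the tree (useChat calls useMastraClient internally).
  const { messages, sendMessage, isRunning, cancelRun, approveToolCall, declineToolCall, toolCallApprovals } =
    useChat({ agentId: 'my-agent' });

  return (
    <div>
      <p>{messages.length} messages</p>
      {/* approveToolCall / declineToolCall / toolCallApprovals are exercised in the
          approval-prompt sketch further down */}
      <button disabled={isRunning} onClick={() => sendMessage({ message: 'Book the flight' })}>
        Send
      </button>
      <button disabled={!isRunning} onClick={cancelRun}>
        Cancel
      </button>
    </div>
  );
}
```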
package/dist/index.cjs CHANGED
@@ -253,6 +253,7 @@ const toUIMessage = ({ chunk, conversation, metadata }) => {
  }
  ];
  }
+ case "tool-error":
  case "tool-result": {
  const lastMessage = result[result.length - 1];
  if (!lastMessage || lastMessage.role !== "assistant") return result;
@@ -263,14 +264,15 @@ const toUIMessage = ({ chunk, conversation, metadata }) => {
  if (toolPartIndex !== -1) {
  const toolPart = parts[toolPartIndex];
  if (toolPart.type === "dynamic-tool") {
- if (chunk.payload.isError) {
+ if (chunk.type === "tool-result" && chunk.payload.isError || chunk.type === "tool-error") {
+ const error = chunk.type === "tool-error" ? chunk.payload.error : chunk.payload.result;
  parts[toolPartIndex] = {
  type: "dynamic-tool",
  toolName: toolPart.toolName,
  toolCallId: toolPart.toolCallId,
  state: "output-error",
  input: toolPart.input,
- errorText: String(chunk.payload.result),
+ errorText: String(error),
  callProviderMetadata: chunk.payload.providerMetadata
  };
  } else {
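This hunk widens the error path so both failed `tool-result` chunks (`payload.isError`) and the new `tool-error` chunks produce a `dynamic-tool` part in the `output-error` state, taking the error text from `payload.error` or `payload.result` respectively. A sketch of rendering that part is below; the part typing is a simplified assumption for illustration, not the package's exported types.

```tsx
// Sketch: rendering the "output-error" dynamic-tool part built in the hunk above.
type DynamicToolPart = {
  type: 'dynamic-tool';
  toolName: string;
  toolCallId: string;
  state: string; // "output-error" when the tool call failed
  input: unknown;
  errorText?: string;
};

export function ToolPartView({ part }: { part: DynamicToolPart }) {
  if (part.state !== 'output-error') return null;
  // errorText is String(chunk.payload.error) for tool-error chunks and
  // String(chunk.payload.result) for tool-result chunks flagged with isError.
  return <pre>{`${part.toolName} failed: ${part.errorText}`}</pre>;
}
```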
@@ -399,6 +401,29 @@ const toUIMessage = ({ chunk, conversation, metadata }) => {
  }
  ];
  }
+ case "tool-call-approval": {
+ const lastMessage = result[result.length - 1];
+ if (!lastMessage || lastMessage.role !== "assistant") return result;
+ const lastRequireApprovalMetadata = lastMessage.metadata?.mode === "stream" ? lastMessage.metadata?.requireApprovalMetadata : {};
+ return [
+ ...result.slice(0, -1),
+ {
+ ...lastMessage,
+ metadata: {
+ ...lastMessage.metadata,
+ mode: "stream",
+ requireApprovalMetadata: {
+ ...lastRequireApprovalMetadata,
+ [chunk.payload.toolCallId]: {
+ toolCallId: chunk.payload.toolCallId,
+ toolName: chunk.payload.toolName,
+ args: chunk.payload.args
+ }
+ }
+ }
+ }
+ ];
+ }
  case "finish": {
  const lastMessage = result[result.length - 1];
  if (!lastMessage || lastMessage.role !== "assistant") return result;
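The new `tool-call-approval` case records each pending call on the last assistant message under `metadata.requireApprovalMetadata`, keyed by `toolCallId` and carrying `toolCallId`, `toolName`, and `args`. A sketch of reading that metadata to list pending approvals follows; the message typing is an assumption mirroring the shape written above.

```tsx
// Sketch: collecting pending approvals from an assistant message's metadata.
type RequireApprovalEntry = { toolCallId: string; toolName: string; args: unknown };

type StreamMetadata = {
  mode?: string;
  requireApprovalMetadata?: Record<string, RequireApprovalEntry>;
};

export function pendingApprovals(message: { role: string; metadata?: StreamMetadata }): RequireApprovalEntry[] {
  // Only streamed assistant messages carry requireApprovalMetadata in this diff.
  if (message.role !== 'assistant' || message.metadata?.mode !== 'stream') return [];
  return Object.values(message.metadata.requireApprovalMetadata ?? {});
}
```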
@@ -1137,9 +1162,12 @@ class AISdkNetworkTransformer {
  }
 
  const useChat = ({ agentId, initializeMessages }) => {
+ const _currentRunId = react.useRef(void 0);
+ const _onChunk = react.useRef(void 0);
  const [messages, setMessages] = react.useState(
  () => resolveInitialMessages(initializeMessages?.() || [])
  );
+ const [toolCallApprovals, setToolCallApprovals] = react.useState({});
  const baseClient = useMastraClient();
  const [isRunning, setIsRunning] = react.useState(false);
  const generate = async ({
@@ -1209,7 +1237,8 @@ const useChat = ({ agentId, initializeMessages }) => {
  topP,
  instructions,
  providerOptions,
- maxSteps
+ maxSteps,
+ requireToolApproval
  } = modelSettings || {};
  setIsRunning(true);
  const clientWithAbort = new clientJs.MastraClient({
@@ -1217,9 +1246,10 @@ const useChat = ({ agentId, initializeMessages }) => {
  abortSignal: signal
  });
  const agent = clientWithAbort.getAgent(agentId);
+ const runId = agentId;
  const response = await agent.stream({
  messages: coreUserMessages,
- runId: agentId,
+ runId,
  maxSteps,
  modelSettings: {
  frequencyPenalty,
@@ -1233,12 +1263,11 @@ const useChat = ({ agentId, initializeMessages }) => {
  instructions,
  runtimeContext,
  ...threadId ? { threadId, resourceId: agentId } : {},
- providerOptions
+ providerOptions,
+ requireToolApproval
  });
- if (!response.body) {
- setIsRunning(false);
- throw new Error("[Stream] No response body");
- }
+ _onChunk.current = onChunk;
+ _currentRunId.current = runId;
  await response.processDataStream({
  onChunk: async (chunk) => {
  setMessages((prev) => toUIMessage({ chunk, conversation: prev, metadata: { mode: "stream" } }));
@@ -1287,6 +1316,45 @@ const useChat = ({ agentId, initializeMessages }) => {
  });
  setIsRunning(false);
  };
+ const handleCancelRun = () => {
+ setIsRunning(false);
+ _currentRunId.current = void 0;
+ _onChunk.current = void 0;
+ };
+ const approveToolCall = async (toolCallId) => {
+ const onChunk = _onChunk.current;
+ const currentRunId = _currentRunId.current;
+ if (!currentRunId)
+ return console.info("[approveToolCall] approveToolCall can only be called after a stream has started");
+ setIsRunning(true);
+ setToolCallApprovals((prev) => ({ ...prev, [toolCallId]: { status: "approved" } }));
+ const agent = baseClient.getAgent(agentId);
+ const response = await agent.approveToolCall({ runId: currentRunId, toolCallId });
+ await response.processDataStream({
+ onChunk: async (chunk) => {
+ setMessages((prev) => toUIMessage({ chunk, conversation: prev, metadata: { mode: "stream" } }));
+ onChunk?.(chunk);
+ }
+ });
+ setIsRunning(false);
+ };
+ const declineToolCall = async (toolCallId) => {
+ const onChunk = _onChunk.current;
+ const currentRunId = _currentRunId.current;
+ if (!currentRunId)
+ return console.info("[declineToolCall] declineToolCall can only be called after a stream has started");
+ setIsRunning(true);
+ setToolCallApprovals((prev) => ({ ...prev, [toolCallId]: { status: "declined" } }));
+ const agent = baseClient.getAgent(agentId);
+ const response = await agent.declineToolCall({ runId: currentRunId, toolCallId });
+ await response.processDataStream({
+ onChunk: async (chunk) => {
+ setMessages((prev) => toUIMessage({ chunk, conversation: prev, metadata: { mode: "stream" } }));
+ onChunk?.(chunk);
+ }
+ });
+ setIsRunning(false);
+ };
  const sendMessage = async ({ mode = "stream", ...args }) => {
  const nextMessage = { role: "user", content: [{ type: "text", text: args.message }] };
  const messages2 = args.coreUserMessages ? [nextMessage, ...args.coreUserMessages] : [nextMessage];
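`approveToolCall` and `declineToolCall` resume the stored run (the run id is currently the `agentId`), record an `"approved"` or `"declined"` status in `toolCallApprovals`, and stream the continuation through the same `toUIMessage` pipeline. A minimal sketch of an approval prompt wired to these functions is below; the functions and the `toolCallApprovals` record come from the hook above, while the props shape and copy are illustrative assumptions.

```tsx
// Sketch: approve/decline buttons for a pending tool call.
type ApprovalPromptProps = {
  toolCallId: string;
  toolName: string;
  approveToolCall: (toolCallId: string) => Promise<void>;
  declineToolCall: (toolCallId: string) => Promise<void>;
  toolCallApprovals: Record<string, { status: 'approved' | 'declined' }>;
};

export function ApprovalPrompt({
  toolCallId,
  toolName,
  approveToolCall,
  declineToolCall,
  toolCallApprovals,
}: ApprovalPromptProps) {
  const status = toolCallApprovals[toolCallId]?.status;
  if (status) return <span>{`${toolName}: ${status}`}</span>; // already resolved locally
  return (
    <span>
      {`Allow ${toolName} to run?`}
      <button onClick={() => void approveToolCall(toolCallId)}>Approve</button>
      <button onClick={() => void declineToolCall(toolCallId)}>Decline</button>
    </span>
  );
}
```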
@@ -1304,7 +1372,10 @@ const useChat = ({ agentId, initializeMessages }) => {
  sendMessage,
  isRunning,
  messages,
- cancelRun: () => setIsRunning(false)
+ approveToolCall,
+ declineToolCall,
+ cancelRun: handleCancelRun,
+ toolCallApprovals
  };
  };