@mastra/client-js 0.16.4 → 0.16.5-alpha.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,16 @@
  # @mastra/client-js

+ ## 0.16.5-alpha.0
+
+ ### Patch Changes
+
+ - Add tool call approval ([#8649](https://github.com/mastra-ai/mastra/pull/8649))
+
+ - Fix error handling and serialization in agent streaming to ensure errors are consistently exposed and preserved. ([#9192](https://github.com/mastra-ai/mastra/pull/9192))
+
+ - Updated dependencies [[`f743dbb`](https://github.com/mastra-ai/mastra/commit/f743dbb8b40d1627b5c10c0e6fc154f4ebb6e394), [`5df9cce`](https://github.com/mastra-ai/mastra/commit/5df9cce1a753438413f64c11eeef8f845745c2a8), [`2060766`](https://github.com/mastra-ai/mastra/commit/20607667bf78ea104cca3e15dfb93ae0b62c9d18), [`2c4438b`](https://github.com/mastra-ai/mastra/commit/2c4438b87817ab7eed818c7990fef010475af1a3)]:
+ - @mastra/core@0.23.0-alpha.0
+
  ## 0.16.4

  ### Patch Changes
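For context on the "Add tool call approval" entry, the dist diff below introduces `approveToolCall` and `declineToolCall` on the `Agent` resource; each posts to a dedicated route and returns a streamable `Response`, mirroring the regular streaming call. A minimal usage sketch, assuming the client exposes `getAgent()` and leaving the approval params opaque (their exact shape is not shown in this diff):

```js
import { MastraClient } from "@mastra/client-js";

const client = new MastraClient({ baseUrl: "http://localhost:4111" });
const agent = client.getAgent("my-agent"); // agent id is illustrative

// `params` would carry whatever the server needs to resume the suspended
// tool call (e.g. a run / tool-call identifier); its shape is an assumption here.
async function resolveToolCall(approved, params) {
  const response = approved
    ? await agent.approveToolCall(params)
    : await agent.declineToolCall(params);

  // Both helpers return a Response whose body can be consumed chunk by chunk.
  await response.processDataStream({
    onChunk: async (chunk) => {
      console.log(chunk.type, chunk.payload);
    },
  });
}
```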
package/dist/index.cjs CHANGED
@@ -2,6 +2,7 @@

  var uiUtils = require('@ai-sdk/ui-utils');
  var uuid = require('@lukeed/uuid');
+ var error = require('@mastra/core/error');
  var runtimeContext = require('@mastra/core/runtime-context');
  var isVercelTool = require('@mastra/core/tools/is-vercel-tool');
  var zod = require('zod');
@@ -100,11 +101,15 @@ async function sharedProcessMastraStream({
  console.info("\u{1F3C1} Stream finished");
  return;
  }
+ let json;
  try {
- const json = JSON.parse(data);
- await onChunk(json);
+ json = JSON.parse(data);
  } catch (error) {
  console.error("\u274C JSON parse error:", error, "Data:", data);
+ continue;
+ }
+ if (json) {
+ await onChunk(json);
  }
  }
  }
@@ -951,7 +956,10 @@ var Agent = class extends BaseResource {
  break;
  }
  case "error": {
- throw new Error(chunk.payload.error);
+ throw error.getErrorFromUnknown(chunk.payload.error, {
+ fallbackMessage: "Unknown error in stream",
+ supportSerialization: false
+ });
  }
  case "data": {
  data.push(...chunk.payload.data);
@@ -978,8 +986,8 @@ var Agent = class extends BaseResource {
  });
  onFinish?.({ message, finishReason, usage });
  }
- async processStreamResponse(processedParams, writable) {
- const response = await this.request(`/api/agents/${this.agentId}/stream`, {
+ async processStreamResponse(processedParams, writable, route = "stream") {
+ const response = await this.request(`/api/agents/${this.agentId}/${route}`, {
  method: "POST",
  body: processedParams,
  stream: true
@@ -1164,6 +1172,42 @@ var Agent = class extends BaseResource {
  };
  return streamResponse;
  }
+ async approveToolCall(params) {
+ const { readable, writable } = new TransformStream();
+ const response = await this.processStreamResponse(params, writable, "approve-tool-call");
+ const streamResponse = new Response(readable, {
+ status: response.status,
+ statusText: response.statusText,
+ headers: response.headers
+ });
+ streamResponse.processDataStream = async ({
+ onChunk
+ }) => {
+ await processMastraStream({
+ stream: streamResponse.body,
+ onChunk
+ });
+ };
+ return streamResponse;
+ }
+ async declineToolCall(params) {
+ const { readable, writable } = new TransformStream();
+ const response = await this.processStreamResponse(params, writable, "decline-tool-call");
+ const streamResponse = new Response(readable, {
+ status: response.status,
+ statusText: response.statusText,
+ headers: response.headers
+ });
+ streamResponse.processDataStream = async ({
+ onChunk
+ }) => {
+ await processMastraStream({
+ stream: streamResponse.body,
+ onChunk
+ });
+ };
+ return streamResponse;
+ }
  /**
  * Processes the stream response and handles tool calls
  */
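The error-handling hunks above cover the second changelog entry: a malformed SSE data line in `sharedProcessMastraStream` is now logged and skipped instead of short-circuiting chunk delivery, and an `error` chunk is rehydrated through `getErrorFromUnknown` from `@mastra/core/error` rather than being flattened by `new Error(object)` into "[object Object]". A hedged consumer-side sketch (the `stream()` parameter shape and `getAgent()` are assumptions based on the public client API; errors raised while the client processes the stream propagate to the awaiting caller):

```js
import { MastraClient } from "@mastra/client-js";

const client = new MastraClient({ baseUrl: "http://localhost:4111" });
const agent = client.getAgent("my-agent");

async function run() {
  try {
    const response = await agent.stream({
      messages: [{ role: "user", content: "What is the weather in Paris?" }],
    });

    await response.processDataStream({
      onChunk: async (chunk) => {
        console.log(chunk.type, chunk.payload);
      },
    });
  } catch (err) {
    // With 0.16.5-alpha.0, a structured error payload from an "error" chunk
    // keeps its message and properties instead of collapsing to "[object Object]".
    console.error("stream failed:", err);
  }
}

run();
```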