@mastra/client-js 0.10.22-alpha.3 → 0.10.22-alpha.4

This diff compares the contents of publicly available package versions as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registries.
@@ -1,5 +1,5 @@
 
- > @mastra/client-js@0.10.22-alpha.3 build /home/runner/work/mastra/mastra/client-sdks/client-js
+ > @mastra/client-js@0.10.22-alpha.4 build /home/runner/work/mastra/mastra/client-sdks/client-js
  > tsup --config tsup.config.ts
 
  CLI Building entry: src/index.ts
@@ -10,9 +10,9 @@
  CLI Cleaning output folder
  ESM Build start
  CJS Build start
- ESM dist/index.js 76.44 KB
- ESM dist/index.js.map 164.03 KB
- ESM ⚡️ Build success in 2769ms
- CJS dist/index.cjs 76.75 KB
- CJS dist/index.cjs.map 164.22 KB
- CJS ⚡️ Build success in 2769ms
+ CJS dist/index.cjs 93.90 KB
+ CJS dist/index.cjs.map 201.12 KB
+ CJS ⚡️ Build success in 3435ms
+ ESM dist/index.js 93.57 KB
+ ESM dist/index.js.map 200.93 KB
+ ESM ⚡️ Build success in 3441ms
package/CHANGELOG.md CHANGED
@@ -1,5 +1,29 @@
  # @mastra/client-js
 
+ ## 0.10.22-alpha.4
+
+ ### Patch Changes
+
+ - 0a7f675: Client JS vnext methods
+ - 1d59515: Add options to playground based on modelVersion
+ - 195eabb: Process Mastra Stream
+ - Updated dependencies [0a7f675]
+ - Updated dependencies [12cae67]
+ - Updated dependencies [5a37d0c]
+ - Updated dependencies [4bde0cb]
+ - Updated dependencies [1a80071]
+ - Updated dependencies [36a3be8]
+ - Updated dependencies [361757b]
+ - Updated dependencies [2bb9955]
+ - Updated dependencies [2454423]
+ - Updated dependencies [a44d91e]
+ - Updated dependencies [dfb91e9]
+ - Updated dependencies [a741dde]
+ - Updated dependencies [7cb3fc0]
+ - Updated dependencies [195eabb]
+ - Updated dependencies [b78b95b]
+   - @mastra/core@0.14.0-alpha.4
+
  ## 0.10.22-alpha.3
 
  ### Patch Changes
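
The "Client JS vnext methods" and "Process Mastra Stream" entries above surface as the new `Agent.generateVNext` / `Agent.streamVNext` methods and the `processMastraStream` helper in the dist diff below. A minimal consumption sketch follows; the `MastraClient` setup, `baseUrl`, and agent id are placeholders, while `streamVNext` and `processDataStream({ onChunk })` are taken from this diff:

```ts
import { MastraClient } from "@mastra/client-js";

// Assumed wiring: baseUrl and agent id are illustrative, not part of this diff.
const client = new MastraClient({ baseUrl: "http://localhost:4111" });
const agent = client.getAgent("weather-agent");

// streamVNext POSTs to /api/agents/:agentId/stream/vnext and returns a Response
// whose processDataStream() delegates to the new processMastraStream helper.
const stream = await agent.streamVNext({ messages: "What is the weather in Paris?" });

await stream.processDataStream({
  onChunk: async (chunk) => {
    // Chunk shapes mirror the cases handled in processChatResponse_vNext below.
    if (chunk.type === "text-delta") {
      process.stdout.write(chunk.payload.text);
    }
  },
});
```
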
package/dist/index.cjs CHANGED
@@ -315,6 +315,94 @@ function parseClientRuntimeContext(runtimeContext$1) {
  }
  return void 0;
  }
+
+ // src/utils/process-mastra-stream.ts
+ async function processMastraStream({
+ stream,
+ onChunk
+ }) {
+ const reader = stream.getReader();
+ const decoder = new TextDecoder();
+ let buffer = "";
+ try {
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done) break;
+ buffer += decoder.decode(value, { stream: true });
+ const lines = buffer.split("\n\n");
+ buffer = lines.pop() || "";
+ for (const line of lines) {
+ if (line.startsWith("data: ")) {
+ const data = line.slice(6);
+ if (data === "[DONE]") {
+ console.log("\u{1F3C1} Stream finished");
+ return;
+ }
+ try {
+ const json = JSON.parse(data);
+ await onChunk(json);
+ } catch (error) {
+ console.error("\u274C JSON parse error:", error, "Data:", data);
+ }
+ }
+ }
+ }
+ } finally {
+ reader.releaseLock();
+ }
+ }
+
+ // src/resources/agent.ts
+ async function executeToolCallAndRespond({
+ response,
+ params,
+ runId,
+ resourceId,
+ threadId,
+ runtimeContext,
+ respondFn
+ }) {
+ if (response.finishReason === "tool-calls") {
+ const toolCalls = response.toolCalls;
+ if (!toolCalls || !Array.isArray(toolCalls)) {
+ return response;
+ }
+ for (const toolCall of toolCalls) {
+ const clientTool = params.clientTools?.[toolCall.toolName];
+ if (clientTool && clientTool.execute) {
+ const result = await clientTool.execute(
+ { context: toolCall?.args, runId, resourceId, threadId, runtimeContext },
+ {
+ messages: response.messages,
+ toolCallId: toolCall?.toolCallId
+ }
+ );
+ const updatedMessages = [
+ {
+ role: "user",
+ content: params.messages
+ },
+ ...response.response.messages,
+ {
+ role: "tool",
+ content: [
+ {
+ type: "tool-result",
+ toolCallId: toolCall.toolCallId,
+ toolName: toolCall.toolName,
+ result
+ }
+ ]
+ }
+ ];
+ return respondFn({
+ ...params,
+ messages: updatedMessages
+ });
+ }
+ }
+ }
+ }
  var AgentVoice = class extends BaseResource {
  constructor(options, agentId) {
  super(options);
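
For reference, the `processMastraStream` helper added above expects SSE-style frames: `data: <json>` records separated by blank lines and terminated by `data: [DONE]`. A minimal sketch of that wire format, assuming direct access to the helper for illustration (it is bundled internally and may not be exported):

```ts
// Frame contents are illustrative; the framing ("data: " prefix, "\n\n" separators,
// "[DONE]" terminator) matches the parsing logic in processMastraStream above.
const encoder = new TextEncoder();
const stream = new ReadableStream<Uint8Array>({
  start(controller) {
    controller.enqueue(encoder.encode('data: {"type":"text-delta","payload":{"text":"Hel"}}\n\n'));
    controller.enqueue(encoder.encode('data: {"type":"text-delta","payload":{"text":"lo"}}\n\n'));
    controller.enqueue(encoder.encode("data: [DONE]\n\n"));
    controller.close();
  },
});

await processMastraStream({
  stream,
  onChunk: async (chunk) => console.log(chunk.type, chunk.payload),
});
```
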
@@ -441,6 +529,34 @@ var Agent = class extends BaseResource {
  }
  return response;
  }
+ async generateVNext(params) {
+ const processedParams = {
+ ...params,
+ output: params.output ? zodToJsonSchema(params.output) : void 0,
+ runtimeContext: parseClientRuntimeContext(params.runtimeContext),
+ clientTools: processClientTools(params.clientTools)
+ };
+ const { runId, resourceId, threadId, runtimeContext } = processedParams;
+ const response = await this.request(
+ `/api/agents/${this.agentId}/generate/vnext`,
+ {
+ method: "POST",
+ body: processedParams
+ }
+ );
+ if (response.finishReason === "tool-calls") {
+ return executeToolCallAndRespond({
+ response,
+ params,
+ runId,
+ resourceId,
+ threadId,
+ runtimeContext,
+ respondFn: this.generateVNext.bind(this)
+ });
+ }
+ return response;
+ }
  async processChatResponse({
  stream,
  update,
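
`generateVNext` above hands off to `executeToolCallAndRespond` when the server reports `finishReason === "tool-calls"`: the matching entry in `params.clientTools` has its `execute` run locally, a `tool`-role result message is appended, and `generateVNext` is invoked again with the updated messages. A sketch from the caller's side, reusing the `agent` handle from the earlier sketch; the tool name, its definition shape, and the prompt are assumptions, while the keyed-by-name lookup and the `execute({ context, runId, resourceId, threadId, runtimeContext }, { messages, toolCallId })` call match the code above:

```ts
// Hypothetical client-side tool; only the clientTools[toolName].execute contract
// is taken from executeToolCallAndRespond above.
const result = await agent.generateVNext({
  messages: "Convert 21 degrees Celsius to Fahrenheit",
  clientTools: {
    convertTemperature: {
      description: "Convert Celsius to Fahrenheit",
      execute: async ({ context }) => ({ fahrenheit: (context.celsius * 9) / 5 + 32 }),
    },
  },
});

// After the tool round trip completes, `result` is the follow-up generation,
// not the intermediate "tool-calls" response.
console.log(result);
```
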
@@ -731,6 +847,392 @@ var Agent = class extends BaseResource {
  };
  return streamResponse;
  }
+ async processChatResponse_vNext({
+ stream,
+ update,
+ onToolCall,
+ onFinish,
+ getCurrentDate = () => /* @__PURE__ */ new Date(),
+ lastMessage
+ }) {
+ const replaceLastMessage = lastMessage?.role === "assistant";
+ let step = replaceLastMessage ? 1 + // find max step in existing tool invocations:
+ (lastMessage.toolInvocations?.reduce((max, toolInvocation) => {
+ return Math.max(max, toolInvocation.step ?? 0);
+ }, 0) ?? 0) : 0;
+ const message = replaceLastMessage ? structuredClone(lastMessage) : {
+ id: uuid.v4(),
+ createdAt: getCurrentDate(),
+ role: "assistant",
+ content: "",
+ parts: []
+ };
+ let currentTextPart = void 0;
+ let currentReasoningPart = void 0;
+ let currentReasoningTextDetail = void 0;
+ function updateToolInvocationPart(toolCallId, invocation) {
+ const part = message.parts.find(
+ (part2) => part2.type === "tool-invocation" && part2.toolInvocation.toolCallId === toolCallId
+ );
+ if (part != null) {
+ part.toolInvocation = invocation;
+ } else {
+ message.parts.push({
+ type: "tool-invocation",
+ toolInvocation: invocation
+ });
+ }
+ }
+ const data = [];
+ let messageAnnotations = replaceLastMessage ? lastMessage?.annotations : void 0;
+ const partialToolCalls = {};
+ let usage = {
+ completionTokens: NaN,
+ promptTokens: NaN,
+ totalTokens: NaN
+ };
+ let finishReason = "unknown";
+ function execUpdate() {
+ const copiedData = [...data];
+ if (messageAnnotations?.length) {
+ message.annotations = messageAnnotations;
+ }
+ const copiedMessage = {
+ // deep copy the message to ensure that deep changes (msg attachments) are updated
+ // with SolidJS. SolidJS uses referential integration of sub-objects to detect changes.
+ ...structuredClone(message),
+ // add a revision id to ensure that the message is updated with SWR. SWR uses a
+ // hashing approach by default to detect changes, but it only works for shallow
+ // changes. This is why we need to add a revision id to ensure that the message
+ // is updated with SWR (without it, the changes get stuck in SWR and are not
+ // forwarded to rendering):
+ revisionId: uuid.v4()
+ };
+ update({
+ message: copiedMessage,
+ data: copiedData,
+ replaceLastMessage
+ });
+ }
+ await processMastraStream({
+ stream,
+ // TODO: casting as any here because the stream types were all typed as any before in core.
+ // but this is completely wrong and this fn is probably broken. Remove ":any" and you'll see a bunch of type errors
+ onChunk: async (chunk) => {
+ switch (chunk.type) {
+ case "step-start": {
+ if (!replaceLastMessage) {
+ message.id = chunk.payload.messageId;
+ }
+ message.parts.push({ type: "step-start" });
+ execUpdate();
+ break;
+ }
+ case "text-delta": {
+ if (currentTextPart == null) {
+ currentTextPart = {
+ type: "text",
+ text: chunk.payload.text
+ };
+ message.parts.push(currentTextPart);
+ } else {
+ currentTextPart.text += chunk.payload.text;
+ }
+ message.content += chunk.payload.text;
+ execUpdate();
+ break;
+ }
+ case "reasoning-delta": {
+ if (currentReasoningTextDetail == null) {
+ currentReasoningTextDetail = { type: "text", text: chunk.payload.text };
+ if (currentReasoningPart != null) {
+ currentReasoningPart.details.push(currentReasoningTextDetail);
+ }
+ } else {
+ currentReasoningTextDetail.text += chunk.payload.text;
+ }
+ if (currentReasoningPart == null) {
+ currentReasoningPart = {
+ type: "reasoning",
+ reasoning: chunk.payload.text,
+ details: [currentReasoningTextDetail]
+ };
+ message.parts.push(currentReasoningPart);
+ } else {
+ currentReasoningPart.reasoning += chunk.payload.text;
+ }
+ message.reasoning = (message.reasoning ?? "") + chunk.payload.text;
+ execUpdate();
+ break;
+ }
+ case "file": {
+ message.parts.push({
+ type: "file",
+ mimeType: chunk.payload.mimeType,
+ data: chunk.payload.data
+ });
+ execUpdate();
+ break;
+ }
+ case "source": {
+ message.parts.push({
+ type: "source",
+ source: chunk.payload.source
+ });
+ execUpdate();
+ break;
+ }
+ case "tool-call": {
+ const invocation = {
+ state: "call",
+ step,
+ ...chunk.payload
+ };
+ if (partialToolCalls[chunk.payload.toolCallId] != null) {
+ message.toolInvocations[partialToolCalls[chunk.payload.toolCallId].index] = invocation;
+ } else {
+ if (message.toolInvocations == null) {
+ message.toolInvocations = [];
+ }
+ message.toolInvocations.push(invocation);
+ }
+ updateToolInvocationPart(chunk.payload.toolCallId, invocation);
+ execUpdate();
+ if (onToolCall) {
+ const result = await onToolCall({ toolCall: chunk.payload });
+ if (result != null) {
+ const invocation2 = {
+ state: "result",
+ step,
+ ...chunk.payload,
+ result
+ };
+ message.toolInvocations[message.toolInvocations.length - 1] = invocation2;
+ updateToolInvocationPart(chunk.payload.toolCallId, invocation2);
+ execUpdate();
+ }
+ }
+ }
+ case "tool-call-input-streaming-start": {
+ if (message.toolInvocations == null) {
+ message.toolInvocations = [];
+ }
+ partialToolCalls[chunk.payload.toolCallId] = {
+ text: "",
+ step,
+ toolName: chunk.payload.toolName,
+ index: message.toolInvocations.length
+ };
+ const invocation = {
+ state: "partial-call",
+ step,
+ toolCallId: chunk.payload.toolCallId,
+ toolName: chunk.payload.toolName,
+ args: void 0
+ };
+ message.toolInvocations.push(invocation);
+ updateToolInvocationPart(chunk.payload.toolCallId, invocation);
+ execUpdate();
+ break;
+ }
+ case "tool-call-delta": {
+ const partialToolCall = partialToolCalls[chunk.payload.toolCallId];
+ partialToolCall.text += chunk.payload.argsTextDelta;
+ const { value: partialArgs } = uiUtils.parsePartialJson(partialToolCall.text);
+ const invocation = {
+ state: "partial-call",
+ step: partialToolCall.step,
+ toolCallId: chunk.payload.toolCallId,
+ toolName: partialToolCall.toolName,
+ args: partialArgs
+ };
+ message.toolInvocations[partialToolCall.index] = invocation;
+ updateToolInvocationPart(chunk.payload.toolCallId, invocation);
+ execUpdate();
+ break;
+ }
+ case "tool-result": {
+ const toolInvocations = message.toolInvocations;
+ if (toolInvocations == null) {
+ throw new Error("tool_result must be preceded by a tool_call");
+ }
+ const toolInvocationIndex = toolInvocations.findIndex(
+ (invocation2) => invocation2.toolCallId === chunk.payload.toolCallId
+ );
+ if (toolInvocationIndex === -1) {
+ throw new Error("tool_result must be preceded by a tool_call with the same toolCallId");
+ }
+ const invocation = {
+ ...toolInvocations[toolInvocationIndex],
+ state: "result",
+ ...chunk.payload
+ };
+ toolInvocations[toolInvocationIndex] = invocation;
+ updateToolInvocationPart(chunk.payload.toolCallId, invocation);
+ execUpdate();
+ break;
+ }
+ case "error": {
+ throw new Error(chunk.payload.error);
+ }
+ case "data": {
+ data.push(...chunk.payload.data);
+ execUpdate();
+ break;
+ }
+ case "step-finish": {
+ step += 1;
+ currentTextPart = chunk.payload.isContinued ? currentTextPart : void 0;
+ currentReasoningPart = void 0;
+ currentReasoningTextDetail = void 0;
+ execUpdate();
+ break;
+ }
+ case "finish": {
+ finishReason = chunk.payload.finishReason;
+ if (chunk.payload.usage != null) {
+ usage = chunk.payload.usage;
+ }
+ break;
+ }
+ }
+ }
+ });
+ onFinish?.({ message, finishReason, usage });
+ }
+ async processStreamResponse_vNext(processedParams, writable) {
+ const response = await this.request(`/api/agents/${this.agentId}/stream/vnext`, {
+ method: "POST",
+ body: processedParams,
+ stream: true
+ });
+ if (!response.body) {
+ throw new Error("No response body");
+ }
+ try {
+ let toolCalls = [];
+ let messages = [];
+ const [streamForWritable, streamForProcessing] = response.body.tee();
+ streamForWritable.pipeTo(writable, {
+ preventClose: true
+ }).catch((error) => {
+ console.error("Error piping to writable stream:", error);
+ });
+ this.processChatResponse_vNext({
+ stream: streamForProcessing,
+ update: ({ message }) => {
+ const existingIndex = messages.findIndex((m) => m.id === message.id);
+ if (existingIndex !== -1) {
+ messages[existingIndex] = message;
+ } else {
+ messages.push(message);
+ }
+ },
+ onFinish: async ({ finishReason, message }) => {
+ if (finishReason === "tool-calls") {
+ const toolCall = [...message?.parts ?? []].reverse().find((part) => part.type === "tool-invocation")?.toolInvocation;
+ if (toolCall) {
+ toolCalls.push(toolCall);
+ }
+ for (const toolCall2 of toolCalls) {
+ const clientTool = processedParams.clientTools?.[toolCall2.toolName];
+ if (clientTool && clientTool.execute) {
+ const result = await clientTool.execute(
+ {
+ context: toolCall2?.args,
+ runId: processedParams.runId,
+ resourceId: processedParams.resourceId,
+ threadId: processedParams.threadId,
+ runtimeContext: processedParams.runtimeContext
+ },
+ {
+ messages: response.messages,
+ toolCallId: toolCall2?.toolCallId
+ }
+ );
+ const lastMessage = JSON.parse(JSON.stringify(messages[messages.length - 1]));
+ const toolInvocationPart = lastMessage?.parts?.find(
+ (part) => part.type === "tool-invocation" && part.toolInvocation?.toolCallId === toolCall2.toolCallId
+ );
+ if (toolInvocationPart) {
+ toolInvocationPart.toolInvocation = {
+ ...toolInvocationPart.toolInvocation,
+ state: "result",
+ result
+ };
+ }
+ const toolInvocation = lastMessage?.toolInvocations?.find(
+ (toolInvocation2) => toolInvocation2.toolCallId === toolCall2.toolCallId
+ );
+ if (toolInvocation) {
+ toolInvocation.state = "result";
+ toolInvocation.result = result;
+ }
+ const writer = writable.getWriter();
+ try {
+ await writer.write(
+ new TextEncoder().encode(
+ "a:" + JSON.stringify({
+ toolCallId: toolCall2.toolCallId,
+ result
+ }) + "\n"
+ )
+ );
+ } finally {
+ writer.releaseLock();
+ }
+ const originalMessages = processedParams.messages;
+ const messageArray = Array.isArray(originalMessages) ? originalMessages : [originalMessages];
+ this.processStreamResponse_vNext(
+ {
+ ...processedParams,
+ messages: [...messageArray, ...messages.filter((m) => m.id !== lastMessage.id), lastMessage]
+ },
+ writable
+ ).catch((error) => {
+ console.error("Error processing stream response:", error);
+ });
+ }
+ }
+ } else {
+ setTimeout(() => {
+ writable.close();
+ }, 0);
+ }
+ },
+ lastMessage: void 0
+ }).catch((error) => {
+ console.error("Error processing stream response:", error);
+ });
+ } catch (error) {
+ console.error("Error processing stream response:", error);
+ }
+ return response;
+ }
+ async streamVNext(params) {
+ const processedParams = {
+ ...params,
+ output: params.output ? zodToJsonSchema(params.output) : void 0,
+ runtimeContext: parseClientRuntimeContext(params.runtimeContext),
+ clientTools: processClientTools(params.clientTools)
+ };
+ const { readable, writable } = new TransformStream();
+ const response = await this.processStreamResponse_vNext(processedParams, writable);
+ const streamResponse = new Response(readable, {
+ status: response.status,
+ statusText: response.statusText,
+ headers: response.headers
+ });
+ streamResponse.processDataStream = async ({
+ onChunk
+ }) => {
+ await processMastraStream({
+ stream: streamResponse.body,
+ onChunk
+ });
+ };
+ return streamResponse;
+ }
  /**
  * Processes the stream response and handles tool calls
  */