@mastra/client-js 0.12.1-alpha.0 → 0.12.1-alpha.1

This diff shows the changes between publicly released versions of the package as published to their respective registries. It is provided for informational purposes only.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,18 @@
  # @mastra/client-js

+ ## 0.12.1-alpha.1
+
+ ### Patch Changes
+
+ - Allow streamVNext and generateVNext to use structuredOutputs from the MastraClient ([#7597](https://github.com/mastra-ai/mastra/pull/7597))
+
+ - Use workflow streamVNext in playground ([#7575](https://github.com/mastra-ai/mastra/pull/7575))
+
+ - Make agent streamVNext work with clientTools in client js ([#7577](https://github.com/mastra-ai/mastra/pull/7577))
+
+ - Updated dependencies [[`47b6dc9`](https://github.com/mastra-ai/mastra/commit/47b6dc94f4976d4f3d3882e8f19eb365bbc5976c), [`565d65f`](https://github.com/mastra-ai/mastra/commit/565d65fc16314a99f081975ec92f2636dff0c86d), [`4da3d68`](https://github.com/mastra-ai/mastra/commit/4da3d68a778e5c4d5a17351ef223289fe2f45a45), [`0b0bbb2`](https://github.com/mastra-ai/mastra/commit/0b0bbb24f4198ead69792e92b68a350f52b45cf3), [`d951f41`](https://github.com/mastra-ai/mastra/commit/d951f41771e4e5da8da4b9f870949f9509e38756), [`8049e2e`](https://github.com/mastra-ai/mastra/commit/8049e2e8cce80a00353c64894c62b695ac34e35e)]:
+   - @mastra/core@0.16.1-alpha.1
+
  ## 0.12.1-alpha.0

  ### Patch Changes
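For context, a hedged usage sketch of the first patch entry (structured output via the client). The base URL, agent id, and prompt are illustrative; only the fact that `structuredOutput.schema` accepts a Zod schema that the client serializes is taken from this diff.

```ts
import { MastraClient } from "@mastra/client-js";
import { z } from "zod";

// Illustrative values: a local dev server and a hypothetical "weatherAgent".
const client = new MastraClient({ baseUrl: "http://localhost:4111" });
const agent = client.getAgent("weatherAgent");

async function main() {
  // As of 0.12.1-alpha.1, structuredOutput can be passed from the client;
  // its Zod schema is converted to JSON Schema before the request is sent.
  const result = await agent.generateVNext({
    messages: "What's the weather in Paris?",
    structuredOutput: {
      schema: z.object({ city: z.string(), temperatureC: z.number() }),
    },
  });
  console.log(result);
}

main().catch(console.error);
```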
package/dist/index.cjs CHANGED
@@ -364,7 +364,11 @@ var Agent = class extends BaseResource {
  ...params,
  output: params.output ? zodToJsonSchema(params.output) : void 0,
  runtimeContext: parseClientRuntimeContext(params.runtimeContext),
- clientTools: processClientTools(params.clientTools)
+ clientTools: processClientTools(params.clientTools),
+ structuredOutput: params.structuredOutput ? {
+ ...params.structuredOutput,
+ schema: zodToJsonSchema(params.structuredOutput.schema)
+ } : void 0
  };
  const { runId, resourceId, threadId, runtimeContext } = processedParams;
  const response = await this.request(
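The new `structuredOutput` branch mirrors the existing `output` handling: only the `schema` field is transformed, everything else on the object is forwarded as-is. A minimal sketch of that conversion, assuming the standalone `zod-to-json-schema` package behaves like the bundled `zodToJsonSchema` helper:

```ts
import { z } from "zod";
import { zodToJsonSchema } from "zod-to-json-schema";

// Hypothetical schema standing in for params.structuredOutput.schema.
const answerSchema = z.object({ city: z.string(), temperatureC: z.number() });

// Only the schema is converted to JSON Schema; any sibling options on
// structuredOutput are spread through unchanged, as in the diff above.
const wireSchema = zodToJsonSchema(answerSchema);
console.log(JSON.stringify(wireSchema, null, 2));
```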
@@ -869,7 +873,7 @@ var Agent = class extends BaseResource {
  step,
  toolCallId: chunk.payload.toolCallId,
  toolName: chunk.payload.toolName,
- args: void 0
+ args: chunk.payload.args
  };
  message.toolInvocations.push(invocation);
  updateToolInvocationPart(chunk.payload.toolCallId, invocation);
@@ -923,14 +927,14 @@ var Agent = class extends BaseResource {
  }
  case "step-finish": {
  step += 1;
- currentTextPart = chunk.payload.isContinued ? currentTextPart : void 0;
+ currentTextPart = chunk.payload.stepResult.isContinued ? currentTextPart : void 0;
  currentReasoningPart = void 0;
  currentReasoningTextDetail = void 0;
  execUpdate();
  break;
  }
  case "finish": {
- finishReason = chunk.payload.finishReason;
+ finishReason = chunk.payload.stepResult.reason;
  if (chunk.payload.usage != null) {
  usage = chunk.payload.usage;
  }
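This hunk reads the continuation flag and finish reason from a nested `stepResult` object instead of the payload root. The shape below is an assumption inferred only from the fields this diff touches, not an authoritative type from @mastra/core:

```ts
// Assumed, partial payload shapes for the vNext chunks handled above.
interface StepFinishPayload {
  stepResult: { isContinued: boolean };
}

interface FinishPayload {
  stepResult: { reason: string };
  usage?: unknown; // still read from the payload root; shape not shown in this diff
}
```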
@@ -954,9 +958,28 @@ var Agent = class extends BaseResource {
  let toolCalls = [];
  let messages = [];
  const [streamForWritable, streamForProcessing] = response.body.tee();
- streamForWritable.pipeTo(writable, {
- preventClose: true
- }).catch((error) => {
+ streamForWritable.pipeTo(
+ new WritableStream({
+ async write(chunk) {
+ try {
+ const text = new TextDecoder().decode(chunk);
+ if (text.includes("[DONE]")) {
+ return;
+ }
+ } catch {
+ }
+ const writer = writable.getWriter();
+ try {
+ await writer.write(chunk);
+ } finally {
+ writer.releaseLock();
+ }
+ }
+ }),
+ {
+ preventClose: true
+ }
+ ).catch((error) => {
  console.error("Error piping to writable stream:", error);
  });
  this.processChatResponse_vNext({
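The rewritten pipe wraps the caller's writable so that chunks carrying the `[DONE]` terminator never reach it, while `preventClose: true` keeps the destination open for the follow-up writes made after client tools resolve. A standalone sketch of the same pattern, with illustrative names only:

```ts
// Forward raw bytes from one branch of a tee'd stream to a caller-supplied
// writable, dropping any chunk that contains the "[DONE]" terminator.
function forwardWithoutDone(
  source: ReadableStream<Uint8Array>,
  destination: WritableStream<Uint8Array>,
): Promise<void> {
  return source.pipeTo(
    new WritableStream<Uint8Array>({
      async write(chunk) {
        const text = new TextDecoder().decode(chunk);
        if (text.includes("[DONE]")) return; // skip the terminator marker
        const writer = destination.getWriter();
        try {
          await writer.write(chunk);
        } finally {
          writer.releaseLock(); // release so later writers can acquire the lock
        }
      },
    }),
    { preventClose: true }, // leave the destination open for subsequent writes
  );
}
```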
@@ -993,7 +1016,8 @@ var Agent = class extends BaseResource {
  toolCallId: toolCall2?.toolCallId
  }
  );
- const lastMessage = JSON.parse(JSON.stringify(messages[messages.length - 1]));
+ const lastMessageRaw = messages[messages.length - 1];
+ const lastMessage = lastMessageRaw != null ? JSON.parse(JSON.stringify(lastMessageRaw)) : void 0;
  const toolInvocationPart = lastMessage?.parts?.find(
  (part) => part.type === "tool-invocation" && part.toolInvocation?.toolCallId === toolCall2.toolCallId
  );
@@ -1011,25 +1035,13 @@ var Agent = class extends BaseResource {
  toolInvocation.state = "result";
  toolInvocation.result = result;
  }
- const writer = writable.getWriter();
- try {
- await writer.write(
- new TextEncoder().encode(
- "a:" + JSON.stringify({
- toolCallId: toolCall2.toolCallId,
- result
- }) + "\n"
- )
- );
- } finally {
- writer.releaseLock();
- }
  const originalMessages = processedParams.messages;
  const messageArray = Array.isArray(originalMessages) ? originalMessages : [originalMessages];
+ const updatedMessages = lastMessage != null ? [...messageArray, ...messages.filter((m) => m.id !== lastMessage.id), lastMessage] : [...messageArray, ...messages];
  this.processStreamResponse_vNext(
  {
  ...processedParams,
- messages: [...messageArray, ...messages.filter((m) => m.id !== lastMessage.id), lastMessage]
+ messages: updatedMessages
  },
  writable
  ).catch((error) => {
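With the manual `a:`-prefixed tool-result write removed, the tool result now travels only through the re-invocation of `processStreamResponse_vNext`, using a null-safe merge of the original and streamed messages. That merge can be restated on its own (types simplified, names illustrative):

```ts
interface MinimalMessage {
  id: string;
}

// Append streamed messages to the original request messages; if a cloned
// lastMessage exists, drop its stale copy so the updated one wins.
function mergeMessages<M extends MinimalMessage>(
  original: M[],
  streamed: M[],
  lastMessage?: M,
): M[] {
  return lastMessage != null
    ? [...original, ...streamed.filter((m) => m.id !== lastMessage.id), lastMessage]
    : [...original, ...streamed];
}
```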
@@ -1057,7 +1069,11 @@ var Agent = class extends BaseResource {
  ...params,
  output: params.output ? zodToJsonSchema(params.output) : void 0,
  runtimeContext: parseClientRuntimeContext(params.runtimeContext),
- clientTools: processClientTools(params.clientTools)
+ clientTools: processClientTools(params.clientTools),
+ structuredOutput: params.structuredOutput ? {
+ ...params.structuredOutput,
+ schema: zodToJsonSchema(params.structuredOutput.schema)
+ } : void 0
  };
  const { readable, writable } = new TransformStream();
  const response = await this.processStreamResponse_vNext(processedParams, writable);
@@ -1903,6 +1919,57 @@ var Workflow = class extends BaseResource {
  });
  return response.body.pipeThrough(transformStream);
  }
+ /**
+ * Starts a workflow run and returns a stream
+ * @param params - Object containing the optional runId, inputData and runtimeContext
+ * @returns Promise containing the workflow execution results
+ */
+ async streamVNext(params) {
+ const searchParams = new URLSearchParams();
+ if (!!params?.runId) {
+ searchParams.set("runId", params.runId);
+ }
+ const runtimeContext = parseClientRuntimeContext(params.runtimeContext);
+ const response = await this.request(
+ `/api/workflows/${this.workflowId}/streamVNext?${searchParams.toString()}`,
+ {
+ method: "POST",
+ body: { inputData: params.inputData, runtimeContext },
+ stream: true
+ }
+ );
+ if (!response.ok) {
+ throw new Error(`Failed to stream vNext workflow: ${response.statusText}`);
+ }
+ if (!response.body) {
+ throw new Error("Response body is null");
+ }
+ let failedChunk = void 0;
+ const transformStream = new TransformStream({
+ start() {
+ },
+ async transform(chunk, controller) {
+ try {
+ const decoded = new TextDecoder().decode(chunk);
+ const chunks = decoded.split(RECORD_SEPARATOR2);
+ for (const chunk2 of chunks) {
+ if (chunk2) {
+ const newChunk = failedChunk ? failedChunk + chunk2 : chunk2;
+ try {
+ const parsedChunk = JSON.parse(newChunk);
+ controller.enqueue(parsedChunk);
+ failedChunk = void 0;
+ } catch {
+ failedChunk = newChunk;
+ }
+ }
+ }
+ } catch {
+ }
+ }
+ });
+ return response.body.pipeThrough(transformStream);
+ }
  /**
  * Resumes a suspended workflow step asynchronously and returns a promise that resolves when the workflow is complete
  * @param params - Object containing the runId, step, resumeData and runtimeContext
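The new `Workflow.streamVNext` posts to `/api/workflows/{workflowId}/streamVNext` and returns a `ReadableStream` of records that have already been split on the record separator and JSON-parsed. A hedged consumption sketch; the base URL, workflow id, and input fields are illustrative:

```ts
import { MastraClient } from "@mastra/client-js";

const client = new MastraClient({ baseUrl: "http://localhost:4111" });
const workflow = client.getWorkflow("myWorkflow"); // hypothetical workflow id

async function main() {
  const stream = await workflow.streamVNext({ inputData: { topic: "weather" } });

  const reader = stream.getReader();
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    console.log(value); // each value is one parsed JSON record from the workflow run
  }
}

main().catch(console.error);
```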
@@ -2465,7 +2532,7 @@ var Observability = class extends BaseResource {
  getTraces(params) {
  const { pagination, filters } = params;
  const { page, perPage, dateRange } = pagination || {};
- const { name, spanType } = filters || {};
+ const { name, spanType, entityId, entityType } = filters || {};
  const searchParams = new URLSearchParams();
  if (page !== void 0) {
  searchParams.set("page", String(page));
@@ -2479,6 +2546,10 @@ var Observability = class extends BaseResource {
  if (spanType !== void 0) {
  searchParams.set("spanType", String(spanType));
  }
+ if (entityId && entityType) {
+ searchParams.set("entityId", entityId);
+ searchParams.set("entityType", entityType);
+ }
  if (dateRange) {
  const dateRangeStr = JSON.stringify({
  start: dateRange.start instanceof Date ? dateRange.start.toISOString() : dateRange.start,
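`getTraces` now serializes `entityId`/`entityType` only when both are present. How the `Observability` resource is obtained from `MastraClient` is not shown in this diff, so the sketch below takes it as a structurally typed parameter; the filter values are hypothetical:

```ts
type TraceFilters = {
  name?: string;
  spanType?: string;
  entityId?: string;
  entityType?: string;
};

type GetTracesParams = {
  pagination?: { page?: number; perPage?: number };
  filters?: TraceFilters;
};

// Both entityId and entityType must be set together; the guard added above
// skips them otherwise.
async function fetchEntityTraces(observability: {
  getTraces(params: GetTracesParams): Promise<unknown>;
}) {
  return observability.getTraces({
    pagination: { page: 0, perPage: 20 },
    filters: { entityId: "weatherAgent", entityType: "agent" }, // assumed values
  });
}
```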