@mastra/client-js 0.0.0-fix-cloud-peer-deps-loggers-20250929204101 → 0.0.0-fix-maxSteps-modelsetting-playground-20251008194458
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +143 -3
- package/README.md +4 -4
- package/dist/client.d.ts +1 -13
- package/dist/client.d.ts.map +1 -1
- package/dist/index.cjs +165 -201
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +165 -201
- package/dist/index.js.map +1 -1
- package/dist/resources/agent.d.ts +24 -31
- package/dist/resources/agent.d.ts.map +1 -1
- package/dist/resources/workflow.d.ts +23 -8
- package/dist/resources/workflow.d.ts.map +1 -1
- package/dist/types.d.ts +13 -5
- package/dist/types.d.ts.map +1 -1
- package/dist/utils/process-mastra-stream.d.ts.map +1 -1
- package/package.json +4 -4
- package/dist/resources/vNextNetwork.d.ts +0 -43
- package/dist/resources/vNextNetwork.d.ts.map +0 -1
package/dist/index.js
CHANGED
@@ -317,12 +317,6 @@ var Agent = class extends BaseResource {
   details(runtimeContext) {
     return this.request(`/api/agents/${this.agentId}${runtimeContextQueryString(runtimeContext)}`);
   }
-  async generate(params) {
-    console.warn(
-      "Deprecation NOTICE:Generate method will switch to use generateVNext implementation September 30th, 2025. Please use generateLegacy if you don't want to upgrade just yet."
-    );
-    return this.generateLegacy(params);
-  }
   async generateLegacy(params) {
     const processedParams = {
       ...params,
@@ -386,7 +380,7 @@ var Agent = class extends BaseResource {
     }
     return response;
   }
-  async
+  async generate(messagesOrParams, options) {
     let params;
     if (typeof messagesOrParams === "object" && "messages" in messagesOrParams) {
       params = messagesOrParams;
@@ -408,7 +402,7 @@ var Agent = class extends BaseResource {
     };
     const { runId, resourceId, threadId, runtimeContext } = processedParams;
     const response = await this.request(
-      `/api/agents/${this.agentId}/generate
+      `/api/agents/${this.agentId}/generate`,
       {
         method: "POST",
         body: processedParams
@@ -422,7 +416,7 @@ var Agent = class extends BaseResource {
         resourceId,
         threadId,
         runtimeContext,
-        respondFn: this.
+        respondFn: this.generate.bind(this)
       });
     }
     return response;
@@ -689,17 +683,6 @@ var Agent = class extends BaseResource {
     });
     onFinish?.({ message, finishReason, usage });
   }
-  /**
-   * Streams a response from the agent
-   * @param params - Stream parameters including prompt
-   * @returns Promise containing the enhanced Response object with processDataStream method
-   */
-  async stream(params) {
-    console.warn(
-      "Deprecation NOTICE:\nStream method will switch to use streamVNext implementation September 30th, 2025. Please use streamLegacy if you don't want to upgrade just yet."
-    );
-    return this.streamLegacy(params);
-  }
   /**
    * Streams a response from the agent
    * @param params - Stream parameters including prompt
@@ -801,6 +784,14 @@ var Agent = class extends BaseResource {
       // but this is completely wrong and this fn is probably broken. Remove ":any" and you'll see a bunch of type errors
       onChunk: async (chunk) => {
         switch (chunk.type) {
+          case "tripwire": {
+            message.parts.push({
+              type: "text",
+              text: chunk.payload.tripwireReason
+            });
+            execUpdate();
+            break;
+          }
           case "step-start": {
             if (!replaceLastMessage) {
               message.id = chunk.payload.messageId;
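For context, the new `tripwire` case turns a guardrail abort into a plain text part on the message being assembled. Below is a minimal standalone sketch of the same handling, assuming only the chunk shape visible in the diff; the `TripwireChunk` and `MessagePart` types and the `handleTripwire` name are illustrative, not part of the package.

```ts
// Chunk shape taken from the diff above; everything else here is illustrative.
interface TripwireChunk {
  type: "tripwire";
  payload: { tripwireReason: string };
}

type MessagePart = { type: "text"; text: string };

// Mirrors the client's new onChunk branch: surface the tripwire reason as a text part.
function handleTripwire(parts: MessagePart[], chunk: TripwireChunk): void {
  parts.push({ type: "text", text: chunk.payload.tripwireReason });
}

// Usage sketch
const parts: MessagePart[] = [];
handleTripwire(parts, { type: "tripwire", payload: { tripwireReason: "Content policy triggered" } });
console.log(parts);
```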
@@ -982,7 +973,7 @@ var Agent = class extends BaseResource {
     onFinish?.({ message, finishReason, usage });
   }
   async processStreamResponse_vNext(processedParams, writable) {
-    const response = await this.request(`/api/agents/${this.agentId}/stream
+    const response = await this.request(`/api/agents/${this.agentId}/stream`, {
       method: "POST",
       body: processedParams,
       stream: true
@@ -997,18 +988,17 @@ var Agent = class extends BaseResource {
     streamForWritable.pipeTo(
       new WritableStream({
         async write(chunk) {
+          let writer;
           try {
+            writer = writable.getWriter();
             const text = new TextDecoder().decode(chunk);
-
-
-
+            const lines = text.split("\n\n");
+            const readableLines = lines.filter((line) => line !== "[DONE]").join("\n\n");
+            await writer.write(new TextEncoder().encode(readableLines));
           } catch {
-
-            const writer = writable.getWriter();
-            try {
-              await writer.write(chunk);
+            await writer?.write(chunk);
           } finally {
-            writer
+            writer?.releaseLock();
           }
         }
       }),
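The rewritten `write` handler now acquires the writer inside the `try`, drops `[DONE]` markers before forwarding decoded text, and falls back to writing the raw chunk if decoding fails. The filtering step in isolation, as a sketch (the function name is illustrative):

```ts
// Decode an incoming chunk, drop "[DONE]" records, and re-encode the remainder.
function stripDoneMarkers(chunk: Uint8Array): Uint8Array {
  const text = new TextDecoder().decode(chunk);
  const readable = text
    .split("\n\n")
    .filter((record) => record !== "[DONE]")
    .join("\n\n");
  return new TextEncoder().encode(readable);
}
```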
@@ -1034,9 +1024,11 @@ var Agent = class extends BaseResource {
           if (toolCall) {
             toolCalls.push(toolCall);
           }
+          let shouldExecuteClientTool = false;
           for (const toolCall2 of toolCalls) {
             const clientTool = processedParams.clientTools?.[toolCall2.toolName];
             if (clientTool && clientTool.execute) {
+              shouldExecuteClientTool = true;
               const result = await clientTool.execute(
                 {
                   context: toolCall2?.args,
@@ -1085,6 +1077,11 @@ var Agent = class extends BaseResource {
             });
           }
         }
+        if (!shouldExecuteClientTool) {
+          setTimeout(() => {
+            writable.close();
+          }, 0);
+        }
       } else {
         setTimeout(() => {
           writable.close();
@@ -1124,7 +1121,7 @@ var Agent = class extends BaseResource {
     };
     return streamResponse;
   }
-  async
+  async stream(messagesOrParams, options) {
     let params;
     if (typeof messagesOrParams === "object" && "messages" in messagesOrParams) {
       params = messagesOrParams;
@@ -1349,6 +1346,12 @@ var Agent = class extends BaseResource {
       body: params
     });
   }
+  async generateVNext(_messagesOrParams, _options) {
+    throw new Error("generateVNext has been renamed to generate. Please use generate instead.");
+  }
+  async streamVNext(_messagesOrParams, _options) {
+    throw new Error("streamVNext has been renamed to stream. Please use stream instead.");
+  }
 };

 // src/resources/memory-thread.ts
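Net effect of the agent changes in this and the earlier hunks: `generate` and `stream` now route to the vNext implementations, the previous behaviour stays reachable as `generateLegacy` and `streamLegacy`, and the old `generateVNext`/`streamVNext` names throw a rename error. A minimal usage sketch; the base URL, agent id, and message content are placeholders:

```ts
import { MastraClient } from "@mastra/client-js";

const client = new MastraClient({ baseUrl: "http://localhost:4111" }); // placeholder URL
const agent = client.getAgent("weather-agent"); // placeholder agent id

async function main() {
  // New default path (formerly the vNext implementation):
  const result = await agent.generate({ messages: [{ role: "user", content: "Hello" }] });

  // Previous implementation, kept under an explicit name:
  const legacy = await agent.generateLegacy({ messages: [{ role: "user", content: "Hello" }] });

  // These now throw "has been renamed" errors per the diff above:
  // await agent.generateVNext({ messages: [] });
  // await agent.streamVNext({ messages: [] });

  console.log(result, legacy);
}

main().catch(console.error);
```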
@@ -1697,10 +1700,20 @@ var Workflow = class extends BaseResource {
     return {
       runId,
       start: async (p) => {
-        return this.start({
+        return this.start({
+          runId,
+          inputData: p.inputData,
+          runtimeContext: p.runtimeContext,
+          tracingOptions: p.tracingOptions
+        });
       },
       startAsync: async (p) => {
-        return this.startAsync({
+        return this.startAsync({
+          runId,
+          inputData: p.inputData,
+          runtimeContext: p.runtimeContext,
+          tracingOptions: p.tracingOptions
+        });
       },
       watch: async (onRecord) => {
         return this.watch({ runId }, onRecord);
@@ -1709,10 +1722,22 @@ var Workflow = class extends BaseResource {
         return this.stream({ runId, inputData: p.inputData, runtimeContext: p.runtimeContext });
       },
       resume: async (p) => {
-        return this.resume({
+        return this.resume({
+          runId,
+          step: p.step,
+          resumeData: p.resumeData,
+          runtimeContext: p.runtimeContext,
+          tracingOptions: p.tracingOptions
+        });
       },
       resumeAsync: async (p) => {
-        return this.resumeAsync({
+        return this.resumeAsync({
+          runId,
+          step: p.step,
+          resumeData: p.resumeData,
+          runtimeContext: p.runtimeContext,
+          tracingOptions: p.tracingOptions
+        });
       },
       resumeStreamVNext: async (p) => {
         return this.resumeStreamVNext({
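The run handle built here now rebuilds its argument objects so that `runId`, `runtimeContext`, and the new `tracingOptions` are always forwarded to `start`, `startAsync`, `resume`, and `resumeAsync`. A hedged sketch of driving a run this way; the `createRunAsync` entry point, the step id, and the `tracingOptions` shape are assumptions, not taken from this diff:

```ts
import { MastraClient } from "@mastra/client-js";

const client = new MastraClient({ baseUrl: "http://localhost:4111" }); // placeholder URL
const workflow = client.getWorkflow("my-workflow"); // placeholder workflow id

async function main() {
  // Assumption: this returns the { runId, start, startAsync, resume, resumeAsync, ... } handle shown above.
  const run = await workflow.createRunAsync();

  await run.startAsync({
    inputData: { city: "Berlin" }, // placeholder input
    tracingOptions: { metadata: { source: "example" } }, // forwarded to the server per this diff; shape assumed
  });

  // After the run suspends on a step:
  await run.resumeAsync({
    step: "approval", // placeholder step id
    resumeData: { approved: true }, // placeholder resume payload
    tracingOptions: { metadata: { source: "example" } },
  });
}

main().catch(console.error);
```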
@@ -1733,7 +1758,7 @@ var Workflow = class extends BaseResource {
     const runtimeContext = parseClientRuntimeContext(params.runtimeContext);
     return this.request(`/api/workflows/${this.workflowId}/start?runId=${params.runId}`, {
       method: "POST",
-      body: { inputData: params?.inputData, runtimeContext }
+      body: { inputData: params?.inputData, runtimeContext, tracingOptions: params.tracingOptions }
     });
   }
   /**
@@ -1745,6 +1770,7 @@ var Workflow = class extends BaseResource {
     step,
     runId,
     resumeData,
+    tracingOptions,
     ...rest
   }) {
     const runtimeContext = parseClientRuntimeContext(rest.runtimeContext);
@@ -1753,7 +1779,8 @@ var Workflow = class extends BaseResource {
       body: {
         step,
         resumeData,
-        runtimeContext
+        runtimeContext,
+        tracingOptions
       }
     });
   }
@@ -1770,7 +1797,7 @@ var Workflow = class extends BaseResource {
     const runtimeContext = parseClientRuntimeContext(params.runtimeContext);
     return this.request(`/api/workflows/${this.workflowId}/start-async?${searchParams.toString()}`, {
       method: "POST",
-      body: { inputData: params.inputData, runtimeContext }
+      body: { inputData: params.inputData, runtimeContext, tracingOptions: params.tracingOptions }
     });
   }
   /**
@@ -1788,7 +1815,7 @@ var Workflow = class extends BaseResource {
       `/api/workflows/${this.workflowId}/stream?${searchParams.toString()}`,
       {
         method: "POST",
-        body: { inputData: params.inputData, runtimeContext },
+        body: { inputData: params.inputData, runtimeContext, tracingOptions: params.tracingOptions },
         stream: true
       }
     );
@@ -1886,7 +1913,12 @@ var Workflow = class extends BaseResource {
       `/api/workflows/${this.workflowId}/streamVNext?${searchParams.toString()}`,
       {
         method: "POST",
-        body: {
+        body: {
+          inputData: params.inputData,
+          runtimeContext,
+          closeOnSuspend: params.closeOnSuspend,
+          tracingOptions: params.tracingOptions
+        },
         stream: true
       }
     );
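`streamVNext` now posts an explicit body with `inputData`, `runtimeContext`, `closeOnSuspend`, and `tracingOptions`. A short call sketch; the base URL, ids, input, and the `tracingOptions` shape are placeholders or assumptions, and `closeOnSuspend` is read here as "end the stream when the run suspends" based on its name only:

```ts
import { MastraClient } from "@mastra/client-js";

const client = new MastraClient({ baseUrl: "http://localhost:4111" }); // placeholder URL
const workflow = client.getWorkflow("my-workflow"); // placeholder workflow id

async function main() {
  const stream = await workflow.streamVNext({
    runId: "run-123", // placeholder run id
    inputData: { city: "Berlin" }, // placeholder input
    closeOnSuspend: true, // presumably: close the stream when the run suspends
    tracingOptions: { metadata: { source: "example" } }, // shape assumed
  });

  // The client returns a ReadableStream of parsed workflow chunks.
  const reader = stream.getReader();
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    console.log(value);
  }
}

main().catch(console.error);
```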
@@ -1922,6 +1954,53 @@ var Workflow = class extends BaseResource {
     });
     return response.body.pipeThrough(transformStream);
   }
+  /**
+   * Observes workflow vNext stream for a workflow run
+   * @param params - Object containing the runId
+   * @returns Promise containing the workflow execution results
+   */
+  async observeStreamVNext(params) {
+    const searchParams = new URLSearchParams();
+    searchParams.set("runId", params.runId);
+    const response = await this.request(
+      `/api/workflows/${this.workflowId}/observe-streamVNext?${searchParams.toString()}`,
+      {
+        method: "POST",
+        stream: true
+      }
+    );
+    if (!response.ok) {
+      throw new Error(`Failed to observe stream vNext workflow: ${response.statusText}`);
+    }
+    if (!response.body) {
+      throw new Error("Response body is null");
+    }
+    let failedChunk = void 0;
+    const transformStream = new TransformStream({
+      start() {
+      },
+      async transform(chunk, controller) {
+        try {
+          const decoded = new TextDecoder().decode(chunk);
+          const chunks = decoded.split(RECORD_SEPARATOR);
+          for (const chunk2 of chunks) {
+            if (chunk2) {
+              const newChunk = failedChunk ? failedChunk + chunk2 : chunk2;
+              try {
+                const parsedChunk = JSON.parse(newChunk);
+                controller.enqueue(parsedChunk);
+                failedChunk = void 0;
+              } catch {
+                failedChunk = newChunk;
+              }
+            }
+          }
+        } catch {
+        }
+      }
+    });
+    return response.body.pipeThrough(transformStream);
+  }
   /**
    * Resumes a suspended workflow step asynchronously and returns a promise that resolves when the workflow is complete
    * @param params - Object containing the runId, step, resumeData and runtimeContext
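The new `observeStreamVNext` attaches to an existing run by `runId` and returns a `ReadableStream` of JSON-parsed workflow chunks, re-assembling records that arrive split across network chunks. A consumption sketch; the base URL, workflow id, and run id are placeholders:

```ts
import { MastraClient } from "@mastra/client-js";

const client = new MastraClient({ baseUrl: "http://localhost:4111" }); // placeholder URL
const workflow = client.getWorkflow("my-workflow"); // placeholder workflow id

async function observe(runId: string) {
  const stream = await workflow.observeStreamVNext({ runId });
  const reader = stream.getReader();
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    console.log(value); // parsed workflow event for the observed run
  }
}

observe("run-123").catch(console.error); // placeholder run id
```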
@@ -1934,7 +2013,8 @@ var Workflow = class extends BaseResource {
       body: {
         step: params.step,
         resumeData: params.resumeData,
-        runtimeContext
+        runtimeContext,
+        tracingOptions: params.tracingOptions
       }
     });
   }
@@ -1943,16 +2023,54 @@ var Workflow = class extends BaseResource {
    * @param params - Object containing the runId, step, resumeData and runtimeContext
    * @returns Promise containing the workflow resume results
    */
-  resumeStreamVNext(params) {
+  async resumeStreamVNext(params) {
+    const searchParams = new URLSearchParams();
+    searchParams.set("runId", params.runId);
     const runtimeContext = parseClientRuntimeContext(params.runtimeContext);
-
-
-
-
-
-
+    const response = await this.request(
+      `/api/workflows/${this.workflowId}/resume-stream?${searchParams.toString()}`,
+      {
+        method: "POST",
+        body: {
+          step: params.step,
+          resumeData: params.resumeData,
+          runtimeContext,
+          tracingOptions: params.tracingOptions
+        },
+        stream: true
+      }
+    );
+    if (!response.ok) {
+      throw new Error(`Failed to stream vNext workflow: ${response.statusText}`);
+    }
+    if (!response.body) {
+      throw new Error("Response body is null");
+    }
+    let failedChunk = void 0;
+    const transformStream = new TransformStream({
+      start() {
+      },
+      async transform(chunk, controller) {
+        try {
+          const decoded = new TextDecoder().decode(chunk);
+          const chunks = decoded.split(RECORD_SEPARATOR);
+          for (const chunk2 of chunks) {
+            if (chunk2) {
+              const newChunk = failedChunk ? failedChunk + chunk2 : chunk2;
+              try {
+                const parsedChunk = JSON.parse(newChunk);
+                controller.enqueue(parsedChunk);
+                failedChunk = void 0;
+              } catch {
+                failedChunk = newChunk;
+              }
+            }
+          }
+        } catch {
+        }
       }
     });
+    return response.body.pipeThrough(transformStream);
   }
   /**
    * Watches workflow transitions in real-time
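`resumeStreamVNext` is now async, posts the resume payload to `/resume-stream` itself, and returns the same kind of parsed chunk stream as `observeStreamVNext`. A sketch of resuming a suspended run and following its output; the step id, resume payload, and `tracingOptions` shape are assumptions:

```ts
import { MastraClient } from "@mastra/client-js";

const client = new MastraClient({ baseUrl: "http://localhost:4111" }); // placeholder URL
const workflow = client.getWorkflow("my-workflow"); // placeholder workflow id

async function resumeAndFollow(runId: string) {
  const stream = await workflow.resumeStreamVNext({
    runId,
    step: "approval", // placeholder step id
    resumeData: { approved: true }, // placeholder resume payload
    tracingOptions: { metadata: { source: "example" } }, // shape assumed
  });

  const reader = stream.getReader();
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    console.log(value); // parsed workflow event emitted after resume
  }
}

resumeAndFollow("run-123").catch(console.error); // placeholder run id
```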
@@ -2620,145 +2738,6 @@ var NetworkMemoryThread = class extends BaseResource {
   }
 };

-// src/resources/vNextNetwork.ts
-var RECORD_SEPARATOR3 = "";
-var VNextNetwork = class extends BaseResource {
-  constructor(options, networkId) {
-    super(options);
-    this.networkId = networkId;
-  }
-  /**
-   * Retrieves details about the network
-   * @param runtimeContext - Optional runtime context to pass as query parameter
-   * @returns Promise containing vNext network details
-   */
-  details(runtimeContext) {
-    return this.request(`/api/networks/v-next/${this.networkId}${runtimeContextQueryString(runtimeContext)}`);
-  }
-  /**
-   * Generates a response from the v-next network
-   * @param params - Generation parameters including message
-   * @returns Promise containing the generated response
-   */
-  generate(params) {
-    return this.request(`/api/networks/v-next/${this.networkId}/generate`, {
-      method: "POST",
-      body: {
-        ...params,
-        runtimeContext: parseClientRuntimeContext(params.runtimeContext)
-      }
-    });
-  }
-  /**
-   * Generates a response from the v-next network using multiple primitives
-   * @param params - Generation parameters including message
-   * @returns Promise containing the generated response
-   */
-  loop(params) {
-    return this.request(`/api/networks/v-next/${this.networkId}/loop`, {
-      method: "POST",
-      body: {
-        ...params,
-        runtimeContext: parseClientRuntimeContext(params.runtimeContext)
-      }
-    });
-  }
-  async *streamProcessor(stream) {
-    const reader = stream.getReader();
-    let doneReading = false;
-    let buffer = "";
-    try {
-      while (!doneReading) {
-        const { done, value } = await reader.read();
-        doneReading = done;
-        if (done && !value) continue;
-        try {
-          const decoded = value ? new TextDecoder().decode(value) : "";
-          const chunks = (buffer + decoded).split(RECORD_SEPARATOR3);
-          buffer = chunks.pop() || "";
-          for (const chunk of chunks) {
-            if (chunk) {
-              if (typeof chunk === "string") {
-                try {
-                  const parsedChunk = JSON.parse(chunk);
-                  yield parsedChunk;
-                } catch {
-                }
-              }
-            }
-          }
-        } catch {
-        }
-      }
-      if (buffer) {
-        try {
-          yield JSON.parse(buffer);
-        } catch {
-        }
-      }
-    } finally {
-      reader.cancel().catch(() => {
-      });
-    }
-  }
-  /**
-   * Streams a response from the v-next network
-   * @param params - Stream parameters including message
-   * @returns Promise containing the results
-   */
-  async stream(params, onRecord) {
-    const response = await this.request(`/api/networks/v-next/${this.networkId}/stream`, {
-      method: "POST",
-      body: {
-        ...params,
-        runtimeContext: parseClientRuntimeContext(params.runtimeContext)
-      },
-      stream: true
-    });
-    if (!response.ok) {
-      throw new Error(`Failed to stream vNext network: ${response.statusText}`);
-    }
-    if (!response.body) {
-      throw new Error("Response body is null");
-    }
-    for await (const record of this.streamProcessor(response.body)) {
-      if (typeof record === "string") {
-        onRecord(JSON.parse(record));
-      } else {
-        onRecord(record);
-      }
-    }
-  }
-  /**
-   * Streams a response from the v-next network loop
-   * @param params - Stream parameters including message
-   * @returns Promise containing the results
-   */
-  async loopStream(params, onRecord) {
-    const response = await this.request(`/api/networks/v-next/${this.networkId}/loop-stream`, {
-      method: "POST",
-      body: {
-        ...params,
-        runtimeContext: parseClientRuntimeContext(params.runtimeContext)
-      },
-      stream: true
-    });
-    if (!response.ok) {
-      throw new Error(`Failed to stream vNext network loop: ${response.statusText}`);
-    }
-    if (!response.body) {
-      throw new Error("Response body is null");
-    }
-    for await (const record of this.streamProcessor(response.body)) {
-      if (typeof record === "string") {
-        onRecord(JSON.parse(record));
-      } else {
-        onRecord(record);
-      }
-    }
-  }
-};
-
 // src/client.ts
 var MastraClient = class extends BaseResource {
   observability;
@@ -3100,21 +3079,6 @@ var MastraClient = class extends BaseResource {
       return this.request(`/api/telemetry`);
     }
   }
-  /**
-   * Retrieves all available vNext networks
-   * @returns Promise containing map of vNext network IDs to vNext network details
-   */
-  getVNextNetworks() {
-    return this.request("/api/networks/v-next");
-  }
-  /**
-   * Gets a vNext network instance by ID
-   * @param networkId - ID of the vNext network to retrieve
-   * @returns vNext Network instance
-   */
-  getVNextNetwork(networkId) {
-    return new VNextNetwork(this.options, networkId);
-  }
   /**
    * Retrieves a list of available MCP servers.
    * @param params - Optional parameters for pagination (limit, offset).
|