@mastra/client-js 0.0.0-configure-project-root-for-private-packages-20250919100548 → 0.0.0-cor235-20251008175106
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +215 -3
- package/README.md +4 -4
- package/dist/client.d.ts +30 -25
- package/dist/client.d.ts.map +1 -1
- package/dist/index.cjs +275 -436
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +275 -436
- package/dist/index.js.map +1 -1
- package/dist/resources/agent.d.ts +40 -31
- package/dist/resources/agent.d.ts.map +1 -1
- package/dist/resources/index.d.ts +0 -1
- package/dist/resources/index.d.ts.map +1 -1
- package/dist/resources/observability.d.ts +17 -1
- package/dist/resources/observability.d.ts.map +1 -1
- package/dist/resources/workflow.d.ts +23 -8
- package/dist/resources/workflow.d.ts.map +1 -1
- package/dist/types.d.ts +50 -26
- package/dist/types.d.ts.map +1 -1
- package/dist/utils/process-mastra-stream.d.ts.map +1 -1
- package/package.json +4 -4
- package/dist/resources/legacy-workflow.d.ts +0 -90
- package/dist/resources/legacy-workflow.d.ts.map +0 -1
- package/dist/resources/vNextNetwork.d.ts +0 -43
- package/dist/resources/vNextNetwork.d.ts.map +0 -1
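The most visible change for consumers is the agent method rename: the former vNext implementations are now exposed as generate and stream, generateVNext and streamVNext throw errors pointing at the new names, and the pre-rename behaviour stays available as generateLegacy and streamLegacy. A minimal migration sketch in TypeScript (the base URL, agent id, and message shapes are illustrative, not taken from this diff):

  import { MastraClient } from "@mastra/client-js";

  const client = new MastraClient({ baseUrl: "http://localhost:4111" }); // illustrative URL
  const agent = client.getAgent("my-agent"); // illustrative agent id

  // Before: agent.generateVNext(...) / agent.streamVNext(...); both now throw rename errors.
  const result = await agent.generate({ messages: "Hello" });

  // The old implementation remains reachable explicitly:
  const legacy = await agent.generateLegacy({ messages: [{ role: "user", content: "Hello" }] });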
package/dist/index.js
CHANGED
@@ -213,7 +213,9 @@ async function executeToolCallAndRespond({
       resourceId,
       threadId,
       runtimeContext,
-      tracingContext: { currentSpan: void 0 }
+      tracingContext: { currentSpan: void 0 },
+      suspend: async () => {
+      }
     },
     {
       messages: response.messages,
@@ -221,11 +223,7 @@ async function executeToolCallAndRespond({
     }
   );
   const updatedMessages = [
-    {
-      role: "user",
-      content: params.messages
-    },
-    ...response.response.messages,
+    ...response.response.messages || [],
     {
       role: "tool",
       content: [
@@ -319,12 +317,6 @@ var Agent = class extends BaseResource {
   details(runtimeContext) {
     return this.request(`/api/agents/${this.agentId}${runtimeContextQueryString(runtimeContext)}`);
   }
-  async generate(params) {
-    console.warn(
-      "Deprecation NOTICE:Generate method will switch to use generateVNext implementation September 23rd, 2025. Please use generateLegacy if you don't want to upgrade just yet."
-    );
-    return this.generateLegacy(params);
-  }
   async generateLegacy(params) {
     const processedParams = {
       ...params,
@@ -356,7 +348,9 @@ var Agent = class extends BaseResource {
           resourceId,
           threadId,
           runtimeContext,
-          tracingContext: { currentSpan: void 0 }
+          tracingContext: { currentSpan: void 0 },
+          suspend: async () => {
+          }
         },
         {
           messages: response.messages,
@@ -364,10 +358,6 @@ var Agent = class extends BaseResource {
       }
     );
     const updatedMessages = [
-      {
-        role: "user",
-        content: params.messages
-      },
       ...response.response.messages,
       {
         role: "tool",
@@ -390,7 +380,7 @@ var Agent = class extends BaseResource {
     }
     return response;
   }
-  async
+  async generate(messagesOrParams, options) {
     let params;
     if (typeof messagesOrParams === "object" && "messages" in messagesOrParams) {
       params = messagesOrParams;
@@ -412,7 +402,7 @@ var Agent = class extends BaseResource {
     };
     const { runId, resourceId, threadId, runtimeContext } = processedParams;
     const response = await this.request(
-      `/api/agents/${this.agentId}/generate
+      `/api/agents/${this.agentId}/generate`,
       {
         method: "POST",
         body: processedParams
@@ -426,7 +416,7 @@ var Agent = class extends BaseResource {
         resourceId,
         threadId,
         runtimeContext,
-        respondFn: this.
+        respondFn: this.generate.bind(this)
       });
     }
     return response;
@@ -693,17 +683,6 @@ var Agent = class extends BaseResource {
     });
     onFinish?.({ message, finishReason, usage });
   }
-  /**
-   * Streams a response from the agent
-   * @param params - Stream parameters including prompt
-   * @returns Promise containing the enhanced Response object with processDataStream method
-   */
-  async stream(params) {
-    console.warn(
-      "Deprecation NOTICE:\nStream method will switch to use streamVNext implementation September 23rd, 2025. Please use streamLegacy if you don't want to upgrade just yet."
-    );
-    return this.streamLegacy(params);
-  }
   /**
    * Streams a response from the agent
    * @param params - Stream parameters including prompt
@@ -805,6 +784,14 @@ var Agent = class extends BaseResource {
       // but this is completely wrong and this fn is probably broken. Remove ":any" and you'll see a bunch of type errors
       onChunk: async (chunk) => {
         switch (chunk.type) {
+          case "tripwire": {
+            message.parts.push({
+              type: "text",
+              text: chunk.payload.tripwireReason
+            });
+            execUpdate();
+            break;
+          }
           case "step-start": {
             if (!replaceLastMessage) {
               message.id = chunk.payload.messageId;
@@ -986,7 +973,7 @@ var Agent = class extends BaseResource {
     onFinish?.({ message, finishReason, usage });
   }
   async processStreamResponse_vNext(processedParams, writable) {
-    const response = await this.request(`/api/agents/${this.agentId}/stream
+    const response = await this.request(`/api/agents/${this.agentId}/stream`, {
       method: "POST",
       body: processedParams,
       stream: true
@@ -1001,18 +988,17 @@ var Agent = class extends BaseResource {
     streamForWritable.pipeTo(
       new WritableStream({
         async write(chunk) {
+          let writer;
           try {
+            writer = writable.getWriter();
             const text = new TextDecoder().decode(chunk);
-
-
-
+            const lines = text.split("\n\n");
+            const readableLines = lines.filter((line) => line !== "[DONE]").join("\n\n");
+            await writer.write(new TextEncoder().encode(readableLines));
           } catch {
-
-            const writer = writable.getWriter();
-            try {
-              await writer.write(chunk);
+            await writer?.write(chunk);
           } finally {
-              writer
+            writer?.releaseLock();
           }
         }
       }),
@@ -1038,9 +1024,11 @@ var Agent = class extends BaseResource {
           if (toolCall) {
             toolCalls.push(toolCall);
           }
+          let shouldExecuteClientTool = false;
          for (const toolCall2 of toolCalls) {
            const clientTool = processedParams.clientTools?.[toolCall2.toolName];
            if (clientTool && clientTool.execute) {
+              shouldExecuteClientTool = true;
              const result = await clientTool.execute(
                {
                  context: toolCall2?.args,
@@ -1049,7 +1037,9 @@ var Agent = class extends BaseResource {
                  threadId: processedParams.threadId,
                  runtimeContext: processedParams.runtimeContext,
                  // TODO: Pass proper tracing context when client-js supports tracing
-                  tracingContext: { currentSpan: void 0 }
+                  tracingContext: { currentSpan: void 0 },
+                  suspend: async () => {
+                  }
                },
                {
                  messages: response.messages,
@@ -1075,9 +1065,7 @@ var Agent = class extends BaseResource {
                toolInvocation.state = "result";
                toolInvocation.result = result;
              }
-              const
-              const messageArray = Array.isArray(originalMessages) ? originalMessages : [originalMessages];
-              const updatedMessages = lastMessage != null ? [...messageArray, ...messages.filter((m) => m.id !== lastMessage.id), lastMessage] : [...messageArray, ...messages];
+              const updatedMessages = lastMessage != null ? [...messages.filter((m) => m.id !== lastMessage.id), lastMessage] : [...messages];
              this.processStreamResponse_vNext(
                {
                  ...processedParams,
@@ -1089,6 +1077,11 @@ var Agent = class extends BaseResource {
              });
            }
          }
+          if (!shouldExecuteClientTool) {
+            setTimeout(() => {
+              writable.close();
+            }, 0);
+          }
        } else {
          setTimeout(() => {
            writable.close();
@@ -1128,7 +1121,7 @@ var Agent = class extends BaseResource {
     };
     return streamResponse;
   }
-  async
+  async stream(messagesOrParams, options) {
     let params;
     if (typeof messagesOrParams === "object" && "messages" in messagesOrParams) {
       params = messagesOrParams;
@@ -1213,7 +1206,9 @@ var Agent = class extends BaseResource {
                  threadId: processedParams.threadId,
                  runtimeContext: processedParams.runtimeContext,
                  // TODO: Pass proper tracing context when client-js supports tracing
-                  tracingContext: { currentSpan: void 0 }
+                  tracingContext: { currentSpan: void 0 },
+                  suspend: async () => {
+                  }
                },
                {
                  messages: response.messages,
@@ -1251,12 +1246,10 @@ var Agent = class extends BaseResource {
              } finally {
                writer.releaseLock();
              }
-              const originalMessages = processedParams.messages;
-              const messageArray = Array.isArray(originalMessages) ? originalMessages : [originalMessages];
              this.processStreamResponse(
                {
                  ...processedParams,
-                  messages: [...
+                  messages: [...messages.filter((m) => m.id !== lastMessage.id), lastMessage]
                },
                writable
              ).catch((error) => {
@@ -1331,6 +1324,34 @@ var Agent = class extends BaseResource {
       body: params
     });
   }
+  /**
+   * Updates the model for the agent in the model list
+   * @param params - Parameters for updating the model
+   * @returns Promise containing the updated model
+   */
+  updateModelInModelList({ modelConfigId, ...params }) {
+    return this.request(`/api/agents/${this.agentId}/models/${modelConfigId}`, {
+      method: "POST",
+      body: params
+    });
+  }
+  /**
+   * Reorders the models for the agent
+   * @param params - Parameters for reordering the model list
+   * @returns Promise containing the updated model list
+   */
+  reorderModelList(params) {
+    return this.request(`/api/agents/${this.agentId}/models/reorder`, {
+      method: "POST",
+      body: params
+    });
+  }
+  async generateVNext(_messagesOrParams, _options) {
+    throw new Error("generateVNext has been renamed to generate. Please use generate instead.");
+  }
+  async streamVNext(_messagesOrParams, _options) {
+    throw new Error("streamVNext has been renamed to stream. Please use stream instead.");
+  }
 };

 // src/resources/memory-thread.ts
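The hunk above also adds two model-list endpoints to Agent alongside the generateVNext/streamVNext tombstones. A hedged sketch of calling them, continuing from the client and agent set up in the first example (the ids and body fields are assumptions based only on the route names; the exact payloads are typed in agent.d.ts):

  // POST /api/agents/:agentId/models/:modelConfigId
  await agent.updateModelInModelList({
    modelConfigId: "model-config-1", // hypothetical id
    // any remaining fields are forwarded as the request body
  });

  // POST /api/agents/:agentId/models/reorder
  await agent.reorderModelList({
    // reorder payload, shape defined by the server route
  });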
@@ -1481,193 +1502,6 @@ var Vector = class extends BaseResource {
   }
 };

-// src/resources/legacy-workflow.ts
-var RECORD_SEPARATOR = "\x1E";
-var LegacyWorkflow = class extends BaseResource {
-  constructor(options, workflowId) {
-    super(options);
-    this.workflowId = workflowId;
-  }
-  /**
-   * Retrieves details about the legacy workflow
-   * @param runtimeContext - Optional runtime context to pass as query parameter
-   * @returns Promise containing legacy workflow details including steps and graphs
-   */
-  details(runtimeContext) {
-    return this.request(`/api/workflows/legacy/${this.workflowId}${runtimeContextQueryString(runtimeContext)}`);
-  }
-  /**
-   * Retrieves all runs for a legacy workflow
-   * @param params - Parameters for filtering runs
-   * @param runtimeContext - Optional runtime context to pass as query parameter
-   * @returns Promise containing legacy workflow runs array
-   */
-  runs(params, runtimeContext) {
-    const runtimeContextParam = base64RuntimeContext(parseClientRuntimeContext(runtimeContext));
-    const searchParams = new URLSearchParams();
-    if (params?.fromDate) {
-      searchParams.set("fromDate", params.fromDate.toISOString());
-    }
-    if (params?.toDate) {
-      searchParams.set("toDate", params.toDate.toISOString());
-    }
-    if (params?.limit) {
-      searchParams.set("limit", String(params.limit));
-    }
-    if (params?.offset) {
-      searchParams.set("offset", String(params.offset));
-    }
-    if (params?.resourceId) {
-      searchParams.set("resourceId", params.resourceId);
-    }
-    if (runtimeContextParam) {
-      searchParams.set("runtimeContext", runtimeContextParam);
-    }
-    if (searchParams.size) {
-      return this.request(`/api/workflows/legacy/${this.workflowId}/runs?${searchParams}`);
-    } else {
-      return this.request(`/api/workflows/legacy/${this.workflowId}/runs`);
-    }
-  }
-  /**
-   * Creates a new legacy workflow run
-   * @returns Promise containing the generated run ID
-   */
-  createRun(params) {
-    const searchParams = new URLSearchParams();
-    if (!!params?.runId) {
-      searchParams.set("runId", params.runId);
-    }
-    return this.request(`/api/workflows/legacy/${this.workflowId}/create-run?${searchParams.toString()}`, {
-      method: "POST"
-    });
-  }
-  /**
-   * Starts a legacy workflow run synchronously without waiting for the workflow to complete
-   * @param params - Object containing the runId and triggerData
-   * @returns Promise containing success message
-   */
-  start(params) {
-    return this.request(`/api/workflows/legacy/${this.workflowId}/start?runId=${params.runId}`, {
-      method: "POST",
-      body: params?.triggerData
-    });
-  }
-  /**
-   * Resumes a suspended legacy workflow step synchronously without waiting for the workflow to complete
-   * @param stepId - ID of the step to resume
-   * @param runId - ID of the legacy workflow run
-   * @param context - Context to resume the legacy workflow with
-   * @returns Promise containing the legacy workflow resume results
-   */
-  resume({
-    stepId,
-    runId,
-    context
-  }) {
-    return this.request(`/api/workflows/legacy/${this.workflowId}/resume?runId=${runId}`, {
-      method: "POST",
-      body: {
-        stepId,
-        context
-      }
-    });
-  }
-  /**
-   * Starts a workflow run asynchronously and returns a promise that resolves when the workflow is complete
-   * @param params - Object containing the optional runId and triggerData
-   * @returns Promise containing the workflow execution results
-   */
-  startAsync(params) {
-    const searchParams = new URLSearchParams();
-    if (!!params?.runId) {
-      searchParams.set("runId", params.runId);
-    }
-    return this.request(`/api/workflows/legacy/${this.workflowId}/start-async?${searchParams.toString()}`, {
-      method: "POST",
-      body: params?.triggerData
-    });
-  }
-  /**
-   * Resumes a suspended legacy workflow step asynchronously and returns a promise that resolves when the workflow is complete
-   * @param params - Object containing the runId, stepId, and context
-   * @returns Promise containing the workflow resume results
-   */
-  resumeAsync(params) {
-    return this.request(`/api/workflows/legacy/${this.workflowId}/resume-async?runId=${params.runId}`, {
-      method: "POST",
-      body: {
-        stepId: params.stepId,
-        context: params.context
-      }
-    });
-  }
-  /**
-   * Creates an async generator that processes a readable stream and yields records
-   * separated by the Record Separator character (\x1E)
-   *
-   * @param stream - The readable stream to process
-   * @returns An async generator that yields parsed records
-   */
-  async *streamProcessor(stream) {
-    const reader = stream.getReader();
-    let doneReading = false;
-    let buffer = "";
-    try {
-      while (!doneReading) {
-        const { done, value } = await reader.read();
-        doneReading = done;
-        if (done && !value) continue;
-        try {
-          const decoded = value ? new TextDecoder().decode(value) : "";
-          const chunks = (buffer + decoded).split(RECORD_SEPARATOR);
-          buffer = chunks.pop() || "";
-          for (const chunk of chunks) {
-            if (chunk) {
-              if (typeof chunk === "string") {
-                try {
-                  const parsedChunk = JSON.parse(chunk);
-                  yield parsedChunk;
-                } catch {
-                }
-              }
-            }
-          }
-        } catch {
-        }
-      }
-      if (buffer) {
-        try {
-          yield JSON.parse(buffer);
-        } catch {
-        }
-      }
-    } finally {
-      reader.cancel().catch(() => {
-      });
-    }
-  }
-  /**
-   * Watches legacy workflow transitions in real-time
-   * @param runId - Optional run ID to filter the watch stream
-   * @returns AsyncGenerator that yields parsed records from the legacy workflow watch stream
-   */
-  async watch({ runId }, onRecord) {
-    const response = await this.request(`/api/workflows/legacy/${this.workflowId}/watch?runId=${runId}`, {
-      stream: true
-    });
-    if (!response.ok) {
-      throw new Error(`Failed to watch legacy workflow: ${response.statusText}`);
-    }
-    if (!response.body) {
-      throw new Error("Response body is null");
-    }
-    for await (const record of this.streamProcessor(response.body)) {
-      onRecord(record);
-    }
-  }
-};
-
 // src/resources/tool.ts
 var Tool = class extends BaseResource {
   constructor(options, toolId) {
@@ -1704,7 +1538,7 @@ var Tool = class extends BaseResource {
 };

 // src/resources/workflow.ts
-var
+var RECORD_SEPARATOR = "\x1E";
 var Workflow = class extends BaseResource {
   constructor(options, workflowId) {
     super(options);
@@ -1728,7 +1562,7 @@ var Workflow = class extends BaseResource {
         if (done && !value) continue;
         try {
           const decoded = value ? new TextDecoder().decode(value) : "";
-          const chunks = (buffer + decoded).split(
+          const chunks = (buffer + decoded).split(RECORD_SEPARATOR);
           buffer = chunks.pop() || "";
           for (const chunk of chunks) {
             if (chunk) {
@@ -1866,10 +1700,20 @@ var Workflow = class extends BaseResource {
     return {
       runId,
       start: async (p) => {
-        return this.start({
+        return this.start({
+          runId,
+          inputData: p.inputData,
+          runtimeContext: p.runtimeContext,
+          tracingOptions: p.tracingOptions
+        });
       },
       startAsync: async (p) => {
-        return this.startAsync({
+        return this.startAsync({
+          runId,
+          inputData: p.inputData,
+          runtimeContext: p.runtimeContext,
+          tracingOptions: p.tracingOptions
+        });
       },
       watch: async (onRecord) => {
         return this.watch({ runId }, onRecord);
@@ -1878,10 +1722,22 @@ var Workflow = class extends BaseResource {
         return this.stream({ runId, inputData: p.inputData, runtimeContext: p.runtimeContext });
       },
       resume: async (p) => {
-        return this.resume({
+        return this.resume({
+          runId,
+          step: p.step,
+          resumeData: p.resumeData,
+          runtimeContext: p.runtimeContext,
+          tracingOptions: p.tracingOptions
+        });
       },
       resumeAsync: async (p) => {
-        return this.resumeAsync({
+        return this.resumeAsync({
+          runId,
+          step: p.step,
+          resumeData: p.resumeData,
+          runtimeContext: p.runtimeContext,
+          tracingOptions: p.tracingOptions
+        });
       },
       resumeStreamVNext: async (p) => {
         return this.resumeStreamVNext({
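With these changes the run handle returned by the Workflow run-creation helper forwards runtimeContext and the new tracingOptions through start, startAsync, resume, and resumeAsync. A sketch, continuing from the client above (the workflow id and input shape are illustrative; the creation helper name is not shown in this hunk, only the returned handle):

  const workflow = client.getWorkflow("my-workflow"); // illustrative id
  const run = await workflow.createRunAsync();        // helper name assumed
  const result = await run.startAsync({
    inputData: { city: "Berlin" },                    // illustrative input
    tracingOptions: { /* forwarded verbatim to the server */ },
  });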
@@ -1902,7 +1758,7 @@ var Workflow = class extends BaseResource {
     const runtimeContext = parseClientRuntimeContext(params.runtimeContext);
     return this.request(`/api/workflows/${this.workflowId}/start?runId=${params.runId}`, {
       method: "POST",
-      body: { inputData: params?.inputData, runtimeContext }
+      body: { inputData: params?.inputData, runtimeContext, tracingOptions: params.tracingOptions }
     });
   }
   /**
@@ -1914,6 +1770,7 @@ var Workflow = class extends BaseResource {
     step,
     runId,
     resumeData,
+    tracingOptions,
     ...rest
   }) {
     const runtimeContext = parseClientRuntimeContext(rest.runtimeContext);
@@ -1922,7 +1779,8 @@ var Workflow = class extends BaseResource {
       body: {
         step,
         resumeData,
-        runtimeContext
+        runtimeContext,
+        tracingOptions
       }
     });
   }
@@ -1939,7 +1797,7 @@ var Workflow = class extends BaseResource {
     const runtimeContext = parseClientRuntimeContext(params.runtimeContext);
     return this.request(`/api/workflows/${this.workflowId}/start-async?${searchParams.toString()}`, {
       method: "POST",
-      body: { inputData: params.inputData, runtimeContext }
+      body: { inputData: params.inputData, runtimeContext, tracingOptions: params.tracingOptions }
     });
   }
   /**
@@ -1957,7 +1815,7 @@ var Workflow = class extends BaseResource {
       `/api/workflows/${this.workflowId}/stream?${searchParams.toString()}`,
       {
         method: "POST",
-        body: { inputData: params.inputData, runtimeContext },
+        body: { inputData: params.inputData, runtimeContext, tracingOptions: params.tracingOptions },
         stream: true
       }
     );
@@ -1974,7 +1832,7 @@ var Workflow = class extends BaseResource {
       async transform(chunk, controller) {
         try {
           const decoded = new TextDecoder().decode(chunk);
-          const chunks = decoded.split(
+          const chunks = decoded.split(RECORD_SEPARATOR);
           for (const chunk2 of chunks) {
             if (chunk2) {
               const newChunk = failedChunk ? failedChunk + chunk2 : chunk2;
@@ -2021,7 +1879,7 @@ var Workflow = class extends BaseResource {
       async transform(chunk, controller) {
         try {
           const decoded = new TextDecoder().decode(chunk);
-          const chunks = decoded.split(
+          const chunks = decoded.split(RECORD_SEPARATOR);
           for (const chunk2 of chunks) {
             if (chunk2) {
               const newChunk = failedChunk ? failedChunk + chunk2 : chunk2;
@@ -2055,7 +1913,12 @@ var Workflow = class extends BaseResource {
       `/api/workflows/${this.workflowId}/streamVNext?${searchParams.toString()}`,
       {
         method: "POST",
-        body: {
+        body: {
+          inputData: params.inputData,
+          runtimeContext,
+          closeOnSuspend: params.closeOnSuspend,
+          tracingOptions: params.tracingOptions
+        },
         stream: true
       }
     );
@@ -2072,7 +1935,54 @@ var Workflow = class extends BaseResource {
       async transform(chunk, controller) {
         try {
           const decoded = new TextDecoder().decode(chunk);
-          const chunks = decoded.split(
+          const chunks = decoded.split(RECORD_SEPARATOR);
+          for (const chunk2 of chunks) {
+            if (chunk2) {
+              const newChunk = failedChunk ? failedChunk + chunk2 : chunk2;
+              try {
+                const parsedChunk = JSON.parse(newChunk);
+                controller.enqueue(parsedChunk);
+                failedChunk = void 0;
+              } catch {
+                failedChunk = newChunk;
+              }
+            }
+          }
+        } catch {
+        }
+      }
+    });
+    return response.body.pipeThrough(transformStream);
+  }
+  /**
+   * Observes workflow vNext stream for a workflow run
+   * @param params - Object containing the runId
+   * @returns Promise containing the workflow execution results
+   */
+  async observeStreamVNext(params) {
+    const searchParams = new URLSearchParams();
+    searchParams.set("runId", params.runId);
+    const response = await this.request(
+      `/api/workflows/${this.workflowId}/observe-streamVNext?${searchParams.toString()}`,
+      {
+        method: "POST",
+        stream: true
+      }
+    );
+    if (!response.ok) {
+      throw new Error(`Failed to observe stream vNext workflow: ${response.statusText}`);
+    }
+    if (!response.body) {
+      throw new Error("Response body is null");
+    }
+    let failedChunk = void 0;
+    const transformStream = new TransformStream({
+      start() {
+      },
+      async transform(chunk, controller) {
+        try {
+          const decoded = new TextDecoder().decode(chunk);
+          const chunks = decoded.split(RECORD_SEPARATOR);
           for (const chunk2 of chunks) {
             if (chunk2) {
               const newChunk = failedChunk ? failedChunk + chunk2 : chunk2;
@@ -2103,7 +2013,8 @@ var Workflow = class extends BaseResource {
       body: {
         step: params.step,
         resumeData: params.resumeData,
-        runtimeContext
+        runtimeContext,
+        tracingOptions: params.tracingOptions
       }
     });
   }
@@ -2112,16 +2023,54 @@ var Workflow = class extends BaseResource {
    * @param params - Object containing the runId, step, resumeData and runtimeContext
    * @returns Promise containing the workflow resume results
    */
-  resumeStreamVNext(params) {
+  async resumeStreamVNext(params) {
+    const searchParams = new URLSearchParams();
+    searchParams.set("runId", params.runId);
     const runtimeContext = parseClientRuntimeContext(params.runtimeContext);
-
-
-
-
-
-
+    const response = await this.request(
+      `/api/workflows/${this.workflowId}/resume-stream?${searchParams.toString()}`,
+      {
+        method: "POST",
+        body: {
+          step: params.step,
+          resumeData: params.resumeData,
+          runtimeContext,
+          tracingOptions: params.tracingOptions
+        },
+        stream: true
+      }
+    );
+    if (!response.ok) {
+      throw new Error(`Failed to stream vNext workflow: ${response.statusText}`);
+    }
+    if (!response.body) {
+      throw new Error("Response body is null");
+    }
+    let failedChunk = void 0;
+    const transformStream = new TransformStream({
+      start() {
+      },
+      async transform(chunk, controller) {
+        try {
+          const decoded = new TextDecoder().decode(chunk);
+          const chunks = decoded.split(RECORD_SEPARATOR);
+          for (const chunk2 of chunks) {
+            if (chunk2) {
+              const newChunk = failedChunk ? failedChunk + chunk2 : chunk2;
+              try {
+                const parsedChunk = JSON.parse(newChunk);
+                controller.enqueue(parsedChunk);
+                failedChunk = void 0;
+              } catch {
+                failedChunk = newChunk;
+              }
+            }
+          }
+        } catch {
+        }
       }
     });
+    return response.body.pipeThrough(transformStream);
   }
   /**
    * Watches workflow transitions in real-time
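observeStreamVNext and the reworked resumeStreamVNext both resolve to a ReadableStream of records that have already been split on the record separator and JSON-parsed by the TransformStream above. A minimal consumption sketch (the runId is illustrative):

  const stream = await workflow.observeStreamVNext({ runId: "run-123" });
  const reader = stream.getReader();
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    console.log(value); // each value is one parsed workflow chunk
  }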
@@ -2159,7 +2108,7 @@ var Workflow = class extends BaseResource {
       async start(controller) {
         try {
           for await (const record of records) {
-            const json = JSON.stringify(record) +
+            const json = JSON.stringify(record) + RECORD_SEPARATOR;
             controller.enqueue(encoder.encode(json));
           }
           controller.close();
@@ -2282,7 +2231,7 @@ var MCPTool = class extends BaseResource {
 };

 // src/resources/agent-builder.ts
-var
+var RECORD_SEPARATOR2 = "\x1E";
 var AgentBuilder = class extends BaseResource {
   constructor(options, actionId) {
     super(options);
@@ -2422,7 +2371,7 @@ var AgentBuilder = class extends BaseResource {
         if (done && !value) continue;
         try {
           const decoded = value ? new TextDecoder().decode(value) : "";
-          const chunks = (buffer + decoded).split(
+          const chunks = (buffer + decoded).split(RECORD_SEPARATOR2);
           buffer = chunks.pop() || "";
           for (const chunk of chunks) {
             if (chunk) {
@@ -2479,7 +2428,7 @@ var AgentBuilder = class extends BaseResource {
       async transform(chunk, controller) {
         try {
           const decoded = new TextDecoder().decode(chunk);
-          const chunks = decoded.split(
+          const chunks = decoded.split(RECORD_SEPARATOR2);
           for (const chunk2 of chunks) {
             if (chunk2) {
               const newChunk = failedChunk ? failedChunk + chunk2 : chunk2;
@@ -2528,7 +2477,7 @@ var AgentBuilder = class extends BaseResource {
       async transform(chunk, controller) {
         try {
           const decoded = new TextDecoder().decode(chunk);
-          const chunks = decoded.split(
+          const chunks = decoded.split(RECORD_SEPARATOR2);
           for (const chunk2 of chunks) {
             if (chunk2) {
               const newChunk = failedChunk ? failedChunk + chunk2 : chunk2;
@@ -2699,6 +2648,31 @@ var Observability = class extends BaseResource {
     const queryString = searchParams.toString();
     return this.request(`/api/observability/traces${queryString ? `?${queryString}` : ""}`);
   }
+  /**
+   * Retrieves scores by trace ID and span ID
+   * @param params - Parameters containing trace ID, span ID, and pagination options
+   * @returns Promise containing scores and pagination info
+   */
+  getScoresBySpan(params) {
+    const { traceId, spanId, page, perPage } = params;
+    const searchParams = new URLSearchParams();
+    if (page !== void 0) {
+      searchParams.set("page", String(page));
+    }
+    if (perPage !== void 0) {
+      searchParams.set("perPage", String(perPage));
+    }
+    const queryString = searchParams.toString();
+    return this.request(
+      `/api/observability/traces/${encodeURIComponent(traceId)}/${encodeURIComponent(spanId)}/scores${queryString ? `?${queryString}` : ""}`
+    );
+  }
+  score(params) {
+    return this.request(`/api/observability/traces/score`, {
+      method: "POST",
+      body: { ...params }
+    });
+  }
 };

 // src/resources/network-memory-thread.ts
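The new Observability methods wrap GET /api/observability/traces/:traceId/:spanId/scores and POST /api/observability/traces/score, and MastraClient re-exposes them further down as getScoresBySpan and score. A hedged sketch, continuing from the client above (ids are illustrative and the score payload shape is defined server-side):

  const spanScores = await client.getScoresBySpan({
    traceId: "trace-abc",
    spanId: "span-def",
    page: 0,
    perPage: 10,
  });

  await client.score({
    // scoring payload, forwarded as the POST body
  });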
@@ -2764,145 +2738,6 @@ var NetworkMemoryThread = class extends BaseResource {
   }
 };

-// src/resources/vNextNetwork.ts
-var RECORD_SEPARATOR4 = "\x1E";
-var VNextNetwork = class extends BaseResource {
-  constructor(options, networkId) {
-    super(options);
-    this.networkId = networkId;
-  }
-  /**
-   * Retrieves details about the network
-   * @param runtimeContext - Optional runtime context to pass as query parameter
-   * @returns Promise containing vNext network details
-   */
-  details(runtimeContext) {
-    return this.request(`/api/networks/v-next/${this.networkId}${runtimeContextQueryString(runtimeContext)}`);
-  }
-  /**
-   * Generates a response from the v-next network
-   * @param params - Generation parameters including message
-   * @returns Promise containing the generated response
-   */
-  generate(params) {
-    return this.request(`/api/networks/v-next/${this.networkId}/generate`, {
-      method: "POST",
-      body: {
-        ...params,
-        runtimeContext: parseClientRuntimeContext(params.runtimeContext)
-      }
-    });
-  }
-  /**
-   * Generates a response from the v-next network using multiple primitives
-   * @param params - Generation parameters including message
-   * @returns Promise containing the generated response
-   */
-  loop(params) {
-    return this.request(`/api/networks/v-next/${this.networkId}/loop`, {
-      method: "POST",
-      body: {
-        ...params,
-        runtimeContext: parseClientRuntimeContext(params.runtimeContext)
-      }
-    });
-  }
-  async *streamProcessor(stream) {
-    const reader = stream.getReader();
-    let doneReading = false;
-    let buffer = "";
-    try {
-      while (!doneReading) {
-        const { done, value } = await reader.read();
-        doneReading = done;
-        if (done && !value) continue;
-        try {
-          const decoded = value ? new TextDecoder().decode(value) : "";
-          const chunks = (buffer + decoded).split(RECORD_SEPARATOR4);
-          buffer = chunks.pop() || "";
-          for (const chunk of chunks) {
-            if (chunk) {
-              if (typeof chunk === "string") {
-                try {
-                  const parsedChunk = JSON.parse(chunk);
-                  yield parsedChunk;
-                } catch {
-                }
-              }
-            }
-          }
-        } catch {
-        }
-      }
-      if (buffer) {
-        try {
-          yield JSON.parse(buffer);
-        } catch {
-        }
-      }
-    } finally {
-      reader.cancel().catch(() => {
-      });
-    }
-  }
-  /**
-   * Streams a response from the v-next network
-   * @param params - Stream parameters including message
-   * @returns Promise containing the results
-   */
-  async stream(params, onRecord) {
-    const response = await this.request(`/api/networks/v-next/${this.networkId}/stream`, {
-      method: "POST",
-      body: {
-        ...params,
-        runtimeContext: parseClientRuntimeContext(params.runtimeContext)
-      },
-      stream: true
-    });
-    if (!response.ok) {
-      throw new Error(`Failed to stream vNext network: ${response.statusText}`);
-    }
-    if (!response.body) {
-      throw new Error("Response body is null");
-    }
-    for await (const record of this.streamProcessor(response.body)) {
-      if (typeof record === "string") {
-        onRecord(JSON.parse(record));
-      } else {
-        onRecord(record);
-      }
-    }
-  }
-  /**
-   * Streams a response from the v-next network loop
-   * @param params - Stream parameters including message
-   * @returns Promise containing the results
-   */
-  async loopStream(params, onRecord) {
-    const response = await this.request(`/api/networks/v-next/${this.networkId}/loop-stream`, {
-      method: "POST",
-      body: {
-        ...params,
-        runtimeContext: parseClientRuntimeContext(params.runtimeContext)
-      },
-      stream: true
-    });
-    if (!response.ok) {
-      throw new Error(`Failed to stream vNext network loop: ${response.statusText}`);
-    }
-    if (!response.body) {
-      throw new Error("Response body is null");
-    }
-    for await (const record of this.streamProcessor(response.body)) {
-      if (typeof record === "string") {
-        onRecord(JSON.parse(record));
-      } else {
-        onRecord(record);
-      }
-    }
-  }
-};
-
 // src/client.ts
 var MastraClient = class extends BaseResource {
   observability;
@@ -2940,6 +2775,14 @@ var MastraClient = class extends BaseResource {
   getMemoryThreads(params) {
     return this.request(`/api/memory/threads?resourceid=${params.resourceId}&agentId=${params.agentId}`);
   }
+  /**
+   * Retrieves memory config for a resource
+   * @param params - Parameters containing the resource ID
+   * @returns Promise containing array of memory threads
+   */
+  getMemoryConfig(params) {
+    return this.request(`/api/memory/config?agentId=${params.agentId}`);
+  }
   /**
    * Creates a new memory thread
    * @param params - Parameters for creating the memory thread
@@ -2956,6 +2799,24 @@ var MastraClient = class extends BaseResource {
   getMemoryThread(threadId, agentId) {
     return new MemoryThread(this.options, threadId, agentId);
   }
+  getThreadMessages(threadId, opts = {}) {
+    let url = "";
+    if (opts.agentId) {
+      url = `/api/memory/threads/${threadId}/messages?agentId=${opts.agentId}`;
+    } else if (opts.networkId) {
+      url = `/api/memory/network/threads/${threadId}/messages?networkId=${opts.networkId}`;
+    }
+    return this.request(url);
+  }
+  deleteThread(threadId, opts = {}) {
+    let url = "";
+    if (opts.agentId) {
+      url = `/api/memory/threads/${threadId}?agentId=${opts.agentId}`;
+    } else if (opts.networkId) {
+      url = `/api/memory/network/threads/${threadId}?networkId=${opts.networkId}`;
+    }
+    return this.request(url, { method: "DELETE" });
+  }
   /**
    * Saves messages to memory
    * @param params - Parameters containing messages to save
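MastraClient also gains getMemoryConfig, getThreadMessages, and deleteThread, thin wrappers over the memory routes shown in the two hunks above. A sketch, continuing from the client above (ids are illustrative; pass either agentId or networkId to the thread helpers):

  const memoryConfig = await client.getMemoryConfig({ agentId: "my-agent" });
  const threadMessages = await client.getThreadMessages("thread-1", { agentId: "my-agent" });
  await client.deleteThread("thread-1", { agentId: "my-agent" });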
@@ -3038,21 +2899,6 @@ var MastraClient = class extends BaseResource {
   getTool(toolId) {
     return new Tool(this.options, toolId);
   }
-  /**
-   * Retrieves all available legacy workflows
-   * @returns Promise containing map of legacy workflow IDs to legacy workflow details
-   */
-  getLegacyWorkflows() {
-    return this.request("/api/workflows/legacy");
-  }
-  /**
-   * Gets a legacy workflow instance by ID
-   * @param workflowId - ID of the legacy workflow to retrieve
-   * @returns Legacy Workflow instance
-   */
-  getLegacyWorkflow(workflowId) {
-    return new LegacyWorkflow(this.options, workflowId);
-  }
   /**
    * Retrieves all available workflows
    * @param runtimeContext - Optional runtime context to pass as query parameter
@@ -3233,21 +3079,6 @@ var MastraClient = class extends BaseResource {
     return this.request(`/api/telemetry`);
   }
   }
-  /**
-   * Retrieves all available vNext networks
-   * @returns Promise containing map of vNext network IDs to vNext network details
-   */
-  getVNextNetworks() {
-    return this.request("/api/networks/v-next");
-  }
-  /**
-   * Gets a vNext network instance by ID
-   * @param networkId - ID of the vNext network to retrieve
-   * @returns vNext Network instance
-   */
-  getVNextNetwork(networkId) {
-    return new VNextNetwork(this.options, networkId);
-  }
   /**
    * Retrieves a list of available MCP servers.
    * @param params - Optional parameters for pagination (limit, offset).
@@ -3352,7 +3183,7 @@ var MastraClient = class extends BaseResource {
    * @returns Promise containing the scorer
    */
   getScorer(scorerId) {
-    return this.request(`/api/scores/scorers/${scorerId}`);
+    return this.request(`/api/scores/scorers/${encodeURIComponent(scorerId)}`);
   }
   getScoresByScorerId(params) {
     const { page, perPage, scorerId, entityId, entityType } = params;
@@ -3370,7 +3201,7 @@ var MastraClient = class extends BaseResource {
       searchParams.set("perPage", String(perPage));
     }
     const queryString = searchParams.toString();
-    return this.request(`/api/scores/scorer/${scorerId}${queryString ? `?${queryString}` : ""}`);
+    return this.request(`/api/scores/scorer/${encodeURIComponent(scorerId)}${queryString ? `?${queryString}` : ""}`);
   }
   /**
    * Retrieves scores by run ID
@@ -3387,7 +3218,7 @@ var MastraClient = class extends BaseResource {
       searchParams.set("perPage", String(perPage));
     }
     const queryString = searchParams.toString();
-    return this.request(`/api/scores/run/${runId}${queryString ? `?${queryString}` : ""}`);
+    return this.request(`/api/scores/run/${encodeURIComponent(runId)}${queryString ? `?${queryString}` : ""}`);
   }
   /**
    * Retrieves scores by entity ID and type
@@ -3404,7 +3235,9 @@ var MastraClient = class extends BaseResource {
       searchParams.set("perPage", String(perPage));
     }
     const queryString = searchParams.toString();
-    return this.request(
+    return this.request(
+      `/api/scores/entity/${encodeURIComponent(entityType)}/${encodeURIComponent(entityId)}${queryString ? `?${queryString}` : ""}`
+    );
   }
   /**
    * Saves a score
@@ -3430,6 +3263,12 @@ var MastraClient = class extends BaseResource {
   getAITraces(params) {
     return this.observability.getTraces(params);
   }
+  getScoresBySpan(params) {
+    return this.observability.getScoresBySpan(params);
+  }
+  score(params) {
+    return this.observability.score(params);
+  }
 };

 // src/tools.ts