@mastra/client-js 0.0.0-configure-project-root-for-private-packages-20250919100548 → 0.0.0-cor235-20251008175106
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +215 -3
- package/README.md +4 -4
- package/dist/client.d.ts +30 -25
- package/dist/client.d.ts.map +1 -1
- package/dist/index.cjs +275 -436
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +275 -436
- package/dist/index.js.map +1 -1
- package/dist/resources/agent.d.ts +40 -31
- package/dist/resources/agent.d.ts.map +1 -1
- package/dist/resources/index.d.ts +0 -1
- package/dist/resources/index.d.ts.map +1 -1
- package/dist/resources/observability.d.ts +17 -1
- package/dist/resources/observability.d.ts.map +1 -1
- package/dist/resources/workflow.d.ts +23 -8
- package/dist/resources/workflow.d.ts.map +1 -1
- package/dist/types.d.ts +50 -26
- package/dist/types.d.ts.map +1 -1
- package/dist/utils/process-mastra-stream.d.ts.map +1 -1
- package/package.json +4 -4
- package/dist/resources/legacy-workflow.d.ts +0 -90
- package/dist/resources/legacy-workflow.d.ts.map +0 -1
- package/dist/resources/vNextNetwork.d.ts +0 -43
- package/dist/resources/vNextNetwork.d.ts.map +0 -1
package/dist/index.cjs
CHANGED
@@ -219,7 +219,9 @@ async function executeToolCallAndRespond({
       resourceId,
       threadId,
       runtimeContext,
-      tracingContext: { currentSpan: void 0 }
+      tracingContext: { currentSpan: void 0 },
+      suspend: async () => {
+      }
     },
     {
       messages: response.messages,
@@ -227,11 +229,7 @@ async function executeToolCallAndRespond({
     }
   );
   const updatedMessages = [
-    {
-      role: "user",
-      content: params.messages
-    },
-    ...response.response.messages,
+    ...response.response.messages || [],
    {
      role: "tool",
      content: [
@@ -325,12 +323,6 @@ var Agent = class extends BaseResource {
   details(runtimeContext) {
     return this.request(`/api/agents/${this.agentId}${runtimeContextQueryString(runtimeContext)}`);
   }
-  async generate(params) {
-    console.warn(
-      "Deprecation NOTICE:Generate method will switch to use generateVNext implementation September 23rd, 2025. Please use generateLegacy if you don't want to upgrade just yet."
-    );
-    return this.generateLegacy(params);
-  }
   async generateLegacy(params) {
     const processedParams = {
       ...params,
@@ -362,7 +354,9 @@ var Agent = class extends BaseResource {
         resourceId,
         threadId,
         runtimeContext,
-        tracingContext: { currentSpan: void 0 }
+        tracingContext: { currentSpan: void 0 },
+        suspend: async () => {
+        }
       },
       {
         messages: response.messages,
@@ -370,10 +364,6 @@ var Agent = class extends BaseResource {
       }
     );
     const updatedMessages = [
-      {
-        role: "user",
-        content: params.messages
-      },
       ...response.response.messages,
       {
         role: "tool",
@@ -396,7 +386,7 @@ var Agent = class extends BaseResource {
     }
     return response;
   }
-  async
+  async generate(messagesOrParams, options) {
     let params;
     if (typeof messagesOrParams === "object" && "messages" in messagesOrParams) {
       params = messagesOrParams;
@@ -418,7 +408,7 @@ var Agent = class extends BaseResource {
     };
     const { runId, resourceId, threadId, runtimeContext } = processedParams;
     const response = await this.request(
-      `/api/agents/${this.agentId}/generate
+      `/api/agents/${this.agentId}/generate`,
       {
         method: "POST",
         body: processedParams
@@ -432,7 +422,7 @@ var Agent = class extends BaseResource {
         resourceId,
         threadId,
         runtimeContext,
-        respondFn: this.
+        respondFn: this.generate.bind(this)
       });
     }
     return response;
@@ -699,17 +689,6 @@ var Agent = class extends BaseResource {
     });
     onFinish?.({ message, finishReason, usage });
   }
-  /**
-   * Streams a response from the agent
-   * @param params - Stream parameters including prompt
-   * @returns Promise containing the enhanced Response object with processDataStream method
-   */
-  async stream(params) {
-    console.warn(
-      "Deprecation NOTICE:\nStream method will switch to use streamVNext implementation September 23rd, 2025. Please use streamLegacy if you don't want to upgrade just yet."
-    );
-    return this.streamLegacy(params);
-  }
   /**
    * Streams a response from the agent
    * @param params - Stream parameters including prompt
@@ -811,6 +790,14 @@ var Agent = class extends BaseResource {
       // but this is completely wrong and this fn is probably broken. Remove ":any" and you'll see a bunch of type errors
       onChunk: async (chunk) => {
         switch (chunk.type) {
+          case "tripwire": {
+            message.parts.push({
+              type: "text",
+              text: chunk.payload.tripwireReason
+            });
+            execUpdate();
+            break;
+          }
           case "step-start": {
             if (!replaceLastMessage) {
               message.id = chunk.payload.messageId;
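The new `tripwire` case above surfaces guardrail aborts as a plain text part on the message being assembled. A minimal TypeScript sketch of the chunk shape this handler assumes (field names are taken from the code above; the authoritative union type lives in `@mastra/core`, not here):

```ts
// Sketch only: inferred from the handler, not the published type definition.
interface TripwireChunk {
  type: "tripwire";
  payload: {
    tripwireReason: string; // appended to message.parts as a { type: "text" } part
  };
}
```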
@@ -992,7 +979,7 @@ var Agent = class extends BaseResource {
     onFinish?.({ message, finishReason, usage });
   }
   async processStreamResponse_vNext(processedParams, writable) {
-    const response = await this.request(`/api/agents/${this.agentId}/stream
+    const response = await this.request(`/api/agents/${this.agentId}/stream`, {
       method: "POST",
       body: processedParams,
       stream: true
@@ -1007,18 +994,17 @@
     streamForWritable.pipeTo(
       new WritableStream({
         async write(chunk) {
+          let writer;
           try {
+            writer = writable.getWriter();
             const text = new TextDecoder().decode(chunk);
-
-
-
+            const lines = text.split("\n\n");
+            const readableLines = lines.filter((line) => line !== "[DONE]").join("\n\n");
+            await writer.write(new TextEncoder().encode(readableLines));
           } catch {
-
-            const writer = writable.getWriter();
-            try {
-              await writer.write(chunk);
+            await writer?.write(chunk);
           } finally {
-            writer
+            writer?.releaseLock();
           }
         }
       }),
@@ -1044,9 +1030,11 @@
           if (toolCall) {
             toolCalls.push(toolCall);
           }
+          let shouldExecuteClientTool = false;
           for (const toolCall2 of toolCalls) {
             const clientTool = processedParams.clientTools?.[toolCall2.toolName];
             if (clientTool && clientTool.execute) {
+              shouldExecuteClientTool = true;
               const result = await clientTool.execute(
                 {
                   context: toolCall2?.args,
@@ -1055,7 +1043,9 @@
                   threadId: processedParams.threadId,
                   runtimeContext: processedParams.runtimeContext,
                   // TODO: Pass proper tracing context when client-js supports tracing
-                  tracingContext: { currentSpan: void 0 }
+                  tracingContext: { currentSpan: void 0 },
+                  suspend: async () => {
+                  }
                 },
                 {
                   messages: response.messages,
@@ -1081,9 +1071,7 @@
                 toolInvocation.state = "result";
                 toolInvocation.result = result;
               }
-              const
-              const messageArray = Array.isArray(originalMessages) ? originalMessages : [originalMessages];
-              const updatedMessages = lastMessage != null ? [...messageArray, ...messages.filter((m) => m.id !== lastMessage.id), lastMessage] : [...messageArray, ...messages];
+              const updatedMessages = lastMessage != null ? [...messages.filter((m) => m.id !== lastMessage.id), lastMessage] : [...messages];
               this.processStreamResponse_vNext(
                 {
                   ...processedParams,
@@ -1095,6 +1083,11 @@
               });
             }
           }
+          if (!shouldExecuteClientTool) {
+            setTimeout(() => {
+              writable.close();
+            }, 0);
+          }
         } else {
           setTimeout(() => {
             writable.close();
@@ -1134,7 +1127,7 @@ var Agent = class extends BaseResource {
     };
     return streamResponse;
   }
-  async
+  async stream(messagesOrParams, options) {
     let params;
     if (typeof messagesOrParams === "object" && "messages" in messagesOrParams) {
       params = messagesOrParams;
@@ -1219,7 +1212,9 @@ var Agent = class extends BaseResource {
                   threadId: processedParams.threadId,
                   runtimeContext: processedParams.runtimeContext,
                   // TODO: Pass proper tracing context when client-js supports tracing
-                  tracingContext: { currentSpan: void 0 }
+                  tracingContext: { currentSpan: void 0 },
+                  suspend: async () => {
+                  }
                 },
                 {
                   messages: response.messages,
@@ -1257,12 +1252,10 @@ var Agent = class extends BaseResource {
              } finally {
                writer.releaseLock();
              }
-              const originalMessages = processedParams.messages;
-              const messageArray = Array.isArray(originalMessages) ? originalMessages : [originalMessages];
              this.processStreamResponse(
                {
                  ...processedParams,
-                  messages: [...
+                  messages: [...messages.filter((m) => m.id !== lastMessage.id), lastMessage]
                },
                writable
              ).catch((error) => {
@@ -1337,6 +1330,34 @@ var Agent = class extends BaseResource {
       body: params
     });
   }
+  /**
+   * Updates the model for the agent in the model list
+   * @param params - Parameters for updating the model
+   * @returns Promise containing the updated model
+   */
+  updateModelInModelList({ modelConfigId, ...params }) {
+    return this.request(`/api/agents/${this.agentId}/models/${modelConfigId}`, {
+      method: "POST",
+      body: params
+    });
+  }
+  /**
+   * Reorders the models for the agent
+   * @param params - Parameters for reordering the model list
+   * @returns Promise containing the updated model list
+   */
+  reorderModelList(params) {
+    return this.request(`/api/agents/${this.agentId}/models/reorder`, {
+      method: "POST",
+      body: params
+    });
+  }
+  async generateVNext(_messagesOrParams, _options) {
+    throw new Error("generateVNext has been renamed to generate. Please use generate instead.");
+  }
+  async streamVNext(_messagesOrParams, _options) {
+    throw new Error("streamVNext has been renamed to stream. Please use stream instead.");
+  }
 };

 // src/resources/memory-thread.ts
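Taken together with the earlier Agent hunks, this release makes `generate`/`stream` the default (formerly vNext) implementations, keeps the previous behaviour reachable as `generateLegacy`/`streamLegacy`, and turns `generateVNext`/`streamVNext` into hard errors with a rename hint. It also adds per-agent model-list management (`updateModelInModelList`, `reorderModelList`). A hedged TypeScript sketch of the migration, assuming a local Mastra server; the URL, agent id, and message text are placeholders:

```ts
import { MastraClient } from "@mastra/client-js";

const client = new MastraClient({ baseUrl: "http://localhost:4111" }); // placeholder URL
const agent = client.getAgent("my-agent"); // placeholder agent id

// New default path (what used to be generateVNext / streamVNext):
const result = await agent.generate({
  messages: [{ role: "user", content: "Hello" }], // placeholder message
});

// The pre-rename behaviour is still reachable explicitly:
const legacy = await agent.generateLegacy({
  messages: [{ role: "user", content: "Hello" }],
});

// These now throw instead of delegating:
// await agent.generateVNext(...); // Error: "generateVNext has been renamed to generate..."
// await agent.streamVNext(...);   // Error: "streamVNext has been renamed to stream..."
```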
@@ -1487,193 +1508,6 @@ var Vector = class extends BaseResource {
   }
 };

-// src/resources/legacy-workflow.ts
-var RECORD_SEPARATOR = "";
-var LegacyWorkflow = class extends BaseResource {
-  constructor(options, workflowId) {
-    super(options);
-    this.workflowId = workflowId;
-  }
-  /**
-   * Retrieves details about the legacy workflow
-   * @param runtimeContext - Optional runtime context to pass as query parameter
-   * @returns Promise containing legacy workflow details including steps and graphs
-   */
-  details(runtimeContext) {
-    return this.request(`/api/workflows/legacy/${this.workflowId}${runtimeContextQueryString(runtimeContext)}`);
-  }
-  /**
-   * Retrieves all runs for a legacy workflow
-   * @param params - Parameters for filtering runs
-   * @param runtimeContext - Optional runtime context to pass as query parameter
-   * @returns Promise containing legacy workflow runs array
-   */
-  runs(params, runtimeContext) {
-    const runtimeContextParam = base64RuntimeContext(parseClientRuntimeContext(runtimeContext));
-    const searchParams = new URLSearchParams();
-    if (params?.fromDate) {
-      searchParams.set("fromDate", params.fromDate.toISOString());
-    }
-    if (params?.toDate) {
-      searchParams.set("toDate", params.toDate.toISOString());
-    }
-    if (params?.limit) {
-      searchParams.set("limit", String(params.limit));
-    }
-    if (params?.offset) {
-      searchParams.set("offset", String(params.offset));
-    }
-    if (params?.resourceId) {
-      searchParams.set("resourceId", params.resourceId);
-    }
-    if (runtimeContextParam) {
-      searchParams.set("runtimeContext", runtimeContextParam);
-    }
-    if (searchParams.size) {
-      return this.request(`/api/workflows/legacy/${this.workflowId}/runs?${searchParams}`);
-    } else {
-      return this.request(`/api/workflows/legacy/${this.workflowId}/runs`);
-    }
-  }
-  /**
-   * Creates a new legacy workflow run
-   * @returns Promise containing the generated run ID
-   */
-  createRun(params) {
-    const searchParams = new URLSearchParams();
-    if (!!params?.runId) {
-      searchParams.set("runId", params.runId);
-    }
-    return this.request(`/api/workflows/legacy/${this.workflowId}/create-run?${searchParams.toString()}`, {
-      method: "POST"
-    });
-  }
-  /**
-   * Starts a legacy workflow run synchronously without waiting for the workflow to complete
-   * @param params - Object containing the runId and triggerData
-   * @returns Promise containing success message
-   */
-  start(params) {
-    return this.request(`/api/workflows/legacy/${this.workflowId}/start?runId=${params.runId}`, {
-      method: "POST",
-      body: params?.triggerData
-    });
-  }
-  /**
-   * Resumes a suspended legacy workflow step synchronously without waiting for the workflow to complete
-   * @param stepId - ID of the step to resume
-   * @param runId - ID of the legacy workflow run
-   * @param context - Context to resume the legacy workflow with
-   * @returns Promise containing the legacy workflow resume results
-   */
-  resume({
-    stepId,
-    runId,
-    context
-  }) {
-    return this.request(`/api/workflows/legacy/${this.workflowId}/resume?runId=${runId}`, {
-      method: "POST",
-      body: {
-        stepId,
-        context
-      }
-    });
-  }
-  /**
-   * Starts a workflow run asynchronously and returns a promise that resolves when the workflow is complete
-   * @param params - Object containing the optional runId and triggerData
-   * @returns Promise containing the workflow execution results
-   */
-  startAsync(params) {
-    const searchParams = new URLSearchParams();
-    if (!!params?.runId) {
-      searchParams.set("runId", params.runId);
-    }
-    return this.request(`/api/workflows/legacy/${this.workflowId}/start-async?${searchParams.toString()}`, {
-      method: "POST",
-      body: params?.triggerData
-    });
-  }
-  /**
-   * Resumes a suspended legacy workflow step asynchronously and returns a promise that resolves when the workflow is complete
-   * @param params - Object containing the runId, stepId, and context
-   * @returns Promise containing the workflow resume results
-   */
-  resumeAsync(params) {
-    return this.request(`/api/workflows/legacy/${this.workflowId}/resume-async?runId=${params.runId}`, {
-      method: "POST",
-      body: {
-        stepId: params.stepId,
-        context: params.context
-      }
-    });
-  }
-  /**
-   * Creates an async generator that processes a readable stream and yields records
-   * separated by the Record Separator character (\x1E)
-   *
-   * @param stream - The readable stream to process
-   * @returns An async generator that yields parsed records
-   */
-  async *streamProcessor(stream) {
-    const reader = stream.getReader();
-    let doneReading = false;
-    let buffer = "";
-    try {
-      while (!doneReading) {
-        const { done, value } = await reader.read();
-        doneReading = done;
-        if (done && !value) continue;
-        try {
-          const decoded = value ? new TextDecoder().decode(value) : "";
-          const chunks = (buffer + decoded).split(RECORD_SEPARATOR);
-          buffer = chunks.pop() || "";
-          for (const chunk of chunks) {
-            if (chunk) {
-              if (typeof chunk === "string") {
-                try {
-                  const parsedChunk = JSON.parse(chunk);
-                  yield parsedChunk;
-                } catch {
-                }
-              }
-            }
-          }
-        } catch {
-        }
-      }
-      if (buffer) {
-        try {
-          yield JSON.parse(buffer);
-        } catch {
-        }
-      }
-    } finally {
-      reader.cancel().catch(() => {
-      });
-    }
-  }
-  /**
-   * Watches legacy workflow transitions in real-time
-   * @param runId - Optional run ID to filter the watch stream
-   * @returns AsyncGenerator that yields parsed records from the legacy workflow watch stream
-   */
-  async watch({ runId }, onRecord) {
-    const response = await this.request(`/api/workflows/legacy/${this.workflowId}/watch?runId=${runId}`, {
-      stream: true
-    });
-    if (!response.ok) {
-      throw new Error(`Failed to watch legacy workflow: ${response.statusText}`);
-    }
-    if (!response.body) {
-      throw new Error("Response body is null");
-    }
-    for await (const record of this.streamProcessor(response.body)) {
-      onRecord(record);
-    }
-  }
-};
-
 // src/resources/tool.ts
 var Tool = class extends BaseResource {
   constructor(options, toolId) {
@@ -1710,7 +1544,7 @@ var Tool = class extends BaseResource {
 };

 // src/resources/workflow.ts
-var
+var RECORD_SEPARATOR = "";
 var Workflow = class extends BaseResource {
   constructor(options, workflowId) {
     super(options);
@@ -1734,7 +1568,7 @@ var Workflow = class extends BaseResource {
         if (done && !value) continue;
         try {
          const decoded = value ? new TextDecoder().decode(value) : "";
-          const chunks = (buffer + decoded).split(
+          const chunks = (buffer + decoded).split(RECORD_SEPARATOR);
          buffer = chunks.pop() || "";
          for (const chunk of chunks) {
            if (chunk) {
@@ -1872,10 +1706,20 @@ var Workflow = class extends BaseResource {
     return {
       runId,
       start: async (p) => {
-        return this.start({
+        return this.start({
+          runId,
+          inputData: p.inputData,
+          runtimeContext: p.runtimeContext,
+          tracingOptions: p.tracingOptions
+        });
       },
       startAsync: async (p) => {
-        return this.startAsync({
+        return this.startAsync({
+          runId,
+          inputData: p.inputData,
+          runtimeContext: p.runtimeContext,
+          tracingOptions: p.tracingOptions
+        });
       },
       watch: async (onRecord) => {
         return this.watch({ runId }, onRecord);
@@ -1884,10 +1728,22 @@ var Workflow = class extends BaseResource {
         return this.stream({ runId, inputData: p.inputData, runtimeContext: p.runtimeContext });
       },
       resume: async (p) => {
-        return this.resume({
+        return this.resume({
+          runId,
+          step: p.step,
+          resumeData: p.resumeData,
+          runtimeContext: p.runtimeContext,
+          tracingOptions: p.tracingOptions
+        });
       },
       resumeAsync: async (p) => {
-        return this.resumeAsync({
+        return this.resumeAsync({
+          runId,
+          step: p.step,
+          resumeData: p.resumeData,
+          runtimeContext: p.runtimeContext,
+          tracingOptions: p.tracingOptions
+        });
       },
       resumeStreamVNext: async (p) => {
         return this.resumeStreamVNext({
@@ -1908,7 +1764,7 @@ var Workflow = class extends BaseResource {
     const runtimeContext = parseClientRuntimeContext(params.runtimeContext);
     return this.request(`/api/workflows/${this.workflowId}/start?runId=${params.runId}`, {
       method: "POST",
-      body: { inputData: params?.inputData, runtimeContext }
+      body: { inputData: params?.inputData, runtimeContext, tracingOptions: params.tracingOptions }
     });
   }
   /**
@@ -1920,6 +1776,7 @@ var Workflow = class extends BaseResource {
     step,
     runId,
     resumeData,
+    tracingOptions,
     ...rest
   }) {
     const runtimeContext = parseClientRuntimeContext(rest.runtimeContext);
@@ -1928,7 +1785,8 @@ var Workflow = class extends BaseResource {
       body: {
         step,
         resumeData,
-        runtimeContext
+        runtimeContext,
+        tracingOptions
       }
     });
   }
@@ -1945,7 +1803,7 @@ var Workflow = class extends BaseResource {
     const runtimeContext = parseClientRuntimeContext(params.runtimeContext);
     return this.request(`/api/workflows/${this.workflowId}/start-async?${searchParams.toString()}`, {
       method: "POST",
-      body: { inputData: params.inputData, runtimeContext }
+      body: { inputData: params.inputData, runtimeContext, tracingOptions: params.tracingOptions }
     });
   }
   /**
@@ -1963,7 +1821,7 @@ var Workflow = class extends BaseResource {
       `/api/workflows/${this.workflowId}/stream?${searchParams.toString()}`,
       {
         method: "POST",
-        body: { inputData: params.inputData, runtimeContext },
+        body: { inputData: params.inputData, runtimeContext, tracingOptions: params.tracingOptions },
         stream: true
       }
     );
@@ -1980,7 +1838,7 @@ var Workflow = class extends BaseResource {
       async transform(chunk, controller) {
         try {
           const decoded = new TextDecoder().decode(chunk);
-          const chunks = decoded.split(
+          const chunks = decoded.split(RECORD_SEPARATOR);
           for (const chunk2 of chunks) {
             if (chunk2) {
               const newChunk = failedChunk ? failedChunk + chunk2 : chunk2;
@@ -2027,7 +1885,7 @@ var Workflow = class extends BaseResource {
       async transform(chunk, controller) {
         try {
           const decoded = new TextDecoder().decode(chunk);
-          const chunks = decoded.split(
+          const chunks = decoded.split(RECORD_SEPARATOR);
           for (const chunk2 of chunks) {
             if (chunk2) {
               const newChunk = failedChunk ? failedChunk + chunk2 : chunk2;
@@ -2061,7 +1919,12 @@ var Workflow = class extends BaseResource {
       `/api/workflows/${this.workflowId}/streamVNext?${searchParams.toString()}`,
       {
         method: "POST",
-        body: {
+        body: {
+          inputData: params.inputData,
+          runtimeContext,
+          closeOnSuspend: params.closeOnSuspend,
+          tracingOptions: params.tracingOptions
+        },
         stream: true
       }
     );
@@ -2078,7 +1941,54 @@ var Workflow = class extends BaseResource {
       async transform(chunk, controller) {
         try {
           const decoded = new TextDecoder().decode(chunk);
-          const chunks = decoded.split(
+          const chunks = decoded.split(RECORD_SEPARATOR);
+          for (const chunk2 of chunks) {
+            if (chunk2) {
+              const newChunk = failedChunk ? failedChunk + chunk2 : chunk2;
+              try {
+                const parsedChunk = JSON.parse(newChunk);
+                controller.enqueue(parsedChunk);
+                failedChunk = void 0;
+              } catch {
+                failedChunk = newChunk;
+              }
+            }
+          }
+        } catch {
+        }
+      }
+    });
+    return response.body.pipeThrough(transformStream);
+  }
+  /**
+   * Observes workflow vNext stream for a workflow run
+   * @param params - Object containing the runId
+   * @returns Promise containing the workflow execution results
+   */
+  async observeStreamVNext(params) {
+    const searchParams = new URLSearchParams();
+    searchParams.set("runId", params.runId);
+    const response = await this.request(
+      `/api/workflows/${this.workflowId}/observe-streamVNext?${searchParams.toString()}`,
+      {
+        method: "POST",
+        stream: true
+      }
+    );
+    if (!response.ok) {
+      throw new Error(`Failed to observe stream vNext workflow: ${response.statusText}`);
+    }
+    if (!response.body) {
+      throw new Error("Response body is null");
+    }
+    let failedChunk = void 0;
+    const transformStream = new TransformStream({
+      start() {
+      },
+      async transform(chunk, controller) {
+        try {
+          const decoded = new TextDecoder().decode(chunk);
+          const chunks = decoded.split(RECORD_SEPARATOR);
           for (const chunk2 of chunks) {
             if (chunk2) {
               const newChunk = failedChunk ? failedChunk + chunk2 : chunk2;
@@ -2109,7 +2019,8 @@ var Workflow = class extends BaseResource {
       body: {
         step: params.step,
         resumeData: params.resumeData,
-        runtimeContext
+        runtimeContext,
+        tracingOptions: params.tracingOptions
       }
     });
   }
@@ -2118,16 +2029,54 @@ var Workflow = class extends BaseResource {
    * @param params - Object containing the runId, step, resumeData and runtimeContext
    * @returns Promise containing the workflow resume results
    */
-  resumeStreamVNext(params) {
+  async resumeStreamVNext(params) {
+    const searchParams = new URLSearchParams();
+    searchParams.set("runId", params.runId);
     const runtimeContext = parseClientRuntimeContext(params.runtimeContext);
-
-
-
-
-
-
+    const response = await this.request(
+      `/api/workflows/${this.workflowId}/resume-stream?${searchParams.toString()}`,
+      {
+        method: "POST",
+        body: {
+          step: params.step,
+          resumeData: params.resumeData,
+          runtimeContext,
+          tracingOptions: params.tracingOptions
+        },
+        stream: true
+      }
+    );
+    if (!response.ok) {
+      throw new Error(`Failed to stream vNext workflow: ${response.statusText}`);
+    }
+    if (!response.body) {
+      throw new Error("Response body is null");
+    }
+    let failedChunk = void 0;
+    const transformStream = new TransformStream({
+      start() {
+      },
+      async transform(chunk, controller) {
+        try {
+          const decoded = new TextDecoder().decode(chunk);
+          const chunks = decoded.split(RECORD_SEPARATOR);
+          for (const chunk2 of chunks) {
+            if (chunk2) {
+              const newChunk = failedChunk ? failedChunk + chunk2 : chunk2;
+              try {
+                const parsedChunk = JSON.parse(newChunk);
+                controller.enqueue(parsedChunk);
+                failedChunk = void 0;
+              } catch {
+                failedChunk = newChunk;
+              }
+            }
+          }
+        } catch {
+        }
       }
     });
+    return response.body.pipeThrough(transformStream);
   }
   /**
    * Watches workflow transitions in real-time
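Across the Workflow hunks above, every start/resume/stream call now forwards a `tracingOptions` field to the server, `observeStreamVNext` attaches to an already-running run, and `resumeStreamVNext` posts to the new `/resume-stream` endpoint and returns a stream of already-parsed records. A hedged TypeScript sketch of how a caller might use this; the workflow id, run id, step id, and the `tracingOptions` payload are placeholders, and the exact tracing-option shape is defined in `@mastra/core`, not here:

```ts
const workflow = client.getWorkflow("my-workflow"); // placeholder id

// tracingOptions is passed straight through to the server alongside inputData.
const result = await workflow.startAsync({
  runId: "run-123",                                          // placeholder
  inputData: { city: "Berlin" },                             // placeholder
  tracingOptions: { metadata: { source: "docs-example" } },  // assumed shape
});

// Resuming as a stream now yields parsed workflow chunks.
const stream = await workflow.resumeStreamVNext({
  runId: "run-123",
  step: "approval",                // placeholder step id
  resumeData: { approved: true },  // placeholder
});
const reader = stream.getReader();
for (;;) {
  const { done, value } = await reader.read();
  if (done) break;
  console.log(value); // parsed record, e.g. step/run status chunks
}
```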
@@ -2165,7 +2114,7 @@ var Workflow = class extends BaseResource {
       async start(controller) {
         try {
           for await (const record of records) {
-            const json = JSON.stringify(record) +
+            const json = JSON.stringify(record) + RECORD_SEPARATOR;
             controller.enqueue(encoder.encode(json));
           }
           controller.close();
@@ -2288,7 +2237,7 @@ var MCPTool = class extends BaseResource {
 };

 // src/resources/agent-builder.ts
-var
+var RECORD_SEPARATOR2 = "";
 var AgentBuilder = class extends BaseResource {
   constructor(options, actionId) {
     super(options);
@@ -2428,7 +2377,7 @@ var AgentBuilder = class extends BaseResource {
         if (done && !value) continue;
         try {
           const decoded = value ? new TextDecoder().decode(value) : "";
-          const chunks = (buffer + decoded).split(
+          const chunks = (buffer + decoded).split(RECORD_SEPARATOR2);
           buffer = chunks.pop() || "";
           for (const chunk of chunks) {
             if (chunk) {
@@ -2485,7 +2434,7 @@ var AgentBuilder = class extends BaseResource {
       async transform(chunk, controller) {
         try {
           const decoded = new TextDecoder().decode(chunk);
-          const chunks = decoded.split(
+          const chunks = decoded.split(RECORD_SEPARATOR2);
           for (const chunk2 of chunks) {
             if (chunk2) {
               const newChunk = failedChunk ? failedChunk + chunk2 : chunk2;
@@ -2534,7 +2483,7 @@ var AgentBuilder = class extends BaseResource {
       async transform(chunk, controller) {
         try {
           const decoded = new TextDecoder().decode(chunk);
-          const chunks = decoded.split(
+          const chunks = decoded.split(RECORD_SEPARATOR2);
           for (const chunk2 of chunks) {
             if (chunk2) {
               const newChunk = failedChunk ? failedChunk + chunk2 : chunk2;
@@ -2705,6 +2654,31 @@ var Observability = class extends BaseResource {
     const queryString = searchParams.toString();
     return this.request(`/api/observability/traces${queryString ? `?${queryString}` : ""}`);
   }
+  /**
+   * Retrieves scores by trace ID and span ID
+   * @param params - Parameters containing trace ID, span ID, and pagination options
+   * @returns Promise containing scores and pagination info
+   */
+  getScoresBySpan(params) {
+    const { traceId, spanId, page, perPage } = params;
+    const searchParams = new URLSearchParams();
+    if (page !== void 0) {
+      searchParams.set("page", String(page));
+    }
+    if (perPage !== void 0) {
+      searchParams.set("perPage", String(perPage));
+    }
+    const queryString = searchParams.toString();
+    return this.request(
+      `/api/observability/traces/${encodeURIComponent(traceId)}/${encodeURIComponent(spanId)}/scores${queryString ? `?${queryString}` : ""}`
+    );
+  }
+  score(params) {
+    return this.request(`/api/observability/traces/score`, {
+      method: "POST",
+      body: { ...params }
+    });
+  }
 };

 // src/resources/network-memory-thread.ts
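The observability resource gains span-level score retrieval and a `score` call whose body is forwarded verbatim to `/api/observability/traces/score`. Both are also re-exported on `MastraClient` as `getScoresBySpan()`/`score()` in a later hunk. A hedged TypeScript sketch; the trace and span ids are placeholders and the score payload is left to the scores API:

```ts
// Paginated scores attached to one span of one trace.
const spanScores = await client.getScoresBySpan({
  traceId: "trace-abc", // placeholder
  spanId: "span-def",   // placeholder
  page: 0,
  perPage: 20,
});

// Record a score against a trace/span; the object is posted as-is.
await client.score({
  /* scorer and target fields as expected by the scores API (not shown here) */
});
```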
@@ -2770,145 +2744,6 @@ var NetworkMemoryThread = class extends BaseResource {
   }
 };

-// src/resources/vNextNetwork.ts
-var RECORD_SEPARATOR4 = "";
-var VNextNetwork = class extends BaseResource {
-  constructor(options, networkId) {
-    super(options);
-    this.networkId = networkId;
-  }
-  /**
-   * Retrieves details about the network
-   * @param runtimeContext - Optional runtime context to pass as query parameter
-   * @returns Promise containing vNext network details
-   */
-  details(runtimeContext) {
-    return this.request(`/api/networks/v-next/${this.networkId}${runtimeContextQueryString(runtimeContext)}`);
-  }
-  /**
-   * Generates a response from the v-next network
-   * @param params - Generation parameters including message
-   * @returns Promise containing the generated response
-   */
-  generate(params) {
-    return this.request(`/api/networks/v-next/${this.networkId}/generate`, {
-      method: "POST",
-      body: {
-        ...params,
-        runtimeContext: parseClientRuntimeContext(params.runtimeContext)
-      }
-    });
-  }
-  /**
-   * Generates a response from the v-next network using multiple primitives
-   * @param params - Generation parameters including message
-   * @returns Promise containing the generated response
-   */
-  loop(params) {
-    return this.request(`/api/networks/v-next/${this.networkId}/loop`, {
-      method: "POST",
-      body: {
-        ...params,
-        runtimeContext: parseClientRuntimeContext(params.runtimeContext)
-      }
-    });
-  }
-  async *streamProcessor(stream) {
-    const reader = stream.getReader();
-    let doneReading = false;
-    let buffer = "";
-    try {
-      while (!doneReading) {
-        const { done, value } = await reader.read();
-        doneReading = done;
-        if (done && !value) continue;
-        try {
-          const decoded = value ? new TextDecoder().decode(value) : "";
-          const chunks = (buffer + decoded).split(RECORD_SEPARATOR4);
-          buffer = chunks.pop() || "";
-          for (const chunk of chunks) {
-            if (chunk) {
-              if (typeof chunk === "string") {
-                try {
-                  const parsedChunk = JSON.parse(chunk);
-                  yield parsedChunk;
-                } catch {
-                }
-              }
-            }
-          }
-        } catch {
-        }
-      }
-      if (buffer) {
-        try {
-          yield JSON.parse(buffer);
-        } catch {
-        }
-      }
-    } finally {
-      reader.cancel().catch(() => {
-      });
-    }
-  }
-  /**
-   * Streams a response from the v-next network
-   * @param params - Stream parameters including message
-   * @returns Promise containing the results
-   */
-  async stream(params, onRecord) {
-    const response = await this.request(`/api/networks/v-next/${this.networkId}/stream`, {
-      method: "POST",
-      body: {
-        ...params,
-        runtimeContext: parseClientRuntimeContext(params.runtimeContext)
-      },
-      stream: true
-    });
-    if (!response.ok) {
-      throw new Error(`Failed to stream vNext network: ${response.statusText}`);
-    }
-    if (!response.body) {
-      throw new Error("Response body is null");
-    }
-    for await (const record of this.streamProcessor(response.body)) {
-      if (typeof record === "string") {
-        onRecord(JSON.parse(record));
-      } else {
-        onRecord(record);
-      }
-    }
-  }
-  /**
-   * Streams a response from the v-next network loop
-   * @param params - Stream parameters including message
-   * @returns Promise containing the results
-   */
-  async loopStream(params, onRecord) {
-    const response = await this.request(`/api/networks/v-next/${this.networkId}/loop-stream`, {
-      method: "POST",
-      body: {
-        ...params,
-        runtimeContext: parseClientRuntimeContext(params.runtimeContext)
-      },
-      stream: true
-    });
-    if (!response.ok) {
-      throw new Error(`Failed to stream vNext network loop: ${response.statusText}`);
-    }
-    if (!response.body) {
-      throw new Error("Response body is null");
-    }
-    for await (const record of this.streamProcessor(response.body)) {
-      if (typeof record === "string") {
-        onRecord(JSON.parse(record));
-      } else {
-        onRecord(record);
-      }
-    }
-  }
-};
-
 // src/client.ts
 var MastraClient = class extends BaseResource {
   observability;
@@ -2946,6 +2781,14 @@ var MastraClient = class extends BaseResource {
   getMemoryThreads(params) {
     return this.request(`/api/memory/threads?resourceid=${params.resourceId}&agentId=${params.agentId}`);
   }
+  /**
+   * Retrieves memory config for a resource
+   * @param params - Parameters containing the resource ID
+   * @returns Promise containing array of memory threads
+   */
+  getMemoryConfig(params) {
+    return this.request(`/api/memory/config?agentId=${params.agentId}`);
+  }
   /**
    * Creates a new memory thread
    * @param params - Parameters for creating the memory thread
@@ -2962,6 +2805,24 @@ var MastraClient = class extends BaseResource {
   getMemoryThread(threadId, agentId) {
     return new MemoryThread(this.options, threadId, agentId);
   }
+  getThreadMessages(threadId, opts = {}) {
+    let url = "";
+    if (opts.agentId) {
+      url = `/api/memory/threads/${threadId}/messages?agentId=${opts.agentId}`;
+    } else if (opts.networkId) {
+      url = `/api/memory/network/threads/${threadId}/messages?networkId=${opts.networkId}`;
+    }
+    return this.request(url);
+  }
+  deleteThread(threadId, opts = {}) {
+    let url = "";
+    if (opts.agentId) {
+      url = `/api/memory/threads/${threadId}?agentId=${opts.agentId}`;
+    } else if (opts.networkId) {
+      url = `/api/memory/network/threads/${threadId}?networkId=${opts.networkId}`;
+    }
+    return this.request(url, { method: "DELETE" });
+  }
   /**
    * Saves messages to memory
    * @param params - Parameters containing messages to save
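The client also picks up memory helpers: `getMemoryConfig`, plus thread-level `getThreadMessages` and `deleteThread` that accept either an `agentId` or a `networkId`. A hedged TypeScript sketch with placeholder ids:

```ts
// Memory configuration for the agent that backs a set of threads.
const memoryConfig = await client.getMemoryConfig({ agentId: "my-agent" }); // placeholder id

// Messages for one thread; pass { networkId } instead for network-scoped threads.
const messages = await client.getThreadMessages("thread-42", { agentId: "my-agent" });

// Remove the thread entirely.
await client.deleteThread("thread-42", { agentId: "my-agent" });
```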
@@ -3044,21 +2905,6 @@ var MastraClient = class extends BaseResource {
   getTool(toolId) {
     return new Tool(this.options, toolId);
   }
-  /**
-   * Retrieves all available legacy workflows
-   * @returns Promise containing map of legacy workflow IDs to legacy workflow details
-   */
-  getLegacyWorkflows() {
-    return this.request("/api/workflows/legacy");
-  }
-  /**
-   * Gets a legacy workflow instance by ID
-   * @param workflowId - ID of the legacy workflow to retrieve
-   * @returns Legacy Workflow instance
-   */
-  getLegacyWorkflow(workflowId) {
-    return new LegacyWorkflow(this.options, workflowId);
-  }
   /**
    * Retrieves all available workflows
    * @param runtimeContext - Optional runtime context to pass as query parameter
@@ -3239,21 +3085,6 @@ var MastraClient = class extends BaseResource {
     return this.request(`/api/telemetry`);
   }
   }
-  /**
-   * Retrieves all available vNext networks
-   * @returns Promise containing map of vNext network IDs to vNext network details
-   */
-  getVNextNetworks() {
-    return this.request("/api/networks/v-next");
-  }
-  /**
-   * Gets a vNext network instance by ID
-   * @param networkId - ID of the vNext network to retrieve
-   * @returns vNext Network instance
-   */
-  getVNextNetwork(networkId) {
-    return new VNextNetwork(this.options, networkId);
-  }
   /**
    * Retrieves a list of available MCP servers.
    * @param params - Optional parameters for pagination (limit, offset).
@@ -3358,7 +3189,7 @@ var MastraClient = class extends BaseResource {
    * @returns Promise containing the scorer
    */
   getScorer(scorerId) {
-    return this.request(`/api/scores/scorers/${scorerId}`);
+    return this.request(`/api/scores/scorers/${encodeURIComponent(scorerId)}`);
   }
   getScoresByScorerId(params) {
     const { page, perPage, scorerId, entityId, entityType } = params;
@@ -3376,7 +3207,7 @@ var MastraClient = class extends BaseResource {
       searchParams.set("perPage", String(perPage));
     }
     const queryString = searchParams.toString();
-    return this.request(`/api/scores/scorer/${scorerId}${queryString ? `?${queryString}` : ""}`);
+    return this.request(`/api/scores/scorer/${encodeURIComponent(scorerId)}${queryString ? `?${queryString}` : ""}`);
   }
   /**
    * Retrieves scores by run ID
@@ -3393,7 +3224,7 @@ var MastraClient = class extends BaseResource {
       searchParams.set("perPage", String(perPage));
     }
     const queryString = searchParams.toString();
-    return this.request(`/api/scores/run/${runId}${queryString ? `?${queryString}` : ""}`);
+    return this.request(`/api/scores/run/${encodeURIComponent(runId)}${queryString ? `?${queryString}` : ""}`);
   }
   /**
    * Retrieves scores by entity ID and type
@@ -3410,7 +3241,9 @@ var MastraClient = class extends BaseResource {
       searchParams.set("perPage", String(perPage));
     }
     const queryString = searchParams.toString();
-    return this.request(
+    return this.request(
+      `/api/scores/entity/${encodeURIComponent(entityType)}/${encodeURIComponent(entityId)}${queryString ? `?${queryString}` : ""}`
+    );
   }
   /**
    * Saves a score
@@ -3436,6 +3269,12 @@ var MastraClient = class extends BaseResource {
   getAITraces(params) {
     return this.observability.getTraces(params);
   }
+  getScoresBySpan(params) {
+    return this.observability.getScoresBySpan(params);
+  }
+  score(params) {
+    return this.observability.score(params);
+  }
 };

 // src/tools.ts