@mastra/client-js 0.0.0-vector-sources-20250516175436 → 0.0.0-vector-query-tool-provider-options-20250828222356
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +18 -0
- package/CHANGELOG.md +1318 -2
- package/LICENSE.md +11 -42
- package/README.md +2 -1
- package/dist/adapters/agui.d.ts +23 -0
- package/dist/adapters/agui.d.ts.map +1 -0
- package/dist/client.d.ts +274 -0
- package/dist/client.d.ts.map +1 -0
- package/dist/example.d.ts +2 -0
- package/dist/example.d.ts.map +1 -0
- package/dist/index.cjs +1801 -137
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.ts +4 -883
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +1803 -139
- package/dist/index.js.map +1 -0
- package/dist/resources/a2a.d.ts +41 -0
- package/dist/resources/a2a.d.ts.map +1 -0
- package/dist/resources/agent.d.ts +130 -0
- package/dist/resources/agent.d.ts.map +1 -0
- package/dist/resources/base.d.ts +13 -0
- package/dist/resources/base.d.ts.map +1 -0
- package/dist/resources/index.d.ts +12 -0
- package/dist/resources/index.d.ts.map +1 -0
- package/dist/resources/legacy-workflow.d.ts +87 -0
- package/dist/resources/legacy-workflow.d.ts.map +1 -0
- package/dist/resources/mcp-tool.d.ts +27 -0
- package/dist/resources/mcp-tool.d.ts.map +1 -0
- package/dist/resources/memory-thread.d.ts +53 -0
- package/dist/resources/memory-thread.d.ts.map +1 -0
- package/dist/resources/network-memory-thread.d.ts +47 -0
- package/dist/resources/network-memory-thread.d.ts.map +1 -0
- package/dist/resources/network.d.ts +30 -0
- package/dist/resources/network.d.ts.map +1 -0
- package/dist/resources/observability.d.ts +19 -0
- package/dist/resources/observability.d.ts.map +1 -0
- package/dist/resources/tool.d.ts +23 -0
- package/dist/resources/tool.d.ts.map +1 -0
- package/dist/resources/vNextNetwork.d.ts +42 -0
- package/dist/resources/vNextNetwork.d.ts.map +1 -0
- package/dist/resources/vector.d.ts +48 -0
- package/dist/resources/vector.d.ts.map +1 -0
- package/dist/resources/workflow.d.ts +154 -0
- package/dist/resources/workflow.d.ts.map +1 -0
- package/dist/types.d.ts +449 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/utils/index.d.ts +3 -0
- package/dist/utils/index.d.ts.map +1 -0
- package/dist/utils/process-client-tools.d.ts +3 -0
- package/dist/utils/process-client-tools.d.ts.map +1 -0
- package/dist/utils/process-mastra-stream.d.ts +7 -0
- package/dist/utils/process-mastra-stream.d.ts.map +1 -0
- package/dist/utils/zod-to-json-schema.d.ts +3 -0
- package/dist/utils/zod-to-json-schema.d.ts.map +1 -0
- package/eslint.config.js +6 -1
- package/integration-tests/agui-adapter.test.ts +122 -0
- package/integration-tests/package.json +18 -0
- package/integration-tests/src/mastra/index.ts +35 -0
- package/integration-tests/vitest.config.ts +9 -0
- package/package.json +32 -19
- package/src/adapters/agui.test.ts +116 -3
- package/src/adapters/agui.ts +30 -12
- package/src/client.ts +333 -24
- package/src/example.ts +46 -15
- package/src/index.test.ts +429 -6
- package/src/index.ts +1 -0
- package/src/resources/a2a.ts +35 -25
- package/src/resources/agent.ts +1284 -20
- package/src/resources/base.ts +8 -1
- package/src/resources/index.ts +3 -2
- package/src/resources/{vnext-workflow.ts → legacy-workflow.ts} +124 -143
- package/src/resources/memory-thread.test.ts +285 -0
- package/src/resources/memory-thread.ts +37 -1
- package/src/resources/network-memory-thread.test.ts +269 -0
- package/src/resources/network-memory-thread.ts +81 -0
- package/src/resources/network.ts +7 -7
- package/src/resources/observability.ts +53 -0
- package/src/resources/tool.ts +4 -3
- package/src/resources/vNextNetwork.ts +194 -0
- package/src/resources/workflow.ts +255 -96
- package/src/types.ts +262 -36
- package/src/utils/index.ts +11 -0
- package/src/utils/process-client-tools.ts +32 -0
- package/src/utils/process-mastra-stream.test.ts +353 -0
- package/src/utils/process-mastra-stream.ts +49 -0
- package/src/utils/zod-to-json-schema.ts +23 -3
- package/src/v2-messages.test.ts +180 -0
- package/tsconfig.build.json +9 -0
- package/tsconfig.json +2 -2
- package/tsup.config.ts +17 -0
- package/dist/index.d.cts +0 -883
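The expanded diff below covers the compiled bundle (package/dist/index.js). Among other changes it adds a vNext generate/stream surface (`generateVNext`, `streamVNext`, SSE parsing via `processMastraStream`), client-side tool execution (`processClientTools`, `executeToolCallAndRespond`), request `credentials`/`abortSignal` support, voice `getListener()`, agent `updateModel()`, memory-thread `getMessagesPaginated()` and `deleteMessages()`, and moves the old workflow client to `LegacyWorkflow` under `/api/workflows/legacy/*`. As a rough orientation sketch only (not part of the published diff), hypothetical usage of the new surface might look like the snippet below; the client accessors and parameter shapes are assumptions inferred from the compiled output, not the package's documented API.

```ts
import { MastraClient } from '@mastra/client-js';

// Hypothetical usage sketch inferred from the compiled output below.
// Client accessors and parameter shapes are assumptions, not documented API.
async function main() {
  const client = new MastraClient({ baseUrl: 'http://localhost:4111' });
  const agent = client.getAgent('weather-agent');

  // streamVNext returns a Response whose processDataStream feeds parsed
  // SSE chunks ({ type, payload }) to onChunk via processMastraStream.
  const res = await agent.streamVNext({ messages: 'What is the weather in Paris?' });
  await res.processDataStream({
    onChunk: async (chunk) => {
      if (chunk.type === 'text-delta') process.stdout.write(chunk.payload.text);
    },
  });

  // New memory-thread helpers: paginated reads and bulk deletes.
  const thread = client.getMemoryThread('thread-1', 'weather-agent');
  const page = await thread.getMessagesPaginated({ page: 0, perPage: 20 });
  await thread.deleteMessages(page.messages.map((m: { id: string }) => m.id));

  // Other additions surfaced in this diff.
  await agent.voice.getListener();
  await agent.updateModel({ provider: 'openai', modelId: 'gpt-4o' });
}

main().catch(console.error);
```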
package/dist/index.js
CHANGED
@@ -1,7 +1,10 @@
 import { AbstractAgent, EventType } from '@ag-ui/client';
 import { Observable } from 'rxjs';
-import { processDataStream } from '@ai-sdk/ui-utils';
-import {
+import { processDataStream, parsePartialJson } from '@ai-sdk/ui-utils';
+import { v4 } from '@lukeed/uuid';
+import { RuntimeContext } from '@mastra/core/runtime-context';
+import { isVercelTool } from '@mastra/core/tools/is-vercel-tool';
+import { z } from 'zod';
 import originalZodToJsonSchema from 'zod-to-json-schema';
 
 // src/adapters/agui.ts
@@ -142,6 +145,12 @@ function generateUUID() {
 }
 function convertMessagesToMastraMessages(messages) {
   const result = [];
+  const toolCallsWithResults = /* @__PURE__ */ new Set();
+  for (const message of messages) {
+    if (message.role === "tool" && message.toolCallId) {
+      toolCallsWithResults.add(message.toolCallId);
+    }
+  }
   for (const message of messages) {
     if (message.role === "assistant") {
       const parts = message.content ? [{ type: "text", text: message.content }] : [];
@@ -158,15 +167,22 @@ function convertMessagesToMastraMessages(messages) {
         content: parts
       });
       if (message.toolCalls?.length) {
-
-
-
-
-
-
-
-
-
+        for (const toolCall of message.toolCalls) {
+          if (!toolCallsWithResults.has(toolCall.id)) {
+            result.push({
+              role: "tool",
+              content: [
+                {
+                  type: "tool-result",
+                  toolCallId: toolCall.id,
+                  toolName: toolCall.function.name,
+                  result: JSON.parse(toolCall.function.arguments)
+                  // This is still wrong but matches test expectations
+                }
+              ]
+            });
+          }
+        }
       }
     } else if (message.role === "user") {
       result.push({
@@ -179,8 +195,9 @@ function convertMessagesToMastraMessages(messages) {
         content: [
           {
             type: "tool-result",
-            toolCallId: message.toolCallId,
+            toolCallId: message.toolCallId || "unknown",
             toolName: "unknown",
+            // toolName is not available in tool messages from CopilotKit
             result: message.content
           }
         ]
@@ -189,13 +206,94 @@ function convertMessagesToMastraMessages(messages) {
   }
   return result;
 }
+function parseClientRuntimeContext(runtimeContext) {
+  if (runtimeContext) {
+    if (runtimeContext instanceof RuntimeContext) {
+      return Object.fromEntries(runtimeContext.entries());
+    }
+    return runtimeContext;
+  }
+  return void 0;
+}
+function isZodType(value) {
+  return typeof value === "object" && value !== null && "_def" in value && "parse" in value && typeof value.parse === "function" && "safeParse" in value && typeof value.safeParse === "function";
+}
 function zodToJsonSchema(zodSchema) {
-  if (!(zodSchema
+  if (!isZodType(zodSchema)) {
     return zodSchema;
   }
+  if ("toJSONSchema" in z) {
+    const fn = "toJSONSchema";
+    return z[fn].call(z, zodSchema);
+  }
   return originalZodToJsonSchema(zodSchema, { $refStrategy: "none" });
 }
 
+// src/utils/process-client-tools.ts
+function processClientTools(clientTools) {
+  if (!clientTools) {
+    return void 0;
+  }
+  return Object.fromEntries(
+    Object.entries(clientTools).map(([key, value]) => {
+      if (isVercelTool(value)) {
+        return [
+          key,
+          {
+            ...value,
+            parameters: value.parameters ? zodToJsonSchema(value.parameters) : void 0
+          }
+        ];
+      } else {
+        return [
+          key,
+          {
+            ...value,
+            inputSchema: value.inputSchema ? zodToJsonSchema(value.inputSchema) : void 0,
+            outputSchema: value.outputSchema ? zodToJsonSchema(value.outputSchema) : void 0
+          }
+        ];
+      }
+    })
+  );
+}
+
+// src/utils/process-mastra-stream.ts
+async function processMastraStream({
+  stream,
+  onChunk
+}) {
+  const reader = stream.getReader();
+  const decoder = new TextDecoder();
+  let buffer = "";
+  try {
+    while (true) {
+      const { done, value } = await reader.read();
+      if (done) break;
+      buffer += decoder.decode(value, { stream: true });
+      const lines = buffer.split("\n\n");
+      buffer = lines.pop() || "";
+      for (const line of lines) {
+        if (line.startsWith("data: ")) {
+          const data = line.slice(6);
+          if (data === "[DONE]") {
+            console.log("\u{1F3C1} Stream finished");
+            return;
+          }
+          try {
+            const json = JSON.parse(data);
+            await onChunk(json);
+          } catch (error) {
+            console.error("\u274C JSON parse error:", error, "Data:", data);
+          }
+        }
+      }
+    }
+  } finally {
+    reader.releaseLock();
+  }
+}
+
 // src/resources/base.ts
 var BaseResource = class {
   options;
@@ -210,18 +308,21 @@ var BaseResource = class {
    */
   async request(path, options = {}) {
     let lastError = null;
-    const { baseUrl, retries = 3, backoffMs = 100, maxBackoffMs = 1e3, headers = {} } = this.options;
+    const { baseUrl, retries = 3, backoffMs = 100, maxBackoffMs = 1e3, headers = {}, credentials } = this.options;
     let delay = backoffMs;
     for (let attempt = 0; attempt <= retries; attempt++) {
       try {
         const response = await fetch(`${baseUrl.replace(/\/$/, "")}${path}`, {
           ...options,
           headers: {
+            ...options.body && !(options.body instanceof FormData) && (options.method === "POST" || options.method === "PUT") ? { "content-type": "application/json" } : {},
            ...headers,
            ...options.headers
            // TODO: Bring this back once we figure out what we/users need to do to make this work with cross-origin requests
            // 'x-mastra-client-type': 'js',
           },
+          signal: this.options.abortSignal,
+          credentials: options.credentials ?? credentials,
           body: options.body instanceof FormData ? options.body : options.body ? JSON.stringify(options.body) : void 0
         });
         if (!response.ok) {
@@ -256,6 +357,63 @@ var BaseResource = class {
 };
 
 // src/resources/agent.ts
+async function executeToolCallAndRespond({
+  response,
+  params,
+  runId,
+  resourceId,
+  threadId,
+  runtimeContext,
+  respondFn
+}) {
+  if (response.finishReason === "tool-calls") {
+    const toolCalls = response.toolCalls;
+    if (!toolCalls || !Array.isArray(toolCalls)) {
+      return response;
+    }
+    for (const toolCall of toolCalls) {
+      const clientTool = params.clientTools?.[toolCall.toolName];
+      if (clientTool && clientTool.execute) {
+        const result = await clientTool.execute(
+          {
+            context: toolCall?.args,
+            runId,
+            resourceId,
+            threadId,
+            runtimeContext,
+            tracingContext: { currentSpan: void 0 }
+          },
+          {
+            messages: response.messages,
+            toolCallId: toolCall?.toolCallId
+          }
+        );
+        const updatedMessages = [
+          {
+            role: "user",
+            content: params.messages
+          },
+          ...response.response.messages,
+          {
+            role: "tool",
+            content: [
+              {
+                type: "tool-result",
+                toolCallId: toolCall.toolCallId,
+                toolName: toolCall.toolName,
+                result
+              }
+            ]
+          }
+        ];
+        return respondFn({
+          ...params,
+          messages: updatedMessages
+        });
+      }
+    }
+  }
+}
 var AgentVoice = class extends BaseResource {
   constructor(options, agentId) {
     super(options);
@@ -302,6 +460,13 @@ var AgentVoice = class extends BaseResource {
   getSpeakers() {
     return this.request(`/api/agents/${this.agentId}/voice/speakers`);
   }
+  /**
+   * Get the listener configuration for the agent's voice provider
+   * @returns Promise containing a check if the agent has listening capabilities
+   */
+  getListener() {
+    return this.request(`/api/agents/${this.agentId}/voice/listener`);
+  }
 };
 var Agent = class extends BaseResource {
   constructor(options, agentId) {
@@ -317,22 +482,360 @@ var Agent = class extends BaseResource {
   details() {
     return this.request(`/api/agents/${this.agentId}`);
   }
-
-   * Generates a response from the agent
-   * @param params - Generation parameters including prompt
-   * @returns Promise containing the generated response
-   */
-  generate(params) {
+  async generate(params) {
     const processedParams = {
       ...params,
       output: params.output ? zodToJsonSchema(params.output) : void 0,
       experimental_output: params.experimental_output ? zodToJsonSchema(params.experimental_output) : void 0,
-      runtimeContext:
+      runtimeContext: parseClientRuntimeContext(params.runtimeContext),
+      clientTools: processClientTools(params.clientTools)
     };
-
-
-
+    const { runId, resourceId, threadId, runtimeContext } = processedParams;
+    const response = await this.request(
+      `/api/agents/${this.agentId}/generate`,
+      {
+        method: "POST",
+        body: processedParams
+      }
+    );
+    if (response.finishReason === "tool-calls") {
+      const toolCalls = response.toolCalls;
+      if (!toolCalls || !Array.isArray(toolCalls)) {
+        return response;
+      }
+      for (const toolCall of toolCalls) {
+        const clientTool = params.clientTools?.[toolCall.toolName];
+        if (clientTool && clientTool.execute) {
+          const result = await clientTool.execute(
+            {
+              context: toolCall?.args,
+              runId,
+              resourceId,
+              threadId,
+              runtimeContext,
+              tracingContext: { currentSpan: void 0 }
+            },
+            {
+              messages: response.messages,
+              toolCallId: toolCall?.toolCallId
+            }
+          );
+          const updatedMessages = [
+            {
+              role: "user",
+              content: params.messages
+            },
+            ...response.response.messages,
+            {
+              role: "tool",
+              content: [
+                {
+                  type: "tool-result",
+                  toolCallId: toolCall.toolCallId,
+                  toolName: toolCall.toolName,
+                  result
+                }
+              ]
+            }
+          ];
+          return this.generate({
+            ...params,
+            messages: updatedMessages
+          });
+        }
+      }
+    }
+    return response;
+  }
+  async generateVNext(params) {
+    const processedParams = {
+      ...params,
+      output: params.output ? zodToJsonSchema(params.output) : void 0,
+      runtimeContext: parseClientRuntimeContext(params.runtimeContext),
+      clientTools: processClientTools(params.clientTools)
+    };
+    const { runId, resourceId, threadId, runtimeContext } = processedParams;
+    const response = await this.request(
+      `/api/agents/${this.agentId}/generate/vnext`,
+      {
+        method: "POST",
+        body: processedParams
+      }
+    );
+    if (response.finishReason === "tool-calls") {
+      return executeToolCallAndRespond({
+        response,
+        params,
+        runId,
+        resourceId,
+        threadId,
+        runtimeContext,
+        respondFn: this.generateVNext.bind(this)
+      });
+    }
+    return response;
+  }
+  async processChatResponse({
+    stream,
+    update,
+    onToolCall,
+    onFinish,
+    getCurrentDate = () => /* @__PURE__ */ new Date(),
+    lastMessage
+  }) {
+    const replaceLastMessage = lastMessage?.role === "assistant";
+    let step = replaceLastMessage ? 1 + // find max step in existing tool invocations:
+    (lastMessage.toolInvocations?.reduce((max, toolInvocation) => {
+      return Math.max(max, toolInvocation.step ?? 0);
+    }, 0) ?? 0) : 0;
+    const message = replaceLastMessage ? structuredClone(lastMessage) : {
+      id: v4(),
+      createdAt: getCurrentDate(),
+      role: "assistant",
+      content: "",
+      parts: []
+    };
+    let currentTextPart = void 0;
+    let currentReasoningPart = void 0;
+    let currentReasoningTextDetail = void 0;
+    function updateToolInvocationPart(toolCallId, invocation) {
+      const part = message.parts.find(
+        (part2) => part2.type === "tool-invocation" && part2.toolInvocation.toolCallId === toolCallId
+      );
+      if (part != null) {
+        part.toolInvocation = invocation;
+      } else {
+        message.parts.push({
+          type: "tool-invocation",
+          toolInvocation: invocation
+        });
+      }
+    }
+    const data = [];
+    let messageAnnotations = replaceLastMessage ? lastMessage?.annotations : void 0;
+    const partialToolCalls = {};
+    let usage = {
+      completionTokens: NaN,
+      promptTokens: NaN,
+      totalTokens: NaN
+    };
+    let finishReason = "unknown";
+    function execUpdate() {
+      const copiedData = [...data];
+      if (messageAnnotations?.length) {
+        message.annotations = messageAnnotations;
+      }
+      const copiedMessage = {
+        // deep copy the message to ensure that deep changes (msg attachments) are updated
+        // with SolidJS. SolidJS uses referential integration of sub-objects to detect changes.
+        ...structuredClone(message),
+        // add a revision id to ensure that the message is updated with SWR. SWR uses a
+        // hashing approach by default to detect changes, but it only works for shallow
+        // changes. This is why we need to add a revision id to ensure that the message
+        // is updated with SWR (without it, the changes get stuck in SWR and are not
+        // forwarded to rendering):
+        revisionId: v4()
+      };
+      update({
+        message: copiedMessage,
+        data: copiedData,
+        replaceLastMessage
+      });
+    }
+    await processDataStream({
+      stream,
+      onTextPart(value) {
+        if (currentTextPart == null) {
+          currentTextPart = {
+            type: "text",
+            text: value
+          };
+          message.parts.push(currentTextPart);
+        } else {
+          currentTextPart.text += value;
+        }
+        message.content += value;
+        execUpdate();
+      },
+      onReasoningPart(value) {
+        if (currentReasoningTextDetail == null) {
+          currentReasoningTextDetail = { type: "text", text: value };
+          if (currentReasoningPart != null) {
+            currentReasoningPart.details.push(currentReasoningTextDetail);
+          }
+        } else {
+          currentReasoningTextDetail.text += value;
+        }
+        if (currentReasoningPart == null) {
+          currentReasoningPart = {
+            type: "reasoning",
+            reasoning: value,
+            details: [currentReasoningTextDetail]
+          };
+          message.parts.push(currentReasoningPart);
+        } else {
+          currentReasoningPart.reasoning += value;
+        }
+        message.reasoning = (message.reasoning ?? "") + value;
+        execUpdate();
+      },
+      onReasoningSignaturePart(value) {
+        if (currentReasoningTextDetail != null) {
+          currentReasoningTextDetail.signature = value.signature;
+        }
+      },
+      onRedactedReasoningPart(value) {
+        if (currentReasoningPart == null) {
+          currentReasoningPart = {
+            type: "reasoning",
+            reasoning: "",
+            details: []
+          };
+          message.parts.push(currentReasoningPart);
+        }
+        currentReasoningPart.details.push({
+          type: "redacted",
+          data: value.data
+        });
+        currentReasoningTextDetail = void 0;
+        execUpdate();
+      },
+      onFilePart(value) {
+        message.parts.push({
+          type: "file",
+          mimeType: value.mimeType,
+          data: value.data
+        });
+        execUpdate();
+      },
+      onSourcePart(value) {
+        message.parts.push({
+          type: "source",
+          source: value
+        });
+        execUpdate();
+      },
+      onToolCallStreamingStartPart(value) {
+        if (message.toolInvocations == null) {
+          message.toolInvocations = [];
+        }
+        partialToolCalls[value.toolCallId] = {
+          text: "",
+          step,
+          toolName: value.toolName,
+          index: message.toolInvocations.length
+        };
+        const invocation = {
+          state: "partial-call",
+          step,
+          toolCallId: value.toolCallId,
+          toolName: value.toolName,
+          args: void 0
+        };
+        message.toolInvocations.push(invocation);
+        updateToolInvocationPart(value.toolCallId, invocation);
+        execUpdate();
+      },
+      onToolCallDeltaPart(value) {
+        const partialToolCall = partialToolCalls[value.toolCallId];
+        partialToolCall.text += value.argsTextDelta;
+        const { value: partialArgs } = parsePartialJson(partialToolCall.text);
+        const invocation = {
+          state: "partial-call",
+          step: partialToolCall.step,
+          toolCallId: value.toolCallId,
+          toolName: partialToolCall.toolName,
+          args: partialArgs
+        };
+        message.toolInvocations[partialToolCall.index] = invocation;
+        updateToolInvocationPart(value.toolCallId, invocation);
+        execUpdate();
+      },
+      async onToolCallPart(value) {
+        const invocation = {
+          state: "call",
+          step,
+          ...value
+        };
+        if (partialToolCalls[value.toolCallId] != null) {
+          message.toolInvocations[partialToolCalls[value.toolCallId].index] = invocation;
+        } else {
+          if (message.toolInvocations == null) {
+            message.toolInvocations = [];
+          }
+          message.toolInvocations.push(invocation);
+        }
+        updateToolInvocationPart(value.toolCallId, invocation);
+        execUpdate();
+        if (onToolCall) {
+          const result = await onToolCall({ toolCall: value });
+          if (result != null) {
+            const invocation2 = {
+              state: "result",
+              step,
+              ...value,
+              result
+            };
+            message.toolInvocations[message.toolInvocations.length - 1] = invocation2;
+            updateToolInvocationPart(value.toolCallId, invocation2);
+            execUpdate();
+          }
+        }
+      },
+      onToolResultPart(value) {
+        const toolInvocations = message.toolInvocations;
+        if (toolInvocations == null) {
+          throw new Error("tool_result must be preceded by a tool_call");
+        }
+        const toolInvocationIndex = toolInvocations.findIndex((invocation2) => invocation2.toolCallId === value.toolCallId);
+        if (toolInvocationIndex === -1) {
+          throw new Error("tool_result must be preceded by a tool_call with the same toolCallId");
+        }
+        const invocation = {
+          ...toolInvocations[toolInvocationIndex],
+          state: "result",
+          ...value
+        };
+        toolInvocations[toolInvocationIndex] = invocation;
+        updateToolInvocationPart(value.toolCallId, invocation);
+        execUpdate();
+      },
+      onDataPart(value) {
+        data.push(...value);
+        execUpdate();
+      },
+      onMessageAnnotationsPart(value) {
+        if (messageAnnotations == null) {
+          messageAnnotations = [...value];
+        } else {
+          messageAnnotations.push(...value);
+        }
+        execUpdate();
+      },
+      onFinishStepPart(value) {
+        step += 1;
+        currentTextPart = value.isContinued ? currentTextPart : void 0;
+        currentReasoningPart = void 0;
+        currentReasoningTextDetail = void 0;
+      },
+      onStartStepPart(value) {
+        if (!replaceLastMessage) {
+          message.id = value.messageId;
+        }
+        message.parts.push({ type: "step-start" });
+        execUpdate();
+      },
+      onFinishMessagePart(value) {
+        finishReason = value.finishReason;
+        if (value.usage != null) {
+          usage = value.usage;
+        }
+      },
+      onErrorPart(error) {
+        throw new Error(error);
+      }
     });
+    onFinish?.({ message, finishReason, usage });
   }
   /**
    * Streams a response from the agent
@@ -344,9 +847,279 @@ var Agent = class extends BaseResource {
       ...params,
       output: params.output ? zodToJsonSchema(params.output) : void 0,
       experimental_output: params.experimental_output ? zodToJsonSchema(params.experimental_output) : void 0,
-      runtimeContext:
+      runtimeContext: parseClientRuntimeContext(params.runtimeContext),
+      clientTools: processClientTools(params.clientTools)
     };
-    const
+    const { readable, writable } = new TransformStream();
+    const response = await this.processStreamResponse(processedParams, writable);
+    const streamResponse = new Response(readable, {
+      status: response.status,
+      statusText: response.statusText,
+      headers: response.headers
+    });
+    streamResponse.processDataStream = async (options = {}) => {
+      await processDataStream({
+        stream: streamResponse.body,
+        ...options
+      });
+    };
+    return streamResponse;
+  }
+  async processChatResponse_vNext({
+    stream,
+    update,
+    onToolCall,
+    onFinish,
+    getCurrentDate = () => /* @__PURE__ */ new Date(),
+    lastMessage
+  }) {
+    const replaceLastMessage = lastMessage?.role === "assistant";
+    let step = replaceLastMessage ? 1 + // find max step in existing tool invocations:
+    (lastMessage.toolInvocations?.reduce((max, toolInvocation) => {
+      return Math.max(max, toolInvocation.step ?? 0);
+    }, 0) ?? 0) : 0;
+    const message = replaceLastMessage ? structuredClone(lastMessage) : {
+      id: v4(),
+      createdAt: getCurrentDate(),
+      role: "assistant",
+      content: "",
+      parts: []
+    };
+    let currentTextPart = void 0;
+    let currentReasoningPart = void 0;
+    let currentReasoningTextDetail = void 0;
+    function updateToolInvocationPart(toolCallId, invocation) {
+      const part = message.parts.find(
+        (part2) => part2.type === "tool-invocation" && part2.toolInvocation.toolCallId === toolCallId
+      );
+      if (part != null) {
+        part.toolInvocation = invocation;
+      } else {
+        message.parts.push({
+          type: "tool-invocation",
+          toolInvocation: invocation
+        });
+      }
+    }
+    const data = [];
+    let messageAnnotations = replaceLastMessage ? lastMessage?.annotations : void 0;
+    const partialToolCalls = {};
+    let usage = {
+      completionTokens: NaN,
+      promptTokens: NaN,
+      totalTokens: NaN
+    };
+    let finishReason = "unknown";
+    function execUpdate() {
+      const copiedData = [...data];
+      if (messageAnnotations?.length) {
+        message.annotations = messageAnnotations;
+      }
+      const copiedMessage = {
+        // deep copy the message to ensure that deep changes (msg attachments) are updated
+        // with SolidJS. SolidJS uses referential integration of sub-objects to detect changes.
+        ...structuredClone(message),
+        // add a revision id to ensure that the message is updated with SWR. SWR uses a
+        // hashing approach by default to detect changes, but it only works for shallow
+        // changes. This is why we need to add a revision id to ensure that the message
+        // is updated with SWR (without it, the changes get stuck in SWR and are not
+        // forwarded to rendering):
+        revisionId: v4()
+      };
+      update({
+        message: copiedMessage,
+        data: copiedData,
+        replaceLastMessage
+      });
+    }
+    await processMastraStream({
+      stream,
+      // TODO: casting as any here because the stream types were all typed as any before in core.
+      // but this is completely wrong and this fn is probably broken. Remove ":any" and you'll see a bunch of type errors
+      onChunk: async (chunk) => {
+        switch (chunk.type) {
+          case "step-start": {
+            if (!replaceLastMessage) {
+              message.id = chunk.payload.messageId;
+            }
+            message.parts.push({ type: "step-start" });
+            execUpdate();
+            break;
+          }
+          case "text-delta": {
+            if (currentTextPart == null) {
+              currentTextPart = {
+                type: "text",
+                text: chunk.payload.text
+              };
+              message.parts.push(currentTextPart);
+            } else {
+              currentTextPart.text += chunk.payload.text;
+            }
+            message.content += chunk.payload.text;
+            execUpdate();
+            break;
+          }
+          case "reasoning-delta": {
+            if (currentReasoningTextDetail == null) {
+              currentReasoningTextDetail = { type: "text", text: chunk.payload.text };
+              if (currentReasoningPart != null) {
+                currentReasoningPart.details.push(currentReasoningTextDetail);
+              }
+            } else {
+              currentReasoningTextDetail.text += chunk.payload.text;
+            }
+            if (currentReasoningPart == null) {
+              currentReasoningPart = {
+                type: "reasoning",
+                reasoning: chunk.payload.text,
+                details: [currentReasoningTextDetail]
+              };
+              message.parts.push(currentReasoningPart);
+            } else {
+              currentReasoningPart.reasoning += chunk.payload.text;
+            }
+            message.reasoning = (message.reasoning ?? "") + chunk.payload.text;
+            execUpdate();
+            break;
+          }
+          case "file": {
+            message.parts.push({
+              type: "file",
+              mimeType: chunk.payload.mimeType,
+              data: chunk.payload.data
+            });
+            execUpdate();
+            break;
+          }
+          case "source": {
+            message.parts.push({
+              type: "source",
+              source: chunk.payload.source
+            });
+            execUpdate();
+            break;
+          }
+          case "tool-call": {
+            const invocation = {
+              state: "call",
+              step,
+              ...chunk.payload
+            };
+            if (partialToolCalls[chunk.payload.toolCallId] != null) {
+              message.toolInvocations[partialToolCalls[chunk.payload.toolCallId].index] = invocation;
+            } else {
+              if (message.toolInvocations == null) {
+                message.toolInvocations = [];
+              }
+              message.toolInvocations.push(invocation);
+            }
+            updateToolInvocationPart(chunk.payload.toolCallId, invocation);
+            execUpdate();
+            if (onToolCall) {
+              const result = await onToolCall({ toolCall: chunk.payload });
+              if (result != null) {
+                const invocation2 = {
+                  state: "result",
+                  step,
+                  ...chunk.payload,
+                  result
+                };
+                message.toolInvocations[message.toolInvocations.length - 1] = invocation2;
+                updateToolInvocationPart(chunk.payload.toolCallId, invocation2);
+                execUpdate();
+              }
+            }
+          }
+          case "tool-call-input-streaming-start": {
+            if (message.toolInvocations == null) {
+              message.toolInvocations = [];
+            }
+            partialToolCalls[chunk.payload.toolCallId] = {
+              text: "",
+              step,
+              toolName: chunk.payload.toolName,
+              index: message.toolInvocations.length
+            };
+            const invocation = {
+              state: "partial-call",
+              step,
+              toolCallId: chunk.payload.toolCallId,
+              toolName: chunk.payload.toolName,
+              args: void 0
+            };
+            message.toolInvocations.push(invocation);
+            updateToolInvocationPart(chunk.payload.toolCallId, invocation);
+            execUpdate();
+            break;
+          }
+          case "tool-call-delta": {
+            const partialToolCall = partialToolCalls[chunk.payload.toolCallId];
+            partialToolCall.text += chunk.payload.argsTextDelta;
+            const { value: partialArgs } = parsePartialJson(partialToolCall.text);
+            const invocation = {
+              state: "partial-call",
+              step: partialToolCall.step,
+              toolCallId: chunk.payload.toolCallId,
+              toolName: partialToolCall.toolName,
+              args: partialArgs
+            };
+            message.toolInvocations[partialToolCall.index] = invocation;
+            updateToolInvocationPart(chunk.payload.toolCallId, invocation);
+            execUpdate();
+            break;
+          }
+          case "tool-result": {
+            const toolInvocations = message.toolInvocations;
+            if (toolInvocations == null) {
+              throw new Error("tool_result must be preceded by a tool_call");
+            }
+            const toolInvocationIndex = toolInvocations.findIndex(
+              (invocation2) => invocation2.toolCallId === chunk.payload.toolCallId
+            );
+            if (toolInvocationIndex === -1) {
+              throw new Error("tool_result must be preceded by a tool_call with the same toolCallId");
+            }
+            const invocation = {
+              ...toolInvocations[toolInvocationIndex],
+              state: "result",
+              ...chunk.payload
+            };
+            toolInvocations[toolInvocationIndex] = invocation;
+            updateToolInvocationPart(chunk.payload.toolCallId, invocation);
+            execUpdate();
+            break;
+          }
+          case "error": {
+            throw new Error(chunk.payload.error);
+          }
+          case "data": {
+            data.push(...chunk.payload.data);
+            execUpdate();
+            break;
+          }
+          case "step-finish": {
+            step += 1;
+            currentTextPart = chunk.payload.isContinued ? currentTextPart : void 0;
+            currentReasoningPart = void 0;
+            currentReasoningTextDetail = void 0;
+            execUpdate();
+            break;
+          }
+          case "finish": {
+            finishReason = chunk.payload.finishReason;
+            if (chunk.payload.usage != null) {
+              usage = chunk.payload.usage;
+            }
+            break;
+          }
+        }
+      }
+    });
+    onFinish?.({ message, finishReason, usage });
+  }
+  async processStreamResponse_vNext(processedParams, writable) {
+    const response = await this.request(`/api/agents/${this.agentId}/stream/vnext`, {
       method: "POST",
       body: processedParams,
       stream: true
@@ -354,12 +1127,244 @@ var Agent = class extends BaseResource {
     if (!response.body) {
       throw new Error("No response body");
     }
-
-
-
-
+    try {
+      let toolCalls = [];
+      let messages = [];
+      const [streamForWritable, streamForProcessing] = response.body.tee();
+      streamForWritable.pipeTo(writable, {
+        preventClose: true
+      }).catch((error) => {
+        console.error("Error piping to writable stream:", error);
+      });
+      this.processChatResponse_vNext({
+        stream: streamForProcessing,
+        update: ({ message }) => {
+          const existingIndex = messages.findIndex((m) => m.id === message.id);
+          if (existingIndex !== -1) {
+            messages[existingIndex] = message;
+          } else {
+            messages.push(message);
+          }
+        },
+        onFinish: async ({ finishReason, message }) => {
+          if (finishReason === "tool-calls") {
+            const toolCall = [...message?.parts ?? []].reverse().find((part) => part.type === "tool-invocation")?.toolInvocation;
+            if (toolCall) {
+              toolCalls.push(toolCall);
+            }
+            for (const toolCall2 of toolCalls) {
+              const clientTool = processedParams.clientTools?.[toolCall2.toolName];
+              if (clientTool && clientTool.execute) {
+                const result = await clientTool.execute(
+                  {
+                    context: toolCall2?.args,
+                    runId: processedParams.runId,
+                    resourceId: processedParams.resourceId,
+                    threadId: processedParams.threadId,
+                    runtimeContext: processedParams.runtimeContext,
+                    // TODO: Pass proper tracing context when client-js supports tracing
+                    tracingContext: { currentSpan: void 0 }
+                  },
+                  {
+                    messages: response.messages,
+                    toolCallId: toolCall2?.toolCallId
+                  }
+                );
+                const lastMessage = JSON.parse(JSON.stringify(messages[messages.length - 1]));
+                const toolInvocationPart = lastMessage?.parts?.find(
+                  (part) => part.type === "tool-invocation" && part.toolInvocation?.toolCallId === toolCall2.toolCallId
+                );
+                if (toolInvocationPart) {
+                  toolInvocationPart.toolInvocation = {
+                    ...toolInvocationPart.toolInvocation,
+                    state: "result",
+                    result
+                  };
+                }
+                const toolInvocation = lastMessage?.toolInvocations?.find(
+                  (toolInvocation2) => toolInvocation2.toolCallId === toolCall2.toolCallId
+                );
+                if (toolInvocation) {
+                  toolInvocation.state = "result";
+                  toolInvocation.result = result;
+                }
+                const writer = writable.getWriter();
+                try {
+                  await writer.write(
+                    new TextEncoder().encode(
+                      "a:" + JSON.stringify({
+                        toolCallId: toolCall2.toolCallId,
+                        result
+                      }) + "\n"
+                    )
+                  );
+                } finally {
+                  writer.releaseLock();
+                }
+                const originalMessages = processedParams.messages;
+                const messageArray = Array.isArray(originalMessages) ? originalMessages : [originalMessages];
+                this.processStreamResponse_vNext(
+                  {
+                    ...processedParams,
+                    messages: [...messageArray, ...messages.filter((m) => m.id !== lastMessage.id), lastMessage]
+                  },
+                  writable
+                ).catch((error) => {
+                  console.error("Error processing stream response:", error);
+                });
+              }
+            }
+          } else {
+            setTimeout(() => {
+              writable.close();
+            }, 0);
+          }
+        },
+        lastMessage: void 0
+      }).catch((error) => {
+        console.error("Error processing stream response:", error);
+      });
+    } catch (error) {
+      console.error("Error processing stream response:", error);
+    }
+    return response;
+  }
+  async streamVNext(params) {
+    const processedParams = {
+      ...params,
+      output: params.output ? zodToJsonSchema(params.output) : void 0,
+      runtimeContext: parseClientRuntimeContext(params.runtimeContext),
+      clientTools: processClientTools(params.clientTools)
+    };
+    const { readable, writable } = new TransformStream();
+    const response = await this.processStreamResponse_vNext(processedParams, writable);
+    const streamResponse = new Response(readable, {
+      status: response.status,
+      statusText: response.statusText,
+      headers: response.headers
+    });
+    streamResponse.processDataStream = async ({
+      onChunk
+    }) => {
+      await processMastraStream({
+        stream: streamResponse.body,
+        onChunk
       });
     };
+    return streamResponse;
+  }
+  /**
+   * Processes the stream response and handles tool calls
+   */
+  async processStreamResponse(processedParams, writable) {
+    const response = await this.request(`/api/agents/${this.agentId}/stream`, {
+      method: "POST",
+      body: processedParams,
+      stream: true
+    });
+    if (!response.body) {
+      throw new Error("No response body");
+    }
+    try {
+      let toolCalls = [];
+      let messages = [];
+      const [streamForWritable, streamForProcessing] = response.body.tee();
+      streamForWritable.pipeTo(writable, {
+        preventClose: true
+      }).catch((error) => {
+        console.error("Error piping to writable stream:", error);
+      });
+      this.processChatResponse({
+        stream: streamForProcessing,
+        update: ({ message }) => {
+          const existingIndex = messages.findIndex((m) => m.id === message.id);
+          if (existingIndex !== -1) {
+            messages[existingIndex] = message;
+          } else {
+            messages.push(message);
+          }
+        },
+        onFinish: async ({ finishReason, message }) => {
+          if (finishReason === "tool-calls") {
+            const toolCall = [...message?.parts ?? []].reverse().find((part) => part.type === "tool-invocation")?.toolInvocation;
+            if (toolCall) {
+              toolCalls.push(toolCall);
+            }
+            for (const toolCall2 of toolCalls) {
+              const clientTool = processedParams.clientTools?.[toolCall2.toolName];
+              if (clientTool && clientTool.execute) {
+                const result = await clientTool.execute(
+                  {
+                    context: toolCall2?.args,
+                    runId: processedParams.runId,
+                    resourceId: processedParams.resourceId,
+                    threadId: processedParams.threadId,
+                    runtimeContext: processedParams.runtimeContext,
+                    // TODO: Pass proper tracing context when client-js supports tracing
+                    tracingContext: { currentSpan: void 0 }
+                  },
+                  {
+                    messages: response.messages,
+                    toolCallId: toolCall2?.toolCallId
+                  }
+                );
+                const lastMessage = JSON.parse(JSON.stringify(messages[messages.length - 1]));
+                const toolInvocationPart = lastMessage?.parts?.find(
+                  (part) => part.type === "tool-invocation" && part.toolInvocation?.toolCallId === toolCall2.toolCallId
+                );
+                if (toolInvocationPart) {
+                  toolInvocationPart.toolInvocation = {
+                    ...toolInvocationPart.toolInvocation,
+                    state: "result",
+                    result
+                  };
+                }
+                const toolInvocation = lastMessage?.toolInvocations?.find(
+                  (toolInvocation2) => toolInvocation2.toolCallId === toolCall2.toolCallId
+                );
+                if (toolInvocation) {
+                  toolInvocation.state = "result";
+                  toolInvocation.result = result;
+                }
+                const writer = writable.getWriter();
+                try {
+                  await writer.write(
+                    new TextEncoder().encode(
+                      "a:" + JSON.stringify({
+                        toolCallId: toolCall2.toolCallId,
+                        result
+                      }) + "\n"
+                    )
+                  );
+                } finally {
+                  writer.releaseLock();
+                }
+                const originalMessages = processedParams.messages;
+                const messageArray = Array.isArray(originalMessages) ? originalMessages : [originalMessages];
+                this.processStreamResponse(
+                  {
+                    ...processedParams,
+                    messages: [...messageArray, ...messages.filter((m) => m.id !== lastMessage.id), lastMessage]
+                  },
+                  writable
+                ).catch((error) => {
+                  console.error("Error processing stream response:", error);
+                });
+              }
+            }
+          } else {
+            setTimeout(() => {
+              writable.close();
+            }, 0);
+          }
+        },
+        lastMessage: void 0
+      }).catch((error) => {
+        console.error("Error processing stream response:", error);
+      });
+    } catch (error) {
+      console.error("Error processing stream response:", error);
+    }
     return response;
   }
   /**
@@ -400,6 +1405,17 @@ var Agent = class extends BaseResource {
   liveEvals() {
     return this.request(`/api/agents/${this.agentId}/evals/live`);
   }
+  /**
+   * Updates the model for the agent
+   * @param params - Parameters for updating the model
+   * @returns Promise containing the updated model
+   */
+  updateModel(params) {
+    return this.request(`/api/agents/${this.agentId}/model`, {
+      method: "POST",
+      body: params
+    });
+  }
 };
 var Network = class extends BaseResource {
   constructor(options, networkId) {
@@ -504,6 +1520,36 @@ var MemoryThread = class extends BaseResource {
     });
     return this.request(`/api/memory/threads/${this.threadId}/messages?${query.toString()}`);
   }
+  /**
+   * Retrieves paginated messages associated with the thread with advanced filtering and selection options
+   * @param params - Pagination parameters including selectBy criteria, page, perPage, date ranges, and message inclusion options
+   * @returns Promise containing paginated thread messages with pagination metadata (total, page, perPage, hasMore)
+   */
+  getMessagesPaginated({
+    selectBy,
+    ...rest
+  }) {
+    const query = new URLSearchParams({
+      ...rest,
+      ...selectBy ? { selectBy: JSON.stringify(selectBy) } : {}
+    });
+    return this.request(`/api/memory/threads/${this.threadId}/messages/paginated?${query.toString()}`);
+  }
+  /**
+   * Deletes one or more messages from the thread
+   * @param messageIds - Can be a single message ID (string), array of message IDs,
+   * message object with id property, or array of message objects
+   * @returns Promise containing deletion result
+   */
+  deleteMessages(messageIds) {
+    const query = new URLSearchParams({
+      agentId: this.agentId
+    });
+    return this.request(`/api/memory/messages/delete?${query.toString()}`, {
+      method: "POST",
+      body: { messageIds }
+    });
+  }
 };
 
 // src/resources/vector.ts
@@ -572,24 +1618,24 @@ var Vector = class extends BaseResource {
   }
 };
 
-// src/resources/workflow.ts
+// src/resources/legacy-workflow.ts
 var RECORD_SEPARATOR = "";
-var
+var LegacyWorkflow = class extends BaseResource {
   constructor(options, workflowId) {
     super(options);
     this.workflowId = workflowId;
   }
   /**
-   * Retrieves details about the workflow
-   * @returns Promise containing workflow details including steps and graphs
+   * Retrieves details about the legacy workflow
+   * @returns Promise containing legacy workflow details including steps and graphs
    */
   details() {
-    return this.request(`/api/workflows/${this.workflowId}`);
+    return this.request(`/api/workflows/legacy/${this.workflowId}`);
   }
   /**
-   * Retrieves all runs for a workflow
+   * Retrieves all runs for a legacy workflow
    * @param params - Parameters for filtering runs
-   * @returns Promise containing workflow runs array
+   * @returns Promise containing legacy workflow runs array
    */
   runs(params) {
     const searchParams = new URLSearchParams();
@@ -609,25 +1655,13 @@ var Workflow = class extends BaseResource {
       searchParams.set("resourceId", params.resourceId);
     }
     if (searchParams.size) {
-      return this.request(`/api/workflows/${this.workflowId}/runs?${searchParams}`);
+      return this.request(`/api/workflows/legacy/${this.workflowId}/runs?${searchParams}`);
     } else {
-      return this.request(`/api/workflows/${this.workflowId}/runs`);
+      return this.request(`/api/workflows/legacy/${this.workflowId}/runs`);
     }
   }
   /**
-   *
-   * Executes the workflow with the provided parameters
-   * @param params - Parameters required for workflow execution
-   * @returns Promise containing the workflow execution results
-   */
-  execute(params) {
-    return this.request(`/api/workflows/${this.workflowId}/execute`, {
-      method: "POST",
-      body: params
-    });
-  }
-  /**
-   * Creates a new workflow run
+   * Creates a new legacy workflow run
    * @returns Promise containing the generated run ID
    */
   createRun(params) {
@@ -635,34 +1669,34 @@ var Workflow = class extends BaseResource {
     if (!!params?.runId) {
       searchParams.set("runId", params.runId);
     }
-    return this.request(`/api/workflows/${this.workflowId}/
+    return this.request(`/api/workflows/legacy/${this.workflowId}/create-run?${searchParams.toString()}`, {
      method: "POST"
    });
  }
  /**
-   * Starts a workflow run synchronously without waiting for the workflow to complete
+   * Starts a legacy workflow run synchronously without waiting for the workflow to complete
   * @param params - Object containing the runId and triggerData
   * @returns Promise containing success message
   */
  start(params) {
-    return this.request(`/api/workflows/${this.workflowId}/start?runId=${params.runId}`, {
+    return this.request(`/api/workflows/legacy/${this.workflowId}/start?runId=${params.runId}`, {
      method: "POST",
      body: params?.triggerData
    });
  }
  /**
-   * Resumes a suspended workflow step synchronously without waiting for the workflow to complete
+   * Resumes a suspended legacy workflow step synchronously without waiting for the workflow to complete
   * @param stepId - ID of the step to resume
-   * @param runId - ID of the workflow run
-   * @param context - Context to resume the workflow with
-   * @returns Promise containing the workflow resume results
+   * @param runId - ID of the legacy workflow run
+   * @param context - Context to resume the legacy workflow with
+   * @returns Promise containing the legacy workflow resume results
   */
  resume({
    stepId,
    runId,
    context
  }) {
-    return this.request(`/api/workflows/${this.workflowId}/resume?runId=${runId}`, {
+    return this.request(`/api/workflows/legacy/${this.workflowId}/resume?runId=${runId}`, {
      method: "POST",
      body: {
        stepId,
@@ -680,18 +1714,18 @@ var Workflow = class extends BaseResource {
     if (!!params?.runId) {
       searchParams.set("runId", params.runId);
     }
-    return this.request(`/api/workflows/${this.workflowId}/start-async?${searchParams.toString()}`, {
+    return this.request(`/api/workflows/legacy/${this.workflowId}/start-async?${searchParams.toString()}`, {
      method: "POST",
      body: params?.triggerData
    });
  }
  /**
-   * Resumes a suspended workflow step asynchronously and returns a promise that resolves when the workflow is complete
+   * Resumes a suspended legacy workflow step asynchronously and returns a promise that resolves when the workflow is complete
   * @param params - Object containing the runId, stepId, and context
   * @returns Promise containing the workflow resume results
   */
  resumeAsync(params) {
-    return this.request(`/api/workflows/${this.workflowId}/resume-async?runId=${params.runId}`, {
+    return this.request(`/api/workflows/legacy/${this.workflowId}/resume-async?runId=${params.runId}`, {
      method: "POST",
      body: {
        stepId: params.stepId,
@@ -745,16 +1779,16 @@ var Workflow = class extends BaseResource {
|
|
|
745
1779
|
}
|
|
746
1780
|
}
|
|
747
1781
|
/**
|
|
748
|
-
* Watches workflow transitions in real-time
|
|
1782
|
+
* Watches legacy workflow transitions in real-time
|
|
749
1783
|
* @param runId - Optional run ID to filter the watch stream
|
|
750
|
-
* @returns AsyncGenerator that yields parsed records from the workflow watch stream
|
|
1784
|
+
* @returns AsyncGenerator that yields parsed records from the legacy workflow watch stream
|
|
751
1785
|
*/
|
|
752
1786
|
async watch({ runId }, onRecord) {
|
|
753
|
-
const response = await this.request(`/api/workflows/${this.workflowId}/watch?runId=${runId}`, {
|
|
1787
|
+
const response = await this.request(`/api/workflows/legacy/${this.workflowId}/watch?runId=${runId}`, {
|
|
754
1788
|
stream: true
|
|
755
1789
|
});
|
|
756
1790
|
if (!response.ok) {
|
|
757
|
-
throw new Error(`Failed to watch workflow: ${response.statusText}`);
|
|
1791
|
+
throw new Error(`Failed to watch legacy workflow: ${response.statusText}`);
|
|
758
1792
|
}
|
|
759
1793
|
if (!response.body) {
|
|
760
1794
|
throw new Error("Response body is null");
|
|
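Note: the hunks above repoint every legacy workflow call from /api/workflows/... to /api/workflows/legacy/.... A minimal usage sketch of those routes through the client, assuming a locally running Mastra server and an async context; the baseUrl value, workflow ID, trigger data, and the shape of the create-run response are placeholders, and getLegacyWorkflow is only added to MastraClient further down in this diff:

import { MastraClient } from "@mastra/client-js";

// Hypothetical local server; adjust baseUrl to your deployment.
const client = new MastraClient({ baseUrl: "http://localhost:4111" });

// "myWorkflow" is a placeholder legacy workflow ID registered on the server.
const legacy = client.getLegacyWorkflow("myWorkflow");

// POST /api/workflows/legacy/myWorkflow/create-run
// Assumes the create-run response exposes a runId field.
const { runId } = await legacy.createRun();

// POST /api/workflows/legacy/myWorkflow/start-async?runId=...
const result = await legacy.startAsync({ runId, triggerData: { input: "hello" } });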
@@ -790,7 +1824,7 @@ var Tool = class extends BaseResource {
     }
     const body = {
       data: params.data,
-      runtimeContext:
+      runtimeContext: parseClientRuntimeContext(params.runtimeContext)
     };
     return this.request(`/api/tools/${this.toolId}/execute?${url.toString()}`, {
       method: "POST",
@@ -799,15 +1833,15 @@ var Tool = class extends BaseResource {
   }
 };
 
-// src/resources/
+// src/resources/workflow.ts
 var RECORD_SEPARATOR2 = "";
-var
+var Workflow = class extends BaseResource {
   constructor(options, workflowId) {
     super(options);
     this.workflowId = workflowId;
   }
   /**
-   * Creates an async generator that processes a readable stream and yields
+   * Creates an async generator that processes a readable stream and yields workflow records
    * separated by the Record Separator character (\x1E)
    *
    * @param stream - The readable stream to process
@@ -852,16 +1886,16 @@ var VNextWorkflow = class extends BaseResource {
     }
   }
   /**
-   * Retrieves details about the
-   * @returns Promise containing
+   * Retrieves details about the workflow
+   * @returns Promise containing workflow details including steps and graphs
    */
   details() {
-    return this.request(`/api/workflows
+    return this.request(`/api/workflows/${this.workflowId}`);
   }
   /**
-   * Retrieves all runs for a
+   * Retrieves all runs for a workflow
    * @param params - Parameters for filtering runs
-   * @returns Promise containing
+   * @returns Promise containing workflow runs array
    */
   runs(params) {
     const searchParams = new URLSearchParams();
@@ -871,23 +1905,60 @@ var VNextWorkflow = class extends BaseResource {
     if (params?.toDate) {
       searchParams.set("toDate", params.toDate.toISOString());
     }
-    if (params?.limit) {
+    if (params?.limit !== null && params?.limit !== void 0 && !isNaN(Number(params?.limit))) {
       searchParams.set("limit", String(params.limit));
     }
-    if (params?.offset) {
+    if (params?.offset !== null && params?.offset !== void 0 && !isNaN(Number(params?.offset))) {
       searchParams.set("offset", String(params.offset));
     }
     if (params?.resourceId) {
       searchParams.set("resourceId", params.resourceId);
     }
     if (searchParams.size) {
-      return this.request(`/api/workflows
+      return this.request(`/api/workflows/${this.workflowId}/runs?${searchParams}`);
     } else {
-      return this.request(`/api/workflows
+      return this.request(`/api/workflows/${this.workflowId}/runs`);
     }
   }
   /**
-   *
+   * Retrieves a specific workflow run by its ID
+   * @param runId - The ID of the workflow run to retrieve
+   * @returns Promise containing the workflow run details
+   */
+  runById(runId) {
+    return this.request(`/api/workflows/${this.workflowId}/runs/${runId}`);
+  }
+  /**
+   * Retrieves the execution result for a specific workflow run by its ID
+   * @param runId - The ID of the workflow run to retrieve the execution result for
+   * @returns Promise containing the workflow run execution result
+   */
+  runExecutionResult(runId) {
+    return this.request(`/api/workflows/${this.workflowId}/runs/${runId}/execution-result`);
+  }
+  /**
+   * Cancels a specific workflow run by its ID
+   * @param runId - The ID of the workflow run to cancel
+   * @returns Promise containing a success message
+   */
+  cancelRun(runId) {
+    return this.request(`/api/workflows/${this.workflowId}/runs/${runId}/cancel`, {
+      method: "POST"
+    });
+  }
+  /**
+   * Sends an event to a specific workflow run by its ID
+   * @param params - Object containing the runId, event and data
+   * @returns Promise containing a success message
+   */
+  sendRunEvent(params) {
+    return this.request(`/api/workflows/${this.workflowId}/runs/${params.runId}/send-event`, {
+      method: "POST",
+      body: { event: params.event, data: params.data }
+    });
+  }
+  /**
+   * Creates a new workflow run
    * @param params - Optional object containing the optional runId
    * @returns Promise containing the runId of the created run
    */
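The run-management methods added above (runById, runExecutionResult, cancelRun, sendRunEvent) map one-to-one onto the new /runs/... endpoints, and runs() now validates limit/offset as numbers before adding them to the query string. A hedged sketch in an async context; the workflow ID, run ID, and event name are placeholders:

const workflow = client.getWorkflow("myWorkflow");

// GET /api/workflows/myWorkflow/runs?limit=10&offset=0
await workflow.runs({ limit: 10, offset: 0 });

const runId = "run-123"; // placeholder run ID
await workflow.runById(runId);                 // GET .../runs/run-123
await workflow.runExecutionResult(runId);      // GET .../runs/run-123/execution-result
await workflow.sendRunEvent({ runId, event: "approval", data: { approved: true } });
await workflow.cancelRun(runId);               // POST .../runs/run-123/cancel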
@@ -896,24 +1967,32 @@ var VNextWorkflow = class extends BaseResource {
     if (!!params?.runId) {
       searchParams.set("runId", params.runId);
     }
-    return this.request(`/api/workflows
+    return this.request(`/api/workflows/${this.workflowId}/create-run?${searchParams.toString()}`, {
       method: "POST"
     });
   }
   /**
-   *
+   * Creates a new workflow run (alias for createRun)
+   * @param params - Optional object containing the optional runId
+   * @returns Promise containing the runId of the created run
+   */
+  createRunAsync(params) {
+    return this.createRun(params);
+  }
+  /**
+   * Starts a workflow run synchronously without waiting for the workflow to complete
    * @param params - Object containing the runId, inputData and runtimeContext
    * @returns Promise containing success message
    */
   start(params) {
-    const runtimeContext =
-    return this.request(`/api/workflows
+    const runtimeContext = parseClientRuntimeContext(params.runtimeContext);
+    return this.request(`/api/workflows/${this.workflowId}/start?runId=${params.runId}`, {
       method: "POST",
       body: { inputData: params?.inputData, runtimeContext }
     });
   }
   /**
-   * Resumes a suspended
+   * Resumes a suspended workflow step synchronously without waiting for the workflow to complete
    * @param params - Object containing the runId, step, resumeData and runtimeContext
    * @returns Promise containing success message
    */
@@ -923,8 +2002,8 @@ var VNextWorkflow = class extends BaseResource {
     resumeData,
     ...rest
   }) {
-    const runtimeContext =
-    return this.request(`/api/workflows
+    const runtimeContext = parseClientRuntimeContext(rest.runtimeContext);
+    return this.request(`/api/workflows/${this.workflowId}/resume?runId=${runId}`, {
       method: "POST",
       stream: true,
       body: {
@@ -935,29 +2014,80 @@ var VNextWorkflow = class extends BaseResource {
     });
   }
   /**
-   * Starts a
+   * Starts a workflow run asynchronously and returns a promise that resolves when the workflow is complete
    * @param params - Object containing the optional runId, inputData and runtimeContext
-   * @returns Promise containing the
+   * @returns Promise containing the workflow execution results
    */
   startAsync(params) {
     const searchParams = new URLSearchParams();
     if (!!params?.runId) {
       searchParams.set("runId", params.runId);
     }
-    const runtimeContext =
-    return this.request(`/api/workflows
+    const runtimeContext = parseClientRuntimeContext(params.runtimeContext);
+    return this.request(`/api/workflows/${this.workflowId}/start-async?${searchParams.toString()}`, {
       method: "POST",
       body: { inputData: params.inputData, runtimeContext }
     });
   }
   /**
-   *
+   * Starts a workflow run and returns a stream
+   * @param params - Object containing the optional runId, inputData and runtimeContext
+   * @returns Promise containing the workflow execution results
+   */
+  async stream(params) {
+    const searchParams = new URLSearchParams();
+    if (!!params?.runId) {
+      searchParams.set("runId", params.runId);
+    }
+    const runtimeContext = parseClientRuntimeContext(params.runtimeContext);
+    const response = await this.request(
+      `/api/workflows/${this.workflowId}/stream?${searchParams.toString()}`,
+      {
+        method: "POST",
+        body: { inputData: params.inputData, runtimeContext },
+        stream: true
+      }
+    );
+    if (!response.ok) {
+      throw new Error(`Failed to stream vNext workflow: ${response.statusText}`);
+    }
+    if (!response.body) {
+      throw new Error("Response body is null");
+    }
+    let failedChunk = void 0;
+    const transformStream = new TransformStream({
+      start() {
+      },
+      async transform(chunk, controller) {
+        try {
+          const decoded = new TextDecoder().decode(chunk);
+          const chunks = decoded.split(RECORD_SEPARATOR2);
+          for (const chunk2 of chunks) {
+            if (chunk2) {
+              const newChunk = failedChunk ? failedChunk + chunk2 : chunk2;
+              try {
+                const parsedChunk = JSON.parse(newChunk);
+                controller.enqueue(parsedChunk);
+                failedChunk = void 0;
+              } catch {
+                failedChunk = newChunk;
+              }
            }
          }
        } catch {
        }
      }
    });
+    return response.body.pipeThrough(transformStream);
+  }
+  /**
+   * Resumes a suspended workflow step asynchronously and returns a promise that resolves when the workflow is complete
    * @param params - Object containing the runId, step, resumeData and runtimeContext
-   * @returns Promise containing the
+   * @returns Promise containing the workflow resume results
    */
   resumeAsync(params) {
-    const runtimeContext =
-    return this.request(`/api/workflows
+    const runtimeContext = parseClientRuntimeContext(params.runtimeContext);
+    return this.request(`/api/workflows/${this.workflowId}/resume-async?runId=${params.runId}`, {
       method: "POST",
       body: {
         step: params.step,
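The new stream() method above pipes the response body through a TransformStream that splits on the record separator and JSON-parses each chunk, so callers receive already-parsed records. A sketch of consuming it, assuming an async context and a placeholder workflow ID and input payload:

const stream = await client.getWorkflow("myWorkflow").stream({ inputData: { city: "Berlin" } });

// stream is a ReadableStream of parsed records; read it with a standard reader.
const reader = stream.getReader();
while (true) {
  const { done, value } = await reader.read();
  if (done) break;
  console.log(value); // one parsed workflow record per enqueued chunk
}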
@@ -967,24 +2097,51 @@ var VNextWorkflow = class extends BaseResource {
     });
   }
   /**
-   * Watches
+   * Watches workflow transitions in real-time
    * @param runId - Optional run ID to filter the watch stream
-   * @returns AsyncGenerator that yields parsed records from the
+   * @returns AsyncGenerator that yields parsed records from the workflow watch stream
    */
   async watch({ runId }, onRecord) {
-    const response = await this.request(`/api/workflows
+    const response = await this.request(`/api/workflows/${this.workflowId}/watch?runId=${runId}`, {
       stream: true
     });
     if (!response.ok) {
-      throw new Error(`Failed to watch
+      throw new Error(`Failed to watch workflow: ${response.statusText}`);
     }
     if (!response.body) {
       throw new Error("Response body is null");
     }
     for await (const record of this.streamProcessor(response.body)) {
-
+      if (typeof record === "string") {
+        onRecord(JSON.parse(record));
+      } else {
+        onRecord(record);
+      }
     }
   }
+  /**
+   * Creates a new ReadableStream from an iterable or async iterable of objects,
+   * serializing each as JSON and separating them with the record separator (\x1E).
+   *
+   * @param records - An iterable or async iterable of objects to stream
+   * @returns A ReadableStream emitting the records as JSON strings separated by the record separator
+   */
+  static createRecordStream(records) {
+    const encoder = new TextEncoder();
+    return new ReadableStream({
+      async start(controller) {
+        try {
+          for await (const record of records) {
+            const json = JSON.stringify(record) + RECORD_SEPARATOR2;
+            controller.enqueue(encoder.encode(json));
+          }
+          controller.close();
+        } catch (err) {
+          controller.error(err);
+        }
+      }
+    });
+  }
 };
 
 // src/resources/a2a.ts
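watch() now forwards each parsed record to the onRecord callback (re-parsing once more if a raw string slips through), and the new static createRecordStream() helper performs the inverse serialization with the same \x1E separator. A brief callback-style sketch with placeholder IDs; the create-run response shape is assumed:

const workflow = client.getWorkflow("myWorkflow");
const { runId } = await workflow.createRun(); // assumes the response exposes a runId field

// Callback-style watching of run transitions.
await workflow.watch({ runId }, (record) => {
  console.log("workflow record:", record);
});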
@@ -998,22 +2155,38 @@ var A2A = class extends BaseResource {
    * @returns Promise containing the agent card information
    */
   async getCard() {
-    return this.request(`/.well-known/${this.agentId}/agent.json`);
+    return this.request(`/.well-known/${this.agentId}/agent-card.json`);
   }
   /**
-   * Send a message to the agent and
+   * Send a message to the agent and gets a message or task response
    * @param params - Parameters for the task
-   * @returns Promise containing the
+   * @returns Promise containing the response
    */
   async sendMessage(params) {
     const response = await this.request(`/a2a/${this.agentId}`, {
       method: "POST",
       body: {
-        method: "
+        method: "message/send",
         params
       }
     });
-    return
+    return response;
+  }
+  /**
+   * Sends a message to an agent to initiate/continue a task and subscribes
+   * the client to real-time updates for that task via Server-Sent Events (SSE).
+   * @param params - Parameters for the task
+   * @returns A stream of Server-Sent Events. Each SSE `data` field contains a `SendStreamingMessageResponse`
+   */
+  async sendStreamingMessage(params) {
+    const response = await this.request(`/a2a/${this.agentId}`, {
+      method: "POST",
+      body: {
+        method: "message/stream",
+        params
+      }
+    });
+    return response;
   }
   /**
    * Get the status and result of a task
@@ -1028,7 +2201,7 @@ var A2A = class extends BaseResource {
         params
       }
     });
-    return response
+    return response;
   }
   /**
    * Cancel a running task
@@ -1044,21 +2217,6 @@ var A2A = class extends BaseResource {
       }
     });
   }
-  /**
-   * Send a message and subscribe to streaming updates (not fully implemented)
-   * @param params - Parameters for the task
-   * @returns Promise containing the task response
-   */
-  async sendAndSubscribe(params) {
-    return this.request(`/a2a/${this.agentId}`, {
-      method: "POST",
-      body: {
-        method: "tasks/sendSubscribe",
-        params
-      },
-      stream: true
-    });
-  }
 };
 
 // src/resources/mcp-tool.ts
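For A2A, the agent card moves to agent-card.json, sendMessage now issues the JSON-RPC method "message/send", and the removed sendAndSubscribe is superseded by sendStreamingMessage ("message/stream"). A hedged sketch; the message payload below follows the usual A2A message shape, but its exact fields and the agent ID are illustrative and not confirmed by this diff:

const a2a = client.getA2A("myAgent"); // placeholder agent ID

const card = await a2a.getCard(); // GET /.well-known/myAgent/agent-card.json

const reply = await a2a.sendMessage({
  message: { role: "user", parts: [{ kind: "text", text: "Hello" }], messageId: "msg-1" },
});

// Each SSE data field of the streaming variant carries a SendStreamingMessageResponse.
const streamed = await a2a.sendStreamingMessage({
  message: { role: "user", parts: [{ kind: "text", text: "Hello again" }], messageId: "msg-2" },
});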
@@ -1095,10 +2253,260 @@ var MCPTool = class extends BaseResource {
   }
 };
 
+// src/resources/observability.ts
+var Observability = class extends BaseResource {
+  constructor(options) {
+    super(options);
+  }
+  /**
+   * Retrieves a specific AI trace by ID
+   * @param traceId - ID of the trace to retrieve
+   * @returns Promise containing the AI trace with all its spans
+   */
+  getTrace(traceId) {
+    return this.request(`/api/observability/traces/${traceId}`);
+  }
+  /**
+   * Retrieves paginated list of AI traces with optional filtering
+   * @param params - Parameters for pagination and filtering
+   * @returns Promise containing paginated traces and pagination info
+   */
+  getTraces(params) {
+    const { pagination, filters } = params;
+    const { page, perPage, dateRange } = pagination || {};
+    const { name, spanType } = filters || {};
+    const searchParams = new URLSearchParams();
+    if (page !== void 0) {
+      searchParams.set("page", String(page));
+    }
+    if (perPage !== void 0) {
+      searchParams.set("perPage", String(perPage));
+    }
+    if (name) {
+      searchParams.set("name", name);
+    }
+    if (spanType !== void 0) {
+      searchParams.set("spanType", String(spanType));
+    }
+    if (dateRange) {
+      const dateRangeStr = JSON.stringify({
+        start: dateRange.start instanceof Date ? dateRange.start.toISOString() : dateRange.start,
+        end: dateRange.end instanceof Date ? dateRange.end.toISOString() : dateRange.end
+      });
+      searchParams.set("dateRange", dateRangeStr);
+    }
+    const queryString = searchParams.toString();
+    return this.request(`/api/observability/traces${queryString ? `?${queryString}` : ""}`);
+  }
+};
+
+// src/resources/network-memory-thread.ts
+var NetworkMemoryThread = class extends BaseResource {
+  constructor(options, threadId, networkId) {
+    super(options);
+    this.threadId = threadId;
+    this.networkId = networkId;
+  }
+  /**
+   * Retrieves the memory thread details
+   * @returns Promise containing thread details including title and metadata
+   */
+  get() {
+    return this.request(`/api/memory/network/threads/${this.threadId}?networkId=${this.networkId}`);
+  }
+  /**
+   * Updates the memory thread properties
+   * @param params - Update parameters including title and metadata
+   * @returns Promise containing updated thread details
+   */
+  update(params) {
+    return this.request(`/api/memory/network/threads/${this.threadId}?networkId=${this.networkId}`, {
+      method: "PATCH",
+      body: params
+    });
+  }
+  /**
+   * Deletes the memory thread
+   * @returns Promise containing deletion result
+   */
+  delete() {
+    return this.request(`/api/memory/network/threads/${this.threadId}?networkId=${this.networkId}`, {
+      method: "DELETE"
+    });
+  }
+  /**
+   * Retrieves messages associated with the thread
+   * @param params - Optional parameters including limit for number of messages to retrieve
+   * @returns Promise containing thread messages and UI messages
+   */
+  getMessages(params) {
+    const query = new URLSearchParams({
+      networkId: this.networkId,
+      ...params?.limit ? { limit: params.limit.toString() } : {}
+    });
+    return this.request(`/api/memory/network/threads/${this.threadId}/messages?${query.toString()}`);
+  }
+  /**
+   * Deletes one or more messages from the thread
+   * @param messageIds - Can be a single message ID (string), array of message IDs,
+   * message object with id property, or array of message objects
+   * @returns Promise containing deletion result
+   */
+  deleteMessages(messageIds) {
+    const query = new URLSearchParams({
+      networkId: this.networkId
+    });
+    return this.request(`/api/memory/network/messages/delete?${query.toString()}`, {
+      method: "POST",
+      body: { messageIds }
+    });
+  }
+};
+
+// src/resources/vNextNetwork.ts
+var RECORD_SEPARATOR3 = "";
+var VNextNetwork = class extends BaseResource {
+  constructor(options, networkId) {
+    super(options);
+    this.networkId = networkId;
+  }
+  /**
+   * Retrieves details about the network
+   * @returns Promise containing vNext network details
+   */
+  details() {
+    return this.request(`/api/networks/v-next/${this.networkId}`);
+  }
+  /**
+   * Generates a response from the v-next network
+   * @param params - Generation parameters including message
+   * @returns Promise containing the generated response
+   */
+  generate(params) {
+    return this.request(`/api/networks/v-next/${this.networkId}/generate`, {
+      method: "POST",
+      body: {
+        ...params,
+        runtimeContext: parseClientRuntimeContext(params.runtimeContext)
+      }
+    });
+  }
+  /**
+   * Generates a response from the v-next network using multiple primitives
+   * @param params - Generation parameters including message
+   * @returns Promise containing the generated response
+   */
+  loop(params) {
+    return this.request(`/api/networks/v-next/${this.networkId}/loop`, {
+      method: "POST",
+      body: {
+        ...params,
+        runtimeContext: parseClientRuntimeContext(params.runtimeContext)
+      }
+    });
+  }
+  async *streamProcessor(stream) {
+    const reader = stream.getReader();
+    let doneReading = false;
+    let buffer = "";
+    try {
+      while (!doneReading) {
+        const { done, value } = await reader.read();
+        doneReading = done;
+        if (done && !value) continue;
+        try {
+          const decoded = value ? new TextDecoder().decode(value) : "";
+          const chunks = (buffer + decoded).split(RECORD_SEPARATOR3);
+          buffer = chunks.pop() || "";
+          for (const chunk of chunks) {
+            if (chunk) {
+              if (typeof chunk === "string") {
+                try {
+                  const parsedChunk = JSON.parse(chunk);
+                  yield parsedChunk;
+                } catch {
+                }
+              }
+            }
+          }
+        } catch {
+        }
+      }
+      if (buffer) {
+        try {
+          yield JSON.parse(buffer);
+        } catch {
+        }
+      }
+    } finally {
+      reader.cancel().catch(() => {
+      });
+    }
+  }
+  /**
+   * Streams a response from the v-next network
+   * @param params - Stream parameters including message
+   * @returns Promise containing the results
+   */
+  async stream(params, onRecord) {
+    const response = await this.request(`/api/networks/v-next/${this.networkId}/stream`, {
+      method: "POST",
+      body: {
+        ...params,
+        runtimeContext: parseClientRuntimeContext(params.runtimeContext)
+      },
+      stream: true
+    });
+    if (!response.ok) {
+      throw new Error(`Failed to stream vNext network: ${response.statusText}`);
+    }
+    if (!response.body) {
+      throw new Error("Response body is null");
+    }
+    for await (const record of this.streamProcessor(response.body)) {
+      if (typeof record === "string") {
+        onRecord(JSON.parse(record));
+      } else {
+        onRecord(record);
+      }
+    }
+  }
+  /**
+   * Streams a response from the v-next network loop
+   * @param params - Stream parameters including message
+   * @returns Promise containing the results
+   */
+  async loopStream(params, onRecord) {
+    const response = await this.request(`/api/networks/v-next/${this.networkId}/loop-stream`, {
+      method: "POST",
+      body: {
+        ...params,
+        runtimeContext: parseClientRuntimeContext(params.runtimeContext)
+      },
+      stream: true
+    });
+    if (!response.ok) {
+      throw new Error(`Failed to stream vNext network loop: ${response.statusText}`);
+    }
+    if (!response.body) {
+      throw new Error("Response body is null");
+    }
+    for await (const record of this.streamProcessor(response.body)) {
+      if (typeof record === "string") {
+        onRecord(JSON.parse(record));
+      } else {
+        onRecord(record);
+      }
+    }
+  }
+};
+
 // src/client.ts
 var MastraClient = class extends BaseResource {
+  observability;
   constructor(options) {
     super(options);
+    this.observability = new Observability(options);
   }
   /**
    * Retrieves all available agents
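Three resources arrive wholesale in this hunk: Observability (AI traces), NetworkMemoryThread, and VNextNetwork, and MastraClient now instantiates observability in its constructor. A sketch of the trace and vNext-network surfaces in an async context; the IDs, filter values, and message payload are placeholders, and getVNextNetwork is only added to the client further down in this diff:

// Paginated trace listing with optional filters.
const traces = await client.observability.getTraces({
  pagination: { page: 1, perPage: 20 },
  filters: { name: "weatherAgent" },
});
const trace = await client.observability.getTrace("trace-123");

// vNext network generate and stream.
const network = client.getVNextNetwork("myNetwork");
await network.generate({ message: "Plan a trip" });
await network.stream({ message: "Plan a trip" }, (record) => console.log(record));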
@@ -1172,6 +2580,48 @@ var MastraClient = class extends BaseResource {
   getMemoryStatus(agentId) {
     return this.request(`/api/memory/status?agentId=${agentId}`);
   }
+  /**
+   * Retrieves memory threads for a resource
+   * @param params - Parameters containing the resource ID
+   * @returns Promise containing array of memory threads
+   */
+  getNetworkMemoryThreads(params) {
+    return this.request(`/api/memory/network/threads?resourceid=${params.resourceId}&networkId=${params.networkId}`);
+  }
+  /**
+   * Creates a new memory thread
+   * @param params - Parameters for creating the memory thread
+   * @returns Promise containing the created memory thread
+   */
+  createNetworkMemoryThread(params) {
+    return this.request(`/api/memory/network/threads?networkId=${params.networkId}`, { method: "POST", body: params });
+  }
+  /**
+   * Gets a memory thread instance by ID
+   * @param threadId - ID of the memory thread to retrieve
+   * @returns MemoryThread instance
+   */
+  getNetworkMemoryThread(threadId, networkId) {
+    return new NetworkMemoryThread(this.options, threadId, networkId);
+  }
+  /**
+   * Saves messages to memory
+   * @param params - Parameters containing messages to save
+   * @returns Promise containing the saved messages
+   */
+  saveNetworkMessageToMemory(params) {
+    return this.request(`/api/memory/network/save-messages?networkId=${params.networkId}`, {
+      method: "POST",
+      body: params
+    });
+  }
+  /**
+   * Gets the status of the memory system
+   * @returns Promise containing memory system status
+   */
+  getNetworkMemoryStatus(networkId) {
+    return this.request(`/api/memory/network/status?networkId=${networkId}`);
+  }
   /**
    * Retrieves all available tools
    * @returns Promise containing map of tool IDs to tool details
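The client-level network-memory helpers above mirror the existing agent-memory ones but key every request off a networkId query parameter (note the lowercase resourceid in the threads listing URL emitted above). A sketch with placeholder IDs; the title and resourceId fields passed to the create call are assumed, not confirmed by this diff:

const created = await client.createNetworkMemoryThread({
  title: "Support session", // assumed field
  resourceId: "user-1",     // assumed field
  networkId: "myNetwork",
});

await client.getNetworkMemoryThreads({ resourceId: "user-1", networkId: "myNetwork" });

const thread = client.getNetworkMemoryThread("thread-123", "myNetwork");
await thread.getMessages({ limit: 20 });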
@@ -1187,6 +2637,21 @@ var MastraClient = class extends BaseResource {
   getTool(toolId) {
     return new Tool(this.options, toolId);
   }
+  /**
+   * Retrieves all available legacy workflows
+   * @returns Promise containing map of legacy workflow IDs to legacy workflow details
+   */
+  getLegacyWorkflows() {
+    return this.request("/api/workflows/legacy");
+  }
+  /**
+   * Gets a legacy workflow instance by ID
+   * @param workflowId - ID of the legacy workflow to retrieve
+   * @returns Legacy Workflow instance
+   */
+  getLegacyWorkflow(workflowId) {
+    return new LegacyWorkflow(this.options, workflowId);
+  }
   /**
    * Retrieves all available workflows
    * @returns Promise containing map of workflow IDs to workflow details
@@ -1202,21 +2667,6 @@ var MastraClient = class extends BaseResource {
   getWorkflow(workflowId) {
     return new Workflow(this.options, workflowId);
   }
-  /**
-   * Retrieves all available vNext workflows
-   * @returns Promise containing map of vNext workflow IDs to vNext workflow details
-   */
-  getVNextWorkflows() {
-    return this.request("/api/workflows/v-next");
-  }
-  /**
-   * Gets a vNext workflow instance by ID
-   * @param workflowId - ID of the vNext workflow to retrieve
-   * @returns vNext Workflow instance
-   */
-  getVNextWorkflow(workflowId) {
-    return new VNextWorkflow(this.options, workflowId);
-  }
   /**
    * Gets a vector instance by name
    * @param vectorName - Name of the vector to retrieve
@@ -1231,7 +2681,41 @@ var MastraClient = class extends BaseResource {
    * @returns Promise containing array of log messages
    */
   getLogs(params) {
-
+    const { transportId, fromDate, toDate, logLevel, filters, page, perPage } = params;
+    const _filters = filters ? Object.entries(filters).map(([key, value]) => `${key}:${value}`) : [];
+    const searchParams = new URLSearchParams();
+    if (transportId) {
+      searchParams.set("transportId", transportId);
+    }
+    if (fromDate) {
+      searchParams.set("fromDate", fromDate.toISOString());
+    }
+    if (toDate) {
+      searchParams.set("toDate", toDate.toISOString());
+    }
+    if (logLevel) {
+      searchParams.set("logLevel", logLevel);
+    }
+    if (page) {
+      searchParams.set("page", String(page));
+    }
+    if (perPage) {
+      searchParams.set("perPage", String(perPage));
+    }
+    if (_filters) {
+      if (Array.isArray(_filters)) {
+        for (const filter of _filters) {
+          searchParams.append("filters", filter);
+        }
+      } else {
+        searchParams.set("filters", _filters);
+      }
+    }
+    if (searchParams.size) {
+      return this.request(`/api/logs?${searchParams}`);
+    } else {
+      return this.request(`/api/logs`);
+    }
   }
   /**
    * Gets logs for a specific run
@@ -1239,7 +2723,44 @@ var MastraClient = class extends BaseResource {
    * @returns Promise containing array of log messages
    */
   getLogForRun(params) {
-
+    const { runId, transportId, fromDate, toDate, logLevel, filters, page, perPage } = params;
+    const _filters = filters ? Object.entries(filters).map(([key, value]) => `${key}:${value}`) : [];
+    const searchParams = new URLSearchParams();
+    if (runId) {
+      searchParams.set("runId", runId);
+    }
+    if (transportId) {
+      searchParams.set("transportId", transportId);
+    }
+    if (fromDate) {
+      searchParams.set("fromDate", fromDate.toISOString());
+    }
+    if (toDate) {
+      searchParams.set("toDate", toDate.toISOString());
+    }
+    if (logLevel) {
+      searchParams.set("logLevel", logLevel);
+    }
+    if (page) {
+      searchParams.set("page", String(page));
+    }
+    if (perPage) {
+      searchParams.set("perPage", String(perPage));
+    }
+    if (_filters) {
+      if (Array.isArray(_filters)) {
+        for (const filter of _filters) {
+          searchParams.append("filters", filter);
+        }
+      } else {
+        searchParams.set("filters", _filters);
+      }
+    }
+    if (searchParams.size) {
+      return this.request(`/api/logs/${runId}?${searchParams}`);
+    } else {
+      return this.request(`/api/logs/${runId}`);
+    }
   }
   /**
    * List of all log transports
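getLogs and getLogForRun, empty stubs in the old build, now serialize every option into the query string and expand a filters object into repeated key:value entries. A sketch with assumed filter keys and a placeholder transport and run ID:

const logs = await client.getLogs({
  transportId: "file",
  logLevel: "info",
  filters: { source: "agent" }, // serialized as filters=source:agent
  page: 1,
  perPage: 50,
});

const runLogs = await client.getLogForRun({ runId: "run-123", transportId: "file" });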
@@ -1297,6 +2818,13 @@ var MastraClient = class extends BaseResource {
   getNetworks() {
     return this.request("/api/networks");
   }
+  /**
+   * Retrieves all available vNext networks
+   * @returns Promise containing map of vNext network IDs to vNext network details
+   */
+  getVNextNetworks() {
+    return this.request("/api/networks/v-next");
+  }
   /**
    * Gets a network instance by ID
    * @param networkId - ID of the network to retrieve
@@ -1305,6 +2833,14 @@ var MastraClient = class extends BaseResource {
   getNetwork(networkId) {
     return new Network(this.options, networkId);
   }
+  /**
+   * Gets a vNext network instance by ID
+   * @param networkId - ID of the vNext network to retrieve
+   * @returns vNext Network instance
+   */
+  getVNextNetwork(networkId) {
+    return new VNextNetwork(this.options, networkId);
+  }
   /**
    * Retrieves a list of available MCP servers.
    * @param params - Optional parameters for pagination (limit, offset).
@@ -1361,6 +2897,134 @@ var MastraClient = class extends BaseResource {
   getA2A(agentId) {
     return new A2A(this.options, agentId);
   }
+  /**
+   * Retrieves the working memory for a specific thread (optionally resource-scoped).
+   * @param agentId - ID of the agent.
+   * @param threadId - ID of the thread.
+   * @param resourceId - Optional ID of the resource.
+   * @returns Working memory for the specified thread or resource.
+   */
+  getWorkingMemory({
+    agentId,
+    threadId,
+    resourceId
+  }) {
+    return this.request(`/api/memory/threads/${threadId}/working-memory?agentId=${agentId}&resourceId=${resourceId}`);
+  }
+  /**
+   * Updates the working memory for a specific thread (optionally resource-scoped).
+   * @param agentId - ID of the agent.
+   * @param threadId - ID of the thread.
+   * @param workingMemory - The new working memory content.
+   * @param resourceId - Optional ID of the resource.
+   */
+  updateWorkingMemory({
+    agentId,
+    threadId,
+    workingMemory,
+    resourceId
+  }) {
+    return this.request(`/api/memory/threads/${threadId}/working-memory?agentId=${agentId}`, {
+      method: "POST",
+      body: {
+        workingMemory,
+        resourceId
+      }
+    });
+  }
+  /**
+   * Retrieves all available scorers
+   * @returns Promise containing list of available scorers
+   */
+  getScorers() {
+    return this.request("/api/scores/scorers");
+  }
+  /**
+   * Retrieves a scorer by ID
+   * @param scorerId - ID of the scorer to retrieve
+   * @returns Promise containing the scorer
+   */
+  getScorer(scorerId) {
+    return this.request(`/api/scores/scorers/${scorerId}`);
+  }
+  getScoresByScorerId(params) {
+    const { page, perPage, scorerId, entityId, entityType } = params;
+    const searchParams = new URLSearchParams();
+    if (entityId) {
+      searchParams.set("entityId", entityId);
+    }
+    if (entityType) {
+      searchParams.set("entityType", entityType);
+    }
+    if (page !== void 0) {
+      searchParams.set("page", String(page));
+    }
+    if (perPage !== void 0) {
+      searchParams.set("perPage", String(perPage));
+    }
+    const queryString = searchParams.toString();
+    return this.request(`/api/scores/scorer/${scorerId}${queryString ? `?${queryString}` : ""}`);
+  }
+  /**
+   * Retrieves scores by run ID
+   * @param params - Parameters containing run ID and pagination options
+   * @returns Promise containing scores and pagination info
+   */
+  getScoresByRunId(params) {
+    const { runId, page, perPage } = params;
+    const searchParams = new URLSearchParams();
+    if (page !== void 0) {
+      searchParams.set("page", String(page));
+    }
+    if (perPage !== void 0) {
+      searchParams.set("perPage", String(perPage));
+    }
+    const queryString = searchParams.toString();
+    return this.request(`/api/scores/run/${runId}${queryString ? `?${queryString}` : ""}`);
+  }
+  /**
+   * Retrieves scores by entity ID and type
+   * @param params - Parameters containing entity ID, type, and pagination options
+   * @returns Promise containing scores and pagination info
+   */
+  getScoresByEntityId(params) {
+    const { entityId, entityType, page, perPage } = params;
+    const searchParams = new URLSearchParams();
+    if (page !== void 0) {
+      searchParams.set("page", String(page));
+    }
+    if (perPage !== void 0) {
+      searchParams.set("perPage", String(perPage));
+    }
+    const queryString = searchParams.toString();
+    return this.request(`/api/scores/entity/${entityType}/${entityId}${queryString ? `?${queryString}` : ""}`);
+  }
+  /**
+   * Saves a score
+   * @param params - Parameters containing the score data to save
+   * @returns Promise containing the saved score
+   */
+  saveScore(params) {
+    return this.request("/api/scores", {
+      method: "POST",
+      body: params
+    });
+  }
+  /**
+   * Retrieves model providers with available keys
+   * @returns Promise containing model providers with available keys
+   */
+  getModelProviders() {
+    return this.request(`/api/model-providers`);
+  }
+  getAITrace(traceId) {
+    return this.observability.getTrace(traceId);
+  }
+  getAITraces(params) {
+    return this.observability.getTraces(params);
+  }
 };
 
 export { MastraClient };
+//# sourceMappingURL=index.js.map
+//# sourceMappingURL=index.js.map
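The final hunk adds working-memory and scoring helpers plus getAITrace/getAITraces conveniences that delegate to the new observability resource. A closing sketch in an async context; the agent, thread, and entity identifiers are placeholders and the entityType value is assumed:

const wm = await client.getWorkingMemory({ agentId: "myAgent", threadId: "thread-123" });

await client.updateWorkingMemory({
  agentId: "myAgent",
  threadId: "thread-123",
  workingMemory: "# User profile\n- prefers metric units",
});

await client.getScorers();
await client.getScoresByEntityId({ entityType: "AGENT", entityId: "myAgent", page: 1 });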