@mastra/client-js 0.0.0-break-rename-vnext-legacy-20250926163953 → 0.0.0-break-rename-vnext-legacy-20251002212351
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +65 -4
- package/dist/client.d.ts +2 -13
- package/dist/client.d.ts.map +1 -1
- package/dist/index.cjs +173 -180
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +173 -180
- package/dist/index.js.map +1 -1
- package/dist/resources/agent.d.ts +18 -5
- package/dist/resources/agent.d.ts.map +1 -1
- package/dist/resources/observability.d.ts +7 -1
- package/dist/resources/observability.d.ts.map +1 -1
- package/dist/resources/workflow.d.ts +23 -8
- package/dist/resources/workflow.d.ts.map +1 -1
- package/dist/types.d.ts +15 -1
- package/dist/types.d.ts.map +1 -1
- package/package.json +4 -4
- package/dist/resources/vNextNetwork.d.ts +0 -43
- package/dist/resources/vNextNetwork.d.ts.map +0 -1
package/CHANGELOG.md
CHANGED
@@ -1,19 +1,80 @@
  # @mastra/client-js

- ## 0.0.0-break-rename-vnext-legacy-
+ ## 0.0.0-break-rename-vnext-legacy-20251002212351
+
+ ### Minor Changes
+
+ - Breaking change to move the agent.streamVNext/generateVNext implementation to the default stream/generate. The old stream/generate have now been moved to streamLegacy and generateLegacy ([`aef427d`](https://github.com/mastra-ai/mastra/commit/aef427d7e7a414afcf3c561b94289b580f117219))
+
+ ### Patch Changes
+
+ - Fix an issue preventing working memory and semantic recall from showing in the playground ([#8358](https://github.com/mastra-ai/mastra/pull/8358))
+
+ - Add observe stream to get streams after workflow has been interrupted ([#8318](https://github.com/mastra-ai/mastra/pull/8318))
+
+
- Updated dependencies [[`00cb6bd`](https://github.com/mastra-ai/mastra/commit/00cb6bdf78737c0fac14a5a0c7b532a11e38558a), [`869ba22`](https://github.com/mastra-ai/mastra/commit/869ba222e1d6b58fc1b65e7c9fd55ca4e01b8c2f), [`1b73665`](https://github.com/mastra-ai/mastra/commit/1b73665e8e23f5c09d49fcf3e7d709c75259259e), [`f7d7475`](https://github.com/mastra-ai/mastra/commit/f7d747507341aef60ed39e4b49318db1f86034a6), [`084b77b`](https://github.com/mastra-ai/mastra/commit/084b77b2955960e0190af8db3f77138aa83ed65c), [`a93ff84`](https://github.com/mastra-ai/mastra/commit/a93ff84b5e1af07ee236ac8873dac9b49aa5d501), [`bc5aacb`](https://github.com/mastra-ai/mastra/commit/bc5aacb646d468d325327e36117129f28cd13bf6), [`6b5af12`](https://github.com/mastra-ai/mastra/commit/6b5af12ce9e09066e0c32e821c203a6954498bea), [`bf60e4a`](https://github.com/mastra-ai/mastra/commit/bf60e4a89c515afd9570b7b79f33b95e7d07c397), [`aef427d`](https://github.com/mastra-ai/mastra/commit/aef427d7e7a414afcf3c561b94289b580f117219), [`e8fe13c`](https://github.com/mastra-ai/mastra/commit/e8fe13c4b4c255a42520127797ec394310f7c919), [`3ca833d`](https://github.com/mastra-ai/mastra/commit/3ca833dc994c38e3c9b4f9b4478a61cd8e07b32a), [`1edb8d1`](https://github.com/mastra-ai/mastra/commit/1edb8d1cfb963e72a12412990fb9170936c9904c), [`fbf6e32`](https://github.com/mastra-ai/mastra/commit/fbf6e324946332d0f5ed8930bf9d4d4479cefd7a), [`4753027`](https://github.com/mastra-ai/mastra/commit/4753027ee889288775c6958bdfeda03ff909af67)]:
+   - @mastra/core@0.0.0-break-rename-vnext-legacy-20251002212351
+
+ ## 0.14.1
+
+ ### Patch Changes
+
+ - Updated dependencies [[`4a70ccc`](https://github.com/mastra-ai/mastra/commit/4a70ccc5cfa12ae9c2b36545a5814cd98e5a0ead), [`0992b8b`](https://github.com/mastra-ai/mastra/commit/0992b8bf0f4f1ba7ad9940883ec4bb8d867d3105), [`283bea0`](https://github.com/mastra-ai/mastra/commit/283bea07adbaf04a27fa3ad2df611095e0825195)]:
+   - @mastra/core@0.19.1
+
+ ## 0.14.1-alpha.1
+
+ ### Patch Changes
+
+ - Updated dependencies [[`4a70ccc`](https://github.com/mastra-ai/mastra/commit/4a70ccc5cfa12ae9c2b36545a5814cd98e5a0ead)]:
+   - @mastra/core@0.19.1-alpha.1
+
+ ## 0.14.1-alpha.0
+
+ ### Patch Changes
+
+ - Updated dependencies [[`0992b8b`](https://github.com/mastra-ai/mastra/commit/0992b8bf0f4f1ba7ad9940883ec4bb8d867d3105), [`283bea0`](https://github.com/mastra-ai/mastra/commit/283bea07adbaf04a27fa3ad2df611095e0825195)]:
+   - @mastra/core@0.19.1-alpha.0
+
+ ## 0.14.0

  ### Minor Changes

  - Remove legacy helpers ([#8017](https://github.com/mastra-ai/mastra/pull/8017))

-
+ ### Patch Changes
+
+ - Fix duplicate messages being created using clientTools with client-js sdk ([#8187](https://github.com/mastra-ai/mastra/pull/8187))
+
+ - Fix hanging stream for non-self executing client-tools ([#8272](https://github.com/mastra-ai/mastra/pull/8272))
+
+ - Support passing tracing options for start/resume workflows for server APIs and client sdk ([#8277](https://github.com/mastra-ai/mastra/pull/8277))
+
+ - Support tracing options for workflow streaming endpoints ([#8278](https://github.com/mastra-ai/mastra/pull/8278))
+
+ - Adjust deprecation warnings ([#8326](https://github.com/mastra-ai/mastra/pull/8326))
+
+ - Add server apis to get scores by span ([#8237](https://github.com/mastra-ai/mastra/pull/8237))
+
+
- Updated dependencies [[`dc099b4`](https://github.com/mastra-ai/mastra/commit/dc099b40fb31147ba3f362f98d991892033c4c67), [`504438b`](https://github.com/mastra-ai/mastra/commit/504438b961bde211071186bba63a842c4e3db879), [`b342a68`](https://github.com/mastra-ai/mastra/commit/b342a68e1399cf1ece9ba11bda112db89d21118c), [`a7243e2`](https://github.com/mastra-ai/mastra/commit/a7243e2e58762667a6e3921e755e89d6bb0a3282), [`7fceb0a`](https://github.com/mastra-ai/mastra/commit/7fceb0a327d678e812f90f5387c5bc4f38bd039e), [`303a9c0`](https://github.com/mastra-ai/mastra/commit/303a9c0d7dd58795915979f06a0512359e4532fb), [`df64f9e`](https://github.com/mastra-ai/mastra/commit/df64f9ef814916fff9baedd861c988084e7c41de), [`370f8a6`](https://github.com/mastra-ai/mastra/commit/370f8a6480faec70fef18d72e5f7538f27004301), [`809eea0`](https://github.com/mastra-ai/mastra/commit/809eea092fa80c3f69b9eaf078d843b57fd2a88e), [`683e5a1`](https://github.com/mastra-ai/mastra/commit/683e5a1466e48b686825b2c11f84680f296138e4), [`3679378`](https://github.com/mastra-ai/mastra/commit/3679378673350aa314741dc826f837b1984149bc), [`7775bc2`](https://github.com/mastra-ai/mastra/commit/7775bc20bb1ad1ab24797fb420e4f96c65b0d8ec), [`623ffaf`](https://github.com/mastra-ai/mastra/commit/623ffaf2d969e11e99a0224633cf7b5a0815c857), [`9fc1613`](https://github.com/mastra-ai/mastra/commit/9fc16136400186648880fd990119ac15f7c02ee4), [`61f62aa`](https://github.com/mastra-ai/mastra/commit/61f62aa31bc88fe4ddf8da6240dbcfbeb07358bd), [`db1891a`](https://github.com/mastra-ai/mastra/commit/db1891a4707443720b7cd8a260dc7e1d49b3609c), [`e8f379d`](https://github.com/mastra-ai/mastra/commit/e8f379d390efa264c4e0874f9ac0cf8839b07777), [`652066b`](https://github.com/mastra-ai/mastra/commit/652066bd1efc6bb6813ba950ed1d7573e8b7d9d4), [`3e292ba`](https://github.com/mastra-ai/mastra/commit/3e292ba00837886d5d68a34cbc0d9b703c991883), [`418c136`](https://github.com/mastra-ai/mastra/commit/418c1366843d88e491bca3f87763899ce855ca29), [`ea8d386`](https://github.com/mastra-ai/mastra/commit/ea8d386cd8c5593664515fd5770c06bf2aa980ef), [`67b0f00`](https://github.com/mastra-ai/mastra/commit/67b0f005b520335c71fb85cbaa25df4ce8484a81), [`c2a4919`](https://github.com/mastra-ai/mastra/commit/c2a4919ba6797d8bdb1509e02287496eef69303e), [`c84b7d0`](https://github.com/mastra-ai/mastra/commit/c84b7d093c4657772140cbfd2b15ef72f3315ed5), [`0130986`](https://github.com/mastra-ai/mastra/commit/0130986fc62d0edcc626dd593282661dbb9af141)]:
+   - @mastra/core@0.19.0
+
+ ## 0.14.0-alpha.1

  ### Patch Changes

  - Fix duplicate messages being created using clientTools with client-js sdk ([#8187](https://github.com/mastra-ai/mastra/pull/8187))

- -
-
+ - Fix hanging stream for non-self executing client-tools ([#8272](https://github.com/mastra-ai/mastra/pull/8272))
+
+ - Support passing tracing options for start/resume workflows for server APIs and client sdk ([#8277](https://github.com/mastra-ai/mastra/pull/8277))
+
+ - Support tracing options for workflow streaming endpoints ([#8278](https://github.com/mastra-ai/mastra/pull/8278))
+
+ - Add server apis to get scores by span ([#8237](https://github.com/mastra-ai/mastra/pull/8237))
+
+
- Updated dependencies [[`504438b`](https://github.com/mastra-ai/mastra/commit/504438b961bde211071186bba63a842c4e3db879), [`a7243e2`](https://github.com/mastra-ai/mastra/commit/a7243e2e58762667a6e3921e755e89d6bb0a3282), [`7fceb0a`](https://github.com/mastra-ai/mastra/commit/7fceb0a327d678e812f90f5387c5bc4f38bd039e), [`df64f9e`](https://github.com/mastra-ai/mastra/commit/df64f9ef814916fff9baedd861c988084e7c41de), [`809eea0`](https://github.com/mastra-ai/mastra/commit/809eea092fa80c3f69b9eaf078d843b57fd2a88e), [`683e5a1`](https://github.com/mastra-ai/mastra/commit/683e5a1466e48b686825b2c11f84680f296138e4), [`3679378`](https://github.com/mastra-ai/mastra/commit/3679378673350aa314741dc826f837b1984149bc), [`7775bc2`](https://github.com/mastra-ai/mastra/commit/7775bc20bb1ad1ab24797fb420e4f96c65b0d8ec), [`db1891a`](https://github.com/mastra-ai/mastra/commit/db1891a4707443720b7cd8a260dc7e1d49b3609c), [`e8f379d`](https://github.com/mastra-ai/mastra/commit/e8f379d390efa264c4e0874f9ac0cf8839b07777), [`652066b`](https://github.com/mastra-ai/mastra/commit/652066bd1efc6bb6813ba950ed1d7573e8b7d9d4), [`ea8d386`](https://github.com/mastra-ai/mastra/commit/ea8d386cd8c5593664515fd5770c06bf2aa980ef), [`c2a4919`](https://github.com/mastra-ai/mastra/commit/c2a4919ba6797d8bdb1509e02287496eef69303e), [`0130986`](https://github.com/mastra-ai/mastra/commit/0130986fc62d0edcc626dd593282661dbb9af141)]:
+   - @mastra/core@0.19.0-alpha.1

  ## 0.14.0-alpha.0

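The minor change above renames the agent call surface: `generate` and `stream` now carry what `generateVNext`/`streamVNext` used to do, and the previous implementations survive as `generateLegacy`/`streamLegacy`. A minimal migration sketch against a locally running Mastra server; the base URL, agent id, and message shapes are illustrative assumptions, not taken from this diff:

```ts
import { MastraClient } from '@mastra/client-js';

const client = new MastraClient({ baseUrl: 'http://localhost:4111' }); // assumed local dev server
const agent = client.getAgent('weather-agent'); // hypothetical agent id

async function migrate() {
  // Before this release: agent.generateVNext(...). Now the same behaviour is the default:
  const result = await agent.generate('What is the weather in Paris?');
  console.log(result);

  // The pre-rename implementation stays reachable under its Legacy name:
  const legacy = await agent.generateLegacy({ messages: [{ role: 'user', content: 'Hello' }] });
  console.log(legacy);
}
```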
package/dist/client.d.ts
CHANGED
@@ -3,8 +3,7 @@ import type { ServerDetailInfo } from '@mastra/core/mcp';
  import type { RuntimeContext } from '@mastra/core/runtime-context';
  import { Agent, MemoryThread, Tool, Workflow, Vector, BaseResource, A2A, MCPTool, AgentBuilder } from './resources/index.js';
  import { NetworkMemoryThread } from './resources/network-memory-thread.js';
- import {
- import type { ClientOptions, CreateMemoryThreadParams, CreateMemoryThreadResponse, GetAgentResponse, GetLogParams, GetLogsParams, GetLogsResponse, GetMemoryThreadParams, GetMemoryThreadResponse, GetTelemetryParams, GetTelemetryResponse, GetToolResponse, GetWorkflowResponse, SaveMessageToMemoryParams, SaveMessageToMemoryResponse, McpServerListResponse, McpServerToolListResponse, GetVNextNetworkResponse, GetNetworkMemoryThreadParams, CreateNetworkMemoryThreadParams, SaveNetworkMessageToMemoryParams, GetScorerResponse, GetScoresByScorerIdParams, GetScoresResponse, GetScoresByRunIdParams, GetScoresByEntityIdParams, SaveScoreParams, SaveScoreResponse, GetAITracesResponse, GetMemoryConfigParams, GetMemoryConfigResponse, GetMemoryThreadMessagesResponse } from './types.js';
+ import type { ClientOptions, CreateMemoryThreadParams, CreateMemoryThreadResponse, GetAgentResponse, GetLogParams, GetLogsParams, GetLogsResponse, GetMemoryThreadParams, GetMemoryThreadResponse, GetTelemetryParams, GetTelemetryResponse, GetToolResponse, GetWorkflowResponse, SaveMessageToMemoryParams, SaveMessageToMemoryResponse, McpServerListResponse, McpServerToolListResponse, GetNetworkMemoryThreadParams, CreateNetworkMemoryThreadParams, SaveNetworkMessageToMemoryParams, GetScorerResponse, GetScoresByScorerIdParams, GetScoresResponse, GetScoresByRunIdParams, GetScoresByEntityIdParams, GetScoresBySpanParams, SaveScoreParams, SaveScoreResponse, GetAITracesResponse, GetMemoryConfigParams, GetMemoryConfigResponse, GetMemoryThreadMessagesResponse } from './types.js';
  export declare class MastraClient extends BaseResource {
  private observability;
  constructor(options: ClientOptions);
@@ -164,17 +163,6 @@ export declare class MastraClient extends BaseResource {
  * @returns Promise containing telemetry data
  */
  getTelemetry(params?: GetTelemetryParams): Promise<GetTelemetryResponse>;
- /**
- * Retrieves all available vNext networks
- * @returns Promise containing map of vNext network IDs to vNext network details
- */
- getVNextNetworks(): Promise<Array<GetVNextNetworkResponse>>;
- /**
- * Gets a vNext network instance by ID
- * @param networkId - ID of the vNext network to retrieve
- * @returns vNext Network instance
- */
- getVNextNetwork(networkId: string): VNextNetwork;
  /**
  * Retrieves a list of available MCP servers.
  * @param params - Optional parameters for pagination (limit, offset).
@@ -275,6 +263,7 @@ export declare class MastraClient extends BaseResource {
  getModelProviders(): Promise<string[]>;
  getAITrace(traceId: string): Promise<AITraceRecord>;
  getAITraces(params: AITracesPaginatedArg): Promise<GetAITracesResponse>;
+ getScoresBySpan(params: GetScoresBySpanParams): Promise<GetScoresResponse>;
  score(params: {
  scorerName: string;
  targets: Array<{
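The new `getScoresBySpan` declaration is the client-side entry point for the "scores by span" server API mentioned in the changelog. Judging from the implementation later in this diff, `GetScoresBySpanParams` carries a `traceId`, a `spanId`, and optional `page`/`perPage` pagination. A hedged sketch with hypothetical ids:

```ts
import { MastraClient } from '@mastra/client-js';

const client = new MastraClient({ baseUrl: 'http://localhost:4111' }); // assumed local dev server

async function listSpanScores() {
  const response = await client.getScoresBySpan({
    traceId: 'trace-123', // hypothetical trace id
    spanId: 'span-456',   // hypothetical span id
    page: 0,              // pagination is optional per the implementation
    perPage: 20,
  });
  console.log(response);
}
```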
package/dist/client.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"client.d.ts","sourceRoot":"","sources":["../src/client.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,aAAa,EAAE,oBAAoB,EAAE,YAAY,EAAE,MAAM,cAAc,CAAC;AACtF,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,kBAAkB,CAAC;AACzD,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,8BAA8B,CAAC;AACnE,OAAO,EACL,KAAK,EACL,YAAY,EACZ,IAAI,EACJ,QAAQ,EACR,MAAM,EACN,YAAY,EACZ,GAAG,EACH,OAAO,EACP,YAAY,EAEb,MAAM,aAAa,CAAC;AACrB,OAAO,EAAE,mBAAmB,EAAE,MAAM,mCAAmC,CAAC;AACxE,OAAO,
+
{"version":3,"file":"client.d.ts","sourceRoot":"","sources":["../src/client.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,aAAa,EAAE,oBAAoB,EAAE,YAAY,EAAE,MAAM,cAAc,CAAC;AACtF,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,kBAAkB,CAAC;AACzD,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,8BAA8B,CAAC;AACnE,OAAO,EACL,KAAK,EACL,YAAY,EACZ,IAAI,EACJ,QAAQ,EACR,MAAM,EACN,YAAY,EACZ,GAAG,EACH,OAAO,EACP,YAAY,EAEb,MAAM,aAAa,CAAC;AACrB,OAAO,EAAE,mBAAmB,EAAE,MAAM,mCAAmC,CAAC;AACxE,OAAO,KAAK,EACV,aAAa,EACb,wBAAwB,EACxB,0BAA0B,EAC1B,gBAAgB,EAChB,YAAY,EACZ,aAAa,EACb,eAAe,EACf,qBAAqB,EACrB,uBAAuB,EACvB,kBAAkB,EAClB,oBAAoB,EACpB,eAAe,EACf,mBAAmB,EACnB,yBAAyB,EACzB,2BAA2B,EAC3B,qBAAqB,EACrB,yBAAyB,EACzB,4BAA4B,EAC5B,+BAA+B,EAC/B,gCAAgC,EAChC,iBAAiB,EACjB,yBAAyB,EACzB,iBAAiB,EACjB,sBAAsB,EACtB,yBAAyB,EACzB,qBAAqB,EACrB,eAAe,EACf,iBAAiB,EACjB,mBAAmB,EACnB,qBAAqB,EACrB,uBAAuB,EACvB,+BAA+B,EAChC,MAAM,SAAS,CAAC;AAGjB,qBAAa,YAAa,SAAQ,YAAY;IAC5C,OAAO,CAAC,aAAa,CAAgB;gBACzB,OAAO,EAAE,aAAa;IAKlC;;;;OAIG;IACI,SAAS,CAAC,cAAc,CAAC,EAAE,cAAc,GAAG,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,OAAO,CAAC,MAAM,CAAC,MAAM,EAAE,gBAAgB,CAAC,CAAC;IAalH;;;;OAIG;IACI,QAAQ,CAAC,OAAO,EAAE,MAAM;IAI/B;;;;OAIG;IACI,gBAAgB,CAAC,MAAM,EAAE,qBAAqB,GAAG,OAAO,CAAC,uBAAuB,CAAC;IAIxF;;;;OAIG;IACI,eAAe,CAAC,MAAM,EAAE,qBAAqB,GAAG,OAAO,CAAC,uBAAuB,CAAC;IAIvF;;;;OAIG;IACI,kBAAkB,CAAC,MAAM,EAAE,wBAAwB,GAAG,OAAO,CAAC,0BAA0B,CAAC;IAIhG;;;;OAIG;IACI,eAAe,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM;IAIjD,iBAAiB,CACtB,QAAQ,EAAE,MAAM,EAChB,IAAI,GAAE;QAAE,OAAO,CAAC,EAAE,MAAM,CAAC;QAAC,SAAS,CAAC,EAAE,MAAM,CAAA;KAAO,GAClD,OAAO,CAAC,+BAA+B,CAAC;IAUpC,YAAY,CACjB,QAAQ,EAAE,MAAM,EAChB,IAAI,GAAE;QAAE,OAAO,CAAC,EAAE,MAAM,CAAC;QAAC,SAAS,CAAC,EAAE,MAAM,CAAA;KAAO,GAClD,OAAO,CAAC;QAAE,OAAO,EAAE,OAAO,CAAC;QAAC,OAAO,EAAE,MAAM,CAAA;KAAE,CAAC;IAWjD;;;;OAIG;IACI,mBAAmB,CAAC,MAAM,EAAE,yBAAyB,GAAG,OAAO,CAAC,2BAA2B,CAAC;IAOnG;;;OAGG;IACI,eAAe,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO,CAAC;QAAE,MAAM,EAAE,OAAO,CAAA;KAAE,CAAC;IAIrE;;;;OAIG;IACI,uBAAuB,CAAC,MAAM,EAAE,4BAA4B,GAAG,OAAO,CAAC,uBAAuB,CAAC;IAItG;;;;OAIG;IACI,yBAAyB,CAAC,MAAM,EAAE,+BAA+B,GAAG,OAAO,CAAC,0BAA0B,CAAC;IAI9G;;;;OAIG;IACI,sBAAsB,CAAC,QAAQ,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM;IAIjE;;;;OAIG;IACI,0BAA0B,CAAC,MAAM,EAAE,gCAAgC,GAAG,OAAO,CAAC,2BAA2B,CAAC;IAOjH;;;OAGG;IACI,sBAAsB,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC;QAAE,MAAM,EAAE,OAAO,CAAA;KAAE,CAAC;IAI9E;;;;OAIG;IACI,QAAQ,CAAC,cAAc,CAAC,EAAE,cAAc,GAAG,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,OAAO,CAAC,MAAM,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC;IAahH;;;;OAIG;IACI,OAAO,CAAC,MAAM,EAAE,MAAM;IAI7B;;;;OAIG;IACI,YAAY,CACjB,cAAc,CAAC,EAAE,cAAc,GAAG,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GACpD,OAAO,CAAC,MAAM,CAAC,MAAM,EAAE,mBAAmB,CAAC,CAAC;IAa/C;;;;OAIG;IACI,WAAW,CAAC,UAAU,EAAE,MAAM;IAIrC;;;OAGG;IACI,sBAAsB,IAAI,OAAO,CAAC,MAAM,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC;IAItE;;;OAGG;IACI,qBAAqB,CAAC,QAAQ,EAAE,MAAM;IAI7C;;;;OAIG;IACI,SAAS,CAAC,UAAU,EAAE,MAAM;IAInC;;;;OAIG;IACI,OAAO,CAAC,MAAM,EAAE,aAAa,GAAG,OAAO,CAAC,eAAe,CAAC;IAwC/D;;;;OAIG;IACI,YAAY,CAAC,MAAM,EAAE,YAAY,GAAG,OAAO,CAAC,eAAe,CAAC;IA4CnE;;;OAGG;IACI,gBAAgB,IAAI,OAAO,CAAC;QAAE,UAAU,EAAE,MAAM,EAAE,CAAA;KAAE,CAAC;IAI5D;;;;OAIG;IACI,YAAY,CAAC,MAAM,CAAC,EAAE,kBAAkB,GAAG,OAAO,CAAC,oBAAoB,CAAC;IAwC/E;;;;OAIG;IACI,aAAa,CAAC,MAAM,CAAC,EAAE;QAAE,KAAK,CAAC,EAAE,MAAM,CAAC;QAAC,MAAM,CAAC,EAAE,MAAM,CAAA;KAAE,GAAG,OAAO,CAAC,qBAAqB,CAAC;IAYlG;;;;;OAKG;IACI,mBAAmB,CAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,MAAM,CAAA;KAAE,GAAG,OAAO,CAAC,gBAAgB,CAAC;IAStG;;;;OAIG;IACI,iBAAiB,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,yBAAyB,CAAC;IAI9E;;;;;;OAMG;IACI,gBAAgB,CAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,OAAO;IAIlE;;;;OAIG;IACI
,MAAM,CAAC,OAAO,EAAE,MAAM;IAI7B;;;;;;OAMG;IACI,gBAAgB,CAAC,EACtB,OAAO,EACP,QAAQ,EACR,UAAU,GACX,EAAE;QACD,OAAO,EAAE,MAAM,CAAC;QAChB,QAAQ,EAAE,MAAM,CAAC;QACjB,UAAU,CAAC,EAAE,MAAM,CAAC;KACrB;IAID;;;;;;OAMG;IACI,mBAAmB,CAAC,EACzB,OAAO,EACP,QAAQ,EACR,aAAa,EACb,UAAU,GACX,EAAE;QACD,OAAO,EAAE,MAAM,CAAC;QAChB,QAAQ,EAAE,MAAM,CAAC;QACjB,aAAa,EAAE,MAAM,CAAC;QACtB,UAAU,CAAC,EAAE,MAAM,CAAC;KACrB;IAUD;;;OAGG;IACI,UAAU,IAAI,OAAO,CAAC,MAAM,CAAC,MAAM,EAAE,iBAAiB,CAAC,CAAC;IAI/D;;;;OAIG;IACI,SAAS,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,iBAAiB,CAAC;IAIvD,mBAAmB,CAAC,MAAM,EAAE,yBAAyB,GAAG,OAAO,CAAC,iBAAiB,CAAC;IAqBzF;;;;OAIG;IACI,gBAAgB,CAAC,MAAM,EAAE,sBAAsB,GAAG,OAAO,CAAC,iBAAiB,CAAC;IAenF;;;;OAIG;IACI,mBAAmB,CAAC,MAAM,EAAE,yBAAyB,GAAG,OAAO,CAAC,iBAAiB,CAAC;IAiBzF;;;;OAIG;IACI,SAAS,CAAC,MAAM,EAAE,eAAe,GAAG,OAAO,CAAC,iBAAiB,CAAC;IAOrE;;;OAGG;IACH,iBAAiB,IAAI,OAAO,CAAC,MAAM,EAAE,CAAC;IAItC,UAAU,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO,CAAC,aAAa,CAAC;IAInD,WAAW,CAAC,MAAM,EAAE,oBAAoB,GAAG,OAAO,CAAC,mBAAmB,CAAC;IAIvE,eAAe,CAAC,MAAM,EAAE,qBAAqB,GAAG,OAAO,CAAC,iBAAiB,CAAC;IAI1E,KAAK,CAAC,MAAM,EAAE;QACZ,UAAU,EAAE,MAAM,CAAC;QACnB,OAAO,EAAE,KAAK,CAAC;YAAE,OAAO,EAAE,MAAM,CAAC;YAAC,MAAM,CAAC,EAAE,MAAM,CAAA;SAAE,CAAC,CAAC;KACtD,GAAG,OAAO,CAAC;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,OAAO,EAAE,MAAM,CAAA;KAAE,CAAC;CAGjD"}
package/dist/index.cjs
CHANGED
@@ -689,11 +689,6 @@ var Agent = class extends BaseResource {
  });
  onFinish?.({ message, finishReason, usage });
  }
- /**
- * Streams a response from the agent
- * @param params - Stream parameters including prompt
- * @returns Promise containing the enhanced Response object with processDataStream method
- */
  /**
  * Streams a response from the agent
  * @param params - Stream parameters including prompt
@@ -1028,9 +1023,11 @@ var Agent = class extends BaseResource {
  if (toolCall) {
  toolCalls.push(toolCall);
  }
+ let shouldExecuteClientTool = false;
  for (const toolCall2 of toolCalls) {
  const clientTool = processedParams.clientTools?.[toolCall2.toolName];
  if (clientTool && clientTool.execute) {
+ shouldExecuteClientTool = true;
  const result = await clientTool.execute(
  {
  context: toolCall2?.args,
@@ -1079,6 +1076,11 @@ var Agent = class extends BaseResource {
  });
  }
  }
+ if (!shouldExecuteClientTool) {
+ setTimeout(() => {
+ writable.close();
+ }, 0);
+ }
  } else {
  setTimeout(() => {
  writable.close();
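These two hunks implement the "Fix hanging stream for non-self executing client-tools" entry: a `shouldExecuteClientTool` flag is set only when a returned tool call maps to a client tool that has an `execute` function, and if none does, the writable side of the stream is closed instead of being left open. A sketch of the calling side; the agent id and the client-tool definition shape are illustrative assumptions, not taken from this diff:

```ts
import { MastraClient } from '@mastra/client-js';
import { z } from 'zod';

const client = new MastraClient({ baseUrl: 'http://localhost:4111' });
const agent = client.getAgent('weather-agent'); // hypothetical agent id

async function run() {
  const response = await agent.stream({
    messages: 'What is the weather in Paris?',
    clientTools: {
      // This tool has no execute function (the host UI handles it elsewhere);
      // with this fix the stream now closes cleanly instead of hanging.
      openWeatherPanel: {
        id: 'openWeatherPanel',
        description: 'Open the weather panel in the host UI',
        inputSchema: z.object({ city: z.string() }),
      },
    },
  });
  // The returned Response is enhanced with a processDataStream(...) helper (see the jsdoc above);
  // consume it with whichever handlers your UI needs.
  console.log(response.ok);
}
```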
@@ -1343,6 +1345,12 @@ var Agent = class extends BaseResource {
  body: params
  });
  }
+ async generateVNext(_messagesOrParams, _options) {
+ throw new Error("generateVNext has been renamed to generate. Please use generate instead.");
+ }
+ async streamVNext(_messagesOrParams, _options) {
+ throw new Error("streamVNext has been renamed to stream. Please use stream instead.");
+ }
  };

  // src/resources/memory-thread.ts
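The stubs above keep the old method names on the compiled Agent class but fail fast with a rename hint, so call sites built against older releases get an explicit error rather than silently different behaviour. What a caller sees, sketched with a hypothetical agent handle:

```ts
import { MastraClient } from '@mastra/client-js';

const agent = new MastraClient({ baseUrl: 'http://localhost:4111' }).getAgent('weather-agent'); // hypothetical

async function oldCallSite() {
  try {
    // Cast because the old name may no longer be part of the public typings:
    await (agent as any).generateVNext('Hello');
  } catch (err) {
    // -> "generateVNext has been renamed to generate. Please use generate instead."
    console.error((err as Error).message);
  }
}
```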
@@ -1691,10 +1699,20 @@ var Workflow = class extends BaseResource {
  return {
  runId,
  start: async (p) => {
- return this.start({
+ return this.start({
+ runId,
+ inputData: p.inputData,
+ runtimeContext: p.runtimeContext,
+ tracingOptions: p.tracingOptions
+ });
  },
  startAsync: async (p) => {
- return this.startAsync({
+ return this.startAsync({
+ runId,
+ inputData: p.inputData,
+ runtimeContext: p.runtimeContext,
+ tracingOptions: p.tracingOptions
+ });
  },
  watch: async (onRecord) => {
  return this.watch({ runId }, onRecord);
@@ -1703,10 +1721,22 @@ var Workflow = class extends BaseResource {
  return this.stream({ runId, inputData: p.inputData, runtimeContext: p.runtimeContext });
  },
  resume: async (p) => {
- return this.resume({
+ return this.resume({
+ runId,
+ step: p.step,
+ resumeData: p.resumeData,
+ runtimeContext: p.runtimeContext,
+ tracingOptions: p.tracingOptions
+ });
  },
  resumeAsync: async (p) => {
- return this.resumeAsync({
+ return this.resumeAsync({
+ runId,
+ step: p.step,
+ resumeData: p.resumeData,
+ runtimeContext: p.runtimeContext,
+ tracingOptions: p.tracingOptions
+ });
  },
  resumeStreamVNext: async (p) => {
  return this.resumeStreamVNext({
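With these two hunks the run handle returned by `createRun` forwards `tracingOptions` (alongside `inputData`, `runtimeContext`, `step`, and `resumeData`) to `start`, `startAsync`, `resume`, and `resumeAsync`, matching the "Support passing tracing options for start/resume workflows" changelog entry. A sketch; the workflow id, step id, and tracing metadata shape are assumptions for illustration:

```ts
import { MastraClient } from '@mastra/client-js';

const client = new MastraClient({ baseUrl: 'http://localhost:4111' });
const workflow = client.getWorkflow('my-workflow'); // hypothetical workflow id

async function runWithTracing() {
  const run = await workflow.createRun();

  await run.start({
    inputData: { city: 'Paris' },
    tracingOptions: { metadata: { source: 'docs-example' } }, // assumed tracingOptions shape
  });

  // Later, after the run suspends on a step:
  await run.resume({
    step: 'approval-step', // hypothetical suspended step id
    resumeData: { approved: true },
    tracingOptions: { metadata: { source: 'docs-example' } },
  });
}
```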
@@ -1727,7 +1757,7 @@ var Workflow = class extends BaseResource {
  const runtimeContext = parseClientRuntimeContext(params.runtimeContext);
  return this.request(`/api/workflows/${this.workflowId}/start?runId=${params.runId}`, {
  method: "POST",
- body: { inputData: params?.inputData, runtimeContext }
+ body: { inputData: params?.inputData, runtimeContext, tracingOptions: params.tracingOptions }
  });
  }
  /**
@@ -1739,6 +1769,7 @@ var Workflow = class extends BaseResource {
  step,
  runId,
  resumeData,
+ tracingOptions,
  ...rest
  }) {
  const runtimeContext = parseClientRuntimeContext(rest.runtimeContext);
@@ -1747,7 +1778,8 @@ var Workflow = class extends BaseResource {
  body: {
  step,
  resumeData,
- runtimeContext
+ runtimeContext,
+ tracingOptions
  }
  });
  }
@@ -1764,7 +1796,7 @@ var Workflow = class extends BaseResource {
  const runtimeContext = parseClientRuntimeContext(params.runtimeContext);
  return this.request(`/api/workflows/${this.workflowId}/start-async?${searchParams.toString()}`, {
  method: "POST",
- body: { inputData: params.inputData, runtimeContext }
+ body: { inputData: params.inputData, runtimeContext, tracingOptions: params.tracingOptions }
  });
  }
  /**
@@ -1782,7 +1814,7 @@ var Workflow = class extends BaseResource {
  `/api/workflows/${this.workflowId}/stream?${searchParams.toString()}`,
  {
  method: "POST",
- body: { inputData: params.inputData, runtimeContext },
+ body: { inputData: params.inputData, runtimeContext, tracingOptions: params.tracingOptions },
  stream: true
  }
  );
@@ -1880,7 +1912,12 @@ var Workflow = class extends BaseResource {
  `/api/workflows/${this.workflowId}/streamVNext?${searchParams.toString()}`,
  {
  method: "POST",
- body: {
+ body: {
+ inputData: params.inputData,
+ runtimeContext,
+ closeOnSuspend: params.closeOnSuspend,
+ tracingOptions: params.tracingOptions
+ },
  stream: true
  }
  );
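`streamVNext` now forwards `closeOnSuspend` and `tracingOptions` in the request body (the plain `stream` endpoint above gains `tracingOptions` as well). A hedged sketch of streaming a run and reading the returned `ReadableStream`; ids and option shapes are illustrative:

```ts
import { MastraClient } from '@mastra/client-js';

const client = new MastraClient({ baseUrl: 'http://localhost:4111' });
const workflow = client.getWorkflow('my-workflow'); // hypothetical workflow id

async function streamRun() {
  const run = await workflow.createRun();
  const stream = await workflow.streamVNext({
    runId: run.runId,
    inputData: { city: 'Paris' },
    closeOnSuspend: false, // keep streaming across a suspension (assumed semantics)
    tracingOptions: { metadata: { source: 'docs-example' } },
  });

  const reader = stream.getReader();
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    console.log(value); // one parsed workflow event per chunk
  }
}
```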
@@ -1916,6 +1953,53 @@ var Workflow = class extends BaseResource {
  });
  return response.body.pipeThrough(transformStream);
  }
+ /**
+ * Observes workflow vNext stream for a workflow run
+ * @param params - Object containing the runId
+ * @returns Promise containing the workflow execution results
+ */
+ async observeStreamVNext(params) {
+ const searchParams = new URLSearchParams();
+ searchParams.set("runId", params.runId);
+ const response = await this.request(
+ `/api/workflows/${this.workflowId}/observe-streamVNext?${searchParams.toString()}`,
+ {
+ method: "POST",
+ stream: true
+ }
+ );
+ if (!response.ok) {
+ throw new Error(`Failed to observe stream vNext workflow: ${response.statusText}`);
+ }
+ if (!response.body) {
+ throw new Error("Response body is null");
+ }
+ let failedChunk = void 0;
+ const transformStream = new TransformStream({
+ start() {
+ },
+ async transform(chunk, controller) {
+ try {
+ const decoded = new TextDecoder().decode(chunk);
+ const chunks = decoded.split(RECORD_SEPARATOR);
+ for (const chunk2 of chunks) {
+ if (chunk2) {
+ const newChunk = failedChunk ? failedChunk + chunk2 : chunk2;
+ try {
+ const parsedChunk = JSON.parse(newChunk);
+ controller.enqueue(parsedChunk);
+ failedChunk = void 0;
+ } catch {
+ failedChunk = newChunk;
+ }
+ }
+ }
+ } catch {
+ }
+ }
+ });
+ return response.body.pipeThrough(transformStream);
+ }
  /**
  * Resumes a suspended workflow step asynchronously and returns a promise that resolves when the workflow is complete
  * @param params - Object containing the runId, step, resumeData and runtimeContext
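`observeStreamVNext` is the client side of the "observe stream" changelog entry: it POSTs to `/api/workflows/:workflowId/observe-streamVNext?runId=...` with no body and returns a `ReadableStream` of JSON-parsed chunks, so a client can re-attach to a run that is already in flight or was interrupted. Usage sketch with hypothetical ids:

```ts
import { MastraClient } from '@mastra/client-js';

const client = new MastraClient({ baseUrl: 'http://localhost:4111' });
const workflow = client.getWorkflow('my-workflow'); // hypothetical workflow id

async function observe() {
  const stream = await workflow.observeStreamVNext({ runId: 'existing-run-id' }); // hypothetical run id

  const reader = stream.getReader();
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    console.log(value); // each value is one parsed workflow event
  }
}
```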
@@ -1928,7 +2012,8 @@ var Workflow = class extends BaseResource {
  body: {
  step: params.step,
  resumeData: params.resumeData,
- runtimeContext
+ runtimeContext,
+ tracingOptions: params.tracingOptions
  }
  });
  }
@@ -1937,16 +2022,54 @@ var Workflow = class extends BaseResource {
  * @param params - Object containing the runId, step, resumeData and runtimeContext
  * @returns Promise containing the workflow resume results
  */
- resumeStreamVNext(params) {
+ async resumeStreamVNext(params) {
+ const searchParams = new URLSearchParams();
+ searchParams.set("runId", params.runId);
  const runtimeContext = parseClientRuntimeContext(params.runtimeContext);
-
-
-
-
-
-
+ const response = await this.request(
+ `/api/workflows/${this.workflowId}/resume-stream?${searchParams.toString()}`,
+ {
+ method: "POST",
+ body: {
+ step: params.step,
+ resumeData: params.resumeData,
+ runtimeContext,
+ tracingOptions: params.tracingOptions
+ },
+ stream: true
+ }
+ );
+ if (!response.ok) {
+ throw new Error(`Failed to stream vNext workflow: ${response.statusText}`);
+ }
+ if (!response.body) {
+ throw new Error("Response body is null");
+ }
+ let failedChunk = void 0;
+ const transformStream = new TransformStream({
+ start() {
+ },
+ async transform(chunk, controller) {
+ try {
+ const decoded = new TextDecoder().decode(chunk);
+ const chunks = decoded.split(RECORD_SEPARATOR);
+ for (const chunk2 of chunks) {
+ if (chunk2) {
+ const newChunk = failedChunk ? failedChunk + chunk2 : chunk2;
+ try {
+ const parsedChunk = JSON.parse(newChunk);
+ controller.enqueue(parsedChunk);
+ failedChunk = void 0;
+ } catch {
+ failedChunk = newChunk;
+ }
+ }
+ }
+ } catch {
+ }
  }
  });
+ return response.body.pipeThrough(transformStream);
  }
  /**
  * Watches workflow transitions in real-time
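`resumeStreamVNext` is now async and streams the resumed run: it POSTs `step`, `resumeData`, `runtimeContext`, and `tracingOptions` to `/api/workflows/:workflowId/resume-stream?runId=...` and pipes the response through the same record-separated JSON transform as the other vNext streams. A sketch with hypothetical ids:

```ts
import { MastraClient } from '@mastra/client-js';

const client = new MastraClient({ baseUrl: 'http://localhost:4111' });
const workflow = client.getWorkflow('my-workflow'); // hypothetical workflow id

async function resumeAndStream() {
  const stream = await workflow.resumeStreamVNext({
    runId: 'existing-run-id',       // hypothetical run id
    step: 'approval-step',          // hypothetical suspended step id
    resumeData: { approved: true },
    tracingOptions: { metadata: { source: 'docs-example' } }, // assumed shape
  });

  const reader = stream.getReader();
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    console.log(value);
  }
}
```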
@@ -2524,6 +2647,25 @@ var Observability = class extends BaseResource {
  const queryString = searchParams.toString();
  return this.request(`/api/observability/traces${queryString ? `?${queryString}` : ""}`);
  }
+ /**
+ * Retrieves scores by trace ID and span ID
+ * @param params - Parameters containing trace ID, span ID, and pagination options
+ * @returns Promise containing scores and pagination info
+ */
+ getScoresBySpan(params) {
+ const { traceId, spanId, page, perPage } = params;
+ const searchParams = new URLSearchParams();
+ if (page !== void 0) {
+ searchParams.set("page", String(page));
+ }
+ if (perPage !== void 0) {
+ searchParams.set("perPage", String(perPage));
+ }
+ const queryString = searchParams.toString();
+ return this.request(
+ `/api/observability/traces/${encodeURIComponent(traceId)}/${encodeURIComponent(spanId)}/scores${queryString ? `?${queryString}` : ""}`
+ );
+ }
  score(params) {
  return this.request(`/api/observability/traces/score`, {
  method: "POST",
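On the Observability resource, `getScoresBySpan` resolves to `GET /api/observability/traces/:traceId/:spanId/scores`, URL-encoding both ids and passing `page`/`perPage` as query parameters when present. For callers that talk to the server directly rather than through the SDK, a hedged sketch of the equivalent request (base URL, ids, and the response shape are assumptions):

```ts
async function fetchSpanScores(baseUrl: string, traceId: string, spanId: string) {
  const url =
    `${baseUrl}/api/observability/traces/${encodeURIComponent(traceId)}` +
    `/${encodeURIComponent(spanId)}/scores?page=0&perPage=20`;
  const res = await fetch(url);
  if (!res.ok) throw new Error(`Failed to fetch scores: ${res.statusText}`);
  return res.json(); // expected to match GetScoresResponse (scores plus pagination info)
}

// fetchSpanScores('http://localhost:4111', 'trace-123', 'span-456');
```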
@@ -2595,145 +2737,6 @@ var NetworkMemoryThread = class extends BaseResource {
  }
  };

- // src/resources/vNextNetwork.ts
- var RECORD_SEPARATOR3 = "";
- var VNextNetwork = class extends BaseResource {
- constructor(options, networkId) {
- super(options);
- this.networkId = networkId;
- }
- /**
- * Retrieves details about the network
- * @param runtimeContext - Optional runtime context to pass as query parameter
- * @returns Promise containing vNext network details
- */
- details(runtimeContext) {
- return this.request(`/api/networks/v-next/${this.networkId}${runtimeContextQueryString(runtimeContext)}`);
- }
- /**
- * Generates a response from the v-next network
- * @param params - Generation parameters including message
- * @returns Promise containing the generated response
- */
- generate(params) {
- return this.request(`/api/networks/v-next/${this.networkId}/generate`, {
- method: "POST",
- body: {
- ...params,
- runtimeContext: parseClientRuntimeContext(params.runtimeContext)
- }
- });
- }
- /**
- * Generates a response from the v-next network using multiple primitives
- * @param params - Generation parameters including message
- * @returns Promise containing the generated response
- */
- loop(params) {
- return this.request(`/api/networks/v-next/${this.networkId}/loop`, {
- method: "POST",
- body: {
- ...params,
- runtimeContext: parseClientRuntimeContext(params.runtimeContext)
- }
- });
- }
- async *streamProcessor(stream) {
- const reader = stream.getReader();
- let doneReading = false;
- let buffer = "";
- try {
- while (!doneReading) {
- const { done, value } = await reader.read();
- doneReading = done;
- if (done && !value) continue;
- try {
- const decoded = value ? new TextDecoder().decode(value) : "";
- const chunks = (buffer + decoded).split(RECORD_SEPARATOR3);
- buffer = chunks.pop() || "";
- for (const chunk of chunks) {
- if (chunk) {
- if (typeof chunk === "string") {
- try {
- const parsedChunk = JSON.parse(chunk);
- yield parsedChunk;
- } catch {
- }
- }
- }
- }
- } catch {
- }
- }
- if (buffer) {
- try {
- yield JSON.parse(buffer);
- } catch {
- }
- }
- } finally {
- reader.cancel().catch(() => {
- });
- }
- }
- /**
- * Streams a response from the v-next network
- * @param params - Stream parameters including message
- * @returns Promise containing the results
- */
- async stream(params, onRecord) {
- const response = await this.request(`/api/networks/v-next/${this.networkId}/stream`, {
- method: "POST",
- body: {
- ...params,
- runtimeContext: parseClientRuntimeContext(params.runtimeContext)
- },
- stream: true
- });
- if (!response.ok) {
- throw new Error(`Failed to stream vNext network: ${response.statusText}`);
- }
- if (!response.body) {
- throw new Error("Response body is null");
- }
- for await (const record of this.streamProcessor(response.body)) {
- if (typeof record === "string") {
- onRecord(JSON.parse(record));
- } else {
- onRecord(record);
- }
- }
- }
- /**
- * Streams a response from the v-next network loop
- * @param params - Stream parameters including message
- * @returns Promise containing the results
- */
- async loopStream(params, onRecord) {
- const response = await this.request(`/api/networks/v-next/${this.networkId}/loop-stream`, {
- method: "POST",
- body: {
- ...params,
- runtimeContext: parseClientRuntimeContext(params.runtimeContext)
- },
- stream: true
- });
- if (!response.ok) {
- throw new Error(`Failed to stream vNext network loop: ${response.statusText}`);
- }
- if (!response.body) {
- throw new Error("Response body is null");
- }
- for await (const record of this.streamProcessor(response.body)) {
- if (typeof record === "string") {
- onRecord(JSON.parse(record));
- } else {
- onRecord(record);
- }
- }
- }
- };
-
  // src/client.ts
  var MastraClient = class extends BaseResource {
  observability;
@@ -3075,21 +3078,6 @@ var MastraClient = class extends BaseResource {
  return this.request(`/api/telemetry`);
  }
  }
- /**
- * Retrieves all available vNext networks
- * @returns Promise containing map of vNext network IDs to vNext network details
- */
- getVNextNetworks() {
- return this.request("/api/networks/v-next");
- }
- /**
- * Gets a vNext network instance by ID
- * @param networkId - ID of the vNext network to retrieve
- * @returns vNext Network instance
- */
- getVNextNetwork(networkId) {
- return new VNextNetwork(this.options, networkId);
- }
  /**
  * Retrieves a list of available MCP servers.
  * @param params - Optional parameters for pagination (limit, offset).
@@ -3194,7 +3182,7 @@ var MastraClient = class extends BaseResource {
  * @returns Promise containing the scorer
  */
  getScorer(scorerId) {
- return this.request(`/api/scores/scorers/${scorerId}`);
+ return this.request(`/api/scores/scorers/${encodeURIComponent(scorerId)}`);
  }
  getScoresByScorerId(params) {
  const { page, perPage, scorerId, entityId, entityType } = params;
@@ -3212,7 +3200,7 @@ var MastraClient = class extends BaseResource {
  searchParams.set("perPage", String(perPage));
  }
  const queryString = searchParams.toString();
- return this.request(`/api/scores/scorer/${scorerId}${queryString ? `?${queryString}` : ""}`);
+ return this.request(`/api/scores/scorer/${encodeURIComponent(scorerId)}${queryString ? `?${queryString}` : ""}`);
  }
  /**
  * Retrieves scores by run ID
@@ -3229,7 +3217,7 @@ var MastraClient = class extends BaseResource {
  searchParams.set("perPage", String(perPage));
  }
  const queryString = searchParams.toString();
- return this.request(`/api/scores/run/${runId}${queryString ? `?${queryString}` : ""}`);
+ return this.request(`/api/scores/run/${encodeURIComponent(runId)}${queryString ? `?${queryString}` : ""}`);
  }
  /**
  * Retrieves scores by entity ID and type
@@ -3246,7 +3234,9 @@ var MastraClient = class extends BaseResource {
  searchParams.set("perPage", String(perPage));
  }
  const queryString = searchParams.toString();
- return this.request(
+ return this.request(
+ `/api/scores/entity/${encodeURIComponent(entityType)}/${encodeURIComponent(entityId)}${queryString ? `?${queryString}` : ""}`
+ );
  }
  /**
  * Saves a score
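The hunks above harden the score lookup helpers by passing `scorerId`, `runId`, `entityType`, and `entityId` through `encodeURIComponent`, so ids containing reserved URL characters still produce valid request paths. A small sketch of the effect, assuming the client method is named `getScoresByEntityId` (matching its `GetScoresByEntityIdParams` type); the entity id and type values are hypothetical:

```ts
import { MastraClient } from '@mastra/client-js';

const client = new MastraClient({ baseUrl: 'http://localhost:4111' });

async function scoresForEntity() {
  // An id containing '/' and a space is now encoded rather than breaking the path:
  // -> /api/scores/entity/AGENT/agents%2Fweather%20agent
  return client.getScoresByEntityId({
    entityType: 'AGENT',              // hypothetical entity type value
    entityId: 'agents/weather agent', // hypothetical entity id
    page: 0,
    perPage: 10,
  });
}
```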
@@ -3272,6 +3262,9 @@ var MastraClient = class extends BaseResource {
  getAITraces(params) {
  return this.observability.getTraces(params);
  }
+ getScoresBySpan(params) {
+ return this.observability.getScoresBySpan(params);
+ }
  score(params) {
  return this.observability.score(params);
  }