@mastra/playground-ui 6.7.2 → 6.9.0-alpha.0
This diff shows the contents of publicly released package versions as published to a supported public registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.
- package/CHANGELOG.md +47 -0
- package/dist/index.cjs.js +195 -42
- package/dist/index.cjs.js.map +1 -1
- package/dist/index.es.js +208 -59
- package/dist/index.es.js.map +1 -1
- package/dist/src/domains/agents/components/agent-metadata/agent-metadata-model-list.d.ts +1 -3
- package/dist/src/domains/agents/components/agent-metadata/agent-metadata-model-switcher.d.ts +4 -2
- package/dist/src/domains/agents/components/agent-metadata/agent-metadata.d.ts +2 -2
- package/dist/src/domains/agents/hooks/use-agents.d.ts +3 -1
- package/dist/src/index.d.ts +1 -0
- package/dist/src/lib/toast.d.ts +27 -0
- package/package.json +7 -7
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,52 @@
 # @mastra/playground-ui
 
+## 6.9.0-alpha.0
+
+### Minor Changes
+
+- Update peer dependencies to match core package version bump (0.23.4) ([#9487](https://github.com/mastra-ai/mastra/pull/9487))
+
+### Patch Changes
+
+- update peerdeps ([`5ca1cca`](https://github.com/mastra-ai/mastra/commit/5ca1ccac61ffa7141e6d9fa8f22d3ad4d03bf5dc))
+
+- Fixes issue where clicking the reset button in the model picker would fail to restore the original LanguageModelV2 (or any other types) object that was passed during agent construction. ([#9487](https://github.com/mastra-ai/mastra/pull/9487))
+
+- Remove unused /model-providers API ([#9554](https://github.com/mastra-ai/mastra/pull/9554))
+
+- Fix undefined runtimeContext using memory from playground ([#9548](https://github.com/mastra-ai/mastra/pull/9548))
+
+- Updated dependencies [[`5ca1cca`](https://github.com/mastra-ai/mastra/commit/5ca1ccac61ffa7141e6d9fa8f22d3ad4d03bf5dc), [`6d7e90d`](https://github.com/mastra-ai/mastra/commit/6d7e90db09713e6250f4d6c3d3cff1b4740e50f9), [`f78b908`](https://github.com/mastra-ai/mastra/commit/f78b9080e11af765969b36b4a619761056030840), [`23c2614`](https://github.com/mastra-ai/mastra/commit/23c26140fdbf04b8c59e8d7d52106d67dad962ec), [`139588d`](https://github.com/mastra-ai/mastra/commit/139588df7755c9111a3060f72a789c1a8c95e091), [`186b29b`](https://github.com/mastra-ai/mastra/commit/186b29bd51ac1dcc24ad3825fdb7207a6d6391a6), [`e365eda`](https://github.com/mastra-ai/mastra/commit/e365eda45795b43707310531cac1e2ce4e5a0712)]:
+  - @mastra/client-js@0.16.9-alpha.0
+  - @mastra/react@0.0.15-alpha.0
+  - @mastra/core@0.24.0-alpha.0
+
+## 6.8.0
+
+### Minor Changes
+
+- Toast error from workflow stream and resume stream ([#9460](https://github.com/mastra-ai/mastra/pull/9460))
+
+### Patch Changes
+
+- Updated dependencies [[`b55bbce`](https://github.com/mastra-ai/mastra/commit/b55bbce89404d35fdd967012dd503fae343d4c2d), [`d6cb18e`](https://github.com/mastra-ai/mastra/commit/d6cb18e189fd0ac95d4934cf0d6f2866876d1a2e), [`d0ecff7`](https://github.com/mastra-ai/mastra/commit/d0ecff793d5cd50408cd8d1d113f02e28d897a3d), [`e84a959`](https://github.com/mastra-ai/mastra/commit/e84a9592bfdec4f1c9fdf108c9d4ea4e2ee8f7e3), [`e742d37`](https://github.com/mastra-ai/mastra/commit/e742d371f24ef8059670cc05e9aee308eac068b9)]:
+  - @mastra/client-js@0.16.8
+  - @mastra/core@0.23.3
+  - @mastra/react@0.0.14
+
+## 6.8.0-alpha.0
+
+### Minor Changes
+
+- Toast error from workflow stream and resume stream ([#9460](https://github.com/mastra-ai/mastra/pull/9460))
+
+### Patch Changes
+
+- Updated dependencies [[`b55bbce`](https://github.com/mastra-ai/mastra/commit/b55bbce89404d35fdd967012dd503fae343d4c2d), [`d6cb18e`](https://github.com/mastra-ai/mastra/commit/d6cb18e189fd0ac95d4934cf0d6f2866876d1a2e), [`d0ecff7`](https://github.com/mastra-ai/mastra/commit/d0ecff793d5cd50408cd8d1d113f02e28d897a3d), [`e84a959`](https://github.com/mastra-ai/mastra/commit/e84a9592bfdec4f1c9fdf108c9d4ea4e2ee8f7e3), [`e742d37`](https://github.com/mastra-ai/mastra/commit/e742d371f24ef8059670cc05e9aee308eac068b9)]:
+  - @mastra/client-js@0.16.8-alpha.0
+  - @mastra/core@0.23.3-alpha.0
+  - @mastra/react@0.0.14-alpha.0
+
 ## 6.7.2
 
 ### Patch Changes
package/dist/index.cjs.js
CHANGED
@@ -8133,6 +8133,110 @@ const useWorkflows = () => {
   });
 };
 
+const defaultOptions = {
+  duration: 3e3,
+  cancel: {
+    label: /* @__PURE__ */ jsxRuntime.jsx(lucideReact.X, { size: "14" }),
+    onClick: () => {
+    }
+  },
+  unstyled: true,
+  classNames: {
+    toast: "bg-[#0F0F0F] w-full backdrop-accent h-auto rounded-lg gap-2 border border p-4 flex items-start rounded-lg pointer-events-auto",
+    title: "text-white font-semibold text-xs mb-1 -mt-1",
+    description: "!text-text text-sm !font-light",
+    cancelButton: "self-start !bg-transparent !p-0 flex items-center justify-center !text-text opacity-50 order-last hover:opacity-100",
+    actionButton: "!bg-white flex items-center justify-center font-medium !text-black order-last hover:opacity-80"
+  }
+};
+function getToastOptions(options) {
+  const { classNames, ...rest } = defaultOptions;
+  const { classNames: optionsClassNames, ...restOptions } = options || {};
+  return {
+    ...rest,
+    classNames: {
+      ...classNames,
+      title: cn(classNames?.title, "mt-auto", optionsClassNames?.title),
+      toast: cn(classNames?.toast, "!items-center", optionsClassNames?.toast),
+      cancelButton: cn(classNames?.cancelButton, "!self-center", optionsClassNames?.cancelButton),
+      actionButton: cn(classNames?.actionButton, optionsClassNames?.actionButton)
+    },
+    ...restOptions
+  };
+}
+const toast = (message, options = {}) => {
+  if (Array.isArray(message)) {
+    return message.forEach((msg) => sonner.toast(msg, getToastOptions(options)));
+  } else if (React.isValidElement(message)) {
+    return sonner.toast(message, getToastOptions(options));
+  } else if (typeof message === "string") {
+    return sonner.toast(message, getToastOptions(options));
+  }
+  throw new Error("Invalid message type");
+};
+toast.success = (message, options = {}) => {
+  switch (typeof message) {
+    case "string":
+      return sonner.toast.success(message, getToastOptions(options));
+    case "object":
+      return message.forEach((message2) => sonner.toast.success(message2, getToastOptions(options)));
+  }
+};
+toast.error = (message, options = {}) => {
+  switch (typeof message) {
+    case "string":
+      return sonner.toast.error(message, getToastOptions(options));
+    case "object":
+      return message.forEach((message2) => sonner.toast.error(message2, getToastOptions(options)));
+  }
+};
+toast.warning = (message, options = {}) => {
+  switch (typeof message) {
+    case "string":
+      return sonner.toast.warning(message, getToastOptions(options));
+    case "object":
+      return message.forEach((message2) => sonner.toast.warning(message2, getToastOptions(options)));
+  }
+};
+toast.info = (message, options = {}) => {
+  switch (typeof message) {
+    case "string":
+      return sonner.toast.info(message, getToastOptions(options));
+    case "object":
+      return message.forEach((message2) => sonner.toast.info(message2, getToastOptions(options)));
+  }
+};
+toast.custom = (message, options = {}) => {
+  return sonner.toast(message, getToastOptions(options));
+};
+toast.dismiss = (toastId) => {
+  if (toastId) {
+    sonner.toast.dismiss(toastId);
+  }
+};
+toast.promise = ({
+  myPromise,
+  loadingMessage,
+  successMessage,
+  errorMessage,
+  onSuccess,
+  onError,
+  options = {}
+}) => {
+  return sonner.toast.promise(myPromise, {
+    loading: loadingMessage ?? "Loading...",
+    success: (data) => {
+      onSuccess?.(data);
+      return successMessage;
+    },
+    error: (err) => {
+      onError?.(err);
+      return errorMessage || err?.message || "Error...";
+    },
+    ...getToastOptions(options)
+  });
+};
+
 const useExecuteWorkflow = () => {
   const client = react$3.useMastraClient();
   const createWorkflowRun = reactQuery.useMutation({
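The block above is the package's sonner-based `toast` helper, which the export hunks further down expose as `exports.toast`. A usage sketch based on the bundled code rather than on documented API; the message strings and the fetched URL are illustrative:

```ts
import { toast } from '@mastra/playground-ui';

// A plain string message; options are merged over the defaults above
// (3 s duration, unstyled sonner toast with the playground class names).
toast('Workflow run created');

// The success/error/warning/info variants accept a string or an array of
// strings; an array is fanned out into one toast per entry.
toast.error(['Step A failed', 'Step B was skipped']);

// toast.promise wraps sonner.toast.promise with loading/success/error
// messages plus optional callbacks for the resolved value or the error.
toast.promise({
  myPromise: fetch('/api/workflows').then((res) => res.json()),
  loadingMessage: 'Loading workflows...',
  successMessage: 'Workflows loaded',
  errorMessage: 'Could not load workflows',
  onError: (err) => console.error(err),
});
```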
@@ -8229,6 +8333,23 @@ const useStreamWorkflow = () => {
       }
     };
   }, []);
+  const handleStreamError = (err, defaultMessage, setIsStreaming2) => {
+    if (err instanceof TypeError) {
+      return;
+    }
+    const errorMessage = err instanceof Error ? err.message : defaultMessage;
+    toast.error(errorMessage);
+    setIsStreaming2?.(false);
+  };
+  const handleWorkflowFinish = (value) => {
+    if (value.type === "workflow-finish") {
+      const streamStatus = value.payload?.workflowStatus;
+      const metadata = value.payload?.metadata;
+      if (streamStatus === "failed") {
+        throw new Error(metadata?.errorMessage || "Workflow execution failed");
+      }
+    }
+  };
   const streamWorkflow = reactQuery.useMutation({
     mutationFn: async ({
       workflowId,
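The added `handleStreamError` / `handleWorkflowFinish` pair implements the changelog's "Toast error from workflow stream and resume stream": a `workflow-finish` chunk whose payload reports `workflowStatus: "failed"` becomes a thrown Error, and the surrounding catch turns anything except a `TypeError` into a `toast.error` while clearing the streaming flag. A standalone TypeScript sketch of that flow; the chunk type is simplified and all names besides `toast` are illustrative rather than the package's public API:

```ts
import { toast } from '@mastra/playground-ui';

// Simplified, illustrative chunk shape; the real stream emits many more event types.
type WorkflowChunk = {
  type: string;
  payload?: { workflowStatus?: string; metadata?: { errorMessage?: string } };
};

// Mirrors the bundled handleWorkflowFinish: a failed finish chunk becomes a thrown Error.
function handleWorkflowFinish(value: WorkflowChunk): void {
  if (value.type === 'workflow-finish' && value.payload?.workflowStatus === 'failed') {
    throw new Error(value.payload?.metadata?.errorMessage || 'Workflow execution failed');
  }
}

// Mirrors the reader loop: the catch surfaces the failure as a toast,
// except for TypeErrors, which the bundle deliberately skips.
async function drainWorkflowStream(reader: ReadableStreamDefaultReader<WorkflowChunk>) {
  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      if (value.type === 'workflow-finish') handleWorkflowFinish(value);
    }
  } catch (err) {
    if (err instanceof TypeError) return;
    toast.error(err instanceof Error ? err.message : 'Error streaming workflow');
  }
}
```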
@@ -8248,7 +8369,9 @@ const useStreamWorkflow = () => {
       });
       const workflow = client.getWorkflow(workflowId);
       const stream = await workflow.streamVNext({ runId, inputData, runtimeContext: runtimeContext$1, closeOnSuspend: true });
-      if (!stream)
+      if (!stream) {
+        return handleStreamError(new Error("No stream returned"), "No stream returned", setIsStreaming);
+      }
       const reader = stream.getReader();
       readerRef.current = reader;
       try {
@@ -8267,10 +8390,13 @@ const useStreamWorkflow = () => {
           if (value.type === "workflow-step-suspended") {
             setIsStreaming(false);
           }
+          if (value.type === "workflow-finish") {
+            handleWorkflowFinish(value);
+          }
         }
       }
-    } catch (
-
+    } catch (err) {
+      handleStreamError(err, "Error streaming workflow");
     } finally {
       if (isMountedRef.current) {
         setIsStreaming(false);
@@ -8300,7 +8426,9 @@ const useStreamWorkflow = () => {
       }
       const workflow = client.getWorkflow(workflowId);
       const stream = await workflow.observeStreamVNext({ runId });
-      if (!stream)
+      if (!stream) {
+        return handleStreamError(new Error("No stream returned"), "No stream returned", setIsStreaming);
+      }
       const reader = stream.getReader();
       observerRef.current = reader;
       try {
@@ -8319,10 +8447,13 @@ const useStreamWorkflow = () => {
           if (value.type === "workflow-step-suspended") {
             setIsStreaming(false);
           }
+          if (value.type === "workflow-finish") {
+            handleWorkflowFinish(value);
+          }
         }
       }
-    } catch (
-
+    } catch (err) {
+      handleStreamError(err, "Error observing workflow");
     } finally {
       if (isMountedRef.current) {
         setIsStreaming(false);
@@ -8353,7 +8484,9 @@ const useStreamWorkflow = () => {
         runtimeContext$1.set(key, value);
       });
       const stream = await workflow.resumeStreamVNext({ runId, step, resumeData, runtimeContext: runtimeContext$1 });
-      if (!stream)
+      if (!stream) {
+        return handleStreamError(new Error("No stream returned"), "No stream returned", setIsStreaming);
+      }
       const reader = stream.getReader();
       resumeStreamRef.current = reader;
       try {
@@ -8372,10 +8505,13 @@ const useStreamWorkflow = () => {
           if (value.type === "workflow-step-suspended") {
             setIsStreaming(false);
           }
+          if (value.type === "workflow-finish") {
+            handleWorkflowFinish(value);
+          }
         }
       }
-    } catch (
-
+    } catch (err) {
+      handleStreamError(err, "Error resuming workflow stream");
     } finally {
       if (isMountedRef.current) {
         setIsStreaming(false);
@@ -8609,9 +8745,10 @@ const useAgentMessages = ({
   memory
 }) => {
   const client = react$3.useMastraClient();
+  const { runtimeContext } = usePlaygroundStore();
   return reactQuery.useQuery({
-    queryKey: ["memory", "messages", threadId, agentId],
-    queryFn: () => client.getThreadMessages(threadId, { agentId }),
+    queryKey: ["memory", "messages", threadId, agentId, "runtimeContext"],
+    queryFn: () => client.getThreadMessages(threadId, { agentId, runtimeContext }),
     enabled: memory && Boolean(threadId),
     staleTime: 0,
     gcTime: 0,
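This hunk, and the working-memory, memory-status/config, thread, and thread-deletion hunks that follow, fix the changelog's "undefined runtimeContext using memory from playground" by reading `runtimeContext` from `usePlaygroundStore` and forwarding it to the `@mastra/client-js` memory calls. A sketch of the pattern as a standalone hook; the import path for `useMastraClient` and the exact option types are assumptions based on how the bundle references them:

```ts
import { useQuery } from '@tanstack/react-query';
import { useMastraClient } from '@mastra/react'; // assumed source of the client hook
import { usePlaygroundStore } from '@mastra/playground-ui';

export const useAgentMessages = ({ threadId, agentId, memory }: {
  threadId: string;
  agentId: string;
  memory: boolean;
}) => {
  const client = useMastraClient();
  const { runtimeContext } = usePlaygroundStore();
  return useQuery({
    // The bundle appends the literal string "runtimeContext" to the cache key.
    queryKey: ['memory', 'messages', threadId, agentId, 'runtimeContext'],
    queryFn: () => client.getThreadMessages(threadId, { agentId, runtimeContext }),
    enabled: memory && Boolean(threadId),
    staleTime: 0,
    gcTime: 0,
  });
};
```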
@@ -10009,6 +10146,7 @@ function useAgentWorkingMemory(agentId, threadId, resourceId) {
   const [workingMemoryFormat, setWorkingMemoryFormat] = React.useState("markdown");
   const [isLoading, setIsLoading] = React.useState(true);
   const [isUpdating, setIsUpdating] = React.useState(false);
+  const { runtimeContext } = usePlaygroundStore();
   const refetch = React.useCallback(async () => {
     setIsLoading(true);
     try {
@@ -10017,7 +10155,7 @@ function useAgentWorkingMemory(agentId, threadId, resourceId) {
         setIsLoading(false);
         return;
       }
-      const res = await client.getWorkingMemory({ agentId, threadId, resourceId });
+      const res = await client.getWorkingMemory({ agentId, threadId, resourceId, runtimeContext });
       const { workingMemory, source, workingMemoryTemplate, threadExists: threadExists2 } = res;
       setThreadExists(threadExists2);
       setWorkingMemoryData(workingMemory);
@@ -10056,7 +10194,7 @@ function useAgentWorkingMemory(agentId, threadId, resourceId) {
           throw new Error("Invalid JSON working memory");
         }
       }
-      await client.updateWorkingMemory({ agentId, threadId, workingMemory: newMemory, resourceId });
+      await client.updateWorkingMemory({ agentId, threadId, workingMemory: newMemory, resourceId, runtimeContext });
       refetch();
     } catch (error) {
       console.error("Error updating working memory", error);
@@ -14003,6 +14141,7 @@ const AgentMetadataModelSwitcher = ({
   defaultProvider,
   defaultModel,
   updateModel,
+  resetModel,
   apiUrl = "/api/agents/providers"
 }) => {
   const [originalProvider] = React.useState(defaultProvider);
@@ -14020,6 +14159,10 @@ const AgentMetadataModelSwitcher = ({
   const [providersLoading, setProvidersLoading] = React.useState(true);
   const [highlightedProviderIndex, setHighlightedProviderIndex] = React.useState(-1);
   const [highlightedModelIndex, setHighlightedModelIndex] = React.useState(-1);
+  React.useEffect(() => {
+    setSelectedModel(defaultModel);
+    setSelectedProvider(defaultProvider || "");
+  }, [defaultModel, defaultProvider]);
   const modelInputRef = React.useRef(null);
   const providerInputRef = React.useRef(null);
   React.useEffect(() => {
@@ -14173,8 +14316,10 @@ const AgentMetadataModelSwitcher = ({
     ] });
   }
   const handleReset = async () => {
-
-
+    if (!resetModel) {
+      console.warn("Reset model function not provided");
+      return;
+    }
     setProviderSearch("");
     setModelSearch("");
     setIsSearchingProvider(false);
@@ -14183,10 +14328,7 @@ const AgentMetadataModelSwitcher = ({
     setShowModelSuggestions(false);
     try {
       setLoading(true);
-      await
-        provider: originalProvider,
-        modelId: originalModel
-      });
+      await resetModel();
     } catch (error) {
       console.error("Failed to reset model:", error);
     } finally {
@@ -14594,7 +14736,6 @@ Switch.displayName = SwitchPrimitives__namespace.Root.displayName;
 
 const AgentMetadataModelList = ({
   modelList,
-  modelProviders,
   updateModelInModelList,
   reorderModelList
 }) => {
@@ -14632,7 +14773,6 @@ const AgentMetadataModelList = ({
       AgentMetadataModelListItem,
       {
         modelConfig,
-        modelProviders,
         updateModelInModelList: updateModel,
         showDragHandle: hasMultipleModels,
         dragHandleProps: provided2.dragHandleProps
@@ -14643,7 +14783,6 @@ const AgentMetadataModelList = ({
 };
 const AgentMetadataModelListItem = ({
   modelConfig,
-  modelProviders,
   updateModelInModelList,
   showDragHandle,
   dragHandleProps
@@ -14657,7 +14796,6 @@ const AgentMetadataModelListItem = ({
       defaultProvider: modelConfig.model.provider,
       defaultModel: modelConfig.model.modelId,
       updateModel: (params) => updateModelInModelList({ modelConfigId: modelConfig.id, model: params }),
-      modelProviders,
       autoSave: true
     }
   ) }),
@@ -14688,7 +14826,7 @@ const AgentMetadata = ({
   promptSlot,
   hasMemoryEnabled,
   updateModel,
-
+  resetModel,
   updateModelInModelList,
   reorderModelList,
   modelVersion
@@ -14704,7 +14842,6 @@ const AgentMetadata = ({
       AgentMetadataModelList,
       {
         modelList: agent.modelList,
-        modelProviders,
         updateModelInModelList,
         reorderModelList
       }
@@ -14723,7 +14860,7 @@ const AgentMetadata = ({
       defaultProvider: agent.provider,
       defaultModel: agent.modelId,
       updateModel,
-
+      resetModel
     }
   )
 }
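`AgentMetadata` and `AgentMetadataModelSwitcher` now accept a `resetModel` callback in place of the removed `modelProviders` prop, and `handleReset` simply awaits that callback (warning and bailing out if it is missing), which is how the changelog's reset-button fix lands in the UI. A hedged sketch of the prop contract; it assumes the switcher is re-exported from the package root and shows only the props visible in this diff, with loosely typed signatures:

```tsx
import { AgentMetadataModelSwitcher } from '@mastra/playground-ui';

type ModelPickerProps = {
  agent: { provider: string; modelId: string };
  updateModel: (params: { provider: string; modelId: string }) => Promise<unknown>;
  // Restores the originally configured model object on the server side.
  resetModel: () => Promise<unknown>;
};

export function ModelPicker({ agent, updateModel, resetModel }: ModelPickerProps) {
  return (
    <AgentMetadataModelSwitcher
      defaultProvider={agent.provider}
      defaultModel={agent.modelId}
      updateModel={updateModel}
      // Called by the reset button; without it the bundle logs
      // "Reset model function not provided" and returns early.
      resetModel={resetModel}
    />
  );
}
```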
@@ -15053,13 +15190,6 @@ const useAgents = () => {
     queryFn: () => client.getAgents(runtimeContext)
   });
 };
-const useModelProviders = () => {
-  const client = react$3.useMastraClient();
-  return reactQuery.useQuery({
-    queryKey: ["model-providers"],
-    queryFn: () => client.getModelProviders()
-  });
-};
 const useUpdateAgentModel = (agentId) => {
   const client = react$3.useMastraClient();
   const queryClient = reactQuery.useQueryClient();
@@ -15099,6 +15229,19 @@ const useUpdateModelInModelList = (agentId) => {
     }
   });
 };
+const useResetAgentModel = (agentId) => {
+  const client = react$3.useMastraClient();
+  const queryClient = reactQuery.useQueryClient();
+  return reactQuery.useMutation({
+    mutationFn: async () => client.getAgent(agentId).resetModel(),
+    onSuccess: () => {
+      queryClient.invalidateQueries({ queryKey: ["agent", agentId] });
+    },
+    onError: (err) => {
+      console.error("Error resetting model", err);
+    }
+  });
+};
 
 const useAgent = (agentId) => {
   const client = react$3.useMastraClient();
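The new `useResetAgentModel` hook is what `AgentInformation` wires into the `resetModel` prop (see the hunk around new line 16800 below): it calls `client.getAgent(agentId).resetModel()` and invalidates the `["agent", agentId]` query so the switcher's new `useEffect` re-syncs its inputs with the restored provider and model. A usage sketch mirroring that wiring; the wrapper name is hypothetical:

```ts
import { useResetAgentModel } from '@mastra/playground-ui';

// Hypothetical wrapper; in the bundle AgentInformation does this inline.
export function useModelResetHandler(agentId: string) {
  const { mutateAsync: resetModel } = useResetAgentModel(agentId);

  return async () => {
    // Triggers client.getAgent(agentId).resetModel(); on success the hook
    // invalidates the ["agent", agentId] query, on error it logs to the console.
    await resetModel();
  };
}
```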
@@ -15214,9 +15357,10 @@ const AgentToolPanel = ({ toolId, agentId }) => {
 
 const useMemory = (agentId) => {
   const client = react$3.useMastraClient();
+  const { runtimeContext } = usePlaygroundStore();
   return reactQuery.useQuery({
     queryKey: ["memory", agentId],
-    queryFn: () => agentId ? client.getMemoryStatus(agentId) : null,
+    queryFn: () => agentId ? client.getMemoryStatus(agentId, runtimeContext) : null,
     enabled: Boolean(agentId),
     staleTime: 5 * 60 * 1e3,
     // 5 minutes
@@ -15227,9 +15371,10 @@ const useMemory = (agentId) => {
 };
 const useMemoryConfig = (agentId) => {
   const client = react$3.useMastraClient();
+  const { runtimeContext } = usePlaygroundStore();
   return reactQuery.useQuery({
     queryKey: ["memory", "config", agentId],
-    queryFn: () => agentId ? client.getMemoryConfig({ agentId }) : null,
+    queryFn: () => agentId ? client.getMemoryConfig({ agentId, runtimeContext }) : null,
     enabled: Boolean(agentId),
     staleTime: 5 * 60 * 1e3,
     // 5 minutes
@@ -15245,9 +15390,10 @@ const useThreads = ({
   isMemoryEnabled
 }) => {
   const client = react$3.useMastraClient();
+  const { runtimeContext } = usePlaygroundStore();
   return reactQuery.useQuery({
     queryKey: ["memory", "threads", resourceId, agentId],
-    queryFn: () => isMemoryEnabled ? client.getMemoryThreads({ resourceId, agentId }) : null,
+    queryFn: () => isMemoryEnabled ? client.getMemoryThreads({ resourceId, agentId, runtimeContext }) : null,
     enabled: Boolean(isMemoryEnabled),
     staleTime: 0,
     gcTime: 0,
@@ -15258,8 +15404,9 @@ const useThreads = ({
 const useDeleteThread = () => {
   const client = react$3.useMastraClient();
   const queryClient = reactQuery.useQueryClient();
+  const { runtimeContext } = usePlaygroundStore();
   return reactQuery.useMutation({
-    mutationFn: ({ threadId, agentId, networkId }) => client.deleteThread(threadId, { agentId, networkId }),
+    mutationFn: ({ threadId, agentId, networkId }) => client.deleteThread(threadId, { agentId, networkId, runtimeContext }),
     onSuccess: (_, variables) => {
       const { agentId, networkId } = variables;
       if (agentId) {
@@ -15280,6 +15427,7 @@ const useMemorySearch = ({
   resourceId,
   threadId
 }) => {
+  const { runtimeContext } = usePlaygroundStore();
   const searchMemory = async (searchQuery, memoryConfig) => {
     if (!searchQuery.trim()) {
       return { results: [], count: 0, query: searchQuery };
@@ -15287,7 +15435,8 @@ const useMemorySearch = ({
     const params = new URLSearchParams({
       searchQuery,
       resourceId,
-      agentId
+      agentId,
+      runtimeContext: btoa(JSON.stringify(runtimeContext))
     });
     if (threadId) {
       params.append("threadId", threadId);
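Unlike the other memory hooks, `useMemorySearch` builds a raw query string rather than going through a client method, so the runtime context is JSON-serialized and base64-encoded into a `runtimeContext` parameter. A small sketch of the encoding with an illustrative context value (the server-side decoding is outside this diff):

```ts
// Illustrative runtime context, shaped like the plain object usePlaygroundStore holds.
const runtimeContext = { 'user-tier': 'pro', locale: 'en-US' };

const params = new URLSearchParams({
  searchQuery: 'refund policy',
  resourceId: 'user-123',
  agentId: 'support-agent',
  // Same encoding as the bundle: JSON first, then base64 via btoa.
  runtimeContext: btoa(JSON.stringify(runtimeContext)),
});

// The resulting string carries the context as a single base64 query parameter.
console.log(params.toString());
```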
@@ -16647,8 +16796,8 @@ function AgentPromptEnhancer({ agentId }) {
 
 function AgentInformation({ agentId, threadId }) {
   const { data: agent, isLoading } = useAgent(agentId);
-  const { data: modelProviders } = useModelProviders();
   const { mutateAsync: updateModel } = useUpdateAgentModel(agentId);
+  const { mutateAsync: resetModel } = useResetAgentModel(agentId);
   const { mutate: reorderModelList } = useReorderModelList(agentId);
   const { mutateAsync: updateModelInModelList } = useUpdateModelInModelList(agentId);
   const { data: memory, isLoading: isMemoryLoading } = useMemory(agentId);
@@ -16693,9 +16842,9 @@ function AgentInformation({ agentId, threadId }) {
       agentId,
       agent,
       updateModel,
+      resetModel,
       updateModelInModelList,
       reorderModelList,
-      modelProviders: modelProviders || [],
       hasMemoryEnabled: Boolean(memory?.result),
       promptSlot: /* @__PURE__ */ jsxRuntime.jsx(AgentPromptEnhancer, { agentId }),
       modelVersion: agent.modelVersion
@@ -17275,7 +17424,6 @@ function TemplateForm({
       updateModel: onModelUpdate || (() => Promise.resolve({ message: "Updated" })),
       closeEditor: () => {
       },
-      modelProviders: ["openai", "anthropic", "google", "xai", "groq"],
       autoSave: true,
       selectProviderPlaceholder: "Provider"
     }
@@ -20339,6 +20487,10 @@ const useEvalsByAgentId = (agentId, type) => {
   });
 };
 
+Object.defineProperty(exports, "Toaster", {
+  enumerable: true,
+  get: () => sonner.Toaster
+});
 exports.AgentChat = AgentChat;
 exports.AgentCoinIcon = AgentCoinIcon;
 exports.AgentEntityHeader = AgentEntityHeader;
@@ -20547,6 +20699,7 @@ exports.parseError = parseError;
 exports.providerMapToIcon = providerMapToIcon;
 exports.scoresListColumns = scoresListColumns;
 exports.spanTypePrefixes = spanTypePrefixes;
+exports.toast = toast;
 exports.traceScoresListColumns = traceScoresListColumns;
 exports.tracesListColumns = tracesListColumns;
 exports.transformKey = transformKey;
@@ -20570,11 +20723,11 @@ exports.useMainSidebar = useMainSidebar;
 exports.useMemory = useMemory;
 exports.useMemoryConfig = useMemoryConfig;
 exports.useMemorySearch = useMemorySearch;
-exports.useModelProviders = useModelProviders;
 exports.useModelReset = useModelReset;
 exports.usePlaygroundStore = usePlaygroundStore;
 exports.usePolling = usePolling;
 exports.useReorderModelList = useReorderModelList;
+exports.useResetAgentModel = useResetAgentModel;
 exports.useScorer = useScorer;
 exports.useScorers = useScorers;
 exports.useScoresByEntityId = useScoresByEntityId;
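Taken together, the export hunks above add `toast` and a lazily resolved `Toaster` (a getter onto sonner's `Toaster`) to the CJS bundle while dropping `useModelProviders`. A minimal consumer sketch, assuming a React app that already renders the package's components:

```tsx
import { Toaster, toast } from '@mastra/playground-ui';

export function App() {
  return (
    <>
      {/* Toaster is sonner's <Toaster />, re-exported by the package. */}
      <Toaster />
      <button onClick={() => toast.success('Settings saved')}>Save</button>
    </>
  );
}
```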