seekrai 0.5.2__py3-none-any.whl → 0.5.24__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- seekrai/abstract/response_parsing.py +2 -0
- seekrai/client.py +8 -0
- seekrai/resources/__init__.py +20 -2
- seekrai/resources/agents/__init__.py +12 -0
- seekrai/resources/agents/agent_inference.py +37 -10
- seekrai/resources/agents/agent_observability.py +135 -0
- seekrai/resources/agents/agents.py +51 -0
- seekrai/resources/agents/python_functions.py +295 -0
- seekrai/resources/alignment.py +460 -1
- seekrai/resources/chat/completions.py +17 -8
- seekrai/resources/embeddings.py +2 -2
- seekrai/resources/explainability.py +92 -0
- seekrai/resources/finetune.py +44 -0
- seekrai/resources/ingestion.py +5 -7
- seekrai/resources/models.py +0 -3
- seekrai/resources/vectordb.py +36 -2
- seekrai/types/__init__.py +30 -3
- seekrai/types/agents/__init__.py +25 -3
- seekrai/types/agents/agent.py +11 -0
- seekrai/types/agents/observability.py +34 -0
- seekrai/types/agents/python_functions.py +29 -0
- seekrai/types/agents/runs.py +51 -1
- seekrai/types/agents/tools/__init__.py +12 -2
- seekrai/types/agents/tools/schemas/__init__.py +8 -0
- seekrai/types/agents/tools/schemas/file_search.py +1 -1
- seekrai/types/agents/tools/schemas/file_search_env.py +0 -1
- seekrai/types/agents/tools/schemas/run_python.py +9 -0
- seekrai/types/agents/tools/schemas/run_python_env.py +8 -0
- seekrai/types/agents/tools/schemas/web_search.py +9 -0
- seekrai/types/agents/tools/schemas/web_search_env.py +7 -0
- seekrai/types/agents/tools/tool.py +9 -3
- seekrai/types/agents/tools/tool_types.py +4 -4
- seekrai/types/alignment.py +36 -0
- seekrai/types/chat_completions.py +1 -0
- seekrai/types/deployments.py +2 -0
- seekrai/types/explainability.py +26 -0
- seekrai/types/files.py +2 -1
- seekrai/types/finetune.py +40 -7
- seekrai/types/vectordb.py +6 -1
- {seekrai-0.5.2.dist-info → seekrai-0.5.24.dist-info}/METADATA +3 -6
- seekrai-0.5.24.dist-info/RECORD +76 -0
- {seekrai-0.5.2.dist-info → seekrai-0.5.24.dist-info}/WHEEL +1 -1
- seekrai/types/agents/tools/tool_env_types.py +0 -4
- seekrai-0.5.2.dist-info/RECORD +0 -67
- {seekrai-0.5.2.dist-info → seekrai-0.5.24.dist-info}/LICENSE +0 -0
- {seekrai-0.5.2.dist-info → seekrai-0.5.24.dist-info}/entry_points.txt +0 -0
seekrai/client.py
CHANGED
@@ -24,6 +24,8 @@ class SeekrFlow:
     deployments: resources.Deployments
     vector_database: resources.VectorDatabase
     agents: resources.Agents
+    observability: resources.AgentObservability
+    explainability: resources.Explainability

     # client options
     client: SeekrFlowClient
@@ -89,6 +91,8 @@ class SeekrFlow:
         self.deployments = resources.Deployments(self.client)
         self.vector_database = resources.VectorDatabase(self.client)
         self.agents = resources.Agents(self.client)
+        self.observability = resources.AgentObservability(self.client)
+        self.explainability = resources.Explainability(self.client)


 class AsyncSeekrFlow:
@@ -105,6 +109,8 @@ class AsyncSeekrFlow:
     deployments: resources.AsyncDeployments
     vector_database: resources.AsyncVectorDatabase
     agents: resources.AsyncAgents
+    observability: resources.AsyncAgentObservability
+    explainability: resources.AsyncExplainability

     # client options
     client: SeekrFlowClient
@@ -170,6 +176,8 @@ class AsyncSeekrFlow:
         self.deployments = resources.AsyncDeployments(self.client)
         self.vector_database = resources.AsyncVectorDatabase(self.client)
         self.agents = resources.AsyncAgents(self.client)
+        self.observability = resources.AsyncAgentObservability(self.client)
+        self.explainability = resources.AsyncExplainability(self.client)


 Client = SeekrFlow
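
In practice, the two new resources are reached through attributes on the client. A minimal sketch, assuming the usual top-level export and constructor arguments (only the attribute names come from the hunks above):

from seekrai import SeekrFlow  # assumes the top-level export; Client is an alias for SeekrFlow

client = SeekrFlow(api_key="...")  # constructor arguments are assumptions

# New in 0.5.24: both attributes are wired up in SeekrFlow.__init__ per the diff above.
observability = client.observability    # resources.AgentObservability
explainability = client.explainability  # resources.Explainability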
seekrai/resources/__init__.py
CHANGED
@@ -1,9 +1,21 @@
-from seekrai.resources.agents import
-
+from seekrai.resources.agents import (
+    AgentInference,
+    AgentObservability,
+    Agents,
+    AsyncAgentObservability,
+    AsyncAgents,
+)
+from seekrai.resources.alignment import (
+    Alignment,
+    AsyncAlignment,
+    AsyncSystemPromptResource,
+    SystemPromptResource,
+)
 from seekrai.resources.chat import AsyncChat, Chat
 from seekrai.resources.completions import AsyncCompletions, Completions
 from seekrai.resources.deployments import AsyncDeployments, Deployments
 from seekrai.resources.embeddings import AsyncEmbeddings, Embeddings
+from seekrai.resources.explainability import AsyncExplainability, Explainability
 from seekrai.resources.files import AsyncFiles, Files
 from seekrai.resources.finetune import AsyncFineTuning, FineTuning
 from seekrai.resources.images import AsyncImages, Images
@@ -16,6 +28,8 @@ from seekrai.resources.vectordb import AsyncVectorDatabase, VectorDatabase
 __all__ = [
     "AsyncAlignment",
     "Alignment",
+    "AsyncSystemPromptResource",
+    "SystemPromptResource",
     "AsyncCompletions",
     "Completions",
     "AsyncChat",
@@ -38,7 +52,11 @@ __all__ = [
     "Deployments",
     "AsyncAgents",
     "Agents",
+    "AgentObservability",
+    "AsyncAgentObservability",
     "VectorDatabase",
     "AsyncVectorDatabase",
     "AgentInference",
+    "AsyncExplainability",
+    "Explainability",
 ]
seekrai/resources/agents/__init__.py
CHANGED

@@ -1,5 +1,13 @@
 from seekrai.resources.agents.agent_inference import AgentInference, AsyncAgentInference
+from seekrai.resources.agents.agent_observability import (
+    AgentObservability,
+    AsyncAgentObservability,
+)
 from seekrai.resources.agents.agents import Agents, AsyncAgents
+from seekrai.resources.agents.python_functions import (
+    AsyncCustomFunctions,
+    CustomFunctions,
+)
 from seekrai.resources.agents.threads import AgentThreads, AsyncAgentThreads


@@ -10,4 +18,8 @@ __all__ = [
     "AsyncAgents",
     "AgentThreads",
     "AsyncAgentThreads",
+    "CustomFunctions",
+    "AsyncCustomFunctions",
+    "AgentObservability",
+    "AsyncAgentObservability",
 ]
seekrai/resources/agents/agent_inference.py
CHANGED

@@ -1,8 +1,9 @@
-from typing import Any, AsyncGenerator, Iterator, Union
+from typing import Any, AsyncGenerator, Iterator, Optional, Union

 from seekrai.abstract import api_requestor
 from seekrai.seekrflow_response import SeekrFlowResponse
-from seekrai.types import Run, RunRequest, RunResponse, SeekrFlowRequest
+from seekrai.types import ModelSettings, Run, RunRequest, RunResponse, SeekrFlowRequest
+from seekrai.types.agents.runs import ResponseFormat


 class AgentInference:
@@ -16,7 +17,10 @@ class AgentInference:
         thread_id: str,
         *,
         stream: bool = False,
-
+        model_settings: ModelSettings = ModelSettings(),
+        response_format: Optional[Any] = None,
+        group: Optional[str] = "default_group",
+        metadata: Optional[dict[str, str]] = None,
     ) -> Union[RunResponse, Iterator[Any]]:
         """
         Run an inference call on a deployed agent.
@@ -25,13 +29,23 @@ class AgentInference:
             agent_id (str): The unique identifier of the deployed agent.
             thread_id (str): A thread identifier.
             stream (bool, optional): Whether to stream the response. Defaults to False.
-
+            model_settings (optional): Additional parameters (such as temperature, max_tokens, etc).
+            response_format: Optional structured output specification. If provided, the LLM will be constrained to return JSON matching this schema.
+            group (str, optional): Label used to associate a group of runs. Defaults to 'default_group'.
+            metadata (dict[str, str], optional): Additional metadata used to label runs. Defaults to None.

         Returns:
             A dictionary with the response (if non-streaming) or an iterator over response chunks.
         """
-        payload = RunRequest(
-
+        payload = RunRequest(
+            agent_id=agent_id,
+            model_settings=model_settings,
+            response_format=ResponseFormat.from_value(response_format)
+            if response_format
+            else None,
+            group=group,
+            metadata=metadata,
+        ).model_dump()
         endpoint = f"threads/{thread_id}/runs"
         if stream:
             endpoint += "/stream"
@@ -146,7 +160,10 @@ class AsyncAgentInference:
         thread_id: str,
         *,
         stream: bool = False,
-
+        model_settings: ModelSettings = ModelSettings(),
+        response_format: Optional[Any] = None,
+        group: Optional[str] = "default_group",
+        metadata: Optional[dict[str, str]] = None,
     ) -> Union[RunResponse, AsyncGenerator[Any, None]]:
         """
         Run an inference call on a deployed agent.
@@ -155,13 +172,23 @@ class AsyncAgentInference:
             agent_id (str): The unique identifier of the deployed agent.
             thread_id (str): A thread identifier.
             stream (bool, optional): Whether to stream the response. Defaults to False.
-
+            model_settings (optional): Additional parameters (such as temperature, max_tokens, etc).
+            response_format: Optional structured output specification. If provided, the LLM will be constrained to return JSON matching this schema.
+            group (str, optional): Label used to associate a group of runs. Defaults to 'default_group'.
+            metadata (dict[str, str], optional): Additional metadata used to label runs. Defaults to None.

         Returns:
             A dictionary with the response (if non-streaming) or an iterator over response chunks.
         """
-        payload = RunRequest(
-
+        payload = RunRequest(
+            agent_id=agent_id,
+            model_settings=model_settings,
+            response_format=ResponseFormat.from_value(response_format)
+            if response_format
+            else None,
+            group=group,
+            metadata=metadata,
+        ).model_dump()
         endpoint = f"threads/{thread_id}/runs"
         if stream:
             endpoint += "/stream"
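
A sketch of how the new run parameters might be supplied from the client. The keyword names and defaults come from the signature above; client construction and the method name on the runs resource are not visible in this diff and are assumptions:

from seekrai import SeekrFlow
from seekrai.types import ModelSettings

client = SeekrFlow(api_key="...")  # constructor arguments are assumptions

result = client.agents.runs.create(           # method name is an assumption; only the kwargs are confirmed
    agent_id="agent_123",                     # hypothetical IDs
    thread_id="thread_456",
    model_settings=ModelSettings(),           # temperature, max_tokens, etc.
    response_format={"type": "json_object"},  # value shape is an assumption; passed through ResponseFormat.from_value(...)
    group="evaluation-batch-1",               # label that groups related runs
    metadata={"env": "staging"},              # free-form labels attached to the run
)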
seekrai/resources/agents/agent_observability.py
ADDED

@@ -0,0 +1,135 @@
+from datetime import datetime
+from typing import Any, Optional
+
+from seekrai.abstract import api_requestor
+from seekrai.seekrflow_response import SeekrFlowResponse
+from seekrai.types import SeekrFlowRequest
+from seekrai.types.agents.observability import (
+    ObservabilitySpansRequest,
+    ObservabilitySpansResponse,
+)
+
+
+BASE_OBSERVABILITY_ENDPOINT = "observability/spans"
+
+
+class AgentObservability:
+    def __init__(self, client: Any):
+        self._client = client
+        self._requestor = api_requestor.APIRequestor(client=self._client)
+
+    def query_spans(
+        self,
+        min_start_time: Optional[datetime] = None,
+        max_start_time: Optional[datetime] = None,
+        agent_id: Optional[str] = None,
+        run_id: Optional[str] = None,
+        trace_id: Optional[str] = None,
+        thread_id: Optional[str] = None,
+        group: Optional[str] = None,
+        metadata: Optional[dict[str, str]] = None,
+        limit: int = 100,
+        order: str = "desc",
+        offset: int = 0,
+    ) -> ObservabilitySpansResponse:
+        """
+        Retrieve spans for a given run or group of runs given a set of facets.
+        """
+        payload = ObservabilitySpansRequest(
+            min_start_datetime=min_start_time,
+            max_start_datetime=max_start_time,
+            agent_id=agent_id,
+            run_id=run_id,
+            trace_id=trace_id,
+            thread_id=thread_id,
+            group=group,
+            metadata=metadata,
+            limit=limit,
+            order=order,
+            offset=offset,
+        ).model_dump()
+
+        response, _, _ = self._requestor.request(
+            options=SeekrFlowRequest(
+                method="POST", url=BASE_OBSERVABILITY_ENDPOINT, params=payload
+            )
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+
+        return ObservabilitySpansResponse(spans=response.data)
+
+    def retrieve_span(self, span_id: str) -> Optional[dict[str, Any]]:
+        """
+        Retrieve a specific span given a span_id.
+        """
+        endpoint = f"{BASE_OBSERVABILITY_ENDPOINT}/{span_id}"
+
+        response, _, _ = self._requestor.request(
+            options=SeekrFlowRequest(method="GET", url=endpoint)
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+
+        return response.data
+
+
+class AsyncAgentObservability:
+    def __init__(self, client: Any) -> None:
+        self._client = client
+        self._requestor = api_requestor.APIRequestor(client=self._client)
+
+    async def query_spans(
+        self,
+        min_start_time: Optional[datetime] = None,
+        max_start_time: Optional[datetime] = None,
+        agent_id: Optional[str] = None,
+        run_id: Optional[str] = None,
+        trace_id: Optional[str] = None,
+        thread_id: Optional[str] = None,
+        group: Optional[str] = None,
+        metadata: Optional[dict[str, str]] = None,
+        limit: int = 100,
+        order: str = "desc",
+        offset: int = 0,
+    ) -> ObservabilitySpansResponse:
+        """
+        Retrieve spans for a given run or group of runs given a set of facets.
+        """
+        payload = ObservabilitySpansRequest(
+            min_start_datetime=min_start_time,
+            max_start_datetime=max_start_time,
+            agent_id=agent_id,
+            run_id=run_id,
+            trace_id=trace_id,
+            thread_id=thread_id,
+            group=group,
+            metadata=metadata,
+            limit=limit,
+            order=order,
+            offset=offset,
+        ).model_dump()
+
+        response, _, _ = await self._requestor.arequest(
+            options=SeekrFlowRequest(
+                method="POST", url=BASE_OBSERVABILITY_ENDPOINT, params=payload
+            )
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+
+        return ObservabilitySpansResponse(spans=response.data)
+
+    async def retrieve_span(self, span_id: str) -> Optional[dict[str, Any]]:
+        """
+        Retrieve a specific span given a span_id.
+        """
+        endpoint = f"{BASE_OBSERVABILITY_ENDPOINT}/{span_id}"
+
+        response, _, _ = self._requestor.request(
+            options=SeekrFlowRequest(method="GET", url=endpoint)
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+
+        return response.data
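
For reference, a short usage sketch of the new observability resource through the client attribute added earlier. The method names and keyword parameters match the module above; client construction and the IDs are assumptions:

from datetime import datetime, timedelta, timezone
from seekrai import SeekrFlow

client = SeekrFlow(api_key="...")  # constructor arguments are assumptions

# Filter spans to the last hour for one agent and run group (POST observability/spans).
spans = client.observability.query_spans(
    min_start_time=datetime.now(timezone.utc) - timedelta(hours=1),
    agent_id="agent_123",   # hypothetical ID
    group="default_group",
    limit=50,
    order="desc",
)

# Fetch a single span by ID (GET observability/spans/{span_id}).
span = client.observability.retrieve_span(span_id="span_abc")  # hypothetical ID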
seekrai/resources/agents/agents.py
CHANGED

@@ -1,5 +1,9 @@
 from seekrai.abstract import api_requestor
 from seekrai.resources.agents.agent_inference import AgentInference, AsyncAgentInference
+from seekrai.resources.agents.python_functions import (
+    AsyncCustomFunctions,
+    CustomFunctions,
+)
 from seekrai.resources.agents.threads import AgentThreads, AsyncAgentThreads
 from seekrai.seekrflow_response import SeekrFlowResponse
 from seekrai.types import SeekrFlowClient, SeekrFlowRequest
@@ -7,6 +11,7 @@ from seekrai.types.agents.agent import (
     Agent,
     AgentDeleteResponse,
     CreateAgentRequest,
+    UpdateAgentRequest,
 )


@@ -18,6 +23,7 @@ class Agents:
         )
         self.runs = AgentInference(client)
         self.threads = AgentThreads(client)
+        self.custom_functions = CustomFunctions(client)

     def retrieve(self, agent_id: str) -> Agent:
         """
@@ -140,6 +146,28 @@ class Agents:
         assert isinstance(response, SeekrFlowResponse)
         return AgentDeleteResponse(**response.data)

+    def update(self, agent_id: str, request: UpdateAgentRequest) -> Agent:
+        """
+        Update an existing agent's configuration.
+
+        Args:
+            agent_id: The ID of the agent to update.
+            request: The request object containing updated agent config.
+
+        Returns:
+            The updated agent.
+        """
+        response, _, _ = self._requestor.request(
+            options=SeekrFlowRequest(
+                method="PUT",
+                url=f"flow/agents/{agent_id}/update",
+                params=request.model_dump(),
+            ),
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+        return Agent(**response.data)
+

 class AsyncAgents:
     def __init__(self, client: SeekrFlowClient) -> None:
@@ -149,6 +177,7 @@ class AsyncAgents:
         )
         self.runs = AsyncAgentInference(client)
         self.threads = AsyncAgentThreads(client)
+        self.custom_functions = AsyncCustomFunctions(client)

     async def retrieve(self, agent_id: str) -> Agent:
         """
@@ -270,3 +299,25 @@ class AsyncAgents:

         assert isinstance(response, SeekrFlowResponse)
         return AgentDeleteResponse(**response.data)
+
+    async def update(self, agent_id: str, request: UpdateAgentRequest) -> Agent:
+        """
+        Update an existing agent's configuration.
+
+        Args:
+            agent_id: The ID of the agent to update.
+            request: The request object containing updated agent config.
+
+        Returns:
+            The updated agent.
+        """
+        response, _, _ = await self._requestor.arequest(
+            options=SeekrFlowRequest(
+                method="PUT",
+                url=f"flow/agents/{agent_id}/update",
+                params=request.model_dump(),
+            ),
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+        return Agent(**response.data)