seekrai 0.5.11__tar.gz → 0.5.13__tar.gz

This diff shows the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
Files changed (73)
  1. {seekrai-0.5.11 → seekrai-0.5.13}/PKG-INFO +1 -1
  2. {seekrai-0.5.11 → seekrai-0.5.13}/pyproject.toml +1 -1
  3. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/resources/agents/__init__.py +6 -0
  4. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/resources/agents/agent_inference.py +21 -8
  5. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/resources/agents/agents.py +6 -0
  6. seekrai-0.5.13/src/seekrai/resources/agents/python_functions.py +330 -0
  7. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/agents/__init__.py +8 -0
  8. seekrai-0.5.13/src/seekrai/types/agents/python_functions.py +29 -0
  9. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/agents/runs.py +48 -18
  10. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/agents/tools/schemas/run_python_env.py +1 -0
  11. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/files.py +1 -0
  12. {seekrai-0.5.11 → seekrai-0.5.13}/LICENSE +0 -0
  13. {seekrai-0.5.11 → seekrai-0.5.13}/README.md +0 -0
  14. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/__init__.py +0 -0
  15. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/abstract/__init__.py +0 -0
  16. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/abstract/api_requestor.py +0 -0
  17. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/abstract/response_parsing.py +0 -0
  18. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/client.py +0 -0
  19. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/constants.py +0 -0
  20. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/error.py +0 -0
  21. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/filemanager.py +0 -0
  22. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/resources/__init__.py +0 -0
  23. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/resources/agents/threads.py +0 -0
  24. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/resources/alignment.py +0 -0
  25. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/resources/chat/__init__.py +0 -0
  26. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/resources/chat/completions.py +0 -0
  27. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/resources/completions.py +0 -0
  28. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/resources/deployments.py +0 -0
  29. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/resources/embeddings.py +0 -0
  30. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/resources/explainability.py +0 -0
  31. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/resources/files.py +0 -0
  32. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/resources/finetune.py +0 -0
  33. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/resources/images.py +0 -0
  34. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/resources/ingestion.py +0 -0
  35. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/resources/models.py +0 -0
  36. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/resources/projects.py +0 -0
  37. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/resources/resource_base.py +0 -0
  38. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/resources/vectordb.py +0 -0
  39. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/seekrflow_response.py +0 -0
  40. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/__init__.py +0 -0
  41. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/abstract.py +0 -0
  42. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/agents/agent.py +0 -0
  43. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/agents/threads.py +0 -0
  44. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/agents/tools/__init__.py +0 -0
  45. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/agents/tools/env_model_config.py +0 -0
  46. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/agents/tools/schemas/__init__.py +0 -0
  47. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/agents/tools/schemas/file_search.py +0 -0
  48. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/agents/tools/schemas/file_search_env.py +0 -0
  49. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/agents/tools/schemas/run_python.py +0 -0
  50. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/agents/tools/schemas/web_search.py +0 -0
  51. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/agents/tools/schemas/web_search_env.py +0 -0
  52. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/agents/tools/tool.py +0 -0
  53. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/agents/tools/tool_types.py +0 -0
  54. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/alignment.py +0 -0
  55. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/chat_completions.py +0 -0
  56. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/common.py +0 -0
  57. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/completions.py +0 -0
  58. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/deployments.py +0 -0
  59. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/embeddings.py +0 -0
  60. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/error.py +0 -0
  61. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/explainability.py +0 -0
  62. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/finetune.py +0 -0
  63. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/images.py +0 -0
  64. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/ingestion.py +0 -0
  65. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/models.py +0 -0
  66. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/projects.py +0 -0
  67. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/vectordb.py +0 -0
  68. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/utils/__init__.py +0 -0
  69. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/utils/_log.py +0 -0
  70. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/utils/api_helpers.py +0 -0
  71. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/utils/files.py +0 -0
  72. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/utils/tools.py +0 -0
  73. {seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/version.py +0 -0
{seekrai-0.5.11 → seekrai-0.5.13}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: seekrai
-Version: 0.5.11
+Version: 0.5.13
 Summary: Python client for SeekrAI
 License: Apache-2.0
 Author: SeekrFlow
{seekrai-0.5.11 → seekrai-0.5.13}/pyproject.toml
@@ -14,7 +14,7 @@ build-backend = "poetry.core.masonry.api"
 
 [tool.poetry]
 name = "seekrai"
-version = "0.5.11"
+version = "0.5.13"
 authors = [
     "SeekrFlow <support@seekr.com>"
 ]
{seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/resources/agents/__init__.py
@@ -1,5 +1,9 @@
 from seekrai.resources.agents.agent_inference import AgentInference, AsyncAgentInference
 from seekrai.resources.agents.agents import Agents, AsyncAgents
+from seekrai.resources.agents.python_functions import (
+    AsyncCustomFunctions,
+    CustomFunctions,
+)
 from seekrai.resources.agents.threads import AgentThreads, AsyncAgentThreads
 
 
@@ -10,4 +14,6 @@ __all__ = [
     "AsyncAgents",
     "AgentThreads",
     "AsyncAgentThreads",
+    "CustomFunctions",
+    "AsyncCustomFunctions",
 ]
{seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/resources/agents/agent_inference.py
@@ -3,6 +3,7 @@ from typing import Any, AsyncGenerator, Iterator, Optional, Union
 from seekrai.abstract import api_requestor
 from seekrai.seekrflow_response import SeekrFlowResponse
 from seekrai.types import ModelSettings, Run, RunRequest, RunResponse, SeekrFlowRequest
+from seekrai.types.agents.runs import ResponseFormat
 
 
 class AgentInference:
@@ -16,7 +17,8 @@ class AgentInference:
         thread_id: str,
         *,
         stream: bool = False,
-        model_settings: Optional[ModelSettings] = None,
+        model_settings: ModelSettings = ModelSettings(),
+        response_format: Optional[Any] = None,
     ) -> Union[RunResponse, Iterator[Any]]:
         """
         Run an inference call on a deployed agent.
@@ -26,13 +28,18 @@
             thread_id (str): A thread identifier.
             stream (bool, optional): Whether to stream the response. Defaults to False.
             model_settings (optional): Additional parameters (such as temperature, max_tokens, etc).
+            response_format: Optional structured output specification. If provided, the LLM will be constrained to return JSON matching this schema.
 
         Returns:
             A dictionary with the response (if non-streaming) or an iterator over response chunks.
         """
-        payload = RunRequest(agent_id=agent_id).model_dump()
-        if model_settings is not None:
-            payload["model_settings"] = model_settings.model_dump()
+        payload = RunRequest(
+            agent_id=agent_id,
+            model_settings=model_settings,
+            response_format=ResponseFormat.from_value(response_format)
+            if response_format
+            else None,
+        ).model_dump()
         endpoint = f"threads/{thread_id}/runs"
         if stream:
             endpoint += "/stream"
@@ -147,7 +154,8 @@ class AsyncAgentInference:
         thread_id: str,
         *,
         stream: bool = False,
-        model_settings: Optional[ModelSettings] = None,
+        model_settings: ModelSettings = ModelSettings(),
+        response_format: Optional[Any] = None,
     ) -> Union[RunResponse, AsyncGenerator[Any, None]]:
         """
         Run an inference call on a deployed agent.
@@ -157,13 +165,18 @@
             thread_id (str): A thread identifier.
             stream (bool, optional): Whether to stream the response. Defaults to False.
             model_settings (optional): Additional parameters (such as temperature, max_tokens, etc).
+            response_format: Optional structured output specification. If provided, the LLM will be constrained to return JSON matching this schema.
 
         Returns:
            A dictionary with the response (if non-streaming) or an iterator over response chunks.
         """
-        payload = RunRequest(agent_id=agent_id).model_dump()
-        if model_settings is not None:
-            payload["model_settings"] = model_settings.model_dump()
+        payload = RunRequest(
+            agent_id=agent_id,
+            model_settings=model_settings,
+            response_format=ResponseFormat.from_value(response_format)
+            if response_format
+            else None,
+        ).model_dump()
         endpoint = f"threads/{thread_id}/runs"
         if stream:
             endpoint += "/stream"
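The new response_format parameter accepts a Pydantic model class, a JSON schema dict, or a ResponseFormat instance. A minimal usage sketch follows; it assumes the package's top-level SeekrFlow client and uses placeholder agent and thread IDs that are not taken from this diff:

from pydantic import BaseModel

from seekrai import SeekrFlow  # assumed top-level client entry point


class CityInfo(BaseModel):  # hypothetical schema for structured output
    city: str
    population: int


client = SeekrFlow()
run = client.agents.runs.create(
    agent_id="agent_abc123",    # placeholder ID
    thread_id="thread_xyz789",  # placeholder ID
    response_format=CityInfo,   # a JSON-schema dict or ResponseFormat also works
)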
{seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/resources/agents/agents.py
@@ -1,5 +1,9 @@
 from seekrai.abstract import api_requestor
 from seekrai.resources.agents.agent_inference import AgentInference, AsyncAgentInference
+from seekrai.resources.agents.python_functions import (
+    AsyncCustomFunctions,
+    CustomFunctions,
+)
 from seekrai.resources.agents.threads import AgentThreads, AsyncAgentThreads
 from seekrai.seekrflow_response import SeekrFlowResponse
 from seekrai.types import SeekrFlowClient, SeekrFlowRequest
@@ -19,6 +23,7 @@ class Agents:
         )
         self.runs = AgentInference(client)
         self.threads = AgentThreads(client)
+        self.custom_functions = CustomFunctions(client)
 
     def retrieve(self, agent_id: str) -> Agent:
         """
@@ -172,6 +177,7 @@ class AsyncAgents:
         )
         self.runs = AsyncAgentInference(client)
         self.threads = AsyncAgentThreads(client)
+        self.custom_functions = AsyncCustomFunctions(client)
 
     async def retrieve(self, agent_id: str) -> Agent:
         """
seekrai-0.5.13/src/seekrai/resources/agents/python_functions.py (new file)
@@ -0,0 +1,330 @@
+from pathlib import Path
+from typing import Union
+
+from seekrai.abstract import api_requestor
+from seekrai.seekrflow_response import SeekrFlowResponse
+from seekrai.types import SeekrFlowClient, SeekrFlowRequest
+from seekrai.types.agents.python_functions import (
+    DeletePythonFunctionResponse,
+    PythonFunctionResponse,
+)
+
+
+class CustomFunctions:
+    def __init__(self, client: SeekrFlowClient) -> None:
+        self._client = client
+        self._requestor = api_requestor.APIRequestor(
+            client=self._client,
+        )
+
+    def create(
+        self, file_path: Union[str, Path], description: Union[str, None] = None
+    ) -> PythonFunctionResponse:
+        """
+        Upload a new Python function for the user.
+
+        Args:
+            file_path: Path to the Python function file to upload (can be relative or absolute).
+            description: Optional description for the function.
+
+        Returns:
+            The newly created Python function.
+        """
+        # Convert string to Path if needed
+        if isinstance(file_path, str):
+            file_path = Path(file_path)
+
+        # Read the file contents
+        with file_path.open("rb") as f:
+            file_content = f.read()
+
+        # Prepare multipart form data
+        files = {"file": (file_path.name, file_content, "text/plain")}
+        params = {}
+        if description:
+            params["description"] = description
+
+        response, _, _ = self._requestor.request(
+            options=SeekrFlowRequest(
+                method="POST",
+                url="functions/",
+                files=files,
+                params=params,
+            ),
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+        return PythonFunctionResponse(**response.data)
+
+    def retrieve(self, function_id: str) -> PythonFunctionResponse:
+        """
+        Retrieve a Python function by its ID.
+
+        Args:
+            function_id: The ID of the Python function to retrieve.
+
+        Returns:
+            The Python function.
+        """
+        response, _, _ = self._requestor.request(
+            options=SeekrFlowRequest(
+                method="GET",
+                url=f"functions/{function_id}",
+            ),
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+        return PythonFunctionResponse(**response.data)
+
+    def list_functions(
+        self, limit: int = 20, offset: int = 0, order: str = "desc"
+    ) -> list[PythonFunctionResponse]:
+        """
+        List all Python functions for the user.
+
+        Args:
+            limit: Maximum number of functions to return (default: 20).
+            offset: Number of functions to skip (default: 0).
+            order: Sort order, 'asc' or 'desc' (default: 'desc').
+
+        Returns:
+            A list of Python functions.
+        """
+        response, _, _ = self._requestor.request(
+            options=SeekrFlowRequest(
+                method="GET",
+                url="functions/",
+                params={"limit": limit, "offset": offset, "order": order},
+            ),
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+        functions = [PythonFunctionResponse(**func) for func in response.data]  # type: ignore
+        return functions
+
+    def update(
+        self,
+        function_id: str,
+        file_path: Union[str, Path, None] = None,
+        description: Union[str, None] = None,
+    ) -> PythonFunctionResponse:
+        """
+        Update an existing Python function.
+
+        Args:
+            function_id: The ID of the Python function to update.
+            file_path: Optional path to a new Python function file (can be relative or absolute).
+            description: Optional new description for the function.
+
+        Returns:
+            The updated Python function.
+        """
+        files = None
+        params = {}
+
+        if file_path:
+            # Convert string to Path if needed
+            if isinstance(file_path, str):
+                file_path = Path(file_path)
+
+            # Read the file contents
+            with file_path.open("rb") as f:
+                file_content = f.read()
+
+            # Prepare multipart form data
+            files = {"file": (file_path.name, file_content, "text/plain")}
+
+        if description:
+            params["description"] = description
+
+        response, _, _ = self._requestor.request(
+            options=SeekrFlowRequest(
+                method="PATCH",
+                url=f"functions/{function_id}",
+                files=files,
+                params=params,
+            ),
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+        return PythonFunctionResponse(**response.data)
+
+    def delete(self, function_id: str) -> DeletePythonFunctionResponse:
+        """
+        Delete a Python function by its ID.
+
+        Args:
+            function_id: The ID of the Python function to delete.
+
+        Returns:
+            A response indicating whether the delete operation was successful.
+        """
+        response, _, _ = self._requestor.request(
+            options=SeekrFlowRequest(
+                method="DELETE",
+                url=f"functions/{function_id}",
+            ),
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+        return DeletePythonFunctionResponse(**response.data)
+
+
+class AsyncCustomFunctions:
+    def __init__(self, client: SeekrFlowClient) -> None:
+        self._client = client
+        self._requestor = api_requestor.APIRequestor(
+            client=self._client,
+        )
+
+    async def create(
+        self, file_path: Union[str, Path], description: Union[str, None] = None
+    ) -> PythonFunctionResponse:
+        """
+        Upload a new Python function for the user.
+
+        Args:
+            file_path: Path to the Python function file to upload (can be relative or absolute).
+            description: Optional description for the function.
+
+        Returns:
+            The newly created Python function.
+        """
+        # Convert string to Path if needed
+        if isinstance(file_path, str):
+            file_path = Path(file_path)
+
+        # Read the file contents
+        with file_path.open("rb") as f:
+            file_content = f.read()
+
+        # Prepare multipart form data
+        files = {"file": (file_path.name, file_content, "text/plain")}
+        params = {}
+        if description:
+            params["description"] = description
+
+        response, _, _ = await self._requestor.arequest(
+            options=SeekrFlowRequest(
+                method="POST",
+                url="functions/",
+                files=files,
+                params=params,
+            ),
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+        return PythonFunctionResponse(**response.data)
+
+    async def retrieve(self, function_id: str) -> PythonFunctionResponse:
+        """
+        Retrieve a Python function by its ID.
+
+        Args:
+            function_id: The ID of the Python function to retrieve.
+
+        Returns:
+            The Python function.
+        """
+        response, _, _ = await self._requestor.arequest(
+            options=SeekrFlowRequest(
+                method="GET",
+                url=f"functions/{function_id}",
+            ),
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+        return PythonFunctionResponse(**response.data)
+
+    async def list_functions(
+        self, limit: int = 20, offset: int = 0, order: str = "desc"
+    ) -> list[PythonFunctionResponse]:
+        """
+        List all Python functions for the user.
+
+        Args:
+            limit: Maximum number of functions to return (default: 20).
+            offset: Number of functions to skip (default: 0).
+            order: Sort order, 'asc' or 'desc' (default: 'desc').
+
+        Returns:
+            A list of Python functions.
+        """
+        response, _, _ = await self._requestor.arequest(
+            options=SeekrFlowRequest(
+                method="GET",
+                url="functions/",
+                params={"limit": limit, "offset": offset, "order": order},
+            ),
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+        functions = [PythonFunctionResponse(**func) for func in response.data]  # type: ignore
+        return functions
+
+    async def update(
+        self,
+        function_id: str,
+        file_path: Union[str, Path, None] = None,
+        description: Union[str, None] = None,
+    ) -> PythonFunctionResponse:
+        """
+        Update an existing Python function.
+
+        Args:
+            function_id: The ID of the Python function to update.
+            file_path: Optional path to a new Python function file (can be relative or absolute).
+            description: Optional new description for the function.
+
+        Returns:
+            The updated Python function.
+        """
+        files = None
+        params = {}
+
+        if file_path:
+            # Convert string to Path if needed
+            if isinstance(file_path, str):
+                file_path = Path(file_path)
+
+            # Read the file contents
+            with file_path.open("rb") as f:
+                file_content = f.read()
+
+            # Prepare multipart form data
+            files = {"file": (file_path.name, file_content, "text/plain")}
+
+        if description:
+            params["description"] = description
+
+        response, _, _ = await self._requestor.arequest(
+            options=SeekrFlowRequest(
+                method="PATCH",
+                url=f"functions/{function_id}",
+                files=files,
+                params=params,
+            ),
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+        return PythonFunctionResponse(**response.data)
+
+    async def delete(self, function_id: str) -> DeletePythonFunctionResponse:
+        """
+        Delete a Python function by its ID.
+
+        Args:
+            function_id: The ID of the Python function to delete.
+
+        Returns:
+            A response indicating whether the delete operation was successful.
+        """
+        response, _, _ = await self._requestor.arequest(
+            options=SeekrFlowRequest(
+                method="DELETE",
+                url=f"functions/{function_id}",
+            ),
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+        return DeletePythonFunctionResponse(**response.data)
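A hedged sketch of how this new custom-functions resource might be used through the Agents wiring shown below; the client entry point, file path, and printed fields are assumptions for illustration only:

from seekrai import SeekrFlow  # assumed top-level client entry point

client = SeekrFlow()

# Upload a Python function file and inspect its metadata.
fn = client.agents.custom_functions.create(
    file_path="./my_helpers.py",  # hypothetical local file
    description="Helpers for the run-python tool",
)
print(fn.id, fn.version, fn.active)

# List, update, and delete follow the same pattern.
for f in client.agents.custom_functions.list_functions(limit=10, order="desc"):
    print(f.name, f.description)
client.agents.custom_functions.delete(fn.id)

The IDs returned here appear to pair with the new RunPythonEnv.function_ids field added further down in this diff.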
{seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/agents/__init__.py
@@ -5,6 +5,11 @@ from seekrai.types.agents.agent import (
     CreateAgentRequest,
     ReasoningEffort,
 )
+from seekrai.types.agents.python_functions import (
+    DeletePythonFunctionResponse,
+    PythonFunctionBase,
+    PythonFunctionResponse,
+)
 from seekrai.types.agents.runs import (
     ModelSettings,
     Run,
@@ -100,4 +105,7 @@ __all__ = [
     "RunPythonEnv",
     "WebSearch",
     "WebSearchEnv",
+    "PythonFunctionBase",
+    "PythonFunctionResponse",
+    "DeletePythonFunctionResponse",
 ]
seekrai-0.5.13/src/seekrai/types/agents/python_functions.py (new file)
@@ -0,0 +1,29 @@
+from datetime import datetime
+
+from pydantic import BaseModel, ConfigDict
+
+
+class PythonFunctionBase(BaseModel):
+    """Base model for a Python function, including metadata fields."""
+
+    model_config = ConfigDict(from_attributes=True)
+    id: str
+    version: int
+    name: str
+    description: str
+    active: bool
+
+
+class PythonFunctionResponse(PythonFunctionBase):
+    """Response model for a Python function, including code and user info."""
+
+    code: str
+    user_id: str
+    created_at: datetime
+    updated_at: datetime
+
+
+class DeletePythonFunctionResponse(BaseModel):
+    """Response model for Python function deletion."""
+
+    deleted: bool
{seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/agents/runs.py
@@ -1,16 +1,63 @@
 import datetime
 from enum import Enum
-from typing import Any, Optional, Union
+from typing import Any, Dict, Optional, Union
 
+import pydantic
 from pydantic import Field
 
 from seekrai.types.abstract import BaseModel
 
 
+class ModelSettings(BaseModel):
+    """Settings to use when calling an LLM.
+
+    This class holds optional model configuration parameters (e.g. temperature,
+    top_p, penalties, truncation, etc.).
+
+    Not all models/providers support all of these parameters, so please check the API documentation
+    for the specific model and provider you are using.
+    """
+
+    temperature: float = Field(default=1.0, ge=0.0, le=2.0)
+    top_p: float = Field(default=1.0, ge=0.0, le=1.0)
+    frequency_penalty: float = Field(default=0.0, ge=-2.0, le=2.0)
+    presence_penalty: float = Field(default=0.0, ge=-2.0, le=2.0)
+    max_tokens: Optional[int] = None
+
+
+class ResponseFormat(BaseModel):
+    """Specifies a JSON schema for the response format.
+
+    When provided, the LLM will be constrained to return a JSON response
+    that matches the specified schema.
+
+    Can be instantiated with:
+    - A JSON schema dictionary
+    - A Pydantic model class
+    - An existing ResponseFormat instance
+    """
+
+    json_schema: Dict[str, Any]
+
+    @classmethod
+    def from_value(cls, value: Any) -> "ResponseFormat":
+        if isinstance(value, cls):
+            return value
+        if isinstance(value, dict):
+            return cls(json_schema=value)
+        if isinstance(value, type) and issubclass(value, pydantic.BaseModel):
+            return cls(json_schema=value.model_json_schema())
+        raise ValueError(
+            "ResponseFormat configuration is invalid. Expected ResponseFormat, a valid schema or a Pydantic BaseModel."
+        )
+
+
 class RunRequest(BaseModel):
     """Request model for creating a run."""
 
     agent_id: str = Field(default="default_agent")
+    model_settings: ModelSettings = ModelSettings()
+    response_format: Optional[Union[ResponseFormat, Dict[str, Any], type]] = None
 
 
 class RunResponse(BaseModel):
@@ -115,20 +162,3 @@ class RunStep(BaseModel):
     completed_at: Optional[datetime.datetime] = None
     meta_data: dict[str, Any] = Field(default_factory=dict)
     usage: Optional[RunStepUsage] = None
-
-
-class ModelSettings(BaseModel):
-    """Settings to use when calling an LLM.
-
-    This class holds optional model configuration parameters (e.g. temperature,
-    top_p, penalties, truncation, etc.).
-
-    Not all models/providers support all of these parameters, so please check the API documentation
-    for the specific model and provider you are using.
-    """
-
-    temperature: float = Field(default=1.0, ge=0.0, le=2.0)
-    top_p: float = Field(default=1.0, ge=0.0, le=1.0)
-    frequency_penalty: float = Field(default=0.0, ge=-2.0, le=2.0)
-    presence_penalty: float = Field(default=0.0, ge=-2.0, le=2.0)
-    max_tokens: Optional[int] = None
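To illustrate the normalization performed by ResponseFormat.from_value above (a sketch; the Weather model is a made-up example, not part of the package):

from pydantic import BaseModel

from seekrai.types.agents.runs import ResponseFormat


class Weather(BaseModel):  # example model for illustration
    city: str
    temperature_c: float


# All three inputs normalize to a ResponseFormat holding a JSON schema dict.
rf_model = ResponseFormat.from_value(Weather)
rf_dict = ResponseFormat.from_value({"type": "object", "properties": {}})
rf_same = ResponseFormat.from_value(rf_model)  # existing instances pass through

assert rf_model.json_schema == Weather.model_json_schema()
assert rf_same is rf_model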
{seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/agents/tools/schemas/run_python_env.py
@@ -5,3 +5,4 @@ from seekrai.types.agents.tools.env_model_config import EnvConfig
 
 class RunPythonEnv(EnvConfig):
     run_python_tool_desc: Optional[str] = None
+    function_ids: Optional[list[str]] = None
{seekrai-0.5.11 → seekrai-0.5.13}/src/seekrai/types/files.py
@@ -11,6 +11,7 @@ from seekrai.types.common import (
 
 
 class FilePurpose(str, Enum):
+    ReinforcementFineTune = "reinforcement-fine-tune"
    FineTune = "fine-tune"
    PreTrain = "pre-train"
    Alignment = "alignment"