seekrai 0.4.4__py3-none-any.whl → 0.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- seekrai/__init__.py +0 -1
- seekrai/abstract/api_requestor.py +108 -251
- seekrai/abstract/response_parsing.py +99 -0
- seekrai/client.py +6 -2
- seekrai/filemanager.py +92 -3
- seekrai/resources/__init__.py +6 -1
- seekrai/resources/agents/__init__.py +11 -6
- seekrai/resources/agents/agent_inference.py +236 -29
- seekrai/resources/agents/agents.py +272 -0
- seekrai/resources/agents/threads.py +454 -0
- seekrai/resources/alignment.py +3 -9
- seekrai/resources/completions.py +3 -9
- seekrai/resources/deployments.py +4 -9
- seekrai/resources/embeddings.py +3 -9
- seekrai/resources/files.py +118 -53
- seekrai/resources/finetune.py +3 -9
- seekrai/resources/images.py +3 -5
- seekrai/resources/ingestion.py +3 -9
- seekrai/resources/models.py +35 -124
- seekrai/resources/projects.py +4 -9
- seekrai/resources/resource_base.py +10 -0
- seekrai/resources/vectordb.py +482 -0
- seekrai/types/__init__.py +87 -0
- seekrai/types/agents/__init__.py +89 -0
- seekrai/types/agents/agent.py +42 -0
- seekrai/types/agents/runs.py +117 -0
- seekrai/types/agents/threads.py +265 -0
- seekrai/types/agents/tools/__init__.py +16 -0
- seekrai/types/agents/tools/env_model_config.py +7 -0
- seekrai/types/agents/tools/schemas/__init__.py +8 -0
- seekrai/types/agents/tools/schemas/file_search.py +9 -0
- seekrai/types/agents/tools/schemas/file_search_env.py +11 -0
- seekrai/types/agents/tools/tool.py +14 -0
- seekrai/types/agents/tools/tool_env_types.py +4 -0
- seekrai/types/agents/tools/tool_types.py +10 -0
- seekrai/types/alignment.py +6 -2
- seekrai/types/files.py +3 -0
- seekrai/types/finetune.py +1 -0
- seekrai/types/models.py +3 -0
- seekrai/types/vectordb.py +78 -0
- {seekrai-0.4.4.dist-info → seekrai-0.5.0.dist-info}/METADATA +3 -3
- seekrai-0.5.0.dist-info/RECORD +67 -0
- {seekrai-0.4.4.dist-info → seekrai-0.5.0.dist-info}/WHEEL +1 -1
- seekrai-0.4.4.dist-info/RECORD +0 -49
- {seekrai-0.4.4.dist-info → seekrai-0.5.0.dist-info}/LICENSE +0 -0
- {seekrai-0.4.4.dist-info → seekrai-0.5.0.dist-info}/entry_points.txt +0 -0
seekrai/client.py
CHANGED
@@ -22,6 +22,7 @@ class SeekrFlow:
     ingestion: resources.Ingestion
     projects: resources.Projects
     deployments: resources.Deployments
+    vector_database: resources.VectorDatabase
     agents: resources.Agents

     # client options
@@ -86,6 +87,7 @@ class SeekrFlow:
         self.ingestion = resources.Ingestion(self.client)
         self.projects = resources.Projects(self.client)
         self.deployments = resources.Deployments(self.client)
+        self.vector_database = resources.VectorDatabase(self.client)
         self.agents = resources.Agents(self.client)


@@ -101,7 +103,8 @@ class AsyncSeekrFlow:
     ingestion: resources.AsyncIngestion
     projects: resources.AsyncProjects
     deployments: resources.AsyncDeployments
-
+    vector_database: resources.AsyncVectorDatabase
+    agents: resources.AsyncAgents

     # client options
     client: SeekrFlowClient
@@ -165,7 +168,8 @@ class AsyncSeekrFlow:
         self.ingestion = resources.AsyncIngestion(self.client)
         self.projects = resources.AsyncProjects(self.client)
         self.deployments = resources.AsyncDeployments(self.client)
-        self.
+        self.vector_database = resources.AsyncVectorDatabase(self.client)
+        self.agents = resources.AsyncAgents(self.client)


 Client = SeekrFlow
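In practice, both clients now expose the vector database resource as an instance attribute, wired up in `__init__` alongside the existing resources, and `AsyncSeekrFlow` gains an explicit `agents` attribute as well. A minimal sketch of the new surface (constructor arguments are omitted and should be passed exactly as in 0.4.4; only the attribute names below come from the hunks above):

```python
from seekrai.client import AsyncSeekrFlow, SeekrFlow

client = SeekrFlow()  # construct as in 0.4.4; 0.5.0 only adds attributes here
vector_db = client.vector_database              # resources.VectorDatabase

async_client = AsyncSeekrFlow()
async_vector_db = async_client.vector_database  # resources.AsyncVectorDatabase
async_agents = async_client.agents              # resources.AsyncAgents
```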
seekrai/filemanager.py
CHANGED
@@ -91,9 +91,9 @@ def _prepare_output(

     content_type = str(headers.get("content-type"))

-    assert (
-
-    )
+    assert remote_name, (
+        "No model name found in fine_tune object. Please specify an `output` file name."
+    )

     if step > 0:
         remote_name += f"-checkpoint-{step}"
@@ -430,3 +430,92 @@ class UploadManager:

         for progress_bar in progress_bars:
             progress_bar.close()
+
+    async def bulk_upload_async(
+        self,
+        url: str,
+        files: list[Path],
+        *,
+        purpose: FilePurpose,
+        redirect: bool = False,
+    ) -> list[FileResponse]:
+        """
+        Upload multiple files in a bulk request.
+
+        Args:
+            url: API endpoint to upload to
+            files: List of file paths to upload
+            purpose: The purpose of the files
+            redirect: Whether to redirect after upload
+
+        Returns:
+            List of FileResponse objects for each uploaded file
+        """
+        requestor = api_requestor.APIRequestor(
+            client=self._client,
+        )
+
+        # Prepare files for multipart form upload
+        # Format needs to be a list of tuples: [('files', (filename, file_stream)), ...]
+        files_tuples = []
+        file_streams = []
+        progress_bars = []
+
+        try:
+            for file_path in files:
+                file_size = os.stat(file_path.as_posix()).st_size
+
+                # Create progress bar
+                progress_bar = tqdm(
+                    total=file_size,
+                    unit="B",
+                    unit_scale=True,
+                    desc=f"Uploading file {file_path.name}",
+                    disable=bool(DISABLE_TQDM),
+                )
+                progress_bars.append(progress_bar)
+
+                # Open file and track for cleanup
+                file_stream = file_path.open("rb")
+                file_streams.append(file_stream)
+
+                # Create wrapper for progress tracking
+                reader_wrapper = CallbackIOWrapper(
+                    progress_bar.update, file_stream, "read"
+                )
+
+                # Add to files list as a tuple
+                files_tuples.append(("files", (file_path.name, reader_wrapper)))
+
+            # Make the request
+            response, _, _ = await requestor.arequest(
+                options=SeekrFlowRequest(
+                    method="PUT",
+                    url=url,
+                    files=files_tuples,  # Pass as a list of tuples (field_name, (filename, file_stream))
+                    params={"purpose": purpose.value},
+                    allow_redirects=redirect,
+                ),
+                stream=False,
+            )
+
+            assert isinstance(response, SeekrFlowResponse)
+
+            # Parse the response
+            if isinstance(response.data, list):
+                file_responses = [
+                    FileResponse(**file_data) for file_data in response.data
+                ]
+            else:
+                # Handle case where response might be a single object
+                file_responses = [FileResponse(**response.data)]
+
+            return file_responses
+
+        finally:
+            # Clean up resources
+            for file_stream in file_streams:
+                file_stream.close()
+
+            for progress_bar in progress_bars:
+                progress_bar.close()
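The new `bulk_upload_async` method sends every file in a single multipart `PUT` request and returns one `FileResponse` per uploaded file. A usage sketch, assuming an already-constructed `UploadManager` and that `FilePurpose` is exported from `seekrai.types`; the endpoint string and file names are illustrative, not taken from this diff:

```python
from pathlib import Path

from seekrai.filemanager import UploadManager
from seekrai.types import FilePurpose  # assumed export location


async def upload_batch(manager: UploadManager, purpose: FilePurpose) -> list:
    # One PUT request carrying all files as multipart parts named "files".
    return await manager.bulk_upload_async(
        "files/bulk",                               # illustrative endpoint path
        [Path("train.jsonl"), Path("eval.jsonl")],  # illustrative local files
        purpose=purpose,
    )
```

From synchronous code this can be driven with `asyncio.run(...)`; either way, the method's `finally` block closes the opened file streams and progress bars even when the request fails.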
seekrai/resources/__init__.py
CHANGED
@@ -1,4 +1,4 @@
-from seekrai.resources.agents import Agents
+from seekrai.resources.agents import AgentInference, Agents, AsyncAgents
 from seekrai.resources.alignment import Alignment, AsyncAlignment
 from seekrai.resources.chat import AsyncChat, Chat
 from seekrai.resources.completions import AsyncCompletions, Completions
@@ -10,6 +10,7 @@ from seekrai.resources.images import AsyncImages, Images
 from seekrai.resources.ingestion import AsyncIngestion, Ingestion
 from seekrai.resources.models import AsyncModels, Models
 from seekrai.resources.projects import AsyncProjects, Projects
+from seekrai.resources.vectordb import AsyncVectorDatabase, VectorDatabase


 __all__ = [
@@ -35,5 +36,9 @@ __all__ = [
     "Projects",
     "AsyncDeployments",
     "Deployments",
+    "AsyncAgents",
     "Agents",
+    "VectorDatabase",
+    "AsyncVectorDatabase",
+    "AgentInference",
 ]
seekrai/resources/agents/__init__.py
CHANGED
@@ -1,8 +1,13 @@
-from
+from seekrai.resources.agents.agent_inference import AgentInference, AsyncAgentInference
+from seekrai.resources.agents.agents import Agents, AsyncAgents
+from seekrai.resources.agents.threads import AgentThreads, AsyncAgentThreads

-from .agent_inference import AgentInference

-
-
-
-
+__all__ = [
+    "Agents",
+    "AgentInference",
+    "AsyncAgentInference",
+    "AsyncAgents",
+    "AgentThreads",
+    "AsyncAgentThreads",
+]
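After this change the agents subpackage re-exports its sync and async classes side by side, so downstream code can pull everything from one place (the names below are exactly the new `__all__`):

```python
from seekrai.resources.agents import (
    AgentInference,
    AgentThreads,
    Agents,
    AsyncAgentInference,
    AsyncAgents,
    AsyncAgentThreads,
)
```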
seekrai/resources/agents/agent_inference.py
CHANGED
@@ -1,70 +1,277 @@
-from typing import Any,
+from typing import Any, AsyncGenerator, Iterator, Union

 from seekrai.abstract import api_requestor
 from seekrai.seekrflow_response import SeekrFlowResponse
-from seekrai.types import SeekrFlowRequest
+from seekrai.types import Run, RunRequest, RunResponse, SeekrFlowRequest


 class AgentInference:
     def __init__(self, client: Any) -> None:
         self._client = client
+        self._requestor = api_requestor.APIRequestor(client=self._client)

     def run(
         self,
-        agent_id:
-
+        agent_id: str,
+        thread_id: str,
         *,
         stream: bool = False,
-        thread_id: Union[str, None] = None,
-        headers: Union[Dict[str, str], None] = None,
         **model_settings: Any,
-    ) -> Union[
+    ) -> Union[RunResponse, Iterator[Any]]:
         """
         Run an inference call on a deployed agent.

         Args:
-            agent_id (
-
+            agent_id (str): The unique identifier of the deployed agent.
+            thread_id (str): A thread identifier.
             stream (bool, optional): Whether to stream the response. Defaults to False.
-            thread_id (str, optional): An optional thread identifier.
-            headers (dict, optional): Optional HTTP headers to include in the request.
-                If provided, these will be merged with default headers.
             **model_settings: Additional parameters (such as temperature, max_tokens, etc).

         Returns:
             A dictionary with the response (if non-streaming) or an iterator over response chunks.
         """
-        payload
-        if thread_id is not None:
-            payload["thread_id"] = thread_id
+        payload = RunRequest(agent_id=agent_id).model_dump()
         payload.update(model_settings)
+        endpoint = f"threads/{thread_id}/runs"
+        if stream:
+            endpoint += "/stream"
+
+        response, _, _ = self._requestor.request(
+            options=SeekrFlowRequest(
+                method="POST",
+                url=endpoint,
+                params=payload,
+            ),
+            stream=stream,
+        )
+
+        if stream:
+            assert not isinstance(response, SeekrFlowResponse)
+            return (chunk.data for chunk in response)
+        else:
+            assert isinstance(response, SeekrFlowResponse)
+            return RunResponse(**response.data)
+
+    def cancel(self, agent_id: str, run_id: str, thread_id: str) -> dict[str, Any]:
+        """Cancels a Run that is in progress.
+
+        Args:
+            agent_id: Identifier for the agent performing the run.
+            run_id: Identifier for the run to be cancelled.
+            thread_id: Identifier for the thread used by the run.
+
+        Returns:
+            {'status': 'canceled', 'run_id': run_id} on success.
+        """
+        response, _, _ = self._requestor.request(
+            options=SeekrFlowRequest(
+                method="POST",
+                url=f"threads/{thread_id}/runs/{run_id}/cancel",
+                params=RunRequest(agent_id=agent_id).model_dump(),
+            )
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+        return response.data
+
+    def attach(self, run_id: str, thread_id: str) -> Iterator[Any]:
+        """Returns a stream of output from a Run.
+
+        Args:
+            run_id: Identifier for the Run.
+            thread_id: Identifier for the Thread used by the Run.
+
+        Returns:
+            An Iterator of streamed output from the Run.
+        """
+        response, _, _ = self._requestor.request(
+            options=SeekrFlowRequest(
+                method="GET",
+                url=f"threads/{thread_id}/runs/{run_id}/attach",
+            ),
+            stream=True,
+        )
+
+        assert not isinstance(response, SeekrFlowResponse)
+        return (chunk.data for chunk in response)
+
+    def retrieve(self, run_id: str, thread_id: str) -> Run:
+        """Retrieves a Run.
+
+        Args:
+            run_id: Identifier for the Run.
+            thread_id: Identifier for the Thread used by the Run.
+
+        Returns:
+            The Run whose id matches run_id.
+        """
+        response, _, _ = self._requestor.request(
+            options=SeekrFlowRequest(
+                method="GET",
+                url=f"threads/{thread_id}/runs/{run_id}",
+            )
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+        return Run(**response.data)
+
+    def list(self, thread_id: str) -> list[Run]:
+        """Retrieves a list of Runs relevant to a referenced Thread.
+
+        Args:
+            thread_id: Identifier for a Thread.
+
+        Returns:
+            A list of Runs that have leveraged the referenced Thread.
+        """
+        response, _, _ = self._requestor.request(
+            options=SeekrFlowRequest(
+                method="GET",
+                url=f"threads/{thread_id}/runs",
+            )
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+        return [Run(**run) for run in response.data]  # type: ignore
+
+
+class AsyncAgentInference:
+    def __init__(self, client: Any) -> None:
+        self._client = client
+        self._requestor = api_requestor.APIRequestor(client=self._client)

-
-
-
-
-
+    async def run(
+        self,
+        agent_id: str,
+        thread_id: str,
+        *,
+        stream: bool = False,
+        **model_settings: Any,
+    ) -> Union[RunResponse, AsyncGenerator[Any, None]]:
+        """
+        Run an inference call on a deployed agent.

-
-
-
+        Args:
+            agent_id (str): The unique identifier of the deployed agent.
+            thread_id (str): A thread identifier.
+            stream (bool, optional): Whether to stream the response. Defaults to False.
+            **model_settings: Additional parameters (such as temperature, max_tokens, etc).

-
-
+        Returns:
+            A dictionary with the response (if non-streaming) or an iterator over response chunks.
+        """
+        payload = RunRequest(agent_id=agent_id).model_dump()
+        payload.update(model_settings)
+        endpoint = f"threads/{thread_id}/runs"
+        if stream:
+            endpoint += "/stream"

-        response, _, _ =
+        response, _, _ = await self._requestor.arequest(
             options=SeekrFlowRequest(
                 method="POST",
                 url=endpoint,
                 params=payload,
-                headers=request_headers,
             ),
             stream=stream,
         )

         if stream:
             assert not isinstance(response, SeekrFlowResponse)
-
+
+            async def output() -> AsyncGenerator[Any, None]:
+                async for chunk in response:
+                    yield chunk.data
+
+            return output()
         else:
             assert isinstance(response, SeekrFlowResponse)
-            return response.data
+            return RunResponse(**response.data)
+
+    async def cancel(
+        self, agent_id: str, run_id: str, thread_id: str
+    ) -> dict[str, Any]:
+        """Cancels a Run that is in progress.
+
+        Args:
+            agent_id: Identifier for the agent performing the run.
+            run_id: Identifier for the run to be cancelled.
+            thread_id: Identifier for the thread used by the run.
+
+        Returns:
+            {'status': 'canceled', 'run_id': run_id} on success.
+        """
+        response, _, _ = await self._requestor.arequest(
+            options=SeekrFlowRequest(
+                method="POST",
+                url=f"threads/{thread_id}/runs/{run_id}/cancel",
+                params=RunRequest(agent_id=agent_id).model_dump(),
+            )
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+        return response.data
+
+    async def attach(self, run_id: str, thread_id: str) -> AsyncGenerator[Any, None]:
+        """Returns a stream of output from a Run.
+
+        Args:
+            run_id: Identifier for the Run.
+            thread_id: Identifier for the Thread used by the Run.
+
+        Returns:
+            An Iterator of streamed output from the Run.
+        """
+        response, _, _ = await self._requestor.arequest(
+            options=SeekrFlowRequest(
+                method="GET",
+                url=f"threads/{thread_id}/runs/{run_id}/attach",
+            ),
+            stream=True,
+        )
+
+        assert not isinstance(response, SeekrFlowResponse)
+
+        async def output() -> AsyncGenerator[Any, None]:
+            async for chunk in response:
+                yield chunk.data
+
+        return output()
+
+    async def retrieve(self, run_id: str, thread_id: str) -> Run:
+        """Retrieves a Run.
+
+        Args:
+            run_id: Identifier for the Run.
+            thread_id: Identifier for the Thread used by the Run.
+
+        Returns:
+            The Run whose id matches run_id.
+        """
+        response, _, _ = await self._requestor.arequest(
+            options=SeekrFlowRequest(
+                method="GET",
+                url=f"threads/{thread_id}/runs/{run_id}",
+            )
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+        return Run(**response.data)
+
+    async def list(self, thread_id: str) -> list[Run]:
+        """Retrieves a list of Runs relevant to a referenced Thread.
+
+        Args:
+            thread_id: Identifier for a Thread.
+
+        Returns:
+            A list of Runs that have leveraged the referenced Thread.
+        """
+        response, _, _ = await self._requestor.arequest(
+            options=SeekrFlowRequest(
+                method="GET",
+                url=f"threads/{thread_id}/runs",
+            )
+        )
+
+        assert isinstance(response, SeekrFlowResponse)
+        return [Run(**run) for run in response.data]  # type: ignore