agno-1.7.4-py3-none-any.whl → agno-1.7.6-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/agent/agent.py +28 -15
- agno/app/agui/async_router.py +5 -5
- agno/app/agui/sync_router.py +5 -5
- agno/app/agui/utils.py +84 -14
- agno/app/fastapi/app.py +1 -1
- agno/app/fastapi/async_router.py +67 -16
- agno/app/fastapi/sync_router.py +80 -14
- agno/document/chunking/row.py +39 -0
- agno/document/reader/base.py +0 -7
- agno/embedder/jina.py +73 -0
- agno/knowledge/agent.py +39 -2
- agno/knowledge/combined.py +1 -1
- agno/memory/agent.py +2 -2
- agno/memory/team.py +2 -2
- agno/models/aws/bedrock.py +311 -15
- agno/models/litellm/chat.py +12 -3
- agno/models/openai/chat.py +1 -22
- agno/models/openai/responses.py +5 -5
- agno/models/portkey/__init__.py +3 -0
- agno/models/portkey/portkey.py +88 -0
- agno/models/xai/xai.py +54 -0
- agno/run/v2/workflow.py +4 -0
- agno/storage/mysql.py +1 -0
- agno/storage/postgres.py +1 -0
- agno/storage/session/v2/workflow.py +29 -5
- agno/storage/singlestore.py +4 -1
- agno/storage/sqlite.py +0 -1
- agno/team/team.py +52 -22
- agno/tools/bitbucket.py +292 -0
- agno/tools/daytona.py +411 -63
- agno/tools/decorator.py +45 -2
- agno/tools/evm.py +123 -0
- agno/tools/function.py +16 -12
- agno/tools/linkup.py +54 -0
- agno/tools/mcp.py +10 -3
- agno/tools/mem0.py +15 -2
- agno/tools/postgres.py +175 -162
- agno/utils/log.py +16 -0
- agno/utils/pprint.py +2 -0
- agno/utils/string.py +14 -0
- agno/vectordb/pgvector/pgvector.py +4 -5
- agno/vectordb/surrealdb/__init__.py +3 -0
- agno/vectordb/surrealdb/surrealdb.py +493 -0
- agno/workflow/v2/workflow.py +144 -19
- agno/workflow/workflow.py +90 -63
- {agno-1.7.4.dist-info → agno-1.7.6.dist-info}/METADATA +19 -1
- {agno-1.7.4.dist-info → agno-1.7.6.dist-info}/RECORD +51 -42
- {agno-1.7.4.dist-info → agno-1.7.6.dist-info}/WHEEL +0 -0
- {agno-1.7.4.dist-info → agno-1.7.6.dist-info}/entry_points.txt +0 -0
- {agno-1.7.4.dist-info → agno-1.7.6.dist-info}/licenses/LICENSE +0 -0
- {agno-1.7.4.dist-info → agno-1.7.6.dist-info}/top_level.txt +0 -0
agno/workflow/v2/workflow.py
CHANGED

@@ -1,7 +1,20 @@
+import asyncio
 from dataclasses import dataclass
 from datetime import datetime
 from os import getenv
-from typing import
+from typing import (
+    Any,
+    AsyncIterator,
+    Awaitable,
+    Callable,
+    Dict,
+    Iterator,
+    List,
+    Literal,
+    Optional,
+    Union,
+    overload,
+)
 from uuid import uuid4
 
 from pydantic import BaseModel
@@ -47,7 +60,13 @@ from agno.workflow.v2.parallel import Parallel
 from agno.workflow.v2.router import Router
 from agno.workflow.v2.step import Step
 from agno.workflow.v2.steps import Steps
-from agno.workflow.v2.types import
+from agno.workflow.v2.types import (
+    StepInput,
+    StepMetrics,
+    StepOutput,
+    WorkflowExecutionInput,
+    WorkflowMetrics,
+)
 
 WorkflowSteps = Union[
     Callable[
@@ -536,9 +555,7 @@ class Workflow:
             workflow_run_response.content = f"Workflow execution failed: {e}"
 
         finally:
-
-            self.workflow_session.add_run(workflow_run_response)
-            self.write_to_storage()
+            self._save_run_to_storage(workflow_run_response)
 
         return workflow_run_response
 
@@ -722,11 +739,7 @@ class Workflow:
             yield self._handle_event(workflow_completed_event, workflow_run_response)
 
             # Store the completed workflow response
-
-            self.workflow_session.add_run(workflow_run_response)
-
-            # Save to storage after complete execution
-            self.write_to_storage()
+            self._save_run_to_storage(workflow_run_response)
 
     async def _acall_custom_function(
         self, func: Callable, workflow: "Workflow", execution_input: WorkflowExecutionInput, **kwargs: Any
@@ -808,7 +821,7 @@ class Workflow:
                     content += str(chunk)
                 workflow_run_response.content = content
             else:
-                workflow_run_response.content = self.
+                workflow_run_response.content = self._call_custom_function(self.steps, self, execution_input, **kwargs)
             workflow_run_response.status = RunStatus.completed
 
         else:
@@ -900,9 +913,7 @@ class Workflow:
             workflow_run_response.content = f"Workflow execution failed: {e}"
 
             # Store error response
-
-            self.workflow_session.add_run(workflow_run_response)
-            self.write_to_storage()
+            self._save_run_to_storage(workflow_run_response)
 
             return workflow_run_response
 
@@ -1095,11 +1106,7 @@ class Workflow:
             yield self._handle_event(workflow_completed_event, workflow_run_response)
 
             # Store the completed workflow response
-
-            self.workflow_session.add_run(workflow_run_response)
-
-            # Save to storage after complete execution
-            self.write_to_storage()
+            self._save_run_to_storage(workflow_run_response)
 
     def _update_workflow_session_state(self):
         if not self.workflow_session_state:
@@ -1118,6 +1125,96 @@ class Workflow:
 
         return self.workflow_session_state
 
+    async def _arun_background(
+        self,
+        message: Optional[Union[str, Dict[str, Any], List[Any], BaseModel]] = None,
+        additional_data: Optional[Dict[str, Any]] = None,
+        user_id: Optional[str] = None,
+        session_id: Optional[str] = None,
+        audio: Optional[List[Audio]] = None,
+        images: Optional[List[Image]] = None,
+        videos: Optional[List[Video]] = None,
+        **kwargs: Any,
+    ) -> WorkflowRunResponse:
+        """Execute workflow in background using asyncio.create_task()"""
+
+        if user_id is not None:
+            self.user_id = user_id
+        if session_id is not None:
+            self.session_id = session_id
+
+        if self.session_id is None:
+            self.session_id = str(uuid4())
+
+        if self.run_id is None:
+            self.run_id = str(uuid4())
+
+        self.initialize_workflow()
+        self.load_session()
+        self._prepare_steps()
+
+        # Create workflow run response with PENDING status
+        workflow_run_response = WorkflowRunResponse(
+            run_id=self.run_id,
+            session_id=self.session_id,
+            workflow_id=self.workflow_id,
+            workflow_name=self.name,
+            created_at=int(datetime.now().timestamp()),
+            status=RunStatus.pending,
+        )
+
+        # Store PENDING response immediately
+        self._save_run_to_storage(workflow_run_response)
+
+        # Prepare execution input
+        inputs = WorkflowExecutionInput(
+            message=message,
+            additional_data=additional_data,
+            audio=audio,  # type: ignore
+            images=images,  # type: ignore
+            videos=videos,  # type: ignore
+        )
+
+        self.update_agents_and_teams_session_info()
+
+        async def execute_workflow_background():
+            """Simple background execution"""
+            try:
+                # Update status to RUNNING and save
+                workflow_run_response.status = RunStatus.running
+                self._save_run_to_storage(workflow_run_response)
+
+                await self._aexecute(execution_input=inputs, workflow_run_response=workflow_run_response, **kwargs)
+
+                self._save_run_to_storage(workflow_run_response)
+
+                log_debug(f"Background execution completed with status: {workflow_run_response.status}")
+
+            except Exception as e:
+                logger.error(f"Background workflow execution failed: {e}")
+                workflow_run_response.status = RunStatus.error
+                workflow_run_response.content = f"Background execution failed: {str(e)}"
+                self._save_run_to_storage(workflow_run_response)
+
+        # Create and start asyncio task
+        loop = asyncio.get_running_loop()
+        loop.create_task(execute_workflow_background())
+
+        # Return SAME object that will be updated by background execution
+        return workflow_run_response
+
+    def get_run(self, run_id: str) -> Optional[WorkflowRunResponse]:
+        """Get the status and details of a background workflow run - SIMPLIFIED"""
+        if self.storage is not None and self.session_id is not None:
+            session = self.storage.read(session_id=self.session_id)
+            if session and isinstance(session, WorkflowSessionV2) and session.runs:
+                # Find the run by ID
+                for run in session.runs:
+                    if run.run_id == run_id:
+                        return run
+
+        return None
+
     @overload
     def run(
         self,
@@ -1130,6 +1227,7 @@ class Workflow:
         videos: Optional[List[Video]] = None,
         stream: Literal[False] = False,
         stream_intermediate_steps: Optional[bool] = None,
+        background: Optional[bool] = False,
     ) -> WorkflowRunResponse: ...
 
     @overload
@@ -1144,6 +1242,7 @@ class Workflow:
         videos: Optional[List[Video]] = None,
         stream: Literal[True] = True,
         stream_intermediate_steps: Optional[bool] = None,
+        background: Optional[bool] = False,
     ) -> Iterator[WorkflowRunResponseEvent]: ...
 
     def run(
@@ -1157,9 +1256,14 @@ class Workflow:
         videos: Optional[List[Video]] = None,
         stream: bool = False,
         stream_intermediate_steps: Optional[bool] = None,
+        background: Optional[bool] = False,
         **kwargs: Any,
     ) -> Union[WorkflowRunResponse, Iterator[WorkflowRunResponseEvent]]:
         """Execute the workflow synchronously with optional streaming"""
+
+        if background:
+            raise RuntimeError("Background execution is not supported for sync run()")
+
         self._set_debug()
 
         log_debug(f"Workflow Run Start: {self.name}", center=True)
@@ -1240,6 +1344,7 @@ class Workflow:
         videos: Optional[List[Video]] = None,
         stream: Literal[False] = False,
         stream_intermediate_steps: Optional[bool] = None,
+        background: Optional[bool] = False,
     ) -> WorkflowRunResponse: ...
 
     @overload
@@ -1254,6 +1359,7 @@ class Workflow:
         videos: Optional[List[Video]] = None,
         stream: Literal[True] = True,
         stream_intermediate_steps: Optional[bool] = None,
+        background: Optional[bool] = False,
     ) -> AsyncIterator[WorkflowRunResponseEvent]: ...
 
     async def arun(
@@ -1267,9 +1373,22 @@ class Workflow:
         videos: Optional[List[Video]] = None,
         stream: bool = False,
         stream_intermediate_steps: Optional[bool] = False,
+        background: Optional[bool] = False,
         **kwargs: Any,
     ) -> Union[WorkflowRunResponse, AsyncIterator[WorkflowRunResponseEvent]]:
         """Execute the workflow synchronously with optional streaming"""
+        if background:
+            return await self._arun_background(
+                message=message,
+                additional_data=additional_data,
+                user_id=user_id,
+                session_id=session_id,
+                audio=audio,
+                images=images,
+                videos=videos,
+                **kwargs,
+            )
+
         self._set_debug()
 
         log_debug(f"Async Workflow Run Start: {self.name}", center=True)
@@ -3103,6 +3222,12 @@ class Workflow:
         # Update session_state with workflow_session_state
        executor.workflow_session_state = self.workflow_session_state
 
+    def _save_run_to_storage(self, workflow_run_response: WorkflowRunResponse) -> None:
+        """Helper method to save workflow run response to storage"""
+        if self.workflow_session:
+            self.workflow_session.upsert_run(workflow_run_response)
+            self.write_to_storage()
+
     def update_agents_and_teams_session_info(self):
         """Update agents and teams with workflow session information"""
         log_debug("Updating agents and teams with session information")
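Usage note: the hunks above add background execution to the v2 Workflow. Below is a minimal sketch of how it might be used; it assumes an already-constructed v2 Workflow instance (steps, storage, and other construction details are omitted), and the message text and poll interval are illustrative. Per the diff, arun(..., background=True) returns a pending WorkflowRunResponse immediately and schedules the real work with asyncio.create_task(), while get_run(run_id) re-reads the run from storage.

import asyncio

from agno.workflow.v2.workflow import Workflow


async def poll_background_run(wf: Workflow) -> None:
    # Submit the run; it comes back immediately in the pending state while the
    # actual execution continues on the running event loop.
    run = await wf.arun(message="Summarize the latest report", background=True)
    print("submitted:", run.run_id, run.status)

    # Poll storage via the new get_run() helper; the stored run is updated by
    # the background task as it moves through running -> completed/error.
    # (In real code you would break once a terminal status is reported.)
    for _ in range(30):
        latest = wf.get_run(run.run_id)
        if latest is not None:
            print("status:", latest.status)
        await asyncio.sleep(1)
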
agno/workflow/workflow.py
CHANGED

@@ -5,7 +5,7 @@ import inspect
 from dataclasses import dataclass, field, fields
 from os import getenv
 from types import GeneratorType
-from typing import Any,
+from typing import Any, AsyncIterator, Callable, Dict, List, Optional, Union, cast, get_args
 from uuid import uuid4
 
 from pydantic import BaseModel
@@ -125,6 +125,8 @@ class Workflow:
         # Private attributes to store the run method and its parameters
         # The run function provided by the subclass
         self._subclass_run: Optional[Callable] = None
+        self._subclass_arun: Optional[Callable] = None
+
         # Parameters of the run function
         self._run_parameters: Optional[Dict[str, Any]] = None
         # Return type of the run function
@@ -242,7 +244,6 @@ class Workflow:
         return None
 
     # Add to workflow.py after the run_workflow method
-
     async def arun_workflow(self, **kwargs: Any):
         """Run the Workflow asynchronously"""
 
@@ -269,60 +270,14 @@ class Workflow:
 
         log_debug(f"Workflow Run Start: {self.run_id}", center=True)
         try:
-
-
-            self._subclass_run = cast(Callable, self._subclass_run)
-            if isasyncgenfunction(self._subclass_run) or isasyncgen(self._subclass_run):
-                result = self._subclass_run(**kwargs)
-            else:
-                result = await self._subclass_run(**kwargs)
+            self._subclass_arun = cast(Callable, self._subclass_arun)
+            result = await self._subclass_arun(**kwargs)
         except Exception as e:
             logger.error(f"Workflow.arun() failed: {e}")
             raise e
 
-        # Handle async iterator results
-        if isinstance(result, (AsyncIterator, AsyncGenerator)):
-            # Initialize the run_response content
-            self.run_response.content = ""
-
-            async def result_generator():
-                self.run_response = cast(RunResponse, self.run_response)
-                if isinstance(self.memory, WorkflowMemory):
-                    self.memory = cast(WorkflowMemory, self.memory)
-                elif isinstance(self.memory, Memory):
-                    self.memory = cast(Memory, self.memory)
-
-                async for item in result:
-                    if (
-                        isinstance(item, tuple(get_args(RunResponseEvent)))
-                        or isinstance(item, tuple(get_args(TeamRunResponseEvent)))
-                        or isinstance(item, tuple(get_args(WorkflowRunResponseEvent)))
-                        or isinstance(item, RunResponse)
-                    ):
-                        # Update the run_id, session_id and workflow_id of the RunResponseEvent
-                        item.run_id = self.run_id
-                        item.session_id = self.session_id
-                        item.workflow_id = self.workflow_id
-
-                        # Update the run_response with the content from the result
-                        if hasattr(item, "content") and item.content is not None and isinstance(item.content, str):
-                            self.run_response.content += item.content
-                    else:
-                        logger.warning(f"Workflow.arun() should only yield RunResponseEvent objects, got: {type(item)}")
-                    yield item
-
-                # Add the run to the memory
-                if isinstance(self.memory, WorkflowMemory):
-                    self.memory.add_run(WorkflowRun(input=self.run_input, response=self.run_response))
-                elif isinstance(self.memory, Memory):
-                    self.memory.add_run(session_id=self.session_id, run=self.run_response)  # type: ignore
-                # Write this run to the database
-                self.write_to_storage()
-                log_debug(f"Workflow Run End: {self.run_id}", center=True)
-
-            return result_generator()
         # Handle single RunResponse result
-
         if isinstance(result, RunResponse):
             # Update the result with the run_id, session_id and workflow_id of the workflow run
             result.run_id = self.run_id
             result.session_id = self.session_id
@@ -345,6 +300,66 @@ class Workflow:
             logger.warning(f"Workflow.arun() should only return RunResponse objects, got: {type(result)}")
             return None
 
+    async def arun_workflow_generator(self, **kwargs: Any) -> AsyncIterator[RunResponse]:
+        """Run the Workflow asynchronously for async generators"""
+
+        # Set mode, debug, workflow_id, session_id, initialize memory
+        self.set_storage_mode()
+        self.set_debug()
+        self.set_monitoring()
+        self.set_workflow_id()  # Ensure workflow_id is set
+        self.set_session_id()
+        self.initialize_memory()
+
+        # Create a run_id
+        self.run_id = str(uuid4())
+
+        # Set run_input, run_response
+        self.run_input = kwargs
+        self.run_response = RunResponse(run_id=self.run_id, session_id=self.session_id, workflow_id=self.workflow_id)
+
+        # Read existing session from storage
+        self.read_from_storage()
+
+        # Update the session_id for all Agent instances
+        self.update_agent_session_ids()
+
+        log_debug(f"Workflow Run Start: {self.run_id}", center=True)
+        # Initialize the run_response content
+        self.run_response.content = ""
+        try:
+            self._subclass_arun = cast(Callable, self._subclass_arun)
+            async for item in self._subclass_arun(**kwargs):
+                if (
+                    isinstance(item, tuple(get_args(RunResponseEvent)))
+                    or isinstance(item, tuple(get_args(TeamRunResponseEvent)))
+                    or isinstance(item, tuple(get_args(WorkflowRunResponseEvent)))
+                    or isinstance(item, RunResponse)
+                ):
+                    # Update the run_id, session_id and workflow_id of the RunResponseEvent
+                    item.run_id = self.run_id
+                    item.session_id = self.session_id
+                    item.workflow_id = self.workflow_id
+
+                    # Update the run_response with the content from the result
+                    if hasattr(item, "content") and item.content is not None and isinstance(item.content, str):
+                        self.run_response.content += item.content
+                else:
+                    logger.warning(f"Workflow.run() should only yield RunResponseEvent objects, got: {type(item)}")
+                yield item
+
+            # Add the run to the memory
+            if isinstance(self.memory, WorkflowMemory):
+                self.memory.add_run(WorkflowRun(input=self.run_input, response=self.run_response))
+            elif isinstance(self.memory, Memory):
+                self.memory.add_run(session_id=self.session_id, run=self.run_response)  # type: ignore
+            # Write this run to the database
+            self.write_to_storage()
+            log_debug(f"Workflow Run End: {self.run_id}", center=True)
+        except Exception as e:
+            logger.error(f"Workflow.arun() failed: {e}")
+            raise e
+
     async def arun(self, **kwargs: Any):
         """Async version of run() that calls arun_workflow()"""
         logger.error(f"{self.__class__.__name__}.arun() method not implemented.")
@@ -392,23 +407,32 @@
             self.memory = Memory()
 
     def update_run_method(self):
+        run_type = None
         # Update the run() method to call run_workflow() instead of the subclass's run()
         # First, check if the subclass has a run method
         # If the run() method has been overridden by the subclass,
         # then self.__class__.run is not Workflow.run will be True
-
-        if self.__class__.run is not Workflow.run or self.__class__.arun is not Workflow.arun:
+        if self.__class__.run is not Workflow.run:
             # Store the original run methods bound to the instance
-
-
-
-
-
-
-
-
-
+            self._subclass_run = self.__class__.run.__get__(self)
+            run_type = "sync"
+            # Get the parameters of the sync run method
+            sig = inspect.signature(self.__class__.run)
+
+        if self.__class__.arun is not Workflow.arun:
+            self._subclass_arun = self.__class__.arun.__get__(self)
+            run_type = "coroutine"
+
+            # Get the parameters of the async run method
+            sig = inspect.signature(self.__class__.arun)
+
+            # Check if the async method is a coroutine or async generator
+            from inspect import isasyncgenfunction
+
+            if isasyncgenfunction(self.__class__.arun):
+                run_type = "async_generator"
 
+        if run_type is not None:
             # Convert parameters to a serializable format
             self._run_parameters = {
                 param_name: {
@@ -445,12 +469,15 @@
         # This is so we call run_workflow() instead of the subclass's run()
         if run_type == "sync":
             object.__setattr__(self, "run", self.run_workflow.__get__(self))
-        elif run_type == "
+        elif run_type == "coroutine":
             object.__setattr__(self, "arun", self.arun_workflow.__get__(self))
+        elif run_type == "async_generator":
+            object.__setattr__(self, "arun", self.arun_workflow_generator.__get__(self))
         else:
            # If the subclass does not override the run method,
            # the Workflow.run() method will be called and will log an error
            self._subclass_run = self.run
+           self._subclass_arun = self.arun
 
            self._run_parameters = {}
            self._run_return_type = None

{agno-1.7.4.dist-info → agno-1.7.6.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: agno
-Version: 1.7.4
+Version: 1.7.6
 Summary: Agno: a lightweight library for building Multi-Agent Systems
 Author-email: Ashpreet Bedi <ashpreet@agno.com>
 License: Copyright (c) Agno, Inc.
@@ -443,6 +443,9 @@ Requires-Dist: opentelemetry-exporter-otlp-proto-grpc; extra == "arize"
 Requires-Dist: opentelemetry-distro; extra == "arize"
 Provides-Extra: langfuse
 Requires-Dist: langfuse; extra == "langfuse"
+Provides-Extra: aws-bedrock
+Requires-Dist: boto3; extra == "aws-bedrock"
+Requires-Dist: aioboto3; extra == "aws-bedrock"
 Provides-Extra: anthropic
 Requires-Dist: anthropic; extra == "anthropic"
 Provides-Extra: azure
@@ -472,6 +475,8 @@ Provides-Extra: ollama
 Requires-Dist: ollama; extra == "ollama"
 Provides-Extra: openai
 Requires-Dist: openai; extra == "openai"
+Provides-Extra: portkey
+Requires-Dist: portkey-ai; extra == "portkey"
 Provides-Extra: agentql
 Requires-Dist: agentql; extra == "agentql"
 Provides-Extra: apify
@@ -491,6 +496,8 @@ Provides-Extra: duckdb
 Requires-Dist: duckdb; extra == "duckdb"
 Provides-Extra: elevenlabs
 Requires-Dist: elevenlabs; extra == "elevenlabs"
+Provides-Extra: evm
+Requires-Dist: web3; extra == "evm"
 Provides-Extra: exa
 Requires-Dist: exa_py; extra == "exa"
 Provides-Extra: fal
@@ -521,6 +528,8 @@ Requires-Dist: newspaper4k; extra == "newspaper"
 Requires-Dist: lxml_html_clean; extra == "newspaper"
 Provides-Extra: opencv
 Requires-Dist: opencv-python; extra == "opencv"
+Provides-Extra: psycopg2
+Requires-Dist: psycopg2-binary; extra == "psycopg2"
 Provides-Extra: todoist
 Requires-Dist: todoist-api-python; extra == "todoist"
 Provides-Extra: valyu
@@ -533,6 +542,8 @@ Provides-Extra: youtube
 Requires-Dist: youtube_transcript_api; extra == "youtube"
 Provides-Extra: zep
 Requires-Dist: zep-cloud; extra == "zep"
+Provides-Extra: daytona
+Requires-Dist: daytona; extra == "daytona"
 Provides-Extra: oxylabs
 Requires-Dist: oxylabs; extra == "oxylabs"
 Provides-Extra: sql
@@ -573,6 +584,8 @@ Provides-Extra: clickhouse
 Requires-Dist: clickhouse-connect; extra == "clickhouse"
 Provides-Extra: pinecone
 Requires-Dist: pinecone==5.4.2; extra == "pinecone"
+Provides-Extra: surrealdb
+Requires-Dist: surrealdb>=1.0.4; extra == "surrealdb"
 Provides-Extra: pdf
 Requires-Dist: pypdf; extra == "pdf"
 Requires-Dist: rapidocr_onnxruntime; extra == "pdf"
@@ -601,6 +614,7 @@ Provides-Extra: aws
 Requires-Dist: agno-aws; extra == "aws"
 Requires-Dist: agno-docker; extra == "aws"
 Provides-Extra: models
+Requires-Dist: agno[aws-bedrock]; extra == "models"
 Requires-Dist: agno[anthropic]; extra == "models"
 Requires-Dist: agno[azure]; extra == "models"
 Requires-Dist: agno[cerebras]; extra == "models"
@@ -615,6 +629,7 @@ Requires-Dist: agno[meta]; extra == "models"
 Requires-Dist: agno[mistral]; extra == "models"
 Requires-Dist: agno[ollama]; extra == "models"
 Requires-Dist: agno[openai]; extra == "models"
+Requires-Dist: agno[portkey]; extra == "models"
 Provides-Extra: tools
 Requires-Dist: agno[apify]; extra == "tools"
 Requires-Dist: agno[brave]; extra == "tools"
@@ -632,6 +647,7 @@ Requires-Dist: agno[googlemaps]; extra == "tools"
 Requires-Dist: agno[todoist]; extra == "tools"
 Requires-Dist: agno[matplotlib]; extra == "tools"
 Requires-Dist: agno[elevenlabs]; extra == "tools"
+Requires-Dist: agno[evm]; extra == "tools"
 Requires-Dist: agno[fal]; extra == "tools"
 Requires-Dist: agno[webex]; extra == "tools"
 Requires-Dist: agno[mcp]; extra == "tools"
@@ -644,6 +660,7 @@ Requires-Dist: agno[oxylabs]; extra == "tools"
 Requires-Dist: agno[zep]; extra == "tools"
 Requires-Dist: agno[mem0]; extra == "tools"
 Requires-Dist: agno[google_bigquery]; extra == "tools"
+Requires-Dist: agno[psycopg2]; extra == "tools"
 Provides-Extra: storage
 Requires-Dist: agno[sql]; extra == "storage"
 Requires-Dist: agno[postgres]; extra == "storage"
@@ -664,6 +681,7 @@ Requires-Dist: agno[weaviate]; extra == "vectordbs"
 Requires-Dist: agno[milvusdb]; extra == "vectordbs"
 Requires-Dist: agno[clickhouse]; extra == "vectordbs"
 Requires-Dist: agno[pinecone]; extra == "vectordbs"
+Requires-Dist: agno[surrealdb]; extra == "vectordbs"
 Provides-Extra: knowledge
 Requires-Dist: agno[pdf]; extra == "knowledge"
 Requires-Dist: agno[docx]; extra == "knowledge"