agno 2.3.6__py3-none-any.whl → 2.3.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/agent/agent.py +540 -369
- agno/db/mongo/async_mongo.py +0 -24
- agno/db/mongo/mongo.py +0 -16
- agno/db/mysql/mysql.py +0 -19
- agno/db/postgres/async_postgres.py +23 -28
- agno/db/postgres/postgres.py +0 -23
- agno/db/redis/redis.py +0 -4
- agno/db/singlestore/singlestore.py +0 -11
- agno/db/sqlite/async_sqlite.py +0 -24
- agno/db/sqlite/sqlite.py +0 -20
- agno/db/utils.py +2 -0
- agno/models/base.py +168 -15
- agno/models/openai/responses.py +3 -2
- agno/models/response.py +1 -1
- agno/os/interfaces/a2a/utils.py +1 -1
- agno/os/middleware/jwt.py +8 -6
- agno/os/routers/evals/utils.py +13 -3
- agno/run/agent.py +17 -0
- agno/run/requirement.py +98 -0
- agno/run/team.py +10 -0
- agno/team/team.py +179 -96
- agno/tools/postgres.py +76 -36
- agno/tools/redshift.py +406 -0
- agno/tools/toolkit.py +25 -0
- agno/tools/workflow.py +8 -1
- agno/utils/events.py +5 -1
- agno/workflow/parallel.py +8 -2
- agno/workflow/step.py +3 -3
- {agno-2.3.6.dist-info → agno-2.3.8.dist-info}/METADATA +5 -2
- {agno-2.3.6.dist-info → agno-2.3.8.dist-info}/RECORD +33 -32
- agno/tools/memori.py +0 -339
- {agno-2.3.6.dist-info → agno-2.3.8.dist-info}/WHEEL +0 -0
- {agno-2.3.6.dist-info → agno-2.3.8.dist-info}/licenses/LICENSE +0 -0
- {agno-2.3.6.dist-info → agno-2.3.8.dist-info}/top_level.txt +0 -0
agno/team/team.py
CHANGED
@@ -3,6 +3,7 @@ from __future__ import annotations
 import asyncio
 import contextlib
 import json
+import time
 import warnings
 from collections import ChainMap, deque
 from copy import copy
@@ -36,7 +37,6 @@ from agno.compression.manager import CompressionManager
 from agno.db.base import AsyncBaseDb, BaseDb, SessionType, UserMemory
 from agno.exceptions import (
     InputCheckError,
-    ModelProviderError,
     OutputCheckError,
     RunCancelledException,
 )
@@ -115,7 +115,6 @@ from agno.utils.events import (
     create_team_run_cancelled_event,
     create_team_run_completed_event,
     create_team_run_content_completed_event,
-    create_team_run_error_event,
     create_team_run_output_content_event,
     create_team_run_started_event,
     create_team_session_summary_completed_event,
@@ -723,6 +722,8 @@ class Team:

         # List of MCP tools that were initialized on the last run
         self._mcp_tools_initialized_on_run: List[Any] = []
+        # List of connectable tools that were initialized on the last run
+        self._connectable_tools_initialized_on_run: List[Any] = []

         # Lazy-initialized shared thread pool executor for background tasks (memory, cultural knowledge, etc.)
         self._background_executor: Optional[Any] = None
@@ -1046,16 +1047,48 @@ class Team:
                 and any(c.__name__ in ["MCPTools", "MultiMCPTools"] for c in type(tool).__mro__)
                 and not tool.initialized  # type: ignore
             ):
-
-
-
+                try:
+                    # Connect the MCP server
+                    await tool.connect()  # type: ignore
+                    self._mcp_tools_initialized_on_run.append(tool)
+                except Exception as e:
+                    log_warning(f"Error connecting tool: {str(e)}")

     async def _disconnect_mcp_tools(self) -> None:
         """Disconnect the MCP tools from the agent."""
         for tool in self._mcp_tools_initialized_on_run:
-
+            try:
+                await tool.close()
+            except Exception as e:
+                log_warning(f"Error disconnecting tool: {str(e)}")
         self._mcp_tools_initialized_on_run = []

+    def _connect_connectable_tools(self) -> None:
+        """Connect tools that require connection management (e.g., database connections)."""
+        if self.tools:
+            for tool in self.tools:
+                if (
+                    hasattr(tool, "requires_connect")
+                    and tool.requires_connect
+                    and hasattr(tool, "connect")
+                    and tool not in self._connectable_tools_initialized_on_run
+                ):
+                    try:
+                        tool.connect()  # type: ignore
+                        self._connectable_tools_initialized_on_run.append(tool)
+                    except Exception as e:
+                        log_warning(f"Error connecting tool: {str(e)}")
+
+    def _disconnect_connectable_tools(self) -> None:
+        """Disconnect tools that require connection management."""
+        for tool in self._connectable_tools_initialized_on_run:
+            if hasattr(tool, "close"):
+                try:
+                    tool.close()  # type: ignore
+                except Exception as e:
+                    log_warning(f"Error disconnecting tool: {str(e)}")
+        self._connectable_tools_initialized_on_run = []
+
     def _execute_pre_hooks(
         self,
         hooks: Optional[List[Callable[..., Any]]],
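The new `_connect_connectable_tools` / `_disconnect_connectable_tools` lifecycle is duck-typed: any tool object that exposes a truthy `requires_connect` attribute and a `connect()` method is connected before tool preparation, and anything with a `close()` method is torn down in the run's `finally` block. A minimal sketch of a tool that would opt into this lifecycle; the class name and the SQLite connection are illustrative assumptions, not part of the package:

```python
# Hypothetical sketch of a tool that opts into the connect/close lifecycle above.
# Only the duck-typed members checked by _connect_connectable_tools are needed:
# `requires_connect`, `connect()`, and (optionally) `close()`.
import sqlite3
from typing import Optional


class MyDatabaseTools:
    requires_connect = True  # tells Team to call connect() before preparing tools

    def __init__(self, db_file: str = "example.db"):
        self.db_file = db_file
        self._conn: Optional[sqlite3.Connection] = None

    def connect(self) -> None:
        # Invoked by Team._connect_connectable_tools() at the start of a run
        self._conn = sqlite3.connect(self.db_file)

    def close(self) -> None:
        # Invoked by Team._disconnect_connectable_tools() in the run's finally block
        if self._conn is not None:
            self._conn.close()
            self._conn = None
```

The changes to `agno/tools/toolkit.py`, `agno/tools/postgres.py`, and the new `agno/tools/redshift.py` in the file list above are likely related to this lifecycle, though their contents are not shown here.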
@@ -1520,7 +1553,12 @@ class Team:

         # 4. Start memory creation in background thread
         memory_future = None
-        if
+        if (
+            run_messages.user_message is not None
+            and self.memory_manager is not None
+            and self.enable_user_memories
+            and not self.enable_agentic_memory
+        ):
             log_debug("Starting memory creation in background thread.")
             memory_future = self.background_executor.submit(
                 self._make_memories, run_messages=run_messages, user_id=user_id
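The single-line condition that previously gated background memory creation (truncated in this diff view) is replaced by an explicit multi-line check. A standalone restatement of the gate, for readability; the helper function below is illustrative and not part of the package:

```python
# Standalone restatement of the gate that now guards background memory creation
# (mirrors the condition added in the _run/_run_stream/_arun/_arun_stream hunks).
def should_create_memories_in_background(
    user_message, memory_manager, enable_user_memories: bool, enable_agentic_memory: bool
) -> bool:
    return (
        user_message is not None
        and memory_manager is not None
        and enable_user_memories
        and not enable_agentic_memory
    )


print(should_create_memories_in_background("hi", object(), True, False))  # True
print(should_create_memories_in_background(None, object(), True, False))  # False: no user message
print(should_create_memories_in_background("hi", object(), True, True))   # False: agentic memory handles it
```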
@@ -1625,6 +1663,8 @@ class Team:
             self._cleanup_and_store(run_response=run_response, session=session)
             return run_response
         finally:
+            # Always disconnect connectable tools
+            self._disconnect_connectable_tools()
             cleanup_run(run_response.run_id)  # type: ignore

     def _run_stream(
@@ -1725,7 +1765,12 @@ class Team:

         # 4. Start memory creation in background thread
         memory_future = None
-        if
+        if (
+            run_messages.user_message is not None
+            and self.memory_manager is not None
+            and self.enable_user_memories
+            and not self.enable_agentic_memory
+        ):
             log_debug("Starting memory creation in background thread.")
             memory_future = self.background_executor.submit(
                 self._make_memories, run_messages=run_messages, user_id=user_id
@@ -1911,6 +1956,8 @@ class Team:
             # Add the RunOutput to Team Session even when cancelled
             self._cleanup_and_store(run_response=run_response, session=session)
         finally:
+            # Always disconnect connectable tools
+            self._disconnect_connectable_tools()
             # Always clean up the run tracking
             cleanup_run(run_response.run_id)  # type: ignore

@@ -1925,7 +1972,6 @@ class Team:
         session_id: Optional[str] = None,
         session_state: Optional[Dict[str, Any]] = None,
         user_id: Optional[str] = None,
-        retries: Optional[int] = None,
         audio: Optional[Sequence[Audio]] = None,
         images: Optional[Sequence[Image]] = None,
         videos: Optional[Sequence[Video]] = None,
@@ -1953,7 +1999,6 @@ class Team:
         session_state: Optional[Dict[str, Any]] = None,
         run_context: Optional[RunContext] = None,
         user_id: Optional[str] = None,
-        retries: Optional[int] = None,
         audio: Optional[Sequence[Audio]] = None,
         images: Optional[Sequence[Image]] = None,
         videos: Optional[Sequence[Video]] = None,
@@ -1982,7 +2027,6 @@ class Team:
         session_state: Optional[Dict[str, Any]] = None,
         run_context: Optional[RunContext] = None,
         user_id: Optional[str] = None,
-        retries: Optional[int] = None,
         audio: Optional[Sequence[Audio]] = None,
         images: Optional[Sequence[Image]] = None,
         videos: Optional[Sequence[Video]] = None,
@@ -2021,6 +2065,7 @@ class Team:
             DeprecationWarning,
             stacklevel=2,
         )
+        yield_run_output = yield_run_output or yield_run_response  # For backwards compatibility

         background_tasks = kwargs.pop("background_tasks", None)
         if background_tasks is not None:
@@ -2158,18 +2203,11 @@ class Team:
         run_response.metrics = Metrics()
         run_response.metrics.start_timer()

-        #
-
-
-        # Run the team
-        last_exception = None
-        num_attempts = retries + 1
-
-        yield_run_output = bool(yield_run_output or yield_run_response)  # For backwards compatibility
+        # Set up retry logic
+        num_attempts = self.retries + 1

         for attempt in range(num_attempts):
-
-
+            log_debug(f"Retrying Team run {run_id}. Attempt {attempt + 1} of {num_attempts}...")
             # Run the team
             try:
                 if stream:
@@ -2208,18 +2246,6 @@ class Team:
             except (InputCheckError, OutputCheckError) as e:
                 log_error(f"Validation failed: {str(e)} | Check: {e.check_trigger}")
                 raise e
-            except ModelProviderError as e:
-                import time
-
-                log_warning(f"Attempt {attempt + 1}/{num_attempts} failed: {str(e)}")
-
-                last_exception = e
-                if attempt < num_attempts - 1:  # Don't sleep on the last attempt
-                    if self.exponential_backoff:
-                        delay = 2**attempt * self.delay_between_retries
-                    else:
-                        delay = self.delay_between_retries
-                    time.sleep(delay)
             except KeyboardInterrupt:
                 run_response.content = "Operation cancelled by user"
                 run_response.status = RunStatus.cancelled
@@ -2232,21 +2258,24 @@ class Team:
                 )
             else:
                 return run_response
+            except Exception as e:
+                # Check if this is the last attempt
+                if attempt < num_attempts - 1:
+                    # Calculate delay with exponential backoff if enabled
+                    if self.exponential_backoff:
+                        delay = self.delay_between_retries * (2**attempt)
+                    else:
+                        delay = self.delay_between_retries

-
-
-
-
-
-
-                return generator_wrapper(create_team_run_error_event(run_response, error=str(last_exception)))
-
-            raise last_exception
-        else:
-            if stream:
-                return generator_wrapper(create_team_run_error_event(run_response, error=str(last_exception)))
+                    log_warning(f"Attempt {attempt + 1}/{num_attempts} failed: {str(e)}. Retrying in {delay}s...")
+                    time.sleep(delay)
+                else:
+                    # Final attempt failed - re-raise the exception
+                    log_error(f"All {num_attempts} attempts failed. Final error: {str(e)}")
+                    raise e

-
+        # If we get here, all retries failed
+        raise Exception(f"Failed after {num_attempts} attempts.")

     async def _arun(
         self,
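With the `ModelProviderError`-specific handling removed, any exception other than input/output check failures and `KeyboardInterrupt` is now retried up to `Team.retries` additional times, with the delay taken from `Team.delay_between_retries` and doubled per attempt when `Team.exponential_backoff` is set; the final failure is re-raised. A standalone sketch of the resulting backoff schedule (not library code):

```python
# Standalone illustration of the retry delay schedule introduced above:
# delay = delay_between_retries * (2 ** attempt) when exponential_backoff is enabled.
retries = 3                  # Team.retries
delay_between_retries = 1.0  # Team.delay_between_retries, in seconds
exponential_backoff = True   # Team.exponential_backoff

num_attempts = retries + 1
for attempt in range(num_attempts - 1):  # the final attempt re-raises instead of sleeping
    delay = delay_between_retries * (2**attempt) if exponential_backoff else delay_between_retries
    print(f"attempt {attempt + 1} failed -> retrying in {delay}s")
# -> retrying in 1.0s, 2.0s, 4.0s
```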
@@ -2373,7 +2402,12 @@ class Team:

         # 6. Start memory creation in background task
         memory_task = None
-        if
+        if (
+            run_messages.user_message is not None
+            and self.memory_manager is not None
+            and self.enable_user_memories
+            and not self.enable_agentic_memory
+        ):
             log_debug("Starting memory creation in background task.")
             memory_task = asyncio.create_task(self._amake_memories(run_messages=run_messages, user_id=user_id))

@@ -2475,6 +2509,8 @@ class Team:

             return run_response
         finally:
+            # Always disconnect connectable tools
+            self._disconnect_connectable_tools()
             await self._disconnect_mcp_tools()
             # Cancel the memory task if it's still running
             if memory_task is not None and not memory_task.done():
@@ -2609,7 +2645,12 @@ class Team:

         # 7. Start memory creation in background task
         memory_task = None
-        if
+        if (
+            run_messages.user_message is not None
+            and self.memory_manager is not None
+            and self.enable_user_memories
+            and not self.enable_agentic_memory
+        ):
             log_debug("Starting memory creation in background task.")
             memory_task = asyncio.create_task(self._amake_memories(run_messages=run_messages, user_id=user_id))

@@ -2801,6 +2842,8 @@ class Team:
             await self._acleanup_and_store(run_response=run_response, session=team_session)

         finally:
+            # Always disconnect connectable tools
+            self._disconnect_connectable_tools()
             await self._disconnect_mcp_tools()
             # Cancel the memory task if it's still running
             if memory_task is not None and not memory_task.done():
@@ -2825,7 +2868,6 @@ class Team:
         session_state: Optional[Dict[str, Any]] = None,
         run_context: Optional[RunContext] = None,
         user_id: Optional[str] = None,
-        retries: Optional[int] = None,
         audio: Optional[Sequence[Audio]] = None,
         images: Optional[Sequence[Image]] = None,
         videos: Optional[Sequence[Video]] = None,
@@ -2853,7 +2895,6 @@ class Team:
         session_state: Optional[Dict[str, Any]] = None,
         run_context: Optional[RunContext] = None,
         user_id: Optional[str] = None,
-        retries: Optional[int] = None,
         audio: Optional[Sequence[Audio]] = None,
         images: Optional[Sequence[Image]] = None,
         videos: Optional[Sequence[Video]] = None,
@@ -2882,7 +2923,6 @@ class Team:
         session_state: Optional[Dict[str, Any]] = None,
         run_context: Optional[RunContext] = None,
         user_id: Optional[str] = None,
-        retries: Optional[int] = None,
         audio: Optional[Sequence[Audio]] = None,
         images: Optional[Sequence[Image]] = None,
         videos: Optional[Sequence[Video]] = None,
@@ -2917,6 +2957,8 @@ class Team:
             stacklevel=2,
         )

+        yield_run_output = yield_run_output or yield_run_response  # For backwards compatibility
+
         background_tasks = kwargs.pop("background_tasks", None)
         if background_tasks is not None:
             from fastapi import BackgroundTasks
@@ -3042,20 +3084,18 @@ class Team:
         run_response.metrics = Metrics()
         run_response.metrics.start_timer()

-        # If no retries are set, use the team's default retries
-        retries = retries if retries is not None else self.retries
-
-        # Run the team
-        last_exception = None
-        num_attempts = retries + 1
-
         yield_run_output = bool(yield_run_output or yield_run_response)  # For backwards compatibility

+        # Resolve retry parameters
+        num_attempts = self.retries + 1
+
         for attempt in range(num_attempts):
+            log_debug(f"Retrying Team run {run_id}. Attempt {attempt + 1} of {num_attempts}...")
+
             # Run the team
             try:
                 if stream:
-
+                    return self._arun_stream(  # type: ignore
                         input=validated_input,
                         run_response=run_response,
                         run_context=run_context,
@@ -3071,7 +3111,6 @@ class Team:
                         background_tasks=background_tasks,
                         **kwargs,
                     )
-                    return response_iterator  # type: ignore
                 else:
                     return self._arun(  # type: ignore
                         input=validated_input,
@@ -3091,17 +3130,6 @@ class Team:
             except (InputCheckError, OutputCheckError) as e:
                 log_error(f"Validation failed: {str(e)} | Check: {e.check_trigger}")
                 raise e
-            except ModelProviderError as e:
-                log_warning(f"Attempt {attempt + 1}/{num_attempts} failed: {str(e)}")
-                last_exception = e
-                if attempt < num_attempts - 1:  # Don't sleep on the last attempt
-                    if self.exponential_backoff:
-                        delay = 2**attempt * self.delay_between_retries
-                    else:
-                        delay = self.delay_between_retries
-                    import time
-
-                    time.sleep(delay)
             except KeyboardInterrupt:
                 run_response.content = "Operation cancelled by user"
                 run_response.status = RunStatus.cancelled
@@ -3114,21 +3142,25 @@ class Team:
                 )
             else:
                 return run_response
+            except Exception as e:
+                # Check if this is the last attempt
+                if attempt < num_attempts - 1:
+                    # Calculate delay with exponential backoff if enabled
+                    if self.exponential_backoff:
+                        delay = self.delay_between_retries * (2**attempt)
+                    else:
+                        delay = self.delay_between_retries

-
-
-
-
-
-
-
-
-            raise last_exception
-        else:
-            if stream:
-                return async_generator_wrapper(create_team_run_error_event(run_response, error=str(last_exception)))
+                    log_warning(f"Attempt {attempt + 1}/{num_attempts} failed: {str(e)}. Retrying in {delay}s...")
+                    time.sleep(delay)
+                    continue
+                else:
+                    # Final attempt failed - re-raise the exception
+                    log_error(f"All {num_attempts} attempts failed. Final error: {str(e)}")
+                    raise e

-
+        # If we get here, all retries failed
+        raise Exception(f"Failed after {num_attempts} attempts.")

     def _update_run_response(
         self,
@@ -3779,7 +3811,12 @@ class Team:
         user_message_str = (
             run_messages.user_message.get_content_string() if run_messages.user_message is not None else None
         )
-        if
+        if (
+            user_message_str is not None
+            and user_message_str.strip() != ""
+            and self.memory_manager is not None
+            and self.enable_user_memories
+        ):
             log_debug("Managing user memories")
             self.memory_manager.create_user_memories(
                 message=user_message_str,
@@ -3795,7 +3832,12 @@ class Team:
         user_message_str = (
             run_messages.user_message.get_content_string() if run_messages.user_message is not None else None
         )
-        if
+        if (
+            user_message_str is not None
+            and user_message_str.strip() != ""
+            and self.memory_manager is not None
+            and self.enable_user_memories
+        ):
             log_debug("Managing user memories")
             await self.memory_manager.acreate_user_memories(
                 message=user_message_str,
@@ -5333,6 +5375,9 @@ class Team:
         add_session_state_to_context: Optional[bool] = None,
         check_mcp_tools: bool = True,
     ) -> List[Union[Function, dict]]:
+        # Connect tools that require connection management
+        self._connect_connectable_tools()
+
         # Prepare tools
         _tools: List[Union[Toolkit, Callable, Function, Dict]] = []

@@ -5382,6 +5427,7 @@ class Team:
                         run_response=run_response,
                         knowledge_filters=run_context.knowledge_filters,
                         async_mode=async_mode,
+                        run_context=run_context,
                     )
                 )
             else:
@@ -5390,6 +5436,7 @@ class Team:
                         run_response=run_response,
                         knowledge_filters=run_context.knowledge_filters,
                         async_mode=async_mode,
+                        run_context=run_context,
                     )
                 )

@@ -6579,7 +6626,10 @@ class Team:
             retrieval_timer = Timer()
             retrieval_timer.start()
             docs_from_knowledge = self.get_relevant_docs_from_knowledge(
-                query=user_msg_content,
+                query=user_msg_content,
+                filters=run_context.knowledge_filters,
+                run_context=run_context,
+                **kwargs,
             )
             if docs_from_knowledge is not None:
                 references = MessageReferences(
@@ -6734,7 +6784,10 @@ class Team:
             retrieval_timer = Timer()
             retrieval_timer.start()
             docs_from_knowledge = await self.aget_relevant_docs_from_knowledge(
-                query=user_msg_content,
+                query=user_msg_content,
+                filters=run_context.knowledge_filters,
+                run_context=run_context,
+                **kwargs,
             )
             if docs_from_knowledge is not None:
                 references = MessageReferences(
@@ -8844,12 +8897,13 @@ class Team:
             Optional[List[UserMemory]]: The user memories.
         """
         if self.memory_manager is None:
-
+            self._set_memory_manager()
+
         user_id = user_id if user_id is not None else self.user_id
         if user_id is None:
             user_id = "default"

-        return self.memory_manager.get_user_memories(user_id=user_id)
+        return self.memory_manager.get_user_memories(user_id=user_id)  # type: ignore

     async def aget_user_memories(self, user_id: Optional[str] = None) -> Optional[List[UserMemory]]:
         """Get the user memories for the given user ID.
@@ -8860,12 +8914,13 @@ class Team:
             Optional[List[UserMemory]]: The user memories.
         """
         if self.memory_manager is None:
-
+            self._set_memory_manager()
+
         user_id = user_id if user_id is not None else self.user_id
         if user_id is None:
             user_id = "default"

-        return await self.memory_manager.aget_user_memories(user_id=user_id)
+        return await self.memory_manager.aget_user_memories(user_id=user_id)  # type: ignore

     ###########################################################################
     # Handle reasoning content
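`get_user_memories` and `aget_user_memories` now lazily create a memory manager via `_set_memory_manager()` instead of bailing out when none is configured. A hedged usage sketch; it assumes an already-constructed `Team` whose storage the lazily created memory manager can read from, and the helper function is illustrative:

```python
# Hedged usage sketch: `team` is assumed to be an existing Team instance with a
# database configured so the lazily created memory manager has data to read.
from typing import Optional

from agno.team.team import Team


def print_user_memories(team: Team, user_id: Optional[str] = None) -> None:
    # get_user_memories now initializes a memory manager on demand when none is set
    memories = team.get_user_memories(user_id=user_id)  # user_id falls back to "default"
    for memory in memories or []:
        print(memory)
```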
@@ -9000,11 +9055,15 @@ class Team:
         query: str,
         num_documents: Optional[int] = None,
         filters: Optional[Union[Dict[str, Any], List[FilterExpr]]] = None,
+        run_context: Optional[RunContext] = None,
         **kwargs,
     ) -> Optional[List[Union[Dict[str, Any], str]]]:
         """Return a list of references from the knowledge base"""
         from agno.knowledge.document import Document

+        # Extract dependencies from run_context if available
+        dependencies = run_context.dependencies if run_context else None
+
         if num_documents is None and self.knowledge is not None:
             num_documents = self.knowledge.max_results

@@ -9036,6 +9095,11 @@ class Team:
             knowledge_retriever_kwargs = {"team": self}
             if "filters" in sig.parameters:
                 knowledge_retriever_kwargs["filters"] = filters
+            if "run_context" in sig.parameters:
+                knowledge_retriever_kwargs["run_context"] = run_context
+            elif "dependencies" in sig.parameters:
+                # Backward compatibility: support dependencies parameter
+                knowledge_retriever_kwargs["dependencies"] = dependencies
             knowledge_retriever_kwargs.update({"query": query, "num_documents": num_documents, **kwargs})
             return self.knowledge_retriever(**knowledge_retriever_kwargs)
         except Exception as e:
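A custom `knowledge_retriever` only receives the arguments its signature actually declares: `filters`, the new `run_context`, or, for backward compatibility, `dependencies` extracted from the run context. A hedged sketch of a retriever signature that opts into `run_context`; the function name and body are illustrative, not part of the package:

```python
# Hedged sketch of a custom knowledge retriever. Team inspects the signature and
# passes "filters", "run_context", or "dependencies" only if the parameter exists;
# "team", "query", and "num_documents" are always passed.
from typing import Any, Dict, List, Optional, Union


def my_knowledge_retriever(
    team,                              # the Team instance
    query: str,
    num_documents: Optional[int] = None,
    filters: Optional[Dict[str, Any]] = None,
    run_context=None,                  # opted in: receives the current RunContext
    **kwargs,
) -> Optional[List[Union[Dict[str, Any], str]]]:
    # Illustrative body only; a real retriever would query a vector store here.
    return [{"content": f"stub result for: {query}"}]
```

An older retriever that declares `dependencies` instead keeps working through the `elif "dependencies" in sig.parameters` branch above.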
@@ -9067,11 +9131,15 @@ class Team:
         query: str,
         num_documents: Optional[int] = None,
         filters: Optional[Union[Dict[str, Any], List[FilterExpr]]] = None,
+        run_context: Optional[RunContext] = None,
         **kwargs,
     ) -> Optional[List[Union[Dict[str, Any], str]]]:
         """Get relevant documents from knowledge base asynchronously."""
         from agno.knowledge.document import Document

+        # Extract dependencies from run_context if available
+        dependencies = run_context.dependencies if run_context else None
+
         if num_documents is None and self.knowledge is not None:
             num_documents = self.knowledge.max_results

@@ -9103,6 +9171,11 @@ class Team:
             knowledge_retriever_kwargs = {"team": self}
             if "filters" in sig.parameters:
                 knowledge_retriever_kwargs["filters"] = filters
+            if "run_context" in sig.parameters:
+                knowledge_retriever_kwargs["run_context"] = run_context
+            elif "dependencies" in sig.parameters:
+                # Backward compatibility: support dependencies parameter
+                knowledge_retriever_kwargs["dependencies"] = dependencies
             knowledge_retriever_kwargs.update({"query": query, "num_documents": num_documents, **kwargs})

             result = self.knowledge_retriever(**knowledge_retriever_kwargs)
@@ -9187,6 +9260,7 @@ class Team:
         run_response: TeamRunOutput,
         knowledge_filters: Optional[Union[Dict[str, Any], List[FilterExpr]]] = None,
         async_mode: bool = False,
+        run_context: Optional[RunContext] = None,
     ) -> Function:
         """Factory function to create a search_knowledge_base function with filters."""

@@ -9202,7 +9276,9 @@ class Team:
             # Get the relevant documents from the knowledge base, passing filters
             retrieval_timer = Timer()
             retrieval_timer.start()
-            docs_from_knowledge = self.get_relevant_docs_from_knowledge(
+            docs_from_knowledge = self.get_relevant_docs_from_knowledge(
+                query=query, filters=knowledge_filters, run_context=run_context
+            )
             if docs_from_knowledge is not None:
                 references = MessageReferences(
                     query=query, references=docs_from_knowledge, time=round(retrieval_timer.elapsed, 4)
@@ -9229,7 +9305,9 @@ class Team:
             """
             retrieval_timer = Timer()
             retrieval_timer.start()
-            docs_from_knowledge = await self.aget_relevant_docs_from_knowledge(
+            docs_from_knowledge = await self.aget_relevant_docs_from_knowledge(
+                query=query, filters=knowledge_filters, run_context=run_context
+            )
             if docs_from_knowledge is not None:
                 references = MessageReferences(
                     query=query, references=docs_from_knowledge, time=round(retrieval_timer.elapsed, 4)
@@ -9256,6 +9334,7 @@ class Team:
         run_response: TeamRunOutput,
         knowledge_filters: Optional[Union[Dict[str, Any], List[FilterExpr]]] = None,
         async_mode: bool = False,
+        run_context: Optional[RunContext] = None,
     ) -> Function:
         """Factory function to create a search_knowledge_base function with filters."""

@@ -9275,7 +9354,9 @@ class Team:
             # Get the relevant documents from the knowledge base, passing filters
             retrieval_timer = Timer()
             retrieval_timer.start()
-            docs_from_knowledge = self.get_relevant_docs_from_knowledge(
+            docs_from_knowledge = self.get_relevant_docs_from_knowledge(
+                query=query, filters=search_filters, run_context=run_context
+            )
             if docs_from_knowledge is not None:
                 references = MessageReferences(
                     query=query, references=docs_from_knowledge, time=round(retrieval_timer.elapsed, 4)
@@ -9306,7 +9387,9 @@ class Team:

             retrieval_timer = Timer()
             retrieval_timer.start()
-            docs_from_knowledge = await self.aget_relevant_docs_from_knowledge(
+            docs_from_knowledge = await self.aget_relevant_docs_from_knowledge(
+                query=query, filters=search_filters, run_context=run_context
+            )
             if docs_from_knowledge is not None:
                 references = MessageReferences(
                     query=query, references=docs_from_knowledge, time=round(retrieval_timer.elapsed, 4)