agno 2.3.8__py3-none-any.whl → 2.3.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/agent/agent.py +134 -94
- agno/db/mysql/__init__.py +2 -1
- agno/db/mysql/async_mysql.py +2888 -0
- agno/db/mysql/mysql.py +17 -8
- agno/db/mysql/utils.py +139 -6
- agno/db/postgres/async_postgres.py +10 -5
- agno/db/postgres/postgres.py +7 -2
- agno/db/schemas/evals.py +1 -0
- agno/db/singlestore/singlestore.py +5 -1
- agno/db/sqlite/async_sqlite.py +3 -3
- agno/eval/__init__.py +10 -0
- agno/eval/accuracy.py +11 -8
- agno/eval/agent_as_judge.py +861 -0
- agno/eval/base.py +29 -0
- agno/eval/utils.py +2 -1
- agno/exceptions.py +7 -0
- agno/knowledge/embedder/openai.py +8 -8
- agno/knowledge/knowledge.py +1142 -176
- agno/media.py +22 -6
- agno/models/aws/claude.py +8 -7
- agno/models/base.py +61 -2
- agno/models/deepseek/deepseek.py +67 -0
- agno/models/google/gemini.py +134 -51
- agno/models/google/utils.py +22 -0
- agno/models/message.py +5 -0
- agno/models/openai/chat.py +4 -0
- agno/os/app.py +64 -74
- agno/os/interfaces/a2a/router.py +3 -4
- agno/os/interfaces/agui/router.py +2 -0
- agno/os/router.py +3 -1607
- agno/os/routers/agents/__init__.py +3 -0
- agno/os/routers/agents/router.py +581 -0
- agno/os/routers/agents/schema.py +261 -0
- agno/os/routers/evals/evals.py +26 -6
- agno/os/routers/evals/schemas.py +34 -2
- agno/os/routers/evals/utils.py +77 -18
- agno/os/routers/knowledge/knowledge.py +1 -1
- agno/os/routers/teams/__init__.py +3 -0
- agno/os/routers/teams/router.py +496 -0
- agno/os/routers/teams/schema.py +257 -0
- agno/os/routers/workflows/__init__.py +3 -0
- agno/os/routers/workflows/router.py +545 -0
- agno/os/routers/workflows/schema.py +75 -0
- agno/os/schema.py +1 -559
- agno/os/utils.py +139 -2
- agno/team/team.py +87 -24
- agno/tools/file_generation.py +12 -6
- agno/tools/firecrawl.py +15 -7
- agno/tools/function.py +37 -23
- agno/tools/shopify.py +1519 -0
- agno/tools/spotify.py +2 -5
- agno/utils/hooks.py +64 -5
- agno/utils/http.py +2 -2
- agno/utils/media.py +11 -1
- agno/utils/print_response/agent.py +8 -0
- agno/utils/print_response/team.py +8 -0
- agno/vectordb/pgvector/pgvector.py +88 -51
- agno/workflow/parallel.py +5 -3
- agno/workflow/step.py +14 -2
- agno/workflow/types.py +38 -2
- agno/workflow/workflow.py +12 -4
- {agno-2.3.8.dist-info → agno-2.3.10.dist-info}/METADATA +7 -2
- {agno-2.3.8.dist-info → agno-2.3.10.dist-info}/RECORD +66 -52
- {agno-2.3.8.dist-info → agno-2.3.10.dist-info}/WHEEL +0 -0
- {agno-2.3.8.dist-info → agno-2.3.10.dist-info}/licenses/LICENSE +0 -0
- {agno-2.3.8.dist-info → agno-2.3.10.dist-info}/top_level.txt +0 -0
agno/os/utils.py
CHANGED
@@ -1,7 +1,8 @@
+import json
 from datetime import datetime, timezone
 from typing import Any, Callable, Dict, List, Optional, Set, Type, Union
 
-from fastapi import FastAPI, HTTPException, UploadFile
+from fastapi import FastAPI, HTTPException, Request, UploadFile
 from fastapi.routing import APIRoute, APIRouter
 from pydantic import BaseModel, create_model
 from starlette.middleware.cors import CORSMiddleware
@@ -13,13 +14,149 @@ from agno.media import Audio, Image, Video
 from agno.media import File as FileMedia
 from agno.models.message import Message
 from agno.os.config import AgentOSConfig
+from agno.run.agent import RunOutputEvent
+from agno.run.team import TeamRunOutputEvent
+from agno.run.workflow import WorkflowRunOutputEvent
 from agno.team.team import Team
 from agno.tools import Toolkit
 from agno.tools.function import Function
-from agno.utils.log import logger
+from agno.utils.log import log_warning, logger
 from agno.workflow.workflow import Workflow
 
 
+async def get_request_kwargs(request: Request, endpoint_func: Callable) -> Dict[str, Any]:
+    """Given a Request and an endpoint function, return a dictionary with all extra form data fields.
+    Args:
+        request: The FastAPI Request object
+        endpoint_func: The function exposing the endpoint that received the request
+
+    Returns:
+        A dictionary of kwargs
+    """
+    import inspect
+
+    form_data = await request.form()
+    sig = inspect.signature(endpoint_func)
+    known_fields = set(sig.parameters.keys())
+    kwargs: Dict[str, Any] = {key: value for key, value in form_data.items() if key not in known_fields}
+
+    # Handle JSON parameters. They are passed as strings and need to be deserialized.
+    if session_state := kwargs.get("session_state"):
+        try:
+            if isinstance(session_state, str):
+                session_state_dict = json.loads(session_state)  # type: ignore
+                kwargs["session_state"] = session_state_dict
+        except json.JSONDecodeError:
+            kwargs.pop("session_state")
+            log_warning(f"Invalid session_state parameter couldn't be loaded: {session_state}")
+
+    if dependencies := kwargs.get("dependencies"):
+        try:
+            if isinstance(dependencies, str):
+                dependencies_dict = json.loads(dependencies)  # type: ignore
+                kwargs["dependencies"] = dependencies_dict
+        except json.JSONDecodeError:
+            kwargs.pop("dependencies")
+            log_warning(f"Invalid dependencies parameter couldn't be loaded: {dependencies}")
+
+    if metadata := kwargs.get("metadata"):
+        try:
+            if isinstance(metadata, str):
+                metadata_dict = json.loads(metadata)  # type: ignore
+                kwargs["metadata"] = metadata_dict
+        except json.JSONDecodeError:
+            kwargs.pop("metadata")
+            log_warning(f"Invalid metadata parameter couldn't be loaded: {metadata}")
+
+    if knowledge_filters := kwargs.get("knowledge_filters"):
+        try:
+            if isinstance(knowledge_filters, str):
+                knowledge_filters_dict = json.loads(knowledge_filters)  # type: ignore
+
+                # Try to deserialize FilterExpr objects
+                from agno.filters import from_dict
+
+                # Check if it's a single FilterExpr dict or a list of FilterExpr dicts
+                if isinstance(knowledge_filters_dict, dict) and "op" in knowledge_filters_dict:
+                    # Single FilterExpr - convert to list format
+                    kwargs["knowledge_filters"] = [from_dict(knowledge_filters_dict)]
+                elif isinstance(knowledge_filters_dict, list):
+                    # List of FilterExprs or mixed content
+                    deserialized = []
+                    for item in knowledge_filters_dict:
+                        if isinstance(item, dict) and "op" in item:
+                            deserialized.append(from_dict(item))
+                        else:
+                            # Keep non-FilterExpr items as-is
+                            deserialized.append(item)
+                    kwargs["knowledge_filters"] = deserialized
+                else:
+                    # Regular dict filter
+                    kwargs["knowledge_filters"] = knowledge_filters_dict
+        except json.JSONDecodeError:
+            kwargs.pop("knowledge_filters")
+            log_warning(f"Invalid knowledge_filters parameter couldn't be loaded: {knowledge_filters}")
+        except ValueError as e:
+            # Filter deserialization failed
+            kwargs.pop("knowledge_filters")
+            log_warning(f"Invalid FilterExpr in knowledge_filters: {e}")
+
+    # Handle output_schema - convert JSON schema to dynamic Pydantic model
+    if output_schema := kwargs.get("output_schema"):
+        try:
+            if isinstance(output_schema, str):
+                from agno.os.utils import json_schema_to_pydantic_model
+
+                schema_dict = json.loads(output_schema)
+                dynamic_model = json_schema_to_pydantic_model(schema_dict)
+                kwargs["output_schema"] = dynamic_model
+        except json.JSONDecodeError:
+            kwargs.pop("output_schema")
+            log_warning(f"Invalid output_schema JSON: {output_schema}")
+        except Exception as e:
+            kwargs.pop("output_schema")
+            log_warning(f"Failed to create output_schema model: {e}")
+
+    # Parse boolean and null values
+    for key, value in kwargs.items():
+        if isinstance(value, str) and value.lower() in ["true", "false"]:
+            kwargs[key] = value.lower() == "true"
+        elif isinstance(value, str) and value.lower() in ["null", "none"]:
+            kwargs[key] = None
+
+    return kwargs
+
+
+def format_sse_event(event: Union[RunOutputEvent, TeamRunOutputEvent, WorkflowRunOutputEvent]) -> str:
+    """Parse JSON data into SSE-compliant format.
+
+    Args:
+        event_dict: Dictionary containing the event data
+
+    Returns:
+        SSE-formatted response:
+
+        ```
+        event: EventName
+        data: { ... }
+
+        event: AnotherEventName
+        data: { ... }
+        ```
+    """
+    try:
+        # Parse the JSON to extract the event type
+        event_type = event.event or "message"
+
+        # Serialize to valid JSON with double quotes and no newlines
+        clean_json = event.to_json(separators=(",", ":"), indent=None)
+
+        return f"event: {event_type}\ndata: {clean_json}\n\n"
+    except json.JSONDecodeError:
+        clean_json = event.to_json(separators=(",", ":"), indent=None)
+        return f"event: message\ndata: {clean_json}\n\n"
+
+
 async def get_db(
     dbs: dict[str, list[Union[BaseDb, AsyncBaseDb]]], db_id: Optional[str] = None, table: Optional[str] = None
 ) -> Union[BaseDb, AsyncBaseDb]:
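
The two helpers above are new public functions in `agno.os.utils`. A rough usage sketch follows; it is illustrative only and not taken from the package: the route path, form field, and agent setup are assumptions, and the exact streaming call shape of `Agent.arun` should be checked against the installed agno version.

```python
# Hypothetical custom route combining the new helpers (sketch, not from the package).
from fastapi import FastAPI, Form, Request
from fastapi.responses import StreamingResponse

from agno.agent import Agent
from agno.os.utils import format_sse_event, get_request_kwargs

app = FastAPI()
agent = Agent()  # model/tool configuration omitted; assumed to be set elsewhere


@app.post("/custom-run")
async def custom_run(request: Request, message: str = Form(...)):
    # Collect form fields that are not declared parameters of this endpoint,
    # e.g. session_state, dependencies or metadata sent as JSON strings.
    extra_kwargs = await get_request_kwargs(request, custom_run)

    async def event_stream():
        # Assumed streaming call shape; check Agent.arun in your agno version.
        # Each event is framed as "event: <type>\ndata: <json>\n\n".
        async for event in agent.arun(message, stream=True, **extra_kwargs):
            yield format_sse_event(event)

    return StreamingResponse(event_stream(), media_type="text/event-stream")
```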
agno/team/team.py
CHANGED
@@ -11,6 +11,7 @@ from dataclasses import dataclass
 from os import getenv
 from textwrap import dedent
 from typing import (
+    TYPE_CHECKING,
     Any,
     AsyncIterator,
     Callable,
@@ -32,6 +33,9 @@ from uuid import uuid4
 
 from pydantic import BaseModel
 
+if TYPE_CHECKING:
+    from agno.eval.base import BaseEval
+
 from agno.agent import Agent
 from agno.compression.manager import CompressionManager
 from agno.db.base import AsyncBaseDb, BaseDb, SessionType, UserMemory
@@ -123,7 +127,13 @@ from agno.utils.events import (
     create_team_tool_call_started_event,
     handle_event,
 )
-from agno.utils.hooks import
+from agno.utils.hooks import (
+    copy_args_for_background,
+    filter_hook_args,
+    normalize_post_hooks,
+    normalize_pre_hooks,
+    should_run_hook_in_background,
+)
 from agno.utils.knowledge import get_agentic_or_user_search_filters
 from agno.utils.log import (
     log_debug,
@@ -266,6 +276,8 @@ class Team:
     system_message: Optional[Union[str, Callable, Message]] = None
     # Role for the system message
     system_message_role: str = "system"
+    # Introduction for the team
+    introduction: Optional[str] = None
 
     # If True, resolve the session_state, dependencies, and metadata in the user and system messages
     resolve_in_context: bool = True
@@ -342,9 +354,9 @@
 
     # --- Team Hooks ---
     # Functions called right after team session is loaded, before processing starts
-    pre_hooks: Optional[List[Union[Callable[..., Any], BaseGuardrail]]] = None
+    pre_hooks: Optional[List[Union[Callable[..., Any], BaseGuardrail, "BaseEval"]]] = None
     # Functions called after output is generated but before the response is returned
-    post_hooks: Optional[List[Union[Callable[..., Any], BaseGuardrail]]] = None
+    post_hooks: Optional[List[Union[Callable[..., Any], BaseGuardrail, "BaseEval"]]] = None
     # If True, run hooks as FastAPI background tasks (non-blocking). Set by AgentOS.
     _run_hooks_in_background: Optional[bool] = None
 
@@ -486,6 +498,7 @@
         add_member_tools_to_context: bool = False,
         system_message: Optional[Union[str, Callable, Message]] = None,
         system_message_role: str = "system",
+        introduction: Optional[str] = None,
         additional_input: Optional[List[Union[str, Dict, BaseModel, Message]]] = None,
         dependencies: Optional[Dict[str, Any]] = None,
         add_dependencies_to_context: bool = False,
@@ -512,8 +525,8 @@
         tool_call_limit: Optional[int] = None,
         tool_choice: Optional[Union[str, Dict[str, Any]]] = None,
         tool_hooks: Optional[List[Callable]] = None,
-        pre_hooks: Optional[List[Union[Callable[..., Any], BaseGuardrail]]] = None,
-        post_hooks: Optional[List[Union[Callable[..., Any], BaseGuardrail]]] = None,
+        pre_hooks: Optional[List[Union[Callable[..., Any], BaseGuardrail, "BaseEval"]]] = None,
+        post_hooks: Optional[List[Union[Callable[..., Any], BaseGuardrail, "BaseEval"]]] = None,
         input_schema: Optional[Type[BaseModel]] = None,
         output_schema: Optional[Type[BaseModel]] = None,
         parser_model: Optional[Union[Model, str]] = None,
@@ -611,6 +624,7 @@
         self.add_member_tools_to_context = add_member_tools_to_context
         self.system_message = system_message
        self.system_message_role = system_message_role
+        self.introduction = introduction
         self.additional_input = additional_input
 
         self.dependencies = dependencies
@@ -1527,6 +1541,8 @@
             add_history_to_context=add_history_to_context,
             add_session_state_to_context=add_session_state_to_context,
             add_dependencies_to_context=add_dependencies_to_context,
+            stream=False,
+            stream_events=False,
         )
 
         # 3. Prepare run messages
@@ -1739,6 +1755,8 @@
             add_history_to_context=add_history_to_context,
             add_session_state_to_context=add_session_state_to_context,
             add_dependencies_to_context=add_dependencies_to_context,
+            stream=True,
+            stream_events=stream_events,
         )
 
         # 3. Prepare run messages
@@ -1972,6 +1990,7 @@
         session_id: Optional[str] = None,
         session_state: Optional[Dict[str, Any]] = None,
         user_id: Optional[str] = None,
+        run_id: Optional[str] = None,
         audio: Optional[Sequence[Audio]] = None,
         images: Optional[Sequence[Image]] = None,
         videos: Optional[Sequence[Video]] = None,
@@ -1999,6 +2018,7 @@
         session_state: Optional[Dict[str, Any]] = None,
         run_context: Optional[RunContext] = None,
         user_id: Optional[str] = None,
+        run_id: Optional[str] = None,
         audio: Optional[Sequence[Audio]] = None,
         images: Optional[Sequence[Image]] = None,
         videos: Optional[Sequence[Video]] = None,
@@ -2026,6 +2046,7 @@
         session_id: Optional[str] = None,
         session_state: Optional[Dict[str, Any]] = None,
         run_context: Optional[RunContext] = None,
+        run_id: Optional[str] = None,
         user_id: Optional[str] = None,
         audio: Optional[Sequence[Audio]] = None,
         images: Optional[Sequence[Image]] = None,
@@ -2047,8 +2068,8 @@
         if self._has_async_db():
             raise Exception("run() is not supported with an async DB. Please use arun() instead.")
 
-        #
-        run_id = str(uuid4())
+        # Set the id for the run and register it immediately for cancellation tracking
+        run_id = run_id or str(uuid4())
         register_run(run_id)
 
         # Initialize Team
@@ -2079,9 +2100,9 @@
         # Normalise hook & guardails
         if not self._hooks_normalised:
             if self.pre_hooks:
-                self.pre_hooks =
+                self.pre_hooks = normalize_pre_hooks(self.pre_hooks)  # type: ignore
             if self.post_hooks:
-                self.post_hooks =
+                self.post_hooks = normalize_post_hooks(self.post_hooks)  # type: ignore
             self._hooks_normalised = True
 
         session_id, user_id = self._initialize_session(session_id=session_id, user_id=user_id)
@@ -2165,9 +2186,6 @@
         if stream_events is None:
             stream_events = False if self.stream_events is None else self.stream_events
 
-        self.stream = self.stream or stream
-        self.stream_events = self.stream_events or stream_events
-
         self.model = cast(Model, self.model)
 
         if self.metadata is not None:
@@ -2377,6 +2395,8 @@
             add_history_to_context=add_history_to_context,
             add_dependencies_to_context=add_dependencies_to_context,
             add_session_state_to_context=add_session_state_to_context,
+            stream=False,
+            stream_events=False,
         )
 
         # 5. Prepare run messages
@@ -2621,6 +2641,10 @@
             files=run_input.files,
             debug_mode=debug_mode,
             add_history_to_context=add_history_to_context,
+            add_dependencies_to_context=add_dependencies_to_context,
+            add_session_state_to_context=add_session_state_to_context,
+            stream=True,
+            stream_events=stream_events,
         )
 
         # 6. Prepare run messages
@@ -2866,6 +2890,7 @@
         stream_intermediate_steps: Optional[bool] = None,
         session_id: Optional[str] = None,
         session_state: Optional[Dict[str, Any]] = None,
+        run_id: Optional[str] = None,
         run_context: Optional[RunContext] = None,
         user_id: Optional[str] = None,
         audio: Optional[Sequence[Audio]] = None,
@@ -2893,6 +2918,7 @@
         stream_intermediate_steps: Optional[bool] = None,
         session_id: Optional[str] = None,
         session_state: Optional[Dict[str, Any]] = None,
+        run_id: Optional[str] = None,
         run_context: Optional[RunContext] = None,
         user_id: Optional[str] = None,
         audio: Optional[Sequence[Audio]] = None,
@@ -2921,6 +2947,7 @@
         stream_intermediate_steps: Optional[bool] = None,
         session_id: Optional[str] = None,
         session_state: Optional[Dict[str, Any]] = None,
+        run_id: Optional[str] = None,
         run_context: Optional[RunContext] = None,
         user_id: Optional[str] = None,
         audio: Optional[Sequence[Audio]] = None,
@@ -2941,8 +2968,8 @@
     ) -> Union[TeamRunOutput, AsyncIterator[Union[RunOutputEvent, TeamRunOutputEvent]]]:
         """Run the Team asynchronously and return the response."""
 
-        #
-        run_id = str(uuid4())
+        # Set the id for the run and register it immediately for cancellation tracking
+        run_id = run_id or str(uuid4())
         register_run(run_id)
 
         if (add_history_to_context or self.add_history_to_context) and not self.db and not self.parent_team_id:
@@ -2971,9 +2998,9 @@
         # Normalise hook & guardails
         if not self._hooks_normalised:
             if self.pre_hooks:
-                self.pre_hooks =
+                self.pre_hooks = normalize_pre_hooks(self.pre_hooks, async_mode=True)  # type: ignore
             if self.post_hooks:
-                self.post_hooks =
+                self.post_hooks = normalize_post_hooks(self.post_hooks, async_mode=True)  # type: ignore
             self._hooks_normalised = True
 
         session_id, user_id = self._initialize_session(session_id=session_id, user_id=user_id)
@@ -3026,9 +3053,6 @@
         if stream_events is None:
             stream_events = False if self.stream_events is None else self.stream_events
 
-        self.stream = self.stream or stream
-        self.stream_events = self.stream_events or stream_events
-
         self.model = cast(Model, self.model)
 
         if self.metadata is not None:
@@ -3090,8 +3114,6 @@
         num_attempts = self.retries + 1
 
         for attempt in range(num_attempts):
-            log_debug(f"Retrying Team run {run_id}. Attempt {attempt + 1} of {num_attempts}...")
-
             # Run the team
             try:
                 if stream:
@@ -4244,6 +4266,7 @@
         session_id: Optional[str] = None,
         session_state: Optional[Dict[str, Any]] = None,
         user_id: Optional[str] = None,
+        run_id: Optional[str] = None,
         audio: Optional[Sequence[Audio]] = None,
         images: Optional[Sequence[Image]] = None,
         videos: Optional[Sequence[Video]] = None,
@@ -4316,6 +4339,7 @@
             session_id=session_id,
             session_state=session_state,
             user_id=user_id,
+            run_id=run_id,
             audio=audio,
             images=images,
             videos=videos,
@@ -4344,6 +4368,7 @@
             session_id=session_id,
             session_state=session_state,
             user_id=user_id,
+            run_id=run_id,
             audio=audio,
             images=images,
             videos=videos,
@@ -4367,6 +4392,7 @@
         session_id: Optional[str] = None,
         session_state: Optional[Dict[str, Any]] = None,
         user_id: Optional[str] = None,
+        run_id: Optional[str] = None,
         audio: Optional[Sequence[Audio]] = None,
         images: Optional[Sequence[Image]] = None,
         videos: Optional[Sequence[Video]] = None,
@@ -4434,6 +4460,7 @@
             session_id=session_id,
             session_state=session_state,
             user_id=user_id,
+            run_id=run_id,
             audio=audio,
             images=images,
             videos=videos,
@@ -4462,6 +4489,7 @@
             session_id=session_id,
             session_state=session_state,
             user_id=user_id,
+            run_id=run_id,
             audio=audio,
             images=images,
             videos=videos,
@@ -5373,6 +5401,8 @@
         add_history_to_context: Optional[bool] = None,
         add_dependencies_to_context: Optional[bool] = None,
         add_session_state_to_context: Optional[bool] = None,
+        stream: Optional[bool] = None,
+        stream_events: Optional[bool] = None,
         check_mcp_tools: bool = True,
     ) -> List[Union[Function, dict]]:
         # Connect tools that require connection management
@@ -5467,8 +5497,8 @@
                 team_run_context=team_run_context,
                 input=user_message_content,
                 user_id=user_id,
-                stream=
-                stream_events=
+                stream=stream or False,
+                stream_events=stream_events or False,
                 async_mode=async_mode,
                 images=images,  # type: ignore
                 videos=videos,  # type: ignore
@@ -8207,6 +8237,20 @@
                 metadata=self.metadata,
                 created_at=int(time()),
             )
+            if self.introduction is not None:
+                from uuid import uuid4
+
+                team_session.upsert_run(
+                    TeamRunOutput(
+                        run_id=str(uuid4()),
+                        team_id=self.id,
+                        session_id=session_id,
+                        user_id=user_id,
+                        team_name=self.name,
+                        content=self.introduction,
+                        messages=[Message(role=self.model.assistant_message_role, content=self.introduction)],  # type: ignore
+                    )
+                )
 
         # Cache the session if relevant
         if team_session is not None and self.cache_session:
@@ -8239,15 +8283,34 @@
         # Create new session if none found
         if team_session is None:
             log_debug(f"Creating new TeamSession: {session_id}")
+            session_data = {}
+            if self.session_state is not None:
+                from copy import deepcopy
+
+                session_data["session_state"] = deepcopy(self.session_state)
             team_session = TeamSession(
                 session_id=session_id,
                 team_id=self.id,
                 user_id=user_id,
                 team_data=self._get_team_data(),
-                session_data=
+                session_data=session_data,
                 metadata=self.metadata,
                 created_at=int(time()),
             )
+            if self.introduction is not None:
+                from uuid import uuid4
+
+                team_session.upsert_run(
+                    TeamRunOutput(
+                        run_id=str(uuid4()),
+                        team_id=self.id,
+                        session_id=session_id,
+                        user_id=user_id,
+                        team_name=self.name,
+                        content=self.introduction,
+                        messages=[Message(role=self.model.assistant_message_role, content=self.introduction)],  # type: ignore
+                    )
+                )
 
         # Cache the session if relevant
         if team_session is not None and self.cache_session:
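
The team.py changes add an `introduction` option (seeded into new sessions as an assistant message) and let callers pass their own `run_id` through `run`/`arun`. A minimal sketch follows, assuming agno 2.3.10 with a default model provider configured via environment variables; the member agent and prompts are placeholders.

```python
# Illustrative sketch, not from the diff. Assumes agno>=2.3.10 and a configured
# default model provider (e.g. OPENAI_API_KEY); member setup is a placeholder.
from uuid import uuid4

from agno.agent import Agent
from agno.team.team import Team

team = Team(
    members=[Agent(name="Researcher")],
    # New in 2.3.10: seeds each newly created session with this assistant message.
    introduction="Hi, we're the research team. Ask us anything.",
)

# Also new: a caller-supplied run_id is honored instead of always being generated,
# so the run can be tracked (or cancelled) by a known identifier up front.
my_run_id = str(uuid4())
response = team.run("Summarize the changes in agno 2.3.10.", run_id=my_run_id)
print(response.run_id == my_run_id)  # expected: True
```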
agno/tools/file_generation.py
CHANGED
@@ -108,14 +108,16 @@ class FileGenerationTools(Toolkit):
         # Save file to disk (if output_directory is set)
         file_path = self._save_file_to_disk(json_content, filename)
 
+        content_bytes = json_content.encode("utf-8")
+
         # Create FileArtifact
         file_artifact = File(
             id=str(uuid4()),
-            content=
+            content=content_bytes,
             mime_type="application/json",
             file_type="json",
             filename=filename,
-            size=len(
+            size=len(content_bytes),
             filepath=file_path if file_path else None,
         )
 
@@ -195,14 +197,16 @@ class FileGenerationTools(Toolkit):
         # Save file to disk (if output_directory is set)
         file_path = self._save_file_to_disk(csv_content, filename)
 
+        content_bytes = csv_content.encode("utf-8")
+
         # Create FileArtifact
         file_artifact = File(
             id=str(uuid4()),
-            content=
+            content=content_bytes,
             mime_type="text/csv",
             file_type="csv",
             filename=filename,
-            size=len(
+            size=len(content_bytes),
             filepath=file_path if file_path else None,
         )
 
@@ -325,14 +329,16 @@ class FileGenerationTools(Toolkit):
         # Save file to disk (if output_directory is set)
         file_path = self._save_file_to_disk(content, filename)
 
+        content_bytes = content.encode("utf-8")
+
         # Create FileArtifact
         file_artifact = File(
             id=str(uuid4()),
-            content=
+            content=content_bytes,
             mime_type="text/plain",
             file_type="txt",
             filename=filename,
-            size=len(
+            size=len(content_bytes),
             filepath=file_path if file_path else None,
         )
 
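
These hunks encode the generated text once and reuse the bytes for both `content` and `size`, so the reported size is the byte length of the file rather than its character count. A small standalone illustration of why those differ:

```python
# File.size is now computed from the encoded bytes, not the Python string.
text = "naïve résumé — officiel"
print(len(text))                   # number of characters
print(len(text.encode("utf-8")))   # number of bytes actually stored on disk
```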
agno/tools/firecrawl.py
CHANGED
@@ -101,8 +101,10 @@ class FirecrawlTools(Toolkit):
             The results of the crawling.
         """
         params: Dict[str, Any] = {}
-        if self.limit
-            params["limit"] = self.limit
+        if self.limit is not None:
+            params["limit"] = self.limit
+        elif limit is not None:
+            params["limit"] = limit
         if self.formats:
             params["scrape_options"] = ScrapeOptions(formats=self.formats)  # type: ignore
 
@@ -129,15 +131,21 @@
             limit (int): The maximum number of results to return.
         """
         params: Dict[str, Any] = {}
-        if self.limit
-            params["limit"] = self.limit
+        if self.limit is not None:
+            params["limit"] = self.limit
+        elif limit is not None:
+            params["limit"] = limit
         if self.formats:
             params["scrape_options"] = ScrapeOptions(formats=self.formats)  # type: ignore
         if self.search_params:
             params.update(self.search_params)
 
         search_result = self.app.search(query, **params)
-
-
+
+        if hasattr(search_result, "success"):
+            if search_result.success:
+                return json.dumps(search_result.data, cls=CustomJSONEncoder)
+            else:
+                return f"Error searching with the Firecrawl tool: {search_result.error}"
         else:
-            return
+            return json.dumps(search_result.model_dump(), cls=CustomJSONEncoder)