agno 2.3.2__py3-none-any.whl → 2.3.3__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as published to their public registry. It is provided for informational purposes only and reflects the changes between the package versions as they appear in that registry.
- agno/agent/agent.py +513 -185
- agno/compression/__init__.py +3 -0
- agno/compression/manager.py +176 -0
- agno/db/dynamo/dynamo.py +11 -0
- agno/db/firestore/firestore.py +5 -1
- agno/db/gcs_json/gcs_json_db.py +5 -2
- agno/db/in_memory/in_memory_db.py +5 -2
- agno/db/json/json_db.py +5 -1
- agno/db/migrations/manager.py +4 -4
- agno/db/mongo/async_mongo.py +158 -34
- agno/db/mongo/mongo.py +6 -2
- agno/db/mysql/mysql.py +48 -54
- agno/db/postgres/async_postgres.py +61 -51
- agno/db/postgres/postgres.py +42 -50
- agno/db/redis/redis.py +5 -0
- agno/db/redis/utils.py +5 -5
- agno/db/singlestore/singlestore.py +99 -108
- agno/db/sqlite/async_sqlite.py +29 -27
- agno/db/sqlite/sqlite.py +30 -26
- agno/knowledge/reader/pdf_reader.py +2 -2
- agno/knowledge/reader/tavily_reader.py +0 -1
- agno/memory/__init__.py +14 -1
- agno/memory/manager.py +217 -4
- agno/memory/strategies/__init__.py +15 -0
- agno/memory/strategies/base.py +67 -0
- agno/memory/strategies/summarize.py +196 -0
- agno/memory/strategies/types.py +37 -0
- agno/models/anthropic/claude.py +84 -80
- agno/models/aws/bedrock.py +38 -16
- agno/models/aws/claude.py +97 -277
- agno/models/azure/ai_foundry.py +8 -4
- agno/models/base.py +101 -14
- agno/models/cerebras/cerebras.py +18 -7
- agno/models/cerebras/cerebras_openai.py +4 -2
- agno/models/cohere/chat.py +8 -4
- agno/models/google/gemini.py +578 -20
- agno/models/groq/groq.py +18 -5
- agno/models/huggingface/huggingface.py +17 -6
- agno/models/ibm/watsonx.py +16 -6
- agno/models/litellm/chat.py +17 -7
- agno/models/message.py +19 -5
- agno/models/meta/llama.py +20 -4
- agno/models/mistral/mistral.py +8 -4
- agno/models/ollama/chat.py +17 -6
- agno/models/openai/chat.py +17 -6
- agno/models/openai/responses.py +23 -9
- agno/models/vertexai/claude.py +99 -5
- agno/os/interfaces/agui/router.py +1 -0
- agno/os/interfaces/agui/utils.py +97 -57
- agno/os/router.py +16 -0
- agno/os/routers/memory/memory.py +143 -0
- agno/os/routers/memory/schemas.py +26 -0
- agno/os/schema.py +21 -6
- agno/os/utils.py +134 -10
- agno/run/base.py +2 -1
- agno/run/workflow.py +1 -1
- agno/team/team.py +565 -219
- agno/tools/mcp/mcp.py +1 -1
- agno/utils/agent.py +119 -1
- agno/utils/models/ai_foundry.py +9 -2
- agno/utils/models/claude.py +12 -5
- agno/utils/models/cohere.py +9 -2
- agno/utils/models/llama.py +9 -2
- agno/utils/models/mistral.py +4 -2
- agno/utils/print_response/agent.py +37 -2
- agno/utils/print_response/team.py +52 -0
- agno/utils/tokens.py +41 -0
- agno/workflow/types.py +2 -2
- {agno-2.3.2.dist-info → agno-2.3.3.dist-info}/METADATA +45 -40
- {agno-2.3.2.dist-info → agno-2.3.3.dist-info}/RECORD +73 -66
- {agno-2.3.2.dist-info → agno-2.3.3.dist-info}/WHEEL +0 -0
- {agno-2.3.2.dist-info → agno-2.3.3.dist-info}/licenses/LICENSE +0 -0
- {agno-2.3.2.dist-info → agno-2.3.3.dist-info}/top_level.txt +0 -0
agno/compression/manager.py
ADDED
@@ -0,0 +1,176 @@
+import asyncio
+from dataclasses import dataclass, field
+from textwrap import dedent
+from typing import Any, Dict, List, Optional
+
+from agno.models.base import Model
+from agno.models.message import Message
+from agno.models.utils import get_model
+from agno.utils.log import log_error, log_info, log_warning
+
+DEFAULT_COMPRESSION_PROMPT = dedent("""\
+You are compressing tool call results to save context space while preserving critical information.
+
+Your goal: Extract only the essential information from the tool output.
+
+ALWAYS PRESERVE:
+• Specific facts: numbers, statistics, amounts, prices, quantities, metrics
+• Temporal data: dates, times, timestamps (use short format: "Oct 21 2025")
+• Entities: people, companies, products, locations, organizations
+• Identifiers: URLs, IDs, codes, technical identifiers, versions
+• Key quotes, citations, sources (if relevant to agent's task)
+
+COMPRESS TO ESSENTIALS:
+• Descriptions: keep only key attributes
+• Explanations: distill to core insight
+• Lists: focus on most relevant items based on agent context
+• Background: minimal context only if critical
+
+REMOVE ENTIRELY:
+• Introductions, conclusions, transitions
+• Hedging language ("might", "possibly", "appears to")
+• Meta-commentary ("According to", "The results show")
+• Formatting artifacts (markdown, HTML, JSON structure)
+• Redundant or repetitive information
+• Generic background not relevant to agent's task
+• Promotional language, filler words
+
+EXAMPLE:
+Input: "According to recent market analysis and industry reports, OpenAI has made several significant announcements in the technology sector. The company revealed ChatGPT Atlas on October 21, 2025, which represents a new AI-powered browser application that has been specifically designed for macOS users. This browser is strategically positioned to compete with traditional search engines in the market. Additionally, on October 6, 2025, OpenAI launched Apps in ChatGPT, which includes a comprehensive software development kit (SDK) for developers. The company has also announced several initial strategic partners who will be integrating with this new feature, including well-known companies such as Spotify, the popular music streaming service, Zillow, which is a real estate marketplace platform, and Canva, a graphic design platform."
+
+Output: "OpenAI - Oct 21 2025: ChatGPT Atlas (AI browser, macOS, search competitor); Oct 6 2025: Apps in ChatGPT + SDK; Partners: Spotify, Zillow, Canva"
+
+Be concise while retaining all critical facts.
+""")
+
+
+@dataclass
+class CompressionManager:
+    model: Optional[Model] = None
+    compress_tool_results: bool = True
+    compress_tool_results_limit: int = 3
+    compress_tool_call_instructions: Optional[str] = None
+
+    stats: Dict[str, Any] = field(default_factory=dict)
+
+    def _is_tool_result_message(self, msg: Message) -> bool:
+        return msg.role == "tool"
+
+    def should_compress(self, messages: List[Message]) -> bool:
+        if not self.compress_tool_results:
+            return False
+
+        uncompressed_tools_count = len(
+            [m for m in messages if self._is_tool_result_message(m) and m.compressed_content is None]
+        )
+        should_compress = uncompressed_tools_count >= self.compress_tool_results_limit
+
+        if should_compress:
+            log_info(f"Tool call compression threshold hit. Compressing {uncompressed_tools_count} tool results")
+
+        return should_compress
+
+    def _compress_tool_result(self, tool_result: Message) -> Optional[str]:
+        if not tool_result:
+            return None
+
+        tool_content = f"Tool: {tool_result.tool_name or 'unknown'}\n{tool_result.content}"
+
+        self.model = get_model(self.model)
+        if not self.model:
+            log_warning("No compression model available")
+            return None
+
+        compression_prompt = self.compress_tool_call_instructions or DEFAULT_COMPRESSION_PROMPT
+        compression_message = "Tool Results to Compress: " + tool_content + "\n"
+
+        try:
+            response = self.model.response(
+                messages=[
+                    Message(role="system", content=compression_prompt),
+                    Message(role="user", content=compression_message),
+                ]
+            )
+            return response.content
+        except Exception as e:
+            log_error(f"Error compressing tool result: {e}")
+            return tool_content
+
+    def compress(self, messages: List[Message]) -> None:
+        """Compress uncompressed tool results"""
+        if not self.compress_tool_results:
+            return
+
+        uncompressed_tools = [msg for msg in messages if msg.role == "tool" and msg.compressed_content is None]
+
+        if not uncompressed_tools:
+            return
+
+        # Compress uncompressed tool results
+        for tool_msg in uncompressed_tools:
+            original_len = len(str(tool_msg.content)) if tool_msg.content else 0
+            compressed = self._compress_tool_result(tool_msg)
+            if compressed:
+                tool_msg.compressed_content = compressed
+                # Track stats
+                self.stats["messages_compressed"] = self.stats.get("messages_compressed", 0) + 1
+                self.stats["original_size"] = self.stats.get("original_size", 0) + original_len
+                self.stats["compressed_size"] = self.stats.get("compressed_size", 0) + len(compressed)
+            else:
+                log_warning(f"Compression failed for {tool_msg.tool_name}")

+    # * Async methods *#
+    async def _acompress_tool_result(self, tool_result: Message) -> Optional[str]:
+        """Async compress a single tool result"""
+        if not tool_result:
+            return None
+
+        tool_content = f"Tool: {tool_result.tool_name or 'unknown'}\n{tool_result.content}"
+
+        self.model = get_model(self.model)
+        if not self.model:
+            log_warning("No compression model available")
+            return None
+
+        compression_prompt = self.compress_tool_call_instructions or DEFAULT_COMPRESSION_PROMPT
+        compression_message = "Tool Results to Compress: " + tool_content + "\n"
+
+        try:
+            response = await self.model.aresponse(
+                messages=[
+                    Message(role="system", content=compression_prompt),
+                    Message(role="user", content=compression_message),
+                ]
+            )
+            return response.content
+        except Exception as e:
+            log_error(f"Error compressing tool result: {e}")
+            return tool_content
+
+    async def acompress(self, messages: List[Message]) -> None:
+        """Async compress uncompressed tool results"""
+        if not self.compress_tool_results:
+            return
+
+        uncompressed_tools = [msg for msg in messages if msg.role == "tool" and msg.compressed_content is None]
+
+        if not uncompressed_tools:
+            return
+
+        # Track original sizes before compression
+        original_sizes = [len(str(msg.content)) if msg.content else 0 for msg in uncompressed_tools]
+
+        # Parallel compression using asyncio.gather
+        tasks = [self._acompress_tool_result(msg) for msg in uncompressed_tools]
+        results = await asyncio.gather(*tasks)
+
+        # Apply results and track stats
+        for msg, compressed, original_len in zip(uncompressed_tools, results, original_sizes):
+            if compressed:
+                msg.compressed_content = compressed
+                # Track stats
+                self.stats["messages_compressed"] = self.stats.get("messages_compressed", 0) + 1
+                self.stats["original_size"] = self.stats.get("original_size", 0) + original_len
+                self.stats["compressed_size"] = self.stats.get("compressed_size", 0) + len(compressed)
+            else:
+                log_warning(f"Compression failed for {msg.tool_name}")
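Usage note (illustrative, not part of the diff): a minimal sketch of driving the new CompressionManager directly, assuming OpenAIChat is importable from agno.models.openai, that Message accepts role, content, and tool_name as constructor arguments, and that three tool results trip the default compress_tool_results_limit of 3. The message contents and model id are made up for the example.

from agno.compression.manager import CompressionManager
from agno.models.message import Message
from agno.models.openai import OpenAIChat  # assumed import path

# Three uncompressed tool results meet the default limit of 3.
messages = [
    Message(role="user", content="Summarize the latest OpenAI news."),
    Message(role="tool", tool_name="web_search", content="...long search output #1..."),
    Message(role="tool", tool_name="web_search", content="...long search output #2..."),
    Message(role="tool", tool_name="web_search", content="...long search output #3..."),
]

manager = CompressionManager(model=OpenAIChat(id="gpt-4o-mini"))
if manager.should_compress(messages):
    manager.compress(messages)  # async callers can use: await manager.acompress(messages)

# Original content is kept; the summary lands on compressed_content.
for msg in messages:
    if msg.role == "tool":
        print(msg.compressed_content)
print(manager.stats)  # e.g. {"messages_compressed": 3, "original_size": ..., "compressed_size": ...}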
agno/db/dynamo/dynamo.py
CHANGED
@@ -854,6 +854,7 @@ class DynamoDb(BaseDb):
         self,
         limit: Optional[int] = None,
         page: Optional[int] = None,
+        user_id: Optional[str] = None,
     ) -> Tuple[List[Dict[str, Any]], int]:
         """Get user memories stats.

@@ -881,7 +882,17 @@ class DynamoDb(BaseDb):
         table_name = self._get_table("memories")

         # Build filter expression for user_id if provided
+        filter_expression = None
+        expression_attribute_values = {}
+        if user_id:
+            filter_expression = "user_id = :user_id"
+            expression_attribute_values[":user_id"] = {"S": user_id}
+
         scan_kwargs = {"TableName": table_name}
+        if filter_expression:
+            scan_kwargs["FilterExpression"] = filter_expression
+        if expression_attribute_values:
+            scan_kwargs["ExpressionAttributeValues"] = expression_attribute_values  # type: ignore

         response = self.client.scan(**scan_kwargs)
         items = response.get("Items", [])
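Context (not from the diff): the FilterExpression pattern added above is the standard low-level DynamoDB scan API. A minimal standalone sketch with boto3, assuming a table named "memories" with a string user_id attribute (names are illustrative):

import boto3

client = boto3.client("dynamodb")

scan_kwargs = {"TableName": "memories"}
user_id = "user_123"
if user_id:
    # The filter is applied server-side after items are read, so it trims the
    # returned payload but not the read capacity consumed by the scan.
    scan_kwargs["FilterExpression"] = "user_id = :user_id"
    scan_kwargs["ExpressionAttributeValues"] = {":user_id": {"S": user_id}}

response = client.scan(**scan_kwargs)
items = response.get("Items", [])
print(len(items), "memories found for", user_id)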
agno/db/firestore/firestore.py
CHANGED
@@ -873,6 +873,7 @@ class FirestoreDb(BaseDb):
         self,
         limit: Optional[int] = None,
         page: Optional[int] = None,
+        user_id: Optional[str] = None,
     ) -> Tuple[List[Dict[str, Any]], int]:
         """Get user memories stats.

@@ -889,7 +890,10 @@ class FirestoreDb(BaseDb):
         try:
             collection_ref = self._get_collection(table_type="memories")

-            query = collection_ref.where(filter=FieldFilter("user_id", "!=", None))
+            if user_id:
+                query = collection_ref.where(filter=FieldFilter("user_id", "==", user_id))
+            else:
+                query = collection_ref.where(filter=FieldFilter("user_id", "!=", None))

             docs = query.stream()
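Context (not from the diff): FieldFilter comes from the google-cloud-firestore client. A minimal sketch of the equality filter the new user_id branch applies, assuming default credentials and a "memories" collection (both illustrative):

from google.cloud import firestore
from google.cloud.firestore_v1.base_query import FieldFilter

db = firestore.Client()
collection_ref = db.collection("memories")

# Same shape as the new branch above: stream only one user's memories.
query = collection_ref.where(filter=FieldFilter("user_id", "==", "user_123"))
for doc in query.stream():
    print(doc.id, doc.to_dict())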
agno/db/gcs_json/gcs_json_db.py
CHANGED
@@ -635,13 +635,14 @@ class GcsJsonDb(BaseDb):
             raise e

     def get_user_memory_stats(
-        self, limit: Optional[int] = None, page: Optional[int] = None
+        self, limit: Optional[int] = None, page: Optional[int] = None, user_id: Optional[str] = None
     ) -> Tuple[List[Dict[str, Any]], int]:
         """Get user memory statistics.

         Args:
             limit (Optional[int]): Maximum number of results to return.
             page (Optional[int]): Page number for pagination.
+            user_id (Optional[str]): User ID for filtering.

         Returns:
             Tuple[List[Dict[str, Any]], int]: List of user memory statistics and total count.

@@ -652,7 +653,9 @@ class GcsJsonDb(BaseDb):

         for memory in memories:
             memory_user_id = memory.get("user_id")
-
+            # filter by user_id if provided
+            if user_id is not None and memory_user_id != user_id:
+                continue
             if memory_user_id:
                 if memory_user_id not in user_stats:
                     user_stats[memory_user_id] = {
agno/db/in_memory/in_memory_db.py
CHANGED
@@ -520,13 +520,14 @@ class InMemoryDb(BaseDb):
             raise e

     def get_user_memory_stats(
-        self, limit: Optional[int] = None, page: Optional[int] = None
+        self, limit: Optional[int] = None, page: Optional[int] = None, user_id: Optional[str] = None
     ) -> Tuple[List[Dict[str, Any]], int]:
         """Get user memory statistics.

         Args:
             limit (Optional[int]): Maximum number of stats to return.
             page (Optional[int]): Page number for pagination.
+            user_id (Optional[str]): User ID for filtering.

         Returns:
             Tuple[List[Dict[str, Any]], int]: List of user memory statistics and total count.

@@ -539,7 +540,9 @@ class InMemoryDb(BaseDb):

         for memory in self._memories:
             memory_user_id = memory.get("user_id")
-
+            # filter by user_id if provided
+            if user_id is not None and memory_user_id != user_id:
+                continue
             if memory_user_id:
                 if memory_user_id not in user_stats:
                     user_stats[memory_user_id] = {
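Usage note (illustrative): GcsJsonDb, InMemoryDb, and JsonDb apply the same user_id filter in Python while iterating over stored memories, so the new parameter is purely optional. A sketch of the extended signature, assuming InMemoryDb can be constructed with no arguments and that some memories already exist; the import path and user id are assumptions:

from agno.db.in_memory import InMemoryDb  # assumed import path

db = InMemoryDb()

# Previous behaviour: stats across every user.
all_stats, total_count = db.get_user_memory_stats(limit=20, page=1)

# New optional filter: stats for a single user only.
user_stats, user_count = db.get_user_memory_stats(user_id="user_123")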
agno/db/json/json_db.py
CHANGED
@@ -633,13 +633,14 @@ class JsonDb(BaseDb):
             raise e

     def get_user_memory_stats(
-        self, limit: Optional[int] = None, page: Optional[int] = None
+        self, limit: Optional[int] = None, page: Optional[int] = None, user_id: Optional[str] = None
     ) -> Tuple[List[Dict[str, Any]], int]:
         """Get user memory statistics.

         Args:
             limit (Optional[int]): The maximum number of user stats to return.
             page (Optional[int]): The page number.
+            user_id (Optional[str]): User ID for filtering.

         Returns:
             Tuple[List[Dict[str, Any]], int]: A list of dictionaries containing user stats and total count.

@@ -650,6 +651,9 @@ class JsonDb(BaseDb):

         for memory in memories:
             memory_user_id = memory.get("user_id")
+            # filter by user_id if provided
+            if user_id is not None and memory_user_id != user_id:
+                continue
             if memory_user_id:
                 if memory_user_id not in user_stats:
                     user_stats[memory_user_id] = {

agno/db/migrations/manager.py
CHANGED
@@ -23,7 +23,7 @@ class MigrationManager:
     def latest_schema_version(self) -> Version:
         return self.available_versions[-1][1]

-    async def up(self, target_version: Optional[str] = None, table_type: Optional[str] = None):
+    async def up(self, target_version: Optional[str] = None, table_type: Optional[str] = None, force: bool = False):
         """Handle executing an up migration.

         Args:

@@ -70,7 +70,7 @@ class MigrationManager:
                continue

            # If the target version is less or equal to the current version, no migrations needed
-           if _target_version <= current_version:
+           if _target_version <= current_version and not force:
                log_warning(
                    f"Skipping up migration: the version of table '{table_name}' ({current_version}) is less or equal to the target version ({_target_version})."
                )

@@ -123,7 +123,7 @@ class MigrationManager:
            log_error(f"Error running migration to version {version}: {e}")
            raise

-    async def down(self, target_version: str, table_type: Optional[str] = None):
+    async def down(self, target_version: str, table_type: Optional[str] = None, force: bool = False):
         """Handle executing a down migration.

         Args:

@@ -156,7 +156,7 @@ class MigrationManager:
        else:
            current_version = packaging_version.parse(self.db.get_latest_schema_version(table_name))

-       if _target_version >= current_version:
+       if _target_version >= current_version and not force:
            log_warning(
                f"Skipping down migration: the version of table '{table_name}' ({current_version}) is less or equal to the target version ({_target_version})."
            )
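Note (illustrative): the new force flag only bypasses the version-comparison guard; the comparison itself relies on packaging's Version ordering, as the existing code does. A small sketch of the guard with made-up version numbers:

from packaging import version

current_version = version.parse("2.1")
_target_version = version.parse("2.0")
force = True

# Mirrors the updated check in MigrationManager.up(): without force, the
# migration is skipped because the target is not newer than the current schema.
if _target_version <= current_version and not force:
    print("Skipping up migration")
else:
    print(f"Running migrations up to {_target_version}")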