agno 2.3.6__py3-none-any.whl → 2.3.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/agent/agent.py +540 -369
- agno/db/mongo/async_mongo.py +0 -24
- agno/db/mongo/mongo.py +0 -16
- agno/db/mysql/mysql.py +0 -19
- agno/db/postgres/async_postgres.py +23 -28
- agno/db/postgres/postgres.py +0 -23
- agno/db/redis/redis.py +0 -4
- agno/db/singlestore/singlestore.py +0 -11
- agno/db/sqlite/async_sqlite.py +0 -24
- agno/db/sqlite/sqlite.py +0 -20
- agno/db/utils.py +2 -0
- agno/models/base.py +168 -15
- agno/models/openai/responses.py +3 -2
- agno/models/response.py +1 -1
- agno/os/interfaces/a2a/utils.py +1 -1
- agno/os/middleware/jwt.py +8 -6
- agno/os/routers/evals/utils.py +13 -3
- agno/run/agent.py +17 -0
- agno/run/requirement.py +98 -0
- agno/run/team.py +10 -0
- agno/team/team.py +179 -96
- agno/tools/postgres.py +76 -36
- agno/tools/redshift.py +406 -0
- agno/tools/toolkit.py +25 -0
- agno/tools/workflow.py +8 -1
- agno/utils/events.py +5 -1
- agno/workflow/parallel.py +8 -2
- agno/workflow/step.py +3 -3
- {agno-2.3.6.dist-info → agno-2.3.8.dist-info}/METADATA +5 -2
- {agno-2.3.6.dist-info → agno-2.3.8.dist-info}/RECORD +33 -32
- agno/tools/memori.py +0 -339
- {agno-2.3.6.dist-info → agno-2.3.8.dist-info}/WHEEL +0 -0
- {agno-2.3.6.dist-info → agno-2.3.8.dist-info}/licenses/LICENSE +0 -0
- {agno-2.3.6.dist-info → agno-2.3.8.dist-info}/top_level.txt +0 -0

agno/tools/postgres.py
CHANGED

```diff
@@ -14,6 +14,21 @@ from agno.utils.log import log_debug, log_error
 
 
 class PostgresTools(Toolkit):
+    """
+    A toolkit for interacting with PostgreSQL databases.
+
+    Args:
+        connection (Optional[PgConnection[DictRow]]): Existing database connection to reuse.
+        db_name (Optional[str]): Database name to connect to.
+        user (Optional[str]): Username for authentication.
+        password (Optional[str]): Password for authentication.
+        host (Optional[str]): PostgreSQL server hostname.
+        port (Optional[int]): PostgreSQL server port number.
+        table_schema (str): Default schema for table operations. Default is "public".
+    """
+
+    _requires_connect: bool = True
+
     def __init__(
         self,
         connection: Optional[PgConnection[DictRow]] = None,
@@ -44,50 +59,71 @@ class PostgresTools(Toolkit):
 
         super().__init__(name="postgres_tools", tools=tools, **kwargs)
 
-
-    def connection(self) -> PgConnection[DictRow]:
-        """
-        Returns the Postgres psycopg connection.
-        :return psycopg.connection.Connection: psycopg connection
+    def connect(self) -> PgConnection[DictRow]:
         """
-
-        log_debug("Establishing new PostgreSQL connection.")
-        connection_kwargs: Dict[str, Any] = {"row_factory": dict_row}
-        if self.db_name:
-            connection_kwargs["dbname"] = self.db_name
-        if self.user:
-            connection_kwargs["user"] = self.user
-        if self.password:
-            connection_kwargs["password"] = self.password
-        if self.host:
-            connection_kwargs["host"] = self.host
-        if self.port:
-            connection_kwargs["port"] = self.port
-
-        connection_kwargs["options"] = f"-c search_path={self.table_schema}"
-
-        self._connection = psycopg.connect(**connection_kwargs)
-        self._connection.read_only = True
+        Establish a connection to the PostgreSQL database.
 
+        Returns:
+            The database connection object.
+        """
+        if self._connection is not None and not self._connection.closed:
+            log_debug("Connection already established, reusing existing connection")
+            return self._connection
+
+        log_debug("Establishing new PostgreSQL connection.")
+        connection_kwargs: Dict[str, Any] = {"row_factory": dict_row}
+        if self.db_name:
+            connection_kwargs["dbname"] = self.db_name
+        if self.user:
+            connection_kwargs["user"] = self.user
+        if self.password:
+            connection_kwargs["password"] = self.password
+        if self.host:
+            connection_kwargs["host"] = self.host
+        if self.port:
+            connection_kwargs["port"] = self.port
+
+        connection_kwargs["options"] = f"-c search_path={self.table_schema}"
+
+        self._connection = psycopg.connect(**connection_kwargs)
+        self._connection.read_only = True
         return self._connection
 
-    def __enter__(self):
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        self.close()
-
-    def close(self):
+    def close(self) -> None:
         """Closes the database connection if it's open."""
         if self._connection and not self._connection.closed:
             log_debug("Closing PostgreSQL connection.")
             self._connection.close()
             self._connection = None
 
+    @property
+    def is_connected(self) -> bool:
+        """Check if a connection is currently established."""
+        return self._connection is not None and not self._connection.closed
+
+    def _ensure_connection(self) -> PgConnection[DictRow]:
+        """
+        Ensure a connection exists, creating one if necessary.
+
+        Returns:
+            The database connection object.
+        """
+        if not self.is_connected:
+            return self.connect()
+        return self._connection
+
+    def __enter__(self):
+        return self.connect()
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        if self.is_connected:
+            self.close()
+
     def _execute_query(self, query: str, params: Optional[tuple] = None) -> str:
         try:
-
-
+            connection = self._ensure_connection()
+            with connection.cursor() as cursor:
+                log_debug("Running PostgreSQL query")
                 cursor.execute(query, params)
 
                 if cursor.description is None:
@@ -105,8 +141,8 @@ class PostgresTools(Toolkit):
 
         except psycopg.Error as e:
             log_error(f"Database error: {e}")
-            if self.
-                self.
+            if self._connection and not self._connection.closed:
+                self._connection.rollback()
             return f"Error executing query: {e}"
         except Exception as e:
             log_error(f"An unexpected error occurred: {e}")
@@ -146,7 +182,8 @@ class PostgresTools(Toolkit):
             A string containing a summary of the table.
         """
         try:
-
+            connection = self._ensure_connection()
+            with connection.cursor() as cursor:
                 # First, get column information using a parameterized query
                 schema_query = """
                     SELECT column_name, data_type
@@ -230,7 +267,8 @@ class PostgresTools(Toolkit):
         stmt = sql.SQL("SELECT * FROM {tbl};").format(tbl=table_identifier)
 
         try:
-
+            connection = self._ensure_connection()
+            with connection.cursor() as cursor:
                 cursor.execute(stmt)
 
                 if cursor.description is None:
@@ -245,6 +283,8 @@ class PostgresTools(Toolkit):
 
             return f"Successfully exported table '{table}' to '{path}'."
         except (psycopg.Error, IOError) as e:
+            if self._connection and not self._connection.closed:
+                self._connection.rollback()
             return f"Error exporting table: {e}"
 
     def run_query(self, query: str) -> str:
```
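
The change above replaces the connect-on-every-call `connection()` accessor with an explicit, idempotent `connect()`/`close()` lifecycle plus an `is_connected` check, and the context manager now yields the live connection instead of the toolkit itself. A minimal usage sketch with placeholder credentials (the method and parameter names come from the diff; the database values are assumptions):

```python
from agno.tools.postgres import PostgresTools

pg = PostgresTools(db_name="mydb", user="readonly", password="secret", host="localhost", port=5432)

# Explicit lifecycle: an Agent drives this automatically for toolkits
# that set _requires_connect = True
pg.connect()                 # establishes the connection
pg.connect()                 # reuses it while the connection is still open
print(pg.is_connected)       # True
print(pg.run_query("SELECT 1;"))
pg.close()

# Context-manager lifecycle: __enter__ now returns the psycopg connection
with pg as connection:
    with connection.cursor() as cursor:
        cursor.execute("SELECT now();")
        print(cursor.fetchone())
# __exit__ closes the connection if it is still open
```
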
agno/tools/redshift.py
ADDED

```python
import csv
from os import getenv
from typing import Any, Dict, List, Optional

try:
    import redshift_connector
    from redshift_connector import Connection
except ImportError:
    raise ImportError("`redshift_connector` not installed. Please install using `pip install redshift-connector`.")

from agno.tools import Toolkit
from agno.utils.log import log_debug, log_error, log_info


class RedshiftTools(Toolkit):
    """
    A toolkit for interacting with Amazon Redshift databases.

    Supports these authentication methods:
    - Standard username and password authentication
    - IAM authentication with AWS profile
    - IAM authentication with AWS credentials

    Args:
        host (Optional[str]): Redshift cluster endpoint hostname. Falls back to REDSHIFT_HOST env var.
        port (int): Redshift cluster port number. Default is 5439.
        database (Optional[str]): Database name to connect to. Falls back to REDSHIFT_DATABASE env var.
        user (Optional[str]): Username for standard authentication.
        password (Optional[str]): Password for standard authentication.
        iam (bool): Enable IAM authentication. Default is False.
        cluster_identifier (Optional[str]): Redshift cluster identifier for IAM auth with provisioned clusters. Falls back to REDSHIFT_CLUSTER_IDENTIFIER env var.
        region (Optional[str]): AWS region for IAM credential retrieval. Falls back to AWS_REGION or AWS_DEFAULT_REGION env vars.
        db_user (Optional[str]): Database user for IAM auth with provisioned clusters. Falls back to REDSHIFT_DB_USER env var.
        access_key_id (Optional[str]): AWS access key ID for IAM auth. Falls back to AWS_ACCESS_KEY_ID env var.
        secret_access_key (Optional[str]): AWS secret access key for IAM auth. Falls back to AWS_SECRET_ACCESS_KEY env var.
        session_token (Optional[str]): AWS session token for temporary credentials. Falls back to AWS_SESSION_TOKEN env var.
        profile (Optional[str]): AWS profile name for IAM auth. Falls back to AWS_PROFILE env var.
        ssl (bool): Enable SSL connection. Default is True.
        table_schema (str): Default schema for table operations. Default is "public".
    """

    _requires_connect: bool = True

    def __init__(
        self,
        # Connection parameters
        host: Optional[str] = None,
        port: int = 5439,
        database: Optional[str] = None,
        # Standard authentication (username/password)
        user: Optional[str] = None,
        password: Optional[str] = None,
        # IAM Authentication
        iam: bool = False,
        cluster_identifier: Optional[str] = None,
        region: Optional[str] = None,
        db_user: Optional[str] = None,
        # AWS Credentials (for IAM auth)
        access_key_id: Optional[str] = None,
        secret_access_key: Optional[str] = None,
        session_token: Optional[str] = None,
        profile: Optional[str] = None,
        # Connection settings
        ssl: bool = True,
        table_schema: str = "public",
        **kwargs,
    ):
        # Connection parameters
        self.host: Optional[str] = host or getenv("REDSHIFT_HOST")
        self.port: int = port
        self.database: Optional[str] = database or getenv("REDSHIFT_DATABASE")

        # Standard authentication
        self.user: Optional[str] = user
        self.password: Optional[str] = password

        # IAM authentication parameters
        self.iam: bool = iam
        self.cluster_identifier: Optional[str] = cluster_identifier or getenv("REDSHIFT_CLUSTER_IDENTIFIER")
        self.region: Optional[str] = region or getenv("AWS_REGION") or getenv("AWS_DEFAULT_REGION")
        self.db_user: Optional[str] = db_user or getenv("REDSHIFT_DB_USER")

        # AWS credentials
        self.access_key_id: Optional[str] = access_key_id or getenv("AWS_ACCESS_KEY_ID")
        self.secret_access_key: Optional[str] = secret_access_key or getenv("AWS_SECRET_ACCESS_KEY")
        self.session_token: Optional[str] = session_token or getenv("AWS_SESSION_TOKEN")
        self.profile: Optional[str] = profile or getenv("AWS_PROFILE")

        # Connection settings
        self.ssl: bool = ssl
        self.table_schema: str = table_schema

        # Connection instance
        self._connection: Optional[Connection] = None

        tools: List[Any] = [
            self.show_tables,
            self.describe_table,
            self.summarize_table,
            self.inspect_query,
            self.run_query,
            self.export_table_to_path,
        ]

        super().__init__(name="redshift_tools", tools=tools, **kwargs)

    def connect(self) -> Connection:
        """
        Establish a connection to the Redshift database.

        Returns:
            The database connection object.

        Raises:
            redshift_connector.Error: If connection fails.
        """
        if self._connection is not None:
            log_debug("Connection already established, reusing existing connection")
            return self._connection

        log_info("Establishing connection to Redshift")
        self._connection = redshift_connector.connect(**self._get_connection_kwargs())
        return self._connection

    def close(self) -> None:
        """
        Close the database connection if it exists.
        """
        if self._connection is not None:
            log_info("Closing Redshift connection")
            try:
                self._connection.close()
            except Exception:
                pass  # Connection might already be closed
            self._connection = None

    @property
    def is_connected(self) -> bool:
        """Check if a connection is currently established."""
        return self._connection is not None

    def _ensure_connection(self) -> Connection:
        """
        Ensure a connection exists, creating one if necessary.

        Returns:
            The database connection object.
        """
        if self._connection is None:
            return self.connect()
        return self._connection

    def _get_connection_kwargs(self) -> Dict[str, Any]:
        """Build connection kwargs from instance."""
        connection_kwargs: Dict[str, Any] = {}

        # Common connection parameters
        if self.host:
            connection_kwargs["host"] = self.host
        if self.port:
            connection_kwargs["port"] = self.port
        if self.database:
            connection_kwargs["database"] = self.database
        connection_kwargs["ssl"] = self.ssl

        # IAM Authentication
        if self.iam:
            connection_kwargs["iam"] = True

            # For provisioned clusters (not serverless)
            if self.cluster_identifier:
                connection_kwargs["cluster_identifier"] = self.cluster_identifier
                # db_user required for provisioned clusters with IAM
                if self.db_user:
                    connection_kwargs["db_user"] = self.db_user

            # Region for IAM credential retrieval
            if self.region:
                connection_kwargs["region"] = self.region

            # AWS credentials - either profile or explicit
            if self.profile:
                connection_kwargs["profile"] = self.profile
            else:
                # Explicit AWS credentials
                if self.access_key_id:
                    connection_kwargs["access_key_id"] = self.access_key_id
                if self.secret_access_key:
                    connection_kwargs["secret_access_key"] = self.secret_access_key
                if self.session_token:
                    connection_kwargs["session_token"] = self.session_token

        else:
            # Standard username/password authentication
            if self.user:
                connection_kwargs["user"] = self.user
            if self.password:
                connection_kwargs["password"] = self.password

        return connection_kwargs

    def _execute_query(self, query: str, params: Optional[tuple] = None) -> str:
        try:
            connection = self._ensure_connection()
            with connection.cursor() as cursor:
                log_debug("Running Redshift query")
                cursor.execute(query, params)

                if cursor.description is None:
                    return "Query executed successfully."

                columns = [desc[0] for desc in cursor.description]
                rows = cursor.fetchall()

                if not rows:
                    return f"Query returned no results.\nColumns: {', '.join(columns)}"

                header = ",".join(columns)
                data_rows = [",".join(map(str, row)) for row in rows]
                return f"{header}\n" + "\n".join(data_rows)

        except redshift_connector.Error as e:
            log_error(f"Database error: {e}")
            if self._connection:
                try:
                    self._connection.rollback()
                except Exception:
                    pass  # Connection might be closed
            return f"Error executing query: {e}"
        except Exception as e:
            log_error(f"An unexpected error occurred: {e}")
            return f"An unexpected error occurred: {e}"

    def show_tables(self) -> str:
        """Lists all tables in the configured schema."""

        stmt = "SELECT table_name FROM information_schema.tables WHERE table_schema = %s;"
        return self._execute_query(stmt, (self.table_schema,))

    def describe_table(self, table: str) -> str:
        """
        Provides the schema (column name, data type, is nullable) for a given table.

        Args:
            table: The name of the table to describe.

        Returns:
            A string describing the table's columns and data types.
        """
        stmt = """
            SELECT column_name, data_type, is_nullable
            FROM information_schema.columns
            WHERE table_schema = %s AND table_name = %s;
        """
        return self._execute_query(stmt, (self.table_schema, table))

    def summarize_table(self, table: str) -> str:
        """
        Computes and returns key summary statistics for a table's columns.

        Args:
            table: The name of the table to summarize.

        Returns:
            A string containing a summary of the table.
        """
        try:
            connection = self._ensure_connection()
            with connection.cursor() as cursor:
                # First, get column information using a parameterized query
                schema_query = """
                    SELECT column_name, data_type
                    FROM information_schema.columns
                    WHERE table_schema = %s AND table_name = %s;
                """
                cursor.execute(schema_query, (self.table_schema, table))
                columns = cursor.fetchall()
                if not columns:
                    return f"Error: Table '{table}' not found in schema '{self.table_schema}'."

                summary_parts = [f"Summary for table: {table}\n"]

                # Redshift uses schema.table format for fully qualified names
                full_table_name = f'"{self.table_schema}"."{table}"'

                for col in columns:
                    col_name = col[0]
                    data_type = col[1]

                    query = None
                    if any(
                        t in data_type.lower()
                        for t in [
                            "integer",
                            "int",
                            "bigint",
                            "smallint",
                            "numeric",
                            "decimal",
                            "real",
                            "double precision",
                            "float",
                        ]
                    ):
                        query = f"""
                            SELECT
                                COUNT(*) AS total_rows,
                                COUNT("{col_name}") AS non_null_rows,
                                MIN("{col_name}") AS min,
                                MAX("{col_name}") AS max,
                                AVG("{col_name}") AS average,
                                STDDEV("{col_name}") AS std_deviation
                            FROM {full_table_name};
                        """
                    elif any(t in data_type.lower() for t in ["char", "varchar", "text", "uuid"]):
                        query = f"""
                            SELECT
                                COUNT(*) AS total_rows,
                                COUNT("{col_name}") AS non_null_rows,
                                COUNT(DISTINCT "{col_name}") AS unique_values,
                                AVG(LEN("{col_name}")) as avg_length
                            FROM {full_table_name};
                        """

                    if query:
                        cursor.execute(query)
                        stats = cursor.fetchone()
                        summary_parts.append(f"\n--- Column: {col_name} (Type: {data_type}) ---")
                        if stats is not None:
                            stats_dict = dict(zip([desc[0] for desc in cursor.description], stats))
                            for key, value in stats_dict.items():
                                val_str = (
                                    f"{value:.2f}" if isinstance(value, float) and value is not None else str(value)
                                )
                                summary_parts.append(f" {key}: {val_str}")
                        else:
                            summary_parts.append(" No statistics available")

                return "\n".join(summary_parts)

        except redshift_connector.Error as e:
            return f"Error summarizing table: {e}"

    def inspect_query(self, query: str) -> str:
        """
        Shows the execution plan for a SQL query (using EXPLAIN).

        Args:
            query: The SQL query to inspect.

        Returns:
            The query's execution plan.
        """
        return self._execute_query(f"EXPLAIN {query}")

    def export_table_to_path(self, table: str, path: str) -> str:
        """
        Exports a table's data to a local CSV file.

        Args:
            table: The name of the table to export.
            path: The local file path to save the file.

        Returns:
            A confirmation message with the file path.
        """
        log_debug(f"Exporting table {table} to {path}")

        full_table_name = f'"{self.table_schema}"."{table}"'
        stmt = f"SELECT * FROM {full_table_name};"

        try:
            connection = self._ensure_connection()
            with connection.cursor() as cursor:
                cursor.execute(stmt)

                if cursor.description is None:
                    return f"Error: Query returned no description for table '{table}'."

                columns = [desc[0] for desc in cursor.description]

                with open(path, "w", newline="", encoding="utf-8") as f:
                    writer = csv.writer(f)
                    writer.writerow(columns)
                    writer.writerows(cursor)

            return f"Successfully exported table '{table}' to '{path}'."
        except (redshift_connector.Error, IOError) as e:
            if self._connection:
                try:
                    self._connection.rollback()
                except Exception:
                    pass  # Connection might be closed
            return f"Error exporting table: {e}"

    def run_query(self, query: str) -> str:
        """
        Runs a read-only SQL query and returns the result.

        Args:
            query: The SQL query to run.

        Returns:
            The query result as a formatted string.
        """
        return self._execute_query(query)
```
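
For context, a usage sketch of the new toolkit under the two documented authentication modes. All hostnames, database names, and credentials below are placeholders; the constructor parameters and tool names come from the file above:

```python
from agno.tools.redshift import RedshiftTools

# Standard username/password authentication
rs = RedshiftTools(
    host="examplecluster.abc123xyz789.us-west-2.redshift.amazonaws.com",
    database="dev",
    user="awsuser",
    password="secret",
)
print(rs.show_tables())
print(rs.describe_table("sales"))
rs.close()

# IAM authentication against a provisioned cluster: cluster_identifier and
# db_user are used together; credentials here come from a named AWS profile
rs_iam = RedshiftTools(
    iam=True,
    cluster_identifier="examplecluster",
    region="us-west-2",
    db_user="awsuser",
    database="dev",
    profile="analytics",
)
print(rs_iam.run_query("SELECT COUNT(*) FROM sales;"))
rs_iam.close()
```
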
agno/tools/toolkit.py
CHANGED

```diff
@@ -6,6 +6,10 @@ from agno.utils.log import log_debug, log_warning, logger
 
 
 class Toolkit:
+    # Set to True for toolkits that require connection management (e.g., database connections)
+    # When True, the Agent will automatically call connect() before using tools and close() after
+    _requires_connect: bool = False
+
     def __init__(
         self,
         name: str = "toolkit",
@@ -139,6 +143,27 @@
             logger.warning(f"Failed to create Function for: {function.__name__}")
             raise e
 
+    @property
+    def requires_connect(self) -> bool:
+        """Whether the toolkit requires connection management."""
+        return self._requires_connect
+
+    def connect(self) -> None:
+        """
+        Establish any required connections for the toolkit.
+        Override this method in subclasses that require connection management.
+        Called automatically by the Agent when _requires_connect is True.
+        """
+        pass
+
+    def close(self) -> None:
+        """
+        Close any open connections for the toolkit.
+        Override this method in subclasses that require connection management.
+        Called automatically by the Agent when _requires_connect is True.
+        """
+        pass
+
     def __repr__(self):
         return f"<{self.__class__.__name__} name={self.name} functions={list(self.functions.keys())}>"
 
```
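These hooks make connection management an opt-in contract for any toolkit: a subclass flips `_requires_connect` to True and overrides `connect()`/`close()`, exactly as the PostgresTools and RedshiftTools changes in this release do. A sketch of a hypothetical subclass using the hooks (the class and its stand-in "client" are illustrative, not part of agno):

```python
from typing import Any, List, Optional

from agno.tools import Toolkit


class KeyValueStoreTools(Toolkit):
    # Opt in: the Agent calls connect() before tool use and close() afterwards
    _requires_connect: bool = True

    def __init__(self, **kwargs):
        self._client: Optional[Any] = None
        tools: List[Any] = [self.get_value]
        super().__init__(name="kv_store_tools", tools=tools, **kwargs)

    def connect(self) -> None:
        # A real toolkit would open a database or network client here
        if self._client is None:
            self._client = {"greeting": "hello"}  # stand-in for a real client

    def close(self) -> None:
        self._client = None

    def get_value(self, key: str) -> str:
        """Return the value stored under `key`."""
        if self._client is None:
            self.connect()  # lazy fallback, mirroring _ensure_connection()
        return str(self._client.get(key, "<missing>"))
```
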
agno/tools/workflow.py
CHANGED

```diff
@@ -130,9 +130,13 @@ class WorkflowTools(Toolkit):
     ) -> str:
         """Use this tool to execute the workflow with the specified inputs and parameters.
         After thinking through the requirements, use this tool to run the workflow with appropriate inputs.
+
         Args:
-
+            input: The input data for the workflow.
         """
+        if isinstance(input, dict):
+            input = RunWorkflowInput.model_validate(input)
+
         try:
             log_debug(f"Running workflow with input: {input.input_data}")
 
@@ -170,6 +174,9 @@
             input_data: The input data for the workflow (use a `str` for a simple input)
             additional_data: The additional data for the workflow. This is a dictionary of key-value pairs that will be passed to the workflow. E.g. {"topic": "food", "style": "Humour"}
         """
+        if isinstance(input, dict):
+            input = RunWorkflowInput.model_validate(input)
+
         try:
             log_debug(f"Running workflow with input: {input.input_data}")
 
```
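The new `isinstance` guard matters because tool arguments arrive from the model as parsed JSON, so `input` can be a plain dict rather than a `RunWorkflowInput` instance; `model_validate` coerces it before `.input_data` is accessed. A self-contained sketch of the pattern, using a simplified stand-in for `RunWorkflowInput` (the real model lives in agno; the fields shown are inferred from the docstring in the diff):

```python
from typing import Any, Dict, Optional, Union

from pydantic import BaseModel


class RunWorkflowInput(BaseModel):  # simplified stand-in for the agno model
    input_data: Any
    additional_data: Optional[Dict[str, Any]] = None


def run_workflow(input: Union[RunWorkflowInput, Dict[str, Any]]) -> str:
    # Coerce raw JSON tool arguments into the typed model before attribute access
    if isinstance(input, dict):
        input = RunWorkflowInput.model_validate(input)
    return f"Running workflow with input: {input.input_data}"


# Without the guard, a dict here would raise AttributeError on .input_data
print(run_workflow({"input_data": "write a story", "additional_data": {"topic": "food"}}))
```
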
agno/utils/events.py
CHANGED

```diff
@@ -35,6 +35,7 @@ from agno.run.agent import (
     ToolCallCompletedEvent,
     ToolCallStartedEvent,
 )
+from agno.run.requirement import RunRequirement
 from agno.run.team import MemoryUpdateCompletedEvent as TeamMemoryUpdateCompletedEvent
 from agno.run.team import MemoryUpdateStartedEvent as TeamMemoryUpdateStartedEvent
 from agno.run.team import OutputModelResponseCompletedEvent as TeamOutputModelResponseCompletedEvent
@@ -136,7 +137,9 @@ def create_run_completed_event(from_run_response: RunOutput) -> RunCompletedEvent:
 
 
 def create_run_paused_event(
-    from_run_response: RunOutput, tools: Optional[List[ToolExecution]] = None
+    from_run_response: RunOutput,
+    tools: Optional[List[ToolExecution]] = None,
+    requirements: Optional[List[RunRequirement]] = None,
 ) -> RunPausedEvent:
     return RunPausedEvent(
         session_id=from_run_response.session_id,
@@ -144,6 +147,7 @@ def create_run_paused_event(
         agent_name=from_run_response.agent_name,  # type: ignore
         run_id=from_run_response.run_id,
         tools=tools,
+        requirements=requirements,
         content=from_run_response.content,
     )
 
```
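A sketch of calling the extended helper. `RunRequirement` is new in this release (`agno/run/requirement.py`, +98 lines, not shown in this diff), so its construction below is a placeholder, as are the `RunOutput` fields:

```python
from agno.run.agent import RunOutput
from agno.run.requirement import RunRequirement
from agno.utils.events import create_run_paused_event

run_output = RunOutput(run_id="run-123", session_id="session-456")  # assumed constructor fields
requirement = RunRequirement()  # placeholder; real fields live in agno/run/requirement.py

event = create_run_paused_event(
    from_run_response=run_output,
    tools=None,                  # unchanged parameter, now one-per-line in the signature
    requirements=[requirement],  # new: surfaces what the paused run is waiting on
)
print(event.requirements)
```
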