agno 2.3.8__py3-none-any.whl → 2.3.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/agent/agent.py +134 -82
- agno/db/mysql/__init__.py +2 -1
- agno/db/mysql/async_mysql.py +2888 -0
- agno/db/mysql/mysql.py +17 -8
- agno/db/mysql/utils.py +139 -6
- agno/db/postgres/async_postgres.py +10 -5
- agno/db/postgres/postgres.py +7 -2
- agno/db/schemas/evals.py +1 -0
- agno/db/singlestore/singlestore.py +5 -1
- agno/db/sqlite/async_sqlite.py +2 -2
- agno/eval/__init__.py +10 -0
- agno/eval/agent_as_judge.py +860 -0
- agno/eval/base.py +29 -0
- agno/eval/utils.py +2 -1
- agno/exceptions.py +7 -0
- agno/knowledge/embedder/openai.py +8 -8
- agno/knowledge/knowledge.py +1142 -176
- agno/media.py +22 -6
- agno/models/aws/claude.py +8 -7
- agno/models/base.py +27 -1
- agno/models/deepseek/deepseek.py +67 -0
- agno/models/google/gemini.py +65 -11
- agno/models/google/utils.py +22 -0
- agno/models/message.py +2 -0
- agno/models/openai/chat.py +4 -0
- agno/os/app.py +64 -74
- agno/os/interfaces/a2a/router.py +3 -4
- agno/os/interfaces/agui/router.py +2 -0
- agno/os/router.py +3 -1607
- agno/os/routers/agents/__init__.py +3 -0
- agno/os/routers/agents/router.py +581 -0
- agno/os/routers/agents/schema.py +261 -0
- agno/os/routers/evals/evals.py +26 -6
- agno/os/routers/evals/schemas.py +34 -2
- agno/os/routers/evals/utils.py +101 -20
- agno/os/routers/knowledge/knowledge.py +1 -1
- agno/os/routers/teams/__init__.py +3 -0
- agno/os/routers/teams/router.py +496 -0
- agno/os/routers/teams/schema.py +257 -0
- agno/os/routers/workflows/__init__.py +3 -0
- agno/os/routers/workflows/router.py +545 -0
- agno/os/routers/workflows/schema.py +75 -0
- agno/os/schema.py +1 -559
- agno/os/utils.py +139 -2
- agno/team/team.py +73 -16
- agno/tools/file_generation.py +12 -6
- agno/tools/firecrawl.py +15 -7
- agno/utils/hooks.py +64 -5
- agno/utils/http.py +2 -2
- agno/utils/media.py +11 -1
- agno/utils/print_response/agent.py +8 -0
- agno/utils/print_response/team.py +8 -0
- agno/vectordb/pgvector/pgvector.py +88 -51
- agno/workflow/parallel.py +3 -3
- agno/workflow/step.py +14 -2
- agno/workflow/types.py +38 -2
- agno/workflow/workflow.py +12 -4
- {agno-2.3.8.dist-info → agno-2.3.9.dist-info}/METADATA +7 -2
- {agno-2.3.8.dist-info → agno-2.3.9.dist-info}/RECORD +62 -49
- {agno-2.3.8.dist-info → agno-2.3.9.dist-info}/WHEEL +0 -0
- {agno-2.3.8.dist-info → agno-2.3.9.dist-info}/licenses/LICENSE +0 -0
- {agno-2.3.8.dist-info → agno-2.3.9.dist-info}/top_level.txt +0 -0
agno/db/mysql/mysql.py
CHANGED
@@ -3,8 +3,6 @@ from datetime import date, datetime, timedelta, timezone
 from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Tuple, Union
 from uuid import uuid4
 
-from sqlalchemy import ForeignKey, Index, UniqueConstraint
-
 if TYPE_CHECKING:
     from agno.tracing.schemas import Span, Trace
 
@@ -32,7 +30,7 @@ from agno.utils.log import log_debug, log_error, log_info, log_warning
 from agno.utils.string import generate_id
 
 try:
-    from sqlalchemy import TEXT, and_, cast, func, update
+    from sqlalchemy import TEXT, ForeignKey, Index, UniqueConstraint, and_, cast, func, update
     from sqlalchemy.dialects import mysql
     from sqlalchemy.engine import Engine, create_engine
     from sqlalchemy.orm import scoped_session, sessionmaker
@@ -45,6 +43,7 @@ except ImportError:
 class MySQLDb(BaseDb):
     def __init__(
         self,
+        id: Optional[str] = None,
         db_engine: Optional[Engine] = None,
         db_schema: Optional[str] = None,
         db_url: Optional[str] = None,
@@ -57,7 +56,7 @@ class MySQLDb(BaseDb):
         traces_table: Optional[str] = None,
         spans_table: Optional[str] = None,
         versions_table: Optional[str] = None,
-        id: Optional[str] = None,
+        create_schema: bool = True,
     ):
         """
         Interface for interacting with a MySQL database.
@@ -68,6 +67,7 @@ class MySQLDb(BaseDb):
         3. Raise an error if neither is provided
 
         Args:
+            id (Optional[str]): ID of the database.
             db_url (Optional[str]): The database URL to connect to.
             db_engine (Optional[Engine]): The SQLAlchemy database engine to use.
             db_schema (Optional[str]): The database schema to use.
@@ -80,7 +80,8 @@ class MySQLDb(BaseDb):
             traces_table (Optional[str]): Name of the table to store run traces.
             spans_table (Optional[str]): Name of the table to store span events.
             versions_table (Optional[str]): Name of the table to store schema versions.
-            id (Optional[str]): ID of the database.
+            create_schema (bool): Whether to automatically create the database schema if it doesn't exist.
+                Set to False if schema is managed externally (e.g., via migrations). Defaults to True.
 
         Raises:
             ValueError: If neither db_url nor db_engine is provided.
@@ -115,6 +116,7 @@ class MySQLDb(BaseDb):
         self.db_engine: Engine = _engine
         self.db_schema: str = db_schema if db_schema is not None else "ai"
         self.metadata: MetaData = MetaData(schema=self.db_schema)
+        self.create_schema: bool = create_schema
 
         # Initialize database session
         self.Session: scoped_session = scoped_session(sessionmaker(bind=self.db_engine))
@@ -190,8 +192,9 @@ class MySQLDb(BaseDb):
             idx_name = f"idx_{table_name}_{idx_col}"
             table.append_constraint(Index(idx_name, idx_col))
 
-        with self.Session() as sess, sess.begin():
-            create_schema(session=sess, db_schema=self.db_schema)
+        if self.create_schema:
+            with self.Session() as sess, sess.begin():
+                create_schema(session=sess, db_schema=self.db_schema)
 
         # Create table
         table_created = False
@@ -252,6 +255,9 @@ class MySQLDb(BaseDb):
             (self.metrics_table_name, "metrics"),
             (self.eval_table_name, "evals"),
             (self.knowledge_table_name, "knowledge"),
+            (self.culture_table_name, "culture"),
+            (self.trace_table_name, "traces"),
+            (self.span_table_name, "spans"),
             (self.versions_table_name, "versions"),
         ]
 
@@ -1109,9 +1115,12 @@ class MySQLDb(BaseDb):
         except Exception as e:
            log_error(f"Error deleting user memories: {e}")
 
-    def get_all_memory_topics(self) -> List[str]:
+    def get_all_memory_topics(self, user_id: Optional[str] = None) -> List[str]:
         """Get all memory topics from the database.
 
+        Args:
+            user_id (Optional[str]): Optional user ID to filter topics.
+
         Returns:
            List[str]: List of memory topics.
         """
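For orientation, here is a minimal usage sketch of the options this file gains in 2.3.9 (the relocated `id` parameter, the new `create_schema` flag, and the `user_id` filter on `get_all_memory_topics`), based only on the signatures shown above; the connection URL, driver, and identifier values are illustrative.

```python
# Sketch only: the URL/driver and id value are illustrative, not part of the diff.
from agno.db.mysql.mysql import MySQLDb

db = MySQLDb(
    id="primary-mysql",
    db_url="mysql+pymysql://user:pass@localhost:3306/ai",
    create_schema=False,  # schema is managed externally, e.g. via migrations
)

# New in 2.3.9: optionally scope memory topics to a single user.
topics = db.get_all_memory_topics(user_id="user-123")
```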
agno/db/mysql/utils.py
CHANGED
@@ -5,15 +5,14 @@ from datetime import date, datetime, timedelta, timezone
 from typing import Any, Dict, List, Optional
 from uuid import uuid4
 
-from sqlalchemy import Engine
-
 from agno.db.mysql.schemas import get_table_schema_definition
 from agno.db.schemas.culture import CulturalKnowledge
 from agno.utils.log import log_debug, log_error, log_warning
 
 try:
-    from sqlalchemy import Table
+    from sqlalchemy import Engine, Table
     from sqlalchemy.dialects import mysql
+    from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession
     from sqlalchemy.inspection import inspect
     from sqlalchemy.orm import Session
     from sqlalchemy.sql.expression import text
@@ -91,8 +90,10 @@ def is_valid_table(db_engine: Engine, table_name: str, table_type: str, db_schem
     Check if the existing table has the expected column names.
 
     Args:
+        db_engine: Database engine
         table_name (str): Name of the table to validate
-
+        table_type (str): Type of table (for schema lookup)
+        db_schema (str): Database schema name
 
     Returns:
         bool: True if table has all expected columns, False otherwise
@@ -123,6 +124,7 @@ def bulk_upsert_metrics(session: Session, table: Table, metrics_records: list[di
     """Bulk upsert metrics into the database.
 
     Args:
+        session (Session): The SQLAlchemy session
         table (Table): The table to upsert into.
         metrics_records (list[dict]): The metrics records to upsert.
 
@@ -156,7 +158,10 @@ def bulk_upsert_metrics(session: Session, table: Table, metrics_records: list[di
 
     for record in metrics_records:
         select_stmt = select(table).where(
-            and_(
+            and_(
+                table.c.date == record["date"],
+                table.c.aggregation_period == record["aggregation_period"],
+            )
         )
         result = session.execute(select_stmt).fetchone()
         if result:
@@ -165,6 +170,55 @@ def bulk_upsert_metrics(session: Session, table: Table, metrics_records: list[di
     return results  # type: ignore
 
 
+async def abulk_upsert_metrics(session: AsyncSession, table: Table, metrics_records: list[dict]) -> list[dict]:
+    """Async bulk upsert metrics into the database.
+
+    Args:
+        session (AsyncSession): The async SQLAlchemy session
+        table (Table): The table to upsert into.
+        metrics_records (list[dict]): The metrics records to upsert.
+
+    Returns:
+        list[dict]: The upserted metrics records.
+    """
+    if not metrics_records:
+        return []
+
+    results = []
+
+    # MySQL doesn't support returning in the same way as PostgreSQL
+    # We'll need to insert/update and then fetch the records
+    for record in metrics_records:
+        stmt = mysql.insert(table).values(record)
+
+        # Columns to update in case of conflict
+        update_dict = {
+            col.name: record.get(col.name)
+            for col in table.columns
+            if col.name not in ["id", "date", "created_at", "aggregation_period"] and col.name in record
+        }
+
+        stmt = stmt.on_duplicate_key_update(**update_dict)
+        await session.execute(stmt)
+
+    # Fetch the updated records
+    from sqlalchemy import and_, select
+
+    for record in metrics_records:
+        select_stmt = select(table).where(
+            and_(
+                table.c.date == record["date"],
+                table.c.aggregation_period == record["aggregation_period"],
+            )
+        )
+        result = await session.execute(select_stmt)
+        fetched_row = result.fetchone()
+        if fetched_row:
+            results.append(dict(fetched_row._mapping))
+
+    return results
+
+
 def calculate_date_metrics(date_to_process: date, sessions_data: dict) -> dict:
     """Calculate metrics for the given single date.
 
@@ -299,7 +353,9 @@ def get_dates_to_calculate_metrics_for(starting_date: date) -> list[date]:
 
 
 # -- Cultural Knowledge util methods --
-def serialize_cultural_knowledge_for_db(
+def serialize_cultural_knowledge_for_db(
+    cultural_knowledge: CulturalKnowledge,
+) -> Dict[str, Any]:
     """Serialize a CulturalKnowledge object for database storage.
 
     Converts the model's separate content, categories, and notes fields
@@ -353,3 +409,80 @@ def deserialize_cultural_knowledge_from_db(db_row: Dict[str, Any]) -> CulturalKn
             "team_id": db_row.get("team_id"),
         }
     )
+
+
+# -- Async DB util methods --
+async def acreate_schema(session: AsyncSession, db_schema: str) -> None:
+    """Async version: Create the database schema if it doesn't exist.
+
+    Args:
+        session: The async SQLAlchemy session to use
+        db_schema (str): The definition of the database schema to create
+    """
+    try:
+        log_debug(f"Creating database if not exists: {db_schema}")
+        # MySQL uses CREATE DATABASE instead of CREATE SCHEMA
+        await session.execute(text(f"CREATE DATABASE IF NOT EXISTS `{db_schema}`;"))
+    except Exception as e:
+        log_warning(f"Could not create database {db_schema}: {e}")
+
+
+async def ais_table_available(session: AsyncSession, table_name: str, db_schema: str) -> bool:
+    """Async version: Check if a table with the given name exists in the given schema.
+
+    Returns:
+        bool: True if the table exists, False otherwise.
+    """
+    try:
+        exists_query = text(
+            "SELECT 1 FROM information_schema.tables WHERE table_schema = :schema AND table_name = :table"
+        )
+        result = await session.execute(exists_query, {"schema": db_schema, "table": table_name})
+        exists = result.scalar() is not None
+        if not exists:
+            log_debug(f"Table {db_schema}.{table_name} {'exists' if exists else 'does not exist'}")
+
+        return exists
+
+    except Exception as e:
+        log_error(f"Error checking if table exists: {e}")
+        return False
+
+
+async def ais_valid_table(db_engine: AsyncEngine, table_name: str, table_type: str, db_schema: str) -> bool:
+    """Async version: Check if the existing table has the expected column names.
+
+    Args:
+        db_engine: Async database engine
+        table_name (str): Name of the table to validate
+        table_type (str): Type of table (for schema lookup)
+        db_schema (str): Database schema name
+
+    Returns:
+        bool: True if table has all expected columns, False otherwise
+    """
+    try:
+        expected_table_schema = get_table_schema_definition(table_type)
+        expected_columns = {col_name for col_name in expected_table_schema.keys() if not col_name.startswith("_")}
+
+        # Get existing columns from the async engine
+        async with db_engine.connect() as conn:
+            existing_columns = await conn.run_sync(_get_table_columns, table_name, db_schema)
+
+        # Check if all expected columns exist
+        missing_columns = expected_columns - existing_columns
+        if missing_columns:
+            log_warning(f"Missing columns {missing_columns} in table {db_schema}.{table_name}")
+            return False
+
+        return True
+    except Exception as e:
+        log_error(f"Error validating table schema for {db_schema}.{table_name}: {e}")
+        return False
+
+
+def _get_table_columns(connection, table_name: str, db_schema: str) -> set[str]:
+    """Helper function to get table columns using sync inspector."""
+    inspector = inspect(connection)
+    columns_info = inspector.get_columns(table_name, schema=db_schema)
+    return {col["name"] for col in columns_info}
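The new async helpers added here are plain module-level coroutines, so they can be driven directly from an `AsyncSession` / `AsyncEngine`. A minimal sketch under stated assumptions: the aiomysql URL and the `agno_sessions` table name are illustrative, while the function signatures and the `"sessions"` table type come from the diff above.

```python
# Sketch only: URL/driver and table name are illustrative.
import asyncio

from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine

from agno.db.mysql.utils import acreate_schema, ais_table_available, ais_valid_table


async def main() -> None:
    engine = create_async_engine("mysql+aiomysql://user:pass@localhost:3306/ai")
    session_factory = async_sessionmaker(engine)

    async with session_factory() as session, session.begin():
        # CREATE DATABASE IF NOT EXISTS for the target schema.
        await acreate_schema(session=session, db_schema="ai")
        # Existence check against information_schema.tables.
        print(await ais_table_available(session=session, table_name="agno_sessions", db_schema="ai"))

    # Column-level validation runs against the engine, not a session.
    print(await ais_valid_table(db_engine=engine, table_name="agno_sessions", table_type="sessions", db_schema="ai"))


asyncio.run(main())
```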
agno/db/postgres/async_postgres.py
CHANGED
@@ -56,6 +56,7 @@ class AsyncPostgresDb(AsyncBaseDb):
         traces_table: Optional[str] = None,
         spans_table: Optional[str] = None,
         versions_table: Optional[str] = None,
+        create_schema: bool = True,
         db_id: Optional[str] = None,  # Deprecated, use id instead.
     ):
         """
@@ -80,6 +81,8 @@ class AsyncPostgresDb(AsyncBaseDb):
             traces_table (Optional[str]): Name of the table to store run traces.
             spans_table (Optional[str]): Name of the table to store span events.
             versions_table (Optional[str]): Name of the table to store schema versions.
+            create_schema (bool): Whether to automatically create the database schema if it doesn't exist.
+                Set to False if schema is managed externally (e.g., via migrations). Defaults to True.
             db_id: Deprecated, use id instead.
 
         Raises:
@@ -116,6 +119,7 @@ class AsyncPostgresDb(AsyncBaseDb):
         self.db_engine: AsyncEngine = _engine
         self.db_schema: str = db_schema if db_schema is not None else "ai"
         self.metadata: MetaData = MetaData(schema=self.db_schema)
+        self.create_schema: bool = create_schema
 
         # Initialize database session factory
         self.async_session_factory = async_sessionmaker(
@@ -200,8 +204,9 @@ class AsyncPostgresDb(AsyncBaseDb):
             idx_name = f"idx_{table_name}_{idx_col}"
             table.append_constraint(Index(idx_name, idx_col))
 
-        async with self.async_session_factory() as sess, sess.begin():
-            await acreate_schema(session=sess, db_schema=self.db_schema)
+        if self.create_schema:
+            async with self.async_session_factory() as sess, sess.begin():
+                await acreate_schema(session=sess, db_schema=self.db_schema)
 
         # Create table
         table_created = False
@@ -1237,7 +1242,7 @@ class AsyncPostgresDb(AsyncBaseDb):
             Exception: If an error occurs during upsert.
         """
         try:
-            table = await self._get_table(table_type="culture")
+            table = await self._get_table(table_type="culture", create_table_if_not_found=True)
 
             # Generate ID if not present
             if cultural_knowledge.id is None:
@@ -1381,7 +1386,7 @@ class AsyncPostgresDb(AsyncBaseDb):
             Exception: If an error occurs during upsert.
         """
         try:
-            table = await self._get_table(table_type="memories")
+            table = await self._get_table(table_type="memories", create_table_if_not_found=True)
 
             current_time = int(time.time())
 
@@ -1725,7 +1730,7 @@ class AsyncPostgresDb(AsyncBaseDb):
             Optional[KnowledgeRow]: The upserted knowledge row, or None if the operation fails.
         """
         try:
-            table = await self._get_table(table_type="knowledge")
+            table = await self._get_table(table_type="knowledge", create_table_if_not_found=True)
             async with self.async_session_factory() as sess, sess.begin():
                 # Get the actual table columns to avoid "unconsumed column names" error
                 table_columns = set(table.columns.keys())
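The async interface mirrors the sync classes' new toggle. A minimal sketch based on the constructor arguments visible above and the matching sync signature; the asyncpg URL is illustrative.

```python
# Sketch only: the connection URL/driver is illustrative.
from agno.db.postgres.async_postgres import AsyncPostgresDb

db = AsyncPostgresDb(
    db_url="postgresql+asyncpg://user:pass@localhost:5432/ai",
    db_schema="ai",
    create_schema=False,  # skip automatic CREATE SCHEMA; handled by external migrations
)
```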
agno/db/postgres/postgres.py
CHANGED
@@ -57,6 +57,7 @@ class PostgresDb(BaseDb):
         spans_table: Optional[str] = None,
         versions_table: Optional[str] = None,
         id: Optional[str] = None,
+        create_schema: bool = True,
     ):
         """
         Interface for interacting with a PostgreSQL database.
@@ -80,6 +81,8 @@ class PostgresDb(BaseDb):
             spans_table (Optional[str]): Name of the table to store span events.
             versions_table (Optional[str]): Name of the table to store schema versions.
             id (Optional[str]): ID of the database.
+            create_schema (bool): Whether to automatically create the database schema if it doesn't exist.
+                Set to False if schema is managed externally (e.g., via migrations). Defaults to True.
 
         Raises:
             ValueError: If neither db_url nor db_engine is provided.
@@ -115,6 +118,7 @@ class PostgresDb(BaseDb):
 
         self.db_schema: str = db_schema if db_schema is not None else "ai"
         self.metadata: MetaData = MetaData(schema=self.db_schema)
+        self.create_schema: bool = create_schema
 
         # Initialize database session
         self.Session: scoped_session = scoped_session(sessionmaker(bind=self.db_engine, expire_on_commit=False))
@@ -204,8 +208,9 @@ class PostgresDb(BaseDb):
             idx_name = f"idx_{table_name}_{idx_col}"
             table.append_constraint(Index(idx_name, idx_col))
 
-        with self.Session() as sess, sess.begin():
-            create_schema(session=sess, db_schema=self.db_schema)
+        if self.create_schema:
+            with self.Session() as sess, sess.begin():
+                create_schema(session=sess, db_schema=self.db_schema)
 
         # Create table
         table_created = False
agno/db/singlestore/singlestore.py
CHANGED
@@ -57,6 +57,7 @@ class SingleStoreDb(BaseDb):
         versions_table: Optional[str] = None,
         traces_table: Optional[str] = None,
         spans_table: Optional[str] = None,
+        create_schema: bool = True,
     ):
         """
         Interface for interacting with a SingleStore database.
@@ -78,6 +79,8 @@ class SingleStoreDb(BaseDb):
             eval_table (Optional[str]): Name of the table to store evaluation runs data.
             knowledge_table (Optional[str]): Name of the table to store knowledge content.
             versions_table (Optional[str]): Name of the table to store schema versions.
+            create_schema (bool): Whether to automatically create the database schema if it doesn't exist.
+                Set to False if schema is managed externally (e.g., via migrations). Defaults to True.
         Raises:
             ValueError: If neither db_url nor db_engine is provided.
             ValueError: If none of the tables are provided.
@@ -117,6 +120,7 @@ class SingleStoreDb(BaseDb):
         self.db_engine: Engine = _engine
         self.db_schema: Optional[str] = db_schema
         self.metadata: MetaData = MetaData(schema=self.db_schema)
+        self.create_schema: bool = create_schema
 
         # Initialize database session
         self.Session: scoped_session = scoped_session(sessionmaker(bind=self.db_engine))
@@ -240,7 +244,7 @@ class SingleStoreDb(BaseDb):
             table.append_constraint(Index(idx_name, idx_col))
 
         # Create schema if one is specified
-        if self.db_schema is not None:
+        if self.create_schema and self.db_schema is not None:
             with self.Session() as sess, sess.begin():
                 create_schema(session=sess, db_schema=self.db_schema)
 
CHANGED
|
@@ -1540,7 +1540,7 @@ class AsyncSqliteDb(AsyncBaseDb):
|
|
|
1540
1540
|
Exception: If an error occurs during retrieval.
|
|
1541
1541
|
"""
|
|
1542
1542
|
try:
|
|
1543
|
-
table = await self._get_table(table_type="sessions")
|
|
1543
|
+
table = await self._get_table(table_type="sessions", create_table_if_not_found=True)
|
|
1544
1544
|
if table is None:
|
|
1545
1545
|
return []
|
|
1546
1546
|
|
|
@@ -1878,7 +1878,7 @@ class AsyncSqliteDb(AsyncBaseDb):
|
|
|
1878
1878
|
Exception: If an error occurs during creation.
|
|
1879
1879
|
"""
|
|
1880
1880
|
try:
|
|
1881
|
-
table = await self._get_table(table_type="evals")
|
|
1881
|
+
table = await self._get_table(table_type="evals", create_table_if_not_found=True)
|
|
1882
1882
|
if table is None:
|
|
1883
1883
|
return None
|
|
1884
1884
|
|
agno/eval/__init__.py
CHANGED
@@ -1,4 +1,10 @@
 from agno.eval.accuracy import AccuracyAgentResponse, AccuracyEval, AccuracyEvaluation, AccuracyResult
+from agno.eval.agent_as_judge import (
+    AgentAsJudgeEval,
+    AgentAsJudgeEvaluation,
+    AgentAsJudgeResult,
+)
+from agno.eval.base import BaseEval
 from agno.eval.performance import PerformanceEval, PerformanceResult
 from agno.eval.reliability import ReliabilityEval, ReliabilityResult
 
@@ -7,6 +13,10 @@ __all__ = [
     "AccuracyEvaluation",
     "AccuracyResult",
     "AccuracyEval",
+    "AgentAsJudgeEval",
+    "AgentAsJudgeEvaluation",
+    "AgentAsJudgeResult",
+    "BaseEval",
     "PerformanceEval",
     "PerformanceResult",
     "ReliabilityEval",