agno 2.3.8__py3-none-any.whl → 2.3.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. agno/agent/agent.py +134 -94
  2. agno/db/mysql/__init__.py +2 -1
  3. agno/db/mysql/async_mysql.py +2888 -0
  4. agno/db/mysql/mysql.py +17 -8
  5. agno/db/mysql/utils.py +139 -6
  6. agno/db/postgres/async_postgres.py +10 -5
  7. agno/db/postgres/postgres.py +7 -2
  8. agno/db/schemas/evals.py +1 -0
  9. agno/db/singlestore/singlestore.py +5 -1
  10. agno/db/sqlite/async_sqlite.py +3 -3
  11. agno/eval/__init__.py +10 -0
  12. agno/eval/accuracy.py +11 -8
  13. agno/eval/agent_as_judge.py +861 -0
  14. agno/eval/base.py +29 -0
  15. agno/eval/utils.py +2 -1
  16. agno/exceptions.py +7 -0
  17. agno/knowledge/embedder/openai.py +8 -8
  18. agno/knowledge/knowledge.py +1142 -176
  19. agno/media.py +22 -6
  20. agno/models/aws/claude.py +8 -7
  21. agno/models/base.py +61 -2
  22. agno/models/deepseek/deepseek.py +67 -0
  23. agno/models/google/gemini.py +134 -51
  24. agno/models/google/utils.py +22 -0
  25. agno/models/message.py +5 -0
  26. agno/models/openai/chat.py +4 -0
  27. agno/os/app.py +64 -74
  28. agno/os/interfaces/a2a/router.py +3 -4
  29. agno/os/interfaces/agui/router.py +2 -0
  30. agno/os/router.py +3 -1607
  31. agno/os/routers/agents/__init__.py +3 -0
  32. agno/os/routers/agents/router.py +581 -0
  33. agno/os/routers/agents/schema.py +261 -0
  34. agno/os/routers/evals/evals.py +26 -6
  35. agno/os/routers/evals/schemas.py +34 -2
  36. agno/os/routers/evals/utils.py +77 -18
  37. agno/os/routers/knowledge/knowledge.py +1 -1
  38. agno/os/routers/teams/__init__.py +3 -0
  39. agno/os/routers/teams/router.py +496 -0
  40. agno/os/routers/teams/schema.py +257 -0
  41. agno/os/routers/workflows/__init__.py +3 -0
  42. agno/os/routers/workflows/router.py +545 -0
  43. agno/os/routers/workflows/schema.py +75 -0
  44. agno/os/schema.py +1 -559
  45. agno/os/utils.py +139 -2
  46. agno/team/team.py +87 -24
  47. agno/tools/file_generation.py +12 -6
  48. agno/tools/firecrawl.py +15 -7
  49. agno/tools/function.py +37 -23
  50. agno/tools/shopify.py +1519 -0
  51. agno/tools/spotify.py +2 -5
  52. agno/utils/hooks.py +64 -5
  53. agno/utils/http.py +2 -2
  54. agno/utils/media.py +11 -1
  55. agno/utils/print_response/agent.py +8 -0
  56. agno/utils/print_response/team.py +8 -0
  57. agno/vectordb/pgvector/pgvector.py +88 -51
  58. agno/workflow/parallel.py +5 -3
  59. agno/workflow/step.py +14 -2
  60. agno/workflow/types.py +38 -2
  61. agno/workflow/workflow.py +12 -4
  62. {agno-2.3.8.dist-info → agno-2.3.10.dist-info}/METADATA +7 -2
  63. {agno-2.3.8.dist-info → agno-2.3.10.dist-info}/RECORD +66 -52
  64. {agno-2.3.8.dist-info → agno-2.3.10.dist-info}/WHEEL +0 -0
  65. {agno-2.3.8.dist-info → agno-2.3.10.dist-info}/licenses/LICENSE +0 -0
  66. {agno-2.3.8.dist-info → agno-2.3.10.dist-info}/top_level.txt +0 -0
agno/db/mysql/mysql.py CHANGED
@@ -3,8 +3,6 @@ from datetime import date, datetime, timedelta, timezone
3
3
  from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Tuple, Union
4
4
  from uuid import uuid4
5
5
 
6
- from sqlalchemy import ForeignKey, Index, UniqueConstraint
7
-
8
6
  if TYPE_CHECKING:
9
7
  from agno.tracing.schemas import Span, Trace
10
8
 
@@ -32,7 +30,7 @@ from agno.utils.log import log_debug, log_error, log_info, log_warning
32
30
  from agno.utils.string import generate_id
33
31
 
34
32
  try:
35
- from sqlalchemy import TEXT, and_, cast, func, update
33
+ from sqlalchemy import TEXT, ForeignKey, Index, UniqueConstraint, and_, cast, func, update
36
34
  from sqlalchemy.dialects import mysql
37
35
  from sqlalchemy.engine import Engine, create_engine
38
36
  from sqlalchemy.orm import scoped_session, sessionmaker
@@ -45,6 +43,7 @@ except ImportError:
45
43
  class MySQLDb(BaseDb):
46
44
  def __init__(
47
45
  self,
46
+ id: Optional[str] = None,
48
47
  db_engine: Optional[Engine] = None,
49
48
  db_schema: Optional[str] = None,
50
49
  db_url: Optional[str] = None,
@@ -57,7 +56,7 @@ class MySQLDb(BaseDb):
57
56
  traces_table: Optional[str] = None,
58
57
  spans_table: Optional[str] = None,
59
58
  versions_table: Optional[str] = None,
60
- id: Optional[str] = None,
59
+ create_schema: bool = True,
61
60
  ):
62
61
  """
63
62
  Interface for interacting with a MySQL database.
@@ -68,6 +67,7 @@ class MySQLDb(BaseDb):
68
67
  3. Raise an error if neither is provided
69
68
 
70
69
  Args:
70
+ id (Optional[str]): ID of the database.
71
71
  db_url (Optional[str]): The database URL to connect to.
72
72
  db_engine (Optional[Engine]): The SQLAlchemy database engine to use.
73
73
  db_schema (Optional[str]): The database schema to use.
@@ -80,7 +80,8 @@ class MySQLDb(BaseDb):
80
80
  traces_table (Optional[str]): Name of the table to store run traces.
81
81
  spans_table (Optional[str]): Name of the table to store span events.
82
82
  versions_table (Optional[str]): Name of the table to store schema versions.
83
- id (Optional[str]): ID of the database.
83
+ create_schema (bool): Whether to automatically create the database schema if it doesn't exist.
84
+ Set to False if schema is managed externally (e.g., via migrations). Defaults to True.
84
85
 
85
86
  Raises:
86
87
  ValueError: If neither db_url nor db_engine is provided.
@@ -115,6 +116,7 @@ class MySQLDb(BaseDb):
115
116
  self.db_engine: Engine = _engine
116
117
  self.db_schema: str = db_schema if db_schema is not None else "ai"
117
118
  self.metadata: MetaData = MetaData(schema=self.db_schema)
119
+ self.create_schema: bool = create_schema
118
120
 
119
121
  # Initialize database session
120
122
  self.Session: scoped_session = scoped_session(sessionmaker(bind=self.db_engine))
@@ -190,8 +192,9 @@ class MySQLDb(BaseDb):
190
192
  idx_name = f"idx_{table_name}_{idx_col}"
191
193
  table.append_constraint(Index(idx_name, idx_col))
192
194
 
193
- with self.Session() as sess, sess.begin():
194
- create_schema(session=sess, db_schema=self.db_schema)
195
+ if self.create_schema:
196
+ with self.Session() as sess, sess.begin():
197
+ create_schema(session=sess, db_schema=self.db_schema)
195
198
 
196
199
  # Create table
197
200
  table_created = False
@@ -252,6 +255,9 @@ class MySQLDb(BaseDb):
252
255
  (self.metrics_table_name, "metrics"),
253
256
  (self.eval_table_name, "evals"),
254
257
  (self.knowledge_table_name, "knowledge"),
258
+ (self.culture_table_name, "culture"),
259
+ (self.trace_table_name, "traces"),
260
+ (self.span_table_name, "spans"),
255
261
  (self.versions_table_name, "versions"),
256
262
  ]
257
263
 
@@ -1109,9 +1115,12 @@ class MySQLDb(BaseDb):
1109
1115
  except Exception as e:
1110
1116
  log_error(f"Error deleting user memories: {e}")
1111
1117
 
1112
- def get_all_memory_topics(self) -> List[str]:
1118
+ def get_all_memory_topics(self, user_id: Optional[str] = None) -> List[str]:
1113
1119
  """Get all memory topics from the database.
1114
1120
 
1121
+ Args:
1122
+ user_id (Optional[str]): Optional user ID to filter topics.
1123
+
1115
1124
  Returns:
1116
1125
  List[str]: List of memory topics.
1117
1126
  """
agno/db/mysql/utils.py CHANGED
@@ -5,15 +5,14 @@ from datetime import date, datetime, timedelta, timezone
5
5
  from typing import Any, Dict, List, Optional
6
6
  from uuid import uuid4
7
7
 
8
- from sqlalchemy import Engine
9
-
10
8
  from agno.db.mysql.schemas import get_table_schema_definition
11
9
  from agno.db.schemas.culture import CulturalKnowledge
12
10
  from agno.utils.log import log_debug, log_error, log_warning
13
11
 
14
12
  try:
15
- from sqlalchemy import Table
13
+ from sqlalchemy import Engine, Table
16
14
  from sqlalchemy.dialects import mysql
15
+ from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession
17
16
  from sqlalchemy.inspection import inspect
18
17
  from sqlalchemy.orm import Session
19
18
  from sqlalchemy.sql.expression import text
@@ -91,8 +90,10 @@ def is_valid_table(db_engine: Engine, table_name: str, table_type: str, db_schem
91
90
  Check if the existing table has the expected column names.
92
91
 
93
92
  Args:
93
+ db_engine: Database engine
94
94
  table_name (str): Name of the table to validate
95
- schema (str): Database schema name
95
+ table_type (str): Type of table (for schema lookup)
96
+ db_schema (str): Database schema name
96
97
 
97
98
  Returns:
98
99
  bool: True if table has all expected columns, False otherwise
@@ -123,6 +124,7 @@ def bulk_upsert_metrics(session: Session, table: Table, metrics_records: list[di
123
124
  """Bulk upsert metrics into the database.
124
125
 
125
126
  Args:
127
+ session (Session): The SQLAlchemy session
126
128
  table (Table): The table to upsert into.
127
129
  metrics_records (list[dict]): The metrics records to upsert.
128
130
 
@@ -156,7 +158,10 @@ def bulk_upsert_metrics(session: Session, table: Table, metrics_records: list[di
156
158
 
157
159
  for record in metrics_records:
158
160
  select_stmt = select(table).where(
159
- and_(table.c.date == record["date"], table.c.aggregation_period == record["aggregation_period"])
161
+ and_(
162
+ table.c.date == record["date"],
163
+ table.c.aggregation_period == record["aggregation_period"],
164
+ )
160
165
  )
161
166
  result = session.execute(select_stmt).fetchone()
162
167
  if result:
@@ -165,6 +170,55 @@ def bulk_upsert_metrics(session: Session, table: Table, metrics_records: list[di
165
170
  return results # type: ignore
166
171
 
167
172
 
173
+ async def abulk_upsert_metrics(session: AsyncSession, table: Table, metrics_records: list[dict]) -> list[dict]:
174
+ """Async bulk upsert metrics into the database.
175
+
176
+ Args:
177
+ session (AsyncSession): The async SQLAlchemy session
178
+ table (Table): The table to upsert into.
179
+ metrics_records (list[dict]): The metrics records to upsert.
180
+
181
+ Returns:
182
+ list[dict]: The upserted metrics records.
183
+ """
184
+ if not metrics_records:
185
+ return []
186
+
187
+ results = []
188
+
189
+ # MySQL doesn't support returning in the same way as PostgreSQL
190
+ # We'll need to insert/update and then fetch the records
191
+ for record in metrics_records:
192
+ stmt = mysql.insert(table).values(record)
193
+
194
+ # Columns to update in case of conflict
195
+ update_dict = {
196
+ col.name: record.get(col.name)
197
+ for col in table.columns
198
+ if col.name not in ["id", "date", "created_at", "aggregation_period"] and col.name in record
199
+ }
200
+
201
+ stmt = stmt.on_duplicate_key_update(**update_dict)
202
+ await session.execute(stmt)
203
+
204
+ # Fetch the updated records
205
+ from sqlalchemy import and_, select
206
+
207
+ for record in metrics_records:
208
+ select_stmt = select(table).where(
209
+ and_(
210
+ table.c.date == record["date"],
211
+ table.c.aggregation_period == record["aggregation_period"],
212
+ )
213
+ )
214
+ result = await session.execute(select_stmt)
215
+ fetched_row = result.fetchone()
216
+ if fetched_row:
217
+ results.append(dict(fetched_row._mapping))
218
+
219
+ return results
220
+
221
+
168
222
  def calculate_date_metrics(date_to_process: date, sessions_data: dict) -> dict:
169
223
  """Calculate metrics for the given single date.
170
224
 
@@ -299,7 +353,9 @@ def get_dates_to_calculate_metrics_for(starting_date: date) -> list[date]:
299
353
 
300
354
 
301
355
  # -- Cultural Knowledge util methods --
302
- def serialize_cultural_knowledge_for_db(cultural_knowledge: CulturalKnowledge) -> Dict[str, Any]:
356
+ def serialize_cultural_knowledge_for_db(
357
+ cultural_knowledge: CulturalKnowledge,
358
+ ) -> Dict[str, Any]:
303
359
  """Serialize a CulturalKnowledge object for database storage.
304
360
 
305
361
  Converts the model's separate content, categories, and notes fields
@@ -353,3 +409,80 @@ def deserialize_cultural_knowledge_from_db(db_row: Dict[str, Any]) -> CulturalKn
353
409
  "team_id": db_row.get("team_id"),
354
410
  }
355
411
  )
412
+
413
+
414
+ # -- Async DB util methods --
415
+ async def acreate_schema(session: AsyncSession, db_schema: str) -> None:
416
+ """Async version: Create the database schema if it doesn't exist.
417
+
418
+ Args:
419
+ session: The async SQLAlchemy session to use
420
+ db_schema (str): The definition of the database schema to create
421
+ """
422
+ try:
423
+ log_debug(f"Creating database if not exists: {db_schema}")
424
+ # MySQL uses CREATE DATABASE instead of CREATE SCHEMA
425
+ await session.execute(text(f"CREATE DATABASE IF NOT EXISTS `{db_schema}`;"))
426
+ except Exception as e:
427
+ log_warning(f"Could not create database {db_schema}: {e}")
428
+
429
+
430
+ async def ais_table_available(session: AsyncSession, table_name: str, db_schema: str) -> bool:
431
+ """Async version: Check if a table with the given name exists in the given schema.
432
+
433
+ Returns:
434
+ bool: True if the table exists, False otherwise.
435
+ """
436
+ try:
437
+ exists_query = text(
438
+ "SELECT 1 FROM information_schema.tables WHERE table_schema = :schema AND table_name = :table"
439
+ )
440
+ result = await session.execute(exists_query, {"schema": db_schema, "table": table_name})
441
+ exists = result.scalar() is not None
442
+ if not exists:
443
+ log_debug(f"Table {db_schema}.{table_name} {'exists' if exists else 'does not exist'}")
444
+
445
+ return exists
446
+
447
+ except Exception as e:
448
+ log_error(f"Error checking if table exists: {e}")
449
+ return False
450
+
451
+
452
+ async def ais_valid_table(db_engine: AsyncEngine, table_name: str, table_type: str, db_schema: str) -> bool:
453
+ """Async version: Check if the existing table has the expected column names.
454
+
455
+ Args:
456
+ db_engine: Async database engine
457
+ table_name (str): Name of the table to validate
458
+ table_type (str): Type of table (for schema lookup)
459
+ db_schema (str): Database schema name
460
+
461
+ Returns:
462
+ bool: True if table has all expected columns, False otherwise
463
+ """
464
+ try:
465
+ expected_table_schema = get_table_schema_definition(table_type)
466
+ expected_columns = {col_name for col_name in expected_table_schema.keys() if not col_name.startswith("_")}
467
+
468
+ # Get existing columns from the async engine
469
+ async with db_engine.connect() as conn:
470
+ existing_columns = await conn.run_sync(_get_table_columns, table_name, db_schema)
471
+
472
+ # Check if all expected columns exist
473
+ missing_columns = expected_columns - existing_columns
474
+ if missing_columns:
475
+ log_warning(f"Missing columns {missing_columns} in table {db_schema}.{table_name}")
476
+ return False
477
+
478
+ return True
479
+ except Exception as e:
480
+ log_error(f"Error validating table schema for {db_schema}.{table_name}: {e}")
481
+ return False
482
+
483
+
484
+ def _get_table_columns(connection, table_name: str, db_schema: str) -> set[str]:
485
+ """Helper function to get table columns using sync inspector."""
486
+ inspector = inspect(connection)
487
+ columns_info = inspector.get_columns(table_name, schema=db_schema)
488
+ return {col["name"] for col in columns_info}
@@ -56,6 +56,7 @@ class AsyncPostgresDb(AsyncBaseDb):
56
56
  traces_table: Optional[str] = None,
57
57
  spans_table: Optional[str] = None,
58
58
  versions_table: Optional[str] = None,
59
+ create_schema: bool = True,
59
60
  db_id: Optional[str] = None, # Deprecated, use id instead.
60
61
  ):
61
62
  """
@@ -80,6 +81,8 @@ class AsyncPostgresDb(AsyncBaseDb):
80
81
  traces_table (Optional[str]): Name of the table to store run traces.
81
82
  spans_table (Optional[str]): Name of the table to store span events.
82
83
  versions_table (Optional[str]): Name of the table to store schema versions.
84
+ create_schema (bool): Whether to automatically create the database schema if it doesn't exist.
85
+ Set to False if schema is managed externally (e.g., via migrations). Defaults to True.
83
86
  db_id: Deprecated, use id instead.
84
87
 
85
88
  Raises:
@@ -116,6 +119,7 @@ class AsyncPostgresDb(AsyncBaseDb):
116
119
  self.db_engine: AsyncEngine = _engine
117
120
  self.db_schema: str = db_schema if db_schema is not None else "ai"
118
121
  self.metadata: MetaData = MetaData(schema=self.db_schema)
122
+ self.create_schema: bool = create_schema
119
123
 
120
124
  # Initialize database session factory
121
125
  self.async_session_factory = async_sessionmaker(
@@ -200,8 +204,9 @@ class AsyncPostgresDb(AsyncBaseDb):
200
204
  idx_name = f"idx_{table_name}_{idx_col}"
201
205
  table.append_constraint(Index(idx_name, idx_col))
202
206
 
203
- async with self.async_session_factory() as sess, sess.begin():
204
- await acreate_schema(session=sess, db_schema=self.db_schema)
207
+ if self.create_schema:
208
+ async with self.async_session_factory() as sess, sess.begin():
209
+ await acreate_schema(session=sess, db_schema=self.db_schema)
205
210
 
206
211
  # Create table
207
212
  table_created = False
@@ -1237,7 +1242,7 @@ class AsyncPostgresDb(AsyncBaseDb):
1237
1242
  Exception: If an error occurs during upsert.
1238
1243
  """
1239
1244
  try:
1240
- table = await self._get_table(table_type="culture")
1245
+ table = await self._get_table(table_type="culture", create_table_if_not_found=True)
1241
1246
 
1242
1247
  # Generate ID if not present
1243
1248
  if cultural_knowledge.id is None:
@@ -1381,7 +1386,7 @@ class AsyncPostgresDb(AsyncBaseDb):
1381
1386
  Exception: If an error occurs during upsert.
1382
1387
  """
1383
1388
  try:
1384
- table = await self._get_table(table_type="memories")
1389
+ table = await self._get_table(table_type="memories", create_table_if_not_found=True)
1385
1390
 
1386
1391
  current_time = int(time.time())
1387
1392
 
@@ -1725,7 +1730,7 @@ class AsyncPostgresDb(AsyncBaseDb):
1725
1730
  Optional[KnowledgeRow]: The upserted knowledge row, or None if the operation fails.
1726
1731
  """
1727
1732
  try:
1728
- table = await self._get_table(table_type="knowledge")
1733
+ table = await self._get_table(table_type="knowledge", create_table_if_not_found=True)
1729
1734
  async with self.async_session_factory() as sess, sess.begin():
1730
1735
  # Get the actual table columns to avoid "unconsumed column names" error
1731
1736
  table_columns = set(table.columns.keys())
@@ -57,6 +57,7 @@ class PostgresDb(BaseDb):
57
57
  spans_table: Optional[str] = None,
58
58
  versions_table: Optional[str] = None,
59
59
  id: Optional[str] = None,
60
+ create_schema: bool = True,
60
61
  ):
61
62
  """
62
63
  Interface for interacting with a PostgreSQL database.
@@ -80,6 +81,8 @@ class PostgresDb(BaseDb):
80
81
  spans_table (Optional[str]): Name of the table to store span events.
81
82
  versions_table (Optional[str]): Name of the table to store schema versions.
82
83
  id (Optional[str]): ID of the database.
84
+ create_schema (bool): Whether to automatically create the database schema if it doesn't exist.
85
+ Set to False if schema is managed externally (e.g., via migrations). Defaults to True.
83
86
 
84
87
  Raises:
85
88
  ValueError: If neither db_url nor db_engine is provided.
@@ -115,6 +118,7 @@ class PostgresDb(BaseDb):
115
118
 
116
119
  self.db_schema: str = db_schema if db_schema is not None else "ai"
117
120
  self.metadata: MetaData = MetaData(schema=self.db_schema)
121
+ self.create_schema: bool = create_schema
118
122
 
119
123
  # Initialize database session
120
124
  self.Session: scoped_session = scoped_session(sessionmaker(bind=self.db_engine, expire_on_commit=False))
@@ -204,8 +208,9 @@ class PostgresDb(BaseDb):
204
208
  idx_name = f"idx_{table_name}_{idx_col}"
205
209
  table.append_constraint(Index(idx_name, idx_col))
206
210
 
207
- with self.Session() as sess, sess.begin():
208
- create_schema(session=sess, db_schema=self.db_schema)
211
+ if self.create_schema:
212
+ with self.Session() as sess, sess.begin():
213
+ create_schema(session=sess, db_schema=self.db_schema)
209
214
 
210
215
  # Create table
211
216
  table_created = False
agno/db/schemas/evals.py CHANGED
@@ -6,6 +6,7 @@ from pydantic import BaseModel
6
6
 
7
7
  class EvalType(str, Enum):
8
8
  ACCURACY = "accuracy"
9
+ AGENT_AS_JUDGE = "agent_as_judge"
9
10
  PERFORMANCE = "performance"
10
11
  RELIABILITY = "reliability"
11
12
 
@@ -57,6 +57,7 @@ class SingleStoreDb(BaseDb):
57
57
  versions_table: Optional[str] = None,
58
58
  traces_table: Optional[str] = None,
59
59
  spans_table: Optional[str] = None,
60
+ create_schema: bool = True,
60
61
  ):
61
62
  """
62
63
  Interface for interacting with a SingleStore database.
@@ -78,6 +79,8 @@ class SingleStoreDb(BaseDb):
78
79
  eval_table (Optional[str]): Name of the table to store evaluation runs data.
79
80
  knowledge_table (Optional[str]): Name of the table to store knowledge content.
80
81
  versions_table (Optional[str]): Name of the table to store schema versions.
82
+ create_schema (bool): Whether to automatically create the database schema if it doesn't exist.
83
+ Set to False if schema is managed externally (e.g., via migrations). Defaults to True.
81
84
  Raises:
82
85
  ValueError: If neither db_url nor db_engine is provided.
83
86
  ValueError: If none of the tables are provided.
@@ -117,6 +120,7 @@ class SingleStoreDb(BaseDb):
117
120
  self.db_engine: Engine = _engine
118
121
  self.db_schema: Optional[str] = db_schema
119
122
  self.metadata: MetaData = MetaData(schema=self.db_schema)
123
+ self.create_schema: bool = create_schema
120
124
 
121
125
  # Initialize database session
122
126
  self.Session: scoped_session = scoped_session(sessionmaker(bind=self.db_engine))
@@ -240,7 +244,7 @@ class SingleStoreDb(BaseDb):
240
244
  table.append_constraint(Index(idx_name, idx_col))
241
245
 
242
246
  # Create schema if one is specified
243
- if self.db_schema is not None:
247
+ if self.create_schema and self.db_schema is not None:
244
248
  with self.Session() as sess, sess.begin():
245
249
  create_schema(session=sess, db_schema=self.db_schema)
246
250
 
@@ -1540,7 +1540,7 @@ class AsyncSqliteDb(AsyncBaseDb):
1540
1540
  Exception: If an error occurs during retrieval.
1541
1541
  """
1542
1542
  try:
1543
- table = await self._get_table(table_type="sessions")
1543
+ table = await self._get_table(table_type="sessions", create_table_if_not_found=True)
1544
1544
  if table is None:
1545
1545
  return []
1546
1546
 
@@ -1825,7 +1825,7 @@ class AsyncSqliteDb(AsyncBaseDb):
1825
1825
  Optional[KnowledgeRow]: The upserted knowledge row, or None if the operation fails.
1826
1826
  """
1827
1827
  try:
1828
- table = await self._get_table(table_type="knowledge")
1828
+ table = await self._get_table(table_type="knowledge", create_table_if_not_found=True)
1829
1829
  if table is None:
1830
1830
  return None
1831
1831
 
@@ -1878,7 +1878,7 @@ class AsyncSqliteDb(AsyncBaseDb):
1878
1878
  Exception: If an error occurs during creation.
1879
1879
  """
1880
1880
  try:
1881
- table = await self._get_table(table_type="evals")
1881
+ table = await self._get_table(table_type="evals", create_table_if_not_found=True)
1882
1882
  if table is None:
1883
1883
  return None
1884
1884
 
agno/eval/__init__.py CHANGED
@@ -1,4 +1,10 @@
1
1
  from agno.eval.accuracy import AccuracyAgentResponse, AccuracyEval, AccuracyEvaluation, AccuracyResult
2
+ from agno.eval.agent_as_judge import (
3
+ AgentAsJudgeEval,
4
+ AgentAsJudgeEvaluation,
5
+ AgentAsJudgeResult,
6
+ )
7
+ from agno.eval.base import BaseEval
2
8
  from agno.eval.performance import PerformanceEval, PerformanceResult
3
9
  from agno.eval.reliability import ReliabilityEval, ReliabilityResult
4
10
 
@@ -7,6 +13,10 @@ __all__ = [
7
13
  "AccuracyEvaluation",
8
14
  "AccuracyResult",
9
15
  "AccuracyEval",
16
+ "AgentAsJudgeEval",
17
+ "AgentAsJudgeEvaluation",
18
+ "AgentAsJudgeResult",
19
+ "BaseEval",
10
20
  "PerformanceEval",
11
21
  "PerformanceResult",
12
22
  "ReliabilityEval",
agno/eval/accuracy.py CHANGED
@@ -282,7 +282,8 @@ Remember: You must only compare the agent_output to the expected_output. The exp
282
282
  ) -> Optional[AccuracyEvaluation]:
283
283
  """Orchestrate the evaluation process."""
284
284
  try:
285
- accuracy_agent_response = evaluator_agent.run(evaluation_input).content
285
+ response = evaluator_agent.run(evaluation_input, stream=False)
286
+ accuracy_agent_response = response.content
286
287
  if accuracy_agent_response is None or not isinstance(accuracy_agent_response, AccuracyAgentResponse):
287
288
  raise EvalError(f"Evaluator Agent returned an invalid response: {accuracy_agent_response}")
288
289
  return AccuracyEvaluation(
@@ -306,7 +307,7 @@ Remember: You must only compare the agent_output to the expected_output. The exp
306
307
  ) -> Optional[AccuracyEvaluation]:
307
308
  """Orchestrate the evaluation process asynchronously."""
308
309
  try:
309
- response = await evaluator_agent.arun(evaluation_input)
310
+ response = await evaluator_agent.arun(evaluation_input, stream=False)
310
311
  accuracy_agent_response = response.content
311
312
  if accuracy_agent_response is None or not isinstance(accuracy_agent_response, AccuracyAgentResponse):
312
313
  raise EvalError(f"Evaluator Agent returned an invalid response: {accuracy_agent_response}")
@@ -362,9 +363,11 @@ Remember: You must only compare the agent_output to the expected_output. The exp
362
363
  agent_session_id = f"eval_{self.eval_id}_{i + 1}"
363
364
 
364
365
  if self.agent is not None:
365
- output = self.agent.run(input=eval_input, session_id=agent_session_id).content
366
+ agent_response = self.agent.run(input=eval_input, session_id=agent_session_id, stream=False)
367
+ output = agent_response.content
366
368
  elif self.team is not None:
367
- output = self.team.run(input=eval_input, session_id=agent_session_id).content
369
+ team_response = self.team.run(input=eval_input, session_id=agent_session_id, stream=False)
370
+ output = team_response.content
368
371
 
369
372
  if not output:
370
373
  logger.error(f"Failed to generate a valid answer on iteration {i + 1}: {output}")
@@ -505,11 +508,11 @@ Remember: You must only compare the agent_output to the expected_output. The exp
505
508
  agent_session_id = f"eval_{self.eval_id}_{i + 1}"
506
509
 
507
510
  if self.agent is not None:
508
- response = await self.agent.arun(input=eval_input, session_id=agent_session_id)
509
- output = response.content
511
+ agent_response = await self.agent.arun(input=eval_input, session_id=agent_session_id, stream=False)
512
+ output = agent_response.content
510
513
  elif self.team is not None:
511
- response = await self.team.arun(input=eval_input, session_id=agent_session_id) # type: ignore
512
- output = response.content
514
+ team_response = await self.team.arun(input=eval_input, session_id=agent_session_id, stream=False)
515
+ output = team_response.content
513
516
 
514
517
  if not output:
515
518
  logger.error(f"Failed to generate a valid answer on iteration {i + 1}: {output}")