agno 2.1.4__py3-none-any.whl → 2.1.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (95)
  1. agno/agent/agent.py +1775 -538
  2. agno/db/async_postgres/__init__.py +3 -0
  3. agno/db/async_postgres/async_postgres.py +1668 -0
  4. agno/db/async_postgres/schemas.py +124 -0
  5. agno/db/async_postgres/utils.py +289 -0
  6. agno/db/base.py +237 -2
  7. agno/db/dynamo/dynamo.py +2 -2
  8. agno/db/firestore/firestore.py +2 -2
  9. agno/db/firestore/utils.py +4 -2
  10. agno/db/gcs_json/gcs_json_db.py +2 -2
  11. agno/db/in_memory/in_memory_db.py +2 -2
  12. agno/db/json/json_db.py +2 -2
  13. agno/db/migrations/v1_to_v2.py +43 -13
  14. agno/db/mongo/mongo.py +14 -6
  15. agno/db/mongo/utils.py +0 -4
  16. agno/db/mysql/mysql.py +23 -13
  17. agno/db/postgres/postgres.py +17 -6
  18. agno/db/redis/redis.py +2 -2
  19. agno/db/singlestore/singlestore.py +19 -10
  20. agno/db/sqlite/sqlite.py +22 -12
  21. agno/db/sqlite/utils.py +8 -3
  22. agno/db/surrealdb/__init__.py +3 -0
  23. agno/db/surrealdb/metrics.py +292 -0
  24. agno/db/surrealdb/models.py +259 -0
  25. agno/db/surrealdb/queries.py +71 -0
  26. agno/db/surrealdb/surrealdb.py +1193 -0
  27. agno/db/surrealdb/utils.py +87 -0
  28. agno/eval/accuracy.py +50 -43
  29. agno/eval/performance.py +6 -3
  30. agno/eval/reliability.py +6 -3
  31. agno/eval/utils.py +33 -16
  32. agno/exceptions.py +8 -2
  33. agno/knowledge/knowledge.py +260 -46
  34. agno/knowledge/reader/pdf_reader.py +4 -6
  35. agno/knowledge/reader/reader_factory.py +2 -3
  36. agno/memory/manager.py +254 -46
  37. agno/models/anthropic/claude.py +37 -0
  38. agno/os/app.py +8 -7
  39. agno/os/interfaces/a2a/router.py +3 -5
  40. agno/os/interfaces/agui/router.py +4 -1
  41. agno/os/interfaces/agui/utils.py +27 -6
  42. agno/os/interfaces/slack/router.py +2 -4
  43. agno/os/mcp.py +98 -41
  44. agno/os/router.py +23 -0
  45. agno/os/routers/evals/evals.py +52 -20
  46. agno/os/routers/evals/utils.py +14 -14
  47. agno/os/routers/knowledge/knowledge.py +130 -9
  48. agno/os/routers/knowledge/schemas.py +57 -0
  49. agno/os/routers/memory/memory.py +116 -44
  50. agno/os/routers/metrics/metrics.py +16 -6
  51. agno/os/routers/session/session.py +65 -22
  52. agno/os/schema.py +36 -0
  53. agno/os/utils.py +64 -11
  54. agno/reasoning/anthropic.py +80 -0
  55. agno/reasoning/gemini.py +73 -0
  56. agno/reasoning/openai.py +5 -0
  57. agno/reasoning/vertexai.py +76 -0
  58. agno/session/workflow.py +3 -3
  59. agno/team/team.py +968 -179
  60. agno/tools/googlesheets.py +20 -5
  61. agno/tools/mcp_toolbox.py +3 -3
  62. agno/tools/scrapegraph.py +1 -1
  63. agno/utils/models/claude.py +3 -1
  64. agno/utils/streamlit.py +1 -1
  65. agno/vectordb/base.py +22 -1
  66. agno/vectordb/cassandra/cassandra.py +9 -0
  67. agno/vectordb/chroma/chromadb.py +26 -6
  68. agno/vectordb/clickhouse/clickhousedb.py +9 -1
  69. agno/vectordb/couchbase/couchbase.py +11 -0
  70. agno/vectordb/lancedb/lance_db.py +20 -0
  71. agno/vectordb/langchaindb/langchaindb.py +11 -0
  72. agno/vectordb/lightrag/lightrag.py +9 -0
  73. agno/vectordb/llamaindex/llamaindexdb.py +15 -1
  74. agno/vectordb/milvus/milvus.py +23 -0
  75. agno/vectordb/mongodb/mongodb.py +22 -0
  76. agno/vectordb/pgvector/pgvector.py +19 -0
  77. agno/vectordb/pineconedb/pineconedb.py +35 -4
  78. agno/vectordb/qdrant/qdrant.py +24 -0
  79. agno/vectordb/singlestore/singlestore.py +25 -17
  80. agno/vectordb/surrealdb/surrealdb.py +18 -2
  81. agno/vectordb/upstashdb/upstashdb.py +26 -1
  82. agno/vectordb/weaviate/weaviate.py +18 -0
  83. agno/workflow/condition.py +4 -0
  84. agno/workflow/loop.py +4 -0
  85. agno/workflow/parallel.py +4 -0
  86. agno/workflow/router.py +4 -0
  87. agno/workflow/step.py +30 -14
  88. agno/workflow/steps.py +4 -0
  89. agno/workflow/types.py +2 -2
  90. agno/workflow/workflow.py +328 -61
  91. {agno-2.1.4.dist-info → agno-2.1.6.dist-info}/METADATA +100 -41
  92. {agno-2.1.4.dist-info → agno-2.1.6.dist-info}/RECORD +95 -82
  93. {agno-2.1.4.dist-info → agno-2.1.6.dist-info}/WHEEL +0 -0
  94. {agno-2.1.4.dist-info → agno-2.1.6.dist-info}/licenses/LICENSE +0 -0
  95. {agno-2.1.4.dist-info → agno-2.1.6.dist-info}/top_level.txt +0 -0
agno/db/async_postgres/schemas.py ADDED
@@ -0,0 +1,124 @@
+ """Table schemas and related utils used by the AsyncPostgresDb class"""
+
+ from typing import Any
+
+ try:
+     from sqlalchemy.types import JSON, BigInteger, Boolean, Date, String
+ except ImportError:
+     raise ImportError("`sqlalchemy` not installed. Please install it using `pip install sqlalchemy`")
+
+ SESSION_TABLE_SCHEMA = {
+     "session_id": {"type": String, "nullable": False},
+     "session_type": {"type": String, "nullable": False, "index": True},
+     "agent_id": {"type": String, "nullable": True},
+     "team_id": {"type": String, "nullable": True},
+     "workflow_id": {"type": String, "nullable": True},
+     "user_id": {"type": String, "nullable": True},
+     "session_data": {"type": JSON, "nullable": True},
+     "agent_data": {"type": JSON, "nullable": True},
+     "team_data": {"type": JSON, "nullable": True},
+     "workflow_data": {"type": JSON, "nullable": True},
+     "metadata": {"type": JSON, "nullable": True},
+     "runs": {"type": JSON, "nullable": True},
+     "summary": {"type": JSON, "nullable": True},
+     "created_at": {"type": BigInteger, "nullable": False, "index": True},
+     "updated_at": {"type": BigInteger, "nullable": True},
+     "_unique_constraints": [
+         {
+             "name": "uq_session_id",
+             "columns": ["session_id"],
+         },
+     ],
+ }
+
+ MEMORY_TABLE_SCHEMA = {
+     "memory_id": {"type": String, "primary_key": True, "nullable": False},
+     "memory": {"type": JSON, "nullable": False},
+     "input": {"type": String, "nullable": True},
+     "agent_id": {"type": String, "nullable": True},
+     "team_id": {"type": String, "nullable": True},
+     "user_id": {"type": String, "nullable": True, "index": True},
+     "topics": {"type": JSON, "nullable": True},
+     "updated_at": {"type": BigInteger, "nullable": True, "index": True},
+ }
+
+ EVAL_TABLE_SCHEMA = {
+     "run_id": {"type": String, "primary_key": True, "nullable": False},
+     "eval_type": {"type": String, "nullable": False},
+     "eval_data": {"type": JSON, "nullable": False},
+     "eval_input": {"type": JSON, "nullable": False},
+     "name": {"type": String, "nullable": True},
+     "agent_id": {"type": String, "nullable": True},
+     "team_id": {"type": String, "nullable": True},
+     "workflow_id": {"type": String, "nullable": True},
+     "model_id": {"type": String, "nullable": True},
+     "model_provider": {"type": String, "nullable": True},
+     "evaluated_component_name": {"type": String, "nullable": True},
+     "created_at": {"type": BigInteger, "nullable": False, "index": True},
+     "updated_at": {"type": BigInteger, "nullable": True},
+ }
+
+ KNOWLEDGE_TABLE_SCHEMA = {
+     "id": {"type": String, "primary_key": True, "nullable": False},
+     "name": {"type": String, "nullable": False},
+     "description": {"type": String, "nullable": False},
+     "metadata": {"type": JSON, "nullable": True},
+     "type": {"type": String, "nullable": True},
+     "size": {"type": BigInteger, "nullable": True},
+     "linked_to": {"type": String, "nullable": True},
+     "access_count": {"type": BigInteger, "nullable": True},
+     "status": {"type": String, "nullable": True},
+     "status_message": {"type": String, "nullable": True},
+     "created_at": {"type": BigInteger, "nullable": True},
+     "updated_at": {"type": BigInteger, "nullable": True},
+     "external_id": {"type": String, "nullable": True},
+ }
+
+ METRICS_TABLE_SCHEMA = {
+     "id": {"type": String, "primary_key": True, "nullable": False},
+     "agent_runs_count": {"type": BigInteger, "nullable": False, "default": 0},
+     "team_runs_count": {"type": BigInteger, "nullable": False, "default": 0},
+     "workflow_runs_count": {"type": BigInteger, "nullable": False, "default": 0},
+     "agent_sessions_count": {"type": BigInteger, "nullable": False, "default": 0},
+     "team_sessions_count": {"type": BigInteger, "nullable": False, "default": 0},
+     "workflow_sessions_count": {"type": BigInteger, "nullable": False, "default": 0},
+     "users_count": {"type": BigInteger, "nullable": False, "default": 0},
+     "token_metrics": {"type": JSON, "nullable": False, "default": {}},
+     "model_metrics": {"type": JSON, "nullable": False, "default": {}},
+     "date": {"type": Date, "nullable": False, "index": True},
+     "aggregation_period": {"type": String, "nullable": False},
+     "created_at": {"type": BigInteger, "nullable": False},
+     "updated_at": {"type": BigInteger, "nullable": True},
+     "completed": {"type": Boolean, "nullable": False, "default": False},
+     "_unique_constraints": [
+         {
+             "name": "uq_metrics_date_period",
+             "columns": ["date", "aggregation_period"],
+         }
+     ],
+ }
+
+
+ def get_table_schema_definition(table_type: str) -> dict[str, Any]:
+     """
+     Get the expected schema definition for the given table.
+
+     Args:
+         table_type (str): The type of table to get the schema for.
+
+     Returns:
+         Dict[str, Any]: Dictionary containing column definitions for the table
+     """
+     schemas = {
+         "sessions": SESSION_TABLE_SCHEMA,
+         "evals": EVAL_TABLE_SCHEMA,
+         "metrics": METRICS_TABLE_SCHEMA,
+         "memories": MEMORY_TABLE_SCHEMA,
+         "knowledge": KNOWLEDGE_TABLE_SCHEMA,
+     }
+
+     schema = schemas.get(table_type, {})
+     if not schema:
+         raise ValueError(f"Unknown table type: {table_type}")
+
+     return schema  # type: ignore[return-value]
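These definitions are plain column specs; the table-creation logic that consumes them lives in `async_postgres.py`, which this hunk does not show. As a rough illustration of how such a definition maps onto SQLAlchemy Core — `build_table` is a hypothetical helper, not part of the package:

```python
from sqlalchemy import Column, MetaData, Table, UniqueConstraint

from agno.db.async_postgres.schemas import get_table_schema_definition


def build_table(name: str, table_type: str, db_schema: str = "ai") -> Table:
    """Hypothetical helper: materialize a Table from one of the schema dicts above."""
    definition = get_table_schema_definition(table_type)
    columns = [
        Column(
            col_name,
            spec["type"],
            primary_key=spec.get("primary_key", False),
            nullable=spec.get("nullable", True),
            index=spec.get("index", False),
        )
        for col_name, spec in definition.items()
        if not col_name.startswith("_")  # skip meta keys like "_unique_constraints"
    ]
    constraints = [
        UniqueConstraint(*uc["columns"], name=uc["name"])
        for uc in definition.get("_unique_constraints", [])
    ]
    return Table(name, MetaData(schema=db_schema), *columns, *constraints)


sessions_table = build_table("agno_sessions", "sessions")
```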
agno/db/async_postgres/utils.py ADDED
@@ -0,0 +1,289 @@
+ """Utility functions for the AsyncPostgres database class."""
+
+ import time
+ from datetime import date, datetime, timedelta, timezone
+ from typing import Any, Dict, List, Optional
+ from uuid import uuid4
+
+ from sqlalchemy.ext.asyncio import AsyncEngine
+
+ from agno.db.async_postgres.schemas import get_table_schema_definition
+ from agno.utils.log import log_debug, log_error, log_warning
+
+ try:
+     from sqlalchemy import Table
+     from sqlalchemy.dialects import postgresql
+     from sqlalchemy.ext.asyncio import AsyncSession
+     from sqlalchemy.inspection import inspect
+     from sqlalchemy.sql.expression import text
+ except ImportError:
+     raise ImportError("`sqlalchemy` not installed. Please install it using `pip install sqlalchemy`")
+
+
+ # -- DB util methods --
+ def apply_sorting(stmt, table: Table, sort_by: Optional[str] = None, sort_order: Optional[str] = None):
+     """Apply sorting to the given SQLAlchemy statement.
+
+     Args:
+         stmt: The SQLAlchemy statement to modify
+         table: The table being queried
+         sort_by: The field to sort by
+         sort_order: The sort order ('asc' or 'desc')
+
+     Returns:
+         The modified statement with sorting applied
+     """
+     if sort_by is None:
+         return stmt
+
+     if not hasattr(table.c, sort_by):
+         log_debug(f"Invalid sort field: '{sort_by}'. Will not apply any sorting.")
+         return stmt
+
+     # Apply the given sorting
+     sort_column = getattr(table.c, sort_by)
+     if sort_order and sort_order == "asc":
+         return stmt.order_by(sort_column.asc())
+     else:
+         return stmt.order_by(sort_column.desc())
+
+
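`apply_sorting` returns a new statement rather than mutating its input, and falls back to descending order for any `sort_order` other than `"asc"`. A minimal usage sketch, reusing the hypothetical `sessions_table` from the schemas example above:

```python
from sqlalchemy import select

# Oldest sessions first; an unrecognized sort_by is logged and ignored
stmt = apply_sorting(select(sessions_table), sessions_table, sort_by="created_at", sort_order="asc")
```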
+ async def create_schema(session: AsyncSession, db_schema: str) -> None:
+     """Create the database schema if it doesn't exist.
+
+     Args:
+         session: The async SQLAlchemy session to use
+         db_schema (str): The definition of the database schema to create
+     """
+     try:
+         log_debug(f"Creating schema if not exists: {db_schema}")
+         await session.execute(text(f"CREATE SCHEMA IF NOT EXISTS {db_schema};"))
+     except Exception as e:
+         log_warning(f"Could not create schema {db_schema}: {e}")
+
+
+ async def is_table_available(session: AsyncSession, table_name: str, db_schema: str) -> bool:
+     """
+     Check if a table with the given name exists in the given schema.
+
+     Returns:
+         bool: True if the table exists, False otherwise.
+     """
+     try:
+         exists_query = text(
+             "SELECT 1 FROM information_schema.tables WHERE table_schema = :schema AND table_name = :table"
+         )
+         result = await session.execute(exists_query, {"schema": db_schema, "table": table_name})
+         exists = result.scalar() is not None
+         return exists
+
+     except Exception as e:
+         log_error(f"Error checking if table exists: {e}")
+         return False
+
+
+ async def is_valid_table(db_engine: AsyncEngine, table_name: str, table_type: str, db_schema: str) -> bool:
+     """
+     Check if the existing table has the expected column names.
+     Args:
+         db_engine: The async database engine
+         table_name (str): Name of the table to validate
+         table_type (str): Type of the table to get schema for
+         db_schema (str): Database schema name
+     Returns:
+         bool: True if table has all expected columns, False otherwise
+     """
+     try:
+         expected_table_schema = get_table_schema_definition(table_type)
+         expected_columns = {col_name for col_name in expected_table_schema.keys() if not col_name.startswith("_")}
+
+         async with db_engine.connect() as conn:
+
+             def inspect_sync(sync_conn):
+                 inspector = inspect(sync_conn)
+                 return inspector.get_columns(table_name, schema=db_schema)
+
+             existing_columns_info = await conn.run_sync(inspect_sync)
+             existing_columns = set(col["name"] for col in existing_columns_info)
+
+             missing_columns = expected_columns - existing_columns
+             if missing_columns:
+                 log_warning(f"Missing columns {missing_columns} in table {db_schema}.{table_name}")
+                 return False
+
+             log_debug(f"Table {db_schema}.{table_name} has all expected columns")
+             return True
+
+     except Exception as e:
+         log_error(f"Error validating table schema for {db_schema}.{table_name}: {e}")
+         return False
+
+
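SQLAlchemy's `inspect()` API is synchronous, so `is_valid_table` bridges into it with `conn.run_sync()`. A sketch of calling it against a live database; the DSN and the `ai` schema are placeholders, not values taken from this diff:

```python
import asyncio

from sqlalchemy.ext.asyncio import create_async_engine


async def main() -> None:
    # Placeholder connection URL; any asyncpg-backed Postgres DSN works the same way
    engine = create_async_engine("postgresql+asyncpg://user:pass@localhost:5432/agno")
    ok = await is_valid_table(engine, table_name="agno_sessions", table_type="sessions", db_schema="ai")
    print(f"agno_sessions matches the expected schema: {ok}")
    await engine.dispose()


asyncio.run(main())
```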
+ # -- Metrics util methods --
+ async def bulk_upsert_metrics(session: AsyncSession, table: Table, metrics_records: list[dict]) -> list[dict]:
+     """Bulk upsert metrics into the database.
+
+     Args:
+         session: The async session to use
+         table (Table): The table to upsert into.
+         metrics_records (list[dict]): The metrics records to upsert.
+
+     Returns:
+         list[dict]: The upserted metrics records.
+     """
+     if not metrics_records:
+         return []
+
+     results = []
+     stmt = postgresql.insert(table)
+
+     # Columns to update in case of conflict
+     update_columns = {
+         col.name: stmt.excluded[col.name]
+         for col in table.columns
+         if col.name not in ["id", "date", "created_at", "aggregation_period"]
+     }
+
+     stmt = stmt.on_conflict_do_update(index_elements=["date", "aggregation_period"], set_=update_columns).returning(  # type: ignore
+         table
+     )
+     result = await session.execute(stmt, metrics_records)
+     results = [row._mapping for row in result.fetchall()]
+     await session.commit()
+
+     return results  # type: ignore
+
+
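Because the insert targets the `(date, aggregation_period)` columns backing the `uq_metrics_date_period` constraint from `schemas.py`, replaying the same records updates existing rows instead of duplicating them. A small sketch, reusing the hypothetical `engine` and a `metrics_table` built as in the earlier examples:

```python
from sqlalchemy.ext.asyncio import async_sessionmaker

SessionMaker = async_sessionmaker(engine, expire_on_commit=False)


async def upsert_twice(records: list[dict]) -> None:
    async with SessionMaker() as session:
        first = await bulk_upsert_metrics(session, metrics_table, records)
        second = await bulk_upsert_metrics(session, metrics_table, records)
        # Same row count both times: the second call updated in place
        assert len(first) == len(second) == len(records)
```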
+ def calculate_date_metrics(date_to_process: date, sessions_data: dict) -> dict:
+     """Calculate metrics for the given single date.
+
+     Args:
+         date_to_process (date): The date to calculate metrics for.
+         sessions_data (dict): The sessions data to calculate metrics for.
+
+     Returns:
+         dict: The calculated metrics.
+     """
+     metrics = {
+         "users_count": 0,
+         "agent_sessions_count": 0,
+         "team_sessions_count": 0,
+         "workflow_sessions_count": 0,
+         "agent_runs_count": 0,
+         "team_runs_count": 0,
+         "workflow_runs_count": 0,
+     }
+     token_metrics = {
+         "input_tokens": 0,
+         "output_tokens": 0,
+         "total_tokens": 0,
+         "audio_total_tokens": 0,
+         "audio_input_tokens": 0,
+         "audio_output_tokens": 0,
+         "cache_read_tokens": 0,
+         "cache_write_tokens": 0,
+         "reasoning_tokens": 0,
+     }
+     model_counts: Dict[str, int] = {}
+
+     session_types = [
+         ("agent", "agent_sessions_count", "agent_runs_count"),
+         ("team", "team_sessions_count", "team_runs_count"),
+         ("workflow", "workflow_sessions_count", "workflow_runs_count"),
+     ]
+     all_user_ids = set()
+
+     for session_type, sessions_count_key, runs_count_key in session_types:
+         sessions = sessions_data.get(session_type, [])
+         metrics[sessions_count_key] = len(sessions)
+
+         for session in sessions:
+             if session.get("user_id"):
+                 all_user_ids.add(session["user_id"])
+             metrics[runs_count_key] += len(session.get("runs", []))
+             if runs := session.get("runs", []):
+                 for run in runs:
+                     if model_id := run.get("model"):
+                         model_provider = run.get("model_provider", "")
+                         model_counts[f"{model_id}:{model_provider}"] = (
+                             model_counts.get(f"{model_id}:{model_provider}", 0) + 1
+                         )
+
+             session_metrics = session.get("session_data", {}).get("session_metrics", {})
+             for field in token_metrics:
+                 token_metrics[field] += session_metrics.get(field, 0)
+
+     model_metrics = []
+     for model, count in model_counts.items():
+         model_id, model_provider = model.split(":")
+         model_metrics.append({"model_id": model_id, "model_provider": model_provider, "count": count})
+
+     metrics["users_count"] = len(all_user_ids)
+     current_time = int(time.time())
+
+     return {
+         "id": str(uuid4()),
+         "date": date_to_process,
+         "completed": date_to_process < datetime.now(timezone.utc).date(),
+         "token_metrics": token_metrics,
+         "model_metrics": model_metrics,
+         "created_at": current_time,
+         "updated_at": current_time,
+         "aggregation_period": "daily",
+         **metrics,
+     }
+
+
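A worked example of the record `calculate_date_metrics` produces for a single agent session; the session payload is made up, but the asserted values follow directly from the function above:

```python
from datetime import date

sessions_data = {
    "agent": [
        {
            "user_id": "u1",
            "runs": [{"model": "gpt-4o", "model_provider": "openai"}],
            "session_data": {"session_metrics": {"input_tokens": 10, "output_tokens": 5, "total_tokens": 15}},
        }
    ],
    "team": [],
    "workflow": [],
}

record = calculate_date_metrics(date(2025, 1, 1), sessions_data)
assert record["agent_sessions_count"] == 1 and record["agent_runs_count"] == 1
assert record["users_count"] == 1
assert record["token_metrics"]["total_tokens"] == 15
assert record["model_metrics"] == [{"model_id": "gpt-4o", "model_provider": "openai", "count": 1}]
assert record["aggregation_period"] == "daily"
```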
+ def fetch_all_sessions_data(
+     sessions: List[Dict[str, Any]], dates_to_process: list[date], start_timestamp: int
+ ) -> Optional[dict]:
+     """Return all session data for the given dates, for all session types.
+
+     Args:
+         sessions: List of session dictionaries
+         dates_to_process (list[date]): The dates to fetch session data for.
+         start_timestamp: Starting timestamp
+
+     Returns:
+         dict: A dictionary with dates as keys and session data as values, for all session types.
+
+     Example:
+         {
+             "2000-01-01": {
+                 "agent": [<session1>, <session2>, ...],
+                 "team": [...],
+                 "workflow": [...],
+             }
+         }
+     """
+     if not dates_to_process:
+         return None
+
+     all_sessions_data: Dict[str, Dict[str, List[Dict[str, Any]]]] = {
+         date_to_process.isoformat(): {"agent": [], "team": [], "workflow": []} for date_to_process in dates_to_process
+     }
+
+     for session in sessions:
+         session_date = (
+             datetime.fromtimestamp(session.get("created_at", start_timestamp), tz=timezone.utc).date().isoformat()
+         )
+         if session_date in all_sessions_data:
+             all_sessions_data[session_date][session["session_type"]].append(session)
+
+     return all_sessions_data
+
+
+ def get_dates_to_calculate_metrics_for(starting_date: date) -> list[date]:
+     """Return the list of dates to calculate metrics for.
+
+     Args:
+         starting_date (date): The starting date to calculate metrics for.
+
+     Returns:
+         list[date]: The list of dates to calculate metrics for.
+     """
+     today = datetime.now(timezone.utc).date()
+     days_diff = (today - starting_date).days + 1
+     if days_diff <= 0:
+         return []
+     return [starting_date + timedelta(days=x) for x in range(days_diff)]
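Together these helpers form the daily aggregation pipeline: pick the dates, bucket sessions by day, compute one record per day, and persist with the upsert. A minimal sketch of that flow; `session`, `metrics_table`, and `sessions` are assumed to come from the surrounding `AsyncPostgresDb` code, which this hunk does not show:

```python
from datetime import date, datetime, timezone


async def refresh_daily_metrics(session, metrics_table, sessions, starting_date: date) -> list[dict]:
    dates = get_dates_to_calculate_metrics_for(starting_date)
    if not dates:
        return []
    start_ts = int(datetime.combine(dates[0], datetime.min.time(), tzinfo=timezone.utc).timestamp())
    sessions_by_date = fetch_all_sessions_data(sessions, dates, start_ts) or {}
    records = [calculate_date_metrics(d, sessions_by_date.get(d.isoformat(), {})) for d in dates]
    # Conflicts on (date, aggregation_period) update the existing daily rows
    return await bulk_upsert_metrics(session, metrics_table, records)
```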
agno/db/base.py CHANGED
@@ -17,6 +17,8 @@ class SessionType(str, Enum):
 
 
  class BaseDb(ABC):
+     """Base abstract class for all our Database implementations."""
+
      def __init__(
          self,
          session_table: Optional[str] = None,
@@ -83,7 +85,7 @@ class BaseDb(ABC):
 
      @abstractmethod
      def upsert_sessions(
-         self, sessions: List[Session], deserialize: Optional[bool] = True
+         self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
      ) -> List[Union[Session, Dict[str, Any]]]:
          """Bulk upsert multiple sessions for improved performance on large datasets."""
          raise NotImplementedError
@@ -148,7 +150,7 @@ class BaseDb(ABC):
 
      @abstractmethod
      def upsert_memories(
-         self, memories: List[UserMemory], deserialize: Optional[bool] = True
+         self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
      ) -> List[Union[UserMemory, Dict[str, Any]]]:
          """Bulk upsert multiple memories for improved performance on large datasets."""
          raise NotImplementedError
@@ -261,3 +263,236 @@ class BaseDb(ABC):
          self, eval_run_id: str, name: str, deserialize: Optional[bool] = True
      ) -> Optional[Union[EvalRunRecord, Dict[str, Any]]]:
          raise NotImplementedError
+
+
+ class AsyncBaseDb(ABC):
+     """Base abstract class for all our async database implementations."""
+
+     def __init__(
+         self,
+         id: Optional[str] = None,
+         session_table: Optional[str] = None,
+         memory_table: Optional[str] = None,
+         metrics_table: Optional[str] = None,
+         eval_table: Optional[str] = None,
+         knowledge_table: Optional[str] = None,
+     ):
+         self.id = id or str(uuid4())
+         self.session_table_name = session_table or "agno_sessions"
+         self.memory_table_name = memory_table or "agno_memories"
+         self.metrics_table_name = metrics_table or "agno_metrics"
+         self.eval_table_name = eval_table or "agno_eval_runs"
+         self.knowledge_table_name = knowledge_table or "agno_knowledge"
+
+     # --- Sessions ---
+     @abstractmethod
+     async def delete_session(self, session_id: str) -> bool:
+         raise NotImplementedError
+
+     @abstractmethod
+     async def delete_sessions(self, session_ids: List[str]) -> None:
+         raise NotImplementedError
+
+     @abstractmethod
+     async def get_session(
+         self,
+         session_id: str,
+         session_type: SessionType,
+         user_id: Optional[str] = None,
+         deserialize: Optional[bool] = True,
+     ) -> Optional[Union[Session, Dict[str, Any]]]:
+         raise NotImplementedError
+
+     @abstractmethod
+     async def get_sessions(
+         self,
+         session_type: SessionType,
+         user_id: Optional[str] = None,
+         component_id: Optional[str] = None,
+         session_name: Optional[str] = None,
+         start_timestamp: Optional[int] = None,
+         end_timestamp: Optional[int] = None,
+         limit: Optional[int] = None,
+         page: Optional[int] = None,
+         sort_by: Optional[str] = None,
+         sort_order: Optional[str] = None,
+         deserialize: Optional[bool] = True,
+     ) -> Union[List[Session], Tuple[List[Dict[str, Any]], int]]:
+         raise NotImplementedError
+
+     @abstractmethod
+     async def rename_session(
+         self, session_id: str, session_type: SessionType, session_name: str, deserialize: Optional[bool] = True
+     ) -> Optional[Union[Session, Dict[str, Any]]]:
+         raise NotImplementedError
+
+     @abstractmethod
+     async def upsert_session(
+         self, session: Session, deserialize: Optional[bool] = True
+     ) -> Optional[Union[Session, Dict[str, Any]]]:
+         raise NotImplementedError
+
+     # --- Memory ---
+
+     @abstractmethod
+     async def clear_memories(self) -> None:
+         raise NotImplementedError
+
+     @abstractmethod
+     async def delete_user_memory(self, memory_id: str, user_id: Optional[str] = None) -> None:
+         raise NotImplementedError
+
+     @abstractmethod
+     async def delete_user_memories(self, memory_ids: List[str], user_id: Optional[str] = None) -> None:
+         raise NotImplementedError
+
+     @abstractmethod
+     async def get_all_memory_topics(self, user_id: Optional[str] = None) -> List[str]:
+         raise NotImplementedError
+
+     @abstractmethod
+     async def get_user_memory(
+         self,
+         memory_id: str,
+         deserialize: Optional[bool] = True,
+         user_id: Optional[str] = None,
+     ) -> Optional[Union[UserMemory, Dict[str, Any]]]:
+         raise NotImplementedError
+
+     @abstractmethod
+     async def get_user_memories(
+         self,
+         user_id: Optional[str] = None,
+         agent_id: Optional[str] = None,
+         team_id: Optional[str] = None,
+         topics: Optional[List[str]] = None,
+         search_content: Optional[str] = None,
+         limit: Optional[int] = None,
+         page: Optional[int] = None,
+         sort_by: Optional[str] = None,
+         sort_order: Optional[str] = None,
+         deserialize: Optional[bool] = True,
+     ) -> Union[List[UserMemory], Tuple[List[Dict[str, Any]], int]]:
+         raise NotImplementedError
+
+     @abstractmethod
+     async def get_user_memory_stats(
+         self,
+         limit: Optional[int] = None,
+         page: Optional[int] = None,
+         user_id: Optional[str] = None,
+     ) -> Tuple[List[Dict[str, Any]], int]:
+         raise NotImplementedError
+
+     @abstractmethod
+     async def upsert_user_memory(
+         self, memory: UserMemory, deserialize: Optional[bool] = True
+     ) -> Optional[Union[UserMemory, Dict[str, Any]]]:
+         raise NotImplementedError
+
+     # --- Metrics ---
+     @abstractmethod
+     async def get_metrics(
+         self, starting_date: Optional[date] = None, ending_date: Optional[date] = None
+     ) -> Tuple[List[Dict[str, Any]], Optional[int]]:
+         raise NotImplementedError
+
+     @abstractmethod
+     async def calculate_metrics(self) -> Optional[Any]:
+         raise NotImplementedError
+
+     # --- Knowledge ---
+     @abstractmethod
+     async def delete_knowledge_content(self, id: str):
+         """Delete a knowledge row from the database.
+
+         Args:
+             id (str): The ID of the knowledge row to delete.
+         """
+         raise NotImplementedError
+
+     @abstractmethod
+     async def get_knowledge_content(self, id: str) -> Optional[KnowledgeRow]:
+         """Get a knowledge row from the database.
+
+         Args:
+             id (str): The ID of the knowledge row to get.
+
+         Returns:
+             Optional[KnowledgeRow]: The knowledge row, or None if it doesn't exist.
+         """
+         raise NotImplementedError
+
+     @abstractmethod
+     async def get_knowledge_contents(
+         self,
+         limit: Optional[int] = None,
+         page: Optional[int] = None,
+         sort_by: Optional[str] = None,
+         sort_order: Optional[str] = None,
+     ) -> Tuple[List[KnowledgeRow], int]:
+         """Get all knowledge contents from the database.
+
+         Args:
+             limit (Optional[int]): The maximum number of knowledge contents to return.
+             page (Optional[int]): The page number.
+             sort_by (Optional[str]): The column to sort by.
+             sort_order (Optional[str]): The order to sort by.
+
+         Returns:
+             Tuple[List[KnowledgeRow], int]: The knowledge contents and total count.
+
+         Raises:
+             Exception: If an error occurs during retrieval.
+         """
+         raise NotImplementedError
+
+     @abstractmethod
+     async def upsert_knowledge_content(self, knowledge_row: KnowledgeRow):
+         """Upsert knowledge content in the database.
+
+         Args:
+             knowledge_row (KnowledgeRow): The knowledge row to upsert.
+
+         Returns:
+             Optional[KnowledgeRow]: The upserted knowledge row, or None if the operation fails.
+         """
+         raise NotImplementedError
+
+     # --- Evals ---
+     @abstractmethod
+     async def create_eval_run(self, eval_run: EvalRunRecord) -> Optional[EvalRunRecord]:
+         raise NotImplementedError
+
+     @abstractmethod
+     async def delete_eval_runs(self, eval_run_ids: List[str]) -> None:
+         raise NotImplementedError
+
+     @abstractmethod
+     async def get_eval_run(
+         self, eval_run_id: str, deserialize: Optional[bool] = True
+     ) -> Optional[Union[EvalRunRecord, Dict[str, Any]]]:
+         raise NotImplementedError
+
+     @abstractmethod
+     async def get_eval_runs(
+         self,
+         limit: Optional[int] = None,
+         page: Optional[int] = None,
+         sort_by: Optional[str] = None,
+         sort_order: Optional[str] = None,
+         agent_id: Optional[str] = None,
+         team_id: Optional[str] = None,
+         workflow_id: Optional[str] = None,
+         model_id: Optional[str] = None,
+         filter_type: Optional[EvalFilterType] = None,
+         eval_type: Optional[List[EvalType]] = None,
+         deserialize: Optional[bool] = True,
+     ) -> Union[List[EvalRunRecord], Tuple[List[Dict[str, Any]], int]]:
+         raise NotImplementedError
+
+     @abstractmethod
+     async def rename_eval_run(
+         self, eval_run_id: str, name: str, deserialize: Optional[bool] = True
+     ) -> Optional[Union[EvalRunRecord, Dict[str, Any]]]:
+         raise NotImplementedError
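`AsyncBaseDb` mirrors the synchronous `BaseDb` contract with `async def` methods, which is what lets implementations like the new `AsyncPostgresDb` be awaited directly. A consumption sketch; `SessionType.AGENT` is an assumed member name, since the enum body is not visible in this diff:

```python
from agno.db.base import AsyncBaseDb, SessionType


async def latest_agent_sessions(db: AsyncBaseDb):
    # Works against any async implementation, e.g. the AsyncPostgresDb added in this release
    return await db.get_sessions(
        session_type=SessionType.AGENT,  # assumed member name
        limit=10,
        sort_by="created_at",
        sort_order="desc",
    )
```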
agno/db/dynamo/dynamo.py CHANGED
@@ -524,7 +524,7 @@ class DynamoDb(BaseDb):
              raise e
 
      def upsert_sessions(
-         self, sessions: List[Session], deserialize: Optional[bool] = True
+         self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
      ) -> List[Union[Session, Dict[str, Any]]]:
          """
          Bulk upsert multiple sessions for improved performance on large datasets.
@@ -962,7 +962,7 @@ class DynamoDb(BaseDb):
              raise e
 
      def upsert_memories(
-         self, memories: List[UserMemory], deserialize: Optional[bool] = True
+         self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
      ) -> List[Union[UserMemory, Dict[str, Any]]]:
          """
          Bulk upsert multiple user memories for improved performance on large datasets.
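The `preserve_updated_at` flag is threaded through every backend's bulk upsert methods in this release; judging by the accompanying `v1_to_v2.py` changes, its likely purpose is letting migrations keep original timestamps instead of stamping rows with the import time. A hedged sketch:

```python
# Hypothetical migration call: `db` is any BaseDb implementation and
# `old_sessions` is a list of Session objects from a v1 export.
migrated = db.upsert_sessions(old_sessions, deserialize=False, preserve_updated_at=True)
```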