agno 2.0.4__py3-none-any.whl → 2.0.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41):
  1. agno/agent/agent.py +74 -85
  2. agno/db/dynamo/dynamo.py +2 -2
  3. agno/db/firestore/firestore.py +3 -2
  4. agno/db/gcs_json/gcs_json_db.py +2 -2
  5. agno/db/json/json_db.py +2 -2
  6. agno/db/migrations/v1_to_v2.py +191 -23
  7. agno/db/mongo/mongo.py +61 -2
  8. agno/db/mysql/mysql.py +5 -5
  9. agno/db/mysql/schemas.py +27 -27
  10. agno/db/postgres/postgres.py +5 -5
  11. agno/db/redis/redis.py +2 -2
  12. agno/db/singlestore/singlestore.py +2 -2
  13. agno/db/sqlite/sqlite.py +6 -5
  14. agno/db/utils.py +0 -14
  15. agno/integrations/discord/client.py +1 -0
  16. agno/knowledge/knowledge.py +7 -7
  17. agno/knowledge/reader/reader_factory.py +7 -3
  18. agno/knowledge/reader/web_search_reader.py +12 -6
  19. agno/models/message.py +109 -0
  20. agno/models/openai/responses.py +6 -0
  21. agno/os/app.py +162 -42
  22. agno/os/interfaces/agui/utils.py +98 -134
  23. agno/os/routers/health.py +0 -1
  24. agno/os/routers/home.py +52 -0
  25. agno/os/routers/knowledge/knowledge.py +2 -2
  26. agno/os/schema.py +21 -0
  27. agno/os/utils.py +0 -8
  28. agno/run/agent.py +3 -3
  29. agno/run/team.py +3 -3
  30. agno/team/team.py +33 -38
  31. agno/tools/duckduckgo.py +15 -11
  32. agno/tools/googlesearch.py +1 -1
  33. agno/utils/string.py +32 -0
  34. agno/utils/tools.py +1 -1
  35. agno/workflow/step.py +4 -3
  36. {agno-2.0.4.dist-info → agno-2.0.5.dist-info}/METADATA +6 -5
  37. {agno-2.0.4.dist-info → agno-2.0.5.dist-info}/RECORD +40 -40
  38. agno/knowledge/reader/url_reader.py +0 -128
  39. {agno-2.0.4.dist-info → agno-2.0.5.dist-info}/WHEEL +0 -0
  40. {agno-2.0.4.dist-info → agno-2.0.5.dist-info}/licenses/LICENSE +0 -0
  41. {agno-2.0.4.dist-info → agno-2.0.5.dist-info}/top_level.txt +0 -0
@@ -1,9 +1,11 @@
1
1
  """Migration utility to migrate your Agno tables from v1 to v2"""
2
2
 
3
+ import json
3
4
  from typing import Any, Dict, List, Optional, Union
4
5
 
5
6
  from sqlalchemy import text
6
7
 
8
+ from agno.db.mongo.mongo import MongoDb
7
9
  from agno.db.mysql.mysql import MySQLDb
8
10
  from agno.db.postgres.postgres import PostgresDb
9
11
  from agno.db.schemas.memory import UserMemory
@@ -12,24 +14,171 @@ from agno.session import AgentSession, TeamSession, WorkflowSession
12
14
  from agno.utils.log import log_error
13
15
 
14
16
 
17
+ def convert_v1_metrics_to_v2(metrics_dict: Dict[str, Any]) -> Dict[str, Any]:
18
+ """Convert v1 metrics dictionary to v2 format by mapping old field names to new ones."""
19
+ if not isinstance(metrics_dict, dict):
20
+ return metrics_dict
21
+
22
+ # Create a copy to avoid modifying the original
23
+ v2_metrics = metrics_dict.copy()
24
+
25
+ # Map v1 field names to v2 field names
26
+ field_mappings = {
27
+ "time": "duration",
28
+ "audio_tokens": "audio_total_tokens",
29
+ "input_audio_tokens": "audio_input_tokens",
30
+ "output_audio_tokens": "audio_output_tokens",
31
+ "cached_tokens": "cache_read_tokens",
32
+ }
33
+
34
+ # Fields to remove (deprecated in v2)
35
+ deprecated_fields = ["prompt_tokens", "completion_tokens", "prompt_tokens_details", "completion_tokens_details"]
36
+
37
+ # Apply field mappings
38
+ for old_field, new_field in field_mappings.items():
39
+ if old_field in v2_metrics:
40
+ v2_metrics[new_field] = v2_metrics.pop(old_field)
41
+
42
+ # Remove deprecated fields
43
+ for field in deprecated_fields:
44
+ v2_metrics.pop(field, None)
45
+
46
+ return v2_metrics
47
+
48
+
49
+ def convert_any_metrics_in_data(data: Any) -> Any:
50
+ """Recursively find and convert any metrics dictionaries in the data structure."""
51
+ if isinstance(data, dict):
52
+ # First filter out deprecated v1 fields
53
+ data = filter_deprecated_v1_fields(data)
54
+
55
+ # Check if this looks like a metrics dictionary
56
+ if _is_metrics_dict(data):
57
+ return convert_v1_metrics_to_v2(data)
58
+
59
+ # Otherwise, recursively process all values
60
+ converted_dict = {}
61
+ for key, value in data.items():
62
+ # Special handling for 'metrics' keys - always convert their values
63
+ if key == "metrics" and isinstance(value, dict):
64
+ converted_dict[key] = convert_v1_metrics_to_v2(value)
65
+ else:
66
+ converted_dict[key] = convert_any_metrics_in_data(value)
67
+ return converted_dict
68
+
69
+ elif isinstance(data, list):
70
+ return [convert_any_metrics_in_data(item) for item in data]
71
+
72
+ else:
73
+ # Not a dict or list, return as-is
74
+ return data
75
+
76
+
77
+ def _is_metrics_dict(data: Dict[str, Any]) -> bool:
78
+ """Check if a dictionary looks like a metrics dictionary based on common field names."""
79
+ if not isinstance(data, dict):
80
+ return False
81
+
82
+ # Common metrics field names (both v1 and v2)
83
+ metrics_indicators = {
84
+ "input_tokens",
85
+ "output_tokens",
86
+ "total_tokens",
87
+ "time",
88
+ "duration",
89
+ "audio_tokens",
90
+ "audio_total_tokens",
91
+ "audio_input_tokens",
92
+ "audio_output_tokens",
93
+ "cached_tokens",
94
+ "cache_read_tokens",
95
+ "cache_write_tokens",
96
+ "reasoning_tokens",
97
+ "prompt_tokens",
98
+ "completion_tokens",
99
+ "time_to_first_token",
100
+ "provider_metrics",
101
+ "additional_metrics",
102
+ }
103
+
104
+ # Deprecated v1 fields that are strong indicators this is a metrics dict
105
+ deprecated_v1_indicators = {"time", "audio_tokens", "cached_tokens", "prompt_tokens", "completion_tokens"}
106
+
107
+ # If we find any deprecated v1 field, it's definitely a metrics dict that needs conversion
108
+ if any(field in data for field in deprecated_v1_indicators):
109
+ return True
110
+
111
+ # Otherwise, if the dict has at least 2 metrics-related fields, consider it a metrics dict
112
+ matching_fields = sum(1 for field in data.keys() if field in metrics_indicators)
113
+ return matching_fields >= 2
114
+
115
+
116
+ def convert_session_data_comprehensively(session_data: Optional[Dict[str, Any]]) -> Optional[Dict[str, Any]]:
117
+ """Comprehensively convert any metrics found anywhere in session_data from v1 to v2 format."""
118
+ if not session_data:
119
+ return session_data
120
+
121
+ # Use the recursive converter to find and fix all metrics
122
+ return convert_any_metrics_in_data(session_data)
123
+
124
+
125
+ def safe_get_runs_from_memory(memory_data: Any) -> Any:
126
+ """Safely extract runs data from memory field, handling various data types."""
127
+ if memory_data is None:
128
+ return None
129
+
130
+ # If memory_data is a string, try to parse it as JSON
131
+ if isinstance(memory_data, str):
132
+ try:
133
+ memory_dict = json.loads(memory_data)
134
+ if isinstance(memory_dict, dict):
135
+ return memory_dict.get("runs")
136
+ except (json.JSONDecodeError, AttributeError):
137
+ # If JSON parsing fails, memory_data might just be a string value
138
+ return None
139
+
140
+ # If memory_data is already a dict, access runs directly
141
+ elif isinstance(memory_data, dict):
142
+ return memory_data.get("runs")
143
+
144
+ # For any other type, return None
145
+ return None
146
+
147
+
148
+ def filter_deprecated_v1_fields(data: Dict[str, Any]) -> Dict[str, Any]:
149
+ """Remove v1-only fields that don't exist in v2 models."""
150
+ if not isinstance(data, dict):
151
+ return data
152
+
153
+ # Fields that existed in v1 but were removed in v2
154
+ deprecated_fields = {
155
+ "team_session_id", # RunOutput v1 field, removed in v2
156
+ "formatted_tool_calls", # RunOutput v1 field, removed in v2
157
+ # Add other deprecated fields here as needed
158
+ }
159
+
160
+ # Create a copy and remove deprecated fields
161
+ filtered_data = {k: v for k, v in data.items() if k not in deprecated_fields}
162
+ return filtered_data
163
+
164
+
15
165
  def migrate(
16
- db: Union[PostgresDb, MySQLDb, SqliteDb],
166
+ db: Union[PostgresDb, MySQLDb, SqliteDb, MongoDb],
17
167
  v1_db_schema: str,
18
168
  agent_sessions_table_name: Optional[str] = None,
19
169
  team_sessions_table_name: Optional[str] = None,
20
170
  workflow_sessions_table_name: Optional[str] = None,
21
171
  memories_table_name: Optional[str] = None,
22
172
  ):
23
- """Given a PostgresDb and table names, parse and migrate the tables' content to the corresponding v2 tables.
173
+ """Given a database connection and table/collection names, parse and migrate the content to corresponding v2 tables/collections.
24
174
 
25
175
  Args:
26
- db: The database to migrate
27
- v1_db_schema: The schema of the v1 tables
28
- agent_sessions_table_name: The name of the agent sessions table. If not provided, the agent sessions table will not be migrated.
29
- team_sessions_table_name: The name of the team sessions table. If not provided, the team sessions table will not be migrated.
30
- workflow_sessions_table_name: The name of the workflow sessions table. If not provided, the workflow sessions table will not be migrated.
31
- workflow_v2_sessions_table_name: The name of the workflow v2 sessions table. If not provided, the workflow v2 sessions table will not be migrated.
32
- memories_table_name: The name of the memories table. If not provided, the memories table will not be migrated.
176
+ db: The database to migrate (PostgresDb, MySQLDb, SqliteDb, or MongoDb)
177
+ v1_db_schema: The schema of the v1 tables (leave empty for SQLite and MongoDB)
178
+ agent_sessions_table_name: The name of the agent sessions table/collection. If not provided, agent sessions will not be migrated.
179
+ team_sessions_table_name: The name of the team sessions table/collection. If not provided, team sessions will not be migrated.
180
+ workflow_sessions_table_name: The name of the workflow sessions table/collection. If not provided, workflow sessions will not be migrated.
181
+ memories_table_name: The name of the memories table/collection. If not provided, memories will not be migrated.
33
182
  """
34
183
  if agent_sessions_table_name:
35
184
  db.migrate_table_from_v1_to_v2(
@@ -61,14 +210,33 @@ def migrate(
61
210
 
62
211
 
63
212
  def get_all_table_content(db, db_schema: str, table_name: str) -> list[dict[str, Any]]:
64
- """Get all content from the given table"""
213
+ """Get all content from the given table/collection"""
65
214
  try:
66
- with db.Session() as sess:
67
- result = sess.execute(text(f"SELECT * FROM {db_schema}.{table_name}"))
68
- return [row._asdict() for row in result]
215
+ # Check if this is a MongoDB instance
216
+ if hasattr(db, "database") and hasattr(db, "db_client"):
217
+ # MongoDB implementation
218
+ collection = db.database[table_name]
219
+ # Convert MongoDB documents to dictionaries and handle ObjectId
220
+ documents = list(collection.find({}))
221
+ # Convert ObjectId to string for compatibility
222
+ for doc in documents:
223
+ if "_id" in doc:
224
+ doc["_id"] = str(doc["_id"])
225
+ return documents
226
+ else:
227
+ # SQL database implementation (PostgreSQL, MySQL, SQLite)
228
+ with db.Session() as sess:
229
+ # Handle empty schema by omitting the schema prefix (needed for SQLite)
230
+ if db_schema and db_schema.strip():
231
+ sql_query = f"SELECT * FROM {db_schema}.{table_name}"
232
+ else:
233
+ sql_query = f"SELECT * FROM {table_name}"
234
+
235
+ result = sess.execute(text(sql_query))
236
+ return [row._asdict() for row in result]
69
237
 
70
238
  except Exception as e:
71
- log_error(f"Error getting all content from table {table_name}: {e}")
239
+ log_error(f"Error getting all content from table/collection {table_name}: {e}")
72
240
  return []
73
241
 
74
242
 
@@ -82,9 +250,9 @@ def parse_agent_sessions(v1_content: List[Dict[str, Any]]) -> List[AgentSession]
82
250
  "agent_data": item.get("agent_data"),
83
251
  "session_id": item.get("session_id"),
84
252
  "user_id": item.get("user_id"),
85
- "session_data": item.get("session_data"),
86
- "metadata": item.get("extra_data"),
87
- "runs": item.get("memory", {}).get("runs"),
253
+ "session_data": convert_session_data_comprehensively(item.get("session_data")),
254
+ "metadata": convert_any_metrics_in_data(item.get("extra_data")),
255
+ "runs": convert_any_metrics_in_data(safe_get_runs_from_memory(item.get("memory"))),
88
256
  "created_at": item.get("created_at"),
89
257
  "updated_at": item.get("updated_at"),
90
258
  }
@@ -105,9 +273,9 @@ def parse_team_sessions(v1_content: List[Dict[str, Any]]) -> List[TeamSession]:
105
273
  "team_data": item.get("team_data"),
106
274
  "session_id": item.get("session_id"),
107
275
  "user_id": item.get("user_id"),
108
- "session_data": item.get("session_data"),
109
- "metadata": item.get("extra_data"),
110
- "runs": item.get("memory", {}).get("runs"),
276
+ "session_data": convert_session_data_comprehensively(item.get("session_data")),
277
+ "metadata": convert_any_metrics_in_data(item.get("extra_data")),
278
+ "runs": convert_any_metrics_in_data(safe_get_runs_from_memory(item.get("memory"))),
111
279
  "created_at": item.get("created_at"),
112
280
  "updated_at": item.get("updated_at"),
113
281
  }
@@ -128,13 +296,13 @@ def parse_workflow_sessions(v1_content: List[Dict[str, Any]]) -> List[WorkflowSe
128
296
  "workflow_data": item.get("workflow_data"),
129
297
  "session_id": item.get("session_id"),
130
298
  "user_id": item.get("user_id"),
131
- "session_data": item.get("session_data"),
132
- "metadata": item.get("extra_data"),
299
+ "session_data": convert_session_data_comprehensively(item.get("session_data")),
300
+ "metadata": convert_any_metrics_in_data(item.get("extra_data")),
133
301
  "created_at": item.get("created_at"),
134
302
  "updated_at": item.get("updated_at"),
135
303
  # Workflow v2 specific fields
136
304
  "workflow_name": item.get("workflow_name"),
137
- "runs": item.get("runs"),
305
+ "runs": convert_any_metrics_in_data(item.get("runs")),
138
306
  }
139
307
  workflow_session = WorkflowSession.from_dict(session)
140
308
  if workflow_session is not None:
agno/db/mongo/mongo.py CHANGED
@@ -16,9 +16,10 @@ from agno.db.mongo.utils import (
16
16
  from agno.db.schemas.evals import EvalFilterType, EvalRunRecord, EvalType
17
17
  from agno.db.schemas.knowledge import KnowledgeRow
18
18
  from agno.db.schemas.memory import UserMemory
19
- from agno.db.utils import deserialize_session_json_fields, generate_deterministic_id, serialize_session_json_fields
19
+ from agno.db.utils import deserialize_session_json_fields, serialize_session_json_fields
20
20
  from agno.session import AgentSession, Session, TeamSession, WorkflowSession
21
21
  from agno.utils.log import log_debug, log_error, log_info
22
+ from agno.utils.string import generate_id
22
23
 
23
24
  try:
24
25
  from pymongo import MongoClient, ReturnDocument
@@ -63,7 +64,7 @@ class MongoDb(BaseDb):
63
64
  base_seed = db_url or str(db_client)
64
65
  db_name_suffix = db_name if db_name is not None else "agno"
65
66
  seed = f"{base_seed}#{db_name_suffix}"
66
- id = generate_deterministic_id(seed)
67
+ id = generate_id(seed)
67
68
 
68
69
  super().__init__(
69
70
  id=id,
@@ -1423,3 +1424,61 @@ class MongoDb(BaseDb):
1423
1424
  except Exception as e:
1424
1425
  log_error(f"Error updating eval run name {eval_run_id}: {e}")
1425
1426
  raise
1427
+
1428
+ def migrate_table_from_v1_to_v2(self, v1_db_schema: str, v1_table_name: str, v1_table_type: str):
1429
+ """Migrate all content in the given collection to the right v2 collection"""
1430
+
1431
+ from typing import List, Sequence, Union
1432
+
1433
+ from agno.db.migrations.v1_to_v2 import (
1434
+ get_all_table_content,
1435
+ parse_agent_sessions,
1436
+ parse_memories,
1437
+ parse_team_sessions,
1438
+ parse_workflow_sessions,
1439
+ )
1440
+
1441
+ # Get all content from the old collection
1442
+ old_content: list[dict[str, Any]] = get_all_table_content(
1443
+ db=self,
1444
+ db_schema=v1_db_schema,
1445
+ table_name=v1_table_name,
1446
+ )
1447
+ if not old_content:
1448
+ log_info(f"No content to migrate from collection {v1_table_name}")
1449
+ return
1450
+
1451
+ # Parse the content into the new format
1452
+ memories: List[UserMemory] = []
1453
+ sessions: Sequence[Union[AgentSession, TeamSession, WorkflowSession]] = []
1454
+ if v1_table_type == "agent_sessions":
1455
+ sessions = parse_agent_sessions(old_content)
1456
+ elif v1_table_type == "team_sessions":
1457
+ sessions = parse_team_sessions(old_content)
1458
+ elif v1_table_type == "workflow_sessions":
1459
+ sessions = parse_workflow_sessions(old_content)
1460
+ elif v1_table_type == "memories":
1461
+ memories = parse_memories(old_content)
1462
+ else:
1463
+ raise ValueError(f"Invalid table type: {v1_table_type}")
1464
+
1465
+ # Insert the new content into the new collection
1466
+ if v1_table_type == "agent_sessions":
1467
+ for session in sessions:
1468
+ self.upsert_session(session)
1469
+ log_info(f"Migrated {len(sessions)} Agent sessions to collection: {self.session_table_name}")
1470
+
1471
+ elif v1_table_type == "team_sessions":
1472
+ for session in sessions:
1473
+ self.upsert_session(session)
1474
+ log_info(f"Migrated {len(sessions)} Team sessions to collection: {self.session_table_name}")
1475
+
1476
+ elif v1_table_type == "workflow_sessions":
1477
+ for session in sessions:
1478
+ self.upsert_session(session)
1479
+ log_info(f"Migrated {len(sessions)} Workflow sessions to collection: {self.session_table_name}")
1480
+
1481
+ elif v1_table_type == "memories":
1482
+ for memory in memories:
1483
+ self.upsert_user_memory(memory)
1484
+ log_info(f"Migrated {len(memories)} memories to collection: {self.memory_table_name}")
agno/db/mysql/mysql.py CHANGED
@@ -20,9 +20,9 @@ from agno.db.mysql.utils import (
20
20
  from agno.db.schemas.evals import EvalFilterType, EvalRunRecord, EvalType
21
21
  from agno.db.schemas.knowledge import KnowledgeRow
22
22
  from agno.db.schemas.memory import UserMemory
23
- from agno.db.utils import generate_deterministic_id
24
23
  from agno.session import AgentSession, Session, TeamSession, WorkflowSession
25
24
  from agno.utils.log import log_debug, log_error, log_info
25
+ from agno.utils.string import generate_id
26
26
 
27
27
  try:
28
28
  from sqlalchemy import TEXT, and_, cast, func, update
@@ -75,7 +75,7 @@ class MySQLDb(BaseDb):
75
75
  base_seed = db_url or str(db_engine.url) # type: ignore
76
76
  schema_suffix = db_schema if db_schema is not None else "ai"
77
77
  seed = f"{base_seed}#{schema_suffix}"
78
- id = generate_deterministic_id(seed)
78
+ id = generate_id(seed)
79
79
 
80
80
  super().__init__(
81
81
  id=id,
@@ -1711,17 +1711,17 @@ class MySQLDb(BaseDb):
1711
1711
  if v1_table_type == "agent_sessions":
1712
1712
  for session in sessions:
1713
1713
  self.upsert_session(session)
1714
- log_info(f"Migrated {len(sessions)} Agent sessions to table: {self.session_table}")
1714
+ log_info(f"Migrated {len(sessions)} Agent sessions to table: {self.session_table_name}")
1715
1715
 
1716
1716
  elif v1_table_type == "team_sessions":
1717
1717
  for session in sessions:
1718
1718
  self.upsert_session(session)
1719
- log_info(f"Migrated {len(sessions)} Team sessions to table: {self.session_table}")
1719
+ log_info(f"Migrated {len(sessions)} Team sessions to table: {self.session_table_name}")
1720
1720
 
1721
1721
  elif v1_table_type == "workflow_sessions":
1722
1722
  for session in sessions:
1723
1723
  self.upsert_session(session)
1724
- log_info(f"Migrated {len(sessions)} Workflow sessions to table: {self.session_table}")
1724
+ log_info(f"Migrated {len(sessions)} Workflow sessions to table: {self.session_table_name}")
1725
1725
 
1726
1726
  elif v1_table_type == "memories":
1727
1727
  for memory in memories:
agno/db/mysql/schemas.py CHANGED
@@ -8,12 +8,12 @@ except ImportError:
8
8
  raise ImportError("`sqlalchemy` not installed. Please install it using `pip install sqlalchemy`")
9
9
 
10
10
  SESSION_TABLE_SCHEMA = {
11
- "session_id": {"type": String, "nullable": False},
12
- "session_type": {"type": String, "nullable": False, "index": True},
13
- "agent_id": {"type": String, "nullable": True},
14
- "team_id": {"type": String, "nullable": True},
15
- "workflow_id": {"type": String, "nullable": True},
16
- "user_id": {"type": String, "nullable": True},
11
+ "session_id": {"type": lambda: String(128), "nullable": False},
12
+ "session_type": {"type": lambda: String(20), "nullable": False, "index": True},
13
+ "agent_id": {"type": lambda: String(128), "nullable": True},
14
+ "team_id": {"type": lambda: String(128), "nullable": True},
15
+ "workflow_id": {"type": lambda: String(128), "nullable": True},
16
+ "user_id": {"type": lambda: String(128), "nullable": True},
17
17
  "session_data": {"type": JSON, "nullable": True},
18
18
  "agent_data": {"type": JSON, "nullable": True},
19
19
  "team_data": {"type": JSON, "nullable": True},
@@ -32,50 +32,50 @@ SESSION_TABLE_SCHEMA = {
32
32
  }
33
33
 
34
34
  USER_MEMORY_TABLE_SCHEMA = {
35
- "memory_id": {"type": String, "primary_key": True, "nullable": False},
35
+ "memory_id": {"type": lambda: String(128), "primary_key": True, "nullable": False},
36
36
  "memory": {"type": JSON, "nullable": False},
37
37
  "input": {"type": Text, "nullable": True},
38
- "agent_id": {"type": String, "nullable": True},
39
- "team_id": {"type": String, "nullable": True},
40
- "user_id": {"type": String, "nullable": True, "index": True},
38
+ "agent_id": {"type": lambda: String(128), "nullable": True},
39
+ "team_id": {"type": lambda: String(128), "nullable": True},
40
+ "user_id": {"type": lambda: String(128), "nullable": True, "index": True},
41
41
  "topics": {"type": JSON, "nullable": True},
42
42
  "updated_at": {"type": BigInteger, "nullable": True, "index": True},
43
43
  }
44
44
 
45
45
  EVAL_TABLE_SCHEMA = {
46
- "run_id": {"type": String, "primary_key": True, "nullable": False},
47
- "eval_type": {"type": String, "nullable": False},
46
+ "run_id": {"type": lambda: String(128), "primary_key": True, "nullable": False},
47
+ "eval_type": {"type": lambda: String(50), "nullable": False},
48
48
  "eval_data": {"type": JSON, "nullable": False},
49
49
  "eval_input": {"type": JSON, "nullable": False},
50
- "name": {"type": String, "nullable": True},
51
- "agent_id": {"type": String, "nullable": True},
52
- "team_id": {"type": String, "nullable": True},
53
- "workflow_id": {"type": String, "nullable": True},
54
- "model_id": {"type": String, "nullable": True},
55
- "model_provider": {"type": String, "nullable": True},
56
- "evaluated_component_name": {"type": String, "nullable": True},
50
+ "name": {"type": lambda: String(255), "nullable": True},
51
+ "agent_id": {"type": lambda: String(128), "nullable": True},
52
+ "team_id": {"type": lambda: String(128), "nullable": True},
53
+ "workflow_id": {"type": lambda: String(128), "nullable": True},
54
+ "model_id": {"type": lambda: String(128), "nullable": True},
55
+ "model_provider": {"type": lambda: String(128), "nullable": True},
56
+ "evaluated_component_name": {"type": lambda: String(255), "nullable": True},
57
57
  "created_at": {"type": BigInteger, "nullable": False, "index": True},
58
58
  "updated_at": {"type": BigInteger, "nullable": True},
59
59
  }
60
60
 
61
61
  KNOWLEDGE_TABLE_SCHEMA = {
62
- "id": {"type": String, "primary_key": True, "nullable": False},
63
- "name": {"type": String, "nullable": False},
62
+ "id": {"type": lambda: String(128), "primary_key": True, "nullable": False},
63
+ "name": {"type": lambda: String(255), "nullable": False},
64
64
  "description": {"type": Text, "nullable": False},
65
65
  "metadata": {"type": JSON, "nullable": True},
66
- "type": {"type": String, "nullable": True},
66
+ "type": {"type": lambda: String(50), "nullable": True},
67
67
  "size": {"type": BigInteger, "nullable": True},
68
- "linked_to": {"type": String, "nullable": True},
68
+ "linked_to": {"type": lambda: String(128), "nullable": True},
69
69
  "access_count": {"type": BigInteger, "nullable": True},
70
70
  "created_at": {"type": BigInteger, "nullable": True},
71
71
  "updated_at": {"type": BigInteger, "nullable": True},
72
- "status": {"type": String, "nullable": True},
72
+ "status": {"type": lambda: String(50), "nullable": True},
73
73
  "status_message": {"type": Text, "nullable": True},
74
- "external_id": {"type": String, "nullable": True},
74
+ "external_id": {"type": lambda: String(128), "nullable": True},
75
75
  }
76
76
 
77
77
  METRICS_TABLE_SCHEMA = {
78
- "id": {"type": String, "primary_key": True, "nullable": False},
78
+ "id": {"type": lambda: String(128), "primary_key": True, "nullable": False},
79
79
  "agent_runs_count": {"type": BigInteger, "nullable": False},
80
80
  "team_runs_count": {"type": BigInteger, "nullable": False},
81
81
  "workflow_runs_count": {"type": BigInteger, "nullable": False},
@@ -86,7 +86,7 @@ METRICS_TABLE_SCHEMA = {
86
86
  "token_metrics": {"type": JSON, "nullable": False},
87
87
  "model_metrics": {"type": JSON, "nullable": False},
88
88
  "date": {"type": Date, "nullable": False, "index": True},
89
- "aggregation_period": {"type": String, "nullable": False},
89
+ "aggregation_period": {"type": lambda: String(20), "nullable": False},
90
90
  "created_at": {"type": BigInteger, "nullable": False},
91
91
  "updated_at": {"type": BigInteger, "nullable": True},
92
92
  "completed": {"type": Boolean, "nullable": False},
@@ -18,9 +18,9 @@ from agno.db.postgres.utils import (
18
18
  from agno.db.schemas.evals import EvalFilterType, EvalRunRecord, EvalType
19
19
  from agno.db.schemas.knowledge import KnowledgeRow
20
20
  from agno.db.schemas.memory import UserMemory
21
- from agno.db.utils import generate_deterministic_id
22
21
  from agno.session import AgentSession, Session, TeamSession, WorkflowSession
23
22
  from agno.utils.log import log_debug, log_error, log_info, log_warning
23
+ from agno.utils.string import generate_id
24
24
 
25
25
  try:
26
26
  from sqlalchemy import Index, String, UniqueConstraint, func, update
@@ -82,7 +82,7 @@ class PostgresDb(BaseDb):
82
82
  base_seed = db_url or str(db_engine.url) # type: ignore
83
83
  schema_suffix = db_schema if db_schema is not None else "ai"
84
84
  seed = f"{base_seed}#{schema_suffix}"
85
- id = generate_deterministic_id(seed)
85
+ id = generate_id(seed)
86
86
 
87
87
  super().__init__(
88
88
  id=id,
@@ -1700,17 +1700,17 @@ class PostgresDb(BaseDb):
1700
1700
  if v1_table_type == "agent_sessions":
1701
1701
  for session in sessions:
1702
1702
  self.upsert_session(session)
1703
- log_info(f"Migrated {len(sessions)} Agent sessions to table: {self.session_table}")
1703
+ log_info(f"Migrated {len(sessions)} Agent sessions to table: {self.session_table_name}")
1704
1704
 
1705
1705
  elif v1_table_type == "team_sessions":
1706
1706
  for session in sessions:
1707
1707
  self.upsert_session(session)
1708
- log_info(f"Migrated {len(sessions)} Team sessions to table: {self.session_table}")
1708
+ log_info(f"Migrated {len(sessions)} Team sessions to table: {self.session_table_name}")
1709
1709
 
1710
1710
  elif v1_table_type == "workflow_sessions":
1711
1711
  for session in sessions:
1712
1712
  self.upsert_session(session)
1713
- log_info(f"Migrated {len(sessions)} Workflow sessions to table: {self.session_table}")
1713
+ log_info(f"Migrated {len(sessions)} Workflow sessions to table: {self.session_table_name}")
1714
1714
 
1715
1715
  elif v1_table_type == "memories":
1716
1716
  for memory in memories:
agno/db/redis/redis.py CHANGED
@@ -21,9 +21,9 @@ from agno.db.redis.utils import (
21
21
  from agno.db.schemas.evals import EvalFilterType, EvalRunRecord, EvalType
22
22
  from agno.db.schemas.knowledge import KnowledgeRow
23
23
  from agno.db.schemas.memory import UserMemory
24
- from agno.db.utils import generate_deterministic_id
25
24
  from agno.session import AgentSession, Session, TeamSession, WorkflowSession
26
25
  from agno.utils.log import log_debug, log_error, log_info
26
+ from agno.utils.string import generate_id
27
27
 
28
28
  try:
29
29
  from redis import Redis
@@ -71,7 +71,7 @@ class RedisDb(BaseDb):
71
71
  if id is None:
72
72
  base_seed = db_url or str(redis_client)
73
73
  seed = f"{base_seed}#{db_prefix}"
74
- id = generate_deterministic_id(seed)
74
+ id = generate_id(seed)
75
75
 
76
76
  super().__init__(
77
77
  id=id,
@@ -19,9 +19,9 @@ from agno.db.singlestore.utils import (
19
19
  is_table_available,
20
20
  is_valid_table,
21
21
  )
22
- from agno.db.utils import generate_deterministic_id
23
22
  from agno.session import AgentSession, Session, TeamSession, WorkflowSession
24
23
  from agno.utils.log import log_debug, log_error, log_info, log_warning
24
+ from agno.utils.string import generate_id
25
25
 
26
26
  try:
27
27
  from sqlalchemy import Index, UniqueConstraint, and_, func, update
@@ -74,7 +74,7 @@ class SingleStoreDb(BaseDb):
74
74
  base_seed = db_url or str(db_engine.url) if db_engine else "singlestore" # type: ignore
75
75
  schema_suffix = db_schema if db_schema is not None else "ai"
76
76
  seed = f"{base_seed}#{schema_suffix}"
77
- id = generate_deterministic_id(seed)
77
+ id = generate_id(seed)
78
78
 
79
79
  super().__init__(
80
80
  id=id,
agno/db/sqlite/sqlite.py CHANGED
@@ -18,9 +18,10 @@ from agno.db.sqlite.utils import (
18
18
  is_table_available,
19
19
  is_valid_table,
20
20
  )
21
- from agno.db.utils import deserialize_session_json_fields, generate_deterministic_id, serialize_session_json_fields
21
+ from agno.db.utils import deserialize_session_json_fields, serialize_session_json_fields
22
22
  from agno.session import AgentSession, Session, TeamSession, WorkflowSession
23
23
  from agno.utils.log import log_debug, log_error, log_info, log_warning
24
+ from agno.utils.string import generate_id
24
25
 
25
26
  try:
26
27
  from sqlalchemy import Column, MetaData, Table, and_, func, select, text, update
@@ -70,7 +71,7 @@ class SqliteDb(BaseDb):
70
71
  """
71
72
  if id is None:
72
73
  seed = db_url or db_file or str(db_engine.url) if db_engine else "sqlite:///agno.db"
73
- id = generate_deterministic_id(seed)
74
+ id = generate_id(seed)
74
75
 
75
76
  super().__init__(
76
77
  id=id,
@@ -1666,17 +1667,17 @@ class SqliteDb(BaseDb):
1666
1667
  if v1_table_type == "agent_sessions":
1667
1668
  for session in sessions:
1668
1669
  self.upsert_session(session)
1669
- log_info(f"Migrated {len(sessions)} Agent sessions to table: {self.session_table}")
1670
+ log_info(f"Migrated {len(sessions)} Agent sessions to table: {self.session_table_name}")
1670
1671
 
1671
1672
  elif v1_table_type == "team_sessions":
1672
1673
  for session in sessions:
1673
1674
  self.upsert_session(session)
1674
- log_info(f"Migrated {len(sessions)} Team sessions to table: {self.session_table}")
1675
+ log_info(f"Migrated {len(sessions)} Team sessions to table: {self.session_table_name}")
1675
1676
 
1676
1677
  elif v1_table_type == "workflow_sessions":
1677
1678
  for session in sessions:
1678
1679
  self.upsert_session(session)
1679
- log_info(f"Migrated {len(sessions)} Workflow sessions to table: {self.session_table}")
1680
+ log_info(f"Migrated {len(sessions)} Workflow sessions to table: {self.session_table_name}")
1680
1681
 
1681
1682
  elif v1_table_type == "memories":
1682
1683
  for memory in memories:
agno/db/utils.py CHANGED
@@ -1,7 +1,6 @@
1
1
  """Logic shared across different database implementations"""
2
2
 
3
3
  import json
4
- import uuid
5
4
  from datetime import date, datetime
6
5
  from uuid import UUID
7
6
 
@@ -87,16 +86,3 @@ def deserialize_session_json_fields(session: dict) -> dict:
87
86
  session["runs"] = json.loads(session["runs"])
88
87
 
89
88
  return session
90
-
91
-
92
- def generate_deterministic_id(seed: str) -> str:
93
- """
94
- Generate a deterministic UUID5 based on a seed string.
95
-
96
- Args:
97
- seed (str): The seed string to generate the UUID from.
98
-
99
- Returns:
100
- str: A deterministic UUID5 string.
101
- """
102
- return str(uuid.uuid5(uuid.NAMESPACE_DNS, seed))
@@ -112,6 +112,7 @@ class DiscordClient:
112
112
  # TODO Unhappy with the duplication here but it keeps MyPy from complaining
113
113
  additional_context = dedent(f"""
114
114
  Discord username: {message_user}
115
+ Discord userid: {message_user_id}
115
116
  Discord url: {message_url}
116
117
  """)
117
118
  if self.agent: