agno 2.0.3__py3-none-any.whl → 2.0.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. agno/agent/agent.py +229 -164
  2. agno/db/dynamo/dynamo.py +8 -0
  3. agno/db/firestore/firestore.py +8 -0
  4. agno/db/gcs_json/gcs_json_db.py +9 -0
  5. agno/db/json/json_db.py +8 -0
  6. agno/db/migrations/v1_to_v2.py +191 -23
  7. agno/db/mongo/mongo.py +68 -0
  8. agno/db/mysql/mysql.py +13 -3
  9. agno/db/mysql/schemas.py +27 -27
  10. agno/db/postgres/postgres.py +19 -11
  11. agno/db/redis/redis.py +6 -0
  12. agno/db/singlestore/schemas.py +1 -1
  13. agno/db/singlestore/singlestore.py +8 -1
  14. agno/db/sqlite/sqlite.py +12 -3
  15. agno/integrations/discord/client.py +1 -0
  16. agno/knowledge/knowledge.py +92 -66
  17. agno/knowledge/reader/reader_factory.py +7 -3
  18. agno/knowledge/reader/web_search_reader.py +12 -6
  19. agno/models/base.py +2 -2
  20. agno/models/message.py +109 -0
  21. agno/models/openai/chat.py +3 -0
  22. agno/models/openai/responses.py +12 -0
  23. agno/models/response.py +5 -0
  24. agno/models/siliconflow/__init__.py +5 -0
  25. agno/models/siliconflow/siliconflow.py +25 -0
  26. agno/os/app.py +164 -41
  27. agno/os/auth.py +24 -14
  28. agno/os/interfaces/agui/utils.py +98 -134
  29. agno/os/router.py +128 -55
  30. agno/os/routers/evals/utils.py +9 -9
  31. agno/os/routers/health.py +25 -0
  32. agno/os/routers/home.py +52 -0
  33. agno/os/routers/knowledge/knowledge.py +11 -11
  34. agno/os/routers/session/session.py +24 -8
  35. agno/os/schema.py +29 -2
  36. agno/os/utils.py +0 -8
  37. agno/run/agent.py +3 -3
  38. agno/run/team.py +3 -3
  39. agno/run/workflow.py +64 -10
  40. agno/session/team.py +1 -0
  41. agno/team/team.py +189 -94
  42. agno/tools/duckduckgo.py +15 -11
  43. agno/tools/googlesearch.py +1 -1
  44. agno/tools/mem0.py +11 -17
  45. agno/tools/memory.py +34 -6
  46. agno/utils/common.py +90 -1
  47. agno/utils/streamlit.py +14 -8
  48. agno/utils/string.py +32 -0
  49. agno/utils/tools.py +1 -1
  50. agno/vectordb/chroma/chromadb.py +8 -2
  51. agno/workflow/step.py +115 -16
  52. agno/workflow/workflow.py +16 -13
  53. {agno-2.0.3.dist-info → agno-2.0.5.dist-info}/METADATA +6 -5
  54. {agno-2.0.3.dist-info → agno-2.0.5.dist-info}/RECORD +57 -54
  55. agno/knowledge/reader/url_reader.py +0 -128
  56. {agno-2.0.3.dist-info → agno-2.0.5.dist-info}/WHEEL +0 -0
  57. {agno-2.0.3.dist-info → agno-2.0.5.dist-info}/licenses/LICENSE +0 -0
  58. {agno-2.0.3.dist-info → agno-2.0.5.dist-info}/top_level.txt +0 -0
agno/db/dynamo/dynamo.py CHANGED
@@ -32,6 +32,7 @@ from agno.db.schemas.knowledge import KnowledgeRow
  from agno.db.schemas.memory import UserMemory
  from agno.session import AgentSession, Session, TeamSession, WorkflowSession
  from agno.utils.log import log_debug, log_error
+ from agno.utils.string import generate_id

  try:
  import boto3 # type: ignore[import-untyped]
@@ -55,6 +56,7 @@ class DynamoDb(BaseDb):
  metrics_table: Optional[str] = None,
  eval_table: Optional[str] = None,
  knowledge_table: Optional[str] = None,
+ id: Optional[str] = None,
  ):
  """
  Interface for interacting with a DynamoDB database.
@@ -69,8 +71,14 @@
  metrics_table: The name of the metrics table.
  eval_table: The name of the eval table.
  knowledge_table: The name of the knowledge table.
+ id: ID of the database.
  """
+ if id is None:
+ seed = str(db_client) if db_client else f"{region_name}_{aws_access_key_id}"
+ id = generate_id(seed)
+
  super().__init__(
+ id=id,
  session_table=session_table,
  memory_table=memory_table,
  metrics_table=metrics_table,
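Every storage backend in this release gets the same treatment shown above for DynamoDb: a new optional id parameter, with a deterministic fallback that feeds a seed built from the connection details into generate_id from agno.utils.string. A minimal usage sketch, assuming generate_id hashes its seed into a stable string (its implementation is not part of this diff) and using only parameter names that appear in the hunks; the credential value is a placeholder:

    from agno.db.dynamo.dynamo import DynamoDb

    # Explicit id: used as-is.
    db = DynamoDb(session_table="agno_sessions", id="dynamo-prod")

    # No id: the seed is str(db_client) if a client is passed, otherwise
    # f"{region_name}_{aws_access_key_id}", so the same connection details
    # always resolve to the same database id across restarts.
    db_auto = DynamoDb(
        session_table="agno_sessions",
        region_name="us-east-1",
        aws_access_key_id="EXAMPLE_KEY_ID",
    )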
agno/db/firestore/firestore.py CHANGED
@@ -19,6 +19,7 @@ from agno.db.schemas.memory import UserMemory
  from agno.db.utils import deserialize_session_json_fields, serialize_session_json_fields
  from agno.session import AgentSession, Session, TeamSession, WorkflowSession
  from agno.utils.log import log_debug, log_error, log_info
+ from agno.utils.string import generate_id

  try:
  from google.cloud.firestore import Client, FieldFilter # type: ignore[import-untyped]
@@ -38,6 +39,7 @@ class FirestoreDb(BaseDb):
  metrics_collection: Optional[str] = None,
  eval_collection: Optional[str] = None,
  knowledge_collection: Optional[str] = None,
+ id: Optional[str] = None,
  ):
  """
  Interface for interacting with a Firestore database.
@@ -50,11 +52,17 @@
  metrics_collection (Optional[str]): Name of the collection to store metrics.
  eval_collection (Optional[str]): Name of the collection to store evaluation runs.
  knowledge_collection (Optional[str]): Name of the collection to store knowledge documents.
+ id (Optional[str]): ID of the database.

  Raises:
  ValueError: If neither project_id nor db_client is provided.
  """
+ if id is None:
+ seed = project_id or str(db_client)
+ id = generate_id(seed)
+
  super().__init__(
+ id=id,
  session_table=session_collection,
  memory_table=memory_collection,
  metrics_table=metrics_collection,
agno/db/gcs_json/gcs_json_db.py CHANGED
@@ -16,6 +16,7 @@ from agno.db.schemas.knowledge import KnowledgeRow
  from agno.db.schemas.memory import UserMemory
  from agno.session import AgentSession, Session, TeamSession, WorkflowSession
  from agno.utils.log import log_debug, log_error, log_info, log_warning
+ from agno.utils.string import generate_id

  try:
  from google.cloud import storage as gcs # type: ignore
@@ -35,6 +36,7 @@ class GcsJsonDb(BaseDb):
  knowledge_table: Optional[str] = None,
  project: Optional[str] = None,
  credentials: Optional[Any] = None,
+ id: Optional[str] = None,
  ):
  """
  Interface for interacting with JSON files stored in Google Cloud Storage as database.
@@ -50,8 +52,15 @@
  project (Optional[str]): GCP project ID. If None, uses default project.
  location (Optional[str]): GCS bucket location. If None, uses default location.
  credentials (Optional[Any]): GCP credentials. If None, uses default credentials.
+ id (Optional[str]): ID of the database.
  """
+ if id is None:
+ prefix_suffix = prefix or "agno/"
+ seed = f"{bucket_name}_{project}#{prefix_suffix}"
+ id = generate_id(seed)
+
  super().__init__(
+ id=id,
  session_table=session_table,
  memory_table=memory_table,
  metrics_table=metrics_table,
agno/db/json/json_db.py CHANGED
@@ -19,6 +19,7 @@ from agno.db.schemas.knowledge import KnowledgeRow
  from agno.db.schemas.memory import UserMemory
  from agno.session import AgentSession, Session, TeamSession, WorkflowSession
  from agno.utils.log import log_debug, log_error, log_info, log_warning
+ from agno.utils.string import generate_id


  class JsonDb(BaseDb):
@@ -30,6 +31,7 @@ class JsonDb(BaseDb):
  metrics_table: Optional[str] = None,
  eval_table: Optional[str] = None,
  knowledge_table: Optional[str] = None,
+ id: Optional[str] = None,
  ):
  """
  Interface for interacting with JSON files as database.
@@ -41,8 +43,14 @@
  metrics_table (Optional[str]): Name of the JSON file to store metrics.
  eval_table (Optional[str]): Name of the JSON file to store evaluation runs.
  knowledge_table (Optional[str]): Name of the JSON file to store knowledge content.
+ id (Optional[str]): ID of the database.
  """
+ if id is None:
+ seed = db_path or "agno_json_db"
+ id = generate_id(seed)
+
  super().__init__(
+ id=id,
  session_table=session_table,
  memory_table=memory_table,
  metrics_table=metrics_table,
agno/db/migrations/v1_to_v2.py CHANGED
@@ -1,9 +1,11 @@
  """Migration utility to migrate your Agno tables from v1 to v2"""

+ import json
  from typing import Any, Dict, List, Optional, Union

  from sqlalchemy import text

+ from agno.db.mongo.mongo import MongoDb
  from agno.db.mysql.mysql import MySQLDb
  from agno.db.postgres.postgres import PostgresDb
  from agno.db.schemas.memory import UserMemory
@@ -12,24 +14,171 @@ from agno.session import AgentSession, TeamSession, WorkflowSession
  from agno.utils.log import log_error


+ def convert_v1_metrics_to_v2(metrics_dict: Dict[str, Any]) -> Dict[str, Any]:
+ """Convert v1 metrics dictionary to v2 format by mapping old field names to new ones."""
+ if not isinstance(metrics_dict, dict):
+ return metrics_dict
+
+ # Create a copy to avoid modifying the original
+ v2_metrics = metrics_dict.copy()
+
+ # Map v1 field names to v2 field names
+ field_mappings = {
+ "time": "duration",
+ "audio_tokens": "audio_total_tokens",
+ "input_audio_tokens": "audio_input_tokens",
+ "output_audio_tokens": "audio_output_tokens",
+ "cached_tokens": "cache_read_tokens",
+ }
+
+ # Fields to remove (deprecated in v2)
+ deprecated_fields = ["prompt_tokens", "completion_tokens", "prompt_tokens_details", "completion_tokens_details"]
+
+ # Apply field mappings
+ for old_field, new_field in field_mappings.items():
+ if old_field in v2_metrics:
+ v2_metrics[new_field] = v2_metrics.pop(old_field)
+
+ # Remove deprecated fields
+ for field in deprecated_fields:
+ v2_metrics.pop(field, None)
+
+ return v2_metrics
+
+
+ def convert_any_metrics_in_data(data: Any) -> Any:
+ """Recursively find and convert any metrics dictionaries in the data structure."""
+ if isinstance(data, dict):
+ # First filter out deprecated v1 fields
+ data = filter_deprecated_v1_fields(data)
+
+ # Check if this looks like a metrics dictionary
+ if _is_metrics_dict(data):
+ return convert_v1_metrics_to_v2(data)
+
+ # Otherwise, recursively process all values
+ converted_dict = {}
+ for key, value in data.items():
+ # Special handling for 'metrics' keys - always convert their values
+ if key == "metrics" and isinstance(value, dict):
+ converted_dict[key] = convert_v1_metrics_to_v2(value)
+ else:
+ converted_dict[key] = convert_any_metrics_in_data(value)
+ return converted_dict
+
+ elif isinstance(data, list):
+ return [convert_any_metrics_in_data(item) for item in data]
+
+ else:
+ # Not a dict or list, return as-is
+ return data
+
+
+ def _is_metrics_dict(data: Dict[str, Any]) -> bool:
+ """Check if a dictionary looks like a metrics dictionary based on common field names."""
+ if not isinstance(data, dict):
+ return False
+
+ # Common metrics field names (both v1 and v2)
+ metrics_indicators = {
+ "input_tokens",
+ "output_tokens",
+ "total_tokens",
+ "time",
+ "duration",
+ "audio_tokens",
+ "audio_total_tokens",
+ "audio_input_tokens",
+ "audio_output_tokens",
+ "cached_tokens",
+ "cache_read_tokens",
+ "cache_write_tokens",
+ "reasoning_tokens",
+ "prompt_tokens",
+ "completion_tokens",
+ "time_to_first_token",
+ "provider_metrics",
+ "additional_metrics",
+ }
+
+ # Deprecated v1 fields that are strong indicators this is a metrics dict
+ deprecated_v1_indicators = {"time", "audio_tokens", "cached_tokens", "prompt_tokens", "completion_tokens"}
+
+ # If we find any deprecated v1 field, it's definitely a metrics dict that needs conversion
+ if any(field in data for field in deprecated_v1_indicators):
+ return True
+
+ # Otherwise, if the dict has at least 2 metrics-related fields, consider it a metrics dict
+ matching_fields = sum(1 for field in data.keys() if field in metrics_indicators)
+ return matching_fields >= 2
+
+
+ def convert_session_data_comprehensively(session_data: Optional[Dict[str, Any]]) -> Optional[Dict[str, Any]]:
+ """Comprehensively convert any metrics found anywhere in session_data from v1 to v2 format."""
+ if not session_data:
+ return session_data
+
+ # Use the recursive converter to find and fix all metrics
+ return convert_any_metrics_in_data(session_data)
+
+
+ def safe_get_runs_from_memory(memory_data: Any) -> Any:
+ """Safely extract runs data from memory field, handling various data types."""
+ if memory_data is None:
+ return None
+
+ # If memory_data is a string, try to parse it as JSON
+ if isinstance(memory_data, str):
+ try:
+ memory_dict = json.loads(memory_data)
+ if isinstance(memory_dict, dict):
+ return memory_dict.get("runs")
+ except (json.JSONDecodeError, AttributeError):
+ # If JSON parsing fails, memory_data might just be a string value
+ return None
+
+ # If memory_data is already a dict, access runs directly
+ elif isinstance(memory_data, dict):
+ return memory_data.get("runs")
+
+ # For any other type, return None
+ return None
+
+
+ def filter_deprecated_v1_fields(data: Dict[str, Any]) -> Dict[str, Any]:
+ """Remove v1-only fields that don't exist in v2 models."""
+ if not isinstance(data, dict):
+ return data
+
+ # Fields that existed in v1 but were removed in v2
+ deprecated_fields = {
+ "team_session_id", # RunOutput v1 field, removed in v2
+ "formatted_tool_calls", # RunOutput v1 field, removed in v2
+ # Add other deprecated fields here as needed
+ }
+
+ # Create a copy and remove deprecated fields
+ filtered_data = {k: v for k, v in data.items() if k not in deprecated_fields}
+ return filtered_data
+
+
  def migrate(
- db: Union[PostgresDb, MySQLDb, SqliteDb],
+ db: Union[PostgresDb, MySQLDb, SqliteDb, MongoDb],
  v1_db_schema: str,
  agent_sessions_table_name: Optional[str] = None,
  team_sessions_table_name: Optional[str] = None,
  workflow_sessions_table_name: Optional[str] = None,
  memories_table_name: Optional[str] = None,
  ):
- """Given a PostgresDb and table names, parse and migrate the tables' content to the corresponding v2 tables.
+ """Given a database connection and table/collection names, parse and migrate the content to corresponding v2 tables/collections.

  Args:
- db: The database to migrate
- v1_db_schema: The schema of the v1 tables
- agent_sessions_table_name: The name of the agent sessions table. If not provided, the agent sessions table will not be migrated.
- team_sessions_table_name: The name of the team sessions table. If not provided, the team sessions table will not be migrated.
- workflow_sessions_table_name: The name of the workflow sessions table. If not provided, the workflow sessions table will not be migrated.
- workflow_v2_sessions_table_name: The name of the workflow v2 sessions table. If not provided, the workflow v2 sessions table will not be migrated.
- memories_table_name: The name of the memories table. If not provided, the memories table will not be migrated.
+ db: The database to migrate (PostgresDb, MySQLDb, SqliteDb, or MongoDb)
+ v1_db_schema: The schema of the v1 tables (leave empty for SQLite and MongoDB)
+ agent_sessions_table_name: The name of the agent sessions table/collection. If not provided, agent sessions will not be migrated.
+ team_sessions_table_name: The name of the team sessions table/collection. If not provided, team sessions will not be migrated.
+ workflow_sessions_table_name: The name of the workflow sessions table/collection. If not provided, workflow sessions will not be migrated.
+ memories_table_name: The name of the memories table/collection. If not provided, memories will not be migrated.
  """
  if agent_sessions_table_name:
  db.migrate_table_from_v1_to_v2(
@@ -61,14 +210,33 @@ def migrate(


  def get_all_table_content(db, db_schema: str, table_name: str) -> list[dict[str, Any]]:
- """Get all content from the given table"""
+ """Get all content from the given table/collection"""
  try:
- with db.Session() as sess:
- result = sess.execute(text(f"SELECT * FROM {db_schema}.{table_name}"))
- return [row._asdict() for row in result]
+ # Check if this is a MongoDB instance
+ if hasattr(db, "database") and hasattr(db, "db_client"):
+ # MongoDB implementation
+ collection = db.database[table_name]
+ # Convert MongoDB documents to dictionaries and handle ObjectId
+ documents = list(collection.find({}))
+ # Convert ObjectId to string for compatibility
+ for doc in documents:
+ if "_id" in doc:
+ doc["_id"] = str(doc["_id"])
+ return documents
+ else:
+ # SQL database implementation (PostgreSQL, MySQL, SQLite)
+ with db.Session() as sess:
+ # Handle empty schema by omitting the schema prefix (needed for SQLite)
+ if db_schema and db_schema.strip():
+ sql_query = f"SELECT * FROM {db_schema}.{table_name}"
+ else:
+ sql_query = f"SELECT * FROM {table_name}"
+
+ result = sess.execute(text(sql_query))
+ return [row._asdict() for row in result]

  except Exception as e:
- log_error(f"Error getting all content from table {table_name}: {e}")
+ log_error(f"Error getting all content from table/collection {table_name}: {e}")
  return []


@@ -82,9 +250,9 @@ def parse_agent_sessions(v1_content: List[Dict[str, Any]]) -> List[AgentSession]
  "agent_data": item.get("agent_data"),
  "session_id": item.get("session_id"),
  "user_id": item.get("user_id"),
- "session_data": item.get("session_data"),
- "metadata": item.get("extra_data"),
- "runs": item.get("memory", {}).get("runs"),
+ "session_data": convert_session_data_comprehensively(item.get("session_data")),
+ "metadata": convert_any_metrics_in_data(item.get("extra_data")),
+ "runs": convert_any_metrics_in_data(safe_get_runs_from_memory(item.get("memory"))),
  "created_at": item.get("created_at"),
  "updated_at": item.get("updated_at"),
  }
@@ -105,9 +273,9 @@ def parse_team_sessions(v1_content: List[Dict[str, Any]]) -> List[TeamSession]:
  "team_data": item.get("team_data"),
  "session_id": item.get("session_id"),
  "user_id": item.get("user_id"),
- "session_data": item.get("session_data"),
- "metadata": item.get("extra_data"),
- "runs": item.get("memory", {}).get("runs"),
+ "session_data": convert_session_data_comprehensively(item.get("session_data")),
+ "metadata": convert_any_metrics_in_data(item.get("extra_data")),
+ "runs": convert_any_metrics_in_data(safe_get_runs_from_memory(item.get("memory"))),
  "created_at": item.get("created_at"),
  "updated_at": item.get("updated_at"),
  }
@@ -128,13 +296,13 @@ def parse_workflow_sessions(v1_content: List[Dict[str, Any]]) -> List[WorkflowSe
  "workflow_data": item.get("workflow_data"),
  "session_id": item.get("session_id"),
  "user_id": item.get("user_id"),
- "session_data": item.get("session_data"),
- "metadata": item.get("extra_data"),
+ "session_data": convert_session_data_comprehensively(item.get("session_data")),
+ "metadata": convert_any_metrics_in_data(item.get("extra_data")),
  "created_at": item.get("created_at"),
  "updated_at": item.get("updated_at"),
  # Workflow v2 specific fields
  "workflow_name": item.get("workflow_name"),
- "runs": item.get("runs"),
+ "runs": convert_any_metrics_in_data(item.get("runs")),
  }
  workflow_session = WorkflowSession.from_dict(session)
  if workflow_session is not None:
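The conversion helpers added above are mechanical renames plus deletions. A worked example, based only on the field_mappings and deprecated_fields defined in convert_v1_metrics_to_v2:

    v1_metrics = {
        "input_tokens": 120,
        "output_tokens": 45,
        "time": 1.8,              # renamed to "duration"
        "cached_tokens": 30,      # renamed to "cache_read_tokens"
        "prompt_tokens": 120,     # deprecated in v2, dropped
        "completion_tokens": 45,  # deprecated in v2, dropped
    }

    v2_metrics = convert_v1_metrics_to_v2(v1_metrics)
    # {"input_tokens": 120, "output_tokens": 45, "duration": 1.8, "cache_read_tokens": 30}

convert_any_metrics_in_data applies the same rewrite recursively, so metrics nested inside runs, messages, or session_data are converted wherever they appear.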
agno/db/mongo/mongo.py CHANGED
@@ -19,6 +19,7 @@ from agno.db.schemas.memory import UserMemory
  from agno.db.utils import deserialize_session_json_fields, serialize_session_json_fields
  from agno.session import AgentSession, Session, TeamSession, WorkflowSession
  from agno.utils.log import log_debug, log_error, log_info
+ from agno.utils.string import generate_id

  try:
  from pymongo import MongoClient, ReturnDocument
@@ -40,6 +41,7 @@ class MongoDb(BaseDb):
  metrics_collection: Optional[str] = None,
  eval_collection: Optional[str] = None,
  knowledge_collection: Optional[str] = None,
+ id: Optional[str] = None,
  ):
  """
  Interface for interacting with a MongoDB database.
@@ -53,11 +55,19 @@
  metrics_collection (Optional[str]): Name of the collection to store metrics.
  eval_collection (Optional[str]): Name of the collection to store evaluation runs.
  knowledge_collection (Optional[str]): Name of the collection to store knowledge documents.
+ id (Optional[str]): ID of the database.

  Raises:
  ValueError: If neither db_url nor db_client is provided.
  """
+ if id is None:
+ base_seed = db_url or str(db_client)
+ db_name_suffix = db_name if db_name is not None else "agno"
+ seed = f"{base_seed}#{db_name_suffix}"
+ id = generate_id(seed)
+
  super().__init__(
+ id=id,
  session_table=session_collection,
  memory_table=memory_collection,
  metrics_table=metrics_collection,
@@ -1414,3 +1424,61 @@
  except Exception as e:
  log_error(f"Error updating eval run name {eval_run_id}: {e}")
  raise
+
+ def migrate_table_from_v1_to_v2(self, v1_db_schema: str, v1_table_name: str, v1_table_type: str):
+ """Migrate all content in the given collection to the right v2 collection"""
+
+ from typing import List, Sequence, Union
+
+ from agno.db.migrations.v1_to_v2 import (
+ get_all_table_content,
+ parse_agent_sessions,
+ parse_memories,
+ parse_team_sessions,
+ parse_workflow_sessions,
+ )
+
+ # Get all content from the old collection
+ old_content: list[dict[str, Any]] = get_all_table_content(
+ db=self,
+ db_schema=v1_db_schema,
+ table_name=v1_table_name,
+ )
+ if not old_content:
+ log_info(f"No content to migrate from collection {v1_table_name}")
+ return
+
+ # Parse the content into the new format
+ memories: List[UserMemory] = []
+ sessions: Sequence[Union[AgentSession, TeamSession, WorkflowSession]] = []
+ if v1_table_type == "agent_sessions":
+ sessions = parse_agent_sessions(old_content)
+ elif v1_table_type == "team_sessions":
+ sessions = parse_team_sessions(old_content)
+ elif v1_table_type == "workflow_sessions":
+ sessions = parse_workflow_sessions(old_content)
+ elif v1_table_type == "memories":
+ memories = parse_memories(old_content)
+ else:
+ raise ValueError(f"Invalid table type: {v1_table_type}")
+
+ # Insert the new content into the new collection
+ if v1_table_type == "agent_sessions":
+ for session in sessions:
+ self.upsert_session(session)
+ log_info(f"Migrated {len(sessions)} Agent sessions to collection: {self.session_table_name}")
+
+ elif v1_table_type == "team_sessions":
+ for session in sessions:
+ self.upsert_session(session)
+ log_info(f"Migrated {len(sessions)} Team sessions to collection: {self.session_table_name}")
+
+ elif v1_table_type == "workflow_sessions":
+ for session in sessions:
+ self.upsert_session(session)
+ log_info(f"Migrated {len(sessions)} Workflow sessions to collection: {self.session_table_name}")
+
+ elif v1_table_type == "memories":
+ for memory in memories:
+ self.upsert_user_memory(memory)
+ log_info(f"Migrated {len(memories)} memories to collection: {self.memory_table_name}")
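With migrate_table_from_v1_to_v2 now implemented on MongoDb, the top-level migrate() helper shown earlier accepts Mongo databases as well. A hedged usage sketch using only parameters documented in this diff; the collection names are illustrative, and per the updated docstring v1_db_schema is left empty for MongoDB and SQLite:

    from agno.db.migrations.v1_to_v2 import migrate
    from agno.db.mongo.mongo import MongoDb

    db = MongoDb(db_url="mongodb://localhost:27017", db_name="agno")

    migrate(
        db=db,
        v1_db_schema="",  # unused for MongoDB
        agent_sessions_table_name="agent_sessions",
        team_sessions_table_name="team_sessions",
        workflow_sessions_table_name="workflow_sessions",
        memories_table_name="memories",
    )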
agno/db/mysql/mysql.py CHANGED
@@ -22,6 +22,7 @@ from agno.db.schemas.knowledge import KnowledgeRow
  from agno.db.schemas.memory import UserMemory
  from agno.session import AgentSession, Session, TeamSession, WorkflowSession
  from agno.utils.log import log_debug, log_error, log_info
+ from agno.utils.string import generate_id

  try:
  from sqlalchemy import TEXT, and_, cast, func, update
@@ -45,6 +46,7 @@ class MySQLDb(BaseDb):
  metrics_table: Optional[str] = None,
  eval_table: Optional[str] = None,
  knowledge_table: Optional[str] = None,
+ id: Optional[str] = None,
  ):
  """
  Interface for interacting with a MySQL database.
@@ -63,12 +65,20 @@
  metrics_table (Optional[str]): Name of the table to store metrics.
  eval_table (Optional[str]): Name of the table to store evaluation runs data.
  knowledge_table (Optional[str]): Name of the table to store knowledge content.
+ id (Optional[str]): ID of the database.

  Raises:
  ValueError: If neither db_url nor db_engine is provided.
  ValueError: If none of the tables are provided.
  """
+ if id is None:
+ base_seed = db_url or str(db_engine.url) # type: ignore
+ schema_suffix = db_schema if db_schema is not None else "ai"
+ seed = f"{base_seed}#{schema_suffix}"
+ id = generate_id(seed)
+
  super().__init__(
+ id=id,
  session_table=session_table,
  memory_table=memory_table,
  metrics_table=metrics_table,
@@ -1701,17 +1711,17 @@
  if v1_table_type == "agent_sessions":
  for session in sessions:
  self.upsert_session(session)
- log_info(f"Migrated {len(sessions)} Agent sessions to table: {self.session_table}")
+ log_info(f"Migrated {len(sessions)} Agent sessions to table: {self.session_table_name}")

  elif v1_table_type == "team_sessions":
  for session in sessions:
  self.upsert_session(session)
- log_info(f"Migrated {len(sessions)} Team sessions to table: {self.session_table}")
+ log_info(f"Migrated {len(sessions)} Team sessions to table: {self.session_table_name}")

  elif v1_table_type == "workflow_sessions":
  for session in sessions:
  self.upsert_session(session)
- log_info(f"Migrated {len(sessions)} Workflow sessions to table: {self.session_table}")
+ log_info(f"Migrated {len(sessions)} Workflow sessions to table: {self.session_table_name}")

  elif v1_table_type == "memories":
  for memory in memories:
agno/db/mysql/schemas.py CHANGED
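The hunks below replace bare String column types with lambda: String(n) factories. MySQL needs an explicit length to render a VARCHAR column (and to index it), and wrapping the type in a lambda gives each column its own freshly constructed, length-bound type instance when the table is built. A sketch of how such an entry could be turned into a SQLAlchemy Column, assuming a consumer that simply calls callable type entries (build_column is hypothetical; the actual table-construction code is not part of this diff):

    from sqlalchemy import Column

    from agno.db.mysql.schemas import SESSION_TABLE_SCHEMA

    def build_column(name: str, spec: dict) -> Column:
        # Both the lambda factories (lambda: String(128)) and plain type classes
        # (JSON, BigInteger, ...) are callable; calling them yields a concrete
        # type instance that Column accepts.
        type_spec = spec["type"]
        col_type = type_spec() if callable(type_spec) else type_spec
        return Column(
            name,
            col_type,
            primary_key=spec.get("primary_key", False),
            nullable=spec.get("nullable", True),
            index=spec.get("index", False),
        )

    session_id_col = build_column("session_id", SESSION_TABLE_SCHEMA["session_id"])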
@@ -8,12 +8,12 @@ except ImportError:
  raise ImportError("`sqlalchemy` not installed. Please install it using `pip install sqlalchemy`")

  SESSION_TABLE_SCHEMA = {
- "session_id": {"type": String, "nullable": False},
- "session_type": {"type": String, "nullable": False, "index": True},
- "agent_id": {"type": String, "nullable": True},
- "team_id": {"type": String, "nullable": True},
- "workflow_id": {"type": String, "nullable": True},
- "user_id": {"type": String, "nullable": True},
+ "session_id": {"type": lambda: String(128), "nullable": False},
+ "session_type": {"type": lambda: String(20), "nullable": False, "index": True},
+ "agent_id": {"type": lambda: String(128), "nullable": True},
+ "team_id": {"type": lambda: String(128), "nullable": True},
+ "workflow_id": {"type": lambda: String(128), "nullable": True},
+ "user_id": {"type": lambda: String(128), "nullable": True},
  "session_data": {"type": JSON, "nullable": True},
  "agent_data": {"type": JSON, "nullable": True},
  "team_data": {"type": JSON, "nullable": True},
@@ -32,50 +32,50 @@ SESSION_TABLE_SCHEMA = {
  }

  USER_MEMORY_TABLE_SCHEMA = {
- "memory_id": {"type": String, "primary_key": True, "nullable": False},
+ "memory_id": {"type": lambda: String(128), "primary_key": True, "nullable": False},
  "memory": {"type": JSON, "nullable": False},
  "input": {"type": Text, "nullable": True},
- "agent_id": {"type": String, "nullable": True},
- "team_id": {"type": String, "nullable": True},
- "user_id": {"type": String, "nullable": True, "index": True},
+ "agent_id": {"type": lambda: String(128), "nullable": True},
+ "team_id": {"type": lambda: String(128), "nullable": True},
+ "user_id": {"type": lambda: String(128), "nullable": True, "index": True},
  "topics": {"type": JSON, "nullable": True},
  "updated_at": {"type": BigInteger, "nullable": True, "index": True},
  }

  EVAL_TABLE_SCHEMA = {
- "run_id": {"type": String, "primary_key": True, "nullable": False},
- "eval_type": {"type": String, "nullable": False},
+ "run_id": {"type": lambda: String(128), "primary_key": True, "nullable": False},
+ "eval_type": {"type": lambda: String(50), "nullable": False},
  "eval_data": {"type": JSON, "nullable": False},
  "eval_input": {"type": JSON, "nullable": False},
- "name": {"type": String, "nullable": True},
- "agent_id": {"type": String, "nullable": True},
- "team_id": {"type": String, "nullable": True},
- "workflow_id": {"type": String, "nullable": True},
- "model_id": {"type": String, "nullable": True},
- "model_provider": {"type": String, "nullable": True},
- "evaluated_component_name": {"type": String, "nullable": True},
+ "name": {"type": lambda: String(255), "nullable": True},
+ "agent_id": {"type": lambda: String(128), "nullable": True},
+ "team_id": {"type": lambda: String(128), "nullable": True},
+ "workflow_id": {"type": lambda: String(128), "nullable": True},
+ "model_id": {"type": lambda: String(128), "nullable": True},
+ "model_provider": {"type": lambda: String(128), "nullable": True},
+ "evaluated_component_name": {"type": lambda: String(255), "nullable": True},
  "created_at": {"type": BigInteger, "nullable": False, "index": True},
  "updated_at": {"type": BigInteger, "nullable": True},
  }

  KNOWLEDGE_TABLE_SCHEMA = {
- "id": {"type": String, "primary_key": True, "nullable": False},
- "name": {"type": String, "nullable": False},
+ "id": {"type": lambda: String(128), "primary_key": True, "nullable": False},
+ "name": {"type": lambda: String(255), "nullable": False},
  "description": {"type": Text, "nullable": False},
  "metadata": {"type": JSON, "nullable": True},
- "type": {"type": String, "nullable": True},
+ "type": {"type": lambda: String(50), "nullable": True},
  "size": {"type": BigInteger, "nullable": True},
- "linked_to": {"type": String, "nullable": True},
+ "linked_to": {"type": lambda: String(128), "nullable": True},
  "access_count": {"type": BigInteger, "nullable": True},
  "created_at": {"type": BigInteger, "nullable": True},
  "updated_at": {"type": BigInteger, "nullable": True},
- "status": {"type": String, "nullable": True},
+ "status": {"type": lambda: String(50), "nullable": True},
  "status_message": {"type": Text, "nullable": True},
- "external_id": {"type": String, "nullable": True},
+ "external_id": {"type": lambda: String(128), "nullable": True},
  }

  METRICS_TABLE_SCHEMA = {
- "id": {"type": String, "primary_key": True, "nullable": False},
+ "id": {"type": lambda: String(128), "primary_key": True, "nullable": False},
  "agent_runs_count": {"type": BigInteger, "nullable": False},
  "team_runs_count": {"type": BigInteger, "nullable": False},
  "workflow_runs_count": {"type": BigInteger, "nullable": False},
@@ -86,7 +86,7 @@ METRICS_TABLE_SCHEMA = {
  "token_metrics": {"type": JSON, "nullable": False},
  "model_metrics": {"type": JSON, "nullable": False},
  "date": {"type": Date, "nullable": False, "index": True},
- "aggregation_period": {"type": String, "nullable": False},
+ "aggregation_period": {"type": lambda: String(20), "nullable": False},
  "created_at": {"type": BigInteger, "nullable": False},
  "updated_at": {"type": BigInteger, "nullable": True},
  "completed": {"type": Boolean, "nullable": False},