agno-2.0.7-py3-none-any.whl → agno-2.0.9-py3-none-any.whl
This diff compares publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- agno/agent/agent.py +83 -51
- agno/db/base.py +14 -0
- agno/db/dynamo/dynamo.py +107 -27
- agno/db/firestore/firestore.py +109 -33
- agno/db/gcs_json/gcs_json_db.py +100 -20
- agno/db/in_memory/in_memory_db.py +95 -20
- agno/db/json/json_db.py +101 -21
- agno/db/migrations/v1_to_v2.py +322 -47
- agno/db/mongo/mongo.py +251 -26
- agno/db/mysql/mysql.py +307 -6
- agno/db/postgres/postgres.py +279 -33
- agno/db/redis/redis.py +99 -22
- agno/db/singlestore/singlestore.py +319 -38
- agno/db/sqlite/sqlite.py +339 -23
- agno/knowledge/embedder/sentence_transformer.py +3 -3
- agno/knowledge/knowledge.py +152 -31
- agno/knowledge/types.py +8 -0
- agno/models/anthropic/claude.py +0 -20
- agno/models/cometapi/__init__.py +5 -0
- agno/models/cometapi/cometapi.py +57 -0
- agno/models/google/gemini.py +4 -8
- agno/models/huggingface/huggingface.py +2 -1
- agno/models/ollama/chat.py +52 -3
- agno/models/openai/chat.py +9 -7
- agno/models/openai/responses.py +21 -17
- agno/os/interfaces/agui/agui.py +2 -2
- agno/os/interfaces/agui/utils.py +81 -18
- agno/os/interfaces/base.py +2 -0
- agno/os/interfaces/slack/router.py +50 -10
- agno/os/interfaces/slack/slack.py +6 -4
- agno/os/interfaces/whatsapp/router.py +7 -4
- agno/os/interfaces/whatsapp/whatsapp.py +2 -2
- agno/os/router.py +18 -0
- agno/os/utils.py +10 -2
- agno/reasoning/azure_ai_foundry.py +2 -2
- agno/reasoning/deepseek.py +2 -2
- agno/reasoning/default.py +3 -1
- agno/reasoning/groq.py +2 -2
- agno/reasoning/ollama.py +2 -2
- agno/reasoning/openai.py +2 -2
- agno/run/base.py +15 -2
- agno/session/agent.py +8 -5
- agno/session/team.py +14 -10
- agno/team/team.py +218 -111
- agno/tools/function.py +43 -4
- agno/tools/mcp.py +60 -37
- agno/tools/mcp_toolbox.py +284 -0
- agno/tools/scrapegraph.py +58 -31
- agno/tools/whatsapp.py +1 -1
- agno/utils/gemini.py +147 -19
- agno/utils/models/claude.py +9 -0
- agno/utils/print_response/agent.py +18 -2
- agno/utils/print_response/team.py +22 -6
- agno/utils/reasoning.py +22 -1
- agno/utils/string.py +9 -0
- agno/vectordb/base.py +2 -2
- agno/vectordb/langchaindb/langchaindb.py +5 -7
- agno/vectordb/llamaindex/llamaindexdb.py +25 -6
- agno/workflow/workflow.py +30 -15
- {agno-2.0.7.dist-info → agno-2.0.9.dist-info}/METADATA +4 -1
- {agno-2.0.7.dist-info → agno-2.0.9.dist-info}/RECORD +64 -61
- {agno-2.0.7.dist-info → agno-2.0.9.dist-info}/WHEEL +0 -0
- {agno-2.0.7.dist-info → agno-2.0.9.dist-info}/licenses/LICENSE +0 -0
- {agno-2.0.7.dist-info → agno-2.0.9.dist-info}/top_level.txt +0 -0
agno/db/mysql/mysql.py
CHANGED
@@ -499,8 +499,8 @@ class MySQLDb(BaseDb):
             raise ValueError(f"Invalid session type: {session_type}")
 
         except Exception as e:
-            log_error(f"Exception getting
-
+            log_error(f"Exception getting sessions: {e}")
+            raise e
 
     def rename_session(
         self, session_id: str, session_type: SessionType, session_name: str, deserialize: Optional[bool] = True
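This error-handling change repeats throughout the file and across the other db backends in this release: `get_sessions`, the memory readers, the metrics session reader, and the eval-run getter all previously logged failures and fell through, while from 2.0.9 they log and then `raise e`. A minimal caller-side sketch, assuming the import paths, constructor, and `get_sessions` signature inferred from this diff (none of which are confirmed API documentation):

```python
# Sketch only: import paths, the MySQLDb constructor, and get_sessions()
# are assumptions inferred from this diff, not verified against the docs.
from agno.db.base import SessionType
from agno.db.mysql import MySQLDb

db = MySQLDb(db_url="mysql+pymysql://user:pass@localhost:3306/agno")  # hypothetical DSN

try:
    sessions = db.get_sessions(session_type=SessionType.AGENT)
except Exception:
    # Under 2.0.7 a backend failure here was logged and swallowed; under
    # 2.0.9 it propagates, so a caller that wants the old behavior must
    # restore it explicitly.
    sessions = []
```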
@@ -705,6 +705,217 @@ class MySQLDb(BaseDb):
             log_error(f"Exception upserting into sessions table: {e}")
             return None
 
+    def upsert_sessions(
+        self, sessions: List[Session], deserialize: Optional[bool] = True
+    ) -> List[Union[Session, Dict[str, Any]]]:
+        """
+        Bulk upsert multiple sessions for improved performance on large datasets.
+
+        Args:
+            sessions (List[Session]): List of sessions to upsert.
+            deserialize (Optional[bool]): Whether to deserialize the sessions. Defaults to True.
+
+        Returns:
+            List[Union[Session, Dict[str, Any]]]: List of upserted sessions.
+
+        Raises:
+            Exception: If an error occurs during bulk upsert.
+        """
+        if not sessions:
+            return []
+
+        try:
+            table = self._get_table(table_type="sessions", create_table_if_not_found=True)
+            if table is None:
+                log_info("Sessions table not available, falling back to individual upserts")
+                return [
+                    result
+                    for session in sessions
+                    if session is not None
+                    for result in [self.upsert_session(session, deserialize=deserialize)]
+                    if result is not None
+                ]
+
+            # Group sessions by type for batch processing
+            agent_sessions = []
+            team_sessions = []
+            workflow_sessions = []
+
+            for session in sessions:
+                if isinstance(session, AgentSession):
+                    agent_sessions.append(session)
+                elif isinstance(session, TeamSession):
+                    team_sessions.append(session)
+                elif isinstance(session, WorkflowSession):
+                    workflow_sessions.append(session)
+
+            results: List[Union[Session, Dict[str, Any]]] = []
+
+            # Process each session type in bulk
+            with self.Session() as sess, sess.begin():
+                # Bulk upsert agent sessions
+                if agent_sessions:
+                    agent_data = []
+                    for session in agent_sessions:
+                        session_dict = session.to_dict()
+                        agent_data.append(
+                            {
+                                "session_id": session_dict.get("session_id"),
+                                "session_type": SessionType.AGENT.value,
+                                "agent_id": session_dict.get("agent_id"),
+                                "user_id": session_dict.get("user_id"),
+                                "runs": session_dict.get("runs"),
+                                "agent_data": session_dict.get("agent_data"),
+                                "session_data": session_dict.get("session_data"),
+                                "summary": session_dict.get("summary"),
+                                "metadata": session_dict.get("metadata"),
+                                "created_at": session_dict.get("created_at"),
+                                "updated_at": session_dict.get("created_at"),
+                            }
+                        )
+
+                    if agent_data:
+                        stmt = mysql.insert(table)
+                        stmt = stmt.on_duplicate_key_update(
+                            agent_id=stmt.inserted.agent_id,
+                            user_id=stmt.inserted.user_id,
+                            agent_data=stmt.inserted.agent_data,
+                            session_data=stmt.inserted.session_data,
+                            summary=stmt.inserted.summary,
+                            metadata=stmt.inserted.metadata,
+                            runs=stmt.inserted.runs,
+                            updated_at=int(time.time()),
+                        )
+                        sess.execute(stmt, agent_data)
+
+                    # Fetch the results for agent sessions
+                    agent_ids = [session.session_id for session in agent_sessions]
+                    select_stmt = select(table).where(table.c.session_id.in_(agent_ids))
+                    result = sess.execute(select_stmt).fetchall()
+
+                    for row in result:
+                        session_dict = dict(row._mapping)
+                        if deserialize:
+                            deserialized_agent_session = AgentSession.from_dict(session_dict)
+                            if deserialized_agent_session is None:
+                                continue
+                            results.append(deserialized_agent_session)
+                        else:
+                            results.append(session_dict)
+
+                # Bulk upsert team sessions
+                if team_sessions:
+                    team_data = []
+                    for session in team_sessions:
+                        session_dict = session.to_dict()
+                        team_data.append(
+                            {
+                                "session_id": session_dict.get("session_id"),
+                                "session_type": SessionType.TEAM.value,
+                                "team_id": session_dict.get("team_id"),
+                                "user_id": session_dict.get("user_id"),
+                                "runs": session_dict.get("runs"),
+                                "team_data": session_dict.get("team_data"),
+                                "session_data": session_dict.get("session_data"),
+                                "summary": session_dict.get("summary"),
+                                "metadata": session_dict.get("metadata"),
+                                "created_at": session_dict.get("created_at"),
+                                "updated_at": session_dict.get("created_at"),
+                            }
+                        )
+
+                    if team_data:
+                        stmt = mysql.insert(table)
+                        stmt = stmt.on_duplicate_key_update(
+                            team_id=stmt.inserted.team_id,
+                            user_id=stmt.inserted.user_id,
+                            team_data=stmt.inserted.team_data,
+                            session_data=stmt.inserted.session_data,
+                            summary=stmt.inserted.summary,
+                            metadata=stmt.inserted.metadata,
+                            runs=stmt.inserted.runs,
+                            updated_at=int(time.time()),
+                        )
+                        sess.execute(stmt, team_data)
+
+                    # Fetch the results for team sessions
+                    team_ids = [session.session_id for session in team_sessions]
+                    select_stmt = select(table).where(table.c.session_id.in_(team_ids))
+                    result = sess.execute(select_stmt).fetchall()
+
+                    for row in result:
+                        session_dict = dict(row._mapping)
+                        if deserialize:
+                            deserialized_team_session = TeamSession.from_dict(session_dict)
+                            if deserialized_team_session is None:
+                                continue
+                            results.append(deserialized_team_session)
+                        else:
+                            results.append(session_dict)
+
+                # Bulk upsert workflow sessions
+                if workflow_sessions:
+                    workflow_data = []
+                    for session in workflow_sessions:
+                        session_dict = session.to_dict()
+                        workflow_data.append(
+                            {
+                                "session_id": session_dict.get("session_id"),
+                                "session_type": SessionType.WORKFLOW.value,
+                                "workflow_id": session_dict.get("workflow_id"),
+                                "user_id": session_dict.get("user_id"),
+                                "runs": session_dict.get("runs"),
+                                "workflow_data": session_dict.get("workflow_data"),
+                                "session_data": session_dict.get("session_data"),
+                                "summary": session_dict.get("summary"),
+                                "metadata": session_dict.get("metadata"),
+                                "created_at": session_dict.get("created_at"),
+                                "updated_at": session_dict.get("created_at"),
+                            }
+                        )
+
+                    if workflow_data:
+                        stmt = mysql.insert(table)
+                        stmt = stmt.on_duplicate_key_update(
+                            workflow_id=stmt.inserted.workflow_id,
+                            user_id=stmt.inserted.user_id,
+                            workflow_data=stmt.inserted.workflow_data,
+                            session_data=stmt.inserted.session_data,
+                            summary=stmt.inserted.summary,
+                            metadata=stmt.inserted.metadata,
+                            runs=stmt.inserted.runs,
+                            updated_at=int(time.time()),
+                        )
+                        sess.execute(stmt, workflow_data)
+
+                    # Fetch the results for workflow sessions
+                    workflow_ids = [session.session_id for session in workflow_sessions]
+                    select_stmt = select(table).where(table.c.session_id.in_(workflow_ids))
+                    result = sess.execute(select_stmt).fetchall()
+
+                    for row in result:
+                        session_dict = dict(row._mapping)
+                        if deserialize:
+                            deserialized_workflow_session = WorkflowSession.from_dict(session_dict)
+                            if deserialized_workflow_session is None:
+                                continue
+                            results.append(deserialized_workflow_session)
+                        else:
+                            results.append(session_dict)
+
+            return results
+
+        except Exception as e:
+            log_error(f"Exception during bulk session upsert, falling back to individual upserts: {e}")
+            # Fallback to individual upserts
+            return [
+                result
+                for session in sessions
+                if session is not None
+                for result in [self.upsert_session(session, deserialize=deserialize)]
+                if result is not None
+            ]
+
     # -- Memory methods --
     def delete_user_memory(self, memory_id: str):
         """Delete a user memory from the database.
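The new `upsert_sessions` groups the incoming sessions by type, writes each group in a single executemany round-trip inside one transaction, then selects the affected rows back. The fallback's nested comprehension is a call-once-and-filter idiom: `for result in [self.upsert_session(...)]` binds the single upsert's return value exactly once, and the trailing `if result is not None` drops failed rows. A hedged usage sketch (import paths, the constructor, and `AgentSession`'s keyword arguments are assumptions based on this diff's file list and column names, not verified API):

```python
# Sketch only: MySQLDb(db_url=...) and AgentSession's fields are inferred
# from this diff, not confirmed documentation.
from agno.db.mysql import MySQLDb
from agno.session.agent import AgentSession

db = MySQLDb(db_url="mysql+pymysql://user:pass@localhost:3306/agno")

sessions = [
    AgentSession(session_id=f"sess-{i}", agent_id="agent-1", user_id="user-1")
    for i in range(500)
]

# One bulk INSERT ... ON DUPLICATE KEY UPDATE per session type, instead of
# 500 individual upsert_session() round-trips.
upserted = db.upsert_sessions(sessions)

# deserialize=False skips AgentSession.from_dict and returns raw row dicts.
raw_rows = db.upsert_sessions(sessions, deserialize=False)
```

One detail visible in the diff: the insert payload sets `updated_at` to the session's `created_at`, and the `on_duplicate_key_update` branch overwrites it with `int(time.time())`, so only rows that already existed receive a fresh timestamp.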
@@ -789,7 +1000,7 @@ class MySQLDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception reading from memory table: {e}")
-
+            raise e
 
     def get_user_memory(self, memory_id: str, deserialize: Optional[bool] = True) -> Optional[UserMemory]:
         """Get a memory from the database.
@@ -911,7 +1122,7 @@ class MySQLDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception reading from memory table: {e}")
-
+            raise e
 
     def clear_memories(self) -> None:
         """Clear all user memories from the database."""
@@ -1055,6 +1266,96 @@ class MySQLDb(BaseDb):
             log_error(f"Exception upserting user memory: {e}")
             return None
 
+    def upsert_memories(
+        self, memories: List[UserMemory], deserialize: Optional[bool] = True
+    ) -> List[Union[UserMemory, Dict[str, Any]]]:
+        """
+        Bulk upsert multiple user memories for improved performance on large datasets.
+
+        Args:
+            memories (List[UserMemory]): List of memories to upsert.
+            deserialize (Optional[bool]): Whether to deserialize the memories. Defaults to True.
+
+        Returns:
+            List[Union[UserMemory, Dict[str, Any]]]: List of upserted memories.
+
+        Raises:
+            Exception: If an error occurs during bulk upsert.
+        """
+        if not memories:
+            return []
+
+        try:
+            table = self._get_table(table_type="memories", create_table_if_not_found=True)
+            if table is None:
+                log_info("Memories table not available, falling back to individual upserts")
+                return [
+                    result
+                    for memory in memories
+                    if memory is not None
+                    for result in [self.upsert_user_memory(memory, deserialize=deserialize)]
+                    if result is not None
+                ]
+
+            # Prepare bulk data
+            bulk_data = []
+            for memory in memories:
+                if memory.memory_id is None:
+                    memory.memory_id = str(uuid4())
+
+                bulk_data.append(
+                    {
+                        "memory_id": memory.memory_id,
+                        "memory": memory.memory,
+                        "input": memory.input,
+                        "user_id": memory.user_id,
+                        "agent_id": memory.agent_id,
+                        "team_id": memory.team_id,
+                        "topics": memory.topics,
+                        "updated_at": int(time.time()),
+                    }
+                )
+
+            results: List[Union[UserMemory, Dict[str, Any]]] = []
+
+            with self.Session() as sess, sess.begin():
+                # Bulk upsert memories using MySQL ON DUPLICATE KEY UPDATE
+                stmt = mysql.insert(table)
+                stmt = stmt.on_duplicate_key_update(
+                    memory=stmt.inserted.memory,
+                    topics=stmt.inserted.topics,
+                    input=stmt.inserted.input,
+                    agent_id=stmt.inserted.agent_id,
+                    team_id=stmt.inserted.team_id,
+                    updated_at=int(time.time()),
+                )
+                sess.execute(stmt, bulk_data)
+
+                # Fetch results
+                memory_ids = [memory.memory_id for memory in memories if memory.memory_id]
+                select_stmt = select(table).where(table.c.memory_id.in_(memory_ids))
+                result = sess.execute(select_stmt).fetchall()
+
+                for row in result:
+                    memory_dict = dict(row._mapping)
+                    if deserialize:
+                        results.append(UserMemory.from_dict(memory_dict))
+                    else:
+                        results.append(memory_dict)
+
+            return results
+
+        except Exception as e:
+            log_error(f"Exception during bulk memory upsert, falling back to individual upserts: {e}")
+            # Fallback to individual upserts
+            return [
+                result
+                for memory in memories
+                if memory is not None
+                for result in [self.upsert_user_memory(memory, deserialize=deserialize)]
+                if result is not None
+            ]
+
     # -- Metrics methods --
     def _get_all_sessions_for_metrics_calculation(
         self, start_timestamp: Optional[int] = None, end_timestamp: Optional[int] = None
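`upsert_memories` mirrors `upsert_sessions` with a single flat batch, since memories are not split by type. Both methods rely on the same SQLAlchemy MySQL-dialect upsert. Below is a minimal, self-contained sketch of that idiom against a hypothetical table (the engine URL, table name, and columns are all illustrative):

```python
import time

from sqlalchemy import Column, Integer, MetaData, String, Table, create_engine
from sqlalchemy.dialects import mysql
from sqlalchemy.orm import sessionmaker

# Hypothetical engine URL and table, for illustration only.
engine = create_engine("mysql+pymysql://user:pass@localhost:3306/agno")
metadata = MetaData()
memories = Table(
    "agno_memories",  # hypothetical table name
    metadata,
    Column("memory_id", String(64), primary_key=True),
    Column("memory", String(1024)),
    Column("updated_at", Integer),
)
metadata.create_all(engine)

Session = sessionmaker(bind=engine)

# stmt.inserted.<col> compiles to MySQL's VALUES(<col>): the value the
# conflicting row attempted to insert.
stmt = mysql.insert(memories)
stmt = stmt.on_duplicate_key_update(
    memory=stmt.inserted.memory,      # take the incoming row's value
    updated_at=int(time.time()),      # stamp the conflict-update time
)

rows = [{"memory_id": f"m-{i}", "memory": "...", "updated_at": int(time.time())} for i in range(3)]

with Session() as sess, sess.begin():
    # Passing a list of dicts makes this a single executemany round-trip;
    # each row is inserted or updated depending on the primary-key conflict.
    sess.execute(stmt, rows)
```

Columns omitted from `on_duplicate_key_update` keep their stored values on conflict; that is why `created_at` survives re-upserts in both new methods, and why `upsert_memories` never overwrites an existing row's `user_id`.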
@@ -1096,7 +1397,7 @@ class MySQLDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception reading from sessions table: {e}")
-
+            raise e
 
     def _get_metrics_calculation_starting_date(self, table: Table) -> Optional[date]:
         """Get the first date for which metrics calculation is needed:
@@ -1633,7 +1934,7 @@ class MySQLDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception getting eval runs: {e}")
-
+            raise e
 
     def rename_eval_run(
         self, eval_run_id: str, name: str, deserialize: Optional[bool] = True