agno 2.0.8__py3-none-any.whl → 2.0.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/agent/agent.py +13 -8
- agno/db/base.py +14 -0
- agno/db/dynamo/dynamo.py +107 -27
- agno/db/firestore/firestore.py +109 -33
- agno/db/gcs_json/gcs_json_db.py +100 -20
- agno/db/in_memory/in_memory_db.py +95 -20
- agno/db/json/json_db.py +101 -21
- agno/db/migrations/v1_to_v2.py +181 -35
- agno/db/mongo/mongo.py +251 -26
- agno/db/mysql/mysql.py +307 -6
- agno/db/postgres/postgres.py +279 -33
- agno/db/redis/redis.py +99 -22
- agno/db/singlestore/singlestore.py +319 -38
- agno/db/sqlite/sqlite.py +339 -23
- agno/models/anthropic/claude.py +0 -20
- agno/models/aws/claude.py +1 -1
- agno/models/huggingface/huggingface.py +2 -1
- agno/models/ollama/chat.py +28 -2
- agno/models/openai/chat.py +7 -0
- agno/models/openai/responses.py +8 -8
- agno/os/interfaces/base.py +2 -0
- agno/os/interfaces/slack/router.py +50 -10
- agno/os/interfaces/slack/slack.py +6 -4
- agno/os/interfaces/whatsapp/router.py +7 -4
- agno/os/router.py +18 -0
- agno/os/utils.py +2 -2
- agno/reasoning/azure_ai_foundry.py +2 -2
- agno/reasoning/deepseek.py +2 -2
- agno/reasoning/groq.py +2 -2
- agno/reasoning/ollama.py +2 -2
- agno/reasoning/openai.py +2 -2
- agno/run/base.py +15 -2
- agno/team/team.py +10 -12
- agno/tools/mcp_toolbox.py +284 -0
- agno/tools/scrapegraph.py +58 -31
- agno/tools/whatsapp.py +1 -1
- agno/utils/models/claude.py +2 -2
- agno/utils/print_response/agent.py +2 -2
- agno/utils/print_response/team.py +6 -6
- agno/utils/reasoning.py +22 -1
- agno/utils/string.py +9 -0
- agno/workflow/workflow.py +11 -7
- {agno-2.0.8.dist-info → agno-2.0.10.dist-info}/METADATA +4 -1
- {agno-2.0.8.dist-info → agno-2.0.10.dist-info}/RECORD +47 -47
- agno/utils/models/aws_claude.py +0 -170
- {agno-2.0.8.dist-info → agno-2.0.10.dist-info}/WHEEL +0 -0
- {agno-2.0.8.dist-info → agno-2.0.10.dist-info}/licenses/LICENSE +0 -0
- {agno-2.0.8.dist-info → agno-2.0.10.dist-info}/top_level.txt +0 -0
agno/db/sqlite/sqlite.py
CHANGED
@@ -185,7 +185,7 @@ class SqliteDb(BaseDb):

         except Exception as e:
             log_error(f"Could not create table '{table_name}': {e}")
-            raise
+            raise e

     def _get_table(self, table_type: str, create_table_if_not_found: Optional[bool] = False) -> Optional[Table]:
         if table_type == "sessions":
@@ -264,7 +264,7 @@ class SqliteDb(BaseDb):

         except Exception as e:
             log_error(f"Error loading existing table {table_name}: {e}")
-            raise
+            raise e

     # -- Session methods --

@@ -295,7 +295,7 @@ class SqliteDb(BaseDb):

         except Exception as e:
             log_error(f"Error deleting session: {e}")
-
+            raise e

     def delete_sessions(self, session_ids: List[str]) -> None:
         """Delete all given sessions from the database.
@@ -320,6 +320,7 @@ class SqliteDb(BaseDb):

         except Exception as e:
             log_error(f"Error deleting sessions: {e}")
+            raise e

     def get_session(
         self,
@@ -378,7 +379,7 @@ class SqliteDb(BaseDb):

         except Exception as e:
             log_debug(f"Exception reading from sessions table: {e}")
-
+            raise e

     def get_sessions(
         self,
@@ -481,7 +482,7 @@ class SqliteDb(BaseDb):

         except Exception as e:
             log_debug(f"Exception reading from sessions table: {e}")
-
+            raise e

     def rename_session(
         self, session_id: str, session_type: SessionType, session_name: str, deserialize: Optional[bool] = True
@@ -544,7 +545,7 @@ class SqliteDb(BaseDb):

         except Exception as e:
             log_error(f"Exception renaming session: {e}")
-
+            raise e

     def upsert_session(
         self, session: Session, deserialize: Optional[bool] = True
@@ -685,7 +686,226 @@ class SqliteDb(BaseDb):

         except Exception as e:
             log_warning(f"Exception upserting into table: {e}")
-
+            raise e
+
+    def upsert_sessions(
+        self, sessions: List[Session], deserialize: Optional[bool] = True
+    ) -> List[Union[Session, Dict[str, Any]]]:
+        """
+        Bulk upsert multiple sessions for improved performance on large datasets.
+
+        Args:
+            sessions (List[Session]): List of sessions to upsert.
+            deserialize (Optional[bool]): Whether to deserialize the sessions. Defaults to True.
+
+        Returns:
+            List[Union[Session, Dict[str, Any]]]: List of upserted sessions.
+
+        Raises:
+            Exception: If an error occurs during bulk upsert.
+        """
+        if not sessions:
+            return []
+
+        try:
+            table = self._get_table(table_type="sessions", create_table_if_not_found=True)
+            if table is None:
+                log_info("Sessions table not available, falling back to individual upserts")
+                return [
+                    result
+                    for session in sessions
+                    if session is not None
+                    for result in [self.upsert_session(session, deserialize=deserialize)]
+                    if result is not None
+                ]
+
+            # Group sessions by type for batch processing
+            agent_sessions = []
+            team_sessions = []
+            workflow_sessions = []
+
+            for session in sessions:
+                if isinstance(session, AgentSession):
+                    agent_sessions.append(session)
+                elif isinstance(session, TeamSession):
+                    team_sessions.append(session)
+                elif isinstance(session, WorkflowSession):
+                    workflow_sessions.append(session)
+
+            results: List[Union[Session, Dict[str, Any]]] = []
+
+            with self.Session() as sess, sess.begin():
+                # Bulk upsert agent sessions
+                if agent_sessions:
+                    agent_data = []
+                    for session in agent_sessions:
+                        serialized_session = serialize_session_json_fields(session.to_dict())
+                        agent_data.append(
+                            {
+                                "session_id": serialized_session.get("session_id"),
+                                "session_type": SessionType.AGENT.value,
+                                "agent_id": serialized_session.get("agent_id"),
+                                "user_id": serialized_session.get("user_id"),
+                                "agent_data": serialized_session.get("agent_data"),
+                                "session_data": serialized_session.get("session_data"),
+                                "metadata": serialized_session.get("metadata"),
+                                "runs": serialized_session.get("runs"),
+                                "summary": serialized_session.get("summary"),
+                                "created_at": serialized_session.get("created_at"),
+                                "updated_at": serialized_session.get("created_at"),
+                            }
+                        )
+
+                    if agent_data:
+                        stmt = sqlite.insert(table)
+                        stmt = stmt.on_conflict_do_update(
+                            index_elements=["session_id"],
+                            set_=dict(
+                                agent_id=stmt.excluded.agent_id,
+                                user_id=stmt.excluded.user_id,
+                                agent_data=stmt.excluded.agent_data,
+                                session_data=stmt.excluded.session_data,
+                                metadata=stmt.excluded.metadata,
+                                runs=stmt.excluded.runs,
+                                summary=stmt.excluded.summary,
+                                updated_at=int(time.time()),
+                            ),
+                        )
+                        sess.execute(stmt, agent_data)
+
+                    # Fetch the results for agent sessions
+                    agent_ids = [session.session_id for session in agent_sessions]
+                    select_stmt = select(table).where(table.c.session_id.in_(agent_ids))
+                    result = sess.execute(select_stmt).fetchall()
+
+                    for row in result:
+                        session_dict = deserialize_session_json_fields(dict(row._mapping))
+                        if deserialize:
+                            deserialized_agent_session = AgentSession.from_dict(session_dict)
+                            if deserialized_agent_session is None:
+                                continue
+                            results.append(deserialized_agent_session)
+                        else:
+                            results.append(session_dict)
+
+                # Bulk upsert team sessions
+                if team_sessions:
+                    team_data = []
+                    for session in team_sessions:
+                        serialized_session = serialize_session_json_fields(session.to_dict())
+                        team_data.append(
+                            {
+                                "session_id": serialized_session.get("session_id"),
+                                "session_type": SessionType.TEAM.value,
+                                "team_id": serialized_session.get("team_id"),
+                                "user_id": serialized_session.get("user_id"),
+                                "runs": serialized_session.get("runs"),
+                                "summary": serialized_session.get("summary"),
+                                "created_at": serialized_session.get("created_at"),
+                                "updated_at": serialized_session.get("created_at"),
+                                "team_data": serialized_session.get("team_data"),
+                                "session_data": serialized_session.get("session_data"),
+                                "metadata": serialized_session.get("metadata"),
+                            }
+                        )
+
+                    if team_data:
+                        stmt = sqlite.insert(table)
+                        stmt = stmt.on_conflict_do_update(
+                            index_elements=["session_id"],
+                            set_=dict(
+                                team_id=stmt.excluded.team_id,
+                                user_id=stmt.excluded.user_id,
+                                team_data=stmt.excluded.team_data,
+                                session_data=stmt.excluded.session_data,
+                                metadata=stmt.excluded.metadata,
+                                runs=stmt.excluded.runs,
+                                summary=stmt.excluded.summary,
+                                updated_at=int(time.time()),
+                            ),
+                        )
+                        sess.execute(stmt, team_data)
+
+                    # Fetch the results for team sessions
+                    team_ids = [session.session_id for session in team_sessions]
+                    select_stmt = select(table).where(table.c.session_id.in_(team_ids))
+                    result = sess.execute(select_stmt).fetchall()
+
+                    for row in result:
+                        session_dict = deserialize_session_json_fields(dict(row._mapping))
+                        if deserialize:
+                            deserialized_team_session = TeamSession.from_dict(session_dict)
+                            if deserialized_team_session is None:
+                                continue
+                            results.append(deserialized_team_session)
+                        else:
+                            results.append(session_dict)
+
+                # Bulk upsert workflow sessions
+                if workflow_sessions:
+                    workflow_data = []
+                    for session in workflow_sessions:
+                        serialized_session = serialize_session_json_fields(session.to_dict())
+                        workflow_data.append(
+                            {
+                                "session_id": serialized_session.get("session_id"),
+                                "session_type": SessionType.WORKFLOW.value,
+                                "workflow_id": serialized_session.get("workflow_id"),
+                                "user_id": serialized_session.get("user_id"),
+                                "runs": serialized_session.get("runs"),
+                                "summary": serialized_session.get("summary"),
+                                "created_at": serialized_session.get("created_at"),
+                                "updated_at": serialized_session.get("created_at"),
+                                "workflow_data": serialized_session.get("workflow_data"),
+                                "session_data": serialized_session.get("session_data"),
+                                "metadata": serialized_session.get("metadata"),
+                            }
+                        )
+
+                    if workflow_data:
+                        stmt = sqlite.insert(table)
+                        stmt = stmt.on_conflict_do_update(
+                            index_elements=["session_id"],
+                            set_=dict(
+                                workflow_id=stmt.excluded.workflow_id,
+                                user_id=stmt.excluded.user_id,
+                                workflow_data=stmt.excluded.workflow_data,
+                                session_data=stmt.excluded.session_data,
+                                metadata=stmt.excluded.metadata,
+                                runs=stmt.excluded.runs,
+                                summary=stmt.excluded.summary,
+                                updated_at=int(time.time()),
+                            ),
+                        )
+                        sess.execute(stmt, workflow_data)
+
+                    # Fetch the results for workflow sessions
+                    workflow_ids = [session.session_id for session in workflow_sessions]
+                    select_stmt = select(table).where(table.c.session_id.in_(workflow_ids))
+                    result = sess.execute(select_stmt).fetchall()
+
+                    for row in result:
+                        session_dict = deserialize_session_json_fields(dict(row._mapping))
+                        if deserialize:
+                            deserialized_workflow_session = WorkflowSession.from_dict(session_dict)
+                            if deserialized_workflow_session is None:
+                                continue
+                            results.append(deserialized_workflow_session)
+                        else:
+                            results.append(session_dict)
+
+            return results
+
+        except Exception as e:
+            log_error(f"Exception during bulk session upsert, falling back to individual upserts: {e}")
+            # Fallback to individual upserts
+            return [
+                result
+                for session in sessions
+                if session is not None
+                for result in [self.upsert_session(session, deserialize=deserialize)]
+                if result is not None
+            ]

     # -- Memory methods --

@@ -715,6 +935,7 @@ class SqliteDb(BaseDb):

         except Exception as e:
             log_error(f"Error deleting user memory: {e}")
+            raise e

     def delete_user_memories(self, memory_ids: List[str]) -> None:
         """Delete user memories from the database.
@@ -738,6 +959,7 @@ class SqliteDb(BaseDb):

         except Exception as e:
             log_error(f"Error deleting user memories: {e}")
+            raise e

     def get_all_memory_topics(self) -> List[str]:
         """Get all memory topics from the database.
@@ -758,7 +980,7 @@ class SqliteDb(BaseDb):

         except Exception as e:
             log_debug(f"Exception reading from memory table: {e}")
-
+            raise e

     def get_user_memory(
         self, memory_id: str, deserialize: Optional[bool] = True
@@ -796,7 +1018,7 @@ class SqliteDb(BaseDb):

         except Exception as e:
             log_debug(f"Exception reading from memorytable: {e}")
-
+            raise e

     def get_user_memories(
         self,
@@ -880,7 +1102,7 @@ class SqliteDb(BaseDb):

         except Exception as e:
             log_error(f"Error reading from memory table: {e}")
-
+            raise e

     def get_user_memory_stats(
         self,
@@ -949,7 +1171,7 @@ class SqliteDb(BaseDb):

         except Exception as e:
             log_error(f"Error getting user memory stats: {e}")
-
+            raise e

     def upsert_user_memory(
         self, memory: UserMemory, deserialize: Optional[bool] = True
@@ -1011,7 +1233,99 @@ class SqliteDb(BaseDb):

         except Exception as e:
             log_error(f"Error upserting user memory: {e}")
-
+            raise e
+
+    def upsert_memories(
+        self, memories: List[UserMemory], deserialize: Optional[bool] = True
+    ) -> List[Union[UserMemory, Dict[str, Any]]]:
+        """
+        Bulk upsert multiple user memories for improved performance on large datasets.
+
+        Args:
+            memories (List[UserMemory]): List of memories to upsert.
+            deserialize (Optional[bool]): Whether to deserialize the memories. Defaults to True.
+
+        Returns:
+            List[Union[UserMemory, Dict[str, Any]]]: List of upserted memories.
+
+        Raises:
+            Exception: If an error occurs during bulk upsert.
+        """
+        if not memories:
+            return []
+
+        try:
+            table = self._get_table(table_type="memories", create_table_if_not_found=True)
+            if table is None:
+                log_info("Memories table not available, falling back to individual upserts")
+                return [
+                    result
+                    for memory in memories
+                    if memory is not None
+                    for result in [self.upsert_user_memory(memory, deserialize=deserialize)]
+                    if result is not None
+                ]
+            # Prepare bulk data
+            bulk_data = []
+            for memory in memories:
+                if memory.memory_id is None:
+                    memory.memory_id = str(uuid4())
+
+                bulk_data.append(
+                    {
+                        "user_id": memory.user_id,
+                        "agent_id": memory.agent_id,
+                        "team_id": memory.team_id,
+                        "memory_id": memory.memory_id,
+                        "memory": memory.memory,
+                        "topics": memory.topics,
+                        "updated_at": int(time.time()),
+                    }
+                )
+
+            results: List[Union[UserMemory, Dict[str, Any]]] = []
+
+            with self.Session() as sess, sess.begin():
+                # Bulk upsert memories using SQLite ON CONFLICT DO UPDATE
+                stmt = sqlite.insert(table)
+                stmt = stmt.on_conflict_do_update(
+                    index_elements=["memory_id"],
+                    set_=dict(
+                        memory=stmt.excluded.memory,
+                        topics=stmt.excluded.topics,
+                        input=stmt.excluded.input,
+                        agent_id=stmt.excluded.agent_id,
+                        team_id=stmt.excluded.team_id,
+                        updated_at=int(time.time()),
+                    ),
+                )
+                sess.execute(stmt, bulk_data)
+
+                # Fetch results
+                memory_ids = [memory.memory_id for memory in memories if memory.memory_id]
+                select_stmt = select(table).where(table.c.memory_id.in_(memory_ids))
+                result = sess.execute(select_stmt).fetchall()
+
+                for row in result:
+                    memory_dict = dict(row._mapping)
+                    if deserialize:
+                        results.append(UserMemory.from_dict(memory_dict))
+                    else:
+                        results.append(memory_dict)
+
+            return results
+
+        except Exception as e:
+            log_error(f"Exception during bulk memory upsert, falling back to individual upserts: {e}")
+
+            # Fallback to individual upserts
+            return [
+                result
+                for memory in memories
+                if memory is not None
+                for result in [self.upsert_user_memory(memory, deserialize=deserialize)]
+                if result is not None
+            ]

     def clear_memories(self) -> None:
         """Delete all memories from the database.
@@ -1031,6 +1345,7 @@ class SqliteDb(BaseDb):
             from agno.utils.log import log_warning

             log_warning(f"Exception deleting all memories: {e}")
+            raise e

     # -- Metrics methods --

@@ -1074,7 +1389,7 @@ class SqliteDb(BaseDb):

         except Exception as e:
             log_error(f"Error reading from sessions table: {e}")
-
+            raise e

     def _get_metrics_calculation_starting_date(self, table: Table) -> Optional[date]:
         """Get the first date for which metrics calculation is needed:
@@ -1219,7 +1534,7 @@ class SqliteDb(BaseDb):

         except Exception as e:
             log_error(f"Error getting metrics: {e}")
-
+            raise e

     # -- Knowledge methods --

@@ -1243,6 +1558,7 @@ class SqliteDb(BaseDb):

         except Exception as e:
             log_error(f"Error deleting knowledge content: {e}")
+            raise e

     def get_knowledge_content(self, id: str) -> Optional[KnowledgeRow]:
         """Get a knowledge row from the database.
@@ -1271,7 +1587,7 @@ class SqliteDb(BaseDb):

         except Exception as e:
             log_error(f"Error getting knowledge content: {e}")
-
+            raise e

     def get_knowledge_contents(
         self,
@@ -1321,7 +1637,7 @@ class SqliteDb(BaseDb):

         except Exception as e:
             log_error(f"Error getting knowledge contents: {e}")
-
+            raise e

     def upsert_knowledge_content(self, knowledge_row: KnowledgeRow):
         """Upsert knowledge content in the database.
@@ -1369,7 +1685,7 @@ class SqliteDb(BaseDb):

         except Exception as e:
             log_error(f"Error upserting knowledge content: {e}")
-
+            raise e

     # -- Eval methods --

@@ -1404,7 +1720,7 @@ class SqliteDb(BaseDb):

         except Exception as e:
             log_error(f"Error creating eval run: {e}")
-
+            raise e

     def delete_eval_run(self, eval_run_id: str) -> None:
         """Delete an eval run from the database.
@@ -1427,7 +1743,7 @@ class SqliteDb(BaseDb):

         except Exception as e:
             log_error(f"Error deleting eval run {eval_run_id}: {e}")
-            raise
+            raise e

     def delete_eval_runs(self, eval_run_ids: List[str]) -> None:
         """Delete multiple eval runs from the database.
@@ -1450,7 +1766,7 @@ class SqliteDb(BaseDb):

         except Exception as e:
             log_error(f"Error deleting eval runs {eval_run_ids}: {e}")
-            raise
+            raise e

     def get_eval_run(
         self, eval_run_id: str, deserialize: Optional[bool] = True
@@ -1488,7 +1804,7 @@ class SqliteDb(BaseDb):

         except Exception as e:
             log_error(f"Exception getting eval run {eval_run_id}: {e}")
-
+            raise e

     def get_eval_runs(
         self,
@@ -1582,7 +1898,7 @@ class SqliteDb(BaseDb):

         except Exception as e:
             log_error(f"Exception getting eval runs: {e}")
-
+            raise e

     def rename_eval_run(
         self, eval_run_id: str, name: str, deserialize: Optional[bool] = True
@@ -1624,7 +1940,7 @@ class SqliteDb(BaseDb):

         except Exception as e:
             log_error(f"Error renaming eval run {eval_run_id}: {e}")
-            raise
+            raise e

     # -- Migrations --
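Note: besides converting bare `raise` statements to `raise e` (and adding `raise e` where errors were previously swallowed after logging), this file gains two bulk-write methods. `upsert_sessions` groups sessions by type and writes each group with a single `INSERT ... ON CONFLICT DO UPDATE`, and `upsert_memories` does the same for user memories; both fall back to per-record upserts on failure. A minimal usage sketch, assuming a local database file and illustrative memory contents (the `UserMemory` import path and `db_file` value are assumptions, not taken from this diff):

from agno.db.sqlite import SqliteDb
from agno.memory import UserMemory  # assumed import path

db = SqliteDb(db_file="tmp/agno.db")  # hypothetical database file

# One ON CONFLICT DO UPDATE statement for the whole batch, instead of
# one round trip per record via repeated upsert_user_memory() calls.
memories = [
    UserMemory(memory="Prefers concise answers", topics=["style"], user_id="u1"),
    UserMemory(memory="Works in UTC+1", topics=["timezone"], user_id="u1"),
]
upserted = db.upsert_memories(memories)                     # List[UserMemory]
raw_rows = db.upsert_memories(memories, deserialize=False)  # List[Dict[str, Any]]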
agno/models/anthropic/claude.py
CHANGED
@@ -421,26 +421,6 @@ class Claude(Model):
             log_error(f"Unexpected error calling Claude API: {str(e)}")
             raise ModelProviderError(message=str(e), model_name=self.name, model_id=self.id) from e

-    def format_function_call_results(self, messages: List[Message], function_call_results: List[Message]) -> None:
-        """
-        Handle the results of function calls.
-
-        Args:
-            messages (List[Message]): The list of conversation messages.
-            function_call_results (List[Message]): The results of the function calls.
-        """
-        if len(function_call_results) > 0:
-            fc_responses: List = []
-            for _fc_message in function_call_results:
-                fc_responses.append(
-                    {
-                        "type": "tool_result",
-                        "tool_use_id": _fc_message.tool_call_id,
-                        "content": str(_fc_message.content),
-                    }
-                )
-            messages.append(Message(role="user", content=fc_responses))
-
     def get_system_message_for_model(self, tools: Optional[List[Any]] = None) -> Optional[str]:
         if tools is not None and len(tools) > 0:
             tool_call_prompt = "Do not reflect on the quality of the returned search results in your response\n\n"
agno/models/aws/claude.py
CHANGED
@@ -10,7 +10,7 @@ from agno.models.message import Message
 from agno.models.response import ModelResponse
 from agno.run.agent import RunOutput
 from agno.utils.log import log_debug, log_error, log_warning
-from agno.utils.models.aws_claude import format_messages
+from agno.utils.models.claude import format_messages

 try:
     from anthropic import AnthropicBedrock, APIConnectionError, APIStatusError, AsyncAnthropicBedrock, RateLimitError
agno/models/huggingface/huggingface.py
CHANGED
@@ -382,7 +382,8 @@ class HuggingFace(Model):
             List[Dict[str, Any]]: The built tool calls.
         """
         tool_calls: List[Dict[str, Any]] = []
-        for _tool_call in tool_calls_data:
+        for tool_call in tool_calls_data:
+            _tool_call = tool_call[0]
             _index = _tool_call.index
             _tool_call_id = _tool_call.id
             _tool_call_type = _tool_call.type
agno/models/ollama/chat.py
CHANGED
@@ -1,5 +1,6 @@
 import json
-from dataclasses import dataclass
+from dataclasses import dataclass, field
+from os import getenv
 from typing import Any, AsyncIterator, Dict, Iterator, List, Optional, Type, Union

 from pydantic import BaseModel
@@ -10,6 +11,7 @@ from agno.models.message import Message
 from agno.models.metrics import Metrics
 from agno.models.response import ModelResponse
 from agno.utils.log import log_debug, log_warning
+from agno.utils.reasoning import extract_thinking_content

 try:
     from ollama import AsyncClient as AsyncOllamaClient
@@ -43,6 +45,7 @@ class Ollama(Model):
     # Client parameters
     host: Optional[str] = None
     timeout: Optional[Any] = None
+    api_key: Optional[str] = field(default_factory=lambda: getenv("OLLAMA_API_KEY"))
     client_params: Optional[Dict[str, Any]] = None

     # Ollama clients
@@ -50,10 +53,23 @@ class Ollama(Model):
     async_client: Optional[AsyncOllamaClient] = None

     def _get_client_params(self) -> Dict[str, Any]:
+        host = self.host
+        headers = {}
+
+        if self.api_key:
+            if not host:
+                host = "https://ollama.com"
+            headers["authorization"] = f"Bearer {self.api_key}"
+            log_debug(f"Using Ollama cloud endpoint: {host}")
+
         base_params = {
-            "host": self.host,
+            "host": host,
             "timeout": self.timeout,
         }
+
+        if headers:
+            base_params["headers"] = headers
+
         # Create client_params dict with non-None values
         client_params = {k: v for k, v in base_params.items() if v is not None}
         # Add additional client params if provided
@@ -332,6 +348,16 @@ class Ollama(Model):
         if response_message.get("content") is not None:
             model_response.content = response_message.get("content")

+            # Extract thinking content between <think> tags if present
+            if model_response.content and model_response.content.find("<think>") != -1:
+                reasoning_content, clean_content = extract_thinking_content(model_response.content)
+
+                if reasoning_content:
+                    # Store extracted thinking content separately
+                    model_response.reasoning_content = reasoning_content
+                    # Update main content with clean version
+                    model_response.content = clean_content
+
         if response_message.get("tool_calls") is not None:
             if model_response.tool_calls is None:
                 model_response.tool_calls = []
CHANGED
|
@@ -16,6 +16,7 @@ from agno.models.response import ModelResponse
|
|
|
16
16
|
from agno.run.agent import RunOutput
|
|
17
17
|
from agno.utils.log import log_debug, log_error, log_warning
|
|
18
18
|
from agno.utils.openai import _format_file_for_message, audio_to_message, images_to_message
|
|
19
|
+
from agno.utils.reasoning import extract_thinking_content
|
|
19
20
|
|
|
20
21
|
try:
|
|
21
22
|
from openai import APIConnectionError, APIStatusError, RateLimitError
|
|
@@ -711,6 +712,12 @@ class OpenAIChat(Model):
|
|
|
711
712
|
if response_message.content is not None:
|
|
712
713
|
model_response.content = response_message.content
|
|
713
714
|
|
|
715
|
+
# Extract thinking content before any structured parsing
|
|
716
|
+
if model_response.content:
|
|
717
|
+
reasoning_content, output_content = extract_thinking_content(model_response.content)
|
|
718
|
+
if reasoning_content:
|
|
719
|
+
model_response.reasoning_content = reasoning_content
|
|
720
|
+
model_response.content = output_content
|
|
714
721
|
# Add tool calls
|
|
715
722
|
if response_message.tool_calls is not None and len(response_message.tool_calls) > 0:
|
|
716
723
|
try:
|