agno 2.0.8__py3-none-any.whl → 2.0.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/agent/agent.py +13 -8
- agno/db/base.py +14 -0
- agno/db/dynamo/dynamo.py +107 -27
- agno/db/firestore/firestore.py +109 -33
- agno/db/gcs_json/gcs_json_db.py +100 -20
- agno/db/in_memory/in_memory_db.py +95 -20
- agno/db/json/json_db.py +101 -21
- agno/db/migrations/v1_to_v2.py +181 -35
- agno/db/mongo/mongo.py +251 -26
- agno/db/mysql/mysql.py +307 -6
- agno/db/postgres/postgres.py +279 -33
- agno/db/redis/redis.py +99 -22
- agno/db/singlestore/singlestore.py +319 -38
- agno/db/sqlite/sqlite.py +339 -23
- agno/models/anthropic/claude.py +0 -20
- agno/models/aws/claude.py +1 -1
- agno/models/huggingface/huggingface.py +2 -1
- agno/models/ollama/chat.py +28 -2
- agno/models/openai/chat.py +7 -0
- agno/models/openai/responses.py +8 -8
- agno/os/interfaces/base.py +2 -0
- agno/os/interfaces/slack/router.py +50 -10
- agno/os/interfaces/slack/slack.py +6 -4
- agno/os/interfaces/whatsapp/router.py +7 -4
- agno/os/router.py +18 -0
- agno/os/utils.py +2 -2
- agno/reasoning/azure_ai_foundry.py +2 -2
- agno/reasoning/deepseek.py +2 -2
- agno/reasoning/groq.py +2 -2
- agno/reasoning/ollama.py +2 -2
- agno/reasoning/openai.py +2 -2
- agno/run/base.py +15 -2
- agno/team/team.py +10 -12
- agno/tools/mcp_toolbox.py +284 -0
- agno/tools/scrapegraph.py +58 -31
- agno/tools/whatsapp.py +1 -1
- agno/utils/models/claude.py +2 -2
- agno/utils/print_response/agent.py +2 -2
- agno/utils/print_response/team.py +6 -6
- agno/utils/reasoning.py +22 -1
- agno/utils/string.py +9 -0
- agno/workflow/workflow.py +11 -7
- {agno-2.0.8.dist-info → agno-2.0.10.dist-info}/METADATA +4 -1
- {agno-2.0.8.dist-info → agno-2.0.10.dist-info}/RECORD +47 -47
- agno/utils/models/aws_claude.py +0 -170
- {agno-2.0.8.dist-info → agno-2.0.10.dist-info}/WHEEL +0 -0
- {agno-2.0.8.dist-info → agno-2.0.10.dist-info}/licenses/LICENSE +0 -0
- {agno-2.0.8.dist-info → agno-2.0.10.dist-info}/top_level.txt +0 -0
agno/db/singlestore/singlestore.py

@@ -393,7 +393,7 @@ class SingleStoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error deleting session: {e}")
-
+            raise e
 
     def delete_sessions(self, session_ids: List[str]) -> None:
         """Delete all given sessions from the database.
@@ -418,6 +418,7 @@ class SingleStoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error deleting sessions: {e}")
+            raise e
 
     def get_session(
         self,
@@ -476,7 +477,7 @@ class SingleStoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception reading from session table: {e}")
-
+            raise e
 
     def get_sessions(
         self,
@@ -580,8 +581,8 @@ class SingleStoreDb(BaseDb):
            raise ValueError(f"Invalid session type: {session_type}")
 
         except Exception as e:
-
-
+            log_error(f"Exception reading from session table: {e}")
+            raise e
 
     def rename_session(
         self, session_id: str, session_type: SessionType, session_name: str, deserialize: Optional[bool] = True
@@ -643,7 +644,7 @@ class SingleStoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error renaming session: {e}")
-
+            raise e
 
     def upsert_session(self, session: Session, deserialize: Optional[bool] = True) -> Optional[Session]:
         """
@@ -793,7 +794,200 @@ class SingleStoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error upserting into sessions table: {e}")
-
+            raise e
+
+    def upsert_sessions(
+        self, sessions: List[Session], deserialize: Optional[bool] = True
+    ) -> List[Union[Session, Dict[str, Any]]]:
+        """
+        Bulk upsert multiple sessions for improved performance on large datasets.
+
+        Args:
+            sessions (List[Session]): List of sessions to upsert.
+            deserialize (Optional[bool]): Whether to deserialize the sessions. Defaults to True.
+
+        Returns:
+            List[Union[Session, Dict[str, Any]]]: List of upserted sessions.
+
+        Raises:
+            Exception: If an error occurs during bulk upsert.
+        """
+        if not sessions:
+            return []
+
+        try:
+            table = self._get_table(table_type="sessions", create_table_if_not_found=True)
+            if table is None:
+                return []
+
+            # Group sessions by type for batch processing
+            agent_sessions = []
+            team_sessions = []
+            workflow_sessions = []
+
+            for session in sessions:
+                if isinstance(session, AgentSession):
+                    agent_sessions.append(session)
+                elif isinstance(session, TeamSession):
+                    team_sessions.append(session)
+                elif isinstance(session, WorkflowSession):
+                    workflow_sessions.append(session)
+
+            results: List[Union[Session, Dict[str, Any]]] = []
+
+            with self.Session() as sess, sess.begin():
+                # Bulk upsert agent sessions
+                if agent_sessions:
+                    agent_data = []
+                    for session in agent_sessions:
+                        session_dict = session.to_dict()
+                        agent_data.append(
+                            {
+                                "session_id": session_dict.get("session_id"),
+                                "session_type": SessionType.AGENT.value,
+                                "agent_id": session_dict.get("agent_id"),
+                                "user_id": session_dict.get("user_id"),
+                                "runs": session_dict.get("runs"),
+                                "agent_data": session_dict.get("agent_data"),
+                                "session_data": session_dict.get("session_data"),
+                                "summary": session_dict.get("summary"),
+                                "metadata": session_dict.get("metadata"),
+                                "created_at": session_dict.get("created_at"),
+                                "updated_at": session_dict.get("created_at"),
+                            }
+                        )
+
+                    if agent_data:
+                        stmt = mysql.insert(table)
+                        stmt = stmt.on_duplicate_key_update(
+                            agent_id=stmt.inserted.agent_id,
+                            user_id=stmt.inserted.user_id,
+                            agent_data=stmt.inserted.agent_data,
+                            session_data=stmt.inserted.session_data,
+                            summary=stmt.inserted.summary,
+                            metadata=stmt.inserted.metadata,
+                            runs=stmt.inserted.runs,
+                            updated_at=int(time.time()),
+                        )
+                        sess.execute(stmt, agent_data)
+
+                        # Fetch the results for agent sessions
+                        agent_ids = [session.session_id for session in agent_sessions]
+                        select_stmt = select(table).where(table.c.session_id.in_(agent_ids))
+                        result = sess.execute(select_stmt).fetchall()
+
+                        for row in result:
+                            if deserialize:
+                                deserialized_session = AgentSession.from_dict(session_dict)
+                                if deserialized_session is None:
+                                    continue
+                                results.append(deserialized_session)
+                            else:
+                                results.append(dict(row._mapping))
+
+                # Bulk upsert team sessions
+                if team_sessions:
+                    team_data = []
+                    for session in team_sessions:
+                        session_dict = session.to_dict()
+                        team_data.append(
+                            {
+                                "session_id": session_dict.get("session_id"),
+                                "session_type": SessionType.TEAM.value,
+                                "team_id": session_dict.get("team_id"),
+                                "user_id": session_dict.get("user_id"),
+                                "runs": session_dict.get("runs"),
+                                "team_data": session_dict.get("team_data"),
+                                "session_data": session_dict.get("session_data"),
+                                "summary": session_dict.get("summary"),
+                                "metadata": session_dict.get("metadata"),
+                                "created_at": session_dict.get("created_at"),
+                                "updated_at": session_dict.get("created_at"),
+                            }
+                        )
+
+                    if team_data:
+                        stmt = mysql.insert(table)
+                        stmt = stmt.on_duplicate_key_update(
+                            team_id=stmt.inserted.team_id,
+                            user_id=stmt.inserted.user_id,
+                            team_data=stmt.inserted.team_data,
+                            session_data=stmt.inserted.session_data,
+                            summary=stmt.inserted.summary,
+                            metadata=stmt.inserted.metadata,
+                            runs=stmt.inserted.runs,
+                            updated_at=int(time.time()),
+                        )
+                        sess.execute(stmt, team_data)
+
+                        # Fetch the results for team sessions
+                        team_ids = [session.session_id for session in team_sessions]
+                        select_stmt = select(table).where(table.c.session_id.in_(team_ids))
+                        result = sess.execute(select_stmt).fetchall()
+
+                        for row in result:
+                            if deserialize:
+                                deserialized_team_session = TeamSession.from_dict(session_dict)
+                                if deserialized_team_session is None:
+                                    continue
+                                results.append(deserialized_team_session)
+                            else:
+                                results.append(dict(row._mapping))
+
+                # Bulk upsert workflow sessions
+                if workflow_sessions:
+                    workflow_data = []
+                    for session in workflow_sessions:
+                        session_dict = session.to_dict()
+                        workflow_data.append(
+                            {
+                                "session_id": session_dict.get("session_id"),
+                                "session_type": SessionType.WORKFLOW.value,
+                                "workflow_id": session_dict.get("workflow_id"),
+                                "user_id": session_dict.get("user_id"),
+                                "runs": session_dict.get("runs"),
+                                "workflow_data": session_dict.get("workflow_data"),
+                                "session_data": session_dict.get("session_data"),
+                                "summary": session_dict.get("summary"),
+                                "metadata": session_dict.get("metadata"),
+                                "created_at": session_dict.get("created_at"),
+                                "updated_at": session_dict.get("created_at"),
+                            }
+                        )
+
+                    if workflow_data:
+                        stmt = mysql.insert(table)
+                        stmt = stmt.on_duplicate_key_update(
+                            workflow_id=stmt.inserted.workflow_id,
+                            user_id=stmt.inserted.user_id,
+                            workflow_data=stmt.inserted.workflow_data,
+                            session_data=stmt.inserted.session_data,
+                            summary=stmt.inserted.summary,
+                            metadata=stmt.inserted.metadata,
+                            runs=stmt.inserted.runs,
+                            updated_at=int(time.time()),
+                        )
+                        sess.execute(stmt, workflow_data)
+
+                        # Fetch the results for workflow sessions
+                        workflow_ids = [session.session_id for session in workflow_sessions]
+                        select_stmt = select(table).where(table.c.session_id.in_(workflow_ids))
+                        result = sess.execute(select_stmt).fetchall()
+
+                        for row in result:
+                            if deserialize:
+                                deserialized_workflow_session = WorkflowSession.from_dict(session_dict)
+                                if deserialized_workflow_session is None:
+                                    continue
+                                results.append(deserialized_workflow_session)
+                            else:
+                                results.append(dict(row._mapping))
+
+            return results
+
+        except Exception as e:
+            log_error(f"Exception during bulk session upsert: {e}")
+            return []
 
     # -- Memory methods --
     def delete_user_memory(self, memory_id: str):
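The headline addition in this file is upsert_sessions: sessions are grouped by type (agent, team, workflow) and each group is written with one multi-row statement inside a single transaction, instead of one round trip per session. A minimal usage sketch; the SingleStoreDb constructor argument, import paths, and AgentSession fields shown here are assumptions for illustration, not taken from this diff:

# Hypothetical usage of the bulk API added in 2.0.10.
# Import paths follow the file layout above; exact re-exports may differ.
from agno.db.singlestore.singlestore import SingleStoreDb
from agno.session import AgentSession

db = SingleStoreDb(db_url="mysql+pymysql://user:pass@host:3306/agno")  # assumed signature
sessions = [
    AgentSession(session_id=f"s-{i}", agent_id="agent-1", user_id="u-1")  # assumed fields
    for i in range(500)
]
# One INSERT ... ON DUPLICATE KEY UPDATE per session type, in a single transaction.
upserted = db.upsert_sessions(sessions, deserialize=True)

Note the asymmetry the diff records: the single-row methods in 2.0.10 now re-raise on failure, while this bulk method logs the exception and returns an empty list.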
@@ -825,6 +1019,7 @@ class SingleStoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error deleting memory: {e}")
+            raise e
 
     def delete_user_memories(self, memory_ids: List[str]) -> None:
         """Delete user memories from the database.
@@ -848,6 +1043,7 @@ class SingleStoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error deleting memories: {e}")
+            raise e
 
     def get_all_memory_topics(self) -> List[str]:
         """Get all memory topics from the database.
@@ -875,7 +1071,7 @@ class SingleStoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception reading from memory table: {e}")
-
+            raise e
 
     def get_user_memory(self, memory_id: str, deserialize: Optional[bool] = True) -> Optional[UserMemory]:
         """Get a memory from the database.
@@ -911,7 +1107,7 @@ class SingleStoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception reading from memory table: {e}")
-
+            raise e
 
     def get_user_memories(
         self,
@@ -995,7 +1191,7 @@ class SingleStoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception reading from memory table: {e}")
-
+            raise e
 
     def get_user_memory_stats(
         self, limit: Optional[int] = None, page: Optional[int] = None
@@ -1062,7 +1258,7 @@ class SingleStoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception getting user memory stats: {e}")
-
+            raise e
 
     def upsert_user_memory(
         self, memory: UserMemory, deserialize: Optional[bool] = True
@@ -1126,7 +1322,83 @@ class SingleStoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error upserting user memory: {e}")
-
+            raise e
+
+    def upsert_memories(
+        self, memories: List[UserMemory], deserialize: Optional[bool] = True
+    ) -> List[Union[UserMemory, Dict[str, Any]]]:
+        """
+        Bulk upsert multiple user memories for improved performance on large datasets.
+
+        Args:
+            memories (List[UserMemory]): List of memories to upsert.
+            deserialize (Optional[bool]): Whether to deserialize the memories. Defaults to True.
+
+        Returns:
+            List[Union[UserMemory, Dict[str, Any]]]: List of upserted memories.
+
+        Raises:
+            Exception: If an error occurs during bulk upsert.
+        """
+        if not memories:
+            return []
+
+        try:
+            table = self._get_table(table_type="memories", create_table_if_not_found=True)
+            if table is None:
+                return []
+
+            # Prepare data for bulk insert
+            memory_data = []
+            for memory in memories:
+                if memory.memory_id is None:
+                    memory.memory_id = str(uuid4())
+                memory_data.append(
+                    {
+                        "memory_id": memory.memory_id,
+                        "memory": memory.memory,
+                        "input": memory.input,
+                        "user_id": memory.user_id,
+                        "agent_id": memory.agent_id,
+                        "team_id": memory.team_id,
+                        "topics": memory.topics,
+                        "updated_at": int(time.time()),
+                    }
+                )
+
+            results: List[Union[UserMemory, Dict[str, Any]]] = []
+
+            with self.Session() as sess, sess.begin():
+                if memory_data:
+                    stmt = mysql.insert(table)
+                    stmt = stmt.on_duplicate_key_update(
+                        memory=stmt.inserted.memory,
+                        topics=stmt.inserted.topics,
+                        input=stmt.inserted.input,
+                        user_id=stmt.inserted.user_id,
+                        agent_id=stmt.inserted.agent_id,
+                        team_id=stmt.inserted.team_id,
+                        updated_at=int(time.time()),
+                    )
+                    sess.execute(stmt, memory_data)
+
+                    # Fetch the results
+                    memory_ids = [memory.memory_id for memory in memories if memory.memory_id]
+                    select_stmt = select(table).where(table.c.memory_id.in_(memory_ids))
+                    result = sess.execute(select_stmt).fetchall()
+
+                    for row in result:
+                        memory_raw = dict(row._mapping)
+                        if deserialize:
+                            results.append(UserMemory.from_dict(memory_raw))
+                        else:
+                            results.append(memory_raw)
+
+            return results
+
+        except Exception as e:
+            log_error(f"Exception during bulk memory upsert: {e}")
+            return []
 
     def clear_memories(self) -> None:
         """Delete all memories from the database.
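Both bulk methods reduce to the same MySQL-dialect idiom: build one INSERT ... ON DUPLICATE KEY UPDATE statement in which stmt.inserted refers to each incoming row's values, then hand execute() the full list of row dicts so the driver runs it as an executemany. A self-contained sketch of the idiom with a toy table; the table shape and connection URL are illustrative, not agno's:

import time

from sqlalchemy import Column, Integer, MetaData, String, Table, create_engine
from sqlalchemy.dialects.mysql import insert

metadata = MetaData()
memories = Table(
    "memories",
    metadata,
    Column("memory_id", String(64), primary_key=True),
    Column("memory", String(1024)),
    Column("updated_at", Integer),
)

engine = create_engine("mysql+pymysql://user:pass@host:3306/agno")  # illustrative URL
metadata.create_all(engine)

rows = [
    {"memory_id": "m-1", "memory": "likes tea", "updated_at": int(time.time())},
    {"memory_id": "m-2", "memory": "lives in Lisbon", "updated_at": int(time.time())},
]

stmt = insert(memories)
# stmt.inserted refers to the VALUES() of each incoming row, so rows whose
# primary key already exists are overwritten instead of raising a key error.
stmt = stmt.on_duplicate_key_update(
    memory=stmt.inserted.memory,
    updated_at=int(time.time()),
)
with engine.begin() as conn:
    conn.execute(stmt, rows)  # executemany: one statement, many parameter sets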
@@ -1143,7 +1415,8 @@ class SingleStoreDb(BaseDb):
                 sess.execute(table.delete())
 
         except Exception as e:
-
+            log_error(f"Exception deleting all memories: {e}")
+            raise e
 
     # -- Metrics methods --
     def _get_all_sessions_for_metrics_calculation(
@@ -1285,7 +1558,7 @@ class SingleStoreDb(BaseDb):
             return metrics_records
 
         except Exception as e:
-            log_error(f"Error
+            log_error(f"Error calculating metrics: {e}")
             raise e
 
     def get_metrics(
@@ -1328,7 +1601,7 @@ class SingleStoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error getting metrics: {e}")
-
+            raise e
 
     # -- Knowledge methods --
 
@@ -1338,15 +1611,19 @@ class SingleStoreDb(BaseDb):
         Args:
             id (str): The ID of the knowledge row to delete.
         """
-
-
-
+        try:
+            table = self._get_table(table_type="knowledge")
+            if table is None:
+                return
 
-
-
-
+            with self.Session() as sess, sess.begin():
+                stmt = table.delete().where(table.c.id == id)
+                sess.execute(stmt)
 
-
+            log_debug(f"Deleted knowledge content with id '{id}'")
+        except Exception as e:
+            log_error(f"Error deleting knowledge content: {e}")
+            raise e
 
     def get_knowledge_content(self, id: str) -> Optional[KnowledgeRow]:
         """Get a knowledge row from the database.
@@ -1357,16 +1634,20 @@ class SingleStoreDb(BaseDb):
         Returns:
             Optional[KnowledgeRow]: The knowledge row, or None if it doesn't exist.
         """
-
-
-
-
-        with self.Session() as sess, sess.begin():
-            stmt = select(table).where(table.c.id == id)
-            result = sess.execute(stmt).fetchone()
-            if result is None:
+        try:
+            table = self._get_table(table_type="knowledge")
+            if table is None:
                 return None
-
+
+            with self.Session() as sess, sess.begin():
+                stmt = select(table).where(table.c.id == id)
+                result = sess.execute(stmt).fetchone()
+                if result is None:
+                    return None
+                return KnowledgeRow.model_validate(result._mapping)
+        except Exception as e:
+            log_error(f"Error getting knowledge content: {e}")
+            raise e
 
     def get_knowledge_contents(
         self,
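get_knowledge_content now ends with KnowledgeRow.model_validate(result._mapping), mapping a raw SQLAlchemy row into a Pydantic model by column name. The pattern in isolation; the two-field KnowledgeRow below is an illustrative stand-in, since the real model's fields are not shown in this diff:

from typing import Optional

from pydantic import BaseModel
from sqlalchemy import create_engine, text


class KnowledgeRow(BaseModel):  # illustrative stand-in for agno's model
    id: str
    name: Optional[str] = None


engine = create_engine("sqlite://")  # any backend; the pattern is backend-agnostic
with engine.connect() as conn:
    conn.execute(text("CREATE TABLE knowledge (id TEXT, name TEXT)"))
    conn.execute(text("INSERT INTO knowledge VALUES ('k1', 'docs')"))
    row = conn.execute(text("SELECT * FROM knowledge WHERE id = 'k1'")).fetchone()
    if row is not None:
        # Row._mapping is a dict-like view keyed by column name, which
        # Pydantic v2's model_validate accepts (wrapped in dict() here for safety).
        content = KnowledgeRow.model_validate(dict(row._mapping))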
@@ -1419,7 +1700,7 @@ class SingleStoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error getting knowledge contents: {e}")
-
+            raise e
 
     def upsert_knowledge_content(self, knowledge_row: KnowledgeRow):
         """Upsert knowledge content in the database.
@@ -1464,7 +1745,7 @@ class SingleStoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error upserting knowledge row: {e}")
-
+            raise e
 
     # -- Eval methods --
 
@@ -1498,7 +1779,7 @@ class SingleStoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error creating eval run: {e}")
-
+            raise e
 
     def delete_eval_run(self, eval_run_id: str) -> None:
         """Delete an eval run from the database.
@@ -1521,7 +1802,7 @@ class SingleStoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error deleting eval run {eval_run_id}: {e}")
-            raise
+            raise e
 
     def delete_eval_runs(self, eval_run_ids: List[str]) -> None:
         """Delete multiple eval runs from the database.
@@ -1544,7 +1825,7 @@ class SingleStoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error deleting eval runs {eval_run_ids}: {e}")
-            raise
+            raise e
 
     def get_eval_run(
         self, eval_run_id: str, deserialize: Optional[bool] = True
@@ -1582,7 +1863,7 @@ class SingleStoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception getting eval run {eval_run_id}: {e}")
-
+            raise e
 
     def get_eval_runs(
         self,
@@ -1677,7 +1958,7 @@ class SingleStoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception getting eval runs: {e}")
-
+            raise e
 
     def rename_eval_run(
         self, eval_run_id: str, name: str, deserialize: Optional[bool] = True
@@ -1716,4 +1997,4 @@ class SingleStoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error renaming eval run {eval_run_id}: {e}")
-            raise
+            raise e