agno-2.0.8-py3-none-any.whl → agno-2.0.10-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/agent/agent.py +13 -8
- agno/db/base.py +14 -0
- agno/db/dynamo/dynamo.py +107 -27
- agno/db/firestore/firestore.py +109 -33
- agno/db/gcs_json/gcs_json_db.py +100 -20
- agno/db/in_memory/in_memory_db.py +95 -20
- agno/db/json/json_db.py +101 -21
- agno/db/migrations/v1_to_v2.py +181 -35
- agno/db/mongo/mongo.py +251 -26
- agno/db/mysql/mysql.py +307 -6
- agno/db/postgres/postgres.py +279 -33
- agno/db/redis/redis.py +99 -22
- agno/db/singlestore/singlestore.py +319 -38
- agno/db/sqlite/sqlite.py +339 -23
- agno/models/anthropic/claude.py +0 -20
- agno/models/aws/claude.py +1 -1
- agno/models/huggingface/huggingface.py +2 -1
- agno/models/ollama/chat.py +28 -2
- agno/models/openai/chat.py +7 -0
- agno/models/openai/responses.py +8 -8
- agno/os/interfaces/base.py +2 -0
- agno/os/interfaces/slack/router.py +50 -10
- agno/os/interfaces/slack/slack.py +6 -4
- agno/os/interfaces/whatsapp/router.py +7 -4
- agno/os/router.py +18 -0
- agno/os/utils.py +2 -2
- agno/reasoning/azure_ai_foundry.py +2 -2
- agno/reasoning/deepseek.py +2 -2
- agno/reasoning/groq.py +2 -2
- agno/reasoning/ollama.py +2 -2
- agno/reasoning/openai.py +2 -2
- agno/run/base.py +15 -2
- agno/team/team.py +10 -12
- agno/tools/mcp_toolbox.py +284 -0
- agno/tools/scrapegraph.py +58 -31
- agno/tools/whatsapp.py +1 -1
- agno/utils/models/claude.py +2 -2
- agno/utils/print_response/agent.py +2 -2
- agno/utils/print_response/team.py +6 -6
- agno/utils/reasoning.py +22 -1
- agno/utils/string.py +9 -0
- agno/workflow/workflow.py +11 -7
- {agno-2.0.8.dist-info → agno-2.0.10.dist-info}/METADATA +4 -1
- {agno-2.0.8.dist-info → agno-2.0.10.dist-info}/RECORD +47 -47
- agno/utils/models/aws_claude.py +0 -170
- {agno-2.0.8.dist-info → agno-2.0.10.dist-info}/WHEEL +0 -0
- {agno-2.0.8.dist-info → agno-2.0.10.dist-info}/licenses/LICENSE +0 -0
- {agno-2.0.8.dist-info → agno-2.0.10.dist-info}/top_level.txt +0 -0
agno/db/postgres/postgres.py
CHANGED
@@ -307,7 +307,7 @@ class PostgresDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error deleting session: {e}")
-
+            raise e
 
     def delete_sessions(self, session_ids: List[str]) -> None:
         """Delete all given sessions from the database.
@@ -332,6 +332,7 @@ class PostgresDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error deleting sessions: {e}")
+            raise e
 
     def get_session(
         self,
@@ -390,7 +391,7 @@ class PostgresDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception reading from session table: {e}")
-
+            raise e
 
     def get_sessions(
         self,
@@ -493,7 +494,7 @@ class PostgresDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception reading from session table: {e}")
-
+            raise e
 
     def rename_session(
         self, session_id: str, session_type: SessionType, session_name: str, deserialize: Optional[bool] = True
@@ -560,7 +561,7 @@ class PostgresDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception renaming session: {e}")
-
+            raise e
 
     def upsert_session(
         self, session: Session, deserialize: Optional[bool] = True
@@ -700,7 +701,173 @@ class PostgresDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception upserting into sessions table: {e}")
-
+            raise e
+
+    def upsert_sessions(
+        self, sessions: List[Session], deserialize: Optional[bool] = True
+    ) -> List[Union[Session, Dict[str, Any]]]:
+        """
+        Bulk insert or update multiple sessions.
+
+        Args:
+            sessions (List[Session]): The list of session data to upsert.
+            deserialize (Optional[bool]): Whether to deserialize the sessions. Defaults to True.
+
+        Returns:
+            List[Union[Session, Dict[str, Any]]]: List of upserted sessions
+
+        Raises:
+            Exception: If an error occurs during bulk upsert.
+        """
+        try:
+            if not sessions:
+                return []
+
+            table = self._get_table(table_type="sessions", create_table_if_not_found=True)
+            if table is None:
+                return []
+
+            # Group sessions by type for better handling
+            agent_sessions = [s for s in sessions if isinstance(s, AgentSession)]
+            team_sessions = [s for s in sessions if isinstance(s, TeamSession)]
+            workflow_sessions = [s for s in sessions if isinstance(s, WorkflowSession)]
+
+            results: List[Union[Session, Dict[str, Any]]] = []
+
+            # Bulk upsert agent sessions
+            if agent_sessions:
+                session_records = []
+                for agent_session in agent_sessions:
+                    session_dict = agent_session.to_dict()
+                    session_records.append(
+                        {
+                            "session_id": session_dict.get("session_id"),
+                            "session_type": SessionType.AGENT.value,
+                            "agent_id": session_dict.get("agent_id"),
+                            "user_id": session_dict.get("user_id"),
+                            "agent_data": session_dict.get("agent_data"),
+                            "session_data": session_dict.get("session_data"),
+                            "summary": session_dict.get("summary"),
+                            "metadata": session_dict.get("metadata"),
+                            "runs": session_dict.get("runs"),
+                            "created_at": session_dict.get("created_at"),
+                            "updated_at": int(time.time()),
+                        }
+                    )
+
+                with self.Session() as sess, sess.begin():
+                    stmt = postgresql.insert(table)
+                    update_columns = {
+                        col.name: stmt.excluded[col.name]
+                        for col in table.columns
+                        if col.name not in ["id", "session_id", "created_at"]
+                    }
+                    stmt = stmt.on_conflict_do_update(index_elements=["session_id"], set_=update_columns).returning(
+                        table
+                    )
+
+                    result = sess.execute(stmt, session_records)
+                    for row in result.fetchall():
+                        session_dict = dict(row._mapping)
+                        if deserialize:
+                            deserialized_agent_session = AgentSession.from_dict(session_dict)
+                            if deserialized_agent_session is None:
+                                continue
+                            results.append(deserialized_agent_session)
+                        else:
+                            results.append(session_dict)
+
+            # Bulk upsert team sessions
+            if team_sessions:
+                session_records = []
+                for team_session in team_sessions:
+                    session_dict = team_session.to_dict()
+                    session_records.append(
+                        {
+                            "session_id": session_dict.get("session_id"),
+                            "session_type": SessionType.TEAM.value,
+                            "team_id": session_dict.get("team_id"),
+                            "user_id": session_dict.get("user_id"),
+                            "team_data": session_dict.get("team_data"),
+                            "session_data": session_dict.get("session_data"),
+                            "summary": session_dict.get("summary"),
+                            "metadata": session_dict.get("metadata"),
+                            "runs": session_dict.get("runs"),
+                            "created_at": session_dict.get("created_at"),
+                            "updated_at": int(time.time()),
+                        }
+                    )
+
+                with self.Session() as sess, sess.begin():
+                    stmt = postgresql.insert(table)
+                    update_columns = {
+                        col.name: stmt.excluded[col.name]
+                        for col in table.columns
+                        if col.name not in ["id", "session_id", "created_at"]
+                    }
+                    stmt = stmt.on_conflict_do_update(index_elements=["session_id"], set_=update_columns).returning(
+                        table
+                    )
+
+                    result = sess.execute(stmt, session_records)
+                    for row in result.fetchall():
+                        session_dict = dict(row._mapping)
+                        if deserialize:
+                            deserialized_team_session = TeamSession.from_dict(session_dict)
+                            if deserialized_team_session is None:
+                                continue
+                            results.append(deserialized_team_session)
+                        else:
+                            results.append(session_dict)
+
+            # Bulk upsert workflow sessions
+            if workflow_sessions:
+                session_records = []
+                for workflow_session in workflow_sessions:
+                    session_dict = workflow_session.to_dict()
+                    session_records.append(
+                        {
+                            "session_id": session_dict.get("session_id"),
+                            "session_type": SessionType.WORKFLOW.value,
+                            "workflow_id": session_dict.get("workflow_id"),
+                            "user_id": session_dict.get("user_id"),
+                            "workflow_data": session_dict.get("workflow_data"),
+                            "session_data": session_dict.get("session_data"),
+                            "summary": session_dict.get("summary"),
+                            "metadata": session_dict.get("metadata"),
+                            "runs": session_dict.get("runs"),
+                            "created_at": session_dict.get("created_at"),
+                            "updated_at": int(time.time()),
+                        }
+                    )
+
+                with self.Session() as sess, sess.begin():
+                    stmt = postgresql.insert(table)
+                    update_columns = {
+                        col.name: stmt.excluded[col.name]
+                        for col in table.columns
+                        if col.name not in ["id", "session_id", "created_at"]
+                    }
+                    stmt = stmt.on_conflict_do_update(index_elements=["session_id"], set_=update_columns).returning(
+                        table
+                    )
+
+                    result = sess.execute(stmt, session_records)
+                    for row in result.fetchall():
+                        session_dict = dict(row._mapping)
+                        if deserialize:
+                            deserialized_workflow_session = WorkflowSession.from_dict(session_dict)
+                            if deserialized_workflow_session is None:
+                                continue
+                            results.append(deserialized_workflow_session)
+                        else:
+                            results.append(session_dict)
+
+            return results
+
+        except Exception as e:
+            log_error(f"Exception bulk upserting sessions: {e}")
+            return []
 
     # -- Memory methods --
     def delete_user_memory(self, memory_id: str):
@@ -729,6 +896,7 @@ class PostgresDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error deleting user memory: {e}")
+            raise e
 
     def delete_user_memories(self, memory_ids: List[str]) -> None:
         """Delete user memories from the database.
@@ -755,6 +923,7 @@ class PostgresDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error deleting user memories: {e}")
+            raise e
 
     def get_all_memory_topics(self) -> List[str]:
         """Get all memory topics from the database.
@@ -814,7 +983,7 @@ class PostgresDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception reading from memory table: {e}")
-
+            raise e
 
     def get_user_memories(
         self,
@@ -897,7 +1066,7 @@ class PostgresDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception reading from memory table: {e}")
-
+            raise e
 
     def clear_memories(self) -> None:
         """Delete all memories from the database.
@@ -914,7 +1083,8 @@ class PostgresDb(BaseDb):
                 sess.execute(table.delete())
 
         except Exception as e:
-
+            log_error(f"Exception deleting all memories: {e}")
+            raise e
 
     def get_user_memory_stats(
         self, limit: Optional[int] = None, page: Optional[int] = None
@@ -981,7 +1151,7 @@ class PostgresDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception getting user memory stats: {e}")
-
+            raise e
 
     def upsert_user_memory(
         self, memory: UserMemory, deserialize: Optional[bool] = True
@@ -1043,7 +1213,80 @@ class PostgresDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception upserting user memory: {e}")
-
+            raise e
+
+    def upsert_memories(
+        self, memories: List[UserMemory], deserialize: Optional[bool] = True
+    ) -> List[Union[UserMemory, Dict[str, Any]]]:
+        """
+        Bulk insert or update multiple memories in the database for improved performance.
+
+        Args:
+            memories (List[UserMemory]): The list of memories to upsert.
+            deserialize (Optional[bool]): Whether to deserialize the memories. Defaults to True.
+
+        Returns:
+            List[Union[UserMemory, Dict[str, Any]]]: List of upserted memories
+
+        Raises:
+            Exception: If an error occurs during bulk upsert.
+        """
+        try:
+            if not memories:
+                return []
+
+            table = self._get_table(table_type="memories", create_table_if_not_found=True)
+            if table is None:
+                return []
+
+            # Prepare memory records for bulk insert
+            memory_records = []
+            current_time = int(time.time())
+
+            for memory in memories:
+                if memory.memory_id is None:
+                    memory.memory_id = str(uuid4())
+
+                memory_records.append(
+                    {
+                        "memory_id": memory.memory_id,
+                        "memory": memory.memory,
+                        "input": memory.input,
+                        "user_id": memory.user_id,
+                        "agent_id": memory.agent_id,
+                        "team_id": memory.team_id,
+                        "topics": memory.topics,
+                        "updated_at": current_time,
+                    }
+                )
+
+            results: List[Union[UserMemory, Dict[str, Any]]] = []
+
+            with self.Session() as sess, sess.begin():
+                stmt = postgresql.insert(table)
+                update_columns = {
+                    col.name: stmt.excluded[col.name]
+                    for col in table.columns
+                    if col.name not in ["memory_id"]  # Don't update primary key
+                }
+                stmt = stmt.on_conflict_do_update(index_elements=["memory_id"], set_=update_columns).returning(table)
+
+                result = sess.execute(stmt, memory_records)
+                for row in result.fetchall():
+                    memory_dict = dict(row._mapping)
+                    if deserialize:
+                        deserialized_memory = UserMemory.from_dict(memory_dict)
+                        if deserialized_memory is None:
+                            continue
+                        results.append(deserialized_memory)
+                    else:
+                        results.append(memory_dict)
+
+            return results
+
+        except Exception as e:
+            log_error(f"Exception bulk upserting memories: {e}")
+            return []
 
     # -- Metrics methods --
     def _get_all_sessions_for_metrics_calculation(
@@ -1087,7 +1330,7 @@ class PostgresDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception reading from sessions table: {e}")
-
+            raise e
 
     def _get_metrics_calculation_starting_date(self, table: Table) -> Optional[date]:
         """Get the first date for which metrics calculation is needed:
@@ -1194,7 +1437,7 @@ class PostgresDb(BaseDb):
 
        except Exception as e:
            log_error(f"Exception refreshing metrics: {e}")
-
+            raise e
 
     def get_metrics(
         self,
@@ -1235,8 +1478,8 @@ class PostgresDb(BaseDb):
                 return [row._mapping for row in result], latest_updated_at
 
         except Exception as e:
-
-
+            log_error(f"Exception getting metrics: {e}")
+            raise e
 
     # -- Knowledge methods --
     def delete_knowledge_content(self, id: str):
@@ -1245,17 +1488,18 @@ class PostgresDb(BaseDb):
         Args:
            id (str): The ID of the knowledge row to delete.
        """
-        table = self._get_table(table_type="knowledge")
-        if table is None:
-            return
-
        try:
+            table = self._get_table(table_type="knowledge")
+            if table is None:
+                return
+
             with self.Session() as sess, sess.begin():
                 stmt = table.delete().where(table.c.id == id)
                 sess.execute(stmt)
 
         except Exception as e:
             log_error(f"Exception deleting knowledge content: {e}")
+            raise e
 
     def get_knowledge_content(self, id: str) -> Optional[KnowledgeRow]:
         """Get a knowledge row from the database.
@@ -1266,11 +1510,11 @@ class PostgresDb(BaseDb):
         Returns:
             Optional[KnowledgeRow]: The knowledge row, or None if it doesn't exist.
         """
-        table = self._get_table(table_type="knowledge")
-        if table is None:
-            return None
-
         try:
+            table = self._get_table(table_type="knowledge")
+            if table is None:
+                return None
+
             with self.Session() as sess, sess.begin():
                 stmt = select(table).where(table.c.id == id)
                 result = sess.execute(stmt).fetchone()
@@ -1281,7 +1525,7 @@ class PostgresDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception getting knowledge content: {e}")
-
+            raise e
 
     def get_knowledge_contents(
         self,
@@ -1305,11 +1549,11 @@ class PostgresDb(BaseDb):
         Raises:
             Exception: If an error occurs during retrieval.
         """
-        table = self._get_table(table_type="knowledge")
-        if table is None:
-            return [], 0
-
         try:
+            table = self._get_table(table_type="knowledge")
+            if table is None:
+                return [], 0
+
             with self.Session() as sess, sess.begin():
                 stmt = select(table)
 
@@ -1332,7 +1576,7 @@ class PostgresDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception getting knowledge contents: {e}")
-
+            raise e
 
     def upsert_knowledge_content(self, knowledge_row: KnowledgeRow):
         """Upsert knowledge content in the database.
@@ -1410,7 +1654,7 @@ class PostgresDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error upserting knowledge row: {e}")
-
+            raise e
 
     # -- Eval methods --
     def create_eval_run(self, eval_run: EvalRunRecord) -> Optional[EvalRunRecord]:
@@ -1443,7 +1687,7 @@ class PostgresDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error creating eval run: {e}")
-
+            raise e
 
     def delete_eval_run(self, eval_run_id: str) -> None:
         """Delete an eval run from the database.
@@ -1467,6 +1711,7 @@ class PostgresDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error deleting eval run {eval_run_id}: {e}")
+            raise e
 
     def delete_eval_runs(self, eval_run_ids: List[str]) -> None:
         """Delete multiple eval runs from the database.
@@ -1490,6 +1735,7 @@ class PostgresDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error deleting eval runs {eval_run_ids}: {e}")
+            raise e
 
     def get_eval_run(
         self, eval_run_id: str, deserialize: Optional[bool] = True
@@ -1527,7 +1773,7 @@ class PostgresDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception getting eval run {eval_run_id}: {e}")
-
+            raise e
 
     def get_eval_runs(
         self,
@@ -1622,7 +1868,7 @@ class PostgresDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception getting eval runs: {e}")
-
+            raise e
 
     def rename_eval_run(
         self, eval_run_id: str, name: str, deserialize: Optional[bool] = True
@@ -1658,7 +1904,7 @@ class PostgresDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error upserting eval run name {eval_run_id}: {e}")
-
+            raise e
 
     # -- Migrations --
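For orientation, here is a minimal sketch of how the bulk upsert methods added in this release might be called. Only the method names, parameters, return shapes, and the `UserMemory` field names come from the diff above; the import paths, the `db_url` constructor argument, and the keyword construction of `UserMemory` are assumptions for illustration, not confirmed API.

```python
from agno.db.postgres import PostgresDb  # module path as it appears in this diff
from agno.memory import UserMemory       # import path assumed

# Constructor argument assumed; point it at any reachable Postgres instance.
db = PostgresDb(db_url="postgresql+psycopg://ai:ai@localhost:5532/ai")

# Field names (memory, user_id, topics) are the ones read by upsert_memories in the diff.
memories = [
    UserMemory(memory="Prefers concise answers", user_id="user-1", topics=["preferences"]),
    UserMemory(memory="Works in UTC+2", user_id="user-1", topics=["schedule"]),
]

# Bulk path: one INSERT ... ON CONFLICT DO UPDATE per batch; records without a
# memory_id get one assigned before the statement executes.
upserted = db.upsert_memories(memories)                      # List[UserMemory]
raw_rows = db.upsert_memories(memories, deserialize=False)   # List[Dict[str, Any]]
```

`upsert_sessions` follows the same pattern for mixed batches of `AgentSession`, `TeamSession`, and `WorkflowSession` objects, issuing one statement per session type. Note the error-handling asymmetry visible in the diff: the single-row methods now log and re-raise, while the bulk methods still log and return `[]`, so a caller should treat an empty result from a non-empty input as a possible failure.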