agno-2.0.8-py3-none-any.whl → agno-2.0.10-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. agno/agent/agent.py +13 -8
  2. agno/db/base.py +14 -0
  3. agno/db/dynamo/dynamo.py +107 -27
  4. agno/db/firestore/firestore.py +109 -33
  5. agno/db/gcs_json/gcs_json_db.py +100 -20
  6. agno/db/in_memory/in_memory_db.py +95 -20
  7. agno/db/json/json_db.py +101 -21
  8. agno/db/migrations/v1_to_v2.py +181 -35
  9. agno/db/mongo/mongo.py +251 -26
  10. agno/db/mysql/mysql.py +307 -6
  11. agno/db/postgres/postgres.py +279 -33
  12. agno/db/redis/redis.py +99 -22
  13. agno/db/singlestore/singlestore.py +319 -38
  14. agno/db/sqlite/sqlite.py +339 -23
  15. agno/models/anthropic/claude.py +0 -20
  16. agno/models/aws/claude.py +1 -1
  17. agno/models/huggingface/huggingface.py +2 -1
  18. agno/models/ollama/chat.py +28 -2
  19. agno/models/openai/chat.py +7 -0
  20. agno/models/openai/responses.py +8 -8
  21. agno/os/interfaces/base.py +2 -0
  22. agno/os/interfaces/slack/router.py +50 -10
  23. agno/os/interfaces/slack/slack.py +6 -4
  24. agno/os/interfaces/whatsapp/router.py +7 -4
  25. agno/os/router.py +18 -0
  26. agno/os/utils.py +2 -2
  27. agno/reasoning/azure_ai_foundry.py +2 -2
  28. agno/reasoning/deepseek.py +2 -2
  29. agno/reasoning/groq.py +2 -2
  30. agno/reasoning/ollama.py +2 -2
  31. agno/reasoning/openai.py +2 -2
  32. agno/run/base.py +15 -2
  33. agno/team/team.py +10 -12
  34. agno/tools/mcp_toolbox.py +284 -0
  35. agno/tools/scrapegraph.py +58 -31
  36. agno/tools/whatsapp.py +1 -1
  37. agno/utils/models/claude.py +2 -2
  38. agno/utils/print_response/agent.py +2 -2
  39. agno/utils/print_response/team.py +6 -6
  40. agno/utils/reasoning.py +22 -1
  41. agno/utils/string.py +9 -0
  42. agno/workflow/workflow.py +11 -7
  43. {agno-2.0.8.dist-info → agno-2.0.10.dist-info}/METADATA +4 -1
  44. {agno-2.0.8.dist-info → agno-2.0.10.dist-info}/RECORD +47 -47
  45. agno/utils/models/aws_claude.py +0 -170
  46. {agno-2.0.8.dist-info → agno-2.0.10.dist-info}/WHEEL +0 -0
  47. {agno-2.0.8.dist-info → agno-2.0.10.dist-info}/licenses/LICENSE +0 -0
  48. {agno-2.0.8.dist-info → agno-2.0.10.dist-info}/top_level.txt +0 -0
agno/db/firestore/firestore.py

@@ -205,7 +205,7 @@ class FirestoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error deleting session: {e}")
-            return False
+            raise e
 
     def delete_sessions(self, session_ids: List[str]) -> None:
         """Delete multiple sessions from the database.
@@ -230,6 +230,7 @@ class FirestoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error deleting sessions: {e}")
+            raise e
 
     def get_session(
         self,
@@ -288,7 +289,7 @@ class FirestoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception reading session: {e}")
-            return None
+            raise e
 
     def get_sessions(
         self,
@@ -402,7 +403,7 @@ class FirestoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception reading sessions: {e}")
-            return [] if deserialize else ([], 0)
+            raise e
 
     def rename_session(
         self, session_id: str, session_type: SessionType, session_name: str, deserialize: Optional[bool] = True
@@ -457,7 +458,7 @@ class FirestoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception renaming session: {e}")
-            return None
+            raise e
 
     def upsert_session(
         self, session: Session, deserialize: Optional[bool] = True
@@ -554,7 +555,44 @@ class FirestoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception upserting session: {e}")
-            return None
+            raise e
+
+    def upsert_sessions(
+        self, sessions: List[Session], deserialize: Optional[bool] = True
+    ) -> List[Union[Session, Dict[str, Any]]]:
+        """
+        Bulk upsert multiple sessions for improved performance on large datasets.
+
+        Args:
+            sessions (List[Session]): List of sessions to upsert.
+            deserialize (Optional[bool]): Whether to deserialize the sessions. Defaults to True.
+
+        Returns:
+            List[Union[Session, Dict[str, Any]]]: List of upserted sessions.
+
+        Raises:
+            Exception: If an error occurs during bulk upsert.
+        """
+        if not sessions:
+            return []
+
+        try:
+            log_info(
+                f"FirestoreDb doesn't support efficient bulk operations, falling back to individual upserts for {len(sessions)} sessions"
+            )
+
+            # Fall back to individual upserts
+            results = []
+            for session in sessions:
+                if session is not None:
+                    result = self.upsert_session(session, deserialize=deserialize)
+                    if result is not None:
+                        results.append(result)
+            return results
+
+        except Exception as e:
+            log_error(f"Exception during bulk session upsert: {e}")
+            return []
 
     # -- Memory methods --
 
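The hunk above adds a bulk upsert_sessions helper to FirestoreDb (a matching helper appears in the GcsJsonDb hunks later in this diff). A minimal sketch of how a caller might use it, assuming db is an already constructed backend instance and sessions is a list of agno Session objects; the sync_sessions wrapper is illustrative and not part of agno:

    from typing import Any, Dict, List, Union

    def sync_sessions(db, sessions) -> List[Union[Any, Dict[str, Any]]]:
        """Write many sessions at once via the new bulk helper.

        Per the hunk above, FirestoreDb has no native batch upsert, so
        upsert_sessions loops over upsert_session internally and returns
        the sessions that were upserted successfully.
        """
        if not sessions:
            return []
        # deserialize=True (the default) yields Session objects; per the
        # return annotation, deserialize=False yields plain dicts instead.
        return db.upsert_sessions(sessions, deserialize=True)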
@@ -587,6 +625,7 @@ class FirestoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error deleting user memory: {e}")
+            raise e
 
     def delete_user_memories(self, memory_ids: List[str]) -> None:
         """Delete user memories from the database.
@@ -617,6 +656,7 @@ class FirestoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error deleting memories: {e}")
+            raise e
 
     def get_all_memory_topics(self, create_collection_if_not_found: Optional[bool] = True) -> List[str]:
         """Get all memory topics from the database.
@@ -644,8 +684,8 @@ class FirestoreDb(BaseDb):
             return [topic for topic in all_topics if topic]
 
         except Exception as e:
-            log_error(f"Exception reading from collection: {e}")
-            return []
+            log_error(f"Exception getting all memory topics: {e}")
+            raise e
 
     def get_user_memory(self, memory_id: str, deserialize: Optional[bool] = True) -> Optional[UserMemory]:
         """Get a memory from the database.
@@ -677,8 +717,8 @@ class FirestoreDb(BaseDb):
             return UserMemory.from_dict(result)
 
         except Exception as e:
-            log_error(f"Exception reading from collection: {e}")
-            return None
+            log_error(f"Exception getting user memory: {e}")
+            raise e
 
     def get_user_memories(
         self,
@@ -756,8 +796,8 @@ class FirestoreDb(BaseDb):
             return [UserMemory.from_dict(record) for record in records]
 
         except Exception as e:
-            log_error(f"Exception reading from collection: {e}")
-            return []
+            log_error(f"Exception getting user memories: {e}")
+            raise e
 
     def get_user_memory_stats(
         self,
@@ -813,7 +853,7 @@ class FirestoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception getting user memory stats: {e}")
-            return [], 0
+            raise e
 
     def upsert_user_memory(
         self, memory: UserMemory, deserialize: Optional[bool] = True
@@ -859,7 +899,43 @@ class FirestoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception upserting user memory: {e}")
-            return None
+            raise e
+
+    def upsert_memories(
+        self, memories: List[UserMemory], deserialize: Optional[bool] = True
+    ) -> List[Union[UserMemory, Dict[str, Any]]]:
+        """
+        Bulk upsert multiple user memories for improved performance on large datasets.
+
+        Args:
+            memories (List[UserMemory]): List of memories to upsert.
+            deserialize (Optional[bool]): Whether to deserialize the memories. Defaults to True.
+
+        Returns:
+            List[Union[UserMemory, Dict[str, Any]]]: List of upserted memories.
+
+        Raises:
+            Exception: If an error occurs during bulk upsert.
+        """
+        if not memories:
+            return []
+
+        try:
+            log_info(
+                f"FirestoreDb doesn't support efficient bulk operations, falling back to individual upserts for {len(memories)} memories"
+            )
+            # Fall back to individual upserts
+            results = []
+            for memory in memories:
+                if memory is not None:
+                    result = self.upsert_user_memory(memory, deserialize=deserialize)
+                    if result is not None:
+                        results.append(result)
+            return results
+
+        except Exception as e:
+            log_error(f"Exception during bulk memory upsert: {e}")
+            return []
 
     def clear_memories(self) -> None:
         """Delete all memories from the database.
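The hunk above applies the same pattern to user memories. One detail worth noting: the single-row methods in this file now raise on failure, while the bulk helpers catch exceptions and return an empty (or partial) list. A short sketch of a strict caller, with the wrapper name and the db/memories objects assumed rather than taken from this diff:

    from typing import Any, Dict, List, Union

    def sync_memories_strict(db, memories) -> List[Union[Any, Dict[str, Any]]]:
        """Bulk-write user memories and fail loudly on partial success."""
        saved = db.upsert_memories(memories, deserialize=True)
        if len(saved) != len(memories):
            # upsert_memories swallows errors and returns [] (or a partial
            # list), so strict callers must check the count themselves.
            raise RuntimeError(f"only {len(saved)} of {len(memories)} memories were upserted")
        return saved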
@@ -892,9 +968,8 @@ class FirestoreDb(BaseDb):
                 batch.commit()
 
         except Exception as e:
-            from agno.utils.log import log_warning
-
-            log_warning(f"Exception deleting all memories: {e}")
+            log_error(f"Exception deleting all memories: {e}")
+            raise e
 
     # -- Metrics methods --
 
@@ -928,8 +1003,8 @@ class FirestoreDb(BaseDb):
             return results
 
         except Exception as e:
-            log_error(f"Exception reading from sessions collection: {e}")
-            return []
+            log_error(f"Exception getting all sessions for metrics calculation: {e}")
+            raise e
 
     def _get_metrics_calculation_starting_date(self, collection_ref) -> Optional[date]:
         """Get the first date for which metrics calculation is needed."""
@@ -961,7 +1036,7 @@ class FirestoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception getting metrics calculation starting date: {e}")
-            return None
+            raise e
 
     def calculate_metrics(self) -> Optional[list[dict]]:
         """Calculate metrics for all dates without complete metrics."""
@@ -1053,7 +1128,7 @@ class FirestoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception getting metrics: {e}")
-            return [], None
+            raise e
 
     # -- Knowledge methods --
 
@@ -1074,7 +1149,8 @@ class FirestoreDb(BaseDb):
                 doc.reference.delete()
 
         except Exception as e:
-            log_error(f"Error deleting knowledge source: {e}")
+            log_error(f"Error deleting knowledge content: {e}")
+            raise e
 
     def get_knowledge_content(self, id: str) -> Optional[KnowledgeRow]:
         """Get a knowledge row from the database.
@@ -1099,8 +1175,8 @@ class FirestoreDb(BaseDb):
             return None
 
         except Exception as e:
-            log_error(f"Error getting knowledge source: {e}")
-            return None
+            log_error(f"Error getting knowledge content: {e}")
+            raise e
 
     def get_knowledge_contents(
         self,
@@ -1148,8 +1224,8 @@ class FirestoreDb(BaseDb):
             return knowledge_rows, total_count
 
         except Exception as e:
-            log_error(f"Error getting knowledge sources: {e}")
-            return [], 0
+            log_error(f"Error getting knowledge contents: {e}")
+            raise e
 
     def upsert_knowledge_content(self, knowledge_row: KnowledgeRow):
         """Upsert knowledge content in the database.
@@ -1179,8 +1255,8 @@ class FirestoreDb(BaseDb):
             return knowledge_row
 
         except Exception as e:
-            log_error(f"Error upserting knowledge document: {e}")
-            return None
+            log_error(f"Error upserting knowledge content: {e}")
+            raise e
 
     # -- Eval methods --
 
@@ -1203,7 +1279,7 @@ class FirestoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error creating eval run: {e}")
-            return None
+            raise e
 
     def delete_eval_run(self, eval_run_id: str) -> None:
         """Delete an eval run from the database."""
@@ -1223,7 +1299,7 @@ class FirestoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error deleting eval run {eval_run_id}: {e}")
-            raise
+            raise e
 
     def delete_eval_runs(self, eval_run_ids: List[str]) -> None:
         """Delete multiple eval runs from the database.
@@ -1254,7 +1330,7 @@ class FirestoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error deleting eval runs {eval_run_ids}: {e}")
-            raise
+            raise e
 
     def get_eval_run(
         self, eval_run_id: str, deserialize: Optional[bool] = True
@@ -1292,7 +1368,7 @@ class FirestoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception getting eval run {eval_run_id}: {e}")
-            return None
+            raise e
 
     def get_eval_runs(
         self,
@@ -1393,7 +1469,7 @@ class FirestoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception getting eval runs: {e}")
-            return [] if deserialize else ([], 0)
+            raise e
 
     def rename_eval_run(
         self, eval_run_id: str, name: str, deserialize: Optional[bool] = True
@@ -1439,4 +1515,4 @@ class FirestoreDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error updating eval run name {eval_run_id}: {e}")
-            raise
+            raise e
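Across FirestoreDb, error paths that previously returned a sentinel (None, False, [], ([], 0)) now log and re-raise the caught exception. A hedged sketch of how a caller that relied on the old None-on-error behaviour of get_session might adapt; the full get_session signature is not shown in this diff, so the session_id and session_type keywords below are assumed from the neighbouring rename_session signature:

    from typing import Optional

    def load_session_or_none(db, session_id: str, session_type) -> Optional[object]:
        """Locally restore the pre-2.0.10 behaviour of swallowing backend errors."""
        try:
            # In 2.0.8 this logged the exception and returned None; as of
            # this diff it logs and re-raises.
            return db.get_session(session_id=session_id, session_type=session_type)
        except Exception:
            # The db layer already logged the error before re-raising.
            return None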
agno/db/gcs_json/gcs_json_db.py

@@ -162,7 +162,7 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Error deleting session: {e}")
-            return False
+            raise e
 
     def delete_sessions(self, session_ids: List[str]) -> None:
         """Delete multiple sessions from the GCS JSON file.
@@ -181,6 +181,7 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Error deleting sessions: {e}")
+            raise e
 
     def get_session(
         self,
@@ -233,7 +234,7 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Exception reading from session file: {e}")
-            return None
+            raise e
 
     def get_sessions(
         self,
@@ -328,7 +329,7 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Exception reading from session file: {e}")
-            return [] if deserialize else ([], 0)
+            raise e
 
     def rename_session(
         self, session_id: str, session_type: SessionType, session_name: str, deserialize: Optional[bool] = True
@@ -363,7 +364,7 @@ class GcsJsonDb(BaseDb):
             return None
         except Exception as e:
             log_warning(f"Exception renaming session: {e}")
-            return None
+            raise e
 
     def upsert_session(
         self, session: Session, deserialize: Optional[bool] = True
@@ -408,7 +409,44 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Exception upserting session: {e}")
-            return None
+            raise e
+
+    def upsert_sessions(
+        self, sessions: List[Session], deserialize: Optional[bool] = True
+    ) -> List[Union[Session, Dict[str, Any]]]:
+        """
+        Bulk upsert multiple sessions for improved performance on large datasets.
+
+        Args:
+            sessions (List[Session]): List of sessions to upsert.
+            deserialize (Optional[bool]): Whether to deserialize the sessions. Defaults to True.
+
+        Returns:
+            List[Union[Session, Dict[str, Any]]]: List of upserted sessions.
+
+        Raises:
+            Exception: If an error occurs during bulk upsert.
+        """
+        if not sessions:
+            return []
+
+        try:
+            log_info(
+                f"GcsJsonDb doesn't support efficient bulk operations, falling back to individual upserts for {len(sessions)} sessions"
+            )
+
+            # Fall back to individual upserts
+            results = []
+            for session in sessions:
+                if session is not None:
+                    result = self.upsert_session(session, deserialize=deserialize)
+                    if result is not None:
+                        results.append(result)
+            return results
+
+        except Exception as e:
+            log_error(f"Exception during bulk session upsert: {e}")
+            return []
 
     def _matches_session_key(self, existing_session: Dict[str, Any], session: Session) -> bool:
         """Check if existing session matches the key for the session type."""
@@ -437,6 +475,7 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Error deleting user memory: {e}")
+            raise e
 
     def delete_user_memories(self, memory_ids: List[str]) -> None:
         """Delete multiple user memories from the GCS JSON file."""
@@ -447,6 +486,7 @@ class GcsJsonDb(BaseDb):
             log_debug(f"Successfully deleted user memories with ids: {memory_ids}")
         except Exception as e:
             log_warning(f"Error deleting user memories: {e}")
+            raise e
 
     def get_all_memory_topics(self) -> List[str]:
         """Get all memory topics from the GCS JSON file."""
@@ -461,7 +501,7 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Exception reading from memory file: {e}")
-            return []
+            raise e
 
     def get_user_memory(
         self, memory_id: str, deserialize: Optional[bool] = True
@@ -480,7 +520,7 @@ class GcsJsonDb(BaseDb):
             return None
         except Exception as e:
             log_warning(f"Exception reading from memory file: {e}")
-            return None
+            raise e
 
     def get_user_memories(
         self,
@@ -538,7 +578,7 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Exception reading from memory file: {e}")
-            return [] if deserialize else ([], 0)
+            raise e
 
     def get_user_memory_stats(
         self, limit: Optional[int] = None, page: Optional[int] = None
@@ -574,7 +614,7 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Exception getting user memory stats: {e}")
-            return [], 0
+            raise e
 
     def upsert_user_memory(
         self, memory: UserMemory, deserialize: Optional[bool] = True
@@ -608,7 +648,43 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception upserting user memory: {e}")
-            return None
+            raise e
+
+    def upsert_memories(
+        self, memories: List[UserMemory], deserialize: Optional[bool] = True
+    ) -> List[Union[UserMemory, Dict[str, Any]]]:
+        """
+        Bulk upsert multiple user memories for improved performance on large datasets.
+
+        Args:
+            memories (List[UserMemory]): List of memories to upsert.
+            deserialize (Optional[bool]): Whether to deserialize the memories. Defaults to True.
+
+        Returns:
+            List[Union[UserMemory, Dict[str, Any]]]: List of upserted memories.
+
+        Raises:
+            Exception: If an error occurs during bulk upsert.
+        """
+        if not memories:
+            return []
+
+        try:
+            log_info(
+                f"GcsJsonDb doesn't support efficient bulk operations, falling back to individual upserts for {len(memories)} memories"
+            )
+            # Fall back to individual upserts
+            results = []
+            for memory in memories:
+                if memory is not None:
+                    result = self.upsert_user_memory(memory, deserialize=deserialize)
+                    if result is not None:
+                        results.append(result)
+            return results
+
+        except Exception as e:
+            log_error(f"Exception during bulk memory upsert: {e}")
+            return []
 
     def clear_memories(self) -> None:
         """Delete all memories from the database.
@@ -622,6 +698,7 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Exception deleting all memories: {e}")
+            raise e
 
     # -- Metrics methods --
     def calculate_metrics(self) -> Optional[list[dict]]:
@@ -688,7 +765,7 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Exception refreshing metrics: {e}")
-            return None
+            raise e
 
     def _get_metrics_calculation_starting_date(self, metrics: List[Dict[str, Any]]) -> Optional[date]:
         """Get the first date for which metrics calculation is needed."""
@@ -743,7 +820,7 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Exception reading sessions for metrics: {e}")
-            return []
+            raise e
 
     def get_metrics(
         self,
@@ -775,7 +852,7 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Exception getting metrics: {e}")
-            return [], None
+            raise e
 
     # -- Knowledge methods --
     def delete_knowledge_content(self, id: str):
@@ -786,6 +863,7 @@ class GcsJsonDb(BaseDb):
             self._write_json_file(self.knowledge_table_name, knowledge_items)
         except Exception as e:
             log_warning(f"Error deleting knowledge content: {e}")
+            raise e
 
     def get_knowledge_content(self, id: str) -> Optional[KnowledgeRow]:
         """Get knowledge content by ID."""
@@ -799,7 +877,7 @@ class GcsJsonDb(BaseDb):
             return None
         except Exception as e:
             log_warning(f"Error getting knowledge content: {e}")
-            return None
+            raise e
 
     def get_knowledge_contents(
         self,
@@ -828,7 +906,7 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Error getting knowledge contents: {e}")
-            return [], 0
+            raise e
 
     def upsert_knowledge_content(self, knowledge_row: KnowledgeRow):
         """Upsert knowledge content in the GCS JSON file."""
@@ -852,7 +930,7 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Error upserting knowledge row: {e}")
-            return None
+            raise e
 
     # -- Eval methods --
     def create_eval_run(self, eval_run: EvalRunRecord) -> Optional[EvalRunRecord]:
@@ -871,7 +949,7 @@ class GcsJsonDb(BaseDb):
             return eval_run
         except Exception as e:
             log_warning(f"Error creating eval run: {e}")
-            return None
+            raise e
 
     def delete_eval_run(self, eval_run_id: str) -> None:
         """Delete an eval run from the GCS JSON file."""
@@ -887,6 +965,7 @@ class GcsJsonDb(BaseDb):
                 log_warning(f"No eval run found with ID: {eval_run_id}")
         except Exception as e:
             log_warning(f"Error deleting eval run {eval_run_id}: {e}")
+            raise e
 
     def delete_eval_runs(self, eval_run_ids: List[str]) -> None:
         """Delete multiple eval runs from the GCS JSON file."""
@@ -903,6 +982,7 @@ class GcsJsonDb(BaseDb):
                 log_warning(f"No eval runs found with IDs: {eval_run_ids}")
         except Exception as e:
             log_warning(f"Error deleting eval runs {eval_run_ids}: {e}")
+            raise e
 
     def get_eval_run(
         self, eval_run_id: str, deserialize: Optional[bool] = True
@@ -920,7 +1000,7 @@ class GcsJsonDb(BaseDb):
             return None
         except Exception as e:
             log_warning(f"Exception getting eval run {eval_run_id}: {e}")
-            return None
+            raise e
 
     def get_eval_runs(
         self,
@@ -986,7 +1066,7 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Exception getting eval runs: {e}")
-            return [] if deserialize else ([], 0)
+            raise e
 
     def rename_eval_run(
         self, eval_run_id: str, name: str, deserialize: Optional[bool] = True
@@ -1009,4 +1089,4 @@ class GcsJsonDb(BaseDb):
             return None
         except Exception as e:
             log_warning(f"Error renaming eval run {eval_run_id}: {e}")
-            return None
+            raise e