agno 2.0.7__py3-none-any.whl → 2.0.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/agent/agent.py +83 -51
- agno/db/base.py +14 -0
- agno/db/dynamo/dynamo.py +107 -27
- agno/db/firestore/firestore.py +109 -33
- agno/db/gcs_json/gcs_json_db.py +100 -20
- agno/db/in_memory/in_memory_db.py +95 -20
- agno/db/json/json_db.py +101 -21
- agno/db/migrations/v1_to_v2.py +322 -47
- agno/db/mongo/mongo.py +251 -26
- agno/db/mysql/mysql.py +307 -6
- agno/db/postgres/postgres.py +279 -33
- agno/db/redis/redis.py +99 -22
- agno/db/singlestore/singlestore.py +319 -38
- agno/db/sqlite/sqlite.py +339 -23
- agno/knowledge/embedder/sentence_transformer.py +3 -3
- agno/knowledge/knowledge.py +152 -31
- agno/knowledge/types.py +8 -0
- agno/models/anthropic/claude.py +0 -20
- agno/models/cometapi/__init__.py +5 -0
- agno/models/cometapi/cometapi.py +57 -0
- agno/models/google/gemini.py +4 -8
- agno/models/huggingface/huggingface.py +2 -1
- agno/models/ollama/chat.py +52 -3
- agno/models/openai/chat.py +9 -7
- agno/models/openai/responses.py +21 -17
- agno/os/interfaces/agui/agui.py +2 -2
- agno/os/interfaces/agui/utils.py +81 -18
- agno/os/interfaces/base.py +2 -0
- agno/os/interfaces/slack/router.py +50 -10
- agno/os/interfaces/slack/slack.py +6 -4
- agno/os/interfaces/whatsapp/router.py +7 -4
- agno/os/interfaces/whatsapp/whatsapp.py +2 -2
- agno/os/router.py +18 -0
- agno/os/utils.py +10 -2
- agno/reasoning/azure_ai_foundry.py +2 -2
- agno/reasoning/deepseek.py +2 -2
- agno/reasoning/default.py +3 -1
- agno/reasoning/groq.py +2 -2
- agno/reasoning/ollama.py +2 -2
- agno/reasoning/openai.py +2 -2
- agno/run/base.py +15 -2
- agno/session/agent.py +8 -5
- agno/session/team.py +14 -10
- agno/team/team.py +218 -111
- agno/tools/function.py +43 -4
- agno/tools/mcp.py +60 -37
- agno/tools/mcp_toolbox.py +284 -0
- agno/tools/scrapegraph.py +58 -31
- agno/tools/whatsapp.py +1 -1
- agno/utils/gemini.py +147 -19
- agno/utils/models/claude.py +9 -0
- agno/utils/print_response/agent.py +18 -2
- agno/utils/print_response/team.py +22 -6
- agno/utils/reasoning.py +22 -1
- agno/utils/string.py +9 -0
- agno/vectordb/base.py +2 -2
- agno/vectordb/langchaindb/langchaindb.py +5 -7
- agno/vectordb/llamaindex/llamaindexdb.py +25 -6
- agno/workflow/workflow.py +30 -15
- {agno-2.0.7.dist-info → agno-2.0.9.dist-info}/METADATA +4 -1
- {agno-2.0.7.dist-info → agno-2.0.9.dist-info}/RECORD +64 -61
- {agno-2.0.7.dist-info → agno-2.0.9.dist-info}/WHEEL +0 -0
- {agno-2.0.7.dist-info → agno-2.0.9.dist-info}/licenses/LICENSE +0 -0
- {agno-2.0.7.dist-info → agno-2.0.9.dist-info}/top_level.txt +0 -0
agno/db/gcs_json/gcs_json_db.py
CHANGED
@@ -162,7 +162,7 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Error deleting session: {e}")
-
+            raise e
 
     def delete_sessions(self, session_ids: List[str]) -> None:
         """Delete multiple sessions from the GCS JSON file.
@@ -181,6 +181,7 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Error deleting sessions: {e}")
+            raise e
 
     def get_session(
         self,
@@ -233,7 +234,7 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Exception reading from session file: {e}")
-
+            raise e
 
     def get_sessions(
         self,
@@ -328,7 +329,7 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Exception reading from session file: {e}")
-
+            raise e
 
     def rename_session(
         self, session_id: str, session_type: SessionType, session_name: str, deserialize: Optional[bool] = True
@@ -363,7 +364,7 @@ class GcsJsonDb(BaseDb):
             return None
         except Exception as e:
             log_warning(f"Exception renaming session: {e}")
-
+            raise e
 
     def upsert_session(
         self, session: Session, deserialize: Optional[bool] = True
@@ -408,7 +409,44 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Exception upserting session: {e}")
-
+            raise e
+
+    def upsert_sessions(
+        self, sessions: List[Session], deserialize: Optional[bool] = True
+    ) -> List[Union[Session, Dict[str, Any]]]:
+        """
+        Bulk upsert multiple sessions for improved performance on large datasets.
+
+        Args:
+            sessions (List[Session]): List of sessions to upsert.
+            deserialize (Optional[bool]): Whether to deserialize the sessions. Defaults to True.
+
+        Returns:
+            List[Union[Session, Dict[str, Any]]]: List of upserted sessions.
+
+        Raises:
+            Exception: If an error occurs during bulk upsert.
+        """
+        if not sessions:
+            return []
+
+        try:
+            log_info(
+                f"GcsJsonDb doesn't support efficient bulk operations, falling back to individual upserts for {len(sessions)} sessions"
+            )
+
+            # Fall back to individual upserts
+            results = []
+            for session in sessions:
+                if session is not None:
+                    result = self.upsert_session(session, deserialize=deserialize)
+                    if result is not None:
+                        results.append(result)
+            return results
+
+        except Exception as e:
+            log_error(f"Exception during bulk session upsert: {e}")
+            return []
 
     def _matches_session_key(self, existing_session: Dict[str, Any], session: Session) -> bool:
         """Check if existing session matches the key for the session type."""
@@ -437,6 +475,7 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Error deleting user memory: {e}")
+            raise e
 
     def delete_user_memories(self, memory_ids: List[str]) -> None:
         """Delete multiple user memories from the GCS JSON file."""
@@ -447,6 +486,7 @@ class GcsJsonDb(BaseDb):
             log_debug(f"Successfully deleted user memories with ids: {memory_ids}")
         except Exception as e:
             log_warning(f"Error deleting user memories: {e}")
+            raise e
 
     def get_all_memory_topics(self) -> List[str]:
         """Get all memory topics from the GCS JSON file."""
@@ -461,7 +501,7 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Exception reading from memory file: {e}")
-
+            raise e
 
     def get_user_memory(
         self, memory_id: str, deserialize: Optional[bool] = True
@@ -480,7 +520,7 @@ class GcsJsonDb(BaseDb):
             return None
         except Exception as e:
             log_warning(f"Exception reading from memory file: {e}")
-
+            raise e
 
     def get_user_memories(
         self,
@@ -538,7 +578,7 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Exception reading from memory file: {e}")
-
+            raise e
 
     def get_user_memory_stats(
         self, limit: Optional[int] = None, page: Optional[int] = None
@@ -574,7 +614,7 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Exception getting user memory stats: {e}")
-
+            raise e
 
     def upsert_user_memory(
         self, memory: UserMemory, deserialize: Optional[bool] = True
@@ -608,7 +648,43 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception upserting user memory: {e}")
-
+            raise e
+
+    def upsert_memories(
+        self, memories: List[UserMemory], deserialize: Optional[bool] = True
+    ) -> List[Union[UserMemory, Dict[str, Any]]]:
+        """
+        Bulk upsert multiple user memories for improved performance on large datasets.
+
+        Args:
+            memories (List[UserMemory]): List of memories to upsert.
+            deserialize (Optional[bool]): Whether to deserialize the memories. Defaults to True.
+
+        Returns:
+            List[Union[UserMemory, Dict[str, Any]]]: List of upserted memories.
+
+        Raises:
+            Exception: If an error occurs during bulk upsert.
+        """
+        if not memories:
+            return []
+
+        try:
+            log_info(
+                f"GcsJsonDb doesn't support efficient bulk operations, falling back to individual upserts for {len(memories)} memories"
+            )
+            # Fall back to individual upserts
+            results = []
+            for memory in memories:
+                if memory is not None:
+                    result = self.upsert_user_memory(memory, deserialize=deserialize)
+                    if result is not None:
+                        results.append(result)
+            return results
+
+        except Exception as e:
+            log_error(f"Exception during bulk memory upsert: {e}")
+            return []
 
     def clear_memories(self) -> None:
         """Delete all memories from the database.
@@ -622,6 +698,7 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Exception deleting all memories: {e}")
+            raise e
 
     # -- Metrics methods --
     def calculate_metrics(self) -> Optional[list[dict]]:
@@ -688,7 +765,7 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Exception refreshing metrics: {e}")
-
+            raise e
 
     def _get_metrics_calculation_starting_date(self, metrics: List[Dict[str, Any]]) -> Optional[date]:
         """Get the first date for which metrics calculation is needed."""
@@ -743,7 +820,7 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Exception reading sessions for metrics: {e}")
-
+            raise e
 
     def get_metrics(
         self,
@@ -775,7 +852,7 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Exception getting metrics: {e}")
-
+            raise e
 
     # -- Knowledge methods --
     def delete_knowledge_content(self, id: str):
@@ -786,6 +863,7 @@ class GcsJsonDb(BaseDb):
             self._write_json_file(self.knowledge_table_name, knowledge_items)
         except Exception as e:
             log_warning(f"Error deleting knowledge content: {e}")
+            raise e
 
     def get_knowledge_content(self, id: str) -> Optional[KnowledgeRow]:
         """Get knowledge content by ID."""
@@ -799,7 +877,7 @@ class GcsJsonDb(BaseDb):
             return None
         except Exception as e:
             log_warning(f"Error getting knowledge content: {e}")
-
+            raise e
 
     def get_knowledge_contents(
         self,
@@ -828,7 +906,7 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Error getting knowledge contents: {e}")
-
+            raise e
 
     def upsert_knowledge_content(self, knowledge_row: KnowledgeRow):
         """Upsert knowledge content in the GCS JSON file."""
@@ -852,7 +930,7 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Error upserting knowledge row: {e}")
-
+            raise e
 
     # -- Eval methods --
     def create_eval_run(self, eval_run: EvalRunRecord) -> Optional[EvalRunRecord]:
@@ -871,7 +949,7 @@ class GcsJsonDb(BaseDb):
             return eval_run
         except Exception as e:
             log_warning(f"Error creating eval run: {e}")
-
+            raise e
 
     def delete_eval_run(self, eval_run_id: str) -> None:
         """Delete an eval run from the GCS JSON file."""
@@ -887,6 +965,7 @@ class GcsJsonDb(BaseDb):
                 log_warning(f"No eval run found with ID: {eval_run_id}")
         except Exception as e:
             log_warning(f"Error deleting eval run {eval_run_id}: {e}")
+            raise e
 
     def delete_eval_runs(self, eval_run_ids: List[str]) -> None:
         """Delete multiple eval runs from the GCS JSON file."""
@@ -903,6 +982,7 @@ class GcsJsonDb(BaseDb):
                 log_warning(f"No eval runs found with IDs: {eval_run_ids}")
         except Exception as e:
             log_warning(f"Error deleting eval runs {eval_run_ids}: {e}")
+            raise e
 
     def get_eval_run(
         self, eval_run_id: str, deserialize: Optional[bool] = True
@@ -920,7 +1000,7 @@ class GcsJsonDb(BaseDb):
             return None
         except Exception as e:
             log_warning(f"Exception getting eval run {eval_run_id}: {e}")
-
+            raise e
 
     def get_eval_runs(
         self,
@@ -986,7 +1066,7 @@ class GcsJsonDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Exception getting eval runs: {e}")
-
+            raise e
 
     def rename_eval_run(
         self, eval_run_id: str, name: str, deserialize: Optional[bool] = True
@@ -1009,4 +1089,4 @@ class GcsJsonDb(BaseDb):
             return None
         except Exception as e:
             log_warning(f"Error renaming eval run {eval_run_id}: {e}")
-
+            raise e
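The new upsert_sessions and upsert_memories methods above simply loop over the existing single-record upserts, so backends without native bulk support still expose the bulk API. A minimal, hypothetical usage sketch in Python follows; `db` and `sessions` are placeholders for an already-configured GcsJsonDb (or other BaseDb backend) and a list of Session objects, neither of which is part of this diff:

# Hypothetical caller code, not part of the diff. `db` is an already-constructed
# GcsJsonDb (or any other BaseDb backend) and `sessions` is a list of Session objects.

# Before 2.0.9, callers looped over the single-record method themselves:
for session in sessions:
    db.upsert_session(session, deserialize=True)

# From 2.0.9, the bulk method shown above wraps that loop; backends without native
# bulk support fall back to per-record upserts and return the upserted records:
upserted = db.upsert_sessions(sessions, deserialize=True)
print(f"Upserted {len(upserted)} sessions")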
agno/db/in_memory/in_memory_db.py
CHANGED
@@ -56,7 +56,7 @@ class InMemoryDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error deleting session: {e}")
-
+            raise e
 
     def delete_sessions(self, session_ids: List[str]) -> None:
         """Delete multiple sessions from in-memory storage.
@@ -73,6 +73,7 @@ class InMemoryDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error deleting sessions: {e}")
+            raise e
 
     def get_session(
         self,
@@ -123,7 +124,7 @@ class InMemoryDb(BaseDb):
 
             traceback.print_exc()
             log_error(f"Exception reading session: {e}")
-
+            raise e
 
     def get_sessions(
         self,
@@ -215,7 +216,7 @@ class InMemoryDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception reading sessions: {e}")
-
+            raise e
 
     def rename_session(
         self, session_id: str, session_type: SessionType, session_name: str, deserialize: Optional[bool] = True
@@ -246,7 +247,7 @@ class InMemoryDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception renaming session: {e}")
-
+            raise e
 
     def upsert_session(
         self, session: Session, deserialize: Optional[bool] = True
@@ -287,7 +288,7 @@ class InMemoryDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception upserting session: {e}")
-
+            raise e
 
     def _matches_session_key(self, existing_session: Dict[str, Any], session: Session) -> bool:
         """Check if existing session matches the key for the session type."""
@@ -299,6 +300,40 @@ class InMemoryDb(BaseDb):
             return existing_session.get("workflow_id") == session.workflow_id
         return False
 
+    def upsert_sessions(
+        self, sessions: List[Session], deserialize: Optional[bool] = True
+    ) -> List[Union[Session, Dict[str, Any]]]:
+        """
+        Bulk upsert multiple sessions for improved performance on large datasets.
+
+        Args:
+            sessions (List[Session]): List of sessions to upsert.
+            deserialize (Optional[bool]): Whether to deserialize the sessions. Defaults to True.
+
+        Returns:
+            List[Union[Session, Dict[str, Any]]]: List of upserted sessions.
+
+        Raises:
+            Exception: If an error occurs during bulk upsert.
+        """
+        if not sessions:
+            return []
+
+        try:
+            log_info(f"In-memory database: processing {len(sessions)} sessions with individual upsert operations")
+
+            results = []
+            for session in sessions:
+                if session is not None:
+                    result = self.upsert_session(session, deserialize=deserialize)
+                    if result is not None:
+                        results.append(result)
+            return results
+
+        except Exception as e:
+            log_error(f"Exception during bulk session upsert: {e}")
+            return []
+
     # -- Memory methods --
     def delete_user_memory(self, memory_id: str):
         try:
@@ -312,6 +347,7 @@ class InMemoryDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error deleting memory: {e}")
+            raise e
 
     def delete_user_memories(self, memory_ids: List[str]) -> None:
         """Delete multiple user memories from in-memory storage."""
@@ -321,6 +357,7 @@ class InMemoryDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error deleting memories: {e}")
+            raise e
 
     def get_all_memory_topics(self) -> List[str]:
         try:
@@ -333,7 +370,7 @@ class InMemoryDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception reading from memory storage: {e}")
-
+            raise e
 
     def get_user_memory(
         self, memory_id: str, deserialize: Optional[bool] = True
@@ -349,7 +386,7 @@ class InMemoryDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception reading from memory storage: {e}")
-
+            raise e
 
     def get_user_memories(
         self,
@@ -404,7 +441,7 @@ class InMemoryDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception reading from memory storage: {e}")
-
+            raise e
 
     def get_user_memory_stats(
         self, limit: Optional[int] = None, page: Optional[int] = None
@@ -439,7 +476,7 @@ class InMemoryDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception getting user memory stats: {e}")
-
+            raise e
 
     def upsert_user_memory(
         self, memory: UserMemory, deserialize: Optional[bool] = True
@@ -468,7 +505,42 @@ class InMemoryDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Exception upserting user memory: {e}")
-
+            raise e
+
+    def upsert_memories(
+        self, memories: List[UserMemory], deserialize: Optional[bool] = True
+    ) -> List[Union[UserMemory, Dict[str, Any]]]:
+        """
+        Bulk upsert multiple user memories for improved performance on large datasets.
+
+        Args:
+            memories (List[UserMemory]): List of memories to upsert.
+            deserialize (Optional[bool]): Whether to deserialize the memories. Defaults to True.
+
+        Returns:
+            List[Union[UserMemory, Dict[str, Any]]]: List of upserted memories.
+
+        Raises:
+            Exception: If an error occurs during bulk upsert.
+        """
+        if not memories:
+            return []
+
+        try:
+            log_info(f"In-memory database: processing {len(memories)} memories with individual upsert operations")
+            # For in-memory database, individual upserts are actually efficient
+            # since we're just manipulating Python lists and dictionaries
+            results = []
+            for memory in memories:
+                if memory is not None:
+                    result = self.upsert_user_memory(memory, deserialize=deserialize)
+                    if result is not None:
+                        results.append(result)
+            return results
+
+        except Exception as e:
+            log_error(f"Exception during bulk memory upsert: {e}")
+            return []
 
     def clear_memories(self) -> None:
         """Delete all memories.
@@ -481,6 +553,7 @@ class InMemoryDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Exception deleting all memories: {e}")
+            raise e
 
     # -- Metrics methods --
     def calculate_metrics(self) -> Optional[list[dict]]:
@@ -544,7 +617,7 @@ class InMemoryDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Exception refreshing metrics: {e}")
-
+            raise e
 
     def _get_metrics_calculation_starting_date(self, metrics: List[Dict[str, Any]]) -> Optional[date]:
         """Get the first date for which metrics calculation is needed."""
@@ -595,7 +668,7 @@ class InMemoryDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception reading sessions for metrics: {e}")
-
+            raise e
 
     def get_metrics(
         self,
@@ -625,7 +698,7 @@ class InMemoryDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception getting metrics: {e}")
-
+            raise e
 
     # -- Knowledge methods --
 
@@ -643,6 +716,7 @@ class InMemoryDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error deleting knowledge content: {e}")
+            raise e
 
     def get_knowledge_content(self, id: str) -> Optional[KnowledgeRow]:
         """Get a knowledge row from in-memory storage.
@@ -665,7 +739,7 @@ class InMemoryDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error getting knowledge content: {e}")
-
+            raise e
 
     def get_knowledge_contents(
         self,
@@ -707,7 +781,7 @@ class InMemoryDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error getting knowledge contents: {e}")
-
+            raise e
 
     def upsert_knowledge_content(self, knowledge_row: KnowledgeRow):
         """Upsert knowledge content.
@@ -739,7 +813,7 @@ class InMemoryDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error upserting knowledge row: {e}")
-
+            raise e
 
     # -- Eval methods --
 
@@ -759,7 +833,7 @@ class InMemoryDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error creating eval run: {e}")
-
+            raise e
 
     def delete_eval_runs(self, eval_run_ids: List[str]) -> None:
         """Delete multiple eval runs from in-memory storage."""
@@ -775,6 +849,7 @@ class InMemoryDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error deleting eval runs {eval_run_ids}: {e}")
+            raise e
 
     def get_eval_run(
         self, eval_run_id: str, deserialize: Optional[bool] = True
@@ -791,7 +866,7 @@ class InMemoryDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception getting eval run {eval_run_id}: {e}")
-
+            raise e
 
     def get_eval_runs(
         self,
@@ -855,7 +930,7 @@ class InMemoryDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception getting eval runs: {e}")
-
+            raise e
 
     def rename_eval_run(
         self, eval_run_id: str, name: str, deserialize: Optional[bool] = True
@@ -879,4 +954,4 @@ class InMemoryDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error renaming eval run {eval_run_id}: {e}")
-
+            raise e