agno 2.1.4__py3-none-any.whl → 2.1.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (88)
  1. agno/agent/agent.py +1767 -535
  2. agno/db/async_postgres/__init__.py +3 -0
  3. agno/db/async_postgres/async_postgres.py +1668 -0
  4. agno/db/async_postgres/schemas.py +124 -0
  5. agno/db/async_postgres/utils.py +289 -0
  6. agno/db/base.py +237 -2
  7. agno/db/dynamo/dynamo.py +2 -2
  8. agno/db/firestore/firestore.py +2 -2
  9. agno/db/firestore/utils.py +4 -2
  10. agno/db/gcs_json/gcs_json_db.py +2 -2
  11. agno/db/in_memory/in_memory_db.py +2 -2
  12. agno/db/json/json_db.py +2 -2
  13. agno/db/migrations/v1_to_v2.py +30 -13
  14. agno/db/mongo/mongo.py +18 -6
  15. agno/db/mysql/mysql.py +35 -13
  16. agno/db/postgres/postgres.py +29 -6
  17. agno/db/redis/redis.py +2 -2
  18. agno/db/singlestore/singlestore.py +2 -2
  19. agno/db/sqlite/sqlite.py +34 -12
  20. agno/db/sqlite/utils.py +8 -3
  21. agno/eval/accuracy.py +50 -43
  22. agno/eval/performance.py +6 -3
  23. agno/eval/reliability.py +6 -3
  24. agno/eval/utils.py +33 -16
  25. agno/exceptions.py +8 -2
  26. agno/knowledge/knowledge.py +260 -46
  27. agno/knowledge/reader/pdf_reader.py +4 -6
  28. agno/knowledge/reader/reader_factory.py +2 -3
  29. agno/memory/manager.py +241 -33
  30. agno/models/anthropic/claude.py +37 -0
  31. agno/os/app.py +8 -7
  32. agno/os/interfaces/a2a/router.py +3 -5
  33. agno/os/interfaces/agui/router.py +4 -1
  34. agno/os/interfaces/agui/utils.py +27 -6
  35. agno/os/interfaces/slack/router.py +2 -4
  36. agno/os/mcp.py +98 -41
  37. agno/os/router.py +23 -0
  38. agno/os/routers/evals/evals.py +52 -20
  39. agno/os/routers/evals/utils.py +14 -14
  40. agno/os/routers/knowledge/knowledge.py +130 -9
  41. agno/os/routers/knowledge/schemas.py +57 -0
  42. agno/os/routers/memory/memory.py +116 -44
  43. agno/os/routers/metrics/metrics.py +16 -6
  44. agno/os/routers/session/session.py +65 -22
  45. agno/os/schema.py +36 -0
  46. agno/os/utils.py +67 -12
  47. agno/reasoning/anthropic.py +80 -0
  48. agno/reasoning/gemini.py +73 -0
  49. agno/reasoning/openai.py +5 -0
  50. agno/reasoning/vertexai.py +76 -0
  51. agno/session/workflow.py +3 -3
  52. agno/team/team.py +918 -175
  53. agno/tools/googlesheets.py +20 -5
  54. agno/tools/mcp_toolbox.py +3 -3
  55. agno/tools/scrapegraph.py +1 -1
  56. agno/utils/models/claude.py +3 -1
  57. agno/utils/streamlit.py +1 -1
  58. agno/vectordb/base.py +22 -1
  59. agno/vectordb/cassandra/cassandra.py +9 -0
  60. agno/vectordb/chroma/chromadb.py +26 -6
  61. agno/vectordb/clickhouse/clickhousedb.py +9 -1
  62. agno/vectordb/couchbase/couchbase.py +11 -0
  63. agno/vectordb/lancedb/lance_db.py +20 -0
  64. agno/vectordb/langchaindb/langchaindb.py +11 -0
  65. agno/vectordb/lightrag/lightrag.py +9 -0
  66. agno/vectordb/llamaindex/llamaindexdb.py +15 -1
  67. agno/vectordb/milvus/milvus.py +23 -0
  68. agno/vectordb/mongodb/mongodb.py +22 -0
  69. agno/vectordb/pgvector/pgvector.py +19 -0
  70. agno/vectordb/pineconedb/pineconedb.py +35 -4
  71. agno/vectordb/qdrant/qdrant.py +24 -0
  72. agno/vectordb/singlestore/singlestore.py +25 -17
  73. agno/vectordb/surrealdb/surrealdb.py +18 -1
  74. agno/vectordb/upstashdb/upstashdb.py +26 -1
  75. agno/vectordb/weaviate/weaviate.py +18 -0
  76. agno/workflow/condition.py +4 -0
  77. agno/workflow/loop.py +4 -0
  78. agno/workflow/parallel.py +4 -0
  79. agno/workflow/router.py +4 -0
  80. agno/workflow/step.py +22 -14
  81. agno/workflow/steps.py +4 -0
  82. agno/workflow/types.py +2 -2
  83. agno/workflow/workflow.py +328 -61
  84. {agno-2.1.4.dist-info → agno-2.1.5.dist-info}/METADATA +100 -41
  85. {agno-2.1.4.dist-info → agno-2.1.5.dist-info}/RECORD +88 -81
  86. {agno-2.1.4.dist-info → agno-2.1.5.dist-info}/WHEEL +0 -0
  87. {agno-2.1.4.dist-info → agno-2.1.5.dist-info}/licenses/LICENSE +0 -0
  88. {agno-2.1.4.dist-info → agno-2.1.5.dist-info}/top_level.txt +0 -0
agno/db/firestore/firestore.py CHANGED
@@ -558,7 +558,7 @@ class FirestoreDb(BaseDb):
  raise e
 
  def upsert_sessions(
- self, sessions: List[Session], deserialize: Optional[bool] = True
+ self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
  ) -> List[Union[Session, Dict[str, Any]]]:
  """
  Bulk upsert multiple sessions for improved performance on large datasets.
@@ -939,7 +939,7 @@ class FirestoreDb(BaseDb):
  raise e
 
  def upsert_memories(
- self, memories: List[UserMemory], deserialize: Optional[bool] = True
+ self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
  ) -> List[Union[UserMemory, Dict[str, Any]]]:
  """
  Bulk upsert multiple user memories for improved performance on large datasets.
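Every backend below receives the same change: the bulk `upsert_sessions` and `upsert_memories` methods gain a `preserve_updated_at` keyword (default `False`). A minimal usage sketch follows, assuming a `SqliteDb` constructed with a `db_file` argument and a hypothetical `load_backup_sessions()` helper; neither appears in this diff.

```python
# Hedged sketch: re-importing sessions without touching their timestamps.
from agno.db.sqlite.sqlite import SqliteDb

db = SqliteDb(db_file="tmp/agno.db")  # assumed constructor argument

sessions = load_backup_sessions()  # hypothetical helper returning List[Session]

# Default (preserve_updated_at=False): every upserted row is stamped with int(time.time()).
# With True: each Session's own updated_at is written back unchanged.
db.upsert_sessions(sessions, preserve_updated_at=True)
```

The flag exists mainly for the v1 → v2 migration further down, which passes `preserve_updated_at=True` so historical timestamps survive the move.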
agno/db/firestore/utils.py CHANGED
@@ -164,11 +164,13 @@ def calculate_date_metrics(date_to_process: date, sessions_data: dict) -> dict:
  if session.get("user_id"):
  all_user_ids.add(session["user_id"])
  runs = session.get("runs", []) or []
- metrics[runs_count_key] += len(runs)
 
- if runs := session.get("runs", []):
+ if runs:
  if isinstance(runs, str):
  runs = json.loads(runs)
+
+ metrics[runs_count_key] += len(runs)
+
  for run in runs:
  if model_id := run.get("model"):
  model_provider = run.get("model_provider", "")
agno/db/gcs_json/gcs_json_db.py CHANGED
@@ -412,7 +412,7 @@ class GcsJsonDb(BaseDb):
  raise e
 
  def upsert_sessions(
- self, sessions: List[Session], deserialize: Optional[bool] = True
+ self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
  ) -> List[Union[Session, Dict[str, Any]]]:
  """
  Bulk upsert multiple sessions for improved performance on large datasets.
@@ -704,7 +704,7 @@ class GcsJsonDb(BaseDb):
  raise e
 
  def upsert_memories(
- self, memories: List[UserMemory], deserialize: Optional[bool] = True
+ self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
  ) -> List[Union[UserMemory, Dict[str, Any]]]:
  """
  Bulk upsert multiple user memories for improved performance on large datasets.
agno/db/in_memory/in_memory_db.py CHANGED
@@ -309,7 +309,7 @@ class InMemoryDb(BaseDb):
  return False
 
  def upsert_sessions(
- self, sessions: List[Session], deserialize: Optional[bool] = True
+ self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
  ) -> List[Union[Session, Dict[str, Any]]]:
  """
  Bulk upsert multiple sessions for improved performance on large datasets.
@@ -590,7 +590,7 @@ class InMemoryDb(BaseDb):
  raise e
 
  def upsert_memories(
- self, memories: List[UserMemory], deserialize: Optional[bool] = True
+ self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
  ) -> List[Union[UserMemory, Dict[str, Any]]]:
  """
  Bulk upsert multiple user memories for improved performance on large datasets.
agno/db/json/json_db.py CHANGED
@@ -398,7 +398,7 @@ class JsonDb(BaseDb):
  raise e
 
  def upsert_sessions(
- self, sessions: List[Session], deserialize: Optional[bool] = True
+ self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
  ) -> List[Union[Session, Dict[str, Any]]]:
  """
  Bulk upsert multiple sessions for improved performance on large datasets.
@@ -703,7 +703,7 @@ class JsonDb(BaseDb):
  raise e
 
  def upsert_memories(
- self, memories: List[UserMemory], deserialize: Optional[bool] = True
+ self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
  ) -> List[Union[UserMemory, Dict[str, Any]]]:
  """
  Bulk upsert multiple user memories for improved performance on large datasets.
agno/db/migrations/v1_to_v2.py CHANGED
@@ -1,15 +1,12 @@
  """Migration utility to migrate your Agno tables from v1 to v2"""
 
  import json
- from typing import Any, Dict, List, Optional, Union
+ from typing import Any, Dict, List, Optional, Union, cast
 
  from sqlalchemy import text
 
- from agno.db.mongo.mongo import MongoDb
- from agno.db.mysql.mysql import MySQLDb
- from agno.db.postgres.postgres import PostgresDb
+ from agno.db.base import BaseDb
  from agno.db.schemas.memory import UserMemory
- from agno.db.sqlite.sqlite import SqliteDb
  from agno.session import AgentSession, TeamSession, WorkflowSession
  from agno.utils.log import log_error, log_info, log_warning
 
@@ -315,7 +312,7 @@ def convert_v1_fields_to_v2(data: Dict[str, Any]) -> Dict[str, Any]:
 
 
  def migrate(
- db: Union[PostgresDb, MySQLDb, SqliteDb, MongoDb],
+ db: BaseDb,
  v1_db_schema: str,
  agent_sessions_table_name: Optional[str] = None,
  team_sessions_table_name: Optional[str] = None,
@@ -372,7 +369,7 @@ def migrate(
 
 
  def migrate_table_in_batches(
- db: Union[PostgresDb, MySQLDb, SqliteDb, MongoDb],
+ db: BaseDb,
  v1_db_schema: str,
  v1_table_name: str,
  v1_table_type: str,
@@ -410,7 +407,7 @@ def migrate_table_in_batches(
  if hasattr(db, "Session"):
  db.Session.remove()  # type: ignore
 
- db.upsert_sessions(sessions)  # type: ignore
+ db.upsert_sessions(sessions, preserve_updated_at=True)  # type: ignore
  total_migrated += len(sessions)
  log_info(f"Bulk upserted {len(sessions)} sessions in batch {batch_count}")
 
@@ -420,7 +417,7 @@ def migrate_table_in_batches(
  if hasattr(db, "Session"):
  db.Session.remove()  # type: ignore
 
- db.upsert_memories(memories)
+ db.upsert_memories(memories, preserve_updated_at=True)
  total_migrated += len(memories)
  log_info(f"Bulk upserted {len(memories)} memories in batch {batch_count}")
 
@@ -429,12 +426,14 @@ def migrate_table_in_batches(
  log_info(f"✅ Migration completed for table {v1_table_name}: {total_migrated} total records migrated")
 
 
- def get_table_content_in_batches(
- db: Union[PostgresDb, MySQLDb, SqliteDb, MongoDb], db_schema: str, table_name: str, batch_size: int = 5000
- ):
+ def get_table_content_in_batches(db: BaseDb, db_schema: str, table_name: str, batch_size: int = 5000):
  """Get table content in batches to avoid memory issues with large tables"""
  try:
- if isinstance(db, MongoDb):
+ if type(db).__name__ == "MongoDb":
+ from agno.db.mongo.mongo import MongoDb
+
+ db = cast(MongoDb, db)
+
  # MongoDB implementation with cursor and batching
  collection = db.database[table_name]
  cursor = collection.find({}).batch_size(batch_size)
@@ -455,6 +454,24 @@ def get_table_content_in_batches(
  yield batch
  else:
  # SQL database implementations (PostgresDb, MySQLDb, SqliteDb)
+ if type(db).__name__ == "PostgresDb":
+ from agno.db.postgres.postgres import PostgresDb
+
+ db = cast(PostgresDb, db)
+
+ elif type(db).__name__ == "MySQLDb":
+ from agno.db.mysql.mysql import MySQLDb
+
+ db = cast(MySQLDb, db)
+
+ elif type(db).__name__ == "SqliteDb":
+ from agno.db.sqlite.sqlite import SqliteDb
+
+ db = cast(SqliteDb, db)
+
+ else:
+ raise ValueError(f"Invalid database type: {type(db).__name__}")
+
  offset = 0
  while True:
  # Create a new session for each batch to avoid transaction conflicts
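With `migrate` and `get_table_content_in_batches` now typed against `BaseDb` and resolving the concrete backend lazily by class name, the migration can be driven by any supported database object. A hedged sketch of a call site follows, using only the parameters visible in this hunk; the schema and table names are placeholders and the remaining arguments are assumed to keep their defaults.

```python
# Hedged sketch: running the v1 -> v2 migration against a Postgres database.
from agno.db.migrations.v1_to_v2 import migrate
from agno.db.postgres.postgres import PostgresDb

db = PostgresDb(db_url="postgresql+psycopg://user:pass@localhost:5432/agno")  # assumed constructor argument

migrate(
    db=db,
    v1_db_schema="ai",                           # placeholder v1 schema name
    agent_sessions_table_name="agent_sessions",  # placeholder v1 table names
    team_sessions_table_name="team_sessions",
)

# Internally each batch is written with upsert_sessions(..., preserve_updated_at=True)
# and upsert_memories(..., preserve_updated_at=True), so migrated rows keep the
# updated_at values they had in the v1 tables.
```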
agno/db/mongo/mongo.py CHANGED
@@ -588,7 +588,7 @@ class MongoDb(BaseDb):
  raise e
 
  def upsert_sessions(
- self, sessions: List[Session], deserialize: Optional[bool] = True
+ self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
  ) -> List[Union[Session, Dict[str, Any]]]:
  """
  Bulk upsert multiple sessions for improved performance on large datasets.
@@ -596,6 +596,7 @@ class MongoDb(BaseDb):
  Args:
  sessions (List[Session]): List of sessions to upsert.
  deserialize (Optional[bool]): Whether to deserialize the sessions. Defaults to True.
+ preserve_updated_at (bool): If True, preserve the updated_at from the session object.
 
  Returns:
  List[Union[Session, Dict[str, Any]]]: List of upserted sessions.
@@ -629,6 +630,13 @@ class MongoDb(BaseDb):
 
  session_dict = session.to_dict()
 
+ # Use preserved updated_at if flag is set and value exists, otherwise use current time
+ updated_at = (
+ session_dict.get("updated_at")
+ if preserve_updated_at and session_dict.get("updated_at")
+ else int(time.time())
+ )
+
  if isinstance(session, AgentSession):
  record = {
  "session_id": session_dict.get("session_id"),
@@ -641,7 +649,7 @@ class MongoDb(BaseDb):
  "summary": session_dict.get("summary"),
  "metadata": session_dict.get("metadata"),
  "created_at": session_dict.get("created_at"),
- "updated_at": int(time.time()),
+ "updated_at": updated_at,
  }
  elif isinstance(session, TeamSession):
  record = {
@@ -655,7 +663,7 @@ class MongoDb(BaseDb):
  "summary": session_dict.get("summary"),
  "metadata": session_dict.get("metadata"),
  "created_at": session_dict.get("created_at"),
- "updated_at": int(time.time()),
+ "updated_at": updated_at,
  }
  elif isinstance(session, WorkflowSession):
  record = {
@@ -669,7 +677,7 @@ class MongoDb(BaseDb):
  "summary": session_dict.get("summary"),
  "metadata": session_dict.get("metadata"),
  "created_at": session_dict.get("created_at"),
- "updated_at": int(time.time()),
+ "updated_at": updated_at,
  }
  else:
  continue
@@ -1044,7 +1052,7 @@ class MongoDb(BaseDb):
  raise e
 
  def upsert_memories(
- self, memories: List[UserMemory], deserialize: Optional[bool] = True
+ self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
  ) -> List[Union[UserMemory, Dict[str, Any]]]:
  """
  Bulk upsert multiple user memories for improved performance on large datasets.
@@ -1079,6 +1087,7 @@ class MongoDb(BaseDb):
  operations = []
  results: List[Union[UserMemory, Dict[str, Any]]] = []
 
+ current_time = int(time.time())
  for memory in memories:
  if memory is None:
  continue
@@ -1086,6 +1095,9 @@ class MongoDb(BaseDb):
  if memory.memory_id is None:
  memory.memory_id = str(uuid4())
 
+ # Use preserved updated_at if flag is set and value exists, otherwise use current time
+ updated_at = memory.updated_at if preserve_updated_at and memory.updated_at else current_time
+
  record = {
  "user_id": memory.user_id,
  "agent_id": memory.agent_id,
@@ -1093,7 +1105,7 @@ class MongoDb(BaseDb):
  "memory_id": memory.memory_id,
  "memory": memory.memory,
  "topics": memory.topics,
- "updated_at": int(time.time()),
+ "updated_at": updated_at,
  }
 
  operations.append(ReplaceOne(filter={"memory_id": memory.memory_id}, replacement=record, upsert=True))
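For memories the same decision is made per record: `memory.updated_at` is kept when `preserve_updated_at` is set and the value exists, otherwise the shared `current_time` is used. A hedged sketch of the call, with an assumed `MongoDb` constructor and a `UserMemory` built from the field names written in the hunk above (the constructor signature is assumed):

```python
# Hedged sketch: bulk-writing memories while keeping their original timestamps.
from agno.db.mongo.mongo import MongoDb
from agno.db.schemas.memory import UserMemory

db = MongoDb(db_url="mongodb://localhost:27017", db_name="agno")  # assumed constructor arguments

memories = [
    UserMemory(  # field names from the records above; constructor signature assumed
        memory="Prefers concise answers",
        user_id="user-1",
        topics=["preferences"],
        updated_at=1_700_000_000,
    ),
]

# preserve_updated_at=True writes 1_700_000_000 as-is; the default would stamp
# the record with int(time.time()) at upsert time.
db.upsert_memories(memories, preserve_updated_at=True)
```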
agno/db/mysql/mysql.py CHANGED
@@ -706,7 +706,7 @@ class MySQLDb(BaseDb):
  return None
 
  def upsert_sessions(
- self, sessions: List[Session], deserialize: Optional[bool] = True
+ self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
  ) -> List[Union[Session, Dict[str, Any]]]:
  """
  Bulk upsert multiple sessions for improved performance on large datasets.
@@ -714,6 +714,7 @@ class MySQLDb(BaseDb):
  Args:
  sessions (List[Session]): List of sessions to upsert.
  deserialize (Optional[bool]): Whether to deserialize the sessions. Defaults to True.
+ preserve_updated_at (bool): If True, preserve the updated_at from the session object.
 
  Returns:
  List[Union[Session, Dict[str, Any]]]: List of upserted sessions.
@@ -758,6 +759,12 @@ class MySQLDb(BaseDb):
  agent_data = []
  for session in agent_sessions:
  session_dict = session.to_dict()
+ # Use preserved updated_at if flag is set and value exists, otherwise use current time
+ updated_at = (
+ session_dict.get("updated_at")
+ if preserve_updated_at and session_dict.get("updated_at")
+ else int(time.time())
+ )
  agent_data.append(
  {
  "session_id": session_dict.get("session_id"),
@@ -770,7 +777,7 @@ class MySQLDb(BaseDb):
  "summary": session_dict.get("summary"),
  "metadata": session_dict.get("metadata"),
  "created_at": session_dict.get("created_at"),
- "updated_at": session_dict.get("created_at"),
+ "updated_at": updated_at,
  }
  )
 
@@ -784,7 +791,7 @@ class MySQLDb(BaseDb):
  summary=stmt.inserted.summary,
  metadata=stmt.inserted.metadata,
  runs=stmt.inserted.runs,
- updated_at=int(time.time()),
+ updated_at=stmt.inserted.updated_at,
  )
  sess.execute(stmt, agent_data)
 
@@ -808,6 +815,12 @@ class MySQLDb(BaseDb):
  team_data = []
  for session in team_sessions:
  session_dict = session.to_dict()
+ # Use preserved updated_at if flag is set and value exists, otherwise use current time
+ updated_at = (
+ session_dict.get("updated_at")
+ if preserve_updated_at and session_dict.get("updated_at")
+ else int(time.time())
+ )
  team_data.append(
  {
  "session_id": session_dict.get("session_id"),
@@ -820,7 +833,7 @@ class MySQLDb(BaseDb):
  "summary": session_dict.get("summary"),
  "metadata": session_dict.get("metadata"),
  "created_at": session_dict.get("created_at"),
- "updated_at": session_dict.get("created_at"),
+ "updated_at": updated_at,
  }
  )
 
@@ -834,7 +847,7 @@ class MySQLDb(BaseDb):
  summary=stmt.inserted.summary,
  metadata=stmt.inserted.metadata,
  runs=stmt.inserted.runs,
- updated_at=int(time.time()),
+ updated_at=stmt.inserted.updated_at,
  )
  sess.execute(stmt, team_data)
 
@@ -858,6 +871,12 @@ class MySQLDb(BaseDb):
  workflow_data = []
  for session in workflow_sessions:
  session_dict = session.to_dict()
+ # Use preserved updated_at if flag is set and value exists, otherwise use current time
+ updated_at = (
+ session_dict.get("updated_at")
+ if preserve_updated_at and session_dict.get("updated_at")
+ else int(time.time())
+ )
  workflow_data.append(
  {
  "session_id": session_dict.get("session_id"),
@@ -870,7 +889,7 @@ class MySQLDb(BaseDb):
  "summary": session_dict.get("summary"),
  "metadata": session_dict.get("metadata"),
  "created_at": session_dict.get("created_at"),
- "updated_at": session_dict.get("created_at"),
+ "updated_at": updated_at,
  }
  )
 
@@ -884,7 +903,7 @@ class MySQLDb(BaseDb):
  summary=stmt.inserted.summary,
  metadata=stmt.inserted.metadata,
  runs=stmt.inserted.runs,
- updated_at=int(time.time()),
+ updated_at=stmt.inserted.updated_at,
  )
  sess.execute(stmt, workflow_data)
 
@@ -1281,7 +1300,7 @@ class MySQLDb(BaseDb):
  return None
 
  def upsert_memories(
- self, memories: List[UserMemory], deserialize: Optional[bool] = True
+ self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
  ) -> List[Union[UserMemory, Dict[str, Any]]]:
  """
  Bulk upsert multiple user memories for improved performance on large datasets.
@@ -1313,10 +1332,13 @@ class MySQLDb(BaseDb):
 
  # Prepare bulk data
  bulk_data = []
+ current_time = int(time.time())
  for memory in memories:
  if memory.memory_id is None:
  memory.memory_id = str(uuid4())
 
+ # Use preserved updated_at if flag is set and value exists, otherwise use current time
+ updated_at = memory.updated_at if preserve_updated_at and memory.updated_at else current_time
  bulk_data.append(
  {
  "memory_id": memory.memory_id,
@@ -1326,7 +1348,7 @@ class MySQLDb(BaseDb):
  "agent_id": memory.agent_id,
  "team_id": memory.team_id,
  "topics": memory.topics,
- "updated_at": int(time.time()),
+ "updated_at": updated_at,
  }
  )
 
@@ -1341,7 +1363,7 @@ class MySQLDb(BaseDb):
  input=stmt.inserted.input,
  agent_id=stmt.inserted.agent_id,
  team_id=stmt.inserted.team_id,
- updated_at=int(time.time()),
+ updated_at=stmt.inserted.updated_at,
  )
  sess.execute(stmt, bulk_data)
 
@@ -1654,9 +1676,9 @@ class MySQLDb(BaseDb):
  if page is not None:
  stmt = stmt.offset((page - 1) * limit)
 
- result = sess.execute(stmt).fetchall()
- if not result:
- return [], 0
+ result = sess.execute(stmt).fetchall()
+ if not result:
+ return [], 0
 
  return [KnowledgeRow.model_validate(record._mapping) for record in result], total_count
 
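Note the matching change in the MySQL `ON DUPLICATE KEY UPDATE` clauses: `updated_at=int(time.time())` becomes `updated_at=stmt.inserted.updated_at`, so the conflict path reuses whatever value was placed in the row (freshly stamped or preserved). A standalone SQLAlchemy sketch of that pattern follows, with a hypothetical table that is not agno's actual schema:

```python
# Hedged sketch of the MySQL upsert pattern used above (hypothetical table).
from sqlalchemy import Column, Integer, MetaData, String, Table
from sqlalchemy.dialects.mysql import insert

metadata = MetaData()
memories_table = Table(
    "example_memories",  # hypothetical table name
    metadata,
    Column("memory_id", String(64), primary_key=True),
    Column("memory", String(1024)),
    Column("updated_at", Integer),
)

stmt = insert(memories_table).values(memory_id="m-1", memory="example", updated_at=1_700_000_000)
# stmt.inserted refers to the row proposed in the VALUES clause, so on a duplicate
# key the stored updated_at is the caller-supplied value (possibly a preserved
# historical timestamp) instead of a fresh int(time.time()).
stmt = stmt.on_duplicate_key_update(
    memory=stmt.inserted.memory,
    updated_at=stmt.inserted.updated_at,
)
```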
agno/db/postgres/postgres.py CHANGED
@@ -704,7 +704,7 @@ class PostgresDb(BaseDb):
  raise e
 
  def upsert_sessions(
- self, sessions: List[Session], deserialize: Optional[bool] = True
+ self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
  ) -> List[Union[Session, Dict[str, Any]]]:
  """
  Bulk insert or update multiple sessions.
@@ -712,6 +712,7 @@ class PostgresDb(BaseDb):
  Args:
  sessions (List[Session]): The list of session data to upsert.
  deserialize (Optional[bool]): Whether to deserialize the sessions. Defaults to True.
+ preserve_updated_at (bool): If True, preserve the updated_at from the session object.
 
  Returns:
  List[Union[Session, Dict[str, Any]]]: List of upserted sessions
@@ -739,6 +740,12 @@ class PostgresDb(BaseDb):
  session_records = []
  for agent_session in agent_sessions:
  session_dict = agent_session.to_dict()
+ # Use preserved updated_at if flag is set and value exists, otherwise use current time
+ updated_at = (
+ session_dict.get("updated_at")
+ if preserve_updated_at and session_dict.get("updated_at")
+ else int(time.time())
+ )
  session_records.append(
  {
  "session_id": session_dict.get("session_id"),
@@ -751,7 +758,7 @@ class PostgresDb(BaseDb):
  "metadata": session_dict.get("metadata"),
  "runs": session_dict.get("runs"),
  "created_at": session_dict.get("created_at"),
- "updated_at": int(time.time()),
+ "updated_at": updated_at,
  }
  )
 
@@ -782,6 +789,12 @@ class PostgresDb(BaseDb):
  session_records = []
  for team_session in team_sessions:
  session_dict = team_session.to_dict()
+ # Use preserved updated_at if flag is set and value exists, otherwise use current time
+ updated_at = (
+ session_dict.get("updated_at")
+ if preserve_updated_at and session_dict.get("updated_at")
+ else int(time.time())
+ )
  session_records.append(
  {
  "session_id": session_dict.get("session_id"),
@@ -794,7 +807,7 @@ class PostgresDb(BaseDb):
  "metadata": session_dict.get("metadata"),
  "runs": session_dict.get("runs"),
  "created_at": session_dict.get("created_at"),
- "updated_at": int(time.time()),
+ "updated_at": updated_at,
  }
  )
 
@@ -825,6 +838,12 @@ class PostgresDb(BaseDb):
  session_records = []
  for workflow_session in workflow_sessions:
  session_dict = workflow_session.to_dict()
+ # Use preserved updated_at if flag is set and value exists, otherwise use current time
+ updated_at = (
+ session_dict.get("updated_at")
+ if preserve_updated_at and session_dict.get("updated_at")
+ else int(time.time())
+ )
  session_records.append(
  {
  "session_id": session_dict.get("session_id"),
@@ -837,7 +856,7 @@ class PostgresDb(BaseDb):
  "metadata": session_dict.get("metadata"),
  "runs": session_dict.get("runs"),
  "created_at": session_dict.get("created_at"),
- "updated_at": int(time.time()),
+ "updated_at": updated_at,
  }
  )
 
@@ -1234,7 +1253,7 @@ class PostgresDb(BaseDb):
  raise e
 
  def upsert_memories(
- self, memories: List[UserMemory], deserialize: Optional[bool] = True
+ self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
  ) -> List[Union[UserMemory, Dict[str, Any]]]:
  """
  Bulk insert or update multiple memories in the database for improved performance.
@@ -1242,6 +1261,8 @@ class PostgresDb(BaseDb):
  Args:
  memories (List[UserMemory]): The list of memories to upsert.
  deserialize (Optional[bool]): Whether to deserialize the memories. Defaults to True.
+ preserve_updated_at (bool): If True, preserve the updated_at from the memory object.
+ If False (default), set updated_at to current time.
 
  Returns:
  List[Union[UserMemory, Dict[str, Any]]]: List of upserted memories
@@ -1265,6 +1286,8 @@ class PostgresDb(BaseDb):
  if memory.memory_id is None:
  memory.memory_id = str(uuid4())
 
+ # Use preserved updated_at if flag is set and value exists, otherwise use current time
+ updated_at = memory.updated_at if preserve_updated_at and memory.updated_at else current_time
  memory_records.append(
  {
  "memory_id": memory.memory_id,
@@ -1274,7 +1297,7 @@ class PostgresDb(BaseDb):
  "agent_id": memory.agent_id,
  "team_id": memory.team_id,
  "topics": memory.topics,
- "updated_at": current_time,
+ "updated_at": updated_at,
  }
  )
 
agno/db/redis/redis.py CHANGED
@@ -589,7 +589,7 @@ class RedisDb(BaseDb):
  raise e
 
  def upsert_sessions(
- self, sessions: List[Session], deserialize: Optional[bool] = True
+ self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
  ) -> List[Union[Session, Dict[str, Any]]]:
  """
  Bulk upsert multiple sessions for improved performance on large datasets.
@@ -912,7 +912,7 @@ class RedisDb(BaseDb):
  raise e
 
  def upsert_memories(
- self, memories: List[UserMemory], deserialize: Optional[bool] = True
+ self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
  ) -> List[Union[UserMemory, Dict[str, Any]]]:
  """
  Bulk upsert multiple user memories for improved performance on large datasets.
agno/db/singlestore/singlestore.py CHANGED
@@ -797,7 +797,7 @@ class SingleStoreDb(BaseDb):
  raise e
 
  def upsert_sessions(
- self, sessions: List[Session], deserialize: Optional[bool] = True
+ self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
  ) -> List[Union[Session, Dict[str, Any]]]:
  """
  Bulk upsert multiple sessions for improved performance on large datasets.
@@ -1336,7 +1336,7 @@ class SingleStoreDb(BaseDb):
  raise e
 
  def upsert_memories(
- self, memories: List[UserMemory], deserialize: Optional[bool] = True
+ self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
  ) -> List[Union[UserMemory, Dict[str, Any]]]:
  """
  Bulk upsert multiple user memories for improved performance on large datasets.