agno 2.1.4__py3-none-any.whl → 2.1.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (95)
  1. agno/agent/agent.py +1775 -538
  2. agno/db/async_postgres/__init__.py +3 -0
  3. agno/db/async_postgres/async_postgres.py +1668 -0
  4. agno/db/async_postgres/schemas.py +124 -0
  5. agno/db/async_postgres/utils.py +289 -0
  6. agno/db/base.py +237 -2
  7. agno/db/dynamo/dynamo.py +2 -2
  8. agno/db/firestore/firestore.py +2 -2
  9. agno/db/firestore/utils.py +4 -2
  10. agno/db/gcs_json/gcs_json_db.py +2 -2
  11. agno/db/in_memory/in_memory_db.py +2 -2
  12. agno/db/json/json_db.py +2 -2
  13. agno/db/migrations/v1_to_v2.py +43 -13
  14. agno/db/mongo/mongo.py +14 -6
  15. agno/db/mongo/utils.py +0 -4
  16. agno/db/mysql/mysql.py +23 -13
  17. agno/db/postgres/postgres.py +17 -6
  18. agno/db/redis/redis.py +2 -2
  19. agno/db/singlestore/singlestore.py +19 -10
  20. agno/db/sqlite/sqlite.py +22 -12
  21. agno/db/sqlite/utils.py +8 -3
  22. agno/db/surrealdb/__init__.py +3 -0
  23. agno/db/surrealdb/metrics.py +292 -0
  24. agno/db/surrealdb/models.py +259 -0
  25. agno/db/surrealdb/queries.py +71 -0
  26. agno/db/surrealdb/surrealdb.py +1193 -0
  27. agno/db/surrealdb/utils.py +87 -0
  28. agno/eval/accuracy.py +50 -43
  29. agno/eval/performance.py +6 -3
  30. agno/eval/reliability.py +6 -3
  31. agno/eval/utils.py +33 -16
  32. agno/exceptions.py +8 -2
  33. agno/knowledge/knowledge.py +260 -46
  34. agno/knowledge/reader/pdf_reader.py +4 -6
  35. agno/knowledge/reader/reader_factory.py +2 -3
  36. agno/memory/manager.py +254 -46
  37. agno/models/anthropic/claude.py +37 -0
  38. agno/os/app.py +8 -7
  39. agno/os/interfaces/a2a/router.py +3 -5
  40. agno/os/interfaces/agui/router.py +4 -1
  41. agno/os/interfaces/agui/utils.py +27 -6
  42. agno/os/interfaces/slack/router.py +2 -4
  43. agno/os/mcp.py +98 -41
  44. agno/os/router.py +23 -0
  45. agno/os/routers/evals/evals.py +52 -20
  46. agno/os/routers/evals/utils.py +14 -14
  47. agno/os/routers/knowledge/knowledge.py +130 -9
  48. agno/os/routers/knowledge/schemas.py +57 -0
  49. agno/os/routers/memory/memory.py +116 -44
  50. agno/os/routers/metrics/metrics.py +16 -6
  51. agno/os/routers/session/session.py +65 -22
  52. agno/os/schema.py +36 -0
  53. agno/os/utils.py +64 -11
  54. agno/reasoning/anthropic.py +80 -0
  55. agno/reasoning/gemini.py +73 -0
  56. agno/reasoning/openai.py +5 -0
  57. agno/reasoning/vertexai.py +76 -0
  58. agno/session/workflow.py +3 -3
  59. agno/team/team.py +968 -179
  60. agno/tools/googlesheets.py +20 -5
  61. agno/tools/mcp_toolbox.py +3 -3
  62. agno/tools/scrapegraph.py +1 -1
  63. agno/utils/models/claude.py +3 -1
  64. agno/utils/streamlit.py +1 -1
  65. agno/vectordb/base.py +22 -1
  66. agno/vectordb/cassandra/cassandra.py +9 -0
  67. agno/vectordb/chroma/chromadb.py +26 -6
  68. agno/vectordb/clickhouse/clickhousedb.py +9 -1
  69. agno/vectordb/couchbase/couchbase.py +11 -0
  70. agno/vectordb/lancedb/lance_db.py +20 -0
  71. agno/vectordb/langchaindb/langchaindb.py +11 -0
  72. agno/vectordb/lightrag/lightrag.py +9 -0
  73. agno/vectordb/llamaindex/llamaindexdb.py +15 -1
  74. agno/vectordb/milvus/milvus.py +23 -0
  75. agno/vectordb/mongodb/mongodb.py +22 -0
  76. agno/vectordb/pgvector/pgvector.py +19 -0
  77. agno/vectordb/pineconedb/pineconedb.py +35 -4
  78. agno/vectordb/qdrant/qdrant.py +24 -0
  79. agno/vectordb/singlestore/singlestore.py +25 -17
  80. agno/vectordb/surrealdb/surrealdb.py +18 -2
  81. agno/vectordb/upstashdb/upstashdb.py +26 -1
  82. agno/vectordb/weaviate/weaviate.py +18 -0
  83. agno/workflow/condition.py +4 -0
  84. agno/workflow/loop.py +4 -0
  85. agno/workflow/parallel.py +4 -0
  86. agno/workflow/router.py +4 -0
  87. agno/workflow/step.py +30 -14
  88. agno/workflow/steps.py +4 -0
  89. agno/workflow/types.py +2 -2
  90. agno/workflow/workflow.py +328 -61
  91. {agno-2.1.4.dist-info → agno-2.1.6.dist-info}/METADATA +100 -41
  92. {agno-2.1.4.dist-info → agno-2.1.6.dist-info}/RECORD +95 -82
  93. {agno-2.1.4.dist-info → agno-2.1.6.dist-info}/WHEEL +0 -0
  94. {agno-2.1.4.dist-info → agno-2.1.6.dist-info}/licenses/LICENSE +0 -0
  95. {agno-2.1.4.dist-info → agno-2.1.6.dist-info}/top_level.txt +0 -0
agno/db/firestore/firestore.py CHANGED
@@ -558,7 +558,7 @@ class FirestoreDb(BaseDb):
             raise e

     def upsert_sessions(
-        self, sessions: List[Session], deserialize: Optional[bool] = True
+        self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[Session, Dict[str, Any]]]:
         """
         Bulk upsert multiple sessions for improved performance on large datasets.
@@ -939,7 +939,7 @@ class FirestoreDb(BaseDb):
             raise e

     def upsert_memories(
-        self, memories: List[UserMemory], deserialize: Optional[bool] = True
+        self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[UserMemory, Dict[str, Any]]]:
         """
         Bulk upsert multiple user memories for improved performance on large datasets.
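Across the 2.1.6 backends, upsert_sessions and upsert_memories gain a preserve_updated_at flag (default False) so bulk writers such as the v1→v2 migration can keep original timestamps instead of stamping the current time. A minimal sketch of the intended call pattern, using only the interface shown in this diff (copy_sessions is a hypothetical helper):

    from typing import List

    from agno.db.base import BaseDb
    from agno.session import AgentSession


    def copy_sessions(db: BaseDb, sessions: List[AgentSession]) -> None:
        # With the flag unset, backends stamp updated_at with the current time;
        # with preserve_updated_at=True each row keeps the timestamp it carries.
        db.upsert_sessions(sessions, preserve_updated_at=True)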
agno/db/firestore/utils.py CHANGED
@@ -164,11 +164,13 @@ def calculate_date_metrics(date_to_process: date, sessions_data: dict) -> dict:
         if session.get("user_id"):
             all_user_ids.add(session["user_id"])
         runs = session.get("runs", []) or []
-        metrics[runs_count_key] += len(runs)

-        if runs := session.get("runs", []):
+        if runs:
             if isinstance(runs, str):
                 runs = json.loads(runs)
+
+            metrics[runs_count_key] += len(runs)
+
             for run in runs:
                 if model_id := run.get("model"):
                     model_provider = run.get("model_provider", "")
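The reordering above is a bug fix as much as a cleanup: runs may be stored as a JSON-encoded string, and the old code counted len(runs) before decoding, i.e. the string's character count rather than the number of runs. A standalone illustration:

    import json

    runs = '[{"model": "gpt-4o"}, {"model": "claude"}]'  # runs stored as a JSON string

    print(len(runs))              # 42: character count (the old, wrong metric)
    print(len(json.loads(runs)))  # 2: number of runs, counted after decoding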
agno/db/gcs_json/gcs_json_db.py CHANGED
@@ -412,7 +412,7 @@ class GcsJsonDb(BaseDb):
             raise e

     def upsert_sessions(
-        self, sessions: List[Session], deserialize: Optional[bool] = True
+        self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[Session, Dict[str, Any]]]:
         """
         Bulk upsert multiple sessions for improved performance on large datasets.
@@ -704,7 +704,7 @@ class GcsJsonDb(BaseDb):
             raise e

     def upsert_memories(
-        self, memories: List[UserMemory], deserialize: Optional[bool] = True
+        self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[UserMemory, Dict[str, Any]]]:
         """
         Bulk upsert multiple user memories for improved performance on large datasets.
agno/db/in_memory/in_memory_db.py CHANGED
@@ -309,7 +309,7 @@ class InMemoryDb(BaseDb):
             return False

     def upsert_sessions(
-        self, sessions: List[Session], deserialize: Optional[bool] = True
+        self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[Session, Dict[str, Any]]]:
         """
         Bulk upsert multiple sessions for improved performance on large datasets.
@@ -590,7 +590,7 @@ class InMemoryDb(BaseDb):
             raise e

     def upsert_memories(
-        self, memories: List[UserMemory], deserialize: Optional[bool] = True
+        self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[UserMemory, Dict[str, Any]]]:
         """
         Bulk upsert multiple user memories for improved performance on large datasets.
agno/db/json/json_db.py CHANGED
@@ -398,7 +398,7 @@ class JsonDb(BaseDb):
             raise e

     def upsert_sessions(
-        self, sessions: List[Session], deserialize: Optional[bool] = True
+        self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[Session, Dict[str, Any]]]:
         """
         Bulk upsert multiple sessions for improved performance on large datasets.
@@ -703,7 +703,7 @@ class JsonDb(BaseDb):
             raise e

     def upsert_memories(
-        self, memories: List[UserMemory], deserialize: Optional[bool] = True
+        self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[UserMemory, Dict[str, Any]]]:
         """
         Bulk upsert multiple user memories for improved performance on large datasets.
agno/db/migrations/v1_to_v2.py CHANGED
@@ -1,15 +1,13 @@
 """Migration utility to migrate your Agno tables from v1 to v2"""

+import gc
 import json
-from typing import Any, Dict, List, Optional, Union
+from typing import Any, Dict, List, Optional, Union, cast

 from sqlalchemy import text

-from agno.db.mongo.mongo import MongoDb
-from agno.db.mysql.mysql import MySQLDb
-from agno.db.postgres.postgres import PostgresDb
+from agno.db.base import BaseDb
 from agno.db.schemas.memory import UserMemory
-from agno.db.sqlite.sqlite import SqliteDb
 from agno.session import AgentSession, TeamSession, WorkflowSession
 from agno.utils.log import log_error, log_info, log_warning

@@ -315,7 +313,7 @@ def convert_v1_fields_to_v2(data: Dict[str, Any]) -> Dict[str, Any]:


 def migrate(
-    db: Union[PostgresDb, MySQLDb, SqliteDb, MongoDb],
+    db: BaseDb,
     v1_db_schema: str,
     agent_sessions_table_name: Optional[str] = None,
     team_sessions_table_name: Optional[str] = None,
@@ -372,7 +370,7 @@ def migrate(


 def migrate_table_in_batches(
-    db: Union[PostgresDb, MySQLDb, SqliteDb, MongoDb],
+    db: BaseDb,
     v1_db_schema: str,
     v1_table_name: str,
     v1_table_type: str,
@@ -410,7 +408,7 @@ def migrate_table_in_batches(
             if hasattr(db, "Session"):
                 db.Session.remove()  # type: ignore

-            db.upsert_sessions(sessions)  # type: ignore
+            db.upsert_sessions(sessions, preserve_updated_at=True)  # type: ignore
             total_migrated += len(sessions)
             log_info(f"Bulk upserted {len(sessions)} sessions in batch {batch_count}")

@@ -420,21 +418,35 @@ def migrate_table_in_batches(
             if hasattr(db, "Session"):
                 db.Session.remove()  # type: ignore

-            db.upsert_memories(memories)
+            db.upsert_memories(memories, preserve_updated_at=True)
             total_migrated += len(memories)
             log_info(f"Bulk upserted {len(memories)} memories in batch {batch_count}")

         log_info(f"Completed batch {batch_count}: migrated {batch_size_actual} records")

+        # Explicit cleanup to free memory before next batch
+        del batch_content
+        if v1_table_type in ["agent_sessions", "team_sessions", "workflow_sessions"]:
+            del sessions
+        elif v1_table_type == "memories":
+            del memories
+
+        # Force garbage collection to return memory to OS
+        # This is necessary because Python's memory allocator retains memory after large operations
+        # See: https://github.com/sqlalchemy/sqlalchemy/issues/4616
+        gc.collect()
+
     log_info(f"✅ Migration completed for table {v1_table_name}: {total_migrated} total records migrated")


-def get_table_content_in_batches(
-    db: Union[PostgresDb, MySQLDb, SqliteDb, MongoDb], db_schema: str, table_name: str, batch_size: int = 5000
-):
+def get_table_content_in_batches(db: BaseDb, db_schema: str, table_name: str, batch_size: int = 5000):
     """Get table content in batches to avoid memory issues with large tables"""
     try:
-        if isinstance(db, MongoDb):
+        if type(db).__name__ == "MongoDb":
+            from agno.db.mongo.mongo import MongoDb
+
+            db = cast(MongoDb, db)
+
             # MongoDB implementation with cursor and batching
             collection = db.database[table_name]
             cursor = collection.find({}).batch_size(batch_size)
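The del plus gc.collect() between batches above is deliberate: dropping the only references makes the finished batch collectable, and the explicit collect encourages CPython to return freed memory to the OS instead of holding it while the next 5000-row batch loads. A standalone sketch of the same loop shape, with fetch_batches as a hypothetical stand-in for get_table_content_in_batches:

    import gc
    from typing import Iterator, List


    def fetch_batches(batch_size: int = 5000) -> Iterator[List[dict]]:
        # Hypothetical stand-in for get_table_content_in_batches().
        for start in range(0, 20_000, batch_size):
            yield [{"id": i} for i in range(start, start + batch_size)]


    total = 0
    for batch in fetch_batches():
        total += len(batch)  # stand-in for the upsert work
        del batch            # drop the last reference before the next batch
        gc.collect()         # nudge the allocator to release memory

    print(f"processed {total} records")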
@@ -455,6 +467,24 @@ def get_table_content_in_batches(
                 yield batch
         else:
             # SQL database implementations (PostgresDb, MySQLDb, SqliteDb)
+            if type(db).__name__ == "PostgresDb":
+                from agno.db.postgres.postgres import PostgresDb
+
+                db = cast(PostgresDb, db)
+
+            elif type(db).__name__ == "MySQLDb":
+                from agno.db.mysql.mysql import MySQLDb
+
+                db = cast(MySQLDb, db)
+
+            elif type(db).__name__ == "SqliteDb":
+                from agno.db.sqlite.sqlite import SqliteDb
+
+                db = cast(SqliteDb, db)
+
+            else:
+                raise ValueError(f"Invalid database type: {type(db).__name__}")
+
             offset = 0
             while True:
                 # Create a new session for each batch to avoid transaction conflicts
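Dispatching on type(db).__name__ and importing the concrete class inside the matching branch keeps v1_to_v2.py importable even when a backend's optional dependency (pymongo, the Postgres driver, etc.) is not installed; cast() then restores static typing within the branch. A minimal sketch of the same pattern (describe_backend is hypothetical):

    from typing import Any, cast


    def describe_backend(db: Any) -> str:
        # Import lazily so the module never pulls in a backend the user
        # has not installed; compare by class name instead of by type.
        if type(db).__name__ == "SqliteDb":
            from agno.db.sqlite.sqlite import SqliteDb

            db = cast(SqliteDb, db)  # static typing for this branch only
            return "SqliteDb"
        raise ValueError(f"Invalid database type: {type(db).__name__}")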
agno/db/mongo/mongo.py CHANGED
@@ -588,7 +588,7 @@ class MongoDb(BaseDb):
             raise e

     def upsert_sessions(
-        self, sessions: List[Session], deserialize: Optional[bool] = True
+        self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[Session, Dict[str, Any]]]:
         """
         Bulk upsert multiple sessions for improved performance on large datasets.
@@ -596,6 +596,7 @@ class MongoDb(BaseDb):
         Args:
             sessions (List[Session]): List of sessions to upsert.
             deserialize (Optional[bool]): Whether to deserialize the sessions. Defaults to True.
+            preserve_updated_at (bool): If True, preserve the updated_at from the session object.

         Returns:
             List[Union[Session, Dict[str, Any]]]: List of upserted sessions.
@@ -629,6 +630,9 @@ class MongoDb(BaseDb):

             session_dict = session.to_dict()

+            # Use preserved updated_at if flag is set and value exists, otherwise use current time
+            updated_at = session_dict.get("updated_at") if preserve_updated_at else int(time.time())
+
             if isinstance(session, AgentSession):
                 record = {
                     "session_id": session_dict.get("session_id"),
@@ -641,7 +645,7 @@ class MongoDb(BaseDb):
                     "summary": session_dict.get("summary"),
                     "metadata": session_dict.get("metadata"),
                     "created_at": session_dict.get("created_at"),
-                    "updated_at": int(time.time()),
+                    "updated_at": updated_at,
                 }
             elif isinstance(session, TeamSession):
                 record = {
@@ -655,7 +659,7 @@ class MongoDb(BaseDb):
                     "summary": session_dict.get("summary"),
                     "metadata": session_dict.get("metadata"),
                     "created_at": session_dict.get("created_at"),
-                    "updated_at": int(time.time()),
+                    "updated_at": updated_at,
                 }
             elif isinstance(session, WorkflowSession):
                 record = {
@@ -669,7 +673,7 @@ class MongoDb(BaseDb):
                     "summary": session_dict.get("summary"),
                     "metadata": session_dict.get("metadata"),
                     "created_at": session_dict.get("created_at"),
-                    "updated_at": int(time.time()),
+                    "updated_at": updated_at,
                 }
             else:
                 continue
@@ -1044,7 +1048,7 @@ class MongoDb(BaseDb):
             raise e

     def upsert_memories(
-        self, memories: List[UserMemory], deserialize: Optional[bool] = True
+        self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[UserMemory, Dict[str, Any]]]:
         """
         Bulk upsert multiple user memories for improved performance on large datasets.
@@ -1079,6 +1083,7 @@ class MongoDb(BaseDb):
         operations = []
         results: List[Union[UserMemory, Dict[str, Any]]] = []

+        current_time = int(time.time())
         for memory in memories:
             if memory is None:
                 continue
@@ -1086,6 +1091,9 @@ class MongoDb(BaseDb):
             if memory.memory_id is None:
                 memory.memory_id = str(uuid4())

+            # Use preserved updated_at if flag is set and value exists, otherwise use current time
+            updated_at = memory.updated_at if preserve_updated_at else current_time
+
             record = {
                 "user_id": memory.user_id,
                 "agent_id": memory.agent_id,
@@ -1093,7 +1101,7 @@ class MongoDb(BaseDb):
                 "memory_id": memory.memory_id,
                 "memory": memory.memory,
                 "topics": memory.topics,
-                "updated_at": int(time.time()),
+                "updated_at": updated_at,
             }

             operations.append(ReplaceOne(filter={"memory_id": memory.memory_id}, replacement=record, upsert=True))
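For reference, the Mongo path above collects these records into pymongo ReplaceOne operations and submits them in a single bulk_write, so one round trip covers the whole batch. A minimal standalone sketch of that shape (connection string, database, and collection names are illustrative):

    from pymongo import MongoClient, ReplaceOne

    client = MongoClient("mongodb://localhost:27017")  # assumed local instance
    collection = client["agno"]["memories"]            # illustrative names

    records = [
        {"memory_id": "m-1", "memory": "Prefers dark mode", "updated_at": 1700000000},
        {"memory_id": "m-2", "memory": "Lives in Lisbon", "updated_at": 1700000500},
    ]

    # Each record replaces the document sharing its memory_id, inserting
    # it when missing (upsert=True), mirroring the loop in upsert_memories.
    collection.bulk_write(
        [ReplaceOne({"memory_id": r["memory_id"]}, r, upsert=True) for r in records]
    )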
agno/db/mongo/utils.py CHANGED
@@ -16,8 +16,6 @@ except ImportError:


 # -- DB util methods --
-
-
 def create_collection_indexes(collection: Collection, collection_type: str) -> None:
     """Create all required indexes for a collection"""
     try:
@@ -58,8 +56,6 @@ def apply_pagination(


 # -- Metrics util methods --
-
-
 def calculate_date_metrics(date_to_process: date, sessions_data: dict) -> dict:
     """Calculate metrics for the given single date."""
     metrics = {
agno/db/mysql/mysql.py CHANGED
@@ -706,7 +706,7 @@ class MySQLDb(BaseDb):
         return None

     def upsert_sessions(
-        self, sessions: List[Session], deserialize: Optional[bool] = True
+        self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[Session, Dict[str, Any]]]:
         """
         Bulk upsert multiple sessions for improved performance on large datasets.
@@ -714,6 +714,7 @@ class MySQLDb(BaseDb):
         Args:
             sessions (List[Session]): List of sessions to upsert.
             deserialize (Optional[bool]): Whether to deserialize the sessions. Defaults to True.
+            preserve_updated_at (bool): If True, preserve the updated_at from the session object.

         Returns:
             List[Union[Session, Dict[str, Any]]]: List of upserted sessions.
@@ -758,6 +759,8 @@ class MySQLDb(BaseDb):
                 agent_data = []
                 for session in agent_sessions:
                     session_dict = session.to_dict()
+                    # Use preserved updated_at if flag is set and value exists, otherwise use current time
+                    updated_at = session_dict.get("updated_at") if preserve_updated_at else int(time.time())
                     agent_data.append(
                         {
                             "session_id": session_dict.get("session_id"),
@@ -770,7 +773,7 @@ class MySQLDb(BaseDb):
                             "summary": session_dict.get("summary"),
                             "metadata": session_dict.get("metadata"),
                             "created_at": session_dict.get("created_at"),
-                            "updated_at": session_dict.get("created_at"),
+                            "updated_at": updated_at,
                         }
                     )

@@ -784,7 +787,7 @@ class MySQLDb(BaseDb):
                     summary=stmt.inserted.summary,
                     metadata=stmt.inserted.metadata,
                     runs=stmt.inserted.runs,
-                    updated_at=int(time.time()),
+                    updated_at=stmt.inserted.updated_at,
                 )
                 sess.execute(stmt, agent_data)

@@ -808,6 +811,8 @@ class MySQLDb(BaseDb):
                 team_data = []
                 for session in team_sessions:
                     session_dict = session.to_dict()
+                    # Use preserved updated_at if flag is set and value exists, otherwise use current time
+                    updated_at = session_dict.get("updated_at") if preserve_updated_at else int(time.time())
                     team_data.append(
                         {
                             "session_id": session_dict.get("session_id"),
@@ -820,7 +825,7 @@ class MySQLDb(BaseDb):
                             "summary": session_dict.get("summary"),
                             "metadata": session_dict.get("metadata"),
                             "created_at": session_dict.get("created_at"),
-                            "updated_at": session_dict.get("created_at"),
+                            "updated_at": updated_at,
                         }
                     )

@@ -834,7 +839,7 @@ class MySQLDb(BaseDb):
                     summary=stmt.inserted.summary,
                     metadata=stmt.inserted.metadata,
                     runs=stmt.inserted.runs,
-                    updated_at=int(time.time()),
+                    updated_at=stmt.inserted.updated_at,
                 )
                 sess.execute(stmt, team_data)

@@ -858,6 +863,8 @@ class MySQLDb(BaseDb):
                 workflow_data = []
                 for session in workflow_sessions:
                     session_dict = session.to_dict()
+                    # Use preserved updated_at if flag is set and value exists, otherwise use current time
+                    updated_at = session_dict.get("updated_at") if preserve_updated_at else int(time.time())
                     workflow_data.append(
                         {
                             "session_id": session_dict.get("session_id"),
@@ -870,7 +877,7 @@ class MySQLDb(BaseDb):
                             "summary": session_dict.get("summary"),
                             "metadata": session_dict.get("metadata"),
                             "created_at": session_dict.get("created_at"),
-                            "updated_at": session_dict.get("created_at"),
+                            "updated_at": updated_at,
                         }
                     )

@@ -884,7 +891,7 @@ class MySQLDb(BaseDb):
                     summary=stmt.inserted.summary,
                     metadata=stmt.inserted.metadata,
                     runs=stmt.inserted.runs,
-                    updated_at=int(time.time()),
+                    updated_at=stmt.inserted.updated_at,
                 )
                 sess.execute(stmt, workflow_data)

@@ -1281,7 +1288,7 @@ class MySQLDb(BaseDb):
         return None

     def upsert_memories(
-        self, memories: List[UserMemory], deserialize: Optional[bool] = True
+        self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[UserMemory, Dict[str, Any]]]:
         """
         Bulk upsert multiple user memories for improved performance on large datasets.
@@ -1313,10 +1320,13 @@ class MySQLDb(BaseDb):

             # Prepare bulk data
             bulk_data = []
+            current_time = int(time.time())
             for memory in memories:
                 if memory.memory_id is None:
                     memory.memory_id = str(uuid4())

+                # Use preserved updated_at if flag is set and value exists, otherwise use current time
+                updated_at = memory.updated_at if preserve_updated_at else current_time
                 bulk_data.append(
                     {
                         "memory_id": memory.memory_id,
@@ -1326,7 +1336,7 @@ class MySQLDb(BaseDb):
                         "agent_id": memory.agent_id,
                         "team_id": memory.team_id,
                         "topics": memory.topics,
-                        "updated_at": int(time.time()),
+                        "updated_at": updated_at,
                     }
                 )

@@ -1341,7 +1351,7 @@ class MySQLDb(BaseDb):
                     input=stmt.inserted.input,
                     agent_id=stmt.inserted.agent_id,
                     team_id=stmt.inserted.team_id,
-                    updated_at=int(time.time()),
+                    updated_at=stmt.inserted.updated_at,
                 )
                 sess.execute(stmt, bulk_data)

@@ -1654,9 +1664,9 @@ class MySQLDb(BaseDb):
                 if page is not None:
                     stmt = stmt.offset((page - 1) * limit)

-            result = sess.execute(stmt).fetchall()
-            if not result:
-                return [], 0
+                result = sess.execute(stmt).fetchall()
+                if not result:
+                    return [], 0

                 return [KnowledgeRow.model_validate(record._mapping) for record in result], total_count

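The switch to stmt.inserted.updated_at is what carries the flag through MySQL's INSERT ... ON DUPLICATE KEY UPDATE: the update clause now reuses each row's own updated_at value rather than a time.time() captured when the statement was built. A minimal SQLAlchemy sketch (the table is illustrative, not the real agno schema):

    from sqlalchemy import Column, Integer, MetaData, String, Table
    from sqlalchemy.dialects.mysql import insert

    metadata = MetaData()
    memories = Table(  # illustrative table, far smaller than agno's
        "memories",
        metadata,
        Column("memory_id", String(64), primary_key=True),
        Column("memory", String(255)),
        Column("updated_at", Integer),
    )

    rows = [{"memory_id": "m-1", "memory": "likes tea", "updated_at": 1700000000}]

    stmt = insert(memories).values(rows)
    # stmt.inserted.<col> refers to each row's proposed VALUES, so a
    # preserved updated_at survives the conflict path per row.
    stmt = stmt.on_duplicate_key_update(
        memory=stmt.inserted.memory,
        updated_at=stmt.inserted.updated_at,
    )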
agno/db/postgres/postgres.py CHANGED
@@ -704,7 +704,7 @@ class PostgresDb(BaseDb):
             raise e

     def upsert_sessions(
-        self, sessions: List[Session], deserialize: Optional[bool] = True
+        self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[Session, Dict[str, Any]]]:
         """
         Bulk insert or update multiple sessions.
@@ -712,6 +712,7 @@ class PostgresDb(BaseDb):
         Args:
             sessions (List[Session]): The list of session data to upsert.
             deserialize (Optional[bool]): Whether to deserialize the sessions. Defaults to True.
+            preserve_updated_at (bool): If True, preserve the updated_at from the session object.

         Returns:
             List[Union[Session, Dict[str, Any]]]: List of upserted sessions
@@ -739,6 +740,8 @@ class PostgresDb(BaseDb):
                 session_records = []
                 for agent_session in agent_sessions:
                     session_dict = agent_session.to_dict()
+                    # Use preserved updated_at if flag is set (even if None), otherwise use current time
+                    updated_at = session_dict.get("updated_at") if preserve_updated_at else int(time.time())
                     session_records.append(
                         {
                             "session_id": session_dict.get("session_id"),
@@ -751,7 +754,7 @@ class PostgresDb(BaseDb):
                             "metadata": session_dict.get("metadata"),
                             "runs": session_dict.get("runs"),
                             "created_at": session_dict.get("created_at"),
-                            "updated_at": int(time.time()),
+                            "updated_at": updated_at,
                         }
                     )

@@ -782,6 +785,8 @@ class PostgresDb(BaseDb):
                 session_records = []
                 for team_session in team_sessions:
                     session_dict = team_session.to_dict()
+                    # Use preserved updated_at if flag is set (even if None), otherwise use current time
+                    updated_at = session_dict.get("updated_at") if preserve_updated_at else int(time.time())
                     session_records.append(
                         {
                             "session_id": session_dict.get("session_id"),
@@ -794,7 +799,7 @@ class PostgresDb(BaseDb):
                             "metadata": session_dict.get("metadata"),
                             "runs": session_dict.get("runs"),
                             "created_at": session_dict.get("created_at"),
-                            "updated_at": int(time.time()),
+                            "updated_at": updated_at,
                         }
                     )

@@ -825,6 +830,8 @@ class PostgresDb(BaseDb):
                 session_records = []
                 for workflow_session in workflow_sessions:
                     session_dict = workflow_session.to_dict()
+                    # Use preserved updated_at if flag is set (even if None), otherwise use current time
+                    updated_at = session_dict.get("updated_at") if preserve_updated_at else int(time.time())
                     session_records.append(
                         {
                             "session_id": session_dict.get("session_id"),
@@ -837,7 +844,7 @@ class PostgresDb(BaseDb):
                             "metadata": session_dict.get("metadata"),
                             "runs": session_dict.get("runs"),
                             "created_at": session_dict.get("created_at"),
-                            "updated_at": int(time.time()),
+                            "updated_at": updated_at,
                         }
                     )

@@ -1234,7 +1241,7 @@ class PostgresDb(BaseDb):
             raise e

     def upsert_memories(
-        self, memories: List[UserMemory], deserialize: Optional[bool] = True
+        self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[UserMemory, Dict[str, Any]]]:
         """
         Bulk insert or update multiple memories in the database for improved performance.
@@ -1242,6 +1249,8 @@ class PostgresDb(BaseDb):
         Args:
             memories (List[UserMemory]): The list of memories to upsert.
             deserialize (Optional[bool]): Whether to deserialize the memories. Defaults to True.
+            preserve_updated_at (bool): If True, preserve the updated_at from the memory object.
+                If False (default), set updated_at to current time.

         Returns:
             List[Union[UserMemory, Dict[str, Any]]]: List of upserted memories
@@ -1265,6 +1274,8 @@ class PostgresDb(BaseDb):
             if memory.memory_id is None:
                 memory.memory_id = str(uuid4())

+            # Use preserved updated_at if flag is set (even if None), otherwise use current time
+            updated_at = memory.updated_at if preserve_updated_at else current_time
             memory_records.append(
                 {
                     "memory_id": memory.memory_id,
@@ -1274,7 +1285,7 @@ class PostgresDb(BaseDb):
                     "agent_id": memory.agent_id,
                     "team_id": memory.team_id,
                     "topics": memory.topics,
-                    "updated_at": current_time,
+                    "updated_at": updated_at,
                 }
             )

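On the Postgres side the analogous upsert mechanism is INSERT ... ON CONFLICT, where the excluded pseudo-table exposes the incoming row's values. A sketch of how a preserved updated_at could flow through it (illustrative table and statement, not necessarily agno's exact SQL):

    from sqlalchemy import Column, Integer, MetaData, String, Table
    from sqlalchemy.dialects.postgresql import insert

    metadata = MetaData()
    sessions = Table(  # illustrative table, not the real agno schema
        "sessions",
        metadata,
        Column("session_id", String(64), primary_key=True),
        Column("runs", String),
        Column("updated_at", Integer),
    )

    rows = [{"session_id": "s-1", "runs": "[]", "updated_at": 1700000000}]

    stmt = insert(sessions).values(rows)
    # `excluded` is the row proposed for insertion, so a preserved
    # updated_at wins on conflict instead of a freshly stamped time.
    stmt = stmt.on_conflict_do_update(
        index_elements=["session_id"],
        set_={"runs": stmt.excluded.runs, "updated_at": stmt.excluded.updated_at},
    )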
agno/db/redis/redis.py CHANGED
@@ -589,7 +589,7 @@ class RedisDb(BaseDb):
             raise e

     def upsert_sessions(
-        self, sessions: List[Session], deserialize: Optional[bool] = True
+        self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[Session, Dict[str, Any]]]:
         """
         Bulk upsert multiple sessions for improved performance on large datasets.
@@ -912,7 +912,7 @@ class RedisDb(BaseDb):
             raise e

     def upsert_memories(
-        self, memories: List[UserMemory], deserialize: Optional[bool] = True
+        self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[UserMemory, Dict[str, Any]]]:
         """
         Bulk upsert multiple user memories for improved performance on large datasets.