agno 2.1.3__py3-none-any.whl → 2.1.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/agent/agent.py +1779 -577
- agno/db/async_postgres/__init__.py +3 -0
- agno/db/async_postgres/async_postgres.py +1668 -0
- agno/db/async_postgres/schemas.py +124 -0
- agno/db/async_postgres/utils.py +289 -0
- agno/db/base.py +237 -2
- agno/db/dynamo/dynamo.py +10 -8
- agno/db/dynamo/schemas.py +1 -10
- agno/db/dynamo/utils.py +2 -2
- agno/db/firestore/firestore.py +2 -2
- agno/db/firestore/utils.py +4 -2
- agno/db/gcs_json/gcs_json_db.py +2 -2
- agno/db/in_memory/in_memory_db.py +2 -2
- agno/db/json/json_db.py +2 -2
- agno/db/migrations/v1_to_v2.py +30 -13
- agno/db/mongo/mongo.py +18 -6
- agno/db/mysql/mysql.py +35 -13
- agno/db/postgres/postgres.py +29 -6
- agno/db/redis/redis.py +2 -2
- agno/db/singlestore/singlestore.py +2 -2
- agno/db/sqlite/sqlite.py +34 -12
- agno/db/sqlite/utils.py +8 -3
- agno/eval/accuracy.py +50 -43
- agno/eval/performance.py +6 -3
- agno/eval/reliability.py +6 -3
- agno/eval/utils.py +33 -16
- agno/exceptions.py +8 -2
- agno/knowledge/embedder/fastembed.py +1 -1
- agno/knowledge/knowledge.py +260 -46
- agno/knowledge/reader/pdf_reader.py +4 -6
- agno/knowledge/reader/reader_factory.py +2 -3
- agno/memory/manager.py +241 -33
- agno/models/anthropic/claude.py +37 -0
- agno/os/app.py +15 -10
- agno/os/interfaces/a2a/router.py +3 -5
- agno/os/interfaces/agui/router.py +4 -1
- agno/os/interfaces/agui/utils.py +33 -6
- agno/os/interfaces/slack/router.py +2 -4
- agno/os/mcp.py +98 -41
- agno/os/router.py +23 -0
- agno/os/routers/evals/evals.py +52 -20
- agno/os/routers/evals/utils.py +14 -14
- agno/os/routers/knowledge/knowledge.py +130 -9
- agno/os/routers/knowledge/schemas.py +57 -0
- agno/os/routers/memory/memory.py +116 -44
- agno/os/routers/metrics/metrics.py +16 -6
- agno/os/routers/session/session.py +65 -22
- agno/os/schema.py +38 -0
- agno/os/utils.py +69 -13
- agno/reasoning/anthropic.py +80 -0
- agno/reasoning/gemini.py +73 -0
- agno/reasoning/openai.py +5 -0
- agno/reasoning/vertexai.py +76 -0
- agno/session/workflow.py +69 -1
- agno/team/team.py +934 -241
- agno/tools/function.py +36 -18
- agno/tools/google_drive.py +270 -0
- agno/tools/googlesheets.py +20 -5
- agno/tools/mcp_toolbox.py +3 -3
- agno/tools/scrapegraph.py +1 -1
- agno/utils/models/claude.py +3 -1
- agno/utils/print_response/workflow.py +112 -12
- agno/utils/streamlit.py +1 -1
- agno/vectordb/base.py +22 -1
- agno/vectordb/cassandra/cassandra.py +9 -0
- agno/vectordb/chroma/chromadb.py +26 -6
- agno/vectordb/clickhouse/clickhousedb.py +9 -1
- agno/vectordb/couchbase/couchbase.py +11 -0
- agno/vectordb/lancedb/lance_db.py +20 -0
- agno/vectordb/langchaindb/langchaindb.py +11 -0
- agno/vectordb/lightrag/lightrag.py +9 -0
- agno/vectordb/llamaindex/llamaindexdb.py +15 -1
- agno/vectordb/milvus/milvus.py +23 -0
- agno/vectordb/mongodb/mongodb.py +22 -0
- agno/vectordb/pgvector/pgvector.py +19 -0
- agno/vectordb/pineconedb/pineconedb.py +35 -4
- agno/vectordb/qdrant/qdrant.py +24 -0
- agno/vectordb/singlestore/singlestore.py +25 -17
- agno/vectordb/surrealdb/surrealdb.py +18 -1
- agno/vectordb/upstashdb/upstashdb.py +26 -1
- agno/vectordb/weaviate/weaviate.py +18 -0
- agno/workflow/condition.py +29 -0
- agno/workflow/loop.py +29 -0
- agno/workflow/parallel.py +141 -113
- agno/workflow/router.py +29 -0
- agno/workflow/step.py +146 -25
- agno/workflow/steps.py +29 -0
- agno/workflow/types.py +26 -1
- agno/workflow/workflow.py +507 -22
- {agno-2.1.3.dist-info → agno-2.1.5.dist-info}/METADATA +100 -41
- {agno-2.1.3.dist-info → agno-2.1.5.dist-info}/RECORD +94 -86
- {agno-2.1.3.dist-info → agno-2.1.5.dist-info}/WHEEL +0 -0
- {agno-2.1.3.dist-info → agno-2.1.5.dist-info}/licenses/LICENSE +0 -0
- {agno-2.1.3.dist-info → agno-2.1.5.dist-info}/top_level.txt +0 -0
agno/db/dynamo/dynamo.py
CHANGED

@@ -524,7 +524,7 @@ class DynamoDb(BaseDb):
             raise e
 
     def upsert_sessions(
-        self, sessions: List[Session], deserialize: Optional[bool] = True
+        self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[Session, Dict[str, Any]]]:
         """
         Bulk upsert multiple sessions for improved performance on large datasets.
@@ -962,7 +962,7 @@ class DynamoDb(BaseDb):
             raise e
 
     def upsert_memories(
-        self, memories: List[UserMemory], deserialize: Optional[bool] = True
+        self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[UserMemory, Dict[str, Any]]]:
         """
         Bulk upsert multiple user memories for improved performance on large datasets.
@@ -1454,17 +1454,17 @@ class DynamoDb(BaseDb):
         """
         import json
 
-        item = {}
+        item: Dict[str, Any] = {}
         for key, value in data.items():
             if value is not None:
                 if isinstance(value, bool):
-                    item[key] = {"BOOL":
+                    item[key] = {"BOOL": value}
                 elif isinstance(value, (int, float)):
                     item[key] = {"N": str(value)}
                 elif isinstance(value, str):
                     item[key] = {"S": str(value)}
                 elif isinstance(value, (dict, list)):
-                    item[key] = {"S": json.dumps(
+                    item[key] = {"S": json.dumps(value)}
                 else:
                     item[key] = {"S": str(value)}
         return item
@@ -1803,14 +1803,16 @@ class DynamoDb(BaseDb):
 
         if filter_type is not None:
             if filter_type == EvalFilterType.AGENT:
-                filter_expressions.append("agent_id
+                filter_expressions.append("attribute_exists(agent_id)")
             elif filter_type == EvalFilterType.TEAM:
-                filter_expressions.append("team_id
+                filter_expressions.append("attribute_exists(team_id)")
             elif filter_type == EvalFilterType.WORKFLOW:
-                filter_expressions.append("workflow_id
+                filter_expressions.append("attribute_exists(workflow_id)")
 
         if filter_expressions:
             scan_kwargs["FilterExpression"] = " AND ".join(filter_expressions)
+
+        if expression_values:
             scan_kwargs["ExpressionAttributeValues"] = expression_values  # type: ignore
 
         # Execute scan
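The new `attribute_exists(...)` filters take no expression placeholders, which is why `ExpressionAttributeValues` is now attached only when it is non-empty: DynamoDB rejects a scan whose attribute values are empty or unreferenced. A minimal sketch of the same guard with boto3 (table name and filters are illustrative):

import boto3

client = boto3.client("dynamodb")

filter_expressions = ["attribute_exists(agent_id)"]  # no placeholder values needed
expression_values: dict = {}

scan_kwargs = {"TableName": "agno_evals"}  # illustrative table name
if filter_expressions:
    scan_kwargs["FilterExpression"] = " AND ".join(filter_expressions)
if expression_values:
    # Attach values only when a filter actually references a placeholder;
    # DynamoDB raises a ValidationException for empty or unused values.
    scan_kwargs["ExpressionAttributeValues"] = expression_values

response = client.scan(**scan_kwargs)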
agno/db/dynamo/schemas.py
CHANGED

@@ -73,6 +73,7 @@ USER_MEMORY_TABLE_SCHEMA = {
         {"AttributeName": "user_id", "AttributeType": "S"},
         {"AttributeName": "agent_id", "AttributeType": "S"},
         {"AttributeName": "team_id", "AttributeType": "S"},
+        {"AttributeName": "workflow_id", "AttributeType": "S"},
         {"AttributeName": "updated_at", "AttributeType": "S"},
     ],
     "GlobalSecondaryIndexes": [
@@ -123,7 +124,6 @@ EVAL_TABLE_SCHEMA = {
     "AttributeDefinitions": [
         {"AttributeName": "run_id", "AttributeType": "S"},
         {"AttributeName": "eval_type", "AttributeType": "S"},
-        {"AttributeName": "eval_input", "AttributeType": "S"},
         {"AttributeName": "agent_id", "AttributeType": "S"},
         {"AttributeName": "team_id", "AttributeType": "S"},
         {"AttributeName": "workflow_id", "AttributeType": "S"},
@@ -176,18 +176,9 @@ KNOWLEDGE_TABLE_SCHEMA = {
     "KeySchema": [{"AttributeName": "id", "KeyType": "HASH"}],
     "AttributeDefinitions": [
         {"AttributeName": "id", "AttributeType": "S"},
-        {"AttributeName": "name", "AttributeType": "S"},
-        {"AttributeName": "description", "AttributeType": "S"},
-        {"AttributeName": "metadata", "AttributeType": "S"},
         {"AttributeName": "type", "AttributeType": "S"},
-        {"AttributeName": "size", "AttributeType": "N"},
-        {"AttributeName": "linked_to", "AttributeType": "S"},
-        {"AttributeName": "access_count", "AttributeType": "N"},
         {"AttributeName": "status", "AttributeType": "S"},
-        {"AttributeName": "status_message", "AttributeType": "S"},
         {"AttributeName": "created_at", "AttributeType": "N"},
-        {"AttributeName": "updated_at", "AttributeType": "N"},
-        {"AttributeName": "external_id", "AttributeType": "S"},
     ],
     "GlobalSecondaryIndexes": [
         {
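DynamoDB's CreateTable rejects AttributeDefinitions entries that no key schema or secondary index references, which is presumably why the knowledge table now declares only its key and index attributes. A minimal sketch of a valid shape with boto3 (table and index names are assumptions):

import boto3

dynamodb = boto3.client("dynamodb")

# Every AttributeDefinition must be referenced by the table key schema or an
# index key schema; unused definitions fail CreateTable validation.
dynamodb.create_table(
    TableName="agno_knowledge",  # illustrative table name
    KeySchema=[{"AttributeName": "id", "KeyType": "HASH"}],
    AttributeDefinitions=[
        {"AttributeName": "id", "AttributeType": "S"},
        {"AttributeName": "status", "AttributeType": "S"},
    ],
    GlobalSecondaryIndexes=[
        {
            "IndexName": "status-index",  # assumed index name
            "KeySchema": [{"AttributeName": "status", "KeyType": "HASH"}],
            "Projection": {"ProjectionType": "ALL"},
        }
    ],
    BillingMode="PAY_PER_REQUEST",
)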
agno/db/dynamo/utils.py
CHANGED

@@ -23,7 +23,7 @@ def serialize_to_dynamo_item(data: Dict[str, Any]) -> Dict[str, Any]:
         A DynamoDB-ready dict with the serialized data
 
     """
-    item = {}
+    item: Dict[str, Any] = {}
    for key, value in data.items():
        if value is not None:
            if isinstance(value, (int, float)):
@@ -31,7 +31,7 @@ def serialize_to_dynamo_item(data: Dict[str, Any]) -> Dict[str, Any]:
            elif isinstance(value, str):
                item[key] = {"S": value}
            elif isinstance(value, bool):
-                item[key] = {"BOOL":
+                item[key] = {"BOOL": value}
            elif isinstance(value, (dict, list)):
                item[key] = {"S": json.dumps(value)}
            else:
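One detail the branch order above hides: `bool` is a subclass of `int` in Python, so a boolean satisfies `isinstance(value, (int, float))` before the `BOOL` branch is ever reached here. A small sketch of the pitfall and the usual fix (the `dynamo_tag` helper is hypothetical, not part of the package):

# bool is an int subclass, so booleans match the (int, float) branch first
print(isinstance(True, (int, float)))  # True
print(str(True))                       # 'True', what {"N": str(value)} would store

def dynamo_tag(value):
    # Checking bool before int/float gives each type its intended DynamoDB tag
    if isinstance(value, bool):
        return {"BOOL": value}
    if isinstance(value, (int, float)):
        return {"N": str(value)}
    return {"S": str(value)}

print(dynamo_tag(True))  # {'BOOL': True}
print(dynamo_tag(3.14))  # {'N': '3.14'}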
agno/db/firestore/firestore.py
CHANGED

@@ -558,7 +558,7 @@ class FirestoreDb(BaseDb):
             raise e
 
     def upsert_sessions(
-        self, sessions: List[Session], deserialize: Optional[bool] = True
+        self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[Session, Dict[str, Any]]]:
         """
         Bulk upsert multiple sessions for improved performance on large datasets.
@@ -939,7 +939,7 @@ class FirestoreDb(BaseDb):
             raise e
 
     def upsert_memories(
-        self, memories: List[UserMemory], deserialize: Optional[bool] = True
+        self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[UserMemory, Dict[str, Any]]]:
         """
         Bulk upsert multiple user memories for improved performance on large datasets.
agno/db/firestore/utils.py
CHANGED

@@ -164,11 +164,13 @@ def calculate_date_metrics(date_to_process: date, sessions_data: dict) -> dict:
         if session.get("user_id"):
             all_user_ids.add(session["user_id"])
         runs = session.get("runs", []) or []
-        metrics[runs_count_key] += len(runs)
 
-        if runs
+        if runs:
             if isinstance(runs, str):
                 runs = json.loads(runs)
+
+            metrics[runs_count_key] += len(runs)
+
         for run in runs:
             if model_id := run.get("model"):
                 model_provider = run.get("model_provider", "")
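The reordering above matters because Firestore can hand `runs` back as a JSON-encoded string; calling `len()` before parsing counts characters rather than runs. A small sketch of the difference (the data shape is assumed):

import json

runs = '[{"model": "gpt-4o"}, {"model": "claude-sonnet"}]'  # assumed serialized shape

print(len(runs))  # character count of the JSON string, the old (wrong) metric

if runs:
    if isinstance(runs, str):
        runs = json.loads(runs)
    print(len(runs))  # 2, the actual number of runs, counted after parsing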
agno/db/gcs_json/gcs_json_db.py
CHANGED

@@ -412,7 +412,7 @@ class GcsJsonDb(BaseDb):
             raise e
 
     def upsert_sessions(
-        self, sessions: List[Session], deserialize: Optional[bool] = True
+        self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[Session, Dict[str, Any]]]:
         """
         Bulk upsert multiple sessions for improved performance on large datasets.
@@ -704,7 +704,7 @@ class GcsJsonDb(BaseDb):
             raise e
 
     def upsert_memories(
-        self, memories: List[UserMemory], deserialize: Optional[bool] = True
+        self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[UserMemory, Dict[str, Any]]]:
         """
         Bulk upsert multiple user memories for improved performance on large datasets.
agno/db/in_memory/in_memory_db.py
CHANGED

@@ -309,7 +309,7 @@ class InMemoryDb(BaseDb):
         return False
 
     def upsert_sessions(
-        self, sessions: List[Session], deserialize: Optional[bool] = True
+        self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[Session, Dict[str, Any]]]:
         """
         Bulk upsert multiple sessions for improved performance on large datasets.
@@ -590,7 +590,7 @@ class InMemoryDb(BaseDb):
             raise e
 
     def upsert_memories(
-        self, memories: List[UserMemory], deserialize: Optional[bool] = True
+        self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[UserMemory, Dict[str, Any]]]:
         """
         Bulk upsert multiple user memories for improved performance on large datasets.
agno/db/json/json_db.py
CHANGED

@@ -398,7 +398,7 @@ class JsonDb(BaseDb):
             raise e
 
     def upsert_sessions(
-        self, sessions: List[Session], deserialize: Optional[bool] = True
+        self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[Session, Dict[str, Any]]]:
         """
         Bulk upsert multiple sessions for improved performance on large datasets.
@@ -703,7 +703,7 @@ class JsonDb(BaseDb):
             raise e
 
     def upsert_memories(
-        self, memories: List[UserMemory], deserialize: Optional[bool] = True
+        self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[UserMemory, Dict[str, Any]]]:
         """
         Bulk upsert multiple user memories for improved performance on large datasets.
agno/db/migrations/v1_to_v2.py
CHANGED

@@ -1,15 +1,12 @@
 """Migration utility to migrate your Agno tables from v1 to v2"""
 
 import json
-from typing import Any, Dict, List, Optional, Union
+from typing import Any, Dict, List, Optional, Union, cast
 
 from sqlalchemy import text
 
-from agno.db.mongo.mongo import MongoDb
-from agno.db.mysql.mysql import MySQLDb
-from agno.db.postgres.postgres import PostgresDb
+from agno.db.base import BaseDb
 from agno.db.schemas.memory import UserMemory
-from agno.db.sqlite.sqlite import SqliteDb
 from agno.session import AgentSession, TeamSession, WorkflowSession
 from agno.utils.log import log_error, log_info, log_warning
@@ -315,7 +312,7 @@ def convert_v1_fields_to_v2(data: Dict[str, Any]) -> Dict[str, Any]:
 
 
 def migrate(
-    db: Union[PostgresDb, MySQLDb, SqliteDb, MongoDb],
+    db: BaseDb,
     v1_db_schema: str,
     agent_sessions_table_name: Optional[str] = None,
     team_sessions_table_name: Optional[str] = None,
@@ -372,7 +369,7 @@ def migrate(
 
 
 def migrate_table_in_batches(
-    db: Union[PostgresDb, MySQLDb, SqliteDb, MongoDb],
+    db: BaseDb,
     v1_db_schema: str,
     v1_table_name: str,
     v1_table_type: str,
@@ -410,7 +407,7 @@ def migrate_table_in_batches(
             if hasattr(db, "Session"):
                 db.Session.remove()  # type: ignore
 
-            db.upsert_sessions(sessions)  # type: ignore
+            db.upsert_sessions(sessions, preserve_updated_at=True)  # type: ignore
             total_migrated += len(sessions)
             log_info(f"Bulk upserted {len(sessions)} sessions in batch {batch_count}")
@@ -420,7 +417,7 @@ def migrate_table_in_batches(
             if hasattr(db, "Session"):
                 db.Session.remove()  # type: ignore
 
-            db.upsert_memories(memories)
+            db.upsert_memories(memories, preserve_updated_at=True)
             total_migrated += len(memories)
             log_info(f"Bulk upserted {len(memories)} memories in batch {batch_count}")
@@ -429,12 +426,14 @@ def migrate_table_in_batches(
     log_info(f"✅ Migration completed for table {v1_table_name}: {total_migrated} total records migrated")
 
 
-def get_table_content_in_batches(
-    db: Union[PostgresDb, MySQLDb, SqliteDb, MongoDb], db_schema: str, table_name: str, batch_size: int = 5000
-):
+def get_table_content_in_batches(db: BaseDb, db_schema: str, table_name: str, batch_size: int = 5000):
     """Get table content in batches to avoid memory issues with large tables"""
     try:
-        if isinstance(db, MongoDb):
+        if type(db).__name__ == "MongoDb":
+            from agno.db.mongo.mongo import MongoDb
+
+            db = cast(MongoDb, db)
+
             # MongoDB implementation with cursor and batching
             collection = db.database[table_name]
             cursor = collection.find({}).batch_size(batch_size)
@@ -455,6 +454,24 @@ def get_table_content_in_batches(
                 yield batch
         else:
             # SQL database implementations (PostgresDb, MySQLDb, SqliteDb)
+            if type(db).__name__ == "PostgresDb":
+                from agno.db.postgres.postgres import PostgresDb
+
+                db = cast(PostgresDb, db)
+
+            elif type(db).__name__ == "MySQLDb":
+                from agno.db.mysql.mysql import MySQLDb
+
+                db = cast(MySQLDb, db)
+
+            elif type(db).__name__ == "SqliteDb":
+                from agno.db.sqlite.sqlite import SqliteDb
+
+                db = cast(SqliteDb, db)
+
+            else:
+                raise ValueError(f"Invalid database type: {type(db).__name__}")
+
             offset = 0
             while True:
                 # Create a new session for each batch to avoid transaction conflicts
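With `migrate` now typed against `BaseDb` and forwarding `preserve_updated_at=True` to the bulk upserts, migrated rows keep their original v1 timestamps instead of being re-stamped at migration time. A hypothetical invocation (the DSN and table name are placeholders):

from agno.db.migrations.v1_to_v2 import migrate
from agno.db.postgres.postgres import PostgresDb

db = PostgresDb(db_url="postgresql+psycopg://user:pass@localhost:5432/agno")  # placeholder DSN

# Rows are copied in batches; preserve_updated_at=True is applied internally,
# so each migrated session and memory keeps the updated_at from its v1 table.
migrate(
    db=db,
    v1_db_schema="public",                       # schema holding the v1 tables
    agent_sessions_table_name="agent_sessions",  # placeholder v1 table name
)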
agno/db/mongo/mongo.py
CHANGED

@@ -588,7 +588,7 @@ class MongoDb(BaseDb):
             raise e
 
     def upsert_sessions(
-        self, sessions: List[Session], deserialize: Optional[bool] = True
+        self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[Session, Dict[str, Any]]]:
         """
         Bulk upsert multiple sessions for improved performance on large datasets.
@@ -596,6 +596,7 @@ class MongoDb(BaseDb):
         Args:
             sessions (List[Session]): List of sessions to upsert.
             deserialize (Optional[bool]): Whether to deserialize the sessions. Defaults to True.
+            preserve_updated_at (bool): If True, preserve the updated_at from the session object.
 
         Returns:
             List[Union[Session, Dict[str, Any]]]: List of upserted sessions.
@@ -629,6 +630,13 @@ class MongoDb(BaseDb):
 
             session_dict = session.to_dict()
 
+            # Use preserved updated_at if flag is set and value exists, otherwise use current time
+            updated_at = (
+                session_dict.get("updated_at")
+                if preserve_updated_at and session_dict.get("updated_at")
+                else int(time.time())
+            )
+
             if isinstance(session, AgentSession):
                 record = {
                     "session_id": session_dict.get("session_id"),
@@ -641,7 +649,7 @@ class MongoDb(BaseDb):
                     "summary": session_dict.get("summary"),
                     "metadata": session_dict.get("metadata"),
                     "created_at": session_dict.get("created_at"),
-                    "updated_at": int(time.time()),
+                    "updated_at": updated_at,
                 }
             elif isinstance(session, TeamSession):
                 record = {
@@ -655,7 +663,7 @@ class MongoDb(BaseDb):
                     "summary": session_dict.get("summary"),
                     "metadata": session_dict.get("metadata"),
                     "created_at": session_dict.get("created_at"),
-                    "updated_at": int(time.time()),
+                    "updated_at": updated_at,
                 }
             elif isinstance(session, WorkflowSession):
                 record = {
@@ -669,7 +677,7 @@ class MongoDb(BaseDb):
                     "summary": session_dict.get("summary"),
                     "metadata": session_dict.get("metadata"),
                     "created_at": session_dict.get("created_at"),
-                    "updated_at": int(time.time()),
+                    "updated_at": updated_at,
                 }
             else:
                 continue
@@ -1044,7 +1052,7 @@ class MongoDb(BaseDb):
             raise e
 
     def upsert_memories(
-        self, memories: List[UserMemory], deserialize: Optional[bool] = True
+        self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[UserMemory, Dict[str, Any]]]:
         """
         Bulk upsert multiple user memories for improved performance on large datasets.
@@ -1079,6 +1087,7 @@ class MongoDb(BaseDb):
             operations = []
             results: List[Union[UserMemory, Dict[str, Any]]] = []
 
+            current_time = int(time.time())
             for memory in memories:
                 if memory is None:
                     continue
@@ -1086,6 +1095,9 @@ class MongoDb(BaseDb):
                 if memory.memory_id is None:
                     memory.memory_id = str(uuid4())
 
+                # Use preserved updated_at if flag is set and value exists, otherwise use current time
+                updated_at = memory.updated_at if preserve_updated_at and memory.updated_at else current_time
+
                 record = {
                     "user_id": memory.user_id,
                     "agent_id": memory.agent_id,
@@ -1093,7 +1105,7 @@ class MongoDb(BaseDb):
                     "memory_id": memory.memory_id,
                     "memory": memory.memory,
                     "topics": memory.topics,
-                    "updated_at": int(time.time()),
+                    "updated_at": updated_at,
                 }
 
                 operations.append(ReplaceOne(filter={"memory_id": memory.memory_id}, replacement=record, upsert=True))
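Because the Mongo path replaces whole documents via `ReplaceOne(..., upsert=True)`, preserving `updated_at` reduces to choosing the timestamp before the replacement record is built. A condensed sketch of that pattern with pymongo (URI, database, and collection names are assumptions):

import time

from pymongo import MongoClient, ReplaceOne

collection = MongoClient("mongodb://localhost:27017")["agno"]["memories"]  # placeholder names


def upsert_memories(memories: list, preserve_updated_at: bool = False) -> None:
    current_time = int(time.time())
    operations = []
    for memory in memories:
        # Keep the incoming timestamp only when the caller opted in and one exists
        updated_at = memory["updated_at"] if preserve_updated_at and memory.get("updated_at") else current_time
        record = {**memory, "updated_at": updated_at}
        operations.append(ReplaceOne({"memory_id": memory["memory_id"]}, record, upsert=True))
    if operations:
        collection.bulk_write(operations)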
agno/db/mysql/mysql.py
CHANGED

@@ -706,7 +706,7 @@ class MySQLDb(BaseDb):
         return None
 
     def upsert_sessions(
-        self, sessions: List[Session], deserialize: Optional[bool] = True
+        self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[Session, Dict[str, Any]]]:
         """
         Bulk upsert multiple sessions for improved performance on large datasets.
@@ -714,6 +714,7 @@ class MySQLDb(BaseDb):
         Args:
             sessions (List[Session]): List of sessions to upsert.
             deserialize (Optional[bool]): Whether to deserialize the sessions. Defaults to True.
+            preserve_updated_at (bool): If True, preserve the updated_at from the session object.
 
         Returns:
             List[Union[Session, Dict[str, Any]]]: List of upserted sessions.
@@ -758,6 +759,12 @@ class MySQLDb(BaseDb):
             agent_data = []
             for session in agent_sessions:
                 session_dict = session.to_dict()
+                # Use preserved updated_at if flag is set and value exists, otherwise use current time
+                updated_at = (
+                    session_dict.get("updated_at")
+                    if preserve_updated_at and session_dict.get("updated_at")
+                    else int(time.time())
+                )
                 agent_data.append(
                     {
                         "session_id": session_dict.get("session_id"),
@@ -770,7 +777,7 @@ class MySQLDb(BaseDb):
                         "summary": session_dict.get("summary"),
                         "metadata": session_dict.get("metadata"),
                         "created_at": session_dict.get("created_at"),
-                        "updated_at": int(time.time()),
+                        "updated_at": updated_at,
                     }
                 )
 
@@ -784,7 +791,7 @@ class MySQLDb(BaseDb):
                     summary=stmt.inserted.summary,
                     metadata=stmt.inserted.metadata,
                     runs=stmt.inserted.runs,
-                    updated_at=int(time.time()),
+                    updated_at=stmt.inserted.updated_at,
                 )
                 sess.execute(stmt, agent_data)
 
@@ -808,6 +815,12 @@ class MySQLDb(BaseDb):
             team_data = []
             for session in team_sessions:
                 session_dict = session.to_dict()
+                # Use preserved updated_at if flag is set and value exists, otherwise use current time
+                updated_at = (
+                    session_dict.get("updated_at")
+                    if preserve_updated_at and session_dict.get("updated_at")
+                    else int(time.time())
+                )
                 team_data.append(
                     {
                         "session_id": session_dict.get("session_id"),
@@ -820,7 +833,7 @@ class MySQLDb(BaseDb):
                         "summary": session_dict.get("summary"),
                         "metadata": session_dict.get("metadata"),
                         "created_at": session_dict.get("created_at"),
-                        "updated_at": int(time.time()),
+                        "updated_at": updated_at,
                     }
                 )
 
@@ -834,7 +847,7 @@ class MySQLDb(BaseDb):
                     summary=stmt.inserted.summary,
                     metadata=stmt.inserted.metadata,
                     runs=stmt.inserted.runs,
-                    updated_at=int(time.time()),
+                    updated_at=stmt.inserted.updated_at,
                )
                 sess.execute(stmt, team_data)
 
@@ -858,6 +871,12 @@ class MySQLDb(BaseDb):
             workflow_data = []
             for session in workflow_sessions:
                 session_dict = session.to_dict()
+                # Use preserved updated_at if flag is set and value exists, otherwise use current time
+                updated_at = (
+                    session_dict.get("updated_at")
+                    if preserve_updated_at and session_dict.get("updated_at")
+                    else int(time.time())
+                )
                 workflow_data.append(
                     {
                         "session_id": session_dict.get("session_id"),
@@ -870,7 +889,7 @@ class MySQLDb(BaseDb):
                         "summary": session_dict.get("summary"),
                         "metadata": session_dict.get("metadata"),
                         "created_at": session_dict.get("created_at"),
-                        "updated_at": int(time.time()),
+                        "updated_at": updated_at,
                     }
                 )
 
@@ -884,7 +903,7 @@ class MySQLDb(BaseDb):
                     summary=stmt.inserted.summary,
                     metadata=stmt.inserted.metadata,
                     runs=stmt.inserted.runs,
-                    updated_at=int(time.time()),
+                    updated_at=stmt.inserted.updated_at,
                 )
                 sess.execute(stmt, workflow_data)
 
@@ -1281,7 +1300,7 @@ class MySQLDb(BaseDb):
         return None
 
     def upsert_memories(
-        self, memories: List[UserMemory], deserialize: Optional[bool] = True
+        self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[UserMemory, Dict[str, Any]]]:
         """
         Bulk upsert multiple user memories for improved performance on large datasets.
@@ -1313,10 +1332,13 @@ class MySQLDb(BaseDb):
 
             # Prepare bulk data
             bulk_data = []
+            current_time = int(time.time())
            for memory in memories:
                 if memory.memory_id is None:
                     memory.memory_id = str(uuid4())
 
+                # Use preserved updated_at if flag is set and value exists, otherwise use current time
+                updated_at = memory.updated_at if preserve_updated_at and memory.updated_at else current_time
                 bulk_data.append(
                     {
                         "memory_id": memory.memory_id,
@@ -1326,7 +1348,7 @@ class MySQLDb(BaseDb):
                         "agent_id": memory.agent_id,
                         "team_id": memory.team_id,
                         "topics": memory.topics,
-                        "updated_at": int(time.time()),
+                        "updated_at": updated_at,
                     }
                 )
 
@@ -1341,7 +1363,7 @@ class MySQLDb(BaseDb):
                     input=stmt.inserted.input,
                     agent_id=stmt.inserted.agent_id,
                     team_id=stmt.inserted.team_id,
-                    updated_at=int(time.time()),
+                    updated_at=stmt.inserted.updated_at,
                 )
                 sess.execute(stmt, bulk_data)
 
@@ -1654,9 +1676,9 @@ class MySQLDb(BaseDb):
             if page is not None:
                 stmt = stmt.offset((page - 1) * limit)
 
-
-
-
+            result = sess.execute(stmt).fetchall()
+            if not result:
+                return [], 0
 
             return [KnowledgeRow.model_validate(record._mapping) for record in result], total_count
 
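The MySQL upserts make the same choice declaratively: the ON DUPLICATE KEY UPDATE clause now writes `stmt.inserted.updated_at` (whatever timestamp the Python code placed in the incoming row) rather than stamping the current time on every conflict. A minimal sketch of the mechanism with SQLAlchemy (the table definition and DSN are illustrative):

from sqlalchemy import Column, Integer, MetaData, String, Table, create_engine
from sqlalchemy.dialects.mysql import insert

metadata = MetaData()
memories = Table(  # illustrative table definition
    "memories",
    metadata,
    Column("memory_id", String(64), primary_key=True),
    Column("memory", String(1024)),
    Column("updated_at", Integer),
)

engine = create_engine("mysql+pymysql://user:pass@localhost/agno")  # placeholder DSN
metadata.create_all(engine)

# updated_at here may be a preserved historical timestamp
rows = [{"memory_id": "m1", "memory": "hello", "updated_at": 1_700_000_000}]

stmt = insert(memories)
stmt = stmt.on_duplicate_key_update(
    memory=stmt.inserted.memory,
    # Reuse the value from the incoming row instead of int(time.time()),
    # so preserved timestamps survive the upsert.
    updated_at=stmt.inserted.updated_at,
)
with engine.begin() as conn:
    conn.execute(stmt, rows)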