agno 2.1.4__py3-none-any.whl → 2.1.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/agent/agent.py +1775 -538
- agno/db/async_postgres/__init__.py +3 -0
- agno/db/async_postgres/async_postgres.py +1668 -0
- agno/db/async_postgres/schemas.py +124 -0
- agno/db/async_postgres/utils.py +289 -0
- agno/db/base.py +237 -2
- agno/db/dynamo/dynamo.py +2 -2
- agno/db/firestore/firestore.py +2 -2
- agno/db/firestore/utils.py +4 -2
- agno/db/gcs_json/gcs_json_db.py +2 -2
- agno/db/in_memory/in_memory_db.py +2 -2
- agno/db/json/json_db.py +2 -2
- agno/db/migrations/v1_to_v2.py +43 -13
- agno/db/mongo/mongo.py +14 -6
- agno/db/mongo/utils.py +0 -4
- agno/db/mysql/mysql.py +23 -13
- agno/db/postgres/postgres.py +17 -6
- agno/db/redis/redis.py +2 -2
- agno/db/singlestore/singlestore.py +19 -10
- agno/db/sqlite/sqlite.py +22 -12
- agno/db/sqlite/utils.py +8 -3
- agno/db/surrealdb/__init__.py +3 -0
- agno/db/surrealdb/metrics.py +292 -0
- agno/db/surrealdb/models.py +259 -0
- agno/db/surrealdb/queries.py +71 -0
- agno/db/surrealdb/surrealdb.py +1193 -0
- agno/db/surrealdb/utils.py +87 -0
- agno/eval/accuracy.py +50 -43
- agno/eval/performance.py +6 -3
- agno/eval/reliability.py +6 -3
- agno/eval/utils.py +33 -16
- agno/exceptions.py +8 -2
- agno/knowledge/knowledge.py +260 -46
- agno/knowledge/reader/pdf_reader.py +4 -6
- agno/knowledge/reader/reader_factory.py +2 -3
- agno/memory/manager.py +254 -46
- agno/models/anthropic/claude.py +37 -0
- agno/os/app.py +8 -7
- agno/os/interfaces/a2a/router.py +3 -5
- agno/os/interfaces/agui/router.py +4 -1
- agno/os/interfaces/agui/utils.py +27 -6
- agno/os/interfaces/slack/router.py +2 -4
- agno/os/mcp.py +98 -41
- agno/os/router.py +23 -0
- agno/os/routers/evals/evals.py +52 -20
- agno/os/routers/evals/utils.py +14 -14
- agno/os/routers/knowledge/knowledge.py +130 -9
- agno/os/routers/knowledge/schemas.py +57 -0
- agno/os/routers/memory/memory.py +116 -44
- agno/os/routers/metrics/metrics.py +16 -6
- agno/os/routers/session/session.py +65 -22
- agno/os/schema.py +36 -0
- agno/os/utils.py +64 -11
- agno/reasoning/anthropic.py +80 -0
- agno/reasoning/gemini.py +73 -0
- agno/reasoning/openai.py +5 -0
- agno/reasoning/vertexai.py +76 -0
- agno/session/workflow.py +3 -3
- agno/team/team.py +968 -179
- agno/tools/googlesheets.py +20 -5
- agno/tools/mcp_toolbox.py +3 -3
- agno/tools/scrapegraph.py +1 -1
- agno/utils/models/claude.py +3 -1
- agno/utils/streamlit.py +1 -1
- agno/vectordb/base.py +22 -1
- agno/vectordb/cassandra/cassandra.py +9 -0
- agno/vectordb/chroma/chromadb.py +26 -6
- agno/vectordb/clickhouse/clickhousedb.py +9 -1
- agno/vectordb/couchbase/couchbase.py +11 -0
- agno/vectordb/lancedb/lance_db.py +20 -0
- agno/vectordb/langchaindb/langchaindb.py +11 -0
- agno/vectordb/lightrag/lightrag.py +9 -0
- agno/vectordb/llamaindex/llamaindexdb.py +15 -1
- agno/vectordb/milvus/milvus.py +23 -0
- agno/vectordb/mongodb/mongodb.py +22 -0
- agno/vectordb/pgvector/pgvector.py +19 -0
- agno/vectordb/pineconedb/pineconedb.py +35 -4
- agno/vectordb/qdrant/qdrant.py +24 -0
- agno/vectordb/singlestore/singlestore.py +25 -17
- agno/vectordb/surrealdb/surrealdb.py +18 -2
- agno/vectordb/upstashdb/upstashdb.py +26 -1
- agno/vectordb/weaviate/weaviate.py +18 -0
- agno/workflow/condition.py +4 -0
- agno/workflow/loop.py +4 -0
- agno/workflow/parallel.py +4 -0
- agno/workflow/router.py +4 -0
- agno/workflow/step.py +30 -14
- agno/workflow/steps.py +4 -0
- agno/workflow/types.py +2 -2
- agno/workflow/workflow.py +328 -61
- {agno-2.1.4.dist-info → agno-2.1.6.dist-info}/METADATA +100 -41
- {agno-2.1.4.dist-info → agno-2.1.6.dist-info}/RECORD +95 -82
- {agno-2.1.4.dist-info → agno-2.1.6.dist-info}/WHEEL +0 -0
- {agno-2.1.4.dist-info → agno-2.1.6.dist-info}/licenses/LICENSE +0 -0
- {agno-2.1.4.dist-info → agno-2.1.6.dist-info}/top_level.txt +0 -0
@@ -797,7 +797,7 @@ class SingleStoreDb(BaseDb):
             raise e
 
     def upsert_sessions(
-        self, sessions: List[Session], deserialize: Optional[bool] = True
+        self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[Session, Dict[str, Any]]]:
         """
         Bulk upsert multiple sessions for improved performance on large datasets.
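The new preserve_updated_at flag lets callers such as migration or restore scripts keep the timestamps already stored on each session instead of stamping rows with the current time. A minimal usage sketch; the import path is inferred from the file layout above, and the connection URL and sessions list are illustrative:

    from agno.db.singlestore import SingleStoreDb

    db = SingleStoreDb(db_url="mysql+pymysql://user:pass@host:3306/agno")  # placeholder URL

    # Default behavior: every upserted row gets updated_at = int(time.time())
    db.upsert_sessions(sessions)

    # Restore path: rows keep whatever updated_at each session dict already carries
    db.upsert_sessions(sessions, preserve_updated_at=True)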
@@ -841,6 +841,8 @@ class SingleStoreDb(BaseDb):
             agent_data = []
             for session in agent_sessions:
                 session_dict = session.to_dict()
+                # Use preserved updated_at if flag is set, otherwise use current time
+                updated_at = session_dict.get("updated_at") if preserve_updated_at else int(time.time())
                 agent_data.append(
                     {
                         "session_id": session_dict.get("session_id"),
@@ -853,7 +855,7 @@ class SingleStoreDb(BaseDb):
                         "summary": session_dict.get("summary"),
                         "metadata": session_dict.get("metadata"),
                         "created_at": session_dict.get("created_at"),
-                        "updated_at": int(time.time()),
+                        "updated_at": updated_at,
                     }
                 )
 
@@ -867,7 +869,7 @@ class SingleStoreDb(BaseDb):
                 summary=stmt.inserted.summary,
                 metadata=stmt.inserted.metadata,
                 runs=stmt.inserted.runs,
-                updated_at=int(time.time()),
+                updated_at=stmt.inserted.updated_at,
             )
             sess.execute(stmt, agent_data)
 
@@ -890,6 +892,8 @@ class SingleStoreDb(BaseDb):
             team_data = []
             for session in team_sessions:
                 session_dict = session.to_dict()
+                # Use preserved updated_at if flag is set, otherwise use current time
+                updated_at = session_dict.get("updated_at") if preserve_updated_at else int(time.time())
                 team_data.append(
                     {
                         "session_id": session_dict.get("session_id"),
@@ -902,7 +906,7 @@ class SingleStoreDb(BaseDb):
                         "summary": session_dict.get("summary"),
                         "metadata": session_dict.get("metadata"),
                         "created_at": session_dict.get("created_at"),
-                        "updated_at": int(time.time()),
+                        "updated_at": updated_at,
                     }
                 )
 
@@ -916,7 +920,7 @@ class SingleStoreDb(BaseDb):
                 summary=stmt.inserted.summary,
                 metadata=stmt.inserted.metadata,
                 runs=stmt.inserted.runs,
-                updated_at=int(time.time()),
+                updated_at=stmt.inserted.updated_at,
             )
             sess.execute(stmt, team_data)
 
@@ -939,6 +943,8 @@ class SingleStoreDb(BaseDb):
             workflow_data = []
             for session in workflow_sessions:
                 session_dict = session.to_dict()
+                # Use preserved updated_at if flag is set, otherwise use current time
+                updated_at = session_dict.get("updated_at") if preserve_updated_at else int(time.time())
                 workflow_data.append(
                     {
                         "session_id": session_dict.get("session_id"),
@@ -951,7 +957,7 @@ class SingleStoreDb(BaseDb):
                         "summary": session_dict.get("summary"),
                         "metadata": session_dict.get("metadata"),
                         "created_at": session_dict.get("created_at"),
-                        "updated_at": int(time.time()),
+                        "updated_at": updated_at,
                     }
                 )
 
@@ -965,7 +971,7 @@ class SingleStoreDb(BaseDb):
                 summary=stmt.inserted.summary,
                 metadata=stmt.inserted.metadata,
                 runs=stmt.inserted.runs,
-                updated_at=int(time.time()),
+                updated_at=stmt.inserted.updated_at,
             )
             sess.execute(stmt, workflow_data)
 
@@ -1336,7 +1342,7 @@ class SingleStoreDb(BaseDb):
             raise e
 
     def upsert_memories(
-        self, memories: List[UserMemory], deserialize: Optional[bool] = True
+        self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[UserMemory, Dict[str, Any]]]:
         """
         Bulk upsert multiple user memories for improved performance on large datasets.
@@ -1361,9 +1367,12 @@ class SingleStoreDb(BaseDb):
 
             # Prepare data for bulk insert
             memory_data = []
+            current_time = int(time.time())
             for memory in memories:
                 if memory.memory_id is None:
                     memory.memory_id = str(uuid4())
+                # Use preserved updated_at if flag is set, otherwise use current time
+                updated_at = memory.updated_at if preserve_updated_at else current_time
                 memory_data.append(
                     {
                         "memory_id": memory.memory_id,
@@ -1373,7 +1382,7 @@ class SingleStoreDb(BaseDb):
                         "agent_id": memory.agent_id,
                         "team_id": memory.team_id,
                         "topics": memory.topics,
-                        "updated_at": int(time.time()),
+                        "updated_at": updated_at,
                     }
                 )
 
@@ -1389,7 +1398,7 @@ class SingleStoreDb(BaseDb):
                 user_id=stmt.inserted.user_id,
                 agent_id=stmt.inserted.agent_id,
                 team_id=stmt.inserted.team_id,
-                updated_at=int(time.time()),
+                updated_at=stmt.inserted.updated_at,
             )
             sess.execute(stmt, memory_data)
 
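The memory path reads the preserved timestamp off the UserMemory object itself (memory.updated_at) rather than a serialized dict. A hedged sketch reusing the db instance from the earlier example; the UserMemory import path and constructor fields are assumptions, not taken from this diff:

    from agno.memory import UserMemory  # assumed import path

    memories = [UserMemory(memory="Prefers dark mode", user_id="u1", updated_at=1700000000)]

    # Keeps updated_at=1700000000 instead of overwriting it with int(time.time())
    db.upsert_memories(memories, preserve_updated_at=True)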
agno/db/sqlite/sqlite.py CHANGED
@@ -36,9 +36,9 @@ except ImportError:
 class SqliteDb(BaseDb):
     def __init__(
         self,
+        db_file: Optional[str] = None,
         db_engine: Optional[Engine] = None,
         db_url: Optional[str] = None,
-        db_file: Optional[str] = None,
         session_table: Optional[str] = None,
         memory_table: Optional[str] = None,
         metrics_table: Optional[str] = None,
@@ -56,9 +56,9 @@ class SqliteDb(BaseDb):
         4. Create a new database in the current directory
 
         Args:
+            db_file (Optional[str]): The database file to connect to.
             db_engine (Optional[Engine]): The SQLAlchemy database engine to use.
             db_url (Optional[str]): The database URL to connect to.
-            db_file (Optional[str]): The database file to connect to.
             session_table (Optional[str]): Name of the table to store Agent, Team and Workflow sessions.
             memory_table (Optional[str]): Name of the table to store user memories.
             metrics_table (Optional[str]): Name of the table to store metrics.
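With db_file promoted to the first parameter, the most common case, a file-backed database, can now be constructed positionally. A small sketch (the file path is illustrative):

    from agno.db.sqlite import SqliteDb

    db = SqliteDb("tmp/agno.db")             # db_file is now the first positional argument
    db_kw = SqliteDb(db_file="tmp/agno.db")  # keyword form works in either version

Any 2.1.4 caller that passed db_engine or db_url positionally is affected by this reorder; keyword arguments are unaffected.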
@@ -664,7 +664,7 @@ class SqliteDb(BaseDb):
             raise e
 
     def upsert_sessions(
-        self, sessions: List[Session], deserialize: Optional[bool] = True
+        self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[Session, Dict[str, Any]]]:
         """
         Bulk upsert multiple sessions for improved performance on large datasets.
@@ -672,6 +672,7 @@ class SqliteDb(BaseDb):
         Args:
             sessions (List[Session]): List of sessions to upsert.
             deserialize (Optional[bool]): Whether to deserialize the sessions. Defaults to True.
+            preserve_updated_at (bool): If True, preserve the updated_at from the session object.
 
         Returns:
             List[Union[Session, Dict[str, Any]]]: List of upserted sessions.
@@ -715,6 +716,8 @@ class SqliteDb(BaseDb):
             agent_data = []
             for session in agent_sessions:
                 serialized_session = serialize_session_json_fields(session.to_dict())
+                # Use preserved updated_at if flag is set and value exists, otherwise use current time
+                updated_at = serialized_session.get("updated_at") if preserve_updated_at else int(time.time())
                 agent_data.append(
                     {
                         "session_id": serialized_session.get("session_id"),
@@ -727,7 +730,7 @@ class SqliteDb(BaseDb):
                         "runs": serialized_session.get("runs"),
                         "summary": serialized_session.get("summary"),
                         "created_at": serialized_session.get("created_at"),
-                        "updated_at": int(time.time()),
+                        "updated_at": updated_at,
                     }
                 )
 
@@ -743,7 +746,7 @@ class SqliteDb(BaseDb):
                     metadata=stmt.excluded.metadata,
                     runs=stmt.excluded.runs,
                     summary=stmt.excluded.summary,
-                    updated_at=int(time.time()),
+                    updated_at=stmt.excluded.updated_at,
                 ),
             )
             sess.execute(stmt, agent_data)
@@ -768,6 +771,8 @@ class SqliteDb(BaseDb):
             team_data = []
             for session in team_sessions:
                 serialized_session = serialize_session_json_fields(session.to_dict())
+                # Use preserved updated_at if flag is set and value exists, otherwise use current time
+                updated_at = serialized_session.get("updated_at") if preserve_updated_at else int(time.time())
                 team_data.append(
                     {
                         "session_id": serialized_session.get("session_id"),
@@ -777,7 +782,7 @@ class SqliteDb(BaseDb):
                         "runs": serialized_session.get("runs"),
                         "summary": serialized_session.get("summary"),
                         "created_at": serialized_session.get("created_at"),
-                        "updated_at": int(time.time()),
+                        "updated_at": updated_at,
                         "team_data": serialized_session.get("team_data"),
                         "session_data": serialized_session.get("session_data"),
                         "metadata": serialized_session.get("metadata"),
@@ -796,7 +801,7 @@ class SqliteDb(BaseDb):
                     metadata=stmt.excluded.metadata,
                     runs=stmt.excluded.runs,
                     summary=stmt.excluded.summary,
-                    updated_at=int(time.time()),
+                    updated_at=stmt.excluded.updated_at,
                 ),
             )
             sess.execute(stmt, team_data)
@@ -821,6 +826,8 @@ class SqliteDb(BaseDb):
             workflow_data = []
             for session in workflow_sessions:
                 serialized_session = serialize_session_json_fields(session.to_dict())
+                # Use preserved updated_at if flag is set and value exists, otherwise use current time
+                updated_at = serialized_session.get("updated_at") if preserve_updated_at else int(time.time())
                 workflow_data.append(
                     {
                         "session_id": serialized_session.get("session_id"),
@@ -830,7 +837,7 @@ class SqliteDb(BaseDb):
                         "runs": serialized_session.get("runs"),
                         "summary": serialized_session.get("summary"),
                         "created_at": serialized_session.get("created_at"),
-                        "updated_at": int(time.time()),
+                        "updated_at": updated_at,
                         "workflow_data": serialized_session.get("workflow_data"),
                         "session_data": serialized_session.get("session_data"),
                         "metadata": serialized_session.get("metadata"),
@@ -849,7 +856,7 @@ class SqliteDb(BaseDb):
                     metadata=stmt.excluded.metadata,
                     runs=stmt.excluded.runs,
                     summary=stmt.excluded.summary,
-                    updated_at=int(time.time()),
+                    updated_at=stmt.excluded.updated_at,
                 ),
             )
             sess.execute(stmt, workflow_data)
@@ -1224,7 +1231,7 @@ class SqliteDb(BaseDb):
             raise e
 
     def upsert_memories(
-        self, memories: List[UserMemory], deserialize: Optional[bool] = True
+        self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[UserMemory, Dict[str, Any]]]:
         """
         Bulk upsert multiple user memories for improved performance on large datasets.
@@ -1255,10 +1262,13 @@ class SqliteDb(BaseDb):
             ]
             # Prepare bulk data
             bulk_data = []
+            current_time = int(time.time())
             for memory in memories:
                 if memory.memory_id is None:
                     memory.memory_id = str(uuid4())
 
+                # Use preserved updated_at if flag is set and value exists, otherwise use current time
+                updated_at = memory.updated_at if preserve_updated_at else current_time
                 bulk_data.append(
                     {
                         "user_id": memory.user_id,
@@ -1267,7 +1277,7 @@ class SqliteDb(BaseDb):
                         "memory_id": memory.memory_id,
                         "memory": memory.memory,
                         "topics": memory.topics,
-                        "updated_at": int(time.time()),
+                        "updated_at": updated_at,
                     }
                 )
 
@@ -1284,7 +1294,7 @@ class SqliteDb(BaseDb):
                     input=stmt.excluded.input,
                     agent_id=stmt.excluded.agent_id,
                     team_id=stmt.excluded.team_id,
-                    updated_at=int(time.time()),
+                    updated_at=stmt.excluded.updated_at,
                 ),
             )
             sess.execute(stmt, bulk_data)
agno/db/sqlite/utils.py CHANGED
@@ -179,9 +179,11 @@ def calculate_date_metrics(date_to_process: date, sessions_data: dict) -> dict:
         for session in sessions:
             if session.get("user_id"):
                 all_user_ids.add(session["user_id"])
-
+
+            # Parse runs from JSON string
             if runs := session.get("runs", []):
-                runs = json.loads(runs)
+                runs = json.loads(runs) if isinstance(runs, str) else runs
+                metrics[runs_count_key] += len(runs)
                 for run in runs:
                     if model_id := run.get("model"):
                         model_provider = run.get("model_provider", "")
@@ -189,7 +191,10 @@ def calculate_date_metrics(date_to_process: date, sessions_data: dict) -> dict:
                             model_counts.get(f"{model_id}:{model_provider}", 0) + 1
                         )
 
-
+            # Parse session_data from JSON string
+            session_data = session.get("session_data", {})
+            if isinstance(session_data, str):
+                session_data = json.loads(session_data)
             session_metrics = session_data.get("session_metrics", {})
             for field in token_metrics:
                 token_metrics[field] += session_metrics.get(field, 0)
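The metrics helper now tolerates "runs" and "session_data" values that are already deserialized, only calling json.loads on strings. The guard in isolation, as a runnable sketch with invented sample values:

    import json
    from typing import Any

    def ensure_parsed(value: Any) -> Any:
        # Same pattern the diff applies to "runs" and "session_data":
        # only decode when the stored value is still a JSON string
        return json.loads(value) if isinstance(value, str) else value

    print(ensure_parsed('[{"model": "gpt-4o"}]'))  # decoded from a JSON string
    print(ensure_parsed([{"model": "gpt-4o"}]))    # passed through unchanged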
agno/db/surrealdb/metrics.py ADDED

@@ -0,0 +1,292 @@
+from datetime import date, datetime, timedelta, timezone
+from textwrap import dedent
+from typing import Any, Callable, Dict, List, Optional, Union
+
+from surrealdb import BlockingHttpSurrealConnection, BlockingWsSurrealConnection, RecordID
+
+from agno.db.base import SessionType
+from agno.db.surrealdb import utils
+from agno.db.surrealdb.models import desurrealize_session, surrealize_dates
+from agno.db.surrealdb.queries import WhereClause
+from agno.utils.log import log_error
+
+
+def get_all_sessions_for_metrics_calculation(
+    client: Union[BlockingWsSurrealConnection, BlockingHttpSurrealConnection],
+    table: str,
+    start_timestamp: Optional[datetime] = None,
+    end_timestamp: Optional[datetime] = None,
+) -> List[Dict[str, Any]]:
+    """
+    Get all sessions of all types (agent, team, workflow) as raw dictionaries.
+
+    Args:
+        start_timestamp (Optional[int]): The start timestamp to filter by. Defaults to None.
+        end_timestamp (Optional[int]): The end timestamp to filter by. Defaults to None.
+
+    Returns:
+        List[Dict[str, Any]]: List of session dictionaries with session_type field.
+
+    Raises:
+        Exception: If an error occurs during retrieval.
+    """
+    where = WhereClause()
+
+    # starting_date
+    if start_timestamp is not None:
+        where = where.and_("created_at", start_timestamp, ">=")
+
+    # ending_date
+    if end_timestamp is not None:
+        where = where.and_("created_at", end_timestamp, "<=")
+
+    where_clause, where_vars = where.build()
+
+    # Query
+    query = dedent(f"""
+        SELECT *
+        FROM {table}
+        {where_clause}
+    """)
+
+    results = utils.query(client, query, where_vars, dict)
+    return [desurrealize_session(x) for x in results]
+
+
+def get_metrics_calculation_starting_date(
+    client: Union[BlockingWsSurrealConnection, BlockingHttpSurrealConnection], table: str, get_sessions: Callable
+) -> Optional[date]:
+    """Get the first date for which metrics calculation is needed:
+
+    1. If there are metrics records, return the date of the first day without a complete metrics record.
+    2. If there are no metrics records, return the date of the first recorded session.
+    3. If there are no metrics records and no sessions records, return None.
+
+    Args:
+        table (Table): The table to get the starting date for.
+
+    Returns:
+        Optional[date]: The starting date for which metrics calculation is needed.
+    """
+    query = dedent(f"""
+        SELECT * FROM ONLY {table}
+        ORDER BY date DESC
+        LIMIT 1
+    """)
+    result = utils.query_one(client, query, {}, dict)
+    if result:
+        # 1. Return the date of the first day without a complete metrics record
+        result_date = result["date"]
+        assert isinstance(result_date, datetime)
+        result_date = result_date.date()
+
+        if result.get("completed"):
+            return result_date + timedelta(days=1)
+        else:
+            return result_date
+
+    # 2. No metrics records. Return the date of the first recorded session
+    first_session, _ = get_sessions(
+        session_type=SessionType.AGENT,  # this is ignored because of component_id=None and deserialize=False
+        sort_by="created_at",
+        sort_order="asc",
+        limit=1,
+        component_id=None,
+        deserialize=False,
+    )
+    assert isinstance(first_session, list)
+
+    first_session_date = first_session[0]["created_at"] if first_session else None
+
+    # 3. No metrics records and no sessions records. Return None
+    if first_session_date is None:
+        return None
+
+    # Handle different types for created_at
+    if isinstance(first_session_date, datetime):
+        return first_session_date.date()
+    elif isinstance(first_session_date, int):
+        # Assume it's a Unix timestamp
+        return datetime.fromtimestamp(first_session_date, tz=timezone.utc).date()
+    elif isinstance(first_session_date, str):
+        # Try parsing as ISO format
+        return datetime.fromisoformat(first_session_date.replace("Z", "+00:00")).date()
+    else:
+        # If it's already a date object
+        if isinstance(first_session_date, date):
+            return first_session_date
+        raise ValueError(f"Unexpected type for created_at: {type(first_session_date)}")
+
+
+def bulk_upsert_metrics(
+    client: Union[BlockingWsSurrealConnection, BlockingHttpSurrealConnection],
+    table: str,
+    metrics_records: List[Dict[str, Any]],
+) -> List[Dict[str, Any]]:
+    """Bulk upsert metrics into the database.
+
+    Args:
+        table (Table): The table to upsert into.
+        metrics_records (List[Dict[str, Any]]): The list of metrics records to upsert.
+
+    Returns:
+        list[dict]: The upserted metrics records.
+    """
+    if not metrics_records:
+        return []
+
+    metrics_records = [surrealize_dates(x) for x in metrics_records]
+
+    try:
+        results = []
+        from agno.utils.log import log_debug
+
+        for metric in metrics_records:
+            log_debug(f"Upserting metric: {metric}")  # Add this
+            result = utils.query_one(
+                client,
+                "UPSERT $record CONTENT $content",
+                {"record": RecordID(table, metric["id"]), "content": metric},
+                dict,
+            )
+            if result:
+                results.append(result)
+        return results
+
+    except Exception as e:
+        import traceback
+
+        log_error(traceback.format_exc())
+        log_error(f"Error upserting metrics: {e}")
+
+        return []
+
+
+def fetch_all_sessions_data(
+    sessions: List[Dict[str, Any]], dates_to_process: list[date], start_timestamp: int
+) -> Optional[dict]:
+    """Return all session data for the given dates, for all session types.
+
+    Args:
+        sessions (List[Dict[str, Any]]): The sessions to process.
+        dates_to_process (list[date]): The dates to fetch session data for.
+        start_timestamp (int): The start timestamp (fallback if created_at is missing).
+
+    Returns:
+        dict: A dictionary with dates as keys and session data as values, for all session types.
+
+    Example:
+        {
+            "2000-01-01": {
+                "agent": [<session1>, <session2>, ...],
+                "team": [...],
+                "workflow": [...],
+            }
+        }
+    """
+    if not dates_to_process:
+        return None
+
+    all_sessions_data: Dict[str, Dict[str, List[Dict[str, Any]]]] = {
+        date_to_process.isoformat(): {"agent": [], "team": [], "workflow": []} for date_to_process in dates_to_process
+    }
+
+    for session in sessions:
+        created_at = session.get("created_at", start_timestamp)
+
+        # Handle different types for created_at
+        if isinstance(created_at, datetime):
+            session_date = created_at.date().isoformat()
+        elif isinstance(created_at, int):
+            session_date = datetime.fromtimestamp(created_at, tz=timezone.utc).date().isoformat()
+        elif isinstance(created_at, date):
+            session_date = created_at.isoformat()
+        else:
+            # Fallback to start_timestamp if type is unexpected
+            session_date = datetime.fromtimestamp(start_timestamp, tz=timezone.utc).date().isoformat()
+
+        if session_date in all_sessions_data:
+            session_type = session.get("session_type", "agent")  # Default to agent if missing
+            all_sessions_data[session_date][session_type].append(session)
+
+    return all_sessions_data
+
+
+def calculate_date_metrics(date_to_process: date, sessions_data: dict) -> dict:
+    """Calculate metrics for the given single date.
+
+    Args:
+        date_to_process (date): The date to calculate metrics for.
+        sessions_data (dict): The sessions data to calculate metrics for.
+
+    Returns:
+        dict: The calculated metrics.
+    """
+    metrics = {
+        "users_count": 0,
+        "agent_sessions_count": 0,
+        "team_sessions_count": 0,
+        "workflow_sessions_count": 0,
+        "agent_runs_count": 0,
+        "team_runs_count": 0,
+        "workflow_runs_count": 0,
+    }
+    token_metrics = {
+        "input_tokens": 0,
+        "output_tokens": 0,
+        "total_tokens": 0,
+        "audio_total_tokens": 0,
+        "audio_input_tokens": 0,
+        "audio_output_tokens": 0,
+        "cache_read_tokens": 0,
+        "cache_write_tokens": 0,
+        "reasoning_tokens": 0,
+    }
+    model_counts: Dict[str, int] = {}
+
+    session_types = [
+        ("agent", "agent_sessions_count", "agent_runs_count"),
+        ("team", "team_sessions_count", "team_runs_count"),
+        ("workflow", "workflow_sessions_count", "workflow_runs_count"),
+    ]
+    all_user_ids = set()
+
+    for session_type, sessions_count_key, runs_count_key in session_types:
+        sessions = sessions_data.get(session_type, [])
+        metrics[sessions_count_key] = len(sessions)
+
+        for session in sessions:
+            if session.get("user_id"):
+                all_user_ids.add(session["user_id"])
+            metrics[runs_count_key] += len(session.get("runs", []))
+            if runs := session.get("runs", []):
+                for run in runs:
+                    if model_id := run.get("model"):
+                        model_provider = run.get("model_provider", "")
+                        model_counts[f"{model_id}:{model_provider}"] = (
+                            model_counts.get(f"{model_id}:{model_provider}", 0) + 1
+                        )
+
+            session_metrics = session.get("session_data", {}).get("session_metrics", {})
+            for field in token_metrics:
+                token_metrics[field] += session_metrics.get(field, 0)
+
+    model_metrics = []
+    for model, count in model_counts.items():
+        model_id, model_provider = model.split(":")
+        model_metrics.append({"model_id": model_id, "model_provider": model_provider, "count": count})
+
+    metrics["users_count"] = len(all_user_ids)
+    current_time = datetime.now(timezone.utc)
+
+    return {
+        "id": date_to_process.isoformat(),  # Changed: Use date as ID (e.g., "2025-10-16")
+        "date": current_time.replace(hour=0, minute=0, second=0, microsecond=0),  # Date at midnight UTC
+        "completed": date_to_process < datetime.now(timezone.utc).date(),
+        "token_metrics": token_metrics,
+        "model_metrics": model_metrics,
+        "created_at": current_time,
+        "updated_at": current_time,
+        "aggregation_period": "daily",
+        **metrics,
+    }
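The new module's aggregation helpers chain together without a database connection, since fetch_all_sessions_data and calculate_date_metrics operate on plain dictionaries. A hedged, self-contained sketch with invented sample data, assuming both functions are importable from the new agno.db.surrealdb.metrics module:

    from datetime import date, datetime, timezone

    sessions = [
        {
            "session_type": "agent",
            "user_id": "u1",
            "created_at": datetime(2025, 10, 16, 9, 30, tzinfo=timezone.utc),
            "runs": [{"model": "gpt-4o", "model_provider": "openai"}],
            "session_data": {"session_metrics": {"input_tokens": 100, "output_tokens": 40, "total_tokens": 140}},
        }
    ]

    day = date(2025, 10, 16)
    # Group the raw session dicts by calendar day and session type
    per_day = fetch_all_sessions_data(sessions, [day], start_timestamp=0)
    # Aggregate one day's sessions into a daily metrics record
    record = calculate_date_metrics(day, per_day[day.isoformat()])

    print(record["id"])                             # "2025-10-16"
    print(record["agent_runs_count"])               # 1
    print(record["token_metrics"]["total_tokens"])  # 140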