agno 2.3.4__py3-none-any.whl → 2.3.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (112)
  1. agno/agent/agent.py +177 -41
  2. agno/culture/manager.py +2 -2
  3. agno/db/base.py +330 -8
  4. agno/db/dynamo/dynamo.py +722 -2
  5. agno/db/dynamo/schemas.py +127 -0
  6. agno/db/firestore/firestore.py +573 -1
  7. agno/db/firestore/schemas.py +40 -0
  8. agno/db/gcs_json/gcs_json_db.py +446 -1
  9. agno/db/in_memory/in_memory_db.py +143 -1
  10. agno/db/json/json_db.py +438 -1
  11. agno/db/mongo/async_mongo.py +522 -0
  12. agno/db/mongo/mongo.py +523 -1
  13. agno/db/mongo/schemas.py +29 -0
  14. agno/db/mysql/mysql.py +536 -3
  15. agno/db/mysql/schemas.py +38 -0
  16. agno/db/postgres/async_postgres.py +541 -13
  17. agno/db/postgres/postgres.py +535 -2
  18. agno/db/postgres/schemas.py +38 -0
  19. agno/db/redis/redis.py +468 -1
  20. agno/db/redis/schemas.py +32 -0
  21. agno/db/singlestore/schemas.py +38 -0
  22. agno/db/singlestore/singlestore.py +523 -1
  23. agno/db/sqlite/async_sqlite.py +548 -9
  24. agno/db/sqlite/schemas.py +38 -0
  25. agno/db/sqlite/sqlite.py +537 -5
  26. agno/db/sqlite/utils.py +6 -8
  27. agno/db/surrealdb/models.py +25 -0
  28. agno/db/surrealdb/surrealdb.py +548 -1
  29. agno/eval/accuracy.py +10 -4
  30. agno/eval/performance.py +10 -4
  31. agno/eval/reliability.py +22 -13
  32. agno/exceptions.py +11 -0
  33. agno/hooks/__init__.py +3 -0
  34. agno/hooks/decorator.py +164 -0
  35. agno/knowledge/chunking/semantic.py +2 -2
  36. agno/models/aimlapi/aimlapi.py +2 -3
  37. agno/models/anthropic/claude.py +18 -13
  38. agno/models/aws/bedrock.py +3 -4
  39. agno/models/aws/claude.py +5 -1
  40. agno/models/azure/ai_foundry.py +2 -2
  41. agno/models/azure/openai_chat.py +8 -0
  42. agno/models/cerebras/cerebras.py +63 -11
  43. agno/models/cerebras/cerebras_openai.py +2 -3
  44. agno/models/cohere/chat.py +1 -5
  45. agno/models/cometapi/cometapi.py +2 -3
  46. agno/models/dashscope/dashscope.py +2 -3
  47. agno/models/deepinfra/deepinfra.py +2 -3
  48. agno/models/deepseek/deepseek.py +2 -3
  49. agno/models/fireworks/fireworks.py +2 -3
  50. agno/models/google/gemini.py +9 -7
  51. agno/models/groq/groq.py +2 -3
  52. agno/models/huggingface/huggingface.py +1 -5
  53. agno/models/ibm/watsonx.py +1 -5
  54. agno/models/internlm/internlm.py +2 -3
  55. agno/models/langdb/langdb.py +6 -4
  56. agno/models/litellm/chat.py +2 -2
  57. agno/models/litellm/litellm_openai.py +2 -3
  58. agno/models/meta/llama.py +1 -5
  59. agno/models/meta/llama_openai.py +4 -5
  60. agno/models/mistral/mistral.py +1 -5
  61. agno/models/nebius/nebius.py +2 -3
  62. agno/models/nvidia/nvidia.py +4 -5
  63. agno/models/openai/chat.py +14 -3
  64. agno/models/openai/responses.py +14 -3
  65. agno/models/openrouter/openrouter.py +4 -5
  66. agno/models/perplexity/perplexity.py +2 -3
  67. agno/models/portkey/portkey.py +7 -6
  68. agno/models/requesty/requesty.py +4 -5
  69. agno/models/response.py +2 -1
  70. agno/models/sambanova/sambanova.py +4 -5
  71. agno/models/siliconflow/siliconflow.py +3 -4
  72. agno/models/together/together.py +4 -5
  73. agno/models/vercel/v0.py +4 -5
  74. agno/models/vllm/vllm.py +19 -14
  75. agno/models/xai/xai.py +4 -5
  76. agno/os/app.py +104 -0
  77. agno/os/config.py +13 -0
  78. agno/os/interfaces/whatsapp/router.py +0 -1
  79. agno/os/mcp.py +1 -0
  80. agno/os/router.py +31 -0
  81. agno/os/routers/traces/__init__.py +3 -0
  82. agno/os/routers/traces/schemas.py +414 -0
  83. agno/os/routers/traces/traces.py +499 -0
  84. agno/os/schema.py +10 -1
  85. agno/os/utils.py +57 -0
  86. agno/run/agent.py +1 -0
  87. agno/run/base.py +17 -0
  88. agno/run/team.py +4 -0
  89. agno/session/team.py +1 -0
  90. agno/table.py +10 -0
  91. agno/team/team.py +214 -65
  92. agno/tools/function.py +10 -8
  93. agno/tools/nano_banana.py +1 -1
  94. agno/tracing/__init__.py +12 -0
  95. agno/tracing/exporter.py +157 -0
  96. agno/tracing/schemas.py +276 -0
  97. agno/tracing/setup.py +111 -0
  98. agno/utils/agent.py +4 -4
  99. agno/utils/hooks.py +56 -1
  100. agno/vectordb/qdrant/qdrant.py +22 -22
  101. agno/workflow/condition.py +8 -0
  102. agno/workflow/loop.py +8 -0
  103. agno/workflow/parallel.py +8 -0
  104. agno/workflow/router.py +8 -0
  105. agno/workflow/step.py +20 -0
  106. agno/workflow/steps.py +8 -0
  107. agno/workflow/workflow.py +83 -17
  108. {agno-2.3.4.dist-info → agno-2.3.5.dist-info}/METADATA +2 -2
  109. {agno-2.3.4.dist-info → agno-2.3.5.dist-info}/RECORD +112 -102
  110. {agno-2.3.4.dist-info → agno-2.3.5.dist-info}/WHEEL +0 -0
  111. {agno-2.3.4.dist-info → agno-2.3.5.dist-info}/licenses/LICENSE +0 -0
  112. {agno-2.3.4.dist-info → agno-2.3.5.dist-info}/top_level.txt +0 -0
agno/db/sqlite/utils.py CHANGED
@@ -87,15 +87,15 @@ async def ais_table_available(session: AsyncSession, table_name: str, db_schema:
87
87
  return False
88
88
 
89
89
 
90
- def is_valid_table(db_engine: Engine, table_name: str, table_type: str, db_schema: Optional[str] = None) -> bool:
90
+ def is_valid_table(db_engine: Engine, table_name: str, table_type: str) -> bool:
91
91
  """
92
92
  Check if the existing table has the expected column names.
93
- Note: db_schema parameter is ignored in SQLite but kept for API compatibility.
93
+
94
94
  Args:
95
95
  db_engine (Engine): Database engine
96
96
  table_name (str): Name of the table to validate
97
97
  table_type (str): Type of table to get expected schema
98
- db_schema (Optional[str]): Database schema name (ignored in SQLite)
98
+
99
99
  Returns:
100
100
  bool: True if table has all expected columns, False otherwise
101
101
  """
@@ -120,17 +120,15 @@ def is_valid_table(db_engine: Engine, table_name: str, table_type: str, db_schem
120
120
  return False
121
121
 
122
122
 
123
- async def ais_valid_table(
124
- db_engine: AsyncEngine, table_name: str, table_type: str, db_schema: Optional[str] = None
125
- ) -> bool:
123
+ async def ais_valid_table(db_engine: AsyncEngine, table_name: str, table_type: str) -> bool:
126
124
  """
127
125
  Check if the existing table has the expected column names.
128
- Note: db_schema parameter is ignored in SQLite but kept for API compatibility.
126
+
129
127
  Args:
130
128
  db_engine (Engine): Database engine
131
129
  table_name (str): Name of the table to validate
132
130
  table_type (str): Type of table to get expected schema
133
- db_schema (Optional[str]): Database schema name (ignored in SQLite)
131
+
134
132
  Returns:
135
133
  bool: True if table has all expected columns, False otherwise
136
134
  """
@@ -23,7 +23,9 @@ TableType = Literal[
23
23
  "memories",
24
24
  "metrics",
25
25
  "sessions",
26
+ "spans",
26
27
  "teams",
28
+ "traces",
27
29
  "users",
28
30
  "workflows",
29
31
  ]
@@ -305,5 +307,28 @@ def get_schema(table_type: TableType, table_name: str) -> str:
305
307
  DEFINE FIELD OVERWRITE created_at ON {table_name} TYPE datetime VALUE time::now();
306
308
  DEFINE FIELD OVERWRITE updated_at ON {table_name} TYPE datetime VALUE time::now();
307
309
  """)
310
+ elif table_type == "traces":
311
+ return dedent(f"""
312
+ {define_table}
313
+ DEFINE FIELD OVERWRITE created_at ON {table_name} TYPE datetime VALUE time::now();
314
+ DEFINE INDEX idx_trace_id ON {table_name} FIELDS trace_id UNIQUE;
315
+ DEFINE INDEX idx_run_id ON {table_name} FIELDS run_id;
316
+ DEFINE INDEX idx_session_id ON {table_name} FIELDS session_id;
317
+ DEFINE INDEX idx_user_id ON {table_name} FIELDS user_id;
318
+ DEFINE INDEX idx_agent_id ON {table_name} FIELDS agent_id;
319
+ DEFINE INDEX idx_team_id ON {table_name} FIELDS team_id;
320
+ DEFINE INDEX idx_workflow_id ON {table_name} FIELDS workflow_id;
321
+ DEFINE INDEX idx_status ON {table_name} FIELDS status;
322
+ DEFINE INDEX idx_start_time ON {table_name} FIELDS start_time;
323
+ """)
324
+ elif table_type == "spans":
325
+ return dedent(f"""
326
+ {define_table}
327
+ DEFINE FIELD OVERWRITE created_at ON {table_name} TYPE datetime VALUE time::now();
328
+ DEFINE INDEX idx_span_id ON {table_name} FIELDS span_id UNIQUE;
329
+ DEFINE INDEX idx_trace_id ON {table_name} FIELDS trace_id;
330
+ DEFINE INDEX idx_parent_span_id ON {table_name} FIELDS parent_span_id;
331
+ DEFINE INDEX idx_start_time ON {table_name} FIELDS start_time;
332
+ """)
308
333
  else:
309
334
  return define_table
@@ -1,6 +1,9 @@
1
1
  from datetime import date, datetime, timedelta, timezone
2
2
  from textwrap import dedent
3
- from typing import Any, Dict, List, Optional, Sequence, Tuple, Union
3
+ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Tuple, Union
4
+
5
+ if TYPE_CHECKING:
6
+ from agno.tracing.schemas import Span, Trace
4
7
 
5
8
  from agno.db.base import BaseDb, SessionType
6
9
  from agno.db.postgres.utils import (
@@ -64,6 +67,8 @@ class SurrealDb(BaseDb):
64
67
  eval_table: Optional[str] = None,
65
68
  knowledge_table: Optional[str] = None,
66
69
  culture_table: Optional[str] = None,
70
+ traces_table: Optional[str] = None,
71
+ spans_table: Optional[str] = None,
67
72
  id: Optional[str] = None,
68
73
  ):
69
74
  """
@@ -71,6 +76,19 @@ class SurrealDb(BaseDb):
71
76
 
72
77
  Args:
73
78
  client: A blocking connection, either HTTP or WS
79
+ db_url: The URL of the SurrealDB database.
80
+ db_creds: The credentials for the SurrealDB database.
81
+ db_ns: The namespace for the SurrealDB database.
82
+ db_db: The database name for the SurrealDB database.
83
+ session_table: The name of the session table.
84
+ memory_table: The name of the memory table.
85
+ metrics_table: The name of the metrics table.
86
+ eval_table: The name of the eval table.
87
+ knowledge_table: The name of the knowledge table.
88
+ culture_table: The name of the culture table.
89
+ traces_table: The name of the traces table.
90
+ spans_table: The name of the spans table.
91
+ id: The ID of the database.
74
92
  """
75
93
  if id is None:
76
94
  base_seed = db_url
@@ -85,6 +103,8 @@ class SurrealDb(BaseDb):
85
103
  eval_table=eval_table,
86
104
  knowledge_table=knowledge_table,
87
105
  culture_table=culture_table,
106
+ traces_table=traces_table,
107
+ spans_table=spans_table,
88
108
  )
89
109
  self._client = client
90
110
  self._db_url = db_url
@@ -111,7 +131,9 @@ class SurrealDb(BaseDb):
111
131
  "knowledge": self.knowledge_table_name,
112
132
  "memories": self.memory_table_name,
113
133
  "sessions": self.session_table_name,
134
+ "spans": self.span_table_name,
114
135
  "teams": self._teams_table_name,
136
+ "traces": self.trace_table_name,
115
137
  "users": self._users_table_name,
116
138
  "workflows": self._workflows_table_name,
117
139
  }
@@ -159,6 +181,13 @@ class SurrealDb(BaseDb):
159
181
  table_name = self.eval_table_name
160
182
  elif table_type == "metrics":
161
183
  table_name = self.metrics_table_name
184
+ elif table_type == "traces":
185
+ table_name = self.trace_table_name
186
+ elif table_type == "spans":
187
+ # Ensure traces table exists before spans (for foreign key-like relationship)
188
+ if create_table_if_not_found:
189
+ self._get_table("traces", create_table_if_not_found=True)
190
+ table_name = self.span_table_name
162
191
  else:
163
192
  raise NotImplementedError(f"Unknown table type: {table_type}")
164
193
 
@@ -1359,3 +1388,521 @@ class SurrealDb(BaseDb):
1359
1388
  if not raw or not deserialize:
1360
1389
  return raw
1361
1390
  return deserialize_eval_run_record(raw)
1391
+
1392
+ # --- Traces ---
1393
+ def create_trace(self, trace: "Trace") -> None:
1394
+ """Create a single trace record in the database.
1395
+
1396
+ Args:
1397
+ trace: The Trace object to store (one per trace_id).
1398
+ """
1399
+ try:
1400
+ table = self._get_table("traces", create_table_if_not_found=True)
1401
+ record = RecordID(table, trace.trace_id)
1402
+
1403
+ # Check if trace exists
1404
+ existing = self._query_one("SELECT * FROM ONLY $record", {"record": record}, dict)
1405
+
1406
+ if existing:
1407
+ # workflow (level 3) > team (level 2) > agent (level 1) > child/unknown (level 0)
1408
+ def get_component_level(workflow_id: Any, team_id: Any, agent_id: Any, name: str) -> int:
1409
+ is_root_name = ".run" in name or ".arun" in name
1410
+ if not is_root_name:
1411
+ return 0
1412
+ elif workflow_id:
1413
+ return 3
1414
+ elif team_id:
1415
+ return 2
1416
+ elif agent_id:
1417
+ return 1
1418
+ else:
1419
+ return 0
1420
+
1421
+ existing_level = get_component_level(
1422
+ existing.get("workflow_id"),
1423
+ existing.get("team_id"),
1424
+ existing.get("agent_id"),
1425
+ existing.get("name", ""),
1426
+ )
1427
+ new_level = get_component_level(trace.workflow_id, trace.team_id, trace.agent_id, trace.name)
1428
+ should_update_name = new_level > existing_level
1429
+
1430
+ # Parse existing start_time to calculate correct duration
1431
+ existing_start_time = existing.get("start_time")
1432
+ if isinstance(existing_start_time, datetime):
1433
+ recalculated_duration_ms = int((trace.end_time - existing_start_time).total_seconds() * 1000)
1434
+ else:
1435
+ recalculated_duration_ms = trace.duration_ms
1436
+
1437
+ # Build update query
1438
+ update_fields = [
1439
+ "end_time = $end_time",
1440
+ "duration_ms = $duration_ms",
1441
+ "status = $status",
1442
+ ]
1443
+ update_vars: Dict[str, Any] = {
1444
+ "record": record,
1445
+ "end_time": trace.end_time,
1446
+ "duration_ms": recalculated_duration_ms,
1447
+ "status": trace.status,
1448
+ }
1449
+
1450
+ if should_update_name:
1451
+ update_fields.append("name = $name")
1452
+ update_vars["name"] = trace.name
1453
+
1454
+ # Update context fields only if new value is not None
1455
+ if trace.run_id is not None:
1456
+ update_fields.append("run_id = $run_id")
1457
+ update_vars["run_id"] = trace.run_id
1458
+ if trace.session_id is not None:
1459
+ update_fields.append("session_id = $session_id")
1460
+ update_vars["session_id"] = trace.session_id
1461
+ if trace.user_id is not None:
1462
+ update_fields.append("user_id = $user_id")
1463
+ update_vars["user_id"] = trace.user_id
1464
+ if trace.agent_id is not None:
1465
+ update_fields.append("agent_id = $agent_id")
1466
+ update_vars["agent_id"] = trace.agent_id
1467
+ if trace.team_id is not None:
1468
+ update_fields.append("team_id = $team_id")
1469
+ update_vars["team_id"] = trace.team_id
1470
+ if trace.workflow_id is not None:
1471
+ update_fields.append("workflow_id = $workflow_id")
1472
+ update_vars["workflow_id"] = trace.workflow_id
1473
+
1474
+ update_query = f"UPDATE ONLY $record SET {', '.join(update_fields)}"
1475
+ self._query_one(update_query, update_vars, dict)
1476
+ else:
1477
+ # Create new trace
1478
+ trace_dict = trace.to_dict()
1479
+ trace_dict.pop("total_spans", None)
1480
+ trace_dict.pop("error_count", None)
1481
+
1482
+ # Convert datetime fields
1483
+ if isinstance(trace_dict.get("start_time"), str):
1484
+ trace_dict["start_time"] = datetime.fromisoformat(trace_dict["start_time"].replace("Z", "+00:00"))
1485
+ if isinstance(trace_dict.get("end_time"), str):
1486
+ trace_dict["end_time"] = datetime.fromisoformat(trace_dict["end_time"].replace("Z", "+00:00"))
1487
+ if isinstance(trace_dict.get("created_at"), str):
1488
+ trace_dict["created_at"] = datetime.fromisoformat(trace_dict["created_at"].replace("Z", "+00:00"))
1489
+
1490
+ self._query_one(
1491
+ "CREATE ONLY $record CONTENT $content",
1492
+ {"record": record, "content": trace_dict},
1493
+ dict,
1494
+ )
1495
+
1496
+ except Exception as e:
1497
+ log_error(f"Error creating trace: {e}")
1498
+
1499
+ def get_trace(
1500
+ self,
1501
+ trace_id: Optional[str] = None,
1502
+ run_id: Optional[str] = None,
1503
+ ):
1504
+ """Get a single trace by trace_id or other filters.
1505
+
1506
+ Args:
1507
+ trace_id: The unique trace identifier.
1508
+ run_id: Filter by run ID (returns first match).
1509
+
1510
+ Returns:
1511
+ Optional[Trace]: The trace if found, None otherwise.
1512
+
1513
+ Note:
1514
+ If multiple filters are provided, trace_id takes precedence.
1515
+ For other filters, the most recent trace is returned.
1516
+ """
1517
+ try:
1518
+ table = self._get_table("traces", create_table_if_not_found=False)
1519
+ spans_table = self._get_table("spans", create_table_if_not_found=False)
1520
+
1521
+ if trace_id:
1522
+ record = RecordID(table, trace_id)
1523
+ trace_data = self._query_one("SELECT * FROM ONLY $record", {"record": record}, dict)
1524
+ elif run_id:
1525
+ query = dedent(f"""
1526
+ SELECT * FROM {table}
1527
+ WHERE run_id = $run_id
1528
+ ORDER BY start_time DESC
1529
+ LIMIT 1
1530
+ """)
1531
+ trace_data = self._query_one(query, {"run_id": run_id}, dict)
1532
+ else:
1533
+ log_debug("get_trace called without any filter parameters")
1534
+ return None
1535
+
1536
+ if not trace_data:
1537
+ return None
1538
+
1539
+ # Calculate total_spans and error_count
1540
+ id_obj = trace_data.get("id")
1541
+ trace_id_val = trace_data.get("trace_id") or (id_obj.id if id_obj is not None else None)
1542
+ if trace_id_val:
1543
+ count_query = f"SELECT count() as total FROM {spans_table} WHERE trace_id = $trace_id GROUP ALL"
1544
+ count_result = self._query_one(count_query, {"trace_id": trace_id_val}, dict)
1545
+ trace_data["total_spans"] = count_result.get("total", 0) if count_result else 0
1546
+
1547
+ error_query = f"SELECT count() as total FROM {spans_table} WHERE trace_id = $trace_id AND status_code = 'ERROR' GROUP ALL"
1548
+ error_result = self._query_one(error_query, {"trace_id": trace_id_val}, dict)
1549
+ trace_data["error_count"] = error_result.get("total", 0) if error_result else 0
1550
+
1551
+ # Deserialize
1552
+ return self._deserialize_trace(trace_data)
1553
+
1554
+ except Exception as e:
1555
+ log_error(f"Error getting trace: {e}")
1556
+ return None
1557
+
1558
+ def get_traces(
1559
+ self,
1560
+ run_id: Optional[str] = None,
1561
+ session_id: Optional[str] = None,
1562
+ user_id: Optional[str] = None,
1563
+ agent_id: Optional[str] = None,
1564
+ team_id: Optional[str] = None,
1565
+ workflow_id: Optional[str] = None,
1566
+ status: Optional[str] = None,
1567
+ start_time: Optional[datetime] = None,
1568
+ end_time: Optional[datetime] = None,
1569
+ limit: Optional[int] = 20,
1570
+ page: Optional[int] = 1,
1571
+ ) -> tuple[List, int]:
1572
+ """Get traces matching the provided filters with pagination.
1573
+
1574
+ Args:
1575
+ run_id: Filter by run ID.
1576
+ session_id: Filter by session ID.
1577
+ user_id: Filter by user ID.
1578
+ agent_id: Filter by agent ID.
1579
+ team_id: Filter by team ID.
1580
+ workflow_id: Filter by workflow ID.
1581
+ status: Filter by status (OK, ERROR, UNSET).
1582
+ start_time: Filter traces starting after this datetime.
1583
+ end_time: Filter traces ending before this datetime.
1584
+ limit: Maximum number of traces to return per page.
1585
+ page: Page number (1-indexed).
1586
+
1587
+ Returns:
1588
+ tuple[List[Trace], int]: Tuple of (list of matching traces, total count).
1589
+ """
1590
+ try:
1591
+ table = self._get_table("traces", create_table_if_not_found=False)
1592
+ spans_table = self._get_table("spans", create_table_if_not_found=False)
1593
+
1594
+ # Build where clause
1595
+ where = WhereClause()
1596
+ if run_id:
1597
+ where.and_("run_id", run_id)
1598
+ if session_id:
1599
+ where.and_("session_id", session_id)
1600
+ if user_id:
1601
+ where.and_("user_id", user_id)
1602
+ if agent_id:
1603
+ where.and_("agent_id", agent_id)
1604
+ if team_id:
1605
+ where.and_("team_id", team_id)
1606
+ if workflow_id:
1607
+ where.and_("workflow_id", workflow_id)
1608
+ if status:
1609
+ where.and_("status", status)
1610
+ if start_time:
1611
+ where.and_("start_time", start_time, ">=")
1612
+ if end_time:
1613
+ where.and_("end_time", end_time, "<=")
1614
+
1615
+ where_clause, where_vars = where.build()
1616
+
1617
+ # Total count
1618
+ total_count = self._count(table, where_clause, where_vars)
1619
+
1620
+ # Query with pagination
1621
+ order_limit_start_clause = order_limit_start("start_time", "DESC", limit, page)
1622
+ query = dedent(f"""
1623
+ SELECT * FROM {table}
1624
+ {where_clause}
1625
+ {order_limit_start_clause}
1626
+ """)
1627
+ traces_raw = self._query(query, where_vars, dict)
1628
+
1629
+ # Add total_spans and error_count to each trace
1630
+ result_traces = []
1631
+ for trace_data in traces_raw:
1632
+ id_obj = trace_data.get("id")
1633
+ trace_id_val = trace_data.get("trace_id") or (id_obj.id if id_obj is not None else None)
1634
+ if trace_id_val:
1635
+ count_query = f"SELECT count() as total FROM {spans_table} WHERE trace_id = $trace_id GROUP ALL"
1636
+ count_result = self._query_one(count_query, {"trace_id": trace_id_val}, dict)
1637
+ trace_data["total_spans"] = count_result.get("total", 0) if count_result else 0
1638
+
1639
+ error_query = f"SELECT count() as total FROM {spans_table} WHERE trace_id = $trace_id AND status_code = 'ERROR' GROUP ALL"
1640
+ error_result = self._query_one(error_query, {"trace_id": trace_id_val}, dict)
1641
+ trace_data["error_count"] = error_result.get("total", 0) if error_result else 0
1642
+
1643
+ result_traces.append(self._deserialize_trace(trace_data))
1644
+
1645
+ return result_traces, total_count
1646
+
1647
+ except Exception as e:
1648
+ log_error(f"Error getting traces: {e}")
1649
+ return [], 0
1650
+
1651
+ def get_trace_stats(
1652
+ self,
1653
+ user_id: Optional[str] = None,
1654
+ agent_id: Optional[str] = None,
1655
+ team_id: Optional[str] = None,
1656
+ workflow_id: Optional[str] = None,
1657
+ start_time: Optional[datetime] = None,
1658
+ end_time: Optional[datetime] = None,
1659
+ limit: Optional[int] = 20,
1660
+ page: Optional[int] = 1,
1661
+ ) -> tuple[List[Dict[str, Any]], int]:
1662
+ """Get trace statistics grouped by session.
1663
+
1664
+ Args:
1665
+ user_id: Filter by user ID.
1666
+ agent_id: Filter by agent ID.
1667
+ team_id: Filter by team ID.
1668
+ workflow_id: Filter by workflow ID.
1669
+ start_time: Filter sessions with traces created after this datetime.
1670
+ end_time: Filter sessions with traces created before this datetime.
1671
+ limit: Maximum number of sessions to return per page.
1672
+ page: Page number (1-indexed).
1673
+
1674
+ Returns:
1675
+ tuple[List[Dict], int]: Tuple of (list of session stats dicts, total count).
1676
+ Each dict contains: session_id, user_id, agent_id, team_id, workflow_id, total_traces,
1677
+ first_trace_at, last_trace_at.
1678
+ """
1679
+ try:
1680
+ table = self._get_table("traces", create_table_if_not_found=False)
1681
+
1682
+ # Build where clause
1683
+ where = WhereClause()
1684
+ where.and_("!!session_id", True, "=") # Ensure session_id is not null
1685
+ if user_id:
1686
+ where.and_("user_id", user_id)
1687
+ if agent_id:
1688
+ where.and_("agent_id", agent_id)
1689
+ if team_id:
1690
+ where.and_("team_id", team_id)
1691
+ if workflow_id:
1692
+ where.and_("workflow_id", workflow_id)
1693
+ if start_time:
1694
+ where.and_("created_at", start_time, ">=")
1695
+ if end_time:
1696
+ where.and_("created_at", end_time, "<=")
1697
+
1698
+ where_clause, where_vars = where.build()
1699
+
1700
+ # Get total count of unique sessions
1701
+ count_query = dedent(f"""
1702
+ SELECT count() as total FROM (
1703
+ SELECT session_id FROM {table}
1704
+ {where_clause}
1705
+ GROUP BY session_id
1706
+ ) GROUP ALL
1707
+ """)
1708
+ count_result = self._query_one(count_query, where_vars, dict)
1709
+ total_count = count_result.get("total", 0) if count_result else 0
1710
+
1711
+ # Query with aggregation
1712
+ order_limit_start_clause = order_limit_start("last_trace_at", "DESC", limit, page)
1713
+ query = dedent(f"""
1714
+ SELECT
1715
+ session_id,
1716
+ user_id,
1717
+ agent_id,
1718
+ team_id,
1719
+ workflow_id,
1720
+ count() AS total_traces,
1721
+ time::min(created_at) AS first_trace_at,
1722
+ time::max(created_at) AS last_trace_at
1723
+ FROM {table}
1724
+ {where_clause}
1725
+ GROUP BY session_id, user_id, agent_id, team_id, workflow_id
1726
+ {order_limit_start_clause}
1727
+ """)
1728
+ results = self._query(query, where_vars, dict)
1729
+
1730
+ # Convert datetime objects
1731
+ stats_list = []
1732
+ for row in results:
1733
+ stat = dict(row)
1734
+ if isinstance(stat.get("first_trace_at"), datetime):
1735
+ pass # Keep as datetime
1736
+ if isinstance(stat.get("last_trace_at"), datetime):
1737
+ pass # Keep as datetime
1738
+ stats_list.append(stat)
1739
+
1740
+ return stats_list, total_count
1741
+
1742
+ except Exception as e:
1743
+ log_error(f"Error getting trace stats: {e}")
1744
+ return [], 0
1745
+
1746
+ def _deserialize_trace(self, trace_data: dict) -> "Trace":
1747
+ """Helper to deserialize a trace record from SurrealDB."""
1748
+ from agno.tracing.schemas import Trace
1749
+
1750
+ # Handle RecordID for id field
1751
+ if isinstance(trace_data.get("id"), RecordID):
1752
+ if "trace_id" not in trace_data or not trace_data["trace_id"]:
1753
+ trace_data["trace_id"] = trace_data["id"].id
1754
+ del trace_data["id"]
1755
+
1756
+ # Convert datetime to ISO string for Trace.from_dict
1757
+ for field in ["start_time", "end_time", "created_at"]:
1758
+ if isinstance(trace_data.get(field), datetime):
1759
+ trace_data[field] = trace_data[field].isoformat()
1760
+
1761
+ return Trace.from_dict(trace_data)
1762
+
1763
+ # --- Spans ---
1764
+ def create_span(self, span: "Span") -> None:
1765
+ """Create a single span in the database.
1766
+
1767
+ Args:
1768
+ span: The Span object to store.
1769
+ """
1770
+ try:
1771
+ table = self._get_table("spans", create_table_if_not_found=True)
1772
+ record = RecordID(table, span.span_id)
1773
+
1774
+ span_dict = span.to_dict()
1775
+
1776
+ # Convert datetime fields
1777
+ if isinstance(span_dict.get("start_time"), str):
1778
+ span_dict["start_time"] = datetime.fromisoformat(span_dict["start_time"].replace("Z", "+00:00"))
1779
+ if isinstance(span_dict.get("end_time"), str):
1780
+ span_dict["end_time"] = datetime.fromisoformat(span_dict["end_time"].replace("Z", "+00:00"))
1781
+ if isinstance(span_dict.get("created_at"), str):
1782
+ span_dict["created_at"] = datetime.fromisoformat(span_dict["created_at"].replace("Z", "+00:00"))
1783
+
1784
+ self._query_one(
1785
+ "CREATE ONLY $record CONTENT $content",
1786
+ {"record": record, "content": span_dict},
1787
+ dict,
1788
+ )
1789
+
1790
+ except Exception as e:
1791
+ log_error(f"Error creating span: {e}")
1792
+
1793
+ def create_spans(self, spans: List) -> None:
1794
+ """Create multiple spans in the database as a batch.
1795
+
1796
+ Args:
1797
+ spans: List of Span objects to store.
1798
+ """
1799
+ if not spans:
1800
+ return
1801
+
1802
+ try:
1803
+ table = self._get_table("spans", create_table_if_not_found=True)
1804
+
1805
+ for span in spans:
1806
+ record = RecordID(table, span.span_id)
1807
+ span_dict = span.to_dict()
1808
+
1809
+ # Convert datetime fields
1810
+ if isinstance(span_dict.get("start_time"), str):
1811
+ span_dict["start_time"] = datetime.fromisoformat(span_dict["start_time"].replace("Z", "+00:00"))
1812
+ if isinstance(span_dict.get("end_time"), str):
1813
+ span_dict["end_time"] = datetime.fromisoformat(span_dict["end_time"].replace("Z", "+00:00"))
1814
+ if isinstance(span_dict.get("created_at"), str):
1815
+ span_dict["created_at"] = datetime.fromisoformat(span_dict["created_at"].replace("Z", "+00:00"))
1816
+
1817
+ self._query_one(
1818
+ "CREATE ONLY $record CONTENT $content",
1819
+ {"record": record, "content": span_dict},
1820
+ dict,
1821
+ )
1822
+
1823
+ except Exception as e:
1824
+ log_error(f"Error creating spans batch: {e}")
1825
+
1826
+ def get_span(self, span_id: str):
1827
+ """Get a single span by its span_id.
1828
+
1829
+ Args:
1830
+ span_id: The unique span identifier.
1831
+
1832
+ Returns:
1833
+ Optional[Span]: The span if found, None otherwise.
1834
+ """
1835
+ try:
1836
+ table = self._get_table("spans", create_table_if_not_found=False)
1837
+ record = RecordID(table, span_id)
1838
+
1839
+ span_data = self._query_one("SELECT * FROM ONLY $record", {"record": record}, dict)
1840
+ if not span_data:
1841
+ return None
1842
+
1843
+ return self._deserialize_span(span_data)
1844
+
1845
+ except Exception as e:
1846
+ log_error(f"Error getting span: {e}")
1847
+ return None
1848
+
1849
+ def get_spans(
1850
+ self,
1851
+ trace_id: Optional[str] = None,
1852
+ parent_span_id: Optional[str] = None,
1853
+ limit: Optional[int] = 1000,
1854
+ ) -> List:
1855
+ """Get spans matching the provided filters.
1856
+
1857
+ Args:
1858
+ trace_id: Filter by trace ID.
1859
+ parent_span_id: Filter by parent span ID.
1860
+ limit: Maximum number of spans to return.
1861
+
1862
+ Returns:
1863
+ List[Span]: List of matching spans.
1864
+ """
1865
+ try:
1866
+ table = self._get_table("spans", create_table_if_not_found=False)
1867
+
1868
+ # Build where clause
1869
+ where = WhereClause()
1870
+ if trace_id:
1871
+ where.and_("trace_id", trace_id)
1872
+ if parent_span_id:
1873
+ where.and_("parent_span_id", parent_span_id)
1874
+
1875
+ where_clause, where_vars = where.build()
1876
+
1877
+ # Query
1878
+ limit_clause = f"LIMIT {limit}" if limit else ""
1879
+ query = dedent(f"""
1880
+ SELECT * FROM {table}
1881
+ {where_clause}
1882
+ ORDER BY start_time ASC
1883
+ {limit_clause}
1884
+ """)
1885
+ spans_raw = self._query(query, where_vars, dict)
1886
+
1887
+ return [self._deserialize_span(s) for s in spans_raw]
1888
+
1889
+ except Exception as e:
1890
+ log_error(f"Error getting spans: {e}")
1891
+ return []
1892
+
1893
+ def _deserialize_span(self, span_data: dict) -> "Span":
1894
+ """Helper to deserialize a span record from SurrealDB."""
1895
+ from agno.tracing.schemas import Span
1896
+
1897
+ # Handle RecordID for id field
1898
+ if isinstance(span_data.get("id"), RecordID):
1899
+ if "span_id" not in span_data or not span_data["span_id"]:
1900
+ span_data["span_id"] = span_data["id"].id
1901
+ del span_data["id"]
1902
+
1903
+ # Convert datetime to ISO string for Span.from_dict
1904
+ for field in ["start_time", "end_time", "created_at"]:
1905
+ if isinstance(span_data.get(field), datetime):
1906
+ span_data[field] = span_data[field].isoformat()
1907
+
1908
+ return Span.from_dict(span_data)