agno 2.3.3-py3-none-any.whl → 2.3.5-py3-none-any.whl

This diff compares the contents of two package versions that were publicly released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in the public registry.
Files changed (108)
  1. agno/agent/agent.py +177 -41
  2. agno/culture/manager.py +2 -2
  3. agno/db/base.py +330 -8
  4. agno/db/dynamo/dynamo.py +722 -2
  5. agno/db/dynamo/schemas.py +127 -0
  6. agno/db/firestore/firestore.py +573 -1
  7. agno/db/firestore/schemas.py +40 -0
  8. agno/db/gcs_json/gcs_json_db.py +446 -1
  9. agno/db/in_memory/in_memory_db.py +143 -1
  10. agno/db/json/json_db.py +438 -1
  11. agno/db/mongo/async_mongo.py +522 -0
  12. agno/db/mongo/mongo.py +523 -1
  13. agno/db/mongo/schemas.py +29 -0
  14. agno/db/mysql/mysql.py +536 -3
  15. agno/db/mysql/schemas.py +38 -0
  16. agno/db/postgres/async_postgres.py +546 -14
  17. agno/db/postgres/postgres.py +535 -2
  18. agno/db/postgres/schemas.py +38 -0
  19. agno/db/redis/redis.py +468 -1
  20. agno/db/redis/schemas.py +32 -0
  21. agno/db/singlestore/schemas.py +38 -0
  22. agno/db/singlestore/singlestore.py +523 -1
  23. agno/db/sqlite/async_sqlite.py +548 -9
  24. agno/db/sqlite/schemas.py +38 -0
  25. agno/db/sqlite/sqlite.py +537 -5
  26. agno/db/sqlite/utils.py +6 -8
  27. agno/db/surrealdb/models.py +25 -0
  28. agno/db/surrealdb/surrealdb.py +548 -1
  29. agno/eval/accuracy.py +10 -4
  30. agno/eval/performance.py +10 -4
  31. agno/eval/reliability.py +22 -13
  32. agno/exceptions.py +11 -0
  33. agno/hooks/__init__.py +3 -0
  34. agno/hooks/decorator.py +164 -0
  35. agno/knowledge/chunking/semantic.py +2 -2
  36. agno/models/aimlapi/aimlapi.py +17 -0
  37. agno/models/anthropic/claude.py +19 -12
  38. agno/models/aws/bedrock.py +3 -4
  39. agno/models/aws/claude.py +5 -1
  40. agno/models/azure/ai_foundry.py +2 -2
  41. agno/models/azure/openai_chat.py +8 -0
  42. agno/models/cerebras/cerebras.py +61 -4
  43. agno/models/cerebras/cerebras_openai.py +17 -0
  44. agno/models/cohere/chat.py +5 -1
  45. agno/models/cometapi/cometapi.py +18 -1
  46. agno/models/dashscope/dashscope.py +2 -3
  47. agno/models/deepinfra/deepinfra.py +18 -1
  48. agno/models/deepseek/deepseek.py +2 -3
  49. agno/models/fireworks/fireworks.py +18 -1
  50. agno/models/google/gemini.py +8 -2
  51. agno/models/groq/groq.py +5 -2
  52. agno/models/internlm/internlm.py +18 -1
  53. agno/models/langdb/langdb.py +13 -1
  54. agno/models/litellm/chat.py +2 -2
  55. agno/models/litellm/litellm_openai.py +18 -1
  56. agno/models/meta/llama_openai.py +19 -2
  57. agno/models/nebius/nebius.py +2 -3
  58. agno/models/nvidia/nvidia.py +20 -3
  59. agno/models/openai/chat.py +17 -2
  60. agno/models/openai/responses.py +17 -2
  61. agno/models/openrouter/openrouter.py +21 -2
  62. agno/models/perplexity/perplexity.py +17 -1
  63. agno/models/portkey/portkey.py +7 -6
  64. agno/models/requesty/requesty.py +19 -2
  65. agno/models/response.py +2 -1
  66. agno/models/sambanova/sambanova.py +20 -3
  67. agno/models/siliconflow/siliconflow.py +19 -2
  68. agno/models/together/together.py +20 -3
  69. agno/models/vercel/v0.py +20 -3
  70. agno/models/vllm/vllm.py +19 -14
  71. agno/models/xai/xai.py +19 -2
  72. agno/os/app.py +104 -0
  73. agno/os/config.py +13 -0
  74. agno/os/interfaces/whatsapp/router.py +0 -1
  75. agno/os/mcp.py +1 -0
  76. agno/os/router.py +31 -0
  77. agno/os/routers/traces/__init__.py +3 -0
  78. agno/os/routers/traces/schemas.py +414 -0
  79. agno/os/routers/traces/traces.py +499 -0
  80. agno/os/schema.py +22 -1
  81. agno/os/utils.py +57 -0
  82. agno/run/agent.py +1 -0
  83. agno/run/base.py +17 -0
  84. agno/run/team.py +4 -0
  85. agno/session/team.py +1 -0
  86. agno/table.py +10 -0
  87. agno/team/team.py +215 -65
  88. agno/tools/function.py +10 -8
  89. agno/tools/nano_banana.py +1 -1
  90. agno/tracing/__init__.py +12 -0
  91. agno/tracing/exporter.py +157 -0
  92. agno/tracing/schemas.py +276 -0
  93. agno/tracing/setup.py +111 -0
  94. agno/utils/agent.py +4 -4
  95. agno/utils/hooks.py +56 -1
  96. agno/vectordb/qdrant/qdrant.py +22 -22
  97. agno/workflow/condition.py +8 -0
  98. agno/workflow/loop.py +8 -0
  99. agno/workflow/parallel.py +8 -0
  100. agno/workflow/router.py +8 -0
  101. agno/workflow/step.py +20 -0
  102. agno/workflow/steps.py +8 -0
  103. agno/workflow/workflow.py +83 -17
  104. {agno-2.3.3.dist-info → agno-2.3.5.dist-info}/METADATA +2 -2
  105. {agno-2.3.3.dist-info → agno-2.3.5.dist-info}/RECORD +108 -98
  106. {agno-2.3.3.dist-info → agno-2.3.5.dist-info}/WHEEL +0 -0
  107. {agno-2.3.3.dist-info → agno-2.3.5.dist-info}/licenses/LICENSE +0 -0
  108. {agno-2.3.3.dist-info → agno-2.3.5.dist-info}/top_level.txt +0 -0
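Most of this release is a new tracing subsystem: the agno/tracing/* modules, the agno/os/routers/traces/* API routes, and trace/span persistence methods added to every agno/db/* backend, which is where the bulk of the added lines in the list above come from. As orientation for the Redis diff below, here is a minimal sketch of how the two new RedisDb constructor arguments might be wired up; the import path, connection URL, and table names are illustrative assumptions, not taken from the diff.

    # Sketch only: import path and connection details are assumed, not shown in this diff.
    from agno.db.redis import RedisDb

    db = RedisDb(
        db_url="redis://localhost:6379",  # a pre-built redis_client can be passed instead of db_url
        traces_table="traces",            # new in 2.3.5: where Trace records are stored
        spans_table="spans",              # new in 2.3.5: where Span records are stored
    )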
agno/db/redis/redis.py CHANGED
@@ -1,8 +1,11 @@
 import time
 from datetime import date, datetime, timedelta, timezone
-from typing import Any, Dict, List, Optional, Tuple, Union
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
 from uuid import uuid4

+if TYPE_CHECKING:
+    from agno.tracing.schemas import Span, Trace
+
 from agno.db.base import BaseDb, SessionType
 from agno.db.redis.utils import (
     apply_filters,
@@ -48,6 +51,8 @@ class RedisDb(BaseDb):
         eval_table: Optional[str] = None,
         knowledge_table: Optional[str] = None,
         culture_table: Optional[str] = None,
+        traces_table: Optional[str] = None,
+        spans_table: Optional[str] = None,
     ):
         """
         Interface for interacting with a Redis database.
@@ -71,6 +76,8 @@ class RedisDb(BaseDb):
             eval_table (Optional[str]): Name of the table to store evaluation runs
             knowledge_table (Optional[str]): Name of the table to store knowledge documents
             culture_table (Optional[str]): Name of the table to store cultural knowledge
+            traces_table (Optional[str]): Name of the table to store traces
+            spans_table (Optional[str]): Name of the table to store spans

         Raises:
             ValueError: If neither redis_client nor db_url is provided.
@@ -88,6 +95,8 @@ class RedisDb(BaseDb):
             eval_table=eval_table,
             knowledge_table=knowledge_table,
             culture_table=culture_table,
+            traces_table=traces_table,
+            spans_table=spans_table,
         )

         self.db_prefix = db_prefix
@@ -126,6 +135,12 @@ class RedisDb(BaseDb):
         elif table_type == "culture":
             return self.culture_table_name

+        elif table_type == "traces":
+            return self.trace_table_name
+
+        elif table_type == "spans":
+            return self.span_table_name
+
         else:
             raise ValueError(f"Unknown table type: {table_type}")

@@ -1676,3 +1691,455 @@ class RedisDb(BaseDb):
         except Exception as e:
             log_error(f"Error upserting cultural knowledge: {e}")
             raise e
+
+    # --- Traces ---
+    def create_trace(self, trace: "Trace") -> None:
+        """Create a single trace record in the database.
+
+        Args:
+            trace: The Trace object to store (one per trace_id).
+        """
+        try:
+            # Check if trace already exists
+            existing = self._get_record("traces", trace.trace_id)
+
+            if existing:
+                # workflow (level 3) > team (level 2) > agent (level 1) > child/unknown (level 0)
+                def get_component_level(
+                    workflow_id: Optional[str], team_id: Optional[str], agent_id: Optional[str], name: str
+                ) -> int:
+                    # Check if name indicates a root span
+                    is_root_name = ".run" in name or ".arun" in name
+
+                    if not is_root_name:
+                        return 0  # Child span (not a root)
+                    elif workflow_id:
+                        return 3  # Workflow root
+                    elif team_id:
+                        return 2  # Team root
+                    elif agent_id:
+                        return 1  # Agent root
+                    else:
+                        return 0  # Unknown
+
+                existing_level = get_component_level(
+                    existing.get("workflow_id"),
+                    existing.get("team_id"),
+                    existing.get("agent_id"),
+                    existing.get("name", ""),
+                )
+                new_level = get_component_level(trace.workflow_id, trace.team_id, trace.agent_id, trace.name)
+
+                # Only update name if new trace is from a higher or equal level
+                should_update_name = new_level > existing_level
+
+                # Parse existing start_time to calculate correct duration
+                existing_start_time_str = existing.get("start_time")
+                if isinstance(existing_start_time_str, str):
+                    existing_start_time = datetime.fromisoformat(existing_start_time_str.replace("Z", "+00:00"))
+                else:
+                    existing_start_time = trace.start_time
+
+                recalculated_duration_ms = int((trace.end_time - existing_start_time).total_seconds() * 1000)
+
+                # Update existing record
+                existing["end_time"] = trace.end_time.isoformat()
+                existing["duration_ms"] = recalculated_duration_ms
+                existing["status"] = trace.status
+                if should_update_name:
+                    existing["name"] = trace.name
+
+                # Update context fields ONLY if new value is not None (preserve non-null values)
+                if trace.run_id is not None:
+                    existing["run_id"] = trace.run_id
+                if trace.session_id is not None:
+                    existing["session_id"] = trace.session_id
+                if trace.user_id is not None:
+                    existing["user_id"] = trace.user_id
+                if trace.agent_id is not None:
+                    existing["agent_id"] = trace.agent_id
+                if trace.team_id is not None:
+                    existing["team_id"] = trace.team_id
+                if trace.workflow_id is not None:
+                    existing["workflow_id"] = trace.workflow_id
+
+                log_debug(
+                    f" Updating trace with context: run_id={existing.get('run_id', 'unchanged')}, "
+                    f"session_id={existing.get('session_id', 'unchanged')}, "
+                    f"user_id={existing.get('user_id', 'unchanged')}, "
+                    f"agent_id={existing.get('agent_id', 'unchanged')}, "
+                    f"team_id={existing.get('team_id', 'unchanged')}, "
+                )
+
+                self._store_record(
+                    "traces",
+                    trace.trace_id,
+                    existing,
+                    index_fields=["run_id", "session_id", "user_id", "agent_id", "team_id", "workflow_id", "status"],
+                )
+            else:
+                trace_dict = trace.to_dict()
+                trace_dict.pop("total_spans", None)
+                trace_dict.pop("error_count", None)
+                self._store_record(
+                    "traces",
+                    trace.trace_id,
+                    trace_dict,
+                    index_fields=["run_id", "session_id", "user_id", "agent_id", "team_id", "workflow_id", "status"],
+                )
+
+        except Exception as e:
+            log_error(f"Error creating trace: {e}")
+            # Don't raise - tracing should not break the main application flow
+
+    def get_trace(
+        self,
+        trace_id: Optional[str] = None,
+        run_id: Optional[str] = None,
+    ):
+        """Get a single trace by trace_id or other filters.
+
+        Args:
+            trace_id: The unique trace identifier.
+            run_id: Filter by run ID (returns first match).
+
+        Returns:
+            Optional[Trace]: The trace if found, None otherwise.
+
+        Note:
+            If multiple filters are provided, trace_id takes precedence.
+            For other filters, the most recent trace is returned.
+        """
+        try:
+            from agno.tracing.schemas import Trace as TraceSchema
+
+            if trace_id:
+                result = self._get_record("traces", trace_id)
+                if result:
+                    # Calculate total_spans and error_count
+                    all_spans = self._get_all_records("spans")
+                    trace_spans = [s for s in all_spans if s.get("trace_id") == trace_id]
+                    result["total_spans"] = len(trace_spans)
+                    result["error_count"] = len([s for s in trace_spans if s.get("status_code") == "ERROR"])
+                    return TraceSchema.from_dict(result)
+                return None
+
+            elif run_id:
+                all_traces = self._get_all_records("traces")
+                matching = [t for t in all_traces if t.get("run_id") == run_id]
+                if matching:
+                    # Sort by start_time descending and get most recent
+                    matching.sort(key=lambda x: x.get("start_time", ""), reverse=True)
+                    result = matching[0]
+                    # Calculate total_spans and error_count
+                    all_spans = self._get_all_records("spans")
+                    trace_spans = [s for s in all_spans if s.get("trace_id") == result.get("trace_id")]
+                    result["total_spans"] = len(trace_spans)
+                    result["error_count"] = len([s for s in trace_spans if s.get("status_code") == "ERROR"])
+                    return TraceSchema.from_dict(result)
+                return None
+
+            else:
+                log_debug("get_trace called without any filter parameters")
+                return None
+
+        except Exception as e:
+            log_error(f"Error getting trace: {e}")
+            return None
+
+    def get_traces(
+        self,
+        run_id: Optional[str] = None,
+        session_id: Optional[str] = None,
+        user_id: Optional[str] = None,
+        agent_id: Optional[str] = None,
+        team_id: Optional[str] = None,
+        workflow_id: Optional[str] = None,
+        status: Optional[str] = None,
+        start_time: Optional[datetime] = None,
+        end_time: Optional[datetime] = None,
+        limit: Optional[int] = 20,
+        page: Optional[int] = 1,
+    ) -> tuple[List, int]:
+        """Get traces matching the provided filters.
+
+        Args:
+            run_id: Filter by run ID.
+            session_id: Filter by session ID.
+            user_id: Filter by user ID.
+            agent_id: Filter by agent ID.
+            team_id: Filter by team ID.
+            workflow_id: Filter by workflow ID.
+            status: Filter by status (OK, ERROR, UNSET).
+            start_time: Filter traces starting after this datetime.
+            end_time: Filter traces ending before this datetime.
+            limit: Maximum number of traces to return per page.
+            page: Page number (1-indexed).
+
+        Returns:
+            tuple[List[Trace], int]: Tuple of (list of matching traces, total count).
+        """
+        try:
+            from agno.tracing.schemas import Trace as TraceSchema
+
+            log_debug(
+                f"get_traces called with filters: run_id={run_id}, session_id={session_id}, "
+                f"user_id={user_id}, agent_id={agent_id}, page={page}, limit={limit}"
+            )
+
+            all_traces = self._get_all_records("traces")
+            all_spans = self._get_all_records("spans")
+
+            # Apply filters
+            filtered_traces = []
+            for trace in all_traces:
+                if run_id and trace.get("run_id") != run_id:
+                    continue
+                if session_id and trace.get("session_id") != session_id:
+                    continue
+                if user_id and trace.get("user_id") != user_id:
+                    continue
+                if agent_id and trace.get("agent_id") != agent_id:
+                    continue
+                if team_id and trace.get("team_id") != team_id:
+                    continue
+                if workflow_id and trace.get("workflow_id") != workflow_id:
+                    continue
+                if status and trace.get("status") != status:
+                    continue
+                if start_time:
+                    trace_start = trace.get("start_time", "")
+                    if trace_start and trace_start < start_time.isoformat():
+                        continue
+                if end_time:
+                    trace_end = trace.get("end_time", "")
+                    if trace_end and trace_end > end_time.isoformat():
+                        continue
+
+                filtered_traces.append(trace)
+
+            total_count = len(filtered_traces)
+            log_debug(f"Total matching traces: {total_count}")
+
+            # Sort by start_time descending
+            filtered_traces.sort(key=lambda x: x.get("start_time", ""), reverse=True)
+
+            # Apply pagination
+            paginated_traces = apply_pagination(records=filtered_traces, limit=limit, page=page)
+            log_debug(f"Returning page {page} with {len(paginated_traces)} traces")
+
+            traces = []
+            for row in paginated_traces:
+                # Calculate total_spans and error_count
+                trace_spans = [s for s in all_spans if s.get("trace_id") == row.get("trace_id")]
+                row["total_spans"] = len(trace_spans)
+                row["error_count"] = len([s for s in trace_spans if s.get("status_code") == "ERROR"])
+                traces.append(TraceSchema.from_dict(row))
+
+            return traces, total_count
+
+        except Exception as e:
+            log_error(f"Error getting traces: {e}")
+            return [], 0
+
+    def get_trace_stats(
+        self,
+        user_id: Optional[str] = None,
+        agent_id: Optional[str] = None,
+        team_id: Optional[str] = None,
+        workflow_id: Optional[str] = None,
+        start_time: Optional[datetime] = None,
+        end_time: Optional[datetime] = None,
+        limit: Optional[int] = 20,
+        page: Optional[int] = 1,
+    ) -> tuple[List[Dict[str, Any]], int]:
+        """Get trace statistics grouped by session.
+
+        Args:
+            user_id: Filter by user ID.
+            agent_id: Filter by agent ID.
+            team_id: Filter by team ID.
+            workflow_id: Filter by workflow ID.
+            start_time: Filter sessions with traces created after this datetime.
+            end_time: Filter sessions with traces created before this datetime.
+            limit: Maximum number of sessions to return per page.
+            page: Page number (1-indexed).
+
+        Returns:
+            tuple[List[Dict], int]: Tuple of (list of session stats dicts, total count).
+                Each dict contains: session_id, user_id, agent_id, team_id, total_traces,
+                first_trace_at, last_trace_at.
+        """
+        try:
+            log_debug(
+                f"get_trace_stats called with filters: user_id={user_id}, agent_id={agent_id}, "
+                f"workflow_id={workflow_id}, team_id={team_id}, "
+                f"start_time={start_time}, end_time={end_time}, page={page}, limit={limit}"
+            )
+
+            all_traces = self._get_all_records("traces")
+
+            # Filter traces and group by session_id
+            session_stats: Dict[str, Dict[str, Any]] = {}
+            for trace in all_traces:
+                trace_session_id = trace.get("session_id")
+                if not trace_session_id:
+                    continue
+
+                # Apply filters
+                if user_id and trace.get("user_id") != user_id:
+                    continue
+                if agent_id and trace.get("agent_id") != agent_id:
+                    continue
+                if team_id and trace.get("team_id") != team_id:
+                    continue
+                if workflow_id and trace.get("workflow_id") != workflow_id:
+                    continue
+
+                created_at = trace.get("created_at", "")
+                if start_time and created_at < start_time.isoformat():
+                    continue
+                if end_time and created_at > end_time.isoformat():
+                    continue
+
+                if trace_session_id not in session_stats:
+                    session_stats[trace_session_id] = {
+                        "session_id": trace_session_id,
+                        "user_id": trace.get("user_id"),
+                        "agent_id": trace.get("agent_id"),
+                        "team_id": trace.get("team_id"),
+                        "workflow_id": trace.get("workflow_id"),
+                        "total_traces": 0,
+                        "first_trace_at": created_at,
+                        "last_trace_at": created_at,
+                    }
+
+                session_stats[trace_session_id]["total_traces"] += 1
+                if created_at < session_stats[trace_session_id]["first_trace_at"]:
+                    session_stats[trace_session_id]["first_trace_at"] = created_at
+                if created_at > session_stats[trace_session_id]["last_trace_at"]:
+                    session_stats[trace_session_id]["last_trace_at"] = created_at
+
+            # Convert to list and sort by last_trace_at descending
+            stats_list = list(session_stats.values())
+            stats_list.sort(key=lambda x: x.get("last_trace_at", ""), reverse=True)
+
+            total_count = len(stats_list)
+            log_debug(f"Total matching sessions: {total_count}")
+
+            # Apply pagination
+            paginated_stats = apply_pagination(records=stats_list, limit=limit, page=page)
+            log_debug(f"Returning page {page} with {len(paginated_stats)} session stats")
+
+            # Convert ISO strings to datetime objects
+            for stat in paginated_stats:
+                first_trace_at_str = stat["first_trace_at"]
+                last_trace_at_str = stat["last_trace_at"]
+                stat["first_trace_at"] = datetime.fromisoformat(first_trace_at_str.replace("Z", "+00:00"))
+                stat["last_trace_at"] = datetime.fromisoformat(last_trace_at_str.replace("Z", "+00:00"))
+
+            return paginated_stats, total_count
+
+        except Exception as e:
+            log_error(f"Error getting trace stats: {e}")
+            return [], 0
+
+    # --- Spans ---
+    def create_span(self, span: "Span") -> None:
+        """Create a single span in the database.
+
+        Args:
+            span: The Span object to store.
+        """
+        try:
+            self._store_record(
+                "spans",
+                span.span_id,
+                span.to_dict(),
+                index_fields=["trace_id", "parent_span_id"],
+            )
+
+        except Exception as e:
+            log_error(f"Error creating span: {e}")
+
+    def create_spans(self, spans: List) -> None:
+        """Create multiple spans in the database as a batch.
+
+        Args:
+            spans: List of Span objects to store.
+        """
+        if not spans:
+            return
+
+        try:
+            for span in spans:
+                self._store_record(
+                    "spans",
+                    span.span_id,
+                    span.to_dict(),
+                    index_fields=["trace_id", "parent_span_id"],
+                )
+
+        except Exception as e:
+            log_error(f"Error creating spans batch: {e}")
+
+    def get_span(self, span_id: str):
+        """Get a single span by its span_id.
+
+        Args:
+            span_id: The unique span identifier.
+
+        Returns:
+            Optional[Span]: The span if found, None otherwise.
+        """
+        try:
+            from agno.tracing.schemas import Span as SpanSchema
+
+            result = self._get_record("spans", span_id)
+            if result:
+                return SpanSchema.from_dict(result)
+            return None
+
+        except Exception as e:
+            log_error(f"Error getting span: {e}")
+            return None
+
+    def get_spans(
+        self,
+        trace_id: Optional[str] = None,
+        parent_span_id: Optional[str] = None,
+        limit: Optional[int] = 1000,
+    ) -> List:
+        """Get spans matching the provided filters.
+
+        Args:
+            trace_id: Filter by trace ID.
+            parent_span_id: Filter by parent span ID.
+            limit: Maximum number of spans to return.
+
+        Returns:
+            List[Span]: List of matching spans.
+        """
+        try:
+            from agno.tracing.schemas import Span as SpanSchema
+
+            all_spans = self._get_all_records("spans")
+
+            # Apply filters
+            filtered_spans = []
+            for span in all_spans:
+                if trace_id and span.get("trace_id") != trace_id:
+                    continue
+                if parent_span_id and span.get("parent_span_id") != parent_span_id:
+                    continue
+                filtered_spans.append(span)
+
+            # Apply limit
+            if limit:
+                filtered_spans = filtered_spans[:limit]
+
+            return [SpanSchema.from_dict(s) for s in filtered_spans]
+
+        except Exception as e:
+            log_error(f"Error getting spans: {e}")
+            return []
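Taken together, the additions above give RedisDb a write path (create_trace, create_span, create_spans) and a read path (get_trace, get_traces, get_trace_stats, get_span, get_spans) for tracing data. When a trace record already exists, create_trace recomputes duration_ms from the stored start_time and only replaces the stored name when the incoming root span comes from a strictly higher component level (workflow over team over agent). A usage sketch against the db instance from the earlier example follows; the filter values are invented, and the attribute names on the returned objects are assumed to match the fields used in the diff.

    # Sketch only: IDs and filter values below are illustrative.
    traces, total = db.get_traces(user_id="user-123", status="ERROR", limit=20, page=1)
    print(f"{total} traces matched the filters")
    for trace in traces:
        # total_spans and error_count are recomputed from the spans store on read
        print(trace.trace_id, trace.name, trace.duration_ms, trace.status)
        for span in db.get_spans(trace_id=trace.trace_id):
            print("  span:", span.name, span.status_code)

    # Per-session rollups: one dict per session_id with total_traces and first/last trace timestamps
    stats, session_count = db.get_trace_stats(user_id="user-123", limit=10, page=1)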
agno/db/redis/schemas.py CHANGED
@@ -97,6 +97,36 @@ CULTURAL_KNOWLEDGE_SCHEMA = {
     "team_id": {"type": "string"},
 }

+TRACE_SCHEMA = {
+    "trace_id": {"type": "string", "primary_key": True},
+    "name": {"type": "string"},
+    "status": {"type": "string"},
+    "duration_ms": {"type": "integer"},
+    "run_id": {"type": "string"},
+    "session_id": {"type": "string"},
+    "user_id": {"type": "string"},
+    "agent_id": {"type": "string"},
+    "team_id": {"type": "string"},
+    "workflow_id": {"type": "string"},
+    "start_time": {"type": "string"},
+    "end_time": {"type": "string"},
+    "created_at": {"type": "string"},
+}
+
+SPAN_SCHEMA = {
+    "span_id": {"type": "string", "primary_key": True},
+    "trace_id": {"type": "string"},
+    "parent_span_id": {"type": "string"},
+    "name": {"type": "string"},
+    "span_kind": {"type": "string"},
+    "status_code": {"type": "string"},
+    "status_message": {"type": "string"},
+    "start_time": {"type": "string"},
+    "end_time": {"type": "string"},
+    "attributes": {"type": "json"},
+    "created_at": {"type": "string"},
+}
+

 def get_table_schema_definition(table_type: str) -> dict[str, Any]:
     """
@@ -118,6 +148,8 @@ def get_table_schema_definition(table_type: str) -> dict[str, Any]:
         "evals": EVAL_SCHEMA,
         "knowledge": KNOWLEDGE_SCHEMA,
         "culture": CULTURAL_KNOWLEDGE_SCHEMA,
+        "traces": TRACE_SCHEMA,
+        "spans": SPAN_SCHEMA,
     }

     schema = schemas.get(table_type, {})
@@ -115,6 +115,42 @@ VERSIONS_TABLE_SCHEMA = {
     "updated_at": {"type": lambda: String(128), "nullable": True},
 }

+TRACE_TABLE_SCHEMA = {
+    "trace_id": {"type": lambda: String(128), "primary_key": True, "nullable": False},
+    "name": {"type": lambda: String(512), "nullable": False},
+    "status": {"type": lambda: String(20), "nullable": False, "index": True},
+    "start_time": {"type": lambda: String(64), "nullable": False, "index": True},  # ISO 8601 datetime string
+    "end_time": {"type": lambda: String(64), "nullable": False},  # ISO 8601 datetime string
+    "duration_ms": {"type": BigInteger, "nullable": False},
+    "run_id": {"type": lambda: String(128), "nullable": True, "index": True},
+    "session_id": {"type": lambda: String(128), "nullable": True, "index": True},
+    "user_id": {"type": lambda: String(128), "nullable": True, "index": True},
+    "agent_id": {"type": lambda: String(128), "nullable": True, "index": True},
+    "team_id": {"type": lambda: String(128), "nullable": True, "index": True},
+    "workflow_id": {"type": lambda: String(128), "nullable": True, "index": True},
+    "created_at": {"type": lambda: String(64), "nullable": False, "index": True},  # ISO 8601 datetime string
+}
+
+SPAN_TABLE_SCHEMA = {
+    "span_id": {"type": lambda: String(128), "primary_key": True, "nullable": False},
+    "trace_id": {
+        "type": lambda: String(128),
+        "nullable": False,
+        "index": True,
+        "foreign_key": "agno_traces.trace_id",  # Foreign key to traces table
+    },
+    "parent_span_id": {"type": lambda: String(128), "nullable": True, "index": True},
+    "name": {"type": lambda: String(512), "nullable": False},
+    "span_kind": {"type": lambda: String(50), "nullable": False},
+    "status_code": {"type": lambda: String(20), "nullable": False},
+    "status_message": {"type": Text, "nullable": True},
+    "start_time": {"type": lambda: String(64), "nullable": False, "index": True},  # ISO 8601 datetime string
+    "end_time": {"type": lambda: String(64), "nullable": False},  # ISO 8601 datetime string
+    "duration_ms": {"type": BigInteger, "nullable": False},
+    "attributes": {"type": JSON, "nullable": True},
+    "created_at": {"type": lambda: String(64), "nullable": False, "index": True},  # ISO 8601 datetime string
+}
+

 def get_table_schema_definition(table_type: str) -> dict[str, Any]:
     """
@@ -132,6 +168,8 @@ def get_table_schema_definition(table_type: str) -> dict[str, Any]:
         "knowledge": KNOWLEDGE_TABLE_SCHEMA,
         "culture": CULTURAL_KNOWLEDGE_TABLE_SCHEMA,
        "versions": VERSIONS_TABLE_SCHEMA,
+        "traces": TRACE_TABLE_SCHEMA,
+        "spans": SPAN_TABLE_SCHEMA,
     }
     schema = schemas.get(table_type, {})
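Two schema layouts are added for the new tables. The Redis variant above declares every field with plain string/json types for the key-value layout, while the second pair of hunks (TRACE_TABLE_SCHEMA and SPAN_TABLE_SCHEMA, built from String/BigInteger/Text/JSON column factories, with a foreign key from the span's trace_id to agno_traces.trace_id) appears to belong to one of the SQL-backed schemas modules in the file list, all of which gain the same +38 lines. A minimal sketch of resolving the new definitions through get_table_schema_definition, using the Redis module path from the header above; the printed values follow directly from the schema dictionaries in the diff.

    # Sketch only: demonstrates the "traces"/"spans" lookup added to get_table_schema_definition.
    from agno.db.redis.schemas import get_table_schema_definition

    trace_schema = get_table_schema_definition("traces")
    span_schema = get_table_schema_definition("spans")
    print(trace_schema["trace_id"])   # {"type": "string", "primary_key": True}
    print(span_schema["attributes"])  # {"type": "json"}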