agno 2.1.2__py3-none-any.whl → 2.3.13__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (314)
  1. agno/agent/agent.py +5540 -2273
  2. agno/api/api.py +2 -0
  3. agno/api/os.py +1 -1
  4. agno/compression/__init__.py +3 -0
  5. agno/compression/manager.py +247 -0
  6. agno/culture/__init__.py +3 -0
  7. agno/culture/manager.py +956 -0
  8. agno/db/async_postgres/__init__.py +3 -0
  9. agno/db/base.py +689 -6
  10. agno/db/dynamo/dynamo.py +933 -37
  11. agno/db/dynamo/schemas.py +174 -10
  12. agno/db/dynamo/utils.py +63 -4
  13. agno/db/firestore/firestore.py +831 -9
  14. agno/db/firestore/schemas.py +51 -0
  15. agno/db/firestore/utils.py +102 -4
  16. agno/db/gcs_json/gcs_json_db.py +660 -12
  17. agno/db/gcs_json/utils.py +60 -26
  18. agno/db/in_memory/in_memory_db.py +287 -14
  19. agno/db/in_memory/utils.py +60 -2
  20. agno/db/json/json_db.py +590 -14
  21. agno/db/json/utils.py +60 -26
  22. agno/db/migrations/manager.py +199 -0
  23. agno/db/migrations/v1_to_v2.py +43 -13
  24. agno/db/migrations/versions/__init__.py +0 -0
  25. agno/db/migrations/versions/v2_3_0.py +938 -0
  26. agno/db/mongo/__init__.py +15 -1
  27. agno/db/mongo/async_mongo.py +2760 -0
  28. agno/db/mongo/mongo.py +879 -11
  29. agno/db/mongo/schemas.py +42 -0
  30. agno/db/mongo/utils.py +80 -8
  31. agno/db/mysql/__init__.py +2 -1
  32. agno/db/mysql/async_mysql.py +2912 -0
  33. agno/db/mysql/mysql.py +946 -68
  34. agno/db/mysql/schemas.py +72 -10
  35. agno/db/mysql/utils.py +198 -7
  36. agno/db/postgres/__init__.py +2 -1
  37. agno/db/postgres/async_postgres.py +2579 -0
  38. agno/db/postgres/postgres.py +942 -57
  39. agno/db/postgres/schemas.py +81 -18
  40. agno/db/postgres/utils.py +164 -2
  41. agno/db/redis/redis.py +671 -7
  42. agno/db/redis/schemas.py +50 -0
  43. agno/db/redis/utils.py +65 -7
  44. agno/db/schemas/__init__.py +2 -1
  45. agno/db/schemas/culture.py +120 -0
  46. agno/db/schemas/evals.py +1 -0
  47. agno/db/schemas/memory.py +17 -2
  48. agno/db/singlestore/schemas.py +63 -0
  49. agno/db/singlestore/singlestore.py +949 -83
  50. agno/db/singlestore/utils.py +60 -2
  51. agno/db/sqlite/__init__.py +2 -1
  52. agno/db/sqlite/async_sqlite.py +2911 -0
  53. agno/db/sqlite/schemas.py +62 -0
  54. agno/db/sqlite/sqlite.py +965 -46
  55. agno/db/sqlite/utils.py +169 -8
  56. agno/db/surrealdb/__init__.py +3 -0
  57. agno/db/surrealdb/metrics.py +292 -0
  58. agno/db/surrealdb/models.py +334 -0
  59. agno/db/surrealdb/queries.py +71 -0
  60. agno/db/surrealdb/surrealdb.py +1908 -0
  61. agno/db/surrealdb/utils.py +147 -0
  62. agno/db/utils.py +2 -0
  63. agno/eval/__init__.py +10 -0
  64. agno/eval/accuracy.py +75 -55
  65. agno/eval/agent_as_judge.py +861 -0
  66. agno/eval/base.py +29 -0
  67. agno/eval/performance.py +16 -7
  68. agno/eval/reliability.py +28 -16
  69. agno/eval/utils.py +35 -17
  70. agno/exceptions.py +27 -2
  71. agno/filters.py +354 -0
  72. agno/guardrails/prompt_injection.py +1 -0
  73. agno/hooks/__init__.py +3 -0
  74. agno/hooks/decorator.py +164 -0
  75. agno/integrations/discord/client.py +1 -1
  76. agno/knowledge/chunking/agentic.py +13 -10
  77. agno/knowledge/chunking/fixed.py +4 -1
  78. agno/knowledge/chunking/semantic.py +9 -4
  79. agno/knowledge/chunking/strategy.py +59 -15
  80. agno/knowledge/embedder/fastembed.py +1 -1
  81. agno/knowledge/embedder/nebius.py +1 -1
  82. agno/knowledge/embedder/ollama.py +8 -0
  83. agno/knowledge/embedder/openai.py +8 -8
  84. agno/knowledge/embedder/sentence_transformer.py +6 -2
  85. agno/knowledge/embedder/vllm.py +262 -0
  86. agno/knowledge/knowledge.py +1618 -318
  87. agno/knowledge/reader/base.py +6 -2
  88. agno/knowledge/reader/csv_reader.py +8 -10
  89. agno/knowledge/reader/docx_reader.py +5 -6
  90. agno/knowledge/reader/field_labeled_csv_reader.py +16 -20
  91. agno/knowledge/reader/json_reader.py +5 -4
  92. agno/knowledge/reader/markdown_reader.py +8 -8
  93. agno/knowledge/reader/pdf_reader.py +17 -19
  94. agno/knowledge/reader/pptx_reader.py +101 -0
  95. agno/knowledge/reader/reader_factory.py +32 -3
  96. agno/knowledge/reader/s3_reader.py +3 -3
  97. agno/knowledge/reader/tavily_reader.py +193 -0
  98. agno/knowledge/reader/text_reader.py +22 -10
  99. agno/knowledge/reader/web_search_reader.py +1 -48
  100. agno/knowledge/reader/website_reader.py +10 -10
  101. agno/knowledge/reader/wikipedia_reader.py +33 -1
  102. agno/knowledge/types.py +1 -0
  103. agno/knowledge/utils.py +72 -7
  104. agno/media.py +22 -6
  105. agno/memory/__init__.py +14 -1
  106. agno/memory/manager.py +544 -83
  107. agno/memory/strategies/__init__.py +15 -0
  108. agno/memory/strategies/base.py +66 -0
  109. agno/memory/strategies/summarize.py +196 -0
  110. agno/memory/strategies/types.py +37 -0
  111. agno/models/aimlapi/aimlapi.py +17 -0
  112. agno/models/anthropic/claude.py +515 -40
  113. agno/models/aws/bedrock.py +102 -21
  114. agno/models/aws/claude.py +131 -274
  115. agno/models/azure/ai_foundry.py +41 -19
  116. agno/models/azure/openai_chat.py +39 -8
  117. agno/models/base.py +1249 -525
  118. agno/models/cerebras/cerebras.py +91 -21
  119. agno/models/cerebras/cerebras_openai.py +21 -2
  120. agno/models/cohere/chat.py +40 -6
  121. agno/models/cometapi/cometapi.py +18 -1
  122. agno/models/dashscope/dashscope.py +2 -3
  123. agno/models/deepinfra/deepinfra.py +18 -1
  124. agno/models/deepseek/deepseek.py +69 -3
  125. agno/models/fireworks/fireworks.py +18 -1
  126. agno/models/google/gemini.py +877 -80
  127. agno/models/google/utils.py +22 -0
  128. agno/models/groq/groq.py +51 -18
  129. agno/models/huggingface/huggingface.py +17 -6
  130. agno/models/ibm/watsonx.py +16 -6
  131. agno/models/internlm/internlm.py +18 -1
  132. agno/models/langdb/langdb.py +13 -1
  133. agno/models/litellm/chat.py +44 -9
  134. agno/models/litellm/litellm_openai.py +18 -1
  135. agno/models/message.py +28 -5
  136. agno/models/meta/llama.py +47 -14
  137. agno/models/meta/llama_openai.py +22 -17
  138. agno/models/mistral/mistral.py +8 -4
  139. agno/models/nebius/nebius.py +6 -7
  140. agno/models/nvidia/nvidia.py +20 -3
  141. agno/models/ollama/chat.py +24 -8
  142. agno/models/openai/chat.py +104 -29
  143. agno/models/openai/responses.py +101 -81
  144. agno/models/openrouter/openrouter.py +60 -3
  145. agno/models/perplexity/perplexity.py +17 -1
  146. agno/models/portkey/portkey.py +7 -6
  147. agno/models/requesty/requesty.py +24 -4
  148. agno/models/response.py +73 -2
  149. agno/models/sambanova/sambanova.py +20 -3
  150. agno/models/siliconflow/siliconflow.py +19 -2
  151. agno/models/together/together.py +20 -3
  152. agno/models/utils.py +254 -8
  153. agno/models/vercel/v0.py +20 -3
  154. agno/models/vertexai/__init__.py +0 -0
  155. agno/models/vertexai/claude.py +190 -0
  156. agno/models/vllm/vllm.py +19 -14
  157. agno/models/xai/xai.py +19 -2
  158. agno/os/app.py +549 -152
  159. agno/os/auth.py +190 -3
  160. agno/os/config.py +23 -0
  161. agno/os/interfaces/a2a/router.py +8 -11
  162. agno/os/interfaces/a2a/utils.py +1 -1
  163. agno/os/interfaces/agui/router.py +18 -3
  164. agno/os/interfaces/agui/utils.py +152 -39
  165. agno/os/interfaces/slack/router.py +55 -37
  166. agno/os/interfaces/slack/slack.py +9 -1
  167. agno/os/interfaces/whatsapp/router.py +0 -1
  168. agno/os/interfaces/whatsapp/security.py +3 -1
  169. agno/os/mcp.py +110 -52
  170. agno/os/middleware/__init__.py +2 -0
  171. agno/os/middleware/jwt.py +676 -112
  172. agno/os/router.py +40 -1478
  173. agno/os/routers/agents/__init__.py +3 -0
  174. agno/os/routers/agents/router.py +599 -0
  175. agno/os/routers/agents/schema.py +261 -0
  176. agno/os/routers/evals/evals.py +96 -39
  177. agno/os/routers/evals/schemas.py +65 -33
  178. agno/os/routers/evals/utils.py +80 -10
  179. agno/os/routers/health.py +10 -4
  180. agno/os/routers/knowledge/knowledge.py +196 -38
  181. agno/os/routers/knowledge/schemas.py +82 -22
  182. agno/os/routers/memory/memory.py +279 -52
  183. agno/os/routers/memory/schemas.py +46 -17
  184. agno/os/routers/metrics/metrics.py +20 -8
  185. agno/os/routers/metrics/schemas.py +16 -16
  186. agno/os/routers/session/session.py +462 -34
  187. agno/os/routers/teams/__init__.py +3 -0
  188. agno/os/routers/teams/router.py +512 -0
  189. agno/os/routers/teams/schema.py +257 -0
  190. agno/os/routers/traces/__init__.py +3 -0
  191. agno/os/routers/traces/schemas.py +414 -0
  192. agno/os/routers/traces/traces.py +499 -0
  193. agno/os/routers/workflows/__init__.py +3 -0
  194. agno/os/routers/workflows/router.py +624 -0
  195. agno/os/routers/workflows/schema.py +75 -0
  196. agno/os/schema.py +256 -693
  197. agno/os/scopes.py +469 -0
  198. agno/os/utils.py +514 -36
  199. agno/reasoning/anthropic.py +80 -0
  200. agno/reasoning/gemini.py +73 -0
  201. agno/reasoning/openai.py +5 -0
  202. agno/reasoning/vertexai.py +76 -0
  203. agno/run/__init__.py +6 -0
  204. agno/run/agent.py +155 -32
  205. agno/run/base.py +55 -3
  206. agno/run/requirement.py +181 -0
  207. agno/run/team.py +125 -38
  208. agno/run/workflow.py +72 -18
  209. agno/session/agent.py +102 -89
  210. agno/session/summary.py +56 -15
  211. agno/session/team.py +164 -90
  212. agno/session/workflow.py +405 -40
  213. agno/table.py +10 -0
  214. agno/team/team.py +3974 -1903
  215. agno/tools/dalle.py +2 -4
  216. agno/tools/eleven_labs.py +23 -25
  217. agno/tools/exa.py +21 -16
  218. agno/tools/file.py +153 -23
  219. agno/tools/file_generation.py +16 -10
  220. agno/tools/firecrawl.py +15 -7
  221. agno/tools/function.py +193 -38
  222. agno/tools/gmail.py +238 -14
  223. agno/tools/google_drive.py +271 -0
  224. agno/tools/googlecalendar.py +36 -8
  225. agno/tools/googlesheets.py +20 -5
  226. agno/tools/jira.py +20 -0
  227. agno/tools/mcp/__init__.py +10 -0
  228. agno/tools/mcp/mcp.py +331 -0
  229. agno/tools/mcp/multi_mcp.py +347 -0
  230. agno/tools/mcp/params.py +24 -0
  231. agno/tools/mcp_toolbox.py +3 -3
  232. agno/tools/models/nebius.py +5 -5
  233. agno/tools/models_labs.py +20 -10
  234. agno/tools/nano_banana.py +151 -0
  235. agno/tools/notion.py +204 -0
  236. agno/tools/parallel.py +314 -0
  237. agno/tools/postgres.py +76 -36
  238. agno/tools/redshift.py +406 -0
  239. agno/tools/scrapegraph.py +1 -1
  240. agno/tools/shopify.py +1519 -0
  241. agno/tools/slack.py +18 -3
  242. agno/tools/spotify.py +919 -0
  243. agno/tools/tavily.py +146 -0
  244. agno/tools/toolkit.py +25 -0
  245. agno/tools/workflow.py +8 -1
  246. agno/tools/yfinance.py +12 -11
  247. agno/tracing/__init__.py +12 -0
  248. agno/tracing/exporter.py +157 -0
  249. agno/tracing/schemas.py +276 -0
  250. agno/tracing/setup.py +111 -0
  251. agno/utils/agent.py +938 -0
  252. agno/utils/cryptography.py +22 -0
  253. agno/utils/dttm.py +33 -0
  254. agno/utils/events.py +151 -3
  255. agno/utils/gemini.py +15 -5
  256. agno/utils/hooks.py +118 -4
  257. agno/utils/http.py +113 -2
  258. agno/utils/knowledge.py +12 -5
  259. agno/utils/log.py +1 -0
  260. agno/utils/mcp.py +92 -2
  261. agno/utils/media.py +187 -1
  262. agno/utils/merge_dict.py +3 -3
  263. agno/utils/message.py +60 -0
  264. agno/utils/models/ai_foundry.py +9 -2
  265. agno/utils/models/claude.py +49 -14
  266. agno/utils/models/cohere.py +9 -2
  267. agno/utils/models/llama.py +9 -2
  268. agno/utils/models/mistral.py +4 -2
  269. agno/utils/print_response/agent.py +109 -16
  270. agno/utils/print_response/team.py +223 -30
  271. agno/utils/print_response/workflow.py +251 -34
  272. agno/utils/streamlit.py +1 -1
  273. agno/utils/team.py +98 -9
  274. agno/utils/tokens.py +657 -0
  275. agno/vectordb/base.py +39 -7
  276. agno/vectordb/cassandra/cassandra.py +21 -5
  277. agno/vectordb/chroma/chromadb.py +43 -12
  278. agno/vectordb/clickhouse/clickhousedb.py +21 -5
  279. agno/vectordb/couchbase/couchbase.py +29 -5
  280. agno/vectordb/lancedb/lance_db.py +92 -181
  281. agno/vectordb/langchaindb/langchaindb.py +24 -4
  282. agno/vectordb/lightrag/lightrag.py +17 -3
  283. agno/vectordb/llamaindex/llamaindexdb.py +25 -5
  284. agno/vectordb/milvus/milvus.py +50 -37
  285. agno/vectordb/mongodb/__init__.py +7 -1
  286. agno/vectordb/mongodb/mongodb.py +36 -30
  287. agno/vectordb/pgvector/pgvector.py +201 -77
  288. agno/vectordb/pineconedb/pineconedb.py +41 -23
  289. agno/vectordb/qdrant/qdrant.py +67 -54
  290. agno/vectordb/redis/__init__.py +9 -0
  291. agno/vectordb/redis/redisdb.py +682 -0
  292. agno/vectordb/singlestore/singlestore.py +50 -29
  293. agno/vectordb/surrealdb/surrealdb.py +31 -41
  294. agno/vectordb/upstashdb/upstashdb.py +34 -6
  295. agno/vectordb/weaviate/weaviate.py +53 -14
  296. agno/workflow/__init__.py +2 -0
  297. agno/workflow/agent.py +299 -0
  298. agno/workflow/condition.py +120 -18
  299. agno/workflow/loop.py +77 -10
  300. agno/workflow/parallel.py +231 -143
  301. agno/workflow/router.py +118 -17
  302. agno/workflow/step.py +609 -170
  303. agno/workflow/steps.py +73 -6
  304. agno/workflow/types.py +96 -21
  305. agno/workflow/workflow.py +2039 -262
  306. {agno-2.1.2.dist-info → agno-2.3.13.dist-info}/METADATA +201 -66
  307. agno-2.3.13.dist-info/RECORD +613 -0
  308. agno/tools/googlesearch.py +0 -98
  309. agno/tools/mcp.py +0 -679
  310. agno/tools/memori.py +0 -339
  311. agno-2.1.2.dist-info/RECORD +0 -543
  312. {agno-2.1.2.dist-info → agno-2.3.13.dist-info}/WHEEL +0 -0
  313. {agno-2.1.2.dist-info → agno-2.3.13.dist-info}/licenses/LICENSE +0 -0
  314. {agno-2.1.2.dist-info → agno-2.3.13.dist-info}/top_level.txt +0 -0
agno/db/firestore/firestore.py (+831 -9)

@@ -1,18 +1,27 @@
+ import json
  import time
  from datetime import date, datetime, timedelta, timezone
- from typing import Any, Dict, List, Optional, Tuple, Union
+ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
  from uuid import uuid4

+ if TYPE_CHECKING:
+     from agno.tracing.schemas import Span, Trace
+
  from agno.db.base import BaseDb, SessionType
  from agno.db.firestore.utils import (
      apply_pagination,
+     apply_pagination_to_records,
      apply_sorting,
+     apply_sorting_to_records,
      bulk_upsert_metrics,
      calculate_date_metrics,
      create_collection_indexes,
+     deserialize_cultural_knowledge_from_db,
      fetch_all_sessions_data,
      get_dates_to_calculate_metrics_for,
+     serialize_cultural_knowledge_for_db,
  )
+ from agno.db.schemas.culture import CulturalKnowledge
  from agno.db.schemas.evals import EvalFilterType, EvalRunRecord, EvalType
  from agno.db.schemas.knowledge import KnowledgeRow
  from agno.db.schemas.memory import UserMemory
@@ -39,6 +48,9 @@ class FirestoreDb(BaseDb):
          metrics_collection: Optional[str] = None,
          eval_collection: Optional[str] = None,
          knowledge_collection: Optional[str] = None,
+         culture_collection: Optional[str] = None,
+         traces_collection: Optional[str] = None,
+         spans_collection: Optional[str] = None,
          id: Optional[str] = None,
      ):
          """
@@ -52,6 +64,9 @@ class FirestoreDb(BaseDb):
              metrics_collection (Optional[str]): Name of the collection to store metrics.
              eval_collection (Optional[str]): Name of the collection to store evaluation runs.
              knowledge_collection (Optional[str]): Name of the collection to store knowledge documents.
+             culture_collection (Optional[str]): Name of the collection to store cultural knowledge.
+             traces_collection (Optional[str]): Name of the collection to store traces.
+             spans_collection (Optional[str]): Name of the collection to store spans.
              id (Optional[str]): ID of the database.

          Raises:
@@ -68,6 +83,9 @@ class FirestoreDb(BaseDb):
              metrics_table=metrics_collection,
              eval_table=eval_collection,
              knowledge_table=knowledge_collection,
+             culture_table=culture_collection,
+             traces_table=traces_collection,
+             spans_table=spans_collection,
          )

          _client: Optional[Client] = db_client
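For orientation, a minimal sketch of constructing the Firestore backend with the new collections. The three new keyword arguments are taken from this diff; the import path, the db_client argument, and the google-cloud-firestore Client are assumed to be unchanged from 2.1.2, and the project/collection names are placeholders.

from google.cloud import firestore

from agno.db.firestore import FirestoreDb  # import path assumed unchanged from 2.1.2

# Placeholder project and collection names, for illustration only
db = FirestoreDb(
    db_client=firestore.Client(project="my-gcp-project"),
    culture_collection="agno_culture",
    traces_collection="agno_traces",
    spans_collection="agno_spans",
)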
@@ -81,6 +99,17 @@ class FirestoreDb(BaseDb):

      # -- DB methods --

+     def table_exists(self, table_name: str) -> bool:
+         """Check if a collection with the given name exists in the Firestore database.
+
+         Args:
+             table_name: Name of the collection to check
+
+         Returns:
+             bool: True if the collection exists in the database, False otherwise
+         """
+         return table_name in self.db_client.list_collections()
+
      def _get_collection(self, table_type: str, create_collection_if_not_found: Optional[bool] = True):
          """Get or create a collection based on table type.

@@ -146,6 +175,41 @@ class FirestoreDb(BaseDb):
                  )
              return self.knowledge_collection

+         if table_type == "culture":
+             if not hasattr(self, "culture_collection"):
+                 if self.culture_table_name is None:
+                     raise ValueError("Culture collection was not provided on initialization")
+                 self.culture_collection = self._get_or_create_collection(
+                     collection_name=self.culture_table_name,
+                     collection_type="culture",
+                     create_collection_if_not_found=create_collection_if_not_found,
+                 )
+             return self.culture_collection
+
+         if table_type == "traces":
+             if not hasattr(self, "traces_collection"):
+                 if self.trace_table_name is None:
+                     raise ValueError("Traces collection was not provided on initialization")
+                 self.traces_collection = self._get_or_create_collection(
+                     collection_name=self.trace_table_name,
+                     collection_type="traces",
+                     create_collection_if_not_found=create_collection_if_not_found,
+                 )
+             return self.traces_collection
+
+         if table_type == "spans":
+             # Ensure traces collection exists first (spans reference traces)
+             self._get_collection("traces", create_collection_if_not_found=create_collection_if_not_found)
+             if not hasattr(self, "spans_collection"):
+                 if self.span_table_name is None:
+                     raise ValueError("Spans collection was not provided on initialization")
+                 self.spans_collection = self._get_or_create_collection(
+                     collection_name=self.span_table_name,
+                     collection_type="spans",
+                     create_collection_if_not_found=create_collection_if_not_found,
+                 )
+             return self.spans_collection
+
          raise ValueError(f"Unknown table type: {table_type}")

      def _get_or_create_collection(
@@ -207,6 +271,14 @@ class FirestoreDb(BaseDb):
              log_error(f"Error deleting session: {e}")
              raise e

+     def get_latest_schema_version(self):
+         """Get the latest version of the database schema."""
+         pass
+
+     def upsert_schema_version(self, version: str) -> None:
+         """Upsert the schema version into the database."""
+         pass
+
      def delete_sessions(self, session_ids: List[str]) -> None:
          """Delete multiple sessions from the database.

@@ -261,8 +333,6 @@ class FirestoreDb(BaseDb):

              if user_id is not None:
                  query = query.where(filter=FieldFilter("user_id", "==", user_id))
-             if session_type is not None:
-                 query = query.where(filter=FieldFilter("session_type", "==", session_type.value))

              docs = query.stream()
              result = None
@@ -558,7 +628,7 @@ class FirestoreDb(BaseDb):
              raise e

      def upsert_sessions(
-         self, sessions: List[Session], deserialize: Optional[bool] = True
+         self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
      ) -> List[Union[Session, Dict[str, Any]]]:
          """
          Bulk upsert multiple sessions for improved performance on large datasets.
@@ -682,9 +752,7 @@ class FirestoreDb(BaseDb):
              log_error(f"Error deleting memories: {e}")
              raise e

-     def get_all_memory_topics(
-         self, create_collection_if_not_found: Optional[bool] = True
-     ) -> List[str]:
+     def get_all_memory_topics(self, create_collection_if_not_found: Optional[bool] = True) -> List[str]:
          """Get all memory topics from the database.

          Returns:
@@ -839,6 +907,7 @@ class FirestoreDb(BaseDb):
          self,
          limit: Optional[int] = None,
          page: Optional[int] = None,
+         user_id: Optional[str] = None,
      ) -> Tuple[List[Dict[str, Any]], int]:
          """Get user memories stats.

@@ -855,7 +924,10 @@ class FirestoreDb(BaseDb):
          try:
              collection_ref = self._get_collection(table_type="memories")

-             query = collection_ref.where(filter=FieldFilter("user_id", "!=", None))
+             if user_id:
+                 query = collection_ref.where(filter=FieldFilter("user_id", "==", user_id))
+             else:
+                 query = collection_ref.where(filter=FieldFilter("user_id", "!=", None))

              docs = query.stream()

@@ -941,7 +1013,7 @@ class FirestoreDb(BaseDb):
              raise e

      def upsert_memories(
-         self, memories: List[UserMemory], deserialize: Optional[bool] = True
+         self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
      ) -> List[Union[UserMemory, Dict[str, Any]]]:
          """
          Bulk upsert multiple user memories for improved performance on large datasets.
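The preserve_updated_at flag added here mirrors the one added to upsert_sessions above, presumably so bulk-copied rows (for example during the v1-to-v2 migration paths touched elsewhere in this release) keep their original timestamps. A hedged sketch, continuing with the db instance from the constructor example; the UserMemory field names are assumed from 2.1.2 and are purely illustrative.

from agno.db.schemas.memory import UserMemory

# Hypothetical records; with preserve_updated_at=True their existing
# updated_at values are kept instead of being stamped with the current time
memories = [
    UserMemory(memory="Prefers concise answers", user_id="user_1"),
    UserMemory(memory="Based in Berlin", user_id="user_2"),
]
db.upsert_memories(memories, deserialize=True, preserve_updated_at=True)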
@@ -1010,6 +1082,213 @@ class FirestoreDb(BaseDb):
              log_error(f"Exception deleting all memories: {e}")
              raise e

+     # -- Cultural Knowledge methods --
+     def clear_cultural_knowledge(self) -> None:
+         """Delete all cultural knowledge from the database.
+
+         Raises:
+             Exception: If an error occurs during deletion.
+         """
+         try:
+             collection_ref = self._get_collection(table_type="culture")
+
+             # Get all documents in the collection
+             docs = collection_ref.stream()
+
+             # Delete all documents in batches
+             batch = self.db_client.batch()
+             batch_count = 0
+
+             for doc in docs:
+                 batch.delete(doc.reference)
+                 batch_count += 1
+
+                 # Firestore batch has a limit of 500 operations
+                 if batch_count >= 500:
+                     batch.commit()
+                     batch = self.db_client.batch()
+                     batch_count = 0
+
+             # Commit remaining operations
+             if batch_count > 0:
+                 batch.commit()
+
+         except Exception as e:
+             log_error(f"Exception deleting all cultural knowledge: {e}")
+             raise e
+
+     def delete_cultural_knowledge(self, id: str) -> None:
+         """Delete cultural knowledge by ID.
+
+         Args:
+             id (str): The ID of the cultural knowledge to delete.
+
+         Raises:
+             Exception: If an error occurs during deletion.
+         """
+         try:
+             collection_ref = self._get_collection(table_type="culture")
+             docs = collection_ref.where(filter=FieldFilter("id", "==", id)).stream()
+
+             for doc in docs:
+                 doc.reference.delete()
+                 log_debug(f"Deleted cultural knowledge with ID: {id}")
+
+         except Exception as e:
+             log_error(f"Error deleting cultural knowledge: {e}")
+             raise e
+
+     def get_cultural_knowledge(
+         self, id: str, deserialize: Optional[bool] = True
+     ) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
+         """Get cultural knowledge by ID.
+
+         Args:
+             id (str): The ID of the cultural knowledge to retrieve.
+             deserialize (Optional[bool]): Whether to deserialize to CulturalKnowledge object. Defaults to True.
+
+         Returns:
+             Optional[Union[CulturalKnowledge, Dict[str, Any]]]: The cultural knowledge if found, None otherwise.
+
+         Raises:
+             Exception: If an error occurs during retrieval.
+         """
+         try:
+             collection_ref = self._get_collection(table_type="culture")
+             docs = collection_ref.where(filter=FieldFilter("id", "==", id)).limit(1).stream()
+
+             for doc in docs:
+                 result = doc.to_dict()
+                 if not deserialize:
+                     return result
+                 return deserialize_cultural_knowledge_from_db(result)
+
+             return None
+
+         except Exception as e:
+             log_error(f"Error getting cultural knowledge: {e}")
+             raise e
+
+     def get_all_cultural_knowledge(
+         self,
+         agent_id: Optional[str] = None,
+         team_id: Optional[str] = None,
+         name: Optional[str] = None,
+         limit: Optional[int] = None,
+         page: Optional[int] = None,
+         sort_by: Optional[str] = None,
+         sort_order: Optional[str] = None,
+         deserialize: Optional[bool] = True,
+     ) -> Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
+         """Get all cultural knowledge with filtering and pagination.
+
+         Args:
+             agent_id (Optional[str]): Filter by agent ID.
+             team_id (Optional[str]): Filter by team ID.
+             name (Optional[str]): Filter by name (case-insensitive partial match).
+             limit (Optional[int]): Maximum number of results to return.
+             page (Optional[int]): Page number for pagination.
+             sort_by (Optional[str]): Field to sort by.
+             sort_order (Optional[str]): Sort order ('asc' or 'desc').
+             deserialize (Optional[bool]): Whether to deserialize to CulturalKnowledge objects. Defaults to True.
+
+         Returns:
+             Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
+                 - When deserialize=True: List of CulturalKnowledge objects
+                 - When deserialize=False: Tuple with list of dictionaries and total count
+
+         Raises:
+             Exception: If an error occurs during retrieval.
+         """
+         try:
+             collection_ref = self._get_collection(table_type="culture")
+
+             # Build query with filters
+             query = collection_ref
+             if agent_id is not None:
+                 query = query.where(filter=FieldFilter("agent_id", "==", agent_id))
+             if team_id is not None:
+                 query = query.where(filter=FieldFilter("team_id", "==", team_id))
+
+             # Get all matching documents
+             docs = query.stream()
+             results = [doc.to_dict() for doc in docs]
+
+             # Apply name filter (Firestore doesn't support regex in queries)
+             if name is not None:
+                 results = [r for r in results if name.lower() in r.get("name", "").lower()]
+
+             total_count = len(results)
+
+             # Apply sorting and pagination to in-memory results
+             sorted_results = apply_sorting_to_records(records=results, sort_by=sort_by, sort_order=sort_order)
+             paginated_results = apply_pagination_to_records(records=sorted_results, limit=limit, page=page)
+
+             if not deserialize:
+                 return paginated_results, total_count
+
+             return [deserialize_cultural_knowledge_from_db(item) for item in paginated_results]
+
+         except Exception as e:
+             log_error(f"Error getting all cultural knowledge: {e}")
+             raise e
+
+     def upsert_cultural_knowledge(
+         self, cultural_knowledge: CulturalKnowledge, deserialize: Optional[bool] = True
+     ) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
+         """Upsert cultural knowledge in Firestore.
+
+         Args:
+             cultural_knowledge (CulturalKnowledge): The cultural knowledge to upsert.
+             deserialize (Optional[bool]): Whether to deserialize the result. Defaults to True.
+
+         Returns:
+             Optional[Union[CulturalKnowledge, Dict[str, Any]]]: The upserted cultural knowledge.
+
+         Raises:
+             Exception: If an error occurs during upsert.
+         """
+         try:
+             collection_ref = self._get_collection(table_type="culture", create_collection_if_not_found=True)
+
+             # Serialize content, categories, and notes into a dict for DB storage
+             content_dict = serialize_cultural_knowledge_for_db(cultural_knowledge)
+
+             # Create the update document with serialized content
+             update_doc = {
+                 "id": cultural_knowledge.id,
+                 "name": cultural_knowledge.name,
+                 "summary": cultural_knowledge.summary,
+                 "content": content_dict if content_dict else None,
+                 "metadata": cultural_knowledge.metadata,
+                 "input": cultural_knowledge.input,
+                 "created_at": cultural_knowledge.created_at,
+                 "updated_at": int(time.time()),
+                 "agent_id": cultural_knowledge.agent_id,
+                 "team_id": cultural_knowledge.team_id,
+             }
+
+             # Find and update or create new document
+             docs = collection_ref.where(filter=FieldFilter("id", "==", cultural_knowledge.id)).limit(1).stream()
+
+             doc_found = False
+             for doc in docs:
+                 doc.reference.set(update_doc)
+                 doc_found = True
+                 break
+
+             if not doc_found:
+                 collection_ref.add(update_doc)
+
+             if not deserialize:
+                 return update_doc
+
+             return deserialize_cultural_knowledge_from_db(update_doc)
+
+         except Exception as e:
+             log_error(f"Error upserting cultural knowledge: {e}")
+             raise e
+
      # -- Metrics methods --

      def _get_all_sessions_for_metrics_calculation(
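Together these give FirestoreDb the same cultural-knowledge CRUD surface as the other backends in this release. A minimal usage sketch, continuing with the db instance from the constructor example above; the CulturalKnowledge constructor arguments are assumed from the fields upsert_cultural_knowledge persists and are illustrative.

from agno.db.schemas.culture import CulturalKnowledge

# Hypothetical record; only fields that upsert_cultural_knowledge writes are shown
ck = CulturalKnowledge(
    id="ck_001",
    name="Greeting style",
    summary="Keep greetings short and informal",
    agent_id="agent_1",
)
db.upsert_cultural_knowledge(ck)

# The name filter is applied in memory, since Firestore queries cannot do substring matches
rows, total = db.get_all_cultural_knowledge(agent_id="agent_1", name="greeting", deserialize=False)
db.delete_cultural_knowledge("ck_001")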
@@ -1390,6 +1669,9 @@ class FirestoreDb(BaseDb):
          """
          try:
              collection_ref = self._get_collection(table_type="evals")
+             if not collection_ref:
+                 return None
+
              docs = collection_ref.where(filter=FieldFilter("run_id", "==", eval_run_id)).stream()

              eval_run_raw = None
@@ -1530,6 +1812,8 @@ class FirestoreDb(BaseDb):
          """
          try:
              collection_ref = self._get_collection(table_type="evals")
+             if not collection_ref:
+                 return None

              docs = collection_ref.where(filter=FieldFilter("run_id", "==", eval_run_id)).stream()
              doc_ref = next((doc.reference for doc in docs), None)
@@ -1555,3 +1839,541 @@ class FirestoreDb(BaseDb):
          except Exception as e:
              log_error(f"Error updating eval run name {eval_run_id}: {e}")
              raise e
+
+     # --- Traces ---
+     def upsert_trace(self, trace: "Trace") -> None:
+         """Create or update a single trace record in the database.
+
+         Args:
+             trace: The Trace object to store (one per trace_id).
+         """
+         try:
+             collection_ref = self._get_collection(table_type="traces", create_collection_if_not_found=True)
+             if collection_ref is None:
+                 return
+
+             # Check if trace already exists
+             docs = collection_ref.where(filter=FieldFilter("trace_id", "==", trace.trace_id)).limit(1).stream()
+             existing_doc = None
+             existing_data = None
+             for doc in docs:
+                 existing_doc = doc
+                 existing_data = doc.to_dict()
+                 break
+
+             if existing_data and existing_doc is not None:
+                 # Update existing trace
+                 def get_component_level(workflow_id, team_id, agent_id, name):
+                     is_root_name = ".run" in name or ".arun" in name
+                     if not is_root_name:
+                         return 0
+                     elif workflow_id:
+                         return 3
+                     elif team_id:
+                         return 2
+                     elif agent_id:
+                         return 1
+                     else:
+                         return 0
+
+                 existing_level = get_component_level(
+                     existing_data.get("workflow_id"),
+                     existing_data.get("team_id"),
+                     existing_data.get("agent_id"),
+                     existing_data.get("name", ""),
+                 )
+                 new_level = get_component_level(trace.workflow_id, trace.team_id, trace.agent_id, trace.name)
+                 should_update_name = new_level > existing_level
+
+                 # Parse existing start_time to calculate correct duration
+                 existing_start_time_str = existing_data.get("start_time")
+                 if isinstance(existing_start_time_str, str):
+                     existing_start_time = datetime.fromisoformat(existing_start_time_str.replace("Z", "+00:00"))
+                 else:
+                     existing_start_time = trace.start_time
+
+                 recalculated_duration_ms = int((trace.end_time - existing_start_time).total_seconds() * 1000)
+
+                 update_values: Dict[str, Any] = {
+                     "end_time": trace.end_time.isoformat(),
+                     "duration_ms": recalculated_duration_ms,
+                     "status": trace.status,
+                 }
+
+                 if should_update_name:
+                     update_values["name"] = trace.name
+
+                 # Update context fields only if new value is not None
+                 if trace.run_id is not None:
+                     update_values["run_id"] = trace.run_id
+                 if trace.session_id is not None:
+                     update_values["session_id"] = trace.session_id
+                 if trace.user_id is not None:
+                     update_values["user_id"] = trace.user_id
+                 if trace.agent_id is not None:
+                     update_values["agent_id"] = trace.agent_id
+                 if trace.team_id is not None:
+                     update_values["team_id"] = trace.team_id
+                 if trace.workflow_id is not None:
+                     update_values["workflow_id"] = trace.workflow_id
+
+                 existing_doc.reference.update(update_values)
+             else:
+                 # Create new trace with initialized counters
+                 trace_dict = trace.to_dict()
+                 trace_dict["total_spans"] = 0
+                 trace_dict["error_count"] = 0
+                 collection_ref.add(trace_dict)
+
+         except Exception as e:
+             log_error(f"Error creating trace: {e}")
+
+     def get_trace(
+         self,
+         trace_id: Optional[str] = None,
+         run_id: Optional[str] = None,
+     ):
+         """Get a single trace by trace_id or other filters.
+
+         Args:
+             trace_id: The unique trace identifier.
+             run_id: Filter by run ID (returns first match).
+
+         Returns:
+             Optional[Trace]: The trace if found, None otherwise.
+
+         Note:
+             If multiple filters are provided, trace_id takes precedence.
+             For other filters, the most recent trace is returned.
+         """
+         try:
+             from agno.tracing.schemas import Trace
+
+             collection_ref = self._get_collection(table_type="traces")
+             if collection_ref is None:
+                 return None
+
+             if trace_id:
+                 docs = collection_ref.where(filter=FieldFilter("trace_id", "==", trace_id)).limit(1).stream()
+             elif run_id:
+                 from google.cloud.firestore import Query
+
+                 docs = (
+                     collection_ref.where(filter=FieldFilter("run_id", "==", run_id))
+                     .order_by("start_time", direction=Query.DESCENDING)
+                     .limit(1)
+                     .stream()
+                 )
+             else:
+                 log_debug("get_trace called without any filter parameters")
+                 return None
+
+             for doc in docs:
+                 trace_data = doc.to_dict()
+                 # Use stored values (default to 0 if not present)
+                 trace_data.setdefault("total_spans", 0)
+                 trace_data.setdefault("error_count", 0)
+                 return Trace.from_dict(trace_data)
+
+             return None
+
+         except Exception as e:
+             log_error(f"Error getting trace: {e}")
+             return None
+
+     def get_traces(
+         self,
+         run_id: Optional[str] = None,
+         session_id: Optional[str] = None,
+         user_id: Optional[str] = None,
+         agent_id: Optional[str] = None,
+         team_id: Optional[str] = None,
+         workflow_id: Optional[str] = None,
+         status: Optional[str] = None,
+         start_time: Optional[datetime] = None,
+         end_time: Optional[datetime] = None,
+         limit: Optional[int] = 20,
+         page: Optional[int] = 1,
+     ) -> tuple[List, int]:
+         """Get traces matching the provided filters.
+
+         Args:
+             run_id: Filter by run ID.
+             session_id: Filter by session ID.
+             user_id: Filter by user ID.
+             agent_id: Filter by agent ID.
+             team_id: Filter by team ID.
+             workflow_id: Filter by workflow ID.
+             status: Filter by status (OK, ERROR, UNSET).
+             start_time: Filter traces starting after this datetime.
+             end_time: Filter traces ending before this datetime.
+             limit: Maximum number of traces to return per page.
+             page: Page number (1-indexed).
+
+         Returns:
+             tuple[List[Trace], int]: Tuple of (list of matching traces, total count).
+         """
+         try:
+             from agno.tracing.schemas import Trace
+
+             collection_ref = self._get_collection(table_type="traces")
+             if collection_ref is None:
+                 return [], 0
+
+             query = collection_ref
+
+             # Apply filters
+             if run_id:
+                 query = query.where(filter=FieldFilter("run_id", "==", run_id))
+             if session_id:
+                 query = query.where(filter=FieldFilter("session_id", "==", session_id))
+             if user_id:
+                 query = query.where(filter=FieldFilter("user_id", "==", user_id))
+             if agent_id:
+                 query = query.where(filter=FieldFilter("agent_id", "==", agent_id))
+             if team_id:
+                 query = query.where(filter=FieldFilter("team_id", "==", team_id))
+             if workflow_id:
+                 query = query.where(filter=FieldFilter("workflow_id", "==", workflow_id))
+             if status:
+                 query = query.where(filter=FieldFilter("status", "==", status))
+             if start_time:
+                 query = query.where(filter=FieldFilter("start_time", ">=", start_time.isoformat()))
+             if end_time:
+                 query = query.where(filter=FieldFilter("end_time", "<=", end_time.isoformat()))
+
+             # Get all matching documents
+             docs = query.stream()
+             all_records = [doc.to_dict() for doc in docs]
+
+             # Sort by start_time descending
+             all_records.sort(key=lambda x: x.get("start_time", ""), reverse=True)
+
+             # Get total count
+             total_count = len(all_records)
+
+             # Apply pagination
+             if limit and page:
+                 offset = (page - 1) * limit
+                 paginated_records = all_records[offset : offset + limit]
+             elif limit:
+                 paginated_records = all_records[:limit]
+             else:
+                 paginated_records = all_records
+
+             # Convert to Trace objects with stored span counts
+             traces = []
+             for trace_data in paginated_records:
+                 trace_data.setdefault("total_spans", 0)
+                 trace_data.setdefault("error_count", 0)
+                 traces.append(Trace.from_dict(trace_data))
+
+             return traces, total_count
+
+         except Exception as e:
+             log_error(f"Error getting traces: {e}")
+             return [], 0
+
+     def get_trace_stats(
+         self,
+         user_id: Optional[str] = None,
+         agent_id: Optional[str] = None,
+         team_id: Optional[str] = None,
+         workflow_id: Optional[str] = None,
+         start_time: Optional[datetime] = None,
+         end_time: Optional[datetime] = None,
+         limit: Optional[int] = 20,
+         page: Optional[int] = 1,
+     ) -> tuple[List[Dict[str, Any]], int]:
+         """Get trace statistics grouped by session.
+
+         Args:
+             user_id: Filter by user ID.
+             agent_id: Filter by agent ID.
+             team_id: Filter by team ID.
+             workflow_id: Filter by workflow ID.
+             start_time: Filter sessions with traces created after this datetime.
+             end_time: Filter sessions with traces created before this datetime.
+             limit: Maximum number of sessions to return per page.
+             page: Page number (1-indexed).
+
+         Returns:
+             tuple[List[Dict], int]: Tuple of (list of session stats dicts, total count).
+                 Each dict contains: session_id, user_id, agent_id, team_id, workflow_id, total_traces,
+                 first_trace_at, last_trace_at.
+         """
+         try:
+             collection_ref = self._get_collection(table_type="traces")
+             if collection_ref is None:
+                 return [], 0
+
+             query = collection_ref
+
+             # Apply filters
+             if user_id:
+                 query = query.where(filter=FieldFilter("user_id", "==", user_id))
+             if agent_id:
+                 query = query.where(filter=FieldFilter("agent_id", "==", agent_id))
+             if team_id:
+                 query = query.where(filter=FieldFilter("team_id", "==", team_id))
+             if workflow_id:
+                 query = query.where(filter=FieldFilter("workflow_id", "==", workflow_id))
+             if start_time:
+                 query = query.where(filter=FieldFilter("created_at", ">=", start_time.isoformat()))
+             if end_time:
+                 query = query.where(filter=FieldFilter("created_at", "<=", end_time.isoformat()))
+
+             # Get all matching documents
+             docs = query.stream()
+
+             # Aggregate by session_id
+             session_stats: Dict[str, Dict[str, Any]] = {}
+             for doc in docs:
+                 trace_data = doc.to_dict()
+                 session_id = trace_data.get("session_id")
+                 if not session_id:
+                     continue
+
+                 if session_id not in session_stats:
+                     session_stats[session_id] = {
+                         "session_id": session_id,
+                         "user_id": trace_data.get("user_id"),
+                         "agent_id": trace_data.get("agent_id"),
+                         "team_id": trace_data.get("team_id"),
+                         "workflow_id": trace_data.get("workflow_id"),
+                         "total_traces": 0,
+                         "first_trace_at": trace_data.get("created_at"),
+                         "last_trace_at": trace_data.get("created_at"),
+                     }
+
+                 session_stats[session_id]["total_traces"] += 1
+
+                 created_at = trace_data.get("created_at")
+                 if (
+                     created_at
+                     and session_stats[session_id]["first_trace_at"]
+                     and session_stats[session_id]["last_trace_at"]
+                 ):
+                     if created_at < session_stats[session_id]["first_trace_at"]:
+                         session_stats[session_id]["first_trace_at"] = created_at
+                     if created_at > session_stats[session_id]["last_trace_at"]:
+                         session_stats[session_id]["last_trace_at"] = created_at
+
+             # Convert to list and sort by last_trace_at descending
+             stats_list = list(session_stats.values())
+             stats_list.sort(key=lambda x: x.get("last_trace_at", ""), reverse=True)
+
+             # Convert datetime strings to datetime objects
+             for stat in stats_list:
+                 first_trace_at = stat["first_trace_at"]
+                 last_trace_at = stat["last_trace_at"]
+                 if isinstance(first_trace_at, str):
+                     stat["first_trace_at"] = datetime.fromisoformat(first_trace_at.replace("Z", "+00:00"))
+                 if isinstance(last_trace_at, str):
+                     stat["last_trace_at"] = datetime.fromisoformat(last_trace_at.replace("Z", "+00:00"))
+
+             # Get total count
+             total_count = len(stats_list)
+
+             # Apply pagination
+             if limit and page:
+                 offset = (page - 1) * limit
+                 paginated_stats = stats_list[offset : offset + limit]
+             elif limit:
+                 paginated_stats = stats_list[:limit]
+             else:
+                 paginated_stats = stats_list
+
+             return paginated_stats, total_count
+
+         except Exception as e:
+             log_error(f"Error getting trace stats: {e}")
+             return [], 0
+
+     # --- Spans ---
+     def create_span(self, span: "Span") -> None:
+         """Create a single span in the database.
+
+         Args:
+             span: The Span object to store.
+         """
+         try:
+             collection_ref = self._get_collection(table_type="spans", create_collection_if_not_found=True)
+             if collection_ref is None:
+                 return
+
+             span_dict = span.to_dict()
+             # Serialize attributes as JSON string
+             if "attributes" in span_dict and isinstance(span_dict["attributes"], dict):
+                 span_dict["attributes"] = json.dumps(span_dict["attributes"])
+
+             collection_ref.add(span_dict)
+
+             # Increment total_spans and error_count on trace
+             traces_collection = self._get_collection(table_type="traces")
+             if traces_collection:
+                 try:
+                     docs = (
+                         traces_collection.where(filter=FieldFilter("trace_id", "==", span.trace_id)).limit(1).stream()
+                     )
+                     for doc in docs:
+                         trace_data = doc.to_dict()
+                         current_total = trace_data.get("total_spans", 0)
+                         current_errors = trace_data.get("error_count", 0)
+
+                         update_values = {"total_spans": current_total + 1}
+                         if span.status_code == "ERROR":
+                             update_values["error_count"] = current_errors + 1
+
+                         doc.reference.update(update_values)
+                         break
+                 except Exception as update_error:
+                     log_debug(f"Could not update trace span counts: {update_error}")
+
+         except Exception as e:
+             log_error(f"Error creating span: {e}")
+
+     def create_spans(self, spans: List) -> None:
+         """Create multiple spans in the database as a batch.
+
+         Args:
+             spans: List of Span objects to store.
+         """
+         if not spans:
+             return
+
+         try:
+             collection_ref = self._get_collection(table_type="spans", create_collection_if_not_found=True)
+             if collection_ref is None:
+                 return
+
+             # Firestore batch has a limit of 500 operations
+             batch = self.db_client.batch()
+             batch_count = 0
+
+             for span in spans:
+                 span_dict = span.to_dict()
+                 # Serialize attributes as JSON string
+                 if "attributes" in span_dict and isinstance(span_dict["attributes"], dict):
+                     span_dict["attributes"] = json.dumps(span_dict["attributes"])
+
+                 doc_ref = collection_ref.document()
+                 batch.set(doc_ref, span_dict)
+                 batch_count += 1
+
+                 # Commit batch if reaching limit
+                 if batch_count >= 500:
+                     batch.commit()
+                     batch = self.db_client.batch()
+                     batch_count = 0
+
+             # Commit remaining operations
+             if batch_count > 0:
+                 batch.commit()
+
+             # Update trace with total_spans and error_count
+             trace_id = spans[0].trace_id
+             spans_count = len(spans)
+             error_count = sum(1 for s in spans if s.status_code == "ERROR")
+
+             traces_collection = self._get_collection(table_type="traces")
+             if traces_collection:
+                 try:
+                     docs = traces_collection.where(filter=FieldFilter("trace_id", "==", trace_id)).limit(1).stream()
+                     for doc in docs:
+                         trace_data = doc.to_dict()
+                         current_total = trace_data.get("total_spans", 0)
+                         current_errors = trace_data.get("error_count", 0)
+
+                         doc.reference.update(
+                             {
+                                 "total_spans": current_total + spans_count,
+                                 "error_count": current_errors + error_count,
+                             }
+                         )
+                         break
+                 except Exception as update_error:
+                     log_debug(f"Could not update trace span counts: {update_error}")
+
+         except Exception as e:
+             log_error(f"Error creating spans batch: {e}")
+
+     def get_span(self, span_id: str):
+         """Get a single span by its span_id.
+
+         Args:
+             span_id: The unique span identifier.
+
+         Returns:
+             Optional[Span]: The span if found, None otherwise.
+         """
+         try:
+             from agno.tracing.schemas import Span
+
+             collection_ref = self._get_collection(table_type="spans")
+             if collection_ref is None:
+                 return None
+
+             docs = collection_ref.where(filter=FieldFilter("span_id", "==", span_id)).limit(1).stream()
+
+             for doc in docs:
+                 span_data = doc.to_dict()
+                 # Deserialize attributes from JSON string
+                 if "attributes" in span_data and isinstance(span_data["attributes"], str):
+                     span_data["attributes"] = json.loads(span_data["attributes"])
+                 return Span.from_dict(span_data)
+
+             return None
+
+         except Exception as e:
+             log_error(f"Error getting span: {e}")
+             return None
+
+     def get_spans(
+         self,
+         trace_id: Optional[str] = None,
+         parent_span_id: Optional[str] = None,
+         limit: Optional[int] = 1000,
+     ) -> List:
+         """Get spans matching the provided filters.
+
+         Args:
+             trace_id: Filter by trace ID.
+             parent_span_id: Filter by parent span ID.
+             limit: Maximum number of spans to return.
+
+         Returns:
+             List[Span]: List of matching spans.
+         """
+         try:
+             from agno.tracing.schemas import Span
+
+             collection_ref = self._get_collection(table_type="spans")
+             if collection_ref is None:
+                 return []
+
+             query = collection_ref
+
+             if trace_id:
+                 query = query.where(filter=FieldFilter("trace_id", "==", trace_id))
+             if parent_span_id:
+                 query = query.where(filter=FieldFilter("parent_span_id", "==", parent_span_id))
+
+             if limit:
+                 query = query.limit(limit)
+
+             docs = query.stream()
+
+             spans = []
+             for doc in docs:
+                 span_data = doc.to_dict()
+                 # Deserialize attributes from JSON string
+                 if "attributes" in span_data and isinstance(span_data["attributes"], str):
+                     span_data["attributes"] = json.loads(span_data["attributes"])
+                 spans.append(Span.from_dict(span_data))
+
+             return spans
+
+         except Exception as e:
+             log_error(f"Error getting spans: {e}")
+             return []
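These accessors form the read path that the new traces router (agno/os/routers/traces) presumably serves. A read-side sketch using only signatures defined in this file, again with the db instance from the constructor example above; attribute names on the returned Trace objects are assumed from the fields written by upsert_trace.

# Most recent traces for one agent, newest first, 20 per page
traces, total = db.get_traces(agent_id="agent_1", limit=20, page=1)

for trace in traces:
    # get_spans() JSON-decodes each span's attributes before returning Span objects
    spans = db.get_spans(trace_id=trace.trace_id, limit=1000)
    print(trace.trace_id, trace.status, len(spans))

# Per-session rollups: total_traces, first_trace_at, last_trace_at
stats, count = db.get_trace_stats(user_id="user_1", limit=20, page=1)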