agno 2.2.13__py3-none-any.whl → 2.4.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (383)
  1. agno/agent/__init__.py +6 -0
  2. agno/agent/agent.py +5252 -3145
  3. agno/agent/remote.py +525 -0
  4. agno/api/api.py +2 -0
  5. agno/client/__init__.py +3 -0
  6. agno/client/a2a/__init__.py +10 -0
  7. agno/client/a2a/client.py +554 -0
  8. agno/client/a2a/schemas.py +112 -0
  9. agno/client/a2a/utils.py +369 -0
  10. agno/client/os.py +2669 -0
  11. agno/compression/__init__.py +3 -0
  12. agno/compression/manager.py +247 -0
  13. agno/culture/manager.py +2 -2
  14. agno/db/base.py +927 -6
  15. agno/db/dynamo/dynamo.py +788 -2
  16. agno/db/dynamo/schemas.py +128 -0
  17. agno/db/dynamo/utils.py +26 -3
  18. agno/db/firestore/firestore.py +674 -50
  19. agno/db/firestore/schemas.py +41 -0
  20. agno/db/firestore/utils.py +25 -10
  21. agno/db/gcs_json/gcs_json_db.py +506 -3
  22. agno/db/gcs_json/utils.py +14 -2
  23. agno/db/in_memory/in_memory_db.py +203 -4
  24. agno/db/in_memory/utils.py +14 -2
  25. agno/db/json/json_db.py +498 -2
  26. agno/db/json/utils.py +14 -2
  27. agno/db/migrations/manager.py +199 -0
  28. agno/db/migrations/utils.py +19 -0
  29. agno/db/migrations/v1_to_v2.py +54 -16
  30. agno/db/migrations/versions/__init__.py +0 -0
  31. agno/db/migrations/versions/v2_3_0.py +977 -0
  32. agno/db/mongo/async_mongo.py +1013 -39
  33. agno/db/mongo/mongo.py +684 -4
  34. agno/db/mongo/schemas.py +48 -0
  35. agno/db/mongo/utils.py +17 -0
  36. agno/db/mysql/__init__.py +2 -1
  37. agno/db/mysql/async_mysql.py +2958 -0
  38. agno/db/mysql/mysql.py +722 -53
  39. agno/db/mysql/schemas.py +77 -11
  40. agno/db/mysql/utils.py +151 -8
  41. agno/db/postgres/async_postgres.py +1254 -137
  42. agno/db/postgres/postgres.py +2316 -93
  43. agno/db/postgres/schemas.py +153 -21
  44. agno/db/postgres/utils.py +22 -7
  45. agno/db/redis/redis.py +531 -3
  46. agno/db/redis/schemas.py +36 -0
  47. agno/db/redis/utils.py +31 -15
  48. agno/db/schemas/evals.py +1 -0
  49. agno/db/schemas/memory.py +20 -9
  50. agno/db/singlestore/schemas.py +70 -1
  51. agno/db/singlestore/singlestore.py +737 -74
  52. agno/db/singlestore/utils.py +13 -3
  53. agno/db/sqlite/async_sqlite.py +1069 -89
  54. agno/db/sqlite/schemas.py +133 -1
  55. agno/db/sqlite/sqlite.py +2203 -165
  56. agno/db/sqlite/utils.py +21 -11
  57. agno/db/surrealdb/models.py +25 -0
  58. agno/db/surrealdb/surrealdb.py +603 -1
  59. agno/db/utils.py +60 -0
  60. agno/eval/__init__.py +26 -3
  61. agno/eval/accuracy.py +25 -12
  62. agno/eval/agent_as_judge.py +871 -0
  63. agno/eval/base.py +29 -0
  64. agno/eval/performance.py +10 -4
  65. agno/eval/reliability.py +22 -13
  66. agno/eval/utils.py +2 -1
  67. agno/exceptions.py +42 -0
  68. agno/hooks/__init__.py +3 -0
  69. agno/hooks/decorator.py +164 -0
  70. agno/integrations/discord/client.py +13 -2
  71. agno/knowledge/__init__.py +4 -0
  72. agno/knowledge/chunking/code.py +90 -0
  73. agno/knowledge/chunking/document.py +65 -4
  74. agno/knowledge/chunking/fixed.py +4 -1
  75. agno/knowledge/chunking/markdown.py +102 -11
  76. agno/knowledge/chunking/recursive.py +2 -2
  77. agno/knowledge/chunking/semantic.py +130 -48
  78. agno/knowledge/chunking/strategy.py +18 -0
  79. agno/knowledge/embedder/azure_openai.py +0 -1
  80. agno/knowledge/embedder/google.py +1 -1
  81. agno/knowledge/embedder/mistral.py +1 -1
  82. agno/knowledge/embedder/nebius.py +1 -1
  83. agno/knowledge/embedder/openai.py +16 -12
  84. agno/knowledge/filesystem.py +412 -0
  85. agno/knowledge/knowledge.py +4261 -1199
  86. agno/knowledge/protocol.py +134 -0
  87. agno/knowledge/reader/arxiv_reader.py +3 -2
  88. agno/knowledge/reader/base.py +9 -7
  89. agno/knowledge/reader/csv_reader.py +91 -42
  90. agno/knowledge/reader/docx_reader.py +9 -10
  91. agno/knowledge/reader/excel_reader.py +225 -0
  92. agno/knowledge/reader/field_labeled_csv_reader.py +38 -48
  93. agno/knowledge/reader/firecrawl_reader.py +3 -2
  94. agno/knowledge/reader/json_reader.py +16 -22
  95. agno/knowledge/reader/markdown_reader.py +15 -14
  96. agno/knowledge/reader/pdf_reader.py +33 -28
  97. agno/knowledge/reader/pptx_reader.py +9 -10
  98. agno/knowledge/reader/reader_factory.py +135 -1
  99. agno/knowledge/reader/s3_reader.py +8 -16
  100. agno/knowledge/reader/tavily_reader.py +3 -3
  101. agno/knowledge/reader/text_reader.py +15 -14
  102. agno/knowledge/reader/utils/__init__.py +17 -0
  103. agno/knowledge/reader/utils/spreadsheet.py +114 -0
  104. agno/knowledge/reader/web_search_reader.py +8 -65
  105. agno/knowledge/reader/website_reader.py +16 -13
  106. agno/knowledge/reader/wikipedia_reader.py +36 -3
  107. agno/knowledge/reader/youtube_reader.py +3 -2
  108. agno/knowledge/remote_content/__init__.py +33 -0
  109. agno/knowledge/remote_content/config.py +266 -0
  110. agno/knowledge/remote_content/remote_content.py +105 -17
  111. agno/knowledge/utils.py +76 -22
  112. agno/learn/__init__.py +71 -0
  113. agno/learn/config.py +463 -0
  114. agno/learn/curate.py +185 -0
  115. agno/learn/machine.py +725 -0
  116. agno/learn/schemas.py +1114 -0
  117. agno/learn/stores/__init__.py +38 -0
  118. agno/learn/stores/decision_log.py +1156 -0
  119. agno/learn/stores/entity_memory.py +3275 -0
  120. agno/learn/stores/learned_knowledge.py +1583 -0
  121. agno/learn/stores/protocol.py +117 -0
  122. agno/learn/stores/session_context.py +1217 -0
  123. agno/learn/stores/user_memory.py +1495 -0
  124. agno/learn/stores/user_profile.py +1220 -0
  125. agno/learn/utils.py +209 -0
  126. agno/media.py +22 -6
  127. agno/memory/__init__.py +14 -1
  128. agno/memory/manager.py +223 -8
  129. agno/memory/strategies/__init__.py +15 -0
  130. agno/memory/strategies/base.py +66 -0
  131. agno/memory/strategies/summarize.py +196 -0
  132. agno/memory/strategies/types.py +37 -0
  133. agno/models/aimlapi/aimlapi.py +17 -0
  134. agno/models/anthropic/claude.py +434 -59
  135. agno/models/aws/bedrock.py +121 -20
  136. agno/models/aws/claude.py +131 -274
  137. agno/models/azure/ai_foundry.py +10 -6
  138. agno/models/azure/openai_chat.py +33 -10
  139. agno/models/base.py +1162 -561
  140. agno/models/cerebras/cerebras.py +120 -24
  141. agno/models/cerebras/cerebras_openai.py +21 -2
  142. agno/models/cohere/chat.py +65 -6
  143. agno/models/cometapi/cometapi.py +18 -1
  144. agno/models/dashscope/dashscope.py +2 -3
  145. agno/models/deepinfra/deepinfra.py +18 -1
  146. agno/models/deepseek/deepseek.py +69 -3
  147. agno/models/fireworks/fireworks.py +18 -1
  148. agno/models/google/gemini.py +959 -89
  149. agno/models/google/utils.py +22 -0
  150. agno/models/groq/groq.py +48 -18
  151. agno/models/huggingface/huggingface.py +17 -6
  152. agno/models/ibm/watsonx.py +16 -6
  153. agno/models/internlm/internlm.py +18 -1
  154. agno/models/langdb/langdb.py +13 -1
  155. agno/models/litellm/chat.py +88 -9
  156. agno/models/litellm/litellm_openai.py +18 -1
  157. agno/models/message.py +24 -5
  158. agno/models/meta/llama.py +40 -13
  159. agno/models/meta/llama_openai.py +22 -21
  160. agno/models/metrics.py +12 -0
  161. agno/models/mistral/mistral.py +8 -4
  162. agno/models/n1n/__init__.py +3 -0
  163. agno/models/n1n/n1n.py +57 -0
  164. agno/models/nebius/nebius.py +6 -7
  165. agno/models/nvidia/nvidia.py +20 -3
  166. agno/models/ollama/__init__.py +2 -0
  167. agno/models/ollama/chat.py +17 -6
  168. agno/models/ollama/responses.py +100 -0
  169. agno/models/openai/__init__.py +2 -0
  170. agno/models/openai/chat.py +117 -26
  171. agno/models/openai/open_responses.py +46 -0
  172. agno/models/openai/responses.py +110 -32
  173. agno/models/openrouter/__init__.py +2 -0
  174. agno/models/openrouter/openrouter.py +67 -2
  175. agno/models/openrouter/responses.py +146 -0
  176. agno/models/perplexity/perplexity.py +19 -1
  177. agno/models/portkey/portkey.py +7 -6
  178. agno/models/requesty/requesty.py +19 -2
  179. agno/models/response.py +20 -2
  180. agno/models/sambanova/sambanova.py +20 -3
  181. agno/models/siliconflow/siliconflow.py +19 -2
  182. agno/models/together/together.py +20 -3
  183. agno/models/vercel/v0.py +20 -3
  184. agno/models/vertexai/claude.py +124 -4
  185. agno/models/vllm/vllm.py +19 -14
  186. agno/models/xai/xai.py +19 -2
  187. agno/os/app.py +467 -137
  188. agno/os/auth.py +253 -5
  189. agno/os/config.py +22 -0
  190. agno/os/interfaces/a2a/a2a.py +7 -6
  191. agno/os/interfaces/a2a/router.py +635 -26
  192. agno/os/interfaces/a2a/utils.py +32 -33
  193. agno/os/interfaces/agui/agui.py +5 -3
  194. agno/os/interfaces/agui/router.py +26 -16
  195. agno/os/interfaces/agui/utils.py +97 -57
  196. agno/os/interfaces/base.py +7 -7
  197. agno/os/interfaces/slack/router.py +16 -7
  198. agno/os/interfaces/slack/slack.py +7 -7
  199. agno/os/interfaces/whatsapp/router.py +35 -7
  200. agno/os/interfaces/whatsapp/security.py +3 -1
  201. agno/os/interfaces/whatsapp/whatsapp.py +11 -8
  202. agno/os/managers.py +326 -0
  203. agno/os/mcp.py +652 -79
  204. agno/os/middleware/__init__.py +4 -0
  205. agno/os/middleware/jwt.py +718 -115
  206. agno/os/middleware/trailing_slash.py +27 -0
  207. agno/os/router.py +105 -1558
  208. agno/os/routers/agents/__init__.py +3 -0
  209. agno/os/routers/agents/router.py +655 -0
  210. agno/os/routers/agents/schema.py +288 -0
  211. agno/os/routers/components/__init__.py +3 -0
  212. agno/os/routers/components/components.py +475 -0
  213. agno/os/routers/database.py +155 -0
  214. agno/os/routers/evals/evals.py +111 -18
  215. agno/os/routers/evals/schemas.py +38 -5
  216. agno/os/routers/evals/utils.py +80 -11
  217. agno/os/routers/health.py +3 -3
  218. agno/os/routers/knowledge/knowledge.py +284 -35
  219. agno/os/routers/knowledge/schemas.py +14 -2
  220. agno/os/routers/memory/memory.py +274 -11
  221. agno/os/routers/memory/schemas.py +44 -3
  222. agno/os/routers/metrics/metrics.py +30 -15
  223. agno/os/routers/metrics/schemas.py +10 -6
  224. agno/os/routers/registry/__init__.py +3 -0
  225. agno/os/routers/registry/registry.py +337 -0
  226. agno/os/routers/session/session.py +143 -14
  227. agno/os/routers/teams/__init__.py +3 -0
  228. agno/os/routers/teams/router.py +550 -0
  229. agno/os/routers/teams/schema.py +280 -0
  230. agno/os/routers/traces/__init__.py +3 -0
  231. agno/os/routers/traces/schemas.py +414 -0
  232. agno/os/routers/traces/traces.py +549 -0
  233. agno/os/routers/workflows/__init__.py +3 -0
  234. agno/os/routers/workflows/router.py +757 -0
  235. agno/os/routers/workflows/schema.py +139 -0
  236. agno/os/schema.py +157 -584
  237. agno/os/scopes.py +469 -0
  238. agno/os/settings.py +3 -0
  239. agno/os/utils.py +574 -185
  240. agno/reasoning/anthropic.py +85 -1
  241. agno/reasoning/azure_ai_foundry.py +93 -1
  242. agno/reasoning/deepseek.py +102 -2
  243. agno/reasoning/default.py +6 -7
  244. agno/reasoning/gemini.py +87 -3
  245. agno/reasoning/groq.py +109 -2
  246. agno/reasoning/helpers.py +6 -7
  247. agno/reasoning/manager.py +1238 -0
  248. agno/reasoning/ollama.py +93 -1
  249. agno/reasoning/openai.py +115 -1
  250. agno/reasoning/vertexai.py +85 -1
  251. agno/registry/__init__.py +3 -0
  252. agno/registry/registry.py +68 -0
  253. agno/remote/__init__.py +3 -0
  254. agno/remote/base.py +581 -0
  255. agno/run/__init__.py +2 -4
  256. agno/run/agent.py +134 -19
  257. agno/run/base.py +49 -1
  258. agno/run/cancel.py +65 -52
  259. agno/run/cancellation_management/__init__.py +9 -0
  260. agno/run/cancellation_management/base.py +78 -0
  261. agno/run/cancellation_management/in_memory_cancellation_manager.py +100 -0
  262. agno/run/cancellation_management/redis_cancellation_manager.py +236 -0
  263. agno/run/requirement.py +181 -0
  264. agno/run/team.py +111 -19
  265. agno/run/workflow.py +2 -1
  266. agno/session/agent.py +57 -92
  267. agno/session/summary.py +1 -1
  268. agno/session/team.py +62 -115
  269. agno/session/workflow.py +353 -57
  270. agno/skills/__init__.py +17 -0
  271. agno/skills/agent_skills.py +377 -0
  272. agno/skills/errors.py +32 -0
  273. agno/skills/loaders/__init__.py +4 -0
  274. agno/skills/loaders/base.py +27 -0
  275. agno/skills/loaders/local.py +216 -0
  276. agno/skills/skill.py +65 -0
  277. agno/skills/utils.py +107 -0
  278. agno/skills/validator.py +277 -0
  279. agno/table.py +10 -0
  280. agno/team/__init__.py +5 -1
  281. agno/team/remote.py +447 -0
  282. agno/team/team.py +3769 -2202
  283. agno/tools/brandfetch.py +27 -18
  284. agno/tools/browserbase.py +225 -16
  285. agno/tools/crawl4ai.py +3 -0
  286. agno/tools/duckduckgo.py +25 -71
  287. agno/tools/exa.py +0 -21
  288. agno/tools/file.py +14 -13
  289. agno/tools/file_generation.py +12 -6
  290. agno/tools/firecrawl.py +15 -7
  291. agno/tools/function.py +94 -113
  292. agno/tools/google_bigquery.py +11 -2
  293. agno/tools/google_drive.py +4 -3
  294. agno/tools/knowledge.py +9 -4
  295. agno/tools/mcp/mcp.py +301 -18
  296. agno/tools/mcp/multi_mcp.py +269 -14
  297. agno/tools/mem0.py +11 -10
  298. agno/tools/memory.py +47 -46
  299. agno/tools/mlx_transcribe.py +10 -7
  300. agno/tools/models/nebius.py +5 -5
  301. agno/tools/models_labs.py +20 -10
  302. agno/tools/nano_banana.py +151 -0
  303. agno/tools/parallel.py +0 -7
  304. agno/tools/postgres.py +76 -36
  305. agno/tools/python.py +14 -6
  306. agno/tools/reasoning.py +30 -23
  307. agno/tools/redshift.py +406 -0
  308. agno/tools/shopify.py +1519 -0
  309. agno/tools/spotify.py +919 -0
  310. agno/tools/tavily.py +4 -1
  311. agno/tools/toolkit.py +253 -18
  312. agno/tools/websearch.py +93 -0
  313. agno/tools/website.py +1 -1
  314. agno/tools/wikipedia.py +1 -1
  315. agno/tools/workflow.py +56 -48
  316. agno/tools/yfinance.py +12 -11
  317. agno/tracing/__init__.py +12 -0
  318. agno/tracing/exporter.py +161 -0
  319. agno/tracing/schemas.py +276 -0
  320. agno/tracing/setup.py +112 -0
  321. agno/utils/agent.py +251 -10
  322. agno/utils/cryptography.py +22 -0
  323. agno/utils/dttm.py +33 -0
  324. agno/utils/events.py +264 -7
  325. agno/utils/hooks.py +111 -3
  326. agno/utils/http.py +161 -2
  327. agno/utils/mcp.py +49 -8
  328. agno/utils/media.py +22 -1
  329. agno/utils/models/ai_foundry.py +9 -2
  330. agno/utils/models/claude.py +20 -5
  331. agno/utils/models/cohere.py +9 -2
  332. agno/utils/models/llama.py +9 -2
  333. agno/utils/models/mistral.py +4 -2
  334. agno/utils/os.py +0 -0
  335. agno/utils/print_response/agent.py +99 -16
  336. agno/utils/print_response/team.py +223 -24
  337. agno/utils/print_response/workflow.py +0 -2
  338. agno/utils/prompts.py +8 -6
  339. agno/utils/remote.py +23 -0
  340. agno/utils/response.py +1 -13
  341. agno/utils/string.py +91 -2
  342. agno/utils/team.py +62 -12
  343. agno/utils/tokens.py +657 -0
  344. agno/vectordb/base.py +15 -2
  345. agno/vectordb/cassandra/cassandra.py +1 -1
  346. agno/vectordb/chroma/__init__.py +2 -1
  347. agno/vectordb/chroma/chromadb.py +468 -23
  348. agno/vectordb/clickhouse/clickhousedb.py +1 -1
  349. agno/vectordb/couchbase/couchbase.py +6 -2
  350. agno/vectordb/lancedb/lance_db.py +7 -38
  351. agno/vectordb/lightrag/lightrag.py +7 -6
  352. agno/vectordb/milvus/milvus.py +118 -84
  353. agno/vectordb/mongodb/__init__.py +2 -1
  354. agno/vectordb/mongodb/mongodb.py +14 -31
  355. agno/vectordb/pgvector/pgvector.py +120 -66
  356. agno/vectordb/pineconedb/pineconedb.py +2 -19
  357. agno/vectordb/qdrant/__init__.py +2 -1
  358. agno/vectordb/qdrant/qdrant.py +33 -56
  359. agno/vectordb/redis/__init__.py +2 -1
  360. agno/vectordb/redis/redisdb.py +19 -31
  361. agno/vectordb/singlestore/singlestore.py +17 -9
  362. agno/vectordb/surrealdb/surrealdb.py +2 -38
  363. agno/vectordb/weaviate/__init__.py +2 -1
  364. agno/vectordb/weaviate/weaviate.py +7 -3
  365. agno/workflow/__init__.py +5 -1
  366. agno/workflow/agent.py +2 -2
  367. agno/workflow/condition.py +12 -10
  368. agno/workflow/loop.py +28 -9
  369. agno/workflow/parallel.py +21 -13
  370. agno/workflow/remote.py +362 -0
  371. agno/workflow/router.py +12 -9
  372. agno/workflow/step.py +261 -36
  373. agno/workflow/steps.py +12 -8
  374. agno/workflow/types.py +40 -77
  375. agno/workflow/workflow.py +939 -213
  376. {agno-2.2.13.dist-info → agno-2.4.3.dist-info}/METADATA +134 -181
  377. agno-2.4.3.dist-info/RECORD +677 -0
  378. {agno-2.2.13.dist-info → agno-2.4.3.dist-info}/WHEEL +1 -1
  379. agno/tools/googlesearch.py +0 -98
  380. agno/tools/memori.py +0 -339
  381. agno-2.2.13.dist-info/RECORD +0 -575
  382. {agno-2.2.13.dist-info → agno-2.4.3.dist-info}/licenses/LICENSE +0 -0
  383. {agno-2.2.13.dist-info → agno-2.4.3.dist-info}/top_level.txt +0 -0
@@ -1,10 +1,14 @@
  import json
  import time
  from datetime import date, datetime, timedelta, timezone
- from typing import Any, Dict, List, Optional, Tuple, Union
+ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
  from uuid import uuid4
 
+ if TYPE_CHECKING:
+ from agno.tracing.schemas import Span, Trace
+
  from agno.db.base import BaseDb, SessionType
+ from agno.db.migrations.manager import MigrationManager
  from agno.db.schemas.culture import CulturalKnowledge
  from agno.db.schemas.evals import EvalFilterType, EvalRunRecord, EvalType
  from agno.db.schemas.knowledge import KnowledgeRow
@@ -27,12 +31,12 @@ from agno.utils.log import log_debug, log_error, log_info, log_warning
  from agno.utils.string import generate_id
 
  try:
- from sqlalchemy import Index, UniqueConstraint, and_, func, update
+ from sqlalchemy import ForeignKey, Index, UniqueConstraint, and_, func, select, update
  from sqlalchemy.dialects import mysql
  from sqlalchemy.engine import Engine, create_engine
  from sqlalchemy.orm import scoped_session, sessionmaker
  from sqlalchemy.schema import Column, MetaData, Table
- from sqlalchemy.sql.expression import select, text
+ from sqlalchemy.sql.expression import text
  except ImportError:
  raise ImportError("`sqlalchemy` not installed. Please install it using `pip install sqlalchemy`")
 
@@ -50,6 +54,10 @@ class SingleStoreDb(BaseDb):
  metrics_table: Optional[str] = None,
  eval_table: Optional[str] = None,
  knowledge_table: Optional[str] = None,
+ versions_table: Optional[str] = None,
+ traces_table: Optional[str] = None,
+ spans_table: Optional[str] = None,
+ create_schema: bool = True,
  ):
  """
  Interface for interacting with a SingleStore database.
@@ -70,7 +78,9 @@ class SingleStoreDb(BaseDb):
  metrics_table (Optional[str]): Name of the table to store metrics.
  eval_table (Optional[str]): Name of the table to store evaluation runs data.
  knowledge_table (Optional[str]): Name of the table to store knowledge content.
-
+ versions_table (Optional[str]): Name of the table to store schema versions.
+ create_schema (bool): Whether to automatically create the database schema if it doesn't exist.
+ Set to False if schema is managed externally (e.g., via migrations). Defaults to True.
  Raises:
  ValueError: If neither db_url nor db_engine is provided.
  ValueError: If none of the tables are provided.
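For context, a minimal sketch of how the new constructor arguments might be wired up. This is not taken from the package; the connection URL and table names are illustrative placeholders.

```python
from agno.db.singlestore.singlestore import SingleStoreDb

# Minimal sketch, assuming a reachable SingleStore instance.
# The URL and table names below are placeholders, not package defaults.
db = SingleStoreDb(
    db_url="mysql+pymysql://user:pass@localhost:3306/agno",
    session_table="agno_sessions",          # pre-existing argument, shown for context
    versions_table="agno_schema_versions",  # new: tracks per-table schema versions
    traces_table="agno_traces",             # new: stores trace records
    spans_table="agno_spans",               # new: stores spans, FK to the traces table
    create_schema=False,                    # new: skip schema creation when migrations manage it
)
```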
@@ -89,6 +99,9 @@ class SingleStoreDb(BaseDb):
  metrics_table=metrics_table,
  eval_table=eval_table,
  knowledge_table=knowledge_table,
+ versions_table=versions_table,
+ traces_table=traces_table,
+ spans_table=spans_table,
  )
 
  _engine: Optional[Engine] = db_engine
@@ -106,7 +119,8 @@ class SingleStoreDb(BaseDb):
  self.db_url: Optional[str] = db_url
  self.db_engine: Engine = _engine
  self.db_schema: Optional[str] = db_schema
- self.metadata: MetaData = MetaData()
+ self.metadata: MetaData = MetaData(schema=self.db_schema)
+ self.create_schema: bool = create_schema
 
  # Initialize database session
  self.Session: scoped_session = scoped_session(sessionmaker(bind=self.db_engine))
@@ -124,7 +138,7 @@ class SingleStoreDb(BaseDb):
  with self.Session() as sess:
  return is_table_available(session=sess, table_name=table_name, db_schema=self.db_schema)
 
- def _create_table_structure_only(self, table_name: str, table_type: str, db_schema: Optional[str]) -> Table:
+ def _create_table_structure_only(self, table_name: str, table_type: str) -> Table:
  """
  Create a table structure definition without actually creating the table in the database.
  Used to avoid autoload issues with SingleStore JSON types.
@@ -132,13 +146,15 @@ class SingleStoreDb(BaseDb):
  Args:
  table_name (str): Name of the table
  table_type (str): Type of table (used to get schema definition)
- db_schema (Optional[str]): Database schema name
 
  Returns:
  Table: SQLAlchemy Table object with column definitions
  """
  try:
- table_schema = get_table_schema_definition(table_type)
+ # Pass traces_table_name and db_schema for spans table foreign key resolution
+ table_schema = get_table_schema_definition(
+ table_type, traces_table_name=self.trace_table_name, db_schema=self.db_schema or "agno"
+ )
 
  columns: List[Column] = []
  # Get the columns from the table schema
@@ -158,13 +174,12 @@ class SingleStoreDb(BaseDb):
  columns.append(Column(*column_args, **column_kwargs))
 
  # Create the table object without constraints to avoid autoload issues
- table_metadata = MetaData(schema=db_schema)
- table = Table(table_name, table_metadata, *columns, schema=db_schema)
+ table = Table(table_name, self.metadata, *columns, schema=self.db_schema)
 
  return table
 
  except Exception as e:
- table_ref = f"{db_schema}.{table_name}" if db_schema else table_name
+ table_ref = f"{self.db_schema}.{table_name}" if self.db_schema else table_name
  log_error(f"Could not create table structure for {table_ref}: {e}")
  raise
 
@@ -176,28 +191,29 @@ class SingleStoreDb(BaseDb):
  (self.metrics_table_name, "metrics"),
  (self.eval_table_name, "evals"),
  (self.knowledge_table_name, "knowledge"),
+ (self.versions_table_name, "versions"),
  ]
 
  for table_name, table_type in tables_to_create:
- self._create_table(table_name=table_name, table_type=table_type, db_schema=self.db_schema)
+ self._get_or_create_table(table_name=table_name, table_type=table_type, create_table_if_not_found=True)
 
- def _create_table(self, table_name: str, table_type: str, db_schema: Optional[str]) -> Table:
+ def _create_table(self, table_name: str, table_type: str) -> Table:
  """
  Create a table with the appropriate schema based on the table type.
 
  Args:
  table_name (str): Name of the table to create
  table_type (str): Type of table (used to get schema definition)
- db_schema (Optional[str]): Database schema name
 
  Returns:
  Table: SQLAlchemy Table object
  """
- table_ref = f"{db_schema}.{table_name}" if db_schema else table_name
+ table_ref = f"{self.db_schema}.{table_name}" if self.db_schema else table_name
  try:
- table_schema = get_table_schema_definition(table_type)
-
- log_debug(f"Creating table {table_ref}")
+ # Pass traces_table_name and db_schema for spans table foreign key resolution
+ table_schema = get_table_schema_definition(
+ table_type, traces_table_name=self.trace_table_name, db_schema=self.db_schema or "agno"
+ ).copy()
 
  columns: List[Column] = []
  indexes: List[str] = []
@@ -217,11 +233,15 @@ class SingleStoreDb(BaseDb):
  if col_config.get("unique", False):
  column_kwargs["unique"] = True
  unique_constraints.append(col_name)
+
+ # Handle foreign key constraint
+ if "foreign_key" in col_config:
+ column_args.append(ForeignKey(col_config["foreign_key"]))
+
  columns.append(Column(*column_args, **column_kwargs))
 
  # Create the table object
- table_metadata = MetaData(schema=db_schema)
- table = Table(table_name, table_metadata, *columns, schema=db_schema)
+ table = Table(table_name, self.metadata, *columns, schema=self.db_schema)
 
  # Add multi-column unique constraints with table-specific names
  for constraint in schema_unique_constraints:
@@ -235,48 +255,52 @@ class SingleStoreDb(BaseDb):
  table.append_constraint(Index(idx_name, idx_col))
 
  # Create schema if one is specified
- if db_schema is not None:
+ if self.create_schema and self.db_schema is not None:
  with self.Session() as sess, sess.begin():
- create_schema(session=sess, db_schema=db_schema)
+ create_schema(session=sess, db_schema=self.db_schema)
 
  # SingleStore has a limitation on the number of unique multi-field constraints per table.
  # We need to work around that limitation for the sessions table.
- if table_type == "sessions":
- with self.Session() as sess, sess.begin():
- # Build column definitions
- columns_sql = []
- for col in table.columns:
- col_sql = f"{col.name} {col.type.compile(self.db_engine.dialect)}"
- if not col.nullable:
- col_sql += " NOT NULL"
- columns_sql.append(col_sql)
-
- columns_def = ", ".join(columns_sql)
-
- # Add shard key and single unique constraint
- table_sql = f"""CREATE TABLE IF NOT EXISTS {table_ref} (
- {columns_def},
- SHARD KEY (session_id),
- UNIQUE KEY uq_session_type (session_id, session_type)
- )"""
-
- sess.execute(text(table_sql))
+ table_created = False
+ if not self.table_exists(table_name):
+ if table_type == "sessions":
+ with self.Session() as sess, sess.begin():
+ # Build column definitions
+ columns_sql = []
+ for col in table.columns:
+ col_sql = f"{col.name} {col.type.compile(self.db_engine.dialect)}"
+ if not col.nullable:
+ col_sql += " NOT NULL"
+ columns_sql.append(col_sql)
+
+ columns_def = ", ".join(columns_sql)
+
+ # Add shard key and single unique constraint
+ table_sql = f"""CREATE TABLE IF NOT EXISTS {table_ref} (
+ {columns_def},
+ SHARD KEY (session_id),
+ UNIQUE KEY uq_session_type (session_id, session_type)
+ )"""
+
+ sess.execute(text(table_sql))
+ else:
+ table.create(self.db_engine, checkfirst=True)
+ log_debug(f"Successfully created table '{table_ref}'")
+ table_created = True
  else:
- table.create(self.db_engine, checkfirst=True)
+ log_debug(f"Table '{table_ref}' already exists, skipping creation")
 
  # Create indexes
  for idx in table.indexes:
  try:
- log_debug(f"Creating index: {idx.name}")
-
  # Check if index already exists
  with self.Session() as sess:
- if db_schema is not None:
+ if self.db_schema is not None:
  exists_query = text(
  "SELECT 1 FROM information_schema.statistics WHERE table_schema = :schema AND index_name = :index_name"
  )
  exists = (
- sess.execute(exists_query, {"schema": db_schema, "index_name": idx.name}).scalar()
+ sess.execute(exists_query, {"schema": self.db_schema, "index_name": idx.name}).scalar()
  is not None
  )
  else:
@@ -290,10 +314,15 @@ class SingleStoreDb(BaseDb):
 
  idx.create(self.db_engine)
 
+ log_debug(f"Created index: {idx.name} for table {table_ref}")
  except Exception as e:
  log_error(f"Error creating index {idx.name}: {e}")
 
- log_debug(f"Successfully created table {table_ref}")
+ # Store the schema version for the created table
+ if table_name != self.versions_table_name and table_created:
+ latest_schema_version = MigrationManager(self).latest_schema_version
+ self.upsert_schema_version(table_name=table_name, version=latest_schema_version.public)
+
  return table
 
  except Exception as e:
@@ -305,7 +334,6 @@ class SingleStoreDb(BaseDb):
  self.session_table = self._get_or_create_table(
  table_name=self.session_table_name,
  table_type="sessions",
- db_schema=self.db_schema,
  create_table_if_not_found=create_table_if_not_found,
  )
  return self.session_table
@@ -314,7 +342,6 @@ class SingleStoreDb(BaseDb):
  self.memory_table = self._get_or_create_table(
  table_name=self.memory_table_name,
  table_type="memories",
- db_schema=self.db_schema,
  create_table_if_not_found=create_table_if_not_found,
  )
  return self.memory_table
@@ -323,7 +350,6 @@ class SingleStoreDb(BaseDb):
  self.metrics_table = self._get_or_create_table(
  table_name=self.metrics_table_name,
  table_type="metrics",
- db_schema=self.db_schema,
  create_table_if_not_found=create_table_if_not_found,
  )
  return self.metrics_table
@@ -332,7 +358,6 @@ class SingleStoreDb(BaseDb):
  self.eval_table = self._get_or_create_table(
  table_name=self.eval_table_name,
  table_type="evals",
- db_schema=self.db_schema,
  create_table_if_not_found=create_table_if_not_found,
  )
  return self.eval_table
@@ -341,7 +366,6 @@ class SingleStoreDb(BaseDb):
  self.knowledge_table = self._get_or_create_table(
  table_name=self.knowledge_table_name,
  table_type="knowledge",
- db_schema=self.db_schema,
  create_table_if_not_found=create_table_if_not_found,
  )
  return self.knowledge_table
@@ -350,18 +374,42 @@ class SingleStoreDb(BaseDb):
  self.culture_table = self._get_or_create_table(
  table_name=self.culture_table_name,
  table_type="culture",
- db_schema=self.db_schema,
  create_table_if_not_found=create_table_if_not_found,
  )
  return self.culture_table
 
+ if table_type == "versions":
+ self.versions_table = self._get_or_create_table(
+ table_name=self.versions_table_name,
+ table_type="versions",
+ create_table_if_not_found=create_table_if_not_found,
+ )
+ return self.versions_table
+
+ if table_type == "traces":
+ self.traces_table = self._get_or_create_table(
+ table_name=self.trace_table_name,
+ table_type="traces",
+ create_table_if_not_found=create_table_if_not_found,
+ )
+ return self.traces_table
+
+ if table_type == "spans":
+ # Ensure traces table exists first (for foreign key)
+ self._get_table(table_type="traces", create_table_if_not_found=create_table_if_not_found)
+ self.spans_table = self._get_or_create_table(
+ table_name=self.span_table_name,
+ table_type="spans",
+ create_table_if_not_found=create_table_if_not_found,
+ )
+ return self.spans_table
+
  raise ValueError(f"Unknown table type: {table_type}")
 
  def _get_or_create_table(
  self,
  table_name: str,
  table_type: str,
- db_schema: Optional[str],
  create_table_if_not_found: Optional[bool] = False,
  ) -> Optional[Table]:
  """
@@ -370,37 +418,78 @@ class SingleStoreDb(BaseDb):
  Args:
  table_name (str): Name of the table to get or create
  table_type (str): Type of table (used to get schema definition)
- db_schema (Optional[str]): Database schema name
 
  Returns:
  Table: SQLAlchemy Table object representing the schema.
  """
 
  with self.Session() as sess, sess.begin():
- table_is_available = is_table_available(session=sess, table_name=table_name, db_schema=db_schema)
+ table_is_available = is_table_available(session=sess, table_name=table_name, db_schema=self.db_schema)
 
  if not table_is_available:
  if not create_table_if_not_found:
  return None
- return self._create_table(table_name=table_name, table_type=table_type, db_schema=db_schema)
+
+ # Also store the schema version for the created table
+ if table_name != self.versions_table_name:
+ latest_schema_version = MigrationManager(self).latest_schema_version
+ self.upsert_schema_version(table_name=table_name, version=latest_schema_version.public)
+
+ return self._create_table(table_name=table_name, table_type=table_type)
 
  if not is_valid_table(
  db_engine=self.db_engine,
  table_name=table_name,
  table_type=table_type,
- db_schema=db_schema,
+ db_schema=self.db_schema,
  ):
- table_ref = f"{db_schema}.{table_name}" if db_schema else table_name
+ table_ref = f"{self.db_schema}.{table_name}" if self.db_schema else table_name
  raise ValueError(f"Table {table_ref} has an invalid schema")
 
  try:
- return self._create_table_structure_only(table_name=table_name, table_type=table_type, db_schema=db_schema)
+ return self._create_table_structure_only(table_name=table_name, table_type=table_type)
 
  except Exception as e:
- table_ref = f"{db_schema}.{table_name}" if db_schema else table_name
+ table_ref = f"{self.db_schema}.{table_name}" if self.db_schema else table_name
  log_error(f"Error loading existing table {table_ref}: {e}")
  raise
 
+ def get_latest_schema_version(self, table_name: str) -> str:
+ """Get the latest version of the database schema."""
+ table = self._get_table(table_type="versions", create_table_if_not_found=True)
+ if table is None:
+ return "2.0.0"
+ with self.Session() as sess:
+ stmt = select(table)
+ # Latest version for the given table
+ stmt = stmt.where(table.c.table_name == table_name)
+ stmt = stmt.order_by(table.c.version.desc()).limit(1)
+ result = sess.execute(stmt).fetchone()
+ if result is None:
+ return "2.0.0"
+ version_dict = dict(result._mapping)
+ return version_dict.get("version") or "2.0.0"
+
+ def upsert_schema_version(self, table_name: str, version: str) -> None:
+ """Upsert the schema version into the database."""
+ table = self._get_table(table_type="versions", create_table_if_not_found=True)
+ if table is None:
+ return
+ current_datetime = datetime.now().isoformat()
+ with self.Session() as sess, sess.begin():
+ stmt = mysql.insert(table).values(
+ table_name=table_name,
+ version=version,
+ created_at=current_datetime, # Store as ISO format string
+ updated_at=current_datetime,
+ )
+ # Update version if table_name already exists
+ stmt = stmt.on_duplicate_key_update(
+ version=version,
+ updated_at=current_datetime,
+ )
+ sess.execute(stmt)
+
  # -- Session methods --
  def delete_session(self, session_id: str) -> bool:
@@ -1247,13 +1336,14 @@ class SingleStoreDb(BaseDb):
  raise e
 
  def get_user_memory_stats(
- self, limit: Optional[int] = None, page: Optional[int] = None
+ self, limit: Optional[int] = None, page: Optional[int] = None, user_id: Optional[str] = None
  ) -> Tuple[List[Dict[str, Any]], int]:
  """Get user memories stats.
 
  Args:
  limit (Optional[int]): The maximum number of user stats to return.
  page (Optional[int]): The page number.
+ user_id (Optional[str]): User ID for filtering.
 
  Returns:
  Tuple[List[Dict[str, Any]], int]: A list of dictionaries containing user stats and total count.
@@ -1276,16 +1366,17 @@ class SingleStoreDb(BaseDb):
  return [], 0
 
  with self.Session() as sess, sess.begin():
- stmt = (
- select(
- table.c.user_id,
- func.count(table.c.memory_id).label("total_memories"),
- func.max(table.c.updated_at).label("last_memory_updated_at"),
- )
- .where(table.c.user_id.is_not(None))
- .group_by(table.c.user_id)
- .order_by(func.max(table.c.updated_at).desc())
+ stmt = select(
+ table.c.user_id,
+ func.count(table.c.memory_id).label("total_memories"),
+ func.max(table.c.updated_at).label("last_memory_updated_at"),
  )
+ if user_id is not None:
+ stmt = stmt.where(table.c.user_id == user_id)
+ else:
+ stmt = stmt.where(table.c.user_id.is_not(None))
+ stmt = stmt.group_by(table.c.user_id)
+ stmt = stmt.order_by(func.max(table.c.updated_at).desc())
 
  count_stmt = select(func.count()).select_from(stmt.alias())
  total_count = sess.execute(count_stmt).scalar()
@@ -1339,6 +1430,8 @@ class SingleStoreDb(BaseDb):
  if memory.memory_id is None:
  memory.memory_id = str(uuid4())
 
+ current_time = int(time.time())
+
  stmt = mysql.insert(table).values(
  memory_id=memory.memory_id,
  memory=memory.memory,
@@ -1347,7 +1440,9 @@ class SingleStoreDb(BaseDb):
  agent_id=memory.agent_id,
  team_id=memory.team_id,
  topics=memory.topics,
- updated_at=int(time.time()),
+ feedback=memory.feedback,
+ created_at=memory.created_at,
+ updated_at=current_time,
  )
  stmt = stmt.on_duplicate_key_update(
  memory=stmt.inserted.memory,
@@ -1356,7 +1451,10 @@ class SingleStoreDb(BaseDb):
  user_id=stmt.inserted.user_id,
  agent_id=stmt.inserted.agent_id,
  team_id=stmt.inserted.team_id,
- updated_at=int(time.time()),
+ feedback=stmt.inserted.feedback,
+ updated_at=stmt.inserted.updated_at,
+ # Preserve created_at on update - don't overwrite existing value
+ created_at=table.c.created_at,
  )
 
  sess.execute(stmt)
@@ -1404,11 +1502,13 @@ class SingleStoreDb(BaseDb):
  # Prepare data for bulk insert
  memory_data = []
  current_time = int(time.time())
+
  for memory in memories:
  if memory.memory_id is None:
  memory.memory_id = str(uuid4())
  # Use preserved updated_at if flag is set, otherwise use current time
  updated_at = memory.updated_at if preserve_updated_at else current_time
+
  memory_data.append(
  {
  "memory_id": memory.memory_id,
@@ -1418,6 +1518,8 @@ class SingleStoreDb(BaseDb):
  "agent_id": memory.agent_id,
  "team_id": memory.team_id,
  "topics": memory.topics,
+ "feedback": memory.feedback,
+ "created_at": memory.created_at,
  "updated_at": updated_at,
  }
  )
@@ -1434,7 +1536,10 @@ class SingleStoreDb(BaseDb):
  user_id=stmt.inserted.user_id,
  agent_id=stmt.inserted.agent_id,
  team_id=stmt.inserted.team_id,
+ feedback=stmt.inserted.feedback,
  updated_at=stmt.inserted.updated_at,
+ # Preserve created_at on update
+ created_at=table.c.created_at,
  )
  sess.execute(stmt, memory_data)
 
@@ -2270,3 +2375,561 @@ class SingleStoreDb(BaseDb):
  except Exception as e:
  log_error(f"Error upserting cultural knowledge: {e}")
  raise e
+
+ # --- Traces ---
+ def _get_traces_base_query(self, table: Table, spans_table: Optional[Table] = None):
+ """Build base query for traces with aggregated span counts.
+
+ Args:
+ table: The traces table.
+ spans_table: The spans table (optional).
+
+ Returns:
+ SQLAlchemy select statement with total_spans and error_count calculated dynamically.
+ """
+ from sqlalchemy import case, literal
+
+ if spans_table is not None:
+ # JOIN with spans table to calculate total_spans and error_count
+ return (
+ select(
+ table,
+ func.coalesce(func.count(spans_table.c.span_id), 0).label("total_spans"),
+ func.coalesce(func.sum(case((spans_table.c.status_code == "ERROR", 1), else_=0)), 0).label(
+ "error_count"
+ ),
+ )
+ .select_from(table.outerjoin(spans_table, table.c.trace_id == spans_table.c.trace_id))
+ .group_by(table.c.trace_id)
+ )
+ else:
+ # Fallback if spans table doesn't exist
+ return select(table, literal(0).label("total_spans"), literal(0).label("error_count"))
+
+ def _get_trace_component_level_expr(self, workflow_id_col, team_id_col, agent_id_col, name_col):
+ """Build a SQL CASE expression that returns the component level for a trace.
+
+ Component levels (higher = more important):
+ - 3: Workflow root (.run or .arun with workflow_id)
+ - 2: Team root (.run or .arun with team_id)
+ - 1: Agent root (.run or .arun with agent_id)
+ - 0: Child span (not a root)
+
+ Args:
+ workflow_id_col: SQL column/expression for workflow_id
+ team_id_col: SQL column/expression for team_id
+ agent_id_col: SQL column/expression for agent_id
+ name_col: SQL column/expression for name
+
+ Returns:
+ SQLAlchemy CASE expression returning the component level as an integer.
+ """
+ from sqlalchemy import case, or_
+
+ is_root_name = or_(name_col.like("%.run%"), name_col.like("%.arun%"))
+
+ return case(
+ # Workflow root (level 3)
+ (and_(workflow_id_col.isnot(None), is_root_name), 3),
+ # Team root (level 2)
+ (and_(team_id_col.isnot(None), is_root_name), 2),
+ # Agent root (level 1)
+ (and_(agent_id_col.isnot(None), is_root_name), 1),
+ # Child span or unknown (level 0)
+ else_=0,
+ )
+
+ def upsert_trace(self, trace: "Trace") -> None:
+ """Create or update a single trace record in the database.
+
+ Uses INSERT ... ON DUPLICATE KEY UPDATE (upsert) to handle concurrent inserts
+ atomically and avoid race conditions.
+
+ Args:
+ trace: The Trace object to store (one per trace_id).
+ """
+ from sqlalchemy import case
+
+ try:
+ table = self._get_table(table_type="traces", create_table_if_not_found=True)
+ if table is None:
+ return
+
+ trace_dict = trace.to_dict()
+ trace_dict.pop("total_spans", None)
+ trace_dict.pop("error_count", None)
+
+ with self.Session() as sess, sess.begin():
+ # Use upsert to handle concurrent inserts atomically
+ # On conflict, update fields while preserving existing non-null context values
+ # and keeping the earliest start_time
+ insert_stmt = mysql.insert(table).values(trace_dict)
+
+ # Build component level expressions for comparing trace priority
+ new_level = self._get_trace_component_level_expr(
+ insert_stmt.inserted.workflow_id,
+ insert_stmt.inserted.team_id,
+ insert_stmt.inserted.agent_id,
+ insert_stmt.inserted.name,
+ )
+ existing_level = self._get_trace_component_level_expr(
+ table.c.workflow_id,
+ table.c.team_id,
+ table.c.agent_id,
+ table.c.name,
+ )
+
+ # Build the ON DUPLICATE KEY UPDATE clause
+ # Use LEAST for start_time, GREATEST for end_time to capture full trace duration
+ # Duration is calculated using TIMESTAMPDIFF in microseconds then converted to ms
+ upsert_stmt = insert_stmt.on_duplicate_key_update(
+ end_time=func.greatest(table.c.end_time, insert_stmt.inserted.end_time),
+ start_time=func.least(table.c.start_time, insert_stmt.inserted.start_time),
+ # Calculate duration in milliseconds using TIMESTAMPDIFF
+ # TIMESTAMPDIFF(MICROSECOND, start, end) / 1000 gives milliseconds
+ duration_ms=func.timestampdiff(
+ text("MICROSECOND"),
+ func.least(table.c.start_time, insert_stmt.inserted.start_time),
+ func.greatest(table.c.end_time, insert_stmt.inserted.end_time),
+ )
+ / 1000,
+ status=insert_stmt.inserted.status,
+ # Update name only if new trace is from a higher-level component
+ # Priority: workflow (3) > team (2) > agent (1) > child spans (0)
+ name=case(
+ (new_level > existing_level, insert_stmt.inserted.name),
+ else_=table.c.name,
+ ),
+ # Preserve existing non-null context values using COALESCE
+ run_id=func.coalesce(insert_stmt.inserted.run_id, table.c.run_id),
+ session_id=func.coalesce(insert_stmt.inserted.session_id, table.c.session_id),
+ user_id=func.coalesce(insert_stmt.inserted.user_id, table.c.user_id),
+ agent_id=func.coalesce(insert_stmt.inserted.agent_id, table.c.agent_id),
+ team_id=func.coalesce(insert_stmt.inserted.team_id, table.c.team_id),
+ workflow_id=func.coalesce(insert_stmt.inserted.workflow_id, table.c.workflow_id),
+ )
+ sess.execute(upsert_stmt)
+
+ except Exception as e:
+ log_error(f"Error creating trace: {e}")
+ # Don't raise - tracing should not break the main application flow
+
+ def get_trace(
+ self,
+ trace_id: Optional[str] = None,
+ run_id: Optional[str] = None,
+ ):
+ """Get a single trace by trace_id or other filters.
+
+ Args:
+ trace_id: The unique trace identifier.
+ run_id: Filter by run ID (returns first match).
+
+ Returns:
+ Optional[Trace]: The trace if found, None otherwise.
+
+ Note:
+ If multiple filters are provided, trace_id takes precedence.
+ For other filters, the most recent trace is returned.
+ """
+ try:
+ from agno.tracing.schemas import Trace
+
+ table = self._get_table(table_type="traces")
+ if table is None:
+ return None
+
+ # Get spans table for JOIN
+ spans_table = self._get_table(table_type="spans")
+
+ with self.Session() as sess:
+ # Build query with aggregated span counts
+ stmt = self._get_traces_base_query(table, spans_table)
+
+ if trace_id:
+ stmt = stmt.where(table.c.trace_id == trace_id)
+ elif run_id:
+ stmt = stmt.where(table.c.run_id == run_id)
+ else:
+ log_debug("get_trace called without any filter parameters")
+ return None
+
+ # Order by most recent and get first result
+ stmt = stmt.order_by(table.c.start_time.desc()).limit(1)
+ result = sess.execute(stmt).fetchone()
+
+ if result:
+ return Trace.from_dict(dict(result._mapping))
+ return None
+
+ except Exception as e:
+ log_error(f"Error getting trace: {e}")
+ return None
+
+ def get_traces(
+ self,
+ run_id: Optional[str] = None,
+ session_id: Optional[str] = None,
+ user_id: Optional[str] = None,
+ agent_id: Optional[str] = None,
+ team_id: Optional[str] = None,
+ workflow_id: Optional[str] = None,
+ status: Optional[str] = None,
+ start_time: Optional[datetime] = None,
+ end_time: Optional[datetime] = None,
+ limit: Optional[int] = 20,
+ page: Optional[int] = 1,
+ ) -> tuple[List, int]:
+ """Get traces matching the provided filters.
+
+ Args:
+ run_id: Filter by run ID.
+ session_id: Filter by session ID.
+ user_id: Filter by user ID.
+ agent_id: Filter by agent ID.
+ team_id: Filter by team ID.
+ workflow_id: Filter by workflow ID.
+ status: Filter by status (OK, ERROR, UNSET).
+ start_time: Filter traces starting after this datetime.
+ end_time: Filter traces ending before this datetime.
+ limit: Maximum number of traces to return per page.
+ page: Page number (1-indexed).
+
+ Returns:
+ tuple[List[Trace], int]: Tuple of (list of matching traces, total count).
+ """
+ try:
+ from agno.tracing.schemas import Trace
+
+ log_debug(
+ f"get_traces called with filters: run_id={run_id}, session_id={session_id}, user_id={user_id}, agent_id={agent_id}, page={page}, limit={limit}"
+ )
+
+ table = self._get_table(table_type="traces")
+ if table is None:
+ log_debug("Traces table not found")
+ return [], 0
+
+ # Get spans table for JOIN
+ spans_table = self._get_table(table_type="spans")
+
+ with self.Session() as sess:
+ # Build base query with aggregated span counts
+ base_stmt = self._get_traces_base_query(table, spans_table)
+
+ # Apply filters
+ if run_id:
+ base_stmt = base_stmt.where(table.c.run_id == run_id)
+ if session_id:
+ base_stmt = base_stmt.where(table.c.session_id == session_id)
+ if user_id:
+ base_stmt = base_stmt.where(table.c.user_id == user_id)
+ if agent_id:
+ base_stmt = base_stmt.where(table.c.agent_id == agent_id)
+ if team_id:
+ base_stmt = base_stmt.where(table.c.team_id == team_id)
+ if workflow_id:
+ base_stmt = base_stmt.where(table.c.workflow_id == workflow_id)
+ if status:
+ base_stmt = base_stmt.where(table.c.status == status)
+ if start_time:
+ # Convert datetime to ISO string for comparison
+ base_stmt = base_stmt.where(table.c.start_time >= start_time.isoformat())
+ if end_time:
+ # Convert datetime to ISO string for comparison
+ base_stmt = base_stmt.where(table.c.end_time <= end_time.isoformat())
+
+ # Get total count
+ count_stmt = select(func.count()).select_from(base_stmt.alias())
+ total_count = sess.execute(count_stmt).scalar() or 0
+
+ # Apply pagination
+ offset = (page - 1) * limit if page and limit else 0
+ paginated_stmt = base_stmt.order_by(table.c.start_time.desc()).limit(limit).offset(offset)
+
+ results = sess.execute(paginated_stmt).fetchall()
+
+ traces = [Trace.from_dict(dict(row._mapping)) for row in results]
+ return traces, total_count
+
+ except Exception as e:
+ log_error(f"Error getting traces: {e}")
+ return [], 0
+
+ def get_trace_stats(
+ self,
+ user_id: Optional[str] = None,
+ agent_id: Optional[str] = None,
+ team_id: Optional[str] = None,
+ workflow_id: Optional[str] = None,
+ start_time: Optional[datetime] = None,
+ end_time: Optional[datetime] = None,
+ limit: Optional[int] = 20,
+ page: Optional[int] = 1,
+ ) -> tuple[List[Dict[str, Any]], int]:
+ """Get trace statistics grouped by session.
+
+ Args:
+ user_id: Filter by user ID.
+ agent_id: Filter by agent ID.
+ team_id: Filter by team ID.
+ workflow_id: Filter by workflow ID.
+ start_time: Filter sessions with traces created after this datetime.
+ end_time: Filter sessions with traces created before this datetime.
+ limit: Maximum number of sessions to return per page.
+ page: Page number (1-indexed).
+
+ Returns:
+ tuple[List[Dict], int]: Tuple of (list of session stats dicts, total count).
+ Each dict contains: session_id, user_id, agent_id, team_id, total_traces,
+ first_trace_at, last_trace_at.
+ """
+ try:
+ log_debug(
+ f"get_trace_stats called with filters: user_id={user_id}, agent_id={agent_id}, "
+ f"workflow_id={workflow_id}, team_id={team_id}, "
+ f"start_time={start_time}, end_time={end_time}, page={page}, limit={limit}"
+ )
+
+ table = self._get_table(table_type="traces")
+ if table is None:
+ log_debug("Traces table not found")
+ return [], 0
+
+ with self.Session() as sess:
+ # Build base query grouped by session_id
+ base_stmt = (
+ select(
+ table.c.session_id,
+ table.c.user_id,
+ table.c.agent_id,
+ table.c.team_id,
+ table.c.workflow_id,
+ func.count(table.c.trace_id).label("total_traces"),
+ func.min(table.c.created_at).label("first_trace_at"),
+ func.max(table.c.created_at).label("last_trace_at"),
+ )
+ .where(table.c.session_id.isnot(None)) # Only sessions with session_id
+ .group_by(
+ table.c.session_id, table.c.user_id, table.c.agent_id, table.c.team_id, table.c.workflow_id
+ )
+ )
+
+ # Apply filters
+ if user_id:
+ base_stmt = base_stmt.where(table.c.user_id == user_id)
+ if workflow_id:
+ base_stmt = base_stmt.where(table.c.workflow_id == workflow_id)
+ if team_id:
+ base_stmt = base_stmt.where(table.c.team_id == team_id)
+ if agent_id:
+ base_stmt = base_stmt.where(table.c.agent_id == agent_id)
+ if start_time:
+ # Convert datetime to ISO string for comparison
+ base_stmt = base_stmt.where(table.c.created_at >= start_time.isoformat())
+ if end_time:
+ # Convert datetime to ISO string for comparison
+ base_stmt = base_stmt.where(table.c.created_at <= end_time.isoformat())
+
+ # Get total count of sessions
+ count_stmt = select(func.count()).select_from(base_stmt.alias())
+ total_count = sess.execute(count_stmt).scalar() or 0
+ log_debug(f"Total matching sessions: {total_count}")
+
+ # Apply pagination and ordering
+ offset = (page - 1) * limit if page and limit else 0
+ paginated_stmt = base_stmt.order_by(func.max(table.c.created_at).desc()).limit(limit).offset(offset)
+
+ results = sess.execute(paginated_stmt).fetchall()
+ log_debug(f"Returning page {page} with {len(results)} session stats")
+
+ # Convert to list of dicts with datetime objects
+ stats_list = []
+ for row in results:
+ # Convert ISO strings to datetime objects
+ first_trace_at_str = row.first_trace_at
+ last_trace_at_str = row.last_trace_at
+
+ # Parse ISO format strings to datetime objects (handle None values)
+ first_trace_at = None
+ last_trace_at = None
+ if first_trace_at_str is not None:
+ first_trace_at = datetime.fromisoformat(first_trace_at_str.replace("Z", "+00:00"))
+ if last_trace_at_str is not None:
+ last_trace_at = datetime.fromisoformat(last_trace_at_str.replace("Z", "+00:00"))
+
+ stats_list.append(
+ {
+ "session_id": row.session_id,
+ "user_id": row.user_id,
+ "agent_id": row.agent_id,
+ "team_id": row.team_id,
+ "workflow_id": row.workflow_id,
+ "total_traces": row.total_traces,
+ "first_trace_at": first_trace_at,
+ "last_trace_at": last_trace_at,
+ }
+ )
+
+ return stats_list, total_count
+
+ except Exception as e:
+ log_error(f"Error getting trace stats: {e}")
+ return [], 0
+
+ # --- Spans ---
+ def create_span(self, span: "Span") -> None:
+ """Create a single span in the database.
+
+ Args:
+ span: The Span object to store.
+ """
+ try:
+ table = self._get_table(table_type="spans", create_table_if_not_found=True)
+ if table is None:
+ return
+
+ with self.Session() as sess, sess.begin():
+ stmt = mysql.insert(table).values(span.to_dict())
+ sess.execute(stmt)
+
+ except Exception as e:
+ log_error(f"Error creating span: {e}")
+
+ def create_spans(self, spans: List) -> None:
+ """Create multiple spans in the database as a batch.
+
+ Args:
+ spans: List of Span objects to store.
+ """
+ if not spans:
+ return
+
+ try:
+ table = self._get_table(table_type="spans", create_table_if_not_found=True)
+ if table is None:
+ return
+
+ with self.Session() as sess, sess.begin():
+ for span in spans:
+ stmt = mysql.insert(table).values(span.to_dict())
+ sess.execute(stmt)
+
+ except Exception as e:
+ log_error(f"Error creating spans batch: {e}")
+
+ def get_span(self, span_id: str):
+ """Get a single span by its span_id.
+
+ Args:
+ span_id: The unique span identifier.
+
+ Returns:
+ Optional[Span]: The span if found, None otherwise.
+ """
+ try:
+ from agno.tracing.schemas import Span
+
+ table = self._get_table(table_type="spans")
+ if table is None:
+ return None
+
+ with self.Session() as sess:
+ stmt = select(table).where(table.c.span_id == span_id)
+ result = sess.execute(stmt).fetchone()
+ if result:
+ return Span.from_dict(dict(result._mapping))
+ return None
+
+ except Exception as e:
+ log_error(f"Error getting span: {e}")
+ return None
+
+ def get_spans(
+ self,
+ trace_id: Optional[str] = None,
+ parent_span_id: Optional[str] = None,
+ limit: Optional[int] = 1000,
+ ) -> List:
+ """Get spans matching the provided filters.
+
+ Args:
+ trace_id: Filter by trace ID.
+ parent_span_id: Filter by parent span ID.
+ limit: Maximum number of spans to return.
+
+ Returns:
+ List[Span]: List of matching spans.
+ """
+ try:
+ from agno.tracing.schemas import Span
+
+ table = self._get_table(table_type="spans")
+ if table is None:
+ return []
+
+ with self.Session() as sess:
+ stmt = select(table)
+
+ # Apply filters
+ if trace_id:
+ stmt = stmt.where(table.c.trace_id == trace_id)
+ if parent_span_id:
+ stmt = stmt.where(table.c.parent_span_id == parent_span_id)
+
+ if limit:
+ stmt = stmt.limit(limit)
+
+ results = sess.execute(stmt).fetchall()
+ return [Span.from_dict(dict(row._mapping)) for row in results]
+
+ except Exception as e:
+ log_error(f"Error getting spans: {e}")
+ return []
+
+ # -- Learning methods (stubs) --
+ def get_learning(
+ self,
+ learning_type: str,
+ user_id: Optional[str] = None,
+ agent_id: Optional[str] = None,
+ team_id: Optional[str] = None,
+ session_id: Optional[str] = None,
+ namespace: Optional[str] = None,
+ entity_id: Optional[str] = None,
+ entity_type: Optional[str] = None,
+ ) -> Optional[Dict[str, Any]]:
+ raise NotImplementedError("Learning methods not yet implemented for SingleStoreDb")
+
+ def upsert_learning(
+ self,
+ id: str,
+ learning_type: str,
+ content: Dict[str, Any],
+ user_id: Optional[str] = None,
+ agent_id: Optional[str] = None,
+ team_id: Optional[str] = None,
+ session_id: Optional[str] = None,
+ namespace: Optional[str] = None,
+ entity_id: Optional[str] = None,
+ entity_type: Optional[str] = None,
+ metadata: Optional[Dict[str, Any]] = None,
+ ) -> None:
+ raise NotImplementedError("Learning methods not yet implemented for SingleStoreDb")
+
+ def delete_learning(self, id: str) -> bool:
+ raise NotImplementedError("Learning methods not yet implemented for SingleStoreDb")
+
+ def get_learnings(
+ self,
+ learning_type: Optional[str] = None,
+ user_id: Optional[str] = None,
+ agent_id: Optional[str] = None,
+ team_id: Optional[str] = None,
+ session_id: Optional[str] = None,
+ namespace: Optional[str] = None,
+ entity_id: Optional[str] = None,
+ entity_type: Optional[str] = None,
+ limit: Optional[int] = None,
+ ) -> List[Dict[str, Any]]:
+ raise NotImplementedError("Learning methods not yet implemented for SingleStoreDb")
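The read path of the new tracing storage might be driven as in this minimal sketch. Filter values are illustrative placeholders; producing `Trace`/`Span` rows is normally the job of the `agno.tracing` exporter and is not shown here.

```python
# Minimal sketch, reusing the `db` instance from the earlier example.
# "user-123" is an illustrative filter value, and the sketch assumes the
# Trace object exposes the trace_id column as an attribute.
traces, total = db.get_traces(user_id="user-123", limit=20, page=1)
print(f"{total} traces match")

for trace in traces:
    # get_spans() filters by trace ID and returns Span objects
    spans = db.get_spans(trace_id=trace.trace_id)
    print(trace.trace_id, len(spans))
```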