agno: 0.1.2 (py3-none-any.whl) → 2.3.13 (py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (723)
  1. agno/__init__.py +8 -0
  2. agno/agent/__init__.py +44 -5
  3. agno/agent/agent.py +10531 -2975
  4. agno/api/agent.py +14 -53
  5. agno/api/api.py +7 -46
  6. agno/api/evals.py +22 -0
  7. agno/api/os.py +17 -0
  8. agno/api/routes.py +6 -25
  9. agno/api/schemas/__init__.py +9 -0
  10. agno/api/schemas/agent.py +6 -9
  11. agno/api/schemas/evals.py +16 -0
  12. agno/api/schemas/os.py +14 -0
  13. agno/api/schemas/team.py +10 -10
  14. agno/api/schemas/utils.py +21 -0
  15. agno/api/schemas/workflows.py +16 -0
  16. agno/api/settings.py +53 -0
  17. agno/api/team.py +22 -26
  18. agno/api/workflow.py +28 -0
  19. agno/cloud/aws/base.py +214 -0
  20. agno/cloud/aws/s3/__init__.py +2 -0
  21. agno/cloud/aws/s3/api_client.py +43 -0
  22. agno/cloud/aws/s3/bucket.py +195 -0
  23. agno/cloud/aws/s3/object.py +57 -0
  24. agno/compression/__init__.py +3 -0
  25. agno/compression/manager.py +247 -0
  26. agno/culture/__init__.py +3 -0
  27. agno/culture/manager.py +956 -0
  28. agno/db/__init__.py +24 -0
  29. agno/db/async_postgres/__init__.py +3 -0
  30. agno/db/base.py +946 -0
  31. agno/db/dynamo/__init__.py +3 -0
  32. agno/db/dynamo/dynamo.py +2781 -0
  33. agno/db/dynamo/schemas.py +442 -0
  34. agno/db/dynamo/utils.py +743 -0
  35. agno/db/firestore/__init__.py +3 -0
  36. agno/db/firestore/firestore.py +2379 -0
  37. agno/db/firestore/schemas.py +181 -0
  38. agno/db/firestore/utils.py +376 -0
  39. agno/db/gcs_json/__init__.py +3 -0
  40. agno/db/gcs_json/gcs_json_db.py +1791 -0
  41. agno/db/gcs_json/utils.py +228 -0
  42. agno/db/in_memory/__init__.py +3 -0
  43. agno/db/in_memory/in_memory_db.py +1312 -0
  44. agno/db/in_memory/utils.py +230 -0
  45. agno/db/json/__init__.py +3 -0
  46. agno/db/json/json_db.py +1777 -0
  47. agno/db/json/utils.py +230 -0
  48. agno/db/migrations/manager.py +199 -0
  49. agno/db/migrations/v1_to_v2.py +635 -0
  50. agno/db/migrations/versions/v2_3_0.py +938 -0
  51. agno/db/mongo/__init__.py +17 -0
  52. agno/db/mongo/async_mongo.py +2760 -0
  53. agno/db/mongo/mongo.py +2597 -0
  54. agno/db/mongo/schemas.py +119 -0
  55. agno/db/mongo/utils.py +276 -0
  56. agno/db/mysql/__init__.py +4 -0
  57. agno/db/mysql/async_mysql.py +2912 -0
  58. agno/db/mysql/mysql.py +2923 -0
  59. agno/db/mysql/schemas.py +186 -0
  60. agno/db/mysql/utils.py +488 -0
  61. agno/db/postgres/__init__.py +4 -0
  62. agno/db/postgres/async_postgres.py +2579 -0
  63. agno/db/postgres/postgres.py +2870 -0
  64. agno/db/postgres/schemas.py +187 -0
  65. agno/db/postgres/utils.py +442 -0
  66. agno/db/redis/__init__.py +3 -0
  67. agno/db/redis/redis.py +2141 -0
  68. agno/db/redis/schemas.py +159 -0
  69. agno/db/redis/utils.py +346 -0
  70. agno/db/schemas/__init__.py +4 -0
  71. agno/db/schemas/culture.py +120 -0
  72. agno/db/schemas/evals.py +34 -0
  73. agno/db/schemas/knowledge.py +40 -0
  74. agno/db/schemas/memory.py +61 -0
  75. agno/db/singlestore/__init__.py +3 -0
  76. agno/db/singlestore/schemas.py +179 -0
  77. agno/db/singlestore/singlestore.py +2877 -0
  78. agno/db/singlestore/utils.py +384 -0
  79. agno/db/sqlite/__init__.py +4 -0
  80. agno/db/sqlite/async_sqlite.py +2911 -0
  81. agno/db/sqlite/schemas.py +181 -0
  82. agno/db/sqlite/sqlite.py +2908 -0
  83. agno/db/sqlite/utils.py +429 -0
  84. agno/db/surrealdb/__init__.py +3 -0
  85. agno/db/surrealdb/metrics.py +292 -0
  86. agno/db/surrealdb/models.py +334 -0
  87. agno/db/surrealdb/queries.py +71 -0
  88. agno/db/surrealdb/surrealdb.py +1908 -0
  89. agno/db/surrealdb/utils.py +147 -0
  90. agno/db/utils.py +118 -0
  91. agno/eval/__init__.py +24 -0
  92. agno/eval/accuracy.py +666 -276
  93. agno/eval/agent_as_judge.py +861 -0
  94. agno/eval/base.py +29 -0
  95. agno/eval/performance.py +779 -0
  96. agno/eval/reliability.py +241 -62
  97. agno/eval/utils.py +120 -0
  98. agno/exceptions.py +143 -1
  99. agno/filters.py +354 -0
  100. agno/guardrails/__init__.py +6 -0
  101. agno/guardrails/base.py +19 -0
  102. agno/guardrails/openai.py +144 -0
  103. agno/guardrails/pii.py +94 -0
  104. agno/guardrails/prompt_injection.py +52 -0
  105. agno/hooks/__init__.py +3 -0
  106. agno/hooks/decorator.py +164 -0
  107. agno/integrations/discord/__init__.py +3 -0
  108. agno/integrations/discord/client.py +203 -0
  109. agno/knowledge/__init__.py +5 -1
  110. agno/{document → knowledge}/chunking/agentic.py +22 -14
  111. agno/{document → knowledge}/chunking/document.py +2 -2
  112. agno/{document → knowledge}/chunking/fixed.py +7 -6
  113. agno/knowledge/chunking/markdown.py +151 -0
  114. agno/{document → knowledge}/chunking/recursive.py +15 -3
  115. agno/knowledge/chunking/row.py +39 -0
  116. agno/knowledge/chunking/semantic.py +91 -0
  117. agno/knowledge/chunking/strategy.py +165 -0
  118. agno/knowledge/content.py +74 -0
  119. agno/knowledge/document/__init__.py +5 -0
  120. agno/{document → knowledge/document}/base.py +12 -2
  121. agno/knowledge/embedder/__init__.py +5 -0
  122. agno/knowledge/embedder/aws_bedrock.py +343 -0
  123. agno/knowledge/embedder/azure_openai.py +210 -0
  124. agno/{embedder → knowledge/embedder}/base.py +8 -0
  125. agno/knowledge/embedder/cohere.py +323 -0
  126. agno/knowledge/embedder/fastembed.py +62 -0
  127. agno/{embedder → knowledge/embedder}/fireworks.py +1 -1
  128. agno/knowledge/embedder/google.py +258 -0
  129. agno/knowledge/embedder/huggingface.py +94 -0
  130. agno/knowledge/embedder/jina.py +182 -0
  131. agno/knowledge/embedder/langdb.py +22 -0
  132. agno/knowledge/embedder/mistral.py +206 -0
  133. agno/knowledge/embedder/nebius.py +13 -0
  134. agno/knowledge/embedder/ollama.py +154 -0
  135. agno/knowledge/embedder/openai.py +195 -0
  136. agno/knowledge/embedder/sentence_transformer.py +63 -0
  137. agno/{embedder → knowledge/embedder}/together.py +1 -1
  138. agno/knowledge/embedder/vllm.py +262 -0
  139. agno/knowledge/embedder/voyageai.py +165 -0
  140. agno/knowledge/knowledge.py +3006 -0
  141. agno/knowledge/reader/__init__.py +7 -0
  142. agno/knowledge/reader/arxiv_reader.py +81 -0
  143. agno/knowledge/reader/base.py +95 -0
  144. agno/knowledge/reader/csv_reader.py +164 -0
  145. agno/knowledge/reader/docx_reader.py +82 -0
  146. agno/knowledge/reader/field_labeled_csv_reader.py +290 -0
  147. agno/knowledge/reader/firecrawl_reader.py +201 -0
  148. agno/knowledge/reader/json_reader.py +88 -0
  149. agno/knowledge/reader/markdown_reader.py +137 -0
  150. agno/knowledge/reader/pdf_reader.py +431 -0
  151. agno/knowledge/reader/pptx_reader.py +101 -0
  152. agno/knowledge/reader/reader_factory.py +313 -0
  153. agno/knowledge/reader/s3_reader.py +89 -0
  154. agno/knowledge/reader/tavily_reader.py +193 -0
  155. agno/knowledge/reader/text_reader.py +127 -0
  156. agno/knowledge/reader/web_search_reader.py +325 -0
  157. agno/knowledge/reader/website_reader.py +455 -0
  158. agno/knowledge/reader/wikipedia_reader.py +91 -0
  159. agno/knowledge/reader/youtube_reader.py +78 -0
  160. agno/knowledge/remote_content/remote_content.py +88 -0
  161. agno/knowledge/reranker/__init__.py +3 -0
  162. agno/{reranker → knowledge/reranker}/base.py +1 -1
  163. agno/{reranker → knowledge/reranker}/cohere.py +2 -2
  164. agno/knowledge/reranker/infinity.py +195 -0
  165. agno/knowledge/reranker/sentence_transformer.py +54 -0
  166. agno/knowledge/types.py +39 -0
  167. agno/knowledge/utils.py +234 -0
  168. agno/media.py +439 -95
  169. agno/memory/__init__.py +16 -3
  170. agno/memory/manager.py +1474 -123
  171. agno/memory/strategies/__init__.py +15 -0
  172. agno/memory/strategies/base.py +66 -0
  173. agno/memory/strategies/summarize.py +196 -0
  174. agno/memory/strategies/types.py +37 -0
  175. agno/models/aimlapi/__init__.py +5 -0
  176. agno/models/aimlapi/aimlapi.py +62 -0
  177. agno/models/anthropic/__init__.py +4 -0
  178. agno/models/anthropic/claude.py +960 -496
  179. agno/models/aws/__init__.py +15 -0
  180. agno/models/aws/bedrock.py +686 -451
  181. agno/models/aws/claude.py +190 -183
  182. agno/models/azure/__init__.py +18 -1
  183. agno/models/azure/ai_foundry.py +489 -0
  184. agno/models/azure/openai_chat.py +89 -40
  185. agno/models/base.py +2477 -550
  186. agno/models/cerebras/__init__.py +12 -0
  187. agno/models/cerebras/cerebras.py +565 -0
  188. agno/models/cerebras/cerebras_openai.py +131 -0
  189. agno/models/cohere/__init__.py +4 -0
  190. agno/models/cohere/chat.py +306 -492
  191. agno/models/cometapi/__init__.py +5 -0
  192. agno/models/cometapi/cometapi.py +74 -0
  193. agno/models/dashscope/__init__.py +5 -0
  194. agno/models/dashscope/dashscope.py +90 -0
  195. agno/models/deepinfra/__init__.py +5 -0
  196. agno/models/deepinfra/deepinfra.py +45 -0
  197. agno/models/deepseek/__init__.py +4 -0
  198. agno/models/deepseek/deepseek.py +110 -9
  199. agno/models/fireworks/__init__.py +4 -0
  200. agno/models/fireworks/fireworks.py +19 -22
  201. agno/models/google/__init__.py +3 -7
  202. agno/models/google/gemini.py +1717 -662
  203. agno/models/google/utils.py +22 -0
  204. agno/models/groq/__init__.py +4 -0
  205. agno/models/groq/groq.py +391 -666
  206. agno/models/huggingface/__init__.py +4 -0
  207. agno/models/huggingface/huggingface.py +266 -538
  208. agno/models/ibm/__init__.py +5 -0
  209. agno/models/ibm/watsonx.py +432 -0
  210. agno/models/internlm/__init__.py +3 -0
  211. agno/models/internlm/internlm.py +20 -3
  212. agno/models/langdb/__init__.py +1 -0
  213. agno/models/langdb/langdb.py +60 -0
  214. agno/models/litellm/__init__.py +14 -0
  215. agno/models/litellm/chat.py +503 -0
  216. agno/models/litellm/litellm_openai.py +42 -0
  217. agno/models/llama_cpp/__init__.py +5 -0
  218. agno/models/llama_cpp/llama_cpp.py +22 -0
  219. agno/models/lmstudio/__init__.py +5 -0
  220. agno/models/lmstudio/lmstudio.py +25 -0
  221. agno/models/message.py +361 -39
  222. agno/models/meta/__init__.py +12 -0
  223. agno/models/meta/llama.py +502 -0
  224. agno/models/meta/llama_openai.py +79 -0
  225. agno/models/metrics.py +120 -0
  226. agno/models/mistral/__init__.py +4 -0
  227. agno/models/mistral/mistral.py +293 -393
  228. agno/models/nebius/__init__.py +3 -0
  229. agno/models/nebius/nebius.py +53 -0
  230. agno/models/nexus/__init__.py +3 -0
  231. agno/models/nexus/nexus.py +22 -0
  232. agno/models/nvidia/__init__.py +4 -0
  233. agno/models/nvidia/nvidia.py +22 -3
  234. agno/models/ollama/__init__.py +4 -2
  235. agno/models/ollama/chat.py +257 -492
  236. agno/models/openai/__init__.py +7 -0
  237. agno/models/openai/chat.py +725 -770
  238. agno/models/openai/like.py +16 -2
  239. agno/models/openai/responses.py +1121 -0
  240. agno/models/openrouter/__init__.py +4 -0
  241. agno/models/openrouter/openrouter.py +62 -5
  242. agno/models/perplexity/__init__.py +5 -0
  243. agno/models/perplexity/perplexity.py +203 -0
  244. agno/models/portkey/__init__.py +3 -0
  245. agno/models/portkey/portkey.py +82 -0
  246. agno/models/requesty/__init__.py +5 -0
  247. agno/models/requesty/requesty.py +69 -0
  248. agno/models/response.py +177 -7
  249. agno/models/sambanova/__init__.py +4 -0
  250. agno/models/sambanova/sambanova.py +23 -4
  251. agno/models/siliconflow/__init__.py +5 -0
  252. agno/models/siliconflow/siliconflow.py +42 -0
  253. agno/models/together/__init__.py +4 -0
  254. agno/models/together/together.py +21 -164
  255. agno/models/utils.py +266 -0
  256. agno/models/vercel/__init__.py +3 -0
  257. agno/models/vercel/v0.py +43 -0
  258. agno/models/vertexai/__init__.py +0 -1
  259. agno/models/vertexai/claude.py +190 -0
  260. agno/models/vllm/__init__.py +3 -0
  261. agno/models/vllm/vllm.py +83 -0
  262. agno/models/xai/__init__.py +2 -0
  263. agno/models/xai/xai.py +111 -7
  264. agno/os/__init__.py +3 -0
  265. agno/os/app.py +1027 -0
  266. agno/os/auth.py +244 -0
  267. agno/os/config.py +126 -0
  268. agno/os/interfaces/__init__.py +1 -0
  269. agno/os/interfaces/a2a/__init__.py +3 -0
  270. agno/os/interfaces/a2a/a2a.py +42 -0
  271. agno/os/interfaces/a2a/router.py +249 -0
  272. agno/os/interfaces/a2a/utils.py +924 -0
  273. agno/os/interfaces/agui/__init__.py +3 -0
  274. agno/os/interfaces/agui/agui.py +47 -0
  275. agno/os/interfaces/agui/router.py +147 -0
  276. agno/os/interfaces/agui/utils.py +574 -0
  277. agno/os/interfaces/base.py +25 -0
  278. agno/os/interfaces/slack/__init__.py +3 -0
  279. agno/os/interfaces/slack/router.py +148 -0
  280. agno/os/interfaces/slack/security.py +30 -0
  281. agno/os/interfaces/slack/slack.py +47 -0
  282. agno/os/interfaces/whatsapp/__init__.py +3 -0
  283. agno/os/interfaces/whatsapp/router.py +210 -0
  284. agno/os/interfaces/whatsapp/security.py +55 -0
  285. agno/os/interfaces/whatsapp/whatsapp.py +36 -0
  286. agno/os/mcp.py +293 -0
  287. agno/os/middleware/__init__.py +9 -0
  288. agno/os/middleware/jwt.py +797 -0
  289. agno/os/router.py +258 -0
  290. agno/os/routers/__init__.py +3 -0
  291. agno/os/routers/agents/__init__.py +3 -0
  292. agno/os/routers/agents/router.py +599 -0
  293. agno/os/routers/agents/schema.py +261 -0
  294. agno/os/routers/evals/__init__.py +3 -0
  295. agno/os/routers/evals/evals.py +450 -0
  296. agno/os/routers/evals/schemas.py +174 -0
  297. agno/os/routers/evals/utils.py +231 -0
  298. agno/os/routers/health.py +31 -0
  299. agno/os/routers/home.py +52 -0
  300. agno/os/routers/knowledge/__init__.py +3 -0
  301. agno/os/routers/knowledge/knowledge.py +1008 -0
  302. agno/os/routers/knowledge/schemas.py +178 -0
  303. agno/os/routers/memory/__init__.py +3 -0
  304. agno/os/routers/memory/memory.py +661 -0
  305. agno/os/routers/memory/schemas.py +88 -0
  306. agno/os/routers/metrics/__init__.py +3 -0
  307. agno/os/routers/metrics/metrics.py +190 -0
  308. agno/os/routers/metrics/schemas.py +47 -0
  309. agno/os/routers/session/__init__.py +3 -0
  310. agno/os/routers/session/session.py +997 -0
  311. agno/os/routers/teams/__init__.py +3 -0
  312. agno/os/routers/teams/router.py +512 -0
  313. agno/os/routers/teams/schema.py +257 -0
  314. agno/os/routers/traces/__init__.py +3 -0
  315. agno/os/routers/traces/schemas.py +414 -0
  316. agno/os/routers/traces/traces.py +499 -0
  317. agno/os/routers/workflows/__init__.py +3 -0
  318. agno/os/routers/workflows/router.py +624 -0
  319. agno/os/routers/workflows/schema.py +75 -0
  320. agno/os/schema.py +534 -0
  321. agno/os/scopes.py +469 -0
  322. agno/{playground → os}/settings.py +7 -15
  323. agno/os/utils.py +973 -0
  324. agno/reasoning/anthropic.py +80 -0
  325. agno/reasoning/azure_ai_foundry.py +67 -0
  326. agno/reasoning/deepseek.py +63 -0
  327. agno/reasoning/default.py +97 -0
  328. agno/reasoning/gemini.py +73 -0
  329. agno/reasoning/groq.py +71 -0
  330. agno/reasoning/helpers.py +24 -1
  331. agno/reasoning/ollama.py +67 -0
  332. agno/reasoning/openai.py +86 -0
  333. agno/reasoning/step.py +2 -1
  334. agno/reasoning/vertexai.py +76 -0
  335. agno/run/__init__.py +6 -0
  336. agno/run/agent.py +822 -0
  337. agno/run/base.py +247 -0
  338. agno/run/cancel.py +81 -0
  339. agno/run/requirement.py +181 -0
  340. agno/run/team.py +767 -0
  341. agno/run/workflow.py +708 -0
  342. agno/session/__init__.py +10 -0
  343. agno/session/agent.py +260 -0
  344. agno/session/summary.py +265 -0
  345. agno/session/team.py +342 -0
  346. agno/session/workflow.py +501 -0
  347. agno/table.py +10 -0
  348. agno/team/__init__.py +37 -0
  349. agno/team/team.py +9536 -0
  350. agno/tools/__init__.py +7 -0
  351. agno/tools/agentql.py +120 -0
  352. agno/tools/airflow.py +22 -12
  353. agno/tools/api.py +122 -0
  354. agno/tools/apify.py +276 -83
  355. agno/tools/{arxiv_toolkit.py → arxiv.py} +20 -12
  356. agno/tools/aws_lambda.py +28 -7
  357. agno/tools/aws_ses.py +66 -0
  358. agno/tools/baidusearch.py +11 -4
  359. agno/tools/bitbucket.py +292 -0
  360. agno/tools/brandfetch.py +213 -0
  361. agno/tools/bravesearch.py +106 -0
  362. agno/tools/brightdata.py +367 -0
  363. agno/tools/browserbase.py +209 -0
  364. agno/tools/calcom.py +32 -23
  365. agno/tools/calculator.py +24 -37
  366. agno/tools/cartesia.py +187 -0
  367. agno/tools/{clickup_tool.py → clickup.py} +17 -28
  368. agno/tools/confluence.py +91 -26
  369. agno/tools/crawl4ai.py +139 -43
  370. agno/tools/csv_toolkit.py +28 -22
  371. agno/tools/dalle.py +36 -22
  372. agno/tools/daytona.py +475 -0
  373. agno/tools/decorator.py +169 -14
  374. agno/tools/desi_vocal.py +23 -11
  375. agno/tools/discord.py +32 -29
  376. agno/tools/docker.py +716 -0
  377. agno/tools/duckdb.py +76 -81
  378. agno/tools/duckduckgo.py +43 -40
  379. agno/tools/e2b.py +703 -0
  380. agno/tools/eleven_labs.py +65 -54
  381. agno/tools/email.py +13 -5
  382. agno/tools/evm.py +129 -0
  383. agno/tools/exa.py +324 -42
  384. agno/tools/fal.py +39 -35
  385. agno/tools/file.py +196 -30
  386. agno/tools/file_generation.py +356 -0
  387. agno/tools/financial_datasets.py +288 -0
  388. agno/tools/firecrawl.py +108 -33
  389. agno/tools/function.py +960 -122
  390. agno/tools/giphy.py +34 -12
  391. agno/tools/github.py +1294 -97
  392. agno/tools/gmail.py +922 -0
  393. agno/tools/google_bigquery.py +117 -0
  394. agno/tools/google_drive.py +271 -0
  395. agno/tools/google_maps.py +253 -0
  396. agno/tools/googlecalendar.py +607 -107
  397. agno/tools/googlesheets.py +377 -0
  398. agno/tools/hackernews.py +20 -12
  399. agno/tools/jina.py +24 -14
  400. agno/tools/jira.py +48 -19
  401. agno/tools/knowledge.py +218 -0
  402. agno/tools/linear.py +82 -43
  403. agno/tools/linkup.py +58 -0
  404. agno/tools/local_file_system.py +15 -7
  405. agno/tools/lumalab.py +41 -26
  406. agno/tools/mcp/__init__.py +10 -0
  407. agno/tools/mcp/mcp.py +331 -0
  408. agno/tools/mcp/multi_mcp.py +347 -0
  409. agno/tools/mcp/params.py +24 -0
  410. agno/tools/mcp_toolbox.py +284 -0
  411. agno/tools/mem0.py +193 -0
  412. agno/tools/memory.py +419 -0
  413. agno/tools/mlx_transcribe.py +11 -9
  414. agno/tools/models/azure_openai.py +190 -0
  415. agno/tools/models/gemini.py +203 -0
  416. agno/tools/models/groq.py +158 -0
  417. agno/tools/models/morph.py +186 -0
  418. agno/tools/models/nebius.py +124 -0
  419. agno/tools/models_labs.py +163 -82
  420. agno/tools/moviepy_video.py +18 -13
  421. agno/tools/nano_banana.py +151 -0
  422. agno/tools/neo4j.py +134 -0
  423. agno/tools/newspaper.py +15 -4
  424. agno/tools/newspaper4k.py +19 -6
  425. agno/tools/notion.py +204 -0
  426. agno/tools/openai.py +181 -17
  427. agno/tools/openbb.py +27 -20
  428. agno/tools/opencv.py +321 -0
  429. agno/tools/openweather.py +233 -0
  430. agno/tools/oxylabs.py +385 -0
  431. agno/tools/pandas.py +25 -15
  432. agno/tools/parallel.py +314 -0
  433. agno/tools/postgres.py +238 -185
  434. agno/tools/pubmed.py +125 -13
  435. agno/tools/python.py +48 -35
  436. agno/tools/reasoning.py +283 -0
  437. agno/tools/reddit.py +207 -29
  438. agno/tools/redshift.py +406 -0
  439. agno/tools/replicate.py +69 -26
  440. agno/tools/resend.py +11 -6
  441. agno/tools/scrapegraph.py +179 -19
  442. agno/tools/searxng.py +23 -31
  443. agno/tools/serpapi.py +15 -10
  444. agno/tools/serper.py +255 -0
  445. agno/tools/shell.py +23 -12
  446. agno/tools/shopify.py +1519 -0
  447. agno/tools/slack.py +56 -14
  448. agno/tools/sleep.py +8 -6
  449. agno/tools/spider.py +35 -11
  450. agno/tools/spotify.py +919 -0
  451. agno/tools/sql.py +34 -19
  452. agno/tools/tavily.py +158 -8
  453. agno/tools/telegram.py +18 -8
  454. agno/tools/todoist.py +218 -0
  455. agno/tools/toolkit.py +134 -9
  456. agno/tools/trafilatura.py +388 -0
  457. agno/tools/trello.py +25 -28
  458. agno/tools/twilio.py +18 -9
  459. agno/tools/user_control_flow.py +78 -0
  460. agno/tools/valyu.py +228 -0
  461. agno/tools/visualization.py +467 -0
  462. agno/tools/webbrowser.py +28 -0
  463. agno/tools/webex.py +76 -0
  464. agno/tools/website.py +23 -19
  465. agno/tools/webtools.py +45 -0
  466. agno/tools/whatsapp.py +286 -0
  467. agno/tools/wikipedia.py +28 -19
  468. agno/tools/workflow.py +285 -0
  469. agno/tools/{twitter.py → x.py} +142 -46
  470. agno/tools/yfinance.py +41 -39
  471. agno/tools/youtube.py +34 -17
  472. agno/tools/zendesk.py +15 -5
  473. agno/tools/zep.py +454 -0
  474. agno/tools/zoom.py +86 -37
  475. agno/tracing/__init__.py +12 -0
  476. agno/tracing/exporter.py +157 -0
  477. agno/tracing/schemas.py +276 -0
  478. agno/tracing/setup.py +111 -0
  479. agno/utils/agent.py +938 -0
  480. agno/utils/audio.py +37 -1
  481. agno/utils/certs.py +27 -0
  482. agno/utils/code_execution.py +11 -0
  483. agno/utils/common.py +103 -20
  484. agno/utils/cryptography.py +22 -0
  485. agno/utils/dttm.py +33 -0
  486. agno/utils/events.py +700 -0
  487. agno/utils/functions.py +107 -37
  488. agno/utils/gemini.py +426 -0
  489. agno/utils/hooks.py +171 -0
  490. agno/utils/http.py +185 -0
  491. agno/utils/json_schema.py +159 -37
  492. agno/utils/knowledge.py +36 -0
  493. agno/utils/location.py +19 -0
  494. agno/utils/log.py +221 -8
  495. agno/utils/mcp.py +214 -0
  496. agno/utils/media.py +335 -14
  497. agno/utils/merge_dict.py +22 -1
  498. agno/utils/message.py +77 -2
  499. agno/utils/models/ai_foundry.py +50 -0
  500. agno/utils/models/claude.py +373 -0
  501. agno/utils/models/cohere.py +94 -0
  502. agno/utils/models/llama.py +85 -0
  503. agno/utils/models/mistral.py +100 -0
  504. agno/utils/models/openai_responses.py +140 -0
  505. agno/utils/models/schema_utils.py +153 -0
  506. agno/utils/models/watsonx.py +41 -0
  507. agno/utils/openai.py +257 -0
  508. agno/utils/pickle.py +1 -1
  509. agno/utils/pprint.py +124 -8
  510. agno/utils/print_response/agent.py +930 -0
  511. agno/utils/print_response/team.py +1914 -0
  512. agno/utils/print_response/workflow.py +1668 -0
  513. agno/utils/prompts.py +111 -0
  514. agno/utils/reasoning.py +108 -0
  515. agno/utils/response.py +163 -0
  516. agno/utils/serialize.py +32 -0
  517. agno/utils/shell.py +4 -4
  518. agno/utils/streamlit.py +487 -0
  519. agno/utils/string.py +204 -51
  520. agno/utils/team.py +139 -0
  521. agno/utils/timer.py +9 -2
  522. agno/utils/tokens.py +657 -0
  523. agno/utils/tools.py +19 -1
  524. agno/utils/whatsapp.py +305 -0
  525. agno/utils/yaml_io.py +3 -3
  526. agno/vectordb/__init__.py +2 -0
  527. agno/vectordb/base.py +87 -9
  528. agno/vectordb/cassandra/__init__.py +5 -1
  529. agno/vectordb/cassandra/cassandra.py +383 -27
  530. agno/vectordb/chroma/__init__.py +4 -0
  531. agno/vectordb/chroma/chromadb.py +748 -83
  532. agno/vectordb/clickhouse/__init__.py +7 -1
  533. agno/vectordb/clickhouse/clickhousedb.py +554 -53
  534. agno/vectordb/couchbase/__init__.py +3 -0
  535. agno/vectordb/couchbase/couchbase.py +1446 -0
  536. agno/vectordb/lancedb/__init__.py +5 -0
  537. agno/vectordb/lancedb/lance_db.py +730 -98
  538. agno/vectordb/langchaindb/__init__.py +5 -0
  539. agno/vectordb/langchaindb/langchaindb.py +163 -0
  540. agno/vectordb/lightrag/__init__.py +5 -0
  541. agno/vectordb/lightrag/lightrag.py +388 -0
  542. agno/vectordb/llamaindex/__init__.py +3 -0
  543. agno/vectordb/llamaindex/llamaindexdb.py +166 -0
  544. agno/vectordb/milvus/__init__.py +3 -0
  545. agno/vectordb/milvus/milvus.py +966 -78
  546. agno/vectordb/mongodb/__init__.py +9 -1
  547. agno/vectordb/mongodb/mongodb.py +1175 -172
  548. agno/vectordb/pgvector/__init__.py +8 -0
  549. agno/vectordb/pgvector/pgvector.py +599 -115
  550. agno/vectordb/pineconedb/__init__.py +5 -1
  551. agno/vectordb/pineconedb/pineconedb.py +406 -43
  552. agno/vectordb/qdrant/__init__.py +4 -0
  553. agno/vectordb/qdrant/qdrant.py +914 -61
  554. agno/vectordb/redis/__init__.py +9 -0
  555. agno/vectordb/redis/redisdb.py +682 -0
  556. agno/vectordb/singlestore/__init__.py +8 -1
  557. agno/vectordb/singlestore/singlestore.py +771 -0
  558. agno/vectordb/surrealdb/__init__.py +3 -0
  559. agno/vectordb/surrealdb/surrealdb.py +663 -0
  560. agno/vectordb/upstashdb/__init__.py +5 -0
  561. agno/vectordb/upstashdb/upstashdb.py +718 -0
  562. agno/vectordb/weaviate/__init__.py +8 -0
  563. agno/vectordb/weaviate/index.py +15 -0
  564. agno/vectordb/weaviate/weaviate.py +1009 -0
  565. agno/workflow/__init__.py +23 -1
  566. agno/workflow/agent.py +299 -0
  567. agno/workflow/condition.py +759 -0
  568. agno/workflow/loop.py +756 -0
  569. agno/workflow/parallel.py +853 -0
  570. agno/workflow/router.py +723 -0
  571. agno/workflow/step.py +1564 -0
  572. agno/workflow/steps.py +613 -0
  573. agno/workflow/types.py +556 -0
  574. agno/workflow/workflow.py +4327 -514
  575. agno-2.3.13.dist-info/METADATA +639 -0
  576. agno-2.3.13.dist-info/RECORD +613 -0
  577. {agno-0.1.2.dist-info → agno-2.3.13.dist-info}/WHEEL +1 -1
  578. agno-2.3.13.dist-info/licenses/LICENSE +201 -0
  579. agno/api/playground.py +0 -91
  580. agno/api/schemas/playground.py +0 -22
  581. agno/api/schemas/user.py +0 -22
  582. agno/api/schemas/workspace.py +0 -46
  583. agno/api/user.py +0 -160
  584. agno/api/workspace.py +0 -151
  585. agno/cli/auth_server.py +0 -118
  586. agno/cli/config.py +0 -275
  587. agno/cli/console.py +0 -88
  588. agno/cli/credentials.py +0 -23
  589. agno/cli/entrypoint.py +0 -571
  590. agno/cli/operator.py +0 -355
  591. agno/cli/settings.py +0 -85
  592. agno/cli/ws/ws_cli.py +0 -817
  593. agno/constants.py +0 -13
  594. agno/document/__init__.py +0 -1
  595. agno/document/chunking/semantic.py +0 -47
  596. agno/document/chunking/strategy.py +0 -31
  597. agno/document/reader/__init__.py +0 -1
  598. agno/document/reader/arxiv_reader.py +0 -41
  599. agno/document/reader/base.py +0 -22
  600. agno/document/reader/csv_reader.py +0 -84
  601. agno/document/reader/docx_reader.py +0 -46
  602. agno/document/reader/firecrawl_reader.py +0 -99
  603. agno/document/reader/json_reader.py +0 -43
  604. agno/document/reader/pdf_reader.py +0 -219
  605. agno/document/reader/s3/pdf_reader.py +0 -46
  606. agno/document/reader/s3/text_reader.py +0 -51
  607. agno/document/reader/text_reader.py +0 -41
  608. agno/document/reader/website_reader.py +0 -175
  609. agno/document/reader/youtube_reader.py +0 -50
  610. agno/embedder/__init__.py +0 -1
  611. agno/embedder/azure_openai.py +0 -86
  612. agno/embedder/cohere.py +0 -72
  613. agno/embedder/fastembed.py +0 -37
  614. agno/embedder/google.py +0 -73
  615. agno/embedder/huggingface.py +0 -54
  616. agno/embedder/mistral.py +0 -80
  617. agno/embedder/ollama.py +0 -57
  618. agno/embedder/openai.py +0 -74
  619. agno/embedder/sentence_transformer.py +0 -38
  620. agno/embedder/voyageai.py +0 -64
  621. agno/eval/perf.py +0 -201
  622. agno/file/__init__.py +0 -1
  623. agno/file/file.py +0 -16
  624. agno/file/local/csv.py +0 -32
  625. agno/file/local/txt.py +0 -19
  626. agno/infra/app.py +0 -240
  627. agno/infra/base.py +0 -144
  628. agno/infra/context.py +0 -20
  629. agno/infra/db_app.py +0 -52
  630. agno/infra/resource.py +0 -205
  631. agno/infra/resources.py +0 -55
  632. agno/knowledge/agent.py +0 -230
  633. agno/knowledge/arxiv.py +0 -22
  634. agno/knowledge/combined.py +0 -22
  635. agno/knowledge/csv.py +0 -28
  636. agno/knowledge/csv_url.py +0 -19
  637. agno/knowledge/document.py +0 -20
  638. agno/knowledge/docx.py +0 -30
  639. agno/knowledge/json.py +0 -28
  640. agno/knowledge/langchain.py +0 -71
  641. agno/knowledge/llamaindex.py +0 -66
  642. agno/knowledge/pdf.py +0 -28
  643. agno/knowledge/pdf_url.py +0 -26
  644. agno/knowledge/s3/base.py +0 -60
  645. agno/knowledge/s3/pdf.py +0 -21
  646. agno/knowledge/s3/text.py +0 -23
  647. agno/knowledge/text.py +0 -30
  648. agno/knowledge/website.py +0 -88
  649. agno/knowledge/wikipedia.py +0 -31
  650. agno/knowledge/youtube.py +0 -22
  651. agno/memory/agent.py +0 -392
  652. agno/memory/classifier.py +0 -104
  653. agno/memory/db/__init__.py +0 -1
  654. agno/memory/db/base.py +0 -42
  655. agno/memory/db/mongodb.py +0 -189
  656. agno/memory/db/postgres.py +0 -203
  657. agno/memory/db/sqlite.py +0 -193
  658. agno/memory/memory.py +0 -15
  659. agno/memory/row.py +0 -36
  660. agno/memory/summarizer.py +0 -192
  661. agno/memory/summary.py +0 -19
  662. agno/memory/workflow.py +0 -38
  663. agno/models/google/gemini_openai.py +0 -26
  664. agno/models/ollama/hermes.py +0 -221
  665. agno/models/ollama/tools.py +0 -362
  666. agno/models/vertexai/gemini.py +0 -595
  667. agno/playground/__init__.py +0 -3
  668. agno/playground/async_router.py +0 -421
  669. agno/playground/deploy.py +0 -249
  670. agno/playground/operator.py +0 -92
  671. agno/playground/playground.py +0 -91
  672. agno/playground/schemas.py +0 -76
  673. agno/playground/serve.py +0 -55
  674. agno/playground/sync_router.py +0 -405
  675. agno/reasoning/agent.py +0 -68
  676. agno/run/response.py +0 -112
  677. agno/storage/agent/__init__.py +0 -0
  678. agno/storage/agent/base.py +0 -38
  679. agno/storage/agent/dynamodb.py +0 -350
  680. agno/storage/agent/json.py +0 -92
  681. agno/storage/agent/mongodb.py +0 -228
  682. agno/storage/agent/postgres.py +0 -367
  683. agno/storage/agent/session.py +0 -79
  684. agno/storage/agent/singlestore.py +0 -303
  685. agno/storage/agent/sqlite.py +0 -357
  686. agno/storage/agent/yaml.py +0 -93
  687. agno/storage/workflow/__init__.py +0 -0
  688. agno/storage/workflow/base.py +0 -40
  689. agno/storage/workflow/mongodb.py +0 -233
  690. agno/storage/workflow/postgres.py +0 -366
  691. agno/storage/workflow/session.py +0 -60
  692. agno/storage/workflow/sqlite.py +0 -359
  693. agno/tools/googlesearch.py +0 -88
  694. agno/utils/defaults.py +0 -57
  695. agno/utils/filesystem.py +0 -39
  696. agno/utils/git.py +0 -52
  697. agno/utils/json_io.py +0 -30
  698. agno/utils/load_env.py +0 -19
  699. agno/utils/py_io.py +0 -19
  700. agno/utils/pyproject.py +0 -18
  701. agno/utils/resource_filter.py +0 -31
  702. agno/vectordb/singlestore/s2vectordb.py +0 -390
  703. agno/vectordb/singlestore/s2vectordb2.py +0 -355
  704. agno/workspace/__init__.py +0 -0
  705. agno/workspace/config.py +0 -325
  706. agno/workspace/enums.py +0 -6
  707. agno/workspace/helpers.py +0 -48
  708. agno/workspace/operator.py +0 -758
  709. agno/workspace/settings.py +0 -63
  710. agno-0.1.2.dist-info/LICENSE +0 -375
  711. agno-0.1.2.dist-info/METADATA +0 -502
  712. agno-0.1.2.dist-info/RECORD +0 -352
  713. agno-0.1.2.dist-info/entry_points.txt +0 -3
  714. /agno/{cli → db/migrations}/__init__.py +0 -0
  715. /agno/{cli/ws → db/migrations/versions}/__init__.py +0 -0
  716. /agno/{document/chunking/__init__.py → db/schemas/metrics.py} +0 -0
  717. /agno/{document/reader/s3 → integrations}/__init__.py +0 -0
  718. /agno/{file/local → knowledge/chunking}/__init__.py +0 -0
  719. /agno/{infra → knowledge/remote_content}/__init__.py +0 -0
  720. /agno/{knowledge/s3 → tools/models}/__init__.py +0 -0
  721. /agno/{reranker → utils/models}/__init__.py +0 -0
  722. /agno/{storage → utils/print_response}/__init__.py +0 -0
  723. {agno-0.1.2.dist-info → agno-2.3.13.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,2877 @@
1
+ import json
2
+ import time
3
+ from datetime import date, datetime, timedelta, timezone
4
+ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
5
+ from uuid import uuid4
6
+
7
+ if TYPE_CHECKING:
8
+ from agno.tracing.schemas import Span, Trace
9
+
10
+ from agno.db.base import BaseDb, SessionType
11
+ from agno.db.migrations.manager import MigrationManager
12
+ from agno.db.schemas.culture import CulturalKnowledge
13
+ from agno.db.schemas.evals import EvalFilterType, EvalRunRecord, EvalType
14
+ from agno.db.schemas.knowledge import KnowledgeRow
15
+ from agno.db.schemas.memory import UserMemory
16
+ from agno.db.singlestore.schemas import get_table_schema_definition
17
+ from agno.db.singlestore.utils import (
18
+ apply_sorting,
19
+ bulk_upsert_metrics,
20
+ calculate_date_metrics,
21
+ create_schema,
22
+ deserialize_cultural_knowledge_from_db,
23
+ fetch_all_sessions_data,
24
+ get_dates_to_calculate_metrics_for,
25
+ is_table_available,
26
+ is_valid_table,
27
+ serialize_cultural_knowledge_for_db,
28
+ )
29
+ from agno.session import AgentSession, Session, TeamSession, WorkflowSession
30
+ from agno.utils.log import log_debug, log_error, log_info, log_warning
31
+ from agno.utils.string import generate_id
32
+
33
+ try:
34
+ from sqlalchemy import Index, UniqueConstraint, and_, func, select, update
35
+ from sqlalchemy.dialects import mysql
36
+ from sqlalchemy.engine import Engine, create_engine
37
+ from sqlalchemy.orm import scoped_session, sessionmaker
38
+ from sqlalchemy.schema import Column, MetaData, Table
39
+ from sqlalchemy.sql.expression import text
40
+ except ImportError:
41
+ raise ImportError("`sqlalchemy` not installed. Please install it using `pip install sqlalchemy`")
42
+
43
+
44
+ class SingleStoreDb(BaseDb):
45
+ def __init__(
46
+ self,
47
+ id: Optional[str] = None,
48
+ db_engine: Optional[Engine] = None,
49
+ db_schema: Optional[str] = None,
50
+ db_url: Optional[str] = None,
51
+ session_table: Optional[str] = None,
52
+ culture_table: Optional[str] = None,
53
+ memory_table: Optional[str] = None,
54
+ metrics_table: Optional[str] = None,
55
+ eval_table: Optional[str] = None,
56
+ knowledge_table: Optional[str] = None,
57
+ versions_table: Optional[str] = None,
58
+ traces_table: Optional[str] = None,
59
+ spans_table: Optional[str] = None,
60
+ create_schema: bool = True,
61
+ ):
62
+ """
63
+ Interface for interacting with a SingleStore database.
64
+
65
+ The following order is used to determine the database connection:
66
+ 1. Use the db_engine if provided
67
+ 2. Use the db_url
68
+ 3. Raise an error if neither is provided
69
+
70
+ Args:
71
+ id (Optional[str]): The ID of the database.
72
+ db_engine (Optional[Engine]): The SQLAlchemy database engine to use.
73
+ db_schema (Optional[str]): The database schema to use.
74
+ db_url (Optional[str]): The database URL to connect to.
75
+ session_table (Optional[str]): Name of the table to store Agent, Team and Workflow sessions.
76
+ culture_table (Optional[str]): Name of the table to store cultural knowledge.
77
+ memory_table (Optional[str]): Name of the table to store memories.
78
+ metrics_table (Optional[str]): Name of the table to store metrics.
79
+ eval_table (Optional[str]): Name of the table to store evaluation runs data.
80
+ knowledge_table (Optional[str]): Name of the table to store knowledge content.
81
+ versions_table (Optional[str]): Name of the table to store schema versions.
82
+ create_schema (bool): Whether to automatically create the database schema if it doesn't exist.
83
+ Set to False if schema is managed externally (e.g., via migrations). Defaults to True.
84
+ Raises:
85
+ ValueError: If neither db_url nor db_engine is provided.
86
+ ValueError: If none of the tables are provided.
87
+ """
88
+ if id is None:
89
+ base_seed = db_url or str(db_engine.url) if db_engine else "singlestore" # type: ignore
90
+ schema_suffix = db_schema if db_schema is not None else "ai"
91
+ seed = f"{base_seed}#{schema_suffix}"
92
+ id = generate_id(seed)
93
+
94
+ super().__init__(
95
+ id=id,
96
+ session_table=session_table,
97
+ culture_table=culture_table,
98
+ memory_table=memory_table,
99
+ metrics_table=metrics_table,
100
+ eval_table=eval_table,
101
+ knowledge_table=knowledge_table,
102
+ versions_table=versions_table,
103
+ traces_table=traces_table,
104
+ spans_table=spans_table,
105
+ )
106
+
107
+ _engine: Optional[Engine] = db_engine
108
+ if _engine is None and db_url is not None:
109
+ _engine = create_engine(
110
+ db_url,
111
+ connect_args={
112
+ "charset": "utf8mb4",
113
+ "ssl": {"ssl_disabled": False, "ssl_ca": None, "ssl_check_hostname": False},
114
+ },
115
+ )
116
+ if _engine is None:
117
+ raise ValueError("One of db_url or db_engine must be provided")
118
+
119
+ self.db_url: Optional[str] = db_url
120
+ self.db_engine: Engine = _engine
121
+ self.db_schema: Optional[str] = db_schema
122
+ self.metadata: MetaData = MetaData(schema=self.db_schema)
123
+ self.create_schema: bool = create_schema
124
+
125
+ # Initialize database session
126
+ self.Session: scoped_session = scoped_session(sessionmaker(bind=self.db_engine))
127
+
128
+ # -- DB methods --
129
+ def table_exists(self, table_name: str) -> bool:
130
+ """Check if a table with the given name exists in the SingleStore database.
131
+
132
+ Args:
133
+ table_name: Name of the table to check
134
+
135
+ Returns:
136
+ bool: True if the table exists in the database, False otherwise
137
+ """
138
+ with self.Session() as sess:
139
+ return is_table_available(session=sess, table_name=table_name, db_schema=self.db_schema)
140
+
141
+ def _create_table_structure_only(self, table_name: str, table_type: str) -> Table:
142
+ """
143
+ Create a table structure definition without actually creating the table in the database.
144
+ Used to avoid autoload issues with SingleStore JSON types.
145
+
146
+ Args:
147
+ table_name (str): Name of the table
148
+ table_type (str): Type of table (used to get schema definition)
149
+
150
+ Returns:
151
+ Table: SQLAlchemy Table object with column definitions
152
+ """
153
+ try:
154
+ table_schema = get_table_schema_definition(table_type)
155
+
156
+ columns: List[Column] = []
157
+ # Get the columns from the table schema
158
+ for col_name, col_config in table_schema.items():
159
+ # Skip constraint definitions
160
+ if col_name.startswith("_"):
161
+ continue
162
+
163
+ column_args = [col_name, col_config["type"]()]
164
+ column_kwargs: Dict[str, Any] = {}
165
+ if col_config.get("primary_key", False):
166
+ column_kwargs["primary_key"] = True
167
+ if "nullable" in col_config:
168
+ column_kwargs["nullable"] = col_config["nullable"]
169
+ if col_config.get("unique", False):
170
+ column_kwargs["unique"] = True
171
+ columns.append(Column(*column_args, **column_kwargs))
172
+
173
+ # Create the table object without constraints to avoid autoload issues
174
+ table = Table(table_name, self.metadata, *columns, schema=self.db_schema)
175
+
176
+ return table
177
+
178
+ except Exception as e:
179
+ table_ref = f"{self.db_schema}.{table_name}" if self.db_schema else table_name
180
+ log_error(f"Could not create table structure for {table_ref}: {e}")
181
+ raise
182
+
183
+ def _create_all_tables(self):
184
+ """Create all tables for the database."""
185
+ tables_to_create = [
186
+ (self.session_table_name, "sessions"),
187
+ (self.memory_table_name, "memories"),
188
+ (self.metrics_table_name, "metrics"),
189
+ (self.eval_table_name, "evals"),
190
+ (self.knowledge_table_name, "knowledge"),
191
+ (self.versions_table_name, "versions"),
192
+ ]
193
+
194
+ for table_name, table_type in tables_to_create:
195
+ self._get_or_create_table(table_name=table_name, table_type=table_type, create_table_if_not_found=True)
196
+
197
+ def _create_table(self, table_name: str, table_type: str) -> Table:
198
+ """
199
+ Create a table with the appropriate schema based on the table type.
200
+
201
+ Args:
202
+ table_name (str): Name of the table to create
203
+ table_type (str): Type of table (used to get schema definition)
204
+
205
+ Returns:
206
+ Table: SQLAlchemy Table object
207
+ """
208
+ table_ref = f"{self.db_schema}.{table_name}" if self.db_schema else table_name
209
+ try:
210
+ table_schema = get_table_schema_definition(table_type)
211
+
212
+ columns: List[Column] = []
213
+ indexes: List[str] = []
214
+ unique_constraints: List[str] = []
215
+ schema_unique_constraints = table_schema.pop("_unique_constraints", [])
216
+
217
+ # Get the columns, indexes, and unique constraints from the table schema
218
+ for col_name, col_config in table_schema.items():
219
+ column_args = [col_name, col_config["type"]()]
220
+ column_kwargs: Dict[str, Any] = {}
221
+ if col_config.get("primary_key", False):
222
+ column_kwargs["primary_key"] = True
223
+ if "nullable" in col_config:
224
+ column_kwargs["nullable"] = col_config["nullable"]
225
+ if col_config.get("index", False):
226
+ indexes.append(col_name)
227
+ if col_config.get("unique", False):
228
+ column_kwargs["unique"] = True
229
+ unique_constraints.append(col_name)
230
+ columns.append(Column(*column_args, **column_kwargs))
231
+
232
+ # Create the table object
233
+ table = Table(table_name, self.metadata, *columns, schema=self.db_schema)
234
+
235
+ # Add multi-column unique constraints with table-specific names
236
+ for constraint in schema_unique_constraints:
237
+ constraint_name = f"{table_name}_{constraint['name']}"
238
+ constraint_columns = constraint["columns"]
239
+ table.append_constraint(UniqueConstraint(*constraint_columns, name=constraint_name))
240
+
241
+ # Add indexes to the table definition
242
+ for idx_col in indexes:
243
+ idx_name = f"idx_{table_name}_{idx_col}"
244
+ table.append_constraint(Index(idx_name, idx_col))
245
+
246
+ # Create schema if one is specified
247
+ if self.create_schema and self.db_schema is not None:
248
+ with self.Session() as sess, sess.begin():
249
+ create_schema(session=sess, db_schema=self.db_schema)
250
+
251
+ # SingleStore has a limitation on the number of unique multi-field constraints per table.
252
+ # We need to work around that limitation for the sessions table.
253
+ table_created = False
254
+ if not self.table_exists(table_name):
255
+ if table_type == "sessions":
256
+ with self.Session() as sess, sess.begin():
257
+ # Build column definitions
258
+ columns_sql = []
259
+ for col in table.columns:
260
+ col_sql = f"{col.name} {col.type.compile(self.db_engine.dialect)}"
261
+ if not col.nullable:
262
+ col_sql += " NOT NULL"
263
+ columns_sql.append(col_sql)
264
+
265
+ columns_def = ", ".join(columns_sql)
266
+
267
+ # Add shard key and single unique constraint
268
+ table_sql = f"""CREATE TABLE IF NOT EXISTS {table_ref} (
269
+ {columns_def},
270
+ SHARD KEY (session_id),
271
+ UNIQUE KEY uq_session_type (session_id, session_type)
272
+ )"""
273
+
274
+ sess.execute(text(table_sql))
275
+ else:
276
+ table.create(self.db_engine, checkfirst=True)
277
+ log_debug(f"Successfully created table '{table_ref}'")
278
+ table_created = True
279
+ else:
280
+ log_debug(f"Table '{table_ref}' already exists, skipping creation")
281
+
282
+ # Create indexes
283
+ for idx in table.indexes:
284
+ try:
285
+ # Check if index already exists
286
+ with self.Session() as sess:
287
+ if self.db_schema is not None:
288
+ exists_query = text(
289
+ "SELECT 1 FROM information_schema.statistics WHERE table_schema = :schema AND index_name = :index_name"
290
+ )
291
+ exists = (
292
+ sess.execute(exists_query, {"schema": self.db_schema, "index_name": idx.name}).scalar()
293
+ is not None
294
+ )
295
+ else:
296
+ exists_query = text(
297
+ "SELECT 1 FROM information_schema.statistics WHERE table_schema = DATABASE() AND index_name = :index_name"
298
+ )
299
+ exists = sess.execute(exists_query, {"index_name": idx.name}).scalar() is not None
300
+ if exists:
301
+ log_debug(f"Index {idx.name} already exists in {table_ref}, skipping creation")
302
+ continue
303
+
304
+ idx.create(self.db_engine)
305
+
306
+ log_debug(f"Created index: {idx.name} for table {table_ref}")
307
+ except Exception as e:
308
+ log_error(f"Error creating index {idx.name}: {e}")
309
+
310
+ # Store the schema version for the created table
311
+ if table_name != self.versions_table_name and table_created:
312
+ latest_schema_version = MigrationManager(self).latest_schema_version
313
+ self.upsert_schema_version(table_name=table_name, version=latest_schema_version.public)
314
+
315
+ return table
316
+
317
+ except Exception as e:
318
+ log_error(f"Could not create table {table_ref}: {e}")
319
+ raise
320
+
321
+ def _get_table(self, table_type: str, create_table_if_not_found: Optional[bool] = False) -> Optional[Table]:
322
+ if table_type == "sessions":
323
+ self.session_table = self._get_or_create_table(
324
+ table_name=self.session_table_name,
325
+ table_type="sessions",
326
+ create_table_if_not_found=create_table_if_not_found,
327
+ )
328
+ return self.session_table
329
+
330
+ if table_type == "memories":
331
+ self.memory_table = self._get_or_create_table(
332
+ table_name=self.memory_table_name,
333
+ table_type="memories",
334
+ create_table_if_not_found=create_table_if_not_found,
335
+ )
336
+ return self.memory_table
337
+
338
+ if table_type == "metrics":
339
+ self.metrics_table = self._get_or_create_table(
340
+ table_name=self.metrics_table_name,
341
+ table_type="metrics",
342
+ create_table_if_not_found=create_table_if_not_found,
343
+ )
344
+ return self.metrics_table
345
+
346
+ if table_type == "evals":
347
+ self.eval_table = self._get_or_create_table(
348
+ table_name=self.eval_table_name,
349
+ table_type="evals",
350
+ create_table_if_not_found=create_table_if_not_found,
351
+ )
352
+ return self.eval_table
353
+
354
+ if table_type == "knowledge":
355
+ self.knowledge_table = self._get_or_create_table(
356
+ table_name=self.knowledge_table_name,
357
+ table_type="knowledge",
358
+ create_table_if_not_found=create_table_if_not_found,
359
+ )
360
+ return self.knowledge_table
361
+
362
+ if table_type == "culture":
363
+ self.culture_table = self._get_or_create_table(
364
+ table_name=self.culture_table_name,
365
+ table_type="culture",
366
+ create_table_if_not_found=create_table_if_not_found,
367
+ )
368
+ return self.culture_table
369
+
370
+ if table_type == "versions":
371
+ self.versions_table = self._get_or_create_table(
372
+ table_name=self.versions_table_name,
373
+ table_type="versions",
374
+ create_table_if_not_found=create_table_if_not_found,
375
+ )
376
+ return self.versions_table
377
+
378
+ if table_type == "traces":
379
+ self.traces_table = self._get_or_create_table(
380
+ table_name=self.trace_table_name,
381
+ table_type="traces",
382
+ create_table_if_not_found=create_table_if_not_found,
383
+ )
384
+ return self.traces_table
385
+
386
+ if table_type == "spans":
387
+ # Ensure traces table exists first (for foreign key)
388
+ self._get_table(table_type="traces", create_table_if_not_found=create_table_if_not_found)
389
+ self.spans_table = self._get_or_create_table(
390
+ table_name=self.span_table_name,
391
+ table_type="spans",
392
+ create_table_if_not_found=create_table_if_not_found,
393
+ )
394
+ return self.spans_table
395
+
396
+ raise ValueError(f"Unknown table type: {table_type}")
397
+
398
+ def _get_or_create_table(
399
+ self,
400
+ table_name: str,
401
+ table_type: str,
402
+ create_table_if_not_found: Optional[bool] = False,
403
+ ) -> Optional[Table]:
404
+ """
405
+ Check if the table exists and is valid, else create it.
406
+
407
+ Args:
408
+ table_name (str): Name of the table to get or create
409
+ table_type (str): Type of table (used to get schema definition)
410
+
411
+ Returns:
412
+ Table: SQLAlchemy Table object representing the schema.
413
+ """
414
+
415
+ with self.Session() as sess, sess.begin():
416
+ table_is_available = is_table_available(session=sess, table_name=table_name, db_schema=self.db_schema)
417
+
418
+ if not table_is_available:
419
+ if not create_table_if_not_found:
420
+ return None
421
+
422
+ # Also store the schema version for the created table
423
+ if table_name != self.versions_table_name:
424
+ latest_schema_version = MigrationManager(self).latest_schema_version
425
+ self.upsert_schema_version(table_name=table_name, version=latest_schema_version.public)
426
+
427
+ return self._create_table(table_name=table_name, table_type=table_type)
428
+
429
+ if not is_valid_table(
430
+ db_engine=self.db_engine,
431
+ table_name=table_name,
432
+ table_type=table_type,
433
+ db_schema=self.db_schema,
434
+ ):
435
+ table_ref = f"{self.db_schema}.{table_name}" if self.db_schema else table_name
436
+ raise ValueError(f"Table {table_ref} has an invalid schema")
437
+
438
+ try:
439
+ return self._create_table_structure_only(table_name=table_name, table_type=table_type)
440
+
441
+ except Exception as e:
442
+ table_ref = f"{self.db_schema}.{table_name}" if self.db_schema else table_name
443
+ log_error(f"Error loading existing table {table_ref}: {e}")
444
+ raise
445
+
446
+ def get_latest_schema_version(self, table_name: str) -> str:
447
+ """Get the latest version of the database schema."""
448
+ table = self._get_table(table_type="versions", create_table_if_not_found=True)
449
+ if table is None:
450
+ return "2.0.0"
451
+ with self.Session() as sess:
452
+ stmt = select(table)
453
+ # Latest version for the given table
454
+ stmt = stmt.where(table.c.table_name == table_name)
455
+ stmt = stmt.order_by(table.c.version.desc()).limit(1)
456
+ result = sess.execute(stmt).fetchone()
457
+ if result is None:
458
+ return "2.0.0"
459
+ version_dict = dict(result._mapping)
460
+ return version_dict.get("version") or "2.0.0"
461
+
462
+ def upsert_schema_version(self, table_name: str, version: str) -> None:
463
+ """Upsert the schema version into the database."""
464
+ table = self._get_table(table_type="versions", create_table_if_not_found=True)
465
+ if table is None:
466
+ return
467
+ current_datetime = datetime.now().isoformat()
468
+ with self.Session() as sess, sess.begin():
469
+ stmt = mysql.insert(table).values(
470
+ table_name=table_name,
471
+ version=version,
472
+ created_at=current_datetime, # Store as ISO format string
473
+ updated_at=current_datetime,
474
+ )
475
+ # Update version if table_name already exists
476
+ stmt = stmt.on_duplicate_key_update(
477
+ version=version,
478
+ updated_at=current_datetime,
479
+ )
480
+ sess.execute(stmt)
481
+
482
+ # -- Session methods --
483
+ def delete_session(self, session_id: str) -> bool:
484
+ """
485
+ Delete a session from the database.
486
+
487
+ Args:
488
+ session_id (str): ID of the session to delete
489
+
490
+ Returns:
491
+ bool: True if the session was deleted, False otherwise.
492
+
493
+ Raises:
494
+ Exception: If an error occurs during deletion.
495
+ """
496
+ try:
497
+ table = self._get_table(table_type="sessions")
498
+ if table is None:
499
+ return False
500
+
501
+ with self.Session() as sess, sess.begin():
502
+ delete_stmt = table.delete().where(table.c.session_id == session_id)
503
+ result = sess.execute(delete_stmt)
504
+ if result.rowcount == 0:
505
+ log_debug(f"No session found to delete with session_id: {session_id} in table {table.name}")
506
+ return False
507
+ else:
508
+ log_debug(f"Successfully deleted session with session_id: {session_id} in table {table.name}")
509
+ return True
510
+
511
+ except Exception as e:
512
+ log_error(f"Error deleting session: {e}")
513
+ raise e
514
+
515
+ def delete_sessions(self, session_ids: List[str]) -> None:
516
+ """Delete all given sessions from the database.
517
+ Can handle multiple session types in the same run.
518
+
519
+ Args:
520
+ session_ids (List[str]): The IDs of the sessions to delete.
521
+
522
+ Raises:
523
+ Exception: If an error occurs during deletion.
524
+ """
525
+ try:
526
+ table = self._get_table(table_type="sessions")
527
+ if table is None:
528
+ return
529
+
530
+ with self.Session() as sess, sess.begin():
531
+ delete_stmt = table.delete().where(table.c.session_id.in_(session_ids))
532
+ result = sess.execute(delete_stmt)
533
+
534
+ log_debug(f"Successfully deleted {result.rowcount} sessions")
535
+
536
+ except Exception as e:
537
+ log_error(f"Error deleting sessions: {e}")
538
+ raise e
539
+
540
+ def get_session(
541
+ self,
542
+ session_id: str,
543
+ session_type: SessionType,
544
+ user_id: Optional[str] = None,
545
+ deserialize: Optional[bool] = True,
546
+ ) -> Optional[Union[Session, Dict[str, Any]]]:
547
+ """
548
+ Read a session from the database.
549
+
550
+ Args:
551
+ session_id (str): ID of the session to read.
552
+ session_type (SessionType): Type of session to get.
553
+ user_id (Optional[str]): User ID to filter by. Defaults to None.
554
+ deserialize (Optional[bool]): Whether to serialize the session. Defaults to True.
555
+
556
+ Returns:
557
+ Union[Session, Dict[str, Any], None]:
558
+ - When deserialize=True: Session object
559
+ - When deserialize=False: Session dictionary
560
+
561
+ Raises:
562
+ Exception: If an error occurs during retrieval.
563
+ """
564
+ try:
565
+ table = self._get_table(table_type="sessions")
566
+ if table is None:
567
+ return None
568
+
569
+ with self.Session() as sess:
570
+ stmt = select(table).where(table.c.session_id == session_id)
571
+
572
+ if user_id is not None:
573
+ stmt = stmt.where(table.c.user_id == user_id)
574
+ result = sess.execute(stmt).fetchone()
575
+ if result is None:
576
+ return None
577
+
578
+ session = dict(result._mapping)
579
+
580
+ if not deserialize:
581
+ return session
582
+
583
+ if session_type == SessionType.AGENT:
584
+ return AgentSession.from_dict(session)
585
+ elif session_type == SessionType.TEAM:
586
+ return TeamSession.from_dict(session)
587
+ elif session_type == SessionType.WORKFLOW:
588
+ return WorkflowSession.from_dict(session)
589
+ else:
590
+ raise ValueError(f"Invalid session type: {session_type}")
591
+
592
+ except Exception as e:
593
+ log_error(f"Exception reading from session table: {e}")
594
+ raise e
595
+
596
+ def get_sessions(
597
+ self,
598
+ session_type: Optional[SessionType] = None,
599
+ user_id: Optional[str] = None,
600
+ component_id: Optional[str] = None,
601
+ session_name: Optional[str] = None,
602
+ start_timestamp: Optional[int] = None,
603
+ end_timestamp: Optional[int] = None,
604
+ limit: Optional[int] = None,
605
+ page: Optional[int] = None,
606
+ sort_by: Optional[str] = None,
607
+ sort_order: Optional[str] = None,
608
+ deserialize: Optional[bool] = True,
609
+ ) -> Union[List[Session], Tuple[List[Dict[str, Any]], int]]:
610
+ """
611
+ Get all sessions in the given table. Can filter by user_id and entity_id.
612
+
613
+ Args:
614
+ session_type (Optional[SessionType]): The type of session to filter by.
615
+ user_id (Optional[str]): The ID of the user to filter by.
616
+ component_id (Optional[str]): The ID of the agent / workflow to filter by.
617
+ session_name (Optional[str]): The name of the session to filter by.
618
+ start_timestamp (Optional[int]): The start timestamp to filter by.
619
+ end_timestamp (Optional[int]): The end timestamp to filter by.
620
+ limit (Optional[int]): The maximum number of sessions to return. Defaults to None.
621
+ page (Optional[int]): The page number to return. Defaults to None.
622
+ sort_by (Optional[str]): The field to sort by. Defaults to None.
623
+ sort_order (Optional[str]): The sort order. Defaults to None.
624
+ deserialize (Optional[bool]): Whether to serialize the sessions. Defaults to True.
625
+ create_table_if_not_found (Optional[bool]): Whether to create the table if it doesn't exist.
626
+
627
+ Returns:
628
+ Union[List[Session], Tuple[List[Dict], int]]:
629
+ - When deserialize=True: List of Session objects
630
+ - When deserialize=False: Tuple of (session dictionaries, total count)
631
+
632
+ Raises:
633
+ Exception: If an error occurs during retrieval.
634
+ """
635
+ try:
636
+ table = self._get_table(table_type="sessions")
637
+ if table is None:
638
+ return [] if deserialize else ([], 0)
639
+
640
+ with self.Session() as sess, sess.begin():
641
+ stmt = select(table)
642
+
643
+ # Filtering
644
+ if user_id is not None:
645
+ stmt = stmt.where(table.c.user_id == user_id)
646
+ if component_id is not None:
647
+ if session_type == SessionType.AGENT:
648
+ stmt = stmt.where(table.c.agent_id == component_id)
649
+ elif session_type == SessionType.TEAM:
650
+ stmt = stmt.where(table.c.team_id == component_id)
651
+ elif session_type == SessionType.WORKFLOW:
652
+ stmt = stmt.where(table.c.workflow_id == component_id)
653
+ if start_timestamp is not None:
654
+ stmt = stmt.where(table.c.created_at >= start_timestamp)
655
+ if end_timestamp is not None:
656
+ stmt = stmt.where(table.c.created_at <= end_timestamp)
657
+ if session_name is not None:
658
+ # SingleStore JSON extraction syntax
659
+ stmt = stmt.where(
660
+ func.coalesce(func.JSON_EXTRACT_STRING(table.c.session_data, "session_name"), "").like(
661
+ f"%{session_name}%"
662
+ )
663
+ )
664
+ if session_type is not None:
665
+ session_type_value = session_type.value if isinstance(session_type, SessionType) else session_type
666
+ stmt = stmt.where(table.c.session_type == session_type_value)
667
+
668
+ count_stmt = select(func.count()).select_from(stmt.alias())
669
+ total_count = sess.execute(count_stmt).scalar()
670
+
671
+ # Sorting
672
+ stmt = apply_sorting(stmt, table, sort_by, sort_order)
673
+
674
+ # Paginating
675
+ if limit is not None:
676
+ stmt = stmt.limit(limit)
677
+ if page is not None:
678
+ stmt = stmt.offset((page - 1) * limit)
679
+
680
+ records = sess.execute(stmt).fetchall()
681
+ if records is None:
682
+ return [] if deserialize else ([], 0)
683
+
684
+ session = [dict(record._mapping) for record in records]
685
+ if not deserialize:
686
+ return session, total_count
687
+
688
+ if session_type == SessionType.AGENT:
689
+ return [AgentSession.from_dict(record) for record in session] # type: ignore
690
+ elif session_type == SessionType.TEAM:
691
+ return [TeamSession.from_dict(record) for record in session] # type: ignore
692
+ elif session_type == SessionType.WORKFLOW:
693
+ return [WorkflowSession.from_dict(record) for record in session] # type: ignore
694
+ else:
695
+ raise ValueError(f"Invalid session type: {session_type}")
696
+
697
+ except Exception as e:
698
+ log_error(f"Exception reading from session table: {e}")
699
+ raise e
700
+
701
+ def rename_session(
702
+ self, session_id: str, session_type: SessionType, session_name: str, deserialize: Optional[bool] = True
703
+ ) -> Optional[Union[Session, Dict[str, Any]]]:
704
+ """
705
+ Rename a session in the database.
706
+
707
+ Args:
708
+ session_id (str): The ID of the session to rename.
709
+ session_type (SessionType): The type of session to rename.
710
+ session_name (str): The new name for the session.
711
+ deserialize (Optional[bool]): Whether to serialize the session. Defaults to True.
712
+
713
+ Returns:
714
+ Optional[Union[Session, Dict[str, Any]]]:
715
+ - When deserialize=True: Session object
716
+ - When deserialize=False: Session dictionary
717
+
718
+ Raises:
719
+ Exception: If an error occurs during renaming.
720
+ """
721
+ try:
722
+ table = self._get_table(table_type="sessions")
723
+ if table is None:
724
+ return None
725
+
726
+ with self.Session() as sess, sess.begin():
727
+ stmt = (
728
+ update(table)
729
+ .where(table.c.session_id == session_id)
730
+ .where(table.c.session_type == session_type.value)
731
+ .values(session_data=func.JSON_SET_STRING(table.c.session_data, "session_name", session_name))
732
+ )
733
+ result = sess.execute(stmt)
734
+ if result.rowcount == 0:
735
+ return None
736
+
737
+ # Fetch the updated record
738
+ select_stmt = select(table).where(table.c.session_id == session_id)
739
+ row = sess.execute(select_stmt).fetchone()
740
+ if not row:
741
+ return None
742
+
743
+ session = dict(row._mapping)
744
+
745
+ log_debug(f"Renamed session with id '{session_id}' to '{session_name}'")
746
+
747
+ if not deserialize:
748
+ return session
749
+
750
+ if session_type == SessionType.AGENT:
751
+ return AgentSession.from_dict(session)
752
+ elif session_type == SessionType.TEAM:
753
+ return TeamSession.from_dict(session)
754
+ elif session_type == SessionType.WORKFLOW:
755
+ return WorkflowSession.from_dict(session)
756
+ else:
757
+ raise ValueError(f"Invalid session type: {session_type}")
758
+
759
+ except Exception as e:
760
+ log_error(f"Error renaming session: {e}")
761
+ raise e
762
+
763
+ def upsert_session(self, session: Session, deserialize: Optional[bool] = True) -> Optional[Session]:
764
+ """
765
+ Insert or update a session in the database.
766
+
767
+ Args:
768
+ session (Session): The session data to upsert.
769
+ deserialize (Optional[bool]): Whether to deserialize the session. Defaults to True.
770
+
771
+ Returns:
772
+ Optional[Union[Session, Dict[str, Any]]]:
773
+ - When deserialize=True: Session object
774
+ - When deserialize=False: Session dictionary
775
+
776
+ Raises:
777
+ Exception: If an error occurs during upsert.
778
+ """
779
+ try:
780
+ table = self._get_table(table_type="sessions", create_table_if_not_found=True)
781
+ if table is None:
782
+ return None
783
+
784
+ session_dict = session.to_dict()
785
+
786
+ if isinstance(session, AgentSession):
787
+ with self.Session() as sess, sess.begin():
788
+ stmt = mysql.insert(table).values(
789
+ session_id=session_dict.get("session_id"),
790
+ session_type=SessionType.AGENT.value,
791
+ agent_id=session_dict.get("agent_id"),
792
+ user_id=session_dict.get("user_id"),
793
+ runs=session_dict.get("runs"),
794
+ agent_data=session_dict.get("agent_data"),
795
+ session_data=session_dict.get("session_data"),
796
+ summary=session_dict.get("summary"),
797
+ metadata=session_dict.get("metadata"),
798
+ created_at=session_dict.get("created_at"),
799
+ updated_at=session_dict.get("created_at"),
800
+ )
801
+ stmt = stmt.on_duplicate_key_update(
802
+ agent_id=stmt.inserted.agent_id,
803
+ user_id=stmt.inserted.user_id,
804
+ agent_data=stmt.inserted.agent_data,
805
+ session_data=stmt.inserted.session_data,
806
+ summary=stmt.inserted.summary,
807
+ metadata=stmt.inserted.metadata,
808
+ runs=stmt.inserted.runs,
809
+ updated_at=int(time.time()),
810
+ )
811
+ sess.execute(stmt)
812
+
813
+ # Fetch the result
814
+ select_stmt = select(table).where(
815
+ (table.c.session_id == session_dict.get("session_id"))
816
+ & (table.c.agent_id == session_dict.get("agent_id"))
817
+ )
818
+ row = sess.execute(select_stmt).fetchone()
819
+ if row is None:
820
+ return None
821
+
822
+ if not deserialize:
823
+ return row._mapping
824
+
825
+ return AgentSession.from_dict(row._mapping)
826
+
827
+ elif isinstance(session, TeamSession):
828
+ with self.Session() as sess, sess.begin():
829
+ stmt = mysql.insert(table).values(
830
+ session_id=session_dict.get("session_id"),
831
+ session_type=SessionType.TEAM.value,
832
+ team_id=session_dict.get("team_id"),
833
+ user_id=session_dict.get("user_id"),
834
+ runs=session_dict.get("runs"),
835
+ team_data=session_dict.get("team_data"),
836
+ session_data=session_dict.get("session_data"),
837
+ summary=session_dict.get("summary"),
838
+ metadata=session_dict.get("metadata"),
839
+ created_at=session_dict.get("created_at"),
840
+ updated_at=session_dict.get("created_at"),
841
+ )
842
+ stmt = stmt.on_duplicate_key_update(
843
+ team_id=stmt.inserted.team_id,
844
+ user_id=stmt.inserted.user_id,
845
+ team_data=stmt.inserted.team_data,
846
+ session_data=stmt.inserted.session_data,
847
+ summary=stmt.inserted.summary,
848
+ metadata=stmt.inserted.metadata,
849
+ runs=stmt.inserted.runs,
850
+ updated_at=int(time.time()),
851
+ )
852
+ sess.execute(stmt)
853
+
854
+ # Fetch the result
855
+ select_stmt = select(table).where(
856
+ (table.c.session_id == session_dict.get("session_id"))
857
+ & (table.c.team_id == session_dict.get("team_id"))
858
+ )
859
+ row = sess.execute(select_stmt).fetchone()
860
+ if row is None:
861
+ return None
862
+
863
+ if not deserialize:
864
+ return row._mapping
865
+
866
+ return TeamSession.from_dict(row._mapping)
867
+
868
+ else:
869
+ with self.Session() as sess, sess.begin():
870
+ stmt = mysql.insert(table).values(
871
+ session_id=session_dict.get("session_id"),
872
+ session_type=SessionType.WORKFLOW.value,
873
+ workflow_id=session_dict.get("workflow_id"),
874
+ user_id=session_dict.get("user_id"),
875
+ runs=session_dict.get("runs"),
876
+ workflow_data=session_dict.get("workflow_data"),
877
+ session_data=session_dict.get("session_data"),
878
+ summary=session_dict.get("summary"),
879
+ metadata=session_dict.get("metadata"),
880
+ created_at=session_dict.get("created_at"),
881
+ updated_at=session_dict.get("created_at"),
882
+ )
883
+ stmt = stmt.on_duplicate_key_update(
884
+ workflow_id=stmt.inserted.workflow_id,
885
+ user_id=stmt.inserted.user_id,
886
+ workflow_data=stmt.inserted.workflow_data,
887
+ session_data=stmt.inserted.session_data,
888
+ summary=stmt.inserted.summary,
889
+ metadata=stmt.inserted.metadata,
890
+ runs=stmt.inserted.runs,
891
+ updated_at=int(time.time()),
892
+ )
893
+ sess.execute(stmt)
894
+
895
+ # Fetch the result
896
+ select_stmt = select(table).where(
897
+ (table.c.session_id == session_dict.get("session_id"))
898
+ & (table.c.workflow_id == session_dict.get("workflow_id"))
899
+ )
900
+ row = sess.execute(select_stmt).fetchone()
901
+ if row is None:
902
+ return None
903
+
904
+ if not deserialize:
905
+ return row._mapping
906
+
907
+ return WorkflowSession.from_dict(row._mapping)
908
+
909
+ except Exception as e:
910
+ log_error(f"Error upserting into sessions table: {e}")
911
+ raise e
912
+
913
+ def upsert_sessions(
914
+ self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
915
+ ) -> List[Union[Session, Dict[str, Any]]]:
916
+ """
917
+ Bulk upsert multiple sessions for improved performance on large datasets.
918
+
919
+ Args:
920
+ sessions (List[Session]): List of sessions to upsert.
921
+ deserialize (Optional[bool]): Whether to deserialize the sessions. Defaults to True.
922
+
923
+ Returns:
924
+ List[Union[Session, Dict[str, Any]]]: List of upserted sessions.
925
+
926
+ Raises:
927
+ Exception: If an error occurs during bulk upsert.
928
+ """
929
+ if not sessions:
930
+ return []
931
+
932
+ try:
933
+ table = self._get_table(table_type="sessions", create_table_if_not_found=True)
934
+ if table is None:
935
+ return []
936
+
937
+ # Group sessions by type for batch processing
938
+ agent_sessions = []
939
+ team_sessions = []
940
+ workflow_sessions = []
941
+
942
+ for session in sessions:
943
+ if isinstance(session, AgentSession):
944
+ agent_sessions.append(session)
945
+ elif isinstance(session, TeamSession):
946
+ team_sessions.append(session)
947
+ elif isinstance(session, WorkflowSession):
948
+ workflow_sessions.append(session)
949
+
950
+ results: List[Union[Session, Dict[str, Any]]] = []
951
+
952
+ with self.Session() as sess, sess.begin():
953
+ # Bulk upsert agent sessions
954
+ if agent_sessions:
955
+ agent_data = []
956
+ for session in agent_sessions:
957
+ session_dict = session.to_dict()
958
+ # Use preserved updated_at if flag is set, otherwise use current time
959
+ updated_at = session_dict.get("updated_at") if preserve_updated_at else int(time.time())
960
+ agent_data.append(
961
+ {
962
+ "session_id": session_dict.get("session_id"),
963
+ "session_type": SessionType.AGENT.value,
964
+ "agent_id": session_dict.get("agent_id"),
965
+ "user_id": session_dict.get("user_id"),
966
+ "runs": session_dict.get("runs"),
967
+ "agent_data": session_dict.get("agent_data"),
968
+ "session_data": session_dict.get("session_data"),
969
+ "summary": session_dict.get("summary"),
970
+ "metadata": session_dict.get("metadata"),
971
+ "created_at": session_dict.get("created_at"),
972
+ "updated_at": updated_at,
973
+ }
974
+ )
975
+
976
+ if agent_data:
977
+ stmt = mysql.insert(table)
978
+ stmt = stmt.on_duplicate_key_update(
979
+ agent_id=stmt.inserted.agent_id,
980
+ user_id=stmt.inserted.user_id,
981
+ agent_data=stmt.inserted.agent_data,
982
+ session_data=stmt.inserted.session_data,
983
+ summary=stmt.inserted.summary,
984
+ metadata=stmt.inserted.metadata,
985
+ runs=stmt.inserted.runs,
986
+ updated_at=stmt.inserted.updated_at,
987
+ )
988
+ sess.execute(stmt, agent_data)
989
+
990
+ # Fetch the results for agent sessions
991
+ agent_ids = [session.session_id for session in agent_sessions]
992
+ select_stmt = select(table).where(table.c.session_id.in_(agent_ids))
993
+ result = sess.execute(select_stmt).fetchall()
994
+
995
+ for row in result:
996
+ if deserialize:
997
+ deserialized_session = AgentSession.from_dict(session_dict)
998
+ if deserialized_session is None:
999
+ continue
1000
+ results.append(deserialized_session)
1001
+ else:
1002
+ results.append(dict(row._mapping))
1003
+
1004
+ # Bulk upsert team sessions
1005
+ if team_sessions:
1006
+ team_data = []
1007
+ for session in team_sessions:
1008
+ session_dict = session.to_dict()
1009
+ # Use preserved updated_at if flag is set, otherwise use current time
1010
+ updated_at = session_dict.get("updated_at") if preserve_updated_at else int(time.time())
1011
+ team_data.append(
1012
+ {
1013
+ "session_id": session_dict.get("session_id"),
1014
+ "session_type": SessionType.TEAM.value,
1015
+ "team_id": session_dict.get("team_id"),
1016
+ "user_id": session_dict.get("user_id"),
1017
+ "runs": session_dict.get("runs"),
1018
+ "team_data": session_dict.get("team_data"),
1019
+ "session_data": session_dict.get("session_data"),
1020
+ "summary": session_dict.get("summary"),
1021
+ "metadata": session_dict.get("metadata"),
1022
+ "created_at": session_dict.get("created_at"),
1023
+ "updated_at": updated_at,
1024
+ }
1025
+ )
1026
+
1027
+ if team_data:
1028
+ stmt = mysql.insert(table)
1029
+ stmt = stmt.on_duplicate_key_update(
1030
+ team_id=stmt.inserted.team_id,
1031
+ user_id=stmt.inserted.user_id,
1032
+ team_data=stmt.inserted.team_data,
1033
+ session_data=stmt.inserted.session_data,
1034
+ summary=stmt.inserted.summary,
1035
+ metadata=stmt.inserted.metadata,
1036
+ runs=stmt.inserted.runs,
1037
+ updated_at=stmt.inserted.updated_at,
1038
+ )
1039
+ sess.execute(stmt, team_data)
1040
+
1041
+ # Fetch the results for team sessions
1042
+ team_ids = [session.session_id for session in team_sessions]
1043
+ select_stmt = select(table).where(table.c.session_id.in_(team_ids))
1044
+ result = sess.execute(select_stmt).fetchall()
1045
+
1046
+ for row in result:
1047
+ if deserialize:
1048
+ deserialized_team_session = TeamSession.from_dict(session_dict)
1049
+ if deserialized_team_session is None:
1050
+ continue
1051
+ results.append(deserialized_team_session)
1052
+ else:
1053
+ results.append(dict(row._mapping))
1054
+
1055
+ # Bulk upsert workflow sessions
1056
+ if workflow_sessions:
1057
+ workflow_data = []
1058
+ for session in workflow_sessions:
1059
+ session_dict = session.to_dict()
1060
+ # Use preserved updated_at if flag is set, otherwise use current time
1061
+ updated_at = session_dict.get("updated_at") if preserve_updated_at else int(time.time())
1062
+ workflow_data.append(
1063
+ {
1064
+ "session_id": session_dict.get("session_id"),
1065
+ "session_type": SessionType.WORKFLOW.value,
1066
+ "workflow_id": session_dict.get("workflow_id"),
1067
+ "user_id": session_dict.get("user_id"),
1068
+ "runs": session_dict.get("runs"),
1069
+ "workflow_data": session_dict.get("workflow_data"),
1070
+ "session_data": session_dict.get("session_data"),
1071
+ "summary": session_dict.get("summary"),
1072
+ "metadata": session_dict.get("metadata"),
1073
+ "created_at": session_dict.get("created_at"),
1074
+ "updated_at": updated_at,
1075
+ }
1076
+ )
1077
+
1078
+ if workflow_data:
1079
+ stmt = mysql.insert(table)
1080
+ stmt = stmt.on_duplicate_key_update(
1081
+ workflow_id=stmt.inserted.workflow_id,
1082
+ user_id=stmt.inserted.user_id,
1083
+ workflow_data=stmt.inserted.workflow_data,
1084
+ session_data=stmt.inserted.session_data,
1085
+ summary=stmt.inserted.summary,
1086
+ metadata=stmt.inserted.metadata,
1087
+ runs=stmt.inserted.runs,
1088
+ updated_at=stmt.inserted.updated_at,
1089
+ )
1090
+ sess.execute(stmt, workflow_data)
1091
+
1092
+ # Fetch the results for workflow sessions
1093
+ workflow_ids = [session.session_id for session in workflow_sessions]
1094
+ select_stmt = select(table).where(table.c.session_id.in_(workflow_ids))
1095
+ result = sess.execute(select_stmt).fetchall()
1096
+
1097
+ for row in result:
1098
+ if deserialize:
1099
+ deserialized_workflow_session = WorkflowSession.from_dict(session_dict)
1100
+ if deserialized_workflow_session is None:
1101
+ continue
1102
+ results.append(deserialized_workflow_session)
1103
+ else:
1104
+ results.append(dict(row._mapping))
1105
+
1106
+ return results
1107
+
1108
+ except Exception as e:
1109
+ log_error(f"Exception during bulk session upsert: {e}")
1110
+ return []
1111
+
1112
+ # -- Memory methods --
1113
+ def delete_user_memory(self, memory_id: str, user_id: Optional[str] = None):
1114
+ """Delete a user memory from the database.
1115
+
1116
+ Args:
1117
+ memory_id (str): The ID of the memory to delete.
1118
+ user_id (Optional[str]): The ID of the user to filter by. Defaults to None.
1119
+
1120
+ Returns:
1121
+ bool: True if deletion was successful, False otherwise.
1122
+
1123
+ Raises:
1124
+ Exception: If an error occurs during deletion.
1125
+ """
1126
+ try:
1127
+ table = self._get_table(table_type="memories")
1128
+ if table is None:
1129
+ return
1130
+
1131
+ with self.Session() as sess, sess.begin():
1132
+ delete_stmt = table.delete().where(table.c.memory_id == memory_id)
1133
+ if user_id is not None:
1134
+ delete_stmt = delete_stmt.where(table.c.user_id == user_id)
1135
+ result = sess.execute(delete_stmt)
1136
+
1137
+ success = result.rowcount > 0
1138
+ if success:
1139
+ log_debug(f"Successfully deleted memory id: {memory_id}")
1140
+ else:
1141
+ log_debug(f"No memory found with id: {memory_id}")
1142
+
1143
+ except Exception as e:
1144
+ log_error(f"Error deleting memory: {e}")
1145
+ raise e
1146
+
1147
+ def delete_user_memories(self, memory_ids: List[str], user_id: Optional[str] = None) -> None:
1148
+ """Delete user memories from the database.
1149
+
1150
+ Args:
1151
+ memory_ids (List[str]): The IDs of the memories to delete.
1152
+ user_id (Optional[str]): The ID of the user to filter by. Defaults to None.
1153
+
1154
+ Raises:
1155
+ Exception: If an error occurs during deletion.
1156
+ """
1157
+ try:
1158
+ table = self._get_table(table_type="memories")
1159
+ if table is None:
1160
+ return
1161
+
1162
+ with self.Session() as sess, sess.begin():
1163
+ delete_stmt = table.delete().where(table.c.memory_id.in_(memory_ids))
1164
+ if user_id is not None:
1165
+ delete_stmt = delete_stmt.where(table.c.user_id == user_id)
1166
+ result = sess.execute(delete_stmt)
1167
+ if result.rowcount == 0:
1168
+ log_debug(f"No memories found with ids: {memory_ids}")
1169
+
1170
+ except Exception as e:
1171
+ log_error(f"Error deleting memories: {e}")
1172
+ raise e
1173
+
1174
+ def get_all_memory_topics(self) -> List[str]:
1175
+ """Get all memory topics from the database.
1176
+
1177
+ Returns:
1178
+ List[str]: List of memory topics.
1179
+ """
1180
+ try:
1181
+ table = self._get_table(table_type="memories")
1182
+ if table is None:
1183
+ return []
1184
+
1185
+ with self.Session() as sess, sess.begin():
1186
+ stmt = select(table.c.topics)
1187
+ result = sess.execute(stmt).fetchall()
1188
+
1189
+ topics = []
1190
+ for record in result:
1191
+ if record is not None and record[0] is not None:
1192
+ topic_list = json.loads(record[0]) if isinstance(record[0], str) else record[0]
1193
+ if isinstance(topic_list, list):
1194
+ topics.extend(topic_list)
1195
+
1196
+ return list(set(topics))
1197
+
1198
+ except Exception as e:
1199
+ log_error(f"Exception reading from memory table: {e}")
1200
+ raise e
1201
+
1202
+ def get_user_memory(
1203
+ self, memory_id: str, deserialize: Optional[bool] = True, user_id: Optional[str] = None
1204
+ ) -> Optional[UserMemory]:
1205
+ """Get a memory from the database.
1206
+
1207
+ Args:
1208
+ memory_id (str): The ID of the memory to get.
1209
+ deserialize (Optional[bool]): Whether to serialize the memory. Defaults to True.
1210
+ user_id (Optional[str]): The ID of the user to filter by. Defaults to None.
1211
+
1212
+ Returns:
1213
+ Union[UserMemory, Dict[str, Any], None]:
1214
+ - When deserialize=True: UserMemory object
1215
+ - When deserialize=False: UserMemory dictionary
1216
+
1217
+ Raises:
1218
+ Exception: If an error occurs during retrieval.
1219
+ """
1220
+ try:
1221
+ table = self._get_table(table_type="memories")
1222
+ if table is None:
1223
+ return None
1224
+
1225
+ with self.Session() as sess, sess.begin():
1226
+ stmt = select(table).where(table.c.memory_id == memory_id)
1227
+ if user_id is not None:
1228
+ stmt = stmt.where(table.c.user_id == user_id)
1229
+
1230
+ result = sess.execute(stmt).fetchone()
1231
+ if not result:
1232
+ return None
1233
+
1234
+ memory_raw = result._mapping
1235
+ if not deserialize:
1236
+ return memory_raw
1237
+ return UserMemory.from_dict(memory_raw)
1238
+
1239
+ except Exception as e:
1240
+ log_error(f"Exception reading from memory table: {e}")
1241
+ raise e
1242
+
1243
+ def get_user_memories(
1244
+ self,
1245
+ user_id: Optional[str] = None,
1246
+ agent_id: Optional[str] = None,
1247
+ team_id: Optional[str] = None,
1248
+ topics: Optional[List[str]] = None,
1249
+ search_content: Optional[str] = None,
1250
+ limit: Optional[int] = None,
1251
+ page: Optional[int] = None,
1252
+ sort_by: Optional[str] = None,
1253
+ sort_order: Optional[str] = None,
1254
+ deserialize: Optional[bool] = True,
1255
+ ) -> Union[List[UserMemory], Tuple[List[Dict[str, Any]], int]]:
1256
+ """Get all memories from the database as UserMemory objects.
1257
+
1258
+ Args:
1259
+ user_id (Optional[str]): The ID of the user to filter by.
1260
+ agent_id (Optional[str]): The ID of the agent to filter by.
1261
+ team_id (Optional[str]): The ID of the team to filter by.
1262
+ topics (Optional[List[str]]): The topics to filter by.
1263
+ search_content (Optional[str]): The content to search for.
1264
+ limit (Optional[int]): The maximum number of memories to return.
1265
+ page (Optional[int]): The page number.
1266
+ sort_by (Optional[str]): The column to sort by.
1267
+ sort_order (Optional[str]): The order to sort by.
1268
+ deserialize (Optional[bool]): Whether to serialize the memories. Defaults to True.
1269
+
1270
+
1271
+ Returns:
1272
+ Union[List[UserMemory], Tuple[List[Dict[str, Any]], int]]:
1273
+ - When deserialize=True: List of UserMemory objects
1274
+ - When deserialize=False: Tuple of (memory dictionaries, total count)
1275
+
1276
+ Raises:
1277
+ Exception: If an error occurs during retrieval.
1278
+ """
1279
+ try:
1280
+ table = self._get_table(table_type="memories")
1281
+ if table is None:
1282
+ return [] if deserialize else ([], 0)
1283
+
1284
+ with self.Session() as sess, sess.begin():
1285
+ stmt = select(table)
1286
+ # Filtering
1287
+ if user_id is not None:
1288
+ stmt = stmt.where(table.c.user_id == user_id)
1289
+ if agent_id is not None:
1290
+ stmt = stmt.where(table.c.agent_id == agent_id)
1291
+ if team_id is not None:
1292
+ stmt = stmt.where(table.c.team_id == team_id)
1293
+ if topics is not None:
1294
+ topic_conditions = [func.JSON_ARRAY_CONTAINS_STRING(table.c.topics, topic) for topic in topics]
1295
+ if topic_conditions:
1296
+ stmt = stmt.where(and_(*topic_conditions))
1297
+ if search_content is not None:
1298
+ stmt = stmt.where(table.c.memory.like(f"%{search_content}%"))
1299
+
1300
+ # Get total count after applying filtering
1301
+ count_stmt = select(func.count()).select_from(stmt.alias())
1302
+ total_count = sess.execute(count_stmt).scalar()
1303
+
1304
+ # Sorting
1305
+ stmt = apply_sorting(stmt, table, sort_by, sort_order)
1306
+
1307
+ # Paginating
1308
+ if limit is not None:
1309
+ stmt = stmt.limit(limit)
1310
+ if page is not None:
1311
+ stmt = stmt.offset((page - 1) * limit)
1312
+
1313
+ result = sess.execute(stmt).fetchall()
1314
+ if not result:
1315
+ return [] if deserialize else ([], 0)
1316
+
1317
+ memories_raw = [record._mapping for record in result]
1318
+ if not deserialize:
1319
+ return memories_raw, total_count
1320
+
1321
+ return [UserMemory.from_dict(record) for record in memories_raw]
1322
+
1323
+ except Exception as e:
1324
+ log_error(f"Exception reading from memory table: {e}")
1325
+ raise e
1326
+
1327
+ def get_user_memory_stats(
1328
+ self, limit: Optional[int] = None, page: Optional[int] = None, user_id: Optional[str] = None
1329
+ ) -> Tuple[List[Dict[str, Any]], int]:
1330
+ """Get user memories stats.
1331
+
1332
+ Args:
1333
+ limit (Optional[int]): The maximum number of user stats to return.
1334
+ page (Optional[int]): The page number.
1335
+ user_id (Optional[str]): User ID for filtering.
1336
+
1337
+ Returns:
1338
+ Tuple[List[Dict[str, Any]], int]: A list of dictionaries containing user stats and total count.
1339
+
1340
+ Example:
1341
+ (
1342
+ [
1343
+ {
1344
+ "user_id": "123",
1345
+ "total_memories": 10,
1346
+ "last_memory_updated_at": 1714560000,
1347
+ },
1348
+ ],
1349
+ total_count: 1,
1350
+ )
1351
+ """
1352
+ try:
1353
+ table = self._get_table(table_type="memories")
1354
+ if table is None:
1355
+ return [], 0
1356
+
1357
+ with self.Session() as sess, sess.begin():
1358
+ stmt = select(
1359
+ table.c.user_id,
1360
+ func.count(table.c.memory_id).label("total_memories"),
1361
+ func.max(table.c.updated_at).label("last_memory_updated_at"),
1362
+ )
1363
+ if user_id is not None:
1364
+ stmt = stmt.where(table.c.user_id == user_id)
1365
+ else:
1366
+ stmt = stmt.where(table.c.user_id.is_not(None))
1367
+ stmt = stmt.group_by(table.c.user_id)
1368
+ stmt = stmt.order_by(func.max(table.c.updated_at).desc())
1369
+
1370
+ count_stmt = select(func.count()).select_from(stmt.alias())
1371
+ total_count = sess.execute(count_stmt).scalar()
1372
+
1373
+ # Pagination
1374
+ if limit is not None:
1375
+ stmt = stmt.limit(limit)
1376
+ if page is not None:
1377
+ stmt = stmt.offset((page - 1) * limit)
1378
+
1379
+ result = sess.execute(stmt).fetchall()
1380
+ if not result:
1381
+ return [], 0
1382
+
1383
+ return [
1384
+ {
1385
+ "user_id": record.user_id, # type: ignore
1386
+ "total_memories": record.total_memories,
1387
+ "last_memory_updated_at": record.last_memory_updated_at,
1388
+ }
1389
+ for record in result
1390
+ ], total_count
1391
+
1392
+ except Exception as e:
1393
+ log_error(f"Exception getting user memory stats: {e}")
1394
+ raise e
1395
+
1396
    def upsert_user_memory(
        self, memory: UserMemory, deserialize: Optional[bool] = True
    ) -> Optional[Union[UserMemory, Dict[str, Any]]]:
        """Upsert a user memory in the database.

        Inserts the memory, or updates the existing row on duplicate key, then reads the
        row back inside the same transaction so the returned value reflects what is stored.

        Args:
            memory (UserMemory): The user memory to upsert. If it has no memory_id,
                a new UUID is generated and assigned to it in place.
            deserialize (Optional[bool]): Whether to serialize the memory. Defaults to True.

        Returns:
            Optional[Union[UserMemory, Dict[str, Any]]]:
                - When deserialize=True: UserMemory object
                - When deserialize=False: UserMemory dictionary

        Raises:
            Exception: If an error occurs during upsert.
        """
        try:
            table = self._get_table(table_type="memories", create_table_if_not_found=True)
            if table is None:
                return None

            with self.Session() as sess, sess.begin():
                # Assign a fresh ID when the memory is new.
                if memory.memory_id is None:
                    memory.memory_id = str(uuid4())

                current_time = int(time.time())

                stmt = mysql.insert(table).values(
                    memory_id=memory.memory_id,
                    memory=memory.memory,
                    input=memory.input,
                    user_id=memory.user_id,
                    agent_id=memory.agent_id,
                    team_id=memory.team_id,
                    topics=memory.topics,
                    feedback=memory.feedback,
                    created_at=memory.created_at,
                    updated_at=current_time,
                )
                # On duplicate key, overwrite the mutable columns with the incoming values.
                stmt = stmt.on_duplicate_key_update(
                    memory=stmt.inserted.memory,
                    topics=stmt.inserted.topics,
                    input=stmt.inserted.input,
                    user_id=stmt.inserted.user_id,
                    agent_id=stmt.inserted.agent_id,
                    team_id=stmt.inserted.team_id,
                    feedback=stmt.inserted.feedback,
                    updated_at=stmt.inserted.updated_at,
                    # Preserve created_at on update - don't overwrite existing value
                    created_at=table.c.created_at,
                )

                sess.execute(stmt)

                # Fetch the result
                select_stmt = select(table).where(table.c.memory_id == memory.memory_id)
                row = sess.execute(select_stmt).fetchone()
                if row is None:
                    return None

                memory_raw = row._mapping
                if not memory_raw or not deserialize:
                    return memory_raw

                return UserMemory.from_dict(memory_raw)

        except Exception as e:
            log_error(f"Error upserting user memory: {e}")
            raise e
1466
+
1467
    def upsert_memories(
        self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
    ) -> List[Union[UserMemory, Dict[str, Any]]]:
        """
        Bulk upsert multiple user memories for improved performance on large datasets.

        Args:
            memories (List[UserMemory]): List of memories to upsert. Memories without a
                memory_id are assigned a new UUID in place.
            deserialize (Optional[bool]): Whether to deserialize the memories. Defaults to True.
            preserve_updated_at (bool): When True, keep each memory's own updated_at value
                instead of stamping the current time.

        Returns:
            List[Union[UserMemory, Dict[str, Any]]]: List of upserted memories.
                On error an empty list is returned (the exception is logged, not re-raised).

        Raises:
            Exception: If an error occurs during bulk upsert.
        """
        if not memories:
            return []

        try:
            table = self._get_table(table_type="memories", create_table_if_not_found=True)
            if table is None:
                return []

            # Prepare data for bulk insert
            memory_data = []
            current_time = int(time.time())

            for memory in memories:
                if memory.memory_id is None:
                    memory.memory_id = str(uuid4())
                # Use preserved updated_at if flag is set, otherwise use current time
                updated_at = memory.updated_at if preserve_updated_at else current_time

                memory_data.append(
                    {
                        "memory_id": memory.memory_id,
                        "memory": memory.memory,
                        "input": memory.input,
                        "user_id": memory.user_id,
                        "agent_id": memory.agent_id,
                        "team_id": memory.team_id,
                        "topics": memory.topics,
                        "feedback": memory.feedback,
                        "created_at": memory.created_at,
                        "updated_at": updated_at,
                    }
                )

            results: List[Union[UserMemory, Dict[str, Any]]] = []

            with self.Session() as sess, sess.begin():
                if memory_data:
                    # One statement executed with a list of parameter dicts (executemany).
                    stmt = mysql.insert(table)
                    stmt = stmt.on_duplicate_key_update(
                        memory=stmt.inserted.memory,
                        topics=stmt.inserted.topics,
                        input=stmt.inserted.input,
                        user_id=stmt.inserted.user_id,
                        agent_id=stmt.inserted.agent_id,
                        team_id=stmt.inserted.team_id,
                        feedback=stmt.inserted.feedback,
                        updated_at=stmt.inserted.updated_at,
                        # Preserve created_at on update
                        created_at=table.c.created_at,
                    )
                    sess.execute(stmt, memory_data)

                    # Fetch the results
                    memory_ids = [memory.memory_id for memory in memories if memory.memory_id]
                    select_stmt = select(table).where(table.c.memory_id.in_(memory_ids))
                    result = sess.execute(select_stmt).fetchall()

                    for row in result:
                        memory_raw = dict(row._mapping)
                        if deserialize:
                            results.append(UserMemory.from_dict(memory_raw))
                        else:
                            results.append(memory_raw)

            return results

        except Exception as e:
            log_error(f"Exception during bulk memory upsert: {e}")
            return []
1552
+
1553
+ def clear_memories(self) -> None:
1554
+ """Delete all memories from the database.
1555
+
1556
+ Raises:
1557
+ Exception: If an error occurs during deletion.
1558
+ """
1559
+ try:
1560
+ table = self._get_table(table_type="memories")
1561
+ if table is None:
1562
+ return
1563
+
1564
+ with self.Session() as sess, sess.begin():
1565
+ sess.execute(table.delete())
1566
+
1567
+ except Exception as e:
1568
+ log_error(f"Exception deleting all memories: {e}")
1569
+ raise e
1570
+
1571
+ # -- Metrics methods --
1572
+ def _get_all_sessions_for_metrics_calculation(
1573
+ self, start_timestamp: Optional[int] = None, end_timestamp: Optional[int] = None
1574
+ ) -> List[Dict[str, Any]]:
1575
+ """
1576
+ Get all sessions of all types (agent, team, workflow) as raw dictionaries.
1577
+
1578
+ Args:
1579
+ start_timestamp (Optional[int]): The start timestamp to filter by. Defaults to None.
1580
+ end_timestamp (Optional[int]): The end timestamp to filter by. Defaults to None.
1581
+
1582
+ Returns:
1583
+ List[Dict[str, Any]]: List of session dictionaries with session_type field.
1584
+
1585
+ Raises:
1586
+ Exception: If an error occurs during retrieval.
1587
+ """
1588
+ try:
1589
+ table = self._get_table(table_type="sessions")
1590
+ if table is None:
1591
+ return []
1592
+
1593
+ stmt = select(
1594
+ table.c.user_id,
1595
+ table.c.session_data,
1596
+ table.c.runs,
1597
+ table.c.created_at,
1598
+ table.c.session_type,
1599
+ )
1600
+
1601
+ if start_timestamp is not None:
1602
+ stmt = stmt.where(table.c.created_at >= start_timestamp)
1603
+ if end_timestamp is not None:
1604
+ stmt = stmt.where(table.c.created_at <= end_timestamp)
1605
+
1606
+ with self.Session() as sess:
1607
+ result = sess.execute(stmt).fetchall()
1608
+ return [record._mapping for record in result]
1609
+
1610
+ except Exception as e:
1611
+ log_error(f"Exception reading from sessions table: {e}")
1612
+ return []
1613
+
1614
    def _get_metrics_calculation_starting_date(self, table: Table) -> Optional[date]:
        """Get the first date for which metrics calculation is needed:

        1. If there are metrics records, return the date of the first day without a complete metrics record.
        2. If there are no metrics records, return the date of the first recorded session.
        3. If there are no metrics records and no sessions records, return None.

        Args:
            table (Table): The metrics table to inspect for the latest record.

        Returns:
            Optional[date]: The starting date for which metrics calculation is needed.
        """
        # Look at the most recent metrics record only.
        with self.Session() as sess:
            stmt = select(table).order_by(table.c.date.desc()).limit(1)
            result = sess.execute(stmt).fetchone()

        # 1. Return the date of the first day without a complete metrics record.
        if result is not None:
            if result.completed:
                # Latest day is complete -> resume from the following day.
                return result._mapping["date"] + timedelta(days=1)
            else:
                # Latest day is incomplete -> recalculate it.
                return result._mapping["date"]

        # 2. No metrics records. Return the date of the first recorded session.
        sessions_result, _ = self.get_sessions(sort_by="created_at", sort_order="asc", limit=1, deserialize=False)
        if not isinstance(sessions_result, list):
            raise ValueError("Error obtaining session list to calculate metrics")

        first_session_date = sessions_result[0]["created_at"] if sessions_result and len(sessions_result) > 0 else None  # type: ignore

        # 3. No metrics records and no sessions records. Return None.
        if first_session_date is None:
            return None

        # created_at is a unix timestamp; interpret it in UTC.
        return datetime.fromtimestamp(first_session_date, tz=timezone.utc).date()
1650
+
1651
    def calculate_metrics(self) -> Optional[list[dict]]:
        """Calculate metrics for all dates without complete metrics.

        Determines the earliest date still needing metrics, gathers the relevant
        sessions, computes per-date metric records, and bulk-upserts them.

        Returns:
            Optional[list[dict]]: The calculated metrics, or None when there is
                nothing to calculate.

        Raises:
            Exception: If an error occurs during metrics calculation.
        """
        try:
            table = self._get_table(table_type="metrics", create_table_if_not_found=True)
            if table is None:
                return None

            starting_date = self._get_metrics_calculation_starting_date(table)
            if starting_date is None:
                log_info("No session data found. Won't calculate metrics.")
                return None

            dates_to_process = get_dates_to_calculate_metrics_for(starting_date)
            if not dates_to_process:
                log_info("Metrics already calculated for all relevant dates.")
                return None

            # Window spanning midnight of the first date up to midnight after the last date.
            start_timestamp = int(datetime.combine(dates_to_process[0], datetime.min.time()).timestamp())
            end_timestamp = int(
                datetime.combine(dates_to_process[-1] + timedelta(days=1), datetime.min.time()).timestamp()
            )

            sessions = self._get_all_sessions_for_metrics_calculation(
                start_timestamp=start_timestamp, end_timestamp=end_timestamp
            )
            all_sessions_data = fetch_all_sessions_data(
                sessions=sessions, dates_to_process=dates_to_process, start_timestamp=start_timestamp
            )
            if not all_sessions_data:
                log_info("No new session data found. Won't calculate metrics.")
                return None

            metrics_records = []
            for date_to_process in dates_to_process:
                date_key = date_to_process.isoformat()
                sessions_for_date = all_sessions_data.get(date_key, {})

                # Skip dates with no sessions
                if not any(len(sessions) > 0 for sessions in sessions_for_date.values()):
                    continue

                metrics_record = calculate_date_metrics(date_to_process, sessions_for_date)
                metrics_records.append(metrics_record)

            if metrics_records:
                with self.Session() as sess, sess.begin():
                    bulk_upsert_metrics(session=sess, table=table, metrics_records=metrics_records)

            log_debug("Updated metrics calculations")

            return metrics_records

        except Exception as e:
            log_error(f"Error calculating metrics: {e}")
            raise e
1713
+
1714
+ def get_metrics(
1715
+ self,
1716
+ starting_date: Optional[date] = None,
1717
+ ending_date: Optional[date] = None,
1718
+ ) -> Tuple[List[dict], Optional[int]]:
1719
+ """Get all metrics matching the given date range.
1720
+
1721
+ Args:
1722
+ starting_date (Optional[date]): The starting date to filter metrics by.
1723
+ ending_date (Optional[date]): The ending date to filter metrics by.
1724
+
1725
+ Returns:
1726
+ Tuple[List[dict], int]: A tuple containing the metrics and the timestamp of the latest update.
1727
+
1728
+ Raises:
1729
+ Exception: If an error occurs during retrieval.
1730
+ """
1731
+ try:
1732
+ table = self._get_table(table_type="metrics", create_table_if_not_found=True)
1733
+ if table is None:
1734
+ return [], 0
1735
+
1736
+ with self.Session() as sess, sess.begin():
1737
+ stmt = select(table)
1738
+ if starting_date:
1739
+ stmt = stmt.where(table.c.date >= starting_date)
1740
+ if ending_date:
1741
+ stmt = stmt.where(table.c.date <= ending_date)
1742
+ result = sess.execute(stmt).fetchall()
1743
+ if not result:
1744
+ return [], None
1745
+
1746
+ # Get the latest updated_at
1747
+ latest_stmt = select(func.max(table.c.updated_at))
1748
+ latest_updated_at = sess.execute(latest_stmt).scalar()
1749
+
1750
+ return [row._mapping for row in result], latest_updated_at
1751
+
1752
+ except Exception as e:
1753
+ log_error(f"Error getting metrics: {e}")
1754
+ raise e
1755
+
1756
+ # -- Knowledge methods --
1757
+
1758
+ def delete_knowledge_content(self, id: str):
1759
+ """Delete a knowledge row from the database.
1760
+
1761
+ Args:
1762
+ id (str): The ID of the knowledge row to delete.
1763
+ """
1764
+ try:
1765
+ table = self._get_table(table_type="knowledge")
1766
+ if table is None:
1767
+ return
1768
+
1769
+ with self.Session() as sess, sess.begin():
1770
+ stmt = table.delete().where(table.c.id == id)
1771
+ sess.execute(stmt)
1772
+
1773
+ log_debug(f"Deleted knowledge content with id '{id}'")
1774
+ except Exception as e:
1775
+ log_error(f"Error deleting knowledge content: {e}")
1776
+ raise e
1777
+
1778
+ def get_knowledge_content(self, id: str) -> Optional[KnowledgeRow]:
1779
+ """Get a knowledge row from the database.
1780
+
1781
+ Args:
1782
+ id (str): The ID of the knowledge row to get.
1783
+
1784
+ Returns:
1785
+ Optional[KnowledgeRow]: The knowledge row, or None if it doesn't exist.
1786
+ """
1787
+ try:
1788
+ table = self._get_table(table_type="knowledge")
1789
+ if table is None:
1790
+ return None
1791
+
1792
+ with self.Session() as sess, sess.begin():
1793
+ stmt = select(table).where(table.c.id == id)
1794
+ result = sess.execute(stmt).fetchone()
1795
+ if result is None:
1796
+ return None
1797
+ return KnowledgeRow.model_validate(result._mapping)
1798
+ except Exception as e:
1799
+ log_error(f"Error getting knowledge content: {e}")
1800
+ raise e
1801
+
1802
+ def get_knowledge_contents(
1803
+ self,
1804
+ limit: Optional[int] = None,
1805
+ page: Optional[int] = None,
1806
+ sort_by: Optional[str] = None,
1807
+ sort_order: Optional[str] = None,
1808
+ ) -> Tuple[List[KnowledgeRow], int]:
1809
+ """Get all knowledge contents from the database.
1810
+
1811
+ Args:
1812
+ limit (Optional[int]): The maximum number of knowledge contents to return.
1813
+ page (Optional[int]): The page number.
1814
+ sort_by (Optional[str]): The column to sort by.
1815
+ sort_order (Optional[str]): The order to sort by.
1816
+
1817
+ Returns:
1818
+ Tuple[List[KnowledgeRow], int]: The knowledge contents and total count.
1819
+
1820
+ Raises:
1821
+ Exception: If an error occurs during retrieval.
1822
+ """
1823
+ table = self._get_table(table_type="knowledge")
1824
+ if table is None:
1825
+ return [], 0
1826
+
1827
+ try:
1828
+ with self.Session() as sess, sess.begin():
1829
+ stmt = select(table)
1830
+
1831
+ # Apply sorting
1832
+ if sort_by is not None:
1833
+ stmt = stmt.order_by(getattr(table.c, sort_by) * (1 if sort_order == "asc" else -1))
1834
+
1835
+ # Get total count before applying limit and pagination
1836
+ count_stmt = select(func.count()).select_from(stmt.alias())
1837
+ total_count = sess.execute(count_stmt).scalar()
1838
+
1839
+ # Apply pagination after count
1840
+ if limit is not None:
1841
+ stmt = stmt.limit(limit)
1842
+ if page is not None:
1843
+ stmt = stmt.offset((page - 1) * limit)
1844
+
1845
+ result = sess.execute(stmt).fetchall()
1846
+ if result is None:
1847
+ return [], 0
1848
+
1849
+ return [KnowledgeRow.model_validate(record._mapping) for record in result], total_count
1850
+
1851
+ except Exception as e:
1852
+ log_error(f"Error getting knowledge contents: {e}")
1853
+ raise e
1854
+
1855
+ def upsert_knowledge_content(self, knowledge_row: KnowledgeRow):
1856
+ """Upsert knowledge content in the database.
1857
+
1858
+ Args:
1859
+ knowledge_row (KnowledgeRow): The knowledge row to upsert.
1860
+
1861
+ Returns:
1862
+ Optional[KnowledgeRow]: The upserted knowledge row, or None if the operation fails.
1863
+ """
1864
+ try:
1865
+ table = self._get_table(table_type="knowledge", create_table_if_not_found=True)
1866
+ if table is None:
1867
+ return None
1868
+
1869
+ with self.Session() as sess, sess.begin():
1870
+ # Only include fields that are not None in the update
1871
+ update_fields = {
1872
+ k: v
1873
+ for k, v in {
1874
+ "name": knowledge_row.name,
1875
+ "description": knowledge_row.description,
1876
+ "metadata": knowledge_row.metadata,
1877
+ "type": knowledge_row.type,
1878
+ "size": knowledge_row.size,
1879
+ "linked_to": knowledge_row.linked_to,
1880
+ "access_count": knowledge_row.access_count,
1881
+ "status": knowledge_row.status,
1882
+ "status_message": knowledge_row.status_message,
1883
+ "created_at": knowledge_row.created_at,
1884
+ "updated_at": knowledge_row.updated_at,
1885
+ "external_id": knowledge_row.external_id,
1886
+ }.items()
1887
+ if v is not None
1888
+ }
1889
+
1890
+ stmt = mysql.insert(table).values(knowledge_row.model_dump())
1891
+ stmt = stmt.on_duplicate_key_update(**update_fields)
1892
+ sess.execute(stmt)
1893
+
1894
+ return knowledge_row
1895
+
1896
+ except Exception as e:
1897
+ log_error(f"Error upserting knowledge row: {e}")
1898
+ raise e
1899
+
1900
+ # -- Eval methods --
1901
+
1902
+ def create_eval_run(self, eval_run: EvalRunRecord) -> Optional[EvalRunRecord]:
1903
+ """Create an EvalRunRecord in the database.
1904
+
1905
+ Args:
1906
+ eval_run (EvalRunRecord): The eval run to create.
1907
+
1908
+ Returns:
1909
+ Optional[EvalRunRecord]: The created eval run, or None if the operation fails.
1910
+
1911
+ Raises:
1912
+ Exception: If an error occurs during creation.
1913
+ """
1914
+ try:
1915
+ table = self._get_table(table_type="evals", create_table_if_not_found=True)
1916
+ if table is None:
1917
+ return None
1918
+
1919
+ with self.Session() as sess, sess.begin():
1920
+ current_time = int(time.time())
1921
+ stmt = mysql.insert(table).values(
1922
+ {"created_at": current_time, "updated_at": current_time, **eval_run.model_dump()}
1923
+ )
1924
+ sess.execute(stmt)
1925
+
1926
+ log_debug(f"Created eval run with id '{eval_run.run_id}'")
1927
+
1928
+ return eval_run
1929
+
1930
+ except Exception as e:
1931
+ log_error(f"Error creating eval run: {e}")
1932
+ raise e
1933
+
1934
+ def delete_eval_run(self, eval_run_id: str) -> None:
1935
+ """Delete an eval run from the database.
1936
+
1937
+ Args:
1938
+ eval_run_id (str): The ID of the eval run to delete.
1939
+ """
1940
+ try:
1941
+ table = self._get_table(table_type="evals")
1942
+ if table is None:
1943
+ return
1944
+
1945
+ with self.Session() as sess, sess.begin():
1946
+ stmt = table.delete().where(table.c.run_id == eval_run_id)
1947
+ result = sess.execute(stmt)
1948
+ if result.rowcount == 0:
1949
+ log_warning(f"No eval run found with ID: {eval_run_id}")
1950
+ else:
1951
+ log_debug(f"Deleted eval run with ID: {eval_run_id}")
1952
+
1953
+ except Exception as e:
1954
+ log_error(f"Error deleting eval run {eval_run_id}: {e}")
1955
+ raise e
1956
+
1957
+ def delete_eval_runs(self, eval_run_ids: List[str]) -> None:
1958
+ """Delete multiple eval runs from the database.
1959
+
1960
+ Args:
1961
+ eval_run_ids (List[str]): List of eval run IDs to delete.
1962
+ """
1963
+ try:
1964
+ table = self._get_table(table_type="evals")
1965
+ if table is None:
1966
+ return
1967
+
1968
+ with self.Session() as sess, sess.begin():
1969
+ stmt = table.delete().where(table.c.run_id.in_(eval_run_ids))
1970
+ result = sess.execute(stmt)
1971
+ if result.rowcount == 0:
1972
+ log_debug(f"No eval runs found with IDs: {eval_run_ids}")
1973
+ else:
1974
+ log_debug(f"Deleted {result.rowcount} eval runs")
1975
+
1976
+ except Exception as e:
1977
+ log_error(f"Error deleting eval runs {eval_run_ids}: {e}")
1978
+ raise e
1979
+
1980
+ def get_eval_run(
1981
+ self, eval_run_id: str, deserialize: Optional[bool] = True
1982
+ ) -> Optional[Union[EvalRunRecord, Dict[str, Any]]]:
1983
+ """Get an eval run from the database.
1984
+
1985
+ Args:
1986
+ eval_run_id (str): The ID of the eval run to get.
1987
+ deserialize (Optional[bool]): Whether to serialize the eval run. Defaults to True.
1988
+
1989
+ Returns:
1990
+ Optional[Union[EvalRunRecord, Dict[str, Any]]]:
1991
+ - When deserialize=True: EvalRunRecord object
1992
+ - When deserialize=False: EvalRun dictionary
1993
+
1994
+ Raises:
1995
+ Exception: If an error occurs during retrieval.
1996
+ """
1997
+ try:
1998
+ table = self._get_table(table_type="evals")
1999
+ if table is None:
2000
+ return None
2001
+
2002
+ with self.Session() as sess, sess.begin():
2003
+ stmt = select(table).where(table.c.run_id == eval_run_id)
2004
+ result = sess.execute(stmt).fetchone()
2005
+ if result is None:
2006
+ return None
2007
+
2008
+ eval_run_raw = result._mapping
2009
+ if not deserialize:
2010
+ return eval_run_raw
2011
+
2012
+ return EvalRunRecord.model_validate(eval_run_raw)
2013
+
2014
+ except Exception as e:
2015
+ log_error(f"Exception getting eval run {eval_run_id}: {e}")
2016
+ raise e
2017
+
2018
+ def get_eval_runs(
2019
+ self,
2020
+ limit: Optional[int] = None,
2021
+ page: Optional[int] = None,
2022
+ sort_by: Optional[str] = None,
2023
+ sort_order: Optional[str] = None,
2024
+ agent_id: Optional[str] = None,
2025
+ team_id: Optional[str] = None,
2026
+ workflow_id: Optional[str] = None,
2027
+ model_id: Optional[str] = None,
2028
+ filter_type: Optional[EvalFilterType] = None,
2029
+ eval_type: Optional[List[EvalType]] = None,
2030
+ deserialize: Optional[bool] = True,
2031
+ ) -> Union[List[EvalRunRecord], Tuple[List[Dict[str, Any]], int]]:
2032
+ """Get all eval runs from the database.
2033
+
2034
+ Args:
2035
+ limit (Optional[int]): The maximum number of eval runs to return.
2036
+ page (Optional[int]): The page number.
2037
+ sort_by (Optional[str]): The column to sort by.
2038
+ sort_order (Optional[str]): The order to sort by.
2039
+ agent_id (Optional[str]): The ID of the agent to filter by.
2040
+ team_id (Optional[str]): The ID of the team to filter by.
2041
+ workflow_id (Optional[str]): The ID of the workflow to filter by.
2042
+ model_id (Optional[str]): The ID of the model to filter by.
2043
+ eval_type (Optional[List[EvalType]]): The type(s) of eval to filter by.
2044
+ filter_type (Optional[EvalFilterType]): Filter by component type (agent, team, workflow).
2045
+ deserialize (Optional[bool]): Whether to serialize the eval runs. Defaults to True.
2046
+ create_table_if_not_found (Optional[bool]): Whether to create the table if it doesn't exist.
2047
+
2048
+ Returns:
2049
+ Union[List[EvalRunRecord], Tuple[List[Dict[str, Any]], int]]:
2050
+ - When deserialize=True: List of EvalRunRecord objects
2051
+ - When deserialize=False: List of dictionaries
2052
+
2053
+ Raises:
2054
+ Exception: If an error occurs during retrieval.
2055
+ """
2056
+ try:
2057
+ table = self._get_table(table_type="evals")
2058
+ if table is None:
2059
+ return [] if deserialize else ([], 0)
2060
+
2061
+ with self.Session() as sess, sess.begin():
2062
+ stmt = select(table)
2063
+
2064
+ # Filtering
2065
+ if agent_id is not None:
2066
+ stmt = stmt.where(table.c.agent_id == agent_id)
2067
+ if team_id is not None:
2068
+ stmt = stmt.where(table.c.team_id == team_id)
2069
+ if workflow_id is not None:
2070
+ stmt = stmt.where(table.c.workflow_id == workflow_id)
2071
+ if model_id is not None:
2072
+ stmt = stmt.where(table.c.model_id == model_id)
2073
+ if eval_type is not None and len(eval_type) > 0:
2074
+ stmt = stmt.where(table.c.eval_type.in_(eval_type))
2075
+ if filter_type is not None:
2076
+ if filter_type == EvalFilterType.AGENT:
2077
+ stmt = stmt.where(table.c.agent_id.is_not(None))
2078
+ elif filter_type == EvalFilterType.TEAM:
2079
+ stmt = stmt.where(table.c.team_id.is_not(None))
2080
+ elif filter_type == EvalFilterType.WORKFLOW:
2081
+ stmt = stmt.where(table.c.workflow_id.is_not(None))
2082
+
2083
+ # Get total count after applying filtering
2084
+ count_stmt = select(func.count()).select_from(stmt.alias())
2085
+ total_count = sess.execute(count_stmt).scalar()
2086
+
2087
+ # Sorting
2088
+ if sort_by is None:
2089
+ stmt = stmt.order_by(table.c.created_at.desc())
2090
+ else:
2091
+ stmt = apply_sorting(stmt, table, sort_by, sort_order)
2092
+
2093
+ # Paginating
2094
+ if limit is not None:
2095
+ stmt = stmt.limit(limit)
2096
+ if page is not None:
2097
+ stmt = stmt.offset((page - 1) * limit)
2098
+
2099
+ result = sess.execute(stmt).fetchall()
2100
+ if not result:
2101
+ return [] if deserialize else ([], 0)
2102
+
2103
+ eval_runs_raw = [row._mapping for row in result]
2104
+ if not deserialize:
2105
+ return eval_runs_raw, total_count
2106
+
2107
+ return [EvalRunRecord.model_validate(row) for row in eval_runs_raw]
2108
+
2109
+ except Exception as e:
2110
+ log_error(f"Exception getting eval runs: {e}")
2111
+ raise e
2112
+
2113
+ def rename_eval_run(
2114
+ self, eval_run_id: str, name: str, deserialize: Optional[bool] = True
2115
+ ) -> Optional[Union[EvalRunRecord, Dict[str, Any]]]:
2116
+ """Upsert the name of an eval run in the database, returning raw dictionary.
2117
+
2118
+ Args:
2119
+ eval_run_id (str): The ID of the eval run to update.
2120
+ name (str): The new name of the eval run.
2121
+
2122
+ Returns:
2123
+ Optional[Dict[str, Any]]: The updated eval run, or None if the operation fails.
2124
+
2125
+ Raises:
2126
+ Exception: If an error occurs during update.
2127
+ """
2128
+ try:
2129
+ table = self._get_table(table_type="evals")
2130
+ if table is None:
2131
+ return None
2132
+
2133
+ with self.Session() as sess, sess.begin():
2134
+ stmt = (
2135
+ table.update().where(table.c.run_id == eval_run_id).values(name=name, updated_at=int(time.time()))
2136
+ )
2137
+ sess.execute(stmt)
2138
+
2139
+ eval_run_raw = self.get_eval_run(eval_run_id=eval_run_id, deserialize=deserialize)
2140
+
2141
+ log_debug(f"Renamed eval run with id '{eval_run_id}' to '{name}'")
2142
+
2143
+ if not eval_run_raw or not deserialize:
2144
+ return eval_run_raw
2145
+
2146
+ return EvalRunRecord.model_validate(eval_run_raw)
2147
+
2148
+ except Exception as e:
2149
+ log_error(f"Error renaming eval run {eval_run_id}: {e}")
2150
+ raise e
2151
+
2152
+ # -- Culture methods --
2153
+
2154
+ def clear_cultural_knowledge(self) -> None:
2155
+ """Delete all cultural knowledge from the database.
2156
+
2157
+ Raises:
2158
+ Exception: If an error occurs during deletion.
2159
+ """
2160
+ try:
2161
+ table = self._get_table(table_type="culture")
2162
+ if table is None:
2163
+ return
2164
+
2165
+ with self.Session() as sess, sess.begin():
2166
+ sess.execute(table.delete())
2167
+
2168
+ except Exception as e:
2169
+ log_warning(f"Exception deleting all cultural knowledge: {e}")
2170
+ raise e
2171
+
2172
+ def delete_cultural_knowledge(self, id: str) -> None:
2173
+ """Delete a cultural knowledge entry from the database.
2174
+
2175
+ Args:
2176
+ id (str): The ID of the cultural knowledge to delete.
2177
+
2178
+ Raises:
2179
+ Exception: If an error occurs during deletion.
2180
+ """
2181
+ try:
2182
+ table = self._get_table(table_type="culture")
2183
+ if table is None:
2184
+ return
2185
+
2186
+ with self.Session() as sess, sess.begin():
2187
+ delete_stmt = table.delete().where(table.c.id == id)
2188
+ result = sess.execute(delete_stmt)
2189
+
2190
+ success = result.rowcount > 0
2191
+ if success:
2192
+ log_debug(f"Successfully deleted cultural knowledge id: {id}")
2193
+ else:
2194
+ log_debug(f"No cultural knowledge found with id: {id}")
2195
+
2196
+ except Exception as e:
2197
+ log_error(f"Error deleting cultural knowledge: {e}")
2198
+ raise e
2199
+
2200
+ def get_cultural_knowledge(
2201
+ self, id: str, deserialize: Optional[bool] = True
2202
+ ) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
2203
+ """Get a cultural knowledge entry from the database.
2204
+
2205
+ Args:
2206
+ id (str): The ID of the cultural knowledge to get.
2207
+ deserialize (Optional[bool]): Whether to deserialize the cultural knowledge. Defaults to True.
2208
+
2209
+ Returns:
2210
+ Optional[Union[CulturalKnowledge, Dict[str, Any]]]: The cultural knowledge entry, or None if it doesn't exist.
2211
+
2212
+ Raises:
2213
+ Exception: If an error occurs during retrieval.
2214
+ """
2215
+ try:
2216
+ table = self._get_table(table_type="culture")
2217
+ if table is None:
2218
+ return None
2219
+
2220
+ with self.Session() as sess, sess.begin():
2221
+ stmt = select(table).where(table.c.id == id)
2222
+ result = sess.execute(stmt).fetchone()
2223
+ if result is None:
2224
+ return None
2225
+
2226
+ db_row = dict(result._mapping)
2227
+ if not db_row or not deserialize:
2228
+ return db_row
2229
+
2230
+ return deserialize_cultural_knowledge_from_db(db_row)
2231
+
2232
+ except Exception as e:
2233
+ log_error(f"Exception reading from cultural knowledge table: {e}")
2234
+ raise e
2235
+
2236
+ def get_all_cultural_knowledge(
2237
+ self,
2238
+ name: Optional[str] = None,
2239
+ agent_id: Optional[str] = None,
2240
+ team_id: Optional[str] = None,
2241
+ limit: Optional[int] = None,
2242
+ page: Optional[int] = None,
2243
+ sort_by: Optional[str] = None,
2244
+ sort_order: Optional[str] = None,
2245
+ deserialize: Optional[bool] = True,
2246
+ ) -> Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
2247
+ """Get all cultural knowledge from the database as CulturalKnowledge objects.
2248
+
2249
+ Args:
2250
+ name (Optional[str]): The name of the cultural knowledge to filter by.
2251
+ agent_id (Optional[str]): The ID of the agent to filter by.
2252
+ team_id (Optional[str]): The ID of the team to filter by.
2253
+ limit (Optional[int]): The maximum number of cultural knowledge entries to return.
2254
+ page (Optional[int]): The page number.
2255
+ sort_by (Optional[str]): The column to sort by.
2256
+ sort_order (Optional[str]): The order to sort by.
2257
+ deserialize (Optional[bool]): Whether to deserialize the cultural knowledge. Defaults to True.
2258
+
2259
+ Returns:
2260
+ Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
2261
+ - When deserialize=True: List of CulturalKnowledge objects
2262
+ - When deserialize=False: List of CulturalKnowledge dictionaries and total count
2263
+
2264
+ Raises:
2265
+ Exception: If an error occurs during retrieval.
2266
+ """
2267
+ try:
2268
+ table = self._get_table(table_type="culture")
2269
+ if table is None:
2270
+ return [] if deserialize else ([], 0)
2271
+
2272
+ with self.Session() as sess, sess.begin():
2273
+ stmt = select(table)
2274
+
2275
+ # Filtering
2276
+ if name is not None:
2277
+ stmt = stmt.where(table.c.name == name)
2278
+ if agent_id is not None:
2279
+ stmt = stmt.where(table.c.agent_id == agent_id)
2280
+ if team_id is not None:
2281
+ stmt = stmt.where(table.c.team_id == team_id)
2282
+
2283
+ # Get total count after applying filtering
2284
+ count_stmt = select(func.count()).select_from(stmt.alias())
2285
+ total_count = sess.execute(count_stmt).scalar()
2286
+
2287
+ # Sorting
2288
+ stmt = apply_sorting(stmt, table, sort_by, sort_order)
2289
+ # Paginating
2290
+ if limit is not None:
2291
+ stmt = stmt.limit(limit)
2292
+ if page is not None:
2293
+ stmt = stmt.offset((page - 1) * limit)
2294
+
2295
+ result = sess.execute(stmt).fetchall()
2296
+ if not result:
2297
+ return [] if deserialize else ([], 0)
2298
+
2299
+ db_rows = [dict(record._mapping) for record in result]
2300
+
2301
+ if not deserialize:
2302
+ return db_rows, total_count
2303
+
2304
+ return [deserialize_cultural_knowledge_from_db(row) for row in db_rows]
2305
+
2306
+ except Exception as e:
2307
+ log_error(f"Error reading from cultural knowledge table: {e}")
2308
+ raise e
2309
+
2310
+ def upsert_cultural_knowledge(
2311
+ self, cultural_knowledge: CulturalKnowledge, deserialize: Optional[bool] = True
2312
+ ) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
2313
+ """Upsert a cultural knowledge entry into the database.
2314
+
2315
+ Args:
2316
+ cultural_knowledge (CulturalKnowledge): The cultural knowledge to upsert.
2317
+ deserialize (Optional[bool]): Whether to deserialize the cultural knowledge. Defaults to True.
2318
+
2319
+ Returns:
2320
+ Optional[CulturalKnowledge]: The upserted cultural knowledge entry.
2321
+
2322
+ Raises:
2323
+ Exception: If an error occurs during upsert.
2324
+ """
2325
+ try:
2326
+ table = self._get_table(table_type="culture", create_table_if_not_found=True)
2327
+ if table is None:
2328
+ return None
2329
+
2330
+ if cultural_knowledge.id is None:
2331
+ cultural_knowledge.id = str(uuid4())
2332
+
2333
+ # Serialize content, categories, and notes into a JSON dict for DB storage
2334
+ content_dict = serialize_cultural_knowledge_for_db(cultural_knowledge)
2335
+
2336
+ with self.Session() as sess, sess.begin():
2337
+ stmt = mysql.insert(table).values(
2338
+ id=cultural_knowledge.id,
2339
+ name=cultural_knowledge.name,
2340
+ summary=cultural_knowledge.summary,
2341
+ content=content_dict if content_dict else None,
2342
+ metadata=cultural_knowledge.metadata,
2343
+ input=cultural_knowledge.input,
2344
+ created_at=cultural_knowledge.created_at,
2345
+ updated_at=int(time.time()),
2346
+ agent_id=cultural_knowledge.agent_id,
2347
+ team_id=cultural_knowledge.team_id,
2348
+ )
2349
+ stmt = stmt.on_duplicate_key_update(
2350
+ name=cultural_knowledge.name,
2351
+ summary=cultural_knowledge.summary,
2352
+ content=content_dict if content_dict else None,
2353
+ metadata=cultural_knowledge.metadata,
2354
+ input=cultural_knowledge.input,
2355
+ updated_at=int(time.time()),
2356
+ agent_id=cultural_knowledge.agent_id,
2357
+ team_id=cultural_knowledge.team_id,
2358
+ )
2359
+ sess.execute(stmt)
2360
+
2361
+ # Fetch the inserted/updated row
2362
+ return self.get_cultural_knowledge(id=cultural_knowledge.id, deserialize=deserialize)
2363
+
2364
+ except Exception as e:
2365
+ log_error(f"Error upserting cultural knowledge: {e}")
2366
+ raise e
2367
+
2368
+ # --- Traces ---
2369
+ def _get_traces_base_query(self, table: Table, spans_table: Optional[Table] = None):
2370
+ """Build base query for traces with aggregated span counts.
2371
+
2372
+ Args:
2373
+ table: The traces table.
2374
+ spans_table: The spans table (optional).
2375
+
2376
+ Returns:
2377
+ SQLAlchemy select statement with total_spans and error_count calculated dynamically.
2378
+ """
2379
+ from sqlalchemy import case, literal
2380
+
2381
+ if spans_table is not None:
2382
+ # JOIN with spans table to calculate total_spans and error_count
2383
+ return (
2384
+ select(
2385
+ table,
2386
+ func.coalesce(func.count(spans_table.c.span_id), 0).label("total_spans"),
2387
+ func.coalesce(func.sum(case((spans_table.c.status_code == "ERROR", 1), else_=0)), 0).label(
2388
+ "error_count"
2389
+ ),
2390
+ )
2391
+ .select_from(table.outerjoin(spans_table, table.c.trace_id == spans_table.c.trace_id))
2392
+ .group_by(table.c.trace_id)
2393
+ )
2394
+ else:
2395
+ # Fallback if spans table doesn't exist
2396
+ return select(table, literal(0).label("total_spans"), literal(0).label("error_count"))
2397
+
2398
+ def _get_trace_component_level_expr(self, workflow_id_col, team_id_col, agent_id_col, name_col):
2399
+ """Build a SQL CASE expression that returns the component level for a trace.
2400
+
2401
+ Component levels (higher = more important):
2402
+ - 3: Workflow root (.run or .arun with workflow_id)
2403
+ - 2: Team root (.run or .arun with team_id)
2404
+ - 1: Agent root (.run or .arun with agent_id)
2405
+ - 0: Child span (not a root)
2406
+
2407
+ Args:
2408
+ workflow_id_col: SQL column/expression for workflow_id
2409
+ team_id_col: SQL column/expression for team_id
2410
+ agent_id_col: SQL column/expression for agent_id
2411
+ name_col: SQL column/expression for name
2412
+
2413
+ Returns:
2414
+ SQLAlchemy CASE expression returning the component level as an integer.
2415
+ """
2416
+ from sqlalchemy import case, or_
2417
+
2418
+ is_root_name = or_(name_col.like("%.run%"), name_col.like("%.arun%"))
2419
+
2420
+ return case(
2421
+ # Workflow root (level 3)
2422
+ (and_(workflow_id_col.isnot(None), is_root_name), 3),
2423
+ # Team root (level 2)
2424
+ (and_(team_id_col.isnot(None), is_root_name), 2),
2425
+ # Agent root (level 1)
2426
+ (and_(agent_id_col.isnot(None), is_root_name), 1),
2427
+ # Child span or unknown (level 0)
2428
+ else_=0,
2429
+ )
2430
+
2431
    def upsert_trace(self, trace: "Trace") -> None:
        """Create or update a single trace record in the database.

        Uses INSERT ... ON DUPLICATE KEY UPDATE (upsert) to handle concurrent inserts
        atomically and avoid race conditions.

        Args:
            trace: The Trace object to store (one per trace_id).
        """
        from sqlalchemy import case

        try:
            table = self._get_table(table_type="traces", create_table_if_not_found=True)
            if table is None:
                return

            trace_dict = trace.to_dict()
            # total_spans / error_count are derived from the spans table at read
            # time (see _get_traces_base_query), so they are not stored here.
            trace_dict.pop("total_spans", None)
            trace_dict.pop("error_count", None)

            with self.Session() as sess, sess.begin():
                # Use upsert to handle concurrent inserts atomically
                # On conflict, update fields while preserving existing non-null context values
                # and keeping the earliest start_time
                insert_stmt = mysql.insert(table).values(trace_dict)

                # Build component level expressions for comparing trace priority:
                # one for the incoming row's values, one for the stored row.
                new_level = self._get_trace_component_level_expr(
                    insert_stmt.inserted.workflow_id,
                    insert_stmt.inserted.team_id,
                    insert_stmt.inserted.agent_id,
                    insert_stmt.inserted.name,
                )
                existing_level = self._get_trace_component_level_expr(
                    table.c.workflow_id,
                    table.c.team_id,
                    table.c.agent_id,
                    table.c.name,
                )

                # Build the ON DUPLICATE KEY UPDATE clause
                # Use LEAST for start_time, GREATEST for end_time to capture full trace duration
                # Duration is calculated using TIMESTAMPDIFF in microseconds then converted to ms
                upsert_stmt = insert_stmt.on_duplicate_key_update(
                    end_time=func.greatest(table.c.end_time, insert_stmt.inserted.end_time),
                    start_time=func.least(table.c.start_time, insert_stmt.inserted.start_time),
                    # Calculate duration in milliseconds using TIMESTAMPDIFF
                    # TIMESTAMPDIFF(MICROSECOND, start, end) / 1000 gives milliseconds
                    duration_ms=func.timestampdiff(
                        text("MICROSECOND"),
                        func.least(table.c.start_time, insert_stmt.inserted.start_time),
                        func.greatest(table.c.end_time, insert_stmt.inserted.end_time),
                    )
                    / 1000,
                    status=insert_stmt.inserted.status,
                    # Update name only if new trace is from a higher-level component
                    # Priority: workflow (3) > team (2) > agent (1) > child spans (0)
                    name=case(
                        (new_level > existing_level, insert_stmt.inserted.name),
                        else_=table.c.name,
                    ),
                    # Preserve existing non-null context values using COALESCE
                    run_id=func.coalesce(insert_stmt.inserted.run_id, table.c.run_id),
                    session_id=func.coalesce(insert_stmt.inserted.session_id, table.c.session_id),
                    user_id=func.coalesce(insert_stmt.inserted.user_id, table.c.user_id),
                    agent_id=func.coalesce(insert_stmt.inserted.agent_id, table.c.agent_id),
                    team_id=func.coalesce(insert_stmt.inserted.team_id, table.c.team_id),
                    workflow_id=func.coalesce(insert_stmt.inserted.workflow_id, table.c.workflow_id),
                )
                sess.execute(upsert_stmt)

        except Exception as e:
            log_error(f"Error creating trace: {e}")
            # Don't raise - tracing should not break the main application flow
2505
+
2506
+ def get_trace(
2507
+ self,
2508
+ trace_id: Optional[str] = None,
2509
+ run_id: Optional[str] = None,
2510
+ ):
2511
+ """Get a single trace by trace_id or other filters.
2512
+
2513
+ Args:
2514
+ trace_id: The unique trace identifier.
2515
+ run_id: Filter by run ID (returns first match).
2516
+
2517
+ Returns:
2518
+ Optional[Trace]: The trace if found, None otherwise.
2519
+
2520
+ Note:
2521
+ If multiple filters are provided, trace_id takes precedence.
2522
+ For other filters, the most recent trace is returned.
2523
+ """
2524
+ try:
2525
+ from agno.tracing.schemas import Trace
2526
+
2527
+ table = self._get_table(table_type="traces")
2528
+ if table is None:
2529
+ return None
2530
+
2531
+ # Get spans table for JOIN
2532
+ spans_table = self._get_table(table_type="spans")
2533
+
2534
+ with self.Session() as sess:
2535
+ # Build query with aggregated span counts
2536
+ stmt = self._get_traces_base_query(table, spans_table)
2537
+
2538
+ if trace_id:
2539
+ stmt = stmt.where(table.c.trace_id == trace_id)
2540
+ elif run_id:
2541
+ stmt = stmt.where(table.c.run_id == run_id)
2542
+ else:
2543
+ log_debug("get_trace called without any filter parameters")
2544
+ return None
2545
+
2546
+ # Order by most recent and get first result
2547
+ stmt = stmt.order_by(table.c.start_time.desc()).limit(1)
2548
+ result = sess.execute(stmt).fetchone()
2549
+
2550
+ if result:
2551
+ return Trace.from_dict(dict(result._mapping))
2552
+ return None
2553
+
2554
+ except Exception as e:
2555
+ log_error(f"Error getting trace: {e}")
2556
+ return None
2557
+
2558
+ def get_traces(
2559
+ self,
2560
+ run_id: Optional[str] = None,
2561
+ session_id: Optional[str] = None,
2562
+ user_id: Optional[str] = None,
2563
+ agent_id: Optional[str] = None,
2564
+ team_id: Optional[str] = None,
2565
+ workflow_id: Optional[str] = None,
2566
+ status: Optional[str] = None,
2567
+ start_time: Optional[datetime] = None,
2568
+ end_time: Optional[datetime] = None,
2569
+ limit: Optional[int] = 20,
2570
+ page: Optional[int] = 1,
2571
+ ) -> tuple[List, int]:
2572
+ """Get traces matching the provided filters.
2573
+
2574
+ Args:
2575
+ run_id: Filter by run ID.
2576
+ session_id: Filter by session ID.
2577
+ user_id: Filter by user ID.
2578
+ agent_id: Filter by agent ID.
2579
+ team_id: Filter by team ID.
2580
+ workflow_id: Filter by workflow ID.
2581
+ status: Filter by status (OK, ERROR, UNSET).
2582
+ start_time: Filter traces starting after this datetime.
2583
+ end_time: Filter traces ending before this datetime.
2584
+ limit: Maximum number of traces to return per page.
2585
+ page: Page number (1-indexed).
2586
+
2587
+ Returns:
2588
+ tuple[List[Trace], int]: Tuple of (list of matching traces, total count).
2589
+ """
2590
+ try:
2591
+ from agno.tracing.schemas import Trace
2592
+
2593
+ log_debug(
2594
+ f"get_traces called with filters: run_id={run_id}, session_id={session_id}, user_id={user_id}, agent_id={agent_id}, page={page}, limit={limit}"
2595
+ )
2596
+
2597
+ table = self._get_table(table_type="traces")
2598
+ if table is None:
2599
+ log_debug("Traces table not found")
2600
+ return [], 0
2601
+
2602
+ # Get spans table for JOIN
2603
+ spans_table = self._get_table(table_type="spans")
2604
+
2605
+ with self.Session() as sess:
2606
+ # Build base query with aggregated span counts
2607
+ base_stmt = self._get_traces_base_query(table, spans_table)
2608
+
2609
+ # Apply filters
2610
+ if run_id:
2611
+ base_stmt = base_stmt.where(table.c.run_id == run_id)
2612
+ if session_id:
2613
+ base_stmt = base_stmt.where(table.c.session_id == session_id)
2614
+ if user_id:
2615
+ base_stmt = base_stmt.where(table.c.user_id == user_id)
2616
+ if agent_id:
2617
+ base_stmt = base_stmt.where(table.c.agent_id == agent_id)
2618
+ if team_id:
2619
+ base_stmt = base_stmt.where(table.c.team_id == team_id)
2620
+ if workflow_id:
2621
+ base_stmt = base_stmt.where(table.c.workflow_id == workflow_id)
2622
+ if status:
2623
+ base_stmt = base_stmt.where(table.c.status == status)
2624
+ if start_time:
2625
+ # Convert datetime to ISO string for comparison
2626
+ base_stmt = base_stmt.where(table.c.start_time >= start_time.isoformat())
2627
+ if end_time:
2628
+ # Convert datetime to ISO string for comparison
2629
+ base_stmt = base_stmt.where(table.c.end_time <= end_time.isoformat())
2630
+
2631
+ # Get total count
2632
+ count_stmt = select(func.count()).select_from(base_stmt.alias())
2633
+ total_count = sess.execute(count_stmt).scalar() or 0
2634
+
2635
+ # Apply pagination
2636
+ offset = (page - 1) * limit if page and limit else 0
2637
+ paginated_stmt = base_stmt.order_by(table.c.start_time.desc()).limit(limit).offset(offset)
2638
+
2639
+ results = sess.execute(paginated_stmt).fetchall()
2640
+
2641
+ traces = [Trace.from_dict(dict(row._mapping)) for row in results]
2642
+ return traces, total_count
2643
+
2644
+ except Exception as e:
2645
+ log_error(f"Error getting traces: {e}")
2646
+ return [], 0
2647
+
2648
    def get_trace_stats(
        self,
        user_id: Optional[str] = None,
        agent_id: Optional[str] = None,
        team_id: Optional[str] = None,
        workflow_id: Optional[str] = None,
        start_time: Optional[datetime] = None,
        end_time: Optional[datetime] = None,
        limit: Optional[int] = 20,
        page: Optional[int] = 1,
    ) -> tuple[List[Dict[str, Any]], int]:
        """Get trace statistics grouped by session.

        Aggregates the traces table per session_id (trace count, first/last
        trace timestamps) and returns one dict per session, newest-activity
        first, plus the total number of matching sessions (pre-pagination).

        Args:
            user_id: Filter by user ID.
            agent_id: Filter by agent ID.
            team_id: Filter by team ID.
            workflow_id: Filter by workflow ID.
            start_time: Filter sessions with traces created after this datetime.
            end_time: Filter sessions with traces created before this datetime.
            limit: Maximum number of sessions to return per page.
            page: Page number (1-indexed).

        Returns:
            tuple[List[Dict], int]: Tuple of (list of session stats dicts, total count).
                Each dict contains: session_id, user_id, agent_id, team_id, total_traces,
                first_trace_at, last_trace_at.
                On any error, ([], 0) is returned (errors are logged, never raised).
        """
        try:
            log_debug(
                f"get_trace_stats called with filters: user_id={user_id}, agent_id={agent_id}, "
                f"workflow_id={workflow_id}, team_id={team_id}, "
                f"start_time={start_time}, end_time={end_time}, page={page}, limit={limit}"
            )

            table = self._get_table(table_type="traces")
            if table is None:
                log_debug("Traces table not found")
                return [], 0

            with self.Session() as sess:
                # Build base query grouped by session_id.
                # NOTE: user/agent/team/workflow ids appear in the GROUP BY so
                # they can be selected verbatim alongside the aggregates.
                base_stmt = (
                    select(
                        table.c.session_id,
                        table.c.user_id,
                        table.c.agent_id,
                        table.c.team_id,
                        table.c.workflow_id,
                        func.count(table.c.trace_id).label("total_traces"),
                        func.min(table.c.created_at).label("first_trace_at"),
                        func.max(table.c.created_at).label("last_trace_at"),
                    )
                    .where(table.c.session_id.isnot(None))  # Only sessions with session_id
                    .group_by(
                        table.c.session_id, table.c.user_id, table.c.agent_id, table.c.team_id, table.c.workflow_id
                    )
                )

                # Apply filters (each is optional; falsy values are skipped)
                if user_id:
                    base_stmt = base_stmt.where(table.c.user_id == user_id)
                if workflow_id:
                    base_stmt = base_stmt.where(table.c.workflow_id == workflow_id)
                if team_id:
                    base_stmt = base_stmt.where(table.c.team_id == team_id)
                if agent_id:
                    base_stmt = base_stmt.where(table.c.agent_id == agent_id)
                if start_time:
                    # Convert datetime to ISO string for comparison
                    # (created_at is treated as an ISO-8601 string column here
                    # — see the fromisoformat parsing below)
                    base_stmt = base_stmt.where(table.c.created_at >= start_time.isoformat())
                if end_time:
                    # Convert datetime to ISO string for comparison
                    base_stmt = base_stmt.where(table.c.created_at <= end_time.isoformat())

                # Get total count of sessions: count rows of the grouped query
                # by wrapping it as a subquery (one row per session).
                count_stmt = select(func.count()).select_from(base_stmt.alias())
                total_count = sess.execute(count_stmt).scalar() or 0
                log_debug(f"Total matching sessions: {total_count}")

                # Apply pagination and ordering (most recently active session first)
                offset = (page - 1) * limit if page and limit else 0
                paginated_stmt = base_stmt.order_by(func.max(table.c.created_at).desc()).limit(limit).offset(offset)

                results = sess.execute(paginated_stmt).fetchall()
                log_debug(f"Returning page {page} with {len(results)} session stats")

                # Convert to list of dicts with datetime objects
                stats_list = []
                for row in results:
                    # Convert ISO strings to datetime objects
                    first_trace_at_str = row.first_trace_at
                    last_trace_at_str = row.last_trace_at

                    # Parse ISO format strings to datetime objects (handle None values).
                    # "Z" is rewritten to "+00:00" because datetime.fromisoformat
                    # does not accept the Z suffix on older Python versions.
                    first_trace_at = None
                    last_trace_at = None
                    if first_trace_at_str is not None:
                        first_trace_at = datetime.fromisoformat(first_trace_at_str.replace("Z", "+00:00"))
                    if last_trace_at_str is not None:
                        last_trace_at = datetime.fromisoformat(last_trace_at_str.replace("Z", "+00:00"))

                    stats_list.append(
                        {
                            "session_id": row.session_id,
                            "user_id": row.user_id,
                            "agent_id": row.agent_id,
                            "team_id": row.team_id,
                            "workflow_id": row.workflow_id,
                            "total_traces": row.total_traces,
                            "first_trace_at": first_trace_at,
                            "last_trace_at": last_trace_at,
                        }
                    )

                return stats_list, total_count

        except Exception as e:
            log_error(f"Error getting trace stats: {e}")
            return [], 0
2768
+
2769
+ # --- Spans ---
2770
+ def create_span(self, span: "Span") -> None:
2771
+ """Create a single span in the database.
2772
+
2773
+ Args:
2774
+ span: The Span object to store.
2775
+ """
2776
+ try:
2777
+ table = self._get_table(table_type="spans", create_table_if_not_found=True)
2778
+ if table is None:
2779
+ return
2780
+
2781
+ with self.Session() as sess, sess.begin():
2782
+ stmt = mysql.insert(table).values(span.to_dict())
2783
+ sess.execute(stmt)
2784
+
2785
+ except Exception as e:
2786
+ log_error(f"Error creating span: {e}")
2787
+
2788
+ def create_spans(self, spans: List) -> None:
2789
+ """Create multiple spans in the database as a batch.
2790
+
2791
+ Args:
2792
+ spans: List of Span objects to store.
2793
+ """
2794
+ if not spans:
2795
+ return
2796
+
2797
+ try:
2798
+ table = self._get_table(table_type="spans", create_table_if_not_found=True)
2799
+ if table is None:
2800
+ return
2801
+
2802
+ with self.Session() as sess, sess.begin():
2803
+ for span in spans:
2804
+ stmt = mysql.insert(table).values(span.to_dict())
2805
+ sess.execute(stmt)
2806
+
2807
+ except Exception as e:
2808
+ log_error(f"Error creating spans batch: {e}")
2809
+
2810
+ def get_span(self, span_id: str):
2811
+ """Get a single span by its span_id.
2812
+
2813
+ Args:
2814
+ span_id: The unique span identifier.
2815
+
2816
+ Returns:
2817
+ Optional[Span]: The span if found, None otherwise.
2818
+ """
2819
+ try:
2820
+ from agno.tracing.schemas import Span
2821
+
2822
+ table = self._get_table(table_type="spans")
2823
+ if table is None:
2824
+ return None
2825
+
2826
+ with self.Session() as sess:
2827
+ stmt = select(table).where(table.c.span_id == span_id)
2828
+ result = sess.execute(stmt).fetchone()
2829
+ if result:
2830
+ return Span.from_dict(dict(result._mapping))
2831
+ return None
2832
+
2833
+ except Exception as e:
2834
+ log_error(f"Error getting span: {e}")
2835
+ return None
2836
+
2837
+ def get_spans(
2838
+ self,
2839
+ trace_id: Optional[str] = None,
2840
+ parent_span_id: Optional[str] = None,
2841
+ limit: Optional[int] = 1000,
2842
+ ) -> List:
2843
+ """Get spans matching the provided filters.
2844
+
2845
+ Args:
2846
+ trace_id: Filter by trace ID.
2847
+ parent_span_id: Filter by parent span ID.
2848
+ limit: Maximum number of spans to return.
2849
+
2850
+ Returns:
2851
+ List[Span]: List of matching spans.
2852
+ """
2853
+ try:
2854
+ from agno.tracing.schemas import Span
2855
+
2856
+ table = self._get_table(table_type="spans")
2857
+ if table is None:
2858
+ return []
2859
+
2860
+ with self.Session() as sess:
2861
+ stmt = select(table)
2862
+
2863
+ # Apply filters
2864
+ if trace_id:
2865
+ stmt = stmt.where(table.c.trace_id == trace_id)
2866
+ if parent_span_id:
2867
+ stmt = stmt.where(table.c.parent_span_id == parent_span_id)
2868
+
2869
+ if limit:
2870
+ stmt = stmt.limit(limit)
2871
+
2872
+ results = sess.execute(stmt).fetchall()
2873
+ return [Span.from_dict(dict(row._mapping)) for row in results]
2874
+
2875
+ except Exception as e:
2876
+ log_error(f"Error getting spans: {e}")
2877
+ return []