agno 0.1.2__py3-none-any.whl → 2.3.13__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (723)
  1. agno/__init__.py +8 -0
  2. agno/agent/__init__.py +44 -5
  3. agno/agent/agent.py +10531 -2975
  4. agno/api/agent.py +14 -53
  5. agno/api/api.py +7 -46
  6. agno/api/evals.py +22 -0
  7. agno/api/os.py +17 -0
  8. agno/api/routes.py +6 -25
  9. agno/api/schemas/__init__.py +9 -0
  10. agno/api/schemas/agent.py +6 -9
  11. agno/api/schemas/evals.py +16 -0
  12. agno/api/schemas/os.py +14 -0
  13. agno/api/schemas/team.py +10 -10
  14. agno/api/schemas/utils.py +21 -0
  15. agno/api/schemas/workflows.py +16 -0
  16. agno/api/settings.py +53 -0
  17. agno/api/team.py +22 -26
  18. agno/api/workflow.py +28 -0
  19. agno/cloud/aws/base.py +214 -0
  20. agno/cloud/aws/s3/__init__.py +2 -0
  21. agno/cloud/aws/s3/api_client.py +43 -0
  22. agno/cloud/aws/s3/bucket.py +195 -0
  23. agno/cloud/aws/s3/object.py +57 -0
  24. agno/compression/__init__.py +3 -0
  25. agno/compression/manager.py +247 -0
  26. agno/culture/__init__.py +3 -0
  27. agno/culture/manager.py +956 -0
  28. agno/db/__init__.py +24 -0
  29. agno/db/async_postgres/__init__.py +3 -0
  30. agno/db/base.py +946 -0
  31. agno/db/dynamo/__init__.py +3 -0
  32. agno/db/dynamo/dynamo.py +2781 -0
  33. agno/db/dynamo/schemas.py +442 -0
  34. agno/db/dynamo/utils.py +743 -0
  35. agno/db/firestore/__init__.py +3 -0
  36. agno/db/firestore/firestore.py +2379 -0
  37. agno/db/firestore/schemas.py +181 -0
  38. agno/db/firestore/utils.py +376 -0
  39. agno/db/gcs_json/__init__.py +3 -0
  40. agno/db/gcs_json/gcs_json_db.py +1791 -0
  41. agno/db/gcs_json/utils.py +228 -0
  42. agno/db/in_memory/__init__.py +3 -0
  43. agno/db/in_memory/in_memory_db.py +1312 -0
  44. agno/db/in_memory/utils.py +230 -0
  45. agno/db/json/__init__.py +3 -0
  46. agno/db/json/json_db.py +1777 -0
  47. agno/db/json/utils.py +230 -0
  48. agno/db/migrations/manager.py +199 -0
  49. agno/db/migrations/v1_to_v2.py +635 -0
  50. agno/db/migrations/versions/v2_3_0.py +938 -0
  51. agno/db/mongo/__init__.py +17 -0
  52. agno/db/mongo/async_mongo.py +2760 -0
  53. agno/db/mongo/mongo.py +2597 -0
  54. agno/db/mongo/schemas.py +119 -0
  55. agno/db/mongo/utils.py +276 -0
  56. agno/db/mysql/__init__.py +4 -0
  57. agno/db/mysql/async_mysql.py +2912 -0
  58. agno/db/mysql/mysql.py +2923 -0
  59. agno/db/mysql/schemas.py +186 -0
  60. agno/db/mysql/utils.py +488 -0
  61. agno/db/postgres/__init__.py +4 -0
  62. agno/db/postgres/async_postgres.py +2579 -0
  63. agno/db/postgres/postgres.py +2870 -0
  64. agno/db/postgres/schemas.py +187 -0
  65. agno/db/postgres/utils.py +442 -0
  66. agno/db/redis/__init__.py +3 -0
  67. agno/db/redis/redis.py +2141 -0
  68. agno/db/redis/schemas.py +159 -0
  69. agno/db/redis/utils.py +346 -0
  70. agno/db/schemas/__init__.py +4 -0
  71. agno/db/schemas/culture.py +120 -0
  72. agno/db/schemas/evals.py +34 -0
  73. agno/db/schemas/knowledge.py +40 -0
  74. agno/db/schemas/memory.py +61 -0
  75. agno/db/singlestore/__init__.py +3 -0
  76. agno/db/singlestore/schemas.py +179 -0
  77. agno/db/singlestore/singlestore.py +2877 -0
  78. agno/db/singlestore/utils.py +384 -0
  79. agno/db/sqlite/__init__.py +4 -0
  80. agno/db/sqlite/async_sqlite.py +2911 -0
  81. agno/db/sqlite/schemas.py +181 -0
  82. agno/db/sqlite/sqlite.py +2908 -0
  83. agno/db/sqlite/utils.py +429 -0
  84. agno/db/surrealdb/__init__.py +3 -0
  85. agno/db/surrealdb/metrics.py +292 -0
  86. agno/db/surrealdb/models.py +334 -0
  87. agno/db/surrealdb/queries.py +71 -0
  88. agno/db/surrealdb/surrealdb.py +1908 -0
  89. agno/db/surrealdb/utils.py +147 -0
  90. agno/db/utils.py +118 -0
  91. agno/eval/__init__.py +24 -0
  92. agno/eval/accuracy.py +666 -276
  93. agno/eval/agent_as_judge.py +861 -0
  94. agno/eval/base.py +29 -0
  95. agno/eval/performance.py +779 -0
  96. agno/eval/reliability.py +241 -62
  97. agno/eval/utils.py +120 -0
  98. agno/exceptions.py +143 -1
  99. agno/filters.py +354 -0
  100. agno/guardrails/__init__.py +6 -0
  101. agno/guardrails/base.py +19 -0
  102. agno/guardrails/openai.py +144 -0
  103. agno/guardrails/pii.py +94 -0
  104. agno/guardrails/prompt_injection.py +52 -0
  105. agno/hooks/__init__.py +3 -0
  106. agno/hooks/decorator.py +164 -0
  107. agno/integrations/discord/__init__.py +3 -0
  108. agno/integrations/discord/client.py +203 -0
  109. agno/knowledge/__init__.py +5 -1
  110. agno/{document → knowledge}/chunking/agentic.py +22 -14
  111. agno/{document → knowledge}/chunking/document.py +2 -2
  112. agno/{document → knowledge}/chunking/fixed.py +7 -6
  113. agno/knowledge/chunking/markdown.py +151 -0
  114. agno/{document → knowledge}/chunking/recursive.py +15 -3
  115. agno/knowledge/chunking/row.py +39 -0
  116. agno/knowledge/chunking/semantic.py +91 -0
  117. agno/knowledge/chunking/strategy.py +165 -0
  118. agno/knowledge/content.py +74 -0
  119. agno/knowledge/document/__init__.py +5 -0
  120. agno/{document → knowledge/document}/base.py +12 -2
  121. agno/knowledge/embedder/__init__.py +5 -0
  122. agno/knowledge/embedder/aws_bedrock.py +343 -0
  123. agno/knowledge/embedder/azure_openai.py +210 -0
  124. agno/{embedder → knowledge/embedder}/base.py +8 -0
  125. agno/knowledge/embedder/cohere.py +323 -0
  126. agno/knowledge/embedder/fastembed.py +62 -0
  127. agno/{embedder → knowledge/embedder}/fireworks.py +1 -1
  128. agno/knowledge/embedder/google.py +258 -0
  129. agno/knowledge/embedder/huggingface.py +94 -0
  130. agno/knowledge/embedder/jina.py +182 -0
  131. agno/knowledge/embedder/langdb.py +22 -0
  132. agno/knowledge/embedder/mistral.py +206 -0
  133. agno/knowledge/embedder/nebius.py +13 -0
  134. agno/knowledge/embedder/ollama.py +154 -0
  135. agno/knowledge/embedder/openai.py +195 -0
  136. agno/knowledge/embedder/sentence_transformer.py +63 -0
  137. agno/{embedder → knowledge/embedder}/together.py +1 -1
  138. agno/knowledge/embedder/vllm.py +262 -0
  139. agno/knowledge/embedder/voyageai.py +165 -0
  140. agno/knowledge/knowledge.py +3006 -0
  141. agno/knowledge/reader/__init__.py +7 -0
  142. agno/knowledge/reader/arxiv_reader.py +81 -0
  143. agno/knowledge/reader/base.py +95 -0
  144. agno/knowledge/reader/csv_reader.py +164 -0
  145. agno/knowledge/reader/docx_reader.py +82 -0
  146. agno/knowledge/reader/field_labeled_csv_reader.py +290 -0
  147. agno/knowledge/reader/firecrawl_reader.py +201 -0
  148. agno/knowledge/reader/json_reader.py +88 -0
  149. agno/knowledge/reader/markdown_reader.py +137 -0
  150. agno/knowledge/reader/pdf_reader.py +431 -0
  151. agno/knowledge/reader/pptx_reader.py +101 -0
  152. agno/knowledge/reader/reader_factory.py +313 -0
  153. agno/knowledge/reader/s3_reader.py +89 -0
  154. agno/knowledge/reader/tavily_reader.py +193 -0
  155. agno/knowledge/reader/text_reader.py +127 -0
  156. agno/knowledge/reader/web_search_reader.py +325 -0
  157. agno/knowledge/reader/website_reader.py +455 -0
  158. agno/knowledge/reader/wikipedia_reader.py +91 -0
  159. agno/knowledge/reader/youtube_reader.py +78 -0
  160. agno/knowledge/remote_content/remote_content.py +88 -0
  161. agno/knowledge/reranker/__init__.py +3 -0
  162. agno/{reranker → knowledge/reranker}/base.py +1 -1
  163. agno/{reranker → knowledge/reranker}/cohere.py +2 -2
  164. agno/knowledge/reranker/infinity.py +195 -0
  165. agno/knowledge/reranker/sentence_transformer.py +54 -0
  166. agno/knowledge/types.py +39 -0
  167. agno/knowledge/utils.py +234 -0
  168. agno/media.py +439 -95
  169. agno/memory/__init__.py +16 -3
  170. agno/memory/manager.py +1474 -123
  171. agno/memory/strategies/__init__.py +15 -0
  172. agno/memory/strategies/base.py +66 -0
  173. agno/memory/strategies/summarize.py +196 -0
  174. agno/memory/strategies/types.py +37 -0
  175. agno/models/aimlapi/__init__.py +5 -0
  176. agno/models/aimlapi/aimlapi.py +62 -0
  177. agno/models/anthropic/__init__.py +4 -0
  178. agno/models/anthropic/claude.py +960 -496
  179. agno/models/aws/__init__.py +15 -0
  180. agno/models/aws/bedrock.py +686 -451
  181. agno/models/aws/claude.py +190 -183
  182. agno/models/azure/__init__.py +18 -1
  183. agno/models/azure/ai_foundry.py +489 -0
  184. agno/models/azure/openai_chat.py +89 -40
  185. agno/models/base.py +2477 -550
  186. agno/models/cerebras/__init__.py +12 -0
  187. agno/models/cerebras/cerebras.py +565 -0
  188. agno/models/cerebras/cerebras_openai.py +131 -0
  189. agno/models/cohere/__init__.py +4 -0
  190. agno/models/cohere/chat.py +306 -492
  191. agno/models/cometapi/__init__.py +5 -0
  192. agno/models/cometapi/cometapi.py +74 -0
  193. agno/models/dashscope/__init__.py +5 -0
  194. agno/models/dashscope/dashscope.py +90 -0
  195. agno/models/deepinfra/__init__.py +5 -0
  196. agno/models/deepinfra/deepinfra.py +45 -0
  197. agno/models/deepseek/__init__.py +4 -0
  198. agno/models/deepseek/deepseek.py +110 -9
  199. agno/models/fireworks/__init__.py +4 -0
  200. agno/models/fireworks/fireworks.py +19 -22
  201. agno/models/google/__init__.py +3 -7
  202. agno/models/google/gemini.py +1717 -662
  203. agno/models/google/utils.py +22 -0
  204. agno/models/groq/__init__.py +4 -0
  205. agno/models/groq/groq.py +391 -666
  206. agno/models/huggingface/__init__.py +4 -0
  207. agno/models/huggingface/huggingface.py +266 -538
  208. agno/models/ibm/__init__.py +5 -0
  209. agno/models/ibm/watsonx.py +432 -0
  210. agno/models/internlm/__init__.py +3 -0
  211. agno/models/internlm/internlm.py +20 -3
  212. agno/models/langdb/__init__.py +1 -0
  213. agno/models/langdb/langdb.py +60 -0
  214. agno/models/litellm/__init__.py +14 -0
  215. agno/models/litellm/chat.py +503 -0
  216. agno/models/litellm/litellm_openai.py +42 -0
  217. agno/models/llama_cpp/__init__.py +5 -0
  218. agno/models/llama_cpp/llama_cpp.py +22 -0
  219. agno/models/lmstudio/__init__.py +5 -0
  220. agno/models/lmstudio/lmstudio.py +25 -0
  221. agno/models/message.py +361 -39
  222. agno/models/meta/__init__.py +12 -0
  223. agno/models/meta/llama.py +502 -0
  224. agno/models/meta/llama_openai.py +79 -0
  225. agno/models/metrics.py +120 -0
  226. agno/models/mistral/__init__.py +4 -0
  227. agno/models/mistral/mistral.py +293 -393
  228. agno/models/nebius/__init__.py +3 -0
  229. agno/models/nebius/nebius.py +53 -0
  230. agno/models/nexus/__init__.py +3 -0
  231. agno/models/nexus/nexus.py +22 -0
  232. agno/models/nvidia/__init__.py +4 -0
  233. agno/models/nvidia/nvidia.py +22 -3
  234. agno/models/ollama/__init__.py +4 -2
  235. agno/models/ollama/chat.py +257 -492
  236. agno/models/openai/__init__.py +7 -0
  237. agno/models/openai/chat.py +725 -770
  238. agno/models/openai/like.py +16 -2
  239. agno/models/openai/responses.py +1121 -0
  240. agno/models/openrouter/__init__.py +4 -0
  241. agno/models/openrouter/openrouter.py +62 -5
  242. agno/models/perplexity/__init__.py +5 -0
  243. agno/models/perplexity/perplexity.py +203 -0
  244. agno/models/portkey/__init__.py +3 -0
  245. agno/models/portkey/portkey.py +82 -0
  246. agno/models/requesty/__init__.py +5 -0
  247. agno/models/requesty/requesty.py +69 -0
  248. agno/models/response.py +177 -7
  249. agno/models/sambanova/__init__.py +4 -0
  250. agno/models/sambanova/sambanova.py +23 -4
  251. agno/models/siliconflow/__init__.py +5 -0
  252. agno/models/siliconflow/siliconflow.py +42 -0
  253. agno/models/together/__init__.py +4 -0
  254. agno/models/together/together.py +21 -164
  255. agno/models/utils.py +266 -0
  256. agno/models/vercel/__init__.py +3 -0
  257. agno/models/vercel/v0.py +43 -0
  258. agno/models/vertexai/__init__.py +0 -1
  259. agno/models/vertexai/claude.py +190 -0
  260. agno/models/vllm/__init__.py +3 -0
  261. agno/models/vllm/vllm.py +83 -0
  262. agno/models/xai/__init__.py +2 -0
  263. agno/models/xai/xai.py +111 -7
  264. agno/os/__init__.py +3 -0
  265. agno/os/app.py +1027 -0
  266. agno/os/auth.py +244 -0
  267. agno/os/config.py +126 -0
  268. agno/os/interfaces/__init__.py +1 -0
  269. agno/os/interfaces/a2a/__init__.py +3 -0
  270. agno/os/interfaces/a2a/a2a.py +42 -0
  271. agno/os/interfaces/a2a/router.py +249 -0
  272. agno/os/interfaces/a2a/utils.py +924 -0
  273. agno/os/interfaces/agui/__init__.py +3 -0
  274. agno/os/interfaces/agui/agui.py +47 -0
  275. agno/os/interfaces/agui/router.py +147 -0
  276. agno/os/interfaces/agui/utils.py +574 -0
  277. agno/os/interfaces/base.py +25 -0
  278. agno/os/interfaces/slack/__init__.py +3 -0
  279. agno/os/interfaces/slack/router.py +148 -0
  280. agno/os/interfaces/slack/security.py +30 -0
  281. agno/os/interfaces/slack/slack.py +47 -0
  282. agno/os/interfaces/whatsapp/__init__.py +3 -0
  283. agno/os/interfaces/whatsapp/router.py +210 -0
  284. agno/os/interfaces/whatsapp/security.py +55 -0
  285. agno/os/interfaces/whatsapp/whatsapp.py +36 -0
  286. agno/os/mcp.py +293 -0
  287. agno/os/middleware/__init__.py +9 -0
  288. agno/os/middleware/jwt.py +797 -0
  289. agno/os/router.py +258 -0
  290. agno/os/routers/__init__.py +3 -0
  291. agno/os/routers/agents/__init__.py +3 -0
  292. agno/os/routers/agents/router.py +599 -0
  293. agno/os/routers/agents/schema.py +261 -0
  294. agno/os/routers/evals/__init__.py +3 -0
  295. agno/os/routers/evals/evals.py +450 -0
  296. agno/os/routers/evals/schemas.py +174 -0
  297. agno/os/routers/evals/utils.py +231 -0
  298. agno/os/routers/health.py +31 -0
  299. agno/os/routers/home.py +52 -0
  300. agno/os/routers/knowledge/__init__.py +3 -0
  301. agno/os/routers/knowledge/knowledge.py +1008 -0
  302. agno/os/routers/knowledge/schemas.py +178 -0
  303. agno/os/routers/memory/__init__.py +3 -0
  304. agno/os/routers/memory/memory.py +661 -0
  305. agno/os/routers/memory/schemas.py +88 -0
  306. agno/os/routers/metrics/__init__.py +3 -0
  307. agno/os/routers/metrics/metrics.py +190 -0
  308. agno/os/routers/metrics/schemas.py +47 -0
  309. agno/os/routers/session/__init__.py +3 -0
  310. agno/os/routers/session/session.py +997 -0
  311. agno/os/routers/teams/__init__.py +3 -0
  312. agno/os/routers/teams/router.py +512 -0
  313. agno/os/routers/teams/schema.py +257 -0
  314. agno/os/routers/traces/__init__.py +3 -0
  315. agno/os/routers/traces/schemas.py +414 -0
  316. agno/os/routers/traces/traces.py +499 -0
  317. agno/os/routers/workflows/__init__.py +3 -0
  318. agno/os/routers/workflows/router.py +624 -0
  319. agno/os/routers/workflows/schema.py +75 -0
  320. agno/os/schema.py +534 -0
  321. agno/os/scopes.py +469 -0
  322. agno/{playground → os}/settings.py +7 -15
  323. agno/os/utils.py +973 -0
  324. agno/reasoning/anthropic.py +80 -0
  325. agno/reasoning/azure_ai_foundry.py +67 -0
  326. agno/reasoning/deepseek.py +63 -0
  327. agno/reasoning/default.py +97 -0
  328. agno/reasoning/gemini.py +73 -0
  329. agno/reasoning/groq.py +71 -0
  330. agno/reasoning/helpers.py +24 -1
  331. agno/reasoning/ollama.py +67 -0
  332. agno/reasoning/openai.py +86 -0
  333. agno/reasoning/step.py +2 -1
  334. agno/reasoning/vertexai.py +76 -0
  335. agno/run/__init__.py +6 -0
  336. agno/run/agent.py +822 -0
  337. agno/run/base.py +247 -0
  338. agno/run/cancel.py +81 -0
  339. agno/run/requirement.py +181 -0
  340. agno/run/team.py +767 -0
  341. agno/run/workflow.py +708 -0
  342. agno/session/__init__.py +10 -0
  343. agno/session/agent.py +260 -0
  344. agno/session/summary.py +265 -0
  345. agno/session/team.py +342 -0
  346. agno/session/workflow.py +501 -0
  347. agno/table.py +10 -0
  348. agno/team/__init__.py +37 -0
  349. agno/team/team.py +9536 -0
  350. agno/tools/__init__.py +7 -0
  351. agno/tools/agentql.py +120 -0
  352. agno/tools/airflow.py +22 -12
  353. agno/tools/api.py +122 -0
  354. agno/tools/apify.py +276 -83
  355. agno/tools/{arxiv_toolkit.py → arxiv.py} +20 -12
  356. agno/tools/aws_lambda.py +28 -7
  357. agno/tools/aws_ses.py +66 -0
  358. agno/tools/baidusearch.py +11 -4
  359. agno/tools/bitbucket.py +292 -0
  360. agno/tools/brandfetch.py +213 -0
  361. agno/tools/bravesearch.py +106 -0
  362. agno/tools/brightdata.py +367 -0
  363. agno/tools/browserbase.py +209 -0
  364. agno/tools/calcom.py +32 -23
  365. agno/tools/calculator.py +24 -37
  366. agno/tools/cartesia.py +187 -0
  367. agno/tools/{clickup_tool.py → clickup.py} +17 -28
  368. agno/tools/confluence.py +91 -26
  369. agno/tools/crawl4ai.py +139 -43
  370. agno/tools/csv_toolkit.py +28 -22
  371. agno/tools/dalle.py +36 -22
  372. agno/tools/daytona.py +475 -0
  373. agno/tools/decorator.py +169 -14
  374. agno/tools/desi_vocal.py +23 -11
  375. agno/tools/discord.py +32 -29
  376. agno/tools/docker.py +716 -0
  377. agno/tools/duckdb.py +76 -81
  378. agno/tools/duckduckgo.py +43 -40
  379. agno/tools/e2b.py +703 -0
  380. agno/tools/eleven_labs.py +65 -54
  381. agno/tools/email.py +13 -5
  382. agno/tools/evm.py +129 -0
  383. agno/tools/exa.py +324 -42
  384. agno/tools/fal.py +39 -35
  385. agno/tools/file.py +196 -30
  386. agno/tools/file_generation.py +356 -0
  387. agno/tools/financial_datasets.py +288 -0
  388. agno/tools/firecrawl.py +108 -33
  389. agno/tools/function.py +960 -122
  390. agno/tools/giphy.py +34 -12
  391. agno/tools/github.py +1294 -97
  392. agno/tools/gmail.py +922 -0
  393. agno/tools/google_bigquery.py +117 -0
  394. agno/tools/google_drive.py +271 -0
  395. agno/tools/google_maps.py +253 -0
  396. agno/tools/googlecalendar.py +607 -107
  397. agno/tools/googlesheets.py +377 -0
  398. agno/tools/hackernews.py +20 -12
  399. agno/tools/jina.py +24 -14
  400. agno/tools/jira.py +48 -19
  401. agno/tools/knowledge.py +218 -0
  402. agno/tools/linear.py +82 -43
  403. agno/tools/linkup.py +58 -0
  404. agno/tools/local_file_system.py +15 -7
  405. agno/tools/lumalab.py +41 -26
  406. agno/tools/mcp/__init__.py +10 -0
  407. agno/tools/mcp/mcp.py +331 -0
  408. agno/tools/mcp/multi_mcp.py +347 -0
  409. agno/tools/mcp/params.py +24 -0
  410. agno/tools/mcp_toolbox.py +284 -0
  411. agno/tools/mem0.py +193 -0
  412. agno/tools/memory.py +419 -0
  413. agno/tools/mlx_transcribe.py +11 -9
  414. agno/tools/models/azure_openai.py +190 -0
  415. agno/tools/models/gemini.py +203 -0
  416. agno/tools/models/groq.py +158 -0
  417. agno/tools/models/morph.py +186 -0
  418. agno/tools/models/nebius.py +124 -0
  419. agno/tools/models_labs.py +163 -82
  420. agno/tools/moviepy_video.py +18 -13
  421. agno/tools/nano_banana.py +151 -0
  422. agno/tools/neo4j.py +134 -0
  423. agno/tools/newspaper.py +15 -4
  424. agno/tools/newspaper4k.py +19 -6
  425. agno/tools/notion.py +204 -0
  426. agno/tools/openai.py +181 -17
  427. agno/tools/openbb.py +27 -20
  428. agno/tools/opencv.py +321 -0
  429. agno/tools/openweather.py +233 -0
  430. agno/tools/oxylabs.py +385 -0
  431. agno/tools/pandas.py +25 -15
  432. agno/tools/parallel.py +314 -0
  433. agno/tools/postgres.py +238 -185
  434. agno/tools/pubmed.py +125 -13
  435. agno/tools/python.py +48 -35
  436. agno/tools/reasoning.py +283 -0
  437. agno/tools/reddit.py +207 -29
  438. agno/tools/redshift.py +406 -0
  439. agno/tools/replicate.py +69 -26
  440. agno/tools/resend.py +11 -6
  441. agno/tools/scrapegraph.py +179 -19
  442. agno/tools/searxng.py +23 -31
  443. agno/tools/serpapi.py +15 -10
  444. agno/tools/serper.py +255 -0
  445. agno/tools/shell.py +23 -12
  446. agno/tools/shopify.py +1519 -0
  447. agno/tools/slack.py +56 -14
  448. agno/tools/sleep.py +8 -6
  449. agno/tools/spider.py +35 -11
  450. agno/tools/spotify.py +919 -0
  451. agno/tools/sql.py +34 -19
  452. agno/tools/tavily.py +158 -8
  453. agno/tools/telegram.py +18 -8
  454. agno/tools/todoist.py +218 -0
  455. agno/tools/toolkit.py +134 -9
  456. agno/tools/trafilatura.py +388 -0
  457. agno/tools/trello.py +25 -28
  458. agno/tools/twilio.py +18 -9
  459. agno/tools/user_control_flow.py +78 -0
  460. agno/tools/valyu.py +228 -0
  461. agno/tools/visualization.py +467 -0
  462. agno/tools/webbrowser.py +28 -0
  463. agno/tools/webex.py +76 -0
  464. agno/tools/website.py +23 -19
  465. agno/tools/webtools.py +45 -0
  466. agno/tools/whatsapp.py +286 -0
  467. agno/tools/wikipedia.py +28 -19
  468. agno/tools/workflow.py +285 -0
  469. agno/tools/{twitter.py → x.py} +142 -46
  470. agno/tools/yfinance.py +41 -39
  471. agno/tools/youtube.py +34 -17
  472. agno/tools/zendesk.py +15 -5
  473. agno/tools/zep.py +454 -0
  474. agno/tools/zoom.py +86 -37
  475. agno/tracing/__init__.py +12 -0
  476. agno/tracing/exporter.py +157 -0
  477. agno/tracing/schemas.py +276 -0
  478. agno/tracing/setup.py +111 -0
  479. agno/utils/agent.py +938 -0
  480. agno/utils/audio.py +37 -1
  481. agno/utils/certs.py +27 -0
  482. agno/utils/code_execution.py +11 -0
  483. agno/utils/common.py +103 -20
  484. agno/utils/cryptography.py +22 -0
  485. agno/utils/dttm.py +33 -0
  486. agno/utils/events.py +700 -0
  487. agno/utils/functions.py +107 -37
  488. agno/utils/gemini.py +426 -0
  489. agno/utils/hooks.py +171 -0
  490. agno/utils/http.py +185 -0
  491. agno/utils/json_schema.py +159 -37
  492. agno/utils/knowledge.py +36 -0
  493. agno/utils/location.py +19 -0
  494. agno/utils/log.py +221 -8
  495. agno/utils/mcp.py +214 -0
  496. agno/utils/media.py +335 -14
  497. agno/utils/merge_dict.py +22 -1
  498. agno/utils/message.py +77 -2
  499. agno/utils/models/ai_foundry.py +50 -0
  500. agno/utils/models/claude.py +373 -0
  501. agno/utils/models/cohere.py +94 -0
  502. agno/utils/models/llama.py +85 -0
  503. agno/utils/models/mistral.py +100 -0
  504. agno/utils/models/openai_responses.py +140 -0
  505. agno/utils/models/schema_utils.py +153 -0
  506. agno/utils/models/watsonx.py +41 -0
  507. agno/utils/openai.py +257 -0
  508. agno/utils/pickle.py +1 -1
  509. agno/utils/pprint.py +124 -8
  510. agno/utils/print_response/agent.py +930 -0
  511. agno/utils/print_response/team.py +1914 -0
  512. agno/utils/print_response/workflow.py +1668 -0
  513. agno/utils/prompts.py +111 -0
  514. agno/utils/reasoning.py +108 -0
  515. agno/utils/response.py +163 -0
  516. agno/utils/serialize.py +32 -0
  517. agno/utils/shell.py +4 -4
  518. agno/utils/streamlit.py +487 -0
  519. agno/utils/string.py +204 -51
  520. agno/utils/team.py +139 -0
  521. agno/utils/timer.py +9 -2
  522. agno/utils/tokens.py +657 -0
  523. agno/utils/tools.py +19 -1
  524. agno/utils/whatsapp.py +305 -0
  525. agno/utils/yaml_io.py +3 -3
  526. agno/vectordb/__init__.py +2 -0
  527. agno/vectordb/base.py +87 -9
  528. agno/vectordb/cassandra/__init__.py +5 -1
  529. agno/vectordb/cassandra/cassandra.py +383 -27
  530. agno/vectordb/chroma/__init__.py +4 -0
  531. agno/vectordb/chroma/chromadb.py +748 -83
  532. agno/vectordb/clickhouse/__init__.py +7 -1
  533. agno/vectordb/clickhouse/clickhousedb.py +554 -53
  534. agno/vectordb/couchbase/__init__.py +3 -0
  535. agno/vectordb/couchbase/couchbase.py +1446 -0
  536. agno/vectordb/lancedb/__init__.py +5 -0
  537. agno/vectordb/lancedb/lance_db.py +730 -98
  538. agno/vectordb/langchaindb/__init__.py +5 -0
  539. agno/vectordb/langchaindb/langchaindb.py +163 -0
  540. agno/vectordb/lightrag/__init__.py +5 -0
  541. agno/vectordb/lightrag/lightrag.py +388 -0
  542. agno/vectordb/llamaindex/__init__.py +3 -0
  543. agno/vectordb/llamaindex/llamaindexdb.py +166 -0
  544. agno/vectordb/milvus/__init__.py +3 -0
  545. agno/vectordb/milvus/milvus.py +966 -78
  546. agno/vectordb/mongodb/__init__.py +9 -1
  547. agno/vectordb/mongodb/mongodb.py +1175 -172
  548. agno/vectordb/pgvector/__init__.py +8 -0
  549. agno/vectordb/pgvector/pgvector.py +599 -115
  550. agno/vectordb/pineconedb/__init__.py +5 -1
  551. agno/vectordb/pineconedb/pineconedb.py +406 -43
  552. agno/vectordb/qdrant/__init__.py +4 -0
  553. agno/vectordb/qdrant/qdrant.py +914 -61
  554. agno/vectordb/redis/__init__.py +9 -0
  555. agno/vectordb/redis/redisdb.py +682 -0
  556. agno/vectordb/singlestore/__init__.py +8 -1
  557. agno/vectordb/singlestore/singlestore.py +771 -0
  558. agno/vectordb/surrealdb/__init__.py +3 -0
  559. agno/vectordb/surrealdb/surrealdb.py +663 -0
  560. agno/vectordb/upstashdb/__init__.py +5 -0
  561. agno/vectordb/upstashdb/upstashdb.py +718 -0
  562. agno/vectordb/weaviate/__init__.py +8 -0
  563. agno/vectordb/weaviate/index.py +15 -0
  564. agno/vectordb/weaviate/weaviate.py +1009 -0
  565. agno/workflow/__init__.py +23 -1
  566. agno/workflow/agent.py +299 -0
  567. agno/workflow/condition.py +759 -0
  568. agno/workflow/loop.py +756 -0
  569. agno/workflow/parallel.py +853 -0
  570. agno/workflow/router.py +723 -0
  571. agno/workflow/step.py +1564 -0
  572. agno/workflow/steps.py +613 -0
  573. agno/workflow/types.py +556 -0
  574. agno/workflow/workflow.py +4327 -514
  575. agno-2.3.13.dist-info/METADATA +639 -0
  576. agno-2.3.13.dist-info/RECORD +613 -0
  577. {agno-0.1.2.dist-info → agno-2.3.13.dist-info}/WHEEL +1 -1
  578. agno-2.3.13.dist-info/licenses/LICENSE +201 -0
  579. agno/api/playground.py +0 -91
  580. agno/api/schemas/playground.py +0 -22
  581. agno/api/schemas/user.py +0 -22
  582. agno/api/schemas/workspace.py +0 -46
  583. agno/api/user.py +0 -160
  584. agno/api/workspace.py +0 -151
  585. agno/cli/auth_server.py +0 -118
  586. agno/cli/config.py +0 -275
  587. agno/cli/console.py +0 -88
  588. agno/cli/credentials.py +0 -23
  589. agno/cli/entrypoint.py +0 -571
  590. agno/cli/operator.py +0 -355
  591. agno/cli/settings.py +0 -85
  592. agno/cli/ws/ws_cli.py +0 -817
  593. agno/constants.py +0 -13
  594. agno/document/__init__.py +0 -1
  595. agno/document/chunking/semantic.py +0 -47
  596. agno/document/chunking/strategy.py +0 -31
  597. agno/document/reader/__init__.py +0 -1
  598. agno/document/reader/arxiv_reader.py +0 -41
  599. agno/document/reader/base.py +0 -22
  600. agno/document/reader/csv_reader.py +0 -84
  601. agno/document/reader/docx_reader.py +0 -46
  602. agno/document/reader/firecrawl_reader.py +0 -99
  603. agno/document/reader/json_reader.py +0 -43
  604. agno/document/reader/pdf_reader.py +0 -219
  605. agno/document/reader/s3/pdf_reader.py +0 -46
  606. agno/document/reader/s3/text_reader.py +0 -51
  607. agno/document/reader/text_reader.py +0 -41
  608. agno/document/reader/website_reader.py +0 -175
  609. agno/document/reader/youtube_reader.py +0 -50
  610. agno/embedder/__init__.py +0 -1
  611. agno/embedder/azure_openai.py +0 -86
  612. agno/embedder/cohere.py +0 -72
  613. agno/embedder/fastembed.py +0 -37
  614. agno/embedder/google.py +0 -73
  615. agno/embedder/huggingface.py +0 -54
  616. agno/embedder/mistral.py +0 -80
  617. agno/embedder/ollama.py +0 -57
  618. agno/embedder/openai.py +0 -74
  619. agno/embedder/sentence_transformer.py +0 -38
  620. agno/embedder/voyageai.py +0 -64
  621. agno/eval/perf.py +0 -201
  622. agno/file/__init__.py +0 -1
  623. agno/file/file.py +0 -16
  624. agno/file/local/csv.py +0 -32
  625. agno/file/local/txt.py +0 -19
  626. agno/infra/app.py +0 -240
  627. agno/infra/base.py +0 -144
  628. agno/infra/context.py +0 -20
  629. agno/infra/db_app.py +0 -52
  630. agno/infra/resource.py +0 -205
  631. agno/infra/resources.py +0 -55
  632. agno/knowledge/agent.py +0 -230
  633. agno/knowledge/arxiv.py +0 -22
  634. agno/knowledge/combined.py +0 -22
  635. agno/knowledge/csv.py +0 -28
  636. agno/knowledge/csv_url.py +0 -19
  637. agno/knowledge/document.py +0 -20
  638. agno/knowledge/docx.py +0 -30
  639. agno/knowledge/json.py +0 -28
  640. agno/knowledge/langchain.py +0 -71
  641. agno/knowledge/llamaindex.py +0 -66
  642. agno/knowledge/pdf.py +0 -28
  643. agno/knowledge/pdf_url.py +0 -26
  644. agno/knowledge/s3/base.py +0 -60
  645. agno/knowledge/s3/pdf.py +0 -21
  646. agno/knowledge/s3/text.py +0 -23
  647. agno/knowledge/text.py +0 -30
  648. agno/knowledge/website.py +0 -88
  649. agno/knowledge/wikipedia.py +0 -31
  650. agno/knowledge/youtube.py +0 -22
  651. agno/memory/agent.py +0 -392
  652. agno/memory/classifier.py +0 -104
  653. agno/memory/db/__init__.py +0 -1
  654. agno/memory/db/base.py +0 -42
  655. agno/memory/db/mongodb.py +0 -189
  656. agno/memory/db/postgres.py +0 -203
  657. agno/memory/db/sqlite.py +0 -193
  658. agno/memory/memory.py +0 -15
  659. agno/memory/row.py +0 -36
  660. agno/memory/summarizer.py +0 -192
  661. agno/memory/summary.py +0 -19
  662. agno/memory/workflow.py +0 -38
  663. agno/models/google/gemini_openai.py +0 -26
  664. agno/models/ollama/hermes.py +0 -221
  665. agno/models/ollama/tools.py +0 -362
  666. agno/models/vertexai/gemini.py +0 -595
  667. agno/playground/__init__.py +0 -3
  668. agno/playground/async_router.py +0 -421
  669. agno/playground/deploy.py +0 -249
  670. agno/playground/operator.py +0 -92
  671. agno/playground/playground.py +0 -91
  672. agno/playground/schemas.py +0 -76
  673. agno/playground/serve.py +0 -55
  674. agno/playground/sync_router.py +0 -405
  675. agno/reasoning/agent.py +0 -68
  676. agno/run/response.py +0 -112
  677. agno/storage/agent/__init__.py +0 -0
  678. agno/storage/agent/base.py +0 -38
  679. agno/storage/agent/dynamodb.py +0 -350
  680. agno/storage/agent/json.py +0 -92
  681. agno/storage/agent/mongodb.py +0 -228
  682. agno/storage/agent/postgres.py +0 -367
  683. agno/storage/agent/session.py +0 -79
  684. agno/storage/agent/singlestore.py +0 -303
  685. agno/storage/agent/sqlite.py +0 -357
  686. agno/storage/agent/yaml.py +0 -93
  687. agno/storage/workflow/__init__.py +0 -0
  688. agno/storage/workflow/base.py +0 -40
  689. agno/storage/workflow/mongodb.py +0 -233
  690. agno/storage/workflow/postgres.py +0 -366
  691. agno/storage/workflow/session.py +0 -60
  692. agno/storage/workflow/sqlite.py +0 -359
  693. agno/tools/googlesearch.py +0 -88
  694. agno/utils/defaults.py +0 -57
  695. agno/utils/filesystem.py +0 -39
  696. agno/utils/git.py +0 -52
  697. agno/utils/json_io.py +0 -30
  698. agno/utils/load_env.py +0 -19
  699. agno/utils/py_io.py +0 -19
  700. agno/utils/pyproject.py +0 -18
  701. agno/utils/resource_filter.py +0 -31
  702. agno/vectordb/singlestore/s2vectordb.py +0 -390
  703. agno/vectordb/singlestore/s2vectordb2.py +0 -355
  704. agno/workspace/__init__.py +0 -0
  705. agno/workspace/config.py +0 -325
  706. agno/workspace/enums.py +0 -6
  707. agno/workspace/helpers.py +0 -48
  708. agno/workspace/operator.py +0 -758
  709. agno/workspace/settings.py +0 -63
  710. agno-0.1.2.dist-info/LICENSE +0 -375
  711. agno-0.1.2.dist-info/METADATA +0 -502
  712. agno-0.1.2.dist-info/RECORD +0 -352
  713. agno-0.1.2.dist-info/entry_points.txt +0 -3
  714. /agno/{cli → db/migrations}/__init__.py +0 -0
  715. /agno/{cli/ws → db/migrations/versions}/__init__.py +0 -0
  716. /agno/{document/chunking/__init__.py → db/schemas/metrics.py} +0 -0
  717. /agno/{document/reader/s3 → integrations}/__init__.py +0 -0
  718. /agno/{file/local → knowledge/chunking}/__init__.py +0 -0
  719. /agno/{infra → knowledge/remote_content}/__init__.py +0 -0
  720. /agno/{knowledge/s3 → tools/models}/__init__.py +0 -0
  721. /agno/{reranker → utils/models}/__init__.py +0 -0
  722. /agno/{storage → utils/print_response}/__init__.py +0 -0
  723. {agno-0.1.2.dist-info → agno-2.3.13.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,2760 @@
1
+ import asyncio
2
+ import time
3
+ from datetime import date, datetime, timedelta, timezone
4
+ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
5
+ from uuid import uuid4
6
+
7
+ if TYPE_CHECKING:
8
+ from agno.tracing.schemas import Span, Trace
9
+
10
+ from agno.db.base import AsyncBaseDb, SessionType
11
+ from agno.db.mongo.utils import (
12
+ apply_pagination,
13
+ apply_sorting,
14
+ bulk_upsert_metrics,
15
+ calculate_date_metrics,
16
+ create_collection_indexes_async,
17
+ deserialize_cultural_knowledge_from_db,
18
+ fetch_all_sessions_data,
19
+ get_dates_to_calculate_metrics_for,
20
+ serialize_cultural_knowledge_for_db,
21
+ )
22
+ from agno.db.schemas.culture import CulturalKnowledge
23
+ from agno.db.schemas.evals import EvalFilterType, EvalRunRecord, EvalType
24
+ from agno.db.schemas.knowledge import KnowledgeRow
25
+ from agno.db.schemas.memory import UserMemory
26
+ from agno.db.utils import deserialize_session_json_fields
27
+ from agno.session import AgentSession, Session, TeamSession, WorkflowSession
28
+ from agno.utils.log import log_debug, log_error, log_info
29
+ from agno.utils.string import generate_id
30
+
31
+ try:
32
+ from motor.motor_asyncio import AsyncIOMotorClient, AsyncIOMotorCollection, AsyncIOMotorDatabase # type: ignore
33
+
34
+ MOTOR_AVAILABLE = True
35
+ except ImportError:
36
+ MOTOR_AVAILABLE = False
37
+ AsyncIOMotorClient = None # type: ignore
38
+ AsyncIOMotorCollection = None # type: ignore
39
+ AsyncIOMotorDatabase = None # type: ignore
40
+
41
+ try:
42
+ from pymongo import AsyncMongoClient # type: ignore
43
+ from pymongo.collection import AsyncCollection # type: ignore
44
+ from pymongo.database import AsyncDatabase # type: ignore
45
+
46
+ PYMONGO_ASYNC_AVAILABLE = True
47
+ except ImportError:
48
+ PYMONGO_ASYNC_AVAILABLE = False
49
+ AsyncMongoClient = None # type: ignore
50
+ AsyncDatabase = None # type: ignore
51
+ AsyncCollection = None # type: ignore
52
+
53
+ try:
54
+ from pymongo import ReturnDocument
55
+ from pymongo.errors import OperationFailure
56
+ except ImportError:
57
+ raise ImportError("`pymongo` not installed. Please install it using `pip install -U pymongo`")
58
+
59
# Ensure at least one async library is available.
# Fix: the "(recommended)" option previously lacked a trailing "\n", so both
# install commands were rendered on a single line in the error message.
if not MOTOR_AVAILABLE and not PYMONGO_ASYNC_AVAILABLE:
    raise ImportError(
        "Neither `motor` nor PyMongo async is installed. "
        "Please install one of them using:\n"
        "  - `pip install -U 'pymongo>=4.9'` (recommended)\n"
        "  - `pip install -U motor` (legacy, deprecated)\n"
    )
67
+
68
# Create union types for client, database, and collection.
# These aliases exist only for static type checking: at runtime one (or both)
# driver families may be missing, so the runtime branch below falls back to Any.
if TYPE_CHECKING:
    if MOTOR_AVAILABLE and PYMONGO_ASYNC_AVAILABLE:
        # Both drivers installed: accept either client family.
        AsyncMongoClientType = Union[AsyncIOMotorClient, AsyncMongoClient]  # type: ignore
        AsyncMongoDatabaseType = Union[AsyncIOMotorDatabase, AsyncDatabase]  # type: ignore
        AsyncMongoCollectionType = Union[AsyncIOMotorCollection, AsyncCollection]  # type: ignore
    elif MOTOR_AVAILABLE:
        # Only Motor installed.
        AsyncMongoClientType = AsyncIOMotorClient  # type: ignore
        AsyncMongoDatabaseType = AsyncIOMotorDatabase  # type: ignore
        AsyncMongoCollectionType = AsyncIOMotorCollection  # type: ignore
    else:
        # Only PyMongo async installed.
        AsyncMongoClientType = AsyncMongoClient  # type: ignore
        AsyncMongoDatabaseType = AsyncDatabase  # type: ignore
        AsyncMongoCollectionType = AsyncCollection  # type: ignore
else:
    # Runtime type - use Any to avoid import issues
    AsyncMongoClientType = Any
    AsyncMongoDatabaseType = Any
    AsyncMongoCollectionType = Any
87
+
88
+
89
+ # Client type constants (defined before class to allow use in _detect_client_type)
90
+ _CLIENT_TYPE_MOTOR = "motor"
91
+ _CLIENT_TYPE_PYMONGO_ASYNC = "pymongo_async"
92
+ _CLIENT_TYPE_UNKNOWN = "unknown"
93
+
94
+
95
+ def _detect_client_type(client: Any) -> str:
96
+ """Detect whether a client is Motor or PyMongo async."""
97
+ if client is None:
98
+ return _CLIENT_TYPE_UNKNOWN
99
+
100
+ # Check PyMongo async
101
+ if PYMONGO_ASYNC_AVAILABLE and AsyncMongoClient is not None:
102
+ try:
103
+ if isinstance(client, AsyncMongoClient):
104
+ return _CLIENT_TYPE_PYMONGO_ASYNC
105
+ except (TypeError, AttributeError):
106
+ pass # Fall through to next check
107
+
108
+ if MOTOR_AVAILABLE and AsyncIOMotorClient is not None:
109
+ try:
110
+ if isinstance(client, AsyncIOMotorClient):
111
+ return _CLIENT_TYPE_MOTOR
112
+ except (TypeError, AttributeError):
113
+ pass # Fall through to fallback
114
+
115
+ # Fallback to string matching only if isinstance fails
116
+ # (should rarely happen, but useful for edge cases)
117
+ client_type_name = type(client).__name__
118
+ if "Motor" in client_type_name or "AsyncIOMotor" in client_type_name:
119
+ return _CLIENT_TYPE_MOTOR
120
+ elif "AsyncMongo" in client_type_name:
121
+ return _CLIENT_TYPE_PYMONGO_ASYNC
122
+
123
+ # Last resort: check module name
124
+ module_name = type(client).__module__
125
+ if "motor" in module_name:
126
+ return _CLIENT_TYPE_MOTOR
127
+ elif "pymongo" in module_name:
128
+ return _CLIENT_TYPE_PYMONGO_ASYNC
129
+
130
+ return _CLIENT_TYPE_UNKNOWN
131
+
132
+
133
+ class AsyncMongoDb(AsyncBaseDb):
134
+ # Client type constants (class-level access to module constants)
135
+ CLIENT_TYPE_MOTOR = _CLIENT_TYPE_MOTOR
136
+ CLIENT_TYPE_PYMONGO_ASYNC = _CLIENT_TYPE_PYMONGO_ASYNC
137
+ CLIENT_TYPE_UNKNOWN = _CLIENT_TYPE_UNKNOWN
138
+
139
+ def __init__(
140
+ self,
141
+ db_client: Optional[Union["AsyncIOMotorClient", "AsyncMongoClient"]] = None,
142
+ db_name: Optional[str] = None,
143
+ db_url: Optional[str] = None,
144
+ session_collection: Optional[str] = None,
145
+ memory_collection: Optional[str] = None,
146
+ metrics_collection: Optional[str] = None,
147
+ eval_collection: Optional[str] = None,
148
+ knowledge_collection: Optional[str] = None,
149
+ culture_collection: Optional[str] = None,
150
+ traces_collection: Optional[str] = None,
151
+ spans_collection: Optional[str] = None,
152
+ id: Optional[str] = None,
153
+ ):
154
+ """
155
+ Async interface for interacting with a MongoDB database.
156
+
157
+ Supports both Motor (legacy) and PyMongo async (recommended) clients.
158
+ When both libraries are available, PyMongo async is preferred.
159
+
160
+ Args:
161
+ db_client (Optional[Union[AsyncIOMotorClient, AsyncMongoClient]]):
162
+ The MongoDB async client to use. Can be either Motor's AsyncIOMotorClient
163
+ or PyMongo's AsyncMongoClient. If not provided, a client will be created
164
+ from db_url using the preferred available library.
165
+ db_name (Optional[str]): The name of the database to use.
166
+ db_url (Optional[str]): The database URL to connect to.
167
+ session_collection (Optional[str]): Name of the collection to store sessions.
168
+ memory_collection (Optional[str]): Name of the collection to store memories.
169
+ metrics_collection (Optional[str]): Name of the collection to store metrics.
170
+ eval_collection (Optional[str]): Name of the collection to store evaluation runs.
171
+ knowledge_collection (Optional[str]): Name of the collection to store knowledge documents.
172
+ culture_collection (Optional[str]): Name of the collection to store cultural knowledge.
173
+ traces_collection (Optional[str]): Name of the collection to store traces.
174
+ spans_collection (Optional[str]): Name of the collection to store spans.
175
+ id (Optional[str]): ID of the database.
176
+
177
+ Raises:
178
+ ValueError: If neither db_url nor db_client is provided, or if db_client type is unsupported.
179
+ ImportError: If neither motor nor pymongo async is installed.
180
+ """
181
+ if id is None:
182
+ base_seed = db_url or str(db_client)
183
+ db_name_suffix = db_name if db_name is not None else "agno"
184
+ seed = f"{base_seed}#{db_name_suffix}"
185
+ id = generate_id(seed)
186
+
187
+ super().__init__(
188
+ id=id,
189
+ session_table=session_collection,
190
+ memory_table=memory_collection,
191
+ metrics_table=metrics_collection,
192
+ eval_table=eval_collection,
193
+ knowledge_table=knowledge_collection,
194
+ culture_table=culture_collection,
195
+ traces_table=traces_collection,
196
+ spans_table=spans_collection,
197
+ )
198
+
199
+ # Detect client type if provided
200
+ if db_client is not None:
201
+ self._client_type = _detect_client_type(db_client)
202
+ if self._client_type == self.CLIENT_TYPE_UNKNOWN:
203
+ raise ValueError(
204
+ f"Unsupported MongoDB client type: {type(db_client).__name__}. "
205
+ "Only Motor (AsyncIOMotorClient) or PyMongo async (AsyncMongoClient) are supported."
206
+ )
207
+ else:
208
+ # Auto-select preferred library when creating from URL
209
+ # Prefer PyMongo async if available, fallback to Motor
210
+ self._client_type = self.CLIENT_TYPE_PYMONGO_ASYNC if PYMONGO_ASYNC_AVAILABLE else self.CLIENT_TYPE_MOTOR
211
+
212
+ # Store configuration for lazy initialization
213
+ self._provided_client: Optional[AsyncMongoClientType] = db_client
214
+ self.db_url: Optional[str] = db_url
215
+ self.db_name: str = db_name if db_name is not None else "agno"
216
+
217
+ if self._provided_client is None and self.db_url is None:
218
+ raise ValueError("One of db_url or db_client must be provided")
219
+
220
+ # Client and database will be lazily initialized per event loop
221
+ self._client: Optional[AsyncMongoClientType] = None
222
+ self._database: Optional[AsyncMongoDatabaseType] = None
223
+ self._event_loop: Optional[asyncio.AbstractEventLoop] = None
224
+
225
+ async def table_exists(self, table_name: str) -> bool:
226
+ """Check if a collection with the given name exists in the MongoDB database.
227
+
228
+ Args:
229
+ table_name: Name of the collection to check
230
+
231
+ Returns:
232
+ bool: True if the collection exists in the database, False otherwise
233
+ """
234
+ collection_names = await self.database.list_collection_names()
235
+ return table_name in collection_names
236
+
237
+ async def _create_all_tables(self):
238
+ """Create all configured MongoDB collections if they don't exist."""
239
+ collections_to_create = [
240
+ ("sessions", self.session_table_name),
241
+ ("memories", self.memory_table_name),
242
+ ("metrics", self.metrics_table_name),
243
+ ("evals", self.eval_table_name),
244
+ ("knowledge", self.knowledge_table_name),
245
+ ("culture", self.culture_table_name),
246
+ ]
247
+
248
+ for collection_type, collection_name in collections_to_create:
249
+ if collection_name and not await self.table_exists(collection_name):
250
+ await self._get_collection(collection_type, create_collection_if_not_found=True)
251
+
252
+ def _ensure_client(self) -> AsyncMongoClientType:
253
+ """
254
+ Ensure the MongoDB async client is valid for the current event loop.
255
+
256
+ Both Motor's AsyncIOMotorClient and PyMongo's AsyncMongoClient are tied to
257
+ the event loop they were created in. If we detect a new event loop, we need
258
+ to refresh the client.
259
+
260
+ Returns:
261
+ Union[AsyncIOMotorClient, AsyncMongoClient]: A valid client for the current event loop.
262
+ """
263
+ try:
264
+ current_loop = asyncio.get_running_loop()
265
+ except RuntimeError:
266
+ # No running loop, return existing client or create new one
267
+ if self._client is None:
268
+ if self._provided_client is not None:
269
+ self._client = self._provided_client
270
+ elif self.db_url is not None:
271
+ # Create client based on detected type
272
+ if self._client_type == self.CLIENT_TYPE_PYMONGO_ASYNC and PYMONGO_ASYNC_AVAILABLE:
273
+ self._client = AsyncMongoClient(self.db_url) # type: ignore
274
+ elif self._client_type == self.CLIENT_TYPE_MOTOR and MOTOR_AVAILABLE:
275
+ self._client = AsyncIOMotorClient(self.db_url) # type: ignore
276
+ else:
277
+ raise RuntimeError(f"Client type '{self._client_type}' not available")
278
+ return self._client # type: ignore
279
+
280
+ # Check if we're in a different event loop
281
+ if self._event_loop is None or self._event_loop is not current_loop:
282
+ # New event loop detected, create new client
283
+ if self._provided_client is not None:
284
+ # User provided a client, use it but warn them
285
+ client_type_name = (
286
+ "AsyncMongoClient" if self._client_type == self.CLIENT_TYPE_PYMONGO_ASYNC else "AsyncIOMotorClient"
287
+ )
288
+ log_debug(
289
+ f"New event loop detected. Using provided {client_type_name}, "
290
+ "which may cause issues if it was created in a different event loop."
291
+ )
292
+ self._client = self._provided_client
293
+ elif self.db_url is not None:
294
+ if self._client_type == self.CLIENT_TYPE_PYMONGO_ASYNC and PYMONGO_ASYNC_AVAILABLE:
295
+ self._client = AsyncMongoClient(self.db_url) # type: ignore
296
+ elif self._client_type == self.CLIENT_TYPE_MOTOR and MOTOR_AVAILABLE:
297
+ self._client = AsyncIOMotorClient(self.db_url) # type: ignore
298
+ else:
299
+ raise RuntimeError(f"Client type '{self._client_type}' not available")
300
+
301
+ self._event_loop = current_loop
302
+ self._database = None # Reset database reference
303
+ # Clear collection caches and initialization flags when switching event loops
304
+ for attr in list(vars(self).keys()):
305
+ if attr.endswith("_collection") or attr.endswith("_initialized"):
306
+ delattr(self, attr)
307
+
308
+ return self._client # type: ignore
309
+
310
+ @property
311
+ def db_client(self) -> AsyncMongoClientType:
312
+ """Get the MongoDB client, ensuring it's valid for the current event loop."""
313
+ return self._ensure_client()
314
+
315
+ @property
316
+ def database(self) -> AsyncMongoDatabaseType:
317
+ """Get the MongoDB database, ensuring it's valid for the current event loop."""
318
+ try:
319
+ current_loop = asyncio.get_running_loop()
320
+ if self._database is None or self._event_loop != current_loop:
321
+ self._database = self.db_client[self.db_name] # type: ignore
322
+ except RuntimeError:
323
+ # No running loop - fallback to existing database or create new one
324
+ if self._database is None:
325
+ self._database = self.db_client[self.db_name] # type: ignore
326
+ return self._database
327
+
328
+ # -- DB methods --
329
+
330
+ def _should_reset_collection_cache(self) -> bool:
331
+ """Check if collection cache should be reset due to event loop change."""
332
+ try:
333
+ current_loop = asyncio.get_running_loop()
334
+ return self._event_loop is not current_loop
335
+ except RuntimeError:
336
+ return False
337
+
338
+ async def _get_collection(
339
+ self, table_type: str, create_collection_if_not_found: Optional[bool] = True
340
+ ) -> Optional[AsyncMongoCollectionType]:
341
+ """Get or create a collection based on table type.
342
+
343
+ Args:
344
+ table_type (str): The type of table to get or create.
345
+ create_collection_if_not_found (Optional[bool]): Whether to create the collection if it doesn't exist.
346
+
347
+ Returns:
348
+ Union[AsyncIOMotorCollection, AsyncCollection]: The collection object.
349
+ """
350
+ # Ensure client is valid for current event loop before accessing collections
351
+ _ = self.db_client # This triggers _ensure_client()
352
+
353
+ # Check if collections need to be reset due to event loop change
354
+ reset_cache = self._should_reset_collection_cache()
355
+
356
+ if table_type == "sessions":
357
+ if reset_cache or not hasattr(self, "session_collection"):
358
+ if self.session_table_name is None:
359
+ raise ValueError("Session collection was not provided on initialization")
360
+ self.session_collection = await self._get_or_create_collection(
361
+ collection_name=self.session_table_name,
362
+ collection_type="sessions",
363
+ create_collection_if_not_found=create_collection_if_not_found,
364
+ )
365
+ return self.session_collection
366
+
367
+ if table_type == "memories":
368
+ if reset_cache or not hasattr(self, "memory_collection"):
369
+ if self.memory_table_name is None:
370
+ raise ValueError("Memory collection was not provided on initialization")
371
+ self.memory_collection = await self._get_or_create_collection(
372
+ collection_name=self.memory_table_name,
373
+ collection_type="memories",
374
+ create_collection_if_not_found=create_collection_if_not_found,
375
+ )
376
+ return self.memory_collection
377
+
378
+ if table_type == "metrics":
379
+ if reset_cache or not hasattr(self, "metrics_collection"):
380
+ if self.metrics_table_name is None:
381
+ raise ValueError("Metrics collection was not provided on initialization")
382
+ self.metrics_collection = await self._get_or_create_collection(
383
+ collection_name=self.metrics_table_name,
384
+ collection_type="metrics",
385
+ create_collection_if_not_found=create_collection_if_not_found,
386
+ )
387
+ return self.metrics_collection
388
+
389
+ if table_type == "evals":
390
+ if reset_cache or not hasattr(self, "eval_collection"):
391
+ if self.eval_table_name is None:
392
+ raise ValueError("Eval collection was not provided on initialization")
393
+ self.eval_collection = await self._get_or_create_collection(
394
+ collection_name=self.eval_table_name,
395
+ collection_type="evals",
396
+ create_collection_if_not_found=create_collection_if_not_found,
397
+ )
398
+ return self.eval_collection
399
+
400
+ if table_type == "knowledge":
401
+ if reset_cache or not hasattr(self, "knowledge_collection"):
402
+ if self.knowledge_table_name is None:
403
+ raise ValueError("Knowledge collection was not provided on initialization")
404
+ self.knowledge_collection = await self._get_or_create_collection(
405
+ collection_name=self.knowledge_table_name,
406
+ collection_type="knowledge",
407
+ create_collection_if_not_found=create_collection_if_not_found,
408
+ )
409
+ return self.knowledge_collection
410
+
411
+ if table_type == "culture":
412
+ if reset_cache or not hasattr(self, "culture_collection"):
413
+ if self.culture_table_name is None:
414
+ raise ValueError("Culture collection was not provided on initialization")
415
+ self.culture_collection = await self._get_or_create_collection(
416
+ collection_name=self.culture_table_name,
417
+ collection_type="culture",
418
+ create_collection_if_not_found=create_collection_if_not_found,
419
+ )
420
+ return self.culture_collection
421
+
422
+ if table_type == "traces":
423
+ if reset_cache or not hasattr(self, "traces_collection"):
424
+ if self.trace_table_name is None:
425
+ raise ValueError("Traces collection was not provided on initialization")
426
+ self.traces_collection = await self._get_or_create_collection(
427
+ collection_name=self.trace_table_name,
428
+ collection_type="traces",
429
+ create_collection_if_not_found=create_collection_if_not_found,
430
+ )
431
+ return self.traces_collection
432
+
433
+ if table_type == "spans":
434
+ if reset_cache or not hasattr(self, "spans_collection"):
435
+ if self.span_table_name is None:
436
+ raise ValueError("Spans collection was not provided on initialization")
437
+ self.spans_collection = await self._get_or_create_collection(
438
+ collection_name=self.span_table_name,
439
+ collection_type="spans",
440
+ create_collection_if_not_found=create_collection_if_not_found,
441
+ )
442
+ return self.spans_collection
443
+
444
+ raise ValueError(f"Unknown table type: {table_type}")
445
+
446
+ async def _get_or_create_collection(
447
+ self, collection_name: str, collection_type: str, create_collection_if_not_found: Optional[bool] = True
448
+ ) -> Optional[AsyncMongoCollectionType]:
449
+ """Get or create a collection with proper indexes.
450
+
451
+ Args:
452
+ collection_name (str): The name of the collection to get or create.
453
+ collection_type (str): The type of collection to get or create.
454
+ create_collection_if_not_found (Optional[bool]): Whether to create the collection if it doesn't exist.
455
+
456
+ Returns:
457
+ Union[AsyncIOMotorCollection, AsyncCollection]: The collection object.
458
+ """
459
+ try:
460
+ collection = self.database[collection_name]
461
+
462
+ if not hasattr(self, f"_{collection_name}_initialized"):
463
+ if not create_collection_if_not_found:
464
+ return None
465
+ # Create indexes asynchronously for async MongoDB collections
466
+ await create_collection_indexes_async(collection, collection_type)
467
+ setattr(self, f"_{collection_name}_initialized", True)
468
+ log_debug(f"Initialized collection '{collection_name}'")
469
+ else:
470
+ log_debug(f"Collection '{collection_name}' already initialized")
471
+
472
+ return collection
473
+
474
+ except Exception as e:
475
+ log_error(f"Error getting collection {collection_name}: {e}")
476
+ raise
477
+
478
+ def get_latest_schema_version(self):
479
+ """Get the latest version of the database schema."""
480
+ pass
481
+
482
+ def upsert_schema_version(self, version: str) -> None:
483
+ """Upsert the schema version into the database."""
484
+ pass
485
+
486
+ # -- Session methods --
487
+
488
+ async def delete_session(self, session_id: str) -> bool:
489
+ """Delete a session from the database.
490
+
491
+ Args:
492
+ session_id (str): The ID of the session to delete.
493
+
494
+ Returns:
495
+ bool: True if the session was deleted, False otherwise.
496
+
497
+ Raises:
498
+ Exception: If there is an error deleting the session.
499
+ """
500
+ try:
501
+ collection = await self._get_collection(table_type="sessions")
502
+ if collection is None:
503
+ return False
504
+
505
+ result = await collection.delete_one({"session_id": session_id})
506
+ if result.deleted_count == 0:
507
+ log_debug(f"No session found to delete with session_id: {session_id}")
508
+ return False
509
+ else:
510
+ log_debug(f"Successfully deleted session with session_id: {session_id}")
511
+ return True
512
+
513
+ except Exception as e:
514
+ log_error(f"Error deleting session: {e}")
515
+ raise e
516
+
517
+ async def delete_sessions(self, session_ids: List[str]) -> None:
518
+ """Delete multiple sessions from the database.
519
+
520
+ Args:
521
+ session_ids (List[str]): The IDs of the sessions to delete.
522
+ """
523
+ try:
524
+ collection = await self._get_collection(table_type="sessions")
525
+ if collection is None:
526
+ return
527
+
528
+ result = await collection.delete_many({"session_id": {"$in": session_ids}})
529
+ log_debug(f"Successfully deleted {result.deleted_count} sessions")
530
+
531
+ except Exception as e:
532
+ log_error(f"Error deleting sessions: {e}")
533
+ raise e
534
+
535
+ async def get_session(
536
+ self,
537
+ session_id: str,
538
+ session_type: SessionType,
539
+ user_id: Optional[str] = None,
540
+ deserialize: Optional[bool] = True,
541
+ ) -> Optional[Union[Session, Dict[str, Any]]]:
542
+ """Read a session from the database.
543
+
544
+ Args:
545
+ session_id (str): The ID of the session to get.
546
+ session_type (SessionType): The type of session to get.
547
+ user_id (Optional[str]): The ID of the user to get the session for.
548
+ deserialize (Optional[bool]): Whether to serialize the session. Defaults to True.
549
+
550
+ Returns:
551
+ Union[Session, Dict[str, Any], None]:
552
+ - When deserialize=True: Session object
553
+ - When deserialize=False: Session dictionary
554
+
555
+ Raises:
556
+ Exception: If there is an error reading the session.
557
+ """
558
+ try:
559
+ collection = await self._get_collection(table_type="sessions")
560
+ if collection is None:
561
+ return None
562
+
563
+ query = {"session_id": session_id}
564
+ if user_id is not None:
565
+ query["user_id"] = user_id
566
+ if session_type is not None:
567
+ query["session_type"] = session_type
568
+
569
+ result = await collection.find_one(query)
570
+ if result is None:
571
+ return None
572
+
573
+ session = deserialize_session_json_fields(result)
574
+ if not deserialize:
575
+ return session
576
+
577
+ if session_type == SessionType.AGENT:
578
+ return AgentSession.from_dict(session)
579
+ elif session_type == SessionType.TEAM:
580
+ return TeamSession.from_dict(session)
581
+ elif session_type == SessionType.WORKFLOW:
582
+ return WorkflowSession.from_dict(session)
583
+ else:
584
+ raise ValueError(f"Invalid session type: {session_type}")
585
+
586
+ except Exception as e:
587
+ log_error(f"Exception reading session: {e}")
588
+ raise e
589
+
590
+ async def get_sessions(
591
+ self,
592
+ session_type: Optional[SessionType] = None,
593
+ user_id: Optional[str] = None,
594
+ component_id: Optional[str] = None,
595
+ session_name: Optional[str] = None,
596
+ start_timestamp: Optional[int] = None,
597
+ end_timestamp: Optional[int] = None,
598
+ limit: Optional[int] = None,
599
+ page: Optional[int] = None,
600
+ sort_by: Optional[str] = None,
601
+ sort_order: Optional[str] = None,
602
+ deserialize: Optional[bool] = True,
603
+ ) -> Union[List[Session], Tuple[List[Dict[str, Any]], int]]:
604
+ """Get all sessions.
605
+
606
+ Args:
607
+ session_type (Optional[SessionType]): The type of session to get.
608
+ user_id (Optional[str]): The ID of the user to get the session for.
609
+ component_id (Optional[str]): The ID of the component to get the session for.
610
+ session_name (Optional[str]): The name of the session to filter by.
611
+ start_timestamp (Optional[int]): The start timestamp to filter sessions by.
612
+ end_timestamp (Optional[int]): The end timestamp to filter sessions by.
613
+ limit (Optional[int]): The limit of the sessions to get.
614
+ page (Optional[int]): The page number to get.
615
+ sort_by (Optional[str]): The field to sort the sessions by.
616
+ sort_order (Optional[str]): The order to sort the sessions by.
617
+ deserialize (Optional[bool]): Whether to serialize the sessions. Defaults to True.
618
+
619
+ Returns:
620
+ Union[List[AgentSession], List[TeamSession], List[WorkflowSession], Tuple[List[Dict[str, Any]], int]]:
621
+ - When deserialize=True: List of Session objects
622
+ - When deserialize=False: List of session dictionaries and the total count
623
+
624
+ Raises:
625
+ Exception: If there is an error reading the sessions.
626
+ """
627
+ try:
628
+ collection = await self._get_collection(table_type="sessions")
629
+ if collection is None:
630
+ return [] if deserialize else ([], 0)
631
+
632
+ # Filtering
633
+ query: Dict[str, Any] = {}
634
+ if user_id is not None:
635
+ query["user_id"] = user_id
636
+ if session_type is not None:
637
+ query["session_type"] = session_type
638
+ if component_id is not None:
639
+ if session_type == SessionType.AGENT:
640
+ query["agent_id"] = component_id
641
+ elif session_type == SessionType.TEAM:
642
+ query["team_id"] = component_id
643
+ elif session_type == SessionType.WORKFLOW:
644
+ query["workflow_id"] = component_id
645
+ if start_timestamp is not None:
646
+ query["created_at"] = {"$gte": start_timestamp}
647
+ if end_timestamp is not None:
648
+ if "created_at" in query:
649
+ query["created_at"]["$lte"] = end_timestamp
650
+ else:
651
+ query["created_at"] = {"$lte": end_timestamp}
652
+ if session_name is not None:
653
+ query["session_data.session_name"] = {"$regex": session_name, "$options": "i"}
654
+
655
+ # Get total count
656
+ total_count = await collection.count_documents(query)
657
+
658
+ cursor = collection.find(query)
659
+
660
+ # Sorting
661
+ sort_criteria = apply_sorting({}, sort_by, sort_order)
662
+ if sort_criteria:
663
+ cursor = cursor.sort(sort_criteria)
664
+
665
+ # Pagination
666
+ query_args = apply_pagination({}, limit, page)
667
+ if query_args.get("skip"):
668
+ cursor = cursor.skip(query_args["skip"])
669
+ if query_args.get("limit"):
670
+ cursor = cursor.limit(query_args["limit"])
671
+
672
+ records = await cursor.to_list(length=None)
673
+ if records is None:
674
+ return [] if deserialize else ([], 0)
675
+ sessions_raw = [deserialize_session_json_fields(record) for record in records]
676
+
677
+ if not deserialize:
678
+ return sessions_raw, total_count
679
+
680
+ sessions: List[Union[AgentSession, TeamSession, WorkflowSession]] = []
681
+ for record in sessions_raw:
682
+ if session_type == SessionType.AGENT.value:
683
+ agent_session = AgentSession.from_dict(record)
684
+ if agent_session is not None:
685
+ sessions.append(agent_session)
686
+ elif session_type == SessionType.TEAM.value:
687
+ team_session = TeamSession.from_dict(record)
688
+ if team_session is not None:
689
+ sessions.append(team_session)
690
+ elif session_type == SessionType.WORKFLOW.value:
691
+ workflow_session = WorkflowSession.from_dict(record)
692
+ if workflow_session is not None:
693
+ sessions.append(workflow_session)
694
+
695
+ return sessions
696
+
697
+ except Exception as e:
698
+ log_error(f"Exception reading sessions: {e}")
699
+ raise e
700
+
701
+ async def rename_session(
702
+ self, session_id: str, session_type: SessionType, session_name: str, deserialize: Optional[bool] = True
703
+ ) -> Optional[Union[Session, Dict[str, Any]]]:
704
+ """Rename a session in the database.
705
+
706
+ Args:
707
+ session_id (str): The ID of the session to rename.
708
+ session_type (SessionType): The type of session to rename.
709
+ session_name (str): The new name of the session.
710
+ deserialize (Optional[bool]): Whether to serialize the session. Defaults to True.
711
+
712
+ Returns:
713
+ Optional[Union[Session, Dict[str, Any]]]:
714
+ - When deserialize=True: Session object
715
+ - When deserialize=False: Session dictionary
716
+
717
+ Raises:
718
+ Exception: If there is an error renaming the session.
719
+ """
720
+ try:
721
+ collection = await self._get_collection(table_type="sessions")
722
+ if collection is None:
723
+ return None
724
+
725
+ try:
726
+ result = await collection.find_one_and_update(
727
+ {"session_id": session_id},
728
+ {"$set": {"session_data.session_name": session_name, "updated_at": int(time.time())}},
729
+ return_document=ReturnDocument.AFTER,
730
+ upsert=False,
731
+ )
732
+ except OperationFailure:
733
+ # If the update fails because session_data doesn't contain a session_name yet, we initialize session_data
734
+ result = await collection.find_one_and_update(
735
+ {"session_id": session_id},
736
+ {"$set": {"session_data": {"session_name": session_name}, "updated_at": int(time.time())}},
737
+ return_document=ReturnDocument.AFTER,
738
+ upsert=False,
739
+ )
740
+ if not result:
741
+ return None
742
+
743
+ deserialized_session = deserialize_session_json_fields(result)
744
+
745
+ if not deserialize:
746
+ return deserialized_session
747
+
748
+ if session_type == SessionType.AGENT.value:
749
+ return AgentSession.from_dict(deserialized_session)
750
+ elif session_type == SessionType.TEAM.value:
751
+ return TeamSession.from_dict(deserialized_session)
752
+ else:
753
+ return WorkflowSession.from_dict(deserialized_session)
754
+
755
+ except Exception as e:
756
+ log_error(f"Exception renaming session: {e}")
757
+ raise e
758
+
759
+ async def upsert_session(
760
+ self, session: Session, deserialize: Optional[bool] = True
761
+ ) -> Optional[Union[Session, Dict[str, Any]]]:
762
+ """Insert or update a session in the database.
763
+
764
+ Args:
765
+ session (Session): The session to upsert.
766
+ deserialize (Optional[bool]): Whether to deserialize the session. Defaults to True.
767
+
768
+ Returns:
769
+ Optional[Union[Session, Dict[str, Any]]]: The upserted session.
770
+
771
+ Raises:
772
+ Exception: If there is an error upserting the session.
773
+ """
774
+ try:
775
+ collection = await self._get_collection(table_type="sessions", create_collection_if_not_found=True)
776
+ if collection is None:
777
+ return None
778
+
779
+ session_dict = session.to_dict()
780
+
781
+ if isinstance(session, AgentSession):
782
+ record = {
783
+ "session_id": session_dict.get("session_id"),
784
+ "session_type": SessionType.AGENT.value,
785
+ "agent_id": session_dict.get("agent_id"),
786
+ "user_id": session_dict.get("user_id"),
787
+ "runs": session_dict.get("runs"),
788
+ "agent_data": session_dict.get("agent_data"),
789
+ "session_data": session_dict.get("session_data"),
790
+ "summary": session_dict.get("summary"),
791
+ "metadata": session_dict.get("metadata"),
792
+ "created_at": session_dict.get("created_at"),
793
+ "updated_at": int(time.time()),
794
+ }
795
+
796
+ result = await collection.find_one_and_replace(
797
+ filter={"session_id": session_dict.get("session_id")},
798
+ replacement=record,
799
+ upsert=True,
800
+ return_document=ReturnDocument.AFTER,
801
+ )
802
+ if not result:
803
+ return None
804
+
805
+ session = result # type: ignore
806
+
807
+ if not deserialize:
808
+ return session
809
+
810
+ return AgentSession.from_dict(session) # type: ignore
811
+
812
+ elif isinstance(session, TeamSession):
813
+ record = {
814
+ "session_id": session_dict.get("session_id"),
815
+ "session_type": SessionType.TEAM.value,
816
+ "team_id": session_dict.get("team_id"),
817
+ "user_id": session_dict.get("user_id"),
818
+ "runs": session_dict.get("runs"),
819
+ "team_data": session_dict.get("team_data"),
820
+ "session_data": session_dict.get("session_data"),
821
+ "summary": session_dict.get("summary"),
822
+ "metadata": session_dict.get("metadata"),
823
+ "created_at": session_dict.get("created_at"),
824
+ "updated_at": int(time.time()),
825
+ }
826
+
827
+ result = await collection.find_one_and_replace(
828
+ filter={"session_id": session_dict.get("session_id")},
829
+ replacement=record,
830
+ upsert=True,
831
+ return_document=ReturnDocument.AFTER,
832
+ )
833
+ if not result:
834
+ return None
835
+
836
+ # MongoDB stores native objects, no deserialization needed for document fields
837
+ session = result # type: ignore
838
+
839
+ if not deserialize:
840
+ return session
841
+
842
+ return TeamSession.from_dict(session) # type: ignore
843
+
844
+ else:
845
+ record = {
846
+ "session_id": session_dict.get("session_id"),
847
+ "session_type": SessionType.WORKFLOW.value,
848
+ "workflow_id": session_dict.get("workflow_id"),
849
+ "user_id": session_dict.get("user_id"),
850
+ "runs": session_dict.get("runs"),
851
+ "workflow_data": session_dict.get("workflow_data"),
852
+ "session_data": session_dict.get("session_data"),
853
+ "summary": session_dict.get("summary"),
854
+ "metadata": session_dict.get("metadata"),
855
+ "created_at": session_dict.get("created_at"),
856
+ "updated_at": int(time.time()),
857
+ }
858
+
859
+ result = await collection.find_one_and_replace(
860
+ filter={"session_id": session_dict.get("session_id")},
861
+ replacement=record,
862
+ upsert=True,
863
+ return_document=ReturnDocument.AFTER,
864
+ )
865
+ if not result:
866
+ return None
867
+
868
+ session = result # type: ignore
869
+
870
+ if not deserialize:
871
+ return session
872
+
873
+ return WorkflowSession.from_dict(session) # type: ignore
874
+
875
+ except Exception as e:
876
+ log_error(f"Exception upserting session: {e}")
877
+ raise e
878
+
879
+ async def upsert_sessions(
880
+ self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
881
+ ) -> List[Union[Session, Dict[str, Any]]]:
882
+ """
883
+ Bulk upsert multiple sessions for improved performance on large datasets.
884
+
885
+ Args:
886
+ sessions (List[Session]): List of sessions to upsert.
887
+ deserialize (Optional[bool]): Whether to deserialize the sessions. Defaults to True.
888
+ preserve_updated_at (bool): If True, preserve the updated_at from the session object.
889
+
890
+ Returns:
891
+ List[Union[Session, Dict[str, Any]]]: List of upserted sessions.
892
+
893
+ Raises:
894
+ Exception: If an error occurs during bulk upsert.
895
+ """
896
+ if not sessions:
897
+ return []
898
+
899
+ try:
900
+ collection = await self._get_collection(table_type="sessions", create_collection_if_not_found=True)
901
+ if collection is None:
902
+ log_info("Sessions collection not available, falling back to individual upserts")
903
+ return [
904
+ result
905
+ for session in sessions
906
+ if session is not None
907
+ for result in [await self.upsert_session(session, deserialize=deserialize)]
908
+ if result is not None
909
+ ]
910
+
911
+ from pymongo import ReplaceOne
912
+
913
+ operations = []
914
+ results: List[Union[Session, Dict[str, Any]]] = []
915
+
916
+ for session in sessions:
917
+ if session is None:
918
+ continue
919
+
920
+ session_dict = session.to_dict()
921
+
922
+ # Use preserved updated_at if flag is set and value exists, otherwise use current time
923
+ updated_at = session_dict.get("updated_at") if preserve_updated_at else int(time.time())
924
+
925
+ if isinstance(session, AgentSession):
926
+ record = {
927
+ "session_id": session_dict.get("session_id"),
928
+ "session_type": SessionType.AGENT.value,
929
+ "agent_id": session_dict.get("agent_id"),
930
+ "user_id": session_dict.get("user_id"),
931
+ "runs": session_dict.get("runs"),
932
+ "agent_data": session_dict.get("agent_data"),
933
+ "session_data": session_dict.get("session_data"),
934
+ "summary": session_dict.get("summary"),
935
+ "metadata": session_dict.get("metadata"),
936
+ "created_at": session_dict.get("created_at"),
937
+ "updated_at": updated_at,
938
+ }
939
+ elif isinstance(session, TeamSession):
940
+ record = {
941
+ "session_id": session_dict.get("session_id"),
942
+ "session_type": SessionType.TEAM.value,
943
+ "team_id": session_dict.get("team_id"),
944
+ "user_id": session_dict.get("user_id"),
945
+ "runs": session_dict.get("runs"),
946
+ "team_data": session_dict.get("team_data"),
947
+ "session_data": session_dict.get("session_data"),
948
+ "summary": session_dict.get("summary"),
949
+ "metadata": session_dict.get("metadata"),
950
+ "created_at": session_dict.get("created_at"),
951
+ "updated_at": updated_at,
952
+ }
953
+ elif isinstance(session, WorkflowSession):
954
+ record = {
955
+ "session_id": session_dict.get("session_id"),
956
+ "session_type": SessionType.WORKFLOW.value,
957
+ "workflow_id": session_dict.get("workflow_id"),
958
+ "user_id": session_dict.get("user_id"),
959
+ "runs": session_dict.get("runs"),
960
+ "workflow_data": session_dict.get("workflow_data"),
961
+ "session_data": session_dict.get("session_data"),
962
+ "summary": session_dict.get("summary"),
963
+ "metadata": session_dict.get("metadata"),
964
+ "created_at": session_dict.get("created_at"),
965
+ "updated_at": updated_at,
966
+ }
967
+ else:
968
+ continue
969
+
970
+ operations.append(
971
+ ReplaceOne(filter={"session_id": record["session_id"]}, replacement=record, upsert=True)
972
+ )
973
+
974
+ if operations:
975
+ # Execute bulk write
976
+ await collection.bulk_write(operations)
977
+
978
+ # Fetch the results
979
+ session_ids = [session.session_id for session in sessions if session and session.session_id]
980
+ cursor = collection.find({"session_id": {"$in": session_ids}})
981
+
982
+ async for doc in cursor:
983
+ session_dict = doc
984
+
985
+ if deserialize:
986
+ session_type = doc.get("session_type")
987
+ if session_type == SessionType.AGENT.value:
988
+ deserialized_agent_session = AgentSession.from_dict(session_dict)
989
+ if deserialized_agent_session is None:
990
+ continue
991
+ results.append(deserialized_agent_session)
992
+
993
+ elif session_type == SessionType.TEAM.value:
994
+ deserialized_team_session = TeamSession.from_dict(session_dict)
995
+ if deserialized_team_session is None:
996
+ continue
997
+ results.append(deserialized_team_session)
998
+
999
+ elif session_type == SessionType.WORKFLOW.value:
1000
+ deserialized_workflow_session = WorkflowSession.from_dict(session_dict)
1001
+ if deserialized_workflow_session is None:
1002
+ continue
1003
+ results.append(deserialized_workflow_session)
1004
+ else:
1005
+ results.append(session_dict)
1006
+
1007
+ return results
1008
+
1009
+ except Exception as e:
1010
+ log_error(f"Exception during bulk session upsert, falling back to individual upserts: {e}")
1011
+
1012
+ # Fallback to individual upserts
1013
+ return [
1014
+ result
1015
+ for session in sessions
1016
+ if session is not None
1017
+ for result in [await self.upsert_session(session, deserialize=deserialize)]
1018
+ if result is not None
1019
+ ]
1020
+
1021
+ # -- Memory methods --
1022
+
1023
+ async def delete_user_memory(self, memory_id: str, user_id: Optional[str] = None):
1024
+ """Delete a user memory from the database.
1025
+
1026
+ Args:
1027
+ memory_id (str): The ID of the memory to delete.
1028
+ user_id (Optional[str]): The ID of the user to verify ownership. If provided, only delete if the memory belongs to this user.
1029
+
1030
+ Returns:
1031
+ bool: True if the memory was deleted, False otherwise.
1032
+
1033
+ Raises:
1034
+ Exception: If there is an error deleting the memory.
1035
+ """
1036
+ try:
1037
+ collection = await self._get_collection(table_type="memories")
1038
+ if collection is None:
1039
+ return
1040
+
1041
+ query = {"memory_id": memory_id}
1042
+ if user_id is not None:
1043
+ query["user_id"] = user_id
1044
+
1045
+ result = await collection.delete_one(query)
1046
+
1047
+ success = result.deleted_count > 0
1048
+ if success:
1049
+ log_debug(f"Successfully deleted memory id: {memory_id}")
1050
+ else:
1051
+ log_debug(f"No memory found with id: {memory_id}")
1052
+
1053
+ except Exception as e:
1054
+ log_error(f"Error deleting memory: {e}")
1055
+ raise e
1056
+
1057
+ async def delete_user_memories(self, memory_ids: List[str], user_id: Optional[str] = None) -> None:
1058
+ """Delete user memories from the database.
1059
+
1060
+ Args:
1061
+ memory_ids (List[str]): The IDs of the memories to delete.
1062
+ user_id (Optional[str]): The ID of the user to verify ownership. If provided, only delete memories that belong to this user.
1063
+
1064
+ Raises:
1065
+ Exception: If there is an error deleting the memories.
1066
+ """
1067
+ try:
1068
+ collection = await self._get_collection(table_type="memories")
1069
+ if collection is None:
1070
+ return
1071
+
1072
+ query: Dict[str, Any] = {"memory_id": {"$in": memory_ids}}
1073
+ if user_id is not None:
1074
+ query["user_id"] = user_id
1075
+
1076
+ result = await collection.delete_many(query)
1077
+
1078
+ if result.deleted_count == 0:
1079
+ log_debug(f"No memories found with ids: {memory_ids}")
1080
+
1081
+ except Exception as e:
1082
+ log_error(f"Error deleting memories: {e}")
1083
+ raise e
1084
+
1085
+ async def get_all_memory_topics(self, user_id: Optional[str] = None) -> List[str]:
1086
+ """Get all memory topics from the database.
1087
+
1088
+ Args:
1089
+ user_id (Optional[str]): The ID of the user to filter by. Defaults to None.
1090
+
1091
+ Returns:
1092
+ List[str]: The topics.
1093
+
1094
+ Raises:
1095
+ Exception: If there is an error getting the topics.
1096
+ """
1097
+ try:
1098
+ collection = await self._get_collection(table_type="memories")
1099
+ if collection is None:
1100
+ return []
1101
+
1102
+ query = {}
1103
+ if user_id is not None:
1104
+ query["user_id"] = user_id
1105
+
1106
+ topics = await collection.distinct("topics", query)
1107
+ return [topic for topic in topics if topic]
1108
+
1109
+ except Exception as e:
1110
+ log_error(f"Exception reading from collection: {e}")
1111
+ raise e
1112
+
1113
+ async def get_user_memory(
1114
+ self, memory_id: str, deserialize: Optional[bool] = True, user_id: Optional[str] = None
1115
+ ) -> Optional[UserMemory]:
1116
+ """Get a memory from the database.
1117
+
1118
+ Args:
1119
+ memory_id (str): The ID of the memory to get.
1120
+ deserialize (Optional[bool]): Whether to serialize the memory. Defaults to True.
1121
+ user_id (Optional[str]): The ID of the user to verify ownership. If provided, only return the memory if it belongs to this user.
1122
+
1123
+ Returns:
1124
+ Optional[UserMemory]:
1125
+ - When deserialize=True: UserMemory object
1126
+ - When deserialize=False: Memory dictionary
1127
+
1128
+ Raises:
1129
+ Exception: If there is an error getting the memory.
1130
+ """
1131
+ try:
1132
+ collection = await self._get_collection(table_type="memories")
1133
+ if collection is None:
1134
+ return None
1135
+
1136
+ query = {"memory_id": memory_id}
1137
+ if user_id is not None:
1138
+ query["user_id"] = user_id
1139
+
1140
+ result = await collection.find_one(query)
1141
+ if result is None or not deserialize:
1142
+ return result
1143
+
1144
+ # Remove MongoDB's _id field before creating UserMemory object
1145
+ result_filtered = {k: v for k, v in result.items() if k != "_id"}
1146
+ return UserMemory.from_dict(result_filtered)
1147
+
1148
+ except Exception as e:
1149
+ log_error(f"Exception reading from collection: {e}")
1150
+ raise e
1151
+
1152
+ async def get_user_memories(
1153
+ self,
1154
+ user_id: Optional[str] = None,
1155
+ agent_id: Optional[str] = None,
1156
+ team_id: Optional[str] = None,
1157
+ topics: Optional[List[str]] = None,
1158
+ search_content: Optional[str] = None,
1159
+ limit: Optional[int] = None,
1160
+ page: Optional[int] = None,
1161
+ sort_by: Optional[str] = None,
1162
+ sort_order: Optional[str] = None,
1163
+ deserialize: Optional[bool] = True,
1164
+ ) -> Union[List[UserMemory], Tuple[List[Dict[str, Any]], int]]:
1165
+ """Get all memories from the database as UserMemory objects.
1166
+
1167
+ Args:
1168
+ user_id (Optional[str]): The ID of the user to get the memories for.
1169
+ agent_id (Optional[str]): The ID of the agent to get the memories for.
1170
+ team_id (Optional[str]): The ID of the team to get the memories for.
1171
+ topics (Optional[List[str]]): The topics to filter the memories by.
1172
+ search_content (Optional[str]): The content to filter the memories by.
1173
+ limit (Optional[int]): The limit of the memories to get.
1174
+ page (Optional[int]): The page number to get.
1175
+ sort_by (Optional[str]): The field to sort the memories by.
1176
+ sort_order (Optional[str]): The order to sort the memories by.
1177
+ deserialize (Optional[bool]): Whether to serialize the memories. Defaults to True.
1178
+
1179
+ Returns:
1180
+ Union[List[UserMemory], Tuple[List[Dict[str, Any]], int]]:
1181
+ - When deserialize=True: List of UserMemory objects
1182
+ - When deserialize=False: Tuple of (memory dictionaries, total count)
1183
+
1184
+ Raises:
1185
+ Exception: If there is an error getting the memories.
1186
+ """
1187
+ try:
1188
+ collection = await self._get_collection(table_type="memories")
1189
+ if collection is None:
1190
+ return [] if deserialize else ([], 0)
1191
+
1192
+ query: Dict[str, Any] = {}
1193
+ if user_id is not None:
1194
+ query["user_id"] = user_id
1195
+ if agent_id is not None:
1196
+ query["agent_id"] = agent_id
1197
+ if team_id is not None:
1198
+ query["team_id"] = team_id
1199
+ if topics is not None:
1200
+ query["topics"] = {"$in": topics}
1201
+ if search_content is not None:
1202
+ query["memory"] = {"$regex": search_content, "$options": "i"}
1203
+
1204
+ # Get total count
1205
+ total_count = await collection.count_documents(query)
1206
+
1207
+ # Apply sorting
1208
+ sort_criteria = apply_sorting({}, sort_by, sort_order)
1209
+
1210
+ # Apply pagination
1211
+ query_args = apply_pagination({}, limit, page)
1212
+
1213
+ cursor = collection.find(query)
1214
+ if sort_criteria:
1215
+ cursor = cursor.sort(sort_criteria)
1216
+ if query_args.get("skip"):
1217
+ cursor = cursor.skip(query_args["skip"])
1218
+ if query_args.get("limit"):
1219
+ cursor = cursor.limit(query_args["limit"])
1220
+
1221
+ records = await cursor.to_list(length=None)
1222
+ if not deserialize:
1223
+ return records, total_count
1224
+
1225
+ # Remove MongoDB's _id field before creating UserMemory objects
1226
+ return [UserMemory.from_dict({k: v for k, v in record.items() if k != "_id"}) for record in records]
1227
+
1228
+ except Exception as e:
1229
+ log_error(f"Exception reading from collection: {e}")
1230
+ raise e
1231
+
1232
+ async def get_user_memory_stats(
1233
+ self,
1234
+ limit: Optional[int] = None,
1235
+ page: Optional[int] = None,
1236
+ user_id: Optional[str] = None,
1237
+ ) -> Tuple[List[Dict[str, Any]], int]:
1238
+ """Get user memories stats.
1239
+
1240
+ Args:
1241
+ limit (Optional[int]): The limit of the memories to get.
1242
+ page (Optional[int]): The page number to get.
1243
+ user_id (Optional[str]): The ID of the user to filter by. Defaults to None.
1244
+
1245
+ Returns:
1246
+ Tuple[List[Dict[str, Any]], int]: A tuple containing the memories stats and the total count.
1247
+
1248
+ Raises:
1249
+ Exception: If there is an error getting the memories stats.
1250
+ """
1251
+ try:
1252
+ collection = await self._get_collection(table_type="memories")
1253
+ if collection is None:
1254
+ return [], 0
1255
+
1256
+ match_stage: Dict[str, Any] = {"user_id": {"$ne": None}}
1257
+ if user_id is not None:
1258
+ match_stage["user_id"] = user_id
1259
+
1260
+ pipeline = [
1261
+ {"$match": match_stage},
1262
+ {
1263
+ "$group": {
1264
+ "_id": "$user_id",
1265
+ "total_memories": {"$sum": 1},
1266
+ "last_memory_updated_at": {"$max": "$updated_at"},
1267
+ }
1268
+ },
1269
+ {"$sort": {"last_memory_updated_at": -1}},
1270
+ ]
1271
+
1272
+ # Get total count
1273
+ count_pipeline = pipeline + [{"$count": "total"}]
1274
+ count_result = await collection.aggregate(count_pipeline).to_list(length=1)
1275
+ total_count = count_result[0]["total"] if count_result else 0
1276
+
1277
+ # Apply pagination
1278
+ if limit is not None:
1279
+ if page is not None:
1280
+ pipeline.append({"$skip": (page - 1) * limit}) # type: ignore
1281
+ pipeline.append({"$limit": limit}) # type: ignore
1282
+
1283
+ results = await collection.aggregate(pipeline).to_list(length=None)
1284
+
1285
+ formatted_results = [
1286
+ {
1287
+ "user_id": result["_id"],
1288
+ "total_memories": result["total_memories"],
1289
+ "last_memory_updated_at": result["last_memory_updated_at"],
1290
+ }
1291
+ for result in results
1292
+ ]
1293
+
1294
+ return formatted_results, total_count
1295
+
1296
+ except Exception as e:
1297
+ log_error(f"Exception getting user memory stats: {e}")
1298
+ raise e
1299
+
1300
+ async def upsert_user_memory(
1301
+ self, memory: UserMemory, deserialize: Optional[bool] = True
1302
+ ) -> Optional[Union[UserMemory, Dict[str, Any]]]:
1303
+ """Upsert a user memory in the database.
1304
+
1305
+ Args:
1306
+ memory (UserMemory): The memory to upsert.
1307
+ deserialize (Optional[bool]): Whether to serialize the memory. Defaults to True.
1308
+
1309
+ Returns:
1310
+ Optional[Union[UserMemory, Dict[str, Any]]]:
1311
+ - When deserialize=True: UserMemory object
1312
+ - When deserialize=False: Memory dictionary
1313
+
1314
+ Raises:
1315
+ Exception: If there is an error upserting the memory.
1316
+ """
1317
+ try:
1318
+ collection = await self._get_collection(table_type="memories", create_collection_if_not_found=True)
1319
+ if collection is None:
1320
+ return None
1321
+
1322
+ if memory.memory_id is None:
1323
+ memory.memory_id = str(uuid4())
1324
+
1325
+ update_doc = {
1326
+ "user_id": memory.user_id,
1327
+ "agent_id": memory.agent_id,
1328
+ "team_id": memory.team_id,
1329
+ "memory_id": memory.memory_id,
1330
+ "memory": memory.memory,
1331
+ "topics": memory.topics,
1332
+ "updated_at": int(time.time()),
1333
+ }
1334
+
1335
+ result = await collection.replace_one({"memory_id": memory.memory_id}, update_doc, upsert=True)
1336
+
1337
+ if result.upserted_id:
1338
+ update_doc["_id"] = result.upserted_id
1339
+
1340
+ if not deserialize:
1341
+ return update_doc
1342
+
1343
+ # Remove MongoDB's _id field before creating UserMemory object
1344
+ update_doc_filtered = {k: v for k, v in update_doc.items() if k != "_id"}
1345
+ return UserMemory.from_dict(update_doc_filtered)
1346
+
1347
+ except Exception as e:
1348
+ log_error(f"Exception upserting user memory: {e}")
1349
+ raise e
1350
+
1351
+ async def upsert_memories(
1352
+ self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
1353
+ ) -> List[Union[UserMemory, Dict[str, Any]]]:
1354
+ """
1355
+ Bulk upsert multiple user memories for improved performance on large datasets.
1356
+
1357
+ Args:
1358
+ memories (List[UserMemory]): List of memories to upsert.
1359
+ deserialize (Optional[bool]): Whether to deserialize the memories. Defaults to True.
1360
+ preserve_updated_at (bool): If True, preserve the updated_at from the memory object.
1361
+
1362
+ Returns:
1363
+ List[Union[UserMemory, Dict[str, Any]]]: List of upserted memories.
1364
+
1365
+ Raises:
1366
+ Exception: If an error occurs during bulk upsert.
1367
+ """
1368
+ if not memories:
1369
+ return []
1370
+
1371
+ try:
1372
+ collection = await self._get_collection(table_type="memories", create_collection_if_not_found=True)
1373
+ if collection is None:
1374
+ log_info("Memories collection not available, falling back to individual upserts")
1375
+ return [
1376
+ result
1377
+ for memory in memories
1378
+ if memory is not None
1379
+ for result in [await self.upsert_user_memory(memory, deserialize=deserialize)]
1380
+ if result is not None
1381
+ ]
1382
+
1383
+ from pymongo import ReplaceOne
1384
+
1385
+ operations = []
1386
+ results: List[Union[UserMemory, Dict[str, Any]]] = []
1387
+
1388
+ current_time = int(time.time())
1389
+ for memory in memories:
1390
+ if memory is None:
1391
+ continue
1392
+
1393
+ if memory.memory_id is None:
1394
+ memory.memory_id = str(uuid4())
1395
+
1396
+ # Use preserved updated_at if flag is set and value exists, otherwise use current time
1397
+ updated_at = memory.updated_at if preserve_updated_at else current_time
1398
+
1399
+ record = {
1400
+ "user_id": memory.user_id,
1401
+ "agent_id": memory.agent_id,
1402
+ "team_id": memory.team_id,
1403
+ "memory_id": memory.memory_id,
1404
+ "memory": memory.memory,
1405
+ "topics": memory.topics,
1406
+ "input": memory.input,
1407
+ "feedback": memory.feedback,
1408
+ "created_at": memory.created_at,
1409
+ "updated_at": updated_at,
1410
+ }
1411
+
1412
+ operations.append(ReplaceOne(filter={"memory_id": memory.memory_id}, replacement=record, upsert=True))
1413
+
1414
+ if operations:
1415
+ # Execute bulk write
1416
+ await collection.bulk_write(operations)
1417
+
1418
+ # Fetch the results
1419
+ memory_ids = [memory.memory_id for memory in memories if memory and memory.memory_id]
1420
+ cursor = collection.find({"memory_id": {"$in": memory_ids}})
1421
+
1422
+ async for doc in cursor:
1423
+ if deserialize:
1424
+ # Remove MongoDB's _id field before creating UserMemory object
1425
+ doc_filtered = {k: v for k, v in doc.items() if k != "_id"}
1426
+ results.append(UserMemory.from_dict(doc_filtered))
1427
+ else:
1428
+ results.append(doc)
1429
+
1430
+ return results
1431
+
1432
+ except Exception as e:
1433
+ log_error(f"Exception during bulk memory upsert, falling back to individual upserts: {e}")
1434
+
1435
+ # Fallback to individual upserts
1436
+ return [
1437
+ result
1438
+ for memory in memories
1439
+ if memory is not None
1440
+ for result in [await self.upsert_user_memory(memory, deserialize=deserialize)]
1441
+ if result is not None
1442
+ ]
1443
+
1444
+ async def clear_memories(self) -> None:
1445
+ """Delete all memories from the database.
1446
+
1447
+ Raises:
1448
+ Exception: If an error occurs during deletion.
1449
+ """
1450
+ try:
1451
+ collection = await self._get_collection(table_type="memories")
1452
+ if collection is None:
1453
+ return
1454
+
1455
+ await collection.delete_many({})
1456
+
1457
+ except Exception as e:
1458
+ log_error(f"Exception deleting all memories: {e}")
1459
+ raise e
1460
+
1461
+ # -- Cultural Knowledge methods --
1462
+ async def clear_cultural_knowledge(self) -> None:
1463
+ """Delete all cultural knowledge from the database.
1464
+
1465
+ Raises:
1466
+ Exception: If an error occurs during deletion.
1467
+ """
1468
+ try:
1469
+ collection = await self._get_collection(table_type="culture")
1470
+ if collection is None:
1471
+ return
1472
+
1473
+ await collection.delete_many({})
1474
+
1475
+ except Exception as e:
1476
+ log_error(f"Exception deleting all cultural knowledge: {e}")
1477
+ raise e
1478
+
1479
+ async def delete_cultural_knowledge(self, id: str) -> None:
1480
+ """Delete cultural knowledge by ID.
1481
+
1482
+ Args:
1483
+ id (str): The ID of the cultural knowledge to delete.
1484
+
1485
+ Raises:
1486
+ Exception: If an error occurs during deletion.
1487
+ """
1488
+ try:
1489
+ collection = await self._get_collection(table_type="culture")
1490
+ if collection is None:
1491
+ return
1492
+
1493
+ await collection.delete_one({"id": id})
1494
+ log_debug(f"Deleted cultural knowledge with ID: {id}")
1495
+
1496
+ except Exception as e:
1497
+ log_error(f"Error deleting cultural knowledge: {e}")
1498
+ raise e
1499
+
1500
+ async def get_cultural_knowledge(
1501
+ self, id: str, deserialize: Optional[bool] = True
1502
+ ) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
1503
+ """Get cultural knowledge by ID.
1504
+
1505
+ Args:
1506
+ id (str): The ID of the cultural knowledge to retrieve.
1507
+ deserialize (Optional[bool]): Whether to deserialize to CulturalKnowledge object. Defaults to True.
1508
+
1509
+ Returns:
1510
+ Optional[Union[CulturalKnowledge, Dict[str, Any]]]: The cultural knowledge if found, None otherwise.
1511
+
1512
+ Raises:
1513
+ Exception: If an error occurs during retrieval.
1514
+ """
1515
+ try:
1516
+ collection = await self._get_collection(table_type="culture")
1517
+ if collection is None:
1518
+ return None
1519
+
1520
+ result = await collection.find_one({"id": id})
1521
+ if result is None:
1522
+ return None
1523
+
1524
+ # Remove MongoDB's _id field
1525
+ result_filtered = {k: v for k, v in result.items() if k != "_id"}
1526
+
1527
+ if not deserialize:
1528
+ return result_filtered
1529
+
1530
+ return deserialize_cultural_knowledge_from_db(result_filtered)
1531
+
1532
+ except Exception as e:
1533
+ log_error(f"Error getting cultural knowledge: {e}")
1534
+ raise e
1535
+
1536
+ async def get_all_cultural_knowledge(
1537
+ self,
1538
+ agent_id: Optional[str] = None,
1539
+ team_id: Optional[str] = None,
1540
+ name: Optional[str] = None,
1541
+ limit: Optional[int] = None,
1542
+ page: Optional[int] = None,
1543
+ sort_by: Optional[str] = None,
1544
+ sort_order: Optional[str] = None,
1545
+ deserialize: Optional[bool] = True,
1546
+ ) -> Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
1547
+ """Get all cultural knowledge with filtering and pagination.
1548
+
1549
+ Args:
1550
+ agent_id (Optional[str]): Filter by agent ID.
1551
+ team_id (Optional[str]): Filter by team ID.
1552
+ name (Optional[str]): Filter by name (case-insensitive partial match).
1553
+ limit (Optional[int]): Maximum number of results to return.
1554
+ page (Optional[int]): Page number for pagination.
1555
+ sort_by (Optional[str]): Field to sort by.
1556
+ sort_order (Optional[str]): Sort order ('asc' or 'desc').
1557
+ deserialize (Optional[bool]): Whether to deserialize to CulturalKnowledge objects. Defaults to True.
1558
+
1559
+ Returns:
1560
+ Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
1561
+ - When deserialize=True: List of CulturalKnowledge objects
1562
+ - When deserialize=False: Tuple with list of dictionaries and total count
1563
+
1564
+ Raises:
1565
+ Exception: If an error occurs during retrieval.
1566
+ """
1567
+ try:
1568
+ collection = await self._get_collection(table_type="culture")
1569
+ if collection is None:
1570
+ if not deserialize:
1571
+ return [], 0
1572
+ return []
1573
+
1574
+ # Build query
1575
+ query: Dict[str, Any] = {}
1576
+ if agent_id is not None:
1577
+ query["agent_id"] = agent_id
1578
+ if team_id is not None:
1579
+ query["team_id"] = team_id
1580
+ if name is not None:
1581
+ query["name"] = {"$regex": name, "$options": "i"}
1582
+
1583
+ # Get total count for pagination
1584
+ total_count = await collection.count_documents(query)
1585
+
1586
+ # Apply sorting
1587
+ sort_criteria = apply_sorting({}, sort_by, sort_order)
1588
+
1589
+ # Apply pagination
1590
+ query_args = apply_pagination({}, limit, page)
1591
+
1592
+ cursor = collection.find(query)
1593
+ if sort_criteria:
1594
+ cursor = cursor.sort(sort_criteria)
1595
+ if query_args.get("skip"):
1596
+ cursor = cursor.skip(query_args["skip"])
1597
+ if query_args.get("limit"):
1598
+ cursor = cursor.limit(query_args["limit"])
1599
+
1600
+ # Remove MongoDB's _id field from all results
1601
+ results_filtered = [{k: v for k, v in item.items() if k != "_id"} async for item in cursor]
1602
+
1603
+ if not deserialize:
1604
+ return results_filtered, total_count
1605
+
1606
+ return [deserialize_cultural_knowledge_from_db(item) for item in results_filtered]
1607
+
1608
+ except Exception as e:
1609
+ log_error(f"Error getting all cultural knowledge: {e}")
1610
+ raise e
1611
+
1612
+ async def upsert_cultural_knowledge(
1613
+ self, cultural_knowledge: CulturalKnowledge, deserialize: Optional[bool] = True
1614
+ ) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
1615
+ """Upsert cultural knowledge in MongoDB.
1616
+
1617
+ Args:
1618
+ cultural_knowledge (CulturalKnowledge): The cultural knowledge to upsert.
1619
+ deserialize (Optional[bool]): Whether to deserialize the result. Defaults to True.
1620
+
1621
+ Returns:
1622
+ Optional[Union[CulturalKnowledge, Dict[str, Any]]]: The upserted cultural knowledge.
1623
+
1624
+ Raises:
1625
+ Exception: If an error occurs during upsert.
1626
+ """
1627
+ try:
1628
+ collection = await self._get_collection(table_type="culture", create_collection_if_not_found=True)
1629
+ if collection is None:
1630
+ return None
1631
+
1632
+ # Serialize content, categories, and notes into a dict for DB storage
1633
+ content_dict = serialize_cultural_knowledge_for_db(cultural_knowledge)
1634
+
1635
+ # Create the document with serialized content
1636
+ update_doc = {
1637
+ "id": cultural_knowledge.id,
1638
+ "name": cultural_knowledge.name,
1639
+ "summary": cultural_knowledge.summary,
1640
+ "content": content_dict if content_dict else None,
1641
+ "metadata": cultural_knowledge.metadata,
1642
+ "input": cultural_knowledge.input,
1643
+ "created_at": cultural_knowledge.created_at,
1644
+ "updated_at": int(time.time()),
1645
+ "agent_id": cultural_knowledge.agent_id,
1646
+ "team_id": cultural_knowledge.team_id,
1647
+ }
1648
+
1649
+ result = await collection.replace_one({"id": cultural_knowledge.id}, update_doc, upsert=True)
1650
+
1651
+ if result.upserted_id:
1652
+ update_doc["_id"] = result.upserted_id
1653
+
1654
+ # Remove MongoDB's _id field
1655
+ doc_filtered = {k: v for k, v in update_doc.items() if k != "_id"}
1656
+
1657
+ if not deserialize:
1658
+ return doc_filtered
1659
+
1660
+ return deserialize_cultural_knowledge_from_db(doc_filtered)
1661
+
1662
+ except Exception as e:
1663
+ log_error(f"Error upserting cultural knowledge: {e}")
1664
+ raise e
1665
+
1666
+ # -- Metrics methods --
1667
+
1668
+ async def _get_all_sessions_for_metrics_calculation(
1669
+ self, start_timestamp: Optional[int] = None, end_timestamp: Optional[int] = None
1670
+ ) -> List[Dict[str, Any]]:
1671
+ """Get all sessions of all types for metrics calculation."""
1672
+ try:
1673
+ collection = await self._get_collection(table_type="sessions")
1674
+ if collection is None:
1675
+ return []
1676
+
1677
+ query = {}
1678
+ if start_timestamp is not None:
1679
+ query["created_at"] = {"$gte": start_timestamp}
1680
+ if end_timestamp is not None:
1681
+ if "created_at" in query:
1682
+ query["created_at"]["$lte"] = end_timestamp
1683
+ else:
1684
+ query["created_at"] = {"$lte": end_timestamp}
1685
+
1686
+ projection = {
1687
+ "user_id": 1,
1688
+ "session_data": 1,
1689
+ "runs": 1,
1690
+ "created_at": 1,
1691
+ "session_type": 1,
1692
+ }
1693
+
1694
+ results = await collection.find(query, projection).to_list(length=None)
1695
+ return results
1696
+
1697
+ except Exception as e:
1698
+ log_error(f"Exception reading from sessions collection: {e}")
1699
+ return []
1700
+
1701
    async def _get_metrics_calculation_starting_date(self, collection: AsyncMongoCollectionType) -> Optional[date]:
        """Get the first date for which metrics calculation is needed.

        Args:
            collection: The metrics collection to inspect for existing records.

        Returns:
            Optional[date]: The first date still needing metrics, or None when
            there is no session data at all (or an error occurred).
        """
        try:
            # Latest metrics record; dates are stored as "YYYY-MM-DD" strings.
            result = await collection.find_one({}, sort=[("date", -1)], limit=1)

            if result is not None:
                result_date = datetime.strptime(result["date"], "%Y-%m-%d").date()
                # A completed day resumes the day after; an incomplete day is redone.
                if result.get("completed"):
                    return result_date + timedelta(days=1)
                else:
                    return result_date

            # No metrics records. Return the date of the first recorded session.
            first_session_result = await self.get_sessions(
                sort_by="created_at", sort_order="asc", limit=1, deserialize=False
            )
            # get_sessions(deserialize=False) returns (rows, count); take the oldest row's created_at.
            first_session_date = first_session_result[0][0]["created_at"] if first_session_result[0] else None  # type: ignore

            if first_session_date is None:
                return None

            # created_at is a Unix timestamp; interpret it in UTC.
            return datetime.fromtimestamp(first_session_date, tz=timezone.utc).date()

        except Exception as e:
            log_error(f"Exception getting metrics calculation starting date: {e}")
            return None
1727
+
1728
    async def calculate_metrics(self) -> Optional[list[dict]]:
        """Calculate metrics for all dates without complete metrics.

        Returns:
            Optional[list[dict]]: The upserted metrics records, or None when there
            is nothing to calculate.

        Raises:
            Exception: If an error occurs during calculation or upsert.
        """
        try:
            collection = await self._get_collection(table_type="metrics", create_collection_if_not_found=True)
            if collection is None:
                return None

            starting_date = await self._get_metrics_calculation_starting_date(collection)
            if starting_date is None:
                log_info("No session data found. Won't calculate metrics.")
                return None

            dates_to_process = get_dates_to_calculate_metrics_for(starting_date)
            if not dates_to_process:
                log_info("Metrics already calculated for all relevant dates.")
                return None

            # UTC window: midnight of the first date to process through midnight
            # of the day after the last date.
            start_timestamp = int(
                datetime.combine(dates_to_process[0], datetime.min.time()).replace(tzinfo=timezone.utc).timestamp()
            )
            end_timestamp = int(
                datetime.combine(dates_to_process[-1] + timedelta(days=1), datetime.min.time())
                .replace(tzinfo=timezone.utc)
                .timestamp()
            )

            sessions = await self._get_all_sessions_for_metrics_calculation(
                start_timestamp=start_timestamp, end_timestamp=end_timestamp
            )
            # Bucket the fetched sessions by ISO date (and session type) for per-day aggregation.
            all_sessions_data = fetch_all_sessions_data(
                sessions=sessions, dates_to_process=dates_to_process, start_timestamp=start_timestamp
            )
            if not all_sessions_data:
                log_info("No new session data found. Won't calculate metrics.")
                return None

            results = []
            metrics_records = []

            for date_to_process in dates_to_process:
                date_key = date_to_process.isoformat()
                sessions_for_date = all_sessions_data.get(date_key, {})

                # Skip dates with no sessions
                if not any(len(sessions) > 0 for sessions in sessions_for_date.values()):
                    continue

                metrics_record = calculate_date_metrics(date_to_process, sessions_for_date)
                metrics_records.append(metrics_record)

            if metrics_records:
                # NOTE(review): bulk_upsert_metrics is not awaited here — confirm it
                # is a synchronous helper compatible with the async collection type.
                results = bulk_upsert_metrics(collection, metrics_records)  # type: ignore

            return results

        except Exception as e:
            log_error(f"Error calculating metrics: {e}")
            raise e
1786
+
1787
+ async def get_metrics(
1788
+ self,
1789
+ starting_date: Optional[date] = None,
1790
+ ending_date: Optional[date] = None,
1791
+ ) -> Tuple[List[dict], Optional[int]]:
1792
+ """Get all metrics matching the given date range."""
1793
+ try:
1794
+ collection = await self._get_collection(table_type="metrics")
1795
+ if collection is None:
1796
+ return [], None
1797
+
1798
+ query = {}
1799
+ if starting_date:
1800
+ query["date"] = {"$gte": starting_date.isoformat()}
1801
+ if ending_date:
1802
+ if "date" in query:
1803
+ query["date"]["$lte"] = ending_date.isoformat()
1804
+ else:
1805
+ query["date"] = {"$lte": ending_date.isoformat()}
1806
+
1807
+ records = await collection.find(query).to_list(length=None)
1808
+ if not records:
1809
+ return [], None
1810
+
1811
+ # Get the latest updated_at
1812
+ latest_updated_at = max(record.get("updated_at", 0) for record in records)
1813
+
1814
+ return records, latest_updated_at
1815
+
1816
+ except Exception as e:
1817
+ log_error(f"Error getting metrics: {e}")
1818
+ raise e
1819
+
1820
+ # -- Knowledge methods --
1821
+
1822
+ async def delete_knowledge_content(self, id: str):
1823
+ """Delete a knowledge row from the database.
1824
+
1825
+ Args:
1826
+ id (str): The ID of the knowledge row to delete.
1827
+
1828
+ Raises:
1829
+ Exception: If an error occurs during deletion.
1830
+ """
1831
+ try:
1832
+ collection = await self._get_collection(table_type="knowledge")
1833
+ if collection is None:
1834
+ return
1835
+
1836
+ await collection.delete_one({"id": id})
1837
+
1838
+ log_debug(f"Deleted knowledge content with id '{id}'")
1839
+
1840
+ except Exception as e:
1841
+ log_error(f"Error deleting knowledge content: {e}")
1842
+ raise e
1843
+
1844
+ async def get_knowledge_content(self, id: str) -> Optional[KnowledgeRow]:
1845
+ """Get a knowledge row from the database.
1846
+
1847
+ Args:
1848
+ id (str): The ID of the knowledge row to get.
1849
+
1850
+ Returns:
1851
+ Optional[KnowledgeRow]: The knowledge row, or None if it doesn't exist.
1852
+
1853
+ Raises:
1854
+ Exception: If an error occurs during retrieval.
1855
+ """
1856
+ try:
1857
+ collection = await self._get_collection(table_type="knowledge")
1858
+ if collection is None:
1859
+ return None
1860
+
1861
+ result = await collection.find_one({"id": id})
1862
+ if result is None:
1863
+ return None
1864
+
1865
+ return KnowledgeRow.model_validate(result)
1866
+
1867
+ except Exception as e:
1868
+ log_error(f"Error getting knowledge content: {e}")
1869
+ raise e
1870
+
1871
+ async def get_knowledge_contents(
1872
+ self,
1873
+ limit: Optional[int] = None,
1874
+ page: Optional[int] = None,
1875
+ sort_by: Optional[str] = None,
1876
+ sort_order: Optional[str] = None,
1877
+ ) -> Tuple[List[KnowledgeRow], int]:
1878
+ """Get all knowledge contents from the database.
1879
+
1880
+ Args:
1881
+ limit (Optional[int]): The maximum number of knowledge contents to return.
1882
+ page (Optional[int]): The page number.
1883
+ sort_by (Optional[str]): The column to sort by.
1884
+ sort_order (Optional[str]): The order to sort by.
1885
+
1886
+ Returns:
1887
+ Tuple[List[KnowledgeRow], int]: The knowledge contents and total count.
1888
+
1889
+ Raises:
1890
+ Exception: If an error occurs during retrieval.
1891
+ """
1892
+ try:
1893
+ collection = await self._get_collection(table_type="knowledge")
1894
+ if collection is None:
1895
+ return [], 0
1896
+
1897
+ query: Dict[str, Any] = {}
1898
+
1899
+ # Get total count
1900
+ total_count = await collection.count_documents(query)
1901
+
1902
+ # Apply sorting
1903
+ sort_criteria = apply_sorting({}, sort_by, sort_order)
1904
+
1905
+ # Apply pagination
1906
+ query_args = apply_pagination({}, limit, page)
1907
+
1908
+ cursor = collection.find(query)
1909
+ if sort_criteria:
1910
+ cursor = cursor.sort(sort_criteria)
1911
+ if query_args.get("skip"):
1912
+ cursor = cursor.skip(query_args["skip"])
1913
+ if query_args.get("limit"):
1914
+ cursor = cursor.limit(query_args["limit"])
1915
+
1916
+ records = await cursor.to_list(length=None)
1917
+ knowledge_rows = [KnowledgeRow.model_validate(record) for record in records]
1918
+
1919
+ return knowledge_rows, total_count
1920
+
1921
+ except Exception as e:
1922
+ log_error(f"Error getting knowledge contents: {e}")
1923
+ raise e
1924
+
1925
+ async def upsert_knowledge_content(self, knowledge_row: KnowledgeRow):
1926
+ """Upsert knowledge content in the database.
1927
+
1928
+ Args:
1929
+ knowledge_row (KnowledgeRow): The knowledge row to upsert.
1930
+
1931
+ Returns:
1932
+ Optional[KnowledgeRow]: The upserted knowledge row, or None if the operation fails.
1933
+
1934
+ Raises:
1935
+ Exception: If an error occurs during upsert.
1936
+ """
1937
+ try:
1938
+ collection = await self._get_collection(table_type="knowledge", create_collection_if_not_found=True)
1939
+ if collection is None:
1940
+ return None
1941
+
1942
+ update_doc = knowledge_row.model_dump()
1943
+ await collection.replace_one({"id": knowledge_row.id}, update_doc, upsert=True)
1944
+
1945
+ return knowledge_row
1946
+
1947
+ except Exception as e:
1948
+ log_error(f"Error upserting knowledge content: {e}")
1949
+ raise e
1950
+
1951
+ # -- Eval methods --
1952
+
1953
+ async def create_eval_run(self, eval_run: EvalRunRecord) -> Optional[EvalRunRecord]:
1954
+ """Create an EvalRunRecord in the database."""
1955
+ try:
1956
+ collection = await self._get_collection(table_type="evals", create_collection_if_not_found=True)
1957
+ if collection is None:
1958
+ return None
1959
+
1960
+ current_time = int(time.time())
1961
+ eval_dict = eval_run.model_dump()
1962
+ eval_dict["created_at"] = current_time
1963
+ eval_dict["updated_at"] = current_time
1964
+
1965
+ await collection.insert_one(eval_dict)
1966
+
1967
+ log_debug(f"Created eval run with id '{eval_run.run_id}'")
1968
+
1969
+ return eval_run
1970
+
1971
+ except Exception as e:
1972
+ log_error(f"Error creating eval run: {e}")
1973
+ raise e
1974
+
1975
+ async def delete_eval_run(self, eval_run_id: str) -> None:
1976
+ """Delete an eval run from the database."""
1977
+ try:
1978
+ collection = await self._get_collection(table_type="evals")
1979
+ if collection is None:
1980
+ return
1981
+
1982
+ result = await collection.delete_one({"run_id": eval_run_id})
1983
+
1984
+ if result.deleted_count == 0:
1985
+ log_debug(f"No eval run found with ID: {eval_run_id}")
1986
+ else:
1987
+ log_debug(f"Deleted eval run with ID: {eval_run_id}")
1988
+
1989
+ except Exception as e:
1990
+ log_error(f"Error deleting eval run {eval_run_id}: {e}")
1991
+ raise e
1992
+
1993
+ async def delete_eval_runs(self, eval_run_ids: List[str]) -> None:
1994
+ """Delete multiple eval runs from the database."""
1995
+ try:
1996
+ collection = await self._get_collection(table_type="evals")
1997
+ if collection is None:
1998
+ return
1999
+
2000
+ result = await collection.delete_many({"run_id": {"$in": eval_run_ids}})
2001
+
2002
+ if result.deleted_count == 0:
2003
+ log_debug(f"No eval runs found with IDs: {eval_run_ids}")
2004
+ else:
2005
+ log_debug(f"Deleted {result.deleted_count} eval runs")
2006
+
2007
+ except Exception as e:
2008
+ log_error(f"Error deleting eval runs {eval_run_ids}: {e}")
2009
+ raise e
2010
+
2011
+ async def get_eval_run_raw(self, eval_run_id: str) -> Optional[Dict[str, Any]]:
2012
+ """Get an eval run from the database as a raw dictionary."""
2013
+ try:
2014
+ collection = await self._get_collection(table_type="evals")
2015
+ if collection is None:
2016
+ return None
2017
+
2018
+ result = await collection.find_one({"run_id": eval_run_id})
2019
+ return result
2020
+
2021
+ except Exception as e:
2022
+ log_error(f"Exception getting eval run {eval_run_id}: {e}")
2023
+ raise e
2024
+
2025
+ async def get_eval_run(
2026
+ self, eval_run_id: str, deserialize: Optional[bool] = True
2027
+ ) -> Optional[Union[EvalRunRecord, Dict[str, Any]]]:
2028
+ """Get an eval run from the database.
2029
+
2030
+ Args:
2031
+ eval_run_id (str): The ID of the eval run to get.
2032
+ deserialize (Optional[bool]): Whether to serialize the eval run. Defaults to True.
2033
+
2034
+ Returns:
2035
+ Optional[Union[EvalRunRecord, Dict[str, Any]]]:
2036
+ - When deserialize=True: EvalRunRecord object
2037
+ - When deserialize=False: EvalRun dictionary
2038
+
2039
+ Raises:
2040
+ Exception: If there is an error getting the eval run.
2041
+ """
2042
+ try:
2043
+ collection = await self._get_collection(table_type="evals")
2044
+ if collection is None:
2045
+ return None
2046
+
2047
+ eval_run_raw = await collection.find_one({"run_id": eval_run_id})
2048
+
2049
+ if not eval_run_raw:
2050
+ return None
2051
+
2052
+ if not deserialize:
2053
+ return eval_run_raw
2054
+
2055
+ return EvalRunRecord.model_validate(eval_run_raw)
2056
+
2057
+ except Exception as e:
2058
+ log_error(f"Exception getting eval run {eval_run_id}: {e}")
2059
+ raise e
2060
+
2061
    async def get_eval_runs(
        self,
        limit: Optional[int] = None,
        page: Optional[int] = None,
        sort_by: Optional[str] = None,
        sort_order: Optional[str] = None,
        agent_id: Optional[str] = None,
        team_id: Optional[str] = None,
        workflow_id: Optional[str] = None,
        model_id: Optional[str] = None,
        filter_type: Optional[EvalFilterType] = None,
        eval_type: Optional[List[EvalType]] = None,
        deserialize: Optional[bool] = True,
    ) -> Union[List[EvalRunRecord], Tuple[List[Dict[str, Any]], int]]:
        """Get all eval runs from the database.

        Args:
            limit (Optional[int]): The maximum number of eval runs to return.
            page (Optional[int]): The page number to return.
            sort_by (Optional[str]): The field to sort by.
            sort_order (Optional[str]): The order to sort by.
            agent_id (Optional[str]): The ID of the agent to filter by.
            team_id (Optional[str]): The ID of the team to filter by.
            workflow_id (Optional[str]): The ID of the workflow to filter by.
            model_id (Optional[str]): The ID of the model to filter by.
            eval_type (Optional[List[EvalType]]): The type of eval to filter by.
            filter_type (Optional[EvalFilterType]): The type of filter to apply.
            deserialize (Optional[bool]): Whether to deserialize the eval runs. Defaults to True.

        Returns:
            Union[List[EvalRunRecord], Tuple[List[Dict[str, Any]], int]]:
                - When deserialize=True: List of EvalRunRecord objects
                - When deserialize=False: List of eval run dictionaries and the total count

        Raises:
            Exception: If there is an error getting the eval runs.
        """
        try:
            collection = await self._get_collection(table_type="evals")
            if collection is None:
                return [] if deserialize else ([], 0)

            # Exact-match filters for individual component / model IDs.
            query: Dict[str, Any] = {}
            if agent_id is not None:
                query["agent_id"] = agent_id
            if team_id is not None:
                query["team_id"] = team_id
            if workflow_id is not None:
                query["workflow_id"] = workflow_id
            if model_id is not None:
                query["model_id"] = model_id
            if eval_type is not None and len(eval_type) > 0:
                # NOTE(review): eval_type holds EvalType members — assumes they serialize
                # to the stored representation (e.g. str-valued enums); confirm.
                query["eval_type"] = {"$in": eval_type}
            if filter_type is not None:
                # NOTE(review): when both an exact ID filter and the matching filter_type
                # are given, the "$ne: None" filter below overwrites the exact match.
                if filter_type == EvalFilterType.AGENT:
                    query["agent_id"] = {"$ne": None}
                elif filter_type == EvalFilterType.TEAM:
                    query["team_id"] = {"$ne": None}
                elif filter_type == EvalFilterType.WORKFLOW:
                    query["workflow_id"] = {"$ne": None}

            # Get total count
            total_count = await collection.count_documents(query)

            # Apply default sorting by created_at desc if no sort parameters provided
            if sort_by is None:
                sort_criteria = [("created_at", -1)]
            else:
                sort_criteria = apply_sorting({}, sort_by, sort_order)

            # Apply pagination
            query_args = apply_pagination({}, limit, page)

            cursor = collection.find(query)
            if sort_criteria:
                cursor = cursor.sort(sort_criteria)
            if query_args.get("skip"):
                cursor = cursor.skip(query_args["skip"])
            if query_args.get("limit"):
                cursor = cursor.limit(query_args["limit"])

            records = await cursor.to_list(length=None)
            if not records:
                return [] if deserialize else ([], 0)

            if not deserialize:
                return records, total_count

            return [EvalRunRecord.model_validate(row) for row in records]

        except Exception as e:
            log_error(f"Exception getting eval runs: {e}")
            raise e
2154
+
2155
+ async def rename_eval_run(
2156
+ self, eval_run_id: str, name: str, deserialize: Optional[bool] = True
2157
+ ) -> Optional[Union[EvalRunRecord, Dict[str, Any]]]:
2158
+ """Update the name of an eval run in the database.
2159
+
2160
+ Args:
2161
+ eval_run_id (str): The ID of the eval run to update.
2162
+ name (str): The new name of the eval run.
2163
+ deserialize (Optional[bool]): Whether to serialize the eval run. Defaults to True.
2164
+
2165
+ Returns:
2166
+ Optional[Union[EvalRunRecord, Dict[str, Any]]]:
2167
+ - When deserialize=True: EvalRunRecord object
2168
+ - When deserialize=False: EvalRun dictionary
2169
+
2170
+ Raises:
2171
+ Exception: If there is an error updating the eval run.
2172
+ """
2173
+ try:
2174
+ collection = await self._get_collection(table_type="evals")
2175
+ if collection is None:
2176
+ return None
2177
+
2178
+ result = await collection.find_one_and_update(
2179
+ {"run_id": eval_run_id}, {"$set": {"name": name, "updated_at": int(time.time())}}
2180
+ )
2181
+
2182
+ log_debug(f"Renamed eval run with id '{eval_run_id}' to '{name}'")
2183
+
2184
+ if not result or not deserialize:
2185
+ return result
2186
+
2187
+ return EvalRunRecord.model_validate(result)
2188
+
2189
+ except Exception as e:
2190
+ log_error(f"Error updating eval run name {eval_run_id}: {e}")
2191
+ raise e
2192
+
2193
+ # --- Traces ---
2194
+ def _get_component_level(
2195
+ self, workflow_id: Optional[str], team_id: Optional[str], agent_id: Optional[str], name: str
2196
+ ) -> int:
2197
+ """Get the component level for a trace based on its context.
2198
+
2199
+ Component levels (higher = more important):
2200
+ - 3: Workflow root (.run or .arun with workflow_id)
2201
+ - 2: Team root (.run or .arun with team_id)
2202
+ - 1: Agent root (.run or .arun with agent_id)
2203
+ - 0: Child span (not a root)
2204
+
2205
+ Args:
2206
+ workflow_id: The workflow ID of the trace.
2207
+ team_id: The team ID of the trace.
2208
+ agent_id: The agent ID of the trace.
2209
+ name: The name of the trace.
2210
+
2211
+ Returns:
2212
+ int: The component level (0-3).
2213
+ """
2214
+ # Check if name indicates a root span
2215
+ is_root_name = ".run" in name or ".arun" in name
2216
+
2217
+ if not is_root_name:
2218
+ return 0 # Child span (not a root)
2219
+ elif workflow_id:
2220
+ return 3 # Workflow root
2221
+ elif team_id:
2222
+ return 2 # Team root
2223
+ elif agent_id:
2224
+ return 1 # Agent root
2225
+ else:
2226
+ return 0 # Unknown
2227
+
2228
    async def upsert_trace(self, trace: "Trace") -> None:
        """Create or update a single trace record in the database.

        Uses MongoDB's update_one with upsert=True and aggregation pipeline
        to handle concurrent inserts atomically and avoid race conditions.

        Merge semantics on an existing record: created_at is kept, start_time
        keeps the earliest value, end_time keeps the latest, context IDs are
        only filled in when the incoming value is non-null, duration_ms is
        recomputed from the merged times, and name is only replaced when the
        incoming trace comes from a higher-level component (see
        _get_component_level).

        Args:
            trace: The Trace object to store (one per trace_id).
        """
        try:
            collection = await self._get_collection(table_type="traces", create_collection_if_not_found=True)
            if collection is None:
                return

            trace_dict = trace.to_dict()
            # total_spans / error_count are derived from the spans collection at
            # read time, so they are never stored on the trace document.
            trace_dict.pop("total_spans", None)
            trace_dict.pop("error_count", None)

            # Calculate the component level for the new trace
            new_level = self._get_component_level(trace.workflow_id, trace.team_id, trace.agent_id, trace.name)

            # Use MongoDB aggregation pipeline update for atomic upsert
            # This allows conditional logic within a single atomic operation
            pipeline: List[Dict[str, Any]] = [
                {
                    "$set": {
                        # Always update these fields
                        "status": trace.status,
                        "created_at": {"$ifNull": ["$created_at", trace_dict.get("created_at")]},
                        # Use $min for start_time (keep earliest)
                        "start_time": {
                            "$cond": {
                                "if": {"$eq": [{"$type": "$start_time"}, "missing"]},
                                "then": trace_dict.get("start_time"),
                                "else": {"$min": ["$start_time", trace_dict.get("start_time")]},
                            }
                        },
                        # Use $max for end_time (keep latest)
                        "end_time": {
                            "$cond": {
                                "if": {"$eq": [{"$type": "$end_time"}, "missing"]},
                                "then": trace_dict.get("end_time"),
                                "else": {"$max": ["$end_time", trace_dict.get("end_time")]},
                            }
                        },
                        # Preserve existing non-null context values using $ifNull
                        "run_id": {"$ifNull": [trace.run_id, "$run_id"]},
                        "session_id": {"$ifNull": [trace.session_id, "$session_id"]},
                        "user_id": {"$ifNull": [trace.user_id, "$user_id"]},
                        "agent_id": {"$ifNull": [trace.agent_id, "$agent_id"]},
                        "team_id": {"$ifNull": [trace.team_id, "$team_id"]},
                        "workflow_id": {"$ifNull": [trace.workflow_id, "$workflow_id"]},
                    }
                },
                # Second $set stage: runs after the first, so it sees the merged
                # start_time/end_time and context IDs.
                {
                    "$set": {
                        # Calculate duration_ms from the (potentially updated) start_time and end_time
                        # MongoDB stores dates as strings in ISO format, so we need to parse them
                        "duration_ms": {
                            "$cond": {
                                "if": {
                                    "$and": [
                                        {"$ne": [{"$type": "$start_time"}, "missing"]},
                                        {"$ne": [{"$type": "$end_time"}, "missing"]},
                                    ]
                                },
                                "then": {
                                    "$subtract": [
                                        {"$toLong": {"$toDate": "$end_time"}},
                                        {"$toLong": {"$toDate": "$start_time"}},
                                    ]
                                },
                                "else": trace_dict.get("duration_ms", 0),
                            }
                        },
                        # Update name based on component level priority
                        # Only update if new trace is from a higher-level component
                        "name": {
                            "$cond": {
                                "if": {"$eq": [{"$type": "$name"}, "missing"]},
                                "then": trace.name,
                                "else": {
                                    "$cond": {
                                        "if": {
                                            # Server-side re-derivation of the stored
                                            # document's component level, mirroring
                                            # _get_component_level.
                                            "$gt": [
                                                new_level,
                                                {
                                                    "$switch": {
                                                        "branches": [
                                                            # Check if existing name is a root span
                                                            {
                                                                "case": {
                                                                    "$not": {
                                                                        "$or": [
                                                                            {
                                                                                "$regexMatch": {
                                                                                    "input": {"$ifNull": ["$name", ""]},
                                                                                    "regex": "\\.run",
                                                                                }
                                                                            },
                                                                            {
                                                                                "$regexMatch": {
                                                                                    "input": {"$ifNull": ["$name", ""]},
                                                                                    "regex": "\\.arun",
                                                                                }
                                                                            },
                                                                        ]
                                                                    }
                                                                },
                                                                "then": 0,
                                                            },
                                                            # Workflow root (level 3)
                                                            {
                                                                "case": {"$ne": ["$workflow_id", None]},
                                                                "then": 3,
                                                            },
                                                            # Team root (level 2)
                                                            {
                                                                "case": {"$ne": ["$team_id", None]},
                                                                "then": 2,
                                                            },
                                                            # Agent root (level 1)
                                                            {
                                                                "case": {"$ne": ["$agent_id", None]},
                                                                "then": 1,
                                                            },
                                                        ],
                                                        "default": 0,
                                                    }
                                                },
                                            ]
                                        },
                                        "then": trace.name,
                                        "else": "$name",
                                    }
                                },
                            }
                        },
                    }
                },
            ]

            # Perform atomic upsert using aggregation pipeline
            await collection.update_one(
                {"trace_id": trace.trace_id},
                pipeline,
                upsert=True,
            )

        except Exception as e:
            log_error(f"Error creating trace: {e}")
            # Don't raise - tracing should not break the main application flow
2380
+
2381
+ async def get_trace(
2382
+ self,
2383
+ trace_id: Optional[str] = None,
2384
+ run_id: Optional[str] = None,
2385
+ ):
2386
+ """Get a single trace by trace_id or other filters.
2387
+
2388
+ Args:
2389
+ trace_id: The unique trace identifier.
2390
+ run_id: Filter by run ID (returns first match).
2391
+
2392
+ Returns:
2393
+ Optional[Trace]: The trace if found, None otherwise.
2394
+
2395
+ Note:
2396
+ If multiple filters are provided, trace_id takes precedence.
2397
+ For other filters, the most recent trace is returned.
2398
+ """
2399
+ try:
2400
+ from agno.tracing.schemas import Trace as TraceSchema
2401
+
2402
+ collection = await self._get_collection(table_type="traces")
2403
+ if collection is None:
2404
+ return None
2405
+
2406
+ # Get spans collection for aggregation
2407
+ spans_collection = await self._get_collection(table_type="spans")
2408
+
2409
+ query: Dict[str, Any] = {}
2410
+ if trace_id:
2411
+ query["trace_id"] = trace_id
2412
+ elif run_id:
2413
+ query["run_id"] = run_id
2414
+ else:
2415
+ log_debug("get_trace called without any filter parameters")
2416
+ return None
2417
+
2418
+ # Find trace with sorting by most recent
2419
+ result = await collection.find_one(query, sort=[("start_time", -1)])
2420
+
2421
+ if result:
2422
+ # Calculate total_spans and error_count from spans collection
2423
+ total_spans = 0
2424
+ error_count = 0
2425
+ if spans_collection is not None:
2426
+ total_spans = await spans_collection.count_documents({"trace_id": result["trace_id"]})
2427
+ error_count = await spans_collection.count_documents(
2428
+ {"trace_id": result["trace_id"], "status_code": "ERROR"}
2429
+ )
2430
+
2431
+ result["total_spans"] = total_spans
2432
+ result["error_count"] = error_count
2433
+ # Remove MongoDB's _id field
2434
+ result.pop("_id", None)
2435
+ return TraceSchema.from_dict(result)
2436
+ return None
2437
+
2438
+ except Exception as e:
2439
+ log_error(f"Error getting trace: {e}")
2440
+ return None
2441
+
2442
+ async def get_traces(
2443
+ self,
2444
+ run_id: Optional[str] = None,
2445
+ session_id: Optional[str] = None,
2446
+ user_id: Optional[str] = None,
2447
+ agent_id: Optional[str] = None,
2448
+ team_id: Optional[str] = None,
2449
+ workflow_id: Optional[str] = None,
2450
+ status: Optional[str] = None,
2451
+ start_time: Optional[datetime] = None,
2452
+ end_time: Optional[datetime] = None,
2453
+ limit: Optional[int] = 20,
2454
+ page: Optional[int] = 1,
2455
+ ) -> tuple[List, int]:
2456
+ """Get traces matching the provided filters with pagination.
2457
+
2458
+ Args:
2459
+ run_id: Filter by run ID.
2460
+ session_id: Filter by session ID.
2461
+ user_id: Filter by user ID.
2462
+ agent_id: Filter by agent ID.
2463
+ team_id: Filter by team ID.
2464
+ workflow_id: Filter by workflow ID.
2465
+ status: Filter by status (OK, ERROR, UNSET).
2466
+ start_time: Filter traces starting after this datetime.
2467
+ end_time: Filter traces ending before this datetime.
2468
+ limit: Maximum number of traces to return per page.
2469
+ page: Page number (1-indexed).
2470
+
2471
+ Returns:
2472
+ tuple[List[Trace], int]: Tuple of (list of matching traces, total count).
2473
+ """
2474
+ try:
2475
+ from agno.tracing.schemas import Trace as TraceSchema
2476
+
2477
+ collection = await self._get_collection(table_type="traces")
2478
+ if collection is None:
2479
+ log_debug("Traces collection not found")
2480
+ return [], 0
2481
+
2482
+ # Get spans collection for aggregation
2483
+ spans_collection = await self._get_collection(table_type="spans")
2484
+
2485
+ # Build query
2486
+ query: Dict[str, Any] = {}
2487
+ if run_id:
2488
+ query["run_id"] = run_id
2489
+ if session_id:
2490
+ query["session_id"] = session_id
2491
+ if user_id:
2492
+ query["user_id"] = user_id
2493
+ if agent_id:
2494
+ query["agent_id"] = agent_id
2495
+ if team_id:
2496
+ query["team_id"] = team_id
2497
+ if workflow_id:
2498
+ query["workflow_id"] = workflow_id
2499
+ if status:
2500
+ query["status"] = status
2501
+ if start_time:
2502
+ query["start_time"] = {"$gte": start_time.isoformat()}
2503
+ if end_time:
2504
+ if "end_time" in query:
2505
+ query["end_time"]["$lte"] = end_time.isoformat()
2506
+ else:
2507
+ query["end_time"] = {"$lte": end_time.isoformat()}
2508
+
2509
+ # Get total count
2510
+ total_count = await collection.count_documents(query)
2511
+
2512
+ # Apply pagination
2513
+ skip = ((page or 1) - 1) * (limit or 20)
2514
+ cursor = collection.find(query).sort("start_time", -1).skip(skip).limit(limit or 20)
2515
+
2516
+ results = await cursor.to_list(length=None)
2517
+
2518
+ traces = []
2519
+ for row in results:
2520
+ # Calculate total_spans and error_count from spans collection
2521
+ total_spans = 0
2522
+ error_count = 0
2523
+ if spans_collection is not None:
2524
+ total_spans = await spans_collection.count_documents({"trace_id": row["trace_id"]})
2525
+ error_count = await spans_collection.count_documents(
2526
+ {"trace_id": row["trace_id"], "status_code": "ERROR"}
2527
+ )
2528
+
2529
+ row["total_spans"] = total_spans
2530
+ row["error_count"] = error_count
2531
+ # Remove MongoDB's _id field
2532
+ row.pop("_id", None)
2533
+ traces.append(TraceSchema.from_dict(row))
2534
+
2535
+ return traces, total_count
2536
+
2537
+ except Exception as e:
2538
+ log_error(f"Error getting traces: {e}")
2539
+ return [], 0
2540
+
2541
    async def get_trace_stats(
        self,
        user_id: Optional[str] = None,
        agent_id: Optional[str] = None,
        team_id: Optional[str] = None,
        workflow_id: Optional[str] = None,
        start_time: Optional[datetime] = None,
        end_time: Optional[datetime] = None,
        limit: Optional[int] = 20,
        page: Optional[int] = 1,
    ) -> tuple[List[Dict[str, Any]], int]:
        """Get trace statistics grouped by session.

        Args:
            user_id: Filter by user ID.
            agent_id: Filter by agent ID.
            team_id: Filter by team ID.
            workflow_id: Filter by workflow ID.
            start_time: Filter sessions with traces created after this datetime.
            end_time: Filter sessions with traces created before this datetime.
            limit: Maximum number of sessions to return per page.
            page: Page number (1-indexed).

        Returns:
            tuple[List[Dict], int]: Tuple of (list of session stats dicts, total count).
            Each dict contains: session_id, user_id, agent_id, team_id, total_traces,
            workflow_id, first_trace_at, last_trace_at.
        """
        try:
            collection = await self._get_collection(table_type="traces")
            if collection is None:
                log_debug("Traces collection not found")
                return [], 0

            # Build match stage; traces with no session_id are excluded from grouping.
            match_stage: Dict[str, Any] = {"session_id": {"$ne": None}}
            if user_id:
                match_stage["user_id"] = user_id
            if agent_id:
                match_stage["agent_id"] = agent_id
            if team_id:
                match_stage["team_id"] = team_id
            if workflow_id:
                match_stage["workflow_id"] = workflow_id
            # created_at is compared as an ISO-8601 string (parsed back below).
            if start_time:
                match_stage["created_at"] = {"$gte": start_time.isoformat()}
            if end_time:
                if "created_at" in match_stage:
                    match_stage["created_at"]["$lte"] = end_time.isoformat()
                else:
                    match_stage["created_at"] = {"$lte": end_time.isoformat()}

            # Build aggregation pipeline: one output row per session, with trace
            # count and the first/last trace timestamps.
            pipeline: List[Dict[str, Any]] = [
                {"$match": match_stage},
                {
                    "$group": {
                        "_id": "$session_id",
                        "user_id": {"$first": "$user_id"},
                        "agent_id": {"$first": "$agent_id"},
                        "team_id": {"$first": "$team_id"},
                        "workflow_id": {"$first": "$workflow_id"},
                        "total_traces": {"$sum": 1},
                        "first_trace_at": {"$min": "$created_at"},
                        "last_trace_at": {"$max": "$created_at"},
                    }
                },
                {"$sort": {"last_trace_at": -1}},
            ]

            # Get total count (number of distinct sessions after filtering).
            count_pipeline = pipeline + [{"$count": "total"}]
            count_result = await collection.aggregate(count_pipeline).to_list(length=1)
            total_count = count_result[0]["total"] if count_result else 0

            # Apply pagination
            skip = ((page or 1) - 1) * (limit or 20)
            pipeline.append({"$skip": skip})
            pipeline.append({"$limit": limit or 20})

            results = await collection.aggregate(pipeline).to_list(length=None)

            # Convert to list of dicts with datetime objects
            stats_list = []
            for row in results:
                # Convert ISO strings to datetime objects
                first_trace_at_str = row["first_trace_at"]
                last_trace_at_str = row["last_trace_at"]

                # Parse ISO format strings to datetime objects ("Z" suffix is not
                # accepted by fromisoformat on older Pythons, so normalize it).
                first_trace_at = datetime.fromisoformat(first_trace_at_str.replace("Z", "+00:00"))
                last_trace_at = datetime.fromisoformat(last_trace_at_str.replace("Z", "+00:00"))

                stats_list.append(
                    {
                        "session_id": row["_id"],
                        "user_id": row["user_id"],
                        "agent_id": row["agent_id"],
                        "team_id": row["team_id"],
                        "workflow_id": row["workflow_id"],
                        "total_traces": row["total_traces"],
                        "first_trace_at": first_trace_at,
                        "last_trace_at": last_trace_at,
                    }
                )

            return stats_list, total_count

        except Exception as e:
            log_error(f"Error getting trace stats: {e}")
            return [], 0
2652
+
2653
+ # --- Spans ---
2654
+ async def create_span(self, span: "Span") -> None:
2655
+ """Create a single span in the database.
2656
+
2657
+ Args:
2658
+ span: The Span object to store.
2659
+ """
2660
+ try:
2661
+ collection = await self._get_collection(table_type="spans", create_collection_if_not_found=True)
2662
+ if collection is None:
2663
+ return
2664
+
2665
+ await collection.insert_one(span.to_dict())
2666
+
2667
+ except Exception as e:
2668
+ log_error(f"Error creating span: {e}")
2669
+
2670
+ async def create_spans(self, spans: List) -> None:
2671
+ """Create multiple spans in the database as a batch.
2672
+
2673
+ Args:
2674
+ spans: List of Span objects to store.
2675
+ """
2676
+ if not spans:
2677
+ return
2678
+
2679
+ try:
2680
+ collection = await self._get_collection(table_type="spans", create_collection_if_not_found=True)
2681
+ if collection is None:
2682
+ return
2683
+
2684
+ span_dicts = [span.to_dict() for span in spans]
2685
+ await collection.insert_many(span_dicts)
2686
+
2687
+ except Exception as e:
2688
+ log_error(f"Error creating spans batch: {e}")
2689
+
2690
+ async def get_span(self, span_id: str):
2691
+ """Get a single span by its span_id.
2692
+
2693
+ Args:
2694
+ span_id: The unique span identifier.
2695
+
2696
+ Returns:
2697
+ Optional[Span]: The span if found, None otherwise.
2698
+ """
2699
+ try:
2700
+ from agno.tracing.schemas import Span as SpanSchema
2701
+
2702
+ collection = await self._get_collection(table_type="spans")
2703
+ if collection is None:
2704
+ return None
2705
+
2706
+ result = await collection.find_one({"span_id": span_id})
2707
+ if result:
2708
+ # Remove MongoDB's _id field
2709
+ result.pop("_id", None)
2710
+ return SpanSchema.from_dict(result)
2711
+ return None
2712
+
2713
+ except Exception as e:
2714
+ log_error(f"Error getting span: {e}")
2715
+ return None
2716
+
2717
+ async def get_spans(
2718
+ self,
2719
+ trace_id: Optional[str] = None,
2720
+ parent_span_id: Optional[str] = None,
2721
+ limit: Optional[int] = 1000,
2722
+ ) -> List:
2723
+ """Get spans matching the provided filters.
2724
+
2725
+ Args:
2726
+ trace_id: Filter by trace ID.
2727
+ parent_span_id: Filter by parent span ID.
2728
+ limit: Maximum number of spans to return.
2729
+
2730
+ Returns:
2731
+ List[Span]: List of matching spans.
2732
+ """
2733
+ try:
2734
+ from agno.tracing.schemas import Span as SpanSchema
2735
+
2736
+ collection = await self._get_collection(table_type="spans")
2737
+ if collection is None:
2738
+ return []
2739
+
2740
+ # Build query
2741
+ query: Dict[str, Any] = {}
2742
+ if trace_id:
2743
+ query["trace_id"] = trace_id
2744
+ if parent_span_id:
2745
+ query["parent_span_id"] = parent_span_id
2746
+
2747
+ cursor = collection.find(query).limit(limit or 1000)
2748
+ results = await cursor.to_list(length=None)
2749
+
2750
+ spans = []
2751
+ for row in results:
2752
+ # Remove MongoDB's _id field
2753
+ row.pop("_id", None)
2754
+ spans.append(SpanSchema.from_dict(row))
2755
+
2756
+ return spans
2757
+
2758
+ except Exception as e:
2759
+ log_error(f"Error getting spans: {e}")
2760
+ return []