agno-2.2.13-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (575)
  1. agno/__init__.py +8 -0
  2. agno/agent/__init__.py +51 -0
  3. agno/agent/agent.py +10405 -0
  4. agno/api/__init__.py +0 -0
  5. agno/api/agent.py +28 -0
  6. agno/api/api.py +40 -0
  7. agno/api/evals.py +22 -0
  8. agno/api/os.py +17 -0
  9. agno/api/routes.py +13 -0
  10. agno/api/schemas/__init__.py +9 -0
  11. agno/api/schemas/agent.py +16 -0
  12. agno/api/schemas/evals.py +16 -0
  13. agno/api/schemas/os.py +14 -0
  14. agno/api/schemas/response.py +6 -0
  15. agno/api/schemas/team.py +16 -0
  16. agno/api/schemas/utils.py +21 -0
  17. agno/api/schemas/workflows.py +16 -0
  18. agno/api/settings.py +53 -0
  19. agno/api/team.py +30 -0
  20. agno/api/workflow.py +28 -0
  21. agno/cloud/aws/base.py +214 -0
  22. agno/cloud/aws/s3/__init__.py +2 -0
  23. agno/cloud/aws/s3/api_client.py +43 -0
  24. agno/cloud/aws/s3/bucket.py +195 -0
  25. agno/cloud/aws/s3/object.py +57 -0
  26. agno/culture/__init__.py +3 -0
  27. agno/culture/manager.py +956 -0
  28. agno/db/__init__.py +24 -0
  29. agno/db/async_postgres/__init__.py +3 -0
  30. agno/db/base.py +598 -0
  31. agno/db/dynamo/__init__.py +3 -0
  32. agno/db/dynamo/dynamo.py +2042 -0
  33. agno/db/dynamo/schemas.py +314 -0
  34. agno/db/dynamo/utils.py +743 -0
  35. agno/db/firestore/__init__.py +3 -0
  36. agno/db/firestore/firestore.py +1795 -0
  37. agno/db/firestore/schemas.py +140 -0
  38. agno/db/firestore/utils.py +376 -0
  39. agno/db/gcs_json/__init__.py +3 -0
  40. agno/db/gcs_json/gcs_json_db.py +1335 -0
  41. agno/db/gcs_json/utils.py +228 -0
  42. agno/db/in_memory/__init__.py +3 -0
  43. agno/db/in_memory/in_memory_db.py +1160 -0
  44. agno/db/in_memory/utils.py +230 -0
  45. agno/db/json/__init__.py +3 -0
  46. agno/db/json/json_db.py +1328 -0
  47. agno/db/json/utils.py +230 -0
  48. agno/db/migrations/__init__.py +0 -0
  49. agno/db/migrations/v1_to_v2.py +635 -0
  50. agno/db/mongo/__init__.py +17 -0
  51. agno/db/mongo/async_mongo.py +2026 -0
  52. agno/db/mongo/mongo.py +1982 -0
  53. agno/db/mongo/schemas.py +87 -0
  54. agno/db/mongo/utils.py +259 -0
  55. agno/db/mysql/__init__.py +3 -0
  56. agno/db/mysql/mysql.py +2308 -0
  57. agno/db/mysql/schemas.py +138 -0
  58. agno/db/mysql/utils.py +355 -0
  59. agno/db/postgres/__init__.py +4 -0
  60. agno/db/postgres/async_postgres.py +1927 -0
  61. agno/db/postgres/postgres.py +2260 -0
  62. agno/db/postgres/schemas.py +139 -0
  63. agno/db/postgres/utils.py +442 -0
  64. agno/db/redis/__init__.py +3 -0
  65. agno/db/redis/redis.py +1660 -0
  66. agno/db/redis/schemas.py +123 -0
  67. agno/db/redis/utils.py +346 -0
  68. agno/db/schemas/__init__.py +4 -0
  69. agno/db/schemas/culture.py +120 -0
  70. agno/db/schemas/evals.py +33 -0
  71. agno/db/schemas/knowledge.py +40 -0
  72. agno/db/schemas/memory.py +46 -0
  73. agno/db/schemas/metrics.py +0 -0
  74. agno/db/singlestore/__init__.py +3 -0
  75. agno/db/singlestore/schemas.py +130 -0
  76. agno/db/singlestore/singlestore.py +2272 -0
  77. agno/db/singlestore/utils.py +384 -0
  78. agno/db/sqlite/__init__.py +4 -0
  79. agno/db/sqlite/async_sqlite.py +2293 -0
  80. agno/db/sqlite/schemas.py +133 -0
  81. agno/db/sqlite/sqlite.py +2288 -0
  82. agno/db/sqlite/utils.py +431 -0
  83. agno/db/surrealdb/__init__.py +3 -0
  84. agno/db/surrealdb/metrics.py +292 -0
  85. agno/db/surrealdb/models.py +309 -0
  86. agno/db/surrealdb/queries.py +71 -0
  87. agno/db/surrealdb/surrealdb.py +1353 -0
  88. agno/db/surrealdb/utils.py +147 -0
  89. agno/db/utils.py +116 -0
  90. agno/debug.py +18 -0
  91. agno/eval/__init__.py +14 -0
  92. agno/eval/accuracy.py +834 -0
  93. agno/eval/performance.py +773 -0
  94. agno/eval/reliability.py +306 -0
  95. agno/eval/utils.py +119 -0
  96. agno/exceptions.py +161 -0
  97. agno/filters.py +354 -0
  98. agno/guardrails/__init__.py +6 -0
  99. agno/guardrails/base.py +19 -0
  100. agno/guardrails/openai.py +144 -0
  101. agno/guardrails/pii.py +94 -0
  102. agno/guardrails/prompt_injection.py +52 -0
  103. agno/integrations/__init__.py +0 -0
  104. agno/integrations/discord/__init__.py +3 -0
  105. agno/integrations/discord/client.py +203 -0
  106. agno/knowledge/__init__.py +5 -0
  107. agno/knowledge/chunking/__init__.py +0 -0
  108. agno/knowledge/chunking/agentic.py +79 -0
  109. agno/knowledge/chunking/document.py +91 -0
  110. agno/knowledge/chunking/fixed.py +57 -0
  111. agno/knowledge/chunking/markdown.py +151 -0
  112. agno/knowledge/chunking/recursive.py +63 -0
  113. agno/knowledge/chunking/row.py +39 -0
  114. agno/knowledge/chunking/semantic.py +86 -0
  115. agno/knowledge/chunking/strategy.py +165 -0
  116. agno/knowledge/content.py +74 -0
  117. agno/knowledge/document/__init__.py +5 -0
  118. agno/knowledge/document/base.py +58 -0
  119. agno/knowledge/embedder/__init__.py +5 -0
  120. agno/knowledge/embedder/aws_bedrock.py +343 -0
  121. agno/knowledge/embedder/azure_openai.py +210 -0
  122. agno/knowledge/embedder/base.py +23 -0
  123. agno/knowledge/embedder/cohere.py +323 -0
  124. agno/knowledge/embedder/fastembed.py +62 -0
  125. agno/knowledge/embedder/fireworks.py +13 -0
  126. agno/knowledge/embedder/google.py +258 -0
  127. agno/knowledge/embedder/huggingface.py +94 -0
  128. agno/knowledge/embedder/jina.py +182 -0
  129. agno/knowledge/embedder/langdb.py +22 -0
  130. agno/knowledge/embedder/mistral.py +206 -0
  131. agno/knowledge/embedder/nebius.py +13 -0
  132. agno/knowledge/embedder/ollama.py +154 -0
  133. agno/knowledge/embedder/openai.py +195 -0
  134. agno/knowledge/embedder/sentence_transformer.py +63 -0
  135. agno/knowledge/embedder/together.py +13 -0
  136. agno/knowledge/embedder/vllm.py +262 -0
  137. agno/knowledge/embedder/voyageai.py +165 -0
  138. agno/knowledge/knowledge.py +1988 -0
  139. agno/knowledge/reader/__init__.py +7 -0
  140. agno/knowledge/reader/arxiv_reader.py +81 -0
  141. agno/knowledge/reader/base.py +95 -0
  142. agno/knowledge/reader/csv_reader.py +166 -0
  143. agno/knowledge/reader/docx_reader.py +82 -0
  144. agno/knowledge/reader/field_labeled_csv_reader.py +292 -0
  145. agno/knowledge/reader/firecrawl_reader.py +201 -0
  146. agno/knowledge/reader/json_reader.py +87 -0
  147. agno/knowledge/reader/markdown_reader.py +137 -0
  148. agno/knowledge/reader/pdf_reader.py +431 -0
  149. agno/knowledge/reader/pptx_reader.py +101 -0
  150. agno/knowledge/reader/reader_factory.py +313 -0
  151. agno/knowledge/reader/s3_reader.py +89 -0
  152. agno/knowledge/reader/tavily_reader.py +194 -0
  153. agno/knowledge/reader/text_reader.py +115 -0
  154. agno/knowledge/reader/web_search_reader.py +372 -0
  155. agno/knowledge/reader/website_reader.py +455 -0
  156. agno/knowledge/reader/wikipedia_reader.py +59 -0
  157. agno/knowledge/reader/youtube_reader.py +78 -0
  158. agno/knowledge/remote_content/__init__.py +0 -0
  159. agno/knowledge/remote_content/remote_content.py +88 -0
  160. agno/knowledge/reranker/__init__.py +3 -0
  161. agno/knowledge/reranker/base.py +14 -0
  162. agno/knowledge/reranker/cohere.py +64 -0
  163. agno/knowledge/reranker/infinity.py +195 -0
  164. agno/knowledge/reranker/sentence_transformer.py +54 -0
  165. agno/knowledge/types.py +39 -0
  166. agno/knowledge/utils.py +189 -0
  167. agno/media.py +462 -0
  168. agno/memory/__init__.py +3 -0
  169. agno/memory/manager.py +1327 -0
  170. agno/models/__init__.py +0 -0
  171. agno/models/aimlapi/__init__.py +5 -0
  172. agno/models/aimlapi/aimlapi.py +45 -0
  173. agno/models/anthropic/__init__.py +5 -0
  174. agno/models/anthropic/claude.py +757 -0
  175. agno/models/aws/__init__.py +15 -0
  176. agno/models/aws/bedrock.py +701 -0
  177. agno/models/aws/claude.py +378 -0
  178. agno/models/azure/__init__.py +18 -0
  179. agno/models/azure/ai_foundry.py +485 -0
  180. agno/models/azure/openai_chat.py +131 -0
  181. agno/models/base.py +2175 -0
  182. agno/models/cerebras/__init__.py +12 -0
  183. agno/models/cerebras/cerebras.py +501 -0
  184. agno/models/cerebras/cerebras_openai.py +112 -0
  185. agno/models/cohere/__init__.py +5 -0
  186. agno/models/cohere/chat.py +389 -0
  187. agno/models/cometapi/__init__.py +5 -0
  188. agno/models/cometapi/cometapi.py +57 -0
  189. agno/models/dashscope/__init__.py +5 -0
  190. agno/models/dashscope/dashscope.py +91 -0
  191. agno/models/deepinfra/__init__.py +5 -0
  192. agno/models/deepinfra/deepinfra.py +28 -0
  193. agno/models/deepseek/__init__.py +5 -0
  194. agno/models/deepseek/deepseek.py +61 -0
  195. agno/models/defaults.py +1 -0
  196. agno/models/fireworks/__init__.py +5 -0
  197. agno/models/fireworks/fireworks.py +26 -0
  198. agno/models/google/__init__.py +5 -0
  199. agno/models/google/gemini.py +1085 -0
  200. agno/models/groq/__init__.py +5 -0
  201. agno/models/groq/groq.py +556 -0
  202. agno/models/huggingface/__init__.py +5 -0
  203. agno/models/huggingface/huggingface.py +491 -0
  204. agno/models/ibm/__init__.py +5 -0
  205. agno/models/ibm/watsonx.py +422 -0
  206. agno/models/internlm/__init__.py +3 -0
  207. agno/models/internlm/internlm.py +26 -0
  208. agno/models/langdb/__init__.py +1 -0
  209. agno/models/langdb/langdb.py +48 -0
  210. agno/models/litellm/__init__.py +14 -0
  211. agno/models/litellm/chat.py +468 -0
  212. agno/models/litellm/litellm_openai.py +25 -0
  213. agno/models/llama_cpp/__init__.py +5 -0
  214. agno/models/llama_cpp/llama_cpp.py +22 -0
  215. agno/models/lmstudio/__init__.py +5 -0
  216. agno/models/lmstudio/lmstudio.py +25 -0
  217. agno/models/message.py +434 -0
  218. agno/models/meta/__init__.py +12 -0
  219. agno/models/meta/llama.py +475 -0
  220. agno/models/meta/llama_openai.py +78 -0
  221. agno/models/metrics.py +120 -0
  222. agno/models/mistral/__init__.py +5 -0
  223. agno/models/mistral/mistral.py +432 -0
  224. agno/models/nebius/__init__.py +3 -0
  225. agno/models/nebius/nebius.py +54 -0
  226. agno/models/nexus/__init__.py +3 -0
  227. agno/models/nexus/nexus.py +22 -0
  228. agno/models/nvidia/__init__.py +5 -0
  229. agno/models/nvidia/nvidia.py +28 -0
  230. agno/models/ollama/__init__.py +5 -0
  231. agno/models/ollama/chat.py +441 -0
  232. agno/models/openai/__init__.py +9 -0
  233. agno/models/openai/chat.py +883 -0
  234. agno/models/openai/like.py +27 -0
  235. agno/models/openai/responses.py +1050 -0
  236. agno/models/openrouter/__init__.py +5 -0
  237. agno/models/openrouter/openrouter.py +66 -0
  238. agno/models/perplexity/__init__.py +5 -0
  239. agno/models/perplexity/perplexity.py +187 -0
  240. agno/models/portkey/__init__.py +3 -0
  241. agno/models/portkey/portkey.py +81 -0
  242. agno/models/requesty/__init__.py +5 -0
  243. agno/models/requesty/requesty.py +52 -0
  244. agno/models/response.py +199 -0
  245. agno/models/sambanova/__init__.py +5 -0
  246. agno/models/sambanova/sambanova.py +28 -0
  247. agno/models/siliconflow/__init__.py +5 -0
  248. agno/models/siliconflow/siliconflow.py +25 -0
  249. agno/models/together/__init__.py +5 -0
  250. agno/models/together/together.py +25 -0
  251. agno/models/utils.py +266 -0
  252. agno/models/vercel/__init__.py +3 -0
  253. agno/models/vercel/v0.py +26 -0
  254. agno/models/vertexai/__init__.py +0 -0
  255. agno/models/vertexai/claude.py +70 -0
  256. agno/models/vllm/__init__.py +3 -0
  257. agno/models/vllm/vllm.py +78 -0
  258. agno/models/xai/__init__.py +3 -0
  259. agno/models/xai/xai.py +113 -0
  260. agno/os/__init__.py +3 -0
  261. agno/os/app.py +876 -0
  262. agno/os/auth.py +57 -0
  263. agno/os/config.py +104 -0
  264. agno/os/interfaces/__init__.py +1 -0
  265. agno/os/interfaces/a2a/__init__.py +3 -0
  266. agno/os/interfaces/a2a/a2a.py +42 -0
  267. agno/os/interfaces/a2a/router.py +250 -0
  268. agno/os/interfaces/a2a/utils.py +924 -0
  269. agno/os/interfaces/agui/__init__.py +3 -0
  270. agno/os/interfaces/agui/agui.py +47 -0
  271. agno/os/interfaces/agui/router.py +144 -0
  272. agno/os/interfaces/agui/utils.py +534 -0
  273. agno/os/interfaces/base.py +25 -0
  274. agno/os/interfaces/slack/__init__.py +3 -0
  275. agno/os/interfaces/slack/router.py +148 -0
  276. agno/os/interfaces/slack/security.py +30 -0
  277. agno/os/interfaces/slack/slack.py +47 -0
  278. agno/os/interfaces/whatsapp/__init__.py +3 -0
  279. agno/os/interfaces/whatsapp/router.py +211 -0
  280. agno/os/interfaces/whatsapp/security.py +53 -0
  281. agno/os/interfaces/whatsapp/whatsapp.py +36 -0
  282. agno/os/mcp.py +292 -0
  283. agno/os/middleware/__init__.py +7 -0
  284. agno/os/middleware/jwt.py +233 -0
  285. agno/os/router.py +1763 -0
  286. agno/os/routers/__init__.py +3 -0
  287. agno/os/routers/evals/__init__.py +3 -0
  288. agno/os/routers/evals/evals.py +430 -0
  289. agno/os/routers/evals/schemas.py +142 -0
  290. agno/os/routers/evals/utils.py +162 -0
  291. agno/os/routers/health.py +31 -0
  292. agno/os/routers/home.py +52 -0
  293. agno/os/routers/knowledge/__init__.py +3 -0
  294. agno/os/routers/knowledge/knowledge.py +997 -0
  295. agno/os/routers/knowledge/schemas.py +178 -0
  296. agno/os/routers/memory/__init__.py +3 -0
  297. agno/os/routers/memory/memory.py +515 -0
  298. agno/os/routers/memory/schemas.py +62 -0
  299. agno/os/routers/metrics/__init__.py +3 -0
  300. agno/os/routers/metrics/metrics.py +190 -0
  301. agno/os/routers/metrics/schemas.py +47 -0
  302. agno/os/routers/session/__init__.py +3 -0
  303. agno/os/routers/session/session.py +997 -0
  304. agno/os/schema.py +1055 -0
  305. agno/os/settings.py +43 -0
  306. agno/os/utils.py +630 -0
  307. agno/py.typed +0 -0
  308. agno/reasoning/__init__.py +0 -0
  309. agno/reasoning/anthropic.py +80 -0
  310. agno/reasoning/azure_ai_foundry.py +67 -0
  311. agno/reasoning/deepseek.py +63 -0
  312. agno/reasoning/default.py +97 -0
  313. agno/reasoning/gemini.py +73 -0
  314. agno/reasoning/groq.py +71 -0
  315. agno/reasoning/helpers.py +63 -0
  316. agno/reasoning/ollama.py +67 -0
  317. agno/reasoning/openai.py +86 -0
  318. agno/reasoning/step.py +31 -0
  319. agno/reasoning/vertexai.py +76 -0
  320. agno/run/__init__.py +6 -0
  321. agno/run/agent.py +787 -0
  322. agno/run/base.py +229 -0
  323. agno/run/cancel.py +81 -0
  324. agno/run/messages.py +32 -0
  325. agno/run/team.py +753 -0
  326. agno/run/workflow.py +708 -0
  327. agno/session/__init__.py +10 -0
  328. agno/session/agent.py +295 -0
  329. agno/session/summary.py +265 -0
  330. agno/session/team.py +392 -0
  331. agno/session/workflow.py +205 -0
  332. agno/team/__init__.py +37 -0
  333. agno/team/team.py +8793 -0
  334. agno/tools/__init__.py +10 -0
  335. agno/tools/agentql.py +120 -0
  336. agno/tools/airflow.py +69 -0
  337. agno/tools/api.py +122 -0
  338. agno/tools/apify.py +314 -0
  339. agno/tools/arxiv.py +127 -0
  340. agno/tools/aws_lambda.py +53 -0
  341. agno/tools/aws_ses.py +66 -0
  342. agno/tools/baidusearch.py +89 -0
  343. agno/tools/bitbucket.py +292 -0
  344. agno/tools/brandfetch.py +213 -0
  345. agno/tools/bravesearch.py +106 -0
  346. agno/tools/brightdata.py +367 -0
  347. agno/tools/browserbase.py +209 -0
  348. agno/tools/calcom.py +255 -0
  349. agno/tools/calculator.py +151 -0
  350. agno/tools/cartesia.py +187 -0
  351. agno/tools/clickup.py +244 -0
  352. agno/tools/confluence.py +240 -0
  353. agno/tools/crawl4ai.py +158 -0
  354. agno/tools/csv_toolkit.py +185 -0
  355. agno/tools/dalle.py +110 -0
  356. agno/tools/daytona.py +475 -0
  357. agno/tools/decorator.py +262 -0
  358. agno/tools/desi_vocal.py +108 -0
  359. agno/tools/discord.py +161 -0
  360. agno/tools/docker.py +716 -0
  361. agno/tools/duckdb.py +379 -0
  362. agno/tools/duckduckgo.py +91 -0
  363. agno/tools/e2b.py +703 -0
  364. agno/tools/eleven_labs.py +196 -0
  365. agno/tools/email.py +67 -0
  366. agno/tools/evm.py +129 -0
  367. agno/tools/exa.py +396 -0
  368. agno/tools/fal.py +127 -0
  369. agno/tools/file.py +240 -0
  370. agno/tools/file_generation.py +350 -0
  371. agno/tools/financial_datasets.py +288 -0
  372. agno/tools/firecrawl.py +143 -0
  373. agno/tools/function.py +1187 -0
  374. agno/tools/giphy.py +93 -0
  375. agno/tools/github.py +1760 -0
  376. agno/tools/gmail.py +922 -0
  377. agno/tools/google_bigquery.py +117 -0
  378. agno/tools/google_drive.py +270 -0
  379. agno/tools/google_maps.py +253 -0
  380. agno/tools/googlecalendar.py +674 -0
  381. agno/tools/googlesearch.py +98 -0
  382. agno/tools/googlesheets.py +377 -0
  383. agno/tools/hackernews.py +77 -0
  384. agno/tools/jina.py +101 -0
  385. agno/tools/jira.py +170 -0
  386. agno/tools/knowledge.py +218 -0
  387. agno/tools/linear.py +426 -0
  388. agno/tools/linkup.py +58 -0
  389. agno/tools/local_file_system.py +90 -0
  390. agno/tools/lumalab.py +183 -0
  391. agno/tools/mcp/__init__.py +10 -0
  392. agno/tools/mcp/mcp.py +331 -0
  393. agno/tools/mcp/multi_mcp.py +347 -0
  394. agno/tools/mcp/params.py +24 -0
  395. agno/tools/mcp_toolbox.py +284 -0
  396. agno/tools/mem0.py +193 -0
  397. agno/tools/memori.py +339 -0
  398. agno/tools/memory.py +419 -0
  399. agno/tools/mlx_transcribe.py +139 -0
  400. agno/tools/models/__init__.py +0 -0
  401. agno/tools/models/azure_openai.py +190 -0
  402. agno/tools/models/gemini.py +203 -0
  403. agno/tools/models/groq.py +158 -0
  404. agno/tools/models/morph.py +186 -0
  405. agno/tools/models/nebius.py +124 -0
  406. agno/tools/models_labs.py +195 -0
  407. agno/tools/moviepy_video.py +349 -0
  408. agno/tools/neo4j.py +134 -0
  409. agno/tools/newspaper.py +46 -0
  410. agno/tools/newspaper4k.py +93 -0
  411. agno/tools/notion.py +204 -0
  412. agno/tools/openai.py +202 -0
  413. agno/tools/openbb.py +160 -0
  414. agno/tools/opencv.py +321 -0
  415. agno/tools/openweather.py +233 -0
  416. agno/tools/oxylabs.py +385 -0
  417. agno/tools/pandas.py +102 -0
  418. agno/tools/parallel.py +314 -0
  419. agno/tools/postgres.py +257 -0
  420. agno/tools/pubmed.py +188 -0
  421. agno/tools/python.py +205 -0
  422. agno/tools/reasoning.py +283 -0
  423. agno/tools/reddit.py +467 -0
  424. agno/tools/replicate.py +117 -0
  425. agno/tools/resend.py +62 -0
  426. agno/tools/scrapegraph.py +222 -0
  427. agno/tools/searxng.py +152 -0
  428. agno/tools/serpapi.py +116 -0
  429. agno/tools/serper.py +255 -0
  430. agno/tools/shell.py +53 -0
  431. agno/tools/slack.py +136 -0
  432. agno/tools/sleep.py +20 -0
  433. agno/tools/spider.py +116 -0
  434. agno/tools/sql.py +154 -0
  435. agno/tools/streamlit/__init__.py +0 -0
  436. agno/tools/streamlit/components.py +113 -0
  437. agno/tools/tavily.py +254 -0
  438. agno/tools/telegram.py +48 -0
  439. agno/tools/todoist.py +218 -0
  440. agno/tools/tool_registry.py +1 -0
  441. agno/tools/toolkit.py +146 -0
  442. agno/tools/trafilatura.py +388 -0
  443. agno/tools/trello.py +274 -0
  444. agno/tools/twilio.py +186 -0
  445. agno/tools/user_control_flow.py +78 -0
  446. agno/tools/valyu.py +228 -0
  447. agno/tools/visualization.py +467 -0
  448. agno/tools/webbrowser.py +28 -0
  449. agno/tools/webex.py +76 -0
  450. agno/tools/website.py +54 -0
  451. agno/tools/webtools.py +45 -0
  452. agno/tools/whatsapp.py +286 -0
  453. agno/tools/wikipedia.py +63 -0
  454. agno/tools/workflow.py +278 -0
  455. agno/tools/x.py +335 -0
  456. agno/tools/yfinance.py +257 -0
  457. agno/tools/youtube.py +184 -0
  458. agno/tools/zendesk.py +82 -0
  459. agno/tools/zep.py +454 -0
  460. agno/tools/zoom.py +382 -0
  461. agno/utils/__init__.py +0 -0
  462. agno/utils/agent.py +820 -0
  463. agno/utils/audio.py +49 -0
  464. agno/utils/certs.py +27 -0
  465. agno/utils/code_execution.py +11 -0
  466. agno/utils/common.py +132 -0
  467. agno/utils/dttm.py +13 -0
  468. agno/utils/enum.py +22 -0
  469. agno/utils/env.py +11 -0
  470. agno/utils/events.py +696 -0
  471. agno/utils/format_str.py +16 -0
  472. agno/utils/functions.py +166 -0
  473. agno/utils/gemini.py +426 -0
  474. agno/utils/hooks.py +57 -0
  475. agno/utils/http.py +74 -0
  476. agno/utils/json_schema.py +234 -0
  477. agno/utils/knowledge.py +36 -0
  478. agno/utils/location.py +19 -0
  479. agno/utils/log.py +255 -0
  480. agno/utils/mcp.py +214 -0
  481. agno/utils/media.py +352 -0
  482. agno/utils/merge_dict.py +41 -0
  483. agno/utils/message.py +118 -0
  484. agno/utils/models/__init__.py +0 -0
  485. agno/utils/models/ai_foundry.py +43 -0
  486. agno/utils/models/claude.py +358 -0
  487. agno/utils/models/cohere.py +87 -0
  488. agno/utils/models/llama.py +78 -0
  489. agno/utils/models/mistral.py +98 -0
  490. agno/utils/models/openai_responses.py +140 -0
  491. agno/utils/models/schema_utils.py +153 -0
  492. agno/utils/models/watsonx.py +41 -0
  493. agno/utils/openai.py +257 -0
  494. agno/utils/pickle.py +32 -0
  495. agno/utils/pprint.py +178 -0
  496. agno/utils/print_response/__init__.py +0 -0
  497. agno/utils/print_response/agent.py +842 -0
  498. agno/utils/print_response/team.py +1724 -0
  499. agno/utils/print_response/workflow.py +1668 -0
  500. agno/utils/prompts.py +111 -0
  501. agno/utils/reasoning.py +108 -0
  502. agno/utils/response.py +163 -0
  503. agno/utils/response_iterator.py +17 -0
  504. agno/utils/safe_formatter.py +24 -0
  505. agno/utils/serialize.py +32 -0
  506. agno/utils/shell.py +22 -0
  507. agno/utils/streamlit.py +487 -0
  508. agno/utils/string.py +231 -0
  509. agno/utils/team.py +139 -0
  510. agno/utils/timer.py +41 -0
  511. agno/utils/tools.py +102 -0
  512. agno/utils/web.py +23 -0
  513. agno/utils/whatsapp.py +305 -0
  514. agno/utils/yaml_io.py +25 -0
  515. agno/vectordb/__init__.py +3 -0
  516. agno/vectordb/base.py +127 -0
  517. agno/vectordb/cassandra/__init__.py +5 -0
  518. agno/vectordb/cassandra/cassandra.py +501 -0
  519. agno/vectordb/cassandra/extra_param_mixin.py +11 -0
  520. agno/vectordb/cassandra/index.py +13 -0
  521. agno/vectordb/chroma/__init__.py +5 -0
  522. agno/vectordb/chroma/chromadb.py +929 -0
  523. agno/vectordb/clickhouse/__init__.py +9 -0
  524. agno/vectordb/clickhouse/clickhousedb.py +835 -0
  525. agno/vectordb/clickhouse/index.py +9 -0
  526. agno/vectordb/couchbase/__init__.py +3 -0
  527. agno/vectordb/couchbase/couchbase.py +1442 -0
  528. agno/vectordb/distance.py +7 -0
  529. agno/vectordb/lancedb/__init__.py +6 -0
  530. agno/vectordb/lancedb/lance_db.py +995 -0
  531. agno/vectordb/langchaindb/__init__.py +5 -0
  532. agno/vectordb/langchaindb/langchaindb.py +163 -0
  533. agno/vectordb/lightrag/__init__.py +5 -0
  534. agno/vectordb/lightrag/lightrag.py +388 -0
  535. agno/vectordb/llamaindex/__init__.py +3 -0
  536. agno/vectordb/llamaindex/llamaindexdb.py +166 -0
  537. agno/vectordb/milvus/__init__.py +4 -0
  538. agno/vectordb/milvus/milvus.py +1182 -0
  539. agno/vectordb/mongodb/__init__.py +9 -0
  540. agno/vectordb/mongodb/mongodb.py +1417 -0
  541. agno/vectordb/pgvector/__init__.py +12 -0
  542. agno/vectordb/pgvector/index.py +23 -0
  543. agno/vectordb/pgvector/pgvector.py +1462 -0
  544. agno/vectordb/pineconedb/__init__.py +5 -0
  545. agno/vectordb/pineconedb/pineconedb.py +747 -0
  546. agno/vectordb/qdrant/__init__.py +5 -0
  547. agno/vectordb/qdrant/qdrant.py +1134 -0
  548. agno/vectordb/redis/__init__.py +9 -0
  549. agno/vectordb/redis/redisdb.py +694 -0
  550. agno/vectordb/search.py +7 -0
  551. agno/vectordb/singlestore/__init__.py +10 -0
  552. agno/vectordb/singlestore/index.py +41 -0
  553. agno/vectordb/singlestore/singlestore.py +763 -0
  554. agno/vectordb/surrealdb/__init__.py +3 -0
  555. agno/vectordb/surrealdb/surrealdb.py +699 -0
  556. agno/vectordb/upstashdb/__init__.py +5 -0
  557. agno/vectordb/upstashdb/upstashdb.py +718 -0
  558. agno/vectordb/weaviate/__init__.py +8 -0
  559. agno/vectordb/weaviate/index.py +15 -0
  560. agno/vectordb/weaviate/weaviate.py +1005 -0
  561. agno/workflow/__init__.py +23 -0
  562. agno/workflow/agent.py +299 -0
  563. agno/workflow/condition.py +738 -0
  564. agno/workflow/loop.py +735 -0
  565. agno/workflow/parallel.py +824 -0
  566. agno/workflow/router.py +702 -0
  567. agno/workflow/step.py +1432 -0
  568. agno/workflow/steps.py +592 -0
  569. agno/workflow/types.py +520 -0
  570. agno/workflow/workflow.py +4321 -0
  571. agno-2.2.13.dist-info/METADATA +614 -0
  572. agno-2.2.13.dist-info/RECORD +575 -0
  573. agno-2.2.13.dist-info/WHEEL +5 -0
  574. agno-2.2.13.dist-info/licenses/LICENSE +201 -0
  575. agno-2.2.13.dist-info/top_level.txt +1 -0
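The hunk below is the new file agno/db/mongo/async_mongo.py (entry 51 in the list above; its 2,026 added lines match the hunk header), which introduces the Motor-based AsyncMongoDb adapter. As an illustrative sketch only, not part of the published diff, the constructor shown in the hunk could be wired up as follows; the connection string and collection names are placeholder values, not values taken from the package:

    # Sketch: instantiate the async MongoDB adapter added in this release.
    # Per the constructor below, one of db_url or db_client is required,
    # and db_name defaults to "agno" when omitted.
    from agno.db.mongo.async_mongo import AsyncMongoDb

    db = AsyncMongoDb(
        db_url="mongodb://localhost:27017",  # placeholder connection string
        db_name="agno",
        session_collection="agno_sessions",  # placeholder collection names
        memory_collection="agno_memories",
    )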
@@ -0,0 +1,2026 @@
1
+ import time
2
+ from datetime import date, datetime, timedelta, timezone
3
+ from typing import Any, Dict, List, Optional, Tuple, Union
4
+ from uuid import uuid4
5
+
6
+ from agno.db.base import AsyncBaseDb, SessionType
7
+ from agno.db.mongo.utils import (
8
+ apply_pagination,
9
+ apply_sorting,
10
+ bulk_upsert_metrics,
11
+ calculate_date_metrics,
12
+ create_collection_indexes,
13
+ deserialize_cultural_knowledge_from_db,
14
+ fetch_all_sessions_data,
15
+ get_dates_to_calculate_metrics_for,
16
+ serialize_cultural_knowledge_for_db,
17
+ )
18
+ from agno.db.schemas.culture import CulturalKnowledge
19
+ from agno.db.schemas.evals import EvalFilterType, EvalRunRecord, EvalType
20
+ from agno.db.schemas.knowledge import KnowledgeRow
21
+ from agno.db.schemas.memory import UserMemory
22
+ from agno.db.utils import deserialize_session_json_fields
23
+ from agno.session import AgentSession, Session, TeamSession, WorkflowSession
24
+ from agno.utils.log import log_debug, log_error, log_info
25
+ from agno.utils.string import generate_id
26
+
27
+ try:
28
+ import asyncio
29
+
30
+ from motor.motor_asyncio import AsyncIOMotorClient, AsyncIOMotorCollection, AsyncIOMotorDatabase # type: ignore
31
+ except ImportError:
32
+ raise ImportError("`motor` not installed. Please install it using `pip install -U motor`")
33
+
34
+ try:
35
+ from pymongo import ReturnDocument
36
+ from pymongo.errors import OperationFailure
37
+ except ImportError:
38
+ raise ImportError("`pymongo` not installed. Please install it using `pip install -U pymongo`")
39
+
40
+
41
+ class AsyncMongoDb(AsyncBaseDb):
42
+ def __init__(
43
+ self,
44
+ db_client: Optional[AsyncIOMotorClient] = None,
45
+ db_name: Optional[str] = None,
46
+ db_url: Optional[str] = None,
47
+ session_collection: Optional[str] = None,
48
+ memory_collection: Optional[str] = None,
49
+ metrics_collection: Optional[str] = None,
50
+ eval_collection: Optional[str] = None,
51
+ knowledge_collection: Optional[str] = None,
52
+ culture_collection: Optional[str] = None,
53
+ id: Optional[str] = None,
54
+ ):
55
+ """
56
+ Async interface for interacting with a MongoDB database using Motor.
57
+
58
+ Args:
59
+ db_client (Optional[AsyncIOMotorClient]): The MongoDB async client to use.
60
+ db_name (Optional[str]): The name of the database to use.
61
+ db_url (Optional[str]): The database URL to connect to.
62
+ session_collection (Optional[str]): Name of the collection to store sessions.
63
+ memory_collection (Optional[str]): Name of the collection to store memories.
64
+ metrics_collection (Optional[str]): Name of the collection to store metrics.
65
+ eval_collection (Optional[str]): Name of the collection to store evaluation runs.
66
+ knowledge_collection (Optional[str]): Name of the collection to store knowledge documents.
67
+ culture_collection (Optional[str]): Name of the collection to store cultural knowledge.
68
+ id (Optional[str]): ID of the database.
69
+
70
+ Raises:
71
+ ValueError: If neither db_url nor db_client is provided.
72
+ """
73
+ if id is None:
74
+ base_seed = db_url or str(db_client)
75
+ db_name_suffix = db_name if db_name is not None else "agno"
76
+ seed = f"{base_seed}#{db_name_suffix}"
77
+ id = generate_id(seed)
78
+
79
+ super().__init__(
80
+ id=id,
81
+ session_table=session_collection,
82
+ memory_table=memory_collection,
83
+ metrics_table=metrics_collection,
84
+ eval_table=eval_collection,
85
+ knowledge_table=knowledge_collection,
86
+ culture_table=culture_collection,
87
+ )
88
+
89
+ # Store configuration for lazy initialization
90
+ self._provided_client: Optional[AsyncIOMotorClient] = db_client
91
+ self.db_url: Optional[str] = db_url
92
+ self.db_name: str = db_name if db_name is not None else "agno"
93
+
94
+ if self._provided_client is None and self.db_url is None:
95
+ raise ValueError("One of db_url or db_client must be provided")
96
+
97
+ # Client and database will be lazily initialized per event loop
98
+ self._client: Optional[AsyncIOMotorClient] = None
99
+ self._database: Optional[AsyncIOMotorDatabase] = None
100
+ self._event_loop: Optional[asyncio.AbstractEventLoop] = None
101
+
102
+ async def table_exists(self, table_name: str) -> bool:
103
+ """Check if a collection with the given name exists in the MongoDB database.
104
+
105
+ Args:
106
+ table_name: Name of the collection to check
107
+
108
+ Returns:
109
+ bool: True if the collection exists in the database, False otherwise
110
+ """
111
+ collection_names = await self.database.list_collection_names()
112
+ return table_name in collection_names
113
+
114
+ async def _create_all_tables(self):
115
+ """Create all configured MongoDB collections if they don't exist."""
116
+ collections_to_create = [
117
+ ("sessions", self.session_table_name),
118
+ ("memories", self.memory_table_name),
119
+ ("metrics", self.metrics_table_name),
120
+ ("evals", self.eval_table_name),
121
+ ("knowledge", self.knowledge_table_name),
122
+ ("culture", self.culture_table_name),
123
+ ]
124
+
125
+ for collection_type, collection_name in collections_to_create:
126
+ if collection_name and not await self.table_exists(collection_name):
127
+ await self._get_collection(collection_type, create_collection_if_not_found=True)
128
+
129
+ def _ensure_client(self) -> AsyncIOMotorClient:
130
+ """
131
+ Ensure the Motor client is valid for the current event loop.
132
+
133
+ Motor's AsyncIOMotorClient is tied to the event loop it was created in.
134
+ If we detect a new event loop, we need to refresh the client.
135
+
136
+ Returns:
137
+ AsyncIOMotorClient: A valid client for the current event loop.
138
+ """
139
+ try:
140
+ current_loop = asyncio.get_running_loop()
141
+ except RuntimeError:
142
+ # No running loop, return existing client or create new one
143
+ if self._client is None:
144
+ if self._provided_client is not None:
145
+ self._client = self._provided_client
146
+ elif self.db_url is not None:
147
+ self._client = AsyncIOMotorClient(self.db_url)
148
+ log_debug("Created AsyncIOMotorClient outside event loop")
149
+ return self._client # type: ignore
150
+
151
+ # Check if we're in a different event loop
152
+ if self._event_loop is None or self._event_loop is not current_loop:
153
+ # New event loop detected, create new client
154
+ if self._provided_client is not None:
155
+ # User provided a client, use it but warn them
156
+ log_debug(
157
+ "New event loop detected. Using provided AsyncIOMotorClient, "
158
+ "which may cause issues if it was created in a different event loop."
159
+ )
160
+ self._client = self._provided_client
161
+ elif self.db_url is not None:
162
+ # Create a new client for this event loop
163
+ old_loop_id = id(self._event_loop) if self._event_loop else "None"
164
+ new_loop_id = id(current_loop)
165
+ log_debug(f"Event loop changed from {old_loop_id} to {new_loop_id}, creating new AsyncIOMotorClient")
166
+ self._client = AsyncIOMotorClient(self.db_url)
167
+
168
+ self._event_loop = current_loop
169
+ self._database = None # Reset database reference
170
+ # Clear collection caches when switching event loops
171
+ for attr in list(vars(self).keys()):
172
+ if attr.endswith("_collection"):
173
+ delattr(self, attr)
174
+
175
+ return self._client # type: ignore
176
+
177
+ @property
178
+ def db_client(self) -> AsyncIOMotorClient:
179
+ """Get the MongoDB client, ensuring it's valid for the current event loop."""
180
+ return self._ensure_client()
181
+
182
+ @property
183
+ def database(self) -> AsyncIOMotorDatabase:
184
+ """Get the MongoDB database, ensuring it's valid for the current event loop."""
185
+ try:
186
+ current_loop = asyncio.get_running_loop()
187
+ if self._database is None or self._event_loop != current_loop:
188
+ self._database = self.db_client[self.db_name]
189
+ except RuntimeError:
190
+ # No running loop - fallback to existing database or create new one
191
+ if self._database is None:
192
+ self._database = self.db_client[self.db_name]
193
+ return self._database
194
+
195
+ # -- DB methods --
196
+
197
+ def _should_reset_collection_cache(self) -> bool:
198
+ """Check if collection cache should be reset due to event loop change."""
199
+ try:
200
+ current_loop = asyncio.get_running_loop()
201
+ return self._event_loop is not current_loop
202
+ except RuntimeError:
203
+ return False
204
+
205
+ async def _get_collection(
206
+ self, table_type: str, create_collection_if_not_found: Optional[bool] = True
207
+ ) -> Optional[AsyncIOMotorCollection]:
208
+ """Get or create a collection based on table type.
209
+
210
+ Args:
211
+ table_type (str): The type of table to get or create.
212
+ create_collection_if_not_found (Optional[bool]): Whether to create the collection if it doesn't exist.
213
+
214
+ Returns:
215
+ AsyncIOMotorCollection: The collection object.
216
+ """
217
+ # Ensure client is valid for current event loop before accessing collections
218
+ _ = self.db_client # This triggers _ensure_client()
219
+
220
+ # Check if collections need to be reset due to event loop change
221
+ reset_cache = self._should_reset_collection_cache()
222
+
223
+ if table_type == "sessions":
224
+ if reset_cache or not hasattr(self, "session_collection"):
225
+ if self.session_table_name is None:
226
+ raise ValueError("Session collection was not provided on initialization")
227
+ self.session_collection = await self._get_or_create_collection(
228
+ collection_name=self.session_table_name,
229
+ collection_type="sessions",
230
+ create_collection_if_not_found=create_collection_if_not_found,
231
+ )
232
+ return self.session_collection
233
+
234
+ if table_type == "memories":
235
+ if reset_cache or not hasattr(self, "memory_collection"):
236
+ if self.memory_table_name is None:
237
+ raise ValueError("Memory collection was not provided on initialization")
238
+ self.memory_collection = await self._get_or_create_collection(
239
+ collection_name=self.memory_table_name,
240
+ collection_type="memories",
241
+ create_collection_if_not_found=create_collection_if_not_found,
242
+ )
243
+ return self.memory_collection
244
+
245
+ if table_type == "metrics":
246
+ if reset_cache or not hasattr(self, "metrics_collection"):
247
+ if self.metrics_table_name is None:
248
+ raise ValueError("Metrics collection was not provided on initialization")
249
+ self.metrics_collection = await self._get_or_create_collection(
250
+ collection_name=self.metrics_table_name,
251
+ collection_type="metrics",
252
+ create_collection_if_not_found=create_collection_if_not_found,
253
+ )
254
+ return self.metrics_collection
255
+
256
+ if table_type == "evals":
257
+ if reset_cache or not hasattr(self, "eval_collection"):
258
+ if self.eval_table_name is None:
259
+ raise ValueError("Eval collection was not provided on initialization")
260
+ self.eval_collection = await self._get_or_create_collection(
261
+ collection_name=self.eval_table_name,
262
+ collection_type="evals",
263
+ create_collection_if_not_found=create_collection_if_not_found,
264
+ )
265
+ return self.eval_collection
266
+
267
+ if table_type == "knowledge":
268
+ if reset_cache or not hasattr(self, "knowledge_collection"):
269
+ if self.knowledge_table_name is None:
270
+ raise ValueError("Knowledge collection was not provided on initialization")
271
+ self.knowledge_collection = await self._get_or_create_collection(
272
+ collection_name=self.knowledge_table_name,
273
+ collection_type="knowledge",
274
+ create_collection_if_not_found=create_collection_if_not_found,
275
+ )
276
+ return self.knowledge_collection
277
+
278
+ if table_type == "culture":
279
+ if reset_cache or not hasattr(self, "culture_collection"):
280
+ if self.culture_table_name is None:
281
+ raise ValueError("Culture collection was not provided on initialization")
282
+ self.culture_collection = await self._get_or_create_collection(
283
+ collection_name=self.culture_table_name,
284
+ collection_type="culture",
285
+ create_collection_if_not_found=create_collection_if_not_found,
286
+ )
287
+ return self.culture_collection
288
+
289
+ raise ValueError(f"Unknown table type: {table_type}")
290
+
291
+ async def _get_or_create_collection(
292
+ self, collection_name: str, collection_type: str, create_collection_if_not_found: Optional[bool] = True
293
+ ) -> Optional[AsyncIOMotorCollection]:
294
+ """Get or create a collection with proper indexes.
295
+
296
+ Args:
297
+ collection_name (str): The name of the collection to get or create.
298
+ collection_type (str): The type of collection to get or create.
299
+ create_collection_if_not_found (Optional[bool]): Whether to create the collection if it doesn't exist.
300
+
301
+ Returns:
302
+ Optional[AsyncIOMotorCollection]: The collection object.
303
+ """
304
+ try:
305
+ collection = self.database[collection_name]
306
+
307
+ if not hasattr(self, f"_{collection_name}_initialized"):
308
+ if not create_collection_if_not_found:
309
+ return None
310
+ # Note: Motor doesn't have sync create_index, so we use it as-is
311
+ # The indexes are created in the background
312
+ create_collection_indexes(collection, collection_type) # type: ignore
313
+ setattr(self, f"_{collection_name}_initialized", True)
314
+ log_debug(f"Initialized collection '{collection_name}'")
315
+ else:
316
+ log_debug(f"Collection '{collection_name}' already initialized")
317
+
318
+ return collection
319
+
320
+ except Exception as e:
321
+ log_error(f"Error getting collection {collection_name}: {e}")
322
+ raise
323
+
324
+ # -- Session methods --
325
+
326
+ async def delete_session(self, session_id: str) -> bool:
327
+ """Delete a session from the database.
328
+
329
+ Args:
330
+ session_id (str): The ID of the session to delete.
331
+
332
+ Returns:
333
+ bool: True if the session was deleted, False otherwise.
334
+
335
+ Raises:
336
+ Exception: If there is an error deleting the session.
337
+ """
338
+ try:
339
+ collection = await self._get_collection(table_type="sessions")
340
+ if collection is None:
341
+ return False
342
+
343
+ result = await collection.delete_one({"session_id": session_id})
344
+ if result.deleted_count == 0:
345
+ log_debug(f"No session found to delete with session_id: {session_id}")
346
+ return False
347
+ else:
348
+ log_debug(f"Successfully deleted session with session_id: {session_id}")
349
+ return True
350
+
351
+ except Exception as e:
352
+ log_error(f"Error deleting session: {e}")
353
+ raise e
354
+
355
+ async def delete_sessions(self, session_ids: List[str]) -> None:
356
+ """Delete multiple sessions from the database.
357
+
358
+ Args:
359
+ session_ids (List[str]): The IDs of the sessions to delete.
360
+ """
361
+ try:
362
+ collection = await self._get_collection(table_type="sessions")
363
+ if collection is None:
364
+ return
365
+
366
+ result = await collection.delete_many({"session_id": {"$in": session_ids}})
367
+ log_debug(f"Successfully deleted {result.deleted_count} sessions")
368
+
369
+ except Exception as e:
370
+ log_error(f"Error deleting sessions: {e}")
371
+ raise e
372
+
373
+ async def get_session(
374
+ self,
375
+ session_id: str,
376
+ session_type: SessionType,
377
+ user_id: Optional[str] = None,
378
+ deserialize: Optional[bool] = True,
379
+ ) -> Optional[Union[Session, Dict[str, Any]]]:
380
+ """Read a session from the database.
381
+
382
+ Args:
383
+ session_id (str): The ID of the session to get.
384
+ session_type (SessionType): The type of session to get.
385
+ user_id (Optional[str]): The ID of the user to get the session for.
386
+ deserialize (Optional[bool]): Whether to serialize the session. Defaults to True.
387
+
388
+ Returns:
389
+ Union[Session, Dict[str, Any], None]:
390
+ - When deserialize=True: Session object
391
+ - When deserialize=False: Session dictionary
392
+
393
+ Raises:
394
+ Exception: If there is an error reading the session.
395
+ """
396
+ try:
397
+ collection = await self._get_collection(table_type="sessions")
398
+ if collection is None:
399
+ return None
400
+
401
+ query = {"session_id": session_id}
402
+ if user_id is not None:
403
+ query["user_id"] = user_id
404
+ if session_type is not None:
405
+ query["session_type"] = session_type
406
+
407
+ result = await collection.find_one(query)
408
+ if result is None:
409
+ return None
410
+
411
+ session = deserialize_session_json_fields(result)
412
+ if not deserialize:
413
+ return session
414
+
415
+ if session_type == SessionType.AGENT:
416
+ return AgentSession.from_dict(session)
417
+ elif session_type == SessionType.TEAM:
418
+ return TeamSession.from_dict(session)
419
+ elif session_type == SessionType.WORKFLOW:
420
+ return WorkflowSession.from_dict(session)
421
+ else:
422
+ raise ValueError(f"Invalid session type: {session_type}")
423
+
424
+ except Exception as e:
425
+ log_error(f"Exception reading session: {e}")
426
+ raise e
427
+
428
+ async def get_sessions(
429
+ self,
430
+ session_type: Optional[SessionType] = None,
431
+ user_id: Optional[str] = None,
432
+ component_id: Optional[str] = None,
433
+ session_name: Optional[str] = None,
434
+ start_timestamp: Optional[int] = None,
435
+ end_timestamp: Optional[int] = None,
436
+ limit: Optional[int] = None,
437
+ page: Optional[int] = None,
438
+ sort_by: Optional[str] = None,
439
+ sort_order: Optional[str] = None,
440
+ deserialize: Optional[bool] = True,
441
+ ) -> Union[List[Session], Tuple[List[Dict[str, Any]], int]]:
442
+ """Get all sessions.
443
+
444
+ Args:
445
+ session_type (Optional[SessionType]): The type of session to get.
446
+ user_id (Optional[str]): The ID of the user to get the session for.
447
+ component_id (Optional[str]): The ID of the component to get the session for.
448
+ session_name (Optional[str]): The name of the session to filter by.
449
+ start_timestamp (Optional[int]): The start timestamp to filter sessions by.
450
+ end_timestamp (Optional[int]): The end timestamp to filter sessions by.
451
+ limit (Optional[int]): The limit of the sessions to get.
452
+ page (Optional[int]): The page number to get.
453
+ sort_by (Optional[str]): The field to sort the sessions by.
454
+ sort_order (Optional[str]): The order to sort the sessions by.
455
+ deserialize (Optional[bool]): Whether to serialize the sessions. Defaults to True.
456
+
457
+ Returns:
458
+ Union[List[AgentSession], List[TeamSession], List[WorkflowSession], Tuple[List[Dict[str, Any]], int]]:
459
+ - When deserialize=True: List of Session objects
460
+ - When deserialize=False: List of session dictionaries and the total count
461
+
462
+ Raises:
463
+ Exception: If there is an error reading the sessions.
464
+ """
465
+ try:
466
+ collection = await self._get_collection(table_type="sessions")
467
+ if collection is None:
468
+ return [] if deserialize else ([], 0)
469
+
470
+ # Filtering
471
+ query: Dict[str, Any] = {}
472
+ if user_id is not None:
473
+ query["user_id"] = user_id
474
+ if session_type is not None:
475
+ query["session_type"] = session_type
476
+ if component_id is not None:
477
+ if session_type == SessionType.AGENT:
478
+ query["agent_id"] = component_id
479
+ elif session_type == SessionType.TEAM:
480
+ query["team_id"] = component_id
481
+ elif session_type == SessionType.WORKFLOW:
482
+ query["workflow_id"] = component_id
483
+ if start_timestamp is not None:
484
+ query["created_at"] = {"$gte": start_timestamp}
485
+ if end_timestamp is not None:
486
+ if "created_at" in query:
487
+ query["created_at"]["$lte"] = end_timestamp
488
+ else:
489
+ query["created_at"] = {"$lte": end_timestamp}
490
+ if session_name is not None:
491
+ query["session_data.session_name"] = {"$regex": session_name, "$options": "i"}
492
+
493
+ # Get total count
494
+ total_count = await collection.count_documents(query)
495
+
496
+ cursor = collection.find(query)
497
+
498
+ # Sorting
499
+ sort_criteria = apply_sorting({}, sort_by, sort_order)
500
+ if sort_criteria:
501
+ cursor = cursor.sort(sort_criteria)
502
+
503
+ # Pagination
504
+ query_args = apply_pagination({}, limit, page)
505
+ if query_args.get("skip"):
506
+ cursor = cursor.skip(query_args["skip"])
507
+ if query_args.get("limit"):
508
+ cursor = cursor.limit(query_args["limit"])
509
+
510
+ records = await cursor.to_list(length=None)
511
+ if records is None:
512
+ return [] if deserialize else ([], 0)
513
+ sessions_raw = [deserialize_session_json_fields(record) for record in records]
514
+
515
+ if not deserialize:
516
+ return sessions_raw, total_count
517
+
518
+ sessions: List[Union[AgentSession, TeamSession, WorkflowSession]] = []
519
+ for record in sessions_raw:
520
+ if session_type == SessionType.AGENT.value:
521
+ agent_session = AgentSession.from_dict(record)
522
+ if agent_session is not None:
523
+ sessions.append(agent_session)
524
+ elif session_type == SessionType.TEAM.value:
525
+ team_session = TeamSession.from_dict(record)
526
+ if team_session is not None:
527
+ sessions.append(team_session)
528
+ elif session_type == SessionType.WORKFLOW.value:
529
+ workflow_session = WorkflowSession.from_dict(record)
530
+ if workflow_session is not None:
531
+ sessions.append(workflow_session)
532
+
533
+ return sessions
534
+
535
+ except Exception as e:
536
+ log_error(f"Exception reading sessions: {e}")
537
+ raise e
538
+
539
+ async def rename_session(
540
+ self, session_id: str, session_type: SessionType, session_name: str, deserialize: Optional[bool] = True
541
+ ) -> Optional[Union[Session, Dict[str, Any]]]:
542
+ """Rename a session in the database.
543
+
544
+ Args:
545
+ session_id (str): The ID of the session to rename.
546
+ session_type (SessionType): The type of session to rename.
547
+ session_name (str): The new name of the session.
548
+ deserialize (Optional[bool]): Whether to serialize the session. Defaults to True.
549
+
550
+ Returns:
551
+ Optional[Union[Session, Dict[str, Any]]]:
552
+ - When deserialize=True: Session object
553
+ - When deserialize=False: Session dictionary
554
+
555
+ Raises:
556
+ Exception: If there is an error renaming the session.
557
+ """
558
+ try:
559
+ collection = await self._get_collection(table_type="sessions")
560
+ if collection is None:
561
+ return None
562
+
563
+ try:
564
+ result = await collection.find_one_and_update(
565
+ {"session_id": session_id},
566
+ {"$set": {"session_data.session_name": session_name, "updated_at": int(time.time())}},
567
+ return_document=ReturnDocument.AFTER,
568
+ upsert=False,
569
+ )
570
+ except OperationFailure:
571
+ # If the update fails because session_data doesn't contain a session_name yet, we initialize session_data
572
+ result = await collection.find_one_and_update(
573
+ {"session_id": session_id},
574
+ {"$set": {"session_data": {"session_name": session_name}, "updated_at": int(time.time())}},
575
+ return_document=ReturnDocument.AFTER,
576
+ upsert=False,
577
+ )
578
+ if not result:
579
+ return None
580
+
581
+ deserialized_session = deserialize_session_json_fields(result)
582
+
583
+ if not deserialize:
584
+ return deserialized_session
585
+
586
+ if session_type == SessionType.AGENT.value:
587
+ return AgentSession.from_dict(deserialized_session)
588
+ elif session_type == SessionType.TEAM.value:
589
+ return TeamSession.from_dict(deserialized_session)
590
+ else:
591
+ return WorkflowSession.from_dict(deserialized_session)
592
+
593
+ except Exception as e:
594
+ log_error(f"Exception renaming session: {e}")
595
+ raise e
596
+
597
+ async def upsert_session(
598
+ self, session: Session, deserialize: Optional[bool] = True
599
+ ) -> Optional[Union[Session, Dict[str, Any]]]:
600
+ """Insert or update a session in the database.
601
+
602
+ Args:
603
+ session (Session): The session to upsert.
604
+ deserialize (Optional[bool]): Whether to deserialize the session. Defaults to True.
605
+
606
+ Returns:
607
+ Optional[Union[Session, Dict[str, Any]]]: The upserted session.
608
+
609
+ Raises:
610
+ Exception: If there is an error upserting the session.
611
+ """
612
+ try:
613
+ collection = await self._get_collection(table_type="sessions", create_collection_if_not_found=True)
614
+ if collection is None:
615
+ return None
616
+
617
+ session_dict = session.to_dict()
618
+
619
+ if isinstance(session, AgentSession):
620
+ record = {
621
+ "session_id": session_dict.get("session_id"),
622
+ "session_type": SessionType.AGENT.value,
623
+ "agent_id": session_dict.get("agent_id"),
624
+ "user_id": session_dict.get("user_id"),
625
+ "runs": session_dict.get("runs"),
626
+ "agent_data": session_dict.get("agent_data"),
627
+ "session_data": session_dict.get("session_data"),
628
+ "summary": session_dict.get("summary"),
629
+ "metadata": session_dict.get("metadata"),
630
+ "created_at": session_dict.get("created_at"),
631
+ "updated_at": int(time.time()),
632
+ }
633
+
634
+ result = await collection.find_one_and_replace(
635
+ filter={"session_id": session_dict.get("session_id")},
636
+ replacement=record,
637
+ upsert=True,
638
+ return_document=ReturnDocument.AFTER,
639
+ )
640
+ if not result:
641
+ return None
642
+
643
+ session = result # type: ignore
644
+
645
+ if not deserialize:
646
+ return session
647
+
648
+ return AgentSession.from_dict(session) # type: ignore
649
+
650
+ elif isinstance(session, TeamSession):
651
+ record = {
652
+ "session_id": session_dict.get("session_id"),
653
+ "session_type": SessionType.TEAM.value,
654
+ "team_id": session_dict.get("team_id"),
655
+ "user_id": session_dict.get("user_id"),
656
+ "runs": session_dict.get("runs"),
657
+ "team_data": session_dict.get("team_data"),
658
+ "session_data": session_dict.get("session_data"),
659
+ "summary": session_dict.get("summary"),
660
+ "metadata": session_dict.get("metadata"),
661
+ "created_at": session_dict.get("created_at"),
662
+ "updated_at": int(time.time()),
663
+ }
664
+
665
+ result = await collection.find_one_and_replace(
666
+ filter={"session_id": session_dict.get("session_id")},
667
+ replacement=record,
668
+ upsert=True,
669
+ return_document=ReturnDocument.AFTER,
670
+ )
671
+ if not result:
672
+ return None
673
+
674
+ # MongoDB stores native objects, no deserialization needed for document fields
675
+ session = result # type: ignore
676
+
677
+ if not deserialize:
678
+ return session
679
+
680
+ return TeamSession.from_dict(session) # type: ignore
681
+
682
+ else:
683
+ record = {
684
+ "session_id": session_dict.get("session_id"),
685
+ "session_type": SessionType.WORKFLOW.value,
686
+ "workflow_id": session_dict.get("workflow_id"),
687
+ "user_id": session_dict.get("user_id"),
688
+ "runs": session_dict.get("runs"),
689
+ "workflow_data": session_dict.get("workflow_data"),
690
+ "session_data": session_dict.get("session_data"),
691
+ "summary": session_dict.get("summary"),
692
+ "metadata": session_dict.get("metadata"),
693
+ "created_at": session_dict.get("created_at"),
694
+ "updated_at": int(time.time()),
695
+ }
696
+
697
+ result = await collection.find_one_and_replace(
698
+ filter={"session_id": session_dict.get("session_id")},
699
+ replacement=record,
700
+ upsert=True,
701
+ return_document=ReturnDocument.AFTER,
702
+ )
703
+ if not result:
704
+ return None
705
+
706
+ session = result # type: ignore
707
+
708
+ if not deserialize:
709
+ return session
710
+
711
+ return WorkflowSession.from_dict(session) # type: ignore
712
+
713
+ except Exception as e:
714
+ log_error(f"Exception upserting session: {e}")
715
+ raise e
716
+
717
+ async def upsert_sessions(
718
+ self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
719
+ ) -> List[Union[Session, Dict[str, Any]]]:
720
+ """
721
+ Bulk upsert multiple sessions for improved performance on large datasets.
722
+
723
+ Args:
724
+ sessions (List[Session]): List of sessions to upsert.
725
+ deserialize (Optional[bool]): Whether to deserialize the sessions. Defaults to True.
726
+ preserve_updated_at (bool): If True, preserve the updated_at from the session object.
727
+
728
+ Returns:
729
+ List[Union[Session, Dict[str, Any]]]: List of upserted sessions.
730
+
731
+ Raises:
732
+ Exception: If an error occurs during bulk upsert.
733
+ """
734
+ if not sessions:
735
+ return []
736
+
737
+ try:
738
+ collection = await self._get_collection(table_type="sessions", create_collection_if_not_found=True)
739
+ if collection is None:
740
+ log_info("Sessions collection not available, falling back to individual upserts")
741
+ return [
742
+ result
743
+ for session in sessions
744
+ if session is not None
745
+ for result in [await self.upsert_session(session, deserialize=deserialize)]
746
+ if result is not None
747
+ ]
748
+
749
+ from pymongo import ReplaceOne
750
+
751
+ operations = []
752
+ results: List[Union[Session, Dict[str, Any]]] = []
753
+
754
+ for session in sessions:
755
+ if session is None:
756
+ continue
757
+
758
+ session_dict = session.to_dict()
759
+
760
+ # Use preserved updated_at if flag is set and value exists, otherwise use current time
761
+ updated_at = session_dict.get("updated_at") if preserve_updated_at else int(time.time())
762
+
763
+ if isinstance(session, AgentSession):
764
+ record = {
765
+ "session_id": session_dict.get("session_id"),
766
+ "session_type": SessionType.AGENT.value,
767
+ "agent_id": session_dict.get("agent_id"),
768
+ "user_id": session_dict.get("user_id"),
769
+ "runs": session_dict.get("runs"),
770
+ "agent_data": session_dict.get("agent_data"),
771
+ "session_data": session_dict.get("session_data"),
772
+ "summary": session_dict.get("summary"),
773
+ "metadata": session_dict.get("metadata"),
774
+ "created_at": session_dict.get("created_at"),
775
+ "updated_at": updated_at,
776
+ }
777
+ elif isinstance(session, TeamSession):
778
+ record = {
779
+ "session_id": session_dict.get("session_id"),
780
+ "session_type": SessionType.TEAM.value,
781
+ "team_id": session_dict.get("team_id"),
782
+ "user_id": session_dict.get("user_id"),
783
+ "runs": session_dict.get("runs"),
784
+ "team_data": session_dict.get("team_data"),
785
+ "session_data": session_dict.get("session_data"),
786
+ "summary": session_dict.get("summary"),
787
+ "metadata": session_dict.get("metadata"),
788
+ "created_at": session_dict.get("created_at"),
789
+ "updated_at": updated_at,
790
+ }
791
+ elif isinstance(session, WorkflowSession):
792
+ record = {
793
+ "session_id": session_dict.get("session_id"),
794
+ "session_type": SessionType.WORKFLOW.value,
795
+ "workflow_id": session_dict.get("workflow_id"),
796
+ "user_id": session_dict.get("user_id"),
797
+ "runs": session_dict.get("runs"),
798
+ "workflow_data": session_dict.get("workflow_data"),
799
+ "session_data": session_dict.get("session_data"),
800
+ "summary": session_dict.get("summary"),
801
+ "metadata": session_dict.get("metadata"),
802
+ "created_at": session_dict.get("created_at"),
803
+ "updated_at": updated_at,
804
+ }
805
+ else:
806
+ continue
807
+
808
+ operations.append(
809
+ ReplaceOne(filter={"session_id": record["session_id"]}, replacement=record, upsert=True)
810
+ )
811
+
812
+ if operations:
813
+ # Execute bulk write
814
+ await collection.bulk_write(operations)
815
+
816
+ # Fetch the results
817
+ session_ids = [session.session_id for session in sessions if session and session.session_id]
818
+ cursor = collection.find({"session_id": {"$in": session_ids}})
819
+
820
+ async for doc in cursor:
821
+ session_dict = doc
822
+
823
+ if deserialize:
824
+ session_type = doc.get("session_type")
825
+ if session_type == SessionType.AGENT.value:
826
+ deserialized_agent_session = AgentSession.from_dict(session_dict)
827
+ if deserialized_agent_session is None:
828
+ continue
829
+ results.append(deserialized_agent_session)
830
+
831
+ elif session_type == SessionType.TEAM.value:
832
+ deserialized_team_session = TeamSession.from_dict(session_dict)
833
+ if deserialized_team_session is None:
834
+ continue
835
+ results.append(deserialized_team_session)
836
+
837
+ elif session_type == SessionType.WORKFLOW.value:
838
+ deserialized_workflow_session = WorkflowSession.from_dict(session_dict)
839
+ if deserialized_workflow_session is None:
840
+ continue
841
+ results.append(deserialized_workflow_session)
842
+ else:
843
+ results.append(session_dict)
844
+
845
+ return results
846
+
847
+ except Exception as e:
848
+ log_error(f"Exception during bulk session upsert, falling back to individual upserts: {e}")
849
+
850
+ # Fallback to individual upserts
851
+ return [
852
+ result
853
+ for session in sessions
854
+ if session is not None
855
+ for result in [await self.upsert_session(session, deserialize=deserialize)]
856
+ if result is not None
857
+ ]
858
+
859
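# Editorial usage sketch (not part of the package diff): how the bulk session upsert
# above might be called. `db` is assumed to be an instance of this async Mongo db
# class; the bulk method name `upsert_sessions` is inferred from the singular
# `upsert_session` used in the fallback and is an assumption.
async def save_sessions_in_bulk(db, sessions):
    # One bulk_write round trip; returns deserialized Session objects.
    saved = await db.upsert_sessions(sessions, deserialize=True)
    # Or keep the raw Mongo documents instead of Session objects.
    raw_docs = await db.upsert_sessions(sessions, deserialize=False)
    return saved, raw_docs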
+ # -- Memory methods --
860
+
861
+ async def delete_user_memory(self, memory_id: str, user_id: Optional[str] = None) -> bool:
862
+ """Delete a user memory from the database.
863
+
864
+ Args:
865
+ memory_id (str): The ID of the memory to delete.
866
+ user_id (Optional[str]): The ID of the user to verify ownership. If provided, only delete if the memory belongs to this user.
867
+
868
+ Returns:
869
+ bool: True if the memory was deleted, False otherwise.
870
+
871
+ Raises:
872
+ Exception: If there is an error deleting the memory.
873
+ """
874
+ try:
875
+ collection = await self._get_collection(table_type="memories")
876
+ if collection is None:
877
+ return False
878
+
879
+ query = {"memory_id": memory_id}
880
+ if user_id is not None:
881
+ query["user_id"] = user_id
882
+
883
+ result = await collection.delete_one(query)
884
+
885
+ success = result.deleted_count > 0
886
+ if success:
887
+ log_debug(f"Successfully deleted memory id: {memory_id}")
888
+ else:
889
+ log_debug(f"No memory found with id: {memory_id}")
890
+ return success
+
891
+ except Exception as e:
892
+ log_error(f"Error deleting memory: {e}")
893
+ raise e
894
+
895
+ async def delete_user_memories(self, memory_ids: List[str], user_id: Optional[str] = None) -> None:
896
+ """Delete user memories from the database.
897
+
898
+ Args:
899
+ memory_ids (List[str]): The IDs of the memories to delete.
900
+ user_id (Optional[str]): The ID of the user to verify ownership. If provided, only delete memories that belong to this user.
901
+
902
+ Raises:
903
+ Exception: If there is an error deleting the memories.
904
+ """
905
+ try:
906
+ collection = await self._get_collection(table_type="memories")
907
+ if collection is None:
908
+ return
909
+
910
+ query: Dict[str, Any] = {"memory_id": {"$in": memory_ids}}
911
+ if user_id is not None:
912
+ query["user_id"] = user_id
913
+
914
+ result = await collection.delete_many(query)
915
+
916
+ if result.deleted_count == 0:
917
+ log_debug(f"No memories found with ids: {memory_ids}")
918
+
919
+ except Exception as e:
920
+ log_error(f"Error deleting memories: {e}")
921
+ raise e
922
+
923
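# Editorial usage sketch (not part of the package diff): deleting memories with the
# methods above. `db` is assumed to be an instance of this async Mongo db class; the
# optional user_id restricts deletion to memories owned by that user.
async def remove_user_memories(db, memory_ids, user_id=None):
    if len(memory_ids) == 1:
        await db.delete_user_memory(memory_ids[0], user_id=user_id)
    else:
        await db.delete_user_memories(memory_ids, user_id=user_id)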
+ async def get_all_memory_topics(self, user_id: Optional[str] = None) -> List[str]:
924
+ """Get all memory topics from the database.
925
+
926
+ Args:
927
+ user_id (Optional[str]): The ID of the user to filter by. Defaults to None.
928
+
929
+ Returns:
930
+ List[str]: The topics.
931
+
932
+ Raises:
933
+ Exception: If there is an error getting the topics.
934
+ """
935
+ try:
936
+ collection = await self._get_collection(table_type="memories")
937
+ if collection is None:
938
+ return []
939
+
940
+ query = {}
941
+ if user_id is not None:
942
+ query["user_id"] = user_id
943
+
944
+ topics = await collection.distinct("topics", query)
945
+ return [topic for topic in topics if topic]
946
+
947
+ except Exception as e:
948
+ log_error(f"Exception reading from collection: {e}")
949
+ raise e
950
+
951
+ async def get_user_memory(
952
+ self, memory_id: str, deserialize: Optional[bool] = True, user_id: Optional[str] = None
953
+ ) -> Optional[Union[UserMemory, Dict[str, Any]]]:
954
+ """Get a memory from the database.
955
+
956
+ Args:
957
+ memory_id (str): The ID of the memory to get.
958
+ deserialize (Optional[bool]): Whether to deserialize the memory. Defaults to True.
959
+ user_id (Optional[str]): The ID of the user to verify ownership. If provided, only return the memory if it belongs to this user.
960
+
961
+ Returns:
962
+ Optional[Union[UserMemory, Dict[str, Any]]]:
963
+ - When deserialize=True: UserMemory object
964
+ - When deserialize=False: Memory dictionary
965
+
966
+ Raises:
967
+ Exception: If there is an error getting the memory.
968
+ """
969
+ try:
970
+ collection = await self._get_collection(table_type="memories")
971
+ if collection is None:
972
+ return None
973
+
974
+ query = {"memory_id": memory_id}
975
+ if user_id is not None:
976
+ query["user_id"] = user_id
977
+
978
+ result = await collection.find_one(query)
979
+ if result is None or not deserialize:
980
+ return result
981
+
982
+ # Remove MongoDB's _id field before creating UserMemory object
983
+ result_filtered = {k: v for k, v in result.items() if k != "_id"}
984
+ return UserMemory.from_dict(result_filtered)
985
+
986
+ except Exception as e:
987
+ log_error(f"Exception reading from collection: {e}")
988
+ raise e
989
+
990
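# Editorial usage sketch (not part of the package diff): reading a single memory and
# the distinct topics stored for a user. `db` is assumed to be an instance of this
# async Mongo db class.
async def inspect_memory(db, memory_id, user_id):
    topics = await db.get_all_memory_topics(user_id=user_id)
    # Returns a UserMemory object; pass deserialize=False for the raw document.
    memory = await db.get_user_memory(memory_id, deserialize=True, user_id=user_id)
    return topics, memory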
+ async def get_user_memories(
991
+ self,
992
+ user_id: Optional[str] = None,
993
+ agent_id: Optional[str] = None,
994
+ team_id: Optional[str] = None,
995
+ topics: Optional[List[str]] = None,
996
+ search_content: Optional[str] = None,
997
+ limit: Optional[int] = None,
998
+ page: Optional[int] = None,
999
+ sort_by: Optional[str] = None,
1000
+ sort_order: Optional[str] = None,
1001
+ deserialize: Optional[bool] = True,
1002
+ ) -> Union[List[UserMemory], Tuple[List[Dict[str, Any]], int]]:
1003
+ """Get all memories from the database as UserMemory objects.
1004
+
1005
+ Args:
1006
+ user_id (Optional[str]): The ID of the user to get the memories for.
1007
+ agent_id (Optional[str]): The ID of the agent to get the memories for.
1008
+ team_id (Optional[str]): The ID of the team to get the memories for.
1009
+ topics (Optional[List[str]]): The topics to filter the memories by.
1010
+ search_content (Optional[str]): The content to filter the memories by.
1011
+ limit (Optional[int]): The limit of the memories to get.
1012
+ page (Optional[int]): The page number to get.
1013
+ sort_by (Optional[str]): The field to sort the memories by.
1014
+ sort_order (Optional[str]): The order to sort the memories by.
1015
+ deserialize (Optional[bool]): Whether to deserialize the memories. Defaults to True.
1016
+
1017
+ Returns:
1018
+ Union[List[UserMemory], Tuple[List[Dict[str, Any]], int]]:
1019
+ - When deserialize=True: List of UserMemory objects
1020
+ - When deserialize=False: Tuple of (memory dictionaries, total count)
1021
+
1022
+ Raises:
1023
+ Exception: If there is an error getting the memories.
1024
+ """
1025
+ try:
1026
+ collection = await self._get_collection(table_type="memories")
1027
+ if collection is None:
1028
+ return [] if deserialize else ([], 0)
1029
+
1030
+ query: Dict[str, Any] = {}
1031
+ if user_id is not None:
1032
+ query["user_id"] = user_id
1033
+ if agent_id is not None:
1034
+ query["agent_id"] = agent_id
1035
+ if team_id is not None:
1036
+ query["team_id"] = team_id
1037
+ if topics is not None:
1038
+ query["topics"] = {"$in": topics}
1039
+ if search_content is not None:
1040
+ query["memory"] = {"$regex": search_content, "$options": "i"}
1041
+
1042
+ # Get total count
1043
+ total_count = await collection.count_documents(query)
1044
+
1045
+ # Apply sorting
1046
+ sort_criteria = apply_sorting({}, sort_by, sort_order)
1047
+
1048
+ # Apply pagination
1049
+ query_args = apply_pagination({}, limit, page)
1050
+
1051
+ cursor = collection.find(query)
1052
+ if sort_criteria:
1053
+ cursor = cursor.sort(sort_criteria)
1054
+ if query_args.get("skip"):
1055
+ cursor = cursor.skip(query_args["skip"])
1056
+ if query_args.get("limit"):
1057
+ cursor = cursor.limit(query_args["limit"])
1058
+
1059
+ records = await cursor.to_list(length=None)
1060
+ if not deserialize:
1061
+ return records, total_count
1062
+
1063
+ # Remove MongoDB's _id field before creating UserMemory objects
1064
+ return [UserMemory.from_dict({k: v for k, v in record.items() if k != "_id"}) for record in records]
1065
+
1066
+ except Exception as e:
1067
+ log_error(f"Exception reading from collection: {e}")
1068
+ raise e
1069
+
1070
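# Editorial usage sketch (not part of the package diff): filtered, paginated memory
# listing with the method above. `db` is assumed to be an instance of this async
# Mongo db class; the topic value is a placeholder.
async def search_memories(db, user_id, text):
    # deserialize=False also returns the total count, which is useful for pagination.
    rows, total = await db.get_user_memories(
        user_id=user_id,
        search_content=text,      # case-insensitive regex match on the memory text
        topics=["preferences"],   # placeholder topic filter
        limit=20,
        page=1,
        sort_by="updated_at",
        sort_order="desc",
        deserialize=False,
    )
    return rows, total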
+ async def get_user_memory_stats(
1071
+ self,
1072
+ limit: Optional[int] = None,
1073
+ page: Optional[int] = None,
1074
+ user_id: Optional[str] = None,
1075
+ ) -> Tuple[List[Dict[str, Any]], int]:
1076
+ """Get user memories stats.
1077
+
1078
+ Args:
1079
+ limit (Optional[int]): The limit of the memories to get.
1080
+ page (Optional[int]): The page number to get.
1081
+ user_id (Optional[str]): The ID of the user to filter by. Defaults to None.
1082
+
1083
+ Returns:
1084
+ Tuple[List[Dict[str, Any]], int]: A tuple containing the memories stats and the total count.
1085
+
1086
+ Raises:
1087
+ Exception: If there is an error getting the memories stats.
1088
+ """
1089
+ try:
1090
+ collection = await self._get_collection(table_type="memories")
1091
+ if collection is None:
1092
+ return [], 0
1093
+
1094
+ match_stage: Dict[str, Any] = {"user_id": {"$ne": None}}
1095
+ if user_id is not None:
1096
+ match_stage["user_id"] = user_id
1097
+
1098
+ pipeline = [
1099
+ {"$match": match_stage},
1100
+ {
1101
+ "$group": {
1102
+ "_id": "$user_id",
1103
+ "total_memories": {"$sum": 1},
1104
+ "last_memory_updated_at": {"$max": "$updated_at"},
1105
+ }
1106
+ },
1107
+ {"$sort": {"last_memory_updated_at": -1}},
1108
+ ]
1109
+
1110
+ # Get total count
1111
+ count_pipeline = pipeline + [{"$count": "total"}]
1112
+ count_result = await collection.aggregate(count_pipeline).to_list(length=1)
1113
+ total_count = count_result[0]["total"] if count_result else 0
1114
+
1115
+ # Apply pagination
1116
+ if limit is not None:
1117
+ if page is not None:
1118
+ pipeline.append({"$skip": (page - 1) * limit}) # type: ignore
1119
+ pipeline.append({"$limit": limit}) # type: ignore
1120
+
1121
+ results = await collection.aggregate(pipeline).to_list(length=None)
1122
+
1123
+ formatted_results = [
1124
+ {
1125
+ "user_id": result["_id"],
1126
+ "total_memories": result["total_memories"],
1127
+ "last_memory_updated_at": result["last_memory_updated_at"],
1128
+ }
1129
+ for result in results
1130
+ ]
1131
+
1132
+ return formatted_results, total_count
1133
+
1134
+ except Exception as e:
1135
+ log_error(f"Exception getting user memory stats: {e}")
1136
+ raise e
1137
+
1138
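# Editorial usage sketch (not part of the package diff): per-user memory stats from
# the aggregation above. `db` is assumed to be an instance of this async Mongo db class.
async def memory_overview(db):
    stats, total_users = await db.get_user_memory_stats(limit=10, page=1)
    # Each entry has user_id, total_memories and last_memory_updated_at.
    return stats, total_users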
+ async def upsert_user_memory(
1139
+ self, memory: UserMemory, deserialize: Optional[bool] = True
1140
+ ) -> Optional[Union[UserMemory, Dict[str, Any]]]:
1141
+ """Upsert a user memory in the database.
1142
+
1143
+ Args:
1144
+ memory (UserMemory): The memory to upsert.
1145
+ deserialize (Optional[bool]): Whether to deserialize the memory. Defaults to True.
1146
+
1147
+ Returns:
1148
+ Optional[Union[UserMemory, Dict[str, Any]]]:
1149
+ - When deserialize=True: UserMemory object
1150
+ - When deserialize=False: Memory dictionary
1151
+
1152
+ Raises:
1153
+ Exception: If there is an error upserting the memory.
1154
+ """
1155
+ try:
1156
+ collection = await self._get_collection(table_type="memories", create_collection_if_not_found=True)
1157
+ if collection is None:
1158
+ return None
1159
+
1160
+ if memory.memory_id is None:
1161
+ memory.memory_id = str(uuid4())
1162
+
1163
+ update_doc = {
1164
+ "user_id": memory.user_id,
1165
+ "agent_id": memory.agent_id,
1166
+ "team_id": memory.team_id,
1167
+ "memory_id": memory.memory_id,
1168
+ "memory": memory.memory,
1169
+ "topics": memory.topics,
1170
+ "updated_at": int(time.time()),
1171
+ }
1172
+
1173
+ result = await collection.replace_one({"memory_id": memory.memory_id}, update_doc, upsert=True)
1174
+
1175
+ if result.upserted_id:
1176
+ update_doc["_id"] = result.upserted_id
1177
+
1178
+ if not deserialize:
1179
+ return update_doc
1180
+
1181
+ # Remove MongoDB's _id field before creating UserMemory object
1182
+ update_doc_filtered = {k: v for k, v in update_doc.items() if k != "_id"}
1183
+ return UserMemory.from_dict(update_doc_filtered)
1184
+
1185
+ except Exception as e:
1186
+ log_error(f"Exception upserting user memory: {e}")
1187
+ raise e
1188
+
1189
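# Editorial usage sketch (not part of the package diff): upserting a single memory.
# `db` is assumed to be an instance of this async Mongo db class and `memory` an agno
# UserMemory object (a memory_id is generated when missing).
async def save_memory(db, memory):
    return await db.upsert_user_memory(memory, deserialize=True)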
+ async def upsert_memories(
1190
+ self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
1191
+ ) -> List[Union[UserMemory, Dict[str, Any]]]:
1192
+ """
1193
+ Bulk upsert multiple user memories for improved performance on large datasets.
1194
+
1195
+ Args:
1196
+ memories (List[UserMemory]): List of memories to upsert.
1197
+ deserialize (Optional[bool]): Whether to deserialize the memories. Defaults to True.
1198
+ preserve_updated_at (bool): If True, preserve the updated_at from the memory object.
1199
+
1200
+ Returns:
1201
+ List[Union[UserMemory, Dict[str, Any]]]: List of upserted memories.
1202
+
1203
+ Raises:
1204
+ Exception: If an error occurs during bulk upsert.
1205
+ """
1206
+ if not memories:
1207
+ return []
1208
+
1209
+ try:
1210
+ collection = await self._get_collection(table_type="memories", create_collection_if_not_found=True)
1211
+ if collection is None:
1212
+ log_info("Memories collection not available, falling back to individual upserts")
1213
+ return [
1214
+ result
1215
+ for memory in memories
1216
+ if memory is not None
1217
+ for result in [await self.upsert_user_memory(memory, deserialize=deserialize)]
1218
+ if result is not None
1219
+ ]
1220
+
1221
+ from pymongo import ReplaceOne
1222
+
1223
+ operations = []
1224
+ results: List[Union[UserMemory, Dict[str, Any]]] = []
1225
+
1226
+ current_time = int(time.time())
1227
+ for memory in memories:
1228
+ if memory is None:
1229
+ continue
1230
+
1231
+ if memory.memory_id is None:
1232
+ memory.memory_id = str(uuid4())
1233
+
1234
+ # Use preserved updated_at if flag is set and value exists, otherwise use current time
1235
+ updated_at = (memory.updated_at if preserve_updated_at else None) or current_time
1236
+
1237
+ record = {
1238
+ "user_id": memory.user_id,
1239
+ "agent_id": memory.agent_id,
1240
+ "team_id": memory.team_id,
1241
+ "memory_id": memory.memory_id,
1242
+ "memory": memory.memory,
1243
+ "topics": memory.topics,
1244
+ "updated_at": updated_at,
1245
+ }
1246
+
1247
+ operations.append(ReplaceOne(filter={"memory_id": memory.memory_id}, replacement=record, upsert=True))
1248
+
1249
+ if operations:
1250
+ # Execute bulk write
1251
+ await collection.bulk_write(operations)
1252
+
1253
+ # Fetch the results
1254
+ memory_ids = [memory.memory_id for memory in memories if memory and memory.memory_id]
1255
+ cursor = collection.find({"memory_id": {"$in": memory_ids}})
1256
+
1257
+ async for doc in cursor:
1258
+ if deserialize:
1259
+ # Remove MongoDB's _id field before creating UserMemory object
1260
+ doc_filtered = {k: v for k, v in doc.items() if k != "_id"}
1261
+ results.append(UserMemory.from_dict(doc_filtered))
1262
+ else:
1263
+ results.append(doc)
1264
+
1265
+ return results
1266
+
1267
+ except Exception as e:
1268
+ log_error(f"Exception during bulk memory upsert, falling back to individual upserts: {e}")
1269
+
1270
+ # Fallback to individual upserts
1271
+ return [
1272
+ result
1273
+ for memory in memories
1274
+ if memory is not None
1275
+ for result in [await self.upsert_user_memory(memory, deserialize=deserialize)]
1276
+ if result is not None
1277
+ ]
1278
+
1279
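# Editorial usage sketch (not part of the package diff): bulk memory upsert, e.g. when
# importing existing data. `db` is assumed to be an instance of this async Mongo db
# class and `memories` a list of agno UserMemory objects.
async def import_memories(db, memories):
    # preserve_updated_at=True keeps the original timestamps instead of stamping now.
    return await db.upsert_memories(memories, deserialize=True, preserve_updated_at=True)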
+ async def clear_memories(self) -> None:
1280
+ """Delete all memories from the database.
1281
+
1282
+ Raises:
1283
+ Exception: If an error occurs during deletion.
1284
+ """
1285
+ try:
1286
+ collection = await self._get_collection(table_type="memories")
1287
+ if collection is None:
1288
+ return
1289
+
1290
+ await collection.delete_many({})
1291
+
1292
+ except Exception as e:
1293
+ log_error(f"Exception deleting all memories: {e}")
1294
+ raise e
1295
+
1296
+ # -- Cultural Knowledge methods --
1297
+ async def clear_cultural_knowledge(self) -> None:
1298
+ """Delete all cultural knowledge from the database.
1299
+
1300
+ Raises:
1301
+ Exception: If an error occurs during deletion.
1302
+ """
1303
+ try:
1304
+ collection = await self._get_collection(table_type="culture")
1305
+ if collection is None:
1306
+ return
1307
+
1308
+ await collection.delete_many({})
1309
+
1310
+ except Exception as e:
1311
+ log_error(f"Exception deleting all cultural knowledge: {e}")
1312
+ raise e
1313
+
1314
+ async def delete_cultural_knowledge(self, id: str) -> None:
1315
+ """Delete cultural knowledge by ID.
1316
+
1317
+ Args:
1318
+ id (str): The ID of the cultural knowledge to delete.
1319
+
1320
+ Raises:
1321
+ Exception: If an error occurs during deletion.
1322
+ """
1323
+ try:
1324
+ collection = await self._get_collection(table_type="culture")
1325
+ if collection is None:
1326
+ return
1327
+
1328
+ await collection.delete_one({"id": id})
1329
+ log_debug(f"Deleted cultural knowledge with ID: {id}")
1330
+
1331
+ except Exception as e:
1332
+ log_error(f"Error deleting cultural knowledge: {e}")
1333
+ raise e
1334
+
1335
+ async def get_cultural_knowledge(
1336
+ self, id: str, deserialize: Optional[bool] = True
1337
+ ) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
1338
+ """Get cultural knowledge by ID.
1339
+
1340
+ Args:
1341
+ id (str): The ID of the cultural knowledge to retrieve.
1342
+ deserialize (Optional[bool]): Whether to deserialize to CulturalKnowledge object. Defaults to True.
1343
+
1344
+ Returns:
1345
+ Optional[Union[CulturalKnowledge, Dict[str, Any]]]: The cultural knowledge if found, None otherwise.
1346
+
1347
+ Raises:
1348
+ Exception: If an error occurs during retrieval.
1349
+ """
1350
+ try:
1351
+ collection = await self._get_collection(table_type="culture")
1352
+ if collection is None:
1353
+ return None
1354
+
1355
+ result = await collection.find_one({"id": id})
1356
+ if result is None:
1357
+ return None
1358
+
1359
+ # Remove MongoDB's _id field
1360
+ result_filtered = {k: v for k, v in result.items() if k != "_id"}
1361
+
1362
+ if not deserialize:
1363
+ return result_filtered
1364
+
1365
+ return deserialize_cultural_knowledge_from_db(result_filtered)
1366
+
1367
+ except Exception as e:
1368
+ log_error(f"Error getting cultural knowledge: {e}")
1369
+ raise e
1370
+
1371
+ async def get_all_cultural_knowledge(
1372
+ self,
1373
+ agent_id: Optional[str] = None,
1374
+ team_id: Optional[str] = None,
1375
+ name: Optional[str] = None,
1376
+ limit: Optional[int] = None,
1377
+ page: Optional[int] = None,
1378
+ sort_by: Optional[str] = None,
1379
+ sort_order: Optional[str] = None,
1380
+ deserialize: Optional[bool] = True,
1381
+ ) -> Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
1382
+ """Get all cultural knowledge with filtering and pagination.
1383
+
1384
+ Args:
1385
+ agent_id (Optional[str]): Filter by agent ID.
1386
+ team_id (Optional[str]): Filter by team ID.
1387
+ name (Optional[str]): Filter by name (case-insensitive partial match).
1388
+ limit (Optional[int]): Maximum number of results to return.
1389
+ page (Optional[int]): Page number for pagination.
1390
+ sort_by (Optional[str]): Field to sort by.
1391
+ sort_order (Optional[str]): Sort order ('asc' or 'desc').
1392
+ deserialize (Optional[bool]): Whether to deserialize to CulturalKnowledge objects. Defaults to True.
1393
+
1394
+ Returns:
1395
+ Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
1396
+ - When deserialize=True: List of CulturalKnowledge objects
1397
+ - When deserialize=False: Tuple with list of dictionaries and total count
1398
+
1399
+ Raises:
1400
+ Exception: If an error occurs during retrieval.
1401
+ """
1402
+ try:
1403
+ collection = await self._get_collection(table_type="culture")
1404
+ if collection is None:
1405
+ if not deserialize:
1406
+ return [], 0
1407
+ return []
1408
+
1409
+ # Build query
1410
+ query: Dict[str, Any] = {}
1411
+ if agent_id is not None:
1412
+ query["agent_id"] = agent_id
1413
+ if team_id is not None:
1414
+ query["team_id"] = team_id
1415
+ if name is not None:
1416
+ query["name"] = {"$regex": name, "$options": "i"}
1417
+
1418
+ # Get total count for pagination
1419
+ total_count = await collection.count_documents(query)
1420
+
1421
+ # Apply sorting
1422
+ sort_criteria = apply_sorting({}, sort_by, sort_order)
1423
+
1424
+ # Apply pagination
1425
+ query_args = apply_pagination({}, limit, page)
1426
+
1427
+ cursor = collection.find(query)
1428
+ if sort_criteria:
1429
+ cursor = cursor.sort(sort_criteria)
1430
+ if query_args.get("skip"):
1431
+ cursor = cursor.skip(query_args["skip"])
1432
+ if query_args.get("limit"):
1433
+ cursor = cursor.limit(query_args["limit"])
1434
+
1435
+ # Remove MongoDB's _id field from all results
1436
+ results_filtered = [{k: v for k, v in item.items() if k != "_id"} async for item in cursor]
1437
+
1438
+ if not deserialize:
1439
+ return results_filtered, total_count
1440
+
1441
+ return [deserialize_cultural_knowledge_from_db(item) for item in results_filtered]
1442
+
1443
+ except Exception as e:
1444
+ log_error(f"Error getting all cultural knowledge: {e}")
1445
+ raise e
1446
+
1447
+ async def upsert_cultural_knowledge(
1448
+ self, cultural_knowledge: CulturalKnowledge, deserialize: Optional[bool] = True
1449
+ ) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
1450
+ """Upsert cultural knowledge in MongoDB.
1451
+
1452
+ Args:
1453
+ cultural_knowledge (CulturalKnowledge): The cultural knowledge to upsert.
1454
+ deserialize (Optional[bool]): Whether to deserialize the result. Defaults to True.
1455
+
1456
+ Returns:
1457
+ Optional[Union[CulturalKnowledge, Dict[str, Any]]]: The upserted cultural knowledge.
1458
+
1459
+ Raises:
1460
+ Exception: If an error occurs during upsert.
1461
+ """
1462
+ try:
1463
+ collection = await self._get_collection(table_type="culture", create_collection_if_not_found=True)
1464
+ if collection is None:
1465
+ return None
1466
+
1467
+ # Serialize content, categories, and notes into a dict for DB storage
1468
+ content_dict = serialize_cultural_knowledge_for_db(cultural_knowledge)
1469
+
1470
+ # Create the document with serialized content
1471
+ update_doc = {
1472
+ "id": cultural_knowledge.id,
1473
+ "name": cultural_knowledge.name,
1474
+ "summary": cultural_knowledge.summary,
1475
+ "content": content_dict if content_dict else None,
1476
+ "metadata": cultural_knowledge.metadata,
1477
+ "input": cultural_knowledge.input,
1478
+ "created_at": cultural_knowledge.created_at,
1479
+ "updated_at": int(time.time()),
1480
+ "agent_id": cultural_knowledge.agent_id,
1481
+ "team_id": cultural_knowledge.team_id,
1482
+ }
1483
+
1484
+ result = await collection.replace_one({"id": cultural_knowledge.id}, update_doc, upsert=True)
1485
+
1486
+ if result.upserted_id:
1487
+ update_doc["_id"] = result.upserted_id
1488
+
1489
+ # Remove MongoDB's _id field
1490
+ doc_filtered = {k: v for k, v in update_doc.items() if k != "_id"}
1491
+
1492
+ if not deserialize:
1493
+ return doc_filtered
1494
+
1495
+ return deserialize_cultural_knowledge_from_db(doc_filtered)
1496
+
1497
+ except Exception as e:
1498
+ log_error(f"Error upserting cultural knowledge: {e}")
1499
+ raise e
1500
+
1501
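# Editorial usage sketch (not part of the package diff): a cultural-knowledge round
# trip with the methods above. `db` is assumed to be an instance of this async Mongo
# db class and `ck` an agno CulturalKnowledge object.
async def cultural_knowledge_roundtrip(db, ck):
    await db.upsert_cultural_knowledge(ck, deserialize=True)
    items = await db.get_all_cultural_knowledge(name=ck.name, limit=10, page=1)
    fetched = await db.get_cultural_knowledge(ck.id)
    await db.delete_cultural_knowledge(ck.id)
    return items, fetched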
+ # -- Metrics methods --
1502
+
1503
+ async def _get_all_sessions_for_metrics_calculation(
1504
+ self, start_timestamp: Optional[int] = None, end_timestamp: Optional[int] = None
1505
+ ) -> List[Dict[str, Any]]:
1506
+ """Get all sessions of all types for metrics calculation."""
1507
+ try:
1508
+ collection = await self._get_collection(table_type="sessions")
1509
+ if collection is None:
1510
+ return []
1511
+
1512
+ query = {}
1513
+ if start_timestamp is not None:
1514
+ query["created_at"] = {"$gte": start_timestamp}
1515
+ if end_timestamp is not None:
1516
+ if "created_at" in query:
1517
+ query["created_at"]["$lte"] = end_timestamp
1518
+ else:
1519
+ query["created_at"] = {"$lte": end_timestamp}
1520
+
1521
+ projection = {
1522
+ "user_id": 1,
1523
+ "session_data": 1,
1524
+ "runs": 1,
1525
+ "created_at": 1,
1526
+ "session_type": 1,
1527
+ }
1528
+
1529
+ results = await collection.find(query, projection).to_list(length=None)
1530
+ return results
1531
+
1532
+ except Exception as e:
1533
+ log_error(f"Exception reading from sessions collection: {e}")
1534
+ return []
1535
+
1536
+ async def _get_metrics_calculation_starting_date(self, collection: AsyncIOMotorCollection) -> Optional[date]:
1537
+ """Get the first date for which metrics calculation is needed."""
1538
+ try:
1539
+ result = await collection.find_one({}, sort=[("date", -1)], limit=1)
1540
+
1541
+ if result is not None:
1542
+ result_date = datetime.strptime(result["date"], "%Y-%m-%d").date()
1543
+ if result.get("completed"):
1544
+ return result_date + timedelta(days=1)
1545
+ else:
1546
+ return result_date
1547
+
1548
+ # No metrics records. Return the date of the first recorded session.
1549
+ first_session_result = await self.get_sessions(
1550
+ sort_by="created_at", sort_order="asc", limit=1, deserialize=False
1551
+ )
1552
+ first_session_date = first_session_result[0][0]["created_at"] if first_session_result[0] else None # type: ignore
1553
+
1554
+ if first_session_date is None:
1555
+ return None
1556
+
1557
+ return datetime.fromtimestamp(first_session_date, tz=timezone.utc).date()
1558
+
1559
+ except Exception as e:
1560
+ log_error(f"Exception getting metrics calculation starting date: {e}")
1561
+ return None
1562
+
1563
+ async def calculate_metrics(self) -> Optional[list[dict]]:
1564
+ """Calculate metrics for all dates without complete metrics."""
1565
+ try:
1566
+ collection = await self._get_collection(table_type="metrics", create_collection_if_not_found=True)
1567
+ if collection is None:
1568
+ return None
1569
+
1570
+ starting_date = await self._get_metrics_calculation_starting_date(collection)
1571
+ if starting_date is None:
1572
+ log_info("No session data found. Won't calculate metrics.")
1573
+ return None
1574
+
1575
+ dates_to_process = get_dates_to_calculate_metrics_for(starting_date)
1576
+ if not dates_to_process:
1577
+ log_info("Metrics already calculated for all relevant dates.")
1578
+ return None
1579
+
1580
+ start_timestamp = int(
1581
+ datetime.combine(dates_to_process[0], datetime.min.time()).replace(tzinfo=timezone.utc).timestamp()
1582
+ )
1583
+ end_timestamp = int(
1584
+ datetime.combine(dates_to_process[-1] + timedelta(days=1), datetime.min.time())
1585
+ .replace(tzinfo=timezone.utc)
1586
+ .timestamp()
1587
+ )
1588
+
1589
+ sessions = await self._get_all_sessions_for_metrics_calculation(
1590
+ start_timestamp=start_timestamp, end_timestamp=end_timestamp
1591
+ )
1592
+ all_sessions_data = fetch_all_sessions_data(
1593
+ sessions=sessions, dates_to_process=dates_to_process, start_timestamp=start_timestamp
1594
+ )
1595
+ if not all_sessions_data:
1596
+ log_info("No new session data found. Won't calculate metrics.")
1597
+ return None
1598
+
1599
+ results = []
1600
+ metrics_records = []
1601
+
1602
+ for date_to_process in dates_to_process:
1603
+ date_key = date_to_process.isoformat()
1604
+ sessions_for_date = all_sessions_data.get(date_key, {})
1605
+
1606
+ # Skip dates with no sessions
1607
+ if not any(len(sessions) > 0 for sessions in sessions_for_date.values()):
1608
+ continue
1609
+
1610
+ metrics_record = calculate_date_metrics(date_to_process, sessions_for_date)
1611
+ metrics_records.append(metrics_record)
1612
+
1613
+ if metrics_records:
1614
+ results = bulk_upsert_metrics(collection, metrics_records) # type: ignore
1615
+
1616
+ return results
1617
+
1618
+ except Exception as e:
1619
+ log_error(f"Error calculating metrics: {e}")
1620
+ raise e
1621
+
1622
+ async def get_metrics(
1623
+ self,
1624
+ starting_date: Optional[date] = None,
1625
+ ending_date: Optional[date] = None,
1626
+ ) -> Tuple[List[dict], Optional[int]]:
1627
+ """Get all metrics matching the given date range."""
1628
+ try:
1629
+ collection = await self._get_collection(table_type="metrics")
1630
+ if collection is None:
1631
+ return [], None
1632
+
1633
+ query = {}
1634
+ if starting_date:
1635
+ query["date"] = {"$gte": starting_date.isoformat()}
1636
+ if ending_date:
1637
+ if "date" in query:
1638
+ query["date"]["$lte"] = ending_date.isoformat()
1639
+ else:
1640
+ query["date"] = {"$lte": ending_date.isoformat()}
1641
+
1642
+ records = await collection.find(query).to_list(length=None)
1643
+ if not records:
1644
+ return [], None
1645
+
1646
+ # Get the latest updated_at
1647
+ latest_updated_at = max(record.get("updated_at", 0) for record in records)
1648
+
1649
+ return records, latest_updated_at
1650
+
1651
+ except Exception as e:
1652
+ log_error(f"Error getting metrics: {e}")
1653
+ raise e
1654
+
1655
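# Editorial usage sketch (not part of the package diff): refreshing and reading daily
# metrics with the methods above. `db` is assumed to be an instance of this async
# Mongo db class.
from datetime import date, timedelta

async def refresh_and_read_metrics(db):
    # Calculates metrics for any dates that are not yet marked complete.
    await db.calculate_metrics()
    records, latest_updated_at = await db.get_metrics(
        starting_date=date.today() - timedelta(days=7),
        ending_date=date.today(),
    )
    return records, latest_updated_at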
+ # -- Knowledge methods --
1656
+
1657
+ async def delete_knowledge_content(self, id: str):
1658
+ """Delete a knowledge row from the database.
1659
+
1660
+ Args:
1661
+ id (str): The ID of the knowledge row to delete.
1662
+
1663
+ Raises:
1664
+ Exception: If an error occurs during deletion.
1665
+ """
1666
+ try:
1667
+ collection = await self._get_collection(table_type="knowledge")
1668
+ if collection is None:
1669
+ return
1670
+
1671
+ await collection.delete_one({"id": id})
1672
+
1673
+ log_debug(f"Deleted knowledge content with id '{id}'")
1674
+
1675
+ except Exception as e:
1676
+ log_error(f"Error deleting knowledge content: {e}")
1677
+ raise e
1678
+
1679
+ async def get_knowledge_content(self, id: str) -> Optional[KnowledgeRow]:
1680
+ """Get a knowledge row from the database.
1681
+
1682
+ Args:
1683
+ id (str): The ID of the knowledge row to get.
1684
+
1685
+ Returns:
1686
+ Optional[KnowledgeRow]: The knowledge row, or None if it doesn't exist.
1687
+
1688
+ Raises:
1689
+ Exception: If an error occurs during retrieval.
1690
+ """
1691
+ try:
1692
+ collection = await self._get_collection(table_type="knowledge")
1693
+ if collection is None:
1694
+ return None
1695
+
1696
+ result = await collection.find_one({"id": id})
1697
+ if result is None:
1698
+ return None
1699
+
1700
+ return KnowledgeRow.model_validate(result)
1701
+
1702
+ except Exception as e:
1703
+ log_error(f"Error getting knowledge content: {e}")
1704
+ raise e
1705
+
1706
+ async def get_knowledge_contents(
1707
+ self,
1708
+ limit: Optional[int] = None,
1709
+ page: Optional[int] = None,
1710
+ sort_by: Optional[str] = None,
1711
+ sort_order: Optional[str] = None,
1712
+ ) -> Tuple[List[KnowledgeRow], int]:
1713
+ """Get all knowledge contents from the database.
1714
+
1715
+ Args:
1716
+ limit (Optional[int]): The maximum number of knowledge contents to return.
1717
+ page (Optional[int]): The page number.
1718
+ sort_by (Optional[str]): The column to sort by.
1719
+ sort_order (Optional[str]): The order to sort by.
1720
+
1721
+ Returns:
1722
+ Tuple[List[KnowledgeRow], int]: The knowledge contents and total count.
1723
+
1724
+ Raises:
1725
+ Exception: If an error occurs during retrieval.
1726
+ """
1727
+ try:
1728
+ collection = await self._get_collection(table_type="knowledge")
1729
+ if collection is None:
1730
+ return [], 0
1731
+
1732
+ query: Dict[str, Any] = {}
1733
+
1734
+ # Get total count
1735
+ total_count = await collection.count_documents(query)
1736
+
1737
+ # Apply sorting
1738
+ sort_criteria = apply_sorting({}, sort_by, sort_order)
1739
+
1740
+ # Apply pagination
1741
+ query_args = apply_pagination({}, limit, page)
1742
+
1743
+ cursor = collection.find(query)
1744
+ if sort_criteria:
1745
+ cursor = cursor.sort(sort_criteria)
1746
+ if query_args.get("skip"):
1747
+ cursor = cursor.skip(query_args["skip"])
1748
+ if query_args.get("limit"):
1749
+ cursor = cursor.limit(query_args["limit"])
1750
+
1751
+ records = await cursor.to_list(length=None)
1752
+ knowledge_rows = [KnowledgeRow.model_validate(record) for record in records]
1753
+
1754
+ return knowledge_rows, total_count
1755
+
1756
+ except Exception as e:
1757
+ log_error(f"Error getting knowledge contents: {e}")
1758
+ raise e
1759
+
1760
+ async def upsert_knowledge_content(self, knowledge_row: KnowledgeRow):
1761
+ """Upsert knowledge content in the database.
1762
+
1763
+ Args:
1764
+ knowledge_row (KnowledgeRow): The knowledge row to upsert.
1765
+
1766
+ Returns:
1767
+ Optional[KnowledgeRow]: The upserted knowledge row, or None if the operation fails.
1768
+
1769
+ Raises:
1770
+ Exception: If an error occurs during upsert.
1771
+ """
1772
+ try:
1773
+ collection = await self._get_collection(table_type="knowledge", create_collection_if_not_found=True)
1774
+ if collection is None:
1775
+ return None
1776
+
1777
+ update_doc = knowledge_row.model_dump()
1778
+ await collection.replace_one({"id": knowledge_row.id}, update_doc, upsert=True)
1779
+
1780
+ return knowledge_row
1781
+
1782
+ except Exception as e:
1783
+ log_error(f"Error upserting knowledge content: {e}")
1784
+ raise e
1785
+
1786
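# Editorial usage sketch (not part of the package diff): a knowledge-content round
# trip. `db` is assumed to be an instance of this async Mongo db class and `row` an
# agno KnowledgeRow.
async def knowledge_roundtrip(db, row):
    await db.upsert_knowledge_content(row)
    rows, total = await db.get_knowledge_contents(limit=50, page=1)
    fetched = await db.get_knowledge_content(row.id)
    await db.delete_knowledge_content(row.id)
    return rows, total, fetched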
+ # -- Eval methods --
1787
+
1788
+ async def create_eval_run(self, eval_run: EvalRunRecord) -> Optional[EvalRunRecord]:
1789
+ """Create an EvalRunRecord in the database."""
1790
+ try:
1791
+ collection = await self._get_collection(table_type="evals", create_collection_if_not_found=True)
1792
+ if collection is None:
1793
+ return None
1794
+
1795
+ current_time = int(time.time())
1796
+ eval_dict = eval_run.model_dump()
1797
+ eval_dict["created_at"] = current_time
1798
+ eval_dict["updated_at"] = current_time
1799
+
1800
+ await collection.insert_one(eval_dict)
1801
+
1802
+ log_debug(f"Created eval run with id '{eval_run.run_id}'")
1803
+
1804
+ return eval_run
1805
+
1806
+ except Exception as e:
1807
+ log_error(f"Error creating eval run: {e}")
1808
+ raise e
1809
+
1810
+ async def delete_eval_run(self, eval_run_id: str) -> None:
1811
+ """Delete an eval run from the database."""
1812
+ try:
1813
+ collection = await self._get_collection(table_type="evals")
1814
+ if collection is None:
1815
+ return
1816
+
1817
+ result = await collection.delete_one({"run_id": eval_run_id})
1818
+
1819
+ if result.deleted_count == 0:
1820
+ log_debug(f"No eval run found with ID: {eval_run_id}")
1821
+ else:
1822
+ log_debug(f"Deleted eval run with ID: {eval_run_id}")
1823
+
1824
+ except Exception as e:
1825
+ log_error(f"Error deleting eval run {eval_run_id}: {e}")
1826
+ raise e
1827
+
1828
+ async def delete_eval_runs(self, eval_run_ids: List[str]) -> None:
1829
+ """Delete multiple eval runs from the database."""
1830
+ try:
1831
+ collection = await self._get_collection(table_type="evals")
1832
+ if collection is None:
1833
+ return
1834
+
1835
+ result = await collection.delete_many({"run_id": {"$in": eval_run_ids}})
1836
+
1837
+ if result.deleted_count == 0:
1838
+ log_debug(f"No eval runs found with IDs: {eval_run_ids}")
1839
+ else:
1840
+ log_debug(f"Deleted {result.deleted_count} eval runs")
1841
+
1842
+ except Exception as e:
1843
+ log_error(f"Error deleting eval runs {eval_run_ids}: {e}")
1844
+ raise e
1845
+
1846
+ async def get_eval_run_raw(self, eval_run_id: str) -> Optional[Dict[str, Any]]:
1847
+ """Get an eval run from the database as a raw dictionary."""
1848
+ try:
1849
+ collection = await self._get_collection(table_type="evals")
1850
+ if collection is None:
1851
+ return None
1852
+
1853
+ result = await collection.find_one({"run_id": eval_run_id})
1854
+ return result
1855
+
1856
+ except Exception as e:
1857
+ log_error(f"Exception getting eval run {eval_run_id}: {e}")
1858
+ raise e
1859
+
1860
+ async def get_eval_run(
1861
+ self, eval_run_id: str, deserialize: Optional[bool] = True
1862
+ ) -> Optional[Union[EvalRunRecord, Dict[str, Any]]]:
1863
+ """Get an eval run from the database.
1864
+
1865
+ Args:
1866
+ eval_run_id (str): The ID of the eval run to get.
1867
+ deserialize (Optional[bool]): Whether to deserialize the eval run. Defaults to True.
1868
+
1869
+ Returns:
1870
+ Optional[Union[EvalRunRecord, Dict[str, Any]]]:
1871
+ - When deserialize=True: EvalRunRecord object
1872
+ - When deserialize=False: EvalRun dictionary
1873
+
1874
+ Raises:
1875
+ Exception: If there is an error getting the eval run.
1876
+ """
1877
+ try:
1878
+ collection = await self._get_collection(table_type="evals")
1879
+ if collection is None:
1880
+ return None
1881
+
1882
+ eval_run_raw = await collection.find_one({"run_id": eval_run_id})
1883
+
1884
+ if not eval_run_raw:
1885
+ return None
1886
+
1887
+ if not deserialize:
1888
+ return eval_run_raw
1889
+
1890
+ return EvalRunRecord.model_validate(eval_run_raw)
1891
+
1892
+ except Exception as e:
1893
+ log_error(f"Exception getting eval run {eval_run_id}: {e}")
1894
+ raise e
1895
+
1896
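# Editorial usage sketch (not part of the package diff): creating, fetching and
# deleting an eval run. `db` is assumed to be an instance of this async Mongo db
# class and `run` an agno EvalRunRecord.
async def eval_run_roundtrip(db, run):
    await db.create_eval_run(run)
    fetched = await db.get_eval_run(run.run_id, deserialize=True)
    await db.delete_eval_run(run.run_id)
    return fetched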
+ async def get_eval_runs(
1897
+ self,
1898
+ limit: Optional[int] = None,
1899
+ page: Optional[int] = None,
1900
+ sort_by: Optional[str] = None,
1901
+ sort_order: Optional[str] = None,
1902
+ agent_id: Optional[str] = None,
1903
+ team_id: Optional[str] = None,
1904
+ workflow_id: Optional[str] = None,
1905
+ model_id: Optional[str] = None,
1906
+ filter_type: Optional[EvalFilterType] = None,
1907
+ eval_type: Optional[List[EvalType]] = None,
1908
+ deserialize: Optional[bool] = True,
1909
+ ) -> Union[List[EvalRunRecord], Tuple[List[Dict[str, Any]], int]]:
1910
+ """Get all eval runs from the database.
1911
+
1912
+ Args:
1913
+ limit (Optional[int]): The maximum number of eval runs to return.
1914
+ page (Optional[int]): The page number to return.
1915
+ sort_by (Optional[str]): The field to sort by.
1916
+ sort_order (Optional[str]): The order to sort by.
1917
+ agent_id (Optional[str]): The ID of the agent to filter by.
1918
+ team_id (Optional[str]): The ID of the team to filter by.
1919
+ workflow_id (Optional[str]): The ID of the workflow to filter by.
1920
+ model_id (Optional[str]): The ID of the model to filter by.
1921
+ eval_type (Optional[List[EvalType]]): The type of eval to filter by.
1922
+ filter_type (Optional[EvalFilterType]): The type of filter to apply.
1923
+ deserialize (Optional[bool]): Whether to deserialize the eval runs. Defaults to True.
1924
+
1925
+ Returns:
1926
+ Union[List[EvalRunRecord], Tuple[List[Dict[str, Any]], int]]:
1927
+ - When deserialize=True: List of EvalRunRecord objects
1928
+ - When deserialize=False: List of eval run dictionaries and the total count
1929
+
1930
+ Raises:
1931
+ Exception: If there is an error getting the eval runs.
1932
+ """
1933
+ try:
1934
+ collection = await self._get_collection(table_type="evals")
1935
+ if collection is None:
1936
+ return [] if deserialize else ([], 0)
1937
+
1938
+ query: Dict[str, Any] = {}
1939
+ if agent_id is not None:
1940
+ query["agent_id"] = agent_id
1941
+ if team_id is not None:
1942
+ query["team_id"] = team_id
1943
+ if workflow_id is not None:
1944
+ query["workflow_id"] = workflow_id
1945
+ if model_id is not None:
1946
+ query["model_id"] = model_id
1947
+ if eval_type is not None and len(eval_type) > 0:
1948
+ query["eval_type"] = {"$in": eval_type}
1949
+ if filter_type is not None:
1950
+ if filter_type == EvalFilterType.AGENT:
1951
+ query["agent_id"] = {"$ne": None}
1952
+ elif filter_type == EvalFilterType.TEAM:
1953
+ query["team_id"] = {"$ne": None}
1954
+ elif filter_type == EvalFilterType.WORKFLOW:
1955
+ query["workflow_id"] = {"$ne": None}
1956
+
1957
+ # Get total count
1958
+ total_count = await collection.count_documents(query)
1959
+
1960
+ # Apply default sorting by created_at desc if no sort parameters provided
1961
+ if sort_by is None:
1962
+ sort_criteria = [("created_at", -1)]
1963
+ else:
1964
+ sort_criteria = apply_sorting({}, sort_by, sort_order)
1965
+
1966
+ # Apply pagination
1967
+ query_args = apply_pagination({}, limit, page)
1968
+
1969
+ cursor = collection.find(query)
1970
+ if sort_criteria:
1971
+ cursor = cursor.sort(sort_criteria)
1972
+ if query_args.get("skip"):
1973
+ cursor = cursor.skip(query_args["skip"])
1974
+ if query_args.get("limit"):
1975
+ cursor = cursor.limit(query_args["limit"])
1976
+
1977
+ records = await cursor.to_list(length=None)
1978
+ if not records:
1979
+ return [] if deserialize else ([], 0)
1980
+
1981
+ if not deserialize:
1982
+ return records, total_count
1983
+
1984
+ return [EvalRunRecord.model_validate(row) for row in records]
1985
+
1986
+ except Exception as e:
1987
+ log_error(f"Exception getting eval runs: {e}")
1988
+ raise e
1989
+
1990
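# Editorial usage sketch (not part of the package diff): filtered, paginated listing
# of eval runs. `db` is assumed to be an instance of this async Mongo db class.
async def list_agent_eval_runs(db, agent_id):
    # deserialize=False also returns the total count; default sort is created_at desc.
    rows, total = await db.get_eval_runs(agent_id=agent_id, limit=25, page=1, deserialize=False)
    return rows, total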
+ async def rename_eval_run(
1991
+ self, eval_run_id: str, name: str, deserialize: Optional[bool] = True
1992
+ ) -> Optional[Union[EvalRunRecord, Dict[str, Any]]]:
1993
+ """Update the name of an eval run in the database.
1994
+
1995
+ Args:
1996
+ eval_run_id (str): The ID of the eval run to update.
1997
+ name (str): The new name of the eval run.
1998
+ deserialize (Optional[bool]): Whether to deserialize the eval run. Defaults to True.
1999
+
2000
+ Returns:
2001
+ Optional[Union[EvalRunRecord, Dict[str, Any]]]:
2002
+ - When deserialize=True: EvalRunRecord object
2003
+ - When deserialize=False: EvalRun dictionary
2004
+
2005
+ Raises:
2006
+ Exception: If there is an error updating the eval run.
2007
+ """
2008
+ try:
2009
+ collection = await self._get_collection(table_type="evals")
2010
+ if collection is None:
2011
+ return None
2012
+
2013
+ from pymongo import ReturnDocument
+
+ result = await collection.find_one_and_update(
2014
+ {"run_id": eval_run_id}, {"$set": {"name": name, "updated_at": int(time.time())}}
2015
+ )
2016
+
2017
+ log_debug(f"Renamed eval run with id '{eval_run_id}' to '{name}'")
2018
+
2019
+ if not result or not deserialize:
2020
+ return result
2021
+
2022
+ return EvalRunRecord.model_validate(result)
2023
+
2024
+ except Exception as e:
2025
+ log_error(f"Error updating eval run name {eval_run_id}: {e}")
2026
+ raise e