agno 2.2.13__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (575) hide show
  1. agno/__init__.py +8 -0
  2. agno/agent/__init__.py +51 -0
  3. agno/agent/agent.py +10405 -0
  4. agno/api/__init__.py +0 -0
  5. agno/api/agent.py +28 -0
  6. agno/api/api.py +40 -0
  7. agno/api/evals.py +22 -0
  8. agno/api/os.py +17 -0
  9. agno/api/routes.py +13 -0
  10. agno/api/schemas/__init__.py +9 -0
  11. agno/api/schemas/agent.py +16 -0
  12. agno/api/schemas/evals.py +16 -0
  13. agno/api/schemas/os.py +14 -0
  14. agno/api/schemas/response.py +6 -0
  15. agno/api/schemas/team.py +16 -0
  16. agno/api/schemas/utils.py +21 -0
  17. agno/api/schemas/workflows.py +16 -0
  18. agno/api/settings.py +53 -0
  19. agno/api/team.py +30 -0
  20. agno/api/workflow.py +28 -0
  21. agno/cloud/aws/base.py +214 -0
  22. agno/cloud/aws/s3/__init__.py +2 -0
  23. agno/cloud/aws/s3/api_client.py +43 -0
  24. agno/cloud/aws/s3/bucket.py +195 -0
  25. agno/cloud/aws/s3/object.py +57 -0
  26. agno/culture/__init__.py +3 -0
  27. agno/culture/manager.py +956 -0
  28. agno/db/__init__.py +24 -0
  29. agno/db/async_postgres/__init__.py +3 -0
  30. agno/db/base.py +598 -0
  31. agno/db/dynamo/__init__.py +3 -0
  32. agno/db/dynamo/dynamo.py +2042 -0
  33. agno/db/dynamo/schemas.py +314 -0
  34. agno/db/dynamo/utils.py +743 -0
  35. agno/db/firestore/__init__.py +3 -0
  36. agno/db/firestore/firestore.py +1795 -0
  37. agno/db/firestore/schemas.py +140 -0
  38. agno/db/firestore/utils.py +376 -0
  39. agno/db/gcs_json/__init__.py +3 -0
  40. agno/db/gcs_json/gcs_json_db.py +1335 -0
  41. agno/db/gcs_json/utils.py +228 -0
  42. agno/db/in_memory/__init__.py +3 -0
  43. agno/db/in_memory/in_memory_db.py +1160 -0
  44. agno/db/in_memory/utils.py +230 -0
  45. agno/db/json/__init__.py +3 -0
  46. agno/db/json/json_db.py +1328 -0
  47. agno/db/json/utils.py +230 -0
  48. agno/db/migrations/__init__.py +0 -0
  49. agno/db/migrations/v1_to_v2.py +635 -0
  50. agno/db/mongo/__init__.py +17 -0
  51. agno/db/mongo/async_mongo.py +2026 -0
  52. agno/db/mongo/mongo.py +1982 -0
  53. agno/db/mongo/schemas.py +87 -0
  54. agno/db/mongo/utils.py +259 -0
  55. agno/db/mysql/__init__.py +3 -0
  56. agno/db/mysql/mysql.py +2308 -0
  57. agno/db/mysql/schemas.py +138 -0
  58. agno/db/mysql/utils.py +355 -0
  59. agno/db/postgres/__init__.py +4 -0
  60. agno/db/postgres/async_postgres.py +1927 -0
  61. agno/db/postgres/postgres.py +2260 -0
  62. agno/db/postgres/schemas.py +139 -0
  63. agno/db/postgres/utils.py +442 -0
  64. agno/db/redis/__init__.py +3 -0
  65. agno/db/redis/redis.py +1660 -0
  66. agno/db/redis/schemas.py +123 -0
  67. agno/db/redis/utils.py +346 -0
  68. agno/db/schemas/__init__.py +4 -0
  69. agno/db/schemas/culture.py +120 -0
  70. agno/db/schemas/evals.py +33 -0
  71. agno/db/schemas/knowledge.py +40 -0
  72. agno/db/schemas/memory.py +46 -0
  73. agno/db/schemas/metrics.py +0 -0
  74. agno/db/singlestore/__init__.py +3 -0
  75. agno/db/singlestore/schemas.py +130 -0
  76. agno/db/singlestore/singlestore.py +2272 -0
  77. agno/db/singlestore/utils.py +384 -0
  78. agno/db/sqlite/__init__.py +4 -0
  79. agno/db/sqlite/async_sqlite.py +2293 -0
  80. agno/db/sqlite/schemas.py +133 -0
  81. agno/db/sqlite/sqlite.py +2288 -0
  82. agno/db/sqlite/utils.py +431 -0
  83. agno/db/surrealdb/__init__.py +3 -0
  84. agno/db/surrealdb/metrics.py +292 -0
  85. agno/db/surrealdb/models.py +309 -0
  86. agno/db/surrealdb/queries.py +71 -0
  87. agno/db/surrealdb/surrealdb.py +1353 -0
  88. agno/db/surrealdb/utils.py +147 -0
  89. agno/db/utils.py +116 -0
  90. agno/debug.py +18 -0
  91. agno/eval/__init__.py +14 -0
  92. agno/eval/accuracy.py +834 -0
  93. agno/eval/performance.py +773 -0
  94. agno/eval/reliability.py +306 -0
  95. agno/eval/utils.py +119 -0
  96. agno/exceptions.py +161 -0
  97. agno/filters.py +354 -0
  98. agno/guardrails/__init__.py +6 -0
  99. agno/guardrails/base.py +19 -0
  100. agno/guardrails/openai.py +144 -0
  101. agno/guardrails/pii.py +94 -0
  102. agno/guardrails/prompt_injection.py +52 -0
  103. agno/integrations/__init__.py +0 -0
  104. agno/integrations/discord/__init__.py +3 -0
  105. agno/integrations/discord/client.py +203 -0
  106. agno/knowledge/__init__.py +5 -0
  107. agno/knowledge/chunking/__init__.py +0 -0
  108. agno/knowledge/chunking/agentic.py +79 -0
  109. agno/knowledge/chunking/document.py +91 -0
  110. agno/knowledge/chunking/fixed.py +57 -0
  111. agno/knowledge/chunking/markdown.py +151 -0
  112. agno/knowledge/chunking/recursive.py +63 -0
  113. agno/knowledge/chunking/row.py +39 -0
  114. agno/knowledge/chunking/semantic.py +86 -0
  115. agno/knowledge/chunking/strategy.py +165 -0
  116. agno/knowledge/content.py +74 -0
  117. agno/knowledge/document/__init__.py +5 -0
  118. agno/knowledge/document/base.py +58 -0
  119. agno/knowledge/embedder/__init__.py +5 -0
  120. agno/knowledge/embedder/aws_bedrock.py +343 -0
  121. agno/knowledge/embedder/azure_openai.py +210 -0
  122. agno/knowledge/embedder/base.py +23 -0
  123. agno/knowledge/embedder/cohere.py +323 -0
  124. agno/knowledge/embedder/fastembed.py +62 -0
  125. agno/knowledge/embedder/fireworks.py +13 -0
  126. agno/knowledge/embedder/google.py +258 -0
  127. agno/knowledge/embedder/huggingface.py +94 -0
  128. agno/knowledge/embedder/jina.py +182 -0
  129. agno/knowledge/embedder/langdb.py +22 -0
  130. agno/knowledge/embedder/mistral.py +206 -0
  131. agno/knowledge/embedder/nebius.py +13 -0
  132. agno/knowledge/embedder/ollama.py +154 -0
  133. agno/knowledge/embedder/openai.py +195 -0
  134. agno/knowledge/embedder/sentence_transformer.py +63 -0
  135. agno/knowledge/embedder/together.py +13 -0
  136. agno/knowledge/embedder/vllm.py +262 -0
  137. agno/knowledge/embedder/voyageai.py +165 -0
  138. agno/knowledge/knowledge.py +1988 -0
  139. agno/knowledge/reader/__init__.py +7 -0
  140. agno/knowledge/reader/arxiv_reader.py +81 -0
  141. agno/knowledge/reader/base.py +95 -0
  142. agno/knowledge/reader/csv_reader.py +166 -0
  143. agno/knowledge/reader/docx_reader.py +82 -0
  144. agno/knowledge/reader/field_labeled_csv_reader.py +292 -0
  145. agno/knowledge/reader/firecrawl_reader.py +201 -0
  146. agno/knowledge/reader/json_reader.py +87 -0
  147. agno/knowledge/reader/markdown_reader.py +137 -0
  148. agno/knowledge/reader/pdf_reader.py +431 -0
  149. agno/knowledge/reader/pptx_reader.py +101 -0
  150. agno/knowledge/reader/reader_factory.py +313 -0
  151. agno/knowledge/reader/s3_reader.py +89 -0
  152. agno/knowledge/reader/tavily_reader.py +194 -0
  153. agno/knowledge/reader/text_reader.py +115 -0
  154. agno/knowledge/reader/web_search_reader.py +372 -0
  155. agno/knowledge/reader/website_reader.py +455 -0
  156. agno/knowledge/reader/wikipedia_reader.py +59 -0
  157. agno/knowledge/reader/youtube_reader.py +78 -0
  158. agno/knowledge/remote_content/__init__.py +0 -0
  159. agno/knowledge/remote_content/remote_content.py +88 -0
  160. agno/knowledge/reranker/__init__.py +3 -0
  161. agno/knowledge/reranker/base.py +14 -0
  162. agno/knowledge/reranker/cohere.py +64 -0
  163. agno/knowledge/reranker/infinity.py +195 -0
  164. agno/knowledge/reranker/sentence_transformer.py +54 -0
  165. agno/knowledge/types.py +39 -0
  166. agno/knowledge/utils.py +189 -0
  167. agno/media.py +462 -0
  168. agno/memory/__init__.py +3 -0
  169. agno/memory/manager.py +1327 -0
  170. agno/models/__init__.py +0 -0
  171. agno/models/aimlapi/__init__.py +5 -0
  172. agno/models/aimlapi/aimlapi.py +45 -0
  173. agno/models/anthropic/__init__.py +5 -0
  174. agno/models/anthropic/claude.py +757 -0
  175. agno/models/aws/__init__.py +15 -0
  176. agno/models/aws/bedrock.py +701 -0
  177. agno/models/aws/claude.py +378 -0
  178. agno/models/azure/__init__.py +18 -0
  179. agno/models/azure/ai_foundry.py +485 -0
  180. agno/models/azure/openai_chat.py +131 -0
  181. agno/models/base.py +2175 -0
  182. agno/models/cerebras/__init__.py +12 -0
  183. agno/models/cerebras/cerebras.py +501 -0
  184. agno/models/cerebras/cerebras_openai.py +112 -0
  185. agno/models/cohere/__init__.py +5 -0
  186. agno/models/cohere/chat.py +389 -0
  187. agno/models/cometapi/__init__.py +5 -0
  188. agno/models/cometapi/cometapi.py +57 -0
  189. agno/models/dashscope/__init__.py +5 -0
  190. agno/models/dashscope/dashscope.py +91 -0
  191. agno/models/deepinfra/__init__.py +5 -0
  192. agno/models/deepinfra/deepinfra.py +28 -0
  193. agno/models/deepseek/__init__.py +5 -0
  194. agno/models/deepseek/deepseek.py +61 -0
  195. agno/models/defaults.py +1 -0
  196. agno/models/fireworks/__init__.py +5 -0
  197. agno/models/fireworks/fireworks.py +26 -0
  198. agno/models/google/__init__.py +5 -0
  199. agno/models/google/gemini.py +1085 -0
  200. agno/models/groq/__init__.py +5 -0
  201. agno/models/groq/groq.py +556 -0
  202. agno/models/huggingface/__init__.py +5 -0
  203. agno/models/huggingface/huggingface.py +491 -0
  204. agno/models/ibm/__init__.py +5 -0
  205. agno/models/ibm/watsonx.py +422 -0
  206. agno/models/internlm/__init__.py +3 -0
  207. agno/models/internlm/internlm.py +26 -0
  208. agno/models/langdb/__init__.py +1 -0
  209. agno/models/langdb/langdb.py +48 -0
  210. agno/models/litellm/__init__.py +14 -0
  211. agno/models/litellm/chat.py +468 -0
  212. agno/models/litellm/litellm_openai.py +25 -0
  213. agno/models/llama_cpp/__init__.py +5 -0
  214. agno/models/llama_cpp/llama_cpp.py +22 -0
  215. agno/models/lmstudio/__init__.py +5 -0
  216. agno/models/lmstudio/lmstudio.py +25 -0
  217. agno/models/message.py +434 -0
  218. agno/models/meta/__init__.py +12 -0
  219. agno/models/meta/llama.py +475 -0
  220. agno/models/meta/llama_openai.py +78 -0
  221. agno/models/metrics.py +120 -0
  222. agno/models/mistral/__init__.py +5 -0
  223. agno/models/mistral/mistral.py +432 -0
  224. agno/models/nebius/__init__.py +3 -0
  225. agno/models/nebius/nebius.py +54 -0
  226. agno/models/nexus/__init__.py +3 -0
  227. agno/models/nexus/nexus.py +22 -0
  228. agno/models/nvidia/__init__.py +5 -0
  229. agno/models/nvidia/nvidia.py +28 -0
  230. agno/models/ollama/__init__.py +5 -0
  231. agno/models/ollama/chat.py +441 -0
  232. agno/models/openai/__init__.py +9 -0
  233. agno/models/openai/chat.py +883 -0
  234. agno/models/openai/like.py +27 -0
  235. agno/models/openai/responses.py +1050 -0
  236. agno/models/openrouter/__init__.py +5 -0
  237. agno/models/openrouter/openrouter.py +66 -0
  238. agno/models/perplexity/__init__.py +5 -0
  239. agno/models/perplexity/perplexity.py +187 -0
  240. agno/models/portkey/__init__.py +3 -0
  241. agno/models/portkey/portkey.py +81 -0
  242. agno/models/requesty/__init__.py +5 -0
  243. agno/models/requesty/requesty.py +52 -0
  244. agno/models/response.py +199 -0
  245. agno/models/sambanova/__init__.py +5 -0
  246. agno/models/sambanova/sambanova.py +28 -0
  247. agno/models/siliconflow/__init__.py +5 -0
  248. agno/models/siliconflow/siliconflow.py +25 -0
  249. agno/models/together/__init__.py +5 -0
  250. agno/models/together/together.py +25 -0
  251. agno/models/utils.py +266 -0
  252. agno/models/vercel/__init__.py +3 -0
  253. agno/models/vercel/v0.py +26 -0
  254. agno/models/vertexai/__init__.py +0 -0
  255. agno/models/vertexai/claude.py +70 -0
  256. agno/models/vllm/__init__.py +3 -0
  257. agno/models/vllm/vllm.py +78 -0
  258. agno/models/xai/__init__.py +3 -0
  259. agno/models/xai/xai.py +113 -0
  260. agno/os/__init__.py +3 -0
  261. agno/os/app.py +876 -0
  262. agno/os/auth.py +57 -0
  263. agno/os/config.py +104 -0
  264. agno/os/interfaces/__init__.py +1 -0
  265. agno/os/interfaces/a2a/__init__.py +3 -0
  266. agno/os/interfaces/a2a/a2a.py +42 -0
  267. agno/os/interfaces/a2a/router.py +250 -0
  268. agno/os/interfaces/a2a/utils.py +924 -0
  269. agno/os/interfaces/agui/__init__.py +3 -0
  270. agno/os/interfaces/agui/agui.py +47 -0
  271. agno/os/interfaces/agui/router.py +144 -0
  272. agno/os/interfaces/agui/utils.py +534 -0
  273. agno/os/interfaces/base.py +25 -0
  274. agno/os/interfaces/slack/__init__.py +3 -0
  275. agno/os/interfaces/slack/router.py +148 -0
  276. agno/os/interfaces/slack/security.py +30 -0
  277. agno/os/interfaces/slack/slack.py +47 -0
  278. agno/os/interfaces/whatsapp/__init__.py +3 -0
  279. agno/os/interfaces/whatsapp/router.py +211 -0
  280. agno/os/interfaces/whatsapp/security.py +53 -0
  281. agno/os/interfaces/whatsapp/whatsapp.py +36 -0
  282. agno/os/mcp.py +292 -0
  283. agno/os/middleware/__init__.py +7 -0
  284. agno/os/middleware/jwt.py +233 -0
  285. agno/os/router.py +1763 -0
  286. agno/os/routers/__init__.py +3 -0
  287. agno/os/routers/evals/__init__.py +3 -0
  288. agno/os/routers/evals/evals.py +430 -0
  289. agno/os/routers/evals/schemas.py +142 -0
  290. agno/os/routers/evals/utils.py +162 -0
  291. agno/os/routers/health.py +31 -0
  292. agno/os/routers/home.py +52 -0
  293. agno/os/routers/knowledge/__init__.py +3 -0
  294. agno/os/routers/knowledge/knowledge.py +997 -0
  295. agno/os/routers/knowledge/schemas.py +178 -0
  296. agno/os/routers/memory/__init__.py +3 -0
  297. agno/os/routers/memory/memory.py +515 -0
  298. agno/os/routers/memory/schemas.py +62 -0
  299. agno/os/routers/metrics/__init__.py +3 -0
  300. agno/os/routers/metrics/metrics.py +190 -0
  301. agno/os/routers/metrics/schemas.py +47 -0
  302. agno/os/routers/session/__init__.py +3 -0
  303. agno/os/routers/session/session.py +997 -0
  304. agno/os/schema.py +1055 -0
  305. agno/os/settings.py +43 -0
  306. agno/os/utils.py +630 -0
  307. agno/py.typed +0 -0
  308. agno/reasoning/__init__.py +0 -0
  309. agno/reasoning/anthropic.py +80 -0
  310. agno/reasoning/azure_ai_foundry.py +67 -0
  311. agno/reasoning/deepseek.py +63 -0
  312. agno/reasoning/default.py +97 -0
  313. agno/reasoning/gemini.py +73 -0
  314. agno/reasoning/groq.py +71 -0
  315. agno/reasoning/helpers.py +63 -0
  316. agno/reasoning/ollama.py +67 -0
  317. agno/reasoning/openai.py +86 -0
  318. agno/reasoning/step.py +31 -0
  319. agno/reasoning/vertexai.py +76 -0
  320. agno/run/__init__.py +6 -0
  321. agno/run/agent.py +787 -0
  322. agno/run/base.py +229 -0
  323. agno/run/cancel.py +81 -0
  324. agno/run/messages.py +32 -0
  325. agno/run/team.py +753 -0
  326. agno/run/workflow.py +708 -0
  327. agno/session/__init__.py +10 -0
  328. agno/session/agent.py +295 -0
  329. agno/session/summary.py +265 -0
  330. agno/session/team.py +392 -0
  331. agno/session/workflow.py +205 -0
  332. agno/team/__init__.py +37 -0
  333. agno/team/team.py +8793 -0
  334. agno/tools/__init__.py +10 -0
  335. agno/tools/agentql.py +120 -0
  336. agno/tools/airflow.py +69 -0
  337. agno/tools/api.py +122 -0
  338. agno/tools/apify.py +314 -0
  339. agno/tools/arxiv.py +127 -0
  340. agno/tools/aws_lambda.py +53 -0
  341. agno/tools/aws_ses.py +66 -0
  342. agno/tools/baidusearch.py +89 -0
  343. agno/tools/bitbucket.py +292 -0
  344. agno/tools/brandfetch.py +213 -0
  345. agno/tools/bravesearch.py +106 -0
  346. agno/tools/brightdata.py +367 -0
  347. agno/tools/browserbase.py +209 -0
  348. agno/tools/calcom.py +255 -0
  349. agno/tools/calculator.py +151 -0
  350. agno/tools/cartesia.py +187 -0
  351. agno/tools/clickup.py +244 -0
  352. agno/tools/confluence.py +240 -0
  353. agno/tools/crawl4ai.py +158 -0
  354. agno/tools/csv_toolkit.py +185 -0
  355. agno/tools/dalle.py +110 -0
  356. agno/tools/daytona.py +475 -0
  357. agno/tools/decorator.py +262 -0
  358. agno/tools/desi_vocal.py +108 -0
  359. agno/tools/discord.py +161 -0
  360. agno/tools/docker.py +716 -0
  361. agno/tools/duckdb.py +379 -0
  362. agno/tools/duckduckgo.py +91 -0
  363. agno/tools/e2b.py +703 -0
  364. agno/tools/eleven_labs.py +196 -0
  365. agno/tools/email.py +67 -0
  366. agno/tools/evm.py +129 -0
  367. agno/tools/exa.py +396 -0
  368. agno/tools/fal.py +127 -0
  369. agno/tools/file.py +240 -0
  370. agno/tools/file_generation.py +350 -0
  371. agno/tools/financial_datasets.py +288 -0
  372. agno/tools/firecrawl.py +143 -0
  373. agno/tools/function.py +1187 -0
  374. agno/tools/giphy.py +93 -0
  375. agno/tools/github.py +1760 -0
  376. agno/tools/gmail.py +922 -0
  377. agno/tools/google_bigquery.py +117 -0
  378. agno/tools/google_drive.py +270 -0
  379. agno/tools/google_maps.py +253 -0
  380. agno/tools/googlecalendar.py +674 -0
  381. agno/tools/googlesearch.py +98 -0
  382. agno/tools/googlesheets.py +377 -0
  383. agno/tools/hackernews.py +77 -0
  384. agno/tools/jina.py +101 -0
  385. agno/tools/jira.py +170 -0
  386. agno/tools/knowledge.py +218 -0
  387. agno/tools/linear.py +426 -0
  388. agno/tools/linkup.py +58 -0
  389. agno/tools/local_file_system.py +90 -0
  390. agno/tools/lumalab.py +183 -0
  391. agno/tools/mcp/__init__.py +10 -0
  392. agno/tools/mcp/mcp.py +331 -0
  393. agno/tools/mcp/multi_mcp.py +347 -0
  394. agno/tools/mcp/params.py +24 -0
  395. agno/tools/mcp_toolbox.py +284 -0
  396. agno/tools/mem0.py +193 -0
  397. agno/tools/memori.py +339 -0
  398. agno/tools/memory.py +419 -0
  399. agno/tools/mlx_transcribe.py +139 -0
  400. agno/tools/models/__init__.py +0 -0
  401. agno/tools/models/azure_openai.py +190 -0
  402. agno/tools/models/gemini.py +203 -0
  403. agno/tools/models/groq.py +158 -0
  404. agno/tools/models/morph.py +186 -0
  405. agno/tools/models/nebius.py +124 -0
  406. agno/tools/models_labs.py +195 -0
  407. agno/tools/moviepy_video.py +349 -0
  408. agno/tools/neo4j.py +134 -0
  409. agno/tools/newspaper.py +46 -0
  410. agno/tools/newspaper4k.py +93 -0
  411. agno/tools/notion.py +204 -0
  412. agno/tools/openai.py +202 -0
  413. agno/tools/openbb.py +160 -0
  414. agno/tools/opencv.py +321 -0
  415. agno/tools/openweather.py +233 -0
  416. agno/tools/oxylabs.py +385 -0
  417. agno/tools/pandas.py +102 -0
  418. agno/tools/parallel.py +314 -0
  419. agno/tools/postgres.py +257 -0
  420. agno/tools/pubmed.py +188 -0
  421. agno/tools/python.py +205 -0
  422. agno/tools/reasoning.py +283 -0
  423. agno/tools/reddit.py +467 -0
  424. agno/tools/replicate.py +117 -0
  425. agno/tools/resend.py +62 -0
  426. agno/tools/scrapegraph.py +222 -0
  427. agno/tools/searxng.py +152 -0
  428. agno/tools/serpapi.py +116 -0
  429. agno/tools/serper.py +255 -0
  430. agno/tools/shell.py +53 -0
  431. agno/tools/slack.py +136 -0
  432. agno/tools/sleep.py +20 -0
  433. agno/tools/spider.py +116 -0
  434. agno/tools/sql.py +154 -0
  435. agno/tools/streamlit/__init__.py +0 -0
  436. agno/tools/streamlit/components.py +113 -0
  437. agno/tools/tavily.py +254 -0
  438. agno/tools/telegram.py +48 -0
  439. agno/tools/todoist.py +218 -0
  440. agno/tools/tool_registry.py +1 -0
  441. agno/tools/toolkit.py +146 -0
  442. agno/tools/trafilatura.py +388 -0
  443. agno/tools/trello.py +274 -0
  444. agno/tools/twilio.py +186 -0
  445. agno/tools/user_control_flow.py +78 -0
  446. agno/tools/valyu.py +228 -0
  447. agno/tools/visualization.py +467 -0
  448. agno/tools/webbrowser.py +28 -0
  449. agno/tools/webex.py +76 -0
  450. agno/tools/website.py +54 -0
  451. agno/tools/webtools.py +45 -0
  452. agno/tools/whatsapp.py +286 -0
  453. agno/tools/wikipedia.py +63 -0
  454. agno/tools/workflow.py +278 -0
  455. agno/tools/x.py +335 -0
  456. agno/tools/yfinance.py +257 -0
  457. agno/tools/youtube.py +184 -0
  458. agno/tools/zendesk.py +82 -0
  459. agno/tools/zep.py +454 -0
  460. agno/tools/zoom.py +382 -0
  461. agno/utils/__init__.py +0 -0
  462. agno/utils/agent.py +820 -0
  463. agno/utils/audio.py +49 -0
  464. agno/utils/certs.py +27 -0
  465. agno/utils/code_execution.py +11 -0
  466. agno/utils/common.py +132 -0
  467. agno/utils/dttm.py +13 -0
  468. agno/utils/enum.py +22 -0
  469. agno/utils/env.py +11 -0
  470. agno/utils/events.py +696 -0
  471. agno/utils/format_str.py +16 -0
  472. agno/utils/functions.py +166 -0
  473. agno/utils/gemini.py +426 -0
  474. agno/utils/hooks.py +57 -0
  475. agno/utils/http.py +74 -0
  476. agno/utils/json_schema.py +234 -0
  477. agno/utils/knowledge.py +36 -0
  478. agno/utils/location.py +19 -0
  479. agno/utils/log.py +255 -0
  480. agno/utils/mcp.py +214 -0
  481. agno/utils/media.py +352 -0
  482. agno/utils/merge_dict.py +41 -0
  483. agno/utils/message.py +118 -0
  484. agno/utils/models/__init__.py +0 -0
  485. agno/utils/models/ai_foundry.py +43 -0
  486. agno/utils/models/claude.py +358 -0
  487. agno/utils/models/cohere.py +87 -0
  488. agno/utils/models/llama.py +78 -0
  489. agno/utils/models/mistral.py +98 -0
  490. agno/utils/models/openai_responses.py +140 -0
  491. agno/utils/models/schema_utils.py +153 -0
  492. agno/utils/models/watsonx.py +41 -0
  493. agno/utils/openai.py +257 -0
  494. agno/utils/pickle.py +32 -0
  495. agno/utils/pprint.py +178 -0
  496. agno/utils/print_response/__init__.py +0 -0
  497. agno/utils/print_response/agent.py +842 -0
  498. agno/utils/print_response/team.py +1724 -0
  499. agno/utils/print_response/workflow.py +1668 -0
  500. agno/utils/prompts.py +111 -0
  501. agno/utils/reasoning.py +108 -0
  502. agno/utils/response.py +163 -0
  503. agno/utils/response_iterator.py +17 -0
  504. agno/utils/safe_formatter.py +24 -0
  505. agno/utils/serialize.py +32 -0
  506. agno/utils/shell.py +22 -0
  507. agno/utils/streamlit.py +487 -0
  508. agno/utils/string.py +231 -0
  509. agno/utils/team.py +139 -0
  510. agno/utils/timer.py +41 -0
  511. agno/utils/tools.py +102 -0
  512. agno/utils/web.py +23 -0
  513. agno/utils/whatsapp.py +305 -0
  514. agno/utils/yaml_io.py +25 -0
  515. agno/vectordb/__init__.py +3 -0
  516. agno/vectordb/base.py +127 -0
  517. agno/vectordb/cassandra/__init__.py +5 -0
  518. agno/vectordb/cassandra/cassandra.py +501 -0
  519. agno/vectordb/cassandra/extra_param_mixin.py +11 -0
  520. agno/vectordb/cassandra/index.py +13 -0
  521. agno/vectordb/chroma/__init__.py +5 -0
  522. agno/vectordb/chroma/chromadb.py +929 -0
  523. agno/vectordb/clickhouse/__init__.py +9 -0
  524. agno/vectordb/clickhouse/clickhousedb.py +835 -0
  525. agno/vectordb/clickhouse/index.py +9 -0
  526. agno/vectordb/couchbase/__init__.py +3 -0
  527. agno/vectordb/couchbase/couchbase.py +1442 -0
  528. agno/vectordb/distance.py +7 -0
  529. agno/vectordb/lancedb/__init__.py +6 -0
  530. agno/vectordb/lancedb/lance_db.py +995 -0
  531. agno/vectordb/langchaindb/__init__.py +5 -0
  532. agno/vectordb/langchaindb/langchaindb.py +163 -0
  533. agno/vectordb/lightrag/__init__.py +5 -0
  534. agno/vectordb/lightrag/lightrag.py +388 -0
  535. agno/vectordb/llamaindex/__init__.py +3 -0
  536. agno/vectordb/llamaindex/llamaindexdb.py +166 -0
  537. agno/vectordb/milvus/__init__.py +4 -0
  538. agno/vectordb/milvus/milvus.py +1182 -0
  539. agno/vectordb/mongodb/__init__.py +9 -0
  540. agno/vectordb/mongodb/mongodb.py +1417 -0
  541. agno/vectordb/pgvector/__init__.py +12 -0
  542. agno/vectordb/pgvector/index.py +23 -0
  543. agno/vectordb/pgvector/pgvector.py +1462 -0
  544. agno/vectordb/pineconedb/__init__.py +5 -0
  545. agno/vectordb/pineconedb/pineconedb.py +747 -0
  546. agno/vectordb/qdrant/__init__.py +5 -0
  547. agno/vectordb/qdrant/qdrant.py +1134 -0
  548. agno/vectordb/redis/__init__.py +9 -0
  549. agno/vectordb/redis/redisdb.py +694 -0
  550. agno/vectordb/search.py +7 -0
  551. agno/vectordb/singlestore/__init__.py +10 -0
  552. agno/vectordb/singlestore/index.py +41 -0
  553. agno/vectordb/singlestore/singlestore.py +763 -0
  554. agno/vectordb/surrealdb/__init__.py +3 -0
  555. agno/vectordb/surrealdb/surrealdb.py +699 -0
  556. agno/vectordb/upstashdb/__init__.py +5 -0
  557. agno/vectordb/upstashdb/upstashdb.py +718 -0
  558. agno/vectordb/weaviate/__init__.py +8 -0
  559. agno/vectordb/weaviate/index.py +15 -0
  560. agno/vectordb/weaviate/weaviate.py +1005 -0
  561. agno/workflow/__init__.py +23 -0
  562. agno/workflow/agent.py +299 -0
  563. agno/workflow/condition.py +738 -0
  564. agno/workflow/loop.py +735 -0
  565. agno/workflow/parallel.py +824 -0
  566. agno/workflow/router.py +702 -0
  567. agno/workflow/step.py +1432 -0
  568. agno/workflow/steps.py +592 -0
  569. agno/workflow/types.py +520 -0
  570. agno/workflow/workflow.py +4321 -0
  571. agno-2.2.13.dist-info/METADATA +614 -0
  572. agno-2.2.13.dist-info/RECORD +575 -0
  573. agno-2.2.13.dist-info/WHEEL +5 -0
  574. agno-2.2.13.dist-info/licenses/LICENSE +201 -0
  575. agno-2.2.13.dist-info/top_level.txt +1 -0
@@ -0,0 +1,1927 @@
1
+ import time
2
+ from datetime import date, datetime, timedelta, timezone
3
+ from typing import Any, Dict, List, Optional, Sequence, Tuple, Union
4
+ from uuid import uuid4
5
+
6
+ from agno.db.base import AsyncBaseDb, SessionType
7
+ from agno.db.postgres.schemas import get_table_schema_definition
8
+ from agno.db.postgres.utils import (
9
+ abulk_upsert_metrics,
10
+ acreate_schema,
11
+ ais_table_available,
12
+ ais_valid_table,
13
+ apply_sorting,
14
+ calculate_date_metrics,
15
+ deserialize_cultural_knowledge,
16
+ fetch_all_sessions_data,
17
+ get_dates_to_calculate_metrics_for,
18
+ serialize_cultural_knowledge,
19
+ )
20
+ from agno.db.schemas.culture import CulturalKnowledge
21
+ from agno.db.schemas.evals import EvalFilterType, EvalRunRecord, EvalType
22
+ from agno.db.schemas.knowledge import KnowledgeRow
23
+ from agno.db.schemas.memory import UserMemory
24
+ from agno.session import AgentSession, Session, TeamSession, WorkflowSession
25
+ from agno.utils.log import log_debug, log_error, log_info, log_warning
26
+
27
+ try:
28
+ from sqlalchemy import Index, String, UniqueConstraint, func, update
29
+ from sqlalchemy.dialects import postgresql
30
+ from sqlalchemy.ext.asyncio import AsyncEngine, async_sessionmaker, create_async_engine
31
+ from sqlalchemy.schema import Column, MetaData, Table
32
+ from sqlalchemy.sql.expression import select, text
33
+ except ImportError:
34
+ raise ImportError("`sqlalchemy` not installed. Please install it using `pip install sqlalchemy`")
35
+
36
+
37
+ class AsyncPostgresDb(AsyncBaseDb):
38
+ def __init__(
39
+ self,
40
+ id: Optional[str] = None,
41
+ db_url: Optional[str] = None,
42
+ db_engine: Optional[AsyncEngine] = None,
43
+ db_schema: Optional[str] = None,
44
+ session_table: Optional[str] = None,
45
+ memory_table: Optional[str] = None,
46
+ metrics_table: Optional[str] = None,
47
+ eval_table: Optional[str] = None,
48
+ knowledge_table: Optional[str] = None,
49
+ culture_table: Optional[str] = None,
50
+ db_id: Optional[str] = None, # Deprecated, use id instead.
51
+ ):
52
+ """
53
+ Async interface for interacting with a PostgreSQL database.
54
+
55
+ The following order is used to determine the database connection:
56
+ 1. Use the db_engine if provided
57
+ 2. Use the db_url
58
+ 3. Raise an error if neither is provided
59
+
60
+ Args:
61
+ id (Optional[str]): The ID of the database.
62
+ db_url (Optional[str]): The database URL to connect to.
63
+ db_engine (Optional[AsyncEngine]): The SQLAlchemy async database engine to use.
64
+ db_schema (Optional[str]): The database schema to use.
65
+ session_table (Optional[str]): Name of the table to store Agent, Team and Workflow sessions.
66
+ memory_table (Optional[str]): Name of the table to store memories.
67
+ metrics_table (Optional[str]): Name of the table to store metrics.
68
+ eval_table (Optional[str]): Name of the table to store evaluation runs data.
69
+ knowledge_table (Optional[str]): Name of the table to store knowledge content.
70
+ culture_table (Optional[str]): Name of the table to store cultural knowledge.
71
+ db_id: Deprecated, use id instead.
72
+
73
+ Raises:
74
+ ValueError: If neither db_url nor db_engine is provided.
75
+ ValueError: If none of the tables are provided.
76
+ """
77
+ if db_id is not None:
78
+ log_warning("db_id is deprecated and will be removed in a future version, use id instead.")
79
+
80
+ super().__init__(
81
+ id=id or db_id,
82
+ session_table=session_table,
83
+ memory_table=memory_table,
84
+ metrics_table=metrics_table,
85
+ eval_table=eval_table,
86
+ knowledge_table=knowledge_table,
87
+ culture_table=culture_table,
88
+ )
89
+
90
+ _engine: Optional[AsyncEngine] = db_engine
91
+ if _engine is None and db_url is not None:
92
+ _engine = create_async_engine(db_url)
93
+ if _engine is None:
94
+ raise ValueError("One of db_url or db_engine must be provided")
95
+
96
+ self.db_url: Optional[str] = db_url
97
+ self.db_engine: AsyncEngine = _engine
98
+ self.db_schema: str = db_schema if db_schema is not None else "ai"
99
+ self.metadata: MetaData = MetaData()
100
+
101
+ # Initialize database session factory
102
+ self.async_session_factory = async_sessionmaker(bind=self.db_engine)
103
+
104
+ # -- DB methods --
105
+ async def table_exists(self, table_name: str) -> bool:
106
+ """Check if a table with the given name exists in the Postgres database.
107
+
108
+ Args:
109
+ table_name: Name of the table to check
110
+
111
+ Returns:
112
+ bool: True if the table exists in the database, False otherwise
113
+ """
114
+ async with self.async_session_factory() as sess:
115
+ return await ais_table_available(session=sess, table_name=table_name, db_schema=self.db_schema)
116
+
117
+ async def _create_all_tables(self):
118
+ """Create all tables for the database."""
119
+ tables_to_create = [
120
+ (self.session_table_name, "sessions"),
121
+ (self.memory_table_name, "memories"),
122
+ (self.metrics_table_name, "metrics"),
123
+ (self.eval_table_name, "evals"),
124
+ (self.knowledge_table_name, "knowledge"),
125
+ ]
126
+
127
+ for table_name, table_type in tables_to_create:
128
+ await self._create_table(table_name=table_name, table_type=table_type, db_schema=self.db_schema)
129
+
130
+ async def _create_table(self, table_name: str, table_type: str, db_schema: str) -> Table:
131
+ """
132
+ Create a table with the appropriate schema based on the table type.
133
+
134
+ Args:
135
+ table_name (str): Name of the table to create
136
+ table_type (str): Type of table (used to get schema definition)
137
+ db_schema (str): Database schema name
138
+
139
+ Returns:
140
+ Table: SQLAlchemy Table object
141
+ """
142
+ try:
143
+ table_schema = get_table_schema_definition(table_type).copy()
144
+
145
+ columns: List[Column] = []
146
+ indexes: List[str] = []
147
+ unique_constraints: List[str] = []
148
+ schema_unique_constraints = table_schema.pop("_unique_constraints", [])
149
+
150
+ # Get the columns, indexes, and unique constraints from the table schema
151
+ for col_name, col_config in table_schema.items():
152
+ column_args = [col_name, col_config["type"]()]
153
+ column_kwargs = {}
154
+ if col_config.get("primary_key", False):
155
+ column_kwargs["primary_key"] = True
156
+ if "nullable" in col_config:
157
+ column_kwargs["nullable"] = col_config["nullable"]
158
+ if col_config.get("index", False):
159
+ indexes.append(col_name)
160
+ if col_config.get("unique", False):
161
+ column_kwargs["unique"] = True
162
+ unique_constraints.append(col_name)
163
+ columns.append(Column(*column_args, **column_kwargs)) # type: ignore
164
+
165
+ # Create the table object
166
+ table_metadata = MetaData(schema=db_schema)
167
+ table = Table(table_name, table_metadata, *columns, schema=db_schema)
168
+
169
+ # Add multi-column unique constraints with table-specific names
170
+ for constraint in schema_unique_constraints:
171
+ constraint_name = f"{table_name}_{constraint['name']}"
172
+ constraint_columns = constraint["columns"]
173
+ table.append_constraint(UniqueConstraint(*constraint_columns, name=constraint_name))
174
+
175
+ # Add indexes to the table definition
176
+ for idx_col in indexes:
177
+ idx_name = f"idx_{table_name}_{idx_col}"
178
+ table.append_constraint(Index(idx_name, idx_col))
179
+
180
+ async with self.async_session_factory() as sess, sess.begin():
181
+ await acreate_schema(session=sess, db_schema=db_schema)
182
+
183
+ # Create table
184
+ async with self.db_engine.begin() as conn:
185
+ await conn.run_sync(table.create, checkfirst=True)
186
+
187
+ # Create indexes
188
+ for idx in table.indexes:
189
+ try:
190
+ # Check if index already exists
191
+ async with self.async_session_factory() as sess:
192
+ exists_query = text(
193
+ "SELECT 1 FROM pg_indexes WHERE schemaname = :schema AND indexname = :index_name"
194
+ )
195
+ result = await sess.execute(exists_query, {"schema": db_schema, "index_name": idx.name})
196
+ exists = result.scalar() is not None
197
+ if exists:
198
+ log_debug(f"Index {idx.name} already exists in {db_schema}.{table_name}, skipping creation")
199
+ continue
200
+
201
+ async with self.db_engine.begin() as conn:
202
+ await conn.run_sync(idx.create)
203
+ log_debug(f"Created index: {idx.name} for table {db_schema}.{table_name}")
204
+
205
+ except Exception as e:
206
+ log_error(f"Error creating index {idx.name}: {e}")
207
+
208
+ log_debug(f"Successfully created table {table_name} in schema {db_schema}")
209
+ return table
210
+
211
+ except Exception as e:
212
+ log_error(f"Could not create table {db_schema}.{table_name}: {e}")
213
+ raise
214
+
215
+ async def _get_table(self, table_type: str) -> Table:
216
+ if table_type == "sessions":
217
+ if not hasattr(self, "session_table"):
218
+ self.session_table = await self._get_or_create_table(
219
+ table_name=self.session_table_name, table_type="sessions", db_schema=self.db_schema
220
+ )
221
+ return self.session_table
222
+
223
+ if table_type == "memories":
224
+ if not hasattr(self, "memory_table"):
225
+ self.memory_table = await self._get_or_create_table(
226
+ table_name=self.memory_table_name, table_type="memories", db_schema=self.db_schema
227
+ )
228
+ return self.memory_table
229
+
230
+ if table_type == "metrics":
231
+ if not hasattr(self, "metrics_table"):
232
+ self.metrics_table = await self._get_or_create_table(
233
+ table_name=self.metrics_table_name, table_type="metrics", db_schema=self.db_schema
234
+ )
235
+ return self.metrics_table
236
+
237
+ if table_type == "evals":
238
+ if not hasattr(self, "eval_table"):
239
+ self.eval_table = await self._get_or_create_table(
240
+ table_name=self.eval_table_name, table_type="evals", db_schema=self.db_schema
241
+ )
242
+ return self.eval_table
243
+
244
+ if table_type == "knowledge":
245
+ if not hasattr(self, "knowledge_table"):
246
+ self.knowledge_table = await self._get_or_create_table(
247
+ table_name=self.knowledge_table_name, table_type="knowledge", db_schema=self.db_schema
248
+ )
249
+ return self.knowledge_table
250
+
251
+ if table_type == "culture":
252
+ if not hasattr(self, "culture_table"):
253
+ self.culture_table = await self._get_or_create_table(
254
+ table_name=self.culture_table_name, table_type="culture", db_schema=self.db_schema
255
+ )
256
+ return self.culture_table
257
+
258
+ raise ValueError(f"Unknown table type: {table_type}")
259
+
260
    async def _get_or_create_table(self, table_name: str, table_type: str, db_schema: str) -> Table:
        """
        Check if the table exists and is valid, else create it.

        Args:
            table_name (str): Name of the table to get or create
            table_type (str): Type of table (used to get schema definition)
            db_schema (str): Database schema name

        Returns:
            Table: SQLAlchemy Table object representing the schema.

        Raises:
            ValueError: If the table exists but its columns don't match the expected schema.
            Exception: Re-raised if reflecting the existing table fails.
        """

        # Existence check runs in its own short transaction.
        async with self.async_session_factory() as sess, sess.begin():
            table_is_available = await ais_table_available(session=sess, table_name=table_name, db_schema=db_schema)

        if not table_is_available:
            return await self._create_table(table_name=table_name, table_type=table_type, db_schema=db_schema)

        # Table exists: validate its schema before trusting it.
        if not await ais_valid_table(
            db_engine=self.db_engine,
            table_name=table_name,
            table_type=table_type,
            db_schema=db_schema,
        ):
            raise ValueError(f"Table {db_schema}.{table_name} has an invalid schema")

        try:
            async with self.db_engine.connect() as conn:
                # Reflection (autoload_with) is synchronous in SQLAlchemy, so it must be
                # driven through run_sync on the async connection.
                def create_table(connection):
                    return Table(table_name, self.metadata, schema=db_schema, autoload_with=connection)

                table = await conn.run_sync(create_table)
                return table

        except Exception as e:
            log_error(f"Error loading existing table {db_schema}.{table_name}: {e}")
            raise
299
+
300
+ # -- Session methods --
301
+ async def delete_session(self, session_id: str) -> bool:
302
+ """
303
+ Delete a session from the database.
304
+
305
+ Args:
306
+ session_id (str): ID of the session to delete
307
+
308
+ Returns:
309
+ bool: True if the session was deleted, False otherwise.
310
+
311
+ Raises:
312
+ Exception: If an error occurs during deletion.
313
+ """
314
+ try:
315
+ table = await self._get_table(table_type="sessions")
316
+
317
+ async with self.async_session_factory() as sess, sess.begin():
318
+ delete_stmt = table.delete().where(table.c.session_id == session_id)
319
+ result = await sess.execute(delete_stmt)
320
+
321
+ if result.rowcount == 0: # type: ignore
322
+ log_debug(f"No session found to delete with session_id: {session_id} in table {table.name}")
323
+ return False
324
+
325
+ else:
326
+ log_debug(f"Successfully deleted session with session_id: {session_id} in table {table.name}")
327
+ return True
328
+
329
+ except Exception as e:
330
+ log_error(f"Error deleting session: {e}")
331
+ return False
332
+
333
+ async def delete_sessions(self, session_ids: List[str]) -> None:
334
+ """Delete all given sessions from the database.
335
+ Can handle multiple session types in the same run.
336
+
337
+ Args:
338
+ session_ids (List[str]): The IDs of the sessions to delete.
339
+
340
+ Raises:
341
+ Exception: If an error occurs during deletion.
342
+ """
343
+ try:
344
+ table = await self._get_table(table_type="sessions")
345
+
346
+ async with self.async_session_factory() as sess, sess.begin():
347
+ delete_stmt = table.delete().where(table.c.session_id.in_(session_ids))
348
+ result = await sess.execute(delete_stmt)
349
+
350
+ log_debug(f"Successfully deleted {result.rowcount} sessions") # type: ignore
351
+
352
+ except Exception as e:
353
+ log_error(f"Error deleting sessions: {e}")
354
+
355
    async def get_session(
        self,
        session_id: str,
        session_type: SessionType,
        user_id: Optional[str] = None,
        deserialize: Optional[bool] = True,
    ) -> Optional[Union[Session, Dict[str, Any]]]:
        """
        Read a session from the database.

        Args:
            session_id (str): ID of the session to read.
            session_type (SessionType): Type of session to read (required; selects
                which Session subclass the row is deserialized into).
            user_id (Optional[str]): User ID to filter by. Defaults to None.
            deserialize (Optional[bool]): Whether to deserialize the session. Defaults to True.

        Returns:
            Union[Session, Dict[str, Any], None]:
                - When deserialize=True: Session object
                - When deserialize=False: Session dictionary
                - None if the session does not exist or an error occurred.
        """
        try:
            table = await self._get_table(table_type="sessions")

            async with self.async_session_factory() as sess:
                stmt = select(table).where(table.c.session_id == session_id)

                # Optionally scope the lookup to a single user.
                if user_id is not None:
                    stmt = stmt.where(table.c.user_id == user_id)
                result = await sess.execute(stmt)
                row = result.fetchone()
                if row is None:
                    return None

                session = dict(row._mapping)

                if not deserialize:
                    return session

                # Deserialize into the concrete Session subclass for the requested type.
                # An unknown type raises, which is caught below and reported as None.
                if session_type == SessionType.AGENT:
                    return AgentSession.from_dict(session)
                elif session_type == SessionType.TEAM:
                    return TeamSession.from_dict(session)
                elif session_type == SessionType.WORKFLOW:
                    return WorkflowSession.from_dict(session)
                else:
                    raise ValueError(f"Invalid session type: {session_type}")

        except Exception as e:
            log_error(f"Exception reading from session table: {e}")
            return None
409
+
410
+ async def get_sessions(
411
+ self,
412
+ session_type: Optional[SessionType] = None,
413
+ user_id: Optional[str] = None,
414
+ component_id: Optional[str] = None,
415
+ session_name: Optional[str] = None,
416
+ start_timestamp: Optional[int] = None,
417
+ end_timestamp: Optional[int] = None,
418
+ limit: Optional[int] = None,
419
+ page: Optional[int] = None,
420
+ sort_by: Optional[str] = None,
421
+ sort_order: Optional[str] = None,
422
+ deserialize: Optional[bool] = True,
423
+ ) -> Union[List[Session], Tuple[List[Dict[str, Any]], int]]:
424
+ """
425
+ Get all sessions in the given table. Can filter by user_id and entity_id.
426
+
427
+ Args:
428
+ user_id (Optional[str]): The ID of the user to filter by.
429
+ component_id (Optional[str]): The ID of the agent / workflow to filter by.
430
+ start_timestamp (Optional[int]): The start timestamp to filter by.
431
+ end_timestamp (Optional[int]): The end timestamp to filter by.
432
+ session_name (Optional[str]): The name of the session to filter by.
433
+ limit (Optional[int]): The maximum number of sessions to return. Defaults to None.
434
+ page (Optional[int]): The page number to return. Defaults to None.
435
+ sort_by (Optional[str]): The field to sort by. Defaults to None.
436
+ sort_order (Optional[str]): The sort order. Defaults to None.
437
+ deserialize (Optional[bool]): Whether to serialize the sessions. Defaults to True.
438
+
439
+ Returns:
440
+ Union[List[Session], Tuple[List[Dict], int]]:
441
+ - When deserialize=True: List of Session objects
442
+ - When deserialize=False: Tuple of (session dictionaries, total count)
443
+
444
+ Raises:
445
+ Exception: If an error occurs during retrieval.
446
+ """
447
+ try:
448
+ table = await self._get_table(table_type="sessions")
449
+
450
+ async with self.async_session_factory() as sess, sess.begin():
451
+ stmt = select(table)
452
+
453
+ # Filtering
454
+ if user_id is not None:
455
+ stmt = stmt.where(table.c.user_id == user_id)
456
+ if component_id is not None:
457
+ if session_type == SessionType.AGENT:
458
+ stmt = stmt.where(table.c.agent_id == component_id)
459
+ elif session_type == SessionType.TEAM:
460
+ stmt = stmt.where(table.c.team_id == component_id)
461
+ elif session_type == SessionType.WORKFLOW:
462
+ stmt = stmt.where(table.c.workflow_id == component_id)
463
+ if start_timestamp is not None:
464
+ stmt = stmt.where(table.c.created_at >= start_timestamp)
465
+ if end_timestamp is not None:
466
+ stmt = stmt.where(table.c.created_at <= end_timestamp)
467
+ if session_name is not None:
468
+ stmt = stmt.where(
469
+ func.coalesce(func.json_extract_path_text(table.c.session_data, "session_name"), "").ilike(
470
+ f"%{session_name}%"
471
+ )
472
+ )
473
+ if session_type is not None:
474
+ session_type_value = session_type.value if isinstance(session_type, SessionType) else session_type
475
+ stmt = stmt.where(table.c.session_type == session_type_value)
476
+
477
+ count_stmt = select(func.count()).select_from(stmt.alias())
478
+ total_count = await sess.scalar(count_stmt) or 0
479
+
480
+ # Sorting
481
+ stmt = apply_sorting(stmt, table, sort_by, sort_order)
482
+
483
+ # Paginating
484
+ if limit is not None:
485
+ stmt = stmt.limit(limit)
486
+ if page is not None:
487
+ stmt = stmt.offset((page - 1) * limit)
488
+
489
+ result = await sess.execute(stmt)
490
+ records = result.fetchall()
491
+ if records is None:
492
+ return [], 0
493
+
494
+ session = [dict(record._mapping) for record in records]
495
+ if not deserialize:
496
+ return session, total_count
497
+
498
+ if session_type == SessionType.AGENT:
499
+ return [AgentSession.from_dict(record) for record in session] # type: ignore
500
+ elif session_type == SessionType.TEAM:
501
+ return [TeamSession.from_dict(record) for record in session] # type: ignore
502
+ elif session_type == SessionType.WORKFLOW:
503
+ return [WorkflowSession.from_dict(record) for record in session] # type: ignore
504
+ else:
505
+ raise ValueError(f"Invalid session type: {session_type}")
506
+
507
+ except Exception as e:
508
+ log_error(f"Exception reading from session table: {e}")
509
+ return [] if deserialize else ([], 0)
510
+
511
    async def rename_session(
        self, session_id: str, session_type: SessionType, session_name: str, deserialize: Optional[bool] = True
    ) -> Optional[Union[Session, Dict[str, Any]]]:
        """
        Rename a session in the database.

        Args:
            session_id (str): The ID of the session to rename.
            session_type (SessionType): The type of session to rename.
            session_name (str): The new name for the session.
            deserialize (Optional[bool]): Whether to deserialize the session. Defaults to True.

        Returns:
            Optional[Union[Session, Dict[str, Any]]]:
                - When deserialize=True: Session object
                - When deserialize=False: Session dictionary
                - None if no matching session exists or an error occurred.
        """
        try:
            table = await self._get_table(table_type="sessions")

            async with self.async_session_factory() as sess, sess.begin():
                # session_name lives inside the session_data JSON column, so the update
                # rewrites that column via jsonb_set and casts back to JSON.
                stmt = (
                    update(table)
                    .where(table.c.session_id == session_id)
                    .where(table.c.session_type == session_type.value)
                    .values(
                        session_data=func.cast(
                            func.jsonb_set(
                                func.cast(table.c.session_data, postgresql.JSONB),
                                # Postgres text[] path literal targeting the "session_name" key.
                                # The braces are Postgres array syntax, NOT a missing f-string.
                                text("'{session_name}'"),
                                func.to_jsonb(session_name),
                            ),
                            postgresql.JSON,
                        )
                    )
                    .returning(*table.c)
                )
                result = await sess.execute(stmt)
                row = result.fetchone()
                if not row:
                    return None

                log_debug(f"Renamed session with id '{session_id}' to '{session_name}'")

                session = dict(row._mapping)
                if not deserialize:
                    return session

                # Return the appropriate session type
                if session_type == SessionType.AGENT:
                    return AgentSession.from_dict(session)
                elif session_type == SessionType.TEAM:
                    return TeamSession.from_dict(session)
                elif session_type == SessionType.WORKFLOW:
                    return WorkflowSession.from_dict(session)
                else:
                    raise ValueError(f"Invalid session type: {session_type}")

        except Exception as e:
            log_error(f"Exception renaming session: {e}")
            return None
575
+
576
+ async def upsert_session(
577
+ self, session: Session, deserialize: Optional[bool] = True
578
+ ) -> Optional[Union[Session, Dict[str, Any]]]:
579
+ """
580
+ Insert or update a session in the database.
581
+
582
+ Args:
583
+ session (Session): The session data to upsert.
584
+ deserialize (Optional[bool]): Whether to deserialize the session. Defaults to True.
585
+
586
+ Returns:
587
+ Optional[Union[Session, Dict[str, Any]]]:
588
+ - When deserialize=True: Session object
589
+ - When deserialize=False: Session dictionary
590
+
591
+ Raises:
592
+ Exception: If an error occurs during upsert.
593
+ """
594
+ try:
595
+ table = await self._get_table(table_type="sessions")
596
+ session_dict = session.to_dict()
597
+
598
+ if isinstance(session, AgentSession):
599
+ async with self.async_session_factory() as sess, sess.begin():
600
+ stmt = postgresql.insert(table).values(
601
+ session_id=session_dict.get("session_id"),
602
+ session_type=SessionType.AGENT.value,
603
+ agent_id=session_dict.get("agent_id"),
604
+ user_id=session_dict.get("user_id"),
605
+ runs=session_dict.get("runs"),
606
+ agent_data=session_dict.get("agent_data"),
607
+ session_data=session_dict.get("session_data"),
608
+ summary=session_dict.get("summary"),
609
+ metadata=session_dict.get("metadata"),
610
+ created_at=session_dict.get("created_at"),
611
+ updated_at=session_dict.get("created_at"),
612
+ )
613
+ stmt = stmt.on_conflict_do_update( # type: ignore
614
+ index_elements=["session_id"],
615
+ set_=dict(
616
+ agent_id=session_dict.get("agent_id"),
617
+ user_id=session_dict.get("user_id"),
618
+ agent_data=session_dict.get("agent_data"),
619
+ session_data=session_dict.get("session_data"),
620
+ summary=session_dict.get("summary"),
621
+ metadata=session_dict.get("metadata"),
622
+ runs=session_dict.get("runs"),
623
+ updated_at=int(time.time()),
624
+ ),
625
+ ).returning(table)
626
+ result = await sess.execute(stmt)
627
+ row = result.fetchone()
628
+ if row is None:
629
+ return None
630
+ session_dict = dict(row._mapping)
631
+
632
+ log_debug(f"Upserted agent session with id '{session_dict.get('session_id')}'")
633
+
634
+ if not deserialize:
635
+ return session_dict
636
+ return AgentSession.from_dict(session_dict)
637
+
638
+ elif isinstance(session, TeamSession):
639
+ async with self.async_session_factory() as sess, sess.begin():
640
+ stmt = postgresql.insert(table).values(
641
+ session_id=session_dict.get("session_id"),
642
+ session_type=SessionType.TEAM.value,
643
+ team_id=session_dict.get("team_id"),
644
+ user_id=session_dict.get("user_id"),
645
+ runs=session_dict.get("runs"),
646
+ team_data=session_dict.get("team_data"),
647
+ session_data=session_dict.get("session_data"),
648
+ summary=session_dict.get("summary"),
649
+ metadata=session_dict.get("metadata"),
650
+ created_at=session_dict.get("created_at"),
651
+ updated_at=session_dict.get("created_at"),
652
+ )
653
+ stmt = stmt.on_conflict_do_update( # type: ignore
654
+ index_elements=["session_id"],
655
+ set_=dict(
656
+ team_id=session_dict.get("team_id"),
657
+ user_id=session_dict.get("user_id"),
658
+ team_data=session_dict.get("team_data"),
659
+ session_data=session_dict.get("session_data"),
660
+ summary=session_dict.get("summary"),
661
+ metadata=session_dict.get("metadata"),
662
+ runs=session_dict.get("runs"),
663
+ updated_at=int(time.time()),
664
+ ),
665
+ ).returning(table)
666
+ result = await sess.execute(stmt)
667
+ row = result.fetchone()
668
+ if row is None:
669
+ return None
670
+ session_dict = dict(row._mapping)
671
+
672
+ log_debug(f"Upserted team session with id '{session_dict.get('session_id')}'")
673
+
674
+ if not deserialize:
675
+ return session_dict
676
+ return TeamSession.from_dict(session_dict)
677
+
678
+ elif isinstance(session, WorkflowSession):
679
+ async with self.async_session_factory() as sess, sess.begin():
680
+ stmt = postgresql.insert(table).values(
681
+ session_id=session_dict.get("session_id"),
682
+ session_type=SessionType.WORKFLOW.value,
683
+ workflow_id=session_dict.get("workflow_id"),
684
+ user_id=session_dict.get("user_id"),
685
+ runs=session_dict.get("runs"),
686
+ workflow_data=session_dict.get("workflow_data"),
687
+ session_data=session_dict.get("session_data"),
688
+ summary=session_dict.get("summary"),
689
+ metadata=session_dict.get("metadata"),
690
+ created_at=session_dict.get("created_at"),
691
+ updated_at=session_dict.get("created_at"),
692
+ )
693
+ stmt = stmt.on_conflict_do_update( # type: ignore
694
+ index_elements=["session_id"],
695
+ set_=dict(
696
+ workflow_id=session_dict.get("workflow_id"),
697
+ user_id=session_dict.get("user_id"),
698
+ workflow_data=session_dict.get("workflow_data"),
699
+ session_data=session_dict.get("session_data"),
700
+ summary=session_dict.get("summary"),
701
+ metadata=session_dict.get("metadata"),
702
+ runs=session_dict.get("runs"),
703
+ updated_at=int(time.time()),
704
+ ),
705
+ ).returning(table)
706
+ result = await sess.execute(stmt)
707
+ row = result.fetchone()
708
+ if row is None:
709
+ return None
710
+ session_dict = dict(row._mapping)
711
+
712
+ log_debug(f"Upserted workflow session with id '{session_dict.get('session_id')}'")
713
+
714
+ if not deserialize:
715
+ return session_dict
716
+ return WorkflowSession.from_dict(session_dict)
717
+
718
+ else:
719
+ raise ValueError(f"Invalid session type: {session.session_type}")
720
+
721
+ except Exception as e:
722
+ log_error(f"Exception upserting into sessions table: {e}")
723
+ return None
724
+
725
+ # -- Memory methods --
726
+ async def delete_user_memory(self, memory_id: str):
727
+ """Delete a user memory from the database.
728
+
729
+ Returns:
730
+ bool: True if deletion was successful, False otherwise.
731
+
732
+ Raises:
733
+ Exception: If an error occurs during deletion.
734
+ """
735
+ try:
736
+ table = await self._get_table(table_type="memories")
737
+
738
+ async with self.async_session_factory() as sess, sess.begin():
739
+ delete_stmt = table.delete().where(table.c.memory_id == memory_id)
740
+ result = await sess.execute(delete_stmt)
741
+
742
+ success = result.rowcount > 0 # type: ignore
743
+ if success:
744
+ log_debug(f"Successfully deleted user memory id: {memory_id}")
745
+ else:
746
+ log_debug(f"No user memory found with id: {memory_id}")
747
+
748
+ except Exception as e:
749
+ log_error(f"Error deleting user memory: {e}")
750
+
751
+ async def delete_user_memories(self, memory_ids: List[str]) -> None:
752
+ """Delete user memories from the database.
753
+
754
+ Args:
755
+ memory_ids (List[str]): The IDs of the memories to delete.
756
+
757
+ Raises:
758
+ Exception: If an error occurs during deletion.
759
+ """
760
+ try:
761
+ table = await self._get_table(table_type="memories")
762
+
763
+ async with self.async_session_factory() as sess, sess.begin():
764
+ delete_stmt = table.delete().where(table.c.memory_id.in_(memory_ids))
765
+ result = await sess.execute(delete_stmt)
766
+
767
+ if result.rowcount == 0: # type: ignore
768
+ log_debug(f"No user memories found with ids: {memory_ids}")
769
+ else:
770
+ log_debug(f"Successfully deleted {result.rowcount} user memories") # type: ignore
771
+
772
+ except Exception as e:
773
+ log_error(f"Error deleting user memories: {e}")
774
+
775
+ async def get_all_memory_topics(self) -> List[str]:
776
+ """Get all memory topics from the database.
777
+
778
+ Returns:
779
+ List[str]: List of memory topics.
780
+ """
781
+ try:
782
+ table = await self._get_table(table_type="memories")
783
+
784
+ async with self.async_session_factory() as sess, sess.begin():
785
+ stmt = select(func.json_array_elements_text(table.c.topics))
786
+ result = await sess.execute(stmt)
787
+ records = result.fetchall()
788
+
789
+ return list(set([record[0] for record in records]))
790
+
791
+ except Exception as e:
792
+ log_error(f"Exception reading from memory table: {e}")
793
+ return []
794
+
795
+ async def get_user_memory(
796
+ self, memory_id: str, deserialize: Optional[bool] = True
797
+ ) -> Optional[Union[UserMemory, Dict[str, Any]]]:
798
+ """Get a memory from the database.
799
+
800
+ Args:
801
+ memory_id (str): The ID of the memory to get.
802
+ deserialize (Optional[bool]): Whether to serialize the memory. Defaults to True.
803
+
804
+ Returns:
805
+ Union[UserMemory, Dict[str, Any], None]:
806
+ - When deserialize=True: UserMemory object
807
+ - When deserialize=False: UserMemory dictionary
808
+
809
+ Raises:
810
+ Exception: If an error occurs during retrieval.
811
+ """
812
+ try:
813
+ table = await self._get_table(table_type="memories")
814
+
815
+ async with self.async_session_factory() as sess, sess.begin():
816
+ stmt = select(table).where(table.c.memory_id == memory_id)
817
+
818
+ result = await sess.execute(stmt)
819
+ row = result.fetchone()
820
+ if not row:
821
+ return None
822
+
823
+ memory_raw = dict(row._mapping)
824
+ if not deserialize:
825
+ return memory_raw
826
+
827
+ return UserMemory.from_dict(memory_raw)
828
+
829
+ except Exception as e:
830
+ log_error(f"Exception reading from memory table: {e}")
831
+ return None
832
+
833
+ async def get_user_memories(
834
+ self,
835
+ user_id: Optional[str] = None,
836
+ agent_id: Optional[str] = None,
837
+ team_id: Optional[str] = None,
838
+ topics: Optional[List[str]] = None,
839
+ search_content: Optional[str] = None,
840
+ limit: Optional[int] = None,
841
+ page: Optional[int] = None,
842
+ sort_by: Optional[str] = None,
843
+ sort_order: Optional[str] = None,
844
+ deserialize: Optional[bool] = True,
845
+ ) -> Union[List[UserMemory], Tuple[List[Dict[str, Any]], int]]:
846
+ """Get all memories from the database as UserMemory objects.
847
+
848
+ Args:
849
+ user_id (Optional[str]): The ID of the user to filter by.
850
+ agent_id (Optional[str]): The ID of the agent to filter by.
851
+ team_id (Optional[str]): The ID of the team to filter by.
852
+ topics (Optional[List[str]]): The topics to filter by.
853
+ search_content (Optional[str]): The content to search for.
854
+ limit (Optional[int]): The maximum number of memories to return.
855
+ page (Optional[int]): The page number.
856
+ sort_by (Optional[str]): The column to sort by.
857
+ sort_order (Optional[str]): The order to sort by.
858
+ deserialize (Optional[bool]): Whether to serialize the memories. Defaults to True.
859
+
860
+ Returns:
861
+ Union[List[UserMemory], Tuple[List[Dict[str, Any]], int]]:
862
+ - When deserialize=True: List of UserMemory objects
863
+ - When deserialize=False: Tuple of (memory dictionaries, total count)
864
+
865
+ Raises:
866
+ Exception: If an error occurs during retrieval.
867
+ """
868
+ try:
869
+ table = await self._get_table(table_type="memories")
870
+
871
+ async with self.async_session_factory() as sess, sess.begin():
872
+ stmt = select(table)
873
+ # Filtering
874
+ if user_id is not None:
875
+ stmt = stmt.where(table.c.user_id == user_id)
876
+ if agent_id is not None:
877
+ stmt = stmt.where(table.c.agent_id == agent_id)
878
+ if team_id is not None:
879
+ stmt = stmt.where(table.c.team_id == team_id)
880
+ if topics is not None:
881
+ for topic in topics:
882
+ stmt = stmt.where(func.cast(table.c.topics, String).like(f'%"{topic}"%'))
883
+ if search_content is not None:
884
+ stmt = stmt.where(func.cast(table.c.memory, postgresql.TEXT).ilike(f"%{search_content}%"))
885
+
886
+ # Get total count after applying filtering
887
+ count_stmt = select(func.count()).select_from(stmt.alias())
888
+ total_count = await sess.scalar(count_stmt) or 0
889
+
890
+ # Sorting
891
+ stmt = apply_sorting(stmt, table, sort_by, sort_order)
892
+
893
+ # Paginating
894
+ if limit is not None:
895
+ stmt = stmt.limit(limit)
896
+ if page is not None:
897
+ stmt = stmt.offset((page - 1) * limit)
898
+
899
+ result = await sess.execute(stmt)
900
+ records = result.fetchall()
901
+ if not records:
902
+ return [] if deserialize else ([], 0)
903
+
904
+ memories_raw = [dict(record._mapping) for record in records]
905
+ if not deserialize:
906
+ return memories_raw, total_count
907
+
908
+ return [UserMemory.from_dict(record) for record in memories_raw]
909
+
910
+ except Exception as e:
911
+ log_error(f"Exception reading from memory table: {e}")
912
+ return [] if deserialize else ([], 0)
913
+
914
+ async def clear_memories(self) -> None:
915
+ """Delete all memories from the database.
916
+
917
+ Raises:
918
+ Exception: If an error occurs during deletion.
919
+ """
920
+ try:
921
+ table = await self._get_table(table_type="memories")
922
+
923
+ async with self.async_session_factory() as sess, sess.begin():
924
+ await sess.execute(table.delete())
925
+
926
+ except Exception as e:
927
+ log_warning(f"Exception deleting all memories: {e}")
928
+
929
+ # -- Cultural Knowledge methods --
930
+ async def clear_cultural_knowledge(self) -> None:
931
+ """Delete all cultural knowledge from the database.
932
+
933
+ Raises:
934
+ Exception: If an error occurs during deletion.
935
+ """
936
+ try:
937
+ table = await self._get_table(table_type="culture")
938
+
939
+ async with self.async_session_factory() as sess, sess.begin():
940
+ await sess.execute(table.delete())
941
+
942
+ except Exception as e:
943
+ log_warning(f"Exception deleting all cultural knowledge: {e}")
944
+
945
+ async def delete_cultural_knowledge(self, id: str) -> None:
946
+ """Delete cultural knowledge by ID.
947
+
948
+ Args:
949
+ id (str): The ID of the cultural knowledge to delete.
950
+
951
+ Raises:
952
+ Exception: If an error occurs during deletion.
953
+ """
954
+ try:
955
+ table = await self._get_table(table_type="culture")
956
+
957
+ async with self.async_session_factory() as sess, sess.begin():
958
+ stmt = table.delete().where(table.c.id == id)
959
+ await sess.execute(stmt)
960
+
961
+ except Exception as e:
962
+ log_warning(f"Exception deleting cultural knowledge: {e}")
963
+ raise e
964
+
965
+ async def get_cultural_knowledge(
966
+ self, id: str, deserialize: Optional[bool] = True
967
+ ) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
968
+ """Get cultural knowledge by ID.
969
+
970
+ Args:
971
+ id (str): The ID of the cultural knowledge to retrieve.
972
+ deserialize (Optional[bool]): Whether to deserialize to CulturalKnowledge object. Defaults to True.
973
+
974
+ Returns:
975
+ Optional[Union[CulturalKnowledge, Dict[str, Any]]]: The cultural knowledge if found, None otherwise.
976
+
977
+ Raises:
978
+ Exception: If an error occurs during retrieval.
979
+ """
980
+ try:
981
+ table = await self._get_table(table_type="culture")
982
+
983
+ async with self.async_session_factory() as sess:
984
+ stmt = select(table).where(table.c.id == id)
985
+ result = await sess.execute(stmt)
986
+ row = result.fetchone()
987
+
988
+ if row is None:
989
+ return None
990
+
991
+ db_row = dict(row._mapping)
992
+
993
+ if not deserialize:
994
+ return db_row
995
+
996
+ return deserialize_cultural_knowledge(db_row)
997
+
998
+ except Exception as e:
999
+ log_warning(f"Exception reading cultural knowledge: {e}")
1000
+ raise e
1001
+
1002
+ async def get_all_cultural_knowledge(
1003
+ self,
1004
+ agent_id: Optional[str] = None,
1005
+ team_id: Optional[str] = None,
1006
+ name: Optional[str] = None,
1007
+ limit: Optional[int] = None,
1008
+ page: Optional[int] = None,
1009
+ sort_by: Optional[str] = None,
1010
+ sort_order: Optional[str] = None,
1011
+ deserialize: Optional[bool] = True,
1012
+ ) -> Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
1013
+ """Get all cultural knowledge with filtering and pagination.
1014
+
1015
+ Args:
1016
+ agent_id (Optional[str]): Filter by agent ID.
1017
+ team_id (Optional[str]): Filter by team ID.
1018
+ name (Optional[str]): Filter by name (case-insensitive partial match).
1019
+ limit (Optional[int]): Maximum number of results to return.
1020
+ page (Optional[int]): Page number for pagination.
1021
+ sort_by (Optional[str]): Field to sort by.
1022
+ sort_order (Optional[str]): Sort order ('asc' or 'desc').
1023
+ deserialize (Optional[bool]): Whether to deserialize to CulturalKnowledge objects. Defaults to True.
1024
+
1025
+ Returns:
1026
+ Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
1027
+ - When deserialize=True: List of CulturalKnowledge objects
1028
+ - When deserialize=False: Tuple with list of dictionaries and total count
1029
+
1030
+ Raises:
1031
+ Exception: If an error occurs during retrieval.
1032
+ """
1033
+ try:
1034
+ table = await self._get_table(table_type="culture")
1035
+
1036
+ async with self.async_session_factory() as sess:
1037
+ # Build query with filters
1038
+ stmt = select(table)
1039
+ if agent_id is not None:
1040
+ stmt = stmt.where(table.c.agent_id == agent_id)
1041
+ if team_id is not None:
1042
+ stmt = stmt.where(table.c.team_id == team_id)
1043
+ if name is not None:
1044
+ stmt = stmt.where(table.c.name.ilike(f"%{name}%"))
1045
+
1046
+ # Get total count
1047
+ count_stmt = select(func.count()).select_from(stmt.alias())
1048
+ total_count_result = await sess.execute(count_stmt)
1049
+ total_count = total_count_result.scalar() or 0
1050
+
1051
+ # Apply sorting
1052
+ stmt = apply_sorting(stmt, table, sort_by, sort_order)
1053
+
1054
+ # Apply pagination
1055
+ if limit is not None:
1056
+ stmt = stmt.limit(limit)
1057
+ if page is not None:
1058
+ stmt = stmt.offset((page - 1) * limit)
1059
+
1060
+ # Execute query
1061
+ result = await sess.execute(stmt)
1062
+ rows = result.fetchall()
1063
+
1064
+ db_rows = [dict(row._mapping) for row in rows]
1065
+
1066
+ if not deserialize:
1067
+ return db_rows, total_count
1068
+
1069
+ return [deserialize_cultural_knowledge(row) for row in db_rows]
1070
+
1071
+ except Exception as e:
1072
+ log_warning(f"Exception reading all cultural knowledge: {e}")
1073
+ raise e
1074
+
1075
+ async def upsert_cultural_knowledge(
1076
+ self, cultural_knowledge: CulturalKnowledge, deserialize: Optional[bool] = True
1077
+ ) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
1078
+ """Upsert cultural knowledge in the database.
1079
+
1080
+ Args:
1081
+ cultural_knowledge (CulturalKnowledge): The cultural knowledge to upsert.
1082
+ deserialize (Optional[bool]): Whether to deserialize the result. Defaults to True.
1083
+
1084
+ Returns:
1085
+ Optional[Union[CulturalKnowledge, Dict[str, Any]]]: The upserted cultural knowledge.
1086
+
1087
+ Raises:
1088
+ Exception: If an error occurs during upsert.
1089
+ """
1090
+ try:
1091
+ table = await self._get_table(table_type="culture")
1092
+
1093
+ # Generate ID if not present
1094
+ if cultural_knowledge.id is None:
1095
+ cultural_knowledge.id = str(uuid4())
1096
+
1097
+ # Serialize content, categories, and notes into a JSON dict for DB storage
1098
+ content_dict = serialize_cultural_knowledge(cultural_knowledge)
1099
+
1100
+ async with self.async_session_factory() as sess, sess.begin():
1101
+ # Use PostgreSQL-specific insert with on_conflict_do_update
1102
+ insert_stmt = postgresql.insert(table).values(
1103
+ id=cultural_knowledge.id,
1104
+ name=cultural_knowledge.name,
1105
+ summary=cultural_knowledge.summary,
1106
+ content=content_dict if content_dict else None,
1107
+ metadata=cultural_knowledge.metadata,
1108
+ input=cultural_knowledge.input,
1109
+ created_at=cultural_knowledge.created_at,
1110
+ updated_at=int(time.time()),
1111
+ agent_id=cultural_knowledge.agent_id,
1112
+ team_id=cultural_knowledge.team_id,
1113
+ )
1114
+
1115
+ # Update all fields except id on conflict
1116
+ update_dict = {
1117
+ "name": cultural_knowledge.name,
1118
+ "summary": cultural_knowledge.summary,
1119
+ "content": content_dict if content_dict else None,
1120
+ "metadata": cultural_knowledge.metadata,
1121
+ "input": cultural_knowledge.input,
1122
+ "updated_at": int(time.time()),
1123
+ "agent_id": cultural_knowledge.agent_id,
1124
+ "team_id": cultural_knowledge.team_id,
1125
+ }
1126
+ upsert_stmt = insert_stmt.on_conflict_do_update(index_elements=["id"], set_=update_dict).returning(
1127
+ table
1128
+ )
1129
+
1130
+ result = await sess.execute(upsert_stmt)
1131
+ row = result.fetchone()
1132
+
1133
+ if row is None:
1134
+ return None
1135
+
1136
+ db_row = dict(row._mapping)
1137
+
1138
+ if not deserialize:
1139
+ return db_row
1140
+
1141
+ # Deserialize from DB format to model format
1142
+ return deserialize_cultural_knowledge(db_row)
1143
+
1144
+ except Exception as e:
1145
+ log_warning(f"Exception upserting cultural knowledge: {e}")
1146
+ raise e
1147
+
1148
+ async def get_user_memory_stats(
1149
+ self, limit: Optional[int] = None, page: Optional[int] = None
1150
+ ) -> Tuple[List[Dict[str, Any]], int]:
1151
+ """Get user memories stats.
1152
+
1153
+ Args:
1154
+ limit (Optional[int]): The maximum number of user stats to return.
1155
+ page (Optional[int]): The page number.
1156
+
1157
+ Returns:
1158
+ Tuple[List[Dict[str, Any]], int]: A list of dictionaries containing user stats and total count.
1159
+
1160
+ Example:
1161
+ (
1162
+ [
1163
+ {
1164
+ "user_id": "123",
1165
+ "total_memories": 10,
1166
+ "last_memory_updated_at": 1714560000,
1167
+ },
1168
+ ],
1169
+ total_count: 1,
1170
+ )
1171
+ """
1172
+ try:
1173
+ table = await self._get_table(table_type="memories")
1174
+
1175
+ async with self.async_session_factory() as sess, sess.begin():
1176
+ stmt = (
1177
+ select(
1178
+ table.c.user_id,
1179
+ func.count(table.c.memory_id).label("total_memories"),
1180
+ func.max(table.c.updated_at).label("last_memory_updated_at"),
1181
+ )
1182
+ .where(table.c.user_id.is_not(None))
1183
+ .group_by(table.c.user_id)
1184
+ .order_by(func.max(table.c.updated_at).desc())
1185
+ )
1186
+
1187
+ count_stmt = select(func.count()).select_from(stmt.alias())
1188
+ total_count = await sess.scalar(count_stmt) or 0
1189
+
1190
+ # Pagination
1191
+ if limit is not None:
1192
+ stmt = stmt.limit(limit)
1193
+ if page is not None:
1194
+ stmt = stmt.offset((page - 1) * limit)
1195
+
1196
+ result = await sess.execute(stmt)
1197
+ records = result.fetchall()
1198
+ if not records:
1199
+ return [], 0
1200
+
1201
+ return [
1202
+ {
1203
+ "user_id": record.user_id, # type: ignore
1204
+ "total_memories": record.total_memories,
1205
+ "last_memory_updated_at": record.last_memory_updated_at,
1206
+ }
1207
+ for record in records
1208
+ ], total_count
1209
+
1210
+ except Exception as e:
1211
+ log_error(f"Exception getting user memory stats: {e}")
1212
+ return [], 0
1213
+
1214
+ async def upsert_user_memory(
1215
+ self, memory: UserMemory, deserialize: Optional[bool] = True
1216
+ ) -> Optional[Union[UserMemory, Dict[str, Any]]]:
1217
+ """Upsert a user memory in the database.
1218
+
1219
+ Args:
1220
+ memory (UserMemory): The user memory to upsert.
1221
+ deserialize (Optional[bool]): Whether to serialize the memory. Defaults to True.
1222
+
1223
+ Returns:
1224
+ Optional[Union[UserMemory, Dict[str, Any]]]:
1225
+ - When deserialize=True: UserMemory object
1226
+ - When deserialize=False: UserMemory dictionary
1227
+
1228
+ Raises:
1229
+ Exception: If an error occurs during upsert.
1230
+ """
1231
+ try:
1232
+ table = await self._get_table(table_type="memories")
1233
+
1234
+ async with self.async_session_factory() as sess, sess.begin():
1235
+ if memory.memory_id is None:
1236
+ memory.memory_id = str(uuid4())
1237
+
1238
+ stmt = postgresql.insert(table).values(
1239
+ memory_id=memory.memory_id,
1240
+ memory=memory.memory,
1241
+ input=memory.input,
1242
+ user_id=memory.user_id,
1243
+ agent_id=memory.agent_id,
1244
+ team_id=memory.team_id,
1245
+ topics=memory.topics,
1246
+ updated_at=int(time.time()),
1247
+ )
1248
+ stmt = stmt.on_conflict_do_update( # type: ignore
1249
+ index_elements=["memory_id"],
1250
+ set_=dict(
1251
+ memory=memory.memory,
1252
+ topics=memory.topics,
1253
+ input=memory.input,
1254
+ agent_id=memory.agent_id,
1255
+ team_id=memory.team_id,
1256
+ updated_at=int(time.time()),
1257
+ ),
1258
+ ).returning(table)
1259
+
1260
+ result = await sess.execute(stmt)
1261
+ row = result.fetchone()
1262
+ if row is None:
1263
+ return None
1264
+
1265
+ memory_raw = dict(row._mapping)
1266
+
1267
+ log_debug(f"Upserted user memory with id '{memory.memory_id}'")
1268
+
1269
+ if not memory_raw or not deserialize:
1270
+ return memory_raw
1271
+
1272
+ return UserMemory.from_dict(memory_raw)
1273
+
1274
+ except Exception as e:
1275
+ log_error(f"Exception upserting user memory: {e}")
1276
+ return None
1277
+
1278
+ # -- Metrics methods --
1279
+ async def _get_all_sessions_for_metrics_calculation(
1280
+ self, start_timestamp: Optional[int] = None, end_timestamp: Optional[int] = None
1281
+ ) -> List[Dict[str, Any]]:
1282
+ """
1283
+ Get all sessions of all types (agent, team, workflow) as raw dictionaries.
1284
+
1285
+ Args:
1286
+ start_timestamp (Optional[int]): The start timestamp to filter by. Defaults to None.
1287
+ end_timestamp (Optional[int]): The end timestamp to filter by. Defaults to None.
1288
+
1289
+ Returns:
1290
+ List[Dict[str, Any]]: List of session dictionaries with session_type field.
1291
+
1292
+ Raises:
1293
+ Exception: If an error occurs during retrieval.
1294
+ """
1295
+ try:
1296
+ table = await self._get_table(table_type="sessions")
1297
+
1298
+ stmt = select(
1299
+ table.c.user_id,
1300
+ table.c.session_data,
1301
+ table.c.runs,
1302
+ table.c.created_at,
1303
+ table.c.session_type,
1304
+ )
1305
+
1306
+ if start_timestamp is not None:
1307
+ stmt = stmt.where(table.c.created_at >= start_timestamp)
1308
+ if end_timestamp is not None:
1309
+ stmt = stmt.where(table.c.created_at <= end_timestamp)
1310
+
1311
+ async with self.async_session_factory() as sess:
1312
+ result = await sess.execute(stmt)
1313
+ records = result.fetchall()
1314
+
1315
+ return [dict(record._mapping) for record in records]
1316
+
1317
+ except Exception as e:
1318
+ log_error(f"Exception reading from sessions table: {e}")
1319
+ return []
1320
+
1321
+ async def _get_metrics_calculation_starting_date(self, table: Table) -> Optional[date]:
1322
+ """Get the first date for which metrics calculation is needed:
1323
+
1324
+ 1. If there are metrics records, return the date of the first day without a complete metrics record.
1325
+ 2. If there are no metrics records, return the date of the first recorded session.
1326
+ 3. If there are no metrics records and no sessions records, return None.
1327
+
1328
+ Args:
1329
+ table (Table): The table to get the starting date for.
1330
+
1331
+ Returns:
1332
+ Optional[date]: The starting date for which metrics calculation is needed.
1333
+ """
1334
+ async with self.async_session_factory() as sess:
1335
+ stmt = select(table).order_by(table.c.date.desc()).limit(1)
1336
+ result = await sess.execute(stmt)
1337
+ row = result.fetchone()
1338
+
1339
+ # 1. Return the date of the first day without a complete metrics record.
1340
+ if row is not None:
1341
+ if row.completed:
1342
+ return row._mapping["date"] + timedelta(days=1)
1343
+ else:
1344
+ return row._mapping["date"]
1345
+
1346
+ # 2. No metrics records. Return the date of the first recorded session.
1347
+ first_session, _ = await self.get_sessions(sort_by="created_at", sort_order="asc", limit=1, deserialize=False)
1348
+
1349
+ first_session_date = first_session[0]["created_at"] if first_session else None # type: ignore[index]
1350
+
1351
+ # 3. No metrics records and no sessions records. Return None.
1352
+ if first_session_date is None:
1353
+ return None
1354
+
1355
+ return datetime.fromtimestamp(first_session_date, tz=timezone.utc).date()
1356
+
1357
    async def calculate_metrics(self) -> Optional[list[dict]]:
        """Calculate metrics for all dates without complete metrics.

        Steps:
            1. Find the first date whose metrics are missing or incomplete.
            2. Fetch all sessions created inside the corresponding UTC window.
            3. Aggregate the sessions per day and bulk-upsert one metrics record per day.

        Returns:
            Optional[list[dict]]: The upserted metrics records, or None when there is
                no session data, metrics are already up to date, or an error occurred.

        Note:
            Exceptions are logged and swallowed; None is returned on failure.
        """
        try:
            table = await self._get_table(table_type="metrics")

            starting_date = await self._get_metrics_calculation_starting_date(table)

            # No previous metrics and no sessions at all: nothing to do
            if starting_date is None:
                log_info("No session data found. Won't calculate metrics.")
                return None

            dates_to_process = get_dates_to_calculate_metrics_for(starting_date)
            if not dates_to_process:
                log_info("Metrics already calculated for all relevant dates.")
                return None

            # UTC window covering all dates to process.
            # The end bound is exclusive: midnight after the last day.
            start_timestamp = int(
                datetime.combine(dates_to_process[0], datetime.min.time()).replace(tzinfo=timezone.utc).timestamp()
            )
            end_timestamp = int(
                datetime.combine(dates_to_process[-1] + timedelta(days=1), datetime.min.time())
                .replace(tzinfo=timezone.utc)
                .timestamp()
            )

            sessions = await self._get_all_sessions_for_metrics_calculation(
                start_timestamp=start_timestamp, end_timestamp=end_timestamp
            )

            # Bucket sessions by ISO date for per-day aggregation
            all_sessions_data = fetch_all_sessions_data(
                sessions=sessions, dates_to_process=dates_to_process, start_timestamp=start_timestamp
            )
            if not all_sessions_data:
                log_info("No new session data found. Won't calculate metrics.")
                return None

            results = []
            metrics_records = []

            for date_to_process in dates_to_process:
                date_key = date_to_process.isoformat()
                sessions_for_date = all_sessions_data.get(date_key, {})

                # Skip dates with no sessions
                if not any(len(sessions) > 0 for sessions in sessions_for_date.values()):
                    continue

                metrics_record = calculate_date_metrics(date_to_process, sessions_for_date)

                metrics_records.append(metrics_record)

            # Persist all per-day records within a single transaction
            if metrics_records:
                async with self.async_session_factory() as sess, sess.begin():
                    results = await abulk_upsert_metrics(session=sess, table=table, metrics_records=metrics_records)

            log_debug("Updated metrics calculations")

            return results

        except Exception as e:
            log_error(f"Exception refreshing metrics: {e}")
            return None
1427
+ async def get_metrics(
1428
+ self, starting_date: Optional[date] = None, ending_date: Optional[date] = None
1429
+ ) -> Tuple[List[dict], Optional[int]]:
1430
+ """Get all metrics matching the given date range.
1431
+
1432
+ Args:
1433
+ starting_date (Optional[date]): The starting date to filter metrics by.
1434
+ ending_date (Optional[date]): The ending date to filter metrics by.
1435
+
1436
+ Returns:
1437
+ Tuple[List[dict], Optional[int]]: A tuple containing the metrics and the timestamp of the latest update.
1438
+
1439
+ Raises:
1440
+ Exception: If an error occurs during retrieval.
1441
+ """
1442
+ try:
1443
+ table = await self._get_table(table_type="metrics")
1444
+
1445
+ async with self.async_session_factory() as sess, sess.begin():
1446
+ stmt = select(table)
1447
+ if starting_date:
1448
+ stmt = stmt.where(table.c.date >= starting_date)
1449
+ if ending_date:
1450
+ stmt = stmt.where(table.c.date <= ending_date)
1451
+ result = await sess.execute(stmt)
1452
+ records = result.fetchall()
1453
+ if not records:
1454
+ return [], None
1455
+
1456
+ # Get the latest updated_at
1457
+ latest_stmt = select(func.max(table.c.updated_at))
1458
+ latest_result = await sess.execute(latest_stmt)
1459
+ latest_updated_at = latest_result.scalar()
1460
+
1461
+ return [dict(row._mapping) for row in records], latest_updated_at
1462
+
1463
+ except Exception as e:
1464
+ log_warning(f"Exception getting metrics: {e}")
1465
+ return [], None
1466
+
1467
+ # -- Knowledge methods --
1468
+ async def delete_knowledge_content(self, id: str):
1469
+ """Delete a knowledge row from the database.
1470
+
1471
+ Args:
1472
+ id (str): The ID of the knowledge row to delete.
1473
+ """
1474
+ table = await self._get_table(table_type="knowledge")
1475
+
1476
+ try:
1477
+ async with self.async_session_factory() as sess, sess.begin():
1478
+ stmt = table.delete().where(table.c.id == id)
1479
+ await sess.execute(stmt)
1480
+
1481
+ except Exception as e:
1482
+ log_error(f"Exception deleting knowledge content: {e}")
1483
+
1484
+ async def get_knowledge_content(self, id: str) -> Optional[KnowledgeRow]:
1485
+ """Get a knowledge row from the database.
1486
+
1487
+ Args:
1488
+ id (str): The ID of the knowledge row to get.
1489
+
1490
+ Returns:
1491
+ Optional[KnowledgeRow]: The knowledge row, or None if it doesn't exist.
1492
+ """
1493
+ table = await self._get_table(table_type="knowledge")
1494
+
1495
+ try:
1496
+ async with self.async_session_factory() as sess, sess.begin():
1497
+ stmt = select(table).where(table.c.id == id)
1498
+ result = await sess.execute(stmt)
1499
+ row = result.fetchone()
1500
+ if row is None:
1501
+ return None
1502
+
1503
+ return KnowledgeRow.model_validate(row._mapping)
1504
+
1505
+ except Exception as e:
1506
+ log_error(f"Exception getting knowledge content: {e}")
1507
+ return None
1508
+
1509
+ async def get_knowledge_contents(
1510
+ self,
1511
+ limit: Optional[int] = None,
1512
+ page: Optional[int] = None,
1513
+ sort_by: Optional[str] = None,
1514
+ sort_order: Optional[str] = None,
1515
+ ) -> Tuple[List[KnowledgeRow], int]:
1516
+ """Get all knowledge contents from the database.
1517
+
1518
+ Args:
1519
+ limit (Optional[int]): The maximum number of knowledge contents to return.
1520
+ page (Optional[int]): The page number.
1521
+ sort_by (Optional[str]): The column to sort by.
1522
+ sort_order (Optional[str]): The order to sort by.
1523
+
1524
+ Returns:
1525
+ List[KnowledgeRow]: The knowledge contents.
1526
+
1527
+ Raises:
1528
+ Exception: If an error occurs during retrieval.
1529
+ """
1530
+ table = await self._get_table(table_type="knowledge")
1531
+
1532
+ try:
1533
+ async with self.async_session_factory() as sess, sess.begin():
1534
+ stmt = select(table)
1535
+
1536
+ # Apply sorting
1537
+ if sort_by is not None:
1538
+ stmt = stmt.order_by(getattr(table.c, sort_by) * (1 if sort_order == "asc" else -1))
1539
+
1540
+ # Get total count before applying limit and pagination
1541
+ count_stmt = select(func.count()).select_from(stmt.alias())
1542
+ total_count = await sess.scalar(count_stmt) or 0
1543
+
1544
+ # Apply pagination after count
1545
+ if limit is not None:
1546
+ stmt = stmt.limit(limit)
1547
+ if page is not None:
1548
+ stmt = stmt.offset((page - 1) * limit)
1549
+
1550
+ result = await sess.execute(stmt)
1551
+ records = result.fetchall()
1552
+ return [KnowledgeRow.model_validate(record._mapping) for record in records], total_count
1553
+
1554
+ except Exception as e:
1555
+ log_error(f"Exception getting knowledge contents: {e}")
1556
+ return [], 0
1557
+
1558
    async def upsert_knowledge_content(self, knowledge_row: KnowledgeRow):
        """Upsert knowledge content in the database.

        Inserts a new row or, on id conflict, updates every mapped non-None field
        except the primary key.

        Args:
            knowledge_row (KnowledgeRow): The knowledge row to upsert.

        Returns:
            Optional[KnowledgeRow]: The given knowledge row on success, or None if
                the operation fails or no usable fields were found.
        """
        try:
            table = await self._get_table(table_type="knowledge")
            async with self.async_session_factory() as sess, sess.begin():
                # Get the actual table columns to avoid "unconsumed column names" error
                table_columns = set(table.columns.keys())

                # Only include fields that exist in the table and are not None.
                # NOTE(review): None values are skipped entirely, so this method
                # cannot reset an existing column back to NULL — confirm intended.
                insert_data = {}
                update_fields = {}

                # Map of KnowledgeRow fields to table columns
                field_mapping = {
                    "id": "id",
                    "name": "name",
                    "description": "description",
                    "metadata": "metadata",
                    "type": "type",
                    "size": "size",
                    "linked_to": "linked_to",
                    "access_count": "access_count",
                    "status": "status",
                    "status_message": "status_message",
                    "created_at": "created_at",
                    "updated_at": "updated_at",
                    "external_id": "external_id",
                }

                # Build insert and update data only for fields that exist in the table
                for model_field, table_column in field_mapping.items():
                    if table_column in table_columns:
                        value = getattr(knowledge_row, model_field, None)
                        if value is not None:
                            insert_data[table_column] = value
                            # Don't include ID in update_fields since it's the primary key
                            if table_column != "id":
                                update_fields[table_column] = value

                # Ensure id is always included for the insert
                if "id" in table_columns and knowledge_row.id:
                    insert_data["id"] = knowledge_row.id

                # Handle case where update_fields is empty (all fields are None or don't exist in table)
                if not update_fields:
                    # If we have insert_data, just do an insert without conflict resolution
                    if insert_data:
                        stmt = postgresql.insert(table).values(insert_data)
                        await sess.execute(stmt)
                    else:
                        # If we have no data at all, this is an error
                        log_error("No valid fields found for knowledge row upsert")
                        return None
                else:
                    # Normal upsert with conflict resolution
                    stmt = (
                        postgresql.insert(table)
                        .values(insert_data)
                        .on_conflict_do_update(index_elements=["id"], set_=update_fields)
                    )
                    await sess.execute(stmt)

                log_debug(f"Upserted knowledge row with id '{knowledge_row.id}'")

                return knowledge_row

        except Exception as e:
            log_error(f"Error upserting knowledge row: {e}")
            return None
1635
+ # -- Eval methods --
1636
+ async def create_eval_run(self, eval_run: EvalRunRecord) -> Optional[EvalRunRecord]:
1637
+ """Create an EvalRunRecord in the database.
1638
+
1639
+ Args:
1640
+ eval_run (EvalRunRecord): The eval run to create.
1641
+
1642
+ Returns:
1643
+ Optional[EvalRunRecord]: The created eval run, or None if the operation fails.
1644
+
1645
+ Raises:
1646
+ Exception: If an error occurs during creation.
1647
+ """
1648
+ try:
1649
+ table = await self._get_table(table_type="evals")
1650
+
1651
+ async with self.async_session_factory() as sess, sess.begin():
1652
+ current_time = int(time.time())
1653
+ stmt = postgresql.insert(table).values(
1654
+ {"created_at": current_time, "updated_at": current_time, **eval_run.model_dump()}
1655
+ )
1656
+ await sess.execute(stmt)
1657
+
1658
+ log_debug(f"Created eval run with id '{eval_run.run_id}'")
1659
+
1660
+ return eval_run
1661
+
1662
+ except Exception as e:
1663
+ log_error(f"Error creating eval run: {e}")
1664
+ return None
1665
+
1666
+ async def delete_eval_run(self, eval_run_id: str) -> None:
1667
+ """Delete an eval run from the database.
1668
+
1669
+ Args:
1670
+ eval_run_id (str): The ID of the eval run to delete.
1671
+ """
1672
+ try:
1673
+ table = await self._get_table(table_type="evals")
1674
+
1675
+ async with self.async_session_factory() as sess, sess.begin():
1676
+ stmt = table.delete().where(table.c.run_id == eval_run_id)
1677
+ result = await sess.execute(stmt)
1678
+
1679
+ if result.rowcount == 0: # type: ignore
1680
+ log_warning(f"No eval run found with ID: {eval_run_id}")
1681
+ else:
1682
+ log_debug(f"Deleted eval run with ID: {eval_run_id}")
1683
+
1684
+ except Exception as e:
1685
+ log_error(f"Error deleting eval run {eval_run_id}: {e}")
1686
+
1687
+ async def delete_eval_runs(self, eval_run_ids: List[str]) -> None:
1688
+ """Delete multiple eval runs from the database.
1689
+
1690
+ Args:
1691
+ eval_run_ids (List[str]): List of eval run IDs to delete.
1692
+ """
1693
+ try:
1694
+ table = await self._get_table(table_type="evals")
1695
+
1696
+ async with self.async_session_factory() as sess, sess.begin():
1697
+ stmt = table.delete().where(table.c.run_id.in_(eval_run_ids))
1698
+ result = await sess.execute(stmt)
1699
+
1700
+ if result.rowcount == 0: # type: ignore
1701
+ log_warning(f"No eval runs found with IDs: {eval_run_ids}")
1702
+ else:
1703
+ log_debug(f"Deleted {result.rowcount} eval runs") # type: ignore
1704
+
1705
+ except Exception as e:
1706
+ log_error(f"Error deleting eval runs {eval_run_ids}: {e}")
1707
+
1708
+ async def get_eval_run(
1709
+ self, eval_run_id: str, deserialize: Optional[bool] = True
1710
+ ) -> Optional[Union[EvalRunRecord, Dict[str, Any]]]:
1711
+ """Get an eval run from the database.
1712
+
1713
+ Args:
1714
+ eval_run_id (str): The ID of the eval run to get.
1715
+ deserialize (Optional[bool]): Whether to serialize the eval run. Defaults to True.
1716
+
1717
+ Returns:
1718
+ Optional[Union[EvalRunRecord, Dict[str, Any]]]:
1719
+ - When deserialize=True: EvalRunRecord object
1720
+ - When deserialize=False: EvalRun dictionary
1721
+
1722
+ Raises:
1723
+ Exception: If an error occurs during retrieval.
1724
+ """
1725
+ try:
1726
+ table = await self._get_table(table_type="evals")
1727
+
1728
+ async with self.async_session_factory() as sess, sess.begin():
1729
+ stmt = select(table).where(table.c.run_id == eval_run_id)
1730
+ result = await sess.execute(stmt)
1731
+ row = result.fetchone()
1732
+ if row is None:
1733
+ return None
1734
+
1735
+ eval_run_raw = dict(row._mapping)
1736
+ if not deserialize:
1737
+ return eval_run_raw
1738
+
1739
+ return EvalRunRecord.model_validate(eval_run_raw)
1740
+
1741
+ except Exception as e:
1742
+ log_error(f"Exception getting eval run {eval_run_id}: {e}")
1743
+ return None
1744
+
1745
+ async def get_eval_runs(
1746
+ self,
1747
+ limit: Optional[int] = None,
1748
+ page: Optional[int] = None,
1749
+ sort_by: Optional[str] = None,
1750
+ sort_order: Optional[str] = None,
1751
+ agent_id: Optional[str] = None,
1752
+ team_id: Optional[str] = None,
1753
+ workflow_id: Optional[str] = None,
1754
+ model_id: Optional[str] = None,
1755
+ filter_type: Optional[EvalFilterType] = None,
1756
+ eval_type: Optional[List[EvalType]] = None,
1757
+ deserialize: Optional[bool] = True,
1758
+ ) -> Union[List[EvalRunRecord], Tuple[List[Dict[str, Any]], int]]:
1759
+ """Get all eval runs from the database.
1760
+
1761
+ Args:
1762
+ limit (Optional[int]): The maximum number of eval runs to return.
1763
+ page (Optional[int]): The page number.
1764
+ sort_by (Optional[str]): The column to sort by.
1765
+ sort_order (Optional[str]): The order to sort by.
1766
+ agent_id (Optional[str]): The ID of the agent to filter by.
1767
+ team_id (Optional[str]): The ID of the team to filter by.
1768
+ workflow_id (Optional[str]): The ID of the workflow to filter by.
1769
+ model_id (Optional[str]): The ID of the model to filter by.
1770
+ eval_type (Optional[List[EvalType]]): The type(s) of eval to filter by.
1771
+ filter_type (Optional[EvalFilterType]): Filter by component type (agent, team, workflow).
1772
+ deserialize (Optional[bool]): Whether to serialize the eval runs. Defaults to True.
1773
+
1774
+ Returns:
1775
+ Union[List[EvalRunRecord], Tuple[List[Dict[str, Any]], int]]:
1776
+ - When deserialize=True: List of EvalRunRecord objects
1777
+ - When deserialize=False: List of dictionaries
1778
+
1779
+ Raises:
1780
+ Exception: If an error occurs during retrieval.
1781
+ """
1782
+ try:
1783
+ table = await self._get_table(table_type="evals")
1784
+
1785
+ async with self.async_session_factory() as sess, sess.begin():
1786
+ stmt = select(table)
1787
+
1788
+ # Filtering
1789
+ if agent_id is not None:
1790
+ stmt = stmt.where(table.c.agent_id == agent_id)
1791
+ if team_id is not None:
1792
+ stmt = stmt.where(table.c.team_id == team_id)
1793
+ if workflow_id is not None:
1794
+ stmt = stmt.where(table.c.workflow_id == workflow_id)
1795
+ if model_id is not None:
1796
+ stmt = stmt.where(table.c.model_id == model_id)
1797
+ if eval_type is not None and len(eval_type) > 0:
1798
+ stmt = stmt.where(table.c.eval_type.in_(eval_type))
1799
+ if filter_type is not None:
1800
+ if filter_type == EvalFilterType.AGENT:
1801
+ stmt = stmt.where(table.c.agent_id.is_not(None))
1802
+ elif filter_type == EvalFilterType.TEAM:
1803
+ stmt = stmt.where(table.c.team_id.is_not(None))
1804
+ elif filter_type == EvalFilterType.WORKFLOW:
1805
+ stmt = stmt.where(table.c.workflow_id.is_not(None))
1806
+
1807
+ # Get total count after applying filtering
1808
+ count_stmt = select(func.count()).select_from(stmt.alias())
1809
+ total_count = await sess.scalar(count_stmt) or 0
1810
+
1811
+ # Sorting
1812
+ if sort_by is None:
1813
+ stmt = stmt.order_by(table.c.created_at.desc())
1814
+ else:
1815
+ stmt = apply_sorting(stmt, table, sort_by, sort_order)
1816
+
1817
+ # Paginating
1818
+ if limit is not None:
1819
+ stmt = stmt.limit(limit)
1820
+ if page is not None:
1821
+ stmt = stmt.offset((page - 1) * limit)
1822
+
1823
+ result = await sess.execute(stmt)
1824
+ records = result.fetchall()
1825
+ if not records:
1826
+ return [] if deserialize else ([], 0)
1827
+
1828
+ eval_runs_raw = [dict(row._mapping) for row in records]
1829
+ if not deserialize:
1830
+ return eval_runs_raw, total_count
1831
+
1832
+ return [EvalRunRecord.model_validate(row) for row in eval_runs_raw]
1833
+
1834
+ except Exception as e:
1835
+ log_error(f"Exception getting eval runs: {e}")
1836
+ return [] if deserialize else ([], 0)
1837
+
1838
+ async def rename_eval_run(
1839
+ self, eval_run_id: str, name: str, deserialize: Optional[bool] = True
1840
+ ) -> Optional[Union[EvalRunRecord, Dict[str, Any]]]:
1841
+ """Upsert the name of an eval run in the database, returning raw dictionary.
1842
+
1843
+ Args:
1844
+ eval_run_id (str): The ID of the eval run to update.
1845
+ name (str): The new name of the eval run.
1846
+
1847
+ Returns:
1848
+ Optional[Dict[str, Any]]: The updated eval run, or None if the operation fails.
1849
+
1850
+ Raises:
1851
+ Exception: If an error occurs during update.
1852
+ """
1853
+ try:
1854
+ table = await self._get_table(table_type="evals")
1855
+ async with self.async_session_factory() as sess, sess.begin():
1856
+ stmt = (
1857
+ table.update().where(table.c.run_id == eval_run_id).values(name=name, updated_at=int(time.time()))
1858
+ )
1859
+ await sess.execute(stmt)
1860
+
1861
+ eval_run_raw = await self.get_eval_run(eval_run_id=eval_run_id, deserialize=deserialize)
1862
+ if not eval_run_raw or not deserialize:
1863
+ return eval_run_raw
1864
+
1865
+ return EvalRunRecord.model_validate(eval_run_raw)
1866
+
1867
+ except Exception as e:
1868
+ log_error(f"Error upserting eval run name {eval_run_id}: {e}")
1869
+ return None
1870
+
1871
+ # -- Migrations --
1872
+
1873
+ async def migrate_table_from_v1_to_v2(self, v1_db_schema: str, v1_table_name: str, v1_table_type: str):
1874
+ """Migrate all content in the given table to the right v2 table"""
1875
+
1876
+ from agno.db.migrations.v1_to_v2 import (
1877
+ get_all_table_content,
1878
+ parse_agent_sessions,
1879
+ parse_memories,
1880
+ parse_team_sessions,
1881
+ parse_workflow_sessions,
1882
+ )
1883
+
1884
+ # Get all content from the old table
1885
+ old_content: list[dict[str, Any]] = get_all_table_content(
1886
+ db=self,
1887
+ db_schema=v1_db_schema,
1888
+ table_name=v1_table_name,
1889
+ )
1890
+ if not old_content:
1891
+ log_info(f"No content to migrate from table {v1_table_name}")
1892
+ return
1893
+
1894
+ # Parse the content into the new format
1895
+ memories: List[UserMemory] = []
1896
+ sessions: Sequence[Union[AgentSession, TeamSession, WorkflowSession]] = []
1897
+ if v1_table_type == "agent_sessions":
1898
+ sessions = parse_agent_sessions(old_content)
1899
+ elif v1_table_type == "team_sessions":
1900
+ sessions = parse_team_sessions(old_content)
1901
+ elif v1_table_type == "workflow_sessions":
1902
+ sessions = parse_workflow_sessions(old_content)
1903
+ elif v1_table_type == "memories":
1904
+ memories = parse_memories(old_content)
1905
+ else:
1906
+ raise ValueError(f"Invalid table type: {v1_table_type}")
1907
+
1908
+ # Insert the new content into the new table
1909
+ if v1_table_type == "agent_sessions":
1910
+ for session in sessions:
1911
+ await self.upsert_session(session)
1912
+ log_info(f"Migrated {len(sessions)} Agent sessions to table: {self.session_table}")
1913
+
1914
+ elif v1_table_type == "team_sessions":
1915
+ for session in sessions:
1916
+ await self.upsert_session(session)
1917
+ log_info(f"Migrated {len(sessions)} Team sessions to table: {self.session_table}")
1918
+
1919
+ elif v1_table_type == "workflow_sessions":
1920
+ for session in sessions:
1921
+ await self.upsert_session(session)
1922
+ log_info(f"Migrated {len(sessions)} Workflow sessions to table: {self.session_table}")
1923
+
1924
+ elif v1_table_type == "memories":
1925
+ for memory in memories:
1926
+ await self.upsert_user_memory(memory)
1927
+ log_info(f"Migrated {len(memories)} memories to table: {self.memory_table}")