agno 2.2.13__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (575)
  1. agno/__init__.py +8 -0
  2. agno/agent/__init__.py +51 -0
  3. agno/agent/agent.py +10405 -0
  4. agno/api/__init__.py +0 -0
  5. agno/api/agent.py +28 -0
  6. agno/api/api.py +40 -0
  7. agno/api/evals.py +22 -0
  8. agno/api/os.py +17 -0
  9. agno/api/routes.py +13 -0
  10. agno/api/schemas/__init__.py +9 -0
  11. agno/api/schemas/agent.py +16 -0
  12. agno/api/schemas/evals.py +16 -0
  13. agno/api/schemas/os.py +14 -0
  14. agno/api/schemas/response.py +6 -0
  15. agno/api/schemas/team.py +16 -0
  16. agno/api/schemas/utils.py +21 -0
  17. agno/api/schemas/workflows.py +16 -0
  18. agno/api/settings.py +53 -0
  19. agno/api/team.py +30 -0
  20. agno/api/workflow.py +28 -0
  21. agno/cloud/aws/base.py +214 -0
  22. agno/cloud/aws/s3/__init__.py +2 -0
  23. agno/cloud/aws/s3/api_client.py +43 -0
  24. agno/cloud/aws/s3/bucket.py +195 -0
  25. agno/cloud/aws/s3/object.py +57 -0
  26. agno/culture/__init__.py +3 -0
  27. agno/culture/manager.py +956 -0
  28. agno/db/__init__.py +24 -0
  29. agno/db/async_postgres/__init__.py +3 -0
  30. agno/db/base.py +598 -0
  31. agno/db/dynamo/__init__.py +3 -0
  32. agno/db/dynamo/dynamo.py +2042 -0
  33. agno/db/dynamo/schemas.py +314 -0
  34. agno/db/dynamo/utils.py +743 -0
  35. agno/db/firestore/__init__.py +3 -0
  36. agno/db/firestore/firestore.py +1795 -0
  37. agno/db/firestore/schemas.py +140 -0
  38. agno/db/firestore/utils.py +376 -0
  39. agno/db/gcs_json/__init__.py +3 -0
  40. agno/db/gcs_json/gcs_json_db.py +1335 -0
  41. agno/db/gcs_json/utils.py +228 -0
  42. agno/db/in_memory/__init__.py +3 -0
  43. agno/db/in_memory/in_memory_db.py +1160 -0
  44. agno/db/in_memory/utils.py +230 -0
  45. agno/db/json/__init__.py +3 -0
  46. agno/db/json/json_db.py +1328 -0
  47. agno/db/json/utils.py +230 -0
  48. agno/db/migrations/__init__.py +0 -0
  49. agno/db/migrations/v1_to_v2.py +635 -0
  50. agno/db/mongo/__init__.py +17 -0
  51. agno/db/mongo/async_mongo.py +2026 -0
  52. agno/db/mongo/mongo.py +1982 -0
  53. agno/db/mongo/schemas.py +87 -0
  54. agno/db/mongo/utils.py +259 -0
  55. agno/db/mysql/__init__.py +3 -0
  56. agno/db/mysql/mysql.py +2308 -0
  57. agno/db/mysql/schemas.py +138 -0
  58. agno/db/mysql/utils.py +355 -0
  59. agno/db/postgres/__init__.py +4 -0
  60. agno/db/postgres/async_postgres.py +1927 -0
  61. agno/db/postgres/postgres.py +2260 -0
  62. agno/db/postgres/schemas.py +139 -0
  63. agno/db/postgres/utils.py +442 -0
  64. agno/db/redis/__init__.py +3 -0
  65. agno/db/redis/redis.py +1660 -0
  66. agno/db/redis/schemas.py +123 -0
  67. agno/db/redis/utils.py +346 -0
  68. agno/db/schemas/__init__.py +4 -0
  69. agno/db/schemas/culture.py +120 -0
  70. agno/db/schemas/evals.py +33 -0
  71. agno/db/schemas/knowledge.py +40 -0
  72. agno/db/schemas/memory.py +46 -0
  73. agno/db/schemas/metrics.py +0 -0
  74. agno/db/singlestore/__init__.py +3 -0
  75. agno/db/singlestore/schemas.py +130 -0
  76. agno/db/singlestore/singlestore.py +2272 -0
  77. agno/db/singlestore/utils.py +384 -0
  78. agno/db/sqlite/__init__.py +4 -0
  79. agno/db/sqlite/async_sqlite.py +2293 -0
  80. agno/db/sqlite/schemas.py +133 -0
  81. agno/db/sqlite/sqlite.py +2288 -0
  82. agno/db/sqlite/utils.py +431 -0
  83. agno/db/surrealdb/__init__.py +3 -0
  84. agno/db/surrealdb/metrics.py +292 -0
  85. agno/db/surrealdb/models.py +309 -0
  86. agno/db/surrealdb/queries.py +71 -0
  87. agno/db/surrealdb/surrealdb.py +1353 -0
  88. agno/db/surrealdb/utils.py +147 -0
  89. agno/db/utils.py +116 -0
  90. agno/debug.py +18 -0
  91. agno/eval/__init__.py +14 -0
  92. agno/eval/accuracy.py +834 -0
  93. agno/eval/performance.py +773 -0
  94. agno/eval/reliability.py +306 -0
  95. agno/eval/utils.py +119 -0
  96. agno/exceptions.py +161 -0
  97. agno/filters.py +354 -0
  98. agno/guardrails/__init__.py +6 -0
  99. agno/guardrails/base.py +19 -0
  100. agno/guardrails/openai.py +144 -0
  101. agno/guardrails/pii.py +94 -0
  102. agno/guardrails/prompt_injection.py +52 -0
  103. agno/integrations/__init__.py +0 -0
  104. agno/integrations/discord/__init__.py +3 -0
  105. agno/integrations/discord/client.py +203 -0
  106. agno/knowledge/__init__.py +5 -0
  107. agno/knowledge/chunking/__init__.py +0 -0
  108. agno/knowledge/chunking/agentic.py +79 -0
  109. agno/knowledge/chunking/document.py +91 -0
  110. agno/knowledge/chunking/fixed.py +57 -0
  111. agno/knowledge/chunking/markdown.py +151 -0
  112. agno/knowledge/chunking/recursive.py +63 -0
  113. agno/knowledge/chunking/row.py +39 -0
  114. agno/knowledge/chunking/semantic.py +86 -0
  115. agno/knowledge/chunking/strategy.py +165 -0
  116. agno/knowledge/content.py +74 -0
  117. agno/knowledge/document/__init__.py +5 -0
  118. agno/knowledge/document/base.py +58 -0
  119. agno/knowledge/embedder/__init__.py +5 -0
  120. agno/knowledge/embedder/aws_bedrock.py +343 -0
  121. agno/knowledge/embedder/azure_openai.py +210 -0
  122. agno/knowledge/embedder/base.py +23 -0
  123. agno/knowledge/embedder/cohere.py +323 -0
  124. agno/knowledge/embedder/fastembed.py +62 -0
  125. agno/knowledge/embedder/fireworks.py +13 -0
  126. agno/knowledge/embedder/google.py +258 -0
  127. agno/knowledge/embedder/huggingface.py +94 -0
  128. agno/knowledge/embedder/jina.py +182 -0
  129. agno/knowledge/embedder/langdb.py +22 -0
  130. agno/knowledge/embedder/mistral.py +206 -0
  131. agno/knowledge/embedder/nebius.py +13 -0
  132. agno/knowledge/embedder/ollama.py +154 -0
  133. agno/knowledge/embedder/openai.py +195 -0
  134. agno/knowledge/embedder/sentence_transformer.py +63 -0
  135. agno/knowledge/embedder/together.py +13 -0
  136. agno/knowledge/embedder/vllm.py +262 -0
  137. agno/knowledge/embedder/voyageai.py +165 -0
  138. agno/knowledge/knowledge.py +1988 -0
  139. agno/knowledge/reader/__init__.py +7 -0
  140. agno/knowledge/reader/arxiv_reader.py +81 -0
  141. agno/knowledge/reader/base.py +95 -0
  142. agno/knowledge/reader/csv_reader.py +166 -0
  143. agno/knowledge/reader/docx_reader.py +82 -0
  144. agno/knowledge/reader/field_labeled_csv_reader.py +292 -0
  145. agno/knowledge/reader/firecrawl_reader.py +201 -0
  146. agno/knowledge/reader/json_reader.py +87 -0
  147. agno/knowledge/reader/markdown_reader.py +137 -0
  148. agno/knowledge/reader/pdf_reader.py +431 -0
  149. agno/knowledge/reader/pptx_reader.py +101 -0
  150. agno/knowledge/reader/reader_factory.py +313 -0
  151. agno/knowledge/reader/s3_reader.py +89 -0
  152. agno/knowledge/reader/tavily_reader.py +194 -0
  153. agno/knowledge/reader/text_reader.py +115 -0
  154. agno/knowledge/reader/web_search_reader.py +372 -0
  155. agno/knowledge/reader/website_reader.py +455 -0
  156. agno/knowledge/reader/wikipedia_reader.py +59 -0
  157. agno/knowledge/reader/youtube_reader.py +78 -0
  158. agno/knowledge/remote_content/__init__.py +0 -0
  159. agno/knowledge/remote_content/remote_content.py +88 -0
  160. agno/knowledge/reranker/__init__.py +3 -0
  161. agno/knowledge/reranker/base.py +14 -0
  162. agno/knowledge/reranker/cohere.py +64 -0
  163. agno/knowledge/reranker/infinity.py +195 -0
  164. agno/knowledge/reranker/sentence_transformer.py +54 -0
  165. agno/knowledge/types.py +39 -0
  166. agno/knowledge/utils.py +189 -0
  167. agno/media.py +462 -0
  168. agno/memory/__init__.py +3 -0
  169. agno/memory/manager.py +1327 -0
  170. agno/models/__init__.py +0 -0
  171. agno/models/aimlapi/__init__.py +5 -0
  172. agno/models/aimlapi/aimlapi.py +45 -0
  173. agno/models/anthropic/__init__.py +5 -0
  174. agno/models/anthropic/claude.py +757 -0
  175. agno/models/aws/__init__.py +15 -0
  176. agno/models/aws/bedrock.py +701 -0
  177. agno/models/aws/claude.py +378 -0
  178. agno/models/azure/__init__.py +18 -0
  179. agno/models/azure/ai_foundry.py +485 -0
  180. agno/models/azure/openai_chat.py +131 -0
  181. agno/models/base.py +2175 -0
  182. agno/models/cerebras/__init__.py +12 -0
  183. agno/models/cerebras/cerebras.py +501 -0
  184. agno/models/cerebras/cerebras_openai.py +112 -0
  185. agno/models/cohere/__init__.py +5 -0
  186. agno/models/cohere/chat.py +389 -0
  187. agno/models/cometapi/__init__.py +5 -0
  188. agno/models/cometapi/cometapi.py +57 -0
  189. agno/models/dashscope/__init__.py +5 -0
  190. agno/models/dashscope/dashscope.py +91 -0
  191. agno/models/deepinfra/__init__.py +5 -0
  192. agno/models/deepinfra/deepinfra.py +28 -0
  193. agno/models/deepseek/__init__.py +5 -0
  194. agno/models/deepseek/deepseek.py +61 -0
  195. agno/models/defaults.py +1 -0
  196. agno/models/fireworks/__init__.py +5 -0
  197. agno/models/fireworks/fireworks.py +26 -0
  198. agno/models/google/__init__.py +5 -0
  199. agno/models/google/gemini.py +1085 -0
  200. agno/models/groq/__init__.py +5 -0
  201. agno/models/groq/groq.py +556 -0
  202. agno/models/huggingface/__init__.py +5 -0
  203. agno/models/huggingface/huggingface.py +491 -0
  204. agno/models/ibm/__init__.py +5 -0
  205. agno/models/ibm/watsonx.py +422 -0
  206. agno/models/internlm/__init__.py +3 -0
  207. agno/models/internlm/internlm.py +26 -0
  208. agno/models/langdb/__init__.py +1 -0
  209. agno/models/langdb/langdb.py +48 -0
  210. agno/models/litellm/__init__.py +14 -0
  211. agno/models/litellm/chat.py +468 -0
  212. agno/models/litellm/litellm_openai.py +25 -0
  213. agno/models/llama_cpp/__init__.py +5 -0
  214. agno/models/llama_cpp/llama_cpp.py +22 -0
  215. agno/models/lmstudio/__init__.py +5 -0
  216. agno/models/lmstudio/lmstudio.py +25 -0
  217. agno/models/message.py +434 -0
  218. agno/models/meta/__init__.py +12 -0
  219. agno/models/meta/llama.py +475 -0
  220. agno/models/meta/llama_openai.py +78 -0
  221. agno/models/metrics.py +120 -0
  222. agno/models/mistral/__init__.py +5 -0
  223. agno/models/mistral/mistral.py +432 -0
  224. agno/models/nebius/__init__.py +3 -0
  225. agno/models/nebius/nebius.py +54 -0
  226. agno/models/nexus/__init__.py +3 -0
  227. agno/models/nexus/nexus.py +22 -0
  228. agno/models/nvidia/__init__.py +5 -0
  229. agno/models/nvidia/nvidia.py +28 -0
  230. agno/models/ollama/__init__.py +5 -0
  231. agno/models/ollama/chat.py +441 -0
  232. agno/models/openai/__init__.py +9 -0
  233. agno/models/openai/chat.py +883 -0
  234. agno/models/openai/like.py +27 -0
  235. agno/models/openai/responses.py +1050 -0
  236. agno/models/openrouter/__init__.py +5 -0
  237. agno/models/openrouter/openrouter.py +66 -0
  238. agno/models/perplexity/__init__.py +5 -0
  239. agno/models/perplexity/perplexity.py +187 -0
  240. agno/models/portkey/__init__.py +3 -0
  241. agno/models/portkey/portkey.py +81 -0
  242. agno/models/requesty/__init__.py +5 -0
  243. agno/models/requesty/requesty.py +52 -0
  244. agno/models/response.py +199 -0
  245. agno/models/sambanova/__init__.py +5 -0
  246. agno/models/sambanova/sambanova.py +28 -0
  247. agno/models/siliconflow/__init__.py +5 -0
  248. agno/models/siliconflow/siliconflow.py +25 -0
  249. agno/models/together/__init__.py +5 -0
  250. agno/models/together/together.py +25 -0
  251. agno/models/utils.py +266 -0
  252. agno/models/vercel/__init__.py +3 -0
  253. agno/models/vercel/v0.py +26 -0
  254. agno/models/vertexai/__init__.py +0 -0
  255. agno/models/vertexai/claude.py +70 -0
  256. agno/models/vllm/__init__.py +3 -0
  257. agno/models/vllm/vllm.py +78 -0
  258. agno/models/xai/__init__.py +3 -0
  259. agno/models/xai/xai.py +113 -0
  260. agno/os/__init__.py +3 -0
  261. agno/os/app.py +876 -0
  262. agno/os/auth.py +57 -0
  263. agno/os/config.py +104 -0
  264. agno/os/interfaces/__init__.py +1 -0
  265. agno/os/interfaces/a2a/__init__.py +3 -0
  266. agno/os/interfaces/a2a/a2a.py +42 -0
  267. agno/os/interfaces/a2a/router.py +250 -0
  268. agno/os/interfaces/a2a/utils.py +924 -0
  269. agno/os/interfaces/agui/__init__.py +3 -0
  270. agno/os/interfaces/agui/agui.py +47 -0
  271. agno/os/interfaces/agui/router.py +144 -0
  272. agno/os/interfaces/agui/utils.py +534 -0
  273. agno/os/interfaces/base.py +25 -0
  274. agno/os/interfaces/slack/__init__.py +3 -0
  275. agno/os/interfaces/slack/router.py +148 -0
  276. agno/os/interfaces/slack/security.py +30 -0
  277. agno/os/interfaces/slack/slack.py +47 -0
  278. agno/os/interfaces/whatsapp/__init__.py +3 -0
  279. agno/os/interfaces/whatsapp/router.py +211 -0
  280. agno/os/interfaces/whatsapp/security.py +53 -0
  281. agno/os/interfaces/whatsapp/whatsapp.py +36 -0
  282. agno/os/mcp.py +292 -0
  283. agno/os/middleware/__init__.py +7 -0
  284. agno/os/middleware/jwt.py +233 -0
  285. agno/os/router.py +1763 -0
  286. agno/os/routers/__init__.py +3 -0
  287. agno/os/routers/evals/__init__.py +3 -0
  288. agno/os/routers/evals/evals.py +430 -0
  289. agno/os/routers/evals/schemas.py +142 -0
  290. agno/os/routers/evals/utils.py +162 -0
  291. agno/os/routers/health.py +31 -0
  292. agno/os/routers/home.py +52 -0
  293. agno/os/routers/knowledge/__init__.py +3 -0
  294. agno/os/routers/knowledge/knowledge.py +997 -0
  295. agno/os/routers/knowledge/schemas.py +178 -0
  296. agno/os/routers/memory/__init__.py +3 -0
  297. agno/os/routers/memory/memory.py +515 -0
  298. agno/os/routers/memory/schemas.py +62 -0
  299. agno/os/routers/metrics/__init__.py +3 -0
  300. agno/os/routers/metrics/metrics.py +190 -0
  301. agno/os/routers/metrics/schemas.py +47 -0
  302. agno/os/routers/session/__init__.py +3 -0
  303. agno/os/routers/session/session.py +997 -0
  304. agno/os/schema.py +1055 -0
  305. agno/os/settings.py +43 -0
  306. agno/os/utils.py +630 -0
  307. agno/py.typed +0 -0
  308. agno/reasoning/__init__.py +0 -0
  309. agno/reasoning/anthropic.py +80 -0
  310. agno/reasoning/azure_ai_foundry.py +67 -0
  311. agno/reasoning/deepseek.py +63 -0
  312. agno/reasoning/default.py +97 -0
  313. agno/reasoning/gemini.py +73 -0
  314. agno/reasoning/groq.py +71 -0
  315. agno/reasoning/helpers.py +63 -0
  316. agno/reasoning/ollama.py +67 -0
  317. agno/reasoning/openai.py +86 -0
  318. agno/reasoning/step.py +31 -0
  319. agno/reasoning/vertexai.py +76 -0
  320. agno/run/__init__.py +6 -0
  321. agno/run/agent.py +787 -0
  322. agno/run/base.py +229 -0
  323. agno/run/cancel.py +81 -0
  324. agno/run/messages.py +32 -0
  325. agno/run/team.py +753 -0
  326. agno/run/workflow.py +708 -0
  327. agno/session/__init__.py +10 -0
  328. agno/session/agent.py +295 -0
  329. agno/session/summary.py +265 -0
  330. agno/session/team.py +392 -0
  331. agno/session/workflow.py +205 -0
  332. agno/team/__init__.py +37 -0
  333. agno/team/team.py +8793 -0
  334. agno/tools/__init__.py +10 -0
  335. agno/tools/agentql.py +120 -0
  336. agno/tools/airflow.py +69 -0
  337. agno/tools/api.py +122 -0
  338. agno/tools/apify.py +314 -0
  339. agno/tools/arxiv.py +127 -0
  340. agno/tools/aws_lambda.py +53 -0
  341. agno/tools/aws_ses.py +66 -0
  342. agno/tools/baidusearch.py +89 -0
  343. agno/tools/bitbucket.py +292 -0
  344. agno/tools/brandfetch.py +213 -0
  345. agno/tools/bravesearch.py +106 -0
  346. agno/tools/brightdata.py +367 -0
  347. agno/tools/browserbase.py +209 -0
  348. agno/tools/calcom.py +255 -0
  349. agno/tools/calculator.py +151 -0
  350. agno/tools/cartesia.py +187 -0
  351. agno/tools/clickup.py +244 -0
  352. agno/tools/confluence.py +240 -0
  353. agno/tools/crawl4ai.py +158 -0
  354. agno/tools/csv_toolkit.py +185 -0
  355. agno/tools/dalle.py +110 -0
  356. agno/tools/daytona.py +475 -0
  357. agno/tools/decorator.py +262 -0
  358. agno/tools/desi_vocal.py +108 -0
  359. agno/tools/discord.py +161 -0
  360. agno/tools/docker.py +716 -0
  361. agno/tools/duckdb.py +379 -0
  362. agno/tools/duckduckgo.py +91 -0
  363. agno/tools/e2b.py +703 -0
  364. agno/tools/eleven_labs.py +196 -0
  365. agno/tools/email.py +67 -0
  366. agno/tools/evm.py +129 -0
  367. agno/tools/exa.py +396 -0
  368. agno/tools/fal.py +127 -0
  369. agno/tools/file.py +240 -0
  370. agno/tools/file_generation.py +350 -0
  371. agno/tools/financial_datasets.py +288 -0
  372. agno/tools/firecrawl.py +143 -0
  373. agno/tools/function.py +1187 -0
  374. agno/tools/giphy.py +93 -0
  375. agno/tools/github.py +1760 -0
  376. agno/tools/gmail.py +922 -0
  377. agno/tools/google_bigquery.py +117 -0
  378. agno/tools/google_drive.py +270 -0
  379. agno/tools/google_maps.py +253 -0
  380. agno/tools/googlecalendar.py +674 -0
  381. agno/tools/googlesearch.py +98 -0
  382. agno/tools/googlesheets.py +377 -0
  383. agno/tools/hackernews.py +77 -0
  384. agno/tools/jina.py +101 -0
  385. agno/tools/jira.py +170 -0
  386. agno/tools/knowledge.py +218 -0
  387. agno/tools/linear.py +426 -0
  388. agno/tools/linkup.py +58 -0
  389. agno/tools/local_file_system.py +90 -0
  390. agno/tools/lumalab.py +183 -0
  391. agno/tools/mcp/__init__.py +10 -0
  392. agno/tools/mcp/mcp.py +331 -0
  393. agno/tools/mcp/multi_mcp.py +347 -0
  394. agno/tools/mcp/params.py +24 -0
  395. agno/tools/mcp_toolbox.py +284 -0
  396. agno/tools/mem0.py +193 -0
  397. agno/tools/memori.py +339 -0
  398. agno/tools/memory.py +419 -0
  399. agno/tools/mlx_transcribe.py +139 -0
  400. agno/tools/models/__init__.py +0 -0
  401. agno/tools/models/azure_openai.py +190 -0
  402. agno/tools/models/gemini.py +203 -0
  403. agno/tools/models/groq.py +158 -0
  404. agno/tools/models/morph.py +186 -0
  405. agno/tools/models/nebius.py +124 -0
  406. agno/tools/models_labs.py +195 -0
  407. agno/tools/moviepy_video.py +349 -0
  408. agno/tools/neo4j.py +134 -0
  409. agno/tools/newspaper.py +46 -0
  410. agno/tools/newspaper4k.py +93 -0
  411. agno/tools/notion.py +204 -0
  412. agno/tools/openai.py +202 -0
  413. agno/tools/openbb.py +160 -0
  414. agno/tools/opencv.py +321 -0
  415. agno/tools/openweather.py +233 -0
  416. agno/tools/oxylabs.py +385 -0
  417. agno/tools/pandas.py +102 -0
  418. agno/tools/parallel.py +314 -0
  419. agno/tools/postgres.py +257 -0
  420. agno/tools/pubmed.py +188 -0
  421. agno/tools/python.py +205 -0
  422. agno/tools/reasoning.py +283 -0
  423. agno/tools/reddit.py +467 -0
  424. agno/tools/replicate.py +117 -0
  425. agno/tools/resend.py +62 -0
  426. agno/tools/scrapegraph.py +222 -0
  427. agno/tools/searxng.py +152 -0
  428. agno/tools/serpapi.py +116 -0
  429. agno/tools/serper.py +255 -0
  430. agno/tools/shell.py +53 -0
  431. agno/tools/slack.py +136 -0
  432. agno/tools/sleep.py +20 -0
  433. agno/tools/spider.py +116 -0
  434. agno/tools/sql.py +154 -0
  435. agno/tools/streamlit/__init__.py +0 -0
  436. agno/tools/streamlit/components.py +113 -0
  437. agno/tools/tavily.py +254 -0
  438. agno/tools/telegram.py +48 -0
  439. agno/tools/todoist.py +218 -0
  440. agno/tools/tool_registry.py +1 -0
  441. agno/tools/toolkit.py +146 -0
  442. agno/tools/trafilatura.py +388 -0
  443. agno/tools/trello.py +274 -0
  444. agno/tools/twilio.py +186 -0
  445. agno/tools/user_control_flow.py +78 -0
  446. agno/tools/valyu.py +228 -0
  447. agno/tools/visualization.py +467 -0
  448. agno/tools/webbrowser.py +28 -0
  449. agno/tools/webex.py +76 -0
  450. agno/tools/website.py +54 -0
  451. agno/tools/webtools.py +45 -0
  452. agno/tools/whatsapp.py +286 -0
  453. agno/tools/wikipedia.py +63 -0
  454. agno/tools/workflow.py +278 -0
  455. agno/tools/x.py +335 -0
  456. agno/tools/yfinance.py +257 -0
  457. agno/tools/youtube.py +184 -0
  458. agno/tools/zendesk.py +82 -0
  459. agno/tools/zep.py +454 -0
  460. agno/tools/zoom.py +382 -0
  461. agno/utils/__init__.py +0 -0
  462. agno/utils/agent.py +820 -0
  463. agno/utils/audio.py +49 -0
  464. agno/utils/certs.py +27 -0
  465. agno/utils/code_execution.py +11 -0
  466. agno/utils/common.py +132 -0
  467. agno/utils/dttm.py +13 -0
  468. agno/utils/enum.py +22 -0
  469. agno/utils/env.py +11 -0
  470. agno/utils/events.py +696 -0
  471. agno/utils/format_str.py +16 -0
  472. agno/utils/functions.py +166 -0
  473. agno/utils/gemini.py +426 -0
  474. agno/utils/hooks.py +57 -0
  475. agno/utils/http.py +74 -0
  476. agno/utils/json_schema.py +234 -0
  477. agno/utils/knowledge.py +36 -0
  478. agno/utils/location.py +19 -0
  479. agno/utils/log.py +255 -0
  480. agno/utils/mcp.py +214 -0
  481. agno/utils/media.py +352 -0
  482. agno/utils/merge_dict.py +41 -0
  483. agno/utils/message.py +118 -0
  484. agno/utils/models/__init__.py +0 -0
  485. agno/utils/models/ai_foundry.py +43 -0
  486. agno/utils/models/claude.py +358 -0
  487. agno/utils/models/cohere.py +87 -0
  488. agno/utils/models/llama.py +78 -0
  489. agno/utils/models/mistral.py +98 -0
  490. agno/utils/models/openai_responses.py +140 -0
  491. agno/utils/models/schema_utils.py +153 -0
  492. agno/utils/models/watsonx.py +41 -0
  493. agno/utils/openai.py +257 -0
  494. agno/utils/pickle.py +32 -0
  495. agno/utils/pprint.py +178 -0
  496. agno/utils/print_response/__init__.py +0 -0
  497. agno/utils/print_response/agent.py +842 -0
  498. agno/utils/print_response/team.py +1724 -0
  499. agno/utils/print_response/workflow.py +1668 -0
  500. agno/utils/prompts.py +111 -0
  501. agno/utils/reasoning.py +108 -0
  502. agno/utils/response.py +163 -0
  503. agno/utils/response_iterator.py +17 -0
  504. agno/utils/safe_formatter.py +24 -0
  505. agno/utils/serialize.py +32 -0
  506. agno/utils/shell.py +22 -0
  507. agno/utils/streamlit.py +487 -0
  508. agno/utils/string.py +231 -0
  509. agno/utils/team.py +139 -0
  510. agno/utils/timer.py +41 -0
  511. agno/utils/tools.py +102 -0
  512. agno/utils/web.py +23 -0
  513. agno/utils/whatsapp.py +305 -0
  514. agno/utils/yaml_io.py +25 -0
  515. agno/vectordb/__init__.py +3 -0
  516. agno/vectordb/base.py +127 -0
  517. agno/vectordb/cassandra/__init__.py +5 -0
  518. agno/vectordb/cassandra/cassandra.py +501 -0
  519. agno/vectordb/cassandra/extra_param_mixin.py +11 -0
  520. agno/vectordb/cassandra/index.py +13 -0
  521. agno/vectordb/chroma/__init__.py +5 -0
  522. agno/vectordb/chroma/chromadb.py +929 -0
  523. agno/vectordb/clickhouse/__init__.py +9 -0
  524. agno/vectordb/clickhouse/clickhousedb.py +835 -0
  525. agno/vectordb/clickhouse/index.py +9 -0
  526. agno/vectordb/couchbase/__init__.py +3 -0
  527. agno/vectordb/couchbase/couchbase.py +1442 -0
  528. agno/vectordb/distance.py +7 -0
  529. agno/vectordb/lancedb/__init__.py +6 -0
  530. agno/vectordb/lancedb/lance_db.py +995 -0
  531. agno/vectordb/langchaindb/__init__.py +5 -0
  532. agno/vectordb/langchaindb/langchaindb.py +163 -0
  533. agno/vectordb/lightrag/__init__.py +5 -0
  534. agno/vectordb/lightrag/lightrag.py +388 -0
  535. agno/vectordb/llamaindex/__init__.py +3 -0
  536. agno/vectordb/llamaindex/llamaindexdb.py +166 -0
  537. agno/vectordb/milvus/__init__.py +4 -0
  538. agno/vectordb/milvus/milvus.py +1182 -0
  539. agno/vectordb/mongodb/__init__.py +9 -0
  540. agno/vectordb/mongodb/mongodb.py +1417 -0
  541. agno/vectordb/pgvector/__init__.py +12 -0
  542. agno/vectordb/pgvector/index.py +23 -0
  543. agno/vectordb/pgvector/pgvector.py +1462 -0
  544. agno/vectordb/pineconedb/__init__.py +5 -0
  545. agno/vectordb/pineconedb/pineconedb.py +747 -0
  546. agno/vectordb/qdrant/__init__.py +5 -0
  547. agno/vectordb/qdrant/qdrant.py +1134 -0
  548. agno/vectordb/redis/__init__.py +9 -0
  549. agno/vectordb/redis/redisdb.py +694 -0
  550. agno/vectordb/search.py +7 -0
  551. agno/vectordb/singlestore/__init__.py +10 -0
  552. agno/vectordb/singlestore/index.py +41 -0
  553. agno/vectordb/singlestore/singlestore.py +763 -0
  554. agno/vectordb/surrealdb/__init__.py +3 -0
  555. agno/vectordb/surrealdb/surrealdb.py +699 -0
  556. agno/vectordb/upstashdb/__init__.py +5 -0
  557. agno/vectordb/upstashdb/upstashdb.py +718 -0
  558. agno/vectordb/weaviate/__init__.py +8 -0
  559. agno/vectordb/weaviate/index.py +15 -0
  560. agno/vectordb/weaviate/weaviate.py +1005 -0
  561. agno/workflow/__init__.py +23 -0
  562. agno/workflow/agent.py +299 -0
  563. agno/workflow/condition.py +738 -0
  564. agno/workflow/loop.py +735 -0
  565. agno/workflow/parallel.py +824 -0
  566. agno/workflow/router.py +702 -0
  567. agno/workflow/step.py +1432 -0
  568. agno/workflow/steps.py +592 -0
  569. agno/workflow/types.py +520 -0
  570. agno/workflow/workflow.py +4321 -0
  571. agno-2.2.13.dist-info/METADATA +614 -0
  572. agno-2.2.13.dist-info/RECORD +575 -0
  573. agno-2.2.13.dist-info/WHEEL +5 -0
  574. agno-2.2.13.dist-info/licenses/LICENSE +201 -0
  575. agno-2.2.13.dist-info/top_level.txt +1 -0
@@ -0,0 +1,2288 @@
1
+ import time
2
+ from datetime import date, datetime, timedelta, timezone
3
+ from pathlib import Path
4
+ from typing import Any, Dict, List, Optional, Sequence, Tuple, Union, cast
5
+ from uuid import uuid4
6
+
7
+ from agno.db.base import BaseDb, SessionType
8
+ from agno.db.schemas.culture import CulturalKnowledge
9
+ from agno.db.schemas.evals import EvalFilterType, EvalRunRecord, EvalType
10
+ from agno.db.schemas.knowledge import KnowledgeRow
11
+ from agno.db.schemas.memory import UserMemory
12
+ from agno.db.sqlite.schemas import get_table_schema_definition
13
+ from agno.db.sqlite.utils import (
14
+ apply_sorting,
15
+ bulk_upsert_metrics,
16
+ calculate_date_metrics,
17
+ deserialize_cultural_knowledge_from_db,
18
+ fetch_all_sessions_data,
19
+ get_dates_to_calculate_metrics_for,
20
+ is_table_available,
21
+ is_valid_table,
22
+ serialize_cultural_knowledge_for_db,
23
+ )
24
+ from agno.db.utils import deserialize_session_json_fields, serialize_session_json_fields
25
+ from agno.session import AgentSession, Session, TeamSession, WorkflowSession
26
+ from agno.utils.log import log_debug, log_error, log_info, log_warning
27
+ from agno.utils.string import generate_id
28
+
29
+ try:
30
+ from sqlalchemy import Column, MetaData, Table, and_, func, select, text
31
+ from sqlalchemy.dialects import sqlite
32
+ from sqlalchemy.engine import Engine, create_engine
33
+ from sqlalchemy.orm import scoped_session, sessionmaker
34
+ from sqlalchemy.schema import Index, UniqueConstraint
35
+ except ImportError:
36
+ raise ImportError("`sqlalchemy` not installed. Please install it using `pip install sqlalchemy`")
37
+
38
+
39
+ class SqliteDb(BaseDb):
40
+ def __init__(
41
+ self,
42
+ db_file: Optional[str] = None,
43
+ db_engine: Optional[Engine] = None,
44
+ db_url: Optional[str] = None,
45
+ session_table: Optional[str] = None,
46
+ culture_table: Optional[str] = None,
47
+ memory_table: Optional[str] = None,
48
+ metrics_table: Optional[str] = None,
49
+ eval_table: Optional[str] = None,
50
+ knowledge_table: Optional[str] = None,
51
+ id: Optional[str] = None,
52
+ ):
53
+ """
54
+ Interface for interacting with a SQLite database.
55
+
56
+ The following order is used to determine the database connection:
57
+ 1. Use the db_engine
58
+ 2. Use the db_url
59
+ 3. Use the db_file
60
+ 4. Create a new database in the current directory
61
+
62
+ Args:
63
+ db_file (Optional[str]): The database file to connect to.
64
+ db_engine (Optional[Engine]): The SQLAlchemy database engine to use.
65
+ db_url (Optional[str]): The database URL to connect to.
66
+ session_table (Optional[str]): Name of the table to store Agent, Team and Workflow sessions.
67
+ culture_table (Optional[str]): Name of the table to store cultural notions.
68
+ memory_table (Optional[str]): Name of the table to store user memories.
69
+ metrics_table (Optional[str]): Name of the table to store metrics.
70
+ eval_table (Optional[str]): Name of the table to store evaluation runs data.
71
+ knowledge_table (Optional[str]): Name of the table to store knowledge documents data.
72
+ id (Optional[str]): ID of the database.
73
+
74
+ Raises:
75
+ ValueError: If none of the tables are provided.
76
+ """
77
+ if id is None:
78
+ seed = db_url or db_file or str(db_engine.url) if db_engine else "sqlite:///agno.db"
79
+ id = generate_id(seed)
80
+
81
+ super().__init__(
82
+ id=id,
83
+ session_table=session_table,
84
+ culture_table=culture_table,
85
+ memory_table=memory_table,
86
+ metrics_table=metrics_table,
87
+ eval_table=eval_table,
88
+ knowledge_table=knowledge_table,
89
+ )
90
+
91
+ _engine: Optional[Engine] = db_engine
92
+ if _engine is None:
93
+ if db_url is not None:
94
+ _engine = create_engine(db_url)
95
+ elif db_file is not None:
96
+ db_path = Path(db_file).resolve()
97
+ db_path.parent.mkdir(parents=True, exist_ok=True)
98
+ db_file = str(db_path)
99
+ _engine = create_engine(f"sqlite:///{db_path}")
100
+ else:
101
+ # If none of db_engine, db_url, or db_file are provided, create a db in the current directory
102
+ default_db_path = Path("./agno.db").resolve()
103
+ _engine = create_engine(f"sqlite:///{default_db_path}")
104
+ db_file = str(default_db_path)
105
+ log_debug(f"Created SQLite database: {default_db_path}")
106
+
107
+ self.db_engine: Engine = _engine
108
+ self.db_url: Optional[str] = db_url
109
+ self.db_file: Optional[str] = db_file
110
+ self.metadata: MetaData = MetaData()
111
+
112
+ # Initialize database session
113
+ self.Session: scoped_session = scoped_session(sessionmaker(bind=self.db_engine))
114
+
115
+ # -- DB methods --
116
+ def table_exists(self, table_name: str) -> bool:
117
+ """Check if a table with the given name exists in the SQLite database.
118
+
119
+ Args:
120
+ table_name: Name of the table to check
121
+
122
+ Returns:
123
+ bool: True if the table exists in the database, False otherwise
124
+ """
125
+ with self.Session() as sess:
126
+ return is_table_available(session=sess, table_name=table_name)
127
+
128
+ def _create_all_tables(self):
129
+ """Create all tables for the database."""
130
+ tables_to_create = [
131
+ (self.session_table_name, "sessions"),
132
+ (self.memory_table_name, "memories"),
133
+ (self.metrics_table_name, "metrics"),
134
+ (self.eval_table_name, "evals"),
135
+ (self.knowledge_table_name, "knowledge"),
136
+ ]
137
+
138
+ for table_name, table_type in tables_to_create:
139
+ self._create_table(table_name=table_name, table_type=table_type)
140
+
141
+ def _create_table(self, table_name: str, table_type: str) -> Table:
142
+ """
143
+ Create a table with the appropriate schema based on the table type.
144
+
145
+ Args:
146
+ table_name (str): Name of the table to create
147
+ table_type (str): Type of table (used to get schema definition)
148
+
149
+ Returns:
150
+ Table: SQLAlchemy Table object
151
+ """
152
+ try:
153
+ table_schema = get_table_schema_definition(table_type)
154
+ log_debug(f"Creating table {table_name}")
155
+
156
+ columns: List[Column] = []
157
+ indexes: List[str] = []
158
+ unique_constraints: List[str] = []
159
+ schema_unique_constraints = table_schema.pop("_unique_constraints", [])
160
+
161
+ # Get the columns, indexes, and unique constraints from the table schema
162
+ for col_name, col_config in table_schema.items():
163
+ column_args = [col_name, col_config["type"]()]
164
+ column_kwargs = {}
165
+
166
+ if col_config.get("primary_key", False):
167
+ column_kwargs["primary_key"] = True
168
+ if "nullable" in col_config:
169
+ column_kwargs["nullable"] = col_config["nullable"]
170
+ if col_config.get("index", False):
171
+ indexes.append(col_name)
172
+ if col_config.get("unique", False):
173
+ column_kwargs["unique"] = True
174
+ unique_constraints.append(col_name)
175
+
176
+ columns.append(Column(*column_args, **column_kwargs)) # type: ignore
177
+
178
+ # Create the table object
179
+ table_metadata = MetaData()
180
+ table = Table(table_name, table_metadata, *columns)
181
+
182
+ # Add multi-column unique constraints with table-specific names
183
+ for constraint in schema_unique_constraints:
184
+ constraint_name = f"{table_name}_{constraint['name']}"
185
+ constraint_columns = constraint["columns"]
186
+ table.append_constraint(UniqueConstraint(*constraint_columns, name=constraint_name))
187
+
188
+ # Add indexes to the table definition
189
+ for idx_col in indexes:
190
+ idx_name = f"idx_{table_name}_{idx_col}"
191
+ table.append_constraint(Index(idx_name, idx_col))
192
+
193
+ # Create table
194
+ table.create(self.db_engine, checkfirst=True)
195
+
196
+ # Create indexes
197
+ for idx in table.indexes:
198
+ try:
199
+ log_debug(f"Creating index: {idx.name}")
200
+ # Check if index already exists
201
+ with self.Session() as sess:
202
+ exists_query = text("SELECT 1 FROM sqlite_master WHERE type = 'index' AND name = :index_name")
203
+ exists = sess.execute(exists_query, {"index_name": idx.name}).scalar() is not None
204
+ if exists:
205
+ log_debug(f"Index {idx.name} already exists in table {table_name}, skipping creation")
206
+ continue
207
+
208
+ idx.create(self.db_engine)
209
+
210
+ except Exception as e:
211
+ log_warning(f"Error creating index {idx.name}: {e}")
212
+
213
+ log_debug(f"Successfully created table '{table_name}'")
214
+ return table
215
+
216
+ except Exception as e:
217
+ log_error(f"Could not create table '{table_name}': {e}")
218
+ raise e
219
+
220
+ def _get_table(self, table_type: str, create_table_if_not_found: Optional[bool] = False) -> Optional[Table]:
221
+ if table_type == "sessions":
222
+ self.session_table = self._get_or_create_table(
223
+ table_name=self.session_table_name,
224
+ table_type=table_type,
225
+ create_table_if_not_found=create_table_if_not_found,
226
+ )
227
+ return self.session_table
228
+
229
+ elif table_type == "memories":
230
+ self.memory_table = self._get_or_create_table(
231
+ table_name=self.memory_table_name,
232
+ table_type="memories",
233
+ create_table_if_not_found=create_table_if_not_found,
234
+ )
235
+ return self.memory_table
236
+
237
+ elif table_type == "metrics":
238
+ self.metrics_table = self._get_or_create_table(
239
+ table_name=self.metrics_table_name,
240
+ table_type="metrics",
241
+ create_table_if_not_found=create_table_if_not_found,
242
+ )
243
+ return self.metrics_table
244
+
245
+ elif table_type == "evals":
246
+ self.eval_table = self._get_or_create_table(
247
+ table_name=self.eval_table_name,
248
+ table_type="evals",
249
+ create_table_if_not_found=create_table_if_not_found,
250
+ )
251
+
252
+ return self.eval_table
253
+
254
+ elif table_type == "knowledge":
255
+ self.knowledge_table = self._get_or_create_table(
256
+ table_name=self.knowledge_table_name,
257
+ table_type="knowledge",
258
+ create_table_if_not_found=create_table_if_not_found,
259
+ )
260
+ return self.knowledge_table
261
+
262
+ elif table_type == "culture":
263
+ self.culture_table = self._get_or_create_table(
264
+ table_name=self.culture_table_name,
265
+ table_type="culture",
266
+ create_table_if_not_found=create_table_if_not_found,
267
+ )
268
+ return self.culture_table
269
+
270
+ else:
271
+ raise ValueError(f"Unknown table type: '{table_type}'")
272
+
273
+ def _get_or_create_table(
274
+ self,
275
+ table_name: str,
276
+ table_type: str,
277
+ create_table_if_not_found: Optional[bool] = False,
278
+ ) -> Optional[Table]:
279
+ """
280
+ Check if the table exists and is valid, else create it.
281
+
282
+ Args:
283
+ table_name (str): Name of the table to get or create
284
+ table_type (str): Type of table (used to get schema definition)
285
+
286
+ Returns:
287
+ Table: SQLAlchemy Table object
288
+ """
289
+ with self.Session() as sess, sess.begin():
290
+ table_is_available = is_table_available(session=sess, table_name=table_name)
291
+
292
+ if not table_is_available:
293
+ if not create_table_if_not_found:
294
+ return None
295
+ return self._create_table(table_name=table_name, table_type=table_type)
296
+
297
+ # SQLite version of table validation (no schema)
298
+ if not is_valid_table(db_engine=self.db_engine, table_name=table_name, table_type=table_type):
299
+ raise ValueError(f"Table {table_name} has an invalid schema")
300
+
301
+ try:
302
+ table = Table(table_name, self.metadata, autoload_with=self.db_engine)
303
+ log_debug(f"Loaded existing table {table_name}")
304
+ return table
305
+
306
+ except Exception as e:
307
+ log_error(f"Error loading existing table {table_name}: {e}")
308
+ raise e
309
+
310
+ # -- Session methods --
311
+
312
+ def delete_session(self, session_id: str) -> bool:
313
+ """
314
+ Delete a session from the database.
315
+
316
+ Args:
317
+ session_id (str): ID of the session to delete
318
+
319
+ Raises:
320
+ Exception: If an error occurs during deletion.
321
+ """
322
+ try:
323
+ table = self._get_table(table_type="sessions")
324
+ if table is None:
325
+ return False
326
+
327
+ with self.Session() as sess, sess.begin():
328
+ delete_stmt = table.delete().where(table.c.session_id == session_id)
329
+ result = sess.execute(delete_stmt)
330
+ if result.rowcount == 0:
331
+ log_debug(f"No session found to deletewith session_id: {session_id}")
332
+ return False
333
+ else:
334
+ log_debug(f"Successfully deleted session with session_id: {session_id}")
335
+ return True
336
+
337
+ except Exception as e:
338
+ log_error(f"Error deleting session: {e}")
339
+ raise e
340
+
341
+ def delete_sessions(self, session_ids: List[str]) -> None:
342
+ """Delete all given sessions from the database.
343
+ Can handle multiple session types in the same run.
344
+
345
+ Args:
346
+ session_ids (List[str]): The IDs of the sessions to delete.
347
+
348
+ Raises:
349
+ Exception: If an error occurs during deletion.
350
+ """
351
+ try:
352
+ table = self._get_table(table_type="sessions")
353
+ if table is None:
354
+ return
355
+
356
+ with self.Session() as sess, sess.begin():
357
+ delete_stmt = table.delete().where(table.c.session_id.in_(session_ids))
358
+ result = sess.execute(delete_stmt)
359
+
360
+ log_debug(f"Successfully deleted {result.rowcount} sessions")
361
+
362
+ except Exception as e:
363
+ log_error(f"Error deleting sessions: {e}")
364
+ raise e
365
+
366
+ def get_session(
367
+ self,
368
+ session_id: str,
369
+ session_type: SessionType,
370
+ user_id: Optional[str] = None,
371
+ deserialize: Optional[bool] = True,
372
+ ) -> Optional[Union[Session, Dict[str, Any]]]:
373
+ """
374
+ Read a session from the database.
375
+
376
+ Args:
377
+ session_id (str): ID of the session to read.
378
+ session_type (SessionType): Type of session to get.
379
+ user_id (Optional[str]): User ID to filter by. Defaults to None.
380
+ deserialize (Optional[bool]): Whether to serialize the session. Defaults to True.
381
+
382
+ Returns:
383
+ Optional[Union[Session, Dict[str, Any]]]:
384
+ - When deserialize=True: Session object
385
+ - When deserialize=False: Session dictionary
386
+
387
+ Raises:
388
+ Exception: If an error occurs during retrieval.
389
+ """
390
+ try:
391
+ table = self._get_table(table_type="sessions")
392
+ if table is None:
393
+ return None
394
+
395
+ with self.Session() as sess, sess.begin():
396
+ stmt = select(table).where(table.c.session_id == session_id)
397
+
398
+ # Filtering
399
+ if user_id is not None:
400
+ stmt = stmt.where(table.c.user_id == user_id)
401
+
402
+ result = sess.execute(stmt).fetchone()
403
+ if result is None:
404
+ return None
405
+
406
+ session_raw = deserialize_session_json_fields(dict(result._mapping))
407
+ if not session_raw or not deserialize:
408
+ return session_raw
409
+
410
+ if session_type == SessionType.AGENT:
411
+ return AgentSession.from_dict(session_raw)
412
+ elif session_type == SessionType.TEAM:
413
+ return TeamSession.from_dict(session_raw)
414
+ elif session_type == SessionType.WORKFLOW:
415
+ return WorkflowSession.from_dict(session_raw)
416
+ else:
417
+ raise ValueError(f"Invalid session type: {session_type}")
418
+
419
+ except Exception as e:
420
+ log_debug(f"Exception reading from sessions table: {e}")
421
+ raise e
422
+
423
+ def get_sessions(
424
+ self,
425
+ session_type: Optional[SessionType] = None,
426
+ user_id: Optional[str] = None,
427
+ component_id: Optional[str] = None,
428
+ session_name: Optional[str] = None,
429
+ start_timestamp: Optional[int] = None,
430
+ end_timestamp: Optional[int] = None,
431
+ limit: Optional[int] = None,
432
+ page: Optional[int] = None,
433
+ sort_by: Optional[str] = None,
434
+ sort_order: Optional[str] = None,
435
+ deserialize: Optional[bool] = True,
436
+ ) -> Union[List[Session], Tuple[List[Dict[str, Any]], int]]:
437
+ """
438
+ Get all sessions in the given table. Can filter by user_id and entity_id.
439
+ Args:
440
+ session_type (Optional[SessionType]): The type of session to get.
441
+ user_id (Optional[str]): The ID of the user to filter by.
442
+ component_id (Optional[str]): The ID of the agent / workflow to filter by.
443
+ session_name (Optional[str]): The name of the session to filter by.
444
+ start_timestamp (Optional[int]): The start timestamp to filter by.
445
+ end_timestamp (Optional[int]): The end timestamp to filter by.
446
+ limit (Optional[int]): The maximum number of sessions to return. Defaults to None.
447
+ page (Optional[int]): The page number to return. Defaults to None.
448
+ sort_by (Optional[str]): The field to sort by. Defaults to None.
449
+ sort_order (Optional[str]): The sort order. Defaults to None.
450
+ deserialize (Optional[bool]): Whether to serialize the sessions. Defaults to True.
451
+ create_table_if_not_found (Optional[bool]): Whether to create the table if it doesn't exist.
452
+
453
+ Returns:
454
+ List[Session]:
455
+ - When deserialize=True: List of Session objects matching the criteria.
456
+ - When deserialize=False: List of Session dictionaries matching the criteria.
457
+
458
+ Raises:
459
+ Exception: If an error occurs during retrieval.
460
+ """
461
+ try:
462
+ table = self._get_table(table_type="sessions")
463
+ if table is None:
464
+ return [] if deserialize else ([], 0)
465
+
466
+ with self.Session() as sess, sess.begin():
467
+ stmt = select(table)
468
+
469
+ # Filtering
470
+ if user_id is not None:
471
+ stmt = stmt.where(table.c.user_id == user_id)
472
+ if component_id is not None:
473
+ if session_type == SessionType.AGENT:
474
+ stmt = stmt.where(table.c.agent_id == component_id)
475
+ elif session_type == SessionType.TEAM:
476
+ stmt = stmt.where(table.c.team_id == component_id)
477
+ elif session_type == SessionType.WORKFLOW:
478
+ stmt = stmt.where(table.c.workflow_id == component_id)
479
+ if start_timestamp is not None:
480
+ stmt = stmt.where(table.c.created_at >= start_timestamp)
481
+ if end_timestamp is not None:
482
+ stmt = stmt.where(table.c.created_at <= end_timestamp)
483
+ if session_name is not None:
484
+ stmt = stmt.where(table.c.session_data.like(f"%{session_name}%"))
485
+ if session_type is not None:
486
+ stmt = stmt.where(table.c.session_type == session_type.value)
487
+
488
+ # Getting total count
489
+ count_stmt = select(func.count()).select_from(stmt.alias())
490
+ total_count = sess.execute(count_stmt).scalar()
491
+
492
+ # Sorting
493
+ stmt = apply_sorting(stmt, table, sort_by, sort_order)
494
+
495
+ # Paginating
496
+ if limit is not None:
497
+ stmt = stmt.limit(limit)
498
+ if page is not None:
499
+ stmt = stmt.offset((page - 1) * limit)
500
+
501
+ records = sess.execute(stmt).fetchall()
502
+ if records is None:
503
+ return [] if deserialize else ([], 0)
504
+
505
+ sessions_raw = [deserialize_session_json_fields(dict(record._mapping)) for record in records]
506
+ if not deserialize:
507
+ return sessions_raw, total_count
508
+ if not sessions_raw:
509
+ return []
510
+
511
+ if session_type == SessionType.AGENT:
512
+ return [AgentSession.from_dict(record) for record in sessions_raw] # type: ignore
513
+ elif session_type == SessionType.TEAM:
514
+ return [TeamSession.from_dict(record) for record in sessions_raw] # type: ignore
515
+ elif session_type == SessionType.WORKFLOW:
516
+ return [WorkflowSession.from_dict(record) for record in sessions_raw] # type: ignore
517
+ else:
518
+ raise ValueError(f"Invalid session type: {session_type}")
519
+
520
+ except Exception as e:
521
+ log_debug(f"Exception reading from sessions table: {e}")
522
+ raise e
523
+
524
+ def rename_session(
525
+ self,
526
+ session_id: str,
527
+ session_type: SessionType,
528
+ session_name: str,
529
+ deserialize: Optional[bool] = True,
530
+ ) -> Optional[Union[Session, Dict[str, Any]]]:
531
+ """
532
+ Rename a session in the database.
533
+
534
+ Args:
535
+ session_id (str): The ID of the session to rename.
536
+ session_type (SessionType): The type of session to rename.
537
+ session_name (str): The new name for the session.
538
+ deserialize (Optional[bool]): Whether to serialize the session. Defaults to True.
539
+
540
+ Returns:
541
+ Optional[Union[Session, Dict[str, Any]]]:
542
+ - When deserialize=True: Session object
543
+ - When deserialize=False: Session dictionary
544
+
545
+ Raises:
546
+ Exception: If an error occurs during renaming.
547
+ """
548
+ try:
549
+ # Get the current session as a deserialized object
550
+ # Get the session record
551
+ session = self.get_session(session_id, session_type, deserialize=True)
552
+ if session is None:
553
+ return None
554
+
555
+ session = cast(Session, session)
556
+ # Update the session name
557
+ if session.session_data is None:
558
+ session.session_data = {}
559
+ session.session_data["session_name"] = session_name
560
+
561
+ # Upsert the updated session back to the database
562
+ return self.upsert_session(session, deserialize=deserialize)
563
+
564
+ except Exception as e:
565
+ log_error(f"Exception renaming session: {e}")
566
+ raise e
567
+
568
    def upsert_session(
        self, session: Session, deserialize: Optional[bool] = True
    ) -> Optional[Union[Session, Dict[str, Any]]]:
        """
        Insert or update a session in the database.

        The upsert is keyed on session_id (SQLite ON CONFLICT DO UPDATE). The
        concrete branch taken depends on the runtime type of `session`:
        AgentSession, TeamSession, or (fallback) WorkflowSession.

        Args:
            session (Session): The session data to upsert.
            deserialize (Optional[bool]): Whether to serialize the session. Defaults to True.

        Returns:
            Optional[Session]:
                - When deserialize=True: Session object
                - When deserialize=False: Session dictionary

        Raises:
            Exception: If an error occurs during upserting.
        """
        try:
            # The sessions table is created on demand for writes.
            table = self._get_table(table_type="sessions", create_table_if_not_found=True)
            if table is None:
                return None

            # Complex fields (runs, session_data, metadata, ...) are JSON-encoded for storage.
            serialized_session = serialize_session_json_fields(session.to_dict())

            if isinstance(session, AgentSession):
                with self.Session() as sess, sess.begin():
                    # NOTE(review): on first insert updated_at is seeded from created_at;
                    # on conflict it is refreshed to the current time below.
                    stmt = sqlite.insert(table).values(
                        session_id=serialized_session.get("session_id"),
                        session_type=SessionType.AGENT.value,
                        agent_id=serialized_session.get("agent_id"),
                        user_id=serialized_session.get("user_id"),
                        agent_data=serialized_session.get("agent_data"),
                        session_data=serialized_session.get("session_data"),
                        metadata=serialized_session.get("metadata"),
                        runs=serialized_session.get("runs"),
                        summary=serialized_session.get("summary"),
                        created_at=serialized_session.get("created_at"),
                        updated_at=serialized_session.get("created_at"),
                    )
                    stmt = stmt.on_conflict_do_update(
                        index_elements=["session_id"],
                        set_=dict(
                            agent_id=serialized_session.get("agent_id"),
                            user_id=serialized_session.get("user_id"),
                            runs=serialized_session.get("runs"),
                            summary=serialized_session.get("summary"),
                            agent_data=serialized_session.get("agent_data"),
                            session_data=serialized_session.get("session_data"),
                            metadata=serialized_session.get("metadata"),
                            updated_at=int(time.time()),
                        ),
                    )
                    # RETURNING gives back the row as stored, so callers see DB-applied values.
                    stmt = stmt.returning(*table.columns)  # type: ignore
                    result = sess.execute(stmt)
                    row = result.fetchone()

                    session_raw = deserialize_session_json_fields(dict(row._mapping)) if row else None
                    if session_raw is None or not deserialize:
                        return session_raw
                    return AgentSession.from_dict(session_raw)

            elif isinstance(session, TeamSession):
                with self.Session() as sess, sess.begin():
                    stmt = sqlite.insert(table).values(
                        session_id=serialized_session.get("session_id"),
                        session_type=SessionType.TEAM.value,
                        team_id=serialized_session.get("team_id"),
                        user_id=serialized_session.get("user_id"),
                        runs=serialized_session.get("runs"),
                        summary=serialized_session.get("summary"),
                        created_at=serialized_session.get("created_at"),
                        updated_at=serialized_session.get("created_at"),
                        team_data=serialized_session.get("team_data"),
                        session_data=serialized_session.get("session_data"),
                        metadata=serialized_session.get("metadata"),
                    )

                    stmt = stmt.on_conflict_do_update(
                        index_elements=["session_id"],
                        set_=dict(
                            team_id=serialized_session.get("team_id"),
                            user_id=serialized_session.get("user_id"),
                            summary=serialized_session.get("summary"),
                            runs=serialized_session.get("runs"),
                            team_data=serialized_session.get("team_data"),
                            session_data=serialized_session.get("session_data"),
                            metadata=serialized_session.get("metadata"),
                            updated_at=int(time.time()),
                        ),
                    )
                    stmt = stmt.returning(*table.columns)  # type: ignore
                    result = sess.execute(stmt)
                    row = result.fetchone()

                    session_raw = deserialize_session_json_fields(dict(row._mapping)) if row else None
                    if session_raw is None or not deserialize:
                        return session_raw
                    return TeamSession.from_dict(session_raw)

            else:
                # Fallback branch: treated as a workflow session.
                with self.Session() as sess, sess.begin():
                    # Unlike the agent/team branches, timestamps default to "now" when unset.
                    stmt = sqlite.insert(table).values(
                        session_id=serialized_session.get("session_id"),
                        session_type=SessionType.WORKFLOW.value,
                        workflow_id=serialized_session.get("workflow_id"),
                        user_id=serialized_session.get("user_id"),
                        runs=serialized_session.get("runs"),
                        summary=serialized_session.get("summary"),
                        created_at=serialized_session.get("created_at") or int(time.time()),
                        updated_at=serialized_session.get("updated_at") or int(time.time()),
                        workflow_data=serialized_session.get("workflow_data"),
                        session_data=serialized_session.get("session_data"),
                        metadata=serialized_session.get("metadata"),
                    )
                    stmt = stmt.on_conflict_do_update(
                        index_elements=["session_id"],
                        set_=dict(
                            workflow_id=serialized_session.get("workflow_id"),
                            user_id=serialized_session.get("user_id"),
                            summary=serialized_session.get("summary"),
                            runs=serialized_session.get("runs"),
                            workflow_data=serialized_session.get("workflow_data"),
                            session_data=serialized_session.get("session_data"),
                            metadata=serialized_session.get("metadata"),
                            updated_at=int(time.time()),
                        ),
                    )
                    stmt = stmt.returning(*table.columns)  # type: ignore
                    result = sess.execute(stmt)
                    row = result.fetchone()

                    session_raw = deserialize_session_json_fields(dict(row._mapping)) if row else None
                    if session_raw is None or not deserialize:
                        return session_raw
                    return WorkflowSession.from_dict(session_raw)

        except Exception as e:
            log_warning(f"Exception upserting into table: {e}")
            raise e
708
+
709
    def upsert_sessions(
        self,
        sessions: List[Session],
        deserialize: Optional[bool] = True,
        preserve_updated_at: bool = False,
    ) -> List[Union[Session, Dict[str, Any]]]:
        """
        Bulk upsert multiple sessions for improved performance on large datasets.

        Sessions are grouped by type (agent/team/workflow) and each group is
        upserted with a single executemany statement keyed on session_id. On any
        failure the method falls back to per-session `upsert_session` calls.

        Args:
            sessions (List[Session]): List of sessions to upsert.
            deserialize (Optional[bool]): Whether to deserialize the sessions. Defaults to True.
            preserve_updated_at (bool): If True, preserve the updated_at from the session object.

        Returns:
            List[Union[Session, Dict[str, Any]]]: List of upserted sessions.
                NOTE(review): results are ordered by type group (agents, teams,
                workflows), not by the order of the input list.

        Raises:
            Exception: If an error occurs during bulk upsert.
        """
        if not sessions:
            return []

        try:
            table = self._get_table(table_type="sessions", create_table_if_not_found=True)
            if table is None:
                log_info("Sessions table not available, falling back to individual upserts")
                return [
                    result
                    for session in sessions
                    if session is not None
                    for result in [self.upsert_session(session, deserialize=deserialize)]
                    if result is not None
                ]

            # Group sessions by type for batch processing
            agent_sessions = []
            team_sessions = []
            workflow_sessions = []

            for session in sessions:
                if isinstance(session, AgentSession):
                    agent_sessions.append(session)
                elif isinstance(session, TeamSession):
                    team_sessions.append(session)
                elif isinstance(session, WorkflowSession):
                    workflow_sessions.append(session)

            results: List[Union[Session, Dict[str, Any]]] = []

            with self.Session() as sess, sess.begin():
                # Bulk upsert agent sessions
                if agent_sessions:
                    agent_data = []
                    for session in agent_sessions:
                        serialized_session = serialize_session_json_fields(session.to_dict())
                        # Use preserved updated_at if flag is set and value exists, otherwise use current time
                        updated_at = serialized_session.get("updated_at") if preserve_updated_at else int(time.time())
                        agent_data.append(
                            {
                                "session_id": serialized_session.get("session_id"),
                                "session_type": SessionType.AGENT.value,
                                "agent_id": serialized_session.get("agent_id"),
                                "user_id": serialized_session.get("user_id"),
                                "agent_data": serialized_session.get("agent_data"),
                                "session_data": serialized_session.get("session_data"),
                                "metadata": serialized_session.get("metadata"),
                                "runs": serialized_session.get("runs"),
                                "summary": serialized_session.get("summary"),
                                "created_at": serialized_session.get("created_at"),
                                "updated_at": updated_at,
                            }
                        )

                    if agent_data:
                        # `excluded` refers to the incoming row in the ON CONFLICT clause.
                        stmt = sqlite.insert(table)
                        stmt = stmt.on_conflict_do_update(
                            index_elements=["session_id"],
                            set_=dict(
                                agent_id=stmt.excluded.agent_id,
                                user_id=stmt.excluded.user_id,
                                agent_data=stmt.excluded.agent_data,
                                session_data=stmt.excluded.session_data,
                                metadata=stmt.excluded.metadata,
                                runs=stmt.excluded.runs,
                                summary=stmt.excluded.summary,
                                updated_at=stmt.excluded.updated_at,
                            ),
                        )
                        sess.execute(stmt, agent_data)

                        # Fetch the results for agent sessions
                        agent_ids = [session.session_id for session in agent_sessions]
                        select_stmt = select(table).where(table.c.session_id.in_(agent_ids))
                        result = sess.execute(select_stmt).fetchall()

                        for row in result:
                            session_dict = deserialize_session_json_fields(dict(row._mapping))
                            if deserialize:
                                deserialized_agent_session = AgentSession.from_dict(session_dict)
                                if deserialized_agent_session is None:
                                    continue
                                results.append(deserialized_agent_session)
                            else:
                                results.append(session_dict)

                # Bulk upsert team sessions
                if team_sessions:
                    team_data = []
                    for session in team_sessions:
                        serialized_session = serialize_session_json_fields(session.to_dict())
                        # Use preserved updated_at if flag is set and value exists, otherwise use current time
                        updated_at = serialized_session.get("updated_at") if preserve_updated_at else int(time.time())
                        team_data.append(
                            {
                                "session_id": serialized_session.get("session_id"),
                                "session_type": SessionType.TEAM.value,
                                "team_id": serialized_session.get("team_id"),
                                "user_id": serialized_session.get("user_id"),
                                "runs": serialized_session.get("runs"),
                                "summary": serialized_session.get("summary"),
                                "created_at": serialized_session.get("created_at"),
                                "updated_at": updated_at,
                                "team_data": serialized_session.get("team_data"),
                                "session_data": serialized_session.get("session_data"),
                                "metadata": serialized_session.get("metadata"),
                            }
                        )

                    if team_data:
                        stmt = sqlite.insert(table)
                        stmt = stmt.on_conflict_do_update(
                            index_elements=["session_id"],
                            set_=dict(
                                team_id=stmt.excluded.team_id,
                                user_id=stmt.excluded.user_id,
                                team_data=stmt.excluded.team_data,
                                session_data=stmt.excluded.session_data,
                                metadata=stmt.excluded.metadata,
                                runs=stmt.excluded.runs,
                                summary=stmt.excluded.summary,
                                updated_at=stmt.excluded.updated_at,
                            ),
                        )
                        sess.execute(stmt, team_data)

                        # Fetch the results for team sessions
                        team_ids = [session.session_id for session in team_sessions]
                        select_stmt = select(table).where(table.c.session_id.in_(team_ids))
                        result = sess.execute(select_stmt).fetchall()

                        for row in result:
                            session_dict = deserialize_session_json_fields(dict(row._mapping))
                            if deserialize:
                                deserialized_team_session = TeamSession.from_dict(session_dict)
                                if deserialized_team_session is None:
                                    continue
                                results.append(deserialized_team_session)
                            else:
                                results.append(session_dict)

                # Bulk upsert workflow sessions
                if workflow_sessions:
                    workflow_data = []
                    for session in workflow_sessions:
                        serialized_session = serialize_session_json_fields(session.to_dict())
                        # Use preserved updated_at if flag is set and value exists, otherwise use current time
                        updated_at = serialized_session.get("updated_at") if preserve_updated_at else int(time.time())
                        workflow_data.append(
                            {
                                "session_id": serialized_session.get("session_id"),
                                "session_type": SessionType.WORKFLOW.value,
                                "workflow_id": serialized_session.get("workflow_id"),
                                "user_id": serialized_session.get("user_id"),
                                "runs": serialized_session.get("runs"),
                                "summary": serialized_session.get("summary"),
                                "created_at": serialized_session.get("created_at"),
                                "updated_at": updated_at,
                                "workflow_data": serialized_session.get("workflow_data"),
                                "session_data": serialized_session.get("session_data"),
                                "metadata": serialized_session.get("metadata"),
                            }
                        )

                    if workflow_data:
                        stmt = sqlite.insert(table)
                        stmt = stmt.on_conflict_do_update(
                            index_elements=["session_id"],
                            set_=dict(
                                workflow_id=stmt.excluded.workflow_id,
                                user_id=stmt.excluded.user_id,
                                workflow_data=stmt.excluded.workflow_data,
                                session_data=stmt.excluded.session_data,
                                metadata=stmt.excluded.metadata,
                                runs=stmt.excluded.runs,
                                summary=stmt.excluded.summary,
                                updated_at=stmt.excluded.updated_at,
                            ),
                        )
                        sess.execute(stmt, workflow_data)

                        # Fetch the results for workflow sessions
                        workflow_ids = [session.session_id for session in workflow_sessions]
                        select_stmt = select(table).where(table.c.session_id.in_(workflow_ids))
                        result = sess.execute(select_stmt).fetchall()

                        for row in result:
                            session_dict = deserialize_session_json_fields(dict(row._mapping))
                            if deserialize:
                                deserialized_workflow_session = WorkflowSession.from_dict(session_dict)
                                if deserialized_workflow_session is None:
                                    continue
                                results.append(deserialized_workflow_session)
                            else:
                                results.append(session_dict)

            return results

        except Exception as e:
            log_error(f"Exception during bulk session upsert, falling back to individual upserts: {e}")
            # Fallback to individual upserts
            return [
                result
                for session in sessions
                if session is not None
                for result in [self.upsert_session(session, deserialize=deserialize)]
                if result is not None
            ]
937
+
938
+ # -- Memory methods --
939
+
940
+ def delete_user_memory(self, memory_id: str, user_id: Optional[str] = None):
941
+ """Delete a user memory from the database.
942
+
943
+ Args:
944
+ memory_id (str): The ID of the memory to delete.
945
+ user_id (Optional[str]): The user ID to filter by. Defaults to None.
946
+
947
+ Returns:
948
+ bool: True if deletion was successful, False otherwise.
949
+
950
+ Raises:
951
+ Exception: If an error occurs during deletion.
952
+ """
953
+ try:
954
+ table = self._get_table(table_type="memories")
955
+ if table is None:
956
+ return
957
+
958
+ with self.Session() as sess, sess.begin():
959
+ delete_stmt = table.delete().where(table.c.memory_id == memory_id)
960
+ if user_id is not None:
961
+ delete_stmt = delete_stmt.where(table.c.user_id == user_id)
962
+ result = sess.execute(delete_stmt)
963
+
964
+ success = result.rowcount > 0
965
+ if success:
966
+ log_debug(f"Successfully deleted user memory id: {memory_id}")
967
+ else:
968
+ log_debug(f"No user memory found with id: {memory_id}")
969
+
970
+ except Exception as e:
971
+ log_error(f"Error deleting user memory: {e}")
972
+ raise e
973
+
974
+ def delete_user_memories(self, memory_ids: List[str], user_id: Optional[str] = None) -> None:
975
+ """Delete user memories from the database.
976
+
977
+ Args:
978
+ memory_ids (List[str]): The IDs of the memories to delete.
979
+ user_id (Optional[str]): The user ID to filter by. Defaults to None.
980
+
981
+ Raises:
982
+ Exception: If an error occurs during deletion.
983
+ """
984
+ try:
985
+ table = self._get_table(table_type="memories")
986
+ if table is None:
987
+ return
988
+
989
+ with self.Session() as sess, sess.begin():
990
+ delete_stmt = table.delete().where(table.c.memory_id.in_(memory_ids))
991
+ if user_id is not None:
992
+ delete_stmt = delete_stmt.where(table.c.user_id == user_id)
993
+ result = sess.execute(delete_stmt)
994
+ if result.rowcount == 0:
995
+ log_debug(f"No user memories found with ids: {memory_ids}")
996
+
997
+ except Exception as e:
998
+ log_error(f"Error deleting user memories: {e}")
999
+ raise e
1000
+
1001
+ def get_all_memory_topics(self) -> List[str]:
1002
+ """Get all memory topics from the database.
1003
+
1004
+ Returns:
1005
+ List[str]: List of memory topics.
1006
+ """
1007
+ try:
1008
+ table = self._get_table(table_type="memories")
1009
+ if table is None:
1010
+ return []
1011
+
1012
+ with self.Session() as sess, sess.begin():
1013
+ # Select topics from all results
1014
+ stmt = select(func.json_array_elements_text(table.c.topics)).select_from(table)
1015
+ result = sess.execute(stmt).fetchall()
1016
+
1017
+ return list(set([record[0] for record in result]))
1018
+
1019
+ except Exception as e:
1020
+ log_debug(f"Exception reading from memory table: {e}")
1021
+ raise e
1022
+
1023
+ def get_user_memory(
1024
+ self,
1025
+ memory_id: str,
1026
+ deserialize: Optional[bool] = True,
1027
+ user_id: Optional[str] = None,
1028
+ ) -> Optional[Union[UserMemory, Dict[str, Any]]]:
1029
+ """Get a memory from the database.
1030
+
1031
+ Args:
1032
+ memory_id (str): The ID of the memory to get.
1033
+ deserialize (Optional[bool]): Whether to serialize the memory. Defaults to True.
1034
+ user_id (Optional[str]): The user ID to filter by. Defaults to None.
1035
+
1036
+ Returns:
1037
+ Optional[Union[UserMemory, Dict[str, Any]]]:
1038
+ - When deserialize=True: UserMemory object
1039
+ - When deserialize=False: Memory dictionary
1040
+
1041
+ Raises:
1042
+ Exception: If an error occurs during retrieval.
1043
+ """
1044
+ try:
1045
+ table = self._get_table(table_type="memories")
1046
+ if table is None:
1047
+ return None
1048
+
1049
+ with self.Session() as sess, sess.begin():
1050
+ stmt = select(table).where(table.c.memory_id == memory_id)
1051
+ if user_id is not None:
1052
+ stmt = stmt.where(table.c.user_id == user_id)
1053
+ result = sess.execute(stmt).fetchone()
1054
+ if result is None:
1055
+ return None
1056
+
1057
+ memory_raw = dict(result._mapping)
1058
+ if not memory_raw or not deserialize:
1059
+ return memory_raw
1060
+
1061
+ return UserMemory.from_dict(memory_raw)
1062
+
1063
+ except Exception as e:
1064
+ log_debug(f"Exception reading from memorytable: {e}")
1065
+ raise e
1066
+
1067
+ def get_user_memories(
1068
+ self,
1069
+ user_id: Optional[str] = None,
1070
+ agent_id: Optional[str] = None,
1071
+ team_id: Optional[str] = None,
1072
+ topics: Optional[List[str]] = None,
1073
+ search_content: Optional[str] = None,
1074
+ limit: Optional[int] = None,
1075
+ page: Optional[int] = None,
1076
+ sort_by: Optional[str] = None,
1077
+ sort_order: Optional[str] = None,
1078
+ deserialize: Optional[bool] = True,
1079
+ ) -> Union[List[UserMemory], Tuple[List[Dict[str, Any]], int]]:
1080
+ """Get all memories from the database as UserMemory objects.
1081
+
1082
+ Args:
1083
+ user_id (Optional[str]): The ID of the user to filter by.
1084
+ agent_id (Optional[str]): The ID of the agent to filter by.
1085
+ team_id (Optional[str]): The ID of the team to filter by.
1086
+ topics (Optional[List[str]]): The topics to filter by.
1087
+ search_content (Optional[str]): The content to search for.
1088
+ limit (Optional[int]): The maximum number of memories to return.
1089
+ page (Optional[int]): The page number.
1090
+ sort_by (Optional[str]): The column to sort by.
1091
+ sort_order (Optional[str]): The order to sort by.
1092
+ deserialize (Optional[bool]): Whether to serialize the memories. Defaults to True.
1093
+
1094
+
1095
+ Returns:
1096
+ Union[List[UserMemory], Tuple[List[Dict[str, Any]], int]]:
1097
+ - When deserialize=True: List of UserMemory objects
1098
+ - When deserialize=False: List of UserMemory dictionaries and total count
1099
+
1100
+ Raises:
1101
+ Exception: If an error occurs during retrieval.
1102
+ """
1103
+ try:
1104
+ table = self._get_table(table_type="memories")
1105
+ if table is None:
1106
+ return [] if deserialize else ([], 0)
1107
+
1108
+ with self.Session() as sess, sess.begin():
1109
+ stmt = select(table)
1110
+
1111
+ # Filtering
1112
+ if user_id is not None:
1113
+ stmt = stmt.where(table.c.user_id == user_id)
1114
+ if agent_id is not None:
1115
+ stmt = stmt.where(table.c.agent_id == agent_id)
1116
+ if team_id is not None:
1117
+ stmt = stmt.where(table.c.team_id == team_id)
1118
+ if topics is not None:
1119
+ topic_conditions = [text(f"topics::text LIKE '%\"{topic}\"%'") for topic in topics]
1120
+ stmt = stmt.where(and_(*topic_conditions))
1121
+ if search_content is not None:
1122
+ stmt = stmt.where(table.c.memory.ilike(f"%{search_content}%"))
1123
+
1124
+ # Get total count after applying filtering
1125
+ count_stmt = select(func.count()).select_from(stmt.alias())
1126
+ total_count = sess.execute(count_stmt).scalar()
1127
+
1128
+ # Sorting
1129
+ stmt = apply_sorting(stmt, table, sort_by, sort_order)
1130
+ # Paginating
1131
+ if limit is not None:
1132
+ stmt = stmt.limit(limit)
1133
+ if page is not None:
1134
+ stmt = stmt.offset((page - 1) * limit)
1135
+
1136
+ result = sess.execute(stmt).fetchall()
1137
+ if not result:
1138
+ return [] if deserialize else ([], 0)
1139
+
1140
+ memories_raw = [record._mapping for record in result]
1141
+
1142
+ if not deserialize:
1143
+ return memories_raw, total_count
1144
+
1145
+ return [UserMemory.from_dict(record) for record in memories_raw]
1146
+
1147
+ except Exception as e:
1148
+ log_error(f"Error reading from memory table: {e}")
1149
+ raise e
1150
+
1151
+ def get_user_memory_stats(
1152
+ self,
1153
+ limit: Optional[int] = None,
1154
+ page: Optional[int] = None,
1155
+ ) -> Tuple[List[Dict[str, Any]], int]:
1156
+ """Get user memories stats.
1157
+
1158
+ Args:
1159
+ limit (Optional[int]): The maximum number of user stats to return.
1160
+ page (Optional[int]): The page number.
1161
+
1162
+ Returns:
1163
+ Tuple[List[Dict[str, Any]], int]: A list of dictionaries containing user stats and total count.
1164
+
1165
+ Example:
1166
+ (
1167
+ [
1168
+ {
1169
+ "user_id": "123",
1170
+ "total_memories": 10,
1171
+ "last_memory_updated_at": 1714560000,
1172
+ },
1173
+ ],
1174
+ total_count: 1,
1175
+ )
1176
+ """
1177
+ try:
1178
+ table = self._get_table(table_type="memories")
1179
+ if table is None:
1180
+ return [], 0
1181
+
1182
+ with self.Session() as sess, sess.begin():
1183
+ stmt = (
1184
+ select(
1185
+ table.c.user_id,
1186
+ func.count(table.c.memory_id).label("total_memories"),
1187
+ func.max(table.c.updated_at).label("last_memory_updated_at"),
1188
+ )
1189
+ .where(table.c.user_id.is_not(None))
1190
+ .group_by(table.c.user_id)
1191
+ .order_by(func.max(table.c.updated_at).desc())
1192
+ )
1193
+
1194
+ count_stmt = select(func.count()).select_from(stmt.alias())
1195
+ total_count = sess.execute(count_stmt).scalar()
1196
+
1197
+ # Pagination
1198
+ if limit is not None:
1199
+ stmt = stmt.limit(limit)
1200
+ if page is not None:
1201
+ stmt = stmt.offset((page - 1) * limit)
1202
+
1203
+ result = sess.execute(stmt).fetchall()
1204
+ if not result:
1205
+ return [], 0
1206
+
1207
+ return [
1208
+ {
1209
+ "user_id": record.user_id, # type: ignore
1210
+ "total_memories": record.total_memories,
1211
+ "last_memory_updated_at": record.last_memory_updated_at,
1212
+ }
1213
+ for record in result
1214
+ ], total_count
1215
+
1216
+ except Exception as e:
1217
+ log_error(f"Error getting user memory stats: {e}")
1218
+ raise e
1219
+
1220
+ def upsert_user_memory(
1221
+ self, memory: UserMemory, deserialize: Optional[bool] = True
1222
+ ) -> Optional[Union[UserMemory, Dict[str, Any]]]:
1223
+ """Upsert a user memory in the database.
1224
+
1225
+ Args:
1226
+ memory (UserMemory): The user memory to upsert.
1227
+ deserialize (Optional[bool]): Whether to serialize the memory. Defaults to True.
1228
+
1229
+ Returns:
1230
+ Optional[Union[UserMemory, Dict[str, Any]]]:
1231
+ - When deserialize=True: UserMemory object
1232
+ - When deserialize=False: UserMemory dictionary
1233
+
1234
+ Raises:
1235
+ Exception: If an error occurs during upsert.
1236
+ """
1237
+ try:
1238
+ table = self._get_table(table_type="memories", create_table_if_not_found=True)
1239
+ if table is None:
1240
+ return None
1241
+
1242
+ if memory.memory_id is None:
1243
+ memory.memory_id = str(uuid4())
1244
+
1245
+ with self.Session() as sess, sess.begin():
1246
+ stmt = sqlite.insert(table).values(
1247
+ user_id=memory.user_id,
1248
+ agent_id=memory.agent_id,
1249
+ team_id=memory.team_id,
1250
+ memory_id=memory.memory_id,
1251
+ memory=memory.memory,
1252
+ topics=memory.topics,
1253
+ input=memory.input,
1254
+ updated_at=int(time.time()),
1255
+ )
1256
+ stmt = stmt.on_conflict_do_update( # type: ignore
1257
+ index_elements=["memory_id"],
1258
+ set_=dict(
1259
+ memory=memory.memory,
1260
+ topics=memory.topics,
1261
+ input=memory.input,
1262
+ updated_at=int(time.time()),
1263
+ ),
1264
+ ).returning(table)
1265
+
1266
+ result = sess.execute(stmt)
1267
+ row = result.fetchone()
1268
+
1269
+ if row is None:
1270
+ return None
1271
+
1272
+ memory_raw = row._mapping
1273
+ if not memory_raw or not deserialize:
1274
+ return memory_raw
1275
+
1276
+ return UserMemory.from_dict(memory_raw)
1277
+
1278
+ except Exception as e:
1279
+ log_error(f"Error upserting user memory: {e}")
1280
+ raise e
1281
+
1282
+ def upsert_memories(
1283
+ self,
1284
+ memories: List[UserMemory],
1285
+ deserialize: Optional[bool] = True,
1286
+ preserve_updated_at: bool = False,
1287
+ ) -> List[Union[UserMemory, Dict[str, Any]]]:
1288
+ """
1289
+ Bulk upsert multiple user memories for improved performance on large datasets.
1290
+
1291
+ Args:
1292
+ memories (List[UserMemory]): List of memories to upsert.
1293
+ deserialize (Optional[bool]): Whether to deserialize the memories. Defaults to True.
1294
+
1295
+ Returns:
1296
+ List[Union[UserMemory, Dict[str, Any]]]: List of upserted memories.
1297
+
1298
+ Raises:
1299
+ Exception: If an error occurs during bulk upsert.
1300
+ """
1301
+ if not memories:
1302
+ return []
1303
+
1304
+ try:
1305
+ table = self._get_table(table_type="memories", create_table_if_not_found=True)
1306
+ if table is None:
1307
+ log_info("Memories table not available, falling back to individual upserts")
1308
+ return [
1309
+ result
1310
+ for memory in memories
1311
+ if memory is not None
1312
+ for result in [self.upsert_user_memory(memory, deserialize=deserialize)]
1313
+ if result is not None
1314
+ ]
1315
+ # Prepare bulk data
1316
+ bulk_data = []
1317
+ current_time = int(time.time())
1318
+ for memory in memories:
1319
+ if memory.memory_id is None:
1320
+ memory.memory_id = str(uuid4())
1321
+
1322
+ # Use preserved updated_at if flag is set and value exists, otherwise use current time
1323
+ updated_at = memory.updated_at if preserve_updated_at else current_time
1324
+ bulk_data.append(
1325
+ {
1326
+ "user_id": memory.user_id,
1327
+ "agent_id": memory.agent_id,
1328
+ "team_id": memory.team_id,
1329
+ "memory_id": memory.memory_id,
1330
+ "memory": memory.memory,
1331
+ "topics": memory.topics,
1332
+ "updated_at": updated_at,
1333
+ }
1334
+ )
1335
+
1336
+ results: List[Union[UserMemory, Dict[str, Any]]] = []
1337
+
1338
+ with self.Session() as sess, sess.begin():
1339
+ # Bulk upsert memories using SQLite ON CONFLICT DO UPDATE
1340
+ stmt = sqlite.insert(table)
1341
+ stmt = stmt.on_conflict_do_update(
1342
+ index_elements=["memory_id"],
1343
+ set_=dict(
1344
+ memory=stmt.excluded.memory,
1345
+ topics=stmt.excluded.topics,
1346
+ input=stmt.excluded.input,
1347
+ agent_id=stmt.excluded.agent_id,
1348
+ team_id=stmt.excluded.team_id,
1349
+ updated_at=stmt.excluded.updated_at,
1350
+ ),
1351
+ )
1352
+ sess.execute(stmt, bulk_data)
1353
+
1354
+ # Fetch results
1355
+ memory_ids = [memory.memory_id for memory in memories if memory.memory_id]
1356
+ select_stmt = select(table).where(table.c.memory_id.in_(memory_ids))
1357
+ result = sess.execute(select_stmt).fetchall()
1358
+
1359
+ for row in result:
1360
+ memory_dict = dict(row._mapping)
1361
+ if deserialize:
1362
+ results.append(UserMemory.from_dict(memory_dict))
1363
+ else:
1364
+ results.append(memory_dict)
1365
+
1366
+ return results
1367
+
1368
+ except Exception as e:
1369
+ log_error(f"Exception during bulk memory upsert, falling back to individual upserts: {e}")
1370
+
1371
+ # Fallback to individual upserts
1372
+ return [
1373
+ result
1374
+ for memory in memories
1375
+ if memory is not None
1376
+ for result in [self.upsert_user_memory(memory, deserialize=deserialize)]
1377
+ if result is not None
1378
+ ]
1379
+
1380
+ def clear_memories(self) -> None:
1381
+ """Delete all memories from the database.
1382
+
1383
+ Raises:
1384
+ Exception: If an error occurs during deletion.
1385
+ """
1386
+ try:
1387
+ table = self._get_table(table_type="memories")
1388
+ if table is None:
1389
+ return
1390
+
1391
+ with self.Session() as sess, sess.begin():
1392
+ sess.execute(table.delete())
1393
+
1394
+ except Exception as e:
1395
+ from agno.utils.log import log_warning
1396
+
1397
+ log_warning(f"Exception deleting all memories: {e}")
1398
+ raise e
1399
+
1400
+ # -- Metrics methods --
1401
+
1402
+ def _get_all_sessions_for_metrics_calculation(
1403
+ self, start_timestamp: Optional[int] = None, end_timestamp: Optional[int] = None
1404
+ ) -> List[Dict[str, Any]]:
1405
+ """
1406
+ Get all sessions of all types (agent, team, workflow) as raw dictionaries.
1407
+
1408
+ Args:
1409
+ start_timestamp (Optional[int]): The start timestamp to filter by. Defaults to None.
1410
+ end_timestamp (Optional[int]): The end timestamp to filter by. Defaults to None.
1411
+
1412
+ Returns:
1413
+ List[Dict[str, Any]]: List of session dictionaries with session_type field.
1414
+
1415
+ Raises:
1416
+ Exception: If an error occurs during retrieval.
1417
+ """
1418
+ try:
1419
+ table = self._get_table(table_type="sessions")
1420
+ if table is None:
1421
+ return []
1422
+
1423
+ stmt = select(
1424
+ table.c.user_id,
1425
+ table.c.session_data,
1426
+ table.c.runs,
1427
+ table.c.created_at,
1428
+ table.c.session_type,
1429
+ )
1430
+
1431
+ if start_timestamp is not None:
1432
+ stmt = stmt.where(table.c.created_at >= start_timestamp)
1433
+ if end_timestamp is not None:
1434
+ stmt = stmt.where(table.c.created_at <= end_timestamp)
1435
+
1436
+ with self.Session() as sess:
1437
+ result = sess.execute(stmt).fetchall()
1438
+ return [record._mapping for record in result]
1439
+
1440
+ except Exception as e:
1441
+ log_error(f"Error reading from sessions table: {e}")
1442
+ raise e
1443
+
1444
    def _get_metrics_calculation_starting_date(self, table: Table) -> Optional[date]:
        """Get the first date for which metrics calculation is needed:

        1. If there are metrics records, return the date of the first day without a complete metrics record.
        2. If there are no metrics records, return the date of the first recorded session.
        3. If there are no metrics records and no sessions records, return None.

        Args:
            table (Table): The table to get the starting date for.

        Returns:
            Optional[date]: The starting date for which metrics calculation is needed.
        """
        with self.Session() as sess:
            # Latest metrics row, if any (ordered by the metrics date column).
            stmt = select(table).order_by(table.c.date.desc()).limit(1)
            result = sess.execute(stmt).fetchone()

            # 1. Return the date of the first day without a complete metrics record.
            if result is not None:
                if result.completed:
                    # Latest day is complete -> resume from the following day.
                    return result._mapping["date"] + timedelta(days=1)
                else:
                    # Latest day is incomplete -> recalculate that same day.
                    return result._mapping["date"]

        # 2. No metrics records. Return the date of the first recorded session.
        first_session, _ = self.get_sessions(sort_by="created_at", sort_order="asc", limit=1, deserialize=False)
        first_session_date = first_session[0]["created_at"] if first_session else None  # type: ignore

        # 3. No metrics records and no sessions records. Return None.
        if not first_session_date:
            return None

        # created_at is a unix timestamp; interpret it in UTC.
        return datetime.fromtimestamp(first_session_date, tz=timezone.utc).date()
1477
+
1478
    def calculate_metrics(self) -> Optional[list[dict]]:
        """Calculate metrics for all dates without complete metrics.

        Returns:
            Optional[list[dict]]: The calculated metrics, or None when there is
            nothing to calculate (no sessions, or metrics already up to date).

        Raises:
            Exception: If an error occurs during metrics calculation.
        """
        try:
            table = self._get_table(table_type="metrics", create_table_if_not_found=True)
            if table is None:
                return None

            # First day still missing a complete metrics record (None = no sessions at all).
            starting_date = self._get_metrics_calculation_starting_date(table)
            if starting_date is None:
                log_info("No session data found. Won't calculate metrics.")
                return None

            dates_to_process = get_dates_to_calculate_metrics_for(starting_date)
            if not dates_to_process:
                log_info("Metrics already calculated for all relevant dates.")
                return None

            # UTC day boundaries spanning the whole range to process
            # (end is exclusive: start of the day after the last date).
            start_timestamp = int(
                datetime.combine(dates_to_process[0], datetime.min.time()).replace(tzinfo=timezone.utc).timestamp()
            )
            end_timestamp = int(
                datetime.combine(dates_to_process[-1] + timedelta(days=1), datetime.min.time())
                .replace(tzinfo=timezone.utc)
                .timestamp()
            )

            sessions = self._get_all_sessions_for_metrics_calculation(
                start_timestamp=start_timestamp, end_timestamp=end_timestamp
            )
            # Group raw sessions per date (keyed by ISO date string).
            all_sessions_data = fetch_all_sessions_data(
                sessions=sessions,
                dates_to_process=dates_to_process,
                start_timestamp=start_timestamp,
            )
            if not all_sessions_data:
                log_info("No new session data found. Won't calculate metrics.")
                return None

            results = []
            metrics_records = []

            for date_to_process in dates_to_process:
                date_key = date_to_process.isoformat()
                sessions_for_date = all_sessions_data.get(date_key, {})

                # Skip dates with no sessions
                if not any(len(sessions) > 0 for sessions in sessions_for_date.values()):
                    continue

                metrics_record = calculate_date_metrics(date_to_process, sessions_for_date)
                metrics_records.append(metrics_record)

            # Persist all per-date records in a single transaction.
            if metrics_records:
                with self.Session() as sess, sess.begin():
                    results = bulk_upsert_metrics(session=sess, table=table, metrics_records=metrics_records)

            log_debug("Updated metrics calculations")

            return results

        except Exception as e:
            log_error(f"Error refreshing metrics: {e}")
            raise e
1548
+
1549
+ def get_metrics(
1550
+ self,
1551
+ starting_date: Optional[date] = None,
1552
+ ending_date: Optional[date] = None,
1553
+ ) -> Tuple[List[dict], Optional[int]]:
1554
+ """Get all metrics matching the given date range.
1555
+
1556
+ Args:
1557
+ starting_date (Optional[date]): The starting date to filter metrics by.
1558
+ ending_date (Optional[date]): The ending date to filter metrics by.
1559
+
1560
+ Returns:
1561
+ Tuple[List[dict], Optional[int]]: A tuple containing the metrics and the timestamp of the latest update.
1562
+
1563
+ Raises:
1564
+ Exception: If an error occurs during retrieval.
1565
+ """
1566
+ try:
1567
+ table = self._get_table(table_type="metrics", create_table_if_not_found=True)
1568
+ if table is None:
1569
+ return [], None
1570
+
1571
+ with self.Session() as sess, sess.begin():
1572
+ stmt = select(table)
1573
+ if starting_date:
1574
+ stmt = stmt.where(table.c.date >= starting_date)
1575
+ if ending_date:
1576
+ stmt = stmt.where(table.c.date <= ending_date)
1577
+ result = sess.execute(stmt).fetchall()
1578
+ if not result:
1579
+ return [], None
1580
+
1581
+ # Get the latest updated_at
1582
+ latest_stmt = select(func.max(table.c.updated_at))
1583
+ latest_updated_at = sess.execute(latest_stmt).scalar()
1584
+
1585
+ return [row._mapping for row in result], latest_updated_at
1586
+
1587
+ except Exception as e:
1588
+ log_error(f"Error getting metrics: {e}")
1589
+ raise e
1590
+
1591
+ # -- Knowledge methods --
1592
+
1593
+ def delete_knowledge_content(self, id: str):
1594
+ """Delete a knowledge row from the database.
1595
+
1596
+ Args:
1597
+ id (str): The ID of the knowledge row to delete.
1598
+
1599
+ Raises:
1600
+ Exception: If an error occurs during deletion.
1601
+ """
1602
+ table = self._get_table(table_type="knowledge")
1603
+ if table is None:
1604
+ return
1605
+
1606
+ try:
1607
+ with self.Session() as sess, sess.begin():
1608
+ stmt = table.delete().where(table.c.id == id)
1609
+ sess.execute(stmt)
1610
+
1611
+ except Exception as e:
1612
+ log_error(f"Error deleting knowledge content: {e}")
1613
+ raise e
1614
+
1615
+ def get_knowledge_content(self, id: str) -> Optional[KnowledgeRow]:
1616
+ """Get a knowledge row from the database.
1617
+
1618
+ Args:
1619
+ id (str): The ID of the knowledge row to get.
1620
+
1621
+ Returns:
1622
+ Optional[KnowledgeRow]: The knowledge row, or None if it doesn't exist.
1623
+
1624
+ Raises:
1625
+ Exception: If an error occurs during retrieval.
1626
+ """
1627
+ table = self._get_table(table_type="knowledge")
1628
+ if table is None:
1629
+ return None
1630
+
1631
+ try:
1632
+ with self.Session() as sess, sess.begin():
1633
+ stmt = select(table).where(table.c.id == id)
1634
+ result = sess.execute(stmt).fetchone()
1635
+ if result is None:
1636
+ return None
1637
+
1638
+ return KnowledgeRow.model_validate(result._mapping)
1639
+
1640
+ except Exception as e:
1641
+ log_error(f"Error getting knowledge content: {e}")
1642
+ raise e
1643
+
1644
+ def get_knowledge_contents(
1645
+ self,
1646
+ limit: Optional[int] = None,
1647
+ page: Optional[int] = None,
1648
+ sort_by: Optional[str] = None,
1649
+ sort_order: Optional[str] = None,
1650
+ ) -> Tuple[List[KnowledgeRow], int]:
1651
+ """Get all knowledge contents from the database.
1652
+
1653
+ Args:
1654
+ limit (Optional[int]): The maximum number of knowledge contents to return.
1655
+ page (Optional[int]): The page number.
1656
+ sort_by (Optional[str]): The column to sort by.
1657
+ sort_order (Optional[str]): The order to sort by.
1658
+
1659
+ Returns:
1660
+ Tuple[List[KnowledgeRow], int]: The knowledge contents and total count.
1661
+
1662
+ Raises:
1663
+ Exception: If an error occurs during retrieval.
1664
+ """
1665
+ table = self._get_table(table_type="knowledge")
1666
+ if table is None:
1667
+ return [], 0
1668
+
1669
+ try:
1670
+ with self.Session() as sess, sess.begin():
1671
+ stmt = select(table)
1672
+
1673
+ # Apply sorting
1674
+ if sort_by is not None:
1675
+ stmt = stmt.order_by(getattr(table.c, sort_by) * (1 if sort_order == "asc" else -1))
1676
+
1677
+ # Get total count before applying limit and pagination
1678
+ count_stmt = select(func.count()).select_from(stmt.alias())
1679
+ total_count = sess.execute(count_stmt).scalar()
1680
+
1681
+ # Apply pagination after count
1682
+ if limit is not None:
1683
+ stmt = stmt.limit(limit)
1684
+ if page is not None:
1685
+ stmt = stmt.offset((page - 1) * limit)
1686
+
1687
+ result = sess.execute(stmt).fetchall()
1688
+ return [KnowledgeRow.model_validate(record._mapping) for record in result], total_count
1689
+
1690
+ except Exception as e:
1691
+ log_error(f"Error getting knowledge contents: {e}")
1692
+ raise e
1693
+
1694
+ def upsert_knowledge_content(self, knowledge_row: KnowledgeRow):
1695
+ """Upsert knowledge content in the database.
1696
+
1697
+ Args:
1698
+ knowledge_row (KnowledgeRow): The knowledge row to upsert.
1699
+
1700
+ Returns:
1701
+ Optional[KnowledgeRow]: The upserted knowledge row, or None if the operation fails.
1702
+ """
1703
+ try:
1704
+ table = self._get_table(table_type="knowledge", create_table_if_not_found=True)
1705
+ if table is None:
1706
+ return None
1707
+
1708
+ with self.Session() as sess, sess.begin():
1709
+ update_fields = {
1710
+ k: v
1711
+ for k, v in {
1712
+ "name": knowledge_row.name,
1713
+ "description": knowledge_row.description,
1714
+ "metadata": knowledge_row.metadata,
1715
+ "type": knowledge_row.type,
1716
+ "size": knowledge_row.size,
1717
+ "linked_to": knowledge_row.linked_to,
1718
+ "access_count": knowledge_row.access_count,
1719
+ "status": knowledge_row.status,
1720
+ "status_message": knowledge_row.status_message,
1721
+ "created_at": knowledge_row.created_at,
1722
+ "updated_at": knowledge_row.updated_at,
1723
+ "external_id": knowledge_row.external_id,
1724
+ }.items()
1725
+ # Filtering out None fields if updating
1726
+ if v is not None
1727
+ }
1728
+
1729
+ stmt = (
1730
+ sqlite.insert(table)
1731
+ .values(knowledge_row.model_dump())
1732
+ .on_conflict_do_update(index_elements=["id"], set_=update_fields)
1733
+ )
1734
+ sess.execute(stmt)
1735
+
1736
+ return knowledge_row
1737
+
1738
+ except Exception as e:
1739
+ log_error(f"Error upserting knowledge content: {e}")
1740
+ raise e
1741
+
1742
+ # -- Eval methods --
1743
+
1744
+ def create_eval_run(self, eval_run: EvalRunRecord) -> Optional[EvalRunRecord]:
1745
+ """Create an EvalRunRecord in the database.
1746
+
1747
+ Args:
1748
+ eval_run (EvalRunRecord): The eval run to create.
1749
+
1750
+ Returns:
1751
+ Optional[EvalRunRecord]: The created eval run, or None if the operation fails.
1752
+
1753
+ Raises:
1754
+ Exception: If an error occurs during creation.
1755
+ """
1756
+ try:
1757
+ table = self._get_table(table_type="evals", create_table_if_not_found=True)
1758
+ if table is None:
1759
+ return None
1760
+
1761
+ with self.Session() as sess, sess.begin():
1762
+ current_time = int(time.time())
1763
+ stmt = sqlite.insert(table).values(
1764
+ {
1765
+ "created_at": current_time,
1766
+ "updated_at": current_time,
1767
+ **eval_run.model_dump(),
1768
+ }
1769
+ )
1770
+ sess.execute(stmt)
1771
+ sess.commit()
1772
+
1773
+ log_debug(f"Created eval run with id '{eval_run.run_id}'")
1774
+
1775
+ return eval_run
1776
+
1777
+ except Exception as e:
1778
+ log_error(f"Error creating eval run: {e}")
1779
+ raise e
1780
+
1781
+ def delete_eval_run(self, eval_run_id: str) -> None:
1782
+ """Delete an eval run from the database.
1783
+
1784
+ Args:
1785
+ eval_run_id (str): The ID of the eval run to delete.
1786
+ """
1787
+ try:
1788
+ table = self._get_table(table_type="evals")
1789
+ if table is None:
1790
+ return
1791
+
1792
+ with self.Session() as sess, sess.begin():
1793
+ stmt = table.delete().where(table.c.run_id == eval_run_id)
1794
+ result = sess.execute(stmt)
1795
+ if result.rowcount == 0:
1796
+ log_warning(f"No eval run found with ID: {eval_run_id}")
1797
+ else:
1798
+ log_debug(f"Deleted eval run with ID: {eval_run_id}")
1799
+
1800
+ except Exception as e:
1801
+ log_error(f"Error deleting eval run {eval_run_id}: {e}")
1802
+ raise e
1803
+
1804
+ def delete_eval_runs(self, eval_run_ids: List[str]) -> None:
1805
+ """Delete multiple eval runs from the database.
1806
+
1807
+ Args:
1808
+ eval_run_ids (List[str]): List of eval run IDs to delete.
1809
+ """
1810
+ try:
1811
+ table = self._get_table(table_type="evals")
1812
+ if table is None:
1813
+ return
1814
+
1815
+ with self.Session() as sess, sess.begin():
1816
+ stmt = table.delete().where(table.c.run_id.in_(eval_run_ids))
1817
+ result = sess.execute(stmt)
1818
+ if result.rowcount == 0:
1819
+ log_debug(f"No eval runs found with IDs: {eval_run_ids}")
1820
+ else:
1821
+ log_debug(f"Deleted {result.rowcount} eval runs")
1822
+
1823
+ except Exception as e:
1824
+ log_error(f"Error deleting eval runs {eval_run_ids}: {e}")
1825
+ raise e
1826
+
1827
+ def get_eval_run(
1828
+ self, eval_run_id: str, deserialize: Optional[bool] = True
1829
+ ) -> Optional[Union[EvalRunRecord, Dict[str, Any]]]:
1830
+ """Get an eval run from the database.
1831
+
1832
+ Args:
1833
+ eval_run_id (str): The ID of the eval run to get.
1834
+ deserialize (Optional[bool]): Whether to serialize the eval run. Defaults to True.
1835
+
1836
+ Returns:
1837
+ Optional[Union[EvalRunRecord, Dict[str, Any]]]:
1838
+ - When deserialize=True: EvalRunRecord object
1839
+ - When deserialize=False: EvalRun dictionary
1840
+
1841
+ Raises:
1842
+ Exception: If an error occurs during retrieval.
1843
+ """
1844
+ try:
1845
+ table = self._get_table(table_type="evals")
1846
+ if table is None:
1847
+ return None
1848
+
1849
+ with self.Session() as sess, sess.begin():
1850
+ stmt = select(table).where(table.c.run_id == eval_run_id)
1851
+ result = sess.execute(stmt).fetchone()
1852
+ if result is None:
1853
+ return None
1854
+
1855
+ eval_run_raw = result._mapping
1856
+ if not eval_run_raw or not deserialize:
1857
+ return eval_run_raw
1858
+
1859
+ return EvalRunRecord.model_validate(eval_run_raw)
1860
+
1861
+ except Exception as e:
1862
+ log_error(f"Exception getting eval run {eval_run_id}: {e}")
1863
+ raise e
1864
+
1865
+ def get_eval_runs(
1866
+ self,
1867
+ limit: Optional[int] = None,
1868
+ page: Optional[int] = None,
1869
+ sort_by: Optional[str] = None,
1870
+ sort_order: Optional[str] = None,
1871
+ agent_id: Optional[str] = None,
1872
+ team_id: Optional[str] = None,
1873
+ workflow_id: Optional[str] = None,
1874
+ model_id: Optional[str] = None,
1875
+ filter_type: Optional[EvalFilterType] = None,
1876
+ eval_type: Optional[List[EvalType]] = None,
1877
+ deserialize: Optional[bool] = True,
1878
+ ) -> Union[List[EvalRunRecord], Tuple[List[Dict[str, Any]], int]]:
1879
+ """Get all eval runs from the database.
1880
+
1881
+ Args:
1882
+ limit (Optional[int]): The maximum number of eval runs to return.
1883
+ page (Optional[int]): The page number.
1884
+ sort_by (Optional[str]): The column to sort by.
1885
+ sort_order (Optional[str]): The order to sort by.
1886
+ agent_id (Optional[str]): The ID of the agent to filter by.
1887
+ team_id (Optional[str]): The ID of the team to filter by.
1888
+ workflow_id (Optional[str]): The ID of the workflow to filter by.
1889
+ model_id (Optional[str]): The ID of the model to filter by.
1890
+ eval_type (Optional[List[EvalType]]): The type(s) of eval to filter by.
1891
+ filter_type (Optional[EvalFilterType]): Filter by component type (agent, team, workflow).
1892
+ deserialize (Optional[bool]): Whether to serialize the eval runs. Defaults to True.
1893
+ create_table_if_not_found (Optional[bool]): Whether to create the table if it doesn't exist.
1894
+
1895
+ Returns:
1896
+ Union[List[EvalRunRecord], Tuple[List[Dict[str, Any]], int]]:
1897
+ - When deserialize=True: List of EvalRunRecord objects
1898
+ - When deserialize=False: List of EvalRun dictionaries and total count
1899
+
1900
+ Raises:
1901
+ Exception: If an error occurs during retrieval.
1902
+ """
1903
+ try:
1904
+ table = self._get_table(table_type="evals")
1905
+ if table is None:
1906
+ return [] if deserialize else ([], 0)
1907
+
1908
+ with self.Session() as sess, sess.begin():
1909
+ stmt = select(table)
1910
+
1911
+ # Filtering
1912
+ if agent_id is not None:
1913
+ stmt = stmt.where(table.c.agent_id == agent_id)
1914
+ if team_id is not None:
1915
+ stmt = stmt.where(table.c.team_id == team_id)
1916
+ if workflow_id is not None:
1917
+ stmt = stmt.where(table.c.workflow_id == workflow_id)
1918
+ if model_id is not None:
1919
+ stmt = stmt.where(table.c.model_id == model_id)
1920
+ if eval_type is not None and len(eval_type) > 0:
1921
+ stmt = stmt.where(table.c.eval_type.in_(eval_type))
1922
+ if filter_type is not None:
1923
+ if filter_type == EvalFilterType.AGENT:
1924
+ stmt = stmt.where(table.c.agent_id.is_not(None))
1925
+ elif filter_type == EvalFilterType.TEAM:
1926
+ stmt = stmt.where(table.c.team_id.is_not(None))
1927
+ elif filter_type == EvalFilterType.WORKFLOW:
1928
+ stmt = stmt.where(table.c.workflow_id.is_not(None))
1929
+
1930
+ # Get total count after applying filtering
1931
+ count_stmt = select(func.count()).select_from(stmt.alias())
1932
+ total_count = sess.execute(count_stmt).scalar()
1933
+
1934
+ # Sorting - apply default sort by created_at desc if no sort parameters provided
1935
+ if sort_by is None:
1936
+ stmt = stmt.order_by(table.c.created_at.desc())
1937
+ else:
1938
+ stmt = apply_sorting(stmt, table, sort_by, sort_order)
1939
+ # Paginating
1940
+ if limit is not None:
1941
+ stmt = stmt.limit(limit)
1942
+ if page is not None:
1943
+ stmt = stmt.offset((page - 1) * limit)
1944
+
1945
+ result = sess.execute(stmt).fetchall()
1946
+ if not result:
1947
+ return [] if deserialize else ([], 0)
1948
+
1949
+ eval_runs_raw = [row._mapping for row in result]
1950
+ if not deserialize:
1951
+ return eval_runs_raw, total_count
1952
+
1953
+ return [EvalRunRecord.model_validate(row) for row in eval_runs_raw]
1954
+
1955
+ except Exception as e:
1956
+ log_error(f"Exception getting eval runs: {e}")
1957
+ raise e
1958
+
1959
+ def rename_eval_run(
1960
+ self, eval_run_id: str, name: str, deserialize: Optional[bool] = True
1961
+ ) -> Optional[Union[EvalRunRecord, Dict[str, Any]]]:
1962
+ """Upsert the name of an eval run in the database, returning raw dictionary.
1963
+
1964
+ Args:
1965
+ eval_run_id (str): The ID of the eval run to update.
1966
+ name (str): The new name of the eval run.
1967
+ deserialize (Optional[bool]): Whether to serialize the eval run. Defaults to True.
1968
+
1969
+ Returns:
1970
+ Optional[Union[EvalRunRecord, Dict[str, Any]]]:
1971
+ - When deserialize=True: EvalRunRecord object
1972
+ - When deserialize=False: EvalRun dictionary
1973
+
1974
+ Raises:
1975
+ Exception: If an error occurs during update.
1976
+ """
1977
+ try:
1978
+ table = self._get_table(table_type="evals")
1979
+ if table is None:
1980
+ return None
1981
+
1982
+ with self.Session() as sess, sess.begin():
1983
+ stmt = (
1984
+ table.update().where(table.c.run_id == eval_run_id).values(name=name, updated_at=int(time.time()))
1985
+ )
1986
+ sess.execute(stmt)
1987
+
1988
+ eval_run_raw = self.get_eval_run(eval_run_id=eval_run_id, deserialize=deserialize)
1989
+
1990
+ log_debug(f"Renamed eval run with id '{eval_run_id}' to '{name}'")
1991
+
1992
+ if not eval_run_raw or not deserialize:
1993
+ return eval_run_raw
1994
+
1995
+ return EvalRunRecord.model_validate(eval_run_raw)
1996
+
1997
+ except Exception as e:
1998
+ log_error(f"Error renaming eval run {eval_run_id}: {e}")
1999
+ raise e
2000
+
2001
+ # -- Migrations --
2002
+
2003
+ def migrate_table_from_v1_to_v2(self, v1_db_schema: str, v1_table_name: str, v1_table_type: str):
2004
+ """Migrate all content in the given table to the right v2 table"""
2005
+
2006
+ from agno.db.migrations.v1_to_v2 import (
2007
+ get_all_table_content,
2008
+ parse_agent_sessions,
2009
+ parse_memories,
2010
+ parse_team_sessions,
2011
+ parse_workflow_sessions,
2012
+ )
2013
+
2014
+ # Get all content from the old table
2015
+ old_content: list[dict[str, Any]] = get_all_table_content(
2016
+ db=self,
2017
+ db_schema=v1_db_schema,
2018
+ table_name=v1_table_name,
2019
+ )
2020
+ if not old_content:
2021
+ log_info(f"No content to migrate from table {v1_table_name}")
2022
+ return
2023
+
2024
+ # Parse the content into the new format
2025
+ memories: List[UserMemory] = []
2026
+ sessions: Sequence[Union[AgentSession, TeamSession, WorkflowSession]] = []
2027
+ if v1_table_type == "agent_sessions":
2028
+ sessions = parse_agent_sessions(old_content)
2029
+ elif v1_table_type == "team_sessions":
2030
+ sessions = parse_team_sessions(old_content)
2031
+ elif v1_table_type == "workflow_sessions":
2032
+ sessions = parse_workflow_sessions(old_content)
2033
+ elif v1_table_type == "memories":
2034
+ memories = parse_memories(old_content)
2035
+ else:
2036
+ raise ValueError(f"Invalid table type: {v1_table_type}")
2037
+
2038
+ # Insert the new content into the new table
2039
+ if v1_table_type == "agent_sessions":
2040
+ for session in sessions:
2041
+ self.upsert_session(session)
2042
+ log_info(f"Migrated {len(sessions)} Agent sessions to table: {self.session_table_name}")
2043
+
2044
+ elif v1_table_type == "team_sessions":
2045
+ for session in sessions:
2046
+ self.upsert_session(session)
2047
+ log_info(f"Migrated {len(sessions)} Team sessions to table: {self.session_table_name}")
2048
+
2049
+ elif v1_table_type == "workflow_sessions":
2050
+ for session in sessions:
2051
+ self.upsert_session(session)
2052
+ log_info(f"Migrated {len(sessions)} Workflow sessions to table: {self.session_table_name}")
2053
+
2054
+ elif v1_table_type == "memories":
2055
+ for memory in memories:
2056
+ self.upsert_user_memory(memory)
2057
+ log_info(f"Migrated {len(memories)} memories to table: {self.memory_table}")
2058
+
2059
+ # -- Culture methods --
2060
+
2061
+ def clear_cultural_knowledge(self) -> None:
2062
+ """Delete all cultural artifacts from the database.
2063
+
2064
+ Raises:
2065
+ Exception: If an error occurs during deletion.
2066
+ """
2067
+ try:
2068
+ table = self._get_table(table_type="culture")
2069
+ if table is None:
2070
+ return
2071
+
2072
+ with self.Session() as sess, sess.begin():
2073
+ sess.execute(table.delete())
2074
+
2075
+ except Exception as e:
2076
+ from agno.utils.log import log_warning
2077
+
2078
+ log_warning(f"Exception deleting all cultural artifacts: {e}")
2079
+ raise e
2080
+
2081
+ def delete_cultural_knowledge(self, id: str) -> None:
2082
+ """Delete a cultural artifact from the database.
2083
+
2084
+ Args:
2085
+ id (str): The ID of the cultural artifact to delete.
2086
+
2087
+ Raises:
2088
+ Exception: If an error occurs during deletion.
2089
+ """
2090
+ try:
2091
+ table = self._get_table(table_type="culture")
2092
+ if table is None:
2093
+ return
2094
+
2095
+ with self.Session() as sess, sess.begin():
2096
+ delete_stmt = table.delete().where(table.c.id == id)
2097
+ result = sess.execute(delete_stmt)
2098
+
2099
+ success = result.rowcount > 0
2100
+ if success:
2101
+ log_debug(f"Successfully deleted cultural artifact id: {id}")
2102
+ else:
2103
+ log_debug(f"No cultural artifact found with id: {id}")
2104
+
2105
+ except Exception as e:
2106
+ log_error(f"Error deleting cultural artifact: {e}")
2107
+ raise e
2108
+
2109
+ def get_cultural_knowledge(
2110
+ self, id: str, deserialize: Optional[bool] = True
2111
+ ) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
2112
+ """Get a cultural artifact from the database.
2113
+
2114
+ Args:
2115
+ id (str): The ID of the cultural artifact to get.
2116
+ deserialize (Optional[bool]): Whether to serialize the cultural artifact. Defaults to True.
2117
+
2118
+ Returns:
2119
+ Optional[CulturalKnowledge]: The cultural artifact, or None if it doesn't exist.
2120
+
2121
+ Raises:
2122
+ Exception: If an error occurs during retrieval.
2123
+ """
2124
+ try:
2125
+ table = self._get_table(table_type="culture")
2126
+ if table is None:
2127
+ return None
2128
+
2129
+ with self.Session() as sess, sess.begin():
2130
+ stmt = select(table).where(table.c.id == id)
2131
+ result = sess.execute(stmt).fetchone()
2132
+ if result is None:
2133
+ return None
2134
+
2135
+ db_row = dict(result._mapping)
2136
+ if not db_row or not deserialize:
2137
+ return db_row
2138
+
2139
+ return deserialize_cultural_knowledge_from_db(db_row)
2140
+
2141
+ except Exception as e:
2142
+ log_error(f"Exception reading from cultural artifacts table: {e}")
2143
+ raise e
2144
+
2145
+ def get_all_cultural_knowledge(
2146
+ self,
2147
+ name: Optional[str] = None,
2148
+ agent_id: Optional[str] = None,
2149
+ team_id: Optional[str] = None,
2150
+ limit: Optional[int] = None,
2151
+ page: Optional[int] = None,
2152
+ sort_by: Optional[str] = None,
2153
+ sort_order: Optional[str] = None,
2154
+ deserialize: Optional[bool] = True,
2155
+ ) -> Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
2156
+ """Get all cultural artifacts from the database as CulturalNotion objects.
2157
+
2158
+ Args:
2159
+ name (Optional[str]): The name of the cultural artifact to filter by.
2160
+ agent_id (Optional[str]): The ID of the agent to filter by.
2161
+ team_id (Optional[str]): The ID of the team to filter by.
2162
+ limit (Optional[int]): The maximum number of cultural artifacts to return.
2163
+ page (Optional[int]): The page number.
2164
+ sort_by (Optional[str]): The column to sort by.
2165
+ sort_order (Optional[str]): The order to sort by.
2166
+ deserialize (Optional[bool]): Whether to serialize the cultural artifacts. Defaults to True.
2167
+
2168
+ Returns:
2169
+ Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
2170
+ - When deserialize=True: List of CulturalNotion objects
2171
+ - When deserialize=False: List of CulturalNotion dictionaries and total count
2172
+
2173
+ Raises:
2174
+ Exception: If an error occurs during retrieval.
2175
+ """
2176
+ try:
2177
+ table = self._get_table(table_type="culture")
2178
+ if table is None:
2179
+ return [] if deserialize else ([], 0)
2180
+
2181
+ with self.Session() as sess, sess.begin():
2182
+ stmt = select(table)
2183
+
2184
+ # Filtering
2185
+ if name is not None:
2186
+ stmt = stmt.where(table.c.name == name)
2187
+ if agent_id is not None:
2188
+ stmt = stmt.where(table.c.agent_id == agent_id)
2189
+ if team_id is not None:
2190
+ stmt = stmt.where(table.c.team_id == team_id)
2191
+
2192
+ # Get total count after applying filtering
2193
+ count_stmt = select(func.count()).select_from(stmt.alias())
2194
+ total_count = sess.execute(count_stmt).scalar()
2195
+
2196
+ # Sorting
2197
+ stmt = apply_sorting(stmt, table, sort_by, sort_order)
2198
+ # Paginating
2199
+ if limit is not None:
2200
+ stmt = stmt.limit(limit)
2201
+ if page is not None:
2202
+ stmt = stmt.offset((page - 1) * limit)
2203
+
2204
+ result = sess.execute(stmt).fetchall()
2205
+ if not result:
2206
+ return [] if deserialize else ([], 0)
2207
+
2208
+ db_rows = [dict(record._mapping) for record in result]
2209
+
2210
+ if not deserialize:
2211
+ return db_rows, total_count
2212
+
2213
+ return [deserialize_cultural_knowledge_from_db(row) for row in db_rows]
2214
+
2215
+ except Exception as e:
2216
+ log_error(f"Error reading from cultural artifacts table: {e}")
2217
+ raise e
2218
+
2219
+ def upsert_cultural_knowledge(
2220
+ self, cultural_knowledge: CulturalKnowledge, deserialize: Optional[bool] = True
2221
+ ) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
2222
+ """Upsert a cultural artifact into the database.
2223
+
2224
+ Args:
2225
+ cultural_knowledge (CulturalKnowledge): The cultural artifact to upsert.
2226
+ deserialize (Optional[bool]): Whether to serialize the cultural artifact. Defaults to True.
2227
+
2228
+ Returns:
2229
+ Optional[Union[CulturalNotion, Dict[str, Any]]]:
2230
+ - When deserialize=True: CulturalNotion object
2231
+ - When deserialize=False: CulturalNotion dictionary
2232
+
2233
+ Raises:
2234
+ Exception: If an error occurs during upsert.
2235
+ """
2236
+ try:
2237
+ table = self._get_table(table_type="culture", create_table_if_not_found=True)
2238
+ if table is None:
2239
+ return None
2240
+
2241
+ if cultural_knowledge.id is None:
2242
+ cultural_knowledge.id = str(uuid4())
2243
+
2244
+ # Serialize content, categories, and notes into a JSON string for DB storage (SQLite requires strings)
2245
+ content_json_str = serialize_cultural_knowledge_for_db(cultural_knowledge)
2246
+
2247
+ with self.Session() as sess, sess.begin():
2248
+ stmt = sqlite.insert(table).values(
2249
+ id=cultural_knowledge.id,
2250
+ name=cultural_knowledge.name,
2251
+ summary=cultural_knowledge.summary,
2252
+ content=content_json_str,
2253
+ metadata=cultural_knowledge.metadata,
2254
+ input=cultural_knowledge.input,
2255
+ created_at=cultural_knowledge.created_at,
2256
+ updated_at=int(time.time()),
2257
+ agent_id=cultural_knowledge.agent_id,
2258
+ team_id=cultural_knowledge.team_id,
2259
+ )
2260
+ stmt = stmt.on_conflict_do_update( # type: ignore
2261
+ index_elements=["id"],
2262
+ set_=dict(
2263
+ name=cultural_knowledge.name,
2264
+ summary=cultural_knowledge.summary,
2265
+ content=content_json_str,
2266
+ metadata=cultural_knowledge.metadata,
2267
+ input=cultural_knowledge.input,
2268
+ updated_at=int(time.time()),
2269
+ agent_id=cultural_knowledge.agent_id,
2270
+ team_id=cultural_knowledge.team_id,
2271
+ ),
2272
+ ).returning(table)
2273
+
2274
+ result = sess.execute(stmt)
2275
+ row = result.fetchone()
2276
+
2277
+ if row is None:
2278
+ return None
2279
+
2280
+ db_row: Dict[str, Any] = dict(row._mapping)
2281
+ if not db_row or not deserialize:
2282
+ return db_row
2283
+
2284
+ return deserialize_cultural_knowledge_from_db(db_row)
2285
+
2286
+ except Exception as e:
2287
+ log_error(f"Error upserting cultural knowledge: {e}")
2288
+ raise e