agno 2.2.13__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (575)
  1. agno/__init__.py +8 -0
  2. agno/agent/__init__.py +51 -0
  3. agno/agent/agent.py +10405 -0
  4. agno/api/__init__.py +0 -0
  5. agno/api/agent.py +28 -0
  6. agno/api/api.py +40 -0
  7. agno/api/evals.py +22 -0
  8. agno/api/os.py +17 -0
  9. agno/api/routes.py +13 -0
  10. agno/api/schemas/__init__.py +9 -0
  11. agno/api/schemas/agent.py +16 -0
  12. agno/api/schemas/evals.py +16 -0
  13. agno/api/schemas/os.py +14 -0
  14. agno/api/schemas/response.py +6 -0
  15. agno/api/schemas/team.py +16 -0
  16. agno/api/schemas/utils.py +21 -0
  17. agno/api/schemas/workflows.py +16 -0
  18. agno/api/settings.py +53 -0
  19. agno/api/team.py +30 -0
  20. agno/api/workflow.py +28 -0
  21. agno/cloud/aws/base.py +214 -0
  22. agno/cloud/aws/s3/__init__.py +2 -0
  23. agno/cloud/aws/s3/api_client.py +43 -0
  24. agno/cloud/aws/s3/bucket.py +195 -0
  25. agno/cloud/aws/s3/object.py +57 -0
  26. agno/culture/__init__.py +3 -0
  27. agno/culture/manager.py +956 -0
  28. agno/db/__init__.py +24 -0
  29. agno/db/async_postgres/__init__.py +3 -0
  30. agno/db/base.py +598 -0
  31. agno/db/dynamo/__init__.py +3 -0
  32. agno/db/dynamo/dynamo.py +2042 -0
  33. agno/db/dynamo/schemas.py +314 -0
  34. agno/db/dynamo/utils.py +743 -0
  35. agno/db/firestore/__init__.py +3 -0
  36. agno/db/firestore/firestore.py +1795 -0
  37. agno/db/firestore/schemas.py +140 -0
  38. agno/db/firestore/utils.py +376 -0
  39. agno/db/gcs_json/__init__.py +3 -0
  40. agno/db/gcs_json/gcs_json_db.py +1335 -0
  41. agno/db/gcs_json/utils.py +228 -0
  42. agno/db/in_memory/__init__.py +3 -0
  43. agno/db/in_memory/in_memory_db.py +1160 -0
  44. agno/db/in_memory/utils.py +230 -0
  45. agno/db/json/__init__.py +3 -0
  46. agno/db/json/json_db.py +1328 -0
  47. agno/db/json/utils.py +230 -0
  48. agno/db/migrations/__init__.py +0 -0
  49. agno/db/migrations/v1_to_v2.py +635 -0
  50. agno/db/mongo/__init__.py +17 -0
  51. agno/db/mongo/async_mongo.py +2026 -0
  52. agno/db/mongo/mongo.py +1982 -0
  53. agno/db/mongo/schemas.py +87 -0
  54. agno/db/mongo/utils.py +259 -0
  55. agno/db/mysql/__init__.py +3 -0
  56. agno/db/mysql/mysql.py +2308 -0
  57. agno/db/mysql/schemas.py +138 -0
  58. agno/db/mysql/utils.py +355 -0
  59. agno/db/postgres/__init__.py +4 -0
  60. agno/db/postgres/async_postgres.py +1927 -0
  61. agno/db/postgres/postgres.py +2260 -0
  62. agno/db/postgres/schemas.py +139 -0
  63. agno/db/postgres/utils.py +442 -0
  64. agno/db/redis/__init__.py +3 -0
  65. agno/db/redis/redis.py +1660 -0
  66. agno/db/redis/schemas.py +123 -0
  67. agno/db/redis/utils.py +346 -0
  68. agno/db/schemas/__init__.py +4 -0
  69. agno/db/schemas/culture.py +120 -0
  70. agno/db/schemas/evals.py +33 -0
  71. agno/db/schemas/knowledge.py +40 -0
  72. agno/db/schemas/memory.py +46 -0
  73. agno/db/schemas/metrics.py +0 -0
  74. agno/db/singlestore/__init__.py +3 -0
  75. agno/db/singlestore/schemas.py +130 -0
  76. agno/db/singlestore/singlestore.py +2272 -0
  77. agno/db/singlestore/utils.py +384 -0
  78. agno/db/sqlite/__init__.py +4 -0
  79. agno/db/sqlite/async_sqlite.py +2293 -0
  80. agno/db/sqlite/schemas.py +133 -0
  81. agno/db/sqlite/sqlite.py +2288 -0
  82. agno/db/sqlite/utils.py +431 -0
  83. agno/db/surrealdb/__init__.py +3 -0
  84. agno/db/surrealdb/metrics.py +292 -0
  85. agno/db/surrealdb/models.py +309 -0
  86. agno/db/surrealdb/queries.py +71 -0
  87. agno/db/surrealdb/surrealdb.py +1353 -0
  88. agno/db/surrealdb/utils.py +147 -0
  89. agno/db/utils.py +116 -0
  90. agno/debug.py +18 -0
  91. agno/eval/__init__.py +14 -0
  92. agno/eval/accuracy.py +834 -0
  93. agno/eval/performance.py +773 -0
  94. agno/eval/reliability.py +306 -0
  95. agno/eval/utils.py +119 -0
  96. agno/exceptions.py +161 -0
  97. agno/filters.py +354 -0
  98. agno/guardrails/__init__.py +6 -0
  99. agno/guardrails/base.py +19 -0
  100. agno/guardrails/openai.py +144 -0
  101. agno/guardrails/pii.py +94 -0
  102. agno/guardrails/prompt_injection.py +52 -0
  103. agno/integrations/__init__.py +0 -0
  104. agno/integrations/discord/__init__.py +3 -0
  105. agno/integrations/discord/client.py +203 -0
  106. agno/knowledge/__init__.py +5 -0
  107. agno/knowledge/chunking/__init__.py +0 -0
  108. agno/knowledge/chunking/agentic.py +79 -0
  109. agno/knowledge/chunking/document.py +91 -0
  110. agno/knowledge/chunking/fixed.py +57 -0
  111. agno/knowledge/chunking/markdown.py +151 -0
  112. agno/knowledge/chunking/recursive.py +63 -0
  113. agno/knowledge/chunking/row.py +39 -0
  114. agno/knowledge/chunking/semantic.py +86 -0
  115. agno/knowledge/chunking/strategy.py +165 -0
  116. agno/knowledge/content.py +74 -0
  117. agno/knowledge/document/__init__.py +5 -0
  118. agno/knowledge/document/base.py +58 -0
  119. agno/knowledge/embedder/__init__.py +5 -0
  120. agno/knowledge/embedder/aws_bedrock.py +343 -0
  121. agno/knowledge/embedder/azure_openai.py +210 -0
  122. agno/knowledge/embedder/base.py +23 -0
  123. agno/knowledge/embedder/cohere.py +323 -0
  124. agno/knowledge/embedder/fastembed.py +62 -0
  125. agno/knowledge/embedder/fireworks.py +13 -0
  126. agno/knowledge/embedder/google.py +258 -0
  127. agno/knowledge/embedder/huggingface.py +94 -0
  128. agno/knowledge/embedder/jina.py +182 -0
  129. agno/knowledge/embedder/langdb.py +22 -0
  130. agno/knowledge/embedder/mistral.py +206 -0
  131. agno/knowledge/embedder/nebius.py +13 -0
  132. agno/knowledge/embedder/ollama.py +154 -0
  133. agno/knowledge/embedder/openai.py +195 -0
  134. agno/knowledge/embedder/sentence_transformer.py +63 -0
  135. agno/knowledge/embedder/together.py +13 -0
  136. agno/knowledge/embedder/vllm.py +262 -0
  137. agno/knowledge/embedder/voyageai.py +165 -0
  138. agno/knowledge/knowledge.py +1988 -0
  139. agno/knowledge/reader/__init__.py +7 -0
  140. agno/knowledge/reader/arxiv_reader.py +81 -0
  141. agno/knowledge/reader/base.py +95 -0
  142. agno/knowledge/reader/csv_reader.py +166 -0
  143. agno/knowledge/reader/docx_reader.py +82 -0
  144. agno/knowledge/reader/field_labeled_csv_reader.py +292 -0
  145. agno/knowledge/reader/firecrawl_reader.py +201 -0
  146. agno/knowledge/reader/json_reader.py +87 -0
  147. agno/knowledge/reader/markdown_reader.py +137 -0
  148. agno/knowledge/reader/pdf_reader.py +431 -0
  149. agno/knowledge/reader/pptx_reader.py +101 -0
  150. agno/knowledge/reader/reader_factory.py +313 -0
  151. agno/knowledge/reader/s3_reader.py +89 -0
  152. agno/knowledge/reader/tavily_reader.py +194 -0
  153. agno/knowledge/reader/text_reader.py +115 -0
  154. agno/knowledge/reader/web_search_reader.py +372 -0
  155. agno/knowledge/reader/website_reader.py +455 -0
  156. agno/knowledge/reader/wikipedia_reader.py +59 -0
  157. agno/knowledge/reader/youtube_reader.py +78 -0
  158. agno/knowledge/remote_content/__init__.py +0 -0
  159. agno/knowledge/remote_content/remote_content.py +88 -0
  160. agno/knowledge/reranker/__init__.py +3 -0
  161. agno/knowledge/reranker/base.py +14 -0
  162. agno/knowledge/reranker/cohere.py +64 -0
  163. agno/knowledge/reranker/infinity.py +195 -0
  164. agno/knowledge/reranker/sentence_transformer.py +54 -0
  165. agno/knowledge/types.py +39 -0
  166. agno/knowledge/utils.py +189 -0
  167. agno/media.py +462 -0
  168. agno/memory/__init__.py +3 -0
  169. agno/memory/manager.py +1327 -0
  170. agno/models/__init__.py +0 -0
  171. agno/models/aimlapi/__init__.py +5 -0
  172. agno/models/aimlapi/aimlapi.py +45 -0
  173. agno/models/anthropic/__init__.py +5 -0
  174. agno/models/anthropic/claude.py +757 -0
  175. agno/models/aws/__init__.py +15 -0
  176. agno/models/aws/bedrock.py +701 -0
  177. agno/models/aws/claude.py +378 -0
  178. agno/models/azure/__init__.py +18 -0
  179. agno/models/azure/ai_foundry.py +485 -0
  180. agno/models/azure/openai_chat.py +131 -0
  181. agno/models/base.py +2175 -0
  182. agno/models/cerebras/__init__.py +12 -0
  183. agno/models/cerebras/cerebras.py +501 -0
  184. agno/models/cerebras/cerebras_openai.py +112 -0
  185. agno/models/cohere/__init__.py +5 -0
  186. agno/models/cohere/chat.py +389 -0
  187. agno/models/cometapi/__init__.py +5 -0
  188. agno/models/cometapi/cometapi.py +57 -0
  189. agno/models/dashscope/__init__.py +5 -0
  190. agno/models/dashscope/dashscope.py +91 -0
  191. agno/models/deepinfra/__init__.py +5 -0
  192. agno/models/deepinfra/deepinfra.py +28 -0
  193. agno/models/deepseek/__init__.py +5 -0
  194. agno/models/deepseek/deepseek.py +61 -0
  195. agno/models/defaults.py +1 -0
  196. agno/models/fireworks/__init__.py +5 -0
  197. agno/models/fireworks/fireworks.py +26 -0
  198. agno/models/google/__init__.py +5 -0
  199. agno/models/google/gemini.py +1085 -0
  200. agno/models/groq/__init__.py +5 -0
  201. agno/models/groq/groq.py +556 -0
  202. agno/models/huggingface/__init__.py +5 -0
  203. agno/models/huggingface/huggingface.py +491 -0
  204. agno/models/ibm/__init__.py +5 -0
  205. agno/models/ibm/watsonx.py +422 -0
  206. agno/models/internlm/__init__.py +3 -0
  207. agno/models/internlm/internlm.py +26 -0
  208. agno/models/langdb/__init__.py +1 -0
  209. agno/models/langdb/langdb.py +48 -0
  210. agno/models/litellm/__init__.py +14 -0
  211. agno/models/litellm/chat.py +468 -0
  212. agno/models/litellm/litellm_openai.py +25 -0
  213. agno/models/llama_cpp/__init__.py +5 -0
  214. agno/models/llama_cpp/llama_cpp.py +22 -0
  215. agno/models/lmstudio/__init__.py +5 -0
  216. agno/models/lmstudio/lmstudio.py +25 -0
  217. agno/models/message.py +434 -0
  218. agno/models/meta/__init__.py +12 -0
  219. agno/models/meta/llama.py +475 -0
  220. agno/models/meta/llama_openai.py +78 -0
  221. agno/models/metrics.py +120 -0
  222. agno/models/mistral/__init__.py +5 -0
  223. agno/models/mistral/mistral.py +432 -0
  224. agno/models/nebius/__init__.py +3 -0
  225. agno/models/nebius/nebius.py +54 -0
  226. agno/models/nexus/__init__.py +3 -0
  227. agno/models/nexus/nexus.py +22 -0
  228. agno/models/nvidia/__init__.py +5 -0
  229. agno/models/nvidia/nvidia.py +28 -0
  230. agno/models/ollama/__init__.py +5 -0
  231. agno/models/ollama/chat.py +441 -0
  232. agno/models/openai/__init__.py +9 -0
  233. agno/models/openai/chat.py +883 -0
  234. agno/models/openai/like.py +27 -0
  235. agno/models/openai/responses.py +1050 -0
  236. agno/models/openrouter/__init__.py +5 -0
  237. agno/models/openrouter/openrouter.py +66 -0
  238. agno/models/perplexity/__init__.py +5 -0
  239. agno/models/perplexity/perplexity.py +187 -0
  240. agno/models/portkey/__init__.py +3 -0
  241. agno/models/portkey/portkey.py +81 -0
  242. agno/models/requesty/__init__.py +5 -0
  243. agno/models/requesty/requesty.py +52 -0
  244. agno/models/response.py +199 -0
  245. agno/models/sambanova/__init__.py +5 -0
  246. agno/models/sambanova/sambanova.py +28 -0
  247. agno/models/siliconflow/__init__.py +5 -0
  248. agno/models/siliconflow/siliconflow.py +25 -0
  249. agno/models/together/__init__.py +5 -0
  250. agno/models/together/together.py +25 -0
  251. agno/models/utils.py +266 -0
  252. agno/models/vercel/__init__.py +3 -0
  253. agno/models/vercel/v0.py +26 -0
  254. agno/models/vertexai/__init__.py +0 -0
  255. agno/models/vertexai/claude.py +70 -0
  256. agno/models/vllm/__init__.py +3 -0
  257. agno/models/vllm/vllm.py +78 -0
  258. agno/models/xai/__init__.py +3 -0
  259. agno/models/xai/xai.py +113 -0
  260. agno/os/__init__.py +3 -0
  261. agno/os/app.py +876 -0
  262. agno/os/auth.py +57 -0
  263. agno/os/config.py +104 -0
  264. agno/os/interfaces/__init__.py +1 -0
  265. agno/os/interfaces/a2a/__init__.py +3 -0
  266. agno/os/interfaces/a2a/a2a.py +42 -0
  267. agno/os/interfaces/a2a/router.py +250 -0
  268. agno/os/interfaces/a2a/utils.py +924 -0
  269. agno/os/interfaces/agui/__init__.py +3 -0
  270. agno/os/interfaces/agui/agui.py +47 -0
  271. agno/os/interfaces/agui/router.py +144 -0
  272. agno/os/interfaces/agui/utils.py +534 -0
  273. agno/os/interfaces/base.py +25 -0
  274. agno/os/interfaces/slack/__init__.py +3 -0
  275. agno/os/interfaces/slack/router.py +148 -0
  276. agno/os/interfaces/slack/security.py +30 -0
  277. agno/os/interfaces/slack/slack.py +47 -0
  278. agno/os/interfaces/whatsapp/__init__.py +3 -0
  279. agno/os/interfaces/whatsapp/router.py +211 -0
  280. agno/os/interfaces/whatsapp/security.py +53 -0
  281. agno/os/interfaces/whatsapp/whatsapp.py +36 -0
  282. agno/os/mcp.py +292 -0
  283. agno/os/middleware/__init__.py +7 -0
  284. agno/os/middleware/jwt.py +233 -0
  285. agno/os/router.py +1763 -0
  286. agno/os/routers/__init__.py +3 -0
  287. agno/os/routers/evals/__init__.py +3 -0
  288. agno/os/routers/evals/evals.py +430 -0
  289. agno/os/routers/evals/schemas.py +142 -0
  290. agno/os/routers/evals/utils.py +162 -0
  291. agno/os/routers/health.py +31 -0
  292. agno/os/routers/home.py +52 -0
  293. agno/os/routers/knowledge/__init__.py +3 -0
  294. agno/os/routers/knowledge/knowledge.py +997 -0
  295. agno/os/routers/knowledge/schemas.py +178 -0
  296. agno/os/routers/memory/__init__.py +3 -0
  297. agno/os/routers/memory/memory.py +515 -0
  298. agno/os/routers/memory/schemas.py +62 -0
  299. agno/os/routers/metrics/__init__.py +3 -0
  300. agno/os/routers/metrics/metrics.py +190 -0
  301. agno/os/routers/metrics/schemas.py +47 -0
  302. agno/os/routers/session/__init__.py +3 -0
  303. agno/os/routers/session/session.py +997 -0
  304. agno/os/schema.py +1055 -0
  305. agno/os/settings.py +43 -0
  306. agno/os/utils.py +630 -0
  307. agno/py.typed +0 -0
  308. agno/reasoning/__init__.py +0 -0
  309. agno/reasoning/anthropic.py +80 -0
  310. agno/reasoning/azure_ai_foundry.py +67 -0
  311. agno/reasoning/deepseek.py +63 -0
  312. agno/reasoning/default.py +97 -0
  313. agno/reasoning/gemini.py +73 -0
  314. agno/reasoning/groq.py +71 -0
  315. agno/reasoning/helpers.py +63 -0
  316. agno/reasoning/ollama.py +67 -0
  317. agno/reasoning/openai.py +86 -0
  318. agno/reasoning/step.py +31 -0
  319. agno/reasoning/vertexai.py +76 -0
  320. agno/run/__init__.py +6 -0
  321. agno/run/agent.py +787 -0
  322. agno/run/base.py +229 -0
  323. agno/run/cancel.py +81 -0
  324. agno/run/messages.py +32 -0
  325. agno/run/team.py +753 -0
  326. agno/run/workflow.py +708 -0
  327. agno/session/__init__.py +10 -0
  328. agno/session/agent.py +295 -0
  329. agno/session/summary.py +265 -0
  330. agno/session/team.py +392 -0
  331. agno/session/workflow.py +205 -0
  332. agno/team/__init__.py +37 -0
  333. agno/team/team.py +8793 -0
  334. agno/tools/__init__.py +10 -0
  335. agno/tools/agentql.py +120 -0
  336. agno/tools/airflow.py +69 -0
  337. agno/tools/api.py +122 -0
  338. agno/tools/apify.py +314 -0
  339. agno/tools/arxiv.py +127 -0
  340. agno/tools/aws_lambda.py +53 -0
  341. agno/tools/aws_ses.py +66 -0
  342. agno/tools/baidusearch.py +89 -0
  343. agno/tools/bitbucket.py +292 -0
  344. agno/tools/brandfetch.py +213 -0
  345. agno/tools/bravesearch.py +106 -0
  346. agno/tools/brightdata.py +367 -0
  347. agno/tools/browserbase.py +209 -0
  348. agno/tools/calcom.py +255 -0
  349. agno/tools/calculator.py +151 -0
  350. agno/tools/cartesia.py +187 -0
  351. agno/tools/clickup.py +244 -0
  352. agno/tools/confluence.py +240 -0
  353. agno/tools/crawl4ai.py +158 -0
  354. agno/tools/csv_toolkit.py +185 -0
  355. agno/tools/dalle.py +110 -0
  356. agno/tools/daytona.py +475 -0
  357. agno/tools/decorator.py +262 -0
  358. agno/tools/desi_vocal.py +108 -0
  359. agno/tools/discord.py +161 -0
  360. agno/tools/docker.py +716 -0
  361. agno/tools/duckdb.py +379 -0
  362. agno/tools/duckduckgo.py +91 -0
  363. agno/tools/e2b.py +703 -0
  364. agno/tools/eleven_labs.py +196 -0
  365. agno/tools/email.py +67 -0
  366. agno/tools/evm.py +129 -0
  367. agno/tools/exa.py +396 -0
  368. agno/tools/fal.py +127 -0
  369. agno/tools/file.py +240 -0
  370. agno/tools/file_generation.py +350 -0
  371. agno/tools/financial_datasets.py +288 -0
  372. agno/tools/firecrawl.py +143 -0
  373. agno/tools/function.py +1187 -0
  374. agno/tools/giphy.py +93 -0
  375. agno/tools/github.py +1760 -0
  376. agno/tools/gmail.py +922 -0
  377. agno/tools/google_bigquery.py +117 -0
  378. agno/tools/google_drive.py +270 -0
  379. agno/tools/google_maps.py +253 -0
  380. agno/tools/googlecalendar.py +674 -0
  381. agno/tools/googlesearch.py +98 -0
  382. agno/tools/googlesheets.py +377 -0
  383. agno/tools/hackernews.py +77 -0
  384. agno/tools/jina.py +101 -0
  385. agno/tools/jira.py +170 -0
  386. agno/tools/knowledge.py +218 -0
  387. agno/tools/linear.py +426 -0
  388. agno/tools/linkup.py +58 -0
  389. agno/tools/local_file_system.py +90 -0
  390. agno/tools/lumalab.py +183 -0
  391. agno/tools/mcp/__init__.py +10 -0
  392. agno/tools/mcp/mcp.py +331 -0
  393. agno/tools/mcp/multi_mcp.py +347 -0
  394. agno/tools/mcp/params.py +24 -0
  395. agno/tools/mcp_toolbox.py +284 -0
  396. agno/tools/mem0.py +193 -0
  397. agno/tools/memori.py +339 -0
  398. agno/tools/memory.py +419 -0
  399. agno/tools/mlx_transcribe.py +139 -0
  400. agno/tools/models/__init__.py +0 -0
  401. agno/tools/models/azure_openai.py +190 -0
  402. agno/tools/models/gemini.py +203 -0
  403. agno/tools/models/groq.py +158 -0
  404. agno/tools/models/morph.py +186 -0
  405. agno/tools/models/nebius.py +124 -0
  406. agno/tools/models_labs.py +195 -0
  407. agno/tools/moviepy_video.py +349 -0
  408. agno/tools/neo4j.py +134 -0
  409. agno/tools/newspaper.py +46 -0
  410. agno/tools/newspaper4k.py +93 -0
  411. agno/tools/notion.py +204 -0
  412. agno/tools/openai.py +202 -0
  413. agno/tools/openbb.py +160 -0
  414. agno/tools/opencv.py +321 -0
  415. agno/tools/openweather.py +233 -0
  416. agno/tools/oxylabs.py +385 -0
  417. agno/tools/pandas.py +102 -0
  418. agno/tools/parallel.py +314 -0
  419. agno/tools/postgres.py +257 -0
  420. agno/tools/pubmed.py +188 -0
  421. agno/tools/python.py +205 -0
  422. agno/tools/reasoning.py +283 -0
  423. agno/tools/reddit.py +467 -0
  424. agno/tools/replicate.py +117 -0
  425. agno/tools/resend.py +62 -0
  426. agno/tools/scrapegraph.py +222 -0
  427. agno/tools/searxng.py +152 -0
  428. agno/tools/serpapi.py +116 -0
  429. agno/tools/serper.py +255 -0
  430. agno/tools/shell.py +53 -0
  431. agno/tools/slack.py +136 -0
  432. agno/tools/sleep.py +20 -0
  433. agno/tools/spider.py +116 -0
  434. agno/tools/sql.py +154 -0
  435. agno/tools/streamlit/__init__.py +0 -0
  436. agno/tools/streamlit/components.py +113 -0
  437. agno/tools/tavily.py +254 -0
  438. agno/tools/telegram.py +48 -0
  439. agno/tools/todoist.py +218 -0
  440. agno/tools/tool_registry.py +1 -0
  441. agno/tools/toolkit.py +146 -0
  442. agno/tools/trafilatura.py +388 -0
  443. agno/tools/trello.py +274 -0
  444. agno/tools/twilio.py +186 -0
  445. agno/tools/user_control_flow.py +78 -0
  446. agno/tools/valyu.py +228 -0
  447. agno/tools/visualization.py +467 -0
  448. agno/tools/webbrowser.py +28 -0
  449. agno/tools/webex.py +76 -0
  450. agno/tools/website.py +54 -0
  451. agno/tools/webtools.py +45 -0
  452. agno/tools/whatsapp.py +286 -0
  453. agno/tools/wikipedia.py +63 -0
  454. agno/tools/workflow.py +278 -0
  455. agno/tools/x.py +335 -0
  456. agno/tools/yfinance.py +257 -0
  457. agno/tools/youtube.py +184 -0
  458. agno/tools/zendesk.py +82 -0
  459. agno/tools/zep.py +454 -0
  460. agno/tools/zoom.py +382 -0
  461. agno/utils/__init__.py +0 -0
  462. agno/utils/agent.py +820 -0
  463. agno/utils/audio.py +49 -0
  464. agno/utils/certs.py +27 -0
  465. agno/utils/code_execution.py +11 -0
  466. agno/utils/common.py +132 -0
  467. agno/utils/dttm.py +13 -0
  468. agno/utils/enum.py +22 -0
  469. agno/utils/env.py +11 -0
  470. agno/utils/events.py +696 -0
  471. agno/utils/format_str.py +16 -0
  472. agno/utils/functions.py +166 -0
  473. agno/utils/gemini.py +426 -0
  474. agno/utils/hooks.py +57 -0
  475. agno/utils/http.py +74 -0
  476. agno/utils/json_schema.py +234 -0
  477. agno/utils/knowledge.py +36 -0
  478. agno/utils/location.py +19 -0
  479. agno/utils/log.py +255 -0
  480. agno/utils/mcp.py +214 -0
  481. agno/utils/media.py +352 -0
  482. agno/utils/merge_dict.py +41 -0
  483. agno/utils/message.py +118 -0
  484. agno/utils/models/__init__.py +0 -0
  485. agno/utils/models/ai_foundry.py +43 -0
  486. agno/utils/models/claude.py +358 -0
  487. agno/utils/models/cohere.py +87 -0
  488. agno/utils/models/llama.py +78 -0
  489. agno/utils/models/mistral.py +98 -0
  490. agno/utils/models/openai_responses.py +140 -0
  491. agno/utils/models/schema_utils.py +153 -0
  492. agno/utils/models/watsonx.py +41 -0
  493. agno/utils/openai.py +257 -0
  494. agno/utils/pickle.py +32 -0
  495. agno/utils/pprint.py +178 -0
  496. agno/utils/print_response/__init__.py +0 -0
  497. agno/utils/print_response/agent.py +842 -0
  498. agno/utils/print_response/team.py +1724 -0
  499. agno/utils/print_response/workflow.py +1668 -0
  500. agno/utils/prompts.py +111 -0
  501. agno/utils/reasoning.py +108 -0
  502. agno/utils/response.py +163 -0
  503. agno/utils/response_iterator.py +17 -0
  504. agno/utils/safe_formatter.py +24 -0
  505. agno/utils/serialize.py +32 -0
  506. agno/utils/shell.py +22 -0
  507. agno/utils/streamlit.py +487 -0
  508. agno/utils/string.py +231 -0
  509. agno/utils/team.py +139 -0
  510. agno/utils/timer.py +41 -0
  511. agno/utils/tools.py +102 -0
  512. agno/utils/web.py +23 -0
  513. agno/utils/whatsapp.py +305 -0
  514. agno/utils/yaml_io.py +25 -0
  515. agno/vectordb/__init__.py +3 -0
  516. agno/vectordb/base.py +127 -0
  517. agno/vectordb/cassandra/__init__.py +5 -0
  518. agno/vectordb/cassandra/cassandra.py +501 -0
  519. agno/vectordb/cassandra/extra_param_mixin.py +11 -0
  520. agno/vectordb/cassandra/index.py +13 -0
  521. agno/vectordb/chroma/__init__.py +5 -0
  522. agno/vectordb/chroma/chromadb.py +929 -0
  523. agno/vectordb/clickhouse/__init__.py +9 -0
  524. agno/vectordb/clickhouse/clickhousedb.py +835 -0
  525. agno/vectordb/clickhouse/index.py +9 -0
  526. agno/vectordb/couchbase/__init__.py +3 -0
  527. agno/vectordb/couchbase/couchbase.py +1442 -0
  528. agno/vectordb/distance.py +7 -0
  529. agno/vectordb/lancedb/__init__.py +6 -0
  530. agno/vectordb/lancedb/lance_db.py +995 -0
  531. agno/vectordb/langchaindb/__init__.py +5 -0
  532. agno/vectordb/langchaindb/langchaindb.py +163 -0
  533. agno/vectordb/lightrag/__init__.py +5 -0
  534. agno/vectordb/lightrag/lightrag.py +388 -0
  535. agno/vectordb/llamaindex/__init__.py +3 -0
  536. agno/vectordb/llamaindex/llamaindexdb.py +166 -0
  537. agno/vectordb/milvus/__init__.py +4 -0
  538. agno/vectordb/milvus/milvus.py +1182 -0
  539. agno/vectordb/mongodb/__init__.py +9 -0
  540. agno/vectordb/mongodb/mongodb.py +1417 -0
  541. agno/vectordb/pgvector/__init__.py +12 -0
  542. agno/vectordb/pgvector/index.py +23 -0
  543. agno/vectordb/pgvector/pgvector.py +1462 -0
  544. agno/vectordb/pineconedb/__init__.py +5 -0
  545. agno/vectordb/pineconedb/pineconedb.py +747 -0
  546. agno/vectordb/qdrant/__init__.py +5 -0
  547. agno/vectordb/qdrant/qdrant.py +1134 -0
  548. agno/vectordb/redis/__init__.py +9 -0
  549. agno/vectordb/redis/redisdb.py +694 -0
  550. agno/vectordb/search.py +7 -0
  551. agno/vectordb/singlestore/__init__.py +10 -0
  552. agno/vectordb/singlestore/index.py +41 -0
  553. agno/vectordb/singlestore/singlestore.py +763 -0
  554. agno/vectordb/surrealdb/__init__.py +3 -0
  555. agno/vectordb/surrealdb/surrealdb.py +699 -0
  556. agno/vectordb/upstashdb/__init__.py +5 -0
  557. agno/vectordb/upstashdb/upstashdb.py +718 -0
  558. agno/vectordb/weaviate/__init__.py +8 -0
  559. agno/vectordb/weaviate/index.py +15 -0
  560. agno/vectordb/weaviate/weaviate.py +1005 -0
  561. agno/workflow/__init__.py +23 -0
  562. agno/workflow/agent.py +299 -0
  563. agno/workflow/condition.py +738 -0
  564. agno/workflow/loop.py +735 -0
  565. agno/workflow/parallel.py +824 -0
  566. agno/workflow/router.py +702 -0
  567. agno/workflow/step.py +1432 -0
  568. agno/workflow/steps.py +592 -0
  569. agno/workflow/types.py +520 -0
  570. agno/workflow/workflow.py +4321 -0
  571. agno-2.2.13.dist-info/METADATA +614 -0
  572. agno-2.2.13.dist-info/RECORD +575 -0
  573. agno-2.2.13.dist-info/WHEEL +5 -0
  574. agno-2.2.13.dist-info/licenses/LICENSE +201 -0
  575. agno-2.2.13.dist-info/top_level.txt +1 -0
agno/db/mysql/mysql.py ADDED
@@ -0,0 +1,2308 @@
1
+ import time
2
+ from datetime import date, datetime, timedelta, timezone
3
+ from typing import Any, Dict, List, Optional, Sequence, Tuple, Union
4
+ from uuid import uuid4
5
+
6
+ from sqlalchemy import Index, UniqueConstraint
7
+
8
+ from agno.db.base import BaseDb, SessionType
9
+ from agno.db.mysql.schemas import get_table_schema_definition
10
+ from agno.db.mysql.utils import (
11
+ apply_sorting,
12
+ bulk_upsert_metrics,
13
+ calculate_date_metrics,
14
+ create_schema,
15
+ deserialize_cultural_knowledge_from_db,
16
+ fetch_all_sessions_data,
17
+ get_dates_to_calculate_metrics_for,
18
+ is_table_available,
19
+ is_valid_table,
20
+ serialize_cultural_knowledge_for_db,
21
+ )
22
+ from agno.db.schemas.culture import CulturalKnowledge
23
+ from agno.db.schemas.evals import EvalFilterType, EvalRunRecord, EvalType
24
+ from agno.db.schemas.knowledge import KnowledgeRow
25
+ from agno.db.schemas.memory import UserMemory
26
+ from agno.session import AgentSession, Session, TeamSession, WorkflowSession
27
+ from agno.utils.log import log_debug, log_error, log_info, log_warning
28
+ from agno.utils.string import generate_id
29
+
30
+ try:
31
+ from sqlalchemy import TEXT, and_, cast, func, update
32
+ from sqlalchemy.dialects import mysql
33
+ from sqlalchemy.engine import Engine, create_engine
34
+ from sqlalchemy.orm import scoped_session, sessionmaker
35
+ from sqlalchemy.schema import Column, MetaData, Table
36
+ from sqlalchemy.sql.expression import select, text
37
+ except ImportError:
38
+ raise ImportError("`sqlalchemy` not installed. Please install it using `pip install sqlalchemy`")
39
+
40
+
41
+ class MySQLDb(BaseDb):
42
+ def __init__(
43
+ self,
44
+ db_engine: Optional[Engine] = None,
45
+ db_schema: Optional[str] = None,
46
+ db_url: Optional[str] = None,
47
+ session_table: Optional[str] = None,
48
+ culture_table: Optional[str] = None,
49
+ memory_table: Optional[str] = None,
50
+ metrics_table: Optional[str] = None,
51
+ eval_table: Optional[str] = None,
52
+ knowledge_table: Optional[str] = None,
53
+ id: Optional[str] = None,
54
+ ):
55
+ """
56
+ Interface for interacting with a MySQL database.
57
+
58
+ The following order is used to determine the database connection:
59
+ 1. Use the db_engine if provided
60
+ 2. Use the db_url
61
+ 3. Raise an error if neither is provided
62
+
63
+ Args:
64
+ db_url (Optional[str]): The database URL to connect to.
65
+ db_engine (Optional[Engine]): The SQLAlchemy database engine to use.
66
+ db_schema (Optional[str]): The database schema to use.
67
+ session_table (Optional[str]): Name of the table to store Agent, Team and Workflow sessions.
68
+ culture_table (Optional[str]): Name of the table to store cultural knowledge.
69
+ memory_table (Optional[str]): Name of the table to store memories.
70
+ metrics_table (Optional[str]): Name of the table to store metrics.
71
+ eval_table (Optional[str]): Name of the table to store evaluation runs data.
72
+ knowledge_table (Optional[str]): Name of the table to store knowledge content.
73
+ id (Optional[str]): ID of the database.
74
+
75
+ Raises:
76
+ ValueError: If neither db_url nor db_engine is provided.
77
+ ValueError: If none of the tables are provided.
78
+ """
79
+ if id is None:
80
+ base_seed = db_url or str(db_engine.url) # type: ignore
81
+ schema_suffix = db_schema if db_schema is not None else "ai"
82
+ seed = f"{base_seed}#{schema_suffix}"
83
+ id = generate_id(seed)
84
+
85
+ super().__init__(
86
+ id=id,
87
+ session_table=session_table,
88
+ culture_table=culture_table,
89
+ memory_table=memory_table,
90
+ metrics_table=metrics_table,
91
+ eval_table=eval_table,
92
+ knowledge_table=knowledge_table,
93
+ )
94
+
95
+ _engine: Optional[Engine] = db_engine
96
+ if _engine is None and db_url is not None:
97
+ _engine = create_engine(db_url)
98
+ if _engine is None:
99
+ raise ValueError("One of db_url or db_engine must be provided")
100
+
101
+ self.db_url: Optional[str] = db_url
102
+ self.db_engine: Engine = _engine
103
+ self.db_schema: str = db_schema if db_schema is not None else "ai"
104
+ self.metadata: MetaData = MetaData()
105
+
106
+ # Initialize database session
107
+ self.Session: scoped_session = scoped_session(sessionmaker(bind=self.db_engine))
108
+
109
+ # -- DB methods --
110
+ def table_exists(self, table_name: str) -> bool:
111
+ """Check if a table with the given name exists in the MySQL database.
112
+
113
+ Args:
114
+ table_name: Name of the table to check
115
+
116
+ Returns:
117
+ bool: True if the table exists in the database, False otherwise
118
+ """
119
+ with self.Session() as sess:
120
+ return is_table_available(session=sess, table_name=table_name, db_schema=self.db_schema)
121
+
122
    def _create_table(self, table_name: str, table_type: str, db_schema: str) -> Table:
        """
        Create a table with the appropriate schema based on the table type.

        Builds the SQLAlchemy Table object (columns, unique constraints, indexes)
        from the schema definition, ensures the target schema exists, creates the
        table if missing, then creates each index that is not already present.

        Args:
            table_name (str): Name of the table to create
            table_type (str): Type of table (used to get schema definition)
            db_schema (str): Database schema name

        Returns:
            Table: SQLAlchemy Table object

        Raises:
            Exception: Any error during table creation is logged and re-raised.
        """
        try:
            table_schema = get_table_schema_definition(table_type)

            log_debug(f"Creating table {table_name}")

            columns: List[Column] = []
            indexes: List[str] = []
            # NOTE(review): unique_constraints is appended to below but never read
            # afterwards — appears to be dead state; confirm before removing.
            unique_constraints: List[str] = []
            # NOTE(review): .pop() mutates the dict returned by
            # get_table_schema_definition — assumes it returns a fresh copy per
            # call; verify it is not a shared/cached definition.
            schema_unique_constraints = table_schema.pop("_unique_constraints", [])

            # Get the columns, indexes, and unique constraints from the table schema
            for col_name, col_config in table_schema.items():
                # col_config["type"] is a SQLAlchemy type class; instantiate it here.
                column_args = [col_name, col_config["type"]()]
                column_kwargs = {}
                if col_config.get("primary_key", False):
                    column_kwargs["primary_key"] = True
                if "nullable" in col_config:
                    column_kwargs["nullable"] = col_config["nullable"]
                if col_config.get("index", False):
                    indexes.append(col_name)
                if col_config.get("unique", False):
                    column_kwargs["unique"] = True
                    unique_constraints.append(col_name)
                columns.append(Column(*column_args, **column_kwargs))  # type: ignore

            # Create the table object
            table_metadata = MetaData(schema=db_schema)
            table = Table(table_name, table_metadata, *columns, schema=db_schema)

            # Add multi-column unique constraints with table-specific names
            for constraint in schema_unique_constraints:
                constraint_name = f"{table_name}_{constraint['name']}"
                constraint_columns = constraint["columns"]
                table.append_constraint(UniqueConstraint(*constraint_columns, name=constraint_name))

            # Add indexes to the table definition
            for idx_col in indexes:
                idx_name = f"idx_{table_name}_{idx_col}"
                table.append_constraint(Index(idx_name, idx_col))

            # Make sure the target schema exists before creating the table in it.
            with self.Session() as sess, sess.begin():
                create_schema(session=sess, db_schema=db_schema)

            # Create table (no-op if it already exists thanks to checkfirst)
            table.create(self.db_engine, checkfirst=True)

            # Create indexes; each index failure is logged but does not abort the others.
            for idx in table.indexes:
                try:
                    log_debug(f"Creating index: {idx.name}")

                    # Check if index already exists (MySQL has no CREATE INDEX IF NOT EXISTS,
                    # so probe information_schema.statistics first)
                    with self.Session() as sess:
                        exists_query = text(
                            "SELECT 1 FROM information_schema.statistics WHERE table_schema = :schema "
                            "AND table_name = :table_name AND index_name = :index_name"
                        )
                        exists = (
                            sess.execute(
                                exists_query, {"schema": db_schema, "table_name": table_name, "index_name": idx.name}
                            ).scalar()
                            is not None
                        )
                    if exists:
                        log_debug(f"Index {idx.name} already exists in {db_schema}.{table_name}, skipping creation")
                        continue

                    idx.create(self.db_engine)

                except Exception as e:
                    log_error(f"Error creating index {idx.name}: {e}")

            log_debug(f"Successfully created table {db_schema}.{table_name}")
            return table

        except Exception as e:
            log_error(f"Could not create table {db_schema}.{table_name}: {e}")
            raise
212
+
213
+ def _create_all_tables(self):
214
+ """Create all tables for the database."""
215
+ tables_to_create = [
216
+ (self.session_table_name, "sessions"),
217
+ (self.memory_table_name, "memories"),
218
+ (self.metrics_table_name, "metrics"),
219
+ (self.eval_table_name, "evals"),
220
+ (self.knowledge_table_name, "knowledge"),
221
+ ]
222
+
223
+ for table_name, table_type in tables_to_create:
224
+ self._create_table(table_name=table_name, table_type=table_type, db_schema=self.db_schema)
225
+
226
+ def _get_table(self, table_type: str, create_table_if_not_found: Optional[bool] = False) -> Optional[Table]:
227
+ if table_type == "sessions":
228
+ self.session_table = self._get_or_create_table(
229
+ table_name=self.session_table_name,
230
+ table_type="sessions",
231
+ db_schema=self.db_schema,
232
+ create_table_if_not_found=create_table_if_not_found,
233
+ )
234
+ return self.session_table
235
+
236
+ if table_type == "memories":
237
+ self.memory_table = self._get_or_create_table(
238
+ table_name=self.memory_table_name,
239
+ table_type="memories",
240
+ db_schema=self.db_schema,
241
+ create_table_if_not_found=create_table_if_not_found,
242
+ )
243
+ return self.memory_table
244
+
245
+ if table_type == "metrics":
246
+ self.metrics_table = self._get_or_create_table(
247
+ table_name=self.metrics_table_name,
248
+ table_type="metrics",
249
+ db_schema=self.db_schema,
250
+ create_table_if_not_found=create_table_if_not_found,
251
+ )
252
+ return self.metrics_table
253
+
254
+ if table_type == "evals":
255
+ self.eval_table = self._get_or_create_table(
256
+ table_name=self.eval_table_name,
257
+ table_type="evals",
258
+ db_schema=self.db_schema,
259
+ create_table_if_not_found=create_table_if_not_found,
260
+ )
261
+ return self.eval_table
262
+
263
+ if table_type == "knowledge":
264
+ self.knowledge_table = self._get_or_create_table(
265
+ table_name=self.knowledge_table_name,
266
+ table_type="knowledge",
267
+ db_schema=self.db_schema,
268
+ create_table_if_not_found=create_table_if_not_found,
269
+ )
270
+ return self.knowledge_table
271
+
272
+ if table_type == "culture":
273
+ self.culture_table = self._get_or_create_table(
274
+ table_name=self.culture_table_name,
275
+ table_type="culture",
276
+ db_schema=self.db_schema,
277
+ create_table_if_not_found=create_table_if_not_found,
278
+ )
279
+ return self.culture_table
280
+
281
+ raise ValueError(f"Unknown table type: {table_type}")
282
+
283
    def _get_or_create_table(
        self, table_name: str, table_type: str, db_schema: str, create_table_if_not_found: Optional[bool] = False
    ) -> Optional[Table]:
        """
        Check if the table exists and is valid, else create it.

        Args:
            table_name (str): Name of the table to get or create
            table_type (str): Type of table (used to get schema definition)
            db_schema (str): Database schema name
            create_table_if_not_found (Optional[bool]): Whether to create the table when missing.

        Returns:
            Optional[Table]: SQLAlchemy Table object representing the schema, or None when
            the table is missing and create_table_if_not_found is False.

        Raises:
            ValueError: If an existing table does not match the expected schema.
        """

        # Check for the table inside a short transaction.
        with self.Session() as sess, sess.begin():
            table_is_available = is_table_available(session=sess, table_name=table_name, db_schema=db_schema)

        if not table_is_available:
            if not create_table_if_not_found:
                return None

            return self._create_table(table_name=table_name, table_type=table_type, db_schema=db_schema)

        # The table exists: validate its structure before using it.
        if not is_valid_table(
            db_engine=self.db_engine,
            table_name=table_name,
            table_type=table_type,
            db_schema=db_schema,
        ):
            raise ValueError(f"Table {db_schema}.{table_name} has an invalid schema")

        try:
            # Reflect the existing table definition from the live database.
            table = Table(table_name, self.metadata, schema=db_schema, autoload_with=self.db_engine)
            log_debug(f"Loaded existing table {db_schema}.{table_name}")
            return table

        except Exception as e:
            log_error(f"Error loading existing table {db_schema}.{table_name}: {e}")
            raise
323
+
324
+ # -- Session methods --
325
+ def delete_session(self, session_id: str) -> bool:
326
+ """
327
+ Delete a session from the database.
328
+
329
+ Args:
330
+ session_id (str): ID of the session to delete
331
+
332
+ Returns:
333
+ bool: True if the session was deleted, False otherwise.
334
+
335
+ Raises:
336
+ Exception: If an error occurs during deletion.
337
+ """
338
+ try:
339
+ table = self._get_table(table_type="sessions")
340
+ if table is None:
341
+ return False
342
+
343
+ with self.Session() as sess, sess.begin():
344
+ delete_stmt = table.delete().where(table.c.session_id == session_id)
345
+ result = sess.execute(delete_stmt)
346
+ if result.rowcount == 0:
347
+ log_debug(f"No session found to delete with session_id: {session_id} in table {table.name}")
348
+ return False
349
+ else:
350
+ log_debug(f"Successfully deleted session with session_id: {session_id} in table {table.name}")
351
+ return True
352
+
353
+ except Exception as e:
354
+ log_error(f"Error deleting session: {e}")
355
+ return False
356
+
357
+ def delete_sessions(self, session_ids: List[str]) -> None:
358
+ """Delete all given sessions from the database.
359
+ Can handle multiple session types in the same run.
360
+
361
+ Args:
362
+ session_ids (List[str]): The IDs of the sessions to delete.
363
+
364
+ Raises:
365
+ Exception: If an error occurs during deletion.
366
+ """
367
+ try:
368
+ table = self._get_table(table_type="sessions")
369
+ if table is None:
370
+ return
371
+
372
+ with self.Session() as sess, sess.begin():
373
+ delete_stmt = table.delete().where(table.c.session_id.in_(session_ids))
374
+ result = sess.execute(delete_stmt)
375
+
376
+ log_debug(f"Successfully deleted {result.rowcount} sessions")
377
+
378
+ except Exception as e:
379
+ log_error(f"Error deleting sessions: {e}")
380
+
381
+ def get_session(
382
+ self,
383
+ session_id: str,
384
+ session_type: SessionType,
385
+ user_id: Optional[str] = None,
386
+ deserialize: Optional[bool] = True,
387
+ ) -> Optional[Union[Session, Dict[str, Any]]]:
388
+ """
389
+ Read a session from the database.
390
+
391
+ Args:
392
+ session_id (str): ID of the session to read.
393
+ session_type (SessionType): Type of session to get.
394
+ user_id (Optional[str]): User ID to filter by. Defaults to None.
395
+ deserialize (Optional[bool]): Whether to serialize the session. Defaults to True.
396
+
397
+ Returns:
398
+ Union[Session, Dict[str, Any], None]:
399
+ - When deserialize=True: Session object
400
+ - When deserialize=False: Session dictionary
401
+
402
+ Raises:
403
+ Exception: If an error occurs during retrieval.
404
+ """
405
+ try:
406
+ table = self._get_table(table_type="sessions")
407
+ if table is None:
408
+ return None
409
+
410
+ with self.Session() as sess:
411
+ stmt = select(table).where(table.c.session_id == session_id)
412
+
413
+ if user_id is not None:
414
+ stmt = stmt.where(table.c.user_id == user_id)
415
+ result = sess.execute(stmt).fetchone()
416
+ if result is None:
417
+ return None
418
+
419
+ session = dict(result._mapping)
420
+
421
+ if not deserialize:
422
+ return session
423
+
424
+ if session_type == SessionType.AGENT:
425
+ return AgentSession.from_dict(session)
426
+ elif session_type == SessionType.TEAM:
427
+ return TeamSession.from_dict(session)
428
+ elif session_type == SessionType.WORKFLOW:
429
+ return WorkflowSession.from_dict(session)
430
+ else:
431
+ raise ValueError(f"Invalid session type: {session_type}")
432
+
433
+ except Exception as e:
434
+ log_error(f"Exception reading from session table: {e}")
435
+ return None
436
+
437
+ def get_sessions(
438
+ self,
439
+ session_type: Optional[SessionType] = None,
440
+ user_id: Optional[str] = None,
441
+ component_id: Optional[str] = None,
442
+ session_name: Optional[str] = None,
443
+ start_timestamp: Optional[int] = None,
444
+ end_timestamp: Optional[int] = None,
445
+ limit: Optional[int] = None,
446
+ page: Optional[int] = None,
447
+ sort_by: Optional[str] = None,
448
+ sort_order: Optional[str] = None,
449
+ deserialize: Optional[bool] = True,
450
+ ) -> Union[List[Session], Tuple[List[Dict[str, Any]], int]]:
451
+ """
452
+ Get all sessions in the given table. Can filter by user_id and entity_id.
453
+
454
+ Args:
455
+ session_type (Optional[SessionType]): The type of sessions to get.
456
+ user_id (Optional[str]): The ID of the user to filter by.
457
+ entity_id (Optional[str]): The ID of the agent / workflow to filter by.
458
+ start_timestamp (Optional[int]): The start timestamp to filter by.
459
+ end_timestamp (Optional[int]): The end timestamp to filter by.
460
+ session_name (Optional[str]): The name of the session to filter by.
461
+ limit (Optional[int]): The maximum number of sessions to return. Defaults to None.
462
+ page (Optional[int]): The page number to return. Defaults to None.
463
+ sort_by (Optional[str]): The field to sort by. Defaults to None.
464
+ sort_order (Optional[str]): The sort order. Defaults to None.
465
+ deserialize (Optional[bool]): Whether to serialize the sessions. Defaults to True.
466
+ create_table_if_not_found (Optional[bool]): Whether to create the table if it doesn't exist.
467
+
468
+ Returns:
469
+ Union[List[Session], Tuple[List[Dict], int]]:
470
+ - When deserialize=True: List of Session objects
471
+ - When deserialize=False: Tuple of (session dictionaries, total count)
472
+
473
+ Raises:
474
+ Exception: If an error occurs during retrieval.
475
+ """
476
+ try:
477
+ table = self._get_table(table_type="sessions")
478
+ if table is None:
479
+ return [] if deserialize else ([], 0)
480
+
481
+ with self.Session() as sess, sess.begin():
482
+ stmt = select(table)
483
+
484
+ # Filtering
485
+ if user_id is not None:
486
+ stmt = stmt.where(table.c.user_id == user_id)
487
+ if component_id is not None:
488
+ if session_type == SessionType.AGENT:
489
+ stmt = stmt.where(table.c.agent_id == component_id)
490
+ elif session_type == SessionType.TEAM:
491
+ stmt = stmt.where(table.c.team_id == component_id)
492
+ elif session_type == SessionType.WORKFLOW:
493
+ stmt = stmt.where(table.c.workflow_id == component_id)
494
+ if start_timestamp is not None:
495
+ stmt = stmt.where(table.c.created_at >= start_timestamp)
496
+ if end_timestamp is not None:
497
+ stmt = stmt.where(table.c.created_at <= end_timestamp)
498
+ if session_name is not None:
499
+ # MySQL JSON extraction syntax
500
+ stmt = stmt.where(
501
+ func.coalesce(
502
+ func.json_unquote(func.json_extract(table.c.session_data, "$.session_name")), ""
503
+ ).ilike(f"%{session_name}%")
504
+ )
505
+ if session_type is not None:
506
+ session_type_value = session_type.value if isinstance(session_type, SessionType) else session_type
507
+ stmt = stmt.where(table.c.session_type == session_type_value)
508
+
509
+ count_stmt = select(func.count()).select_from(stmt.alias())
510
+ total_count = sess.execute(count_stmt).scalar()
511
+
512
+ # Sorting
513
+ stmt = apply_sorting(stmt, table, sort_by, sort_order)
514
+
515
+ # Paginating
516
+ if limit is not None:
517
+ stmt = stmt.limit(limit)
518
+ if page is not None:
519
+ stmt = stmt.offset((page - 1) * limit)
520
+
521
+ result = sess.execute(stmt).fetchall()
522
+ if not result:
523
+ return [] if deserialize else ([], 0)
524
+
525
+ session_dicts = [dict(row._mapping) for row in result]
526
+ if not deserialize:
527
+ return session_dicts, total_count
528
+
529
+ if session_type == SessionType.AGENT:
530
+ return [AgentSession.from_dict(record) for record in session_dicts] # type: ignore
531
+ elif session_type == SessionType.TEAM:
532
+ return [TeamSession.from_dict(record) for record in session_dicts] # type: ignore
533
+ elif session_type == SessionType.WORKFLOW:
534
+ return [WorkflowSession.from_dict(record) for record in session_dicts] # type: ignore
535
+ else:
536
+ raise ValueError(f"Invalid session type: {session_type}")
537
+
538
+ except Exception as e:
539
+ log_error(f"Exception getting sessions: {e}")
540
+ raise e
541
+
542
+ def rename_session(
543
+ self, session_id: str, session_type: SessionType, session_name: str, deserialize: Optional[bool] = True
544
+ ) -> Optional[Union[Session, Dict[str, Any]]]:
545
+ """
546
+ Rename a session in the database.
547
+
548
+ Args:
549
+ session_id (str): The ID of the session to rename.
550
+ session_type (SessionType): The type of session to rename.
551
+ session_name (str): The new name for the session.
552
+ deserialize (Optional[bool]): Whether to serialize the session. Defaults to True.
553
+
554
+ Returns:
555
+ Optional[Union[Session, Dict[str, Any]]]:
556
+ - When deserialize=True: Session object
557
+ - When deserialize=False: Session dictionary
558
+
559
+ Raises:
560
+ Exception: If an error occurs during renaming.
561
+ """
562
+ try:
563
+ table = self._get_table(table_type="sessions")
564
+ if table is None:
565
+ return None
566
+
567
+ with self.Session() as sess, sess.begin():
568
+ # MySQL JSON_SET syntax
569
+ stmt = (
570
+ update(table)
571
+ .where(table.c.session_id == session_id)
572
+ .where(table.c.session_type == session_type.value)
573
+ .values(session_data=func.json_set(table.c.session_data, "$.session_name", session_name))
574
+ )
575
+ sess.execute(stmt)
576
+
577
+ # Fetch the updated row
578
+ select_stmt = select(table).where(table.c.session_id == session_id)
579
+ result = sess.execute(select_stmt)
580
+ row = result.fetchone()
581
+ if not row:
582
+ return None
583
+
584
+ session = dict(row._mapping)
585
+ if not deserialize:
586
+ return session
587
+
588
+ # Return the appropriate session type
589
+ if session_type == SessionType.AGENT:
590
+ return AgentSession.from_dict(session)
591
+ elif session_type == SessionType.TEAM:
592
+ return TeamSession.from_dict(session)
593
+ elif session_type == SessionType.WORKFLOW:
594
+ return WorkflowSession.from_dict(session)
595
+ else:
596
+ raise ValueError(f"Invalid session type: {session_type}")
597
+
598
+ except Exception as e:
599
+ log_error(f"Exception renaming session: {e}")
600
+ return None
601
+
602
+ def upsert_session(
603
+ self, session: Session, deserialize: Optional[bool] = True
604
+ ) -> Optional[Union[Session, Dict[str, Any]]]:
605
+ """
606
+ Insert or update a session in the database.
607
+
608
+ Args:
609
+ session (Session): The session data to upsert.
610
+ deserialize (Optional[bool]): Whether to deserialize the session. Defaults to True.
611
+
612
+ Returns:
613
+ Optional[Union[Session, Dict[str, Any]]]:
614
+ - When deserialize=True: Session object
615
+ - When deserialize=False: Session dictionary
616
+
617
+ Raises:
618
+ Exception: If an error occurs during upsert.
619
+ """
620
+ try:
621
+ table = self._get_table(table_type="sessions", create_table_if_not_found=True)
622
+ if table is None:
623
+ return None
624
+
625
+ session_dict = session.to_dict()
626
+
627
+ if isinstance(session, AgentSession):
628
+ with self.Session() as sess, sess.begin():
629
+ stmt = mysql.insert(table).values(
630
+ session_id=session_dict.get("session_id"),
631
+ session_type=SessionType.AGENT.value,
632
+ agent_id=session_dict.get("agent_id"),
633
+ user_id=session_dict.get("user_id"),
634
+ runs=session_dict.get("runs"),
635
+ agent_data=session_dict.get("agent_data"),
636
+ session_data=session_dict.get("session_data"),
637
+ summary=session_dict.get("summary"),
638
+ metadata=session_dict.get("metadata"),
639
+ created_at=session_dict.get("created_at"),
640
+ updated_at=session_dict.get("created_at"),
641
+ )
642
+ stmt = stmt.on_duplicate_key_update(
643
+ agent_id=session_dict.get("agent_id"),
644
+ user_id=session_dict.get("user_id"),
645
+ agent_data=session_dict.get("agent_data"),
646
+ session_data=session_dict.get("session_data"),
647
+ summary=session_dict.get("summary"),
648
+ metadata=session_dict.get("metadata"),
649
+ runs=session_dict.get("runs"),
650
+ updated_at=int(time.time()),
651
+ )
652
+ sess.execute(stmt)
653
+
654
+ # Fetch the row
655
+ select_stmt = select(table).where(table.c.session_id == session_dict.get("session_id"))
656
+ result = sess.execute(select_stmt)
657
+ row = result.fetchone()
658
+ if not row:
659
+ return None
660
+ session_dict = dict(row._mapping)
661
+ if session_dict is None or not deserialize:
662
+ return session_dict
663
+ return AgentSession.from_dict(session_dict)
664
+
665
+ elif isinstance(session, TeamSession):
666
+ with self.Session() as sess, sess.begin():
667
+ stmt = mysql.insert(table).values(
668
+ session_id=session_dict.get("session_id"),
669
+ session_type=SessionType.TEAM.value,
670
+ team_id=session_dict.get("team_id"),
671
+ user_id=session_dict.get("user_id"),
672
+ runs=session_dict.get("runs"),
673
+ team_data=session_dict.get("team_data"),
674
+ session_data=session_dict.get("session_data"),
675
+ summary=session_dict.get("summary"),
676
+ metadata=session_dict.get("metadata"),
677
+ created_at=session_dict.get("created_at"),
678
+ updated_at=session_dict.get("created_at"),
679
+ )
680
+ stmt = stmt.on_duplicate_key_update(
681
+ team_id=session_dict.get("team_id"),
682
+ user_id=session_dict.get("user_id"),
683
+ team_data=session_dict.get("team_data"),
684
+ session_data=session_dict.get("session_data"),
685
+ summary=session_dict.get("summary"),
686
+ metadata=session_dict.get("metadata"),
687
+ runs=session_dict.get("runs"),
688
+ updated_at=int(time.time()),
689
+ )
690
+ sess.execute(stmt)
691
+
692
+ # Fetch the row
693
+ select_stmt = select(table).where(table.c.session_id == session_dict.get("session_id"))
694
+ result = sess.execute(select_stmt)
695
+ row = result.fetchone()
696
+ if not row:
697
+ return None
698
+ session_dict = dict(row._mapping)
699
+ if session_dict is None or not deserialize:
700
+ return session_dict
701
+ return TeamSession.from_dict(session_dict)
702
+
703
+ else:
704
+ with self.Session() as sess, sess.begin():
705
+ stmt = mysql.insert(table).values(
706
+ session_id=session_dict.get("session_id"),
707
+ session_type=SessionType.WORKFLOW.value,
708
+ workflow_id=session_dict.get("workflow_id"),
709
+ user_id=session_dict.get("user_id"),
710
+ runs=session_dict.get("runs"),
711
+ workflow_data=session_dict.get("workflow_data"),
712
+ session_data=session_dict.get("session_data"),
713
+ summary=session_dict.get("summary"),
714
+ metadata=session_dict.get("metadata"),
715
+ created_at=session_dict.get("created_at"),
716
+ updated_at=session_dict.get("created_at"),
717
+ )
718
+ stmt = stmt.on_duplicate_key_update(
719
+ workflow_id=session_dict.get("workflow_id"),
720
+ user_id=session_dict.get("user_id"),
721
+ workflow_data=session_dict.get("workflow_data"),
722
+ session_data=session_dict.get("session_data"),
723
+ summary=session_dict.get("summary"),
724
+ metadata=session_dict.get("metadata"),
725
+ runs=session_dict.get("runs"),
726
+ updated_at=int(time.time()),
727
+ )
728
+ sess.execute(stmt)
729
+
730
+ # Fetch the row
731
+ select_stmt = select(table).where(table.c.session_id == session_dict.get("session_id"))
732
+ result = sess.execute(select_stmt)
733
+ row = result.fetchone()
734
+ if not row:
735
+ return None
736
+ session_dict = dict(row._mapping)
737
+ if session_dict is None or not deserialize:
738
+ return session_dict
739
+ return WorkflowSession.from_dict(session_dict)
740
+
741
+ except Exception as e:
742
+ log_error(f"Exception upserting into sessions table: {e}")
743
+ return None
744
+
745
    def upsert_sessions(
        self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
    ) -> List[Union[Session, Dict[str, Any]]]:
        """
        Bulk upsert multiple sessions for improved performance on large datasets.

        Sessions are grouped by type (agent / team / workflow) and each group is
        written with a single multi-row INSERT ... ON DUPLICATE KEY UPDATE, inside
        one transaction. Sessions that are not Agent/Team/Workflow sessions are
        silently skipped; result order follows the per-type read-back queries,
        not the input order.

        Args:
            sessions (List[Session]): List of sessions to upsert.
            deserialize (Optional[bool]): Whether to deserialize the sessions. Defaults to True.
            preserve_updated_at (bool): If True, preserve the updated_at from the session object.

        Returns:
            List[Union[Session, Dict[str, Any]]]: List of upserted sessions.

        Raises:
            Exception: If an error occurs during bulk upsert.
        """
        if not sessions:
            return []

        try:
            table = self._get_table(table_type="sessions", create_table_if_not_found=True)
            if table is None:
                log_info("Sessions table not available, falling back to individual upserts")
                # One-at-a-time fallback; drops None sessions and failed upserts.
                return [
                    result
                    for session in sessions
                    if session is not None
                    for result in [self.upsert_session(session, deserialize=deserialize)]
                    if result is not None
                ]

            # Group sessions by type for batch processing
            agent_sessions = []
            team_sessions = []
            workflow_sessions = []

            for session in sessions:
                if isinstance(session, AgentSession):
                    agent_sessions.append(session)
                elif isinstance(session, TeamSession):
                    team_sessions.append(session)
                elif isinstance(session, WorkflowSession):
                    workflow_sessions.append(session)

            results: List[Union[Session, Dict[str, Any]]] = []

            # Process each session type in bulk, all inside one transaction
            with self.Session() as sess, sess.begin():
                # Bulk upsert agent sessions
                if agent_sessions:
                    agent_data = []
                    for session in agent_sessions:
                        session_dict = session.to_dict()
                        # Use preserved updated_at if flag is set and value exists, otherwise use current time
                        updated_at = session_dict.get("updated_at") if preserve_updated_at else int(time.time())
                        agent_data.append(
                            {
                                "session_id": session_dict.get("session_id"),
                                "session_type": SessionType.AGENT.value,
                                "agent_id": session_dict.get("agent_id"),
                                "user_id": session_dict.get("user_id"),
                                "runs": session_dict.get("runs"),
                                "agent_data": session_dict.get("agent_data"),
                                "session_data": session_dict.get("session_data"),
                                "summary": session_dict.get("summary"),
                                "metadata": session_dict.get("metadata"),
                                "created_at": session_dict.get("created_at"),
                                "updated_at": updated_at,
                            }
                        )

                    if agent_data:
                        # stmt.inserted refers to the per-row values being inserted,
                        # so the ON DUPLICATE KEY UPDATE works for every row in the batch.
                        stmt = mysql.insert(table)
                        stmt = stmt.on_duplicate_key_update(
                            agent_id=stmt.inserted.agent_id,
                            user_id=stmt.inserted.user_id,
                            agent_data=stmt.inserted.agent_data,
                            session_data=stmt.inserted.session_data,
                            summary=stmt.inserted.summary,
                            metadata=stmt.inserted.metadata,
                            runs=stmt.inserted.runs,
                            updated_at=stmt.inserted.updated_at,
                        )
                        sess.execute(stmt, agent_data)

                    # Fetch the results for agent sessions
                    agent_ids = [session.session_id for session in agent_sessions]
                    select_stmt = select(table).where(table.c.session_id.in_(agent_ids))
                    result = sess.execute(select_stmt).fetchall()

                    for row in result:
                        session_dict = dict(row._mapping)
                        if deserialize:
                            deserialized_agent_session = AgentSession.from_dict(session_dict)
                            if deserialized_agent_session is None:
                                continue
                            results.append(deserialized_agent_session)
                        else:
                            results.append(session_dict)

                # Bulk upsert team sessions
                if team_sessions:
                    team_data = []
                    for session in team_sessions:
                        session_dict = session.to_dict()
                        # Use preserved updated_at if flag is set and value exists, otherwise use current time
                        updated_at = session_dict.get("updated_at") if preserve_updated_at else int(time.time())
                        team_data.append(
                            {
                                "session_id": session_dict.get("session_id"),
                                "session_type": SessionType.TEAM.value,
                                "team_id": session_dict.get("team_id"),
                                "user_id": session_dict.get("user_id"),
                                "runs": session_dict.get("runs"),
                                "team_data": session_dict.get("team_data"),
                                "session_data": session_dict.get("session_data"),
                                "summary": session_dict.get("summary"),
                                "metadata": session_dict.get("metadata"),
                                "created_at": session_dict.get("created_at"),
                                "updated_at": updated_at,
                            }
                        )

                    if team_data:
                        stmt = mysql.insert(table)
                        stmt = stmt.on_duplicate_key_update(
                            team_id=stmt.inserted.team_id,
                            user_id=stmt.inserted.user_id,
                            team_data=stmt.inserted.team_data,
                            session_data=stmt.inserted.session_data,
                            summary=stmt.inserted.summary,
                            metadata=stmt.inserted.metadata,
                            runs=stmt.inserted.runs,
                            updated_at=stmt.inserted.updated_at,
                        )
                        sess.execute(stmt, team_data)

                    # Fetch the results for team sessions
                    team_ids = [session.session_id for session in team_sessions]
                    select_stmt = select(table).where(table.c.session_id.in_(team_ids))
                    result = sess.execute(select_stmt).fetchall()

                    for row in result:
                        session_dict = dict(row._mapping)
                        if deserialize:
                            deserialized_team_session = TeamSession.from_dict(session_dict)
                            if deserialized_team_session is None:
                                continue
                            results.append(deserialized_team_session)
                        else:
                            results.append(session_dict)

                # Bulk upsert workflow sessions
                if workflow_sessions:
                    workflow_data = []
                    for session in workflow_sessions:
                        session_dict = session.to_dict()
                        # Use preserved updated_at if flag is set and value exists, otherwise use current time
                        updated_at = session_dict.get("updated_at") if preserve_updated_at else int(time.time())
                        workflow_data.append(
                            {
                                "session_id": session_dict.get("session_id"),
                                "session_type": SessionType.WORKFLOW.value,
                                "workflow_id": session_dict.get("workflow_id"),
                                "user_id": session_dict.get("user_id"),
                                "runs": session_dict.get("runs"),
                                "workflow_data": session_dict.get("workflow_data"),
                                "session_data": session_dict.get("session_data"),
                                "summary": session_dict.get("summary"),
                                "metadata": session_dict.get("metadata"),
                                "created_at": session_dict.get("created_at"),
                                "updated_at": updated_at,
                            }
                        )

                    if workflow_data:
                        stmt = mysql.insert(table)
                        stmt = stmt.on_duplicate_key_update(
                            workflow_id=stmt.inserted.workflow_id,
                            user_id=stmt.inserted.user_id,
                            workflow_data=stmt.inserted.workflow_data,
                            session_data=stmt.inserted.session_data,
                            summary=stmt.inserted.summary,
                            metadata=stmt.inserted.metadata,
                            runs=stmt.inserted.runs,
                            updated_at=stmt.inserted.updated_at,
                        )
                        sess.execute(stmt, workflow_data)

                    # Fetch the results for workflow sessions
                    workflow_ids = [session.session_id for session in workflow_sessions]
                    select_stmt = select(table).where(table.c.session_id.in_(workflow_ids))
                    result = sess.execute(select_stmt).fetchall()

                    for row in result:
                        session_dict = dict(row._mapping)
                        if deserialize:
                            deserialized_workflow_session = WorkflowSession.from_dict(session_dict)
                            if deserialized_workflow_session is None:
                                continue
                            results.append(deserialized_workflow_session)
                        else:
                            results.append(session_dict)

            return results

        except Exception as e:
            log_error(f"Exception during bulk session upsert, falling back to individual upserts: {e}")
            # Fallback to individual upserts
            return [
                result
                for session in sessions
                if session is not None
                for result in [self.upsert_session(session, deserialize=deserialize)]
                if result is not None
            ]
962
+
963
+ # -- Memory methods --
964
+ def delete_user_memory(self, memory_id: str, user_id: Optional[str] = None):
965
+ """Delete a user memory from the database.
966
+
967
+ Args:
968
+ memory_id (str): The ID of the memory to delete.
969
+ user_id (Optional[str]): The user ID to filter by. Defaults to None.
970
+
971
+ Returns:
972
+ bool: True if deletion was successful, False otherwise.
973
+
974
+ Raises:
975
+ Exception: If an error occurs during deletion.
976
+ """
977
+ try:
978
+ table = self._get_table(table_type="memories")
979
+ if table is None:
980
+ return
981
+
982
+ with self.Session() as sess, sess.begin():
983
+ delete_stmt = table.delete().where(table.c.memory_id == memory_id)
984
+ if user_id is not None:
985
+ delete_stmt = delete_stmt.where(table.c.user_id == user_id)
986
+ result = sess.execute(delete_stmt)
987
+
988
+ success = result.rowcount > 0
989
+ if success:
990
+ log_debug(f"Successfully deleted user memory id: {memory_id}")
991
+ else:
992
+ log_debug(f"No user memory found with id: {memory_id}")
993
+
994
+ except Exception as e:
995
+ log_error(f"Error deleting user memory: {e}")
996
+
997
+ def delete_user_memories(self, memory_ids: List[str], user_id: Optional[str] = None) -> None:
998
+ """Delete user memories from the database.
999
+
1000
+ Args:
1001
+ memory_ids (List[str]): The IDs of the memories to delete.
1002
+ user_id (Optional[str]): The user ID to filter by. Defaults to None.
1003
+
1004
+ Raises:
1005
+ Exception: If an error occurs during deletion.
1006
+ """
1007
+ try:
1008
+ table = self._get_table(table_type="memories")
1009
+ if table is None:
1010
+ return
1011
+
1012
+ with self.Session() as sess, sess.begin():
1013
+ delete_stmt = table.delete().where(table.c.memory_id.in_(memory_ids))
1014
+ if user_id is not None:
1015
+ delete_stmt = delete_stmt.where(table.c.user_id == user_id)
1016
+ result = sess.execute(delete_stmt)
1017
+ if result.rowcount == 0:
1018
+ log_debug(f"No user memories found with ids: {memory_ids}")
1019
+
1020
+ except Exception as e:
1021
+ log_error(f"Error deleting user memories: {e}")
1022
+
1023
+ def get_all_memory_topics(self) -> List[str]:
1024
+ """Get all memory topics from the database.
1025
+
1026
+ Returns:
1027
+ List[str]: List of memory topics.
1028
+ """
1029
+ try:
1030
+ table = self._get_table(table_type="memories")
1031
+ if table is None:
1032
+ return []
1033
+
1034
+ with self.Session() as sess, sess.begin():
1035
+ # MySQL approach: extract JSON array elements differently
1036
+ stmt = select(table.c.topics)
1037
+ result = sess.execute(stmt).fetchall()
1038
+
1039
+ topics_set = set()
1040
+ for row in result:
1041
+ if row[0]:
1042
+ # Parse JSON array and add topics to set
1043
+ import json
1044
+
1045
+ try:
1046
+ topics = json.loads(row[0]) if isinstance(row[0], str) else row[0]
1047
+ if isinstance(topics, list):
1048
+ topics_set.update(topics)
1049
+ except Exception:
1050
+ pass
1051
+
1052
+ return list(topics_set)
1053
+
1054
+ except Exception as e:
1055
+ log_error(f"Exception reading from memory table: {e}")
1056
+ raise e
1057
+
1058
+ def get_user_memory(
1059
+ self, memory_id: str, deserialize: Optional[bool] = True, user_id: Optional[str] = None
1060
+ ) -> Optional[UserMemory]:
1061
+ """Get a memory from the database.
1062
+
1063
+ Args:
1064
+ memory_id (str): The ID of the memory to get.
1065
+ deserialize (Optional[bool]): Whether to serialize the memory. Defaults to True.
1066
+ user_id (Optional[str]): The user ID to filter by. Defaults to None.
1067
+
1068
+ Returns:
1069
+ Union[UserMemory, Dict[str, Any], None]:
1070
+ - When deserialize=True: UserMemory object
1071
+ - When deserialize=False: UserMemory dictionary
1072
+
1073
+ Raises:
1074
+ Exception: If an error occurs during retrieval.
1075
+ """
1076
+ try:
1077
+ table = self._get_table(table_type="memories")
1078
+ if table is None:
1079
+ return None
1080
+
1081
+ with self.Session() as sess, sess.begin():
1082
+ stmt = select(table).where(table.c.memory_id == memory_id)
1083
+ if user_id is not None:
1084
+ stmt = stmt.where(table.c.user_id == user_id)
1085
+
1086
+ result = sess.execute(stmt).fetchone()
1087
+ if not result:
1088
+ return None
1089
+
1090
+ memory_raw = result._mapping
1091
+ if not deserialize:
1092
+ return memory_raw
1093
+ return UserMemory.from_dict(memory_raw)
1094
+
1095
+ except Exception as e:
1096
+ log_error(f"Exception reading from memory table: {e}")
1097
+ return None
1098
+
1099
+ def get_user_memories(
1100
+ self,
1101
+ user_id: Optional[str] = None,
1102
+ agent_id: Optional[str] = None,
1103
+ team_id: Optional[str] = None,
1104
+ topics: Optional[List[str]] = None,
1105
+ search_content: Optional[str] = None,
1106
+ limit: Optional[int] = None,
1107
+ page: Optional[int] = None,
1108
+ sort_by: Optional[str] = None,
1109
+ sort_order: Optional[str] = None,
1110
+ deserialize: Optional[bool] = True,
1111
+ ) -> Union[List[UserMemory], Tuple[List[Dict[str, Any]], int]]:
1112
+ """Get all memories from the database as MemoryRow objects.
1113
+
1114
+ Args:
1115
+ user_id (Optional[str]): The ID of the user to filter by.
1116
+ agent_id (Optional[str]): The ID of the agent to filter by.
1117
+ team_id (Optional[str]): The ID of the team to filter by.
1118
+ topics (Optional[List[str]]): The topics to filter by.
1119
+ search_content (Optional[str]): The content to search for.
1120
+ limit (Optional[int]): The maximum number of memories to return.
1121
+ page (Optional[int]): The page number.
1122
+ sort_by (Optional[str]): The column to sort by.
1123
+ sort_order (Optional[str]): The order to sort by.
1124
+ deserialize (Optional[bool]): Whether to serialize the memories. Defaults to True.
1125
+
1126
+
1127
+ Returns:
1128
+ Union[List[UserMemory], Tuple[List[Dict[str, Any]], int]]:
1129
+ - When deserialize=True: List of UserMemory objects
1130
+ - When deserialize=False: Tuple of (memory dictionaries, total count)
1131
+
1132
+ Raises:
1133
+ Exception: If an error occurs during retrieval.
1134
+ """
1135
+ try:
1136
+ table = self._get_table(table_type="memories")
1137
+ if table is None:
1138
+ return [] if deserialize else ([], 0)
1139
+
1140
+ with self.Session() as sess, sess.begin():
1141
+ stmt = select(table)
1142
+ # Filtering
1143
+ if user_id is not None:
1144
+ stmt = stmt.where(table.c.user_id == user_id)
1145
+ if agent_id is not None:
1146
+ stmt = stmt.where(table.c.agent_id == agent_id)
1147
+ if team_id is not None:
1148
+ stmt = stmt.where(table.c.team_id == team_id)
1149
+ if topics is not None:
1150
+ # MySQL JSON contains syntax
1151
+ topic_conditions = []
1152
+ for topic in topics:
1153
+ topic_conditions.append(func.json_contains(table.c.topics, f'"{topic}"'))
1154
+ stmt = stmt.where(and_(*topic_conditions))
1155
+ if search_content is not None:
1156
+ stmt = stmt.where(cast(table.c.memory, TEXT).ilike(f"%{search_content}%"))
1157
+
1158
+ # Get total count after applying filtering
1159
+ count_stmt = select(func.count()).select_from(stmt.alias())
1160
+ total_count = sess.execute(count_stmt).scalar()
1161
+
1162
+ # Sorting
1163
+ stmt = apply_sorting(stmt, table, sort_by, sort_order)
1164
+
1165
+ # Paginating
1166
+ if limit is not None:
1167
+ stmt = stmt.limit(limit)
1168
+ if page is not None:
1169
+ stmt = stmt.offset((page - 1) * limit)
1170
+
1171
+ result = sess.execute(stmt).fetchall()
1172
+ if not result:
1173
+ return [] if deserialize else ([], 0)
1174
+
1175
+ memories_raw = [record._mapping for record in result]
1176
+ if not deserialize:
1177
+ return memories_raw, total_count
1178
+
1179
+ return [UserMemory.from_dict(record) for record in memories_raw]
1180
+
1181
+ except Exception as e:
1182
+ log_error(f"Exception reading from memory table: {e}")
1183
+ raise e
1184
+
1185
+ def clear_memories(self) -> None:
1186
+ """Clear all user memories from the database."""
1187
+ try:
1188
+ table = self._get_table(table_type="memories")
1189
+ if table is None:
1190
+ return
1191
+
1192
+ with self.Session() as sess, sess.begin():
1193
+ sess.execute(table.delete())
1194
+ except Exception as e:
1195
+ log_error(f"Exception clearing user memories: {e}")
1196
+
1197
+ def get_user_memory_stats(
1198
+ self, limit: Optional[int] = None, page: Optional[int] = None
1199
+ ) -> Tuple[List[Dict[str, Any]], int]:
1200
+ """Get user memories stats.
1201
+
1202
+ Args:
1203
+ limit (Optional[int]): The maximum number of user stats to return.
1204
+ page (Optional[int]): The page number.
1205
+
1206
+ Returns:
1207
+ Tuple[List[Dict[str, Any]], int]: A list of dictionaries containing user stats and total count.
1208
+
1209
+ Example:
1210
+ (
1211
+ [
1212
+ {
1213
+ "user_id": "123",
1214
+ "total_memories": 10,
1215
+ "last_memory_updated_at": 1714560000,
1216
+ },
1217
+ ],
1218
+ total_count: 1,
1219
+ )
1220
+ """
1221
+ try:
1222
+ table = self._get_table(table_type="memories")
1223
+ if table is None:
1224
+ return [], 0
1225
+
1226
+ with self.Session() as sess, sess.begin():
1227
+ stmt = (
1228
+ select(
1229
+ table.c.user_id,
1230
+ func.count(table.c.memory_id).label("total_memories"),
1231
+ func.max(table.c.updated_at).label("last_memory_updated_at"),
1232
+ )
1233
+ .where(table.c.user_id.is_not(None))
1234
+ .group_by(table.c.user_id)
1235
+ .order_by(func.max(table.c.updated_at).desc())
1236
+ )
1237
+
1238
+ count_stmt = select(func.count()).select_from(stmt.alias())
1239
+ total_count = sess.execute(count_stmt).scalar()
1240
+
1241
+ # Pagination
1242
+ if limit is not None:
1243
+ stmt = stmt.limit(limit)
1244
+ if page is not None:
1245
+ stmt = stmt.offset((page - 1) * limit)
1246
+
1247
+ result = sess.execute(stmt).fetchall()
1248
+ if not result:
1249
+ return [], 0
1250
+
1251
+ return [
1252
+ {
1253
+ "user_id": record.user_id, # type: ignore
1254
+ "total_memories": record.total_memories,
1255
+ "last_memory_updated_at": record.last_memory_updated_at,
1256
+ }
1257
+ for record in result
1258
+ ], total_count
1259
+
1260
+ except Exception as e:
1261
+ log_error(f"Exception getting user memory stats: {e}")
1262
+ return [], 0
1263
+
1264
+ def upsert_user_memory(
1265
+ self, memory: UserMemory, deserialize: Optional[bool] = True
1266
+ ) -> Optional[Union[UserMemory, Dict[str, Any]]]:
1267
+ """Upsert a user memory in the database.
1268
+
1269
+ Args:
1270
+ memory (UserMemory): The user memory to upsert.
1271
+ deserialize (Optional[bool]): Whether to serialize the memory. Defaults to True.
1272
+
1273
+ Returns:
1274
+ Optional[Union[UserMemory, Dict[str, Any]]]:
1275
+ - When deserialize=True: UserMemory object
1276
+ - When deserialize=False: UserMemory dictionary
1277
+
1278
+ Raises:
1279
+ Exception: If an error occurs during upsert.
1280
+ """
1281
+ try:
1282
+ table = self._get_table(table_type="memories", create_table_if_not_found=True)
1283
+ if table is None:
1284
+ return None
1285
+
1286
+ with self.Session() as sess, sess.begin():
1287
+ if memory.memory_id is None:
1288
+ memory.memory_id = str(uuid4())
1289
+
1290
+ stmt = mysql.insert(table).values(
1291
+ memory_id=memory.memory_id,
1292
+ memory=memory.memory,
1293
+ input=memory.input,
1294
+ user_id=memory.user_id,
1295
+ agent_id=memory.agent_id,
1296
+ team_id=memory.team_id,
1297
+ topics=memory.topics,
1298
+ updated_at=int(time.time()),
1299
+ )
1300
+ stmt = stmt.on_duplicate_key_update(
1301
+ memory=memory.memory,
1302
+ topics=memory.topics,
1303
+ input=memory.input,
1304
+ agent_id=memory.agent_id,
1305
+ team_id=memory.team_id,
1306
+ updated_at=int(time.time()),
1307
+ )
1308
+ sess.execute(stmt)
1309
+
1310
+ # Fetch the row
1311
+ select_stmt = select(table).where(table.c.memory_id == memory.memory_id)
1312
+ result = sess.execute(select_stmt)
1313
+ row = result.fetchone()
1314
+ if not row:
1315
+ return None
1316
+
1317
+ memory_raw = row._mapping
1318
+ if not memory_raw or not deserialize:
1319
+ return memory_raw
1320
+
1321
+ return UserMemory.from_dict(memory_raw)
1322
+
1323
+ except Exception as e:
1324
+ log_error(f"Exception upserting user memory: {e}")
1325
+ return None
1326
+
1327
+ def upsert_memories(
1328
+ self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
1329
+ ) -> List[Union[UserMemory, Dict[str, Any]]]:
1330
+ """
1331
+ Bulk upsert multiple user memories for improved performance on large datasets.
1332
+
1333
+ Args:
1334
+ memories (List[UserMemory]): List of memories to upsert.
1335
+ deserialize (Optional[bool]): Whether to deserialize the memories. Defaults to True.
1336
+
1337
+ Returns:
1338
+ List[Union[UserMemory, Dict[str, Any]]]: List of upserted memories.
1339
+
1340
+ Raises:
1341
+ Exception: If an error occurs during bulk upsert.
1342
+ """
1343
+ if not memories:
1344
+ return []
1345
+
1346
+ try:
1347
+ table = self._get_table(table_type="memories", create_table_if_not_found=True)
1348
+ if table is None:
1349
+ log_info("Memories table not available, falling back to individual upserts")
1350
+ return [
1351
+ result
1352
+ for memory in memories
1353
+ if memory is not None
1354
+ for result in [self.upsert_user_memory(memory, deserialize=deserialize)]
1355
+ if result is not None
1356
+ ]
1357
+
1358
+ # Prepare bulk data
1359
+ bulk_data = []
1360
+ current_time = int(time.time())
1361
+ for memory in memories:
1362
+ if memory.memory_id is None:
1363
+ memory.memory_id = str(uuid4())
1364
+
1365
+ # Use preserved updated_at if flag is set and value exists, otherwise use current time
1366
+ updated_at = memory.updated_at if preserve_updated_at else current_time
1367
+ bulk_data.append(
1368
+ {
1369
+ "memory_id": memory.memory_id,
1370
+ "memory": memory.memory,
1371
+ "input": memory.input,
1372
+ "user_id": memory.user_id,
1373
+ "agent_id": memory.agent_id,
1374
+ "team_id": memory.team_id,
1375
+ "topics": memory.topics,
1376
+ "updated_at": updated_at,
1377
+ }
1378
+ )
1379
+
1380
+ results: List[Union[UserMemory, Dict[str, Any]]] = []
1381
+
1382
+ with self.Session() as sess, sess.begin():
1383
+ # Bulk upsert memories using MySQL ON DUPLICATE KEY UPDATE
1384
+ stmt = mysql.insert(table)
1385
+ stmt = stmt.on_duplicate_key_update(
1386
+ memory=stmt.inserted.memory,
1387
+ topics=stmt.inserted.topics,
1388
+ input=stmt.inserted.input,
1389
+ agent_id=stmt.inserted.agent_id,
1390
+ team_id=stmt.inserted.team_id,
1391
+ updated_at=stmt.inserted.updated_at,
1392
+ )
1393
+ sess.execute(stmt, bulk_data)
1394
+
1395
+ # Fetch results
1396
+ memory_ids = [memory.memory_id for memory in memories if memory.memory_id]
1397
+ select_stmt = select(table).where(table.c.memory_id.in_(memory_ids))
1398
+ result = sess.execute(select_stmt).fetchall()
1399
+
1400
+ for row in result:
1401
+ memory_dict = dict(row._mapping)
1402
+ if deserialize:
1403
+ results.append(UserMemory.from_dict(memory_dict))
1404
+ else:
1405
+ results.append(memory_dict)
1406
+
1407
+ return results
1408
+
1409
+ except Exception as e:
1410
+ log_error(f"Exception during bulk memory upsert, falling back to individual upserts: {e}")
1411
+ # Fallback to individual upserts
1412
+ return [
1413
+ result
1414
+ for memory in memories
1415
+ if memory is not None
1416
+ for result in [self.upsert_user_memory(memory, deserialize=deserialize)]
1417
+ if result is not None
1418
+ ]
1419
+
1420
+ # -- Metrics methods --
1421
+ def _get_all_sessions_for_metrics_calculation(
1422
+ self, start_timestamp: Optional[int] = None, end_timestamp: Optional[int] = None
1423
+ ) -> List[Dict[str, Any]]:
1424
+ """
1425
+ Get all sessions of all types (agent, team, workflow) as raw dictionaries.
1426
+
1427
+ Args:
1428
+ start_timestamp (Optional[int]): The start timestamp to filter by. Defaults to None.
1429
+ end_timestamp (Optional[int]): The end timestamp to filter by. Defaults to None.
1430
+
1431
+ Returns:
1432
+ List[Dict[str, Any]]: List of session dictionaries with session_type field.
1433
+
1434
+ Raises:
1435
+ Exception: If an error occurs during retrieval.
1436
+ """
1437
+ try:
1438
+ table = self._get_table(table_type="sessions")
1439
+ if table is None:
1440
+ return []
1441
+
1442
+ stmt = select(
1443
+ table.c.user_id,
1444
+ table.c.session_data,
1445
+ table.c.runs,
1446
+ table.c.created_at,
1447
+ table.c.session_type,
1448
+ )
1449
+
1450
+ if start_timestamp is not None:
1451
+ stmt = stmt.where(table.c.created_at >= start_timestamp)
1452
+ if end_timestamp is not None:
1453
+ stmt = stmt.where(table.c.created_at <= end_timestamp)
1454
+
1455
+ with self.Session() as sess:
1456
+ result = sess.execute(stmt).fetchall()
1457
+ return [record._mapping for record in result]
1458
+
1459
+ except Exception as e:
1460
+ log_error(f"Exception reading from sessions table: {e}")
1461
+ raise e
1462
+
1463
+ def _get_metrics_calculation_starting_date(self, table: Table) -> Optional[date]:
1464
+ """Get the first date for which metrics calculation is needed:
1465
+
1466
+ 1. If there are metrics records, return the date of the first day without a complete metrics record.
1467
+ 2. If there are no metrics records, return the date of the first recorded session.
1468
+ 3. If there are no metrics records and no sessions records, return None.
1469
+
1470
+ Args:
1471
+ table (Table): The table to get the starting date for.
1472
+
1473
+ Returns:
1474
+ Optional[date]: The starting date for which metrics calculation is needed.
1475
+ """
1476
+ with self.Session() as sess:
1477
+ stmt = select(table).order_by(table.c.date.desc()).limit(1)
1478
+ result = sess.execute(stmt).fetchone()
1479
+
1480
+ # 1. Return the date of the first day without a complete metrics record.
1481
+ if result is not None:
1482
+ if result.completed:
1483
+ return result._mapping["date"] + timedelta(days=1)
1484
+ else:
1485
+ return result._mapping["date"]
1486
+
1487
+ # 2. No metrics records. Return the date of the first recorded session.
1488
+ first_session, _ = self.get_sessions(sort_by="created_at", sort_order="asc", limit=1, deserialize=False)
1489
+ if not isinstance(first_session, list):
1490
+ raise ValueError("Error obtaining session list to calculate metrics")
1491
+
1492
+ first_session_date = first_session[0]["created_at"] if first_session else None
1493
+
1494
+ # 3. No metrics records and no sessions records. Return None.
1495
+ if first_session_date is None:
1496
+ return None
1497
+
1498
+ return datetime.fromtimestamp(first_session_date, tz=timezone.utc).date()
1499
+
1500
+ def calculate_metrics(self) -> Optional[list[dict]]:
1501
+ """Calculate metrics for all dates without complete metrics.
1502
+
1503
+ Returns:
1504
+ Optional[list[dict]]: The calculated metrics.
1505
+
1506
+ Raises:
1507
+ Exception: If an error occurs during metrics calculation.
1508
+ """
1509
+ try:
1510
+ table = self._get_table(table_type="metrics", create_table_if_not_found=True)
1511
+ if table is None:
1512
+ return None
1513
+
1514
+ starting_date = self._get_metrics_calculation_starting_date(table)
1515
+ if starting_date is None:
1516
+ log_info("No session data found. Won't calculate metrics.")
1517
+ return None
1518
+
1519
+ dates_to_process = get_dates_to_calculate_metrics_for(starting_date)
1520
+ if not dates_to_process:
1521
+ log_info("Metrics already calculated for all relevant dates.")
1522
+ return None
1523
+
1524
+ start_timestamp = int(
1525
+ datetime.combine(dates_to_process[0], datetime.min.time()).replace(tzinfo=timezone.utc).timestamp()
1526
+ )
1527
+ end_timestamp = int(
1528
+ datetime.combine(dates_to_process[-1] + timedelta(days=1), datetime.min.time())
1529
+ .replace(tzinfo=timezone.utc)
1530
+ .timestamp()
1531
+ )
1532
+
1533
+ sessions = self._get_all_sessions_for_metrics_calculation(
1534
+ start_timestamp=start_timestamp, end_timestamp=end_timestamp
1535
+ )
1536
+ all_sessions_data = fetch_all_sessions_data(
1537
+ sessions=sessions, dates_to_process=dates_to_process, start_timestamp=start_timestamp
1538
+ )
1539
+ if not all_sessions_data:
1540
+ log_info("No new session data found. Won't calculate metrics.")
1541
+ return None
1542
+
1543
+ results = []
1544
+ metrics_records = []
1545
+
1546
+ for date_to_process in dates_to_process:
1547
+ date_key = date_to_process.isoformat()
1548
+ sessions_for_date = all_sessions_data.get(date_key, {})
1549
+
1550
+ # Skip dates with no sessions
1551
+ if not any(len(sessions) > 0 for sessions in sessions_for_date.values()):
1552
+ continue
1553
+
1554
+ metrics_record = calculate_date_metrics(date_to_process, sessions_for_date)
1555
+ metrics_records.append(metrics_record)
1556
+
1557
+ if metrics_records:
1558
+ with self.Session() as sess, sess.begin():
1559
+ results = bulk_upsert_metrics(session=sess, table=table, metrics_records=metrics_records)
1560
+
1561
+ return results
1562
+
1563
+ except Exception as e:
1564
+ log_error(f"Exception refreshing metrics: {e}")
1565
+ return None
1566
+
1567
+ def get_metrics(
1568
+ self,
1569
+ starting_date: Optional[date] = None,
1570
+ ending_date: Optional[date] = None,
1571
+ ) -> Tuple[List[dict], Optional[int]]:
1572
+ """Get all metrics matching the given date range.
1573
+
1574
+ Args:
1575
+ starting_date (Optional[date]): The starting date to filter metrics by.
1576
+ ending_date (Optional[date]): The ending date to filter metrics by.
1577
+
1578
+ Returns:
1579
+ Tuple[List[dict], Optional[int]]: A tuple containing the metrics and the timestamp of the latest update.
1580
+
1581
+ Raises:
1582
+ Exception: If an error occurs during retrieval.
1583
+ """
1584
+ try:
1585
+ table = self._get_table(table_type="metrics", create_table_if_not_found=True)
1586
+ if table is None:
1587
+ return [], 0
1588
+
1589
+ with self.Session() as sess, sess.begin():
1590
+ stmt = select(table)
1591
+ if starting_date:
1592
+ stmt = stmt.where(table.c.date >= starting_date)
1593
+ if ending_date:
1594
+ stmt = stmt.where(table.c.date <= ending_date)
1595
+ result = sess.execute(stmt).fetchall()
1596
+ if not result:
1597
+ return [], None
1598
+
1599
+ # Get the latest updated_at
1600
+ latest_stmt = select(func.max(table.c.updated_at))
1601
+ latest_updated_at = sess.execute(latest_stmt).scalar()
1602
+
1603
+ return [row._mapping for row in result], latest_updated_at
1604
+
1605
+ except Exception as e:
1606
+ log_error(f"Exception getting metrics: {e}")
1607
+ return [], None
1608
+
1609
+ # -- Knowledge methods --
1610
+
1611
+ def delete_knowledge_content(self, id: str):
1612
+ """Delete a knowledge row from the database.
1613
+
1614
+ Args:
1615
+ id (str): The ID of the knowledge row to delete.
1616
+
1617
+ Raises:
1618
+ Exception: If an error occurs during deletion.
1619
+ """
1620
+ table = self._get_table(table_type="knowledge")
1621
+ if table is None:
1622
+ return None
1623
+
1624
+ try:
1625
+ with self.Session() as sess, sess.begin():
1626
+ stmt = table.delete().where(table.c.id == id)
1627
+ sess.execute(stmt)
1628
+
1629
+ except Exception as e:
1630
+ log_error(f"Exception deleting knowledge content: {e}")
1631
+
1632
+ def get_knowledge_content(self, id: str) -> Optional[KnowledgeRow]:
1633
+ """Get a knowledge row from the database.
1634
+
1635
+ Args:
1636
+ id (str): The ID of the knowledge row to get.
1637
+
1638
+ Returns:
1639
+ Optional[KnowledgeRow]: The knowledge row, or None if it doesn't exist.
1640
+
1641
+ Raises:
1642
+ Exception: If an error occurs during retrieval.
1643
+ """
1644
+ table = self._get_table(table_type="knowledge")
1645
+ if table is None:
1646
+ return None
1647
+
1648
+ try:
1649
+ with self.Session() as sess, sess.begin():
1650
+ stmt = select(table).where(table.c.id == id)
1651
+ result = sess.execute(stmt).fetchone()
1652
+ if result is None:
1653
+ return None
1654
+ return KnowledgeRow.model_validate(result._mapping)
1655
+
1656
+ except Exception as e:
1657
+ log_error(f"Exception getting knowledge content: {e}")
1658
+ return None
1659
+
1660
+ def get_knowledge_contents(
1661
+ self,
1662
+ limit: Optional[int] = None,
1663
+ page: Optional[int] = None,
1664
+ sort_by: Optional[str] = None,
1665
+ sort_order: Optional[str] = None,
1666
+ ) -> Tuple[List[KnowledgeRow], int]:
1667
+ """Get all knowledge contents from the database.
1668
+
1669
+ Args:
1670
+ limit (Optional[int]): The maximum number of knowledge contents to return.
1671
+ page (Optional[int]): The page number.
1672
+ sort_by (Optional[str]): The column to sort by.
1673
+ sort_order (Optional[str]): The order to sort by.
1674
+ create_table_if_not_found (Optional[bool]): Whether to create the table if it doesn't exist.
1675
+
1676
+ Returns:
1677
+ Tuple[List[KnowledgeRow], int]: The knowledge contents and total count.
1678
+
1679
+ Raises:
1680
+ Exception: If an error occurs during retrieval.
1681
+ """
1682
+ table = self._get_table(table_type="knowledge")
1683
+ if table is None:
1684
+ return [], 0
1685
+
1686
+ try:
1687
+ with self.Session() as sess, sess.begin():
1688
+ stmt = select(table)
1689
+
1690
+ # Apply sorting
1691
+ if sort_by is not None:
1692
+ stmt = stmt.order_by(getattr(table.c, sort_by) * (1 if sort_order == "asc" else -1))
1693
+
1694
+ # Get total count before applying limit and pagination
1695
+ count_stmt = select(func.count()).select_from(stmt.alias())
1696
+ total_count = sess.execute(count_stmt).scalar()
1697
+
1698
+ # Apply pagination after count
1699
+ if limit is not None:
1700
+ stmt = stmt.limit(limit)
1701
+ if page is not None:
1702
+ stmt = stmt.offset((page - 1) * limit)
1703
+
1704
+ result = sess.execute(stmt).fetchall()
1705
+ if not result:
1706
+ return [], 0
1707
+
1708
+ return [KnowledgeRow.model_validate(record._mapping) for record in result], total_count
1709
+
1710
+ except Exception as e:
1711
+ log_error(f"Exception getting knowledge contents: {e}")
1712
+ return [], 0
1713
+
1714
+ def upsert_knowledge_content(self, knowledge_row: KnowledgeRow):
1715
+ """Upsert knowledge content in the database.
1716
+
1717
+ Args:
1718
+ knowledge_row (KnowledgeRow): The knowledge row to upsert.
1719
+
1720
+ Returns:
1721
+ Optional[KnowledgeRow]: The upserted knowledge row, or None if the operation fails.
1722
+
1723
+ Raises:
1724
+ Exception: If an error occurs during upsert.
1725
+ """
1726
+ try:
1727
+ table = self._get_table(table_type="knowledge", create_table_if_not_found=True)
1728
+ if table is None:
1729
+ return None
1730
+
1731
+ with self.Session() as sess, sess.begin():
1732
+ # Get the actual table columns to avoid "unconsumed column names" error
1733
+ table_columns = set(table.columns.keys())
1734
+
1735
+ # Only include fields that exist in the table and are not None
1736
+ insert_data = {}
1737
+ update_fields = {}
1738
+
1739
+ # Map of KnowledgeRow fields to table columns
1740
+ field_mapping = {
1741
+ "id": "id",
1742
+ "name": "name",
1743
+ "description": "description",
1744
+ "metadata": "metadata",
1745
+ "type": "type",
1746
+ "size": "size",
1747
+ "linked_to": "linked_to",
1748
+ "access_count": "access_count",
1749
+ "status": "status",
1750
+ "status_message": "status_message",
1751
+ "created_at": "created_at",
1752
+ "updated_at": "updated_at",
1753
+ "external_id": "external_id",
1754
+ }
1755
+
1756
+ # Build insert and update data only for fields that exist in the table
1757
+ for model_field, table_column in field_mapping.items():
1758
+ if table_column in table_columns:
1759
+ value = getattr(knowledge_row, model_field, None)
1760
+ if value is not None:
1761
+ insert_data[table_column] = value
1762
+ # Don't include ID in update_fields since it's the primary key
1763
+ if table_column != "id":
1764
+ update_fields[table_column] = value
1765
+
1766
+ # Ensure id is always included for the insert
1767
+ if "id" in table_columns and knowledge_row.id:
1768
+ insert_data["id"] = knowledge_row.id
1769
+
1770
+ # Handle case where update_fields is empty (all fields are None or don't exist in table)
1771
+ if not update_fields:
1772
+ # If we have insert_data, just do an insert without conflict resolution
1773
+ if insert_data:
1774
+ stmt = mysql.insert(table).values(insert_data)
1775
+ sess.execute(stmt)
1776
+ else:
1777
+ # If we have no data at all, this is an error
1778
+ log_error("No valid fields found for knowledge row upsert")
1779
+ return None
1780
+ else:
1781
+ # Normal upsert with conflict resolution
1782
+ stmt = mysql.insert(table).values(insert_data).on_duplicate_key_update(**update_fields)
1783
+ sess.execute(stmt)
1784
+
1785
+ return knowledge_row
1786
+
1787
+ except Exception as e:
1788
+ log_error(f"Error upserting knowledge row: {e}")
1789
+ return None
1790
+
1791
+ # -- Eval methods --
1792
+
1793
+ def create_eval_run(self, eval_run: EvalRunRecord) -> Optional[EvalRunRecord]:
1794
+ """Create an EvalRunRecord in the database.
1795
+
1796
+ Args:
1797
+ eval_run (EvalRunRecord): The eval run to create.
1798
+
1799
+ Returns:
1800
+ Optional[EvalRunRecord]: The created eval run, or None if the operation fails.
1801
+
1802
+ Raises:
1803
+ Exception: If an error occurs during creation.
1804
+ """
1805
+ try:
1806
+ table = self._get_table(table_type="evals", create_table_if_not_found=True)
1807
+ if table is None:
1808
+ return None
1809
+
1810
+ with self.Session() as sess, sess.begin():
1811
+ current_time = int(time.time())
1812
+ stmt = mysql.insert(table).values(
1813
+ {"created_at": current_time, "updated_at": current_time, **eval_run.model_dump()}
1814
+ )
1815
+ sess.execute(stmt)
1816
+
1817
+ return eval_run
1818
+
1819
+ except Exception as e:
1820
+ log_error(f"Error creating eval run: {e}")
1821
+ return None
1822
+
1823
+ def delete_eval_run(self, eval_run_id: str) -> None:
1824
+ """Delete an eval run from the database.
1825
+
1826
+ Args:
1827
+ eval_run_id (str): The ID of the eval run to delete.
1828
+ """
1829
+ try:
1830
+ table = self._get_table(table_type="evals")
1831
+ if table is None:
1832
+ return
1833
+
1834
+ with self.Session() as sess, sess.begin():
1835
+ stmt = table.delete().where(table.c.run_id == eval_run_id)
1836
+ result = sess.execute(stmt)
1837
+ if result.rowcount == 0:
1838
+ log_error(f"No eval run found with ID: {eval_run_id}")
1839
+ else:
1840
+ log_debug(f"Deleted eval run with ID: {eval_run_id}")
1841
+
1842
+ except Exception as e:
1843
+ log_error(f"Error deleting eval run {eval_run_id}: {e}")
1844
+
1845
+ def delete_eval_runs(self, eval_run_ids: List[str]) -> None:
1846
+ """Delete multiple eval runs from the database.
1847
+
1848
+ Args:
1849
+ eval_run_ids (List[str]): List of eval run IDs to delete.
1850
+ """
1851
+ try:
1852
+ table = self._get_table(table_type="evals")
1853
+ if table is None:
1854
+ return
1855
+
1856
+ with self.Session() as sess, sess.begin():
1857
+ stmt = table.delete().where(table.c.run_id.in_(eval_run_ids))
1858
+ result = sess.execute(stmt)
1859
+ if result.rowcount == 0:
1860
+ log_error(f"No eval runs found with IDs: {eval_run_ids}")
1861
+ else:
1862
+ log_debug(f"Deleted {result.rowcount} eval runs")
1863
+
1864
+ except Exception as e:
1865
+ log_error(f"Error deleting eval runs {eval_run_ids}: {e}")
1866
+
1867
+ def get_eval_run(
1868
+ self, eval_run_id: str, deserialize: Optional[bool] = True
1869
+ ) -> Optional[Union[EvalRunRecord, Dict[str, Any]]]:
1870
+ """Get an eval run from the database.
1871
+
1872
+ Args:
1873
+ eval_run_id (str): The ID of the eval run to get.
1874
+ deserialize (Optional[bool]): Whether to serialize the eval run. Defaults to True.
1875
+
1876
+ Returns:
1877
+ Optional[Union[EvalRunRecord, Dict[str, Any]]]:
1878
+ - When deserialize=True: EvalRunRecord object
1879
+ - When deserialize=False: EvalRun dictionary
1880
+
1881
+ Raises:
1882
+ Exception: If an error occurs during retrieval.
1883
+ """
1884
+ try:
1885
+ table = self._get_table(table_type="evals")
1886
+ if table is None:
1887
+ return None
1888
+
1889
+ with self.Session() as sess, sess.begin():
1890
+ stmt = select(table).where(table.c.run_id == eval_run_id)
1891
+ result = sess.execute(stmt).fetchone()
1892
+ if result is None:
1893
+ return None
1894
+
1895
+ eval_run_raw = result._mapping
1896
+ if not deserialize:
1897
+ return eval_run_raw
1898
+
1899
+ return EvalRunRecord.model_validate(eval_run_raw)
1900
+
1901
+ except Exception as e:
1902
+ log_error(f"Exception getting eval run {eval_run_id}: {e}")
1903
+ return None
1904
+
1905
+ def get_eval_runs(
1906
+ self,
1907
+ limit: Optional[int] = None,
1908
+ page: Optional[int] = None,
1909
+ sort_by: Optional[str] = None,
1910
+ sort_order: Optional[str] = None,
1911
+ agent_id: Optional[str] = None,
1912
+ team_id: Optional[str] = None,
1913
+ workflow_id: Optional[str] = None,
1914
+ model_id: Optional[str] = None,
1915
+ filter_type: Optional[EvalFilterType] = None,
1916
+ eval_type: Optional[List[EvalType]] = None,
1917
+ deserialize: Optional[bool] = True,
1918
+ ) -> Union[List[EvalRunRecord], Tuple[List[Dict[str, Any]], int]]:
1919
+ """Get all eval runs from the database.
1920
+
1921
+ Args:
1922
+ limit (Optional[int]): The maximum number of eval runs to return.
1923
+ page (Optional[int]): The page number.
1924
+ sort_by (Optional[str]): The column to sort by.
1925
+ sort_order (Optional[str]): The order to sort by.
1926
+ agent_id (Optional[str]): The ID of the agent to filter by.
1927
+ team_id (Optional[str]): The ID of the team to filter by.
1928
+ workflow_id (Optional[str]): The ID of the workflow to filter by.
1929
+ model_id (Optional[str]): The ID of the model to filter by.
1930
+ eval_type (Optional[List[EvalType]]): The type(s) of eval to filter by.
1931
+ filter_type (Optional[EvalFilterType]): Filter by component type (agent, team, workflow).
1932
+ deserialize (Optional[bool]): Whether to serialize the eval runs. Defaults to True.
1933
+ create_table_if_not_found (Optional[bool]): Whether to create the table if it doesn't exist.
1934
+
1935
+ Returns:
1936
+ Union[List[EvalRunRecord], Tuple[List[Dict[str, Any]], int]]:
1937
+ - When deserialize=True: List of EvalRunRecord objects
1938
+ - When deserialize=False: List of dictionaries
1939
+
1940
+ Raises:
1941
+ Exception: If an error occurs during retrieval.
1942
+ """
1943
+ try:
1944
+ table = self._get_table(table_type="evals")
1945
+ if table is None:
1946
+ return [] if deserialize else ([], 0)
1947
+
1948
+ with self.Session() as sess, sess.begin():
1949
+ stmt = select(table)
1950
+
1951
+ # Filtering
1952
+ if agent_id is not None:
1953
+ stmt = stmt.where(table.c.agent_id == agent_id)
1954
+ if team_id is not None:
1955
+ stmt = stmt.where(table.c.team_id == team_id)
1956
+ if workflow_id is not None:
1957
+ stmt = stmt.where(table.c.workflow_id == workflow_id)
1958
+ if model_id is not None:
1959
+ stmt = stmt.where(table.c.model_id == model_id)
1960
+ if eval_type is not None and len(eval_type) > 0:
1961
+ stmt = stmt.where(table.c.eval_type.in_(eval_type))
1962
+ if filter_type is not None:
1963
+ if filter_type == EvalFilterType.AGENT:
1964
+ stmt = stmt.where(table.c.agent_id.is_not(None))
1965
+ elif filter_type == EvalFilterType.TEAM:
1966
+ stmt = stmt.where(table.c.team_id.is_not(None))
1967
+ elif filter_type == EvalFilterType.WORKFLOW:
1968
+ stmt = stmt.where(table.c.workflow_id.is_not(None))
1969
+
1970
+ # Get total count after applying filtering
1971
+ count_stmt = select(func.count()).select_from(stmt.alias())
1972
+ total_count = sess.execute(count_stmt).scalar()
1973
+
1974
+ # Sorting
1975
+ if sort_by is None:
1976
+ stmt = stmt.order_by(table.c.created_at.desc())
1977
+ else:
1978
+ stmt = apply_sorting(stmt, table, sort_by, sort_order)
1979
+
1980
+ # Paginating
1981
+ if limit is not None:
1982
+ stmt = stmt.limit(limit)
1983
+ if page is not None:
1984
+ stmt = stmt.offset((page - 1) * limit)
1985
+
1986
+ result = sess.execute(stmt).fetchall()
1987
+ if not result:
1988
+ return [] if deserialize else ([], 0)
1989
+
1990
+ eval_runs_raw = [row._mapping for row in result]
1991
+ if not deserialize:
1992
+ return eval_runs_raw, total_count
1993
+
1994
+ return [EvalRunRecord.model_validate(row) for row in eval_runs_raw]
1995
+
1996
+ except Exception as e:
1997
+ log_error(f"Exception getting eval runs: {e}")
1998
+ raise e
1999
+
2000
+ def rename_eval_run(
2001
+ self, eval_run_id: str, name: str, deserialize: Optional[bool] = True
2002
+ ) -> Optional[Union[EvalRunRecord, Dict[str, Any]]]:
2003
+ """Upsert the name of an eval run in the database, returning raw dictionary.
2004
+
2005
+ Args:
2006
+ eval_run_id (str): The ID of the eval run to update.
2007
+ name (str): The new name of the eval run.
2008
+
2009
+ Returns:
2010
+ Optional[Dict[str, Any]]: The updated eval run, or None if the operation fails.
2011
+
2012
+ Raises:
2013
+ Exception: If an error occurs during update.
2014
+ """
2015
+ try:
2016
+ table = self._get_table(table_type="evals")
2017
+ if table is None:
2018
+ return None
2019
+
2020
+ with self.Session() as sess, sess.begin():
2021
+ stmt = (
2022
+ table.update().where(table.c.run_id == eval_run_id).values(name=name, updated_at=int(time.time()))
2023
+ )
2024
+ sess.execute(stmt)
2025
+
2026
+ eval_run_raw = self.get_eval_run(eval_run_id=eval_run_id, deserialize=deserialize)
2027
+ if not eval_run_raw or not deserialize:
2028
+ return eval_run_raw
2029
+
2030
+ return EvalRunRecord.model_validate(eval_run_raw)
2031
+
2032
+ except Exception as e:
2033
+ log_error(f"Error upserting eval run name {eval_run_id}: {e}")
2034
+ return None
2035
+
2036
+ # -- Culture methods --
2037
+
2038
+ def clear_cultural_knowledge(self) -> None:
2039
+ """Delete all cultural knowledge from the database.
2040
+
2041
+ Raises:
2042
+ Exception: If an error occurs during deletion.
2043
+ """
2044
+ try:
2045
+ table = self._get_table(table_type="culture")
2046
+ if table is None:
2047
+ return
2048
+
2049
+ with self.Session() as sess, sess.begin():
2050
+ sess.execute(table.delete())
2051
+
2052
+ except Exception as e:
2053
+ log_warning(f"Exception deleting all cultural knowledge: {e}")
2054
+ raise e
2055
+
2056
+ def delete_cultural_knowledge(self, id: str) -> None:
2057
+ """Delete a cultural knowledge entry from the database.
2058
+
2059
+ Args:
2060
+ id (str): The ID of the cultural knowledge to delete.
2061
+
2062
+ Raises:
2063
+ Exception: If an error occurs during deletion.
2064
+ """
2065
+ try:
2066
+ table = self._get_table(table_type="culture")
2067
+ if table is None:
2068
+ return
2069
+
2070
+ with self.Session() as sess, sess.begin():
2071
+ delete_stmt = table.delete().where(table.c.id == id)
2072
+ result = sess.execute(delete_stmt)
2073
+
2074
+ success = result.rowcount > 0
2075
+ if success:
2076
+ log_debug(f"Successfully deleted cultural knowledge id: {id}")
2077
+ else:
2078
+ log_debug(f"No cultural knowledge found with id: {id}")
2079
+
2080
+ except Exception as e:
2081
+ log_error(f"Error deleting cultural knowledge: {e}")
2082
+ raise e
2083
+
2084
+ def get_cultural_knowledge(
2085
+ self, id: str, deserialize: Optional[bool] = True
2086
+ ) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
2087
+ """Get a cultural knowledge entry from the database.
2088
+
2089
+ Args:
2090
+ id (str): The ID of the cultural knowledge to get.
2091
+ deserialize (Optional[bool]): Whether to deserialize the cultural knowledge. Defaults to True.
2092
+
2093
+ Returns:
2094
+ Optional[Union[CulturalKnowledge, Dict[str, Any]]]: The cultural knowledge entry, or None if it doesn't exist.
2095
+
2096
+ Raises:
2097
+ Exception: If an error occurs during retrieval.
2098
+ """
2099
+ try:
2100
+ table = self._get_table(table_type="culture")
2101
+ if table is None:
2102
+ return None
2103
+
2104
+ with self.Session() as sess, sess.begin():
2105
+ stmt = select(table).where(table.c.id == id)
2106
+ result = sess.execute(stmt).fetchone()
2107
+ if result is None:
2108
+ return None
2109
+
2110
+ db_row = dict(result._mapping)
2111
+ if not db_row or not deserialize:
2112
+ return db_row
2113
+
2114
+ return deserialize_cultural_knowledge_from_db(db_row)
2115
+
2116
+ except Exception as e:
2117
+ log_error(f"Exception reading from cultural knowledge table: {e}")
2118
+ raise e
2119
+
2120
+ def get_all_cultural_knowledge(
2121
+ self,
2122
+ name: Optional[str] = None,
2123
+ agent_id: Optional[str] = None,
2124
+ team_id: Optional[str] = None,
2125
+ limit: Optional[int] = None,
2126
+ page: Optional[int] = None,
2127
+ sort_by: Optional[str] = None,
2128
+ sort_order: Optional[str] = None,
2129
+ deserialize: Optional[bool] = True,
2130
+ ) -> Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
2131
+ """Get all cultural knowledge from the database as CulturalKnowledge objects.
2132
+
2133
+ Args:
2134
+ name (Optional[str]): The name of the cultural knowledge to filter by.
2135
+ agent_id (Optional[str]): The ID of the agent to filter by.
2136
+ team_id (Optional[str]): The ID of the team to filter by.
2137
+ limit (Optional[int]): The maximum number of cultural knowledge entries to return.
2138
+ page (Optional[int]): The page number.
2139
+ sort_by (Optional[str]): The column to sort by.
2140
+ sort_order (Optional[str]): The order to sort by.
2141
+ deserialize (Optional[bool]): Whether to deserialize the cultural knowledge. Defaults to True.
2142
+
2143
+ Returns:
2144
+ Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
2145
+ - When deserialize=True: List of CulturalKnowledge objects
2146
+ - When deserialize=False: List of CulturalKnowledge dictionaries and total count
2147
+
2148
+ Raises:
2149
+ Exception: If an error occurs during retrieval.
2150
+ """
2151
+ try:
2152
+ table = self._get_table(table_type="culture")
2153
+ if table is None:
2154
+ return [] if deserialize else ([], 0)
2155
+
2156
+ with self.Session() as sess, sess.begin():
2157
+ stmt = select(table)
2158
+
2159
+ # Filtering
2160
+ if name is not None:
2161
+ stmt = stmt.where(table.c.name == name)
2162
+ if agent_id is not None:
2163
+ stmt = stmt.where(table.c.agent_id == agent_id)
2164
+ if team_id is not None:
2165
+ stmt = stmt.where(table.c.team_id == team_id)
2166
+
2167
+ # Get total count after applying filtering
2168
+ count_stmt = select(func.count()).select_from(stmt.alias())
2169
+ total_count = sess.execute(count_stmt).scalar()
2170
+
2171
+ # Sorting
2172
+ stmt = apply_sorting(stmt, table, sort_by, sort_order)
2173
+ # Paginating
2174
+ if limit is not None:
2175
+ stmt = stmt.limit(limit)
2176
+ if page is not None:
2177
+ stmt = stmt.offset((page - 1) * limit)
2178
+
2179
+ result = sess.execute(stmt).fetchall()
2180
+ if not result:
2181
+ return [] if deserialize else ([], 0)
2182
+
2183
+ db_rows = [dict(record._mapping) for record in result]
2184
+
2185
+ if not deserialize:
2186
+ return db_rows, total_count
2187
+
2188
+ return [deserialize_cultural_knowledge_from_db(row) for row in db_rows]
2189
+
2190
+ except Exception as e:
2191
+ log_error(f"Error reading from cultural knowledge table: {e}")
2192
+ raise e
2193
+
2194
+ def upsert_cultural_knowledge(
2195
+ self, cultural_knowledge: CulturalKnowledge, deserialize: Optional[bool] = True
2196
+ ) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
2197
+ """Upsert a cultural knowledge entry into the database.
2198
+
2199
+ Args:
2200
+ cultural_knowledge (CulturalKnowledge): The cultural knowledge to upsert.
2201
+ deserialize (Optional[bool]): Whether to deserialize the cultural knowledge. Defaults to True.
2202
+
2203
+ Returns:
2204
+ Optional[CulturalKnowledge]: The upserted cultural knowledge entry.
2205
+
2206
+ Raises:
2207
+ Exception: If an error occurs during upsert.
2208
+ """
2209
+ try:
2210
+ table = self._get_table(table_type="culture", create_table_if_not_found=True)
2211
+ if table is None:
2212
+ return None
2213
+
2214
+ if cultural_knowledge.id is None:
2215
+ cultural_knowledge.id = str(uuid4())
2216
+
2217
+ # Serialize content, categories, and notes into a JSON dict for DB storage
2218
+ content_dict = serialize_cultural_knowledge_for_db(cultural_knowledge)
2219
+
2220
+ with self.Session() as sess, sess.begin():
2221
+ stmt = mysql.insert(table).values(
2222
+ id=cultural_knowledge.id,
2223
+ name=cultural_knowledge.name,
2224
+ summary=cultural_knowledge.summary,
2225
+ content=content_dict if content_dict else None,
2226
+ metadata=cultural_knowledge.metadata,
2227
+ input=cultural_knowledge.input,
2228
+ created_at=cultural_knowledge.created_at,
2229
+ updated_at=int(time.time()),
2230
+ agent_id=cultural_knowledge.agent_id,
2231
+ team_id=cultural_knowledge.team_id,
2232
+ )
2233
+ stmt = stmt.on_duplicate_key_update(
2234
+ name=cultural_knowledge.name,
2235
+ summary=cultural_knowledge.summary,
2236
+ content=content_dict if content_dict else None,
2237
+ metadata=cultural_knowledge.metadata,
2238
+ input=cultural_knowledge.input,
2239
+ updated_at=int(time.time()),
2240
+ agent_id=cultural_knowledge.agent_id,
2241
+ team_id=cultural_knowledge.team_id,
2242
+ )
2243
+ sess.execute(stmt)
2244
+
2245
+ # Fetch the inserted/updated row
2246
+ return self.get_cultural_knowledge(id=cultural_knowledge.id, deserialize=deserialize)
2247
+
2248
+ except Exception as e:
2249
+ log_error(f"Error upserting cultural knowledge: {e}")
2250
+ raise e
2251
+
2252
+ # -- Migrations --
2253
+
2254
    def migrate_table_from_v1_to_v2(self, v1_db_schema: str, v1_table_name: str, v1_table_type: str) -> None:
        """Migrate all content in the given v1 table to the corresponding v2 table.

        Reads every row from the v1 table, parses it into the v2 session/memory
        models, and upserts the results through this DB instance.

        Args:
            v1_db_schema (str): Schema of the v1 database the old table lives in.
            v1_table_name (str): Name of the v1 table to read from.
            v1_table_type (str): One of "agent_sessions", "team_sessions",
                "workflow_sessions" or "memories".

        Raises:
            ValueError: If v1_table_type is not one of the supported types.
        """

        # Imported lazily so the migration helpers are only loaded when a
        # migration is actually run.
        from agno.db.migrations.v1_to_v2 import (
            get_all_table_content,
            parse_agent_sessions,
            parse_memories,
            parse_team_sessions,
            parse_workflow_sessions,
        )

        # Get all content from the old table
        old_content: list[dict[str, Any]] = get_all_table_content(
            db=self,
            db_schema=v1_db_schema,
            table_name=v1_table_name,
        )
        if not old_content:
            log_info(f"No content to migrate from table {v1_table_name}")
            return

        # Parse the content into the new format. Exactly one of `sessions` /
        # `memories` is populated, depending on v1_table_type.
        memories: List[UserMemory] = []
        sessions: Sequence[Union[AgentSession, TeamSession, WorkflowSession]] = []
        if v1_table_type == "agent_sessions":
            sessions = parse_agent_sessions(old_content)
        elif v1_table_type == "team_sessions":
            sessions = parse_team_sessions(old_content)
        elif v1_table_type == "workflow_sessions":
            sessions = parse_workflow_sessions(old_content)
        elif v1_table_type == "memories":
            memories = parse_memories(old_content)
        else:
            raise ValueError(f"Invalid table type: {v1_table_type}")

        # Insert the new content into the new table, one upsert per record
        if v1_table_type == "agent_sessions":
            for session in sessions:
                self.upsert_session(session)
            log_info(f"Migrated {len(sessions)} Agent sessions to table: {self.session_table_name}")

        elif v1_table_type == "team_sessions":
            for session in sessions:
                self.upsert_session(session)
            log_info(f"Migrated {len(sessions)} Team sessions to table: {self.session_table_name}")

        elif v1_table_type == "workflow_sessions":
            for session in sessions:
                self.upsert_session(session)
            log_info(f"Migrated {len(sessions)} Workflow sessions to table: {self.session_table_name}")

        elif v1_table_type == "memories":
            for memory in memories:
                self.upsert_user_memory(memory)
            log_info(f"Migrated {len(memories)} memories to table: {self.memory_table}")