agno 0.1.2__py3-none-any.whl → 2.3.13__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (723)
  1. agno/__init__.py +8 -0
  2. agno/agent/__init__.py +44 -5
  3. agno/agent/agent.py +10531 -2975
  4. agno/api/agent.py +14 -53
  5. agno/api/api.py +7 -46
  6. agno/api/evals.py +22 -0
  7. agno/api/os.py +17 -0
  8. agno/api/routes.py +6 -25
  9. agno/api/schemas/__init__.py +9 -0
  10. agno/api/schemas/agent.py +6 -9
  11. agno/api/schemas/evals.py +16 -0
  12. agno/api/schemas/os.py +14 -0
  13. agno/api/schemas/team.py +10 -10
  14. agno/api/schemas/utils.py +21 -0
  15. agno/api/schemas/workflows.py +16 -0
  16. agno/api/settings.py +53 -0
  17. agno/api/team.py +22 -26
  18. agno/api/workflow.py +28 -0
  19. agno/cloud/aws/base.py +214 -0
  20. agno/cloud/aws/s3/__init__.py +2 -0
  21. agno/cloud/aws/s3/api_client.py +43 -0
  22. agno/cloud/aws/s3/bucket.py +195 -0
  23. agno/cloud/aws/s3/object.py +57 -0
  24. agno/compression/__init__.py +3 -0
  25. agno/compression/manager.py +247 -0
  26. agno/culture/__init__.py +3 -0
  27. agno/culture/manager.py +956 -0
  28. agno/db/__init__.py +24 -0
  29. agno/db/async_postgres/__init__.py +3 -0
  30. agno/db/base.py +946 -0
  31. agno/db/dynamo/__init__.py +3 -0
  32. agno/db/dynamo/dynamo.py +2781 -0
  33. agno/db/dynamo/schemas.py +442 -0
  34. agno/db/dynamo/utils.py +743 -0
  35. agno/db/firestore/__init__.py +3 -0
  36. agno/db/firestore/firestore.py +2379 -0
  37. agno/db/firestore/schemas.py +181 -0
  38. agno/db/firestore/utils.py +376 -0
  39. agno/db/gcs_json/__init__.py +3 -0
  40. agno/db/gcs_json/gcs_json_db.py +1791 -0
  41. agno/db/gcs_json/utils.py +228 -0
  42. agno/db/in_memory/__init__.py +3 -0
  43. agno/db/in_memory/in_memory_db.py +1312 -0
  44. agno/db/in_memory/utils.py +230 -0
  45. agno/db/json/__init__.py +3 -0
  46. agno/db/json/json_db.py +1777 -0
  47. agno/db/json/utils.py +230 -0
  48. agno/db/migrations/manager.py +199 -0
  49. agno/db/migrations/v1_to_v2.py +635 -0
  50. agno/db/migrations/versions/v2_3_0.py +938 -0
  51. agno/db/mongo/__init__.py +17 -0
  52. agno/db/mongo/async_mongo.py +2760 -0
  53. agno/db/mongo/mongo.py +2597 -0
  54. agno/db/mongo/schemas.py +119 -0
  55. agno/db/mongo/utils.py +276 -0
  56. agno/db/mysql/__init__.py +4 -0
  57. agno/db/mysql/async_mysql.py +2912 -0
  58. agno/db/mysql/mysql.py +2923 -0
  59. agno/db/mysql/schemas.py +186 -0
  60. agno/db/mysql/utils.py +488 -0
  61. agno/db/postgres/__init__.py +4 -0
  62. agno/db/postgres/async_postgres.py +2579 -0
  63. agno/db/postgres/postgres.py +2870 -0
  64. agno/db/postgres/schemas.py +187 -0
  65. agno/db/postgres/utils.py +442 -0
  66. agno/db/redis/__init__.py +3 -0
  67. agno/db/redis/redis.py +2141 -0
  68. agno/db/redis/schemas.py +159 -0
  69. agno/db/redis/utils.py +346 -0
  70. agno/db/schemas/__init__.py +4 -0
  71. agno/db/schemas/culture.py +120 -0
  72. agno/db/schemas/evals.py +34 -0
  73. agno/db/schemas/knowledge.py +40 -0
  74. agno/db/schemas/memory.py +61 -0
  75. agno/db/singlestore/__init__.py +3 -0
  76. agno/db/singlestore/schemas.py +179 -0
  77. agno/db/singlestore/singlestore.py +2877 -0
  78. agno/db/singlestore/utils.py +384 -0
  79. agno/db/sqlite/__init__.py +4 -0
  80. agno/db/sqlite/async_sqlite.py +2911 -0
  81. agno/db/sqlite/schemas.py +181 -0
  82. agno/db/sqlite/sqlite.py +2908 -0
  83. agno/db/sqlite/utils.py +429 -0
  84. agno/db/surrealdb/__init__.py +3 -0
  85. agno/db/surrealdb/metrics.py +292 -0
  86. agno/db/surrealdb/models.py +334 -0
  87. agno/db/surrealdb/queries.py +71 -0
  88. agno/db/surrealdb/surrealdb.py +1908 -0
  89. agno/db/surrealdb/utils.py +147 -0
  90. agno/db/utils.py +118 -0
  91. agno/eval/__init__.py +24 -0
  92. agno/eval/accuracy.py +666 -276
  93. agno/eval/agent_as_judge.py +861 -0
  94. agno/eval/base.py +29 -0
  95. agno/eval/performance.py +779 -0
  96. agno/eval/reliability.py +241 -62
  97. agno/eval/utils.py +120 -0
  98. agno/exceptions.py +143 -1
  99. agno/filters.py +354 -0
  100. agno/guardrails/__init__.py +6 -0
  101. agno/guardrails/base.py +19 -0
  102. agno/guardrails/openai.py +144 -0
  103. agno/guardrails/pii.py +94 -0
  104. agno/guardrails/prompt_injection.py +52 -0
  105. agno/hooks/__init__.py +3 -0
  106. agno/hooks/decorator.py +164 -0
  107. agno/integrations/discord/__init__.py +3 -0
  108. agno/integrations/discord/client.py +203 -0
  109. agno/knowledge/__init__.py +5 -1
  110. agno/{document → knowledge}/chunking/agentic.py +22 -14
  111. agno/{document → knowledge}/chunking/document.py +2 -2
  112. agno/{document → knowledge}/chunking/fixed.py +7 -6
  113. agno/knowledge/chunking/markdown.py +151 -0
  114. agno/{document → knowledge}/chunking/recursive.py +15 -3
  115. agno/knowledge/chunking/row.py +39 -0
  116. agno/knowledge/chunking/semantic.py +91 -0
  117. agno/knowledge/chunking/strategy.py +165 -0
  118. agno/knowledge/content.py +74 -0
  119. agno/knowledge/document/__init__.py +5 -0
  120. agno/{document → knowledge/document}/base.py +12 -2
  121. agno/knowledge/embedder/__init__.py +5 -0
  122. agno/knowledge/embedder/aws_bedrock.py +343 -0
  123. agno/knowledge/embedder/azure_openai.py +210 -0
  124. agno/{embedder → knowledge/embedder}/base.py +8 -0
  125. agno/knowledge/embedder/cohere.py +323 -0
  126. agno/knowledge/embedder/fastembed.py +62 -0
  127. agno/{embedder → knowledge/embedder}/fireworks.py +1 -1
  128. agno/knowledge/embedder/google.py +258 -0
  129. agno/knowledge/embedder/huggingface.py +94 -0
  130. agno/knowledge/embedder/jina.py +182 -0
  131. agno/knowledge/embedder/langdb.py +22 -0
  132. agno/knowledge/embedder/mistral.py +206 -0
  133. agno/knowledge/embedder/nebius.py +13 -0
  134. agno/knowledge/embedder/ollama.py +154 -0
  135. agno/knowledge/embedder/openai.py +195 -0
  136. agno/knowledge/embedder/sentence_transformer.py +63 -0
  137. agno/{embedder → knowledge/embedder}/together.py +1 -1
  138. agno/knowledge/embedder/vllm.py +262 -0
  139. agno/knowledge/embedder/voyageai.py +165 -0
  140. agno/knowledge/knowledge.py +3006 -0
  141. agno/knowledge/reader/__init__.py +7 -0
  142. agno/knowledge/reader/arxiv_reader.py +81 -0
  143. agno/knowledge/reader/base.py +95 -0
  144. agno/knowledge/reader/csv_reader.py +164 -0
  145. agno/knowledge/reader/docx_reader.py +82 -0
  146. agno/knowledge/reader/field_labeled_csv_reader.py +290 -0
  147. agno/knowledge/reader/firecrawl_reader.py +201 -0
  148. agno/knowledge/reader/json_reader.py +88 -0
  149. agno/knowledge/reader/markdown_reader.py +137 -0
  150. agno/knowledge/reader/pdf_reader.py +431 -0
  151. agno/knowledge/reader/pptx_reader.py +101 -0
  152. agno/knowledge/reader/reader_factory.py +313 -0
  153. agno/knowledge/reader/s3_reader.py +89 -0
  154. agno/knowledge/reader/tavily_reader.py +193 -0
  155. agno/knowledge/reader/text_reader.py +127 -0
  156. agno/knowledge/reader/web_search_reader.py +325 -0
  157. agno/knowledge/reader/website_reader.py +455 -0
  158. agno/knowledge/reader/wikipedia_reader.py +91 -0
  159. agno/knowledge/reader/youtube_reader.py +78 -0
  160. agno/knowledge/remote_content/remote_content.py +88 -0
  161. agno/knowledge/reranker/__init__.py +3 -0
  162. agno/{reranker → knowledge/reranker}/base.py +1 -1
  163. agno/{reranker → knowledge/reranker}/cohere.py +2 -2
  164. agno/knowledge/reranker/infinity.py +195 -0
  165. agno/knowledge/reranker/sentence_transformer.py +54 -0
  166. agno/knowledge/types.py +39 -0
  167. agno/knowledge/utils.py +234 -0
  168. agno/media.py +439 -95
  169. agno/memory/__init__.py +16 -3
  170. agno/memory/manager.py +1474 -123
  171. agno/memory/strategies/__init__.py +15 -0
  172. agno/memory/strategies/base.py +66 -0
  173. agno/memory/strategies/summarize.py +196 -0
  174. agno/memory/strategies/types.py +37 -0
  175. agno/models/aimlapi/__init__.py +5 -0
  176. agno/models/aimlapi/aimlapi.py +62 -0
  177. agno/models/anthropic/__init__.py +4 -0
  178. agno/models/anthropic/claude.py +960 -496
  179. agno/models/aws/__init__.py +15 -0
  180. agno/models/aws/bedrock.py +686 -451
  181. agno/models/aws/claude.py +190 -183
  182. agno/models/azure/__init__.py +18 -1
  183. agno/models/azure/ai_foundry.py +489 -0
  184. agno/models/azure/openai_chat.py +89 -40
  185. agno/models/base.py +2477 -550
  186. agno/models/cerebras/__init__.py +12 -0
  187. agno/models/cerebras/cerebras.py +565 -0
  188. agno/models/cerebras/cerebras_openai.py +131 -0
  189. agno/models/cohere/__init__.py +4 -0
  190. agno/models/cohere/chat.py +306 -492
  191. agno/models/cometapi/__init__.py +5 -0
  192. agno/models/cometapi/cometapi.py +74 -0
  193. agno/models/dashscope/__init__.py +5 -0
  194. agno/models/dashscope/dashscope.py +90 -0
  195. agno/models/deepinfra/__init__.py +5 -0
  196. agno/models/deepinfra/deepinfra.py +45 -0
  197. agno/models/deepseek/__init__.py +4 -0
  198. agno/models/deepseek/deepseek.py +110 -9
  199. agno/models/fireworks/__init__.py +4 -0
  200. agno/models/fireworks/fireworks.py +19 -22
  201. agno/models/google/__init__.py +3 -7
  202. agno/models/google/gemini.py +1717 -662
  203. agno/models/google/utils.py +22 -0
  204. agno/models/groq/__init__.py +4 -0
  205. agno/models/groq/groq.py +391 -666
  206. agno/models/huggingface/__init__.py +4 -0
  207. agno/models/huggingface/huggingface.py +266 -538
  208. agno/models/ibm/__init__.py +5 -0
  209. agno/models/ibm/watsonx.py +432 -0
  210. agno/models/internlm/__init__.py +3 -0
  211. agno/models/internlm/internlm.py +20 -3
  212. agno/models/langdb/__init__.py +1 -0
  213. agno/models/langdb/langdb.py +60 -0
  214. agno/models/litellm/__init__.py +14 -0
  215. agno/models/litellm/chat.py +503 -0
  216. agno/models/litellm/litellm_openai.py +42 -0
  217. agno/models/llama_cpp/__init__.py +5 -0
  218. agno/models/llama_cpp/llama_cpp.py +22 -0
  219. agno/models/lmstudio/__init__.py +5 -0
  220. agno/models/lmstudio/lmstudio.py +25 -0
  221. agno/models/message.py +361 -39
  222. agno/models/meta/__init__.py +12 -0
  223. agno/models/meta/llama.py +502 -0
  224. agno/models/meta/llama_openai.py +79 -0
  225. agno/models/metrics.py +120 -0
  226. agno/models/mistral/__init__.py +4 -0
  227. agno/models/mistral/mistral.py +293 -393
  228. agno/models/nebius/__init__.py +3 -0
  229. agno/models/nebius/nebius.py +53 -0
  230. agno/models/nexus/__init__.py +3 -0
  231. agno/models/nexus/nexus.py +22 -0
  232. agno/models/nvidia/__init__.py +4 -0
  233. agno/models/nvidia/nvidia.py +22 -3
  234. agno/models/ollama/__init__.py +4 -2
  235. agno/models/ollama/chat.py +257 -492
  236. agno/models/openai/__init__.py +7 -0
  237. agno/models/openai/chat.py +725 -770
  238. agno/models/openai/like.py +16 -2
  239. agno/models/openai/responses.py +1121 -0
  240. agno/models/openrouter/__init__.py +4 -0
  241. agno/models/openrouter/openrouter.py +62 -5
  242. agno/models/perplexity/__init__.py +5 -0
  243. agno/models/perplexity/perplexity.py +203 -0
  244. agno/models/portkey/__init__.py +3 -0
  245. agno/models/portkey/portkey.py +82 -0
  246. agno/models/requesty/__init__.py +5 -0
  247. agno/models/requesty/requesty.py +69 -0
  248. agno/models/response.py +177 -7
  249. agno/models/sambanova/__init__.py +4 -0
  250. agno/models/sambanova/sambanova.py +23 -4
  251. agno/models/siliconflow/__init__.py +5 -0
  252. agno/models/siliconflow/siliconflow.py +42 -0
  253. agno/models/together/__init__.py +4 -0
  254. agno/models/together/together.py +21 -164
  255. agno/models/utils.py +266 -0
  256. agno/models/vercel/__init__.py +3 -0
  257. agno/models/vercel/v0.py +43 -0
  258. agno/models/vertexai/__init__.py +0 -1
  259. agno/models/vertexai/claude.py +190 -0
  260. agno/models/vllm/__init__.py +3 -0
  261. agno/models/vllm/vllm.py +83 -0
  262. agno/models/xai/__init__.py +2 -0
  263. agno/models/xai/xai.py +111 -7
  264. agno/os/__init__.py +3 -0
  265. agno/os/app.py +1027 -0
  266. agno/os/auth.py +244 -0
  267. agno/os/config.py +126 -0
  268. agno/os/interfaces/__init__.py +1 -0
  269. agno/os/interfaces/a2a/__init__.py +3 -0
  270. agno/os/interfaces/a2a/a2a.py +42 -0
  271. agno/os/interfaces/a2a/router.py +249 -0
  272. agno/os/interfaces/a2a/utils.py +924 -0
  273. agno/os/interfaces/agui/__init__.py +3 -0
  274. agno/os/interfaces/agui/agui.py +47 -0
  275. agno/os/interfaces/agui/router.py +147 -0
  276. agno/os/interfaces/agui/utils.py +574 -0
  277. agno/os/interfaces/base.py +25 -0
  278. agno/os/interfaces/slack/__init__.py +3 -0
  279. agno/os/interfaces/slack/router.py +148 -0
  280. agno/os/interfaces/slack/security.py +30 -0
  281. agno/os/interfaces/slack/slack.py +47 -0
  282. agno/os/interfaces/whatsapp/__init__.py +3 -0
  283. agno/os/interfaces/whatsapp/router.py +210 -0
  284. agno/os/interfaces/whatsapp/security.py +55 -0
  285. agno/os/interfaces/whatsapp/whatsapp.py +36 -0
  286. agno/os/mcp.py +293 -0
  287. agno/os/middleware/__init__.py +9 -0
  288. agno/os/middleware/jwt.py +797 -0
  289. agno/os/router.py +258 -0
  290. agno/os/routers/__init__.py +3 -0
  291. agno/os/routers/agents/__init__.py +3 -0
  292. agno/os/routers/agents/router.py +599 -0
  293. agno/os/routers/agents/schema.py +261 -0
  294. agno/os/routers/evals/__init__.py +3 -0
  295. agno/os/routers/evals/evals.py +450 -0
  296. agno/os/routers/evals/schemas.py +174 -0
  297. agno/os/routers/evals/utils.py +231 -0
  298. agno/os/routers/health.py +31 -0
  299. agno/os/routers/home.py +52 -0
  300. agno/os/routers/knowledge/__init__.py +3 -0
  301. agno/os/routers/knowledge/knowledge.py +1008 -0
  302. agno/os/routers/knowledge/schemas.py +178 -0
  303. agno/os/routers/memory/__init__.py +3 -0
  304. agno/os/routers/memory/memory.py +661 -0
  305. agno/os/routers/memory/schemas.py +88 -0
  306. agno/os/routers/metrics/__init__.py +3 -0
  307. agno/os/routers/metrics/metrics.py +190 -0
  308. agno/os/routers/metrics/schemas.py +47 -0
  309. agno/os/routers/session/__init__.py +3 -0
  310. agno/os/routers/session/session.py +997 -0
  311. agno/os/routers/teams/__init__.py +3 -0
  312. agno/os/routers/teams/router.py +512 -0
  313. agno/os/routers/teams/schema.py +257 -0
  314. agno/os/routers/traces/__init__.py +3 -0
  315. agno/os/routers/traces/schemas.py +414 -0
  316. agno/os/routers/traces/traces.py +499 -0
  317. agno/os/routers/workflows/__init__.py +3 -0
  318. agno/os/routers/workflows/router.py +624 -0
  319. agno/os/routers/workflows/schema.py +75 -0
  320. agno/os/schema.py +534 -0
  321. agno/os/scopes.py +469 -0
  322. agno/{playground → os}/settings.py +7 -15
  323. agno/os/utils.py +973 -0
  324. agno/reasoning/anthropic.py +80 -0
  325. agno/reasoning/azure_ai_foundry.py +67 -0
  326. agno/reasoning/deepseek.py +63 -0
  327. agno/reasoning/default.py +97 -0
  328. agno/reasoning/gemini.py +73 -0
  329. agno/reasoning/groq.py +71 -0
  330. agno/reasoning/helpers.py +24 -1
  331. agno/reasoning/ollama.py +67 -0
  332. agno/reasoning/openai.py +86 -0
  333. agno/reasoning/step.py +2 -1
  334. agno/reasoning/vertexai.py +76 -0
  335. agno/run/__init__.py +6 -0
  336. agno/run/agent.py +822 -0
  337. agno/run/base.py +247 -0
  338. agno/run/cancel.py +81 -0
  339. agno/run/requirement.py +181 -0
  340. agno/run/team.py +767 -0
  341. agno/run/workflow.py +708 -0
  342. agno/session/__init__.py +10 -0
  343. agno/session/agent.py +260 -0
  344. agno/session/summary.py +265 -0
  345. agno/session/team.py +342 -0
  346. agno/session/workflow.py +501 -0
  347. agno/table.py +10 -0
  348. agno/team/__init__.py +37 -0
  349. agno/team/team.py +9536 -0
  350. agno/tools/__init__.py +7 -0
  351. agno/tools/agentql.py +120 -0
  352. agno/tools/airflow.py +22 -12
  353. agno/tools/api.py +122 -0
  354. agno/tools/apify.py +276 -83
  355. agno/tools/{arxiv_toolkit.py → arxiv.py} +20 -12
  356. agno/tools/aws_lambda.py +28 -7
  357. agno/tools/aws_ses.py +66 -0
  358. agno/tools/baidusearch.py +11 -4
  359. agno/tools/bitbucket.py +292 -0
  360. agno/tools/brandfetch.py +213 -0
  361. agno/tools/bravesearch.py +106 -0
  362. agno/tools/brightdata.py +367 -0
  363. agno/tools/browserbase.py +209 -0
  364. agno/tools/calcom.py +32 -23
  365. agno/tools/calculator.py +24 -37
  366. agno/tools/cartesia.py +187 -0
  367. agno/tools/{clickup_tool.py → clickup.py} +17 -28
  368. agno/tools/confluence.py +91 -26
  369. agno/tools/crawl4ai.py +139 -43
  370. agno/tools/csv_toolkit.py +28 -22
  371. agno/tools/dalle.py +36 -22
  372. agno/tools/daytona.py +475 -0
  373. agno/tools/decorator.py +169 -14
  374. agno/tools/desi_vocal.py +23 -11
  375. agno/tools/discord.py +32 -29
  376. agno/tools/docker.py +716 -0
  377. agno/tools/duckdb.py +76 -81
  378. agno/tools/duckduckgo.py +43 -40
  379. agno/tools/e2b.py +703 -0
  380. agno/tools/eleven_labs.py +65 -54
  381. agno/tools/email.py +13 -5
  382. agno/tools/evm.py +129 -0
  383. agno/tools/exa.py +324 -42
  384. agno/tools/fal.py +39 -35
  385. agno/tools/file.py +196 -30
  386. agno/tools/file_generation.py +356 -0
  387. agno/tools/financial_datasets.py +288 -0
  388. agno/tools/firecrawl.py +108 -33
  389. agno/tools/function.py +960 -122
  390. agno/tools/giphy.py +34 -12
  391. agno/tools/github.py +1294 -97
  392. agno/tools/gmail.py +922 -0
  393. agno/tools/google_bigquery.py +117 -0
  394. agno/tools/google_drive.py +271 -0
  395. agno/tools/google_maps.py +253 -0
  396. agno/tools/googlecalendar.py +607 -107
  397. agno/tools/googlesheets.py +377 -0
  398. agno/tools/hackernews.py +20 -12
  399. agno/tools/jina.py +24 -14
  400. agno/tools/jira.py +48 -19
  401. agno/tools/knowledge.py +218 -0
  402. agno/tools/linear.py +82 -43
  403. agno/tools/linkup.py +58 -0
  404. agno/tools/local_file_system.py +15 -7
  405. agno/tools/lumalab.py +41 -26
  406. agno/tools/mcp/__init__.py +10 -0
  407. agno/tools/mcp/mcp.py +331 -0
  408. agno/tools/mcp/multi_mcp.py +347 -0
  409. agno/tools/mcp/params.py +24 -0
  410. agno/tools/mcp_toolbox.py +284 -0
  411. agno/tools/mem0.py +193 -0
  412. agno/tools/memory.py +419 -0
  413. agno/tools/mlx_transcribe.py +11 -9
  414. agno/tools/models/azure_openai.py +190 -0
  415. agno/tools/models/gemini.py +203 -0
  416. agno/tools/models/groq.py +158 -0
  417. agno/tools/models/morph.py +186 -0
  418. agno/tools/models/nebius.py +124 -0
  419. agno/tools/models_labs.py +163 -82
  420. agno/tools/moviepy_video.py +18 -13
  421. agno/tools/nano_banana.py +151 -0
  422. agno/tools/neo4j.py +134 -0
  423. agno/tools/newspaper.py +15 -4
  424. agno/tools/newspaper4k.py +19 -6
  425. agno/tools/notion.py +204 -0
  426. agno/tools/openai.py +181 -17
  427. agno/tools/openbb.py +27 -20
  428. agno/tools/opencv.py +321 -0
  429. agno/tools/openweather.py +233 -0
  430. agno/tools/oxylabs.py +385 -0
  431. agno/tools/pandas.py +25 -15
  432. agno/tools/parallel.py +314 -0
  433. agno/tools/postgres.py +238 -185
  434. agno/tools/pubmed.py +125 -13
  435. agno/tools/python.py +48 -35
  436. agno/tools/reasoning.py +283 -0
  437. agno/tools/reddit.py +207 -29
  438. agno/tools/redshift.py +406 -0
  439. agno/tools/replicate.py +69 -26
  440. agno/tools/resend.py +11 -6
  441. agno/tools/scrapegraph.py +179 -19
  442. agno/tools/searxng.py +23 -31
  443. agno/tools/serpapi.py +15 -10
  444. agno/tools/serper.py +255 -0
  445. agno/tools/shell.py +23 -12
  446. agno/tools/shopify.py +1519 -0
  447. agno/tools/slack.py +56 -14
  448. agno/tools/sleep.py +8 -6
  449. agno/tools/spider.py +35 -11
  450. agno/tools/spotify.py +919 -0
  451. agno/tools/sql.py +34 -19
  452. agno/tools/tavily.py +158 -8
  453. agno/tools/telegram.py +18 -8
  454. agno/tools/todoist.py +218 -0
  455. agno/tools/toolkit.py +134 -9
  456. agno/tools/trafilatura.py +388 -0
  457. agno/tools/trello.py +25 -28
  458. agno/tools/twilio.py +18 -9
  459. agno/tools/user_control_flow.py +78 -0
  460. agno/tools/valyu.py +228 -0
  461. agno/tools/visualization.py +467 -0
  462. agno/tools/webbrowser.py +28 -0
  463. agno/tools/webex.py +76 -0
  464. agno/tools/website.py +23 -19
  465. agno/tools/webtools.py +45 -0
  466. agno/tools/whatsapp.py +286 -0
  467. agno/tools/wikipedia.py +28 -19
  468. agno/tools/workflow.py +285 -0
  469. agno/tools/{twitter.py → x.py} +142 -46
  470. agno/tools/yfinance.py +41 -39
  471. agno/tools/youtube.py +34 -17
  472. agno/tools/zendesk.py +15 -5
  473. agno/tools/zep.py +454 -0
  474. agno/tools/zoom.py +86 -37
  475. agno/tracing/__init__.py +12 -0
  476. agno/tracing/exporter.py +157 -0
  477. agno/tracing/schemas.py +276 -0
  478. agno/tracing/setup.py +111 -0
  479. agno/utils/agent.py +938 -0
  480. agno/utils/audio.py +37 -1
  481. agno/utils/certs.py +27 -0
  482. agno/utils/code_execution.py +11 -0
  483. agno/utils/common.py +103 -20
  484. agno/utils/cryptography.py +22 -0
  485. agno/utils/dttm.py +33 -0
  486. agno/utils/events.py +700 -0
  487. agno/utils/functions.py +107 -37
  488. agno/utils/gemini.py +426 -0
  489. agno/utils/hooks.py +171 -0
  490. agno/utils/http.py +185 -0
  491. agno/utils/json_schema.py +159 -37
  492. agno/utils/knowledge.py +36 -0
  493. agno/utils/location.py +19 -0
  494. agno/utils/log.py +221 -8
  495. agno/utils/mcp.py +214 -0
  496. agno/utils/media.py +335 -14
  497. agno/utils/merge_dict.py +22 -1
  498. agno/utils/message.py +77 -2
  499. agno/utils/models/ai_foundry.py +50 -0
  500. agno/utils/models/claude.py +373 -0
  501. agno/utils/models/cohere.py +94 -0
  502. agno/utils/models/llama.py +85 -0
  503. agno/utils/models/mistral.py +100 -0
  504. agno/utils/models/openai_responses.py +140 -0
  505. agno/utils/models/schema_utils.py +153 -0
  506. agno/utils/models/watsonx.py +41 -0
  507. agno/utils/openai.py +257 -0
  508. agno/utils/pickle.py +1 -1
  509. agno/utils/pprint.py +124 -8
  510. agno/utils/print_response/agent.py +930 -0
  511. agno/utils/print_response/team.py +1914 -0
  512. agno/utils/print_response/workflow.py +1668 -0
  513. agno/utils/prompts.py +111 -0
  514. agno/utils/reasoning.py +108 -0
  515. agno/utils/response.py +163 -0
  516. agno/utils/serialize.py +32 -0
  517. agno/utils/shell.py +4 -4
  518. agno/utils/streamlit.py +487 -0
  519. agno/utils/string.py +204 -51
  520. agno/utils/team.py +139 -0
  521. agno/utils/timer.py +9 -2
  522. agno/utils/tokens.py +657 -0
  523. agno/utils/tools.py +19 -1
  524. agno/utils/whatsapp.py +305 -0
  525. agno/utils/yaml_io.py +3 -3
  526. agno/vectordb/__init__.py +2 -0
  527. agno/vectordb/base.py +87 -9
  528. agno/vectordb/cassandra/__init__.py +5 -1
  529. agno/vectordb/cassandra/cassandra.py +383 -27
  530. agno/vectordb/chroma/__init__.py +4 -0
  531. agno/vectordb/chroma/chromadb.py +748 -83
  532. agno/vectordb/clickhouse/__init__.py +7 -1
  533. agno/vectordb/clickhouse/clickhousedb.py +554 -53
  534. agno/vectordb/couchbase/__init__.py +3 -0
  535. agno/vectordb/couchbase/couchbase.py +1446 -0
  536. agno/vectordb/lancedb/__init__.py +5 -0
  537. agno/vectordb/lancedb/lance_db.py +730 -98
  538. agno/vectordb/langchaindb/__init__.py +5 -0
  539. agno/vectordb/langchaindb/langchaindb.py +163 -0
  540. agno/vectordb/lightrag/__init__.py +5 -0
  541. agno/vectordb/lightrag/lightrag.py +388 -0
  542. agno/vectordb/llamaindex/__init__.py +3 -0
  543. agno/vectordb/llamaindex/llamaindexdb.py +166 -0
  544. agno/vectordb/milvus/__init__.py +3 -0
  545. agno/vectordb/milvus/milvus.py +966 -78
  546. agno/vectordb/mongodb/__init__.py +9 -1
  547. agno/vectordb/mongodb/mongodb.py +1175 -172
  548. agno/vectordb/pgvector/__init__.py +8 -0
  549. agno/vectordb/pgvector/pgvector.py +599 -115
  550. agno/vectordb/pineconedb/__init__.py +5 -1
  551. agno/vectordb/pineconedb/pineconedb.py +406 -43
  552. agno/vectordb/qdrant/__init__.py +4 -0
  553. agno/vectordb/qdrant/qdrant.py +914 -61
  554. agno/vectordb/redis/__init__.py +9 -0
  555. agno/vectordb/redis/redisdb.py +682 -0
  556. agno/vectordb/singlestore/__init__.py +8 -1
  557. agno/vectordb/singlestore/singlestore.py +771 -0
  558. agno/vectordb/surrealdb/__init__.py +3 -0
  559. agno/vectordb/surrealdb/surrealdb.py +663 -0
  560. agno/vectordb/upstashdb/__init__.py +5 -0
  561. agno/vectordb/upstashdb/upstashdb.py +718 -0
  562. agno/vectordb/weaviate/__init__.py +8 -0
  563. agno/vectordb/weaviate/index.py +15 -0
  564. agno/vectordb/weaviate/weaviate.py +1009 -0
  565. agno/workflow/__init__.py +23 -1
  566. agno/workflow/agent.py +299 -0
  567. agno/workflow/condition.py +759 -0
  568. agno/workflow/loop.py +756 -0
  569. agno/workflow/parallel.py +853 -0
  570. agno/workflow/router.py +723 -0
  571. agno/workflow/step.py +1564 -0
  572. agno/workflow/steps.py +613 -0
  573. agno/workflow/types.py +556 -0
  574. agno/workflow/workflow.py +4327 -514
  575. agno-2.3.13.dist-info/METADATA +639 -0
  576. agno-2.3.13.dist-info/RECORD +613 -0
  577. {agno-0.1.2.dist-info → agno-2.3.13.dist-info}/WHEEL +1 -1
  578. agno-2.3.13.dist-info/licenses/LICENSE +201 -0
  579. agno/api/playground.py +0 -91
  580. agno/api/schemas/playground.py +0 -22
  581. agno/api/schemas/user.py +0 -22
  582. agno/api/schemas/workspace.py +0 -46
  583. agno/api/user.py +0 -160
  584. agno/api/workspace.py +0 -151
  585. agno/cli/auth_server.py +0 -118
  586. agno/cli/config.py +0 -275
  587. agno/cli/console.py +0 -88
  588. agno/cli/credentials.py +0 -23
  589. agno/cli/entrypoint.py +0 -571
  590. agno/cli/operator.py +0 -355
  591. agno/cli/settings.py +0 -85
  592. agno/cli/ws/ws_cli.py +0 -817
  593. agno/constants.py +0 -13
  594. agno/document/__init__.py +0 -1
  595. agno/document/chunking/semantic.py +0 -47
  596. agno/document/chunking/strategy.py +0 -31
  597. agno/document/reader/__init__.py +0 -1
  598. agno/document/reader/arxiv_reader.py +0 -41
  599. agno/document/reader/base.py +0 -22
  600. agno/document/reader/csv_reader.py +0 -84
  601. agno/document/reader/docx_reader.py +0 -46
  602. agno/document/reader/firecrawl_reader.py +0 -99
  603. agno/document/reader/json_reader.py +0 -43
  604. agno/document/reader/pdf_reader.py +0 -219
  605. agno/document/reader/s3/pdf_reader.py +0 -46
  606. agno/document/reader/s3/text_reader.py +0 -51
  607. agno/document/reader/text_reader.py +0 -41
  608. agno/document/reader/website_reader.py +0 -175
  609. agno/document/reader/youtube_reader.py +0 -50
  610. agno/embedder/__init__.py +0 -1
  611. agno/embedder/azure_openai.py +0 -86
  612. agno/embedder/cohere.py +0 -72
  613. agno/embedder/fastembed.py +0 -37
  614. agno/embedder/google.py +0 -73
  615. agno/embedder/huggingface.py +0 -54
  616. agno/embedder/mistral.py +0 -80
  617. agno/embedder/ollama.py +0 -57
  618. agno/embedder/openai.py +0 -74
  619. agno/embedder/sentence_transformer.py +0 -38
  620. agno/embedder/voyageai.py +0 -64
  621. agno/eval/perf.py +0 -201
  622. agno/file/__init__.py +0 -1
  623. agno/file/file.py +0 -16
  624. agno/file/local/csv.py +0 -32
  625. agno/file/local/txt.py +0 -19
  626. agno/infra/app.py +0 -240
  627. agno/infra/base.py +0 -144
  628. agno/infra/context.py +0 -20
  629. agno/infra/db_app.py +0 -52
  630. agno/infra/resource.py +0 -205
  631. agno/infra/resources.py +0 -55
  632. agno/knowledge/agent.py +0 -230
  633. agno/knowledge/arxiv.py +0 -22
  634. agno/knowledge/combined.py +0 -22
  635. agno/knowledge/csv.py +0 -28
  636. agno/knowledge/csv_url.py +0 -19
  637. agno/knowledge/document.py +0 -20
  638. agno/knowledge/docx.py +0 -30
  639. agno/knowledge/json.py +0 -28
  640. agno/knowledge/langchain.py +0 -71
  641. agno/knowledge/llamaindex.py +0 -66
  642. agno/knowledge/pdf.py +0 -28
  643. agno/knowledge/pdf_url.py +0 -26
  644. agno/knowledge/s3/base.py +0 -60
  645. agno/knowledge/s3/pdf.py +0 -21
  646. agno/knowledge/s3/text.py +0 -23
  647. agno/knowledge/text.py +0 -30
  648. agno/knowledge/website.py +0 -88
  649. agno/knowledge/wikipedia.py +0 -31
  650. agno/knowledge/youtube.py +0 -22
  651. agno/memory/agent.py +0 -392
  652. agno/memory/classifier.py +0 -104
  653. agno/memory/db/__init__.py +0 -1
  654. agno/memory/db/base.py +0 -42
  655. agno/memory/db/mongodb.py +0 -189
  656. agno/memory/db/postgres.py +0 -203
  657. agno/memory/db/sqlite.py +0 -193
  658. agno/memory/memory.py +0 -15
  659. agno/memory/row.py +0 -36
  660. agno/memory/summarizer.py +0 -192
  661. agno/memory/summary.py +0 -19
  662. agno/memory/workflow.py +0 -38
  663. agno/models/google/gemini_openai.py +0 -26
  664. agno/models/ollama/hermes.py +0 -221
  665. agno/models/ollama/tools.py +0 -362
  666. agno/models/vertexai/gemini.py +0 -595
  667. agno/playground/__init__.py +0 -3
  668. agno/playground/async_router.py +0 -421
  669. agno/playground/deploy.py +0 -249
  670. agno/playground/operator.py +0 -92
  671. agno/playground/playground.py +0 -91
  672. agno/playground/schemas.py +0 -76
  673. agno/playground/serve.py +0 -55
  674. agno/playground/sync_router.py +0 -405
  675. agno/reasoning/agent.py +0 -68
  676. agno/run/response.py +0 -112
  677. agno/storage/agent/__init__.py +0 -0
  678. agno/storage/agent/base.py +0 -38
  679. agno/storage/agent/dynamodb.py +0 -350
  680. agno/storage/agent/json.py +0 -92
  681. agno/storage/agent/mongodb.py +0 -228
  682. agno/storage/agent/postgres.py +0 -367
  683. agno/storage/agent/session.py +0 -79
  684. agno/storage/agent/singlestore.py +0 -303
  685. agno/storage/agent/sqlite.py +0 -357
  686. agno/storage/agent/yaml.py +0 -93
  687. agno/storage/workflow/__init__.py +0 -0
  688. agno/storage/workflow/base.py +0 -40
  689. agno/storage/workflow/mongodb.py +0 -233
  690. agno/storage/workflow/postgres.py +0 -366
  691. agno/storage/workflow/session.py +0 -60
  692. agno/storage/workflow/sqlite.py +0 -359
  693. agno/tools/googlesearch.py +0 -88
  694. agno/utils/defaults.py +0 -57
  695. agno/utils/filesystem.py +0 -39
  696. agno/utils/git.py +0 -52
  697. agno/utils/json_io.py +0 -30
  698. agno/utils/load_env.py +0 -19
  699. agno/utils/py_io.py +0 -19
  700. agno/utils/pyproject.py +0 -18
  701. agno/utils/resource_filter.py +0 -31
  702. agno/vectordb/singlestore/s2vectordb.py +0 -390
  703. agno/vectordb/singlestore/s2vectordb2.py +0 -355
  704. agno/workspace/__init__.py +0 -0
  705. agno/workspace/config.py +0 -325
  706. agno/workspace/enums.py +0 -6
  707. agno/workspace/helpers.py +0 -48
  708. agno/workspace/operator.py +0 -758
  709. agno/workspace/settings.py +0 -63
  710. agno-0.1.2.dist-info/LICENSE +0 -375
  711. agno-0.1.2.dist-info/METADATA +0 -502
  712. agno-0.1.2.dist-info/RECORD +0 -352
  713. agno-0.1.2.dist-info/entry_points.txt +0 -3
  714. /agno/{cli → db/migrations}/__init__.py +0 -0
  715. /agno/{cli/ws → db/migrations/versions}/__init__.py +0 -0
  716. /agno/{document/chunking/__init__.py → db/schemas/metrics.py} +0 -0
  717. /agno/{document/reader/s3 → integrations}/__init__.py +0 -0
  718. /agno/{file/local → knowledge/chunking}/__init__.py +0 -0
  719. /agno/{infra → knowledge/remote_content}/__init__.py +0 -0
  720. /agno/{knowledge/s3 → tools/models}/__init__.py +0 -0
  721. /agno/{reranker → utils/models}/__init__.py +0 -0
  722. /agno/{storage → utils/print_response}/__init__.py +0 -0
  723. {agno-0.1.2.dist-info → agno-2.3.13.dist-info}/top_level.txt +0 -0
agno/db/redis/redis.py ADDED
@@ -0,0 +1,2141 @@
1
+ import time
2
+ from datetime import date, datetime, timedelta, timezone
3
+ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
4
+ from uuid import uuid4
5
+
6
+ if TYPE_CHECKING:
7
+ from agno.tracing.schemas import Span, Trace
8
+
9
+ from agno.db.base import BaseDb, SessionType
10
+ from agno.db.redis.utils import (
11
+ apply_filters,
12
+ apply_pagination,
13
+ apply_sorting,
14
+ calculate_date_metrics,
15
+ create_index_entries,
16
+ deserialize_cultural_knowledge_from_db,
17
+ deserialize_data,
18
+ fetch_all_sessions_data,
19
+ generate_redis_key,
20
+ get_all_keys_for_table,
21
+ get_dates_to_calculate_metrics_for,
22
+ remove_index_entries,
23
+ serialize_cultural_knowledge_for_db,
24
+ serialize_data,
25
+ )
26
+ from agno.db.schemas.culture import CulturalKnowledge
27
+ from agno.db.schemas.evals import EvalFilterType, EvalRunRecord, EvalType
28
+ from agno.db.schemas.knowledge import KnowledgeRow
29
+ from agno.db.schemas.memory import UserMemory
30
+ from agno.session import AgentSession, Session, TeamSession, WorkflowSession
31
+ from agno.utils.log import log_debug, log_error, log_info
32
+ from agno.utils.string import generate_id
33
+
34
+ try:
35
+ from redis import Redis, RedisCluster
36
+ except ImportError:
37
+ raise ImportError("`redis` not installed. Please install it using `pip install redis`")
38
+
39
+
40
+ class RedisDb(BaseDb):
41
+ def __init__(
42
+ self,
43
+ id: Optional[str] = None,
44
+ redis_client: Optional[Union[Redis, RedisCluster]] = None,
45
+ db_url: Optional[str] = None,
46
+ db_prefix: str = "agno",
47
+ expire: Optional[int] = None,
48
+ session_table: Optional[str] = None,
49
+ memory_table: Optional[str] = None,
50
+ metrics_table: Optional[str] = None,
51
+ eval_table: Optional[str] = None,
52
+ knowledge_table: Optional[str] = None,
53
+ culture_table: Optional[str] = None,
54
+ traces_table: Optional[str] = None,
55
+ spans_table: Optional[str] = None,
56
+ ):
57
+ """
58
+ Interface for interacting with a Redis database.
59
+
60
+ The following order is used to determine the database connection:
61
+ 1. Use the redis_client if provided
62
+ 2. Use the db_url
63
+ 3. Raise an error if neither is provided
64
+
65
+ db_url only supports single-node Redis connections, if you need Redis Cluster support, provide a redis_client.
66
+
67
+ Args:
68
+ id (Optional[str]): The ID of the database.
69
+ redis_client (Optional[Redis]): Redis client instance to use. If not provided a new client will be created.
70
+ db_url (Optional[str]): Redis connection URL (e.g., "redis://localhost:6379/0" or "rediss://user:pass@host:port/db")
71
+ db_prefix (str): Prefix for all Redis keys
72
+ expire (Optional[int]): TTL for Redis keys in seconds
73
+ session_table (Optional[str]): Name of the table to store sessions
74
+ memory_table (Optional[str]): Name of the table to store memories
75
+ metrics_table (Optional[str]): Name of the table to store metrics
76
+ eval_table (Optional[str]): Name of the table to store evaluation runs
77
+ knowledge_table (Optional[str]): Name of the table to store knowledge documents
78
+ culture_table (Optional[str]): Name of the table to store cultural knowledge
79
+ traces_table (Optional[str]): Name of the table to store traces
80
+ spans_table (Optional[str]): Name of the table to store spans
81
+
82
+ Raises:
83
+ ValueError: If neither redis_client nor db_url is provided.
84
+ """
85
+ if id is None:
86
+ base_seed = db_url or str(redis_client)
87
+ seed = f"{base_seed}#{db_prefix}"
88
+ id = generate_id(seed)
89
+
90
+ super().__init__(
91
+ id=id,
92
+ session_table=session_table,
93
+ memory_table=memory_table,
94
+ metrics_table=metrics_table,
95
+ eval_table=eval_table,
96
+ knowledge_table=knowledge_table,
97
+ culture_table=culture_table,
98
+ traces_table=traces_table,
99
+ spans_table=spans_table,
100
+ )
101
+
102
+ self.db_prefix = db_prefix
103
+ self.expire = expire
104
+
105
+ if redis_client is not None:
106
+ self.redis_client = redis_client
107
+ elif db_url is not None:
108
+ self.redis_client = Redis.from_url(db_url, decode_responses=True)
109
+ else:
110
+ raise ValueError("One of redis_client or db_url must be provided")
111
+
112
+ # -- DB methods --
113
+
114
+ def table_exists(self, table_name: str) -> bool:
115
+ """Redis implementation, always returns True."""
116
+ return True
117
+
118
+ def _get_table_name(self, table_type: str) -> str:
119
+ """Get the active table name for the given table type."""
120
+ if table_type == "sessions":
121
+ return self.session_table_name
122
+
123
+ elif table_type == "memories":
124
+ return self.memory_table_name
125
+
126
+ elif table_type == "metrics":
127
+ return self.metrics_table_name
128
+
129
+ elif table_type == "evals":
130
+ return self.eval_table_name
131
+
132
+ elif table_type == "knowledge":
133
+ return self.knowledge_table_name
134
+
135
+ elif table_type == "culture":
136
+ return self.culture_table_name
137
+
138
+ elif table_type == "traces":
139
+ return self.trace_table_name
140
+
141
+ elif table_type == "spans":
142
+ return self.span_table_name
143
+
144
+ else:
145
+ raise ValueError(f"Unknown table type: {table_type}")
146
+
147
+ def _store_record(
148
+ self, table_type: str, record_id: str, data: Dict[str, Any], index_fields: Optional[List[str]] = None
149
+ ) -> bool:
150
+ """Generic method to store a record in Redis, considering optional indexing.
151
+
152
+ Args:
153
+ table_type (str): The type of table to store the record in.
154
+ record_id (str): The ID of the record to store.
155
+ data (Dict[str, Any]): The data to store in the record.
156
+ index_fields (Optional[List[str]]): The fields to index the record by.
157
+
158
+ Returns:
159
+ bool: True if the record was stored successfully, False otherwise.
160
+ """
161
+ try:
162
+ key = generate_redis_key(prefix=self.db_prefix, table_type=table_type, key_id=record_id)
163
+ serialized_data = serialize_data(data)
164
+
165
+ self.redis_client.set(key, serialized_data, ex=self.expire)
166
+
167
+ if index_fields:
168
+ create_index_entries(
169
+ redis_client=self.redis_client,
170
+ prefix=self.db_prefix,
171
+ table_type=table_type,
172
+ record_id=record_id,
173
+ record_data=data,
174
+ index_fields=index_fields,
175
+ )
176
+
177
+ return True
178
+
179
+ except Exception as e:
180
+ log_error(f"Error storing Redis record: {e}")
181
+ return False
182
+
183
+ def _get_record(self, table_type: str, record_id: str) -> Optional[Dict[str, Any]]:
184
+ """Generic method to get a record from Redis.
185
+
186
+ Args:
187
+ table_type (str): The type of table to get the record from.
188
+ record_id (str): The ID of the record to get.
189
+
190
+ Returns:
191
+ Optional[Dict[str, Any]]: The record data if found, None otherwise.
192
+ """
193
+ try:
194
+ key = generate_redis_key(prefix=self.db_prefix, table_type=table_type, key_id=record_id)
195
+
196
+ data = self.redis_client.get(key)
197
+ if data is None:
198
+ return None
199
+
200
+ return deserialize_data(data) # type: ignore
201
+
202
+ except Exception as e:
203
+ log_error(f"Error getting record {record_id}: {e}")
204
+ return None
205
+
206
+ def _delete_record(self, table_type: str, record_id: str, index_fields: Optional[List[str]] = None) -> bool:
207
+ """Generic method to delete a record from Redis.
208
+
209
+ Args:
210
+ table_type (str): The type of table to delete the record from.
211
+ record_id (str): The ID of the record to delete.
212
+ index_fields (Optional[List[str]]): The fields to index the record by.
213
+
214
+ Returns:
215
+ bool: True if the record was deleted successfully, False otherwise.
216
+
217
+ Raises:
218
+ Exception: If any error occurs while deleting the record.
219
+ """
220
+ try:
221
+ # Handle index deletion first
222
+ if index_fields:
223
+ record_data = self._get_record(table_type, record_id)
224
+ if record_data:
225
+ remove_index_entries(
226
+ redis_client=self.redis_client,
227
+ prefix=self.db_prefix,
228
+ table_type=table_type,
229
+ record_id=record_id,
230
+ record_data=record_data,
231
+ index_fields=index_fields,
232
+ )
233
+
234
+ key = generate_redis_key(prefix=self.db_prefix, table_type=table_type, key_id=record_id)
235
+ result = self.redis_client.delete(key)
236
+ if result is None or result == 0:
237
+ return False
238
+
239
+ return True
240
+
241
+ except Exception as e:
242
+ log_error(f"Error deleting record {record_id}: {e}")
243
+ return False
244
+
245
+ def _get_all_records(self, table_type: str) -> List[Dict[str, Any]]:
246
+ """Generic method to get all records for a table type.
247
+
248
+ Args:
249
+ table_type (str): The type of table to get the records from.
250
+
251
+ Returns:
252
+ List[Dict[str, Any]]: The records data if found, None otherwise.
253
+
254
+ Raises:
255
+ Exception: If any error occurs while getting the records.
256
+ """
257
+ try:
258
+ keys = get_all_keys_for_table(redis_client=self.redis_client, prefix=self.db_prefix, table_type=table_type)
259
+
260
+ records = []
261
+ for key in keys:
262
+ data = self.redis_client.get(key)
263
+ if data:
264
+ records.append(deserialize_data(data)) # type: ignore
265
+
266
+ return records
267
+
268
+ except Exception as e:
269
+ log_error(f"Error getting all records for {table_type}: {e}")
270
+ return []
271
+
272
+ def get_latest_schema_version(self):
273
+ """Get the latest version of the database schema."""
274
+ pass
275
+
276
+ def upsert_schema_version(self, version: str) -> None:
277
+ """Upsert the schema version into the database."""
278
+ pass
279
+
280
+ # -- Session methods --
281
+
282
+ def delete_session(self, session_id: str) -> bool:
283
+ """Delete a session from Redis.
284
+
285
+ Args:
286
+ session_id (str): The ID of the session to delete.
287
+
288
+ Raises:
289
+ Exception: If any error occurs while deleting the session.
290
+ """
291
+ try:
292
+ if self._delete_record(
293
+ table_type="sessions",
294
+ record_id=session_id,
295
+ index_fields=["user_id", "agent_id", "team_id", "workflow_id", "session_type"],
296
+ ):
297
+ log_debug(f"Successfully deleted session: {session_id}")
298
+ return True
299
+ else:
300
+ log_debug(f"No session found to delete with session_id: {session_id}")
301
+ return False
302
+
303
+ except Exception as e:
304
+ log_error(f"Error deleting session: {e}")
305
+ raise e
306
+
307
+ def delete_sessions(self, session_ids: List[str]) -> None:
308
+ """Delete multiple sessions from Redis.
309
+
310
+ Args:
311
+ session_ids (List[str]): The IDs of the sessions to delete.
312
+
313
+ Raises:
314
+ Exception: If any error occurs while deleting the sessions.
315
+ """
316
+ try:
317
+ deleted_count = 0
318
+ for session_id in session_ids:
319
+ if self._delete_record(
320
+ "sessions",
321
+ session_id,
322
+ index_fields=["user_id", "agent_id", "team_id", "workflow_id", "session_type"],
323
+ ):
324
+ deleted_count += 1
325
+ log_debug(f"Successfully deleted {deleted_count} sessions")
326
+
327
+ except Exception as e:
328
+ log_error(f"Error deleting sessions: {e}")
329
+ raise e
330
+
331
+ def get_session(
332
+ self,
333
+ session_id: str,
334
+ session_type: SessionType,
335
+ user_id: Optional[str] = None,
336
+ deserialize: Optional[bool] = True,
337
+ ) -> Optional[Union[Session, Dict[str, Any]]]:
338
+ """Read a session from Redis.
339
+
340
+ Args:
341
+ session_id (str): The ID of the session to get.
342
+ session_type (SessionType): The type of session to get.
343
+ user_id (Optional[str]): The ID of the user to filter by.
344
+
345
+ Returns:
346
+ Optional[Union[AgentSession, TeamSession, WorkflowSession]]: The session if found, None otherwise.
347
+
348
+ Raises:
349
+ Exception: If any error occurs while getting the session.
350
+ """
351
+ try:
352
+ session = self._get_record("sessions", session_id)
353
+ if session is None:
354
+ return None
355
+
356
+ # Apply filters
357
+ if user_id is not None and session.get("user_id") != user_id:
358
+ return None
359
+
360
+ if not deserialize:
361
+ return session
362
+
363
+ if session_type == SessionType.AGENT.value:
364
+ return AgentSession.from_dict(session)
365
+ elif session_type == SessionType.TEAM.value:
366
+ return TeamSession.from_dict(session)
367
+ elif session_type == SessionType.WORKFLOW.value:
368
+ return WorkflowSession.from_dict(session)
369
+ else:
370
+ raise ValueError(f"Invalid session type: {session_type}")
371
+
372
+ except Exception as e:
373
+ log_error(f"Exception reading session: {e}")
374
+ raise e
375
+
376
+ # TODO: optimizable
377
+ def get_sessions(
378
+ self,
379
+ session_type: Optional[SessionType] = None,
380
+ user_id: Optional[str] = None,
381
+ component_id: Optional[str] = None,
382
+ session_name: Optional[str] = None,
383
+ start_timestamp: Optional[int] = None,
384
+ end_timestamp: Optional[int] = None,
385
+ limit: Optional[int] = None,
386
+ page: Optional[int] = None,
387
+ sort_by: Optional[str] = None,
388
+ sort_order: Optional[str] = None,
389
+ deserialize: Optional[bool] = True,
390
+ create_index_if_not_found: Optional[bool] = True,
391
+ ) -> Union[List[Session], Tuple[List[Dict[str, Any]], int]]:
392
+ """Get all sessions matching the given filters.
393
+
394
+ Args:
395
+ session_type (Optional[SessionType]): The type of session to filter by.
396
+ user_id (Optional[str]): The ID of the user to filter by.
397
+ component_id (Optional[str]): The ID of the component to filter by.
398
+ session_name (Optional[str]): The name of the session to filter by.
399
+ limit (Optional[int]): The maximum number of sessions to return.
400
+ page (Optional[int]): The page number to return.
401
+ sort_by (Optional[str]): The field to sort by.
402
+ sort_order (Optional[str]): The order to sort by.
403
+
404
+ Returns:
405
+ List[Union[AgentSession, TeamSession, WorkflowSession]]: The list of sessions.
406
+ """
407
+ try:
408
+ all_sessions = self._get_all_records("sessions")
409
+
410
+ conditions: Dict[str, Any] = {}
411
+ if session_type is not None:
412
+ conditions["session_type"] = session_type
413
+ if user_id is not None:
414
+ conditions["user_id"] = user_id
415
+
416
+ filtered_sessions = apply_filters(records=all_sessions, conditions=conditions)
417
+
418
+ if component_id is not None:
419
+ if session_type == SessionType.AGENT:
420
+ filtered_sessions = [s for s in filtered_sessions if s.get("agent_id") == component_id]
421
+ elif session_type == SessionType.TEAM:
422
+ filtered_sessions = [s for s in filtered_sessions if s.get("team_id") == component_id]
423
+ elif session_type == SessionType.WORKFLOW:
424
+ filtered_sessions = [s for s in filtered_sessions if s.get("workflow_id") == component_id]
425
+ if start_timestamp is not None:
426
+ filtered_sessions = [s for s in filtered_sessions if s.get("created_at", 0) >= start_timestamp]
427
+ if end_timestamp is not None:
428
+ filtered_sessions = [s for s in filtered_sessions if s.get("created_at", 0) <= end_timestamp]
429
+
430
+ if session_name is not None:
431
+ filtered_sessions = [
432
+ s
433
+ for s in filtered_sessions
434
+ if session_name.lower() in s.get("session_data", {}).get("session_name", "").lower()
435
+ ]
436
+
437
+ sorted_sessions = apply_sorting(records=filtered_sessions, sort_by=sort_by, sort_order=sort_order)
438
+ sessions = apply_pagination(records=sorted_sessions, limit=limit, page=page)
439
+ sessions = [record for record in sessions]
440
+
441
+ if not deserialize:
442
+ return sessions, len(filtered_sessions)
443
+
444
+ if session_type == SessionType.AGENT:
445
+ return [AgentSession.from_dict(record) for record in sessions] # type: ignore
446
+ elif session_type == SessionType.TEAM:
447
+ return [TeamSession.from_dict(record) for record in sessions] # type: ignore
448
+ elif session_type == SessionType.WORKFLOW:
449
+ return [WorkflowSession.from_dict(record) for record in sessions] # type: ignore
450
+ else:
451
+ raise ValueError(f"Invalid session type: {session_type}")
452
+
453
+ except Exception as e:
454
+ log_error(f"Exception reading sessions: {e}")
455
+ raise e
456
+
457
+ def rename_session(
458
+ self, session_id: str, session_type: SessionType, session_name: str, deserialize: Optional[bool] = True
459
+ ) -> Optional[Union[Session, Dict[str, Any]]]:
460
+ """Rename a session in Redis.
461
+
462
+ Args:
463
+ session_id (str): The ID of the session to rename.
464
+ session_type (SessionType): The type of session to rename.
465
+ session_name (str): The new name of the session.
466
+
467
+ Returns:
468
+ Optional[Session]: The renamed session if successful, None otherwise.
469
+
470
+ Raises:
471
+ Exception: If any error occurs while renaming the session.
472
+ """
473
+ try:
474
+ session = self._get_record("sessions", session_id)
475
+ if session is None:
476
+ return None
477
+
478
+ # Update session_name, in session_data
479
+ if "session_data" not in session:
480
+ session["session_data"] = {}
481
+ session["session_data"]["session_name"] = session_name
482
+ session["updated_at"] = int(time.time())
483
+
484
+ # Store updated session
485
+ success = self._store_record("sessions", session_id, session)
486
+ if not success:
487
+ return None
488
+
489
+ log_debug(f"Renamed session with id '{session_id}' to '{session_name}'")
490
+
491
+ if not deserialize:
492
+ return session
493
+
494
+ if session_type == SessionType.AGENT:
495
+ return AgentSession.from_dict(session)
496
+ elif session_type == SessionType.TEAM:
497
+ return TeamSession.from_dict(session)
498
+ elif session_type == SessionType.WORKFLOW:
499
+ return WorkflowSession.from_dict(session)
500
+ else:
501
+ raise ValueError(f"Invalid session type: {session_type}")
502
+
503
+ except Exception as e:
504
+ log_error(f"Error renaming session: {e}")
505
+ raise e
506
+
507
    def upsert_session(
        self, session: Session, deserialize: Optional[bool] = True
    ) -> Optional[Union[Session, Dict[str, Any]]]:
        """Insert or update a session in Redis.

        Each session flavor (agent/team/workflow) is flattened into a common
        record shape so all sessions share one logical "sessions" table;
        fields that do not apply to a flavor are stored as None.

        Args:
            session (Session): The session to upsert.
            deserialize (Optional[bool]): Whether to deserialize the result
                back into a session model. Defaults to True.

        Returns:
            Optional[Session]: The upserted session if successful, None otherwise.

        Raises:
            Exception: If any error occurs while upserting the session.
        """
        try:
            session_dict = session.to_dict()

            if isinstance(session, AgentSession):
                # Agent session: keep agent-specific fields, pass through any
                # team/workflow fields present in the serialized session.
                data = {
                    "session_id": session_dict.get("session_id"),
                    "session_type": SessionType.AGENT.value,
                    "agent_id": session_dict.get("agent_id"),
                    "team_id": session_dict.get("team_id"),
                    "workflow_id": session_dict.get("workflow_id"),
                    "user_id": session_dict.get("user_id"),
                    "runs": session_dict.get("runs"),
                    "agent_data": session_dict.get("agent_data"),
                    "team_data": session_dict.get("team_data"),
                    "workflow_data": session_dict.get("workflow_data"),
                    "session_data": session_dict.get("session_data"),
                    "summary": session_dict.get("summary"),
                    "metadata": session_dict.get("metadata"),
                    # Preserve the original creation time on updates.
                    "created_at": session_dict.get("created_at") or int(time.time()),
                    "updated_at": int(time.time()),
                }

                success = self._store_record(
                    table_type="sessions",
                    record_id=session.session_id,
                    data=data,
                    index_fields=["user_id", "agent_id", "session_type"],
                )
                if not success:
                    return None

                if not deserialize:
                    return data

                return AgentSession.from_dict(data)

            elif isinstance(session, TeamSession):
                # Team session: agent/workflow columns are explicitly nulled.
                data = {
                    "session_id": session_dict.get("session_id"),
                    "session_type": SessionType.TEAM.value,
                    "agent_id": None,
                    "team_id": session_dict.get("team_id"),
                    "workflow_id": None,
                    "user_id": session_dict.get("user_id"),
                    "runs": session_dict.get("runs"),
                    "team_data": session_dict.get("team_data"),
                    "agent_data": None,
                    "workflow_data": None,
                    "session_data": session_dict.get("session_data"),
                    "summary": session_dict.get("summary"),
                    "metadata": session_dict.get("metadata"),
                    "created_at": session_dict.get("created_at") or int(time.time()),
                    "updated_at": int(time.time()),
                }

                success = self._store_record(
                    table_type="sessions",
                    record_id=session.session_id,
                    data=data,
                    index_fields=["user_id", "team_id", "session_type"],
                )
                if not success:
                    return None

                if not deserialize:
                    return data

                return TeamSession.from_dict(data)

            else:
                # Any other session type is treated as a workflow session.
                data = {
                    "session_id": session_dict.get("session_id"),
                    "session_type": SessionType.WORKFLOW.value,
                    "workflow_id": session_dict.get("workflow_id"),
                    "user_id": session_dict.get("user_id"),
                    "runs": session_dict.get("runs"),
                    "workflow_data": session_dict.get("workflow_data"),
                    "session_data": session_dict.get("session_data"),
                    "metadata": session_dict.get("metadata"),
                    "created_at": session_dict.get("created_at") or int(time.time()),
                    "updated_at": int(time.time()),
                    "agent_id": None,
                    "team_id": None,
                    "agent_data": None,
                    "team_data": None,
                    "summary": None,
                }

                success = self._store_record(
                    table_type="sessions",
                    record_id=session.session_id,
                    data=data,
                    index_fields=["user_id", "workflow_id", "session_type"],
                )
                if not success:
                    return None

                if not deserialize:
                    return data

                return WorkflowSession.from_dict(data)

        except Exception as e:
            log_error(f"Error upserting session: {e}")
            raise e
626
+
627
+ def upsert_sessions(
628
+ self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
629
+ ) -> List[Union[Session, Dict[str, Any]]]:
630
+ """
631
+ Bulk upsert multiple sessions for improved performance on large datasets.
632
+
633
+ Args:
634
+ sessions (List[Session]): List of sessions to upsert.
635
+ deserialize (Optional[bool]): Whether to deserialize the sessions. Defaults to True.
636
+
637
+ Returns:
638
+ List[Union[Session, Dict[str, Any]]]: List of upserted sessions.
639
+
640
+ Raises:
641
+ Exception: If an error occurs during bulk upsert.
642
+ """
643
+ if not sessions:
644
+ return []
645
+
646
+ try:
647
+ log_info(
648
+ f"RedisDb doesn't support efficient bulk operations, falling back to individual upserts for {len(sessions)} sessions"
649
+ )
650
+
651
+ # Fall back to individual upserts
652
+ results = []
653
+ for session in sessions:
654
+ if session is not None:
655
+ result = self.upsert_session(session, deserialize=deserialize)
656
+ if result is not None:
657
+ results.append(result)
658
+ return results
659
+
660
+ except Exception as e:
661
+ log_error(f"Exception during bulk session upsert: {e}")
662
+ return []
663
+
664
+ # -- Memory methods --
665
+
666
+ def delete_user_memory(self, memory_id: str, user_id: Optional[str] = None):
667
+ """Delete a user memory from Redis.
668
+
669
+ Args:
670
+ memory_id (str): The ID of the memory to delete.
671
+ user_id (Optional[str]): The ID of the user. If provided, verifies the memory belongs to this user before deleting.
672
+
673
+ Returns:
674
+ bool: True if the memory was deleted, False otherwise.
675
+
676
+ Raises:
677
+ Exception: If any error occurs while deleting the memory.
678
+ """
679
+ try:
680
+ # If user_id is provided, verify ownership before deleting
681
+ if user_id is not None:
682
+ memory = self._get_record("memories", memory_id)
683
+ if memory is None:
684
+ log_debug(f"No user memory found with id: {memory_id}")
685
+ return
686
+ if memory.get("user_id") != user_id:
687
+ log_debug(f"Memory {memory_id} does not belong to user {user_id}")
688
+ return
689
+
690
+ if self._delete_record(
691
+ "memories", memory_id, index_fields=["user_id", "agent_id", "team_id", "workflow_id"]
692
+ ):
693
+ log_debug(f"Successfully deleted user memory id: {memory_id}")
694
+ else:
695
+ log_debug(f"No user memory found with id: {memory_id}")
696
+
697
+ except Exception as e:
698
+ log_error(f"Error deleting user memory: {e}")
699
+ raise e
700
+
701
+ def delete_user_memories(self, memory_ids: List[str], user_id: Optional[str] = None) -> None:
702
+ """Delete user memories from Redis.
703
+
704
+ Args:
705
+ memory_ids (List[str]): The IDs of the memories to delete.
706
+ user_id (Optional[str]): The ID of the user. If provided, only deletes memories belonging to this user.
707
+ """
708
+ try:
709
+ # TODO: cant we optimize this?
710
+ for memory_id in memory_ids:
711
+ # If user_id is provided, verify ownership before deleting
712
+ if user_id is not None:
713
+ memory = self._get_record("memories", memory_id)
714
+ if memory is None:
715
+ continue
716
+ if memory.get("user_id") != user_id:
717
+ log_debug(f"Memory {memory_id} does not belong to user {user_id}, skipping deletion")
718
+ continue
719
+
720
+ self._delete_record(
721
+ "memories",
722
+ memory_id,
723
+ index_fields=["user_id", "agent_id", "team_id", "workflow_id"],
724
+ )
725
+
726
+ except Exception as e:
727
+ log_error(f"Error deleting user memories: {e}")
728
+ raise e
729
+
730
+ def get_all_memory_topics(self) -> List[str]:
731
+ """Get all memory topics from Redis.
732
+
733
+ Returns:
734
+ List[str]: The list of memory topics.
735
+ """
736
+ try:
737
+ all_memories = self._get_all_records("memories")
738
+
739
+ topics = set()
740
+ for memory in all_memories:
741
+ memory_topics = memory.get("topics", [])
742
+ if isinstance(memory_topics, list):
743
+ topics.update(memory_topics)
744
+
745
+ return list(topics)
746
+
747
+ except Exception as e:
748
+ log_error(f"Exception reading memory topics: {e}")
749
+ raise e
750
+
751
+ def get_user_memory(
752
+ self, memory_id: str, deserialize: Optional[bool] = True, user_id: Optional[str] = None
753
+ ) -> Optional[Union[UserMemory, Dict[str, Any]]]:
754
+ """Get a memory from Redis.
755
+
756
+ Args:
757
+ memory_id (str): The ID of the memory to get.
758
+ deserialize (Optional[bool]): Whether to deserialize the memory. Defaults to True.
759
+ user_id (Optional[str]): The ID of the user. If provided, only returns the memory if it belongs to this user.
760
+
761
+ Returns:
762
+ Optional[UserMemory]: The memory data if found, None otherwise.
763
+ """
764
+ try:
765
+ memory_raw = self._get_record("memories", memory_id)
766
+ if memory_raw is None:
767
+ return None
768
+
769
+ # Filter by user_id if provided
770
+ if user_id is not None and memory_raw.get("user_id") != user_id:
771
+ return None
772
+
773
+ if not deserialize:
774
+ return memory_raw
775
+
776
+ return UserMemory.from_dict(memory_raw)
777
+
778
+ except Exception as e:
779
+ log_error(f"Exception reading memory: {e}")
780
+ raise e
781
+
782
+ def get_user_memories(
783
+ self,
784
+ user_id: Optional[str] = None,
785
+ agent_id: Optional[str] = None,
786
+ team_id: Optional[str] = None,
787
+ topics: Optional[List[str]] = None,
788
+ search_content: Optional[str] = None,
789
+ limit: Optional[int] = None,
790
+ page: Optional[int] = None,
791
+ sort_by: Optional[str] = None,
792
+ sort_order: Optional[str] = None,
793
+ deserialize: Optional[bool] = True,
794
+ ) -> Union[List[UserMemory], Tuple[List[Dict[str, Any]], int]]:
795
+ """Get all memories from Redis as UserMemory objects.
796
+
797
+ Args:
798
+ user_id (Optional[str]): The ID of the user to filter by.
799
+ agent_id (Optional[str]): The ID of the agent to filter by.
800
+ team_id (Optional[str]): The ID of the team to filter by.
801
+ topics (Optional[List[str]]): The topics to filter by.
802
+ search_content (Optional[str]): The content to search for.
803
+ limit (Optional[int]): The maximum number of memories to return.
804
+ page (Optional[int]): The page number to return.
805
+ sort_by (Optional[str]): The field to sort by.
806
+ sort_order (Optional[str]): The order to sort by.
807
+ deserialize (Optional[bool]): Whether to deserialize the memories.
808
+
809
+ Returns:
810
+ Union[List[UserMemory], Tuple[List[Dict[str, Any]], int]]:
811
+ - When deserialize=True: List of UserMemory objects
812
+ - When deserialize=False: Tuple of (memory dictionaries, total count)
813
+
814
+ Raises:
815
+ Exception: If any error occurs while reading the memories.
816
+ """
817
+ try:
818
+ all_memories = self._get_all_records("memories")
819
+
820
+ # Apply filters
821
+ conditions = {}
822
+ if user_id is not None:
823
+ conditions["user_id"] = user_id
824
+ if agent_id is not None:
825
+ conditions["agent_id"] = agent_id
826
+ if team_id is not None:
827
+ conditions["team_id"] = team_id
828
+
829
+ filtered_memories = apply_filters(records=all_memories, conditions=conditions)
830
+
831
+ # Apply topic filter
832
+ if topics is not None:
833
+ filtered_memories = [
834
+ m for m in filtered_memories if any(topic in m.get("topics", []) for topic in topics)
835
+ ]
836
+
837
+ # Apply content search
838
+ if search_content is not None:
839
+ filtered_memories = [
840
+ m for m in filtered_memories if search_content.lower() in str(m.get("memory", "")).lower()
841
+ ]
842
+
843
+ sorted_memories = apply_sorting(records=filtered_memories, sort_by=sort_by, sort_order=sort_order)
844
+ paginated_memories = apply_pagination(records=sorted_memories, limit=limit, page=page)
845
+
846
+ if not deserialize:
847
+ return paginated_memories, len(filtered_memories)
848
+
849
+ return [UserMemory.from_dict(record) for record in paginated_memories]
850
+
851
+ except Exception as e:
852
+ log_error(f"Exception reading memories: {e}")
853
+ raise e
854
+
855
+ def get_user_memory_stats(
856
+ self,
857
+ limit: Optional[int] = None,
858
+ page: Optional[int] = None,
859
+ user_id: Optional[str] = None,
860
+ ) -> Tuple[List[Dict[str, Any]], int]:
861
+ """Get user memory stats from Redis.
862
+
863
+ Args:
864
+ limit (Optional[int]): The maximum number of stats to return.
865
+ page (Optional[int]): The page number to return.
866
+ user_id (Optional[str]): User ID for filtering.
867
+
868
+ Returns:
869
+ Tuple[List[Dict[str, Any]], int]: A tuple containing the list of stats and the total number of stats.
870
+
871
+ Raises:
872
+ Exception: If any error occurs while getting the user memory stats.
873
+ """
874
+ try:
875
+ all_memories = self._get_all_records("memories")
876
+
877
+ # Group by user_id
878
+ user_stats = {}
879
+ for memory in all_memories:
880
+ memory_user_id = memory.get("user_id")
881
+ # filter by user_id if provided
882
+ if user_id is not None and memory_user_id != user_id:
883
+ continue
884
+ if memory_user_id is None:
885
+ continue
886
+
887
+ if memory_user_id not in user_stats:
888
+ user_stats[memory_user_id] = {
889
+ "user_id": memory_user_id,
890
+ "total_memories": 0,
891
+ "last_memory_updated_at": 0,
892
+ }
893
+
894
+ user_stats[memory_user_id]["total_memories"] += 1
895
+ updated_at = memory.get("updated_at", 0)
896
+ if updated_at > user_stats[memory_user_id]["last_memory_updated_at"]:
897
+ user_stats[memory_user_id]["last_memory_updated_at"] = updated_at
898
+
899
+ stats_list = list(user_stats.values())
900
+
901
+ # Sorting by last_memory_updated_at descending
902
+ stats_list.sort(key=lambda x: x["last_memory_updated_at"], reverse=True)
903
+
904
+ total_count = len(stats_list)
905
+
906
+ paginated_stats = apply_pagination(records=stats_list, limit=limit, page=page)
907
+
908
+ return paginated_stats, total_count
909
+
910
+ except Exception as e:
911
+ log_error(f"Exception getting user memory stats: {e}")
912
+ raise e
913
+
914
def upsert_user_memory(
    self, memory: UserMemory, deserialize: Optional[bool] = True
) -> Optional[Union[UserMemory, Dict[str, Any]]]:
    """Insert or update a user memory record in Redis.

    Args:
        memory (UserMemory): The memory to upsert. A memory_id is generated when missing.
        deserialize (Optional[bool]): Whether to return a UserMemory object. Defaults to True.

    Returns:
        Optional[Union[UserMemory, Dict[str, Any]]]: The upserted memory (object or raw dict),
        or None when the store operation fails.

    Raises:
        Exception: If any error occurs while upserting the memory.
    """
    try:
        # Ensure the record has a primary key before writing.
        if memory.memory_id is None:
            memory.memory_id = str(uuid4())

        record: Dict[str, Any] = {
            "user_id": memory.user_id,
            "agent_id": memory.agent_id,
            "team_id": memory.team_id,
            "memory_id": memory.memory_id,
            "memory": memory.memory,
            "topics": memory.topics,
            "input": memory.input,
            "feedback": memory.feedback,
            "created_at": memory.created_at,
            # Always stamp the modification time on write.
            "updated_at": int(time.time()),
        }

        # NOTE(review): "workflow_id" is listed as an index field but is not present
        # in the stored record — confirm whether it should be added to the payload.
        stored = self._store_record(
            "memories", memory.memory_id, record, index_fields=["user_id", "agent_id", "team_id", "workflow_id"]
        )
        if not stored:
            return None

        return record if not deserialize else UserMemory.from_dict(record)

    except Exception as e:
        log_error(f"Error upserting user memory: {e}")
        raise e
957
+
958
def upsert_memories(
    self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
) -> List[Union[UserMemory, Dict[str, Any]]]:
    """
    Bulk upsert multiple user memories.

    Redis has no efficient bulk-write path here, so this falls back to one
    `upsert_user_memory` call per memory.

    Args:
        memories (List[UserMemory]): List of memories to upsert.
        deserialize (Optional[bool]): Whether to deserialize the memories. Defaults to True.
        preserve_updated_at (bool): Accepted for interface compatibility with other DB
            backends. Not applied here: each individual upsert stamps a fresh `updated_at`.

    Returns:
        List[Union[UserMemory, Dict[str, Any]]]: The successfully upserted memories.
        Returns an empty list when an error occurs (errors are logged, not raised).
    """
    if not memories:
        return []

    try:
        log_info(
            f"RedisDb doesn't support efficient bulk operations, falling back to individual upserts for {len(memories)} memories"
        )

        # Fall back to individual upserts, skipping None entries and failed writes.
        results = []
        for memory in memories:
            if memory is not None:
                result = self.upsert_user_memory(memory, deserialize=deserialize)
                if result is not None:
                    results.append(result)
        return results

    except Exception as e:
        log_error(f"Exception during bulk memory upsert: {e}")
        return []
994
+
995
def clear_memories(self) -> None:
    """Remove every memory record stored in Redis.

    Raises:
        Exception: If an error occurs during deletion.
    """
    try:
        memory_keys = get_all_keys_for_table(
            redis_client=self.redis_client, prefix=self.db_prefix, table_type="memories"
        )
        # Single batched DELETE; a no-op when the table is already empty.
        if memory_keys:
            self.redis_client.delete(*memory_keys)
    except Exception as e:
        log_error(f"Exception deleting all memories: {e}")
        raise e
1012
+
1013
+ # -- Metrics methods --
1014
+
1015
def _get_all_sessions_for_metrics_calculation(
    self, start_timestamp: Optional[int] = None, end_timestamp: Optional[int] = None
) -> List[Dict[str, Any]]:
    """Return all sessions, optionally restricted to a created_at window.

    Args:
        start_timestamp (Optional[int]): Inclusive lower bound on created_at.
        end_timestamp (Optional[int]): Inclusive upper bound on created_at.

    Returns:
        List[Dict[str, Any]]: The matching sessions.

    Raises:
        Exception: If any error occurs while reading the sessions.
    """
    try:
        sessions = self._get_all_records("sessions")

        # Without bounds there is nothing to filter.
        if start_timestamp is None and end_timestamp is None:
            return sessions

        def in_window(session: Dict[str, Any]) -> bool:
            # Sessions missing created_at default to 0 and are excluded by any lower bound.
            created_at = session.get("created_at", 0)
            if start_timestamp is not None and created_at < start_timestamp:
                return False
            return not (end_timestamp is not None and created_at > end_timestamp)

        return [session for session in sessions if in_window(session)]

    except Exception as e:
        log_error(f"Error reading sessions for metrics: {e}")
        raise e
1050
+
1051
def _get_metrics_calculation_starting_date(self) -> Optional[date]:
    """Get the first date for which metrics calculation is needed.

    Resolution order:
      1. If any completed metrics record exists, start the day after the latest one.
      2. Otherwise, if incomplete records exist, restart from the earliest of those.
      3. Otherwise, start from the date of the earliest stored session.
      4. Return None when there are no metrics and no sessions at all.

    Returns:
        Optional[date]: The first date for which metrics calculation is needed.

    Raises:
        Exception: If any error occurs while getting the metrics calculation starting date.
    """
    try:
        all_metrics = self._get_all_records("metrics")

        if all_metrics:
            # Find the latest completed metric.
            # "date" values are ISO-format strings, so lexicographic max/min is chronological.
            completed_metrics = [m for m in all_metrics if m.get("completed", False)]
            if completed_metrics:
                latest_completed = max(completed_metrics, key=lambda x: x.get("date", ""))
                # Resume the day after the last fully-computed date.
                return datetime.fromisoformat(latest_completed["date"]).date() + timedelta(days=1)
            else:
                # Find the earliest incomplete metric and recompute from there.
                incomplete_metrics = [m for m in all_metrics if not m.get("completed", False)]
                if incomplete_metrics:
                    earliest_incomplete = min(incomplete_metrics, key=lambda x: x.get("date", ""))
                    return datetime.fromisoformat(earliest_incomplete["date"]).date()

        # No metrics records: fall back to the first session's creation date.
        sessions_raw, _ = self.get_sessions(sort_by="created_at", sort_order="asc", limit=1, deserialize=False)
        if sessions_raw:
            first_session_date = sessions_raw[0]["created_at"]  # type: ignore
            # Session timestamps are epoch seconds; interpreted in UTC here.
            return datetime.fromtimestamp(first_session_date, tz=timezone.utc).date()

        return None

    except Exception as e:
        log_error(f"Error getting metrics starting date: {e}")
        raise e
1087
+
1088
def calculate_metrics(self) -> Optional[list[dict]]:
    """Calculate metrics for all dates without complete metrics.

    Returns:
        Optional[list[dict]]: The stored metrics records, or None when there is
        nothing to calculate (no sessions, or all dates already processed).

    Raises:
        Exception: If any error occurs while calculating the metrics.
    """
    try:
        starting_date = self._get_metrics_calculation_starting_date()
        if starting_date is None:
            log_info("No session data found. Won't calculate metrics.")
            return None

        dates_to_process = get_dates_to_calculate_metrics_for(starting_date)
        if not dates_to_process:
            log_info("Metrics already calculated for all relevant dates.")
            return None

        # NOTE(review): datetime.combine() yields naive datetimes, so .timestamp()
        # interprets them in local time, while _get_metrics_calculation_starting_date
        # uses UTC — confirm this mix is intended.
        start_timestamp = int(datetime.combine(dates_to_process[0], datetime.min.time()).timestamp())
        end_timestamp = int(
            datetime.combine(dates_to_process[-1] + timedelta(days=1), datetime.min.time()).timestamp()
        )

        sessions = self._get_all_sessions_for_metrics_calculation(
            start_timestamp=start_timestamp, end_timestamp=end_timestamp
        )
        # Bucket the sessions per date (keys are ISO date strings) for per-day aggregation.
        all_sessions_data = fetch_all_sessions_data(
            sessions=sessions, dates_to_process=dates_to_process, start_timestamp=start_timestamp
        )
        if not all_sessions_data:
            log_info("No new session data found. Won't calculate metrics.")
            return None

        results = []
        for date_to_process in dates_to_process:
            date_key = date_to_process.isoformat()
            sessions_for_date = all_sessions_data.get(date_key, {})

            # Skip dates with no sessions
            if not any(len(sessions) > 0 for sessions in sessions_for_date.values()):
                continue

            metrics_record = calculate_date_metrics(date_to_process, sessions_for_date)

            # Check if a record already exists for this date and aggregation period
            existing_record = self._get_record("metrics", metrics_record["id"])
            if existing_record:
                # Update the existing record while preserving created_at
                metrics_record["created_at"] = existing_record.get("created_at", metrics_record["created_at"])

            success = self._store_record("metrics", metrics_record["id"], metrics_record)
            if success:
                results.append(metrics_record)

        log_debug("Updated metrics calculations")

        return results

    except Exception as e:
        log_error(f"Error calculating metrics: {e}")
        raise e
1151
+
1152
def get_metrics(
    self,
    starting_date: Optional[date] = None,
    ending_date: Optional[date] = None,
) -> Tuple[List[dict], Optional[int]]:
    """Get all metrics matching the given date range.

    Args:
        starting_date (Optional[date]): Inclusive lower bound for the metric date.
        ending_date (Optional[date]): Inclusive upper bound for the metric date.

    Returns:
        Tuple[List[dict], Optional[int]]: The matching metrics and the most recent
        updated_at among them (None when there are no matches).

    Raises:
        Exception: If any error occurs while getting the metrics.
    """
    try:
        metrics = self._get_all_records("metrics")

        # Restrict to the requested date window when either bound is given.
        if starting_date is not None or ending_date is not None:
            selected = []
            for record in metrics:
                record_date = datetime.fromisoformat(record.get("date", "")).date()
                in_range = (starting_date is None or record_date >= starting_date) and (
                    ending_date is None or record_date <= ending_date
                )
                if in_range:
                    selected.append(record)
            metrics = selected

        # Freshest modification time lets callers cheaply detect changes.
        newest_update = max((record.get("updated_at", 0) for record in metrics), default=None)

        return metrics, newest_update

    except Exception as e:
        log_error(f"Error getting metrics: {e}")
        raise e
1194
+
1195
+ # -- Knowledge methods --
1196
+
1197
def delete_knowledge_content(self, id: str):
    """Delete a knowledge row from the database.

    Args:
        id (str): The ID of the knowledge row to delete.

    Raises:
        Exception: If any error occurs while deleting the knowledge content.
    """
    try:
        # Missing rows are a silent no-op; only unexpected errors propagate.
        self._delete_record("knowledge", id)
    except Exception as e:
        log_error(f"Error deleting knowledge content: {e}")
        raise e
1212
+
1213
def get_knowledge_content(self, id: str) -> Optional[KnowledgeRow]:
    """Fetch a single knowledge row by ID.

    Args:
        id (str): The ID of the knowledge row to get.

    Returns:
        Optional[KnowledgeRow]: The validated knowledge row, or None when absent.

    Raises:
        Exception: If any error occurs while getting the knowledge content.
    """
    try:
        raw = self._get_record("knowledge", id)
        return None if raw is None else KnowledgeRow.model_validate(raw)
    except Exception as e:
        log_error(f"Error getting knowledge content: {e}")
        raise e
1235
+
1236
def get_knowledge_contents(
    self,
    limit: Optional[int] = None,
    page: Optional[int] = None,
    sort_by: Optional[str] = None,
    sort_order: Optional[str] = None,
) -> Tuple[List[KnowledgeRow], int]:
    """Get all knowledge contents from the database.

    Args:
        limit (Optional[int]): The maximum number of knowledge contents to return.
        page (Optional[int]): The page number.
        sort_by (Optional[str]): The column to sort by.
        sort_order (Optional[str]): The order to sort by.

    Returns:
        Tuple[List[KnowledgeRow], int]: The paginated knowledge contents and the total count.

    Raises:
        Exception: If any error occurs while getting the knowledge contents.
    """
    try:
        all_documents = self._get_all_records("knowledge")
        if not all_documents:
            return [], 0

        total_count = len(all_documents)

        # Sort first, then paginate, so pages are stable for a given ordering.
        sorted_documents = apply_sorting(records=all_documents, sort_by=sort_by, sort_order=sort_order)
        paginated_documents = apply_pagination(records=sorted_documents, limit=limit, page=page)

        return [KnowledgeRow.model_validate(doc) for doc in paginated_documents], total_count

    except Exception as e:
        log_error(f"Error getting knowledge contents: {e}")
        raise e
1278
+
1279
def upsert_knowledge_content(self, knowledge_row: KnowledgeRow):
    """Upsert knowledge content in the database.

    Args:
        knowledge_row (KnowledgeRow): The knowledge row to upsert.

    Returns:
        Optional[KnowledgeRow]: The upserted knowledge row, or None if the write fails.

    Raises:
        Exception: If any error occurs while upserting the knowledge content.
    """
    try:
        payload = knowledge_row.model_dump()
        if self._store_record("knowledge", knowledge_row.id, payload):  # type: ignore
            return knowledge_row
        return None
    except Exception as e:
        log_error(f"Error upserting knowledge content: {e}")
        raise e
1300
+
1301
+ # -- Eval methods --
1302
+
1303
def create_eval_run(self, eval_run: EvalRunRecord) -> Optional[EvalRunRecord]:
    """Create an EvalRunRecord in Redis.

    Args:
        eval_run (EvalRunRecord): The eval run to create.

    Returns:
        Optional[EvalRunRecord]: The created eval run if successful, None otherwise.

    Raises:
        Exception: If any error occurs while creating the eval run.
    """
    try:
        timestamp = int(time.time())
        # Seed both timestamps; any values present on the record itself take precedence.
        payload: Dict[str, Any] = {"created_at": timestamp, "updated_at": timestamp}
        payload.update(eval_run.model_dump())

        stored = self._store_record(
            "evals",
            eval_run.run_id,
            payload,
            index_fields=["agent_id", "team_id", "workflow_id", "model_id", "eval_type"],
        )

        log_debug(f"Created eval run with id '{eval_run.run_id}'")

        return eval_run if stored else None

    except Exception as e:
        log_error(f"Error creating eval run: {e}")
        raise e
1333
+
1334
def delete_eval_run(self, eval_run_id: str) -> None:
    """Delete an eval run from Redis.

    Args:
        eval_run_id (str): The ID of the eval run to delete.

    Raises:
        Exception: If any error occurs while deleting the eval run.
    """
    try:
        deleted = self._delete_record(
            "evals", eval_run_id, index_fields=["agent_id", "team_id", "workflow_id", "model_id", "eval_type"]
        )
        # A missing record is not an error; just note the outcome.
        if deleted:
            log_debug(f"Deleted eval run with ID: {eval_run_id}")
        else:
            log_debug(f"No eval run found with ID: {eval_run_id}")
    except Exception as e:
        log_error(f"Error deleting eval run {eval_run_id}: {e}")
        raise
1354
+
1355
def delete_eval_runs(self, eval_run_ids: List[str]) -> None:
    """Delete multiple eval runs from Redis.

    Args:
        eval_run_ids (List[str]): The IDs of the eval runs to delete.

    Raises:
        Exception: If any error occurs while deleting the eval runs.
    """
    try:
        index_fields = ["agent_id", "team_id", "workflow_id", "model_id", "eval_type"]
        # Count only the IDs that actually existed and were removed.
        deleted_count = sum(
            1
            for eval_run_id in eval_run_ids
            if self._delete_record("evals", eval_run_id, index_fields=index_fields)
        )

        if deleted_count == 0:
            log_debug(f"No eval runs found with IDs: {eval_run_ids}")
        else:
            log_debug(f"Deleted {deleted_count} eval runs")

    except Exception as e:
        log_error(f"Error deleting eval runs {eval_run_ids}: {e}")
        raise
1380
+
1381
def get_eval_run(
    self, eval_run_id: str, deserialize: Optional[bool] = True
) -> Optional[Union[EvalRunRecord, Dict[str, Any]]]:
    """Get an eval run from Redis.

    Args:
        eval_run_id (str): The ID of the eval run to get.
        deserialize (Optional[bool]): Whether to return an EvalRunRecord. Defaults to True.

    Returns:
        Optional[Union[EvalRunRecord, Dict[str, Any]]]: The eval run if found, None otherwise.

    Raises:
        Exception: If any error occurs while getting the eval run.
    """
    try:
        raw = self._get_record("evals", eval_run_id)
        if raw is None:
            return None
        return EvalRunRecord.model_validate(raw) if deserialize else raw
    except Exception as e:
        log_error(f"Exception getting eval run {eval_run_id}: {e}")
        raise e
1408
+
1409
def get_eval_runs(
    self,
    limit: Optional[int] = None,
    page: Optional[int] = None,
    sort_by: Optional[str] = None,
    sort_order: Optional[str] = None,
    agent_id: Optional[str] = None,
    team_id: Optional[str] = None,
    workflow_id: Optional[str] = None,
    model_id: Optional[str] = None,
    filter_type: Optional[EvalFilterType] = None,
    eval_type: Optional[List[EvalType]] = None,
    deserialize: Optional[bool] = True,
) -> Union[List[EvalRunRecord], Tuple[List[Dict[str, Any]], int]]:
    """Get all eval runs from Redis, with optional filtering, sorting and pagination.

    Args:
        limit (Optional[int]): The maximum number of eval runs to return per page.
        page (Optional[int]): The page number to return.
        sort_by (Optional[str]): The field to sort by. Defaults to "created_at" descending.
        sort_order (Optional[str]): The order to sort by ("asc" or "desc").
        agent_id (Optional[str]): Filter by agent ID.
        team_id (Optional[str]): Filter by team ID.
        workflow_id (Optional[str]): Filter by workflow ID.
        model_id (Optional[str]): Filter by model ID.
        filter_type (Optional[EvalFilterType]): Keep only runs attached to an agent, team, or workflow.
        eval_type (Optional[List[EvalType]]): Keep only runs whose eval_type is in this list.
        deserialize (Optional[bool]): Whether to deserialize into EvalRunRecord objects. Defaults to True.

    Returns:
        Union[List[EvalRunRecord], Tuple[List[Dict[str, Any]], int]]:
            - When deserialize=True: the list of eval runs for the requested page.
            - When deserialize=False: a tuple of (raw rows for the page, total filtered count).

    Raises:
        Exception: If any error occurs while getting the eval runs.
    """
    try:
        all_eval_runs = self._get_all_records("evals")

        # Apply all filters in a single pass over the records.
        filtered_runs = []
        for run in all_eval_runs:
            # Exact-match component filters
            if agent_id is not None and run.get("agent_id") != agent_id:
                continue
            if team_id is not None and run.get("team_id") != team_id:
                continue
            if workflow_id is not None and run.get("workflow_id") != workflow_id:
                continue
            if model_id is not None and run.get("model_id") != model_id:
                continue

            # Eval type filter (empty list means "no restriction")
            if eval_type is not None and len(eval_type) > 0 and run.get("eval_type") not in eval_type:
                continue

            # Component-kind filter: the matching component ID must be present
            if filter_type is not None:
                if filter_type == EvalFilterType.AGENT and run.get("agent_id") is None:
                    continue
                elif filter_type == EvalFilterType.TEAM and run.get("team_id") is None:
                    continue
                elif filter_type == EvalFilterType.WORKFLOW and run.get("workflow_id") is None:
                    continue

            filtered_runs.append(run)

        # Default ordering: newest first.
        if sort_by is None:
            sort_by = "created_at"
            sort_order = "desc"

        sorted_runs = apply_sorting(records=filtered_runs, sort_by=sort_by, sort_order=sort_order)
        paginated_runs = apply_pagination(records=sorted_runs, limit=limit, page=page)

        if not deserialize:
            return paginated_runs, len(filtered_runs)

        return [EvalRunRecord.model_validate(row) for row in paginated_runs]

    except Exception as e:
        log_error(f"Exception getting eval runs: {e}")
        raise e
1484
+
1485
def rename_eval_run(
    self, eval_run_id: str, name: str, deserialize: Optional[bool] = True
) -> Optional[Union[EvalRunRecord, Dict[str, Any]]]:
    """Update the name of an eval run in Redis.

    Args:
        eval_run_id (str): The ID of the eval run to rename.
        name (str): The new name of the eval run.
        deserialize (Optional[bool]): Whether to return an EvalRunRecord. Defaults to True.

    Returns:
        Optional[Union[EvalRunRecord, Dict[str, Any]]]: The updated eval run, or None when
        the run does not exist or the write fails.

    Raises:
        Exception: If any error occurs while updating the eval run name.
    """
    try:
        run_data = self._get_record("evals", eval_run_id)
        if run_data is None:
            return None

        # Apply the rename and refresh the modification timestamp.
        run_data["name"] = name
        run_data["updated_at"] = int(time.time())

        if not self._store_record("evals", eval_run_id, run_data):
            return None

        log_debug(f"Renamed eval run with id '{eval_run_id}' to '{name}'")

        return run_data if not deserialize else EvalRunRecord.model_validate(run_data)

    except Exception as e:
        log_error(f"Error updating eval run name {eval_run_id}: {e}")
        raise
1522
+
1523
+ # -- Cultural Knowledge methods --
1524
def clear_cultural_knowledge(self) -> None:
    """Remove every cultural-knowledge record stored in Redis.

    Raises:
        Exception: If an error occurs during deletion.
    """
    try:
        culture_keys = get_all_keys_for_table(
            redis_client=self.redis_client, prefix=self.db_prefix, table_type="culture"
        )
        # Batched delete; a no-op when the table is already empty.
        if culture_keys:
            self.redis_client.delete(*culture_keys)
    except Exception as e:
        log_error(f"Exception deleting all cultural knowledge: {e}")
        raise e
1539
+
1540
def delete_cultural_knowledge(self, id: str) -> None:
    """Delete cultural knowledge by ID.

    Args:
        id (str): The ID of the cultural knowledge to delete.

    Raises:
        Exception: If an error occurs during deletion.
    """
    try:
        was_deleted = self._delete_record("culture", id, index_fields=["name", "agent_id", "team_id"])
        if was_deleted:
            log_debug(f"Successfully deleted cultural knowledge id: {id}")
        else:
            # Missing records are not treated as errors.
            log_debug(f"No cultural knowledge found with id: {id}")
    except Exception as e:
        log_error(f"Error deleting cultural knowledge: {e}")
        raise e
1558
+
1559
def get_cultural_knowledge(
    self, id: str, deserialize: Optional[bool] = True
) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
    """Get cultural knowledge by ID.

    Args:
        id (str): The ID of the cultural knowledge to retrieve.
        deserialize (Optional[bool]): Whether to deserialize to a CulturalKnowledge object. Defaults to True.

    Returns:
        Optional[Union[CulturalKnowledge, Dict[str, Any]]]: The cultural knowledge if found, None otherwise.

    Raises:
        Exception: If an error occurs during retrieval.
    """
    try:
        raw = self._get_record("culture", id)
        if raw is None:
            return None
        return deserialize_cultural_knowledge_from_db(raw) if deserialize else raw
    except Exception as e:
        log_error(f"Error getting cultural knowledge: {e}")
        raise e
1588
+
1589
def get_all_cultural_knowledge(
    self,
    agent_id: Optional[str] = None,
    team_id: Optional[str] = None,
    name: Optional[str] = None,
    limit: Optional[int] = None,
    page: Optional[int] = None,
    sort_by: Optional[str] = None,
    sort_order: Optional[str] = None,
    deserialize: Optional[bool] = True,
) -> Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
    """Get all cultural knowledge with filtering and pagination.

    Args:
        agent_id (Optional[str]): Filter by agent ID.
        team_id (Optional[str]): Filter by team ID.
        name (Optional[str]): Filter by name (case-insensitive partial match).
        limit (Optional[int]): Maximum number of results to return.
        page (Optional[int]): Page number for pagination.
        sort_by (Optional[str]): Field to sort by.
        sort_order (Optional[str]): Sort order ('asc' or 'desc').
        deserialize (Optional[bool]): Whether to deserialize to CulturalKnowledge objects. Defaults to True.

    Returns:
        Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
            - When deserialize=True: List of CulturalKnowledge objects
            - When deserialize=False: Tuple with list of dictionaries and total count

    Raises:
        Exception: If an error occurs during retrieval.
    """
    try:
        all_cultural_knowledge = self._get_all_records("culture")

        # Apply filters
        filtered_items = []
        for item in all_cultural_knowledge:
            if agent_id is not None and item.get("agent_id") != agent_id:
                continue
            if team_id is not None and item.get("team_id") != team_id:
                continue
            # `or ""` guards records whose "name" key is stored as None:
            # .get("name", "") would return None there and crash on .lower().
            if name is not None and name.lower() not in (item.get("name") or "").lower():
                continue

            filtered_items.append(item)

        sorted_items = apply_sorting(records=filtered_items, sort_by=sort_by, sort_order=sort_order)
        paginated_items = apply_pagination(records=sorted_items, limit=limit, page=page)

        if not deserialize:
            return paginated_items, len(filtered_items)

        return [deserialize_cultural_knowledge_from_db(item) for item in paginated_items]

    except Exception as e:
        log_error(f"Error getting all cultural knowledge: {e}")
        raise e
1646
+
1647
def upsert_cultural_knowledge(
    self, cultural_knowledge: CulturalKnowledge, deserialize: Optional[bool] = True
) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
    """Upsert cultural knowledge in Redis.

    Args:
        cultural_knowledge (CulturalKnowledge): The cultural knowledge to upsert.
        deserialize (Optional[bool]): Whether to deserialize the result. Defaults to True.

    Returns:
        Optional[Union[CulturalKnowledge, Dict[str, Any]]]: The upserted cultural knowledge,
        or None when the store operation fails.

    Raises:
        Exception: If an error occurs during upsert.
    """
    try:
        # Serialize content, categories, and notes into a dict for DB storage
        content_dict = serialize_cultural_knowledge_for_db(cultural_knowledge)
        # Generate an ID when the record does not have one yet.
        item_id = cultural_knowledge.id or str(uuid4())

        # Create the item dict with serialized content
        data = {
            "id": item_id,
            "name": cultural_knowledge.name,
            "summary": cultural_knowledge.summary,
            # Empty content dicts are normalized to None.
            "content": content_dict if content_dict else None,
            "metadata": cultural_knowledge.metadata,
            "input": cultural_knowledge.input,
            "created_at": cultural_knowledge.created_at,
            # Always stamp the modification time on write.
            "updated_at": int(time.time()),
            "agent_id": cultural_knowledge.agent_id,
            "team_id": cultural_knowledge.team_id,
        }

        success = self._store_record("culture", item_id, data, index_fields=["name", "agent_id", "team_id"])

        if not success:
            return None

        if not deserialize:
            return data

        return deserialize_cultural_knowledge_from_db(data)

    except Exception as e:
        log_error(f"Error upserting cultural knowledge: {e}")
        raise e
1694
+
1695
+ # --- Traces ---
1696
def upsert_trace(self, trace: "Trace") -> None:
    """Create or update a single trace record in the database.

    On update, the trace's end_time/duration/status are refreshed, the name is
    only replaced when the new span is a higher-level root (workflow > team >
    agent > child), and context IDs are merged without clobbering non-null
    values already stored. Errors are logged and swallowed so tracing never
    breaks the main application flow.

    Args:
        trace: The Trace object to store (one per trace_id).
    """
    try:
        # Check if trace already exists
        existing = self._get_record("traces", trace.trace_id)

        if existing:
            # workflow (level 3) > team (level 2) > agent (level 1) > child/unknown (level 0)
            def get_component_level(
                workflow_id: Optional[str], team_id: Optional[str], agent_id: Optional[str], name: str
            ) -> int:
                # Check if name indicates a root span
                is_root_name = ".run" in name or ".arun" in name

                if not is_root_name:
                    return 0  # Child span (not a root)
                elif workflow_id:
                    return 3  # Workflow root
                elif team_id:
                    return 2  # Team root
                elif agent_id:
                    return 1  # Agent root
                else:
                    return 0  # Unknown

            existing_level = get_component_level(
                existing.get("workflow_id"),
                existing.get("team_id"),
                existing.get("agent_id"),
                existing.get("name", ""),
            )
            new_level = get_component_level(trace.workflow_id, trace.team_id, trace.agent_id, trace.name)

            # Only update name if new trace is from a strictly higher level
            should_update_name = new_level > existing_level

            # Parse existing start_time to calculate correct duration
            # (stored values are ISO strings; "Z" suffix normalized for fromisoformat).
            existing_start_time_str = existing.get("start_time")
            if isinstance(existing_start_time_str, str):
                existing_start_time = datetime.fromisoformat(existing_start_time_str.replace("Z", "+00:00"))
            else:
                # Fall back to the new trace's start when no parseable stored value exists.
                existing_start_time = trace.start_time

            # Duration measured from the ORIGINAL start to the latest end.
            recalculated_duration_ms = int((trace.end_time - existing_start_time).total_seconds() * 1000)

            # Update existing record
            existing["end_time"] = trace.end_time.isoformat()
            existing["duration_ms"] = recalculated_duration_ms
            existing["status"] = trace.status
            if should_update_name:
                existing["name"] = trace.name

            # Update context fields ONLY if new value is not None (preserve non-null values)
            if trace.run_id is not None:
                existing["run_id"] = trace.run_id
            if trace.session_id is not None:
                existing["session_id"] = trace.session_id
            if trace.user_id is not None:
                existing["user_id"] = trace.user_id
            if trace.agent_id is not None:
                existing["agent_id"] = trace.agent_id
            if trace.team_id is not None:
                existing["team_id"] = trace.team_id
            if trace.workflow_id is not None:
                existing["workflow_id"] = trace.workflow_id

            log_debug(
                f" Updating trace with context: run_id={existing.get('run_id', 'unchanged')}, "
                f"session_id={existing.get('session_id', 'unchanged')}, "
                f"user_id={existing.get('user_id', 'unchanged')}, "
                f"agent_id={existing.get('agent_id', 'unchanged')}, "
                f"team_id={existing.get('team_id', 'unchanged')}, "
            )

            self._store_record(
                "traces",
                trace.trace_id,
                existing,
                index_fields=["run_id", "session_id", "user_id", "agent_id", "team_id", "workflow_id", "status"],
            )
        else:
            # First sighting of this trace_id: store the full trace.
            trace_dict = trace.to_dict()
            # Span aggregates are derived at read time (see get_trace), not persisted.
            trace_dict.pop("total_spans", None)
            trace_dict.pop("error_count", None)
            self._store_record(
                "traces",
                trace.trace_id,
                trace_dict,
                index_fields=["run_id", "session_id", "user_id", "agent_id", "team_id", "workflow_id", "status"],
            )

    except Exception as e:
        log_error(f"Error creating trace: {e}")
        # Don't raise - tracing should not break the main application flow
1794
+
1795
def get_trace(
    self,
    trace_id: Optional[str] = None,
    run_id: Optional[str] = None,
):
    """Get a single trace by trace_id or other filters.

    Args:
        trace_id: The unique trace identifier.
        run_id: Filter by run ID (returns first match).

    Returns:
        Optional[Trace]: The trace if found, None otherwise. Errors are logged
        and reported as None rather than raised.

    Note:
        If multiple filters are provided, trace_id takes precedence.
        For other filters, the most recent trace is returned.
        Span aggregates (total_spans / error_count) are computed at read time
        by scanning the full "spans" table — O(total spans) per call.
    """
    try:
        from agno.tracing.schemas import Trace as TraceSchema

        if trace_id:
            result = self._get_record("traces", trace_id)
            if result:
                # Calculate total_spans and error_count from the spans table.
                all_spans = self._get_all_records("spans")
                trace_spans = [s for s in all_spans if s.get("trace_id") == trace_id]
                result["total_spans"] = len(trace_spans)
                result["error_count"] = len([s for s in trace_spans if s.get("status_code") == "ERROR"])
                return TraceSchema.from_dict(result)
            return None

        elif run_id:
            all_traces = self._get_all_records("traces")
            matching = [t for t in all_traces if t.get("run_id") == run_id]
            if matching:
                # Sort by start_time descending and get most recent
                # (start_time is an ISO string, so lexicographic order is chronological).
                matching.sort(key=lambda x: x.get("start_time", ""), reverse=True)
                result = matching[0]
                # Calculate total_spans and error_count
                all_spans = self._get_all_records("spans")
                trace_spans = [s for s in all_spans if s.get("trace_id") == result.get("trace_id")]
                result["total_spans"] = len(trace_spans)
                result["error_count"] = len([s for s in trace_spans if s.get("status_code") == "ERROR"])
                return TraceSchema.from_dict(result)
            return None

        else:
            log_debug("get_trace called without any filter parameters")
            return None

    except Exception as e:
        log_error(f"Error getting trace: {e}")
        return None
1849
+
1850
+ def get_traces(
1851
+ self,
1852
+ run_id: Optional[str] = None,
1853
+ session_id: Optional[str] = None,
1854
+ user_id: Optional[str] = None,
1855
+ agent_id: Optional[str] = None,
1856
+ team_id: Optional[str] = None,
1857
+ workflow_id: Optional[str] = None,
1858
+ status: Optional[str] = None,
1859
+ start_time: Optional[datetime] = None,
1860
+ end_time: Optional[datetime] = None,
1861
+ limit: Optional[int] = 20,
1862
+ page: Optional[int] = 1,
1863
+ ) -> tuple[List, int]:
1864
+ """Get traces matching the provided filters.
1865
+
1866
+ Args:
1867
+ run_id: Filter by run ID.
1868
+ session_id: Filter by session ID.
1869
+ user_id: Filter by user ID.
1870
+ agent_id: Filter by agent ID.
1871
+ team_id: Filter by team ID.
1872
+ workflow_id: Filter by workflow ID.
1873
+ status: Filter by status (OK, ERROR, UNSET).
1874
+ start_time: Filter traces starting after this datetime.
1875
+ end_time: Filter traces ending before this datetime.
1876
+ limit: Maximum number of traces to return per page.
1877
+ page: Page number (1-indexed).
1878
+
1879
+ Returns:
1880
+ tuple[List[Trace], int]: Tuple of (list of matching traces, total count).
1881
+ """
1882
+ try:
1883
+ from agno.tracing.schemas import Trace as TraceSchema
1884
+
1885
+ log_debug(
1886
+ f"get_traces called with filters: run_id={run_id}, session_id={session_id}, "
1887
+ f"user_id={user_id}, agent_id={agent_id}, page={page}, limit={limit}"
1888
+ )
1889
+
1890
+ all_traces = self._get_all_records("traces")
1891
+ all_spans = self._get_all_records("spans")
1892
+
1893
+ # Apply filters
1894
+ filtered_traces = []
1895
+ for trace in all_traces:
1896
+ if run_id and trace.get("run_id") != run_id:
1897
+ continue
1898
+ if session_id and trace.get("session_id") != session_id:
1899
+ continue
1900
+ if user_id and trace.get("user_id") != user_id:
1901
+ continue
1902
+ if agent_id and trace.get("agent_id") != agent_id:
1903
+ continue
1904
+ if team_id and trace.get("team_id") != team_id:
1905
+ continue
1906
+ if workflow_id and trace.get("workflow_id") != workflow_id:
1907
+ continue
1908
+ if status and trace.get("status") != status:
1909
+ continue
1910
+ if start_time:
1911
+ trace_start = trace.get("start_time", "")
1912
+ if trace_start and trace_start < start_time.isoformat():
1913
+ continue
1914
+ if end_time:
1915
+ trace_end = trace.get("end_time", "")
1916
+ if trace_end and trace_end > end_time.isoformat():
1917
+ continue
1918
+
1919
+ filtered_traces.append(trace)
1920
+
1921
+ total_count = len(filtered_traces)
1922
+
1923
+ # Sort by start_time descending
1924
+ filtered_traces.sort(key=lambda x: x.get("start_time", ""), reverse=True)
1925
+
1926
+ # Apply pagination
1927
+ paginated_traces = apply_pagination(records=filtered_traces, limit=limit, page=page)
1928
+
1929
+ traces = []
1930
+ for row in paginated_traces:
1931
+ # Calculate total_spans and error_count
1932
+ trace_spans = [s for s in all_spans if s.get("trace_id") == row.get("trace_id")]
1933
+ row["total_spans"] = len(trace_spans)
1934
+ row["error_count"] = len([s for s in trace_spans if s.get("status_code") == "ERROR"])
1935
+ traces.append(TraceSchema.from_dict(row))
1936
+
1937
+ return traces, total_count
1938
+
1939
+ except Exception as e:
1940
+ log_error(f"Error getting traces: {e}")
1941
+ return [], 0
1942
+
1943
+ def get_trace_stats(
1944
+ self,
1945
+ user_id: Optional[str] = None,
1946
+ agent_id: Optional[str] = None,
1947
+ team_id: Optional[str] = None,
1948
+ workflow_id: Optional[str] = None,
1949
+ start_time: Optional[datetime] = None,
1950
+ end_time: Optional[datetime] = None,
1951
+ limit: Optional[int] = 20,
1952
+ page: Optional[int] = 1,
1953
+ ) -> tuple[List[Dict[str, Any]], int]:
1954
+ """Get trace statistics grouped by session.
1955
+
1956
+ Args:
1957
+ user_id: Filter by user ID.
1958
+ agent_id: Filter by agent ID.
1959
+ team_id: Filter by team ID.
1960
+ workflow_id: Filter by workflow ID.
1961
+ start_time: Filter sessions with traces created after this datetime.
1962
+ end_time: Filter sessions with traces created before this datetime.
1963
+ limit: Maximum number of sessions to return per page.
1964
+ page: Page number (1-indexed).
1965
+
1966
+ Returns:
1967
+ tuple[List[Dict], int]: Tuple of (list of session stats dicts, total count).
1968
+ Each dict contains: session_id, user_id, agent_id, team_id, total_traces,
1969
+ first_trace_at, last_trace_at.
1970
+ """
1971
+ try:
1972
+ log_debug(
1973
+ f"get_trace_stats called with filters: user_id={user_id}, agent_id={agent_id}, "
1974
+ f"workflow_id={workflow_id}, team_id={team_id}, "
1975
+ f"start_time={start_time}, end_time={end_time}, page={page}, limit={limit}"
1976
+ )
1977
+
1978
+ all_traces = self._get_all_records("traces")
1979
+
1980
+ # Filter traces and group by session_id
1981
+ session_stats: Dict[str, Dict[str, Any]] = {}
1982
+ for trace in all_traces:
1983
+ trace_session_id = trace.get("session_id")
1984
+ if not trace_session_id:
1985
+ continue
1986
+
1987
+ # Apply filters
1988
+ if user_id and trace.get("user_id") != user_id:
1989
+ continue
1990
+ if agent_id and trace.get("agent_id") != agent_id:
1991
+ continue
1992
+ if team_id and trace.get("team_id") != team_id:
1993
+ continue
1994
+ if workflow_id and trace.get("workflow_id") != workflow_id:
1995
+ continue
1996
+
1997
+ created_at = trace.get("created_at", "")
1998
+ if start_time and created_at < start_time.isoformat():
1999
+ continue
2000
+ if end_time and created_at > end_time.isoformat():
2001
+ continue
2002
+
2003
+ if trace_session_id not in session_stats:
2004
+ session_stats[trace_session_id] = {
2005
+ "session_id": trace_session_id,
2006
+ "user_id": trace.get("user_id"),
2007
+ "agent_id": trace.get("agent_id"),
2008
+ "team_id": trace.get("team_id"),
2009
+ "workflow_id": trace.get("workflow_id"),
2010
+ "total_traces": 0,
2011
+ "first_trace_at": created_at,
2012
+ "last_trace_at": created_at,
2013
+ }
2014
+
2015
+ session_stats[trace_session_id]["total_traces"] += 1
2016
+ if created_at < session_stats[trace_session_id]["first_trace_at"]:
2017
+ session_stats[trace_session_id]["first_trace_at"] = created_at
2018
+ if created_at > session_stats[trace_session_id]["last_trace_at"]:
2019
+ session_stats[trace_session_id]["last_trace_at"] = created_at
2020
+
2021
+ # Convert to list and sort by last_trace_at descending
2022
+ stats_list = list(session_stats.values())
2023
+ stats_list.sort(key=lambda x: x.get("last_trace_at", ""), reverse=True)
2024
+
2025
+ total_count = len(stats_list)
2026
+
2027
+ # Apply pagination
2028
+ paginated_stats = apply_pagination(records=stats_list, limit=limit, page=page)
2029
+
2030
+ # Convert ISO strings to datetime objects
2031
+ for stat in paginated_stats:
2032
+ first_trace_at_str = stat["first_trace_at"]
2033
+ last_trace_at_str = stat["last_trace_at"]
2034
+ stat["first_trace_at"] = datetime.fromisoformat(first_trace_at_str.replace("Z", "+00:00"))
2035
+ stat["last_trace_at"] = datetime.fromisoformat(last_trace_at_str.replace("Z", "+00:00"))
2036
+
2037
+ return paginated_stats, total_count
2038
+
2039
+ except Exception as e:
2040
+ log_error(f"Error getting trace stats: {e}")
2041
+ return [], 0
2042
+
2043
+ # --- Spans ---
2044
+ def create_span(self, span: "Span") -> None:
2045
+ """Create a single span in the database.
2046
+
2047
+ Args:
2048
+ span: The Span object to store.
2049
+ """
2050
+ try:
2051
+ self._store_record(
2052
+ "spans",
2053
+ span.span_id,
2054
+ span.to_dict(),
2055
+ index_fields=["trace_id", "parent_span_id"],
2056
+ )
2057
+
2058
+ except Exception as e:
2059
+ log_error(f"Error creating span: {e}")
2060
+
2061
+ def create_spans(self, spans: List) -> None:
2062
+ """Create multiple spans in the database as a batch.
2063
+
2064
+ Args:
2065
+ spans: List of Span objects to store.
2066
+ """
2067
+ if not spans:
2068
+ return
2069
+
2070
+ try:
2071
+ for span in spans:
2072
+ self._store_record(
2073
+ "spans",
2074
+ span.span_id,
2075
+ span.to_dict(),
2076
+ index_fields=["trace_id", "parent_span_id"],
2077
+ )
2078
+
2079
+ except Exception as e:
2080
+ log_error(f"Error creating spans batch: {e}")
2081
+
2082
+ def get_span(self, span_id: str):
2083
+ """Get a single span by its span_id.
2084
+
2085
+ Args:
2086
+ span_id: The unique span identifier.
2087
+
2088
+ Returns:
2089
+ Optional[Span]: The span if found, None otherwise.
2090
+ """
2091
+ try:
2092
+ from agno.tracing.schemas import Span as SpanSchema
2093
+
2094
+ result = self._get_record("spans", span_id)
2095
+ if result:
2096
+ return SpanSchema.from_dict(result)
2097
+ return None
2098
+
2099
+ except Exception as e:
2100
+ log_error(f"Error getting span: {e}")
2101
+ return None
2102
+
2103
+ def get_spans(
2104
+ self,
2105
+ trace_id: Optional[str] = None,
2106
+ parent_span_id: Optional[str] = None,
2107
+ limit: Optional[int] = 1000,
2108
+ ) -> List:
2109
+ """Get spans matching the provided filters.
2110
+
2111
+ Args:
2112
+ trace_id: Filter by trace ID.
2113
+ parent_span_id: Filter by parent span ID.
2114
+ limit: Maximum number of spans to return.
2115
+
2116
+ Returns:
2117
+ List[Span]: List of matching spans.
2118
+ """
2119
+ try:
2120
+ from agno.tracing.schemas import Span as SpanSchema
2121
+
2122
+ all_spans = self._get_all_records("spans")
2123
+
2124
+ # Apply filters
2125
+ filtered_spans = []
2126
+ for span in all_spans:
2127
+ if trace_id and span.get("trace_id") != trace_id:
2128
+ continue
2129
+ if parent_span_id and span.get("parent_span_id") != parent_span_id:
2130
+ continue
2131
+ filtered_spans.append(span)
2132
+
2133
+ # Apply limit
2134
+ if limit:
2135
+ filtered_spans = filtered_spans[:limit]
2136
+
2137
+ return [SpanSchema.from_dict(s) for s in filtered_spans]
2138
+
2139
+ except Exception as e:
2140
+ log_error(f"Error getting spans: {e}")
2141
+ return []