agno 0.1.2__py3-none-any.whl → 2.3.13__py3-none-any.whl

This diff shows the changes between publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (723)
  1. agno/__init__.py +8 -0
  2. agno/agent/__init__.py +44 -5
  3. agno/agent/agent.py +10531 -2975
  4. agno/api/agent.py +14 -53
  5. agno/api/api.py +7 -46
  6. agno/api/evals.py +22 -0
  7. agno/api/os.py +17 -0
  8. agno/api/routes.py +6 -25
  9. agno/api/schemas/__init__.py +9 -0
  10. agno/api/schemas/agent.py +6 -9
  11. agno/api/schemas/evals.py +16 -0
  12. agno/api/schemas/os.py +14 -0
  13. agno/api/schemas/team.py +10 -10
  14. agno/api/schemas/utils.py +21 -0
  15. agno/api/schemas/workflows.py +16 -0
  16. agno/api/settings.py +53 -0
  17. agno/api/team.py +22 -26
  18. agno/api/workflow.py +28 -0
  19. agno/cloud/aws/base.py +214 -0
  20. agno/cloud/aws/s3/__init__.py +2 -0
  21. agno/cloud/aws/s3/api_client.py +43 -0
  22. agno/cloud/aws/s3/bucket.py +195 -0
  23. agno/cloud/aws/s3/object.py +57 -0
  24. agno/compression/__init__.py +3 -0
  25. agno/compression/manager.py +247 -0
  26. agno/culture/__init__.py +3 -0
  27. agno/culture/manager.py +956 -0
  28. agno/db/__init__.py +24 -0
  29. agno/db/async_postgres/__init__.py +3 -0
  30. agno/db/base.py +946 -0
  31. agno/db/dynamo/__init__.py +3 -0
  32. agno/db/dynamo/dynamo.py +2781 -0
  33. agno/db/dynamo/schemas.py +442 -0
  34. agno/db/dynamo/utils.py +743 -0
  35. agno/db/firestore/__init__.py +3 -0
  36. agno/db/firestore/firestore.py +2379 -0
  37. agno/db/firestore/schemas.py +181 -0
  38. agno/db/firestore/utils.py +376 -0
  39. agno/db/gcs_json/__init__.py +3 -0
  40. agno/db/gcs_json/gcs_json_db.py +1791 -0
  41. agno/db/gcs_json/utils.py +228 -0
  42. agno/db/in_memory/__init__.py +3 -0
  43. agno/db/in_memory/in_memory_db.py +1312 -0
  44. agno/db/in_memory/utils.py +230 -0
  45. agno/db/json/__init__.py +3 -0
  46. agno/db/json/json_db.py +1777 -0
  47. agno/db/json/utils.py +230 -0
  48. agno/db/migrations/manager.py +199 -0
  49. agno/db/migrations/v1_to_v2.py +635 -0
  50. agno/db/migrations/versions/v2_3_0.py +938 -0
  51. agno/db/mongo/__init__.py +17 -0
  52. agno/db/mongo/async_mongo.py +2760 -0
  53. agno/db/mongo/mongo.py +2597 -0
  54. agno/db/mongo/schemas.py +119 -0
  55. agno/db/mongo/utils.py +276 -0
  56. agno/db/mysql/__init__.py +4 -0
  57. agno/db/mysql/async_mysql.py +2912 -0
  58. agno/db/mysql/mysql.py +2923 -0
  59. agno/db/mysql/schemas.py +186 -0
  60. agno/db/mysql/utils.py +488 -0
  61. agno/db/postgres/__init__.py +4 -0
  62. agno/db/postgres/async_postgres.py +2579 -0
  63. agno/db/postgres/postgres.py +2870 -0
  64. agno/db/postgres/schemas.py +187 -0
  65. agno/db/postgres/utils.py +442 -0
  66. agno/db/redis/__init__.py +3 -0
  67. agno/db/redis/redis.py +2141 -0
  68. agno/db/redis/schemas.py +159 -0
  69. agno/db/redis/utils.py +346 -0
  70. agno/db/schemas/__init__.py +4 -0
  71. agno/db/schemas/culture.py +120 -0
  72. agno/db/schemas/evals.py +34 -0
  73. agno/db/schemas/knowledge.py +40 -0
  74. agno/db/schemas/memory.py +61 -0
  75. agno/db/singlestore/__init__.py +3 -0
  76. agno/db/singlestore/schemas.py +179 -0
  77. agno/db/singlestore/singlestore.py +2877 -0
  78. agno/db/singlestore/utils.py +384 -0
  79. agno/db/sqlite/__init__.py +4 -0
  80. agno/db/sqlite/async_sqlite.py +2911 -0
  81. agno/db/sqlite/schemas.py +181 -0
  82. agno/db/sqlite/sqlite.py +2908 -0
  83. agno/db/sqlite/utils.py +429 -0
  84. agno/db/surrealdb/__init__.py +3 -0
  85. agno/db/surrealdb/metrics.py +292 -0
  86. agno/db/surrealdb/models.py +334 -0
  87. agno/db/surrealdb/queries.py +71 -0
  88. agno/db/surrealdb/surrealdb.py +1908 -0
  89. agno/db/surrealdb/utils.py +147 -0
  90. agno/db/utils.py +118 -0
  91. agno/eval/__init__.py +24 -0
  92. agno/eval/accuracy.py +666 -276
  93. agno/eval/agent_as_judge.py +861 -0
  94. agno/eval/base.py +29 -0
  95. agno/eval/performance.py +779 -0
  96. agno/eval/reliability.py +241 -62
  97. agno/eval/utils.py +120 -0
  98. agno/exceptions.py +143 -1
  99. agno/filters.py +354 -0
  100. agno/guardrails/__init__.py +6 -0
  101. agno/guardrails/base.py +19 -0
  102. agno/guardrails/openai.py +144 -0
  103. agno/guardrails/pii.py +94 -0
  104. agno/guardrails/prompt_injection.py +52 -0
  105. agno/hooks/__init__.py +3 -0
  106. agno/hooks/decorator.py +164 -0
  107. agno/integrations/discord/__init__.py +3 -0
  108. agno/integrations/discord/client.py +203 -0
  109. agno/knowledge/__init__.py +5 -1
  110. agno/{document → knowledge}/chunking/agentic.py +22 -14
  111. agno/{document → knowledge}/chunking/document.py +2 -2
  112. agno/{document → knowledge}/chunking/fixed.py +7 -6
  113. agno/knowledge/chunking/markdown.py +151 -0
  114. agno/{document → knowledge}/chunking/recursive.py +15 -3
  115. agno/knowledge/chunking/row.py +39 -0
  116. agno/knowledge/chunking/semantic.py +91 -0
  117. agno/knowledge/chunking/strategy.py +165 -0
  118. agno/knowledge/content.py +74 -0
  119. agno/knowledge/document/__init__.py +5 -0
  120. agno/{document → knowledge/document}/base.py +12 -2
  121. agno/knowledge/embedder/__init__.py +5 -0
  122. agno/knowledge/embedder/aws_bedrock.py +343 -0
  123. agno/knowledge/embedder/azure_openai.py +210 -0
  124. agno/{embedder → knowledge/embedder}/base.py +8 -0
  125. agno/knowledge/embedder/cohere.py +323 -0
  126. agno/knowledge/embedder/fastembed.py +62 -0
  127. agno/{embedder → knowledge/embedder}/fireworks.py +1 -1
  128. agno/knowledge/embedder/google.py +258 -0
  129. agno/knowledge/embedder/huggingface.py +94 -0
  130. agno/knowledge/embedder/jina.py +182 -0
  131. agno/knowledge/embedder/langdb.py +22 -0
  132. agno/knowledge/embedder/mistral.py +206 -0
  133. agno/knowledge/embedder/nebius.py +13 -0
  134. agno/knowledge/embedder/ollama.py +154 -0
  135. agno/knowledge/embedder/openai.py +195 -0
  136. agno/knowledge/embedder/sentence_transformer.py +63 -0
  137. agno/{embedder → knowledge/embedder}/together.py +1 -1
  138. agno/knowledge/embedder/vllm.py +262 -0
  139. agno/knowledge/embedder/voyageai.py +165 -0
  140. agno/knowledge/knowledge.py +3006 -0
  141. agno/knowledge/reader/__init__.py +7 -0
  142. agno/knowledge/reader/arxiv_reader.py +81 -0
  143. agno/knowledge/reader/base.py +95 -0
  144. agno/knowledge/reader/csv_reader.py +164 -0
  145. agno/knowledge/reader/docx_reader.py +82 -0
  146. agno/knowledge/reader/field_labeled_csv_reader.py +290 -0
  147. agno/knowledge/reader/firecrawl_reader.py +201 -0
  148. agno/knowledge/reader/json_reader.py +88 -0
  149. agno/knowledge/reader/markdown_reader.py +137 -0
  150. agno/knowledge/reader/pdf_reader.py +431 -0
  151. agno/knowledge/reader/pptx_reader.py +101 -0
  152. agno/knowledge/reader/reader_factory.py +313 -0
  153. agno/knowledge/reader/s3_reader.py +89 -0
  154. agno/knowledge/reader/tavily_reader.py +193 -0
  155. agno/knowledge/reader/text_reader.py +127 -0
  156. agno/knowledge/reader/web_search_reader.py +325 -0
  157. agno/knowledge/reader/website_reader.py +455 -0
  158. agno/knowledge/reader/wikipedia_reader.py +91 -0
  159. agno/knowledge/reader/youtube_reader.py +78 -0
  160. agno/knowledge/remote_content/remote_content.py +88 -0
  161. agno/knowledge/reranker/__init__.py +3 -0
  162. agno/{reranker → knowledge/reranker}/base.py +1 -1
  163. agno/{reranker → knowledge/reranker}/cohere.py +2 -2
  164. agno/knowledge/reranker/infinity.py +195 -0
  165. agno/knowledge/reranker/sentence_transformer.py +54 -0
  166. agno/knowledge/types.py +39 -0
  167. agno/knowledge/utils.py +234 -0
  168. agno/media.py +439 -95
  169. agno/memory/__init__.py +16 -3
  170. agno/memory/manager.py +1474 -123
  171. agno/memory/strategies/__init__.py +15 -0
  172. agno/memory/strategies/base.py +66 -0
  173. agno/memory/strategies/summarize.py +196 -0
  174. agno/memory/strategies/types.py +37 -0
  175. agno/models/aimlapi/__init__.py +5 -0
  176. agno/models/aimlapi/aimlapi.py +62 -0
  177. agno/models/anthropic/__init__.py +4 -0
  178. agno/models/anthropic/claude.py +960 -496
  179. agno/models/aws/__init__.py +15 -0
  180. agno/models/aws/bedrock.py +686 -451
  181. agno/models/aws/claude.py +190 -183
  182. agno/models/azure/__init__.py +18 -1
  183. agno/models/azure/ai_foundry.py +489 -0
  184. agno/models/azure/openai_chat.py +89 -40
  185. agno/models/base.py +2477 -550
  186. agno/models/cerebras/__init__.py +12 -0
  187. agno/models/cerebras/cerebras.py +565 -0
  188. agno/models/cerebras/cerebras_openai.py +131 -0
  189. agno/models/cohere/__init__.py +4 -0
  190. agno/models/cohere/chat.py +306 -492
  191. agno/models/cometapi/__init__.py +5 -0
  192. agno/models/cometapi/cometapi.py +74 -0
  193. agno/models/dashscope/__init__.py +5 -0
  194. agno/models/dashscope/dashscope.py +90 -0
  195. agno/models/deepinfra/__init__.py +5 -0
  196. agno/models/deepinfra/deepinfra.py +45 -0
  197. agno/models/deepseek/__init__.py +4 -0
  198. agno/models/deepseek/deepseek.py +110 -9
  199. agno/models/fireworks/__init__.py +4 -0
  200. agno/models/fireworks/fireworks.py +19 -22
  201. agno/models/google/__init__.py +3 -7
  202. agno/models/google/gemini.py +1717 -662
  203. agno/models/google/utils.py +22 -0
  204. agno/models/groq/__init__.py +4 -0
  205. agno/models/groq/groq.py +391 -666
  206. agno/models/huggingface/__init__.py +4 -0
  207. agno/models/huggingface/huggingface.py +266 -538
  208. agno/models/ibm/__init__.py +5 -0
  209. agno/models/ibm/watsonx.py +432 -0
  210. agno/models/internlm/__init__.py +3 -0
  211. agno/models/internlm/internlm.py +20 -3
  212. agno/models/langdb/__init__.py +1 -0
  213. agno/models/langdb/langdb.py +60 -0
  214. agno/models/litellm/__init__.py +14 -0
  215. agno/models/litellm/chat.py +503 -0
  216. agno/models/litellm/litellm_openai.py +42 -0
  217. agno/models/llama_cpp/__init__.py +5 -0
  218. agno/models/llama_cpp/llama_cpp.py +22 -0
  219. agno/models/lmstudio/__init__.py +5 -0
  220. agno/models/lmstudio/lmstudio.py +25 -0
  221. agno/models/message.py +361 -39
  222. agno/models/meta/__init__.py +12 -0
  223. agno/models/meta/llama.py +502 -0
  224. agno/models/meta/llama_openai.py +79 -0
  225. agno/models/metrics.py +120 -0
  226. agno/models/mistral/__init__.py +4 -0
  227. agno/models/mistral/mistral.py +293 -393
  228. agno/models/nebius/__init__.py +3 -0
  229. agno/models/nebius/nebius.py +53 -0
  230. agno/models/nexus/__init__.py +3 -0
  231. agno/models/nexus/nexus.py +22 -0
  232. agno/models/nvidia/__init__.py +4 -0
  233. agno/models/nvidia/nvidia.py +22 -3
  234. agno/models/ollama/__init__.py +4 -2
  235. agno/models/ollama/chat.py +257 -492
  236. agno/models/openai/__init__.py +7 -0
  237. agno/models/openai/chat.py +725 -770
  238. agno/models/openai/like.py +16 -2
  239. agno/models/openai/responses.py +1121 -0
  240. agno/models/openrouter/__init__.py +4 -0
  241. agno/models/openrouter/openrouter.py +62 -5
  242. agno/models/perplexity/__init__.py +5 -0
  243. agno/models/perplexity/perplexity.py +203 -0
  244. agno/models/portkey/__init__.py +3 -0
  245. agno/models/portkey/portkey.py +82 -0
  246. agno/models/requesty/__init__.py +5 -0
  247. agno/models/requesty/requesty.py +69 -0
  248. agno/models/response.py +177 -7
  249. agno/models/sambanova/__init__.py +4 -0
  250. agno/models/sambanova/sambanova.py +23 -4
  251. agno/models/siliconflow/__init__.py +5 -0
  252. agno/models/siliconflow/siliconflow.py +42 -0
  253. agno/models/together/__init__.py +4 -0
  254. agno/models/together/together.py +21 -164
  255. agno/models/utils.py +266 -0
  256. agno/models/vercel/__init__.py +3 -0
  257. agno/models/vercel/v0.py +43 -0
  258. agno/models/vertexai/__init__.py +0 -1
  259. agno/models/vertexai/claude.py +190 -0
  260. agno/models/vllm/__init__.py +3 -0
  261. agno/models/vllm/vllm.py +83 -0
  262. agno/models/xai/__init__.py +2 -0
  263. agno/models/xai/xai.py +111 -7
  264. agno/os/__init__.py +3 -0
  265. agno/os/app.py +1027 -0
  266. agno/os/auth.py +244 -0
  267. agno/os/config.py +126 -0
  268. agno/os/interfaces/__init__.py +1 -0
  269. agno/os/interfaces/a2a/__init__.py +3 -0
  270. agno/os/interfaces/a2a/a2a.py +42 -0
  271. agno/os/interfaces/a2a/router.py +249 -0
  272. agno/os/interfaces/a2a/utils.py +924 -0
  273. agno/os/interfaces/agui/__init__.py +3 -0
  274. agno/os/interfaces/agui/agui.py +47 -0
  275. agno/os/interfaces/agui/router.py +147 -0
  276. agno/os/interfaces/agui/utils.py +574 -0
  277. agno/os/interfaces/base.py +25 -0
  278. agno/os/interfaces/slack/__init__.py +3 -0
  279. agno/os/interfaces/slack/router.py +148 -0
  280. agno/os/interfaces/slack/security.py +30 -0
  281. agno/os/interfaces/slack/slack.py +47 -0
  282. agno/os/interfaces/whatsapp/__init__.py +3 -0
  283. agno/os/interfaces/whatsapp/router.py +210 -0
  284. agno/os/interfaces/whatsapp/security.py +55 -0
  285. agno/os/interfaces/whatsapp/whatsapp.py +36 -0
  286. agno/os/mcp.py +293 -0
  287. agno/os/middleware/__init__.py +9 -0
  288. agno/os/middleware/jwt.py +797 -0
  289. agno/os/router.py +258 -0
  290. agno/os/routers/__init__.py +3 -0
  291. agno/os/routers/agents/__init__.py +3 -0
  292. agno/os/routers/agents/router.py +599 -0
  293. agno/os/routers/agents/schema.py +261 -0
  294. agno/os/routers/evals/__init__.py +3 -0
  295. agno/os/routers/evals/evals.py +450 -0
  296. agno/os/routers/evals/schemas.py +174 -0
  297. agno/os/routers/evals/utils.py +231 -0
  298. agno/os/routers/health.py +31 -0
  299. agno/os/routers/home.py +52 -0
  300. agno/os/routers/knowledge/__init__.py +3 -0
  301. agno/os/routers/knowledge/knowledge.py +1008 -0
  302. agno/os/routers/knowledge/schemas.py +178 -0
  303. agno/os/routers/memory/__init__.py +3 -0
  304. agno/os/routers/memory/memory.py +661 -0
  305. agno/os/routers/memory/schemas.py +88 -0
  306. agno/os/routers/metrics/__init__.py +3 -0
  307. agno/os/routers/metrics/metrics.py +190 -0
  308. agno/os/routers/metrics/schemas.py +47 -0
  309. agno/os/routers/session/__init__.py +3 -0
  310. agno/os/routers/session/session.py +997 -0
  311. agno/os/routers/teams/__init__.py +3 -0
  312. agno/os/routers/teams/router.py +512 -0
  313. agno/os/routers/teams/schema.py +257 -0
  314. agno/os/routers/traces/__init__.py +3 -0
  315. agno/os/routers/traces/schemas.py +414 -0
  316. agno/os/routers/traces/traces.py +499 -0
  317. agno/os/routers/workflows/__init__.py +3 -0
  318. agno/os/routers/workflows/router.py +624 -0
  319. agno/os/routers/workflows/schema.py +75 -0
  320. agno/os/schema.py +534 -0
  321. agno/os/scopes.py +469 -0
  322. agno/{playground → os}/settings.py +7 -15
  323. agno/os/utils.py +973 -0
  324. agno/reasoning/anthropic.py +80 -0
  325. agno/reasoning/azure_ai_foundry.py +67 -0
  326. agno/reasoning/deepseek.py +63 -0
  327. agno/reasoning/default.py +97 -0
  328. agno/reasoning/gemini.py +73 -0
  329. agno/reasoning/groq.py +71 -0
  330. agno/reasoning/helpers.py +24 -1
  331. agno/reasoning/ollama.py +67 -0
  332. agno/reasoning/openai.py +86 -0
  333. agno/reasoning/step.py +2 -1
  334. agno/reasoning/vertexai.py +76 -0
  335. agno/run/__init__.py +6 -0
  336. agno/run/agent.py +822 -0
  337. agno/run/base.py +247 -0
  338. agno/run/cancel.py +81 -0
  339. agno/run/requirement.py +181 -0
  340. agno/run/team.py +767 -0
  341. agno/run/workflow.py +708 -0
  342. agno/session/__init__.py +10 -0
  343. agno/session/agent.py +260 -0
  344. agno/session/summary.py +265 -0
  345. agno/session/team.py +342 -0
  346. agno/session/workflow.py +501 -0
  347. agno/table.py +10 -0
  348. agno/team/__init__.py +37 -0
  349. agno/team/team.py +9536 -0
  350. agno/tools/__init__.py +7 -0
  351. agno/tools/agentql.py +120 -0
  352. agno/tools/airflow.py +22 -12
  353. agno/tools/api.py +122 -0
  354. agno/tools/apify.py +276 -83
  355. agno/tools/{arxiv_toolkit.py → arxiv.py} +20 -12
  356. agno/tools/aws_lambda.py +28 -7
  357. agno/tools/aws_ses.py +66 -0
  358. agno/tools/baidusearch.py +11 -4
  359. agno/tools/bitbucket.py +292 -0
  360. agno/tools/brandfetch.py +213 -0
  361. agno/tools/bravesearch.py +106 -0
  362. agno/tools/brightdata.py +367 -0
  363. agno/tools/browserbase.py +209 -0
  364. agno/tools/calcom.py +32 -23
  365. agno/tools/calculator.py +24 -37
  366. agno/tools/cartesia.py +187 -0
  367. agno/tools/{clickup_tool.py → clickup.py} +17 -28
  368. agno/tools/confluence.py +91 -26
  369. agno/tools/crawl4ai.py +139 -43
  370. agno/tools/csv_toolkit.py +28 -22
  371. agno/tools/dalle.py +36 -22
  372. agno/tools/daytona.py +475 -0
  373. agno/tools/decorator.py +169 -14
  374. agno/tools/desi_vocal.py +23 -11
  375. agno/tools/discord.py +32 -29
  376. agno/tools/docker.py +716 -0
  377. agno/tools/duckdb.py +76 -81
  378. agno/tools/duckduckgo.py +43 -40
  379. agno/tools/e2b.py +703 -0
  380. agno/tools/eleven_labs.py +65 -54
  381. agno/tools/email.py +13 -5
  382. agno/tools/evm.py +129 -0
  383. agno/tools/exa.py +324 -42
  384. agno/tools/fal.py +39 -35
  385. agno/tools/file.py +196 -30
  386. agno/tools/file_generation.py +356 -0
  387. agno/tools/financial_datasets.py +288 -0
  388. agno/tools/firecrawl.py +108 -33
  389. agno/tools/function.py +960 -122
  390. agno/tools/giphy.py +34 -12
  391. agno/tools/github.py +1294 -97
  392. agno/tools/gmail.py +922 -0
  393. agno/tools/google_bigquery.py +117 -0
  394. agno/tools/google_drive.py +271 -0
  395. agno/tools/google_maps.py +253 -0
  396. agno/tools/googlecalendar.py +607 -107
  397. agno/tools/googlesheets.py +377 -0
  398. agno/tools/hackernews.py +20 -12
  399. agno/tools/jina.py +24 -14
  400. agno/tools/jira.py +48 -19
  401. agno/tools/knowledge.py +218 -0
  402. agno/tools/linear.py +82 -43
  403. agno/tools/linkup.py +58 -0
  404. agno/tools/local_file_system.py +15 -7
  405. agno/tools/lumalab.py +41 -26
  406. agno/tools/mcp/__init__.py +10 -0
  407. agno/tools/mcp/mcp.py +331 -0
  408. agno/tools/mcp/multi_mcp.py +347 -0
  409. agno/tools/mcp/params.py +24 -0
  410. agno/tools/mcp_toolbox.py +284 -0
  411. agno/tools/mem0.py +193 -0
  412. agno/tools/memory.py +419 -0
  413. agno/tools/mlx_transcribe.py +11 -9
  414. agno/tools/models/azure_openai.py +190 -0
  415. agno/tools/models/gemini.py +203 -0
  416. agno/tools/models/groq.py +158 -0
  417. agno/tools/models/morph.py +186 -0
  418. agno/tools/models/nebius.py +124 -0
  419. agno/tools/models_labs.py +163 -82
  420. agno/tools/moviepy_video.py +18 -13
  421. agno/tools/nano_banana.py +151 -0
  422. agno/tools/neo4j.py +134 -0
  423. agno/tools/newspaper.py +15 -4
  424. agno/tools/newspaper4k.py +19 -6
  425. agno/tools/notion.py +204 -0
  426. agno/tools/openai.py +181 -17
  427. agno/tools/openbb.py +27 -20
  428. agno/tools/opencv.py +321 -0
  429. agno/tools/openweather.py +233 -0
  430. agno/tools/oxylabs.py +385 -0
  431. agno/tools/pandas.py +25 -15
  432. agno/tools/parallel.py +314 -0
  433. agno/tools/postgres.py +238 -185
  434. agno/tools/pubmed.py +125 -13
  435. agno/tools/python.py +48 -35
  436. agno/tools/reasoning.py +283 -0
  437. agno/tools/reddit.py +207 -29
  438. agno/tools/redshift.py +406 -0
  439. agno/tools/replicate.py +69 -26
  440. agno/tools/resend.py +11 -6
  441. agno/tools/scrapegraph.py +179 -19
  442. agno/tools/searxng.py +23 -31
  443. agno/tools/serpapi.py +15 -10
  444. agno/tools/serper.py +255 -0
  445. agno/tools/shell.py +23 -12
  446. agno/tools/shopify.py +1519 -0
  447. agno/tools/slack.py +56 -14
  448. agno/tools/sleep.py +8 -6
  449. agno/tools/spider.py +35 -11
  450. agno/tools/spotify.py +919 -0
  451. agno/tools/sql.py +34 -19
  452. agno/tools/tavily.py +158 -8
  453. agno/tools/telegram.py +18 -8
  454. agno/tools/todoist.py +218 -0
  455. agno/tools/toolkit.py +134 -9
  456. agno/tools/trafilatura.py +388 -0
  457. agno/tools/trello.py +25 -28
  458. agno/tools/twilio.py +18 -9
  459. agno/tools/user_control_flow.py +78 -0
  460. agno/tools/valyu.py +228 -0
  461. agno/tools/visualization.py +467 -0
  462. agno/tools/webbrowser.py +28 -0
  463. agno/tools/webex.py +76 -0
  464. agno/tools/website.py +23 -19
  465. agno/tools/webtools.py +45 -0
  466. agno/tools/whatsapp.py +286 -0
  467. agno/tools/wikipedia.py +28 -19
  468. agno/tools/workflow.py +285 -0
  469. agno/tools/{twitter.py → x.py} +142 -46
  470. agno/tools/yfinance.py +41 -39
  471. agno/tools/youtube.py +34 -17
  472. agno/tools/zendesk.py +15 -5
  473. agno/tools/zep.py +454 -0
  474. agno/tools/zoom.py +86 -37
  475. agno/tracing/__init__.py +12 -0
  476. agno/tracing/exporter.py +157 -0
  477. agno/tracing/schemas.py +276 -0
  478. agno/tracing/setup.py +111 -0
  479. agno/utils/agent.py +938 -0
  480. agno/utils/audio.py +37 -1
  481. agno/utils/certs.py +27 -0
  482. agno/utils/code_execution.py +11 -0
  483. agno/utils/common.py +103 -20
  484. agno/utils/cryptography.py +22 -0
  485. agno/utils/dttm.py +33 -0
  486. agno/utils/events.py +700 -0
  487. agno/utils/functions.py +107 -37
  488. agno/utils/gemini.py +426 -0
  489. agno/utils/hooks.py +171 -0
  490. agno/utils/http.py +185 -0
  491. agno/utils/json_schema.py +159 -37
  492. agno/utils/knowledge.py +36 -0
  493. agno/utils/location.py +19 -0
  494. agno/utils/log.py +221 -8
  495. agno/utils/mcp.py +214 -0
  496. agno/utils/media.py +335 -14
  497. agno/utils/merge_dict.py +22 -1
  498. agno/utils/message.py +77 -2
  499. agno/utils/models/ai_foundry.py +50 -0
  500. agno/utils/models/claude.py +373 -0
  501. agno/utils/models/cohere.py +94 -0
  502. agno/utils/models/llama.py +85 -0
  503. agno/utils/models/mistral.py +100 -0
  504. agno/utils/models/openai_responses.py +140 -0
  505. agno/utils/models/schema_utils.py +153 -0
  506. agno/utils/models/watsonx.py +41 -0
  507. agno/utils/openai.py +257 -0
  508. agno/utils/pickle.py +1 -1
  509. agno/utils/pprint.py +124 -8
  510. agno/utils/print_response/agent.py +930 -0
  511. agno/utils/print_response/team.py +1914 -0
  512. agno/utils/print_response/workflow.py +1668 -0
  513. agno/utils/prompts.py +111 -0
  514. agno/utils/reasoning.py +108 -0
  515. agno/utils/response.py +163 -0
  516. agno/utils/serialize.py +32 -0
  517. agno/utils/shell.py +4 -4
  518. agno/utils/streamlit.py +487 -0
  519. agno/utils/string.py +204 -51
  520. agno/utils/team.py +139 -0
  521. agno/utils/timer.py +9 -2
  522. agno/utils/tokens.py +657 -0
  523. agno/utils/tools.py +19 -1
  524. agno/utils/whatsapp.py +305 -0
  525. agno/utils/yaml_io.py +3 -3
  526. agno/vectordb/__init__.py +2 -0
  527. agno/vectordb/base.py +87 -9
  528. agno/vectordb/cassandra/__init__.py +5 -1
  529. agno/vectordb/cassandra/cassandra.py +383 -27
  530. agno/vectordb/chroma/__init__.py +4 -0
  531. agno/vectordb/chroma/chromadb.py +748 -83
  532. agno/vectordb/clickhouse/__init__.py +7 -1
  533. agno/vectordb/clickhouse/clickhousedb.py +554 -53
  534. agno/vectordb/couchbase/__init__.py +3 -0
  535. agno/vectordb/couchbase/couchbase.py +1446 -0
  536. agno/vectordb/lancedb/__init__.py +5 -0
  537. agno/vectordb/lancedb/lance_db.py +730 -98
  538. agno/vectordb/langchaindb/__init__.py +5 -0
  539. agno/vectordb/langchaindb/langchaindb.py +163 -0
  540. agno/vectordb/lightrag/__init__.py +5 -0
  541. agno/vectordb/lightrag/lightrag.py +388 -0
  542. agno/vectordb/llamaindex/__init__.py +3 -0
  543. agno/vectordb/llamaindex/llamaindexdb.py +166 -0
  544. agno/vectordb/milvus/__init__.py +3 -0
  545. agno/vectordb/milvus/milvus.py +966 -78
  546. agno/vectordb/mongodb/__init__.py +9 -1
  547. agno/vectordb/mongodb/mongodb.py +1175 -172
  548. agno/vectordb/pgvector/__init__.py +8 -0
  549. agno/vectordb/pgvector/pgvector.py +599 -115
  550. agno/vectordb/pineconedb/__init__.py +5 -1
  551. agno/vectordb/pineconedb/pineconedb.py +406 -43
  552. agno/vectordb/qdrant/__init__.py +4 -0
  553. agno/vectordb/qdrant/qdrant.py +914 -61
  554. agno/vectordb/redis/__init__.py +9 -0
  555. agno/vectordb/redis/redisdb.py +682 -0
  556. agno/vectordb/singlestore/__init__.py +8 -1
  557. agno/vectordb/singlestore/singlestore.py +771 -0
  558. agno/vectordb/surrealdb/__init__.py +3 -0
  559. agno/vectordb/surrealdb/surrealdb.py +663 -0
  560. agno/vectordb/upstashdb/__init__.py +5 -0
  561. agno/vectordb/upstashdb/upstashdb.py +718 -0
  562. agno/vectordb/weaviate/__init__.py +8 -0
  563. agno/vectordb/weaviate/index.py +15 -0
  564. agno/vectordb/weaviate/weaviate.py +1009 -0
  565. agno/workflow/__init__.py +23 -1
  566. agno/workflow/agent.py +299 -0
  567. agno/workflow/condition.py +759 -0
  568. agno/workflow/loop.py +756 -0
  569. agno/workflow/parallel.py +853 -0
  570. agno/workflow/router.py +723 -0
  571. agno/workflow/step.py +1564 -0
  572. agno/workflow/steps.py +613 -0
  573. agno/workflow/types.py +556 -0
  574. agno/workflow/workflow.py +4327 -514
  575. agno-2.3.13.dist-info/METADATA +639 -0
  576. agno-2.3.13.dist-info/RECORD +613 -0
  577. {agno-0.1.2.dist-info → agno-2.3.13.dist-info}/WHEEL +1 -1
  578. agno-2.3.13.dist-info/licenses/LICENSE +201 -0
  579. agno/api/playground.py +0 -91
  580. agno/api/schemas/playground.py +0 -22
  581. agno/api/schemas/user.py +0 -22
  582. agno/api/schemas/workspace.py +0 -46
  583. agno/api/user.py +0 -160
  584. agno/api/workspace.py +0 -151
  585. agno/cli/auth_server.py +0 -118
  586. agno/cli/config.py +0 -275
  587. agno/cli/console.py +0 -88
  588. agno/cli/credentials.py +0 -23
  589. agno/cli/entrypoint.py +0 -571
  590. agno/cli/operator.py +0 -355
  591. agno/cli/settings.py +0 -85
  592. agno/cli/ws/ws_cli.py +0 -817
  593. agno/constants.py +0 -13
  594. agno/document/__init__.py +0 -1
  595. agno/document/chunking/semantic.py +0 -47
  596. agno/document/chunking/strategy.py +0 -31
  597. agno/document/reader/__init__.py +0 -1
  598. agno/document/reader/arxiv_reader.py +0 -41
  599. agno/document/reader/base.py +0 -22
  600. agno/document/reader/csv_reader.py +0 -84
  601. agno/document/reader/docx_reader.py +0 -46
  602. agno/document/reader/firecrawl_reader.py +0 -99
  603. agno/document/reader/json_reader.py +0 -43
  604. agno/document/reader/pdf_reader.py +0 -219
  605. agno/document/reader/s3/pdf_reader.py +0 -46
  606. agno/document/reader/s3/text_reader.py +0 -51
  607. agno/document/reader/text_reader.py +0 -41
  608. agno/document/reader/website_reader.py +0 -175
  609. agno/document/reader/youtube_reader.py +0 -50
  610. agno/embedder/__init__.py +0 -1
  611. agno/embedder/azure_openai.py +0 -86
  612. agno/embedder/cohere.py +0 -72
  613. agno/embedder/fastembed.py +0 -37
  614. agno/embedder/google.py +0 -73
  615. agno/embedder/huggingface.py +0 -54
  616. agno/embedder/mistral.py +0 -80
  617. agno/embedder/ollama.py +0 -57
  618. agno/embedder/openai.py +0 -74
  619. agno/embedder/sentence_transformer.py +0 -38
  620. agno/embedder/voyageai.py +0 -64
  621. agno/eval/perf.py +0 -201
  622. agno/file/__init__.py +0 -1
  623. agno/file/file.py +0 -16
  624. agno/file/local/csv.py +0 -32
  625. agno/file/local/txt.py +0 -19
  626. agno/infra/app.py +0 -240
  627. agno/infra/base.py +0 -144
  628. agno/infra/context.py +0 -20
  629. agno/infra/db_app.py +0 -52
  630. agno/infra/resource.py +0 -205
  631. agno/infra/resources.py +0 -55
  632. agno/knowledge/agent.py +0 -230
  633. agno/knowledge/arxiv.py +0 -22
  634. agno/knowledge/combined.py +0 -22
  635. agno/knowledge/csv.py +0 -28
  636. agno/knowledge/csv_url.py +0 -19
  637. agno/knowledge/document.py +0 -20
  638. agno/knowledge/docx.py +0 -30
  639. agno/knowledge/json.py +0 -28
  640. agno/knowledge/langchain.py +0 -71
  641. agno/knowledge/llamaindex.py +0 -66
  642. agno/knowledge/pdf.py +0 -28
  643. agno/knowledge/pdf_url.py +0 -26
  644. agno/knowledge/s3/base.py +0 -60
  645. agno/knowledge/s3/pdf.py +0 -21
  646. agno/knowledge/s3/text.py +0 -23
  647. agno/knowledge/text.py +0 -30
  648. agno/knowledge/website.py +0 -88
  649. agno/knowledge/wikipedia.py +0 -31
  650. agno/knowledge/youtube.py +0 -22
  651. agno/memory/agent.py +0 -392
  652. agno/memory/classifier.py +0 -104
  653. agno/memory/db/__init__.py +0 -1
  654. agno/memory/db/base.py +0 -42
  655. agno/memory/db/mongodb.py +0 -189
  656. agno/memory/db/postgres.py +0 -203
  657. agno/memory/db/sqlite.py +0 -193
  658. agno/memory/memory.py +0 -15
  659. agno/memory/row.py +0 -36
  660. agno/memory/summarizer.py +0 -192
  661. agno/memory/summary.py +0 -19
  662. agno/memory/workflow.py +0 -38
  663. agno/models/google/gemini_openai.py +0 -26
  664. agno/models/ollama/hermes.py +0 -221
  665. agno/models/ollama/tools.py +0 -362
  666. agno/models/vertexai/gemini.py +0 -595
  667. agno/playground/__init__.py +0 -3
  668. agno/playground/async_router.py +0 -421
  669. agno/playground/deploy.py +0 -249
  670. agno/playground/operator.py +0 -92
  671. agno/playground/playground.py +0 -91
  672. agno/playground/schemas.py +0 -76
  673. agno/playground/serve.py +0 -55
  674. agno/playground/sync_router.py +0 -405
  675. agno/reasoning/agent.py +0 -68
  676. agno/run/response.py +0 -112
  677. agno/storage/agent/__init__.py +0 -0
  678. agno/storage/agent/base.py +0 -38
  679. agno/storage/agent/dynamodb.py +0 -350
  680. agno/storage/agent/json.py +0 -92
  681. agno/storage/agent/mongodb.py +0 -228
  682. agno/storage/agent/postgres.py +0 -367
  683. agno/storage/agent/session.py +0 -79
  684. agno/storage/agent/singlestore.py +0 -303
  685. agno/storage/agent/sqlite.py +0 -357
  686. agno/storage/agent/yaml.py +0 -93
  687. agno/storage/workflow/__init__.py +0 -0
  688. agno/storage/workflow/base.py +0 -40
  689. agno/storage/workflow/mongodb.py +0 -233
  690. agno/storage/workflow/postgres.py +0 -366
  691. agno/storage/workflow/session.py +0 -60
  692. agno/storage/workflow/sqlite.py +0 -359
  693. agno/tools/googlesearch.py +0 -88
  694. agno/utils/defaults.py +0 -57
  695. agno/utils/filesystem.py +0 -39
  696. agno/utils/git.py +0 -52
  697. agno/utils/json_io.py +0 -30
  698. agno/utils/load_env.py +0 -19
  699. agno/utils/py_io.py +0 -19
  700. agno/utils/pyproject.py +0 -18
  701. agno/utils/resource_filter.py +0 -31
  702. agno/vectordb/singlestore/s2vectordb.py +0 -390
  703. agno/vectordb/singlestore/s2vectordb2.py +0 -355
  704. agno/workspace/__init__.py +0 -0
  705. agno/workspace/config.py +0 -325
  706. agno/workspace/enums.py +0 -6
  707. agno/workspace/helpers.py +0 -48
  708. agno/workspace/operator.py +0 -758
  709. agno/workspace/settings.py +0 -63
  710. agno-0.1.2.dist-info/LICENSE +0 -375
  711. agno-0.1.2.dist-info/METADATA +0 -502
  712. agno-0.1.2.dist-info/RECORD +0 -352
  713. agno-0.1.2.dist-info/entry_points.txt +0 -3
  714. /agno/{cli → db/migrations}/__init__.py +0 -0
  715. /agno/{cli/ws → db/migrations/versions}/__init__.py +0 -0
  716. /agno/{document/chunking/__init__.py → db/schemas/metrics.py} +0 -0
  717. /agno/{document/reader/s3 → integrations}/__init__.py +0 -0
  718. /agno/{file/local → knowledge/chunking}/__init__.py +0 -0
  719. /agno/{infra → knowledge/remote_content}/__init__.py +0 -0
  720. /agno/{knowledge/s3 → tools/models}/__init__.py +0 -0
  721. /agno/{reranker → utils/models}/__init__.py +0 -0
  722. /agno/{storage → utils/print_response}/__init__.py +0 -0
  723. {agno-0.1.2.dist-info → agno-2.3.13.dist-info}/top_level.txt +0 -0
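
The hunk below is the largest single addition in this release: a +2,781-line file that matches agno/db/dynamo/dynamo.py in the list above, the new DynamoDB-backed database adapter. As a rough orientation before the raw diff, a minimal usage sketch based only on the constructor and methods visible in that hunk might look like the following; the import path is inferred from the package layout, and the region and table names are illustrative assumptions, not package defaults:

    from agno.db.dynamo import DynamoDb
    from agno.db.base import SessionType

    # Credentials may also come from the AWS_REGION / AWS_ACCESS_KEY_ID /
    # AWS_SECRET_ACCESS_KEY environment variables, as the constructor checks below.
    db = DynamoDb(
        region_name="us-east-1",         # illustrative
        session_table="agno_sessions",   # illustrative table name
        memory_table="agno_memories",    # illustrative table name
    )

    # Tables are created on first use; fetch a session by id (returns None if absent).
    session = db.get_session(session_id="sess_123", session_type=SessionType.AGENT)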
@@ -0,0 +1,2781 @@
+ import json
+ import time
+ from datetime import date, datetime, timedelta, timezone
+ from os import getenv
+ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
+
+ if TYPE_CHECKING:
+     from agno.tracing.schemas import Span, Trace
+
+ from agno.db.base import BaseDb, SessionType
+ from agno.db.dynamo.schemas import get_table_schema_definition
+ from agno.db.dynamo.utils import (
+     apply_pagination,
+     apply_sorting,
+     build_query_filter_expression,
+     build_topic_filter_expression,
+     calculate_date_metrics,
+     create_table_if_not_exists,
+     deserialize_cultural_knowledge_from_db,
+     deserialize_eval_record,
+     deserialize_from_dynamodb_item,
+     deserialize_knowledge_row,
+     deserialize_session,
+     deserialize_session_result,
+     execute_query_with_pagination,
+     fetch_all_sessions_data,
+     get_dates_to_calculate_metrics_for,
+     merge_with_existing_session,
+     prepare_session_data,
+     serialize_cultural_knowledge_for_db,
+     serialize_eval_record,
+     serialize_knowledge_row,
+     serialize_to_dynamo_item,
+ )
+ from agno.db.schemas.culture import CulturalKnowledge
+ from agno.db.schemas.evals import EvalFilterType, EvalRunRecord, EvalType
+ from agno.db.schemas.knowledge import KnowledgeRow
+ from agno.db.schemas.memory import UserMemory
+ from agno.session import AgentSession, Session, TeamSession, WorkflowSession
+ from agno.utils.log import log_debug, log_error, log_info
+ from agno.utils.string import generate_id
+
+ try:
+     import boto3  # type: ignore[import-untyped]
+ except ImportError:
+     raise ImportError("`boto3` not installed. Please install it using `pip install boto3`")
+
+
+ # DynamoDB batch_write_item has a hard limit of 25 items per request
+ DYNAMO_BATCH_SIZE_LIMIT = 25
+
+
+ class DynamoDb(BaseDb):
+     def __init__(
+         self,
+         db_client=None,
+         region_name: Optional[str] = None,
+         aws_access_key_id: Optional[str] = None,
+         aws_secret_access_key: Optional[str] = None,
+         session_table: Optional[str] = None,
+         culture_table: Optional[str] = None,
+         memory_table: Optional[str] = None,
+         metrics_table: Optional[str] = None,
+         eval_table: Optional[str] = None,
+         knowledge_table: Optional[str] = None,
+         traces_table: Optional[str] = None,
+         spans_table: Optional[str] = None,
+         id: Optional[str] = None,
+     ):
+         """
+         Interface for interacting with a DynamoDB database.
+
+         Args:
+             db_client: The DynamoDB client to use.
+             region_name: AWS region name.
+             aws_access_key_id: AWS access key ID.
+             aws_secret_access_key: AWS secret access key.
+             session_table: The name of the session table.
+             culture_table: The name of the culture table.
+             memory_table: The name of the memory table.
+             metrics_table: The name of the metrics table.
+             eval_table: The name of the eval table.
+             knowledge_table: The name of the knowledge table.
+             traces_table: The name of the traces table.
+             spans_table: The name of the spans table.
+             id: ID of the database.
+         """
+         if id is None:
+             seed = str(db_client) if db_client else f"{region_name}_{aws_access_key_id}"
+             id = generate_id(seed)
+
+         super().__init__(
+             id=id,
+             session_table=session_table,
+             culture_table=culture_table,
+             memory_table=memory_table,
+             metrics_table=metrics_table,
+             eval_table=eval_table,
+             knowledge_table=knowledge_table,
+             traces_table=traces_table,
+             spans_table=spans_table,
+         )
+
+         if db_client is not None:
+             self.client = db_client
+         else:
+             if not region_name and not getenv("AWS_REGION"):
+                 raise ValueError("AWS_REGION is not set. Please set the AWS_REGION environment variable.")
+             if not aws_access_key_id and not getenv("AWS_ACCESS_KEY_ID"):
+                 raise ValueError("AWS_ACCESS_KEY_ID is not set. Please set the AWS_ACCESS_KEY_ID environment variable.")
+             if not aws_secret_access_key and not getenv("AWS_SECRET_ACCESS_KEY"):
+                 raise ValueError(
+                     "AWS_SECRET_ACCESS_KEY is not set. Please set the AWS_SECRET_ACCESS_KEY environment variable."
+                 )
+
+             session_kwargs = {}
+             session_kwargs["region_name"] = region_name or getenv("AWS_REGION")
+             session_kwargs["aws_access_key_id"] = aws_access_key_id or getenv("AWS_ACCESS_KEY_ID")
+             session_kwargs["aws_secret_access_key"] = aws_secret_access_key or getenv("AWS_SECRET_ACCESS_KEY")
+
+             session = boto3.Session(**session_kwargs)
+             self.client = session.client("dynamodb")
+
+     def table_exists(self, table_name: str) -> bool:
+         """Check if a DynamoDB table exists.
+
+         Args:
+             table_name: The name of the table to check
+
+         Returns:
+             bool: True if the table exists, False otherwise
+         """
+         try:
+             self.client.describe_table(TableName=table_name)
+             return True
+         except self.client.exceptions.ResourceNotFoundException:
+             return False
+
+     def _create_all_tables(self):
+         """Create all configured DynamoDB tables if they don't exist."""
+         tables_to_create = [
+             ("sessions", self.session_table_name),
+             ("memories", self.memory_table_name),
+             ("metrics", self.metrics_table_name),
+             ("evals", self.eval_table_name),
+             ("knowledge", self.knowledge_table_name),
+             ("culture", self.culture_table_name),
+         ]
+
+         for table_type, table_name in tables_to_create:
+             if not self.table_exists(table_name):
+                 schema = get_table_schema_definition(table_type)
+                 schema["TableName"] = table_name
+                 create_table_if_not_exists(self.client, table_name, schema)
+
+     def _get_table(self, table_type: str, create_table_if_not_found: Optional[bool] = True) -> Optional[str]:
+         """
+         Get table name and ensure the table exists, creating it if needed.
+
+         Args:
+             table_type: Type of table ("sessions", "memories", "metrics", "evals", "knowledge", "culture", "traces", "spans")
+
+         Returns:
+             str: The table name
+
+         Raises:
+             ValueError: If table name is not configured or table type is unknown
+         """
+         table_name = None
+
+         if table_type == "sessions":
+             table_name = self.session_table_name
+         elif table_type == "memories":
+             table_name = self.memory_table_name
+         elif table_type == "metrics":
+             table_name = self.metrics_table_name
+         elif table_type == "evals":
+             table_name = self.eval_table_name
+         elif table_type == "knowledge":
+             table_name = self.knowledge_table_name
+         elif table_type == "culture":
+             table_name = self.culture_table_name
+         elif table_type == "traces":
+             table_name = self.trace_table_name
+         elif table_type == "spans":
+             # Ensure traces table exists first (spans reference traces)
+             self._get_table("traces", create_table_if_not_found=True)
+             table_name = self.span_table_name
+         else:
+             raise ValueError(f"Unknown table type: {table_type}")
+
+         # Check if table exists, create if it doesn't
+         if not self.table_exists(table_name) and create_table_if_not_found:
+             schema = get_table_schema_definition(table_type)
+             schema["TableName"] = table_name
+             create_table_if_not_exists(self.client, table_name, schema)
+
+         return table_name
+
+     def get_latest_schema_version(self):
+         """Get the latest version of the database schema."""
+         pass
+
+     def upsert_schema_version(self, version: str) -> None:
+         """Upsert the schema version into the database."""
+         pass
+
+     # --- Sessions ---
+
+     def delete_session(self, session_id: Optional[str] = None) -> bool:
+         """
+         Delete a session from the database.
+
+         Args:
+             session_id: The ID of the session to delete.
+
+         Raises:
+             Exception: If any error occurs while deleting the session.
+         """
+         if not session_id:
+             return False
+
+         try:
+             self.client.delete_item(
+                 TableName=self.session_table_name,
+                 Key={"session_id": {"S": session_id}},
+             )
+             return True
+
+         except Exception as e:
+             log_error(f"Failed to delete session {session_id}: {e}")
+             raise e
+
+     def delete_sessions(self, session_ids: List[str]) -> None:
+         """
+         Delete sessions from the database in batches.
+
+         Args:
+             session_ids: List of session IDs to delete
+
+         Raises:
+             Exception: If any error occurs while deleting the sessions.
+         """
+         if not session_ids or not self.session_table_name:
+             return
+
+         try:
+             # Process the items to delete in batches of the max allowed size or less
+             for i in range(0, len(session_ids), DYNAMO_BATCH_SIZE_LIMIT):
+                 batch = session_ids[i : i + DYNAMO_BATCH_SIZE_LIMIT]
+                 delete_requests = []
+
+                 for session_id in batch:
+                     delete_requests.append({"DeleteRequest": {"Key": {"session_id": {"S": session_id}}}})
+
+                 if delete_requests:
+                     self.client.batch_write_item(RequestItems={self.session_table_name: delete_requests})
+
+         except Exception as e:
+             log_error(f"Failed to delete sessions: {e}")
+             raise e
+
+     def get_session(
+         self,
+         session_id: str,
+         session_type: SessionType,
+         user_id: Optional[str] = None,
+         deserialize: Optional[bool] = True,
+     ) -> Optional[Union[Session, Dict[str, Any]]]:
+         """
+         Get a session from the database as a Session object.
+
+         Args:
+             session_id (str): The ID of the session to get.
+             session_type (SessionType): The type of session to get.
+             user_id (Optional[str]): The ID of the user to get the session for.
+             deserialize (Optional[bool]): Whether to deserialize the session.
+
+         Returns:
+             Optional[Session]: The session data as a Session object.
+
+         Raises:
+             Exception: If any error occurs while getting the session.
+         """
+         try:
+             table_name = self._get_table("sessions")
+             response = self.client.get_item(
+                 TableName=table_name,
+                 Key={"session_id": {"S": session_id}},
+             )
+
+             item = response.get("Item")
+             if not item:
+                 return None
+
+             session = deserialize_from_dynamodb_item(item)
+
+             if user_id and session.get("user_id") != user_id:
+                 return None
+
+             if not session:
+                 return None
+
+             if not deserialize:
+                 return session
+
+             if session_type == SessionType.AGENT:
+                 return AgentSession.from_dict(session)
+             elif session_type == SessionType.TEAM:
+                 return TeamSession.from_dict(session)
+             elif session_type == SessionType.WORKFLOW:
+                 return WorkflowSession.from_dict(session)
+             else:
+                 raise ValueError(f"Invalid session type: {session_type}")
+
+         except Exception as e:
+             log_error(f"Failed to get session {session_id}: {e}")
+             raise e
+
+     def get_sessions(
+         self,
+         session_type: SessionType,
+         user_id: Optional[str] = None,
+         component_id: Optional[str] = None,
+         session_name: Optional[str] = None,
+         start_timestamp: Optional[int] = None,
+         end_timestamp: Optional[int] = None,
+         limit: Optional[int] = None,
+         page: Optional[int] = None,
+         sort_by: Optional[str] = None,
+         sort_order: Optional[str] = None,
+         deserialize: Optional[bool] = True,
+     ) -> Union[List[Session], Tuple[List[Dict[str, Any]], int]]:
+         try:
+             table_name = self._get_table("sessions")
+             if table_name is None:
+                 return [] if deserialize else ([], 0)
+
+             # Build filter expression for additional filters
+             filter_expression = None
+             expression_attribute_names = {}
+             expression_attribute_values = {":session_type": {"S": session_type.value}}
+
+             if user_id:
+                 filter_expression = "#user_id = :user_id"
+                 expression_attribute_names["#user_id"] = "user_id"
+                 expression_attribute_values[":user_id"] = {"S": user_id}
+
+             if component_id:
+                 # Map component_id to the appropriate field based on session type
+                 if session_type == SessionType.AGENT:
+                     component_filter = "#agent_id = :component_id"
+                     expression_attribute_names["#agent_id"] = "agent_id"
+                 elif session_type == SessionType.TEAM:
+                     component_filter = "#team_id = :component_id"
+                     expression_attribute_names["#team_id"] = "team_id"
+                 else:
+                     component_filter = "#workflow_id = :component_id"
+                     expression_attribute_names["#workflow_id"] = "workflow_id"
+
+                 if component_filter:
+                     expression_attribute_values[":component_id"] = {"S": component_id}
+                     if filter_expression:
+                         filter_expression += f" AND {component_filter}"
+                     else:
+                         filter_expression = component_filter
+
+             if session_name:
+                 name_filter = "#session_name = :session_name"
+                 expression_attribute_names["#session_name"] = "session_name"
+                 expression_attribute_values[":session_name"] = {"S": session_name}
+                 if filter_expression:
+                     filter_expression += f" AND {name_filter}"
+                 else:
+                     filter_expression = name_filter
+
+             # Use GSI query for session_type
+             query_kwargs = {
+                 "TableName": table_name,
+                 "IndexName": "session_type-created_at-index",
+                 "KeyConditionExpression": "session_type = :session_type",
+                 "ExpressionAttributeValues": expression_attribute_values,
+             }
+             if filter_expression:
+                 query_kwargs["FilterExpression"] = filter_expression
+             if expression_attribute_names:
+                 query_kwargs["ExpressionAttributeNames"] = expression_attribute_names
+
+             # Apply sorting
+             if sort_by == "created_at":
+                 query_kwargs["ScanIndexForward"] = sort_order != "desc"  # type: ignore
+
+             # Apply limit at DynamoDB level
+             if limit and not page:
+                 query_kwargs["Limit"] = limit  # type: ignore
+
+             items = []
+             response = self.client.query(**query_kwargs)
+             items.extend(response.get("Items", []))
+
+             # Handle pagination
+             while "LastEvaluatedKey" in response:
+                 query_kwargs["ExclusiveStartKey"] = response["LastEvaluatedKey"]
+                 response = self.client.query(**query_kwargs)
+                 items.extend(response.get("Items", []))
+
+             # Convert DynamoDB items to session data
+             sessions_data = []
+             for item in items:
+                 session_data = deserialize_from_dynamodb_item(item)
+                 if session_data:
+                     sessions_data.append(session_data)
+
+             # Apply in-memory sorting for fields not supported by DynamoDB
+             if sort_by and sort_by != "created_at":
+                 sessions_data = apply_sorting(sessions_data, sort_by, sort_order)
+
+             # Get total count before pagination
+             total_count = len(sessions_data)
+
+             # Apply pagination
+             if page:
+                 sessions_data = apply_pagination(sessions_data, limit, page)
+
+             if not deserialize:
+                 return sessions_data, total_count
+
+             sessions = []
+             for session_data in sessions_data:
+                 session = deserialize_session(session_data)
+                 if session:
+                     sessions.append(session)
+
+             return sessions
+
+         except Exception as e:
+             log_error(f"Failed to get sessions: {e}")
+             raise e
+
+     def rename_session(
+         self,
+         session_id: str,
+         session_type: SessionType,
+         session_name: str,
+         deserialize: Optional[bool] = True,
+     ) -> Optional[Union[Session, Dict[str, Any]]]:
+         """
+         Rename a session in the database.
+
+         Args:
+             session_id: The ID of the session to rename.
+             session_type: The type of session to rename.
+             session_name: The new name for the session.
+
+         Returns:
+             Optional[Session]: The renamed session if successful, None otherwise.
+
+         Raises:
+             Exception: If any error occurs while renaming the session.
+         """
+         try:
+             if not self.session_table_name:
+                 raise Exception("Sessions table not found")
+
+             # Get current session_data
+             get_response = self.client.get_item(
+                 TableName=self.session_table_name,
+                 Key={"session_id": {"S": session_id}},
+             )
+             current_item = get_response.get("Item")
+             if not current_item:
+                 return None
+
+             # Update session_data with the new session_name
+             session_data = deserialize_from_dynamodb_item(current_item).get("session_data", {})
+             session_data["session_name"] = session_name
+             response = self.client.update_item(
+                 TableName=self.session_table_name,
+                 Key={"session_id": {"S": session_id}},
+                 UpdateExpression="SET session_data = :session_data, updated_at = :updated_at",
+                 ConditionExpression="session_type = :session_type",
+                 ExpressionAttributeValues={
+                     ":session_data": {"S": json.dumps(session_data)},
+                     ":session_type": {"S": session_type.value},
+                     ":updated_at": {"N": str(int(time.time()))},
+                 },
+                 ReturnValues="ALL_NEW",
+             )
+             item = response.get("Attributes")
+             if not item:
+                 return None
+
+             session = deserialize_from_dynamodb_item(item)
+             if not deserialize:
+                 return session
+
+             if session_type == SessionType.AGENT:
+                 return AgentSession.from_dict(session)
+             elif session_type == SessionType.TEAM:
+                 return TeamSession.from_dict(session)
+             else:
+                 return WorkflowSession.from_dict(session)
+
+         except Exception as e:
+             log_error(f"Failed to rename session {session_id}: {e}")
+             raise e
+
+     def upsert_session(
+         self, session: Session, deserialize: Optional[bool] = True
+     ) -> Optional[Union[Session, Dict[str, Any]]]:
+         """
+         Upsert a session into the database.
+
+         This method provides true upsert behavior: creates a new session if it doesn't exist,
+         or updates an existing session while preserving important fields.
+
+         Args:
+             session (Session): The session to upsert.
+             deserialize (Optional[bool]): Whether to deserialize the session.
+
+         Returns:
+             Optional[Session]: The upserted session if successful, None otherwise.
+         """
+         try:
+             table_name = self._get_table("sessions", create_table_if_not_found=True)
+
+             # Get session if it already exists in the db.
+             # We need to do this to handle updating nested fields.
+             response = self.client.get_item(TableName=table_name, Key={"session_id": {"S": session.session_id}})
+             existing_item = response.get("Item")
+
+             # Prepare the session to upsert, merging with existing session if it exists.
+             serialized_session = prepare_session_data(session)
+             if existing_item:
+                 serialized_session = merge_with_existing_session(serialized_session, existing_item)
+                 serialized_session["updated_at"] = int(time.time())
+             else:
+                 serialized_session["updated_at"] = serialized_session["created_at"]
+
+             # Upsert
+             item = serialize_to_dynamo_item(serialized_session)
+             self.client.put_item(TableName=table_name, Item=item)
+
+             return deserialize_session_result(serialized_session, session, deserialize)
+
+         except Exception as e:
+             log_error(f"Failed to upsert session {session.session_id}: {e}")
+             raise e
+
+     def upsert_sessions(
+         self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
+     ) -> List[Union[Session, Dict[str, Any]]]:
+         """
+         Bulk upsert multiple sessions for improved performance on large datasets.
+
+         Args:
+             sessions (List[Session]): List of sessions to upsert.
+             deserialize (Optional[bool]): Whether to deserialize the sessions. Defaults to True.
+
+         Returns:
+             List[Union[Session, Dict[str, Any]]]: List of upserted sessions.
+
+         Raises:
+             Exception: If an error occurs during bulk upsert.
+         """
+         if not sessions:
+             return []
+
+         try:
+             log_info(
+                 f"DynamoDb doesn't support efficient bulk operations, falling back to individual upserts for {len(sessions)} sessions"
+             )
+
+             # Fall back to individual upserts
+             results = []
+             for session in sessions:
+                 if session is not None:
+                     result = self.upsert_session(session, deserialize=deserialize)
+                     if result is not None:
+                         results.append(result)
+             return results
+
+         except Exception as e:
+             log_error(f"Exception during bulk session upsert: {e}")
+             return []
+
+     # --- User Memory ---
+
+     def delete_user_memory(self, memory_id: str, user_id: Optional[str] = None) -> None:
+         """
+         Delete a user memory from the database.
+
+         Args:
+             memory_id: The ID of the memory to delete.
+             user_id: The ID of the user (optional, for filtering).
+
+         Raises:
+             Exception: If any error occurs while deleting the user memory.
+         """
+         try:
+             # If user_id is provided, verify the memory belongs to the user before deleting
+             if user_id:
+                 response = self.client.get_item(
+                     TableName=self.memory_table_name,
+                     Key={"memory_id": {"S": memory_id}},
+                 )
+                 item = response.get("Item")
+                 if item:
+                     memory_data = deserialize_from_dynamodb_item(item)
+                     if memory_data.get("user_id") != user_id:
+                         log_debug(f"Memory {memory_id} does not belong to user {user_id}")
+                         return
+
+             self.client.delete_item(
+                 TableName=self.memory_table_name,
+                 Key={"memory_id": {"S": memory_id}},
+             )
+             log_debug(f"Deleted user memory {memory_id}")
+
+         except Exception as e:
+             log_error(f"Failed to delete user memory {memory_id}: {e}")
+             raise e
+
+     def delete_user_memories(self, memory_ids: List[str], user_id: Optional[str] = None) -> None:
+         """
+         Delete user memories from the database in batches.
+
+         Args:
+             memory_ids: List of memory IDs to delete
+             user_id: The ID of the user (optional, for filtering).
+
+         Raises:
+             Exception: If any error occurs while deleting the user memories.
+         """
+
+         try:
+             # If user_id is provided, filter memory_ids to only those belonging to the user
+             if user_id:
+                 filtered_memory_ids = []
+                 for memory_id in memory_ids:
+                     response = self.client.get_item(
+                         TableName=self.memory_table_name,
+                         Key={"memory_id": {"S": memory_id}},
+                     )
+                     item = response.get("Item")
+                     if item:
+                         memory_data = deserialize_from_dynamodb_item(item)
+                         if memory_data.get("user_id") == user_id:
+                             filtered_memory_ids.append(memory_id)
+                 memory_ids = filtered_memory_ids
+
+             for i in range(0, len(memory_ids), DYNAMO_BATCH_SIZE_LIMIT):
+                 batch = memory_ids[i : i + DYNAMO_BATCH_SIZE_LIMIT]
+
+                 delete_requests = []
+                 for memory_id in batch:
+                     delete_requests.append({"DeleteRequest": {"Key": {"memory_id": {"S": memory_id}}}})
+
+                 self.client.batch_write_item(RequestItems={self.memory_table_name: delete_requests})
+
+         except Exception as e:
+             log_error(f"Failed to delete user memories: {e}")
+             raise e
+
665
+ def get_all_memory_topics(self) -> List[str]:
666
+ """Get all memory topics from the database.
667
+
668
+ Args:
669
+ user_id: The ID of the user (optional, for filtering).
670
+
671
+ Returns:
672
+ List[str]: List of unique memory topics.
673
+ """
674
+ try:
675
+ table_name = self._get_table("memories")
676
+ if table_name is None:
677
+ return []
678
+
679
+ # Build filter expression for user_id if provided
680
+ scan_kwargs = {"TableName": table_name}
681
+
682
+ # Scan the table to get memories
683
+ response = self.client.scan(**scan_kwargs)
684
+ items = response.get("Items", [])
685
+
686
+ # Handle pagination
687
+ while "LastEvaluatedKey" in response:
688
+ scan_kwargs["ExclusiveStartKey"] = response["LastEvaluatedKey"]
689
+ response = self.client.scan(**scan_kwargs)
690
+ items.extend(response.get("Items", []))
691
+
692
+ # Extract topics from all memories
693
+ all_topics = set()
694
+ for item in items:
695
+ memory_data = deserialize_from_dynamodb_item(item)
696
+ topics = memory_data.get("memory", {}).get("topics", [])
697
+ all_topics.update(topics)
698
+
699
+ return list(all_topics)
700
+
701
+ except Exception as e:
702
+ log_error(f"Exception reading from memory table: {e}")
703
+ raise e
704
+
705
+ def get_user_memory(
706
+ self,
707
+ memory_id: str,
708
+ deserialize: Optional[bool] = True,
709
+ user_id: Optional[str] = None,
710
+ ) -> Optional[Union[UserMemory, Dict[str, Any]]]:
711
+ """
712
+ Get a user memory from the database as a UserMemory object.
713
+
714
+ Args:
715
+ memory_id: The ID of the memory to get.
716
+ deserialize: Whether to deserialize the memory.
717
+ user_id: The ID of the user (optional, for filtering).
718
+
719
+ Returns:
720
+ Optional[UserMemory]: The user memory data if found, None otherwise.
721
+
722
+ Raises:
723
+ Exception: If any error occurs while getting the user memory.
724
+ """
725
+ try:
726
+ table_name = self._get_table("memories")
727
+ response = self.client.get_item(TableName=table_name, Key={"memory_id": {"S": memory_id}})
728
+
729
+ item = response.get("Item")
730
+ if not item:
731
+ return None
732
+
733
+ item = deserialize_from_dynamodb_item(item)
734
+
735
+ # Filter by user_id if provided
736
+ if user_id and item.get("user_id") != user_id:
737
+ return None
738
+
739
+ if not deserialize:
740
+ return item
741
+
742
+ return UserMemory.from_dict(item)
743
+
744
+ except Exception as e:
745
+ log_error(f"Failed to get user memory {memory_id}: {e}")
746
+ raise e
747
+
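A hedged usage sketch for the single-memory getter above; `db` is assumed to be a configured instance of this adapter with an existing memories table, and the IDs are invented:

```python
# Illustration only -- requires AWS credentials and an existing memories table.
memory = db.get_user_memory(memory_id="mem_123", user_id="user_42")
if memory is not None:
    print(memory)  # UserMemory object when deserialize=True (the default)

raw = db.get_user_memory(memory_id="mem_123", deserialize=False)
print(type(raw))  # plain dict (or NoneType) when deserialize=False
```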
748
+ def get_user_memories(
749
+ self,
750
+ user_id: Optional[str] = None,
751
+ agent_id: Optional[str] = None,
752
+ team_id: Optional[str] = None,
753
+ topics: Optional[List[str]] = None,
754
+ search_content: Optional[str] = None,
755
+ limit: Optional[int] = None,
756
+ page: Optional[int] = None,
757
+ sort_by: Optional[str] = None,
758
+ sort_order: Optional[str] = None,
759
+ deserialize: Optional[bool] = True,
760
+ ) -> Union[List[UserMemory], Tuple[List[Dict[str, Any]], int]]:
761
+ """
762
+ Get user memories from the database as a list of UserMemory objects.
763
+
764
+ Args:
765
+ user_id: The ID of the user to get the memories for.
766
+ agent_id: The ID of the agent to get the memories for.
767
+ team_id: The ID of the team to get the memories for.
768
769
+ topics: The topics to filter the memories by.
770
+ search_content: The content to search for in the memories.
771
+ limit: The maximum number of memories to return.
772
+ page: The page number to return.
773
+ sort_by: The field to sort the memories by.
774
+ sort_order: The order to sort the memories by.
775
+ deserialize: Whether to deserialize the memories.
776
+
777
+ Returns:
778
+ Union[List[UserMemory], Tuple[List[Dict[str, Any]], int]]: The user memories data.
779
+
780
+ Raises:
781
+ Exception: If any error occurs while getting the user memories.
782
+ """
783
+ try:
784
+ table_name = self._get_table("memories")
785
+ if table_name is None:
786
+ return [] if deserialize else ([], 0)
787
+
788
+ # Build filter expressions for component filters
789
+ (
790
+ filter_expression,
791
+ expression_attribute_names,
792
+ expression_attribute_values,
793
+ ) = build_query_filter_expression(filters={"agent_id": agent_id, "team_id": team_id})
794
+
795
+ # Build topic filter expression if topics provided
796
+ if topics:
797
+ topic_filter, topic_values = build_topic_filter_expression(topics)
798
+ expression_attribute_values.update(topic_values)
799
+ filter_expression = f"{filter_expression} AND {topic_filter}" if filter_expression else topic_filter
800
+
801
+ # Add search content filter if provided
802
+ if search_content:
803
+ search_filter = "contains(memory, :search_content)"
804
+ expression_attribute_values[":search_content"] = {"S": search_content}
805
+ filter_expression = f"{filter_expression} AND {search_filter}" if filter_expression else search_filter
806
+
807
+ # Determine whether to use GSI query or table scan
808
+ if user_id:
809
+ # Use GSI query when user_id is provided
810
+ key_condition_expression = "#user_id = :user_id"
811
+
812
+ # Set up expression attributes for GSI key condition
813
+ expression_attribute_names["#user_id"] = "user_id"
814
+ expression_attribute_values[":user_id"] = {"S": user_id}
815
+
816
+ # Execute query with pagination
817
+ items = execute_query_with_pagination(
818
+ self.client,
819
+ table_name,
820
+ "user_id-updated_at-index",
821
+ key_condition_expression,
822
+ expression_attribute_names,
823
+ expression_attribute_values,
824
+ filter_expression,
825
+ sort_by,
826
+ sort_order,
827
+ limit,
828
+ page,
829
+ )
830
+ else:
831
+ # Use table scan when user_id is None
832
+ scan_kwargs = {"TableName": table_name}
833
+
834
+ if filter_expression:
835
+ scan_kwargs["FilterExpression"] = filter_expression
836
+ if expression_attribute_names:
837
+ scan_kwargs["ExpressionAttributeNames"] = expression_attribute_names # type: ignore
838
+ if expression_attribute_values:
839
+ scan_kwargs["ExpressionAttributeValues"] = expression_attribute_values # type: ignore
840
+
841
+ # Execute scan
842
+ response = self.client.scan(**scan_kwargs)
843
+ items = response.get("Items", [])
844
+
845
+ # Handle pagination for scan
846
+ while "LastEvaluatedKey" in response:
847
+ scan_kwargs["ExclusiveStartKey"] = response["LastEvaluatedKey"]
848
+ response = self.client.scan(**scan_kwargs)
849
+ items.extend(response.get("Items", []))
850
+
851
+ items = [deserialize_from_dynamodb_item(item) for item in items]
852
+
853
+ if sort_by and sort_by != "updated_at":
854
+ items = apply_sorting(items, sort_by, sort_order)
855
+
856
+ # Apply pagination when requested; otherwise keep the full result set
+ paginated_items = apply_pagination(items, limit, page) if page else items
+
+ if not deserialize:
+ return paginated_items, len(items)
+
+ return [UserMemory.from_dict(item) for item in paginated_items]
863
+
864
+ except Exception as e:
865
+ log_error(f"Failed to get user memories: {e}")
866
+ raise e
867
+
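Illustrative calls to the paginated getter above, assuming a configured `db` and that the `user_id-updated_at-index` GSI exists on the memories table; all IDs and topics are invented:

```python
# Deserialized UserMemory objects, filtered by user and topic (illustration only):
memories = db.get_user_memories(user_id="user_42", topics=["travel"], limit=20, page=1)

# Raw dicts plus the total row count, e.g. for building a paginated API response:
rows, total = db.get_user_memories(agent_id="agent_1", deserialize=False, limit=20, page=1)
```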
868
+ def get_user_memory_stats(
869
+ self,
870
+ limit: Optional[int] = None,
871
+ page: Optional[int] = None,
872
+ user_id: Optional[str] = None,
873
+ ) -> Tuple[List[Dict[str, Any]], int]:
874
+ """Get user memories stats.
875
+
876
+ Args:
877
+ limit (Optional[int]): The maximum number of user stats to return.
878
+ page (Optional[int]): The page number.
879
+ user_id (Optional[str]): The ID of the user (optional, for filtering).
880
+
881
+ Returns:
882
+ Tuple[List[Dict[str, Any]], int]: A list of dictionaries containing user stats and total count.
883
+
884
+ Example:
885
+ (
886
+ [
887
+ {
888
+ "user_id": "123",
889
+ "total_memories": 10,
890
+ "last_memory_updated_at": 1714560000,
891
+ },
892
+ ],
893
+ 1,  # total_count
894
+ )
895
+ """
896
+ try:
897
+ table_name = self._get_table("memories")
898
+
899
+ # Build filter expression for user_id if provided
900
+ filter_expression = None
901
+ expression_attribute_values = {}
902
+ if user_id:
903
+ filter_expression = "user_id = :user_id"
904
+ expression_attribute_values[":user_id"] = {"S": user_id}
905
+
906
+ scan_kwargs = {"TableName": table_name}
907
+ if filter_expression:
908
+ scan_kwargs["FilterExpression"] = filter_expression
909
+ if expression_attribute_values:
910
+ scan_kwargs["ExpressionAttributeValues"] = expression_attribute_values # type: ignore
911
+
912
+ response = self.client.scan(**scan_kwargs)
913
+ items = response.get("Items", [])
914
+
915
+ # Handle pagination
916
+ while "LastEvaluatedKey" in response:
917
+ scan_kwargs["ExclusiveStartKey"] = response["LastEvaluatedKey"]
918
+ response = self.client.scan(**scan_kwargs)
919
+ items.extend(response.get("Items", []))
920
+
921
+ # Aggregate stats by user_id
922
+ user_stats = {}
923
+ for item in items:
924
+ memory_data = deserialize_from_dynamodb_item(item)
925
+ current_user_id = memory_data.get("user_id")
926
+
927
+ if current_user_id:
928
+ if current_user_id not in user_stats:
929
+ user_stats[current_user_id] = {
930
+ "user_id": current_user_id,
931
+ "total_memories": 0,
932
+ "last_memory_updated_at": None,
933
+ }
934
+
935
+ user_stats[current_user_id]["total_memories"] += 1
936
+
937
+ updated_at = memory_data.get("updated_at")
938
+ if updated_at:
939
+ updated_at_dt = datetime.fromisoformat(updated_at.replace("Z", "+00:00"))
940
+ updated_at_timestamp = int(updated_at_dt.timestamp())
941
+
942
+ if updated_at_timestamp and (
943
+ user_stats[current_user_id]["last_memory_updated_at"] is None
944
+ or updated_at_timestamp > user_stats[current_user_id]["last_memory_updated_at"]
945
+ ):
946
+ user_stats[current_user_id]["last_memory_updated_at"] = updated_at_timestamp
947
+
948
+ # Convert to list and apply sorting
949
+ stats_list = list(user_stats.values())
950
+ stats_list.sort(
951
+ key=lambda x: (x["last_memory_updated_at"] if x["last_memory_updated_at"] is not None else 0),
952
+ reverse=True,
953
+ )
954
+
955
+ total_count = len(stats_list)
956
+
957
+ # Apply pagination
958
+ if limit is not None:
959
+ start_index = 0
960
+ if page is not None and page > 1:
961
+ start_index = (page - 1) * limit
962
+ stats_list = stats_list[start_index : start_index + limit]
963
+
964
+ return stats_list, total_count
965
+
966
+ except Exception as e:
967
+ log_error(f"Failed to get user memory stats: {e}")
968
+ raise e
969
+
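The per-user aggregation above is a plain group-by over deserialized rows. A self-contained restatement of that reduction with invented sample data:

```python
from collections import defaultdict

rows = [
    {"user_id": "u1", "updated_at_ts": 1714560000},
    {"user_id": "u1", "updated_at_ts": 1714570000},
    {"user_id": "u2", "updated_at_ts": 1714500000},
]

stats = defaultdict(lambda: {"total_memories": 0, "last_memory_updated_at": None})
for row in rows:
    entry = stats[row["user_id"]]
    entry["total_memories"] += 1
    ts = row["updated_at_ts"]
    if entry["last_memory_updated_at"] is None or ts > entry["last_memory_updated_at"]:
        entry["last_memory_updated_at"] = ts

print(dict(stats))  # u1 has 2 memories, u2 has 1
```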
970
+ def upsert_user_memory(
971
+ self, memory: UserMemory, deserialize: Optional[bool] = True
972
+ ) -> Optional[Union[UserMemory, Dict[str, Any]]]:
973
+ """
974
+ Upsert a user memory into the database.
975
+
976
+ Args:
977
+ memory: The memory to upsert.
+ deserialize: Whether to return a UserMemory object (True) or the raw dict (False).
+
+ Returns:
+ Optional[Union[UserMemory, Dict[str, Any]]]: The upserted memory if successful, None otherwise.
981
+ """
982
+ try:
983
+ table_name = self._get_table("memories", create_table_if_not_found=True)
984
+ memory_dict = memory.to_dict()
985
+ memory_dict["updated_at"] = datetime.now(timezone.utc).isoformat()
986
+ item = serialize_to_dynamo_item(memory_dict)
987
+
988
+ self.client.put_item(TableName=table_name, Item=item)
989
+
990
+ if not deserialize:
991
+ return memory_dict
992
+
993
+ return UserMemory.from_dict(memory_dict)
994
+
995
+ except Exception as e:
996
+ log_error(f"Failed to upsert user memory: {e}")
997
+ raise e
998
+
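A hedged round-trip sketch for the upsert above; `db` is assumed to be configured, the import path is inferred from the package layout, and the `UserMemory` constructor fields shown are illustrative and may differ:

```python
from agno.db.schemas.memory import UserMemory  # assumed import path

memory = UserMemory(memory_id="mem_123", user_id="user_42", memory={"text": "Prefers window seats"})
saved = db.upsert_user_memory(memory)                    # UserMemory when deserialize=True
raw = db.upsert_user_memory(memory, deserialize=False)   # dict with the refreshed updated_at
```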
999
+ def upsert_memories(
1000
+ self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
1001
+ ) -> List[Union[UserMemory, Dict[str, Any]]]:
1002
+ """
1003
+ Bulk upsert multiple user memories for improved performance on large datasets.
1004
+
1005
+ Args:
1006
+ memories (List[UserMemory]): List of memories to upsert.
1007
+ deserialize (Optional[bool]): Whether to deserialize the memories. Defaults to True.
+ preserve_updated_at (bool): Whether to keep the provided updated_at values. Defaults to False.
+ Not honored by this DynamoDB implementation, which falls back to individual upserts.
+
+ Returns:
+ List[Union[UserMemory, Dict[str, Any]]]: List of upserted memories. Returns an empty list on error.
1014
+ """
1015
+ if not memories:
1016
+ return []
1017
+
1018
+ try:
1019
+ log_info(
1020
+ f"DynamoDb doesn't support efficient bulk operations, falling back to individual upserts for {len(memories)} memories"
1021
+ )
1022
+
1023
+ # Fall back to individual upserts
1024
+ results = []
1025
+ for memory in memories:
1026
+ if memory is not None:
1027
+ result = self.upsert_user_memory(memory, deserialize=deserialize)
1028
+ if result is not None:
1029
+ results.append(result)
1030
+ return results
1031
+
1032
+ except Exception as e:
1033
+ log_error(f"Exception during bulk memory upsert: {e}")
1034
+ return []
1035
+
1036
+ def clear_memories(self) -> None:
1037
+ """Delete all memories from the database.
1038
+
1039
+ Raises:
1040
+ Exception: If an error occurs during deletion.
1041
+ """
1042
+ try:
1043
+ table_name = self._get_table("memories")
1044
+
1045
+ # Scan the table to get all items
1046
+ response = self.client.scan(TableName=table_name)
1047
+ items = response.get("Items", [])
1048
+
1049
+ # Handle pagination for scan
1050
+ while "LastEvaluatedKey" in response:
1051
+ response = self.client.scan(TableName=table_name, ExclusiveStartKey=response["LastEvaluatedKey"])
1052
+ items.extend(response.get("Items", []))
1053
+
1054
+ if not items:
1055
+ return
1056
+
1057
+ # Delete items in batches
1058
+ for i in range(0, len(items), DYNAMO_BATCH_SIZE_LIMIT):
1059
+ batch = items[i : i + DYNAMO_BATCH_SIZE_LIMIT]
1060
+
1061
+ delete_requests = []
1062
+ for item in batch:
1063
+ # Extract the memory_id from the item
1064
+ memory_id = item.get("memory_id", {}).get("S")
1065
+ if memory_id:
1066
+ delete_requests.append({"DeleteRequest": {"Key": {"memory_id": {"S": memory_id}}}})
1067
+
1068
+ if delete_requests:
1069
+ self.client.batch_write_item(RequestItems={table_name: delete_requests})
1070
+
1071
+ except Exception as e:
1072
+ from agno.utils.log import log_warning
1073
+
1074
+ log_warning(f"Exception deleting all memories: {e}")
1075
+ raise e
1076
+
1077
+ # --- Metrics ---
1078
+
1079
+ def calculate_metrics(self) -> Optional[Any]:
1080
+ """Calculate metrics for all dates without complete metrics.
1081
+
1082
+ Returns:
1083
+ Optional[Any]: The calculated metrics or None if no metrics table.
1084
+
1085
+ Raises:
1086
+ Exception: If an error occurs during metrics calculation.
1087
+ """
1088
+ if not self.metrics_table_name:
1089
+ return None
1090
+
1091
+ try:
1092
+ from agno.utils.log import log_info
1093
+
1094
+ # Get starting date for metrics calculation
1095
+ starting_date = self._get_metrics_calculation_starting_date()
1096
+ if starting_date is None:
1097
+ log_info("No session data found. Won't calculate metrics.")
1098
+ return None
1099
+
1100
+ # Get dates that need metrics calculation
1101
+ dates_to_process = get_dates_to_calculate_metrics_for(starting_date)
1102
+ if not dates_to_process:
1103
+ log_info("Metrics already calculated for all relevant dates.")
1104
+ return None
1105
+
1106
+ # Get timestamp range for session data
1107
+ start_timestamp = int(datetime.combine(dates_to_process[0], datetime.min.time()).timestamp())
1108
+ end_timestamp = int(
1109
+ datetime.combine(dates_to_process[-1] + timedelta(days=1), datetime.min.time()).timestamp()
1110
+ )
1111
+
1112
+ # Get all sessions for the date range
1113
+ sessions = self._get_all_sessions_for_metrics_calculation(
1114
+ start_timestamp=start_timestamp, end_timestamp=end_timestamp
1115
+ )
1116
+
1117
+ # Process session data for metrics calculation
1118
+
1119
+ all_sessions_data = fetch_all_sessions_data(
1120
+ sessions=sessions,
1121
+ dates_to_process=dates_to_process,
1122
+ start_timestamp=start_timestamp,
1123
+ )
1124
+
1125
+ if not all_sessions_data:
1126
+ log_info("No new session data found. Won't calculate metrics.")
1127
+ return None
1128
+
1129
+ # Calculate metrics for each date
1130
+ results = []
1131
+ metrics_records = []
1132
+ for date_to_process in dates_to_process:
1133
+ date_key = date_to_process.isoformat()
1134
+ sessions_for_date = all_sessions_data.get(date_key, {})
1135
+
1136
+ # Skip dates with no sessions
1137
+ if not any(len(sessions) > 0 for sessions in sessions_for_date.values()):
1138
+ continue
1139
+
1140
+ metrics_record = calculate_date_metrics(date_to_process, sessions_for_date)
1141
+ metrics_records.append(metrics_record)
1142
+
1143
+ # Store metrics in DynamoDB
1144
+ if metrics_records:
1145
+ results = self._bulk_upsert_metrics(metrics_records)
1146
+
1147
+ log_debug("Updated metrics calculations")
1148
+
1149
+ return results
1150
+
1151
+ except Exception as e:
1152
+ log_error(f"Failed to calculate metrics: {e}")
1153
+ raise e
1154
+
1155
+ def _get_metrics_calculation_starting_date(self) -> Optional[date]:
1156
+ """Get the first date for which metrics calculation is needed:
1157
+ 1. If there are metrics records, return the date of the first day without a complete metrics record.
1158
+ 2. If there are no metrics records, return the date of the first recorded session.
1159
+ 3. If there are no metrics records and no session records, return None.
1160
+
1161
+ Returns:
1162
+ Optional[date]: The starting date for which metrics calculation is needed.
1163
+ """
1164
+ try:
1165
+ metrics_table_name = self._get_table("metrics")
1166
+
1167
+ # 1. Check for existing metrics records
1168
+ response = self.client.scan(
1169
+ TableName=metrics_table_name,
1170
+ ProjectionExpression="#date, completed",
1171
+ ExpressionAttributeNames={"#date": "date"},
1172
+ Limit=1000, # Get reasonable number of records to find incomplete ones
1173
+ )
1174
+
1175
+ metrics_items = response.get("Items", [])
1176
+
1177
+ # Handle pagination to get all metrics records
1178
+ while "LastEvaluatedKey" in response:
1179
+ response = self.client.scan(
1180
+ TableName=metrics_table_name,
1181
+ ProjectionExpression="#date, completed",
1182
+ ExpressionAttributeNames={"#date": "date"},
1183
+ ExclusiveStartKey=response["LastEvaluatedKey"],
1184
+ Limit=1000,
1185
+ )
1186
+ metrics_items.extend(response.get("Items", []))
1187
+
1188
+ if metrics_items:
1189
+ # Find the latest date with metrics
1190
+ latest_complete_date = None
1191
+ incomplete_dates = []
1192
+
1193
+ for item in metrics_items:
1194
+ metrics_data = deserialize_from_dynamodb_item(item)
1195
+ record_date = datetime.fromisoformat(metrics_data["date"]).date()
1196
+ is_completed = metrics_data.get("completed", False)
1197
+
1198
+ if is_completed:
1199
+ if latest_complete_date is None or record_date > latest_complete_date:
1200
+ latest_complete_date = record_date
1201
+ else:
1202
+ incomplete_dates.append(record_date)
1203
+
1204
+ # Return the earliest incomplete date, or the day after the latest complete date
1205
+ if incomplete_dates:
1206
+ return min(incomplete_dates)
1207
+ elif latest_complete_date:
1208
+ return latest_complete_date + timedelta(days=1)
1209
+
1210
+ # 2. No metrics records. Return the date of the first recorded session.
1211
+ sessions_table_name = self._get_table("sessions")
1212
+
1213
+ earliest_session_date = None
1214
+ for session_type in ["agent", "team", "workflow"]:
1215
+ response = self.client.query(
1216
+ TableName=sessions_table_name,
1217
+ IndexName="session_type-created_at-index",
1218
+ KeyConditionExpression="session_type = :session_type",
1219
+ ExpressionAttributeValues={":session_type": {"S": session_type}},
1220
+ ScanIndexForward=True, # Ascending order to get earliest
1221
+ Limit=1,
1222
+ )
1223
+
1224
+ items = response.get("Items", [])
1225
+ if items:
1226
+ first_session = deserialize_from_dynamodb_item(items[0])
1227
+ first_session_timestamp = first_session.get("created_at")
1228
+
1229
+ if first_session_timestamp:
1230
+ session_date = datetime.fromtimestamp(first_session_timestamp, tz=timezone.utc).date()
1231
+ if earliest_session_date is None or session_date < earliest_session_date:
1232
+ earliest_session_date = session_date
1233
+
1234
+ # 3. Return the earliest session date or None if no sessions exist
1235
+ return earliest_session_date
1236
+
1237
+ except Exception as e:
1238
+ log_error(f"Failed to get metrics calculation starting date: {e}")
1239
+ raise e
1240
+
1241
+ def _get_all_sessions_for_metrics_calculation(
1242
+ self, start_timestamp: int, end_timestamp: int
1243
+ ) -> List[Dict[str, Any]]:
1244
+ """Get all sessions within a timestamp range for metrics calculation.
1245
+
1246
+ Args:
1247
+ start_timestamp: Start timestamp (inclusive)
1248
+ end_timestamp: End timestamp (exclusive)
1249
+
1250
+ Returns:
1251
+ List[Dict[str, Any]]: List of session data dictionaries
1252
+ """
1253
+ try:
1254
+ table_name = self._get_table("sessions")
1255
+ all_sessions = []
1256
+
1257
+ # Query sessions by different types within the time range
1258
+ for session_type in ["agent", "team", "workflow"]:
1259
+ response = self.client.query(
1260
+ TableName=table_name,
1261
+ IndexName="session_type-created_at-index",
1262
+ KeyConditionExpression="session_type = :session_type AND created_at BETWEEN :start_ts AND :end_ts",
1263
+ ExpressionAttributeValues={
1264
+ ":session_type": {"S": session_type},
1265
+ ":start_ts": {"N": str(start_timestamp)},
1266
+ ":end_ts": {"N": str(end_timestamp)},
1267
+ },
1268
+ )
1269
+
1270
+ items = response.get("Items", [])
1271
+
1272
+ # Handle pagination
1273
+ while "LastEvaluatedKey" in response:
1274
+ response = self.client.query(
1275
+ TableName=table_name,
1276
+ IndexName="session_type-created_at-index",
1277
+ KeyConditionExpression="session_type = :session_type AND created_at BETWEEN :start_ts AND :end_ts",
1278
+ ExpressionAttributeValues={
1279
+ ":session_type": {"S": session_type},
1280
+ ":start_ts": {"N": str(start_timestamp)},
1281
+ ":end_ts": {"N": str(end_timestamp)},
1282
+ },
1283
+ ExclusiveStartKey=response["LastEvaluatedKey"],
1284
+ )
1285
+ items.extend(response.get("Items", []))
1286
+
1287
+ # Deserialize sessions
1288
+ for item in items:
1289
+ session_data = deserialize_from_dynamodb_item(item)
1290
+ if session_data:
1291
+ all_sessions.append(session_data)
1292
+
1293
+ return all_sessions
1294
+
1295
+ except Exception as e:
1296
+ log_error(f"Failed to get sessions for metrics calculation: {e}")
1297
+ raise e
1298
+
1299
+ def _bulk_upsert_metrics(self, metrics_records: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
1300
+ """Bulk upsert metrics records into DynamoDB with proper deduplication.
1301
+
1302
+ Args:
1303
+ metrics_records: List of metrics records to upsert
1304
+
1305
+ Returns:
1306
+ List[Dict[str, Any]]: List of upserted records
1307
+ """
1308
+ try:
1309
+ table_name = self._get_table("metrics")
1310
+ if table_name is None:
1311
+ return []
1312
+
1313
+ results = []
1314
+
1315
+ # Process each record individually to handle proper upsert
1316
+ for record in metrics_records:
1317
+ upserted_record = self._upsert_single_metrics_record(table_name, record)
1318
+ if upserted_record:
1319
+ results.append(upserted_record)
1320
+
1321
+ return results
1322
+
1323
+ except Exception as e:
1324
+ log_error(f"Failed to bulk upsert metrics: {e}")
1325
+ raise e
1326
+
1327
+ def _upsert_single_metrics_record(self, table_name: str, record: Dict[str, Any]) -> Optional[Dict[str, Any]]:
1328
+ """Upsert a single metrics record, checking for existing records with the same date.
1329
+
1330
+ Args:
1331
+ table_name: The DynamoDB table name
1332
+ record: The metrics record to upsert
1333
+
1334
+ Returns:
1335
+ Optional[Dict[str, Any]]: The upserted record or None if failed
1336
+ """
1337
+ try:
1338
+ date_str = record.get("date")
1339
+ if not date_str:
1340
+ log_error("Metrics record missing date field")
1341
+ return None
1342
+
1343
+ # Convert date object to string if needed
1344
+ if hasattr(date_str, "isoformat"):
1345
+ date_str = date_str.isoformat()
1346
+
1347
+ # Check if a record already exists for this date
1348
+ existing_record = self._get_existing_metrics_record(table_name, date_str)
1349
+
1350
+ if existing_record:
1351
+ return self._update_existing_metrics_record(table_name, existing_record, record)
1352
+ else:
1353
+ return self._create_new_metrics_record(table_name, record)
1354
+
1355
+ except Exception as e:
1356
+ log_error(f"Failed to upsert single metrics record: {e}")
1357
+ raise e
1358
+
1359
+ def _get_existing_metrics_record(self, table_name: str, date_str: str) -> Optional[Dict[str, Any]]:
1360
+ """Get existing metrics record for a given date.
1361
+
1362
+ Args:
1363
+ table_name: The DynamoDB table name
1364
+ date_str: The date string to search for
1365
+
1366
+ Returns:
1367
+ Optional[Dict[str, Any]]: The existing record or None if not found
1368
+ """
1369
+ try:
1370
+ # Query using the date-aggregation_period-index
1371
+ response = self.client.query(
1372
+ TableName=table_name,
1373
+ IndexName="date-aggregation_period-index",
1374
+ KeyConditionExpression="#date = :date AND aggregation_period = :period",
1375
+ ExpressionAttributeNames={"#date": "date"},
1376
+ ExpressionAttributeValues={
1377
+ ":date": {"S": date_str},
1378
+ ":period": {"S": "daily"},
1379
+ },
1380
+ Limit=1,
1381
+ )
1382
+
1383
+ items = response.get("Items", [])
1384
+ if items:
1385
+ return deserialize_from_dynamodb_item(items[0])
1386
+ return None
1387
+
1388
+ except Exception as e:
1389
+ log_error(f"Failed to get existing metrics record for date {date_str}: {e}")
1390
+ raise e
1391
+
1392
+ def _update_existing_metrics_record(
1393
+ self,
1394
+ table_name: str,
1395
+ existing_record: Dict[str, Any],
1396
+ new_record: Dict[str, Any],
1397
+ ) -> Optional[Dict[str, Any]]:
1398
+ """Update an existing metrics record.
1399
+
1400
+ Args:
1401
+ table_name: The DynamoDB table name
1402
+ existing_record: The existing record
1403
+ new_record: The new record data
1404
+
1405
+ Returns:
1406
+ Optional[Dict[str, Any]]: The updated record or None if failed
1407
+ """
1408
+ try:
1409
+ # Use the existing record's ID
1410
+ new_record["id"] = existing_record["id"]
1411
+ new_record["updated_at"] = int(time.time())
1412
+
1413
+ # Prepare and serialize the record
1414
+ prepared_record = self._prepare_metrics_record_for_dynamo(new_record)
1415
+ item = self._serialize_metrics_to_dynamo_item(prepared_record)
1416
+
1417
+ # Update the record
1418
+ self.client.put_item(TableName=table_name, Item=item)
1419
+
1420
+ return new_record
1421
+
1422
+ except Exception as e:
1423
+ log_error(f"Failed to update existing metrics record: {e}")
1424
+ raise e
1425
+
1426
+ def _create_new_metrics_record(self, table_name: str, record: Dict[str, Any]) -> Optional[Dict[str, Any]]:
1427
+ """Create a new metrics record.
1428
+
1429
+ Args:
1430
+ table_name: The DynamoDB table name
1431
+ record: The record to create
1432
+
1433
+ Returns:
1434
+ Optional[Dict[str, Any]]: The created record or None if failed
1435
+ """
1436
+ try:
1437
+ # Prepare and serialize the record
1438
+ prepared_record = self._prepare_metrics_record_for_dynamo(record)
1439
+ item = self._serialize_metrics_to_dynamo_item(prepared_record)
1440
+
1441
+ # Create the record
1442
+ self.client.put_item(TableName=table_name, Item=item)
1443
+
1444
+ return record
1445
+
1446
+ except Exception as e:
1447
+ log_error(f"Failed to create new metrics record: {e}")
1448
+ raise e
1449
+
1450
+ def _prepare_metrics_record_for_dynamo(self, record: Dict[str, Any]) -> Dict[str, Any]:
1451
+ """Prepare a metrics record for DynamoDB serialization by converting all data types properly.
1452
+
1453
+ Args:
1454
+ record: The metrics record to prepare
1455
+
1456
+ Returns:
1457
+ Dict[str, Any]: The prepared record ready for DynamoDB serialization
1458
+ """
1459
+
1460
+ def convert_value(value):
1461
+ """Recursively convert values to DynamoDB-compatible types."""
1462
+ if value is None:
1463
+ return None
1464
+ elif isinstance(value, bool):
1465
+ return value
1466
+ elif isinstance(value, (int, float)):
1467
+ return value
1468
+ elif isinstance(value, str):
1469
+ return value
1470
+ elif hasattr(value, "isoformat"): # date/datetime objects
1471
+ return value.isoformat()
1472
+ elif isinstance(value, dict):
1473
+ return {k: convert_value(v) for k, v in value.items()}
1474
+ elif isinstance(value, list):
1475
+ return [convert_value(item) for item in value]
1476
+ else:
1477
+ return str(value)
1478
+
1479
+ return {key: convert_value(value) for key, value in record.items()}
1480
+
1481
+ def _serialize_metrics_to_dynamo_item(self, data: Dict[str, Any]) -> Dict[str, Any]:
1482
+ """Serialize metrics data to DynamoDB item format with proper boolean handling.
1483
+
1484
+ Args:
1485
+ data: The metrics data to serialize
1486
+
1487
+ Returns:
1488
+ Dict[str, Any]: DynamoDB-ready item
1489
+ """
1490
+ import json
1491
+
1492
+ item: Dict[str, Any] = {}
1493
+ for key, value in data.items():
1494
+ if value is not None:
1495
+ if isinstance(value, bool):
1496
+ item[key] = {"BOOL": value}
1497
+ elif isinstance(value, (int, float)):
1498
+ item[key] = {"N": str(value)}
1499
+ elif isinstance(value, str):
1500
+ item[key] = {"S": str(value)}
1501
+ elif isinstance(value, (dict, list)):
1502
+ item[key] = {"S": json.dumps(value)}
1503
+ else:
1504
+ item[key] = {"S": str(value)}
1505
+ return item
1506
+
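To make the attribute-value rules above concrete, this standalone snippet spells out what the serializer is expected to emit for a small record; it restates the mapping rather than calling the method:

```python
import json

record = {"date": "2024-05-01", "completed": True, "runs_count": 3, "token_metrics": {"input": 10}}

# Expected DynamoDB item under the rules in _serialize_metrics_to_dynamo_item:
expected = {
    "date": {"S": "2024-05-01"},                          # strings -> S
    "completed": {"BOOL": True},                          # booleans kept as BOOL, not N
    "runs_count": {"N": "3"},                             # numbers -> string-typed N
    "token_metrics": {"S": json.dumps({"input": 10})},    # dicts/lists -> JSON text in S
}
print(expected)
```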
1507
+ def get_metrics(
1508
+ self,
1509
+ starting_date: Optional[date] = None,
1510
+ ending_date: Optional[date] = None,
1511
+ ) -> Tuple[List[Any], Optional[int]]:
1512
+ """
1513
+ Get metrics from the database.
1514
+
1515
+ Args:
1516
+ starting_date: The starting date to filter metrics by.
1517
+ ending_date: The ending date to filter metrics by.
1518
+
1519
+ Returns:
1520
+ Tuple[List[Any], Optional[int]]: A tuple containing the metrics data and the total count.
1521
+
1522
+ Raises:
1523
+ Exception: If any error occurs while getting the metrics.
1524
+ """
1525
+
1526
+ try:
1527
+ table_name = self._get_table("metrics")
1528
+ if table_name is None:
1529
+ return ([], None)
1530
+
1531
+ # Build query parameters
1532
+ scan_kwargs: Dict[str, Any] = {"TableName": table_name}
1533
+
1534
+ if starting_date or ending_date:
1535
+ filter_expressions = []
1536
+ expression_values = {}
1537
+
1538
+ if starting_date:
1539
+ filter_expressions.append("#date >= :start_date")
1540
+ expression_values[":start_date"] = {"S": starting_date.isoformat()}
1541
+
1542
+ if ending_date:
1543
+ filter_expressions.append("#date <= :end_date")
1544
+ expression_values[":end_date"] = {"S": ending_date.isoformat()}
1545
+
1546
+ scan_kwargs["FilterExpression"] = " AND ".join(filter_expressions)
1547
+ scan_kwargs["ExpressionAttributeNames"] = {"#date": "date"}
1548
+ scan_kwargs["ExpressionAttributeValues"] = expression_values
1549
+
1550
+ # Execute scan
1551
+ response = self.client.scan(**scan_kwargs)
1552
+ items = response.get("Items", [])
1553
+
1554
+ # Handle pagination
1555
+ while "LastEvaluatedKey" in response:
1556
+ scan_kwargs["ExclusiveStartKey"] = response["LastEvaluatedKey"]
1557
+ response = self.client.scan(**scan_kwargs)
1558
+ items.extend(response.get("Items", []))
1559
+
1560
+ # Convert to metrics data
1561
+ metrics_data = []
1562
+ for item in items:
1563
+ metric_data = deserialize_from_dynamodb_item(item)
1564
+ if metric_data:
1565
+ metrics_data.append(metric_data)
1566
+
1567
+ return metrics_data, len(metrics_data)
1568
+
1569
+ except Exception as e:
1570
+ log_error(f"Failed to get metrics: {e}")
1571
+ raise e
1572
+
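A brief usage sketch for the metrics getter above, assuming a configured `db` and an existing metrics table; the dates are arbitrary:

```python
from datetime import date

# Illustration only -- returns ([], None) when no metrics table is configured.
metrics, count = db.get_metrics(starting_date=date(2024, 5, 1), ending_date=date(2024, 5, 31))
print(count, metrics[:1])
```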
1573
+ # --- Knowledge methods ---
1574
+
1575
+ def delete_knowledge_content(self, id: str):
1576
+ """Delete a knowledge row from the database.
1577
+
1578
+ Args:
1579
+ id (str): The ID of the knowledge row to delete.
1580
+
1581
+ Raises:
1582
+ Exception: If an error occurs during deletion.
1583
+ """
1584
+ try:
1585
+ table_name = self._get_table("knowledge")
1586
+
1587
+ self.client.delete_item(TableName=table_name, Key={"id": {"S": id}})
1588
+
1589
+ log_debug(f"Deleted knowledge content {id}")
1590
+
1591
+ except Exception as e:
1592
+ log_error(f"Failed to delete knowledge content {id}: {e}")
1593
+ raise e
1594
+
1595
+ def get_knowledge_content(self, id: str) -> Optional[KnowledgeRow]:
1596
+ """Get a knowledge row from the database.
1597
+
1598
+ Args:
1599
+ id (str): The ID of the knowledge row to get.
1600
+
1601
+ Returns:
1602
+ Optional[KnowledgeRow]: The knowledge row, or None if it doesn't exist.
1603
+ """
1604
+ try:
1605
+ table_name = self._get_table("knowledge")
1606
+ response = self.client.get_item(TableName=table_name, Key={"id": {"S": id}})
1607
+
1608
+ item = response.get("Item")
1609
+ if item:
1610
+ return deserialize_knowledge_row(item)
1611
+
1612
+ return None
1613
+
1614
+ except Exception as e:
1615
+ log_error(f"Failed to get knowledge content {id}: {e}")
1616
+ raise e
1617
+
1618
+ def get_knowledge_contents(
1619
+ self,
1620
+ limit: Optional[int] = None,
1621
+ page: Optional[int] = None,
1622
+ sort_by: Optional[str] = None,
1623
+ sort_order: Optional[str] = None,
1624
+ ) -> Tuple[List[KnowledgeRow], int]:
1625
+ """Get all knowledge contents from the database.
1626
+
1627
+ Args:
1628
+ limit (Optional[int]): The maximum number of knowledge contents to return.
1629
+ page (Optional[int]): The page number.
1630
+ sort_by (Optional[str]): The column to sort by.
1631
+ sort_order (Optional[str]): The order to sort by.
1632
1633
+
1634
+ Returns:
1635
+ Tuple[List[KnowledgeRow], int]: The knowledge contents and total count.
1636
+
1637
+ Raises:
1638
+ Exception: If an error occurs during retrieval.
1639
+ """
1640
+ try:
1641
+ table_name = self._get_table("knowledge")
1642
+ if table_name is None:
1643
+ return [], 0
1644
+
1645
+ response = self.client.scan(TableName=table_name)
1646
+ items = response.get("Items", [])
1647
+
1648
+ # Handle pagination
1649
+ while "LastEvaluatedKey" in response:
1650
+ response = self.client.scan(
1651
+ TableName=table_name,
1652
+ ExclusiveStartKey=response["LastEvaluatedKey"],
1653
+ )
1654
+ items.extend(response.get("Items", []))
1655
+
1656
+ # Convert to knowledge rows
1657
+ knowledge_rows = []
1658
+ for item in items:
1659
+ try:
1660
+ knowledge_row = deserialize_knowledge_row(item)
1661
+ knowledge_rows.append(knowledge_row)
1662
+ except Exception as e:
1663
+ log_error(f"Failed to deserialize knowledge row: {e}")
1664
+
1665
+ # Apply sorting
1666
+ if sort_by:
1667
+ reverse = sort_order == "desc"
1668
+ knowledge_rows = sorted(
1669
+ knowledge_rows,
1670
+ key=lambda x: getattr(x, sort_by, ""),
1671
+ reverse=reverse,
1672
+ )
1673
+
1674
+ # Get total count before pagination
1675
+ total_count = len(knowledge_rows)
1676
+
1677
+ # Apply pagination
1678
+ if limit:
1679
+ start_index = 0
1680
+ if page and page > 1:
1681
+ start_index = (page - 1) * limit
1682
+ knowledge_rows = knowledge_rows[start_index : start_index + limit]
1683
+
1684
+ return knowledge_rows, total_count
1685
+
1686
+ except Exception as e:
1687
+ log_error(f"Failed to get knowledge contents: {e}")
1688
+ raise e
1689
+
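An illustrative call to the knowledge listing above, assuming a configured `db`; the sort field must be an attribute of `KnowledgeRow`:

```python
# Illustration only -- requires an existing knowledge table.
rows, total = db.get_knowledge_contents(limit=10, page=1, sort_by="name", sort_order="asc")
print(total, [row.id for row in rows])
```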
1690
+ def upsert_knowledge_content(self, knowledge_row: KnowledgeRow) -> Optional[KnowledgeRow]:
1691
+ """Upsert knowledge content in the database.
1692
+
1693
+ Args:
1694
+ knowledge_row (KnowledgeRow): The knowledge row to upsert.
1695
+
1696
+ Returns:
1697
+ Optional[KnowledgeRow]: The upserted knowledge row, or None if the operation fails.
1698
+ """
1699
+ try:
1700
+ table_name = self._get_table("knowledge", create_table_if_not_found=True)
1701
+ item = serialize_knowledge_row(knowledge_row)
1702
+
1703
+ self.client.put_item(TableName=table_name, Item=item)
1704
+
1705
+ return knowledge_row
1706
+
1707
+ except Exception as e:
1708
+ log_error(f"Failed to upsert knowledge content {knowledge_row.id}: {e}")
1709
+ raise e
1710
+
1711
+ # --- Eval ---
1712
+
1713
+ def create_eval_run(self, eval_run: EvalRunRecord) -> Optional[EvalRunRecord]:
1714
+ """Create an eval run in the database.
1715
+
1716
+ Args:
1717
+ eval_run (EvalRunRecord): The eval run to create.
1718
+
1719
+ Returns:
1720
+ Optional[EvalRunRecord]: The created eval run, or None if the operation fails.
1721
+
1722
+ Raises:
1723
+ Exception: If an error occurs during creation.
1724
+ """
1725
+ try:
1726
+ table_name = self._get_table("evals", create_table_if_not_found=True)
1727
+
1728
+ item = serialize_eval_record(eval_run)
1729
+ current_time = int(datetime.now(timezone.utc).timestamp())
1730
+ item["created_at"] = {"N": str(current_time)}
1731
+ item["updated_at"] = {"N": str(current_time)}
1732
+
1733
+ self.client.put_item(TableName=table_name, Item=item)
1734
+
1735
+ return eval_run
1736
+
1737
+ except Exception as e:
1738
+ log_error(f"Failed to create eval run: {e}")
1739
+ raise e
1740
+
1741
+ def delete_eval_runs(self, eval_run_ids: List[str]) -> None:
1742
+ if not eval_run_ids or not self.eval_table_name:
1743
+ return
1744
+
1745
+ try:
1746
+ for i in range(0, len(eval_run_ids), DYNAMO_BATCH_SIZE_LIMIT):
1747
+ batch = eval_run_ids[i : i + DYNAMO_BATCH_SIZE_LIMIT]
1748
+
1749
+ delete_requests = []
1750
+ for eval_run_id in batch:
1751
+ delete_requests.append({"DeleteRequest": {"Key": {"run_id": {"S": eval_run_id}}}})
1752
+
1753
+ self.client.batch_write_item(RequestItems={self.eval_table_name: delete_requests})
1754
+
1755
+ except Exception as e:
1756
+ log_error(f"Failed to delete eval runs: {e}")
1757
+ raise e
1758
+
1759
+ def get_eval_run_raw(self, eval_run_id: str, table: Optional[Any] = None) -> Optional[Dict[str, Any]]:
1760
+ if not self.eval_table_name:
1761
+ return None
1762
+
1763
+ try:
1764
+ response = self.client.get_item(TableName=self.eval_table_name, Key={"run_id": {"S": eval_run_id}})
1765
+
1766
+ item = response.get("Item")
1767
+ if item:
1768
+ return deserialize_from_dynamodb_item(item)
1769
+ return None
1770
+
1771
+ except Exception as e:
1772
+ log_error(f"Failed to get eval run {eval_run_id}: {e}")
1773
+ raise e
1774
+
1775
+ def get_eval_run(self, eval_run_id: str, table: Optional[Any] = None) -> Optional[EvalRunRecord]:
1776
+ if not self.eval_table_name:
1777
+ return None
1778
+
1779
+ try:
1780
+ response = self.client.get_item(TableName=self.eval_table_name, Key={"run_id": {"S": eval_run_id}})
1781
+
1782
+ item = response.get("Item")
1783
+ if item:
1784
+ return deserialize_eval_record(item)
1785
+ return None
1786
+
1787
+ except Exception as e:
1788
+ log_error(f"Failed to get eval run {eval_run_id}: {e}")
1789
+ raise e
1790
+
1791
+ def get_eval_runs(
1792
+ self,
1793
+ limit: Optional[int] = None,
1794
+ page: Optional[int] = None,
1795
+ sort_by: Optional[str] = None,
1796
+ sort_order: Optional[str] = None,
1797
+ agent_id: Optional[str] = None,
1798
+ team_id: Optional[str] = None,
1799
+ workflow_id: Optional[str] = None,
1800
+ model_id: Optional[str] = None,
1801
+ filter_type: Optional[EvalFilterType] = None,
1802
+ eval_type: Optional[List[EvalType]] = None,
1803
+ deserialize: Optional[bool] = True,
1804
+ ) -> Union[List[EvalRunRecord], Tuple[List[Dict[str, Any]], int]]:
1805
+ try:
1806
+ table_name = self._get_table("evals")
1807
+ if table_name is None:
1808
+ return [] if deserialize else ([], 0)
1809
+
1810
+ scan_kwargs = {"TableName": table_name}
1811
+
1812
+ filter_expressions = []
1813
+ expression_values = {}
1814
+
1815
+ if agent_id:
1816
+ filter_expressions.append("agent_id = :agent_id")
1817
+ expression_values[":agent_id"] = {"S": agent_id}
1818
+
1819
+ if team_id:
1820
+ filter_expressions.append("team_id = :team_id")
1821
+ expression_values[":team_id"] = {"S": team_id}
1822
+
1823
+ if workflow_id:
1824
+ filter_expressions.append("workflow_id = :workflow_id")
1825
+ expression_values[":workflow_id"] = {"S": workflow_id}
1826
+
1827
+ if model_id:
1828
+ filter_expressions.append("model_id = :model_id")
1829
+ expression_values[":model_id"] = {"S": model_id}
1830
+
1831
+ if eval_type is not None and len(eval_type) > 0:
1832
+ eval_type_conditions = []
1833
+ for i, et in enumerate(eval_type):
1834
+ param_name = f":eval_type_{i}"
1835
+ eval_type_conditions.append(f"eval_type = {param_name}")
1836
+ expression_values[param_name] = {"S": str(et.value)}
1837
+ filter_expressions.append(f"({' OR '.join(eval_type_conditions)})")
1838
+
1839
+ if filter_type is not None:
1840
+ if filter_type == EvalFilterType.AGENT:
1841
+ filter_expressions.append("attribute_exists(agent_id)")
1842
+ elif filter_type == EvalFilterType.TEAM:
1843
+ filter_expressions.append("attribute_exists(team_id)")
1844
+ elif filter_type == EvalFilterType.WORKFLOW:
1845
+ filter_expressions.append("attribute_exists(workflow_id)")
1846
+
1847
+ if filter_expressions:
1848
+ scan_kwargs["FilterExpression"] = " AND ".join(filter_expressions)
1849
+
1850
+ if expression_values:
1851
+ scan_kwargs["ExpressionAttributeValues"] = expression_values # type: ignore
1852
+
1853
+ # Execute scan
1854
+ response = self.client.scan(**scan_kwargs)
1855
+ items = response.get("Items", [])
1856
+
1857
+ # Handle pagination
1858
+ while "LastEvaluatedKey" in response:
1859
+ scan_kwargs["ExclusiveStartKey"] = response["LastEvaluatedKey"]
1860
+ response = self.client.scan(**scan_kwargs)
1861
+ items.extend(response.get("Items", []))
1862
+
1863
+ # Convert to eval data
1864
+ eval_data = []
1865
+ for item in items:
1866
+ eval_item = deserialize_from_dynamodb_item(item)
1867
+ if eval_item:
1868
+ eval_data.append(eval_item)
1869
+
1870
+ # Apply sorting
1871
+ eval_data = apply_sorting(eval_data, sort_by, sort_order)
1872
+
1873
+ # Get total count before pagination
1874
+ total_count = len(eval_data)
1875
+
1876
+ # Apply pagination
1877
+ eval_data = apply_pagination(eval_data, limit, page)
1878
+
1879
+ if not deserialize:
1880
+ return eval_data, total_count
1881
+
1882
+ eval_runs = []
1883
+ for eval_item in eval_data:
1884
+ eval_run = EvalRunRecord.model_validate(eval_item)
1885
+ eval_runs.append(eval_run)
1886
+ return eval_runs
1887
+
1888
+ except Exception as e:
1889
+ log_error(f"Failed to get eval runs: {e}")
1890
+ raise e
1891
+
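A hedged sketch of querying eval runs with the filters above; `db` is assumed configured and `EvalFilterType` is the enum already referenced in this module:

```python
# Illustration only -- requires an existing evals table.
runs = db.get_eval_runs(agent_id="agent_1", sort_by="created_at", sort_order="desc", limit=10, page=1)

# Raw dicts plus total count, restricted to agent-scoped runs:
rows, total = db.get_eval_runs(filter_type=EvalFilterType.AGENT, deserialize=False)
```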
1892
+ def rename_eval_run(
1893
+ self, eval_run_id: str, name: str, deserialize: Optional[bool] = True
1894
+ ) -> Optional[Union[EvalRunRecord, Dict[str, Any]]]:
1895
+ if not self.eval_table_name:
1896
+ return None
1897
+
1898
+ try:
1899
+ response = self.client.update_item(
1900
+ TableName=self.eval_table_name,
1901
+ Key={"run_id": {"S": eval_run_id}},
1902
+ UpdateExpression="SET #name = :name, updated_at = :updated_at",
1903
+ ExpressionAttributeNames={"#name": "name"},
1904
+ ExpressionAttributeValues={
1905
+ ":name": {"S": name},
1906
+ ":updated_at": {"N": str(int(time.time()))},
1907
+ },
1908
+ ReturnValues="ALL_NEW",
1909
+ )
1910
+
1911
+ item = response.get("Attributes")
1912
+ if item is None:
1913
+ return None
1914
+
1915
+ log_debug(f"Renamed eval run with id '{eval_run_id}' to '{name}'")
1916
+
1917
+ item = deserialize_from_dynamodb_item(item)
1918
+ return EvalRunRecord.model_validate(item) if deserialize else item
1919
+
1920
+ except Exception as e:
1921
+ log_error(f"Failed to rename eval run {eval_run_id}: {e}")
1922
+ raise e
1923
+
1924
+ # -- Culture methods --
1925
+
1926
+ def clear_cultural_knowledge(self) -> None:
+ """Delete all cultural knowledge from the database."""
+ try:
+ table_name = self._get_table("culture")
+
+ # Scan for all ids, following LastEvaluatedKey pagination
+ response = self.client.scan(TableName=table_name, ProjectionExpression="id")
+ items = response.get("Items", [])
+ while "LastEvaluatedKey" in response:
+ response = self.client.scan(
+ TableName=table_name, ProjectionExpression="id", ExclusiveStartKey=response["LastEvaluatedKey"]
+ )
+ items.extend(response.get("Items", []))
+
+ # batch_writer is a Table-resource helper; the low-level client uses batch_write_item
+ for i in range(0, len(items), DYNAMO_BATCH_SIZE_LIMIT):
+ batch = items[i : i + DYNAMO_BATCH_SIZE_LIMIT]
+ delete_requests = [{"DeleteRequest": {"Key": {"id": item["id"]}}} for item in batch]
+ if delete_requests:
+ self.client.batch_write_item(RequestItems={table_name: delete_requests})
+ except Exception as e:
+ log_error(f"Failed to clear cultural knowledge: {e}")
+ raise e
1938
+
1939
+ def delete_cultural_knowledge(self, id: str) -> None:
1940
+ """Delete a cultural knowledge entry from the database."""
1941
+ try:
1942
+ table_name = self._get_table("culture")
1943
+ self.client.delete_item(TableName=table_name, Key={"id": {"S": id}})
1944
+ except Exception as e:
1945
+ log_error(f"Failed to delete cultural knowledge {id}: {e}")
1946
+ raise e
1947
+
1948
+ def get_cultural_knowledge(
1949
+ self, id: str, deserialize: Optional[bool] = True
1950
+ ) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
1951
+ """Get a cultural knowledge entry from the database."""
1952
+ try:
1953
+ table_name = self._get_table("culture")
1954
+ response = self.client.get_item(TableName=table_name, Key={"id": {"S": id}})
1955
+
1956
+ item = response.get("Item")
1957
+ if not item:
1958
+ return None
1959
+
1960
+ db_row = deserialize_from_dynamodb_item(item)
1961
+ if not deserialize:
1962
+ return db_row
1963
+
1964
+ return deserialize_cultural_knowledge_from_db(db_row)
1965
+ except Exception as e:
1966
+ log_error(f"Failed to get cultural knowledge {id}: {e}")
1967
+ raise e
1968
+
1969
+ def get_all_cultural_knowledge(
1970
+ self,
1971
+ name: Optional[str] = None,
1972
+ agent_id: Optional[str] = None,
1973
+ team_id: Optional[str] = None,
1974
+ limit: Optional[int] = None,
1975
+ page: Optional[int] = None,
1976
+ sort_by: Optional[str] = None,
1977
+ sort_order: Optional[str] = None,
1978
+ deserialize: Optional[bool] = True,
1979
+ ) -> Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
1980
+ """Get all cultural knowledge from the database."""
1981
+ try:
1982
+ table_name = self._get_table("culture")
1983
+
1984
+ # Build filter expression
1985
+ filter_expressions = []
1986
+ expression_values = {}
1987
+
1988
+ if name:
1989
+ filter_expressions.append("#name = :name")
1990
+ expression_values[":name"] = {"S": name}
1991
+ if agent_id:
1992
+ filter_expressions.append("agent_id = :agent_id")
1993
+ expression_values[":agent_id"] = {"S": agent_id}
1994
+ if team_id:
1995
+ filter_expressions.append("team_id = :team_id")
1996
+ expression_values[":team_id"] = {"S": team_id}
1997
+
1998
+ scan_kwargs: Dict[str, Any] = {"TableName": table_name}
1999
+ if filter_expressions:
2000
+ scan_kwargs["FilterExpression"] = " AND ".join(filter_expressions)
2001
+ scan_kwargs["ExpressionAttributeValues"] = expression_values
2002
+ if name:
2003
+ scan_kwargs["ExpressionAttributeNames"] = {"#name": "name"}
2004
+
2005
+ # Execute scan
2006
+ response = self.client.scan(**scan_kwargs)
2007
+ items = response.get("Items", [])
2008
+
2009
+ # Continue scanning if there's more data
2010
+ while "LastEvaluatedKey" in response:
2011
+ scan_kwargs["ExclusiveStartKey"] = response["LastEvaluatedKey"]
2012
+ response = self.client.scan(**scan_kwargs)
2013
+ items.extend(response.get("Items", []))
2014
+
2015
+ # Deserialize items from DynamoDB format
2016
+ db_rows = [deserialize_from_dynamodb_item(item) for item in items]
2017
+
2018
+ # Apply sorting
2019
+ if sort_by:
2020
+ reverse = sort_order == "desc" if sort_order else False
2021
+ db_rows.sort(key=lambda x: x.get(sort_by, ""), reverse=reverse)
2022
+
2023
+ # Apply pagination
2024
+ total_count = len(db_rows)
2025
+ if limit and page:
2026
+ start = (page - 1) * limit
2027
+ db_rows = db_rows[start : start + limit]
2028
+ elif limit:
2029
+ db_rows = db_rows[:limit]
2030
+
2031
+ if not deserialize:
2032
+ return db_rows, total_count
2033
+
2034
+ return [deserialize_cultural_knowledge_from_db(row) for row in db_rows]
2035
+ except Exception as e:
2036
+ log_error(f"Failed to get all cultural knowledge: {e}")
2037
+ raise e
2038
+
2039
+ def upsert_cultural_knowledge(
2040
+ self, cultural_knowledge: CulturalKnowledge, deserialize: Optional[bool] = True
2041
+ ) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
2042
+ """Upsert a cultural knowledge entry into the database."""
2043
+ try:
2044
+ from uuid import uuid4
2045
+
2046
+ table_name = self._get_table("culture", create_table_if_not_found=True)
2047
+
2048
+ if not cultural_knowledge.id:
2049
+ cultural_knowledge.id = str(uuid4())
2050
+
2051
+ # Serialize content, categories, and notes into a dict for DB storage
2052
+ content_dict = serialize_cultural_knowledge_for_db(cultural_knowledge)
2053
+
2054
+ # Create the item dict with serialized content
2055
+ item_dict = {
2056
+ "id": cultural_knowledge.id,
2057
+ "name": cultural_knowledge.name,
2058
+ "summary": cultural_knowledge.summary,
2059
+ "content": content_dict if content_dict else None,
2060
+ "metadata": cultural_knowledge.metadata,
2061
+ "input": cultural_knowledge.input,
2062
+ "created_at": cultural_knowledge.created_at,
2063
+ "updated_at": int(time.time()),
2064
+ "agent_id": cultural_knowledge.agent_id,
2065
+ "team_id": cultural_knowledge.team_id,
2066
+ }
2067
+
2068
+ # Convert to DynamoDB format
2069
+ item = serialize_to_dynamo_item(item_dict)
2070
+ self.client.put_item(TableName=table_name, Item=item)
2071
+
2072
+ return self.get_cultural_knowledge(cultural_knowledge.id, deserialize=deserialize)
2073
+
2074
+ except Exception as e:
2075
+ log_error(f"Failed to upsert cultural knowledge: {e}")
2076
+ raise e
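A hedged upsert-and-list sketch for the culture methods above; `db` is assumed configured and the `CulturalKnowledge` fields shown are inferred from the item dict built in the upsert, so the real constructor may differ:

```python
ck = CulturalKnowledge(name="Greeting etiquette", summary="Team members greet each other by first name")
saved = db.upsert_cultural_knowledge(ck)  # read back from the table after the put
entries = db.get_all_cultural_knowledge(name="Greeting etiquette", limit=5)
```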
2077
+
2078
+ # --- Traces ---
2079
+ def upsert_trace(self, trace: "Trace") -> None:
2080
+ """Create or update a single trace record in the database.
2081
+
2082
+ Args:
2083
+ trace: The Trace object to store (one per trace_id).
2084
+ """
2085
+ try:
2086
+ table_name = self._get_table("traces", create_table_if_not_found=True)
2087
+ if table_name is None:
2088
+ return
2089
+
2090
+ # Check if trace already exists
2091
+ response = self.client.get_item(
2092
+ TableName=table_name,
2093
+ Key={"trace_id": {"S": trace.trace_id}},
2094
+ )
2095
+
2096
+ existing_item = response.get("Item")
2097
+ if existing_item:
2098
+ # Update existing trace
2099
+ existing = deserialize_from_dynamodb_item(existing_item)
2100
+
2101
+ # Determine component level for name update priority
2102
+ def get_component_level(workflow_id, team_id, agent_id, name):
2103
+ is_root_name = ".run" in name or ".arun" in name
2104
+ if not is_root_name:
2105
+ return 0
2106
+ elif workflow_id:
2107
+ return 3
2108
+ elif team_id:
2109
+ return 2
2110
+ elif agent_id:
2111
+ return 1
2112
+ else:
2113
+ return 0
2114
+
2115
+ existing_level = get_component_level(
2116
+ existing.get("workflow_id"),
2117
+ existing.get("team_id"),
2118
+ existing.get("agent_id"),
2119
+ existing.get("name", ""),
2120
+ )
2121
+ new_level = get_component_level(trace.workflow_id, trace.team_id, trace.agent_id, trace.name)
2122
+ should_update_name = new_level > existing_level
2123
+
2124
+ # Parse existing start_time to calculate correct duration
2125
+ existing_start_time_str = existing.get("start_time")
2126
+ if isinstance(existing_start_time_str, str):
2127
+ existing_start_time = datetime.fromisoformat(existing_start_time_str.replace("Z", "+00:00"))
2128
+ else:
2129
+ existing_start_time = trace.start_time
2130
+
2131
+ recalculated_duration_ms = int((trace.end_time - existing_start_time).total_seconds() * 1000)
2132
+
2133
+ # Build update expression
2134
+ update_parts = [
2135
+ "end_time = :end_time",
2136
+ "duration_ms = :duration_ms",
2137
+ "#status = :status",
2138
+ ]
2139
+ expression_attr_names = {"#status": "status"}
2140
+ expression_attr_values: Dict[str, Any] = {
2141
+ ":end_time": {"S": trace.end_time.isoformat()},
2142
+ ":duration_ms": {"N": str(recalculated_duration_ms)},
2143
+ ":status": {"S": trace.status},
2144
+ }
2145
+
2146
+ if should_update_name:
2147
+ update_parts.append("#name = :name")
2148
+ expression_attr_names["#name"] = "name"
2149
+ expression_attr_values[":name"] = {"S": trace.name}
2150
+
2151
+ if trace.run_id is not None:
2152
+ update_parts.append("run_id = :run_id")
2153
+ expression_attr_values[":run_id"] = {"S": trace.run_id}
2154
+ if trace.session_id is not None:
2155
+ update_parts.append("session_id = :session_id")
2156
+ expression_attr_values[":session_id"] = {"S": trace.session_id}
2157
+ if trace.user_id is not None:
2158
+ update_parts.append("user_id = :user_id")
2159
+ expression_attr_values[":user_id"] = {"S": trace.user_id}
2160
+ if trace.agent_id is not None:
2161
+ update_parts.append("agent_id = :agent_id")
2162
+ expression_attr_values[":agent_id"] = {"S": trace.agent_id}
2163
+ if trace.team_id is not None:
2164
+ update_parts.append("team_id = :team_id")
2165
+ expression_attr_values[":team_id"] = {"S": trace.team_id}
2166
+ if trace.workflow_id is not None:
2167
+ update_parts.append("workflow_id = :workflow_id")
2168
+ expression_attr_values[":workflow_id"] = {"S": trace.workflow_id}
2169
+
2170
+ self.client.update_item(
2171
+ TableName=table_name,
2172
+ Key={"trace_id": {"S": trace.trace_id}},
2173
+ UpdateExpression="SET " + ", ".join(update_parts),
2174
+ ExpressionAttributeNames=expression_attr_names,
2175
+ ExpressionAttributeValues=expression_attr_values,
2176
+ )
2177
+ else:
2178
+ # Create new trace with initialized counters
2179
+ trace_dict = trace.to_dict()
2180
+ trace_dict["total_spans"] = 0
2181
+ trace_dict["error_count"] = 0
2182
+ item = serialize_to_dynamo_item(trace_dict)
2183
+ self.client.put_item(TableName=table_name, Item=item)
2184
+
2185
+ except Exception as e:
2186
+ log_error(f"Error creating trace: {e}")
2187
+
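The name-update rule in the upsert above is easier to read as a small ranking function: only root-level run names can overwrite, and workflow beats team beats agent. A standalone restatement with invented sample values:

```python
def component_level(workflow_id, team_id, agent_id, name) -> int:
    """Rank used to decide whether an incoming trace name should replace the stored one."""
    if ".run" not in name and ".arun" not in name:
        return 0  # not a root-level run name, never wins
    if workflow_id:
        return 3
    if team_id:
        return 2
    if agent_id:
        return 1
    return 0


# A team-level root run outranks an agent-level one:
print(component_level(None, "team_1", None, "Team.run") > component_level(None, None, "agent_1", "Agent.run"))
```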
2188
+ def get_trace(
2189
+ self,
2190
+ trace_id: Optional[str] = None,
2191
+ run_id: Optional[str] = None,
2192
+ ):
2193
+ """Get a single trace by trace_id or other filters.
2194
+
2195
+ Args:
2196
+ trace_id: The unique trace identifier.
2197
+ run_id: Filter by run ID (returns the most recent match).
2198
+
2199
+ Returns:
2200
+ Optional[Trace]: The trace if found, None otherwise.
2201
+
2202
+ Note:
2203
+ If multiple filters are provided, trace_id takes precedence.
2204
+ For other filters, the most recent trace is returned.
2205
+ """
2206
+ try:
2207
+ from agno.tracing.schemas import Trace
2208
+
2209
+ table_name = self._get_table("traces")
2210
+ if table_name is None:
2211
+ return None
2212
+
2213
+ if trace_id:
2214
+ # Direct lookup by primary key
2215
+ response = self.client.get_item(
2216
+ TableName=table_name,
2217
+ Key={"trace_id": {"S": trace_id}},
2218
+ )
2219
+ item = response.get("Item")
2220
+ if item:
2221
+ trace_data = deserialize_from_dynamodb_item(item)
2222
+ trace_data.setdefault("total_spans", 0)
2223
+ trace_data.setdefault("error_count", 0)
2224
+ return Trace.from_dict(trace_data)
2225
+ return None
2226
+
2227
+ elif run_id:
2228
+ # Query using GSI
2229
+ response = self.client.query(
2230
+ TableName=table_name,
2231
+ IndexName="run_id-start_time-index",
2232
+ KeyConditionExpression="run_id = :run_id",
2233
+ ExpressionAttributeValues={":run_id": {"S": run_id}},
2234
+ ScanIndexForward=False, # Descending order
2235
+ Limit=1,
2236
+ )
2237
+ items = response.get("Items", [])
2238
+ if items:
2239
+ trace_data = deserialize_from_dynamodb_item(items[0])
2240
+ # Use stored values (default to 0 if not present)
2241
+ trace_data.setdefault("total_spans", 0)
2242
+ trace_data.setdefault("error_count", 0)
2243
+ return Trace.from_dict(trace_data)
2244
+ return None
2245
+
2246
+ else:
2247
+ log_debug("get_trace called without any filter parameters")
2248
+ return None
2249
+
2250
+ except Exception as e:
2251
+ log_error(f"Error getting trace: {e}")
2252
+ return None
2253
+
2254
+ def get_traces(
2255
+ self,
2256
+ run_id: Optional[str] = None,
2257
+ session_id: Optional[str] = None,
2258
+ user_id: Optional[str] = None,
2259
+ agent_id: Optional[str] = None,
2260
+ team_id: Optional[str] = None,
2261
+ workflow_id: Optional[str] = None,
2262
+ status: Optional[str] = None,
2263
+ start_time: Optional[datetime] = None,
2264
+ end_time: Optional[datetime] = None,
2265
+ limit: Optional[int] = 20,
2266
+ page: Optional[int] = 1,
2267
+ ) -> tuple[List, int]:
2268
+ """Get traces matching the provided filters.
2269
+
2270
+ Args:
2271
+ run_id: Filter by run ID.
2272
+ session_id: Filter by session ID.
2273
+            user_id: Filter by user ID.
+            agent_id: Filter by agent ID.
+            team_id: Filter by team ID.
+            workflow_id: Filter by workflow ID.
+            status: Filter by status (OK, ERROR, UNSET).
+            start_time: Filter traces starting after this datetime.
+            end_time: Filter traces ending before this datetime.
+            limit: Maximum number of traces to return per page.
+            page: Page number (1-indexed).
+
+        Returns:
+            tuple[List[Trace], int]: Tuple of (list of matching traces, total count).
+        """
+        try:
+            from agno.tracing.schemas import Trace
+
+            table_name = self._get_table("traces")
+            if table_name is None:
+                return [], 0
+
+            # Determine if we can use a GSI query or need to scan
+            use_gsi = False
+            gsi_name = None
+            key_condition = None
+            key_values: Dict[str, Any] = {}
+
+            # Check for GSI-compatible filters (only one can be used as key condition)
+            if session_id:
+                use_gsi = True
+                gsi_name = "session_id-start_time-index"
+                key_condition = "session_id = :session_id"
+                key_values[":session_id"] = {"S": session_id}
+            elif user_id:
+                use_gsi = True
+                gsi_name = "user_id-start_time-index"
+                key_condition = "user_id = :user_id"
+                key_values[":user_id"] = {"S": user_id}
+            elif agent_id:
+                use_gsi = True
+                gsi_name = "agent_id-start_time-index"
+                key_condition = "agent_id = :agent_id"
+                key_values[":agent_id"] = {"S": agent_id}
+            elif team_id:
+                use_gsi = True
+                gsi_name = "team_id-start_time-index"
+                key_condition = "team_id = :team_id"
+                key_values[":team_id"] = {"S": team_id}
+            elif workflow_id:
+                use_gsi = True
+                gsi_name = "workflow_id-start_time-index"
+                key_condition = "workflow_id = :workflow_id"
+                key_values[":workflow_id"] = {"S": workflow_id}
+            elif run_id:
+                use_gsi = True
+                gsi_name = "run_id-start_time-index"
+                key_condition = "run_id = :run_id"
+                key_values[":run_id"] = {"S": run_id}
+            elif status:
+                use_gsi = True
+                gsi_name = "status-start_time-index"
+                key_condition = "#status = :status"
+                key_values[":status"] = {"S": status}
+
+            # Build filter expression for additional filters
+            filter_parts = []
+            filter_values: Dict[str, Any] = {}
+            expression_attr_names: Dict[str, str] = {}
+
+            if start_time:
+                filter_parts.append("start_time >= :start_time")
+                filter_values[":start_time"] = {"S": start_time.isoformat()}
+            if end_time:
+                filter_parts.append("end_time <= :end_time")
+                filter_values[":end_time"] = {"S": end_time.isoformat()}
+
+            if status and gsi_name != "status-start_time-index":
+                filter_parts.append("#status = :filter_status")
+                filter_values[":filter_status"] = {"S": status}
+                expression_attr_names["#status"] = "status"
+
+            items = []
+            if use_gsi and gsi_name and key_condition:
+                # Use GSI query
+                query_kwargs: Dict[str, Any] = {
+                    "TableName": table_name,
+                    "IndexName": gsi_name,
+                    "KeyConditionExpression": key_condition,
+                    "ExpressionAttributeValues": {**key_values, **filter_values},
+                    "ScanIndexForward": False,  # Descending order by start_time
+                }
+                if gsi_name == "status-start_time-index":
+                    expression_attr_names["#status"] = "status"
+                if expression_attr_names:
+                    query_kwargs["ExpressionAttributeNames"] = expression_attr_names
+                if filter_parts:
+                    query_kwargs["FilterExpression"] = " AND ".join(filter_parts)
+
+                response = self.client.query(**query_kwargs)
+                items.extend(response.get("Items", []))
+
+                while "LastEvaluatedKey" in response:
+                    query_kwargs["ExclusiveStartKey"] = response["LastEvaluatedKey"]
+                    response = self.client.query(**query_kwargs)
+                    items.extend(response.get("Items", []))
+            else:
+                # Use scan
+                scan_kwargs: Dict[str, Any] = {"TableName": table_name}
+                if filter_parts:
+                    scan_kwargs["FilterExpression"] = " AND ".join(filter_parts)
+                    scan_kwargs["ExpressionAttributeValues"] = filter_values
+                if expression_attr_names:
+                    scan_kwargs["ExpressionAttributeNames"] = expression_attr_names
+
+                response = self.client.scan(**scan_kwargs)
+                items.extend(response.get("Items", []))
+
+                while "LastEvaluatedKey" in response:
+                    scan_kwargs["ExclusiveStartKey"] = response["LastEvaluatedKey"]
+                    response = self.client.scan(**scan_kwargs)
+                    items.extend(response.get("Items", []))
+
+            # Deserialize items
+            traces_data = [deserialize_from_dynamodb_item(item) for item in items]
+
+            # Sort by start_time descending
+            traces_data.sort(key=lambda x: x.get("start_time", ""), reverse=True)
+
+            # Get total count
+            total_count = len(traces_data)
+
+            # Apply pagination
+            offset = (page - 1) * limit if page and limit else 0
+            paginated_data = traces_data[offset : offset + limit] if limit else traces_data
+
+            # Use stored total_spans and error_count (default to 0 if not present)
+            traces = []
+            for trace_data in paginated_data:
+                # Use stored values - these are updated by create_spans
+                trace_data.setdefault("total_spans", 0)
+                trace_data.setdefault("error_count", 0)
+                traces.append(Trace.from_dict(trace_data))
+
+            return traces, total_count
+
+        except Exception as e:
+            log_error(f"Error getting traces: {e}")
+            return [], 0
+
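Note on the pagination in get_traces above: filtering, sorting, and counting all happen client-side after the query or scan, so total_count reflects the full filtered set while only one page of traces is returned. A minimal, self-contained sketch of the slice arithmetic (the sample values below are illustrative only, not part of the package):

# Illustration only: how page/limit map onto the in-memory slice used above.
traces_data = [{"trace_id": f"t{i}"} for i in range(1, 6)]   # 5 hypothetical traces
limit, page = 2, 2
offset = (page - 1) * limit                                  # 2
page_items = traces_data[offset : offset + limit]            # items 3 and 4
total_count = len(traces_data)                               # 5, independent of the page
total_pages = -(-total_count // limit)                       # ceiling division -> 3
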
+    def get_trace_stats(
+        self,
+        user_id: Optional[str] = None,
+        agent_id: Optional[str] = None,
+        team_id: Optional[str] = None,
+        workflow_id: Optional[str] = None,
+        start_time: Optional[datetime] = None,
+        end_time: Optional[datetime] = None,
+        limit: Optional[int] = 20,
+        page: Optional[int] = 1,
+    ) -> tuple[List[Dict[str, Any]], int]:
+        """Get trace statistics grouped by session.
+
+        Args:
+            user_id: Filter by user ID.
+            agent_id: Filter by agent ID.
+            team_id: Filter by team ID.
+            workflow_id: Filter by workflow ID.
+            start_time: Filter sessions with traces created after this datetime.
+            end_time: Filter sessions with traces created before this datetime.
+            limit: Maximum number of sessions to return per page.
+            page: Page number (1-indexed).
+
+        Returns:
+            tuple[List[Dict], int]: Tuple of (list of session stats dicts, total count).
+                Each dict contains: session_id, user_id, agent_id, team_id, workflow_id, total_traces,
+                first_trace_at, last_trace_at.
+        """
+        try:
+            table_name = self._get_table("traces")
+            if table_name is None:
+                return [], 0
+
+            # Fetch all traces and aggregate in memory (DynamoDB doesn't support GROUP BY)
+            scan_kwargs: Dict[str, Any] = {"TableName": table_name}
+
+            # Build filter expression
+            filter_parts = []
+            filter_values: Dict[str, Any] = {}
+
+            if user_id:
+                filter_parts.append("user_id = :user_id")
+                filter_values[":user_id"] = {"S": user_id}
+            if agent_id:
+                filter_parts.append("agent_id = :agent_id")
+                filter_values[":agent_id"] = {"S": agent_id}
+            if team_id:
+                filter_parts.append("team_id = :team_id")
+                filter_values[":team_id"] = {"S": team_id}
+            if workflow_id:
+                filter_parts.append("workflow_id = :workflow_id")
+                filter_values[":workflow_id"] = {"S": workflow_id}
+            if start_time:
+                filter_parts.append("created_at >= :start_time")
+                filter_values[":start_time"] = {"S": start_time.isoformat()}
+            if end_time:
+                filter_parts.append("created_at <= :end_time")
+                filter_values[":end_time"] = {"S": end_time.isoformat()}
+
+            # Filter for records with session_id
+            filter_parts.append("attribute_exists(session_id)")
+
+            if filter_parts:
+                scan_kwargs["FilterExpression"] = " AND ".join(filter_parts)
+            if filter_values:
+                scan_kwargs["ExpressionAttributeValues"] = filter_values
+
+            # Scan all matching traces
+            items = []
+            response = self.client.scan(**scan_kwargs)
+            items.extend(response.get("Items", []))
+
+            while "LastEvaluatedKey" in response:
+                scan_kwargs["ExclusiveStartKey"] = response["LastEvaluatedKey"]
+                response = self.client.scan(**scan_kwargs)
+                items.extend(response.get("Items", []))
+
+            # Aggregate by session_id
+            session_stats: Dict[str, Dict[str, Any]] = {}
+            for item in items:
+                trace_data = deserialize_from_dynamodb_item(item)
+                session_id = trace_data.get("session_id")
+                if not session_id:
+                    continue
+
+                if session_id not in session_stats:
+                    session_stats[session_id] = {
+                        "session_id": session_id,
+                        "user_id": trace_data.get("user_id"),
+                        "agent_id": trace_data.get("agent_id"),
+                        "team_id": trace_data.get("team_id"),
+                        "workflow_id": trace_data.get("workflow_id"),
+                        "total_traces": 0,
+                        "first_trace_at": trace_data.get("created_at"),
+                        "last_trace_at": trace_data.get("created_at"),
+                    }
+
+                session_stats[session_id]["total_traces"] += 1
+
+                created_at = trace_data.get("created_at")
+                if (
+                    created_at
+                    and session_stats[session_id]["first_trace_at"]
+                    and session_stats[session_id]["last_trace_at"]
+                ):
+                    if created_at < session_stats[session_id]["first_trace_at"]:
+                        session_stats[session_id]["first_trace_at"] = created_at
+                    if created_at > session_stats[session_id]["last_trace_at"]:
+                        session_stats[session_id]["last_trace_at"] = created_at
+
+            # Convert to list and sort by last_trace_at descending
+            stats_list = list(session_stats.values())
+            stats_list.sort(key=lambda x: x.get("last_trace_at", ""), reverse=True)
+
+            # Convert datetime strings to datetime objects
+            for stat in stats_list:
+                first_trace_at = stat["first_trace_at"]
+                last_trace_at = stat["last_trace_at"]
+                if isinstance(first_trace_at, str):
+                    stat["first_trace_at"] = datetime.fromisoformat(first_trace_at.replace("Z", "+00:00"))
+                if isinstance(last_trace_at, str):
+                    stat["last_trace_at"] = datetime.fromisoformat(last_trace_at.replace("Z", "+00:00"))
+
+            # Get total count
+            total_count = len(stats_list)
+
+            # Apply pagination
+            offset = (page - 1) * limit if page and limit else 0
+            paginated_stats = stats_list[offset : offset + limit] if limit else stats_list
+
+            return paginated_stats, total_count
+
+        except Exception as e:
+            log_error(f"Error getting trace stats: {e}")
+            return [], 0
+
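Because DynamoDB has no GROUP BY, get_trace_stats above scans and then aggregates in Python. A minimal sketch of that aggregation on plain dicts (in the real code each item first passes through deserialize_from_dynamodb_item; the helper name group_by_session below is hypothetical):

# Illustration only: in-memory "GROUP BY session_id" over deserialized trace dicts.
from typing import Any, Dict, List

def group_by_session(traces: List[Dict[str, Any]]) -> Dict[str, Dict[str, Any]]:
    stats: Dict[str, Dict[str, Any]] = {}
    for t in traces:
        sid = t.get("session_id")
        if not sid:
            continue  # skip traces not tied to a session
        entry = stats.setdefault(
            sid,
            {"session_id": sid, "total_traces": 0,
             "first_trace_at": t.get("created_at"), "last_trace_at": t.get("created_at")},
        )
        entry["total_traces"] += 1
        created = t.get("created_at")
        if created:
            if entry["first_trace_at"] is None or created < entry["first_trace_at"]:
                entry["first_trace_at"] = created
            if entry["last_trace_at"] is None or created > entry["last_trace_at"]:
                entry["last_trace_at"] = created
    return stats
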
+    # --- Spans ---
+    def create_span(self, span: "Span") -> None:
+        """Create a single span in the database.
+
+        Args:
+            span: The Span object to store.
+        """
+        try:
+            table_name = self._get_table("spans", create_table_if_not_found=True)
+            if table_name is None:
+                return
+
+            span_dict = span.to_dict()
+            # Serialize attributes as JSON string
+            if "attributes" in span_dict and isinstance(span_dict["attributes"], dict):
+                span_dict["attributes"] = json.dumps(span_dict["attributes"])
+
+            item = serialize_to_dynamo_item(span_dict)
+            self.client.put_item(TableName=table_name, Item=item)
+
+            # Increment total_spans and error_count on trace
+            traces_table_name = self._get_table("traces")
+            if traces_table_name:
+                try:
+                    update_expr = "ADD total_spans :inc"
+                    expr_values: Dict[str, Any] = {":inc": {"N": "1"}}
+
+                    if span.status_code == "ERROR":
+                        update_expr += ", error_count :inc"
+
+                    self.client.update_item(
+                        TableName=traces_table_name,
+                        Key={"trace_id": {"S": span.trace_id}},
+                        UpdateExpression=update_expr,
+                        ExpressionAttributeValues=expr_values,
+                    )
+                except Exception as update_error:
+                    log_debug(f"Could not update trace span counts: {update_error}")
+
+        except Exception as e:
+            log_error(f"Error creating span: {e}")
+
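The trace counters above rely on DynamoDB's ADD update action, which atomically increments a numeric attribute and creates it when it does not yet exist. A minimal sketch of that call shape with a low-level boto3 client (the table name and trace id below are hypothetical; region and credentials are resolved from the environment):

# Illustration only: atomic counter update via UpdateExpression "ADD".
import boto3

client = boto3.client("dynamodb")  # region/credentials come from the environment
client.update_item(
    TableName="agno_traces",                      # hypothetical table name
    Key={"trace_id": {"S": "trace-123"}},         # hypothetical trace id
    UpdateExpression="ADD total_spans :inc, error_count :inc",
    ExpressionAttributeValues={":inc": {"N": "1"}},
)
# ADD initializes the attribute to 0 before incrementing if it is missing,
# so no separate initialization write is needed for the first span.
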
+    def create_spans(self, spans: List) -> None:
+        """Create multiple spans in the database as a batch.
+
+        Args:
+            spans: List of Span objects to store.
+        """
+        if not spans:
+            return
+
+        try:
+            table_name = self._get_table("spans", create_table_if_not_found=True)
+            if table_name is None:
+                return
+
+            for i in range(0, len(spans), DYNAMO_BATCH_SIZE_LIMIT):
+                batch = spans[i : i + DYNAMO_BATCH_SIZE_LIMIT]
+                put_requests = []
+
+                for span in batch:
+                    span_dict = span.to_dict()
+                    # Serialize attributes as JSON string
+                    if "attributes" in span_dict and isinstance(span_dict["attributes"], dict):
+                        span_dict["attributes"] = json.dumps(span_dict["attributes"])
+
+                    item = serialize_to_dynamo_item(span_dict)
+                    put_requests.append({"PutRequest": {"Item": item}})
+
+                if put_requests:
+                    self.client.batch_write_item(RequestItems={table_name: put_requests})
+
+            # Update trace with total_spans and error_count using ADD (atomic increment)
+            trace_id = spans[0].trace_id
+            spans_count = len(spans)
+            error_count = sum(1 for s in spans if s.status_code == "ERROR")
+
+            traces_table_name = self._get_table("traces")
+            if traces_table_name:
+                try:
+                    # Use ADD for atomic increment - works even if attributes don't exist yet
+                    update_expr = "ADD total_spans :spans_inc"
+                    expr_values: Dict[str, Any] = {":spans_inc": {"N": str(spans_count)}}
+
+                    if error_count > 0:
+                        update_expr += ", error_count :error_inc"
+                        expr_values[":error_inc"] = {"N": str(error_count)}
+
+                    self.client.update_item(
+                        TableName=traces_table_name,
+                        Key={"trace_id": {"S": trace_id}},
+                        UpdateExpression=update_expr,
+                        ExpressionAttributeValues=expr_values,
+                    )
+                except Exception as update_error:
+                    log_debug(f"Could not update trace span counts: {update_error}")
+
+        except Exception as e:
+            log_error(f"Error creating spans batch: {e}")
+
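create_spans writes in chunks of DYNAMO_BATCH_SIZE_LIMIT because batch_write_item accepts at most 25 put/delete requests per call. A minimal sketch of that chunking, with an extra retry of UnprocessedItems that the diffed method does not perform (batch_put and its arguments are hypothetical; items are already-serialized DynamoDB items):

# Illustration only: chunked batch writes within DynamoDB's 25-item limit.
DYNAMO_BATCH_SIZE_LIMIT = 25  # hard cap for batch_write_item

def batch_put(client, table_name, items):
    for i in range(0, len(items), DYNAMO_BATCH_SIZE_LIMIT):
        batch = items[i : i + DYNAMO_BATCH_SIZE_LIMIT]
        request = {table_name: [{"PutRequest": {"Item": it}} for it in batch]}
        response = client.batch_write_item(RequestItems=request)
        # Items DynamoDB could not write come back in UnprocessedItems;
        # resubmitting them (sketch only) avoids silent data loss under throttling.
        while response.get("UnprocessedItems"):
            response = client.batch_write_item(RequestItems=response["UnprocessedItems"])
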
+    def get_span(self, span_id: str):
+        """Get a single span by its span_id.
+
+        Args:
+            span_id: The unique span identifier.
+
+        Returns:
+            Optional[Span]: The span if found, None otherwise.
+        """
+        try:
+            from agno.tracing.schemas import Span
+
+            table_name = self._get_table("spans")
+            if table_name is None:
+                return None
+
+            response = self.client.get_item(
+                TableName=table_name,
+                Key={"span_id": {"S": span_id}},
+            )
+
+            item = response.get("Item")
+            if item:
+                span_data = deserialize_from_dynamodb_item(item)
+                # Deserialize attributes from JSON string
+                if "attributes" in span_data and isinstance(span_data["attributes"], str):
+                    span_data["attributes"] = json.loads(span_data["attributes"])
+                return Span.from_dict(span_data)
+            return None
+
+        except Exception as e:
+            log_error(f"Error getting span: {e}")
+            return None
+
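Span attributes take a JSON-string round trip: create_span/create_spans serialize the nested dict with json.dumps before writing, and get_span/get_spans decode it with json.loads on read. A tiny self-contained sketch of that round trip (the attribute values are hypothetical):

# Illustration only: the attributes serialization used by the span methods above.
import json

attributes = {"model": "gpt-4o", "tokens": {"input": 12, "output": 34}}  # hypothetical
stored = json.dumps(attributes)   # written into the DynamoDB item as a plain string
restored = json.loads(stored)     # what the read path hands back to Span.from_dict
assert restored == attributes
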
+    def get_spans(
+        self,
+        trace_id: Optional[str] = None,
+        parent_span_id: Optional[str] = None,
+        limit: Optional[int] = 1000,
+    ) -> List:
+        """Get spans matching the provided filters.
+
+        Args:
+            trace_id: Filter by trace ID.
+            parent_span_id: Filter by parent span ID.
+            limit: Maximum number of spans to return.
+
+        Returns:
+            List[Span]: List of matching spans.
+        """
+        try:
+            from agno.tracing.schemas import Span
+
+            table_name = self._get_table("spans")
+            if table_name is None:
+                return []
+
+            items = []
+
+            if trace_id:
+                # Use GSI query
+                query_kwargs: Dict[str, Any] = {
+                    "TableName": table_name,
+                    "IndexName": "trace_id-start_time-index",
+                    "KeyConditionExpression": "trace_id = :trace_id",
+                    "ExpressionAttributeValues": {":trace_id": {"S": trace_id}},
+                }
+                if limit:
+                    query_kwargs["Limit"] = limit
+
+                response = self.client.query(**query_kwargs)
+                items.extend(response.get("Items", []))
+
+                while "LastEvaluatedKey" in response and (limit is None or len(items) < limit):
+                    query_kwargs["ExclusiveStartKey"] = response["LastEvaluatedKey"]
+                    response = self.client.query(**query_kwargs)
+                    items.extend(response.get("Items", []))
+
+            elif parent_span_id:
+                # Use GSI query
+                query_kwargs = {
+                    "TableName": table_name,
+                    "IndexName": "parent_span_id-start_time-index",
+                    "KeyConditionExpression": "parent_span_id = :parent_span_id",
+                    "ExpressionAttributeValues": {":parent_span_id": {"S": parent_span_id}},
+                }
+                if limit:
+                    query_kwargs["Limit"] = limit
+
+                response = self.client.query(**query_kwargs)
+                items.extend(response.get("Items", []))
+
+                while "LastEvaluatedKey" in response and (limit is None or len(items) < limit):
+                    query_kwargs["ExclusiveStartKey"] = response["LastEvaluatedKey"]
+                    response = self.client.query(**query_kwargs)
+                    items.extend(response.get("Items", []))
+
+            else:
+                # Scan all spans
+                scan_kwargs: Dict[str, Any] = {"TableName": table_name}
+                if limit:
+                    scan_kwargs["Limit"] = limit
+
+                response = self.client.scan(**scan_kwargs)
+                items.extend(response.get("Items", []))
+
+                while "LastEvaluatedKey" in response and (limit is None or len(items) < limit):
+                    scan_kwargs["ExclusiveStartKey"] = response["LastEvaluatedKey"]
+                    response = self.client.scan(**scan_kwargs)
+                    items.extend(response.get("Items", []))
+
+            # Deserialize items
+            spans = []
+            for item in items[:limit] if limit else items:
+                span_data = deserialize_from_dynamodb_item(item)
+                # Deserialize attributes from JSON string
+                if "attributes" in span_data and isinstance(span_data["attributes"], str):
+                    span_data["attributes"] = json.loads(span_data["attributes"])
+                spans.append(Span.from_dict(span_data))
+
+            return spans
+
+        except Exception as e:
+            log_error(f"Error getting spans: {e}")
+            return []
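
The query and scan branches above repeat the same pagination idiom: DynamoDB returns at most one page per call and signals more data via LastEvaluatedKey, which is fed back as ExclusiveStartKey. A sketch of that loop factored into a generic helper (iter_all_items is hypothetical, not a helper defined in the package; call is any bound client.query or client.scan):

# Illustration only: generic LastEvaluatedKey pagination over query()/scan().
from typing import Any, Dict, Iterator

def iter_all_items(call, **kwargs: Any) -> Iterator[Dict[str, Any]]:
    """Yield every item from a paginated DynamoDB query() or scan() call."""
    response = call(**kwargs)
    yield from response.get("Items", [])
    while "LastEvaluatedKey" in response:
        kwargs["ExclusiveStartKey"] = response["LastEvaluatedKey"]
        response = call(**kwargs)
        yield from response.get("Items", [])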